diff --git a/assets/dist/ort-wasm-simd-threaded.jsep.mjs b/assets/dist/ort-wasm-simd-threaded.jsep.mjs index 0e8c0c7..1960118 100644 --- a/assets/dist/ort-wasm-simd-threaded.jsep.mjs +++ b/assets/dist/ort-wasm-simd-threaded.jsep.mjs @@ -7,111 +7,111 @@ async function(moduleArg = {}) { var moduleRtn; function e(){m.buffer!=q.buffer&&u();return q}function w(){m.buffer!=q.buffer&&u();return ba}function ca(){m.buffer!=q.buffer&&u();return da}function ea(){m.buffer!=q.buffer&&u();return fa}function z(){m.buffer!=q.buffer&&u();return ha}function A(){m.buffer!=q.buffer&&u();return ia}function ja(){m.buffer!=q.buffer&&u();return ka}function la(){m.buffer!=q.buffer&&u();return ma} -var B=Object.assign({},moduleArg),na,oa,pa=new Promise((a,b)=>{na=a;oa=b}),qa="object"==typeof window,C="function"==typeof importScripts,D="object"==typeof process&&"object"==typeof process.versions&&"string"==typeof process.versions.node,E=C&&"em-pthread"==self.name;if(D){const {createRequire:a}=await import("module");var require=a(import.meta.url),ra=require("worker_threads");global.Worker=ra.Worker;E=(C=!ra.ic)&&"em-pthread"==ra.workerData}"use strict"; -B.mountExternalData=(a,b)=>{(B.Cb||(B.Cb=new Map)).set(a,b)};B.unmountExternalData=()=>{delete B.Cb};var SharedArrayBuffer=globalThis.SharedArrayBuffer??(new WebAssembly.Memory({initial:0,maximum:0,shared:!0})).buffer.constructor;"use strict"; -let ta=()=>{const a=(c,d,f)=>(...g)=>{const k=F,l=d?.();g=c(...g);const p=d?.();l!==p&&(c=p,f(l),d=f=null);return F!=k?sa():g},b=c=>async(...d)=>{try{if(B.Bb)throw Error("Session already started");const f=B.Bb={Zb:d[0],errors:[]},g=await c(...d);if(B.Bb!==f)throw Error("Session mismatch");B.Jb?.flush();const k=f.errors;if(0p);if(0B._OrtCreateSession, +var B=Object.assign({},moduleArg),na,oa,pa=new Promise((a,b)=>{na=a;oa=b}),qa="object"==typeof window,C="function"==typeof importScripts,D="object"==typeof process&&"object"==typeof process.versions&&"string"==typeof process.versions.node,E=C&&"em-pthread"==self.name;if(D){const {createRequire:a}=await import("module");var require=a(import.meta.url),ra=require("worker_threads");global.Worker=ra.Worker;E=(C=!ra.lc)&&"em-pthread"==ra.workerData}"use strict"; +B.mountExternalData=(a,b)=>{(B.Fb||(B.Fb=new Map)).set(a,b)};B.unmountExternalData=()=>{delete B.Fb};var SharedArrayBuffer=globalThis.SharedArrayBuffer??(new WebAssembly.Memory({initial:0,maximum:0,shared:!0})).buffer.constructor;"use strict"; +let ta=()=>{const a=(c,d,f)=>(...g)=>{const k=F,l=d?.();g=c(...g);const p=d?.();l!==p&&(c=p,f(l),d=f=null);return F!=k?sa():g},b=c=>async(...d)=>{try{if(B.Eb)throw Error("Session already started");const f=B.Eb={bc:d[0],errors:[]},g=await c(...d);if(B.Eb!==f)throw Error("Session mismatch");B.Mb?.flush();const k=f.errors;if(0p);if(0B._OrtCreateSession, c=>B._OrtCreateSession=c);B._OrtRun=b(a(B._OrtRun,()=>B._OrtRun,c=>B._OrtRun=c));B._OrtRunWithBinding=b(a(B._OrtRunWithBinding,()=>B._OrtRunWithBinding,c=>B._OrtRunWithBinding=c));B._OrtBindInput=a(B._OrtBindInput,()=>B._OrtBindInput,c=>B._OrtBindInput=c);ta=void 0}; -B.jsepInit=(a,b)=>{ta?.();if("webgpu"===a){[B.Jb,B.Qb,B.Ub,B.Kb,B.Tb,B.gb,B.Vb,B.Xb,B.Rb,B.Sb,B.Wb]=b;const c=B.Jb;B.jsepRegisterBuffer=(d,f,g,k)=>c.registerBuffer(d,f,g,k);B.jsepGetBuffer=d=>c.getBuffer(d);B.jsepCreateDownloader=(d,f,g)=>c.createDownloader(d,f,g);B.jsepOnReleaseSession=d=>{c.onReleaseSession(d)};B.jsepOnRunStart=d=>c.onRunStart(d)}};var ua=Object.assign({},B),va="./this.program",wa=(a,b)=>{throw b;},G="",xa,ya,za; 
+B.jsepInit=(a,b)=>{ta?.();if("webgpu"===a){[B.Mb,B.Tb,B.Xb,B.Nb,B.Wb,B.jb,B.Yb,B.$b,B.Ub,B.Vb,B.Zb]=b;const c=B.Mb;B.jsepRegisterBuffer=(d,f,g,k)=>c.registerBuffer(d,f,g,k);B.jsepGetBuffer=d=>c.getBuffer(d);B.jsepCreateDownloader=(d,f,g)=>c.createDownloader(d,f,g);B.jsepOnReleaseSession=d=>{c.onReleaseSession(d)};B.jsepOnRunStart=d=>c.onRunStart(d)}};var ua=Object.assign({},B),va="./this.program",wa=(a,b)=>{throw b;},G="",xa,ya,za; if(D){var fs=require("fs"),Aa=require("path");G=require("url").fileURLToPath(new URL("./",import.meta.url));xa=(a,b)=>{a=Ba(a)?new URL(a):Aa.normalize(a);return fs.readFileSync(a,b?void 0:"utf8")};za=a=>{a=xa(a,!0);a.buffer||(a=new Uint8Array(a));return a};ya=(a,b,c,d=!0)=>{a=Ba(a)?new URL(a):Aa.normalize(a);fs.readFile(a,d?void 0:"utf8",(f,g)=>{f?c(f):b(d?g.buffer:g)})};!B.thisProgram&&1{process.exitCode= a;throw b;}}else if(qa||C)C?G=self.location.href:"undefined"!=typeof document&&document.currentScript&&(G=document.currentScript.src),_scriptName&&(G=_scriptName),G.startsWith("blob:")?G="":G=G.substr(0,G.replace(/[?#].*/,"").lastIndexOf("/")+1),D||(xa=a=>{var b=new XMLHttpRequest;b.open("GET",a,!1);b.send(null);return b.responseText},C&&(za=a=>{var b=new XMLHttpRequest;b.open("GET",a,!1);b.responseType="arraybuffer";b.send(null);return new Uint8Array(b.response)}),ya=(a,b,c)=>{var d=new XMLHttpRequest; d.open("GET",a,!0);d.responseType="arraybuffer";d.onload=()=>{200==d.status||0==d.status&&d.response?b(d.response):c()};d.onerror=c;d.send(null)});D&&"undefined"==typeof performance&&(global.performance=require("perf_hooks").performance);var Ca=console.log.bind(console),Da=console.error.bind(console);D&&(Ca=(...a)=>fs.writeSync(1,a.join(" ")+"\n"),Da=(...a)=>fs.writeSync(2,a.join(" ")+"\n"));var Ea=Ca,H=Da;Object.assign(B,ua);ua=null; -if(E){var Fa;if(D){var Ga=ra.parentPort;Ga.on("message",b=>onmessage({data:b}));Object.assign(globalThis,{self:global,importScripts:()=>{},postMessage:b=>Ga.postMessage(b),performance:global.performance||{now:Date.now}})}var Ha=!1;H=function(...b){b=b.join(" ");D?fs.writeSync(2,b+"\n"):console.error(b)};self.alert=function(...b){postMessage({Ib:"alert",text:b.join(" "),jc:Ia()})};B.instantiateWasm=(b,c)=>new Promise(d=>{Fa=f=>{f=new WebAssembly.Instance(f,Ja());c(f);d()}});self.onunhandledrejection= -b=>{throw b.reason||b;};function a(b){try{var c=b.data,d=c.cmd;if("load"===d){let f=[];self.onmessage=g=>f.push(g);self.startWorker=()=>{postMessage({cmd:"loaded"});for(let g of f)a(g);self.onmessage=a};for(const g of c.handlers)if(!B[g]||B[g].proxy)B[g]=(...k)=>{postMessage({Ib:"callHandler",hc:g,args:k})},"print"==g&&(Ea=B[g]),"printErr"==g&&(H=B[g]);m=c.wasmMemory;u();Fa(c.wasmModule)}else if("run"===d){Ka(c.pthread_ptr,0,0,1,0,0);La(c.pthread_ptr);Ma();Na();Ha||(Oa(),Ha=!0);try{Pa(c.start_routine, +if(E){var Fa;if(D){var Ga=ra.parentPort;Ga.on("message",b=>onmessage({data:b}));Object.assign(globalThis,{self:global,importScripts:()=>{},postMessage:b=>Ga.postMessage(b),performance:global.performance||{now:Date.now}})}var Ha=!1;H=function(...b){b=b.join(" ");D?fs.writeSync(2,b+"\n"):console.error(b)};self.alert=function(...b){postMessage({Lb:"alert",text:b.join(" "),mc:Ia()})};B.instantiateWasm=(b,c)=>new Promise(d=>{Fa=f=>{f=new WebAssembly.Instance(f,Ja());c(f);d()}});self.onunhandledrejection= +b=>{throw b.reason||b;};function a(b){try{var c=b.data,d=c.cmd;if("load"===d){let f=[];self.onmessage=g=>f.push(g);self.startWorker=()=>{postMessage({cmd:"loaded"});for(let g of f)a(g);self.onmessage=a};for(const g of 
c.handlers)if(!B[g]||B[g].proxy)B[g]=(...k)=>{postMessage({Lb:"callHandler",kc:g,args:k})},"print"==g&&(Ea=B[g]),"printErr"==g&&(H=B[g]);m=c.wasmMemory;u();Fa(c.wasmModule)}else if("run"===d){Ka(c.pthread_ptr,0,0,1,0,0);La(c.pthread_ptr);Ma();Na();Ha||(Oa(),Ha=!0);try{Pa(c.start_routine, c.arg)}catch(f){if("unwind"!=f)throw f;}}else"cancel"===d?Ia()&&Qa(-1):"setimmediate"!==c.target&&("checkMailbox"===d?Ha&&Ra():d&&(H(`worker: received unknown command ${d}`),H(c)))}catch(f){throw Sa(),f;}}self.onmessage=a}var m,Ta,I=!1,Ua,q,ba,da,fa,ha,ia,ka,J,Va,ma; function u(){var a=m.buffer;B.HEAP8=q=new Int8Array(a);B.HEAP16=da=new Int16Array(a);B.HEAPU8=ba=new Uint8Array(a);B.HEAPU16=fa=new Uint16Array(a);B.HEAP32=ha=new Int32Array(a);B.HEAPU32=ia=new Uint32Array(a);B.HEAPF32=ka=new Float32Array(a);B.HEAPF64=ma=new Float64Array(a);B.HEAP64=J=new BigInt64Array(a);B.HEAPU64=Va=new BigUint64Array(a)} if(!E){if(B.wasmMemory)m=B.wasmMemory;else if(m=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0}),!(m.buffer instanceof SharedArrayBuffer))throw H("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"),D&&H("(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and/or recent version)"),Error("bad memory"); u()}var Wa=[],Xa=[],Ya=[],Za=0,$a=null,ab=null;function bb(){Za--;if(0==Za&&(null!==$a&&(clearInterval($a),$a=null),ab)){var a=ab;ab=null;a()}}function cb(a){a="Aborted("+a+")";H(a);I=!0;Ua=1;a=new WebAssembly.RuntimeError(a+". Build with -sASSERTIONS for more info.");oa(a);throw a;}var db=a=>a.startsWith("data:application/octet-stream;base64,"),Ba=a=>a.startsWith("file://"),eb;function fb(a){if(za)return za(a);throw"both async and sync fetching of the wasm failed";} function gb(a){if(qa||C){if("function"==typeof fetch&&!Ba(a))return fetch(a,{credentials:"same-origin"}).then(b=>{if(!b.ok)throw`failed to load wasm binary file at '${a}'`;return b.arrayBuffer()}).catch(()=>fb(a));if(ya)return new Promise((b,c)=>{ya(a,d=>b(new Uint8Array(d)),c)})}return Promise.resolve().then(()=>fb(a))}function hb(a,b,c){return gb(a).then(d=>WebAssembly.instantiate(d,b)).then(c,d=>{H(`failed to asynchronously prepare wasm: ${d}`);cb(d)})} function ib(a,b){var c=eb;return"function"!=typeof WebAssembly.instantiateStreaming||db(c)||Ba(c)||D||"function"!=typeof fetch?hb(c,a,b):fetch(c,{credentials:"same-origin"}).then(d=>WebAssembly.instantiateStreaming(d,a).then(b,function(f){H(`wasm streaming compile failed: ${f}`);H("falling back to ArrayBuffer instantiation");return hb(c,a,b)}))} -function Ja(){jb={wa:kb,b:lb,Y:mb,y:nb,ma:ob,U:pb,W:qb,na:rb,ka:sb,da:tb,ja:ub,I:vb,V:wb,S:xb,la:yb,T:zb,sa:Ab,C:Bb,M:Cb,L:Db,B:Eb,s:Fb,p:Gb,D:Hb,x:Ib,N:Jb,ra:Kb,ga:Lb,Q:Mb,Z:Nb,E:Ob,fa:La,pa:Pb,u:Qb,A:Rb,o:Sb,k:Tb,c:Ub,n:Vb,j:Wb,xa:Xb,r:Yb,d:Zb,v:$b,m:ac,g:bc,l:cc,i:dc,h:ec,e:fc,aa:gc,ba:hc,ca:ic,_:jc,$:kc,P:lc,f:mc,K:nc,F:oc,J:pc,ta:qc,oa:rc,R:sc,t:tc,w:uc,O:vc,va:wc,ua:xc,ha:yc,ia:zc,X:Ac,z:Bc,H:Cc,ea:Dc,G:Ec,a:m,qa:Fc,q:Gc};return{a:jb}} -var Hc={1336340:(a,b,c,d)=>{if("undefined"==typeof B||!B.Cb)return 1;a=K(a>>>0);a.startsWith("./")&&(a=a.substring(2));a=B.Cb.get(a);if(!a)return 2;b>>>=0;c>>>=0;d>>>=0;if(b+c>a.byteLength)return 3;try{return w().set(a.subarray(b,b+c),d>>>0),0}catch{return 
4}},1336841:()=>{B.Rb()},1336872:()=>{B.Sb()},1336901:()=>{B.Wb()},1336926:a=>B.Qb(a),1336959:a=>B.Ub(a),1336991:(a,b,c)=>{B.Kb(a,b,c,!0)},1337030:(a,b,c)=>{B.Kb(a,b,c)},1337063:()=>"undefined"!==typeof wasmOffsetConverter,1337120:a=>{B.gb("Abs", -a,void 0)},1337171:a=>{B.gb("Neg",a,void 0)},1337222:a=>{B.gb("Floor",a,void 0)},1337275:a=>{B.gb("Ceil",a,void 0)},1337327:a=>{B.gb("Reciprocal",a,void 0)},1337385:a=>{B.gb("Sqrt",a,void 0)},1337437:a=>{B.gb("Exp",a,void 0)},1337488:a=>{B.gb("Erf",a,void 0)},1337539:a=>{B.gb("Sigmoid",a,void 0)},1337594:(a,b,c)=>{B.gb("HardSigmoid",a,{alpha:b,beta:c})},1337673:a=>{B.gb("Log",a,void 0)},1337724:a=>{B.gb("Sin",a,void 0)},1337775:a=>{B.gb("Cos",a,void 0)},1337826:a=>{B.gb("Tan",a,void 0)},1337877:a=> -{B.gb("Asin",a,void 0)},1337929:a=>{B.gb("Acos",a,void 0)},1337981:a=>{B.gb("Atan",a,void 0)},1338033:a=>{B.gb("Sinh",a,void 0)},1338085:a=>{B.gb("Cosh",a,void 0)},1338137:a=>{B.gb("Asinh",a,void 0)},1338190:a=>{B.gb("Acosh",a,void 0)},1338243:a=>{B.gb("Atanh",a,void 0)},1338296:a=>{B.gb("Tanh",a,void 0)},1338348:a=>{B.gb("Not",a,void 0)},1338399:(a,b,c)=>{B.gb("Clip",a,{min:b,max:c})},1338468:a=>{B.gb("Clip",a,void 0)},1338520:(a,b)=>{B.gb("Elu",a,{alpha:b})},1338578:a=>{B.gb("Relu",a,void 0)},1338630:(a, -b)=>{B.gb("LeakyRelu",a,{alpha:b})},1338694:(a,b)=>{B.gb("ThresholdedRelu",a,{alpha:b})},1338764:(a,b)=>{B.gb("Cast",a,{to:b})},1338822:a=>{B.gb("Add",a,void 0)},1338873:a=>{B.gb("Sub",a,void 0)},1338924:a=>{B.gb("Mul",a,void 0)},1338975:a=>{B.gb("Div",a,void 0)},1339026:a=>{B.gb("Pow",a,void 0)},1339077:a=>{B.gb("Equal",a,void 0)},1339130:a=>{B.gb("Greater",a,void 0)},1339185:a=>{B.gb("GreaterOrEqual",a,void 0)},1339247:a=>{B.gb("Less",a,void 0)},1339299:a=>{B.gb("LessOrEqual",a,void 0)},1339358:(a, -b,c,d,f)=>{B.gb("ReduceMean",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1339517:(a,b,c,d,f)=>{B.gb("ReduceMax",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1339675:(a,b,c,d,f)=>{B.gb("ReduceMin",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1339833:(a,b,c,d,f)=>{B.gb("ReduceProd",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})}, -1339992:(a,b,c,d,f)=>{B.gb("ReduceSum",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1340150:(a,b,c,d,f)=>{B.gb("ReduceL1",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1340307:(a,b,c,d,f)=>{B.gb("ReduceL2",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1340464:(a,b,c,d,f)=>{B.gb("ReduceLogSum",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>> -0)):[]})},1340625:(a,b,c,d,f)=>{B.gb("ReduceSumSquare",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1340789:(a,b,c,d,f)=>{B.gb("ReduceLogSumExp",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1340953:a=>{B.gb("Where",a,void 0)},1341006:(a,b,c)=>{B.gb("Transpose",a,{perm:b?Array.from(z().subarray(b>>>0,c>>>0)):[]})},1341114:(a,b,c,d)=>{B.gb("DepthToSpace",a,{blocksize:b,mode:K(c),format:d?"NHWC":"NCHW"})},1341247:(a, 
-b,c,d)=>{B.gb("DepthToSpace",a,{blocksize:b,mode:K(c),format:d?"NHWC":"NCHW"})},1341380:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t)=>{B.gb("ConvTranspose",a,{format:p?"NHWC":"NCHW",autoPad:b,dilations:[c],group:d,kernelShape:[f],pads:[g,k],strides:[l],wIsConst:()=>!!e()[n>>>0],outputPadding:r?Array.from(z().subarray(r>>>0,x>>>0)):[],outputShape:y?Array.from(z().subarray(y>>>0,h>>>0)):[],activation:K(t)})},1341781:(a,b,c,d,f,g,k,l,p,n,r,x,y,h)=>{B.gb("ConvTranspose",a,{format:l?"NHWC":"NCHW",autoPad:b,dilations:Array.from(z().subarray(c>>> -0,(c>>>0)+2>>>0)),group:d,kernelShape:Array.from(z().subarray(f>>>0,(f>>>0)+2>>>0)),pads:Array.from(z().subarray(g>>>0,(g>>>0)+4>>>0)),strides:Array.from(z().subarray(k>>>0,(k>>>0)+2>>>0)),wIsConst:()=>!!e()[p>>>0],outputPadding:n?Array.from(z().subarray(n>>>0,r>>>0)):[],outputShape:x?Array.from(z().subarray(x>>>0,y>>>0)):[],activation:K(h)})},1342346:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t)=>{B.gb("ConvTranspose",a,{format:p?"NHWC":"NCHW",autoPad:b,dilations:[c],group:d,kernelShape:[f],pads:[g,k],strides:[l], -wIsConst:()=>!!e()[n>>>0],outputPadding:r?Array.from(z().subarray(r>>>0,x>>>0)):[],outputShape:y?Array.from(z().subarray(y>>>0,h>>>0)):[],activation:K(t)})},1342747:(a,b,c,d,f,g,k,l,p,n,r,x,y,h)=>{B.gb("ConvTranspose",a,{format:l?"NHWC":"NCHW",autoPad:b,dilations:Array.from(z().subarray(c>>>0,(c>>>0)+2>>>0)),group:d,kernelShape:Array.from(z().subarray(f>>>0,(f>>>0)+2>>>0)),pads:Array.from(z().subarray(g>>>0,(g>>>0)+4>>>0)),strides:Array.from(z().subarray(k>>>0,(k>>>0)+2>>>0)),wIsConst:()=>!!e()[p>>> -0],outputPadding:n?Array.from(z().subarray(n>>>0,r>>>0)):[],outputShape:x?Array.from(z().subarray(x>>>0,y>>>0)):[],activation:K(h)})},1343312:(a,b)=>{B.gb("GlobalAveragePool",a,{format:b?"NHWC":"NCHW"})},1343403:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.gb("AveragePool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1343687:(a,b)=>{B.gb("GlobalAveragePool",a,{format:b?"NHWC":"NCHW"})},1343778:(a,b, -c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.gb("AveragePool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1344062:(a,b)=>{B.gb("GlobalMaxPool",a,{format:b?"NHWC":"NCHW"})},1344149:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.gb("MaxPool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1344429:(a,b)=>{B.gb("GlobalMaxPool", -a,{format:b?"NHWC":"NCHW"})},1344516:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.gb("MaxPool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1344796:(a,b,c,d,f)=>{B.gb("Gemm",a,{alpha:b,beta:c,transA:d,transB:f})},1344900:a=>{B.gb("MatMul",a,void 0)},1344954:(a,b,c,d)=>{B.gb("ArgMax",a,{keepDims:!!b,selectLastIndex:!!c,axis:d})},1345062:(a,b,c,d)=>{B.gb("ArgMin",a,{keepDims:!!b,selectLastIndex:!!c,axis:d})}, -1345170:(a,b)=>{B.gb("Softmax",a,{axis:b})},1345233:(a,b)=>{B.gb("Concat",a,{axis:b})},1345293:(a,b,c,d,f)=>{B.gb("Split",a,{axis:b,numOutputs:c,splitSizes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1345433:a=>{B.gb("Expand",a,void 
0)},1345487:(a,b)=>{B.gb("Gather",a,{axis:Number(b)})},1345558:(a,b)=>{B.gb("GatherElements",a,{axis:Number(b)})},1345637:(a,b,c,d,f,g,k,l,p,n,r)=>{B.gb("Resize",a,{antialias:b,axes:c?Array.from(z().subarray(c>>>0,d>>>0)):[],coordinateTransformMode:K(f),cubicCoeffA:g, -excludeOutside:k,extrapolationValue:l,keepAspectRatioPolicy:K(p),mode:K(n),nearestMode:K(r)})},1345983:(a,b,c,d,f,g,k)=>{B.gb("Slice",a,{starts:b?Array.from(z().subarray(b>>>0,c>>>0)):[],ends:d?Array.from(z().subarray(d>>>0,f>>>0)):[],axes:g?Array.from(z().subarray(g>>>0,k>>>0)):[]})},1346199:a=>{B.gb("Tile",a,void 0)},1346251:(a,b,c)=>{B.gb("InstanceNormalization",a,{epsilon:b,format:c?"NHWC":"NCHW"})},1346365:(a,b,c)=>{B.gb("InstanceNormalization",a,{epsilon:b,format:c?"NHWC":"NCHW"})},1346479:a=> -{B.gb("Range",a,void 0)},1346532:(a,b)=>{B.gb("Einsum",a,{equation:K(b)})},1346613:(a,b,c,d,f)=>{B.gb("Pad",a,{mode:b,value:c,pads:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1346740:(a,b,c,d,f,g)=>{B.gb("BatchNormalization",a,{epsilon:b,momentum:c,spatial:!!f,trainingMode:!!d,format:g?"NHWC":"NCHW"})},1346909:(a,b,c,d,f,g)=>{B.gb("BatchNormalization",a,{epsilon:b,momentum:c,spatial:!!f,trainingMode:!!d,format:g?"NHWC":"NCHW"})},1347078:(a,b,c)=>{B.gb("CumSum",a,{exclusive:Number(b),reverse:Number(c)})}, -1347175:(a,b,c,d,f,g,k,l,p)=>{B.gb("Attention",a,{numHeads:b,isUnidirectional:c,maskFilterValue:d,scale:f,doRotary:g,qkvHiddenSizes:k?Array.from(z().subarray(Number(l)>>>0,Number(l)+k>>>0)):[],pastPresentShareBuffer:!!p})},1347447:a=>{B.gb("BiasAdd",a,void 0)},1347502:a=>{B.gb("BiasSplitGelu",a,void 0)},1347563:a=>{B.gb("FastGelu",a,void 0)},1347619:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.gb("Conv",a,{format:x?"NHWC":"NCHW",auto_pad:b,dilations:c?Array.from(z().subarray(c>>>0,d>>>0)):[],group:f,kernel_shape:g? 
-Array.from(z().subarray(g>>>0,k>>>0)):[],pads:l?Array.from(z().subarray(l>>>0,p>>>0)):[],strides:n?Array.from(z().subarray(n>>>0,r>>>0)):[],w_is_const:()=>!!e()[y>>>0],activation:K(h),activation_params:t?Array.from(ja().subarray(t>>>0,v>>>0)):[]})},1348115:a=>{B.gb("Gelu",a,void 0)},1348167:(a,b,c,d)=>{B.gb("GroupQueryAttention",a,{numHeads:b,kvNumHeads:c,scale:d})},1348280:(a,b,c,d)=>{B.gb("LayerNormalization",a,{axis:b,epsilon:c,simplified:!!d})},1348391:(a,b,c,d)=>{B.gb("LayerNormalization",a, -{axis:b,epsilon:c,simplified:!!d})},1348502:(a,b,c,d,f,g)=>{B.gb("MatMulNBits",a,{k:b,n:c,accuracyLevel:d,bits:f,blockSize:g})},1348629:(a,b,c,d,f,g)=>{B.gb("MultiHeadAttention",a,{numHeads:b,isUnidirectional:c,maskFilterValue:d,scale:f,doRotary:g})},1348788:(a,b)=>{B.gb("QuickGelu",a,{alpha:b})},1348852:(a,b,c,d,f)=>{B.gb("RotaryEmbedding",a,{interleaved:!!b,numHeads:c,rotaryEmbeddingDim:d,scale:f})},1348991:(a,b,c)=>{B.gb("SkipLayerNormalization",a,{epsilon:b,simplified:!!c})},1349093:a=>{B.Vb(a)}, -1349127:(a,b)=>B.Xb(a,b,B.Bb.Zb,B.Bb.errors),1349239:(a,b,c)=>{B.gb("SkipLayerNormalization",a,{epsilon:b,simplified:!!c})}};function kb(a,b,c){return Ic(async()=>{await B.Tb(a,b,c)})}function Jc(a){this.name="ExitStatus";this.message=`Program terminated with exit(${a})`;this.status=a} -var Kc=a=>{a.terminate();a.onmessage=()=>{}},Nc=a=>{0==L.length&&(Lc(),Mc(L[0]));var b=L.pop();if(!b)return 6;M.push(b);N[a.xb]=b;b.xb=a.xb;var c={cmd:"run",start_routine:a.$b,arg:a.Mb,pthread_ptr:a.xb};D&&b.unref();b.postMessage(c,a.ec);return 0},O=0,P=(a,b,...c)=>{for(var d=2*c.length,f=Oc(),g=Pc(8*d),k=g>>>3,l=0;l>>0]=p)}a=Qc(a,0,d,g,b);Rc(f);return a}; -function Sc(a){if(E)return P(0,1,a);Ua=a;if(!(0{Ua=a;if(E)throw Uc(a),"unwind";Sc(a)},L=[],M=[],Vc=[],N={};function Wc(){for(var a=B.numThreads-1;a--;)Lc();Wa.unshift(()=>{Za++;Xc(()=>bb())})}var Zc=a=>{var b=a.xb;delete N[b];L.push(a);M.splice(M.indexOf(a),1);a.xb=0;Yc(b)};function Na(){Vc.forEach(a=>a())} -var Mc=a=>new Promise(b=>{a.onmessage=g=>{g=g.data;var k=g.cmd;if(g.targetThread&&g.targetThread!=Ia()){var l=N[g.targetThread];l?l.postMessage(g,g.transferList):H(`Internal error! 
Worker sent a message "${k}" to target pthread ${g.targetThread}, but that thread no longer exists!`)}else if("checkMailbox"===k)Ra();else if("spawnThread"===k)Nc(g);else if("cleanupThread"===k)Zc(N[g.thread]);else if("killThread"===k)g=g.thread,k=N[g],delete N[g],Kc(k),Yc(g),M.splice(M.indexOf(k),1),k.xb=0;else if("cancelThread"=== -k)N[g.thread].postMessage({cmd:"cancel"});else if("loaded"===k)a.loaded=!0,D&&!a.xb&&a.unref(),b(a);else if("alert"===k)alert(`Thread ${g.threadId}: ${g.text}`);else if("setimmediate"===g.target)a.postMessage(g);else if("callHandler"===k)B[g.handler](...g.args);else k&&H(`worker sent an unknown command ${k}`)};a.onerror=g=>{H(`${"worker sent an error!"} ${g.filename}:${g.lineno}: ${g.message}`);throw g;};D&&(a.on("message",g=>a.onmessage({data:g})),a.on("error",g=>a.onerror(g)));var c=[],d=["onExit"], -f;for(f of d)B.hasOwnProperty(f)&&c.push(f);a.postMessage({cmd:"load",handlers:c,wasmMemory:m,wasmModule:Ta})});function Xc(a){E?a():Promise.all(L.map(Mc)).then(a)}function Lc(){var a=new Worker(new URL(import.meta.url),{type:"module",workerData:"em-pthread",name:"em-pthread"});L.push(a)}var $c=a=>{for(;0{var a=Ia(),b=A()[a+52>>>2>>>0];a=A()[a+56>>>2>>>0];ad(b,b-a);Rc(b)},Pa=(a,b)=>{O=0;a=bd(a,b);0>>=0;var d=new cd(a);b>>>=0;c>>>=0;A()[d.Fb+16>>>2>>>0]=0;A()[d.Fb+4>>>2>>>0]=b;A()[d.Fb+8>>>2>>>0]=c;dd=a;ed++;throw dd;}function fd(a,b,c,d){return E?P(2,1,a,b,c,d):mb(a,b,c,d)} -function mb(a,b,c,d){a>>>=0;b>>>=0;c>>>=0;d>>>=0;if("undefined"==typeof SharedArrayBuffer)return H("Current environment does not support SharedArrayBuffer, pthreads are not available!"),6;var f=[];if(E&&0===f.length)return fd(a,b,c,d);a={$b:c,xb:a,Mb:d,ec:f};return E?(a.Ib="spawnThread",postMessage(a,f),0):Nc(a)} -var gd="undefined"!=typeof TextDecoder?new TextDecoder("utf8"):void 0,hd=(a,b,c)=>{b>>>=0;var d=b+c;for(c=b;a[c]&&!(c>=d);)++c;if(16f?d+=String.fromCharCode(f):(f-=65536,d+=String.fromCharCode(55296|f>> -10,56320|f&1023))}}else d+=String.fromCharCode(f)}return d},K=(a,b)=>(a>>>=0)?hd(w(),a,b):"";function nb(a,b,c){return E?P(3,1,a,b,c):0}function ob(a,b){if(E)return P(4,1,a,b)} -var jd=a=>{for(var b=0,c=0;c=d?b++:2047>=d?b+=2:55296<=d&&57343>=d?(b+=4,++c):b+=3}return b},kd=(a,b,c,d)=>{c>>>=0;if(!(0=k){var l=a.charCodeAt(++g);k=65536+((k&1023)<<10)|l&1023}if(127>=k){if(c>=d)break;b[c++>>>0]=k}else{if(2047>=k){if(c+1>=d)break;b[c++>>>0]=192|k>>6}else{if(65535>=k){if(c+2>=d)break;b[c++>>>0]=224|k>>12}else{if(c+3>=d)break;b[c++>>>0]=240|k>> -18;b[c++>>>0]=128|k>>12&63}b[c++>>>0]=128|k>>6&63}b[c++>>>0]=128|k&63}}b[c>>>0]=0;return c-f},ld=(a,b,c)=>kd(a,w(),b,c);function pb(a,b){if(E)return P(5,1,a,b)}function qb(a,b,c){if(E)return P(6,1,a,b,c)}function rb(a,b,c){return E?P(7,1,a,b,c):0}function sb(a,b){if(E)return P(8,1,a,b)}function tb(a,b,c){if(E)return P(9,1,a,b,c)}function ub(a,b,c,d){if(E)return P(10,1,a,b,c,d)}function vb(a,b,c,d){if(E)return P(11,1,a,b,c,d)}function wb(a,b,c,d){if(E)return P(12,1,a,b,c,d)} -function xb(a){if(E)return P(13,1,a)}function yb(a,b){if(E)return P(14,1,a,b)}function zb(a,b,c){if(E)return P(15,1,a,b,c)}var Ab=()=>{cb("")},md,Q=a=>{for(var b="";w()[a>>>0];)b+=md[w()[a++>>>0]];return b},nd={},od={},pd={},R; -function qd(a,b,c={}){var d=b.name;if(!a)throw new R(`type "${d}" must have a positive integer typeid pointer`);if(od.hasOwnProperty(a)){if(c.Ob)return;throw new R(`Cannot register type '${d}' twice`);}od[a]=b;delete pd[a];nd.hasOwnProperty(a)&&(b=nd[a],delete nd[a],b.forEach(f=>f()))}function S(a,b,c={}){if(!("argPackAdvance"in b))throw new 
TypeError("registerType registeredInstance requires argPackAdvance");return qd(a,b,c)} -var rd=(a,b,c)=>{switch(b){case 1:return c?d=>e()[d>>>0]:d=>w()[d>>>0];case 2:return c?d=>ca()[d>>>1>>>0]:d=>ea()[d>>>1>>>0];case 4:return c?d=>z()[d>>>2>>>0]:d=>A()[d>>>2>>>0];case 8:return c?d=>J[d>>>3]:d=>Va[d>>>3];default:throw new TypeError(`invalid integer width (${b}): ${a}`);}}; -function Bb(a,b,c){a>>>=0;c>>>=0;b=Q(b>>>0);S(a,{name:b,fromWireType:d=>d,toWireType:function(d,f){if("bigint"!=typeof f&&"number"!=typeof f)throw null===f?f="null":(d=typeof f,f="object"===d||"array"===d||"function"===d?f.toString():""+f),new TypeError(`Cannot convert "${f}" to ${this.name}`);"number"==typeof f&&(f=BigInt(f));return f},argPackAdvance:T,readValueFromPointer:rd(b,c,-1==b.indexOf("u")),Ab:null})}var T=8; -function Cb(a,b,c,d){a>>>=0;b=Q(b>>>0);S(a,{name:b,fromWireType:function(f){return!!f},toWireType:function(f,g){return g?c:d},argPackAdvance:T,readValueFromPointer:function(f){return this.fromWireType(w()[f>>>0])},Ab:null})}var sd=[],U=[];function Ub(a){a>>>=0;9{if(!a)throw new R("Cannot use deleted val. handle = "+a);return U[a]},W=a=>{switch(a){case void 0:return 2;case null:return 4;case !0:return 6;case !1:return 8;default:const b=sd.pop()||U.length;U[b]=a;U[b+1]=1;return b}};function td(a){return this.fromWireType(A()[a>>>2>>>0])}var ud={name:"emscripten::val",fromWireType:a=>{var b=V(a);Ub(a);return b},toWireType:(a,b)=>W(b),argPackAdvance:T,readValueFromPointer:td,Ab:null};function Db(a){return S(a>>>0,ud)} -var vd=(a,b)=>{switch(b){case 4:return function(c){return this.fromWireType(ja()[c>>>2>>>0])};case 8:return function(c){return this.fromWireType(la()[c>>>3>>>0])};default:throw new TypeError(`invalid float width (${b}): ${a}`);}};function Eb(a,b,c){a>>>=0;c>>>=0;b=Q(b>>>0);S(a,{name:b,fromWireType:d=>d,toWireType:(d,f)=>f,argPackAdvance:T,readValueFromPointer:vd(b,c),Ab:null})} -function Fb(a,b,c,d,f){a>>>=0;c>>>=0;b=Q(b>>>0);-1===f&&(f=4294967295);f=l=>l;if(0===d){var g=32-8*c;f=l=>l<>>g}var k=b.includes("unsigned")?function(l,p){return p>>>0}:function(l,p){return p};S(a,{name:b,fromWireType:f,toWireType:k,argPackAdvance:T,readValueFromPointer:rd(b,c,0!==d),Ab:null})} -function Gb(a,b,c){function d(g){var k=A()[g>>>2>>>0];g=A()[g+4>>>2>>>0];return new f(e().buffer,g,k)}a>>>=0;var f=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][b];c=Q(c>>>0);S(a,{name:c,fromWireType:d,argPackAdvance:T,readValueFromPointer:d},{Ob:!0})} -function Hb(a,b){a>>>=0;b=Q(b>>>0);var c="std::string"===b;S(a,{name:b,fromWireType:function(d){var f=A()[d>>>2>>>0],g=d+4;if(c)for(var k=g,l=0;l<=f;++l){var p=g+l;if(l==f||0==w()[p>>>0]){k=K(k,p-k);if(void 0===n)var n=k;else n+=String.fromCharCode(0),n+=k;k=p+1}}else{n=Array(f);for(l=0;l>>0]);n=n.join("")}X(d);return n},toWireType:function(d,f){f instanceof ArrayBuffer&&(f=new Uint8Array(f));var g="string"==typeof f;if(!(g||f instanceof Uint8Array||f instanceof -Uint8ClampedArray||f instanceof Int8Array))throw new R("Cannot pass non-string to std::string");var k=c&&g?jd(f):f.length;var l=wd(4+k+1),p=l+4;A()[l>>>2>>>0]=k;if(c&&g)ld(f,p,k+1);else if(g)for(g=0;g>>0]=n}else for(g=0;g>>0]=f[g];null!==d&&d.push(X,l);return l},argPackAdvance:T,readValueFromPointer:td,Ab(d){X(d)}})} -var xd="undefined"!=typeof TextDecoder?new TextDecoder("utf-16le"):void 0,yd=(a,b)=>{var c=a>>1;for(var d=c+b/2;!(c>=d)&&ea()[c>>>0];)++c;c<<=1;if(32=b/2);++d){var 
f=ca()[a+2*d>>>1>>>0];if(0==f)break;c+=String.fromCharCode(f)}return c},zd=(a,b,c)=>{c??=2147483647;if(2>c)return 0;c-=2;var d=b;c=c<2*a.length?c/2:a.length;for(var f=0;f>>1>>>0]=g;b+=2}ca()[b>>>1>>>0]=0;return b-d},Ad=a=>2*a.length,Bd=(a,b)=> -{for(var c=0,d="";!(c>=b/4);){var f=z()[a+4*c>>>2>>>0];if(0==f)break;++c;65536<=f?(f-=65536,d+=String.fromCharCode(55296|f>>10,56320|f&1023)):d+=String.fromCharCode(f)}return d},Cd=(a,b,c)=>{b>>>=0;c??=2147483647;if(4>c)return 0;var d=b;c=d+c-4;for(var f=0;f=g){var k=a.charCodeAt(++f);g=65536+((g&1023)<<10)|k&1023}z()[b>>>2>>>0]=g;b+=4;if(b+4>c)break}z()[b>>>2>>>0]=0;return b-d},Dd=a=>{for(var b=0,c=0;c{if("undefined"==typeof B||!B.Fb)return 1;a=K(a>>>0);a.startsWith("./")&&(a=a.substring(2));a=B.Fb.get(a);if(!a)return 2;b>>>=0;c>>>=0;d>>>=0;if(b+c>a.byteLength)return 3;try{return w().set(a.subarray(b,b+c),d>>>0),0}catch{return 4}},1338217:()=>{B.Ub()},1338248:()=>{B.Vb()},1338277:()=>{B.Zb()},1338302:a=>B.Tb(a),1338335:a=>B.Xb(a),1338367:(a,b,c)=>{B.Nb(a,b,c,!0)},1338406:(a,b,c)=>{B.Nb(a,b,c)},1338439:()=>"undefined"!==typeof wasmOffsetConverter,1338496:a=>{B.jb("Abs", +a,void 0)},1338547:a=>{B.jb("Neg",a,void 0)},1338598:a=>{B.jb("Floor",a,void 0)},1338651:a=>{B.jb("Ceil",a,void 0)},1338703:a=>{B.jb("Reciprocal",a,void 0)},1338761:a=>{B.jb("Sqrt",a,void 0)},1338813:a=>{B.jb("Exp",a,void 0)},1338864:a=>{B.jb("Erf",a,void 0)},1338915:a=>{B.jb("Sigmoid",a,void 0)},1338970:(a,b,c)=>{B.jb("HardSigmoid",a,{alpha:b,beta:c})},1339049:a=>{B.jb("Log",a,void 0)},1339100:a=>{B.jb("Sin",a,void 0)},1339151:a=>{B.jb("Cos",a,void 0)},1339202:a=>{B.jb("Tan",a,void 0)},1339253:a=> +{B.jb("Asin",a,void 0)},1339305:a=>{B.jb("Acos",a,void 0)},1339357:a=>{B.jb("Atan",a,void 0)},1339409:a=>{B.jb("Sinh",a,void 0)},1339461:a=>{B.jb("Cosh",a,void 0)},1339513:a=>{B.jb("Asinh",a,void 0)},1339566:a=>{B.jb("Acosh",a,void 0)},1339619:a=>{B.jb("Atanh",a,void 0)},1339672:a=>{B.jb("Tanh",a,void 0)},1339724:a=>{B.jb("Not",a,void 0)},1339775:(a,b,c)=>{B.jb("Clip",a,{min:b,max:c})},1339844:a=>{B.jb("Clip",a,void 0)},1339896:(a,b)=>{B.jb("Elu",a,{alpha:b})},1339954:a=>{B.jb("Relu",a,void 0)},1340006:(a, +b)=>{B.jb("LeakyRelu",a,{alpha:b})},1340070:(a,b)=>{B.jb("ThresholdedRelu",a,{alpha:b})},1340140:(a,b)=>{B.jb("Cast",a,{to:b})},1340198:a=>{B.jb("Add",a,void 0)},1340249:a=>{B.jb("Sub",a,void 0)},1340300:a=>{B.jb("Mul",a,void 0)},1340351:a=>{B.jb("Div",a,void 0)},1340402:a=>{B.jb("Pow",a,void 0)},1340453:a=>{B.jb("Equal",a,void 0)},1340506:a=>{B.jb("Greater",a,void 0)},1340561:a=>{B.jb("GreaterOrEqual",a,void 0)},1340623:a=>{B.jb("Less",a,void 0)},1340675:a=>{B.jb("LessOrEqual",a,void 0)},1340734:(a, +b,c,d,f)=>{B.jb("ReduceMean",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1340893:(a,b,c,d,f)=>{B.jb("ReduceMax",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1341051:(a,b,c,d,f)=>{B.jb("ReduceMin",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1341209:(a,b,c,d,f)=>{B.jb("ReduceProd",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})}, 
+1341368:(a,b,c,d,f)=>{B.jb("ReduceSum",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1341526:(a,b,c,d,f)=>{B.jb("ReduceL1",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1341683:(a,b,c,d,f)=>{B.jb("ReduceL2",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1341840:(a,b,c,d,f)=>{B.jb("ReduceLogSum",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>> +0)):[]})},1342001:(a,b,c,d,f)=>{B.jb("ReduceSumSquare",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1342165:(a,b,c,d,f)=>{B.jb("ReduceLogSumExp",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1342329:a=>{B.jb("Where",a,void 0)},1342382:(a,b,c)=>{B.jb("Transpose",a,{perm:b?Array.from(z().subarray(b>>>0,c>>>0)):[]})},1342490:(a,b,c,d)=>{B.jb("DepthToSpace",a,{blocksize:b,mode:K(c),format:d?"NHWC":"NCHW"})},1342623:(a, +b,c,d)=>{B.jb("DepthToSpace",a,{blocksize:b,mode:K(c),format:d?"NHWC":"NCHW"})},1342756:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t)=>{B.jb("ConvTranspose",a,{format:p?"NHWC":"NCHW",autoPad:b,dilations:[c],group:d,kernelShape:[f],pads:[g,k],strides:[l],wIsConst:()=>!!e()[n>>>0],outputPadding:r?Array.from(z().subarray(r>>>0,x>>>0)):[],outputShape:y?Array.from(z().subarray(y>>>0,h>>>0)):[],activation:K(t)})},1343157:(a,b,c,d,f,g,k,l,p,n,r,x,y,h)=>{B.jb("ConvTranspose",a,{format:l?"NHWC":"NCHW",autoPad:b,dilations:Array.from(z().subarray(c>>> +0,(c>>>0)+2>>>0)),group:d,kernelShape:Array.from(z().subarray(f>>>0,(f>>>0)+2>>>0)),pads:Array.from(z().subarray(g>>>0,(g>>>0)+4>>>0)),strides:Array.from(z().subarray(k>>>0,(k>>>0)+2>>>0)),wIsConst:()=>!!e()[p>>>0],outputPadding:n?Array.from(z().subarray(n>>>0,r>>>0)):[],outputShape:x?Array.from(z().subarray(x>>>0,y>>>0)):[],activation:K(h)})},1343722:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t)=>{B.jb("ConvTranspose",a,{format:p?"NHWC":"NCHW",autoPad:b,dilations:[c],group:d,kernelShape:[f],pads:[g,k],strides:[l], +wIsConst:()=>!!e()[n>>>0],outputPadding:r?Array.from(z().subarray(r>>>0,x>>>0)):[],outputShape:y?Array.from(z().subarray(y>>>0,h>>>0)):[],activation:K(t)})},1344123:(a,b,c,d,f,g,k,l,p,n,r,x,y,h)=>{B.jb("ConvTranspose",a,{format:l?"NHWC":"NCHW",autoPad:b,dilations:Array.from(z().subarray(c>>>0,(c>>>0)+2>>>0)),group:d,kernelShape:Array.from(z().subarray(f>>>0,(f>>>0)+2>>>0)),pads:Array.from(z().subarray(g>>>0,(g>>>0)+4>>>0)),strides:Array.from(z().subarray(k>>>0,(k>>>0)+2>>>0)),wIsConst:()=>!!e()[p>>> +0],outputPadding:n?Array.from(z().subarray(n>>>0,r>>>0)):[],outputShape:x?Array.from(z().subarray(x>>>0,y>>>0)):[],activation:K(h)})},1344688:(a,b)=>{B.jb("GlobalAveragePool",a,{format:b?"NHWC":"NCHW"})},1344779:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.jb("AveragePool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1345063:(a,b)=>{B.jb("GlobalAveragePool",a,{format:b?"NHWC":"NCHW"})},1345154:(a,b, 
+c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.jb("AveragePool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1345438:(a,b)=>{B.jb("GlobalMaxPool",a,{format:b?"NHWC":"NCHW"})},1345525:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.jb("MaxPool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1345805:(a,b)=>{B.jb("GlobalMaxPool", +a,{format:b?"NHWC":"NCHW"})},1345892:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.jb("MaxPool",a,{format:v?"NHWC":"NCHW",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:f,dilations:[g,k],kernel_shape:[l,p],pads:[n,r,x,y],strides:[h,t]})},1346172:(a,b,c,d,f)=>{B.jb("Gemm",a,{alpha:b,beta:c,transA:d,transB:f})},1346276:a=>{B.jb("MatMul",a,void 0)},1346330:(a,b,c,d)=>{B.jb("ArgMax",a,{keepDims:!!b,selectLastIndex:!!c,axis:d})},1346438:(a,b,c,d)=>{B.jb("ArgMin",a,{keepDims:!!b,selectLastIndex:!!c,axis:d})}, +1346546:(a,b)=>{B.jb("Softmax",a,{axis:b})},1346609:(a,b)=>{B.jb("Concat",a,{axis:b})},1346669:(a,b,c,d,f)=>{B.jb("Split",a,{axis:b,numOutputs:c,splitSizes:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1346809:a=>{B.jb("Expand",a,void 0)},1346863:(a,b)=>{B.jb("Gather",a,{axis:Number(b)})},1346934:(a,b)=>{B.jb("GatherElements",a,{axis:Number(b)})},1347013:(a,b,c,d,f,g,k,l,p,n,r)=>{B.jb("Resize",a,{antialias:b,axes:c?Array.from(z().subarray(c>>>0,d>>>0)):[],coordinateTransformMode:K(f),cubicCoeffA:g, +excludeOutside:k,extrapolationValue:l,keepAspectRatioPolicy:K(p),mode:K(n),nearestMode:K(r)})},1347359:(a,b,c,d,f,g,k)=>{B.jb("Slice",a,{starts:b?Array.from(z().subarray(b>>>0,c>>>0)):[],ends:d?Array.from(z().subarray(d>>>0,f>>>0)):[],axes:g?Array.from(z().subarray(g>>>0,k>>>0)):[]})},1347575:a=>{B.jb("Tile",a,void 0)},1347627:(a,b,c)=>{B.jb("InstanceNormalization",a,{epsilon:b,format:c?"NHWC":"NCHW"})},1347741:(a,b,c)=>{B.jb("InstanceNormalization",a,{epsilon:b,format:c?"NHWC":"NCHW"})},1347855:a=> +{B.jb("Range",a,void 0)},1347908:(a,b)=>{B.jb("Einsum",a,{equation:K(b)})},1347989:(a,b,c,d,f)=>{B.jb("Pad",a,{mode:b,value:c,pads:d?Array.from(z().subarray(d>>>0,f>>>0)):[]})},1348116:(a,b,c,d,f,g)=>{B.jb("BatchNormalization",a,{epsilon:b,momentum:c,spatial:!!f,trainingMode:!!d,format:g?"NHWC":"NCHW"})},1348285:(a,b,c,d,f,g)=>{B.jb("BatchNormalization",a,{epsilon:b,momentum:c,spatial:!!f,trainingMode:!!d,format:g?"NHWC":"NCHW"})},1348454:(a,b,c)=>{B.jb("CumSum",a,{exclusive:Number(b),reverse:Number(c)})}, +1348551:(a,b,c,d,f,g,k,l,p)=>{B.jb("Attention",a,{numHeads:b,isUnidirectional:c,maskFilterValue:d,scale:f,doRotary:g,qkvHiddenSizes:k?Array.from(z().subarray(Number(l)>>>0,Number(l)+k>>>0)):[],pastPresentShareBuffer:!!p})},1348823:a=>{B.jb("BiasAdd",a,void 0)},1348878:a=>{B.jb("BiasSplitGelu",a,void 0)},1348939:a=>{B.jb("FastGelu",a,void 0)},1348995:(a,b,c,d,f,g,k,l,p,n,r,x,y,h,t,v)=>{B.jb("Conv",a,{format:x?"NHWC":"NCHW",auto_pad:b,dilations:c?Array.from(z().subarray(c>>>0,d>>>0)):[],group:f,kernel_shape:g? 
+Array.from(z().subarray(g>>>0,k>>>0)):[],pads:l?Array.from(z().subarray(l>>>0,p>>>0)):[],strides:n?Array.from(z().subarray(n>>>0,r>>>0)):[],w_is_const:()=>!!e()[y>>>0],activation:K(h),activation_params:t?Array.from(ja().subarray(t>>>0,v>>>0)):[]})},1349491:a=>{B.jb("Gelu",a,void 0)},1349543:(a,b,c,d)=>{B.jb("GroupQueryAttention",a,{numHeads:b,kvNumHeads:c,scale:d})},1349656:(a,b,c,d)=>{B.jb("LayerNormalization",a,{axis:b,epsilon:c,simplified:!!d})},1349767:(a,b,c,d)=>{B.jb("LayerNormalization",a, +{axis:b,epsilon:c,simplified:!!d})},1349878:(a,b,c,d,f,g)=>{B.jb("MatMulNBits",a,{k:b,n:c,accuracyLevel:d,bits:f,blockSize:g})},1350005:(a,b,c,d,f,g)=>{B.jb("MultiHeadAttention",a,{numHeads:b,isUnidirectional:c,maskFilterValue:d,scale:f,doRotary:g})},1350164:(a,b)=>{B.jb("QuickGelu",a,{alpha:b})},1350228:(a,b,c,d,f)=>{B.jb("RotaryEmbedding",a,{interleaved:!!b,numHeads:c,rotaryEmbeddingDim:d,scale:f})},1350367:(a,b,c)=>{B.jb("SkipLayerNormalization",a,{epsilon:b,simplified:!!c})},1350469:a=>{B.Yb(a)}, +1350503:(a,b)=>B.$b(a,b,B.Eb.bc,B.Eb.errors),1350615:(a,b,c)=>{B.jb("SkipLayerNormalization",a,{epsilon:b,simplified:!!c})}};function lb(a,b,c){return Lc(async()=>{await B.Wb(a,b,c)})}function kb(){return"undefined"!==typeof wasmOffsetConverter}function Mc(a){this.name="ExitStatus";this.message=`Program terminated with exit(${a})`;this.status=a} +var Nc=a=>{a.terminate();a.onmessage=()=>{}},Qc=a=>{0==L.length&&(Oc(),Pc(L[0]));var b=L.pop();if(!b)return 6;M.push(b);N[a.Ab]=b;b.Ab=a.Ab;var c={cmd:"run",start_routine:a.cc,arg:a.Pb,pthread_ptr:a.Ab};D&&b.unref();b.postMessage(c,a.ic);return 0},O=0,P=(a,b,...c)=>{for(var d=2*c.length,f=Rc(),g=Sc(8*d),k=g>>>3,l=0;l>>0]=p)}a=Uc(a,0,d,g,b);Vc(f);return a}; +function Hc(a){if(E)return P(0,1,a);Ua=a;if(!(0{Ua=a;if(E)throw Wc(a),"unwind";Hc(a)},L=[],M=[],Xc=[],N={};function Yc(){for(var a=B.numThreads-1;a--;)Oc();Wa.unshift(()=>{Za++;Zc(()=>bb())})}var ad=a=>{var b=a.Ab;delete N[b];L.push(a);M.splice(M.indexOf(a),1);a.Ab=0;$c(b)};function Na(){Xc.forEach(a=>a())} +var Pc=a=>new Promise(b=>{a.onmessage=g=>{g=g.data;var k=g.cmd;if(g.targetThread&&g.targetThread!=Ia()){var l=N[g.targetThread];l?l.postMessage(g,g.transferList):H(`Internal error! 
Worker sent a message "${k}" to target pthread ${g.targetThread}, but that thread no longer exists!`)}else if("checkMailbox"===k)Ra();else if("spawnThread"===k)Qc(g);else if("cleanupThread"===k)ad(N[g.thread]);else if("killThread"===k)g=g.thread,k=N[g],delete N[g],Nc(k),$c(g),M.splice(M.indexOf(k),1),k.Ab=0;else if("cancelThread"=== +k)N[g.thread].postMessage({cmd:"cancel"});else if("loaded"===k)a.loaded=!0,D&&!a.Ab&&a.unref(),b(a);else if("alert"===k)alert(`Thread ${g.threadId}: ${g.text}`);else if("setimmediate"===g.target)a.postMessage(g);else if("callHandler"===k)B[g.handler](...g.args);else k&&H(`worker sent an unknown command ${k}`)};a.onerror=g=>{H(`${"worker sent an error!"} ${g.filename}:${g.lineno}: ${g.message}`);throw g;};D&&(a.on("message",g=>a.onmessage({data:g})),a.on("error",g=>a.onerror(g)));var c=[],d=["onExit"], +f;for(f of d)B.hasOwnProperty(f)&&c.push(f);a.postMessage({cmd:"load",handlers:c,wasmMemory:m,wasmModule:Ta})});function Zc(a){E?a():Promise.all(L.map(Pc)).then(a)}function Oc(){var a=new Worker(new URL(import.meta.url),{type:"module",workerData:"em-pthread",name:"em-pthread"});L.push(a)}var bd=a=>{for(;0{var a=Ia(),b=A()[a+52>>>2>>>0];a=A()[a+56>>>2>>>0];cd(b,b-a);Vc(b)},Pa=(a,b)=>{O=0;a=dd(a,b);0>>=0;var d=new ed(a);b>>>=0;c>>>=0;A()[d.Ib+16>>>2>>>0]=0;A()[d.Ib+4>>>2>>>0]=b;A()[d.Ib+8>>>2>>>0]=c;fd=a;gd++;throw fd;}function hd(a,b,c,d){return E?P(2,1,a,b,c,d):nb(a,b,c,d)} +function nb(a,b,c,d){a>>>=0;b>>>=0;c>>>=0;d>>>=0;if("undefined"==typeof SharedArrayBuffer)return H("Current environment does not support SharedArrayBuffer, pthreads are not available!"),6;var f=[];if(E&&0===f.length)return hd(a,b,c,d);a={cc:c,Ab:a,Pb:d,ic:f};return E?(a.Lb="spawnThread",postMessage(a,f),0):Qc(a)} +var jd="undefined"!=typeof TextDecoder?new TextDecoder("utf8"):void 0,kd=(a,b,c)=>{b>>>=0;var d=b+c;for(c=b;a[c]&&!(c>=d);)++c;if(16f?d+=String.fromCharCode(f):(f-=65536,d+=String.fromCharCode(55296|f>> +10,56320|f&1023))}}else d+=String.fromCharCode(f)}return d},K=(a,b)=>(a>>>=0)?kd(w(),a,b):"";function ob(a,b,c){return E?P(3,1,a,b,c):0}function pb(a,b){if(E)return P(4,1,a,b)} +var ld=a=>{for(var b=0,c=0;c=d?b++:2047>=d?b+=2:55296<=d&&57343>=d?(b+=4,++c):b+=3}return b},md=(a,b,c,d)=>{c>>>=0;if(!(0=k){var l=a.charCodeAt(++g);k=65536+((k&1023)<<10)|l&1023}if(127>=k){if(c>=d)break;b[c++>>>0]=k}else{if(2047>=k){if(c+1>=d)break;b[c++>>>0]=192|k>>6}else{if(65535>=k){if(c+2>=d)break;b[c++>>>0]=224|k>>12}else{if(c+3>=d)break;b[c++>>>0]=240|k>> +18;b[c++>>>0]=128|k>>12&63}b[c++>>>0]=128|k>>6&63}b[c++>>>0]=128|k&63}}b[c>>>0]=0;return c-f},nd=(a,b,c)=>md(a,w(),b,c);function qb(a,b){if(E)return P(5,1,a,b)}function rb(a,b,c){if(E)return P(6,1,a,b,c)}function sb(a,b,c){return E?P(7,1,a,b,c):0}function tb(a,b){if(E)return P(8,1,a,b)}function ub(a,b,c){if(E)return P(9,1,a,b,c)}function vb(a,b,c,d){if(E)return P(10,1,a,b,c,d)}function wb(a,b,c,d){if(E)return P(11,1,a,b,c,d)}function xb(a,b,c,d){if(E)return P(12,1,a,b,c,d)} +function yb(a){if(E)return P(13,1,a)}function zb(a,b){if(E)return P(14,1,a,b)}function Ab(a,b,c){if(E)return P(15,1,a,b,c)}var Bb=()=>{cb("")},od,Q=a=>{for(var b="";w()[a>>>0];)b+=od[w()[a++>>>0]];return b},pd={},qd={},rd={},R; +function sd(a,b,c={}){var d=b.name;if(!a)throw new R(`type "${d}" must have a positive integer typeid pointer`);if(qd.hasOwnProperty(a)){if(c.Rb)return;throw new R(`Cannot register type '${d}' twice`);}qd[a]=b;delete rd[a];pd.hasOwnProperty(a)&&(b=pd[a],delete pd[a],b.forEach(f=>f()))}function S(a,b,c={}){if(!("argPackAdvance"in b))throw new 
TypeError("registerType registeredInstance requires argPackAdvance");return sd(a,b,c)} +var td=(a,b,c)=>{switch(b){case 1:return c?d=>e()[d>>>0]:d=>w()[d>>>0];case 2:return c?d=>ca()[d>>>1>>>0]:d=>ea()[d>>>1>>>0];case 4:return c?d=>z()[d>>>2>>>0]:d=>A()[d>>>2>>>0];case 8:return c?d=>J[d>>>3]:d=>Va[d>>>3];default:throw new TypeError(`invalid integer width (${b}): ${a}`);}}; +function Cb(a,b,c){a>>>=0;c>>>=0;b=Q(b>>>0);S(a,{name:b,fromWireType:d=>d,toWireType:function(d,f){if("bigint"!=typeof f&&"number"!=typeof f)throw null===f?f="null":(d=typeof f,f="object"===d||"array"===d||"function"===d?f.toString():""+f),new TypeError(`Cannot convert "${f}" to ${this.name}`);"number"==typeof f&&(f=BigInt(f));return f},argPackAdvance:T,readValueFromPointer:td(b,c,-1==b.indexOf("u")),Db:null})}var T=8; +function Db(a,b,c,d){a>>>=0;b=Q(b>>>0);S(a,{name:b,fromWireType:function(f){return!!f},toWireType:function(f,g){return g?c:d},argPackAdvance:T,readValueFromPointer:function(f){return this.fromWireType(w()[f>>>0])},Db:null})}var ud=[],U=[];function Vb(a){a>>>=0;9{if(!a)throw new R("Cannot use deleted val. handle = "+a);return U[a]},W=a=>{switch(a){case void 0:return 2;case null:return 4;case !0:return 6;case !1:return 8;default:const b=ud.pop()||U.length;U[b]=a;U[b+1]=1;return b}};function vd(a){return this.fromWireType(A()[a>>>2>>>0])}var wd={name:"emscripten::val",fromWireType:a=>{var b=V(a);Vb(a);return b},toWireType:(a,b)=>W(b),argPackAdvance:T,readValueFromPointer:vd,Db:null};function Eb(a){return S(a>>>0,wd)} +var xd=(a,b)=>{switch(b){case 4:return function(c){return this.fromWireType(ja()[c>>>2>>>0])};case 8:return function(c){return this.fromWireType(la()[c>>>3>>>0])};default:throw new TypeError(`invalid float width (${b}): ${a}`);}};function Fb(a,b,c){a>>>=0;c>>>=0;b=Q(b>>>0);S(a,{name:b,fromWireType:d=>d,toWireType:(d,f)=>f,argPackAdvance:T,readValueFromPointer:xd(b,c),Db:null})} +function Gb(a,b,c,d,f){a>>>=0;c>>>=0;b=Q(b>>>0);-1===f&&(f=4294967295);f=l=>l;if(0===d){var g=32-8*c;f=l=>l<>>g}var k=b.includes("unsigned")?function(l,p){return p>>>0}:function(l,p){return p};S(a,{name:b,fromWireType:f,toWireType:k,argPackAdvance:T,readValueFromPointer:td(b,c,0!==d),Db:null})} +function Hb(a,b,c){function d(g){var k=A()[g>>>2>>>0];g=A()[g+4>>>2>>>0];return new f(e().buffer,g,k)}a>>>=0;var f=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][b];c=Q(c>>>0);S(a,{name:c,fromWireType:d,argPackAdvance:T,readValueFromPointer:d},{Rb:!0})} +function Ib(a,b){a>>>=0;b=Q(b>>>0);var c="std::string"===b;S(a,{name:b,fromWireType:function(d){var f=A()[d>>>2>>>0],g=d+4;if(c)for(var k=g,l=0;l<=f;++l){var p=g+l;if(l==f||0==w()[p>>>0]){k=K(k,p-k);if(void 0===n)var n=k;else n+=String.fromCharCode(0),n+=k;k=p+1}}else{n=Array(f);for(l=0;l>>0]);n=n.join("")}X(d);return n},toWireType:function(d,f){f instanceof ArrayBuffer&&(f=new Uint8Array(f));var g="string"==typeof f;if(!(g||f instanceof Uint8Array||f instanceof +Uint8ClampedArray||f instanceof Int8Array))throw new R("Cannot pass non-string to std::string");var k=c&&g?ld(f):f.length;var l=yd(4+k+1),p=l+4;A()[l>>>2>>>0]=k;if(c&&g)nd(f,p,k+1);else if(g)for(g=0;g>>0]=n}else for(g=0;g>>0]=f[g];null!==d&&d.push(X,l);return l},argPackAdvance:T,readValueFromPointer:vd,Db(d){X(d)}})} +var zd="undefined"!=typeof TextDecoder?new TextDecoder("utf-16le"):void 0,Ad=(a,b)=>{var c=a>>1;for(var d=c+b/2;!(c>=d)&&ea()[c>>>0];)++c;c<<=1;if(32=b/2);++d){var 
f=ca()[a+2*d>>>1>>>0];if(0==f)break;c+=String.fromCharCode(f)}return c},Bd=(a,b,c)=>{c??=2147483647;if(2>c)return 0;c-=2;var d=b;c=c<2*a.length?c/2:a.length;for(var f=0;f>>1>>>0]=g;b+=2}ca()[b>>>1>>>0]=0;return b-d},Cd=a=>2*a.length,Dd=(a,b)=> +{for(var c=0,d="";!(c>=b/4);){var f=z()[a+4*c>>>2>>>0];if(0==f)break;++c;65536<=f?(f-=65536,d+=String.fromCharCode(55296|f>>10,56320|f&1023)):d+=String.fromCharCode(f)}return d},Ed=(a,b,c)=>{b>>>=0;c??=2147483647;if(4>c)return 0;var d=b;c=d+c-4;for(var f=0;f=g){var k=a.charCodeAt(++f);g=65536+((g&1023)<<10)|k&1023}z()[b>>>2>>>0]=g;b+=4;if(b+4>c)break}z()[b>>>2>>>0]=0;return b-d},Fd=a=>{for(var b=0,c=0;c=d&&++c;b+=4}return b}; -function Ib(a,b,c){a>>>=0;b>>>=0;c>>>=0;c=Q(c);if(2===b){var d=yd;var f=zd;var g=Ad;var k=l=>ea()[l>>>1>>>0]}else 4===b&&(d=Bd,f=Cd,g=Dd,k=l=>A()[l>>>2>>>0]);S(a,{name:c,fromWireType:l=>{for(var p=A()[l>>>2>>>0],n,r=l+4,x=0;x<=p;++x){var y=l+4+x*b;if(x==p||0==k(y))r=d(r,y-r),void 0===n?n=r:(n+=String.fromCharCode(0),n+=r),r=y+b}X(l);return n},toWireType:(l,p)=>{if("string"!=typeof p)throw new R(`Cannot pass non-string to C++ string type ${c}`);var n=g(p),r=wd(4+n+b);A()[r>>>2>>>0]=n/b;f(p,r+4,n+b); -null!==l&&l.push(X,r);return r},argPackAdvance:T,readValueFromPointer:td,Ab(l){X(l)}})}function Jb(a,b){a>>>=0;b=Q(b>>>0);S(a,{Pb:!0,name:b,argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var Kb=()=>1;function Lb(a){Ka(a>>>0,!C,1,!qa,131072,!1);Na()}var Ed=a=>{if(!I)try{if(a(),!(0>>=0;"function"===typeof Atomics.fc&&(Atomics.fc(z(),a>>>2,a).value.then(Ra),a+=128,Atomics.store(z(),a>>>2,1))}var Ra=()=>{var a=Ia();a&&(La(a),Ed(Fd))};function Mb(a,b){a>>>=0;a==b>>>0?setTimeout(Ra):E?postMessage({targetThread:a,cmd:"checkMailbox"}):(a=N[a])&&a.postMessage({cmd:"checkMailbox"})}var Gd=[];function Nb(a,b,c,d,f){b>>>=0;d/=2;Gd.length=d;c=f>>>0>>>3;for(f=0;f>>0];return(b?Hc[b]:Hd[a])(...Gd)} -function Ob(a){a>>>=0;E?postMessage({cmd:"cleanupThread",thread:a}):Zc(N[a])}function Pb(a){D&&N[a>>>0].ref()}var Jd=(a,b)=>{var c=od[a];if(void 0===c)throw a=Id(a),c=Q(a),X(a),new R(`${b} has unknown type ${c}`);return c},Kd=(a,b,c)=>{var d=[];a=a.toWireType(d,c);d.length&&(A()[b>>>2>>>0]=W(d));return a};function Qb(a,b,c){b>>>=0;c>>>=0;a=V(a>>>0);b=Jd(b,"emval::as");return Kd(b,c,a)}var Ld=a=>{try{a()}catch(b){cb(b)}}; -function Md(){var a=Y,b={};for(let [c,d]of Object.entries(a))b[c]="function"==typeof d?(...f)=>{Nd.push(c);try{return d(...f)}finally{I||(Nd.pop(),F&&1===Z&&0===Nd.length&&(Z=0,O+=1,Ld(Od),"undefined"!=typeof Fibers&&Fibers.lc()))}}:d;return b}var Z=0,F=null,Pd=0,Nd=[],Qd={},Rd={},Sd=0,Td=null,Ud=[];function sa(){return new Promise((a,b)=>{Td={resolve:a,reject:b}})} -function Vd(){var a=wd(65548),b=a+12;A()[a>>>2>>>0]=b;A()[a+4>>>2>>>0]=b+65536;b=Nd[0];var c=Qd[b];void 0===c&&(c=Sd++,Qd[b]=c,Rd[c]=b);b=c;z()[a+8>>>2>>>0]=b;return a}function Wd(){var a=z()[F+8>>>2>>>0];a=Y[Rd[a]];--O;return a()} -function Xd(a){if(!I){if(0===Z){var b=!1,c=!1;a((d=0)=>{if(!I&&(Pd=d,b=!0,c)){Z=2;Ld(()=>Yd(F));"undefined"!=typeof Browser&&Browser.Gb.Nb&&Browser.Gb.resume();d=!1;try{var f=Wd()}catch(l){f=l,d=!0}var g=!1;if(!F){var k=Td;k&&(Td=null,(d?k.reject:k.resolve)(f),g=!0)}if(d&&!g)throw f;}});c=!0;b||(Z=1,F=Vd(),"undefined"!=typeof Browser&&Browser.Gb.Nb&&Browser.Gb.pause(),Ld(()=>Zd(F)))}else 2===Z?(Z=0,Ld($d),X(F),F=null,Ud.forEach(Ed)):cb(`invalid state: ${Z}`);return Pd}} -function Ic(a){return Xd(b=>{a().then(b)})}function Rb(a){a>>>=0;return Ic(()=>{a=V(a);return a.then(W)})}var ae=[];function 
Sb(a,b,c,d){c>>>=0;d>>>=0;a=ae[a>>>0];b=V(b>>>0);return a(null,b,c,d)}var be={},ce=a=>{var b=be[a];return void 0===b?Q(a):b};function Tb(a,b,c,d,f){c>>>=0;d>>>=0;f>>>=0;a=ae[a>>>0];b=V(b>>>0);c=ce(c);return a(b,b[c],d,f)}var de=()=>"object"==typeof globalThis?globalThis:Function("return this")();function Vb(a){a>>>=0;if(0===a)return W(de());a=ce(a);return W(de()[a])} -var ee=a=>{var b=ae.length;ae.push(a);return b},fe=(a,b)=>{for(var c=Array(a),d=0;d>>2>>>0],"parameter "+d);return c},ge=(a,b)=>Object.defineProperty(b,"name",{value:a});function he(a){var b=Function;if(!(b instanceof Function))throw new TypeError(`new_ called with constructor type ${typeof b} which is not a function`);var c=ge(b.name||"unknownFunctionName",function(){});c.prototype=b.prototype;c=new c;a=b.apply(c,a);return a instanceof Object?a:c} -function Wb(a,b,c){b=fe(a,b>>>0);var d=b.shift();a--;var f="return function (obj, func, destructorsRef, args) {\n",g=0,k=[];0===c&&k.push("obj");for(var l=["retType"],p=[d],n=0;nr.name).join(", ")}) => ${d.name}>`;return ee(ge(c,a))}function Xb(a){a=ce(a>>>0);return W(B[a])}function Yb(a,b){b>>>=0;a=V(a>>>0);b=V(b);return W(a[b])}function Zb(a){a>>>=0;9>>0);for(var b=Array(a.length),c=0;c>>0))}function cc(){return W({})} -function dc(a){a>>>=0;for(var b=V(a);b.length;){var c=b.pop();b.pop()(c)}Ub(a)}function ec(a,b,c){b>>>=0;c>>>=0;a=V(a>>>0);b=V(b);c=V(c);a[b]=c}function fc(a,b){b>>>=0;a=Jd(a>>>0,"_emval_take_value");a=a.readValueFromPointer(b);return W(a)} -function gc(a,b){a=-9007199254740992>a||9007199254740992>>=0;a=new Date(1E3*a);z()[b>>>2>>>0]=a.getUTCSeconds();z()[b+4>>>2>>>0]=a.getUTCMinutes();z()[b+8>>>2>>>0]=a.getUTCHours();z()[b+12>>>2>>>0]=a.getUTCDate();z()[b+16>>>2>>>0]=a.getUTCMonth();z()[b+20>>>2>>>0]=a.getUTCFullYear()-1900;z()[b+24>>>2>>>0]=a.getUTCDay();a=(a.getTime()-Date.UTC(a.getUTCFullYear(),0,1,0,0,0,0))/864E5|0;z()[b+28>>>2>>>0]=a} -var ie=a=>0===a%4&&(0!==a%100||0===a%400),je=[0,31,60,91,121,152,182,213,244,274,305,335],ke=[0,31,59,90,120,151,181,212,243,273,304,334]; -function hc(a,b){a=-9007199254740992>a||9007199254740992>>=0;a=new Date(1E3*a);z()[b>>>2>>>0]=a.getSeconds();z()[b+4>>>2>>>0]=a.getMinutes();z()[b+8>>>2>>>0]=a.getHours();z()[b+12>>>2>>>0]=a.getDate();z()[b+16>>>2>>>0]=a.getMonth();z()[b+20>>>2>>>0]=a.getFullYear()-1900;z()[b+24>>>2>>>0]=a.getDay();var c=(ie(a.getFullYear())?je:ke)[a.getMonth()]+a.getDate()-1|0;z()[b+28>>>2>>>0]=c;z()[b+36>>>2>>>0]=-(60*a.getTimezoneOffset());c=(new Date(a.getFullYear(),6,1)).getTimezoneOffset(); +function Jb(a,b,c){a>>>=0;b>>>=0;c>>>=0;c=Q(c);if(2===b){var d=Ad;var f=Bd;var g=Cd;var k=l=>ea()[l>>>1>>>0]}else 4===b&&(d=Dd,f=Ed,g=Fd,k=l=>A()[l>>>2>>>0]);S(a,{name:c,fromWireType:l=>{for(var p=A()[l>>>2>>>0],n,r=l+4,x=0;x<=p;++x){var y=l+4+x*b;if(x==p||0==k(y))r=d(r,y-r),void 0===n?n=r:(n+=String.fromCharCode(0),n+=r),r=y+b}X(l);return n},toWireType:(l,p)=>{if("string"!=typeof p)throw new R(`Cannot pass non-string to C++ string type ${c}`);var n=g(p),r=yd(4+n+b);A()[r>>>2>>>0]=n/b;f(p,r+4,n+b); +null!==l&&l.push(X,r);return r},argPackAdvance:T,readValueFromPointer:vd,Db(l){X(l)}})}function Kb(a,b){a>>>=0;b=Q(b>>>0);S(a,{Sb:!0,name:b,argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var Lb=()=>1;function Mb(a){Ka(a>>>0,!C,1,!qa,131072,!1);Na()}var Gd=a=>{if(!I)try{if(a(),!(0>>=0;"function"===typeof Atomics.jc&&(Atomics.jc(z(),a>>>2,a).value.then(Ra),a+=128,Atomics.store(z(),a>>>2,1))}var Ra=()=>{var a=Ia();a&&(La(a),Gd(Hd))};function 
Nb(a,b){a>>>=0;a==b>>>0?setTimeout(Ra):E?postMessage({targetThread:a,cmd:"checkMailbox"}):(a=N[a])&&a.postMessage({cmd:"checkMailbox"})}var Id=[];function Ob(a,b,c,d,f){b>>>=0;d/=2;Id.length=d;c=f>>>0>>>3;for(f=0;f>>0];return(b?Kc[b]:Jd[a])(...Id)} +function Pb(a){a>>>=0;E?postMessage({cmd:"cleanupThread",thread:a}):ad(N[a])}function Qb(a){D&&N[a>>>0].ref()}var Ld=(a,b)=>{var c=qd[a];if(void 0===c)throw a=Kd(a),c=Q(a),X(a),new R(`${b} has unknown type ${c}`);return c},Md=(a,b,c)=>{var d=[];a=a.toWireType(d,c);d.length&&(A()[b>>>2>>>0]=W(d));return a};function Rb(a,b,c){b>>>=0;c>>>=0;a=V(a>>>0);b=Ld(b,"emval::as");return Md(b,c,a)}var Nd=a=>{try{a()}catch(b){cb(b)}}; +function Od(){var a=Y,b={};for(let [c,d]of Object.entries(a))b[c]="function"==typeof d?(...f)=>{Pd.push(c);try{return d(...f)}finally{I||(Pd.pop(),F&&1===Z&&0===Pd.length&&(Z=0,O+=1,Nd(Qd),"undefined"!=typeof Fibers&&Fibers.oc()))}}:d;return b}var Z=0,F=null,Rd=0,Pd=[],Sd={},Td={},Ud=0,Vd=null,Wd=[];function sa(){return new Promise((a,b)=>{Vd={resolve:a,reject:b}})} +function Xd(){var a=yd(65548),b=a+12;A()[a>>>2>>>0]=b;A()[a+4>>>2>>>0]=b+65536;b=Pd[0];var c=Sd[b];void 0===c&&(c=Ud++,Sd[b]=c,Td[c]=b);b=c;z()[a+8>>>2>>>0]=b;return a}function Yd(){var a=z()[F+8>>>2>>>0];a=Y[Td[a]];--O;return a()} +function Zd(a){if(!I){if(0===Z){var b=!1,c=!1;a((d=0)=>{if(!I&&(Rd=d,b=!0,c)){Z=2;Nd(()=>$d(F));"undefined"!=typeof Browser&&Browser.Jb.Qb&&Browser.Jb.resume();d=!1;try{var f=Yd()}catch(l){f=l,d=!0}var g=!1;if(!F){var k=Vd;k&&(Vd=null,(d?k.reject:k.resolve)(f),g=!0)}if(d&&!g)throw f;}});c=!0;b||(Z=1,F=Xd(),"undefined"!=typeof Browser&&Browser.Jb.Qb&&Browser.Jb.pause(),Nd(()=>ae(F)))}else 2===Z?(Z=0,Nd(be),X(F),F=null,Wd.forEach(Gd)):cb(`invalid state: ${Z}`);return Rd}} +function Lc(a){return Zd(b=>{a().then(b)})}function Sb(a){a>>>=0;return Lc(()=>{a=V(a);return a.then(W)})}var ce=[];function Tb(a,b,c,d){c>>>=0;d>>>=0;a=ce[a>>>0];b=V(b>>>0);return a(null,b,c,d)}var de={},ee=a=>{var b=de[a];return void 0===b?Q(a):b};function Ub(a,b,c,d,f){c>>>=0;d>>>=0;f>>>=0;a=ce[a>>>0];b=V(b>>>0);c=ee(c);return a(b,b[c],d,f)}var fe=()=>"object"==typeof globalThis?globalThis:Function("return this")();function Wb(a){a>>>=0;if(0===a)return W(fe());a=ee(a);return W(fe()[a])} +var ge=a=>{var b=ce.length;ce.push(a);return b},he=(a,b)=>{for(var c=Array(a),d=0;d>>2>>>0],"parameter "+d);return c},ie=(a,b)=>Object.defineProperty(b,"name",{value:a});function je(a){var b=Function;if(!(b instanceof Function))throw new TypeError(`new_ called with constructor type ${typeof b} which is not a function`);var c=ie(b.name||"unknownFunctionName",function(){});c.prototype=b.prototype;c=new c;a=b.apply(c,a);return a instanceof Object?a:c} +function Xb(a,b,c){b=he(a,b>>>0);var d=b.shift();a--;var f="return function (obj, func, destructorsRef, args) {\n",g=0,k=[];0===c&&k.push("obj");for(var l=["retType"],p=[d],n=0;nr.name).join(", ")}) => ${d.name}>`;return ge(ie(c,a))}function Yb(a){a=ee(a>>>0);return W(B[a])}function Zb(a,b){b>>>=0;a=V(a>>>0);b=V(b);return W(a[b])}function $b(a){a>>>=0;9>>0);for(var b=Array(a.length),c=0;c>>0))}function dc(){return W({})} +function ec(a){a>>>=0;for(var b=V(a);b.length;){var c=b.pop();b.pop()(c)}Vb(a)}function fc(a,b,c){b>>>=0;c>>>=0;a=V(a>>>0);b=V(b);c=V(c);a[b]=c}function gc(a,b){b>>>=0;a=Ld(a>>>0,"_emval_take_value");a=a.readValueFromPointer(b);return W(a)} +function hc(a,b){a=-9007199254740992>a||9007199254740992>>=0;a=new 
Date(1E3*a);z()[b>>>2>>>0]=a.getUTCSeconds();z()[b+4>>>2>>>0]=a.getUTCMinutes();z()[b+8>>>2>>>0]=a.getUTCHours();z()[b+12>>>2>>>0]=a.getUTCDate();z()[b+16>>>2>>>0]=a.getUTCMonth();z()[b+20>>>2>>>0]=a.getUTCFullYear()-1900;z()[b+24>>>2>>>0]=a.getUTCDay();a=(a.getTime()-Date.UTC(a.getUTCFullYear(),0,1,0,0,0,0))/864E5|0;z()[b+28>>>2>>>0]=a} +var ke=a=>0===a%4&&(0!==a%100||0===a%400),le=[0,31,60,91,121,152,182,213,244,274,305,335],me=[0,31,59,90,120,151,181,212,243,273,304,334]; +function ic(a,b){a=-9007199254740992>a||9007199254740992>>=0;a=new Date(1E3*a);z()[b>>>2>>>0]=a.getSeconds();z()[b+4>>>2>>>0]=a.getMinutes();z()[b+8>>>2>>>0]=a.getHours();z()[b+12>>>2>>>0]=a.getDate();z()[b+16>>>2>>>0]=a.getMonth();z()[b+20>>>2>>>0]=a.getFullYear()-1900;z()[b+24>>>2>>>0]=a.getDay();var c=(ke(a.getFullYear())?le:me)[a.getMonth()]+a.getDate()-1|0;z()[b+28>>>2>>>0]=c;z()[b+36>>>2>>>0]=-(60*a.getTimezoneOffset());c=(new Date(a.getFullYear(),6,1)).getTimezoneOffset(); var d=(new Date(a.getFullYear(),0,1)).getTimezoneOffset();a=(c!=d&&a.getTimezoneOffset()==Math.min(d,c))|0;z()[b+32>>>2>>>0]=a} -function ic(a){a>>>=0;var b=new Date(z()[a+20>>>2>>>0]+1900,z()[a+16>>>2>>>0],z()[a+12>>>2>>>0],z()[a+8>>>2>>>0],z()[a+4>>>2>>>0],z()[a>>>2>>>0],0),c=z()[a+32>>>2>>>0],d=b.getTimezoneOffset(),f=(new Date(b.getFullYear(),6,1)).getTimezoneOffset(),g=(new Date(b.getFullYear(),0,1)).getTimezoneOffset(),k=Math.min(g,f);0>c?z()[a+32>>>2>>>0]=Number(f!=g&&k==d):0>>2>>>0]=b.getDay();c=(ie(b.getFullYear())?je:ke)[b.getMonth()]+ -b.getDate()-1|0;z()[a+28>>>2>>>0]=c;z()[a>>>2>>>0]=b.getSeconds();z()[a+4>>>2>>>0]=b.getMinutes();z()[a+8>>>2>>>0]=b.getHours();z()[a+12>>>2>>>0]=b.getDate();z()[a+16>>>2>>>0]=b.getMonth();z()[a+20>>>2>>>0]=b.getYear();a=b.getTime();return BigInt(isNaN(a)?-1:a/1E3)}function jc(a,b,c,d,f,g,k){return E?P(16,1,a,b,c,d,f,g,k):-52}function kc(a,b,c,d,f,g){if(E)return P(17,1,a,b,c,d,f,g)} -function lc(a,b,c,d){a>>>=0;b>>>=0;c>>>=0;d>>>=0;var f=(new Date).getFullYear(),g=new Date(f,0,1),k=new Date(f,6,1);f=g.getTimezoneOffset();var l=k.getTimezoneOffset(),p=Math.max(f,l);A()[a>>>2>>>0]=60*p;z()[b>>>2>>>0]=Number(f!=l);a=n=>n.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:"short"}).split(" ")[1];g=a(g);k=a(k);l{le.length=0;for(var c;c=w()[a++>>>0];){var d=105!=c;d&=112!=c;b+=d&&b%8?4:0;le.push(112==c?A()[b>>>2>>>0]:106==c?J[b>>>3]:105==c?z()[b>>>2>>>0]:la()[b>>>3>>>0]);b+=d?8:4}return le};function mc(a,b,c){a>>>=0;b=me(b>>>0,c>>>0);return Hc[a](...b)}function nc(a,b,c){a>>>=0;b=me(b>>>0,c>>>0);return Hc[a](...b)}var oc=()=>{},pc=()=>Date.now();function qc(a,b){return H(K(a>>>0,b>>>0))}var rc=()=>{O+=1;throw"unwind";};function sc(){return 4294901760}var tc; -tc=()=>performance.timeOrigin+performance.now();var uc=()=>D?require("os").cpus().length:navigator.hardwareConcurrency;function vc(a){a>>>=0;var b=w().length;if(a<=b||4294901760=c;c*=2){var d=b*(1+.2/c);d=Math.min(d,a+100663296);var f=Math;d=Math.max(a,d);a:{f=(f.min.call(f,4294901760,d+(65536-d%65536)%65536)-m.buffer.byteLength+65535)/65536;try{m.grow(f);u();var g=1;break a}catch(k){}g=void 0}if(g)return!0}return!1} -var ne=()=>{cb("Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER");return 0},oe={},pe=a=>{a.forEach(b=>{var c=ne();c&&(oe[c]=b)})};function wc(){var a=Error().stack.toString().split("\n");"Error"==a[0]&&a.shift();pe(a);oe.Lb=ne();oe.Yb=a;return oe.Lb} -function xc(a,b,c){a>>>=0;b>>>=0;if(oe.Lb==a)var d=oe.Yb;else d=Error().stack.toString().split("\n"),"Error"==d[0]&&d.shift(),pe(d);for(var 
f=3;d[f]&&ne()!=a;)++f;for(a=0;a>>2>>>0]=ne();return a} -var qe={},se=()=>{if(!re){var a={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:("object"==typeof navigator&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8",_:va||"./this.program"},b;for(b in qe)void 0===qe[b]?delete a[b]:a[b]=qe[b];var c=[];for(b in a)c.push(`${b}=${a[b]}`);re=c}return re},re; -function yc(a,b){if(E)return P(18,1,a,b);a>>>=0;b>>>=0;var c=0;se().forEach((d,f)=>{var g=b+c;f=A()[a+4*f>>>2>>>0]=g;for(g=0;g>>0]=d.charCodeAt(g);e()[f>>>0]=0;c+=d.length+1});return 0}function zc(a,b){if(E)return P(19,1,a,b);a>>>=0;b>>>=0;var c=se();A()[a>>>2>>>0]=c.length;var d=0;c.forEach(f=>d+=f.length+1);A()[b>>>2>>>0]=d;return 0}function Bc(a){return E?P(20,1,a):52}function Cc(a,b,c,d){return E?P(21,1,a,b,c,d):52}function Dc(a,b,c,d){return E?P(22,1,a,b,c,d):70} -var te=[null,[],[]];function Ec(a,b,c,d){if(E)return P(23,1,a,b,c,d);b>>>=0;c>>>=0;d>>>=0;for(var f=0,g=0;g>>2>>>0],l=A()[b+4>>>2>>>0];b+=8;for(var p=0;p>>0],r=te[a];0===n||10===n?((1===a?Ea:H)(hd(r,0)),r.length=0):r.push(n)}f+=l}A()[d>>>2>>>0]=f;return 0}var ue=[31,29,31,30,31,30,31,31,30,31,30,31],ve=[31,28,31,30,31,30,31,31,30,31,30,31];function we(a){var b=Array(jd(a)+1);kd(a,b,0,b.length);return b}var xe=(a,b)=>{e().set(a,b>>>0)}; -function Fc(a,b,c,d){function f(h,t,v){for(h="number"==typeof h?h.toString():h||"";h.lengthTc?-1:0aa-h.getDate())t-=aa-h.getDate()+1,h.setDate(1),11>v?h.setMonth(v+1):(h.setMonth(0),h.setFullYear(h.getFullYear()+1));else{h.setDate(h.getDate()+t);break}}v=new Date(h.getFullYear()+1,0,4);t=l(new Date(h.getFullYear(), -0,4));v=l(v);return 0>=k(t,h)?0>=k(v,h)?h.getFullYear()+1:h.getFullYear():h.getFullYear()-1}a>>>=0;b>>>=0;c>>>=0;d>>>=0;var n=A()[d+40>>>2>>>0];d={cc:z()[d>>>2>>>0],bc:z()[d+4>>>2>>>0],Db:z()[d+8>>>2>>>0],Hb:z()[d+12>>>2>>>0],Eb:z()[d+16>>>2>>>0],zb:z()[d+20>>>2>>>0],rb:z()[d+24>>>2>>>0],yb:z()[d+28>>>2>>>0],kc:z()[d+32>>>2>>>0],ac:z()[d+36>>>2>>>0],dc:n?K(n):""};c=K(c);n={"%c":"%a %b %d %H:%M:%S %Y","%D":"%m/%d/%y","%F":"%Y-%m-%d","%h":"%b","%r":"%I:%M:%S %p","%R":"%H:%M","%T":"%H:%M:%S","%x":"%m/%d/%y", -"%X":"%H:%M:%S","%Ec":"%c","%EC":"%C","%Ex":"%m/%d/%y","%EX":"%H:%M:%S","%Ey":"%y","%EY":"%Y","%Od":"%d","%Oe":"%e","%OH":"%H","%OI":"%I","%Om":"%m","%OM":"%M","%OS":"%S","%Ou":"%u","%OU":"%U","%OV":"%V","%Ow":"%w","%OW":"%W","%Oy":"%y"};for(var r in n)c=c.replace(new RegExp(r,"g"),n[r]);var x="Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),y="January February March April May June July August September October November December".split(" ");n={"%a":h=>x[h.rb].substring(0,3),"%A":h=> -x[h.rb],"%b":h=>y[h.Eb].substring(0,3),"%B":h=>y[h.Eb],"%C":h=>g((h.zb+1900)/100|0,2),"%d":h=>g(h.Hb,2),"%e":h=>f(h.Hb,2," "),"%g":h=>p(h).toString().substring(2),"%G":p,"%H":h=>g(h.Db,2),"%I":h=>{h=h.Db;0==h?h=12:12{for(var t=0,v=0;v<=h.Eb-1;t+=(ie(h.zb+1900)?ue:ve)[v++]);return g(h.Hb+t,3)},"%m":h=>g(h.Eb+1,2),"%M":h=>g(h.bc,2),"%n":()=>"\n","%p":h=>0<=h.Db&&12>h.Db?"AM":"PM","%S":h=>g(h.cc,2),"%t":()=>"\t","%u":h=>h.rb||7,"%U":h=>g(Math.floor((h.yb+7-h.rb)/7), -2),"%V":h=>{var t=Math.floor((h.yb+7-(h.rb+6)%7)/7);2>=(h.rb+371-h.yb-2)%7&&t++;if(t)53==t&&(v=(h.rb+371-h.yb)%7,4==v||3==v&&ie(h.zb)||(t=1));else{t=52;var v=(h.rb+7-h.yb-1)%7;(4==v||5==v&&ie(h.zb%400-1))&&t++}return g(t,2)},"%w":h=>h.rb,"%W":h=>g(Math.floor((h.yb+7-(h.rb+6)%7)/7),2),"%y":h=>(h.zb+1900).toString().substring(2),"%Y":h=>h.zb+1900,"%z":h=>{h=h.ac;var 
t=0<=h;h=Math.abs(h)/60;return(t?"+":"-")+String("0000"+(h/60*100+h%60)).slice(-4)},"%Z":h=>h.dc,"%%":()=>"%"};c=c.replace(/%%/g,"\x00\x00"); -for(r in n)c.includes(r)&&(c=c.replace(new RegExp(r,"g"),n[r](d)));c=c.replace(/\0\0/g,"%");r=we(c);if(r.length>b)return 0;xe(r,a);return r.length-1}function Gc(a,b,c,d){return Fc(a>>>0,b>>>0,c>>>0,d>>>0)}E||Wc();for(var ye=Array(256),ze=0;256>ze;++ze)ye[ze]=String.fromCharCode(ze);md=ye;R=B.BindingError=class extends Error{constructor(a){super(a);this.name="BindingError"}};B.InternalError=class extends Error{constructor(a){super(a);this.name="InternalError"}};U.push(0,1,void 0,1,null,1,!0,1,!1,1); -B.count_emval_handles=()=>U.length/2-5-sd.length; -var Hd=[Sc,Uc,fd,nb,ob,pb,qb,rb,sb,tb,ub,vb,wb,xb,yb,zb,jc,kc,yc,zc,Bc,Cc,Dc,Ec],jb,Y=function(){function a(c,d){Y=c.exports;Y=Md();Y=Ae();Vc.push(Y.fb);Xa.unshift(Y.ya);Ta=d;bb();return Y}var b=Ja();Za++;if(B.instantiateWasm)try{return B.instantiateWasm(b,a)}catch(c){H(`Module.instantiateWasm callback failed with error: ${c}`),oa(c)}eb||=B.locateFile?db("ort-wasm-simd-threaded.jsep.wasm")?"ort-wasm-simd-threaded.jsep.wasm":B.locateFile?B.locateFile("ort-wasm-simd-threaded.jsep.wasm",G):G+"ort-wasm-simd-threaded.jsep.wasm": -(new URL("ort-wasm-simd-threaded.jsep.wasm",import.meta.url)).href;ib(b,function(c){a(c.instance,c.module)}).catch(oa);return{}}(),Id=a=>(Id=Y.za)(a),Oa=()=>(Oa=Y.Aa)();B._OrtInit=(a,b)=>(B._OrtInit=Y.Ba)(a,b);B._OrtGetLastError=(a,b)=>(B._OrtGetLastError=Y.Ca)(a,b);B._OrtCreateSessionOptions=(a,b,c,d,f,g,k,l,p,n)=>(B._OrtCreateSessionOptions=Y.Da)(a,b,c,d,f,g,k,l,p,n);B._OrtAppendExecutionProvider=(a,b)=>(B._OrtAppendExecutionProvider=Y.Ea)(a,b); -B._OrtAddFreeDimensionOverride=(a,b,c)=>(B._OrtAddFreeDimensionOverride=Y.Fa)(a,b,c);B._OrtAddSessionConfigEntry=(a,b,c)=>(B._OrtAddSessionConfigEntry=Y.Ga)(a,b,c);B._OrtReleaseSessionOptions=a=>(B._OrtReleaseSessionOptions=Y.Ha)(a);B._OrtCreateSession=(a,b,c)=>(B._OrtCreateSession=Y.Ia)(a,b,c);B._OrtReleaseSession=a=>(B._OrtReleaseSession=Y.Ja)(a);B._OrtGetInputOutputCount=(a,b,c)=>(B._OrtGetInputOutputCount=Y.Ka)(a,b,c);B._OrtGetInputName=(a,b)=>(B._OrtGetInputName=Y.La)(a,b); -B._OrtGetOutputName=(a,b)=>(B._OrtGetOutputName=Y.Ma)(a,b);B._OrtFree=a=>(B._OrtFree=Y.Na)(a);B._OrtCreateTensor=(a,b,c,d,f,g)=>(B._OrtCreateTensor=Y.Oa)(a,b,c,d,f,g);B._OrtGetTensorData=(a,b,c,d,f)=>(B._OrtGetTensorData=Y.Pa)(a,b,c,d,f);B._OrtReleaseTensor=a=>(B._OrtReleaseTensor=Y.Qa)(a);B._OrtCreateRunOptions=(a,b,c,d)=>(B._OrtCreateRunOptions=Y.Ra)(a,b,c,d);B._OrtAddRunConfigEntry=(a,b,c)=>(B._OrtAddRunConfigEntry=Y.Sa)(a,b,c);B._OrtReleaseRunOptions=a=>(B._OrtReleaseRunOptions=Y.Ta)(a); -B._OrtCreateBinding=a=>(B._OrtCreateBinding=Y.Ua)(a);B._OrtBindInput=(a,b,c)=>(B._OrtBindInput=Y.Va)(a,b,c);B._OrtBindOutput=(a,b,c,d)=>(B._OrtBindOutput=Y.Wa)(a,b,c,d);B._OrtClearBoundOutputs=a=>(B._OrtClearBoundOutputs=Y.Xa)(a);B._OrtReleaseBinding=a=>(B._OrtReleaseBinding=Y.Ya)(a);B._OrtRunWithBinding=(a,b,c,d,f)=>(B._OrtRunWithBinding=Y.Za)(a,b,c,d,f);B._OrtRun=(a,b,c,d,f,g,k,l)=>(B._OrtRun=Y._a)(a,b,c,d,f,g,k,l);B._OrtEndProfiling=a=>(B._OrtEndProfiling=Y.$a)(a); -B._JsepOutput=(a,b,c)=>(B._JsepOutput=Y.ab)(a,b,c);B._JsepGetNodeName=a=>(B._JsepGetNodeName=Y.bb)(a); -var 
Ia=()=>(Ia=Y.cb)(),wd=B._malloc=a=>(wd=B._malloc=Y.db)(a),X=B._free=a=>(X=B._free=Y.eb)(a),Ka=(a,b,c,d,f,g)=>(Ka=Y.hb)(a,b,c,d,f,g),Sa=()=>(Sa=Y.ib)(),Qc=(a,b,c,d,f)=>(Qc=Y.jb)(a,b,c,d,f),Yc=a=>(Yc=Y.kb)(a),Qa=a=>(Qa=Y.lb)(a),Fd=()=>(Fd=Y.mb)(),ad=(a,b)=>(ad=Y.nb)(a,b),Rc=a=>(Rc=Y.ob)(a),Pc=a=>(Pc=Y.pb)(a),Oc=()=>(Oc=Y.qb)(),bd=B.dynCall_ii=(a,b)=>(bd=B.dynCall_ii=Y.sb)(a,b),Zd=a=>(Zd=Y.tb)(a),Od=()=>(Od=Y.ub)(),Yd=a=>(Yd=Y.vb)(a),$d=()=>($d=Y.wb)();B.___start_em_js=1349341;B.___stop_em_js=1349502; -function Ae(){var a=Y;a=Object.assign({},a);var b=d=>f=>d(f)>>>0,c=d=>()=>d()>>>0;a.za=b(a.za);a.cb=c(a.cb);a.db=b(a.db);a.emscripten_main_runtime_thread_id=c(a.emscripten_main_runtime_thread_id);a.pb=b(a.pb);a.qb=c(a.qb);return a}B.stackSave=()=>Oc();B.stackRestore=a=>Rc(a);B.stackAlloc=a=>Pc(a);B.UTF8ToString=K;B.stringToUTF8=ld;B.lengthBytesUTF8=jd;var Be;ab=function Ce(){Be||De();Be||(ab=Ce)}; -function De(){if(!(0>>=0;var b=new Date(z()[a+20>>>2>>>0]+1900,z()[a+16>>>2>>>0],z()[a+12>>>2>>>0],z()[a+8>>>2>>>0],z()[a+4>>>2>>>0],z()[a>>>2>>>0],0),c=z()[a+32>>>2>>>0],d=b.getTimezoneOffset(),f=(new Date(b.getFullYear(),6,1)).getTimezoneOffset(),g=(new Date(b.getFullYear(),0,1)).getTimezoneOffset(),k=Math.min(g,f);0>c?z()[a+32>>>2>>>0]=Number(f!=g&&k==d):0>>2>>>0]=b.getDay();c=(ke(b.getFullYear())?le:me)[b.getMonth()]+ +b.getDate()-1|0;z()[a+28>>>2>>>0]=c;z()[a>>>2>>>0]=b.getSeconds();z()[a+4>>>2>>>0]=b.getMinutes();z()[a+8>>>2>>>0]=b.getHours();z()[a+12>>>2>>>0]=b.getDate();z()[a+16>>>2>>>0]=b.getMonth();z()[a+20>>>2>>>0]=b.getYear();a=b.getTime();return BigInt(isNaN(a)?-1:a/1E3)}function kc(a,b,c,d,f,g,k){return E?P(16,1,a,b,c,d,f,g,k):-52}function lc(a,b,c,d,f,g){if(E)return P(17,1,a,b,c,d,f,g)} +function mc(a,b,c,d){a>>>=0;b>>>=0;c>>>=0;d>>>=0;var f=(new Date).getFullYear(),g=new Date(f,0,1),k=new Date(f,6,1);f=g.getTimezoneOffset();var l=k.getTimezoneOffset(),p=Math.max(f,l);A()[a>>>2>>>0]=60*p;z()[b>>>2>>>0]=Number(f!=l);a=n=>n.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:"short"}).split(" ")[1];g=a(g);k=a(k);l{ne.length=0;for(var c;c=w()[a++>>>0];){var d=105!=c;d&=112!=c;b+=d&&b%8?4:0;ne.push(112==c?A()[b>>>2>>>0]:106==c?J[b>>>3]:105==c?z()[b>>>2>>>0]:la()[b>>>3>>>0]);b+=d?8:4}return ne};function nc(a,b,c){a>>>=0;b=oe(b>>>0,c>>>0);return Kc[a](...b)}function oc(a,b,c){a>>>=0;b=oe(b>>>0,c>>>0);return Kc[a](...b)}var pc=()=>{},qc=()=>Date.now();function rc(a,b){return H(K(a>>>0,b>>>0))}var sc=()=>{O+=1;throw"unwind";};function tc(){return 4294901760}var uc; +uc=()=>performance.timeOrigin+performance.now();var vc=()=>D?require("os").cpus().length:navigator.hardwareConcurrency;function wc(){cb("Cannot use emscripten_pc_get_function without -sUSE_OFFSET_CONVERTER");return 0} +function xc(a){a>>>=0;var b=w().length;if(a<=b||4294901760=c;c*=2){var d=b*(1+.2/c);d=Math.min(d,a+100663296);var f=Math;d=Math.max(a,d);a:{f=(f.min.call(f,4294901760,d+(65536-d%65536)%65536)-m.buffer.byteLength+65535)/65536;try{m.grow(f);u();var g=1;break a}catch(k){}g=void 0}if(g)return!0}return!1}var pe=()=>{cb("Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER");return 0},qe={},re=a=>{a.forEach(b=>{var c=pe();c&&(qe[c]=b)})}; +function yc(){var a=Error().stack.toString().split("\n");"Error"==a[0]&&a.shift();re(a);qe.Ob=pe();qe.ac=a;return qe.Ob}function zc(a,b,c){a>>>=0;b>>>=0;if(qe.Ob==a)var d=qe.ac;else d=Error().stack.toString().split("\n"),"Error"==d[0]&&d.shift(),re(d);for(var f=3;d[f]&&pe()!=a;)++f;for(a=0;a>>2>>>0]=pe();return a} +var se={},ue=()=>{if(!te){var 
a={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:("object"==typeof navigator&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8",_:va||"./this.program"},b;for(b in se)void 0===se[b]?delete a[b]:a[b]=se[b];var c=[];for(b in a)c.push(`${b}=${a[b]}`);te=c}return te},te; +function Ac(a,b){if(E)return P(18,1,a,b);a>>>=0;b>>>=0;var c=0;ue().forEach((d,f)=>{var g=b+c;f=A()[a+4*f>>>2>>>0]=g;for(g=0;g>>0]=d.charCodeAt(g);e()[f>>>0]=0;c+=d.length+1});return 0}function Bc(a,b){if(E)return P(19,1,a,b);a>>>=0;b>>>=0;var c=ue();A()[a>>>2>>>0]=c.length;var d=0;c.forEach(f=>d+=f.length+1);A()[b>>>2>>>0]=d;return 0}function Dc(a){return E?P(20,1,a):52}function Ec(a,b,c,d){return E?P(21,1,a,b,c,d):52}function Fc(a,b,c,d){return E?P(22,1,a,b,c,d):70} +var ve=[null,[],[]];function Gc(a,b,c,d){if(E)return P(23,1,a,b,c,d);b>>>=0;c>>>=0;d>>>=0;for(var f=0,g=0;g>>2>>>0],l=A()[b+4>>>2>>>0];b+=8;for(var p=0;p>>0],r=ve[a];0===n||10===n?((1===a?Ea:H)(kd(r,0)),r.length=0):r.push(n)}f+=l}A()[d>>>2>>>0]=f;return 0}var we=[31,29,31,30,31,30,31,31,30,31,30,31],xe=[31,28,31,30,31,30,31,31,30,31,30,31];function ye(a){var b=Array(ld(a)+1);md(a,b,0,b.length);return b}var ze=(a,b)=>{e().set(a,b>>>0)}; +function Ic(a,b,c,d){function f(h,t,v){for(h="number"==typeof h?h.toString():h||"";h.lengthTc?-1:0aa-h.getDate())t-=aa-h.getDate()+1,h.setDate(1),11>v?h.setMonth(v+1):(h.setMonth(0),h.setFullYear(h.getFullYear()+1));else{h.setDate(h.getDate()+t);break}}v=new Date(h.getFullYear()+1,0,4);t=l(new Date(h.getFullYear(), +0,4));v=l(v);return 0>=k(t,h)?0>=k(v,h)?h.getFullYear()+1:h.getFullYear():h.getFullYear()-1}a>>>=0;b>>>=0;c>>>=0;d>>>=0;var n=A()[d+40>>>2>>>0];d={fc:z()[d>>>2>>>0],ec:z()[d+4>>>2>>>0],Gb:z()[d+8>>>2>>>0],Kb:z()[d+12>>>2>>>0],Hb:z()[d+16>>>2>>>0],Cb:z()[d+20>>>2>>>0],ub:z()[d+24>>>2>>>0],Bb:z()[d+28>>>2>>>0],nc:z()[d+32>>>2>>>0],dc:z()[d+36>>>2>>>0],hc:n?K(n):""};c=K(c);n={"%c":"%a %b %d %H:%M:%S %Y","%D":"%m/%d/%y","%F":"%Y-%m-%d","%h":"%b","%r":"%I:%M:%S %p","%R":"%H:%M","%T":"%H:%M:%S","%x":"%m/%d/%y", +"%X":"%H:%M:%S","%Ec":"%c","%EC":"%C","%Ex":"%m/%d/%y","%EX":"%H:%M:%S","%Ey":"%y","%EY":"%Y","%Od":"%d","%Oe":"%e","%OH":"%H","%OI":"%I","%Om":"%m","%OM":"%M","%OS":"%S","%Ou":"%u","%OU":"%U","%OV":"%V","%Ow":"%w","%OW":"%W","%Oy":"%y"};for(var r in n)c=c.replace(new RegExp(r,"g"),n[r]);var x="Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),y="January February March April May June July August September October November December".split(" ");n={"%a":h=>x[h.ub].substring(0,3),"%A":h=> +x[h.ub],"%b":h=>y[h.Hb].substring(0,3),"%B":h=>y[h.Hb],"%C":h=>g((h.Cb+1900)/100|0,2),"%d":h=>g(h.Kb,2),"%e":h=>f(h.Kb,2," "),"%g":h=>p(h).toString().substring(2),"%G":p,"%H":h=>g(h.Gb,2),"%I":h=>{h=h.Gb;0==h?h=12:12{for(var t=0,v=0;v<=h.Hb-1;t+=(ke(h.Cb+1900)?we:xe)[v++]);return g(h.Kb+t,3)},"%m":h=>g(h.Hb+1,2),"%M":h=>g(h.ec,2),"%n":()=>"\n","%p":h=>0<=h.Gb&&12>h.Gb?"AM":"PM","%S":h=>g(h.fc,2),"%t":()=>"\t","%u":h=>h.ub||7,"%U":h=>g(Math.floor((h.Bb+7-h.ub)/7), +2),"%V":h=>{var t=Math.floor((h.Bb+7-(h.ub+6)%7)/7);2>=(h.ub+371-h.Bb-2)%7&&t++;if(t)53==t&&(v=(h.ub+371-h.Bb)%7,4==v||3==v&&ke(h.Cb)||(t=1));else{t=52;var v=(h.ub+7-h.Bb-1)%7;(4==v||5==v&&ke(h.Cb%400-1))&&t++}return g(t,2)},"%w":h=>h.ub,"%W":h=>g(Math.floor((h.Bb+7-(h.ub+6)%7)/7),2),"%y":h=>(h.Cb+1900).toString().substring(2),"%Y":h=>h.Cb+1900,"%z":h=>{h=h.dc;var 
t=0<=h;h=Math.abs(h)/60;return(t?"+":"-")+String("0000"+(h/60*100+h%60)).slice(-4)},"%Z":h=>h.hc,"%%":()=>"%"};c=c.replace(/%%/g,"\x00\x00"); +for(r in n)c.includes(r)&&(c=c.replace(new RegExp(r,"g"),n[r](d)));c=c.replace(/\0\0/g,"%");r=ye(c);if(r.length>b)return 0;ze(r,a);return r.length-1}function Jc(a,b,c,d){return Ic(a>>>0,b>>>0,c>>>0,d>>>0)}E||Yc();for(var Ae=Array(256),Be=0;256>Be;++Be)Ae[Be]=String.fromCharCode(Be);od=Ae;R=B.BindingError=class extends Error{constructor(a){super(a);this.name="BindingError"}};B.InternalError=class extends Error{constructor(a){super(a);this.name="InternalError"}};U.push(0,1,void 0,1,null,1,!0,1,!1,1); +B.count_emval_handles=()=>U.length/2-5-ud.length; +var Jd=[Hc,Wc,hd,ob,pb,qb,rb,sb,tb,ub,vb,wb,xb,yb,zb,Ab,kc,lc,Ac,Bc,Dc,Ec,Fc,Gc],jb,Y=function(){function a(c,d){Y=c.exports;Y=Od();Y=Ce();Xc.push(Y.ib);Xa.unshift(Y.Ba);Ta=d;bb();return Y}var b=Ja();Za++;if(B.instantiateWasm)try{return B.instantiateWasm(b,a)}catch(c){H(`Module.instantiateWasm callback failed with error: ${c}`),oa(c)}eb||=B.locateFile?db("ort-wasm-simd-threaded.jsep.wasm")?"ort-wasm-simd-threaded.jsep.wasm":B.locateFile?B.locateFile("ort-wasm-simd-threaded.jsep.wasm",G):G+"ort-wasm-simd-threaded.jsep.wasm": +(new URL("ort-wasm-simd-threaded.jsep.wasm",import.meta.url)).href;ib(b,function(c){a(c.instance,c.module)}).catch(oa);return{}}(),Kd=a=>(Kd=Y.Ca)(a),Oa=()=>(Oa=Y.Da)();B._OrtInit=(a,b)=>(B._OrtInit=Y.Ea)(a,b);B._OrtGetLastError=(a,b)=>(B._OrtGetLastError=Y.Fa)(a,b);B._OrtCreateSessionOptions=(a,b,c,d,f,g,k,l,p,n)=>(B._OrtCreateSessionOptions=Y.Ga)(a,b,c,d,f,g,k,l,p,n);B._OrtAppendExecutionProvider=(a,b)=>(B._OrtAppendExecutionProvider=Y.Ha)(a,b); +B._OrtAddFreeDimensionOverride=(a,b,c)=>(B._OrtAddFreeDimensionOverride=Y.Ia)(a,b,c);B._OrtAddSessionConfigEntry=(a,b,c)=>(B._OrtAddSessionConfigEntry=Y.Ja)(a,b,c);B._OrtReleaseSessionOptions=a=>(B._OrtReleaseSessionOptions=Y.Ka)(a);B._OrtCreateSession=(a,b,c)=>(B._OrtCreateSession=Y.La)(a,b,c);B._OrtReleaseSession=a=>(B._OrtReleaseSession=Y.Ma)(a);B._OrtGetInputOutputCount=(a,b,c)=>(B._OrtGetInputOutputCount=Y.Na)(a,b,c);B._OrtGetInputName=(a,b)=>(B._OrtGetInputName=Y.Oa)(a,b); +B._OrtGetOutputName=(a,b)=>(B._OrtGetOutputName=Y.Pa)(a,b);B._OrtFree=a=>(B._OrtFree=Y.Qa)(a);B._OrtCreateTensor=(a,b,c,d,f,g)=>(B._OrtCreateTensor=Y.Ra)(a,b,c,d,f,g);B._OrtGetTensorData=(a,b,c,d,f)=>(B._OrtGetTensorData=Y.Sa)(a,b,c,d,f);B._OrtReleaseTensor=a=>(B._OrtReleaseTensor=Y.Ta)(a);B._OrtCreateRunOptions=(a,b,c,d)=>(B._OrtCreateRunOptions=Y.Ua)(a,b,c,d);B._OrtAddRunConfigEntry=(a,b,c)=>(B._OrtAddRunConfigEntry=Y.Va)(a,b,c);B._OrtReleaseRunOptions=a=>(B._OrtReleaseRunOptions=Y.Wa)(a); +B._OrtCreateBinding=a=>(B._OrtCreateBinding=Y.Xa)(a);B._OrtBindInput=(a,b,c)=>(B._OrtBindInput=Y.Ya)(a,b,c);B._OrtBindOutput=(a,b,c,d)=>(B._OrtBindOutput=Y.Za)(a,b,c,d);B._OrtClearBoundOutputs=a=>(B._OrtClearBoundOutputs=Y._a)(a);B._OrtReleaseBinding=a=>(B._OrtReleaseBinding=Y.$a)(a);B._OrtRunWithBinding=(a,b,c,d,f)=>(B._OrtRunWithBinding=Y.ab)(a,b,c,d,f);B._OrtRun=(a,b,c,d,f,g,k,l)=>(B._OrtRun=Y.bb)(a,b,c,d,f,g,k,l);B._OrtEndProfiling=a=>(B._OrtEndProfiling=Y.cb)(a); +B._JsepOutput=(a,b,c)=>(B._JsepOutput=Y.db)(a,b,c);B._JsepGetNodeName=a=>(B._JsepGetNodeName=Y.eb)(a); +var 
Ia=()=>(Ia=Y.fb)(),yd=B._malloc=a=>(yd=B._malloc=Y.gb)(a),X=B._free=a=>(X=B._free=Y.hb)(a),Ka=(a,b,c,d,f,g)=>(Ka=Y.kb)(a,b,c,d,f,g),Sa=()=>(Sa=Y.lb)(),Uc=(a,b,c,d,f)=>(Uc=Y.mb)(a,b,c,d,f),$c=a=>($c=Y.nb)(a),Qa=a=>(Qa=Y.ob)(a),Hd=()=>(Hd=Y.pb)(),cd=(a,b)=>(cd=Y.qb)(a,b),Vc=a=>(Vc=Y.rb)(a),Sc=a=>(Sc=Y.sb)(a),Rc=()=>(Rc=Y.tb)(),dd=B.dynCall_ii=(a,b)=>(dd=B.dynCall_ii=Y.vb)(a,b),ae=a=>(ae=Y.wb)(a),Qd=()=>(Qd=Y.xb)(),$d=a=>($d=Y.yb)(a),be=()=>(be=Y.zb)();B.___start_em_js=1350717;B.___stop_em_js=1350939; +function Ce(){var a=Y;a=Object.assign({},a);var b=d=>f=>d(f)>>>0,c=d=>()=>d()>>>0;a.Ca=b(a.Ca);a.fb=c(a.fb);a.gb=b(a.gb);a.emscripten_main_runtime_thread_id=c(a.emscripten_main_runtime_thread_id);a.sb=b(a.sb);a.tb=c(a.tb);return a}B.stackSave=()=>Rc();B.stackRestore=a=>Vc(a);B.stackAlloc=a=>Sc(a);B.UTF8ToString=K;B.stringToUTF8=nd;B.lengthBytesUTF8=ld;var De;ab=function Ee(){De||Fe();De||(ab=Ee)}; +function Fe(){if(!(0typeof require<"u"?require:typeof Proxy<"u"?new Proxy(r,{get:(e,n)=>(typeof require<"u"?require:e)[n]}):r)(function(r){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+r+'" is not supported')});var C=(r,e)=>()=>(r&&(e=r(r=0)),e);var Je=(r,e)=>()=>(e||r((e={exports:{}}).exports,e),e.exports),sn=(r,e)=>{for(var n in e)bo(r,n,{get:e[n],enumerable:!0})},Fl=(r,e,n,t)=>{if(e&&typeof e=="object"||typeof e=="function")for(let o of ev(e))!rv.call(r,o)&&o!==n&&bo(r,o,{get:()=>e[o],enumerable:!(t=Q0(e,o))||t.enumerable});return r};var un=(r,e,n)=>(n=r!=null?J0(tv(r)):{},Fl(e||!r||!r.__esModule?bo(n,"default",{value:r,enumerable:!0}):n,r)),Pn=r=>Fl(bo({},"__esModule",{value:!0}),r);var yo,Dr,vr,nv,xo,vo=C(()=>{"use strict";yo=new Map,Dr=[],vr=(r,e,n)=>{if(e&&typeof e.init=="function"&&typeof e.createInferenceSessionHandler=="function"){let t=yo.get(r);if(t===void 0)yo.set(r,{backend:e,priority:n});else{if(t.priority>n)return;if(t.priority===n&&t.backend!==e)throw new Error(`cannot register backend "${r}" using priority ${n}`)}if(n>=0){let o=Dr.indexOf(r);o!==-1&&Dr.splice(o,1);for(let i=0;i{let e=yo.get(r);if(!e)return"backend not found.";if(e.initialized)return e.backend;if(e.aborted)return e.error;{let n=!!e.initPromise;try{return n||(e.initPromise=e.backend.init(r)),await e.initPromise,e.initialized=!0,e.backend}catch(t){return n||(e.error=`${t}`,e.aborted=!0),e.error}finally{delete e.initPromise}}},xo=async r=>{let e=r.executionProviders||[],n=e.map(u=>typeof u=="string"?u:u.name),t=n.length===0?Dr:n,o,i=[],s=new Set;for(let u of t){let l=await nv(u);typeof l=="string"?i.push({name:u,err:l}):(o||(o=l),o===l&&s.add(u))}if(!o)throw new Error(`no available backend found. 
ERR: ${i.map(u=>`[${u.name}] ${u.err}`).join(", ")}`);for(let{name:u,err:l}of i)n.includes(u)&&console.warn(`removing requested execution provider "${u}" from session options because it is not available: ${l}`);let a=e.filter(u=>s.has(typeof u=="string"?u:u.name));return[o,new Proxy(r,{get:(u,l)=>l==="executionProviders"?a:Reflect.get(u,l)})]}});var Ml=C(()=>{"use strict";vo()});var Vl,Gl=C(()=>{"use strict";Vl="1.19.0"});var Ul,It,_a=C(()=>{"use strict";Gl();Ul="warning",It={wasm:{},webgl:{},webgpu:{},versions:{common:Vl},set logLevel(r){if(r!==void 0){if(typeof r!="string"||["verbose","info","warning","error","fatal"].indexOf(r)===-1)throw new Error(`Unsupported logging level: ${r}`);Ul=r}},get logLevel(){return Ul}};Object.defineProperty(It,"logLevel",{enumerable:!0})});var le,Wl=C(()=>{"use strict";_a();le=It});var Hl,ql,Kl=C(()=>{"use strict";Hl=(r,e)=>{let n=typeof document<"u"?document.createElement("canvas"):new OffscreenCanvas(1,1);n.width=r.dims[3],n.height=r.dims[2];let t=n.getContext("2d");if(t!=null){let o,i;e?.tensorLayout!==void 0&&e.tensorLayout==="NHWC"?(o=r.dims[2],i=r.dims[3]):(o=r.dims[3],i=r.dims[2]);let s=e?.format!==void 0?e.format:"RGB",a=e?.norm,u,l;a===void 0||a.mean===void 0?u=[255,255,255,255]:typeof a.mean=="number"?u=[a.mean,a.mean,a.mean,a.mean]:(u=[a.mean[0],a.mean[1],a.mean[2],0],a.mean[3]!==void 0&&(u[3]=a.mean[3])),a===void 0||a.bias===void 0?l=[0,0,0,0]:typeof a.bias=="number"?l=[a.bias,a.bias,a.bias,a.bias]:(l=[a.bias[0],a.bias[1],a.bias[2],0],a.bias[3]!==void 0&&(l[3]=a.bias[3]));let f=i*o,c=0,p=f,b=f*2,h=-1;s==="RGBA"?(c=0,p=f,b=f*2,h=f*3):s==="RGB"?(c=0,p=f,b=f*2):s==="RBG"&&(c=0,b=f,p=f*2);for(let g=0;g{let n=typeof document<"u"?document.createElement("canvas").getContext("2d"):new OffscreenCanvas(1,1).getContext("2d"),t;if(n!=null){let o,i,s;e?.tensorLayout!==void 0&&e.tensorLayout==="NHWC"?(o=r.dims[2],i=r.dims[1],s=r.dims[3]):(o=r.dims[3],i=r.dims[2],s=r.dims[1]);let a=e!==void 0&&e.format!==void 0?e.format:"RGB",u=e?.norm,l,f;u===void 0||u.mean===void 0?l=[255,255,255,255]:typeof u.mean=="number"?l=[u.mean,u.mean,u.mean,u.mean]:(l=[u.mean[0],u.mean[1],u.mean[2],255],u.mean[3]!==void 0&&(l[3]=u.mean[3])),u===void 0||u.bias===void 0?f=[0,0,0,0]:typeof u.bias=="number"?f=[u.bias,u.bias,u.bias,u.bias]:(f=[u.bias[0],u.bias[1],u.bias[2],0],u.bias[3]!==void 0&&(f[3]=u.bias[3]));let c=i*o;if(e!==void 0&&(e.format!==void 0&&s===4&&e.format!=="RGBA"||s===3&&e.format!=="RGB"&&e.format!=="BGR"))throw new Error("Tensor format doesn't match input tensor dims");let p=4,b=0,h=1,g=2,T=3,w=0,v=c,S=c*2,$=-1;a==="RGBA"?(w=0,v=c,S=c*2,$=c*3):a==="RGB"?(w=0,v=c,S=c*2):a==="RBG"&&(w=0,S=c,v=c*2),t=n.createImageData(o,i);for(let P=0;P{"use strict";wo();Ia=(r,e)=>{if(r===void 0)throw new Error("Image buffer must be defined");if(e.height===void 0||e.width===void 0)throw new Error("Image height and width must be defined");if(e.tensorLayout==="NHWC")throw new Error("NHWC Tensor layout is not supported yet");let{height:n,width:t}=e,o=e.norm??{mean:255,bias:0},i,s;typeof o.mean=="number"?i=[o.mean,o.mean,o.mean,o.mean]:i=[o.mean[0],o.mean[1],o.mean[2],o.mean[3]??255],typeof o.bias=="number"?s=[o.bias,o.bias,o.bias,o.bias]:s=[o.bias[0],o.bias[1],o.bias[2],o.bias[3]??0];let a=e.format!==void 0?e.format:"RGBA",u=e.tensorFormat!==void 0&&e.tensorFormat!==void 0?e.tensorFormat:"RGB",l=n*t,f=u==="RGBA"?new Float32Array(l*4):new 
Float32Array(l*3),c=4,p=0,b=1,h=2,g=3,T=0,w=l,v=l*2,S=-1;a==="RGB"&&(c=3,p=0,b=1,h=2,g=-1),u==="RGBA"?S=l*3:u==="RBG"?(T=0,v=l,w=l*2):u==="BGR"&&(v=0,w=l,T=l*2);for(let P=0;P{let n=typeof HTMLImageElement<"u"&&r instanceof HTMLImageElement,t=typeof ImageData<"u"&&r instanceof ImageData,o=typeof ImageBitmap<"u"&&r instanceof ImageBitmap,i=typeof r=="string",s,a=e??{},u=()=>{if(typeof document<"u")return document.createElement("canvas");if(typeof OffscreenCanvas<"u")return new OffscreenCanvas(1,1);throw new Error("Canvas is not supported")},l=f=>f instanceof HTMLCanvasElement||f instanceof OffscreenCanvas?f.getContext("2d"):null;if(n){let f=u();f.width=r.width,f.height=r.height;let c=l(f);if(c!=null){let p=r.height,b=r.width;if(e!==void 0&&e.resizedHeight!==void 0&&e.resizedWidth!==void 0&&(p=e.resizedHeight,b=e.resizedWidth),e!==void 0){if(a=e,e.tensorFormat!==void 0)throw new Error("Image input config format must be RGBA for HTMLImageElement");a.tensorFormat="RGBA",a.height=p,a.width=b}else a.tensorFormat="RGBA",a.height=p,a.width=b;c.drawImage(r,0,0),s=c.getImageData(0,0,b,p).data}else throw new Error("Can not access image data")}else if(t){let f,c;if(e!==void 0&&e.resizedWidth!==void 0&&e.resizedHeight!==void 0?(f=e.resizedHeight,c=e.resizedWidth):(f=r.height,c=r.width),e!==void 0&&(a=e),a.format="RGBA",a.height=f,a.width=c,e!==void 0){let p=u();p.width=c,p.height=f;let b=l(p);if(b!=null)b.putImageData(r,0,0),s=b.getImageData(0,0,c,f).data;else throw new Error("Can not access image data")}else s=r.data}else if(o){if(e===void 0)throw new Error("Please provide image config with format for Imagebitmap");let f=u();f.width=r.width,f.height=r.height;let c=l(f);if(c!=null){let p=r.height,b=r.width;return c.drawImage(r,0,0,b,p),s=c.getImageData(0,0,b,p).data,a.height=p,a.width=b,Ia(s,a)}else throw new Error("Can not access image data")}else{if(i)return new Promise((f,c)=>{let p=u(),b=l(p);if(!r||!b)return c();let h=new Image;h.crossOrigin="Anonymous",h.src=r,h.onload=()=>{p.width=h.width,p.height=h.height,b.drawImage(h,0,0,p.width,p.height);let g=b.getImageData(0,0,p.width,p.height);a.height=p.height,a.width=p.width,f(Ia(g.data,a))}});throw new Error("Input data provided is not supported - aborted tensor creation")}if(s!==void 0)return Ia(s,a);throw new Error("Input data provided is not supported - aborted tensor creation")},Xl=(r,e)=>{let{width:n,height:t,download:o,dispose:i}=e,s=[1,t,n,4];return new ht({location:"texture",type:"float32",texture:r,dims:s,download:o,dispose:i})},Zl=(r,e)=>{let{dataType:n,dims:t,download:o,dispose:i}=e;return new ht({location:"gpu-buffer",type:n??"float32",gpuBuffer:r,dims:t,download:o,dispose:i})},Yl=(r,e,n)=>new ht({location:"cpu-pinned",type:r,data:e,dims:n??[e.length]})});var Br,On,Ql,ec,tc=C(()=>{"use strict";Br=new Map([["float32",Float32Array],["uint8",Uint8Array],["int8",Int8Array],["uint16",Uint16Array],["int16",Int16Array],["int32",Int32Array],["bool",Uint8Array],["float64",Float64Array],["uint32",Uint32Array]]),On=new Map([[Float32Array,"float32"],[Uint8Array,"uint8"],[Int8Array,"int8"],[Uint16Array,"uint16"],[Int16Array,"int16"],[Int32Array,"int32"],[Float64Array,"float64"],[Uint32Array,"uint32"]]),Ql=!1,ec=()=>{if(!Ql){Ql=!0;let r=typeof BigInt64Array<"u"&&BigInt64Array.from,e=typeof BigUint64Array<"u"&&BigUint64Array.from,n=typeof 
Float16Array<"u"&&Float16Array.from;r&&(Br.set("int64",BigInt64Array),On.set(BigInt64Array,"int64")),e&&(Br.set("uint64",BigUint64Array),On.set(BigUint64Array,"uint64")),n?(Br.set("float16",Float16Array),On.set(Float16Array,"float16")):Br.set("float16",Uint16Array)}}});var rc,nc,oc=C(()=>{"use strict";wo();rc=r=>{let e=1;for(let n=0;n{switch(r.location){case"cpu":return new ht(r.type,r.data,e);case"cpu-pinned":return new ht({location:"cpu-pinned",data:r.data,type:r.type,dims:e});case"texture":return new ht({location:"texture",texture:r.texture,type:r.type,dims:e});case"gpu-buffer":return new ht({location:"gpu-buffer",gpuBuffer:r.gpuBuffer,type:r.type,dims:e});default:throw new Error(`tensorReshape: tensor location ${r.location} is not supported`)}}});var ht,wo=C(()=>{"use strict";Kl();Jl();tc();oc();ht=class{constructor(e,n,t){ec();let o,i;if(typeof e=="object"&&"location"in e)switch(this.dataLocation=e.location,o=e.type,i=e.dims,e.location){case"cpu-pinned":{let a=Br.get(o);if(!a)throw new TypeError(`unsupported type "${o}" to create tensor from pinned buffer`);if(!(e.data instanceof a))throw new TypeError(`buffer should be of type ${a.name}`);this.cpuData=e.data;break}case"texture":{if(o!=="float32")throw new TypeError(`unsupported type "${o}" to create tensor from texture`);this.gpuTextureData=e.texture,this.downloader=e.download,this.disposer=e.dispose;break}case"gpu-buffer":{if(o!=="float32"&&o!=="float16"&&o!=="int32"&&o!=="int64"&&o!=="uint32"&&o!=="uint8"&&o!=="bool")throw new TypeError(`unsupported type "${o}" to create tensor from gpu buffer`);this.gpuBufferData=e.gpuBuffer,this.downloader=e.download,this.disposer=e.dispose;break}default:throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`)}else{let a,u;if(typeof e=="string")if(o=e,u=t,e==="string"){if(!Array.isArray(n))throw new TypeError("A string tensor's data must be a string array.");a=n}else{let l=Br.get(e);if(l===void 0)throw new TypeError(`Unsupported tensor type: ${e}.`);if(Array.isArray(n)){if(e==="float16"&&l===Uint16Array)throw new TypeError("Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.");e==="uint64"||e==="int64"?a=l.from(n,BigInt):a=l.from(n)}else if(n instanceof l)a=n;else throw new TypeError(`A ${o} tensor's data must be type of ${l}`)}else if(u=n,Array.isArray(e)){if(e.length===0)throw new TypeError("Tensor type cannot be inferred from an empty array.");let l=typeof e[0];if(l==="string")o="string",a=e;else if(l==="boolean")o="bool",a=Uint8Array.from(e);else throw new TypeError(`Invalid element type of data array: ${l}.`)}else{let l=On.get(e.constructor);if(l===void 0)throw new TypeError(`Unsupported type for tensor data: ${e.constructor}.`);o=l,a=e}if(u===void 0)u=[a.length];else if(!Array.isArray(u))throw new TypeError("A tensor's dims must be a number array");i=u,this.cpuData=a,this.dataLocation="cpu"}let s=rc(i);if(this.cpuData&&s!==this.cpuData.length)throw new Error(`Tensor's size(${s}) does not match data length(${this.cpuData.length}).`);this.type=o,this.dims=i,this.size=s}static async fromImage(e,n){return jl(e,n)}static fromTexture(e,n){return Xl(e,n)}static fromGpuBuffer(e,n){return Zl(e,n)}static fromPinnedBuffer(e,n,t){return Yl(e,n,t)}toDataURL(e){return Hl(this,e)}toImageData(e){return ql(this,e)}get data(){if(this.ensureValid(),!this.cpuData)throw new Error("The data is not on CPU. 
Use `getData()` to download GPU data to CPU, or use `texture` or `gpuBuffer` property to access the GPU data directly.");return this.cpuData}get location(){return this.dataLocation}get texture(){if(this.ensureValid(),!this.gpuTextureData)throw new Error("The data is not stored as a WebGL texture.");return this.gpuTextureData}get gpuBuffer(){if(this.ensureValid(),!this.gpuBufferData)throw new Error("The data is not stored as a WebGPU buffer.");return this.gpuBufferData}async getData(e){switch(this.ensureValid(),this.dataLocation){case"cpu":case"cpu-pinned":return this.data;case"texture":case"gpu-buffer":{if(!this.downloader)throw new Error("The current tensor is not created with a specified data downloader.");if(this.isDownloading)throw new Error("The current tensor is being downloaded.");try{this.isDownloading=!0;let n=await this.downloader();return this.downloader=void 0,this.dataLocation="cpu",this.cpuData=n,e&&this.disposer&&(this.disposer(),this.disposer=void 0),n}finally{this.isDownloading=!1}}default:throw new Error(`cannot get data from location: ${this.dataLocation}`)}}dispose(){if(this.isDownloading)throw new Error("The current tensor is being downloaded.");this.disposer&&(this.disposer(),this.disposer=void 0),this.cpuData=void 0,this.gpuTextureData=void 0,this.gpuBufferData=void 0,this.downloader=void 0,this.isDownloading=void 0,this.dataLocation="none"}ensureValid(){if(this.dataLocation==="none")throw new Error("The tensor is disposed.")}reshape(e){if(this.ensureValid(),this.downloader||this.disposer)throw new Error("Cannot reshape a tensor that owns GPU resource.");return nc(this,e)}}});var it,To=C(()=>{"use strict";wo();it=ht});var _o,ic,St,yt,Sa=C(()=>{"use strict";_a();_o=(r,e)=>{(typeof It.trace>"u"?!It.wasm.trace:!It.trace)||console.timeStamp(`${r}::ORT::${e}`)},ic=(r,e)=>{let n=new Error().stack?.split(/\r\n|\r|\n/g)||[],t=!1;for(let o=0;o{(typeof It.trace>"u"?!It.wasm.trace:!It.trace)||ic("BEGIN",r)},yt=r=>{(typeof It.trace>"u"?!It.wasm.trace:!It.trace)||ic("END",r)}});var Io,ac=C(()=>{"use strict";vo();To();Sa();Io=class r{constructor(e){this.handler=e}async run(e,n,t){St();let o={},i={};if(typeof e!="object"||e===null||e instanceof it||Array.isArray(e))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let s=!0;if(typeof n=="object"){if(n===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(n instanceof it)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(n)){if(n.length===0)throw new TypeError("'fetches' cannot be an empty array.");s=!1;for(let l of n){if(typeof l!="string")throw new TypeError("'fetches' must be a string array or an object.");if(this.outputNames.indexOf(l)===-1)throw new RangeError(`'fetches' contains invalid output name: ${l}.`);o[l]=null}if(typeof t=="object"&&t!==null)i=t;else if(typeof t<"u")throw new TypeError("'options' must be an object.")}else{let l=!1,f=Object.getOwnPropertyNames(n);for(let c of this.outputNames)if(f.indexOf(c)!==-1){let p=n[c];(p===null||p instanceof it)&&(l=!0,s=!1,o[c]=p)}if(l){if(typeof t=="object"&&t!==null)i=t;else if(typeof t<"u")throw new TypeError("'options' must be an object.")}else i=n}}else if(typeof n<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let l of this.inputNames)if(typeof e[l]>"u")throw new Error(`input '${l}' is missing in 'feeds'.`);if(s)for(let l of this.outputNames)o[l]=null;let a=await this.handler.run(e,o,i),u={};for(let l in 
a)if(Object.hasOwnProperty.call(a,l)){let f=a[l];f instanceof it?u[l]=f:u[l]=new it(f.type,f.data,f.dims)}return yt(),u}async release(){return this.handler.dispose()}static async create(e,n,t,o){St();let i,s={};if(typeof e=="string"){if(i=e,typeof n=="object"&&n!==null)s=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else if(e instanceof Uint8Array){if(i=e,typeof n=="object"&&n!==null)s=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else if(e instanceof ArrayBuffer||typeof SharedArrayBuffer<"u"&&e instanceof SharedArrayBuffer){let f=e,c=0,p=e.byteLength;if(typeof n=="object"&&n!==null)s=n;else if(typeof n=="number"){if(c=n,!Number.isSafeInteger(c))throw new RangeError("'byteOffset' must be an integer.");if(c<0||c>=f.byteLength)throw new RangeError(`'byteOffset' is out of range [0, ${f.byteLength}).`);if(p=e.byteLength-c,typeof t=="number"){if(p=t,!Number.isSafeInteger(p))throw new RangeError("'byteLength' must be an integer.");if(p<=0||c+p>f.byteLength)throw new RangeError(`'byteLength' is out of range (0, ${f.byteLength-c}].`);if(typeof o=="object"&&o!==null)s=o;else if(typeof o<"u")throw new TypeError("'options' must be an object.")}else if(typeof t<"u")throw new TypeError("'byteLength' must be a number.")}else if(typeof n<"u")throw new TypeError("'options' must be an object.");i=new Uint8Array(f,c,p)}else throw new TypeError("Unexpected argument[0]: must be 'path' or 'buffer'.");let[a,u]=await xo(s),l=await a.createInferenceSessionHandler(i,u);return yt(),new r(l)}startProfiling(){this.handler.startProfiling()}endProfiling(){this.handler.endProfiling()}get inputNames(){return this.handler.inputNames}get outputNames(){return this.handler.outputNames}}});var ov,sc=C(()=>{"use strict";ac();ov=Io});var uc=C(()=>{"use strict"});var lc=C(()=>{"use strict"});var cc=C(()=>{"use strict"});var fc=C(()=>{"use strict"});var iv,So,dc=C(()=>{"use strict";vo();To();iv="Training backend could not be resolved. 
Make sure you're using the correct configuration & WebAssembly files.",So=class r{constructor(e,n,t){this.handler=e,this.hasOptimizerModel=n,this.hasEvalModel=t}get trainingInputNames(){return this.handler.inputNames}get trainingOutputNames(){return this.handler.outputNames}get evalInputNames(){if(this.hasEvalModel)return this.handler.evalInputNames;throw new Error("This training session has no evalModel loaded.")}get evalOutputNames(){if(this.hasEvalModel)return this.handler.evalOutputNames;throw new Error("This training session has no evalModel loaded.")}static async create(e,n){let t=e.evalModel||"",o=e.optimizerModel||"",i=n||{},[s,a]=await xo(i);if(s.createTrainingSessionHandler){let u=await s.createTrainingSessionHandler(e.checkpointState,e.trainModel,t,o,a);return new r(u,!!e.optimizerModel,!!e.evalModel)}else throw new Error(iv)}typeNarrowingForRunStep(e,n,t,o,i){let s={},a={};if(typeof t!="object"||t===null||t instanceof it||Array.isArray(t))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let u=!0;if(typeof o=="object"){if(o===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(o instanceof it)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(o)){if(o.length===0)throw new TypeError("'fetches' cannot be an empty array.");u=!1;for(let l of o){if(typeof l!="string")throw new TypeError("'fetches' must be a string array or an object.");if(n.indexOf(l)===-1)throw new RangeError(`'fetches' contains invalid output name: ${l}.`);s[l]=null}if(typeof i=="object"&&i!==null)a=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else{let l=!1,f=Object.getOwnPropertyNames(o);for(let c of n)if(f.indexOf(c)!==-1){let p=o[c];(p===null||p instanceof it)&&(l=!0,u=!1,s[c]=p)}if(l){if(typeof i=="object"&&i!==null)a=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else a=o}}else if(typeof o<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let l of e)if(typeof t[l]>"u")throw new Error(`input '${l}' is missing in 'feeds'.`);if(u)for(let l of n)s[l]=null;return[s,a]}convertHandlerReturnTypeToMapOfTensors(e){let n={};for(let t in e)if(Object.hasOwnProperty.call(e,t)){let o=e[t];o instanceof it?n[t]=o:n[t]=new it(o.type,o.data,o.dims)}return n}async lazyResetGrad(){await this.handler.lazyResetGrad()}async runTrainStep(e,n,t){let[o,i]=this.typeNarrowingForRunStep(this.trainingInputNames,this.trainingOutputNames,e,n,t),s=await this.handler.runTrainStep(e,o,i);return this.convertHandlerReturnTypeToMapOfTensors(s)}async runOptimizerStep(e){if(this.hasOptimizerModel)await this.handler.runOptimizerStep(e||{});else throw new Error("This TrainingSession has no OptimizerModel loaded.")}async runEvalStep(e,n,t){if(this.hasEvalModel){let[o,i]=this.typeNarrowingForRunStep(this.evalInputNames,this.evalOutputNames,e,n,t),s=await this.handler.runEvalStep(e,o,i);return this.convertHandlerReturnTypeToMapOfTensors(s)}else throw new Error("This TrainingSession has no EvalModel loaded.")}async getParametersSize(e=!0){return this.handler.getParametersSize(e)}async loadParametersBuffer(e,n=!0){let t=await this.getParametersSize(n);if(e.length!==4*t)throw new Error("Size of the buffer passed into loadParametersBuffer must match the number of parameters in the model. 
Please use getParametersSize method to check.");return this.handler.loadParametersBuffer(e,n)}async getContiguousParameters(e=!0){return this.handler.getContiguousParameters(e)}async release(){return this.handler.dispose()}}});var av,pc=C(()=>{"use strict";dc();av=So});var $a={};sn($a,{InferenceSession:()=>ov,TRACE:()=>_o,TRACE_FUNC_BEGIN:()=>St,TRACE_FUNC_END:()=>yt,Tensor:()=>it,TrainingSession:()=>av,env:()=>le,registerBackend:()=>vr});var ft=C(()=>{"use strict";Ml();Wl();sc();To();uc();lc();Sa();cc();fc();pc()});function wr(r,e,n,t){if(e===void 0)return uv(r);if(n===void 0)$o(r,e,1);else if(typeof n=="number"&&t===void 0)$o(r,e,n);else if(typeof n=="string"&&t===void 0)$o(r,n,1,e);else if(typeof n=="string"&&typeof t=="number")$o(r,n,t,e);else throw new TypeError("input is valid")}function uv(r){return{verbose:wr.verbose.bind(null,r),info:wr.info.bind(null,r),warning:wr.warning.bind(null,r),error:wr.error.bind(null,r),fatal:wr.fatal.bind(null,r)}}function $o(r,e,n,t){let o=En[t||""]||En[""];hc[r]{"use strict";Aa=class{log(e,n,t){}},Pa=class{log(e,n,t){console.log(`${this.color(e)} ${t?"\x1B[35m"+t+"\x1B[0m ":""}${n}`)}color(e){switch(e){case"verbose":return"\x1B[34;40mv\x1B[0m";case"info":return"\x1B[32mi\x1B[0m";case"warning":return"\x1B[30;43mw\x1B[0m";case"error":return"\x1B[31;40me\x1B[0m";case"fatal":return"\x1B[101mf\x1B[0m";default:throw new Error(`unsupported severity: ${e}`)}}},hc={verbose:1e3,info:2e3,warning:4e3,error:5e3,fatal:6e3},sv={none:new Aa,console:new Pa},gc={provider:"console",minimalSeverity:"warning",logDateTime:!0,logSourceLocation:!1},En={"":gc};(u=>{function r(l,f){u("verbose",l,f)}u.verbose=r;function e(l,f){u("info",l,f)}u.info=e;function n(l,f){u("warning",l,f)}u.warning=n;function t(l,f){u("error",l,f)}u.error=t;function o(l,f){u("fatal",l,f)}u.fatal=o;function i(l){En={},s("",l||{})}u.reset=i;function s(l,f){if(l==="*")i(f);else{let c=En[l]||gc;En[l]={provider:f.provider||c.provider,minimalSeverity:f.minimalSeverity||c.minimalSeverity,logDateTime:f.logDateTime===void 0?c.logDateTime:f.logDateTime,logSourceLocation:f.logSourceLocation===void 0?c.logSourceLocation:f.logSourceLocation}}}u.set=s;function a(l){let f={};l.logLevel&&(f.minimalSeverity=l.logLevel),s("",f)}u.setWithEnv=a})(wr||={});Re=wr,Po=class{constructor(e,n,t,o,i,s){this.category=e;this.name=n;this.startTime=t;this.endCallback=o;this.timer=i;this.ctx=s}async end(){return this.endCallback(this)}async checkTimer(){if(this.ctx===void 0||this.timer===void 0)throw new Error("No webgl timer found");return this.ctx.endTimer(),this.ctx.waitForQueryAndGetTime(this.timer)}},Oo=class{constructor(e,n,t,o){this.category=e;this.name=n;this.startTime=t;this.endTime=o}},Eo=class{constructor(e,n,t){this._started=!1;this._flushPointer=0;this._started=!1,this._maxNumberEvents=e===void 0?1e4:e,this._flushBatchSize=n===void 0?10:n,this._flushIntervalInMilliseconds=t===void 0?5e3:t}static create(e){return e===void 0?new this:new this(e.maxNumberEvents,e.flushBatchSize,e.flushIntervalInMilliseconds)}start(){this._started=!0,this._timingEvents=[],this._flushTime=Ao(),this._flushPointer=0}stop(){for(this._started=!1;this._flushPointer{a.then(async f=>{i&&await i.end(),u(f)},async f=>{i&&await i.end(),l(f)})});if(!s&&i){let u=i.end();if(u&&typeof u.then=="function")return new Promise((l,f)=>{u.then(()=>{l(a)},c=>{f(c)})})}return a}begin(e,n,t){if(!this._started)throw new Error("profiler is not started yet");if(t===void 0){let o=Ao();return this.flush(o),new Po(e,n,o,i=>this.endSync(i))}else{let o=t.beginTimer();return 
new Po(e,n,0,async i=>this.end(i),o,t)}}async end(e){let n=await e.checkTimer();this._timingEvents.length=this._flushBatchSize||e-this._flushTime>=this._flushIntervalInMilliseconds){for(let n=this._flushPointer;this._flushPointerperformance.now():Date.now});function bc(r,e,n){for(let t of n){let o=t[0],i=t[1],s=t[2],a=t[3],u=t[4];if(r.opType===o){for(let l of e)if((l.domain===i||l.domain==="ai.onnx"&&i==="")&&lv(l.version,s))return{opImpl:a,opInit:u}}}throw new TypeError(`cannot resolve operator '${r.opType}' with opsets: ${e.map(t=>`${t.domain||"ai.onnx"} v${t.version}`).join(", ")}`)}function lv(r,e){if(e.endsWith("+")){let n=Number.parseInt(e.substring(0,e.length-1),10);return!isNaN(n)&&n<=r}else if(e.split("-").length===2){let n=e.split("-"),t=Number.parseInt(n[0],10),o=Number.parseInt(n[1],10);return!isNaN(t)&&!isNaN(o)&&t<=r&&r<=o}else return Number.parseInt(e,10)===r}var yc=C(()=>{"use strict"});var xc=Je(Oa=>{"use strict";Oa.__esModule=!0;var cv=function(){function r(e){if(!e)throw new TypeError("Invalid argument; `value` has no value.");this.value=r.EMPTY,e&&r.isGuid(e)&&(this.value=e)}return r.isGuid=function(e){var n=e.toString();return e&&(e instanceof r||r.validator.test(n))},r.create=function(){return new r([r.gen(2),r.gen(1),r.gen(1),r.gen(1),r.gen(3)].join("-"))},r.createEmpty=function(){return new r("emptyguid")},r.parse=function(e){return new r(e)},r.raw=function(){return[r.gen(2),r.gen(1),r.gen(1),r.gen(1),r.gen(3)].join("-")},r.gen=function(e){for(var n="",t=0;t>>=0,(o=0<=r&&r<256)&&(t=Tc[r],t)?t:(n=ke(r,0,!0),o&&(Tc[r]=n),n)):(r|=0,(o=-128<=r&&r<128)&&(t=wc[r],t)?t:(n=ke(r,r<0?-1:0,!1),o&&(wc[r]=n),n))}function Et(r,e){if(isNaN(r))return e?fr:Nt;if(e){if(r<0)return fr;if(r>=$c)return Oc}else{if(r<=-Ic)return xt;if(r+1>=Ic)return Pc}return r<0?Et(-r,e).neg():ke(r%cn|0,r/cn|0,e)}function ke(r,e,n){return new Me(r,e,n)}function Ca(r,e,n){if(r.length===0)throw Error("empty string");if(typeof e=="number"?(n=e,e=!1):e=!!e,r==="NaN"||r==="Infinity"||r==="+Infinity"||r==="-Infinity")return e?fr:Nt;if(n=n||10,n<2||360)throw Error("interior hyphen");if(t===0)return Ca(r.substring(1),e,n).neg();for(var o=Et(Co(n,8)),i=Nt,s=0;s{Ot=null;try{Ot=new WebAssembly.Instance(new WebAssembly.Module(new 
Uint8Array([0,97,115,109,1,0,0,0,1,13,2,96,0,1,127,96,4,127,127,127,127,1,127,3,7,6,0,1,1,1,1,1,6,6,1,127,1,65,0,11,7,50,6,3,109,117,108,0,1,5,100,105,118,95,115,0,2,5,100,105,118,95,117,0,3,5,114,101,109,95,115,0,4,5,114,101,109,95,117,0,5,8,103,101,116,95,104,105,103,104,0,0,10,191,1,6,4,0,35,0,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,126,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,127,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,128,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,129,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,130,34,4,66,32,135,167,36,0,32,4,167,11])),{}).exports}catch{}Me.prototype.__isLong__;Object.defineProperty(Me.prototype,"__isLong__",{value:!0});Me.isLong=dt;wc={},Tc={};Me.fromInt=Lr;Me.fromNumber=Et;Me.fromBits=ke;Co=Math.pow;Me.fromString=Ca;Me.fromValue=zt;_c=65536,fv=1<<24,cn=_c*_c,$c=cn*cn,Ic=$c/2,Sc=Lr(fv),Nt=Lr(0);Me.ZERO=Nt;fr=Lr(0,!0);Me.UZERO=fr;ln=Lr(1);Me.ONE=ln;Ac=Lr(1,!0);Me.UONE=Ac;Ea=Lr(-1);Me.NEG_ONE=Ea;Pc=ke(-1,2147483647,!1);Me.MAX_VALUE=Pc;Oc=ke(-1,-1,!0);Me.MAX_UNSIGNED_VALUE=Oc;xt=ke(0,-2147483648,!1);Me.MIN_VALUE=xt;U=Me.prototype;U.toInt=function(){return this.unsigned?this.low>>>0:this.low};U.toNumber=function(){return this.unsigned?(this.high>>>0)*cn+(this.low>>>0):this.high*cn+(this.low>>>0)};U.toString=function(e){if(e=e||10,e<2||36>>0,f=l.toString(e);if(s=u,s.isZero())return f+a;for(;f.length<6;)f="0"+f;a=""+f+a}};U.getHighBits=function(){return this.high};U.getHighBitsUnsigned=function(){return this.high>>>0};U.getLowBits=function(){return this.low};U.getLowBitsUnsigned=function(){return this.low>>>0};U.getNumBitsAbs=function(){if(this.isNegative())return this.eq(xt)?64:this.neg().getNumBitsAbs();for(var e=this.high!=0?this.high:this.low,n=31;n>0&&!(e&1<=0};U.isOdd=function(){return(this.low&1)===1};U.isEven=function(){return(this.low&1)===0};U.equals=function(e){return dt(e)||(e=zt(e)),this.unsigned!==e.unsigned&&this.high>>>31===1&&e.high>>>31===1?!1:this.high===e.high&&this.low===e.low};U.eq=U.equals;U.notEquals=function(e){return!this.eq(e)};U.neq=U.notEquals;U.ne=U.notEquals;U.lessThan=function(e){return this.comp(e)<0};U.lt=U.lessThan;U.lessThanOrEqual=function(e){return this.comp(e)<=0};U.lte=U.lessThanOrEqual;U.le=U.lessThanOrEqual;U.greaterThan=function(e){return this.comp(e)>0};U.gt=U.greaterThan;U.greaterThanOrEqual=function(e){return this.comp(e)>=0};U.gte=U.greaterThanOrEqual;U.ge=U.greaterThanOrEqual;U.compare=function(e){if(dt(e)||(e=zt(e)),this.eq(e))return 0;var n=this.isNegative(),t=e.isNegative();return n&&!t?-1:!n&&t?1:this.unsigned?e.high>>>0>this.high>>>0||e.high===this.high&&e.low>>>0>this.low>>>0?-1:1:this.sub(e).isNegative()?-1:1};U.comp=U.compare;U.negate=function(){return!this.unsigned&&this.eq(xt)?xt:this.not().add(ln)};U.neg=U.negate;U.add=function(e){dt(e)||(e=zt(e));var n=this.high>>>16,t=this.high&65535,o=this.low>>>16,i=this.low&65535,s=e.high>>>16,a=e.high&65535,u=e.low>>>16,l=e.low&65535,f=0,c=0,p=0,b=0;return b+=i+l,p+=b>>>16,b&=65535,p+=o+u,c+=p>>>16,p&=65535,c+=t+a,f+=c>>>16,c&=65535,f+=n+s,f&=65535,ke(p<<16|b,f<<16|c,this.unsigned)};U.subtract=function(e){return dt(e)||(e=zt(e)),this.add(e.neg())};U.sub=U.subtract;U.multiply=function(e){if(this.isZero())return this;if(dt(e)||(e=zt(e)),Ot){var 
n=Ot.mul(this.low,this.high,e.low,e.high);return ke(n,Ot.get_high(),this.unsigned)}if(e.isZero())return this.unsigned?fr:Nt;if(this.eq(xt))return e.isOdd()?xt:Nt;if(e.eq(xt))return this.isOdd()?xt:Nt;if(this.isNegative())return e.isNegative()?this.neg().mul(e.neg()):this.neg().mul(e).neg();if(e.isNegative())return this.mul(e.neg()).neg();if(this.lt(Sc)&&e.lt(Sc))return Et(this.toNumber()*e.toNumber(),this.unsigned);var t=this.high>>>16,o=this.high&65535,i=this.low>>>16,s=this.low&65535,a=e.high>>>16,u=e.high&65535,l=e.low>>>16,f=e.low&65535,c=0,p=0,b=0,h=0;return h+=s*f,b+=h>>>16,h&=65535,b+=i*f,p+=b>>>16,b&=65535,b+=s*l,p+=b>>>16,b&=65535,p+=o*f,c+=p>>>16,p&=65535,p+=i*l,c+=p>>>16,p&=65535,p+=s*u,c+=p>>>16,p&=65535,c+=t*f+o*l+i*u+s*a,c&=65535,ke(b<<16|h,c<<16|p,this.unsigned)};U.mul=U.multiply;U.divide=function(e){if(dt(e)||(e=zt(e)),e.isZero())throw Error("division by zero");if(Ot){if(!this.unsigned&&this.high===-2147483648&&e.low===-1&&e.high===-1)return this;var n=(this.unsigned?Ot.div_u:Ot.div_s)(this.low,this.high,e.low,e.high);return ke(n,Ot.get_high(),this.unsigned)}if(this.isZero())return this.unsigned?fr:Nt;var t,o,i;if(this.unsigned){if(e.unsigned||(e=e.toUnsigned()),e.gt(this))return fr;if(e.gt(this.shru(1)))return Ac;i=fr}else{if(this.eq(xt)){if(e.eq(ln)||e.eq(Ea))return xt;if(e.eq(xt))return ln;var s=this.shr(1);return t=s.div(e).shl(1),t.eq(Nt)?e.isNegative()?ln:Ea:(o=this.sub(e.mul(t)),i=t.add(o.div(e)),i)}else if(e.eq(xt))return this.unsigned?fr:Nt;if(this.isNegative())return e.isNegative()?this.neg().div(e.neg()):this.neg().div(e).neg();if(e.isNegative())return this.div(e.neg()).neg();i=Nt}for(o=this;o.gte(e);){t=Math.max(1,Math.floor(o.toNumber()/e.toNumber()));for(var a=Math.ceil(Math.log(t)/Math.LN2),u=a<=48?1:Co(2,a-48),l=Et(t),f=l.mul(e);f.isNegative()||f.gt(o);)t-=u,l=Et(t,this.unsigned),f=l.mul(e);l.isZero()&&(l=ln),i=i.add(l),o=o.sub(f)}return i};U.div=U.divide;U.modulo=function(e){if(dt(e)||(e=zt(e)),Ot){var n=(this.unsigned?Ot.rem_u:Ot.rem_s)(this.low,this.high,e.low,e.high);return ke(n,Ot.get_high(),this.unsigned)}return this.sub(this.div(e).mul(e))};U.mod=U.modulo;U.rem=U.modulo;U.not=function(){return ke(~this.low,~this.high,this.unsigned)};U.countLeadingZeros=function(){return this.high?Math.clz32(this.high):Math.clz32(this.low)+32};U.clz=U.countLeadingZeros;U.countTrailingZeros=function(){return this.low?vc(this.low):vc(this.high)+32};U.ctz=U.countTrailingZeros;U.and=function(e){return dt(e)||(e=zt(e)),ke(this.low&e.low,this.high&e.high,this.unsigned)};U.or=function(e){return dt(e)||(e=zt(e)),ke(this.low|e.low,this.high|e.high,this.unsigned)};U.xor=function(e){return dt(e)||(e=zt(e)),ke(this.low^e.low,this.high^e.high,this.unsigned)};U.shiftLeft=function(e){return dt(e)&&(e=e.toInt()),(e&=63)===0?this:e<32?ke(this.low<>>32-e,this.unsigned):ke(0,this.low<>>e|this.high<<32-e,this.high>>e,this.unsigned):ke(this.high>>e-32,this.high>=0?0:-1,this.unsigned)};U.shr=U.shiftRight;U.shiftRightUnsigned=function(e){return dt(e)&&(e=e.toInt()),(e&=63)===0?this:e<32?ke(this.low>>>e|this.high<<32-e,this.high>>>e,this.unsigned):e===32?ke(this.high,0,this.unsigned):ke(this.high>>>e-32,0,this.unsigned)};U.shru=U.shiftRightUnsigned;U.shr_u=U.shiftRightUnsigned;U.rotateLeft=function(e){var n;return dt(e)&&(e=e.toInt()),(e&=63)===0?this:e===32?ke(this.high,this.low,this.unsigned):e<32?(n=32-e,ke(this.low<>>n,this.high<>>n,this.unsigned)):(e-=32,n=32-e,ke(this.high<>>n,this.low<>>n,this.unsigned))};U.rotl=U.rotateLeft;U.rotateRight=function(e){var n;return 
dt(e)&&(e=e.toInt()),(e&=63)===0?this:e===32?ke(this.high,this.low,this.unsigned):e<32?(n=32-e,ke(this.high<>>e,this.low<>>e,this.unsigned)):(e-=32,n=32-e,ke(this.low<>>e,this.high<>>e,this.unsigned))};U.rotr=U.rotateRight;U.toSigned=function(){return this.unsigned?ke(this.low,this.high,!1):this};U.toUnsigned=function(){return this.unsigned?this:ke(this.low,this.high,!0)};U.toBytes=function(e){return e?this.toBytesLE():this.toBytesBE()};U.toBytesLE=function(){var e=this.high,n=this.low;return[n&255,n>>>8&255,n>>>16&255,n>>>24,e&255,e>>>8&255,e>>>16&255,e>>>24]};U.toBytesBE=function(){var e=this.high,n=this.low;return[e>>>24,e>>>16&255,e>>>8&255,e&255,n>>>24,n>>>16&255,n>>>8&255,n&255]};Me.fromBytes=function(e,n,t){return t?Me.fromBytesLE(e,n):Me.fromBytesBE(e,n)};Me.fromBytesLE=function(e,n){return new Me(e[0]|e[1]<<8|e[2]<<16|e[3]<<24,e[4]|e[5]<<8|e[6]<<16|e[7]<<24,n)};Me.fromBytesBE=function(e,n){return new Me(e[4]<<24|e[5]<<16|e[6]<<8|e[7],e[0]<<24|e[1]<<16|e[2]<<8|e[3],n)};dr=Me});var k,ko=C(()=>{k={};k.Offset;k.Table;k.SIZEOF_SHORT=2;k.SIZEOF_INT=4;k.FILE_IDENTIFIER_LENGTH=4;k.SIZE_PREFIX_LENGTH=4;k.Encoding={UTF8_BYTES:1,UTF16_STRING:2};k.int32=new Int32Array(2);k.float32=new Float32Array(k.int32.buffer);k.float64=new Float64Array(k.int32.buffer);k.isLittleEndian=new Uint16Array(new Uint8Array([1,0]).buffer)[0]===1;k.Long=function(r,e){this.low=r|0,this.high=e|0};k.Long.create=function(r,e){return r==0&&e==0?k.Long.ZERO:new k.Long(r,e)};k.Long.prototype.toFloat64=function(){return(this.low>>>0)+this.high*4294967296};k.Long.prototype.equals=function(r){return this.low==r.low&&this.high==r.high};k.Long.ZERO=new k.Long(0,0);k.Builder=function(r){if(r)var e=r;else var e=1024;this.bb=k.ByteBuffer.allocate(e),this.space=e,this.minalign=1,this.vtable=null,this.vtable_in_use=0,this.isNested=!1,this.object_start=0,this.vtables=[],this.vector_num_elems=0,this.force_defaults=!1};k.Builder.prototype.clear=function(){this.bb.clear(),this.space=this.bb.capacity(),this.minalign=1,this.vtable=null,this.vtable_in_use=0,this.isNested=!1,this.object_start=0,this.vtables=[],this.vector_num_elems=0,this.force_defaults=!1};k.Builder.prototype.forceDefaults=function(r){this.force_defaults=r};k.Builder.prototype.dataBuffer=function(){return this.bb};k.Builder.prototype.asUint8Array=function(){return this.bb.bytes().subarray(this.bb.position(),this.bb.position()+this.offset())};k.Builder.prototype.prep=function(r,e){r>this.minalign&&(this.minalign=r);for(var n=~(this.bb.capacity()-this.space+e)+1&r-1;this.space=0&&this.vtable[e]==0;e--);for(var n=e+1;e>=0;e--)this.addInt16(this.vtable[e]!=0?r-this.vtable[e]:0);var t=2;this.addInt16(r-this.object_start);var o=(n+t)*k.SIZEOF_SHORT;this.addInt16(o);var i=0,s=this.space;e:for(e=0;e=0;i--)this.writeInt8(o.charCodeAt(i))}this.prep(this.minalign,k.SIZEOF_INT+t),this.addOffset(r),t&&this.addInt32(this.bb.capacity()-this.space),this.bb.setPosition(this.space)};k.Builder.prototype.finishSizePrefixed=function(r,e){this.finish(r,e,!0)};k.Builder.prototype.requiredField=function(r,e){var n=this.bb.capacity()-r,t=n-this.bb.readInt32(n),o=this.bb.readInt16(t+e)!=0;if(!o)throw new Error("FlatBuffers: field "+e+" must be set")};k.Builder.prototype.startVector=function(r,e,n){this.notNested(),this.vector_num_elems=e,this.prep(k.SIZEOF_INT,r*e),this.prep(n,r*e)};k.Builder.prototype.endVector=function(){return this.writeInt32(this.vector_num_elems),this.offset()};k.Builder.prototype.createString=function(r){if(r instanceof Uint8Array)var e=r;else for(var 
e=[],n=0;n=56320)t=o;else{var i=r.charCodeAt(n++);t=(o<<10)+i+(65536-56623104-56320)}t<128?e.push(t):(t<2048?e.push(t>>6&31|192):(t<65536?e.push(t>>12&15|224):e.push(t>>18&7|240,t>>12&63|128),e.push(t>>6&63|128)),e.push(t&63|128))}this.addInt8(0),this.startVector(1,e.length,1),this.bb.setPosition(this.space-=e.length);for(var n=0,s=this.space,a=this.bb.bytes();n>24};k.ByteBuffer.prototype.readUint8=function(r){return this.bytes_[r]};k.ByteBuffer.prototype.readInt16=function(r){return this.readUint16(r)<<16>>16};k.ByteBuffer.prototype.readUint16=function(r){return this.bytes_[r]|this.bytes_[r+1]<<8};k.ByteBuffer.prototype.readInt32=function(r){return this.bytes_[r]|this.bytes_[r+1]<<8|this.bytes_[r+2]<<16|this.bytes_[r+3]<<24};k.ByteBuffer.prototype.readUint32=function(r){return this.readInt32(r)>>>0};k.ByteBuffer.prototype.readInt64=function(r){return new k.Long(this.readInt32(r),this.readInt32(r+4))};k.ByteBuffer.prototype.readUint64=function(r){return new k.Long(this.readUint32(r),this.readUint32(r+4))};k.ByteBuffer.prototype.readFloat32=function(r){return k.int32[0]=this.readInt32(r),k.float32[0]};k.ByteBuffer.prototype.readFloat64=function(r){return k.int32[k.isLittleEndian?0:1]=this.readInt32(r),k.int32[k.isLittleEndian?1:0]=this.readInt32(r+4),k.float64[0]};k.ByteBuffer.prototype.writeInt8=function(r,e){this.bytes_[r]=e};k.ByteBuffer.prototype.writeUint8=function(r,e){this.bytes_[r]=e};k.ByteBuffer.prototype.writeInt16=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8};k.ByteBuffer.prototype.writeUint16=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8};k.ByteBuffer.prototype.writeInt32=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8,this.bytes_[r+2]=e>>16,this.bytes_[r+3]=e>>24};k.ByteBuffer.prototype.writeUint32=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8,this.bytes_[r+2]=e>>16,this.bytes_[r+3]=e>>24};k.ByteBuffer.prototype.writeInt64=function(r,e){this.writeInt32(r,e.low),this.writeInt32(r+4,e.high)};k.ByteBuffer.prototype.writeUint64=function(r,e){this.writeUint32(r,e.low),this.writeUint32(r+4,e.high)};k.ByteBuffer.prototype.writeFloat32=function(r,e){k.float32[0]=e,this.writeInt32(r,k.int32[0])};k.ByteBuffer.prototype.writeFloat64=function(r,e){k.float64[0]=e,this.writeInt32(r,k.int32[k.isLittleEndian?0:1]),this.writeInt32(r+4,k.int32[k.isLittleEndian?1:0])};k.ByteBuffer.prototype.getBufferIdentifier=function(){if(this.bytes_.length>10)+55296,(i&1024-1)+56320))}return t};k.ByteBuffer.prototype.__indirect=function(r){return r+this.readInt32(r)};k.ByteBuffer.prototype.__vector=function(r){return r+this.readInt32(r)+k.SIZEOF_INT};k.ByteBuffer.prototype.__vector_len=function(r){return this.readInt32(r+this.readInt32(r))};k.ByteBuffer.prototype.__has_identifier=function(r){if(r.length!=k.FILE_IDENTIFIER_LENGTH)throw new Error("FlatBuffers: file identifier must be length "+k.FILE_IDENTIFIER_LENGTH);for(var e=0;e{"use strict";ko();(e=>{let r;(t=>{let n;(i=>{let o;(S=>(S[S.UNDEFINED=0]="UNDEFINED",S[S.FLOAT=1]="FLOAT",S[S.INT=2]="INT",S[S.STRING=3]="STRING",S[S.TENSOR=4]="TENSOR",S[S.GRAPH=5]="GRAPH",S[S.FLOATS=6]="FLOATS",S[S.INTS=7]="INTS",S[S.STRINGS=8]="STRINGS",S[S.TENSORS=9]="TENSORS",S[S.GRAPHS=10]="GRAPHS",S[S.SPARSE_TENSOR=11]="SPARSE_TENSOR",S[S.SPARSE_TENSORS=12]="SPARSE_TENSORS"))(o=i.AttributeType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{let o;(l=>(l[l.UNKNOWN=0]="UNKNOWN",l[l.VALUE=1]="VALUE",l[l.PARAM=2]="PARAM"))(o=i.DimensionValueType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let 
r;(t=>{let n;(i=>{let o;(F=>(F[F.UNDEFINED=0]="UNDEFINED",F[F.FLOAT=1]="FLOAT",F[F.UINT8=2]="UINT8",F[F.INT8=3]="INT8",F[F.UINT16=4]="UINT16",F[F.INT16=5]="INT16",F[F.INT32=6]="INT32",F[F.INT64=7]="INT64",F[F.STRING=8]="STRING",F[F.BOOL=9]="BOOL",F[F.FLOAT16=10]="FLOAT16",F[F.DOUBLE=11]="DOUBLE",F[F.UINT32=12]="UINT32",F[F.UINT64=13]="UINT64",F[F.COMPLEX64=14]="COMPLEX64",F[F.COMPLEX128=15]="COMPLEX128",F[F.BFLOAT16=16]="BFLOAT16",F[F.FLOAT8E4M3FN=17]="FLOAT8E4M3FN",F[F.FLOAT8E4M3FNUZ=18]="FLOAT8E4M3FNUZ",F[F.FLOAT8E5M2=19]="FLOAT8E5M2",F[F.FLOAT8E5M2FNUZ=20]="FLOAT8E5M2FNUZ"))(o=i.TensorDataType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{let o;(u=>(u[u.Primitive=0]="Primitive",u[u.Fused=1]="Fused"))(o=i.NodeType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{let o;(f=>(f[f.NONE=0]="NONE",f[f.tensor_type=1]="tensor_type",f[f.sequence_type=2]="sequence_type",f[f.map_type=3]="map_type"))(o=i.TypeInfoValue||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsShape(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsShape(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}dim(a,u){let l=this.bb.__offset(this.bb_pos,4);return l?(u||new e.experimental.fbs.Dimension).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}dimLength(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.__vector_len(this.bb_pos+a):0}static startShape(a){a.startObject(1)}static addDim(a,u){a.addFieldOffset(0,u,0)}static createDimVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startDimVector(a,u){a.startVector(4,u,4)}static endShape(a){return a.endObject()}static createShape(a,u){return o.startShape(a),o.addDim(a,u),o.endShape(a)}}i.Shape=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsDimension(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsDimension(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}value(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.DimensionValue).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}denotation(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}static startDimension(a){a.startObject(2)}static addValue(a,u){a.addFieldOffset(0,u,0)}static addDenotation(a,u){a.addFieldOffset(1,u,0)}static endDimension(a){return a.endObject()}static createDimension(a,u,l){return o.startDimension(a),o.addValue(a,u),o.addDenotation(a,l),o.endDimension(a)}}i.Dimension=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsDimensionValue(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsDimensionValue(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}dimType(){let 
a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt8(this.bb_pos+a):0}dimValue(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}dimParam(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__string(this.bb_pos+u,a):null}static startDimensionValue(a){a.startObject(3)}static addDimType(a,u){a.addFieldInt8(0,u,0)}static addDimValue(a,u){a.addFieldInt64(1,u,a.createLong(0,0))}static addDimParam(a,u){a.addFieldOffset(2,u,0)}static endDimensionValue(a){return a.endObject()}static createDimensionValue(a,u,l,f){return o.startDimensionValue(a),o.addDimType(a,u),o.addDimValue(a,l),o.addDimParam(a,f),o.endDimensionValue(a)}}i.DimensionValue=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsTensorTypeAndShape(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsTensorTypeAndShape(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}elemType(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt32(this.bb_pos+a):0}shape(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.Shape).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startTensorTypeAndShape(a){a.startObject(2)}static addElemType(a,u){a.addFieldInt32(0,u,0)}static addShape(a,u){a.addFieldOffset(1,u,0)}static endTensorTypeAndShape(a){return a.endObject()}static createTensorTypeAndShape(a,u,l){return o.startTensorTypeAndShape(a),o.addElemType(a,u),o.addShape(a,l),o.endTensorTypeAndShape(a)}}i.TensorTypeAndShape=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsMapType(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsMapType(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}keyType(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt32(this.bb_pos+a):0}valueType(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.TypeInfo).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startMapType(a){a.startObject(2)}static addKeyType(a,u){a.addFieldInt32(0,u,0)}static addValueType(a,u){a.addFieldOffset(1,u,0)}static endMapType(a){return a.endObject()}static createMapType(a,u,l){return o.startMapType(a),o.addKeyType(a,u),o.addValueType(a,l),o.endMapType(a)}}i.MapType=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSequenceType(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSequenceType(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}elemType(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.TypeInfo).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startSequenceType(a){a.startObject(1)}static addElemType(a,u){a.addFieldOffset(0,u,0)}static endSequenceType(a){return a.endObject()}static createSequenceType(a,u){return 
o.startSequenceType(a),o.addElemType(a,u),o.endSequenceType(a)}}i.SequenceType=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}nodeIndex(){return this.bb.readUint32(this.bb_pos)}srcArgIndex(){return this.bb.readInt32(this.bb_pos+4)}dstArgIndex(){return this.bb.readInt32(this.bb_pos+8)}static createEdgeEnd(a,u,l,f){return a.prep(4,12),a.writeInt32(f),a.writeInt32(l),a.writeInt32(u),a.offset()}}i.EdgeEnd=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsNodeEdge(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsNodeEdge(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}nodeIndex(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readUint32(this.bb_pos+a):0}inputEdges(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.EdgeEnd).__init(this.bb.__vector(this.bb_pos+l)+a*12,this.bb):null}inputEdgesLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}outputEdges(a,u){let l=this.bb.__offset(this.bb_pos,8);return l?(u||new e.experimental.fbs.EdgeEnd).__init(this.bb.__vector(this.bb_pos+l)+a*12,this.bb):null}outputEdgesLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}static startNodeEdge(a){a.startObject(3)}static addNodeIndex(a,u){a.addFieldInt32(0,u,0)}static addInputEdges(a,u){a.addFieldOffset(1,u,0)}static startInputEdgesVector(a,u){a.startVector(12,u,4)}static addOutputEdges(a,u){a.addFieldOffset(2,u,0)}static startOutputEdgesVector(a,u){a.startVector(12,u,4)}static endNodeEdge(a){return a.endObject()}static createNodeEdge(a,u,l,f){return o.startNodeEdge(a),o.addNodeIndex(a,u),o.addInputEdges(a,l),o.addOutputEdges(a,f),o.endNodeEdge(a)}}i.NodeEdge=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsNode(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsNode(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}domain(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__string(this.bb_pos+u,a):null}sinceVersion(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readInt32(this.bb_pos+a):0}index(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.readUint32(this.bb_pos+a):0}opType(a){let u=this.bb.__offset(this.bb_pos,14);return u?this.bb.__string(this.bb_pos+u,a):null}type(){let a=this.bb.__offset(this.bb_pos,16);return a?this.bb.readInt32(this.bb_pos+a):0}executionProviderType(a){let u=this.bb.__offset(this.bb_pos,18);return u?this.bb.__string(this.bb_pos+u,a):null}inputs(a,u){let l=this.bb.__offset(this.bb_pos,20);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}inputsLength(){let a=this.bb.__offset(this.bb_pos,20);return a?this.bb.__vector_len(this.bb_pos+a):0}outputs(a,u){let 
l=this.bb.__offset(this.bb_pos,22);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}outputsLength(){let a=this.bb.__offset(this.bb_pos,22);return a?this.bb.__vector_len(this.bb_pos+a):0}attributes(a,u){let l=this.bb.__offset(this.bb_pos,24);return l?(u||new e.experimental.fbs.Attribute).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}attributesLength(){let a=this.bb.__offset(this.bb_pos,24);return a?this.bb.__vector_len(this.bb_pos+a):0}inputArgCounts(a){let u=this.bb.__offset(this.bb_pos,26);return u?this.bb.readInt32(this.bb.__vector(this.bb_pos+u)+a*4):0}inputArgCountsLength(){let a=this.bb.__offset(this.bb_pos,26);return a?this.bb.__vector_len(this.bb_pos+a):0}inputArgCountsArray(){let a=this.bb.__offset(this.bb_pos,26);return a?new Int32Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}implicitInputs(a,u){let l=this.bb.__offset(this.bb_pos,28);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}implicitInputsLength(){let a=this.bb.__offset(this.bb_pos,28);return a?this.bb.__vector_len(this.bb_pos+a):0}static startNode(a){a.startObject(13)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addDomain(a,u){a.addFieldOffset(2,u,0)}static addSinceVersion(a,u){a.addFieldInt32(3,u,0)}static addIndex(a,u){a.addFieldInt32(4,u,0)}static addOpType(a,u){a.addFieldOffset(5,u,0)}static addType(a,u){a.addFieldInt32(6,u,0)}static addExecutionProviderType(a,u){a.addFieldOffset(7,u,0)}static addInputs(a,u){a.addFieldOffset(8,u,0)}static createInputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startInputsVector(a,u){a.startVector(4,u,4)}static addOutputs(a,u){a.addFieldOffset(9,u,0)}static createOutputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startOutputsVector(a,u){a.startVector(4,u,4)}static addAttributes(a,u){a.addFieldOffset(10,u,0)}static createAttributesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startAttributesVector(a,u){a.startVector(4,u,4)}static addInputArgCounts(a,u){a.addFieldOffset(11,u,0)}static createInputArgCountsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addInt32(u[l]);return a.endVector()}static startInputArgCountsVector(a,u){a.startVector(4,u,4)}static addImplicitInputs(a,u){a.addFieldOffset(12,u,0)}static createImplicitInputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startImplicitInputsVector(a,u){a.startVector(4,u,4)}static endNode(a){return a.endObject()}static createNode(a,u,l,f,c,p,b,h,g,T,w,v,S,$){return o.startNode(a),o.addName(a,u),o.addDocString(a,l),o.addDomain(a,f),o.addSinceVersion(a,c),o.addIndex(a,p),o.addOpType(a,b),o.addType(a,h),o.addExecutionProviderType(a,g),o.addInputs(a,T),o.addOutputs(a,w),o.addAttributes(a,v),o.addInputArgCounts(a,S),o.addImplicitInputs(a,$),o.endNode(a)}}i.Node=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsValueInfo(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsValueInfo(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new 
o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}type(a){let u=this.bb.__offset(this.bb_pos,8);return u?(a||new e.experimental.fbs.TypeInfo).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startValueInfo(a){a.startObject(3)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addType(a,u){a.addFieldOffset(2,u,0)}static endValueInfo(a){return a.endObject()}static createValueInfo(a,u,l,f){return o.startValueInfo(a),o.addName(a,u),o.addDocString(a,l),o.addType(a,f),o.endValueInfo(a)}}i.ValueInfo=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsTypeInfo(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsTypeInfo(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}denotation(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}valueType(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.readUint8(this.bb_pos+a):0}value(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__union(a,this.bb_pos+u):null}static startTypeInfo(a){a.startObject(3)}static addDenotation(a,u){a.addFieldOffset(0,u,0)}static addValueType(a,u){a.addFieldInt8(1,u,0)}static addValue(a,u){a.addFieldOffset(2,u,0)}static endTypeInfo(a){return a.endObject()}static createTypeInfo(a,u,l,f){return o.startTypeInfo(a),o.addDenotation(a,u),o.addValueType(a,l),o.addValue(a,f),o.endTypeInfo(a)}}i.TypeInfo=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsOperatorSetId(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsOperatorSetId(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}domain(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}version(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}static startOperatorSetId(a){a.startObject(2)}static addDomain(a,u){a.addFieldOffset(0,u,0)}static addVersion(a,u){a.addFieldInt64(1,u,a.createLong(0,0))}static endOperatorSetId(a){return a.endObject()}static createOperatorSetId(a,u,l){return o.startOperatorSetId(a),o.addDomain(a,u),o.addVersion(a,l),o.endOperatorSetId(a)}}i.OperatorSetId=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsTensor(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsTensor(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}dims(a){let u=this.bb.__offset(this.bb_pos,8);return 
u?this.bb.readInt64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}dimsLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}dataType(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readInt32(this.bb_pos+a):0}rawData(a){let u=this.bb.__offset(this.bb_pos,12);return u?this.bb.readUint8(this.bb.__vector(this.bb_pos+u)+a):0}rawDataLength(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.__vector_len(this.bb_pos+a):0}rawDataArray(){let a=this.bb.__offset(this.bb_pos,12);return a?new Uint8Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}stringData(a,u){let l=this.bb.__offset(this.bb_pos,14);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}stringDataLength(){let a=this.bb.__offset(this.bb_pos,14);return a?this.bb.__vector_len(this.bb_pos+a):0}static startTensor(a){a.startObject(6)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addDims(a,u){a.addFieldOffset(2,u,0)}static createDimsVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startDimsVector(a,u){a.startVector(8,u,8)}static addDataType(a,u){a.addFieldInt32(3,u,0)}static addRawData(a,u){a.addFieldOffset(4,u,0)}static createRawDataVector(a,u){a.startVector(1,u.length,1);for(let l=u.length-1;l>=0;l--)a.addInt8(u[l]);return a.endVector()}static startRawDataVector(a,u){a.startVector(1,u,1)}static addStringData(a,u){a.addFieldOffset(5,u,0)}static createStringDataVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startStringDataVector(a,u){a.startVector(4,u,4)}static endTensor(a){return a.endObject()}static createTensor(a,u,l,f,c,p,b){return o.startTensor(a),o.addName(a,u),o.addDocString(a,l),o.addDims(a,f),o.addDataType(a,c),o.addRawData(a,p),o.addStringData(a,b),o.endTensor(a)}}i.Tensor=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSparseTensor(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSparseTensor(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}values(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}indices(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}dims(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.readInt64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}dimsLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}static startSparseTensor(a){a.startObject(3)}static addValues(a,u){a.addFieldOffset(0,u,0)}static addIndices(a,u){a.addFieldOffset(1,u,0)}static addDims(a,u){a.addFieldOffset(2,u,0)}static createDimsVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startDimsVector(a,u){a.startVector(8,u,8)}static endSparseTensor(a){return a.endObject()}static createSparseTensor(a,u,l,f){return 
o.startSparseTensor(a),o.addValues(a,u),o.addIndices(a,l),o.addDims(a,f),o.endSparseTensor(a)}}i.SparseTensor=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsAttribute(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsAttribute(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}type(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.readInt32(this.bb_pos+a):0}f(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readFloat32(this.bb_pos+a):0}i(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}s(a){let u=this.bb.__offset(this.bb_pos,14);return u?this.bb.__string(this.bb_pos+u,a):null}t(a){let u=this.bb.__offset(this.bb_pos,16);return u?(a||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}g(a){let u=this.bb.__offset(this.bb_pos,18);return u?(a||new e.experimental.fbs.Graph).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}floats(a){let u=this.bb.__offset(this.bb_pos,20);return u?this.bb.readFloat32(this.bb.__vector(this.bb_pos+u)+a*4):0}floatsLength(){let a=this.bb.__offset(this.bb_pos,20);return a?this.bb.__vector_len(this.bb_pos+a):0}floatsArray(){let a=this.bb.__offset(this.bb_pos,20);return a?new Float32Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}ints(a){let u=this.bb.__offset(this.bb_pos,22);return u?this.bb.readInt64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}intsLength(){let a=this.bb.__offset(this.bb_pos,22);return a?this.bb.__vector_len(this.bb_pos+a):0}strings(a,u){let l=this.bb.__offset(this.bb_pos,24);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}stringsLength(){let a=this.bb.__offset(this.bb_pos,24);return a?this.bb.__vector_len(this.bb_pos+a):0}tensors(a,u){let l=this.bb.__offset(this.bb_pos,26);return l?(u||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}tensorsLength(){let a=this.bb.__offset(this.bb_pos,26);return a?this.bb.__vector_len(this.bb_pos+a):0}graphs(a,u){let l=this.bb.__offset(this.bb_pos,28);return l?(u||new e.experimental.fbs.Graph).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}graphsLength(){let a=this.bb.__offset(this.bb_pos,28);return a?this.bb.__vector_len(this.bb_pos+a):0}static startAttribute(a){a.startObject(13)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addType(a,u){a.addFieldInt32(2,u,0)}static addF(a,u){a.addFieldFloat32(3,u,0)}static addI(a,u){a.addFieldInt64(4,u,a.createLong(0,0))}static addS(a,u){a.addFieldOffset(5,u,0)}static addT(a,u){a.addFieldOffset(6,u,0)}static addG(a,u){a.addFieldOffset(7,u,0)}static addFloats(a,u){a.addFieldOffset(8,u,0)}static createFloatsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addFloat32(u[l]);return a.endVector()}static startFloatsVector(a,u){a.startVector(4,u,4)}static addInts(a,u){a.addFieldOffset(9,u,0)}static 
createIntsVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startIntsVector(a,u){a.startVector(8,u,8)}static addStrings(a,u){a.addFieldOffset(10,u,0)}static createStringsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startStringsVector(a,u){a.startVector(4,u,4)}static addTensors(a,u){a.addFieldOffset(11,u,0)}static createTensorsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startTensorsVector(a,u){a.startVector(4,u,4)}static addGraphs(a,u){a.addFieldOffset(12,u,0)}static createGraphsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startGraphsVector(a,u){a.startVector(4,u,4)}static endAttribute(a){return a.endObject()}static createAttribute(a,u,l,f,c,p,b,h,g,T,w,v,S,$){return o.startAttribute(a),o.addName(a,u),o.addDocString(a,l),o.addType(a,f),o.addF(a,c),o.addI(a,p),o.addS(a,b),o.addT(a,h),o.addG(a,g),o.addFloats(a,T),o.addInts(a,w),o.addStrings(a,v),o.addTensors(a,S),o.addGraphs(a,$),o.endAttribute(a)}}i.Attribute=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsGraph(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsGraph(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}initializers(a,u){let l=this.bb.__offset(this.bb_pos,4);return l?(u||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}initializersLength(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.__vector_len(this.bb_pos+a):0}nodeArgs(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.ValueInfo).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}nodeArgsLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}nodes(a,u){let l=this.bb.__offset(this.bb_pos,8);return l?(u||new e.experimental.fbs.Node).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}nodesLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}maxNodeIndex(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readUint32(this.bb_pos+a):0}nodeEdges(a,u){let l=this.bb.__offset(this.bb_pos,12);return l?(u||new e.experimental.fbs.NodeEdge).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}nodeEdgesLength(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.__vector_len(this.bb_pos+a):0}inputs(a,u){let l=this.bb.__offset(this.bb_pos,14);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}inputsLength(){let a=this.bb.__offset(this.bb_pos,14);return a?this.bb.__vector_len(this.bb_pos+a):0}outputs(a,u){let l=this.bb.__offset(this.bb_pos,16);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}outputsLength(){let a=this.bb.__offset(this.bb_pos,16);return a?this.bb.__vector_len(this.bb_pos+a):0}sparseInitializers(a,u){let l=this.bb.__offset(this.bb_pos,18);return l?(u||new e.experimental.fbs.SparseTensor).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}sparseInitializersLength(){let a=this.bb.__offset(this.bb_pos,18);return 
a?this.bb.__vector_len(this.bb_pos+a):0}static startGraph(a){a.startObject(8)}static addInitializers(a,u){a.addFieldOffset(0,u,0)}static createInitializersVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startInitializersVector(a,u){a.startVector(4,u,4)}static addNodeArgs(a,u){a.addFieldOffset(1,u,0)}static createNodeArgsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startNodeArgsVector(a,u){a.startVector(4,u,4)}static addNodes(a,u){a.addFieldOffset(2,u,0)}static createNodesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startNodesVector(a,u){a.startVector(4,u,4)}static addMaxNodeIndex(a,u){a.addFieldInt32(3,u,0)}static addNodeEdges(a,u){a.addFieldOffset(4,u,0)}static createNodeEdgesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startNodeEdgesVector(a,u){a.startVector(4,u,4)}static addInputs(a,u){a.addFieldOffset(5,u,0)}static createInputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startInputsVector(a,u){a.startVector(4,u,4)}static addOutputs(a,u){a.addFieldOffset(6,u,0)}static createOutputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startOutputsVector(a,u){a.startVector(4,u,4)}static addSparseInitializers(a,u){a.addFieldOffset(7,u,0)}static createSparseInitializersVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startSparseInitializersVector(a,u){a.startVector(4,u,4)}static endGraph(a){return a.endObject()}static createGraph(a,u,l,f,c,p,b,h,g){return o.startGraph(a),o.addInitializers(a,u),o.addNodeArgs(a,l),o.addNodes(a,f),o.addMaxNodeIndex(a,c),o.addNodeEdges(a,p),o.addInputs(a,b),o.addOutputs(a,h),o.addSparseInitializers(a,g),o.endGraph(a)}}i.Graph=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsModel(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsModel(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}irVersion(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}opsetImport(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.OperatorSetId).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}opsetImportLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}producerName(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__string(this.bb_pos+u,a):null}producerVersion(a){let u=this.bb.__offset(this.bb_pos,10);return u?this.bb.__string(this.bb_pos+u,a):null}domain(a){let u=this.bb.__offset(this.bb_pos,12);return u?this.bb.__string(this.bb_pos+u,a):null}modelVersion(){let a=this.bb.__offset(this.bb_pos,14);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}docString(a){let u=this.bb.__offset(this.bb_pos,16);return u?this.bb.__string(this.bb_pos+u,a):null}graph(a){let u=this.bb.__offset(this.bb_pos,18);return u?(a||new 
e.experimental.fbs.Graph).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}graphDocString(a){let u=this.bb.__offset(this.bb_pos,20);return u?this.bb.__string(this.bb_pos+u,a):null}static startModel(a){a.startObject(9)}static addIrVersion(a,u){a.addFieldInt64(0,u,a.createLong(0,0))}static addOpsetImport(a,u){a.addFieldOffset(1,u,0)}static createOpsetImportVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startOpsetImportVector(a,u){a.startVector(4,u,4)}static addProducerName(a,u){a.addFieldOffset(2,u,0)}static addProducerVersion(a,u){a.addFieldOffset(3,u,0)}static addDomain(a,u){a.addFieldOffset(4,u,0)}static addModelVersion(a,u){a.addFieldInt64(5,u,a.createLong(0,0))}static addDocString(a,u){a.addFieldOffset(6,u,0)}static addGraph(a,u){a.addFieldOffset(7,u,0)}static addGraphDocString(a,u){a.addFieldOffset(8,u,0)}static endModel(a){return a.endObject()}static createModel(a,u,l,f,c,p,b,h,g,T){return o.startModel(a),o.addIrVersion(a,u),o.addOpsetImport(a,l),o.addProducerName(a,f),o.addProducerVersion(a,c),o.addDomain(a,p),o.addModelVersion(a,b),o.addDocString(a,h),o.addGraph(a,g),o.addGraphDocString(a,T),o.endModel(a)}}i.Model=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsKernelCreateInfos(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsKernelCreateInfos(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}nodeIndices(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.readUint32(this.bb.__vector(this.bb_pos+u)+a*4):0}nodeIndicesLength(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.__vector_len(this.bb_pos+a):0}nodeIndicesArray(){let a=this.bb.__offset(this.bb_pos,4);return a?new Uint32Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}kernelDefHashes(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.readUint64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}kernelDefHashesLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}static startKernelCreateInfos(a){a.startObject(2)}static addNodeIndices(a,u){a.addFieldOffset(0,u,0)}static createNodeIndicesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addInt32(u[l]);return a.endVector()}static startNodeIndicesVector(a,u){a.startVector(4,u,4)}static addKernelDefHashes(a,u){a.addFieldOffset(1,u,0)}static createKernelDefHashesVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startKernelDefHashesVector(a,u){a.startVector(8,u,8)}static endKernelCreateInfos(a){return a.endObject()}static createKernelCreateInfos(a,u,l){return o.startKernelCreateInfos(a),o.addNodeIndices(a,u),o.addKernelDefHashes(a,l),o.endKernelCreateInfos(a)}}i.KernelCreateInfos=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSubGraphSessionState(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSubGraphSessionState(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new 
o).__init(a.readInt32(a.position())+a.position(),a)}graphId(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}sessionState(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.SessionState).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startSubGraphSessionState(a){a.startObject(2)}static addGraphId(a,u){a.addFieldOffset(0,u,0)}static addSessionState(a,u){a.addFieldOffset(1,u,0)}static endSubGraphSessionState(a){let u=a.endObject();return a.requiredField(u,4),u}static createSubGraphSessionState(a,u,l){return o.startSubGraphSessionState(a),o.addGraphId(a,u),o.addSessionState(a,l),o.endSubGraphSessionState(a)}}i.SubGraphSessionState=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSessionState(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSessionState(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}kernels(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.KernelCreateInfos).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}subGraphSessionStates(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.SubGraphSessionState).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}subGraphSessionStatesLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}static startSessionState(a){a.startObject(2)}static addKernels(a,u){a.addFieldOffset(0,u,0)}static addSubGraphSessionStates(a,u){a.addFieldOffset(1,u,0)}static createSubGraphSessionStatesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startSubGraphSessionStatesVector(a,u){a.startVector(4,u,4)}static endSessionState(a){return a.endObject()}static createSessionState(a,u,l){return o.startSessionState(a),o.addKernels(a,u),o.addSubGraphSessionStates(a,l),o.endSessionState(a)}}i.SessionState=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsInferenceSession(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsInferenceSession(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static bufferHasIdentifier(a){return a.__has_identifier("ORTM")}ortVersion(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}model(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.Model).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}sessionState(a){let u=this.bb.__offset(this.bb_pos,8);return u?(a||new e.experimental.fbs.SessionState).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startInferenceSession(a){a.startObject(3)}static addOrtVersion(a,u){a.addFieldOffset(0,u,0)}static addModel(a,u){a.addFieldOffset(1,u,0)}static addSessionState(a,u){a.addFieldOffset(2,u,0)}static endInferenceSession(a){return a.endObject()}static finishInferenceSessionBuffer(a,u){a.finish(u,"ORTM")}static finishSizePrefixedInferenceSessionBuffer(a,u){a.finish(u,"ORTM",!0)}static createInferenceSession(a,u,l,f){return 
o.startInferenceSession(a),o.addOrtVersion(a,u),o.addModel(a,l),o.addSessionState(a,f),o.endInferenceSession(a)}}i.InferenceSession=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={})});var Cc=Je((US,Ec)=>{"use strict";Ec.exports=dv;function dv(r,e){for(var n=new Array(arguments.length-1),t=0,o=2,i=!0;o{"use strict";var Do=Bc;Do.length=function(e){var n=e.length;if(!n)return 0;for(var t=0;--n%4>1&&e.charAt(n)==="=";)++t;return Math.ceil(e.length*3)/4-t};var fn=new Array(64),Dc=new Array(123);for(Ft=0;Ft<64;)Dc[fn[Ft]=Ft<26?Ft+65:Ft<52?Ft+71:Ft<62?Ft-4:Ft-59|43]=Ft++;var Ft;Do.encode=function(e,n,t){for(var o=null,i=[],s=0,a=0,u;n>2],u=(l&3)<<4,a=1;break;case 1:i[s++]=fn[u|l>>4],u=(l&15)<<2,a=2;break;case 2:i[s++]=fn[u|l>>6],i[s++]=fn[l&63],a=0;break}s>8191&&((o||(o=[])).push(String.fromCharCode.apply(String,i)),s=0)}return a&&(i[s++]=fn[u],i[s++]=61,a===1&&(i[s++]=61)),o?(s&&o.push(String.fromCharCode.apply(String,i.slice(0,s))),o.join("")):String.fromCharCode.apply(String,i.slice(0,s))};var kc="invalid encoding";Do.decode=function(e,n,t){for(var o=t,i=0,s,a=0;a1)break;if((u=Dc[u])===void 0)throw Error(kc);switch(i){case 0:s=u,i=1;break;case 1:n[t++]=s<<2|(u&48)>>4,s=u,i=2;break;case 2:n[t++]=(s&15)<<4|(u&60)>>2,s=u,i=3;break;case 3:n[t++]=(s&3)<<6|u,i=0;break}}if(i===1)throw Error(kc);return t-o};Do.test=function(e){return/^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(e)}});var Nc=Je((HS,Rc)=>{"use strict";Rc.exports=Bo;function Bo(){this._listeners={}}Bo.prototype.on=function(e,n,t){return(this._listeners[e]||(this._listeners[e]=[])).push({fn:n,ctx:t||this}),this};Bo.prototype.off=function(e,n){if(e===void 0)this._listeners={};else if(n===void 0)this._listeners[e]=[];else for(var t=this._listeners[e],o=0;o{"use strict";Uc.exports=zc(zc);function zc(r){return typeof Float32Array<"u"?function(){var e=new Float32Array([-0]),n=new Uint8Array(e.buffer),t=n[3]===128;function o(u,l,f){e[0]=u,l[f]=n[0],l[f+1]=n[1],l[f+2]=n[2],l[f+3]=n[3]}function i(u,l,f){e[0]=u,l[f]=n[3],l[f+1]=n[2],l[f+2]=n[1],l[f+3]=n[0]}r.writeFloatLE=t?o:i,r.writeFloatBE=t?i:o;function s(u,l){return n[0]=u[l],n[1]=u[l+1],n[2]=u[l+2],n[3]=u[l+3],e[0]}function a(u,l){return n[3]=u[l],n[2]=u[l+1],n[1]=u[l+2],n[0]=u[l+3],e[0]}r.readFloatLE=t?s:a,r.readFloatBE=t?a:s}():function(){function e(t,o,i,s){var a=o<0?1:0;if(a&&(o=-o),o===0)t(1/o>0?0:2147483648,i,s);else if(isNaN(o))t(2143289344,i,s);else if(o>34028234663852886e22)t((a<<31|2139095040)>>>0,i,s);else if(o<11754943508222875e-54)t((a<<31|Math.round(o/1401298464324817e-60))>>>0,i,s);else{var u=Math.floor(Math.log(o)/Math.LN2),l=Math.round(o*Math.pow(2,-u)*8388608)&8388607;t((a<<31|u+127<<23|l)>>>0,i,s)}}r.writeFloatLE=e.bind(null,Fc),r.writeFloatBE=e.bind(null,Mc);function n(t,o,i){var s=t(o,i),a=(s>>31)*2+1,u=s>>>23&255,l=s&8388607;return u===255?l?NaN:a*(1/0):u===0?a*1401298464324817e-60*l:a*Math.pow(2,u-150)*(l+8388608)}r.readFloatLE=n.bind(null,Vc),r.readFloatBE=n.bind(null,Gc)}(),typeof Float64Array<"u"?function(){var e=new Float64Array([-0]),n=new Uint8Array(e.buffer),t=n[7]===128;function o(u,l,f){e[0]=u,l[f]=n[0],l[f+1]=n[1],l[f+2]=n[2],l[f+3]=n[3],l[f+4]=n[4],l[f+5]=n[5],l[f+6]=n[6],l[f+7]=n[7]}function i(u,l,f){e[0]=u,l[f]=n[7],l[f+1]=n[6],l[f+2]=n[5],l[f+3]=n[4],l[f+4]=n[3],l[f+5]=n[2],l[f+6]=n[1],l[f+7]=n[0]}r.writeDoubleLE=t?o:i,r.writeDoubleBE=t?i:o;function s(u,l){return n[0]=u[l],n[1]=u[l+1],n[2]=u[l+2],n[3]=u[l+3],n[4]=u[l+4],n[5]=u[l+5],n[6]=u[l+6],n[7]=u[l+7],e[0]}function a(u,l){return 
n[7]=u[l],n[6]=u[l+1],n[5]=u[l+2],n[4]=u[l+3],n[3]=u[l+4],n[2]=u[l+5],n[1]=u[l+6],n[0]=u[l+7],e[0]}r.readDoubleLE=t?s:a,r.readDoubleBE=t?a:s}():function(){function e(t,o,i,s,a,u){var l=s<0?1:0;if(l&&(s=-s),s===0)t(0,a,u+o),t(1/s>0?0:2147483648,a,u+i);else if(isNaN(s))t(0,a,u+o),t(2146959360,a,u+i);else if(s>17976931348623157e292)t(0,a,u+o),t((l<<31|2146435072)>>>0,a,u+i);else{var f;if(s<22250738585072014e-324)f=s/5e-324,t(f>>>0,a,u+o),t((l<<31|f/4294967296)>>>0,a,u+i);else{var c=Math.floor(Math.log(s)/Math.LN2);c===1024&&(c=1023),f=s*Math.pow(2,-c),t(f*4503599627370496>>>0,a,u+o),t((l<<31|c+1023<<20|f*1048576&1048575)>>>0,a,u+i)}}}r.writeDoubleLE=e.bind(null,Fc,0,4),r.writeDoubleBE=e.bind(null,Mc,4,0);function n(t,o,i,s,a){var u=t(s,a+o),l=t(s,a+i),f=(l>>31)*2+1,c=l>>>20&2047,p=4294967296*(l&1048575)+u;return c===2047?p?NaN:f*(1/0):c===0?f*5e-324*p:f*Math.pow(2,c-1075)*(p+4503599627370496)}r.readDoubleLE=n.bind(null,Vc,0,4),r.readDoubleBE=n.bind(null,Gc,4,0)}(),r}function Fc(r,e,n){e[n]=r&255,e[n+1]=r>>>8&255,e[n+2]=r>>>16&255,e[n+3]=r>>>24}function Mc(r,e,n){e[n]=r>>>24,e[n+1]=r>>>16&255,e[n+2]=r>>>8&255,e[n+3]=r&255}function Vc(r,e){return(r[e]|r[e+1]<<8|r[e+2]<<16|r[e+3]<<24)>>>0}function Gc(r,e){return(r[e]<<24|r[e+1]<<16|r[e+2]<<8|r[e+3])>>>0}});var Hc=Je((exports,module)=>{"use strict";module.exports=inquire;function inquire(moduleName){try{var mod=eval("quire".replace(/^/,"re"))(moduleName);if(mod&&(mod.length||Object.keys(mod).length))return mod}catch(r){}return null}});var Kc=Je(qc=>{"use strict";var Da=qc;Da.length=function(e){for(var n=0,t=0,o=0;o191&&u<224?s[a++]=(u&31)<<6|e[n++]&63:u>239&&u<365?(u=((u&7)<<18|(e[n++]&63)<<12|(e[n++]&63)<<6|e[n++]&63)-65536,s[a++]=55296+(u>>10),s[a++]=56320+(u&1023)):s[a++]=(u&15)<<12|(e[n++]&63)<<6|e[n++]&63,a>8191&&((i||(i=[])).push(String.fromCharCode.apply(String,s)),a=0);return i?(a&&i.push(String.fromCharCode.apply(String,s.slice(0,a))),i.join("")):String.fromCharCode.apply(String,s.slice(0,a))};Da.write=function(e,n,t){for(var o=t,i,s,a=0;a>6|192,n[t++]=i&63|128):(i&64512)===55296&&((s=e.charCodeAt(a+1))&64512)===56320?(i=65536+((i&1023)<<10)+(s&1023),++a,n[t++]=i>>18|240,n[t++]=i>>12&63|128,n[t++]=i>>6&63|128,n[t++]=i&63|128):(n[t++]=i>>12|224,n[t++]=i>>6&63|128,n[t++]=i&63|128);return t-o}});var Xc=Je((jS,jc)=>{"use strict";jc.exports=pv;function pv(r,e,n){var t=n||8192,o=t>>>1,i=null,s=t;return function(u){if(u<1||u>o)return r(u);s+u>t&&(i=r(t),s=0);var l=e.call(i,s,s+=u);return s&7&&(s=(s|7)+1),l}}});var Yc=Je((XS,Zc)=>{"use strict";Zc.exports=at;var kn=_r();function at(r,e){this.lo=r>>>0,this.hi=e>>>0}var Rr=at.zero=new at(0,0);Rr.toNumber=function(){return 0};Rr.zzEncode=Rr.zzDecode=function(){return this};Rr.length=function(){return 1};var mv=at.zeroHash="\0\0\0\0\0\0\0\0";at.fromNumber=function(e){if(e===0)return Rr;var n=e<0;n&&(e=-e);var t=e>>>0,o=(e-t)/4294967296>>>0;return n&&(o=~o>>>0,t=~t>>>0,++t>4294967295&&(t=0,++o>4294967295&&(o=0))),new at(t,o)};at.from=function(e){if(typeof e=="number")return at.fromNumber(e);if(kn.isString(e))if(kn.Long)e=kn.Long.fromString(e);else return at.fromNumber(parseInt(e,10));return e.low||e.high?new at(e.low>>>0,e.high>>>0):Rr};at.prototype.toNumber=function(e){if(!e&&this.hi>>>31){var n=~this.lo+1>>>0,t=~this.hi>>>0;return n||(t=t+1>>>0),-(n+t*4294967296)}return this.lo+this.hi*4294967296};at.prototype.toLong=function(e){return kn.Long?new kn.Long(this.lo|0,this.hi|0,!!e):{low:this.lo|0,high:this.hi|0,unsigned:!!e}};var Tr=String.prototype.charCodeAt;at.fromHash=function(e){return 
e===mv?Rr:new at((Tr.call(e,0)|Tr.call(e,1)<<8|Tr.call(e,2)<<16|Tr.call(e,3)<<24)>>>0,(Tr.call(e,4)|Tr.call(e,5)<<8|Tr.call(e,6)<<16|Tr.call(e,7)<<24)>>>0)};at.prototype.toHash=function(){return String.fromCharCode(this.lo&255,this.lo>>>8&255,this.lo>>>16&255,this.lo>>>24,this.hi&255,this.hi>>>8&255,this.hi>>>16&255,this.hi>>>24)};at.prototype.zzEncode=function(){var e=this.hi>>31;return this.hi=((this.hi<<1|this.lo>>>31)^e)>>>0,this.lo=(this.lo<<1^e)>>>0,this};at.prototype.zzDecode=function(){var e=-(this.lo&1);return this.lo=((this.lo>>>1|this.hi<<31)^e)>>>0,this.hi=(this.hi>>>1^e)>>>0,this};at.prototype.length=function(){var e=this.lo,n=(this.lo>>>28|this.hi<<4)>>>0,t=this.hi>>>24;return t===0?n===0?e<16384?e<128?1:2:e<2097152?3:4:n<16384?n<128?5:6:n<2097152?7:8:t<128?9:10}});var _r=Je(Ba=>{"use strict";var re=Ba;re.asPromise=Cc();re.base64=Lc();re.EventEmitter=Nc();re.float=Wc();re.inquire=Hc();re.utf8=Kc();re.pool=Xc();re.LongBits=Yc();re.isNode=!!(typeof global<"u"&&global&&global.process&&global.process.versions&&global.process.versions.node);re.global=re.isNode&&global||typeof window<"u"&&window||typeof self<"u"&&self||Ba;re.emptyArray=Object.freeze?Object.freeze([]):[];re.emptyObject=Object.freeze?Object.freeze({}):{};re.isInteger=Number.isInteger||function(e){return typeof e=="number"&&isFinite(e)&&Math.floor(e)===e};re.isString=function(e){return typeof e=="string"||e instanceof String};re.isObject=function(e){return e&&typeof e=="object"};re.isset=re.isSet=function(e,n){var t=e[n];return t!=null&&e.hasOwnProperty(n)?typeof t!="object"||(Array.isArray(t)?t.length:Object.keys(t).length)>0:!1};re.Buffer=function(){try{var r=re.inquire("buffer").Buffer;return r.prototype.utf8Write?r:null}catch{return null}}();re._Buffer_from=null;re._Buffer_allocUnsafe=null;re.newBuffer=function(e){return typeof e=="number"?re.Buffer?re._Buffer_allocUnsafe(e):new re.Array(e):re.Buffer?re._Buffer_from(e):typeof Uint8Array>"u"?e:new Uint8Array(e)};re.Array=typeof Uint8Array<"u"?Uint8Array:Array;re.Long=re.global.dcodeIO&&re.global.dcodeIO.Long||re.global.Long||re.inquire("long");re.key2Re=/^true|false|0|1$/;re.key32Re=/^-?(?:0|[1-9][0-9]*)$/;re.key64Re=/^(?:[\\x00-\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;re.longToHash=function(e){return e?re.LongBits.from(e).toHash():re.LongBits.zeroHash};re.longFromHash=function(e,n){var t=re.LongBits.fromHash(e);return re.Long?re.Long.fromBits(t.lo,t.hi,n):t.toNumber(!!n)};function Jc(r,e,n){for(var t=Object.keys(e),o=0;o-1;--i)if(n[o[i]]===1&&this[o[i]]!==void 0&&this[o[i]]!==null)return o[i]}};re.oneOfSetter=function(e){return function(n){for(var t=0;t{"use strict";nf.exports=Pe;var Ct=_r(),La,Lo=Ct.LongBits,ef=Ct.base64,tf=Ct.utf8;function Dn(r,e,n){this.fn=r,this.len=e,this.next=void 0,this.val=n}function Na(){}function hv(r){this.head=r.head,this.tail=r.tail,this.len=r.len,this.next=r.states}function Pe(){this.len=0,this.head=new Dn(Na,0,0),this.tail=this.head,this.states=null}var rf=function(){return Ct.Buffer?function(){return(Pe.create=function(){return new La})()}:function(){return new Pe}};Pe.create=rf();Pe.alloc=function(e){return new Ct.Array(e)};Ct.Array!==Array&&(Pe.alloc=Ct.pool(Pe.alloc,Ct.Array.prototype.subarray));Pe.prototype._push=function(e,n,t){return this.tail=this.tail.next=new Dn(e,n,t),this.len+=n,this};function za(r,e,n){e[n]=r&255}function gv(r,e,n){for(;r>127;)e[n++]=r&127|128,r>>>=7;e[n]=r}function Fa(r,e){this.len=r,this.next=void 0,this.val=e}Fa.prototype=Object.create(Dn.prototype);Fa.prototype.fn=gv;Pe.prototype.uint32=function(e){return 
this.len+=(this.tail=this.tail.next=new Fa((e=e>>>0)<128?1:e<16384?2:e<2097152?3:e<268435456?4:5,e)).len,this};Pe.prototype.int32=function(e){return e<0?this._push(Ma,10,Lo.fromNumber(e)):this.uint32(e)};Pe.prototype.sint32=function(e){return this.uint32((e<<1^e>>31)>>>0)};function Ma(r,e,n){for(;r.hi;)e[n++]=r.lo&127|128,r.lo=(r.lo>>>7|r.hi<<25)>>>0,r.hi>>>=7;for(;r.lo>127;)e[n++]=r.lo&127|128,r.lo=r.lo>>>7;e[n++]=r.lo}Pe.prototype.uint64=function(e){var n=Lo.from(e);return this._push(Ma,n.length(),n)};Pe.prototype.int64=Pe.prototype.uint64;Pe.prototype.sint64=function(e){var n=Lo.from(e).zzEncode();return this._push(Ma,n.length(),n)};Pe.prototype.bool=function(e){return this._push(za,1,e?1:0)};function Ra(r,e,n){e[n]=r&255,e[n+1]=r>>>8&255,e[n+2]=r>>>16&255,e[n+3]=r>>>24}Pe.prototype.fixed32=function(e){return this._push(Ra,4,e>>>0)};Pe.prototype.sfixed32=Pe.prototype.fixed32;Pe.prototype.fixed64=function(e){var n=Lo.from(e);return this._push(Ra,4,n.lo)._push(Ra,4,n.hi)};Pe.prototype.sfixed64=Pe.prototype.fixed64;Pe.prototype.float=function(e){return this._push(Ct.float.writeFloatLE,4,e)};Pe.prototype.double=function(e){return this._push(Ct.float.writeDoubleLE,8,e)};var bv=Ct.Array.prototype.set?function(e,n,t){n.set(e,t)}:function(e,n,t){for(var o=0;o>>0;if(!n)return this._push(za,1,0);if(Ct.isString(e)){var t=Pe.alloc(n=ef.length(e));ef.decode(e,t,0),e=t}return this.uint32(n)._push(bv,n,e)};Pe.prototype.string=function(e){var n=tf.length(e);return n?this.uint32(n)._push(tf.write,n,e):this._push(za,1,0)};Pe.prototype.fork=function(){return this.states=new hv(this),this.head=this.tail=new Dn(Na,0,0),this.len=0,this};Pe.prototype.reset=function(){return this.states?(this.head=this.states.head,this.tail=this.states.tail,this.len=this.states.len,this.states=this.states.next):(this.head=this.tail=new Dn(Na,0,0),this.len=0),this};Pe.prototype.ldelim=function(){var e=this.head,n=this.tail,t=this.len;return this.reset().uint32(t),t&&(this.tail.next=e.next,this.tail=n,this.len+=t),this};Pe.prototype.finish=function(){for(var e=this.head.next,n=this.constructor.alloc(this.len),t=0;e;)e.fn(e.val,n,t),t+=e.len,e=e.next;return n};Pe._configure=function(r){La=r,Pe.create=rf(),La._configure()}});var sf=Je((JS,af)=>{"use strict";af.exports=Jt;var of=Va();(Jt.prototype=Object.create(of.prototype)).constructor=Jt;var Ir=_r();function Jt(){of.call(this)}Jt._configure=function(){Jt.alloc=Ir._Buffer_allocUnsafe,Jt.writeBytesBuffer=Ir.Buffer&&Ir.Buffer.prototype instanceof Uint8Array&&Ir.Buffer.prototype.set.name==="set"?function(e,n,t){n.set(e,t)}:function(e,n,t){if(e.copy)e.copy(n,t,0,e.length);else for(var o=0;o>>0;return this.uint32(n),n&&this._push(Jt.writeBytesBuffer,n,e),this};function yv(r,e,n){r.length<40?Ir.utf8.write(r,e,n):e.utf8Write?e.utf8Write(r,n):e.write(r,n)}Jt.prototype.string=function(e){var n=Ir.Buffer.byteLength(e);return this.uint32(n),n&&this._push(yv,n,e),this};Jt._configure()});var Wa=Je((QS,df)=>{"use strict";df.exports=je;var Mt=_r(),Ua,cf=Mt.LongBits,xv=Mt.utf8;function Vt(r,e){return RangeError("index out of range: "+r.pos+" + "+(e||1)+" > "+r.len)}function je(r){this.buf=r,this.pos=0,this.len=r.length}var uf=typeof Uint8Array<"u"?function(e){if(e instanceof Uint8Array||Array.isArray(e))return new je(e);throw Error("illegal buffer")}:function(e){if(Array.isArray(e))return new je(e);throw Error("illegal buffer")},ff=function(){return Mt.Buffer?function(n){return(je.create=function(o){return Mt.Buffer.isBuffer(o)?new 
Ua(o):uf(o)})(n)}:uf};je.create=ff();je.prototype._slice=Mt.Array.prototype.subarray||Mt.Array.prototype.slice;je.prototype.uint32=function(){var e=4294967295;return function(){if(e=(this.buf[this.pos]&127)>>>0,this.buf[this.pos++]<128||(e=(e|(this.buf[this.pos]&127)<<7)>>>0,this.buf[this.pos++]<128)||(e=(e|(this.buf[this.pos]&127)<<14)>>>0,this.buf[this.pos++]<128)||(e=(e|(this.buf[this.pos]&127)<<21)>>>0,this.buf[this.pos++]<128)||(e=(e|(this.buf[this.pos]&15)<<28)>>>0,this.buf[this.pos++]<128))return e;if((this.pos+=5)>this.len)throw this.pos=this.len,Vt(this,10);return e}}();je.prototype.int32=function(){return this.uint32()|0};je.prototype.sint32=function(){var e=this.uint32();return e>>>1^-(e&1)|0};function Ga(){var r=new cf(0,0),e=0;if(this.len-this.pos>4){for(;e<4;++e)if(r.lo=(r.lo|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r;if(r.lo=(r.lo|(this.buf[this.pos]&127)<<28)>>>0,r.hi=(r.hi|(this.buf[this.pos]&127)>>4)>>>0,this.buf[this.pos++]<128)return r;e=0}else{for(;e<3;++e){if(this.pos>=this.len)throw Vt(this);if(r.lo=(r.lo|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r}return r.lo=(r.lo|(this.buf[this.pos++]&127)<>>0,r}if(this.len-this.pos>4){for(;e<5;++e)if(r.hi=(r.hi|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r}else for(;e<5;++e){if(this.pos>=this.len)throw Vt(this);if(r.hi=(r.hi|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r}throw Error("invalid varint encoding")}je.prototype.bool=function(){return this.uint32()!==0};function Ro(r,e){return(r[e-4]|r[e-3]<<8|r[e-2]<<16|r[e-1]<<24)>>>0}je.prototype.fixed32=function(){if(this.pos+4>this.len)throw Vt(this,4);return Ro(this.buf,this.pos+=4)};je.prototype.sfixed32=function(){if(this.pos+4>this.len)throw Vt(this,4);return Ro(this.buf,this.pos+=4)|0};function lf(){if(this.pos+8>this.len)throw Vt(this,8);return new cf(Ro(this.buf,this.pos+=4),Ro(this.buf,this.pos+=4))}je.prototype.float=function(){if(this.pos+4>this.len)throw Vt(this,4);var e=Mt.float.readFloatLE(this.buf,this.pos);return this.pos+=4,e};je.prototype.double=function(){if(this.pos+8>this.len)throw Vt(this,4);var e=Mt.float.readDoubleLE(this.buf,this.pos);return this.pos+=8,e};je.prototype.bytes=function(){var e=this.uint32(),n=this.pos,t=this.pos+e;if(t>this.len)throw Vt(this,e);if(this.pos+=e,Array.isArray(this.buf))return this.buf.slice(n,t);if(n===t){var o=Mt.Buffer;return o?o.alloc(0):new this.buf.constructor(0)}return this._slice.call(this.buf,n,t)};je.prototype.string=function(){var e=this.bytes();return xv.read(e,0,e.length)};je.prototype.skip=function(e){if(typeof e=="number"){if(this.pos+e>this.len)throw Vt(this,e);this.pos+=e}else do if(this.pos>=this.len)throw Vt(this);while(this.buf[this.pos++]&128);return this};je.prototype.skipType=function(r){switch(r){case 0:this.skip();break;case 1:this.skip(8);break;case 2:this.skip(this.uint32());break;case 3:for(;(r=this.uint32()&7)!==4;)this.skipType(r);break;case 5:this.skip(4);break;default:throw Error("invalid wire type "+r+" at offset "+this.pos)}return this};je._configure=function(r){Ua=r,je.create=ff(),Ua._configure();var e=Mt.Long?"toLong":"toNumber";Mt.merge(je.prototype,{int64:function(){return Ga.call(this)[e](!1)},uint64:function(){return Ga.call(this)[e](!0)},sint64:function(){return Ga.call(this).zzDecode()[e](!1)},fixed64:function(){return lf.call(this)[e](!0)},sfixed64:function(){return lf.call(this)[e](!1)}})}});var gf=Je((e$,hf)=>{"use strict";hf.exports=Nr;var mf=Wa();(Nr.prototype=Object.create(mf.prototype)).constructor=Nr;var 
pf=_r();function Nr(r){mf.call(this,r)}Nr._configure=function(){pf.Buffer&&(Nr.prototype._slice=pf.Buffer.prototype.slice)};Nr.prototype.string=function(){var e=this.uint32();return this.buf.utf8Slice?this.buf.utf8Slice(this.pos,this.pos=Math.min(this.pos+e,this.len)):this.buf.toString("utf-8",this.pos,this.pos=Math.min(this.pos+e,this.len))};Nr._configure()});var yf=Je((t$,bf)=>{"use strict";bf.exports=Bn;var Ha=_r();(Bn.prototype=Object.create(Ha.EventEmitter.prototype)).constructor=Bn;function Bn(r,e,n){if(typeof r!="function")throw TypeError("rpcImpl must be a function");Ha.EventEmitter.call(this),this.rpcImpl=r,this.requestDelimited=!!e,this.responseDelimited=!!n}Bn.prototype.rpcCall=function r(e,n,t,o,i){if(!o)throw TypeError("request must be specified");var s=this;if(!i)return Ha.asPromise(r,s,e,n,t,o);if(!s.rpcImpl){setTimeout(function(){i(Error("already ended"))},0);return}try{return s.rpcImpl(e,n[s.requestDelimited?"encodeDelimited":"encode"](o).finish(),function(u,l){if(u)return s.emit("error",u,e),i(u);if(l===null){s.end(!0);return}if(!(l instanceof t))try{l=t[s.responseDelimited?"decodeDelimited":"decode"](l)}catch(f){return s.emit("error",f,e),i(f)}return s.emit("data",l,e),i(null,l)})}catch(a){s.emit("error",a,e),setTimeout(function(){i(a)},0);return}};Bn.prototype.end=function(e){return this.rpcImpl&&(e||this.rpcImpl(null,null,null),this.rpcImpl=null,this.emit("end").off()),this}});var vf=Je(xf=>{"use strict";var vv=xf;vv.Service=yf()});var Tf=Je((n$,wf)=>{"use strict";wf.exports={}});var Sf=Je(If=>{"use strict";var vt=If;vt.build="minimal";vt.Writer=Va();vt.BufferWriter=sf();vt.Reader=Wa();vt.BufferReader=gf();vt.util=_r();vt.rpc=vf();vt.roots=Tf();vt.configure=_f;function _f(){vt.util._configure(),vt.Writer._configure(vt.BufferWriter),vt.Reader._configure(vt.BufferReader)}_f()});var Af=Je((i$,$f)=>{"use strict";$f.exports=Sf()});var dn=Je((a$,Pf)=>{"use strict";var Ve=Af(),H=Ve.Reader,Xe=Ve.Writer,A=Ve.util,I=Ve.roots.default||(Ve.roots.default={});I.onnx=function(){var r={};return r.Version=function(){var e={},n=Object.create(e);return n[e[0]="_START_VERSION"]=0,n[e[1]="IR_VERSION_2017_10_10"]=1,n[e[2]="IR_VERSION_2017_10_30"]=2,n[e[3]="IR_VERSION_2017_11_3"]=3,n[e[4]="IR_VERSION_2019_1_22"]=4,n[e[5]="IR_VERSION_2019_3_18"]=5,n[e[6]="IR_VERSION_2019_9_19"]=6,n[e[7]="IR_VERSION_2020_5_8"]=7,n[e[8]="IR_VERSION_2021_7_30"]=8,n[e[9]="IR_VERSION"]=9,n}(),r.AttributeProto=function(){function e(n){if(this.floats=[],this.ints=[],this.strings=[],this.tensors=[],this.graphs=[],this.sparseTensors=[],this.typeProtos=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.name=t.string();break}case 21:{s.refAttrName=t.string();break}case 13:{s.docString=t.string();break}case 20:{s.type=t.int32();break}case 2:{s.f=t.float();break}case 3:{s.i=t.int64();break}case 4:{s.s=t.bytes();break}case 5:{s.t=I.onnx.TensorProto.decode(t,t.uint32());break}case 6:{s.g=I.onnx.GraphProto.decode(t,t.uint32());break}case 22:{s.sparseTensor=I.onnx.SparseTensorProto.decode(t,t.uint32());break}case 14:{s.tp=I.onnx.TypeProto.decode(t,t.uint32());break}case 7:{if(s.floats&&s.floats.length||(s.floats=[]),(a&7)===2)for(var u=t.uint32()+t.pos;t.pos>>0,t.i.high>>>0).toNumber())),t.s!=null&&(typeof t.s=="string"?A.base64.decode(t.s,o.s=A.newBuffer(A.base64.length(t.s)),0):t.s.length>=0&&(o.s=t.s)),t.t!=null){if(typeof t.t!="object")throw TypeError(".onnx.AttributeProto.t: object expected");o.t=I.onnx.TensorProto.fromObject(t.t)}if(t.g!=null){if(typeof t.g!="object")throw TypeError(".onnx.AttributeProto.g: object 
expected");o.g=I.onnx.GraphProto.fromObject(t.g)}if(t.sparseTensor!=null){if(typeof t.sparseTensor!="object")throw TypeError(".onnx.AttributeProto.sparseTensor: object expected");o.sparseTensor=I.onnx.SparseTensorProto.fromObject(t.sparseTensor)}if(t.tp!=null){if(typeof t.tp!="object")throw TypeError(".onnx.AttributeProto.tp: object expected");o.tp=I.onnx.TypeProto.fromObject(t.tp)}if(t.floats){if(!Array.isArray(t.floats))throw TypeError(".onnx.AttributeProto.floats: array expected");o.floats=[];for(var i=0;i>>0,t.ints[i].high>>>0).toNumber())}if(t.strings){if(!Array.isArray(t.strings))throw TypeError(".onnx.AttributeProto.strings: array expected");o.strings=[];for(var i=0;i=0&&(o.strings[i]=t.strings[i])}if(t.tensors){if(!Array.isArray(t.tensors))throw TypeError(".onnx.AttributeProto.tensors: array expected");o.tensors=[];for(var i=0;i>>0,t.i.high>>>0).toNumber():t.i),t.s!=null&&t.hasOwnProperty("s")&&(i.s=o.bytes===String?A.base64.encode(t.s,0,t.s.length):o.bytes===Array?Array.prototype.slice.call(t.s):t.s),t.t!=null&&t.hasOwnProperty("t")&&(i.t=I.onnx.TensorProto.toObject(t.t,o)),t.g!=null&&t.hasOwnProperty("g")&&(i.g=I.onnx.GraphProto.toObject(t.g,o)),t.floats&&t.floats.length){i.floats=[];for(var a=0;a>>0,t.ints[a].high>>>0).toNumber():t.ints[a]}if(t.strings&&t.strings.length){i.strings=[];for(var a=0;a>>3){case 1:{s.name=t.string();break}case 2:{s.type=I.onnx.TypeProto.decode(t,t.uint32());break}case 3:{s.docString=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.name!=null&&t.hasOwnProperty("name")&&!A.isString(t.name))return"name: string expected";if(t.type!=null&&t.hasOwnProperty("type")){var o=I.onnx.TypeProto.verify(t.type);if(o)return"type."+o}return t.docString!=null&&t.hasOwnProperty("docString")&&!A.isString(t.docString)?"docString: string expected":null},e.fromObject=function(t){if(t instanceof I.onnx.ValueInfoProto)return t;var o=new I.onnx.ValueInfoProto;if(t.name!=null&&(o.name=String(t.name)),t.type!=null){if(typeof t.type!="object")throw TypeError(".onnx.ValueInfoProto.type: object expected");o.type=I.onnx.TypeProto.fromObject(t.type)}return t.docString!=null&&(o.docString=String(t.docString)),o},e.toObject=function(t,o){o||(o={});var i={};return o.defaults&&(i.name="",i.type=null,i.docString=""),t.name!=null&&t.hasOwnProperty("name")&&(i.name=t.name),t.type!=null&&t.hasOwnProperty("type")&&(i.type=I.onnx.TypeProto.toObject(t.type,o)),t.docString!=null&&t.hasOwnProperty("docString")&&(i.docString=t.docString),i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.ValueInfoProto"},e}(),r.NodeProto=function(){function e(n){if(this.input=[],this.output=[],this.attribute=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.input&&s.input.length||(s.input=[]),s.input.push(t.string());break}case 2:{s.output&&s.output.length||(s.output=[]),s.output.push(t.string());break}case 3:{s.name=t.string();break}case 4:{s.opType=t.string();break}case 7:{s.domain=t.string();break}case 5:{s.attribute&&s.attribute.length||(s.attribute=[]),s.attribute.push(I.onnx.AttributeProto.decode(t,t.uint32()));break}case 6:{s.docString=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new 
H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.input!=null&&t.hasOwnProperty("input")){if(!Array.isArray(t.input))return"input: array expected";for(var o=0;o>>3){case 1:{s.initialization=I.onnx.GraphProto.decode(t,t.uint32());break}case 2:{s.algorithm=I.onnx.GraphProto.decode(t,t.uint32());break}case 3:{s.initializationBinding&&s.initializationBinding.length||(s.initializationBinding=[]),s.initializationBinding.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}case 4:{s.updateBinding&&s.updateBinding.length||(s.updateBinding=[]),s.updateBinding.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.initialization!=null&&t.hasOwnProperty("initialization")){var o=I.onnx.GraphProto.verify(t.initialization);if(o)return"initialization."+o}if(t.algorithm!=null&&t.hasOwnProperty("algorithm")){var o=I.onnx.GraphProto.verify(t.algorithm);if(o)return"algorithm."+o}if(t.initializationBinding!=null&&t.hasOwnProperty("initializationBinding")){if(!Array.isArray(t.initializationBinding))return"initializationBinding: array expected";for(var i=0;i>>3){case 1:{s.irVersion=t.int64();break}case 8:{s.opsetImport&&s.opsetImport.length||(s.opsetImport=[]),s.opsetImport.push(I.onnx.OperatorSetIdProto.decode(t,t.uint32()));break}case 2:{s.producerName=t.string();break}case 3:{s.producerVersion=t.string();break}case 4:{s.domain=t.string();break}case 5:{s.modelVersion=t.int64();break}case 6:{s.docString=t.string();break}case 7:{s.graph=I.onnx.GraphProto.decode(t,t.uint32());break}case 14:{s.metadataProps&&s.metadataProps.length||(s.metadataProps=[]),s.metadataProps.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}case 20:{s.trainingInfo&&s.trainingInfo.length||(s.trainingInfo=[]),s.trainingInfo.push(I.onnx.TrainingInfoProto.decode(t,t.uint32()));break}case 25:{s.functions&&s.functions.length||(s.functions=[]),s.functions.push(I.onnx.FunctionProto.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.irVersion!=null&&t.hasOwnProperty("irVersion")&&!A.isInteger(t.irVersion)&&!(t.irVersion&&A.isInteger(t.irVersion.low)&&A.isInteger(t.irVersion.high)))return"irVersion: integer|Long expected";if(t.opsetImport!=null&&t.hasOwnProperty("opsetImport")){if(!Array.isArray(t.opsetImport))return"opsetImport: array expected";for(var o=0;o>>0,t.irVersion.high>>>0).toNumber())),t.opsetImport){if(!Array.isArray(t.opsetImport))throw TypeError(".onnx.ModelProto.opsetImport: array expected");o.opsetImport=[];for(var i=0;i>>0,t.modelVersion.high>>>0).toNumber())),t.docString!=null&&(o.docString=String(t.docString)),t.graph!=null){if(typeof t.graph!="object")throw TypeError(".onnx.ModelProto.graph: object expected");o.graph=I.onnx.GraphProto.fromObject(t.graph)}if(t.metadataProps){if(!Array.isArray(t.metadataProps))throw TypeError(".onnx.ModelProto.metadataProps: array expected");o.metadataProps=[];for(var 
i=0;i>>0,t.irVersion.high>>>0).toNumber():t.irVersion),t.producerName!=null&&t.hasOwnProperty("producerName")&&(i.producerName=t.producerName),t.producerVersion!=null&&t.hasOwnProperty("producerVersion")&&(i.producerVersion=t.producerVersion),t.domain!=null&&t.hasOwnProperty("domain")&&(i.domain=t.domain),t.modelVersion!=null&&t.hasOwnProperty("modelVersion")&&(typeof t.modelVersion=="number"?i.modelVersion=o.longs===String?String(t.modelVersion):t.modelVersion:i.modelVersion=o.longs===String?A.Long.prototype.toString.call(t.modelVersion):o.longs===Number?new A.LongBits(t.modelVersion.low>>>0,t.modelVersion.high>>>0).toNumber():t.modelVersion),t.docString!=null&&t.hasOwnProperty("docString")&&(i.docString=t.docString),t.graph!=null&&t.hasOwnProperty("graph")&&(i.graph=I.onnx.GraphProto.toObject(t.graph,o)),t.opsetImport&&t.opsetImport.length){i.opsetImport=[];for(var a=0;a>>3){case 1:{s.key=t.string();break}case 2:{s.value=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){return typeof t!="object"||t===null?"object expected":t.key!=null&&t.hasOwnProperty("key")&&!A.isString(t.key)?"key: string expected":t.value!=null&&t.hasOwnProperty("value")&&!A.isString(t.value)?"value: string expected":null},e.fromObject=function(t){if(t instanceof I.onnx.StringStringEntryProto)return t;var o=new I.onnx.StringStringEntryProto;return t.key!=null&&(o.key=String(t.key)),t.value!=null&&(o.value=String(t.value)),o},e.toObject=function(t,o){o||(o={});var i={};return o.defaults&&(i.key="",i.value=""),t.key!=null&&t.hasOwnProperty("key")&&(i.key=t.key),t.value!=null&&t.hasOwnProperty("value")&&(i.value=t.value),i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.StringStringEntryProto"},e}(),r.TensorAnnotation=function(){function e(n){if(this.quantParameterTensorNames=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.tensorName=t.string();break}case 2:{s.quantParameterTensorNames&&s.quantParameterTensorNames.length||(s.quantParameterTensorNames=[]),s.quantParameterTensorNames.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.tensorName!=null&&t.hasOwnProperty("tensorName")&&!A.isString(t.tensorName))return"tensorName: string expected";if(t.quantParameterTensorNames!=null&&t.hasOwnProperty("quantParameterTensorNames")){if(!Array.isArray(t.quantParameterTensorNames))return"quantParameterTensorNames: array expected";for(var o=0;o>>3){case 1:{s.node&&s.node.length||(s.node=[]),s.node.push(I.onnx.NodeProto.decode(t,t.uint32()));break}case 2:{s.name=t.string();break}case 5:{s.initializer&&s.initializer.length||(s.initializer=[]),s.initializer.push(I.onnx.TensorProto.decode(t,t.uint32()));break}case 15:{s.sparseInitializer&&s.sparseInitializer.length||(s.sparseInitializer=[]),s.sparseInitializer.push(I.onnx.SparseTensorProto.decode(t,t.uint32()));break}case 10:{s.docString=t.string();break}case 11:{s.input&&s.input.length||(s.input=[]),s.input.push(I.onnx.ValueInfoProto.decode(t,t.uint32()));break}case 12:{s.output&&s.output.length||(s.output=[]),s.output.push(I.onnx.ValueInfoProto.decode(t,t.uint32()));break}case 
13:{s.valueInfo&&s.valueInfo.length||(s.valueInfo=[]),s.valueInfo.push(I.onnx.ValueInfoProto.decode(t,t.uint32()));break}case 14:{s.quantizationAnnotation&&s.quantizationAnnotation.length||(s.quantizationAnnotation=[]),s.quantizationAnnotation.push(I.onnx.TensorAnnotation.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.node!=null&&t.hasOwnProperty("node")){if(!Array.isArray(t.node))return"node: array expected";for(var o=0;o>>3){case 1:{if(s.dims&&s.dims.length||(s.dims=[]),(a&7)===2)for(var u=t.uint32()+t.pos;t.pos>>0,t.dims[i].high>>>0).toNumber())}if(t.dataType!=null&&(o.dataType=t.dataType|0),t.segment!=null){if(typeof t.segment!="object")throw TypeError(".onnx.TensorProto.segment: object expected");o.segment=I.onnx.TensorProto.Segment.fromObject(t.segment)}if(t.floatData){if(!Array.isArray(t.floatData))throw TypeError(".onnx.TensorProto.floatData: array expected");o.floatData=[];for(var i=0;i=0&&(o.stringData[i]=t.stringData[i])}if(t.int64Data){if(!Array.isArray(t.int64Data))throw TypeError(".onnx.TensorProto.int64Data: array expected");o.int64Data=[];for(var i=0;i>>0,t.int64Data[i].high>>>0).toNumber())}if(t.name!=null&&(o.name=String(t.name)),t.docString!=null&&(o.docString=String(t.docString)),t.rawData!=null&&(typeof t.rawData=="string"?A.base64.decode(t.rawData,o.rawData=A.newBuffer(A.base64.length(t.rawData)),0):t.rawData.length>=0&&(o.rawData=t.rawData)),t.externalData){if(!Array.isArray(t.externalData))throw TypeError(".onnx.TensorProto.externalData: array expected");o.externalData=[];for(var i=0;i>>0,t.uint64Data[i].high>>>0).toNumber(!0))}return o},e.toObject=function(t,o){o||(o={});var i={};if((o.arrays||o.defaults)&&(i.dims=[],i.floatData=[],i.int32Data=[],i.stringData=[],i.int64Data=[],i.doubleData=[],i.uint64Data=[],i.externalData=[]),o.defaults&&(i.dataType=0,i.segment=null,i.name="",o.bytes===String?i.rawData="":(i.rawData=[],o.bytes!==Array&&(i.rawData=A.newBuffer(i.rawData))),i.docString="",i.dataLocation=o.enums===String?"DEFAULT":0),t.dims&&t.dims.length){i.dims=[];for(var s=0;s>>0,t.dims[s].high>>>0).toNumber():t.dims[s]}if(t.dataType!=null&&t.hasOwnProperty("dataType")&&(i.dataType=t.dataType),t.segment!=null&&t.hasOwnProperty("segment")&&(i.segment=I.onnx.TensorProto.Segment.toObject(t.segment,o)),t.floatData&&t.floatData.length){i.floatData=[];for(var s=0;s>>0,t.int64Data[s].high>>>0).toNumber():t.int64Data[s]}if(t.name!=null&&t.hasOwnProperty("name")&&(i.name=t.name),t.rawData!=null&&t.hasOwnProperty("rawData")&&(i.rawData=o.bytes===String?A.base64.encode(t.rawData,0,t.rawData.length):o.bytes===Array?Array.prototype.slice.call(t.rawData):t.rawData),t.doubleData&&t.doubleData.length){i.doubleData=[];for(var s=0;s>>0,t.uint64Data[s].high>>>0).toNumber(!0):t.uint64Data[s]}if(t.docString!=null&&t.hasOwnProperty("docString")&&(i.docString=t.docString),t.externalData&&t.externalData.length){i.externalData=[];for(var s=0;s>>3){case 1:{a.begin=o.int64();break}case 2:{a.end=o.int64();break}default:o.skipType(u&7);break}}return a},n.decodeDelimited=function(o){return o instanceof H||(o=new H(o)),this.decode(o,o.uint32())},n.verify=function(o){return typeof o!="object"||o===null?"object expected":o.begin!=null&&o.hasOwnProperty("begin")&&!A.isInteger(o.begin)&&!(o.begin&&A.isInteger(o.begin.low)&&A.isInteger(o.begin.high))?"begin: integer|Long 
expected":o.end!=null&&o.hasOwnProperty("end")&&!A.isInteger(o.end)&&!(o.end&&A.isInteger(o.end.low)&&A.isInteger(o.end.high))?"end: integer|Long expected":null},n.fromObject=function(o){if(o instanceof I.onnx.TensorProto.Segment)return o;var i=new I.onnx.TensorProto.Segment;return o.begin!=null&&(A.Long?(i.begin=A.Long.fromValue(o.begin)).unsigned=!1:typeof o.begin=="string"?i.begin=parseInt(o.begin,10):typeof o.begin=="number"?i.begin=o.begin:typeof o.begin=="object"&&(i.begin=new A.LongBits(o.begin.low>>>0,o.begin.high>>>0).toNumber())),o.end!=null&&(A.Long?(i.end=A.Long.fromValue(o.end)).unsigned=!1:typeof o.end=="string"?i.end=parseInt(o.end,10):typeof o.end=="number"?i.end=o.end:typeof o.end=="object"&&(i.end=new A.LongBits(o.end.low>>>0,o.end.high>>>0).toNumber())),i},n.toObject=function(o,i){i||(i={});var s={};if(i.defaults){if(A.Long){var a=new A.Long(0,0,!1);s.begin=i.longs===String?a.toString():i.longs===Number?a.toNumber():a}else s.begin=i.longs===String?"0":0;if(A.Long){var a=new A.Long(0,0,!1);s.end=i.longs===String?a.toString():i.longs===Number?a.toNumber():a}else s.end=i.longs===String?"0":0}return o.begin!=null&&o.hasOwnProperty("begin")&&(typeof o.begin=="number"?s.begin=i.longs===String?String(o.begin):o.begin:s.begin=i.longs===String?A.Long.prototype.toString.call(o.begin):i.longs===Number?new A.LongBits(o.begin.low>>>0,o.begin.high>>>0).toNumber():o.begin),o.end!=null&&o.hasOwnProperty("end")&&(typeof o.end=="number"?s.end=i.longs===String?String(o.end):o.end:s.end=i.longs===String?A.Long.prototype.toString.call(o.end):i.longs===Number?new A.LongBits(o.end.low>>>0,o.end.high>>>0).toNumber():o.end),s},n.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},n.getTypeUrl=function(o){return o===void 0&&(o="type.googleapis.com"),o+"/onnx.TensorProto.Segment"},n}(),e.DataLocation=function(){var n={},t=Object.create(n);return t[n[0]="DEFAULT"]=0,t[n[1]="EXTERNAL"]=1,t}(),e}(),r.SparseTensorProto=function(){function e(n){if(this.dims=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.values=I.onnx.TensorProto.decode(t,t.uint32());break}case 2:{s.indices=I.onnx.TensorProto.decode(t,t.uint32());break}case 3:{if(s.dims&&s.dims.length||(s.dims=[]),(a&7)===2)for(var u=t.uint32()+t.pos;t.pos>>0,t.dims[i].high>>>0).toNumber())}return o},e.toObject=function(t,o){o||(o={});var i={};if((o.arrays||o.defaults)&&(i.dims=[]),o.defaults&&(i.values=null,i.indices=null),t.values!=null&&t.hasOwnProperty("values")&&(i.values=I.onnx.TensorProto.toObject(t.values,o)),t.indices!=null&&t.hasOwnProperty("indices")&&(i.indices=I.onnx.TensorProto.toObject(t.indices,o)),t.dims&&t.dims.length){i.dims=[];for(var s=0;s>>0,t.dims[s].high>>>0).toNumber():t.dims[s]}return i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.SparseTensorProto"},e}(),r.TensorShapeProto=function(){function e(n){if(this.dim=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.dim&&s.dim.length||(s.dim=[]),s.dim.push(I.onnx.TensorShapeProto.Dimension.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.dim!=null&&t.hasOwnProperty("dim")){if(!Array.isArray(t.dim))return"dim: array expected";for(var o=0;o>>3){case 1:{u.dimValue=i.int64();break}case 2:{u.dimParam=i.string();break}case 
3:{u.denotation=i.string();break}default:i.skipType(l&7);break}}return u},n.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},n.verify=function(i){if(typeof i!="object"||i===null)return"object expected";var s={};if(i.dimValue!=null&&i.hasOwnProperty("dimValue")&&(s.value=1,!A.isInteger(i.dimValue)&&!(i.dimValue&&A.isInteger(i.dimValue.low)&&A.isInteger(i.dimValue.high))))return"dimValue: integer|Long expected";if(i.dimParam!=null&&i.hasOwnProperty("dimParam")){if(s.value===1)return"value: multiple values";if(s.value=1,!A.isString(i.dimParam))return"dimParam: string expected"}return i.denotation!=null&&i.hasOwnProperty("denotation")&&!A.isString(i.denotation)?"denotation: string expected":null},n.fromObject=function(i){if(i instanceof I.onnx.TensorShapeProto.Dimension)return i;var s=new I.onnx.TensorShapeProto.Dimension;return i.dimValue!=null&&(A.Long?(s.dimValue=A.Long.fromValue(i.dimValue)).unsigned=!1:typeof i.dimValue=="string"?s.dimValue=parseInt(i.dimValue,10):typeof i.dimValue=="number"?s.dimValue=i.dimValue:typeof i.dimValue=="object"&&(s.dimValue=new A.LongBits(i.dimValue.low>>>0,i.dimValue.high>>>0).toNumber())),i.dimParam!=null&&(s.dimParam=String(i.dimParam)),i.denotation!=null&&(s.denotation=String(i.denotation)),s},n.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.denotation=""),i.dimValue!=null&&i.hasOwnProperty("dimValue")&&(typeof i.dimValue=="number"?a.dimValue=s.longs===String?String(i.dimValue):i.dimValue:a.dimValue=s.longs===String?A.Long.prototype.toString.call(i.dimValue):s.longs===Number?new A.LongBits(i.dimValue.low>>>0,i.dimValue.high>>>0).toNumber():i.dimValue,s.oneofs&&(a.value="dimValue")),i.dimParam!=null&&i.hasOwnProperty("dimParam")&&(a.dimParam=i.dimParam,s.oneofs&&(a.value="dimParam")),i.denotation!=null&&i.hasOwnProperty("denotation")&&(a.denotation=i.denotation),a},n.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},n.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TensorShapeProto.Dimension"},n}(),e}(),r.TypeProto=function(){function e(t){if(t)for(var o=Object.keys(t),i=0;i>>3){case 1:{a.tensorType=I.onnx.TypeProto.Tensor.decode(o,o.uint32());break}case 4:{a.sequenceType=I.onnx.TypeProto.Sequence.decode(o,o.uint32());break}case 5:{a.mapType=I.onnx.TypeProto.Map.decode(o,o.uint32());break}case 9:{a.optionalType=I.onnx.TypeProto.Optional.decode(o,o.uint32());break}case 8:{a.sparseTensorType=I.onnx.TypeProto.SparseTensor.decode(o,o.uint32());break}case 6:{a.denotation=o.string();break}default:o.skipType(u&7);break}}return a},e.decodeDelimited=function(o){return o instanceof H||(o=new H(o)),this.decode(o,o.uint32())},e.verify=function(o){if(typeof o!="object"||o===null)return"object expected";var i={};if(o.tensorType!=null&&o.hasOwnProperty("tensorType")){i.value=1;{var s=I.onnx.TypeProto.Tensor.verify(o.tensorType);if(s)return"tensorType."+s}}if(o.sequenceType!=null&&o.hasOwnProperty("sequenceType")){if(i.value===1)return"value: multiple values";i.value=1;{var s=I.onnx.TypeProto.Sequence.verify(o.sequenceType);if(s)return"sequenceType."+s}}if(o.mapType!=null&&o.hasOwnProperty("mapType")){if(i.value===1)return"value: multiple values";i.value=1;{var s=I.onnx.TypeProto.Map.verify(o.mapType);if(s)return"mapType."+s}}if(o.optionalType!=null&&o.hasOwnProperty("optionalType")){if(i.value===1)return"value: multiple values";i.value=1;{var 
s=I.onnx.TypeProto.Optional.verify(o.optionalType);if(s)return"optionalType."+s}}if(o.sparseTensorType!=null&&o.hasOwnProperty("sparseTensorType")){if(i.value===1)return"value: multiple values";i.value=1;{var s=I.onnx.TypeProto.SparseTensor.verify(o.sparseTensorType);if(s)return"sparseTensorType."+s}}return o.denotation!=null&&o.hasOwnProperty("denotation")&&!A.isString(o.denotation)?"denotation: string expected":null},e.fromObject=function(o){if(o instanceof I.onnx.TypeProto)return o;var i=new I.onnx.TypeProto;if(o.tensorType!=null){if(typeof o.tensorType!="object")throw TypeError(".onnx.TypeProto.tensorType: object expected");i.tensorType=I.onnx.TypeProto.Tensor.fromObject(o.tensorType)}if(o.sequenceType!=null){if(typeof o.sequenceType!="object")throw TypeError(".onnx.TypeProto.sequenceType: object expected");i.sequenceType=I.onnx.TypeProto.Sequence.fromObject(o.sequenceType)}if(o.mapType!=null){if(typeof o.mapType!="object")throw TypeError(".onnx.TypeProto.mapType: object expected");i.mapType=I.onnx.TypeProto.Map.fromObject(o.mapType)}if(o.optionalType!=null){if(typeof o.optionalType!="object")throw TypeError(".onnx.TypeProto.optionalType: object expected");i.optionalType=I.onnx.TypeProto.Optional.fromObject(o.optionalType)}if(o.sparseTensorType!=null){if(typeof o.sparseTensorType!="object")throw TypeError(".onnx.TypeProto.sparseTensorType: object expected");i.sparseTensorType=I.onnx.TypeProto.SparseTensor.fromObject(o.sparseTensorType)}return o.denotation!=null&&(i.denotation=String(o.denotation)),i},e.toObject=function(o,i){i||(i={});var s={};return i.defaults&&(s.denotation=""),o.tensorType!=null&&o.hasOwnProperty("tensorType")&&(s.tensorType=I.onnx.TypeProto.Tensor.toObject(o.tensorType,i),i.oneofs&&(s.value="tensorType")),o.sequenceType!=null&&o.hasOwnProperty("sequenceType")&&(s.sequenceType=I.onnx.TypeProto.Sequence.toObject(o.sequenceType,i),i.oneofs&&(s.value="sequenceType")),o.mapType!=null&&o.hasOwnProperty("mapType")&&(s.mapType=I.onnx.TypeProto.Map.toObject(o.mapType,i),i.oneofs&&(s.value="mapType")),o.denotation!=null&&o.hasOwnProperty("denotation")&&(s.denotation=o.denotation),o.sparseTensorType!=null&&o.hasOwnProperty("sparseTensorType")&&(s.sparseTensorType=I.onnx.TypeProto.SparseTensor.toObject(o.sparseTensorType,i),i.oneofs&&(s.value="sparseTensorType")),o.optionalType!=null&&o.hasOwnProperty("optionalType")&&(s.optionalType=I.onnx.TypeProto.Optional.toObject(o.optionalType,i),i.oneofs&&(s.value="optionalType")),s},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(o){return o===void 0&&(o="type.googleapis.com"),o+"/onnx.TypeProto"},e.Tensor=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=i.int32();break}case 2:{u.shape=I.onnx.TensorShapeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")&&!A.isInteger(i.elemType))return"elemType: integer expected";if(i.shape!=null&&i.hasOwnProperty("shape")){var s=I.onnx.TensorShapeProto.verify(i.shape);if(s)return"shape."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Tensor)return i;var s=new I.onnx.TypeProto.Tensor;if(i.elemType!=null&&(s.elemType=i.elemType|0),i.shape!=null){if(typeof i.shape!="object")throw TypeError(".onnx.TypeProto.Tensor.shape: object 
expected");s.shape=I.onnx.TensorShapeProto.fromObject(i.shape)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=0,a.shape=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=i.elemType),i.shape!=null&&i.hasOwnProperty("shape")&&(a.shape=I.onnx.TensorShapeProto.toObject(i.shape,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Tensor"},t}(),e.Sequence=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=I.onnx.TypeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")){var s=I.onnx.TypeProto.verify(i.elemType);if(s)return"elemType."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Sequence)return i;var s=new I.onnx.TypeProto.Sequence;if(i.elemType!=null){if(typeof i.elemType!="object")throw TypeError(".onnx.TypeProto.Sequence.elemType: object expected");s.elemType=I.onnx.TypeProto.fromObject(i.elemType)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=I.onnx.TypeProto.toObject(i.elemType,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Sequence"},t}(),e.Map=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.keyType=i.int32();break}case 2:{u.valueType=I.onnx.TypeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.keyType!=null&&i.hasOwnProperty("keyType")&&!A.isInteger(i.keyType))return"keyType: integer expected";if(i.valueType!=null&&i.hasOwnProperty("valueType")){var s=I.onnx.TypeProto.verify(i.valueType);if(s)return"valueType."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Map)return i;var s=new I.onnx.TypeProto.Map;if(i.keyType!=null&&(s.keyType=i.keyType|0),i.valueType!=null){if(typeof i.valueType!="object")throw TypeError(".onnx.TypeProto.Map.valueType: object expected");s.valueType=I.onnx.TypeProto.fromObject(i.valueType)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.keyType=0,a.valueType=null),i.keyType!=null&&i.hasOwnProperty("keyType")&&(a.keyType=i.keyType),i.valueType!=null&&i.hasOwnProperty("valueType")&&(a.valueType=I.onnx.TypeProto.toObject(i.valueType,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Map"},t}(),e.Optional=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=I.onnx.TypeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")){var 
s=I.onnx.TypeProto.verify(i.elemType);if(s)return"elemType."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Optional)return i;var s=new I.onnx.TypeProto.Optional;if(i.elemType!=null){if(typeof i.elemType!="object")throw TypeError(".onnx.TypeProto.Optional.elemType: object expected");s.elemType=I.onnx.TypeProto.fromObject(i.elemType)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=I.onnx.TypeProto.toObject(i.elemType,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Optional"},t}(),e.SparseTensor=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=i.int32();break}case 2:{u.shape=I.onnx.TensorShapeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")&&!A.isInteger(i.elemType))return"elemType: integer expected";if(i.shape!=null&&i.hasOwnProperty("shape")){var s=I.onnx.TensorShapeProto.verify(i.shape);if(s)return"shape."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.SparseTensor)return i;var s=new I.onnx.TypeProto.SparseTensor;if(i.elemType!=null&&(s.elemType=i.elemType|0),i.shape!=null){if(typeof i.shape!="object")throw TypeError(".onnx.TypeProto.SparseTensor.shape: object expected");s.shape=I.onnx.TensorShapeProto.fromObject(i.shape)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=0,a.shape=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=i.elemType),i.shape!=null&&i.hasOwnProperty("shape")&&(a.shape=I.onnx.TensorShapeProto.toObject(i.shape,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.SparseTensor"},t}(),e}(),r.OperatorSetIdProto=function(){function e(n){if(n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.domain=t.string();break}case 2:{s.version=t.int64();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){return typeof t!="object"||t===null?"object expected":t.domain!=null&&t.hasOwnProperty("domain")&&!A.isString(t.domain)?"domain: string expected":t.version!=null&&t.hasOwnProperty("version")&&!A.isInteger(t.version)&&!(t.version&&A.isInteger(t.version.low)&&A.isInteger(t.version.high))?"version: integer|Long expected":null},e.fromObject=function(t){if(t instanceof I.onnx.OperatorSetIdProto)return t;var o=new I.onnx.OperatorSetIdProto;return t.domain!=null&&(o.domain=String(t.domain)),t.version!=null&&(A.Long?(o.version=A.Long.fromValue(t.version)).unsigned=!1:typeof t.version=="string"?o.version=parseInt(t.version,10):typeof t.version=="number"?o.version=t.version:typeof t.version=="object"&&(o.version=new A.LongBits(t.version.low>>>0,t.version.high>>>0).toNumber())),o},e.toObject=function(t,o){o||(o={});var i={};if(o.defaults)if(i.domain="",A.Long){var s=new A.Long(0,0,!1);i.version=o.longs===String?s.toString():o.longs===Number?s.toNumber():s}else i.version=o.longs===String?"0":0;return 
t.domain!=null&&t.hasOwnProperty("domain")&&(i.domain=t.domain),t.version!=null&&t.hasOwnProperty("version")&&(typeof t.version=="number"?i.version=o.longs===String?String(t.version):t.version:i.version=o.longs===String?A.Long.prototype.toString.call(t.version):o.longs===Number?new A.LongBits(t.version.low>>>0,t.version.high>>>0).toNumber():t.version),i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.OperatorSetIdProto"},e}(),r.OperatorStatus=function(){var e={},n=Object.create(e);return n[e[0]="EXPERIMENTAL"]=0,n[e[1]="STABLE"]=1,n}(),r.FunctionProto=function(){function e(n){if(this.input=[],this.output=[],this.attribute=[],this.attributeProto=[],this.node=[],this.opsetImport=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.name=t.string();break}case 4:{s.input&&s.input.length||(s.input=[]),s.input.push(t.string());break}case 5:{s.output&&s.output.length||(s.output=[]),s.output.push(t.string());break}case 6:{s.attribute&&s.attribute.length||(s.attribute=[]),s.attribute.push(t.string());break}case 11:{s.attributeProto&&s.attributeProto.length||(s.attributeProto=[]),s.attributeProto.push(I.onnx.AttributeProto.decode(t,t.uint32()));break}case 7:{s.node&&s.node.length||(s.node=[]),s.node.push(I.onnx.NodeProto.decode(t,t.uint32()));break}case 8:{s.docString=t.string();break}case 9:{s.opsetImport&&s.opsetImport.length||(s.opsetImport=[]),s.opsetImport.push(I.onnx.OperatorSetIdProto.decode(t,t.uint32()));break}case 10:{s.domain=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.name!=null&&t.hasOwnProperty("name")&&!A.isString(t.name))return"name: string expected";if(t.input!=null&&t.hasOwnProperty("input")){if(!Array.isArray(t.input))return"input: array expected";for(var o=0;o{"use strict";ko();ka();Ge=un(dn());Gr();zr=class{static arraysEqual(e,n){if(e.length!==n.length)return!1;for(let t=0;t1&&f>1)return;a[s-u]=Math.max(l,f)}return a}static index(e,n){let t=new Array(n.length);return r.fillIndex(e,n,t),t}static fillIndex(e,n,t){let o=e.length-n.length;for(let i=0;i=0;v--)l[v]=T%s[v],T=Math.floor(T/s[v]);h||(r.fillIndex(l,e.dims,f),p=e.get(f)),g||(r.fillIndex(l,n.dims,c),b=n.get(c)),u.set(l,t(p,b))}}return u}}static isValidBroadcast(e,n){let t=e.length,o=n.length;if(t>o)return!1;for(let i=1;i<=t;i++)if(e[t-i]!==1&&e[t-i]!==n[o-i])return!1;return!0}static getBroadcastDims(e,n){let t=e.length,o=[];for(let i=0;i1&&a===1&&o.unshift(s)}return o}},No=class{static getShapeOfGemmResult(e,n,t,o,i){if(e.length!==2||t.length!==2)throw new Error("shape need to be of size 2");let s,a,u;n?(s=e[1],a=e[0]):(s=e[0],a=e[1]);let l=-1;if(o?(u=t[0],l=1):(u=t[1],l=0),t[l]!==a)throw new Error("dimension mismatch");if(s<=0||u<=0||a<=0)throw new Error("invalid shape specified");if(i&&!gt.isValidBroadcast(i,[s,u]))throw new Error("gemm: invalid bias shape for broadcast");return[s,u,a]}},lt=class r{static tensorDataTypeFromProto(e){switch(e){case Ge.onnx.TensorProto.DataType.INT8:return"int8";case Ge.onnx.TensorProto.DataType.UINT8:return"uint8";case Ge.onnx.TensorProto.DataType.BOOL:return"bool";case Ge.onnx.TensorProto.DataType.INT16:return"int16";case Ge.onnx.TensorProto.DataType.UINT16:return"uint16";case Ge.onnx.TensorProto.DataType.INT32:return"int32";case Ge.onnx.TensorProto.DataType.UINT32:return"uint32";case 
Ge.onnx.TensorProto.DataType.FLOAT:return"float32";case Ge.onnx.TensorProto.DataType.DOUBLE:return"float64";case Ge.onnx.TensorProto.DataType.STRING:return"string";case Ge.onnx.TensorProto.DataType.INT64:return"int32";case Ge.onnx.TensorProto.DataType.UINT64:return"uint32";default:throw new Error(`unsupported data type: ${Ge.onnx.TensorProto.DataType[e]}`)}}static tensorDataTypeStringToEnum(e){switch(e){case"int8":return Ge.onnx.TensorProto.DataType.INT8;case"uint8":return Ge.onnx.TensorProto.DataType.UINT8;case"bool":return Ge.onnx.TensorProto.DataType.BOOL;case"int16":return Ge.onnx.TensorProto.DataType.INT16;case"uint16":return Ge.onnx.TensorProto.DataType.UINT16;case"int32":return Ge.onnx.TensorProto.DataType.INT32;case"uint32":return Ge.onnx.TensorProto.DataType.UINT32;case"float32":return Ge.onnx.TensorProto.DataType.FLOAT;case"float64":return Ge.onnx.TensorProto.DataType.DOUBLE;case"string":return Ge.onnx.TensorProto.DataType.STRING;case"int64":return Ge.onnx.TensorProto.DataType.INT64;case"uint64":return Ge.onnx.TensorProto.DataType.UINT64;default:throw new Error(`unsupported data type: ${e}`)}}static tensorDimsFromProto(e){return e.map(n=>dr.isLong(n)?n.toNumber():n)}static tensorValueTypeFromProto(e){return{tensorType:r.tensorDataTypeFromProto(e.elemType),shape:{dims:r.tensorDimsFromProto(e.shape.dim.map(n=>n.dimValue))}}}static tensorDimsFromORTFormat(e){let n=[];for(let t=0;te.length)throw new Error(`invalid dimension of ${n} for sizeFromDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,n,e.length)}static sizeToDimension(e,n){if(n<0||n>e.length)throw new Error(`invalid dimension of ${n} for sizeToDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,0,n)}static getSizeFromDimensionRange(e,n,t){let o=1;for(let i=n;i=0;--o)t[o]=t[o+1]*e[o+1];return t}static transpose(e){return e.slice().reverse()}static indicesToOffset(e,n,t){t===void 0&&(t=e.length);let o=0;for(let i=0;i=n)throw new Error("unsupported axis for this operation.");return e<0?e+n:e}static normalizeAxes(e,n){return e.map(t=>this.normalizeAxis(t,n))}static incrementIndex(e,n,t){if(n.length===0||e.length===0)throw new Error("Index incrementing unsupported for scalar Tensor");if(t===void 0)t=n.length;else if(t<=0||t>n.length)throw new Error("Incorrect axis to increment on");for(let o=t-1;o>=0&&(e[o]++,!(e[o]=e.length)throw new Error("the dimension with value zero exceeds the dimension size of the input tensor");o[u]=e[u]}else o[u]=n[u];s*=o[u]}}let a=r.size(e);if(i!==-1){if(a%s!==0)throw new Error(`the input tensor cannot be reshaped to the requested shape. 
Input shape: [${e}] Output shape: [${n}]`);o[i]=a/s}else if(s!==a)throw new Error("reshapedDims and originalDims don't have matching sizes");return o}static sortBasedOnPerm(e,n){return n?n.map(t=>e[t]):e.slice().reverse()}static padShape(e,n){let t=e.length;return e.map((o,i)=>o+n[i]+n[i+t])}static areEqual(e,n){return e.length!==n.length?!1:e.every((t,o)=>t===n[o])}static validateDimsAndCalcSize(e){if(e.length>6)throw new TypeError("Only rank 0 to 6 is supported for tensor shape.");let n=1;for(let t of e){if(!Number.isInteger(t))throw new TypeError(`Invalid shape: ${t} is not an integer`);if(t<0||t>2147483647)throw new TypeError(`Invalid shape: length ${t} is not allowed`);n*=t}return n}static flattenShape(e,n){n<0&&(n+=e.length);let t=e.reduce((s,a)=>s*a,1),o=e.slice(n).reduce((s,a)=>s*a,1);return[t/o,o]}static squeezeShape(e,n){let t=new Array;n=r.normalizeAxes(n,e.length);for(let o=0;o=0;if(i&&e[o]!==1)throw new Error("squeeze an axis of size different than 1");(n.length===0&&e[o]>1||n.length>0&&!i)&&t.push(e[o])}return t}static unsqueezeShape(e,n){let t=new Array(e.length+n.length);t.fill(0);for(let i=0;i=t.length)throw new Error("'axes' has an out of range axis");if(t[s]!==0)throw new Error("'axes' has a duplicate axis");t[s]=1}let o=0;for(let i=0;i=t.length?t.push(n[a+2]):t[a]=n[a+2];for(let a=0;a=t[a]||s[a+t.length]>=t[a])throw new Error("pads should be smaller than kernel")}}static adjustPadsBasedOnAutoPad(e,n,t,o,i,s){if(s){if(i.length!==2*(e.length-2))throw new Error("length of pads should be twice the length of data dimensions");if(n.length!==e.length-2)throw new Error("length of strides should be the length of data dimensions");if(o.length!==e.length-2)throw new Error("length of kernel shapes should be the length of data dimensions");for(let a=0;a{"use strict";Cf=un(xc());ka();Cn();ve=un(dn());De();Ka=ee.experimental.fbs,Qe=class r{constructor(e,n,t,o,i,s=Cf.Guid.create()){this.dims=e;this.type=n;this.dataProvider=t;this.asyncDataProvider=o;this.cache=i;this.dataId=s;this.size=J.validateDimsAndCalcSize(e);let a=this.size,u=t===void 0&&o===void 0&&i===void 0;if(i!==void 0&&i.length!==a)throw new RangeError("Input dims doesn't match data length.");if(n==="string"){if(i!==void 0&&(!Array.isArray(i)||!i.every(l=>typeof l=="string")))throw new TypeError("cache should be a string array");u&&(this.cache=new Array(a))}else{if(i!==void 0){let l=kf(n);if(!(i instanceof l))throw new TypeError(`cache should be type ${l.name}`)}if(u){let l=new ArrayBuffer(a*wv(n));this.cache=Tv(l,n)}}}get data(){if(this.cache===void 0){let e=this.dataProvider(this.dataId);if(e.length!==this.size)throw new Error("Length of data provided by the Data Provider is inconsistent with the dims of this Tensor.");this.cache=e}return this.cache}get stringData(){if(this.type!=="string")throw new TypeError("data type is not string");return this.data}get integerData(){switch(this.type){case"uint8":case"int8":case"uint16":case"int16":case"int32":case"uint32":case"bool":return this.data;default:throw new TypeError("data type is not integer (uint8, int8, uint16, int16, int32, uint32, bool)")}}get floatData(){switch(this.type){case"float32":case"float64":return this.data;default:throw new TypeError("data type is not float (float32, float64)")}}get numberData(){if(this.type!=="string")return this.data;throw new TypeError("type cannot be non-number (string)")}get(e){return this.data[J.indicesToOffset(e,this.strides)]}set(e,n){this.data[J.indicesToOffset(e,this.strides)]=n}async getData(){return this.cache===void 
0&&(this.cache=await this.asyncDataProvider(this.dataId)),this.cache}get strides(){return this._strides||(this._strides=J.computeStrides(this.dims)),this._strides}static fromProto(e){if(!e)throw new Error("cannot construct Value from an empty tensor");let n=lt.tensorDataTypeFromProto(e.dataType),t=lt.tensorDimsFromProto(e.dims),o=new r(t,n);if(n==="string")e.stringData.forEach((i,s)=>{o.data[s]=Rn(i)});else if(e.rawData&&typeof e.rawData.byteLength=="number"&&e.rawData.byteLength>0){let i=o.data,s=new DataView(e.rawData.buffer,e.rawData.byteOffset,e.rawData.byteLength),a=Of(e.dataType),u=e.rawData.byteLength/a;if(e.rawData.byteLength%a!==0)throw new Error("invalid buffer length");if(i.length!==u)throw new Error("buffer length mismatch");for(let l=0;l0){let i=o.data,s=new DataView(e.rawDataArray().buffer,e.rawDataArray().byteOffset,e.rawDataLength()),a=Of(e.dataType()),u=e.rawDataLength()/a;if(e.rawDataLength()%a!==0)throw new Error("invalid buffer length");if(i.length!==u)throw new Error("buffer length mismatch");for(let l=0;ltypeof require<"u"?require:typeof Proxy<"u"?new Proxy(r,{get:(e,n)=>(typeof require<"u"?require:e)[n]}):r)(function(r){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+r+'" is not supported')});var C=(r,e)=>()=>(r&&(e=r(r=0)),e);var Je=(r,e)=>()=>(e||r((e={exports:{}}).exports,e),e.exports),un=(r,e)=>{for(var n in e)bo(r,n,{get:e[n],enumerable:!0})},Fl=(r,e,n,t)=>{if(e&&typeof e=="object"||typeof e=="function")for(let o of rv(e))!ov.call(r,o)&&o!==n&&bo(r,o,{get:()=>e[o],enumerable:!(t=tv(e,o))||t.enumerable});return r};var ln=(r,e,n)=>(n=r!=null?ev(nv(r)):{},Fl(e||!r||!r.__esModule?bo(n,"default",{value:r,enumerable:!0}):n,r)),Pn=r=>Fl(bo({},"__esModule",{value:!0}),r);var yo,Dr,vr,iv,xo,vo=C(()=>{"use strict";yo=new Map,Dr=[],vr=(r,e,n)=>{if(e&&typeof e.init=="function"&&typeof e.createInferenceSessionHandler=="function"){let t=yo.get(r);if(t===void 0)yo.set(r,{backend:e,priority:n});else{if(t.priority>n)return;if(t.priority===n&&t.backend!==e)throw new Error(`cannot register backend "${r}" using priority ${n}`)}if(n>=0){let o=Dr.indexOf(r);o!==-1&&Dr.splice(o,1);for(let i=0;i{let e=yo.get(r);if(!e)return"backend not found.";if(e.initialized)return e.backend;if(e.aborted)return e.error;{let n=!!e.initPromise;try{return n||(e.initPromise=e.backend.init(r)),await e.initPromise,e.initialized=!0,e.backend}catch(t){return n||(e.error=`${t}`,e.aborted=!0),e.error}finally{delete e.initPromise}}},xo=async r=>{let e=r.executionProviders||[],n=e.map(u=>typeof u=="string"?u:u.name),t=n.length===0?Dr:n,o,i=[],s=new Set;for(let u of t){let l=await iv(u);typeof l=="string"?i.push({name:u,err:l}):(o||(o=l),o===l&&s.add(u))}if(!o)throw new Error(`no available backend found. 
ERR: ${i.map(u=>`[${u.name}] ${u.err}`).join(", ")}`);for(let{name:u,err:l}of i)n.includes(u)&&console.warn(`removing requested execution provider "${u}" from session options because it is not available: ${l}`);let a=e.filter(u=>s.has(typeof u=="string"?u:u.name));return[o,new Proxy(r,{get:(u,l)=>l==="executionProviders"?a:Reflect.get(u,l)})]}});var Ml=C(()=>{"use strict";vo()});var Vl,Gl=C(()=>{"use strict";Vl="1.19.0"});var Ul,It,Ia=C(()=>{"use strict";Gl();Ul="warning",It={wasm:{},webgl:{},webgpu:{},versions:{common:Vl},set logLevel(r){if(r!==void 0){if(typeof r!="string"||["verbose","info","warning","error","fatal"].indexOf(r)===-1)throw new Error(`Unsupported logging level: ${r}`);Ul=r}},get logLevel(){return Ul}};Object.defineProperty(It,"logLevel",{enumerable:!0})});var le,Wl=C(()=>{"use strict";Ia();le=It});var Hl,ql,jl=C(()=>{"use strict";Hl=(r,e)=>{let n=typeof document<"u"?document.createElement("canvas"):new OffscreenCanvas(1,1);n.width=r.dims[3],n.height=r.dims[2];let t=n.getContext("2d");if(t!=null){let o,i;e?.tensorLayout!==void 0&&e.tensorLayout==="NHWC"?(o=r.dims[2],i=r.dims[3]):(o=r.dims[3],i=r.dims[2]);let s=e?.format!==void 0?e.format:"RGB",a=e?.norm,u,l;a===void 0||a.mean===void 0?u=[255,255,255,255]:typeof a.mean=="number"?u=[a.mean,a.mean,a.mean,a.mean]:(u=[a.mean[0],a.mean[1],a.mean[2],0],a.mean[3]!==void 0&&(u[3]=a.mean[3])),a===void 0||a.bias===void 0?l=[0,0,0,0]:typeof a.bias=="number"?l=[a.bias,a.bias,a.bias,a.bias]:(l=[a.bias[0],a.bias[1],a.bias[2],0],a.bias[3]!==void 0&&(l[3]=a.bias[3]));let f=i*o,c=0,p=f,b=f*2,h=-1;s==="RGBA"?(c=0,p=f,b=f*2,h=f*3):s==="RGB"?(c=0,p=f,b=f*2):s==="RBG"&&(c=0,b=f,p=f*2);for(let g=0;g{let n=typeof document<"u"?document.createElement("canvas").getContext("2d"):new OffscreenCanvas(1,1).getContext("2d"),t;if(n!=null){let o,i,s;e?.tensorLayout!==void 0&&e.tensorLayout==="NHWC"?(o=r.dims[2],i=r.dims[1],s=r.dims[3]):(o=r.dims[3],i=r.dims[2],s=r.dims[1]);let a=e!==void 0&&e.format!==void 0?e.format:"RGB",u=e?.norm,l,f;u===void 0||u.mean===void 0?l=[255,255,255,255]:typeof u.mean=="number"?l=[u.mean,u.mean,u.mean,u.mean]:(l=[u.mean[0],u.mean[1],u.mean[2],255],u.mean[3]!==void 0&&(l[3]=u.mean[3])),u===void 0||u.bias===void 0?f=[0,0,0,0]:typeof u.bias=="number"?f=[u.bias,u.bias,u.bias,u.bias]:(f=[u.bias[0],u.bias[1],u.bias[2],0],u.bias[3]!==void 0&&(f[3]=u.bias[3]));let c=i*o;if(e!==void 0&&(e.format!==void 0&&s===4&&e.format!=="RGBA"||s===3&&e.format!=="RGB"&&e.format!=="BGR"))throw new Error("Tensor format doesn't match input tensor dims");let p=4,b=0,h=1,g=2,T=3,w=0,v=c,S=c*2,$=-1;a==="RGBA"?(w=0,v=c,S=c*2,$=c*3):a==="RGB"?(w=0,v=c,S=c*2):a==="RBG"&&(w=0,S=c,v=c*2),t=n.createImageData(o,i);for(let P=0;P{"use strict";wo();Sa=(r,e)=>{if(r===void 0)throw new Error("Image buffer must be defined");if(e.height===void 0||e.width===void 0)throw new Error("Image height and width must be defined");if(e.tensorLayout==="NHWC")throw new Error("NHWC Tensor layout is not supported yet");let{height:n,width:t}=e,o=e.norm??{mean:255,bias:0},i,s;typeof o.mean=="number"?i=[o.mean,o.mean,o.mean,o.mean]:i=[o.mean[0],o.mean[1],o.mean[2],o.mean[3]??255],typeof o.bias=="number"?s=[o.bias,o.bias,o.bias,o.bias]:s=[o.bias[0],o.bias[1],o.bias[2],o.bias[3]??0];let a=e.format!==void 0?e.format:"RGBA",u=e.tensorFormat!==void 0&&e.tensorFormat!==void 0?e.tensorFormat:"RGB",l=n*t,f=u==="RGBA"?new Float32Array(l*4):new 
Float32Array(l*3),c=4,p=0,b=1,h=2,g=3,T=0,w=l,v=l*2,S=-1;a==="RGB"&&(c=3,p=0,b=1,h=2,g=-1),u==="RGBA"?S=l*3:u==="RBG"?(T=0,v=l,w=l*2):u==="BGR"&&(v=0,w=l,T=l*2);for(let P=0;P{let n=typeof HTMLImageElement<"u"&&r instanceof HTMLImageElement,t=typeof ImageData<"u"&&r instanceof ImageData,o=typeof ImageBitmap<"u"&&r instanceof ImageBitmap,i=typeof r=="string",s,a=e??{},u=()=>{if(typeof document<"u")return document.createElement("canvas");if(typeof OffscreenCanvas<"u")return new OffscreenCanvas(1,1);throw new Error("Canvas is not supported")},l=f=>f instanceof HTMLCanvasElement||f instanceof OffscreenCanvas?f.getContext("2d"):null;if(n){let f=u();f.width=r.width,f.height=r.height;let c=l(f);if(c!=null){let p=r.height,b=r.width;if(e!==void 0&&e.resizedHeight!==void 0&&e.resizedWidth!==void 0&&(p=e.resizedHeight,b=e.resizedWidth),e!==void 0){if(a=e,e.tensorFormat!==void 0)throw new Error("Image input config format must be RGBA for HTMLImageElement");a.tensorFormat="RGBA",a.height=p,a.width=b}else a.tensorFormat="RGBA",a.height=p,a.width=b;c.drawImage(r,0,0),s=c.getImageData(0,0,b,p).data}else throw new Error("Can not access image data")}else if(t){let f,c;if(e!==void 0&&e.resizedWidth!==void 0&&e.resizedHeight!==void 0?(f=e.resizedHeight,c=e.resizedWidth):(f=r.height,c=r.width),e!==void 0&&(a=e),a.format="RGBA",a.height=f,a.width=c,e!==void 0){let p=u();p.width=c,p.height=f;let b=l(p);if(b!=null)b.putImageData(r,0,0),s=b.getImageData(0,0,c,f).data;else throw new Error("Can not access image data")}else s=r.data}else if(o){if(e===void 0)throw new Error("Please provide image config with format for Imagebitmap");let f=u();f.width=r.width,f.height=r.height;let c=l(f);if(c!=null){let p=r.height,b=r.width;return c.drawImage(r,0,0,b,p),s=c.getImageData(0,0,b,p).data,a.height=p,a.width=b,Sa(s,a)}else throw new Error("Can not access image data")}else{if(i)return new Promise((f,c)=>{let p=u(),b=l(p);if(!r||!b)return c();let h=new Image;h.crossOrigin="Anonymous",h.src=r,h.onload=()=>{p.width=h.width,p.height=h.height,b.drawImage(h,0,0,p.width,p.height);let g=b.getImageData(0,0,p.width,p.height);a.height=p.height,a.width=p.width,f(Sa(g.data,a))}});throw new Error("Input data provided is not supported - aborted tensor creation")}if(s!==void 0)return Sa(s,a);throw new Error("Input data provided is not supported - aborted tensor creation")},Xl=(r,e)=>{let{width:n,height:t,download:o,dispose:i}=e,s=[1,t,n,4];return new ht({location:"texture",type:"float32",texture:r,dims:s,download:o,dispose:i})},Zl=(r,e)=>{let{dataType:n,dims:t,download:o,dispose:i}=e;return new ht({location:"gpu-buffer",type:n??"float32",gpuBuffer:r,dims:t,download:o,dispose:i})},Yl=(r,e,n)=>new ht({location:"cpu-pinned",type:r,data:e,dims:n??[e.length]})});var Br,On,Ql,ec,tc=C(()=>{"use strict";Br=new Map([["float32",Float32Array],["uint8",Uint8Array],["int8",Int8Array],["uint16",Uint16Array],["int16",Int16Array],["int32",Int32Array],["bool",Uint8Array],["float64",Float64Array],["uint32",Uint32Array]]),On=new Map([[Float32Array,"float32"],[Uint8Array,"uint8"],[Int8Array,"int8"],[Uint16Array,"uint16"],[Int16Array,"int16"],[Int32Array,"int32"],[Float64Array,"float64"],[Uint32Array,"uint32"]]),Ql=!1,ec=()=>{if(!Ql){Ql=!0;let r=typeof BigInt64Array<"u"&&BigInt64Array.from,e=typeof BigUint64Array<"u"&&BigUint64Array.from,n=typeof 
Float16Array<"u"&&Float16Array.from;r&&(Br.set("int64",BigInt64Array),On.set(BigInt64Array,"int64")),e&&(Br.set("uint64",BigUint64Array),On.set(BigUint64Array,"uint64")),n?(Br.set("float16",Float16Array),On.set(Float16Array,"float16")):Br.set("float16",Uint16Array)}}});var rc,nc,oc=C(()=>{"use strict";wo();rc=r=>{let e=1;for(let n=0;n{switch(r.location){case"cpu":return new ht(r.type,r.data,e);case"cpu-pinned":return new ht({location:"cpu-pinned",data:r.data,type:r.type,dims:e});case"texture":return new ht({location:"texture",texture:r.texture,type:r.type,dims:e});case"gpu-buffer":return new ht({location:"gpu-buffer",gpuBuffer:r.gpuBuffer,type:r.type,dims:e});default:throw new Error(`tensorReshape: tensor location ${r.location} is not supported`)}}});var ht,wo=C(()=>{"use strict";jl();Jl();tc();oc();ht=class{constructor(e,n,t){ec();let o,i;if(typeof e=="object"&&"location"in e)switch(this.dataLocation=e.location,o=e.type,i=e.dims,e.location){case"cpu-pinned":{let a=Br.get(o);if(!a)throw new TypeError(`unsupported type "${o}" to create tensor from pinned buffer`);if(!(e.data instanceof a))throw new TypeError(`buffer should be of type ${a.name}`);this.cpuData=e.data;break}case"texture":{if(o!=="float32")throw new TypeError(`unsupported type "${o}" to create tensor from texture`);this.gpuTextureData=e.texture,this.downloader=e.download,this.disposer=e.dispose;break}case"gpu-buffer":{if(o!=="float32"&&o!=="float16"&&o!=="int32"&&o!=="int64"&&o!=="uint32"&&o!=="uint8"&&o!=="bool")throw new TypeError(`unsupported type "${o}" to create tensor from gpu buffer`);this.gpuBufferData=e.gpuBuffer,this.downloader=e.download,this.disposer=e.dispose;break}default:throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`)}else{let a,u;if(typeof e=="string")if(o=e,u=t,e==="string"){if(!Array.isArray(n))throw new TypeError("A string tensor's data must be a string array.");a=n}else{let l=Br.get(e);if(l===void 0)throw new TypeError(`Unsupported tensor type: ${e}.`);if(Array.isArray(n)){if(e==="float16"&&l===Uint16Array)throw new TypeError("Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.");e==="uint64"||e==="int64"?a=l.from(n,BigInt):a=l.from(n)}else if(n instanceof l)a=n;else throw new TypeError(`A ${o} tensor's data must be type of ${l}`)}else if(u=n,Array.isArray(e)){if(e.length===0)throw new TypeError("Tensor type cannot be inferred from an empty array.");let l=typeof e[0];if(l==="string")o="string",a=e;else if(l==="boolean")o="bool",a=Uint8Array.from(e);else throw new TypeError(`Invalid element type of data array: ${l}.`)}else{let l=On.get(e.constructor);if(l===void 0)throw new TypeError(`Unsupported type for tensor data: ${e.constructor}.`);o=l,a=e}if(u===void 0)u=[a.length];else if(!Array.isArray(u))throw new TypeError("A tensor's dims must be a number array");i=u,this.cpuData=a,this.dataLocation="cpu"}let s=rc(i);if(this.cpuData&&s!==this.cpuData.length)throw new Error(`Tensor's size(${s}) does not match data length(${this.cpuData.length}).`);this.type=o,this.dims=i,this.size=s}static async fromImage(e,n){return Kl(e,n)}static fromTexture(e,n){return Xl(e,n)}static fromGpuBuffer(e,n){return Zl(e,n)}static fromPinnedBuffer(e,n,t){return Yl(e,n,t)}toDataURL(e){return Hl(this,e)}toImageData(e){return ql(this,e)}get data(){if(this.ensureValid(),!this.cpuData)throw new Error("The data is not on CPU. 
Use `getData()` to download GPU data to CPU, or use `texture` or `gpuBuffer` property to access the GPU data directly.");return this.cpuData}get location(){return this.dataLocation}get texture(){if(this.ensureValid(),!this.gpuTextureData)throw new Error("The data is not stored as a WebGL texture.");return this.gpuTextureData}get gpuBuffer(){if(this.ensureValid(),!this.gpuBufferData)throw new Error("The data is not stored as a WebGPU buffer.");return this.gpuBufferData}async getData(e){switch(this.ensureValid(),this.dataLocation){case"cpu":case"cpu-pinned":return this.data;case"texture":case"gpu-buffer":{if(!this.downloader)throw new Error("The current tensor is not created with a specified data downloader.");if(this.isDownloading)throw new Error("The current tensor is being downloaded.");try{this.isDownloading=!0;let n=await this.downloader();return this.downloader=void 0,this.dataLocation="cpu",this.cpuData=n,e&&this.disposer&&(this.disposer(),this.disposer=void 0),n}finally{this.isDownloading=!1}}default:throw new Error(`cannot get data from location: ${this.dataLocation}`)}}dispose(){if(this.isDownloading)throw new Error("The current tensor is being downloaded.");this.disposer&&(this.disposer(),this.disposer=void 0),this.cpuData=void 0,this.gpuTextureData=void 0,this.gpuBufferData=void 0,this.downloader=void 0,this.isDownloading=void 0,this.dataLocation="none"}ensureValid(){if(this.dataLocation==="none")throw new Error("The tensor is disposed.")}reshape(e){if(this.ensureValid(),this.downloader||this.disposer)throw new Error("Cannot reshape a tensor that owns GPU resource.");return nc(this,e)}}});var it,To=C(()=>{"use strict";wo();it=ht});var _o,ic,St,yt,$a=C(()=>{"use strict";Ia();_o=(r,e)=>{(typeof It.trace>"u"?!It.wasm.trace:!It.trace)||console.timeStamp(`${r}::ORT::${e}`)},ic=(r,e)=>{let n=new Error().stack?.split(/\r\n|\r|\n/g)||[],t=!1;for(let o=0;o{(typeof It.trace>"u"?!It.wasm.trace:!It.trace)||ic("BEGIN",r)},yt=r=>{(typeof It.trace>"u"?!It.wasm.trace:!It.trace)||ic("END",r)}});var Io,ac=C(()=>{"use strict";vo();To();$a();Io=class r{constructor(e){this.handler=e}async run(e,n,t){St();let o={},i={};if(typeof e!="object"||e===null||e instanceof it||Array.isArray(e))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let s=!0;if(typeof n=="object"){if(n===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(n instanceof it)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(n)){if(n.length===0)throw new TypeError("'fetches' cannot be an empty array.");s=!1;for(let l of n){if(typeof l!="string")throw new TypeError("'fetches' must be a string array or an object.");if(this.outputNames.indexOf(l)===-1)throw new RangeError(`'fetches' contains invalid output name: ${l}.`);o[l]=null}if(typeof t=="object"&&t!==null)i=t;else if(typeof t<"u")throw new TypeError("'options' must be an object.")}else{let l=!1,f=Object.getOwnPropertyNames(n);for(let c of this.outputNames)if(f.indexOf(c)!==-1){let p=n[c];(p===null||p instanceof it)&&(l=!0,s=!1,o[c]=p)}if(l){if(typeof t=="object"&&t!==null)i=t;else if(typeof t<"u")throw new TypeError("'options' must be an object.")}else i=n}}else if(typeof n<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let l of this.inputNames)if(typeof e[l]>"u")throw new Error(`input '${l}' is missing in 'feeds'.`);if(s)for(let l of this.outputNames)o[l]=null;let a=await this.handler.run(e,o,i),u={};for(let l in 
a)if(Object.hasOwnProperty.call(a,l)){let f=a[l];f instanceof it?u[l]=f:u[l]=new it(f.type,f.data,f.dims)}return yt(),u}async release(){return this.handler.dispose()}static async create(e,n,t,o){St();let i,s={};if(typeof e=="string"){if(i=e,typeof n=="object"&&n!==null)s=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else if(e instanceof Uint8Array){if(i=e,typeof n=="object"&&n!==null)s=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else if(e instanceof ArrayBuffer||typeof SharedArrayBuffer<"u"&&e instanceof SharedArrayBuffer){let f=e,c=0,p=e.byteLength;if(typeof n=="object"&&n!==null)s=n;else if(typeof n=="number"){if(c=n,!Number.isSafeInteger(c))throw new RangeError("'byteOffset' must be an integer.");if(c<0||c>=f.byteLength)throw new RangeError(`'byteOffset' is out of range [0, ${f.byteLength}).`);if(p=e.byteLength-c,typeof t=="number"){if(p=t,!Number.isSafeInteger(p))throw new RangeError("'byteLength' must be an integer.");if(p<=0||c+p>f.byteLength)throw new RangeError(`'byteLength' is out of range (0, ${f.byteLength-c}].`);if(typeof o=="object"&&o!==null)s=o;else if(typeof o<"u")throw new TypeError("'options' must be an object.")}else if(typeof t<"u")throw new TypeError("'byteLength' must be a number.")}else if(typeof n<"u")throw new TypeError("'options' must be an object.");i=new Uint8Array(f,c,p)}else throw new TypeError("Unexpected argument[0]: must be 'path' or 'buffer'.");let[a,u]=await xo(s),l=await a.createInferenceSessionHandler(i,u);return yt(),new r(l)}startProfiling(){this.handler.startProfiling()}endProfiling(){this.handler.endProfiling()}get inputNames(){return this.handler.inputNames}get outputNames(){return this.handler.outputNames}}});var av,sc=C(()=>{"use strict";ac();av=Io});var uc=C(()=>{"use strict"});var lc=C(()=>{"use strict"});var cc=C(()=>{"use strict"});var fc=C(()=>{"use strict"});var sv,So,dc=C(()=>{"use strict";vo();To();sv="Training backend could not be resolved. 
Make sure you're using the correct configuration & WebAssembly files.",So=class r{constructor(e,n,t){this.handler=e,this.hasOptimizerModel=n,this.hasEvalModel=t}get trainingInputNames(){return this.handler.inputNames}get trainingOutputNames(){return this.handler.outputNames}get evalInputNames(){if(this.hasEvalModel)return this.handler.evalInputNames;throw new Error("This training session has no evalModel loaded.")}get evalOutputNames(){if(this.hasEvalModel)return this.handler.evalOutputNames;throw new Error("This training session has no evalModel loaded.")}static async create(e,n){let t=e.evalModel||"",o=e.optimizerModel||"",i=n||{},[s,a]=await xo(i);if(s.createTrainingSessionHandler){let u=await s.createTrainingSessionHandler(e.checkpointState,e.trainModel,t,o,a);return new r(u,!!e.optimizerModel,!!e.evalModel)}else throw new Error(sv)}typeNarrowingForRunStep(e,n,t,o,i){let s={},a={};if(typeof t!="object"||t===null||t instanceof it||Array.isArray(t))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let u=!0;if(typeof o=="object"){if(o===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(o instanceof it)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(o)){if(o.length===0)throw new TypeError("'fetches' cannot be an empty array.");u=!1;for(let l of o){if(typeof l!="string")throw new TypeError("'fetches' must be a string array or an object.");if(n.indexOf(l)===-1)throw new RangeError(`'fetches' contains invalid output name: ${l}.`);s[l]=null}if(typeof i=="object"&&i!==null)a=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else{let l=!1,f=Object.getOwnPropertyNames(o);for(let c of n)if(f.indexOf(c)!==-1){let p=o[c];(p===null||p instanceof it)&&(l=!0,u=!1,s[c]=p)}if(l){if(typeof i=="object"&&i!==null)a=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else a=o}}else if(typeof o<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let l of e)if(typeof t[l]>"u")throw new Error(`input '${l}' is missing in 'feeds'.`);if(u)for(let l of n)s[l]=null;return[s,a]}convertHandlerReturnTypeToMapOfTensors(e){let n={};for(let t in e)if(Object.hasOwnProperty.call(e,t)){let o=e[t];o instanceof it?n[t]=o:n[t]=new it(o.type,o.data,o.dims)}return n}async lazyResetGrad(){await this.handler.lazyResetGrad()}async runTrainStep(e,n,t){let[o,i]=this.typeNarrowingForRunStep(this.trainingInputNames,this.trainingOutputNames,e,n,t),s=await this.handler.runTrainStep(e,o,i);return this.convertHandlerReturnTypeToMapOfTensors(s)}async runOptimizerStep(e){if(this.hasOptimizerModel)await this.handler.runOptimizerStep(e||{});else throw new Error("This TrainingSession has no OptimizerModel loaded.")}async runEvalStep(e,n,t){if(this.hasEvalModel){let[o,i]=this.typeNarrowingForRunStep(this.evalInputNames,this.evalOutputNames,e,n,t),s=await this.handler.runEvalStep(e,o,i);return this.convertHandlerReturnTypeToMapOfTensors(s)}else throw new Error("This TrainingSession has no EvalModel loaded.")}async getParametersSize(e=!0){return this.handler.getParametersSize(e)}async loadParametersBuffer(e,n=!0){let t=await this.getParametersSize(n);if(e.length!==4*t)throw new Error("Size of the buffer passed into loadParametersBuffer must match the number of parameters in the model. 
Please use getParametersSize method to check.");return this.handler.loadParametersBuffer(e,n)}async getContiguousParameters(e=!0){return this.handler.getContiguousParameters(e)}async release(){return this.handler.dispose()}}});var uv,pc=C(()=>{"use strict";dc();uv=So});var Aa={};un(Aa,{InferenceSession:()=>av,TRACE:()=>_o,TRACE_FUNC_BEGIN:()=>St,TRACE_FUNC_END:()=>yt,Tensor:()=>it,TrainingSession:()=>uv,env:()=>le,registerBackend:()=>vr});var ft=C(()=>{"use strict";Ml();Wl();sc();To();uc();lc();$a();cc();fc();pc()});function wr(r,e,n,t){if(e===void 0)return cv(r);if(n===void 0)$o(r,e,1);else if(typeof n=="number"&&t===void 0)$o(r,e,n);else if(typeof n=="string"&&t===void 0)$o(r,n,1,e);else if(typeof n=="string"&&typeof t=="number")$o(r,n,t,e);else throw new TypeError("input is valid")}function cv(r){return{verbose:wr.verbose.bind(null,r),info:wr.info.bind(null,r),warning:wr.warning.bind(null,r),error:wr.error.bind(null,r),fatal:wr.fatal.bind(null,r)}}function $o(r,e,n,t){let o=En[t||""]||En[""];hc[r]{"use strict";Pa=class{log(e,n,t){}},Oa=class{log(e,n,t){console.log(`${this.color(e)} ${t?"\x1B[35m"+t+"\x1B[0m ":""}${n}`)}color(e){switch(e){case"verbose":return"\x1B[34;40mv\x1B[0m";case"info":return"\x1B[32mi\x1B[0m";case"warning":return"\x1B[30;43mw\x1B[0m";case"error":return"\x1B[31;40me\x1B[0m";case"fatal":return"\x1B[101mf\x1B[0m";default:throw new Error(`unsupported severity: ${e}`)}}},hc={verbose:1e3,info:2e3,warning:4e3,error:5e3,fatal:6e3},lv={none:new Pa,console:new Oa},gc={provider:"console",minimalSeverity:"warning",logDateTime:!0,logSourceLocation:!1},En={"":gc};(u=>{function r(l,f){u("verbose",l,f)}u.verbose=r;function e(l,f){u("info",l,f)}u.info=e;function n(l,f){u("warning",l,f)}u.warning=n;function t(l,f){u("error",l,f)}u.error=t;function o(l,f){u("fatal",l,f)}u.fatal=o;function i(l){En={},s("",l||{})}u.reset=i;function s(l,f){if(l==="*")i(f);else{let c=En[l]||gc;En[l]={provider:f.provider||c.provider,minimalSeverity:f.minimalSeverity||c.minimalSeverity,logDateTime:f.logDateTime===void 0?c.logDateTime:f.logDateTime,logSourceLocation:f.logSourceLocation===void 0?c.logSourceLocation:f.logSourceLocation}}}u.set=s;function a(l){let f={};l.logLevel&&(f.minimalSeverity=l.logLevel),s("",f)}u.setWithEnv=a})(wr||={});Re=wr,Po=class{constructor(e,n,t,o,i,s){this.category=e;this.name=n;this.startTime=t;this.endCallback=o;this.timer=i;this.ctx=s}async end(){return this.endCallback(this)}async checkTimer(){if(this.ctx===void 0||this.timer===void 0)throw new Error("No webgl timer found");return this.ctx.endTimer(),this.ctx.waitForQueryAndGetTime(this.timer)}},Oo=class{constructor(e,n,t,o){this.category=e;this.name=n;this.startTime=t;this.endTime=o}},Eo=class{constructor(e,n,t){this._started=!1;this._flushPointer=0;this._started=!1,this._maxNumberEvents=e===void 0?1e4:e,this._flushBatchSize=n===void 0?10:n,this._flushIntervalInMilliseconds=t===void 0?5e3:t}static create(e){return e===void 0?new this:new this(e.maxNumberEvents,e.flushBatchSize,e.flushIntervalInMilliseconds)}start(){this._started=!0,this._timingEvents=[],this._flushTime=Ao(),this._flushPointer=0}stop(){for(this._started=!1;this._flushPointer{a.then(async f=>{i&&await i.end(),u(f)},async f=>{i&&await i.end(),l(f)})});if(!s&&i){let u=i.end();if(u&&typeof u.then=="function")return new Promise((l,f)=>{u.then(()=>{l(a)},c=>{f(c)})})}return a}begin(e,n,t){if(!this._started)throw new Error("profiler is not started yet");if(t===void 0){let o=Ao();return this.flush(o),new Po(e,n,o,i=>this.endSync(i))}else{let o=t.beginTimer();return 
new Po(e,n,0,async i=>this.end(i),o,t)}}async end(e){let n=await e.checkTimer();this._timingEvents.length=this._flushBatchSize||e-this._flushTime>=this._flushIntervalInMilliseconds){for(let n=this._flushPointer;this._flushPointerperformance.now():Date.now});function bc(r,e,n){for(let t of n){let o=t[0],i=t[1],s=t[2],a=t[3],u=t[4];if(r.opType===o){for(let l of e)if((l.domain===i||l.domain==="ai.onnx"&&i==="")&&fv(l.version,s))return{opImpl:a,opInit:u}}}throw new TypeError(`cannot resolve operator '${r.opType}' with opsets: ${e.map(t=>`${t.domain||"ai.onnx"} v${t.version}`).join(", ")}`)}function fv(r,e){if(e.endsWith("+")){let n=Number.parseInt(e.substring(0,e.length-1),10);return!isNaN(n)&&n<=r}else if(e.split("-").length===2){let n=e.split("-"),t=Number.parseInt(n[0],10),o=Number.parseInt(n[1],10);return!isNaN(t)&&!isNaN(o)&&t<=r&&r<=o}else return Number.parseInt(e,10)===r}var yc=C(()=>{"use strict"});var xc=Je(Ea=>{"use strict";Ea.__esModule=!0;var dv=function(){function r(e){if(!e)throw new TypeError("Invalid argument; `value` has no value.");this.value=r.EMPTY,e&&r.isGuid(e)&&(this.value=e)}return r.isGuid=function(e){var n=e.toString();return e&&(e instanceof r||r.validator.test(n))},r.create=function(){return new r([r.gen(2),r.gen(1),r.gen(1),r.gen(1),r.gen(3)].join("-"))},r.createEmpty=function(){return new r("emptyguid")},r.parse=function(e){return new r(e)},r.raw=function(){return[r.gen(2),r.gen(1),r.gen(1),r.gen(1),r.gen(3)].join("-")},r.gen=function(e){for(var n="",t=0;t>>=0,(o=0<=r&&r<256)&&(t=Tc[r],t)?t:(n=ke(r,0,!0),o&&(Tc[r]=n),n)):(r|=0,(o=-128<=r&&r<128)&&(t=wc[r],t)?t:(n=ke(r,r<0?-1:0,!1),o&&(wc[r]=n),n))}function Et(r,e){if(isNaN(r))return e?fr:Nt;if(e){if(r<0)return fr;if(r>=$c)return Oc}else{if(r<=-Ic)return xt;if(r+1>=Ic)return Pc}return r<0?Et(-r,e).neg():ke(r%fn|0,r/fn|0,e)}function ke(r,e,n){return new Me(r,e,n)}function ka(r,e,n){if(r.length===0)throw Error("empty string");if(typeof e=="number"?(n=e,e=!1):e=!!e,r==="NaN"||r==="Infinity"||r==="+Infinity"||r==="-Infinity")return e?fr:Nt;if(n=n||10,n<2||360)throw Error("interior hyphen");if(t===0)return ka(r.substring(1),e,n).neg();for(var o=Et(Co(n,8)),i=Nt,s=0;s{Ot=null;try{Ot=new WebAssembly.Instance(new WebAssembly.Module(new 
Uint8Array([0,97,115,109,1,0,0,0,1,13,2,96,0,1,127,96,4,127,127,127,127,1,127,3,7,6,0,1,1,1,1,1,6,6,1,127,1,65,0,11,7,50,6,3,109,117,108,0,1,5,100,105,118,95,115,0,2,5,100,105,118,95,117,0,3,5,114,101,109,95,115,0,4,5,114,101,109,95,117,0,5,8,103,101,116,95,104,105,103,104,0,0,10,191,1,6,4,0,35,0,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,126,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,127,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,128,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,129,34,4,66,32,135,167,36,0,32,4,167,11,36,1,1,126,32,0,173,32,1,173,66,32,134,132,32,2,173,32,3,173,66,32,134,132,130,34,4,66,32,135,167,36,0,32,4,167,11])),{}).exports}catch{}Me.prototype.__isLong__;Object.defineProperty(Me.prototype,"__isLong__",{value:!0});Me.isLong=dt;wc={},Tc={};Me.fromInt=Lr;Me.fromNumber=Et;Me.fromBits=ke;Co=Math.pow;Me.fromString=ka;Me.fromValue=zt;_c=65536,pv=1<<24,fn=_c*_c,$c=fn*fn,Ic=$c/2,Sc=Lr(pv),Nt=Lr(0);Me.ZERO=Nt;fr=Lr(0,!0);Me.UZERO=fr;cn=Lr(1);Me.ONE=cn;Ac=Lr(1,!0);Me.UONE=Ac;Ca=Lr(-1);Me.NEG_ONE=Ca;Pc=ke(-1,2147483647,!1);Me.MAX_VALUE=Pc;Oc=ke(-1,-1,!0);Me.MAX_UNSIGNED_VALUE=Oc;xt=ke(0,-2147483648,!1);Me.MIN_VALUE=xt;U=Me.prototype;U.toInt=function(){return this.unsigned?this.low>>>0:this.low};U.toNumber=function(){return this.unsigned?(this.high>>>0)*fn+(this.low>>>0):this.high*fn+(this.low>>>0)};U.toString=function(e){if(e=e||10,e<2||36>>0,f=l.toString(e);if(s=u,s.isZero())return f+a;for(;f.length<6;)f="0"+f;a=""+f+a}};U.getHighBits=function(){return this.high};U.getHighBitsUnsigned=function(){return this.high>>>0};U.getLowBits=function(){return this.low};U.getLowBitsUnsigned=function(){return this.low>>>0};U.getNumBitsAbs=function(){if(this.isNegative())return this.eq(xt)?64:this.neg().getNumBitsAbs();for(var e=this.high!=0?this.high:this.low,n=31;n>0&&!(e&1<=0};U.isOdd=function(){return(this.low&1)===1};U.isEven=function(){return(this.low&1)===0};U.equals=function(e){return dt(e)||(e=zt(e)),this.unsigned!==e.unsigned&&this.high>>>31===1&&e.high>>>31===1?!1:this.high===e.high&&this.low===e.low};U.eq=U.equals;U.notEquals=function(e){return!this.eq(e)};U.neq=U.notEquals;U.ne=U.notEquals;U.lessThan=function(e){return this.comp(e)<0};U.lt=U.lessThan;U.lessThanOrEqual=function(e){return this.comp(e)<=0};U.lte=U.lessThanOrEqual;U.le=U.lessThanOrEqual;U.greaterThan=function(e){return this.comp(e)>0};U.gt=U.greaterThan;U.greaterThanOrEqual=function(e){return this.comp(e)>=0};U.gte=U.greaterThanOrEqual;U.ge=U.greaterThanOrEqual;U.compare=function(e){if(dt(e)||(e=zt(e)),this.eq(e))return 0;var n=this.isNegative(),t=e.isNegative();return n&&!t?-1:!n&&t?1:this.unsigned?e.high>>>0>this.high>>>0||e.high===this.high&&e.low>>>0>this.low>>>0?-1:1:this.sub(e).isNegative()?-1:1};U.comp=U.compare;U.negate=function(){return!this.unsigned&&this.eq(xt)?xt:this.not().add(cn)};U.neg=U.negate;U.add=function(e){dt(e)||(e=zt(e));var n=this.high>>>16,t=this.high&65535,o=this.low>>>16,i=this.low&65535,s=e.high>>>16,a=e.high&65535,u=e.low>>>16,l=e.low&65535,f=0,c=0,p=0,b=0;return b+=i+l,p+=b>>>16,b&=65535,p+=o+u,c+=p>>>16,p&=65535,c+=t+a,f+=c>>>16,c&=65535,f+=n+s,f&=65535,ke(p<<16|b,f<<16|c,this.unsigned)};U.subtract=function(e){return dt(e)||(e=zt(e)),this.add(e.neg())};U.sub=U.subtract;U.multiply=function(e){if(this.isZero())return this;if(dt(e)||(e=zt(e)),Ot){var 
n=Ot.mul(this.low,this.high,e.low,e.high);return ke(n,Ot.get_high(),this.unsigned)}if(e.isZero())return this.unsigned?fr:Nt;if(this.eq(xt))return e.isOdd()?xt:Nt;if(e.eq(xt))return this.isOdd()?xt:Nt;if(this.isNegative())return e.isNegative()?this.neg().mul(e.neg()):this.neg().mul(e).neg();if(e.isNegative())return this.mul(e.neg()).neg();if(this.lt(Sc)&&e.lt(Sc))return Et(this.toNumber()*e.toNumber(),this.unsigned);var t=this.high>>>16,o=this.high&65535,i=this.low>>>16,s=this.low&65535,a=e.high>>>16,u=e.high&65535,l=e.low>>>16,f=e.low&65535,c=0,p=0,b=0,h=0;return h+=s*f,b+=h>>>16,h&=65535,b+=i*f,p+=b>>>16,b&=65535,b+=s*l,p+=b>>>16,b&=65535,p+=o*f,c+=p>>>16,p&=65535,p+=i*l,c+=p>>>16,p&=65535,p+=s*u,c+=p>>>16,p&=65535,c+=t*f+o*l+i*u+s*a,c&=65535,ke(b<<16|h,c<<16|p,this.unsigned)};U.mul=U.multiply;U.divide=function(e){if(dt(e)||(e=zt(e)),e.isZero())throw Error("division by zero");if(Ot){if(!this.unsigned&&this.high===-2147483648&&e.low===-1&&e.high===-1)return this;var n=(this.unsigned?Ot.div_u:Ot.div_s)(this.low,this.high,e.low,e.high);return ke(n,Ot.get_high(),this.unsigned)}if(this.isZero())return this.unsigned?fr:Nt;var t,o,i;if(this.unsigned){if(e.unsigned||(e=e.toUnsigned()),e.gt(this))return fr;if(e.gt(this.shru(1)))return Ac;i=fr}else{if(this.eq(xt)){if(e.eq(cn)||e.eq(Ca))return xt;if(e.eq(xt))return cn;var s=this.shr(1);return t=s.div(e).shl(1),t.eq(Nt)?e.isNegative()?cn:Ca:(o=this.sub(e.mul(t)),i=t.add(o.div(e)),i)}else if(e.eq(xt))return this.unsigned?fr:Nt;if(this.isNegative())return e.isNegative()?this.neg().div(e.neg()):this.neg().div(e).neg();if(e.isNegative())return this.div(e.neg()).neg();i=Nt}for(o=this;o.gte(e);){t=Math.max(1,Math.floor(o.toNumber()/e.toNumber()));for(var a=Math.ceil(Math.log(t)/Math.LN2),u=a<=48?1:Co(2,a-48),l=Et(t),f=l.mul(e);f.isNegative()||f.gt(o);)t-=u,l=Et(t,this.unsigned),f=l.mul(e);l.isZero()&&(l=cn),i=i.add(l),o=o.sub(f)}return i};U.div=U.divide;U.modulo=function(e){if(dt(e)||(e=zt(e)),Ot){var n=(this.unsigned?Ot.rem_u:Ot.rem_s)(this.low,this.high,e.low,e.high);return ke(n,Ot.get_high(),this.unsigned)}return this.sub(this.div(e).mul(e))};U.mod=U.modulo;U.rem=U.modulo;U.not=function(){return ke(~this.low,~this.high,this.unsigned)};U.countLeadingZeros=function(){return this.high?Math.clz32(this.high):Math.clz32(this.low)+32};U.clz=U.countLeadingZeros;U.countTrailingZeros=function(){return this.low?vc(this.low):vc(this.high)+32};U.ctz=U.countTrailingZeros;U.and=function(e){return dt(e)||(e=zt(e)),ke(this.low&e.low,this.high&e.high,this.unsigned)};U.or=function(e){return dt(e)||(e=zt(e)),ke(this.low|e.low,this.high|e.high,this.unsigned)};U.xor=function(e){return dt(e)||(e=zt(e)),ke(this.low^e.low,this.high^e.high,this.unsigned)};U.shiftLeft=function(e){return dt(e)&&(e=e.toInt()),(e&=63)===0?this:e<32?ke(this.low<>>32-e,this.unsigned):ke(0,this.low<>>e|this.high<<32-e,this.high>>e,this.unsigned):ke(this.high>>e-32,this.high>=0?0:-1,this.unsigned)};U.shr=U.shiftRight;U.shiftRightUnsigned=function(e){return dt(e)&&(e=e.toInt()),(e&=63)===0?this:e<32?ke(this.low>>>e|this.high<<32-e,this.high>>>e,this.unsigned):e===32?ke(this.high,0,this.unsigned):ke(this.high>>>e-32,0,this.unsigned)};U.shru=U.shiftRightUnsigned;U.shr_u=U.shiftRightUnsigned;U.rotateLeft=function(e){var n;return dt(e)&&(e=e.toInt()),(e&=63)===0?this:e===32?ke(this.high,this.low,this.unsigned):e<32?(n=32-e,ke(this.low<>>n,this.high<>>n,this.unsigned)):(e-=32,n=32-e,ke(this.high<>>n,this.low<>>n,this.unsigned))};U.rotl=U.rotateLeft;U.rotateRight=function(e){var n;return 
dt(e)&&(e=e.toInt()),(e&=63)===0?this:e===32?ke(this.high,this.low,this.unsigned):e<32?(n=32-e,ke(this.high<>>e,this.low<>>e,this.unsigned)):(e-=32,n=32-e,ke(this.low<>>e,this.high<>>e,this.unsigned))};U.rotr=U.rotateRight;U.toSigned=function(){return this.unsigned?ke(this.low,this.high,!1):this};U.toUnsigned=function(){return this.unsigned?this:ke(this.low,this.high,!0)};U.toBytes=function(e){return e?this.toBytesLE():this.toBytesBE()};U.toBytesLE=function(){var e=this.high,n=this.low;return[n&255,n>>>8&255,n>>>16&255,n>>>24,e&255,e>>>8&255,e>>>16&255,e>>>24]};U.toBytesBE=function(){var e=this.high,n=this.low;return[e>>>24,e>>>16&255,e>>>8&255,e&255,n>>>24,n>>>16&255,n>>>8&255,n&255]};Me.fromBytes=function(e,n,t){return t?Me.fromBytesLE(e,n):Me.fromBytesBE(e,n)};Me.fromBytesLE=function(e,n){return new Me(e[0]|e[1]<<8|e[2]<<16|e[3]<<24,e[4]|e[5]<<8|e[6]<<16|e[7]<<24,n)};Me.fromBytesBE=function(e,n){return new Me(e[4]<<24|e[5]<<16|e[6]<<8|e[7],e[0]<<24|e[1]<<16|e[2]<<8|e[3],n)};dr=Me});var k,ko=C(()=>{k={};k.Offset;k.Table;k.SIZEOF_SHORT=2;k.SIZEOF_INT=4;k.FILE_IDENTIFIER_LENGTH=4;k.SIZE_PREFIX_LENGTH=4;k.Encoding={UTF8_BYTES:1,UTF16_STRING:2};k.int32=new Int32Array(2);k.float32=new Float32Array(k.int32.buffer);k.float64=new Float64Array(k.int32.buffer);k.isLittleEndian=new Uint16Array(new Uint8Array([1,0]).buffer)[0]===1;k.Long=function(r,e){this.low=r|0,this.high=e|0};k.Long.create=function(r,e){return r==0&&e==0?k.Long.ZERO:new k.Long(r,e)};k.Long.prototype.toFloat64=function(){return(this.low>>>0)+this.high*4294967296};k.Long.prototype.equals=function(r){return this.low==r.low&&this.high==r.high};k.Long.ZERO=new k.Long(0,0);k.Builder=function(r){if(r)var e=r;else var e=1024;this.bb=k.ByteBuffer.allocate(e),this.space=e,this.minalign=1,this.vtable=null,this.vtable_in_use=0,this.isNested=!1,this.object_start=0,this.vtables=[],this.vector_num_elems=0,this.force_defaults=!1};k.Builder.prototype.clear=function(){this.bb.clear(),this.space=this.bb.capacity(),this.minalign=1,this.vtable=null,this.vtable_in_use=0,this.isNested=!1,this.object_start=0,this.vtables=[],this.vector_num_elems=0,this.force_defaults=!1};k.Builder.prototype.forceDefaults=function(r){this.force_defaults=r};k.Builder.prototype.dataBuffer=function(){return this.bb};k.Builder.prototype.asUint8Array=function(){return this.bb.bytes().subarray(this.bb.position(),this.bb.position()+this.offset())};k.Builder.prototype.prep=function(r,e){r>this.minalign&&(this.minalign=r);for(var n=~(this.bb.capacity()-this.space+e)+1&r-1;this.space=0&&this.vtable[e]==0;e--);for(var n=e+1;e>=0;e--)this.addInt16(this.vtable[e]!=0?r-this.vtable[e]:0);var t=2;this.addInt16(r-this.object_start);var o=(n+t)*k.SIZEOF_SHORT;this.addInt16(o);var i=0,s=this.space;e:for(e=0;e=0;i--)this.writeInt8(o.charCodeAt(i))}this.prep(this.minalign,k.SIZEOF_INT+t),this.addOffset(r),t&&this.addInt32(this.bb.capacity()-this.space),this.bb.setPosition(this.space)};k.Builder.prototype.finishSizePrefixed=function(r,e){this.finish(r,e,!0)};k.Builder.prototype.requiredField=function(r,e){var n=this.bb.capacity()-r,t=n-this.bb.readInt32(n),o=this.bb.readInt16(t+e)!=0;if(!o)throw new Error("FlatBuffers: field "+e+" must be set")};k.Builder.prototype.startVector=function(r,e,n){this.notNested(),this.vector_num_elems=e,this.prep(k.SIZEOF_INT,r*e),this.prep(n,r*e)};k.Builder.prototype.endVector=function(){return this.writeInt32(this.vector_num_elems),this.offset()};k.Builder.prototype.createString=function(r){if(r instanceof Uint8Array)var e=r;else for(var 
e=[],n=0;n=56320)t=o;else{var i=r.charCodeAt(n++);t=(o<<10)+i+(65536-56623104-56320)}t<128?e.push(t):(t<2048?e.push(t>>6&31|192):(t<65536?e.push(t>>12&15|224):e.push(t>>18&7|240,t>>12&63|128),e.push(t>>6&63|128)),e.push(t&63|128))}this.addInt8(0),this.startVector(1,e.length,1),this.bb.setPosition(this.space-=e.length);for(var n=0,s=this.space,a=this.bb.bytes();n>24};k.ByteBuffer.prototype.readUint8=function(r){return this.bytes_[r]};k.ByteBuffer.prototype.readInt16=function(r){return this.readUint16(r)<<16>>16};k.ByteBuffer.prototype.readUint16=function(r){return this.bytes_[r]|this.bytes_[r+1]<<8};k.ByteBuffer.prototype.readInt32=function(r){return this.bytes_[r]|this.bytes_[r+1]<<8|this.bytes_[r+2]<<16|this.bytes_[r+3]<<24};k.ByteBuffer.prototype.readUint32=function(r){return this.readInt32(r)>>>0};k.ByteBuffer.prototype.readInt64=function(r){return new k.Long(this.readInt32(r),this.readInt32(r+4))};k.ByteBuffer.prototype.readUint64=function(r){return new k.Long(this.readUint32(r),this.readUint32(r+4))};k.ByteBuffer.prototype.readFloat32=function(r){return k.int32[0]=this.readInt32(r),k.float32[0]};k.ByteBuffer.prototype.readFloat64=function(r){return k.int32[k.isLittleEndian?0:1]=this.readInt32(r),k.int32[k.isLittleEndian?1:0]=this.readInt32(r+4),k.float64[0]};k.ByteBuffer.prototype.writeInt8=function(r,e){this.bytes_[r]=e};k.ByteBuffer.prototype.writeUint8=function(r,e){this.bytes_[r]=e};k.ByteBuffer.prototype.writeInt16=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8};k.ByteBuffer.prototype.writeUint16=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8};k.ByteBuffer.prototype.writeInt32=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8,this.bytes_[r+2]=e>>16,this.bytes_[r+3]=e>>24};k.ByteBuffer.prototype.writeUint32=function(r,e){this.bytes_[r]=e,this.bytes_[r+1]=e>>8,this.bytes_[r+2]=e>>16,this.bytes_[r+3]=e>>24};k.ByteBuffer.prototype.writeInt64=function(r,e){this.writeInt32(r,e.low),this.writeInt32(r+4,e.high)};k.ByteBuffer.prototype.writeUint64=function(r,e){this.writeUint32(r,e.low),this.writeUint32(r+4,e.high)};k.ByteBuffer.prototype.writeFloat32=function(r,e){k.float32[0]=e,this.writeInt32(r,k.int32[0])};k.ByteBuffer.prototype.writeFloat64=function(r,e){k.float64[0]=e,this.writeInt32(r,k.int32[k.isLittleEndian?0:1]),this.writeInt32(r+4,k.int32[k.isLittleEndian?1:0])};k.ByteBuffer.prototype.getBufferIdentifier=function(){if(this.bytes_.length>10)+55296,(i&1024-1)+56320))}return t};k.ByteBuffer.prototype.__indirect=function(r){return r+this.readInt32(r)};k.ByteBuffer.prototype.__vector=function(r){return r+this.readInt32(r)+k.SIZEOF_INT};k.ByteBuffer.prototype.__vector_len=function(r){return this.readInt32(r+this.readInt32(r))};k.ByteBuffer.prototype.__has_identifier=function(r){if(r.length!=k.FILE_IDENTIFIER_LENGTH)throw new Error("FlatBuffers: file identifier must be length "+k.FILE_IDENTIFIER_LENGTH);for(var e=0;e{"use strict";ko();(e=>{let r;(t=>{let n;(i=>{let o;(S=>(S[S.UNDEFINED=0]="UNDEFINED",S[S.FLOAT=1]="FLOAT",S[S.INT=2]="INT",S[S.STRING=3]="STRING",S[S.TENSOR=4]="TENSOR",S[S.GRAPH=5]="GRAPH",S[S.FLOATS=6]="FLOATS",S[S.INTS=7]="INTS",S[S.STRINGS=8]="STRINGS",S[S.TENSORS=9]="TENSORS",S[S.GRAPHS=10]="GRAPHS",S[S.SPARSE_TENSOR=11]="SPARSE_TENSOR",S[S.SPARSE_TENSORS=12]="SPARSE_TENSORS"))(o=i.AttributeType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{let o;(l=>(l[l.UNKNOWN=0]="UNKNOWN",l[l.VALUE=1]="VALUE",l[l.PARAM=2]="PARAM"))(o=i.DimensionValueType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let 
r;(t=>{let n;(i=>{let o;(F=>(F[F.UNDEFINED=0]="UNDEFINED",F[F.FLOAT=1]="FLOAT",F[F.UINT8=2]="UINT8",F[F.INT8=3]="INT8",F[F.UINT16=4]="UINT16",F[F.INT16=5]="INT16",F[F.INT32=6]="INT32",F[F.INT64=7]="INT64",F[F.STRING=8]="STRING",F[F.BOOL=9]="BOOL",F[F.FLOAT16=10]="FLOAT16",F[F.DOUBLE=11]="DOUBLE",F[F.UINT32=12]="UINT32",F[F.UINT64=13]="UINT64",F[F.COMPLEX64=14]="COMPLEX64",F[F.COMPLEX128=15]="COMPLEX128",F[F.BFLOAT16=16]="BFLOAT16",F[F.FLOAT8E4M3FN=17]="FLOAT8E4M3FN",F[F.FLOAT8E4M3FNUZ=18]="FLOAT8E4M3FNUZ",F[F.FLOAT8E5M2=19]="FLOAT8E5M2",F[F.FLOAT8E5M2FNUZ=20]="FLOAT8E5M2FNUZ"))(o=i.TensorDataType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{let o;(u=>(u[u.Primitive=0]="Primitive",u[u.Fused=1]="Fused"))(o=i.NodeType||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{let o;(f=>(f[f.NONE=0]="NONE",f[f.tensor_type=1]="tensor_type",f[f.sequence_type=2]="sequence_type",f[f.map_type=3]="map_type"))(o=i.TypeInfoValue||={})})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsShape(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsShape(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}dim(a,u){let l=this.bb.__offset(this.bb_pos,4);return l?(u||new e.experimental.fbs.Dimension).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}dimLength(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.__vector_len(this.bb_pos+a):0}static startShape(a){a.startObject(1)}static addDim(a,u){a.addFieldOffset(0,u,0)}static createDimVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startDimVector(a,u){a.startVector(4,u,4)}static endShape(a){return a.endObject()}static createShape(a,u){return o.startShape(a),o.addDim(a,u),o.endShape(a)}}i.Shape=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsDimension(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsDimension(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}value(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.DimensionValue).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}denotation(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}static startDimension(a){a.startObject(2)}static addValue(a,u){a.addFieldOffset(0,u,0)}static addDenotation(a,u){a.addFieldOffset(1,u,0)}static endDimension(a){return a.endObject()}static createDimension(a,u,l){return o.startDimension(a),o.addValue(a,u),o.addDenotation(a,l),o.endDimension(a)}}i.Dimension=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsDimensionValue(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsDimensionValue(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}dimType(){let 
a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt8(this.bb_pos+a):0}dimValue(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}dimParam(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__string(this.bb_pos+u,a):null}static startDimensionValue(a){a.startObject(3)}static addDimType(a,u){a.addFieldInt8(0,u,0)}static addDimValue(a,u){a.addFieldInt64(1,u,a.createLong(0,0))}static addDimParam(a,u){a.addFieldOffset(2,u,0)}static endDimensionValue(a){return a.endObject()}static createDimensionValue(a,u,l,f){return o.startDimensionValue(a),o.addDimType(a,u),o.addDimValue(a,l),o.addDimParam(a,f),o.endDimensionValue(a)}}i.DimensionValue=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsTensorTypeAndShape(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsTensorTypeAndShape(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}elemType(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt32(this.bb_pos+a):0}shape(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.Shape).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startTensorTypeAndShape(a){a.startObject(2)}static addElemType(a,u){a.addFieldInt32(0,u,0)}static addShape(a,u){a.addFieldOffset(1,u,0)}static endTensorTypeAndShape(a){return a.endObject()}static createTensorTypeAndShape(a,u,l){return o.startTensorTypeAndShape(a),o.addElemType(a,u),o.addShape(a,l),o.endTensorTypeAndShape(a)}}i.TensorTypeAndShape=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsMapType(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsMapType(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}keyType(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt32(this.bb_pos+a):0}valueType(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.TypeInfo).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startMapType(a){a.startObject(2)}static addKeyType(a,u){a.addFieldInt32(0,u,0)}static addValueType(a,u){a.addFieldOffset(1,u,0)}static endMapType(a){return a.endObject()}static createMapType(a,u,l){return o.startMapType(a),o.addKeyType(a,u),o.addValueType(a,l),o.endMapType(a)}}i.MapType=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSequenceType(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSequenceType(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}elemType(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.TypeInfo).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startSequenceType(a){a.startObject(1)}static addElemType(a,u){a.addFieldOffset(0,u,0)}static endSequenceType(a){return a.endObject()}static createSequenceType(a,u){return 
o.startSequenceType(a),o.addElemType(a,u),o.endSequenceType(a)}}i.SequenceType=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}nodeIndex(){return this.bb.readUint32(this.bb_pos)}srcArgIndex(){return this.bb.readInt32(this.bb_pos+4)}dstArgIndex(){return this.bb.readInt32(this.bb_pos+8)}static createEdgeEnd(a,u,l,f){return a.prep(4,12),a.writeInt32(f),a.writeInt32(l),a.writeInt32(u),a.offset()}}i.EdgeEnd=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsNodeEdge(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsNodeEdge(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}nodeIndex(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readUint32(this.bb_pos+a):0}inputEdges(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.EdgeEnd).__init(this.bb.__vector(this.bb_pos+l)+a*12,this.bb):null}inputEdgesLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}outputEdges(a,u){let l=this.bb.__offset(this.bb_pos,8);return l?(u||new e.experimental.fbs.EdgeEnd).__init(this.bb.__vector(this.bb_pos+l)+a*12,this.bb):null}outputEdgesLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}static startNodeEdge(a){a.startObject(3)}static addNodeIndex(a,u){a.addFieldInt32(0,u,0)}static addInputEdges(a,u){a.addFieldOffset(1,u,0)}static startInputEdgesVector(a,u){a.startVector(12,u,4)}static addOutputEdges(a,u){a.addFieldOffset(2,u,0)}static startOutputEdgesVector(a,u){a.startVector(12,u,4)}static endNodeEdge(a){return a.endObject()}static createNodeEdge(a,u,l,f){return o.startNodeEdge(a),o.addNodeIndex(a,u),o.addInputEdges(a,l),o.addOutputEdges(a,f),o.endNodeEdge(a)}}i.NodeEdge=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsNode(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsNode(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}domain(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__string(this.bb_pos+u,a):null}sinceVersion(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readInt32(this.bb_pos+a):0}index(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.readUint32(this.bb_pos+a):0}opType(a){let u=this.bb.__offset(this.bb_pos,14);return u?this.bb.__string(this.bb_pos+u,a):null}type(){let a=this.bb.__offset(this.bb_pos,16);return a?this.bb.readInt32(this.bb_pos+a):0}executionProviderType(a){let u=this.bb.__offset(this.bb_pos,18);return u?this.bb.__string(this.bb_pos+u,a):null}inputs(a,u){let l=this.bb.__offset(this.bb_pos,20);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}inputsLength(){let a=this.bb.__offset(this.bb_pos,20);return a?this.bb.__vector_len(this.bb_pos+a):0}outputs(a,u){let 
l=this.bb.__offset(this.bb_pos,22);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}outputsLength(){let a=this.bb.__offset(this.bb_pos,22);return a?this.bb.__vector_len(this.bb_pos+a):0}attributes(a,u){let l=this.bb.__offset(this.bb_pos,24);return l?(u||new e.experimental.fbs.Attribute).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}attributesLength(){let a=this.bb.__offset(this.bb_pos,24);return a?this.bb.__vector_len(this.bb_pos+a):0}inputArgCounts(a){let u=this.bb.__offset(this.bb_pos,26);return u?this.bb.readInt32(this.bb.__vector(this.bb_pos+u)+a*4):0}inputArgCountsLength(){let a=this.bb.__offset(this.bb_pos,26);return a?this.bb.__vector_len(this.bb_pos+a):0}inputArgCountsArray(){let a=this.bb.__offset(this.bb_pos,26);return a?new Int32Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}implicitInputs(a,u){let l=this.bb.__offset(this.bb_pos,28);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}implicitInputsLength(){let a=this.bb.__offset(this.bb_pos,28);return a?this.bb.__vector_len(this.bb_pos+a):0}static startNode(a){a.startObject(13)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addDomain(a,u){a.addFieldOffset(2,u,0)}static addSinceVersion(a,u){a.addFieldInt32(3,u,0)}static addIndex(a,u){a.addFieldInt32(4,u,0)}static addOpType(a,u){a.addFieldOffset(5,u,0)}static addType(a,u){a.addFieldInt32(6,u,0)}static addExecutionProviderType(a,u){a.addFieldOffset(7,u,0)}static addInputs(a,u){a.addFieldOffset(8,u,0)}static createInputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startInputsVector(a,u){a.startVector(4,u,4)}static addOutputs(a,u){a.addFieldOffset(9,u,0)}static createOutputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startOutputsVector(a,u){a.startVector(4,u,4)}static addAttributes(a,u){a.addFieldOffset(10,u,0)}static createAttributesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startAttributesVector(a,u){a.startVector(4,u,4)}static addInputArgCounts(a,u){a.addFieldOffset(11,u,0)}static createInputArgCountsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addInt32(u[l]);return a.endVector()}static startInputArgCountsVector(a,u){a.startVector(4,u,4)}static addImplicitInputs(a,u){a.addFieldOffset(12,u,0)}static createImplicitInputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startImplicitInputsVector(a,u){a.startVector(4,u,4)}static endNode(a){return a.endObject()}static createNode(a,u,l,f,c,p,b,h,g,T,w,v,S,$){return o.startNode(a),o.addName(a,u),o.addDocString(a,l),o.addDomain(a,f),o.addSinceVersion(a,c),o.addIndex(a,p),o.addOpType(a,b),o.addType(a,h),o.addExecutionProviderType(a,g),o.addInputs(a,T),o.addOutputs(a,w),o.addAttributes(a,v),o.addInputArgCounts(a,S),o.addImplicitInputs(a,$),o.endNode(a)}}i.Node=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsValueInfo(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsValueInfo(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new 
o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}type(a){let u=this.bb.__offset(this.bb_pos,8);return u?(a||new e.experimental.fbs.TypeInfo).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startValueInfo(a){a.startObject(3)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addType(a,u){a.addFieldOffset(2,u,0)}static endValueInfo(a){return a.endObject()}static createValueInfo(a,u,l,f){return o.startValueInfo(a),o.addName(a,u),o.addDocString(a,l),o.addType(a,f),o.endValueInfo(a)}}i.ValueInfo=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsTypeInfo(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsTypeInfo(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}denotation(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}valueType(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.readUint8(this.bb_pos+a):0}value(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__union(a,this.bb_pos+u):null}static startTypeInfo(a){a.startObject(3)}static addDenotation(a,u){a.addFieldOffset(0,u,0)}static addValueType(a,u){a.addFieldInt8(1,u,0)}static addValue(a,u){a.addFieldOffset(2,u,0)}static endTypeInfo(a){return a.endObject()}static createTypeInfo(a,u,l,f){return o.startTypeInfo(a),o.addDenotation(a,u),o.addValueType(a,l),o.addValue(a,f),o.endTypeInfo(a)}}i.TypeInfo=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsOperatorSetId(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsOperatorSetId(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}domain(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}version(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}static startOperatorSetId(a){a.startObject(2)}static addDomain(a,u){a.addFieldOffset(0,u,0)}static addVersion(a,u){a.addFieldInt64(1,u,a.createLong(0,0))}static endOperatorSetId(a){return a.endObject()}static createOperatorSetId(a,u,l){return o.startOperatorSetId(a),o.addDomain(a,u),o.addVersion(a,l),o.endOperatorSetId(a)}}i.OperatorSetId=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsTensor(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsTensor(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}dims(a){let u=this.bb.__offset(this.bb_pos,8);return 
u?this.bb.readInt64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}dimsLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}dataType(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readInt32(this.bb_pos+a):0}rawData(a){let u=this.bb.__offset(this.bb_pos,12);return u?this.bb.readUint8(this.bb.__vector(this.bb_pos+u)+a):0}rawDataLength(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.__vector_len(this.bb_pos+a):0}rawDataArray(){let a=this.bb.__offset(this.bb_pos,12);return a?new Uint8Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}stringData(a,u){let l=this.bb.__offset(this.bb_pos,14);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}stringDataLength(){let a=this.bb.__offset(this.bb_pos,14);return a?this.bb.__vector_len(this.bb_pos+a):0}static startTensor(a){a.startObject(6)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addDims(a,u){a.addFieldOffset(2,u,0)}static createDimsVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startDimsVector(a,u){a.startVector(8,u,8)}static addDataType(a,u){a.addFieldInt32(3,u,0)}static addRawData(a,u){a.addFieldOffset(4,u,0)}static createRawDataVector(a,u){a.startVector(1,u.length,1);for(let l=u.length-1;l>=0;l--)a.addInt8(u[l]);return a.endVector()}static startRawDataVector(a,u){a.startVector(1,u,1)}static addStringData(a,u){a.addFieldOffset(5,u,0)}static createStringDataVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startStringDataVector(a,u){a.startVector(4,u,4)}static endTensor(a){return a.endObject()}static createTensor(a,u,l,f,c,p,b){return o.startTensor(a),o.addName(a,u),o.addDocString(a,l),o.addDims(a,f),o.addDataType(a,c),o.addRawData(a,p),o.addStringData(a,b),o.endTensor(a)}}i.Tensor=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSparseTensor(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSparseTensor(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}values(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}indices(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}dims(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.readInt64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}dimsLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}static startSparseTensor(a){a.startObject(3)}static addValues(a,u){a.addFieldOffset(0,u,0)}static addIndices(a,u){a.addFieldOffset(1,u,0)}static addDims(a,u){a.addFieldOffset(2,u,0)}static createDimsVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startDimsVector(a,u){a.startVector(8,u,8)}static endSparseTensor(a){return a.endObject()}static createSparseTensor(a,u,l,f){return 
o.startSparseTensor(a),o.addValues(a,u),o.addIndices(a,l),o.addDims(a,f),o.endSparseTensor(a)}}i.SparseTensor=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsAttribute(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsAttribute(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}name(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}docString(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.__string(this.bb_pos+u,a):null}type(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.readInt32(this.bb_pos+a):0}f(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readFloat32(this.bb_pos+a):0}i(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}s(a){let u=this.bb.__offset(this.bb_pos,14);return u?this.bb.__string(this.bb_pos+u,a):null}t(a){let u=this.bb.__offset(this.bb_pos,16);return u?(a||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}g(a){let u=this.bb.__offset(this.bb_pos,18);return u?(a||new e.experimental.fbs.Graph).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}floats(a){let u=this.bb.__offset(this.bb_pos,20);return u?this.bb.readFloat32(this.bb.__vector(this.bb_pos+u)+a*4):0}floatsLength(){let a=this.bb.__offset(this.bb_pos,20);return a?this.bb.__vector_len(this.bb_pos+a):0}floatsArray(){let a=this.bb.__offset(this.bb_pos,20);return a?new Float32Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}ints(a){let u=this.bb.__offset(this.bb_pos,22);return u?this.bb.readInt64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}intsLength(){let a=this.bb.__offset(this.bb_pos,22);return a?this.bb.__vector_len(this.bb_pos+a):0}strings(a,u){let l=this.bb.__offset(this.bb_pos,24);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}stringsLength(){let a=this.bb.__offset(this.bb_pos,24);return a?this.bb.__vector_len(this.bb_pos+a):0}tensors(a,u){let l=this.bb.__offset(this.bb_pos,26);return l?(u||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}tensorsLength(){let a=this.bb.__offset(this.bb_pos,26);return a?this.bb.__vector_len(this.bb_pos+a):0}graphs(a,u){let l=this.bb.__offset(this.bb_pos,28);return l?(u||new e.experimental.fbs.Graph).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}graphsLength(){let a=this.bb.__offset(this.bb_pos,28);return a?this.bb.__vector_len(this.bb_pos+a):0}static startAttribute(a){a.startObject(13)}static addName(a,u){a.addFieldOffset(0,u,0)}static addDocString(a,u){a.addFieldOffset(1,u,0)}static addType(a,u){a.addFieldInt32(2,u,0)}static addF(a,u){a.addFieldFloat32(3,u,0)}static addI(a,u){a.addFieldInt64(4,u,a.createLong(0,0))}static addS(a,u){a.addFieldOffset(5,u,0)}static addT(a,u){a.addFieldOffset(6,u,0)}static addG(a,u){a.addFieldOffset(7,u,0)}static addFloats(a,u){a.addFieldOffset(8,u,0)}static createFloatsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addFloat32(u[l]);return a.endVector()}static startFloatsVector(a,u){a.startVector(4,u,4)}static addInts(a,u){a.addFieldOffset(9,u,0)}static 
createIntsVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startIntsVector(a,u){a.startVector(8,u,8)}static addStrings(a,u){a.addFieldOffset(10,u,0)}static createStringsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startStringsVector(a,u){a.startVector(4,u,4)}static addTensors(a,u){a.addFieldOffset(11,u,0)}static createTensorsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startTensorsVector(a,u){a.startVector(4,u,4)}static addGraphs(a,u){a.addFieldOffset(12,u,0)}static createGraphsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startGraphsVector(a,u){a.startVector(4,u,4)}static endAttribute(a){return a.endObject()}static createAttribute(a,u,l,f,c,p,b,h,g,T,w,v,S,$){return o.startAttribute(a),o.addName(a,u),o.addDocString(a,l),o.addType(a,f),o.addF(a,c),o.addI(a,p),o.addS(a,b),o.addT(a,h),o.addG(a,g),o.addFloats(a,T),o.addInts(a,w),o.addStrings(a,v),o.addTensors(a,S),o.addGraphs(a,$),o.endAttribute(a)}}i.Attribute=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsGraph(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsGraph(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}initializers(a,u){let l=this.bb.__offset(this.bb_pos,4);return l?(u||new e.experimental.fbs.Tensor).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}initializersLength(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.__vector_len(this.bb_pos+a):0}nodeArgs(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.ValueInfo).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}nodeArgsLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}nodes(a,u){let l=this.bb.__offset(this.bb_pos,8);return l?(u||new e.experimental.fbs.Node).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}nodesLength(){let a=this.bb.__offset(this.bb_pos,8);return a?this.bb.__vector_len(this.bb_pos+a):0}maxNodeIndex(){let a=this.bb.__offset(this.bb_pos,10);return a?this.bb.readUint32(this.bb_pos+a):0}nodeEdges(a,u){let l=this.bb.__offset(this.bb_pos,12);return l?(u||new e.experimental.fbs.NodeEdge).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}nodeEdgesLength(){let a=this.bb.__offset(this.bb_pos,12);return a?this.bb.__vector_len(this.bb_pos+a):0}inputs(a,u){let l=this.bb.__offset(this.bb_pos,14);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}inputsLength(){let a=this.bb.__offset(this.bb_pos,14);return a?this.bb.__vector_len(this.bb_pos+a):0}outputs(a,u){let l=this.bb.__offset(this.bb_pos,16);return l?this.bb.__string(this.bb.__vector(this.bb_pos+l)+a*4,u):null}outputsLength(){let a=this.bb.__offset(this.bb_pos,16);return a?this.bb.__vector_len(this.bb_pos+a):0}sparseInitializers(a,u){let l=this.bb.__offset(this.bb_pos,18);return l?(u||new e.experimental.fbs.SparseTensor).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}sparseInitializersLength(){let a=this.bb.__offset(this.bb_pos,18);return 
a?this.bb.__vector_len(this.bb_pos+a):0}static startGraph(a){a.startObject(8)}static addInitializers(a,u){a.addFieldOffset(0,u,0)}static createInitializersVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startInitializersVector(a,u){a.startVector(4,u,4)}static addNodeArgs(a,u){a.addFieldOffset(1,u,0)}static createNodeArgsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startNodeArgsVector(a,u){a.startVector(4,u,4)}static addNodes(a,u){a.addFieldOffset(2,u,0)}static createNodesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startNodesVector(a,u){a.startVector(4,u,4)}static addMaxNodeIndex(a,u){a.addFieldInt32(3,u,0)}static addNodeEdges(a,u){a.addFieldOffset(4,u,0)}static createNodeEdgesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startNodeEdgesVector(a,u){a.startVector(4,u,4)}static addInputs(a,u){a.addFieldOffset(5,u,0)}static createInputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startInputsVector(a,u){a.startVector(4,u,4)}static addOutputs(a,u){a.addFieldOffset(6,u,0)}static createOutputsVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startOutputsVector(a,u){a.startVector(4,u,4)}static addSparseInitializers(a,u){a.addFieldOffset(7,u,0)}static createSparseInitializersVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startSparseInitializersVector(a,u){a.startVector(4,u,4)}static endGraph(a){return a.endObject()}static createGraph(a,u,l,f,c,p,b,h,g){return o.startGraph(a),o.addInitializers(a,u),o.addNodeArgs(a,l),o.addNodes(a,f),o.addMaxNodeIndex(a,c),o.addNodeEdges(a,p),o.addInputs(a,b),o.addOutputs(a,h),o.addSparseInitializers(a,g),o.endGraph(a)}}i.Graph=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsModel(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsModel(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}irVersion(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}opsetImport(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.OperatorSetId).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}opsetImportLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}producerName(a){let u=this.bb.__offset(this.bb_pos,8);return u?this.bb.__string(this.bb_pos+u,a):null}producerVersion(a){let u=this.bb.__offset(this.bb_pos,10);return u?this.bb.__string(this.bb_pos+u,a):null}domain(a){let u=this.bb.__offset(this.bb_pos,12);return u?this.bb.__string(this.bb_pos+u,a):null}modelVersion(){let a=this.bb.__offset(this.bb_pos,14);return a?this.bb.readInt64(this.bb_pos+a):this.bb.createLong(0,0)}docString(a){let u=this.bb.__offset(this.bb_pos,16);return u?this.bb.__string(this.bb_pos+u,a):null}graph(a){let u=this.bb.__offset(this.bb_pos,18);return u?(a||new 
e.experimental.fbs.Graph).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}graphDocString(a){let u=this.bb.__offset(this.bb_pos,20);return u?this.bb.__string(this.bb_pos+u,a):null}static startModel(a){a.startObject(9)}static addIrVersion(a,u){a.addFieldInt64(0,u,a.createLong(0,0))}static addOpsetImport(a,u){a.addFieldOffset(1,u,0)}static createOpsetImportVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startOpsetImportVector(a,u){a.startVector(4,u,4)}static addProducerName(a,u){a.addFieldOffset(2,u,0)}static addProducerVersion(a,u){a.addFieldOffset(3,u,0)}static addDomain(a,u){a.addFieldOffset(4,u,0)}static addModelVersion(a,u){a.addFieldInt64(5,u,a.createLong(0,0))}static addDocString(a,u){a.addFieldOffset(6,u,0)}static addGraph(a,u){a.addFieldOffset(7,u,0)}static addGraphDocString(a,u){a.addFieldOffset(8,u,0)}static endModel(a){return a.endObject()}static createModel(a,u,l,f,c,p,b,h,g,T){return o.startModel(a),o.addIrVersion(a,u),o.addOpsetImport(a,l),o.addProducerName(a,f),o.addProducerVersion(a,c),o.addDomain(a,p),o.addModelVersion(a,b),o.addDocString(a,h),o.addGraph(a,g),o.addGraphDocString(a,T),o.endModel(a)}}i.Model=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsKernelCreateInfos(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsKernelCreateInfos(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}nodeIndices(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.readUint32(this.bb.__vector(this.bb_pos+u)+a*4):0}nodeIndicesLength(){let a=this.bb.__offset(this.bb_pos,4);return a?this.bb.__vector_len(this.bb_pos+a):0}nodeIndicesArray(){let a=this.bb.__offset(this.bb_pos,4);return a?new Uint32Array(this.bb.bytes().buffer,this.bb.bytes().byteOffset+this.bb.__vector(this.bb_pos+a),this.bb.__vector_len(this.bb_pos+a)):null}kernelDefHashes(a){let u=this.bb.__offset(this.bb_pos,6);return u?this.bb.readUint64(this.bb.__vector(this.bb_pos+u)+a*8):this.bb.createLong(0,0)}kernelDefHashesLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}static startKernelCreateInfos(a){a.startObject(2)}static addNodeIndices(a,u){a.addFieldOffset(0,u,0)}static createNodeIndicesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addInt32(u[l]);return a.endVector()}static startNodeIndicesVector(a,u){a.startVector(4,u,4)}static addKernelDefHashes(a,u){a.addFieldOffset(1,u,0)}static createKernelDefHashesVector(a,u){a.startVector(8,u.length,8);for(let l=u.length-1;l>=0;l--)a.addInt64(u[l]);return a.endVector()}static startKernelDefHashesVector(a,u){a.startVector(8,u,8)}static endKernelCreateInfos(a){return a.endObject()}static createKernelCreateInfos(a,u,l){return o.startKernelCreateInfos(a),o.addNodeIndices(a,u),o.addKernelDefHashes(a,l),o.endKernelCreateInfos(a)}}i.KernelCreateInfos=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSubGraphSessionState(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSubGraphSessionState(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new 
o).__init(a.readInt32(a.position())+a.position(),a)}graphId(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}sessionState(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.SessionState).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startSubGraphSessionState(a){a.startObject(2)}static addGraphId(a,u){a.addFieldOffset(0,u,0)}static addSessionState(a,u){a.addFieldOffset(1,u,0)}static endSubGraphSessionState(a){let u=a.endObject();return a.requiredField(u,4),u}static createSubGraphSessionState(a,u,l){return o.startSubGraphSessionState(a),o.addGraphId(a,u),o.addSessionState(a,l),o.endSubGraphSessionState(a)}}i.SubGraphSessionState=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsSessionState(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsSessionState(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}kernels(a){let u=this.bb.__offset(this.bb_pos,4);return u?(a||new e.experimental.fbs.KernelCreateInfos).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}subGraphSessionStates(a,u){let l=this.bb.__offset(this.bb_pos,6);return l?(u||new e.experimental.fbs.SubGraphSessionState).__init(this.bb.__indirect(this.bb.__vector(this.bb_pos+l)+a*4),this.bb):null}subGraphSessionStatesLength(){let a=this.bb.__offset(this.bb_pos,6);return a?this.bb.__vector_len(this.bb_pos+a):0}static startSessionState(a){a.startObject(2)}static addKernels(a,u){a.addFieldOffset(0,u,0)}static addSubGraphSessionStates(a,u){a.addFieldOffset(1,u,0)}static createSubGraphSessionStatesVector(a,u){a.startVector(4,u.length,4);for(let l=u.length-1;l>=0;l--)a.addOffset(u[l]);return a.endVector()}static startSubGraphSessionStatesVector(a,u){a.startVector(4,u,4)}static endSessionState(a){return a.endObject()}static createSessionState(a,u,l){return o.startSessionState(a),o.addKernels(a,u),o.addSubGraphSessionStates(a,l),o.endSessionState(a)}}i.SessionState=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={});(e=>{let r;(t=>{let n;(i=>{class o{constructor(){this.bb=null;this.bb_pos=0}__init(a,u){return this.bb_pos=a,this.bb=u,this}static getRootAsInferenceSession(a,u){return(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static getSizePrefixedRootAsInferenceSession(a,u){return a.setPosition(a.position()+k.SIZE_PREFIX_LENGTH),(u||new o).__init(a.readInt32(a.position())+a.position(),a)}static bufferHasIdentifier(a){return a.__has_identifier("ORTM")}ortVersion(a){let u=this.bb.__offset(this.bb_pos,4);return u?this.bb.__string(this.bb_pos+u,a):null}model(a){let u=this.bb.__offset(this.bb_pos,6);return u?(a||new e.experimental.fbs.Model).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}sessionState(a){let u=this.bb.__offset(this.bb_pos,8);return u?(a||new e.experimental.fbs.SessionState).__init(this.bb.__indirect(this.bb_pos+u),this.bb):null}static startInferenceSession(a){a.startObject(3)}static addOrtVersion(a,u){a.addFieldOffset(0,u,0)}static addModel(a,u){a.addFieldOffset(1,u,0)}static addSessionState(a,u){a.addFieldOffset(2,u,0)}static endInferenceSession(a){return a.endObject()}static finishInferenceSessionBuffer(a,u){a.finish(u,"ORTM")}static finishSizePrefixedInferenceSessionBuffer(a,u){a.finish(u,"ORTM",!0)}static createInferenceSession(a,u,l,f){return 
o.startInferenceSession(a),o.addOrtVersion(a,u),o.addModel(a,l),o.addSessionState(a,f),o.endInferenceSession(a)}}i.InferenceSession=o})(n=t.fbs||={})})(r=e.experimental||={})})(ee||={})});var Cc=Je((HS,Ec)=>{"use strict";Ec.exports=mv;function mv(r,e){for(var n=new Array(arguments.length-1),t=0,o=2,i=!0;o{"use strict";var Do=Bc;Do.length=function(e){var n=e.length;if(!n)return 0;for(var t=0;--n%4>1&&e.charAt(n)==="=";)++t;return Math.ceil(e.length*3)/4-t};var dn=new Array(64),Dc=new Array(123);for(Ft=0;Ft<64;)Dc[dn[Ft]=Ft<26?Ft+65:Ft<52?Ft+71:Ft<62?Ft-4:Ft-59|43]=Ft++;var Ft;Do.encode=function(e,n,t){for(var o=null,i=[],s=0,a=0,u;n>2],u=(l&3)<<4,a=1;break;case 1:i[s++]=dn[u|l>>4],u=(l&15)<<2,a=2;break;case 2:i[s++]=dn[u|l>>6],i[s++]=dn[l&63],a=0;break}s>8191&&((o||(o=[])).push(String.fromCharCode.apply(String,i)),s=0)}return a&&(i[s++]=dn[u],i[s++]=61,a===1&&(i[s++]=61)),o?(s&&o.push(String.fromCharCode.apply(String,i.slice(0,s))),o.join("")):String.fromCharCode.apply(String,i.slice(0,s))};var kc="invalid encoding";Do.decode=function(e,n,t){for(var o=t,i=0,s,a=0;a1)break;if((u=Dc[u])===void 0)throw Error(kc);switch(i){case 0:s=u,i=1;break;case 1:n[t++]=s<<2|(u&48)>>4,s=u,i=2;break;case 2:n[t++]=(s&15)<<4|(u&60)>>2,s=u,i=3;break;case 3:n[t++]=(s&3)<<6|u,i=0;break}}if(i===1)throw Error(kc);return t-o};Do.test=function(e){return/^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(e)}});var Nc=Je((jS,Rc)=>{"use strict";Rc.exports=Bo;function Bo(){this._listeners={}}Bo.prototype.on=function(e,n,t){return(this._listeners[e]||(this._listeners[e]=[])).push({fn:n,ctx:t||this}),this};Bo.prototype.off=function(e,n){if(e===void 0)this._listeners={};else if(n===void 0)this._listeners[e]=[];else for(var t=this._listeners[e],o=0;o{"use strict";Uc.exports=zc(zc);function zc(r){return typeof Float32Array<"u"?function(){var e=new Float32Array([-0]),n=new Uint8Array(e.buffer),t=n[3]===128;function o(u,l,f){e[0]=u,l[f]=n[0],l[f+1]=n[1],l[f+2]=n[2],l[f+3]=n[3]}function i(u,l,f){e[0]=u,l[f]=n[3],l[f+1]=n[2],l[f+2]=n[1],l[f+3]=n[0]}r.writeFloatLE=t?o:i,r.writeFloatBE=t?i:o;function s(u,l){return n[0]=u[l],n[1]=u[l+1],n[2]=u[l+2],n[3]=u[l+3],e[0]}function a(u,l){return n[3]=u[l],n[2]=u[l+1],n[1]=u[l+2],n[0]=u[l+3],e[0]}r.readFloatLE=t?s:a,r.readFloatBE=t?a:s}():function(){function e(t,o,i,s){var a=o<0?1:0;if(a&&(o=-o),o===0)t(1/o>0?0:2147483648,i,s);else if(isNaN(o))t(2143289344,i,s);else if(o>34028234663852886e22)t((a<<31|2139095040)>>>0,i,s);else if(o<11754943508222875e-54)t((a<<31|Math.round(o/1401298464324817e-60))>>>0,i,s);else{var u=Math.floor(Math.log(o)/Math.LN2),l=Math.round(o*Math.pow(2,-u)*8388608)&8388607;t((a<<31|u+127<<23|l)>>>0,i,s)}}r.writeFloatLE=e.bind(null,Fc),r.writeFloatBE=e.bind(null,Mc);function n(t,o,i){var s=t(o,i),a=(s>>31)*2+1,u=s>>>23&255,l=s&8388607;return u===255?l?NaN:a*(1/0):u===0?a*1401298464324817e-60*l:a*Math.pow(2,u-150)*(l+8388608)}r.readFloatLE=n.bind(null,Vc),r.readFloatBE=n.bind(null,Gc)}(),typeof Float64Array<"u"?function(){var e=new Float64Array([-0]),n=new Uint8Array(e.buffer),t=n[7]===128;function o(u,l,f){e[0]=u,l[f]=n[0],l[f+1]=n[1],l[f+2]=n[2],l[f+3]=n[3],l[f+4]=n[4],l[f+5]=n[5],l[f+6]=n[6],l[f+7]=n[7]}function i(u,l,f){e[0]=u,l[f]=n[7],l[f+1]=n[6],l[f+2]=n[5],l[f+3]=n[4],l[f+4]=n[3],l[f+5]=n[2],l[f+6]=n[1],l[f+7]=n[0]}r.writeDoubleLE=t?o:i,r.writeDoubleBE=t?i:o;function s(u,l){return n[0]=u[l],n[1]=u[l+1],n[2]=u[l+2],n[3]=u[l+3],n[4]=u[l+4],n[5]=u[l+5],n[6]=u[l+6],n[7]=u[l+7],e[0]}function a(u,l){return 
n[7]=u[l],n[6]=u[l+1],n[5]=u[l+2],n[4]=u[l+3],n[3]=u[l+4],n[2]=u[l+5],n[1]=u[l+6],n[0]=u[l+7],e[0]}r.readDoubleLE=t?s:a,r.readDoubleBE=t?a:s}():function(){function e(t,o,i,s,a,u){var l=s<0?1:0;if(l&&(s=-s),s===0)t(0,a,u+o),t(1/s>0?0:2147483648,a,u+i);else if(isNaN(s))t(0,a,u+o),t(2146959360,a,u+i);else if(s>17976931348623157e292)t(0,a,u+o),t((l<<31|2146435072)>>>0,a,u+i);else{var f;if(s<22250738585072014e-324)f=s/5e-324,t(f>>>0,a,u+o),t((l<<31|f/4294967296)>>>0,a,u+i);else{var c=Math.floor(Math.log(s)/Math.LN2);c===1024&&(c=1023),f=s*Math.pow(2,-c),t(f*4503599627370496>>>0,a,u+o),t((l<<31|c+1023<<20|f*1048576&1048575)>>>0,a,u+i)}}}r.writeDoubleLE=e.bind(null,Fc,0,4),r.writeDoubleBE=e.bind(null,Mc,4,0);function n(t,o,i,s,a){var u=t(s,a+o),l=t(s,a+i),f=(l>>31)*2+1,c=l>>>20&2047,p=4294967296*(l&1048575)+u;return c===2047?p?NaN:f*(1/0):c===0?f*5e-324*p:f*Math.pow(2,c-1075)*(p+4503599627370496)}r.readDoubleLE=n.bind(null,Vc,0,4),r.readDoubleBE=n.bind(null,Gc,4,0)}(),r}function Fc(r,e,n){e[n]=r&255,e[n+1]=r>>>8&255,e[n+2]=r>>>16&255,e[n+3]=r>>>24}function Mc(r,e,n){e[n]=r>>>24,e[n+1]=r>>>16&255,e[n+2]=r>>>8&255,e[n+3]=r&255}function Vc(r,e){return(r[e]|r[e+1]<<8|r[e+2]<<16|r[e+3]<<24)>>>0}function Gc(r,e){return(r[e]<<24|r[e+1]<<16|r[e+2]<<8|r[e+3])>>>0}});var Hc=Je((exports,module)=>{"use strict";module.exports=inquire;function inquire(moduleName){try{var mod=eval("quire".replace(/^/,"re"))(moduleName);if(mod&&(mod.length||Object.keys(mod).length))return mod}catch(r){}return null}});var jc=Je(qc=>{"use strict";var Ba=qc;Ba.length=function(e){for(var n=0,t=0,o=0;o191&&u<224?s[a++]=(u&31)<<6|e[n++]&63:u>239&&u<365?(u=((u&7)<<18|(e[n++]&63)<<12|(e[n++]&63)<<6|e[n++]&63)-65536,s[a++]=55296+(u>>10),s[a++]=56320+(u&1023)):s[a++]=(u&15)<<12|(e[n++]&63)<<6|e[n++]&63,a>8191&&((i||(i=[])).push(String.fromCharCode.apply(String,s)),a=0);return i?(a&&i.push(String.fromCharCode.apply(String,s.slice(0,a))),i.join("")):String.fromCharCode.apply(String,s.slice(0,a))};Ba.write=function(e,n,t){for(var o=t,i,s,a=0;a>6|192,n[t++]=i&63|128):(i&64512)===55296&&((s=e.charCodeAt(a+1))&64512)===56320?(i=65536+((i&1023)<<10)+(s&1023),++a,n[t++]=i>>18|240,n[t++]=i>>12&63|128,n[t++]=i>>6&63|128,n[t++]=i&63|128):(n[t++]=i>>12|224,n[t++]=i>>6&63|128,n[t++]=i&63|128);return t-o}});var Xc=Je((ZS,Kc)=>{"use strict";Kc.exports=hv;function hv(r,e,n){var t=n||8192,o=t>>>1,i=null,s=t;return function(u){if(u<1||u>o)return r(u);s+u>t&&(i=r(t),s=0);var l=e.call(i,s,s+=u);return s&7&&(s=(s|7)+1),l}}});var Yc=Je((YS,Zc)=>{"use strict";Zc.exports=at;var kn=_r();function at(r,e){this.lo=r>>>0,this.hi=e>>>0}var Rr=at.zero=new at(0,0);Rr.toNumber=function(){return 0};Rr.zzEncode=Rr.zzDecode=function(){return this};Rr.length=function(){return 1};var gv=at.zeroHash="\0\0\0\0\0\0\0\0";at.fromNumber=function(e){if(e===0)return Rr;var n=e<0;n&&(e=-e);var t=e>>>0,o=(e-t)/4294967296>>>0;return n&&(o=~o>>>0,t=~t>>>0,++t>4294967295&&(t=0,++o>4294967295&&(o=0))),new at(t,o)};at.from=function(e){if(typeof e=="number")return at.fromNumber(e);if(kn.isString(e))if(kn.Long)e=kn.Long.fromString(e);else return at.fromNumber(parseInt(e,10));return e.low||e.high?new at(e.low>>>0,e.high>>>0):Rr};at.prototype.toNumber=function(e){if(!e&&this.hi>>>31){var n=~this.lo+1>>>0,t=~this.hi>>>0;return n||(t=t+1>>>0),-(n+t*4294967296)}return this.lo+this.hi*4294967296};at.prototype.toLong=function(e){return kn.Long?new kn.Long(this.lo|0,this.hi|0,!!e):{low:this.lo|0,high:this.hi|0,unsigned:!!e}};var Tr=String.prototype.charCodeAt;at.fromHash=function(e){return 
e===gv?Rr:new at((Tr.call(e,0)|Tr.call(e,1)<<8|Tr.call(e,2)<<16|Tr.call(e,3)<<24)>>>0,(Tr.call(e,4)|Tr.call(e,5)<<8|Tr.call(e,6)<<16|Tr.call(e,7)<<24)>>>0)};at.prototype.toHash=function(){return String.fromCharCode(this.lo&255,this.lo>>>8&255,this.lo>>>16&255,this.lo>>>24,this.hi&255,this.hi>>>8&255,this.hi>>>16&255,this.hi>>>24)};at.prototype.zzEncode=function(){var e=this.hi>>31;return this.hi=((this.hi<<1|this.lo>>>31)^e)>>>0,this.lo=(this.lo<<1^e)>>>0,this};at.prototype.zzDecode=function(){var e=-(this.lo&1);return this.lo=((this.lo>>>1|this.hi<<31)^e)>>>0,this.hi=(this.hi>>>1^e)>>>0,this};at.prototype.length=function(){var e=this.lo,n=(this.lo>>>28|this.hi<<4)>>>0,t=this.hi>>>24;return t===0?n===0?e<16384?e<128?1:2:e<2097152?3:4:n<16384?n<128?5:6:n<2097152?7:8:t<128?9:10}});var _r=Je(La=>{"use strict";var re=La;re.asPromise=Cc();re.base64=Lc();re.EventEmitter=Nc();re.float=Wc();re.inquire=Hc();re.utf8=jc();re.pool=Xc();re.LongBits=Yc();re.isNode=!!(typeof global<"u"&&global&&global.process&&global.process.versions&&global.process.versions.node);re.global=re.isNode&&global||typeof window<"u"&&window||typeof self<"u"&&self||La;re.emptyArray=Object.freeze?Object.freeze([]):[];re.emptyObject=Object.freeze?Object.freeze({}):{};re.isInteger=Number.isInteger||function(e){return typeof e=="number"&&isFinite(e)&&Math.floor(e)===e};re.isString=function(e){return typeof e=="string"||e instanceof String};re.isObject=function(e){return e&&typeof e=="object"};re.isset=re.isSet=function(e,n){var t=e[n];return t!=null&&e.hasOwnProperty(n)?typeof t!="object"||(Array.isArray(t)?t.length:Object.keys(t).length)>0:!1};re.Buffer=function(){try{var r=re.inquire("buffer").Buffer;return r.prototype.utf8Write?r:null}catch{return null}}();re._Buffer_from=null;re._Buffer_allocUnsafe=null;re.newBuffer=function(e){return typeof e=="number"?re.Buffer?re._Buffer_allocUnsafe(e):new re.Array(e):re.Buffer?re._Buffer_from(e):typeof Uint8Array>"u"?e:new Uint8Array(e)};re.Array=typeof Uint8Array<"u"?Uint8Array:Array;re.Long=re.global.dcodeIO&&re.global.dcodeIO.Long||re.global.Long||re.inquire("long");re.key2Re=/^true|false|0|1$/;re.key32Re=/^-?(?:0|[1-9][0-9]*)$/;re.key64Re=/^(?:[\\x00-\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;re.longToHash=function(e){return e?re.LongBits.from(e).toHash():re.LongBits.zeroHash};re.longFromHash=function(e,n){var t=re.LongBits.fromHash(e);return re.Long?re.Long.fromBits(t.lo,t.hi,n):t.toNumber(!!n)};function Jc(r,e,n){for(var t=Object.keys(e),o=0;o-1;--i)if(n[o[i]]===1&&this[o[i]]!==void 0&&this[o[i]]!==null)return o[i]}};re.oneOfSetter=function(e){return function(n){for(var t=0;t{"use strict";nf.exports=Pe;var Ct=_r(),Ra,Lo=Ct.LongBits,ef=Ct.base64,tf=Ct.utf8;function Dn(r,e,n){this.fn=r,this.len=e,this.next=void 0,this.val=n}function za(){}function bv(r){this.head=r.head,this.tail=r.tail,this.len=r.len,this.next=r.states}function Pe(){this.len=0,this.head=new Dn(za,0,0),this.tail=this.head,this.states=null}var rf=function(){return Ct.Buffer?function(){return(Pe.create=function(){return new Ra})()}:function(){return new Pe}};Pe.create=rf();Pe.alloc=function(e){return new Ct.Array(e)};Ct.Array!==Array&&(Pe.alloc=Ct.pool(Pe.alloc,Ct.Array.prototype.subarray));Pe.prototype._push=function(e,n,t){return this.tail=this.tail.next=new Dn(e,n,t),this.len+=n,this};function Fa(r,e,n){e[n]=r&255}function yv(r,e,n){for(;r>127;)e[n++]=r&127|128,r>>>=7;e[n]=r}function Ma(r,e){this.len=r,this.next=void 0,this.val=e}Ma.prototype=Object.create(Dn.prototype);Ma.prototype.fn=yv;Pe.prototype.uint32=function(e){return 
this.len+=(this.tail=this.tail.next=new Ma((e=e>>>0)<128?1:e<16384?2:e<2097152?3:e<268435456?4:5,e)).len,this};Pe.prototype.int32=function(e){return e<0?this._push(Va,10,Lo.fromNumber(e)):this.uint32(e)};Pe.prototype.sint32=function(e){return this.uint32((e<<1^e>>31)>>>0)};function Va(r,e,n){for(;r.hi;)e[n++]=r.lo&127|128,r.lo=(r.lo>>>7|r.hi<<25)>>>0,r.hi>>>=7;for(;r.lo>127;)e[n++]=r.lo&127|128,r.lo=r.lo>>>7;e[n++]=r.lo}Pe.prototype.uint64=function(e){var n=Lo.from(e);return this._push(Va,n.length(),n)};Pe.prototype.int64=Pe.prototype.uint64;Pe.prototype.sint64=function(e){var n=Lo.from(e).zzEncode();return this._push(Va,n.length(),n)};Pe.prototype.bool=function(e){return this._push(Fa,1,e?1:0)};function Na(r,e,n){e[n]=r&255,e[n+1]=r>>>8&255,e[n+2]=r>>>16&255,e[n+3]=r>>>24}Pe.prototype.fixed32=function(e){return this._push(Na,4,e>>>0)};Pe.prototype.sfixed32=Pe.prototype.fixed32;Pe.prototype.fixed64=function(e){var n=Lo.from(e);return this._push(Na,4,n.lo)._push(Na,4,n.hi)};Pe.prototype.sfixed64=Pe.prototype.fixed64;Pe.prototype.float=function(e){return this._push(Ct.float.writeFloatLE,4,e)};Pe.prototype.double=function(e){return this._push(Ct.float.writeDoubleLE,8,e)};var xv=Ct.Array.prototype.set?function(e,n,t){n.set(e,t)}:function(e,n,t){for(var o=0;o>>0;if(!n)return this._push(Fa,1,0);if(Ct.isString(e)){var t=Pe.alloc(n=ef.length(e));ef.decode(e,t,0),e=t}return this.uint32(n)._push(xv,n,e)};Pe.prototype.string=function(e){var n=tf.length(e);return n?this.uint32(n)._push(tf.write,n,e):this._push(Fa,1,0)};Pe.prototype.fork=function(){return this.states=new bv(this),this.head=this.tail=new Dn(za,0,0),this.len=0,this};Pe.prototype.reset=function(){return this.states?(this.head=this.states.head,this.tail=this.states.tail,this.len=this.states.len,this.states=this.states.next):(this.head=this.tail=new Dn(za,0,0),this.len=0),this};Pe.prototype.ldelim=function(){var e=this.head,n=this.tail,t=this.len;return this.reset().uint32(t),t&&(this.tail.next=e.next,this.tail=n,this.len+=t),this};Pe.prototype.finish=function(){for(var e=this.head.next,n=this.constructor.alloc(this.len),t=0;e;)e.fn(e.val,n,t),t+=e.len,e=e.next;return n};Pe._configure=function(r){Ra=r,Pe.create=rf(),Ra._configure()}});var sf=Je((e$,af)=>{"use strict";af.exports=Jt;var of=Ga();(Jt.prototype=Object.create(of.prototype)).constructor=Jt;var Ir=_r();function Jt(){of.call(this)}Jt._configure=function(){Jt.alloc=Ir._Buffer_allocUnsafe,Jt.writeBytesBuffer=Ir.Buffer&&Ir.Buffer.prototype instanceof Uint8Array&&Ir.Buffer.prototype.set.name==="set"?function(e,n,t){n.set(e,t)}:function(e,n,t){if(e.copy)e.copy(n,t,0,e.length);else for(var o=0;o>>0;return this.uint32(n),n&&this._push(Jt.writeBytesBuffer,n,e),this};function vv(r,e,n){r.length<40?Ir.utf8.write(r,e,n):e.utf8Write?e.utf8Write(r,n):e.write(r,n)}Jt.prototype.string=function(e){var n=Ir.Buffer.byteLength(e);return this.uint32(n),n&&this._push(vv,n,e),this};Jt._configure()});var Ha=Je((t$,df)=>{"use strict";df.exports=Ke;var Mt=_r(),Wa,cf=Mt.LongBits,wv=Mt.utf8;function Vt(r,e){return RangeError("index out of range: "+r.pos+" + "+(e||1)+" > "+r.len)}function Ke(r){this.buf=r,this.pos=0,this.len=r.length}var uf=typeof Uint8Array<"u"?function(e){if(e instanceof Uint8Array||Array.isArray(e))return new Ke(e);throw Error("illegal buffer")}:function(e){if(Array.isArray(e))return new Ke(e);throw Error("illegal buffer")},ff=function(){return Mt.Buffer?function(n){return(Ke.create=function(o){return Mt.Buffer.isBuffer(o)?new 
Wa(o):uf(o)})(n)}:uf};Ke.create=ff();Ke.prototype._slice=Mt.Array.prototype.subarray||Mt.Array.prototype.slice;Ke.prototype.uint32=function(){var e=4294967295;return function(){if(e=(this.buf[this.pos]&127)>>>0,this.buf[this.pos++]<128||(e=(e|(this.buf[this.pos]&127)<<7)>>>0,this.buf[this.pos++]<128)||(e=(e|(this.buf[this.pos]&127)<<14)>>>0,this.buf[this.pos++]<128)||(e=(e|(this.buf[this.pos]&127)<<21)>>>0,this.buf[this.pos++]<128)||(e=(e|(this.buf[this.pos]&15)<<28)>>>0,this.buf[this.pos++]<128))return e;if((this.pos+=5)>this.len)throw this.pos=this.len,Vt(this,10);return e}}();Ke.prototype.int32=function(){return this.uint32()|0};Ke.prototype.sint32=function(){var e=this.uint32();return e>>>1^-(e&1)|0};function Ua(){var r=new cf(0,0),e=0;if(this.len-this.pos>4){for(;e<4;++e)if(r.lo=(r.lo|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r;if(r.lo=(r.lo|(this.buf[this.pos]&127)<<28)>>>0,r.hi=(r.hi|(this.buf[this.pos]&127)>>4)>>>0,this.buf[this.pos++]<128)return r;e=0}else{for(;e<3;++e){if(this.pos>=this.len)throw Vt(this);if(r.lo=(r.lo|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r}return r.lo=(r.lo|(this.buf[this.pos++]&127)<>>0,r}if(this.len-this.pos>4){for(;e<5;++e)if(r.hi=(r.hi|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r}else for(;e<5;++e){if(this.pos>=this.len)throw Vt(this);if(r.hi=(r.hi|(this.buf[this.pos]&127)<>>0,this.buf[this.pos++]<128)return r}throw Error("invalid varint encoding")}Ke.prototype.bool=function(){return this.uint32()!==0};function Ro(r,e){return(r[e-4]|r[e-3]<<8|r[e-2]<<16|r[e-1]<<24)>>>0}Ke.prototype.fixed32=function(){if(this.pos+4>this.len)throw Vt(this,4);return Ro(this.buf,this.pos+=4)};Ke.prototype.sfixed32=function(){if(this.pos+4>this.len)throw Vt(this,4);return Ro(this.buf,this.pos+=4)|0};function lf(){if(this.pos+8>this.len)throw Vt(this,8);return new cf(Ro(this.buf,this.pos+=4),Ro(this.buf,this.pos+=4))}Ke.prototype.float=function(){if(this.pos+4>this.len)throw Vt(this,4);var e=Mt.float.readFloatLE(this.buf,this.pos);return this.pos+=4,e};Ke.prototype.double=function(){if(this.pos+8>this.len)throw Vt(this,4);var e=Mt.float.readDoubleLE(this.buf,this.pos);return this.pos+=8,e};Ke.prototype.bytes=function(){var e=this.uint32(),n=this.pos,t=this.pos+e;if(t>this.len)throw Vt(this,e);if(this.pos+=e,Array.isArray(this.buf))return this.buf.slice(n,t);if(n===t){var o=Mt.Buffer;return o?o.alloc(0):new this.buf.constructor(0)}return this._slice.call(this.buf,n,t)};Ke.prototype.string=function(){var e=this.bytes();return wv.read(e,0,e.length)};Ke.prototype.skip=function(e){if(typeof e=="number"){if(this.pos+e>this.len)throw Vt(this,e);this.pos+=e}else do if(this.pos>=this.len)throw Vt(this);while(this.buf[this.pos++]&128);return this};Ke.prototype.skipType=function(r){switch(r){case 0:this.skip();break;case 1:this.skip(8);break;case 2:this.skip(this.uint32());break;case 3:for(;(r=this.uint32()&7)!==4;)this.skipType(r);break;case 5:this.skip(4);break;default:throw Error("invalid wire type "+r+" at offset "+this.pos)}return this};Ke._configure=function(r){Wa=r,Ke.create=ff(),Wa._configure();var e=Mt.Long?"toLong":"toNumber";Mt.merge(Ke.prototype,{int64:function(){return Ua.call(this)[e](!1)},uint64:function(){return Ua.call(this)[e](!0)},sint64:function(){return Ua.call(this).zzDecode()[e](!1)},fixed64:function(){return lf.call(this)[e](!0)},sfixed64:function(){return lf.call(this)[e](!1)}})}});var gf=Je((r$,hf)=>{"use strict";hf.exports=Nr;var mf=Ha();(Nr.prototype=Object.create(mf.prototype)).constructor=Nr;var 
pf=_r();function Nr(r){mf.call(this,r)}Nr._configure=function(){pf.Buffer&&(Nr.prototype._slice=pf.Buffer.prototype.slice)};Nr.prototype.string=function(){var e=this.uint32();return this.buf.utf8Slice?this.buf.utf8Slice(this.pos,this.pos=Math.min(this.pos+e,this.len)):this.buf.toString("utf-8",this.pos,this.pos=Math.min(this.pos+e,this.len))};Nr._configure()});var yf=Je((n$,bf)=>{"use strict";bf.exports=Bn;var qa=_r();(Bn.prototype=Object.create(qa.EventEmitter.prototype)).constructor=Bn;function Bn(r,e,n){if(typeof r!="function")throw TypeError("rpcImpl must be a function");qa.EventEmitter.call(this),this.rpcImpl=r,this.requestDelimited=!!e,this.responseDelimited=!!n}Bn.prototype.rpcCall=function r(e,n,t,o,i){if(!o)throw TypeError("request must be specified");var s=this;if(!i)return qa.asPromise(r,s,e,n,t,o);if(!s.rpcImpl){setTimeout(function(){i(Error("already ended"))},0);return}try{return s.rpcImpl(e,n[s.requestDelimited?"encodeDelimited":"encode"](o).finish(),function(u,l){if(u)return s.emit("error",u,e),i(u);if(l===null){s.end(!0);return}if(!(l instanceof t))try{l=t[s.responseDelimited?"decodeDelimited":"decode"](l)}catch(f){return s.emit("error",f,e),i(f)}return s.emit("data",l,e),i(null,l)})}catch(a){s.emit("error",a,e),setTimeout(function(){i(a)},0);return}};Bn.prototype.end=function(e){return this.rpcImpl&&(e||this.rpcImpl(null,null,null),this.rpcImpl=null,this.emit("end").off()),this}});var vf=Je(xf=>{"use strict";var Tv=xf;Tv.Service=yf()});var Tf=Je((i$,wf)=>{"use strict";wf.exports={}});var Sf=Je(If=>{"use strict";var vt=If;vt.build="minimal";vt.Writer=Ga();vt.BufferWriter=sf();vt.Reader=Ha();vt.BufferReader=gf();vt.util=_r();vt.rpc=vf();vt.roots=Tf();vt.configure=_f;function _f(){vt.util._configure(),vt.Writer._configure(vt.BufferWriter),vt.Reader._configure(vt.BufferReader)}_f()});var Af=Je((s$,$f)=>{"use strict";$f.exports=Sf()});var pn=Je((u$,Pf)=>{"use strict";var Ve=Af(),H=Ve.Reader,Xe=Ve.Writer,A=Ve.util,I=Ve.roots.default||(Ve.roots.default={});I.onnx=function(){var r={};return r.Version=function(){var e={},n=Object.create(e);return n[e[0]="_START_VERSION"]=0,n[e[1]="IR_VERSION_2017_10_10"]=1,n[e[2]="IR_VERSION_2017_10_30"]=2,n[e[3]="IR_VERSION_2017_11_3"]=3,n[e[4]="IR_VERSION_2019_1_22"]=4,n[e[5]="IR_VERSION_2019_3_18"]=5,n[e[6]="IR_VERSION_2019_9_19"]=6,n[e[7]="IR_VERSION_2020_5_8"]=7,n[e[8]="IR_VERSION_2021_7_30"]=8,n[e[9]="IR_VERSION"]=9,n}(),r.AttributeProto=function(){function e(n){if(this.floats=[],this.ints=[],this.strings=[],this.tensors=[],this.graphs=[],this.sparseTensors=[],this.typeProtos=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.name=t.string();break}case 21:{s.refAttrName=t.string();break}case 13:{s.docString=t.string();break}case 20:{s.type=t.int32();break}case 2:{s.f=t.float();break}case 3:{s.i=t.int64();break}case 4:{s.s=t.bytes();break}case 5:{s.t=I.onnx.TensorProto.decode(t,t.uint32());break}case 6:{s.g=I.onnx.GraphProto.decode(t,t.uint32());break}case 22:{s.sparseTensor=I.onnx.SparseTensorProto.decode(t,t.uint32());break}case 14:{s.tp=I.onnx.TypeProto.decode(t,t.uint32());break}case 7:{if(s.floats&&s.floats.length||(s.floats=[]),(a&7)===2)for(var u=t.uint32()+t.pos;t.pos>>0,t.i.high>>>0).toNumber())),t.s!=null&&(typeof t.s=="string"?A.base64.decode(t.s,o.s=A.newBuffer(A.base64.length(t.s)),0):t.s.length>=0&&(o.s=t.s)),t.t!=null){if(typeof t.t!="object")throw TypeError(".onnx.AttributeProto.t: object expected");o.t=I.onnx.TensorProto.fromObject(t.t)}if(t.g!=null){if(typeof t.g!="object")throw TypeError(".onnx.AttributeProto.g: object 
expected");o.g=I.onnx.GraphProto.fromObject(t.g)}if(t.sparseTensor!=null){if(typeof t.sparseTensor!="object")throw TypeError(".onnx.AttributeProto.sparseTensor: object expected");o.sparseTensor=I.onnx.SparseTensorProto.fromObject(t.sparseTensor)}if(t.tp!=null){if(typeof t.tp!="object")throw TypeError(".onnx.AttributeProto.tp: object expected");o.tp=I.onnx.TypeProto.fromObject(t.tp)}if(t.floats){if(!Array.isArray(t.floats))throw TypeError(".onnx.AttributeProto.floats: array expected");o.floats=[];for(var i=0;i>>0,t.ints[i].high>>>0).toNumber())}if(t.strings){if(!Array.isArray(t.strings))throw TypeError(".onnx.AttributeProto.strings: array expected");o.strings=[];for(var i=0;i=0&&(o.strings[i]=t.strings[i])}if(t.tensors){if(!Array.isArray(t.tensors))throw TypeError(".onnx.AttributeProto.tensors: array expected");o.tensors=[];for(var i=0;i>>0,t.i.high>>>0).toNumber():t.i),t.s!=null&&t.hasOwnProperty("s")&&(i.s=o.bytes===String?A.base64.encode(t.s,0,t.s.length):o.bytes===Array?Array.prototype.slice.call(t.s):t.s),t.t!=null&&t.hasOwnProperty("t")&&(i.t=I.onnx.TensorProto.toObject(t.t,o)),t.g!=null&&t.hasOwnProperty("g")&&(i.g=I.onnx.GraphProto.toObject(t.g,o)),t.floats&&t.floats.length){i.floats=[];for(var a=0;a>>0,t.ints[a].high>>>0).toNumber():t.ints[a]}if(t.strings&&t.strings.length){i.strings=[];for(var a=0;a>>3){case 1:{s.name=t.string();break}case 2:{s.type=I.onnx.TypeProto.decode(t,t.uint32());break}case 3:{s.docString=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.name!=null&&t.hasOwnProperty("name")&&!A.isString(t.name))return"name: string expected";if(t.type!=null&&t.hasOwnProperty("type")){var o=I.onnx.TypeProto.verify(t.type);if(o)return"type."+o}return t.docString!=null&&t.hasOwnProperty("docString")&&!A.isString(t.docString)?"docString: string expected":null},e.fromObject=function(t){if(t instanceof I.onnx.ValueInfoProto)return t;var o=new I.onnx.ValueInfoProto;if(t.name!=null&&(o.name=String(t.name)),t.type!=null){if(typeof t.type!="object")throw TypeError(".onnx.ValueInfoProto.type: object expected");o.type=I.onnx.TypeProto.fromObject(t.type)}return t.docString!=null&&(o.docString=String(t.docString)),o},e.toObject=function(t,o){o||(o={});var i={};return o.defaults&&(i.name="",i.type=null,i.docString=""),t.name!=null&&t.hasOwnProperty("name")&&(i.name=t.name),t.type!=null&&t.hasOwnProperty("type")&&(i.type=I.onnx.TypeProto.toObject(t.type,o)),t.docString!=null&&t.hasOwnProperty("docString")&&(i.docString=t.docString),i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.ValueInfoProto"},e}(),r.NodeProto=function(){function e(n){if(this.input=[],this.output=[],this.attribute=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.input&&s.input.length||(s.input=[]),s.input.push(t.string());break}case 2:{s.output&&s.output.length||(s.output=[]),s.output.push(t.string());break}case 3:{s.name=t.string();break}case 4:{s.opType=t.string();break}case 7:{s.domain=t.string();break}case 5:{s.attribute&&s.attribute.length||(s.attribute=[]),s.attribute.push(I.onnx.AttributeProto.decode(t,t.uint32()));break}case 6:{s.docString=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new 
H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.input!=null&&t.hasOwnProperty("input")){if(!Array.isArray(t.input))return"input: array expected";for(var o=0;o>>3){case 1:{s.initialization=I.onnx.GraphProto.decode(t,t.uint32());break}case 2:{s.algorithm=I.onnx.GraphProto.decode(t,t.uint32());break}case 3:{s.initializationBinding&&s.initializationBinding.length||(s.initializationBinding=[]),s.initializationBinding.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}case 4:{s.updateBinding&&s.updateBinding.length||(s.updateBinding=[]),s.updateBinding.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.initialization!=null&&t.hasOwnProperty("initialization")){var o=I.onnx.GraphProto.verify(t.initialization);if(o)return"initialization."+o}if(t.algorithm!=null&&t.hasOwnProperty("algorithm")){var o=I.onnx.GraphProto.verify(t.algorithm);if(o)return"algorithm."+o}if(t.initializationBinding!=null&&t.hasOwnProperty("initializationBinding")){if(!Array.isArray(t.initializationBinding))return"initializationBinding: array expected";for(var i=0;i>>3){case 1:{s.irVersion=t.int64();break}case 8:{s.opsetImport&&s.opsetImport.length||(s.opsetImport=[]),s.opsetImport.push(I.onnx.OperatorSetIdProto.decode(t,t.uint32()));break}case 2:{s.producerName=t.string();break}case 3:{s.producerVersion=t.string();break}case 4:{s.domain=t.string();break}case 5:{s.modelVersion=t.int64();break}case 6:{s.docString=t.string();break}case 7:{s.graph=I.onnx.GraphProto.decode(t,t.uint32());break}case 14:{s.metadataProps&&s.metadataProps.length||(s.metadataProps=[]),s.metadataProps.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}case 20:{s.trainingInfo&&s.trainingInfo.length||(s.trainingInfo=[]),s.trainingInfo.push(I.onnx.TrainingInfoProto.decode(t,t.uint32()));break}case 25:{s.functions&&s.functions.length||(s.functions=[]),s.functions.push(I.onnx.FunctionProto.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.irVersion!=null&&t.hasOwnProperty("irVersion")&&!A.isInteger(t.irVersion)&&!(t.irVersion&&A.isInteger(t.irVersion.low)&&A.isInteger(t.irVersion.high)))return"irVersion: integer|Long expected";if(t.opsetImport!=null&&t.hasOwnProperty("opsetImport")){if(!Array.isArray(t.opsetImport))return"opsetImport: array expected";for(var o=0;o>>0,t.irVersion.high>>>0).toNumber())),t.opsetImport){if(!Array.isArray(t.opsetImport))throw TypeError(".onnx.ModelProto.opsetImport: array expected");o.opsetImport=[];for(var i=0;i>>0,t.modelVersion.high>>>0).toNumber())),t.docString!=null&&(o.docString=String(t.docString)),t.graph!=null){if(typeof t.graph!="object")throw TypeError(".onnx.ModelProto.graph: object expected");o.graph=I.onnx.GraphProto.fromObject(t.graph)}if(t.metadataProps){if(!Array.isArray(t.metadataProps))throw TypeError(".onnx.ModelProto.metadataProps: array expected");o.metadataProps=[];for(var 
i=0;i>>0,t.irVersion.high>>>0).toNumber():t.irVersion),t.producerName!=null&&t.hasOwnProperty("producerName")&&(i.producerName=t.producerName),t.producerVersion!=null&&t.hasOwnProperty("producerVersion")&&(i.producerVersion=t.producerVersion),t.domain!=null&&t.hasOwnProperty("domain")&&(i.domain=t.domain),t.modelVersion!=null&&t.hasOwnProperty("modelVersion")&&(typeof t.modelVersion=="number"?i.modelVersion=o.longs===String?String(t.modelVersion):t.modelVersion:i.modelVersion=o.longs===String?A.Long.prototype.toString.call(t.modelVersion):o.longs===Number?new A.LongBits(t.modelVersion.low>>>0,t.modelVersion.high>>>0).toNumber():t.modelVersion),t.docString!=null&&t.hasOwnProperty("docString")&&(i.docString=t.docString),t.graph!=null&&t.hasOwnProperty("graph")&&(i.graph=I.onnx.GraphProto.toObject(t.graph,o)),t.opsetImport&&t.opsetImport.length){i.opsetImport=[];for(var a=0;a>>3){case 1:{s.key=t.string();break}case 2:{s.value=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){return typeof t!="object"||t===null?"object expected":t.key!=null&&t.hasOwnProperty("key")&&!A.isString(t.key)?"key: string expected":t.value!=null&&t.hasOwnProperty("value")&&!A.isString(t.value)?"value: string expected":null},e.fromObject=function(t){if(t instanceof I.onnx.StringStringEntryProto)return t;var o=new I.onnx.StringStringEntryProto;return t.key!=null&&(o.key=String(t.key)),t.value!=null&&(o.value=String(t.value)),o},e.toObject=function(t,o){o||(o={});var i={};return o.defaults&&(i.key="",i.value=""),t.key!=null&&t.hasOwnProperty("key")&&(i.key=t.key),t.value!=null&&t.hasOwnProperty("value")&&(i.value=t.value),i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.StringStringEntryProto"},e}(),r.TensorAnnotation=function(){function e(n){if(this.quantParameterTensorNames=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.tensorName=t.string();break}case 2:{s.quantParameterTensorNames&&s.quantParameterTensorNames.length||(s.quantParameterTensorNames=[]),s.quantParameterTensorNames.push(I.onnx.StringStringEntryProto.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.tensorName!=null&&t.hasOwnProperty("tensorName")&&!A.isString(t.tensorName))return"tensorName: string expected";if(t.quantParameterTensorNames!=null&&t.hasOwnProperty("quantParameterTensorNames")){if(!Array.isArray(t.quantParameterTensorNames))return"quantParameterTensorNames: array expected";for(var o=0;o>>3){case 1:{s.node&&s.node.length||(s.node=[]),s.node.push(I.onnx.NodeProto.decode(t,t.uint32()));break}case 2:{s.name=t.string();break}case 5:{s.initializer&&s.initializer.length||(s.initializer=[]),s.initializer.push(I.onnx.TensorProto.decode(t,t.uint32()));break}case 15:{s.sparseInitializer&&s.sparseInitializer.length||(s.sparseInitializer=[]),s.sparseInitializer.push(I.onnx.SparseTensorProto.decode(t,t.uint32()));break}case 10:{s.docString=t.string();break}case 11:{s.input&&s.input.length||(s.input=[]),s.input.push(I.onnx.ValueInfoProto.decode(t,t.uint32()));break}case 12:{s.output&&s.output.length||(s.output=[]),s.output.push(I.onnx.ValueInfoProto.decode(t,t.uint32()));break}case 
13:{s.valueInfo&&s.valueInfo.length||(s.valueInfo=[]),s.valueInfo.push(I.onnx.ValueInfoProto.decode(t,t.uint32()));break}case 14:{s.quantizationAnnotation&&s.quantizationAnnotation.length||(s.quantizationAnnotation=[]),s.quantizationAnnotation.push(I.onnx.TensorAnnotation.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.node!=null&&t.hasOwnProperty("node")){if(!Array.isArray(t.node))return"node: array expected";for(var o=0;o>>3){case 1:{if(s.dims&&s.dims.length||(s.dims=[]),(a&7)===2)for(var u=t.uint32()+t.pos;t.pos>>0,t.dims[i].high>>>0).toNumber())}if(t.dataType!=null&&(o.dataType=t.dataType|0),t.segment!=null){if(typeof t.segment!="object")throw TypeError(".onnx.TensorProto.segment: object expected");o.segment=I.onnx.TensorProto.Segment.fromObject(t.segment)}if(t.floatData){if(!Array.isArray(t.floatData))throw TypeError(".onnx.TensorProto.floatData: array expected");o.floatData=[];for(var i=0;i=0&&(o.stringData[i]=t.stringData[i])}if(t.int64Data){if(!Array.isArray(t.int64Data))throw TypeError(".onnx.TensorProto.int64Data: array expected");o.int64Data=[];for(var i=0;i>>0,t.int64Data[i].high>>>0).toNumber())}if(t.name!=null&&(o.name=String(t.name)),t.docString!=null&&(o.docString=String(t.docString)),t.rawData!=null&&(typeof t.rawData=="string"?A.base64.decode(t.rawData,o.rawData=A.newBuffer(A.base64.length(t.rawData)),0):t.rawData.length>=0&&(o.rawData=t.rawData)),t.externalData){if(!Array.isArray(t.externalData))throw TypeError(".onnx.TensorProto.externalData: array expected");o.externalData=[];for(var i=0;i>>0,t.uint64Data[i].high>>>0).toNumber(!0))}return o},e.toObject=function(t,o){o||(o={});var i={};if((o.arrays||o.defaults)&&(i.dims=[],i.floatData=[],i.int32Data=[],i.stringData=[],i.int64Data=[],i.doubleData=[],i.uint64Data=[],i.externalData=[]),o.defaults&&(i.dataType=0,i.segment=null,i.name="",o.bytes===String?i.rawData="":(i.rawData=[],o.bytes!==Array&&(i.rawData=A.newBuffer(i.rawData))),i.docString="",i.dataLocation=o.enums===String?"DEFAULT":0),t.dims&&t.dims.length){i.dims=[];for(var s=0;s>>0,t.dims[s].high>>>0).toNumber():t.dims[s]}if(t.dataType!=null&&t.hasOwnProperty("dataType")&&(i.dataType=t.dataType),t.segment!=null&&t.hasOwnProperty("segment")&&(i.segment=I.onnx.TensorProto.Segment.toObject(t.segment,o)),t.floatData&&t.floatData.length){i.floatData=[];for(var s=0;s>>0,t.int64Data[s].high>>>0).toNumber():t.int64Data[s]}if(t.name!=null&&t.hasOwnProperty("name")&&(i.name=t.name),t.rawData!=null&&t.hasOwnProperty("rawData")&&(i.rawData=o.bytes===String?A.base64.encode(t.rawData,0,t.rawData.length):o.bytes===Array?Array.prototype.slice.call(t.rawData):t.rawData),t.doubleData&&t.doubleData.length){i.doubleData=[];for(var s=0;s>>0,t.uint64Data[s].high>>>0).toNumber(!0):t.uint64Data[s]}if(t.docString!=null&&t.hasOwnProperty("docString")&&(i.docString=t.docString),t.externalData&&t.externalData.length){i.externalData=[];for(var s=0;s>>3){case 1:{a.begin=o.int64();break}case 2:{a.end=o.int64();break}default:o.skipType(u&7);break}}return a},n.decodeDelimited=function(o){return o instanceof H||(o=new H(o)),this.decode(o,o.uint32())},n.verify=function(o){return typeof o!="object"||o===null?"object expected":o.begin!=null&&o.hasOwnProperty("begin")&&!A.isInteger(o.begin)&&!(o.begin&&A.isInteger(o.begin.low)&&A.isInteger(o.begin.high))?"begin: integer|Long 
expected":o.end!=null&&o.hasOwnProperty("end")&&!A.isInteger(o.end)&&!(o.end&&A.isInteger(o.end.low)&&A.isInteger(o.end.high))?"end: integer|Long expected":null},n.fromObject=function(o){if(o instanceof I.onnx.TensorProto.Segment)return o;var i=new I.onnx.TensorProto.Segment;return o.begin!=null&&(A.Long?(i.begin=A.Long.fromValue(o.begin)).unsigned=!1:typeof o.begin=="string"?i.begin=parseInt(o.begin,10):typeof o.begin=="number"?i.begin=o.begin:typeof o.begin=="object"&&(i.begin=new A.LongBits(o.begin.low>>>0,o.begin.high>>>0).toNumber())),o.end!=null&&(A.Long?(i.end=A.Long.fromValue(o.end)).unsigned=!1:typeof o.end=="string"?i.end=parseInt(o.end,10):typeof o.end=="number"?i.end=o.end:typeof o.end=="object"&&(i.end=new A.LongBits(o.end.low>>>0,o.end.high>>>0).toNumber())),i},n.toObject=function(o,i){i||(i={});var s={};if(i.defaults){if(A.Long){var a=new A.Long(0,0,!1);s.begin=i.longs===String?a.toString():i.longs===Number?a.toNumber():a}else s.begin=i.longs===String?"0":0;if(A.Long){var a=new A.Long(0,0,!1);s.end=i.longs===String?a.toString():i.longs===Number?a.toNumber():a}else s.end=i.longs===String?"0":0}return o.begin!=null&&o.hasOwnProperty("begin")&&(typeof o.begin=="number"?s.begin=i.longs===String?String(o.begin):o.begin:s.begin=i.longs===String?A.Long.prototype.toString.call(o.begin):i.longs===Number?new A.LongBits(o.begin.low>>>0,o.begin.high>>>0).toNumber():o.begin),o.end!=null&&o.hasOwnProperty("end")&&(typeof o.end=="number"?s.end=i.longs===String?String(o.end):o.end:s.end=i.longs===String?A.Long.prototype.toString.call(o.end):i.longs===Number?new A.LongBits(o.end.low>>>0,o.end.high>>>0).toNumber():o.end),s},n.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},n.getTypeUrl=function(o){return o===void 0&&(o="type.googleapis.com"),o+"/onnx.TensorProto.Segment"},n}(),e.DataLocation=function(){var n={},t=Object.create(n);return t[n[0]="DEFAULT"]=0,t[n[1]="EXTERNAL"]=1,t}(),e}(),r.SparseTensorProto=function(){function e(n){if(this.dims=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.values=I.onnx.TensorProto.decode(t,t.uint32());break}case 2:{s.indices=I.onnx.TensorProto.decode(t,t.uint32());break}case 3:{if(s.dims&&s.dims.length||(s.dims=[]),(a&7)===2)for(var u=t.uint32()+t.pos;t.pos>>0,t.dims[i].high>>>0).toNumber())}return o},e.toObject=function(t,o){o||(o={});var i={};if((o.arrays||o.defaults)&&(i.dims=[]),o.defaults&&(i.values=null,i.indices=null),t.values!=null&&t.hasOwnProperty("values")&&(i.values=I.onnx.TensorProto.toObject(t.values,o)),t.indices!=null&&t.hasOwnProperty("indices")&&(i.indices=I.onnx.TensorProto.toObject(t.indices,o)),t.dims&&t.dims.length){i.dims=[];for(var s=0;s>>0,t.dims[s].high>>>0).toNumber():t.dims[s]}return i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.SparseTensorProto"},e}(),r.TensorShapeProto=function(){function e(n){if(this.dim=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.dim&&s.dim.length||(s.dim=[]),s.dim.push(I.onnx.TensorShapeProto.Dimension.decode(t,t.uint32()));break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.dim!=null&&t.hasOwnProperty("dim")){if(!Array.isArray(t.dim))return"dim: array expected";for(var o=0;o>>3){case 1:{u.dimValue=i.int64();break}case 2:{u.dimParam=i.string();break}case 
3:{u.denotation=i.string();break}default:i.skipType(l&7);break}}return u},n.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},n.verify=function(i){if(typeof i!="object"||i===null)return"object expected";var s={};if(i.dimValue!=null&&i.hasOwnProperty("dimValue")&&(s.value=1,!A.isInteger(i.dimValue)&&!(i.dimValue&&A.isInteger(i.dimValue.low)&&A.isInteger(i.dimValue.high))))return"dimValue: integer|Long expected";if(i.dimParam!=null&&i.hasOwnProperty("dimParam")){if(s.value===1)return"value: multiple values";if(s.value=1,!A.isString(i.dimParam))return"dimParam: string expected"}return i.denotation!=null&&i.hasOwnProperty("denotation")&&!A.isString(i.denotation)?"denotation: string expected":null},n.fromObject=function(i){if(i instanceof I.onnx.TensorShapeProto.Dimension)return i;var s=new I.onnx.TensorShapeProto.Dimension;return i.dimValue!=null&&(A.Long?(s.dimValue=A.Long.fromValue(i.dimValue)).unsigned=!1:typeof i.dimValue=="string"?s.dimValue=parseInt(i.dimValue,10):typeof i.dimValue=="number"?s.dimValue=i.dimValue:typeof i.dimValue=="object"&&(s.dimValue=new A.LongBits(i.dimValue.low>>>0,i.dimValue.high>>>0).toNumber())),i.dimParam!=null&&(s.dimParam=String(i.dimParam)),i.denotation!=null&&(s.denotation=String(i.denotation)),s},n.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.denotation=""),i.dimValue!=null&&i.hasOwnProperty("dimValue")&&(typeof i.dimValue=="number"?a.dimValue=s.longs===String?String(i.dimValue):i.dimValue:a.dimValue=s.longs===String?A.Long.prototype.toString.call(i.dimValue):s.longs===Number?new A.LongBits(i.dimValue.low>>>0,i.dimValue.high>>>0).toNumber():i.dimValue,s.oneofs&&(a.value="dimValue")),i.dimParam!=null&&i.hasOwnProperty("dimParam")&&(a.dimParam=i.dimParam,s.oneofs&&(a.value="dimParam")),i.denotation!=null&&i.hasOwnProperty("denotation")&&(a.denotation=i.denotation),a},n.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},n.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TensorShapeProto.Dimension"},n}(),e}(),r.TypeProto=function(){function e(t){if(t)for(var o=Object.keys(t),i=0;i>>3){case 1:{a.tensorType=I.onnx.TypeProto.Tensor.decode(o,o.uint32());break}case 4:{a.sequenceType=I.onnx.TypeProto.Sequence.decode(o,o.uint32());break}case 5:{a.mapType=I.onnx.TypeProto.Map.decode(o,o.uint32());break}case 9:{a.optionalType=I.onnx.TypeProto.Optional.decode(o,o.uint32());break}case 8:{a.sparseTensorType=I.onnx.TypeProto.SparseTensor.decode(o,o.uint32());break}case 6:{a.denotation=o.string();break}default:o.skipType(u&7);break}}return a},e.decodeDelimited=function(o){return o instanceof H||(o=new H(o)),this.decode(o,o.uint32())},e.verify=function(o){if(typeof o!="object"||o===null)return"object expected";var i={};if(o.tensorType!=null&&o.hasOwnProperty("tensorType")){i.value=1;{var s=I.onnx.TypeProto.Tensor.verify(o.tensorType);if(s)return"tensorType."+s}}if(o.sequenceType!=null&&o.hasOwnProperty("sequenceType")){if(i.value===1)return"value: multiple values";i.value=1;{var s=I.onnx.TypeProto.Sequence.verify(o.sequenceType);if(s)return"sequenceType."+s}}if(o.mapType!=null&&o.hasOwnProperty("mapType")){if(i.value===1)return"value: multiple values";i.value=1;{var s=I.onnx.TypeProto.Map.verify(o.mapType);if(s)return"mapType."+s}}if(o.optionalType!=null&&o.hasOwnProperty("optionalType")){if(i.value===1)return"value: multiple values";i.value=1;{var 
s=I.onnx.TypeProto.Optional.verify(o.optionalType);if(s)return"optionalType."+s}}if(o.sparseTensorType!=null&&o.hasOwnProperty("sparseTensorType")){if(i.value===1)return"value: multiple values";i.value=1;{var s=I.onnx.TypeProto.SparseTensor.verify(o.sparseTensorType);if(s)return"sparseTensorType."+s}}return o.denotation!=null&&o.hasOwnProperty("denotation")&&!A.isString(o.denotation)?"denotation: string expected":null},e.fromObject=function(o){if(o instanceof I.onnx.TypeProto)return o;var i=new I.onnx.TypeProto;if(o.tensorType!=null){if(typeof o.tensorType!="object")throw TypeError(".onnx.TypeProto.tensorType: object expected");i.tensorType=I.onnx.TypeProto.Tensor.fromObject(o.tensorType)}if(o.sequenceType!=null){if(typeof o.sequenceType!="object")throw TypeError(".onnx.TypeProto.sequenceType: object expected");i.sequenceType=I.onnx.TypeProto.Sequence.fromObject(o.sequenceType)}if(o.mapType!=null){if(typeof o.mapType!="object")throw TypeError(".onnx.TypeProto.mapType: object expected");i.mapType=I.onnx.TypeProto.Map.fromObject(o.mapType)}if(o.optionalType!=null){if(typeof o.optionalType!="object")throw TypeError(".onnx.TypeProto.optionalType: object expected");i.optionalType=I.onnx.TypeProto.Optional.fromObject(o.optionalType)}if(o.sparseTensorType!=null){if(typeof o.sparseTensorType!="object")throw TypeError(".onnx.TypeProto.sparseTensorType: object expected");i.sparseTensorType=I.onnx.TypeProto.SparseTensor.fromObject(o.sparseTensorType)}return o.denotation!=null&&(i.denotation=String(o.denotation)),i},e.toObject=function(o,i){i||(i={});var s={};return i.defaults&&(s.denotation=""),o.tensorType!=null&&o.hasOwnProperty("tensorType")&&(s.tensorType=I.onnx.TypeProto.Tensor.toObject(o.tensorType,i),i.oneofs&&(s.value="tensorType")),o.sequenceType!=null&&o.hasOwnProperty("sequenceType")&&(s.sequenceType=I.onnx.TypeProto.Sequence.toObject(o.sequenceType,i),i.oneofs&&(s.value="sequenceType")),o.mapType!=null&&o.hasOwnProperty("mapType")&&(s.mapType=I.onnx.TypeProto.Map.toObject(o.mapType,i),i.oneofs&&(s.value="mapType")),o.denotation!=null&&o.hasOwnProperty("denotation")&&(s.denotation=o.denotation),o.sparseTensorType!=null&&o.hasOwnProperty("sparseTensorType")&&(s.sparseTensorType=I.onnx.TypeProto.SparseTensor.toObject(o.sparseTensorType,i),i.oneofs&&(s.value="sparseTensorType")),o.optionalType!=null&&o.hasOwnProperty("optionalType")&&(s.optionalType=I.onnx.TypeProto.Optional.toObject(o.optionalType,i),i.oneofs&&(s.value="optionalType")),s},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(o){return o===void 0&&(o="type.googleapis.com"),o+"/onnx.TypeProto"},e.Tensor=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=i.int32();break}case 2:{u.shape=I.onnx.TensorShapeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")&&!A.isInteger(i.elemType))return"elemType: integer expected";if(i.shape!=null&&i.hasOwnProperty("shape")){var s=I.onnx.TensorShapeProto.verify(i.shape);if(s)return"shape."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Tensor)return i;var s=new I.onnx.TypeProto.Tensor;if(i.elemType!=null&&(s.elemType=i.elemType|0),i.shape!=null){if(typeof i.shape!="object")throw TypeError(".onnx.TypeProto.Tensor.shape: object 
expected");s.shape=I.onnx.TensorShapeProto.fromObject(i.shape)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=0,a.shape=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=i.elemType),i.shape!=null&&i.hasOwnProperty("shape")&&(a.shape=I.onnx.TensorShapeProto.toObject(i.shape,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Tensor"},t}(),e.Sequence=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=I.onnx.TypeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")){var s=I.onnx.TypeProto.verify(i.elemType);if(s)return"elemType."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Sequence)return i;var s=new I.onnx.TypeProto.Sequence;if(i.elemType!=null){if(typeof i.elemType!="object")throw TypeError(".onnx.TypeProto.Sequence.elemType: object expected");s.elemType=I.onnx.TypeProto.fromObject(i.elemType)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=I.onnx.TypeProto.toObject(i.elemType,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Sequence"},t}(),e.Map=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.keyType=i.int32();break}case 2:{u.valueType=I.onnx.TypeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.keyType!=null&&i.hasOwnProperty("keyType")&&!A.isInteger(i.keyType))return"keyType: integer expected";if(i.valueType!=null&&i.hasOwnProperty("valueType")){var s=I.onnx.TypeProto.verify(i.valueType);if(s)return"valueType."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Map)return i;var s=new I.onnx.TypeProto.Map;if(i.keyType!=null&&(s.keyType=i.keyType|0),i.valueType!=null){if(typeof i.valueType!="object")throw TypeError(".onnx.TypeProto.Map.valueType: object expected");s.valueType=I.onnx.TypeProto.fromObject(i.valueType)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.keyType=0,a.valueType=null),i.keyType!=null&&i.hasOwnProperty("keyType")&&(a.keyType=i.keyType),i.valueType!=null&&i.hasOwnProperty("valueType")&&(a.valueType=I.onnx.TypeProto.toObject(i.valueType,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Map"},t}(),e.Optional=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=I.onnx.TypeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")){var 
s=I.onnx.TypeProto.verify(i.elemType);if(s)return"elemType."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.Optional)return i;var s=new I.onnx.TypeProto.Optional;if(i.elemType!=null){if(typeof i.elemType!="object")throw TypeError(".onnx.TypeProto.Optional.elemType: object expected");s.elemType=I.onnx.TypeProto.fromObject(i.elemType)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=I.onnx.TypeProto.toObject(i.elemType,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.Optional"},t}(),e.SparseTensor=function(){function t(o){if(o)for(var i=Object.keys(o),s=0;s>>3){case 1:{u.elemType=i.int32();break}case 2:{u.shape=I.onnx.TensorShapeProto.decode(i,i.uint32());break}default:i.skipType(l&7);break}}return u},t.decodeDelimited=function(i){return i instanceof H||(i=new H(i)),this.decode(i,i.uint32())},t.verify=function(i){if(typeof i!="object"||i===null)return"object expected";if(i.elemType!=null&&i.hasOwnProperty("elemType")&&!A.isInteger(i.elemType))return"elemType: integer expected";if(i.shape!=null&&i.hasOwnProperty("shape")){var s=I.onnx.TensorShapeProto.verify(i.shape);if(s)return"shape."+s}return null},t.fromObject=function(i){if(i instanceof I.onnx.TypeProto.SparseTensor)return i;var s=new I.onnx.TypeProto.SparseTensor;if(i.elemType!=null&&(s.elemType=i.elemType|0),i.shape!=null){if(typeof i.shape!="object")throw TypeError(".onnx.TypeProto.SparseTensor.shape: object expected");s.shape=I.onnx.TensorShapeProto.fromObject(i.shape)}return s},t.toObject=function(i,s){s||(s={});var a={};return s.defaults&&(a.elemType=0,a.shape=null),i.elemType!=null&&i.hasOwnProperty("elemType")&&(a.elemType=i.elemType),i.shape!=null&&i.hasOwnProperty("shape")&&(a.shape=I.onnx.TensorShapeProto.toObject(i.shape,s)),a},t.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},t.getTypeUrl=function(i){return i===void 0&&(i="type.googleapis.com"),i+"/onnx.TypeProto.SparseTensor"},t}(),e}(),r.OperatorSetIdProto=function(){function e(n){if(n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.domain=t.string();break}case 2:{s.version=t.int64();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){return typeof t!="object"||t===null?"object expected":t.domain!=null&&t.hasOwnProperty("domain")&&!A.isString(t.domain)?"domain: string expected":t.version!=null&&t.hasOwnProperty("version")&&!A.isInteger(t.version)&&!(t.version&&A.isInteger(t.version.low)&&A.isInteger(t.version.high))?"version: integer|Long expected":null},e.fromObject=function(t){if(t instanceof I.onnx.OperatorSetIdProto)return t;var o=new I.onnx.OperatorSetIdProto;return t.domain!=null&&(o.domain=String(t.domain)),t.version!=null&&(A.Long?(o.version=A.Long.fromValue(t.version)).unsigned=!1:typeof t.version=="string"?o.version=parseInt(t.version,10):typeof t.version=="number"?o.version=t.version:typeof t.version=="object"&&(o.version=new A.LongBits(t.version.low>>>0,t.version.high>>>0).toNumber())),o},e.toObject=function(t,o){o||(o={});var i={};if(o.defaults)if(i.domain="",A.Long){var s=new A.Long(0,0,!1);i.version=o.longs===String?s.toString():o.longs===Number?s.toNumber():s}else i.version=o.longs===String?"0":0;return 
t.domain!=null&&t.hasOwnProperty("domain")&&(i.domain=t.domain),t.version!=null&&t.hasOwnProperty("version")&&(typeof t.version=="number"?i.version=o.longs===String?String(t.version):t.version:i.version=o.longs===String?A.Long.prototype.toString.call(t.version):o.longs===Number?new A.LongBits(t.version.low>>>0,t.version.high>>>0).toNumber():t.version),i},e.prototype.toJSON=function(){return this.constructor.toObject(this,Ve.util.toJSONOptions)},e.getTypeUrl=function(t){return t===void 0&&(t="type.googleapis.com"),t+"/onnx.OperatorSetIdProto"},e}(),r.OperatorStatus=function(){var e={},n=Object.create(e);return n[e[0]="EXPERIMENTAL"]=0,n[e[1]="STABLE"]=1,n}(),r.FunctionProto=function(){function e(n){if(this.input=[],this.output=[],this.attribute=[],this.attributeProto=[],this.node=[],this.opsetImport=[],n)for(var t=Object.keys(n),o=0;o>>3){case 1:{s.name=t.string();break}case 4:{s.input&&s.input.length||(s.input=[]),s.input.push(t.string());break}case 5:{s.output&&s.output.length||(s.output=[]),s.output.push(t.string());break}case 6:{s.attribute&&s.attribute.length||(s.attribute=[]),s.attribute.push(t.string());break}case 11:{s.attributeProto&&s.attributeProto.length||(s.attributeProto=[]),s.attributeProto.push(I.onnx.AttributeProto.decode(t,t.uint32()));break}case 7:{s.node&&s.node.length||(s.node=[]),s.node.push(I.onnx.NodeProto.decode(t,t.uint32()));break}case 8:{s.docString=t.string();break}case 9:{s.opsetImport&&s.opsetImport.length||(s.opsetImport=[]),s.opsetImport.push(I.onnx.OperatorSetIdProto.decode(t,t.uint32()));break}case 10:{s.domain=t.string();break}default:t.skipType(a&7);break}}return s},e.decodeDelimited=function(t){return t instanceof H||(t=new H(t)),this.decode(t,t.uint32())},e.verify=function(t){if(typeof t!="object"||t===null)return"object expected";if(t.name!=null&&t.hasOwnProperty("name")&&!A.isString(t.name))return"name: string expected";if(t.input!=null&&t.hasOwnProperty("input")){if(!Array.isArray(t.input))return"input: array expected";for(var o=0;o{"use strict";ko();Da();Ge=ln(pn());Gr();zr=class{static arraysEqual(e,n){if(e.length!==n.length)return!1;for(let t=0;t1&&f>1)return;a[s-u]=Math.max(l,f)}return a}static index(e,n){let t=new Array(n.length);return r.fillIndex(e,n,t),t}static fillIndex(e,n,t){let o=e.length-n.length;for(let i=0;i=0;v--)l[v]=T%s[v],T=Math.floor(T/s[v]);h||(r.fillIndex(l,e.dims,f),p=e.get(f)),g||(r.fillIndex(l,n.dims,c),b=n.get(c)),u.set(l,t(p,b))}}return u}}static isValidBroadcast(e,n){let t=e.length,o=n.length;if(t>o)return!1;for(let i=1;i<=t;i++)if(e[t-i]!==1&&e[t-i]!==n[o-i])return!1;return!0}static getBroadcastDims(e,n){let t=e.length,o=[];for(let i=0;i1&&a===1&&o.unshift(s)}return o}},No=class{static getShapeOfGemmResult(e,n,t,o,i){if(e.length!==2||t.length!==2)throw new Error("shape need to be of size 2");let s,a,u;n?(s=e[1],a=e[0]):(s=e[0],a=e[1]);let l=-1;if(o?(u=t[0],l=1):(u=t[1],l=0),t[l]!==a)throw new Error("dimension mismatch");if(s<=0||u<=0||a<=0)throw new Error("invalid shape specified");if(i&&!gt.isValidBroadcast(i,[s,u]))throw new Error("gemm: invalid bias shape for broadcast");return[s,u,a]}},lt=class r{static tensorDataTypeFromProto(e){switch(e){case Ge.onnx.TensorProto.DataType.INT8:return"int8";case Ge.onnx.TensorProto.DataType.UINT8:return"uint8";case Ge.onnx.TensorProto.DataType.BOOL:return"bool";case Ge.onnx.TensorProto.DataType.INT16:return"int16";case Ge.onnx.TensorProto.DataType.UINT16:return"uint16";case Ge.onnx.TensorProto.DataType.INT32:return"int32";case Ge.onnx.TensorProto.DataType.UINT32:return"uint32";case 
Ge.onnx.TensorProto.DataType.FLOAT:return"float32";case Ge.onnx.TensorProto.DataType.DOUBLE:return"float64";case Ge.onnx.TensorProto.DataType.STRING:return"string";case Ge.onnx.TensorProto.DataType.INT64:return"int32";case Ge.onnx.TensorProto.DataType.UINT64:return"uint32";default:throw new Error(`unsupported data type: ${Ge.onnx.TensorProto.DataType[e]}`)}}static tensorDataTypeStringToEnum(e){switch(e){case"int8":return Ge.onnx.TensorProto.DataType.INT8;case"uint8":return Ge.onnx.TensorProto.DataType.UINT8;case"bool":return Ge.onnx.TensorProto.DataType.BOOL;case"int16":return Ge.onnx.TensorProto.DataType.INT16;case"uint16":return Ge.onnx.TensorProto.DataType.UINT16;case"int32":return Ge.onnx.TensorProto.DataType.INT32;case"uint32":return Ge.onnx.TensorProto.DataType.UINT32;case"float32":return Ge.onnx.TensorProto.DataType.FLOAT;case"float64":return Ge.onnx.TensorProto.DataType.DOUBLE;case"string":return Ge.onnx.TensorProto.DataType.STRING;case"int64":return Ge.onnx.TensorProto.DataType.INT64;case"uint64":return Ge.onnx.TensorProto.DataType.UINT64;default:throw new Error(`unsupported data type: ${e}`)}}static tensorDimsFromProto(e){return e.map(n=>dr.isLong(n)?n.toNumber():n)}static tensorValueTypeFromProto(e){return{tensorType:r.tensorDataTypeFromProto(e.elemType),shape:{dims:r.tensorDimsFromProto(e.shape.dim.map(n=>n.dimValue))}}}static tensorDimsFromORTFormat(e){let n=[];for(let t=0;te.length)throw new Error(`invalid dimension of ${n} for sizeFromDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,n,e.length)}static sizeToDimension(e,n){if(n<0||n>e.length)throw new Error(`invalid dimension of ${n} for sizeToDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,0,n)}static getSizeFromDimensionRange(e,n,t){let o=1;for(let i=n;i=0;--o)t[o]=t[o+1]*e[o+1];return t}static transpose(e){return e.slice().reverse()}static indicesToOffset(e,n,t){t===void 0&&(t=e.length);let o=0;for(let i=0;i=n)throw new Error("unsupported axis for this operation.");return e<0?e+n:e}static normalizeAxes(e,n){return e.map(t=>this.normalizeAxis(t,n))}static incrementIndex(e,n,t){if(n.length===0||e.length===0)throw new Error("Index incrementing unsupported for scalar Tensor");if(t===void 0)t=n.length;else if(t<=0||t>n.length)throw new Error("Incorrect axis to increment on");for(let o=t-1;o>=0&&(e[o]++,!(e[o]=e.length)throw new Error("the dimension with value zero exceeds the dimension size of the input tensor");o[u]=e[u]}else o[u]=n[u];s*=o[u]}}let a=r.size(e);if(i!==-1){if(a%s!==0)throw new Error(`the input tensor cannot be reshaped to the requested shape. 
Input shape: [${e}] Output shape: [${n}]`);o[i]=a/s}else if(s!==a)throw new Error("reshapedDims and originalDims don't have matching sizes");return o}static sortBasedOnPerm(e,n){return n?n.map(t=>e[t]):e.slice().reverse()}static padShape(e,n){let t=e.length;return e.map((o,i)=>o+n[i]+n[i+t])}static areEqual(e,n){return e.length!==n.length?!1:e.every((t,o)=>t===n[o])}static validateDimsAndCalcSize(e){if(e.length>6)throw new TypeError("Only rank 0 to 6 is supported for tensor shape.");let n=1;for(let t of e){if(!Number.isInteger(t))throw new TypeError(`Invalid shape: ${t} is not an integer`);if(t<0||t>2147483647)throw new TypeError(`Invalid shape: length ${t} is not allowed`);n*=t}return n}static flattenShape(e,n){n<0&&(n+=e.length);let t=e.reduce((s,a)=>s*a,1),o=e.slice(n).reduce((s,a)=>s*a,1);return[t/o,o]}static squeezeShape(e,n){let t=new Array;n=r.normalizeAxes(n,e.length);for(let o=0;o=0;if(i&&e[o]!==1)throw new Error("squeeze an axis of size different than 1");(n.length===0&&e[o]>1||n.length>0&&!i)&&t.push(e[o])}return t}static unsqueezeShape(e,n){let t=new Array(e.length+n.length);t.fill(0);for(let i=0;i=t.length)throw new Error("'axes' has an out of range axis");if(t[s]!==0)throw new Error("'axes' has a duplicate axis");t[s]=1}let o=0;for(let i=0;i=t.length?t.push(n[a+2]):t[a]=n[a+2];for(let a=0;a=t[a]||s[a+t.length]>=t[a])throw new Error("pads should be smaller than kernel")}}static adjustPadsBasedOnAutoPad(e,n,t,o,i,s){if(s){if(i.length!==2*(e.length-2))throw new Error("length of pads should be twice the length of data dimensions");if(n.length!==e.length-2)throw new Error("length of strides should be the length of data dimensions");if(o.length!==e.length-2)throw new Error("length of kernel shapes should be the length of data dimensions");for(let a=0;a{"use strict";Cf=ln(xc());Da();Cn();ve=ln(pn());De();Ka=ee.experimental.fbs,Qe=class r{constructor(e,n,t,o,i,s=Cf.Guid.create()){this.dims=e;this.type=n;this.dataProvider=t;this.asyncDataProvider=o;this.cache=i;this.dataId=s;this.size=J.validateDimsAndCalcSize(e);let a=this.size,u=t===void 0&&o===void 0&&i===void 0;if(i!==void 0&&i.length!==a)throw new RangeError("Input dims doesn't match data length.");if(n==="string"){if(i!==void 0&&(!Array.isArray(i)||!i.every(l=>typeof l=="string")))throw new TypeError("cache should be a string array");u&&(this.cache=new Array(a))}else{if(i!==void 0){let l=kf(n);if(!(i instanceof l))throw new TypeError(`cache should be type ${l.name}`)}if(u){let l=new ArrayBuffer(a*_v(n));this.cache=Iv(l,n)}}}get data(){if(this.cache===void 0){let e=this.dataProvider(this.dataId);if(e.length!==this.size)throw new Error("Length of data provided by the Data Provider is inconsistent with the dims of this Tensor.");this.cache=e}return this.cache}get stringData(){if(this.type!=="string")throw new TypeError("data type is not string");return this.data}get integerData(){switch(this.type){case"uint8":case"int8":case"uint16":case"int16":case"int32":case"uint32":case"bool":return this.data;default:throw new TypeError("data type is not integer (uint8, int8, uint16, int16, int32, uint32, bool)")}}get floatData(){switch(this.type){case"float32":case"float64":return this.data;default:throw new TypeError("data type is not float (float32, float64)")}}get numberData(){if(this.type!=="string")return this.data;throw new TypeError("type cannot be non-number (string)")}get(e){return this.data[J.indicesToOffset(e,this.strides)]}set(e,n){this.data[J.indicesToOffset(e,this.strides)]=n}async getData(){return this.cache===void 
0&&(this.cache=await this.asyncDataProvider(this.dataId)),this.cache}get strides(){return this._strides||(this._strides=J.computeStrides(this.dims)),this._strides}static fromProto(e){if(!e)throw new Error("cannot construct Value from an empty tensor");let n=lt.tensorDataTypeFromProto(e.dataType),t=lt.tensorDimsFromProto(e.dims),o=new r(t,n);if(n==="string")e.stringData.forEach((i,s)=>{o.data[s]=Rn(i)});else if(e.rawData&&typeof e.rawData.byteLength=="number"&&e.rawData.byteLength>0){let i=o.data,s=new DataView(e.rawData.buffer,e.rawData.byteOffset,e.rawData.byteLength),a=Of(e.dataType),u=e.rawData.byteLength/a;if(e.rawData.byteLength%a!==0)throw new Error("invalid buffer length");if(i.length!==u)throw new Error("buffer length mismatch");for(let l=0;l0){let i=o.data,s=new DataView(e.rawDataArray().buffer,e.rawDataArray().byteOffset,e.rawDataLength()),a=Of(e.dataType()),u=e.rawDataLength()/a;if(e.rawDataLength()%a!==0)throw new Error("invalid buffer length");if(i.length!==u)throw new Error("buffer length mismatch");for(let l=0;l{"use strict";_v={version:"",attribute:"attribute",varyingVertex:"varying",varyingFrag:"varying",texture2D:"texture2D",output:"gl_FragColor",outputDeclaration:""},Iv={version:"#version 300 es",attribute:"in",varyingVertex:"out",varyingFrag:"in",texture2D:"texture",output:"outputColor",outputDeclaration:"out vec4 outputColor;"}});var Se=C(()=>{"use strict"});async function Xa(r,e=t=>0,n){return new Promise((t,o)=>{let i=0,s=()=>{if(r()){t();return}i++;let a=e(i);if(n!=null&&i>=n){o();return}setTimeout(s,a)};s()})}function zo(r){return pn(typeof r<"u"&&r.length!==0,()=>"empty string found for sampler name"),"get"+r.charAt(0).toUpperCase()+r.slice(1)}function Rf(r){return pn(typeof r<"u"&&r.length!==0,()=>"empty string found for sampler name"),"get"+r.charAt(0).toUpperCase()+r.slice(1)+"AtOutCoords"}function mn(r,e){let n=JSON.parse(JSON.stringify(r));return n=e,n}function hn(r,e){return e.map(n=>r[n]).join(", ")}function bt(r){if(r<=1)return"int";if(r===2)return"ivec2";if(r===3)return"ivec3";if(r===4)return"ivec4";if(r===5)return"ivec5";if(r===6)return"ivec6";throw Error(`GPU for rank ${r} is not yet supported`)}function Gt(r=6){return["x","y","z","w","u","v"].slice(0,r)}var Qt=C(()=>{"use strict";De()});function Sv(r,e){return Gt(e).map(n=>`${r}.${n}`)}function gn(r,e){return e===1?[r]:Sv(r,e)}function er(){return` + `}var Sv,$v,He=C(()=>{"use strict";Sv={version:"",attribute:"attribute",varyingVertex:"varying",varyingFrag:"varying",texture2D:"texture2D",output:"gl_FragColor",outputDeclaration:""},$v={version:"#version 300 es",attribute:"in",varyingVertex:"out",varyingFrag:"in",texture2D:"texture",output:"outputColor",outputDeclaration:"out vec4 outputColor;"}});var Se=C(()=>{"use strict"});async function Za(r,e=t=>0,n){return new Promise((t,o)=>{let i=0,s=()=>{if(r()){t();return}i++;let a=e(i);if(n!=null&&i>=n){o();return}setTimeout(s,a)};s()})}function zo(r){return mn(typeof r<"u"&&r.length!==0,()=>"empty string found for sampler name"),"get"+r.charAt(0).toUpperCase()+r.slice(1)}function Rf(r){return mn(typeof r<"u"&&r.length!==0,()=>"empty string found for sampler name"),"get"+r.charAt(0).toUpperCase()+r.slice(1)+"AtOutCoords"}function hn(r,e){let n=JSON.parse(JSON.stringify(r));return n=e,n}function gn(r,e){return e.map(n=>r[n]).join(", ")}function bt(r){if(r<=1)return"int";if(r===2)return"ivec2";if(r===3)return"ivec3";if(r===4)return"ivec4";if(r===5)return"ivec5";if(r===6)return"ivec6";throw Error(`GPU for rank ${r} is not yet supported`)}function 
Gt(r=6){return["x","y","z","w","u","v"].slice(0,r)}var Qt=C(()=>{"use strict";De()});function Av(r,e){return Gt(e).map(n=>`${r}.${n}`)}function bn(r,e){return e===1?[r]:Av(r,e)}function er(){return` float getChannel(vec4 frag, int dim) { int modCoord = imod(dim, 2); return modCoord == 0 ? frag.r : frag.g; @@ -65,19 +65,19 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe (modCoord.y == 0. ? frag.r : frag.g) : (modCoord.y == 0. ? frag.b : frag.a); } - `}var Ur=C(()=>{"use strict";Qt()});function Av(r,e,n){if(r===0)return"false";if(r===1)return`rc > ${e[0]}`;let t="";for(let o=r-2;o= ${e[o-r+2]}`,o{"use strict";Qt()});function Ov(r,e,n){if(r===0)return"false";if(r===1)return`rc > ${e[0]}`;let t="";for(let o=r-2;o= ${e[o-r+2]}`,o= ${r[0]} ? 0. : getA(rc + 1), 0, 0`;let t="r, c",o="r, cp1",i="rp1, c",s="rp1, cp1",a="";if(n>2)for(let u=0;u= ${t}; bool cEdge = cp1 >= ${n}; - `}var Nf,$v,zf,Ff=C(()=>{"use strict";He();Se();Qt();Ur();Nf={name:"pack",inputNames:["A"],inputTypes:[1]},$v=(r,e)=>{let n=oe(r.session.backend.glContext.version),t=e.dims,o=t.length,i=e.dims.length,s=bt(i),a=gn("rc",i),u=Ov(i,a,t[t.length-2],t[t.length-1]),l;o===0?l=[1,1]:o===1?l=[t[0],1]:l=[t[i-1],t[i-2]];let f=Av(i,l,a),c=Pv(t,a),p=` + `}var Nf,Pv,zf,Ff=C(()=>{"use strict";He();Se();Qt();Ur();Nf={name:"pack",inputNames:["A"],inputTypes:[1]},Pv=(r,e)=>{let n=oe(r.session.backend.glContext.version),t=e.dims,o=t.length,i=e.dims.length,s=bt(i),a=bn("rc",i),u=Cv(i,a,t[t.length-2],t[t.length-1]),l;o===0?l=[1,1]:o===1?l=[t[0],1]:l=[t[i-1],t[i-2]];let f=Ov(i,l,a),c=Ev(t,a),p=` void main() { ${s} rc = getOutputCoords(); @@ -89,17 +89,17 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${n.output} = vec4(${c}); } } - `;return{...Nf,hasMain:!0,output:{dims:e.dims,type:e.type,textureType:2},shaderSource:p}},zf=(r,e)=>({...Nf,get:()=>$v(r,e)})});function Za(r){if(r.length===0)return[1,1,1];let e=1;for(let n=0;n1?r[r.length-2]:1,r[r.length-1]]}function Vf(r,e){let n=!1;return r.length===0||e.length===0?n=!0:r.length<2||e.length<2?n=r[r.length-1]===e[e.length-1]:n=r[r.length-1]===e[e.length-1]&&r[r.length-2]===e[e.length-2],n}function kv(r){let e=J.computeStrides(r),n=["b","r","c"],t="index";return` + `;return{...Nf,hasMain:!0,output:{dims:e.dims,type:e.type,textureType:2},shaderSource:p}},zf=(r,e)=>({...Nf,get:()=>Pv(r,e)})});function Ya(r){if(r.length===0)return[1,1,1];let e=1;for(let n=0;n1?r[r.length-2]:1,r[r.length-1]]}function Vf(r,e){let n=!1;return r.length===0||e.length===0?n=!0:r.length<2||e.length<2?n=r[r.length-1]===e[e.length-1]:n=r[r.length-1]===e[e.length-1]&&r[r.length-2]===e[e.length-2],n}function Bv(r){let e=J.computeStrides(r),n=["b","r","c"],t="index";return` ivec3 inputCoordsFromReshapedOutCoords(int index) { ${e.map((i,s)=>{let a=`int ${n[s]} = ${t} / ${i}`,u=s===e.length-1?`int ${n[s+1]} = ${t} - ${n[s]} * ${i}`:`index -= ${n[s]} * ${i}`;return`${a}; ${u};`}).join("")} return ivec3(b, r, c); } - `}function Dv(r){let e=J.computeStrides(r);return` + `}function Lv(r){let e=J.computeStrides(r);return` int getFlattenedIndex(ivec3 coords) { // reverse y, z order return coords.x * ${e[0]} + coords.z * ${e[1]} + coords.y; } -`}var Ev,Cv,Mf,Gf=C(()=>{"use strict";De();He();Se();Ur();Ev=r=>({name:"Reshape (packed)",inputTypes:[2],inputNames:["A"],cacheHint:`${r}`}),Cv=(r,e,n,t)=>{let o=e.dims,i=t,s="";for(let l=0;l<4;l++){let f="";switch(l){case 0:f="outputCoords = rc;";break;case 1:f="outputCoords = ivec3(rc.x, rc.y+1, rc.z);";break;case 
2:f="outputCoords = ivec3(rc.x, rc.y, rc.z+1);";break;case 3:f="outputCoords = ivec3(rc.x, rc.y+1, rc.z+1);";break;default:throw new Error}s+=` +`}var kv,Dv,Mf,Gf=C(()=>{"use strict";De();He();Se();Ur();kv=r=>({name:"Reshape (packed)",inputTypes:[2],inputNames:["A"],cacheHint:`${r}`}),Dv=(r,e,n,t)=>{let o=e.dims,i=t,s="";for(let l=0;l<4;l++){let f="";switch(l){case 0:f="outputCoords = rc;";break;case 1:f="outputCoords = ivec3(rc.x, rc.y+1, rc.z);";break;case 2:f="outputCoords = ivec3(rc.x, rc.y, rc.z+1);";break;case 3:f="outputCoords = ivec3(rc.x, rc.y+1, rc.z+1);";break;default:throw new Error}s+=` ${f} ${l>0?"if(outputCoords.y < rows && outputCoords.z < cols){":""} int flattenedIndex = getFlattenedIndex(outputCoords); @@ -111,8 +111,8 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${l>0?"}":""} `}let a=oe(r.session.backend.glContext.version),u=` - ${kv(o)} - ${Dv(i)} + ${Bv(o)} + ${Lv(i)} ${er()} void main() { @@ -127,7 +127,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${s} ${a.output} = result; } - `;return{...n,output:{dims:i,type:e.type,textureType:2},shaderSource:u,hasMain:!0}},Mf=(r,e,n)=>{let t=Ev(n);return{...t,get:()=>Cv(r,e,t,n)}}});var Ya,Uf=C(()=>{"use strict";He();Se();Ya=(r,e)=>{let n=e.shape,t=oe(r.session.backend.glContext.version),o=` + `;return{...n,output:{dims:i,type:e.type,textureType:2},shaderSource:u,hasMain:!0}},Mf=(r,e,n)=>{let t=kv(n);return{...t,get:()=>Dv(r,e,t,n)}}});var Ja,Uf=C(()=>{"use strict";He();Se();Ja=(r,e)=>{let n=e.shape,t=oe(r.session.backend.glContext.version),o=` const float FLOAT_MAX = 1.70141184e38; const float FLOAT_MIN = 1.17549435e-38; @@ -174,7 +174,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe void main() { float value = ${t.texture2D}(X,TexCoords).r; ${t.output} = encodeAsUint8(value); - }`,i={name:"Uint8Encode",inputTypes:[0],inputNames:["X"],output:{dims:n,type:e.tensor.type,textureType:3},shaderSource:o,hasMain:!0};return r.executeProgram(i,[e.tensor])}});function Lv(r,e){if(r===1)return"rc";let n="";for(let t=0;t{"use strict";He();Se();Qt();Ur();Wf={name:"unpack",inputNames:["A"],inputTypes:[2]},Bv=(r,e)=>{let n=e.dims.length,t=gn("rc",n),o=t.slice(-2),i=bt(n),s=er(),u=e.dims.length===0?"":Lv(n,t),l=n<=1?"rc":`vec2(${o.join(",")})`,f=oe(r.session.backend.glContext.version),c=` + }`,i={name:"Uint8Encode",inputTypes:[0],inputNames:["X"],output:{dims:n,type:e.tensor.type,textureType:3},shaderSource:o,hasMain:!0};return r.executeProgram(i,[e.tensor])}});function Nv(r,e){if(r===1)return"rc";let n="";for(let t=0;t{"use strict";He();Se();Qt();Ur();Wf={name:"unpack",inputNames:["A"],inputTypes:[2]},Rv=(r,e)=>{let n=e.dims.length,t=bn("rc",n),o=t.slice(-2),i=bt(n),s=er(),u=e.dims.length===0?"":Nv(n,t),l=n<=1?"rc":`vec2(${o.join(",")})`,f=oe(r.session.backend.glContext.version),c=` ${s} void main() { ${i} rc = getOutputCoords(); @@ -184,7 +184,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${f.output} = vec4(getChannel(packedInput, ${l}), 0, 0, 0); } - `;return{...Wf,hasMain:!0,output:{dims:e.dims,type:e.type,textureType:0},shaderSource:c}},Hf=(r,e)=>({...Wf,get:()=>Bv(r,e)})});var Fo,Nn,Mo,zn=C(()=>{"use strict";Pt();Fo=class{constructor(e,n=1){if(n===1)this.internalFormat=e.R32F,this.format=e.RED,this.textureType=e.FLOAT,this.channelSize=n;else if(n===4)this.internalFormat=e.RGBA32F,this.format=e.RGBA,this.textureType=e.FLOAT,this.channelSize=n;else throw new Error(`Invalid number of 
channels: ${n}`)}encode(e,n){let t,o;return e.constructor!==Float32Array&&(Re.warning("Encoder","data was not of type Float32; creating new Float32Array"),o=new Float32Array(e)),n*this.channelSize>e.length?(Re.warning("Encoder","Source data too small. Allocating larger array"),o=e,t=this.allocate(n*this.channelSize),o.forEach((i,s)=>t[s]=i)):(o=e,t=o),t}allocate(e){return new Float32Array(e*4)}decode(e,n){return this.channelSize===1?e.filter((o,i)=>i%4===0).subarray(0,n):e.subarray(0,n)}},Nn=class{constructor(e,n=1,t){if(n!==1&&n!==4)throw new Error(`Invalid number of channels: ${n}`);this.internalFormat=e.RGBA,this.format=e.RGBA,this.channelSize=n,this.textureType=t||e.FLOAT}encode(e,n){let t=e;return this.channelSize===1&&(Re.verbose("Encoder","Exploding into a larger array"),t=this.allocate(n),e.forEach((o,i)=>t[i*4]=o)),t}allocate(e){return new Float32Array(e*4)}decode(e,n){return this.channelSize===1?e.filter((o,i)=>i%4===0).subarray(0,n):e.subarray(0,n)}},Mo=class{constructor(e,n=1){this.channelSize=4;if(n===1)this.internalFormat=e.ALPHA,this.format=e.ALPHA,this.textureType=e.UNSIGNED_BYTE,this.channelSize=n;else if(n===4)this.internalFormat=e.RGBA,this.format=e.RGBA,this.textureType=e.UNSIGNED_BYTE,this.channelSize=n;else throw new Error(`Invalid number of channels: ${n}`)}encode(e,n){return new Uint8Array(e.buffer,e.byteOffset,e.byteLength)}allocate(e){return new Uint8Array(e*this.channelSize)}decode(e,n){if(e instanceof Uint8Array)return e.subarray(0,n);throw new Error(`Invalid array type: ${e.constructor}`)}}});var Fn,Kf,Ja,jf=C(()=>{"use strict";De();Se();Fn=(r,e,n)=>{let t=n===0||n===1?1:4,o=n===2,i=n===1||n===2,s=n===4?e.length-1:void 0,a=n===4?e.map((u,l)=>l===e.length-1?u*4:u):void 0;return Ja(r,e,t,a,{isPacked:o,reverseWH:i,breakAxis:s})},Kf=(r,e,n)=>{let t=Fn(r,e,n);return[t.width,t.height]},Ja=(r,e,n=1,t,o)=>{let i=!!(o&&o.isPacked),[s,a]=r.computeTextureWH(i&&t||e,o),u=e.length,l=e.slice(0);if(u===0&&(l=[1]),n===1)t=e;else if(i){if(n!==4)throw new Error("a packed texture must be 4-channel");t=e,u>0&&(l[u-1]=Math.ceil(l[u-1]/2)),u>1&&(l[u-2]=Math.ceil(l[u-2]/2))}else if(!t)throw new Error("Unpacked shape is needed when using channels > 1");return{width:s,height:a,channels:n,isPacked:i,shape:l,strides:J.computeStrides(l),unpackedShape:t,reversedWH:o&&o.reverseWH}}});var Nv,Vo,Zf=C(()=>{"use strict";Pt();Gr();De();Ff();Gf();Uf();qf();zn();jf();Se();Nv=(r,e)=>{let n=e.map(o=>`${o.unpackedShape.join(",")};${o.width}x${o.height}`).join("_"),t=r.name;return r.cacheHint&&(t+="["+r.cacheHint+"]"),t+=":"+n,t},Vo=class{constructor(e){this.session=e;this.packedTextureDataCache=new Map,this.unpackedTextureDataCache=new Map}calculateTextureWidthAndHeight(e,n){return Kf(this.session.layoutStrategy,e,n)}executeProgram(e,n){if(n.lengththis.readTexture(s),async a=>this.readTextureAsync(s),void 0,i),texture:t};return this.setTextureData(s.tensor.dataId,s,e.isPacked),s}getTextureData(e,n=!1){return this.session.isInitializer(e)?this.session.getTextureData(e,n):n?this.packedTextureDataCache.get(e):this.unpackedTextureDataCache.get(e)}setTextureData(e,n,t=!1){this.session.isInitializer(e)?this.session.setTextureData(e,n,t):(t?this.packedTextureDataCache:this.unpackedTextureDataCache).set(e,n)}isTextureLayoutCached(e,n=!1){return!!this.getTextureData(e.dataId,n)}dispose(){this.session.textureManager.clearActiveTextures(),this.packedTextureDataCache.forEach(e=>this.session.textureManager.releaseTexture(e)),this.packedTextureDataCache=new 
Map,this.unpackedTextureDataCache.forEach(e=>this.session.textureManager.releaseTexture(e)),this.unpackedTextureDataCache=new Map}readTexture(e){return e.isPacked?this.readTexture(this.unpack(e)):this.session.backend.glContext.isFloat32DownloadSupported?this.session.textureManager.readTexture(e,e.tensor.type,e.channels):this.session.textureManager.readUint8TextureAsFloat(Ya(this,e))}async readTextureAsync(e){return e.isPacked?this.readTextureAsync(this.unpack(e)):this.session.backend.glContext.isFloat32DownloadSupported?this.session.textureManager.readTextureAsync(e,e.tensor.type,e.channels):this.session.textureManager.readUint8TextureAsFloat(Ya(this,e))}pack(e){return this.executeProgram(zf(this,e.tensor),[e.tensor])}unpack(e){return this.executeProgram(Hf(this,e.tensor),[e.tensor])}}});var Qa,be,st=C(()=>{"use strict";Qa=class{constructor(e){Object.assign(this,e)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(e=>`${this[e]}`).join(";")),this.key}},be=r=>new Qa(r)});var Yf,Jf,Qf,zv,Fv,ed=C(()=>{"use strict";st();He();Se();Yf={name:"BatchNormalization",inputNames:["A","Scale","B","Mean","Variance"],inputTypes:[0,0,0,0,0]},Jf=(r,e,n)=>(Fv(e),[r.run({...Yf,cacheHint:n.cacheKey,get:()=>zv(r,e,n)},e)]),Qf=r=>{let e=r.attributes.getFloat("epsilon",1e-5),n=r.attributes.getFloat("momentum",.9),t=r.attributes.getInt("spatial",1);return be({epsilon:e,momentum:n,spatial:t})},zv=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),o=e[0].dims.length,[i,s]=r.calculateTextureWidthAndHeight(e[1].dims,0),a=` + `;return{...Wf,hasMain:!0,output:{dims:e.dims,type:e.type,textureType:0},shaderSource:c}},Hf=(r,e)=>({...Wf,get:()=>Rv(r,e)})});var Fo,Nn,Mo,zn=C(()=>{"use strict";Pt();Fo=class{constructor(e,n=1){if(n===1)this.internalFormat=e.R32F,this.format=e.RED,this.textureType=e.FLOAT,this.channelSize=n;else if(n===4)this.internalFormat=e.RGBA32F,this.format=e.RGBA,this.textureType=e.FLOAT,this.channelSize=n;else throw new Error(`Invalid number of channels: ${n}`)}encode(e,n){let t,o;return e.constructor!==Float32Array&&(Re.warning("Encoder","data was not of type Float32; creating new Float32Array"),o=new Float32Array(e)),n*this.channelSize>e.length?(Re.warning("Encoder","Source data too small. 
Allocating larger array"),o=e,t=this.allocate(n*this.channelSize),o.forEach((i,s)=>t[s]=i)):(o=e,t=o),t}allocate(e){return new Float32Array(e*4)}decode(e,n){return this.channelSize===1?e.filter((o,i)=>i%4===0).subarray(0,n):e.subarray(0,n)}},Nn=class{constructor(e,n=1,t){if(n!==1&&n!==4)throw new Error(`Invalid number of channels: ${n}`);this.internalFormat=e.RGBA,this.format=e.RGBA,this.channelSize=n,this.textureType=t||e.FLOAT}encode(e,n){let t=e;return this.channelSize===1&&(Re.verbose("Encoder","Exploding into a larger array"),t=this.allocate(n),e.forEach((o,i)=>t[i*4]=o)),t}allocate(e){return new Float32Array(e*4)}decode(e,n){return this.channelSize===1?e.filter((o,i)=>i%4===0).subarray(0,n):e.subarray(0,n)}},Mo=class{constructor(e,n=1){this.channelSize=4;if(n===1)this.internalFormat=e.ALPHA,this.format=e.ALPHA,this.textureType=e.UNSIGNED_BYTE,this.channelSize=n;else if(n===4)this.internalFormat=e.RGBA,this.format=e.RGBA,this.textureType=e.UNSIGNED_BYTE,this.channelSize=n;else throw new Error(`Invalid number of channels: ${n}`)}encode(e,n){return new Uint8Array(e.buffer,e.byteOffset,e.byteLength)}allocate(e){return new Uint8Array(e*this.channelSize)}decode(e,n){if(e instanceof Uint8Array)return e.subarray(0,n);throw new Error(`Invalid array type: ${e.constructor}`)}}});var Fn,jf,Qa,Kf=C(()=>{"use strict";De();Se();Fn=(r,e,n)=>{let t=n===0||n===1?1:4,o=n===2,i=n===1||n===2,s=n===4?e.length-1:void 0,a=n===4?e.map((u,l)=>l===e.length-1?u*4:u):void 0;return Qa(r,e,t,a,{isPacked:o,reverseWH:i,breakAxis:s})},jf=(r,e,n)=>{let t=Fn(r,e,n);return[t.width,t.height]},Qa=(r,e,n=1,t,o)=>{let i=!!(o&&o.isPacked),[s,a]=r.computeTextureWH(i&&t||e,o),u=e.length,l=e.slice(0);if(u===0&&(l=[1]),n===1)t=e;else if(i){if(n!==4)throw new Error("a packed texture must be 4-channel");t=e,u>0&&(l[u-1]=Math.ceil(l[u-1]/2)),u>1&&(l[u-2]=Math.ceil(l[u-2]/2))}else if(!t)throw new Error("Unpacked shape is needed when using channels > 1");return{width:s,height:a,channels:n,isPacked:i,shape:l,strides:J.computeStrides(l),unpackedShape:t,reversedWH:o&&o.reverseWH}}});var Fv,Vo,Zf=C(()=>{"use strict";Pt();Gr();De();Ff();Gf();Uf();qf();zn();Kf();Se();Fv=(r,e)=>{let n=e.map(o=>`${o.unpackedShape.join(",")};${o.width}x${o.height}`).join("_"),t=r.name;return r.cacheHint&&(t+="["+r.cacheHint+"]"),t+=":"+n,t},Vo=class{constructor(e){this.session=e;this.packedTextureDataCache=new Map,this.unpackedTextureDataCache=new Map}calculateTextureWidthAndHeight(e,n){return jf(this.session.layoutStrategy,e,n)}executeProgram(e,n){if(n.lengththis.readTexture(s),async a=>this.readTextureAsync(s),void 0,i),texture:t};return this.setTextureData(s.tensor.dataId,s,e.isPacked),s}getTextureData(e,n=!1){return this.session.isInitializer(e)?this.session.getTextureData(e,n):n?this.packedTextureDataCache.get(e):this.unpackedTextureDataCache.get(e)}setTextureData(e,n,t=!1){this.session.isInitializer(e)?this.session.setTextureData(e,n,t):(t?this.packedTextureDataCache:this.unpackedTextureDataCache).set(e,n)}isTextureLayoutCached(e,n=!1){return!!this.getTextureData(e.dataId,n)}dispose(){this.session.textureManager.clearActiveTextures(),this.packedTextureDataCache.forEach(e=>this.session.textureManager.releaseTexture(e)),this.packedTextureDataCache=new Map,this.unpackedTextureDataCache.forEach(e=>this.session.textureManager.releaseTexture(e)),this.unpackedTextureDataCache=new Map}readTexture(e){return 
e.isPacked?this.readTexture(this.unpack(e)):this.session.backend.glContext.isFloat32DownloadSupported?this.session.textureManager.readTexture(e,e.tensor.type,e.channels):this.session.textureManager.readUint8TextureAsFloat(Ja(this,e))}async readTextureAsync(e){return e.isPacked?this.readTextureAsync(this.unpack(e)):this.session.backend.glContext.isFloat32DownloadSupported?this.session.textureManager.readTextureAsync(e,e.tensor.type,e.channels):this.session.textureManager.readUint8TextureAsFloat(Ja(this,e))}pack(e){return this.executeProgram(zf(this,e.tensor),[e.tensor])}unpack(e){return this.executeProgram(Hf(this,e.tensor),[e.tensor])}}});var es,be,st=C(()=>{"use strict";es=class{constructor(e){Object.assign(this,e)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(e=>`${this[e]}`).join(";")),this.key}},be=r=>new es(r)});var Yf,Jf,Qf,Mv,Vv,ed=C(()=>{"use strict";st();He();Se();Yf={name:"BatchNormalization",inputNames:["A","Scale","B","Mean","Variance"],inputTypes:[0,0,0,0,0]},Jf=(r,e,n)=>(Vv(e),[r.run({...Yf,cacheHint:n.cacheKey,get:()=>Mv(r,e,n)},e)]),Qf=r=>{let e=r.attributes.getFloat("epsilon",1e-5),n=r.attributes.getFloat("momentum",.9),t=r.attributes.getInt("spatial",1);return be({epsilon:e,momentum:n,spatial:t})},Mv=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),o=e[0].dims.length,[i,s]=r.calculateTextureWidthAndHeight(e[1].dims,0),a=` float process(int[${o}] indices) { vec2 position = offsetToCoords(indices[1], ${i}, ${s}); float scale = getColorAsFloat(${t.texture2D}(Scale, position)); @@ -193,42 +193,42 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe float b = getColorAsFloat(${t.texture2D}(B, position)); return scale * ( (_A(indices) - mean) / sqrt(variance + float(${n.epsilon})) ) + b; - }`;return{...Yf,output:{dims:e[0].dims,type:e[0].type,textureType:0},shaderSource:a}},Fv=r=>{if(!r||r.length!==5)throw new Error("BatchNormalization requires 5 inputs.");let e=r[0],n=r[1],t=r[2],o=r[3],i=r[4];if(e.dims.length<3||n.dims.length!==1||t.dims.length!==1||o.dims.length!==1||i.dims.length!==1)throw new Error("invalid input shape.");if(n.dims[0]!==e.dims[1]||t.dims[0]!==e.dims[1]||o.dims[0]!==e.dims[1]||i.dims[0]!==e.dims[1])throw new Error("invalid input shape.");if(e.type!=="float32"&&e.type!=="float64"||n.type!=="float32"&&n.type!=="float64"||t.type!=="float32"&&t.type!=="float64"||o.type!=="float32"&&o.type!=="float64"||i.type!=="float32"&&i.type!=="float64")throw new Error("invalid input tensor types.")}});var Go,kt,X,Mn,Uo,pr=C(()=>{"use strict";Go=class{constructor(e,n,t,o){this.glContext=e;this.programInfo=n;this.inputTextureLayouts=t;this.outputTextureLayout=o}},kt=class{constructor(e){this.context=e}},X=class{constructor(e,n){this.routineBody=e;this.dependencies=n}},Mn=class{constructor(e,n,t){this.name=e;t?this.dependencies=t:this.dependencies=[],n&&(this.routineBody=n)}addDependency(e){e&&this.dependencies.push(e)}},Uo=class{static returnOrderedNodes(e){if(!e||e.length===0)return[];if(e.length===1)return e;let n=new Set,t=new Set,o=new Array;return this.createOrderedNodes(e,n,t,o),o}static createOrderedNodes(e,n,t,o){for(let i=0;i0)for(let s=0;s{if(!r||r.length!==5)throw new Error("BatchNormalization requires 5 inputs.");let e=r[0],n=r[1],t=r[2],o=r[3],i=r[4];if(e.dims.length<3||n.dims.length!==1||t.dims.length!==1||o.dims.length!==1||i.dims.length!==1)throw new Error("invalid input shape.");if(n.dims[0]!==e.dims[1]||t.dims[0]!==e.dims[1]||o.dims[0]!==e.dims[1]||i.dims[0]!==e.dims[1])throw new 
Error("invalid input shape.");if(e.type!=="float32"&&e.type!=="float64"||n.type!=="float32"&&n.type!=="float64"||t.type!=="float32"&&t.type!=="float64"||o.type!=="float32"&&o.type!=="float64"||i.type!=="float32"&&i.type!=="float64")throw new Error("invalid input tensor types.")}});var Go,kt,X,Mn,Uo,pr=C(()=>{"use strict";Go=class{constructor(e,n,t,o){this.glContext=e;this.programInfo=n;this.inputTextureLayouts=t;this.outputTextureLayout=o}},kt=class{constructor(e){this.context=e}},X=class{constructor(e,n){this.routineBody=e;this.dependencies=n}},Mn=class{constructor(e,n,t){this.name=e;t?this.dependencies=t:this.dependencies=[],n&&(this.routineBody=n)}addDependency(e){e&&this.dependencies.push(e)}},Uo=class{static returnOrderedNodes(e){if(!e||e.length===0)return[];if(e.length===1)return e;let n=new Set,t=new Set,o=new Array;return this.createOrderedNodes(e,n,t,o),o}static createOrderedNodes(e,n,t,o){for(let i=0;i0)for(let s=0;s b); } @@ -238,7 +238,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe v1.b > v2.b, v1.a > v2.a ); } - `,name:r,type:0}}function Kv(){let r="less_";return{body:` + `,name:r,type:0}}function Xv(){let r="less_";return{body:` float ${r}(float a, float b) { return float(a < b); } @@ -248,7 +248,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe v1.b < v2.b, v1.a < v2.a ); } - `,name:r,type:0}}function jv(){let r="and_";return{body:` + `,name:r,type:0}}function Zv(){let r="and_";return{body:` float ${r}(float a, float b) { return float( bool(a) && bool(b) ); } @@ -260,7 +260,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe b1.b && b2.b, b1.a && b2.a ); } - `,name:r,type:0}}function Xv(){let r="or_";return{body:` + `,name:r,type:0}}function Yv(){let r="or_";return{body:` float ${r}(float a, float b) { return float( bool(a) || bool(b) ); } @@ -272,7 +272,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe b1.b || b2.b, b1.a || b2.a ); } - `,name:r,type:0}}function Zv(){let r="xor_";return{body:` + `,name:r,type:0}}function Jv(){let r="xor_";return{body:` float ${r}(float a, float b) { return float( bool(a) ^^ bool(b) ); } @@ -284,7 +284,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe b1.b ^^ b2.b, b1.a ^^ b2.a ); } - `,name:r,type:0}}function Yv(){return Qv("pow")}function Jv(){let r="prelu_";return{body:` + `,name:r,type:0}}function Qv(){return tw("pow")}function ew(){let r="prelu_";return{body:` float ${r}(float a, float b) { return a < 0.0 ? a * b: a; } @@ -296,14 +296,14 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe v1.a < 0.0 ? 
v1.a * v2.a: v1.a ); } - `,name:r,type:0}}function Qv(r){let e=`${r}_`;return{body:` + `,name:r,type:0}}function tw(r){let e=`${r}_`;return{body:` float ${e}(float a, float b) { return ${r}(a, b); } vec4 ${e}(vec4 v1, vec4 v2) { return ${r}(v1, v2); } - `,name:e,type:0}}var Dt,ew,td,rd,nd,od,id,ad,sd,ud,ld,cd,fd,dd,pd=C(()=>{"use strict";De();pr();He();Se();Dt=(r,e,n,t=e[0].type,o)=>{let i=r.session.pack?2:0;return{name:n.name,inputNames:["A","B"],inputTypes:[i,i],cacheHint:o,get:()=>ew(r,e,n,t)}},ew=(r,e,n,t=e[0].type)=>{let o=r.session.pack?2:0,i=!J.areEqual(e[0].dims,e[1].dims),s=e[0].dims,a=r.session.pack;if(i){let f=gt.calcShape(e[0].dims,e[1].dims,!1);if(!f)throw new Error("Can't perform binary op on the given tensors");s=f;let c=s.length,p=e[0].dims.length!==0?e[0].dims.length:1,b=e[1].dims.length!==0?e[1].dims.length:1,h=e[0].dims.length!==0?"bcastIndices_A(indices, aindices);":"aindices[0] = 0;",g=e[1].dims.length!==0?"bcastIndices_B(indices, bindices);":"bindices[0] = 0;",T=oe(r.session.backend.glContext.version),w=a?` + `,name:e,type:0}}var Dt,rw,td,rd,nd,od,id,ad,sd,ud,ld,cd,fd,dd,pd=C(()=>{"use strict";De();pr();He();Se();Dt=(r,e,n,t=e[0].type,o)=>{let i=r.session.pack?2:0;return{name:n.name,inputNames:["A","B"],inputTypes:[i,i],cacheHint:o,get:()=>rw(r,e,n,t)}},rw=(r,e,n,t=e[0].type)=>{let o=r.session.pack?2:0,i=!J.areEqual(e[0].dims,e[1].dims),s=e[0].dims,a=r.session.pack;if(i){let f=gt.calcShape(e[0].dims,e[1].dims,!1);if(!f)throw new Error("Can't perform binary op on the given tensors");s=f;let c=s.length,p=e[0].dims.length!==0?e[0].dims.length:1,b=e[1].dims.length!==0?e[1].dims.length:1,h=e[0].dims.length!==0?"bcastIndices_A(indices, aindices);":"aindices[0] = 0;",g=e[1].dims.length!==0?"bcastIndices_B(indices, bindices);":"bindices[0] = 0;",T=oe(r.session.backend.glContext.version),w=a?` ${n.body} void main() { vec4 a = getAAtOutCoords(); @@ -326,7 +326,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe vec4 result = ${n.name}(v1, v2); ${u.output} = result; } - `;return{name:n.name,inputNames:["A","B"],inputTypes:[o,o],output:{dims:e[0].dims,type:t,textureType:o},shaderSource:l,hasMain:!0}},td=(r,e)=>[r.run(Dt(r,e,Vv()),e)],rd=(r,e)=>[r.run(Dt(r,e,jv(),"bool"),e)],nd=(r,e)=>[r.run(Dt(r,e,Gv()),e)],od=(r,e)=>[r.run(Dt(r,e,Hv(),"bool"),e)],id=(r,e)=>[r.run(Dt(r,e,qv(),"bool"),e)],ad=(r,e)=>[r.run(Dt(r,e,Kv(),"bool"),e)],sd=(r,e)=>[r.run(Dt(r,e,Uv()),e)],ud=(r,e)=>[r.run(Dt(r,e,Xv(),"bool"),e)],ld=(r,e)=>[r.run(Dt(r,e,Yv()),e)],cd=(r,e)=>[r.run(Dt(r,e,Jv()),e)],fd=(r,e)=>[r.run(Dt(r,e,Wv()),e)],dd=(r,e)=>[r.run(Dt(r,e,Zv(),"bool"),e)]});var md,hd,rw,gd=C(()=>{"use strict";De();md=(r,e,n)=>(rw(e),[r.cast(e[0],n)]),hd=r=>lt.tensorDataTypeFromProto(r.attributes.getInt("to")),rw=r=>{if(!r||r.length!==1)throw new Error("Cast requires 1 input.");if(r[0].type==="string")throw new Error("Invalid input type.")}});var nw,ow,bd,Wo,yd=C(()=>{"use strict";He();Se();Qt();Ur();nw=(r,e)=>({name:"Concat (packed)",inputNames:Array.from({length:r},(n,t)=>`X${t}`),inputTypes:Array(r).fill(2),cacheHint:e}),ow=(r,e,n,t)=>{let o=n[0].dims.slice();if(t>=o.length||t<-1*o.length)throw new Error("axis specified for concat doesn't match input dimensionality");t<0&&(t=o.length+t);let i=o.slice(0);for(let P=1;PP.dims),c=Gt(s),p=new Array(f.length-1);p[0]=f[0][t];for(let 
P=1;P[r.run(Dt(r,e,Uv()),e)],rd=(r,e)=>[r.run(Dt(r,e,Zv(),"bool"),e)],nd=(r,e)=>[r.run(Dt(r,e,Wv()),e)],od=(r,e)=>[r.run(Dt(r,e,jv(),"bool"),e)],id=(r,e)=>[r.run(Dt(r,e,Kv(),"bool"),e)],ad=(r,e)=>[r.run(Dt(r,e,Xv(),"bool"),e)],sd=(r,e)=>[r.run(Dt(r,e,Hv()),e)],ud=(r,e)=>[r.run(Dt(r,e,Yv(),"bool"),e)],ld=(r,e)=>[r.run(Dt(r,e,Qv()),e)],cd=(r,e)=>[r.run(Dt(r,e,ew()),e)],fd=(r,e)=>[r.run(Dt(r,e,qv()),e)],dd=(r,e)=>[r.run(Dt(r,e,Jv(),"bool"),e)]});var md,hd,ow,gd=C(()=>{"use strict";De();md=(r,e,n)=>(ow(e),[r.cast(e[0],n)]),hd=r=>lt.tensorDataTypeFromProto(r.attributes.getInt("to")),ow=r=>{if(!r||r.length!==1)throw new Error("Cast requires 1 input.");if(r[0].type==="string")throw new Error("Invalid input type.")}});var iw,aw,bd,Wo,yd=C(()=>{"use strict";He();Se();Qt();Ur();iw=(r,e)=>({name:"Concat (packed)",inputNames:Array.from({length:r},(n,t)=>`X${t}`),inputTypes:Array(r).fill(2),cacheHint:e}),aw=(r,e,n,t)=>{let o=n[0].dims.slice();if(t>=o.length||t<-1*o.length)throw new Error("axis specified for concat doesn't match input dimensionality");t<0&&(t=o.length+t);let i=o.slice(0);for(let P=1;PP.dims),c=Gt(s),p=new Array(f.length-1);p[0]=f[0][t];for(let P=1;P{let t=nw(e.length,n.cacheKey);return{...t,get:()=>ow(r,t,e,n.axis)}},Wo=(r,e,n)=>{let t=r.indexOf(e);return r.map((i,s)=>s===t?`${i} - ${n}`:i).join()}});var xd,iw,aw,sw,vd,uw,lw,cw,wd,fw,Td=C(()=>{"use strict";st();Se();yd();xd=(r,e,n)=>(fw(e),r.session.pack&&e[0].dims.length>1?[r.run(bd(r,e,n),e)]:[r.run(sw(r,e,n),e)]),iw=(r,e)=>({name:"Concat",inputNames:Array.from({length:r},(n,t)=>`X${t}`),inputTypes:Array(r).fill(0),cacheHint:e}),aw=(r,e,n,t)=>{let o=n[0].dims.slice();if(t>=o.length||t<-1*o.length)throw new Error("axis specified for concat doesn't match input dimensionality");t<0&&(t=o.length+t);let i=o.slice(0);for(let b=1;b{let t=iw(e.length,n.cacheKey);return{...t,get:()=>aw(r,t,e,n.axis)}},Wo=(r,e,n)=>{let t=r.indexOf(e);return r.map((i,s)=>s===t?`${i} - ${n}`:i).join()}});var xd,sw,uw,lw,vd,cw,fw,dw,wd,pw,Td=C(()=>{"use strict";st();Se();yd();xd=(r,e,n)=>(pw(e),r.session.pack&&e[0].dims.length>1?[r.run(bd(r,e,n),e)]:[r.run(lw(r,e,n),e)]),sw=(r,e)=>({name:"Concat",inputNames:Array.from({length:r},(n,t)=>`X${t}`),inputTypes:Array(r).fill(0),cacheHint:e}),uw=(r,e,n,t)=>{let o=n[0].dims.slice();if(t>=o.length||t<-1*o.length)throw new Error("axis specified for concat doesn't match input dimensionality");t<0&&(t=o.length+t);let i=o.slice(0);for(let b=1;b{let t=iw(e.length,n.cacheKey);return{...t,get:()=>aw(r,t,e,n.axis)}},vd=r=>`int getTextureWhereDataResides(int index) { + }`;return{...e,output:{dims:i,type:n[0].type,textureType:0},shaderSource:p}},lw=(r,e,n)=>{let t=sw(e.length,n.cacheKey);return{...t,get:()=>uw(r,t,e,n.axis)}},vd=r=>`int getTextureWhereDataResides(int index) { ${r.map((n,t)=>`if(index<${n}) {return ${t};} `).join("")} - }`,uw=r=>vd(r),lw=(r,e)=>{let n=[`float fetchDataFromCorrectTexture(int textureIndex, int indices[${e}]) {`];for(let t=0;t{let e=["int getSizeInConcatAxisValueFromIndex(int index) {"];for(let n=0;nbe({axis:r.attributes.getInt("axis")}),fw=r=>{if(!r||r.length<1)throw new Error("too few inputs");let e=r[0].type,n=r[0].dims.length;if(e==="string")throw new Error("string tensor is not supported yet");for(let t of r){if(t.type!==e)throw new Error("input tensors should be one type");if(t.dims.length!==n)throw new Error("input tensors should have the same shape")}}});function dw(){return Bt("abs")}function pw(){return Bt("acos")}function mw(){return Bt("asin")}function hw(){return Bt("atan")}function 
gw(){return Bt("ceil")}function bw(){return Bt("cos")}function yw(r){let e="elu";return{body:` + }`,cw=r=>vd(r),fw=(r,e)=>{let n=[`float fetchDataFromCorrectTexture(int textureIndex, int indices[${e}]) {`];for(let t=0;t{let e=["int getSizeInConcatAxisValueFromIndex(int index) {"];for(let n=0;nbe({axis:r.attributes.getInt("axis")}),pw=r=>{if(!r||r.length<1)throw new Error("too few inputs");let e=r[0].type,n=r[0].dims.length;if(e==="string")throw new Error("string tensor is not supported yet");for(let t of r){if(t.type!==e)throw new Error("input tensors should be one type");if(t.dims.length!==n)throw new Error("input tensors should have the same shape")}}});function mw(){return Bt("abs")}function hw(){return Bt("acos")}function gw(){return Bt("asin")}function bw(){return Bt("atan")}function yw(){return Bt("ceil")}function xw(){return Bt("cos")}function vw(r){let e="elu";return{body:` const float alpha = float(${r}); float ${e}_(float a) { @@ -394,7 +394,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe vec4 ${e}_(vec4 v) { return vec4(${e}_(v.x), ${e}_(v.y), ${e}_(v.z), ${e}_(v.w)); } - `,name:e,type:0}}function xw(){return Bt("exp")}function vw(){return Bt("floor")}function es(r,e){let n="clip";return{body:` + `,name:e,type:0}}function ww(){return Bt("exp")}function Tw(){return Bt("floor")}function ts(r,e){let n="clip";return{body:` const float min = float(${r}); const float max = float(${e}); @@ -404,14 +404,14 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe vec4 ${n}_(vec4 v) { return clamp(v, min, max); } - `,name:n,type:0}}function ww(){let r="indentity";return{body:` + `,name:n,type:0}}function _w(){let r="indentity";return{body:` float ${r}_(float a) { return a; } vec4 ${r}_(vec4 v) { return v; } - `,name:r,type:0}}function Tw(r){let e="leakyRelu";return{body:` + `,name:r,type:0}}function Iw(r){let e="leakyRelu";return{body:` const float alpha = float(${r}); float ${e}_(float a) { @@ -420,14 +420,14 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe vec4 ${e}_(vec4 v) { return vec4(${e}_(v.x), ${e}_(v.y), ${e}_(v.z), ${e}_(v.w)); } - `,name:e,type:0}}function _w(){return Bt("log")}function Iw(){let r="neg";return{body:` + `,name:e,type:0}}function Sw(){return Bt("log")}function $w(){let r="neg";return{body:` float ${r}_(float a) { return -a; } vec4 ${r}_(vec4 v) { return -v; } - `,name:r,type:0}}function Sw(){let r="not";return{body:` + `,name:r,type:0}}function Aw(){let r="not";return{body:` float ${r}_(float a) { return float( ! 
bool(a) ); } @@ -440,21 +440,21 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe bvec4 ${r}_(bvec4 v) { return bvec4(!v.x, !v.y, !v.z, !v.w); } - `,name:r,type:0}}function $w(){return Bt("sin")}function ts(){let r="relu";return{body:` + `,name:r,type:0}}function Pw(){return Bt("sin")}function rs(){let r="relu";return{body:` float ${r}_(float a) { return max( a, 0.0 ); } vec4 ${r}_(vec4 v) { return max( v, 0.0 ); } - `,name:r,type:0}}function rs(){let r="sigmoid";return{body:` + `,name:r,type:0}}function ns(){let r="sigmoid";return{body:` float ${r}_(float a) { return 1.0 / (1.0 + exp(-a)); } vec4 ${r}_(vec4 v) { return 1.0 / (1.0 + exp(-v)); } - `,name:r,type:0}}function Aw(){return Bt("sqrt")}function Pw(){return Bt("tan")}function Ow(){let r="tanh";return{body:` + `,name:r,type:0}}function Ow(){return Bt("sqrt")}function Ew(){return Bt("tan")}function Cw(){let r="tanh";return{body:` float ${r}_(float a) { a = clamp(a, -10., 10.); a = exp(2.*a); @@ -472,14 +472,14 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe vec4 ${r}_(vec4 v) { return ${r}(v); } - `,name:r,type:0}}var Ew,Ze,_d,Id,Sd,$d,ns,Ad,Pd,Cw,Od,Ed,Cd,kd,Dd,Bd,os,Ld,Rd,Nd,zd,Fd,Md,Vd,Gd,Ud,Wd,Hd,is=C(()=>{"use strict";st();De();pr();He();Se();Ew=(r,e,n,t)=>{let o=r.session.pack?2:0,i=oe(r.session.backend.glContext.version);return{...e,output:{dims:n.dims,type:n.type,textureType:o},shaderSource:` + `,name:r,type:0}}var kw,Ze,_d,Id,Sd,$d,os,Ad,Pd,Dw,Od,Ed,Cd,kd,Dd,Bd,is,Ld,Rd,Nd,zd,Fd,Md,Vd,Gd,Ud,Wd,Hd,as=C(()=>{"use strict";st();De();pr();He();Se();kw=(r,e,n,t)=>{let o=r.session.pack?2:0,i=oe(r.session.backend.glContext.version);return{...e,output:{dims:n.dims,type:n.type,textureType:o},shaderSource:` ${t.body} void main() { vec4 v = ${i.texture2D}(A, TexCoords); v = ${t.name}_(v); ${i.output} = v; } - `,hasMain:!0}},Ze=(r,e,n,t)=>{let o=r.session.pack?2:0,i={name:n.name,inputTypes:[o],inputNames:["A"],cacheHint:t};return{...i,get:()=>Ew(r,i,e,n)}},_d=(r,e)=>[r.run(Ze(r,e[0],dw()),e)],Id=(r,e)=>[r.run(Ze(r,e[0],pw()),e)],Sd=(r,e)=>[r.run(Ze(r,e[0],mw()),e)],$d=(r,e)=>[r.run(Ze(r,e[0],hw()),e)],ns=(r,e,n)=>[r.run(Ze(r,e[0],es(n.min,n.max),n.cacheKey),e)],Ad=r=>be({min:r.attributes.getFloat("min",Mr),max:r.attributes.getFloat("max",Vr)}),Pd=(r,e)=>{let n=Cw(r,e);return ns(r,[e[0]],n)},Cw=(r,e)=>{if(e.length>=3&&(!r.session.isInitializer(e[1].dataId)||!r.session.isInitializer(e[2].dataId)))throw new Error("dynamic clip attributes are not allowed");let n=e.length>=3?e[1].numberData[0]:Mr,t=e.length>=3?e[2].numberData[0]:Vr;return be({min:n,max:t})},Od=(r,e)=>[r.run(Ze(r,e[0],gw()),e)],Ed=(r,e)=>[r.run(Ze(r,e[0],bw()),e)],Cd=(r,e,n)=>[r.run(Ze(r,e[0],yw(n.alpha),n.cacheKey),e)],kd=r=>be({alpha:r.attributes.getFloat("alpha",1)}),Dd=(r,e)=>[r.run(Ze(r,e[0],xw()),e)],Bd=(r,e)=>[r.run(Ze(r,e[0],vw()),e)],os=(r,e)=>[r.run(Ze(r,e[0],ww()),e)],Ld=(r,e,n)=>[r.run(Ze(r,e[0],Tw(n.alpha),n.cacheKey),e)],Rd=r=>be({alpha:r.attributes.getFloat("alpha",.01)}),Nd=(r,e)=>[r.run(Ze(r,e[0],_w()),e)],zd=(r,e)=>[r.run(Ze(r,e[0],Iw()),e)],Fd=(r,e)=>[r.run(Ze(r,e[0],Sw()),e)],Md=(r,e)=>[r.run(Ze(r,e[0],ts()),e)],Vd=(r,e)=>[r.run(Ze(r,e[0],rs()),e)],Gd=(r,e)=>[r.run(Ze(r,e[0],$w()),e)],Ud=(r,e)=>[r.run(Ze(r,e[0],Aw()),e)],Wd=(r,e)=>[r.run(Ze(r,e[0],Pw()),e)],Hd=(r,e)=>[r.run(Ze(r,e[0],Ow()),e)]});function tr(r){let e;switch(r.activation){case"Relu":e=ts();break;case"Sigmoid":e=rs();break;case"Clip":e=es(r.clipMin,r.clipMax);break;default:return{activationFunction:"",applyActivation:""}}let 
n=e.name,t=e.body,o=`value = ${n}_(value);`;return{activationFunction:t,applyActivation:o}}var bn,Wr=C(()=>{"use strict";De();is();bn=r=>{let e=r.getString("activation","");if(e==="Clip"){let[n,t]=r.getFloats("activation_params",[Mr,Vr]);return{activation:e,clipMax:t,clipMin:n,activationCacheKey:`${e}:${n},${t}`}}return{activation:e,activationCacheKey:e}}});var Dw,Bw,qd,Kd=C(()=>{"use strict";Pt();He();Se();Ho();Wr();Dw=(r,e)=>({name:"GroupedConv",inputNames:r?["X","W","Bias"]:["X","W"],inputTypes:r?[0,0,0]:[0,0],cacheHint:e}),Bw=(r,e,n,t)=>{let i=e.length>2?"value += getBias(output_channel);":"",s=e[0].dims.slice(),a=e[1].dims.slice(),u=a[0]/t.group;Re.verbose("GroupedConv",`autpPad:${t.autoPad}, dilations:${t.dilations}, group:${t.group}, kernelShape:${t.kernelShape}, pads:${t.pads}, strides:${t.strides}`);let l=yn(s,a,t.dilations,t.pads,t.strides),f=oe(r.session.backend.glContext.version),{activationFunction:c,applyActivation:p}=tr(t),b=` + `,hasMain:!0}},Ze=(r,e,n,t)=>{let o=r.session.pack?2:0,i={name:n.name,inputTypes:[o],inputNames:["A"],cacheHint:t};return{...i,get:()=>kw(r,i,e,n)}},_d=(r,e)=>[r.run(Ze(r,e[0],mw()),e)],Id=(r,e)=>[r.run(Ze(r,e[0],hw()),e)],Sd=(r,e)=>[r.run(Ze(r,e[0],gw()),e)],$d=(r,e)=>[r.run(Ze(r,e[0],bw()),e)],os=(r,e,n)=>[r.run(Ze(r,e[0],ts(n.min,n.max),n.cacheKey),e)],Ad=r=>be({min:r.attributes.getFloat("min",Mr),max:r.attributes.getFloat("max",Vr)}),Pd=(r,e)=>{let n=Dw(r,e);return os(r,[e[0]],n)},Dw=(r,e)=>{if(e.length>=3&&(!r.session.isInitializer(e[1].dataId)||!r.session.isInitializer(e[2].dataId)))throw new Error("dynamic clip attributes are not allowed");let n=e.length>=3?e[1].numberData[0]:Mr,t=e.length>=3?e[2].numberData[0]:Vr;return be({min:n,max:t})},Od=(r,e)=>[r.run(Ze(r,e[0],yw()),e)],Ed=(r,e)=>[r.run(Ze(r,e[0],xw()),e)],Cd=(r,e,n)=>[r.run(Ze(r,e[0],vw(n.alpha),n.cacheKey),e)],kd=r=>be({alpha:r.attributes.getFloat("alpha",1)}),Dd=(r,e)=>[r.run(Ze(r,e[0],ww()),e)],Bd=(r,e)=>[r.run(Ze(r,e[0],Tw()),e)],is=(r,e)=>[r.run(Ze(r,e[0],_w()),e)],Ld=(r,e,n)=>[r.run(Ze(r,e[0],Iw(n.alpha),n.cacheKey),e)],Rd=r=>be({alpha:r.attributes.getFloat("alpha",.01)}),Nd=(r,e)=>[r.run(Ze(r,e[0],Sw()),e)],zd=(r,e)=>[r.run(Ze(r,e[0],$w()),e)],Fd=(r,e)=>[r.run(Ze(r,e[0],Aw()),e)],Md=(r,e)=>[r.run(Ze(r,e[0],rs()),e)],Vd=(r,e)=>[r.run(Ze(r,e[0],ns()),e)],Gd=(r,e)=>[r.run(Ze(r,e[0],Pw()),e)],Ud=(r,e)=>[r.run(Ze(r,e[0],Ow()),e)],Wd=(r,e)=>[r.run(Ze(r,e[0],Ew()),e)],Hd=(r,e)=>[r.run(Ze(r,e[0],Cw()),e)]});function tr(r){let e;switch(r.activation){case"Relu":e=rs();break;case"Sigmoid":e=ns();break;case"Clip":e=ts(r.clipMin,r.clipMax);break;default:return{activationFunction:"",applyActivation:""}}let n=e.name,t=e.body,o=`value = ${n}_(value);`;return{activationFunction:t,applyActivation:o}}var yn,Wr=C(()=>{"use strict";De();as();yn=r=>{let e=r.getString("activation","");if(e==="Clip"){let[n,t]=r.getFloats("activation_params",[Mr,Vr]);return{activation:e,clipMax:t,clipMin:n,activationCacheKey:`${e}:${n},${t}`}}return{activation:e,activationCacheKey:e}}});var Lw,Rw,qd,jd=C(()=>{"use strict";Pt();He();Se();Ho();Wr();Lw=(r,e)=>({name:"GroupedConv",inputNames:r?["X","W","Bias"]:["X","W"],inputTypes:r?[0,0,0]:[0,0],cacheHint:e}),Rw=(r,e,n,t)=>{let i=e.length>2?"value += getBias(output_channel);":"",s=e[0].dims.slice(),a=e[1].dims.slice(),u=a[0]/t.group;Re.verbose("GroupedConv",`autpPad:${t.autoPad}, dilations:${t.dilations}, group:${t.group}, kernelShape:${t.kernelShape}, pads:${t.pads}, strides:${t.strides}`);let 
l=xn(s,a,t.dilations,t.pads,t.strides),f=oe(r.session.backend.glContext.version),{activationFunction:c,applyActivation:p}=tr(t),b=` const ivec2 strides = ivec2(${t.strides[0]}, ${t.strides[1]}); const ivec2 pads = ivec2(${t.pads[0]}, ${t.pads[1]}); ${c} @@ -516,7 +516,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${p} ${f.output} = vec4(value, .0, .0, .0); } -`;return{...n,output:{dims:l,type:e[0].type,textureType:0},shaderSource:b,hasMain:!0}},qd=(r,e,n)=>{let t=Dw(e.length>2,n.cacheKey);return{...t,get:()=>Bw(r,e,t,n)}}});var Lw,Rw,jd,Xd=C(()=>{"use strict";He();Se();Ur();Lw=r=>({name:"Im2Col (packed)",inputNames:["A"],inputTypes:[2],cacheHint:r}),Rw=(r,e,n,t,o,i)=>{let s=n.dims,a=t.dims,u=2,l=3,f=o.length,c=[a[1]*a[2]*a[3],o[2]*o[3]],p=a[2]*a[3],b=er(),h=oe(r.session.backend.glContext.version),g="";for(let w=0;w<=1;w++)for(let v=0;v<=1;v++)g+=` +`;return{...n,output:{dims:l,type:e[0].type,textureType:0},shaderSource:b,hasMain:!0}},qd=(r,e,n)=>{let t=Lw(e.length>2,n.cacheKey);return{...t,get:()=>Rw(r,e,t,n)}}});var Nw,zw,Kd,Xd=C(()=>{"use strict";He();Se();Ur();Nw=r=>({name:"Im2Col (packed)",inputNames:["A"],inputTypes:[2],cacheHint:r}),zw=(r,e,n,t,o,i)=>{let s=n.dims,a=t.dims,u=2,l=3,f=o.length,c=[a[1]*a[2]*a[3],o[2]*o[3]],p=a[2]*a[3],b=er(),h=oe(r.session.backend.glContext.version),g="";for(let w=0;w<=1;w++)for(let v=0;v<=1;v++)g+=` blockIndex = rc.x + ${v}; pos = rc.y + ${w}; @@ -552,7 +552,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${g} ${h.output} = result; } - `;return{...e,output:{dims:c,type:n.type,textureType:2},shaderSource:T,hasMain:!0}},jd=(r,e,n,t,o)=>{let i=Lw(o.cacheKey);return{...i,get:()=>Rw(r,i,e,n,t,o)}}});function zw(r,e,n){let t=e[0].dims,o=e[1].dims,i=gt.calcShape(t,o,!0);if(!i)throw new Error("Can't use matmul on the given tensors");let s=bt(i.length),a=Gt(),{activationFunction:u,applyActivation:l}=tr(n),f=e.length>2,c=f?"value += getBiasForMatmul();":"",p=f?`${ss(s,a,e[2].dims,i,!1)}`:"",b=i.length,h=t.length,g=o.length,T=t[t.length-1],w=` + `;return{...e,output:{dims:c,type:n.type,textureType:2},shaderSource:T,hasMain:!0}},Kd=(r,e,n,t,o)=>{let i=Nw(o.cacheKey);return{...i,get:()=>zw(r,i,e,n,t,o)}}});function Mw(r,e,n){let t=e[0].dims,o=e[1].dims,i=gt.calcShape(t,o,!0);if(!i)throw new Error("Can't use matmul on the given tensors");let s=bt(i.length),a=Gt(),{activationFunction:u,applyActivation:l}=tr(n),f=e.length>2,c=f?"value += getBiasForMatmul();":"",p=f?`${us(s,a,e[2].dims,i,!1)}`:"",b=i.length,h=t.length,g=o.length,T=t[t.length-1],w=` ${u} ${p} float process(int indices[${b}]) { @@ -570,7 +570,7 @@ var J0=Object.create;var bo=Object.defineProperty;var Q0=Object.getOwnPropertyDe ${c} ${l} return value; - }`;return{...r,output:{dims:i,type:e[0].type,textureType:0},shaderSource:w}}function as(r,e){let n=Nw(r.length>2,e.activationCacheKey);return{...n,get:()=>zw(n,r,e)}}function ss(r,e,n,t,o){let i="",s=n.length,a=t.length,u=a-s;a<2&&s>0?i="coords":i=n.map((g,T)=>`coords.${e[T+u]}`).join(", ");let f=gt.getBroadcastDims(n,t).map(g=>`coords.${e[g+u]} = 0;`).join(` + }`;return{...r,output:{dims:i,type:e[0].type,textureType:0},shaderSource:w}}function ss(r,e){let n=Fw(r.length>2,e.activationCacheKey);return{...n,get:()=>Mw(n,r,e)}}function us(r,e,n,t,o){let i="",s=n.length,a=t.length,u=a-s;a<2&&s>0?i="coords":i=n.map((g,T)=>`coords.${e[T+u]}`).join(", ");let f=gt.getBroadcastDims(n,t).map(g=>`coords.${e[g+u]} = 0;`).join(` `),p=J.size(n)===1,b="vec4(outputValue.xx, 
outputValue.yy)";return p&&(b="vec4(outputValue.x)"),o?` vec4 getBiasForMatmul() { ${r} coords = getOutputCoords(); @@ -582,7 +582,7 @@ float getBiasForMatmul() { ${r} coords = getOutputCoords(); ${f} return getBias(coords.x); -}`}var Zd,Yd,Nw,Fw,qo=C(()=>{"use strict";De();Se();Qt();Wr();us();Zd=(r,e,n)=>(Fw(e),r.session.pack?[r.run(Ko(r,e,n),e)]:[r.run(as(e,n),e)]),Yd=r=>bn(r.attributes),Nw=(r,e)=>({name:"MatMul",inputNames:r?["A","B","Bias"]:["A","B"],inputTypes:r?[0,0,0]:[0,0],cacheHint:e});Fw=r=>{if(!r||r.length!==2)throw new Error("MatMul requires 2 inputs.");if(r[0].dims[r[0].dims.length-1]!==r[1].dims[r[1].dims.length-2])throw new Error("shared dimension does not match.");if(r[0].type!=="float32"&&r[0].type!=="float64"||r[1].type!=="float32"&&r[1].type!=="float64")throw new Error("inputs should be float type");if(r[0].type!==r[1].type)throw new Error("inputs types should match")}});function Gw(r,e,n,t){let o=[],i=[],s=n[0].dims,a=n[1].dims,u=s.length,l=a.length,f=t.length,c=f-u,p=f-l;o=s.map((S,$)=>`coords.${e[$+c]}`),o[u-1]="i*2",o.join(", "),i=a.map((S,$)=>`coords.${e[$+p]}`),i[l-2]="i*2",i.join(", ");let b=gt.getBroadcastDims(s,t),h=gt.getBroadcastDims(a,t),g=b.map(S=>`coords.${e[S+c]} = 0;`).join(` +}`}var Zd,Yd,Fw,Vw,qo=C(()=>{"use strict";De();Se();Qt();Wr();ls();Zd=(r,e,n)=>(Vw(e),r.session.pack?[r.run(jo(r,e,n),e)]:[r.run(ss(e,n),e)]),Yd=r=>yn(r.attributes),Fw=(r,e)=>({name:"MatMul",inputNames:r?["A","B","Bias"]:["A","B"],inputTypes:r?[0,0,0]:[0,0],cacheHint:e});Vw=r=>{if(!r||r.length!==2)throw new Error("MatMul requires 2 inputs.");if(r[0].dims[r[0].dims.length-1]!==r[1].dims[r[1].dims.length-2])throw new Error("shared dimension does not match.");if(r[0].type!=="float32"&&r[0].type!=="float64"||r[1].type!=="float32"&&r[1].type!=="float64")throw new Error("inputs should be float type");if(r[0].type!==r[1].type)throw new Error("inputs types should match")}});function Ww(r,e,n,t){let o=[],i=[],s=n[0].dims,a=n[1].dims,u=s.length,l=a.length,f=t.length,c=f-u,p=f-l;o=s.map((S,$)=>`coords.${e[$+c]}`),o[u-1]="i*2",o.join(", "),i=a.map((S,$)=>`coords.${e[$+p]}`),i[l-2]="i*2",i.join(", ");let b=gt.getBroadcastDims(s,t),h=gt.getBroadcastDims(a,t),g=b.map(S=>`coords.${e[S+c]} = 0;`).join(` `),T=h.map(S=>`coords.${e[S+p]} = 0;`).join(` `),w=`int lastDim = coords.${e[f-1]}; coords.${e[f-1]} = coords.${e[f-2]}; @@ -601,7 +601,7 @@ vec4 getBAtOutCoordsMatmul(int i) { ${T} vec4 outputValue = getB(${i}); return outputValue; -}`}function Uw(r,e){let n="";for(let t=0;t{"use strict";De();He();Se();Qt();Wr();qo();Mw=(r,e)=>({name:"MatMul (packed)",inputNames:r?["A","B","Bias"]:["A","B"],inputTypes:r?[2,2,2]:[2,2],cacheHint:e}),Vw=(r,e,n,t)=>{let o=n.length>2,i=o?"value += getBiasForMatmul();":"",s=n[0].dims,a=n[1].dims,u=gt.calcShape(s,a,!0),l=!J.areEqual(n[0].dims,n[1].dims);if(!u)throw new Error("Can't use matmul on the given tensors");let f=s[s.length-1],c=Math.ceil(f/2),p=s.length,b=a.length,h=oe(r.session.backend.glContext.version),g=bt(u.length),T=u.length,w=Gt(),{activationFunction:v,applyActivation:S}=tr(t),$=o?`${ss(g,w,n[2].dims,u,!0)}`:"",P=l?`${Gw(g,w,n,u)}`:"",E=l?"getAAtOutCoordsMatmul(i)":`getA(${Uw(w,p)})`,N=l?"getBAtOutCoordsMatmul(i)":`getB(${Ww(w,b)})`,z=l?"":`${g} rc = +}`}function Hw(r,e){let n="";for(let t=0;t{"use strict";De();He();Se();Qt();Wr();qo();Gw=(r,e)=>({name:"MatMul (packed)",inputNames:r?["A","B","Bias"]:["A","B"],inputTypes:r?[2,2,2]:[2,2],cacheHint:e}),Uw=(r,e,n,t)=>{let o=n.length>2,i=o?"value += 
getBiasForMatmul();":"",s=n[0].dims,a=n[1].dims,u=gt.calcShape(s,a,!0),l=!J.areEqual(n[0].dims,n[1].dims);if(!u)throw new Error("Can't use matmul on the given tensors");let f=s[s.length-1],c=Math.ceil(f/2),p=s.length,b=a.length,h=oe(r.session.backend.glContext.version),g=bt(u.length),T=u.length,w=Gt(),{activationFunction:v,applyActivation:S}=tr(t),$=o?`${us(g,w,n[2].dims,u,!0)}`:"",P=l?`${Ww(g,w,n,u)}`:"",E=l?"getAAtOutCoordsMatmul(i)":`getA(${Hw(w,p)})`,N=l?"getBAtOutCoordsMatmul(i)":`getB(${qw(w,b)})`,z=l?"":`${g} rc = getOutputCoords(); int lastDim = rc.${w[T-1]}; rc.${w[T-1]} = rc.${w[T-2]}; rc.${w[T-2]} = lastDim; `,q=` @@ -622,7 +622,7 @@ vec4 getBAtOutCoordsMatmul(int i) { ${i} ${S} ${h.output} = value; - }`;return{...e,output:{dims:u,type:n[0].type,textureType:2},shaderSource:q,hasMain:!0}},Ko=(r,e,n)=>{let t=Mw(e.length>2,n.activationCacheKey);return{...t,get:()=>Vw(r,t,e,n)}}});var Jd,Qd=C(()=>{"use strict";Ho();Xd();us();Jd=(r,e,n)=>{let t=e[0].dims,o=e[1].dims,i=yn(t,o,n.dilations,n.pads,n.strides),s=r.run(jd(r,e[0],e[1],i,n),[e[0]]),a=r.reshapePacked(e[1],[o[0],o[1]*o[2]*o[3]]),u=e.length===3?[a,s,e[2]]:[a,s],l=r.run(Ko(r,u,n),u);return r.reshapePacked(l,i)}});var Hw,qw,ep,ls,cs=C(()=>{"use strict";Se();Hw=r=>({name:"Im2Col",inputNames:["X"],inputTypes:[0],cacheHint:r}),qw=(r,e,n,t,o,i)=>{let s=n.dims,a=t.dims,u=o.length,l=ls(s,a,o,4),f=` + }`;return{...e,output:{dims:u,type:n[0].type,textureType:2},shaderSource:q,hasMain:!0}},jo=(r,e,n)=>{let t=Gw(e.length>2,n.activationCacheKey);return{...t,get:()=>Uw(r,t,e,n)}}});var Jd,Qd=C(()=>{"use strict";Ho();Xd();ls();Jd=(r,e,n)=>{let t=e[0].dims,o=e[1].dims,i=xn(t,o,n.dilations,n.pads,n.strides),s=r.run(Kd(r,e[0],e[1],i,n),[e[0]]),a=r.reshapePacked(e[1],[o[0],o[1]*o[2]*o[3]]),u=e.length===3?[a,s,e[2]]:[a,s],l=r.run(jo(r,u,n),u);return r.reshapePacked(l,i)}});var jw,Kw,ep,cs,fs=C(()=>{"use strict";Se();jw=r=>({name:"Im2Col",inputNames:["X"],inputTypes:[0],cacheHint:r}),Kw=(r,e,n,t,o,i)=>{let s=n.dims,a=t.dims,u=o.length,l=cs(s,a,o,4),f=` const int XC = ${s[1]}; const int XH = ${s[2]}; const int XW = ${s[3]}; @@ -666,7 +666,7 @@ vec4 getBAtOutCoordsMatmul(int i) { } return value; } - `;return{...e,output:{dims:l,type:n.type,textureType:4},shaderSource:f}},ep=(r,e,n,t,o)=>{let i=Hw(o.cacheKey);return{...i,get:()=>qw(r,i,e,n,t,o)}},ls=(r,e,n,t=4)=>[n[0],n[2],n[3],Math.ceil(r[1]*e[2]*e[3]/t)]});var Kw,jw,tp,rp=C(()=>{"use strict";De();He();Se();Wr();cs();Kw=(r,e)=>({name:"ConvDotProduct",inputNames:r?["Im2Col","K","B"]:["Im2Col","K"],inputTypes:r?[0,4,0]:[0,4],cacheKey:e.activationCacheKey}),jw=(r,e,n,t,o)=>{let i=n[0].dims,s=n[1].dims,a=[s[0],Math.ceil(i[1]*s[2]*s[3]/4)],u=ls(i,s,t),[l,f]=r.calculateTextureWidthAndHeight(a,4),c=J.computeStrides(u),[p,b]=r.calculateTextureWidthAndHeight(u,4),h=t.length,g=n.length<3?"0.0":"_B(b)",T=Math.ceil(i[1]*s[2]*s[3]/4),{activationFunction:w,applyActivation:v}=tr(o),S=oe(r.session.backend.glContext.version),$=` + `;return{...e,output:{dims:l,type:n.type,textureType:4},shaderSource:f}},ep=(r,e,n,t,o)=>{let i=jw(o.cacheKey);return{...i,get:()=>Kw(r,i,e,n,t,o)}},cs=(r,e,n,t=4)=>[n[0],n[2],n[3],Math.ceil(r[1]*e[2]*e[3]/t)]});var Xw,Zw,tp,rp=C(()=>{"use strict";De();He();Se();Wr();fs();Xw=(r,e)=>({name:"ConvDotProduct",inputNames:r?["Im2Col","K","B"]:["Im2Col","K"],inputTypes:r?[0,4,0]:[0,4],cacheKey:e.activationCacheKey}),Zw=(r,e,n,t,o)=>{let 
i=n[0].dims,s=n[1].dims,a=[s[0],Math.ceil(i[1]*s[2]*s[3]/4)],u=cs(i,s,t),[l,f]=r.calculateTextureWidthAndHeight(a,4),c=J.computeStrides(u),[p,b]=r.calculateTextureWidthAndHeight(u,4),h=t.length,g=n.length<3?"0.0":"_B(b)",T=Math.ceil(i[1]*s[2]*s[3]/4),{activationFunction:w,applyActivation:v}=tr(o),S=oe(r.session.backend.glContext.version),$=` ${w} float process(int indices[${h}]) { int b[1]; @@ -687,7 +687,7 @@ float process(int indices[${h}]) { } ${v} return value; -}`;return{...e,output:{dims:t,type:n[0].type,textureType:0},shaderSource:$}},tp=(r,e,n,t)=>{let o=Kw(e.length>2,t);return{...o,get:()=>jw(r,o,e,n,t)}}});var yn,fs,Xw,Zw,Yw,Jw,ds,Qw,Ho=C(()=>{"use strict";st();De();Kd();Qd();rp();Wr();cs();qo();yn=(r,e,n,t,o)=>{let i=r[0],s=r.slice(2),a=s.length,u=e[0],f=e.slice(2).map((h,g)=>h+(h-1)*(n[g]-1)),p=s.map((h,g)=>h+t[g]+t[g+a]).map((h,g)=>Math.floor((h-f[g]+o[g])/o[g]));return[i,u].concat(...p)},fs=(r,e,n)=>(Qw(e,n),Xw(r,e,n)),Xw=(r,e,n)=>{let t=Jw(n,e),o=r.session.pack,i=t.kernelShape[0]===1&&t.kernelShape[1]===1;return t.group>1?[r.run(qd(r,e,t),e)]:i&&o?[Zw(r,e,t)]:o&&e[0].dims.length===4&&e[0].dims[0]===1&&!i?[Jd(r,e,t)]:[Yw(r,e,t)]},Zw=(r,e,n)=>{let t=e[0].dims,o=e[1].dims,i=yn(t,o,n.dilations,n.pads,n.strides),s=r.reshapeUnpacked(e[0],[t[1],t[2]*t[3]]),a=r.reshapeUnpacked(e[1],[o[0],o[1]]),u=e.length>2?[a,s,e[2]]:[a,s],l=r.run(as(u,n),u);return r.reshapeUnpacked(l,i)},Yw=(r,e,n)=>{let t=e[0].dims,o=e[1].dims,i=yn(t,o,n.dilations,n.pads,n.strides),s=r.run(ep(r,e[0],e[1],i,n),[e[0]]),a=e.length===3?[s,e[1],e[2]]:[s,e[1]];return r.run(tp(r,e,i,n),a)},Jw=(r,e)=>{let n=r.kernelShape.slice();if(r.kernelShape.length===0)for(let i=2;i{let e=r.attributes,n=bn(e),t=e.getString("auto_pad","NOTSET"),o=e.getInts("dilations",[1,1]),i=e.getInt("group",1),s=e.getInts("kernel_shape",[]),a=e.getInts("pads",[0,0,0,0]),u=e.getInts("strides",[1,1]);return be({autoPad:t,dilations:o,group:i,kernelShape:s,pads:a,strides:u,...n})},Qw=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length!==4||r[1].dims.length!==4)throw new Error("currently only support 2-dimensional conv");let n=r[0].dims[1],t=r[1].dims[1]*e.group;if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");if(r.length===3&&(r[2].dims.length!==1||r[1].dims[0]!==r[2].dims[0]))throw new Error("invalid bias");let o=r[0].dims.length-2;if(e.dilations.length!==o)throw new Error(`dilations should be ${o}D`);if(e.strides.length!==o)throw new Error(`strides should be ${o}D`);if(e.pads.length!==o*2)throw new Error(`pads should be ${o*2}D`);if(e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape");if(r[0].type!=="float32"||r[1].type!=="float32")throw new Error("Conv input(X,W) should be float tensor");if(r.length===3&&r[2].type!=="float32")throw new Error("Conv input(bias) should be float tensor")}});var eT,tT,rT,np,nT,oT,iT,aT,sT,uT,op,lT,ip=C(()=>{"use strict";st();He();Se();Wr();eT=(r,e,n,t,o,i)=>(r-1)*e+n+(t-1)*o+1-i,tT=(r,e,n,t,o)=>{let i=Math.floor(r/2);e==="SAME_UPPER"?(n[t]=i,n[o]=r-i):e==="SAME_LOWER"&&(n[t]=r-i,n[o]=i)},rT=(r,e,n,t,o,i,s,a)=>{let u=r.length-2,l=a.length===0;for(let f=0;f(lT(e,n),nT(r,e,n)),nT=(r,e,n)=>{let t=uT(n,e);return[sT(r,e,t)]},oT=(r,e)=>({name:"ConvTranspose",inputNames:r?["X","W","B"]:["X","W"],inputTypes:r?[0,0,0]:[0,0],cacheHint:e}),iT=(r,e,n,t)=>{let 
i=e.length>2?"getB(output_channel)":"0.0",s=e[0].dims,a=e[1].dims,u=a[1],l=a[0]/t.group,f=[e[0].dims[0],e[1].dims[1]*t.group,...t.outputShape],c=oe(r.session.backend.glContext.version),{activationFunction:p,applyActivation:b}=tr(t),h=` +}`;return{...e,output:{dims:t,type:n[0].type,textureType:0},shaderSource:$}},tp=(r,e,n,t)=>{let o=Xw(e.length>2,t);return{...o,get:()=>Zw(r,o,e,n,t)}}});var xn,ds,Yw,Jw,Qw,eT,ps,tT,Ho=C(()=>{"use strict";st();De();jd();Qd();rp();Wr();fs();qo();xn=(r,e,n,t,o)=>{let i=r[0],s=r.slice(2),a=s.length,u=e[0],f=e.slice(2).map((h,g)=>h+(h-1)*(n[g]-1)),p=s.map((h,g)=>h+t[g]+t[g+a]).map((h,g)=>Math.floor((h-f[g]+o[g])/o[g]));return[i,u].concat(...p)},ds=(r,e,n)=>(tT(e,n),Yw(r,e,n)),Yw=(r,e,n)=>{let t=eT(n,e),o=r.session.pack,i=t.kernelShape[0]===1&&t.kernelShape[1]===1;return t.group>1?[r.run(qd(r,e,t),e)]:i&&o?[Jw(r,e,t)]:o&&e[0].dims.length===4&&e[0].dims[0]===1&&!i?[Jd(r,e,t)]:[Qw(r,e,t)]},Jw=(r,e,n)=>{let t=e[0].dims,o=e[1].dims,i=xn(t,o,n.dilations,n.pads,n.strides),s=r.reshapeUnpacked(e[0],[t[1],t[2]*t[3]]),a=r.reshapeUnpacked(e[1],[o[0],o[1]]),u=e.length>2?[a,s,e[2]]:[a,s],l=r.run(ss(u,n),u);return r.reshapeUnpacked(l,i)},Qw=(r,e,n)=>{let t=e[0].dims,o=e[1].dims,i=xn(t,o,n.dilations,n.pads,n.strides),s=r.run(ep(r,e[0],e[1],i,n),[e[0]]),a=e.length===3?[s,e[1],e[2]]:[s,e[1]];return r.run(tp(r,e,i,n),a)},eT=(r,e)=>{let n=r.kernelShape.slice();if(r.kernelShape.length===0)for(let i=2;i{let e=r.attributes,n=yn(e),t=e.getString("auto_pad","NOTSET"),o=e.getInts("dilations",[1,1]),i=e.getInt("group",1),s=e.getInts("kernel_shape",[]),a=e.getInts("pads",[0,0,0,0]),u=e.getInts("strides",[1,1]);return be({autoPad:t,dilations:o,group:i,kernelShape:s,pads:a,strides:u,...n})},tT=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length!==4||r[1].dims.length!==4)throw new Error("currently only support 2-dimensional conv");let n=r[0].dims[1],t=r[1].dims[1]*e.group;if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");if(r.length===3&&(r[2].dims.length!==1||r[1].dims[0]!==r[2].dims[0]))throw new Error("invalid bias");let o=r[0].dims.length-2;if(e.dilations.length!==o)throw new Error(`dilations should be ${o}D`);if(e.strides.length!==o)throw new Error(`strides should be ${o}D`);if(e.pads.length!==o*2)throw new Error(`pads should be ${o*2}D`);if(e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape");if(r[0].type!=="float32"||r[1].type!=="float32")throw new Error("Conv input(X,W) should be float tensor");if(r.length===3&&r[2].type!=="float32")throw new Error("Conv input(bias) should be float tensor")}});var rT,nT,oT,np,iT,aT,sT,uT,lT,cT,op,fT,ip=C(()=>{"use strict";st();He();Se();Wr();rT=(r,e,n,t,o,i)=>(r-1)*e+n+(t-1)*o+1-i,nT=(r,e,n,t,o)=>{let i=Math.floor(r/2);e==="SAME_UPPER"?(n[t]=i,n[o]=r-i):e==="SAME_LOWER"&&(n[t]=r-i,n[o]=i)},oT=(r,e,n,t,o,i,s,a)=>{let u=r.length-2,l=a.length===0;for(let f=0;f(fT(e,n),iT(r,e,n)),iT=(r,e,n)=>{let t=cT(n,e);return[lT(r,e,t)]},aT=(r,e)=>({name:"ConvTranspose",inputNames:r?["X","W","B"]:["X","W"],inputTypes:r?[0,0,0]:[0,0],cacheHint:e}),sT=(r,e,n,t)=>{let i=e.length>2?"getB(output_channel)":"0.0",s=e[0].dims,a=e[1].dims,u=a[1],l=a[0]/t.group,f=[e[0].dims[0],e[1].dims[1]*t.group,...t.outputShape],c=oe(r.session.backend.glContext.version),{activationFunction:p,applyActivation:b}=tr(t),h=` const ivec2 strides = ivec2(${t.strides[0]}, ${t.strides[1]}); const ivec2 pads = ivec2(${t.pads[0]}, ${t.pads[1]}); ${p} @@ -724,14 
+724,14 @@ float process(int indices[${h}]) { ${b} ${c.output} = vec4(value, .0, .0, .0); } -`;return{...n,output:{dims:f,type:e[0].type,textureType:0},shaderSource:h,hasMain:!0}},aT=(r,e,n)=>{let t=oT(e.length>2,n.cacheKey);return{...t,get:()=>iT(r,e,t,n)}},sT=(r,e,n)=>r.run(aT(r,e,n),e),uT=(r,e)=>{let n=r.kernelShape.slice();if(r.kernelShape.length===0)for(let a=2;a{let e=r.attributes,n=bn(e),t=e.getString("auto_pad","NOTSET"),o=e.getInts("dilations",[1,1]),i=e.getInt("group",1),s=e.getInts("kernel_shape",[]),a=e.getInts("output_padding",[0,0]),u=e.getInts("output_shape",[]),l=e.getInts("pads",[0,0,0,0]),f=e.getInts("strides",[1,1]);return be({autoPad:t,dilations:o,group:i,kernelShape:s,outputPadding:a,outputShape:u,pads:l,strides:f,...n})},lT=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length!==4||r[1].dims.length!==4)throw new Error("currently only support 2-dimensional conv");let n=r[0].dims[1],t=r[1].dims[0];if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");let o=r[1].dims[1]*e.group;if(r.length===3&&(r[2].dims.length!==1||r[2].dims[0]!==o))throw new Error("invalid bias");let i=r[0].dims.length-2;if(e.dilations.length!==i)throw new Error(`dilations should be ${i}D`);if(e.strides.length!==i)throw new Error(`strides should be ${i}D`);if(e.pads.length!==i*2)throw new Error(`pads should be ${i*2}D`);if(e.outputPadding.length!==i)throw new Error(`output_padding should be ${i}D`);if(e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape");if(e.outputShape.length!==0&&e.outputShape.length!==r[0].dims.length-2)throw new Error("invalid output shape");if(r[0].type!=="float32"||r[1].type!=="float32")throw new Error("ConvTranspose input(X,W) should be float tensor");if(r.length===3&&r[2].type!=="float32")throw new Error("ConvTranspose input(bias) should be float tensor")}});var ap,Hr,sp,cT,up,fT,dT,pT,jo=C(()=>{"use strict";st();De();Se();ap={name:"Transpose",inputNames:["A"],inputTypes:[0]},Hr=(r,e,n)=>(pT(e),[r.run({...ap,cacheHint:n.cacheKey,get:()=>cT(r,e[0],n.perm)},e)]),sp=r=>be({perm:r.attributes.getInts("perm",[])}),cT=(r,e,n)=>{let t=e.dims;n=up(t,n);let o=fT(t,n),i=t.length,s=` - ${dT("perm",n,i)} +`;return{...n,output:{dims:f,type:e[0].type,textureType:0},shaderSource:h,hasMain:!0}},uT=(r,e,n)=>{let t=aT(e.length>2,n.cacheKey);return{...t,get:()=>sT(r,e,t,n)}},lT=(r,e,n)=>r.run(uT(r,e,n),e),cT=(r,e)=>{let n=r.kernelShape.slice();if(r.kernelShape.length===0)for(let a=2;a{let e=r.attributes,n=yn(e),t=e.getString("auto_pad","NOTSET"),o=e.getInts("dilations",[1,1]),i=e.getInt("group",1),s=e.getInts("kernel_shape",[]),a=e.getInts("output_padding",[0,0]),u=e.getInts("output_shape",[]),l=e.getInts("pads",[0,0,0,0]),f=e.getInts("strides",[1,1]);return be({autoPad:t,dilations:o,group:i,kernelShape:s,outputPadding:a,outputShape:u,pads:l,strides:f,...n})},fT=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length!==4||r[1].dims.length!==4)throw new Error("currently only support 2-dimensional conv");let n=r[0].dims[1],t=r[1].dims[0];if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");let o=r[1].dims[1]*e.group;if(r.length===3&&(r[2].dims.length!==1||r[2].dims[0]!==o))throw new Error("invalid bias");let i=r[0].dims.length-2;if(e.dilations.length!==i)throw new Error(`dilations should be ${i}D`);if(e.strides.length!==i)throw new Error(`strides should be 
${i}D`);if(e.pads.length!==i*2)throw new Error(`pads should be ${i*2}D`);if(e.outputPadding.length!==i)throw new Error(`output_padding should be ${i}D`);if(e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape");if(e.outputShape.length!==0&&e.outputShape.length!==r[0].dims.length-2)throw new Error("invalid output shape");if(r[0].type!=="float32"||r[1].type!=="float32")throw new Error("ConvTranspose input(X,W) should be float tensor");if(r.length===3&&r[2].type!=="float32")throw new Error("ConvTranspose input(bias) should be float tensor")}});var ap,Hr,sp,dT,up,pT,mT,hT,Ko=C(()=>{"use strict";st();De();Se();ap={name:"Transpose",inputNames:["A"],inputTypes:[0]},Hr=(r,e,n)=>(hT(e),[r.run({...ap,cacheHint:n.cacheKey,get:()=>dT(r,e[0],n.perm)},e)]),sp=r=>be({perm:r.attributes.getInts("perm",[])}),dT=(r,e,n)=>{let t=e.dims;n=up(t,n);let o=pT(t,n),i=t.length,s=` + ${mT("perm",n,i)} float process(int indices[${i}]) { int a[${i}]; perm(a, indices); return _A(a); - }`;return{...ap,output:{dims:o,type:e.type,textureType:0},shaderSource:s}},up=(r,e)=>(e&&e.length!==r.length&&(e=[...r.keys()].reverse()),e),fT=(r,e)=>(e=up(r,e),J.sortBasedOnPerm(r,e)),dT=(r,e,n)=>{let t=[];t.push(`void ${r}(out int a[${n}], int src[${n}]) {`);for(let o=0;o{if(!r||r.length!==1)throw new Error("Transpose requires 1 input.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("input should be float tensor")}});var lp,cp,mT,fp=C(()=>{"use strict";jo();lp=(r,e,n)=>{mT(e);let t=n.blocksize,o=t*t,i=n.mode==="DCR"?[0,3,4,1,5,2]:[0,1,4,2,5,3],s=n.mode==="DCR"?[e[0].dims[0],t,t,e[0].dims[1]/o,e[0].dims[2],e[0].dims[3]]:[e[0].dims[0],e[0].dims[1]/o,t,t,e[0].dims[2],e[0].dims[3]],a=r.reshapeUnpacked(e[0],s),u={perm:i,cacheKey:`${i}`},[l]=Hr(r,[a],u),f=[e[0].dims[0],e[0].dims[1]/o,e[0].dims[2]*t,e[0].dims[3]*t];return[r.reshapeUnpacked(l,f)]},cp=r=>{let e=r.attributes.getInt("blocksize");if(e<1)throw new Error(`blocksize must be >= 1, but got : ${e} for DepthToSpace`);let n=r.attributes.getString("mode","DCR");if(n!=="DCR"&&n!=="CRD")throw new Error(`unrecognized mode: ${n} for DepthToSpace`);return{mode:n,blocksize:e}},mT=r=>{if(r.length!==1)throw new Error(`DepthToSpace expect 1 inputs, but got ${r.length}`);if(r[0].type==="string"||r[0].dims.length!==4)throw new TypeError("DepthToSpace input should be a 4-D numeric tensor")}});var dp,pp,hT,mp=C(()=>{"use strict";De();dp=(r,e,n)=>{hT(e,n);let t=J.flattenShape(e[0].dims,n);return[r.reshapeUnpacked(e[0],t)]},pp=r=>r.attributes.getInt("axis",1),hT=(r,e)=>{if(!r||r.length!==1)throw new Error("Flatten requires 1 input.");let n=r[0].dims.length;if(n===0)throw new Error("scalar tensor is not supported.");if(e<-n||e>n)throw new Error("Invalid axis");if(r[0].type==="string")throw new Error("string tensor is not supported.")}});var Sr,Vn=C(()=>{"use strict";Sr=["float32","float64","int32","int16","int8","uint16","uint32","uint8"]});var hp,gp,gT,bT,yT,xT,bp=C(()=>{"use strict";st();Vn();De();Se();hp=(r,e,n)=>(xT(e,n.axis),[r.run(yT(r,e,n),e)]),gp=r=>be({axis:r.attributes.getInt("axis",0)}),gT={name:"Gather",inputNames:["A","B"],inputTypes:[0,0]},bT=(r,e,n,t)=>{let o=n[0].dims.slice(),i=n[1].dims.slice(),s=new Array(o.length+i.length-1);t=J.normalizeAxis(t,o.length);let a=[];for(let p=0;p(e&&e.length!==r.length&&(e=[...r.keys()].reverse()),e),pT=(r,e)=>(e=up(r,e),J.sortBasedOnPerm(r,e)),mT=(r,e,n)=>{let t=[];t.push(`void ${r}(out int a[${n}], int src[${n}]) {`);for(let o=0;o{if(!r||r.length!==1)throw new Error("Transpose requires 
1 input.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("input should be float tensor")}});var lp,cp,gT,fp=C(()=>{"use strict";Ko();lp=(r,e,n)=>{gT(e);let t=n.blocksize,o=t*t,i=n.mode==="DCR"?[0,3,4,1,5,2]:[0,1,4,2,5,3],s=n.mode==="DCR"?[e[0].dims[0],t,t,e[0].dims[1]/o,e[0].dims[2],e[0].dims[3]]:[e[0].dims[0],e[0].dims[1]/o,t,t,e[0].dims[2],e[0].dims[3]],a=r.reshapeUnpacked(e[0],s),u={perm:i,cacheKey:`${i}`},[l]=Hr(r,[a],u),f=[e[0].dims[0],e[0].dims[1]/o,e[0].dims[2]*t,e[0].dims[3]*t];return[r.reshapeUnpacked(l,f)]},cp=r=>{let e=r.attributes.getInt("blocksize");if(e<1)throw new Error(`blocksize must be >= 1, but got : ${e} for DepthToSpace`);let n=r.attributes.getString("mode","DCR");if(n!=="DCR"&&n!=="CRD")throw new Error(`unrecognized mode: ${n} for DepthToSpace`);return{mode:n,blocksize:e}},gT=r=>{if(r.length!==1)throw new Error(`DepthToSpace expect 1 inputs, but got ${r.length}`);if(r[0].type==="string"||r[0].dims.length!==4)throw new TypeError("DepthToSpace input should be a 4-D numeric tensor")}});var dp,pp,bT,mp=C(()=>{"use strict";De();dp=(r,e,n)=>{bT(e,n);let t=J.flattenShape(e[0].dims,n);return[r.reshapeUnpacked(e[0],t)]},pp=r=>r.attributes.getInt("axis",1),bT=(r,e)=>{if(!r||r.length!==1)throw new Error("Flatten requires 1 input.");let n=r[0].dims.length;if(n===0)throw new Error("scalar tensor is not supported.");if(e<-n||e>n)throw new Error("Invalid axis");if(r[0].type==="string")throw new Error("string tensor is not supported.")}});var Sr,Vn=C(()=>{"use strict";Sr=["float32","float64","int32","int16","int8","uint16","uint32","uint8"]});var hp,gp,yT,xT,vT,wT,bp=C(()=>{"use strict";st();Vn();De();Se();hp=(r,e,n)=>(wT(e,n.axis),[r.run(vT(r,e,n),e)]),gp=r=>be({axis:r.attributes.getInt("axis",0)}),yT={name:"Gather",inputNames:["A","B"],inputTypes:[0,0]},xT=(r,e,n,t)=>{let o=n[0].dims.slice(),i=n[1].dims.slice(),s=new Array(o.length+i.length-1);t=J.normalizeAxis(t,o.length);let a=[];for(let p=0;p{let t={...gT,cacheHint:n.cacheKey};return{...t,get:()=>bT(r,t,e,n.axis)}},xT=(r,e)=>{if(!r||r.length!==2)throw new Error("Gather requires 2 inputs.");let n=r[0].dims.length;if(n<1)throw new Error("Invalid input shape.");if(e<-n||e>n-1)throw new Error("Invalid axis.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invaid input type.");if(r[1].type!=="int32"&&r[1].type!=="int16")throw new Error("Invaid input type.")}});var ps,yp,xp,vp,vT,wT,TT,wp=C(()=>{"use strict";st();De();Se();ps=(r,e,n)=>(TT(e,n),[r.run(vT(e,n),e)]),yp=(r,e)=>{let n=r.attributes.getInt("transA",0)!==0,t=r.attributes.getInt("transB",0)!==0,o=r.attributes.getFloat("alpha",1),i=r.attributes.getFloat("beta",1);return be({transA:n,transB:t,alpha:o,beta:i,isOptionalC:e})},xp=r=>yp(r,!1),vp=r=>yp(r,!0),vT=(r,e)=>{let n={name:"Gemm",inputNames:r.length===3?["A","B","C"]:["A","B"],inputTypes:r.length===3?[0,0,0]:[0,0],key:e.cacheKey};return{...n,get:()=>wT(n,r,e)}},wT=(r,e,n)=>{let t=e[0].dims.slice(),o=e[1].dims.slice(),[i,s]=No.getShapeOfGemmResult(t,n.transA,o,n.transB,e.length===3?e[2].dims:void 0),a=[i,s];if(!a)throw new Error("Can't use gemm on the given tensors");let u=t[t.length-1],l="";n.transA&&(u=t[0]),n.transA&&n.transB?l="value += _A_T(a) * _B_T(b);":n.transA&&!n.transB?l="value += _A_T(a) * _B(b);":!n.transA&&n.transB?l="value += _A(a) * _B_T(b);":!n.transA&&!n.transB&&(l="value += _A(a) * _B(b);");let f=a.length,c=e.length===3?`int c[${e[2].dims.length}];`:"",p=e.length===3?"bcastIndices_C(indices, c);":"",b=e.length===3?"value += beta * _C(c);":"",h=` + 
}`;return{...e,output:{dims:s,type:n[0].type,textureType:0},shaderSource:c}},vT=(r,e,n)=>{let t={...yT,cacheHint:n.cacheKey};return{...t,get:()=>xT(r,t,e,n.axis)}},wT=(r,e)=>{if(!r||r.length!==2)throw new Error("Gather requires 2 inputs.");let n=r[0].dims.length;if(n<1)throw new Error("Invalid input shape.");if(e<-n||e>n-1)throw new Error("Invalid axis.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invaid input type.");if(r[1].type!=="int32"&&r[1].type!=="int16")throw new Error("Invaid input type.")}});var ms,yp,xp,vp,TT,_T,IT,wp=C(()=>{"use strict";st();De();Se();ms=(r,e,n)=>(IT(e,n),[r.run(TT(e,n),e)]),yp=(r,e)=>{let n=r.attributes.getInt("transA",0)!==0,t=r.attributes.getInt("transB",0)!==0,o=r.attributes.getFloat("alpha",1),i=r.attributes.getFloat("beta",1);return be({transA:n,transB:t,alpha:o,beta:i,isOptionalC:e})},xp=r=>yp(r,!1),vp=r=>yp(r,!0),TT=(r,e)=>{let n={name:"Gemm",inputNames:r.length===3?["A","B","C"]:["A","B"],inputTypes:r.length===3?[0,0,0]:[0,0],key:e.cacheKey};return{...n,get:()=>_T(n,r,e)}},_T=(r,e,n)=>{let t=e[0].dims.slice(),o=e[1].dims.slice(),[i,s]=No.getShapeOfGemmResult(t,n.transA,o,n.transB,e.length===3?e[2].dims:void 0),a=[i,s];if(!a)throw new Error("Can't use gemm on the given tensors");let u=t[t.length-1],l="";n.transA&&(u=t[0]),n.transA&&n.transB?l="value += _A_T(a) * _B_T(b);":n.transA&&!n.transB?l="value += _A_T(a) * _B(b);":!n.transA&&n.transB?l="value += _A(a) * _B_T(b);":!n.transA&&!n.transB&&(l="value += _A(a) * _B(b);");let f=a.length,c=e.length===3?`int c[${e[2].dims.length}];`:"",p=e.length===3?"bcastIndices_C(indices, c);":"",b=e.length===3?"value += beta * _C(c);":"",h=` float process(int indices[${f}]) { int a[${f}]; int b[${f}]; @@ -761,12 +761,12 @@ float process(int indices[${h}]) { value = value * alpha; ${b} return value; - }`;return{...r,output:{dims:a,type:e[0].type,textureType:0},variables:[{name:"alpha",type:"float",data:n.alpha},{name:"beta",type:"float",data:n.beta}],shaderSource:h}},TT=(r,e)=>{if(!r)throw new Error("Input is missing");if(e.isOptionalC&&(r.length<2||r.length>3))throw new Error("Invaid input shape.");if(!e.isOptionalC&&r.length!==3)throw new Error("Gemm requires 3 inputs");if(r.length===3&&r[2].dims.length!==1&&r[2].dims.length!==2)throw new Error("Invalid input shape of C");if(r[0].type!=="float32"&&r[0].type!=="float64"||r[1].type!=="float32"&&r[1].type!=="float64"||r.length===3&&r[2].type!=="float32"&&r[2].type!=="float64")throw new Error("Invalid input type.");if(r[0].type!==r[1].type||r.length===3&&r[0].type!==r[2].type)throw new Error("Input types are mismatched")}});var Tp,_p,_T,IT,ST,$T,AT,Ip=C(()=>{"use strict";st();Se();Tp=(r,e,n)=>(AT(e),[r.run(ST(r,e,n),e)]),_p=r=>{let e=r.attributes.getFloat("scale"),n=r.attributes.getFloats("bias");return be({scale:e,bias:n})},_T={name:"ImageScaler",inputNames:["X"],inputTypes:[0]},IT=(r,e,n,t)=>{let o=n[0].dims.slice(),i=o.length,a=` - ${$T(t.bias.length)} + }`;return{...r,output:{dims:a,type:e[0].type,textureType:0},variables:[{name:"alpha",type:"float",data:n.alpha},{name:"beta",type:"float",data:n.beta}],shaderSource:h}},IT=(r,e)=>{if(!r)throw new Error("Input is missing");if(e.isOptionalC&&(r.length<2||r.length>3))throw new Error("Invaid input shape.");if(!e.isOptionalC&&r.length!==3)throw new Error("Gemm requires 3 inputs");if(r.length===3&&r[2].dims.length!==1&&r[2].dims.length!==2)throw new Error("Invalid input shape of 
C");if(r[0].type!=="float32"&&r[0].type!=="float64"||r[1].type!=="float32"&&r[1].type!=="float64"||r.length===3&&r[2].type!=="float32"&&r[2].type!=="float64")throw new Error("Invalid input type.");if(r[0].type!==r[1].type||r.length===3&&r[0].type!==r[2].type)throw new Error("Input types are mismatched")}});var Tp,_p,ST,$T,AT,PT,OT,Ip=C(()=>{"use strict";st();Se();Tp=(r,e,n)=>(OT(e),[r.run(AT(r,e,n),e)]),_p=r=>{let e=r.attributes.getFloat("scale"),n=r.attributes.getFloats("bias");return be({scale:e,bias:n})},ST={name:"ImageScaler",inputNames:["X"],inputTypes:[0]},$T=(r,e,n,t)=>{let o=n[0].dims.slice(),i=o.length,a=` + ${PT(t.bias.length)} float process(int indices[${i}]) { return _X(indices) * scale + getBias(bias, indices[1]); - }`;return{...e,output:{dims:o,type:n[0].type,textureType:0},variables:[{name:"bias",type:"float",arrayLength:t.bias.length,data:t.bias},{name:"scale",type:"float",data:t.scale}],shaderSource:a}},ST=(r,e,n)=>{let t={..._T,cacheHint:n.cacheKey};return{...t,get:()=>IT(r,t,e,n)}},$T=r=>{let e=[`float getBias(float bias[${r}], int channel) {`];for(let n=0;n{if(!r||r.length!==1)throw new Error("ImageScaler requires 1 input.");if(r[0].dims.length!==4)throw new Error("Invalid input shape.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type.")}});var $p,Ap,Sp,PT,OT,ET,CT,kT,DT,Pp=C(()=>{"use strict";He();Se();$p=(r,e,n)=>{DT(e);let t=r.run(OT(e[0]),e);return[r.run(kT(r,e[0],n,t.dims),[e[0],t,e[1],e[2]])]},Ap=r=>r.attributes.getFloat("epsilon",1e-5),Sp={name:"InstanceNormalization_MeanAndVariance",inputNames:["X"],inputTypes:[0]},PT=(r,e)=>{let n=e.dims.slice(),t=n[1],o=n[2]*n[3],i=[n[0],t],s=` + }`;return{...e,output:{dims:o,type:n[0].type,textureType:0},variables:[{name:"bias",type:"float",arrayLength:t.bias.length,data:t.bias},{name:"scale",type:"float",data:t.scale}],shaderSource:a}},AT=(r,e,n)=>{let t={...ST,cacheHint:n.cacheKey};return{...t,get:()=>$T(r,t,e,n)}},PT=r=>{let e=[`float getBias(float bias[${r}], int channel) {`];for(let n=0;n{if(!r||r.length!==1)throw new Error("ImageScaler requires 1 input.");if(r[0].dims.length!==4)throw new Error("Invalid input shape.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type.")}});var $p,Ap,Sp,ET,CT,kT,DT,BT,LT,Pp=C(()=>{"use strict";He();Se();$p=(r,e,n)=>{LT(e);let t=r.run(CT(e[0]),e);return[r.run(BT(r,e[0],n,t.dims),[e[0],t,e[1],e[2]])]},Ap=r=>r.attributes.getFloat("epsilon",1e-5),Sp={name:"InstanceNormalization_MeanAndVariance",inputNames:["X"],inputTypes:[0]},ET=(r,e)=>{let n=e.dims.slice(),t=n[1],o=n[2]*n[3],i=[n[0],t],s=` vec4 process(int[2] indices) { vec4 v = vec4(0.0); int a[4]; @@ -795,7 +795,7 @@ float process(int indices[${h}]) { v.g = temp / float(${o}); return v; - }`;return{...r,output:{dims:i,type:e.type,textureType:4},shaderSource:s}},OT=r=>({...Sp,get:()=>PT(Sp,r)}),ET={name:"InstanceNormalization_ComputeOutput",inputNames:["X","MeanAndVariance","Scale","B"],inputTypes:[0,4,0,0]},CT=(r,e,n,t,o)=>{let i=oe(r.session.backend.glContext.version),[s,a]=r.calculateTextureWidthAndHeight(o,4),[u,l]=[s/4,a],f=` + }`;return{...r,output:{dims:i,type:e.type,textureType:4},shaderSource:s}},CT=r=>({...Sp,get:()=>ET(Sp,r)}),kT={name:"InstanceNormalization_ComputeOutput",inputNames:["X","MeanAndVariance","Scale","B"],inputTypes:[0,4,0,0]},DT=(r,e,n,t,o)=>{let i=oe(r.session.backend.glContext.version),[s,a]=r.calculateTextureWidthAndHeight(o,4),[u,l]=[s/4,a],f=` vec4 get_MeanAndVariance(int[2] mv) { int offset = indicesToOffset_MeanAndVariance(mv); vec2 
coords = offsetToCoords(offset, ${u}, ${l}); @@ -816,7 +816,7 @@ float process(int indices[${h}]) { float b = _B(sb); return scale * (_X(indices) - mean) / sqrt(variance + epsilon) + b; - }`;return{...e,output:{dims:n.dims,type:n.type,textureType:0},variables:[{name:"epsilon",type:"float",data:t}],shaderSource:f}},kT=(r,e,n,t)=>{let o={...ET,cacheHint:`${n}`};return{...o,get:()=>CT(r,o,e,n,t)}},DT=r=>{if(!r||r.length!==3)throw new Error("InstanceNormalization requires 3 inputs.");let e=r[0],n=r[1],t=r[2];if(e.dims.length<3||n.dims.length!==1||t.dims.length!==1)throw new Error("Invalid input shape.");if(n.dims[0]!==e.dims[1]||t.dims[0]!==e.dims[1])throw new Error("Input shapes are mismatched.");if(e.type!=="float32"&&e.type!=="float64"||n.type!=="float32"&&n.type!=="float64"||t.type!=="float32"&&t.type!=="float64")throw new Error("Invalid input type.");if(r[0].dims.length!==4)throw new Error("Only support 4-D input shape.")}});function BT(r,e){let n=r[0].dims[1],t=r[0].dims.length,o=-Math.floor((e.size-1)/2),i=Math.ceil((e.size-1)/2),s=`float(${e.alpha}) / float(${e.size})`,a=`float(${e.bias})`,u=`float(${e.beta})`,l=` + }`;return{...e,output:{dims:n.dims,type:n.type,textureType:0},variables:[{name:"epsilon",type:"float",data:t}],shaderSource:f}},BT=(r,e,n,t)=>{let o={...kT,cacheHint:`${n}`};return{...o,get:()=>DT(r,o,e,n,t)}},LT=r=>{if(!r||r.length!==3)throw new Error("InstanceNormalization requires 3 inputs.");let e=r[0],n=r[1],t=r[2];if(e.dims.length<3||n.dims.length!==1||t.dims.length!==1)throw new Error("Invalid input shape.");if(n.dims[0]!==e.dims[1]||t.dims[0]!==e.dims[1])throw new Error("Input shapes are mismatched.");if(e.type!=="float32"&&e.type!=="float64"||n.type!=="float32"&&n.type!=="float64"||t.type!=="float32"&&t.type!=="float64")throw new Error("Invalid input type.");if(r[0].dims.length!==4)throw new Error("Only support 4-D input shape.")}});function RT(r,e){let n=r[0].dims[1],t=r[0].dims.length,o=-Math.floor((e.size-1)/2),i=Math.ceil((e.size-1)/2),s=`float(${e.alpha}) / float(${e.size})`,a=`float(${e.bias})`,u=`float(${e.beta})`,l=` float process(int indices[${t}]) { int c = indices[1]; float x = _X(indices); @@ -831,11 +831,11 @@ float process(int indices[${h}]) { } } return x / pow(${a} + ${s} * square_sum, ${u}); - }`;return{...Cp,cacheHint:e.cacheKey,output:{dims:r[0].dims,type:r[0].type,textureType:0},shaderSource:l}}function LT(r,e){return{...Cp,cacheHint:e.cacheKey,get:()=>BT(r,e)}}var Op,Ep,Cp,RT,kp=C(()=>{"use strict";st();Se();Op=(r,e,n)=>(RT(e),[r.run(LT(e,n),e)]),Ep=r=>{let e=r.attributes.getFloat("alpha",1e-4),n=r.attributes.getFloat("beta",.75),t=r.attributes.getFloat("bias",1),o=r.attributes.getInt("size");return be({alpha:e,beta:n,bias:t,size:o})},Cp={name:"LRN",inputNames:["X"],inputTypes:[0]};RT=r=>{if(!r||r.length!==1)throw new Error("LRN requires 1 input.");if(r[0].dims.length!==4)throw new Error('currently only support LRN for input with "NCHW" format');if(r[0].type!=="float32")throw new Error("input should be float type")}});var NT,ms,Dp,Bp,Lp,zT,FT,MT,VT,GT,UT,WT,HT,Rp=C(()=>{"use strict";st();De();He();Se();NT={name:"Pad",inputNames:["A"],inputTypes:[0]},ms=(r,e,n)=>(MT(e),[r.run({...NT,cacheHint:n.cacheKey,get:()=>FT(r,e[0],n)},e)]),Dp=r=>{let e=r.attributes.getString("mode","constant"),n=r.attributes.getFloat("value",0),t=r.attributes.getInts("pads");return be({mode:e,value:n,pads:t})},Bp=(r,e,n)=>{VT(e);let t=zT(r,e,n);return 
ms(r,[e[0]],t)},Lp=r=>r.attributes.getString("mode","constant"),zT=(r,e,n)=>{if(!r.session.isInitializer(e[1].dataId)||e.length>=3&&!r.session.isInitializer(e[2].dataId))throw new Error("dynamic pad attributes are not allowed");let t=Array.from(e[1].integerData),o=e.length>=3?e[2].floatData[0]:0;return be({mode:n,pads:t,value:o})},FT=(r,e,n)=>{let t=J.padShape(e.dims.slice(),n.pads),o=t.length,s=` - ${GT(r,e,n)} + }`;return{...Cp,cacheHint:e.cacheKey,output:{dims:r[0].dims,type:r[0].type,textureType:0},shaderSource:l}}function NT(r,e){return{...Cp,cacheHint:e.cacheKey,get:()=>RT(r,e)}}var Op,Ep,Cp,zT,kp=C(()=>{"use strict";st();Se();Op=(r,e,n)=>(zT(e),[r.run(NT(e,n),e)]),Ep=r=>{let e=r.attributes.getFloat("alpha",1e-4),n=r.attributes.getFloat("beta",.75),t=r.attributes.getFloat("bias",1),o=r.attributes.getInt("size");return be({alpha:e,beta:n,bias:t,size:o})},Cp={name:"LRN",inputNames:["X"],inputTypes:[0]};zT=r=>{if(!r||r.length!==1)throw new Error("LRN requires 1 input.");if(r[0].dims.length!==4)throw new Error('currently only support LRN for input with "NCHW" format');if(r[0].type!=="float32")throw new Error("input should be float type")}});var FT,hs,Dp,Bp,Lp,MT,VT,GT,UT,WT,HT,qT,jT,Rp=C(()=>{"use strict";st();De();He();Se();FT={name:"Pad",inputNames:["A"],inputTypes:[0]},hs=(r,e,n)=>(GT(e),[r.run({...FT,cacheHint:n.cacheKey,get:()=>VT(r,e[0],n)},e)]),Dp=r=>{let e=r.attributes.getString("mode","constant"),n=r.attributes.getFloat("value",0),t=r.attributes.getInts("pads");return be({mode:e,value:n,pads:t})},Bp=(r,e,n)=>{UT(e);let t=MT(r,e,n);return hs(r,[e[0]],t)},Lp=r=>r.attributes.getString("mode","constant"),MT=(r,e,n)=>{if(!r.session.isInitializer(e[1].dataId)||e.length>=3&&!r.session.isInitializer(e[2].dataId))throw new Error("dynamic pad attributes are not allowed");let t=Array.from(e[1].integerData),o=e.length>=3?e[2].floatData[0]:0;return be({mode:n,pads:t,value:o})},VT=(r,e,n)=>{let t=J.padShape(e.dims.slice(),n.pads),o=t.length,s=` + ${WT(r,e,n)} float process(int[${o}] indices) { return padA(indices); - }`;return{name:"Pad",inputNames:["A"],inputTypes:[0],output:{dims:t,type:e.type,textureType:0},shaderSource:s}},MT=r=>{if(!r||r.length!==1)throw new Error("Pad requires 1 input");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type.")},VT=r=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Pad requires 2 or 3 inputs");if(r[1].type!=="int32")throw new Error("Invalid input type.");if(r.length>=3&&r[2].type==="string")throw new Error("Invalid input type.")},GT=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),[o,i]=r.calculateTextureWidthAndHeight(e.dims,0),s=J.computeStrides(e.dims);switch(n.mode){case"constant":return UT(t,e.dims,s,o,i,n.pads,n.value);case"reflect":return WT(t,e.dims,s,o,i,n.pads);case"edge":return HT(t,e.dims,s,o,i,n.pads);default:throw new Error("Invalid mode")}},UT=(r,e,n,t,o,i,s)=>{let a=e.length,u="";for(let l=a-1;l>=0;--l)u+=` + }`;return{name:"Pad",inputNames:["A"],inputTypes:[0],output:{dims:t,type:e.type,textureType:0},shaderSource:s}},GT=r=>{if(!r||r.length!==1)throw new Error("Pad requires 1 input");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type.")},UT=r=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Pad requires 2 or 3 inputs");if(r[1].type!=="int32")throw new Error("Invalid input type.");if(r.length>=3&&r[2].type==="string")throw new Error("Invalid input type.")},WT=(r,e,n)=>{let 
t=oe(r.session.backend.glContext.version),[o,i]=r.calculateTextureWidthAndHeight(e.dims,0),s=J.computeStrides(e.dims);switch(n.mode){case"constant":return HT(t,e.dims,s,o,i,n.pads,n.value);case"reflect":return qT(t,e.dims,s,o,i,n.pads);case"edge":return jT(t,e.dims,s,o,i,n.pads);default:throw new Error("Invalid mode")}},HT=(r,e,n,t,o,i,s)=>{let a=e.length,u="";for(let l=a-1;l>=0;--l)u+=` k = m[${l}] - ${i[l]}; if (k < 0) return constant; if (k >= ${e[l]}) return constant; @@ -850,7 +850,7 @@ float process(int indices[${h}]) { float value = getColorAsFloat(${r.texture2D}(A, coords)); return value; } - `},WT=(r,e,n,t,o,i)=>{let s=e.length,a="";for(let u=s-1;u>=0;--u)a+=` + `},qT=(r,e,n,t,o,i)=>{let s=e.length,a="";for(let u=s-1;u>=0;--u)a+=` k = m[${u}] - ${i[u]}; if (k < 0) { k = -k; } { @@ -868,7 +868,7 @@ float process(int indices[${h}]) { float value = getColorAsFloat(${r.texture2D}(A, coords)); return value; } - `},HT=(r,e,n,t,o,i)=>{let s=e.length,a="";for(let u=s-1;u>=0;--u)a+=` + `},jT=(r,e,n,t,o,i)=>{let s=e.length,a="";for(let u=s-1;u>=0;--u)a+=` k = m[${u}] - ${i[u]}; if (k < 0) k = 0; if (k >= ${e[u]}) k = ${e[u]-1}; @@ -882,13 +882,13 @@ float process(int indices[${h}]) { float value = getColorAsFloat(${r.texture2D}(A, coords)); return value; } - `}});var zp,Fp,Mp,Vp,Gp,Up,Wp,Hp,qp,qT,Np,Kp,Zo,jp,Xo,KT,Xp=C(()=>{"use strict";st();De();Se();zp=(r,e,n)=>{Zo(e);let t={name:"AveragePool",inputNames:["X"],inputTypes:[0],cacheHint:n.cacheKey};return[r.run({...t,get:()=>Mp(e,t,!1,n)},e)]},Fp=r=>{let e=r.attributes.getString("auto_pad","NOTSET"),n=r.attributes.getInt("ceil_mode",0),t=r.attributes.getInt("count_include_pad",0)!==0,o=r.attributes.getInts("kernel_shape"),i=r.attributes.getInts("strides",[]),s=r.attributes.getInts("pads",[]);if(n!==0)throw new Error("using ceil() in shape computation is not yet supported for AveragePool");return be({autoPad:e,ceilMode:n,countIncludePad:t,kernelShape:o,strides:i,pads:s})},Mp=(r,e,n,t)=>{let[o,i]=qp(r,t,n),s=J.size(o.kernelShape),a="value += _X(x);",u="";o.countIncludePad?u+=`value /= float(${s});`:u+=`value /= float(${s} - pad);`;let f=` - ${jp(r[0].dims,o,a,u,"0.0")} + `}});var zp,Fp,Mp,Vp,Gp,Up,Wp,Hp,qp,KT,Np,jp,Zo,Kp,Xo,XT,Xp=C(()=>{"use strict";st();De();Se();zp=(r,e,n)=>{Zo(e);let t={name:"AveragePool",inputNames:["X"],inputTypes:[0],cacheHint:n.cacheKey};return[r.run({...t,get:()=>Mp(e,t,!1,n)},e)]},Fp=r=>{let e=r.attributes.getString("auto_pad","NOTSET"),n=r.attributes.getInt("ceil_mode",0),t=r.attributes.getInt("count_include_pad",0)!==0,o=r.attributes.getInts("kernel_shape"),i=r.attributes.getInts("strides",[]),s=r.attributes.getInts("pads",[]);if(n!==0)throw new Error("using ceil() in shape computation is not yet supported for AveragePool");return be({autoPad:e,ceilMode:n,countIncludePad:t,kernelShape:o,strides:i,pads:s})},Mp=(r,e,n,t)=>{let[o,i]=qp(r,t,n),s=J.size(o.kernelShape),a="value += _X(x);",u="";o.countIncludePad?u+=`value /= float(${s});`:u+=`value /= float(${s} - pad);`;let f=` + ${Kp(r[0].dims,o,a,u,"0.0")} `;return{...e,output:{dims:i,type:r[0].type,textureType:0},shaderSource:f}},Vp=(r,e,n)=>{Zo(e);let t={name:"GlobalAveragePool",inputNames:["X"],inputTypes:[0],cacheHint:`${n.countIncludePad}`};return[r.run({...t,get:()=>Mp(e,t,!0,n)},e)]},Gp=r=>{let e=r.attributes.getInt("count_include_pad",0)!==0;return be({autoPad:"",ceilMode:0,countIncludePad:e,kernelShape:[],strides:[],pads:[]})},Up=(r,e,n)=>{Zo(e);let 
t={name:"MaxPool",inputNames:["X"],inputTypes:[0],cacheHint:n.cacheKey};return[r.run({...t,get:()=>Hp(e,t,!1,n)},e)]},Wp=r=>{let e=r.attributes.getString("auto_pad","NOTSET"),n=r.attributes.getInt("ceil_mode",0),t=r.attributes.getInts("kernel_shape"),o=r.attributes.getInts("strides",[]),i=r.attributes.getInts("pads",[]),s=r.attributes.getInt("storage_order",0),a=r.attributes.getInts("dilations",[]);if(s!==0)throw new Error("column major storage order is not yet supported for MaxPool");if(n!==0)throw new Error("using ceil() in shape computation is not yet supported for MaxPool");return be({autoPad:e,ceilMode:n,countIncludePad:!1,kernelShape:t,strides:o,pads:i,storageOrder:s,dilations:a})},Hp=(r,e,n,t)=>{let[o,i]=qp(r,t,n),s=` value = max(_X(x), value); `,a="",l=` - ${jp(r[0].dims,o,s,a,"-1e5")} - `;return{...e,output:{dims:i,type:r[0].type,textureType:0},shaderSource:l}},qp=(r,e,n)=>{let t=r[0].dims.slice(),o=Object.hasOwnProperty.call(e,"dilations"),i=e.kernelShape.slice(),s=e.strides.slice(),a=o?e.dilations.slice():[],u=e.pads.slice();Fr.adjustPoolAttributes(n,t,i,s,a,u);let l=Fr.computePoolOutputShape(n,t,s,a,i,u,e.autoPad),f=Object.assign({},e);return o?Object.assign(f,{kernelShape:i,strides:s,pads:u,dilations:a,cacheKey:e.cacheKey}):Object.assign(f,{kernelShape:i,strides:s,pads:u,cacheKey:e.cacheKey}),[f,l]},qT={autoPad:"",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[],cacheKey:""},Np={name:"GlobalMaxPool",inputNames:["X"],inputTypes:[0]},Kp=(r,e)=>(Zo(e),[r.run({...Np,get:()=>Hp(e,Np,!0,qT)},e)]),Zo=r=>{if(!r||r.length!==1)throw new Error("Pool ops requires 1 input.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type.")},jp=(r,e,n,t,o)=>{let i=r.length;if(e.kernelShape.length<=2){let s=e.kernelShape[e.kernelShape.length-1],a=e.strides[e.strides.length-1],u=e.pads[e.pads.length/2-1],l=e.pads[e.pads.length-1],f=r[i-1],c="",p="",b="";if(u+l!==0?c=` + ${Kp(r[0].dims,o,s,a,"-1e5")} + `;return{...e,output:{dims:i,type:r[0].type,textureType:0},shaderSource:l}},qp=(r,e,n)=>{let t=r[0].dims.slice(),o=Object.hasOwnProperty.call(e,"dilations"),i=e.kernelShape.slice(),s=e.strides.slice(),a=o?e.dilations.slice():[],u=e.pads.slice();Fr.adjustPoolAttributes(n,t,i,s,a,u);let l=Fr.computePoolOutputShape(n,t,s,a,i,u,e.autoPad),f=Object.assign({},e);return o?Object.assign(f,{kernelShape:i,strides:s,pads:u,dilations:a,cacheKey:e.cacheKey}):Object.assign(f,{kernelShape:i,strides:s,pads:u,cacheKey:e.cacheKey}),[f,l]},KT={autoPad:"",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[],cacheKey:""},Np={name:"GlobalMaxPool",inputNames:["X"],inputTypes:[0]},jp=(r,e)=>(Zo(e),[r.run({...Np,get:()=>Hp(e,Np,!0,KT)},e)]),Zo=r=>{if(!r||r.length!==1)throw new Error("Pool ops requires 1 input.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type.")},Kp=(r,e,n,t,o)=>{let i=r.length;if(e.kernelShape.length<=2){let s=e.kernelShape[e.kernelShape.length-1],a=e.strides[e.strides.length-1],u=e.pads[e.pads.length/2-1],l=e.pads[e.pads.length-1],f=r[i-1],c="",p="",b="";if(u+l!==0?c=` for (int i = 0; i < ${s}; i++) { x[${i} - 1] = indices[${i} - 1] * ${a} - ${u} + i; if (x[${i} - 1] < 0 || x[${i} - 1] >= ${f}) { @@ -925,7 +925,7 @@ float process(int indices[${h}]) { ${t} return value; } - `}else{let 
s=J.size(e.kernelShape),a=J.computeStrides(e.kernelShape),u=a.length,l=e.pads.length,f=KT(u),c=Xo(r,"inputDims"),p=Xo(e.pads,"pads"),b=Xo(a,"kernelStrides"),h=Xo(e.strides,"strides"),g=e.pads.reduce((v,S)=>v+S),T="";return g?T=` + `}else{let s=J.size(e.kernelShape),a=J.computeStrides(e.kernelShape),u=a.length,l=e.pads.length,f=XT(u),c=Xo(r,"inputDims"),p=Xo(e.pads,"pads"),b=Xo(a,"kernelStrides"),h=Xo(e.strides,"strides"),g=e.pads.reduce((v,S)=>v+S),T="";return g?T=` if (x[j] >= inputDims[j] || x[j] < 0) { pad++; isPad = true; @@ -969,7 +969,7 @@ float process(int indices[${h}]) { } `}},Xo=(r,e)=>{let n="";for(let t=0;t` + `;return n},XT=r=>` void offsetToIndices(int offset, int[${r}] strides, out int[${r}] indices) { if (${r} == 0) { return; @@ -979,7 +979,7 @@ float process(int indices[${h}]) { offset -= indices[i] * strides[i]; } indices[${r} - 1] = offset; - }`});var qr,$r,jT,XT,Zp,Yp,Jp,Qp,em,tm,rm,nm=C(()=>{"use strict";st();Vn();De();Se();qr=(r,e,n,t,o)=>{XT(e);let i={name:t,inputNames:["A"],inputTypes:[0]};return[r.run({...i,cacheHint:n.cacheKey,get:()=>jT(r,e,n,t,o,i)},e)]},$r=r=>{let e=r.attributes.getInts("axes",[]),n=r.attributes.getInt("keepdims",1)===1;return be({axes:e,keepDims:n})},jT=(r,e,n,t,o,i)=>{let s=[],a=e[0].dims.length||1,u=[],l=J.normalizeAxes(n.axes,e[0].dims.length),f=o(e,l),c=f[1];for(let h=0;h=0||l.length===0?(n.keepDims&&s.push(1),c=` + }`});var qr,$r,ZT,YT,Zp,Yp,Jp,Qp,em,tm,rm,nm=C(()=>{"use strict";st();Vn();De();Se();qr=(r,e,n,t,o)=>{YT(e);let i={name:t,inputNames:["A"],inputTypes:[0]};return[r.run({...i,cacheHint:n.cacheKey,get:()=>ZT(r,e,n,t,o,i)},e)]},$r=r=>{let e=r.attributes.getInts("axes",[]),n=r.attributes.getInt("keepdims",1)===1;return be({axes:e,keepDims:n})},ZT=(r,e,n,t,o,i)=>{let s=[],a=e[0].dims.length||1,u=[],l=J.normalizeAxes(n.axes,e[0].dims.length),f=o(e,l),c=f[1];for(let h=0;h=0||l.length===0?(n.keepDims&&s.push(1),c=` for(int j${h} = 0; j${h} < ${e[0].dims[h]}; j${h}++) { inputIdx[${h}] = j${h}; ${c} @@ -993,11 +993,11 @@ float process(int indices[${h}]) { ${c} ${f[2]} // final computation for reduce mean return value; - }`;return{...i,output:{dims:s,type:e[0].type,textureType:0},shaderSource:b}},XT=r=>{if(!r||r.length!==1)throw new Error("Reduce op requires 1 input.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invalid input type.")},Zp=(r,e,n)=>qr(r,e,n,"ReduceSum",()=>["value = 0.0;","value += _A(inputIdx);",""]),Yp=(r,e,n)=>qr(r,e,n,"ReduceMean",(o,i)=>{let s=1;for(let a=0;a=0||i.length===0)&&(s*=o[0].dims[a]);return["value = 0.0;","value += _A(inputIdx);",`value /= ${s}.;`]}),Jp=(r,e,n)=>qr(r,e,n,"ReduceMax",(o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`inputIdx[${a}] = 0;`);return[`${s.join(` + }`;return{...i,output:{dims:s,type:e[0].type,textureType:0},shaderSource:b}},YT=r=>{if(!r||r.length!==1)throw new Error("Reduce op requires 1 input.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invalid input type.")},Zp=(r,e,n)=>qr(r,e,n,"ReduceSum",()=>["value = 0.0;","value += _A(inputIdx);",""]),Yp=(r,e,n)=>qr(r,e,n,"ReduceMean",(o,i)=>{let s=1;for(let a=0;a=0||i.length===0)&&(s*=o[0].dims[a]);return["value = 0.0;","value += _A(inputIdx);",`value /= ${s}.;`]}),Jp=(r,e,n)=>qr(r,e,n,"ReduceMax",(o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`inputIdx[${a}] = 0;`);return[`${s.join(` `)} value = _A(inputIdx);`,"value = max(value, _A(inputIdx));",""]}),Qp=(r,e,n)=>qr(r,e,n,"ReduceMin",(o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`inputIdx[${a}] = 0;`);return[`${s.join(` `)} -value = _A(inputIdx);`,"value = 
min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr(r,e,n,"ReduceProd",()=>["value = 1.0;","value *= _A(inputIdx);",""]),tm=(r,e,n)=>qr(r,e,n,"ReduceLogSum",()=>["value = 0.0;","value += _A(inputIdx);","value = log(value);"]),rm=(r,e,n)=>qr(r,e,n,"ReduceLogSumSquare",()=>["float t; value = 0.0;","t = _A(inputIdx); value += t * t;",""])});var om,im=C(()=>{"use strict";De();om=(r,e)=>{let n=J.calculateReshapedDims(e[0].dims,e[1].integerData);return r.session.pack?[r.reshapePacked(e[0],n)]:[r.reshapeUnpacked(e[0],n)]}});var am,hs,sm,um,Gn,ZT,gs,Yo,bs=C(()=>{"use strict";st();He();Se();am={name:"Upsample",inputNames:["X"],inputTypes:[0]},hs=(r,e,n)=>(gs(e,n),[r.run({...am,cacheHint:n.cacheKey,get:()=>ZT(r,e,n)},e)]),sm=r=>Gn(r,7),um=r=>Gn(r,9),Gn=(r,e)=>{let n=e>=10,t=r.attributes.getString("mode","nearest");if(t!=="nearest"&&t!=="linear"&&(e<11||t!=="cubic"))throw new Error(`unrecognized mode: ${t}`);let o=[];e<9&&(o=r.attributes.getFloats("scales"),Yo(o,t,n));let i=r.attributes.getFloat("extrapolation_value",0),s=e>10?r.attributes.getString("coordinate_transformation_mode","half_pixel"):"asymmetric";if(["asymmetric","pytorch_half_pixel","tf_half_pixel_for_nn","align_corners","tf_crop_and_resize","half_pixel"].indexOf(s)===-1)throw new Error(`coordinate_transform_mode '${s}' is not supported`);let a=s==="tf_crop_and_resize",u=a,l=t==="nearest"&&e>=11?r.attributes.getString("nearest_mode","round_prefer_floor"):"";if(["round_prefer_floor","round_prefer_ceil","floor","ceil",""].indexOf(l)===-1)throw new Error(`nearest_mode '${l}' is not supported`);let f=r.attributes.getFloat("cubic_coeff_a",-.75),c=r.attributes.getInt("exclude_outside",0)!==0;if(c&&t!=="cubic")throw new Error("exclude_outside can be set to 1 only when mode is CUBIC.");let p=e<11?!0:t==="nearest"&&s==="asymmetric"&&l==="floor",b=0,h=0,g=0;return e>10?r.inputs.length>2?(b=1,h=2,g=3):(h=1,g=2):e===9&&(h=1),be({opset:e,isResize:n,mode:t,scales:o,extrapolationValue:i,coordinateTransformMode:s,useExtrapolation:u,needRoiInput:a,nearestMode:l,cubicCoefficientA:f,excludeOutside:c,useNearest2xOptimization:p,roiInputIdx:b,scalesInputIdx:h,sizesInputIdx:g})},ZT=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),[o,i]=r.calculateTextureWidthAndHeight(e[0].dims,0),s=e[0].dims.map((g,T)=>Math.floor(g*n.scales[T])),[a,u]=r.calculateTextureWidthAndHeight(s,0),l=s.length,f=new Array(l),c=new Array(l),p=` +value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr(r,e,n,"ReduceProd",()=>["value = 1.0;","value *= _A(inputIdx);",""]),tm=(r,e,n)=>qr(r,e,n,"ReduceLogSum",()=>["value = 0.0;","value += _A(inputIdx);","value = log(value);"]),rm=(r,e,n)=>qr(r,e,n,"ReduceLogSumSquare",()=>["float t; value = 0.0;","t = _A(inputIdx); value += t * t;",""])});var om,im=C(()=>{"use strict";De();om=(r,e)=>{let n=J.calculateReshapedDims(e[0].dims,e[1].integerData);return r.session.pack?[r.reshapePacked(e[0],n)]:[r.reshapeUnpacked(e[0],n)]}});var am,gs,sm,um,Gn,JT,bs,Yo,ys=C(()=>{"use strict";st();He();Se();am={name:"Upsample",inputNames:["X"],inputTypes:[0]},gs=(r,e,n)=>(bs(e,n),[r.run({...am,cacheHint:n.cacheKey,get:()=>JT(r,e,n)},e)]),sm=r=>Gn(r,7),um=r=>Gn(r,9),Gn=(r,e)=>{let n=e>=10,t=r.attributes.getString("mode","nearest");if(t!=="nearest"&&t!=="linear"&&(e<11||t!=="cubic"))throw new Error(`unrecognized mode: ${t}`);let o=[];e<9&&(o=r.attributes.getFloats("scales"),Yo(o,t,n));let 
i=r.attributes.getFloat("extrapolation_value",0),s=e>10?r.attributes.getString("coordinate_transformation_mode","half_pixel"):"asymmetric";if(["asymmetric","pytorch_half_pixel","tf_half_pixel_for_nn","align_corners","tf_crop_and_resize","half_pixel"].indexOf(s)===-1)throw new Error(`coordinate_transform_mode '${s}' is not supported`);let a=s==="tf_crop_and_resize",u=a,l=t==="nearest"&&e>=11?r.attributes.getString("nearest_mode","round_prefer_floor"):"";if(["round_prefer_floor","round_prefer_ceil","floor","ceil",""].indexOf(l)===-1)throw new Error(`nearest_mode '${l}' is not supported`);let f=r.attributes.getFloat("cubic_coeff_a",-.75),c=r.attributes.getInt("exclude_outside",0)!==0;if(c&&t!=="cubic")throw new Error("exclude_outside can be set to 1 only when mode is CUBIC.");let p=e<11?!0:t==="nearest"&&s==="asymmetric"&&l==="floor",b=0,h=0,g=0;return e>10?r.inputs.length>2?(b=1,h=2,g=3):(h=1,g=2):e===9&&(h=1),be({opset:e,isResize:n,mode:t,scales:o,extrapolationValue:i,coordinateTransformMode:s,useExtrapolation:u,needRoiInput:a,nearestMode:l,cubicCoefficientA:f,excludeOutside:c,useNearest2xOptimization:p,roiInputIdx:b,scalesInputIdx:h,sizesInputIdx:g})},JT=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),[o,i]=r.calculateTextureWidthAndHeight(e[0].dims,0),s=e[0].dims.map((g,T)=>Math.floor(g*n.scales[T])),[a,u]=r.calculateTextureWidthAndHeight(s,0),l=s.length,f=new Array(l),c=new Array(l),p=` int output_pitches[${l}]; int input_pitches[${l}]; `;for(let g=l-1;g>=0;g--)f[g]=g===l-1?1:f[g+1]*s[g+1],c[g]=g===l-1?1:c[g+1]*e[0].dims[g+1],p+=` @@ -1133,7 +1133,7 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( float y0 = x00 + float(y_offset) * (x01 - x00) / float(scales[0]); float y1 = x10 + float(y_offset) * (x11 - x10) / float(scales[0]); return y0 + float(x_offset) * (y1 - y0) / float(scales[1]); - }`;return{...am,output:{dims:s,type:e[0].type,textureType:0},shaderSource:h,variables:[{name:"scales",type:"int",arrayLength:n.scales.length,data:n.scales.map(g=>Math.ceil(g))}]}},gs=(r,e)=>{if(!r||e.opset<9&&r.length!==1||e.opset>=9&&e.opset<11&&r.length!==2||e.opset>=11&&r.length<2)throw new Error("invalid inputs.");if(e.scales.length>0&&r[0].dims.length!==e.scales.length)throw new Error("Invalid input shape.");if(r[0].type==="string")throw new Error("Invalid input tensor types.")},Yo=(r,e,n)=>{if(n){for(let t of r)if(t<=0)throw new Error("Scale value should be greater than 0.")}else for(let t of r)if(t<1)throw new Error("Scale value should be greater than or equal to 1.");if((e==="linear"||e==="cubic")&&r.length!==2&&(r.length!==4||r[0]!==1||r[1]!==1))throw new Error(`'Linear' mode and 'Cubic' mode only support 2-D inputs ('Bilinear', 'Bicubic') or 4-D inputs with the corresponding outermost 2 scale values being 1 in the ${n?"Resize":"Upsample"} opeartor.`)}});var ys,xs,lm,cm,YT,JT,QT,e_,fm=C(()=>{"use strict";He();Se();Qt();Ur();bs();ys={name:"Resize",inputNames:["A"],inputTypes:[2]},xs=(r,e,n)=>(gs(e,n),[r.run({...ys,cacheHint:n.cacheKey,get:()=>YT(r,e,n)},e)]),lm=r=>Gn(r,10),cm=r=>Gn(r,11),YT=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),[o,i]=JT(e,n);if(o.every(S=>S===1)&&n.coordinateTransformMode!=="tf_crop_and_resize")return{...ys,output:{dims:i,type:e[0].type,textureType:2},hasMain:!0,shaderSource:`void main() { + 
}`;return{...am,output:{dims:s,type:e[0].type,textureType:0},shaderSource:h,variables:[{name:"scales",type:"int",arrayLength:n.scales.length,data:n.scales.map(g=>Math.ceil(g))}]}},bs=(r,e)=>{if(!r||e.opset<9&&r.length!==1||e.opset>=9&&e.opset<11&&r.length!==2||e.opset>=11&&r.length<2)throw new Error("invalid inputs.");if(e.scales.length>0&&r[0].dims.length!==e.scales.length)throw new Error("Invalid input shape.");if(r[0].type==="string")throw new Error("Invalid input tensor types.")},Yo=(r,e,n)=>{if(n){for(let t of r)if(t<=0)throw new Error("Scale value should be greater than 0.")}else for(let t of r)if(t<1)throw new Error("Scale value should be greater than or equal to 1.");if((e==="linear"||e==="cubic")&&r.length!==2&&(r.length!==4||r[0]!==1||r[1]!==1))throw new Error(`'Linear' mode and 'Cubic' mode only support 2-D inputs ('Bilinear', 'Bicubic') or 4-D inputs with the corresponding outermost 2 scale values being 1 in the ${n?"Resize":"Upsample"} opeartor.`)}});var xs,vs,lm,cm,QT,e_,t_,r_,fm=C(()=>{"use strict";He();Se();Qt();Ur();ys();xs={name:"Resize",inputNames:["A"],inputTypes:[2]},vs=(r,e,n)=>(bs(e,n),[r.run({...xs,cacheHint:n.cacheKey,get:()=>QT(r,e,n)},e)]),lm=r=>Gn(r,10),cm=r=>Gn(r,11),QT=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),[o,i]=e_(e,n);if(o.every(S=>S===1)&&n.coordinateTransformMode!=="tf_crop_and_resize")return{...xs,output:{dims:i,type:e[0].type,textureType:2},hasMain:!0,shaderSource:`void main() { vec4 v = ${t.texture2D}(X, TexCoords); ${t.output} = v; }`};let a=i.length;if(a<2)throw new Error(`output dimension should be at least 2, but got ${a}`);let u=i[a-2],l=i[a-1],f=e[0].dims;if(a!==f.length)throw new Error(`output dimension should match input ${f.length}, but got ${a}`);let c=f[a-2],p=f[a-1],b=o[a-2],h=o[a-1],g="";if(n.mode!=="linear")throw new Error(`resize (packed) does not support mode: '${n.mode}'`);switch(n.coordinateTransformMode){case"asymmetric":g=` @@ -1230,12 +1230,12 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( ${t.output} = vec4(newValue); } - `;return{...ys,output:{dims:i,type:e[0].type,textureType:2},hasMain:!0,shaderSource:v}},JT=(r,e)=>{let t=r[0].dims,o=e.scales,i;if(o.length===0){let a=r[e.scalesInputIdx];if(a&&a.size!==0){if(r[e.sizesInputIdx])throw new Error("Only one of scales or sizes must be provided as input.");o=QT(a,e.mode,e.isResize)}else{let u=r[e.sizesInputIdx];if(!u||u.size===0)throw new Error("Either scales or sizes MUST be provided as input.");i=Array.from(u.integerData),o=e_(i,t,e.mode,e.isResize)}}else if(r[e.sizesInputIdx])throw new Error("Only one of scales or sizes must be provided as input.");let s=i||t.map((a,u)=>Math.floor(a*o[u]));return[o,s]},QT=(r,e,n)=>{let t=Array.from(r.floatData);return Yo(t,e,n),t},e_=(r,e,n,t)=>{let o=e.length,i=new Array(o);for(let s=0,a=o;s{"use strict";Gr();dm=(r,e)=>(t_(e),[new Qe([e[0].dims.length],"int32",void 0,void 0,new Int32Array(e[0].dims))]),t_=r=>{if(!r||r.length!==1)throw new Error("Shape requires 1 input.")}});var vs,mm,hm,gm,r_,bm,n_,o_,ym=C(()=>{"use strict";st();Vn();De();Se();vs={name:"Slice",inputNames:["A"],inputTypes:[0]},mm=(r,e,n)=>(r_(e),[r.run({...vs,cacheHint:n.cacheKey,get:()=>gm(r,e[0],n)},e)]),hm=r=>{let e=r.attributes.getInts("starts"),n=r.attributes.getInts("ends"),t=r.attributes.getInts("axes",[]);return be({starts:e,ends:n,axes:t})},gm=(r,e,n)=>{let 
t=n.axes.length===0?e.dims.slice(0).map((c,p)=>p):n.axes,o=J.normalizeAxes(t,e.dims.length),i=n.starts.map((c,p)=>c>e.dims[o[p]]-1?e.dims[o[p]]:J.normalizeAxis(c,e.dims[o[p]])),s=n.ends.map((c,p)=>c>e.dims[o[p]]-1?e.dims[o[p]]:J.normalizeAxis(c,e.dims[o[p]])),a=e.dims.slice(),u=[];for(let c=0;c0&&u.push(`outputIdx[${o[c]}] += ${i[c]};`);let f=` + `;return{...xs,output:{dims:i,type:e[0].type,textureType:2},hasMain:!0,shaderSource:v}},e_=(r,e)=>{let t=r[0].dims,o=e.scales,i;if(o.length===0){let a=r[e.scalesInputIdx];if(a&&a.size!==0){if(r[e.sizesInputIdx])throw new Error("Only one of scales or sizes must be provided as input.");o=t_(a,e.mode,e.isResize)}else{let u=r[e.sizesInputIdx];if(!u||u.size===0)throw new Error("Either scales or sizes MUST be provided as input.");i=Array.from(u.integerData),o=r_(i,t,e.mode,e.isResize)}}else if(r[e.sizesInputIdx])throw new Error("Only one of scales or sizes must be provided as input.");let s=i||t.map((a,u)=>Math.floor(a*o[u]));return[o,s]},t_=(r,e,n)=>{let t=Array.from(r.floatData);return Yo(t,e,n),t},r_=(r,e,n,t)=>{let o=e.length,i=new Array(o);for(let s=0,a=o;s{"use strict";Gr();dm=(r,e)=>(n_(e),[new Qe([e[0].dims.length],"int32",void 0,void 0,new Int32Array(e[0].dims))]),n_=r=>{if(!r||r.length!==1)throw new Error("Shape requires 1 input.")}});var ws,mm,hm,gm,o_,bm,i_,a_,ym=C(()=>{"use strict";st();Vn();De();Se();ws={name:"Slice",inputNames:["A"],inputTypes:[0]},mm=(r,e,n)=>(o_(e),[r.run({...ws,cacheHint:n.cacheKey,get:()=>gm(r,e[0],n)},e)]),hm=r=>{let e=r.attributes.getInts("starts"),n=r.attributes.getInts("ends"),t=r.attributes.getInts("axes",[]);return be({starts:e,ends:n,axes:t})},gm=(r,e,n)=>{let t=n.axes.length===0?e.dims.slice(0).map((c,p)=>p):n.axes,o=J.normalizeAxes(t,e.dims.length),i=n.starts.map((c,p)=>c>e.dims[o[p]]-1?e.dims[o[p]]:J.normalizeAxis(c,e.dims[o[p]])),s=n.ends.map((c,p)=>c>e.dims[o[p]]-1?e.dims[o[p]]:J.normalizeAxis(c,e.dims[o[p]])),a=e.dims.slice(),u=[];for(let c=0;c0&&u.push(`outputIdx[${o[c]}] += ${i[c]};`);let f=` float process(int outputIdx[${a.length}]) { ${u.join(` `)} return _A(outputIdx); - }`;return{...vs,output:{dims:a,type:e.type,textureType:0},shaderSource:f}},r_=r=>{if(!r||r.length!==1)throw new Error("Slice requires 1 input.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invalid input type.")},bm=(r,e)=>{o_(e);let n=n_(r,e);return[r.run({...vs,cacheHint:n.cacheKey,get:()=>gm(r,e[0],n)},[e[0]])]},n_=(r,e)=>{if(!r.session.isInitializer(e[1].dataId)||!r.session.isInitializer(e[2].dataId)||e.length>=4&&!r.session.isInitializer(e[3].dataId)||e.length>=5&&!r.session.isInitializer(e[4].dataId))throw new Error("dynamic slice attributes are not allowed");if(e.length>=5&&e[4].integerData.some(s=>s!==1))throw new Error("currently non-1 steps is not supported for Slice");let n=Array.from(e[1].integerData),t=Array.from(e[2].integerData),o=e.length>=4?Array.from(e[3].integerData):[],i=`${o};${n};${t}`;return{starts:n,ends:t,axes:o,cacheKey:i}},o_=r=>{if(!r||r.length<3||r.length>5)throw new Error("Invalid input number.");if(r[1].type!=="int32"||r[1].dims.length!==1)throw new Error("Invalid input type.");if(r[2].type!=="int32"||r[2].dims.length!==1)throw new Error("Invalid input type.");if(r.length>=4&&(r[3].type!=="int32"||r[3].dims.length!==1))throw new Error("Invalid input type.");if(r.length>=5&&(r[4].type!=="int32"||r[4].dims.length!==1))throw new Error("Invalid input type.")}});var xm,vm,wm,Tm,_m,Im,Sm,$m,i_,a_,s_,Am,Pm=C(()=>{"use 
strict";st();De();He();Se();jo();xm={name:"SoftmaxComputeMax",inputNames:["A"],inputTypes:[0]},vm={name:"SoftmaxComputeScale",inputNames:["A","Max"],inputTypes:[0,0]},wm={name:"SoftMax",inputNames:["A","Max","Norm"],inputTypes:[0,0,0]},Tm=(r,e,n)=>{Am(e);let t=e[0].dims.slice(),o=J.normalizeAxis(n.axis,t.length),i=J.sizeToDimension(t,o),s=J.sizeFromDimension(t,o);return $m(r,e,n,i,s)},_m=r=>be({axis:r.attributes.getInt("axis",1)}),Im=r=>be({axis:r.attributes.getInt("axis",-1)}),Sm=(r,e,n)=>{Am(e);let t=e[0].dims.slice(),o=J.normalizeAxis(n.axis,t.length),i=t.length,s=o!==i-1,a=[],u=[],l=[],f;s&&(u=Array.from({length:i}).map((h,g)=>g),u[o]=i-1,u[i-1]=o,u.map(h=>a.push(t[h])),f=be({perm:u}),l=Hr(r,e,f));let c=s?J.sizeToDimension(a,i-1):J.sizeToDimension(t,i-1),p=s?J.sizeFromDimension(a,i-1):J.sizeFromDimension(t,i-1),b=$m(r,s?l:e,n,c,p);return s?Hr(r,b,f):b},$m=(r,e,n,t,o)=>{let i=i_(r,e[0],t,o,[t]),s=r.run({...xm,cacheHint:n.cacheKey,get:()=>i},e),a=a_(r,e[0],t,o,i.output.dims,[t]),u=r.run({...vm,cacheHint:n.cacheKey,get:()=>a},[e[0],s]),l=s_(r,e[0],t,o,i.output.dims,a.output.dims);return[r.run({...wm,cacheHint:n.cacheKey,get:()=>l},[e[0],s,u])]},i_=(r,e,n,t,o)=>{let[i,s]=r.calculateTextureWidthAndHeight(e.dims,0),a=o.length;if(n<1||t<1)throw new Error("Logical row count N and feature count D must be greater than or equal to 1");if(o.length!==1)throw new Error("Dimensionality of the output should be 1");if(o[0]!==n)throw new Error("Shape of the output should be equal to logical row count");let u=oe(r.session.backend.glContext.version),l=` + }`;return{...ws,output:{dims:a,type:e.type,textureType:0},shaderSource:f}},o_=r=>{if(!r||r.length!==1)throw new Error("Slice requires 1 input.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invalid input type.")},bm=(r,e)=>{a_(e);let n=i_(r,e);return[r.run({...ws,cacheHint:n.cacheKey,get:()=>gm(r,e[0],n)},[e[0]])]},i_=(r,e)=>{if(!r.session.isInitializer(e[1].dataId)||!r.session.isInitializer(e[2].dataId)||e.length>=4&&!r.session.isInitializer(e[3].dataId)||e.length>=5&&!r.session.isInitializer(e[4].dataId))throw new Error("dynamic slice attributes are not allowed");if(e.length>=5&&e[4].integerData.some(s=>s!==1))throw new Error("currently non-1 steps is not supported for Slice");let n=Array.from(e[1].integerData),t=Array.from(e[2].integerData),o=e.length>=4?Array.from(e[3].integerData):[],i=`${o};${n};${t}`;return{starts:n,ends:t,axes:o,cacheKey:i}},a_=r=>{if(!r||r.length<3||r.length>5)throw new Error("Invalid input number.");if(r[1].type!=="int32"||r[1].dims.length!==1)throw new Error("Invalid input type.");if(r[2].type!=="int32"||r[2].dims.length!==1)throw new Error("Invalid input type.");if(r.length>=4&&(r[3].type!=="int32"||r[3].dims.length!==1))throw new Error("Invalid input type.");if(r.length>=5&&(r[4].type!=="int32"||r[4].dims.length!==1))throw new Error("Invalid input type.")}});var xm,vm,wm,Tm,_m,Im,Sm,$m,s_,u_,l_,Am,Pm=C(()=>{"use strict";st();De();He();Se();Ko();xm={name:"SoftmaxComputeMax",inputNames:["A"],inputTypes:[0]},vm={name:"SoftmaxComputeScale",inputNames:["A","Max"],inputTypes:[0,0]},wm={name:"SoftMax",inputNames:["A","Max","Norm"],inputTypes:[0,0,0]},Tm=(r,e,n)=>{Am(e);let t=e[0].dims.slice(),o=J.normalizeAxis(n.axis,t.length),i=J.sizeToDimension(t,o),s=J.sizeFromDimension(t,o);return $m(r,e,n,i,s)},_m=r=>be({axis:r.attributes.getInt("axis",1)}),Im=r=>be({axis:r.attributes.getInt("axis",-1)}),Sm=(r,e,n)=>{Am(e);let 
t=e[0].dims.slice(),o=J.normalizeAxis(n.axis,t.length),i=t.length,s=o!==i-1,a=[],u=[],l=[],f;s&&(u=Array.from({length:i}).map((h,g)=>g),u[o]=i-1,u[i-1]=o,u.map(h=>a.push(t[h])),f=be({perm:u}),l=Hr(r,e,f));let c=s?J.sizeToDimension(a,i-1):J.sizeToDimension(t,i-1),p=s?J.sizeFromDimension(a,i-1):J.sizeFromDimension(t,i-1),b=$m(r,s?l:e,n,c,p);return s?Hr(r,b,f):b},$m=(r,e,n,t,o)=>{let i=s_(r,e[0],t,o,[t]),s=r.run({...xm,cacheHint:n.cacheKey,get:()=>i},e),a=u_(r,e[0],t,o,i.output.dims,[t]),u=r.run({...vm,cacheHint:n.cacheKey,get:()=>a},[e[0],s]),l=l_(r,e[0],t,o,i.output.dims,a.output.dims);return[r.run({...wm,cacheHint:n.cacheKey,get:()=>l},[e[0],s,u])]},s_=(r,e,n,t,o)=>{let[i,s]=r.calculateTextureWidthAndHeight(e.dims,0),a=o.length;if(n<1||t<1)throw new Error("Logical row count N and feature count D must be greater than or equal to 1");if(o.length!==1)throw new Error("Dimensionality of the output should be 1");if(o[0]!==n)throw new Error("Shape of the output should be equal to logical row count");let u=oe(r.session.backend.glContext.version),l=` float process(int[${a}] indices) { int logical_row_start_offset = indices[0] * ${t}; @@ -1250,7 +1250,7 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( } return max; - }`;return{...xm,output:{dims:o,type:e.type,textureType:0},shaderSource:l}},a_=(r,e,n,t,o,i)=>{let[s,a]=r.calculateTextureWidthAndHeight(e.dims,0),u=i.length;if(n<1||t<1)throw new Error("Logical row count N and feature count D must be greater than or equal to 1");if(i.length!==1)throw new Error("Dimensionality of the output should be 1");if(i[0]!==n)throw new Error("Shape of the output should be equal to logical row count");if(o.length!==1)throw new Error("Dimensionality of the intermediate results should be 1");if(o[0]!==n)throw new Error("Shape of the intermediate results should be equal to logical row count");let l=oe(r.session.backend.glContext.version),f=` + }`;return{...xm,output:{dims:o,type:e.type,textureType:0},shaderSource:l}},u_=(r,e,n,t,o,i)=>{let[s,a]=r.calculateTextureWidthAndHeight(e.dims,0),u=i.length;if(n<1||t<1)throw new Error("Logical row count N and feature count D must be greater than or equal to 1");if(i.length!==1)throw new Error("Dimensionality of the output should be 1");if(i[0]!==n)throw new Error("Shape of the output should be equal to logical row count");if(o.length!==1)throw new Error("Dimensionality of the intermediate results should be 1");if(o[0]!==n)throw new Error("Shape of the intermediate results should be equal to logical row count");let l=oe(r.session.backend.glContext.version),f=` float process(int[${u}] indices) { int logical_row_start_offset = indices[0] * ${t}; @@ -1263,7 +1263,7 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( } return norm_factor; - }`;return{...vm,output:{dims:i,type:e.type,textureType:0},shaderSource:f}},s_=(r,e,n,t,o,i)=>{let[s,a]=r.calculateTextureWidthAndHeight(e.dims,0),u=e.dims.length;if(n<1||t<1)throw new Error("Logical row count N and feature count D must be greater than or equal to 1");if(o.length!==1||i.length!==1)throw new Error("Dimensionality of the intermediate results should be 1");if(o[0]!==n||i[0]!==n)throw new Error("Shape of the intermediate results should be equal to logical row count");let l=` + }`;return{...vm,output:{dims:i,type:e.type,textureType:0},shaderSource:f}},l_=(r,e,n,t,o,i)=>{let[s,a]=r.calculateTextureWidthAndHeight(e.dims,0),u=e.dims.length;if(n<1||t<1)throw new Error("Logical row count N and feature count D must be greater 
than or equal to 1");if(o.length!==1||i.length!==1)throw new Error("Dimensionality of the intermediate results should be 1");if(o[0]!==n||i[0]!==n)throw new Error("Shape of the intermediate results should be equal to logical row count");let l=` float process(int[${u}] indices) { // get offset of current logical tensor index from the 2-D texture coordinates (TexCoords) @@ -1282,31 +1282,31 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( return 0.0; return exp(_A(indices) - _Max(logical_row_index)) / norm_factor; - }`;return{...wm,output:{dims:e.dims,type:e.type,textureType:0},shaderSource:l}},Am=r=>{if(!r||r.length!==1)throw new Error("Softmax requires 1 input.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type")}});var Om,Em,Cm,u_,l_,c_,km=C(()=>{"use strict";st();De();Se();Om={name:"Split",inputNames:["A"],inputTypes:[0]},Em=(r,e,n)=>{c_(e);let t=J.normalizeAxis(n.axis,e[0].dims.length),o=u_(r,e,t,n),i=[];for(let s=0;sl_(r,e[0],n,t,s)},e));return i},Cm=r=>{let e=r.attributes.getInt("axis",0),n=r.attributes.getInts("split",[]),t=r.outputs.length;return be({axis:e,split:n,numOutputs:t})},u_=(r,e,n,t)=>{let[,o]=Ln.splitShape(e[0].dims,n,t.split,t.numOutputs);return o.length},l_=(r,e,n,t,o)=>{let[i,s]=Ln.splitShape(e.dims,t,n.split,n.numOutputs),a=s[o],u=i[o],f=` + }`;return{...wm,output:{dims:e.dims,type:e.type,textureType:0},shaderSource:l}},Am=r=>{if(!r||r.length!==1)throw new Error("Softmax requires 1 input.");if(r[0].type!=="float32"&&r[0].type!=="float64")throw new Error("Invalid input type")}});var Om,Em,Cm,c_,f_,d_,km=C(()=>{"use strict";st();De();Se();Om={name:"Split",inputNames:["A"],inputTypes:[0]},Em=(r,e,n)=>{d_(e);let t=J.normalizeAxis(n.axis,e[0].dims.length),o=c_(r,e,t,n),i=[];for(let s=0;sf_(r,e[0],n,t,s)},e));return i},Cm=r=>{let e=r.attributes.getInt("axis",0),n=r.attributes.getInts("split",[]),t=r.outputs.length;return be({axis:e,split:n,numOutputs:t})},c_=(r,e,n,t)=>{let[,o]=Ln.splitShape(e[0].dims,n,t.split,t.numOutputs);return o.length},f_=(r,e,n,t,o)=>{let[i,s]=Ln.splitShape(e.dims,t,n.split,n.numOutputs),a=s[o],u=i[o],f=` float process(int indices[${u.length}]) { indices[${t}] += ${a}; return _A(indices); } - `;return{...Om,cacheHint:`${n.cacheKey}:${o}`,output:{dims:u,type:e.type,textureType:0},shaderSource:f}},c_=r=>{if(!r||r.length!==1)throw new Error("Split requires one input.");if(r[0].type!=="int8"&&r[0].type!=="uint8"&&r[0].type!=="int16"&&r[0].type!=="uint16"&&r[0].type!=="int32"&&r[0].type!=="uint32"&&r[0].type!=="float32"&&r[0].type!=="float64"&&r[0].type!=="bool")throw new Error("Invalid input type.")}});var ws,Dm,Bm,f_,d_,Lm=C(()=>{"use strict";De();ws=(r,e,n)=>{f_(e);let t=J.squeezeShape(e[0].dims,n);return[r.reshapeUnpacked(e[0],t)]},Dm=(r,e)=>(d_(e),ws(r,[e[0]],Array.from(e[1].integerData))),Bm=r=>r.attributes.getInts("axes"),f_=r=>{if(!r||r.length!==1)throw new Error("Squeeze requires 1 input.");if(r[0].type==="string")throw new Error("invalid input tensor types.")},d_=r=>{if(!r||r.length!==2)throw new Error("Squeeze requires 2 inputs.");if(r[1].type!=="int32")throw new Error("Invalid input type.")}});var Rm,p_,m_,Nm=C(()=>{"use strict";He();Se();Rm=(r,e)=>{m_(e);let n={name:"Sum",inputNames:e.map((o,i)=>`X${i}`),inputTypes:new Array(e.length).fill(0)};return[r.run({...n,get:()=>p_(r,e,n)},e)]},p_=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),o=e[0].dims.slice(),s=` + 
`;return{...Om,cacheHint:`${n.cacheKey}:${o}`,output:{dims:u,type:e.type,textureType:0},shaderSource:f}},d_=r=>{if(!r||r.length!==1)throw new Error("Split requires one input.");if(r[0].type!=="int8"&&r[0].type!=="uint8"&&r[0].type!=="int16"&&r[0].type!=="uint16"&&r[0].type!=="int32"&&r[0].type!=="uint32"&&r[0].type!=="float32"&&r[0].type!=="float64"&&r[0].type!=="bool")throw new Error("Invalid input type.")}});var Ts,Dm,Bm,p_,m_,Lm=C(()=>{"use strict";De();Ts=(r,e,n)=>{p_(e);let t=J.squeezeShape(e[0].dims,n);return[r.reshapeUnpacked(e[0],t)]},Dm=(r,e)=>(m_(e),Ts(r,[e[0]],Array.from(e[1].integerData))),Bm=r=>r.attributes.getInts("axes"),p_=r=>{if(!r||r.length!==1)throw new Error("Squeeze requires 1 input.");if(r[0].type==="string")throw new Error("invalid input tensor types.")},m_=r=>{if(!r||r.length!==2)throw new Error("Squeeze requires 2 inputs.");if(r[1].type!=="int32")throw new Error("Invalid input type.")}});var Rm,h_,g_,Nm=C(()=>{"use strict";He();Se();Rm=(r,e)=>{g_(e);let n={name:"Sum",inputNames:e.map((o,i)=>`X${i}`),inputTypes:new Array(e.length).fill(0)};return[r.run({...n,get:()=>h_(r,e,n)},e)]},h_=(r,e,n)=>{let t=oe(r.session.backend.glContext.version),o=e[0].dims.slice(),s=` void main() { vec4 result = ${e.map((a,u)=>`${t.texture2D}(X${u},TexCoords)`).join(" + ")}; ${t.output} = result; } - `;return{...n,output:{dims:o,type:e[0].type,textureType:0},hasMain:!0,shaderSource:s}},m_=r=>{if(!r||r.length===0)throw new Error("Sum requires inputs.");let e=r[0].dims.length;for(let n=1;n{"use strict";Vn();Se();zm=(r,e)=>{g_(e);let n={name:"Tile",inputNames:["A"],inputTypes:[0]};return[r.run({...n,get:()=>h_(r,e,n)},e)]},h_=(r,e,n)=>{let t=e[0].dims.slice(),o=new Array(t.length),i=[];for(let u=0;u{if(!r||r.length===0)throw new Error("Sum requires inputs.");let e=r[0].dims.length;for(let n=1;n{"use strict";Vn();Se();zm=(r,e)=>{y_(e);let n={name:"Tile",inputNames:["A"],inputTypes:[0]};return[r.run({...n,get:()=>b_(r,e,n)},e)]},b_=(r,e,n)=>{let t=e[0].dims.slice(),o=new Array(t.length),i=[];for(let u=0;u{if(!r||r.length!==2)throw new Error("Tile requires 2 input.");if(r[1].dims.length!==1)throw new Error("The second input shape must 1 dimension.");if(r[1].dims[0]!==r[0].dims.length)throw new Error("Invalid input shape.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invalid input type.");if(r[1].type!=="int32"&&r[1].type!=="int16")throw new Error("Invalid repeat type.")}});var Ts,Mm,Vm,b_,y_,Gm=C(()=>{"use strict";De();Ts=(r,e,n)=>{b_(e);let t=J.unsqueezeShape(e[0].dims,n);return[r.reshapeUnpacked(e[0],t)]},Mm=(r,e)=>(y_(e),Ts(r,[e[0]],Array.from(e[1].integerData))),Vm=r=>r.attributes.getInts("axes"),b_=r=>{if(!r||r.length!==1)throw new Error("Unsqueeze requires 1 input.");if(r[0].type==="string")throw new Error("invalid input tensor types.")},y_=r=>{if(!r||r.length!==2)throw new Error("Unsqueeze requires 2 inputs.");if(r[1].type!=="int32")throw new Error("Invalid input type.")}});var Um,Wm=C(()=>{"use 
strict";ed();pd();gd();Td();Ho();ip();fp();mp();bp();wp();Ip();Pp();kp();qo();Rp();Xp();nm();im();fm();pm();ym();Pm();km();Lm();Nm();Fm();jo();is();Gm();bs();Um=[["Abs","","6+",_d],["Acos","","7+",Id],["Add","","7+",td],["And","","7+",rd],["Asin","","7+",Sd],["Atan","","7+",$d],["AveragePool","","7+",zp,Fp],["BatchNormalization","","7+",Jf,Qf],["Cast","","6+",md,hd],["Ceil","","6+",Od],["Clip","","6-10",ns,Ad],["Clip","","11+",Pd],["Concat","","4+",xd,wd],["Conv","","1+",fs,ds],["ConvTranspose","","1+",np,op],["Cos","","7+",Ed],["Div","","7+",nd],["Dropout","","7+",os],["DepthToSpace","","1+",lp,cp],["Equal","","7+",od],["Elu","","6+",Cd,kd],["Exp","","6+",Dd],["Flatten","","1+",dp,pp],["Floor","","6+",Bd],["FusedConv","com.microsoft","1+",fs,ds],["Gather","","1+",hp,gp],["Gemm","","7-10",ps,xp],["Gemm","","11+",ps,vp],["GlobalAveragePool","","1+",Vp,Gp],["GlobalMaxPool","","1+",Kp],["Greater","","7+",id],["Identity","","1+",os],["ImageScaler","","1+",Tp,_p],["InstanceNormalization","","6+",$p,Ap],["LeakyRelu","","6+",Ld,Rd],["Less","","7+",ad],["LRN","","1+",Op,Ep],["Log","","6+",Nd],["MatMul","","1+",Zd,Yd],["MaxPool","","1+",Up,Wp],["Mul","","7+",sd],["Neg","","6+",zd],["Not","","1+",Fd],["Or","","7+",ud],["Pad","","2-10",ms,Dp],["Pad","","11+",Bp,Lp],["Pow","","7+",ld],["PRelu","","7+",cd],["ReduceLogSum","","1+",tm,$r],["ReduceMax","","1+",Jp,$r],["ReduceMean","","1+",Yp,$r],["ReduceMin","","1+",Qp,$r],["ReduceProd","","1+",em,$r],["ReduceSum","","1-12",Zp,$r],["ReduceSumSquare","","1+",rm,$r],["Relu","","6+",Md],["Reshape","","5+",om],["Resize","","10",xs,lm],["Resize","","11+",xs,cm],["Shape","","1+",dm],["Sigmoid","","6+",Vd],["Sin","","7+",Gd],["Slice","","10+",bm],["Slice","","1-9",mm,hm],["Softmax","","1-12",Tm,_m],["Softmax","","13+",Sm,Im],["Split","","2-12",Em,Cm],["Sqrt","","6+",Ud],["Squeeze","","1-12",ws,Bm],["Squeeze","","13+",Dm],["Sub","","7+",fd],["Sum","","6+",Rm],["Tan","","7+",Wd],["Tanh","","6+",Hd],["Tile","","6+",zm],["Transpose","","1+",Hr,sp],["Upsample","","7-8",hs,sm],["Upsample","","9",hs,um],["Unsqueeze","","1-12",Ts,Vm],["Unsqueeze","","13+",Mm],["Xor","","7+",dd]]});function qm(r){let e={},n;for(;(n=Hm.exec(r))!==null;){let t=n[3].split(",").map(o=>{let i=o.trim().split(" ");return i&&i.length===2?{type:i[0],name:i[1]}:null}).filter(o=>o!==null);e[n[2]]={params:t,body:n[4]}}for(let t in e){let o=x_.replace("__FUNC__",t),i=new RegExp(o,"gm");for(;(n=i.exec(r))!==null;){let s=n[1],a=n[2],u=n[3].split(","),l=s?`${s} ${a};`:"",f=e[t].body,c="";e[t].params.forEach((b,h)=>{b&&(c+=`${b.type} ${b.name} = ${u[h]}; + `;return{...n,output:{dims:o,type:e[0].type,textureType:0},shaderSource:a}},y_=r=>{if(!r||r.length!==2)throw new Error("Tile requires 2 input.");if(r[1].dims.length!==1)throw new Error("The second input shape must 1 dimension.");if(r[1].dims[0]!==r[0].dims.length)throw new Error("Invalid input shape.");if(Sr.indexOf(r[0].type)===-1)throw new Error("Invalid input type.");if(r[1].type!=="int32"&&r[1].type!=="int16")throw new Error("Invalid repeat type.")}});var _s,Mm,Vm,x_,v_,Gm=C(()=>{"use strict";De();_s=(r,e,n)=>{x_(e);let t=J.unsqueezeShape(e[0].dims,n);return[r.reshapeUnpacked(e[0],t)]},Mm=(r,e)=>(v_(e),_s(r,[e[0]],Array.from(e[1].integerData))),Vm=r=>r.attributes.getInts("axes"),x_=r=>{if(!r||r.length!==1)throw new Error("Unsqueeze requires 1 input.");if(r[0].type==="string")throw new Error("invalid input tensor types.")},v_=r=>{if(!r||r.length!==2)throw new Error("Unsqueeze requires 2 inputs.");if(r[1].type!=="int32")throw new Error("Invalid input 
type.")}});var Um,Wm=C(()=>{"use strict";ed();pd();gd();Td();Ho();ip();fp();mp();bp();wp();Ip();Pp();kp();qo();Rp();Xp();nm();im();fm();pm();ym();Pm();km();Lm();Nm();Fm();Ko();as();Gm();ys();Um=[["Abs","","6+",_d],["Acos","","7+",Id],["Add","","7+",td],["And","","7+",rd],["Asin","","7+",Sd],["Atan","","7+",$d],["AveragePool","","7+",zp,Fp],["BatchNormalization","","7+",Jf,Qf],["Cast","","6+",md,hd],["Ceil","","6+",Od],["Clip","","6-10",os,Ad],["Clip","","11+",Pd],["Concat","","4+",xd,wd],["Conv","","1+",ds,ps],["ConvTranspose","","1+",np,op],["Cos","","7+",Ed],["Div","","7+",nd],["Dropout","","7+",is],["DepthToSpace","","1+",lp,cp],["Equal","","7+",od],["Elu","","6+",Cd,kd],["Exp","","6+",Dd],["Flatten","","1+",dp,pp],["Floor","","6+",Bd],["FusedConv","com.microsoft","1+",ds,ps],["Gather","","1+",hp,gp],["Gemm","","7-10",ms,xp],["Gemm","","11+",ms,vp],["GlobalAveragePool","","1+",Vp,Gp],["GlobalMaxPool","","1+",jp],["Greater","","7+",id],["Identity","","1+",is],["ImageScaler","","1+",Tp,_p],["InstanceNormalization","","6+",$p,Ap],["LeakyRelu","","6+",Ld,Rd],["Less","","7+",ad],["LRN","","1+",Op,Ep],["Log","","6+",Nd],["MatMul","","1+",Zd,Yd],["MaxPool","","1+",Up,Wp],["Mul","","7+",sd],["Neg","","6+",zd],["Not","","1+",Fd],["Or","","7+",ud],["Pad","","2-10",hs,Dp],["Pad","","11+",Bp,Lp],["Pow","","7+",ld],["PRelu","","7+",cd],["ReduceLogSum","","1+",tm,$r],["ReduceMax","","1+",Jp,$r],["ReduceMean","","1+",Yp,$r],["ReduceMin","","1+",Qp,$r],["ReduceProd","","1+",em,$r],["ReduceSum","","1-12",Zp,$r],["ReduceSumSquare","","1+",rm,$r],["Relu","","6+",Md],["Reshape","","5+",om],["Resize","","10",vs,lm],["Resize","","11+",vs,cm],["Shape","","1+",dm],["Sigmoid","","6+",Vd],["Sin","","7+",Gd],["Slice","","10+",bm],["Slice","","1-9",mm,hm],["Softmax","","1-12",Tm,_m],["Softmax","","13+",Sm,Im],["Split","","2-12",Em,Cm],["Sqrt","","6+",Ud],["Squeeze","","1-12",Ts,Bm],["Squeeze","","13+",Dm],["Sub","","7+",fd],["Sum","","6+",Rm],["Tan","","7+",Wd],["Tanh","","6+",Hd],["Tile","","6+",zm],["Transpose","","1+",Hr,sp],["Upsample","","7-8",gs,sm],["Upsample","","9",gs,um],["Unsqueeze","","1-12",_s,Vm],["Unsqueeze","","13+",Mm],["Xor","","7+",dd]]});function qm(r){let e={},n;for(;(n=Hm.exec(r))!==null;){let t=n[3].split(",").map(o=>{let i=o.trim().split(" ");return i&&i.length===2?{type:i[0],name:i[1]}:null}).filter(o=>o!==null);e[n[2]]={params:t,body:n[4]}}for(let t in e){let o=w_.replace("__FUNC__",t),i=new RegExp(o,"gm");for(;(n=i.exec(r))!==null;){let s=n[1],a=n[2],u=n[3].split(","),l=s?`${s} ${a};`:"",f=e[t].body,c="";e[t].params.forEach((b,h)=>{b&&(c+=`${b.type} ${b.name} = ${u[h]}; `)}),f=`${c} ${f}`,f=f.replace("return",`${a} = `);let p=` ${l} { ${f} } - `;r=r.replace(n[0],p)}}return r=r.replace(Hm,""),r}var Hm,x_,Km=C(()=>{"use strict";Hm=/@inline[\s\n\r]+(\w+)[\s\n\r]+([0-9a-zA-Z_]+)\s*\(([^)]*)\)\s*{(([^}]|[\n\r])*)}/gm,x_="(\\w+)?\\s+([_0-9a-zA-Z]+)\\s+=\\s+__FUNC__\\((.*)\\)\\s*;"});function xn(r,e){let n=[],t=[],o=e!=null&&Array.isArray(e)&&e.length===0,i=e==null||o?null:v_(e,r).sort(),s=0;for(let a=0;aa)&&r[a]===1&&(n.push(r[a]),t.push(a)),i[s]<=a&&s++}r[a]!==1&&(n.push(r[a]),t.push(a))}return{newShape:n,keptDims:t}}function v_(r,e){let n=e.length;return r=r==null?e.map((t,o)=>o):[].concat(r),pn(r.every(t=>t>=-n&&t`All values in axis param must be in range [-${n}, ${n}) but got axis ${r}`),pn(r.every(w_),()=>`All values in axis param must be integers but got axis ${r}`),r.map(t=>t<0?n+t:t)}function w_(r){return r%1===0}function T_(r){if(r.length===0)return 1;let e=r[0];for(let n=1;n{"use 
strict";Pt();De();Jo=class{constructor(e){this.maxTextureSize=e}computeTextureWH(e,n){let t=this.computeTexture(e,n);return n&&n.isPacked&&(t[0]/=2,t[1]/=2),n&&n.reverseWH?[t[1],t[0]]:t}computeTexture(e,n){let t=n&&n.isPacked;if(e.length===0)return t?[2,2]:[1,1];let o=this.maxTextureSize;if(n&&n.breakAxis!==void 0){let a=n.breakAxis>=e.length?1:e.slice(n.breakAxis).reduce((l,f)=>l*f),u=n.breakAxis<=0?1:e.slice(0,n.breakAxis).reduce((l,f)=>l*f);if(a>o||u>o)Re.verbose("TextureLayout",`Given width/height preferences were unattainable: shape:${e}, breakAxis:${n.breakAxis}`);else return[a,u]}let i=e.slice(0);t&&(o=o*2,i=i.map((a,u)=>u>=i.length-2?i[u]%2===0?i[u]:i[u]+1:i[u]),i.length===1&&(i=[2,i[0]])),i.length!==2&&(i=xn(i).newShape);let s=T_(i);return i.length<=1&&s<=o?[1,s]:i.length===2&&i[0]<=o&&i[1]<=o?i:i.length===3&&i[0]*i[1]<=o&&i[2]<=o?[i[0]*i[1],i[2]]:i.length===3&&i[0]<=o&&i[1]*i[2]<=o?[i[0],i[1]*i[2]]:i.length===4&&i[0]*i[1]*i[2]<=o&&i[3]<=o?[i[0]*i[1]*i[2],i[3]]:i.length===4&&i[0]<=o&&i[1]*i[2]*i[3]<=o?[i[0],i[1]*i[2]*i[3]]:t?jm(s/4).map(a=>a*2):jm(s)}}});var Qo,Xm=C(()=>{"use strict";De();pr();He();_s();Qt();Qo=class extends kt{constructor(n){super(n)}getFunctions(){return{...this.offsetToCoords(),...this.coordsToOffset(),...this.toVec(),...this.valueFrom(),...this.getCommonUtilFuncs(),...this.getInputsSamplingSnippets(),...this.getOutputSamplingSnippet()}}getCustomTypes(){return{}}offsetToCoords(){let n="offsetToCoords";return{offsetToCoords:new X(` + `;r=r.replace(n[0],p)}}return r=r.replace(Hm,""),r}var Hm,w_,jm=C(()=>{"use strict";Hm=/@inline[\s\n\r]+(\w+)[\s\n\r]+([0-9a-zA-Z_]+)\s*\(([^)]*)\)\s*{(([^}]|[\n\r])*)}/gm,w_="(\\w+)?\\s+([_0-9a-zA-Z]+)\\s+=\\s+__FUNC__\\((.*)\\)\\s*;"});function vn(r,e){let n=[],t=[],o=e!=null&&Array.isArray(e)&&e.length===0,i=e==null||o?null:T_(e,r).sort(),s=0;for(let a=0;aa)&&r[a]===1&&(n.push(r[a]),t.push(a)),i[s]<=a&&s++}r[a]!==1&&(n.push(r[a]),t.push(a))}return{newShape:n,keptDims:t}}function T_(r,e){let n=e.length;return r=r==null?e.map((t,o)=>o):[].concat(r),mn(r.every(t=>t>=-n&&t`All values in axis param must be in range [-${n}, ${n}) but got axis ${r}`),mn(r.every(__),()=>`All values in axis param must be integers but got axis ${r}`),r.map(t=>t<0?n+t:t)}function __(r){return r%1===0}function I_(r){if(r.length===0)return 1;let e=r[0];for(let n=1;n{"use strict";Pt();De();Jo=class{constructor(e){this.maxTextureSize=e}computeTextureWH(e,n){let t=this.computeTexture(e,n);return n&&n.isPacked&&(t[0]/=2,t[1]/=2),n&&n.reverseWH?[t[1],t[0]]:t}computeTexture(e,n){let t=n&&n.isPacked;if(e.length===0)return t?[2,2]:[1,1];let o=this.maxTextureSize;if(n&&n.breakAxis!==void 0){let a=n.breakAxis>=e.length?1:e.slice(n.breakAxis).reduce((l,f)=>l*f),u=n.breakAxis<=0?1:e.slice(0,n.breakAxis).reduce((l,f)=>l*f);if(a>o||u>o)Re.verbose("TextureLayout",`Given width/height preferences were unattainable: shape:${e}, breakAxis:${n.breakAxis}`);else return[a,u]}let i=e.slice(0);t&&(o=o*2,i=i.map((a,u)=>u>=i.length-2?i[u]%2===0?i[u]:i[u]+1:i[u]),i.length===1&&(i=[2,i[0]])),i.length!==2&&(i=vn(i).newShape);let s=I_(i);return i.length<=1&&s<=o?[1,s]:i.length===2&&i[0]<=o&&i[1]<=o?i:i.length===3&&i[0]*i[1]<=o&&i[2]<=o?[i[0]*i[1],i[2]]:i.length===3&&i[0]<=o&&i[1]*i[2]<=o?[i[0],i[1]*i[2]]:i.length===4&&i[0]*i[1]*i[2]<=o&&i[3]<=o?[i[0]*i[1]*i[2],i[3]]:i.length===4&&i[0]<=o&&i[1]*i[2]*i[3]<=o?[i[0],i[1]*i[2]*i[3]]:t?Km(s/4).map(a=>a*2):Km(s)}}});var Qo,Xm=C(()=>{"use strict";De();pr();He();Is();Qt();Qo=class extends 
kt{constructor(n){super(n)}getFunctions(){return{...this.offsetToCoords(),...this.coordsToOffset(),...this.toVec(),...this.valueFrom(),...this.getCommonUtilFuncs(),...this.getInputsSamplingSnippets(),...this.getOutputSamplingSnippet()}}getCustomTypes(){return{}}offsetToCoords(){let n="offsetToCoords";return{offsetToCoords:new X(` vec2 ${n}(int offset, int width, int height) { int t = offset / width; int s = offset - t*width; @@ -1480,13 +1480,13 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( float sampleTexture(sampler2D textureSampler, vec2 uv) { return ${o.texture2D}(textureSampler, uv).r; }`),n}getInputsSamplingSnippets(){let n={},t=this.context.outputTextureLayout;return this.context.programInfo.inputNames.forEach((o,i)=>{let s=this.context.inputTextureLayouts[i],a=zo(o);s.isPacked?n[a]=this.getPackedSamplerFromInput(a,o,s):n[a]=this.getUnpackedSamplerFromInput(a,o,s);let u=Rf(o);s.unpackedShape.length<=t.unpackedShape.length&&(s.isPacked?n[u]=this.getPackedSamplerAtOutputCoords(u,s,t,o):n[u]=this.getUnpackedSamplerAtOutputCoords(u,s,t,o))}),n}getPackedSamplerAtOutputCoords(n,t,o,i){let s=t.unpackedShape,a=o.unpackedShape,l=zo(i),f=s.length,c=a.length,p=gt.getBroadcastDims(s,a),b=bt(c),h=c-f,g,T=Gt();f===0?g="":c<2&&p.length>=1?g="coords = 0;":g=p.map(q=>`coords.${T[q+h]} = 0;`).join(` -`);let w="";c<2&&f>0?w="coords":w=s.map((q,K)=>`coords.${T[K+h]}`).join(", ");let v="return outputValue;",$=J.size(s)===1,E=J.size(a)===1;if(f===1&&!$&&!E)v=` +`);let w="";c<2&&f>0?w="coords":w=s.map((q,j)=>`coords.${T[j+h]}`).join(", ");let v="return outputValue;",$=J.size(s)===1,E=J.size(a)===1;if(f===1&&!$&&!E)v=` return vec4(outputValue.xy, outputValue.xy); `;else if($&&!E)c===1?v=` return vec4(outputValue.x, outputValue.x, 0., 0.); `:v=` return vec4(outputValue.x); - `;else if(p.length){let q=f-2,K=f-1;p.indexOf(q)>-1&&p.indexOf(K)>-1?v="return vec4(outputValue.x);":p.indexOf(q)>-1?v="return vec4(outputValue.x, outputValue.y, outputValue.x, outputValue.y);":p.indexOf(K)>-1&&(v="return vec4(outputValue.xx, outputValue.zz);")}let N=` + `;else if(p.length){let q=f-2,j=f-1;p.indexOf(q)>-1&&p.indexOf(j)>-1?v="return vec4(outputValue.x);":p.indexOf(q)>-1?v="return vec4(outputValue.x, outputValue.y, outputValue.x, outputValue.y);":p.indexOf(j)>-1&&(v="return vec4(outputValue.xx, outputValue.zz);")}let N=` int lastDim = coords.${T[c-1]}; coords.${T[c-1]} = coords.${T[c-2]}; coords.${T[c-2]} = lastDim; @@ -1523,9 +1523,9 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( }`;return new X(h)}let f=s,c=Math.ceil(i[1]/2),b=`vec4 ${n}(int row, int col) { vec2 uv = packedUVfrom2D(${f[1]}, ${f[0]}, ${c}, row, col); return ${a.texture2D}(${t}, uv); - }`;return new X(b,["coordinates.packedUVfrom2D"])}getPackedSampler3D(n,t,o){let i=o.unpackedShape,s=[o.width,o.height],a=[s[0],s[1]],u=oe(this.context.glContext.version);if(i[0]===1){let g=i.slice(1),T=[1,2],w=mn(i,g),v=["b","row","col"],S=JSON.parse(JSON.stringify(o));S.unpackedShape=w;let $=this.getPackedSamplerFromInput(n,t,S),E=`${$.routineBody} + }`;return new X(b,["coordinates.packedUVfrom2D"])}getPackedSampler3D(n,t,o){let i=o.unpackedShape,s=[o.width,o.height],a=[s[0],s[1]],u=oe(this.context.glContext.version);if(i[0]===1){let g=i.slice(1),T=[1,2],w=hn(i,g),v=["b","row","col"],S=JSON.parse(JSON.stringify(o));S.unpackedShape=w;let $=this.getPackedSamplerFromInput(n,t,S),E=`${$.routineBody} vec4 ${n}(int b, int row, int col) { - return ${n}(${hn(v,T)}); + return ${n}(${gn(v,T)}); } `;return new 
X(E,$.dependencies)}let l=a[0],f=a[1],c=Math.ceil(i[2]/2),p=c*Math.ceil(i[1]/2),h=`vec4 ${n}(int b, int row, int col) { vec2 uv = packedUVfrom3D( ${f}, ${l}, ${p}, ${c}, b, row, col); @@ -1569,10 +1569,10 @@ value = _A(inputIdx);`,"value = min(value, _A(inputIdx));",""]}),em=(r,e,n)=>qr( vec2 uv = (vec2(row, col) + halfCR) / vec2(${b}.0, ${h}.0); return sampleTexture(${t}, uv); } - `;return new X(g,["coordinates.sampleTexture"])}let{newShape:a,keptDims:u}=xn(i),l=a;if(l.lengthqr( vec2 uv = uvFromFlat(${f}, ${c}, index); return sampleTexture(${t}, uv); } - `;return new X(p,["coordinates.uvFromFlat","coordinates.sampleTexture","coordinates.coordsToOffset"])}getUnpackedSampler3D(n,t,o){let i=o.unpackedShape,s=i[1]*i[2],a=i[2],{newShape:u,keptDims:l}=xn(i),f=u;if(f.lengthqr( vec2 uv = uvFromFlat(${l}, ${f}, index); return sampleTexture(${t}, uv); } - `;return new X(c,["coordinates.uvFromFlat","coordinates.sampleTexture"])}getUnpackedSampler5D(n,t,o){let i=o.unpackedShape,s=i[4],a=i[3]*s,u=i[2]*a,l=i[1]*u,{newShape:f,keptDims:c}=xn(i);if(f.lengthqr( vec2 uv = uvFromFlat(${p}, ${b}, index); return sampleTexture(${t}, uv); } - `;return new X(h,["coordinates.sampleTexture","coordinates.uvFromFlat"])}getUnpackedSampler6D(n,t,o){let i=o.unpackedShape,s=i[5],a=i[4]*s,u=i[3]*a,l=i[2]*u,f=i[1]*l,{newShape:c,keptDims:p}=xn(i);if(c.lengthqr( int getVecItem(int m[${n}], int index) { ${t} } - `;return{getVecItem:new X(o)}}}});var Is,eh=C(()=>{"use strict";Xm();Zm();Ym();Jm();Qm();Is={encoding:ei,fragcolor:ti,vec:ni,shapeUtils:ri,coordinates:Qo}});var oi,th=C(()=>{"use strict";pr();Km();eh();He();oi=class{constructor(e,n,t,o){this.libs={};this.glslLibRoutineDependencyGraph={};this.context=new Go(e,n,t,o),Object.keys(Is).forEach(s=>{let a=new Is[s](this.context);this.libs[s]=a});let i=this.glslLibRoutineDependencyGraph;for(let s in this.libs){let u=this.libs[s].getFunctions();for(let l in u){let f=s+"."+l,c;i[f]?(c=i[f],c.routineBody=u[l].routineBody):(c=new Mn(f,u[l].routineBody),i[f]=c);let p=u[l].dependencies;if(p)for(let b=0;b{"use strict";Xm();Zm();Ym();Jm();Qm();Ss={encoding:ei,fragcolor:ti,vec:ni,shapeUtils:ri,coordinates:Qo}});var oi,th=C(()=>{"use strict";pr();jm();eh();He();oi=class{constructor(e,n,t,o){this.libs={};this.glslLibRoutineDependencyGraph={};this.context=new Go(e,n,t,o),Object.keys(Ss).forEach(s=>{let a=new Ss[s](this.context);this.libs[s]=a});let i=this.glslLibRoutineDependencyGraph;for(let s in this.libs){let u=this.libs[s].getFunctions();for(let l in u){let f=s+"."+l,c;i[f]?(c=i[f],c.routineBody=u[l].routineBody):(c=new Mn(f,u[l].routineBody),i[f]=c);let p=u[l].dependencies;if(p)for(let b=0;bqr( `;else throw new Error(`Missing body for the Glsl Library routine: ${n[o].name}`);return t}selectGlslLibRoutinesToBeIncluded(e){let n=[];return Object.keys(this.glslLibRoutineDependencyGraph).forEach(t=>{let o=t.split(".")[1];e.indexOf(o)!==-1&&n.push(this.glslLibRoutineDependencyGraph[t])}),Uo.returnOrderedNodes(n)}getUniforms(e,n){let t=[];if(e)for(let o of e)t.push(`uniform sampler2D ${o};`);if(n)for(let o of n)t.push(`uniform ${o.type} ${o.name}${o.arrayLength?`[${o.arrayLength}]`:""};`);return t.join(` `)}}});var ii,rh=C(()=>{"use strict";ft();Pt();th();He();ii=class{constructor(e,n,t){this.profiler=e;this.glContext=n;this.textureLayoutStrategy=t;this.repo=new Map,this.attributesBound=!1}getArtifact(e){return this.repo.get(e)}setArtifact(e,n){this.repo.set(e,n)}run(e,n,t){this.profiler.event("op",`ProgramManager.run ${e.programInfo.name??"unknown kernel"}`,()=>{let 
o=this.glContext.gl,i=e.program;o.useProgram(i);try{this.bindOutput(t),this.attributesBound||this.bindAttributes(e.attribLocations),this.bindUniforms(e.uniformLocations,e.programInfo.variables??[],n)}catch(s){throw Re.error("ProgramManager",e.programInfo.shaderSource),s}this.profiler.event("backend","GlContext.draw()",()=>{this.glContext.draw()})},this.glContext)}dispose(){this.vertexShader&&this.glContext.deleteShader(this.vertexShader),this.repo.forEach(e=>this.glContext.deleteProgram(e.program))}build(e,n,t){return this.profiler.event("backend","ProgramManager.build",()=>{let o=new oi(this.glContext,e,n,t),i=o.preprocess(),s=this.compile(i);return{programInfo:e,program:s,uniformLocations:this.getUniformLocations(s,o.context.programInfo.inputNames,o.context.programInfo.variables),attribLocations:this.getAttribLocations(s)}})}compile(e){if(!this.vertexShader){Re.verbose("ProrgramManager","Compiling and caching Vertex shader for the first time");let o=Df(this.glContext.version);this.vertexShader=this.glContext.compileShader(o,this.glContext.gl.VERTEX_SHADER)}le.debug&&Re.verbose("ProrgramManager",`FragShader: ${e} -`);let n=this.glContext.compileShader(e,this.glContext.gl.FRAGMENT_SHADER),t=this.glContext.createProgram(this.vertexShader,n);return this.glContext.deleteShader(n),t}bindOutput(e){let n=e.width,t=e.height;Re.verbose("ProrgramManager",`Binding output texture to Framebuffer: w/h=${n}/${t}, shape=${e.shape}, type=${e.tensor.type}`),this.glContext.attachFramebuffer(e.texture,n,t)}bindAttributes(e){let n=e.position,t=e.textureCoord;this.glContext.setVertexAttributes(n,t),this.attributesBound=!0}bindUniforms(e,n,t){let o=this.glContext.gl,i=0;for(let{name:s,type:a,location:u,arrayLength:l}of e){let f=n.find(c=>c.name===s)?.data;if(a!=="sampler2D"&&!f)throw new Error(`variable '${s}' does not have data defined in program info`);switch(a){case"sampler2D":this.bindTexture(t[i],u,i),i++;break;case"float":l?o.uniform1fv(u,f):o.uniform1f(u,f);break;case"int":l?o.uniform1iv(u,f):o.uniform1i(u,f);break;default:throw new Error(`Uniform not implemented: ${a}`)}}}bindTexture(e,n,t){this.glContext.bindTextureToUniform(e.texture,t,n)}getAttribLocations(e){return{position:this.getAttribLocation(e,"position"),textureCoord:this.getAttribLocation(e,"textureCoord")}}getUniformLocations(e,n,t){let o=[];if(n)for(let i of n)o.push({name:i,type:"sampler2D",location:this.getUniformLocation(e,i)});if(t)for(let i of t)o.push({...i,location:this.getUniformLocation(e,i.name)});return o}getUniformLocation(e,n){let o=this.glContext.gl.getUniformLocation(e,n);if(o===null)throw new Error(`Uniform ${n} not found.`);return o}getAttribLocation(e,n){return this.glContext.gl.getAttribLocation(e,n)}}});var ai,nh=C(()=>{"use strict";Pt();zn();ai=class{constructor(e,n,t,o){this.glContext=e;this.layoutStrategy=n;this.profiler=t;this.config=o;this.pendingRead=new Map;o.reuseTextures&&(this.inUseTextures=new Map,this.idleTextures=new Map,this.textureLookup=new Map)}createTextureFromLayout(e,n,t,o){let i=this.toEncoderType(e),s=this.glContext.getEncoder(i,n.channels||1,o);if(n.isPacked&&o===1)throw new Error("not implemented");let a=n.width,u=n.height,l,f;if(this.config.reuseTextures){l=`${a}x${u}_${s.format}_${s.internalFormat}_${s.textureType}`,f=this.inUseTextures.get(l),f||(f=[],this.inUseTextures.set(l,f));let p=this.idleTextures.get(l);if(p&&p.length>0){let b=p.pop();return f.push(b),o===1&&this.glContext.updateTexture(b,a,u,s,this.toTextureData(e,t)),b}}Re.verbose("TextureManager",`Creating new texture of size 
${n.width}x${n.height}`);let c=this.glContext.allocateTexture(a,u,s,this.toTextureData(e,t));return this.config.reuseTextures&&(f.push(c),this.textureLookup.set(c,l)),c}readTexture(e,n,t){return t||(t=1),this.profiler.event("backend","TextureManager.readTexture",()=>{let o=e.shape.reduce((s,a)=>s*a)*t,i=this.glContext.readTexture(e.texture,e.width,e.height,o,this.toEncoderType(n),t);return this.toTensorData(n,i)})}async readTextureAsync(e,n,t){let o=e.tensor.dataId;if(t||(t=1),this.pendingRead.has(o)){let i=this.pendingRead.get(o);return new Promise(s=>i?.push(s))}return this.profiler.event("backend","TextureManager.readTextureAsync",async()=>{this.pendingRead.set(o,[]);let i=e.shape.reduce((l,f)=>l*f)*t;await this.glContext.createAndWaitForFence();let s=this.glContext.readTexture(e.texture,e.width,e.height,i,this.toEncoderType(n),t),a=this.toTensorData(n,s),u=this.pendingRead.get(o);return this.pendingRead.delete(o),u?.forEach(l=>l(a)),a})}readUint8TextureAsFloat(e){return this.profiler.event("backend","TextureManager.readUint8TextureAsFloat",()=>{let n=e.shape.reduce((o,i)=>o*i),t=this.glContext.readTexture(e.texture,e.width,e.height,n*4,"byte",4);return new Float32Array(t.buffer,t.byteOffset,n)})}releaseTexture(e,n){let t;if(this.config.reuseTextures&&(t=this.textureLookup.get(e.texture),t)){n&&this.textureLookup.delete(t);let o=this.inUseTextures.get(t);if(o){let i=o.indexOf(e.texture);if(i!==-1){o.splice(i,1);let s=this.idleTextures.get(t);s||(s=[],this.idleTextures.set(t,s)),s.push(e.texture)}}}(!t||n)&&(Re.verbose("TextureManager",`Deleting texture of size ${e.width}x${e.height}`),this.glContext.deleteTexture(e.texture))}toTensorData(e,n){switch(e){case"int16":return n instanceof Int16Array?n:Int16Array.from(n);case"int32":return n instanceof Int32Array?n:Int32Array.from(n);case"int8":return n instanceof Int8Array?n:Int8Array.from(n);case"uint16":return n instanceof Uint16Array?n:Uint16Array.from(n);case"uint32":return n instanceof Uint32Array?n:Uint32Array.from(n);case"uint8":case"bool":return n instanceof Uint8Array?n:Uint8Array.from(n);case"float32":return n instanceof Float32Array?n:Float32Array.from(n);case"float64":return n instanceof Float64Array?n:Float64Array.from(n);default:throw new Error(`TensorData type ${e} is not supported`)}}toTextureData(e,n){if(n)return n instanceof Float32Array?n:new Float32Array(n)}toEncoderType(e){return"float"}clearActiveTextures(){this.glContext.clearActiveTextures()}}});var si,oh=C(()=>{"use strict";Pt();yc();Zf();Wm();rh();_s();nh();si=class{constructor(e,n){this.backend=e;this.context=n;this.layoutStrategy=new Jo(e.glContext.maxTextureSize),this.programManager=new ii(this.context.profiler,e.glContext,this.layoutStrategy),this.textureManager=new ai(e.glContext,this.layoutStrategy,this.context.profiler,{reuseTextures:e.textureCacheMode==="full"}),this.packedTextureDataCache=new Map,this.unpackedTextureDataCache=new Map,this.pack=e.pack,this.pack2unpackMap=new Map,this.unpack2packMap=new Map}createInferenceHandler(){return new Vo(this)}onGraphInitialized(e){let n=e.getValues().filter(t=>t.from===-1&&t.tensor).map(t=>t.tensor.dataId);this.initializers=new Set(n)}isInitializer(e){return this.initializers?this.initializers.has(e):!1}addInitializer(e){this.initializers.add(e)}getTextureData(e,n){return n?this.packedTextureDataCache.get(e):this.unpackedTextureDataCache.get(e)}setTextureData(e,n,t=!1){Re.verbose("WebGLSessionHandler","Storing Texture data in 
cache"),t?this.packedTextureDataCache.set(e,n):this.unpackedTextureDataCache.set(e,n)}dispose(){this.programManager.dispose(),this.textureManager.clearActiveTextures(),this.packedTextureDataCache.forEach(e=>this.textureManager.releaseTexture(e,!0)),this.packedTextureDataCache=new Map,this.unpackedTextureDataCache.forEach(e=>this.textureManager.releaseTexture(e,!0)),this.unpackedTextureDataCache=new Map}resolve(e,n,t){let o=bc(e,n,Um);return{impl:o.opImpl,context:o.opInit?o.opInit(e,t):e}}}});function __(r){let e=0;for(;e{"use strict";ft();zn();zn();Qt();Un=class{constructor(e,n){this.frameBufferBound=!1;this.itemsToPoll=[];this.gl=e,this.version=n,this.getExtensions(),this.vertexbuffer=this.createVertexbuffer(),this.framebuffer=this.createFramebuffer(),this.queryVitalParameters()}allocateTexture(e,n,t,o){let i=this.gl,s=i.createTexture();i.bindTexture(i.TEXTURE_2D,s),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_MIN_FILTER,i.NEAREST),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_MAG_FILTER,i.NEAREST),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_WRAP_S,i.CLAMP_TO_EDGE),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_WRAP_T,i.CLAMP_TO_EDGE);let a=o?t.encode(o,e*n):null;return i.texImage2D(i.TEXTURE_2D,0,t.internalFormat,e,n,0,t.format,t.textureType,a),this.checkError(),s}updateTexture(e,n,t,o,i){let s=this.gl;s.bindTexture(s.TEXTURE_2D,e);let a=o.encode(i,n*t);s.texSubImage2D(s.TEXTURE_2D,0,0,0,n,t,o.format,o.textureType,a),this.checkError()}attachFramebuffer(e,n,t){let o=this.gl;o.bindTexture(o.TEXTURE_2D,e),o.bindFramebuffer(o.FRAMEBUFFER,this.framebuffer),o.framebufferTexture2D(o.FRAMEBUFFER,o.COLOR_ATTACHMENT0,o.TEXTURE_2D,e,0),this.checkError(),o.viewport(0,0,n,t),o.scissor(0,0,n,t)}readTexture(e,n,t,o,i,s){let a=this.gl;s||(s=1),this.frameBufferBound||this.attachFramebuffer(e,n,t);let u=this.getEncoder(i,s),l=u.allocate(n*t);return a.bindTexture(a.TEXTURE_2D,e),a.framebufferTexture2D(a.FRAMEBUFFER,a.COLOR_ATTACHMENT0,a.TEXTURE_2D,e,0),a.readPixels(0,0,n,t,a.RGBA,u.textureType,l),this.checkError(),u.decode(l,o)}isFramebufferReady(){return!0}getActiveTexture(){let e=this.gl;return`TEXTURE${e.getParameter(this.gl.ACTIVE_TEXTURE)-e.TEXTURE0}`}getTextureBinding(){return this.gl.getParameter(this.gl.TEXTURE_BINDING_2D)}getFramebufferBinding(){return this.gl.getParameter(this.gl.FRAMEBUFFER_BINDING)}setVertexAttributes(e,n){let t=this.gl;t.vertexAttribPointer(e,3,t.FLOAT,!1,20,0),t.enableVertexAttribArray(e),n!==-1&&(t.vertexAttribPointer(n,2,t.FLOAT,!1,20,12),t.enableVertexAttribArray(n)),this.checkError()}createProgram(e,n){let t=this.gl,o=t.createProgram();return t.attachShader(o,e),t.attachShader(o,n),t.linkProgram(o),o}compileShader(e,n){let t=this.gl,o=t.createShader(n);if(!o)throw new Error(`createShader() returned null with type ${n}`);if(t.shaderSource(o,e),t.compileShader(o),t.getShaderParameter(o,t.COMPILE_STATUS)===!1)throw new Error(`Failed to compile shader: ${t.getShaderInfoLog(o)} +`);let n=this.glContext.compileShader(e,this.glContext.gl.FRAGMENT_SHADER),t=this.glContext.createProgram(this.vertexShader,n);return this.glContext.deleteShader(n),t}bindOutput(e){let n=e.width,t=e.height;Re.verbose("ProrgramManager",`Binding output texture to Framebuffer: w/h=${n}/${t}, shape=${e.shape}, type=${e.tensor.type}`),this.glContext.attachFramebuffer(e.texture,n,t)}bindAttributes(e){let n=e.position,t=e.textureCoord;this.glContext.setVertexAttributes(n,t),this.attributesBound=!0}bindUniforms(e,n,t){let o=this.glContext.gl,i=0;for(let{name:s,type:a,location:u,arrayLength:l}of e){let 
f=n.find(c=>c.name===s)?.data;if(a!=="sampler2D"&&!f)throw new Error(`variable '${s}' does not have data defined in program info`);switch(a){case"sampler2D":this.bindTexture(t[i],u,i),i++;break;case"float":l?o.uniform1fv(u,f):o.uniform1f(u,f);break;case"int":l?o.uniform1iv(u,f):o.uniform1i(u,f);break;default:throw new Error(`Uniform not implemented: ${a}`)}}}bindTexture(e,n,t){this.glContext.bindTextureToUniform(e.texture,t,n)}getAttribLocations(e){return{position:this.getAttribLocation(e,"position"),textureCoord:this.getAttribLocation(e,"textureCoord")}}getUniformLocations(e,n,t){let o=[];if(n)for(let i of n)o.push({name:i,type:"sampler2D",location:this.getUniformLocation(e,i)});if(t)for(let i of t)o.push({...i,location:this.getUniformLocation(e,i.name)});return o}getUniformLocation(e,n){let o=this.glContext.gl.getUniformLocation(e,n);if(o===null)throw new Error(`Uniform ${n} not found.`);return o}getAttribLocation(e,n){return this.glContext.gl.getAttribLocation(e,n)}}});var ai,nh=C(()=>{"use strict";Pt();zn();ai=class{constructor(e,n,t,o){this.glContext=e;this.layoutStrategy=n;this.profiler=t;this.config=o;this.pendingRead=new Map;o.reuseTextures&&(this.inUseTextures=new Map,this.idleTextures=new Map,this.textureLookup=new Map)}createTextureFromLayout(e,n,t,o){let i=this.toEncoderType(e),s=this.glContext.getEncoder(i,n.channels||1,o);if(n.isPacked&&o===1)throw new Error("not implemented");let a=n.width,u=n.height,l,f;if(this.config.reuseTextures){l=`${a}x${u}_${s.format}_${s.internalFormat}_${s.textureType}`,f=this.inUseTextures.get(l),f||(f=[],this.inUseTextures.set(l,f));let p=this.idleTextures.get(l);if(p&&p.length>0){let b=p.pop();return f.push(b),o===1&&this.glContext.updateTexture(b,a,u,s,this.toTextureData(e,t)),b}}Re.verbose("TextureManager",`Creating new texture of size ${n.width}x${n.height}`);let c=this.glContext.allocateTexture(a,u,s,this.toTextureData(e,t));return this.config.reuseTextures&&(f.push(c),this.textureLookup.set(c,l)),c}readTexture(e,n,t){return t||(t=1),this.profiler.event("backend","TextureManager.readTexture",()=>{let o=e.shape.reduce((s,a)=>s*a)*t,i=this.glContext.readTexture(e.texture,e.width,e.height,o,this.toEncoderType(n),t);return this.toTensorData(n,i)})}async readTextureAsync(e,n,t){let o=e.tensor.dataId;if(t||(t=1),this.pendingRead.has(o)){let i=this.pendingRead.get(o);return new Promise(s=>i?.push(s))}return this.profiler.event("backend","TextureManager.readTextureAsync",async()=>{this.pendingRead.set(o,[]);let i=e.shape.reduce((l,f)=>l*f)*t;await this.glContext.createAndWaitForFence();let s=this.glContext.readTexture(e.texture,e.width,e.height,i,this.toEncoderType(n),t),a=this.toTensorData(n,s),u=this.pendingRead.get(o);return this.pendingRead.delete(o),u?.forEach(l=>l(a)),a})}readUint8TextureAsFloat(e){return this.profiler.event("backend","TextureManager.readUint8TextureAsFloat",()=>{let n=e.shape.reduce((o,i)=>o*i),t=this.glContext.readTexture(e.texture,e.width,e.height,n*4,"byte",4);return new Float32Array(t.buffer,t.byteOffset,n)})}releaseTexture(e,n){let t;if(this.config.reuseTextures&&(t=this.textureLookup.get(e.texture),t)){n&&this.textureLookup.delete(t);let o=this.inUseTextures.get(t);if(o){let i=o.indexOf(e.texture);if(i!==-1){o.splice(i,1);let s=this.idleTextures.get(t);s||(s=[],this.idleTextures.set(t,s)),s.push(e.texture)}}}(!t||n)&&(Re.verbose("TextureManager",`Deleting texture of size ${e.width}x${e.height}`),this.glContext.deleteTexture(e.texture))}toTensorData(e,n){switch(e){case"int16":return n instanceof 
Int16Array?n:Int16Array.from(n);case"int32":return n instanceof Int32Array?n:Int32Array.from(n);case"int8":return n instanceof Int8Array?n:Int8Array.from(n);case"uint16":return n instanceof Uint16Array?n:Uint16Array.from(n);case"uint32":return n instanceof Uint32Array?n:Uint32Array.from(n);case"uint8":case"bool":return n instanceof Uint8Array?n:Uint8Array.from(n);case"float32":return n instanceof Float32Array?n:Float32Array.from(n);case"float64":return n instanceof Float64Array?n:Float64Array.from(n);default:throw new Error(`TensorData type ${e} is not supported`)}}toTextureData(e,n){if(n)return n instanceof Float32Array?n:new Float32Array(n)}toEncoderType(e){return"float"}clearActiveTextures(){this.glContext.clearActiveTextures()}}});var si,oh=C(()=>{"use strict";Pt();yc();Zf();Wm();rh();Is();nh();si=class{constructor(e,n){this.backend=e;this.context=n;this.layoutStrategy=new Jo(e.glContext.maxTextureSize),this.programManager=new ii(this.context.profiler,e.glContext,this.layoutStrategy),this.textureManager=new ai(e.glContext,this.layoutStrategy,this.context.profiler,{reuseTextures:e.textureCacheMode==="full"}),this.packedTextureDataCache=new Map,this.unpackedTextureDataCache=new Map,this.pack=e.pack,this.pack2unpackMap=new Map,this.unpack2packMap=new Map}createInferenceHandler(){return new Vo(this)}onGraphInitialized(e){let n=e.getValues().filter(t=>t.from===-1&&t.tensor).map(t=>t.tensor.dataId);this.initializers=new Set(n)}isInitializer(e){return this.initializers?this.initializers.has(e):!1}addInitializer(e){this.initializers.add(e)}getTextureData(e,n){return n?this.packedTextureDataCache.get(e):this.unpackedTextureDataCache.get(e)}setTextureData(e,n,t=!1){Re.verbose("WebGLSessionHandler","Storing Texture data in cache"),t?this.packedTextureDataCache.set(e,n):this.unpackedTextureDataCache.set(e,n)}dispose(){this.programManager.dispose(),this.textureManager.clearActiveTextures(),this.packedTextureDataCache.forEach(e=>this.textureManager.releaseTexture(e,!0)),this.packedTextureDataCache=new Map,this.unpackedTextureDataCache.forEach(e=>this.textureManager.releaseTexture(e,!0)),this.unpackedTextureDataCache=new Map}resolve(e,n,t){let o=bc(e,n,Um);return{impl:o.opImpl,context:o.opInit?o.opInit(e,t):e}}}});function S_(r){let e=0;for(;e{"use strict";ft();zn();zn();Qt();Un=class{constructor(e,n){this.frameBufferBound=!1;this.itemsToPoll=[];this.gl=e,this.version=n,this.getExtensions(),this.vertexbuffer=this.createVertexbuffer(),this.framebuffer=this.createFramebuffer(),this.queryVitalParameters()}allocateTexture(e,n,t,o){let i=this.gl,s=i.createTexture();i.bindTexture(i.TEXTURE_2D,s),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_MIN_FILTER,i.NEAREST),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_MAG_FILTER,i.NEAREST),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_WRAP_S,i.CLAMP_TO_EDGE),i.texParameteri(i.TEXTURE_2D,i.TEXTURE_WRAP_T,i.CLAMP_TO_EDGE);let a=o?t.encode(o,e*n):null;return i.texImage2D(i.TEXTURE_2D,0,t.internalFormat,e,n,0,t.format,t.textureType,a),this.checkError(),s}updateTexture(e,n,t,o,i){let s=this.gl;s.bindTexture(s.TEXTURE_2D,e);let a=o.encode(i,n*t);s.texSubImage2D(s.TEXTURE_2D,0,0,0,n,t,o.format,o.textureType,a),this.checkError()}attachFramebuffer(e,n,t){let o=this.gl;o.bindTexture(o.TEXTURE_2D,e),o.bindFramebuffer(o.FRAMEBUFFER,this.framebuffer),o.framebufferTexture2D(o.FRAMEBUFFER,o.COLOR_ATTACHMENT0,o.TEXTURE_2D,e,0),this.checkError(),o.viewport(0,0,n,t),o.scissor(0,0,n,t)}readTexture(e,n,t,o,i,s){let a=this.gl;s||(s=1),this.frameBufferBound||this.attachFramebuffer(e,n,t);let 
u=this.getEncoder(i,s),l=u.allocate(n*t);return a.bindTexture(a.TEXTURE_2D,e),a.framebufferTexture2D(a.FRAMEBUFFER,a.COLOR_ATTACHMENT0,a.TEXTURE_2D,e,0),a.readPixels(0,0,n,t,a.RGBA,u.textureType,l),this.checkError(),u.decode(l,o)}isFramebufferReady(){return!0}getActiveTexture(){let e=this.gl;return`TEXTURE${e.getParameter(this.gl.ACTIVE_TEXTURE)-e.TEXTURE0}`}getTextureBinding(){return this.gl.getParameter(this.gl.TEXTURE_BINDING_2D)}getFramebufferBinding(){return this.gl.getParameter(this.gl.FRAMEBUFFER_BINDING)}setVertexAttributes(e,n){let t=this.gl;t.vertexAttribPointer(e,3,t.FLOAT,!1,20,0),t.enableVertexAttribArray(e),n!==-1&&(t.vertexAttribPointer(n,2,t.FLOAT,!1,20,12),t.enableVertexAttribArray(n)),this.checkError()}createProgram(e,n){let t=this.gl,o=t.createProgram();return t.attachShader(o,e),t.attachShader(o,n),t.linkProgram(o),o}compileShader(e,n){let t=this.gl,o=t.createShader(n);if(!o)throw new Error(`createShader() returned null with type ${n}`);if(t.shaderSource(o,e),t.compileShader(o),t.getShaderParameter(o,t.COMPILE_STATUS)===!1)throw new Error(`Failed to compile shader: ${t.getShaderInfoLog(o)} Shader source: -${e}`);return o}deleteShader(e){this.gl.deleteShader(e)}bindTextureToUniform(e,n,t){let o=this.gl;o.activeTexture(o.TEXTURE0+n),this.checkError(),o.bindTexture(o.TEXTURE_2D,e),this.checkError(),o.uniform1i(t,n),this.checkError()}draw(){this.gl.drawArrays(this.gl.TRIANGLE_STRIP,0,4),this.checkError()}checkError(){if(le.debug){let e=this.gl,n=e.getError(),t="";switch(n){case e.NO_ERROR:return;case e.INVALID_ENUM:t="INVALID_ENUM";break;case e.INVALID_VALUE:t="INVALID_VALUE";break;case e.INVALID_OPERATION:t="INVALID_OPERATION";break;case e.INVALID_FRAMEBUFFER_OPERATION:t="INVALID_FRAMEBUFFER_OPERATION";break;case e.OUT_OF_MEMORY:t="OUT_OF_MEMORY";break;case e.CONTEXT_LOST_WEBGL:t="CONTEXT_LOST_WEBGL";break;default:t=`Unknown WebGL Error: ${n.toString(16)}`}throw new Error(t)}}deleteTexture(e){this.gl.deleteTexture(e)}deleteProgram(e){this.gl.deleteProgram(e)}getEncoder(e,n,t=0){if(this.version===2)return new Fo(this.gl,n);switch(e){case"float":return t===1||this.isRenderFloat32Supported?new Nn(this.gl,n):new Nn(this.gl,n,this.textureHalfFloatExtension.HALF_FLOAT_OES);case"int":throw new Error("not implemented");case"byte":return new Mo(this.gl,n);default:throw new Error(`Invalid dataType: ${e}`)}}clearActiveTextures(){let e=this.gl;for(let n=0;nthis.isTimerResultAvailable(e)),this.getTimerResult(e)}async createAndWaitForFence(){let e=this.createFence(this.gl);return this.pollFence(e)}createFence(e){let n,t=e,o=t.fenceSync(t.SYNC_GPU_COMMANDS_COMPLETE,0);return e.flush(),o===null?n=()=>!0:n=()=>{let i=t.clientWaitSync(o,0,0);return i===t.ALREADY_SIGNALED||i===t.CONDITION_SATISFIED},{query:o,isFencePassed:n}}async pollFence(e){return new Promise(n=>{this.addItemToPoll(()=>e.isFencePassed(),()=>n())})}pollItems(){let e=__(this.itemsToPoll.map(n=>n.isDoneFn));for(let n=0;n<=e;++n){let{resolveFn:t}=this.itemsToPoll[n];t()}this.itemsToPoll=this.itemsToPoll.slice(e+1)}async addItemToPoll(e,n){this.itemsToPoll.push({isDoneFn:e,resolveFn:n}),!(this.itemsToPoll.length>1)&&await Xa(()=>(this.pollItems(),this.itemsToPoll.length===0))}}});function Ss(r){let e;if((!r||r==="webgl2")&&"webgl2"in vn?e=vn.webgl2:(!r||r==="webgl")&&"webgl"in vn&&(e=vn.webgl),!e)try{let t=S_();e=ah(t,r)}catch{let o=I_();e=ah(o,r)}r=r||e.version===1?"webgl":"webgl2";let n=e.gl;return vn[r]=e,n.isContextLost()?(delete 
vn[r],Ss(r)):(n.disable(n.DEPTH_TEST),n.disable(n.STENCIL_TEST),n.disable(n.BLEND),n.disable(n.DITHER),n.disable(n.POLYGON_OFFSET_FILL),n.disable(n.SAMPLE_COVERAGE),n.enable(n.SCISSOR_TEST),n.enable(n.CULL_FACE),n.cullFace(n.BACK),e)}function ah(r,e){let n={alpha:!1,depth:!1,antialias:!1,stencil:!1,preserveDrawingBuffer:!1,premultipliedAlpha:!1,failIfMajorPerformanceCaveat:!1},t,o=n;if((!e||e==="webgl2")&&(t=r.getContext("webgl2",o),t))try{return new Un(t,2)}catch(i){Re.warning("GlContextFactory",`failed to create WebGLContext using contextId 'webgl2'. Error: ${i}`)}if((!e||e==="webgl")&&(t=r.getContext("webgl",o)||r.getContext("experimental-webgl",o),t))try{return new Un(t,1)}catch(i){Re.warning("GlContextFactory",`failed to create WebGLContext using contextId 'webgl' or 'experimental-webgl'. Error: ${i}`)}throw new Error("WebGL is not supported")}function I_(){if(typeof document>"u")throw new TypeError("failed to create canvas: document is not supported");let r=document.createElement("canvas");return r.width=1,r.height=1,r}function S_(){if(typeof OffscreenCanvas>"u")throw new TypeError("failed to create offscreen canvas: OffscreenCanvas is not supported");return new OffscreenCanvas(1,1)}var vn,sh=C(()=>{"use strict";Pt();ih();vn={}});var ui,uh=C(()=>{"use strict";ft();Pt();oh();sh();ui=class{get contextId(){return le.webgl.contextId}set contextId(e){le.webgl.contextId=e}get matmulMaxBatchSize(){return le.webgl.matmulMaxBatchSize}set matmulMaxBatchSize(e){le.webgl.matmulMaxBatchSize=e}get textureCacheMode(){return le.webgl.textureCacheMode}set textureCacheMode(e){le.webgl.textureCacheMode=e}get pack(){return le.webgl.pack}set pack(e){le.webgl.pack=e}get async(){return le.webgl.async}set async(e){le.webgl.async=e}initialize(){try{return this.glContext=Ss(this.contextId),typeof this.matmulMaxBatchSize!="number"&&(this.matmulMaxBatchSize=16),typeof this.textureCacheMode!="string"&&(this.textureCacheMode="full"),typeof this.pack!="boolean"&&(this.pack=!1),typeof this.async!="boolean"&&(this.async=!1),Re.setWithEnv(le),le.webgl.context||Object.defineProperty(le.webgl,"context",{value:this.glContext.gl}),Re.verbose("WebGLBackend",`Created WebGLContext: ${typeof this.glContext} with matmulMaxBatchSize: ${this.matmulMaxBatchSize}; textureCacheMode: ${this.textureCacheMode}; pack: ${this.pack}; async: ${this.async}.`),!0}catch(e){return Re.warning("WebGLBackend",`Unable to initialize WebGLBackend. 
${e}`),!1}}createSessionHandler(e){return new si(this,e)}dispose(){this.glContext.dispose()}}});async function $s(r){if(r){let e=typeof r=="string"?[r]:r;for(let n of e){let t=lh.get(n);if(t)return t;let o=await A_(n);if(o)return o}}else return $s(["webgl"]);throw new Error("no available backend to use")}async function A_(r){let e=$_;if(typeof e[r]<"u"&&P_(e[r])){let n=e[r],t=n.initialize();if(typeof t=="object"&&"then"in t&&(t=await t),t)return lh.set(r,n),n}}function P_(r){let e=r;return"initialize"in e&&typeof e.initialize=="function"&&"createSessionHandler"in e&&typeof e.createSessionHandler=="function"&&"dispose"in e&&typeof e.dispose=="function"}var lh,$_,ch=C(()=>{"use strict";uh();lh=new Map,$_={webgl:new ui}});var As,li,fh=C(()=>{"use strict";Pt();As=class{constructor(e,n){this.op=e;this.node=n}},li=class{constructor(e,n,t){this.graph=e;this.profiler=t;this.initialize(n)}initialize(e){this.profiler.event("session","ExecutionPlan.initialize",()=>{let n=this.graph.getNodes();if(n.length!==e.length)throw new Error("The size of nodes and OPs do not match.");this._ops=e.map((t,o)=>new As(t,n[o])),this.reset(),this._starter=[],this._ops.forEach((t,o)=>{let i=!0;for(let s of t.node.inputs)if(!this._values[s]&&this.graph.getInputIndices().indexOf(s)===-1){i=!1;break}i&&this._starter.push(o)})})}reset(){this._values=this.graph.getValues().map(e=>e.tensor)}async execute(e,n){return this.profiler.event("session","ExecutionPlan.execute",async()=>{this.reset();let t=e.createInferenceHandler(),o=this.graph.getInputIndices();if(n.length!==o.length)throw new Error(`number of input tensors don't match the number of inputs to the model: actual: ${n.length} expected: ${o.length}`);n.forEach((f,c)=>{let p=o[c];this._values[p]=f});let i=this._starter.slice(0),s=this.graph.getValues(),a=this.graph.getNodes(),u=0;for(;uthis._values[T]);if(p.indexOf(void 0)!==-1)throw new Error(`unresolved input detected: op: ${c.node}`);let b=p;Re.verbose("ExecPlan",`Running op:${c.node.name} (${b.map((T,w)=>`'${c.node.inputs[w]}': ${T.type}[${T.dims.join(",")}]`).join(", ")})`);let h=await this.profiler.event("node",c.node.name,async()=>c.op.impl(t,b,c.op.context));if(h.length!==c.node.outputs.length)throw new Error("the size of output does not match model definition.");h.forEach((T,w)=>{let v=c.node.outputs[w];if(this._values[v])throw new Error(`output [${v}] already has value: op:${c.node.name}`);this._values[v]=T});let g=new Set;h.forEach((T,w)=>{let v=c.node.outputs[w];for(let S of s[v].to){let $=a[S],P=!0;for(let E of $.inputs)if(!this._values[E]){P=!1;break}P&&g.add(S)}}),i.push(...g)}let l=[];for(let f=0;f{"use strict";Cn();we=un(dn());Gr();De();Lt=ee.experimental.fbs,Wn=class r{constructor(e){if(this._attributes=new Map,e!=null){for(let n of e)n instanceof we.onnx.AttributeProto?this._attributes.set(n.name,[r.getValue(n),r.getType(n)]):n instanceof Lt.Attribute&&this._attributes.set(n.name(),[r.getValue(n),r.getType(n)]);if(this._attributes.sizeQe.fromProto(i));if(e instanceof Lt.Attribute)return t.map(i=>Qe.fromOrtTensor(i))}return n===we.onnx.AttributeProto.AttributeType.STRING&&e instanceof we.onnx.AttributeProto?Rn(t):n===we.onnx.AttributeProto.AttributeType.STRINGS&&e instanceof we.onnx.AttributeProto?t.map(Rn):t}static getValueNoCheck(e){return e instanceof we.onnx.AttributeProto?this.getValueNoCheckFromOnnxFormat(e):this.getValueNoCheckFromOrtFormat(e)}static getValueNoCheckFromOnnxFormat(e){switch(e.type){case we.onnx.AttributeProto.AttributeType.FLOAT:return e.f;case 
we.onnx.AttributeProto.AttributeType.INT:return e.i;case we.onnx.AttributeProto.AttributeType.STRING:return e.s;case we.onnx.AttributeProto.AttributeType.TENSOR:return e.t;case we.onnx.AttributeProto.AttributeType.GRAPH:return e.g;case we.onnx.AttributeProto.AttributeType.FLOATS:return e.floats;case we.onnx.AttributeProto.AttributeType.INTS:return e.ints;case we.onnx.AttributeProto.AttributeType.STRINGS:return e.strings;case we.onnx.AttributeProto.AttributeType.TENSORS:return e.tensors;case we.onnx.AttributeProto.AttributeType.GRAPHS:return e.graphs;default:throw new Error(`unsupported attribute type: ${we.onnx.AttributeProto.AttributeType[e.type]}`)}}static getValueNoCheckFromOrtFormat(e){switch(e.type()){case Lt.AttributeType.FLOAT:return e.f();case Lt.AttributeType.INT:return e.i();case Lt.AttributeType.STRING:return e.s();case Lt.AttributeType.TENSOR:return e.t();case Lt.AttributeType.GRAPH:return e.g();case Lt.AttributeType.FLOATS:return e.floatsArray();case Lt.AttributeType.INTS:{let n=[];for(let t=0;t{"use strict";dh();Cn();Os=un(dn());Gr();De();ci=ee.experimental.fbs,Es={from:(r,e)=>new Ps(r,e)},rr=class{constructor(e){this._from=void 0,this._to=[],this.tensor=void 0,this.type=void 0,e&&(this.type=lt.tensorValueTypeFromProto(e.type.tensorType))}get from(){return this._from}get to(){return this._to}},fi=class{constructor(e,n){e instanceof Os.onnx.NodeProto?(this.name=e.name,this.opType=e.opType,this.attributes=new Wn(e.attribute)):e instanceof ci.Node&&(this.name=n??e.name(),this.opType=e.opType(),this.attributes=new Wn(lt.tensorAttributesFromORTFormat(e))),this.inputs=[],this.outputs=[],this.executeNode=!0}},Ps=class{constructor(e,n){if(!e)throw new TypeError("graph is empty");this.buildGraph(e),this.transformGraph(n),this.checkIsAcyclic()}getInputIndices(){return this._allInputIndices}getInputNames(){return this._allInputNames}getOutputIndices(){return this._allOutputIndices}getOutputNames(){return this._allOutputNames}getValues(){return this._allData}getNodes(){return this._nodes}buildGraph(e){if(e instanceof Os.onnx.GraphProto)this.buildGraphFromOnnxFormat(e);else if(e instanceof ci.Graph)this.buildGraphFromOrtFormat(e);else throw new TypeError("Graph type is not supported.")}buildGraphFromOnnxFormat(e){let n=new Map;this._allData=[],this._allInputIndices=[],this._allInputNames=[],this._allOutputIndices=[],this._allOutputNames=[],this._nodes=[];let t=new Map;if(!e.input)throw new Error("missing information in graph: input");let o=[];for(let i of e.input){if(n.has(i.name))throw new Error(`duplicated input name: ${i.name}`);let s=this._allData.push(new rr(i))-1;n.set(i.name,s),o.push(i.name)}if(!e.initializer)throw new Error("missing information in graph: initializer");for(let i of e.initializer){let s=n.get(i.name);if(s===void 0){let a=new rr;a.type={shape:{dims:lt.tensorDimsFromProto(i.dims)},tensorType:lt.tensorDataTypeFromProto(i.dataType)},s=this._allData.push(a)-1,n.set(i.name,s)}this._allData[s]._from=-1,this._allData[s].tensor=Qe.fromProto(i)}for(let i=0;i"u"&&(l=this._allData.push(new rr)-1,n.set(u,l)),s.outputs.push(l),this._allData[l]._from!==void 0)throw new Error(`multiple nodes output to one data value: ${l}`);if(this._allData[l]._from=i,a.opType==="Constant"){if(!a.attribute||a.attribute.length!==1||!a.attribute[0].t)throw new Error("missing attributes or missing tensor value in attributes for this Constant operator");if(!a.output||a.output.length!==1)throw new Error("missing output or incorrect number of outputs for this Constant 
operator");s.outputs.pop(),s.executeNode=!1,this._allData[l]._from=-1,this._allData[l].tensor=Qe.fromProto(a.attribute[0].t)}}}for(let i=0;i"u"){if(u===""&&(a.input.length===3||a.input.length===4)&&a.opType==="Resize")continue;throw new Error(`unrecognized input '${u}' for node: ${a.name}`)}s.inputs.push(l),this._allData[l]._to.push(i)}}return!0}buildGraphFromOrtFormat(e){let n=new Map;this._allData=[],this._allInputIndices=[],this._allInputNames=[],this._allOutputIndices=[],this._allOutputNames=[],this._nodes=[];let t=new Map,o=[];for(let i=0;i"u"&&(f=this._allData.push(new rr)-1,n.set(l,f)),s.outputs.push(f),this._allData[f]._from!==void 0)throw new Error(`multiple nodes output to one data value: ${f}`);if(this._allData[f]._from=i,a.opType()==="Constant"){if(a.attributesLength()!==1||!a.attributes(0).t())throw new Error("missing attributes or missing tensor value in attributes for this Constant operator");if(a.outputsLength()!==1)throw new Error("missing output or incorrect number of outputs for this Constant operator");s.outputs.pop(),s.executeNode=!1,this._allData[f]._from=-1,this._allData[f].tensor=Qe.fromOrtTensor(a.attributes(0).t())}}}for(let i=0;i"u")throw new Error(`unrecognized input '${l}' for node: ${a.name()}`);s.inputs.push(f),this._allData[f]._to.push(i)}}}checkIsAcyclic(){let e=new Set;this._allInputIndices.forEach(o=>{this._allData[o]._to.forEach(s=>{e.add(s)})});let n=Array.from(e),t=new Array(this._nodes.length).fill("white");for(;n.length>0;){let o=n.pop();t[o]==="gray"?t[o]="black":(n.push(o),t[o]="gray",this._nodes[o].outputs.forEach(i=>{let s=this._allData[i];if(typeof s.tensor<"u")throw new Error("node outputs should not be initialized");if(s._from!==o)throw new Error("from property of the Value object doesn't match index of Node being processed");s._to.forEach(a=>{if(t[a]==="gray")throw new Error("model graph is cyclic");t[a]==="white"&&n.push(a)})}))}}transformGraph(e){this.removeAllIdentityNodes(),this.removeAllDropoutNodes(),this.fuseConvActivationNodes(),e&&e.transformGraph(this),this.finalizeGraph()}finalizeGraph(){let e=0,n=new Array(this._nodes.length,0),t=0;for(let o=0;o{this._allData[i]._from=-2});this._nodes.splice(t,this._nodes.length-t);for(let o=0;o=0)i._to[s]=n[i._to[s]];else throw new Error("Trying to update a removed node")}e=0;for(let o=0;o0){let i=-1;this._allData[o].from!==void 0&&this._allData[o].from!==-1?(i=this._nodes[this._allData[o].from].outputs.indexOf(o+e),i!==-1&&(this._nodes[this._allData[o].from].outputs[i]=o)):(i=this._allInputIndices.indexOf(o+e),i!==-1&&(this._allInputIndices[i]=o)),this._allData[o].to.forEach(s=>{i=this._nodes[s].inputs.indexOf(o+e),i!==-1&&(this._nodes[s].inputs[i]=o)}),this._allData[o].to.length===0&&(i=this._allOutputIndices.indexOf(o+e),i!==-1&&(this._allOutputIndices[i]=o))}}}deleteNode(e){let n=this._nodes[e];if(n.outputs.length>1){for(let a=1;a0)throw new Error("Node deletion with more than one output connected to other nodes is not supported. ")}n.executeNode=!1;let t=n.inputs[0],o=n.outputs[0],i=this._allData[o].to;for(let a=0;a0)for(let a of i){let u=this._nodes[a].inputs.indexOf(o);if(u===-1)throw new Error("The Node object doesn't have the output Value in it's 'inputs' property ");this._nodes[a].inputs[u]=t,this._allData[t].to.push(a)}}removeAllDropoutNodes(){let e=0;for(let n of this._nodes){if(n.opType==="Dropout"){if(n.inputs.length!==1)throw new Error("Dropout nodes should only contain one input. 
");if(n.outputs.length!==1&&n.outputs.length!==2)throw new Error("Dropout nodes should contain either 1 or 2 output(s)");if(n.outputs.length===2&&this._allData[n.outputs[1]]._to.length!==0)throw new Error("Dropout nodes's second output should not be referenced by other nodes");this.deleteNode(e)}e++}}removeAllIdentityNodes(){let e=0;for(let n of this._nodes)n.opType==="Identity"&&this.deleteNode(e),e++}isActivation(e){switch(e.opType){case"Relu":case"Sigmoid":case"Clip":return!0;default:return!1}}fuseConvActivationNodes(){for(let e of this._nodes)if(e.opType==="Conv"){let n=this._allData[e.outputs[0]]._to;if(n.length===1&&this.isActivation(this._nodes[n[0]])){let t=this._nodes[n[0]];if(t.opType==="Clip")if(t.inputs.length===1)try{e.attributes.set("activation_params","floats",[t.attributes.getFloat("min"),t.attributes.getFloat("max")])}catch{e.attributes.set("activation_params","floats",[Mr,Vr])}else if(t.inputs.length>=3&&this._allData[t.inputs[1]].tensor!==void 0&&this._allData[t.inputs[2]].tensor!==void 0)e.attributes.set("activation_params","floats",[this._allData[t.inputs[1]].tensor.floatData[0],this._allData[t.inputs[2]].tensor.floatData[0]]);else continue;e.attributes.set("activation","string",t.opType),this.deleteNode(n[0])}}}}});var mh,O_,di,hh=C(()=>{"use strict";ko();ph();Cn();mh=un(dn());De();O_=ee.experimental.fbs,di=class{constructor(){}load(e,n,t){let o;if(!t)try{this.loadFromOnnxFormat(e,n);return}catch(i){if(t!==void 0)throw i;o=i}try{this.loadFromOrtFormat(e,n)}catch(i){throw t!==void 0?i:new Error(`Failed to load model as ONNX format: ${o} -as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e);if(wt.longToNumber(t.irVersion)<3)throw new Error("only support ONNX model with IR_VERSION>=3");this._opsets=t.opsetImport.map(i=>({domain:i.domain,version:wt.longToNumber(i.version)})),this._graph=Es.from(t.graph,n)}loadFromOrtFormat(e,n){let t=new k.ByteBuffer(e),o=O_.InferenceSession.getRootAsInferenceSession(t).model();if(wt.longToNumber(o.irVersion())<3)throw new Error("only support ONNX model with IR_VERSION>=3");this._opsets=[];for(let s=0;s{"use strict";ch();fh();Pt();hh();pi=class{constructor(e={}){this._initialized=!1,this.backendHint=e.backendHint,this.profiler=Eo.create(e.profiler),this.context={profiler:this.profiler,graphInputTypes:[],graphInputDims:[]}}get inputNames(){return this._model.graph.getInputNames()}get outputNames(){return this._model.graph.getOutputNames()}startProfiling(){this.profiler.start()}endProfiling(){this.profiler.stop()}async loadModel(e,n,t){await this.profiler.event("session","Session.loadModel",async()=>{let o=await $s(this.backendHint);if(this.sessionHandler=o.createSessionHandler(this.context),this._model=new di,typeof e=="string"){let i=e.endsWith(".ort");{let a=await(await fetch(e)).arrayBuffer();this.initialize(new Uint8Array(a),i)}}else if(ArrayBuffer.isView(e))this.initialize(e);else{let i=new Uint8Array(e,n||0,t||e.byteLength);this.initialize(i)}})}initialize(e,n){if(this._initialized)throw new Error("already initialized");this.profiler.event("session","Session.initialize",()=>{let t=this.sessionHandler.transformGraph?this.sessionHandler:void 0;this._model.load(e,t,n),this.sessionHandler.onGraphInitialized&&this.sessionHandler.onGraphInitialized(this._model.graph),this.initializeOps(this._model.graph),this._executionPlan=new li(this._model.graph,this._ops,this.profiler)}),this._initialized=!0}async run(e){if(!this._initialized)throw new Error("session not initialized yet");return 
this.profiler.event("session","Session.run",async()=>{let n=this.normalizeAndValidateInputs(e),t=await this._executionPlan.execute(this.sessionHandler,n);return this.createOutput(t)})}normalizeAndValidateInputs(e){let n=this._model.graph.getInputNames();if(Array.isArray(e)){if(e.length!==n.length)throw new Error(`incorrect input array length: expected ${n.length} but got ${e.length}`)}else{if(e.size!==n.length)throw new Error(`incorrect input map size: expected ${n.length} but got ${e.size}`);let t=new Array(e.size),o=0;for(let i=0;i{"use strict";ft();Gr();mi=class{constructor(e){this.session=e;this.inputNames=this.session.inputNames,this.outputNames=this.session.outputNames}async dispose(){}async run(e,n,t){let o=new Map;for(let a in e)if(Object.hasOwnProperty.call(e,a)){let u=e[a];o.set(a,new Qe(u.dims,u.type,void 0,void 0,u.data))}let i=await this.session.run(o),s={};return i.forEach((a,u)=>{s[u]=new it(a.type,a.data,a.dims)}),s}startProfiling(){this.session.startProfiling()}endProfiling(){this.session.endProfiling()}}});var yh={};sn(yh,{onnxjsBackend:()=>E_});var Cs,E_,xh=C(()=>{"use strict";gh();bh();Cs=class{async init(){}async createInferenceSessionHandler(e,n){let t=new pi(n);return typeof e=="string"?await t.loadModel(e):await t.loadModel(e),new mi(t)}},E_=new Cs});var hi=C(()=>{"use strict"});var Th={};sn(Th,{default:()=>C_});var vh,wh,C_,_h=C(()=>{"use strict";ks();Kr();Hn();vh="ort-wasm-proxy-worker",wh=globalThis.self?.name===vh;wh&&(self.onmessage=r=>{let{type:e,in:n}=r.data;try{switch(e){case"init-wasm":gi(n.wasm).then(()=>{bi(n).then(()=>{postMessage({type:e})},t=>{postMessage({type:e,err:t})})},t=>{postMessage({type:e,err:t})});break;case"init-ep":{let{epName:t,env:o}=n;yi(o,t).then(()=>{postMessage({type:e})},i=>{postMessage({type:e,err:i})});break}case"copy-from":{let{buffer:t}=n,o=qn(t);postMessage({type:e,out:o});break}case"create":{let{model:t,options:o}=n;xi(t,o).then(i=>{postMessage({type:e,out:i})},i=>{postMessage({type:e,err:i})});break}case"release":vi(n),postMessage({type:e});break;case"run":{let{sessionId:t,inputIndices:o,inputs:i,outputIndices:s,options:a}=n;wi(t,o,i,s,new Array(s.length).fill(null),a).then(u=>{u.some(l=>l[3]!=="cpu")?postMessage({type:e,err:"Proxy does not support non-cpu tensor location."}):postMessage({type:e,out:u},_i([...i,...u]))},u=>{postMessage({type:e,err:u})});break}case"end-profiling":Ti(n),postMessage({type:e});break;default:}}catch(t){postMessage({type:e,err:t})}});C_=wh?null:r=>new Worker(r??wn,{type:"module",name:vh})});var Sh={};sn(Sh,{default:()=>k_});var Ds,Ih,k_,$h=C(()=>{"use strict";Ih=(Ds=import.meta.url,async function(r={}){function e(){return ae.buffer!=ge.buffer&&Fe(),ge}function n(){return ae.buffer!=ge.buffer&&Fe(),Ie}function t(){return ae.buffer!=ge.buffer&&Fe(),xe}function o(){return ae.buffer!=ge.buffer&&Fe(),se}function i(){return ae.buffer!=ge.buffer&&Fe(),pe}function s(){return ae.buffer!=ge.buffer&&Fe(),ce}function a(){return ae.buffer!=ge.buffer&&Fe(),ut}function u(){return ae.buffer!=ge.buffer&&Fe(),Te}var l,f,c=Object.assign({},r),p=new Promise((d,m)=>{l=d,f=m}),b=typeof window=="object",h=typeof importScripts=="function",g=h&&self.name=="em-pthread";c.mountExternalData=(d,m)=>{(c.Cb||(c.Cb=new Map)).set(d,m)},c.unmountExternalData=()=>{delete c.Cb};var T=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let w=()=>{let d=(y,x,_)=>(...O)=>{let R=Zt,M=x?.();O=y(...O);let j=x?.();return M!==j&&(y=j,_(M),x=_=null),Zt!=R?new 
Promise((Y,ne)=>{ma={resolve:Y,reject:ne}}):O},m=y=>async(...x)=>{try{if(c.Bb)throw Error("Session already started");let _=c.Bb={Zb:x[0],errors:[]},O=await y(...x);if(c.Bb!==_)throw Error("Session mismatch");c.Jb?.flush();let R=_.errors;if(0j),0c._OrtCreateSession,y=>c._OrtCreateSession=y),c._OrtRun=m(d(c._OrtRun,()=>c._OrtRun,y=>c._OrtRun=y)),c._OrtRunWithBinding=m(d(c._OrtRunWithBinding,()=>c._OrtRunWithBinding,y=>c._OrtRunWithBinding=y)),c._OrtBindInput=d(c._OrtBindInput,()=>c._OrtBindInput,y=>c._OrtBindInput=y),w=void 0};c.jsepInit=(d,m)=>{if(w?.(),d==="webgpu"){[c.Jb,c.Qb,c.Ub,c.Kb,c.Tb,c.gb,c.Vb,c.Xb,c.Rb,c.Sb,c.Wb]=m;let y=c.Jb;c.jsepRegisterBuffer=(x,_,O,R)=>y.registerBuffer(x,_,O,R),c.jsepGetBuffer=x=>y.getBuffer(x),c.jsepCreateDownloader=(x,_,O)=>y.createDownloader(x,_,O),c.jsepOnReleaseSession=x=>{y.onReleaseSession(x)},c.jsepOnRunStart=x=>y.onRunStart(x)}};var v,S,$=Object.assign({},c),P="./this.program",E=(d,m)=>{throw m},N="";(b||h)&&(h?N=self.location.href:typeof document<"u"&&document.currentScript&&(N=document.currentScript.src),Ds&&(N=Ds),N=N.startsWith("blob:")?"":N.substr(0,N.replace(/[?#].*/,"").lastIndexOf("/")+1),h&&(S=d=>{var m=new XMLHttpRequest;return m.open("GET",d,!1),m.responseType="arraybuffer",m.send(null),new Uint8Array(m.response)}),v=(d,m,y)=>{var x=new XMLHttpRequest;x.open("GET",d,!0),x.responseType="arraybuffer",x.onload=()=>{x.status==200||x.status==0&&x.response?m(x.response):y()},x.onerror=y,x.send(null)});var z=console.log.bind(console),q=console.error.bind(console),K=z,F=q;if(Object.assign(c,$),$=null,g){let d=function(m){try{var y=m.data,x=y.cmd;if(x==="load"){let _=[];self.onmessage=O=>_.push(O),self.startWorker=()=>{postMessage({cmd:"loaded"});for(let O of _)d(O);self.onmessage=d};for(let O of y.handlers)c[O]&&!c[O].proxy||(c[O]=(...R)=>{postMessage({Ib:"callHandler",hc:O,args:R})},O=="print"&&(K=c[O]),O=="printErr"&&(F=c[O]));ae=y.wasmMemory,Fe(),_e(y.wasmModule)}else if(x==="run"){ya(y.pthread_ptr,0,0,1,0,0),fa(y.pthread_ptr),Nx(),Eu(),$e||(Al(),$e=!0);try{zx(y.start_routine,y.arg)}catch(_){if(_!="unwind")throw _}}else x==="cancel"?an()&&ho(-1):y.target!=="setimmediate"&&(x==="checkMailbox"?$e&&io():x&&(F(`worker: received unknown command ${x}`),F(y)))}catch(_){throw Pl(),_}};var UI=d,_e,$e=!1;F=function(...m){m=m.join(" "),console.error(m)},self.alert=function(...m){postMessage({Ib:"alert",text:m.join(" "),jc:an()})},c.instantiateWasm=(m,y)=>new Promise(x=>{_e=_=>{_=new WebAssembly.Instance(_,Iu()),y(_),x()}}),self.onunhandledrejection=m=>{throw m.reason||m},self.onmessage=d}var ae,qe,Q,ge,Ie,xe,se,pe,ce,ut,V,ie,Te,tt=!1;function Fe(){var d=ae.buffer;c.HEAP8=ge=new Int8Array(d),c.HEAP16=xe=new Int16Array(d),c.HEAPU8=Ie=new Uint8Array(d),c.HEAPU16=se=new Uint16Array(d),c.HEAP32=pe=new Int32Array(d),c.HEAPU32=ce=new Uint32Array(d),c.HEAPF32=ut=new Float32Array(d),c.HEAPF64=Te=new Float64Array(d),c.HEAP64=V=new BigInt64Array(d),c.HEAPU64=ie=new BigUint64Array(d)}if(!g){if(c.wasmMemory)ae=c.wasmMemory;else if(!((ae=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof T))throw F("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"),Error("bad memory");Fe()}var Ke=[],tn=[],mt=[],rt=0,Kt=null,hr=null;function no(){if(--rt==0&&(Kt!==null&&(clearInterval(Kt),Kt=null),hr)){var d=hr;hr=null,d()}}function An(d){throw F(d="Aborted("+d+")"),tt=!0,Q=1,d=new 
WebAssembly.RuntimeError(d+". Build with -sASSERTIONS for more info."),f(d),d}var xu,vu=d=>d.startsWith("data:application/octet-stream;base64,"),wu=d=>d.startsWith("file://");function Tu(d){if(S)return S(d);throw"both async and sync fetching of the wasm failed"}function _u(d,m,y){return function(x){if(b||h){if(typeof fetch=="function"&&!wu(x))return fetch(x,{credentials:"same-origin"}).then(_=>{if(!_.ok)throw`failed to load wasm binary file at '${x}'`;return _.arrayBuffer()}).catch(()=>Tu(x));if(v)return new Promise((_,O)=>{v(x,R=>_(new Uint8Array(R)),O)})}return Promise.resolve().then(()=>Tu(x))}(d).then(x=>WebAssembly.instantiate(x,m)).then(y,x=>{F(`failed to asynchronously prepare wasm: ${x}`),An(x)})}function Iu(){return{a:{wa:Rx,b:Mx,Y:Bu,y:Nu,ma:zu,U:Mu,W:Vu,na:Gu,ka:Uu,da:Wu,ja:Hu,I:qu,V:Ku,S:ju,la:Xu,T:Zu,sa:Vx,C:Ux,M:Wx,L:qx,B:jx,s:Xx,p:Zx,D:Yx,x:o0,N:i0,ra:a0,ga:s0,Q:u0,Z:l0,E:c0,fa,pa:f0,u:d0,A:h0,o:g0,k:y0,c:la,n:x0,j:T0,xa:_0,r:I0,d:S0,v:$0,m:A0,g:P0,l:O0,i:E0,h:C0,e:k0,aa:D0,ba:B0,ca:L0,_:cl,$:fl,P:R0,f:N0,K:z0,F:F0,J:M0,ta:V0,oa:G0,R:U0,t:pl,w:W0,O:H0,va:q0,ua:K0,ha:gl,ia:bl,X:oa,z:yl,H:xl,ea:vl,G:wl,a:ae,qa:Il,q:Z0}}}var ta={1336340:(d,m,y,x)=>{if(c===void 0||!c.Cb)return 1;if((d=ot(d>>>0)).startsWith("./")&&(d=d.substring(2)),!(d=c.Cb.get(d)))return 2;if(x>>>=0,(m>>>=0)+(y>>>=0)>d.byteLength)return 3;try{return n().set(d.subarray(m,m+y),x>>>0),0}catch{return 4}},1336841:()=>{c.Rb()},1336872:()=>{c.Sb()},1336901:()=>{c.Wb()},1336926:d=>c.Qb(d),1336959:d=>c.Ub(d),1336991:(d,m,y)=>{c.Kb(d,m,y,!0)},1337030:(d,m,y)=>{c.Kb(d,m,y)},1337063:()=>typeof wasmOffsetConverter<"u",1337120:d=>{c.gb("Abs",d,void 0)},1337171:d=>{c.gb("Neg",d,void 0)},1337222:d=>{c.gb("Floor",d,void 0)},1337275:d=>{c.gb("Ceil",d,void 0)},1337327:d=>{c.gb("Reciprocal",d,void 0)},1337385:d=>{c.gb("Sqrt",d,void 0)},1337437:d=>{c.gb("Exp",d,void 0)},1337488:d=>{c.gb("Erf",d,void 0)},1337539:d=>{c.gb("Sigmoid",d,void 0)},1337594:(d,m,y)=>{c.gb("HardSigmoid",d,{alpha:m,beta:y})},1337673:d=>{c.gb("Log",d,void 0)},1337724:d=>{c.gb("Sin",d,void 0)},1337775:d=>{c.gb("Cos",d,void 0)},1337826:d=>{c.gb("Tan",d,void 0)},1337877:d=>{c.gb("Asin",d,void 0)},1337929:d=>{c.gb("Acos",d,void 0)},1337981:d=>{c.gb("Atan",d,void 0)},1338033:d=>{c.gb("Sinh",d,void 0)},1338085:d=>{c.gb("Cosh",d,void 0)},1338137:d=>{c.gb("Asinh",d,void 0)},1338190:d=>{c.gb("Acosh",d,void 0)},1338243:d=>{c.gb("Atanh",d,void 0)},1338296:d=>{c.gb("Tanh",d,void 0)},1338348:d=>{c.gb("Not",d,void 0)},1338399:(d,m,y)=>{c.gb("Clip",d,{min:m,max:y})},1338468:d=>{c.gb("Clip",d,void 0)},1338520:(d,m)=>{c.gb("Elu",d,{alpha:m})},1338578:d=>{c.gb("Relu",d,void 0)},1338630:(d,m)=>{c.gb("LeakyRelu",d,{alpha:m})},1338694:(d,m)=>{c.gb("ThresholdedRelu",d,{alpha:m})},1338764:(d,m)=>{c.gb("Cast",d,{to:m})},1338822:d=>{c.gb("Add",d,void 0)},1338873:d=>{c.gb("Sub",d,void 0)},1338924:d=>{c.gb("Mul",d,void 0)},1338975:d=>{c.gb("Div",d,void 0)},1339026:d=>{c.gb("Pow",d,void 0)},1339077:d=>{c.gb("Equal",d,void 0)},1339130:d=>{c.gb("Greater",d,void 0)},1339185:d=>{c.gb("GreaterOrEqual",d,void 0)},1339247:d=>{c.gb("Less",d,void 0)},1339299:d=>{c.gb("LessOrEqual",d,void 
0)},1339358:(d,m,y,x,_)=>{c.gb("ReduceMean",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1339517:(d,m,y,x,_)=>{c.gb("ReduceMax",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1339675:(d,m,y,x,_)=>{c.gb("ReduceMin",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1339833:(d,m,y,x,_)=>{c.gb("ReduceProd",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1339992:(d,m,y,x,_)=>{c.gb("ReduceSum",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340150:(d,m,y,x,_)=>{c.gb("ReduceL1",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340307:(d,m,y,x,_)=>{c.gb("ReduceL2",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340464:(d,m,y,x,_)=>{c.gb("ReduceLogSum",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340625:(d,m,y,x,_)=>{c.gb("ReduceSumSquare",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340789:(d,m,y,x,_)=>{c.gb("ReduceLogSumExp",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340953:d=>{c.gb("Where",d,void 0)},1341006:(d,m,y)=>{c.gb("Transpose",d,{perm:m?Array.from(i().subarray(m>>>0,y>>>0)):[]})},1341114:(d,m,y,x)=>{c.gb("DepthToSpace",d,{blocksize:m,mode:ot(y),format:x?"NHWC":"NCHW"})},1341247:(d,m,y,x)=>{c.gb("DepthToSpace",d,{blocksize:m,mode:ot(y),format:x?"NHWC":"NCHW"})},1341380:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe)=>{c.gb("ConvTranspose",d,{format:j?"NHWC":"NCHW",autoPad:m,dilations:[y],group:x,kernelShape:[_],pads:[O,R],strides:[M],wIsConst:()=>!!e()[Y>>>0],outputPadding:ne?Array.from(i().subarray(ne>>>0,Ae>>>0)):[],outputShape:Ce?Array.from(i().subarray(Ce>>>0,L>>>0)):[],activation:ot(fe)})},1341781:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L)=>{c.gb("ConvTranspose",d,{format:M?"NHWC":"NCHW",autoPad:m,dilations:Array.from(i().subarray(y>>>0,2+(y>>>0)>>>0)),group:x,kernelShape:Array.from(i().subarray(_>>>0,2+(_>>>0)>>>0)),pads:Array.from(i().subarray(O>>>0,4+(O>>>0)>>>0)),strides:Array.from(i().subarray(R>>>0,2+(R>>>0)>>>0)),wIsConst:()=>!!e()[j>>>0],outputPadding:Y?Array.from(i().subarray(Y>>>0,ne>>>0)):[],outputShape:Ae?Array.from(i().subarray(Ae>>>0,Ce>>>0)):[],activation:ot(L)})},1342346:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe)=>{c.gb("ConvTranspose",d,{format:j?"NHWC":"NCHW",autoPad:m,dilations:[y],group:x,kernelShape:[_],pads:[O,R],strides:[M],wIsConst:()=>!!e()[Y>>>0],outputPadding:ne?Array.from(i().subarray(ne>>>0,Ae>>>0)):[],outputShape:Ce?Array.from(i().subarray(Ce>>>0,L>>>0)):[],activation:ot(fe)})},1342747:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L)=>{c.gb("ConvTranspose",d,{format:M?"NHWC":"NCHW",autoPad:m,dilations:Array.from(i().subarray(y>>>0,2+(y>>>0)>>>0)),group:x,kernelShape:Array.from(i().subarray(_>>>0,2+(_>>>0)>>>0)),pads:Array.from(i().subarray(O>>>0,4+(O>>>0)>>>0)),strides:Array.from(i().subarray(R>>>0,2+(R>>>0)>>>0)),wIsConst:()=>!!e()[j>>>0],outputPadding:Y?Array.from(i().subarray(Y>>>0,ne>>>0)):[],outputShape:Ae?Array.from(i().subarray(Ae>>>0,Ce>>>0)):[],activation:ot(L)})},1343312:(d,m)=>{c.gb("GlobalAveragePool",d,{format:m?"NHWC":"NCHW"})},1343403:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe,Le)=>{c.gb("AveragePool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,j],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1343687:(d,m)=>{c.
gb("GlobalAveragePool",d,{format:m?"NHWC":"NCHW"})},1343778:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe,Le)=>{c.gb("AveragePool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,j],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1344062:(d,m)=>{c.gb("GlobalMaxPool",d,{format:m?"NHWC":"NCHW"})},1344149:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe,Le)=>{c.gb("MaxPool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,j],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1344429:(d,m)=>{c.gb("GlobalMaxPool",d,{format:m?"NHWC":"NCHW"})},1344516:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe,Le)=>{c.gb("MaxPool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,j],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1344796:(d,m,y,x,_)=>{c.gb("Gemm",d,{alpha:m,beta:y,transA:x,transB:_})},1344900:d=>{c.gb("MatMul",d,void 0)},1344954:(d,m,y,x)=>{c.gb("ArgMax",d,{keepDims:!!m,selectLastIndex:!!y,axis:x})},1345062:(d,m,y,x)=>{c.gb("ArgMin",d,{keepDims:!!m,selectLastIndex:!!y,axis:x})},1345170:(d,m)=>{c.gb("Softmax",d,{axis:m})},1345233:(d,m)=>{c.gb("Concat",d,{axis:m})},1345293:(d,m,y,x,_)=>{c.gb("Split",d,{axis:m,numOutputs:y,splitSizes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1345433:d=>{c.gb("Expand",d,void 0)},1345487:(d,m)=>{c.gb("Gather",d,{axis:Number(m)})},1345558:(d,m)=>{c.gb("GatherElements",d,{axis:Number(m)})},1345637:(d,m,y,x,_,O,R,M,j,Y,ne)=>{c.gb("Resize",d,{antialias:m,axes:y?Array.from(i().subarray(y>>>0,x>>>0)):[],coordinateTransformMode:ot(_),cubicCoeffA:O,excludeOutside:R,extrapolationValue:M,keepAspectRatioPolicy:ot(j),mode:ot(Y),nearestMode:ot(ne)})},1345983:(d,m,y,x,_,O,R)=>{c.gb("Slice",d,{starts:m?Array.from(i().subarray(m>>>0,y>>>0)):[],ends:x?Array.from(i().subarray(x>>>0,_>>>0)):[],axes:O?Array.from(i().subarray(O>>>0,R>>>0)):[]})},1346199:d=>{c.gb("Tile",d,void 0)},1346251:(d,m,y)=>{c.gb("InstanceNormalization",d,{epsilon:m,format:y?"NHWC":"NCHW"})},1346365:(d,m,y)=>{c.gb("InstanceNormalization",d,{epsilon:m,format:y?"NHWC":"NCHW"})},1346479:d=>{c.gb("Range",d,void 0)},1346532:(d,m)=>{c.gb("Einsum",d,{equation:ot(m)})},1346613:(d,m,y,x,_)=>{c.gb("Pad",d,{mode:m,value:y,pads:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1346740:(d,m,y,x,_,O)=>{c.gb("BatchNormalization",d,{epsilon:m,momentum:y,spatial:!!_,trainingMode:!!x,format:O?"NHWC":"NCHW"})},1346909:(d,m,y,x,_,O)=>{c.gb("BatchNormalization",d,{epsilon:m,momentum:y,spatial:!!_,trainingMode:!!x,format:O?"NHWC":"NCHW"})},1347078:(d,m,y)=>{c.gb("CumSum",d,{exclusive:Number(m),reverse:Number(y)})},1347175:(d,m,y,x,_,O,R,M,j)=>{c.gb("Attention",d,{numHeads:m,isUnidirectional:y,maskFilterValue:x,scale:_,doRotary:O,qkvHiddenSizes:R?Array.from(i().subarray(Number(M)>>>0,Number(M)+R>>>0)):[],pastPresentShareBuffer:!!j})},1347447:d=>{c.gb("BiasAdd",d,void 0)},1347502:d=>{c.gb("BiasSplitGelu",d,void 0)},1347563:d=>{c.gb("FastGelu",d,void 0)},1347619:(d,m,y,x,_,O,R,M,j,Y,ne,Ae,Ce,L,fe,Le)=>{c.gb("Conv",d,{format:Ae?"NHWC":"NCHW",auto_pad:m,dilations:y?Array.from(i().subarray(y>>>0,x>>>0)):[],group:_,kernel_shape:O?Array.from(i().subarray(O>>>0,R>>>0)):[],pads:M?Array.from(i().subarray(M>>>0,j>>>0)):[],strides:Y?Array.from(i().subarray(Y>>>0,ne>>>0)):[],w_is_const:()=>!!e()[Ce>>>0],activation:ot(L),activation_params:fe?Array.from(a().subarray(fe>>>0,Le>>>0)):[]})},1348115:d=>{c.gb("Gelu",d,void 
0)},1348167:(d,m,y,x)=>{c.gb("GroupQueryAttention",d,{numHeads:m,kvNumHeads:y,scale:x})},1348280:(d,m,y,x)=>{c.gb("LayerNormalization",d,{axis:m,epsilon:y,simplified:!!x})},1348391:(d,m,y,x)=>{c.gb("LayerNormalization",d,{axis:m,epsilon:y,simplified:!!x})},1348502:(d,m,y,x,_,O)=>{c.gb("MatMulNBits",d,{k:m,n:y,accuracyLevel:x,bits:_,blockSize:O})},1348629:(d,m,y,x,_,O)=>{c.gb("MultiHeadAttention",d,{numHeads:m,isUnidirectional:y,maskFilterValue:x,scale:_,doRotary:O})},1348788:(d,m)=>{c.gb("QuickGelu",d,{alpha:m})},1348852:(d,m,y,x,_)=>{c.gb("RotaryEmbedding",d,{interleaved:!!m,numHeads:y,rotaryEmbeddingDim:x,scale:_})},1348991:(d,m,y)=>{c.gb("SkipLayerNormalization",d,{epsilon:m,simplified:!!y})},1349093:d=>{c.Vb(d)},1349127:(d,m)=>c.Xb(d,m,c.Bb.Zb,c.Bb.errors),1349239:(d,m,y)=>{c.gb("SkipLayerNormalization",d,{epsilon:m,simplified:!!y})}};function Rx(d,m,y){return il(async()=>{await c.Tb(d,m,y)})}function ra(d){this.name="ExitStatus",this.message=`Program terminated with exit(${d})`,this.status=d}var na=d=>{d.terminate(),d.onmessage=()=>{}},Su=d=>{gr.length==0&&(ku(),Cu(gr[0]));var m=gr.pop();if(!m)return 6;Cr.push(m),jt[d.xb]=m,m.xb=d.xb;var y={cmd:"run",start_routine:d.$b,arg:d.Mb,pthread_ptr:d.xb};return m.postMessage(y,d.ec),0},Er=0,We=(d,m,...y)=>{for(var x=2*y.length,_=wa(),O=va(8*x),R=O>>>3,M=0;M>>0]=j)}return d=Ol(d,0,x,O,m),go(_),d};function $u(d){if(g)return We(0,1,d);if(Q=d,!(0{if(Q=d,g)throw Au(d),"unwind";$u(d)},gr=[],Cr=[],Pu=[],jt={},Ou=d=>{var m=d.xb;delete jt[m],gr.push(d),Cr.splice(Cr.indexOf(d),1),d.xb=0,xa(m)};function Eu(){Pu.forEach(d=>d())}var Cu=d=>new Promise(m=>{d.onmessage=_=>{var O=(_=_.data).cmd;if(_.targetThread&&_.targetThread!=an()){var R=jt[_.targetThread];R?R.postMessage(_,_.transferList):F(`Internal error! Worker sent a message "${O}" to target pthread ${_.targetThread}, but that thread no longer exists!`)}else O==="checkMailbox"?io():O==="spawnThread"?Su(_):O==="cleanupThread"?Ou(jt[_.thread]):O==="killThread"?(_=_.thread,O=jt[_],delete jt[_],na(O),xa(_),Cr.splice(Cr.indexOf(O),1),O.xb=0):O==="cancelThread"?jt[_.thread].postMessage({cmd:"cancel"}):O==="loaded"?(d.loaded=!0,m(d)):O==="alert"?alert(`Thread ${_.threadId}: ${_.text}`):_.target==="setimmediate"?d.postMessage(_):O==="callHandler"?c[_.handler](..._.args):O&&F(`worker sent an unknown command ${O}`)},d.onerror=_=>{throw F(`worker sent an error! 
${_.filename}:${_.lineno}: ${_.message}`),_};var y,x=[];for(y of["onExit"])c.hasOwnProperty(y)&&x.push(y);d.postMessage({cmd:"load",handlers:x,wasmMemory:ae,wasmModule:qe})});function ku(){var d=new Worker(new URL(import.meta.url),{type:"module",workerData:"em-pthread",name:"em-pthread"});gr.push(d)}var oo=d=>{for(;0{var d=an(),m=s()[d+52>>>2>>>0];d=s()[d+56>>>2>>>0],Cl(m,m-d),go(m)},zx=(d,m)=>{Er=0,d=kl(d,m),0>>=0);throw m>>>=0,y>>>=0,s()[x.Fb+16>>>2>>>0]=0,s()[x.Fb+4>>>2>>>0]=m,s()[x.Fb+8>>>2>>>0]=y,d}function Du(d,m,y,x){return g?We(2,1,d,m,y,x):Bu(d,m,y,x)}function Bu(d,m,y,x){if(d>>>=0,m>>>=0,y>>>=0,x>>>=0,T===void 0)return F("Current environment does not support SharedArrayBuffer, pthreads are not available!"),6;var _=[];return g&&_.length===0?Du(d,m,y,x):(d={$b:y,xb:d,Mb:x,ec:_},g?(d.Ib="spawnThread",postMessage(d,_),0):Su(d))}var Lu=typeof TextDecoder<"u"?new TextDecoder("utf8"):void 0,Ru=(d,m,y)=>{var x=(m>>>=0)+y;for(y=m;d[y]&&!(y>=x);)++y;if(16(_=(240&_)==224?(15&_)<<12|O<<6|R:(7&_)<<18|O<<12|R<<6|63&d[m++])?x+=String.fromCharCode(_):(_-=65536,x+=String.fromCharCode(55296|_>>10,56320|1023&_))}}else x+=String.fromCharCode(_)}return x},ot=(d,m)=>(d>>>=0)?Ru(n(),d,m):"";function Nu(d,m,y){return g?We(3,1,d,m,y):0}function zu(d,m){if(g)return We(4,1,d,m)}var ia=d=>{for(var m=0,y=0;y=x?m++:2047>=x?m+=2:55296<=x&&57343>=x?(m+=4,++y):m+=3}return m},Fu=(d,m,y,x)=>{if(!(0>>=0;x=y+x-1;for(var O=0;O=R&&(R=65536+((1023&R)<<10)|1023&d.charCodeAt(++O)),127>=R){if(y>=x)break;m[y++>>>0]=R}else{if(2047>=R){if(y+1>=x)break;m[y++>>>0]=192|R>>6}else{if(65535>=R){if(y+2>=x)break;m[y++>>>0]=224|R>>12}else{if(y+3>=x)break;m[y++>>>0]=240|R>>18,m[y++>>>0]=128|R>>12&63}m[y++>>>0]=128|R>>6&63}m[y++>>>0]=128|63&R}}return m[y>>>0]=0,y-_},rn=(d,m,y)=>Fu(d,n(),m,y);function Mu(d,m){if(g)return We(5,1,d,m)}function Vu(d,m,y){if(g)return We(6,1,d,m,y)}function Gu(d,m,y){return g?We(7,1,d,m,y):0}function Uu(d,m){if(g)return We(8,1,d,m)}function Wu(d,m,y){if(g)return We(9,1,d,m,y)}function Hu(d,m,y,x){if(g)return We(10,1,d,m,y,x)}function qu(d,m,y,x){if(g)return We(11,1,d,m,y,x)}function Ku(d,m,y,x){if(g)return We(12,1,d,m,y,x)}function ju(d){if(g)return We(13,1,d)}function Xu(d,m){if(g)return We(14,1,d,m)}function Zu(d,m,y){if(g)return We(15,1,d,m,y)}var Yu,br,Vx=()=>{An("")},Xt=d=>{for(var m="";n()[d>>>0];)m+=Yu[n()[d++>>>0]];return m},aa={},sa={},Gx={};function lr(d,m,y={}){if(!("argPackAdvance"in m))throw new TypeError("registerType registeredInstance requires argPackAdvance");return function(x,_,O={}){var R=_.name;if(!x)throw new br(`type "${R}" must have a positive integer typeid pointer`);if(sa.hasOwnProperty(x)){if(O.Ob)return;throw new br(`Cannot register type '${R}' twice`)}sa[x]=_,delete Gx[x],aa.hasOwnProperty(x)&&(_=aa[x],delete aa[x],_.forEach(M=>M()))}(d,m,y)}var Ju=(d,m,y)=>{switch(m){case 1:return y?x=>e()[x>>>0]:x=>n()[x>>>0];case 2:return y?x=>t()[x>>>1>>>0]:x=>o()[x>>>1>>>0];case 4:return y?x=>i()[x>>>2>>>0]:x=>s()[x>>>2>>>0];case 8:return y?x=>V[x>>>3]:x=>ie[x>>>3];default:throw new TypeError(`invalid integer width (${m}): ${d}`)}};function Ux(d,m,y){y>>>=0,lr(d>>>=0,{name:m=Xt(m>>>0),fromWireType:x=>x,toWireType:function(x,_){if(typeof _!="bigint"&&typeof _!="number")throw _=_===null?"null":(x=typeof _)=="object"||x==="array"||x==="function"?_.toString():""+_,new TypeError(`Cannot convert "${_}" to ${this.name}`);return typeof _=="number"&&(_=BigInt(_)),_},argPackAdvance:yr,readValueFromPointer:Ju(m,y,m.indexOf("u")==-1),Ab:null})}var yr=8;function 
Wx(d,m,y,x){lr(d>>>=0,{name:m=Xt(m>>>0),fromWireType:function(_){return!!_},toWireType:function(_,O){return O?y:x},argPackAdvance:yr,readValueFromPointer:function(_){return this.fromWireType(n()[_>>>0])},Ab:null})}var ua=[],cr=[];function la(d){9<(d>>>=0)&&--cr[d+1]==0&&(cr[d]=void 0,ua.push(d))}var $t=d=>{if(!d)throw new br("Cannot use deleted val. handle = "+d);return cr[d]},At=d=>{switch(d){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:let m=ua.pop()||cr.length;return cr[m]=d,cr[m+1]=1,m}};function ca(d){return this.fromWireType(s()[d>>>2>>>0])}var Hx={name:"emscripten::val",fromWireType:d=>{var m=$t(d);return la(d),m},toWireType:(d,m)=>At(m),argPackAdvance:yr,readValueFromPointer:ca,Ab:null};function qx(d){return lr(d>>>0,Hx)}var Kx=(d,m)=>{switch(m){case 4:return function(y){return this.fromWireType(a()[y>>>2>>>0])};case 8:return function(y){return this.fromWireType(u()[y>>>3>>>0])};default:throw new TypeError(`invalid float width (${m}): ${d}`)}};function jx(d,m,y){y>>>=0,lr(d>>>=0,{name:m=Xt(m>>>0),fromWireType:x=>x,toWireType:(x,_)=>_,argPackAdvance:yr,readValueFromPointer:Kx(m,y),Ab:null})}function Xx(d,m,y,x,_){if(d>>>=0,y>>>=0,m=Xt(m>>>0),_===-1&&(_=4294967295),_=M=>M,x===0){var O=32-8*y;_=M=>M<>>O}var R=m.includes("unsigned")?function(M,j){return j>>>0}:function(M,j){return j};lr(d,{name:m,fromWireType:_,toWireType:R,argPackAdvance:yr,readValueFromPointer:Ju(m,y,x!==0),Ab:null})}function Zx(d,m,y){function x(O){var R=s()[O>>>2>>>0];return O=s()[O+4>>>2>>>0],new _(e().buffer,O,R)}var _=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][m];lr(d>>>=0,{name:y=Xt(y>>>0),fromWireType:x,argPackAdvance:yr,readValueFromPointer:x},{Ob:!0})}function Yx(d,m){d>>>=0;var y=(m=Xt(m>>>0))==="std::string";lr(d,{name:m,fromWireType:function(x){var _=s()[x>>>2>>>0],O=x+4;if(y)for(var R=O,M=0;M<=_;++M){var j=O+M;if(M==_||n()[j>>>0]==0){if(R=ot(R,j-R),Y===void 0)var Y=R;else Y+=String.fromCharCode(0),Y+=R;R=j+1}}else{for(Y=Array(_),M=0;M<_;++M)Y[M]=String.fromCharCode(n()[O+M>>>0]);Y=Y.join("")}return Yt(x),Y},toWireType:function(x,_){_ instanceof ArrayBuffer&&(_=new Uint8Array(_));var O=typeof _=="string";if(!(O||_ instanceof Uint8Array||_ instanceof Uint8ClampedArray||_ instanceof Int8Array))throw new br("Cannot pass non-string to std::string");var R=y&&O?ia(_):_.length,M=mo(4+R+1),j=M+4;if(s()[M>>>2>>>0]=R,y&&O)rn(_,j,R+1);else if(O)for(O=0;O>>0]=Y}else for(O=0;O>>0]=_[O];return x!==null&&x.push(Yt,M),M},argPackAdvance:yr,readValueFromPointer:ca,Ab(x){Yt(x)}})}var Qu=typeof TextDecoder<"u"?new TextDecoder("utf-16le"):void 0,Jx=(d,m)=>{for(var y=d>>1,x=y+m/2;!(y>=x)&&o()[y>>>0];)++y;if(32<(y<<=1)-d&&Qu)return Qu.decode(n().slice(d,y));for(y="",x=0;!(x>=m/2);++x){var _=t()[d+2*x>>>1>>>0];if(_==0)break;y+=String.fromCharCode(_)}return y},Qx=(d,m,y)=>{if(y??=2147483647,2>y)return 0;var x=m;y=(y-=2)<2*d.length?y/2:d.length;for(var _=0;_>>1>>>0]=O,m+=2}return t()[m>>>1>>>0]=0,m-x},e0=d=>2*d.length,t0=(d,m)=>{for(var y=0,x="";!(y>=m/4);){var _=i()[d+4*y>>>2>>>0];if(_==0)break;++y,65536<=_?(_-=65536,x+=String.fromCharCode(55296|_>>10,56320|1023&_)):x+=String.fromCharCode(_)}return x},r0=(d,m,y)=>{if(m>>>=0,y??=2147483647,4>y)return 0;var x=m;y=x+y-4;for(var _=0;_=O&&(O=65536+((1023&O)<<10)|1023&d.charCodeAt(++_)),i()[m>>>2>>>0]=O,(m+=4)+4>y)break}return i()[m>>>2>>>0]=0,m-x},n0=d=>{for(var m=0,y=0;y=x&&++y,m+=4}return m};function o0(d,m,y){if(d>>>=0,m>>>=0,y=Xt(y>>>=0),m===2)var 
x=Jx,_=Qx,O=e0,R=M=>o()[M>>>1>>>0];else m===4&&(x=t0,_=r0,O=n0,R=M=>s()[M>>>2>>>0]);lr(d,{name:y,fromWireType:M=>{for(var j,Y=s()[M>>>2>>>0],ne=M+4,Ae=0;Ae<=Y;++Ae){var Ce=M+4+Ae*m;Ae!=Y&&R(Ce)!=0||(ne=x(ne,Ce-ne),j===void 0?j=ne:(j+=String.fromCharCode(0),j+=ne),ne=Ce+m)}return Yt(M),j},toWireType:(M,j)=>{if(typeof j!="string")throw new br(`Cannot pass non-string to C++ string type ${y}`);var Y=O(j),ne=mo(4+Y+m);return s()[ne>>>2>>>0]=Y/m,_(j,ne+4,Y+m),M!==null&&M.push(Yt,ne),ne},argPackAdvance:yr,readValueFromPointer:ca,Ab(M){Yt(M)}})}function i0(d,m){lr(d>>>=0,{Pb:!0,name:m=Xt(m>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var a0=()=>1;function s0(d){ya(d>>>0,!h,1,!b,131072,!1),Eu()}var el=d=>{if(!tt)try{if(d(),!(0>>=0,typeof Atomics.fc=="function"&&(Atomics.fc(i(),d>>>2,d).value.then(io),d+=128,Atomics.store(i(),d>>>2,1))}var io=()=>{var d=an();d&&(fa(d),el(El))};function u0(d,m){(d>>>=0)==m>>>0?setTimeout(io):g?postMessage({targetThread:d,cmd:"checkMailbox"}):(d=jt[d])&&d.postMessage({cmd:"checkMailbox"})}var da=[];function l0(d,m,y,x,_){for(m>>>=0,x/=2,da.length=x,y=_>>>0>>>3,_=0;_>>0];return(m?ta[m]:Y0[d])(...da)}function c0(d){d>>>=0,g?postMessage({cmd:"cleanupThread",thread:d}):Ou(jt[d])}function f0(d){}var pa=(d,m)=>{var y=sa[d];if(y===void 0)throw d=$l(d),y=Xt(d),Yt(d),new br(`${m} has unknown type ${y}`);return y},tl=(d,m,y)=>{var x=[];return d=d.toWireType(x,y),x.length&&(s()[m>>>2>>>0]=At(x)),d};function d0(d,m,y){return m>>>=0,y>>>=0,d=$t(d>>>0),m=pa(m,"emval::as"),tl(m,y,d)}var ao=d=>{try{d()}catch(m){An(m)}},xr=0,Zt=null,rl=0,so=[],nl={},ol={},p0=0,ma=null,m0=[];function il(d){return function(m){if(!tt){if(xr===0){var y=!1,x=!1;m((_=0)=>{if(!tt&&(rl=_,y=!0,x)){xr=2,ao(()=>Ll(Zt)),typeof Browser<"u"&&Browser.Gb.Nb&&Browser.Gb.resume(),_=!1;try{var O=function(){var j=i()[Zt+8>>>2>>>0];return j=te[ol[j]],--Er,j()}()}catch(j){O=j,_=!0}var R=!1;if(!Zt){var M=ma;M&&(ma=null,(_?M.reject:M.resolve)(O),R=!0)}if(_&&!R)throw O}}),x=!0,y||(xr=1,Zt=function(){var _=mo(65548),O=_+12;s()[_>>>2>>>0]=O,s()[_+4>>>2>>>0]=O+65536,O=so[0];var R=nl[O];return R===void 0&&(R=p0++,nl[O]=R,ol[R]=O),O=R,i()[_+8>>>2>>>0]=O,_}(),typeof Browser<"u"&&Browser.Gb.Nb&&Browser.Gb.pause(),ao(()=>Dl(Zt)))}else xr===2?(xr=0,ao(Rl),Yt(Zt),Zt=null,m0.forEach(el)):An(`invalid state: ${xr}`);return rl}}(m=>{d().then(m)})}function h0(d){return d>>>=0,il(()=>(d=$t(d)).then(At))}var uo=[];function g0(d,m,y,x){return y>>>=0,x>>>=0,(d=uo[d>>>0])(null,m=$t(m>>>0),y,x)}var b0={},lo=d=>{var m=b0[d];return m===void 0?Xt(d):m};function y0(d,m,y,x,_){return y>>>=0,x>>>=0,_>>>=0,(d=uo[d>>>0])(m=$t(m>>>0),m[y=lo(y)],x,_)}var al=()=>typeof globalThis=="object"?globalThis:Function("return this")();function x0(d){return(d>>>=0)==0?At(al()):(d=lo(d),At(al()[d]))}var v0=d=>{var m=uo.length;return uo.push(d),m},w0=(d,m)=>{for(var y=Array(d),x=0;x>>2>>>0],"parameter "+x);return y},sl=(d,m)=>Object.defineProperty(m,"name",{value:d});function T0(d,m,y){var x=(m=w0(d,m>>>0)).shift();d--;var _=`return function (obj, func, destructorsRef, args) { -`,O=0,R=[];y===0&&R.push("obj");for(var M=["retType"],j=[x],Y=0;Ythis.isTimerResultAvailable(e)),this.getTimerResult(e)}async createAndWaitForFence(){let e=this.createFence(this.gl);return this.pollFence(e)}createFence(e){let n,t=e,o=t.fenceSync(t.SYNC_GPU_COMMANDS_COMPLETE,0);return e.flush(),o===null?n=()=>!0:n=()=>{let i=t.clientWaitSync(o,0,0);return i===t.ALREADY_SIGNALED||i===t.CONDITION_SATISFIED},{query:o,isFencePassed:n}}async pollFence(e){return new 
Promise(n=>{this.addItemToPoll(()=>e.isFencePassed(),()=>n())})}pollItems(){let e=S_(this.itemsToPoll.map(n=>n.isDoneFn));for(let n=0;n<=e;++n){let{resolveFn:t}=this.itemsToPoll[n];t()}this.itemsToPoll=this.itemsToPoll.slice(e+1)}async addItemToPoll(e,n){this.itemsToPoll.push({isDoneFn:e,resolveFn:n}),!(this.itemsToPoll.length>1)&&await Za(()=>(this.pollItems(),this.itemsToPoll.length===0))}}});function $s(r){let e;if((!r||r==="webgl2")&&"webgl2"in wn?e=wn.webgl2:(!r||r==="webgl")&&"webgl"in wn&&(e=wn.webgl),!e)try{let t=A_();e=ah(t,r)}catch{let o=$_();e=ah(o,r)}r=r||e.version===1?"webgl":"webgl2";let n=e.gl;return wn[r]=e,n.isContextLost()?(delete wn[r],$s(r)):(n.disable(n.DEPTH_TEST),n.disable(n.STENCIL_TEST),n.disable(n.BLEND),n.disable(n.DITHER),n.disable(n.POLYGON_OFFSET_FILL),n.disable(n.SAMPLE_COVERAGE),n.enable(n.SCISSOR_TEST),n.enable(n.CULL_FACE),n.cullFace(n.BACK),e)}function ah(r,e){let n={alpha:!1,depth:!1,antialias:!1,stencil:!1,preserveDrawingBuffer:!1,premultipliedAlpha:!1,failIfMajorPerformanceCaveat:!1},t,o=n;if((!e||e==="webgl2")&&(t=r.getContext("webgl2",o),t))try{return new Un(t,2)}catch(i){Re.warning("GlContextFactory",`failed to create WebGLContext using contextId 'webgl2'. Error: ${i}`)}if((!e||e==="webgl")&&(t=r.getContext("webgl",o)||r.getContext("experimental-webgl",o),t))try{return new Un(t,1)}catch(i){Re.warning("GlContextFactory",`failed to create WebGLContext using contextId 'webgl' or 'experimental-webgl'. Error: ${i}`)}throw new Error("WebGL is not supported")}function $_(){if(typeof document>"u")throw new TypeError("failed to create canvas: document is not supported");let r=document.createElement("canvas");return r.width=1,r.height=1,r}function A_(){if(typeof OffscreenCanvas>"u")throw new TypeError("failed to create offscreen canvas: OffscreenCanvas is not supported");return new OffscreenCanvas(1,1)}var wn,sh=C(()=>{"use strict";Pt();ih();wn={}});var ui,uh=C(()=>{"use strict";ft();Pt();oh();sh();ui=class{get contextId(){return le.webgl.contextId}set contextId(e){le.webgl.contextId=e}get matmulMaxBatchSize(){return le.webgl.matmulMaxBatchSize}set matmulMaxBatchSize(e){le.webgl.matmulMaxBatchSize=e}get textureCacheMode(){return le.webgl.textureCacheMode}set textureCacheMode(e){le.webgl.textureCacheMode=e}get pack(){return le.webgl.pack}set pack(e){le.webgl.pack=e}get async(){return le.webgl.async}set async(e){le.webgl.async=e}initialize(){try{return this.glContext=$s(this.contextId),typeof this.matmulMaxBatchSize!="number"&&(this.matmulMaxBatchSize=16),typeof this.textureCacheMode!="string"&&(this.textureCacheMode="full"),typeof this.pack!="boolean"&&(this.pack=!1),typeof this.async!="boolean"&&(this.async=!1),Re.setWithEnv(le),le.webgl.context||Object.defineProperty(le.webgl,"context",{value:this.glContext.gl}),Re.verbose("WebGLBackend",`Created WebGLContext: ${typeof this.glContext} with matmulMaxBatchSize: ${this.matmulMaxBatchSize}; textureCacheMode: ${this.textureCacheMode}; pack: ${this.pack}; async: ${this.async}.`),!0}catch(e){return Re.warning("WebGLBackend",`Unable to initialize WebGLBackend. 
${e}`),!1}}createSessionHandler(e){return new si(this,e)}dispose(){this.glContext.dispose()}}});async function As(r){if(r){let e=typeof r=="string"?[r]:r;for(let n of e){let t=lh.get(n);if(t)return t;let o=await O_(n);if(o)return o}}else return As(["webgl"]);throw new Error("no available backend to use")}async function O_(r){let e=P_;if(typeof e[r]<"u"&&E_(e[r])){let n=e[r],t=n.initialize();if(typeof t=="object"&&"then"in t&&(t=await t),t)return lh.set(r,n),n}}function E_(r){let e=r;return"initialize"in e&&typeof e.initialize=="function"&&"createSessionHandler"in e&&typeof e.createSessionHandler=="function"&&"dispose"in e&&typeof e.dispose=="function"}var lh,P_,ch=C(()=>{"use strict";uh();lh=new Map,P_={webgl:new ui}});var Ps,li,fh=C(()=>{"use strict";Pt();Ps=class{constructor(e,n){this.op=e;this.node=n}},li=class{constructor(e,n,t){this.graph=e;this.profiler=t;this.initialize(n)}initialize(e){this.profiler.event("session","ExecutionPlan.initialize",()=>{let n=this.graph.getNodes();if(n.length!==e.length)throw new Error("The size of nodes and OPs do not match.");this._ops=e.map((t,o)=>new Ps(t,n[o])),this.reset(),this._starter=[],this._ops.forEach((t,o)=>{let i=!0;for(let s of t.node.inputs)if(!this._values[s]&&this.graph.getInputIndices().indexOf(s)===-1){i=!1;break}i&&this._starter.push(o)})})}reset(){this._values=this.graph.getValues().map(e=>e.tensor)}async execute(e,n){return this.profiler.event("session","ExecutionPlan.execute",async()=>{this.reset();let t=e.createInferenceHandler(),o=this.graph.getInputIndices();if(n.length!==o.length)throw new Error(`number of input tensors don't match the number of inputs to the model: actual: ${n.length} expected: ${o.length}`);n.forEach((f,c)=>{let p=o[c];this._values[p]=f});let i=this._starter.slice(0),s=this.graph.getValues(),a=this.graph.getNodes(),u=0;for(;uthis._values[T]);if(p.indexOf(void 0)!==-1)throw new Error(`unresolved input detected: op: ${c.node}`);let b=p;Re.verbose("ExecPlan",`Running op:${c.node.name} (${b.map((T,w)=>`'${c.node.inputs[w]}': ${T.type}[${T.dims.join(",")}]`).join(", ")})`);let h=await this.profiler.event("node",c.node.name,async()=>c.op.impl(t,b,c.op.context));if(h.length!==c.node.outputs.length)throw new Error("the size of output does not match model definition.");h.forEach((T,w)=>{let v=c.node.outputs[w];if(this._values[v])throw new Error(`output [${v}] already has value: op:${c.node.name}`);this._values[v]=T});let g=new Set;h.forEach((T,w)=>{let v=c.node.outputs[w];for(let S of s[v].to){let $=a[S],P=!0;for(let E of $.inputs)if(!this._values[E]){P=!1;break}P&&g.add(S)}}),i.push(...g)}let l=[];for(let f=0;f{"use strict";Cn();we=ln(pn());Gr();De();Lt=ee.experimental.fbs,Wn=class r{constructor(e){if(this._attributes=new Map,e!=null){for(let n of e)n instanceof we.onnx.AttributeProto?this._attributes.set(n.name,[r.getValue(n),r.getType(n)]):n instanceof Lt.Attribute&&this._attributes.set(n.name(),[r.getValue(n),r.getType(n)]);if(this._attributes.sizeQe.fromProto(i));if(e instanceof Lt.Attribute)return t.map(i=>Qe.fromOrtTensor(i))}return n===we.onnx.AttributeProto.AttributeType.STRING&&e instanceof we.onnx.AttributeProto?Rn(t):n===we.onnx.AttributeProto.AttributeType.STRINGS&&e instanceof we.onnx.AttributeProto?t.map(Rn):t}static getValueNoCheck(e){return e instanceof we.onnx.AttributeProto?this.getValueNoCheckFromOnnxFormat(e):this.getValueNoCheckFromOrtFormat(e)}static getValueNoCheckFromOnnxFormat(e){switch(e.type){case we.onnx.AttributeProto.AttributeType.FLOAT:return e.f;case 
we.onnx.AttributeProto.AttributeType.INT:return e.i;case we.onnx.AttributeProto.AttributeType.STRING:return e.s;case we.onnx.AttributeProto.AttributeType.TENSOR:return e.t;case we.onnx.AttributeProto.AttributeType.GRAPH:return e.g;case we.onnx.AttributeProto.AttributeType.FLOATS:return e.floats;case we.onnx.AttributeProto.AttributeType.INTS:return e.ints;case we.onnx.AttributeProto.AttributeType.STRINGS:return e.strings;case we.onnx.AttributeProto.AttributeType.TENSORS:return e.tensors;case we.onnx.AttributeProto.AttributeType.GRAPHS:return e.graphs;default:throw new Error(`unsupported attribute type: ${we.onnx.AttributeProto.AttributeType[e.type]}`)}}static getValueNoCheckFromOrtFormat(e){switch(e.type()){case Lt.AttributeType.FLOAT:return e.f();case Lt.AttributeType.INT:return e.i();case Lt.AttributeType.STRING:return e.s();case Lt.AttributeType.TENSOR:return e.t();case Lt.AttributeType.GRAPH:return e.g();case Lt.AttributeType.FLOATS:return e.floatsArray();case Lt.AttributeType.INTS:{let n=[];for(let t=0;t{"use strict";dh();Cn();Es=ln(pn());Gr();De();ci=ee.experimental.fbs,Cs={from:(r,e)=>new Os(r,e)},rr=class{constructor(e){this._from=void 0,this._to=[],this.tensor=void 0,this.type=void 0,e&&(this.type=lt.tensorValueTypeFromProto(e.type.tensorType))}get from(){return this._from}get to(){return this._to}},fi=class{constructor(e,n){e instanceof Es.onnx.NodeProto?(this.name=e.name,this.opType=e.opType,this.attributes=new Wn(e.attribute)):e instanceof ci.Node&&(this.name=n??e.name(),this.opType=e.opType(),this.attributes=new Wn(lt.tensorAttributesFromORTFormat(e))),this.inputs=[],this.outputs=[],this.executeNode=!0}},Os=class{constructor(e,n){if(!e)throw new TypeError("graph is empty");this.buildGraph(e),this.transformGraph(n),this.checkIsAcyclic()}getInputIndices(){return this._allInputIndices}getInputNames(){return this._allInputNames}getOutputIndices(){return this._allOutputIndices}getOutputNames(){return this._allOutputNames}getValues(){return this._allData}getNodes(){return this._nodes}buildGraph(e){if(e instanceof Es.onnx.GraphProto)this.buildGraphFromOnnxFormat(e);else if(e instanceof ci.Graph)this.buildGraphFromOrtFormat(e);else throw new TypeError("Graph type is not supported.")}buildGraphFromOnnxFormat(e){let n=new Map;this._allData=[],this._allInputIndices=[],this._allInputNames=[],this._allOutputIndices=[],this._allOutputNames=[],this._nodes=[];let t=new Map;if(!e.input)throw new Error("missing information in graph: input");let o=[];for(let i of e.input){if(n.has(i.name))throw new Error(`duplicated input name: ${i.name}`);let s=this._allData.push(new rr(i))-1;n.set(i.name,s),o.push(i.name)}if(!e.initializer)throw new Error("missing information in graph: initializer");for(let i of e.initializer){let s=n.get(i.name);if(s===void 0){let a=new rr;a.type={shape:{dims:lt.tensorDimsFromProto(i.dims)},tensorType:lt.tensorDataTypeFromProto(i.dataType)},s=this._allData.push(a)-1,n.set(i.name,s)}this._allData[s]._from=-1,this._allData[s].tensor=Qe.fromProto(i)}for(let i=0;i"u"&&(l=this._allData.push(new rr)-1,n.set(u,l)),s.outputs.push(l),this._allData[l]._from!==void 0)throw new Error(`multiple nodes output to one data value: ${l}`);if(this._allData[l]._from=i,a.opType==="Constant"){if(!a.attribute||a.attribute.length!==1||!a.attribute[0].t)throw new Error("missing attributes or missing tensor value in attributes for this Constant operator");if(!a.output||a.output.length!==1)throw new Error("missing output or incorrect number of outputs for this Constant 
operator");s.outputs.pop(),s.executeNode=!1,this._allData[l]._from=-1,this._allData[l].tensor=Qe.fromProto(a.attribute[0].t)}}}for(let i=0;i"u"){if(u===""&&(a.input.length===3||a.input.length===4)&&a.opType==="Resize")continue;throw new Error(`unrecognized input '${u}' for node: ${a.name}`)}s.inputs.push(l),this._allData[l]._to.push(i)}}return!0}buildGraphFromOrtFormat(e){let n=new Map;this._allData=[],this._allInputIndices=[],this._allInputNames=[],this._allOutputIndices=[],this._allOutputNames=[],this._nodes=[];let t=new Map,o=[];for(let i=0;i"u"&&(f=this._allData.push(new rr)-1,n.set(l,f)),s.outputs.push(f),this._allData[f]._from!==void 0)throw new Error(`multiple nodes output to one data value: ${f}`);if(this._allData[f]._from=i,a.opType()==="Constant"){if(a.attributesLength()!==1||!a.attributes(0).t())throw new Error("missing attributes or missing tensor value in attributes for this Constant operator");if(a.outputsLength()!==1)throw new Error("missing output or incorrect number of outputs for this Constant operator");s.outputs.pop(),s.executeNode=!1,this._allData[f]._from=-1,this._allData[f].tensor=Qe.fromOrtTensor(a.attributes(0).t())}}}for(let i=0;i"u")throw new Error(`unrecognized input '${l}' for node: ${a.name()}`);s.inputs.push(f),this._allData[f]._to.push(i)}}}checkIsAcyclic(){let e=new Set;this._allInputIndices.forEach(o=>{this._allData[o]._to.forEach(s=>{e.add(s)})});let n=Array.from(e),t=new Array(this._nodes.length).fill("white");for(;n.length>0;){let o=n.pop();t[o]==="gray"?t[o]="black":(n.push(o),t[o]="gray",this._nodes[o].outputs.forEach(i=>{let s=this._allData[i];if(typeof s.tensor<"u")throw new Error("node outputs should not be initialized");if(s._from!==o)throw new Error("from property of the Value object doesn't match index of Node being processed");s._to.forEach(a=>{if(t[a]==="gray")throw new Error("model graph is cyclic");t[a]==="white"&&n.push(a)})}))}}transformGraph(e){this.removeAllIdentityNodes(),this.removeAllDropoutNodes(),this.fuseConvActivationNodes(),e&&e.transformGraph(this),this.finalizeGraph()}finalizeGraph(){let e=0,n=new Array(this._nodes.length,0),t=0;for(let o=0;o{this._allData[i]._from=-2});this._nodes.splice(t,this._nodes.length-t);for(let o=0;o=0)i._to[s]=n[i._to[s]];else throw new Error("Trying to update a removed node")}e=0;for(let o=0;o0){let i=-1;this._allData[o].from!==void 0&&this._allData[o].from!==-1?(i=this._nodes[this._allData[o].from].outputs.indexOf(o+e),i!==-1&&(this._nodes[this._allData[o].from].outputs[i]=o)):(i=this._allInputIndices.indexOf(o+e),i!==-1&&(this._allInputIndices[i]=o)),this._allData[o].to.forEach(s=>{i=this._nodes[s].inputs.indexOf(o+e),i!==-1&&(this._nodes[s].inputs[i]=o)}),this._allData[o].to.length===0&&(i=this._allOutputIndices.indexOf(o+e),i!==-1&&(this._allOutputIndices[i]=o))}}}deleteNode(e){let n=this._nodes[e];if(n.outputs.length>1){for(let a=1;a0)throw new Error("Node deletion with more than one output connected to other nodes is not supported. ")}n.executeNode=!1;let t=n.inputs[0],o=n.outputs[0],i=this._allData[o].to;for(let a=0;a0)for(let a of i){let u=this._nodes[a].inputs.indexOf(o);if(u===-1)throw new Error("The Node object doesn't have the output Value in it's 'inputs' property ");this._nodes[a].inputs[u]=t,this._allData[t].to.push(a)}}removeAllDropoutNodes(){let e=0;for(let n of this._nodes){if(n.opType==="Dropout"){if(n.inputs.length!==1)throw new Error("Dropout nodes should only contain one input. 
");if(n.outputs.length!==1&&n.outputs.length!==2)throw new Error("Dropout nodes should contain either 1 or 2 output(s)");if(n.outputs.length===2&&this._allData[n.outputs[1]]._to.length!==0)throw new Error("Dropout nodes's second output should not be referenced by other nodes");this.deleteNode(e)}e++}}removeAllIdentityNodes(){let e=0;for(let n of this._nodes)n.opType==="Identity"&&this.deleteNode(e),e++}isActivation(e){switch(e.opType){case"Relu":case"Sigmoid":case"Clip":return!0;default:return!1}}fuseConvActivationNodes(){for(let e of this._nodes)if(e.opType==="Conv"){let n=this._allData[e.outputs[0]]._to;if(n.length===1&&this.isActivation(this._nodes[n[0]])){let t=this._nodes[n[0]];if(t.opType==="Clip")if(t.inputs.length===1)try{e.attributes.set("activation_params","floats",[t.attributes.getFloat("min"),t.attributes.getFloat("max")])}catch{e.attributes.set("activation_params","floats",[Mr,Vr])}else if(t.inputs.length>=3&&this._allData[t.inputs[1]].tensor!==void 0&&this._allData[t.inputs[2]].tensor!==void 0)e.attributes.set("activation_params","floats",[this._allData[t.inputs[1]].tensor.floatData[0],this._allData[t.inputs[2]].tensor.floatData[0]]);else continue;e.attributes.set("activation","string",t.opType),this.deleteNode(n[0])}}}}});var mh,C_,di,hh=C(()=>{"use strict";ko();ph();Cn();mh=ln(pn());De();C_=ee.experimental.fbs,di=class{constructor(){}load(e,n,t){let o;if(!t)try{this.loadFromOnnxFormat(e,n);return}catch(i){if(t!==void 0)throw i;o=i}try{this.loadFromOrtFormat(e,n)}catch(i){throw t!==void 0?i:new Error(`Failed to load model as ONNX format: ${o} +as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e);if(wt.longToNumber(t.irVersion)<3)throw new Error("only support ONNX model with IR_VERSION>=3");this._opsets=t.opsetImport.map(i=>({domain:i.domain,version:wt.longToNumber(i.version)})),this._graph=Cs.from(t.graph,n)}loadFromOrtFormat(e,n){let t=new k.ByteBuffer(e),o=C_.InferenceSession.getRootAsInferenceSession(t).model();if(wt.longToNumber(o.irVersion())<3)throw new Error("only support ONNX model with IR_VERSION>=3");this._opsets=[];for(let s=0;s{"use strict";ch();fh();Pt();hh();pi=class{constructor(e={}){this._initialized=!1,this.backendHint=e.backendHint,this.profiler=Eo.create(e.profiler),this.context={profiler:this.profiler,graphInputTypes:[],graphInputDims:[]}}get inputNames(){return this._model.graph.getInputNames()}get outputNames(){return this._model.graph.getOutputNames()}startProfiling(){this.profiler.start()}endProfiling(){this.profiler.stop()}async loadModel(e,n,t){await this.profiler.event("session","Session.loadModel",async()=>{let o=await As(this.backendHint);if(this.sessionHandler=o.createSessionHandler(this.context),this._model=new di,typeof e=="string"){let i=e.endsWith(".ort");{let a=await(await fetch(e)).arrayBuffer();this.initialize(new Uint8Array(a),i)}}else if(ArrayBuffer.isView(e))this.initialize(e);else{let i=new Uint8Array(e,n||0,t||e.byteLength);this.initialize(i)}})}initialize(e,n){if(this._initialized)throw new Error("already initialized");this.profiler.event("session","Session.initialize",()=>{let t=this.sessionHandler.transformGraph?this.sessionHandler:void 0;this._model.load(e,t,n),this.sessionHandler.onGraphInitialized&&this.sessionHandler.onGraphInitialized(this._model.graph),this.initializeOps(this._model.graph),this._executionPlan=new li(this._model.graph,this._ops,this.profiler)}),this._initialized=!0}async run(e){if(!this._initialized)throw new Error("session not initialized yet");return 
this.profiler.event("session","Session.run",async()=>{let n=this.normalizeAndValidateInputs(e),t=await this._executionPlan.execute(this.sessionHandler,n);return this.createOutput(t)})}normalizeAndValidateInputs(e){let n=this._model.graph.getInputNames();if(Array.isArray(e)){if(e.length!==n.length)throw new Error(`incorrect input array length: expected ${n.length} but got ${e.length}`)}else{if(e.size!==n.length)throw new Error(`incorrect input map size: expected ${n.length} but got ${e.size}`);let t=new Array(e.size),o=0;for(let i=0;i{"use strict";ft();Gr();mi=class{constructor(e){this.session=e;this.inputNames=this.session.inputNames,this.outputNames=this.session.outputNames}async dispose(){}async run(e,n,t){let o=new Map;for(let a in e)if(Object.hasOwnProperty.call(e,a)){let u=e[a];o.set(a,new Qe(u.dims,u.type,void 0,void 0,u.data))}let i=await this.session.run(o),s={};return i.forEach((a,u)=>{s[u]=new it(a.type,a.data,a.dims)}),s}startProfiling(){this.session.startProfiling()}endProfiling(){this.session.endProfiling()}}});var yh={};un(yh,{onnxjsBackend:()=>k_});var ks,k_,xh=C(()=>{"use strict";gh();bh();ks=class{async init(){}async createInferenceSessionHandler(e,n){let t=new pi(n);return typeof e=="string"?await t.loadModel(e):await t.loadModel(e),new mi(t)}},k_=new ks});var hi=C(()=>{"use strict"});var Th={};un(Th,{default:()=>D_});var vh,wh,D_,_h=C(()=>{"use strict";Ds();jr();Hn();vh="ort-wasm-proxy-worker",wh=globalThis.self?.name===vh;wh&&(self.onmessage=r=>{let{type:e,in:n}=r.data;try{switch(e){case"init-wasm":gi(n.wasm).then(()=>{bi(n).then(()=>{postMessage({type:e})},t=>{postMessage({type:e,err:t})})},t=>{postMessage({type:e,err:t})});break;case"init-ep":{let{epName:t,env:o}=n;yi(o,t).then(()=>{postMessage({type:e})},i=>{postMessage({type:e,err:i})});break}case"copy-from":{let{buffer:t}=n,o=qn(t);postMessage({type:e,out:o});break}case"create":{let{model:t,options:o}=n;xi(t,o).then(i=>{postMessage({type:e,out:i})},i=>{postMessage({type:e,err:i})});break}case"release":vi(n),postMessage({type:e});break;case"run":{let{sessionId:t,inputIndices:o,inputs:i,outputIndices:s,options:a}=n;wi(t,o,i,s,new Array(s.length).fill(null),a).then(u=>{u.some(l=>l[3]!=="cpu")?postMessage({type:e,err:"Proxy does not support non-cpu tensor location."}):postMessage({type:e,out:u},_i([...i,...u]))},u=>{postMessage({type:e,err:u})});break}case"end-profiling":Ti(n),postMessage({type:e});break;default:}}catch(t){postMessage({type:e,err:t})}});D_=wh?null:r=>new Worker(r??Tn,{type:"module",name:vh})});var Sh={};un(Sh,{default:()=>B_});var Bs,Ih,B_,$h=C(()=>{"use strict";Ih=(Bs=import.meta.url,async function(r={}){function e(){return ae.buffer!=ge.buffer&&Fe(),ge}function n(){return ae.buffer!=ge.buffer&&Fe(),Ie}function t(){return ae.buffer!=ge.buffer&&Fe(),xe}function o(){return ae.buffer!=ge.buffer&&Fe(),se}function i(){return ae.buffer!=ge.buffer&&Fe(),pe}function s(){return ae.buffer!=ge.buffer&&Fe(),ce}function a(){return ae.buffer!=ge.buffer&&Fe(),ut}function u(){return ae.buffer!=ge.buffer&&Fe(),Te}var l,f,c=Object.assign({},r),p=new Promise((d,m)=>{l=d,f=m}),b=typeof window=="object",h=typeof importScripts=="function",g=h&&self.name=="em-pthread";c.mountExternalData=(d,m)=>{(c.Fb||(c.Fb=new Map)).set(d,m)},c.unmountExternalData=()=>{delete c.Fb};var T=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let w=()=>{let d=(y,x,_)=>(...O)=>{let R=Zt,M=x?.();O=y(...O);let K=x?.();return M!==K&&(y=K,_(M),x=_=null),Zt!=R?new 
Promise((Y,ne)=>{ha={resolve:Y,reject:ne}}):O},m=y=>async(...x)=>{try{if(c.Eb)throw Error("Session already started");let _=c.Eb={bc:x[0],errors:[]},O=await y(...x);if(c.Eb!==_)throw Error("Session mismatch");c.Mb?.flush();let R=_.errors;if(0K),0c._OrtCreateSession,y=>c._OrtCreateSession=y),c._OrtRun=m(d(c._OrtRun,()=>c._OrtRun,y=>c._OrtRun=y)),c._OrtRunWithBinding=m(d(c._OrtRunWithBinding,()=>c._OrtRunWithBinding,y=>c._OrtRunWithBinding=y)),c._OrtBindInput=d(c._OrtBindInput,()=>c._OrtBindInput,y=>c._OrtBindInput=y),w=void 0};c.jsepInit=(d,m)=>{if(w?.(),d==="webgpu"){[c.Mb,c.Tb,c.Xb,c.Nb,c.Wb,c.jb,c.Yb,c.$b,c.Ub,c.Vb,c.Zb]=m;let y=c.Mb;c.jsepRegisterBuffer=(x,_,O,R)=>y.registerBuffer(x,_,O,R),c.jsepGetBuffer=x=>y.getBuffer(x),c.jsepCreateDownloader=(x,_,O)=>y.createDownloader(x,_,O),c.jsepOnReleaseSession=x=>{y.onReleaseSession(x)},c.jsepOnRunStart=x=>y.onRunStart(x)}};var v,S,$=Object.assign({},c),P="./this.program",E=(d,m)=>{throw m},N="";(b||h)&&(h?N=self.location.href:typeof document<"u"&&document.currentScript&&(N=document.currentScript.src),Bs&&(N=Bs),N=N.startsWith("blob:")?"":N.substr(0,N.replace(/[?#].*/,"").lastIndexOf("/")+1),h&&(S=d=>{var m=new XMLHttpRequest;return m.open("GET",d,!1),m.responseType="arraybuffer",m.send(null),new Uint8Array(m.response)}),v=(d,m,y)=>{var x=new XMLHttpRequest;x.open("GET",d,!0),x.responseType="arraybuffer",x.onload=()=>{x.status==200||x.status==0&&x.response?m(x.response):y()},x.onerror=y,x.send(null)});var z=console.log.bind(console),q=console.error.bind(console),j=z,F=q;if(Object.assign(c,$),$=null,g){let d=function(m){try{var y=m.data,x=y.cmd;if(x==="load"){let _=[];self.onmessage=O=>_.push(O),self.startWorker=()=>{postMessage({cmd:"loaded"});for(let O of _)d(O);self.onmessage=d};for(let O of y.handlers)c[O]&&!c[O].proxy||(c[O]=(...R)=>{postMessage({Lb:"callHandler",kc:O,args:R})},O=="print"&&(j=c[O]),O=="printErr"&&(F=c[O]));ae=y.wasmMemory,Fe(),_e(y.wasmModule)}else if(x==="run"){xa(y.pthread_ptr,0,0,1,0,0),da(y.pthread_ptr),zx(),Eu(),$e||(Al(),$e=!0);try{Fx(y.start_routine,y.arg)}catch(_){if(_!="unwind")throw _}}else x==="cancel"?sn()&&ho(-1):y.target!=="setimmediate"&&(x==="checkMailbox"?$e&&io():x&&(F(`worker: received unknown command ${x}`),F(y)))}catch(_){throw Pl(),_}};var HI=d,_e,$e=!1;F=function(...m){m=m.join(" "),console.error(m)},self.alert=function(...m){postMessage({Lb:"alert",text:m.join(" "),mc:sn()})},c.instantiateWasm=(m,y)=>new Promise(x=>{_e=_=>{_=new WebAssembly.Instance(_,Su()),y(_),x()}}),self.onunhandledrejection=m=>{throw m.reason||m},self.onmessage=d}var ae,qe,Q,ge,Ie,xe,se,pe,ce,ut,V,ie,Te,tt=!1;function Fe(){var d=ae.buffer;c.HEAP8=ge=new Int8Array(d),c.HEAP16=xe=new Int16Array(d),c.HEAPU8=Ie=new Uint8Array(d),c.HEAPU16=se=new Uint16Array(d),c.HEAP32=pe=new Int32Array(d),c.HEAPU32=ce=new Uint32Array(d),c.HEAPF32=ut=new Float32Array(d),c.HEAPF64=Te=new Float64Array(d),c.HEAP64=V=new BigInt64Array(d),c.HEAPU64=ie=new BigUint64Array(d)}if(!g){if(c.wasmMemory)ae=c.wasmMemory;else if(!((ae=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof T))throw F("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"),Error("bad memory");Fe()}var je=[],tn=[],mt=[],rt=0,jt=null,hr=null;function no(){if(--rt==0&&(jt!==null&&(clearInterval(jt),jt=null),hr)){var d=hr;hr=null,d()}}function rn(d){throw F(d="Aborted("+d+")"),tt=!0,Q=1,d=new 
WebAssembly.RuntimeError(d+". Build with -sASSERTIONS for more info."),f(d),d}var vu,wu=d=>d.startsWith("data:application/octet-stream;base64,"),Tu=d=>d.startsWith("file://");function _u(d){if(S)return S(d);throw"both async and sync fetching of the wasm failed"}function Iu(d,m,y){return function(x){if(b||h){if(typeof fetch=="function"&&!Tu(x))return fetch(x,{credentials:"same-origin"}).then(_=>{if(!_.ok)throw`failed to load wasm binary file at '${x}'`;return _.arrayBuffer()}).catch(()=>_u(x));if(v)return new Promise((_,O)=>{v(x,R=>_(new Uint8Array(R)),O)})}return Promise.resolve().then(()=>_u(x))}(d).then(x=>WebAssembly.instantiate(x,m)).then(y,x=>{F(`failed to asynchronously prepare wasm: ${x}`),rn(x)})}function Su(){return{a:{M:Nx,za:Rx,b:Vx,$:Bu,z:Nu,pa:zu,X:Mu,Z:Vu,qa:Gu,na:Uu,ga:Wu,ma:Hu,J:qu,Y:ju,V:Ku,oa:Xu,W:Zu,va:Gx,D:Wx,P:Hx,O:jx,C:Xx,s:Zx,p:Yx,E:Jx,y:i0,Q:a0,ta:s0,ja:u0,T:l0,aa:c0,F:f0,ia:da,sa:d0,u:p0,B:g0,o:b0,k:x0,c:ca,n:v0,j:_0,Aa:I0,r:S0,d:$0,v:A0,m:P0,g:O0,l:E0,i:C0,h:k0,e:D0,da:B0,ea:L0,fa:R0,ba:cl,ca:fl,S:N0,f:z0,N:F0,G:M0,K:V0,w:G0,ra:U0,U:W0,t:pl,x:H0,L:q0,R:j0,ya:K0,xa:X0,ka:gl,la:bl,_:ia,A:yl,I:xl,ha:vl,H:wl,a:ae,wa:oa,ua:Il,q:J0}}}var ta={1337716:(d,m,y,x)=>{if(c===void 0||!c.Fb)return 1;if((d=ot(d>>>0)).startsWith("./")&&(d=d.substring(2)),!(d=c.Fb.get(d)))return 2;if(x>>>=0,(m>>>=0)+(y>>>=0)>d.byteLength)return 3;try{return n().set(d.subarray(m,m+y),x>>>0),0}catch{return 4}},1338217:()=>{c.Ub()},1338248:()=>{c.Vb()},1338277:()=>{c.Zb()},1338302:d=>c.Tb(d),1338335:d=>c.Xb(d),1338367:(d,m,y)=>{c.Nb(d,m,y,!0)},1338406:(d,m,y)=>{c.Nb(d,m,y)},1338439:()=>typeof wasmOffsetConverter<"u",1338496:d=>{c.jb("Abs",d,void 0)},1338547:d=>{c.jb("Neg",d,void 0)},1338598:d=>{c.jb("Floor",d,void 0)},1338651:d=>{c.jb("Ceil",d,void 0)},1338703:d=>{c.jb("Reciprocal",d,void 0)},1338761:d=>{c.jb("Sqrt",d,void 0)},1338813:d=>{c.jb("Exp",d,void 0)},1338864:d=>{c.jb("Erf",d,void 0)},1338915:d=>{c.jb("Sigmoid",d,void 0)},1338970:(d,m,y)=>{c.jb("HardSigmoid",d,{alpha:m,beta:y})},1339049:d=>{c.jb("Log",d,void 0)},1339100:d=>{c.jb("Sin",d,void 0)},1339151:d=>{c.jb("Cos",d,void 0)},1339202:d=>{c.jb("Tan",d,void 0)},1339253:d=>{c.jb("Asin",d,void 0)},1339305:d=>{c.jb("Acos",d,void 0)},1339357:d=>{c.jb("Atan",d,void 0)},1339409:d=>{c.jb("Sinh",d,void 0)},1339461:d=>{c.jb("Cosh",d,void 0)},1339513:d=>{c.jb("Asinh",d,void 0)},1339566:d=>{c.jb("Acosh",d,void 0)},1339619:d=>{c.jb("Atanh",d,void 0)},1339672:d=>{c.jb("Tanh",d,void 0)},1339724:d=>{c.jb("Not",d,void 0)},1339775:(d,m,y)=>{c.jb("Clip",d,{min:m,max:y})},1339844:d=>{c.jb("Clip",d,void 0)},1339896:(d,m)=>{c.jb("Elu",d,{alpha:m})},1339954:d=>{c.jb("Relu",d,void 0)},1340006:(d,m)=>{c.jb("LeakyRelu",d,{alpha:m})},1340070:(d,m)=>{c.jb("ThresholdedRelu",d,{alpha:m})},1340140:(d,m)=>{c.jb("Cast",d,{to:m})},1340198:d=>{c.jb("Add",d,void 0)},1340249:d=>{c.jb("Sub",d,void 0)},1340300:d=>{c.jb("Mul",d,void 0)},1340351:d=>{c.jb("Div",d,void 0)},1340402:d=>{c.jb("Pow",d,void 0)},1340453:d=>{c.jb("Equal",d,void 0)},1340506:d=>{c.jb("Greater",d,void 0)},1340561:d=>{c.jb("GreaterOrEqual",d,void 0)},1340623:d=>{c.jb("Less",d,void 0)},1340675:d=>{c.jb("LessOrEqual",d,void 
0)},1340734:(d,m,y,x,_)=>{c.jb("ReduceMean",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1340893:(d,m,y,x,_)=>{c.jb("ReduceMax",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1341051:(d,m,y,x,_)=>{c.jb("ReduceMin",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1341209:(d,m,y,x,_)=>{c.jb("ReduceProd",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1341368:(d,m,y,x,_)=>{c.jb("ReduceSum",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1341526:(d,m,y,x,_)=>{c.jb("ReduceL1",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1341683:(d,m,y,x,_)=>{c.jb("ReduceL2",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1341840:(d,m,y,x,_)=>{c.jb("ReduceLogSum",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1342001:(d,m,y,x,_)=>{c.jb("ReduceSumSquare",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1342165:(d,m,y,x,_)=>{c.jb("ReduceLogSumExp",d,{keepDims:!!m,noopWithEmptyAxes:!!y,axes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1342329:d=>{c.jb("Where",d,void 0)},1342382:(d,m,y)=>{c.jb("Transpose",d,{perm:m?Array.from(i().subarray(m>>>0,y>>>0)):[]})},1342490:(d,m,y,x)=>{c.jb("DepthToSpace",d,{blocksize:m,mode:ot(y),format:x?"NHWC":"NCHW"})},1342623:(d,m,y,x)=>{c.jb("DepthToSpace",d,{blocksize:m,mode:ot(y),format:x?"NHWC":"NCHW"})},1342756:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe)=>{c.jb("ConvTranspose",d,{format:K?"NHWC":"NCHW",autoPad:m,dilations:[y],group:x,kernelShape:[_],pads:[O,R],strides:[M],wIsConst:()=>!!e()[Y>>>0],outputPadding:ne?Array.from(i().subarray(ne>>>0,Ae>>>0)):[],outputShape:Ce?Array.from(i().subarray(Ce>>>0,L>>>0)):[],activation:ot(fe)})},1343157:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L)=>{c.jb("ConvTranspose",d,{format:M?"NHWC":"NCHW",autoPad:m,dilations:Array.from(i().subarray(y>>>0,2+(y>>>0)>>>0)),group:x,kernelShape:Array.from(i().subarray(_>>>0,2+(_>>>0)>>>0)),pads:Array.from(i().subarray(O>>>0,4+(O>>>0)>>>0)),strides:Array.from(i().subarray(R>>>0,2+(R>>>0)>>>0)),wIsConst:()=>!!e()[K>>>0],outputPadding:Y?Array.from(i().subarray(Y>>>0,ne>>>0)):[],outputShape:Ae?Array.from(i().subarray(Ae>>>0,Ce>>>0)):[],activation:ot(L)})},1343722:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe)=>{c.jb("ConvTranspose",d,{format:K?"NHWC":"NCHW",autoPad:m,dilations:[y],group:x,kernelShape:[_],pads:[O,R],strides:[M],wIsConst:()=>!!e()[Y>>>0],outputPadding:ne?Array.from(i().subarray(ne>>>0,Ae>>>0)):[],outputShape:Ce?Array.from(i().subarray(Ce>>>0,L>>>0)):[],activation:ot(fe)})},1344123:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L)=>{c.jb("ConvTranspose",d,{format:M?"NHWC":"NCHW",autoPad:m,dilations:Array.from(i().subarray(y>>>0,2+(y>>>0)>>>0)),group:x,kernelShape:Array.from(i().subarray(_>>>0,2+(_>>>0)>>>0)),pads:Array.from(i().subarray(O>>>0,4+(O>>>0)>>>0)),strides:Array.from(i().subarray(R>>>0,2+(R>>>0)>>>0)),wIsConst:()=>!!e()[K>>>0],outputPadding:Y?Array.from(i().subarray(Y>>>0,ne>>>0)):[],outputShape:Ae?Array.from(i().subarray(Ae>>>0,Ce>>>0)):[],activation:ot(L)})},1344688:(d,m)=>{c.jb("GlobalAveragePool",d,{format:m?"NHWC":"NCHW"})},1344779:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe,Le)=>{c.jb("AveragePool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,K],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1345063:(d,m)=>{c.
jb("GlobalAveragePool",d,{format:m?"NHWC":"NCHW"})},1345154:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe,Le)=>{c.jb("AveragePool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,K],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1345438:(d,m)=>{c.jb("GlobalMaxPool",d,{format:m?"NHWC":"NCHW"})},1345525:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe,Le)=>{c.jb("MaxPool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,K],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1345805:(d,m)=>{c.jb("GlobalMaxPool",d,{format:m?"NHWC":"NCHW"})},1345892:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe,Le)=>{c.jb("MaxPool",d,{format:Le?"NHWC":"NCHW",auto_pad:m,ceil_mode:y,count_include_pad:x,storage_order:_,dilations:[O,R],kernel_shape:[M,K],pads:[Y,ne,Ae,Ce],strides:[L,fe]})},1346172:(d,m,y,x,_)=>{c.jb("Gemm",d,{alpha:m,beta:y,transA:x,transB:_})},1346276:d=>{c.jb("MatMul",d,void 0)},1346330:(d,m,y,x)=>{c.jb("ArgMax",d,{keepDims:!!m,selectLastIndex:!!y,axis:x})},1346438:(d,m,y,x)=>{c.jb("ArgMin",d,{keepDims:!!m,selectLastIndex:!!y,axis:x})},1346546:(d,m)=>{c.jb("Softmax",d,{axis:m})},1346609:(d,m)=>{c.jb("Concat",d,{axis:m})},1346669:(d,m,y,x,_)=>{c.jb("Split",d,{axis:m,numOutputs:y,splitSizes:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1346809:d=>{c.jb("Expand",d,void 0)},1346863:(d,m)=>{c.jb("Gather",d,{axis:Number(m)})},1346934:(d,m)=>{c.jb("GatherElements",d,{axis:Number(m)})},1347013:(d,m,y,x,_,O,R,M,K,Y,ne)=>{c.jb("Resize",d,{antialias:m,axes:y?Array.from(i().subarray(y>>>0,x>>>0)):[],coordinateTransformMode:ot(_),cubicCoeffA:O,excludeOutside:R,extrapolationValue:M,keepAspectRatioPolicy:ot(K),mode:ot(Y),nearestMode:ot(ne)})},1347359:(d,m,y,x,_,O,R)=>{c.jb("Slice",d,{starts:m?Array.from(i().subarray(m>>>0,y>>>0)):[],ends:x?Array.from(i().subarray(x>>>0,_>>>0)):[],axes:O?Array.from(i().subarray(O>>>0,R>>>0)):[]})},1347575:d=>{c.jb("Tile",d,void 0)},1347627:(d,m,y)=>{c.jb("InstanceNormalization",d,{epsilon:m,format:y?"NHWC":"NCHW"})},1347741:(d,m,y)=>{c.jb("InstanceNormalization",d,{epsilon:m,format:y?"NHWC":"NCHW"})},1347855:d=>{c.jb("Range",d,void 0)},1347908:(d,m)=>{c.jb("Einsum",d,{equation:ot(m)})},1347989:(d,m,y,x,_)=>{c.jb("Pad",d,{mode:m,value:y,pads:x?Array.from(i().subarray(x>>>0,_>>>0)):[]})},1348116:(d,m,y,x,_,O)=>{c.jb("BatchNormalization",d,{epsilon:m,momentum:y,spatial:!!_,trainingMode:!!x,format:O?"NHWC":"NCHW"})},1348285:(d,m,y,x,_,O)=>{c.jb("BatchNormalization",d,{epsilon:m,momentum:y,spatial:!!_,trainingMode:!!x,format:O?"NHWC":"NCHW"})},1348454:(d,m,y)=>{c.jb("CumSum",d,{exclusive:Number(m),reverse:Number(y)})},1348551:(d,m,y,x,_,O,R,M,K)=>{c.jb("Attention",d,{numHeads:m,isUnidirectional:y,maskFilterValue:x,scale:_,doRotary:O,qkvHiddenSizes:R?Array.from(i().subarray(Number(M)>>>0,Number(M)+R>>>0)):[],pastPresentShareBuffer:!!K})},1348823:d=>{c.jb("BiasAdd",d,void 0)},1348878:d=>{c.jb("BiasSplitGelu",d,void 0)},1348939:d=>{c.jb("FastGelu",d,void 0)},1348995:(d,m,y,x,_,O,R,M,K,Y,ne,Ae,Ce,L,fe,Le)=>{c.jb("Conv",d,{format:Ae?"NHWC":"NCHW",auto_pad:m,dilations:y?Array.from(i().subarray(y>>>0,x>>>0)):[],group:_,kernel_shape:O?Array.from(i().subarray(O>>>0,R>>>0)):[],pads:M?Array.from(i().subarray(M>>>0,K>>>0)):[],strides:Y?Array.from(i().subarray(Y>>>0,ne>>>0)):[],w_is_const:()=>!!e()[Ce>>>0],activation:ot(L),activation_params:fe?Array.from(a().subarray(fe>>>0,Le>>>0)):[]})},1349491:d=>{c.jb("Gelu",d,void 
0)},1349543:(d,m,y,x)=>{c.jb("GroupQueryAttention",d,{numHeads:m,kvNumHeads:y,scale:x})},1349656:(d,m,y,x)=>{c.jb("LayerNormalization",d,{axis:m,epsilon:y,simplified:!!x})},1349767:(d,m,y,x)=>{c.jb("LayerNormalization",d,{axis:m,epsilon:y,simplified:!!x})},1349878:(d,m,y,x,_,O)=>{c.jb("MatMulNBits",d,{k:m,n:y,accuracyLevel:x,bits:_,blockSize:O})},1350005:(d,m,y,x,_,O)=>{c.jb("MultiHeadAttention",d,{numHeads:m,isUnidirectional:y,maskFilterValue:x,scale:_,doRotary:O})},1350164:(d,m)=>{c.jb("QuickGelu",d,{alpha:m})},1350228:(d,m,y,x,_)=>{c.jb("RotaryEmbedding",d,{interleaved:!!m,numHeads:y,rotaryEmbeddingDim:x,scale:_})},1350367:(d,m,y)=>{c.jb("SkipLayerNormalization",d,{epsilon:m,simplified:!!y})},1350469:d=>{c.Yb(d)},1350503:(d,m)=>c.$b(d,m,c.Eb.bc,c.Eb.errors),1350615:(d,m,y)=>{c.jb("SkipLayerNormalization",d,{epsilon:m,simplified:!!y})}};function Rx(d,m,y){return il(async()=>{await c.Wb(d,m,y)})}function Nx(){return typeof wasmOffsetConverter<"u"}function ra(d){this.name="ExitStatus",this.message=`Program terminated with exit(${d})`,this.status=d}var na=d=>{d.terminate(),d.onmessage=()=>{}},$u=d=>{gr.length==0&&(ku(),Cu(gr[0]));var m=gr.pop();if(!m)return 6;Cr.push(m),Kt[d.Ab]=m,m.Ab=d.Ab;var y={cmd:"run",start_routine:d.cc,arg:d.Pb,pthread_ptr:d.Ab};return m.postMessage(y,d.ic),0},Er=0,We=(d,m,...y)=>{for(var x=2*y.length,_=Ta(),O=wa(8*x),R=O>>>3,M=0;M>>0]=K)}return d=Ol(d,0,x,O,m),go(_),d};function oa(d){if(g)return We(0,1,d);if(Q=d,!(0{if(Q=d,g)throw Au(d),"unwind";oa(d)},gr=[],Cr=[],Pu=[],Kt={},Ou=d=>{var m=d.Ab;delete Kt[m],gr.push(d),Cr.splice(Cr.indexOf(d),1),d.Ab=0,va(m)};function Eu(){Pu.forEach(d=>d())}var Cu=d=>new Promise(m=>{d.onmessage=_=>{var O=(_=_.data).cmd;if(_.targetThread&&_.targetThread!=sn()){var R=Kt[_.targetThread];R?R.postMessage(_,_.transferList):F(`Internal error! Worker sent a message "${O}" to target pthread ${_.targetThread}, but that thread no longer exists!`)}else O==="checkMailbox"?io():O==="spawnThread"?$u(_):O==="cleanupThread"?Ou(Kt[_.thread]):O==="killThread"?(_=_.thread,O=Kt[_],delete Kt[_],na(O),va(_),Cr.splice(Cr.indexOf(O),1),O.Ab=0):O==="cancelThread"?Kt[_.thread].postMessage({cmd:"cancel"}):O==="loaded"?(d.loaded=!0,m(d)):O==="alert"?alert(`Thread ${_.threadId}: ${_.text}`):_.target==="setimmediate"?d.postMessage(_):O==="callHandler"?c[_.handler](..._.args):O&&F(`worker sent an unknown command ${O}`)},d.onerror=_=>{throw F(`worker sent an error! 
${_.filename}:${_.lineno}: ${_.message}`),_};var y,x=[];for(y of["onExit"])c.hasOwnProperty(y)&&x.push(y);d.postMessage({cmd:"load",handlers:x,wasmMemory:ae,wasmModule:qe})});function ku(){var d=new Worker(new URL(import.meta.url),{type:"module",workerData:"em-pthread",name:"em-pthread"});gr.push(d)}var oo=d=>{for(;0{var d=sn(),m=s()[d+52>>>2>>>0];d=s()[d+56>>>2>>>0],Cl(m,m-d),go(m)},Fx=(d,m)=>{Er=0,d=kl(d,m),0>>=0);throw m>>>=0,y>>>=0,s()[x.Ib+16>>>2>>>0]=0,s()[x.Ib+4>>>2>>>0]=m,s()[x.Ib+8>>>2>>>0]=y,d}function Du(d,m,y,x){return g?We(2,1,d,m,y,x):Bu(d,m,y,x)}function Bu(d,m,y,x){if(d>>>=0,m>>>=0,y>>>=0,x>>>=0,T===void 0)return F("Current environment does not support SharedArrayBuffer, pthreads are not available!"),6;var _=[];return g&&_.length===0?Du(d,m,y,x):(d={cc:y,Ab:d,Pb:x,ic:_},g?(d.Lb="spawnThread",postMessage(d,_),0):$u(d))}var Lu=typeof TextDecoder<"u"?new TextDecoder("utf8"):void 0,Ru=(d,m,y)=>{var x=(m>>>=0)+y;for(y=m;d[y]&&!(y>=x);)++y;if(16(_=(240&_)==224?(15&_)<<12|O<<6|R:(7&_)<<18|O<<12|R<<6|63&d[m++])?x+=String.fromCharCode(_):(_-=65536,x+=String.fromCharCode(55296|_>>10,56320|1023&_))}}else x+=String.fromCharCode(_)}return x},ot=(d,m)=>(d>>>=0)?Ru(n(),d,m):"";function Nu(d,m,y){return g?We(3,1,d,m,y):0}function zu(d,m){if(g)return We(4,1,d,m)}var aa=d=>{for(var m=0,y=0;y=x?m++:2047>=x?m+=2:55296<=x&&57343>=x?(m+=4,++y):m+=3}return m},Fu=(d,m,y,x)=>{if(!(0>>=0;x=y+x-1;for(var O=0;O=R&&(R=65536+((1023&R)<<10)|1023&d.charCodeAt(++O)),127>=R){if(y>=x)break;m[y++>>>0]=R}else{if(2047>=R){if(y+1>=x)break;m[y++>>>0]=192|R>>6}else{if(65535>=R){if(y+2>=x)break;m[y++>>>0]=224|R>>12}else{if(y+3>=x)break;m[y++>>>0]=240|R>>18,m[y++>>>0]=128|R>>12&63}m[y++>>>0]=128|R>>6&63}m[y++>>>0]=128|63&R}}return m[y>>>0]=0,y-_},nn=(d,m,y)=>Fu(d,n(),m,y);function Mu(d,m){if(g)return We(5,1,d,m)}function Vu(d,m,y){if(g)return We(6,1,d,m,y)}function Gu(d,m,y){return g?We(7,1,d,m,y):0}function Uu(d,m){if(g)return We(8,1,d,m)}function Wu(d,m,y){if(g)return We(9,1,d,m,y)}function Hu(d,m,y,x){if(g)return We(10,1,d,m,y,x)}function qu(d,m,y,x){if(g)return We(11,1,d,m,y,x)}function ju(d,m,y,x){if(g)return We(12,1,d,m,y,x)}function Ku(d){if(g)return We(13,1,d)}function Xu(d,m){if(g)return We(14,1,d,m)}function Zu(d,m,y){if(g)return We(15,1,d,m,y)}var Yu,br,Gx=()=>{rn("")},Xt=d=>{for(var m="";n()[d>>>0];)m+=Yu[n()[d++>>>0]];return m},sa={},ua={},Ux={};function lr(d,m,y={}){if(!("argPackAdvance"in m))throw new TypeError("registerType registeredInstance requires argPackAdvance");return function(x,_,O={}){var R=_.name;if(!x)throw new br(`type "${R}" must have a positive integer typeid pointer`);if(ua.hasOwnProperty(x)){if(O.Rb)return;throw new br(`Cannot register type '${R}' twice`)}ua[x]=_,delete Ux[x],sa.hasOwnProperty(x)&&(_=sa[x],delete sa[x],_.forEach(M=>M()))}(d,m,y)}var Ju=(d,m,y)=>{switch(m){case 1:return y?x=>e()[x>>>0]:x=>n()[x>>>0];case 2:return y?x=>t()[x>>>1>>>0]:x=>o()[x>>>1>>>0];case 4:return y?x=>i()[x>>>2>>>0]:x=>s()[x>>>2>>>0];case 8:return y?x=>V[x>>>3]:x=>ie[x>>>3];default:throw new TypeError(`invalid integer width (${m}): ${d}`)}};function Wx(d,m,y){y>>>=0,lr(d>>>=0,{name:m=Xt(m>>>0),fromWireType:x=>x,toWireType:function(x,_){if(typeof _!="bigint"&&typeof _!="number")throw _=_===null?"null":(x=typeof _)=="object"||x==="array"||x==="function"?_.toString():""+_,new TypeError(`Cannot convert "${_}" to ${this.name}`);return typeof _=="number"&&(_=BigInt(_)),_},argPackAdvance:yr,readValueFromPointer:Ju(m,y,m.indexOf("u")==-1),Db:null})}var yr=8;function 
Hx(d,m,y,x){lr(d>>>=0,{name:m=Xt(m>>>0),fromWireType:function(_){return!!_},toWireType:function(_,O){return O?y:x},argPackAdvance:yr,readValueFromPointer:function(_){return this.fromWireType(n()[_>>>0])},Db:null})}var la=[],cr=[];function ca(d){9<(d>>>=0)&&--cr[d+1]==0&&(cr[d]=void 0,la.push(d))}var $t=d=>{if(!d)throw new br("Cannot use deleted val. handle = "+d);return cr[d]},At=d=>{switch(d){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:let m=la.pop()||cr.length;return cr[m]=d,cr[m+1]=1,m}};function fa(d){return this.fromWireType(s()[d>>>2>>>0])}var qx={name:"emscripten::val",fromWireType:d=>{var m=$t(d);return ca(d),m},toWireType:(d,m)=>At(m),argPackAdvance:yr,readValueFromPointer:fa,Db:null};function jx(d){return lr(d>>>0,qx)}var Kx=(d,m)=>{switch(m){case 4:return function(y){return this.fromWireType(a()[y>>>2>>>0])};case 8:return function(y){return this.fromWireType(u()[y>>>3>>>0])};default:throw new TypeError(`invalid float width (${m}): ${d}`)}};function Xx(d,m,y){y>>>=0,lr(d>>>=0,{name:m=Xt(m>>>0),fromWireType:x=>x,toWireType:(x,_)=>_,argPackAdvance:yr,readValueFromPointer:Kx(m,y),Db:null})}function Zx(d,m,y,x,_){if(d>>>=0,y>>>=0,m=Xt(m>>>0),_===-1&&(_=4294967295),_=M=>M,x===0){var O=32-8*y;_=M=>M<>>O}var R=m.includes("unsigned")?function(M,K){return K>>>0}:function(M,K){return K};lr(d,{name:m,fromWireType:_,toWireType:R,argPackAdvance:yr,readValueFromPointer:Ju(m,y,x!==0),Db:null})}function Yx(d,m,y){function x(O){var R=s()[O>>>2>>>0];return O=s()[O+4>>>2>>>0],new _(e().buffer,O,R)}var _=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][m];lr(d>>>=0,{name:y=Xt(y>>>0),fromWireType:x,argPackAdvance:yr,readValueFromPointer:x},{Rb:!0})}function Jx(d,m){d>>>=0;var y=(m=Xt(m>>>0))==="std::string";lr(d,{name:m,fromWireType:function(x){var _=s()[x>>>2>>>0],O=x+4;if(y)for(var R=O,M=0;M<=_;++M){var K=O+M;if(M==_||n()[K>>>0]==0){if(R=ot(R,K-R),Y===void 0)var Y=R;else Y+=String.fromCharCode(0),Y+=R;R=K+1}}else{for(Y=Array(_),M=0;M<_;++M)Y[M]=String.fromCharCode(n()[O+M>>>0]);Y=Y.join("")}return Yt(x),Y},toWireType:function(x,_){_ instanceof ArrayBuffer&&(_=new Uint8Array(_));var O=typeof _=="string";if(!(O||_ instanceof Uint8Array||_ instanceof Uint8ClampedArray||_ instanceof Int8Array))throw new br("Cannot pass non-string to std::string");var R=y&&O?aa(_):_.length,M=mo(4+R+1),K=M+4;if(s()[M>>>2>>>0]=R,y&&O)nn(_,K,R+1);else if(O)for(O=0;O>>0]=Y}else for(O=0;O>>0]=_[O];return x!==null&&x.push(Yt,M),M},argPackAdvance:yr,readValueFromPointer:fa,Db(x){Yt(x)}})}var Qu=typeof TextDecoder<"u"?new TextDecoder("utf-16le"):void 0,Qx=(d,m)=>{for(var y=d>>1,x=y+m/2;!(y>=x)&&o()[y>>>0];)++y;if(32<(y<<=1)-d&&Qu)return Qu.decode(n().slice(d,y));for(y="",x=0;!(x>=m/2);++x){var _=t()[d+2*x>>>1>>>0];if(_==0)break;y+=String.fromCharCode(_)}return y},e0=(d,m,y)=>{if(y??=2147483647,2>y)return 0;var x=m;y=(y-=2)<2*d.length?y/2:d.length;for(var _=0;_>>1>>>0]=O,m+=2}return t()[m>>>1>>>0]=0,m-x},t0=d=>2*d.length,r0=(d,m)=>{for(var y=0,x="";!(y>=m/4);){var _=i()[d+4*y>>>2>>>0];if(_==0)break;++y,65536<=_?(_-=65536,x+=String.fromCharCode(55296|_>>10,56320|1023&_)):x+=String.fromCharCode(_)}return x},n0=(d,m,y)=>{if(m>>>=0,y??=2147483647,4>y)return 0;var x=m;y=x+y-4;for(var _=0;_=O&&(O=65536+((1023&O)<<10)|1023&d.charCodeAt(++_)),i()[m>>>2>>>0]=O,(m+=4)+4>y)break}return i()[m>>>2>>>0]=0,m-x},o0=d=>{for(var m=0,y=0;y=x&&++y,m+=4}return m};function i0(d,m,y){if(d>>>=0,m>>>=0,y=Xt(y>>>=0),m===2)var 
x=Qx,_=e0,O=t0,R=M=>o()[M>>>1>>>0];else m===4&&(x=r0,_=n0,O=o0,R=M=>s()[M>>>2>>>0]);lr(d,{name:y,fromWireType:M=>{for(var K,Y=s()[M>>>2>>>0],ne=M+4,Ae=0;Ae<=Y;++Ae){var Ce=M+4+Ae*m;Ae!=Y&&R(Ce)!=0||(ne=x(ne,Ce-ne),K===void 0?K=ne:(K+=String.fromCharCode(0),K+=ne),ne=Ce+m)}return Yt(M),K},toWireType:(M,K)=>{if(typeof K!="string")throw new br(`Cannot pass non-string to C++ string type ${y}`);var Y=O(K),ne=mo(4+Y+m);return s()[ne>>>2>>>0]=Y/m,_(K,ne+4,Y+m),M!==null&&M.push(Yt,ne),ne},argPackAdvance:yr,readValueFromPointer:fa,Db(M){Yt(M)}})}function a0(d,m){lr(d>>>=0,{Sb:!0,name:m=Xt(m>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var s0=()=>1;function u0(d){xa(d>>>0,!h,1,!b,131072,!1),Eu()}var el=d=>{if(!tt)try{if(d(),!(0>>=0,typeof Atomics.jc=="function"&&(Atomics.jc(i(),d>>>2,d).value.then(io),d+=128,Atomics.store(i(),d>>>2,1))}var io=()=>{var d=sn();d&&(da(d),el(El))};function l0(d,m){(d>>>=0)==m>>>0?setTimeout(io):g?postMessage({targetThread:d,cmd:"checkMailbox"}):(d=Kt[d])&&d.postMessage({cmd:"checkMailbox"})}var pa=[];function c0(d,m,y,x,_){for(m>>>=0,x/=2,pa.length=x,y=_>>>0>>>3,_=0;_>>0];return(m?ta[m]:Q0[d])(...pa)}function f0(d){d>>>=0,g?postMessage({cmd:"cleanupThread",thread:d}):Ou(Kt[d])}function d0(d){}var ma=(d,m)=>{var y=ua[d];if(y===void 0)throw d=$l(d),y=Xt(d),Yt(d),new br(`${m} has unknown type ${y}`);return y},tl=(d,m,y)=>{var x=[];return d=d.toWireType(x,y),x.length&&(s()[m>>>2>>>0]=At(x)),d};function p0(d,m,y){return m>>>=0,y>>>=0,d=$t(d>>>0),m=ma(m,"emval::as"),tl(m,y,d)}var ao=d=>{try{d()}catch(m){rn(m)}},xr=0,Zt=null,rl=0,so=[],nl={},ol={},m0=0,ha=null,h0=[];function il(d){return function(m){if(!tt){if(xr===0){var y=!1,x=!1;m((_=0)=>{if(!tt&&(rl=_,y=!0,x)){xr=2,ao(()=>Ll(Zt)),typeof Browser<"u"&&Browser.Jb.Qb&&Browser.Jb.resume(),_=!1;try{var O=function(){var K=i()[Zt+8>>>2>>>0];return K=te[ol[K]],--Er,K()}()}catch(K){O=K,_=!0}var R=!1;if(!Zt){var M=ha;M&&(ha=null,(_?M.reject:M.resolve)(O),R=!0)}if(_&&!R)throw O}}),x=!0,y||(xr=1,Zt=function(){var _=mo(65548),O=_+12;s()[_>>>2>>>0]=O,s()[_+4>>>2>>>0]=O+65536,O=so[0];var R=nl[O];return R===void 0&&(R=m0++,nl[O]=R,ol[R]=O),O=R,i()[_+8>>>2>>>0]=O,_}(),typeof Browser<"u"&&Browser.Jb.Qb&&Browser.Jb.pause(),ao(()=>Dl(Zt)))}else xr===2?(xr=0,ao(Rl),Yt(Zt),Zt=null,h0.forEach(el)):rn(`invalid state: ${xr}`);return rl}}(m=>{d().then(m)})}function g0(d){return d>>>=0,il(()=>(d=$t(d)).then(At))}var uo=[];function b0(d,m,y,x){return y>>>=0,x>>>=0,(d=uo[d>>>0])(null,m=$t(m>>>0),y,x)}var y0={},lo=d=>{var m=y0[d];return m===void 0?Xt(d):m};function x0(d,m,y,x,_){return y>>>=0,x>>>=0,_>>>=0,(d=uo[d>>>0])(m=$t(m>>>0),m[y=lo(y)],x,_)}var al=()=>typeof globalThis=="object"?globalThis:Function("return this")();function v0(d){return(d>>>=0)==0?At(al()):(d=lo(d),At(al()[d]))}var w0=d=>{var m=uo.length;return uo.push(d),m},T0=(d,m)=>{for(var y=Array(d),x=0;x>>2>>>0],"parameter "+x);return y},sl=(d,m)=>Object.defineProperty(m,"name",{value:d});function _0(d,m,y){var x=(m=T0(d,m>>>0)).shift();d--;var _=`return function (obj, func, destructorsRef, args) { +`,O=0,R=[];y===0&&R.push("obj");for(var M=["retType"],K=[x],Y=0;Yne.name).join(", ")}) => ${x.name}>`,v0(sl(y,d))}function _0(d){return d=lo(d>>>0),At(c[d])}function I0(d,m){return m>>>=0,d=$t(d>>>0),m=$t(m),At(d[m])}function S0(d){9<(d>>>=0)&&(cr[d+1]+=1)}function $0(){return At([])}function A0(d){d=$t(d>>>0);for(var m=Array(d.length),y=0;y>>0))}function O0(){return At({})}function E0(d){for(var m=$t(d>>>=0);m.length;){var y=m.pop();m.pop()(y)}la(d)}function 
C0(d,m,y){m>>>=0,y>>>=0,d=$t(d>>>0),m=$t(m),y=$t(y),d[m]=y}function k0(d,m){return m>>>=0,d=(d=pa(d>>>0,"_emval_take_value")).readValueFromPointer(m),At(d)}function D0(d,m){d=-9007199254740992>d||9007199254740992>>=0,d=new Date(1e3*d),i()[m>>>2>>>0]=d.getUTCSeconds(),i()[m+4>>>2>>>0]=d.getUTCMinutes(),i()[m+8>>>2>>>0]=d.getUTCHours(),i()[m+12>>>2>>>0]=d.getUTCDate(),i()[m+16>>>2>>>0]=d.getUTCMonth(),i()[m+20>>>2>>>0]=d.getUTCFullYear()-1900,i()[m+24>>>2>>>0]=d.getUTCDay(),d=(d.getTime()-Date.UTC(d.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[m+28>>>2>>>0]=d}var nn=d=>d%4==0&&(d%100!=0||d%400==0),ul=[0,31,60,91,121,152,182,213,244,274,305,335],ll=[0,31,59,90,120,151,181,212,243,273,304,334];function B0(d,m){d=-9007199254740992>d||9007199254740992>>=0,d=new Date(1e3*d),i()[m>>>2>>>0]=d.getSeconds(),i()[m+4>>>2>>>0]=d.getMinutes(),i()[m+8>>>2>>>0]=d.getHours(),i()[m+12>>>2>>>0]=d.getDate(),i()[m+16>>>2>>>0]=d.getMonth(),i()[m+20>>>2>>>0]=d.getFullYear()-1900,i()[m+24>>>2>>>0]=d.getDay();var y=(nn(d.getFullYear())?ul:ll)[d.getMonth()]+d.getDate()-1|0;i()[m+28>>>2>>>0]=y,i()[m+36>>>2>>>0]=-60*d.getTimezoneOffset(),y=new Date(d.getFullYear(),6,1).getTimezoneOffset();var x=new Date(d.getFullYear(),0,1).getTimezoneOffset();d=0|(y!=x&&d.getTimezoneOffset()==Math.min(x,y)),i()[m+32>>>2>>>0]=d}function L0(d){d>>>=0;var m=new Date(i()[d+20>>>2>>>0]+1900,i()[d+16>>>2>>>0],i()[d+12>>>2>>>0],i()[d+8>>>2>>>0],i()[d+4>>>2>>>0],i()[d>>>2>>>0],0),y=i()[d+32>>>2>>>0],x=m.getTimezoneOffset(),_=new Date(m.getFullYear(),6,1).getTimezoneOffset(),O=new Date(m.getFullYear(),0,1).getTimezoneOffset(),R=Math.min(O,_);return 0>y?i()[d+32>>>2>>>0]=+(_!=O&&R==x):0>>2>>>0]=m.getDay(),y=(nn(m.getFullYear())?ul:ll)[m.getMonth()]+m.getDate()-1|0,i()[d+28>>>2>>>0]=y,i()[d>>>2>>>0]=m.getSeconds(),i()[d+4>>>2>>>0]=m.getMinutes(),i()[d+8>>>2>>>0]=m.getHours(),i()[d+12>>>2>>>0]=m.getDate(),i()[d+16>>>2>>>0]=m.getMonth(),i()[d+20>>>2>>>0]=m.getYear(),d=m.getTime(),BigInt(isNaN(d)?-1:d/1e3)}function cl(d,m,y,x,_,O,R){return g?We(16,1,d,m,y,x,_,O,R):-52}function fl(d,m,y,x,_,O){if(g)return We(17,1,d,m,y,x,_,O)}function R0(d,m,y,x){d>>>=0,m>>>=0,y>>>=0,x>>>=0;var _=new Date().getFullYear(),O=new Date(_,0,1),R=new Date(_,6,1);_=O.getTimezoneOffset();var M=R.getTimezoneOffset(),j=Math.max(_,M);s()[d>>>2>>>0]=60*j,i()[m>>>2>>>0]=+(_!=M),O=(d=Y=>Y.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:"short"}).split(" ")[1])(O),R=d(R),M<_?(rn(O,y,17),rn(R,x,17)):(rn(O,x,17),rn(R,y,17))}var ha=[],dl=(d,m)=>{ha.length=0;for(var y;y=n()[d++>>>0];){var x=y!=105;m+=(x&=y!=112)&&m%8?4:0,ha.push(y==112?s()[m>>>2>>>0]:y==106?V[m>>>3]:y==105?i()[m>>>2>>>0]:u()[m>>>3>>>0]),m+=x?8:4}return ha};function N0(d,m,y){return d>>>=0,m=dl(m>>>0,y>>>0),ta[d](...m)}function z0(d,m,y){return d>>>=0,m=dl(m>>>0,y>>>0),ta[d](...m)}var F0=()=>{},M0=()=>Date.now();function V0(d,m){return F(ot(d>>>0,m>>>0))}var pl,G0=()=>{throw Er+=1,"unwind"};function U0(){return 4294901760}pl=()=>performance.timeOrigin+performance.now();var W0=()=>navigator.hardwareConcurrency;function H0(d){d>>>=0;var m=n().length;if(d<=m||4294901760=y;y*=2){var x=m*(1+.2/y);x=Math.min(x,d+100663296);var _=Math;x=Math.max(d,x);e:{_=(_.min.call(_,4294901760,x+(65536-x%65536)%65536)-ae.buffer.byteLength+65535)/65536;try{ae.grow(_),Fe();var O=1;break e}catch{}O=void 0}if(O)return!0}return!1}var co=()=>(An("Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER"),0),on={},ml=d=>{d.forEach(m=>{var y=co();y&&(on[y]=m)})};function q0(){var 
d=Error().stack.toString().split(` -`);return d[0]=="Error"&&d.shift(),ml(d),on.Lb=co(),on.Yb=d,on.Lb}function K0(d,m,y){if(d>>>=0,m>>>=0,on.Lb==d)var x=on.Yb;else(x=Error().stack.toString().split(` -`))[0]=="Error"&&x.shift(),ml(x);for(var _=3;x[_]&&co()!=d;)++_;for(d=0;d>>2>>>0]=co();return d}var ga,ba={},hl=()=>{if(!ga){var d,m={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:(typeof navigator=="object"&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8",_:P||"./this.program"};for(d in ba)ba[d]===void 0?delete m[d]:m[d]=ba[d];var y=[];for(d in m)y.push(`${d}=${m[d]}`);ga=y}return ga};function gl(d,m){if(g)return We(18,1,d,m);d>>>=0,m>>>=0;var y=0;return hl().forEach((x,_)=>{var O=m+y;for(_=s()[d+4*_>>>2>>>0]=O,O=0;O>>0]=x.charCodeAt(O);e()[_>>>0]=0,y+=x.length+1}),0}function bl(d,m){if(g)return We(19,1,d,m);d>>>=0,m>>>=0;var y=hl();s()[d>>>2>>>0]=y.length;var x=0;return y.forEach(_=>x+=_.length+1),s()[m>>>2>>>0]=x,0}function yl(d){return g?We(20,1,d):52}function xl(d,m,y,x){return g?We(21,1,d,m,y,x):52}function vl(d,m,y,x){return g?We(22,1,d,m,y,x):70}var j0=[null,[],[]];function wl(d,m,y,x){if(g)return We(23,1,d,m,y,x);m>>>=0,y>>>=0,x>>>=0;for(var _=0,O=0;O>>2>>>0],M=s()[m+4>>>2>>>0];m+=8;for(var j=0;j>>0],ne=j0[d];Y===0||Y===10?((d===1?K:F)(Ru(ne,0)),ne.length=0):ne.push(Y)}_+=M}return s()[x>>>2>>>0]=_,0}var Tl=[31,29,31,30,31,30,31,31,30,31,30,31],_l=[31,28,31,30,31,30,31,31,30,31,30,31],X0=(d,m)=>{e().set(d,m>>>0)};function Il(d,m,y,x){function _(L,fe,Le){for(L=typeof L=="number"?L.toString():L||"";L.lengthzl?-1:0kr-L.getDate())){L.setDate(L.getDate()+fe);break}fe-=kr-L.getDate()+1,L.setDate(1),11>Le?L.setMonth(Le+1):(L.setMonth(0),L.setFullYear(L.getFullYear()+1))}return Le=new Date(L.getFullYear()+1,0,4),fe=M(new Date(L.getFullYear(),0,4)),Le=M(Le),0>=R(fe,L)?0>=R(Le,L)?L.getFullYear()+1:L.getFullYear():L.getFullYear()-1}d>>>=0,m>>>=0,y>>>=0,x>>>=0;var Y=s()[x+40>>>2>>>0];for(var ne in x={cc:i()[x>>>2>>>0],bc:i()[x+4>>>2>>>0],Db:i()[x+8>>>2>>>0],Hb:i()[x+12>>>2>>>0],Eb:i()[x+16>>>2>>>0],zb:i()[x+20>>>2>>>0],rb:i()[x+24>>>2>>>0],yb:i()[x+28>>>2>>>0],kc:i()[x+32>>>2>>>0],ac:i()[x+36>>>2>>>0],dc:Y?ot(Y):""},y=ot(y),Y={"%c":"%a %b %d %H:%M:%S %Y","%D":"%m/%d/%y","%F":"%Y-%m-%d","%h":"%b","%r":"%I:%M:%S %p","%R":"%H:%M","%T":"%H:%M:%S","%x":"%m/%d/%y","%X":"%H:%M:%S","%Ec":"%c","%EC":"%C","%Ex":"%m/%d/%y","%EX":"%H:%M:%S","%Ey":"%y","%EY":"%Y","%Od":"%d","%Oe":"%e","%OH":"%H","%OI":"%I","%Om":"%m","%OM":"%M","%OS":"%S","%Ou":"%u","%OU":"%U","%OV":"%V","%Ow":"%w","%OW":"%W","%Oy":"%y"})y=y.replace(new RegExp(ne,"g"),Y[ne]);var Ae="Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),Ce="January February March April May June July August September October November December".split(" ");for(ne in Y={"%a":L=>Ae[L.rb].substring(0,3),"%A":L=>Ae[L.rb],"%b":L=>Ce[L.Eb].substring(0,3),"%B":L=>Ce[L.Eb],"%C":L=>O((L.zb+1900)/100|0,2),"%d":L=>O(L.Hb,2),"%e":L=>_(L.Hb,2," "),"%g":L=>j(L).toString().substring(2),"%G":j,"%H":L=>O(L.Db,2),"%I":L=>((L=L.Db)==0?L=12:12{for(var fe=0,Le=0;Le<=L.Eb-1;fe+=(nn(L.zb+1900)?Tl:_l)[Le++]);return O(L.Hb+fe,3)},"%m":L=>O(L.Eb+1,2),"%M":L=>O(L.bc,2),"%n":()=>` -`,"%p":L=>0<=L.Db&&12>L.Db?"AM":"PM","%S":L=>O(L.cc,2),"%t":()=>" ","%u":L=>L.rb||7,"%U":L=>O(Math.floor((L.yb+7-L.rb)/7),2),"%V":L=>{var fe=Math.floor((L.yb+7-(L.rb+6)%7)/7);if(2>=(L.rb+371-L.yb-2)%7&&fe++,fe)fe==53&&((Le=(L.rb+371-L.yb)%7)==4||Le==3&&nn(L.zb)||(fe=1));else{fe=52;var 
Le=(L.rb+7-L.yb-1)%7;(Le==4||Le==5&&nn(L.zb%400-1))&&fe++}return O(fe,2)},"%w":L=>L.rb,"%W":L=>O(Math.floor((L.yb+7-(L.rb+6)%7)/7),2),"%y":L=>(L.zb+1900).toString().substring(2),"%Y":L=>L.zb+1900,"%z":L=>{var fe=0<=(L=L.ac);return L=Math.abs(L)/60,(fe?"+":"-")+("0000"+(L/60*100+L%60)).slice(-4)},"%Z":L=>L.dc,"%%":()=>"%"},y=y.replace(/%%/g,"\0\0"),Y)y.includes(ne)&&(y=y.replace(new RegExp(ne,"g"),Y[ne](x)));return ne=function(L){var fe=Array(ia(L)+1);return Fu(L,fe,0,fe.length),fe}(y=y.replace(/\0\0/g,"%")),ne.length>m?0:(X0(ne,d),ne.length-1)}function Z0(d,m,y,x){return Il(d>>>0,m>>>0,y>>>0,x>>>0)}g||function(){for(var d=c.numThreads-1;d--;)ku();Ke.unshift(()=>{rt++,function(m){g?m():Promise.all(gr.map(Cu)).then(m)}(()=>no())})}();for(var Sl=Array(256),fo=0;256>fo;++fo)Sl[fo]=String.fromCharCode(fo);Yu=Sl,br=c.BindingError=class extends Error{constructor(d){super(d),this.name="BindingError"}},c.InternalError=class extends Error{constructor(d){super(d),this.name="InternalError"}},cr.push(0,1,void 0,1,null,1,!0,1,!1,1),c.count_emval_handles=()=>cr.length/2-5-ua.length;var Y0=[$u,Au,Du,Nu,zu,Mu,Vu,Gu,Uu,Wu,Hu,qu,Ku,ju,Xu,Zu,cl,fl,gl,bl,yl,xl,vl,wl],te=function(){function d(y,x){return te=y.exports,te=function(){var _=te,O={};for(let[R,M]of Object.entries(_))O[R]=typeof M=="function"?(...j)=>{so.push(R);try{return M(...j)}finally{tt||(so.pop(),Zt&&xr===1&&so.length===0&&(xr=0,Er+=1,ao(Bl),typeof Fibers<"u"&&Fibers.lc()))}}:M;return O}(),te=function(){var _=te,O=M=>j=>M(j)>>>0,R=M=>()=>M()>>>0;return(_=Object.assign({},_)).za=O(_.za),_.cb=R(_.cb),_.db=O(_.db),_.emscripten_main_runtime_thread_id=R(_.emscripten_main_runtime_thread_id),_.pb=O(_.pb),_.qb=R(_.qb),_}(),Pu.push(te.fb),tn.unshift(te.ya),qe=x,no(),te}var m=Iu();if(rt++,c.instantiateWasm)try{return c.instantiateWasm(m,d)}catch(y){F(`Module.instantiateWasm callback failed with error: ${y}`),f(y)}return xu||=c.locateFile?vu("ort-wasm-simd-threaded.jsep.wasm")?"ort-wasm-simd-threaded.jsep.wasm":c.locateFile?c.locateFile("ort-wasm-simd-threaded.jsep.wasm",N):N+"ort-wasm-simd-threaded.jsep.wasm":new URL("ort-wasm-simd-threaded.jsep.wasm",import.meta.url).href,function(y,x){var _=xu;return typeof WebAssembly.instantiateStreaming!="function"||vu(_)||wu(_)||typeof fetch!="function"?_u(_,y,x):fetch(_,{credentials:"same-origin"}).then(O=>WebAssembly.instantiateStreaming(O,y).then(x,function(R){return F(`wasm streaming compile failed: ${R}`),F("falling back to ArrayBuffer 
instantiation"),_u(_,y,x)}))}(m,function(y){d(y.instance,y.module)}).catch(f),{}}(),$l=d=>($l=te.za)(d),Al=()=>(Al=te.Aa)();c._OrtInit=(d,m)=>(c._OrtInit=te.Ba)(d,m),c._OrtGetLastError=(d,m)=>(c._OrtGetLastError=te.Ca)(d,m),c._OrtCreateSessionOptions=(d,m,y,x,_,O,R,M,j,Y)=>(c._OrtCreateSessionOptions=te.Da)(d,m,y,x,_,O,R,M,j,Y),c._OrtAppendExecutionProvider=(d,m)=>(c._OrtAppendExecutionProvider=te.Ea)(d,m),c._OrtAddFreeDimensionOverride=(d,m,y)=>(c._OrtAddFreeDimensionOverride=te.Fa)(d,m,y),c._OrtAddSessionConfigEntry=(d,m,y)=>(c._OrtAddSessionConfigEntry=te.Ga)(d,m,y),c._OrtReleaseSessionOptions=d=>(c._OrtReleaseSessionOptions=te.Ha)(d),c._OrtCreateSession=(d,m,y)=>(c._OrtCreateSession=te.Ia)(d,m,y),c._OrtReleaseSession=d=>(c._OrtReleaseSession=te.Ja)(d),c._OrtGetInputOutputCount=(d,m,y)=>(c._OrtGetInputOutputCount=te.Ka)(d,m,y),c._OrtGetInputName=(d,m)=>(c._OrtGetInputName=te.La)(d,m),c._OrtGetOutputName=(d,m)=>(c._OrtGetOutputName=te.Ma)(d,m),c._OrtFree=d=>(c._OrtFree=te.Na)(d),c._OrtCreateTensor=(d,m,y,x,_,O)=>(c._OrtCreateTensor=te.Oa)(d,m,y,x,_,O),c._OrtGetTensorData=(d,m,y,x,_)=>(c._OrtGetTensorData=te.Pa)(d,m,y,x,_),c._OrtReleaseTensor=d=>(c._OrtReleaseTensor=te.Qa)(d),c._OrtCreateRunOptions=(d,m,y,x)=>(c._OrtCreateRunOptions=te.Ra)(d,m,y,x),c._OrtAddRunConfigEntry=(d,m,y)=>(c._OrtAddRunConfigEntry=te.Sa)(d,m,y),c._OrtReleaseRunOptions=d=>(c._OrtReleaseRunOptions=te.Ta)(d),c._OrtCreateBinding=d=>(c._OrtCreateBinding=te.Ua)(d),c._OrtBindInput=(d,m,y)=>(c._OrtBindInput=te.Va)(d,m,y),c._OrtBindOutput=(d,m,y,x)=>(c._OrtBindOutput=te.Wa)(d,m,y,x),c._OrtClearBoundOutputs=d=>(c._OrtClearBoundOutputs=te.Xa)(d),c._OrtReleaseBinding=d=>(c._OrtReleaseBinding=te.Ya)(d),c._OrtRunWithBinding=(d,m,y,x,_)=>(c._OrtRunWithBinding=te.Za)(d,m,y,x,_),c._OrtRun=(d,m,y,x,_,O,R,M)=>(c._OrtRun=te._a)(d,m,y,x,_,O,R,M),c._OrtEndProfiling=d=>(c._OrtEndProfiling=te.$a)(d),c._JsepOutput=(d,m,y)=>(c._JsepOutput=te.ab)(d,m,y),c._JsepGetNodeName=d=>(c._JsepGetNodeName=te.bb)(d);var po,an=()=>(an=te.cb)(),mo=c._malloc=d=>(mo=c._malloc=te.db)(d),Yt=c._free=d=>(Yt=c._free=te.eb)(d),ya=(d,m,y,x,_,O)=>(ya=te.hb)(d,m,y,x,_,O),Pl=()=>(Pl=te.ib)(),Ol=(d,m,y,x,_)=>(Ol=te.jb)(d,m,y,x,_),xa=d=>(xa=te.kb)(d),ho=d=>(ho=te.lb)(d),El=()=>(El=te.mb)(),Cl=(d,m)=>(Cl=te.nb)(d,m),go=d=>(go=te.ob)(d),va=d=>(va=te.pb)(d),wa=()=>(wa=te.qb)(),kl=c.dynCall_ii=(d,m)=>(kl=c.dynCall_ii=te.sb)(d,m),Dl=d=>(Dl=te.tb)(d),Bl=()=>(Bl=te.ub)(),Ll=d=>(Ll=te.vb)(d),Rl=()=>(Rl=te.wb)();function Nl(){if(!(0wa(),c.stackRestore=d=>go(d),c.stackAlloc=d=>va(d),c.UTF8ToString=ot,c.stringToUTF8=rn,c.lengthBytesUTF8=ia,hr=function d(){po||Nl(),po||(hr=d)},Nl(),p}),k_=Ih;globalThis.self?.name==="em-pthread"&&Ih()});var wn,D_,B_,L_,Ah,Ph,R_,Oh,Hn=C(()=>{"use strict";hi();wn=!1?void 0:import.meta.url??(typeof document<"u"?document.currentScript?.src:typeof self<"u"?self.location?.href:void 0),D_=!1||typeof location>"u"?void 0:location.origin,B_=(r,e)=>{try{let n=e??wn;return(n?new URL(r,n):new URL(r)).origin===D_}catch{return!1}},L_=async r=>{let n=await(await fetch(r,{credentials:"same-origin"})).blob();return URL.createObjectURL(n)},Ah=(_h(),Pn(Th)).default,Ph=async()=>{if(!wn)throw new Error("Failed to load proxy worker: cannot determine the script source URL.");if(B_(wn))return[void 0,Ah()];let r=await L_(wn);return[r,Ah(r)]},R_=($h(),Pn(Sh)).default,Oh=async(r,e,n)=>[void 0,R_]});var Bs,Ls,Ii,Eh,N_,z_,gi,Ye,Kr=C(()=>{"use strict";Hn();Ls=!1,Ii=!1,Eh=!1,N_=()=>{if(typeof SharedArrayBuffer>"u")return!1;try{return typeof MessageChannel<"u"&&new 
MessageChannel().port1.postMessage(new SharedArrayBuffer(1)),WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,5,4,1,3,1,1,10,11,1,9,0,65,0,254,16,2,0,26,11]))}catch{return!1}},z_=()=>{try{return WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,10,30,1,28,0,65,0,253,15,253,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,253,186,1,26,11]))}catch{return!1}},gi=async r=>{if(Ls)return Promise.resolve();if(Ii)throw new Error("multiple calls to 'initializeWebAssembly()' detected.");if(Eh)throw new Error("previous call to 'initializeWebAssembly()' failed.");Ii=!0;let e=r.initTimeout,n=r.numThreads;if(!z_())throw new Error("WebAssembly SIMD is not supported in the current environment.");let t=N_();n>1&&!t&&(typeof self<"u"&&!self.crossOriginIsolated&&console.warn("env.wasm.numThreads is set to "+n+", but this will not work unless you enable crossOriginIsolated mode. See https://web.dev/cross-origin-isolation-guide/ for more info."),console.warn("WebAssembly multi-threading is not supported in the current environment. Falling back to single-threading."),r.numThreads=n=1);let o=r.wasmPaths,i=typeof o=="string"?o:void 0,s=o?.mjs,a=s?.href??s,u=o?.wasm,l=u?.href??u,[f,c]=await Oh(a,i,n>1),p=!1,b=[];if(e>0&&b.push(new Promise(h=>{setTimeout(()=>{p=!0,h()},e)})),b.push(new Promise((h,g)=>{let T={numThreads:n};(l||i)&&(T.locateFile=(w,v)=>l??(i??v)+w),c(T).then(w=>{Ii=!1,Ls=!0,Bs=w,h(),f&&URL.revokeObjectURL(f)},w=>{Ii=!1,Eh=!0,g(w)})})),await Promise.race(b),p)throw new Error(`WebAssembly backend initializing failed due to timeout: ${e}ms`)},Ye=()=>{if(Ls&&Bs)return Bs;throw new Error("WebAssembly is not initialized yet.")}});var nt,Kn,Ue,Si=C(()=>{"use strict";Kr();nt=(r,e)=>{let n=Ye(),t=n.lengthBytesUTF8(r)+1,o=n._malloc(t);return n.stringToUTF8(r,o,t),e.push(o),o},Kn=(r,e,n,t)=>{if(typeof r=="object"&&r!==null){if(n.has(r))throw new Error("Circular reference in options");n.add(r)}Object.entries(r).forEach(([o,i])=>{let s=e?e+o:o;if(typeof i=="object")Kn(i,s+".",n,t);else if(typeof i=="string"||typeof i=="number")t(s,i.toString());else if(typeof i=="boolean")t(s,i?"1":"0");else throw new Error(`Can't handle extra config type: ${typeof i}`)})},Ue=r=>{let e=Ye(),n=e.stackSave();try{let t=e.stackAlloc(8);e._OrtGetLastError(t,t+4);let o=e.HEAP32[t/4],i=e.HEAPU32[t/4+1],s=i?e.UTF8ToString(i):"";throw new Error(`${r} ERROR_CODE: ${o}, ERROR_MESSAGE: ${s}`)}finally{e.stackRestore(n)}}});var Ch,kh=C(()=>{"use strict";Kr();Si();Ch=r=>{let e=Ye(),n=0,t=[],o=r||{};try{if(r?.logSeverityLevel===void 0)o.logSeverityLevel=2;else if(typeof r.logSeverityLevel!="number"||!Number.isInteger(r.logSeverityLevel)||r.logSeverityLevel<0||r.logSeverityLevel>4)throw new Error(`log serverity level is not valid: ${r.logSeverityLevel}`);if(r?.logVerbosityLevel===void 0)o.logVerbosityLevel=0;else if(typeof r.logVerbosityLevel!="number"||!Number.isInteger(r.logVerbosityLevel))throw new Error(`log verbosity level is not valid: ${r.logVerbosityLevel}`);r?.terminate===void 0&&(o.terminate=!1);let i=0;return r?.tag!==void 0&&(i=nt(r.tag,t)),n=e._OrtCreateRunOptions(o.logSeverityLevel,o.logVerbosityLevel,!!o.terminate,i),n===0&&Ue("Can't create run options."),r?.extra!==void 0&&Kn(r.extra,"",new WeakSet,(s,a)=>{let u=nt(s,t),l=nt(a,t);e._OrtAddRunConfigEntry(n,u,l)!==0&&Ue(`Can't set a run config entry: ${s} - ${a}.`)}),[n,t]}catch(i){throw n!==0&&e._OrtReleaseRunOptions(n),t.forEach(s=>e._free(s)),i}}});var F_,M_,V_,G_,Dh,Bh=C(()=>{"use 
strict";Kr();Si();F_=r=>{switch(r){case"disabled":return 0;case"basic":return 1;case"extended":return 2;case"all":return 99;default:throw new Error(`unsupported graph optimization level: ${r}`)}},M_=r=>{switch(r){case"sequential":return 0;case"parallel":return 1;default:throw new Error(`unsupported execution mode: ${r}`)}},V_=r=>{r.extra||(r.extra={}),r.extra.session||(r.extra.session={});let e=r.extra.session;e.use_ort_model_bytes_directly||(e.use_ort_model_bytes_directly="1"),r.executionProviders&&r.executionProviders.some(n=>(typeof n=="string"?n:n.name)==="webgpu")&&(r.enableMemPattern=!1)},G_=(r,e,n)=>{for(let t of e){let o=typeof t=="string"?t:t.name;switch(o){case"webnn":if(o="WEBNN",typeof t!="string"){let a=t?.deviceType;if(a){let u=nt("deviceType",n),l=nt(a,n);Ye()._OrtAddSessionConfigEntry(r,u,l)!==0&&Ue(`Can't set a session config entry: 'deviceType' - ${a}.`)}}break;case"webgpu":if(o="JS",typeof t!="string"){let s=t;if(s?.preferredLayout){if(s.preferredLayout!=="NCHW"&&s.preferredLayout!=="NHWC")throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${s.preferredLayout}`);let a=nt("preferredLayout",n),u=nt(s.preferredLayout,n);Ye()._OrtAddSessionConfigEntry(r,a,u)!==0&&Ue(`Can't set a session config entry: 'preferredLayout' - ${s.preferredLayout}.`)}}break;case"wasm":case"cpu":continue;default:throw new Error(`not supported execution provider: ${o}`)}let i=nt(o,n);Ye()._OrtAppendExecutionProvider(r,i)!==0&&Ue(`Can't append execution provider: ${o}.`)}},Dh=r=>{let e=Ye(),n=0,t=[],o=r||{};V_(o);try{let i=F_(o.graphOptimizationLevel??"all"),s=M_(o.executionMode??"sequential"),a=typeof o.logId=="string"?nt(o.logId,t):0,u=o.logSeverityLevel??2;if(!Number.isInteger(u)||u<0||u>4)throw new Error(`log serverity level is not valid: ${u}`);let l=o.logVerbosityLevel??0;if(!Number.isInteger(l)||l<0||l>4)throw new Error(`log verbosity level is not valid: ${l}`);let f=typeof o.optimizedModelFilePath=="string"?nt(o.optimizedModelFilePath,t):0;if(n=e._OrtCreateSessionOptions(i,!!o.enableCpuMemArena,!!o.enableMemPattern,s,!!o.enableProfiling,0,a,u,l,f),n===0&&Ue("Can't create session options."),o.executionProviders&&G_(n,o.executionProviders,t),o.enableGraphCapture!==void 0){if(typeof o.enableGraphCapture!="boolean")throw new Error(`enableGraphCapture must be a boolean value: ${o.enableGraphCapture}`);let c=nt("enableGraphCapture",t),p=nt(o.enableGraphCapture.toString(),t);e._OrtAddSessionConfigEntry(n,c,p)!==0&&Ue(`Can't set a session config entry: 'enableGraphCapture' - ${o.enableGraphCapture}.`)}if(o.freeDimensionOverrides)for(let[c,p]of Object.entries(o.freeDimensionOverrides)){if(typeof c!="string")throw new Error(`free dimension override name must be a string: ${c}`);if(typeof p!="number"||!Number.isInteger(p)||p<0)throw new Error(`free dimension override value must be a non-negative integer: ${p}`);let b=nt(c,t);e._OrtAddFreeDimensionOverride(n,b,p)!==0&&Ue(`Can't set a free dimension override: ${c} - ${p}.`)}return o.extra!==void 0&&Kn(o.extra,"",new WeakSet,(c,p)=>{let b=nt(c,t),h=nt(p,t);e._OrtAddSessionConfigEntry(n,b,h)!==0&&Ue(`Can't set a session config entry: ${c} - ${p}.`)}),[n,t]}catch(i){throw n!==0&&e._OrtReleaseSessionOptions(n),t.forEach(s=>e._free(s)),i}}});var Rs,Ar,jr,$i,jn,Ai,Ns,ue=C(()=>{"use strict";Rs=r=>{switch(r){case"int8":return 3;case"uint8":return 2;case"bool":return 9;case"int16":return 5;case"uint16":return 4;case"int32":return 6;case"uint32":return 12;case"float16":return 10;case"float32":return 1;case"float64":return 11;case"string":return 
8;case"int64":return 7;case"uint64":return 13;default:throw new Error(`unsupported data type: ${r}`)}},Ar=r=>{switch(r){case 3:return"int8";case 2:return"uint8";case 9:return"bool";case 5:return"int16";case 4:return"uint16";case 6:return"int32";case 12:return"uint32";case 10:return"float16";case 1:return"float32";case 11:return"float64";case 8:return"string";case 7:return"int64";case 13:return"uint64";default:throw new Error(`unsupported data type: ${r}`)}},jr=r=>[void 0,4,1,1,2,2,4,8,void 0,1,2,8,4,8,void 0,void 0,void 0][r],$i=r=>{switch(r){case"float16":return typeof Float16Array<"u"&&Float16Array.from?Float16Array:Uint16Array;case"float32":return Float32Array;case"uint8":return Uint8Array;case"int8":return Int8Array;case"uint16":return Uint16Array;case"int16":return Int16Array;case"int32":return Int32Array;case"bool":return Uint8Array;case"float64":return Float64Array;case"uint32":return Uint32Array;case"int64":return BigInt64Array;case"uint64":return BigUint64Array;default:throw new Error(`unsupported type: ${r}`)}},jn=r=>{switch(r){case"verbose":return 0;case"info":return 1;case"warning":return 2;case"error":return 3;case"fatal":return 4;default:throw new Error(`unsupported logging level: ${r}`)}},Ai=r=>r==="float32"||r==="float16"||r==="int32"||r==="int64"||r==="uint32"||r==="uint8"||r==="bool",Ns=r=>{switch(r){case"none":return 0;case"cpu":return 1;case"cpu-pinned":return 2;case"texture":return 3;case"gpu-buffer":return 4;default:throw new Error(`unsupported data location: ${r}`)}}});var Xn,zs=C(()=>{"use strict";hi();Xn=async r=>{if(typeof r=="string")if(!1)try{let{readFile:e}=Ta("node:fs/promises");return new Uint8Array(await e(r))}catch(e){if(e.code==="ERR_FS_FILE_TOO_LARGE"){let{createReadStream:n}=Ta("node:fs"),t=n(r),o=[];for await(let i of t)o.push(i);return new Uint8Array(Buffer.concat(o))}throw e}else{let e=await fetch(r);if(!e.ok)throw new Error(`failed to load external data file: ${r}`);let n=e.headers.get("Content-Length"),t=n?parseInt(n,10):0;if(t<1073741824)return new Uint8Array(await e.arrayBuffer());{if(!e.body)throw new Error(`failed to load external data file: ${r}, no response body.`);let o=e.body.getReader(),i;try{i=new ArrayBuffer(t)}catch(a){if(a instanceof RangeError){let u=Math.ceil(t/65536);i=new WebAssembly.Memory({initial:u,maximum:u}).buffer}else throw a}let s=0;for(;;){let{done:a,value:u}=await o.read();if(a)break;let l=u.byteLength;new Uint8Array(i,s,l).set(u),s+=l}return new Uint8Array(i,0,t)}}else return r instanceof Blob?new Uint8Array(await r.arrayBuffer()):r instanceof Uint8Array?r:new Uint8Array(r)}});var U_,W_,Lh,Rh,Nh,H_,Ne,mr=C(()=>{"use strict";ue();U_=["V","I","W","E","F"],W_=(r,e)=>{console.log(`[${U_[r]},${new Date().toISOString()}]${e}`)},Nh=(r,e)=>{Lh=r,Rh=e},H_=(r,e)=>{let n=jn(r),t=jn(Lh);n>=t&&W_(n,typeof e=="function"?e():e)},Ne=(...r)=>{Rh&&H_(...r)}});var zh,Fh=C(()=>{"use strict";ue();zh=(r,e)=>new($i(e))(r)});var Pi=C(()=>{"use strict"});var Mh,Fs,Ms,q_,K_,Vh,Gs,Vs,Uh,Wh=C(()=>{"use strict";mr();Pi();Mh=new Map([[64,250],[128,200],[256,200],[512,200],[2048,230],[4096,200],[8192,50],[16384,50],[32768,50],[65536,50],[131072,50],[262144,50],[524288,50],[1048576,50],[2097152,30],[4194304,20],[8388608,10],[12582912,10],[16777216,10],[26214400,15],[33554432,22],[44236800,2],[58982400,6],[67108864,6],[134217728,6],[167772160,6]]),Fs=[],Ms=r=>Math.ceil(r/16)*16,q_=r=>{for(let e=0;eK_++,Gs=async(r,e,n,t)=>{let o=Ms(n),i=r.device.createBuffer({size:o,usage:GPUBufferUsage.COPY_DST|GPUBufferUsage.MAP_READ});try{let 
s=r.getCommandEncoder();r.endComputePass(),s.copyBufferToBuffer(e,0,i,0,o),r.flush(),await i.mapAsync(GPUMapMode.READ);let a=i.getMappedRange();if(t){let u=t();return u.set(new Uint8Array(a,0,n)),u}else return new Uint8Array(a.slice(0,n))}finally{i.destroy()}},Vs=class{constructor(e){this.backend=e;this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.buffersForUploadingPending=[],this.buffersPending=[],this.externalBuffers=new Map,this.capturedPendingBuffers=new Map;for(let[n]of Mh)Fs.push(n),this.freeBuffers.set(n,[]),this.freeUniformBuffers.set(n,[])}upload(e,n){let t=n.buffer,o=n.byteOffset,i=n.byteLength,s=Ms(i),a=this.storageCache.get(e);if(!a)throw new Error("gpu data for uploading does not exist");if(a.originalSize!==i)throw new Error(`inconsistent data size. gpu data size=${a.originalSize}, data size=${i}`);let u=this.backend.device.createBuffer({mappedAtCreation:!0,size:s,usage:GPUBufferUsage.MAP_WRITE|GPUBufferUsage.COPY_SRC}),l=u.getMappedRange();new Uint8Array(l).set(new Uint8Array(t,o,i)),u.unmap();let f=this.backend.getCommandEncoder();this.backend.endComputePass(),f.copyBufferToBuffer(u,0,a.gpuData.buffer,0,s),Ne("verbose",()=>`[WebGPU] GpuDataManager.upload(id=${e})`),this.buffersForUploadingPending.push(u)}memcpy(e,n){let t=this.storageCache.get(e);if(!t)throw new Error("source gpu data for memcpy does not exist");let o=this.storageCache.get(n);if(!o)throw new Error("destination gpu data for memcpy does not exist");if(t.originalSize!==o.originalSize)throw new Error("inconsistent source and destination gpu data size");let i=Ms(t.originalSize),s=this.backend.getCommandEncoder();this.backend.endComputePass(),s.copyBufferToBuffer(t.gpuData.buffer,0,o.gpuData.buffer,0,i)}registerExternalBuffer(e,n,t){let o;if(t){if(o=this.externalBuffers.get(t),o===void 0)throw new Error("previous buffer is not registered");if(e===t)return Ne("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${n}) => id=${o}, buffer is the same, skip.`),o;if(this.backend.capturedCommandList.has(this.backend.currentSessionId))throw new Error(`Registering a different external buffer under graph capture mode is not supported yet. 
- Please use the previous external buffer!`);this.externalBuffers.delete(t)}else o=Vh();return this.storageCache.set(o,{gpuData:{id:o,type:0,buffer:e},originalSize:n}),this.externalBuffers.set(e,o),Ne("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${n}) => id=${o}, registered.`),o}unregisterExternalBuffer(e){let n=this.externalBuffers.get(e);n!==void 0&&(this.storageCache.delete(n),this.externalBuffers.delete(e),Ne("verbose",()=>`[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${n}`))}create(e,n=GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_SRC|GPUBufferUsage.COPY_DST){let t=q_(e),o,i=(n&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE,s=(n&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM;if(i||s){let l=(i?this.freeBuffers:this.freeUniformBuffers).get(t);l?l.length>0?o=l.pop():o=this.backend.device.createBuffer({size:t,usage:n}):o=this.backend.device.createBuffer({size:t,usage:n})}else o=this.backend.device.createBuffer({size:t,usage:n});let a={id:Vh(),type:0,buffer:o};return this.storageCache.set(a.id,{gpuData:a,originalSize:e}),Ne("verbose",()=>`[WebGPU] GpuDataManager.create(size=${e}) => id=${a.id}`),a}get(e){return this.storageCache.get(e)?.gpuData}release(e){let n=this.storageCache.get(e);if(!n)throw new Error("releasing data does not exist");return Ne("verbose",()=>`[WebGPU] GpuDataManager.release(id=${e}), gpuDataId=${n.gpuData.id}`),this.storageCache.delete(e),this.buffersPending.push(n.gpuData.buffer),n.originalSize}async download(e,n){let t=this.storageCache.get(e);if(!t)throw new Error("data does not exist");await Gs(this.backend,t.gpuData.buffer,t.originalSize,n)}refreshPendingBuffers(){for(let e of this.buffersForUploadingPending)e.destroy();if(this.buffersForUploadingPending=[],this.buffersPending.length!==0)if(this.backend.sessionStatus==="default"){for(let e of this.buffersPending){let n=Mh.get(e.size);if((e.usage&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE){let t=this.freeBuffers.get(e.size)||[];n===void 0||t.length>=n?e.destroy():t.push(e)}else if((e.usage&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM){let t=this.freeUniformBuffers.get(e.size)||[];n===void 0||t.length>=n?e.destroy():t.push(e)}else e.destroy()}this.buffersPending=[]}else{let e=this.capturedPendingBuffers.get(this.backend.currentSessionId);e||(e=[],this.capturedPendingBuffers.set(this.backend.currentSessionId,e));for(let n of this.buffersPending)e.push(n);this.buffersPending=[]}}dispose(){this.freeBuffers.forEach(e=>{e.forEach(n=>{n.destroy()})}),this.freeUniformBuffers.forEach(e=>{e.forEach(n=>{n.destroy()})}),this.storageCache.forEach(e=>{e.gpuData.buffer.destroy()}),this.capturedPendingBuffers.forEach(e=>{e.forEach(n=>{n.destroy()})}),this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.capturedPendingBuffers=new Map}onReleaseSession(e){let n=this.capturedPendingBuffers.get(e);n&&(n.forEach(t=>{t.destroy()}),this.capturedPendingBuffers.delete(e))}},Uh=(...r)=>new Vs(...r)});var Us,de,et=C(()=>{"use strict";Us=class{constructor(e){Object.assign(this,e)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(e=>`${this[e]}`).join(";")),this.key}},de=r=>new Us(r)});var Ws,nr,B,Xr,Oi,Ei,Ci,ye=C(()=>{"use strict";Ws=class{static calcMatMulShape(e,n){return e[1]!==n[0]?void 0:[e[0],n[1]]}},nr=class{static calcShape(e,n,t=!1){let o=e.length,i=n.length;if(o===0)return n;if(i===0)return e;let s=Math.max(e.length,n.length),a=new Array(s);if(t){if(o<2||i<2)return;let 
u=Ws.calcMatMulShape([e[o-2],e[o-1]],[n[i-2],n[i-1]]);if(u===void 0)return;[a[s-2],a[s-1]]=u}for(let u=t?3:1;u<=s;u++){let l=o-u<0?1:e[o-u],f=i-u<0?1:n[i-u];if(l!==f&&l>1&&f>1)return;let c=Math.max(l,f);if(l&&f)a[s-u]=Math.max(l,f);else{if(c>1)return;a[s-u]=0}}return a}static isValidBroadcast(e,n){let t=e.length,o=n.length;if(t>o)return!1;for(let i=1;i<=t;i++)if(e[t-i]!==1&&e[t-i]!==n[o-i])return!1;return!0}},B=class r{static size(e){return r.getSizeFromDimensionRange(e,0,e.length)}static convertShape(e,n=4){let t=e.length;if(t===0)return[];let o=new Array(t),i=t-1;for(;i>=0;){if(e[i]%n===0){o[i]=e[i]/n;break}if(n%e[i]!==0)throw new Error("cannot convert shape");o[i]=1,n/=e[i],i--}for(i--;i>=0;i--)o[i]=e[i];return o}static sizeFromDimension(e,n){if(n<0||n>e.length)throw new Error(`invalid dimension of ${n} for sizeFromDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,n,e.length)}static sizeToDimension(e,n){if(n<0||n>e.length)throw new Error(`invalid dimension of ${n} for sizeToDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,0,n)}static getSizeFromDimensionRange(e,n,t){let o=1;for(let i=n;i=0;--o)t[o]=t[o+1]*e[o+1];return t}static normalizeAxis(e,n){if(e<-n&&e>=n)throw new Error("unsupported axis for this operation.");return e<0?e+n:e}static normalizeAxes(e,n){return e.map(t=>this.normalizeAxis(t,n??e.length))}static sortBasedOnPerm(e,n){return n?n.map(t=>e[t]):e.slice().reverse()}static padShape(e,n){let t=e.length;return e.map((o,i)=>o+n[i]+n[i+t])}static areEqual(e,n){return e.length!==n.length?!1:e.every((t,o)=>t===n[o])}},Xr=class r{static adjustPoolAttributes(e,n,t,o,i,s){if(!e&&t.length!==n.length-2)throw new Error("length of specified kernel shapes should be 2 less than length of input dimensions");if(e)for(let a=0;a=t.length?t.push(n[a+2]):t[a]=n[a+2];for(let a=0;a=t[a]||s[a+t.length]>=t[a])throw new Error("pads should be smaller than kernel")}}static adjustPadsBasedOnAutoPad(e,n,t,o,i,s,a){if(a){if(i.length!==2*(e.length-2))throw new Error("length of pads should be twice the length of data dimensions");if(n.length!==e.length-2)throw new Error("length of strides should be the length of data dimensions");if(o.length!==e.length-2)throw new Error("length of kernel shapes should be the length of data dimensions");for(let u=0;u{"use strict";ue();ye();Zr=64,qs=(r,e)=>{if(e===3)throw new Error("vec3 has same alignment as vec4, use vec4 instead");switch(r){case 10:return e>1?`vec${e}`:"f16";case 1:return e>1?`vec${e}`:"f32";case 6:return e>1?`vec${e}`:"i32";case 12:return e>1?`vec${e}`:"u32";case 7:if(e>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","i32"];case 13:if(e>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","u32"];case 9:if(e!==4)throw new Error("bool must be vec4");return["u32","vec4"];default:throw new Error(`Unknown data type: ${r}`)}},Be=(r,e=1)=>{let n=qs(r,e);return typeof n=="string"?n:n[0]},ct=(r,e=1)=>{let n=qs(r,e);return typeof n=="string"?n:n[1]},W=(...r)=>{let e=[];return r.forEach(n=>{n.length!==0&&e.push({type:12,data:n},{type:12,data:B.computeStrides(n)})}),e},ze=r=>r%4===0?4:r%2===0?2:1,Pr=(r="f32",e,n="0")=>!e||e===1?`${r}(${n})`:`vec${e}<${r}>(${n})`,Yr=(r,e,n)=>r==="f32"?n:e===1?`f32(${n})`:`vec${e}(${n})`,Ut=(r,e)=>e===4?`(${r}.x + ${r}.y + ${r}.z + ${r}.w)`:e===2?`(${r}.x + ${r}.y)`:e===3?`(${r}.x + ${r}.y + ${r}.z)`:r,Z=(r,e,n,t)=>r.startsWith("uniforms.")&&n>4?typeof e=="string"?t==="f16"?`${r}[(${e}) / 8][(${e}) % 8 / 
4][(${e}) % 8 % 4]`:`${r}[(${e}) / 4][(${e}) % 4]`:t==="f16"?`${r}[${Math.floor(e/8)}][${Math.floor(e%8/4)}][${e%8%4}]`:`${r}[${Math.floor(e/4)}][${e%4}]`:n>1?`${r}[${e}]`:r,Ks=(r,e,n,t,o)=>{let i=typeof n=="number",s=i?n:n.length,a=[...new Array(s).keys()],u=s<2?"u32":s<=4?`vec${s}`:`array`,l=qs(e,o),f=typeof l=="string"?l:l[1],c=typeof l=="string"?l:l[0],p={indices:u,value:f,storage:c,tensor:e},b=V=>typeof V=="string"?V:`${V}u`,h={offsetToIndices:!1,indicesToOffset:!1,broadcastedIndicesToOffset:!1,set:!1,setByIndices:!1,get:!1,getByIndices:!1},g=i?"uniforms.":"",T=`${g}${r}_shape`,w=`${g}${r}_strides`,v="";for(let V=0;Vne.name).join(", ")}) => ${x.name}>`,w0(sl(y,d))}function I0(d){return d=lo(d>>>0),At(c[d])}function S0(d,m){return m>>>=0,d=$t(d>>>0),m=$t(m),At(d[m])}function $0(d){9<(d>>>=0)&&(cr[d+1]+=1)}function A0(){return At([])}function P0(d){d=$t(d>>>0);for(var m=Array(d.length),y=0;y>>0))}function E0(){return At({})}function C0(d){for(var m=$t(d>>>=0);m.length;){var y=m.pop();m.pop()(y)}ca(d)}function k0(d,m,y){m>>>=0,y>>>=0,d=$t(d>>>0),m=$t(m),y=$t(y),d[m]=y}function D0(d,m){return m>>>=0,d=(d=ma(d>>>0,"_emval_take_value")).readValueFromPointer(m),At(d)}function B0(d,m){d=-9007199254740992>d||9007199254740992>>=0,d=new Date(1e3*d),i()[m>>>2>>>0]=d.getUTCSeconds(),i()[m+4>>>2>>>0]=d.getUTCMinutes(),i()[m+8>>>2>>>0]=d.getUTCHours(),i()[m+12>>>2>>>0]=d.getUTCDate(),i()[m+16>>>2>>>0]=d.getUTCMonth(),i()[m+20>>>2>>>0]=d.getUTCFullYear()-1900,i()[m+24>>>2>>>0]=d.getUTCDay(),d=(d.getTime()-Date.UTC(d.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[m+28>>>2>>>0]=d}var on=d=>d%4==0&&(d%100!=0||d%400==0),ul=[0,31,60,91,121,152,182,213,244,274,305,335],ll=[0,31,59,90,120,151,181,212,243,273,304,334];function L0(d,m){d=-9007199254740992>d||9007199254740992>>=0,d=new Date(1e3*d),i()[m>>>2>>>0]=d.getSeconds(),i()[m+4>>>2>>>0]=d.getMinutes(),i()[m+8>>>2>>>0]=d.getHours(),i()[m+12>>>2>>>0]=d.getDate(),i()[m+16>>>2>>>0]=d.getMonth(),i()[m+20>>>2>>>0]=d.getFullYear()-1900,i()[m+24>>>2>>>0]=d.getDay();var y=(on(d.getFullYear())?ul:ll)[d.getMonth()]+d.getDate()-1|0;i()[m+28>>>2>>>0]=y,i()[m+36>>>2>>>0]=-60*d.getTimezoneOffset(),y=new Date(d.getFullYear(),6,1).getTimezoneOffset();var x=new Date(d.getFullYear(),0,1).getTimezoneOffset();d=0|(y!=x&&d.getTimezoneOffset()==Math.min(x,y)),i()[m+32>>>2>>>0]=d}function R0(d){d>>>=0;var m=new Date(i()[d+20>>>2>>>0]+1900,i()[d+16>>>2>>>0],i()[d+12>>>2>>>0],i()[d+8>>>2>>>0],i()[d+4>>>2>>>0],i()[d>>>2>>>0],0),y=i()[d+32>>>2>>>0],x=m.getTimezoneOffset(),_=new Date(m.getFullYear(),6,1).getTimezoneOffset(),O=new Date(m.getFullYear(),0,1).getTimezoneOffset(),R=Math.min(O,_);return 0>y?i()[d+32>>>2>>>0]=+(_!=O&&R==x):0>>2>>>0]=m.getDay(),y=(on(m.getFullYear())?ul:ll)[m.getMonth()]+m.getDate()-1|0,i()[d+28>>>2>>>0]=y,i()[d>>>2>>>0]=m.getSeconds(),i()[d+4>>>2>>>0]=m.getMinutes(),i()[d+8>>>2>>>0]=m.getHours(),i()[d+12>>>2>>>0]=m.getDate(),i()[d+16>>>2>>>0]=m.getMonth(),i()[d+20>>>2>>>0]=m.getYear(),d=m.getTime(),BigInt(isNaN(d)?-1:d/1e3)}function cl(d,m,y,x,_,O,R){return g?We(16,1,d,m,y,x,_,O,R):-52}function fl(d,m,y,x,_,O){if(g)return We(17,1,d,m,y,x,_,O)}function N0(d,m,y,x){d>>>=0,m>>>=0,y>>>=0,x>>>=0;var _=new Date().getFullYear(),O=new Date(_,0,1),R=new Date(_,6,1);_=O.getTimezoneOffset();var M=R.getTimezoneOffset(),K=Math.max(_,M);s()[d>>>2>>>0]=60*K,i()[m>>>2>>>0]=+(_!=M),O=(d=Y=>Y.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:"short"}).split(" ")[1])(O),R=d(R),M<_?(nn(O,y,17),nn(R,x,17)):(nn(O,x,17),nn(R,y,17))}var ga=[],dl=(d,m)=>{ga.length=0;for(var 
y;y=n()[d++>>>0];){var x=y!=105;m+=(x&=y!=112)&&m%8?4:0,ga.push(y==112?s()[m>>>2>>>0]:y==106?V[m>>>3]:y==105?i()[m>>>2>>>0]:u()[m>>>3>>>0]),m+=x?8:4}return ga};function z0(d,m,y){return d>>>=0,m=dl(m>>>0,y>>>0),ta[d](...m)}function F0(d,m,y){return d>>>=0,m=dl(m>>>0,y>>>0),ta[d](...m)}var M0=()=>{},V0=()=>Date.now();function G0(d,m){return F(ot(d>>>0,m>>>0))}var pl,U0=()=>{throw Er+=1,"unwind"};function W0(){return 4294901760}pl=()=>performance.timeOrigin+performance.now();var H0=()=>navigator.hardwareConcurrency;function q0(){return rn("Cannot use emscripten_pc_get_function without -sUSE_OFFSET_CONVERTER"),0}function j0(d){d>>>=0;var m=n().length;if(d<=m||4294901760=y;y*=2){var x=m*(1+.2/y);x=Math.min(x,d+100663296);var _=Math;x=Math.max(d,x);e:{_=(_.min.call(_,4294901760,x+(65536-x%65536)%65536)-ae.buffer.byteLength+65535)/65536;try{ae.grow(_),Fe();var O=1;break e}catch{}O=void 0}if(O)return!0}return!1}var co=()=>(rn("Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER"),0),an={},ml=d=>{d.forEach(m=>{var y=co();y&&(an[y]=m)})};function K0(){var d=Error().stack.toString().split(` +`);return d[0]=="Error"&&d.shift(),ml(d),an.Ob=co(),an.ac=d,an.Ob}function X0(d,m,y){if(d>>>=0,m>>>=0,an.Ob==d)var x=an.ac;else(x=Error().stack.toString().split(` +`))[0]=="Error"&&x.shift(),ml(x);for(var _=3;x[_]&&co()!=d;)++_;for(d=0;d>>2>>>0]=co();return d}var ba,ya={},hl=()=>{if(!ba){var d,m={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:(typeof navigator=="object"&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8",_:P||"./this.program"};for(d in ya)ya[d]===void 0?delete m[d]:m[d]=ya[d];var y=[];for(d in m)y.push(`${d}=${m[d]}`);ba=y}return ba};function gl(d,m){if(g)return We(18,1,d,m);d>>>=0,m>>>=0;var y=0;return hl().forEach((x,_)=>{var O=m+y;for(_=s()[d+4*_>>>2>>>0]=O,O=0;O>>0]=x.charCodeAt(O);e()[_>>>0]=0,y+=x.length+1}),0}function bl(d,m){if(g)return We(19,1,d,m);d>>>=0,m>>>=0;var y=hl();s()[d>>>2>>>0]=y.length;var x=0;return y.forEach(_=>x+=_.length+1),s()[m>>>2>>>0]=x,0}function yl(d){return g?We(20,1,d):52}function xl(d,m,y,x){return g?We(21,1,d,m,y,x):52}function vl(d,m,y,x){return g?We(22,1,d,m,y,x):70}var Z0=[null,[],[]];function wl(d,m,y,x){if(g)return We(23,1,d,m,y,x);m>>>=0,y>>>=0,x>>>=0;for(var _=0,O=0;O>>2>>>0],M=s()[m+4>>>2>>>0];m+=8;for(var K=0;K>>0],ne=Z0[d];Y===0||Y===10?((d===1?j:F)(Ru(ne,0)),ne.length=0):ne.push(Y)}_+=M}return s()[x>>>2>>>0]=_,0}var Tl=[31,29,31,30,31,30,31,31,30,31,30,31],_l=[31,28,31,30,31,30,31,31,30,31,30,31],Y0=(d,m)=>{e().set(d,m>>>0)};function Il(d,m,y,x){function _(L,fe,Le){for(L=typeof L=="number"?L.toString():L||"";L.lengthzl?-1:0kr-L.getDate())){L.setDate(L.getDate()+fe);break}fe-=kr-L.getDate()+1,L.setDate(1),11>Le?L.setMonth(Le+1):(L.setMonth(0),L.setFullYear(L.getFullYear()+1))}return Le=new Date(L.getFullYear()+1,0,4),fe=M(new Date(L.getFullYear(),0,4)),Le=M(Le),0>=R(fe,L)?0>=R(Le,L)?L.getFullYear()+1:L.getFullYear():L.getFullYear()-1}d>>>=0,m>>>=0,y>>>=0,x>>>=0;var Y=s()[x+40>>>2>>>0];for(var ne in x={fc:i()[x>>>2>>>0],ec:i()[x+4>>>2>>>0],Gb:i()[x+8>>>2>>>0],Kb:i()[x+12>>>2>>>0],Hb:i()[x+16>>>2>>>0],Cb:i()[x+20>>>2>>>0],ub:i()[x+24>>>2>>>0],Bb:i()[x+28>>>2>>>0],nc:i()[x+32>>>2>>>0],dc:i()[x+36>>>2>>>0],hc:Y?ot(Y):""},y=ot(y),Y={"%c":"%a %b %d %H:%M:%S %Y","%D":"%m/%d/%y","%F":"%Y-%m-%d","%h":"%b","%r":"%I:%M:%S 
%p","%R":"%H:%M","%T":"%H:%M:%S","%x":"%m/%d/%y","%X":"%H:%M:%S","%Ec":"%c","%EC":"%C","%Ex":"%m/%d/%y","%EX":"%H:%M:%S","%Ey":"%y","%EY":"%Y","%Od":"%d","%Oe":"%e","%OH":"%H","%OI":"%I","%Om":"%m","%OM":"%M","%OS":"%S","%Ou":"%u","%OU":"%U","%OV":"%V","%Ow":"%w","%OW":"%W","%Oy":"%y"})y=y.replace(new RegExp(ne,"g"),Y[ne]);var Ae="Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),Ce="January February March April May June July August September October November December".split(" ");for(ne in Y={"%a":L=>Ae[L.ub].substring(0,3),"%A":L=>Ae[L.ub],"%b":L=>Ce[L.Hb].substring(0,3),"%B":L=>Ce[L.Hb],"%C":L=>O((L.Cb+1900)/100|0,2),"%d":L=>O(L.Kb,2),"%e":L=>_(L.Kb,2," "),"%g":L=>K(L).toString().substring(2),"%G":K,"%H":L=>O(L.Gb,2),"%I":L=>((L=L.Gb)==0?L=12:12{for(var fe=0,Le=0;Le<=L.Hb-1;fe+=(on(L.Cb+1900)?Tl:_l)[Le++]);return O(L.Kb+fe,3)},"%m":L=>O(L.Hb+1,2),"%M":L=>O(L.ec,2),"%n":()=>` +`,"%p":L=>0<=L.Gb&&12>L.Gb?"AM":"PM","%S":L=>O(L.fc,2),"%t":()=>" ","%u":L=>L.ub||7,"%U":L=>O(Math.floor((L.Bb+7-L.ub)/7),2),"%V":L=>{var fe=Math.floor((L.Bb+7-(L.ub+6)%7)/7);if(2>=(L.ub+371-L.Bb-2)%7&&fe++,fe)fe==53&&((Le=(L.ub+371-L.Bb)%7)==4||Le==3&&on(L.Cb)||(fe=1));else{fe=52;var Le=(L.ub+7-L.Bb-1)%7;(Le==4||Le==5&&on(L.Cb%400-1))&&fe++}return O(fe,2)},"%w":L=>L.ub,"%W":L=>O(Math.floor((L.Bb+7-(L.ub+6)%7)/7),2),"%y":L=>(L.Cb+1900).toString().substring(2),"%Y":L=>L.Cb+1900,"%z":L=>{var fe=0<=(L=L.dc);return L=Math.abs(L)/60,(fe?"+":"-")+("0000"+(L/60*100+L%60)).slice(-4)},"%Z":L=>L.hc,"%%":()=>"%"},y=y.replace(/%%/g,"\0\0"),Y)y.includes(ne)&&(y=y.replace(new RegExp(ne,"g"),Y[ne](x)));return ne=function(L){var fe=Array(aa(L)+1);return Fu(L,fe,0,fe.length),fe}(y=y.replace(/\0\0/g,"%")),ne.length>m?0:(Y0(ne,d),ne.length-1)}function J0(d,m,y,x){return Il(d>>>0,m>>>0,y>>>0,x>>>0)}g||function(){for(var d=c.numThreads-1;d--;)ku();je.unshift(()=>{rt++,function(m){g?m():Promise.all(gr.map(Cu)).then(m)}(()=>no())})}();for(var Sl=Array(256),fo=0;256>fo;++fo)Sl[fo]=String.fromCharCode(fo);Yu=Sl,br=c.BindingError=class extends Error{constructor(d){super(d),this.name="BindingError"}},c.InternalError=class extends Error{constructor(d){super(d),this.name="InternalError"}},cr.push(0,1,void 0,1,null,1,!0,1,!1,1),c.count_emval_handles=()=>cr.length/2-5-la.length;var Q0=[oa,Au,Du,Nu,zu,Mu,Vu,Gu,Uu,Wu,Hu,qu,ju,Ku,Xu,Zu,cl,fl,gl,bl,yl,xl,vl,wl],te=function(){function d(y,x){return te=y.exports,te=function(){var _=te,O={};for(let[R,M]of Object.entries(_))O[R]=typeof M=="function"?(...K)=>{so.push(R);try{return M(...K)}finally{tt||(so.pop(),Zt&&xr===1&&so.length===0&&(xr=0,Er+=1,ao(Bl),typeof Fibers<"u"&&Fibers.oc()))}}:M;return O}(),te=function(){var _=te,O=M=>K=>M(K)>>>0,R=M=>()=>M()>>>0;return(_=Object.assign({},_)).Ca=O(_.Ca),_.fb=R(_.fb),_.gb=O(_.gb),_.emscripten_main_runtime_thread_id=R(_.emscripten_main_runtime_thread_id),_.sb=O(_.sb),_.tb=R(_.tb),_}(),Pu.push(te.ib),tn.unshift(te.Ba),qe=x,no(),te}var m=Su();if(rt++,c.instantiateWasm)try{return c.instantiateWasm(m,d)}catch(y){F(`Module.instantiateWasm callback failed with error: ${y}`),f(y)}return vu||=c.locateFile?wu("ort-wasm-simd-threaded.jsep.wasm")?"ort-wasm-simd-threaded.jsep.wasm":c.locateFile?c.locateFile("ort-wasm-simd-threaded.jsep.wasm",N):N+"ort-wasm-simd-threaded.jsep.wasm":new URL("ort-wasm-simd-threaded.jsep.wasm",import.meta.url).href,function(y,x){var _=vu;return typeof WebAssembly.instantiateStreaming!="function"||wu(_)||Tu(_)||typeof 
fetch!="function"?Iu(_,y,x):fetch(_,{credentials:"same-origin"}).then(O=>WebAssembly.instantiateStreaming(O,y).then(x,function(R){return F(`wasm streaming compile failed: ${R}`),F("falling back to ArrayBuffer instantiation"),Iu(_,y,x)}))}(m,function(y){d(y.instance,y.module)}).catch(f),{}}(),$l=d=>($l=te.Ca)(d),Al=()=>(Al=te.Da)();c._OrtInit=(d,m)=>(c._OrtInit=te.Ea)(d,m),c._OrtGetLastError=(d,m)=>(c._OrtGetLastError=te.Fa)(d,m),c._OrtCreateSessionOptions=(d,m,y,x,_,O,R,M,K,Y)=>(c._OrtCreateSessionOptions=te.Ga)(d,m,y,x,_,O,R,M,K,Y),c._OrtAppendExecutionProvider=(d,m)=>(c._OrtAppendExecutionProvider=te.Ha)(d,m),c._OrtAddFreeDimensionOverride=(d,m,y)=>(c._OrtAddFreeDimensionOverride=te.Ia)(d,m,y),c._OrtAddSessionConfigEntry=(d,m,y)=>(c._OrtAddSessionConfigEntry=te.Ja)(d,m,y),c._OrtReleaseSessionOptions=d=>(c._OrtReleaseSessionOptions=te.Ka)(d),c._OrtCreateSession=(d,m,y)=>(c._OrtCreateSession=te.La)(d,m,y),c._OrtReleaseSession=d=>(c._OrtReleaseSession=te.Ma)(d),c._OrtGetInputOutputCount=(d,m,y)=>(c._OrtGetInputOutputCount=te.Na)(d,m,y),c._OrtGetInputName=(d,m)=>(c._OrtGetInputName=te.Oa)(d,m),c._OrtGetOutputName=(d,m)=>(c._OrtGetOutputName=te.Pa)(d,m),c._OrtFree=d=>(c._OrtFree=te.Qa)(d),c._OrtCreateTensor=(d,m,y,x,_,O)=>(c._OrtCreateTensor=te.Ra)(d,m,y,x,_,O),c._OrtGetTensorData=(d,m,y,x,_)=>(c._OrtGetTensorData=te.Sa)(d,m,y,x,_),c._OrtReleaseTensor=d=>(c._OrtReleaseTensor=te.Ta)(d),c._OrtCreateRunOptions=(d,m,y,x)=>(c._OrtCreateRunOptions=te.Ua)(d,m,y,x),c._OrtAddRunConfigEntry=(d,m,y)=>(c._OrtAddRunConfigEntry=te.Va)(d,m,y),c._OrtReleaseRunOptions=d=>(c._OrtReleaseRunOptions=te.Wa)(d),c._OrtCreateBinding=d=>(c._OrtCreateBinding=te.Xa)(d),c._OrtBindInput=(d,m,y)=>(c._OrtBindInput=te.Ya)(d,m,y),c._OrtBindOutput=(d,m,y,x)=>(c._OrtBindOutput=te.Za)(d,m,y,x),c._OrtClearBoundOutputs=d=>(c._OrtClearBoundOutputs=te._a)(d),c._OrtReleaseBinding=d=>(c._OrtReleaseBinding=te.$a)(d),c._OrtRunWithBinding=(d,m,y,x,_)=>(c._OrtRunWithBinding=te.ab)(d,m,y,x,_),c._OrtRun=(d,m,y,x,_,O,R,M)=>(c._OrtRun=te.bb)(d,m,y,x,_,O,R,M),c._OrtEndProfiling=d=>(c._OrtEndProfiling=te.cb)(d),c._JsepOutput=(d,m,y)=>(c._JsepOutput=te.db)(d,m,y),c._JsepGetNodeName=d=>(c._JsepGetNodeName=te.eb)(d);var po,sn=()=>(sn=te.fb)(),mo=c._malloc=d=>(mo=c._malloc=te.gb)(d),Yt=c._free=d=>(Yt=c._free=te.hb)(d),xa=(d,m,y,x,_,O)=>(xa=te.kb)(d,m,y,x,_,O),Pl=()=>(Pl=te.lb)(),Ol=(d,m,y,x,_)=>(Ol=te.mb)(d,m,y,x,_),va=d=>(va=te.nb)(d),ho=d=>(ho=te.ob)(d),El=()=>(El=te.pb)(),Cl=(d,m)=>(Cl=te.qb)(d,m),go=d=>(go=te.rb)(d),wa=d=>(wa=te.sb)(d),Ta=()=>(Ta=te.tb)(),kl=c.dynCall_ii=(d,m)=>(kl=c.dynCall_ii=te.vb)(d,m),Dl=d=>(Dl=te.wb)(d),Bl=()=>(Bl=te.xb)(),Ll=d=>(Ll=te.yb)(d),Rl=()=>(Rl=te.zb)();function Nl(){if(!(0Ta(),c.stackRestore=d=>go(d),c.stackAlloc=d=>wa(d),c.UTF8ToString=ot,c.stringToUTF8=nn,c.lengthBytesUTF8=aa,hr=function d(){po||Nl(),po||(hr=d)},Nl(),p}),B_=Ih;globalThis.self?.name==="em-pthread"&&Ih()});var Tn,L_,R_,N_,Ah,Ph,z_,Oh,Hn=C(()=>{"use strict";hi();Tn=!1?void 0:import.meta.url??(typeof document<"u"?document.currentScript?.src:typeof self<"u"?self.location?.href:void 0),L_=!1||typeof location>"u"?void 0:location.origin,R_=(r,e)=>{try{let n=e??Tn;return(n?new URL(r,n):new URL(r)).origin===L_}catch{return!1}},N_=async r=>{let n=await(await fetch(r,{credentials:"same-origin"})).blob();return URL.createObjectURL(n)},Ah=(_h(),Pn(Th)).default,Ph=async()=>{if(!Tn)throw new Error("Failed to load proxy worker: cannot determine the script source URL.");if(R_(Tn))return[void 0,Ah()];let r=await 
N_(Tn);return[r,Ah(r)]},z_=($h(),Pn(Sh)).default,Oh=async(r,e,n)=>[void 0,z_]});var Ls,Rs,Ii,Eh,F_,M_,gi,Ye,jr=C(()=>{"use strict";Hn();Rs=!1,Ii=!1,Eh=!1,F_=()=>{if(typeof SharedArrayBuffer>"u")return!1;try{return typeof MessageChannel<"u"&&new MessageChannel().port1.postMessage(new SharedArrayBuffer(1)),WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,5,4,1,3,1,1,10,11,1,9,0,65,0,254,16,2,0,26,11]))}catch{return!1}},M_=()=>{try{return WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,10,30,1,28,0,65,0,253,15,253,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,253,186,1,26,11]))}catch{return!1}},gi=async r=>{if(Rs)return Promise.resolve();if(Ii)throw new Error("multiple calls to 'initializeWebAssembly()' detected.");if(Eh)throw new Error("previous call to 'initializeWebAssembly()' failed.");Ii=!0;let e=r.initTimeout,n=r.numThreads;if(!M_())throw new Error("WebAssembly SIMD is not supported in the current environment.");let t=F_();n>1&&!t&&(typeof self<"u"&&!self.crossOriginIsolated&&console.warn("env.wasm.numThreads is set to "+n+", but this will not work unless you enable crossOriginIsolated mode. See https://web.dev/cross-origin-isolation-guide/ for more info."),console.warn("WebAssembly multi-threading is not supported in the current environment. Falling back to single-threading."),r.numThreads=n=1);let o=r.wasmPaths,i=typeof o=="string"?o:void 0,s=o?.mjs,a=s?.href??s,u=o?.wasm,l=u?.href??u,[f,c]=await Oh(a,i,n>1),p=!1,b=[];if(e>0&&b.push(new Promise(h=>{setTimeout(()=>{p=!0,h()},e)})),b.push(new Promise((h,g)=>{let T={numThreads:n};(l||i)&&(T.locateFile=(w,v)=>l??(i??v)+w),c(T).then(w=>{Ii=!1,Rs=!0,Ls=w,h(),f&&URL.revokeObjectURL(f)},w=>{Ii=!1,Eh=!0,g(w)})})),await Promise.race(b),p)throw new Error(`WebAssembly backend initializing failed due to timeout: ${e}ms`)},Ye=()=>{if(Rs&&Ls)return Ls;throw new Error("WebAssembly is not initialized yet.")}});var nt,jn,Ue,Si=C(()=>{"use strict";jr();nt=(r,e)=>{let n=Ye(),t=n.lengthBytesUTF8(r)+1,o=n._malloc(t);return n.stringToUTF8(r,o,t),e.push(o),o},jn=(r,e,n,t)=>{if(typeof r=="object"&&r!==null){if(n.has(r))throw new Error("Circular reference in options");n.add(r)}Object.entries(r).forEach(([o,i])=>{let s=e?e+o:o;if(typeof i=="object")jn(i,s+".",n,t);else if(typeof i=="string"||typeof i=="number")t(s,i.toString());else if(typeof i=="boolean")t(s,i?"1":"0");else throw new Error(`Can't handle extra config type: ${typeof i}`)})},Ue=r=>{let e=Ye(),n=e.stackSave();try{let t=e.stackAlloc(8);e._OrtGetLastError(t,t+4);let o=e.HEAP32[t/4],i=e.HEAPU32[t/4+1],s=i?e.UTF8ToString(i):"";throw new Error(`${r} ERROR_CODE: ${o}, ERROR_MESSAGE: ${s}`)}finally{e.stackRestore(n)}}});var Ch,kh=C(()=>{"use strict";jr();Si();Ch=r=>{let e=Ye(),n=0,t=[],o=r||{};try{if(r?.logSeverityLevel===void 0)o.logSeverityLevel=2;else if(typeof r.logSeverityLevel!="number"||!Number.isInteger(r.logSeverityLevel)||r.logSeverityLevel<0||r.logSeverityLevel>4)throw new Error(`log serverity level is not valid: ${r.logSeverityLevel}`);if(r?.logVerbosityLevel===void 0)o.logVerbosityLevel=0;else if(typeof r.logVerbosityLevel!="number"||!Number.isInteger(r.logVerbosityLevel))throw new Error(`log verbosity level is not valid: ${r.logVerbosityLevel}`);r?.terminate===void 0&&(o.terminate=!1);let i=0;return r?.tag!==void 0&&(i=nt(r.tag,t)),n=e._OrtCreateRunOptions(o.logSeverityLevel,o.logVerbosityLevel,!!o.terminate,i),n===0&&Ue("Can't create run options."),r?.extra!==void 0&&jn(r.extra,"",new WeakSet,(s,a)=>{let 
u=nt(s,t),l=nt(a,t);e._OrtAddRunConfigEntry(n,u,l)!==0&&Ue(`Can't set a run config entry: ${s} - ${a}.`)}),[n,t]}catch(i){throw n!==0&&e._OrtReleaseRunOptions(n),t.forEach(s=>e._free(s)),i}}});var V_,G_,U_,W_,Dh,Bh=C(()=>{"use strict";jr();Si();V_=r=>{switch(r){case"disabled":return 0;case"basic":return 1;case"extended":return 2;case"all":return 99;default:throw new Error(`unsupported graph optimization level: ${r}`)}},G_=r=>{switch(r){case"sequential":return 0;case"parallel":return 1;default:throw new Error(`unsupported execution mode: ${r}`)}},U_=r=>{r.extra||(r.extra={}),r.extra.session||(r.extra.session={});let e=r.extra.session;e.use_ort_model_bytes_directly||(e.use_ort_model_bytes_directly="1"),r.executionProviders&&r.executionProviders.some(n=>(typeof n=="string"?n:n.name)==="webgpu")&&(r.enableMemPattern=!1)},W_=(r,e,n)=>{for(let t of e){let o=typeof t=="string"?t:t.name;switch(o){case"webnn":if(o="WEBNN",typeof t!="string"){let a=t?.deviceType;if(a){let u=nt("deviceType",n),l=nt(a,n);Ye()._OrtAddSessionConfigEntry(r,u,l)!==0&&Ue(`Can't set a session config entry: 'deviceType' - ${a}.`)}}break;case"webgpu":if(o="JS",typeof t!="string"){let s=t;if(s?.preferredLayout){if(s.preferredLayout!=="NCHW"&&s.preferredLayout!=="NHWC")throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${s.preferredLayout}`);let a=nt("preferredLayout",n),u=nt(s.preferredLayout,n);Ye()._OrtAddSessionConfigEntry(r,a,u)!==0&&Ue(`Can't set a session config entry: 'preferredLayout' - ${s.preferredLayout}.`)}}break;case"wasm":case"cpu":continue;default:throw new Error(`not supported execution provider: ${o}`)}let i=nt(o,n);Ye()._OrtAppendExecutionProvider(r,i)!==0&&Ue(`Can't append execution provider: ${o}.`)}},Dh=r=>{let e=Ye(),n=0,t=[],o=r||{};U_(o);try{let i=V_(o.graphOptimizationLevel??"all"),s=G_(o.executionMode??"sequential"),a=typeof o.logId=="string"?nt(o.logId,t):0,u=o.logSeverityLevel??2;if(!Number.isInteger(u)||u<0||u>4)throw new Error(`log serverity level is not valid: ${u}`);let l=o.logVerbosityLevel??0;if(!Number.isInteger(l)||l<0||l>4)throw new Error(`log verbosity level is not valid: ${l}`);let f=typeof o.optimizedModelFilePath=="string"?nt(o.optimizedModelFilePath,t):0;if(n=e._OrtCreateSessionOptions(i,!!o.enableCpuMemArena,!!o.enableMemPattern,s,!!o.enableProfiling,0,a,u,l,f),n===0&&Ue("Can't create session options."),o.executionProviders&&W_(n,o.executionProviders,t),o.enableGraphCapture!==void 0){if(typeof o.enableGraphCapture!="boolean")throw new Error(`enableGraphCapture must be a boolean value: ${o.enableGraphCapture}`);let c=nt("enableGraphCapture",t),p=nt(o.enableGraphCapture.toString(),t);e._OrtAddSessionConfigEntry(n,c,p)!==0&&Ue(`Can't set a session config entry: 'enableGraphCapture' - ${o.enableGraphCapture}.`)}if(o.freeDimensionOverrides)for(let[c,p]of Object.entries(o.freeDimensionOverrides)){if(typeof c!="string")throw new Error(`free dimension override name must be a string: ${c}`);if(typeof p!="number"||!Number.isInteger(p)||p<0)throw new Error(`free dimension override value must be a non-negative integer: ${p}`);let b=nt(c,t);e._OrtAddFreeDimensionOverride(n,b,p)!==0&&Ue(`Can't set a free dimension override: ${c} - ${p}.`)}return o.extra!==void 0&&jn(o.extra,"",new WeakSet,(c,p)=>{let b=nt(c,t),h=nt(p,t);e._OrtAddSessionConfigEntry(n,b,h)!==0&&Ue(`Can't set a session config entry: ${c} - ${p}.`)}),[n,t]}catch(i){throw n!==0&&e._OrtReleaseSessionOptions(n),t.forEach(s=>e._free(s)),i}}});var Ns,Ar,Kr,$i,Kn,Ai,zs,ue=C(()=>{"use strict";Ns=r=>{switch(r){case"int8":return 
3;case"uint8":return 2;case"bool":return 9;case"int16":return 5;case"uint16":return 4;case"int32":return 6;case"uint32":return 12;case"float16":return 10;case"float32":return 1;case"float64":return 11;case"string":return 8;case"int64":return 7;case"uint64":return 13;default:throw new Error(`unsupported data type: ${r}`)}},Ar=r=>{switch(r){case 3:return"int8";case 2:return"uint8";case 9:return"bool";case 5:return"int16";case 4:return"uint16";case 6:return"int32";case 12:return"uint32";case 10:return"float16";case 1:return"float32";case 11:return"float64";case 8:return"string";case 7:return"int64";case 13:return"uint64";default:throw new Error(`unsupported data type: ${r}`)}},Kr=r=>[void 0,4,1,1,2,2,4,8,void 0,1,2,8,4,8,void 0,void 0,void 0][r],$i=r=>{switch(r){case"float16":return typeof Float16Array<"u"&&Float16Array.from?Float16Array:Uint16Array;case"float32":return Float32Array;case"uint8":return Uint8Array;case"int8":return Int8Array;case"uint16":return Uint16Array;case"int16":return Int16Array;case"int32":return Int32Array;case"bool":return Uint8Array;case"float64":return Float64Array;case"uint32":return Uint32Array;case"int64":return BigInt64Array;case"uint64":return BigUint64Array;default:throw new Error(`unsupported type: ${r}`)}},Kn=r=>{switch(r){case"verbose":return 0;case"info":return 1;case"warning":return 2;case"error":return 3;case"fatal":return 4;default:throw new Error(`unsupported logging level: ${r}`)}},Ai=r=>r==="float32"||r==="float16"||r==="int32"||r==="int64"||r==="uint32"||r==="uint8"||r==="bool",zs=r=>{switch(r){case"none":return 0;case"cpu":return 1;case"cpu-pinned":return 2;case"texture":return 3;case"gpu-buffer":return 4;default:throw new Error(`unsupported data location: ${r}`)}}});var Xn,Fs=C(()=>{"use strict";hi();Xn=async r=>{if(typeof r=="string")if(!1)try{let{readFile:e}=_a("node:fs/promises");return new Uint8Array(await e(r))}catch(e){if(e.code==="ERR_FS_FILE_TOO_LARGE"){let{createReadStream:n}=_a("node:fs"),t=n(r),o=[];for await(let i of t)o.push(i);return new Uint8Array(Buffer.concat(o))}throw e}else{let e=await fetch(r);if(!e.ok)throw new Error(`failed to load external data file: ${r}`);let n=e.headers.get("Content-Length"),t=n?parseInt(n,10):0;if(t<1073741824)return new Uint8Array(await e.arrayBuffer());{if(!e.body)throw new Error(`failed to load external data file: ${r}, no response body.`);let o=e.body.getReader(),i;try{i=new ArrayBuffer(t)}catch(a){if(a instanceof RangeError){let u=Math.ceil(t/65536);i=new WebAssembly.Memory({initial:u,maximum:u}).buffer}else throw a}let s=0;for(;;){let{done:a,value:u}=await o.read();if(a)break;let l=u.byteLength;new Uint8Array(i,s,l).set(u),s+=l}return new Uint8Array(i,0,t)}}else return r instanceof Blob?new Uint8Array(await r.arrayBuffer()):r instanceof Uint8Array?r:new Uint8Array(r)}});var H_,q_,Lh,Rh,Nh,j_,Ne,mr=C(()=>{"use strict";ue();H_=["V","I","W","E","F"],q_=(r,e)=>{console.log(`[${H_[r]},${new Date().toISOString()}]${e}`)},Nh=(r,e)=>{Lh=r,Rh=e},j_=(r,e)=>{let n=Kn(r),t=Kn(Lh);n>=t&&q_(n,typeof e=="function"?e():e)},Ne=(...r)=>{Rh&&j_(...r)}});var zh,Fh=C(()=>{"use strict";ue();zh=(r,e)=>new($i(e))(r)});var Pi=C(()=>{"use strict"});var Mh,Ms,Vs,K_,X_,Vh,Us,Gs,Uh,Wh=C(()=>{"use strict";mr();Pi();Mh=new 
Map([[64,250],[128,200],[256,200],[512,200],[2048,230],[4096,200],[8192,50],[16384,50],[32768,50],[65536,50],[131072,50],[262144,50],[524288,50],[1048576,50],[2097152,30],[4194304,20],[8388608,10],[12582912,10],[16777216,10],[26214400,15],[33554432,22],[44236800,2],[58982400,6],[67108864,6],[134217728,6],[167772160,6]]),Ms=[],Vs=r=>Math.ceil(r/16)*16,K_=r=>{for(let e=0;eX_++,Us=async(r,e,n,t)=>{let o=Vs(n),i=r.device.createBuffer({size:o,usage:GPUBufferUsage.COPY_DST|GPUBufferUsage.MAP_READ});try{let s=r.getCommandEncoder();r.endComputePass(),s.copyBufferToBuffer(e,0,i,0,o),r.flush(),await i.mapAsync(GPUMapMode.READ);let a=i.getMappedRange();if(t){let u=t();return u.set(new Uint8Array(a,0,n)),u}else return new Uint8Array(a.slice(0,n))}finally{i.destroy()}},Gs=class{constructor(e){this.backend=e;this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.buffersForUploadingPending=[],this.buffersPending=[],this.externalBuffers=new Map,this.capturedPendingBuffers=new Map;for(let[n]of Mh)Ms.push(n),this.freeBuffers.set(n,[]),this.freeUniformBuffers.set(n,[])}upload(e,n){let t=n.buffer,o=n.byteOffset,i=n.byteLength,s=Vs(i),a=this.storageCache.get(e);if(!a)throw new Error("gpu data for uploading does not exist");if(a.originalSize!==i)throw new Error(`inconsistent data size. gpu data size=${a.originalSize}, data size=${i}`);let u=this.backend.device.createBuffer({mappedAtCreation:!0,size:s,usage:GPUBufferUsage.MAP_WRITE|GPUBufferUsage.COPY_SRC}),l=u.getMappedRange();new Uint8Array(l).set(new Uint8Array(t,o,i)),u.unmap();let f=this.backend.getCommandEncoder();this.backend.endComputePass(),f.copyBufferToBuffer(u,0,a.gpuData.buffer,0,s),Ne("verbose",()=>`[WebGPU] GpuDataManager.upload(id=${e})`),this.buffersForUploadingPending.push(u)}memcpy(e,n){let t=this.storageCache.get(e);if(!t)throw new Error("source gpu data for memcpy does not exist");let o=this.storageCache.get(n);if(!o)throw new Error("destination gpu data for memcpy does not exist");if(t.originalSize!==o.originalSize)throw new Error("inconsistent source and destination gpu data size");let i=Vs(t.originalSize),s=this.backend.getCommandEncoder();this.backend.endComputePass(),s.copyBufferToBuffer(t.gpuData.buffer,0,o.gpuData.buffer,0,i)}registerExternalBuffer(e,n,t){let o;if(t){if(o=this.externalBuffers.get(t),o===void 0)throw new Error("previous buffer is not registered");if(e===t)return Ne("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${n}) => id=${o}, buffer is the same, skip.`),o;if(this.backend.capturedCommandList.has(this.backend.currentSessionId))throw new Error(`Registering a different external buffer under graph capture mode is not supported yet. 
+ Please use the previous external buffer!`);this.externalBuffers.delete(t)}else o=Vh();return this.storageCache.set(o,{gpuData:{id:o,type:0,buffer:e},originalSize:n}),this.externalBuffers.set(e,o),Ne("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${n}) => id=${o}, registered.`),o}unregisterExternalBuffer(e){let n=this.externalBuffers.get(e);n!==void 0&&(this.storageCache.delete(n),this.externalBuffers.delete(e),Ne("verbose",()=>`[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${n}`))}create(e,n=GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_SRC|GPUBufferUsage.COPY_DST){let t=K_(e),o,i=(n&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE,s=(n&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM;if(i||s){let l=(i?this.freeBuffers:this.freeUniformBuffers).get(t);l?l.length>0?o=l.pop():o=this.backend.device.createBuffer({size:t,usage:n}):o=this.backend.device.createBuffer({size:t,usage:n})}else o=this.backend.device.createBuffer({size:t,usage:n});let a={id:Vh(),type:0,buffer:o};return this.storageCache.set(a.id,{gpuData:a,originalSize:e}),Ne("verbose",()=>`[WebGPU] GpuDataManager.create(size=${e}) => id=${a.id}`),a}get(e){return this.storageCache.get(e)?.gpuData}release(e){let n=this.storageCache.get(e);if(!n)throw new Error("releasing data does not exist");return Ne("verbose",()=>`[WebGPU] GpuDataManager.release(id=${e}), gpuDataId=${n.gpuData.id}`),this.storageCache.delete(e),this.buffersPending.push(n.gpuData.buffer),n.originalSize}async download(e,n){let t=this.storageCache.get(e);if(!t)throw new Error("data does not exist");await Us(this.backend,t.gpuData.buffer,t.originalSize,n)}refreshPendingBuffers(){for(let e of this.buffersForUploadingPending)e.destroy();if(this.buffersForUploadingPending=[],this.buffersPending.length!==0)if(this.backend.sessionStatus==="default"){for(let e of this.buffersPending){let n=Mh.get(e.size);if((e.usage&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE){let t=this.freeBuffers.get(e.size)||[];n===void 0||t.length>=n?e.destroy():t.push(e)}else if((e.usage&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM){let t=this.freeUniformBuffers.get(e.size)||[];n===void 0||t.length>=n?e.destroy():t.push(e)}else e.destroy()}this.buffersPending=[]}else{let e=this.capturedPendingBuffers.get(this.backend.currentSessionId);e||(e=[],this.capturedPendingBuffers.set(this.backend.currentSessionId,e));for(let n of this.buffersPending)e.push(n);this.buffersPending=[]}}dispose(){this.freeBuffers.forEach(e=>{e.forEach(n=>{n.destroy()})}),this.freeUniformBuffers.forEach(e=>{e.forEach(n=>{n.destroy()})}),this.storageCache.forEach(e=>{e.gpuData.buffer.destroy()}),this.capturedPendingBuffers.forEach(e=>{e.forEach(n=>{n.destroy()})}),this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.capturedPendingBuffers=new Map}onReleaseSession(e){let n=this.capturedPendingBuffers.get(e);n&&(n.forEach(t=>{t.destroy()}),this.capturedPendingBuffers.delete(e))}},Uh=(...r)=>new Gs(...r)});var Ws,de,et=C(()=>{"use strict";Ws=class{constructor(e){Object.assign(this,e)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(e=>`${this[e]}`).join(";")),this.key}},de=r=>new Ws(r)});var Hs,nr,B,Xr,Oi,Ei,Ci,ye=C(()=>{"use strict";Hs=class{static calcMatMulShape(e,n){return e[1]!==n[0]?void 0:[e[0],n[1]]}},nr=class{static calcShape(e,n,t=!1){let o=e.length,i=n.length;if(o===0)return n;if(i===0)return e;let s=Math.max(e.length,n.length),a=new Array(s);if(t){if(o<2||i<2)return;let 
u=Hs.calcMatMulShape([e[o-2],e[o-1]],[n[i-2],n[i-1]]);if(u===void 0)return;[a[s-2],a[s-1]]=u}for(let u=t?3:1;u<=s;u++){let l=o-u<0?1:e[o-u],f=i-u<0?1:n[i-u];if(l!==f&&l>1&&f>1)return;let c=Math.max(l,f);if(l&&f)a[s-u]=Math.max(l,f);else{if(c>1)return;a[s-u]=0}}return a}static isValidBroadcast(e,n){let t=e.length,o=n.length;if(t>o)return!1;for(let i=1;i<=t;i++)if(e[t-i]!==1&&e[t-i]!==n[o-i])return!1;return!0}},B=class r{static size(e){return r.getSizeFromDimensionRange(e,0,e.length)}static convertShape(e,n=4){let t=e.length;if(t===0)return[];let o=new Array(t),i=t-1;for(;i>=0;){if(e[i]%n===0){o[i]=e[i]/n;break}if(n%e[i]!==0)throw new Error("cannot convert shape");o[i]=1,n/=e[i],i--}for(i--;i>=0;i--)o[i]=e[i];return o}static sizeFromDimension(e,n){if(n<0||n>e.length)throw new Error(`invalid dimension of ${n} for sizeFromDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,n,e.length)}static sizeToDimension(e,n){if(n<0||n>e.length)throw new Error(`invalid dimension of ${n} for sizeToDimension as Tensor has ${e.length} dimensions.`);return r.getSizeFromDimensionRange(e,0,n)}static getSizeFromDimensionRange(e,n,t){let o=1;for(let i=n;i=0;--o)t[o]=t[o+1]*e[o+1];return t}static normalizeAxis(e,n){if(e<-n&&e>=n)throw new Error("unsupported axis for this operation.");return e<0?e+n:e}static normalizeAxes(e,n){return e.map(t=>this.normalizeAxis(t,n??e.length))}static sortBasedOnPerm(e,n){return n?n.map(t=>e[t]):e.slice().reverse()}static padShape(e,n){let t=e.length;return e.map((o,i)=>o+n[i]+n[i+t])}static areEqual(e,n){return e.length!==n.length?!1:e.every((t,o)=>t===n[o])}},Xr=class r{static adjustPoolAttributes(e,n,t,o,i,s){if(!e&&t.length!==n.length-2)throw new Error("length of specified kernel shapes should be 2 less than length of input dimensions");if(e)for(let a=0;a=t.length?t.push(n[a+2]):t[a]=n[a+2];for(let a=0;a=t[a]||s[a+t.length]>=t[a])throw new Error("pads should be smaller than kernel")}}static adjustPadsBasedOnAutoPad(e,n,t,o,i,s,a){if(a){if(i.length!==2*(e.length-2))throw new Error("length of pads should be twice the length of data dimensions");if(n.length!==e.length-2)throw new Error("length of strides should be the length of data dimensions");if(o.length!==e.length-2)throw new Error("length of kernel shapes should be the length of data dimensions");for(let u=0;u{"use strict";ue();ye();Zr=64,js=(r,e)=>{if(e===3)throw new Error("vec3 has same alignment as vec4, use vec4 instead");switch(r){case 10:return e>1?`vec${e}`:"f16";case 1:return e>1?`vec${e}`:"f32";case 6:return e>1?`vec${e}`:"i32";case 12:return e>1?`vec${e}`:"u32";case 7:if(e>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","i32"];case 13:if(e>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","u32"];case 9:if(e!==4)throw new Error("bool must be vec4");return["u32","vec4"];default:throw new Error(`Unknown data type: ${r}`)}},Be=(r,e=1)=>{let n=js(r,e);return typeof n=="string"?n:n[0]},ct=(r,e=1)=>{let n=js(r,e);return typeof n=="string"?n:n[1]},W=(...r)=>{let e=[];return r.forEach(n=>{n.length!==0&&e.push({type:12,data:n},{type:12,data:B.computeStrides(n)})}),e},ze=r=>r%4===0?4:r%2===0?2:1,Pr=(r="f32",e,n="0")=>!e||e===1?`${r}(${n})`:`vec${e}<${r}>(${n})`,Yr=(r,e,n)=>r==="f32"?n:e===1?`f32(${n})`:`vec${e}(${n})`,Ut=(r,e)=>e===4?`(${r}.x + ${r}.y + ${r}.z + ${r}.w)`:e===2?`(${r}.x + ${r}.y)`:e===3?`(${r}.x + ${r}.y + ${r}.z)`:r,Z=(r,e,n,t)=>r.startsWith("uniforms.")&&n>4?typeof e=="string"?t==="f16"?`${r}[(${e}) / 8][(${e}) % 8 / 
4][(${e}) % 8 % 4]`:`${r}[(${e}) / 4][(${e}) % 4]`:t==="f16"?`${r}[${Math.floor(e/8)}][${Math.floor(e%8/4)}][${e%8%4}]`:`${r}[${Math.floor(e/4)}][${e%4}]`:n>1?`${r}[${e}]`:r,Ks=(r,e,n,t,o)=>{let i=typeof n=="number",s=i?n:n.length,a=[...new Array(s).keys()],u=s<2?"u32":s<=4?`vec${s}`:`array`,l=js(e,o),f=typeof l=="string"?l:l[1],c=typeof l=="string"?l:l[0],p={indices:u,value:f,storage:c,tensor:e},b=V=>typeof V=="string"?V:`${V}u`,h={offsetToIndices:!1,indicesToOffset:!1,broadcastedIndicesToOffset:!1,set:!1,setByIndices:!1,get:!1,getByIndices:!1},g=i?"uniforms.":"",T=`${g}${r}_shape`,w=`${g}${r}_strides`,v="";for(let V=0;V(h.offsetToIndices=!0,s<2?V:`o2i_${r}(${V})`),P=[];if(s>=2)for(let V=s-1;V>=0;V--)P.push(`${Z(w,V,s)} * (indices[${V}])`);let E=s<2?"":` fn i2o_${r}(indices: ${p.indices}) -> u32 { return ${P.join("+")}; - }`,N=V=>(h.indicesToOffset=!0,s<2?V:`i2o_${r}(${V})`),z=(...V)=>s===0?"0u":`${p.indices}(${V.map(b).join(",")})`,q=(V,ie)=>s<2?`${V}`:`${Z(V,ie,s)}`,K=(V,ie,Te)=>s<2?`${V}=${Te};`:`${Z(V,ie,s)}=${Te};`,F={},_e=(V,ie)=>{h.broadcastedIndicesToOffset=!0;let Te=`${ie.name}broadcastedIndicesTo${r}Offset`;if(Te in F)return`${Te}(${V})`;let tt=[];for(let Fe=s-1;Fe>=0;Fe--){let Ke=ie.indicesGet("outputIndices",Fe+ie.rank-s);tt.push(`${q(w,Fe)} * (${Ke} % ${q(T,Fe)})`)}return F[Te]=`fn ${Te}(outputIndices: ${ie.type.indices}) -> u32 { + }`,N=V=>(h.indicesToOffset=!0,s<2?V:`i2o_${r}(${V})`),z=(...V)=>s===0?"0u":`${p.indices}(${V.map(b).join(",")})`,q=(V,ie)=>s<2?`${V}`:`${Z(V,ie,s)}`,j=(V,ie,Te)=>s<2?`${V}=${Te};`:`${Z(V,ie,s)}=${Te};`,F={},_e=(V,ie)=>{h.broadcastedIndicesToOffset=!0;let Te=`${ie.name}broadcastedIndicesTo${r}Offset`;if(Te in F)return`${Te}(${V})`;let tt=[];for(let Fe=s-1;Fe>=0;Fe--){let je=ie.indicesGet("outputIndices",Fe+ie.rank-s);tt.push(`${q(w,Fe)} * (${je} % ${q(T,Fe)})`)}return F[Te]=`fn ${Te}(outputIndices: ${ie.type.indices}) -> u32 { return ${tt.length>0?tt.join("+"):"0u"}; }`,`${Te}(${V})`},$e=(V,ie)=>(()=>{if(p.storage===p.value)return`${r}[${V}]=${ie};`;if(p.storage==="vec2"&&p.value==="i32")return`${r}[${V}]=vec2(u32(${ie}), select(0u, 0xFFFFFFFFu, ${ie} < 0));`;if(p.storage==="vec2"&&p.value==="u32")return`${r}[${V}]=vec2(u32(${ie}), 0u);`;if(p.storage==="u32"&&p.value==="vec4")return`${r}[${V}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${ie}));`;throw new Error(`not supported combination of storage type ${p.storage} and value type ${p.value} yet`)})(),ae=V=>(()=>{if(p.storage===p.value)return`${r}[${V}]`;if(p.storage==="vec2"&&p.value==="i32")return`i32(${r}[${V}].x)`;if(p.storage==="vec2"&&p.value==="u32")return`u32(${r}[${V}].x)`;if(p.storage==="u32"&&p.value==="vec4")return`vec4(bool(${r}[${V}] & 0xFFu), bool(${r}[${V}] & 0xFF00u), bool(${r}[${V}] & 0xFF0000u), bool(${r}[${V}] & 0xFF000000u))`;throw new Error(`not supported combination of storage type ${p.storage} and value type ${p.value} yet`)})(),qe=s<2?"":` fn get_${r}ByIndices(indices: ${p.indices}) -> ${f} { @@ -1838,7 +1838,7 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e fn set_${r}(${V}, value: ${f}) { set_${r}ByIndices(${z(ie)}, value); }`})();return{impl:()=>{let V=[],ie=!1;return h.offsetToIndices&&(V.push(S),ie=!0),h.indicesToOffset&&(V.push(E),ie=!0),h.broadcastedIndicesToOffset&&(Object.values(F).forEach(Te=>V.push(Te)),ie=!0),h.set&&(V.push(se),ie=!0),h.setByIndices&&(V.push(xe),ie=!0),h.get&&(V.push(Q),ie=!0),h.getByIndices&&(V.push(qe),ie=!0),!i&&ie&&V.unshift(`const ${T} = ${p.indices}(${n.join(",")});`,`const ${w} = 
${p.indices}(${B.computeStrides(n).join(",")});`),V.join(` -`)},type:p,offsetToIndices:$,indicesToOffset:N,broadcastedIndicesToOffset:_e,indices:z,indicesGet:q,indicesSet:K,set:(...V)=>{if(V.length!==s+1)throw new Error(`indices length must be ${s}`);let ie=V[s];if(typeof ie!="string")throw new Error("value must be string");let Te=V.slice(0,s).map(b).join(",");return s===0?$e("0u",ie):s===1?$e(Te[0],ie):(h.set=!0,h.setByIndices=!0,h.indicesToOffset=!0,`set_${r}(${Te}, ${ie})`)},setByOffset:$e,setByIndices:(V,ie)=>s<2?$e(V,ie):(h.setByIndices=!0,h.indicesToOffset=!0,`set_${r}ByIndices(${V}, ${ie});`),get:ge,getByOffset:ae,getByIndices:Ie,usage:t,name:r,strides:w,shape:T,rank:s}},D=(r,e,n,t=1)=>Ks(r,e,n,"input",t),G=(r,e,n,t=1)=>Ks(r,e,n,"output",t),ki=(r,e,n,t=1)=>Ks(r,e,n,"internal",t),Hs=class{constructor(e,n){this.normalizedDispatchGroup=e;this.limits=n;this.internalVariables=[];this.variables=[];this.uniforms=[];this.variableIndex=0}guardAgainstOutOfBoundsWorkgroupSizes(e){return`if (global_idx >= ${typeof e=="number"?`${e}u`:e}) { return; }`}mainStart(e=Zr){let n=typeof e=="number"?e:e[0],t=typeof e=="number"?1:e[1],o=typeof e=="number"?1:e[2];if(n>this.limits.maxComputeWorkgroupSizeX||t>this.limits.maxComputeWorkgroupSizeY||o>this.limits.maxComputeWorkgroupSizeZ)throw new Error(`workgroup size [${n}, ${t}, ${o}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);if(n*t*o>this.limits.maxComputeInvocationsPerWorkgroup)throw new Error(`workgroup size [${n}, ${t}, ${o}] exceeds the maximum workgroup invocations ${this.limits.maxComputeInvocationsPerWorkgroup}.`);let i=this.normalizedDispatchGroup[1]===1&&this.normalizedDispatchGroup[2]===1,s=i?`@builtin(global_invocation_id) global_id : vec3, +`)},type:p,offsetToIndices:$,indicesToOffset:N,broadcastedIndicesToOffset:_e,indices:z,indicesGet:q,indicesSet:j,set:(...V)=>{if(V.length!==s+1)throw new Error(`indices length must be ${s}`);let ie=V[s];if(typeof ie!="string")throw new Error("value must be string");let Te=V.slice(0,s).map(b).join(",");return s===0?$e("0u",ie):s===1?$e(Te[0],ie):(h.set=!0,h.setByIndices=!0,h.indicesToOffset=!0,`set_${r}(${Te}, ${ie})`)},setByOffset:$e,setByIndices:(V,ie)=>s<2?$e(V,ie):(h.setByIndices=!0,h.indicesToOffset=!0,`set_${r}ByIndices(${V}, ${ie});`),get:ge,getByOffset:ae,getByIndices:Ie,usage:t,name:r,strides:w,shape:T,rank:s}},D=(r,e,n,t=1)=>Ks(r,e,n,"input",t),G=(r,e,n,t=1)=>Ks(r,e,n,"output",t),ki=(r,e,n,t=1)=>Ks(r,e,n,"internal",t),qs=class{constructor(e,n){this.normalizedDispatchGroup=e;this.limits=n;this.internalVariables=[];this.variables=[];this.uniforms=[];this.variableIndex=0}guardAgainstOutOfBoundsWorkgroupSizes(e){return`if (global_idx >= ${typeof e=="number"?`${e}u`:e}) { return; }`}mainStart(e=Zr){let n=typeof e=="number"?e:e[0],t=typeof e=="number"?1:e[1],o=typeof e=="number"?1:e[2];if(n>this.limits.maxComputeWorkgroupSizeX||t>this.limits.maxComputeWorkgroupSizeY||o>this.limits.maxComputeWorkgroupSizeZ)throw new Error(`workgroup size [${n}, ${t}, ${o}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);if(n*t*o>this.limits.maxComputeInvocationsPerWorkgroup)throw new Error(`workgroup size [${n}, ${t}, ${o}] exceeds the maximum workgroup invocations ${this.limits.maxComputeInvocationsPerWorkgroup}.`);let 
i=this.normalizedDispatchGroup[1]===1&&this.normalizedDispatchGroup[2]===1,s=i?`@builtin(global_invocation_id) global_id : vec3, @builtin(workgroup_id) workgroup_id : vec3, @builtin(local_invocation_id) local_id : vec3`:`@builtin(global_invocation_id) global_id : vec3, @builtin(local_invocation_id) local_id : vec3, @@ -1853,9 +1853,9 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e struct Uniforms { ${e.join(", ")} }; @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`}get additionalImplementations(){return this.uniformDeclaration()+this.variables.map(e=>e.impl()).join(` `)+this.internalVariables.map(e=>e.impl()).join(` -`)}get variablesInfo(){if(this.uniforms.length===0)return;let e=n=>[12,10,1,6][["u32","f16","f32","i32"].indexOf(n)];return this.uniforms.map(n=>[e(n.type),n.length??1])}},Hh=(r,e)=>new Hs(r,e),_n=(r,e)=>{let n=r.length,t=[];for(let o=0;o1&&s===1&&t.unshift(i)}return t}});var j_,qh,X_,Z_,Tt,Kh,jh,Jr=C(()=>{"use strict";ue();ye();et();he();j_=r=>{if(!r||r.length!==1)throw new Error("Transpose requires 1 input.")},qh=(r,e)=>e&&e.length!==r?[...new Array(r).keys()].reverse():e,X_=(r,e)=>B.sortBasedOnPerm(r,qh(r.length,e)),Z_=(r,e,n,t)=>{let o=[];o.push(`fn perm(i: ${t.type.indices}) -> ${n.type.indices} { +`)}get variablesInfo(){if(this.uniforms.length===0)return;let e=n=>[12,10,1,6][["u32","f16","f32","i32"].indexOf(n)];return this.uniforms.map(n=>[e(n.type),n.length??1])}},Hh=(r,e)=>new qs(r,e),In=(r,e)=>{let n=r.length,t=[];for(let o=0;o1&&s===1&&t.unshift(i)}return t}});var Z_,qh,Y_,J_,Tt,jh,Kh,Jr=C(()=>{"use strict";ue();ye();et();he();Z_=r=>{if(!r||r.length!==1)throw new Error("Transpose requires 1 input.")},qh=(r,e)=>e&&e.length!==r?[...new Array(r).keys()].reverse():e,Y_=(r,e)=>B.sortBasedOnPerm(r,qh(r.length,e)),J_=(r,e,n,t)=>{let o=[];o.push(`fn perm(i: ${t.type.indices}) -> ${n.type.indices} { var a: ${n.type.indices};`);for(let i=0;i{let n=r.dataType,t=r.dims.length,o=qh(t,e),i=X_(r.dims,o),s=G("output",n,i.length),a=D("a",n,t),u;if(o.length===2&&o[0]===1&&o[1]===0){let l=s.type.value,f=[16,16,1];u=c=>` +`)},Tt=(r,e)=>{let n=r.dataType,t=r.dims.length,o=qh(t,e),i=Y_(r.dims,o),s=G("output",n,i.length),a=D("a",n,t),u;if(o.length===2&&o[0]===1&&o[1]===0){let l=s.type.value,f=[16,16,1];u=c=>` ${c.registerUniform("output_size","u32").declareVariables(a,s)} var tile : array, ${f[0]}>; ${c.mainStart(f)} @@ -1875,7 +1875,7 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e }`}else u=l=>` ${l.registerUniform("output_size","u32").declareVariables(a,s)} - ${Z_(o,t,a,s)} + ${J_(o,t,a,s)} ${l.mainStart()} ${l.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -1884,7 +1884,7 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e let aIndices = perm(indices); ${s.setByOffset("global_idx",a.getByIndices("aIndices"))} - }`;return{name:"Transpose",shaderCache:{hint:`${e}`,inputDependencies:["rank"]},getRunData:l=>{let f=B.size(i);return{outputs:[{dims:i,dataType:l[0].dataType}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:[{type:12,data:f},...W(l[0].dims,i)]}},getShaderSource:u}},Kh=(r,e)=>{j_(r.inputs),r.compute(Tt(r.inputs[0],e.perm))},jh=r=>de({perm:r.perm})});var Y_,J_,Q_,e2,t2,r2,n2,o2,i2,a2,or,Xh,Zh,Yh,Jh,Qh,eg,tg,rg,ng,og,ig=C(()=>{"use strict";ue();ye();he();Di();Jr();Y_={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + 
candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate * candidate",logSumExp:"bestValue + exp(candidate)",l1:"bestValue + abs(candidate)",l2:"bestValue + candidate * candidate",logSum:"bestValue + candidate"},J_={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate",logSumExp:"bestValue + candidate",l1:"bestValue + candidate",l2:"bestValue + candidate",logSum:"bestValue + candidate"},Q_={max:"_A[offset]",min:"_A[offset]",mean:"0",sum:"0",prod:"1",sumSquare:"0",logSumExp:"0",l1:"0",l2:"0",logSum:"0"},e2={max:"bestValue",min:"bestValue",sum:"bestValue",prod:"bestValue",sumSquare:"bestValue",logSumExp:"log(bestValue)",l1:"bestValue",l2:"sqrt(bestValue)",logSum:"log(bestValue)"},t2=(r,e)=>{let n=[];for(let t=e-r;t{let n=[],t=r.length;for(let i=0;ir[i]);return[n,o]},n2=(r,e)=>{let n=r.length+e.length,t=[],o=0;for(let i=0;i{for(let n=0;n{let n=[];if(!o2(r,e)){for(let t=0;tn.push(t))}return n},a2=(r,e,n,t,o,i,s)=>{let a=n[0].dims,u=B.size(i),l=B.size(s),f=D("_A",n[0].dataType,a),c=G("output",o,i),p=32,b=` + }`;return{name:"Transpose",shaderCache:{hint:`${e}`,inputDependencies:["rank"]},getRunData:l=>{let f=B.size(i);return{outputs:[{dims:i,dataType:l[0].dataType}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:[{type:12,data:f},...W(l[0].dims,i)]}},getShaderSource:u}},jh=(r,e)=>{Z_(r.inputs),r.compute(Tt(r.inputs[0],e.perm))},Kh=r=>de({perm:r.perm})});var Q_,e2,t2,r2,n2,o2,i2,a2,s2,u2,or,Xh,Zh,Yh,Jh,Qh,eg,tg,rg,ng,og,ig=C(()=>{"use strict";ue();ye();he();Di();Jr();Q_={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate * candidate",logSumExp:"bestValue + exp(candidate)",l1:"bestValue + abs(candidate)",l2:"bestValue + candidate * candidate",logSum:"bestValue + candidate"},e2={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate",logSumExp:"bestValue + candidate",l1:"bestValue + candidate",l2:"bestValue + candidate",logSum:"bestValue + candidate"},t2={max:"_A[offset]",min:"_A[offset]",mean:"0",sum:"0",prod:"1",sumSquare:"0",logSumExp:"0",l1:"0",l2:"0",logSum:"0"},r2={max:"bestValue",min:"bestValue",sum:"bestValue",prod:"bestValue",sumSquare:"bestValue",logSumExp:"log(bestValue)",l1:"bestValue",l2:"sqrt(bestValue)",logSum:"log(bestValue)"},n2=(r,e)=>{let n=[];for(let t=e-r;t{let n=[],t=r.length;for(let i=0;ir[i]);return[n,o]},i2=(r,e)=>{let n=r.length+e.length,t=[],o=0;for(let i=0;i{for(let n=0;n{let n=[];if(!a2(r,e)){for(let t=0;tn.push(t))}return n},u2=(r,e,n,t,o,i,s)=>{let a=n[0].dims,u=B.size(i),l=B.size(s),f=D("_A",n[0].dataType,a),c=G("output",o,i),p=32,b=` var aBestValues : array; `;return{name:r,shaderCache:e,getShaderSource:g=>` ${g.registerUniform("reduceSize","u32").declareVariables(f,c)} @@ -1897,11 +1897,11 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e let outputIndex = global_idx / ${p}; let offset = outputIndex * uniforms.reduceSize; - var bestValue = f32(${Q_[t]}); + var bestValue = f32(${t2[t]}); let Length = uniforms.reduceSize; for (var k = local_idx; k < Length; k = k + ${p}) { let 
candidate = f32(${f.getByOffset("offset + k")}); - bestValue = ${Y_[t]}; + bestValue = ${Q_[t]}; } aBestValues[local_idx] = bestValue; workgroupBarrier(); @@ -1912,7 +1912,7 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e let interval = DIV_CEIL(reduceSize, 2u); if (local_idx < currentSize) { let candidate = aBestValues[local_idx + interval]; - bestValue = ${J_[t]}; + bestValue = ${e2[t]}; aBestValues[local_idx] = bestValue; } reduceSize = interval; @@ -1920,9 +1920,9 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e } if (local_idx == 0u) { - ${c.setByOffset("outputIndex",`${t==="mean"?`${c.type.storage}(bestValue / f32(uniforms.reduceSize))`:`${c.type.storage}(${e2[t]})`}`)}; + ${c.setByOffset("outputIndex",`${t==="mean"?`${c.type.storage}(bestValue / f32(uniforms.reduceSize))`:`${c.type.storage}(${r2[t]})`}`)}; } - }`,getRunData:()=>({outputs:[{dims:i,dataType:o}],dispatchGroup:{x:u},programUniforms:[{type:12,data:l}]})}},or=(r,e,n,t)=>{let o=r.inputs.length===1?n:js(r.inputs,n),i=o.axes;i.length===0&&!o.noopWithEmptyAxes&&(i=r.inputs[0].dims.map((b,h)=>h));let s=B.normalizeAxes(i,r.inputs[0].dims.length),a=s,u=r.inputs[0],l=i2(a,r.inputs[0].dims.length);l.length>0&&(u=r.compute(Tt(r.inputs[0],l),{inputs:[0],outputs:[-1]})[0],a=t2(a.length,u.dims.length));let[f,c]=r2(u.dims,a),p=f;o.keepDims&&(p=n2(f,s)),r.compute(a2(e,{hint:o.cacheKey,inputDependencies:["type"]},[u],t,r.inputs[0].dataType,p,c),{inputs:[u]})},Xh=(r,e)=>{or(r,"ReduceMeanShared",e,"mean")},Zh=(r,e)=>{or(r,"ReduceL1Shared",e,"l1")},Yh=(r,e)=>{or(r,"ReduceL2Shared",e,"l2")},Jh=(r,e)=>{or(r,"ReduceLogSumExpShared",e,"logSumExp")},Qh=(r,e)=>{or(r,"ReduceMaxShared",e,"max")},eg=(r,e)=>{or(r,"ReduceMinShared",e,"min")},tg=(r,e)=>{or(r,"ReduceProdShared",e,"prod")},rg=(r,e)=>{or(r,"ReduceSumShared",e,"sum")},ng=(r,e)=>{or(r,"ReduceSumSquareShared",e,"sumSquare")},og=(r,e)=>{or(r,"ReduceLogSumShared",e,"logSum")}});var ir,s2,Bi,js,ar,u2,l2,c2,f2,d2,p2,m2,h2,g2,b2,sr,ag,sg,ug,lg,cg,fg,dg,pg,mg,hg,Di=C(()=>{"use strict";ue();ye();et();he();ig();ir=r=>{if(!r||r.length===0||r.length>2)throw new Error("Reduce op requires 1 or 2 inputs.");if(r.length===2&&r[1].dims.length!==1)throw new Error("Invalid axes input dims.")},s2=r=>["","",`var value = ${r.getByIndices("input_indices")};`,""],Bi=(r,e,n,t,o,i,s=!1,a=!1)=>{let u=[],l=n[0].dims,f=l.length,c=B.normalizeAxes(o,f),p=!a&&c.length===0;l.forEach((T,w)=>{p||c.indexOf(w)>=0?s&&u.push(1):u.push(T)});let b=u.length,h=B.size(u);return{name:r,shaderCache:e,getShaderSource:T=>{let w=[],v=D("_A",n[0].dataType,f),S=G("output",i,b),$=t(v,S,c),P=$[2];for(let E=0,N=0;E=0?(s&&N++,P=`for(var j${E}: u32 = 0; j${E} < ${l[E]}; j${E}++) { + }`,getRunData:()=>({outputs:[{dims:i,dataType:o}],dispatchGroup:{x:u},programUniforms:[{type:12,data:l}]})}},or=(r,e,n,t)=>{let o=r.inputs.length===1?n:Xs(r.inputs,n),i=o.axes;i.length===0&&!o.noopWithEmptyAxes&&(i=r.inputs[0].dims.map((b,h)=>h));let 
s=B.normalizeAxes(i,r.inputs[0].dims.length),a=s,u=r.inputs[0],l=s2(a,r.inputs[0].dims.length);l.length>0&&(u=r.compute(Tt(r.inputs[0],l),{inputs:[0],outputs:[-1]})[0],a=n2(a.length,u.dims.length));let[f,c]=o2(u.dims,a),p=f;o.keepDims&&(p=i2(f,s)),r.compute(u2(e,{hint:o.cacheKey,inputDependencies:["type"]},[u],t,r.inputs[0].dataType,p,c),{inputs:[u]})},Xh=(r,e)=>{or(r,"ReduceMeanShared",e,"mean")},Zh=(r,e)=>{or(r,"ReduceL1Shared",e,"l1")},Yh=(r,e)=>{or(r,"ReduceL2Shared",e,"l2")},Jh=(r,e)=>{or(r,"ReduceLogSumExpShared",e,"logSumExp")},Qh=(r,e)=>{or(r,"ReduceMaxShared",e,"max")},eg=(r,e)=>{or(r,"ReduceMinShared",e,"min")},tg=(r,e)=>{or(r,"ReduceProdShared",e,"prod")},rg=(r,e)=>{or(r,"ReduceSumShared",e,"sum")},ng=(r,e)=>{or(r,"ReduceSumSquareShared",e,"sumSquare")},og=(r,e)=>{or(r,"ReduceLogSumShared",e,"logSum")}});var ir,l2,Bi,Xs,ar,c2,f2,d2,p2,m2,h2,g2,b2,y2,x2,sr,ag,sg,ug,lg,cg,fg,dg,pg,mg,hg,Di=C(()=>{"use strict";ue();ye();et();he();ig();ir=r=>{if(!r||r.length===0||r.length>2)throw new Error("Reduce op requires 1 or 2 inputs.");if(r.length===2&&r[1].dims.length!==1)throw new Error("Invalid axes input dims.")},l2=r=>["","",`var value = ${r.getByIndices("input_indices")};`,""],Bi=(r,e,n,t,o,i,s=!1,a=!1)=>{let u=[],l=n[0].dims,f=l.length,c=B.normalizeAxes(o,f),p=!a&&c.length===0;l.forEach((T,w)=>{p||c.indexOf(w)>=0?s&&u.push(1):u.push(T)});let b=u.length,h=B.size(u);return{name:r,shaderCache:e,getShaderSource:T=>{let w=[],v=D("_A",n[0].dataType,f),S=G("output",i,b),$=t(v,S,c),P=$[2];for(let E=0,N=0;E=0?(s&&N++,P=`for(var j${E}: u32 = 0; j${E} < ${l[E]}; j${E}++) { ${$[2].includes("last_index")?`let last_index = j${E};`:""} ${v.indicesSet("input_indices",E,`j${E}`)} ${P} @@ -1943,9 +1943,9 @@ as ORT format: ${i}`)}}loadFromOnnxFormat(e,n){let t=mh.onnx.ModelProto.decode(e ${$[3]} ${$.length===4?S.setByOffset("global_idx","value"):$.slice(4).join(` `)} - }`},getRunData:()=>({outputs:[{dims:u,dataType:i}],dispatchGroup:{x:Math.ceil(h/64)},programUniforms:[{type:12,data:h},...W(l,u)]})}},js=(r,e)=>{let n=[];return r[1].dims[0]>0&&r[1].getBigInt64Array().forEach(t=>n.push(Number(t))),de({axes:n,keepDims:e.keepDims,noopWithEmptyAxes:e.noopWithEmptyAxes})},ar=(r,e,n,t)=>{let o=r.inputs,i=o.length===1?n:js(o,n);r.compute(Bi(e,{hint:i.cacheKey,inputDependencies:["rank"]},[o[0]],i.noopWithEmptyAxes&&i.axes.length===0?s2:t,i.axes,o[0].dataType,i.keepDims,i.noopWithEmptyAxes),{inputs:[0]})},u2=(r,e)=>{ir(r.inputs),ar(r,"ReduceLogSum",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${t.getByIndices("input_indices")};`,"value = log(value);"])},l2=(r,e)=>{ir(r.inputs),ar(r,"ReduceL1",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += abs(${t.getByIndices("input_indices")});`,""])},c2=(r,e)=>{ir(r.inputs),ar(r,"ReduceL2",e,(t,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${t.getByIndices("input_indices")}; value += (t * t);`,"value = sqrt(value);"])},f2=(r,e)=>{ir(r.inputs),ar(r,"ReduceLogSumExp",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += exp(${t.getByIndices("input_indices")});`,"value = log(value);"])},d2=(r,e)=>{ir(r.inputs),ar(r,"ReduceMax",e,(t,o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(t.indicesSet("input_indices",a,0));return[`${s.join(` -`)}`,`var value = ${t.getByIndices("input_indices")};`,`value = max(value, ${t.getByIndices("input_indices")});`,""]})},p2=(r,e)=>{ir(r.inputs),ar(r,"ReduceMean",e,(t,o,i)=>{let s=1;for(let a=0;a=0||i.length===0)&&(s*=r.inputs[0].dims[a]);return["var sum = f32(0);","",`sum += 
f32(${t.getByIndices("input_indices")});`,`let value = ${o.type.value}(sum / ${s});`]})},m2=(r,e)=>{ir(r.inputs),ar(r,"ReduceMin",e,(t,o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`input_indices[${a}] = 0;`);return[`${s.join(` -`)}`,`var value = ${t.getByIndices("input_indices")};`,`value = min(value, ${t.getByIndices("input_indices")});`,""]})},h2=(r,e)=>{ir(r.inputs),ar(r,"ReduceProd",e,(t,o)=>[`var value = ${o.type.storage}(1);`,"",`value *= ${t.getByIndices("input_indices")};`,""])},g2=(r,e)=>{ir(r.inputs),ar(r,"ReduceSum",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${t.getByIndices("input_indices")};`,""])},b2=(r,e)=>{ir(r.inputs),ar(r,"ReduceSumSquare",e,(t,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${t.getByIndices("input_indices")}; value += t * t;`,""])},sr=(r,e,n)=>{if(e.length===0)return n;let t=1,o=1;for(let i=0;i1024},ag=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?p2(r,e):Xh(r,e)},sg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?l2(r,e):Zh(r,e)},ug=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?c2(r,e):Yh(r,e)},lg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?f2(r,e):Jh(r,e)},cg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?d2(r,e):Qh(r,e)},fg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?m2(r,e):eg(r,e)},dg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?h2(r,e):tg(r,e)},pg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?g2(r,e):rg(r,e)},mg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?b2(r,e):ng(r,e)},hg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?u2(r,e):og(r,e)}});var gg,bg,yg,Xs,xg=C(()=>{"use strict";ue();et();Di();gg=r=>{if(!r||r.length===0||r.length>2)throw new Error("ArgMinMaxOp op requires 1 or 2 inputs.");if(r[0].dataType!==1)throw new Error("Invalid input type.")},bg=(r,e)=>{gg(r.inputs);let n=(t,o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`input_indices[${a}] = 0;`);return[`${s.join(` + }`},getRunData:()=>({outputs:[{dims:u,dataType:i}],dispatchGroup:{x:Math.ceil(h/64)},programUniforms:[{type:12,data:h},...W(l,u)]})}},Xs=(r,e)=>{let n=[];return r[1].dims[0]>0&&r[1].getBigInt64Array().forEach(t=>n.push(Number(t))),de({axes:n,keepDims:e.keepDims,noopWithEmptyAxes:e.noopWithEmptyAxes})},ar=(r,e,n,t)=>{let o=r.inputs,i=o.length===1?n:Xs(o,n);r.compute(Bi(e,{hint:i.cacheKey,inputDependencies:["rank"]},[o[0]],i.noopWithEmptyAxes&&i.axes.length===0?l2:t,i.axes,o[0].dataType,i.keepDims,i.noopWithEmptyAxes),{inputs:[0]})},c2=(r,e)=>{ir(r.inputs),ar(r,"ReduceLogSum",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${t.getByIndices("input_indices")};`,"value = log(value);"])},f2=(r,e)=>{ir(r.inputs),ar(r,"ReduceL1",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += abs(${t.getByIndices("input_indices")});`,""])},d2=(r,e)=>{ir(r.inputs),ar(r,"ReduceL2",e,(t,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${t.getByIndices("input_indices")}; value += (t * t);`,"value = sqrt(value);"])},p2=(r,e)=>{ir(r.inputs),ar(r,"ReduceLogSumExp",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += exp(${t.getByIndices("input_indices")});`,"value = log(value);"])},m2=(r,e)=>{ir(r.inputs),ar(r,"ReduceMax",e,(t,o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(t.indicesSet("input_indices",a,0));return[`${s.join(` +`)}`,`var value = ${t.getByIndices("input_indices")};`,`value = max(value, 
${t.getByIndices("input_indices")});`,""]})},h2=(r,e)=>{ir(r.inputs),ar(r,"ReduceMean",e,(t,o,i)=>{let s=1;for(let a=0;a=0||i.length===0)&&(s*=r.inputs[0].dims[a]);return["var sum = f32(0);","",`sum += f32(${t.getByIndices("input_indices")});`,`let value = ${o.type.value}(sum / ${s});`]})},g2=(r,e)=>{ir(r.inputs),ar(r,"ReduceMin",e,(t,o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`input_indices[${a}] = 0;`);return[`${s.join(` +`)}`,`var value = ${t.getByIndices("input_indices")};`,`value = min(value, ${t.getByIndices("input_indices")});`,""]})},b2=(r,e)=>{ir(r.inputs),ar(r,"ReduceProd",e,(t,o)=>[`var value = ${o.type.storage}(1);`,"",`value *= ${t.getByIndices("input_indices")};`,""])},y2=(r,e)=>{ir(r.inputs),ar(r,"ReduceSum",e,(t,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${t.getByIndices("input_indices")};`,""])},x2=(r,e)=>{ir(r.inputs),ar(r,"ReduceSumSquare",e,(t,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${t.getByIndices("input_indices")}; value += t * t;`,""])},sr=(r,e,n)=>{if(e.length===0)return n;let t=1,o=1;for(let i=0;i1024},ag=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?h2(r,e):Xh(r,e)},sg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?f2(r,e):Zh(r,e)},ug=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?d2(r,e):Yh(r,e)},lg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?p2(r,e):Jh(r,e)},cg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?m2(r,e):Qh(r,e)},fg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?g2(r,e):eg(r,e)},dg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?b2(r,e):tg(r,e)},pg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?y2(r,e):rg(r,e)},mg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?x2(r,e):ng(r,e)},hg=(r,e)=>{sr(r.inputs[0].dims,e.axes,e.noopWithEmptyAxes)?c2(r,e):og(r,e)}});var gg,bg,yg,Zs,xg=C(()=>{"use strict";ue();et();Di();gg=r=>{if(!r||r.length===0||r.length>2)throw new Error("ArgMinMaxOp op requires 1 or 2 inputs.");if(r[0].dataType!==1)throw new Error("Invalid input type.")},bg=(r,e)=>{gg(r.inputs);let n=(t,o,i)=>{let s=[];for(let a=0;a=0||i.length===0)&&s.push(`input_indices[${a}] = 0;`);return[`${s.join(` `)}`,`var value = ${t.getByIndices("input_indices")}; var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLastIndex>0?"<=":"<"} value) { value = ${t.getByIndices("input_indices")}; @@ -1955,7 +1955,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLastIndex>0?">=":">"} value) { value = ${t.getByIndices("input_indices")}; best_index = i32(last_index); - }`,"",o.setByOffset("global_idx","best_index")]};r.compute(Bi("argMax",{hint:e.cacheKey,inputDependencies:["rank"]},[r.inputs[0]],n,[e.axis],7,e.keepDims),{inputs:[0]})},Xs=r=>de(r)});var y2,x2,v2,w2,In,T2,vg,Li=C(()=>{"use strict";ue();Pi();he();y2=(r,e)=>{let n=r[0],t=r[1],o=r[2],i=r[3],s=r[4],a=r[5];if(s&&a)throw new Error("Attention cannot have both past and relative_position_bias");if(n.dims.length!==3)throw new Error('Input "input" must have 3 dimensions');let u=n.dims[0],l=n.dims[1],f=n.dims[2];if(o.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimensions');if(t.dims.length!==2)throw new Error('Input "weights" is expected to have 2 dimensions');if(t.dims[0]!==f)throw new Error("Input 1 dimension 0 should have same length as dimension 2 of input 0");if(o.dims[0]!==t.dims[1])throw new Error('Input "bias" 
dimension 0 should have same length as dimension 1 of input "weights"');let c=o.dims[0]/3,p=c,b=p;if(e.qkvHiddenSizes.length>0){if(e.qkvHiddenSizes.length!==3)throw new Error("qkv_hidden_sizes attribute should have 3 elements");for(let S of e.qkvHiddenSizes)if(S%e.numHeads!==0)throw new Error("qkv_hidden_sizes should be divisible by num_heads");c=e.qkvHiddenSizes[0],p=e.qkvHiddenSizes[1],b=e.qkvHiddenSizes[2]}let h=l;if(c!==p)throw new Error("qkv_hidden_sizes first element should be same as the second");if(o.dims[0]!==c+p+b)throw new Error('Input "bias" dimension 0 should have same length as sum of Q/K/V hidden sizes');let g=0;if(s){if(p!==b)throw new Error('Input "past" expect k_hidden_size == v_hidden_size');if(s.dims.length!==5)throw new Error('Input "past" must have 5 dimensions');if(s.dims[0]!==2)throw new Error('Input "past" first dimension must be 2');if(s.dims[1]!==u)throw new Error('Input "past" second dimension must be batch_size');if(s.dims[2]!==e.numHeads)throw new Error('Input "past" third dimension must be num_heads');if(s.dims[4]!==p/e.numHeads)throw new Error('Input "past" fifth dimension must be k_hidden_size / num_heads');e.pastPresentShareBuffer||(g=s.dims[3])}let T=h+g,w=-1,v=0;if(i)throw new Error("Mask not supported");if(s)throw new Error("past is not supported");return{batchSize:u,sequenceLength:l,pastSequenceLength:g,kvSequenceLength:h,totalSequenceLength:T,maxSequenceLength:w,inputHiddenSize:f,hiddenSize:c,vHiddenSize:b,headSize:Math.floor(c/e.numHeads),vHeadSize:Math.floor(b/e.numHeads),numHeads:e.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:e.maskFilterValue,maskType:v,scale:e.scale,broadcastResPosBias:!1,passPastInKv:!1,qkvFormat:1}},x2=(r,e,n,t)=>{let o=ze(t),i=64,s=t/o;s{let b=G("x",e.dataType,e.dims,o),g=[{name:"d_inv",type:ct(e.dataType)},{name:"d_comp",type:"u32"},{name:"elements_per_thread",type:"u32"}];return` + }`,"",o.setByOffset("global_idx","best_index")]};r.compute(Bi("argMax",{hint:e.cacheKey,inputDependencies:["rank"]},[r.inputs[0]],n,[e.axis],7,e.keepDims),{inputs:[0]})},Zs=r=>de(r)});var v2,w2,T2,_2,Sn,I2,vg,Li=C(()=>{"use strict";ue();Pi();he();v2=(r,e)=>{let n=r[0],t=r[1],o=r[2],i=r[3],s=r[4],a=r[5];if(s&&a)throw new Error("Attention cannot have both past and relative_position_bias");if(n.dims.length!==3)throw new Error('Input "input" must have 3 dimensions');let u=n.dims[0],l=n.dims[1],f=n.dims[2];if(o.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimensions');if(t.dims.length!==2)throw new Error('Input "weights" is expected to have 2 dimensions');if(t.dims[0]!==f)throw new Error("Input 1 dimension 0 should have same length as dimension 2 of input 0");if(o.dims[0]!==t.dims[1])throw new Error('Input "bias" dimension 0 should have same length as dimension 1 of input "weights"');let c=o.dims[0]/3,p=c,b=p;if(e.qkvHiddenSizes.length>0){if(e.qkvHiddenSizes.length!==3)throw new Error("qkv_hidden_sizes attribute should have 3 elements");for(let S of e.qkvHiddenSizes)if(S%e.numHeads!==0)throw new Error("qkv_hidden_sizes should be divisible by num_heads");c=e.qkvHiddenSizes[0],p=e.qkvHiddenSizes[1],b=e.qkvHiddenSizes[2]}let h=l;if(c!==p)throw new Error("qkv_hidden_sizes first element should be same as the second");if(o.dims[0]!==c+p+b)throw new Error('Input "bias" dimension 0 should have same length as sum of Q/K/V hidden sizes');let g=0;if(s){if(p!==b)throw new Error('Input "past" expect k_hidden_size == v_hidden_size');if(s.dims.length!==5)throw new Error('Input "past" must have 5 
dimensions');if(s.dims[0]!==2)throw new Error('Input "past" first dimension must be 2');if(s.dims[1]!==u)throw new Error('Input "past" second dimension must be batch_size');if(s.dims[2]!==e.numHeads)throw new Error('Input "past" third dimension must be num_heads');if(s.dims[4]!==p/e.numHeads)throw new Error('Input "past" fifth dimension must be k_hidden_size / num_heads');e.pastPresentShareBuffer||(g=s.dims[3])}let T=h+g,w=-1,v=0;if(i)throw new Error("Mask not supported");if(s)throw new Error("past is not supported");return{batchSize:u,sequenceLength:l,pastSequenceLength:g,kvSequenceLength:h,totalSequenceLength:T,maxSequenceLength:w,inputHiddenSize:f,hiddenSize:c,vHiddenSize:b,headSize:Math.floor(c/e.numHeads),vHeadSize:Math.floor(b/e.numHeads),numHeads:e.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:e.maskFilterValue,maskType:v,scale:e.scale,broadcastResPosBias:!1,passPastInKv:!1,qkvFormat:1}},w2=(r,e,n,t)=>{let o=ze(t),i=64,s=t/o;s{let b=G("x",e.dataType,e.dims,o),g=[{name:"d_inv",type:ct(e.dataType)},{name:"d_comp",type:"u32"},{name:"elements_per_thread",type:"u32"}];return` var thread_max: array; var thread_sum: array; ${p.registerUniforms(g).declareVariables(b)} @@ -1997,12 +1997,12 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas x[offset + i] = ${b.type.value}(exp(f32input - max_value) / sum); } } - }`};return{name:"AttentionProbsSoftmax",shaderCache:{hint:`${i};${l};${o}`},getShaderSource:c,getRunData:()=>({outputs:[],dispatchGroup:{x:n},programUniforms:u})}},v2=(r,e,n,t,o,i,s,a)=>{let u=a+i.kvSequenceLength,l=[i.batchSize,i.numHeads,i.sequenceLength,u],f=i.kvNumHeads===void 0&&r.outputCount>1,c=f?[i.batchSize,i.numHeads,u,i.headSize]:void 0,p=s.scale===0?1/Math.sqrt(i.headSize):s.scale,b=ze(i.headSize),h=i.headSize/b,g=12,T={x:Math.ceil(u/g),y:Math.ceil(i.sequenceLength/g),z:i.batchSize*i.numHeads},w=[{type:12,data:i.sequenceLength},{type:12,data:h},{type:12,data:u},{type:12,data:i.numHeads},{type:1,data:p},{type:12,data:a},{type:12,data:i.kvSequenceLength}],v=["type","type"];t&&v.push("type"),o&&v.push("type");let S=[{dims:l,dataType:e.dataType,gpuDataType:0}];f&&S.push({dims:c,dataType:e.dataType,gpuDataType:0});let $=P=>{let E=D("q",e.dataType,e.dims,b),N=D("key",n.dataType,n.dims,b),z=[E,N];if(t){let $e=D("past_key",t.dataType,t.dims,b);z.push($e)}o&&z.push(D("relative_position_bias",o.dataType,o.dims));let q=G("output",e.dataType,l),K=[q];f&&K.push(G("present_key",e.dataType,c,b));let F=ct(1,b),_e=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"alpha",type:"f32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` + }`};return{name:"AttentionProbsSoftmax",shaderCache:{hint:`${i};${l};${o}`},getShaderSource:c,getRunData:()=>({outputs:[],dispatchGroup:{x:n},programUniforms:u})}},T2=(r,e,n,t,o,i,s,a)=>{let u=a+i.kvSequenceLength,l=[i.batchSize,i.numHeads,i.sequenceLength,u],f=i.kvNumHeads===void 0&&r.outputCount>1,c=f?[i.batchSize,i.numHeads,u,i.headSize]:void 0,p=s.scale===0?1/Math.sqrt(i.headSize):s.scale,b=ze(i.headSize),h=i.headSize/b,g=12,T={x:Math.ceil(u/g),y:Math.ceil(i.sequenceLength/g),z:i.batchSize*i.numHeads},w=[{type:12,data:i.sequenceLength},{type:12,data:h},{type:12,data:u},{type:12,data:i.numHeads},{type:1,data:p},{type:12,data:a},{type:12,data:i.kvSequenceLength}],v=["type","type"];t&&v.push("type"),o&&v.push("type");let 
S=[{dims:l,dataType:e.dataType,gpuDataType:0}];f&&S.push({dims:c,dataType:e.dataType,gpuDataType:0});let $=P=>{let E=D("q",e.dataType,e.dims,b),N=D("key",n.dataType,n.dims,b),z=[E,N];if(t){let $e=D("past_key",t.dataType,t.dims,b);z.push($e)}o&&z.push(D("relative_position_bias",o.dataType,o.dims));let q=G("output",e.dataType,l),j=[q];f&&j.push(G("present_key",e.dataType,c,b));let F=ct(1,b),_e=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"alpha",type:"f32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` const TILE_SIZE = ${g}u; var tileQ: array<${E.type.storage}, ${g*g}>; var tileK: array<${E.type.storage}, ${g*g}>; - ${P.registerUniforms(_e).declareVariables(...z,...K)} + ${P.registerUniforms(_e).declareVariables(...z,...j)} ${P.mainStart([g,g,1])} // x holds the N and y holds the M let headIdx = workgroup_id.z; @@ -2045,7 +2045,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas var sum: f32 = ${(()=>{switch(b){case 1:return"value";case 2:return"value.x + value.y";case 4:return"value.x + value.y + value.z + value.w";default:throw new Error(`Unsupported components: ${b}`)}})()}; output[outputIdx] = ${q.type.value} (sum * uniforms.alpha) + ${o?"relative_position_bias[outputIdx]":"0.0"}; } - }`};return{name:"AttentionProbs",shaderCache:{hint:`${b};${o!==void 0};${t!==void 0};${r.outputCount}`,inputDependencies:v},getRunData:()=>({outputs:S,dispatchGroup:T,programUniforms:w}),getShaderSource:$}},w2=(r,e,n,t,o,i)=>{let s=i+o.kvSequenceLength,a=o.nReps?o.nReps:1,u=o.vHiddenSize*a,l=o.kvNumHeads==null&&r.outputCount>1,f=l?[o.batchSize,o.numHeads,s,o.headSize]:void 0,c=[o.batchSize,o.sequenceLength,u],p=12,b={x:Math.ceil(o.vHeadSize/p),y:Math.ceil(o.sequenceLength/p),z:o.batchSize*o.numHeads},h=[{type:12,data:o.sequenceLength},{type:12,data:s},{type:12,data:o.vHeadSize},{type:12,data:o.numHeads},{type:12,data:u},{type:12,data:i},{type:12,data:o.kvSequenceLength}],g=t?["type","type","type"]:["type","type"],T=[{dims:c,dataType:e.dataType,gpuDataType:0}];l&&T.push({dims:f,dataType:e.dataType,gpuDataType:0});let w=v=>{let S=D("probs",e.dataType,e.dims),$=D("v",n.dataType,n.dims),P=[S,$];t&&P.push(D("past_value",t.dataType,t.dims));let N=[G("output",e.dataType,c)];l&&N.push(G("present_value",e.dataType,f));let z=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"v_hidden_size",type:"u32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` + }`};return{name:"AttentionProbs",shaderCache:{hint:`${b};${o!==void 0};${t!==void 0};${r.outputCount}`,inputDependencies:v},getRunData:()=>({outputs:S,dispatchGroup:T,programUniforms:w}),getShaderSource:$}},_2=(r,e,n,t,o,i)=>{let s=i+o.kvSequenceLength,a=o.nReps?o.nReps:1,u=o.vHiddenSize*a,l=o.kvNumHeads==null&&r.outputCount>1,f=l?[o.batchSize,o.numHeads,s,o.headSize]:void 0,c=[o.batchSize,o.sequenceLength,u],p=12,b={x:Math.ceil(o.vHeadSize/p),y:Math.ceil(o.sequenceLength/p),z:o.batchSize*o.numHeads},h=[{type:12,data:o.sequenceLength},{type:12,data:s},{type:12,data:o.vHeadSize},{type:12,data:o.numHeads},{type:12,data:u},{type:12,data:i},{type:12,data:o.kvSequenceLength}],g=t?["type","type","type"]:["type","type"],T=[{dims:c,dataType:e.dataType,gpuDataType:0}];l&&T.push({dims:f,dataType:e.dataType,gpuDataType:0});let w=v=>{let S=D("probs",e.dataType,e.dims),$=D("v",n.dataType,n.dims),P=[S,$];t&&P.push(D("past_value",t.dataType,t.dims));let 
N=[G("output",e.dataType,c)];l&&N.push(G("present_value",e.dataType,f));let z=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"v_hidden_size",type:"u32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` const TILE_SIZE = ${p}u; var tileQ: array<${S.type.value}, ${p*p}>; var tileK: array<${S.type.value}, ${p*p}>; @@ -2096,7 +2096,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas + currentBatchHeadNumber * uniforms.N + n; output[outputIdx] = value; } - }`};return{name:"AttentionScore",shaderCache:{hint:`${t!==void 0};${r.outputCount}`,inputDependencies:g},getRunData:()=>({outputs:T,dispatchGroup:b,programUniforms:h}),getShaderSource:w}},In=(r,e,n,t,o,i,s,a,u,l,f)=>{let c=r.outputCount,p=l.kvNumHeads!==void 0||c>1?l.pastSequenceLength:0,b=p+l.kvSequenceLength,h=l.kvNumHeads===void 0&&c>1&&s?[e,n,s]:[e,n];u&&h.push(u);let g=r.compute(v2(r,e,n,c>1?s:void 0,u,l,f,p),{inputs:h,outputs:l.kvNumHeads===void 0&&c>1?[-1,1]:[-1]})[0];r.compute(x2(r,g,l.batchSize*l.numHeads*l.sequenceLength,b),{inputs:[g],outputs:[]});let T=l.kvNumHeads===void 0&&c>1&&a?[g,t,a]:[g,t];r.compute(w2(r,g,t,c>1&&a?a:void 0,l,p),{inputs:T,outputs:l.kvNumHeads===void 0&&c>1?[0,2]:[0]})},T2=(r,e)=>{let n=[e.batchSize,e.numHeads,e.sequenceLength,e.headSize],t=e.sequenceLength,o=e.inputHiddenSize,i=e.headSize,s=12,a={x:Math.ceil(e.headSize/s),y:Math.ceil(e.sequenceLength/s),z:e.batchSize*e.numHeads},u=[r.inputs[0],r.inputs[1],r.inputs[2]],l=[{type:12,data:t},{type:12,data:o},{type:12,data:i},{type:12,data:e.numHeads},{type:12,data:e.headSize},{type:12,data:e.hiddenSize},{type:12,data:e.hiddenSize+e.hiddenSize+e.vHiddenSize}],f=c=>{let p=G("output_q",u[0].dataType,n),b=G("output_k",u[0].dataType,n),h=G("output_v",u[0].dataType,n),g=D("input",u[0].dataType,u[0].dims),T=D("weight",u[1].dataType,u[1].dims),w=D("bias",u[2].dataType,u[2].dims),v=g.type.storage,S=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"head_size",type:"u32"},{name:"hidden_size",type:"u32"},{name:"ldb",type:"u32"}];return` + }`};return{name:"AttentionScore",shaderCache:{hint:`${t!==void 0};${r.outputCount}`,inputDependencies:g},getRunData:()=>({outputs:T,dispatchGroup:b,programUniforms:h}),getShaderSource:w}},Sn=(r,e,n,t,o,i,s,a,u,l,f)=>{let c=r.outputCount,p=l.kvNumHeads!==void 0||c>1?l.pastSequenceLength:0,b=p+l.kvSequenceLength,h=l.kvNumHeads===void 0&&c>1&&s?[e,n,s]:[e,n];u&&h.push(u);let g=r.compute(T2(r,e,n,c>1?s:void 0,u,l,f,p),{inputs:h,outputs:l.kvNumHeads===void 0&&c>1?[-1,1]:[-1]})[0];r.compute(w2(r,g,l.batchSize*l.numHeads*l.sequenceLength,b),{inputs:[g],outputs:[]});let T=l.kvNumHeads===void 0&&c>1&&a?[g,t,a]:[g,t];r.compute(_2(r,g,t,c>1&&a?a:void 0,l,p),{inputs:T,outputs:l.kvNumHeads===void 0&&c>1?[0,2]:[0]})},I2=(r,e)=>{let n=[e.batchSize,e.numHeads,e.sequenceLength,e.headSize],t=e.sequenceLength,o=e.inputHiddenSize,i=e.headSize,s=12,a={x:Math.ceil(e.headSize/s),y:Math.ceil(e.sequenceLength/s),z:e.batchSize*e.numHeads},u=[r.inputs[0],r.inputs[1],r.inputs[2]],l=[{type:12,data:t},{type:12,data:o},{type:12,data:i},{type:12,data:e.numHeads},{type:12,data:e.headSize},{type:12,data:e.hiddenSize},{type:12,data:e.hiddenSize+e.hiddenSize+e.vHiddenSize}],f=c=>{let 
p=G("output_q",u[0].dataType,n),b=G("output_k",u[0].dataType,n),h=G("output_v",u[0].dataType,n),g=D("input",u[0].dataType,u[0].dims),T=D("weight",u[1].dataType,u[1].dims),w=D("bias",u[2].dataType,u[2].dims),v=g.type.storage,S=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"head_size",type:"u32"},{name:"hidden_size",type:"u32"},{name:"ldb",type:"u32"}];return` const TILE_SIZE = ${s}u; var tileInput: array<${v}, ${s*s}>; var tileWeightQ: array<${v}, ${s*s}>; @@ -2151,7 +2151,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas output_k[outputIdx] = valueK; output_v[outputIdx] = valueV; } - }`};return r.compute({name:"AttentionPrepare",shaderCache:{inputDependencies:["type","type","type"]},getRunData:()=>({outputs:[{dims:n,dataType:r.inputs[0].dataType,gpuDataType:0},{dims:n,dataType:r.inputs[0].dataType,gpuDataType:0},{dims:n,dataType:r.inputs[0].dataType,gpuDataType:0}],dispatchGroup:a,programUniforms:l}),getShaderSource:f},{inputs:u,outputs:[-1,-1,-1]})},vg=(r,e)=>{let n=y2(r.inputs,e),[t,o,i]=T2(r,n);return In(r,t,o,i,r.inputs[4],void 0,void 0,void 0,r.inputs[5],n,e)}});var _2,I2,S2,wg,Tg=C(()=>{"use strict";ft();ue();ye();et();he();_2=(r,e)=>{if(!r||r.length!==5)throw new Error("BatchNormalization requires 5 inputs");let n=(t,o,i)=>{let s=o.length;if(s!==t.length)throw new Error(`${i}: num dimensions != ${s}`);o.forEach((a,u)=>{if(a!==t[u])throw new Error(`${i}: dim[${u}] do not match`)})};if(r[0].dims.length>1){let t=e.format==="NHWC"?e.spatial?r[0].dims.slice(-1):r[0].dims.slice(-1).concat(r[0].dims.slice(1,r[0].dims.length-1)):r[0].dims.slice(1,e.spatial?2:void 0);n(r[1].dims,t,"Invalid input scale"),n(r[2].dims,t,"Invalid input B"),n(r[3].dims,t,"Invalid input mean"),n(r[4].dims,t,"Invalid input var")}else n(r[1].dims,[1],"Invalid input scale"),n(r[2].dims,[1],"Invalid input B"),n(r[3].dims,[1],"Invalid input mean"),n(r[4].dims,[1],"Invalid input var")},I2=(r,e)=>{let{epsilon:n,spatial:t,format:o}=e,i=r[0].dims,s=t?ze(i[i.length-1]):1,a=o==="NHWC"&&i.length>1?s:1,u=B.size(i)/s,l=t,f=l?i.length:i,c=D("x",r[0].dataType,r[0].dims,s),p=D("scale",r[1].dataType,r[1].dims,a),b=D("bias",r[2].dataType,r[2].dims,a),h=D("inputMean",r[3].dataType,r[3].dims,a),g=D("inputVar",r[4].dataType,r[4].dims,a),T=G("y",r[0].dataType,f,s),w=()=>{let S="";if(t)S=`let cOffset = ${i.length===1?"0u":o==="NHWC"?`outputIndices[${i.length-1}] / ${s}`:"outputIndices[1]"};`;else if(o==="NCHW")S=` + }`};return r.compute({name:"AttentionPrepare",shaderCache:{inputDependencies:["type","type","type"]},getRunData:()=>({outputs:[{dims:n,dataType:r.inputs[0].dataType,gpuDataType:0},{dims:n,dataType:r.inputs[0].dataType,gpuDataType:0},{dims:n,dataType:r.inputs[0].dataType,gpuDataType:0}],dispatchGroup:a,programUniforms:l}),getShaderSource:f},{inputs:u,outputs:[-1,-1,-1]})},vg=(r,e)=>{let n=v2(r.inputs,e),[t,o,i]=I2(r,n);return Sn(r,t,o,i,r.inputs[4],void 0,void 0,void 0,r.inputs[5],n,e)}});var S2,$2,A2,wg,Tg=C(()=>{"use strict";ft();ue();ye();et();he();S2=(r,e)=>{if(!r||r.length!==5)throw new Error("BatchNormalization requires 5 inputs");let n=(t,o,i)=>{let s=o.length;if(s!==t.length)throw new Error(`${i}: num dimensions != ${s}`);o.forEach((a,u)=>{if(a!==t[u])throw new Error(`${i}: dim[${u}] do not match`)})};if(r[0].dims.length>1){let t=e.format==="NHWC"?e.spatial?r[0].dims.slice(-1):r[0].dims.slice(-1).concat(r[0].dims.slice(1,r[0].dims.length-1)):r[0].dims.slice(1,e.spatial?2:void 0);n(r[1].dims,t,"Invalid input 
scale"),n(r[2].dims,t,"Invalid input B"),n(r[3].dims,t,"Invalid input mean"),n(r[4].dims,t,"Invalid input var")}else n(r[1].dims,[1],"Invalid input scale"),n(r[2].dims,[1],"Invalid input B"),n(r[3].dims,[1],"Invalid input mean"),n(r[4].dims,[1],"Invalid input var")},$2=(r,e)=>{let{epsilon:n,spatial:t,format:o}=e,i=r[0].dims,s=t?ze(i[i.length-1]):1,a=o==="NHWC"&&i.length>1?s:1,u=B.size(i)/s,l=t,f=l?i.length:i,c=D("x",r[0].dataType,r[0].dims,s),p=D("scale",r[1].dataType,r[1].dims,a),b=D("bias",r[2].dataType,r[2].dims,a),h=D("inputMean",r[3].dataType,r[3].dims,a),g=D("inputVar",r[4].dataType,r[4].dims,a),T=G("y",r[0].dataType,f,s),w=()=>{let S="";if(t)S=`let cOffset = ${i.length===1?"0u":o==="NHWC"?`outputIndices[${i.length-1}] / ${s}`:"outputIndices[1]"};`;else if(o==="NCHW")S=` ${T.indicesSet("outputIndices","0","0")} let cOffset = ${T.indicesToOffset("outputIndices")};`;else{S=`var cIndices = ${p.type.indices}(0); cIndices[0] = outputIndices[${i.length-1}];`;for(let $=1;$` @@ -2168,7 +2168,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas let x = ${c.getByOffset("global_idx")}; let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias; ${T.setByOffset("global_idx","value")} - }`;return{name:"BatchNormalization",shaderCache:{hint:`${e.epsilon}_${e.format}_${t}_${s}`,inputDependencies:l?["rank","type","type","type","type"]:void 0},getShaderSource:v,getRunData:()=>({outputs:[{dims:r[0].dims,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:l?[{type:12,data:u},...W(i)]:[{type:12,data:u}]})}},S2=r=>de(r),wg=(r,e)=>{let{inputs:n,outputCount:t}=r,o=S2({...e,outputCount:t});if(le.webgpu.validateInputContent&&_2(n,o),e.trainingMode)throw new Error("BatchNormalization trainingMode is not supported yet.");r.compute(I2(n,o))}});var $2,A2,_g,Ig=C(()=>{"use strict";ye();he();$2=r=>{if(r[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![320,640,1280].includes(r[0].dims[2]))throw new Error("number of channels should be 320, 640 or 1280");if(r[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(r[0].dims[2]!==r[1].dims[0])throw new Error("last dimension of input and bias are not the same")},A2=r=>{let e=r[0].dims,n=r[0].dims[2],t=B.size(e)/4,o=r[0].dataType,i=D("input",o,e,4),s=D("bias",o,[n],4),a=D("residual",o,e,4),u=G("output",o,e,4);return{name:"BiasAdd",getRunData:()=>({outputs:[{dims:e,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(t/64)}}),getShaderSource:f=>` + }`;return{name:"BatchNormalization",shaderCache:{hint:`${e.epsilon}_${e.format}_${t}_${s}`,inputDependencies:l?["rank","type","type","type","type"]:void 0},getShaderSource:v,getRunData:()=>({outputs:[{dims:r[0].dims,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:l?[{type:12,data:u},...W(i)]:[{type:12,data:u}]})}},A2=r=>de(r),wg=(r,e)=>{let{inputs:n,outputCount:t}=r,o=A2({...e,outputCount:t});if(le.webgpu.validateInputContent&&S2(n,o),e.trainingMode)throw new Error("BatchNormalization trainingMode is not supported yet.");r.compute($2(n,o))}});var P2,O2,_g,Ig=C(()=>{"use strict";ye();he();P2=r=>{if(r[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![320,640,1280].includes(r[0].dims[2]))throw new Error("number of channels should be 320, 640 or 1280");if(r[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(r[0].dims[2]!==r[1].dims[0])throw new Error("last dimension of input and bias are not the same")},O2=r=>{let 
e=r[0].dims,n=r[0].dims[2],t=B.size(e)/4,o=r[0].dataType,i=D("input",o,e,4),s=D("bias",o,[n],4),a=D("residual",o,e,4),u=G("output",o,e,4);return{name:"BiasAdd",getRunData:()=>({outputs:[{dims:e,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(t/64)}}),getShaderSource:f=>` const channels = ${n}u / 4; ${f.declareVariables(i,s,a,u)} @@ -2177,7 +2177,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas let value = ${i.getByOffset("global_idx")} + ${s.getByOffset("global_idx % channels")} + ${a.getByOffset("global_idx")}; ${u.setByOffset("global_idx","value")} - }`}},_g=r=>{$2(r.inputs),r.compute(A2(r.inputs))}});var P2,Ee,Sg,$g,Ag,Pg,Og,Eg,Cg,kg,Dg,O2,Bg,Lg,Rg,Ng,Zn,zg,Ri,Fg,Mg,Vg,Gg,Ug,Wg,Hg,qg,Kg,jg,Xg,Zg,Yg,Jg,Qg,eb,tb,rb,Zs,Ys,nb,ob,ib,E2,C2,ab,Ni=C(()=>{"use strict";ue();ye();et();he();P2=(r,e,n,t,o,i)=>{let s=Math.ceil(e/4),a="";typeof o=="string"?a=`${o}(a)`:a=o("a");let u=D("inputData",n,[s],4),l=G("outputData",t,[s],4);return` + }`}},_g=r=>{P2(r.inputs),r.compute(O2(r.inputs))}});var E2,Ee,Sg,$g,Ag,Pg,Og,Eg,Cg,kg,Dg,C2,Bg,Lg,Rg,Ng,Zn,zg,Ri,Fg,Mg,Vg,Gg,Ug,Wg,Hg,qg,jg,Kg,Xg,Zg,Yg,Jg,Qg,eb,tb,rb,Ys,Js,nb,ob,ib,k2,D2,ab,Ni=C(()=>{"use strict";ue();ye();et();he();E2=(r,e,n,t,o,i)=>{let s=Math.ceil(e/4),a="";typeof o=="string"?a=`${o}(a)`:a=o("a");let u=D("inputData",n,[s],4),l=G("outputData",t,[s],4);return` ${r.registerUniform("vec_size","u32").declareVariables(u,l)} ${i??""} @@ -2187,7 +2187,7 @@ var best_index : i32 = 0;`,`if (${t.getByIndices("input_indices")} ${e.selectLas let a = ${u.getByOffset("global_idx")}; ${l.setByOffset("global_idx",a)} - }`},Ee=(r,e,n,t,o,i=r.dataType)=>({name:e,shaderCache:{hint:o,inputDependencies:["type"]},getShaderSource:s=>P2(s,B.size(r.dims),r.dataType,i,n,t),getRunData:s=>({outputs:[{dims:r.dims,dataType:i}],dispatchGroup:{x:Math.ceil(B.size(s[0].dims)/64/4)},programUniforms:[{type:12,data:Math.ceil(B.size(r.dims)/4)}]})}),Sg=r=>{r.compute(Ee(r.inputs[0],"Abs","abs"))},$g=r=>{r.compute(Ee(r.inputs[0],"Acos","acos"))},Ag=r=>{r.compute(Ee(r.inputs[0],"Acosh","acosh"))},Pg=r=>{r.compute(Ee(r.inputs[0],"Asin","asin"))},Og=r=>{r.compute(Ee(r.inputs[0],"Asinh","asinh"))},Eg=r=>{r.compute(Ee(r.inputs[0],"Atan","atan"))},Cg=r=>{r.compute(Ee(r.inputs[0],"Atanh","atanh"))},kg=r=>de(r),Dg=(r,e)=>{let n;switch(e.to){case 10:n="vec4";break;case 1:n="vec4";break;case 12:n="vec4";break;case 6:n="vec4";break;case 9:n="vec4";break;default:throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${e.to}`)}r.compute(Ee(r.inputs[0],"Cast",n,void 0,e.cacheKey,e.to))},O2=r=>{let e=r.length>=2&&r[1].data!==0?r[1].getFloat32Array()[0]:Ei,n=r.length>=3&&r[2].data!==0?r[2].getFloat32Array()[0]:Ci;return de({min:e,max:n})},Bg=(r,e)=>{let n=r.inputs.length===1?e:O2(r.inputs),t=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Clip",o=>`clamp(${o}, clip_min_, clip_max_)`,` + 
}`},Ee=(r,e,n,t,o,i=r.dataType)=>({name:e,shaderCache:{hint:o,inputDependencies:["type"]},getShaderSource:s=>E2(s,B.size(r.dims),r.dataType,i,n,t),getRunData:s=>({outputs:[{dims:r.dims,dataType:i}],dispatchGroup:{x:Math.ceil(B.size(s[0].dims)/64/4)},programUniforms:[{type:12,data:Math.ceil(B.size(r.dims)/4)}]})}),Sg=r=>{r.compute(Ee(r.inputs[0],"Abs","abs"))},$g=r=>{r.compute(Ee(r.inputs[0],"Acos","acos"))},Ag=r=>{r.compute(Ee(r.inputs[0],"Acosh","acosh"))},Pg=r=>{r.compute(Ee(r.inputs[0],"Asin","asin"))},Og=r=>{r.compute(Ee(r.inputs[0],"Asinh","asinh"))},Eg=r=>{r.compute(Ee(r.inputs[0],"Atan","atan"))},Cg=r=>{r.compute(Ee(r.inputs[0],"Atanh","atanh"))},kg=r=>de(r),Dg=(r,e)=>{let n;switch(e.to){case 10:n="vec4";break;case 1:n="vec4";break;case 12:n="vec4";break;case 6:n="vec4";break;case 9:n="vec4";break;default:throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${e.to}`)}r.compute(Ee(r.inputs[0],"Cast",n,void 0,e.cacheKey,e.to))},C2=r=>{let e=r.length>=2&&r[1].data!==0?r[1].getFloat32Array()[0]:Ei,n=r.length>=3&&r[2].data!==0?r[2].getFloat32Array()[0]:Ci;return de({min:e,max:n})},Bg=(r,e)=>{let n=r.inputs.length===1?e:C2(r.inputs),t=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Clip",o=>`clamp(${o}, clip_min_, clip_max_)`,` const clip_min_: vec4<${t}> = vec4(${t}(${n.min})); const clip_max_: vec4<${t}> = vec4(${t}(${n.max})); `,n.cacheKey),{inputs:[0]})},Lg=r=>{r.compute(Ee(r.inputs[0],"Ceil","ceil"))},Rg=r=>{r.compute(Ee(r.inputs[0],"Cos","cos"))},Ng=r=>{r.compute(Ee(r.inputs[0],"Cosh","cosh"))},Zn=r=>de(r),zg=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Elu",t=>`elu_vf32(${t})`,` @@ -2211,7 +2211,7 @@ fn erf_vf32(v: vec4<${r}>) -> vec4<${r}> { let absv = abs(v); let x = 1.0 / (1.0 + r0 * absv); return sign(v) * (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv)); -}`,Fg=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Erf",n=>`erf_vf32(${n})`,Ri(e)))},Mg=r=>{r.compute(Ee(r.inputs[0],"Exp","exp"))},Vg=r=>{r.compute(Ee(r.inputs[0],"Floor","floor"))},Gg=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Gelu",n=>`0.5 * ${n} * (1.0 + erf_vf32(${n} * 0.7071067811865475))`,Ri(e)))},Ug=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"LeakyRelu",t=>`select(leaky_relu_alpha_ * ${t}, ${t}, ${t} >= vec4<${n}>(0.0))`,`const leaky_relu_alpha_ = ${n}(${e.alpha});`,e.cacheKey))},Wg=r=>{r.compute(Ee(r.inputs[0],"Not",e=>`!${e}`))},Hg=r=>{r.compute(Ee(r.inputs[0],"Neg",e=>`-${e}`))},qg=r=>{r.compute(Ee(r.inputs[0],"Reciprocal",e=>`1.0/${e}`))},Kg=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Relu",n=>`select(vec4<${e}>(0.0), ${n}, ${n} > vec4<${e}>(0.0))`))},jg=r=>{r.compute(Ee(r.inputs[0],"Sigmoid",e=>`(1.0 / (1.0 + exp(-${e})))`))},Xg=r=>de(r),Zg=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"HardSigmoid",t=>`max(vec4<${n}>(0.0), min(vec4<${n}>(1.0), ${e.alpha} * ${t} + vec4<${n}>(${e.beta})))`,void 0,e.cacheKey))},Yg=r=>{r.compute(Ee(r.inputs[0],"Sin","sin"))},Jg=r=>{r.compute(Ee(r.inputs[0],"Sinh","sinh"))},Qg=r=>{r.compute(Ee(r.inputs[0],"Sqrt","sqrt"))},eb=r=>{r.compute(Ee(r.inputs[0],"Tan","tan"))},tb=r=>`sign(${r}) * (1 - exp(-2 * abs(${r}))) / (1 + exp(-2 * abs(${r})))`,rb=r=>{r.compute(Ee(r.inputs[0],"Tanh",tb))},Zs=(r="f32")=>` +}`,Fg=r=>{let 
e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Erf",n=>`erf_vf32(${n})`,Ri(e)))},Mg=r=>{r.compute(Ee(r.inputs[0],"Exp","exp"))},Vg=r=>{r.compute(Ee(r.inputs[0],"Floor","floor"))},Gg=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Gelu",n=>`0.5 * ${n} * (1.0 + erf_vf32(${n} * 0.7071067811865475))`,Ri(e)))},Ug=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"LeakyRelu",t=>`select(leaky_relu_alpha_ * ${t}, ${t}, ${t} >= vec4<${n}>(0.0))`,`const leaky_relu_alpha_ = ${n}(${e.alpha});`,e.cacheKey))},Wg=r=>{r.compute(Ee(r.inputs[0],"Not",e=>`!${e}`))},Hg=r=>{r.compute(Ee(r.inputs[0],"Neg",e=>`-${e}`))},qg=r=>{r.compute(Ee(r.inputs[0],"Reciprocal",e=>`1.0/${e}`))},jg=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"Relu",n=>`select(vec4<${e}>(0.0), ${n}, ${n} > vec4<${e}>(0.0))`))},Kg=r=>{r.compute(Ee(r.inputs[0],"Sigmoid",e=>`(1.0 / (1.0 + exp(-${e})))`))},Xg=r=>de(r),Zg=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"HardSigmoid",t=>`max(vec4<${n}>(0.0), min(vec4<${n}>(1.0), ${e.alpha} * ${t} + vec4<${n}>(${e.beta})))`,void 0,e.cacheKey))},Yg=r=>{r.compute(Ee(r.inputs[0],"Sin","sin"))},Jg=r=>{r.compute(Ee(r.inputs[0],"Sinh","sinh"))},Qg=r=>{r.compute(Ee(r.inputs[0],"Sqrt","sqrt"))},eb=r=>{r.compute(Ee(r.inputs[0],"Tan","tan"))},tb=r=>`sign(${r}) * (1 - exp(-2 * abs(${r}))) / (1 + exp(-2 * abs(${r})))`,rb=r=>{r.compute(Ee(r.inputs[0],"Tanh",tb))},Ys=(r="f32")=>` const fast_gelu_a: ${r} = 0.5; const fast_gelu_b: ${r} = 0.7978845608028654; const fast_gelu_c: ${r} = 0.035677408136300125; @@ -2219,7 +2219,7 @@ const fast_gelu_c: ${r} = 0.035677408136300125; fn tanh_v(v: vec4<${r}>) -> vec4<${r}> { return ${tb("v")}; } -`,Ys=r=>`(fast_gelu_a + fast_gelu_a * tanh_v(${r} * (fast_gelu_c * ${r} * ${r} + fast_gelu_b))) * ${r}`,nb=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"FastGelu",Ys,Zs(e),void 0,r.inputs[0].dataType))},ob=(r,e)=>{let n=ct(r.inputs[0].dataType);return r.compute(Ee(r.inputs[0],"ThresholdedRelu",t=>`select(vec4<${n}>(0.0), ${t}, ${t} > thresholded_relu_alpha_)`,`const thresholded_relu_alpha_ = vec4<${n}>(${e.alpha});`,e.cacheKey)),0},ib=r=>{r.compute(Ee(r.inputs[0],"Log","log"))},E2=(r,e)=>` +`,Js=r=>`(fast_gelu_a + fast_gelu_a * tanh_v(${r} * (fast_gelu_c * ${r} * ${r} + fast_gelu_b))) * ${r}`,nb=r=>{let e=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"FastGelu",Js,Ys(e),void 0,r.inputs[0].dataType))},ob=(r,e)=>{let n=ct(r.inputs[0].dataType);return r.compute(Ee(r.inputs[0],"ThresholdedRelu",t=>`select(vec4<${n}>(0.0), ${t}, ${t} > thresholded_relu_alpha_)`,`const thresholded_relu_alpha_ = vec4<${n}>(${e.alpha});`,e.cacheKey)),0},ib=r=>{r.compute(Ee(r.inputs[0],"Log","log"))},k2=(r,e)=>` const alpha = vec4<${r}>(${e}); const one = ${r}(1.0); const zero = ${r}(0.0); @@ -2236,7 +2236,7 @@ fn quick_gelu_impl(x: vec4<${r}>) -> vec4<${r}> { } return x * x1; } -`,C2=r=>`quick_gelu_impl(${r})`,ab=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"QuickGelu",C2,E2(n,e.alpha),e.cacheKey,r.inputs[0].dataType))}});var k2,D2,ub,lb=C(()=>{"use strict";ye();he();Ni();k2=r=>{if(r[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![2560,5120,10240].includes(r[0].dims[2]))throw new Error("hidden state should be 2560, 5120 or 10240");if(r[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(r[0].dims[2]!==r[1].dims[0])throw new Error("last dimension of input and bias are not the same")},D2=r=>{let e=r[0].dims.slice();e[2]=e[2]/2;let 
n=D("input",r[0].dataType,r[0].dims,4),t=D("bias",r[0].dataType,[r[0].dims[2]],4),o=G("output",r[0].dataType,e,4),i=B.size(e)/4,s=Be(r[0].dataType);return{name:"BiasSplitGelu",getRunData:()=>({outputs:[{dims:e,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)}}),getShaderSource:u=>` +`,D2=r=>`quick_gelu_impl(${r})`,ab=(r,e)=>{let n=ct(r.inputs[0].dataType);r.compute(Ee(r.inputs[0],"QuickGelu",D2,k2(n,e.alpha),e.cacheKey,r.inputs[0].dataType))}});var B2,L2,ub,lb=C(()=>{"use strict";ye();he();Ni();B2=r=>{if(r[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![2560,5120,10240].includes(r[0].dims[2]))throw new Error("hidden state should be 2560, 5120 or 10240");if(r[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(r[0].dims[2]!==r[1].dims[0])throw new Error("last dimension of input and bias are not the same")},L2=r=>{let e=r[0].dims.slice();e[2]=e[2]/2;let n=D("input",r[0].dataType,r[0].dims,4),t=D("bias",r[0].dataType,[r[0].dims[2]],4),o=G("output",r[0].dataType,e,4),i=B.size(e)/4,s=Be(r[0].dataType);return{name:"BiasSplitGelu",getRunData:()=>({outputs:[{dims:e,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)}}),getShaderSource:u=>` const M_SQRT2 = sqrt(2.0); const halfChannels = ${r[0].dims[2]/4/2}u; @@ -2254,7 +2254,7 @@ fn quick_gelu_impl(x: vec4<${r}>) -> vec4<${r}> { let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1); ${o.setByOffset("global_idx","valueLeft * geluRight")} - }`}},ub=r=>{k2(r.inputs),r.compute(D2(r.inputs))}});var B2,L2,ur,cb,fb,db,pb,mb,hb,gb,bb,yb,xb,vb=C(()=>{"use strict";ue();ye();he();B2=(r,e,n,t,o,i,s,a,u,l,f,c)=>{let p,b;typeof a=="string"?p=b=(v,S)=>`${a}((${v}),(${S}))`:typeof a=="function"?p=b=a:(p=a.scalar,b=a.vector);let h=G("outputData",f,t.length,4),g=D("aData",u,e.length,4),T=D("bData",l,n.length,4),w;if(o)if(i){let v=B.size(e)===1,S=B.size(n)===1,$=e.length>0&&e[e.length-1]%4===0,P=n.length>0&&n[n.length-1]%4===0;v||S?w=h.setByOffset("global_idx",b(v?`${g.type.value}(${g.getByOffset("0")}.x)`:g.getByOffset("global_idx"),S?`${T.type.value}(${T.getByOffset("0")}.x)`:T.getByOffset("global_idx"))):w=` + }`}},ub=r=>{B2(r.inputs),r.compute(L2(r.inputs))}});var R2,N2,ur,cb,fb,db,pb,mb,hb,gb,bb,yb,xb,vb=C(()=>{"use strict";ue();ye();he();R2=(r,e,n,t,o,i,s,a,u,l,f,c)=>{let p,b;typeof a=="string"?p=b=(v,S)=>`${a}((${v}),(${S}))`:typeof a=="function"?p=b=a:(p=a.scalar,b=a.vector);let h=G("outputData",f,t.length,4),g=D("aData",u,e.length,4),T=D("bData",l,n.length,4),w;if(o)if(i){let v=B.size(e)===1,S=B.size(n)===1,$=e.length>0&&e[e.length-1]%4===0,P=n.length>0&&n[n.length-1]%4===0;v||S?w=h.setByOffset("global_idx",b(v?`${g.type.value}(${g.getByOffset("0")}.x)`:g.getByOffset("global_idx"),S?`${T.type.value}(${T.getByOffset("0")}.x)`:T.getByOffset("global_idx"))):w=` let outputIndices = ${h.offsetToIndices("global_idx * 4u")}; let offsetA = ${g.broadcastedIndicesToOffset("outputIndices",h)}; let offsetB = ${T.broadcastedIndicesToOffset("outputIndices",h)}; @@ -2287,7 +2287,7 @@ fn quick_gelu_impl(x: vec4<${r}>) -> vec4<${r}> { ${r.mainStart()} ${r.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.vec_size")} ${w} - }`},L2=(r,e,n,t,o,i,s=n.dataType)=>{let a=!B.areEqual(n.dims,t.dims),u=n.dims,l=B.size(n.dims),f=!1,c=!1,p=[a];if(a){let b=nr.calcShape(n.dims,t.dims,!1);if(!b)throw new Error("Can't perform binary op on the given tensors");u=b,l=B.size(u);let 
h=B.size(n.dims)===1,g=B.size(t.dims)===1,T=n.dims.length>0&&n.dims[n.dims.length-1]%4===0,w=t.dims.length>0&&t.dims[t.dims.length-1]%4===0;p.push(h),p.push(g),p.push(T),p.push(w);let v=1;for(let S=1;Sb.toString()).join("_"),inputDependencies:["rank","rank"]},getShaderSource:b=>B2(b,n.dims,t.dims,u,f,a,c,o,n.dataType,t.dataType,s,i),getRunData:()=>({outputs:[{dims:u,dataType:s}],dispatchGroup:{x:Math.ceil(l/64/4)},programUniforms:[{type:12,data:Math.ceil(B.size(u)/4)},...W(n.dims,t.dims,u)]})}},ur=(r,e,n,t,o,i)=>{r.compute(L2(e,o??"",r.inputs[0],r.inputs[1],n,t,i))},cb=r=>{ur(r,"Add",(e,n)=>`${e}+${n}`)},fb=r=>{ur(r,"Div",(e,n)=>`${e}/${n}`)},db=r=>{ur(r,"Equal",{scalar:(e,n)=>`u32(${e}==${n})`,vector:(e,n)=>`vec4(${e}==${n})`},void 0,void 0,9)},pb=r=>{ur(r,"Mul",(e,n)=>`${e}*${n}`)},mb=r=>{let e=D("input",r.inputs[0].dataType,r.inputs[0].dims).type.value;ur(r,"Pow",{scalar:(t,o)=>`pow_custom(${t},${o})`,vector:(t,o)=>`pow_vector_custom(${t},${o})`},` + }`},N2=(r,e,n,t,o,i,s=n.dataType)=>{let a=!B.areEqual(n.dims,t.dims),u=n.dims,l=B.size(n.dims),f=!1,c=!1,p=[a];if(a){let b=nr.calcShape(n.dims,t.dims,!1);if(!b)throw new Error("Can't perform binary op on the given tensors");u=b,l=B.size(u);let h=B.size(n.dims)===1,g=B.size(t.dims)===1,T=n.dims.length>0&&n.dims[n.dims.length-1]%4===0,w=t.dims.length>0&&t.dims[t.dims.length-1]%4===0;p.push(h),p.push(g),p.push(T),p.push(w);let v=1;for(let S=1;Sb.toString()).join("_"),inputDependencies:["rank","rank"]},getShaderSource:b=>R2(b,n.dims,t.dims,u,f,a,c,o,n.dataType,t.dataType,s,i),getRunData:()=>({outputs:[{dims:u,dataType:s}],dispatchGroup:{x:Math.ceil(l/64/4)},programUniforms:[{type:12,data:Math.ceil(B.size(u)/4)},...W(n.dims,t.dims,u)]})}},ur=(r,e,n,t,o,i)=>{r.compute(N2(e,o??"",r.inputs[0],r.inputs[1],n,t,i))},cb=r=>{ur(r,"Add",(e,n)=>`${e}+${n}`)},fb=r=>{ur(r,"Div",(e,n)=>`${e}/${n}`)},db=r=>{ur(r,"Equal",{scalar:(e,n)=>`u32(${e}==${n})`,vector:(e,n)=>`vec4(${e}==${n})`},void 0,void 0,9)},pb=r=>{ur(r,"Mul",(e,n)=>`${e}*${n}`)},mb=r=>{let e=D("input",r.inputs[0].dataType,r.inputs[0].dims).type.value;ur(r,"Pow",{scalar:(t,o)=>`pow_custom(${t},${o})`,vector:(t,o)=>`pow_vector_custom(${t},${o})`},` fn pow_custom(a : ${e}, b : ${e}) -> ${e} { if (b == ${e}(0.0)) { return ${e}(1.0); @@ -2300,7 +2300,7 @@ fn quick_gelu_impl(x: vec4<${r}>) -> vec4<${r}> { // TODO: implement vectorized pow return vec4<${e}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w)); } - `)},hb=r=>{ur(r,"Sub",(e,n)=>`${e}-${n}`)},gb=r=>{ur(r,"Greater",{scalar:(e,n)=>`u32(${e}>${n})`,vector:(e,n)=>`vec4(${e}>${n})`},void 0,void 0,9)},bb=r=>{ur(r,"Less",{scalar:(e,n)=>`u32(${e}<${n})`,vector:(e,n)=>`vec4(${e}<${n})`},void 0,void 0,9)},yb=r=>{ur(r,"GreaterOrEqual",{scalar:(e,n)=>`u32(${e}>=${n})`,vector:(e,n)=>`vec4(${e}>=${n})`},void 0,void 0,9)},xb=r=>{ur(r,"LessOrEqual",{scalar:(e,n)=>`u32(${e}<=${n})`,vector:(e,n)=>`vec4(${e}<=${n})`},void 0,void 0,9)}});var N2,z2,F2,M2,wb,Tb,_b=C(()=>{"use strict";ue();ye();et();he();N2=(r,e)=>{if(!r||r.length<1)throw new Error("too few inputs");let n=0,t=r[n],o=t.dataType,i=t.dims.length;r.forEach((s,a)=>{if(a!==n){if(s.dataType!==o)throw new Error("input tensors should be one type");if(s.dims.length!==i)throw new Error("input tensors should have the same shape");s.dims.forEach((u,l)=>{if(l!==e&&u!==t.dims[l])throw new Error("non concat dimensions must match")})}})},z2=(r,e)=>` + 
`)},hb=r=>{ur(r,"Sub",(e,n)=>`${e}-${n}`)},gb=r=>{ur(r,"Greater",{scalar:(e,n)=>`u32(${e}>${n})`,vector:(e,n)=>`vec4(${e}>${n})`},void 0,void 0,9)},bb=r=>{ur(r,"Less",{scalar:(e,n)=>`u32(${e}<${n})`,vector:(e,n)=>`vec4(${e}<${n})`},void 0,void 0,9)},yb=r=>{ur(r,"GreaterOrEqual",{scalar:(e,n)=>`u32(${e}>=${n})`,vector:(e,n)=>`vec4(${e}>=${n})`},void 0,void 0,9)},xb=r=>{ur(r,"LessOrEqual",{scalar:(e,n)=>`u32(${e}<=${n})`,vector:(e,n)=>`vec4(${e}<=${n})`},void 0,void 0,9)}});var F2,M2,V2,G2,wb,Tb,_b=C(()=>{"use strict";ue();ye();et();he();F2=(r,e)=>{if(!r||r.length<1)throw new Error("too few inputs");let n=0,t=r[n],o=t.dataType,i=t.dims.length;r.forEach((s,a)=>{if(a!==n){if(s.dataType!==o)throw new Error("input tensors should be one type");if(s.dims.length!==i)throw new Error("input tensors should have the same shape");s.dims.forEach((u,l)=>{if(l!==e&&u!==t.dims[l])throw new Error("non concat dimensions must match")})}})},M2=(r,e)=>` fn calculateInputIndex(index: u32) -> u32 { let sizeInConcatAxis = array(${e}); for (var i: u32 = 0u; i < ${r}; i += 1u ) { @@ -2309,12 +2309,12 @@ fn quick_gelu_impl(x: vec4<${r}>) -> vec4<${r}> { } } return ${r}u; - }`,F2=(r,e)=>{let n=r.length,t=[];for(let o=0;o{let o=B.size(n),i=new Array(r.length),s=new Array(r.length),a=0,u=[],l=[],f=[{type:12,data:o}];for(let g=0;g`uniforms.sizeInConcatAxis${g}`).join(","),h=g=>` + }`,V2=(r,e)=>{let n=r.length,t=[];for(let o=0;o{let o=B.size(n),i=new Array(r.length),s=new Array(r.length),a=0,u=[],l=[],f=[{type:12,data:o}];for(let g=0;g`uniforms.sizeInConcatAxis${g}`).join(","),h=g=>` ${(()=>{g.registerUniform("outputSize","u32");for(let T=0;T) -> vec4<${r}> { ${p} -= sizeInConcatAxis[inputIndex - 1u]; } - ${F2(s,c)} - }`;return{name:"Concat",shaderCache:{hint:`${e}`,inputDependencies:u},getRunData:()=>({outputs:[{dims:n,dataType:t}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:f}),getShaderSource:h}},wb=(r,e)=>{let n=r.inputs,t=n[0].dims,o=B.normalizeAxis(e.axis,t.length);N2(n,o);let i=t.slice();i[o]=n.reduce((a,u)=>a+(u.dims.length>o?u.dims[o]:0),0);let s=n.filter(a=>B.size(a.dims)>0);r.compute(M2(s,o,i,n[0].dataType),{inputs:s})},Tb=r=>de({axis:r.axis})});var Wt,Ht,qt,zi,Or=C(()=>{"use strict";ue();ye();Wt=(r,e,n="f32")=>{switch(r.activation){case"Relu":return`value = max(value, ${e}(0.0));`;case"Sigmoid":return`value = (${e}(1.0) / (${e}(1.0) + exp(-value)));`;case"Clip":return`value = clamp(value, ${e}(${n}(uniforms.clip_min)), ${e}(${n}(uniforms.clip_max)));`;case"HardSigmoid":return`value = max(${e}(0.0), min(${e}(1.0), ${n}(uniforms.alpha) * value + ${n}(uniforms.beta)));`;case"LeakyRelu":return`value = select(${n}(uniforms.alpha) * value, value, value >= ${e}(0.0));`;case"":return"";default:throw new Error(`Unsupported activation ${r.activation}`)}},Ht=(r,e)=>{r.activation==="Clip"?e.push({type:1,data:r.clipMax},{type:1,data:r.clipMin}):r.activation==="HardSigmoid"?e.push({type:1,data:r.alpha},{type:1,data:r.beta}):r.activation==="LeakyRelu"&&e.push({type:1,data:r.alpha})},qt=(r,e)=>{r.activation==="Clip"?e.push({name:"clip_max",type:"f32"},{name:"clip_min",type:"f32"}):r.activation==="HardSigmoid"?e.push({name:"alpha",type:"f32"},{name:"beta",type:"f32"}):r.activation==="LeakyRelu"&&e.push({name:"alpha",type:"f32"})},zi=r=>{let e=r?.activation||"";if(e==="HardSigmoid"){let[n,t]=r?.activation_params||[.2,.5];return{activation:e,alpha:n,beta:t}}else if(e==="Clip"){let[n,t]=r?.activation_params||[Ei,Ci];return{activation:e,clipMax:t,clipMin:n}}else 
if(e==="LeakyRelu"){let[n]=r?.activation_params||[.01];return{activation:e,alpha:n}}return{activation:e}}});var pt,Fi,Mi=C(()=>{"use strict";pt=(r,e)=>{switch(r){case 1:return e;case 2:return`vec2<${e}>`;case 3:return`vec3<${e}>`;case 4:return`vec4<${e}>`;default:throw new Error(`${r}-component is not supported.`)}},Fi=r=>` + ${V2(s,c)} + }`;return{name:"Concat",shaderCache:{hint:`${e}`,inputDependencies:u},getRunData:()=>({outputs:[{dims:n,dataType:t}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:f}),getShaderSource:h}},wb=(r,e)=>{let n=r.inputs,t=n[0].dims,o=B.normalizeAxis(e.axis,t.length);F2(n,o);let i=t.slice();i[o]=n.reduce((a,u)=>a+(u.dims.length>o?u.dims[o]:0),0);let s=n.filter(a=>B.size(a.dims)>0);r.compute(G2(s,o,i,n[0].dataType),{inputs:s})},Tb=r=>de({axis:r.axis})});var Wt,Ht,qt,zi,Or=C(()=>{"use strict";ue();ye();Wt=(r,e,n="f32")=>{switch(r.activation){case"Relu":return`value = max(value, ${e}(0.0));`;case"Sigmoid":return`value = (${e}(1.0) / (${e}(1.0) + exp(-value)));`;case"Clip":return`value = clamp(value, ${e}(${n}(uniforms.clip_min)), ${e}(${n}(uniforms.clip_max)));`;case"HardSigmoid":return`value = max(${e}(0.0), min(${e}(1.0), ${n}(uniforms.alpha) * value + ${n}(uniforms.beta)));`;case"LeakyRelu":return`value = select(${n}(uniforms.alpha) * value, value, value >= ${e}(0.0));`;case"":return"";default:throw new Error(`Unsupported activation ${r.activation}`)}},Ht=(r,e)=>{r.activation==="Clip"?e.push({type:1,data:r.clipMax},{type:1,data:r.clipMin}):r.activation==="HardSigmoid"?e.push({type:1,data:r.alpha},{type:1,data:r.beta}):r.activation==="LeakyRelu"&&e.push({type:1,data:r.alpha})},qt=(r,e)=>{r.activation==="Clip"?e.push({name:"clip_max",type:"f32"},{name:"clip_min",type:"f32"}):r.activation==="HardSigmoid"?e.push({name:"alpha",type:"f32"},{name:"beta",type:"f32"}):r.activation==="LeakyRelu"&&e.push({name:"alpha",type:"f32"})},zi=r=>{let e=r?.activation||"";if(e==="HardSigmoid"){let[n,t]=r?.activation_params||[.2,.5];return{activation:e,alpha:n,beta:t}}else if(e==="Clip"){let[n,t]=r?.activation_params||[Ei,Ci];return{activation:e,clipMax:t,clipMin:n}}else if(e==="LeakyRelu"){let[n]=r?.activation_params||[.01];return{activation:e,alpha:n}}return{activation:e}}});var pt,Fi,Mi=C(()=>{"use strict";pt=(r,e)=>{switch(r){case 1:return e;case 2:return`vec2<${e}>`;case 3:return`vec3<${e}>`;case 4:return`vec4<${e}>`;default:throw new Error(`${r}-component is not supported.`)}},Fi=r=>` ${r?"value = value + getBiasByOutputCoords(coords);":""} - `});var Vi,Js=C(()=>{"use strict";Vi=r=>` + `});var Vi,Qs=C(()=>{"use strict";Vi=r=>` fn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 { return dot(coords, vec4( shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1)); @@ -2339,7 +2339,7 @@ fn getOutputIndexFromCoords(coords : vec4) -> i32 { return dot(coords, vec4( i32(${r}.x), i32(${r}.y), i32(${r}.z), 1)); } -`});var V2,G2,Yn,Ib,U2,Jn,W2,Gi,Qn=C(()=>{"use strict";ue();ye();he();Or();Mi();V2=(r,e)=>r?` +`});var U2,W2,Yn,Ib,H2,Jn,q2,Gi,Qn=C(()=>{"use strict";ue();ye();he();Or();Mi();U2=(r,e)=>r?` mm_Asub[inputRow][inputCol] = mm_readA(batch, kStart + inputRow, globalRowStart / innerElementSize + inputCol${e?", batchIndices":""}); @@ -2347,7 +2347,7 @@ fn getOutputIndexFromCoords(coords : vec4) -> i32 { mm_Asub[inputRow][inputCol] = mm_readA(batch, globalRow + innerRow, kStart / innerElementSize + inputCol${e?", batchIndices":""}); - `,G2=(r,e)=>r?` + `,W2=(r,e)=>r?` let ACached0 = mm_Asub[k * innerElementSize][localRow]; let ACached1 = mm_Asub[k * innerElementSize + 
1][localRow]; let ACached2 = mm_Asub[k * innerElementSize + 2][localRow]; @@ -2401,7 +2401,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) { let inputRow = tileRow + innerRow; let inputCol = tileCol; - ${V2(o,t)} + ${U2(o,t)} } // Load one tile of B into local memory. @@ -2420,7 +2420,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol]; ${p===3?"":"let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];"} - ${G2(o,p)} + ${W2(o,p)} } workgroupBarrier(); @@ -2437,7 +2437,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, mm_Asub[inputRow][inputCol] = mm_readA(batch, globalRowStart + inputRow, kStart + inputCol${e?", batchIndices":""}); - `,U2=r=>r?"let ACached = mm_Asub[k][tileRow + innerRow];":"let ACached = mm_Asub[tileRow + innerRow][k];",Jn=(r,e,n="f32",t,o=!1,i=32,s=!1,a=32,u=!1)=>{let l=r[1]*e[1],f=r[0]*e[0],c=o?l:i,p=o?i:l;if(!(p%e[1]===0&&c%e[0]===0&&i%e[1]===0))throw new Error(`tileAHight ${p} must be divisible by workgroupSize[1]${e[1]}, tileAWidth ${c} must be divisible by workgroupSize[0]${e[0]}, tileInner ${i} must be divisible by workgroupSize[1]${e[1]}`);let b=p/e[1],h=c/e[0],g=i/e[1],T=u?` + `,H2=r=>r?"let ACached = mm_Asub[k][tileRow + innerRow];":"let ACached = mm_Asub[tileRow + innerRow][k];",Jn=(r,e,n="f32",t,o=!1,i=32,s=!1,a=32,u=!1)=>{let l=r[1]*e[1],f=r[0]*e[0],c=o?l:i,p=o?i:l;if(!(p%e[1]===0&&c%e[0]===0&&i%e[1]===0))throw new Error(`tileAHight ${p} must be divisible by workgroupSize[1]${e[1]}, tileAWidth ${c} must be divisible by workgroupSize[0]${e[0]}, tileInner ${i} must be divisible by workgroupSize[1]${e[1]}`);let b=p/e[1],h=c/e[0],g=i/e[1],T=u?` let localRow = i32(localId.y); let localCol = i32(localId.x); let globalRowStart = i32(workgroupId.y) * ${l}; @@ -2528,7 +2528,7 @@ for (var t = 0; t < num_tiles; t = t + 1) { } for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) { - ${U2(o)} + ${H2(o)} for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) { acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol]; } @@ -2563,7 +2563,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, var acc : array, rowPerThread>; ${T} } -`},W2=(r,e,n,t,o,i=!1)=>{let[s,a,u]=o,[l,f,c,p]=t,b=_n(s,u),h=_n(a,u),g=Be(t[0].type.tensor),T=()=>{let S=f.rank,$=l.rank,P=`var aIndices: ${f.type.indices};`;for(let E=S-2-1,N=$-1;E>=0;E--,N--)P+=` +`},q2=(r,e,n,t,o,i=!1)=>{let[s,a,u]=o,[l,f,c,p]=t,b=In(s,u),h=In(a,u),g=Be(t[0].type.tensor),T=()=>{let S=f.rank,$=l.rank,P=`var aIndices: ${f.type.indices};`;for(let E=S-2-1,N=$-1;E>=0;E--,N--)P+=` aIndices[${E}] = ${$>1?`batchIndices[${N}]`:"batchIndices"};`;return b.forEach(E=>{P+=` aIndices[${E}] = 0;`}),P+=` aIndices[${S-2}] = u32(row); @@ -2604,11 +2604,11 @@ bIndices[${S-2}] = u32(row); ${p.setByIndices("vec3(coords)","value")} } } - `},Gi=(r,e,n,t,o=!1)=>{let i=r[0].dims,s=r[1].dims,a=i.slice(0,-2),u=s.slice(0,-2),l=t?t.slice(0,-2):n.slice(0,-2),f=B.size(l),c=i[i.length-2],p=i[i.length-1],b=s[s.length-1],h=p%4===0&&b%4===0,g=c<=8?[4,1,1]:[4,4,1],T=[8,8,1],w=[Math.ceil(b/T[0]/g[0]),Math.ceil(c/T[1]/g[1]),Math.ceil(f/T[2]/g[2])],v=h?4:1,S=[...a,c,p/v],$=S.length,P=[...u,p,b/v],E=P.length,N=[f,c,b/v],z=[{type:6,data:c},{type:6,data:b},{type:6,data:p}];Ht(e,z),z.push(...W(l,S,P));let q=["rank","rank"],K=r.length>2;K&&(z.push(...W(r[2].dims)),q.push("rank")),z.push(...W(N));let F=_e=>{let 
$e=l.length,ae=ki("batchDims",r[0].dataType,$e,1),qe=Be(r[0].dataType),Q=D("a",r[0].dataType,$,v),ge=D("b",r[1].dataType,E,v),Ie=G("result",r[0].dataType,N.length,v),xe=[Q,ge];if(K){let V=o?v:1;xe.push(D("bias",r[2].dataType,r[2].dims.length,V))}let se=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"}];qt(e,se);let pe=Be(Ie.type.tensor),ce=Wt(e,Ie.type.value,pe),ut=W2(v,K,ce,[ae,Q,ge,Ie],[a,u,l],o);return` + `},Gi=(r,e,n,t,o=!1)=>{let i=r[0].dims,s=r[1].dims,a=i.slice(0,-2),u=s.slice(0,-2),l=t?t.slice(0,-2):n.slice(0,-2),f=B.size(l),c=i[i.length-2],p=i[i.length-1],b=s[s.length-1],h=p%4===0&&b%4===0,g=c<=8?[4,1,1]:[4,4,1],T=[8,8,1],w=[Math.ceil(b/T[0]/g[0]),Math.ceil(c/T[1]/g[1]),Math.ceil(f/T[2]/g[2])],v=h?4:1,S=[...a,c,p/v],$=S.length,P=[...u,p,b/v],E=P.length,N=[f,c,b/v],z=[{type:6,data:c},{type:6,data:b},{type:6,data:p}];Ht(e,z),z.push(...W(l,S,P));let q=["rank","rank"],j=r.length>2;j&&(z.push(...W(r[2].dims)),q.push("rank")),z.push(...W(N));let F=_e=>{let $e=l.length,ae=ki("batchDims",r[0].dataType,$e,1),qe=Be(r[0].dataType),Q=D("a",r[0].dataType,$,v),ge=D("b",r[1].dataType,E,v),Ie=G("result",r[0].dataType,N.length,v),xe=[Q,ge];if(j){let V=o?v:1;xe.push(D("bias",r[2].dataType,r[2].dims.length,V))}let se=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"}];qt(e,se);let pe=Be(Ie.type.tensor),ce=Wt(e,Ie.type.value,pe),ut=q2(v,j,ce,[ae,Q,ge,Ie],[a,u,l],o);return` ${_e.registerUniforms(se).registerInternalVariables(ae).declareVariables(...xe,Ie)} ${ut} ${h?Yn(g,T,qe,ae):Jn(g,T,qe,ae)} - `};return{name:"MatMul",shaderCache:{hint:`${g};${e.activation};${h};${o}`,inputDependencies:q},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:w[0],y:w[1],z:w[2]},programUniforms:z}),getShaderSource:F}}});var H2,Sb,$b=C(()=>{"use strict";ue();mr();he();Or();Mi();Js();Qn();H2=(r,e,n,t,o=!1,i,s=4,a=4,u=4,l="f32")=>{let f=K=>{switch(K){case 1:return"resData = x[xIndex];";case 3:return`resData = vec3<${l}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;case 4:return"resData = x[xIndex / 4];";default:throw new Error(`innerElementSize ${K} is not supported.`)}},c=K=>{switch(K){case 1:return"return w[row * i32(uniforms.w_shape[3]) + colIn];";case 4:return"return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];";default:throw new Error(`innerElementSize ${K} is not supported.`)}},p=r?` + `};return{name:"MatMul",shaderCache:{hint:`${g};${e.activation};${h};${o}`,inputDependencies:q},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:w[0],y:w[1],z:w[2]},programUniforms:z}),getShaderSource:F}}});var j2,Sb,$b=C(()=>{"use strict";ue();mr();he();Or();Mi();Qs();Qn();j2=(r,e,n,t,o=!1,i,s=4,a=4,u=4,l="f32")=>{let f=j=>{switch(j){case 1:return"resData = x[xIndex];";case 3:return`resData = vec3<${l}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;case 4:return"resData = x[xIndex / 4];";default:throw new Error(`innerElementSize ${j} is not supported.`)}},c=j=>{switch(j){case 1:return"return w[row * i32(uniforms.w_shape[3]) + colIn];";case 4:return"return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];";default:throw new Error(`innerElementSize ${j} is not supported.`)}},p=r?` let coord = vec4(batch, xRow, xCol, xCh); `:` let coord = vec4(batch, xCh, xRow, xCol); @@ -2677,7 +2677,7 @@ bIndices[${S-2}] = u32(row); ${z} setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value); } - }`},Sb=(r,e,n,t,o,i,s,a)=>{let 
u=e.format==="NHWC",l=u?r[0].dims[3]:r[0].dims[1],f=n[0],c=u?n[2]:n[3],p=u?n[1]:n[2],b=u?n[3]:n[1],h=u&&(l%4===0||l%3===0)&&b%4===0,g=u?b:c*p,T=u?c*p:b,w=[8,8,1],v=t<=8?[4,1,1]:[4,4,1],S=[Math.ceil(g/w[0]/v[0]),Math.ceil(T/w[1]/v[1]),Math.ceil(f/w[2]/v[2])];Ne("verbose",()=>`[conv2d_mm_webgpu] dispatch = ${S}`);let $=h?u&&l%4!==0?3:4:1,P=w[1]*v[1],E=w[0]*v[0],N=Math.max(w[0]*$,w[1]),z=t%P===0,q=o%E===0,K=i%N===0,F=h?[$,4,4]:[1,1,1],_e=[{type:6,data:t},{type:6,data:o},{type:6,data:i},{type:6,data:[e.pads[0],e.pads[1]]},{type:6,data:e.strides},{type:6,data:e.dilations}];Ht(e,_e),_e.push(...W(r[0].dims,r[1].dims));let $e=["rank","rank"];s&&(_e.push(...W(r[2].dims)),$e.push("rank")),_e.push(...W(n));let ae=qe=>{let Q=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"pad",type:"i32",length:2},{name:"stride",type:"i32",length:2},{name:"dilation",type:"i32",length:2}];qt(e,Q);let ge=h?4:1,Ie=Be(r[0].dataType),xe=` + }`},Sb=(r,e,n,t,o,i,s,a)=>{let u=e.format==="NHWC",l=u?r[0].dims[3]:r[0].dims[1],f=n[0],c=u?n[2]:n[3],p=u?n[1]:n[2],b=u?n[3]:n[1],h=u&&(l%4===0||l%3===0)&&b%4===0,g=u?b:c*p,T=u?c*p:b,w=[8,8,1],v=t<=8?[4,1,1]:[4,4,1],S=[Math.ceil(g/w[0]/v[0]),Math.ceil(T/w[1]/v[1]),Math.ceil(f/w[2]/v[2])];Ne("verbose",()=>`[conv2d_mm_webgpu] dispatch = ${S}`);let $=h?u&&l%4!==0?3:4:1,P=w[1]*v[1],E=w[0]*v[0],N=Math.max(w[0]*$,w[1]),z=t%P===0,q=o%E===0,j=i%N===0,F=h?[$,4,4]:[1,1,1],_e=[{type:6,data:t},{type:6,data:o},{type:6,data:i},{type:6,data:[e.pads[0],e.pads[1]]},{type:6,data:e.strides},{type:6,data:e.dilations}];Ht(e,_e),_e.push(...W(r[0].dims,r[1].dims));let $e=["rank","rank"];s&&(_e.push(...W(r[2].dims)),$e.push("rank")),_e.push(...W(n));let ae=qe=>{let Q=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"pad",type:"i32",length:2},{name:"stride",type:"i32",length:2},{name:"dilation",type:"i32",length:2}];qt(e,Q);let ge=h?4:1,Ie=Be(r[0].dataType),xe=` fn setOutputAtIndex(flatIndex : i32, value : ${h?`vec4<${Ie}>`:Ie}) { result[flatIndex] = ${h?`vec4<${Ie}>`:Ie}(value); } @@ -2694,12 +2694,12 @@ bIndices[${S-2}] = u32(row); // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 }; ${qe.registerUniforms(Q).declareVariables(...ce,ut)} ${xe} - ${H2(u,z,q,K,s,e,F[0],F[1],F[2],Ie)} - ${h?Yn(v,w,Ie,void 0,!u,N):Jn(v,w,Ie,void 0,!u,N,!1,void 0,a)}`};return{name:"Conv2DMatMul",shaderCache:{hint:`${e.cacheKey};${$};${h};${z};${q};${K};${P};${E};${N}`,inputDependencies:$e},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:_e}),getShaderSource:ae}}});var q2,Ab,Ui,K2,Pb,j2,Ob,Eb,Cb=C(()=>{"use strict";ue();mr();ye();he();q2=r=>{let e=1;for(let n=0;ntypeof r=="number"?[r,r,r]:r,Ui=(r,e)=>e<=1?r:r+(r-1)*(e-1),K2=(r,e,n,t=1)=>{let o=Ui(e,t);return Math.floor((r[0]*(n-1)-n+o)/2)},Pb=(r,e,n,t,o)=>{o==null&&(o=K2(r,e[0],t[0]));let i=[0,0,0,n];for(let s=0;s<3;s++)r[s]+2*o>=e[s]&&(i[s]=Math.trunc((r[s]-e[s]+2*o)/t[s]+1));return i},j2=(r,e,n,t,o,i,s,a,u,l)=>{let f,c,p,b;if(r==="VALID"&&(r=0),typeof r=="number"){f={top:r,bottom:r,left:r,right:r,front:r,back:r};let h=Pb([e,n,t,1],[a,u,l],1,[o,i,s],r);c=h[0],p=h[1],b=h[2]}else if(Array.isArray(r)){if(!r.every((g,T,w)=>g===w[0]))throw Error(`Unsupported padding parameter: ${r}`);f={top:r[0],bottom:r[1],left:r[2],right:r[3],front:r[4],back:r[5]};let h=Pb([e,n,t,1],[a,u,l],1,[o,i,s],r[0]);c=h[0],p=h[1],b=h[2]}else if(r==="SAME_UPPER"){c=Math.ceil(e/o),p=Math.ceil(n/i),b=Math.ceil(t/s);let 
h=(c-1)*o+a-e,g=(p-1)*i+u-n,T=(b-1)*s+l-t,w=Math.floor(h/2),v=h-w,S=Math.floor(g/2),$=g-S,P=Math.floor(T/2),E=T-P;f={top:S,bottom:$,left:P,right:E,front:w,back:v}}else throw Error(`Unknown padding parameter: ${r}`);return{padInfo:f,outDepth:c,outHeight:p,outWidth:b}},Ob=(r,e,n,t,o,i=!1,s="channelsLast")=>{let a,u,l,f,c;if(s==="channelsLast")[a,u,l,f,c]=r;else if(s==="channelsFirst")[a,c,u,l,f]=r;else throw new Error(`Unknown dataFormat ${s}`);let[p,,b,h,g]=e,[T,w,v]=Ab(n),[S,$,P]=Ab(t),E=Ui(b,S),N=Ui(h,$),z=Ui(g,P),{padInfo:q,outDepth:K,outHeight:F,outWidth:_e}=j2(o,u,l,f,T,w,v,E,N,z),$e=i?p*c:p,ae=[0,0,0,0,0];return s==="channelsFirst"?ae=[a,$e,K,F,_e]:s==="channelsLast"&&(ae=[a,K,F,_e,$e]),{batchSize:a,dataFormat:s,inDepth:u,inHeight:l,inWidth:f,inChannels:c,outDepth:K,outHeight:F,outWidth:_e,outChannels:$e,padInfo:q,strideDepth:T,strideHeight:w,strideWidth:v,filterDepth:b,filterHeight:h,filterWidth:g,effectiveFilterDepth:E,effectiveFilterHeight:N,effectiveFilterWidth:z,dilationDepth:S,dilationHeight:$,dilationWidth:P,inShape:r,outShape:ae,filterShape:e}},Eb=(r,e,n,t,o,i)=>{let s=i==="channelsLast",a=s?r[0].dims[3]:r[0].dims[1],u=!1,l=[64,1,1],f={x:n.map((v,S)=>S)},c=[Math.ceil(q2(f.x.map(v=>n[v]))/l[0]),1,1];Ne("verbose",()=>`[conv3d_naive_webgpu] dispatch = ${c}`);let p=u?s&&a%4!==0?3:4:1,b=B.size(n),h=[{type:12,data:b},{type:12,data:t},{type:12,data:o},{type:12,data:e.strides},{type:12,data:e.dilations}];h.push(...W(r[0].dims,r[1].dims));let g=["rank","rank"],T=r.length===3;T&&(h.push(...W(r[2].dims)),g.push("rank")),h.push(...W(n));let w=v=>{let S=[{name:"output_size",type:"u32"},{name:"filter_dims",type:"u32",length:t.length},{name:"pads",type:"u32",length:o.length},{name:"strides",type:"u32",length:e.strides.length},{name:"dilations",type:"u32",length:e.dilations.length}],$=u?4:1,P=Be(r[0].dataType),E=D("x",r[0].dataType,r[0].dims.length,p===3?1:p),N=D("W",r[1].dataType,r[1].dims.length,$),z=[E,N],q=G("result",r[0].dataType,n.length,$),K="";if(T){let F=D("bias",r[2].dataType,r[2].dims.length,$);z.push(F),K+=` + ${j2(u,z,q,j,s,e,F[0],F[1],F[2],Ie)} + ${h?Yn(v,w,Ie,void 0,!u,N):Jn(v,w,Ie,void 0,!u,N,!1,void 0,a)}`};return{name:"Conv2DMatMul",shaderCache:{hint:`${e.cacheKey};${$};${h};${z};${q};${j};${P};${E};${N}`,inputDependencies:$e},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:_e}),getShaderSource:ae}}});var K2,Ab,Ui,X2,Pb,Z2,Ob,Eb,Cb=C(()=>{"use strict";ue();mr();ye();he();K2=r=>{let e=1;for(let n=0;ntypeof r=="number"?[r,r,r]:r,Ui=(r,e)=>e<=1?r:r+(r-1)*(e-1),X2=(r,e,n,t=1)=>{let o=Ui(e,t);return Math.floor((r[0]*(n-1)-n+o)/2)},Pb=(r,e,n,t,o)=>{o==null&&(o=X2(r,e[0],t[0]));let i=[0,0,0,n];for(let s=0;s<3;s++)r[s]+2*o>=e[s]&&(i[s]=Math.trunc((r[s]-e[s]+2*o)/t[s]+1));return i},Z2=(r,e,n,t,o,i,s,a,u,l)=>{let f,c,p,b;if(r==="VALID"&&(r=0),typeof r=="number"){f={top:r,bottom:r,left:r,right:r,front:r,back:r};let h=Pb([e,n,t,1],[a,u,l],1,[o,i,s],r);c=h[0],p=h[1],b=h[2]}else if(Array.isArray(r)){if(!r.every((g,T,w)=>g===w[0]))throw Error(`Unsupported padding parameter: ${r}`);f={top:r[0],bottom:r[1],left:r[2],right:r[3],front:r[4],back:r[5]};let h=Pb([e,n,t,1],[a,u,l],1,[o,i,s],r[0]);c=h[0],p=h[1],b=h[2]}else if(r==="SAME_UPPER"){c=Math.ceil(e/o),p=Math.ceil(n/i),b=Math.ceil(t/s);let h=(c-1)*o+a-e,g=(p-1)*i+u-n,T=(b-1)*s+l-t,w=Math.floor(h/2),v=h-w,S=Math.floor(g/2),$=g-S,P=Math.floor(T/2),E=T-P;f={top:S,bottom:$,left:P,right:E,front:w,back:v}}else throw Error(`Unknown padding parameter: 
${r}`);return{padInfo:f,outDepth:c,outHeight:p,outWidth:b}},Ob=(r,e,n,t,o,i=!1,s="channelsLast")=>{let a,u,l,f,c;if(s==="channelsLast")[a,u,l,f,c]=r;else if(s==="channelsFirst")[a,c,u,l,f]=r;else throw new Error(`Unknown dataFormat ${s}`);let[p,,b,h,g]=e,[T,w,v]=Ab(n),[S,$,P]=Ab(t),E=Ui(b,S),N=Ui(h,$),z=Ui(g,P),{padInfo:q,outDepth:j,outHeight:F,outWidth:_e}=Z2(o,u,l,f,T,w,v,E,N,z),$e=i?p*c:p,ae=[0,0,0,0,0];return s==="channelsFirst"?ae=[a,$e,j,F,_e]:s==="channelsLast"&&(ae=[a,j,F,_e,$e]),{batchSize:a,dataFormat:s,inDepth:u,inHeight:l,inWidth:f,inChannels:c,outDepth:j,outHeight:F,outWidth:_e,outChannels:$e,padInfo:q,strideDepth:T,strideHeight:w,strideWidth:v,filterDepth:b,filterHeight:h,filterWidth:g,effectiveFilterDepth:E,effectiveFilterHeight:N,effectiveFilterWidth:z,dilationDepth:S,dilationHeight:$,dilationWidth:P,inShape:r,outShape:ae,filterShape:e}},Eb=(r,e,n,t,o,i)=>{let s=i==="channelsLast",a=s?r[0].dims[3]:r[0].dims[1],u=!1,l=[64,1,1],f={x:n.map((v,S)=>S)},c=[Math.ceil(K2(f.x.map(v=>n[v]))/l[0]),1,1];Ne("verbose",()=>`[conv3d_naive_webgpu] dispatch = ${c}`);let p=u?s&&a%4!==0?3:4:1,b=B.size(n),h=[{type:12,data:b},{type:12,data:t},{type:12,data:o},{type:12,data:e.strides},{type:12,data:e.dilations}];h.push(...W(r[0].dims,r[1].dims));let g=["rank","rank"],T=r.length===3;T&&(h.push(...W(r[2].dims)),g.push("rank")),h.push(...W(n));let w=v=>{let S=[{name:"output_size",type:"u32"},{name:"filter_dims",type:"u32",length:t.length},{name:"pads",type:"u32",length:o.length},{name:"strides",type:"u32",length:e.strides.length},{name:"dilations",type:"u32",length:e.dilations.length}],$=u?4:1,P=Be(r[0].dataType),E=D("x",r[0].dataType,r[0].dims.length,p===3?1:p),N=D("W",r[1].dataType,r[1].dims.length,$),z=[E,N],q=G("result",r[0].dataType,n.length,$),j="";if(T){let F=D("bias",r[2].dataType,r[2].dims.length,$);z.push(F),j+=` fn getBiasByOutputCoords(coords : array) -> ${u?`vec4<${P}>`:P} { return bias[${s?Z("coords",4,5):Z("coords",1,5)}${u?"/ 4":""}]; }`}return` - ${K} + ${j} fn getX(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 { let aIndices = array(d0, d1, d2, d3, d4); return ${E.getByIndices("aIndices")}; @@ -2802,7 +2802,7 @@ bIndices[${S-2}] = u32(row); } ${T?"dotProd = dotProd + getBiasByOutputCoords(coords)":""}; result[global_idx] = f32(dotProd); - }`};return{name:"Conv3DNaive",shaderCache:{hint:`${e.cacheKey};${s};${p};${T}`,inputDependencies:g},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:c[0],y:c[1],z:c[2]},programUniforms:h}),getShaderSource:w}}});var Qs,kb,Db=C(()=>{"use strict";ue();ye();he();eu();Or();Qs=(r,e,n)=>{let t=r.length>2,o=t?"value += b[output_channel];":"",i=r[0].dims,s=r[1].dims,a=s[0]/e.group,u=e.format==="NHWC",l=Wi(i,s,e.dilations,e.pads,e.strides,u),f=B.size(l),c=[{type:12,data:f},{type:12,data:e.dilations},{type:12,data:[e.strides[0],e.strides[1]]},{type:12,data:[e.pads[0],e.pads[1]]},{type:12,data:a}];Ht(e,c),c.push(...W(i,s));let p=["rank","rank"];t&&(c.push(...W(r[2].dims)),p.push("rank")),c.push(...W(l));let b=h=>{let g=G("output",r[0].dataType,l.length),T=Be(g.type.tensor),w=Wt(e,g.type.value,T),v=D("x",r[0].dataType,i.length),S=D("w",r[1].dataType,s.length),$=[v,S];t&&$.push(D("b",r[2].dataType,r[2].dims.length));let P=[{name:"output_size",type:"u32"},{name:"dilations",type:"u32",length:e.dilations.length},{name:"strides",type:"u32",length:2},{name:"pads",type:"u32",length:2},{name:"output_channels_per_group",type:"u32"}];return qt(e,P),` + 
}`};return{name:"Conv3DNaive",shaderCache:{hint:`${e.cacheKey};${s};${p};${T}`,inputDependencies:g},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:c[0],y:c[1],z:c[2]},programUniforms:h}),getShaderSource:w}}});var eu,kb,Db=C(()=>{"use strict";ue();ye();he();tu();Or();eu=(r,e,n)=>{let t=r.length>2,o=t?"value += b[output_channel];":"",i=r[0].dims,s=r[1].dims,a=s[0]/e.group,u=e.format==="NHWC",l=Wi(i,s,e.dilations,e.pads,e.strides,u),f=B.size(l),c=[{type:12,data:f},{type:12,data:e.dilations},{type:12,data:[e.strides[0],e.strides[1]]},{type:12,data:[e.pads[0],e.pads[1]]},{type:12,data:a}];Ht(e,c),c.push(...W(i,s));let p=["rank","rank"];t&&(c.push(...W(r[2].dims)),p.push("rank")),c.push(...W(l));let b=h=>{let g=G("output",r[0].dataType,l.length),T=Be(g.type.tensor),w=Wt(e,g.type.value,T),v=D("x",r[0].dataType,i.length),S=D("w",r[1].dataType,s.length),$=[v,S];t&&$.push(D("b",r[2].dataType,r[2].dims.length));let P=[{name:"output_size",type:"u32"},{name:"dilations",type:"u32",length:e.dilations.length},{name:"strides",type:"u32",length:2},{name:"pads",type:"u32",length:2},{name:"output_channels_per_group",type:"u32"}];return qt(e,P),` ${h.registerUniforms(P).declareVariables(...$,g)} ${h.mainStart()} @@ -2884,7 +2884,7 @@ bIndices[${S-2}] = u32(row); ${T} ${h.set("batch","row","col + i","output_channel","value")}; } - }`};return{name:"GroupedConv-Vectorize",shaderCache:{hint:`${e.cacheKey};${o};${i};${c};${u[0]};${u[1]}`,inputDependencies:t?["rank","rank","type"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:f}),getShaderSource:p}}});var tu,X2,Bb,ru=C(()=>{"use strict";ue();ye();Qn();he();Or();tu=(r,e,n,t,o=!1)=>{let i=r[0].dims,s=r[1].dims,a=i[i.length-2],u=s[s.length-1],l=i[i.length-1],f=ze(u),c=ze(l),p=ze(a),b=B.size(n)/f/p,h=r.length>2,g=t?t.slice(0,-2):n.slice(0,-2),w=[B.size(g),a,u],v=[{type:12,data:b},{type:12,data:a},{type:12,data:u},{type:12,data:l}];Ht(e,v),v.push(...W(g,i,s)),h&&v.push(...W(r[2].dims)),v.push(...W(w));let S=$=>{let P=ki("batch_dims",r[0].dataType,g.length),E=D("a",r[0].dataType,i.length,c),N=D("b",r[1].dataType,s.length,f),z=G("output",r[0].dataType,w.length,f),q=Be(z.type.tensor),K=Wt(e,z.type.value,q),F=[E,N],_e="";if(h){let se=o?f:1;F.push(D("bias",r[2].dataType,r[2].dims.length,se)),_e=`${o?`value += bias[col / ${se}];`:`value += ${z.type.value}(bias[row + i]);`}`}let $e=i.slice(0,-2),ae=s.slice(0,-2),qe=_n($e,g),Q=_n(ae,g),ge=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"}];qt(e,ge);let Ie=(se,pe)=>{let ce=se.rank,ut=se.name;if(ce===2)return`var ${ut}_indices = ${se.type.indices}(0u, 0u);`;let V=P.rank,ie=`var ${ut}_indices: ${se.type.indices};`;for(let Te=ce-2-1,tt=V-1;Te>=0;Te--,tt--)ie+=` + }`};return{name:"GroupedConv-Vectorize",shaderCache:{hint:`${e.cacheKey};${o};${i};${c};${u[0]};${u[1]}`,inputDependencies:t?["rank","rank","type"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:f}),getShaderSource:p}}});var ru,Y2,Bb,nu=C(()=>{"use strict";ue();ye();Qn();he();Or();ru=(r,e,n,t,o=!1)=>{let i=r[0].dims,s=r[1].dims,a=i[i.length-2],u=s[s.length-1],l=i[i.length-1],f=ze(u),c=ze(l),p=ze(a),b=B.size(n)/f/p,h=r.length>2,g=t?t.slice(0,-2):n.slice(0,-2),w=[B.size(g),a,u],v=[{type:12,data:b},{type:12,data:a},{type:12,data:u},{type:12,data:l}];Ht(e,v),v.push(...W(g,i,s)),h&&v.push(...W(r[2].dims)),v.push(...W(w));let S=$=>{let 
P=ki("batch_dims",r[0].dataType,g.length),E=D("a",r[0].dataType,i.length,c),N=D("b",r[1].dataType,s.length,f),z=G("output",r[0].dataType,w.length,f),q=Be(z.type.tensor),j=Wt(e,z.type.value,q),F=[E,N],_e="";if(h){let se=o?f:1;F.push(D("bias",r[2].dataType,r[2].dims.length,se)),_e=`${o?`value += bias[col / ${se}];`:`value += ${z.type.value}(bias[row + i]);`}`}let $e=i.slice(0,-2),ae=s.slice(0,-2),qe=In($e,g),Q=In(ae,g),ge=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"}];qt(e,ge);let Ie=(se,pe)=>{let ce=se.rank,ut=se.name;if(ce===2)return`var ${ut}_indices = ${se.type.indices}(0u, 0u);`;let V=P.rank,ie=`var ${ut}_indices: ${se.type.indices};`;for(let Te=ce-2-1,tt=V-1;Te>=0;Te--,tt--)ie+=` ${ut}_indices[${Te}] = ${V>1?`batch_indices[${tt}]`:"batch_indices"};`;return pe.forEach(Te=>{ie+=` ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${ut}_indices[${ce-1}] = 0u;`,ie},xe=()=>{let se=`var a_data: ${E.type.value};`;for(let pe=0;pe({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(b/64)},programUniforms:v}),getShaderSource:S}},X2=r=>{if(!r||r.length!==2)throw new Error("MatMul requires 2 inputs.");if(r[0].dims[r[0].dims.length-1]!==r[1].dims[r[1].dims.length-2])throw new Error("shared dimension does not match.")},Bb=r=>{X2(r.inputs);let e=nr.calcShape(r.inputs[0].dims,r.inputs[1].dims,!0);if(!e)throw new Error("Can't use matmul on the given tensors");let n=e[e.length-1],t=r.inputs[0].dims[r.inputs[0].dims.length-1];n<8&&t<8?r.compute(tu(r.inputs,{activation:""},e)):r.compute(Gi(r.inputs,{activation:""},e))}});var Wi,nu,Z2,ou,iu,Y2,J2,Q2,au,eu=C(()=>{"use strict";ye();$b();Cb();Qn();Db();Or();ru();Jr();Wi=(r,e,n,t,o,i)=>{let s=r[0],a=r.slice(i?1:2,i?3:4),u=a.length,l=e[0],c=e.slice(2).map((h,g)=>h+(h-1)*(n[g]-1)),b=a.map((h,g)=>h+t[g]+t[g+u]).map((h,g)=>Math.floor((h-c[g]+o[g])/o[g]));return b.splice(0,0,s),b.splice(i?3:1,0,l),b},nu=[2,3,1,0],Z2=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length>5)throw new Error("greater than 5D is not supported");if(r[0].dims.length!==r[1].dims.length)throw new Error("filter does not have same dimension as input");let n=r[0].dims[e.format==="NHWC"?r[0].dims.length-1:1],t=r[1].dims[1]*e.group;if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");if(r.length===3&&(r[2].dims.length!==1||r[1].dims[0]!==r[2].dims[0]))throw new Error("invalid bias");let o=r[0].dims.length-2;if(e.dilations.length!==o)throw new Error(`dilations should be ${o}D`);if(e.strides.length!==o)throw new Error(`strides should be ${o}D`);if(e.pads.length!==o*2)throw new Error(`pads should be ${o*2}D`);if(e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape")},ou=(r,e)=>{let n=r.kernelShape.slice();for(let i=2;i{let e=zi(r),n=r.format,t=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][r.auto_pad],o=r.dilations,i=r.group,s=r.kernel_shape,a=r.pads,u=r.strides,l=r.w_is_const();return{autoPad:t,format:n,dilations:o,group:i,kernelShape:s,pads:a,strides:u,wIsConst:l,...e,cacheKey:`${r.format};${e.activation};`}},Y2=(r,e,n)=>{let t=ou(n,e),o=n.format==="NHWC";if(n.group!==1){if(!r.adapterInfo.isArchitecture("ampere")&&o&&e[1].dims[0]===n.group&&e[1].dims[1]===1&&n.dilations[0]===1&&n.dilations[1]===1){let 
N=Wi(e[0].dims,e[1].dims,n.dilations,t.pads,n.strides,o),z=r.kernelCustomData.wT??r.compute(Tt(e[1],nu),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=z);let q=[e[0],z];e.length===3&&q.push(e[2]),r.compute(kb(q,t,N),{inputs:q})}else r.compute(Qs(e,t));return}let i=e.length===3,s=e[0].dims[o?1:2],a=e[0].dims[o?2:3],u=e[0].dims[o?3:1],l=e[1].dims[2],f=e[1].dims[3],c=Wi(e[0].dims,e[1].dims,n.dilations,t.pads,n.strides,o),p=c[o?1:2],b=c[o?2:3],h=c[o?3:1],g=o&&l===s&&f===a&&n.pads[0]===0&&n.pads[1]===0;if(g||l===1&&f===1&&n.dilations[0]===1&&n.dilations[1]===1&&n.strides[0]===1&&n.strides[1]===1&&n.pads[0]===0&&n.pads[1]===0){let E=c[0],N,z,q,K=[];if(o){let $e=r.kernelCustomData.wT??r.compute(Tt(e[1],nu),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];if(n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=$e),g){let ae=s*a*u;N=e[0].reshape([1,E,ae]),z=$e.reshape([1,ae,h]),q=[1,E,h]}else N=e[0].reshape([E,s*a,u]),z=$e.reshape([1,u,h]),q=[E,p*b,h];K.push(N),K.push(z)}else N=e[0].reshape([E,u,s*a]),z=e[1].reshape([1,h,u]),q=[E,h,p*b],K.push(z),K.push(N);i&&K.push(e[2]);let F=q[2],_e=K[0].dims[K[0].dims.length-1];F<8&&_e<8?r.compute(tu(K,t,c,q,o),{inputs:K}):r.compute(Gi(K,t,c,q,o),{inputs:K});return}let T=!0,w=r.kernelCustomData.wT??r.compute(Tt(e[1],nu),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=w);let v=[e[0],w];i&&v.push(e[2]);let S=o?p*b:h,$=o?h:p*b,P=l*f*u;r.compute(Sb(v,t,c,S,$,P,i,T),{inputs:v})},J2=(r,e)=>{let n=e.format==="NHWC",t=[r.inputs[0].reshape(n?[r.inputs[0].dims[0],1,r.inputs[0].dims[1],r.inputs[0].dims[2]]:[r.inputs[0].dims[0],r.inputs[0].dims[1],1,r.inputs[0].dims[2]]),r.inputs[1].reshape([r.inputs[1].dims[0],r.inputs[1].dims[1],1,r.inputs[1].dims[2]])];r.inputs.length===3&&t.push(r.inputs[2]);let o=[0,e.pads[0],0,e.pads[1]],i=[1].concat(e.strides),s=[1].concat(e.dilations),a=[1].concat(e.kernelShape),u=ou({...e,pads:o,strides:i,dilations:s,kernelShape:a},t);r.compute(Qs(t,u,l=>n?[l[0],l[2],l[3]]:[]))},Q2=(r,e,n)=>{let t=n.format==="NHWC"?"channelsLast":"channelsFirst",o=ou(n,e),i=n.autoPad==="NOTSET"?n.pads:n.autoPad,s=Ob(e[0].dims,e[1].dims,n.strides,n.dilations,i,!1,t);r.compute(Eb(e,o,s.outShape,[s.filterDepth,s.filterHeight,s.filterWidth],[s.padInfo.front,s.padInfo.top,s.padInfo.left],t))},au=(r,e)=>{Z2(r.inputs,e),r.inputs[0].dims.length===3?J2(r,e):r.inputs[0].dims.length===5?Q2(r,r.inputs,e):Y2(r,r.inputs,e)}});var e1,Lb,Rb=C(()=>{"use strict";ue();mr();he();Or();Mi();Js();Qn();e1=(r,e=!1,n,t,o=4)=>{let i=w=>{switch(w){case 1:return"return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];";case 4:return` + `};return{name:"MatMulNaive",shaderCache:{hint:`${e.activation};${f};${c};${p};${o}`,inputDependencies:h?["rank","rank","rank"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(b/64)},programUniforms:v}),getShaderSource:S}},Y2=r=>{if(!r||r.length!==2)throw new Error("MatMul requires 2 inputs.");if(r[0].dims[r[0].dims.length-1]!==r[1].dims[r[1].dims.length-2])throw new Error("shared dimension does not match.")},Bb=r=>{Y2(r.inputs);let e=nr.calcShape(r.inputs[0].dims,r.inputs[1].dims,!0);if(!e)throw new Error("Can't use matmul on the given tensors");let n=e[e.length-1],t=r.inputs[0].dims[r.inputs[0].dims.length-1];n<8&&t<8?r.compute(ru(r.inputs,{activation:""},e)):r.compute(Gi(r.inputs,{activation:""},e))}});var Wi,ou,J2,iu,au,Q2,e1,t1,su,tu=C(()=>{"use 
strict";ye();$b();Cb();Qn();Db();Or();nu();Jr();Wi=(r,e,n,t,o,i)=>{let s=r[0],a=r.slice(i?1:2,i?3:4),u=a.length,l=e[0],c=e.slice(2).map((h,g)=>h+(h-1)*(n[g]-1)),b=a.map((h,g)=>h+t[g]+t[g+u]).map((h,g)=>Math.floor((h-c[g]+o[g])/o[g]));return b.splice(0,0,s),b.splice(i?3:1,0,l),b},ou=[2,3,1,0],J2=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length>5)throw new Error("greater than 5D is not supported");if(r[0].dims.length!==r[1].dims.length)throw new Error("filter does not have same dimension as input");let n=r[0].dims[e.format==="NHWC"?r[0].dims.length-1:1],t=r[1].dims[1]*e.group;if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");if(r.length===3&&(r[2].dims.length!==1||r[1].dims[0]!==r[2].dims[0]))throw new Error("invalid bias");let o=r[0].dims.length-2;if(e.dilations.length!==o)throw new Error(`dilations should be ${o}D`);if(e.strides.length!==o)throw new Error(`strides should be ${o}D`);if(e.pads.length!==o*2)throw new Error(`pads should be ${o*2}D`);if(e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape")},iu=(r,e)=>{let n=r.kernelShape.slice();for(let i=2;i{let e=zi(r),n=r.format,t=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][r.auto_pad],o=r.dilations,i=r.group,s=r.kernel_shape,a=r.pads,u=r.strides,l=r.w_is_const();return{autoPad:t,format:n,dilations:o,group:i,kernelShape:s,pads:a,strides:u,wIsConst:l,...e,cacheKey:`${r.format};${e.activation};`}},Q2=(r,e,n)=>{let t=iu(n,e),o=n.format==="NHWC";if(n.group!==1){if(!r.adapterInfo.isArchitecture("ampere")&&o&&e[1].dims[0]===n.group&&e[1].dims[1]===1&&n.dilations[0]===1&&n.dilations[1]===1){let N=Wi(e[0].dims,e[1].dims,n.dilations,t.pads,n.strides,o),z=r.kernelCustomData.wT??r.compute(Tt(e[1],ou),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=z);let q=[e[0],z];e.length===3&&q.push(e[2]),r.compute(kb(q,t,N),{inputs:q})}else r.compute(eu(e,t));return}let i=e.length===3,s=e[0].dims[o?1:2],a=e[0].dims[o?2:3],u=e[0].dims[o?3:1],l=e[1].dims[2],f=e[1].dims[3],c=Wi(e[0].dims,e[1].dims,n.dilations,t.pads,n.strides,o),p=c[o?1:2],b=c[o?2:3],h=c[o?3:1],g=o&&l===s&&f===a&&n.pads[0]===0&&n.pads[1]===0;if(g||l===1&&f===1&&n.dilations[0]===1&&n.dilations[1]===1&&n.strides[0]===1&&n.strides[1]===1&&n.pads[0]===0&&n.pads[1]===0){let E=c[0],N,z,q,j=[];if(o){let $e=r.kernelCustomData.wT??r.compute(Tt(e[1],ou),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];if(n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=$e),g){let ae=s*a*u;N=e[0].reshape([1,E,ae]),z=$e.reshape([1,ae,h]),q=[1,E,h]}else N=e[0].reshape([E,s*a,u]),z=$e.reshape([1,u,h]),q=[E,p*b,h];j.push(N),j.push(z)}else N=e[0].reshape([E,u,s*a]),z=e[1].reshape([1,h,u]),q=[E,h,p*b],j.push(z),j.push(N);i&&j.push(e[2]);let F=q[2],_e=j[0].dims[j[0].dims.length-1];F<8&&_e<8?r.compute(ru(j,t,c,q,o),{inputs:j}):r.compute(Gi(j,t,c,q,o),{inputs:j});return}let T=!0,w=r.kernelCustomData.wT??r.compute(Tt(e[1],ou),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=w);let v=[e[0],w];i&&v.push(e[2]);let S=o?p*b:h,$=o?h:p*b,P=l*f*u;r.compute(Sb(v,t,c,S,$,P,i,T),{inputs:v})},e1=(r,e)=>{let 
n=e.format==="NHWC",t=[r.inputs[0].reshape(n?[r.inputs[0].dims[0],1,r.inputs[0].dims[1],r.inputs[0].dims[2]]:[r.inputs[0].dims[0],r.inputs[0].dims[1],1,r.inputs[0].dims[2]]),r.inputs[1].reshape([r.inputs[1].dims[0],r.inputs[1].dims[1],1,r.inputs[1].dims[2]])];r.inputs.length===3&&t.push(r.inputs[2]);let o=[0,e.pads[0],0,e.pads[1]],i=[1].concat(e.strides),s=[1].concat(e.dilations),a=[1].concat(e.kernelShape),u=iu({...e,pads:o,strides:i,dilations:s,kernelShape:a},t);r.compute(eu(t,u,l=>n?[l[0],l[2],l[3]]:[]))},t1=(r,e,n)=>{let t=n.format==="NHWC"?"channelsLast":"channelsFirst",o=iu(n,e),i=n.autoPad==="NOTSET"?n.pads:n.autoPad,s=Ob(e[0].dims,e[1].dims,n.strides,n.dilations,i,!1,t);r.compute(Eb(e,o,s.outShape,[s.filterDepth,s.filterHeight,s.filterWidth],[s.padInfo.front,s.padInfo.top,s.padInfo.left],t))},su=(r,e)=>{J2(r.inputs,e),r.inputs[0].dims.length===3?e1(r,e):r.inputs[0].dims.length===5?t1(r,r.inputs,e):Q2(r,r.inputs,e)}});var r1,Lb,Rb=C(()=>{"use strict";ue();mr();he();Or();Mi();Qs();Qn();r1=(r,e=!1,n,t,o=4)=>{let i=w=>{switch(w){case 1:return"return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];";case 4:return` let coord1 = vec4(coordX, coordY, col + 1, rowInner); let coord2 = vec4(coordX, coordY, col + 2, rowInner); let coord3 = vec4(coordX, coordY, col + 3, rowInner); @@ -3003,15 +3003,15 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${g} result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${o}] = value; } - }`},Lb=(r,e,n,t,o,i,s,a)=>{let u=e.format==="NHWC",l=u?r[0].dims[3]:r[0].dims[1],f=n[0],c=u?n[2]:n[3],p=u?n[1]:n[2],b=u?n[3]:n[1],h=u&&l%4===0&&l%3&&b%4===0,g=u?b:c*p,T=u?c*p:b,w=[8,8,1],v=t<=8?[4,1,1]:[4,4,1],S=[Math.ceil(g/w[0]/v[0]),Math.ceil(T/w[1]/v[1]),Math.ceil(f/w[2]/v[2])];Ne("verbose",()=>`[conv_backprop_mm_webgpu] dispatch = ${S}`);let $=h?4:1,P=Math.max(w[0]*$,w[1]),E=h?4:1,N=[e.kernelShape[u?1:2],e.kernelShape[u?2:3]],z=[N[0]+(e.dilations[0]<=1?0:(N[0]-1)*(e.dilations[0]-1)),N[1]+(e.dilations[1]<=1?0:(N[1]-1)*(e.dilations[1]-1))],q=[z[0]-1-Math.floor((e.pads[0]+e.pads[2])/2),z[1]-1-Math.floor((e.pads[1]+e.pads[3])/2)],K=[{type:6,data:t},{type:6,data:o},{type:6,data:i},{type:6,data:e.strides},{type:6,data:e.dilations},{type:6,data:N},{type:6,data:q}];Ht(e,K),K.push(...W(r[0].dims,r[1].dims));let F=["rank","rank"];s&&(K.push(...W(r[2].dims)),F.push("rank")),K.push(...W(n));let _e=$e=>{let ae=D("x",r[0].dataType,r[0].dims.length,E),qe=D("w",r[1].dataType,r[1].dims.length,1),Q=G("result",r[0].dataType,n.length,E),ge=[ae,qe],Ie="";if(s){let pe=D("bias",r[2].dataType,r[2].dims.length,E);ge.push(pe),Ie+=` + }`},Lb=(r,e,n,t,o,i,s,a)=>{let u=e.format==="NHWC",l=u?r[0].dims[3]:r[0].dims[1],f=n[0],c=u?n[2]:n[3],p=u?n[1]:n[2],b=u?n[3]:n[1],h=u&&l%4===0&&l%3&&b%4===0,g=u?b:c*p,T=u?c*p:b,w=[8,8,1],v=t<=8?[4,1,1]:[4,4,1],S=[Math.ceil(g/w[0]/v[0]),Math.ceil(T/w[1]/v[1]),Math.ceil(f/w[2]/v[2])];Ne("verbose",()=>`[conv_backprop_mm_webgpu] dispatch = ${S}`);let $=h?4:1,P=Math.max(w[0]*$,w[1]),E=h?4:1,N=[e.kernelShape[u?1:2],e.kernelShape[u?2:3]],z=[N[0]+(e.dilations[0]<=1?0:(N[0]-1)*(e.dilations[0]-1)),N[1]+(e.dilations[1]<=1?0:(N[1]-1)*(e.dilations[1]-1))],q=[z[0]-1-Math.floor((e.pads[0]+e.pads[2])/2),z[1]-1-Math.floor((e.pads[1]+e.pads[3])/2)],j=[{type:6,data:t},{type:6,data:o},{type:6,data:i},{type:6,data:e.strides},{type:6,data:e.dilations},{type:6,data:N},{type:6,data:q}];Ht(e,j),j.push(...W(r[0].dims,r[1].dims));let F=["rank","rank"];s&&(j.push(...W(r[2].dims)),F.push("rank")),j.push(...W(n));let _e=$e=>{let 
ae=D("x",r[0].dataType,r[0].dims.length,E),qe=D("w",r[1].dataType,r[1].dims.length,1),Q=G("result",r[0].dataType,n.length,E),ge=[ae,qe],Ie="";if(s){let pe=D("bias",r[2].dataType,r[2].dims.length,E);ge.push(pe),Ie+=` fn getBiasByOutputCoords(coords : vec4) -> ${pe.type.value} { return bias[coords.${u?"w":"y"}${h?"/ 4":""}]; }`}let xe=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"strides",type:"i32",length:2},{name:"dilations",type:"i32",length:2},{name:"filter_dims",type:"i32",length:N.length},{name:"pads",type:"i32",length:q.length}];qt(e,xe);let se=Be(r[0].dataType,1);if(se!=="f16"&&se!=="f32")throw new Error(`elemType ${se} is not supported.`);return` ${Vi("uniforms.result_strides")} ${$e.registerUniforms(xe).declareVariables(...ge,Q)}; ${Ie} - ${e1(u,s,e,ae.type.value,$)} - ${h?Yn(v,w,se,void 0,!u,P):Jn(v,w,se,void 0,!u,P,!1,void 0,a)}`};return{name:"Conv2DTransposeMatMul",shaderCache:{hint:`${e.cacheKey};${v};${w};${h}`,inputDependencies:F},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:K}),getShaderSource:_e}}});var t1,su,Nb=C(()=>{"use strict";ue();mr();ye();he();t1=(r,e,n,t,o,i=!1,s,a,u=!1)=>{let l=u?1:2,f=u?2:3,c=u?3:1,p=i?2:1,b=` + ${r1(u,s,e,ae.type.value,$)} + ${h?Yn(v,w,se,void 0,!u,P):Jn(v,w,se,void 0,!u,P,!1,void 0,a)}`};return{name:"Conv2DTransposeMatMul",shaderCache:{hint:`${e.cacheKey};${v};${w};${h}`,inputDependencies:F},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:j}),getShaderSource:_e}}});var n1,uu,Nb=C(()=>{"use strict";ue();mr();ye();he();n1=(r,e,n,t,o,i=!1,s,a,u=!1)=>{let l=u?1:2,f=u?2:3,c=u?3:1,p=i?2:1,b=` fn setOutputAtIndex(flatIndex : u32, value : ${i?`vec4<${s}>`:s}) { result[flatIndex] = ${i?`vec4<${s}>`:s}(value); }`;t&&(b+=` @@ -3174,7 +3174,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${r.mainStart()} ${r.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")}; - ${i?S:$}}`},su=(r,e,n)=>{let t=r.length>2,o=e.outputShape,i=B.size(o),s=[Math.ceil(i/64),1,1];Ne("verbose",()=>`[conv2d_backprop_webgpu] dispatch = ${s}`);let a=e.format==="NHWC",u=["rank","rank"],l=[e.strides[0],e.strides[1]],f=[e.kernelShape[a?1:2],e.kernelShape[a?2:3]],c=[e.dilations[0],e.dilations[1]],p=[f[0]+(e.dilations[0]<=1?0:(e.kernelShape[a?1:2]-1)*(e.dilations[0]-1)),f[1]+(e.dilations[1]<=1?0:(e.kernelShape[a?2:3]-1)*(e.dilations[1]-1))],b=[p[0]-1-Math.floor((e.pads[0]+e.pads[2])/2),p[1]-1-Math.floor(e.pads[1]+e.pads[3])/2],h=!1,g=e.group,T=r[1].dims,w=T[0]/g,v=T[1],S=[{type:12,data:i},{type:12,data:l},{type:12,data:f},{type:12,data:c},{type:12,data:p},{type:6,data:b},{type:12,data:w},{type:12,data:v},...W(r[0].dims,r[1].dims)];t&&(S.push(...W(r[2].dims)),u.push("rank")),S.push(...W(o));let $=s[1]===1&&s[2]===1,P=E=>{let N=[{name:"output_size",type:"u32"},{name:"strides",type:"u32",length:l.length},{name:"filter_dims",type:"u32",length:f.length},{name:"dilations",type:"u32",length:f.length},{name:"effective_filter_dims",type:"u32",length:p.length},{name:"pads",type:"i32",length:b.length},{name:"input_channels_per_group",type:"u32"},{name:"output_channels_per_group",type:"u32"}],z=Be(r[0].dataType);return`${t1(E,r,o,t,$,h,z,N,a)}`};return{name:"ConvTranspose2D",shaderCache:{hint:`${e.cacheKey};`,inputDependencies:u},getRunData:()=>({dispatchGroup:{x:s[0],y:s[1],z:s[2]},outputs:[{dims:n?n(o):o,dataType:r[0].dataType}],programUniforms:S}),getShaderSource:P}}});var 
r1,n1,o1,zb,Fb,i1,a1,s1,u1,Mb,Vb=C(()=>{"use strict";Rb();Nb();Or();Jr();r1=(r,e,n,t,o,i)=>(r-1)*e+n+(t-1)*o+1-i,n1=(r,e,n,t,o)=>{let i=Math.floor(r/2);e==="SAME_UPPER"?(n[t]=i,n[o]=r-i):e==="SAME_LOWER"&&(n[t]=r-i,n[o]=i)},o1=(r,e,n,t,o,i,s,a,u,l)=>{let f=r.length-2,c=l.length===0;if(u.length===0)for(let h=0;h{let n=r.kernelShape.slice();if(r.kernelShape.length===0||r.kernelShape.reduce((c,p)=>c*p,1)===0){n.length=0;for(let c=2;cc+p,0)===0){let c=e[0].dims.length-2;u=new Array(c).fill(1)}let l=r.strides.slice();if(l.reduce((c,p)=>c+p,0)===0){let c=e[0].dims.length-2;l=new Array(c).fill(1)}o1(a,n,u,r.autoPad,r.group,o,l,t,s,i);let f=Object.assign({},r);return Object.assign(f,{kernelShape:n,pads:o,outputPadding:s,outputShape:i,dilations:u,strides:l}),f},Fb=r=>{let e=zi(r),n=r.format,t=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][typeof r.autoPad>"u"?0:r.autoPad],o=r.dilations,i=r.group,s=r.kernelShape,a=r.pads,u=r.strides,l=r.wIsConst(),f=r.outputPadding,c=r.outputShape;return{autoPad:t,format:n,dilations:o,group:i,kernelShape:s,outputPadding:f,outputShape:c,pads:a,strides:u,wIsConst:l,...e,cacheKey:`${r.format};${e.activation};`}},i1=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length!==4&&r[0].dims.length!==3)throw new Error("currently only support 2-dimensional conv");if(r[0].dims.length!==r[1].dims.length)throw new Error("filter does not have same dimension as input");let n=r[0].dims[e.format==="NHWC"?r[0].dims.length-1:1],t=r[1].dims[0];if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");let o=r[1].dims[1]*e.group;if(r.length===3&&(r[2].dims.length!==1||r[2].dims[0]!==o))throw new Error("invalid bias");let i=r[0].dims.length-2;if(e.dilations.reduce((f,c)=>f+c,0)>0&&e.dilations.length!==i)throw new Error(`dilations should be ${i}D`);if(e.strides.reduce((f,c)=>f+c,0)>0&&e.strides.length!==i)throw new Error(`strides should be ${i}D`);if(e.pads.reduce((f,c)=>f+c,0)>0&&e.pads.length!==i*2)throw new Error(`pads should be ${i*2}D`);if(e.outputPadding.length!==i&&e.outputPadding.length!==0)throw new Error(`output_padding should be ${i}D`);if(e.kernelShape.reduce((f,c)=>f+c,0)>0&&e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape");if(e.outputShape.length!==0&&e.outputShape.length!==r[0].dims.length-2)throw new Error("invalid output shape")},a1=[2,3,1,0],s1=(r,e,n)=>{let t=zb(n,e),o=n.format==="NHWC",i=t.outputShape,s=i[o?3:1],a=e[0].dims[o?3:1];if(t.group!==1||s===1&&a===1){r.compute(su(e,t));return}let u=i[o?1:2],l=i[o?2:3],f=e[1].dims[2],c=e[1].dims[3],p=o?u*l:s,b=o?s:u*l,h=f*c*a,g=!0,T=r.kernelCustomData.wT??r.compute(Tt(e[1],a1),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=T);let w=[e[0],T],v=e.length===3;v&&(!o&&e[2].dims.length===1?w.push(e[2].reshape([e[2].dims[0],1,1])):w.push(e[2])),r.compute(Lb(w,t,i,p,b,h,v,g),{inputs:w})},u1=(r,e)=>{let n=e.format==="NHWC",t=[r.inputs[0].reshape(n?[r.inputs[0].dims[0],1,r.inputs[0].dims[1],r.inputs[0].dims[2]]:[r.inputs[0].dims[0],r.inputs[0].dims[1],1,r.inputs[0].dims[2]]),r.inputs[1].reshape([r.inputs[1].dims[0],r.inputs[1].dims[1],1,r.inputs[1].dims[2]])];r.inputs.length===3&&t.push(r.inputs[2]);let o=e.kernelShape;(o.length===0||o[0]===0)&&(o=[r.inputs[1].dims[2]]);let i=e.dilations;(i.length===0||i[0]===0)&&(i=[1]);let s=e.strides;(s.length===0||s[0]===0)&&(s=[1]);let 
a=e.pads;a.length===0&&(a=[0,0]),a=[0,a[0],0,a[1]],s=[1].concat(s),i=[1].concat(i),o=[1].concat(o);let u=zb({...e,pads:a,strides:s,dilations:i,kernelShape:o},t);r.compute(su(t,u,l=>n?[l[0],l[2],l[3]]:[l[0],l[1],l[3]]))},Mb=(r,e)=>{i1(r.inputs,e),r.inputs[0].dims.length===3?u1(r,e):s1(r,r.inputs,e)}});var l1,Gb,Ub,Wb=C(()=>{"use strict";ue();ye();et();he();l1=(r,e,n,t)=>{let o=B.size(e),i=e.length,s=D("input",r,i),a=G("output",r,i),u=n.dataType===6?n.getInt32Array()[0]:Number(n.getBigInt64Array()[0]),l=B.normalizeAxis(u,i),f=c=>{let p=` i32(${s.indicesGet("inputIndices","uniforms.axis")}) `,b=Z("uniforms.input_shape","uniforms.axis",i),h=t.reverse?p+(t.exclusive?" + 1":""):"0",g=t.reverse?b:p+(t.exclusive?"":" + 1");return` + ${i?S:$}}`},uu=(r,e,n)=>{let t=r.length>2,o=e.outputShape,i=B.size(o),s=[Math.ceil(i/64),1,1];Ne("verbose",()=>`[conv2d_backprop_webgpu] dispatch = ${s}`);let a=e.format==="NHWC",u=["rank","rank"],l=[e.strides[0],e.strides[1]],f=[e.kernelShape[a?1:2],e.kernelShape[a?2:3]],c=[e.dilations[0],e.dilations[1]],p=[f[0]+(e.dilations[0]<=1?0:(e.kernelShape[a?1:2]-1)*(e.dilations[0]-1)),f[1]+(e.dilations[1]<=1?0:(e.kernelShape[a?2:3]-1)*(e.dilations[1]-1))],b=[p[0]-1-Math.floor((e.pads[0]+e.pads[2])/2),p[1]-1-Math.floor(e.pads[1]+e.pads[3])/2],h=!1,g=e.group,T=r[1].dims,w=T[0]/g,v=T[1],S=[{type:12,data:i},{type:12,data:l},{type:12,data:f},{type:12,data:c},{type:12,data:p},{type:6,data:b},{type:12,data:w},{type:12,data:v},...W(r[0].dims,r[1].dims)];t&&(S.push(...W(r[2].dims)),u.push("rank")),S.push(...W(o));let $=s[1]===1&&s[2]===1,P=E=>{let N=[{name:"output_size",type:"u32"},{name:"strides",type:"u32",length:l.length},{name:"filter_dims",type:"u32",length:f.length},{name:"dilations",type:"u32",length:f.length},{name:"effective_filter_dims",type:"u32",length:p.length},{name:"pads",type:"i32",length:b.length},{name:"input_channels_per_group",type:"u32"},{name:"output_channels_per_group",type:"u32"}],z=Be(r[0].dataType);return`${n1(E,r,o,t,$,h,z,N,a)}`};return{name:"ConvTranspose2D",shaderCache:{hint:`${e.cacheKey};`,inputDependencies:u},getRunData:()=>({dispatchGroup:{x:s[0],y:s[1],z:s[2]},outputs:[{dims:n?n(o):o,dataType:r[0].dataType}],programUniforms:S}),getShaderSource:P}}});var o1,i1,a1,zb,Fb,s1,u1,l1,c1,Mb,Vb=C(()=>{"use strict";Rb();Nb();Or();Jr();o1=(r,e,n,t,o,i)=>(r-1)*e+n+(t-1)*o+1-i,i1=(r,e,n,t,o)=>{let i=Math.floor(r/2);e==="SAME_UPPER"?(n[t]=i,n[o]=r-i):e==="SAME_LOWER"&&(n[t]=r-i,n[o]=i)},a1=(r,e,n,t,o,i,s,a,u,l)=>{let f=r.length-2,c=l.length===0;if(u.length===0)for(let h=0;h{let n=r.kernelShape.slice();if(r.kernelShape.length===0||r.kernelShape.reduce((c,p)=>c*p,1)===0){n.length=0;for(let c=2;cc+p,0)===0){let c=e[0].dims.length-2;u=new Array(c).fill(1)}let l=r.strides.slice();if(l.reduce((c,p)=>c+p,0)===0){let c=e[0].dims.length-2;l=new Array(c).fill(1)}a1(a,n,u,r.autoPad,r.group,o,l,t,s,i);let f=Object.assign({},r);return Object.assign(f,{kernelShape:n,pads:o,outputPadding:s,outputShape:i,dilations:u,strides:l}),f},Fb=r=>{let e=zi(r),n=r.format,t=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][typeof r.autoPad>"u"?0:r.autoPad],o=r.dilations,i=r.group,s=r.kernelShape,a=r.pads,u=r.strides,l=r.wIsConst(),f=r.outputPadding,c=r.outputShape;return{autoPad:t,format:n,dilations:o,group:i,kernelShape:s,outputPadding:f,outputShape:c,pads:a,strides:u,wIsConst:l,...e,cacheKey:`${r.format};${e.activation};`}},s1=(r,e)=>{if(!r||r.length!==2&&r.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(r[0].dims.length!==4&&r[0].dims.length!==3)throw new Error("currently only 
support 2-dimensional conv");if(r[0].dims.length!==r[1].dims.length)throw new Error("filter does not have same dimension as input");let n=r[0].dims[e.format==="NHWC"?r[0].dims.length-1:1],t=r[1].dims[0];if(n!==t)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");let o=r[1].dims[1]*e.group;if(r.length===3&&(r[2].dims.length!==1||r[2].dims[0]!==o))throw new Error("invalid bias");let i=r[0].dims.length-2;if(e.dilations.reduce((f,c)=>f+c,0)>0&&e.dilations.length!==i)throw new Error(`dilations should be ${i}D`);if(e.strides.reduce((f,c)=>f+c,0)>0&&e.strides.length!==i)throw new Error(`strides should be ${i}D`);if(e.pads.reduce((f,c)=>f+c,0)>0&&e.pads.length!==i*2)throw new Error(`pads should be ${i*2}D`);if(e.outputPadding.length!==i&&e.outputPadding.length!==0)throw new Error(`output_padding should be ${i}D`);if(e.kernelShape.reduce((f,c)=>f+c,0)>0&&e.kernelShape.length!==0&&e.kernelShape.length!==r[1].dims.length-2)throw new Error("invalid kernel shape");if(e.outputShape.length!==0&&e.outputShape.length!==r[0].dims.length-2)throw new Error("invalid output shape")},u1=[2,3,1,0],l1=(r,e,n)=>{let t=zb(n,e),o=n.format==="NHWC",i=t.outputShape,s=i[o?3:1],a=e[0].dims[o?3:1];if(t.group!==1||s===1&&a===1){r.compute(uu(e,t));return}let u=i[o?1:2],l=i[o?2:3],f=e[1].dims[2],c=e[1].dims[3],p=o?u*l:s,b=o?s:u*l,h=f*c*a,g=!0,T=r.kernelCustomData.wT??r.compute(Tt(e[1],u1),{inputs:[1],outputs:[n.wIsConst?-2:-1]})[0];n.wIsConst&&!r.kernelCustomData.wT&&(r.kernelCustomData.wT=T);let w=[e[0],T],v=e.length===3;v&&(!o&&e[2].dims.length===1?w.push(e[2].reshape([e[2].dims[0],1,1])):w.push(e[2])),r.compute(Lb(w,t,i,p,b,h,v,g),{inputs:w})},c1=(r,e)=>{let n=e.format==="NHWC",t=[r.inputs[0].reshape(n?[r.inputs[0].dims[0],1,r.inputs[0].dims[1],r.inputs[0].dims[2]]:[r.inputs[0].dims[0],r.inputs[0].dims[1],1,r.inputs[0].dims[2]]),r.inputs[1].reshape([r.inputs[1].dims[0],r.inputs[1].dims[1],1,r.inputs[1].dims[2]])];r.inputs.length===3&&t.push(r.inputs[2]);let o=e.kernelShape;(o.length===0||o[0]===0)&&(o=[r.inputs[1].dims[2]]);let i=e.dilations;(i.length===0||i[0]===0)&&(i=[1]);let s=e.strides;(s.length===0||s[0]===0)&&(s=[1]);let a=e.pads;a.length===0&&(a=[0,0]),a=[0,a[0],0,a[1]],s=[1].concat(s),i=[1].concat(i),o=[1].concat(o);let u=zb({...e,pads:a,strides:s,dilations:i,kernelShape:o},t);r.compute(uu(t,u,l=>n?[l[0],l[2],l[3]]:[l[0],l[1],l[3]]))},Mb=(r,e)=>{s1(r.inputs,e),r.inputs[0].dims.length===3?c1(r,e):l1(r,r.inputs,e)}});var f1,Gb,Ub,Wb=C(()=>{"use strict";ue();ye();et();he();f1=(r,e,n,t)=>{let o=B.size(e),i=e.length,s=D("input",r,i),a=G("output",r,i),u=n.dataType===6?n.getInt32Array()[0]:Number(n.getBigInt64Array()[0]),l=B.normalizeAxis(u,i),f=c=>{let p=` i32(${s.indicesGet("inputIndices","uniforms.axis")}) `,b=Z("uniforms.input_shape","uniforms.axis",i),h=t.reverse?p+(t.exclusive?" 
+ 1":""):"0",g=t.reverse?b:p+(t.exclusive?"":" + 1");return` ${c.registerUniform("outputSize","u32").registerUniform("axis","u32").declareVariables(s,a)} ${c.mainStart()} ${c.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} @@ -3187,12 +3187,12 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; sum = sum + ${s.getByIndices("inputIndices")}; } ${a.setByOffset("global_idx","sum")}; - }`};return{name:"CumSum",shaderCache:{hint:t.cacheKey,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:e,dataType:r}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:[{type:12,data:o},{type:12,data:l},...W(e,e)]}),getShaderSource:f}},Gb=(r,e)=>{let n=r.inputs[0].dims,t=r.inputs[0].dataType,o=r.inputs[1];r.compute(l1(t,n,o,e),{inputs:[0]})},Ub=r=>{let e=r.exclusive===1,n=r.reverse===1;return de({exclusive:e,reverse:n})}});var c1,f1,d1,Hb,qb,Kb=C(()=>{"use strict";ue();ye();et();he();c1=r=>{if(!r||r.length!==1)throw new Error("DepthToSpace requires 1 input.");if(r[0].dims.length!==4)throw new Error("DepthToSpace requires 4D input.")},f1=(r,e,n,t)=>{let o=[];o.push(`fn perm(i: ${t.type.indices}) -> ${n.type.indices} { + }`};return{name:"CumSum",shaderCache:{hint:t.cacheKey,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:e,dataType:r}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:[{type:12,data:o},{type:12,data:l},...W(e,e)]}),getShaderSource:f}},Gb=(r,e)=>{let n=r.inputs[0].dims,t=r.inputs[0].dataType,o=r.inputs[1];r.compute(f1(t,n,o,e),{inputs:[0]})},Ub=r=>{let e=r.exclusive===1,n=r.reverse===1;return de({exclusive:e,reverse:n})}});var d1,p1,m1,Hb,qb,jb=C(()=>{"use strict";ue();ye();et();he();d1=r=>{if(!r||r.length!==1)throw new Error("DepthToSpace requires 1 input.");if(r[0].dims.length!==4)throw new Error("DepthToSpace requires 4D input.")},p1=(r,e,n,t)=>{let o=[];o.push(`fn perm(i: ${t.type.indices}) -> ${n.type.indices} { var a: ${n.type.indices};`);for(let i=0;i{let n,t,o,i,s,a,u=e.format==="NHWC",l=e.blocksize,f=e.mode==="DCR";u?([n,t,o,i]=r.dims,s=f?[n,t,o,l,l,i/l**2]:[n,t,o,i/l**2,l,l],a=f?[0,1,3,2,4,5]:[0,1,4,2,5,3]):([n,t,o,i]=[r.dims[0],r.dims[2],r.dims[3],r.dims[1]],s=f?[n,l,l,i/l**2,t,o]:[n,i/l**2,l,l,t,o],a=f?[0,3,4,1,5,2]:[0,1,4,2,5,3]);let c=r.reshape(s),p=c.dims.length,b=r.dataType,h=D("a",b,p),g=G("output",b,p),T=w=>` +`)},m1=(r,e)=>{let n,t,o,i,s,a,u=e.format==="NHWC",l=e.blocksize,f=e.mode==="DCR";u?([n,t,o,i]=r.dims,s=f?[n,t,o,l,l,i/l**2]:[n,t,o,i/l**2,l,l],a=f?[0,1,3,2,4,5]:[0,1,4,2,5,3]):([n,t,o,i]=[r.dims[0],r.dims[2],r.dims[3],r.dims[1]],s=f?[n,l,l,i/l**2,t,o]:[n,i/l**2,l,l,t,o],a=f?[0,3,4,1,5,2]:[0,1,4,2,5,3]);let c=r.reshape(s),p=c.dims.length,b=r.dataType,h=D("a",b,p),g=G("output",b,p),T=w=>` ${w.registerUniform("output_size","u32").declareVariables(h,g)} - ${f1(a,p,h,g)} + ${p1(a,p,h,g)} ${w.mainStart()} ${w.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -3201,7 +3201,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let aIndices = perm(indices); ${g.setByOffset("global_idx",h.getByIndices("aIndices"))} - }`;return{name:"DepthToSpace",shaderCache:{hint:`${r.dims};${e.blocksize};${e.mode}`,inputDependencies:["rank"]},getRunData:w=>{let v=u?[n,t*l,o*l,i/l**2]:[n,i/l**2,t*l,o*l],S=B.size(v),$=c.dims,P=B.sortBasedOnPerm($,a);return{outputs:[{dims:v,dataType:w[0].dataType}],dispatchGroup:{x:Math.ceil(S/64)},programUniforms:[{type:12,data:S},...W($,P)]}},getShaderSource:T}},Hb=(r,e)=>{c1(r.inputs),r.compute(d1(r.inputs[0],e))},qb=r=>de({blocksize:r.blocksize,mode:r.mode,format:r.format})});var 
uu,Hi,jb,p1,m1,lu,cu,Xb,h1,Zb,Yb,Jb=C(()=>{"use strict";ue();ye();et();he();uu="[a-zA-Z]|\\.\\.\\.",Hi="("+uu+")+",jb="^"+Hi+"$",p1="("+Hi+",)*"+Hi,m1="^"+p1+"$",lu=class{constructor(e=-1){this.symbolToIndices=new Map,this.inputIndex=e}addSymbol(e,n){let t=this.symbolToIndices.get(e);t===void 0?t=[n]:t.push(n),this.symbolToIndices.set(e,t)}},cu=class{constructor(e,n){this.equation=n;this.hasEllipsis=!1,this.symbolToInfo=new Map,this.lhs=new Array,this.outputDims=[];let[t,o]=n.includes("->")?n.split("->",2):[n,""];if(!t.match(RegExp(m1)))throw new Error("Invalid LHS term");if(t.split(",").forEach((a,u)=>{let l=e[u].dims.slice();if(!a.match(RegExp(jb)))throw new Error("Invalid LHS term");let f=this.processTerm(a,!0,l,u);this.lhs.push(f)}),o==="")o+=[...this.symbolToInfo.entries()].filter(([a,u])=>u.count===1||a==="...").map(([a])=>a).join("");else if(!o.match(RegExp(Hi)))throw new Error("Invalid RHS");o.match(RegExp(uu,"g"))?.forEach(a=>{if(a==="...")this.outputDims=this.outputDims.concat(this.ellipsisDims);else{let u=this.symbolToInfo.get(a);if(u===void 0)throw new Error("Invalid RHS symbol");this.outputDims.push(u.dimValue)}}),this.rhs=this.processTerm(o,!1,this.outputDims)}addSymbol(e,n,t){let o=this.symbolToInfo.get(e);if(o!==void 0){if(o.dimValue!==n&&o.count!==1)throw new Error("Dimension mismatch");o.count++,o.inputIndices.push(t)}else o={count:1,dimValue:n,inputIndices:[t]};this.symbolToInfo.set(e,o)}processTerm(e,n,t,o=-1){let i=t.length,s=!1,a=[],u=0;if(!e.match(RegExp(jb))&&!n&&e!=="")throw new Error("Invalid LHS term");let l=e.match(RegExp(uu,"g")),f=new lu(o);return l?.forEach((c,p)=>{if(c==="..."){if(s)throw new Error("Only one ellipsis is allowed per input term");s=!0;let b=i-l.length+1;if(b<0)throw new Error("Ellipsis out of bounds");if(a=t.slice(u,u+b),this.hasEllipsis){if(this.ellipsisDims.length!==a.length||this.ellipsisDims.toString()!==a.toString())throw new Error("Ellipsis dimensions mismatch")}else if(n)this.hasEllipsis=!0,this.ellipsisDims=a;else throw new Error("Ellipsis must be specified in the LHS");for(let h=0;hr+"_max",h1=(r,e,n,t)=>{let i=r.map(f=>f.length).map((f,c)=>D(`input${c}`,e,f)),s=B.size(t),a=G("output",e,t.length),u=[...n.symbolToInfo.keys()].filter(f=>!n.rhs.symbolToIndices.has(f)),l=f=>{let c=[],p="var prod = 1.0;",b="var sum = 0.0;",h="sum += prod;",g=[],T=[],w=[],v=[],S=n.symbolToInfo.size===n.rhs.symbolToIndices.size;n.symbolToInfo.forEach((P,E)=>{if(n.rhs.symbolToIndices.has(E)){let N=n.rhs.symbolToIndices.get(E)?.[0];N!==void 0&&n.lhs.forEach((z,q)=>{if(P.inputIndices.includes(q)){let K=z.symbolToIndices.get(E);if(K===void 0)throw new Error("Invalid symbol error");K.forEach(F=>{c.push(`${i[q].indicesSet(`input${q}Indices`,F,a.indicesGet("outputIndices",N))}`)})}})}else n.lhs.forEach((N,z)=>{if(P.inputIndices.includes(z)){let q=N.symbolToIndices.get(E);if(q===void 0)throw new Error("Invalid symbol error");q.forEach(K=>{g.push(`${i[z].indicesSet(`input${z}Indices`,K,`${E}`)}`)}),v.push(`prod *= ${i[z].getByIndices(`input${z}Indices`)};`)}}),T.push(`for(var ${E}: u32 = 0; ${E} < uniforms.${Xb(E)}; ${E}++) {`),w.push("}")});let $=S?[...c,`let sum = ${i.map((P,E)=>P.getByIndices(`input${E}Indices`)).join(" * ")};`]:[...c,b,...T,...g,p,...v,h,...w];return` + }`;return{name:"DepthToSpace",shaderCache:{hint:`${r.dims};${e.blocksize};${e.mode}`,inputDependencies:["rank"]},getRunData:w=>{let 
v=u?[n,t*l,o*l,i/l**2]:[n,i/l**2,t*l,o*l],S=B.size(v),$=c.dims,P=B.sortBasedOnPerm($,a);return{outputs:[{dims:v,dataType:w[0].dataType}],dispatchGroup:{x:Math.ceil(S/64)},programUniforms:[{type:12,data:S},...W($,P)]}},getShaderSource:T}},Hb=(r,e)=>{d1(r.inputs),r.compute(m1(r.inputs[0],e))},qb=r=>de({blocksize:r.blocksize,mode:r.mode,format:r.format})});var lu,Hi,Kb,h1,g1,cu,fu,Xb,b1,Zb,Yb,Jb=C(()=>{"use strict";ue();ye();et();he();lu="[a-zA-Z]|\\.\\.\\.",Hi="("+lu+")+",Kb="^"+Hi+"$",h1="("+Hi+",)*"+Hi,g1="^"+h1+"$",cu=class{constructor(e=-1){this.symbolToIndices=new Map,this.inputIndex=e}addSymbol(e,n){let t=this.symbolToIndices.get(e);t===void 0?t=[n]:t.push(n),this.symbolToIndices.set(e,t)}},fu=class{constructor(e,n){this.equation=n;this.hasEllipsis=!1,this.symbolToInfo=new Map,this.lhs=new Array,this.outputDims=[];let[t,o]=n.includes("->")?n.split("->",2):[n,""];if(!t.match(RegExp(g1)))throw new Error("Invalid LHS term");if(t.split(",").forEach((a,u)=>{let l=e[u].dims.slice();if(!a.match(RegExp(Kb)))throw new Error("Invalid LHS term");let f=this.processTerm(a,!0,l,u);this.lhs.push(f)}),o==="")o+=[...this.symbolToInfo.entries()].filter(([a,u])=>u.count===1||a==="...").map(([a])=>a).join("");else if(!o.match(RegExp(Hi)))throw new Error("Invalid RHS");o.match(RegExp(lu,"g"))?.forEach(a=>{if(a==="...")this.outputDims=this.outputDims.concat(this.ellipsisDims);else{let u=this.symbolToInfo.get(a);if(u===void 0)throw new Error("Invalid RHS symbol");this.outputDims.push(u.dimValue)}}),this.rhs=this.processTerm(o,!1,this.outputDims)}addSymbol(e,n,t){let o=this.symbolToInfo.get(e);if(o!==void 0){if(o.dimValue!==n&&o.count!==1)throw new Error("Dimension mismatch");o.count++,o.inputIndices.push(t)}else o={count:1,dimValue:n,inputIndices:[t]};this.symbolToInfo.set(e,o)}processTerm(e,n,t,o=-1){let i=t.length,s=!1,a=[],u=0;if(!e.match(RegExp(Kb))&&!n&&e!=="")throw new Error("Invalid LHS term");let l=e.match(RegExp(lu,"g")),f=new cu(o);return l?.forEach((c,p)=>{if(c==="..."){if(s)throw new Error("Only one ellipsis is allowed per input term");s=!0;let b=i-l.length+1;if(b<0)throw new Error("Ellipsis out of bounds");if(a=t.slice(u,u+b),this.hasEllipsis){if(this.ellipsisDims.length!==a.length||this.ellipsisDims.toString()!==a.toString())throw new Error("Ellipsis dimensions mismatch")}else if(n)this.hasEllipsis=!0,this.ellipsisDims=a;else throw new Error("Ellipsis must be specified in the LHS");for(let h=0;hr+"_max",b1=(r,e,n,t)=>{let i=r.map(f=>f.length).map((f,c)=>D(`input${c}`,e,f)),s=B.size(t),a=G("output",e,t.length),u=[...n.symbolToInfo.keys()].filter(f=>!n.rhs.symbolToIndices.has(f)),l=f=>{let c=[],p="var prod = 1.0;",b="var sum = 0.0;",h="sum += prod;",g=[],T=[],w=[],v=[],S=n.symbolToInfo.size===n.rhs.symbolToIndices.size;n.symbolToInfo.forEach((P,E)=>{if(n.rhs.symbolToIndices.has(E)){let N=n.rhs.symbolToIndices.get(E)?.[0];N!==void 0&&n.lhs.forEach((z,q)=>{if(P.inputIndices.includes(q)){let j=z.symbolToIndices.get(E);if(j===void 0)throw new Error("Invalid symbol error");j.forEach(F=>{c.push(`${i[q].indicesSet(`input${q}Indices`,F,a.indicesGet("outputIndices",N))}`)})}})}else n.lhs.forEach((N,z)=>{if(P.inputIndices.includes(z)){let q=N.symbolToIndices.get(E);if(q===void 0)throw new Error("Invalid symbol error");q.forEach(j=>{g.push(`${i[z].indicesSet(`input${z}Indices`,j,`${E}`)}`)}),v.push(`prod *= ${i[z].getByIndices(`input${z}Indices`)};`)}}),T.push(`for(var ${E}: u32 = 0; ${E} < uniforms.${Xb(E)}; ${E}++) {`),w.push("}")});let $=S?[...c,`let sum = 
${i.map((P,E)=>P.getByIndices(`input${E}Indices`)).join(" * ")};`]:[...c,b,...T,...g,p,...v,h,...w];return` ${f.registerUniforms(u.map(P=>({name:`${Xb(P)}`,type:"u32"}))).registerUniform("outputSize","u32").declareVariables(...i,a)} ${f.mainStart()} @@ -3212,7 +3212,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${$.join(` `)}; ${a.setByOffset("global_idx","sum")}; - }`};return{name:"Einsum",shaderCache:{hint:n.equation,inputDependencies:r.map(()=>"rank")},getRunData:()=>{let f=u.filter(p=>n.symbolToInfo.has(p)).map(p=>({type:12,data:n.symbolToInfo.get(p)?.dimValue||0}));f.push({type:12,data:s});let c=r.map((p,b)=>[...W(p)]).reduce((p,b)=>p.concat(b),f);return c.push(...W(t)),{outputs:[{dims:t,dataType:e}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:c}},getShaderSource:l}},Zb=(r,e)=>{let n=new cu(r.inputs,e.equation),t=n.outputDims,o=r.inputs.map((i,s)=>i.dims);r.compute(h1(o,r.inputs[0].dataType,n,t))},Yb=r=>{let e=r.equation.replace(/\s+/g,"");return de({equation:e})}});var g1,Qb,b1,y1,ey,ty=C(()=>{"use strict";ue();ye();he();g1=r=>{if(!r||r.length!==2)throw new Error("Expand requires 2 input.");let e=r[0].dims,n=Array.from(r[1].getBigInt64Array(),Number),t=n.length{let n=r.length-e.length,t=[];for(let o=0;or.length>e.length?Qb(r,e):Qb(e,r),y1=r=>{let e=r[0].dims,n=Array.from(r[1].getBigInt64Array(),Number),t=b1(e,n),o=r[0].dataType,i=o===9?4:1,s=Math.ceil(B.size(t)/i),a=l=>{let f=D("input",o,e.length,i),c=G("output",o,t.length,i),p;if(o===9){let b=(h,g,T="")=>` + }`};return{name:"Einsum",shaderCache:{hint:n.equation,inputDependencies:r.map(()=>"rank")},getRunData:()=>{let f=u.filter(p=>n.symbolToInfo.has(p)).map(p=>({type:12,data:n.symbolToInfo.get(p)?.dimValue||0}));f.push({type:12,data:s});let c=r.map((p,b)=>[...W(p)]).reduce((p,b)=>p.concat(b),f);return c.push(...W(t)),{outputs:[{dims:t,dataType:e}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:c}},getShaderSource:l}},Zb=(r,e)=>{let n=new fu(r.inputs,e.equation),t=n.outputDims,o=r.inputs.map((i,s)=>i.dims);r.compute(b1(o,r.inputs[0].dataType,n,t))},Yb=r=>{let e=r.equation.replace(/\s+/g,"");return de({equation:e})}});var y1,Qb,x1,v1,ey,ty=C(()=>{"use strict";ue();ye();he();y1=r=>{if(!r||r.length!==2)throw new Error("Expand requires 2 input.");let e=r[0].dims,n=Array.from(r[1].getBigInt64Array(),Number),t=n.length{let n=r.length-e.length,t=[];for(let o=0;or.length>e.length?Qb(r,e):Qb(e,r),v1=r=>{let e=r[0].dims,n=Array.from(r[1].getBigInt64Array(),Number),t=x1(e,n),o=r[0].dataType,i=o===9?4:1,s=Math.ceil(B.size(t)/i),a=l=>{let f=D("input",o,e.length,i),c=G("output",o,t.length,i),p;if(o===9){let b=(h,g,T="")=>` let outputIndices${g} = ${c.offsetToIndices(`outputOffset + ${g}u`)}; let offset${g} = ${f.broadcastedIndicesToOffset(`outputIndices${g}`,c)}; let index${g} = offset${g} / 4u; @@ -3234,13 +3234,13 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${l.registerUniform("vec_size","u32").declareVariables(f,c)} ${l.mainStart()} ${l.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.vec_size")} - ${p}`},u=[{type:12,data:s},...W(e,t)];return{name:"Expand",shaderCache:{hint:`${t.length}`,inputDependencies:["rank"]},getShaderSource:a,getRunData:()=>({outputs:[{dims:t,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:u})}},ey=r=>{g1(r.inputs),r.compute(y1(r.inputs),{inputs:[0]})}});var x1,ry,ny=C(()=>{"use strict";ue();ye();he();Ni();x1=r=>{let e=r[0].dataType,n=B.size(r[0].dims),t=B.size(r[1].dims),o=t%4===0,i=s=>{let 
a=D("x",e,[1],4),u=D("bias",e,[1],4),l=G("y",e,[1],4),f=[{name:"output_vec_size",type:"u32"},{name:"bias_size",type:"u32"}],c=b=>` + ${p}`},u=[{type:12,data:s},...W(e,t)];return{name:"Expand",shaderCache:{hint:`${t.length}`,inputDependencies:["rank"]},getShaderSource:a,getRunData:()=>({outputs:[{dims:t,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:u})}},ey=r=>{y1(r.inputs),r.compute(v1(r.inputs),{inputs:[0]})}});var w1,ry,ny=C(()=>{"use strict";ue();ye();he();Ni();w1=r=>{let e=r[0].dataType,n=B.size(r[0].dims),t=B.size(r[1].dims),o=t%4===0,i=s=>{let a=D("x",e,[1],4),u=D("bias",e,[1],4),l=G("y",e,[1],4),f=[{name:"output_vec_size",type:"u32"},{name:"bias_size",type:"u32"}],c=b=>` let bias${b}_offset: u32 = (global_idx * 4 + ${b}) % uniforms.bias_size; let bias${b} = ${u.getByOffset(`bias${b}_offset / 4`)}[bias${b}_offset % 4];`,p=o?` let bias = ${u.getByOffset("global_idx % (uniforms.bias_size / 4)")};`:`${c(0)}${c(1)}${c(2)}${c(3)} let bias = ${a.type.value}(bias0, bias1, bias2, bias3);`;return`${s.registerUniforms(f).declareVariables(a,u,l)} - ${Zs(ct(e))} + ${Ys(ct(e))} ${s.mainStart(Zr)} ${s.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_vec_size")} @@ -3248,8 +3248,8 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let x = ${a.getByOffset("global_idx")}; ${p} let x_in = x + bias; - ${l.setByOffset("global_idx",Ys("x_in"))} - }`};return{name:"FastGeluWithBias",shaderCache:{hint:`${o}`,inputDependencies:["type","type"]},getShaderSource:i,getRunData:s=>({outputs:[{dims:s[0].dims,dataType:s[0].dataType}],programUniforms:[{type:12,data:Math.ceil(n/4)},{type:12,data:t}],dispatchGroup:{x:Math.ceil(n/Zr/4)}})}},ry=r=>{r.inputs.length<2||B.size(r.inputs[1].dims)===0?nb(r):r.compute(x1(r.inputs))}});var v1,w1,oy,iy,ay=C(()=>{"use strict";ue();ye();et();he();v1=r=>{if(!r||r.length!==2)throw new Error("Gather requires 2 inputs.")},w1=(r,e)=>{let n=r[0].dims,t=r[1].dims,o=n.length,i=B.normalizeAxis(e.axis,o),s=n.slice(0);s.splice(i,1,...t);let a=n[i],u=r[0].dataType===9?4:1,l=Math.ceil(B.size(s)/u),f=[{type:12,data:l},{type:6,data:a},{type:12,data:i},...W(r[0].dims,r[1].dims,s)],c=p=>{let b=D("data",r[0].dataType,r[0].dims.length,u),h=D("inputIndices",r[1].dataType,r[1].dims.length),g=G("output",r[0].dataType,s.length,u),T=v=>{let S=t.length,$=`var indicesIndices${v} = ${h.type.indices}(0);`;for(let P=0;P1?`indicesIndices${v}[${P}]`:`indicesIndices${v}`} = ${s.length>1?`outputIndices${v}[uniforms.axis + ${P}]`:`outputIndices${v}`};`;$+=` + ${l.setByOffset("global_idx",Js("x_in"))} + }`};return{name:"FastGeluWithBias",shaderCache:{hint:`${o}`,inputDependencies:["type","type"]},getShaderSource:i,getRunData:s=>({outputs:[{dims:s[0].dims,dataType:s[0].dataType}],programUniforms:[{type:12,data:Math.ceil(n/4)},{type:12,data:t}],dispatchGroup:{x:Math.ceil(n/Zr/4)}})}},ry=r=>{r.inputs.length<2||B.size(r.inputs[1].dims)===0?nb(r):r.compute(w1(r.inputs))}});var T1,_1,oy,iy,ay=C(()=>{"use strict";ue();ye();et();he();T1=r=>{if(!r||r.length!==2)throw new Error("Gather requires 2 inputs.")},_1=(r,e)=>{let n=r[0].dims,t=r[1].dims,o=n.length,i=B.normalizeAxis(e.axis,o),s=n.slice(0);s.splice(i,1,...t);let a=n[i],u=r[0].dataType===9?4:1,l=Math.ceil(B.size(s)/u),f=[{type:12,data:l},{type:6,data:a},{type:12,data:i},...W(r[0].dims,r[1].dims,s)],c=p=>{let b=D("data",r[0].dataType,r[0].dims.length,u),h=D("inputIndices",r[1].dataType,r[1].dims.length),g=G("output",r[0].dataType,s.length,u),T=v=>{let S=t.length,$=`var indicesIndices${v} = ${h.type.indices}(0);`;for(let 
P=0;P1?`indicesIndices${v}[${P}]`:`indicesIndices${v}`} = ${s.length>1?`outputIndices${v}[uniforms.axis + ${P}]`:`outputIndices${v}`};`;$+=` var idx${v} = ${h.getByIndices(`indicesIndices${v}`)}; if (idx${v} < 0) { idx${v} = idx${v} + uniforms.axisDimLimit; @@ -3280,8 +3280,8 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${p.mainStart()} ${p.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} ${w} - }`};return{name:"Gather",shaderCache:{hint:e.cacheKey,inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:s,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(l/64)},programUniforms:f}),getShaderSource:c}},oy=r=>de({axis:r.axis}),iy=(r,e)=>{let n=r.inputs;v1(n),r.compute(w1(r.inputs,e))}});var T1,_1,sy,uy,ly=C(()=>{"use strict";ue();ye();et();he();T1=r=>{if(!r||r.length!==2)throw new Error("GatherElements requires 2 inputs.");if(r[0].dims.length<1)throw new Error("GatherElements requires that the data input be rank >= 1.");if(r[0].dims.length!==r[1].dims.length)throw new Error(`GatherElements requires that the data input and - indices input tensors be of same rank.`)},_1=(r,e)=>{let n=r[0].dims,t=r[0].dataType,o=n.length,i=r[1].dims,s=r[1].dataType,a=B.normalizeAxis(e.axis,o),u=n[a],l=i.slice(0),f=B.size(l),c=D("input",t,o),p=D("indicesInput",s,i.length),b=G("output",t,l.length),h=[{type:12,data:f},{type:6,data:u},{type:12,data:a}];return h.push(...W(n,i,l)),{name:"GatherElements",shaderCache:{inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:l,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:h}),getShaderSource:w=>` + }`};return{name:"Gather",shaderCache:{hint:e.cacheKey,inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:s,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(l/64)},programUniforms:f}),getShaderSource:c}},oy=r=>de({axis:r.axis}),iy=(r,e)=>{let n=r.inputs;T1(n),r.compute(_1(r.inputs,e))}});var I1,S1,sy,uy,ly=C(()=>{"use strict";ue();ye();et();he();I1=r=>{if(!r||r.length!==2)throw new Error("GatherElements requires 2 inputs.");if(r[0].dims.length<1)throw new Error("GatherElements requires that the data input be rank >= 1.");if(r[0].dims.length!==r[1].dims.length)throw new Error(`GatherElements requires that the data input and + indices input tensors be of same rank.`)},S1=(r,e)=>{let n=r[0].dims,t=r[0].dataType,o=n.length,i=r[1].dims,s=r[1].dataType,a=B.normalizeAxis(e.axis,o),u=n[a],l=i.slice(0),f=B.size(l),c=D("input",t,o),p=D("indicesInput",s,i.length),b=G("output",t,l.length),h=[{type:12,data:f},{type:6,data:u},{type:12,data:a}];return h.push(...W(n,i,l)),{name:"GatherElements",shaderCache:{inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:l,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:h}),getShaderSource:w=>` ${w.registerUniform("outputSize","u32").registerUniform("axisDimLimit","i32").registerUniform("axis","u32").declareVariables(c,p,b)} ${w.mainStart()} ${w.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} @@ -3297,7 +3297,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let value = ${c.getByIndices("inputIndices")}; ${b.setByOffset("global_idx","value")}; - }`}},sy=r=>de({axis:r.axis}),uy=(r,e)=>{let n=r.inputs;T1(n),r.compute(_1(r.inputs,e))}});var I1,S1,cy,fy,dy=C(()=>{"use strict";ue();ye();he();I1=r=>{if(!r)throw new Error("Input is missing");if(r.length<2||r.length>3)throw new Error("Invaid input number.");if(r.length===3&&r[2].dims.length>2)throw new Error("Invalid input shape of 
C");if(r[0].dataType!==r[1].dataType||r.length===3&&r[0].dataType!==r[2].dataType)throw new Error("Input types are mismatched")},S1=(r,e)=>{let n=r[0].dims.slice(),t=r[1].dims.slice(),[o,i,s]=Oi.getShapeOfGemmResult(n,e.transA,t,e.transB,r.length===3?r[2].dims:void 0),a=[o,i];if(!a)throw new Error("Can't use gemm on the given tensors");let u=B.size(a),l=[{type:12,data:u},{type:12,data:o},{type:12,data:i},{type:12,data:s},{type:1,data:e.alpha},{type:1,data:e.beta}],f=["type","type"];r.length===3&&(l.push(...W(r[2].dims)),f.push("rank")),l.push(...W(a));let c=p=>{let b="";e.transA&&e.transB?b="value += a[k * uniforms.M + m] * b[n * uniforms.K + k];":e.transA&&!e.transB?b="value += a[k * uniforms.M + m] * b[k * uniforms.N + n];":!e.transA&&e.transB?b="value += a[m * uniforms.K + k] * b[n * uniforms.K + k];":!e.transA&&!e.transB&&(b="value += a[m * uniforms.K + k] * b[k * uniforms.N + n];");let h=e.alpha===1?"":"value *= uniforms.alpha;",g=D("a",r[0].dataType,r[0].dims),T=D("b",r[1].dataType,r[1].dims),w=g.type.value,v=null,S=[g,T];r.length===3&&(v=D("c",r[2].dataType,r[2].dims.length),S.push(v));let $=G("output",r[0].dataType,a.length);S.push($);let P=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"},{name:"alpha",type:"f32"},{name:"beta",type:"f32"}];return` + }`}},sy=r=>de({axis:r.axis}),uy=(r,e)=>{let n=r.inputs;I1(n),r.compute(S1(r.inputs,e))}});var $1,A1,cy,fy,dy=C(()=>{"use strict";ue();ye();he();$1=r=>{if(!r)throw new Error("Input is missing");if(r.length<2||r.length>3)throw new Error("Invaid input number.");if(r.length===3&&r[2].dims.length>2)throw new Error("Invalid input shape of C");if(r[0].dataType!==r[1].dataType||r.length===3&&r[0].dataType!==r[2].dataType)throw new Error("Input types are mismatched")},A1=(r,e)=>{let n=r[0].dims.slice(),t=r[1].dims.slice(),[o,i,s]=Oi.getShapeOfGemmResult(n,e.transA,t,e.transB,r.length===3?r[2].dims:void 0),a=[o,i];if(!a)throw new Error("Can't use gemm on the given tensors");let u=B.size(a),l=[{type:12,data:u},{type:12,data:o},{type:12,data:i},{type:12,data:s},{type:1,data:e.alpha},{type:1,data:e.beta}],f=["type","type"];r.length===3&&(l.push(...W(r[2].dims)),f.push("rank")),l.push(...W(a));let c=p=>{let b="";e.transA&&e.transB?b="value += a[k * uniforms.M + m] * b[n * uniforms.K + k];":e.transA&&!e.transB?b="value += a[k * uniforms.M + m] * b[k * uniforms.N + n];":!e.transA&&e.transB?b="value += a[m * uniforms.K + k] * b[n * uniforms.K + k];":!e.transA&&!e.transB&&(b="value += a[m * uniforms.K + k] * b[k * uniforms.N + n];");let h=e.alpha===1?"":"value *= uniforms.alpha;",g=D("a",r[0].dataType,r[0].dims),T=D("b",r[1].dataType,r[1].dims),w=g.type.value,v=null,S=[g,T];r.length===3&&(v=D("c",r[2].dataType,r[2].dims.length),S.push(v));let $=G("output",r[0].dataType,a.length);S.push($);let P=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"},{name:"alpha",type:"f32"},{name:"beta",type:"f32"}];return` ${p.registerUniforms(P).declareVariables(...S)} ${p.mainStart()} @@ -3314,14 +3314,14 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${h} ${(()=>v!=null?`let cOffset = ${v.broadcastedIndicesToOffset("vec2(m, n)",$)}; value += ${w}(uniforms.beta) * ${v.getByOffset("cOffset")};`:"")()} output[global_idx] = value; - }`};return{name:"Gemm",shaderCache:{hint:`${e.cacheKey}`,inputDependencies:f},getRunData:()=>({outputs:[{dims:a,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:l}),getShaderSource:c}},cy=r=>{let 
e=r.transA,n=r.transB,t=r.alpha,o=r.beta;return{transA:e,transB:n,alpha:t,beta:o,cacheKey:`${r.transA};${r.transB};${r.alpha===1}`}},fy=(r,e)=>{I1(r.inputs),r.compute(S1(r.inputs,e))}});var _t,P1,my,py,O1,eo,hy,fu=C(()=>{"use strict";ue();ye();et();Pi();Li();he();Jr();_t=(r,e)=>r.length>e&&r[e].dims.length>0&&B.size(r[e].dims)>0?r[e]:void 0,P1=(r,e)=>{let n=r[0],t=_t(r,1),o=_t(r,2),i=_t(r,3),s=_t(r,4),a=_t(r,5),u=_t(r,6),l=_t(r,7);if(n.dims.length!==3&&n.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let f=!1,c=n.dims[0],p=n.dims[1],b=n.dims.length===3?f?n.dims[2]/3:n.dims[2]:e.numHeads*n.dims[4],h=p,g=0,T=0,w=Math.floor(b/e.numHeads);if(u&&l){if(u.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(u.dims[0]!==c||u.dims[1]!==e.numHeads||u.dims[3]!==w)throw new Error('Input "past_key" shape (batch_size, num_heads, past_sequence_length, head_size)');if(l.dims[0]!==c||l.dims[1]!==e.numHeads||l.dims[3]!==w)throw new Error('Input "past_value" shape (batch_size, num_heads, past_sequence_length, head_size)');if(u.dims[2]!==l.dims[2])throw new Error('Input "past_key" and "past_value" shall have same dim 2 (past_sequence_length)');if(l.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 dimensions');g=u.dims[2],T=u.dims[2]}else if(u||l)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let v;if(t){if(n.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(t.dims.length<3||t.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(n.dims[0]!==t.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch size)');if(t.dims.length===3){if(t.dims[2]!==n.dims[2])throw new Error('Input "query" and "key" shall have same dim 2 (hidden_size)');v=2,h=t.dims[1]}else if(t.dims.length===5){if(t.dims[2]!==e.numHeads||t.dims[3]!==2||t.dims[4]!==w)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect "value" be none when "key" has packed kv format.');v=5,h=t.dims[1]}else{if(t.dims[1]!==e.numHeads||t.dims[3]!==w)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');v=0,h=t.dims[2]}}else{if(n.dims.length!==3&&n.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(n.dims.length===5&&(n.dims[2]!==e.numHeads||n.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');v=3}if(i){if(i.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimension');if(o&&n.dims.length===5&&n.dims[3]===2)throw new Error("bias is not allowed for packed kv.")}let S=0;if(s){S=8;let z=s.dims;throw z.length===1?z[0]===c?S=1:z[0]===3*c+2&&(S=3):z.length===2&&z[0]===c&&z[1]===h&&(S=5),S===8?new Error('Input "key_padding_mask" shape shall be (batch_size) or (batch_size, kv_sequence_length)'):new Error("Mask not supported")}let $=!1,P=b;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(n.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(h!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');P=o.dims[2]}else{if(h!==o.dims[2])throw new Error('Input "past_key" 
and "past_value" shall have the same dim 2 (kv_sequence_length)');P=o.dims[1]*o.dims[3],$=!0}}let E=g+h,N=!1;if(s)throw new Error("Key padding mask is not supported");if(a){if(a.dims.length!==4)throw new Error('Input "relative_position_bias" is expected to have 4 dimensions');if(a.dims[0]!==c&&a.dims[0]!==1||a.dims[1]!==e.numHeads||a.dims[2]!==p||a.dims[3]!==E)throw new Error('Input "relative_position_bias" shape (batch_size, 1, sequence_length, kv_sequence_length)')}return{batchSize:c,sequenceLength:p,pastSequenceLength:g,kvSequenceLength:h,totalSequenceLength:E,maxSequenceLength:T,inputHiddenSize:0,hiddenSize:b,vHiddenSize:P,headSize:w,vHeadSize:Math.floor(P/e.numHeads),numHeads:e.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:e.maskFilterValue,maskType:S,scale:e.scale,broadcastResPosBias:N,passPastInKv:$,qkvFormat:v}},my=r=>de({...r}),py=de({perm:[0,2,1,3]}),O1=(r,e,n,t,o,i,s)=>{let a=[t,o,i],u=B.size(a),l=[{type:12,data:u},{type:12,data:s},{type:12,data:i}],f=c=>{let p=G("qkv_with_bias",e.dataType,a),b=D("qkv",e.dataType,a),h=D("bias",n.dataType,a),g=[{name:"output_size",type:"u32"},{name:"bias_offset",type:"u32"},{name:"hidden_size",type:"u32"}];return` + }`};return{name:"Gemm",shaderCache:{hint:`${e.cacheKey}`,inputDependencies:f},getRunData:()=>({outputs:[{dims:a,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:l}),getShaderSource:c}},cy=r=>{let e=r.transA,n=r.transB,t=r.alpha,o=r.beta;return{transA:e,transB:n,alpha:t,beta:o,cacheKey:`${r.transA};${r.transB};${r.alpha===1}`}},fy=(r,e)=>{$1(r.inputs),r.compute(A1(r.inputs,e))}});var _t,E1,my,py,C1,eo,hy,du=C(()=>{"use strict";ue();ye();et();Pi();Li();he();Jr();_t=(r,e)=>r.length>e&&r[e].dims.length>0&&B.size(r[e].dims)>0?r[e]:void 0,E1=(r,e)=>{let n=r[0],t=_t(r,1),o=_t(r,2),i=_t(r,3),s=_t(r,4),a=_t(r,5),u=_t(r,6),l=_t(r,7);if(n.dims.length!==3&&n.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let f=!1,c=n.dims[0],p=n.dims[1],b=n.dims.length===3?f?n.dims[2]/3:n.dims[2]:e.numHeads*n.dims[4],h=p,g=0,T=0,w=Math.floor(b/e.numHeads);if(u&&l){if(u.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(u.dims[0]!==c||u.dims[1]!==e.numHeads||u.dims[3]!==w)throw new Error('Input "past_key" shape (batch_size, num_heads, past_sequence_length, head_size)');if(l.dims[0]!==c||l.dims[1]!==e.numHeads||l.dims[3]!==w)throw new Error('Input "past_value" shape (batch_size, num_heads, past_sequence_length, head_size)');if(u.dims[2]!==l.dims[2])throw new Error('Input "past_key" and "past_value" shall have same dim 2 (past_sequence_length)');if(l.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 dimensions');g=u.dims[2],T=u.dims[2]}else if(u||l)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let v;if(t){if(n.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(t.dims.length<3||t.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(n.dims[0]!==t.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch size)');if(t.dims.length===3){if(t.dims[2]!==n.dims[2])throw new Error('Input "query" and "key" shall have same dim 2 (hidden_size)');v=2,h=t.dims[1]}else if(t.dims.length===5){if(t.dims[2]!==e.numHeads||t.dims[3]!==2||t.dims[4]!==w)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect 
"value" be none when "key" has packed kv format.');v=5,h=t.dims[1]}else{if(t.dims[1]!==e.numHeads||t.dims[3]!==w)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');v=0,h=t.dims[2]}}else{if(n.dims.length!==3&&n.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(n.dims.length===5&&(n.dims[2]!==e.numHeads||n.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');v=3}if(i){if(i.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimension');if(o&&n.dims.length===5&&n.dims[3]===2)throw new Error("bias is not allowed for packed kv.")}let S=0;if(s){S=8;let z=s.dims;throw z.length===1?z[0]===c?S=1:z[0]===3*c+2&&(S=3):z.length===2&&z[0]===c&&z[1]===h&&(S=5),S===8?new Error('Input "key_padding_mask" shape shall be (batch_size) or (batch_size, kv_sequence_length)'):new Error("Mask not supported")}let $=!1,P=b;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(n.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(h!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');P=o.dims[2]}else{if(h!==o.dims[2])throw new Error('Input "past_key" and "past_value" shall have the same dim 2 (kv_sequence_length)');P=o.dims[1]*o.dims[3],$=!0}}let E=g+h,N=!1;if(s)throw new Error("Key padding mask is not supported");if(a){if(a.dims.length!==4)throw new Error('Input "relative_position_bias" is expected to have 4 dimensions');if(a.dims[0]!==c&&a.dims[0]!==1||a.dims[1]!==e.numHeads||a.dims[2]!==p||a.dims[3]!==E)throw new Error('Input "relative_position_bias" shape (batch_size, 1, sequence_length, kv_sequence_length)')}return{batchSize:c,sequenceLength:p,pastSequenceLength:g,kvSequenceLength:h,totalSequenceLength:E,maxSequenceLength:T,inputHiddenSize:0,hiddenSize:b,vHiddenSize:P,headSize:w,vHeadSize:Math.floor(P/e.numHeads),numHeads:e.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:e.maskFilterValue,maskType:S,scale:e.scale,broadcastResPosBias:N,passPastInKv:$,qkvFormat:v}},my=r=>de({...r}),py=de({perm:[0,2,1,3]}),C1=(r,e,n,t,o,i,s)=>{let a=[t,o,i],u=B.size(a),l=[{type:12,data:u},{type:12,data:s},{type:12,data:i}],f=c=>{let p=G("qkv_with_bias",e.dataType,a),b=D("qkv",e.dataType,a),h=D("bias",n.dataType,a),g=[{name:"output_size",type:"u32"},{name:"bias_offset",type:"u32"},{name:"hidden_size",type:"u32"}];return` ${c.registerUniforms(g).declareVariables(b,h,p)} ${c.mainStart()} ${c.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset; qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx]; - }`};return r.compute({name:"MultiHeadAttentionAddBias",shaderCache:{inputDependencies:["type","type"]},getRunData:()=>({outputs:[{dims:a,dataType:e.dataType,gpuDataType:0}],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:l}),getShaderSource:f},{inputs:[e,n],outputs:[-1]})[0]},eo=(r,e,n,t,o,i,s,a)=>{let u=i;if(s){if(t===1)throw new Error("AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV");return u=O1(r,i,s,e,t,n*o,a),u=u.reshape([e,t,n,o]),r.compute(Tt(u,py.perm),{inputs:[u],outputs:[-1]})[0]}else return i.dims.length===3&&(u=i.reshape([e,t,n,o])),r.compute(Tt(u,py.perm),{inputs:[u],outputs:[-1]})[0]},hy=(r,e)=>{let n=P1(r.inputs,e),t=r.inputs[0],o=_t(r.inputs,1),i=_t(r.inputs,2),s=_t(r.inputs,3),a=_t(r.inputs,4),u=_t(r.inputs,5),l=_t(r.inputs,6),f=_t(r.inputs,7);if(t.dims.length===5)throw new Error("Packed QKV is not implemented");if(o?.dims.length===5)throw new Error("Packed KV is not implemented");let c=o&&i&&o.dims.length===4&&i.dims.length===4,p=eo(r,n.batchSize,n.numHeads,n.sequenceLength,n.headSize,t,s,0);if(c)return In(r,p,o,i,a,void 0,l,f,u,n,e);if(!o||!i)throw new Error("key and value must be provided");let b=eo(r,n.batchSize,n.numHeads,n.kvSequenceLength,n.headSize,o,s,n.hiddenSize),h=eo(r,n.batchSize,n.numHeads,n.kvSequenceLength,n.vHeadSize,i,s,2*n.hiddenSize);In(r,p,b,h,a,void 0,l,f,u,n,e)}});var gy,E1,C1,du,by,pu=C(()=>{"use strict";ue();ye();he();gy=r=>Array.from(r.getBigInt64Array(),Number),E1=r=>{if(!r||r.length!==2)throw new Error("Tile requires 2 inputs.");if(r[0].dataType!==1&&r[0].dataType!==10&&r[0].dataType!==6&&r[0].dataType!==12)throw new Error("Tile only support float, float16, int32, and uint32 data types");if(r[1].dataType!==7)throw new Error("Tile `repeats` input should be of int64 data type");if(r[1].dims.length!==1)throw new Error("Tile `repeats` input should be 1-D");if(gy(r[1]).length!==r[0].dims.length)throw new Error("Tile `repeats` input should have same number of elements as rank of input data tensor")},C1=(r,e)=>{let n=[];for(let t=0;t{let n=r[0].dims,t=e??gy(r[1]),o=C1(n,t),i=B.size(o),s=r[0].dataType,a=D("input",s,n.length),u=G("output",s,o.length),l=f=>` + }`};return r.compute({name:"MultiHeadAttentionAddBias",shaderCache:{inputDependencies:["type","type"]},getRunData:()=>({outputs:[{dims:a,dataType:e.dataType,gpuDataType:0}],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:l}),getShaderSource:f},{inputs:[e,n],outputs:[-1]})[0]},eo=(r,e,n,t,o,i,s,a)=>{let u=i;if(s){if(t===1)throw new Error("AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV");return u=C1(r,i,s,e,t,n*o,a),u=u.reshape([e,t,n,o]),r.compute(Tt(u,py.perm),{inputs:[u],outputs:[-1]})[0]}else return i.dims.length===3&&(u=i.reshape([e,t,n,o])),r.compute(Tt(u,py.perm),{inputs:[u],outputs:[-1]})[0]},hy=(r,e)=>{let n=E1(r.inputs,e),t=r.inputs[0],o=_t(r.inputs,1),i=_t(r.inputs,2),s=_t(r.inputs,3),a=_t(r.inputs,4),u=_t(r.inputs,5),l=_t(r.inputs,6),f=_t(r.inputs,7);if(t.dims.length===5)throw new Error("Packed QKV is not implemented");if(o?.dims.length===5)throw new Error("Packed KV is not implemented");let c=o&&i&&o.dims.length===4&&i.dims.length===4,p=eo(r,n.batchSize,n.numHeads,n.sequenceLength,n.headSize,t,s,0);if(c)return Sn(r,p,o,i,a,void 0,l,f,u,n,e);if(!o||!i)throw new Error("key and value must be provided");let b=eo(r,n.batchSize,n.numHeads,n.kvSequenceLength,n.headSize,o,s,n.hiddenSize),h=eo(r,n.batchSize,n.numHeads,n.kvSequenceLength,n.vHeadSize,i,s,2*n.hiddenSize);Sn(r,p,b,h,a,void 0,l,f,u,n,e)}});var gy,k1,D1,pu,by,mu=C(()=>{"use strict";ue();ye();he();gy=r=>Array.from(r.getBigInt64Array(),Number),k1=r=>{if(!r||r.length!==2)throw new Error("Tile requires 2 inputs.");if(r[0].dataType!==1&&r[0].dataType!==10&&r[0].dataType!==6&&r[0].dataType!==12)throw new Error("Tile only support float, float16, int32, and uint32 data types");if(r[1].dataType!==7)throw new Error("Tile `repeats` input should be of int64 data type");if(r[1].dims.length!==1)throw new Error("Tile `repeats` input should be 1-D");if(gy(r[1]).length!==r[0].dims.length)throw new Error("Tile `repeats` input should have same number of elements as rank of input data tensor")},D1=(r,e)=>{let n=[];for(let t=0;t{let n=r[0].dims,t=e??gy(r[1]),o=D1(n,t),i=B.size(o),s=r[0].dataType,a=D("input",s,n.length),u=G("output",s,o.length),l=f=>` const inputShape = ${a.indices(...n)}; ${f.registerUniform("output_size","u32").declareVariables(a,u)} ${f.mainStart()} @@ -3335,7 +3335,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${a.indicesSet("input_indices","i","input_dim_value")} } ${u.setByOffset("global_idx",a.getByIndices("input_indices"))} - }`;return{name:"Tile",shaderCache:{hint:`${t}`,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:o,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)},programUniforms:[{type:12,data:i},...W(r[0].dims,o)]}),getShaderSource:l}},by=r=>{E1(r.inputs),r.compute(du(r.inputs),{inputs:[0]})}});var k1,yy,vy,D1,xy,wy,Ty=C(()=>{"use strict";ue();ye();et();Li();he();fu();pu();Jr();k1=(r,e)=>{let n=r[0],t=r[1],o=r[2],i=r[3],s=r[4];if(n.dims.length!==3&&n.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let a=!1,u=n.dims[0],l=n.dims[1],f=n.dims.length===3?a?n.dims[2]/3:n.dims[2]:e.numHeads*n.dims[4],c=l,p=0,b=0,h=Math.floor(f/e.numHeads),g=i&&i.dims.length!==0,T=s&&s.dims.length!==0,w=!0;if(g&&T){if(i.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(s.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 dimensions');w?(p=i.dims[1],b=i.dims[1]):(p=i.dims[2],b=i.dims[2])}else if(g||T)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let v;if(t){if(n.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(t.dims.length<3||t.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(n.dims[0]!==t.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch 
size)');if(t.dims.length===3){if(n.dims[2]%t.dims[2]!==0)throw new Error('Dimension 2 of "query" should be a multiple of "key"');v=2,c=t.dims[1]}else if(t.dims.length===5){if(t.dims[2]!==e.numHeads||t.dims[3]!==2||t.dims[4]!==h)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect "value" be none when "key" has packed kv format.');v=5,c=t.dims[1]}else{if(t.dims[1]!==e.numHeads||t.dims[3]!==h)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');v=0,c=t.dims[2]}}else{if(n.dims.length!==3&&n.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(n.dims.length===5&&(n.dims[2]!==e.numHeads||n.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');v=3}let S=0,$=!1,P=f;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(n.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(c!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');P=o.dims[2]}else{if(c!==o.dims[2])throw new Error('Input "past_key" and "past_value" shall have the same dim 2 (kv_sequence_length)');P=o.dims[1]*o.dims[3],$=!0}}let E=p+c,N=!1;return{batchSize:u,sequenceLength:l,pastSequenceLength:p,kvSequenceLength:c,totalSequenceLength:E,maxSequenceLength:b,inputHiddenSize:0,hiddenSize:f,vHiddenSize:P,headSize:h,vHeadSize:Math.floor(P/e.kvNumHeads),numHeads:e.numHeads,kvNumHeads:e.kvNumHeads,nReps:e.numHeads/e.kvNumHeads,pastPresentShareBuffer:!1,maskType:S,scale:e.scale,broadcastResPosBias:N,passPastInKv:$,qkvFormat:v,isPastkvBSNH:w}},yy=(r,e,n,t)=>{let o=[t.batchSize,t.totalSequenceLength,t.kvNumHeads,t.headSize],i=4,s=B.size(o)/i,a=t.totalSequenceLength,u=G("present_kv",n,o.length,i),l=D("new_kv",r.dataType,r.dims.length,i),f=e?D("past_kv",e.dataType,e.dims.length,i):void 0,c=Math.ceil(t.headSize/i),p={x:a,y:r.dims[0],z:1},b=e?["rank","rank"]:["rank"],h=[{type:12,data:s},{type:12,data:t.pastSequenceLength},{type:12,data:t.kvSequenceLength},{type:12,data:t.totalSequenceLength}],g=[l];f?(h.push(...W(r.dims),...W(e.dims),...W(o)),g.push(f)):h.push(...W(r.dims),...W(o));let T=[{name:"output_size",type:"u32"},{name:"past_seqlen",type:"u32"},{name:"new_seqlen",type:"u32"},{name:"present_seqlen",type:"u32"}],w=` let past_batch_stride = uniforms.past_seqlen * num_heads * H; + }`;return{name:"Tile",shaderCache:{hint:`${t}`,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:o,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)},programUniforms:[{type:12,data:i},...W(r[0].dims,o)]}),getShaderSource:l}},by=r=>{k1(r.inputs),r.compute(pu(r.inputs),{inputs:[0]})}});var B1,yy,vy,L1,xy,wy,Ty=C(()=>{"use strict";ue();ye();et();Li();he();du();mu();Jr();B1=(r,e)=>{let n=r[0],t=r[1],o=r[2],i=r[3],s=r[4];if(n.dims.length!==3&&n.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let a=!1,u=n.dims[0],l=n.dims[1],f=n.dims.length===3?a?n.dims[2]/3:n.dims[2]:e.numHeads*n.dims[4],c=l,p=0,b=0,h=Math.floor(f/e.numHeads),g=i&&i.dims.length!==0,T=s&&s.dims.length!==0,w=!0;if(g&&T){if(i.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(s.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 
dimensions');w?(p=i.dims[1],b=i.dims[1]):(p=i.dims[2],b=i.dims[2])}else if(g||T)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let v;if(t){if(n.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(t.dims.length<3||t.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(n.dims[0]!==t.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch size)');if(t.dims.length===3){if(n.dims[2]%t.dims[2]!==0)throw new Error('Dimension 2 of "query" should be a multiple of "key"');v=2,c=t.dims[1]}else if(t.dims.length===5){if(t.dims[2]!==e.numHeads||t.dims[3]!==2||t.dims[4]!==h)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect "value" be none when "key" has packed kv format.');v=5,c=t.dims[1]}else{if(t.dims[1]!==e.numHeads||t.dims[3]!==h)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');v=0,c=t.dims[2]}}else{if(n.dims.length!==3&&n.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(n.dims.length===5&&(n.dims[2]!==e.numHeads||n.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');v=3}let S=0,$=!1,P=f;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(n.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(c!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');P=o.dims[2]}else{if(c!==o.dims[2])throw new Error('Input "past_key" and "past_value" shall have the same dim 2 (kv_sequence_length)');P=o.dims[1]*o.dims[3],$=!0}}let E=p+c,N=!1;return{batchSize:u,sequenceLength:l,pastSequenceLength:p,kvSequenceLength:c,totalSequenceLength:E,maxSequenceLength:b,inputHiddenSize:0,hiddenSize:f,vHiddenSize:P,headSize:h,vHeadSize:Math.floor(P/e.kvNumHeads),numHeads:e.numHeads,kvNumHeads:e.kvNumHeads,nReps:e.numHeads/e.kvNumHeads,pastPresentShareBuffer:!1,maskType:S,scale:e.scale,broadcastResPosBias:N,passPastInKv:$,qkvFormat:v,isPastkvBSNH:w}},yy=(r,e,n,t)=>{let o=[t.batchSize,t.totalSequenceLength,t.kvNumHeads,t.headSize],i=4,s=B.size(o)/i,a=t.totalSequenceLength,u=G("present_kv",n,o.length,i),l=D("new_kv",r.dataType,r.dims.length,i),f=e?D("past_kv",e.dataType,e.dims.length,i):void 0,c=Math.ceil(t.headSize/i),p={x:a,y:r.dims[0],z:1},b=e?["rank","rank"]:["rank"],h=[{type:12,data:s},{type:12,data:t.pastSequenceLength},{type:12,data:t.kvSequenceLength},{type:12,data:t.totalSequenceLength}],g=[l];f?(h.push(...W(r.dims),...W(e.dims),...W(o)),g.push(f)):h.push(...W(r.dims),...W(o));let T=[{name:"output_size",type:"u32"},{name:"past_seqlen",type:"u32"},{name:"new_seqlen",type:"u32"},{name:"present_seqlen",type:"u32"}],w=` let past_batch_stride = uniforms.past_seqlen * num_heads * H; var past_head_stride = uniforms.past_seqlen * H; if (is_bsnh) { past_head_stride = H; @@ -3381,7 +3381,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let out_offset = b * present_batch_stride + s * row_stride + n * present_head_stride + h; ${S} - 
}`;return{name:"ConcatPastNew",shaderCache:{hint:`${t.kvNumHeads}${c}${!!e}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:o,dataType:n}],dispatchGroup:p,programUniforms:h}),getShaderSource:$}},vy=r=>de({...r}),D1=de({perm:[0,2,1,3]}),xy=(r,e,n,t,o)=>{let i=e,s=t.kvNumHeads,a=t.nReps;return e.dims.length===3&&t.kvSequenceLength!==0&&(i=e.reshape([t.batchSize,t.kvSequenceLength,s,t.headSize])),n?i=r.compute(yy(i,n,i.dataType,t),{inputs:[i,n],outputs:[t.isPastkvBSNH?o:-1]})[0]:i=r.compute(yy(i,void 0,i.dataType,t),{inputs:[i],outputs:[t.isPastkvBSNH?o:-1]})[0],a!==1&&(i=r.compute(du([i],[1,1,1,a]),{inputs:[i],outputs:[-1]})[0],i=i.reshape([t.batchSize,t.totalSequenceLength,s*a,t.headSize])),r.compute(Tt(i,D1.perm),{inputs:[i],outputs:[-1]})[0]},wy=(r,e)=>{let n=k1(r.inputs,e);if(r.inputs[0].dims.length===5)throw new Error("Packed QKV is not implemented");if(r.inputs[1]?.dims.length===5)throw new Error("Packed KV is not implemented");let t=eo(r,n.batchSize,n.numHeads,n.sequenceLength,n.headSize,r.inputs[0],void 0,0),o=r.inputs[3]&&r.inputs[3].dims.length!==0?r.inputs[3]:void 0,i=r.inputs[4]&&r.inputs[4].dims.length!==0?r.inputs[4]:void 0,s=xy(r,r.inputs[1],o,n,1),a=xy(r,r.inputs[2],i,n,2);In(r,t,s,a,void 0,void 0,void 0,void 0,void 0,n,e)}});var B1,L1,R1,_y,Iy=C(()=>{"use strict";ue();ye();he();B1=(r,e)=>{let n=r[0].dims,t=n,o=2,i=B.sizeToDimension(n,o),s=B.sizeFromDimension(n,o),a=ze(s),u=s/a,l=[n[0],n[1],u],f=["rank","type","type"],c=[{type:12,data:s},{type:12,data:u}];c.push(...W(l,l));let p=b=>{let h=D("x",r[0].dataType,l.length,a),g=D("scale",r[1].dataType,r[1].dims),T=D("bias",r[2].dataType,r[2].dims),w=G("output",r[0].dataType,l.length,a),v=[h,g,T,w],S=h.type.value,$=a===1?"f32":`vec${a}`,P=64,E=[{name:"normSize",type:"u32"},{name:"normPackedSize",type:"u32"}];return` + }`;return{name:"ConcatPastNew",shaderCache:{hint:`${t.kvNumHeads}${c}${!!e}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:o,dataType:n}],dispatchGroup:p,programUniforms:h}),getShaderSource:$}},vy=r=>de({...r}),L1=de({perm:[0,2,1,3]}),xy=(r,e,n,t,o)=>{let i=e,s=t.kvNumHeads,a=t.nReps;return e.dims.length===3&&t.kvSequenceLength!==0&&(i=e.reshape([t.batchSize,t.kvSequenceLength,s,t.headSize])),n?i=r.compute(yy(i,n,i.dataType,t),{inputs:[i,n],outputs:[t.isPastkvBSNH?o:-1]})[0]:i=r.compute(yy(i,void 0,i.dataType,t),{inputs:[i],outputs:[t.isPastkvBSNH?o:-1]})[0],a!==1&&(i=r.compute(pu([i],[1,1,1,a]),{inputs:[i],outputs:[-1]})[0],i=i.reshape([t.batchSize,t.totalSequenceLength,s*a,t.headSize])),r.compute(Tt(i,L1.perm),{inputs:[i],outputs:[-1]})[0]},wy=(r,e)=>{let n=B1(r.inputs,e);if(r.inputs[0].dims.length===5)throw new Error("Packed QKV is not implemented");if(r.inputs[1]?.dims.length===5)throw new Error("Packed KV is not implemented");let t=eo(r,n.batchSize,n.numHeads,n.sequenceLength,n.headSize,r.inputs[0],void 0,0),o=r.inputs[3]&&r.inputs[3].dims.length!==0?r.inputs[3]:void 0,i=r.inputs[4]&&r.inputs[4].dims.length!==0?r.inputs[4]:void 0,s=xy(r,r.inputs[1],o,n,1),a=xy(r,r.inputs[2],i,n,2);Sn(r,t,s,a,void 0,void 0,void 0,void 0,void 0,n,e)}});var R1,N1,z1,_y,Iy=C(()=>{"use strict";ue();ye();he();R1=(r,e)=>{let n=r[0].dims,t=n,o=2,i=B.sizeToDimension(n,o),s=B.sizeFromDimension(n,o),a=ze(s),u=s/a,l=[n[0],n[1],u],f=["rank","type","type"],c=[{type:12,data:s},{type:12,data:u}];c.push(...W(l,l));let p=b=>{let 
h=D("x",r[0].dataType,l.length,a),g=D("scale",r[1].dataType,r[1].dims),T=D("bias",r[2].dataType,r[2].dims),w=G("output",r[0].dataType,l.length,a),v=[h,g,T,w],S=h.type.value,$=a===1?"f32":`vec${a}`,P=64,E=[{name:"normSize",type:"u32"},{name:"normPackedSize",type:"u32"}];return` var meanShared : f32; var squaredNormShared : f32; var workgroupShared : array<${$}, ${P}>; @@ -3441,7 +3441,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let value = ${h.get("batch","channel","h")} * ${S}(${$}(channelScale)) + ${S}(${$}(channelShift)); ${w.set("batch","channel","h","value")}; } - }`};return{name:"InstanceNormalization",shaderCache:{hint:`${e.epsilon};${a}`,inputDependencies:f},getRunData:()=>({outputs:[{dims:t,dataType:r[0].dataType}],dispatchGroup:{x:i},programUniforms:c}),getShaderSource:p}},L1=(r,e,n,t,o,i,s,a)=>{let u=ze(s),l=64,f=u===1?"vec2f":`mat2x${u}f`,c=u===1?"f32":`vec${u}f`,p=(E,N)=>`${f}(${E}, ${N})`,b=o*s/u,h=Math.ceil(i/l),g=["type"],T=[{type:12,data:h},{type:12,data:i},{type:12,data:Math.floor(s/u)},{type:12,data:Math.floor(i*s/u)}],w=E=>{let N=D("input",e.dataType,e.dims,u);return` + }`};return{name:"InstanceNormalization",shaderCache:{hint:`${e.epsilon};${a}`,inputDependencies:f},getRunData:()=>({outputs:[{dims:t,dataType:r[0].dataType}],dispatchGroup:{x:i},programUniforms:c}),getShaderSource:p}},N1=(r,e,n,t,o,i,s,a)=>{let u=ze(s),l=64,f=u===1?"vec2f":`mat2x${u}f`,c=u===1?"f32":`vec${u}f`,p=(E,N)=>`${f}(${E}, ${N})`,b=o*s/u,h=Math.ceil(i/l),g=["type"],T=[{type:12,data:h},{type:12,data:i},{type:12,data:Math.floor(s/u)},{type:12,data:Math.floor(i*s/u)}],w=E=>{let N=D("input",e.dataType,e.dims,u);return` ${E.declareVariables(N)} @group(0) @binding(1) var output : array<${f}>; struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32}; @@ -3493,7 +3493,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let channelShift = ${c}(bias[currentChannelNumber]) - sum * channelScale; output[global_idx] = ${p("channelScale","channelShift")}; - }`};return r.compute({name:"InstanceNormComputeChannelScaleShift",shaderCache:{hint:`${u};${a}`,inputDependencies:$},getRunData:()=>({outputs:[{dims:[o,s,2],dataType:1}],dispatchGroup:{x:Math.ceil(b/64)},programUniforms:S}),getShaderSource:P},{inputs:[v,n,t],outputs:[-1]})[0]},R1=(r,e,n)=>{let t=e[0].dims,o=t,i=t[0],s=t[t.length-1],a=B.sizeFromDimension(t,1)/s,u=ze(s),l=B.size(o)/u,f=[{type:12,data:a},{type:12,data:Math.floor(s/u)}],c=["type","type"],p=L1(r,e[0],e[1],e[2],i,a,s,n.epsilon),b=h=>{let g=Be(e[0].dataType),T=u===1?"vec2f":`mat2x${u}f`,w=u===1?g:`vec${u}<${g}>`,v=D("input",e[0].dataType,e[0].dims,u),S=G("output",e[0].dataType,o,u);return` + }`};return r.compute({name:"InstanceNormComputeChannelScaleShift",shaderCache:{hint:`${u};${a}`,inputDependencies:$},getRunData:()=>({outputs:[{dims:[o,s,2],dataType:1}],dispatchGroup:{x:Math.ceil(b/64)},programUniforms:S}),getShaderSource:P},{inputs:[v,n,t],outputs:[-1]})[0]},z1=(r,e,n)=>{let t=e[0].dims,o=t,i=t[0],s=t[t.length-1],a=B.sizeFromDimension(t,1)/s,u=ze(s),l=B.size(o)/u,f=[{type:12,data:a},{type:12,data:Math.floor(s/u)}],c=["type","type"],p=N1(r,e[0],e[1],e[2],i,a,s,n.epsilon),b=h=>{let g=Be(e[0].dataType),T=u===1?"vec2f":`mat2x${u}f`,w=u===1?g:`vec${u}<${g}>`,v=D("input",e[0].dataType,e[0].dims,u),S=G("output",e[0].dataType,o,u);return` @group(0) @binding(0) var input : array<${v.type.storage}>; @group(0) @binding(1) var scaleInput : array<${T}>; @group(0) @binding(2) var output : array<${S.type.storage}>; @@ -3507,7 +3507,7 @@ ${ut}_indices[${Te}] = 
0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber; let scale = scaleInput[scaleOffset]; output[global_idx] = fma(input[global_idx], ${w}(scale[0]), ${w}(scale[1])); - }`};r.compute({name:"InstanceNormalizationNHWC",shaderCache:{hint:`${u}`,inputDependencies:c},getRunData:()=>({outputs:[{dims:o,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(l/64)},programUniforms:f}),getShaderSource:b},{inputs:[e[0],p]})},_y=(r,e)=>{e.format==="NHWC"?R1(r,r.inputs,e):r.compute(B1(r.inputs,e))}});var N1,z1,Sy,$y=C(()=>{"use strict";ue();ye();he();N1=r=>{if(!r||r.length<2)throw new Error("layerNorm requires at least 2 inputs.")},z1=(r,e,n)=>{let t=e.simplified,o=r[0].dims,i=r[1],s=!t&&r[2],a=o,u=B.normalizeAxis(e.axis,o.length),l=B.sizeToDimension(o,u),f=B.sizeFromDimension(o,u),c=B.size(i.dims),p=s?B.size(s.dims):0;if(c!==f||s&&p!==f)throw new Error(`Size of X.shape()[axis:] == ${f}. + }`};r.compute({name:"InstanceNormalizationNHWC",shaderCache:{hint:`${u}`,inputDependencies:c},getRunData:()=>({outputs:[{dims:o,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(l/64)},programUniforms:f}),getShaderSource:b},{inputs:[e[0],p]})},_y=(r,e)=>{e.format==="NHWC"?z1(r,r.inputs,e):r.compute(R1(r.inputs,e))}});var F1,M1,Sy,$y=C(()=>{"use strict";ue();ye();he();F1=r=>{if(!r||r.length<2)throw new Error("layerNorm requires at least 2 inputs.")},M1=(r,e,n)=>{let t=e.simplified,o=r[0].dims,i=r[1],s=!t&&r[2],a=o,u=B.normalizeAxis(e.axis,o.length),l=B.sizeToDimension(o,u),f=B.sizeFromDimension(o,u),c=B.size(i.dims),p=s?B.size(s.dims):0;if(c!==f||s&&p!==f)throw new Error(`Size of X.shape()[axis:] == ${f}. Size of scale and bias (if provided) must match this. Got scale size of ${c} and bias size of ${p}`);let b=[];for(let P=0;P1,v=n>2,S=P=>{let E=Be(r[0].dataType),N=[D("x",r[0].dataType,r[0].dims,h),D("scale",i.dataType,i.dims,h)];s&&N.push(D("bias",s.dataType,s.dims,h)),N.push(G("output",r[0].dataType,a,h)),w&&N.push(G("mean_data_output",1,b)),v&&N.push(G("inv_std_output",1,b));let z=[{name:"norm_count",type:"u32"},{name:"norm_size",type:"f32"},{name:"norm_size_vectorized",type:"u32"},{name:"epsilon",type:"f32"}];return` ${P.registerUniforms(z).declareVariables(...N)} @@ -3535,7 +3535,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${w?"mean_data_output[global_idx] = mean":""}; ${v?"inv_std_output[global_idx] = inv_std_dev":""}; - }`},$=[{dims:a,dataType:r[0].dataType}];return w&&$.push({dims:b,dataType:1}),v&&$.push({dims:b,dataType:1}),{name:"LayerNormalization",shaderCache:{hint:`${h};${n};${t}`,inputDependencies:g},getRunData:()=>({outputs:$,dispatchGroup:{x:Math.ceil(l/64)},programUniforms:T}),getShaderSource:S}},Sy=(r,e)=>{N1(r.inputs),r.compute(z1(r.inputs,e,r.outputCount))}});var F1,M1,Ay,Py,Oy=C(()=>{"use strict";ue();ye();et();he();F1=(r,e)=>{if(r.length<3||r.length>4)throw new Error("MatMulNBits requires 3 or 4 inputs");let n=r[0],t=n.dims.length;if(n.dims[t-1]!==e.k)throw new Error("The last dim of input shape does not match the k value");let o=Math.floor((e.k+e.blockSize-1)/e.blockSize),i=e.blockSize/8*e.bits,s=r[1];if(!B.areEqual(s.dims,[e.n,o,i]))throw new Error("The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize");let u=r[2].dims;if(B.size(u)!==e.n*o)throw new Error("scales input size error.");if(r.length===4){let f=r[3].dims,c=e.bits>4?e.n*o:e.n*Math.floor((o+1)/2);if(B.size(f)!==c)throw new Error("zeroPoints input size error.")}},M1=(r,e,n,t)=>{let 
o=r[0].dims,i=o.length,s=Math.floor((e.k+e.blockSize-1)/e.blockSize),a=o[i-2],u=e.k,l=e.n,f=o.slice(0,i-2),c=B.size(f),b=e.blockSize/8*e.bits/4,h=r[0].dataType,g=ze(a),T=ze(e.k),w=ze(b),v=jr(h),S=a*s*v,$=Math.floor(t/S),P=s<=n[0]&&$>0,E=!P||$>=4?ze(l):$>=2&&ze(l)>=2?2:1,N=f.concat([a,l]),z=B.size(N)/E/g,q=P?[]:[{type:12,data:z},{type:12,data:e.blockSize}],K=[c,a,u/T],F=B.convertShape(r[1].dims).slice();F.splice(-1,1,b/w),q.push(...W(K)),q.push(...W(F)),q.push(...W(r[2].dims)),r.length===4&&q.push(...W(B.convertShape(r[3].dims)));let _e=[c,a,l/E];q.push(...W(_e));let $e=ae=>{let qe=K.length,Q=D("a",r[0].dataType,qe,T),ge=D("b",12,F.length,w),Ie=D("scales",r[2].dataType,r[2].dims.length),xe=[Q,ge,Ie],se=r.length===4?D("zero_points",12,r[3].dims.length):void 0;se&&xe.push(se);let pe=_e.length,ce=G("output",r[0].dataType,pe,E),ut=[{name:"output_size",type:"u32"},{name:"block_size",type:"u32"}],V=Be(r[0].dataType),ie=(()=>{switch(T){case 1:return`array<${V}, 8>`;case 2:return`mat4x2<${V}>`;case 4:return`mat2x4<${V}>`;default:throw new Error(`${T}-component is not supported.`)}})(),Te=` + }`},$=[{dims:a,dataType:r[0].dataType}];return w&&$.push({dims:b,dataType:1}),v&&$.push({dims:b,dataType:1}),{name:"LayerNormalization",shaderCache:{hint:`${h};${n};${t}`,inputDependencies:g},getRunData:()=>({outputs:$,dispatchGroup:{x:Math.ceil(l/64)},programUniforms:T}),getShaderSource:S}},Sy=(r,e)=>{F1(r.inputs),r.compute(M1(r.inputs,e,r.outputCount))}});var V1,G1,Ay,Py,Oy=C(()=>{"use strict";ue();ye();et();he();V1=(r,e)=>{if(r.length<3||r.length>4)throw new Error("MatMulNBits requires 3 or 4 inputs");let n=r[0],t=n.dims.length;if(n.dims[t-1]!==e.k)throw new Error("The last dim of input shape does not match the k value");let o=Math.floor((e.k+e.blockSize-1)/e.blockSize),i=e.blockSize/8*e.bits,s=r[1];if(!B.areEqual(s.dims,[e.n,o,i]))throw new Error("The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize");let u=r[2].dims;if(B.size(u)!==e.n*o)throw new Error("scales input size error.");if(r.length===4){let f=r[3].dims,c=e.bits>4?e.n*o:e.n*Math.floor((o+1)/2);if(B.size(f)!==c)throw new Error("zeroPoints input size error.")}},G1=(r,e,n,t)=>{let o=r[0].dims,i=o.length,s=Math.floor((e.k+e.blockSize-1)/e.blockSize),a=o[i-2],u=e.k,l=e.n,f=o.slice(0,i-2),c=B.size(f),b=e.blockSize/8*e.bits/4,h=r[0].dataType,g=ze(a),T=ze(e.k),w=ze(b),v=Kr(h),S=a*s*v,$=Math.floor(t/S),P=s<=n[0]&&$>0,E=!P||$>=4?ze(l):$>=2&&ze(l)>=2?2:1,N=f.concat([a,l]),z=B.size(N)/E/g,q=P?[]:[{type:12,data:z},{type:12,data:e.blockSize}],j=[c,a,u/T],F=B.convertShape(r[1].dims).slice();F.splice(-1,1,b/w),q.push(...W(j)),q.push(...W(F)),q.push(...W(r[2].dims)),r.length===4&&q.push(...W(B.convertShape(r[3].dims)));let _e=[c,a,l/E];q.push(...W(_e));let $e=ae=>{let qe=j.length,Q=D("a",r[0].dataType,qe,T),ge=D("b",12,F.length,w),Ie=D("scales",r[2].dataType,r[2].dims.length),xe=[Q,ge,Ie],se=r.length===4?D("zero_points",12,r[3].dims.length):void 0;se&&xe.push(se);let pe=_e.length,ce=G("output",r[0].dataType,pe,E),ut=[{name:"output_size",type:"u32"},{name:"block_size",type:"u32"}],V=Be(r[0].dataType),ie=(()=>{switch(T){case 1:return`array<${V}, 8>`;case 2:return`mat4x2<${V}>`;case 4:return`mat2x4<${V}>`;default:throw new Error(`${T}-component is not supported.`)}})(),Te=` for (var word: u32 = 0; word < ${b}; word += ${w}) { ${ge.indicesSet("b_indices","2","word")}; let b_data = ${ge.getByIndices("b_indices")}; @@ -3544,8 +3544,8 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let b_mask: u32 = 0x0F0F0F0Fu; let b_value_lower: 
vec4 = unpack4xU8(b_value & b_mask); let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask); - let b_quantized_values = ${ie}(${Array.from({length:4},(Fe,Ke)=>`${V}(b_value_lower[${Ke}]), ${V}(b_value_upper[${Ke}])`).join(", ")}); - let b_dequantized_values = ${(()=>T===1?`${ie}(${Array.from({length:8},(Fe,Ke)=>`(b_quantized_values[${Ke}] - zero_point) * scale`).join(", ")});`:`(b_quantized_values - ${ie}(${Array(8).fill("zero_point").join(",")})) * scale;`)()}; + let b_quantized_values = ${ie}(${Array.from({length:4},(Fe,je)=>`${V}(b_value_lower[${je}]), ${V}(b_value_upper[${je}])`).join(", ")}); + let b_dequantized_values = ${(()=>T===1?`${ie}(${Array.from({length:8},(Fe,je)=>`(b_quantized_values[${je}] - zero_point) * scale`).join(", ")});`:`(b_quantized_values - ${ie}(${Array(8).fill("zero_point").join(",")})) * scale;`)()}; // Number of B elements per 32-bit word is 32/bits = 32/4 = 8 for (var m: u32 = 0; m < ${P?a:g}u; m++) { ${Q.indicesSet("a_indices",qe-2,P?"m":`row * ${g} + m`)}; @@ -3556,7 +3556,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; a_data[j] = ${Q.getByOffset("input_offset")}; input_offset++; } - ${P?"workgroup_shared[workgroup_shared_offset + m]":"output_values[m]"}${E>1?"[c]":""} += ${Array.from({length:8/T},(Fe,Ke)=>`${T===1?`a_data[${Ke}] * b_dequantized_values[${Ke}]`:`dot(a_data[${Ke}], b_dequantized_values[${Ke}])`}`).join(" + ")}; + ${P?"workgroup_shared[workgroup_shared_offset + m]":"output_values[m]"}${E>1?"[c]":""} += ${Array.from({length:8/T},(Fe,je)=>`${T===1?`a_data[${je}] * b_dequantized_values[${je}]`:`dot(a_data[${je}], b_dequantized_values[${je}])`}`).join(" + ")}; } word_offset += ${8/T}; } @@ -3661,7 +3661,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${ce.indicesSet("output_indices",pe-2,`${g} * row + k`)}; ${ce.setByIndices("output_indices","output_values[k]")} } - }`};return{name:P?"BlockwiseMatMulNBits":"MatMulNBits",shaderCache:{hint:`${e.cacheKey};${a};${h};${r.length}`,inputDependencies:Array(r.length).fill("rank")},getRunData:()=>({outputs:[{dims:N,dataType:h}],name:P?"BlockwiseMatMulNBits":"MatMulNBits",dispatchGroup:P?{x:1,y:Math.ceil(l/E),z:c}:{x:Math.ceil(z/64)},programUniforms:q}),getShaderSource:$e}},Ay=(r,e)=>{F1(r.inputs,e);let n=r.getMaxComputeWorkgroupSizes(),t=r.getMaxComputeWorkgroupStoragesize();r.compute(M1(r.inputs,e,n,t))},Py=r=>de(r)});var V1,G1,U1,W1,H1,q1,K1,j1,Ey,Cy=C(()=>{"use strict";ue();ye();he();V1=r=>{if(!r||r.length<1)throw new Error("Too few inputs");if(r[0].dataType!==1&&r[0].dataType!==10)throw new Error("Input type must be float or float16.");if(r.length>=2){let e=r[0].dims.length*2===r[1].dims[0];if(r.length===4&&(e=r[3].dims[0]*2===r[1].dims[0]),!e)throw new Error("The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].")}},G1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` + }`};return{name:P?"BlockwiseMatMulNBits":"MatMulNBits",shaderCache:{hint:`${e.cacheKey};${a};${h};${r.length}`,inputDependencies:Array(r.length).fill("rank")},getRunData:()=>({outputs:[{dims:N,dataType:h}],name:P?"BlockwiseMatMulNBits":"MatMulNBits",dispatchGroup:P?{x:1,y:Math.ceil(l/E),z:c}:{x:Math.ceil(z/64)},programUniforms:q}),getShaderSource:$e}},Ay=(r,e)=>{V1(r.inputs,e);let n=r.getMaxComputeWorkgroupSizes(),t=r.getMaxComputeWorkgroupStoragesize();r.compute(G1(r.inputs,e,n,t))},Py=r=>de(r)});var U1,W1,H1,q1,j1,K1,X1,Z1,Ey,Cy=C(()=>{"use strict";ue();ye();he();U1=r=>{if(!r||r.length<1)throw new Error("Too few inputs");if(r[0].dataType!==1&&r[0].dataType!==10)throw 
new Error("Input type must be float or float16.");if(r.length>=2){let e=r[0].dims.length*2===r[1].dims[0];if(r.length===4&&(e=r[3].dims[0]*2===r[1].dims[0]),!e)throw new Error("The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].")}},W1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` k = i32(${r.indicesGet("indices",o)}) - ${Z("uniforms.pads",o,n)}; if (k < 0) { break; @@ -3678,7 +3678,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${t} value = x[offset]; } - `},U1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` + `},H1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` k = i32(${r.indicesGet("indices",o)}) - ${Z("uniforms.pads",o,n)}; if (k < 0) { k = -k; @@ -3696,7 +3696,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; var k = 0; ${t} value = x[offset]; - `},W1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` + `},q1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` k = i32(${r.indicesGet("indices",o)}) - ${Z("uniforms.pads",o,n)}; if (k < 0) { k = 0; @@ -3710,7 +3710,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; var k = 0; ${t} value = x[offset]; - `},H1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` + `},j1=(r,e,n)=>{let t="";for(let o=e-1;o>=0;--o)t+=` k = i32(${r.indicesGet("indices",o)}) - ${Z("uniforms.pads",o,n)}; if (k < 0) { k += i32(${Z("uniforms.x_shape",o,e)}]); @@ -3724,7 +3724,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; var k = 0; ${t} value = x[offset]; - `},q1=(r,e,n)=>{switch(n.mode){case 0:return G1(r,e,n.pads.length);case 1:return U1(r,e,n.pads.length);case 2:return W1(r,e,n.pads.length);case 3:return H1(r,e,n.pads.length);default:throw new Error("Invalid mode")}},K1=(r,e)=>{let n=B.padShape(r[0].dims.slice(),e.pads),t=r[0].dims,o=B.size(n),i=[{type:12,data:o},{type:6,data:e.pads}];e.mode===0&&i.push({type:r[0].dataType,data:e.value}),i.push(...W(r[0].dims,n));let s=["rank"],a=u=>{let l=G("output",r[0].dataType,n.length),f=D("x",r[0].dataType,t.length),c=f.type.value,p=q1(l,t.length,e),b=[{name:"output_size",type:"u32"},{name:"pads",type:"i32",length:e.pads.length}];return e.mode===0&&b.push({name:"constant_value",type:c}),` + `},K1=(r,e,n)=>{switch(n.mode){case 0:return W1(r,e,n.pads.length);case 1:return H1(r,e,n.pads.length);case 2:return q1(r,e,n.pads.length);case 3:return j1(r,e,n.pads.length);default:throw new Error("Invalid mode")}},X1=(r,e)=>{let n=B.padShape(r[0].dims.slice(),e.pads),t=r[0].dims,o=B.size(n),i=[{type:12,data:o},{type:6,data:e.pads}];e.mode===0&&i.push({type:r[0].dataType,data:e.value}),i.push(...W(r[0].dims,n));let s=["rank"],a=u=>{let l=G("output",r[0].dataType,n.length),f=D("x",r[0].dataType,t.length),c=f.type.value,p=K1(l,t.length,e),b=[{name:"output_size",type:"u32"},{name:"pads",type:"i32",length:e.pads.length}];return e.mode===0&&b.push({name:"constant_value",type:c}),` ${u.registerUniforms(b).declareVariables(f,l)} ${u.mainStart()} ${u.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -3734,7 +3734,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; var value = ${c}(0); ${p} output[global_idx] = value; - }`};return{name:"Pad",shaderCache:{hint:`${e.mode}`,inputDependencies:s},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(B.size(n)/64)},programUniforms:i}),getShaderSource:a}},j1=(r,e)=>{if(r.length>1){let n=r[1].getBigInt64Array(),t=r.length>=3&&r[2].data?r[2].getFloat32Array()[0]:0,o=r[0].dims.length,i=new Int32Array(2*o).fill(0);if(r.length>=4){let 
a=r[3].getBigInt64Array();for(let u=0;ui[Number(u)]=Number(a));let s=[];return i.forEach(a=>s.push(a)),{mode:e.mode,value:t,pads:s}}else return e},Ey=(r,e)=>{V1(r.inputs);let n=j1(r.inputs,e);r.compute(K1(r.inputs,n),{inputs:[0]})}});var qi,ky,Dy,By,Ly,X1,Z1,Ry,Ny,zy,Fy,My,Vy,Gy,Uy,Wy,Hy,qy,Ky,jy=C(()=>{"use strict";ft();ue();ye();he();qi=r=>{if(le.webgpu.validateInputContent&&(!r||r.length!==1))throw new Error("Pool ops requires 1 input.")},ky=(r,e,n)=>{let t=e.format==="NHWC",o=r.dims.slice();t&&o.splice(1,0,o.pop());let i=Object.hasOwnProperty.call(e,"dilations"),s=e.kernelShape.slice(),a=e.strides.slice(),u=i?e.dilations.slice():[],l=e.pads.slice();Xr.adjustPoolAttributes(n,o,s,a,u,l);let f=Xr.computePoolOutputShape(n,o,a,u,s,l,e.autoPad),c=Object.assign({},e);i?Object.assign(c,{kernelShape:s,strides:a,pads:l,dilations:u,cacheKey:e.cacheKey}):Object.assign(c,{kernelShape:s,strides:a,pads:l,cacheKey:e.cacheKey});let p=f.slice();return p.push(p.splice(1,1)[0]),[c,t?p:f]},Dy=(r,e)=>{let n=e.format==="NHWC",t=B.size(r),o=B.size(e.kernelShape),i=[{type:12,data:t},{type:12,data:o}],s=[{name:"outputSize",type:"u32"},{name:"kernelSize",type:"u32"}];if(e.kernelShape.length<=2){let a=e.kernelShape[e.kernelShape.length-1],u=e.strides[e.strides.length-1],l=e.pads[e.pads.length/2-1],f=e.pads[e.pads.length-1],c=!!(l+f);i.push({type:12,data:a},{type:12,data:u},{type:12,data:l},{type:12,data:f}),s.push({name:"kw",type:"u32"},{name:"sw",type:"u32"},{name:"pwStart",type:"u32"},{name:"pwEnd",type:"u32"});let p=!1;if(e.kernelShape.length===2){let b=e.kernelShape[e.kernelShape.length-2],h=e.strides[e.strides.length-2],g=e.pads[e.pads.length/2-2],T=e.pads[e.pads.length-2];p=!!(g+T),i.push({type:12,data:b},{type:12,data:h},{type:12,data:g},{type:12,data:T}),s.push({name:"kh",type:"u32"},{name:"sh",type:"u32"},{name:"phStart",type:"u32"},{name:"phEnd",type:"u32"})}return[i,s,!0,c,p]}else{if(n)throw new Error("Pooling with kernelShape.length > 2 is not supported for NHWC format.");let a=B.computeStrides(e.kernelShape);i.push({type:12,data:a},{type:12,data:e.pads},{type:12,data:e.strides}),s.push({name:"kernelStrides",type:"u32",length:a.length},{name:"pads",type:"u32",length:e.pads.length},{name:"strides",type:"u32",length:e.strides.length});let u=e.pads.reduce((l,f)=>l+f);return[i,s,!!u,!1,!1]}},By=(r,e,n,t,o,i,s,a,u,l,f,c)=>{let p=o.format==="NHWC",b=e.type.value,h=G("output",e.type.tensor,t);if(o.kernelShape.length<=2){let g="",T="",w="",v=n-(p?2:1);if(f?g=` + }`};return{name:"Pad",shaderCache:{hint:`${e.mode}`,inputDependencies:s},getRunData:()=>({outputs:[{dims:n,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(B.size(n)/64)},programUniforms:i}),getShaderSource:a}},Z1=(r,e)=>{if(r.length>1){let n=r[1].getBigInt64Array(),t=r.length>=3&&r[2].data?r[2].getFloat32Array()[0]:0,o=r[0].dims.length,i=new Int32Array(2*o).fill(0);if(r.length>=4){let a=r[3].getBigInt64Array();for(let u=0;ui[Number(u)]=Number(a));let s=[];return i.forEach(a=>s.push(a)),{mode:e.mode,value:t,pads:s}}else return e},Ey=(r,e)=>{U1(r.inputs);let n=Z1(r.inputs,e);r.compute(X1(r.inputs,n),{inputs:[0]})}});var qi,ky,Dy,By,Ly,Y1,J1,Ry,Ny,zy,Fy,My,Vy,Gy,Uy,Wy,Hy,qy,jy,Ky=C(()=>{"use strict";ft();ue();ye();he();qi=r=>{if(le.webgpu.validateInputContent&&(!r||r.length!==1))throw new Error("Pool ops requires 1 input.")},ky=(r,e,n)=>{let t=e.format==="NHWC",o=r.dims.slice();t&&o.splice(1,0,o.pop());let 
i=Object.hasOwnProperty.call(e,"dilations"),s=e.kernelShape.slice(),a=e.strides.slice(),u=i?e.dilations.slice():[],l=e.pads.slice();Xr.adjustPoolAttributes(n,o,s,a,u,l);let f=Xr.computePoolOutputShape(n,o,a,u,s,l,e.autoPad),c=Object.assign({},e);i?Object.assign(c,{kernelShape:s,strides:a,pads:l,dilations:u,cacheKey:e.cacheKey}):Object.assign(c,{kernelShape:s,strides:a,pads:l,cacheKey:e.cacheKey});let p=f.slice();return p.push(p.splice(1,1)[0]),[c,t?p:f]},Dy=(r,e)=>{let n=e.format==="NHWC",t=B.size(r),o=B.size(e.kernelShape),i=[{type:12,data:t},{type:12,data:o}],s=[{name:"outputSize",type:"u32"},{name:"kernelSize",type:"u32"}];if(e.kernelShape.length<=2){let a=e.kernelShape[e.kernelShape.length-1],u=e.strides[e.strides.length-1],l=e.pads[e.pads.length/2-1],f=e.pads[e.pads.length-1],c=!!(l+f);i.push({type:12,data:a},{type:12,data:u},{type:12,data:l},{type:12,data:f}),s.push({name:"kw",type:"u32"},{name:"sw",type:"u32"},{name:"pwStart",type:"u32"},{name:"pwEnd",type:"u32"});let p=!1;if(e.kernelShape.length===2){let b=e.kernelShape[e.kernelShape.length-2],h=e.strides[e.strides.length-2],g=e.pads[e.pads.length/2-2],T=e.pads[e.pads.length-2];p=!!(g+T),i.push({type:12,data:b},{type:12,data:h},{type:12,data:g},{type:12,data:T}),s.push({name:"kh",type:"u32"},{name:"sh",type:"u32"},{name:"phStart",type:"u32"},{name:"phEnd",type:"u32"})}return[i,s,!0,c,p]}else{if(n)throw new Error("Pooling with kernelShape.length > 2 is not supported for NHWC format.");let a=B.computeStrides(e.kernelShape);i.push({type:12,data:a},{type:12,data:e.pads},{type:12,data:e.strides}),s.push({name:"kernelStrides",type:"u32",length:a.length},{name:"pads",type:"u32",length:e.pads.length},{name:"strides",type:"u32",length:e.strides.length});let u=e.pads.reduce((l,f)=>l+f);return[i,s,!!u,!1,!1]}},By=(r,e,n,t,o,i,s,a,u,l,f,c)=>{let p=o.format==="NHWC",b=e.type.value,h=G("output",e.type.tensor,t);if(o.kernelShape.length<=2){let g="",T="",w="",v=n-(p?2:1);if(f?g=` for (var i: u32 = 0u; i < uniforms.kw; i++) { xIndices[${v}] = indices[${v}] * uniforms.sw - uniforms.pwStart + i; if (xIndices[${v}] < 0 || xIndices[${v}] @@ -3823,15 +3823,15 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${s} output[global_idx] = value; - }`}},Ly=r=>`${r.format};${r.ceilMode};${r.autoPad};${r.kernelShape.length}`,X1=r=>`${Ly(r)};${r.countIncludePad}`,Z1=r=>`${Ly(r)};${r.storageOrder};${r.dilations}`,Ry=r=>({format:r.format,autoPad:["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][r.auto_pad],ceilMode:r.ceil_mode,kernelShape:r.kernel_shape,strides:r.strides,pads:r.pads}),Ny=(r,e,n,t)=>{let[o,i]=ky(e,t,n),s=D("x",e.dataType,e.dims.length),a=s.type.value,u="value += x_val;",l="";o.countIncludePad?l+=`value /= ${a}(uniforms.kernelSize);`:l+=`value /= ${a}(i32(uniforms.kernelSize) - pad);`;let[f,c,p,b,h]=Dy(i,o);f.push(...W(e.dims,i));let g=["rank"];return{name:r,shaderCache:{hint:`${t.cacheKey};${p};${b};${h}`,inputDependencies:g},getRunData:()=>({outputs:[{dims:i,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(B.size(i)/64)},programUniforms:f}),getShaderSource:T=>By(T,s,e.dims.length,i.length,o,u,l,0,c,p,b,h)}},zy=r=>{let e=r.count_include_pad!==0,n=Ry(r);if(n.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for AveragePool");let t={countIncludePad:e,...n,cacheKey:""};return{...t,cacheKey:X1(t)}},Fy=(r,e)=>{qi(r.inputs),r.compute(Ny("AveragePool",r.inputs[0],!1,e))},My={autoPad:"",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[]},Vy=r=>{let 
e=r.format;return{format:e,...My,cacheKey:e}},Gy=(r,e)=>{qi(r.inputs),r.compute(Ny("GlobalAveragePool",r.inputs[0],!0,e))},Uy=(r,e,n,t)=>{let[o,i]=ky(e,t,n),s=` + }`}},Ly=r=>`${r.format};${r.ceilMode};${r.autoPad};${r.kernelShape.length}`,Y1=r=>`${Ly(r)};${r.countIncludePad}`,J1=r=>`${Ly(r)};${r.storageOrder};${r.dilations}`,Ry=r=>({format:r.format,autoPad:["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][r.auto_pad],ceilMode:r.ceil_mode,kernelShape:r.kernel_shape,strides:r.strides,pads:r.pads}),Ny=(r,e,n,t)=>{let[o,i]=ky(e,t,n),s=D("x",e.dataType,e.dims.length),a=s.type.value,u="value += x_val;",l="";o.countIncludePad?l+=`value /= ${a}(uniforms.kernelSize);`:l+=`value /= ${a}(i32(uniforms.kernelSize) - pad);`;let[f,c,p,b,h]=Dy(i,o);f.push(...W(e.dims,i));let g=["rank"];return{name:r,shaderCache:{hint:`${t.cacheKey};${p};${b};${h}`,inputDependencies:g},getRunData:()=>({outputs:[{dims:i,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(B.size(i)/64)},programUniforms:f}),getShaderSource:T=>By(T,s,e.dims.length,i.length,o,u,l,0,c,p,b,h)}},zy=r=>{let e=r.count_include_pad!==0,n=Ry(r);if(n.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for AveragePool");let t={countIncludePad:e,...n,cacheKey:""};return{...t,cacheKey:Y1(t)}},Fy=(r,e)=>{qi(r.inputs),r.compute(Ny("AveragePool",r.inputs[0],!1,e))},My={autoPad:"",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[]},Vy=r=>{let e=r.format;return{format:e,...My,cacheKey:e}},Gy=(r,e)=>{qi(r.inputs),r.compute(Ny("GlobalAveragePool",r.inputs[0],!0,e))},Uy=(r,e,n,t)=>{let[o,i]=ky(e,t,n),s=` value = max(x_val, value); - `,a="",u=D("x",e.dataType,e.dims.length),l=["rank"],[f,c,p,b,h]=Dy(i,o);return f.push(...W(e.dims,i)),{name:r,shaderCache:{hint:`${t.cacheKey};${p};${b};${h}`,inputDependencies:l},getRunData:()=>({outputs:[{dims:i,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(B.size(i)/64)},programUniforms:f}),getShaderSource:g=>By(g,u,e.dims.length,i.length,o,s,a,e.dataType===10?-65504:-1e5,c,p,b,h)}},Wy=(r,e)=>{qi(r.inputs),r.compute(Uy("MaxPool",r.inputs[0],!1,e))},Hy=r=>{let e=r.storage_order,n=r.dilations,t=Ry(r);if(e!==0)throw new Error("column major storage order is not yet supported for MaxPool");if(t.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for MaxPool");let o={storageOrder:e,dilations:n,...t,cacheKey:""};return{...o,cacheKey:Z1(o)}},qy=r=>{let e=r.format;return{format:e,...My,cacheKey:e}},Ky=(r,e)=>{qi(r.inputs),r.compute(Uy("GlobalMaxPool",r.inputs[0],!0,e))}});var J1,Q1,Xy,Zy=C(()=>{"use strict";ft();ue();he();J1=(r,e,n)=>{let t=r===e,o=re&&n>0;if(t||o||i)throw new Error("Range these inputs' contents are invalid.")},Q1=(r,e,n,t)=>{let o=Math.abs(Math.ceil((e-r)/n)),i=[o],s=o,a=[{type:12,data:s},{type:t,data:r},{type:t,data:n},...W(i)],u=l=>{let f=G("output",t,i.length),c=f.type.value,p=[{name:"outputSize",type:"u32"},{name:"start",type:c},{name:"delta",type:c}];return` + `,a="",u=D("x",e.dataType,e.dims.length),l=["rank"],[f,c,p,b,h]=Dy(i,o);return f.push(...W(e.dims,i)),{name:r,shaderCache:{hint:`${t.cacheKey};${p};${b};${h}`,inputDependencies:l},getRunData:()=>({outputs:[{dims:i,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(B.size(i)/64)},programUniforms:f}),getShaderSource:g=>By(g,u,e.dims.length,i.length,o,s,a,e.dataType===10?-65504:-1e5,c,p,b,h)}},Wy=(r,e)=>{qi(r.inputs),r.compute(Uy("MaxPool",r.inputs[0],!1,e))},Hy=r=>{let e=r.storage_order,n=r.dilations,t=Ry(r);if(e!==0)throw new Error("column major storage order is not 
yet supported for MaxPool");if(t.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for MaxPool");let o={storageOrder:e,dilations:n,...t,cacheKey:""};return{...o,cacheKey:J1(o)}},qy=r=>{let e=r.format;return{format:e,...My,cacheKey:e}},jy=(r,e)=>{qi(r.inputs),r.compute(Uy("GlobalMaxPool",r.inputs[0],!0,e))}});var eI,tI,Xy,Zy=C(()=>{"use strict";ft();ue();he();eI=(r,e,n)=>{let t=r===e,o=re&&n>0;if(t||o||i)throw new Error("Range these inputs' contents are invalid.")},tI=(r,e,n,t)=>{let o=Math.abs(Math.ceil((e-r)/n)),i=[o],s=o,a=[{type:12,data:s},{type:t,data:r},{type:t,data:n},...W(i)],u=l=>{let f=G("output",t,i.length),c=f.type.value,p=[{name:"outputSize",type:"u32"},{name:"start",type:c},{name:"delta",type:c}];return` ${l.registerUniforms(p).declareVariables(f)} ${l.mainStart()} ${l.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} output[global_idx] = uniforms.start + ${c}(global_idx) * uniforms.delta; - }`};return{name:"Range",shaderCache:{hint:`${t}`},getShaderSource:u,getRunData:()=>({outputs:[{dims:i,dataType:t}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:a})}},Xy=r=>{let e=0,n=0,t=0;r.inputs[0].dataType===6?(e=r.inputs[0].getInt32Array()[0],n=r.inputs[1].getInt32Array()[0],t=r.inputs[2].getInt32Array()[0]):r.inputs[0].dataType===1&&(e=r.inputs[0].getFloat32Array()[0],n=r.inputs[1].getFloat32Array()[0],t=r.inputs[2].getFloat32Array()[0]),le.webgpu.validateInputContent&&J1(e,n,t),r.compute(Q1(e,n,t,r.inputs[0].dataType),{inputs:[]})}});var eI,tI,rI,nI,oI,iI,aI,sI,uI,lI,cI,Yy,fI,dI,pI,mI,hI,Jy,Qy,ex=C(()=>{"use strict";ue();ye();et();he();eI=(r,e)=>{if(r.every(n=>n>0||(()=>{throw new Error("Resize requires scales input values to be positive")})),r.length>0){if(e.mode==="linear"){if(!(r.length===2||r.length===3||r.length===4&&r[0]===1&&r[1]===1||r.length===4&&r[0]===1&&r[3]===1||r.length===5&&r[0]===1&&r[1]===1))throw new Error(`For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and - one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`)}else if(e.mode==="cubic"&&!(r.length===2||r.length===4&&r[0]===1&&r[1]===1||r.length===4&&r[0]===1&&r[3]===1))throw new Error("Resize requires scales input size to be 2 or 4 for cubic mode")}},tI=(r,e,n)=>{e.every(o=>o>=0&&o{throw new Error("Resize requires axes input values to be positive and less than rank")}));let t=new Array(n).fill(1);return e.forEach((o,i)=>t[o]=r[i]),t},rI=(r,e,n,t,o,i)=>{let[s,a,u]=n>10?[1,2,3]:[-1,r.length>1?1:-1,-1],l=r[0].dims.length;if(s>0&&r.length>s&&r[s].dims.length>0)r[s].getFloat32Array().forEach(f=>i.push(f));else if(e.coordinateTransformMode==="tf_crop_and_resize")throw new Error("Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize");if(a>0&&r.length>a&&r[a].dims.length>0){if(r[a].getFloat32Array().forEach(f=>t.push(f)),t.length!==0&&t.length!==l&&n>=18&&t.length!==e.axes.length)throw new Error("Resize requires scales input size to be same as input rank or axes size for opset 18 and up");eI(t,e),e.axes.length>0&&tI(t,e.axes,l).forEach((f,c)=>t[c]=f)}if(u>0&&r.length>u&&(r[u].getBigInt64Array().forEach(f=>o.push(Number(f))),o.length!==l||n>=18&&o.length===e.axes.length))throw new Error("Resize requires sizes input size to be same as input rank or axes size for opset 18 and up");if(e.axes.length>0){if(t.length!==e.axes.length)throw new Error('Resize requires "scales" input size to be of axes rank when axes attributes is 
specified');if(o.length!==e.axes.length)throw new Error('Resize requires "sizes" input size to be of rank axes rank when axes attributes is specified')}if(typeof t<"u"&&typeof o<"u"&&t.length>0&&o.length>l)throw new Error("Resize requires only of scales or sizes to be specified")},nI=(r,e)=>`fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32, + }`};return{name:"Range",shaderCache:{hint:`${t}`},getShaderSource:u,getRunData:()=>({outputs:[{dims:i,dataType:t}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:a})}},Xy=r=>{let e=0,n=0,t=0;r.inputs[0].dataType===6?(e=r.inputs[0].getInt32Array()[0],n=r.inputs[1].getInt32Array()[0],t=r.inputs[2].getInt32Array()[0]):r.inputs[0].dataType===1&&(e=r.inputs[0].getFloat32Array()[0],n=r.inputs[1].getFloat32Array()[0],t=r.inputs[2].getFloat32Array()[0]),le.webgpu.validateInputContent&&eI(e,n,t),r.compute(tI(e,n,t,r.inputs[0].dataType),{inputs:[]})}});var rI,nI,oI,iI,aI,sI,uI,lI,cI,fI,dI,Yy,pI,mI,hI,gI,bI,Jy,Qy,ex=C(()=>{"use strict";ue();ye();et();he();rI=(r,e)=>{if(r.every(n=>n>0||(()=>{throw new Error("Resize requires scales input values to be positive")})),r.length>0){if(e.mode==="linear"){if(!(r.length===2||r.length===3||r.length===4&&r[0]===1&&r[1]===1||r.length===4&&r[0]===1&&r[3]===1||r.length===5&&r[0]===1&&r[1]===1))throw new Error(`For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and + one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`)}else if(e.mode==="cubic"&&!(r.length===2||r.length===4&&r[0]===1&&r[1]===1||r.length===4&&r[0]===1&&r[3]===1))throw new Error("Resize requires scales input size to be 2 or 4 for cubic mode")}},nI=(r,e,n)=>{e.every(o=>o>=0&&o{throw new Error("Resize requires axes input values to be positive and less than rank")}));let t=new Array(n).fill(1);return e.forEach((o,i)=>t[o]=r[i]),t},oI=(r,e,n,t,o,i)=>{let[s,a,u]=n>10?[1,2,3]:[-1,r.length>1?1:-1,-1],l=r[0].dims.length;if(s>0&&r.length>s&&r[s].dims.length>0)r[s].getFloat32Array().forEach(f=>i.push(f));else if(e.coordinateTransformMode==="tf_crop_and_resize")throw new Error("Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize");if(a>0&&r.length>a&&r[a].dims.length>0){if(r[a].getFloat32Array().forEach(f=>t.push(f)),t.length!==0&&t.length!==l&&n>=18&&t.length!==e.axes.length)throw new Error("Resize requires scales input size to be same as input rank or axes size for opset 18 and up");rI(t,e),e.axes.length>0&&nI(t,e.axes,l).forEach((f,c)=>t[c]=f)}if(u>0&&r.length>u&&(r[u].getBigInt64Array().forEach(f=>o.push(Number(f))),o.length!==l||n>=18&&o.length===e.axes.length))throw new Error("Resize requires sizes input size to be same as input rank or axes size for opset 18 and up");if(e.axes.length>0){if(t.length!==e.axes.length)throw new Error('Resize requires "scales" input size to be of axes rank when axes attributes is specified');if(o.length!==e.axes.length)throw new Error('Resize requires "sizes" input size to be of rank axes rank when axes attributes is specified')}if(typeof t<"u"&&typeof o<"u"&&t.length>0&&o.length>l)throw new Error("Resize requires only of scales or sizes to be specified")},iI=(r,e)=>`fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32, lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${e} { `+(()=>{switch(r){case"asymmetric":return`return ${e}(xResized) / ${e}(xScale);`;case"pytorch_half_pixel":return`if (lengthResized > 1) { return 
(${e}(xResized) + 0.5) / ${e}(xScale) - 0.5; } else { @@ -3856,7 +3856,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; const adjustment = ${e}(lengthResized) / outputWidth; const center = ${e}(lengthOriginal) / 2; const offset = center * (1 - adjustment); - return offset + ((${e}(xResized) + 0.5) / ${e}(xScale)) - 0.5;`;case"half_pixel":return`return ((${e}(xResized) + 0.5) / ${e}(xScale)) - 0.5;`;default:throw new Error(`Coordinate transform mode ${r} is not supported`)}})()+"}",oI=(r,e,n)=>`fn getNearestPixelFromOriginal(xOriginal: ${n}, isDownSample: bool) -> ${n} {`+(()=>{switch(r){case"round_prefer_ceil":return"if (fract(xOriginal) == 0.5) { return ceil(xOriginal); } else { return round(xOriginal); }";case"floor":return"return floor(xOriginal);";case"ceil":return"return ceil(xOriginal);";case"round_prefer_floor":return"if (fract(xOriginal) == 0.5) { return floor(xOriginal); } else { return round(xOriginal); }";case"simple":default:if(e<11)return"if (isDownSample) { return ceil(xOriginal); } else { return xOriginal; }";throw new Error(`Nearest mode ${r} is not supported`)}})()+"}",iI=(r,e,n)=>{let t=new Array(n).fill(0).concat(new Array(n).fill(1)),o=r.length===0?t:r.slice();return e.length>0?(e.forEach((i,s)=>{t[i]=o[s],t[s+n]=o[e.length+s]}),t):o},aI=(r,e,n,t)=>{let o=[];if(n.length>0)if(t.length>0){if(r.forEach(i=>o.push(i)),Math.max(...t)>r.length)throw new Error("axes is out of bound");t.forEach((i,s)=>o[i]=n[s])}else n.forEach(i=>o.push(i));else{if(e.length===0)throw new Error("Resize requires either scales or sizes.");o=r.map((i,s)=>Math.round(i*e[s]))}return o},sI=(r,e,n)=>{let t=(()=>{switch(n.keepAspectRatioPolicy){case"not_larger":return n.axes.length>0?Math.min(...n.axes.map(i=>e[i]),Number.MAX_VALUE):Math.min(...e,Number.MAX_VALUE);case"not_smaller":return n.axes.length>0?Math.max(...n.axes.map(i=>e[i]),Number.MIN_VALUE):Math.max(...e,Number.MIN_VALUE);default:throw new Error(`Keep aspect ratio policy ${n.keepAspectRatioPolicy} is not supported`)}})();e.fill(1,0,e.length);let o=r.slice();return n.axes.length>0?(n.axes.forEach(i=>e[i]=t),n.axes.forEach(i=>o[i]=Math.round(r[i]*e[i]))):(e.fill(t,0,e.length),o.forEach((i,s)=>o[s]=Math.round(i*e[s]))),o},uI=(r,e,n,t,o)=>` + return offset + ((${e}(xResized) + 0.5) / ${e}(xScale)) - 0.5;`;case"half_pixel":return`return ((${e}(xResized) + 0.5) / ${e}(xScale)) - 0.5;`;default:throw new Error(`Coordinate transform mode ${r} is not supported`)}})()+"}",aI=(r,e,n)=>`fn getNearestPixelFromOriginal(xOriginal: ${n}, isDownSample: bool) -> ${n} {`+(()=>{switch(r){case"round_prefer_ceil":return"if (fract(xOriginal) == 0.5) { return ceil(xOriginal); } else { return round(xOriginal); }";case"floor":return"return floor(xOriginal);";case"ceil":return"return ceil(xOriginal);";case"round_prefer_floor":return"if (fract(xOriginal) == 0.5) { return floor(xOriginal); } else { return round(xOriginal); }";case"simple":default:if(e<11)return"if (isDownSample) { return ceil(xOriginal); } else { return xOriginal; }";throw new Error(`Nearest mode ${r} is not supported`)}})()+"}",sI=(r,e,n)=>{let t=new Array(n).fill(0).concat(new Array(n).fill(1)),o=r.length===0?t:r.slice();return e.length>0?(e.forEach((i,s)=>{t[i]=o[s],t[s+n]=o[e.length+s]}),t):o},uI=(r,e,n,t)=>{let o=[];if(n.length>0)if(t.length>0){if(r.forEach(i=>o.push(i)),Math.max(...t)>r.length)throw new Error("axes is out of bound");t.forEach((i,s)=>o[i]=n[s])}else n.forEach(i=>o.push(i));else{if(e.length===0)throw new Error("Resize requires either scales or 
sizes.");o=r.map((i,s)=>Math.round(i*e[s]))}return o},lI=(r,e,n)=>{let t=(()=>{switch(n.keepAspectRatioPolicy){case"not_larger":return n.axes.length>0?Math.min(...n.axes.map(i=>e[i]),Number.MAX_VALUE):Math.min(...e,Number.MAX_VALUE);case"not_smaller":return n.axes.length>0?Math.max(...n.axes.map(i=>e[i]),Number.MIN_VALUE):Math.max(...e,Number.MIN_VALUE);default:throw new Error(`Keep aspect ratio policy ${n.keepAspectRatioPolicy} is not supported`)}})();e.fill(1,0,e.length);let o=r.slice();return n.axes.length>0?(n.axes.forEach(i=>e[i]=t),n.axes.forEach(i=>o[i]=Math.round(r[i]*e[i]))):(e.fill(t,0,e.length),o.forEach((i,s)=>o[s]=Math.round(i*e[s]))),o},cI=(r,e,n,t,o)=>` fn calculateOriginalIndicesFromOutputIndices(output_indices: ${r.type.indices}) -> array<${r.type.value}, ${n.length}> { var original_indices: array<${r.type.value}, ${n.length}>; for (var i:u32 = 0; i < ${n.length}; i++) { @@ -3874,7 +3874,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; } } return original_indices; - }`,lI=(r,e,n,t,o,i,s)=>` + }`,fI=(r,e,n,t,o,i,s)=>` fn calculateInputIndicesFromOutputIndices(output_indices: ${e.type.indices}) -> ${r.type.indices} { var input_indices: ${r.type.indices}; for (var i:u32 = 0; i < ${t.length}; i++) { @@ -3905,7 +3905,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${r.indicesSet("input_indices","i"," input_index")} } return input_indices; - }`,cI=(r,e)=>` + }`,dI=(r,e)=>` fn checkInputIndices(input_indices: ${r.type.indices}) -> bool { for (var i:u32 = 0; i < ${e.length}; i++) { var input_index = ${r.indicesGet("input_indices","i")}; @@ -3917,7 +3917,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; }`,Yy=(r,e,n,t)=>r.rank>t?` ${r.indicesSet("input_indices",e,"channel")}; ${r.indicesSet("input_indices",n,"batch")}; -`:"",fI=(r,e,n,t,o)=>{let[s,a,u,l]=n.length===2?[-1,0,1,-1]:[0,2,3,1],f=r.type.value;return` +`:"",pI=(r,e,n,t,o)=>{let[s,a,u,l]=n.length===2?[-1,0,1,-1]:[0,2,3,1],f=r.type.value;return` fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${f} { var input_indices: ${r.type.indices}; ${r.indicesSet("input_indices",a,`max(0, min(row, ${n[a]} - 1))`)}; @@ -3958,7 +3958,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; dy2 = 0.5; } return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1); - }`},dI=(r,e,n,t,o,i,s,a,u,l)=>{let f=n.length===2,c=!0,[p,b]=f?[0,1]:c?[2,3]:[1,2],h=r.type.value,g=T=>{let w=T===p?"row":"col";return` + }`},mI=(r,e,n,t,o,i,s,a,u,l)=>{let f=n.length===2,c=!0,[p,b]=f?[0,1]:c?[2,3]:[1,2],h=r.type.value,g=T=>{let w=T===p?"row":"col";return` fn ${w}CubicInterpolation(input_indices: ${r.type.indices}, output_indices: ${e.type.indices}) -> ${h} { var output_index = ${e.indicesGet("output_indices",T)}; var originalIdx: ${h} = getOriginalCoordinateFromResizedCoordinate(output_index, ${o[T]}, @@ -4006,7 +4006,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; var input_indices: ${r.type.indices} = output_indices; return colCubicInterpolation(input_indices, output_indices); } - `},pI=(r,e,n,t,o)=>{let[s,a,u,l,f]=n.length===3?[-1,0,1,2,-1]:[0,2,3,4,1],c=r.type.value;return` + `},hI=(r,e,n,t,o)=>{let[s,a,u,l,f]=n.length===3?[-1,0,1,2,-1]:[0,2,3,4,1],c=r.type.value;return` fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${c} { var input_indices: ${r.type.indices}; ${r.indicesSet("input_indices",a,`max(0, min(depth, ${n[a]} - 1))`)}; @@ -4065,18 +4065,18 @@ ${ut}_indices[${Te}] = 
0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; } return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 + x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1); - }`},mI=(r,e,n,t,o,i)=>{let s=r.dims,a=iI(i,e.axes,s.length),u=aI(s,t,o,e.axes),l=t.slice();t.length===0&&(l=s.map((v,S)=>v===0?1:u[S]/v),e.keepAspectRatioPolicy!=="stretch"&&(u=sI(s,l,e)));let f=G("output",r.dataType,u.length),c=D("input",r.dataType,s.length),p=B.size(u),b=s.length===u.length&&s.every((v,S)=>v===u[S]),h=e.coordinateTransformMode==="tf_crop_and_resize",g=e.extrapolationValue,T=c.type.value,w=v=>` + }`},gI=(r,e,n,t,o,i)=>{let s=r.dims,a=sI(i,e.axes,s.length),u=uI(s,t,o,e.axes),l=t.slice();t.length===0&&(l=s.map((v,S)=>v===0?1:u[S]/v),e.keepAspectRatioPolicy!=="stretch"&&(u=lI(s,l,e)));let f=G("output",r.dataType,u.length),c=D("input",r.dataType,s.length),p=B.size(u),b=s.length===u.length&&s.every((v,S)=>v===u[S]),h=e.coordinateTransformMode==="tf_crop_and_resize",g=e.extrapolationValue,T=c.type.value,w=v=>` ${b?"":` - ${nI(e.coordinateTransformMode,T)}; + ${iI(e.coordinateTransformMode,T)}; ${(()=>{switch(e.mode){case"nearest":return` - ${cI(c,s)}; - ${oI(e.nearestMode,n,T)}; - ${lI(c,f,s,u,l.length,a.length,h)}; + ${dI(c,s)}; + ${aI(e.nearestMode,n,T)}; + ${fI(c,f,s,u,l.length,a.length,h)}; `;case"linear":return` - ${uI(f,s,u,l.length,a.length)}; - ${(()=>{if(s.length===2||s.length===4)return`${fI(c,f,s,h,g)}`;if(s.length===3||s.length===5)return`${pI(c,f,s,h,g)}`;throw Error("Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.")})()}; + ${cI(f,s,u,l.length,a.length)}; + ${(()=>{if(s.length===2||s.length===4)return`${pI(c,f,s,h,g)}`;if(s.length===3||s.length===5)return`${hI(c,f,s,h,g)}`;throw Error("Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.")})()}; `;case"cubic":return` - ${(()=>{if(s.length===2||s.length===4)return`${dI(c,f,s,u,l,a,e.cubicCoeffA,h,e.extrapolationValue,e.excludeOutside)}`;throw Error("Cubic mode only supports input dims 2 and 4 are supported in linear mode.")})()}; + ${(()=>{if(s.length===2||s.length===4)return`${mI(c,f,s,u,l,a,e.cubicCoeffA,h,e.extrapolationValue,e.excludeOutside)}`;throw Error("Cubic mode only supports input dims 2 and 4 are supported in linear mode.")})()}; `;default:throw Error("Invalid resize mode")}})()}; `} ${v.registerUniform("output_size","u32").registerUniform("scales","f32",l.length).registerUniform("roi","f32",a.length).declareVariables(c,f)} @@ -4092,7 +4092,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; output[global_idx] = ${e.extrapolationValue}; }`;case"linear":return`output[global_idx] = ${s.length===2||s.length===4?"bilinearInterpolation":"trilinearInterpolation"}(output_indices);`;case"cubic":return"output[global_idx] = bicubicInterpolation(output_indices);";default:throw Error(`Unsupported resize mode: ${e.mode}`)}})()}; `} - }`;return{name:"Resize",shaderCache:{hint:`${e.cacheKey}|${n}|${l.length>0?l:""}|${o.length>0?o:""}|${a.length>0?a:""}|${b}|${s}`,inputDependencies:["rank"]},getShaderSource:w,getRunData:()=>({outputs:[{dims:u,dataType:r.dataType}],dispatchGroup:{x:Math.ceil(p/64)},programUniforms:[{type:12,data:p},{type:1,data:l},{type:1,data:a},...W(s,u)]})}},hI=r=>{let e=r.customDataBuffer;return new Uint32Array(e,e.byteOffset,1)[0]},Jy=(r,e)=>{let n=[],t=[],o=[],i=hI(r);if(e.antialias!==0)throw Error("Only default value (0) for Antialias attribute is 
supported");rI(r.inputs,e,i,n,t,o),r.compute(mI(r.inputs[0],e,i,n,t,o),{inputs:[0]})},Qy=r=>{let e=r.antialias,n=r.axes,t=r.coordinateTransformMode,o=r.cubicCoeffA,i=r.excludeOutside!==0,s=r.extrapolationValue,a=r.keepAspectRatioPolicy,u=r.mode,l=r.nearestMode===""?"simple":r.nearestMode;return de({antialias:e,axes:n,coordinateTransformMode:t,cubicCoeffA:o,excludeOutside:i,extrapolationValue:s,keepAspectRatioPolicy:a,mode:u,nearestMode:l})}});var gI,bI,tx,rx=C(()=>{"use strict";ue();ye();et();he();gI=(r,e)=>{let[n,t,o,i]=r,{numHeads:s,rotaryEmbeddingDim:a}=e;if(n.dims.length!==3&&n.dims.length!==4)throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${n.dims.length}`);if(!B.areEqual(t.dims,[])&&!B.areEqual(t.dims,[1])&&t.dims.length!==2)throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${t.dims.length}`);if(o.dims.length!==2)throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${o.dims.length}`);if(i.dims.length!==2)throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${i.dims.length}`);if(!B.areEqual(o.dims,i.dims))throw new Error("Inputs 'cos_cache' and 'sin_cache' are expected to have the same shape");if(a>0&&s===0)throw new Error("num_heads must be provided if rotary_embedding_dim is specified");let u=n.dims[0],l=n.dims[n.dims.length-2],f=o.dims[0],c=B.sizeFromDimension(n.dims,1)/l,p=a===0?o.dims[1]*2:c/s;if(a>p)throw new Error("rotary_embedding_dim must be less than or equal to head_size");if(t.dims.length===2){if(u!==t.dims[0])throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${t.dims[0]}`);if(l!==t.dims[1])throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${t.dims[1]}`)}if(p/2!==o.dims[1]&&a/2!==o.dims[1])throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${o.dims[1]}`);if(l>f)throw new Error("Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported")},bI=(r,e)=>{let{interleaved:n,numHeads:t,rotaryEmbeddingDim:o,scale:i}=e,s=r[0].dims[0],a=B.sizeFromDimension(r[0].dims,1),u=r[0].dims[r[0].dims.length-2],l=a/u,f=r[2].dims[1],c=o===0?f*2:l/t,p=new Array(s,u,l/c,c-f),b=B.computeStrides(p),h=[{type:1,data:i},{type:12,data:p},{type:12,data:b},...r[0].dims.length===3?new Array({type:12,data:[a,l,c,1]}):[],...r[0].dims.length===4?new Array({type:12,data:[a,c,u*c,1]}):[],...W(r[0].dims,r[1].dims,r[2].dims,r[3].dims,r[0].dims)],g=T=>{let w=D("input",r[0].dataType,r[0].dims.length),v=D("position_ids",r[1].dataType,r[1].dims.length),S=D("cos_cache",r[2].dataType,r[2].dims.length),$=D("sin_cache",r[3].dataType,r[3].dims.length),P=G("output",r[0].dataType,r[0].dims.length);return T.registerUniforms([{name:"scale",type:"f32"},{name:"global_shape",type:"u32",length:p.length},{name:"global_strides",type:"u32",length:b.length},{name:"input_output_strides",type:"u32",length:b.length}]),` + }`;return{name:"Resize",shaderCache:{hint:`${e.cacheKey}|${n}|${l.length>0?l:""}|${o.length>0?o:""}|${a.length>0?a:""}|${b}|${s}`,inputDependencies:["rank"]},getShaderSource:w,getRunData:()=>({outputs:[{dims:u,dataType:r.dataType}],dispatchGroup:{x:Math.ceil(p/64)},programUniforms:[{type:12,data:p},{type:1,data:l},{type:1,data:a},...W(s,u)]})}},bI=r=>{let e=r.customDataBuffer;return new Uint32Array(e,e.byteOffset,1)[0]},Jy=(r,e)=>{let n=[],t=[],o=[],i=bI(r);if(e.antialias!==0)throw Error("Only default value (0) for Antialias attribute is 
supported");oI(r.inputs,e,i,n,t,o),r.compute(gI(r.inputs[0],e,i,n,t,o),{inputs:[0]})},Qy=r=>{let e=r.antialias,n=r.axes,t=r.coordinateTransformMode,o=r.cubicCoeffA,i=r.excludeOutside!==0,s=r.extrapolationValue,a=r.keepAspectRatioPolicy,u=r.mode,l=r.nearestMode===""?"simple":r.nearestMode;return de({antialias:e,axes:n,coordinateTransformMode:t,cubicCoeffA:o,excludeOutside:i,extrapolationValue:s,keepAspectRatioPolicy:a,mode:u,nearestMode:l})}});var yI,xI,tx,rx=C(()=>{"use strict";ue();ye();et();he();yI=(r,e)=>{let[n,t,o,i]=r,{numHeads:s,rotaryEmbeddingDim:a}=e;if(n.dims.length!==3&&n.dims.length!==4)throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${n.dims.length}`);if(!B.areEqual(t.dims,[])&&!B.areEqual(t.dims,[1])&&t.dims.length!==2)throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${t.dims.length}`);if(o.dims.length!==2)throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${o.dims.length}`);if(i.dims.length!==2)throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${i.dims.length}`);if(!B.areEqual(o.dims,i.dims))throw new Error("Inputs 'cos_cache' and 'sin_cache' are expected to have the same shape");if(a>0&&s===0)throw new Error("num_heads must be provided if rotary_embedding_dim is specified");let u=n.dims[0],l=n.dims[n.dims.length-2],f=o.dims[0],c=B.sizeFromDimension(n.dims,1)/l,p=a===0?o.dims[1]*2:c/s;if(a>p)throw new Error("rotary_embedding_dim must be less than or equal to head_size");if(t.dims.length===2){if(u!==t.dims[0])throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${t.dims[0]}`);if(l!==t.dims[1])throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${t.dims[1]}`)}if(p/2!==o.dims[1]&&a/2!==o.dims[1])throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${o.dims[1]}`);if(l>f)throw new Error("Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported")},xI=(r,e)=>{let{interleaved:n,numHeads:t,rotaryEmbeddingDim:o,scale:i}=e,s=r[0].dims[0],a=B.sizeFromDimension(r[0].dims,1),u=r[0].dims[r[0].dims.length-2],l=a/u,f=r[2].dims[1],c=o===0?f*2:l/t,p=new Array(s,u,l/c,c-f),b=B.computeStrides(p),h=[{type:1,data:i},{type:12,data:p},{type:12,data:b},...r[0].dims.length===3?new Array({type:12,data:[a,l,c,1]}):[],...r[0].dims.length===4?new Array({type:12,data:[a,c,u*c,1]}):[],...W(r[0].dims,r[1].dims,r[2].dims,r[3].dims,r[0].dims)],g=T=>{let w=D("input",r[0].dataType,r[0].dims.length),v=D("position_ids",r[1].dataType,r[1].dims.length),S=D("cos_cache",r[2].dataType,r[2].dims.length),$=D("sin_cache",r[3].dataType,r[3].dims.length),P=G("output",r[0].dataType,r[0].dims.length);return T.registerUniforms([{name:"scale",type:"f32"},{name:"global_shape",type:"u32",length:p.length},{name:"global_strides",type:"u32",length:b.length},{name:"input_output_strides",type:"u32",length:b.length}]),` ${T.declareVariables(w,v,S,$,P)} ${T.mainStart(Zr)} @@ -4118,7 +4118,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim; ${P.setByOffset("k",w.getByOffset("k"))} } - 
}`};return{name:"RotaryEmbedding",shaderCache:{hint:de({interleaved:n}).cacheKey,inputDependencies:["rank","rank","rank","rank"]},getShaderSource:g,getRunData:()=>({outputs:[{dims:r[0].dims,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(B.size(p)/Zr)},programUniforms:h})}},tx=(r,e)=>{gI(r.inputs,e),r.compute(bI(r.inputs,e))}});var yI,xI,nx,ox=C(()=>{"use strict";ue();ye();he();yI=r=>{if(!r||r.length<3)throw new Error("layerNorm requires at least 3 inputs.");let e=r[0],n=r[1],t=r[2];if(e.dataType!==n.dataType||e.dataType!==t.dataType)throw new Error("All inputs must have the same data type");if(e.dims.length!==3&&e.dims.length!==2)throw new Error("Input must be 2D or 3D");if(n.dims.length!==3&&n.dims.length!==2)throw new Error("Skip must be 2D or 3D");let o=e.dims[e.dims.length-1],i=e.dims[e.dims.length-2];if(n.dims[n.dims.length-1]!==o)throw new Error("Skip must have the same hidden size as input");if(n.dims[n.dims.length-2]!==i)throw new Error("Skip must have the same sequence length as input");if(t.dims.length!==1)throw new Error("Gamma must be 1D");if(t.dims[t.dims.length-1]!==o)throw new Error("Gamma must have the same hidden size as input");if(r.length>3){let s=r[3];if(s.dims.length!==1)throw new Error("Beta must be 1D");if(s.dims[s.dims.length-1]!==o)throw new Error("Beta must have the same hidden size as input")}if(r.length>4){let s=r[4];if(s.dims.length!==1)throw new Error("Bias must be 1D");if(s.dims[s.dims.length-1]!==o)throw new Error("Bias must have the same hidden size as input")}},xI=(r,e,n,t)=>{let o=e.simplified,i=r[0].dims,s=B.size(i),a=i,u=s,l=i.slice(-1)[0],f=t?i.slice(0,-1).concat(1):[],c=!o&&r.length>3,p=r.length>4,b=t&&n>1,h=t&&n>2,g=n>3,T=64,w=ze(l),v=[{type:12,data:u},{type:12,data:w},{type:12,data:l},{type:1,data:e.epsilon}],S=P=>{let E=[{name:"output_size",type:"u32"},{name:"components",type:"u32"},{name:"hidden_size",type:"u32"},{name:"epsilon",type:"f32"}],N=[D("x",r[0].dataType,r[0].dims,w),D("skip",r[1].dataType,r[1].dims,w),D("gamma",r[2].dataType,r[2].dims,w)];c&&N.push(D("beta",r[3].dataType,r[3].dims,w)),p&&N.push(D("bias",r[4].dataType,r[4].dims,w)),N.push(G("output",r[0].dataType,a,w)),b&&N.push(G("mean_output",1,f)),h&&N.push(G("inv_std_output",1,f)),g&&N.push(G("input_skip_bias_sum",r[0].dataType,a,w));let z=Be(r[0].dataType),q=Be(1,w);return` + }`};return{name:"RotaryEmbedding",shaderCache:{hint:de({interleaved:n}).cacheKey,inputDependencies:["rank","rank","rank","rank"]},getShaderSource:g,getRunData:()=>({outputs:[{dims:r[0].dims,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(B.size(p)/Zr)},programUniforms:h})}},tx=(r,e)=>{yI(r.inputs,e),r.compute(xI(r.inputs,e))}});var vI,wI,nx,ox=C(()=>{"use strict";ue();ye();he();vI=r=>{if(!r||r.length<3)throw new Error("layerNorm requires at least 3 inputs.");let e=r[0],n=r[1],t=r[2];if(e.dataType!==n.dataType||e.dataType!==t.dataType)throw new Error("All inputs must have the same data type");if(e.dims.length!==3&&e.dims.length!==2)throw new Error("Input must be 2D or 3D");if(n.dims.length!==3&&n.dims.length!==2)throw new Error("Skip must be 2D or 3D");let o=e.dims[e.dims.length-1],i=e.dims[e.dims.length-2];if(n.dims[n.dims.length-1]!==o)throw new Error("Skip must have the same hidden size as input");if(n.dims[n.dims.length-2]!==i)throw new Error("Skip must have the same sequence length as input");if(t.dims.length!==1)throw new Error("Gamma must be 1D");if(t.dims[t.dims.length-1]!==o)throw new Error("Gamma must have the same hidden size as input");if(r.length>3){let s=r[3];if(s.dims.length!==1)throw new 
Error("Beta must be 1D");if(s.dims[s.dims.length-1]!==o)throw new Error("Beta must have the same hidden size as input")}if(r.length>4){let s=r[4];if(s.dims.length!==1)throw new Error("Bias must be 1D");if(s.dims[s.dims.length-1]!==o)throw new Error("Bias must have the same hidden size as input")}},wI=(r,e,n,t)=>{let o=e.simplified,i=r[0].dims,s=B.size(i),a=i,u=s,l=i.slice(-1)[0],f=t?i.slice(0,-1).concat(1):[],c=!o&&r.length>3,p=r.length>4,b=t&&n>1,h=t&&n>2,g=n>3,T=64,w=ze(l),v=[{type:12,data:u},{type:12,data:w},{type:12,data:l},{type:1,data:e.epsilon}],S=P=>{let E=[{name:"output_size",type:"u32"},{name:"components",type:"u32"},{name:"hidden_size",type:"u32"},{name:"epsilon",type:"f32"}],N=[D("x",r[0].dataType,r[0].dims,w),D("skip",r[1].dataType,r[1].dims,w),D("gamma",r[2].dataType,r[2].dims,w)];c&&N.push(D("beta",r[3].dataType,r[3].dims,w)),p&&N.push(D("bias",r[4].dataType,r[4].dims,w)),N.push(G("output",r[0].dataType,a,w)),b&&N.push(G("mean_output",1,f)),h&&N.push(G("inv_std_output",1,f)),g&&N.push(G("input_skip_bias_sum",r[0].dataType,a,w));let z=Be(r[0].dataType),q=Be(1,w);return` ${P.registerUniforms(E).declareVariables(...N)} var sum_shared : array<${q}, ${T}>; @@ -4170,7 +4170,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${z}(inv_std_dev) * gamma[offset1d + i] ${c?"+ beta[offset1d + i]":""}; } - }`},$=[{dims:a,dataType:r[0].dataType}];return n>1&&$.push({dims:f,dataType:1}),n>2&&$.push({dims:f,dataType:1}),n>3&&$.push({dims:i,dataType:r[0].dataType}),{name:"SkipLayerNormalization",shaderCache:{hint:`${w};${b};${h};${g}`,inputDependencies:r.map((P,E)=>"type")},getShaderSource:S,getRunData:()=>({outputs:$,dispatchGroup:{x:Math.ceil(u/l)},programUniforms:v})}},nx=(r,e)=>{yI(r.inputs);let t=[0];r.outputCount>1&&t.push(-3),r.outputCount>2&&t.push(-3),r.outputCount>3&&t.push(3),r.compute(xI(r.inputs,e,r.outputCount,!1),{outputs:t})}});var vI,Ki,wI,ix,TI,_I,ax,sx,ux=C(()=>{"use strict";ue();ye();et();he();vI=(r,e)=>{if(!r||r.length<1)throw new Error("too few inputs");if(e.axes.length!==0){if(e.axes.length!==e.starts.length||e.axes.length!==e.ends.length)throw new Error("axes, starts and ends must have the same length")}else if(e.starts.length!==e.ends.length)throw new Error("starts and ends must have the same length");r.slice(1).forEach((n,t)=>{if(r[t+1].dataType!==6&&r[t+1].dataType!==7)throw new Error(`Input ${t} must be an array of int32 or int64`)})},Ki=(r,e)=>{let n=[];if(r.length>e)if(r[e].dataType===7)r[e].getBigInt64Array().forEach(t=>n.push(Number(t)));else if(r[e].dataType===6)r[e].getInt32Array().forEach(t=>n.push(Number(t)));else throw new Error(`Input ${e} must be an array of int32 or int64`);return n},wI=(r,e)=>{if(r.length>1){let n=Ki(r,1),t=Ki(r,2),o=Ki(r,3);return o.length===0&&(o=[...Array(r[0].dims.length).keys()]),de({starts:n,ends:t,axes:o})}else return e},ix=(r,e,n,t,o)=>{let i=r;return r<0&&(i+=n[t[e]]),o[e]<0?Math.max(0,Math.min(i,n[t[e]]-1)):Math.max(0,Math.min(i,n[t[e]]))},TI=(r,e,n)=>`fn calculateInputIndices(output_indices: ${e.type.indices}) -> ${r.type.indices} { + }`},$=[{dims:a,dataType:r[0].dataType}];return n>1&&$.push({dims:f,dataType:1}),n>2&&$.push({dims:f,dataType:1}),n>3&&$.push({dims:i,dataType:r[0].dataType}),{name:"SkipLayerNormalization",shaderCache:{hint:`${w};${b};${h};${g}`,inputDependencies:r.map((P,E)=>"type")},getShaderSource:S,getRunData:()=>({outputs:$,dispatchGroup:{x:Math.ceil(u/l)},programUniforms:v})}},nx=(r,e)=>{vI(r.inputs);let 
t=[0];r.outputCount>1&&t.push(-3),r.outputCount>2&&t.push(-3),r.outputCount>3&&t.push(3),r.compute(wI(r.inputs,e,r.outputCount,!1),{outputs:t})}});var TI,ji,_I,ix,II,SI,ax,sx,ux=C(()=>{"use strict";ue();ye();et();he();TI=(r,e)=>{if(!r||r.length<1)throw new Error("too few inputs");if(e.axes.length!==0){if(e.axes.length!==e.starts.length||e.axes.length!==e.ends.length)throw new Error("axes, starts and ends must have the same length")}else if(e.starts.length!==e.ends.length)throw new Error("starts and ends must have the same length");r.slice(1).forEach((n,t)=>{if(r[t+1].dataType!==6&&r[t+1].dataType!==7)throw new Error(`Input ${t} must be an array of int32 or int64`)})},ji=(r,e)=>{let n=[];if(r.length>e)if(r[e].dataType===7)r[e].getBigInt64Array().forEach(t=>n.push(Number(t)));else if(r[e].dataType===6)r[e].getInt32Array().forEach(t=>n.push(Number(t)));else throw new Error(`Input ${e} must be an array of int32 or int64`);return n},_I=(r,e)=>{if(r.length>1){let n=ji(r,1),t=ji(r,2),o=ji(r,3);return o.length===0&&(o=[...Array(r[0].dims.length).keys()]),de({starts:n,ends:t,axes:o})}else return e},ix=(r,e,n,t,o)=>{let i=r;return r<0&&(i+=n[t[e]]),o[e]<0?Math.max(0,Math.min(i,n[t[e]]-1)):Math.max(0,Math.min(i,n[t[e]]))},II=(r,e,n)=>`fn calculateInputIndices(output_indices: ${e.type.indices}) -> ${r.type.indices} { var input_indices: ${r.type.indices}; var carry = 0u; for (var i = ${n.length}; i >= 0; i--) { @@ -4188,15 +4188,15 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; ${r.indicesSet("input_indices","i","input_index")}; } return input_indices; - }`,_I=(r,e)=>{let n=r[0].dims,t=B.size(n),o=e.axes.length>0?B.normalizeAxes(e.axes,n.length):[...Array(n.length).keys()],i=Ki(r,4);i.forEach(w=>w!==0||(()=>{throw new Error("step cannot be 0")})),i.length===0&&(i=Array(o.length).fill(1));let s=e.starts.map((w,v)=>ix(w,v,n,o,i)),a=e.ends.map((w,v)=>ix(w,v,n,o,i));if(o.length!==s.length||o.length!==a.length)throw new Error("start, ends and axes should have the same number of elements");if(o.length!==n.length)for(let w=0;wMath.sign(w));i.forEach((w,v,S)=>{if(w<0){let $=(a[v]-s[v])/w,P=s[v],E=P+$*i[v];s[v]=E,a[v]=P,S[v]=-w}});let l=n.slice(0);o.forEach((w,v)=>{l[w]=Math.ceil((a[w]-s[w])/i[w])});let f={dims:l,dataType:r[0].dataType},c=G("output",r[0].dataType,l.length),p=D("input",r[0].dataType,r[0].dims.length),b=B.size(l),h=[{name:"outputSize",type:"u32"},{name:"starts",type:"u32",length:s.length},{name:"signs",type:"i32",length:u.length},{name:"steps",type:"u32",length:i.length}],g=[{type:12,data:b},{type:12,data:s},{type:6,data:u},{type:12,data:i},...W(r[0].dims,l)],T=w=>` + }`,SI=(r,e)=>{let n=r[0].dims,t=B.size(n),o=e.axes.length>0?B.normalizeAxes(e.axes,n.length):[...Array(n.length).keys()],i=ji(r,4);i.forEach(w=>w!==0||(()=>{throw new Error("step cannot be 0")})),i.length===0&&(i=Array(o.length).fill(1));let s=e.starts.map((w,v)=>ix(w,v,n,o,i)),a=e.ends.map((w,v)=>ix(w,v,n,o,i));if(o.length!==s.length||o.length!==a.length)throw new Error("start, ends and axes should have the same number of elements");if(o.length!==n.length)for(let w=0;wMath.sign(w));i.forEach((w,v,S)=>{if(w<0){let $=(a[v]-s[v])/w,P=s[v],E=P+$*i[v];s[v]=E,a[v]=P,S[v]=-w}});let l=n.slice(0);o.forEach((w,v)=>{l[w]=Math.ceil((a[w]-s[w])/i[w])});let 
f={dims:l,dataType:r[0].dataType},c=G("output",r[0].dataType,l.length),p=D("input",r[0].dataType,r[0].dims.length),b=B.size(l),h=[{name:"outputSize",type:"u32"},{name:"starts",type:"u32",length:s.length},{name:"signs",type:"i32",length:u.length},{name:"steps",type:"u32",length:i.length}],g=[{type:12,data:b},{type:12,data:s},{type:6,data:u},{type:12,data:i},...W(r[0].dims,l)],T=w=>` ${w.registerUniforms(h).declareVariables(p,c)} - ${TI(p,c,n)} + ${II(p,c,n)} ${w.mainStart()} ${w.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} let output_indices = ${c.offsetToIndices("global_idx")}; let input_indices = calculateInputIndices(output_indices); ${c.setByOffset("global_idx",p.getByIndices("input_indices"))} - }`;return{name:"Slice",shaderCache:{hint:`${u.length}_${s.length}_${i.length}`,inputDependencies:["rank"]},getShaderSource:T,getRunData:()=>({outputs:[f],dispatchGroup:{x:Math.ceil(t/64)},programUniforms:g})}},ax=(r,e)=>{vI(r.inputs,e);let n=wI(r.inputs,e);r.compute(_I(r.inputs,n),{inputs:[0]})},sx=r=>{let e=r.starts,n=r.ends,t=r.axes;return de({starts:e,ends:n,axes:t})}});var II,SI,lx,cx,fx=C(()=>{"use strict";ue();ye();et();he();II=r=>{if(!r||r.length!==1)throw new Error("Softmax op requires 1 input.")},SI=(r,e)=>{let n=r.dims,t=B.size(n),o=64,i=e.axis;if(i<0&&(i=n.length+i),iw===4?`max(max(${T}.x, ${T}.y), max(${T}.z, ${T}.w))`:w===2?`max(${T}.x, ${T}.y)`:w===3?`max(max(${T}.x, ${T}.y), ${T}.z)`:T,c=D("x",r.dataType,r.dims,u),p=G("result",r.dataType,r.dims,u),b=c.type.value,h=Be(r.dataType)==="f32"?`var threadMax = ${b}(-3.402823e+38f);`:`var threadMax = ${b}(-65504.0h);`,g=T=>` + }`;return{name:"Slice",shaderCache:{hint:`${u.length}_${s.length}_${i.length}`,inputDependencies:["rank"]},getShaderSource:T,getRunData:()=>({outputs:[f],dispatchGroup:{x:Math.ceil(t/64)},programUniforms:g})}},ax=(r,e)=>{TI(r.inputs,e);let n=_I(r.inputs,e);r.compute(SI(r.inputs,n),{inputs:[0]})},sx=r=>{let e=r.starts,n=r.ends,t=r.axes;return de({starts:e,ends:n,axes:t})}});var $I,AI,lx,cx,fx=C(()=>{"use strict";ue();ye();et();he();$I=r=>{if(!r||r.length!==1)throw new Error("Softmax op requires 1 input.")},AI=(r,e)=>{let n=r.dims,t=B.size(n),o=64,i=e.axis;if(i<0&&(i=n.length+i),iw===4?`max(max(${T}.x, ${T}.y), max(${T}.z, ${T}.w))`:w===2?`max(${T}.x, ${T}.y)`:w===3?`max(max(${T}.x, ${T}.y), ${T}.z)`:T,c=D("x",r.dataType,r.dims,u),p=G("result",r.dataType,r.dims,u),b=c.type.value,h=Be(r.dataType)==="f32"?`var threadMax = ${b}(-3.402823e+38f);`:`var threadMax = ${b}(-65504.0h);`,g=T=>` var rowMaxShared : ${b}; var rowSumShared : ${b}; var threadShared : array<${b}, ${o}>; @@ -4268,7 +4268,7 @@ ${ut}_indices[${Te}] = 0;`}),ie+=`${ut}_indices[${ce-2}] = 0u; let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared; setValue(row, col, row_stride, value); } - }`;return{name:"Softmax",shaderCache:{hint:`${u}`,inputDependencies:["type"]},getRunData:()=>({outputs:[{dims:n,dataType:r.dataType}],dispatchGroup:{x:a},programUniforms:[{type:6,data:l}]}),getShaderSource:g}},lx=(r,e)=>{II(r.inputs),r.compute(SI(r.inputs[0],e))},cx=r=>de({axis:r.axis})});var $I,AI,PI,OI,EI,dx,px,mx=C(()=>{"use strict";ue();ye();et();he();$I=r=>{if(!r||r.length<1)throw new Error("too few inputs")},AI=(r,e)=>{let n=[],t=e.numOutputs;return r[1].dims[0]>0&&(r[1].getBigInt64Array().forEach(o=>n.push(Number(o))),t=n.length),de({numOutputs:t,axis:e.axis,splitSizes:n})},PI=r=>` + 
}`;return{name:"Softmax",shaderCache:{hint:`${u}`,inputDependencies:["type"]},getRunData:()=>({outputs:[{dims:n,dataType:r.dataType}],dispatchGroup:{x:a},programUniforms:[{type:6,data:l}]}),getShaderSource:g}},lx=(r,e)=>{$I(r.inputs),r.compute(AI(r.inputs[0],e))},cx=r=>de({axis:r.axis})});var PI,OI,EI,CI,kI,dx,px,mx=C(()=>{"use strict";ue();ye();et();he();PI=r=>{if(!r||r.length<1)throw new Error("too few inputs")},OI=(r,e)=>{let n=[],t=e.numOutputs;return r[1].dims[0]>0&&(r[1].getBigInt64Array().forEach(o=>n.push(Number(o))),t=n.length),de({numOutputs:t,axis:e.axis,splitSizes:n})},EI=r=>` fn calculateOutputIndex(index: u32) -> u32 { for (var i: u32 = 0u; i < ${r}u; i += 1u ) { if (index < ${Z("uniforms.size_in_split_axis","i",r)}) { @@ -4276,14 +4276,14 @@ fn calculateOutputIndex(index: u32) -> u32 { } } return ${r}u; -}`,OI=r=>{let e=r.length,n=[];for(let t=0;t{let e=r.length,n=[];for(let t=0;t{let n=r[0].dims,t=B.size(n),o=r[0].dataType,i=B.normalizeAxis(e.axis,n.length),s=new Array(e.numOutputs),a=D("input",o,n.length),u=new Array(e.numOutputs),l=[],f=[],c=0,p=[{type:12,data:t}];for(let h=0;h` + }`},kI=(r,e)=>{let n=r[0].dims,t=B.size(n),o=r[0].dataType,i=B.normalizeAxis(e.axis,n.length),s=new Array(e.numOutputs),a=D("input",o,n.length),u=new Array(e.numOutputs),l=[],f=[],c=0,p=[{type:12,data:t}];for(let h=0;h` ${h.registerUniform("input_size","u32").registerUniform("size_in_split_axis","u32",u.length).declareVariables(a,...s)} - ${PI(u.length)} - ${OI(s)} + ${EI(u.length)} + ${CI(s)} ${h.mainStart()} ${h.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.input_size")} @@ -4296,7 +4296,7 @@ fn calculateOutputIndex(index: u32) -> u32 { ${a.indicesSet("indices",i,"index")}; } writeBufferData(output_number, indices, global_idx); - }`;return{name:"Split",shaderCache:{hint:e.cacheKey,inputDependencies:["rank"]},getShaderSource:b,getRunData:()=>({outputs:l,dispatchGroup:{x:Math.ceil(t/64)},programUniforms:p})}},dx=(r,e)=>{$I(r.inputs);let n=r.inputs.length===1?e:AI(r.inputs,e);r.compute(EI(r.inputs,n),{inputs:[0]})},px=r=>{let e=r.axis,n=r.splitSizes,t=r.numOutputs<0?n.length:r.numOutputs;if(t!==n.length)throw new Error("numOutputs and splitSizes lengh must be equal");return de({axis:e,numOutputs:t,splitSizes:n})}});var CI,kI,hx,gx=C(()=>{"use strict";ue();ye();he();CI=(r,e,n,t,o)=>{let i=G("output_data",o,n.length,4),s=D("a_data",e[1].dataType,e[1].dims.length,4),a=D("b_data",e[2].dataType,e[2].dims.length,4),u=D("c_data",e[0].dataType,e[0].dims.length,4),l,f=(c,p,b)=>`select(${p}, ${c}, ${b})`;if(!t)l=i.setByOffset("global_idx",f(s.getByOffset("global_idx"),a.getByOffset("global_idx"),u.getByOffset("global_idx")));else{let c=(p,b,h="")=>{let g=`a_data[index_a${b}][component_a${b}]`,T=`b_data[index_b${b}][component_b${b}]`,w=`bool(c_data[index_c${b}] & (0xffu << (component_c${b} * 8)))`;return` + }`;return{name:"Split",shaderCache:{hint:e.cacheKey,inputDependencies:["rank"]},getShaderSource:b,getRunData:()=>({outputs:l,dispatchGroup:{x:Math.ceil(t/64)},programUniforms:p})}},dx=(r,e)=>{PI(r.inputs);let n=r.inputs.length===1?e:OI(r.inputs,e);r.compute(kI(r.inputs,n),{inputs:[0]})},px=r=>{let e=r.axis,n=r.splitSizes,t=r.numOutputs<0?n.length:r.numOutputs;if(t!==n.length)throw new Error("numOutputs and splitSizes lengh must be equal");return de({axis:e,numOutputs:t,splitSizes:n})}});var DI,BI,hx,gx=C(()=>{"use strict";ue();ye();he();DI=(r,e,n,t,o)=>{let 
i=G("output_data",o,n.length,4),s=D("a_data",e[1].dataType,e[1].dims.length,4),a=D("b_data",e[2].dataType,e[2].dims.length,4),u=D("c_data",e[0].dataType,e[0].dims.length,4),l,f=(c,p,b)=>`select(${p}, ${c}, ${b})`;if(!t)l=i.setByOffset("global_idx",f(s.getByOffset("global_idx"),a.getByOffset("global_idx"),u.getByOffset("global_idx")));else{let c=(p,b,h="")=>{let g=`a_data[index_a${b}][component_a${b}]`,T=`b_data[index_b${b}][component_b${b}]`,w=`bool(c_data[index_c${b}] & (0xffu << (component_c${b} * 8)))`;return` let output_indices${b} = ${i.offsetToIndices(`global_idx * 4u + ${b}u`)}; let offset_a${b} = ${s.broadcastedIndicesToOffset(`output_indices${b}`,i)}; let offset_b${b} = ${a.broadcastedIndicesToOffset(`output_indices${b}`,i)}; @@ -4324,10 +4324,10 @@ fn calculateOutputIndex(index: u32) -> u32 { ${r.mainStart()} ${r.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.vec_size")} ${l} - }`},kI=r=>{let e=r[1].dims,n=r[2].dims,t=r[0].dims,o=r[1].dataType,i=!(B.areEqual(e,n)&&B.areEqual(n,t)),s=e,a=B.size(e);if(i){let l=nr.calcShape(nr.calcShape(e,n,!1),t,!1);if(!l)throw new Error("Can't perform where op on the given tensors");s=l,a=B.size(s)}let u=Math.ceil(a/4);return{name:"Where",shaderCache:{inputDependencies:["rank","rank","rank"]},getShaderSource:l=>CI(l,r,s,i,o),getRunData:()=>({outputs:[{dims:s,dataType:o}],dispatchGroup:{x:Math.ceil(a/64/4)},programUniforms:[{type:12,data:u},...W(t,e,n,s)]})}},hx=r=>{r.compute(kI(r.inputs))}});var bx,yx=C(()=>{"use strict";xg();Li();Tg();Ig();lb();vb();_b();eu();Vb();Wb();Kb();Jb();ty();ny();ay();ly();dy();Ty();Iy();$y();ru();Oy();fu();Cy();jy();Zy();Di();ex();rx();ox();ux();fx();mx();pu();Jr();Ni();gx();bx=new Map([["Abs",[Sg]],["Acos",[$g]],["Acosh",[Ag]],["Add",[cb]],["ArgMax",[yg,Xs]],["ArgMin",[bg,Xs]],["Asin",[Pg]],["Asinh",[Og]],["Atan",[Eg]],["Atanh",[Cg]],["Attention",[vg]],["AveragePool",[Fy,zy]],["BatchNormalization",[wg]],["BiasAdd",[_g]],["BiasSplitGelu",[ub]],["Cast",[Dg,kg]],["Ceil",[Lg]],["Clip",[Bg]],["Concat",[wb,Tb]],["Conv",[au,iu]],["ConvTranspose",[Mb,Fb]],["Cos",[Rg]],["Cosh",[Ng]],["CumSum",[Gb,Ub]],["DepthToSpace",[Hb,qb]],["Div",[fb]],["Einsum",[Zb,Yb]],["Elu",[zg,Zn]],["Equal",[db]],["Erf",[Fg]],["Exp",[Mg]],["Expand",[ey]],["FastGelu",[ry]],["Floor",[Vg]],["FusedConv",[au,iu]],["Gather",[iy,oy]],["GatherElements",[uy,sy]],["Gelu",[Gg]],["Gemm",[fy,cy]],["GlobalAveragePool",[Gy,Vy]],["GlobalMaxPool",[Ky,qy]],["Greater",[gb]],["GreaterOrEqual",[yb]],["GroupQueryAttention",[wy,vy]],["HardSigmoid",[Zg,Xg]],["InstanceNormalization",[_y]],["LayerNormalization",[Sy]],["LeakyRelu",[Ug,Zn]],["Less",[bb]],["LessOrEqual",[xb]],["Log",[ib]],["MatMul",[Bb]],["MatMulNBits",[Ay,Py]],["MaxPool",[Wy,Hy]],["Mul",[pb]],["MultiHeadAttention",[hy,my]],["Neg",[Hg]],["Not",[Wg]],["Pad",[Ey]],["Pow",[mb]],["QuickGelu",[ab,Zn]],["Range",[Xy]],["Reciprocal",[qg]],["ReduceMin",[fg]],["ReduceMean",[ag]],["ReduceMax",[cg]],["ReduceSum",[pg]],["ReduceProd",[dg]],["ReduceL1",[sg]],["ReduceL2",[ug]],["ReduceLogSum",[hg]],["ReduceLogSumExp",[lg]],["ReduceSumSquare",[mg]],["Relu",[Kg]],["Resize",[Jy,Qy]],["RotaryEmbedding",[tx]],["Sigmoid",[jg]],["Sin",[Yg]],["Sinh",[Jg]],["Slice",[ax,sx]],["SkipLayerNormalization",[nx]],["Split",[dx,px]],["Sqrt",[Qg]],["Softmax",[lx,cx]],["Sub",[hb]],["Tan",[eb]],["Tanh",[rb]],["ThresholdedRelu",[ob,Zn]],["Tile",[by]],["Transpose",[Kh,jh]],["Where",[hx]]])});var ji,xx=C(()=>{"use strict";ft();mr();he();ji=class{constructor(e){this.backend=e;this.repo=new Map,this.attributesBound=!1}getArtifact(e){return 
this.repo.get(e)}setArtifact(e,n){this.repo.set(e,n)}run(e,n,t,o,i){St(e.programInfo.name);let s=this.backend.device,a=this.backend.getComputePassEncoder();this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2);let u=[];for(let f of n)u.push({binding:u.length,resource:{buffer:f.buffer}});for(let f of t)u.push({binding:u.length,resource:{buffer:f.buffer}});i&&u.push({binding:u.length,resource:i});let l=s.createBindGroup({layout:e.computePipeline.getBindGroupLayout(0),entries:u,label:e.programInfo.name});if(this.backend.sessionStatus==="capturing"){let f={kernelId:this.backend.currentKernelId,computePipeline:e.computePipeline,bindGroup:l,dispatchGroup:o};this.backend.capturedCommandList.get(this.backend.currentSessionId).push(f)}a.setPipeline(e.computePipeline),a.setBindGroup(0,l),a.dispatchWorkgroups(...o),this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2+1),this.backend.pendingDispatchNumber++,(this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber||this.backend.queryType==="at-passes")&&this.backend.endComputePass(),this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber&&this.backend.flush(),yt(e.programInfo.name)}dispose(){}build(e,n){St(e.name);let t=this.backend.device,o=[];t.features.has("shader-f16")&&o.push("enable f16;");let i=Hh(n,this.backend.device.limits),s=e.getShaderSource(i),a=`${o.join(` + }`},BI=r=>{let e=r[1].dims,n=r[2].dims,t=r[0].dims,o=r[1].dataType,i=!(B.areEqual(e,n)&&B.areEqual(n,t)),s=e,a=B.size(e);if(i){let l=nr.calcShape(nr.calcShape(e,n,!1),t,!1);if(!l)throw new Error("Can't perform where op on the given tensors");s=l,a=B.size(s)}let u=Math.ceil(a/4);return{name:"Where",shaderCache:{inputDependencies:["rank","rank","rank"]},getShaderSource:l=>DI(l,r,s,i,o),getRunData:()=>({outputs:[{dims:s,dataType:o}],dispatchGroup:{x:Math.ceil(a/64/4)},programUniforms:[{type:12,data:u},...W(t,e,n,s)]})}},hx=r=>{r.compute(BI(r.inputs))}});var bx,yx=C(()=>{"use strict";xg();Li();Tg();Ig();lb();vb();_b();tu();Vb();Wb();jb();Jb();ty();ny();ay();ly();dy();Ty();Iy();$y();nu();Oy();du();Cy();Ky();Zy();Di();ex();rx();ox();ux();fx();mx();mu();Jr();Ni();gx();bx=new 
Map([["Abs",[Sg]],["Acos",[$g]],["Acosh",[Ag]],["Add",[cb]],["ArgMax",[yg,Zs]],["ArgMin",[bg,Zs]],["Asin",[Pg]],["Asinh",[Og]],["Atan",[Eg]],["Atanh",[Cg]],["Attention",[vg]],["AveragePool",[Fy,zy]],["BatchNormalization",[wg]],["BiasAdd",[_g]],["BiasSplitGelu",[ub]],["Cast",[Dg,kg]],["Ceil",[Lg]],["Clip",[Bg]],["Concat",[wb,Tb]],["Conv",[su,au]],["ConvTranspose",[Mb,Fb]],["Cos",[Rg]],["Cosh",[Ng]],["CumSum",[Gb,Ub]],["DepthToSpace",[Hb,qb]],["Div",[fb]],["Einsum",[Zb,Yb]],["Elu",[zg,Zn]],["Equal",[db]],["Erf",[Fg]],["Exp",[Mg]],["Expand",[ey]],["FastGelu",[ry]],["Floor",[Vg]],["FusedConv",[su,au]],["Gather",[iy,oy]],["GatherElements",[uy,sy]],["Gelu",[Gg]],["Gemm",[fy,cy]],["GlobalAveragePool",[Gy,Vy]],["GlobalMaxPool",[jy,qy]],["Greater",[gb]],["GreaterOrEqual",[yb]],["GroupQueryAttention",[wy,vy]],["HardSigmoid",[Zg,Xg]],["InstanceNormalization",[_y]],["LayerNormalization",[Sy]],["LeakyRelu",[Ug,Zn]],["Less",[bb]],["LessOrEqual",[xb]],["Log",[ib]],["MatMul",[Bb]],["MatMulNBits",[Ay,Py]],["MaxPool",[Wy,Hy]],["Mul",[pb]],["MultiHeadAttention",[hy,my]],["Neg",[Hg]],["Not",[Wg]],["Pad",[Ey]],["Pow",[mb]],["QuickGelu",[ab,Zn]],["Range",[Xy]],["Reciprocal",[qg]],["ReduceMin",[fg]],["ReduceMean",[ag]],["ReduceMax",[cg]],["ReduceSum",[pg]],["ReduceProd",[dg]],["ReduceL1",[sg]],["ReduceL2",[ug]],["ReduceLogSum",[hg]],["ReduceLogSumExp",[lg]],["ReduceSumSquare",[mg]],["Relu",[jg]],["Resize",[Jy,Qy]],["RotaryEmbedding",[tx]],["Sigmoid",[Kg]],["Sin",[Yg]],["Sinh",[Jg]],["Slice",[ax,sx]],["SkipLayerNormalization",[nx]],["Split",[dx,px]],["Sqrt",[Qg]],["Softmax",[lx,cx]],["Sub",[hb]],["Tan",[eb]],["Tanh",[rb]],["ThresholdedRelu",[ob,Zn]],["Tile",[by]],["Transpose",[jh,Kh]],["Where",[hx]]])});var Ki,xx=C(()=>{"use strict";ft();mr();he();Ki=class{constructor(e){this.backend=e;this.repo=new Map,this.attributesBound=!1}getArtifact(e){return this.repo.get(e)}setArtifact(e,n){this.repo.set(e,n)}run(e,n,t,o,i){St(e.programInfo.name);let s=this.backend.device,a=this.backend.getComputePassEncoder();this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2);let u=[];for(let f of n)u.push({binding:u.length,resource:{buffer:f.buffer}});for(let f of t)u.push({binding:u.length,resource:{buffer:f.buffer}});i&&u.push({binding:u.length,resource:i});let l=s.createBindGroup({layout:e.computePipeline.getBindGroupLayout(0),entries:u,label:e.programInfo.name});if(this.backend.sessionStatus==="capturing"){let f={kernelId:this.backend.currentKernelId,computePipeline:e.computePipeline,bindGroup:l,dispatchGroup:o};this.backend.capturedCommandList.get(this.backend.currentSessionId).push(f)}a.setPipeline(e.computePipeline),a.setBindGroup(0,l),a.dispatchWorkgroups(...o),this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2+1),this.backend.pendingDispatchNumber++,(this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber||this.backend.queryType==="at-passes")&&this.backend.endComputePass(),this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber&&this.backend.flush(),yt(e.programInfo.name)}dispose(){}build(e,n){St(e.name);let t=this.backend.device,o=[];t.features.has("shader-f16")&&o.push("enable f16;");let i=Hh(n,this.backend.device.limits),s=e.getShaderSource(i),a=`${o.join(` `)} ${i.additionalImplementations} -${s}`,u=t.createShaderModule({code:a,label:e.name});Ne("verbose",()=>`[WebGPU] ${e.name} shader code: ${a}`);let l=t.createComputePipeline({compute:{module:u,entryPoint:"main"},layout:"auto",label:e.name});return 
yt(e.name),{programInfo:e,computePipeline:l,uniformVariablesInfo:i.variablesInfo}}normalizeDispatchGroupSize(e){let n=typeof e=="number"?e:e.x,t=typeof e=="number"?1:e.y||1,o=typeof e=="number"?1:e.z||1,i=this.backend.device.limits.maxComputeWorkgroupsPerDimension;if(n<=i&&t<=i&&o<=i)return[n,t,o];let s=n*t*o,a=Math.ceil(Math.sqrt(s));if(a>i){if(a=Math.ceil(Math.cbrt(s)),a>i)throw new Error("Total dispatch size exceeds WebGPU maximum.");return[a,a,a]}else return[a,a,1]}}});var DI,BI,mu,Xi,vx=C(()=>{"use strict";ft();ue();mr();Fh();Wh();yx();xx();DI=(r,e)=>{if(e.length!==r.length)throw new Error(`inputDependencies length ${e.length} is not equal to inputTensors length ${r.length}.`);let n=[];for(let t=0;t{let t=r.name;return r.shaderCache?.hint&&(t+="["+r.shaderCache.hint+"]"),t+=":"+n+`:${DI(e,r.shaderCache?.inputDependencies??new Array(e.length).fill("dims"))}`,t},mu=class{constructor(e){e&&(this.architecture=e.architecture,this.vendor=e.vendor)}isArchitecture(e){return this.architecture===e}isVendor(e){return this.vendor===e}},Xi=class{constructor(){this.currentSessionId=null;this.currentKernelId=null;this.commandEncoder=null;this.computePassEncoder=null;this.maxDispatchNumber=16;this.pendingDispatchNumber=0;this.pendingKernels=[];this.pendingQueries=new Map;this.sessionStatus="default";this.capturedCommandList=new Map;this.capturedPendingKernels=new Map;this.sessionExternalDataMapping=new Map}get currentKernelCustomData(){if(this.currentKernelId===null)throw new Error("currentKernelCustomData(): currentKernelId is null. (should not happen)");let e=this.kernelCustomData.get(this.currentKernelId);return e||(e={},this.kernelCustomData.set(this.currentKernelId,e)),e}async initialize(e,n){this.env=e;let t=[],o={requiredLimits:{maxComputeWorkgroupStorageSize:n.limits.maxComputeWorkgroupStorageSize,maxComputeWorkgroupsPerDimension:n.limits.maxComputeWorkgroupsPerDimension,maxStorageBufferBindingSize:n.limits.maxStorageBufferBindingSize,maxBufferSize:n.limits.maxBufferSize,maxComputeInvocationsPerWorkgroup:n.limits.maxComputeInvocationsPerWorkgroup,maxComputeWorkgroupSizeX:n.limits.maxComputeWorkgroupSizeX,maxComputeWorkgroupSizeY:n.limits.maxComputeWorkgroupSizeY,maxComputeWorkgroupSizeZ:n.limits.maxComputeWorkgroupSizeZ},requiredFeatures:t};n.features.has("chromium-experimental-timestamp-query-inside-passes")?t.push("chromium-experimental-timestamp-query-inside-passes"):n.features.has("timestamp-query")&&t.push("timestamp-query"),n.features.has("shader-f16")&&t.push("shader-f16"),this.device=await n.requestDevice(o),this.adapterInfo=new mu(n.info||await n.requestAdapterInfo()),this.gpuDataManager=Uh(this),this.programManager=new ji(this),this.kernels=new Map,this.kernelPersistentData=new Map,this.kernelCustomData=new Map,Nh(e.logLevel,!!e.debug),this.device.onuncapturederror=i=>{i.error instanceof GPUValidationError&&console.error(`An uncaught WebGPU validation error was raised: ${i.error.message}`)},Object.defineProperty(this.env.webgpu,"device",{value:this.device,writable:!1,enumerable:!0,configurable:!1}),Object.defineProperty(this.env.webgpu,"adapter",{value:n,writable:!1,enumerable:!0,configurable:!1}),this.setQueryType()}dispose(){typeof this.querySet<"u"&&this.querySet.destroy(),this.gpuDataManager.dispose()}getCommandEncoder(){return this.commandEncoder||(this.commandEncoder=this.device.createCommandEncoder()),this.commandEncoder}getComputePassEncoder(){if(!this.computePassEncoder){let 
e=this.getCommandEncoder(),n={};this.queryType==="at-passes"&&(n.timestampWrites={querySet:this.querySet,beginningOfPassWriteIndex:this.pendingDispatchNumber*2,endOfPassWriteIndex:this.pendingDispatchNumber*2+1}),this.computePassEncoder=e.beginComputePass(n)}return this.computePassEncoder}endComputePass(){this.computePassEncoder&&(this.computePassEncoder.end(),this.computePassEncoder=null)}flush(){if(!this.commandEncoder)return;St(),this.endComputePass();let e;this.queryType!=="none"&&(this.commandEncoder.resolveQuerySet(this.querySet,0,this.pendingDispatchNumber*2,this.queryResolveBuffer,0),e=this.device.createBuffer({size:this.pendingDispatchNumber*2*8,usage:GPUBufferUsage.MAP_READ|GPUBufferUsage.COPY_DST}),this.pendingQueries.set(e,this.pendingKernels),this.pendingKernels=[],this.commandEncoder.copyBufferToBuffer(this.queryResolveBuffer,0,e,0,this.pendingDispatchNumber*2*8)),this.device.queue.submit([this.commandEncoder.finish()]),this.gpuDataManager.refreshPendingBuffers(),this.commandEncoder=null,this.pendingDispatchNumber=0,this.queryType!=="none"&&e.mapAsync(GPUMapMode.READ).then(()=>{let n=new BigUint64Array(e.getMappedRange()),t=this.pendingQueries.get(e);for(let o=0;o"u"&&(this.queryTimeBase=b);let g=Number(b-this.queryTimeBase),T=Number(h-this.queryTimeBase);if(!Number.isSafeInteger(g)||!Number.isSafeInteger(T))throw new RangeError("incorrect timestamp range");if(this.env.webgpu.profiling?.ondata)this.env.webgpu.profiling.ondata({version:1,inputsMetadata:c.map(w=>({dims:w.dims,dataType:Ar(w.dataType)})),outputsMetadata:p.map(w=>({dims:w.dims,dataType:Ar(w.dataType)})),kernelId:s,kernelType:u,kernelName:l,programName:f,startTime:g,endTime:T});else{let w="";c.forEach((S,$)=>{w+=`input[${$}]: [${S.dims}] | ${Ar(S.dataType)}, `});let v="";p.forEach((S,$)=>{v+=`output[${$}]: [${S.dims}] | ${Ar(S.dataType)}, `}),console.log(`[profiling] kernel "${s}|${u}|${l}|${f}" ${w}${v}execution time: ${T-g} ns`)}_o("GPU",`${f}::${b}::${h}`)}e.unmap(),this.pendingQueries.delete(e)}),yt()}run(e,n,t,o,i,s){St(e.name);let a=[];for(let S=0;S$):t;if(c.length!==u.length)throw new Error(`Output size ${c.length} must be equal to ${u.length}.`);let p=[],b=[];for(let S=0;S=s)throw new Error(`Invalid output index: ${c[S]}`);if(c[S]===-3)continue;let $=c[S]===-1,P=c[S]===-2,E=$||P?i(u[S].dataType,u[S].dims):o(c[S],u[S].dataType,u[S].dims);if(p.push(E),E.data===0)continue;let N=this.gpuDataManager.get(E.data);if(!N)throw new Error(`no GPU data for output: ${E.data}`);if($&&this.temporaryData.push(N),P){let z=this.kernelPersistentData.get(this.currentKernelId);z||(z=[],this.kernelPersistentData.set(this.currentKernelId,z)),z.push(N)}b.push(N)}if(a.length!==n.length||b.length!==p.length){if(b.length===0)return yt(e.name),p;throw new Error(`Program ${e.name} has zero-sized tensor(s) in inputs or outputs. 
This is not supported now.`)}let h;if(f){let S=0,$=[];f.forEach(z=>{let q=typeof z.data=="number"?[z.data]:z.data;if(q.length===0)return;let K=z.type===10?2:4,F,_e;z.type===10?(_e=q.length>4?16:q.length>2?8:q.length*K,F=q.length>4?16:K*q.length):(_e=q.length<=2?q.length*K:16,F=16),S=Math.ceil(S/_e)*_e,$.push(S);let $e=z.type===10?8:4;S+=q.length>4?Math.ceil(q.length/$e)*F:q.length*K});let P=16;S=Math.ceil(S/P)*P;let E=new ArrayBuffer(S);f.forEach((z,q)=>{let K=$[q],F=typeof z.data=="number"?[z.data]:z.data;if(z.type===6)new Int32Array(E,K,F.length).set(F);else if(z.type===12)new Uint32Array(E,K,F.length).set(F);else if(z.type===10)new Uint16Array(E,K,F.length).set(F);else if(z.type===1)new Float32Array(E,K,F.length).set(F);else throw new Error(`Unsupported uniform type: ${Ar(z.type)}`)});let N=this.gpuDataManager.create(S,GPUBufferUsage.COPY_DST|GPUBufferUsage.UNIFORM);this.device.queue.writeBuffer(N.buffer,0,E,0,S),this.gpuDataManager.release(N.id),h={offset:0,size:S,buffer:N.buffer}}let g=this.programManager.normalizeDispatchGroupSize(l),T=g[1]===1&&g[2]===1,w=BI(e,n,T),v=this.programManager.getArtifact(w);if(v||(v=this.programManager.build(e,g),this.programManager.setArtifact(w,v),Ne("info",()=>`[artifact] key: ${w}, programName: ${e.name}`)),f&&v.uniformVariablesInfo){if(f.length!==v.uniformVariablesInfo.length)throw new Error(`Uniform variables count mismatch: expect ${v.uniformVariablesInfo.length}, got ${f.length} in program "${v.programInfo.name}".`);for(let S=0;S`[ProgramManager] run "${e.name}" (key=${w}) with ${g[0]}x${g[1]}x${g[2]}`),this.queryType!=="none"||this.sessionStatus==="capturing"){let S={kernelId:this.currentKernelId,programName:v.programInfo.name,inputTensorViews:n,outputTensorViews:p};this.pendingKernels.push(S),this.sessionStatus==="capturing"&&this.capturedPendingKernels.get(this.currentSessionId).push(S)}return this.programManager.run(v,a,b,g,h),yt(e.name),p}upload(e,n){this.gpuDataManager.upload(e,n)}memcpy(e,n){this.gpuDataManager.memcpy(e,n)}async download(e,n){await this.gpuDataManager.download(e,n)}alloc(e){return this.gpuDataManager.create(e).id}free(e){return this.gpuDataManager.release(e)}createKernel(e,n,t,o){let i=bx.get(e);if(!i)throw new Error(`kernel not implemented: ${e}`);let s={kernelType:e,kernelName:o,kernelEntry:i[0],attributes:[i[1],t]};this.kernels.set(n,s)}releaseKernel(e){let n=this.kernelPersistentData.get(e);if(n){for(let t of n)this.gpuDataManager.release(t.id);this.kernelPersistentData.delete(e)}this.kernelCustomData.delete(e),this.kernels.delete(e)}computeKernel(e,n,t){let o=this.kernels.get(e);if(!o)throw new Error(`kernel not created: ${e}`);let i=o.kernelType,s=o.kernelName,a=o.kernelEntry,u=o.attributes;if(this.currentKernelId!==null)throw new Error(`kernel "[${i}] ${s}" is not allowed to be called recursively`);this.currentKernelId=e,u[0]&&(u[1]=u[0](u[1]),u[0]=void 0),Ne("info",()=>`[WebGPU] Start to run kernel "[${i}] ${s}"...`);let l=this.env.debug;this.temporaryData=[];try{return l&&this.device.pushErrorScope("validation"),a(n,u[1]),0}catch(f){return t.push(Promise.resolve(`[WebGPU] Kernel "[${i}] ${s}" failed. 
${f}`)),1}finally{l&&t.push(this.device.popErrorScope().then(f=>f?`GPU validation error for kernel "[${i}] ${s}": ${f.message}`:null));for(let f of this.temporaryData)this.gpuDataManager.release(f.id);this.temporaryData=[],this.currentKernelId=null}}registerBuffer(e,n,t,o){let i=this.sessionExternalDataMapping.get(e);i||(i=new Map,this.sessionExternalDataMapping.set(e,i));let s=i.get(n),a=this.gpuDataManager.registerExternalBuffer(t,o,s?.[1]);return i.set(n,[a,t]),a}unregisterBuffers(e){let n=this.sessionExternalDataMapping.get(e);n&&(n.forEach(t=>this.gpuDataManager.unregisterExternalBuffer(t[1])),this.sessionExternalDataMapping.delete(e))}getBuffer(e){let n=this.gpuDataManager.get(e);if(!n)throw new Error(`no GPU data for buffer: ${e}`);return n.buffer}createDownloader(e,n,t){return async()=>{let o=await Gs(this,e,n);return zh(o.buffer,t)}}writeTimestamp(e){this.queryType==="inside-passes"&&this.computePassEncoder.writeTimestamp(this.querySet,e)}setQueryType(){this.queryType="none",(this.env.webgpu.profiling?.mode==="default"||(typeof this.env.trace>"u"?this.env.wasm.trace:this.env.trace))&&(this.device.features.has("chromium-experimental-timestamp-query-inside-passes")?this.queryType="inside-passes":this.device.features.has("timestamp-query")&&(this.queryType="at-passes"),this.queryType!=="none"&&typeof this.querySet>"u"&&(this.querySet=this.device.createQuerySet({type:"timestamp",count:this.maxDispatchNumber*2}),this.queryResolveBuffer=this.device.createBuffer({size:this.maxDispatchNumber*2*8,usage:GPUBufferUsage.COPY_SRC|GPUBufferUsage.QUERY_RESOLVE})))}captureBegin(){Ne("info","captureBegin"),this.capturedCommandList.get(this.currentSessionId)||this.capturedCommandList.set(this.currentSessionId,[]),this.capturedPendingKernels.get(this.currentSessionId)||this.capturedPendingKernels.set(this.currentSessionId,[]),this.flush(),this.sessionStatus="capturing"}captureEnd(){Ne("info","captureEnd"),this.flush(),this.sessionStatus="default"}replay(){Ne("info","replay"),this.sessionStatus="replaying";let e=this.capturedCommandList.get(this.currentSessionId),n=this.capturedPendingKernels.get(this.currentSessionId),t=e.length;this.pendingKernels=[];for(let o=0;o=this.maxDispatchNumber||this.queryType==="at-passes")&&this.endComputePass(),this.pendingDispatchNumber>=this.maxDispatchNumber&&this.flush()}this.flush(),this.sessionStatus="default"}onReleaseSession(e){this.unregisterBuffers(e),this.capturedCommandList.has(e)&&this.capturedCommandList.delete(e),this.capturedPendingKernels.has(e)&&this.capturedPendingKernels.delete(e),this.gpuDataManager.onReleaseSession(e)}onRunStart(e){this.currentSessionId=e,this.setQueryType()}}});var wx={};sn(wx,{init:()=>LI});var to,hu,LI,Tx=C(()=>{"use strict";ue();vx();mr();ye();to=class r{constructor(e,n,t,o){this.module=e;this.dataType=n;this.data=t;this.dims=o}getFloat32Array(){if(this.dataType!==1)throw new Error("Invalid data type");let e=B.size(this.dims);return e===0?new Float32Array:new Float32Array(this.module.HEAP8.buffer,this.data,e)}getBigInt64Array(){if(this.dataType!==7)throw new Error("Invalid data type");let e=B.size(this.dims);return e===0?new BigInt64Array:new BigInt64Array(this.module.HEAP8.buffer,this.data,e)}getInt32Array(){if(this.dataType!==6)throw new Error("Invalid data type");let e=B.size(this.dims);return e===0?new Int32Array:new Int32Array(this.module.HEAP8.buffer,this.data,e)}reshape(e){if(B.size(e)!==B.size(this.dims))throw new Error("Invalid new shape");return new 
r(this.module,this.dataType,this.data,e)}},hu=class{constructor(e,n,t){this.module=e;this.backend=n;this.customDataOffset=0;this.customDataSize=0;this.adapterInfo=n.adapterInfo;let o=e.HEAPU32,i=t>>>2;this.opKernelContext=o[i++];let s=o[i++];this.outputCount=o[i++],this.customDataOffset=o[i++],this.customDataSize=o[i++];let a=[];for(let u=0;utypeof a=="number"?this.inputs[a]:a)??this.inputs,o=n?.outputs??[],i=(a,u,l)=>new to(this.module,u,this.output(a,l),l),s=(a,u)=>{let l=jr(a);if(!l)throw new Error(`Unsupported data type: ${a}`);let f=l*B.size(u),c=f>0?this.backend.gpuDataManager.create(f).id:0;return new to(this.module,a,c,u)};return this.backend.run(e,t,o,i,s,this.outputCount)}output(e,n){let t=this.module.stackSave();try{let o=this.module.stackAlloc((1+n.length)*4),i=o>>2;this.module.HEAPU32[i++]=n.length;for(let s=0;s{let o=e.jsepInit;if(!o)throw new Error("Failed to initialize JSEP. The WebAssembly module is not built with JSEP support.");if(r==="webgpu"){let i=new Xi;await i.initialize(n,t),o("webgpu",[i,s=>i.alloc(s),s=>i.free(s),(s,a,u,l=!1)=>{if(l)Ne("verbose",()=>`[WebGPU] jsepCopyGpuToGpu: src=${s}, dst=${a}, size=${u}`),i.memcpy(s,a);else{Ne("verbose",()=>`[WebGPU] jsepCopyCpuToGpu: dataOffset=${s}, gpuDataId=${a}, size=${u}`);let f=e.HEAPU8.subarray(s>>>0,(s>>>0)+u);i.upload(a,f)}},async(s,a,u)=>{Ne("verbose",()=>`[WebGPU] jsepCopyGpuToCpu: gpuDataId=${s}, dataOffset=${a}, size=${u}`),await i.download(s,()=>e.HEAPU8.subarray(a>>>0,(a>>>0)+u))},(s,a,u)=>i.createKernel(s,a,u,e.UTF8ToString(e._JsepGetNodeName(a))),s=>i.releaseKernel(s),(s,a,u,l)=>{Ne("verbose",()=>`[WebGPU] jsepRun: sessionHandle=${u}, kernel=${s}, contextDataOffset=${a}`);let f=new hu(e,i,a);return i.computeKernel(s,f,l)},()=>i.captureBegin(),()=>i.captureEnd(),()=>i.replay()])}else o("webnn")}});var RI,bi,yi,Qr,NI,qn,xi,vi,_x,wi,Ti,_i,ks=C(()=>{"use strict";kh();Bh();ue();Kr();Si();zs();RI=(r,e)=>{Ye()._OrtInit(r,e)!==0&&Ue("Can't initialize onnxruntime.")},bi=async r=>{RI(r.wasm.numThreads,jn(r.logLevel))},yi=async(r,e)=>{{let n=(Tx(),Pn(wx)).init;if(e==="webgpu"){if(typeof navigator>"u"||!navigator.gpu)throw new Error("WebGPU is not supported in current environment");let t=r.webgpu.adapter;if(t){if(typeof t.limits!="object"||typeof t.features!="object"||typeof t.requestDevice!="function")throw new Error("Invalid GPU adapter set in `env.webgpu.adapter`. It must be a GPUAdapter object.")}else{let o=r.webgpu.powerPreference;if(o!==void 0&&o!=="low-power"&&o!=="high-performance")throw new Error(`Invalid powerPreference setting: "${o}"`);let i=r.webgpu.forceFallbackAdapter;if(i!==void 0&&typeof i!="boolean")throw new Error(`Invalid forceFallbackAdapter setting: "${i}"`);if(t=await navigator.gpu.requestAdapter({powerPreference:o,forceFallbackAdapter:i}),!t)throw new Error('Failed to get GPU adapter. You may need to enable flag "--enable-unsafe-webgpu" if you are using Chrome.')}await n("webgpu",Ye(),r,t)}if(e==="webnn"){if(typeof navigator>"u"||!navigator.ml)throw new Error("WebNN is not supported in current environment");await n("webnn",Ye(),r)}}},Qr=new Map,NI=r=>{let e=Ye(),n=e.stackSave();try{let t=e.stackAlloc(8);return e._OrtGetInputOutputCount(r,t,t+4)!==0&&Ue("Can't get session input/output count."),[e.HEAP32[t/4],e.HEAP32[t/4+1]]}finally{e.stackRestore(n)}},qn=r=>{let e=Ye(),n=e._malloc(r.byteLength);if(n===0)throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${r.byteLength}.`);return e.HEAPU8.set(r,n),[n,r.byteLength]},xi=async(r,e)=>{let n,t,o=Ye();Array.isArray(r)?[n,t]=r:r.buffer===o.HEAPU8.buffer?[n,t]=[r.byteOffset,r.byteLength]:[n,t]=qn(r);let i=0,s=0,a=0,u=[],l=[],f=[];try{if([s,u]=Dh(e),e?.externalData&&o.mountExternalData){let v=[];for(let S of e.externalData){let $=typeof S=="string"?S:S.path;v.push(Xn(typeof S=="string"?S:S.data).then(P=>{o.mountExternalData($,P)}))}await Promise.all(v)}for(let v of e?.executionProviders??[])if((typeof v=="string"?v:v.name)==="webnn"){if(o.currentContext)throw new Error("WebNN execution provider is already set.");if(typeof v!="string"){let $=v,P=$?.context,E=$?.gpuDevice,N=$?.deviceType,z=$?.numThreads,q=$?.powerPreference;P?o.currentContext=P:E?o.currentContext=await navigator.ml.createContext(E):o.currentContext=await navigator.ml.createContext({deviceType:N,numThreads:z,powerPreference:q})}else o.currentContext=await navigator.ml.createContext();break}i=await o._OrtCreateSession(n,t,s),i===0&&Ue("Can't create a session."),o.currentContext&&(o.currentContext=void 0);let[c,p]=NI(i),b=!!e?.enableGraphCapture,h=[],g=[],T=[];for(let v=0;vv==="gpu-buffer")&&(a=o._OrtCreateBinding(i),a===0&&Ue("Can't create IO binding."),w={handle:a,outputPreferredLocations:T,outputPreferredLocationsEncoded:T.map(v=>Ns(v))}),Qr.set(i,[i,l,f,w,b,!1]),[i,h,g]}catch(c){throw l.forEach(p=>o._OrtFree(p)),f.forEach(p=>o._OrtFree(p)),a!==0&&o._OrtReleaseBinding(a),i!==0&&o._OrtReleaseSession(i),c}finally{o._free(n),s!==0&&o._OrtReleaseSessionOptions(s),u.forEach(c=>o._free(c)),o.unmountExternalData?.()}},vi=r=>{let e=Ye(),n=Qr.get(r);if(!n)throw new Error(`cannot release session. invalid session id: ${r}`);let[t,o,i,s,a]=n;s&&(a&&e._OrtClearBoundOutputs(s.handle),e._OrtReleaseBinding(s.handle)),e.jsepOnReleaseSession?.(r),o.forEach(u=>e._OrtFree(u)),i.forEach(u=>e._OrtFree(u)),e._OrtReleaseSession(t),Qr.delete(r)},_x=(r,e,n,t,o,i=!1)=>{if(!r){e.push(0);return}let s=Ye(),a=r[0],u=r[1],l=r[3],f,c;if(a==="string"&&l==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");if(i&&l!=="gpu-buffer")throw new Error(`External buffer must be provided for input/output index ${o} when enableGraphCapture is true.`);if(l==="gpu-buffer"){let h=r[2].gpuBuffer,g=jr(Rs(a));c=u.reduce((w,v)=>w*v,1)*g;let T=s.jsepRegisterBuffer;if(!T)throw new Error('Tensor location "gpu-buffer" is not supported without using WebGPU.');f=T(t,o,h,c)}else{let h=r[2];if(Array.isArray(h)){c=4*h.length,f=s._malloc(c),n.push(f);let g=f/4;for(let T=0;Ts.HEAP32[h++]=T);let g=s._OrtCreateTensor(Rs(a),f,c,b,u.length,Ns(l));g===0&&Ue(`Can't create tensor for input/output. session=${t}, index=${o}.`),e.push(g)}finally{s.stackRestore(p)}},wi=async(r,e,n,t,o,i)=>{let s=Ye(),a=Qr.get(r);if(!a)throw new Error(`cannot run inference. 
invalid session id: ${r}`);let u=a[0],l=a[1],f=a[2],c=a[3],p=a[4],b=a[5],h=e.length,g=t.length,T=0,w=[],v=[],S=[],$=[],P=s.stackSave(),E=s.stackAlloc(h*4),N=s.stackAlloc(h*4),z=s.stackAlloc(g*4),q=s.stackAlloc(g*4);try{[T,w]=Ch(i);for(let Q=0;Qmt*rt,1);pe=Ar(ie);let tn=c?.outputPreferredLocations[t[Q]];if(pe==="string"){if(tn==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");let mt=[],rt=ce/4;for(let Kt=0;Kt0){let mt=s.jsepGetBuffer;if(!mt)throw new Error('preferredLocation "gpu-buffer" is not supported without using WebGPU.');let rt=mt(ce),Kt=jr(ie);if(Kt===void 0||!Ai(pe))throw new Error(`Unsupported data type: ${pe}`);se=!0,qe.push([pe,Fe,{gpuBuffer:rt,download:s.jsepCreateDownloader(rt,Ke*Kt,pe),dispose:()=>{s._OrtReleaseTensor(ge)}},"gpu-buffer"])}else{let mt=$i(pe),rt=new mt(Ke);new Uint8Array(rt.buffer,rt.byteOffset,rt.byteLength).set(s.HEAPU8.subarray(ce,ce+rt.byteLength)),qe.push([pe,Fe,rt,"cpu"])}}finally{s.stackRestore(Ie),pe==="string"&&ce&&s._free(ce),se||s._OrtReleaseTensor(ge)}}return c&&!p&&(s._OrtClearBoundOutputs(c.handle),Qr.set(r,[u,l,f,c,p,!1])),qe}finally{s.stackRestore(P),v.forEach(K=>s._OrtReleaseTensor(K)),S.forEach(K=>s._OrtReleaseTensor(K)),$.forEach(K=>s._free(K)),T!==0&&s._OrtReleaseRunOptions(T),w.forEach(K=>s._free(K))}},Ti=r=>{let e=Ye(),n=Qr.get(r);if(!n)throw new Error("invalid session id");let t=n[0],o=e._OrtEndProfiling(t);o===0&&Ue("Can't get an profile file name."),e._OrtFree(o)},_i=r=>{let e=[];for(let n of r){let t=n[2];!Array.isArray(t)&&"buffer"in t&&e.push(t.buffer)}return e}});var en,Rt,ro,Yi,Ji,Zi,gu,bu,Sn,$n,FI,Ix,Sx,$x,Ax,Px,Ox,Ex,yu=C(()=>{"use strict";ft();ks();Kr();Hn();en=()=>!!le.wasm.proxy&&typeof document<"u",ro=!1,Yi=!1,Ji=!1,bu=new Map,Sn=(r,e)=>{let n=bu.get(r);n?n.push(e):bu.set(r,[e])},$n=()=>{if(ro||!Yi||Ji||!Rt)throw new Error("worker not ready")},FI=r=>{switch(r.data.type){case"init-wasm":ro=!1,r.data.err?(Ji=!0,gu[1](r.data.err)):(Yi=!0,gu[0]()),Zi&&(URL.revokeObjectURL(Zi),Zi=void 0);break;case"init-ep":case"copy-from":case"create":case"release":case"run":case"end-profiling":{let e=bu.get(r.data.type);r.data.err?e.shift()[1](r.data.err):e.shift()[0](r.data.out);break}default:}},Ix=async()=>{if(!Yi){if(ro)throw new Error("multiple calls to 'initWasm()' detected.");if(Ji)throw new Error("previous call to 'initWasm()' failed.");if(ro=!0,en())return new Promise((r,e)=>{Rt?.terminate(),Ph().then(([n,t])=>{try{Rt=t,Rt.onerror=i=>e(i),Rt.onmessage=FI,gu=[r,e];let o={type:"init-wasm",in:le};Rt.postMessage(o),Zi=n}catch(o){e(o)}},e)});try{await gi(le.wasm),await bi(le),Yi=!0}catch(r){throw Ji=!0,r}finally{ro=!1}}},Sx=async r=>{if(en())return $n(),new Promise((e,n)=>{Sn("init-ep",[e,n]);let t={type:"init-ep",in:{epName:r,env:le}};Rt.postMessage(t)});await yi(le,r)},$x=async r=>en()?($n(),new Promise((e,n)=>{Sn("copy-from",[e,n]);let t={type:"copy-from",in:{buffer:r}};Rt.postMessage(t,[r.buffer])})):qn(r),Ax=async(r,e)=>{if(en()){if(e?.preferredOutputLocation)throw new Error('session option "preferredOutputLocation" is not supported for proxy.');return $n(),new Promise((n,t)=>{Sn("create",[n,t]);let o={type:"create",in:{model:r,options:{...e}}},i=[];r instanceof Uint8Array&&i.push(r.buffer),Rt.postMessage(o,i)})}else return xi(r,e)},Px=async r=>{if(en())return $n(),new Promise((e,n)=>{Sn("release",[e,n]);let t={type:"release",in:r};Rt.postMessage(t)});vi(r)},Ox=async(r,e,n,t,o,i)=>{if(en()){if(n.some(s=>s[3]!=="cpu"))throw new Error("input tensor on GPU is not supported for proxy.");if(o.some(s=>s))throw new 
Error("pre-allocated output tensor is not supported for proxy.");return $n(),new Promise((s,a)=>{Sn("run",[s,a]);let u=n,l={type:"run",in:{sessionId:r,inputIndices:e,inputs:u,outputIndices:t,options:i}};Rt.postMessage(l,_i(u))})}else return wi(r,e,n,t,o,i)},Ex=async r=>{if(en())return $n(),new Promise((e,n)=>{Sn("end-profiling",[e,n]);let t={type:"end-profiling",in:r};Rt.postMessage(t)});Ti(r)}});var Cx,MI,Qi,kx=C(()=>{"use strict";ft();yu();ue();hi();zs();Cx=(r,e)=>{switch(r.location){case"cpu":return[r.type,r.dims,r.data,"cpu"];case"gpu-buffer":return[r.type,r.dims,{gpuBuffer:r.gpuBuffer},"gpu-buffer"];default:throw new Error(`invalid data location: ${r.location} for ${e()}`)}},MI=r=>{switch(r[3]){case"cpu":return new it(r[0],r[2],r[1]);case"gpu-buffer":{let e=r[0];if(!Ai(e))throw new Error(`not supported data type: ${e} for deserializing GPU tensor`);let{gpuBuffer:n,download:t,dispose:o}=r[2];return it.fromGpuBuffer(n,{dataType:e,dims:r[1],download:t,dispose:o})}default:throw new Error(`invalid data location: ${r[3]}`)}},Qi=class{async fetchModelAndCopyToWasmMemory(e){return $x(await Xn(e))}async loadModel(e,n){St();let t;typeof e=="string"?!1?t=await Xn(e):t=await this.fetchModelAndCopyToWasmMemory(e):t=e,[this.sessionId,this.inputNames,this.outputNames]=await Ax(t,n),yt()}async dispose(){return Px(this.sessionId)}async run(e,n,t){St();let o=[],i=[];Object.entries(e).forEach(p=>{let b=p[0],h=p[1],g=this.inputNames.indexOf(b);if(g===-1)throw new Error(`invalid input '${b}'`);o.push(h),i.push(g)});let s=[],a=[];Object.entries(n).forEach(p=>{let b=p[0],h=p[1],g=this.outputNames.indexOf(b);if(g===-1)throw new Error(`invalid output '${b}'`);s.push(h),a.push(g)});let u=o.map((p,b)=>Cx(p,()=>`input "${this.inputNames[i[b]]}"`)),l=s.map((p,b)=>p?Cx(p,()=>`output "${this.outputNames[a[b]]}"`):null),f=await Ox(this.sessionId,i,u,a,l,t),c={};for(let p=0;p{"use strict";ft();yu();kx();Hn();VI=()=>{if((typeof le.wasm.initTimeout!="number"||le.wasm.initTimeout<0)&&(le.wasm.initTimeout=0),le.wasm.simd===!1&&console.warn('Deprecated property "env.wasm.simd" is set to false. 
non-SIMD build is no longer provided, and this setting will be ignored.'),typeof le.wasm.proxy!="boolean"&&(le.wasm.proxy=!1),typeof le.wasm.trace!="boolean"&&(le.wasm.trace=!1),typeof le.wasm.numThreads!="number"||!Number.isInteger(le.wasm.numThreads)||le.wasm.numThreads<=0)if(typeof self<"u"&&!self.crossOriginIsolated)le.wasm.numThreads=1;else{let r=typeof navigator>"u"?Ta("node:os").cpus().length:navigator.hardwareConcurrency;le.wasm.numThreads=Math.min(4,Math.ceil((r||1)/2))}},ea=class{async init(e){VI(),await Ix(),await Sx(e)}async createInferenceSessionHandler(e,n){let t=new Qi;return await t.loadModel(e,n),Promise.resolve(t)}}});var Bx={};sn(Bx,{wasmBackend:()=>GI});var GI,Lx=C(()=>{"use strict";Dx();GI=new ea});ft();ft();ft();var mc="1.19.0";var YM=$a;{let r=(xh(),Pn(yh)).onnxjsBackend;vr("webgl",r,-10)}{let r=(Lx(),Pn(Bx)).wasmBackend;vr("webgpu",r,5),vr("webnn",r,5),vr("cpu",r,10),vr("wasm",r,10)}Object.defineProperty(le.versions,"web",{value:mc,enumerable:!0});export{ov as InferenceSession,_o as TRACE,St as TRACE_FUNC_BEGIN,yt as TRACE_FUNC_END,it as Tensor,av as TrainingSession,YM as default,le as env,vr as registerBackend}; +${s}`,u=t.createShaderModule({code:a,label:e.name});Ne("verbose",()=>`[WebGPU] ${e.name} shader code: ${a}`);let l=t.createComputePipeline({compute:{module:u,entryPoint:"main"},layout:"auto",label:e.name});return yt(e.name),{programInfo:e,computePipeline:l,uniformVariablesInfo:i.variablesInfo}}normalizeDispatchGroupSize(e){let n=typeof e=="number"?e:e.x,t=typeof e=="number"?1:e.y||1,o=typeof e=="number"?1:e.z||1,i=this.backend.device.limits.maxComputeWorkgroupsPerDimension;if(n<=i&&t<=i&&o<=i)return[n,t,o];let s=n*t*o,a=Math.ceil(Math.sqrt(s));if(a>i){if(a=Math.ceil(Math.cbrt(s)),a>i)throw new Error("Total dispatch size exceeds WebGPU maximum.");return[a,a,a]}else return[a,a,1]}}});var LI,RI,hu,Xi,vx=C(()=>{"use strict";ft();ue();mr();Fh();Wh();yx();xx();LI=(r,e)=>{if(e.length!==r.length)throw new Error(`inputDependencies length ${e.length} is not equal to inputTensors length ${r.length}.`);let n=[];for(let t=0;t{let t=r.name;return r.shaderCache?.hint&&(t+="["+r.shaderCache.hint+"]"),t+=":"+n+`:${LI(e,r.shaderCache?.inputDependencies??new Array(e.length).fill("dims"))}`,t},hu=class{constructor(e){e&&(this.architecture=e.architecture,this.vendor=e.vendor)}isArchitecture(e){return this.architecture===e}isVendor(e){return this.vendor===e}},Xi=class{constructor(){this.currentSessionId=null;this.currentKernelId=null;this.commandEncoder=null;this.computePassEncoder=null;this.maxDispatchNumber=16;this.pendingDispatchNumber=0;this.pendingKernels=[];this.pendingQueries=new Map;this.sessionStatus="default";this.capturedCommandList=new Map;this.capturedPendingKernels=new Map;this.sessionExternalDataMapping=new Map}get currentKernelCustomData(){if(this.currentKernelId===null)throw new Error("currentKernelCustomData(): currentKernelId is null. 
(should not happen)");let e=this.kernelCustomData.get(this.currentKernelId);return e||(e={},this.kernelCustomData.set(this.currentKernelId,e)),e}async initialize(e,n){this.env=e;let t=[],o={requiredLimits:{maxComputeWorkgroupStorageSize:n.limits.maxComputeWorkgroupStorageSize,maxComputeWorkgroupsPerDimension:n.limits.maxComputeWorkgroupsPerDimension,maxStorageBufferBindingSize:n.limits.maxStorageBufferBindingSize,maxBufferSize:n.limits.maxBufferSize,maxComputeInvocationsPerWorkgroup:n.limits.maxComputeInvocationsPerWorkgroup,maxComputeWorkgroupSizeX:n.limits.maxComputeWorkgroupSizeX,maxComputeWorkgroupSizeY:n.limits.maxComputeWorkgroupSizeY,maxComputeWorkgroupSizeZ:n.limits.maxComputeWorkgroupSizeZ},requiredFeatures:t};n.features.has("chromium-experimental-timestamp-query-inside-passes")?t.push("chromium-experimental-timestamp-query-inside-passes"):n.features.has("timestamp-query")&&t.push("timestamp-query"),n.features.has("shader-f16")&&t.push("shader-f16"),this.device=await n.requestDevice(o),this.adapterInfo=new hu(n.info||await n.requestAdapterInfo()),this.gpuDataManager=Uh(this),this.programManager=new Ki(this),this.kernels=new Map,this.kernelPersistentData=new Map,this.kernelCustomData=new Map,Nh(e.logLevel,!!e.debug),this.device.onuncapturederror=i=>{i.error instanceof GPUValidationError&&console.error(`An uncaught WebGPU validation error was raised: ${i.error.message}`)},Object.defineProperty(this.env.webgpu,"device",{value:this.device,writable:!1,enumerable:!0,configurable:!1}),Object.defineProperty(this.env.webgpu,"adapter",{value:n,writable:!1,enumerable:!0,configurable:!1}),this.setQueryType()}dispose(){typeof this.querySet<"u"&&this.querySet.destroy(),this.gpuDataManager.dispose()}getCommandEncoder(){return this.commandEncoder||(this.commandEncoder=this.device.createCommandEncoder()),this.commandEncoder}getComputePassEncoder(){if(!this.computePassEncoder){let e=this.getCommandEncoder(),n={};this.queryType==="at-passes"&&(n.timestampWrites={querySet:this.querySet,beginningOfPassWriteIndex:this.pendingDispatchNumber*2,endOfPassWriteIndex:this.pendingDispatchNumber*2+1}),this.computePassEncoder=e.beginComputePass(n)}return this.computePassEncoder}endComputePass(){this.computePassEncoder&&(this.computePassEncoder.end(),this.computePassEncoder=null)}flush(){if(!this.commandEncoder)return;St(),this.endComputePass();let e;this.queryType!=="none"&&(this.commandEncoder.resolveQuerySet(this.querySet,0,this.pendingDispatchNumber*2,this.queryResolveBuffer,0),e=this.device.createBuffer({size:this.pendingDispatchNumber*2*8,usage:GPUBufferUsage.MAP_READ|GPUBufferUsage.COPY_DST}),this.pendingQueries.set(e,this.pendingKernels),this.pendingKernels=[],this.commandEncoder.copyBufferToBuffer(this.queryResolveBuffer,0,e,0,this.pendingDispatchNumber*2*8)),this.device.queue.submit([this.commandEncoder.finish()]),this.gpuDataManager.refreshPendingBuffers(),this.commandEncoder=null,this.pendingDispatchNumber=0,this.queryType!=="none"&&e.mapAsync(GPUMapMode.READ).then(()=>{let n=new BigUint64Array(e.getMappedRange()),t=this.pendingQueries.get(e);for(let o=0;o"u"&&(this.queryTimeBase=b);let g=Number(b-this.queryTimeBase),T=Number(h-this.queryTimeBase);if(!Number.isSafeInteger(g)||!Number.isSafeInteger(T))throw new RangeError("incorrect timestamp 
range");if(this.env.webgpu.profiling?.ondata)this.env.webgpu.profiling.ondata({version:1,inputsMetadata:c.map(w=>({dims:w.dims,dataType:Ar(w.dataType)})),outputsMetadata:p.map(w=>({dims:w.dims,dataType:Ar(w.dataType)})),kernelId:s,kernelType:u,kernelName:l,programName:f,startTime:g,endTime:T});else{let w="";c.forEach((S,$)=>{w+=`input[${$}]: [${S.dims}] | ${Ar(S.dataType)}, `});let v="";p.forEach((S,$)=>{v+=`output[${$}]: [${S.dims}] | ${Ar(S.dataType)}, `}),console.log(`[profiling] kernel "${s}|${u}|${l}|${f}" ${w}${v}execution time: ${T-g} ns`)}_o("GPU",`${f}::${b}::${h}`)}e.unmap(),this.pendingQueries.delete(e)}),yt()}run(e,n,t,o,i,s){St(e.name);let a=[];for(let S=0;S$):t;if(c.length!==u.length)throw new Error(`Output size ${c.length} must be equal to ${u.length}.`);let p=[],b=[];for(let S=0;S=s)throw new Error(`Invalid output index: ${c[S]}`);if(c[S]===-3)continue;let $=c[S]===-1,P=c[S]===-2,E=$||P?i(u[S].dataType,u[S].dims):o(c[S],u[S].dataType,u[S].dims);if(p.push(E),E.data===0)continue;let N=this.gpuDataManager.get(E.data);if(!N)throw new Error(`no GPU data for output: ${E.data}`);if($&&this.temporaryData.push(N),P){let z=this.kernelPersistentData.get(this.currentKernelId);z||(z=[],this.kernelPersistentData.set(this.currentKernelId,z)),z.push(N)}b.push(N)}if(a.length!==n.length||b.length!==p.length){if(b.length===0)return yt(e.name),p;throw new Error(`Program ${e.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`)}let h;if(f){let S=0,$=[];f.forEach(z=>{let q=typeof z.data=="number"?[z.data]:z.data;if(q.length===0)return;let j=z.type===10?2:4,F,_e;z.type===10?(_e=q.length>4?16:q.length>2?8:q.length*j,F=q.length>4?16:j*q.length):(_e=q.length<=2?q.length*j:16,F=16),S=Math.ceil(S/_e)*_e,$.push(S);let $e=z.type===10?8:4;S+=q.length>4?Math.ceil(q.length/$e)*F:q.length*j});let P=16;S=Math.ceil(S/P)*P;let E=new ArrayBuffer(S);f.forEach((z,q)=>{let j=$[q],F=typeof z.data=="number"?[z.data]:z.data;if(z.type===6)new Int32Array(E,j,F.length).set(F);else if(z.type===12)new Uint32Array(E,j,F.length).set(F);else if(z.type===10)new Uint16Array(E,j,F.length).set(F);else if(z.type===1)new Float32Array(E,j,F.length).set(F);else throw new Error(`Unsupported uniform type: ${Ar(z.type)}`)});let N=this.gpuDataManager.create(S,GPUBufferUsage.COPY_DST|GPUBufferUsage.UNIFORM);this.device.queue.writeBuffer(N.buffer,0,E,0,S),this.gpuDataManager.release(N.id),h={offset:0,size:S,buffer:N.buffer}}let g=this.programManager.normalizeDispatchGroupSize(l),T=g[1]===1&&g[2]===1,w=RI(e,n,T),v=this.programManager.getArtifact(w);if(v||(v=this.programManager.build(e,g),this.programManager.setArtifact(w,v),Ne("info",()=>`[artifact] key: ${w}, programName: ${e.name}`)),f&&v.uniformVariablesInfo){if(f.length!==v.uniformVariablesInfo.length)throw new Error(`Uniform variables count mismatch: expect ${v.uniformVariablesInfo.length}, got ${f.length} in program "${v.programInfo.name}".`);for(let S=0;S`[ProgramManager] run "${e.name}" (key=${w}) with ${g[0]}x${g[1]}x${g[2]}`),this.queryType!=="none"||this.sessionStatus==="capturing"){let S={kernelId:this.currentKernelId,programName:v.programInfo.name,inputTensorViews:n,outputTensorViews:p};this.pendingKernels.push(S),this.sessionStatus==="capturing"&&this.capturedPendingKernels.get(this.currentSessionId).push(S)}return this.programManager.run(v,a,b,g,h),yt(e.name),p}upload(e,n){this.gpuDataManager.upload(e,n)}memcpy(e,n){this.gpuDataManager.memcpy(e,n)}async download(e,n){await this.gpuDataManager.download(e,n)}alloc(e){return 
this.gpuDataManager.create(e).id}free(e){return this.gpuDataManager.release(e)}createKernel(e,n,t,o){let i=bx.get(e);if(!i)throw new Error(`kernel not implemented: ${e}`);let s={kernelType:e,kernelName:o,kernelEntry:i[0],attributes:[i[1],t]};this.kernels.set(n,s)}releaseKernel(e){let n=this.kernelPersistentData.get(e);if(n){for(let t of n)this.gpuDataManager.release(t.id);this.kernelPersistentData.delete(e)}this.kernelCustomData.delete(e),this.kernels.delete(e)}computeKernel(e,n,t){let o=this.kernels.get(e);if(!o)throw new Error(`kernel not created: ${e}`);let i=o.kernelType,s=o.kernelName,a=o.kernelEntry,u=o.attributes;if(this.currentKernelId!==null)throw new Error(`kernel "[${i}] ${s}" is not allowed to be called recursively`);this.currentKernelId=e,u[0]&&(u[1]=u[0](u[1]),u[0]=void 0),Ne("info",()=>`[WebGPU] Start to run kernel "[${i}] ${s}"...`);let l=this.env.debug;this.temporaryData=[];try{return l&&this.device.pushErrorScope("validation"),a(n,u[1]),0}catch(f){return t.push(Promise.resolve(`[WebGPU] Kernel "[${i}] ${s}" failed. ${f}`)),1}finally{l&&t.push(this.device.popErrorScope().then(f=>f?`GPU validation error for kernel "[${i}] ${s}": ${f.message}`:null));for(let f of this.temporaryData)this.gpuDataManager.release(f.id);this.temporaryData=[],this.currentKernelId=null}}registerBuffer(e,n,t,o){let i=this.sessionExternalDataMapping.get(e);i||(i=new Map,this.sessionExternalDataMapping.set(e,i));let s=i.get(n),a=this.gpuDataManager.registerExternalBuffer(t,o,s?.[1]);return i.set(n,[a,t]),a}unregisterBuffers(e){let n=this.sessionExternalDataMapping.get(e);n&&(n.forEach(t=>this.gpuDataManager.unregisterExternalBuffer(t[1])),this.sessionExternalDataMapping.delete(e))}getBuffer(e){let n=this.gpuDataManager.get(e);if(!n)throw new Error(`no GPU data for buffer: ${e}`);return n.buffer}createDownloader(e,n,t){return async()=>{let o=await Us(this,e,n);return zh(o.buffer,t)}}writeTimestamp(e){this.queryType==="inside-passes"&&this.computePassEncoder.writeTimestamp(this.querySet,e)}setQueryType(){this.queryType="none",(this.env.webgpu.profiling?.mode==="default"||(typeof this.env.trace>"u"?this.env.wasm.trace:this.env.trace))&&(this.device.features.has("chromium-experimental-timestamp-query-inside-passes")?this.queryType="inside-passes":this.device.features.has("timestamp-query")&&(this.queryType="at-passes"),this.queryType!=="none"&&typeof this.querySet>"u"&&(this.querySet=this.device.createQuerySet({type:"timestamp",count:this.maxDispatchNumber*2}),this.queryResolveBuffer=this.device.createBuffer({size:this.maxDispatchNumber*2*8,usage:GPUBufferUsage.COPY_SRC|GPUBufferUsage.QUERY_RESOLVE})))}captureBegin(){Ne("info","captureBegin"),this.capturedCommandList.get(this.currentSessionId)||this.capturedCommandList.set(this.currentSessionId,[]),this.capturedPendingKernels.get(this.currentSessionId)||this.capturedPendingKernels.set(this.currentSessionId,[]),this.flush(),this.sessionStatus="capturing"}captureEnd(){Ne("info","captureEnd"),this.flush(),this.sessionStatus="default"}replay(){Ne("info","replay"),this.sessionStatus="replaying";let e=this.capturedCommandList.get(this.currentSessionId),n=this.capturedPendingKernels.get(this.currentSessionId),t=e.length;this.pendingKernels=[];for(let 
o=0;o=this.maxDispatchNumber||this.queryType==="at-passes")&&this.endComputePass(),this.pendingDispatchNumber>=this.maxDispatchNumber&&this.flush()}this.flush(),this.sessionStatus="default"}onReleaseSession(e){this.unregisterBuffers(e),this.capturedCommandList.has(e)&&this.capturedCommandList.delete(e),this.capturedPendingKernels.has(e)&&this.capturedPendingKernels.delete(e),this.gpuDataManager.onReleaseSession(e)}onRunStart(e){this.currentSessionId=e,this.setQueryType()}}});var wx={};un(wx,{init:()=>NI});var to,gu,NI,Tx=C(()=>{"use strict";ue();vx();mr();ye();to=class r{constructor(e,n,t,o){this.module=e;this.dataType=n;this.data=t;this.dims=o}getFloat32Array(){if(this.dataType!==1)throw new Error("Invalid data type");let e=B.size(this.dims);return e===0?new Float32Array:new Float32Array(this.module.HEAP8.buffer,this.data,e)}getBigInt64Array(){if(this.dataType!==7)throw new Error("Invalid data type");let e=B.size(this.dims);return e===0?new BigInt64Array:new BigInt64Array(this.module.HEAP8.buffer,this.data,e)}getInt32Array(){if(this.dataType!==6)throw new Error("Invalid data type");let e=B.size(this.dims);return e===0?new Int32Array:new Int32Array(this.module.HEAP8.buffer,this.data,e)}reshape(e){if(B.size(e)!==B.size(this.dims))throw new Error("Invalid new shape");return new r(this.module,this.dataType,this.data,e)}},gu=class{constructor(e,n,t){this.module=e;this.backend=n;this.customDataOffset=0;this.customDataSize=0;this.adapterInfo=n.adapterInfo;let o=e.HEAPU32,i=t>>>2;this.opKernelContext=o[i++];let s=o[i++];this.outputCount=o[i++],this.customDataOffset=o[i++],this.customDataSize=o[i++];let a=[];for(let u=0;utypeof a=="number"?this.inputs[a]:a)??this.inputs,o=n?.outputs??[],i=(a,u,l)=>new to(this.module,u,this.output(a,l),l),s=(a,u)=>{let l=Kr(a);if(!l)throw new Error(`Unsupported data type: ${a}`);let f=l*B.size(u),c=f>0?this.backend.gpuDataManager.create(f).id:0;return new to(this.module,a,c,u)};return this.backend.run(e,t,o,i,s,this.outputCount)}output(e,n){let t=this.module.stackSave();try{let o=this.module.stackAlloc((1+n.length)*4),i=o>>2;this.module.HEAPU32[i++]=n.length;for(let s=0;s{let o=e.jsepInit;if(!o)throw new Error("Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.");if(r==="webgpu"){let i=new Xi;await i.initialize(n,t),o("webgpu",[i,s=>i.alloc(s),s=>i.free(s),(s,a,u,l=!1)=>{if(l)Ne("verbose",()=>`[WebGPU] jsepCopyGpuToGpu: src=${s}, dst=${a}, size=${u}`),i.memcpy(s,a);else{Ne("verbose",()=>`[WebGPU] jsepCopyCpuToGpu: dataOffset=${s}, gpuDataId=${a}, size=${u}`);let f=e.HEAPU8.subarray(s>>>0,(s>>>0)+u);i.upload(a,f)}},async(s,a,u)=>{Ne("verbose",()=>`[WebGPU] jsepCopyGpuToCpu: gpuDataId=${s}, dataOffset=${a}, size=${u}`),await i.download(s,()=>e.HEAPU8.subarray(a>>>0,(a>>>0)+u))},(s,a,u)=>i.createKernel(s,a,u,e.UTF8ToString(e._JsepGetNodeName(a))),s=>i.releaseKernel(s),(s,a,u,l)=>{Ne("verbose",()=>`[WebGPU] jsepRun: sessionHandle=${u}, kernel=${s}, contextDataOffset=${a}`);let f=new gu(e,i,a);return i.computeKernel(s,f,l)},()=>i.captureBegin(),()=>i.captureEnd(),()=>i.replay()])}else o("webnn")}});var zI,bi,yi,Qr,FI,qn,xi,vi,_x,wi,Ti,_i,Ds=C(()=>{"use strict";kh();Bh();ue();jr();Si();Fs();zI=(r,e)=>{Ye()._OrtInit(r,e)!==0&&Ue("Can't initialize onnxruntime.")},bi=async r=>{zI(r.wasm.numThreads,Kn(r.logLevel))},yi=async(r,e)=>{{let n=(Tx(),Pn(wx)).init;if(e==="webgpu"){if(typeof navigator>"u"||!navigator.gpu)throw new Error("WebGPU is not supported in current environment");let t=r.webgpu.adapter;if(t){if(typeof t.limits!="object"||typeof t.features!="object"||typeof t.requestDevice!="function")throw new Error("Invalid GPU adapter set in `env.webgpu.adapter`. It must be a GPUAdapter object.")}else{let o=r.webgpu.powerPreference;if(o!==void 0&&o!=="low-power"&&o!=="high-performance")throw new Error(`Invalid powerPreference setting: "${o}"`);let i=r.webgpu.forceFallbackAdapter;if(i!==void 0&&typeof i!="boolean")throw new Error(`Invalid forceFallbackAdapter setting: "${i}"`);if(t=await navigator.gpu.requestAdapter({powerPreference:o,forceFallbackAdapter:i}),!t)throw new Error('Failed to get GPU adapter. You may need to enable flag "--enable-unsafe-webgpu" if you are using Chrome.')}await n("webgpu",Ye(),r,t)}if(e==="webnn"){if(typeof navigator>"u"||!navigator.ml)throw new Error("WebNN is not supported in current environment");await n("webnn",Ye(),r)}}},Qr=new Map,FI=r=>{let e=Ye(),n=e.stackSave();try{let t=e.stackAlloc(8);return e._OrtGetInputOutputCount(r,t,t+4)!==0&&Ue("Can't get session input/output count."),[e.HEAP32[t/4],e.HEAP32[t/4+1]]}finally{e.stackRestore(n)}},qn=r=>{let e=Ye(),n=e._malloc(r.byteLength);if(n===0)throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${r.byteLength}.`);return e.HEAPU8.set(r,n),[n,r.byteLength]},xi=async(r,e)=>{let n,t,o=Ye();Array.isArray(r)?[n,t]=r:r.buffer===o.HEAPU8.buffer?[n,t]=[r.byteOffset,r.byteLength]:[n,t]=qn(r);let i=0,s=0,a=0,u=[],l=[],f=[];try{if([s,u]=Dh(e),e?.externalData&&o.mountExternalData){let v=[];for(let S of e.externalData){let $=typeof S=="string"?S:S.path;v.push(Xn(typeof S=="string"?S:S.data).then(P=>{o.mountExternalData($,P)}))}await Promise.all(v)}for(let v of e?.executionProviders??[])if((typeof v=="string"?v:v.name)==="webnn"){if(o.currentContext)throw new Error("WebNN execution provider is already set.");if(typeof v!="string"){let $=v,P=$?.context,E=$?.gpuDevice,N=$?.deviceType,z=$?.numThreads,q=$?.powerPreference;P?o.currentContext=P:E?o.currentContext=await navigator.ml.createContext(E):o.currentContext=await navigator.ml.createContext({deviceType:N,numThreads:z,powerPreference:q})}else o.currentContext=await navigator.ml.createContext();break}i=await o._OrtCreateSession(n,t,s),i===0&&Ue("Can't create a session."),o.currentContext&&(o.currentContext=void 0);let[c,p]=FI(i),b=!!e?.enableGraphCapture,h=[],g=[],T=[];for(let v=0;vv==="gpu-buffer")&&(a=o._OrtCreateBinding(i),a===0&&Ue("Can't create IO binding."),w={handle:a,outputPreferredLocations:T,outputPreferredLocationsEncoded:T.map(v=>zs(v))}),Qr.set(i,[i,l,f,w,b,!1]),[i,h,g]}catch(c){throw l.forEach(p=>o._OrtFree(p)),f.forEach(p=>o._OrtFree(p)),a!==0&&o._OrtReleaseBinding(a),i!==0&&o._OrtReleaseSession(i),c}finally{o._free(n),s!==0&&o._OrtReleaseSessionOptions(s),u.forEach(c=>o._free(c)),o.unmountExternalData?.()}},vi=r=>{let e=Ye(),n=Qr.get(r);if(!n)throw new Error(`cannot release session. invalid session id: ${r}`);let[t,o,i,s,a]=n;s&&(a&&e._OrtClearBoundOutputs(s.handle),e._OrtReleaseBinding(s.handle)),e.jsepOnReleaseSession?.(r),o.forEach(u=>e._OrtFree(u)),i.forEach(u=>e._OrtFree(u)),e._OrtReleaseSession(t),Qr.delete(r)},_x=(r,e,n,t,o,i=!1)=>{if(!r){e.push(0);return}let s=Ye(),a=r[0],u=r[1],l=r[3],f,c;if(a==="string"&&l==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");if(i&&l!=="gpu-buffer")throw new Error(`External buffer must be provided for input/output index ${o} when enableGraphCapture is true.`);if(l==="gpu-buffer"){let h=r[2].gpuBuffer,g=Kr(Ns(a));c=u.reduce((w,v)=>w*v,1)*g;let T=s.jsepRegisterBuffer;if(!T)throw new Error('Tensor location "gpu-buffer" is not supported without using WebGPU.');f=T(t,o,h,c)}else{let h=r[2];if(Array.isArray(h)){c=4*h.length,f=s._malloc(c),n.push(f);let g=f/4;for(let T=0;Ts.HEAP32[h++]=T);let g=s._OrtCreateTensor(Ns(a),f,c,b,u.length,zs(l));g===0&&Ue(`Can't create tensor for input/output. session=${t}, index=${o}.`),e.push(g)}finally{s.stackRestore(p)}},wi=async(r,e,n,t,o,i)=>{let s=Ye(),a=Qr.get(r);if(!a)throw new Error(`cannot run inference. 
invalid session id: ${r}`);let u=a[0],l=a[1],f=a[2],c=a[3],p=a[4],b=a[5],h=e.length,g=t.length,T=0,w=[],v=[],S=[],$=[],P=s.stackSave(),E=s.stackAlloc(h*4),N=s.stackAlloc(h*4),z=s.stackAlloc(g*4),q=s.stackAlloc(g*4);try{[T,w]=Ch(i);for(let Q=0;Qmt*rt,1);pe=Ar(ie);let tn=c?.outputPreferredLocations[t[Q]];if(pe==="string"){if(tn==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");let mt=[],rt=ce/4;for(let jt=0;jt0){let mt=s.jsepGetBuffer;if(!mt)throw new Error('preferredLocation "gpu-buffer" is not supported without using WebGPU.');let rt=mt(ce),jt=Kr(ie);if(jt===void 0||!Ai(pe))throw new Error(`Unsupported data type: ${pe}`);se=!0,qe.push([pe,Fe,{gpuBuffer:rt,download:s.jsepCreateDownloader(rt,je*jt,pe),dispose:()=>{s._OrtReleaseTensor(ge)}},"gpu-buffer"])}else{let mt=$i(pe),rt=new mt(je);new Uint8Array(rt.buffer,rt.byteOffset,rt.byteLength).set(s.HEAPU8.subarray(ce,ce+rt.byteLength)),qe.push([pe,Fe,rt,"cpu"])}}finally{s.stackRestore(Ie),pe==="string"&&ce&&s._free(ce),se||s._OrtReleaseTensor(ge)}}return c&&!p&&(s._OrtClearBoundOutputs(c.handle),Qr.set(r,[u,l,f,c,p,!1])),qe}finally{s.stackRestore(P),v.forEach(j=>s._OrtReleaseTensor(j)),S.forEach(j=>s._OrtReleaseTensor(j)),$.forEach(j=>s._free(j)),T!==0&&s._OrtReleaseRunOptions(T),w.forEach(j=>s._free(j))}},Ti=r=>{let e=Ye(),n=Qr.get(r);if(!n)throw new Error("invalid session id");let t=n[0],o=e._OrtEndProfiling(t);o===0&&Ue("Can't get an profile file name."),e._OrtFree(o)},_i=r=>{let e=[];for(let n of r){let t=n[2];!Array.isArray(t)&&"buffer"in t&&e.push(t.buffer)}return e}});var en,Rt,ro,Yi,Ji,Zi,bu,yu,$n,An,VI,Ix,Sx,$x,Ax,Px,Ox,Ex,xu=C(()=>{"use strict";ft();Ds();jr();Hn();en=()=>!!le.wasm.proxy&&typeof document<"u",ro=!1,Yi=!1,Ji=!1,yu=new Map,$n=(r,e)=>{let n=yu.get(r);n?n.push(e):yu.set(r,[e])},An=()=>{if(ro||!Yi||Ji||!Rt)throw new Error("worker not ready")},VI=r=>{switch(r.data.type){case"init-wasm":ro=!1,r.data.err?(Ji=!0,bu[1](r.data.err)):(Yi=!0,bu[0]()),Zi&&(URL.revokeObjectURL(Zi),Zi=void 0);break;case"init-ep":case"copy-from":case"create":case"release":case"run":case"end-profiling":{let e=yu.get(r.data.type);r.data.err?e.shift()[1](r.data.err):e.shift()[0](r.data.out);break}default:}},Ix=async()=>{if(!Yi){if(ro)throw new Error("multiple calls to 'initWasm()' detected.");if(Ji)throw new Error("previous call to 'initWasm()' failed.");if(ro=!0,en())return new Promise((r,e)=>{Rt?.terminate(),Ph().then(([n,t])=>{try{Rt=t,Rt.onerror=i=>e(i),Rt.onmessage=VI,bu=[r,e];let o={type:"init-wasm",in:le};Rt.postMessage(o),Zi=n}catch(o){e(o)}},e)});try{await gi(le.wasm),await bi(le),Yi=!0}catch(r){throw Ji=!0,r}finally{ro=!1}}},Sx=async r=>{if(en())return An(),new Promise((e,n)=>{$n("init-ep",[e,n]);let t={type:"init-ep",in:{epName:r,env:le}};Rt.postMessage(t)});await yi(le,r)},$x=async r=>en()?(An(),new Promise((e,n)=>{$n("copy-from",[e,n]);let t={type:"copy-from",in:{buffer:r}};Rt.postMessage(t,[r.buffer])})):qn(r),Ax=async(r,e)=>{if(en()){if(e?.preferredOutputLocation)throw new Error('session option "preferredOutputLocation" is not supported for proxy.');return An(),new Promise((n,t)=>{$n("create",[n,t]);let o={type:"create",in:{model:r,options:{...e}}},i=[];r instanceof Uint8Array&&i.push(r.buffer),Rt.postMessage(o,i)})}else return xi(r,e)},Px=async r=>{if(en())return An(),new Promise((e,n)=>{$n("release",[e,n]);let t={type:"release",in:r};Rt.postMessage(t)});vi(r)},Ox=async(r,e,n,t,o,i)=>{if(en()){if(n.some(s=>s[3]!=="cpu"))throw new Error("input tensor on GPU is not supported for proxy.");if(o.some(s=>s))throw new 
Error("pre-allocated output tensor is not supported for proxy.");return An(),new Promise((s,a)=>{$n("run",[s,a]);let u=n,l={type:"run",in:{sessionId:r,inputIndices:e,inputs:u,outputIndices:t,options:i}};Rt.postMessage(l,_i(u))})}else return wi(r,e,n,t,o,i)},Ex=async r=>{if(en())return An(),new Promise((e,n)=>{$n("end-profiling",[e,n]);let t={type:"end-profiling",in:r};Rt.postMessage(t)});Ti(r)}});var Cx,GI,Qi,kx=C(()=>{"use strict";ft();xu();ue();hi();Fs();Cx=(r,e)=>{switch(r.location){case"cpu":return[r.type,r.dims,r.data,"cpu"];case"gpu-buffer":return[r.type,r.dims,{gpuBuffer:r.gpuBuffer},"gpu-buffer"];default:throw new Error(`invalid data location: ${r.location} for ${e()}`)}},GI=r=>{switch(r[3]){case"cpu":return new it(r[0],r[2],r[1]);case"gpu-buffer":{let e=r[0];if(!Ai(e))throw new Error(`not supported data type: ${e} for deserializing GPU tensor`);let{gpuBuffer:n,download:t,dispose:o}=r[2];return it.fromGpuBuffer(n,{dataType:e,dims:r[1],download:t,dispose:o})}default:throw new Error(`invalid data location: ${r[3]}`)}},Qi=class{async fetchModelAndCopyToWasmMemory(e){return $x(await Xn(e))}async loadModel(e,n){St();let t;typeof e=="string"?!1?t=await Xn(e):t=await this.fetchModelAndCopyToWasmMemory(e):t=e,[this.sessionId,this.inputNames,this.outputNames]=await Ax(t,n),yt()}async dispose(){return Px(this.sessionId)}async run(e,n,t){St();let o=[],i=[];Object.entries(e).forEach(p=>{let b=p[0],h=p[1],g=this.inputNames.indexOf(b);if(g===-1)throw new Error(`invalid input '${b}'`);o.push(h),i.push(g)});let s=[],a=[];Object.entries(n).forEach(p=>{let b=p[0],h=p[1],g=this.outputNames.indexOf(b);if(g===-1)throw new Error(`invalid output '${b}'`);s.push(h),a.push(g)});let u=o.map((p,b)=>Cx(p,()=>`input "${this.inputNames[i[b]]}"`)),l=s.map((p,b)=>p?Cx(p,()=>`output "${this.outputNames[a[b]]}"`):null),f=await Ox(this.sessionId,i,u,a,l,t),c={};for(let p=0;p{"use strict";ft();xu();kx();Hn();UI=()=>{if((typeof le.wasm.initTimeout!="number"||le.wasm.initTimeout<0)&&(le.wasm.initTimeout=0),le.wasm.simd===!1&&console.warn('Deprecated property "env.wasm.simd" is set to false. non-SIMD build is no longer provided, and this setting will be ignored.'),typeof le.wasm.proxy!="boolean"&&(le.wasm.proxy=!1),typeof le.wasm.trace!="boolean"&&(le.wasm.trace=!1),typeof le.wasm.numThreads!="number"||!Number.isInteger(le.wasm.numThreads)||le.wasm.numThreads<=0)if(typeof self<"u"&&!self.crossOriginIsolated)le.wasm.numThreads=1;else{let r=typeof navigator>"u"?_a("node:os").cpus().length:navigator.hardwareConcurrency;le.wasm.numThreads=Math.min(4,Math.ceil((r||1)/2))}},ea=class{async init(e){UI(),await Ix(),await Sx(e)}async createInferenceSessionHandler(e,n){let t=new Qi;return await t.loadModel(e,n),Promise.resolve(t)}}});var Bx={};un(Bx,{wasmBackend:()=>WI});var WI,Lx=C(()=>{"use strict";Dx();WI=new ea});ft();ft();ft();var mc="1.19.0";var QM=Aa;{let r=(xh(),Pn(yh)).onnxjsBackend;vr("webgl",r,-10)}{let r=(Lx(),Pn(Bx)).wasmBackend;vr("webgpu",r,5),vr("webnn",r,5),vr("cpu",r,10),vr("wasm",r,10)}Object.defineProperty(le.versions,"web",{value:mc,enumerable:!0});export{av as InferenceSession,_o as TRACE,St as TRACE_FUNC_BEGIN,yt as TRACE_FUNC_END,it as Tensor,uv as TrainingSession,QM as default,le as env,vr as registerBackend}; /** * @license * Copyright 2021 Google LLC. All Rights Reserved. 
diff --git a/assets/dist/ort.all.bundle.min.mjs.map b/assets/dist/ort.all.bundle.min.mjs.map index 6272ec8..eccbc1e 100644 --- a/assets/dist/ort.all.bundle.min.mjs.map +++ b/assets/dist/ort.all.bundle.min.mjs.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["../../common/lib/backend-impl.ts", "../../common/lib/backend.ts", "../../common/lib/version.ts", "../../common/lib/env-impl.ts", "../../common/lib/env.ts", "../../common/lib/tensor-conversion-impl.ts", "../../common/lib/tensor-factory-impl.ts", "../../common/lib/tensor-impl-type-mapping.ts", "../../common/lib/tensor-utils-impl.ts", "../../common/lib/tensor-impl.ts", "../../common/lib/tensor.ts", "../../common/lib/trace.ts", "../../common/lib/inference-session-impl.ts", "../../common/lib/inference-session.ts", "../../common/lib/tensor-conversion.ts", "../../common/lib/tensor-factory.ts", "../../common/lib/onnx-model.ts", "../../common/lib/onnx-value.ts", "../../common/lib/training-session-impl.ts", "../../common/lib/training-session.ts", "../../common/lib/index.ts", "../lib/onnxjs/instrument.ts", "../lib/onnxjs/opset.ts", "../node_modules/guid-typescript/dist/guid.js", "../node_modules/long/index.js", "../node_modules/flatbuffers/js/flatbuffers.mjs", "../lib/onnxjs/ort-schema/flatbuffers/ort-generated.ts", "../node_modules/@protobufjs/aspromise/index.js", "../node_modules/@protobufjs/base64/index.js", "../node_modules/@protobufjs/eventemitter/index.js", "../node_modules/@protobufjs/float/index.js", "../node_modules/@protobufjs/inquire/index.js", "../node_modules/@protobufjs/utf8/index.js", "../node_modules/@protobufjs/pool/index.js", "../node_modules/protobufjs/src/util/longbits.js", "../node_modules/protobufjs/src/util/minimal.js", "../node_modules/protobufjs/src/writer.js", "../node_modules/protobufjs/src/writer_buffer.js", "../node_modules/protobufjs/src/reader.js", "../node_modules/protobufjs/src/reader_buffer.js", "../node_modules/protobufjs/src/rpc/service.js", "../node_modules/protobufjs/src/rpc.js", "../node_modules/protobufjs/src/roots.js", "../node_modules/protobufjs/src/index-minimal.js", "../node_modules/protobufjs/minimal.js", "../lib/onnxjs/ort-schema/protobuf/onnx.js", "../lib/onnxjs/util.ts", "../lib/onnxjs/tensor.ts", "../lib/onnxjs/backends/webgl/glsl-source.ts", "../lib/onnxjs/backends/webgl/types.ts", "../lib/onnxjs/backends/webgl/utils.ts", "../lib/onnxjs/backends/webgl/ops/packing-utils.ts", "../lib/onnxjs/backends/webgl/ops/pack.ts", "../lib/onnxjs/backends/webgl/ops/reshape-packed.ts", "../lib/onnxjs/backends/webgl/ops/uint8-encode.ts", "../lib/onnxjs/backends/webgl/ops/unpack.ts", "../lib/onnxjs/backends/webgl/texture-data-encoder.ts", "../lib/onnxjs/backends/webgl/texture-layout.ts", "../lib/onnxjs/backends/webgl/inference-handler.ts", "../lib/onnxjs/attribute-with-cache-key.ts", "../lib/onnxjs/backends/webgl/ops/batch-normalization.ts", "../lib/onnxjs/backends/webgl/glsl-definitions.ts", "../lib/onnxjs/backends/webgl/ops/binary-op.ts", "../lib/onnxjs/backends/webgl/ops/cast.ts", "../lib/onnxjs/backends/webgl/ops/concat-packed.ts", "../lib/onnxjs/backends/webgl/ops/concat.ts", "../lib/onnxjs/backends/webgl/ops/unary-op.ts", "../lib/onnxjs/backends/webgl/ops/fuse-utils.ts", "../lib/onnxjs/backends/webgl/ops/conv-grouped.ts", "../lib/onnxjs/backends/webgl/ops/im2col-pack.ts", "../lib/onnxjs/backends/webgl/ops/matmul.ts", "../lib/onnxjs/backends/webgl/ops/matmul-pack.ts", "../lib/onnxjs/backends/webgl/ops/conv-pack.ts", "../lib/onnxjs/backends/webgl/ops/im2col.ts", "../lib/onnxjs/backends/webgl/ops/dot-product.ts", 
"../lib/onnxjs/backends/webgl/ops/conv.ts", "../lib/onnxjs/backends/webgl/ops/conv-transpose.ts", "../lib/onnxjs/backends/webgl/ops/transpose.ts", "../lib/onnxjs/backends/webgl/ops/depth-to-space.ts", "../lib/onnxjs/backends/webgl/ops/flatten.ts", "../lib/onnxjs/operators.ts", "../lib/onnxjs/backends/webgl/ops/gather.ts", "../lib/onnxjs/backends/webgl/ops/gemm.ts", "../lib/onnxjs/backends/webgl/ops/image-scaler.ts", "../lib/onnxjs/backends/webgl/ops/instance-normalization.ts", "../lib/onnxjs/backends/webgl/ops/lrn.ts", "../lib/onnxjs/backends/webgl/ops/pad.ts", "../lib/onnxjs/backends/webgl/ops/pool.ts", "../lib/onnxjs/backends/webgl/ops/reduce.ts", "../lib/onnxjs/backends/webgl/ops/reshape.ts", "../lib/onnxjs/backends/webgl/ops/upsample.ts", "../lib/onnxjs/backends/webgl/ops/resize-packed.ts", "../lib/onnxjs/backends/webgl/ops/shape.ts", "../lib/onnxjs/backends/webgl/ops/slice.ts", "../lib/onnxjs/backends/webgl/ops/softmax.ts", "../lib/onnxjs/backends/webgl/ops/split.ts", "../lib/onnxjs/backends/webgl/ops/squeeze.ts", "../lib/onnxjs/backends/webgl/ops/sum.ts", "../lib/onnxjs/backends/webgl/ops/tile.ts", "../lib/onnxjs/backends/webgl/ops/unsqueeze.ts", "../lib/onnxjs/backends/webgl/op-resolve-rules.ts", "../lib/onnxjs/backends/webgl/glsl-function-inliner.ts", "../lib/onnxjs/backends/webgl/texture-layout-strategy.ts", "../lib/onnxjs/backends/webgl/glsl-coordinate-lib.ts", "../lib/onnxjs/backends/webgl/glsl-encoding-lib.ts", "../lib/onnxjs/backends/webgl/glsl-fragcolor-lib.ts", "../lib/onnxjs/backends/webgl/glsl-shape-utils-lib.ts", "../lib/onnxjs/backends/webgl/glsl-vec-lib.ts", "../lib/onnxjs/backends/webgl/glsl-registered-libs.ts", "../lib/onnxjs/backends/webgl/glsl-preprocessor.ts", "../lib/onnxjs/backends/webgl/program-manager.ts", "../lib/onnxjs/backends/webgl/texture-manager.ts", "../lib/onnxjs/backends/webgl/session-handler.ts", "../lib/onnxjs/backends/webgl/webgl-context.ts", "../lib/onnxjs/backends/webgl/webgl-context-factory.ts", "../lib/onnxjs/backends/backend-webgl.ts", "../lib/onnxjs/backend.ts", "../lib/onnxjs/execution-plan.ts", "../lib/onnxjs/attribute.ts", "../lib/onnxjs/graph.ts", "../lib/onnxjs/model.ts", "../lib/onnxjs/session.ts", "../lib/onnxjs/session-handler-inference.ts", "../lib/backend-onnxjs.ts", "../lib/wasm/wasm-utils-env.ts", "../lib/wasm/proxy-worker/main.ts", "ort-wasm-simd-threaded.jsep.mjs", "../lib/wasm/wasm-utils-import.ts", "../lib/wasm/wasm-factory.ts", "../lib/wasm/wasm-utils.ts", "../lib/wasm/run-options.ts", "../lib/wasm/session-options.ts", "../lib/wasm/wasm-common.ts", "../lib/wasm/wasm-utils-load-file.ts", "../lib/wasm/jsep/log.ts", "../lib/wasm/jsep/tensor-view.ts", "../lib/wasm/jsep/webgpu/types.ts", "../lib/wasm/jsep/webgpu/gpu-data-manager.ts", "../lib/wasm/jsep/webgpu/attribute-with-cache-key.ts", "../lib/wasm/jsep/util.ts", "../lib/wasm/jsep/webgpu/ops/common.ts", "../lib/wasm/jsep/webgpu/ops/transpose.ts", "../lib/wasm/jsep/webgpu/ops/reduce-shared.ts", "../lib/wasm/jsep/webgpu/ops/reduce.ts", "../lib/wasm/jsep/webgpu/ops/argminmax.ts", "../lib/wasm/jsep/webgpu/ops/attention.ts", "../lib/wasm/jsep/webgpu/ops/batch-norm.ts", "../lib/wasm/jsep/webgpu/ops/bias-add.ts", "../lib/wasm/jsep/webgpu/ops/unary-op.ts", "../lib/wasm/jsep/webgpu/ops/bias-split-gelu.ts", "../lib/wasm/jsep/webgpu/ops/binary-op.ts", "../lib/wasm/jsep/webgpu/ops/concat.ts", "../lib/wasm/jsep/webgpu/ops/fuse-utils.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/activation_util.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv_util.ts", 
"../lib/wasm/jsep/webgpu/ops/3rd-party/matmul_packed_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv2d_mm_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv3d_naive_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/conv-grouped.ts", "../lib/wasm/jsep/webgpu/ops/matmul.ts", "../lib/wasm/jsep/webgpu/ops/conv.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv_backprop_mm_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv_backprop_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/conv-transpose.ts", "../lib/wasm/jsep/webgpu/ops/cumsum.ts", "../lib/wasm/jsep/webgpu/ops/depth-to-space.ts", "../lib/wasm/jsep/webgpu/ops/einsum.ts", "../lib/wasm/jsep/webgpu/ops/expand.ts", "../lib/wasm/jsep/webgpu/ops/fast-gelu.ts", "../lib/wasm/jsep/webgpu/ops/gather.ts", "../lib/wasm/jsep/webgpu/ops/gather-elements.ts", "../lib/wasm/jsep/webgpu/ops/gemm.ts", "../lib/wasm/jsep/webgpu/ops/multihead-attention.ts", "../lib/wasm/jsep/webgpu/ops/tile.ts", "../lib/wasm/jsep/webgpu/ops/group-query-attention.ts", "../lib/wasm/jsep/webgpu/ops/instance-norm.ts", "../lib/wasm/jsep/webgpu/ops/layer-norm.ts", "../lib/wasm/jsep/webgpu/ops/matmulnbits.ts", "../lib/wasm/jsep/webgpu/ops/pad.ts", "../lib/wasm/jsep/webgpu/ops/pool.ts", "../lib/wasm/jsep/webgpu/ops/range.ts", "../lib/wasm/jsep/webgpu/ops/resize.ts", "../lib/wasm/jsep/webgpu/ops/rotary-embedding.ts", "../lib/wasm/jsep/webgpu/ops/skip-layer-norm.ts", "../lib/wasm/jsep/webgpu/ops/slice.ts", "../lib/wasm/jsep/webgpu/ops/softmax.ts", "../lib/wasm/jsep/webgpu/ops/split.ts", "../lib/wasm/jsep/webgpu/ops/where.ts", "../lib/wasm/jsep/webgpu/op-resolve-rules.ts", "../lib/wasm/jsep/webgpu/program-manager.ts", "../lib/wasm/jsep/backend-webgpu.ts", "../lib/wasm/jsep/init.ts", "../lib/wasm/wasm-core-impl.ts", "../lib/wasm/proxy-wrapper.ts", "../lib/wasm/session-handler-inference.ts", "../lib/backend-wasm.ts", "../lib/backend-wasm-inference.ts", "../lib/index.ts", "../lib/version.ts"], - "sourcesContent": ["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend} from './backend.js';\nimport {InferenceSession} from './inference-session.js';\n\ninterface BackendInfo {\n backend: Backend;\n priority: number;\n\n initPromise?: Promise;\n initialized?: boolean;\n aborted?: boolean;\n error?: string;\n}\n\nconst backends: Map = new Map();\nconst backendsSortedByPriority: string[] = [];\n\n/**\n * Register a backend.\n *\n * @param name - the name as a key to lookup as an execution provider.\n * @param backend - the backend object.\n * @param priority - an integer indicating the priority of the backend. Higher number means higher priority. if priority\n * < 0, it will be considered as a 'beta' version and will not be used as a fallback backend by default.\n *\n * @ignore\n */\nexport const registerBackend = (name: string, backend: Backend, priority: number): void => {\n if (backend && typeof backend.init === 'function' && typeof backend.createInferenceSessionHandler === 'function') {\n const currentBackend = backends.get(name);\n if (currentBackend === undefined) {\n backends.set(name, {backend, priority});\n } else if (currentBackend.priority > priority) {\n // same name is already registered with a higher priority. 
skip registeration.\n return;\n } else if (currentBackend.priority === priority) {\n if (currentBackend.backend !== backend) {\n throw new Error(`cannot register backend \"${name}\" using priority ${priority}`);\n }\n }\n\n if (priority >= 0) {\n const i = backendsSortedByPriority.indexOf(name);\n if (i !== -1) {\n backendsSortedByPriority.splice(i, 1);\n }\n\n for (let i = 0; i < backendsSortedByPriority.length; i++) {\n if (backends.get(backendsSortedByPriority[i])!.priority <= priority) {\n backendsSortedByPriority.splice(i, 0, name);\n return;\n }\n }\n backendsSortedByPriority.push(name);\n }\n return;\n }\n\n throw new TypeError('not a valid backend');\n};\n\n/**\n * Try to resolve and initialize a backend.\n *\n * @param backendName - the name of the backend.\n * @returns the backend instance if resolved and initialized successfully, or an error message if failed.\n */\nconst tryResolveAndInitializeBackend = async(backendName: string): Promise => {\n const backendInfo = backends.get(backendName);\n if (!backendInfo) {\n return 'backend not found.';\n }\n\n if (backendInfo.initialized) {\n return backendInfo.backend;\n } else if (backendInfo.aborted) {\n return backendInfo.error!;\n } else {\n const isInitializing = !!backendInfo.initPromise;\n try {\n if (!isInitializing) {\n backendInfo.initPromise = backendInfo.backend.init(backendName);\n }\n await backendInfo.initPromise;\n backendInfo.initialized = true;\n return backendInfo.backend;\n } catch (e) {\n if (!isInitializing) {\n backendInfo.error = `${e}`;\n backendInfo.aborted = true;\n }\n return backendInfo.error!;\n } finally {\n delete backendInfo.initPromise;\n }\n }\n};\n\n/**\n * Resolve execution providers from the specific session options.\n *\n * @param options - the session options object.\n * @returns a promise that resolves to a tuple of an initialized backend instance and a session options object with\n * filtered EP list.\n *\n * @ignore\n */\nexport const resolveBackendAndExecutionProviders = async(options: InferenceSession.SessionOptions):\n Promise<[backend: Backend, options: InferenceSession.SessionOptions]> => {\n // extract backend hints from session options\n const eps = options.executionProviders || [];\n const backendHints = eps.map(i => typeof i === 'string' ? i : i.name);\n const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;\n\n // try to resolve and initialize all requested backends\n let backend: Backend|undefined;\n const errors = [];\n const availableBackendNames = new Set();\n for (const backendName of backendNames) {\n const resolveResult = await tryResolveAndInitializeBackend(backendName);\n if (typeof resolveResult === 'string') {\n errors.push({name: backendName, err: resolveResult});\n } else {\n if (!backend) {\n backend = resolveResult;\n }\n if (backend === resolveResult) {\n availableBackendNames.add(backendName);\n }\n }\n }\n\n // if no backend is available, throw error.\n if (!backend) {\n throw new Error(`no available backend found. ERR: ${errors.map(e => `[${e.name}] ${e.err}`).join(', ')}`);\n }\n\n // for each explicitly requested backend, if it's not available, output warning message.\n for (const {name, err} of errors) {\n if (backendHints.includes(name)) {\n // eslint-disable-next-line no-console\n console.warn(`removing requested execution provider \"${\n name}\" from session options because it is not available: ${err}`);\n }\n }\n\n const filteredEps = eps.filter(i => availableBackendNames.has(typeof i === 'string' ? 
i : i.name));\n\n return [\n backend, new Proxy(options, {\n get: (target, prop) => {\n if (prop === 'executionProviders') {\n return filteredEps;\n }\n return Reflect.get(target, prop);\n }\n })\n ];\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession} from './training-session.js';\n\n/**\n * @ignore\n */\nexport declare namespace SessionHandler {\n type FeedsType = {[name: string]: OnnxValue};\n type FetchesType = {[name: string]: OnnxValue | null};\n type ReturnType = {[name: string]: OnnxValue};\n}\n\n/**\n * Represents shared SessionHandler functionality\n *\n * @ignore\n */\ninterface SessionHandler {\n dispose(): Promise;\n\n readonly inputNames: readonly string[];\n readonly outputNames: readonly string[];\n}\n\n/**\n * Represent a handler instance of an inference session.\n *\n * @ignore\n */\nexport interface InferenceSessionHandler extends SessionHandler {\n startProfiling(): void;\n endProfiling(): void;\n\n run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n}\n\n/**\n * Represent a handler instance of a training inference session.\n *\n * @ignore\n */\nexport interface TrainingSessionHandler extends SessionHandler {\n readonly evalInputNames: readonly string[];\n readonly evalOutputNames: readonly string[];\n\n lazyResetGrad(): Promise;\n runTrainStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n runOptimizerStep(options: InferenceSession.RunOptions): Promise;\n runEvalStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n\n getParametersSize(trainableOnly: boolean): Promise;\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n getContiguousParameters(trainableOnly: boolean): Promise;\n}\n\n/**\n * Represent a backend that provides implementation of model inferencing.\n *\n * @ignore\n */\nexport interface Backend {\n /**\n * Initialize the backend asynchronously. Should throw when failed.\n */\n init(backendName: string): Promise;\n\n createInferenceSessionHandler(uriOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n\n createTrainingSessionHandler?\n (checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,\n evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,\n options: InferenceSession.SessionOptions): Promise;\n}\n\nexport {registerBackend} from './backend-impl.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from './env.js';\nimport {version} from './version.js';\n\ntype LogLevelType = Env['logLevel'];\n\nlet logLevelValue: Required = 'warning';\n\nexport const env: Env = {\n wasm: {} as Env.WebAssemblyFlags,\n webgl: {} as Env.WebGLFlags,\n webgpu: {} as Env.WebGpuFlags,\n versions: {common: version},\n\n set logLevel(value: LogLevelType) {\n if (value === undefined) {\n return;\n }\n if (typeof value !== 'string' || ['verbose', 'info', 'warning', 'error', 'fatal'].indexOf(value) === -1) {\n throw new Error(`Unsupported logging level: ${value}`);\n }\n logLevelValue = value;\n },\n get logLevel(): Required {\n return logLevelValue;\n },\n};\n\n// set property 'logLevel' so that they can be correctly transferred to worker by `postMessage()`.\nObject.defineProperty(env, 'logLevel', {enumerable: true});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env as envImpl} from './env-impl.js';\n\nexport declare namespace Env {\n export type WasmPathPrefix = string;\n export interface WasmFilePaths {\n /**\n * Specify the override path for the main .wasm file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .wasm file is:\n * - `ort-wasm-simd-threaded.wasm` for default build\n * - `ort-wasm-simd-threaded.jsep.wasm` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.wasm` for training build\n */\n wasm?: URL|string;\n /**\n * Specify the override path for the main .mjs file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .mjs file is:\n * - `ort-wasm-simd-threaded.mjs` for default build\n * - `ort-wasm-simd-threaded.jsep.mjs` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.mjs` for training build\n */\n mjs?: URL|string;\n }\n export type WasmPrefixOrFilePaths = WasmPathPrefix|WasmFilePaths;\n export interface WebAssemblyFlags {\n /**\n * set or get number of thread(s). If omitted or set to 0, number of thread(s) will be determined by system. If set\n * to 1, no worker thread will be spawned.\n *\n * This setting is available only when WebAssembly multithread feature is available in current context.\n *\n * @defaultValue `0`\n */\n numThreads?: number;\n\n /**\n * set or get a boolean value indicating whether to enable SIMD. If set to false, SIMD will be forcely disabled.\n *\n * This setting is available only when WebAssembly SIMD feature is available in current context.\n *\n * @deprecated This property is deprecated. Since SIMD is supported by all major JavaScript engines, non-SIMD\n * build is no longer provided. This property will be removed in future release.\n * @defaultValue `true`\n */\n simd?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @deprecated Use `env.trace` instead. If `env.trace` is set, this property will be ignored.\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Set or get a number specifying the timeout for initialization of WebAssembly backend, in milliseconds. A zero\n * value indicates no timeout is set.\n *\n * @defaultValue `0`\n */\n initTimeout?: number;\n\n /**\n * Set a custom URL prefix to the .wasm/.mjs files, or an object of overrides for both .wasm/.mjs file. 
The override\n * path should be an absolute path.\n */\n wasmPaths?: WasmPrefixOrFilePaths;\n\n /**\n * Set or get a boolean value indicating whether to proxy the execution of main thread to a worker thread.\n *\n * @defaultValue `false`\n */\n proxy?: boolean;\n }\n\n export interface WebGLFlags {\n /**\n * Set or get the WebGL Context ID (webgl or webgl2).\n *\n * @defaultValue `'webgl2'`\n */\n contextId?: 'webgl'|'webgl2';\n /**\n * Get the WebGL rendering context.\n */\n readonly context: WebGLRenderingContext;\n /**\n * Set or get the maximum batch size for matmul. 0 means to disable batching.\n *\n * @deprecated\n */\n matmulMaxBatchSize?: number;\n /**\n * Set or get the texture cache mode.\n *\n * @defaultValue `'full'`\n */\n textureCacheMode?: 'initializerOnly'|'full';\n /**\n * Set or get the packed texture mode\n *\n * @defaultValue `false`\n */\n pack?: boolean;\n /**\n * Set or get whether enable async download.\n *\n * @defaultValue `false`\n */\n async?: boolean;\n }\n\n export interface WebGpuProfilingDataV1TensorMetadata {\n dims: readonly number[];\n dataType: string;\n }\n export interface WebGpuProfilingDataV1 {\n version: 1;\n inputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n outputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n kernelId: number;\n kernelType: string;\n kernelName: string;\n programName: string;\n startTime: number;\n endTime: number;\n }\n\n export type WebGpuProfilingData = WebGpuProfilingDataV1;\n\n export interface WebGpuFlags {\n /**\n * Set or get the profiling mode.\n *\n * @deprecated Use `env.webgpu.profiling.mode` instead. If `env.webgpu.profiling.mode` is set, this property will be\n * ignored.\n */\n profilingMode?: 'off'|'default';\n /**\n * Set or get the profiling configuration.\n */\n profiling?: {\n /**\n * Set or get the profiling mode.\n *\n * @defaultValue `'off'`\n */\n mode?: 'off'|'default';\n\n /**\n * Set or get a callback function when a profiling data is received. If not set, the profiling data will be\n * printed to console.\n */\n ondata?: (data: WebGpuProfilingData) => void;\n };\n /**\n * Set or get the power preference.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n powerPreference?: 'low-power'|'high-performance';\n /**\n * Set or get the force fallback adapter flag.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n forceFallbackAdapter?: boolean;\n /**\n * Set or get the adapter for WebGPU.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as the GPU adapter for the underlying WebGPU backend to create GPU device.\n *\n * If this property is not set, it will be available to get after the first WebGPU inference session is created. 
The\n * value will be the GPU adapter that created by the underlying WebGPU backend.\n *\n * When use with TypeScript, the type of this property is `GPUAdapter` defined in \"@webgpu/types\".\n * Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType}\n */\n adapter: unknown;\n /**\n * Get the device for WebGPU.\n *\n * This property is only available after the first WebGPU inference session is created.\n *\n * When use with TypeScript, the type of this property is `GPUDevice` defined in \"@webgpu/types\".\n * Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType} for more details about why not use types defined in \"@webgpu/types\".\n */\n readonly device: unknown;\n /**\n * Set or get whether validate input content.\n *\n * @defaultValue `false`\n */\n validateInputContent?: boolean;\n }\n}\n\nexport interface Env {\n /**\n * set the severity level for logging.\n *\n * @defaultValue `'warning'`\n */\n logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal';\n\n /**\n * Indicate whether run in debug mode.\n *\n * @defaultValue `false`\n */\n debug?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Get version of the current package.\n */\n readonly versions: {\n readonly common: string;\n readonly web?: string;\n readonly node?: string;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n readonly 'react-native'?: string;\n };\n\n /**\n * Represent a set of flags for WebAssembly\n */\n readonly wasm: Env.WebAssemblyFlags;\n\n /**\n * Represent a set of flags for WebGL\n */\n readonly webgl: Env.WebGLFlags;\n\n /**\n * Represent a set of flags for WebGPU\n */\n readonly webgpu: Env.WebGpuFlags;\n\n [name: string]: unknown;\n}\n\n/**\n * Represent a set of flags as a global singleton.\n */\nexport const env: Env = envImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {Tensor} from './tensor.js';\n\n/**\n * implementation of Tensor.toDataURL()\n */\nexport const tensorToDataURL = (tensor: Tensor, options?: TensorToDataUrlOptions): string => {\n const canvas = typeof document !== 'undefined' ? document.createElement('canvas') : (new OffscreenCanvas(1, 1));\n canvas.width = tensor.dims[3];\n canvas.height = tensor.dims[2];\n const pixels2DContext =\n canvas.getContext('2d') as (CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D | null);\n\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n }\n\n const inputformat = options?.format !== undefined ? 
options.format : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 0];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n // Default pointer assignments\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < height; i++) {\n for (let j = 0; j < width; j++) {\n const R = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n const G = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n const B = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n const A = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n // eslint-disable-next-line @typescript-eslint/restrict-plus-operands\n pixels2DContext.fillStyle = 'rgba(' + R + ',' + G + ',' + B + ',' + A + ')';\n pixels2DContext.fillRect(j, i, 1, 1);\n }\n }\n if ('toDataURL' in canvas) {\n return canvas.toDataURL();\n } else {\n throw new Error('toDataURL is not supported');\n }\n } else {\n throw new Error('Can not access image data');\n }\n};\n\n/**\n * implementation of Tensor.toImageData()\n */\nexport const tensorToImageData = (tensor: Tensor, options?: TensorToImageDataOptions): ImageData => {\n const pixels2DContext = typeof document !== 'undefined' ?\n document.createElement('canvas').getContext('2d') :\n new OffscreenCanvas(1, 1).getContext('2d') as OffscreenCanvasRenderingContext2D;\n let image: ImageData;\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n let channels: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[1];\n channels = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n channels = tensor.dims[1];\n }\n const inputformat = options !== undefined ? (options.format !== undefined ? 
options.format : 'RGB') : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 255];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n if (options !== undefined) {\n if (options.format !== undefined && (channels === 4 && options.format !== 'RGBA') ||\n (channels === 3 && (options.format !== 'RGB' && options.format !== 'BGR'))) {\n throw new Error('Tensor format doesn\\'t match input tensor dims');\n }\n }\n\n // Default pointer assignments\n const step = 4;\n let rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n image = pixels2DContext.createImageData(width, height);\n\n for (let i = 0; i < height * width;\n rImagePointer += step, gImagePointer += step, bImagePointer += step, aImagePointer += step, i++) {\n image.data[rImagePointer] = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n image.data[gImagePointer] = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n image.data[bImagePointer] = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n image.data[aImagePointer] = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n }\n\n } else {\n throw new Error('Can not access image data');\n }\n return image;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsDimensions, OptionsFormat, OptionsNormalizationParameters, OptionsTensorFormat, OptionsTensorLayout, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\ninterface BufferToTensorOptions extends OptionsDimensions, OptionsTensorLayout, OptionsNormalizationParameters,\n OptionsFormat, OptionsTensorFormat {}\n\n/**\n * Create a new tensor object from image object\n *\n * @param buffer - Extracted image buffer data - assuming RGBA format\n * @param imageFormat - input image configuration - required configurations height, width, format\n * @param tensorFormat - output tensor configuration - Default is RGB format\n */\nexport const bufferToTensor = (buffer: Uint8ClampedArray|undefined, options: BufferToTensorOptions): Tensor => {\n if (buffer === undefined) {\n throw new Error('Image buffer must be defined');\n }\n if (options.height === undefined || options.width === undefined) {\n throw new Error('Image height and width must be defined');\n }\n if (options.tensorLayout === 'NHWC') {\n throw new Error('NHWC Tensor layout is not supported yet');\n }\n\n const {height, width} = options;\n\n const norm = options.norm ?? {mean: 255, bias: 0};\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean![0], norm.mean![1], norm.mean![2], norm.mean![3] ?? 255];\n }\n\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias![0], norm.bias![1], norm.bias![2], norm.bias![3] ?? 0];\n }\n\n const inputformat = options.format !== undefined ? options.format : 'RGBA';\n // default value is RGBA since imagedata and HTMLImageElement uses it\n\n const outputformat =\n options.tensorFormat !== undefined ? (options.tensorFormat !== undefined ? options.tensorFormat : 'RGB') : 'RGB';\n const stride = height * width;\n const float32Data = outputformat === 'RGBA' ? 
new Float32Array(stride * 4) : new Float32Array(stride * 3);\n\n // Default pointer assignments\n let step = 4, rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGB') {\n step = 3;\n rImagePointer = 0;\n gImagePointer = 1;\n bImagePointer = 2;\n aImagePointer = -1;\n }\n\n // Updating the pointer assignments based on the output tensor format\n if (outputformat === 'RGBA') {\n aTensorPointer = stride * 3;\n } else if (outputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n } else if (outputformat === 'BGR') {\n bTensorPointer = 0;\n gTensorPointer = stride;\n rTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < stride;\n i++, rImagePointer += step, bImagePointer += step, gImagePointer += step, aImagePointer += step) {\n float32Data[rTensorPointer++] = (buffer[rImagePointer] + normBias[0]) / normMean[0];\n float32Data[gTensorPointer++] = (buffer[gImagePointer] + normBias[1]) / normMean[1];\n float32Data[bTensorPointer++] = (buffer[bImagePointer] + normBias[2]) / normMean[2];\n if (aTensorPointer !== -1 && aImagePointer !== -1) {\n float32Data[aTensorPointer++] = (buffer[aImagePointer] + normBias[3]) / normMean[3];\n }\n }\n\n // Float32Array -> ort.Tensor\n const outputTensor = outputformat === 'RGBA' ? new Tensor('float32', float32Data, [1, 4, height, width]) :\n new Tensor('float32', float32Data, [1, 3, height, width]);\n return outputTensor;\n};\n\n/**\n * implementation of Tensor.fromImage().\n */\nexport const tensorFromImage = async(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise => {\n // checking the type of image object\n const isHTMLImageEle = typeof (HTMLImageElement) !== 'undefined' && image instanceof HTMLImageElement;\n const isImageDataEle = typeof (ImageData) !== 'undefined' && image instanceof ImageData;\n const isImageBitmap = typeof (ImageBitmap) !== 'undefined' && image instanceof ImageBitmap;\n const isString = typeof image === 'string';\n\n let data: Uint8ClampedArray|undefined;\n let bufferToTensorOptions: BufferToTensorOptions = options ?? 
{};\n\n const createCanvas = () => {\n if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n } else if (typeof OffscreenCanvas !== 'undefined') {\n return new OffscreenCanvas(1, 1);\n } else {\n throw new Error('Canvas is not supported');\n }\n };\n const createCanvasContext = (canvas: HTMLCanvasElement|OffscreenCanvas) => {\n if (canvas instanceof HTMLCanvasElement) {\n return canvas.getContext('2d');\n } else if (canvas instanceof OffscreenCanvas) {\n return canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n } else {\n return null;\n }\n };\n // filling and checking image configuration options\n if (isHTMLImageEle) {\n // HTMLImageElement - image object - format is RGBA by default\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n let height = image.height;\n let width = image.width;\n if (options !== undefined && options.resizedHeight !== undefined && options.resizedWidth !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n if (options.tensorFormat !== undefined) {\n throw new Error('Image input config format must be RGBA for HTMLImageElement');\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n }\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n }\n\n pixels2DContext.drawImage(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isImageDataEle) {\n let height: number;\n let width: number;\n\n if (options !== undefined && options.resizedWidth !== undefined && options.resizedHeight !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n } else {\n height = image.height;\n width = image.width;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n }\n bufferToTensorOptions.format = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n\n if (options !== undefined) {\n const tempCanvas = createCanvas();\n\n tempCanvas.width = width;\n tempCanvas.height = height;\n\n const pixels2DContext = createCanvasContext(tempCanvas);\n\n if (pixels2DContext != null) {\n pixels2DContext.putImageData(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else {\n data = image.data;\n }\n } else if (isImageBitmap) {\n // ImageBitmap - image object - format must be provided by user\n if (options === undefined) {\n throw new Error('Please provide image config with format for Imagebitmap');\n }\n\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n const height = image.height;\n const width = image.width;\n pixels2DContext.drawImage(image, 0, 0, width, height);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isString) {\n return 
new Promise((resolve, reject) => {\n const canvas = createCanvas();\n const context = createCanvasContext(canvas);\n if (!image || !context) {\n return reject();\n }\n const newImage = new Image();\n newImage.crossOrigin = 'Anonymous';\n newImage.src = image;\n newImage.onload = () => {\n canvas.width = newImage.width;\n canvas.height = newImage.height;\n context.drawImage(newImage, 0, 0, canvas.width, canvas.height);\n const img = context.getImageData(0, 0, canvas.width, canvas.height);\n\n bufferToTensorOptions.height = canvas.height;\n bufferToTensorOptions.width = canvas.width;\n resolve(bufferToTensor(img.data, bufferToTensorOptions));\n };\n });\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n\n if (data !== undefined) {\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n};\n\n/**\n * implementation of Tensor.fromTexture().\n */\nexport const tensorFromTexture = (\n texture: TensorInterface.TextureType, options: TensorFromTextureOptions): Tensor => {\n const {width, height, download, dispose} = options;\n // Always assume RGBAF32. TODO: support different texture format\n const dims = [1, height, width, 4];\n return new Tensor({location: 'texture', type: 'float32', texture, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromGpuBuffer().\n */\nexport const tensorFromGpuBuffer = (\n gpuBuffer: TensorInterface.GpuBufferType, options: TensorFromGpuBufferOptions): Tensor => {\n const {dataType, dims, download, dispose} = options;\n return new Tensor({location: 'gpu-buffer', type: dataType ?? 'float32', gpuBuffer, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromPinnedBuffer().\n */\nexport const tensorFromPinnedBuffer = (\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor =>\n new Tensor({location: 'cpu-pinned', type, data: buffer, dims: dims ?? [buffer.length]});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type SupportedTypedArrayConstructors = Float32ArrayConstructor|Uint8ArrayConstructor|Int8ArrayConstructor|\n Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|Uint8ArrayConstructor|\n Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor;\nexport type SupportedTypedArray = InstanceType;\n\n// a runtime map that maps type string to TypedArray constructor. Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP = new Map([\n ['float32', Float32Array],\n ['uint8', Uint8Array],\n ['int8', Int8Array],\n ['uint16', Uint16Array],\n ['int16', Int16Array],\n ['int32', Int32Array],\n ['bool', Uint8Array],\n ['float64', Float64Array],\n ['uint32', Uint32Array],\n]);\n\n// a runtime map that maps type string to TypedArray constructor. 
Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP = new Map([\n [Float32Array, 'float32'],\n [Uint8Array, 'uint8'],\n [Int8Array, 'int8'],\n [Uint16Array, 'uint16'],\n [Int16Array, 'int16'],\n [Int32Array, 'int32'],\n [Float64Array, 'float64'],\n [Uint32Array, 'uint32'],\n]);\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// the following code allows delaying execution of BigInt/Float16Array checking. This allows lazy initialization for\n// NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP and NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, which allows BigInt/Float16Array\n// polyfill if available.\nlet isTypedArrayChecked = false;\nexport const checkTypedArray = () => {\n if (!isTypedArrayChecked) {\n isTypedArrayChecked = true;\n const isBigInt64ArrayAvailable = typeof BigInt64Array !== 'undefined' && BigInt64Array.from;\n const isBigUint64ArrayAvailable = typeof BigUint64Array !== 'undefined' && BigUint64Array.from;\n const isFloat16ArrayAvailable = typeof Float16Array !== 'undefined' && Float16Array.from;\n\n if (isBigInt64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('int64', BigInt64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigInt64Array, 'int64');\n }\n if (isBigUint64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('uint64', BigUint64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigUint64Array, 'uint64');\n }\n if (isFloat16ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Float16Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(Float16Array, 'float16');\n } else {\n // if Float16Array is not available, use 'Uint16Array' to store the data.\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Uint16Array);\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TextureConstructorParameters} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\n\n/**\n * calculate size from dims.\n *\n * @param dims the dims array. 
May be an illegal input.\n */\nexport const calculateSize = (dims: readonly unknown[]): number => {\n let size = 1;\n for (let i = 0; i < dims.length; i++) {\n const dim = dims[i];\n if (typeof dim !== 'number' || !Number.isSafeInteger(dim)) {\n throw new TypeError(`dims[${i}] must be an integer, got: ${dim}`);\n }\n if (dim < 0) {\n throw new RangeError(`dims[${i}] must be a non-negative integer, got: ${dim}`);\n }\n size *= dim;\n }\n return size;\n};\n\n/**\n * implementation of Tensor.reshape()\n */\nexport const tensorReshape = (tensor: Tensor, dims: readonly number[]): Tensor => {\n switch (tensor.location) {\n case 'cpu':\n return new Tensor(tensor.type, tensor.data, dims);\n case 'cpu-pinned':\n return new Tensor({\n location: 'cpu-pinned',\n data: tensor.data as CpuPinnedConstructorParameters['data'],\n type: tensor.type as CpuPinnedConstructorParameters['type'],\n dims,\n });\n case 'texture':\n return new Tensor({\n location: 'texture',\n texture: tensor.texture,\n type: tensor.type as TextureConstructorParameters['type'],\n dims,\n });\n case 'gpu-buffer':\n return new Tensor({\n location: 'gpu-buffer',\n gpuBuffer: tensor.gpuBuffer,\n type: tensor.type as GpuBufferConstructorParameters['type'],\n dims,\n });\n default:\n throw new Error(`tensorReshape: tensor location ${tensor.location} is not supported`);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {tensorToDataURL, tensorToImageData} from './tensor-conversion-impl.js';\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {tensorFromGpuBuffer, tensorFromImage, tensorFromPinnedBuffer, tensorFromTexture} from './tensor-factory-impl.js';\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions, TextureConstructorParameters} from './tensor-factory.js';\nimport {checkTypedArray, NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP, NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, SupportedTypedArray, SupportedTypedArrayConstructors} from './tensor-impl-type-mapping.js';\nimport {calculateSize, tensorReshape} from './tensor-utils-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\n// type aliases for those exported from Tensor interface\n\ntype TensorType = TensorInterface.Type;\ntype TensorDataType = TensorInterface.DataType;\ntype TensorDataLocation = TensorInterface.DataLocation;\ntype TensorTextureType = TensorInterface.TextureType;\ntype TensorGpuBufferType = TensorInterface.GpuBufferType;\n\n/**\n * the implementation of Tensor interface.\n *\n * @ignore\n */\nexport class Tensor implements TensorInterface {\n // #region constructors\n\n /**\n * Construct a new CPU tensor object from the given type, data and dims.\n */\n constructor(\n type: TensorType, data: TensorDataType|readonly string[]|readonly number[]|readonly boolean[],\n dims?: readonly number[]);\n /**\n * Construct a new CPU tensor object from the given data and dims. 
Type is inferred from data.\n */\n constructor(data: TensorDataType|readonly string[]|readonly boolean[], dims?: readonly number[]);\n /**\n * Construct a new tensor object from the pinned CPU data with the given type and dims.\n *\n * Tensor's location will be set to 'cpu-pinned'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: CpuPinnedConstructorParameters);\n /**\n * Construct a new tensor object from the WebGL texture with the given type and dims.\n *\n * Tensor's location will be set to 'texture'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: TextureConstructorParameters);\n /**\n * Construct a new tensor object from the WebGPU buffer with the given type and dims.\n *\n * Tensor's location will be set to 'gpu-buffer'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: GpuBufferConstructorParameters);\n\n /**\n * implementation.\n */\n constructor(\n arg0: TensorType|TensorDataType|readonly string[]|readonly boolean[]|CpuPinnedConstructorParameters|\n TextureConstructorParameters|GpuBufferConstructorParameters,\n arg1?: TensorDataType|readonly number[]|readonly string[]|readonly boolean[], arg2?: readonly number[]) {\n // perform one-time check for BigInt/Float16Array support\n checkTypedArray();\n\n let type: TensorType;\n let dims: readonly number[];\n\n if (typeof arg0 === 'object' && 'location' in arg0) {\n //\n // constructing tensor from specific location\n //\n this.dataLocation = arg0.location;\n type = arg0.type;\n dims = arg0.dims;\n switch (arg0.location) {\n case 'cpu-pinned': {\n const expectedTypedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(type);\n if (!expectedTypedArrayConstructor) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from pinned buffer`);\n }\n if (!(arg0.data instanceof expectedTypedArrayConstructor)) {\n throw new TypeError(`buffer should be of type ${expectedTypedArrayConstructor.name}`);\n }\n this.cpuData = arg0.data;\n break;\n }\n case 'texture': {\n if (type !== 'float32') {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from texture`);\n }\n this.gpuTextureData = arg0.texture;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n case 'gpu-buffer': {\n if ((type !== 'float32' && type !== 'float16' && type !== 'int32' && type !== 'int64' && type !== 'uint32' &&\n type !== 'uint8' && type !== 'bool')) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from gpu buffer`);\n }\n this.gpuBufferData = arg0.gpuBuffer;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n default:\n throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`);\n }\n } else {\n //\n // constructing tensor of location 'cpu'\n //\n let data: TensorDataType;\n let maybeDims: typeof arg1|typeof arg2;\n // check whether arg0 is type or data\n if (typeof arg0 === 'string') {\n //\n // Override: constructor(type, data, ...)\n //\n type = arg0;\n maybeDims = arg2;\n if (arg0 === 'string') {\n // string tensor\n if (!Array.isArray(arg1)) {\n throw new TypeError('A string tensor\\'s data must be a string array.');\n }\n // we don't check whether every element in the array is string; this is too slow. 
we assume it's correct and\n // error will be populated at inference\n data = arg1;\n } else {\n // numeric tensor\n const typedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(arg0);\n if (typedArrayConstructor === undefined) {\n throw new TypeError(`Unsupported tensor type: ${arg0}.`);\n }\n if (Array.isArray(arg1)) {\n if (arg0 === 'float16' && typedArrayConstructor === Uint16Array) {\n // When no Float16Array polyfill is used, we cannot create 'float16' tensor from number array.\n //\n // Throw error here because when user try to use number array as data,\n // e.g. new Tensor('float16', [1, 2, 3, 4], dims)), it will actually call\n // Uint16Array.from(arg1) which generates wrong data.\n throw new TypeError(\n 'Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.');\n } else if (arg0 === 'uint64' || arg0 === 'int64') {\n // use 'as any' here because:\n // 1. TypeScript's check on type of 'Array.isArray()' does not work with readonly arrays.\n // see https://github.com/microsoft/TypeScript/issues/17002\n // 2. TypeScript's check on union type of '(BigInt64ArrayConstructor|BigUint64ArrayConstructor).from()'\n // does not accept parameter mapFn.\n // 3. parameters of 'SupportedTypedArrayConstructors.from()' does not match the requirement of the union\n // type.\n\n // assume 'arg1' is of type \"readonly number[]|readonly bigint[]\" here.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1, BigInt);\n } else {\n // assume 'arg1' is of type \"readonly number[]\" here.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1);\n }\n } else if (arg1 instanceof typedArrayConstructor) {\n data = arg1;\n } else {\n throw new TypeError(`A ${type} tensor's data must be type of ${typedArrayConstructor}`);\n }\n }\n } else {\n //\n // Override: constructor(data, ...)\n //\n maybeDims = arg1;\n if (Array.isArray(arg0)) {\n // only boolean[] and string[] is supported\n if (arg0.length === 0) {\n throw new TypeError('Tensor type cannot be inferred from an empty array.');\n }\n const firstElementType = typeof arg0[0];\n if (firstElementType === 'string') {\n type = 'string';\n data = arg0;\n } else if (firstElementType === 'boolean') {\n type = 'bool';\n // 'arg0' is of type 'boolean[]'. Uint8Array.from(boolean[]) actually works, but typescript thinks this is\n // wrong type. 
We use 'as any' to make it happy.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = Uint8Array.from(arg0 as any[]);\n } else {\n throw new TypeError(`Invalid element type of data array: ${firstElementType}.`);\n }\n } else {\n // get tensor type from TypedArray\n const mappedType =\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.get(arg0.constructor as SupportedTypedArrayConstructors);\n if (mappedType === undefined) {\n throw new TypeError(`Unsupported type for tensor data: ${arg0.constructor}.`);\n }\n type = mappedType;\n data = arg0 as SupportedTypedArray;\n }\n }\n\n // type and data is processed, now processing dims\n if (maybeDims === undefined) {\n // assume 1-D tensor if dims omitted\n maybeDims = [data.length];\n } else if (!Array.isArray(maybeDims)) {\n throw new TypeError('A tensor\\'s dims must be a number array');\n }\n dims = maybeDims as readonly number[];\n\n this.cpuData = data;\n this.dataLocation = 'cpu';\n }\n\n // perform check on dims\n const size = calculateSize(dims);\n // if data is on CPU, check whether data length matches tensor size\n if (this.cpuData && size !== this.cpuData.length) {\n throw new Error(`Tensor's size(${size}) does not match data length(${this.cpuData.length}).`);\n }\n\n this.type = type;\n this.dims = dims;\n this.size = size;\n }\n // #endregion\n\n // #region factory\n static async fromImage(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise {\n return tensorFromImage(image, options);\n }\n\n static fromTexture(\n texture: TensorTextureType, options: TensorFromTextureOptions): TensorInterface {\n return tensorFromTexture(texture, options);\n }\n\n static fromGpuBuffer(\n gpuBuffer: TensorGpuBufferType, options: TensorFromGpuBufferOptions): TensorInterface {\n return tensorFromGpuBuffer(gpuBuffer, options);\n }\n\n static fromPinnedBuffer(\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor {\n return tensorFromPinnedBuffer(type, buffer, dims);\n }\n\n // #endregion\n\n // #region conversions\n toDataURL(options?: TensorToDataUrlOptions): string {\n return tensorToDataURL(this, options);\n }\n\n toImageData(options?: TensorToImageDataOptions): ImageData {\n return tensorToImageData(this, options);\n }\n // #endregion\n\n // #region public fields\n readonly dims: readonly number[];\n readonly type: TensorType;\n readonly size: number;\n // #endregion\n\n // #region private fields\n\n /**\n * stores the location of the data.\n */\n private dataLocation: TensorDataLocation;\n\n /**\n * stores the data on CPU, if location is 'cpu' or 'cpu-pinned'. otherwise empty.\n */\n private cpuData?: TensorDataType;\n\n /**\n * stores the underlying texture when location is 'texture'. otherwise empty.\n */\n private gpuTextureData?: TensorTextureType;\n\n /**\n * stores the underlying GPU buffer when location is 'gpu-buffer'. 
otherwise empty.\n */\n private gpuBufferData?: TensorGpuBufferType;\n\n /**\n * stores an optional downloader function to download data from GPU to CPU.\n */\n private downloader?(): Promise;\n\n /**\n * a flag indicating whether the data is being downloaded from GPU to CPU.\n */\n private isDownloading?: boolean;\n\n /**\n * stores an optional disposer function to dispose the underlying data.\n */\n private disposer?(): void;\n // #endregion\n\n // #region properties\n get data(): TensorDataType {\n this.ensureValid();\n if (!this.cpuData) {\n throw new Error(\n 'The data is not on CPU. Use `getData()` to download GPU data to CPU, ' +\n 'or use `texture` or `gpuBuffer` property to access the GPU data directly.');\n }\n return this.cpuData;\n }\n\n get location(): TensorDataLocation {\n return this.dataLocation;\n }\n\n get texture(): TensorTextureType {\n this.ensureValid();\n if (!this.gpuTextureData) {\n throw new Error('The data is not stored as a WebGL texture.');\n }\n return this.gpuTextureData;\n }\n\n get gpuBuffer(): TensorGpuBufferType {\n this.ensureValid();\n if (!this.gpuBufferData) {\n throw new Error('The data is not stored as a WebGPU buffer.');\n }\n return this.gpuBufferData;\n }\n // #endregion\n\n // #region methods\n\n async getData(releaseData?: boolean): Promise {\n this.ensureValid();\n switch (this.dataLocation) {\n case 'cpu':\n case 'cpu-pinned':\n return this.data;\n case 'texture':\n case 'gpu-buffer': {\n if (!this.downloader) {\n throw new Error('The current tensor is not created with a specified data downloader.');\n }\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n try {\n this.isDownloading = true;\n const data = await this.downloader();\n this.downloader = undefined;\n this.dataLocation = 'cpu';\n this.cpuData = data;\n\n if (releaseData && this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n\n return data;\n\n } finally {\n this.isDownloading = false;\n }\n }\n default:\n throw new Error(`cannot get data from location: ${this.dataLocation}`);\n }\n }\n\n dispose(): void {\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n\n if (this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n this.cpuData = undefined;\n this.gpuTextureData = undefined;\n this.gpuBufferData = undefined;\n this.downloader = undefined;\n this.isDownloading = undefined;\n\n this.dataLocation = 'none';\n }\n\n // #endregion\n\n // #region tensor utilities\n private ensureValid(): void {\n if (this.dataLocation === 'none') {\n throw new Error('The tensor is disposed.');\n }\n }\n\n reshape(dims: readonly number[]): TensorInterface {\n this.ensureValid();\n if (this.downloader || this.disposer) {\n throw new Error('Cannot reshape a tensor that owns GPU resource.');\n }\n return tensorReshape(this, dims);\n }\n // #endregion\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorFactory} from './tensor-factory.js';\nimport {Tensor as TensorImpl} from './tensor-impl.js';\nimport {TypedTensorUtils} from './tensor-utils.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\n/**\n * represent a basic tensor with specified dimensions and data type.\n */\ninterface TypedTensorBase {\n /**\n * Get the dimensions of the tensor.\n */\n readonly dims: readonly number[];\n /**\n * Get the data type of the tensor.\n */\n readonly type: T;\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is not on CPU (eg. it's in the form of WebGL texture or WebGPU buffer), throw error.\n */\n readonly data: Tensor.DataTypeMap[T];\n /**\n * Get the location of the data.\n */\n readonly location: Tensor.DataLocation;\n /**\n * Get the WebGL texture that holds the tensor data.\n *\n * If the data is not on GPU as WebGL texture, throw error.\n */\n readonly texture: Tensor.TextureType;\n /**\n * Get the WebGPU buffer that holds the tensor data.\n *\n * If the data is not on GPU as WebGPU buffer, throw error.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is on CPU, returns the data immediately.\n * If the data is on GPU, downloads the data and returns the promise.\n *\n * @param releaseData - whether release the data on GPU. Ignore if data is already on CPU.\n */\n getData(releaseData?: boolean): Promise;\n\n /**\n * Dispose the tensor data.\n *\n * If the data is on CPU, remove its internal reference to the underlying data.\n * If the data is on GPU, release the data on GPU.\n *\n * After calling this function, the tensor is considered no longer valid. Its location will be set to 'none'.\n */\n dispose(): void;\n}\n\nexport declare namespace Tensor {\n interface DataTypeMap {\n float32: Float32Array;\n uint8: Uint8Array;\n int8: Int8Array;\n uint16: Uint16Array;\n int16: Int16Array;\n int32: Int32Array;\n int64: BigInt64Array;\n string: string[];\n bool: Uint8Array;\n float16: Uint16Array; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: Float64Array;\n uint32: Uint32Array;\n uint64: BigUint64Array;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n interface ElementTypeMap {\n float32: number;\n uint8: number;\n int8: number;\n uint16: number;\n int16: number;\n int32: number;\n int64: bigint;\n string: string;\n bool: boolean;\n float16: number; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: number;\n uint32: number;\n uint64: bigint;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n type DataType = DataTypeMap[Type];\n type ElementType = ElementTypeMap[Type];\n\n /**\n * supported data types for constructing a tensor from a pinned CPU buffer\n */\n export type CpuPinnedDataTypes = Exclude;\n\n /**\n * type alias for WebGL texture\n */\n export type TextureType = WebGLTexture;\n\n /**\n * supported data types for constructing a tensor from a WebGL texture\n */\n export type TextureDataTypes = 'float32';\n\n /**\n * type alias for WebGPU buffer\n *\n * The reason why we don't use type \"GPUBuffer\" defined in webgpu.d.ts from @webgpu/types is because \"@webgpu/types\"\n * requires \"@types/dom-webcodecs\" as peer dependency when using TypeScript < v5.1 and its version need to be chosen\n * carefully according to the TypeScript version being used. 
This means so far there is not a way to keep every\n * TypeScript version happy. It turns out that we will easily broke users on some TypeScript version.\n *\n * for more info see https://github.com/gpuweb/types/issues/127\n */\n export type GpuBufferType = {size: number; mapState: 'unmapped' | 'pending' | 'mapped'};\n\n /**\n * supported data types for constructing a tensor from a WebGPU buffer\n */\n export type GpuBufferDataTypes = 'float32'|'float16'|'int32'|'int64'|'uint32'|'uint8'|'bool';\n\n /**\n * represent where the tensor data is stored\n */\n export type DataLocation = 'none'|'cpu'|'cpu-pinned'|'texture'|'gpu-buffer';\n\n /**\n * represent the data type of a tensor\n */\n export type Type = keyof DataTypeMap;\n}\n\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface TypedTensor extends TypedTensorBase, TypedTensorUtils {}\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface Tensor extends TypedTensorBase, TypedTensorUtils {}\n\n/**\n * type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.\n */\nexport interface TensorConstructor extends TensorFactory {\n // #region CPU tensor - specify element type\n /**\n * Construct a new string tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'string', data: Tensor.DataTypeMap['string']|readonly string[],\n dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'bool', data: Tensor.DataTypeMap['bool']|readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new 64-bit integer typed tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(\n type: T, data: Tensor.DataTypeMap[T]|readonly bigint[]|readonly number[],\n dims?: readonly number[]): TypedTensor;\n\n /**\n * Construct a new numeric tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new>(\n type: T, data: Tensor.DataTypeMap[T]|readonly number[], dims?: readonly number[]): TypedTensor;\n // #endregion\n\n // #region CPU tensor - infer element types\n\n /**\n * Construct a new float32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float32Array, dims?: readonly number[]): TypedTensor<'float32'>;\n\n /**\n * Construct a new int8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(data: Int8Array, dims?: readonly number[]): TypedTensor<'int8'>;\n\n /**\n * Construct a new uint8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint8Array, dims?: readonly number[]): TypedTensor<'uint8'>;\n\n /**\n * Construct a new uint16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint16Array, dims?: readonly number[]): TypedTensor<'uint16'>;\n\n /**\n * Construct a new int16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int16Array, dims?: readonly number[]): TypedTensor<'int16'>;\n\n /**\n * Construct a new int32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int32Array, dims?: readonly number[]): TypedTensor<'int32'>;\n\n /**\n * Construct a new int64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigInt64Array, dims?: readonly number[]): TypedTensor<'int64'>;\n\n /**\n * Construct a new string tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly string[], dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new float64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float64Array, dims?: readonly number[]): TypedTensor<'float64'>;\n\n /**\n * Construct a new uint32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint32Array, dims?: readonly number[]): TypedTensor<'uint32'>;\n\n /**\n * Construct a new uint64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigUint64Array, dims?: readonly number[]): TypedTensor<'uint64'>;\n\n // #endregion\n\n // #region CPU tensor - fall back to non-generic tensor type declaration\n\n /**\n * Construct a new tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(type: Tensor.Type, data: Tensor.DataType|readonly number[]|readonly string[]|readonly bigint[]|readonly boolean[],\n dims?: readonly number[]): Tensor;\n\n /**\n * Construct a new tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Tensor.DataType, dims?: readonly number[]): Tensor;\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const Tensor = TensorImpl as TensorConstructor;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from './env-impl.js';\n\n/**\n * @ignore\n */\nexport const TRACE = (deviceType: string, label: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n // eslint-disable-next-line no-console\n console.timeStamp(`${deviceType}::ORT::${label}`);\n};\n\nconst TRACE_FUNC = (msg: string, extraMsg?: string) => {\n const stack = new Error().stack?.split(/\\r\\n|\\r|\\n/g) || [];\n let hasTraceFunc = false;\n for (let i = 0; i < stack.length; i++) {\n if (hasTraceFunc && !stack[i].includes('TRACE_FUNC')) {\n let label = `FUNC_${msg}::${stack[i].trim().split(' ')[1]}`;\n if (extraMsg) {\n label += `::${extraMsg}`;\n }\n TRACE('CPU', label);\n return;\n }\n if (stack[i].includes('TRACE_FUNC')) {\n hasTraceFunc = true;\n }\n }\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_BEGIN = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('BEGIN', extraMsg);\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_END = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('END', extraMsg);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {InferenceSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSessionInterface} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from './trace.js';\n\ntype SessionOptions = InferenceSessionInterface.SessionOptions;\ntype RunOptions = InferenceSessionInterface.RunOptions;\ntype FeedsType = InferenceSessionInterface.FeedsType;\ntype FetchesType = InferenceSessionInterface.FetchesType;\ntype ReturnType = InferenceSessionInterface.ReturnType;\n\nexport class InferenceSession implements InferenceSessionInterface {\n private constructor(handler: InferenceSessionHandler) {\n this.handler = handler;\n }\n run(feeds: FeedsType, options?: RunOptions): Promise;\n run(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async run(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n TRACE_FUNC_BEGIN();\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (this.outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of this.outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSessionInterface.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of this.inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output 
names list\n if (isFetchesEmpty) {\n for (const name of this.outputNames) {\n fetches[name] = null;\n }\n }\n\n // feeds, fetches and options are prepared\n\n const results = await this.handler.run(feeds, fetches, options);\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n TRACE_FUNC_END();\n return returnValue;\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n\n static create(path: string, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: SessionOptions):\n Promise;\n static create(buffer: Uint8Array, options?: SessionOptions): Promise;\n static async create(\n arg0: string|ArrayBufferLike|Uint8Array, arg1?: SessionOptions|number, arg2?: number,\n arg3?: SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n // either load from a file or buffer\n let filePathOrUint8Array: string|Uint8Array;\n let options: SessionOptions = {};\n\n if (typeof arg0 === 'string') {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (arg0 instanceof Uint8Array) {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (\n arg0 instanceof ArrayBuffer ||\n (typeof SharedArrayBuffer !== 'undefined' && arg0 instanceof SharedArrayBuffer)) {\n const buffer = arg0;\n let byteOffset = 0;\n let byteLength = arg0.byteLength;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 === 'number') {\n byteOffset = arg1;\n if (!Number.isSafeInteger(byteOffset)) {\n throw new RangeError('\\'byteOffset\\' must be an integer.');\n }\n if (byteOffset < 0 || byteOffset >= buffer.byteLength) {\n throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);\n }\n byteLength = arg0.byteLength - byteOffset;\n if (typeof arg2 === 'number') {\n byteLength = arg2;\n if (!Number.isSafeInteger(byteLength)) {\n throw new RangeError('\\'byteLength\\' must be an integer.');\n }\n if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {\n throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);\n }\n if (typeof arg3 === 'object' && arg3 !== null) {\n options = arg3;\n } else if (typeof arg3 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'byteLength\\' must be a number.');\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);\n } else {\n throw new TypeError('Unexpected argument[0]: must be \\'path\\' or \\'buffer\\'.');\n }\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n const handler = await 
backend.createInferenceSessionHandler(filePathOrUint8Array, optionsWithValidatedEPs);\n TRACE_FUNC_END();\n return new InferenceSession(handler);\n }\n\n startProfiling(): void {\n this.handler.startProfiling();\n }\n endProfiling(): void {\n this.handler.endProfiling();\n }\n\n get inputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get outputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n private handler: InferenceSessionHandler;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession as InferenceSessionImpl} from './inference-session-impl.js';\nimport {OnnxModelOptions} from './onnx-model.js';\nimport {OnnxValue, OnnxValueDataLocation} from './onnx-value.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace InferenceSession {\n // #region input/output types\n\n type OnnxValueMapType = {readonly [name: string]: OnnxValue};\n type NullableOnnxValueMapType = {readonly [name: string]: OnnxValue | null};\n\n /**\n * A feeds (model inputs) is an object that uses input names as keys and OnnxValue as corresponding values.\n */\n type FeedsType = OnnxValueMapType;\n\n /**\n * A fetches (model outputs) could be one of the following:\n *\n * - Omitted. Use model's output names definition.\n * - An array of string indicating the output names.\n * - An object that use output names as keys and OnnxValue or null as corresponding values.\n *\n * @remark\n * different from input argument, in output, OnnxValue is optional. If an OnnxValue is present it will be\n * used as a pre-allocated value by the inference engine; if omitted, inference engine will allocate buffer\n * internally.\n */\n type FetchesType = readonly string[]|NullableOnnxValueMapType;\n\n /**\n * A inferencing return type is an object that uses output names as keys and OnnxValue as corresponding values.\n */\n type ReturnType = OnnxValueMapType;\n\n // #endregion\n\n // #region session options\n\n /**\n * A set of configurations for session behavior.\n */\n export interface SessionOptions extends OnnxModelOptions {\n /**\n * An array of execution provider options.\n *\n * An execution provider option can be a string indicating the name of the execution provider,\n * or an object of corresponding type.\n */\n executionProviders?: readonly ExecutionProviderConfig[];\n\n /**\n * The intra OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n intraOpNumThreads?: number;\n\n /**\n * The inter OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n interOpNumThreads?: number;\n\n /**\n * The free dimension override.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n freeDimensionOverrides?: {readonly [dimensionName: string]: number};\n\n /**\n * The optimization level.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n graphOptimizationLevel?: 'disabled'|'basic'|'extended'|'all';\n\n /**\n * Whether enable CPU memory arena.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n enableCpuMemArena?: boolean;\n\n /**\n * Whether enable memory pattern.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n 
enableMemPattern?: boolean;\n\n /**\n * Execution mode.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n executionMode?: 'sequential'|'parallel';\n\n /**\n * Optimized model file path.\n *\n * If this setting is specified, the optimized model will be dumped. In browser, a blob will be created\n * with a pop-up window.\n */\n optimizedModelFilePath?: string;\n\n /**\n * Whether enable profiling.\n *\n * This setting is a placeholder for a future use.\n */\n enableProfiling?: boolean;\n\n /**\n * File prefix for profiling.\n *\n * This setting is a placeholder for a future use.\n */\n profileFilePrefix?: string;\n\n /**\n * Log ID.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logId?: string;\n\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Specify string as a preferred data location for all outputs, or an object that use output names as keys and a\n * preferred data location as corresponding values.\n *\n * This setting is available only in ONNXRuntime Web for WebGL and WebGPU EP.\n */\n preferredOutputLocation?: OnnxValueDataLocation|{readonly [outputName: string]: OnnxValueDataLocation};\n\n /**\n * Whether enable graph capture.\n * This setting is available only in ONNXRuntime Web for WebGPU EP.\n */\n enableGraphCapture?: boolean;\n\n /**\n * Store configurations for a session. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_session_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
Will support Node.js binding and react-native later\n *\n * @example\n * ```js\n * extra: {\n * session: {\n * set_denormal_as_zero: \"1\",\n * disable_prepacking: \"1\"\n * },\n * optimization: {\n * enable_gelu_approximation: \"1\"\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #region execution providers\n\n // Currently, we have the following backends to support execution providers:\n // Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).\n // Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.\n // Backend ONNX.js: supports 'webgl'.\n // Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).\n interface ExecutionProviderOptionMap {\n coreml: CoreMLExecutionProviderOption;\n cpu: CpuExecutionProviderOption;\n cuda: CudaExecutionProviderOption;\n dml: DmlExecutionProviderOption;\n nnapi: NnapiExecutionProviderOption;\n tensorrt: TensorRtExecutionProviderOption;\n wasm: WebAssemblyExecutionProviderOption;\n webgl: WebGLExecutionProviderOption;\n webgpu: WebGpuExecutionProviderOption;\n webnn: WebNNExecutionProviderOption;\n qnn: QnnExecutionProviderOption;\n xnnpack: XnnpackExecutionProviderOption;\n }\n\n type ExecutionProviderName = keyof ExecutionProviderOptionMap;\n type ExecutionProviderConfig =\n ExecutionProviderOptionMap[ExecutionProviderName]|ExecutionProviderOption|ExecutionProviderName|string;\n\n export interface ExecutionProviderOption {\n readonly name: string;\n }\n export interface CpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cpu';\n useArena?: boolean;\n }\n export interface CudaExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cuda';\n deviceId?: number;\n }\n export interface DmlExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'dml';\n deviceId?: number;\n }\n export interface TensorRtExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'tensorrt';\n deviceId?: number;\n }\n export interface WebAssemblyExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'wasm';\n }\n export interface WebGLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgl';\n // TODO: add flags\n }\n export interface XnnpackExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'xnnpack';\n }\n export interface WebGpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgpu';\n preferredLayout?: 'NCHW'|'NHWC';\n }\n\n // #region WebNN options\n\n interface WebNNExecutionProviderName extends ExecutionProviderOption {\n readonly name: 'webnn';\n }\n\n /**\n * Represents a set of options for creating a WebNN MLContext.\n *\n * @see https://www.w3.org/TR/webnn/#dictdef-mlcontextoptions\n */\n export interface WebNNContextOptions {\n deviceType?: 'cpu'|'gpu'|'npu';\n numThreads?: number;\n powerPreference?: 'default'|'low-power'|'high-performance';\n }\n\n /**\n * Represents a set of options for WebNN execution provider without MLContext.\n */\n export interface WebNNOptionsWithoutMLContext extends WebNNExecutionProviderName, WebNNContextOptions {\n context?: never;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext.\n *\n * When MLContext is provided, the deviceType is also required so that the WebNN EP can determine the preferred\n * channel layout.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext\n */\n export interface WebNNOptionsWithMLContext extends 
WebNNExecutionProviderName,\n Omit,\n Required> {\n context: unknown /* MLContext */;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext which is created from GPUDevice.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext-gpudevice\n */\n export interface WebNNOptionsWebGpu extends WebNNExecutionProviderName {\n context: unknown /* MLContext */;\n gpuDevice: unknown /* GPUDevice */;\n }\n\n /**\n * Options for WebNN execution provider.\n */\n export type WebNNExecutionProviderOption = WebNNOptionsWithoutMLContext|WebNNOptionsWithMLContext|WebNNOptionsWebGpu;\n\n // #endregion\n\n export interface QnnExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'qnn';\n // TODO add flags\n }\n export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'coreml';\n /**\n * The bit flags for CoreML execution provider.\n *\n * ```\n * COREML_FLAG_USE_CPU_ONLY = 0x001\n * COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002\n * COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004\n * COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008\n * COREML_FLAG_CREATE_MLPROGRAM = 0x010\n * ```\n *\n * See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.\n *\n * This flag is available only in ONNXRuntime (Node.js binding).\n */\n coreMlFlags?: number;\n /**\n * Specify whether to use CPU only in CoreML EP.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n useCPUOnly?: boolean;\n /**\n * Specify whether to enable CoreML EP on subgraph.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n enableOnSubgraph?: boolean;\n /**\n * Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n onlyEnableDeviceWithANE?: boolean;\n }\n export interface NnapiExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'nnapi';\n useFP16?: boolean;\n useNCHW?: boolean;\n cpuDisabled?: boolean;\n cpuOnly?: boolean;\n }\n // #endregion\n\n // #endregion\n\n // #region run options\n\n /**\n * A set of configurations for inference run behavior\n */\n export interface RunOptions {\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Terminate all incomplete OrtRun calls as soon as possible if true\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n terminate?: boolean;\n\n /**\n * A tag for the Run() calls using this\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n tag?: string;\n\n /**\n * Set a single run configuration entry. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_run_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
Will support Node.js binding and react-native later\n *\n * @example\n *\n * ```js\n * extra: {\n * memory: {\n * enable_memory_arena_shrinkage: \"1\",\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #endregion\n\n // #region value metadata\n\n // eslint-disable-next-line @typescript-eslint/no-empty-interface\n interface ValueMetadata {\n // TBD\n }\n\n // #endregion\n}\n\n/**\n * Represent a runtime instance of an ONNX model.\n */\nexport interface InferenceSession {\n // #region run()\n\n /**\n * Execute the model asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Execute the model asynchronously with the given feeds, fetches and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param fetches - Representation of the model output. See type description of `InferenceSession.OutputType` for\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n\n // #endregion\n\n // #region profiling\n\n /**\n * Start profiling.\n */\n startProfiling(): void;\n\n /**\n * End profiling.\n */\n endProfiling(): void;\n\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded model.\n */\n readonly inputNames: readonly string[];\n\n /**\n * Get output names of the loaded model.\n */\n readonly outputNames: readonly string[];\n\n // /**\n // * Get input metadata of the loaded model.\n // */\n // readonly inputMetadata: ReadonlyArray>;\n\n // /**\n // * Get output metadata of the loaded model.\n // */\n // readonly outputMetadata: ReadonlyArray>;\n\n // #endregion\n}\n\nexport interface InferenceSessionFactory {\n // #region create()\n\n /**\n * Create a new inference session and load model asynchronously from an ONNX model file.\n *\n * @param uri - The URI or file path of the model to load.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(uri: string, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from segment of an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param byteOffset - The 
beginning of the specified portion of the array buffer.\n * @param byteLength - The length in bytes of the array buffer.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: InferenceSession.SessionOptions):\n Promise;\n\n /**\n * Create a new inference session and load model asynchronously from a Uint8Array.\n *\n * @param buffer - A Uint8Array representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: Uint8Array, options?: InferenceSession.SessionOptions): Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const InferenceSession: InferenceSessionFactory = InferenceSessionImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsFormat, OptionsNormalizationParameters, OptionsTensorLayout} from './tensor-factory.js';\n\nexport interface TensorToDataUrlOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface TensorToImageDataOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface ConversionUtils {\n /**\n * creates a DataURL instance from tensor\n *\n * @param options - An optional object representing options for creating a DataURL instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns a DataURL string representing the image converted from tensor data\n */\n toDataURL(options?: TensorToDataUrlOptions): string;\n\n /**\n * creates an ImageData instance from tensor\n *\n * @param options - An optional object representing options for creating an ImageData instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns an ImageData instance representing the image converted from tensor data\n */\n toImageData(options?: TensorToImageDataOptions): ImageData;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor, TypedTensor} from './tensor.js';\n\nexport type ImageFormat = 'RGB'|'RGBA'|'BGR'|'RBG';\nexport type ImageTensorLayout = 'NHWC'|'NCHW';\n\n// the following region contains type definitions for constructing tensor from a specific location.\n\n// #region types for constructing a tensor from a specific location\n\n/**\n * represent common properties of the parameter for constructing a tensor from a specific location.\n */\ninterface CommonConstructorParameters extends Pick {\n /**\n * Specify the data type of the tensor.\n */\n readonly type: T;\n}\n\n/**\n * represent the parameter for constructing a tensor from a GPU resource.\n */\ninterface GpuResourceConstructorParameters {\n /**\n * an optional callback function to download data from GPU to CPU.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n download?(): Promise;\n\n /**\n * an optional callback function that will be called when the tensor is disposed.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n dispose?(): void;\n}\n\n/**\n * represent the parameter for constructing a tensor from a pinned CPU buffer\n */\nexport interface CpuPinnedConstructorParameters extends\n CommonConstructorParameters {\n /**\n * Specify the location of the data to be 'cpu-pinned'.\n */\n readonly location: 'cpu-pinned';\n /**\n * Specify the CPU pinned buffer that holds the tensor data.\n */\n readonly data: Tensor.DataTypeMap[T];\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGL texture\n */\nexport interface TextureConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'texture'.\n */\n readonly location: 'texture';\n /**\n * Specify the WebGL texture that holds the tensor data.\n */\n readonly texture: Tensor.TextureType;\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGPU buffer\n */\nexport interface GpuBufferConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'gpu-buffer'.\n */\n readonly location: 'gpu-buffer';\n /**\n * Specify the WebGPU buffer that holds the tensor data.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n}\n\n// #endregion\n\n// the following region contains type definitions of each individual options.\n// the tensor factory functions use a composition of those options as the parameter type.\n\n// #region Options fields\n\nexport interface OptionsFormat {\n /**\n * Describes the image format represented in RGBA color space.\n */\n format?: ImageFormat;\n}\n\nexport interface OptionsTensorFormat {\n /**\n * Describes the image format of the tensor.\n *\n * NOTE: this is different from option 'format'. While option 'format' represents the original image, 'tensorFormat'\n * represents the target format of the tensor. 
A transpose will be performed if they are different.\n */\n tensorFormat?: ImageFormat;\n}\n\nexport interface OptionsTensorDataType {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: 'float32'|'uint8';\n}\n\nexport interface OptionsTensorLayout {\n /**\n * Describes the tensor layout when representing data of one or more image(s).\n */\n tensorLayout?: ImageTensorLayout;\n}\n\nexport interface OptionsDimensions {\n /**\n * Describes the image height in pixel\n */\n height?: number;\n /**\n * Describes the image width in pixel\n */\n width?: number;\n}\n\nexport interface OptionResizedDimensions {\n /**\n * Describes the resized height. If omitted, original height will be used.\n */\n resizedHeight?: number;\n /**\n * Describes resized width - can be accessed via tensor dimensions as well\n */\n resizedWidth?: number;\n}\n\nexport interface OptionsNormalizationParameters {\n /**\n * Describes normalization parameters when preprocessing the image as model input.\n *\n * Data element are ranged from 0 to 255.\n */\n norm?: {\n /**\n * The 'bias' value for image normalization.\n * - If omitted, use default value 0.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n bias?: number|[number, number, number]|[number, number, number, number];\n /**\n * The 'mean' value for image normalization.\n * - If omitted, use default value 255.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n mean?: number | [number, number, number] | [number, number, number, number];\n };\n}\n\n// #endregion\n\n// #region Options composition\n\nexport interface TensorFromImageDataOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromImageElementOptions extends OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromUrlOptions extends OptionsDimensions, OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromImageBitmapOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromTextureOptions extends\n Required, OptionsFormat, GpuResourceConstructorParameters/* TODO: add more */ {}\n\nexport interface TensorFromGpuBufferOptions extends\n Pick, GpuResourceConstructorParameters {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: T;\n}\n\n// #endregion\n\n/**\n * type TensorFactory defines the factory functions of 'Tensor' to create tensor instances from existing data or\n * resources.\n */\nexport interface TensorFactory {\n /**\n * create a tensor from an ImageData object\n *\n * @param imageData - the ImageData object to create tensor from\n * @param options - An optional object representing options for creating tensor from ImageData.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n 
*/\n fromImage(imageData: ImageData, options?: TensorFromImageDataOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a HTMLImageElement object\n *\n * @param imageElement - the HTMLImageElement object to create tensor from\n * @param options - An optional object representing options for creating tensor from HTMLImageElement.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from URL\n *\n * @param urlSource - a string as a URL to the image or a data URL containing the image data.\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(urlSource: string, options?: TensorFromUrlOptions): Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from an ImageBitmap object\n *\n * @param bitmap - the ImageBitmap object to create tensor from\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(bitmap: ImageBitmap, options: TensorFromImageBitmapOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a WebGL texture\n *\n * @param texture - the WebGLTexture object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGL texture.\n *\n * The options include following properties:\n * - `width`: the width of the texture. Required.\n * - `height`: the height of the texture. Required.\n * - `format`: the format of the texture. If omitted, assume 'RGBA'.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromTexture(\n texture: Tensor.TextureType, options: TensorFromTextureOptions): TypedTensor<'float32'>;\n\n /**\n * create a tensor from a WebGPU buffer\n *\n * @param buffer - the GPUBuffer object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGPU buffer.\n *\n * The options include following properties:\n * - `dataType`: the data type of the tensor. If omitted, assume 'float32'.\n * - `dims`: the dimension of the tensor. Required.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. 
If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromGpuBuffer(\n buffer: Tensor.GpuBufferType, options: TensorFromGpuBufferOptions): TypedTensor;\n\n /**\n * create a tensor from a pre-allocated buffer. The buffer will be used as a pinned buffer.\n *\n * @param type - the tensor element type.\n * @param buffer - a TypedArray corresponding to the type.\n * @param dims - specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n *\n * @returns a tensor object\n */\n fromPinnedBuffer>(\n type: T, buffer: Tensor.DataTypeMap[T], dims?: readonly number[]): TypedTensor;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * A string that represents a file's URL or path.\n *\n * Path is vailable only in onnxruntime-node or onnxruntime-web running in Node.js.\n */\nexport type FileUrlOrPath = string;\n\n/**\n * A Blob object that represents a file.\n */\nexport type FileBlob = Blob;\n\n/**\n * A Uint8Array, ArrayBuffer or SharedArrayBuffer object that represents a file content.\n *\n * When it is an ArrayBuffer or SharedArrayBuffer, the whole buffer is assumed to be the file content.\n */\nexport type FileData = Uint8Array|ArrayBufferLike;\n\n/**\n * Represents a file that can be loaded by the ONNX Runtime JavaScript API.\n */\nexport type FileType = FileUrlOrPath|FileBlob|FileData;\n\n/**\n * Represents an external data file.\n */\nexport interface ExternalDataFileDescription {\n /**\n * Specify the external data file.\n */\n data: FileType;\n /**\n * Specify the file path.\n */\n path: string;\n}\n\n/**\n * Represents an external data file.\n *\n * When using a string, it should be a file URL or path that in the same directory as the model file.\n */\nexport type ExternalDataFileType = ExternalDataFileDescription|FileUrlOrPath;\n\n/**\n * Options for model loading.\n */\nexport interface OnnxModelOptions {\n /**\n * Specifying a list of files that represents the external data.\n */\n externalData?: readonly ExternalDataFileType[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type NonTensorType = never;\n\n/**\n * Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.\n *\n * NOTE: currently not support non-tensor\n */\nexport type OnnxValue = Tensor|NonTensorType;\n\n/**\n * Type OnnxValueDataLocation represents the location of the data of an OnnxValue.\n */\nexport type OnnxValueDataLocation = Tensor.DataLocation;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {SessionHandler, TrainingSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TrainingSession as TrainingSessionInterface, TrainingSessionCreateOptions} from './training-session.js';\n\ntype SessionOptions = InferenceSession.SessionOptions;\ntype FeedsType = InferenceSession.FeedsType;\ntype FetchesType = InferenceSession.FetchesType;\ntype ReturnType = InferenceSession.ReturnType;\ntype RunOptions = InferenceSession.RunOptions;\n\nconst noBackendErrMsg: string = 'Training backend could not be resolved. ' +\n 'Make sure you\\'re using the correct configuration & WebAssembly files.';\n\nexport class TrainingSession implements TrainingSessionInterface {\n private constructor(handler: TrainingSessionHandler, hasOptimizerModel: boolean, hasEvalModel: boolean) {\n this.handler = handler;\n this.hasOptimizerModel = hasOptimizerModel;\n this.hasEvalModel = hasEvalModel;\n }\n private handler: TrainingSessionHandler;\n private hasOptimizerModel: boolean;\n private hasEvalModel: boolean;\n\n get trainingInputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get trainingOutputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n get evalInputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalInputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n get evalOutputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalOutputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n\n static async create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: SessionOptions):\n Promise {\n const evalModel: string|Uint8Array = trainingOptions.evalModel || '';\n const optimizerModel: string|Uint8Array = trainingOptions.optimizerModel || '';\n const options: SessionOptions = sessionOptions || {};\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n if (backend.createTrainingSessionHandler) {\n const handler = await backend.createTrainingSessionHandler(\n trainingOptions.checkpointState, trainingOptions.trainModel, evalModel, optimizerModel,\n optionsWithValidatedEPs);\n return new TrainingSession(handler, !!trainingOptions.optimizerModel, !!trainingOptions.evalModel);\n } else {\n throw new Error(noBackendErrMsg);\n }\n }\n\n /**\n * Helper function for runTrainStep and future runStep methods that handles the type-narrowing conversion from\n * the given parameters to SessionHandler.FetchesType and RunOptions.\n *\n * @param inputNames the feeds object is checked that they contain all input names in the provided list of input\n * names.\n * @param outputNames the fetches object is checked that their keys match up with valid names in the list of output\n * names.\n * @param feeds the required input\n * @param arg1 narrowed & converted into the SessionHandler.FetchesType or RunOptions object\n * @param arg2 optional RunOptions object.\n * @returns\n */\n typeNarrowingForRunStep(\n inputNames: readonly string[], outputNames: readonly string[], feeds: FeedsType, arg1?: FetchesType|RunOptions,\n arg2?: 
RunOptions): [SessionHandler.FetchesType, RunOptions] {\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSession.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output names list\n if (isFetchesEmpty) {\n for (const name of outputNames) {\n fetches[name] = null;\n }\n }\n\n return [fetches, options];\n }\n\n /**\n * Helper method for runTrainStep and any other runStep methods. 
Takes the ReturnType result from the SessionHandler\n * and changes it into a map of Tensors.\n *\n * @param results\n * @returns\n */\n convertHandlerReturnTypeToMapOfTensors(results: SessionHandler.ReturnType): ReturnType {\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n return returnValue;\n }\n\n async lazyResetGrad(): Promise {\n await this.handler.lazyResetGrad();\n }\n\n runTrainStep(feeds: FeedsType, options?: RunOptions): Promise;\n runTrainStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async runTrainStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.trainingInputNames, this.trainingOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runTrainStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n }\n\n async runOptimizerStep(options?: InferenceSession.RunOptions|undefined): Promise {\n if (this.hasOptimizerModel) {\n await this.handler.runOptimizerStep(options || {});\n } else {\n throw new Error('This TrainingSession has no OptimizerModel loaded.');\n }\n }\n\n runEvalStep(feeds: FeedsType, options?: RunOptions|undefined): Promise;\n runEvalStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions|undefined): Promise;\n async runEvalStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n if (this.hasEvalModel) {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.evalInputNames, this.evalOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runEvalStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n } else {\n throw new Error('This TrainingSession has no EvalModel loaded.');\n }\n }\n\n async getParametersSize(trainableOnly = true): Promise {\n return this.handler.getParametersSize(trainableOnly);\n }\n\n async loadParametersBuffer(array: Uint8Array, trainableOnly = true): Promise {\n const paramsSize = await this.getParametersSize(trainableOnly);\n // checking that the size of the Uint8Array is equivalent to the byte length of a Float32Array of the number\n // of parameters\n if (array.length !== 4 * paramsSize) {\n throw new Error(\n 'Size of the buffer passed into loadParametersBuffer must match the number of parameters in ' +\n 'the model. Please use getParametersSize method to check.');\n }\n return this.handler.loadParametersBuffer(array, trainableOnly);\n }\n\n async getContiguousParameters(trainableOnly = true): Promise {\n return this.handler.getContiguousParameters(trainableOnly);\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
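The TrainingSession implementation above resolves a training-capable backend, narrows fetches/options, and forwards runTrainStep / runOptimizerStep / runEvalStep to the session handler. A minimal usage sketch of that flow follows; the entry point ('onnxruntime-web/training'), the artifact file names, and the feed names ('input', 'labels') are assumptions for illustration, not taken from this bundle.

// Sketch only; artifact names and feed names below are placeholders.
import { Tensor, TrainingSession } from 'onnxruntime-web/training';

async function trainOneStep(session: TrainingSession, data: Float32Array, labels: Float32Array) {
  const feeds = {
    input: new Tensor('float32', data, [1, data.length]),
    labels: new Tensor('float32', labels, [1, labels.length]),
  };
  const results = await session.runTrainStep(feeds); // map of output name -> Tensor (e.g. the loss)
  await session.runOptimizerStep();                  // only valid when an optimizerModel was loaded
  await session.lazyResetGrad();                     // reset gradients after the optimizer step
  return results;
}

async function run() {
  const session = await TrainingSession.create({
    checkpointState: 'checkpoint.ckpt',       // hypothetical artifacts
    trainModel: 'training_model.onnx',
    optimizerModel: 'optimizer_model.onnx',
    evalModel: 'eval_model.onnx',
  });
  try {
    // feed batches through trainOneStep(...) here
  } finally {
    await session.release();
  }
}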
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession as TrainingSessionImpl} from './training-session-impl.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace TrainingSession {\n /**\n * Either URI file path (string) or Uint8Array containing model or checkpoint information.\n */\n type UriOrBuffer = string|Uint8Array;\n}\n\n/**\n * Represent a runtime instance of an ONNX training session,\n * which contains a model that can be trained, and, optionally,\n * an eval and optimizer model.\n */\nexport interface TrainingSession {\n // #region run()\n\n /**\n * Lazily resets the gradients of all trainable parameters to zero. Should happen after the invocation of\n * runOptimizerStep.\n */\n lazyResetGrad(): Promise;\n\n /**\n * Run TrainStep asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for\n detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n runTrainStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single train step with the given inputs and options.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runTrainStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Runs a single optimizer step, which performs weight updates for the trainable parameters using the optimizer model.\n *\n * @param options - Optional. A set of options that controls the behavior of model optimizing.\n */\n runOptimizerStep(options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region copy parameters\n\n /**\n * Retrieves the size of all parameters for the training state. 
Calculates the total number of primitive (datatype of\n * the parameters) elements of all the parameters in the training state.\n *\n * @param trainableOnly - When set to true, the size is calculated for trainable params only. Default value is true.\n */\n getParametersSize(trainableOnly: boolean): Promise;\n\n /**\n * Copies parameter values from the given buffer to the training state. Currently, only supporting models with\n * parameters of type Float32.\n *\n * @param buffer - A Uint8Array representation of Float32 parameters.\n * @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.\n */\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n\n /**\n * Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.\n * Currently, only supporting models with parameters of type Float32.\n *\n * @param trainableOnly - When set to true, only trainable parameters are copied. Trainable parameters are parameters\n * for which requires_grad is set to true. Default value is true.\n * @returns A promise that resolves to a Float32 OnnxValue of the requested parameters.\n */\n getContiguousParameters(trainableOnly: boolean): Promise;\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded training model.\n */\n readonly trainingInputNames: readonly string[];\n\n /**\n * Get output names of the loaded training model.\n */\n readonly trainingOutputNames: readonly string[];\n\n /**\n * Get input names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalInputNames: readonly string[];\n\n /**\n * Get output names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalOutputNames: readonly string[];\n\n // #endregion\n}\n\n/**\n * Represents the optional parameters that can be passed into the TrainingSessionFactory.\n */\nexport interface TrainingSessionCreateOptions {\n /**\n * URI or buffer for a .ckpt file that contains the checkpoint for the training model.\n */\n checkpointState: TrainingSession.UriOrBuffer;\n /**\n * URI or buffer for the .onnx training file.\n */\n trainModel: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx optimizer model file.\n */\n optimizerModel?: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx eval model file.\n */\n evalModel?: TrainingSession.UriOrBuffer;\n}\n\n/**\n * Defines method overload possibilities for creating a TrainingSession.\n */\nexport interface TrainingSessionFactory {\n // #region create()\n\n /**\n * Creates a new TrainingSession and asynchronously loads any models passed in through trainingOptions\n *\n * @param trainingOptions specify models and checkpoints to load into the Training Session\n * @param sessionOptions specify configuration for training session behavior\n *\n * @returns Promise that resolves to a TrainingSession object\n */\n create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: InferenceSession.SessionOptions):\n Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const TrainingSession: TrainingSessionFactory = TrainingSessionImpl;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * # ONNX Runtime JavaScript API\n *\n * ONNX Runtime JavaScript API is a unified API for all JavaScript usages, including the following NPM packages:\n *\n * - [onnxruntime-node](https://www.npmjs.com/package/onnxruntime-node)\n * - [onnxruntime-web](https://www.npmjs.com/package/onnxruntime-web)\n * - [onnxruntime-react-native](https://www.npmjs.com/package/onnxruntime-react-native)\n *\n * See also:\n * - [Get Started](https://onnxruntime.ai/docs/get-started/with-javascript/)\n * - [Inference examples](https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js)\n *\n * @packageDocumentation\n */\n\nexport * from './backend.js';\nexport * from './env.js';\nexport * from './inference-session.js';\nexport * from './tensor.js';\nexport * from './tensor-conversion.js';\nexport * from './tensor-factory.js';\nexport * from './trace.js';\nexport * from './onnx-model.js';\nexport * from './onnx-value.js';\nexport * from './training-session.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {WebGLContext} from './backends/webgl/webgl-context';\n\nexport declare namespace Logger {\n export interface SeverityTypeMap {\n verbose: 'v';\n info: 'i';\n warning: 'w';\n error: 'e';\n fatal: 'f';\n }\n\n export type Severity = keyof SeverityTypeMap;\n\n export type Provider = 'none'|'console';\n\n /**\n * Logging config that used to control the behavior of logger\n */\n export interface Config {\n /**\n * Specify the logging provider. 'console' by default\n */\n provider?: Provider;\n /**\n * Specify the minimal logger serverity. 'warning' by default\n */\n minimalSeverity?: Logger.Severity;\n /**\n * Whether to output date time in log. true by default\n */\n logDateTime?: boolean;\n /**\n * Whether to output source information (Not yet supported). false by default\n */\n logSourceLocation?: boolean;\n }\n\n export interface CategorizedLogger {\n verbose(content: string): void;\n info(content: string): void;\n warning(content: string): void;\n error(content: string): void;\n fatal(content: string): void;\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface Logger {\n (category: string): Logger.CategorizedLogger;\n\n verbose(content: string): void;\n verbose(category: string, content: string): void;\n info(content: string): void;\n info(category: string, content: string): void;\n warning(content: string): void;\n warning(category: string, content: string): void;\n error(content: string): void;\n error(category: string, content: string): void;\n fatal(content: string): void;\n fatal(category: string, content: string): void;\n\n /**\n * Reset the logger configuration.\n * @param config specify an optional default config\n */\n reset(config?: Logger.Config): void;\n /**\n * Set the logger's behavior on the given category\n * @param category specify a category string. If '*' is specified, all previous configuration will be overwritten. If\n * '' is specified, the default behavior will be updated.\n * @param config the config object to indicate the logger's behavior\n */\n set(category: string, config: Logger.Config): void;\n\n /**\n * Set the logger's behavior from ort-common env\n * @param env the env used to set logger. 
Currently only setting loglevel is supported through Env.\n */\n setWithEnv(env: Env): void;\n}\n\ninterface LoggerProvider {\n log(severity: Logger.Severity, content: string, category?: string): void;\n}\nclass NoOpLoggerProvider implements LoggerProvider {\n log(_severity: Logger.Severity, _content: string, _category?: string) {\n // do nothing\n }\n}\nclass ConsoleLoggerProvider implements LoggerProvider {\n log(severity: Logger.Severity, content: string, category?: string) {\n // eslint-disable-next-line no-console\n console.log(`${this.color(severity)} ${category ? '\\x1b[35m' + category + '\\x1b[0m ' : ''}${content}`);\n }\n\n private color(severity: Logger.Severity) {\n switch (severity) {\n case 'verbose':\n return '\\x1b[34;40mv\\x1b[0m';\n case 'info':\n return '\\x1b[32mi\\x1b[0m';\n case 'warning':\n return '\\x1b[30;43mw\\x1b[0m';\n case 'error':\n return '\\x1b[31;40me\\x1b[0m';\n case 'fatal':\n return '\\x1b[101mf\\x1b[0m';\n default:\n throw new Error(`unsupported severity: ${severity}`);\n }\n }\n}\n\nconst SEVERITY_VALUE = {\n verbose: 1000,\n info: 2000,\n warning: 4000,\n error: 5000,\n fatal: 6000\n};\n\nconst LOGGER_PROVIDER_MAP: {readonly [provider: string]: Readonly} = {\n ['none']: new NoOpLoggerProvider(),\n ['console']: new ConsoleLoggerProvider()\n};\nconst LOGGER_DEFAULT_CONFIG = {\n provider: 'console',\n minimalSeverity: 'warning',\n logDateTime: true,\n logSourceLocation: false\n};\nlet LOGGER_CONFIG_MAP:\n {[category: string]: Readonly>} = {['']: LOGGER_DEFAULT_CONFIG as Required};\n\nfunction log(category: string): Logger.CategorizedLogger;\nfunction log(severity: Logger.Severity, content: string): void;\nfunction log(severity: Logger.Severity, category: string, content: string): void;\nfunction log(severity: Logger.Severity, arg1: string, arg2?: string): void;\nfunction log(\n arg0: string|Logger.Severity, arg1?: string, arg2?: string|number, arg3?: number): Logger.CategorizedLogger|void {\n if (arg1 === undefined) {\n // log(category: string): Logger.CategorizedLogger;\n return createCategorizedLogger(arg0);\n } else if (arg2 === undefined) {\n // log(severity, content);\n logInternal(arg0 as Logger.Severity, arg1, 1);\n } else if (typeof arg2 === 'number' && arg3 === undefined) {\n // log(severity, content, stack)\n logInternal(arg0 as Logger.Severity, arg1, arg2);\n } else if (typeof arg2 === 'string' && arg3 === undefined) {\n // log(severity, category, content)\n logInternal(arg0 as Logger.Severity, arg2, 1, arg1);\n } else if (typeof arg2 === 'string' && typeof arg3 === 'number') {\n // log(severity, category, content, stack)\n logInternal(arg0 as Logger.Severity, arg2, arg3, arg1);\n } else {\n throw new TypeError('input is valid');\n }\n}\n\nfunction createCategorizedLogger(category: string): Logger.CategorizedLogger {\n return {\n verbose: log.verbose.bind(null, category),\n info: log.info.bind(null, category),\n warning: log.warning.bind(null, category),\n error: log.error.bind(null, category),\n fatal: log.fatal.bind(null, category)\n };\n}\n\n// NOTE: argument 'category' is put the last parameter beacause typescript\n// doesn't allow optional argument put in front of required argument. 
This\n// order is different from a usual logging API.\nfunction logInternal(severity: Logger.Severity, content: string, _stack: number, category?: string) {\n const config = LOGGER_CONFIG_MAP[category || ''] || LOGGER_CONFIG_MAP[''];\n if (SEVERITY_VALUE[severity] < SEVERITY_VALUE[config.minimalSeverity]) {\n return;\n }\n\n if (config.logDateTime) {\n content = `${new Date().toISOString()}|${content}`;\n }\n\n if (config.logSourceLocation) {\n // TODO: calculate source location from 'stack'\n }\n\n LOGGER_PROVIDER_MAP[config.provider].log(severity, content, category);\n}\n\n// eslint-disable-next-line @typescript-eslint/no-namespace\nnamespace log {\n export function verbose(content: string): void;\n export function verbose(category: string, content: string): void;\n export function verbose(arg0: string, arg1?: string) {\n log('verbose', arg0, arg1);\n }\n export function info(content: string): void;\n export function info(category: string, content: string): void;\n export function info(arg0: string, arg1?: string) {\n log('info', arg0, arg1);\n }\n export function warning(content: string): void;\n export function warning(category: string, content: string): void;\n export function warning(arg0: string, arg1?: string) {\n log('warning', arg0, arg1);\n }\n export function error(content: string): void;\n export function error(category: string, content: string): void;\n export function error(arg0: string, arg1?: string) {\n log('error', arg0, arg1);\n }\n export function fatal(content: string): void;\n export function fatal(category: string, content: string): void;\n export function fatal(arg0: string, arg1?: string) {\n log('fatal', arg0, arg1);\n }\n\n export function reset(config?: Logger.Config): void {\n LOGGER_CONFIG_MAP = {};\n set('', config || {});\n }\n export function set(category: string, config: Logger.Config): void {\n if (category === '*') {\n reset(config);\n } else {\n const previousConfig = LOGGER_CONFIG_MAP[category] || LOGGER_DEFAULT_CONFIG;\n LOGGER_CONFIG_MAP[category] = {\n provider: config.provider || previousConfig.provider,\n minimalSeverity: config.minimalSeverity || previousConfig.minimalSeverity,\n logDateTime: (config.logDateTime === undefined) ? previousConfig.logDateTime : config.logDateTime,\n logSourceLocation: (config.logSourceLocation === undefined) ? 
previousConfig.logSourceLocation :\n config.logSourceLocation\n };\n }\n\n // TODO: we want to support wildcard or regex?\n }\n\n export function setWithEnv(env: Env): void {\n const config: Logger.Config = {};\n if (env.logLevel) {\n config.minimalSeverity = env.logLevel as Logger.Severity;\n }\n set('', config);\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare, @typescript-eslint/naming-convention\nexport const Logger: Logger = log;\n\nexport declare namespace Profiler {\n export interface Config {\n maxNumberEvents?: number;\n flushBatchSize?: number;\n flushIntervalInMilliseconds?: number;\n }\n\n export type EventCategory = 'session'|'node'|'op'|'backend';\n\n export interface Event {\n end(): void|Promise;\n }\n}\n// TODO\n// class WebGLEvent implements Profiler.Event {}\n\nclass Event implements Profiler.Event {\n constructor(\n public category: Profiler.EventCategory, public name: string, public startTime: number,\n private endCallback: (e: Event) => void|Promise, public timer?: WebGLQuery, public ctx?: WebGLContext) {}\n\n async end() {\n return this.endCallback(this);\n }\n\n async checkTimer(): Promise {\n if (this.ctx === undefined || this.timer === undefined) {\n throw new Error('No webgl timer found');\n } else {\n this.ctx.endTimer();\n return this.ctx.waitForQueryAndGetTime(this.timer);\n }\n }\n}\n\nclass EventRecord {\n constructor(\n public category: Profiler.EventCategory, public name: string, public startTime: number, public endTime: number) {}\n}\n\nexport class Profiler {\n static create(config?: Profiler.Config): Profiler {\n if (config === undefined) {\n return new this();\n }\n return new this(config.maxNumberEvents, config.flushBatchSize, config.flushIntervalInMilliseconds);\n }\n\n private constructor(maxNumberEvents?: number, flushBatchSize?: number, flushIntervalInMilliseconds?: number) {\n this._started = false;\n this._maxNumberEvents = maxNumberEvents === undefined ? 10000 : maxNumberEvents;\n this._flushBatchSize = flushBatchSize === undefined ? 10 : flushBatchSize;\n this._flushIntervalInMilliseconds = flushIntervalInMilliseconds === undefined ? 5000 : flushIntervalInMilliseconds;\n }\n\n // start profiling\n start() {\n this._started = true;\n this._timingEvents = [];\n this._flushTime = now();\n this._flushPointer = 0;\n }\n\n // stop profiling\n stop() {\n this._started = false;\n for (; this._flushPointer < this._timingEvents.length; this._flushPointer++) {\n this.logOneEvent(this._timingEvents[this._flushPointer]);\n }\n }\n\n // create an event scope for the specific function\n event(category: Profiler.EventCategory, name: string, func: () => T, ctx?: WebGLContext): T;\n event(category: Profiler.EventCategory, name: string, func: () => Promise, ctx?: WebGLContext): Promise;\n\n event(category: Profiler.EventCategory, name: string, func: () => T | Promise, ctx?: WebGLContext): T\n |Promise {\n const event = this._started ? 
this.begin(category, name, ctx) : undefined;\n let isPromise = false;\n\n const res = func();\n\n // we consider a then-able object is a promise\n if (res && typeof (res as Promise).then === 'function') {\n isPromise = true;\n return new Promise((resolve, reject) => {\n (res as Promise)\n .then(\n async value => { // fulfilled\n if (event) {\n await event.end();\n }\n resolve(value);\n },\n async reason => { // rejected\n if (event) {\n await event.end();\n }\n reject(reason);\n });\n });\n }\n if (!isPromise && event) {\n const eventRes = event.end();\n if (eventRes && typeof eventRes.then === 'function') {\n return new Promise((resolve, reject) => {\n (eventRes).then(\n () => { // fulfilled\n resolve(res);\n },\n (reason) => { // rejected\n reject(reason);\n });\n });\n }\n }\n return res;\n }\n\n // begin an event\n begin(category: Profiler.EventCategory, name: string, ctx?: WebGLContext): Event {\n if (!this._started) {\n throw new Error('profiler is not started yet');\n }\n if (ctx === undefined) {\n const startTime = now();\n this.flush(startTime);\n return new Event(category, name, startTime, e => this.endSync(e));\n } else {\n const timer: WebGLQuery = ctx.beginTimer();\n return new Event(category, name, 0, async e => this.end(e), timer, ctx);\n }\n }\n\n // end the specific event\n private async end(event: Event): Promise {\n const endTime: number = await event.checkTimer();\n if (this._timingEvents.length < this._maxNumberEvents) {\n this._timingEvents.push(new EventRecord(event.category, event.name, event.startTime, endTime));\n this.flush(endTime);\n }\n }\n\n private endSync(event: Event): void {\n const endTime: number = now();\n if (this._timingEvents.length < this._maxNumberEvents) {\n this._timingEvents.push(new EventRecord(event.category, event.name, event.startTime, endTime));\n this.flush(endTime);\n }\n }\n\n private logOneEvent(event: EventRecord) {\n Logger.verbose(\n `Profiler.${event.category}`,\n `${(event.endTime - event.startTime).toFixed(2)}ms on event '${event.name}' at ${event.endTime.toFixed(2)}`);\n }\n\n private flush(currentTime: number) {\n if (this._timingEvents.length - this._flushPointer >= this._flushBatchSize ||\n currentTime - this._flushTime >= this._flushIntervalInMilliseconds) {\n // should flush when either batch size accumlated or interval elepsed\n\n for (const previousPointer = this._flushPointer; this._flushPointer < previousPointer + this._flushBatchSize &&\n this._flushPointer < this._timingEvents.length;\n this._flushPointer++) {\n this.logOneEvent(this._timingEvents[this._flushPointer]);\n }\n\n this._flushTime = now();\n }\n }\n\n get started() {\n return this._started;\n }\n private _started = false;\n private _timingEvents: EventRecord[];\n\n private readonly _maxNumberEvents: number;\n\n private readonly _flushBatchSize: number;\n private readonly _flushIntervalInMilliseconds: number;\n\n private _flushTime: number;\n private _flushPointer = 0;\n}\n\n/**\n * returns a number to represent the current timestamp in a resolution as high as possible.\n */\nexport const now = (typeof performance !== 'undefined' && performance.now) ? () => performance.now() : Date.now;\n", "// Copyright (c) Microsoft Corporation. 
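The Logger above dispatches to a provider chosen by per-category config, and the Profiler batches EventRecords and flushes them through Logger.verbose once the batch size or flush interval is reached. A short sketch of how the two utilities are driven; the module path './instrument' and the category/config values are assumptions for illustration.

// Sketch only; module path and category names are illustrative.
import { Logger, Profiler } from './instrument';

Logger.set('', { minimalSeverity: 'verbose', logDateTime: true });  // default ('') category config
Logger.info('Session', 'model loaded');                             // (category, content) overload

const profiler = Profiler.create({ flushBatchSize: 20, flushIntervalInMilliseconds: 1000 });
profiler.start();
const sum = profiler.event('op', 'vector-add', () => {
  let acc = 0;
  for (let i = 0; i < 1_000_000; i++) acc += i;
  return acc;                  // synchronous path: the event ends right after func() returns
});
profiler.stop();               // logs any remaining EventRecords via Logger.verbose

Setting minimalSeverity to 'verbose' matters here because logOneEvent reports each timing through Logger.verbose, which the default 'warning' threshold would suppress.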
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from './graph';\nimport {OperatorImplementation, OperatorInitialization} from './operators';\n\nexport interface OpSet {\n domain: string;\n version: number;\n}\nexport declare namespace OpSet {\n /**\n * Domain of an opset, it can be an empty string(default value, represent for ai.onnx), or 'ai.onnx.ml'\n */\n type Domain = ''|'ai.onnx.ml'|'com.microsoft';\n /**\n * A resolve rule consists of 4 or 5 items: opType, opSetDomain, versionSelector, operatorImplementation and\n * operatorInitialization (optional)\n */\n type ResolveRule = [\n string, Domain, string, OperatorImplementation\n ]|[string, Domain, string, OperatorImplementation, OperatorInitialization];\n}\n\nexport function resolveOperator(node: Graph.Node, opsets: readonly OpSet[], rules: readonly OpSet.ResolveRule[]) {\n for (const rule of rules) {\n const opType = rule[0];\n const domain = rule[1];\n const versionSelector = rule[2];\n const opImpl = rule[3];\n const opInit = rule[4];\n\n if (node.opType === opType) { // operator type matches\n for (const opset of opsets) {\n // opset '' and 'ai.onnx' are considered the same.\n if (opset.domain === domain || (opset.domain === 'ai.onnx' && domain === '')) { // opset domain found\n if (matchSelector(opset.version, versionSelector)) {\n return {opImpl, opInit};\n }\n }\n }\n }\n }\n\n throw new TypeError(`cannot resolve operator '${node.opType}' with opsets: ${\n opsets.map(set => `${set.domain || 'ai.onnx'} v${set.version}`).join(', ')}`);\n}\n\nfunction matchSelector(version: number, selector: string): boolean {\n if (selector.endsWith('+')) {\n // minimum version match ('7+' expects version>=7)\n const rangeStart = Number.parseInt(selector.substring(0, selector.length - 1), 10);\n return !isNaN(rangeStart) && rangeStart <= version;\n } else if (selector.split('-').length === 2) {\n // range match ('6-8' expects 6<=version<=8)\n const pair = selector.split('-');\n const rangeStart = Number.parseInt(pair[0], 10);\n const rangeEnd = Number.parseInt(pair[1], 10);\n return !isNaN(rangeStart) && !isNaN(rangeEnd) && rangeStart <= version && version <= rangeEnd;\n } else {\n // exact match ('7' expects version===7)\n return Number.parseInt(selector, 10) === version;\n }\n}\n", "\"use strict\";\r\nexports.__esModule = true;\r\nvar Guid = /** @class */ (function () {\r\n function Guid(guid) {\r\n if (!guid) {\r\n throw new TypeError(\"Invalid argument; `value` has no value.\");\r\n }\r\n this.value = Guid.EMPTY;\r\n if (guid && Guid.isGuid(guid)) {\r\n this.value = guid;\r\n }\r\n }\r\n Guid.isGuid = function (guid) {\r\n var value = guid.toString();\r\n return guid && (guid instanceof Guid || Guid.validator.test(value));\r\n };\r\n Guid.create = function () {\r\n return new Guid([Guid.gen(2), Guid.gen(1), Guid.gen(1), Guid.gen(1), Guid.gen(3)].join(\"-\"));\r\n };\r\n Guid.createEmpty = function () {\r\n return new Guid(\"emptyguid\");\r\n };\r\n Guid.parse = function (guid) {\r\n return new Guid(guid);\r\n };\r\n Guid.raw = function () {\r\n return [Guid.gen(2), Guid.gen(1), Guid.gen(1), Guid.gen(1), Guid.gen(3)].join(\"-\");\r\n };\r\n Guid.gen = function (count) {\r\n var out = \"\";\r\n for (var i = 0; i < count; i++) {\r\n // tslint:disable-next-line:no-bitwise\r\n out += (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);\r\n }\r\n return out;\r\n };\r\n Guid.prototype.equals = function (other) {\r\n // Comparing string `value` against provided `guid` will auto-call\r\n // toString on `guid` for 
comparison\r\n return Guid.isGuid(other) && this.value === other.toString();\r\n };\r\n Guid.prototype.isEmpty = function () {\r\n return this.value === Guid.EMPTY;\r\n };\r\n Guid.prototype.toString = function () {\r\n return this.value;\r\n };\r\n Guid.prototype.toJSON = function () {\r\n return {\r\n value: this.value\r\n };\r\n };\r\n Guid.validator = new RegExp(\"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$\", \"i\");\r\n Guid.EMPTY = \"00000000-0000-0000-0000-000000000000\";\r\n return Guid;\r\n}());\r\nexports.Guid = Guid;\r\n", "/**\n * @license\n * Copyright 2009 The Closure Library Authors\n * Copyright 2020 Daniel Wirtz / The long.js Authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n */\n\n// WebAssembly optimizations to do native i64 multiplication and divide\nvar wasm = null;\ntry {\n wasm = new WebAssembly.Instance(new WebAssembly.Module(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 13, 2, 96, 0, 1, 127, 96, 4, 127, 127, 127, 127, 1, 127, 3, 7, 6, 0, 1, 1, 1, 1, 1, 6, 6, 1, 127, 1, 65, 0, 11, 7, 50, 6, 3, 109, 117, 108, 0, 1, 5, 100, 105, 118, 95, 115, 0, 2, 5, 100, 105, 118, 95, 117, 0, 3, 5, 114, 101, 109, 95, 115, 0, 4, 5, 114, 101, 109, 95, 117, 0, 5, 8, 103, 101, 116, 95, 104, 105, 103, 104, 0, 0, 10, 191, 1, 6, 4, 0, 35, 0, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 126, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 127, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 128, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 129, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 130, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11\n ])), {}).exports;\n} catch (e) {\n // no wasm support :(\n}\n\n/**\n * Constructs a 64 bit two's-complement integer, given its low and high 32 bit values as *signed* integers.\n * See the from* functions below for more convenient ways of constructing Longs.\n * @exports Long\n * @class A Long class for representing a 64 bit two's-complement integer value.\n * @param {number} low The low (signed) 32 bits of the long\n * @param {number} high The high (signed) 32 bits of the long\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @constructor\n */\nfunction Long(low, high, unsigned) {\n\n /**\n * The low 32 bits as a signed value.\n * @type {number}\n */\n this.low = low | 0;\n\n /**\n * The high 32 bits as a signed value.\n * @type {number}\n */\n this.high = high | 0;\n\n /**\n * Whether unsigned or not.\n * @type {boolean}\n */\n this.unsigned = !!unsigned;\n}\n\n// The internal 
representation of a long is the two given signed, 32-bit values.\n// We use 32-bit pieces because these are the size of integers on which\n// Javascript performs bit-operations. For operations like addition and\n// multiplication, we split each number into 16 bit pieces, which can easily be\n// multiplied within Javascript's floating-point representation without overflow\n// or change in sign.\n//\n// In the algorithms below, we frequently reduce the negative case to the\n// positive case by negating the input(s) and then post-processing the result.\n// Note that we must ALWAYS check specially whether those values are MIN_VALUE\n// (-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as\n// a positive number, it overflows back into a negative). Not handling this\n// case would often result in infinite recursion.\n//\n// Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the from*\n// methods on which they depend.\n\n/**\n * An indicator used to reliably determine if an object is a Long or not.\n * @type {boolean}\n * @const\n * @private\n */\nLong.prototype.__isLong__;\n\nObject.defineProperty(Long.prototype, \"__isLong__\", { value: true });\n\n/**\n * @function\n * @param {*} obj Object\n * @returns {boolean}\n * @inner\n */\nfunction isLong(obj) {\n return (obj && obj[\"__isLong__\"]) === true;\n}\n\n/**\n * @function\n * @param {*} value number\n * @returns {number}\n * @inner\n */\nfunction ctz32(value) {\n var c = Math.clz32(value & -value);\n return value ? 31 - c : c;\n}\n\n/**\n * Tests if the specified object is a Long.\n * @function\n * @param {*} obj Object\n * @returns {boolean}\n */\nLong.isLong = isLong;\n\n/**\n * A cache of the Long representations of small integer values.\n * @type {!Object}\n * @inner\n */\nvar INT_CACHE = {};\n\n/**\n * A cache of the Long representations of small unsigned integer values.\n * @type {!Object}\n * @inner\n */\nvar UINT_CACHE = {};\n\n/**\n * @param {number} value\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromInt(value, unsigned) {\n var obj, cachedObj, cache;\n if (unsigned) {\n value >>>= 0;\n if (cache = (0 <= value && value < 256)) {\n cachedObj = UINT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, 0, true);\n if (cache)\n UINT_CACHE[value] = obj;\n return obj;\n } else {\n value |= 0;\n if (cache = (-128 <= value && value < 128)) {\n cachedObj = INT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, value < 0 ? -1 : 0, false);\n if (cache)\n INT_CACHE[value] = obj;\n return obj;\n }\n}\n\n/**\n * Returns a Long representing the given 32 bit integer value.\n * @function\n * @param {number} value The 32 bit integer in question\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long} The corresponding Long value\n */\nLong.fromInt = fromInt;\n\n/**\n * @param {number} value\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromNumber(value, unsigned) {\n if (isNaN(value))\n return unsigned ? 
UZERO : ZERO;\n if (unsigned) {\n if (value < 0)\n return UZERO;\n if (value >= TWO_PWR_64_DBL)\n return MAX_UNSIGNED_VALUE;\n } else {\n if (value <= -TWO_PWR_63_DBL)\n return MIN_VALUE;\n if (value + 1 >= TWO_PWR_63_DBL)\n return MAX_VALUE;\n }\n if (value < 0)\n return fromNumber(-value, unsigned).neg();\n return fromBits((value % TWO_PWR_32_DBL) | 0, (value / TWO_PWR_32_DBL) | 0, unsigned);\n}\n\n/**\n * Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned.\n * @function\n * @param {number} value The number in question\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long} The corresponding Long value\n */\nLong.fromNumber = fromNumber;\n\n/**\n * @param {number} lowBits\n * @param {number} highBits\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromBits(lowBits, highBits, unsigned) {\n return new Long(lowBits, highBits, unsigned);\n}\n\n/**\n * Returns a Long representing the 64 bit integer that comes by concatenating the given low and high bits. Each is\n * assumed to use 32 bits.\n * @function\n * @param {number} lowBits The low 32 bits\n * @param {number} highBits The high 32 bits\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long} The corresponding Long value\n */\nLong.fromBits = fromBits;\n\n/**\n * @function\n * @param {number} base\n * @param {number} exponent\n * @returns {number}\n * @inner\n */\nvar pow_dbl = Math.pow; // Used 4 times (4*8 to 15+4)\n\n/**\n * @param {string} str\n * @param {(boolean|number)=} unsigned\n * @param {number=} radix\n * @returns {!Long}\n * @inner\n */\nfunction fromString(str, unsigned, radix) {\n if (str.length === 0)\n throw Error('empty string');\n if (typeof unsigned === 'number') {\n // For goog.math.long compatibility\n radix = unsigned;\n unsigned = false;\n } else {\n unsigned = !!unsigned;\n }\n if (str === \"NaN\" || str === \"Infinity\" || str === \"+Infinity\" || str === \"-Infinity\")\n return unsigned ? 
UZERO : ZERO;\n radix = radix || 10;\n if (radix < 2 || 36 < radix)\n throw RangeError('radix');\n\n var p;\n if ((p = str.indexOf('-')) > 0)\n throw Error('interior hyphen');\n else if (p === 0) {\n return fromString(str.substring(1), unsigned, radix).neg();\n }\n\n // Do several (8) digits each time through the loop, so as to\n // minimize the calls to the very expensive emulated div.\n var radixToPower = fromNumber(pow_dbl(radix, 8));\n\n var result = ZERO;\n for (var i = 0; i < str.length; i += 8) {\n var size = Math.min(8, str.length - i),\n value = parseInt(str.substring(i, i + size), radix);\n if (size < 8) {\n var power = fromNumber(pow_dbl(radix, size));\n result = result.mul(power).add(fromNumber(value));\n } else {\n result = result.mul(radixToPower);\n result = result.add(fromNumber(value));\n }\n }\n result.unsigned = unsigned;\n return result;\n}\n\n/**\n * Returns a Long representation of the given string, written using the specified radix.\n * @function\n * @param {string} str The textual representation of the Long\n * @param {(boolean|number)=} unsigned Whether unsigned or not, defaults to signed\n * @param {number=} radix The radix in which the text is written (2-36), defaults to 10\n * @returns {!Long} The corresponding Long value\n */\nLong.fromString = fromString;\n\n/**\n * @function\n * @param {!Long|number|string|!{low: number, high: number, unsigned: boolean}} val\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromValue(val, unsigned) {\n if (typeof val === 'number')\n return fromNumber(val, unsigned);\n if (typeof val === 'string')\n return fromString(val, unsigned);\n // Throws for non-objects, converts non-instanceof Long:\n return fromBits(val.low, val.high, typeof unsigned === 'boolean' ? 
unsigned : val.unsigned);\n}\n\n/**\n * Converts the specified value to a Long using the appropriate from* function for its type.\n * @function\n * @param {!Long|number|string|!{low: number, high: number, unsigned: boolean}} val Value\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long}\n */\nLong.fromValue = fromValue;\n\n// NOTE: the compiler should inline these constant values below and then remove these variables, so there should be\n// no runtime penalty for these.\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_16_DBL = 1 << 16;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_24_DBL = 1 << 24;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2;\n\n/**\n * @type {!Long}\n * @const\n * @inner\n */\nvar TWO_PWR_24 = fromInt(TWO_PWR_24_DBL);\n\n/**\n * @type {!Long}\n * @inner\n */\nvar ZERO = fromInt(0);\n\n/**\n * Signed zero.\n * @type {!Long}\n */\nLong.ZERO = ZERO;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar UZERO = fromInt(0, true);\n\n/**\n * Unsigned zero.\n * @type {!Long}\n */\nLong.UZERO = UZERO;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar ONE = fromInt(1);\n\n/**\n * Signed one.\n * @type {!Long}\n */\nLong.ONE = ONE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar UONE = fromInt(1, true);\n\n/**\n * Unsigned one.\n * @type {!Long}\n */\nLong.UONE = UONE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar NEG_ONE = fromInt(-1);\n\n/**\n * Signed negative one.\n * @type {!Long}\n */\nLong.NEG_ONE = NEG_ONE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar MAX_VALUE = fromBits(0xFFFFFFFF | 0, 0x7FFFFFFF | 0, false);\n\n/**\n * Maximum signed value.\n * @type {!Long}\n */\nLong.MAX_VALUE = MAX_VALUE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar MAX_UNSIGNED_VALUE = fromBits(0xFFFFFFFF | 0, 0xFFFFFFFF | 0, true);\n\n/**\n * Maximum unsigned value.\n * @type {!Long}\n */\nLong.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar MIN_VALUE = fromBits(0, 0x80000000 | 0, false);\n\n/**\n * Minimum signed value.\n * @type {!Long}\n */\nLong.MIN_VALUE = MIN_VALUE;\n\n/**\n * @alias Long.prototype\n * @inner\n */\nvar LongPrototype = Long.prototype;\n\n/**\n * Converts the Long to a 32 bit integer, assuming it is a 32 bit integer.\n * @this {!Long}\n * @returns {number}\n */\nLongPrototype.toInt = function toInt() {\n return this.unsigned ? 
this.low >>> 0 : this.low;\n};\n\n/**\n * Converts the Long to a the nearest floating-point representation of this value (double, 53 bit mantissa).\n * @this {!Long}\n * @returns {number}\n */\nLongPrototype.toNumber = function toNumber() {\n if (this.unsigned)\n return ((this.high >>> 0) * TWO_PWR_32_DBL) + (this.low >>> 0);\n return this.high * TWO_PWR_32_DBL + (this.low >>> 0);\n};\n\n/**\n * Converts the Long to a string written in the specified radix.\n * @this {!Long}\n * @param {number=} radix Radix (2-36), defaults to 10\n * @returns {string}\n * @override\n * @throws {RangeError} If `radix` is out of range\n */\nLongPrototype.toString = function toString(radix) {\n radix = radix || 10;\n if (radix < 2 || 36 < radix)\n throw RangeError('radix');\n if (this.isZero())\n return '0';\n if (this.isNegative()) { // Unsigned Longs are never negative\n if (this.eq(MIN_VALUE)) {\n // We need to change the Long value before it can be negated, so we remove\n // the bottom-most digit in this base and then recurse to do the rest.\n var radixLong = fromNumber(radix),\n div = this.div(radixLong),\n rem1 = div.mul(radixLong).sub(this);\n return div.toString(radix) + rem1.toInt().toString(radix);\n } else\n return '-' + this.neg().toString(radix);\n }\n\n // Do several (6) digits each time through the loop, so as to\n // minimize the calls to the very expensive emulated div.\n var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned),\n rem = this;\n var result = '';\n while (true) {\n var remDiv = rem.div(radixToPower),\n intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0,\n digits = intval.toString(radix);\n rem = remDiv;\n if (rem.isZero())\n return digits + result;\n else {\n while (digits.length < 6)\n digits = '0' + digits;\n result = '' + digits + result;\n }\n }\n};\n\n/**\n * Gets the high 32 bits as a signed integer.\n * @this {!Long}\n * @returns {number} Signed high bits\n */\nLongPrototype.getHighBits = function getHighBits() {\n return this.high;\n};\n\n/**\n * Gets the high 32 bits as an unsigned integer.\n * @this {!Long}\n * @returns {number} Unsigned high bits\n */\nLongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() {\n return this.high >>> 0;\n};\n\n/**\n * Gets the low 32 bits as a signed integer.\n * @this {!Long}\n * @returns {number} Signed low bits\n */\nLongPrototype.getLowBits = function getLowBits() {\n return this.low;\n};\n\n/**\n * Gets the low 32 bits as an unsigned integer.\n * @this {!Long}\n * @returns {number} Unsigned low bits\n */\nLongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() {\n return this.low >>> 0;\n};\n\n/**\n * Gets the number of bits needed to represent the absolute value of this Long.\n * @this {!Long}\n * @returns {number}\n */\nLongPrototype.getNumBitsAbs = function getNumBitsAbs() {\n if (this.isNegative()) // Unsigned Longs are never negative\n return this.eq(MIN_VALUE) ? 64 : this.neg().getNumBitsAbs();\n var val = this.high != 0 ? this.high : this.low;\n for (var bit = 31; bit > 0; bit--)\n if ((val & (1 << bit)) != 0)\n break;\n return this.high != 0 ? bit + 33 : bit + 1;\n};\n\n/**\n * Tests if this Long's value equals zero.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isZero = function isZero() {\n return this.high === 0 && this.low === 0;\n};\n\n/**\n * Tests if this Long's value equals zero. 
This is an alias of {@link Long#isZero}.\n * @returns {boolean}\n */\nLongPrototype.eqz = LongPrototype.isZero;\n\n/**\n * Tests if this Long's value is negative.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isNegative = function isNegative() {\n return !this.unsigned && this.high < 0;\n};\n\n/**\n * Tests if this Long's value is positive or zero.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isPositive = function isPositive() {\n return this.unsigned || this.high >= 0;\n};\n\n/**\n * Tests if this Long's value is odd.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isOdd = function isOdd() {\n return (this.low & 1) === 1;\n};\n\n/**\n * Tests if this Long's value is even.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isEven = function isEven() {\n return (this.low & 1) === 0;\n};\n\n/**\n * Tests if this Long's value equals the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.equals = function equals(other) {\n if (!isLong(other))\n other = fromValue(other);\n if (this.unsigned !== other.unsigned && (this.high >>> 31) === 1 && (other.high >>> 31) === 1)\n return false;\n return this.high === other.high && this.low === other.low;\n};\n\n/**\n * Tests if this Long's value equals the specified's. This is an alias of {@link Long#equals}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.eq = LongPrototype.equals;\n\n/**\n * Tests if this Long's value differs from the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.notEquals = function notEquals(other) {\n return !this.eq(/* validates */ other);\n};\n\n/**\n * Tests if this Long's value differs from the specified's. This is an alias of {@link Long#notEquals}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.neq = LongPrototype.notEquals;\n\n/**\n * Tests if this Long's value differs from the specified's. This is an alias of {@link Long#notEquals}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.ne = LongPrototype.notEquals;\n\n/**\n * Tests if this Long's value is less than the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lessThan = function lessThan(other) {\n return this.comp(/* validates */ other) < 0;\n};\n\n/**\n * Tests if this Long's value is less than the specified's. This is an alias of {@link Long#lessThan}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lt = LongPrototype.lessThan;\n\n/**\n * Tests if this Long's value is less than or equal the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lessThanOrEqual = function lessThanOrEqual(other) {\n return this.comp(/* validates */ other) <= 0;\n};\n\n/**\n * Tests if this Long's value is less than or equal the specified's. This is an alias of {@link Long#lessThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lte = LongPrototype.lessThanOrEqual;\n\n/**\n * Tests if this Long's value is less than or equal the specified's. 
This is an alias of {@link Long#lessThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.le = LongPrototype.lessThanOrEqual;\n\n/**\n * Tests if this Long's value is greater than the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.greaterThan = function greaterThan(other) {\n return this.comp(/* validates */ other) > 0;\n};\n\n/**\n * Tests if this Long's value is greater than the specified's. This is an alias of {@link Long#greaterThan}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.gt = LongPrototype.greaterThan;\n\n/**\n * Tests if this Long's value is greater than or equal the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) {\n return this.comp(/* validates */ other) >= 0;\n};\n\n/**\n * Tests if this Long's value is greater than or equal the specified's. This is an alias of {@link Long#greaterThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.gte = LongPrototype.greaterThanOrEqual;\n\n/**\n * Tests if this Long's value is greater than or equal the specified's. This is an alias of {@link Long#greaterThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.ge = LongPrototype.greaterThanOrEqual;\n\n/**\n * Compares this Long's value with the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {number} 0 if they are the same, 1 if the this is greater and -1\n * if the given one is greater\n */\nLongPrototype.compare = function compare(other) {\n if (!isLong(other))\n other = fromValue(other);\n if (this.eq(other))\n return 0;\n var thisNeg = this.isNegative(),\n otherNeg = other.isNegative();\n if (thisNeg && !otherNeg)\n return -1;\n if (!thisNeg && otherNeg)\n return 1;\n // At this point the sign bits are the same\n if (!this.unsigned)\n return this.sub(other).isNegative() ? -1 : 1;\n // Both are positive if at least one is unsigned\n return (other.high >>> 0) > (this.high >>> 0) || (other.high === this.high && (other.low >>> 0) > (this.low >>> 0)) ? -1 : 1;\n};\n\n/**\n * Compares this Long's value with the specified's. This is an alias of {@link Long#compare}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {number} 0 if they are the same, 1 if the this is greater and -1\n * if the given one is greater\n */\nLongPrototype.comp = LongPrototype.compare;\n\n/**\n * Negates this Long's value.\n * @this {!Long}\n * @returns {!Long} Negated Long\n */\nLongPrototype.negate = function negate() {\n if (!this.unsigned && this.eq(MIN_VALUE))\n return MIN_VALUE;\n return this.not().add(ONE);\n};\n\n/**\n * Negates this Long's value. 
This is an alias of {@link Long#negate}.\n * @function\n * @returns {!Long} Negated Long\n */\nLongPrototype.neg = LongPrototype.negate;\n\n/**\n * Returns the sum of this and the specified Long.\n * @this {!Long}\n * @param {!Long|number|string} addend Addend\n * @returns {!Long} Sum\n */\nLongPrototype.add = function add(addend) {\n if (!isLong(addend))\n addend = fromValue(addend);\n\n // Divide each number into 4 chunks of 16 bits, and then sum the chunks.\n\n var a48 = this.high >>> 16;\n var a32 = this.high & 0xFFFF;\n var a16 = this.low >>> 16;\n var a00 = this.low & 0xFFFF;\n\n var b48 = addend.high >>> 16;\n var b32 = addend.high & 0xFFFF;\n var b16 = addend.low >>> 16;\n var b00 = addend.low & 0xFFFF;\n\n var c48 = 0, c32 = 0, c16 = 0, c00 = 0;\n c00 += a00 + b00;\n c16 += c00 >>> 16;\n c00 &= 0xFFFF;\n c16 += a16 + b16;\n c32 += c16 >>> 16;\n c16 &= 0xFFFF;\n c32 += a32 + b32;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c48 += a48 + b48;\n c48 &= 0xFFFF;\n return fromBits((c16 << 16) | c00, (c48 << 16) | c32, this.unsigned);\n};\n\n/**\n * Returns the difference of this and the specified Long.\n * @this {!Long}\n * @param {!Long|number|string} subtrahend Subtrahend\n * @returns {!Long} Difference\n */\nLongPrototype.subtract = function subtract(subtrahend) {\n if (!isLong(subtrahend))\n subtrahend = fromValue(subtrahend);\n return this.add(subtrahend.neg());\n};\n\n/**\n * Returns the difference of this and the specified Long. This is an alias of {@link Long#subtract}.\n * @function\n * @param {!Long|number|string} subtrahend Subtrahend\n * @returns {!Long} Difference\n */\nLongPrototype.sub = LongPrototype.subtract;\n\n/**\n * Returns the product of this and the specified Long.\n * @this {!Long}\n * @param {!Long|number|string} multiplier Multiplier\n * @returns {!Long} Product\n */\nLongPrototype.multiply = function multiply(multiplier) {\n if (this.isZero())\n return this;\n if (!isLong(multiplier))\n multiplier = fromValue(multiplier);\n\n // use wasm support if present\n if (wasm) {\n var low = wasm[\"mul\"](this.low,\n this.high,\n multiplier.low,\n multiplier.high);\n return fromBits(low, wasm[\"get_high\"](), this.unsigned);\n }\n\n if (multiplier.isZero())\n return this.unsigned ? UZERO : ZERO;\n if (this.eq(MIN_VALUE))\n return multiplier.isOdd() ? MIN_VALUE : ZERO;\n if (multiplier.eq(MIN_VALUE))\n return this.isOdd() ? 
MIN_VALUE : ZERO;\n\n if (this.isNegative()) {\n if (multiplier.isNegative())\n return this.neg().mul(multiplier.neg());\n else\n return this.neg().mul(multiplier).neg();\n } else if (multiplier.isNegative())\n return this.mul(multiplier.neg()).neg();\n\n // If both longs are small, use float multiplication\n if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24))\n return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned);\n\n // Divide each long into 4 chunks of 16 bits, and then add up 4x4 products.\n // We can skip products that would overflow.\n\n var a48 = this.high >>> 16;\n var a32 = this.high & 0xFFFF;\n var a16 = this.low >>> 16;\n var a00 = this.low & 0xFFFF;\n\n var b48 = multiplier.high >>> 16;\n var b32 = multiplier.high & 0xFFFF;\n var b16 = multiplier.low >>> 16;\n var b00 = multiplier.low & 0xFFFF;\n\n var c48 = 0, c32 = 0, c16 = 0, c00 = 0;\n c00 += a00 * b00;\n c16 += c00 >>> 16;\n c00 &= 0xFFFF;\n c16 += a16 * b00;\n c32 += c16 >>> 16;\n c16 &= 0xFFFF;\n c16 += a00 * b16;\n c32 += c16 >>> 16;\n c16 &= 0xFFFF;\n c32 += a32 * b00;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c32 += a16 * b16;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c32 += a00 * b32;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48;\n c48 &= 0xFFFF;\n return fromBits((c16 << 16) | c00, (c48 << 16) | c32, this.unsigned);\n};\n\n/**\n * Returns the product of this and the specified Long. This is an alias of {@link Long#multiply}.\n * @function\n * @param {!Long|number|string} multiplier Multiplier\n * @returns {!Long} Product\n */\nLongPrototype.mul = LongPrototype.multiply;\n\n/**\n * Returns this Long divided by the specified. The result is signed if this Long is signed or\n * unsigned if this Long is unsigned.\n * @this {!Long}\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Quotient\n */\nLongPrototype.divide = function divide(divisor) {\n if (!isLong(divisor))\n divisor = fromValue(divisor);\n if (divisor.isZero())\n throw Error('division by zero');\n\n // use wasm support if present\n if (wasm) {\n // guard against signed division overflow: the largest\n // negative number / -1 would be 1 larger than the largest\n // positive number, due to two's complement.\n if (!this.unsigned &&\n this.high === -0x80000000 &&\n divisor.low === -1 && divisor.high === -1) {\n // be consistent with non-wasm code path\n return this;\n }\n var low = (this.unsigned ? wasm[\"div_u\"] : wasm[\"div_s\"])(\n this.low,\n this.high,\n divisor.low,\n divisor.high\n );\n return fromBits(low, wasm[\"get_high\"](), this.unsigned);\n }\n\n if (this.isZero())\n return this.unsigned ? UZERO : ZERO;\n var approx, rem, res;\n if (!this.unsigned) {\n // This section is only relevant for signed longs and is derived from the\n // closure library as a whole.\n if (this.eq(MIN_VALUE)) {\n if (divisor.eq(ONE) || divisor.eq(NEG_ONE))\n return MIN_VALUE; // recall that -MIN_VALUE == MIN_VALUE\n else if (divisor.eq(MIN_VALUE))\n return ONE;\n else {\n // At this point, we have |other| >= 2, so |this/other| < |MIN_VALUE|.\n var halfThis = this.shr(1);\n approx = halfThis.div(divisor).shl(1);\n if (approx.eq(ZERO)) {\n return divisor.isNegative() ? ONE : NEG_ONE;\n } else {\n rem = this.sub(divisor.mul(approx));\n res = approx.add(rem.div(divisor));\n return res;\n }\n }\n } else if (divisor.eq(MIN_VALUE))\n return this.unsigned ? 
UZERO : ZERO;\n if (this.isNegative()) {\n if (divisor.isNegative())\n return this.neg().div(divisor.neg());\n return this.neg().div(divisor).neg();\n } else if (divisor.isNegative())\n return this.div(divisor.neg()).neg();\n res = ZERO;\n } else {\n // The algorithm below has not been made for unsigned longs. It's therefore\n // required to take special care of the MSB prior to running it.\n if (!divisor.unsigned)\n divisor = divisor.toUnsigned();\n if (divisor.gt(this))\n return UZERO;\n if (divisor.gt(this.shru(1))) // 15 >>> 1 = 7 ; with divisor = 8 ; true\n return UONE;\n res = UZERO;\n }\n\n // Repeat the following until the remainder is less than other: find a\n // floating-point that approximates remainder / other *from below*, add this\n // into the result, and subtract it from the remainder. It is critical that\n // the approximate value is less than or equal to the real value so that the\n // remainder never becomes negative.\n rem = this;\n while (rem.gte(divisor)) {\n // Approximate the result of division. This may be a little greater or\n // smaller than the actual value.\n approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber()));\n\n // We will tweak the approximate result by changing it in the 48-th digit or\n // the smallest non-fractional digit, whichever is larger.\n var log2 = Math.ceil(Math.log(approx) / Math.LN2),\n delta = (log2 <= 48) ? 1 : pow_dbl(2, log2 - 48),\n\n // Decrease the approximation until it is smaller than the remainder. Note\n // that if it is too large, the product overflows and is negative.\n approxRes = fromNumber(approx),\n approxRem = approxRes.mul(divisor);\n while (approxRem.isNegative() || approxRem.gt(rem)) {\n approx -= delta;\n approxRes = fromNumber(approx, this.unsigned);\n approxRem = approxRes.mul(divisor);\n }\n\n // We know the answer can't be zero... and actually, zero would cause\n // infinite recursion since we would make no progress.\n if (approxRes.isZero())\n approxRes = ONE;\n\n res = res.add(approxRes);\n rem = rem.sub(approxRem);\n }\n return res;\n};\n\n/**\n * Returns this Long divided by the specified. This is an alias of {@link Long#divide}.\n * @function\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Quotient\n */\nLongPrototype.div = LongPrototype.divide;\n\n/**\n * Returns this Long modulo the specified.\n * @this {!Long}\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Remainder\n */\nLongPrototype.modulo = function modulo(divisor) {\n if (!isLong(divisor))\n divisor = fromValue(divisor);\n\n // use wasm support if present\n if (wasm) {\n var low = (this.unsigned ? wasm[\"rem_u\"] : wasm[\"rem_s\"])(\n this.low,\n this.high,\n divisor.low,\n divisor.high\n );\n return fromBits(low, wasm[\"get_high\"](), this.unsigned);\n }\n\n return this.sub(this.div(divisor).mul(divisor));\n};\n\n/**\n * Returns this Long modulo the specified. This is an alias of {@link Long#modulo}.\n * @function\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Remainder\n */\nLongPrototype.mod = LongPrototype.modulo;\n\n/**\n * Returns this Long modulo the specified. 
This is an alias of {@link Long#modulo}.\n * @function\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Remainder\n */\nLongPrototype.rem = LongPrototype.modulo;\n\n/**\n * Returns the bitwise NOT of this Long.\n * @this {!Long}\n * @returns {!Long}\n */\nLongPrototype.not = function not() {\n return fromBits(~this.low, ~this.high, this.unsigned);\n};\n\n/**\n * Returns count leading zeros of this Long.\n * @this {!Long}\n * @returns {!number}\n */\nLongPrototype.countLeadingZeros = function countLeadingZeros() {\n return this.high ? Math.clz32(this.high) : Math.clz32(this.low) + 32;\n};\n\n/**\n * Returns count leading zeros. This is an alias of {@link Long#countLeadingZeros}.\n * @function\n * @param {!Long}\n * @returns {!number}\n */\nLongPrototype.clz = LongPrototype.countLeadingZeros;\n\n/**\n * Returns count trailing zeros of this Long.\n * @this {!Long}\n * @returns {!number}\n */\nLongPrototype.countTrailingZeros = function countTrailingZeros() {\n return this.low ? ctz32(this.low) : ctz32(this.high) + 32;\n};\n\n/**\n * Returns count trailing zeros. This is an alias of {@link Long#countTrailingZeros}.\n * @function\n * @param {!Long}\n * @returns {!number}\n */\nLongPrototype.ctz = LongPrototype.countTrailingZeros;\n\n/**\n * Returns the bitwise AND of this Long and the specified.\n * @this {!Long}\n * @param {!Long|number|string} other Other Long\n * @returns {!Long}\n */\nLongPrototype.and = function and(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low & other.low, this.high & other.high, this.unsigned);\n};\n\n/**\n * Returns the bitwise OR of this Long and the specified.\n * @this {!Long}\n * @param {!Long|number|string} other Other Long\n * @returns {!Long}\n */\nLongPrototype.or = function or(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low | other.low, this.high | other.high, this.unsigned);\n};\n\n/**\n * Returns the bitwise XOR of this Long and the given one.\n * @this {!Long}\n * @param {!Long|number|string} other Other Long\n * @returns {!Long}\n */\nLongPrototype.xor = function xor(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned);\n};\n\n/**\n * Returns this Long with bits shifted to the left by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shiftLeft = function shiftLeft(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n if ((numBits &= 63) === 0)\n return this;\n else if (numBits < 32)\n return fromBits(this.low << numBits, (this.high << numBits) | (this.low >>> (32 - numBits)), this.unsigned);\n else\n return fromBits(0, this.low << (numBits - 32), this.unsigned);\n};\n\n/**\n * Returns this Long with bits shifted to the left by the given amount. 
This is an alias of {@link Long#shiftLeft}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shl = LongPrototype.shiftLeft;\n\n/**\n * Returns this Long with bits arithmetically shifted to the right by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shiftRight = function shiftRight(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n if ((numBits &= 63) === 0)\n return this;\n else if (numBits < 32)\n return fromBits((this.low >>> numBits) | (this.high << (32 - numBits)), this.high >> numBits, this.unsigned);\n else\n return fromBits(this.high >> (numBits - 32), this.high >= 0 ? 0 : -1, this.unsigned);\n};\n\n/**\n * Returns this Long with bits arithmetically shifted to the right by the given amount. This is an alias of {@link Long#shiftRight}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shr = LongPrototype.shiftRight;\n\n/**\n * Returns this Long with bits logically shifted to the right by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) {\n if (isLong(numBits)) numBits = numBits.toInt();\n if ((numBits &= 63) === 0) return this;\n if (numBits < 32) return fromBits((this.low >>> numBits) | (this.high << (32 - numBits)), this.high >>> numBits, this.unsigned);\n if (numBits === 32) return fromBits(this.high, 0, this.unsigned);\n return fromBits(this.high >>> (numBits - 32), 0, this.unsigned);\n};\n\n/**\n * Returns this Long with bits logically shifted to the right by the given amount. This is an alias of {@link Long#shiftRightUnsigned}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shru = LongPrototype.shiftRightUnsigned;\n\n/**\n * Returns this Long with bits logically shifted to the right by the given amount. This is an alias of {@link Long#shiftRightUnsigned}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shr_u = LongPrototype.shiftRightUnsigned;\n\n/**\n * Returns this Long with bits rotated to the left by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotateLeft = function rotateLeft(numBits) {\n var b;\n if (isLong(numBits)) numBits = numBits.toInt();\n if ((numBits &= 63) === 0) return this;\n if (numBits === 32) return fromBits(this.high, this.low, this.unsigned);\n if (numBits < 32) {\n b = (32 - numBits);\n return fromBits(((this.low << numBits) | (this.high >>> b)), ((this.high << numBits) | (this.low >>> b)), this.unsigned);\n }\n numBits -= 32;\n b = (32 - numBits);\n return fromBits(((this.high << numBits) | (this.low >>> b)), ((this.low << numBits) | (this.high >>> b)), this.unsigned);\n}\n/**\n * Returns this Long with bits rotated to the left by the given amount. 
This is an alias of {@link Long#rotateLeft}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotl = LongPrototype.rotateLeft;\n\n/**\n * Returns this Long with bits rotated to the right by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotateRight = function rotateRight(numBits) {\n var b;\n if (isLong(numBits)) numBits = numBits.toInt();\n if ((numBits &= 63) === 0) return this;\n if (numBits === 32) return fromBits(this.high, this.low, this.unsigned);\n if (numBits < 32) {\n b = (32 - numBits);\n return fromBits(((this.high << b) | (this.low >>> numBits)), ((this.low << b) | (this.high >>> numBits)), this.unsigned);\n }\n numBits -= 32;\n b = (32 - numBits);\n return fromBits(((this.low << b) | (this.high >>> numBits)), ((this.high << b) | (this.low >>> numBits)), this.unsigned);\n}\n/**\n * Returns this Long with bits rotated to the right by the given amount. This is an alias of {@link Long#rotateRight}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotr = LongPrototype.rotateRight;\n\n/**\n * Converts this Long to signed.\n * @this {!Long}\n * @returns {!Long} Signed long\n */\nLongPrototype.toSigned = function toSigned() {\n if (!this.unsigned)\n return this;\n return fromBits(this.low, this.high, false);\n};\n\n/**\n * Converts this Long to unsigned.\n * @this {!Long}\n * @returns {!Long} Unsigned long\n */\nLongPrototype.toUnsigned = function toUnsigned() {\n if (this.unsigned)\n return this;\n return fromBits(this.low, this.high, true);\n};\n\n/**\n * Converts this Long to its byte representation.\n * @param {boolean=} le Whether little or big endian, defaults to big endian\n * @this {!Long}\n * @returns {!Array.} Byte representation\n */\nLongPrototype.toBytes = function toBytes(le) {\n return le ? this.toBytesLE() : this.toBytesBE();\n};\n\n/**\n * Converts this Long to its little endian byte representation.\n * @this {!Long}\n * @returns {!Array.} Little endian byte representation\n */\nLongPrototype.toBytesLE = function toBytesLE() {\n var hi = this.high,\n lo = this.low;\n return [\n lo & 0xff,\n lo >>> 8 & 0xff,\n lo >>> 16 & 0xff,\n lo >>> 24,\n hi & 0xff,\n hi >>> 8 & 0xff,\n hi >>> 16 & 0xff,\n hi >>> 24\n ];\n};\n\n/**\n * Converts this Long to its big endian byte representation.\n * @this {!Long}\n * @returns {!Array.} Big endian byte representation\n */\nLongPrototype.toBytesBE = function toBytesBE() {\n var hi = this.high,\n lo = this.low;\n return [\n hi >>> 24,\n hi >>> 16 & 0xff,\n hi >>> 8 & 0xff,\n hi & 0xff,\n lo >>> 24,\n lo >>> 16 & 0xff,\n lo >>> 8 & 0xff,\n lo & 0xff\n ];\n};\n\n/**\n * Creates a Long from its byte representation.\n * @param {!Array.} bytes Byte representation\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @param {boolean=} le Whether little or big endian, defaults to big endian\n * @returns {Long} The corresponding Long value\n */\nLong.fromBytes = function fromBytes(bytes, unsigned, le) {\n return le ? 
Long.fromBytesLE(bytes, unsigned) : Long.fromBytesBE(bytes, unsigned);\n};\n\n/**\n * Creates a Long from its little endian byte representation.\n * @param {!Array.} bytes Little endian byte representation\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {Long} The corresponding Long value\n */\nLong.fromBytesLE = function fromBytesLE(bytes, unsigned) {\n return new Long(\n bytes[0] |\n bytes[1] << 8 |\n bytes[2] << 16 |\n bytes[3] << 24,\n bytes[4] |\n bytes[5] << 8 |\n bytes[6] << 16 |\n bytes[7] << 24,\n unsigned\n );\n};\n\n/**\n * Creates a Long from its big endian byte representation.\n * @param {!Array.} bytes Big endian byte representation\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {Long} The corresponding Long value\n */\nLong.fromBytesBE = function fromBytesBE(bytes, unsigned) {\n return new Long(\n bytes[4] << 24 |\n bytes[5] << 16 |\n bytes[6] << 8 |\n bytes[7],\n bytes[0] << 24 |\n bytes[1] << 16 |\n bytes[2] << 8 |\n bytes[3],\n unsigned\n );\n};\n\nexport default Long;\n", "/// @file\n/// @addtogroup flatbuffers_javascript_api\n/// @{\n/// @cond FLATBUFFERS_INTERNAL\n\n/**\n * @fileoverview\n *\n * Need to suppress 'global this' error so the Node.js export line doesn't cause\n * closure compile to error out.\n * @suppress {globalThis}\n */\n\n/**\n * @const\n * @namespace\n */\nvar flatbuffers = {};\n\n/**\n * @typedef {number}\n */\nflatbuffers.Offset;\n\n/**\n * @typedef {{\n * bb: flatbuffers.ByteBuffer,\n * bb_pos: number\n * }}\n */\nflatbuffers.Table;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.SIZEOF_SHORT = 2;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.SIZEOF_INT = 4;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.FILE_IDENTIFIER_LENGTH = 4;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.SIZE_PREFIX_LENGTH = 4;\n\n/**\n * @enum {number}\n */\nflatbuffers.Encoding = {\n UTF8_BYTES: 1,\n UTF16_STRING: 2\n};\n\n/**\n * @type {Int32Array}\n * @const\n */\nflatbuffers.int32 = new Int32Array(2);\n\n/**\n * @type {Float32Array}\n * @const\n */\nflatbuffers.float32 = new Float32Array(flatbuffers.int32.buffer);\n\n/**\n * @type {Float64Array}\n * @const\n */\nflatbuffers.float64 = new Float64Array(flatbuffers.int32.buffer);\n\n/**\n * @type {boolean}\n * @const\n */\nflatbuffers.isLittleEndian = new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;\n\n////////////////////////////////////////////////////////////////////////////////\n\n/**\n * @constructor\n * @param {number} low\n * @param {number} high\n */\nflatbuffers.Long = function(low, high) {\n /**\n * @type {number}\n * @const\n */\n this.low = low | 0;\n\n /**\n * @type {number}\n * @const\n */\n this.high = high | 0;\n};\n\n/**\n * @param {number} low\n * @param {number} high\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.Long.create = function(low, high) {\n // Special-case zero to avoid GC overhead for default values\n return low == 0 && high == 0 ? 
flatbuffers.Long.ZERO : new flatbuffers.Long(low, high);\n};\n\n/**\n * @returns {number}\n */\nflatbuffers.Long.prototype.toFloat64 = function() {\n return (this.low >>> 0) + this.high * 0x100000000;\n};\n\n/**\n * @param {flatbuffers.Long} other\n * @returns {boolean}\n */\nflatbuffers.Long.prototype.equals = function(other) {\n return this.low == other.low && this.high == other.high;\n};\n\n/**\n * @type {!flatbuffers.Long}\n * @const\n */\nflatbuffers.Long.ZERO = new flatbuffers.Long(0, 0);\n\n/// @endcond\n////////////////////////////////////////////////////////////////////////////////\n/**\n * Create a FlatBufferBuilder.\n *\n * @constructor\n * @param {number=} opt_initial_size\n */\nflatbuffers.Builder = function(opt_initial_size) {\n if (!opt_initial_size) {\n var initial_size = 1024;\n } else {\n var initial_size = opt_initial_size;\n }\n\n /**\n * @type {flatbuffers.ByteBuffer}\n * @private\n */\n this.bb = flatbuffers.ByteBuffer.allocate(initial_size);\n\n /**\n * Remaining space in the ByteBuffer.\n *\n * @type {number}\n * @private\n */\n this.space = initial_size;\n\n /**\n * Minimum alignment encountered so far.\n *\n * @type {number}\n * @private\n */\n this.minalign = 1;\n\n /**\n * The vtable for the current table.\n *\n * @type {Array.}\n * @private\n */\n this.vtable = null;\n\n /**\n * The amount of fields we're actually using.\n *\n * @type {number}\n * @private\n */\n this.vtable_in_use = 0;\n\n /**\n * Whether we are currently serializing a table.\n *\n * @type {boolean}\n * @private\n */\n this.isNested = false;\n\n /**\n * Starting offset of the current struct/table.\n *\n * @type {number}\n * @private\n */\n this.object_start = 0;\n\n /**\n * List of offsets of all vtables.\n *\n * @type {Array.}\n * @private\n */\n this.vtables = [];\n\n /**\n * For the current vector being built.\n *\n * @type {number}\n * @private\n */\n this.vector_num_elems = 0;\n\n /**\n * False omits default values from the serialized data\n *\n * @type {boolean}\n * @private\n */\n this.force_defaults = false;\n};\n\nflatbuffers.Builder.prototype.clear = function() {\n this.bb.clear();\n this.space = this.bb.capacity();\n this.minalign = 1;\n this.vtable = null;\n this.vtable_in_use = 0;\n this.isNested = false;\n this.object_start = 0;\n this.vtables = [];\n this.vector_num_elems = 0;\n this.force_defaults = false;\n};\n\n/**\n * In order to save space, fields that are set to their default value\n * don't get serialized into the buffer. Forcing defaults provides a\n * way to manually disable this optimization.\n *\n * @param {boolean} forceDefaults true always serializes default values\n */\nflatbuffers.Builder.prototype.forceDefaults = function(forceDefaults) {\n this.force_defaults = forceDefaults;\n};\n\n/**\n * Get the ByteBuffer representing the FlatBuffer. Only call this after you've\n * called finish(). The actual data starts at the ByteBuffer's current position,\n * not necessarily at 0.\n *\n * @returns {flatbuffers.ByteBuffer}\n */\nflatbuffers.Builder.prototype.dataBuffer = function() {\n return this.bb;\n};\n\n/**\n * Get the bytes representing the FlatBuffer. Only call this after you've\n * called finish().\n *\n * @returns {!Uint8Array}\n */\nflatbuffers.Builder.prototype.asUint8Array = function() {\n return this.bb.bytes().subarray(this.bb.position(), this.bb.position() + this.offset());\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * Prepare to write an element of `size` after `additional_bytes` have been\n * written, e.g. 
if you write a string, you need to align such the int length\n * field is aligned to 4 bytes, and the string data follows it directly. If all\n * you need to do is alignment, `additional_bytes` will be 0.\n *\n * @param {number} size This is the of the new element to write\n * @param {number} additional_bytes The padding size\n */\nflatbuffers.Builder.prototype.prep = function(size, additional_bytes) {\n // Track the biggest thing we've ever aligned to.\n if (size > this.minalign) {\n this.minalign = size;\n }\n\n // Find the amount of alignment needed such that `size` is properly\n // aligned after `additional_bytes`\n var align_size = ((~(this.bb.capacity() - this.space + additional_bytes)) + 1) & (size - 1);\n\n // Reallocate the buffer if needed.\n while (this.space < align_size + size + additional_bytes) {\n var old_buf_size = this.bb.capacity();\n this.bb = flatbuffers.Builder.growByteBuffer(this.bb);\n this.space += this.bb.capacity() - old_buf_size;\n }\n\n this.pad(align_size);\n};\n\n/**\n * @param {number} byte_size\n */\nflatbuffers.Builder.prototype.pad = function(byte_size) {\n for (var i = 0; i < byte_size; i++) {\n this.bb.writeInt8(--this.space, 0);\n }\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeInt8 = function(value) {\n this.bb.writeInt8(this.space -= 1, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeInt16 = function(value) {\n this.bb.writeInt16(this.space -= 2, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeInt32 = function(value) {\n this.bb.writeInt32(this.space -= 4, value);\n};\n\n/**\n * @param {flatbuffers.Long} value\n */\nflatbuffers.Builder.prototype.writeInt64 = function(value) {\n this.bb.writeInt64(this.space -= 8, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeFloat32 = function(value) {\n this.bb.writeFloat32(this.space -= 4, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeFloat64 = function(value) {\n this.bb.writeFloat64(this.space -= 8, value);\n};\n/// @endcond\n\n/**\n * Add an `int8` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `int8` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt8 = function(value) {\n this.prep(1, 0);\n this.writeInt8(value);\n};\n\n/**\n * Add an `int16` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `int16` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt16 = function(value) {\n this.prep(2, 0);\n this.writeInt16(value);\n};\n\n/**\n * Add an `int32` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `int32` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt32 = function(value) {\n this.prep(4, 0);\n this.writeInt32(value);\n};\n\n/**\n * Add an `int64` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {flatbuffers.Long} value The `int64` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt64 = function(value) {\n this.prep(8, 0);\n this.writeInt64(value);\n};\n\n/**\n * Add a `float32` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `float32` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addFloat32 = function(value) {\n this.prep(4, 0);\n this.writeFloat32(value);\n};\n\n/**\n * Add a `float64` to the buffer, properly aligned, and 
grows the buffer (if necessary).\n * @param {number} value The `float64` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addFloat64 = function(value) {\n this.prep(8, 0);\n this.writeFloat64(value);\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt8 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addInt8(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt16 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addInt16(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt32 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addInt32(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {flatbuffers.Long} value\n * @param {flatbuffers.Long} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt64 = function(voffset, value, defaultValue) {\n if (this.force_defaults || !value.equals(defaultValue)) {\n this.addInt64(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldFloat32 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addFloat32(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldFloat64 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addFloat64(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {flatbuffers.Offset} value\n * @param {flatbuffers.Offset} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldOffset = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addOffset(value);\n this.slot(voffset);\n }\n};\n\n/**\n * Structs are stored inline, so nothing additional is being added. `d` is always 0.\n *\n * @param {number} voffset\n * @param {flatbuffers.Offset} value\n * @param {flatbuffers.Offset} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldStruct = function(voffset, value, defaultValue) {\n if (value != defaultValue) {\n this.nested(value);\n this.slot(voffset);\n }\n};\n\n/**\n * Structures are always stored inline, they need to be created right\n * where they're used. 
You'll get this assertion failure if you\n * created it elsewhere.\n *\n * @param {flatbuffers.Offset} obj The offset of the created object\n */\nflatbuffers.Builder.prototype.nested = function(obj) {\n if (obj != this.offset()) {\n throw new Error('FlatBuffers: struct must be serialized inline.');\n }\n};\n\n/**\n * Should not be creating any other object, string or vector\n * while an object is being constructed\n */\nflatbuffers.Builder.prototype.notNested = function() {\n if (this.isNested) {\n throw new Error('FlatBuffers: object serialization must not be nested.');\n }\n};\n\n/**\n * Set the current vtable at `voffset` to the current location in the buffer.\n *\n * @param {number} voffset\n */\nflatbuffers.Builder.prototype.slot = function(voffset) {\n this.vtable[voffset] = this.offset();\n};\n\n/**\n * @returns {flatbuffers.Offset} Offset relative to the end of the buffer.\n */\nflatbuffers.Builder.prototype.offset = function() {\n return this.bb.capacity() - this.space;\n};\n\n/**\n * Doubles the size of the backing ByteBuffer and copies the old data towards\n * the end of the new buffer (since we build the buffer backwards).\n *\n * @param {flatbuffers.ByteBuffer} bb The current buffer with the existing data\n * @returns {!flatbuffers.ByteBuffer} A new byte buffer with the old data copied\n * to it. The data is located at the end of the buffer.\n *\n * uint8Array.set() formally takes {Array|ArrayBufferView}, so to pass\n * it a uint8Array we need to suppress the type check:\n * @suppress {checkTypes}\n */\nflatbuffers.Builder.growByteBuffer = function(bb) {\n var old_buf_size = bb.capacity();\n\n // Ensure we don't grow beyond what fits in an int.\n if (old_buf_size & 0xC0000000) {\n throw new Error('FlatBuffers: cannot grow buffer beyond 2 gigabytes.');\n }\n\n var new_buf_size = old_buf_size << 1;\n var nbb = flatbuffers.ByteBuffer.allocate(new_buf_size);\n nbb.setPosition(new_buf_size - old_buf_size);\n nbb.bytes().set(bb.bytes(), new_buf_size - old_buf_size);\n return nbb;\n};\n/// @endcond\n\n/**\n * Adds on offset, relative to where it will be written.\n *\n * @param {flatbuffers.Offset} offset The offset to add.\n */\nflatbuffers.Builder.prototype.addOffset = function(offset) {\n this.prep(flatbuffers.SIZEOF_INT, 0); // Ensure alignment is already done.\n this.writeInt32(this.offset() - offset + flatbuffers.SIZEOF_INT);\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * Start encoding a new object in the buffer. Users will not usually need to\n * call this directly. 
The FlatBuffers compiler will generate helper methods\n * that call this method internally.\n *\n * @param {number} numfields\n */\nflatbuffers.Builder.prototype.startObject = function(numfields) {\n this.notNested();\n if (this.vtable == null) {\n this.vtable = [];\n }\n this.vtable_in_use = numfields;\n for (var i = 0; i < numfields; i++) {\n this.vtable[i] = 0; // This will push additional elements as needed\n }\n this.isNested = true;\n this.object_start = this.offset();\n};\n\n/**\n * Finish off writing the object that is under construction.\n *\n * @returns {flatbuffers.Offset} The offset to the object inside `dataBuffer`\n */\nflatbuffers.Builder.prototype.endObject = function() {\n if (this.vtable == null || !this.isNested) {\n throw new Error('FlatBuffers: endObject called without startObject');\n }\n\n this.addInt32(0);\n var vtableloc = this.offset();\n\n // Trim trailing zeroes.\n var i = this.vtable_in_use - 1;\n for (; i >= 0 && this.vtable[i] == 0; i--) {}\n var trimmed_size = i + 1;\n\n // Write out the current vtable.\n for (; i >= 0; i--) {\n // Offset relative to the start of the table.\n this.addInt16(this.vtable[i] != 0 ? vtableloc - this.vtable[i] : 0);\n }\n\n var standard_fields = 2; // The fields below:\n this.addInt16(vtableloc - this.object_start);\n var len = (trimmed_size + standard_fields) * flatbuffers.SIZEOF_SHORT;\n this.addInt16(len);\n\n // Search for an existing vtable that matches the current one.\n var existing_vtable = 0;\n var vt1 = this.space;\nouter_loop:\n for (i = 0; i < this.vtables.length; i++) {\n var vt2 = this.bb.capacity() - this.vtables[i];\n if (len == this.bb.readInt16(vt2)) {\n for (var j = flatbuffers.SIZEOF_SHORT; j < len; j += flatbuffers.SIZEOF_SHORT) {\n if (this.bb.readInt16(vt1 + j) != this.bb.readInt16(vt2 + j)) {\n continue outer_loop;\n }\n }\n existing_vtable = this.vtables[i];\n break;\n }\n }\n\n if (existing_vtable) {\n // Found a match:\n // Remove the current vtable.\n this.space = this.bb.capacity() - vtableloc;\n\n // Point table to existing vtable.\n this.bb.writeInt32(this.space, existing_vtable - vtableloc);\n } else {\n // No match:\n // Add the location of the current vtable to the list of vtables.\n this.vtables.push(this.offset());\n\n // Point table to current vtable.\n this.bb.writeInt32(this.bb.capacity() - vtableloc, this.offset() - vtableloc);\n }\n\n this.isNested = false;\n return vtableloc;\n};\n/// @endcond\n\n/**\n * Finalize a buffer, poiting to the given `root_table`.\n *\n * @param {flatbuffers.Offset} root_table\n * @param {string=} opt_file_identifier\n * @param {boolean=} opt_size_prefix\n */\nflatbuffers.Builder.prototype.finish = function(root_table, opt_file_identifier, opt_size_prefix) {\n var size_prefix = opt_size_prefix ? 
flatbuffers.SIZE_PREFIX_LENGTH : 0;\n if (opt_file_identifier) {\n var file_identifier = opt_file_identifier;\n this.prep(this.minalign, flatbuffers.SIZEOF_INT +\n flatbuffers.FILE_IDENTIFIER_LENGTH + size_prefix);\n if (file_identifier.length != flatbuffers.FILE_IDENTIFIER_LENGTH) {\n throw new Error('FlatBuffers: file identifier must be length ' +\n flatbuffers.FILE_IDENTIFIER_LENGTH);\n }\n for (var i = flatbuffers.FILE_IDENTIFIER_LENGTH - 1; i >= 0; i--) {\n this.writeInt8(file_identifier.charCodeAt(i));\n }\n }\n this.prep(this.minalign, flatbuffers.SIZEOF_INT + size_prefix);\n this.addOffset(root_table);\n if (size_prefix) {\n this.addInt32(this.bb.capacity() - this.space);\n }\n this.bb.setPosition(this.space);\n};\n\n/**\n * Finalize a size prefixed buffer, pointing to the given `root_table`.\n *\n * @param {flatbuffers.Offset} root_table\n * @param {string=} opt_file_identifier\n */\nflatbuffers.Builder.prototype.finishSizePrefixed = function (root_table, opt_file_identifier) {\n this.finish(root_table, opt_file_identifier, true);\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * This checks a required field has been set in a given table that has\n * just been constructed.\n *\n * @param {flatbuffers.Offset} table\n * @param {number} field\n */\nflatbuffers.Builder.prototype.requiredField = function(table, field) {\n var table_start = this.bb.capacity() - table;\n var vtable_start = table_start - this.bb.readInt32(table_start);\n var ok = this.bb.readInt16(vtable_start + field) != 0;\n\n // If this fails, the caller will show what field needs to be set.\n if (!ok) {\n throw new Error('FlatBuffers: field ' + field + ' must be set');\n }\n};\n\n/**\n * Start a new array/vector of objects. Users usually will not call\n * this directly. The FlatBuffers compiler will create a start/end\n * method for vector types in generated code.\n *\n * @param {number} elem_size The size of each element in the array\n * @param {number} num_elems The number of elements in the array\n * @param {number} alignment The alignment of the array\n */\nflatbuffers.Builder.prototype.startVector = function(elem_size, num_elems, alignment) {\n this.notNested();\n this.vector_num_elems = num_elems;\n this.prep(flatbuffers.SIZEOF_INT, elem_size * num_elems);\n this.prep(alignment, elem_size * num_elems); // Just in case alignment > int.\n};\n\n/**\n * Finish off the creation of an array and all its elements. The array must be\n * created with `startVector`.\n *\n * @returns {flatbuffers.Offset} The offset at which the newly created array\n * starts.\n */\nflatbuffers.Builder.prototype.endVector = function() {\n this.writeInt32(this.vector_num_elems);\n return this.offset();\n};\n/// @endcond\n\n/**\n * Encode the string `s` in the buffer using UTF-8. 
If a Uint8Array is passed\n * instead of a string, it is assumed to contain valid UTF-8 encoded data.\n *\n * @param {string|Uint8Array} s The string to encode\n * @return {flatbuffers.Offset} The offset in the buffer where the encoded string starts\n */\nflatbuffers.Builder.prototype.createString = function(s) {\n if (s instanceof Uint8Array) {\n var utf8 = s;\n } else {\n var utf8 = [];\n var i = 0;\n\n while (i < s.length) {\n var codePoint;\n\n // Decode UTF-16\n var a = s.charCodeAt(i++);\n if (a < 0xD800 || a >= 0xDC00) {\n codePoint = a;\n } else {\n var b = s.charCodeAt(i++);\n codePoint = (a << 10) + b + (0x10000 - (0xD800 << 10) - 0xDC00);\n }\n\n // Encode UTF-8\n if (codePoint < 0x80) {\n utf8.push(codePoint);\n } else {\n if (codePoint < 0x800) {\n utf8.push(((codePoint >> 6) & 0x1F) | 0xC0);\n } else {\n if (codePoint < 0x10000) {\n utf8.push(((codePoint >> 12) & 0x0F) | 0xE0);\n } else {\n utf8.push(\n ((codePoint >> 18) & 0x07) | 0xF0,\n ((codePoint >> 12) & 0x3F) | 0x80);\n }\n utf8.push(((codePoint >> 6) & 0x3F) | 0x80);\n }\n utf8.push((codePoint & 0x3F) | 0x80);\n }\n }\n }\n\n this.addInt8(0);\n this.startVector(1, utf8.length, 1);\n this.bb.setPosition(this.space -= utf8.length);\n for (var i = 0, offset = this.space, bytes = this.bb.bytes(); i < utf8.length; i++) {\n bytes[offset++] = utf8[i];\n }\n return this.endVector();\n};\n\n/**\n * A helper function to avoid generated code depending on this file directly.\n *\n * @param {number} low\n * @param {number} high\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.Builder.prototype.createLong = function(low, high) {\n return flatbuffers.Long.create(low, high);\n};\n////////////////////////////////////////////////////////////////////////////////\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * Create a new ByteBuffer with a given array of bytes (`Uint8Array`).\n *\n * @constructor\n * @param {Uint8Array} bytes\n */\nflatbuffers.ByteBuffer = function(bytes) {\n /**\n * @type {Uint8Array}\n * @private\n */\n this.bytes_ = bytes;\n\n /**\n * @type {number}\n * @private\n */\n this.position_ = 0;\n};\n\n/**\n * Create and allocate a new ByteBuffer with a given size.\n *\n * @param {number} byte_size\n * @returns {!flatbuffers.ByteBuffer}\n */\nflatbuffers.ByteBuffer.allocate = function(byte_size) {\n return new flatbuffers.ByteBuffer(new Uint8Array(byte_size));\n};\n\nflatbuffers.ByteBuffer.prototype.clear = function() {\n this.position_ = 0;\n};\n\n/**\n * Get the underlying `Uint8Array`.\n *\n * @returns {Uint8Array}\n */\nflatbuffers.ByteBuffer.prototype.bytes = function() {\n return this.bytes_;\n};\n\n/**\n * Get the buffer's position.\n *\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.position = function() {\n return this.position_;\n};\n\n/**\n * Set the buffer's position.\n *\n * @param {number} position\n */\nflatbuffers.ByteBuffer.prototype.setPosition = function(position) {\n this.position_ = position;\n};\n\n/**\n * Get the buffer's capacity.\n *\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.capacity = function() {\n return this.bytes_.length;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readInt8 = function(offset) {\n return this.readUint8(offset) << 24 >> 24;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readUint8 = function(offset) {\n return this.bytes_[offset];\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readInt16 = 
function(offset) {\n return this.readUint16(offset) << 16 >> 16;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readUint16 = function(offset) {\n return this.bytes_[offset] | this.bytes_[offset + 1] << 8;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readInt32 = function(offset) {\n return this.bytes_[offset] | this.bytes_[offset + 1] << 8 | this.bytes_[offset + 2] << 16 | this.bytes_[offset + 3] << 24;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readUint32 = function(offset) {\n return this.readInt32(offset) >>> 0;\n};\n\n/**\n * @param {number} offset\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.ByteBuffer.prototype.readInt64 = function(offset) {\n return new flatbuffers.Long(this.readInt32(offset), this.readInt32(offset + 4));\n};\n\n/**\n * @param {number} offset\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.ByteBuffer.prototype.readUint64 = function(offset) {\n return new flatbuffers.Long(this.readUint32(offset), this.readUint32(offset + 4));\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readFloat32 = function(offset) {\n flatbuffers.int32[0] = this.readInt32(offset);\n return flatbuffers.float32[0];\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readFloat64 = function(offset) {\n flatbuffers.int32[flatbuffers.isLittleEndian ? 0 : 1] = this.readInt32(offset);\n flatbuffers.int32[flatbuffers.isLittleEndian ? 1 : 0] = this.readInt32(offset + 4);\n return flatbuffers.float64[0];\n};\n\n/**\n * @param {number} offset\n * @param {number|boolean} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt8 = function(offset, value) {\n this.bytes_[offset] = /** @type {number} */(value);\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint8 = function(offset, value) {\n this.bytes_[offset] = value;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt16 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint16 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt32 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n this.bytes_[offset + 2] = value >> 16;\n this.bytes_[offset + 3] = value >> 24;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint32 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n this.bytes_[offset + 2] = value >> 16;\n this.bytes_[offset + 3] = value >> 24;\n};\n\n/**\n * @param {number} offset\n * @param {flatbuffers.Long} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt64 = function(offset, value) {\n this.writeInt32(offset, value.low);\n this.writeInt32(offset + 4, value.high);\n};\n\n/**\n * @param {number} offset\n * @param {flatbuffers.Long} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint64 = function(offset, value) {\n this.writeUint32(offset, value.low);\n this.writeUint32(offset + 4, 
value.high);\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeFloat32 = function(offset, value) {\n flatbuffers.float32[0] = value;\n this.writeInt32(offset, flatbuffers.int32[0]);\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeFloat64 = function(offset, value) {\n flatbuffers.float64[0] = value;\n this.writeInt32(offset, flatbuffers.int32[flatbuffers.isLittleEndian ? 0 : 1]);\n this.writeInt32(offset + 4, flatbuffers.int32[flatbuffers.isLittleEndian ? 1 : 0]);\n};\n\n/**\n * Return the file identifier. Behavior is undefined for FlatBuffers whose\n * schema does not include a file_identifier (likely points at padding or the\n * start of a the root vtable).\n * @returns {string}\n */\nflatbuffers.ByteBuffer.prototype.getBufferIdentifier = function() {\n if (this.bytes_.length < this.position_ + flatbuffers.SIZEOF_INT +\n flatbuffers.FILE_IDENTIFIER_LENGTH) {\n throw new Error(\n 'FlatBuffers: ByteBuffer is too short to contain an identifier.');\n }\n var result = \"\";\n for (var i = 0; i < flatbuffers.FILE_IDENTIFIER_LENGTH; i++) {\n result += String.fromCharCode(\n this.readInt8(this.position_ + flatbuffers.SIZEOF_INT + i));\n }\n return result;\n};\n\n/**\n * Look up a field in the vtable, return an offset into the object, or 0 if the\n * field is not present.\n *\n * @param {number} bb_pos\n * @param {number} vtable_offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__offset = function(bb_pos, vtable_offset) {\n var vtable = bb_pos - this.readInt32(bb_pos);\n return vtable_offset < this.readInt16(vtable) ? this.readInt16(vtable + vtable_offset) : 0;\n};\n\n/**\n * Initialize any Table-derived type to point to the union at the given offset.\n *\n * @param {flatbuffers.Table} t\n * @param {number} offset\n * @returns {flatbuffers.Table}\n */\nflatbuffers.ByteBuffer.prototype.__union = function(t, offset) {\n t.bb_pos = offset + this.readInt32(offset);\n t.bb = this;\n return t;\n};\n\n/**\n * Create a JavaScript string from UTF-8 data stored inside the FlatBuffer.\n * This allocates a new string and converts to wide chars upon each access.\n *\n * To avoid the conversion to UTF-16, pass flatbuffers.Encoding.UTF8_BYTES as\n * the \"optionalEncoding\" argument. 
This is useful for avoiding conversion to\n * and from UTF-16 when the data will just be packaged back up in another\n * FlatBuffer later on.\n *\n * @param {number} offset\n * @param {flatbuffers.Encoding=} opt_encoding Defaults to UTF16_STRING\n * @returns {string|!Uint8Array}\n */\nflatbuffers.ByteBuffer.prototype.__string = function(offset, opt_encoding) {\n offset += this.readInt32(offset);\n\n var length = this.readInt32(offset);\n var result = '';\n var i = 0;\n\n offset += flatbuffers.SIZEOF_INT;\n\n if (opt_encoding === flatbuffers.Encoding.UTF8_BYTES) {\n return this.bytes_.subarray(offset, offset + length);\n }\n\n while (i < length) {\n var codePoint;\n\n // Decode UTF-8\n var a = this.readUint8(offset + i++);\n if (a < 0xC0) {\n codePoint = a;\n } else {\n var b = this.readUint8(offset + i++);\n if (a < 0xE0) {\n codePoint =\n ((a & 0x1F) << 6) |\n (b & 0x3F);\n } else {\n var c = this.readUint8(offset + i++);\n if (a < 0xF0) {\n codePoint =\n ((a & 0x0F) << 12) |\n ((b & 0x3F) << 6) |\n (c & 0x3F);\n } else {\n var d = this.readUint8(offset + i++);\n codePoint =\n ((a & 0x07) << 18) |\n ((b & 0x3F) << 12) |\n ((c & 0x3F) << 6) |\n (d & 0x3F);\n }\n }\n }\n\n // Encode UTF-16\n if (codePoint < 0x10000) {\n result += String.fromCharCode(codePoint);\n } else {\n codePoint -= 0x10000;\n result += String.fromCharCode(\n (codePoint >> 10) + 0xD800,\n (codePoint & ((1 << 10) - 1)) + 0xDC00);\n }\n }\n\n return result;\n};\n\n/**\n * Retrieve the relative offset stored at \"offset\"\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__indirect = function(offset) {\n return offset + this.readInt32(offset);\n};\n\n/**\n * Get the start of data of a vector whose offset is stored at \"offset\" in this object.\n *\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__vector = function(offset) {\n return offset + this.readInt32(offset) + flatbuffers.SIZEOF_INT; // data starts after the length\n};\n\n/**\n * Get the length of a vector whose offset is stored at \"offset\" in this object.\n *\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__vector_len = function(offset) {\n return this.readInt32(offset + this.readInt32(offset));\n};\n\n/**\n * @param {string} ident\n * @returns {boolean}\n */\nflatbuffers.ByteBuffer.prototype.__has_identifier = function(ident) {\n if (ident.length != flatbuffers.FILE_IDENTIFIER_LENGTH) {\n throw new Error('FlatBuffers: file identifier must be length ' +\n flatbuffers.FILE_IDENTIFIER_LENGTH);\n }\n for (var i = 0; i < flatbuffers.FILE_IDENTIFIER_LENGTH; i++) {\n if (ident.charCodeAt(i) != this.readInt8(this.position_ + flatbuffers.SIZEOF_INT + i)) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * A helper function to avoid generated code depending on this file directly.\n *\n * @param {number} low\n * @param {number} high\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.ByteBuffer.prototype.createLong = function(low, high) {\n return flatbuffers.Long.create(low, high);\n};\n\n// Exports for Node.js and RequireJS\nexport { flatbuffers };\n\n/// @endcond\n/// @}\n", "// automatically generated by the FlatBuffers compiler, do not modify\n/* eslint-disable */\n\nimport {flatbuffers} from 'flatbuffers';\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum AttributeType {\n UNDEFINED = 0,\n FLOAT = 1,\n INT = 2,\n STRING = 3,\n TENSOR = 4,\n GRAPH = 5,\n FLOATS = 6,\n INTS = 7,\n STRINGS = 8,\n TENSORS = 
9,\n GRAPHS = 10,\n SPARSE_TENSOR = 11,\n SPARSE_TENSORS = 12\n }\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum DimensionValueType {UNKNOWN = 0, VALUE = 1, PARAM = 2}\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum TensorDataType {\n UNDEFINED = 0,\n FLOAT = 1,\n UINT8 = 2,\n INT8 = 3,\n UINT16 = 4,\n INT16 = 5,\n INT32 = 6,\n INT64 = 7,\n STRING = 8,\n BOOL = 9,\n FLOAT16 = 10,\n DOUBLE = 11,\n UINT32 = 12,\n UINT64 = 13,\n COMPLEX64 = 14,\n COMPLEX128 = 15,\n BFLOAT16 = 16,\n FLOAT8E4M3FN = 17,\n FLOAT8E4M3FNUZ = 18,\n FLOAT8E5M2 = 19,\n FLOAT8E5M2FNUZ = 20,\n }\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum NodeType {Primitive = 0, Fused = 1}\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum TypeInfoValue {NONE = 0, tensor_type = 1, sequence_type = 2, map_type = 3}\n}\n\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Shape {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Shape\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Shape {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Shape= obj\n * @returns Shape\n */\n static getRootAsShape(bb: flatbuffers.ByteBuffer, obj?: Shape): Shape {\n return (obj || new Shape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Shape= obj\n * @returns Shape\n */\n static getSizePrefixedRootAsShape(bb: flatbuffers.ByteBuffer, obj?: Shape): Shape {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Shape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Dimension= obj\n * @returns onnxruntime.experimental.fbs.Dimension\n */\n dim(index: number, obj?: onnxruntime.experimental.fbs.Dimension): onnxruntime.experimental.fbs.Dimension|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.Dimension())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n dimLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startShape(builder: flatbuffers.Builder) {\n builder.startObject(1);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimOffset\n */\n static addDim(builder: flatbuffers.Builder, dimOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, dimOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createDimVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startDimVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endShape(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createShape(builder: flatbuffers.Builder, dimOffset: flatbuffers.Offset): flatbuffers.Offset {\n Shape.startShape(builder);\n Shape.addDim(builder, dimOffset);\n return Shape.endShape(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Dimension {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Dimension\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Dimension {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Dimension= obj\n * @returns Dimension\n */\n static getRootAsDimension(bb: flatbuffers.ByteBuffer, obj?: Dimension): Dimension {\n return (obj || new Dimension()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Dimension= obj\n * @returns Dimension\n */\n static getSizePrefixedRootAsDimension(bb: flatbuffers.ByteBuffer, obj?: Dimension): Dimension {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Dimension()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.DimensionValue= obj\n * @returns onnxruntime.experimental.fbs.DimensionValue|null\n */\n value(obj?: onnxruntime.experimental.fbs.DimensionValue): onnxruntime.experimental.fbs.DimensionValue|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.DimensionValue())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n denotation(): string|null;\n denotation(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n denotation(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startDimension(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valueOffset\n */\n static addValue(builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, valueOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset denotationOffset\n */\n static addDenotation(builder: flatbuffers.Builder, denotationOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, denotationOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endDimension(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createDimension(\n builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset,\n denotationOffset: flatbuffers.Offset): flatbuffers.Offset {\n Dimension.startDimension(builder);\n Dimension.addValue(builder, valueOffset);\n Dimension.addDenotation(builder, denotationOffset);\n return Dimension.endDimension(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class DimensionValue {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns DimensionValue\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): DimensionValue {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param DimensionValue= obj\n * @returns DimensionValue\n */\n static getRootAsDimensionValue(bb: flatbuffers.ByteBuffer, obj?: DimensionValue): DimensionValue {\n return (obj || new DimensionValue()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param DimensionValue= obj\n * @returns DimensionValue\n */\n static getSizePrefixedRootAsDimensionValue(bb: flatbuffers.ByteBuffer, obj?: DimensionValue): DimensionValue {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new DimensionValue()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.DimensionValueType\n */\n dimType(): onnxruntime.experimental.fbs.DimensionValueType {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? /** */ (this.bb!.readInt8(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.DimensionValueType.UNKNOWN;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n dimValue(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n dimParam(): string|null;\n dimParam(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n dimParam(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startDimensionValue(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.DimensionValueType dimType\n */\n static addDimType(builder: flatbuffers.Builder, dimType: onnxruntime.experimental.fbs.DimensionValueType) {\n builder.addFieldInt8(0, dimType, onnxruntime.experimental.fbs.DimensionValueType.UNKNOWN);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long dimValue\n */\n static addDimValue(builder: flatbuffers.Builder, dimValue: flatbuffers.Long) {\n builder.addFieldInt64(1, dimValue, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimParamOffset\n */\n static addDimParam(builder: flatbuffers.Builder, dimParamOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, dimParamOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endDimensionValue(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createDimensionValue(\n builder: flatbuffers.Builder, dimType: onnxruntime.experimental.fbs.DimensionValueType,\n dimValue: flatbuffers.Long, dimParamOffset: flatbuffers.Offset): flatbuffers.Offset {\n DimensionValue.startDimensionValue(builder);\n DimensionValue.addDimType(builder, dimType);\n DimensionValue.addDimValue(builder, dimValue);\n DimensionValue.addDimParam(builder, dimParamOffset);\n return DimensionValue.endDimensionValue(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class TensorTypeAndShape {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns TensorTypeAndShape\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): TensorTypeAndShape {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TensorTypeAndShape= obj\n * @returns TensorTypeAndShape\n */\n static getRootAsTensorTypeAndShape(bb: flatbuffers.ByteBuffer, obj?: TensorTypeAndShape): TensorTypeAndShape {\n return (obj || new TensorTypeAndShape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TensorTypeAndShape= obj\n * @returns TensorTypeAndShape\n */\n static getSizePrefixedRootAsTensorTypeAndShape(bb: flatbuffers.ByteBuffer, obj?: TensorTypeAndShape):\n TensorTypeAndShape {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new TensorTypeAndShape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TensorDataType\n */\n elemType(): onnxruntime.experimental.fbs.TensorDataType {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TensorDataType.UNDEFINED;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Shape= obj\n * @returns onnxruntime.experimental.fbs.Shape|null\n */\n shape(obj?: onnxruntime.experimental.fbs.Shape): onnxruntime.experimental.fbs.Shape|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.Shape())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startTensorTypeAndShape(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TensorDataType elemType\n */\n static addElemType(builder: flatbuffers.Builder, elemType: onnxruntime.experimental.fbs.TensorDataType) {\n builder.addFieldInt32(0, elemType, onnxruntime.experimental.fbs.TensorDataType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset shapeOffset\n */\n static addShape(builder: flatbuffers.Builder, shapeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, shapeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endTensorTypeAndShape(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createTensorTypeAndShape(\n builder: flatbuffers.Builder, elemType: onnxruntime.experimental.fbs.TensorDataType,\n shapeOffset: flatbuffers.Offset): flatbuffers.Offset {\n TensorTypeAndShape.startTensorTypeAndShape(builder);\n TensorTypeAndShape.addElemType(builder, elemType);\n TensorTypeAndShape.addShape(builder, shapeOffset);\n return TensorTypeAndShape.endTensorTypeAndShape(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class MapType {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns MapType\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): MapType {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param MapType= obj\n * @returns MapType\n */\n static getRootAsMapType(bb: flatbuffers.ByteBuffer, obj?: MapType): MapType {\n return (obj || new MapType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param MapType= obj\n * @returns MapType\n */\n static getSizePrefixedRootAsMapType(bb: flatbuffers.ByteBuffer, obj?: MapType): MapType {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new MapType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TensorDataType\n */\n keyType(): onnxruntime.experimental.fbs.TensorDataType {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TensorDataType.UNDEFINED;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.TypeInfo= obj\n * @returns onnxruntime.experimental.fbs.TypeInfo|null\n */\n valueType(obj?: onnxruntime.experimental.fbs.TypeInfo): onnxruntime.experimental.fbs.TypeInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.TypeInfo())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startMapType(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TensorDataType keyType\n */\n static addKeyType(builder: flatbuffers.Builder, keyType: onnxruntime.experimental.fbs.TensorDataType) {\n builder.addFieldInt32(0, keyType, onnxruntime.experimental.fbs.TensorDataType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valueTypeOffset\n */\n static addValueType(builder: flatbuffers.Builder, valueTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, valueTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endMapType(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createMapType(\n builder: flatbuffers.Builder, keyType: onnxruntime.experimental.fbs.TensorDataType,\n valueTypeOffset: flatbuffers.Offset): flatbuffers.Offset {\n MapType.startMapType(builder);\n MapType.addKeyType(builder, keyType);\n MapType.addValueType(builder, valueTypeOffset);\n return MapType.endMapType(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SequenceType {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SequenceType\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SequenceType {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SequenceType= obj\n * @returns SequenceType\n */\n static getRootAsSequenceType(bb: flatbuffers.ByteBuffer, obj?: SequenceType): SequenceType {\n return (obj || new SequenceType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SequenceType= obj\n * @returns SequenceType\n */\n static getSizePrefixedRootAsSequenceType(bb: flatbuffers.ByteBuffer, obj?: SequenceType): SequenceType {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SequenceType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.TypeInfo= obj\n * @returns onnxruntime.experimental.fbs.TypeInfo|null\n */\n elemType(obj?: onnxruntime.experimental.fbs.TypeInfo): onnxruntime.experimental.fbs.TypeInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.TypeInfo())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSequenceType(builder: flatbuffers.Builder) {\n builder.startObject(1);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset elemTypeOffset\n */\n static addElemType(builder: flatbuffers.Builder, elemTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, elemTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSequenceType(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createSequenceType(builder: flatbuffers.Builder, elemTypeOffset: flatbuffers.Offset): flatbuffers.Offset {\n SequenceType.startSequenceType(builder);\n SequenceType.addElemType(builder, elemTypeOffset);\n return SequenceType.endSequenceType(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class EdgeEnd {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns EdgeEnd\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): EdgeEnd {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @returns number\n */\n nodeIndex(): number {\n return this.bb!.readUint32(this.bb_pos);\n }\n\n /**\n * @returns number\n */\n srcArgIndex(): number {\n return this.bb!.readInt32(this.bb_pos + 4);\n }\n\n /**\n * @returns number\n */\n dstArgIndex(): number {\n return this.bb!.readInt32(this.bb_pos + 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number node_index\n * @param number src_arg_index\n * @param number dst_arg_index\n * @returns flatbuffers.Offset\n */\n static createEdgeEnd(\n builder: flatbuffers.Builder, node_index: number, src_arg_index: number,\n dst_arg_index: number): flatbuffers.Offset {\n builder.prep(4, 12);\n builder.writeInt32(dst_arg_index);\n builder.writeInt32(src_arg_index);\n builder.writeInt32(node_index);\n return builder.offset();\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class NodeEdge {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns NodeEdge\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): NodeEdge {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param NodeEdge= obj\n * @returns NodeEdge\n */\n static getRootAsNodeEdge(bb: flatbuffers.ByteBuffer, obj?: NodeEdge): NodeEdge {\n return (obj || new NodeEdge()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param NodeEdge= obj\n * @returns NodeEdge\n */\n static getSizePrefixedRootAsNodeEdge(bb: flatbuffers.ByteBuffer, obj?: NodeEdge): NodeEdge {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new NodeEdge()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns number\n */\n nodeIndex(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.EdgeEnd= obj\n * @returns onnxruntime.experimental.fbs.EdgeEnd\n */\n inputEdges(index: number, obj?: onnxruntime.experimental.fbs.EdgeEnd): onnxruntime.experimental.fbs.EdgeEnd|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? 
(obj || new onnxruntime.experimental.fbs.EdgeEnd())\n .__init(this.bb!.__vector(this.bb_pos + offset) + index * 12, this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n inputEdgesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.EdgeEnd= obj\n * @returns onnxruntime.experimental.fbs.EdgeEnd\n */\n outputEdges(index: number, obj?: onnxruntime.experimental.fbs.EdgeEnd): onnxruntime.experimental.fbs.EdgeEnd|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.EdgeEnd())\n .__init(this.bb!.__vector(this.bb_pos + offset) + index * 12, this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n outputEdgesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startNodeEdge(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number nodeIndex\n */\n static addNodeIndex(builder: flatbuffers.Builder, nodeIndex: number) {\n builder.addFieldInt32(0, nodeIndex, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputEdgesOffset\n */\n static addInputEdges(builder: flatbuffers.Builder, inputEdgesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, inputEdgesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputEdgesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(12, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset outputEdgesOffset\n */\n static addOutputEdges(builder: flatbuffers.Builder, outputEdgesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, outputEdgesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOutputEdgesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(12, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endNodeEdge(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createNodeEdge(\n builder: flatbuffers.Builder, nodeIndex: number, inputEdgesOffset: flatbuffers.Offset,\n outputEdgesOffset: flatbuffers.Offset): flatbuffers.Offset {\n NodeEdge.startNodeEdge(builder);\n NodeEdge.addNodeIndex(builder, nodeIndex);\n NodeEdge.addInputEdges(builder, inputEdgesOffset);\n NodeEdge.addOutputEdges(builder, outputEdgesOffset);\n return NodeEdge.endNodeEdge(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Node {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Node\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Node {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Node= obj\n * @returns Node\n */\n static getRootAsNode(bb: flatbuffers.ByteBuffer, obj?: Node): Node {\n return (obj || new Node()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Node= obj\n * @returns Node\n */\n static 
getSizePrefixedRootAsNode(bb: flatbuffers.ByteBuffer, obj?: Node): Node {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Node()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n domain(): string|null;\n domain(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n domain(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n sinceVersion(): number {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns number\n */\n index(): number {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n opType(): string|null;\n opType(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n opType(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.NodeType\n */\n type(): onnxruntime.experimental.fbs.NodeType {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.NodeType.Primitive;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n executionProviderType(): string|null;\n executionProviderType(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n executionProviderType(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n inputs(index: number): string;\n inputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n inputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n inputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n outputs(index: number): string;\n outputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n outputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n outputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Attribute= obj\n * @returns onnxruntime.experimental.fbs.Attribute\n */\n attributes(index: number, obj?: onnxruntime.experimental.fbs.Attribute): onnxruntime.experimental.fbs.Attribute\n |null {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? (obj || new onnxruntime.experimental.fbs.Attribute())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n attributesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @returns number\n */\n inputArgCounts(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? this.bb!.readInt32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;\n }\n\n /**\n * @returns number\n */\n inputArgCountsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Int32Array\n */\n inputArgCountsArray(): Int32Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ?\n new Int32Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n implicitInputs(index: number): string;\n implicitInputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n implicitInputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n implicitInputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startNode(builder: flatbuffers.Builder) {\n builder.startObject(13);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset domainOffset\n */\n static addDomain(builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, domainOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number sinceVersion\n */\n static addSinceVersion(builder: flatbuffers.Builder, sinceVersion: number) {\n builder.addFieldInt32(3, sinceVersion, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number index\n */\n static addIndex(builder: flatbuffers.Builder, index: number) {\n builder.addFieldInt32(4, index, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset opTypeOffset\n */\n static addOpType(builder: flatbuffers.Builder, opTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, opTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.NodeType type\n */\n static addType(builder: flatbuffers.Builder, type: onnxruntime.experimental.fbs.NodeType) {\n builder.addFieldInt32(6, type, onnxruntime.experimental.fbs.NodeType.Primitive);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset executionProviderTypeOffset\n */\n static addExecutionProviderType(builder: flatbuffers.Builder, executionProviderTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, executionProviderTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputsOffset\n */\n static addInputs(builder: flatbuffers.Builder, inputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(8, inputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset outputsOffset\n */\n static addOutputs(builder: flatbuffers.Builder, outputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(9, outputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createOutputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOutputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset attributesOffset\n */\n static addAttributes(builder: flatbuffers.Builder, attributesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(10, attributesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createAttributesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startAttributesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputArgCountsOffset\n */\n static addInputArgCounts(builder: flatbuffers.Builder, inputArgCountsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(11, inputArgCountsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInputArgCountsVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt32(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputArgCountsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset implicitInputsOffset\n */\n static addImplicitInputs(builder: flatbuffers.Builder, implicitInputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(12, implicitInputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createImplicitInputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startImplicitInputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endNode(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createNode(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n domainOffset: flatbuffers.Offset, sinceVersion: number, index: number, opTypeOffset: flatbuffers.Offset,\n type: onnxruntime.experimental.fbs.NodeType, executionProviderTypeOffset: flatbuffers.Offset,\n inputsOffset: flatbuffers.Offset, outputsOffset: flatbuffers.Offset, attributesOffset: flatbuffers.Offset,\n inputArgCountsOffset: flatbuffers.Offset, implicitInputsOffset: flatbuffers.Offset): flatbuffers.Offset {\n Node.startNode(builder);\n Node.addName(builder, nameOffset);\n Node.addDocString(builder, docStringOffset);\n Node.addDomain(builder, domainOffset);\n Node.addSinceVersion(builder, sinceVersion);\n Node.addIndex(builder, index);\n Node.addOpType(builder, opTypeOffset);\n Node.addType(builder, type);\n Node.addExecutionProviderType(builder, executionProviderTypeOffset);\n Node.addInputs(builder, inputsOffset);\n Node.addOutputs(builder, outputsOffset);\n Node.addAttributes(builder, attributesOffset);\n Node.addInputArgCounts(builder, inputArgCountsOffset);\n Node.addImplicitInputs(builder, implicitInputsOffset);\n return Node.endNode(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class ValueInfo {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns ValueInfo\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): ValueInfo {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param ValueInfo= obj\n * @returns ValueInfo\n */\n static getRootAsValueInfo(bb: flatbuffers.ByteBuffer, obj?: ValueInfo): ValueInfo {\n return (obj || new ValueInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param ValueInfo= obj\n * @returns ValueInfo\n */\n static getSizePrefixedRootAsValueInfo(bb: flatbuffers.ByteBuffer, obj?: ValueInfo): ValueInfo {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new ValueInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.TypeInfo= obj\n * @returns onnxruntime.experimental.fbs.TypeInfo|null\n */\n type(obj?: onnxruntime.experimental.fbs.TypeInfo): onnxruntime.experimental.fbs.TypeInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.TypeInfo())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startValueInfo(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset typeOffset\n */\n static addType(builder: flatbuffers.Builder, typeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, typeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endValueInfo(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createValueInfo(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n typeOffset: flatbuffers.Offset): flatbuffers.Offset {\n ValueInfo.startValueInfo(builder);\n ValueInfo.addName(builder, nameOffset);\n ValueInfo.addDocString(builder, docStringOffset);\n ValueInfo.addType(builder, typeOffset);\n return ValueInfo.endValueInfo(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class TypeInfo {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns TypeInfo\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): TypeInfo {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TypeInfo= obj\n * @returns TypeInfo\n */\n static getRootAsTypeInfo(bb: flatbuffers.ByteBuffer, obj?: TypeInfo): TypeInfo {\n return (obj || new TypeInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TypeInfo= obj\n * @returns TypeInfo\n */\n static getSizePrefixedRootAsTypeInfo(bb: flatbuffers.ByteBuffer, obj?: TypeInfo): TypeInfo {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new TypeInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n denotation(): string|null;\n denotation(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n 
denotation(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TypeInfoValue\n */\n valueType(): onnxruntime.experimental.fbs.TypeInfoValue {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? /** */ (this.bb!.readUint8(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TypeInfoValue.NONE;\n }\n\n /**\n * @param flatbuffers.Table obj\n * @returns ?flatbuffers.Table\n */\n value(obj: T): T|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__union(obj, this.bb_pos + offset) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startTypeInfo(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset denotationOffset\n */\n static addDenotation(builder: flatbuffers.Builder, denotationOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, denotationOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TypeInfoValue valueType\n */\n static addValueType(builder: flatbuffers.Builder, valueType: onnxruntime.experimental.fbs.TypeInfoValue) {\n builder.addFieldInt8(1, valueType, onnxruntime.experimental.fbs.TypeInfoValue.NONE);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valueOffset\n */\n static addValue(builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, valueOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endTypeInfo(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createTypeInfo(\n builder: flatbuffers.Builder, denotationOffset: flatbuffers.Offset,\n valueType: onnxruntime.experimental.fbs.TypeInfoValue, valueOffset: flatbuffers.Offset): flatbuffers.Offset {\n TypeInfo.startTypeInfo(builder);\n TypeInfo.addDenotation(builder, denotationOffset);\n TypeInfo.addValueType(builder, valueType);\n TypeInfo.addValue(builder, valueOffset);\n return TypeInfo.endTypeInfo(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class OperatorSetId {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns OperatorSetId\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): OperatorSetId {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param OperatorSetId= obj\n * @returns OperatorSetId\n */\n static getRootAsOperatorSetId(bb: flatbuffers.ByteBuffer, obj?: OperatorSetId): OperatorSetId {\n return (obj || new OperatorSetId()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param OperatorSetId= obj\n * @returns OperatorSetId\n */\n static getSizePrefixedRootAsOperatorSetId(bb: flatbuffers.ByteBuffer, obj?: OperatorSetId): OperatorSetId {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new OperatorSetId()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n domain(): string|null;\n domain(optionalEncoding: flatbuffers.Encoding): 
string|Uint8Array|null;\n domain(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n version(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startOperatorSetId(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset domainOffset\n */\n static addDomain(builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, domainOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long version\n */\n static addVersion(builder: flatbuffers.Builder, version: flatbuffers.Long) {\n builder.addFieldInt64(1, version, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endOperatorSetId(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createOperatorSetId(\n builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset, version: flatbuffers.Long): flatbuffers.Offset {\n OperatorSetId.startOperatorSetId(builder);\n OperatorSetId.addDomain(builder, domainOffset);\n OperatorSetId.addVersion(builder, version);\n return OperatorSetId.endOperatorSetId(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Tensor {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Tensor\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Tensor {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Tensor= obj\n * @returns Tensor\n */\n static getRootAsTensor(bb: flatbuffers.ByteBuffer, obj?: Tensor): Tensor {\n return (obj || new Tensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Tensor= obj\n * @returns Tensor\n */\n static getSizePrefixedRootAsTensor(bb: flatbuffers.ByteBuffer, obj?: Tensor): Tensor {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Tensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n dims(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? 
this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n dimsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TensorDataType\n */\n dataType(): onnxruntime.experimental.fbs.TensorDataType {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TensorDataType.UNDEFINED;\n }\n\n /**\n * @param number index\n * @returns number\n */\n rawData(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;\n }\n\n /**\n * @returns number\n */\n rawDataLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Uint8Array\n */\n rawDataArray(): Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ?\n new Uint8Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n stringData(index: number): string;\n stringData(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n stringData(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n stringDataLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startTensor(builder: flatbuffers.Builder) {\n builder.startObject(6);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimsOffset\n */\n static addDims(builder: flatbuffers.Builder, dimsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, dimsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createDimsVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startDimsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TensorDataType dataType\n */\n static addDataType(builder: flatbuffers.Builder, dataType: onnxruntime.experimental.fbs.TensorDataType) {\n builder.addFieldInt32(3, dataType, onnxruntime.experimental.fbs.TensorDataType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset rawDataOffset\n */\n static addRawData(builder: flatbuffers.Builder, rawDataOffset: flatbuffers.Offset) {\n builder.addFieldOffset(4, rawDataOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createRawDataVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(1, data.length, 1);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt8(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startRawDataVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(1, numElems, 1);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset stringDataOffset\n */\n static addStringData(builder: flatbuffers.Builder, stringDataOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, stringDataOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createStringDataVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startStringDataVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endTensor(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createTensor(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n dimsOffset: flatbuffers.Offset, dataType: onnxruntime.experimental.fbs.TensorDataType,\n rawDataOffset: flatbuffers.Offset, stringDataOffset: flatbuffers.Offset): flatbuffers.Offset {\n Tensor.startTensor(builder);\n Tensor.addName(builder, nameOffset);\n Tensor.addDocString(builder, docStringOffset);\n Tensor.addDims(builder, dimsOffset);\n Tensor.addDataType(builder, dataType);\n Tensor.addRawData(builder, rawDataOffset);\n Tensor.addStringData(builder, stringDataOffset);\n return Tensor.endTensor(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SparseTensor {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SparseTensor\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SparseTensor {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SparseTensor= obj\n * @returns SparseTensor\n */\n static getRootAsSparseTensor(bb: flatbuffers.ByteBuffer, obj?: SparseTensor): SparseTensor {\n return (obj || new SparseTensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SparseTensor= obj\n * @returns SparseTensor\n */\n static getSizePrefixedRootAsSparseTensor(bb: flatbuffers.ByteBuffer, obj?: SparseTensor): SparseTensor {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SparseTensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor|null\n */\n values(obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor|null\n */\n indices(obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n dims(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? 
this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n dimsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSparseTensor(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valuesOffset\n */\n static addValues(builder: flatbuffers.Builder, valuesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, valuesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset indicesOffset\n */\n static addIndices(builder: flatbuffers.Builder, indicesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, indicesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimsOffset\n */\n static addDims(builder: flatbuffers.Builder, dimsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, dimsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createDimsVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startDimsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSparseTensor(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createSparseTensor(\n builder: flatbuffers.Builder, valuesOffset: flatbuffers.Offset, indicesOffset: flatbuffers.Offset,\n dimsOffset: flatbuffers.Offset): flatbuffers.Offset {\n SparseTensor.startSparseTensor(builder);\n SparseTensor.addValues(builder, valuesOffset);\n SparseTensor.addIndices(builder, indicesOffset);\n SparseTensor.addDims(builder, dimsOffset);\n return SparseTensor.endSparseTensor(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Attribute {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Attribute\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Attribute {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Attribute= obj\n * @returns Attribute\n */\n static getRootAsAttribute(bb: flatbuffers.ByteBuffer, obj?: Attribute): Attribute {\n return (obj || new Attribute()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Attribute= obj\n * @returns Attribute\n */\n static getSizePrefixedRootAsAttribute(bb: flatbuffers.ByteBuffer, obj?: Attribute): Attribute {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Attribute()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: 
any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.AttributeType\n */\n type(): onnxruntime.experimental.fbs.AttributeType {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.AttributeType.UNDEFINED;\n }\n\n /**\n * @returns number\n */\n f(): number {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.readFloat32(this.bb_pos + offset) : 0.0;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n i(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n s(): string|null;\n s(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n s(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor|null\n */\n t(obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Graph= obj\n * @returns onnxruntime.experimental.fbs.Graph|null\n */\n g(obj?: onnxruntime.experimental.fbs.Graph): onnxruntime.experimental.fbs.Graph|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? (obj || new onnxruntime.experimental.fbs.Graph())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param number index\n * @returns number\n */\n floats(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.readFloat32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;\n }\n\n /**\n * @returns number\n */\n floatsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Float32Array\n */\n floatsArray(): Float32Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ?\n new Float32Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n ints(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? 
this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n intsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n strings(index: number): string;\n strings(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n strings(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n stringsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor\n */\n tensors(index: number, obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n tensorsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Graph= obj\n * @returns onnxruntime.experimental.fbs.Graph\n */\n graphs(index: number, obj?: onnxruntime.experimental.fbs.Graph): onnxruntime.experimental.fbs.Graph|null {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? (obj || new onnxruntime.experimental.fbs.Graph())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n graphsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startAttribute(builder: flatbuffers.Builder) {\n builder.startObject(13);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.AttributeType type\n */\n static addType(builder: flatbuffers.Builder, type: onnxruntime.experimental.fbs.AttributeType) {\n builder.addFieldInt32(2, type, onnxruntime.experimental.fbs.AttributeType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number f\n */\n static addF(builder: flatbuffers.Builder, f: number) {\n builder.addFieldFloat32(3, f, 0.0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long i\n */\n static addI(builder: flatbuffers.Builder, i: flatbuffers.Long) {\n builder.addFieldInt64(4, i, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sOffset\n */\n static addS(builder: flatbuffers.Builder, sOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, sOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset tOffset\n */\n static addT(builder: flatbuffers.Builder, tOffset: flatbuffers.Offset) {\n builder.addFieldOffset(6, tOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset gOffset\n */\n static addG(builder: flatbuffers.Builder, gOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, gOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset floatsOffset\n */\n static addFloats(builder: flatbuffers.Builder, floatsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(8, floatsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createFloatsVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addFloat32(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startFloatsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset intsOffset\n */\n static addInts(builder: flatbuffers.Builder, intsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(9, intsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createIntsVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startIntsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset stringsOffset\n */\n static addStrings(builder: flatbuffers.Builder, stringsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(10, stringsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createStringsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startStringsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset tensorsOffset\n */\n static addTensors(builder: flatbuffers.Builder, tensorsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(11, tensorsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createTensorsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startTensorsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphsOffset\n */\n static addGraphs(builder: flatbuffers.Builder, graphsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(12, graphsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createGraphsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startGraphsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endAttribute(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createAttribute(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n type: onnxruntime.experimental.fbs.AttributeType, f: number, i: flatbuffers.Long, sOffset: flatbuffers.Offset,\n tOffset: flatbuffers.Offset, gOffset: flatbuffers.Offset, floatsOffset: flatbuffers.Offset,\n intsOffset: flatbuffers.Offset, stringsOffset: flatbuffers.Offset, tensorsOffset: flatbuffers.Offset,\n graphsOffset: flatbuffers.Offset): flatbuffers.Offset {\n Attribute.startAttribute(builder);\n Attribute.addName(builder, nameOffset);\n Attribute.addDocString(builder, docStringOffset);\n Attribute.addType(builder, type);\n Attribute.addF(builder, f);\n Attribute.addI(builder, i);\n Attribute.addS(builder, sOffset);\n Attribute.addT(builder, tOffset);\n Attribute.addG(builder, gOffset);\n Attribute.addFloats(builder, floatsOffset);\n Attribute.addInts(builder, intsOffset);\n Attribute.addStrings(builder, stringsOffset);\n Attribute.addTensors(builder, tensorsOffset);\n Attribute.addGraphs(builder, graphsOffset);\n return Attribute.endAttribute(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Graph {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Graph\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Graph {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Graph= obj\n * @returns Graph\n */\n static getRootAsGraph(bb: flatbuffers.ByteBuffer, obj?: Graph): Graph {\n return (obj || new Graph()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Graph= obj\n * @returns Graph\n */\n static getSizePrefixedRootAsGraph(bb: flatbuffers.ByteBuffer, obj?: Graph): Graph {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Graph()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor\n */\n initializers(index: number, obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n initializersLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.ValueInfo= obj\n * @returns onnxruntime.experimental.fbs.ValueInfo\n */\n nodeArgs(index: number, obj?: onnxruntime.experimental.fbs.ValueInfo): onnxruntime.experimental.fbs.ValueInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.ValueInfo())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n nodeArgsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Node= obj\n * @returns onnxruntime.experimental.fbs.Node\n */\n nodes(index: number, obj?: onnxruntime.experimental.fbs.Node): onnxruntime.experimental.fbs.Node|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.Node())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n nodesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns number\n */\n maxNodeIndex(): number {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.NodeEdge= obj\n * @returns onnxruntime.experimental.fbs.NodeEdge\n */\n nodeEdges(index: number, obj?: onnxruntime.experimental.fbs.NodeEdge): onnxruntime.experimental.fbs.NodeEdge|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? (obj || new onnxruntime.experimental.fbs.NodeEdge())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n nodeEdgesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n inputs(index: number): string;\n inputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n inputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n inputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n outputs(index: number): string;\n outputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n outputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n outputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.SparseTensor= obj\n * @returns onnxruntime.experimental.fbs.SparseTensor\n */\n sparseInitializers(index: number, obj?: onnxruntime.experimental.fbs.SparseTensor):\n onnxruntime.experimental.fbs.SparseTensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? (obj || new onnxruntime.experimental.fbs.SparseTensor())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n sparseInitializersLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startGraph(builder: flatbuffers.Builder) {\n builder.startObject(8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset initializersOffset\n */\n static addInitializers(builder: flatbuffers.Builder, initializersOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, initializersOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInitializersVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInitializersVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodeArgsOffset\n */\n static addNodeArgs(builder: flatbuffers.Builder, nodeArgsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, nodeArgsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createNodeArgsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodeArgsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodesOffset\n */\n static addNodes(builder: flatbuffers.Builder, nodesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, nodesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createNodesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number maxNodeIndex\n */\n static addMaxNodeIndex(builder: flatbuffers.Builder, maxNodeIndex: number) {\n builder.addFieldInt32(3, maxNodeIndex, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodeEdgesOffset\n */\n static addNodeEdges(builder: flatbuffers.Builder, nodeEdgesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(4, nodeEdgesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createNodeEdgesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodeEdgesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputsOffset\n */\n static addInputs(builder: flatbuffers.Builder, inputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, inputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset outputsOffset\n */\n static addOutputs(builder: flatbuffers.Builder, outputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(6, outputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createOutputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOutputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sparseInitializersOffset\n */\n static addSparseInitializers(builder: flatbuffers.Builder, sparseInitializersOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, sparseInitializersOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createSparseInitializersVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]):\n flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startSparseInitializersVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endGraph(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createGraph(\n builder: flatbuffers.Builder, initializersOffset: flatbuffers.Offset, nodeArgsOffset: flatbuffers.Offset,\n nodesOffset: flatbuffers.Offset, maxNodeIndex: number, nodeEdgesOffset: flatbuffers.Offset,\n inputsOffset: flatbuffers.Offset, outputsOffset: flatbuffers.Offset,\n sparseInitializersOffset: flatbuffers.Offset): flatbuffers.Offset {\n Graph.startGraph(builder);\n Graph.addInitializers(builder, initializersOffset);\n Graph.addNodeArgs(builder, nodeArgsOffset);\n Graph.addNodes(builder, nodesOffset);\n Graph.addMaxNodeIndex(builder, maxNodeIndex);\n Graph.addNodeEdges(builder, nodeEdgesOffset);\n Graph.addInputs(builder, inputsOffset);\n Graph.addOutputs(builder, outputsOffset);\n Graph.addSparseInitializers(builder, sparseInitializersOffset);\n return Graph.endGraph(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Model {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Model\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Model {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Model= obj\n * @returns Model\n */\n static getRootAsModel(bb: flatbuffers.ByteBuffer, obj?: Model): Model {\n return (obj || new Model()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Model= obj\n * @returns Model\n */\n static getSizePrefixedRootAsModel(bb: flatbuffers.ByteBuffer, obj?: Model): Model {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Model()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns flatbuffers.Long\n */\n irVersion(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.OperatorSetId= obj\n * @returns onnxruntime.experimental.fbs.OperatorSetId\n */\n opsetImport(index: number, obj?: onnxruntime.experimental.fbs.OperatorSetId):\n onnxruntime.experimental.fbs.OperatorSetId|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.OperatorSetId())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n opsetImportLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n producerName(): string|null;\n producerName(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n producerName(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n producerVersion(): string|null;\n producerVersion(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n producerVersion(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n domain(): string|null;\n domain(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n domain(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n modelVersion(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Graph= obj\n * @returns onnxruntime.experimental.fbs.Graph|null\n */\n graph(obj?: onnxruntime.experimental.fbs.Graph): onnxruntime.experimental.fbs.Graph|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? (obj || new onnxruntime.experimental.fbs.Graph())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n graphDocString(): string|null;\n graphDocString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n graphDocString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startModel(builder: flatbuffers.Builder) {\n builder.startObject(9);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long irVersion\n */\n static addIrVersion(builder: flatbuffers.Builder, irVersion: flatbuffers.Long) {\n builder.addFieldInt64(0, irVersion, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset opsetImportOffset\n */\n static addOpsetImport(builder: flatbuffers.Builder, opsetImportOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, opsetImportOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createOpsetImportVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOpsetImportVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset producerNameOffset\n */\n static addProducerName(builder: flatbuffers.Builder, producerNameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, producerNameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset producerVersionOffset\n */\n static addProducerVersion(builder: flatbuffers.Builder, producerVersionOffset: flatbuffers.Offset) {\n builder.addFieldOffset(3, producerVersionOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset domainOffset\n */\n static addDomain(builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset) {\n builder.addFieldOffset(4, domainOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long modelVersion\n */\n static addModelVersion(builder: flatbuffers.Builder, modelVersion: flatbuffers.Long) {\n builder.addFieldInt64(5, modelVersion, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(6, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphOffset\n */\n static addGraph(builder: flatbuffers.Builder, graphOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, graphOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphDocStringOffset\n */\n static addGraphDocString(builder: flatbuffers.Builder, graphDocStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(8, graphDocStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endModel(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createModel(\n builder: flatbuffers.Builder, irVersion: flatbuffers.Long, opsetImportOffset: flatbuffers.Offset,\n producerNameOffset: flatbuffers.Offset, producerVersionOffset: flatbuffers.Offset,\n domainOffset: flatbuffers.Offset, modelVersion: flatbuffers.Long, docStringOffset: flatbuffers.Offset,\n graphOffset: flatbuffers.Offset, graphDocStringOffset: flatbuffers.Offset): flatbuffers.Offset {\n Model.startModel(builder);\n Model.addIrVersion(builder, irVersion);\n Model.addOpsetImport(builder, opsetImportOffset);\n Model.addProducerName(builder, producerNameOffset);\n Model.addProducerVersion(builder, producerVersionOffset);\n Model.addDomain(builder, domainOffset);\n Model.addModelVersion(builder, modelVersion);\n Model.addDocString(builder, docStringOffset);\n Model.addGraph(builder, graphOffset);\n Model.addGraphDocString(builder, graphDocStringOffset);\n return Model.endModel(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class KernelCreateInfos {\n bb: flatbuffers.ByteBuffer|null = null;\n\n 
bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns KernelCreateInfos\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): KernelCreateInfos {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param KernelCreateInfos= obj\n * @returns KernelCreateInfos\n */\n static getRootAsKernelCreateInfos(bb: flatbuffers.ByteBuffer, obj?: KernelCreateInfos): KernelCreateInfos {\n return (obj || new KernelCreateInfos()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param KernelCreateInfos= obj\n * @returns KernelCreateInfos\n */\n static getSizePrefixedRootAsKernelCreateInfos(bb: flatbuffers.ByteBuffer, obj?: KernelCreateInfos):\n KernelCreateInfos {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new KernelCreateInfos()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param number index\n * @returns number\n */\n nodeIndices(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.readUint32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;\n }\n\n /**\n * @returns number\n */\n nodeIndicesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Uint32Array\n */\n nodeIndicesArray(): Uint32Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ?\n new Uint32Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n kernelDefHashes(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.readUint64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n kernelDefHashesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startKernelCreateInfos(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodeIndicesOffset\n */\n static addNodeIndices(builder: flatbuffers.Builder, nodeIndicesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nodeIndicesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createNodeIndicesVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt32(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodeIndicesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset kernelDefHashesOffset\n */\n static addKernelDefHashes(builder: flatbuffers.Builder, kernelDefHashesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, kernelDefHashesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createKernelDefHashesVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startKernelDefHashesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endKernelCreateInfos(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createKernelCreateInfos(\n builder: flatbuffers.Builder, nodeIndicesOffset: flatbuffers.Offset,\n kernelDefHashesOffset: flatbuffers.Offset): flatbuffers.Offset {\n KernelCreateInfos.startKernelCreateInfos(builder);\n KernelCreateInfos.addNodeIndices(builder, nodeIndicesOffset);\n KernelCreateInfos.addKernelDefHashes(builder, kernelDefHashesOffset);\n return KernelCreateInfos.endKernelCreateInfos(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SubGraphSessionState {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SubGraphSessionState\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SubGraphSessionState {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SubGraphSessionState= obj\n * @returns SubGraphSessionState\n */\n static getRootAsSubGraphSessionState(bb: flatbuffers.ByteBuffer, obj?: SubGraphSessionState): SubGraphSessionState {\n return (obj || new SubGraphSessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SubGraphSessionState= obj\n * @returns SubGraphSessionState\n */\n static getSizePrefixedRootAsSubGraphSessionState(bb: flatbuffers.ByteBuffer, obj?: SubGraphSessionState):\n SubGraphSessionState {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SubGraphSessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n graphId(): string|null;\n graphId(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n graphId(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.SessionState= obj\n * @returns onnxruntime.experimental.fbs.SessionState|null\n */\n sessionState(obj?: onnxruntime.experimental.fbs.SessionState): onnxruntime.experimental.fbs.SessionState|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.SessionState())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSubGraphSessionState(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphIdOffset\n */\n static addGraphId(builder: flatbuffers.Builder, graphIdOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, graphIdOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sessionStateOffset\n */\n static addSessionState(builder: flatbuffers.Builder, sessionStateOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, sessionStateOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSubGraphSessionState(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n builder.requiredField(offset, 4); // graph_id\n return offset;\n }\n\n static createSubGraphSessionState(\n builder: flatbuffers.Builder, graphIdOffset: flatbuffers.Offset,\n sessionStateOffset: flatbuffers.Offset): flatbuffers.Offset {\n SubGraphSessionState.startSubGraphSessionState(builder);\n SubGraphSessionState.addGraphId(builder, graphIdOffset);\n SubGraphSessionState.addSessionState(builder, sessionStateOffset);\n return SubGraphSessionState.endSubGraphSessionState(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SessionState {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SessionState\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SessionState {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SessionState= obj\n * @returns SessionState\n */\n static getRootAsSessionState(bb: flatbuffers.ByteBuffer, obj?: SessionState): SessionState {\n return (obj || new SessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SessionState= obj\n * @returns SessionState\n */\n static getSizePrefixedRootAsSessionState(bb: flatbuffers.ByteBuffer, obj?: SessionState): SessionState {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.KernelCreateInfos= obj\n * @returns onnxruntime.experimental.fbs.KernelCreateInfos|null\n */\n kernels(obj?: onnxruntime.experimental.fbs.KernelCreateInfos): onnxruntime.experimental.fbs.KernelCreateInfos|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.KernelCreateInfos())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.SubGraphSessionState= obj\n * @returns onnxruntime.experimental.fbs.SubGraphSessionState\n */\n subGraphSessionStates(index: number, obj?: onnxruntime.experimental.fbs.SubGraphSessionState):\n onnxruntime.experimental.fbs.SubGraphSessionState|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.SubGraphSessionState())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) 
:\n null;\n }\n\n /**\n * @returns number\n */\n subGraphSessionStatesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSessionState(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset kernelsOffset\n */\n static addKernels(builder: flatbuffers.Builder, kernelsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, kernelsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset subGraphSessionStatesOffset\n */\n static addSubGraphSessionStates(builder: flatbuffers.Builder, subGraphSessionStatesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, subGraphSessionStatesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createSubGraphSessionStatesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]):\n flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startSubGraphSessionStatesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSessionState(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createSessionState(\n builder: flatbuffers.Builder, kernelsOffset: flatbuffers.Offset,\n subGraphSessionStatesOffset: flatbuffers.Offset): flatbuffers.Offset {\n SessionState.startSessionState(builder);\n SessionState.addKernels(builder, kernelsOffset);\n SessionState.addSubGraphSessionStates(builder, subGraphSessionStatesOffset);\n return SessionState.endSessionState(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class InferenceSession {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns InferenceSession\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): InferenceSession {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param InferenceSession= obj\n * @returns InferenceSession\n */\n static getRootAsInferenceSession(bb: flatbuffers.ByteBuffer, obj?: InferenceSession): InferenceSession {\n return (obj || new InferenceSession()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param InferenceSession= obj\n * @returns InferenceSession\n */\n static getSizePrefixedRootAsInferenceSession(bb: flatbuffers.ByteBuffer, obj?: InferenceSession): InferenceSession {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new InferenceSession()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @returns boolean\n */\n static bufferHasIdentifier(bb: flatbuffers.ByteBuffer): boolean {\n return bb.__has_identifier('ORTM');\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n ortVersion(): string|null;\n ortVersion(optionalEncoding: 
flatbuffers.Encoding): string|Uint8Array|null;\n ortVersion(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Model= obj\n * @returns onnxruntime.experimental.fbs.Model|null\n */\n model(obj?: onnxruntime.experimental.fbs.Model): onnxruntime.experimental.fbs.Model|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.Model())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.SessionState= obj\n * @returns onnxruntime.experimental.fbs.SessionState|null\n */\n sessionState(obj?: onnxruntime.experimental.fbs.SessionState): onnxruntime.experimental.fbs.SessionState|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.SessionState())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startInferenceSession(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset ortVersionOffset\n */\n static addOrtVersion(builder: flatbuffers.Builder, ortVersionOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, ortVersionOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset modelOffset\n */\n static addModel(builder: flatbuffers.Builder, modelOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, modelOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sessionStateOffset\n */\n static addSessionState(builder: flatbuffers.Builder, sessionStateOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, sessionStateOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endInferenceSession(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset offset\n */\n static finishInferenceSessionBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {\n builder.finish(offset, 'ORTM');\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset offset\n */\n static finishSizePrefixedInferenceSessionBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {\n builder.finish(offset, 'ORTM', true);\n }\n\n static createInferenceSession(\n builder: flatbuffers.Builder, ortVersionOffset: flatbuffers.Offset, modelOffset: flatbuffers.Offset,\n sessionStateOffset: flatbuffers.Offset): flatbuffers.Offset {\n InferenceSession.startInferenceSession(builder);\n InferenceSession.addOrtVersion(builder, ortVersionOffset);\n InferenceSession.addModel(builder, modelOffset);\n InferenceSession.addSessionState(builder, sessionStateOffset);\n return InferenceSession.endInferenceSession(builder);\n }\n }\n}\n", "\"use strict\";\r\nmodule.exports = asPromise;\r\n\r\n/**\r\n * Callback as used by {@link util.asPromise}.\r\n * @typedef asPromiseCallback\r\n * @type {function}\r\n * @param {Error|null} error Error, if any\r\n * @param {...*} params Additional arguments\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Returns a promise from a node-style callback function.\r\n * @memberof util\r\n * 
@param {asPromiseCallback} fn Function to call\r\n * @param {*} ctx Function context\r\n * @param {...*} params Function arguments\r\n * @returns {Promise<*>} Promisified function\r\n */\r\nfunction asPromise(fn, ctx/*, varargs */) {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0,\r\n index = 2,\r\n pending = true;\r\n while (index < arguments.length)\r\n params[offset++] = arguments[index++];\r\n return new Promise(function executor(resolve, reject) {\r\n params[offset] = function callback(err/*, varargs */) {\r\n if (pending) {\r\n pending = false;\r\n if (err)\r\n reject(err);\r\n else {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0;\r\n while (offset < params.length)\r\n params[offset++] = arguments[offset];\r\n resolve.apply(null, params);\r\n }\r\n }\r\n };\r\n try {\r\n fn.apply(ctx || null, params);\r\n } catch (err) {\r\n if (pending) {\r\n pending = false;\r\n reject(err);\r\n }\r\n }\r\n });\r\n}\r\n", "\"use strict\";\r\n\r\n/**\r\n * A minimal base64 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar base64 = exports;\r\n\r\n/**\r\n * Calculates the byte length of a base64 encoded string.\r\n * @param {string} string Base64 encoded string\r\n * @returns {number} Byte length\r\n */\r\nbase64.length = function length(string) {\r\n var p = string.length;\r\n if (!p)\r\n return 0;\r\n var n = 0;\r\n while (--p % 4 > 1 && string.charAt(p) === \"=\")\r\n ++n;\r\n return Math.ceil(string.length * 3) / 4 - n;\r\n};\r\n\r\n// Base64 encoding table\r\nvar b64 = new Array(64);\r\n\r\n// Base64 decoding table\r\nvar s64 = new Array(123);\r\n\r\n// 65..90, 97..122, 48..57, 43, 47\r\nfor (var i = 0; i < 64;)\r\n s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i - 59 | 43] = i++;\r\n\r\n/**\r\n * Encodes a buffer to a base64 encoded string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} Base64 encoded string\r\n */\r\nbase64.encode = function encode(buffer, start, end) {\r\n var parts = null,\r\n chunk = [];\r\n var i = 0, // output index\r\n j = 0, // goto index\r\n t; // temporary\r\n while (start < end) {\r\n var b = buffer[start++];\r\n switch (j) {\r\n case 0:\r\n chunk[i++] = b64[b >> 2];\r\n t = (b & 3) << 4;\r\n j = 1;\r\n break;\r\n case 1:\r\n chunk[i++] = b64[t | b >> 4];\r\n t = (b & 15) << 2;\r\n j = 2;\r\n break;\r\n case 2:\r\n chunk[i++] = b64[t | b >> 6];\r\n chunk[i++] = b64[b & 63];\r\n j = 0;\r\n break;\r\n }\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (j) {\r\n chunk[i++] = b64[t];\r\n chunk[i++] = 61;\r\n if (j === 1)\r\n chunk[i++] = 61;\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\nvar invalidEncoding = \"invalid encoding\";\r\n\r\n/**\r\n * Decodes a base64 encoded string to a buffer.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Number of bytes written\r\n * @throws {Error} If encoding is invalid\r\n */\r\nbase64.decode = function decode(string, buffer, offset) {\r\n var start = offset;\r\n var j = 0, // goto index\r\n t; // temporary\r\n for (var i = 0; i < string.length;) {\r\n var c = 
string.charCodeAt(i++);\r\n if (c === 61 && j > 1)\r\n break;\r\n if ((c = s64[c]) === undefined)\r\n throw Error(invalidEncoding);\r\n switch (j) {\r\n case 0:\r\n t = c;\r\n j = 1;\r\n break;\r\n case 1:\r\n buffer[offset++] = t << 2 | (c & 48) >> 4;\r\n t = c;\r\n j = 2;\r\n break;\r\n case 2:\r\n buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2;\r\n t = c;\r\n j = 3;\r\n break;\r\n case 3:\r\n buffer[offset++] = (t & 3) << 6 | c;\r\n j = 0;\r\n break;\r\n }\r\n }\r\n if (j === 1)\r\n throw Error(invalidEncoding);\r\n return offset - start;\r\n};\r\n\r\n/**\r\n * Tests if the specified string appears to be base64 encoded.\r\n * @param {string} string String to test\r\n * @returns {boolean} `true` if probably base64 encoded, otherwise false\r\n */\r\nbase64.test = function test(string) {\r\n return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string);\r\n};\r\n", "\"use strict\";\r\nmodule.exports = EventEmitter;\r\n\r\n/**\r\n * Constructs a new event emitter instance.\r\n * @classdesc A minimal event emitter.\r\n * @memberof util\r\n * @constructor\r\n */\r\nfunction EventEmitter() {\r\n\r\n /**\r\n * Registered listeners.\r\n * @type {Object.}\r\n * @private\r\n */\r\n this._listeners = {};\r\n}\r\n\r\n/**\r\n * Registers an event listener.\r\n * @param {string} evt Event name\r\n * @param {function} fn Listener\r\n * @param {*} [ctx] Listener context\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.on = function on(evt, fn, ctx) {\r\n (this._listeners[evt] || (this._listeners[evt] = [])).push({\r\n fn : fn,\r\n ctx : ctx || this\r\n });\r\n return this;\r\n};\r\n\r\n/**\r\n * Removes an event listener or any matching listeners if arguments are omitted.\r\n * @param {string} [evt] Event name. Removes all listeners if omitted.\r\n * @param {function} [fn] Listener to remove. 
Removes all listeners of `evt` if omitted.\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.off = function off(evt, fn) {\r\n if (evt === undefined)\r\n this._listeners = {};\r\n else {\r\n if (fn === undefined)\r\n this._listeners[evt] = [];\r\n else {\r\n var listeners = this._listeners[evt];\r\n for (var i = 0; i < listeners.length;)\r\n if (listeners[i].fn === fn)\r\n listeners.splice(i, 1);\r\n else\r\n ++i;\r\n }\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Emits an event by calling its listeners with the specified arguments.\r\n * @param {string} evt Event name\r\n * @param {...*} args Arguments\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.emit = function emit(evt) {\r\n var listeners = this._listeners[evt];\r\n if (listeners) {\r\n var args = [],\r\n i = 1;\r\n for (; i < arguments.length;)\r\n args.push(arguments[i++]);\r\n for (i = 0; i < listeners.length;)\r\n listeners[i].fn.apply(listeners[i++].ctx, args);\r\n }\r\n return this;\r\n};\r\n", "\"use strict\";\r\n\r\nmodule.exports = factory(factory);\r\n\r\n/**\r\n * Reads / writes floats / doubles from / to buffers.\r\n * @name util.float\r\n * @namespace\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using little endian byte order.\r\n * @name util.float.writeFloatLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using big endian byte order.\r\n * @name util.float.writeFloatBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using little endian byte order.\r\n * @name util.float.readFloatLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using big endian byte order.\r\n * @name util.float.readFloatBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using little endian byte order.\r\n * @name util.float.writeDoubleLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using big endian byte order.\r\n * @name util.float.writeDoubleBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using little endian byte order.\r\n * @name util.float.readDoubleLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using big endian byte order.\r\n * @name util.float.readDoubleBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n// Factory function for the purpose of node-based testing in modified global 
environments\r\nfunction factory(exports) {\r\n\r\n // float: typed array\r\n if (typeof Float32Array !== \"undefined\") (function() {\r\n\r\n var f32 = new Float32Array([ -0 ]),\r\n f8b = new Uint8Array(f32.buffer),\r\n le = f8b[3] === 128;\r\n\r\n function writeFloat_f32_cpy(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n }\r\n\r\n function writeFloat_f32_rev(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[3];\r\n buf[pos + 1] = f8b[2];\r\n buf[pos + 2] = f8b[1];\r\n buf[pos + 3] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy;\r\n\r\n function readFloat_f32_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n function readFloat_f32_rev(buf, pos) {\r\n f8b[3] = buf[pos ];\r\n f8b[2] = buf[pos + 1];\r\n f8b[1] = buf[pos + 2];\r\n f8b[0] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy;\r\n\r\n // float: ieee754\r\n })(); else (function() {\r\n\r\n function writeFloat_ieee754(writeUint, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0)\r\n writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos);\r\n else if (isNaN(val))\r\n writeUint(2143289344, buf, pos);\r\n else if (val > 3.4028234663852886e+38) // +-Infinity\r\n writeUint((sign << 31 | 2139095040) >>> 0, buf, pos);\r\n else if (val < 1.1754943508222875e-38) // denormal\r\n writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos);\r\n else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2),\r\n mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607;\r\n writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos);\r\n }\r\n }\r\n\r\n exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE);\r\n exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE);\r\n\r\n function readFloat_ieee754(readUint, buf, pos) {\r\n var uint = readUint(buf, pos),\r\n sign = (uint >> 31) * 2 + 1,\r\n exponent = uint >>> 23 & 255,\r\n mantissa = uint & 8388607;\r\n return exponent === 255\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? 
sign * 1.401298464324817e-45 * mantissa\r\n : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608);\r\n }\r\n\r\n exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE);\r\n exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE);\r\n\r\n })();\r\n\r\n // double: typed array\r\n if (typeof Float64Array !== \"undefined\") (function() {\r\n\r\n var f64 = new Float64Array([-0]),\r\n f8b = new Uint8Array(f64.buffer),\r\n le = f8b[7] === 128;\r\n\r\n function writeDouble_f64_cpy(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n buf[pos + 4] = f8b[4];\r\n buf[pos + 5] = f8b[5];\r\n buf[pos + 6] = f8b[6];\r\n buf[pos + 7] = f8b[7];\r\n }\r\n\r\n function writeDouble_f64_rev(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[7];\r\n buf[pos + 1] = f8b[6];\r\n buf[pos + 2] = f8b[5];\r\n buf[pos + 3] = f8b[4];\r\n buf[pos + 4] = f8b[3];\r\n buf[pos + 5] = f8b[2];\r\n buf[pos + 6] = f8b[1];\r\n buf[pos + 7] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy;\r\n\r\n function readDouble_f64_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n f8b[4] = buf[pos + 4];\r\n f8b[5] = buf[pos + 5];\r\n f8b[6] = buf[pos + 6];\r\n f8b[7] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n function readDouble_f64_rev(buf, pos) {\r\n f8b[7] = buf[pos ];\r\n f8b[6] = buf[pos + 1];\r\n f8b[5] = buf[pos + 2];\r\n f8b[4] = buf[pos + 3];\r\n f8b[3] = buf[pos + 4];\r\n f8b[2] = buf[pos + 5];\r\n f8b[1] = buf[pos + 6];\r\n f8b[0] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy;\r\n\r\n // double: ieee754\r\n })(); else (function() {\r\n\r\n function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1);\r\n } else if (isNaN(val)) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(2146959360, buf, pos + off1);\r\n } else if (val > 1.7976931348623157e+308) { // +-Infinity\r\n writeUint(0, buf, pos + off0);\r\n writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1);\r\n } else {\r\n var mantissa;\r\n if (val < 2.2250738585072014e-308) { // denormal\r\n mantissa = val / 5e-324;\r\n writeUint(mantissa >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1);\r\n } else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2);\r\n if (exponent === 1024)\r\n exponent = 1023;\r\n mantissa = val * Math.pow(2, -exponent);\r\n writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1);\r\n }\r\n }\r\n }\r\n\r\n exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4);\r\n exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0);\r\n\r\n function readDouble_ieee754(readUint, off0, off1, buf, pos) {\r\n var lo = readUint(buf, pos + off0),\r\n hi = readUint(buf, pos + off1);\r\n var sign = (hi >> 31) * 2 + 1,\r\n exponent = hi >>> 20 & 2047,\r\n mantissa = 4294967296 * (hi & 1048575) + lo;\r\n return exponent === 2047\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? sign * 5e-324 * mantissa\r\n : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496);\r\n }\r\n\r\n exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4);\r\n exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0);\r\n\r\n })();\r\n\r\n return exports;\r\n}\r\n\r\n// uint helpers\r\n\r\nfunction writeUintLE(val, buf, pos) {\r\n buf[pos ] = val & 255;\r\n buf[pos + 1] = val >>> 8 & 255;\r\n buf[pos + 2] = val >>> 16 & 255;\r\n buf[pos + 3] = val >>> 24;\r\n}\r\n\r\nfunction writeUintBE(val, buf, pos) {\r\n buf[pos ] = val >>> 24;\r\n buf[pos + 1] = val >>> 16 & 255;\r\n buf[pos + 2] = val >>> 8 & 255;\r\n buf[pos + 3] = val & 255;\r\n}\r\n\r\nfunction readUintLE(buf, pos) {\r\n return (buf[pos ]\r\n | buf[pos + 1] << 8\r\n | buf[pos + 2] << 16\r\n | buf[pos + 3] << 24) >>> 0;\r\n}\r\n\r\nfunction readUintBE(buf, pos) {\r\n return (buf[pos ] << 24\r\n | buf[pos + 1] << 16\r\n | buf[pos + 2] << 8\r\n | buf[pos + 3]) >>> 0;\r\n}\r\n", "\"use strict\";\r\nmodule.exports = inquire;\r\n\r\n/**\r\n * Requires a module only if available.\r\n * @memberof util\r\n * @param {string} moduleName Module to require\r\n * @returns {?Object} Required module if available and not empty, otherwise `null`\r\n */\r\nfunction inquire(moduleName) {\r\n try {\r\n var mod = eval(\"quire\".replace(/^/,\"re\"))(moduleName); // eslint-disable-line no-eval\r\n if (mod && (mod.length || Object.keys(mod).length))\r\n return mod;\r\n } catch (e) {} // eslint-disable-line no-empty\r\n return null;\r\n}\r\n", "\"use strict\";\r\n\r\n/**\r\n * A minimal UTF8 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar utf8 = exports;\r\n\r\n/**\r\n * Calculates the UTF8 byte length of a string.\r\n * @param {string} string String\r\n * @returns {number} Byte length\r\n */\r\nutf8.length = function utf8_length(string) {\r\n var len = 0,\r\n c = 0;\r\n for (var i = 0; i < string.length; ++i) {\r\n c = string.charCodeAt(i);\r\n if (c < 128)\r\n len += 1;\r\n else if (c < 2048)\r\n len += 2;\r\n else if ((c & 0xFC00) 
=== 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) {\r\n ++i;\r\n len += 4;\r\n } else\r\n len += 3;\r\n }\r\n return len;\r\n};\r\n\r\n/**\r\n * Reads UTF8 bytes as a string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} String read\r\n */\r\nutf8.read = function utf8_read(buffer, start, end) {\r\n var len = end - start;\r\n if (len < 1)\r\n return \"\";\r\n var parts = null,\r\n chunk = [],\r\n i = 0, // char offset\r\n t; // temporary\r\n while (start < end) {\r\n t = buffer[start++];\r\n if (t < 128)\r\n chunk[i++] = t;\r\n else if (t > 191 && t < 224)\r\n chunk[i++] = (t & 31) << 6 | buffer[start++] & 63;\r\n else if (t > 239 && t < 365) {\r\n t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000;\r\n chunk[i++] = 0xD800 + (t >> 10);\r\n chunk[i++] = 0xDC00 + (t & 1023);\r\n } else\r\n chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63;\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\n/**\r\n * Writes a string as UTF8 bytes.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Bytes written\r\n */\r\nutf8.write = function utf8_write(string, buffer, offset) {\r\n var start = offset,\r\n c1, // character 1\r\n c2; // character 2\r\n for (var i = 0; i < string.length; ++i) {\r\n c1 = string.charCodeAt(i);\r\n if (c1 < 128) {\r\n buffer[offset++] = c1;\r\n } else if (c1 < 2048) {\r\n buffer[offset++] = c1 >> 6 | 192;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) {\r\n c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF);\r\n ++i;\r\n buffer[offset++] = c1 >> 18 | 240;\r\n buffer[offset++] = c1 >> 12 & 63 | 128;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else {\r\n buffer[offset++] = c1 >> 12 | 224;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n }\r\n }\r\n return offset - start;\r\n};\r\n", "\"use strict\";\r\nmodule.exports = pool;\r\n\r\n/**\r\n * An allocator as used by {@link util.pool}.\r\n * @typedef PoolAllocator\r\n * @type {function}\r\n * @param {number} size Buffer size\r\n * @returns {Uint8Array} Buffer\r\n */\r\n\r\n/**\r\n * A slicer as used by {@link util.pool}.\r\n * @typedef PoolSlicer\r\n * @type {function}\r\n * @param {number} start Start offset\r\n * @param {number} end End offset\r\n * @returns {Uint8Array} Buffer slice\r\n * @this {Uint8Array}\r\n */\r\n\r\n/**\r\n * A general purpose buffer pool.\r\n * @memberof util\r\n * @function\r\n * @param {PoolAllocator} alloc Allocator\r\n * @param {PoolSlicer} slice Slicer\r\n * @param {number} [size=8192] Slab size\r\n * @returns {PoolAllocator} Pooled allocator\r\n */\r\nfunction pool(alloc, slice, size) {\r\n var SIZE = size || 8192;\r\n var MAX = SIZE >>> 1;\r\n var slab = null;\r\n var offset = SIZE;\r\n return function pool_alloc(size) {\r\n if (size < 1 || size > MAX)\r\n return alloc(size);\r\n if (offset + size > SIZE) {\r\n slab = alloc(SIZE);\r\n offset = 
0;\r\n }\r\n var buf = slice.call(slab, offset, offset += size);\r\n if (offset & 7) // align to 32 bit\r\n offset = (offset | 7) + 1;\r\n return buf;\r\n };\r\n}\r\n", "\"use strict\";\nmodule.exports = LongBits;\n\nvar util = require(\"../util/minimal\");\n\n/**\n * Constructs new long bits.\n * @classdesc Helper class for working with the low and high bits of a 64 bit value.\n * @memberof util\n * @constructor\n * @param {number} lo Low 32 bits, unsigned\n * @param {number} hi High 32 bits, unsigned\n */\nfunction LongBits(lo, hi) {\n\n // note that the casts below are theoretically unnecessary as of today, but older statically\n // generated converter code might still call the ctor with signed 32bits. kept for compat.\n\n /**\n * Low bits.\n * @type {number}\n */\n this.lo = lo >>> 0;\n\n /**\n * High bits.\n * @type {number}\n */\n this.hi = hi >>> 0;\n}\n\n/**\n * Zero bits.\n * @memberof util.LongBits\n * @type {util.LongBits}\n */\nvar zero = LongBits.zero = new LongBits(0, 0);\n\nzero.toNumber = function() { return 0; };\nzero.zzEncode = zero.zzDecode = function() { return this; };\nzero.length = function() { return 1; };\n\n/**\n * Zero hash.\n * @memberof util.LongBits\n * @type {string}\n */\nvar zeroHash = LongBits.zeroHash = \"\\0\\0\\0\\0\\0\\0\\0\\0\";\n\n/**\n * Constructs new long bits from the specified number.\n * @param {number} value Value\n * @returns {util.LongBits} Instance\n */\nLongBits.fromNumber = function fromNumber(value) {\n if (value === 0)\n return zero;\n var sign = value < 0;\n if (sign)\n value = -value;\n var lo = value >>> 0,\n hi = (value - lo) / 4294967296 >>> 0;\n if (sign) {\n hi = ~hi >>> 0;\n lo = ~lo >>> 0;\n if (++lo > 4294967295) {\n lo = 0;\n if (++hi > 4294967295)\n hi = 0;\n }\n }\n return new LongBits(lo, hi);\n};\n\n/**\n * Constructs new long bits from a number, long or string.\n * @param {Long|number|string} value Value\n * @returns {util.LongBits} Instance\n */\nLongBits.from = function from(value) {\n if (typeof value === \"number\")\n return LongBits.fromNumber(value);\n if (util.isString(value)) {\n /* istanbul ignore else */\n if (util.Long)\n value = util.Long.fromString(value);\n else\n return LongBits.fromNumber(parseInt(value, 10));\n }\n return value.low || value.high ? new LongBits(value.low >>> 0, value.high >>> 0) : zero;\n};\n\n/**\n * Converts this long bits to a possibly unsafe JavaScript number.\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {number} Possibly unsafe number\n */\nLongBits.prototype.toNumber = function toNumber(unsigned) {\n if (!unsigned && this.hi >>> 31) {\n var lo = ~this.lo + 1 >>> 0,\n hi = ~this.hi >>> 0;\n if (!lo)\n hi = hi + 1 >>> 0;\n return -(lo + hi * 4294967296);\n }\n return this.lo + this.hi * 4294967296;\n};\n\n/**\n * Converts this long bits to a long.\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {Long} Long\n */\nLongBits.prototype.toLong = function toLong(unsigned) {\n return util.Long\n ? 
new util.Long(this.lo | 0, this.hi | 0, Boolean(unsigned))\n /* istanbul ignore next */\n : { low: this.lo | 0, high: this.hi | 0, unsigned: Boolean(unsigned) };\n};\n\nvar charCodeAt = String.prototype.charCodeAt;\n\n/**\n * Constructs new long bits from the specified 8 characters long hash.\n * @param {string} hash Hash\n * @returns {util.LongBits} Bits\n */\nLongBits.fromHash = function fromHash(hash) {\n if (hash === zeroHash)\n return zero;\n return new LongBits(\n ( charCodeAt.call(hash, 0)\n | charCodeAt.call(hash, 1) << 8\n | charCodeAt.call(hash, 2) << 16\n | charCodeAt.call(hash, 3) << 24) >>> 0\n ,\n ( charCodeAt.call(hash, 4)\n | charCodeAt.call(hash, 5) << 8\n | charCodeAt.call(hash, 6) << 16\n | charCodeAt.call(hash, 7) << 24) >>> 0\n );\n};\n\n/**\n * Converts this long bits to a 8 characters long hash.\n * @returns {string} Hash\n */\nLongBits.prototype.toHash = function toHash() {\n return String.fromCharCode(\n this.lo & 255,\n this.lo >>> 8 & 255,\n this.lo >>> 16 & 255,\n this.lo >>> 24 ,\n this.hi & 255,\n this.hi >>> 8 & 255,\n this.hi >>> 16 & 255,\n this.hi >>> 24\n );\n};\n\n/**\n * Zig-zag encodes this long bits.\n * @returns {util.LongBits} `this`\n */\nLongBits.prototype.zzEncode = function zzEncode() {\n var mask = this.hi >> 31;\n this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;\n this.lo = ( this.lo << 1 ^ mask) >>> 0;\n return this;\n};\n\n/**\n * Zig-zag decodes this long bits.\n * @returns {util.LongBits} `this`\n */\nLongBits.prototype.zzDecode = function zzDecode() {\n var mask = -(this.lo & 1);\n this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;\n this.hi = ( this.hi >>> 1 ^ mask) >>> 0;\n return this;\n};\n\n/**\n * Calculates the length of this longbits when encoded as a varint.\n * @returns {number} Length\n */\nLongBits.prototype.length = function length() {\n var part0 = this.lo,\n part1 = (this.lo >>> 28 | this.hi << 4) >>> 0,\n part2 = this.hi >>> 24;\n return part2 === 0\n ? part1 === 0\n ? part0 < 16384\n ? part0 < 128 ? 1 : 2\n : part0 < 2097152 ? 3 : 4\n : part1 < 16384\n ? part1 < 128 ? 5 : 6\n : part1 < 2097152 ? 7 : 8\n : part2 < 128 ? 
9 : 10;\n};\n", "\"use strict\";\nvar util = exports;\n\n// used to return a Promise where callback is omitted\nutil.asPromise = require(\"@protobufjs/aspromise\");\n\n// converts to / from base64 encoded strings\nutil.base64 = require(\"@protobufjs/base64\");\n\n// base class of rpc.Service\nutil.EventEmitter = require(\"@protobufjs/eventemitter\");\n\n// float handling accross browsers\nutil.float = require(\"@protobufjs/float\");\n\n// requires modules optionally and hides the call from bundlers\nutil.inquire = require(\"@protobufjs/inquire\");\n\n// converts to / from utf8 encoded strings\nutil.utf8 = require(\"@protobufjs/utf8\");\n\n// provides a node-like buffer pool in the browser\nutil.pool = require(\"@protobufjs/pool\");\n\n// utility to work with the low and high bits of a 64 bit value\nutil.LongBits = require(\"./longbits\");\n\n/**\n * Whether running within node or not.\n * @memberof util\n * @type {boolean}\n */\nutil.isNode = Boolean(typeof global !== \"undefined\"\n && global\n && global.process\n && global.process.versions\n && global.process.versions.node);\n\n/**\n * Global object reference.\n * @memberof util\n * @type {Object}\n */\nutil.global = util.isNode && global\n || typeof window !== \"undefined\" && window\n || typeof self !== \"undefined\" && self\n || this; // eslint-disable-line no-invalid-this\n\n/**\n * An immuable empty array.\n * @memberof util\n * @type {Array.<*>}\n * @const\n */\nutil.emptyArray = Object.freeze ? Object.freeze([]) : /* istanbul ignore next */ []; // used on prototypes\n\n/**\n * An immutable empty object.\n * @type {Object}\n * @const\n */\nutil.emptyObject = Object.freeze ? Object.freeze({}) : /* istanbul ignore next */ {}; // used on prototypes\n\n/**\n * Tests if the specified value is an integer.\n * @function\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is an integer\n */\nutil.isInteger = Number.isInteger || /* istanbul ignore next */ function isInteger(value) {\n return typeof value === \"number\" && isFinite(value) && Math.floor(value) === value;\n};\n\n/**\n * Tests if the specified value is a string.\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is a string\n */\nutil.isString = function isString(value) {\n return typeof value === \"string\" || value instanceof String;\n};\n\n/**\n * Tests if the specified value is a non-null object.\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is a non-null object\n */\nutil.isObject = function isObject(value) {\n return value && typeof value === \"object\";\n};\n\n/**\n * Checks if a property on a message is considered to be present.\n * This is an alias of {@link util.isSet}.\n * @function\n * @param {Object} obj Plain object or message instance\n * @param {string} prop Property name\n * @returns {boolean} `true` if considered to be present, otherwise `false`\n */\nutil.isset =\n\n/**\n * Checks if a property on a message is considered to be present.\n * @param {Object} obj Plain object or message instance\n * @param {string} prop Property name\n * @returns {boolean} `true` if considered to be present, otherwise `false`\n */\nutil.isSet = function isSet(obj, prop) {\n var value = obj[prop];\n if (value != null && obj.hasOwnProperty(prop)) // eslint-disable-line eqeqeq, no-prototype-builtins\n return typeof value !== \"object\" || (Array.isArray(value) ? 
value.length : Object.keys(value).length) > 0;\n return false;\n};\n\n/**\n * Any compatible Buffer instance.\n * This is a minimal stand-alone definition of a Buffer instance. The actual type is that exported by node's typings.\n * @interface Buffer\n * @extends Uint8Array\n */\n\n/**\n * Node's Buffer class if available.\n * @type {Constructor}\n */\nutil.Buffer = (function() {\n try {\n var Buffer = util.inquire(\"buffer\").Buffer;\n // refuse to use non-node buffers if not explicitly assigned (perf reasons):\n return Buffer.prototype.utf8Write ? Buffer : /* istanbul ignore next */ null;\n } catch (e) {\n /* istanbul ignore next */\n return null;\n }\n})();\n\n// Internal alias of or polyfull for Buffer.from.\nutil._Buffer_from = null;\n\n// Internal alias of or polyfill for Buffer.allocUnsafe.\nutil._Buffer_allocUnsafe = null;\n\n/**\n * Creates a new buffer of whatever type supported by the environment.\n * @param {number|number[]} [sizeOrArray=0] Buffer size or number array\n * @returns {Uint8Array|Buffer} Buffer\n */\nutil.newBuffer = function newBuffer(sizeOrArray) {\n /* istanbul ignore next */\n return typeof sizeOrArray === \"number\"\n ? util.Buffer\n ? util._Buffer_allocUnsafe(sizeOrArray)\n : new util.Array(sizeOrArray)\n : util.Buffer\n ? util._Buffer_from(sizeOrArray)\n : typeof Uint8Array === \"undefined\"\n ? sizeOrArray\n : new Uint8Array(sizeOrArray);\n};\n\n/**\n * Array implementation used in the browser. `Uint8Array` if supported, otherwise `Array`.\n * @type {Constructor}\n */\nutil.Array = typeof Uint8Array !== \"undefined\" ? Uint8Array /* istanbul ignore next */ : Array;\n\n/**\n * Any compatible Long instance.\n * This is a minimal stand-alone definition of a Long instance. The actual type is that exported by long.js.\n * @interface Long\n * @property {number} low Low bits\n * @property {number} high High bits\n * @property {boolean} unsigned Whether unsigned or not\n */\n\n/**\n * Long.js's Long class if available.\n * @type {Constructor}\n */\nutil.Long = /* istanbul ignore next */ util.global.dcodeIO && /* istanbul ignore next */ util.global.dcodeIO.Long\n || /* istanbul ignore next */ util.global.Long\n || util.inquire(\"long\");\n\n/**\n * Regular expression used to verify 2 bit (`bool`) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key2Re = /^true|false|0|1$/;\n\n/**\n * Regular expression used to verify 32 bit (`int32` etc.) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key32Re = /^-?(?:0|[1-9][0-9]*)$/;\n\n/**\n * Regular expression used to verify 64 bit (`int64` etc.) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key64Re = /^(?:[\\\\x00-\\\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;\n\n/**\n * Converts a number or long to an 8 characters long hash string.\n * @param {Long|number} value Value to convert\n * @returns {string} Hash\n */\nutil.longToHash = function longToHash(value) {\n return value\n ? 
util.LongBits.from(value).toHash()\n : util.LongBits.zeroHash;\n};\n\n/**\n * Converts an 8 characters long hash string to a long or number.\n * @param {string} hash Hash\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {Long|number} Original value\n */\nutil.longFromHash = function longFromHash(hash, unsigned) {\n var bits = util.LongBits.fromHash(hash);\n if (util.Long)\n return util.Long.fromBits(bits.lo, bits.hi, unsigned);\n return bits.toNumber(Boolean(unsigned));\n};\n\n/**\n * Merges the properties of the source object into the destination object.\n * @memberof util\n * @param {Object.} dst Destination object\n * @param {Object.} src Source object\n * @param {boolean} [ifNotSet=false] Merges only if the key is not already set\n * @returns {Object.} Destination object\n */\nfunction merge(dst, src, ifNotSet) { // used by converters\n for (var keys = Object.keys(src), i = 0; i < keys.length; ++i)\n if (dst[keys[i]] === undefined || !ifNotSet)\n dst[keys[i]] = src[keys[i]];\n return dst;\n}\n\nutil.merge = merge;\n\n/**\n * Converts the first character of a string to lower case.\n * @param {string} str String to convert\n * @returns {string} Converted string\n */\nutil.lcFirst = function lcFirst(str) {\n return str.charAt(0).toLowerCase() + str.substring(1);\n};\n\n/**\n * Creates a custom error constructor.\n * @memberof util\n * @param {string} name Error name\n * @returns {Constructor} Custom error constructor\n */\nfunction newError(name) {\n\n function CustomError(message, properties) {\n\n if (!(this instanceof CustomError))\n return new CustomError(message, properties);\n\n // Error.call(this, message);\n // ^ just returns a new error instance because the ctor can be called as a function\n\n Object.defineProperty(this, \"message\", { get: function() { return message; } });\n\n /* istanbul ignore next */\n if (Error.captureStackTrace) // node\n Error.captureStackTrace(this, CustomError);\n else\n Object.defineProperty(this, \"stack\", { value: new Error().stack || \"\" });\n\n if (properties)\n merge(this, properties);\n }\n\n CustomError.prototype = Object.create(Error.prototype, {\n constructor: {\n value: CustomError,\n writable: true,\n enumerable: false,\n configurable: true,\n },\n name: {\n get: function get() { return name; },\n set: undefined,\n enumerable: false,\n // configurable: false would accurately preserve the behavior of\n // the original, but I'm guessing that was not intentional.\n // For an actual error subclass, this property would\n // be configurable.\n configurable: true,\n },\n toString: {\n value: function value() { return this.name + \": \" + this.message; },\n writable: true,\n enumerable: false,\n configurable: true,\n },\n });\n\n return CustomError;\n}\n\nutil.newError = newError;\n\n/**\n * Constructs a new protocol error.\n * @classdesc Error subclass indicating a protocol specifc error.\n * @memberof util\n * @extends Error\n * @template T extends Message\n * @constructor\n * @param {string} message Error message\n * @param {Object.} [properties] Additional properties\n * @example\n * try {\n * MyMessage.decode(someBuffer); // throws if required fields are missing\n * } catch (e) {\n * if (e instanceof ProtocolError && e.instance)\n * console.log(\"decoded so far: \" + JSON.stringify(e.instance));\n * }\n */\nutil.ProtocolError = newError(\"ProtocolError\");\n\n/**\n * So far decoded message instance.\n * @name util.ProtocolError#instance\n * @type {Message}\n */\n\n/**\n * A OneOf getter as returned by {@link 
util.oneOfGetter}.\n * @typedef OneOfGetter\n * @type {function}\n * @returns {string|undefined} Set field name, if any\n */\n\n/**\n * Builds a getter for a oneof's present field name.\n * @param {string[]} fieldNames Field names\n * @returns {OneOfGetter} Unbound getter\n */\nutil.oneOfGetter = function getOneOf(fieldNames) {\n var fieldMap = {};\n for (var i = 0; i < fieldNames.length; ++i)\n fieldMap[fieldNames[i]] = 1;\n\n /**\n * @returns {string|undefined} Set field name, if any\n * @this Object\n * @ignore\n */\n return function() { // eslint-disable-line consistent-return\n for (var keys = Object.keys(this), i = keys.length - 1; i > -1; --i)\n if (fieldMap[keys[i]] === 1 && this[keys[i]] !== undefined && this[keys[i]] !== null)\n return keys[i];\n };\n};\n\n/**\n * A OneOf setter as returned by {@link util.oneOfSetter}.\n * @typedef OneOfSetter\n * @type {function}\n * @param {string|undefined} value Field name\n * @returns {undefined}\n */\n\n/**\n * Builds a setter for a oneof's present field name.\n * @param {string[]} fieldNames Field names\n * @returns {OneOfSetter} Unbound setter\n */\nutil.oneOfSetter = function setOneOf(fieldNames) {\n\n /**\n * @param {string} name Field name\n * @returns {undefined}\n * @this Object\n * @ignore\n */\n return function(name) {\n for (var i = 0; i < fieldNames.length; ++i)\n if (fieldNames[i] !== name)\n delete this[fieldNames[i]];\n };\n};\n\n/**\n * Default conversion options used for {@link Message#toJSON} implementations.\n *\n * These options are close to proto3's JSON mapping with the exception that internal types like Any are handled just like messages. More precisely:\n *\n * - Longs become strings\n * - Enums become string keys\n * - Bytes become base64 encoded strings\n * - (Sub-)Messages become plain objects\n * - Maps become plain objects with all string keys\n * - Repeated fields become arrays\n * - NaN and Infinity for float and double fields become strings\n *\n * @type {IConversionOptions}\n * @see https://developers.google.com/protocol-buffers/docs/proto3?hl=en#json\n */\nutil.toJSONOptions = {\n longs: String,\n enums: String,\n bytes: String,\n json: true\n};\n\n// Sets up buffer utility according to the environment (called in index-minimal)\nutil._configure = function() {\n var Buffer = util.Buffer;\n /* istanbul ignore if */\n if (!Buffer) {\n util._Buffer_from = util._Buffer_allocUnsafe = null;\n return;\n }\n // because node 4.x buffers are incompatible & immutable\n // see: https://github.com/dcodeIO/protobuf.js/pull/665\n util._Buffer_from = Buffer.from !== Uint8Array.from && Buffer.from ||\n /* istanbul ignore next */\n function Buffer_from(value, encoding) {\n return new Buffer(value, encoding);\n };\n util._Buffer_allocUnsafe = Buffer.allocUnsafe ||\n /* istanbul ignore next */\n function Buffer_allocUnsafe(size) {\n return new Buffer(size);\n };\n};\n", "\"use strict\";\nmodule.exports = Writer;\n\nvar util = require(\"./util/minimal\");\n\nvar BufferWriter; // cyclic\n\nvar LongBits = util.LongBits,\n base64 = util.base64,\n utf8 = util.utf8;\n\n/**\n * Constructs a new writer operation instance.\n * @classdesc Scheduled writer operation.\n * @constructor\n * @param {function(*, Uint8Array, number)} fn Function to call\n * @param {number} len Value byte length\n * @param {*} val Value to write\n * @ignore\n */\nfunction Op(fn, len, val) {\n\n /**\n * Function to call.\n * @type {function(Uint8Array, number, *)}\n */\n this.fn = fn;\n\n /**\n * Value byte length.\n * @type {number}\n */\n this.len = len;\n\n 
/**\n * Next operation.\n * @type {Writer.Op|undefined}\n */\n this.next = undefined;\n\n /**\n * Value to write.\n * @type {*}\n */\n this.val = val; // type varies\n}\n\n/* istanbul ignore next */\nfunction noop() {} // eslint-disable-line no-empty-function\n\n/**\n * Constructs a new writer state instance.\n * @classdesc Copied writer state.\n * @memberof Writer\n * @constructor\n * @param {Writer} writer Writer to copy state from\n * @ignore\n */\nfunction State(writer) {\n\n /**\n * Current head.\n * @type {Writer.Op}\n */\n this.head = writer.head;\n\n /**\n * Current tail.\n * @type {Writer.Op}\n */\n this.tail = writer.tail;\n\n /**\n * Current buffer length.\n * @type {number}\n */\n this.len = writer.len;\n\n /**\n * Next state.\n * @type {State|null}\n */\n this.next = writer.states;\n}\n\n/**\n * Constructs a new writer instance.\n * @classdesc Wire format writer using `Uint8Array` if available, otherwise `Array`.\n * @constructor\n */\nfunction Writer() {\n\n /**\n * Current length.\n * @type {number}\n */\n this.len = 0;\n\n /**\n * Operations head.\n * @type {Object}\n */\n this.head = new Op(noop, 0, 0);\n\n /**\n * Operations tail\n * @type {Object}\n */\n this.tail = this.head;\n\n /**\n * Linked forked states.\n * @type {Object|null}\n */\n this.states = null;\n\n // When a value is written, the writer calculates its byte length and puts it into a linked\n // list of operations to perform when finish() is called. This both allows us to allocate\n // buffers of the exact required size and reduces the amount of work we have to do compared\n // to first calculating over objects and then encoding over objects. In our case, the encoding\n // part is just a linked list walk calling operations with already prepared values.\n}\n\nvar create = function create() {\n return util.Buffer\n ? 
function create_buffer_setup() {\n return (Writer.create = function create_buffer() {\n return new BufferWriter();\n })();\n }\n /* istanbul ignore next */\n : function create_array() {\n return new Writer();\n };\n};\n\n/**\n * Creates a new writer.\n * @function\n * @returns {BufferWriter|Writer} A {@link BufferWriter} when Buffers are supported, otherwise a {@link Writer}\n */\nWriter.create = create();\n\n/**\n * Allocates a buffer of the specified size.\n * @param {number} size Buffer size\n * @returns {Uint8Array} Buffer\n */\nWriter.alloc = function alloc(size) {\n return new util.Array(size);\n};\n\n// Use Uint8Array buffer pool in the browser, just like node does with buffers\n/* istanbul ignore else */\nif (util.Array !== Array)\n Writer.alloc = util.pool(Writer.alloc, util.Array.prototype.subarray);\n\n/**\n * Pushes a new operation to the queue.\n * @param {function(Uint8Array, number, *)} fn Function to call\n * @param {number} len Value byte length\n * @param {number} val Value to write\n * @returns {Writer} `this`\n * @private\n */\nWriter.prototype._push = function push(fn, len, val) {\n this.tail = this.tail.next = new Op(fn, len, val);\n this.len += len;\n return this;\n};\n\nfunction writeByte(val, buf, pos) {\n buf[pos] = val & 255;\n}\n\nfunction writeVarint32(val, buf, pos) {\n while (val > 127) {\n buf[pos++] = val & 127 | 128;\n val >>>= 7;\n }\n buf[pos] = val;\n}\n\n/**\n * Constructs a new varint writer operation instance.\n * @classdesc Scheduled varint writer operation.\n * @extends Op\n * @constructor\n * @param {number} len Value byte length\n * @param {number} val Value to write\n * @ignore\n */\nfunction VarintOp(len, val) {\n this.len = len;\n this.next = undefined;\n this.val = val;\n}\n\nVarintOp.prototype = Object.create(Op.prototype);\nVarintOp.prototype.fn = writeVarint32;\n\n/**\n * Writes an unsigned 32 bit value as a varint.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.uint32 = function write_uint32(value) {\n // here, the call to this.push has been inlined and a varint specific Op subclass is used.\n // uint32 is by far the most frequently used operation and benefits significantly from this.\n this.len += (this.tail = this.tail.next = new VarintOp(\n (value = value >>> 0)\n < 128 ? 1\n : value < 16384 ? 2\n : value < 2097152 ? 3\n : value < 268435456 ? 4\n : 5,\n value)).len;\n return this;\n};\n\n/**\n * Writes a signed 32 bit value as a varint.\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.int32 = function write_int32(value) {\n return value < 0\n ? 
this._push(writeVarint64, 10, LongBits.fromNumber(value)) // 10 bytes per spec\n : this.uint32(value);\n};\n\n/**\n * Writes a 32 bit value as a varint, zig-zag encoded.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.sint32 = function write_sint32(value) {\n return this.uint32((value << 1 ^ value >> 31) >>> 0);\n};\n\nfunction writeVarint64(val, buf, pos) {\n while (val.hi) {\n buf[pos++] = val.lo & 127 | 128;\n val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;\n val.hi >>>= 7;\n }\n while (val.lo > 127) {\n buf[pos++] = val.lo & 127 | 128;\n val.lo = val.lo >>> 7;\n }\n buf[pos++] = val.lo;\n}\n\n/**\n * Writes an unsigned 64 bit value as a varint.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.uint64 = function write_uint64(value) {\n var bits = LongBits.from(value);\n return this._push(writeVarint64, bits.length(), bits);\n};\n\n/**\n * Writes a signed 64 bit value as a varint.\n * @function\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.int64 = Writer.prototype.uint64;\n\n/**\n * Writes a signed 64 bit value as a varint, zig-zag encoded.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.sint64 = function write_sint64(value) {\n var bits = LongBits.from(value).zzEncode();\n return this._push(writeVarint64, bits.length(), bits);\n};\n\n/**\n * Writes a boolish value as a varint.\n * @param {boolean} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.bool = function write_bool(value) {\n return this._push(writeByte, 1, value ? 
1 : 0);\n};\n\nfunction writeFixed32(val, buf, pos) {\n buf[pos ] = val & 255;\n buf[pos + 1] = val >>> 8 & 255;\n buf[pos + 2] = val >>> 16 & 255;\n buf[pos + 3] = val >>> 24;\n}\n\n/**\n * Writes an unsigned 32 bit value as fixed 32 bits.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.fixed32 = function write_fixed32(value) {\n return this._push(writeFixed32, 4, value >>> 0);\n};\n\n/**\n * Writes a signed 32 bit value as fixed 32 bits.\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.sfixed32 = Writer.prototype.fixed32;\n\n/**\n * Writes an unsigned 64 bit value as fixed 64 bits.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.fixed64 = function write_fixed64(value) {\n var bits = LongBits.from(value);\n return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);\n};\n\n/**\n * Writes a signed 64 bit value as fixed 64 bits.\n * @function\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.sfixed64 = Writer.prototype.fixed64;\n\n/**\n * Writes a float (32 bit).\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.float = function write_float(value) {\n return this._push(util.float.writeFloatLE, 4, value);\n};\n\n/**\n * Writes a double (64 bit float).\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.double = function write_double(value) {\n return this._push(util.float.writeDoubleLE, 8, value);\n};\n\nvar writeBytes = util.Array.prototype.set\n ? function writeBytes_set(val, buf, pos) {\n buf.set(val, pos); // also works for plain array values\n }\n /* istanbul ignore next */\n : function writeBytes_for(val, buf, pos) {\n for (var i = 0; i < val.length; ++i)\n buf[pos + i] = val[i];\n };\n\n/**\n * Writes a sequence of bytes.\n * @param {Uint8Array|string} value Buffer or base64 encoded string to write\n * @returns {Writer} `this`\n */\nWriter.prototype.bytes = function write_bytes(value) {\n var len = value.length >>> 0;\n if (!len)\n return this._push(writeByte, 1, 0);\n if (util.isString(value)) {\n var buf = Writer.alloc(len = base64.length(value));\n base64.decode(value, buf, 0);\n value = buf;\n }\n return this.uint32(len)._push(writeBytes, len, value);\n};\n\n/**\n * Writes a string.\n * @param {string} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.string = function write_string(value) {\n var len = utf8.length(value);\n return len\n ? 
this.uint32(len)._push(utf8.write, len, value)\n : this._push(writeByte, 1, 0);\n};\n\n/**\n * Forks this writer's state by pushing it to a stack.\n * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.\n * @returns {Writer} `this`\n */\nWriter.prototype.fork = function fork() {\n this.states = new State(this);\n this.head = this.tail = new Op(noop, 0, 0);\n this.len = 0;\n return this;\n};\n\n/**\n * Resets this instance to the last state.\n * @returns {Writer} `this`\n */\nWriter.prototype.reset = function reset() {\n if (this.states) {\n this.head = this.states.head;\n this.tail = this.states.tail;\n this.len = this.states.len;\n this.states = this.states.next;\n } else {\n this.head = this.tail = new Op(noop, 0, 0);\n this.len = 0;\n }\n return this;\n};\n\n/**\n * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.\n * @returns {Writer} `this`\n */\nWriter.prototype.ldelim = function ldelim() {\n var head = this.head,\n tail = this.tail,\n len = this.len;\n this.reset().uint32(len);\n if (len) {\n this.tail.next = head.next; // skip noop\n this.tail = tail;\n this.len += len;\n }\n return this;\n};\n\n/**\n * Finishes the write operation.\n * @returns {Uint8Array} Finished buffer\n */\nWriter.prototype.finish = function finish() {\n var head = this.head.next, // skip noop\n buf = this.constructor.alloc(this.len),\n pos = 0;\n while (head) {\n head.fn(head.val, buf, pos);\n pos += head.len;\n head = head.next;\n }\n // this.head = this.tail = null;\n return buf;\n};\n\nWriter._configure = function(BufferWriter_) {\n BufferWriter = BufferWriter_;\n Writer.create = create();\n BufferWriter._configure();\n};\n", "\"use strict\";\nmodule.exports = BufferWriter;\n\n// extends Writer\nvar Writer = require(\"./writer\");\n(BufferWriter.prototype = Object.create(Writer.prototype)).constructor = BufferWriter;\n\nvar util = require(\"./util/minimal\");\n\n/**\n * Constructs a new buffer writer instance.\n * @classdesc Wire format writer using node buffers.\n * @extends Writer\n * @constructor\n */\nfunction BufferWriter() {\n Writer.call(this);\n}\n\nBufferWriter._configure = function () {\n /**\n * Allocates a buffer of the specified size.\n * @function\n * @param {number} size Buffer size\n * @returns {Buffer} Buffer\n */\n BufferWriter.alloc = util._Buffer_allocUnsafe;\n\n BufferWriter.writeBytesBuffer = util.Buffer && util.Buffer.prototype instanceof Uint8Array && util.Buffer.prototype.set.name === \"set\"\n ? 
function writeBytesBuffer_set(val, buf, pos) {\n buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited)\n // also works for plain array values\n }\n /* istanbul ignore next */\n : function writeBytesBuffer_copy(val, buf, pos) {\n if (val.copy) // Buffer values\n val.copy(buf, pos, 0, val.length);\n else for (var i = 0; i < val.length;) // plain array values\n buf[pos++] = val[i++];\n };\n};\n\n\n/**\n * @override\n */\nBufferWriter.prototype.bytes = function write_bytes_buffer(value) {\n if (util.isString(value))\n value = util._Buffer_from(value, \"base64\");\n var len = value.length >>> 0;\n this.uint32(len);\n if (len)\n this._push(BufferWriter.writeBytesBuffer, len, value);\n return this;\n};\n\nfunction writeStringBuffer(val, buf, pos) {\n if (val.length < 40) // plain js is faster for short strings (probably due to redundant assertions)\n util.utf8.write(val, buf, pos);\n else if (buf.utf8Write)\n buf.utf8Write(val, pos);\n else\n buf.write(val, pos);\n}\n\n/**\n * @override\n */\nBufferWriter.prototype.string = function write_string_buffer(value) {\n var len = util.Buffer.byteLength(value);\n this.uint32(len);\n if (len)\n this._push(writeStringBuffer, len, value);\n return this;\n};\n\n\n/**\n * Finishes the write operation.\n * @name BufferWriter#finish\n * @function\n * @returns {Buffer} Finished buffer\n */\n\nBufferWriter._configure();\n", "\"use strict\";\nmodule.exports = Reader;\n\nvar util = require(\"./util/minimal\");\n\nvar BufferReader; // cyclic\n\nvar LongBits = util.LongBits,\n utf8 = util.utf8;\n\n/* istanbul ignore next */\nfunction indexOutOfRange(reader, writeLength) {\n return RangeError(\"index out of range: \" + reader.pos + \" + \" + (writeLength || 1) + \" > \" + reader.len);\n}\n\n/**\n * Constructs a new reader instance using the specified buffer.\n * @classdesc Wire format reader using `Uint8Array` if available, otherwise `Array`.\n * @constructor\n * @param {Uint8Array} buffer Buffer to read from\n */\nfunction Reader(buffer) {\n\n /**\n * Read buffer.\n * @type {Uint8Array}\n */\n this.buf = buffer;\n\n /**\n * Read buffer position.\n * @type {number}\n */\n this.pos = 0;\n\n /**\n * Read buffer length.\n * @type {number}\n */\n this.len = buffer.length;\n}\n\nvar create_array = typeof Uint8Array !== \"undefined\"\n ? function create_typed_array(buffer) {\n if (buffer instanceof Uint8Array || Array.isArray(buffer))\n return new Reader(buffer);\n throw Error(\"illegal buffer\");\n }\n /* istanbul ignore next */\n : function create_array(buffer) {\n if (Array.isArray(buffer))\n return new Reader(buffer);\n throw Error(\"illegal buffer\");\n };\n\nvar create = function create() {\n return util.Buffer\n ? function create_buffer_setup(buffer) {\n return (Reader.create = function create_buffer(buffer) {\n return util.Buffer.isBuffer(buffer)\n ? 
new BufferReader(buffer)\n /* istanbul ignore next */\n : create_array(buffer);\n })(buffer);\n }\n /* istanbul ignore next */\n : create_array;\n};\n\n/**\n * Creates a new reader using the specified buffer.\n * @function\n * @param {Uint8Array|Buffer} buffer Buffer to read from\n * @returns {Reader|BufferReader} A {@link BufferReader} if `buffer` is a Buffer, otherwise a {@link Reader}\n * @throws {Error} If `buffer` is not a valid buffer\n */\nReader.create = create();\n\nReader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */ util.Array.prototype.slice;\n\n/**\n * Reads a varint as an unsigned 32 bit value.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.uint32 = (function read_uint32_setup() {\n var value = 4294967295; // optimizer type-hint, tends to deopt otherwise (?!)\n return function read_uint32() {\n value = ( this.buf[this.pos] & 127 ) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; if (this.buf[this.pos++] < 128) return value;\n\n /* istanbul ignore if */\n if ((this.pos += 5) > this.len) {\n this.pos = this.len;\n throw indexOutOfRange(this, 10);\n }\n return value;\n };\n})();\n\n/**\n * Reads a varint as a signed 32 bit value.\n * @returns {number} Value read\n */\nReader.prototype.int32 = function read_int32() {\n return this.uint32() | 0;\n};\n\n/**\n * Reads a zig-zag encoded varint as a signed 32 bit value.\n * @returns {number} Value read\n */\nReader.prototype.sint32 = function read_sint32() {\n var value = this.uint32();\n return value >>> 1 ^ -(value & 1) | 0;\n};\n\n/* eslint-disable no-invalid-this */\n\nfunction readLongVarint() {\n // tends to deopt with local vars for octet etc.\n var bits = new LongBits(0, 0);\n var i = 0;\n if (this.len - this.pos > 4) { // fast route (lo)\n for (; i < 4; ++i) {\n // 1st..4th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n // 5th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n i = 0;\n } else {\n for (; i < 3; ++i) {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n // 1st..3th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n // 4th\n bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;\n return bits;\n }\n if (this.len - this.pos > 4) { // fast route (hi)\n for (; i < 5; ++i) {\n // 6th..10th\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n } else {\n for (; i < 5; ++i) {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n // 6th..10th\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n }\n /* istanbul ignore next */\n throw Error(\"invalid varint encoding\");\n}\n\n/* eslint-enable no-invalid-this */\n\n/**\n * Reads a varint as a signed 64 bit value.\n * @name Reader#int64\n * @function\n * 
@returns {Long} Value read\n */\n\n/**\n * Reads a varint as an unsigned 64 bit value.\n * @name Reader#uint64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a zig-zag encoded varint as a signed 64 bit value.\n * @name Reader#sint64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a varint as a boolean.\n * @returns {boolean} Value read\n */\nReader.prototype.bool = function read_bool() {\n return this.uint32() !== 0;\n};\n\nfunction readFixed32_end(buf, end) { // note that this uses `end`, not `pos`\n return (buf[end - 4]\n | buf[end - 3] << 8\n | buf[end - 2] << 16\n | buf[end - 1] << 24) >>> 0;\n}\n\n/**\n * Reads fixed 32 bits as an unsigned 32 bit integer.\n * @returns {number} Value read\n */\nReader.prototype.fixed32 = function read_fixed32() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n return readFixed32_end(this.buf, this.pos += 4);\n};\n\n/**\n * Reads fixed 32 bits as a signed 32 bit integer.\n * @returns {number} Value read\n */\nReader.prototype.sfixed32 = function read_sfixed32() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n return readFixed32_end(this.buf, this.pos += 4) | 0;\n};\n\n/* eslint-disable no-invalid-this */\n\nfunction readFixed64(/* this: Reader */) {\n\n /* istanbul ignore if */\n if (this.pos + 8 > this.len)\n throw indexOutOfRange(this, 8);\n\n return new LongBits(readFixed32_end(this.buf, this.pos += 4), readFixed32_end(this.buf, this.pos += 4));\n}\n\n/* eslint-enable no-invalid-this */\n\n/**\n * Reads fixed 64 bits.\n * @name Reader#fixed64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads zig-zag encoded fixed 64 bits.\n * @name Reader#sfixed64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a float (32 bit) as a number.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.float = function read_float() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n var value = util.float.readFloatLE(this.buf, this.pos);\n this.pos += 4;\n return value;\n};\n\n/**\n * Reads a double (64 bit float) as a number.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.double = function read_double() {\n\n /* istanbul ignore if */\n if (this.pos + 8 > this.len)\n throw indexOutOfRange(this, 4);\n\n var value = util.float.readDoubleLE(this.buf, this.pos);\n this.pos += 8;\n return value;\n};\n\n/**\n * Reads a sequence of bytes preceeded by its length as a varint.\n * @returns {Uint8Array} Value read\n */\nReader.prototype.bytes = function read_bytes() {\n var length = this.uint32(),\n start = this.pos,\n end = this.pos + length;\n\n /* istanbul ignore if */\n if (end > this.len)\n throw indexOutOfRange(this, length);\n\n this.pos += length;\n if (Array.isArray(this.buf)) // plain array\n return this.buf.slice(start, end);\n\n if (start === end) { // fix for IE 10/Win8 and others' subarray returning array of size 1\n var nativeBuffer = util.Buffer;\n return nativeBuffer\n ? 
nativeBuffer.alloc(0)\n : new this.buf.constructor(0);\n }\n return this._slice.call(this.buf, start, end);\n};\n\n/**\n * Reads a string preceeded by its byte length as a varint.\n * @returns {string} Value read\n */\nReader.prototype.string = function read_string() {\n var bytes = this.bytes();\n return utf8.read(bytes, 0, bytes.length);\n};\n\n/**\n * Skips the specified number of bytes if specified, otherwise skips a varint.\n * @param {number} [length] Length if known, otherwise a varint is assumed\n * @returns {Reader} `this`\n */\nReader.prototype.skip = function skip(length) {\n if (typeof length === \"number\") {\n /* istanbul ignore if */\n if (this.pos + length > this.len)\n throw indexOutOfRange(this, length);\n this.pos += length;\n } else {\n do {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n } while (this.buf[this.pos++] & 128);\n }\n return this;\n};\n\n/**\n * Skips the next element of the specified wire type.\n * @param {number} wireType Wire type received\n * @returns {Reader} `this`\n */\nReader.prototype.skipType = function(wireType) {\n switch (wireType) {\n case 0:\n this.skip();\n break;\n case 1:\n this.skip(8);\n break;\n case 2:\n this.skip(this.uint32());\n break;\n case 3:\n while ((wireType = this.uint32() & 7) !== 4) {\n this.skipType(wireType);\n }\n break;\n case 5:\n this.skip(4);\n break;\n\n /* istanbul ignore next */\n default:\n throw Error(\"invalid wire type \" + wireType + \" at offset \" + this.pos);\n }\n return this;\n};\n\nReader._configure = function(BufferReader_) {\n BufferReader = BufferReader_;\n Reader.create = create();\n BufferReader._configure();\n\n var fn = util.Long ? \"toLong\" : /* istanbul ignore next */ \"toNumber\";\n util.merge(Reader.prototype, {\n\n int64: function read_int64() {\n return readLongVarint.call(this)[fn](false);\n },\n\n uint64: function read_uint64() {\n return readLongVarint.call(this)[fn](true);\n },\n\n sint64: function read_sint64() {\n return readLongVarint.call(this).zzDecode()[fn](false);\n },\n\n fixed64: function read_fixed64() {\n return readFixed64.call(this)[fn](true);\n },\n\n sfixed64: function read_sfixed64() {\n return readFixed64.call(this)[fn](false);\n }\n\n });\n};\n", "\"use strict\";\nmodule.exports = BufferReader;\n\n// extends Reader\nvar Reader = require(\"./reader\");\n(BufferReader.prototype = Object.create(Reader.prototype)).constructor = BufferReader;\n\nvar util = require(\"./util/minimal\");\n\n/**\n * Constructs a new buffer reader instance.\n * @classdesc Wire format reader using node buffers.\n * @extends Reader\n * @constructor\n * @param {Buffer} buffer Buffer to read from\n */\nfunction BufferReader(buffer) {\n Reader.call(this, buffer);\n\n /**\n * Read buffer.\n * @name BufferReader#buf\n * @type {Buffer}\n */\n}\n\nBufferReader._configure = function () {\n /* istanbul ignore else */\n if (util.Buffer)\n BufferReader.prototype._slice = util.Buffer.prototype.slice;\n};\n\n\n/**\n * @override\n */\nBufferReader.prototype.string = function read_string_buffer() {\n var len = this.uint32(); // modifies pos\n return this.buf.utf8Slice\n ? 
this.buf.utf8Slice(this.pos, this.pos = Math.min(this.pos + len, this.len))\n : this.buf.toString(\"utf-8\", this.pos, this.pos = Math.min(this.pos + len, this.len));\n};\n\n/**\n * Reads a sequence of bytes preceeded by its length as a varint.\n * @name BufferReader#bytes\n * @function\n * @returns {Buffer} Value read\n */\n\nBufferReader._configure();\n", "\"use strict\";\nmodule.exports = Service;\n\nvar util = require(\"../util/minimal\");\n\n// Extends EventEmitter\n(Service.prototype = Object.create(util.EventEmitter.prototype)).constructor = Service;\n\n/**\n * A service method callback as used by {@link rpc.ServiceMethod|ServiceMethod}.\n *\n * Differs from {@link RPCImplCallback} in that it is an actual callback of a service method which may not return `response = null`.\n * @typedef rpc.ServiceMethodCallback\n * @template TRes extends Message\n * @type {function}\n * @param {Error|null} error Error, if any\n * @param {TRes} [response] Response message\n * @returns {undefined}\n */\n\n/**\n * A service method part of a {@link rpc.Service} as created by {@link Service.create}.\n * @typedef rpc.ServiceMethod\n * @template TReq extends Message\n * @template TRes extends Message\n * @type {function}\n * @param {TReq|Properties} request Request message or plain object\n * @param {rpc.ServiceMethodCallback} [callback] Node-style callback called with the error, if any, and the response message\n * @returns {Promise>} Promise if `callback` has been omitted, otherwise `undefined`\n */\n\n/**\n * Constructs a new RPC service instance.\n * @classdesc An RPC service as returned by {@link Service#create}.\n * @exports rpc.Service\n * @extends util.EventEmitter\n * @constructor\n * @param {RPCImpl} rpcImpl RPC implementation\n * @param {boolean} [requestDelimited=false] Whether requests are length-delimited\n * @param {boolean} [responseDelimited=false] Whether responses are length-delimited\n */\nfunction Service(rpcImpl, requestDelimited, responseDelimited) {\n\n if (typeof rpcImpl !== \"function\")\n throw TypeError(\"rpcImpl must be a function\");\n\n util.EventEmitter.call(this);\n\n /**\n * RPC implementation. Becomes `null` once the service is ended.\n * @type {RPCImpl|null}\n */\n this.rpcImpl = rpcImpl;\n\n /**\n * Whether requests are length-delimited.\n * @type {boolean}\n */\n this.requestDelimited = Boolean(requestDelimited);\n\n /**\n * Whether responses are length-delimited.\n * @type {boolean}\n */\n this.responseDelimited = Boolean(responseDelimited);\n}\n\n/**\n * Calls a service method through {@link rpc.Service#rpcImpl|rpcImpl}.\n * @param {Method|rpc.ServiceMethod} method Reflected or static method\n * @param {Constructor} requestCtor Request constructor\n * @param {Constructor} responseCtor Response constructor\n * @param {TReq|Properties} request Request message or plain object\n * @param {rpc.ServiceMethodCallback} callback Service callback\n * @returns {undefined}\n * @template TReq extends Message\n * @template TRes extends Message\n */\nService.prototype.rpcCall = function rpcCall(method, requestCtor, responseCtor, request, callback) {\n\n if (!request)\n throw TypeError(\"request must be specified\");\n\n var self = this;\n if (!callback)\n return util.asPromise(rpcCall, self, method, requestCtor, responseCtor, request);\n\n if (!self.rpcImpl) {\n setTimeout(function() { callback(Error(\"already ended\")); }, 0);\n return undefined;\n }\n\n try {\n return self.rpcImpl(\n method,\n requestCtor[self.requestDelimited ? 
\"encodeDelimited\" : \"encode\"](request).finish(),\n function rpcCallback(err, response) {\n\n if (err) {\n self.emit(\"error\", err, method);\n return callback(err);\n }\n\n if (response === null) {\n self.end(/* endedByRPC */ true);\n return undefined;\n }\n\n if (!(response instanceof responseCtor)) {\n try {\n response = responseCtor[self.responseDelimited ? \"decodeDelimited\" : \"decode\"](response);\n } catch (err) {\n self.emit(\"error\", err, method);\n return callback(err);\n }\n }\n\n self.emit(\"data\", response, method);\n return callback(null, response);\n }\n );\n } catch (err) {\n self.emit(\"error\", err, method);\n setTimeout(function() { callback(err); }, 0);\n return undefined;\n }\n};\n\n/**\n * Ends this service and emits the `end` event.\n * @param {boolean} [endedByRPC=false] Whether the service has been ended by the RPC implementation.\n * @returns {rpc.Service} `this`\n */\nService.prototype.end = function end(endedByRPC) {\n if (this.rpcImpl) {\n if (!endedByRPC) // signal end to rpcImpl\n this.rpcImpl(null, null, null);\n this.rpcImpl = null;\n this.emit(\"end\").off();\n }\n return this;\n};\n", "\"use strict\";\n\n/**\n * Streaming RPC helpers.\n * @namespace\n */\nvar rpc = exports;\n\n/**\n * RPC implementation passed to {@link Service#create} performing a service request on network level, i.e. by utilizing http requests or websockets.\n * @typedef RPCImpl\n * @type {function}\n * @param {Method|rpc.ServiceMethod,Message<{}>>} method Reflected or static method being called\n * @param {Uint8Array} requestData Request data\n * @param {RPCImplCallback} callback Callback function\n * @returns {undefined}\n * @example\n * function rpcImpl(method, requestData, callback) {\n * if (protobuf.util.lcFirst(method.name) !== \"myMethod\") // compatible with static code\n * throw Error(\"no such method\");\n * asynchronouslyObtainAResponse(requestData, function(err, responseData) {\n * callback(err, responseData);\n * });\n * }\n */\n\n/**\n * Node-style callback as used by {@link RPCImpl}.\n * @typedef RPCImplCallback\n * @type {function}\n * @param {Error|null} error Error, if any, otherwise `null`\n * @param {Uint8Array|null} [response] Response data or `null` to signal end of stream, if there hasn't been an error\n * @returns {undefined}\n */\n\nrpc.Service = require(\"./rpc/service\");\n", "\"use strict\";\nmodule.exports = {};\n\n/**\n * Named roots.\n * This is where pbjs stores generated structures (the option `-r, --root` specifies a name).\n * Can also be used manually to make roots available across modules.\n * @name roots\n * @type {Object.}\n * @example\n * // pbjs -r myroot -o compiled.js ...\n *\n * // in another module:\n * require(\"./compiled.js\");\n *\n * // in any subsequent module:\n * var root = protobuf.roots[\"myroot\"];\n */\n", "\"use strict\";\nvar protobuf = exports;\n\n/**\n * Build type, one of `\"full\"`, `\"light\"` or `\"minimal\"`.\n * @name build\n * @type {string}\n * @const\n */\nprotobuf.build = \"minimal\";\n\n// Serialization\nprotobuf.Writer = require(\"./writer\");\nprotobuf.BufferWriter = require(\"./writer_buffer\");\nprotobuf.Reader = require(\"./reader\");\nprotobuf.BufferReader = require(\"./reader_buffer\");\n\n// Utility\nprotobuf.util = require(\"./util/minimal\");\nprotobuf.rpc = require(\"./rpc\");\nprotobuf.roots = require(\"./roots\");\nprotobuf.configure = configure;\n\n/* istanbul ignore next */\n/**\n * Reconfigures the library according to the environment.\n * @returns {undefined}\n */\nfunction configure() {\n 
protobuf.util._configure();\n protobuf.Writer._configure(protobuf.BufferWriter);\n protobuf.Reader._configure(protobuf.BufferReader);\n}\n\n// Set up buffer utility according to the environment\nconfigure();\n", "// minimal library entry point.\n\n\"use strict\";\nmodule.exports = require(\"./src/index-minimal\");\n", "/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/\n\"use strict\";\n\nvar $protobuf = require(\"protobufjs/minimal\");\n\n// Common aliases\nvar $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;\n\n// Exported root namespace\nvar $root = $protobuf.roots[\"default\"] || ($protobuf.roots[\"default\"] = {});\n\n$root.onnx = (function() {\n\n /**\n * Namespace onnx.\n * @exports onnx\n * @namespace\n */\n var onnx = {};\n\n /**\n * Version enum.\n * @name onnx.Version\n * @enum {number}\n * @property {number} _START_VERSION=0 _START_VERSION value\n * @property {number} IR_VERSION_2017_10_10=1 IR_VERSION_2017_10_10 value\n * @property {number} IR_VERSION_2017_10_30=2 IR_VERSION_2017_10_30 value\n * @property {number} IR_VERSION_2017_11_3=3 IR_VERSION_2017_11_3 value\n * @property {number} IR_VERSION_2019_1_22=4 IR_VERSION_2019_1_22 value\n * @property {number} IR_VERSION_2019_3_18=5 IR_VERSION_2019_3_18 value\n * @property {number} IR_VERSION_2019_9_19=6 IR_VERSION_2019_9_19 value\n * @property {number} IR_VERSION_2020_5_8=7 IR_VERSION_2020_5_8 value\n * @property {number} IR_VERSION_2021_7_30=8 IR_VERSION_2021_7_30 value\n * @property {number} IR_VERSION=9 IR_VERSION value\n */\n onnx.Version = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"_START_VERSION\"] = 0;\n values[valuesById[1] = \"IR_VERSION_2017_10_10\"] = 1;\n values[valuesById[2] = \"IR_VERSION_2017_10_30\"] = 2;\n values[valuesById[3] = \"IR_VERSION_2017_11_3\"] = 3;\n values[valuesById[4] = \"IR_VERSION_2019_1_22\"] = 4;\n values[valuesById[5] = \"IR_VERSION_2019_3_18\"] = 5;\n values[valuesById[6] = \"IR_VERSION_2019_9_19\"] = 6;\n values[valuesById[7] = \"IR_VERSION_2020_5_8\"] = 7;\n values[valuesById[8] = \"IR_VERSION_2021_7_30\"] = 8;\n values[valuesById[9] = \"IR_VERSION\"] = 9;\n return values;\n })();\n\n onnx.AttributeProto = (function() {\n\n /**\n * Properties of an AttributeProto.\n * @memberof onnx\n * @interface IAttributeProto\n * @property {string|null} [name] AttributeProto name\n * @property {string|null} [refAttrName] AttributeProto refAttrName\n * @property {string|null} [docString] AttributeProto docString\n * @property {onnx.AttributeProto.AttributeType|null} [type] AttributeProto type\n * @property {number|null} [f] AttributeProto f\n * @property {number|Long|null} [i] AttributeProto i\n * @property {Uint8Array|null} [s] AttributeProto s\n * @property {onnx.ITensorProto|null} [t] AttributeProto t\n * @property {onnx.IGraphProto|null} [g] AttributeProto g\n * @property {onnx.ISparseTensorProto|null} [sparseTensor] AttributeProto sparseTensor\n * @property {onnx.ITypeProto|null} [tp] AttributeProto tp\n * @property {Array.|null} [floats] AttributeProto floats\n * @property {Array.|null} [ints] AttributeProto ints\n * @property {Array.|null} [strings] AttributeProto strings\n * @property {Array.|null} [tensors] AttributeProto tensors\n * @property {Array.|null} [graphs] AttributeProto graphs\n * @property {Array.|null} [sparseTensors] AttributeProto sparseTensors\n * @property {Array.|null} [typeProtos] 
AttributeProto typeProtos\n */\n\n /**\n * Constructs a new AttributeProto.\n * @memberof onnx\n * @classdesc Represents an AttributeProto.\n * @implements IAttributeProto\n * @constructor\n * @param {onnx.IAttributeProto=} [properties] Properties to set\n */\n function AttributeProto(properties) {\n this.floats = [];\n this.ints = [];\n this.strings = [];\n this.tensors = [];\n this.graphs = [];\n this.sparseTensors = [];\n this.typeProtos = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * AttributeProto name.\n * @member {string} name\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.name = \"\";\n\n /**\n * AttributeProto refAttrName.\n * @member {string} refAttrName\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.refAttrName = \"\";\n\n /**\n * AttributeProto docString.\n * @member {string} docString\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.docString = \"\";\n\n /**\n * AttributeProto type.\n * @member {onnx.AttributeProto.AttributeType} type\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.type = 0;\n\n /**\n * AttributeProto f.\n * @member {number} f\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.f = 0;\n\n /**\n * AttributeProto i.\n * @member {number|Long} i\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.i = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * AttributeProto s.\n * @member {Uint8Array} s\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.s = $util.newBuffer([]);\n\n /**\n * AttributeProto t.\n * @member {onnx.ITensorProto|null|undefined} t\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.t = null;\n\n /**\n * AttributeProto g.\n * @member {onnx.IGraphProto|null|undefined} g\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.g = null;\n\n /**\n * AttributeProto sparseTensor.\n * @member {onnx.ISparseTensorProto|null|undefined} sparseTensor\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.sparseTensor = null;\n\n /**\n * AttributeProto tp.\n * @member {onnx.ITypeProto|null|undefined} tp\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.tp = null;\n\n /**\n * AttributeProto floats.\n * @member {Array.} floats\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.floats = $util.emptyArray;\n\n /**\n * AttributeProto ints.\n * @member {Array.} ints\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.ints = $util.emptyArray;\n\n /**\n * AttributeProto strings.\n * @member {Array.} strings\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.strings = $util.emptyArray;\n\n /**\n * AttributeProto tensors.\n * @member {Array.} tensors\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.tensors = $util.emptyArray;\n\n /**\n * AttributeProto graphs.\n * @member {Array.} graphs\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.graphs = $util.emptyArray;\n\n /**\n * AttributeProto sparseTensors.\n * @member {Array.} sparseTensors\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.sparseTensors = $util.emptyArray;\n\n /**\n * 
AttributeProto typeProtos.\n * @member {Array.} typeProtos\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.typeProtos = $util.emptyArray;\n\n /**\n * Creates a new AttributeProto instance using the specified properties.\n * @function create\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.IAttributeProto=} [properties] Properties to set\n * @returns {onnx.AttributeProto} AttributeProto instance\n */\n AttributeProto.create = function create(properties) {\n return new AttributeProto(properties);\n };\n\n /**\n * Encodes the specified AttributeProto message. Does not implicitly {@link onnx.AttributeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.IAttributeProto} message AttributeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n AttributeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);\n if (message.f != null && Object.hasOwnProperty.call(message, \"f\"))\n writer.uint32(/* id 2, wireType 5 =*/21).float(message.f);\n if (message.i != null && Object.hasOwnProperty.call(message, \"i\"))\n writer.uint32(/* id 3, wireType 0 =*/24).int64(message.i);\n if (message.s != null && Object.hasOwnProperty.call(message, \"s\"))\n writer.uint32(/* id 4, wireType 2 =*/34).bytes(message.s);\n if (message.t != null && Object.hasOwnProperty.call(message, \"t\"))\n $root.onnx.TensorProto.encode(message.t, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.g != null && Object.hasOwnProperty.call(message, \"g\"))\n $root.onnx.GraphProto.encode(message.g, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim();\n if (message.floats != null && message.floats.length) {\n writer.uint32(/* id 7, wireType 2 =*/58).fork();\n for (var i = 0; i < message.floats.length; ++i)\n writer.float(message.floats[i]);\n writer.ldelim();\n }\n if (message.ints != null && message.ints.length) {\n writer.uint32(/* id 8, wireType 2 =*/66).fork();\n for (var i = 0; i < message.ints.length; ++i)\n writer.int64(message.ints[i]);\n writer.ldelim();\n }\n if (message.strings != null && message.strings.length)\n for (var i = 0; i < message.strings.length; ++i)\n writer.uint32(/* id 9, wireType 2 =*/74).bytes(message.strings[i]);\n if (message.tensors != null && message.tensors.length)\n for (var i = 0; i < message.tensors.length; ++i)\n $root.onnx.TensorProto.encode(message.tensors[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim();\n if (message.graphs != null && message.graphs.length)\n for (var i = 0; i < message.graphs.length; ++i)\n $root.onnx.GraphProto.encode(message.graphs[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 13, wireType 2 =*/106).string(message.docString);\n if (message.tp != null && Object.hasOwnProperty.call(message, \"tp\"))\n $root.onnx.TypeProto.encode(message.tp, writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim();\n if (message.typeProtos != null && message.typeProtos.length)\n for (var i = 0; i < message.typeProtos.length; ++i)\n $root.onnx.TypeProto.encode(message.typeProtos[i], writer.uint32(/* id 15, wireType 2 =*/122).fork()).ldelim();\n if (message.type != 
null && Object.hasOwnProperty.call(message, \"type\"))\n writer.uint32(/* id 20, wireType 0 =*/160).int32(message.type);\n if (message.refAttrName != null && Object.hasOwnProperty.call(message, \"refAttrName\"))\n writer.uint32(/* id 21, wireType 2 =*/170).string(message.refAttrName);\n if (message.sparseTensor != null && Object.hasOwnProperty.call(message, \"sparseTensor\"))\n $root.onnx.SparseTensorProto.encode(message.sparseTensor, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim();\n if (message.sparseTensors != null && message.sparseTensors.length)\n for (var i = 0; i < message.sparseTensors.length; ++i)\n $root.onnx.SparseTensorProto.encode(message.sparseTensors[i], writer.uint32(/* id 23, wireType 2 =*/186).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified AttributeProto message, length delimited. Does not implicitly {@link onnx.AttributeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.IAttributeProto} message AttributeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n AttributeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes an AttributeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.AttributeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.AttributeProto} AttributeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n AttributeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.AttributeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.name = reader.string();\n break;\n }\n case 21: {\n message.refAttrName = reader.string();\n break;\n }\n case 13: {\n message.docString = reader.string();\n break;\n }\n case 20: {\n message.type = reader.int32();\n break;\n }\n case 2: {\n message.f = reader.float();\n break;\n }\n case 3: {\n message.i = reader.int64();\n break;\n }\n case 4: {\n message.s = reader.bytes();\n break;\n }\n case 5: {\n message.t = $root.onnx.TensorProto.decode(reader, reader.uint32());\n break;\n }\n case 6: {\n message.g = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 22: {\n message.sparseTensor = $root.onnx.SparseTensorProto.decode(reader, reader.uint32());\n break;\n }\n case 14: {\n message.tp = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n case 7: {\n if (!(message.floats && message.floats.length))\n message.floats = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.floats.push(reader.float());\n } else\n message.floats.push(reader.float());\n break;\n }\n case 8: {\n if (!(message.ints && message.ints.length))\n message.ints = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.ints.push(reader.int64());\n } else\n message.ints.push(reader.int64());\n break;\n }\n case 9: {\n if (!(message.strings && message.strings.length))\n message.strings = [];\n message.strings.push(reader.bytes());\n break;\n }\n case 10: {\n if (!(message.tensors && message.tensors.length))\n message.tensors = [];\n message.tensors.push($root.onnx.TensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 11: {\n if (!(message.graphs && message.graphs.length))\n message.graphs = [];\n message.graphs.push($root.onnx.GraphProto.decode(reader, reader.uint32()));\n break;\n }\n case 23: {\n if (!(message.sparseTensors && message.sparseTensors.length))\n message.sparseTensors = [];\n message.sparseTensors.push($root.onnx.SparseTensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 15: {\n if (!(message.typeProtos && message.typeProtos.length))\n message.typeProtos = [];\n message.typeProtos.push($root.onnx.TypeProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes an AttributeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.AttributeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.AttributeProto} AttributeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n AttributeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies an AttributeProto message.\n * @function verify\n * @memberof onnx.AttributeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n AttributeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if 
(message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.refAttrName != null && message.hasOwnProperty(\"refAttrName\"))\n if (!$util.isString(message.refAttrName))\n return \"refAttrName: string expected\";\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.type != null && message.hasOwnProperty(\"type\"))\n switch (message.type) {\n default:\n return \"type: enum value expected\";\n case 0:\n case 1:\n case 2:\n case 3:\n case 4:\n case 5:\n case 11:\n case 13:\n case 6:\n case 7:\n case 8:\n case 9:\n case 10:\n case 12:\n case 14:\n break;\n }\n if (message.f != null && message.hasOwnProperty(\"f\"))\n if (typeof message.f !== \"number\")\n return \"f: number expected\";\n if (message.i != null && message.hasOwnProperty(\"i\"))\n if (!$util.isInteger(message.i) && !(message.i && $util.isInteger(message.i.low) && $util.isInteger(message.i.high)))\n return \"i: integer|Long expected\";\n if (message.s != null && message.hasOwnProperty(\"s\"))\n if (!(message.s && typeof message.s.length === \"number\" || $util.isString(message.s)))\n return \"s: buffer expected\";\n if (message.t != null && message.hasOwnProperty(\"t\")) {\n var error = $root.onnx.TensorProto.verify(message.t);\n if (error)\n return \"t.\" + error;\n }\n if (message.g != null && message.hasOwnProperty(\"g\")) {\n var error = $root.onnx.GraphProto.verify(message.g);\n if (error)\n return \"g.\" + error;\n }\n if (message.sparseTensor != null && message.hasOwnProperty(\"sparseTensor\")) {\n var error = $root.onnx.SparseTensorProto.verify(message.sparseTensor);\n if (error)\n return \"sparseTensor.\" + error;\n }\n if (message.tp != null && message.hasOwnProperty(\"tp\")) {\n var error = $root.onnx.TypeProto.verify(message.tp);\n if (error)\n return \"tp.\" + error;\n }\n if (message.floats != null && message.hasOwnProperty(\"floats\")) {\n if (!Array.isArray(message.floats))\n return \"floats: array expected\";\n for (var i = 0; i < message.floats.length; ++i)\n if (typeof message.floats[i] !== \"number\")\n return \"floats: number[] expected\";\n }\n if (message.ints != null && message.hasOwnProperty(\"ints\")) {\n if (!Array.isArray(message.ints))\n return \"ints: array expected\";\n for (var i = 0; i < message.ints.length; ++i)\n if (!$util.isInteger(message.ints[i]) && !(message.ints[i] && $util.isInteger(message.ints[i].low) && $util.isInteger(message.ints[i].high)))\n return \"ints: integer|Long[] expected\";\n }\n if (message.strings != null && message.hasOwnProperty(\"strings\")) {\n if (!Array.isArray(message.strings))\n return \"strings: array expected\";\n for (var i = 0; i < message.strings.length; ++i)\n if (!(message.strings[i] && typeof message.strings[i].length === \"number\" || $util.isString(message.strings[i])))\n return \"strings: buffer[] expected\";\n }\n if (message.tensors != null && message.hasOwnProperty(\"tensors\")) {\n if (!Array.isArray(message.tensors))\n return \"tensors: array expected\";\n for (var i = 0; i < message.tensors.length; ++i) {\n var error = $root.onnx.TensorProto.verify(message.tensors[i]);\n if (error)\n return \"tensors.\" + error;\n }\n }\n if (message.graphs != null && message.hasOwnProperty(\"graphs\")) {\n if (!Array.isArray(message.graphs))\n return \"graphs: array expected\";\n for (var i = 0; i < message.graphs.length; ++i) {\n var error = 
$root.onnx.GraphProto.verify(message.graphs[i]);\n if (error)\n return \"graphs.\" + error;\n }\n }\n if (message.sparseTensors != null && message.hasOwnProperty(\"sparseTensors\")) {\n if (!Array.isArray(message.sparseTensors))\n return \"sparseTensors: array expected\";\n for (var i = 0; i < message.sparseTensors.length; ++i) {\n var error = $root.onnx.SparseTensorProto.verify(message.sparseTensors[i]);\n if (error)\n return \"sparseTensors.\" + error;\n }\n }\n if (message.typeProtos != null && message.hasOwnProperty(\"typeProtos\")) {\n if (!Array.isArray(message.typeProtos))\n return \"typeProtos: array expected\";\n for (var i = 0; i < message.typeProtos.length; ++i) {\n var error = $root.onnx.TypeProto.verify(message.typeProtos[i]);\n if (error)\n return \"typeProtos.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates an AttributeProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.AttributeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.AttributeProto} AttributeProto\n */\n AttributeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.AttributeProto)\n return object;\n var message = new $root.onnx.AttributeProto();\n if (object.name != null)\n message.name = String(object.name);\n if (object.refAttrName != null)\n message.refAttrName = String(object.refAttrName);\n if (object.docString != null)\n message.docString = String(object.docString);\n switch (object.type) {\n default:\n if (typeof object.type === \"number\") {\n message.type = object.type;\n break;\n }\n break;\n case \"UNDEFINED\":\n case 0:\n message.type = 0;\n break;\n case \"FLOAT\":\n case 1:\n message.type = 1;\n break;\n case \"INT\":\n case 2:\n message.type = 2;\n break;\n case \"STRING\":\n case 3:\n message.type = 3;\n break;\n case \"TENSOR\":\n case 4:\n message.type = 4;\n break;\n case \"GRAPH\":\n case 5:\n message.type = 5;\n break;\n case \"SPARSE_TENSOR\":\n case 11:\n message.type = 11;\n break;\n case \"TYPE_PROTO\":\n case 13:\n message.type = 13;\n break;\n case \"FLOATS\":\n case 6:\n message.type = 6;\n break;\n case \"INTS\":\n case 7:\n message.type = 7;\n break;\n case \"STRINGS\":\n case 8:\n message.type = 8;\n break;\n case \"TENSORS\":\n case 9:\n message.type = 9;\n break;\n case \"GRAPHS\":\n case 10:\n message.type = 10;\n break;\n case \"SPARSE_TENSORS\":\n case 12:\n message.type = 12;\n break;\n case \"TYPE_PROTOS\":\n case 14:\n message.type = 14;\n break;\n }\n if (object.f != null)\n message.f = Number(object.f);\n if (object.i != null)\n if ($util.Long)\n (message.i = $util.Long.fromValue(object.i)).unsigned = false;\n else if (typeof object.i === \"string\")\n message.i = parseInt(object.i, 10);\n else if (typeof object.i === \"number\")\n message.i = object.i;\n else if (typeof object.i === \"object\")\n message.i = new $util.LongBits(object.i.low >>> 0, object.i.high >>> 0).toNumber();\n if (object.s != null)\n if (typeof object.s === \"string\")\n $util.base64.decode(object.s, message.s = $util.newBuffer($util.base64.length(object.s)), 0);\n else if (object.s.length >= 0)\n message.s = object.s;\n if (object.t != null) {\n if (typeof object.t !== \"object\")\n throw TypeError(\".onnx.AttributeProto.t: object expected\");\n message.t = $root.onnx.TensorProto.fromObject(object.t);\n }\n if (object.g != null) {\n if (typeof object.g !== \"object\")\n throw TypeError(\".onnx.AttributeProto.g: object expected\");\n message.g = 
$root.onnx.GraphProto.fromObject(object.g);\n }\n if (object.sparseTensor != null) {\n if (typeof object.sparseTensor !== \"object\")\n throw TypeError(\".onnx.AttributeProto.sparseTensor: object expected\");\n message.sparseTensor = $root.onnx.SparseTensorProto.fromObject(object.sparseTensor);\n }\n if (object.tp != null) {\n if (typeof object.tp !== \"object\")\n throw TypeError(\".onnx.AttributeProto.tp: object expected\");\n message.tp = $root.onnx.TypeProto.fromObject(object.tp);\n }\n if (object.floats) {\n if (!Array.isArray(object.floats))\n throw TypeError(\".onnx.AttributeProto.floats: array expected\");\n message.floats = [];\n for (var i = 0; i < object.floats.length; ++i)\n message.floats[i] = Number(object.floats[i]);\n }\n if (object.ints) {\n if (!Array.isArray(object.ints))\n throw TypeError(\".onnx.AttributeProto.ints: array expected\");\n message.ints = [];\n for (var i = 0; i < object.ints.length; ++i)\n if ($util.Long)\n (message.ints[i] = $util.Long.fromValue(object.ints[i])).unsigned = false;\n else if (typeof object.ints[i] === \"string\")\n message.ints[i] = parseInt(object.ints[i], 10);\n else if (typeof object.ints[i] === \"number\")\n message.ints[i] = object.ints[i];\n else if (typeof object.ints[i] === \"object\")\n message.ints[i] = new $util.LongBits(object.ints[i].low >>> 0, object.ints[i].high >>> 0).toNumber();\n }\n if (object.strings) {\n if (!Array.isArray(object.strings))\n throw TypeError(\".onnx.AttributeProto.strings: array expected\");\n message.strings = [];\n for (var i = 0; i < object.strings.length; ++i)\n if (typeof object.strings[i] === \"string\")\n $util.base64.decode(object.strings[i], message.strings[i] = $util.newBuffer($util.base64.length(object.strings[i])), 0);\n else if (object.strings[i].length >= 0)\n message.strings[i] = object.strings[i];\n }\n if (object.tensors) {\n if (!Array.isArray(object.tensors))\n throw TypeError(\".onnx.AttributeProto.tensors: array expected\");\n message.tensors = [];\n for (var i = 0; i < object.tensors.length; ++i) {\n if (typeof object.tensors[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.tensors: object expected\");\n message.tensors[i] = $root.onnx.TensorProto.fromObject(object.tensors[i]);\n }\n }\n if (object.graphs) {\n if (!Array.isArray(object.graphs))\n throw TypeError(\".onnx.AttributeProto.graphs: array expected\");\n message.graphs = [];\n for (var i = 0; i < object.graphs.length; ++i) {\n if (typeof object.graphs[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.graphs: object expected\");\n message.graphs[i] = $root.onnx.GraphProto.fromObject(object.graphs[i]);\n }\n }\n if (object.sparseTensors) {\n if (!Array.isArray(object.sparseTensors))\n throw TypeError(\".onnx.AttributeProto.sparseTensors: array expected\");\n message.sparseTensors = [];\n for (var i = 0; i < object.sparseTensors.length; ++i) {\n if (typeof object.sparseTensors[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.sparseTensors: object expected\");\n message.sparseTensors[i] = $root.onnx.SparseTensorProto.fromObject(object.sparseTensors[i]);\n }\n }\n if (object.typeProtos) {\n if (!Array.isArray(object.typeProtos))\n throw TypeError(\".onnx.AttributeProto.typeProtos: array expected\");\n message.typeProtos = [];\n for (var i = 0; i < object.typeProtos.length; ++i) {\n if (typeof object.typeProtos[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.typeProtos: object expected\");\n message.typeProtos[i] = $root.onnx.TypeProto.fromObject(object.typeProtos[i]);\n }\n }\n return 
message;\n };\n\n /**\n * Creates a plain object from an AttributeProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.AttributeProto} message AttributeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n AttributeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.floats = [];\n object.ints = [];\n object.strings = [];\n object.tensors = [];\n object.graphs = [];\n object.typeProtos = [];\n object.sparseTensors = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.f = 0;\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.i = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.i = options.longs === String ? \"0\" : 0;\n if (options.bytes === String)\n object.s = \"\";\n else {\n object.s = [];\n if (options.bytes !== Array)\n object.s = $util.newBuffer(object.s);\n }\n object.t = null;\n object.g = null;\n object.docString = \"\";\n object.tp = null;\n object.type = options.enums === String ? \"UNDEFINED\" : 0;\n object.refAttrName = \"\";\n object.sparseTensor = null;\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.f != null && message.hasOwnProperty(\"f\"))\n object.f = options.json && !isFinite(message.f) ? String(message.f) : message.f;\n if (message.i != null && message.hasOwnProperty(\"i\"))\n if (typeof message.i === \"number\")\n object.i = options.longs === String ? String(message.i) : message.i;\n else\n object.i = options.longs === String ? $util.Long.prototype.toString.call(message.i) : options.longs === Number ? new $util.LongBits(message.i.low >>> 0, message.i.high >>> 0).toNumber() : message.i;\n if (message.s != null && message.hasOwnProperty(\"s\"))\n object.s = options.bytes === String ? $util.base64.encode(message.s, 0, message.s.length) : options.bytes === Array ? Array.prototype.slice.call(message.s) : message.s;\n if (message.t != null && message.hasOwnProperty(\"t\"))\n object.t = $root.onnx.TensorProto.toObject(message.t, options);\n if (message.g != null && message.hasOwnProperty(\"g\"))\n object.g = $root.onnx.GraphProto.toObject(message.g, options);\n if (message.floats && message.floats.length) {\n object.floats = [];\n for (var j = 0; j < message.floats.length; ++j)\n object.floats[j] = options.json && !isFinite(message.floats[j]) ? String(message.floats[j]) : message.floats[j];\n }\n if (message.ints && message.ints.length) {\n object.ints = [];\n for (var j = 0; j < message.ints.length; ++j)\n if (typeof message.ints[j] === \"number\")\n object.ints[j] = options.longs === String ? String(message.ints[j]) : message.ints[j];\n else\n object.ints[j] = options.longs === String ? $util.Long.prototype.toString.call(message.ints[j]) : options.longs === Number ? new $util.LongBits(message.ints[j].low >>> 0, message.ints[j].high >>> 0).toNumber() : message.ints[j];\n }\n if (message.strings && message.strings.length) {\n object.strings = [];\n for (var j = 0; j < message.strings.length; ++j)\n object.strings[j] = options.bytes === String ? $util.base64.encode(message.strings[j], 0, message.strings[j].length) : options.bytes === Array ? 
Array.prototype.slice.call(message.strings[j]) : message.strings[j];\n }\n if (message.tensors && message.tensors.length) {\n object.tensors = [];\n for (var j = 0; j < message.tensors.length; ++j)\n object.tensors[j] = $root.onnx.TensorProto.toObject(message.tensors[j], options);\n }\n if (message.graphs && message.graphs.length) {\n object.graphs = [];\n for (var j = 0; j < message.graphs.length; ++j)\n object.graphs[j] = $root.onnx.GraphProto.toObject(message.graphs[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.tp != null && message.hasOwnProperty(\"tp\"))\n object.tp = $root.onnx.TypeProto.toObject(message.tp, options);\n if (message.typeProtos && message.typeProtos.length) {\n object.typeProtos = [];\n for (var j = 0; j < message.typeProtos.length; ++j)\n object.typeProtos[j] = $root.onnx.TypeProto.toObject(message.typeProtos[j], options);\n }\n if (message.type != null && message.hasOwnProperty(\"type\"))\n object.type = options.enums === String ? $root.onnx.AttributeProto.AttributeType[message.type] === undefined ? message.type : $root.onnx.AttributeProto.AttributeType[message.type] : message.type;\n if (message.refAttrName != null && message.hasOwnProperty(\"refAttrName\"))\n object.refAttrName = message.refAttrName;\n if (message.sparseTensor != null && message.hasOwnProperty(\"sparseTensor\"))\n object.sparseTensor = $root.onnx.SparseTensorProto.toObject(message.sparseTensor, options);\n if (message.sparseTensors && message.sparseTensors.length) {\n object.sparseTensors = [];\n for (var j = 0; j < message.sparseTensors.length; ++j)\n object.sparseTensors[j] = $root.onnx.SparseTensorProto.toObject(message.sparseTensors[j], options);\n }\n return object;\n };\n\n /**\n * Converts this AttributeProto to JSON.\n * @function toJSON\n * @memberof onnx.AttributeProto\n * @instance\n * @returns {Object.} JSON object\n */\n AttributeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for AttributeProto\n * @function getTypeUrl\n * @memberof onnx.AttributeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n AttributeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.AttributeProto\";\n };\n\n /**\n * AttributeType enum.\n * @name onnx.AttributeProto.AttributeType\n * @enum {number}\n * @property {number} UNDEFINED=0 UNDEFINED value\n * @property {number} FLOAT=1 FLOAT value\n * @property {number} INT=2 INT value\n * @property {number} STRING=3 STRING value\n * @property {number} TENSOR=4 TENSOR value\n * @property {number} GRAPH=5 GRAPH value\n * @property {number} SPARSE_TENSOR=11 SPARSE_TENSOR value\n * @property {number} TYPE_PROTO=13 TYPE_PROTO value\n * @property {number} FLOATS=6 FLOATS value\n * @property {number} INTS=7 INTS value\n * @property {number} STRINGS=8 STRINGS value\n * @property {number} TENSORS=9 TENSORS value\n * @property {number} GRAPHS=10 GRAPHS value\n * @property {number} SPARSE_TENSORS=12 SPARSE_TENSORS value\n * @property {number} TYPE_PROTOS=14 TYPE_PROTOS value\n */\n AttributeProto.AttributeType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"UNDEFINED\"] = 0;\n 
values[valuesById[1] = \"FLOAT\"] = 1;\n values[valuesById[2] = \"INT\"] = 2;\n values[valuesById[3] = \"STRING\"] = 3;\n values[valuesById[4] = \"TENSOR\"] = 4;\n values[valuesById[5] = \"GRAPH\"] = 5;\n values[valuesById[11] = \"SPARSE_TENSOR\"] = 11;\n values[valuesById[13] = \"TYPE_PROTO\"] = 13;\n values[valuesById[6] = \"FLOATS\"] = 6;\n values[valuesById[7] = \"INTS\"] = 7;\n values[valuesById[8] = \"STRINGS\"] = 8;\n values[valuesById[9] = \"TENSORS\"] = 9;\n values[valuesById[10] = \"GRAPHS\"] = 10;\n values[valuesById[12] = \"SPARSE_TENSORS\"] = 12;\n values[valuesById[14] = \"TYPE_PROTOS\"] = 14;\n return values;\n })();\n\n return AttributeProto;\n })();\n\n onnx.ValueInfoProto = (function() {\n\n /**\n * Properties of a ValueInfoProto.\n * @memberof onnx\n * @interface IValueInfoProto\n * @property {string|null} [name] ValueInfoProto name\n * @property {onnx.ITypeProto|null} [type] ValueInfoProto type\n * @property {string|null} [docString] ValueInfoProto docString\n */\n\n /**\n * Constructs a new ValueInfoProto.\n * @memberof onnx\n * @classdesc Represents a ValueInfoProto.\n * @implements IValueInfoProto\n * @constructor\n * @param {onnx.IValueInfoProto=} [properties] Properties to set\n */\n function ValueInfoProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * ValueInfoProto name.\n * @member {string} name\n * @memberof onnx.ValueInfoProto\n * @instance\n */\n ValueInfoProto.prototype.name = \"\";\n\n /**\n * ValueInfoProto type.\n * @member {onnx.ITypeProto|null|undefined} type\n * @memberof onnx.ValueInfoProto\n * @instance\n */\n ValueInfoProto.prototype.type = null;\n\n /**\n * ValueInfoProto docString.\n * @member {string} docString\n * @memberof onnx.ValueInfoProto\n * @instance\n */\n ValueInfoProto.prototype.docString = \"\";\n\n /**\n * Creates a new ValueInfoProto instance using the specified properties.\n * @function create\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.IValueInfoProto=} [properties] Properties to set\n * @returns {onnx.ValueInfoProto} ValueInfoProto instance\n */\n ValueInfoProto.create = function create(properties) {\n return new ValueInfoProto(properties);\n };\n\n /**\n * Encodes the specified ValueInfoProto message. Does not implicitly {@link onnx.ValueInfoProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.IValueInfoProto} message ValueInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ValueInfoProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);\n if (message.type != null && Object.hasOwnProperty.call(message, \"type\"))\n $root.onnx.TypeProto.encode(message.type, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.docString);\n return writer;\n };\n\n /**\n * Encodes the specified ValueInfoProto message, length delimited. 
Does not implicitly {@link onnx.ValueInfoProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.IValueInfoProto} message ValueInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ValueInfoProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a ValueInfoProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.ValueInfoProto} ValueInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ValueInfoProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.ValueInfoProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.name = reader.string();\n break;\n }\n case 2: {\n message.type = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n case 3: {\n message.docString = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a ValueInfoProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.ValueInfoProto} ValueInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ValueInfoProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a ValueInfoProto message.\n * @function verify\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n ValueInfoProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.type != null && message.hasOwnProperty(\"type\")) {\n var error = $root.onnx.TypeProto.verify(message.type);\n if (error)\n return \"type.\" + error;\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n return null;\n };\n\n /**\n * Creates a ValueInfoProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.ValueInfoProto} ValueInfoProto\n */\n ValueInfoProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.ValueInfoProto)\n return object;\n var message = new $root.onnx.ValueInfoProto();\n if (object.name != null)\n message.name = String(object.name);\n if (object.type != null) {\n if (typeof object.type !== \"object\")\n throw TypeError(\".onnx.ValueInfoProto.type: object expected\");\n message.type = $root.onnx.TypeProto.fromObject(object.type);\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n return message;\n };\n\n /**\n * Creates a plain object from a ValueInfoProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.ValueInfoProto} message ValueInfoProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n ValueInfoProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.name = \"\";\n object.type = null;\n object.docString = \"\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.type != null && message.hasOwnProperty(\"type\"))\n object.type = $root.onnx.TypeProto.toObject(message.type, options);\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n return object;\n };\n\n /**\n * Converts this ValueInfoProto to JSON.\n * @function toJSON\n * @memberof onnx.ValueInfoProto\n * @instance\n * @returns {Object.} JSON object\n */\n ValueInfoProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for ValueInfoProto\n * @function getTypeUrl\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n ValueInfoProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.ValueInfoProto\";\n };\n\n return ValueInfoProto;\n })();\n\n onnx.NodeProto = (function() {\n\n /**\n * Properties of a NodeProto.\n * @memberof onnx\n * @interface INodeProto\n * @property {Array.|null} [input] NodeProto input\n * @property {Array.|null} [output] NodeProto output\n * @property {string|null} [name] NodeProto name\n * @property {string|null} [opType] NodeProto opType\n * @property {string|null} [domain] NodeProto domain\n * @property {Array.|null} [attribute] NodeProto attribute\n * @property {string|null} [docString] NodeProto docString\n */\n\n /**\n * Constructs a new NodeProto.\n * @memberof onnx\n * @classdesc Represents a NodeProto.\n * @implements INodeProto\n * @constructor\n * @param {onnx.INodeProto=} [properties] Properties to set\n */\n function NodeProto(properties) {\n this.input = [];\n this.output = [];\n this.attribute = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * NodeProto input.\n * @member {Array.} input\n * 
@memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.input = $util.emptyArray;\n\n /**\n * NodeProto output.\n * @member {Array.} output\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.output = $util.emptyArray;\n\n /**\n * NodeProto name.\n * @member {string} name\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.name = \"\";\n\n /**\n * NodeProto opType.\n * @member {string} opType\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.opType = \"\";\n\n /**\n * NodeProto domain.\n * @member {string} domain\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.domain = \"\";\n\n /**\n * NodeProto attribute.\n * @member {Array.} attribute\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.attribute = $util.emptyArray;\n\n /**\n * NodeProto docString.\n * @member {string} docString\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.docString = \"\";\n\n /**\n * Creates a new NodeProto instance using the specified properties.\n * @function create\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.INodeProto=} [properties] Properties to set\n * @returns {onnx.NodeProto} NodeProto instance\n */\n NodeProto.create = function create(properties) {\n return new NodeProto(properties);\n };\n\n /**\n * Encodes the specified NodeProto message. Does not implicitly {@link onnx.NodeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.INodeProto} message NodeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n NodeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.input != null && message.input.length)\n for (var i = 0; i < message.input.length; ++i)\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.input[i]);\n if (message.output != null && message.output.length)\n for (var i = 0; i < message.output.length; ++i)\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.output[i]);\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.name);\n if (message.opType != null && Object.hasOwnProperty.call(message, \"opType\"))\n writer.uint32(/* id 4, wireType 2 =*/34).string(message.opType);\n if (message.attribute != null && message.attribute.length)\n for (var i = 0; i < message.attribute.length; ++i)\n $root.onnx.AttributeProto.encode(message.attribute[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.docString);\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 7, wireType 2 =*/58).string(message.domain);\n return writer;\n };\n\n /**\n * Encodes the specified NodeProto message, length delimited. 
Does not implicitly {@link onnx.NodeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.INodeProto} message NodeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n NodeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a NodeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.NodeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.NodeProto} NodeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n NodeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.NodeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.input && message.input.length))\n message.input = [];\n message.input.push(reader.string());\n break;\n }\n case 2: {\n if (!(message.output && message.output.length))\n message.output = [];\n message.output.push(reader.string());\n break;\n }\n case 3: {\n message.name = reader.string();\n break;\n }\n case 4: {\n message.opType = reader.string();\n break;\n }\n case 7: {\n message.domain = reader.string();\n break;\n }\n case 5: {\n if (!(message.attribute && message.attribute.length))\n message.attribute = [];\n message.attribute.push($root.onnx.AttributeProto.decode(reader, reader.uint32()));\n break;\n }\n case 6: {\n message.docString = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a NodeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.NodeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.NodeProto} NodeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n NodeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a NodeProto message.\n * @function verify\n * @memberof onnx.NodeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n NodeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.input != null && message.hasOwnProperty(\"input\")) {\n if (!Array.isArray(message.input))\n return \"input: array expected\";\n for (var i = 0; i < message.input.length; ++i)\n if (!$util.isString(message.input[i]))\n return \"input: string[] expected\";\n }\n if (message.output != null && message.hasOwnProperty(\"output\")) {\n if (!Array.isArray(message.output))\n return \"output: array expected\";\n for (var i = 0; i < message.output.length; ++i)\n if (!$util.isString(message.output[i]))\n return \"output: string[] 
expected\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.opType != null && message.hasOwnProperty(\"opType\"))\n if (!$util.isString(message.opType))\n return \"opType: string expected\";\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n if (message.attribute != null && message.hasOwnProperty(\"attribute\")) {\n if (!Array.isArray(message.attribute))\n return \"attribute: array expected\";\n for (var i = 0; i < message.attribute.length; ++i) {\n var error = $root.onnx.AttributeProto.verify(message.attribute[i]);\n if (error)\n return \"attribute.\" + error;\n }\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n return null;\n };\n\n /**\n * Creates a NodeProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.NodeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.NodeProto} NodeProto\n */\n NodeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.NodeProto)\n return object;\n var message = new $root.onnx.NodeProto();\n if (object.input) {\n if (!Array.isArray(object.input))\n throw TypeError(\".onnx.NodeProto.input: array expected\");\n message.input = [];\n for (var i = 0; i < object.input.length; ++i)\n message.input[i] = String(object.input[i]);\n }\n if (object.output) {\n if (!Array.isArray(object.output))\n throw TypeError(\".onnx.NodeProto.output: array expected\");\n message.output = [];\n for (var i = 0; i < object.output.length; ++i)\n message.output[i] = String(object.output[i]);\n }\n if (object.name != null)\n message.name = String(object.name);\n if (object.opType != null)\n message.opType = String(object.opType);\n if (object.domain != null)\n message.domain = String(object.domain);\n if (object.attribute) {\n if (!Array.isArray(object.attribute))\n throw TypeError(\".onnx.NodeProto.attribute: array expected\");\n message.attribute = [];\n for (var i = 0; i < object.attribute.length; ++i) {\n if (typeof object.attribute[i] !== \"object\")\n throw TypeError(\".onnx.NodeProto.attribute: object expected\");\n message.attribute[i] = $root.onnx.AttributeProto.fromObject(object.attribute[i]);\n }\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n return message;\n };\n\n /**\n * Creates a plain object from a NodeProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.NodeProto} message NodeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n NodeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.input = [];\n object.output = [];\n object.attribute = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.opType = \"\";\n object.docString = \"\";\n object.domain = \"\";\n }\n if (message.input && message.input.length) {\n object.input = [];\n for (var j = 0; j < message.input.length; ++j)\n object.input[j] = message.input[j];\n }\n if (message.output && message.output.length) {\n object.output = [];\n for (var j = 0; j < message.output.length; ++j)\n object.output[j] = message.output[j];\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.opType != null && message.hasOwnProperty(\"opType\"))\n object.opType = message.opType;\n if (message.attribute && message.attribute.length) {\n object.attribute = [];\n for (var j = 0; j < message.attribute.length; ++j)\n object.attribute[j] = $root.onnx.AttributeProto.toObject(message.attribute[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n return object;\n };\n\n /**\n * Converts this NodeProto to JSON.\n * @function toJSON\n * @memberof onnx.NodeProto\n * @instance\n * @returns {Object.} JSON object\n */\n NodeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for NodeProto\n * @function getTypeUrl\n * @memberof onnx.NodeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n NodeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.NodeProto\";\n };\n\n return NodeProto;\n })();\n\n onnx.TrainingInfoProto = (function() {\n\n /**\n * Properties of a TrainingInfoProto.\n * @memberof onnx\n * @interface ITrainingInfoProto\n * @property {onnx.IGraphProto|null} [initialization] TrainingInfoProto initialization\n * @property {onnx.IGraphProto|null} [algorithm] TrainingInfoProto algorithm\n * @property {Array.|null} [initializationBinding] TrainingInfoProto initializationBinding\n * @property {Array.|null} [updateBinding] TrainingInfoProto updateBinding\n */\n\n /**\n * Constructs a new TrainingInfoProto.\n * @memberof onnx\n * @classdesc Represents a TrainingInfoProto.\n * @implements ITrainingInfoProto\n * @constructor\n * @param {onnx.ITrainingInfoProto=} [properties] Properties to set\n */\n function TrainingInfoProto(properties) {\n this.initializationBinding = [];\n this.updateBinding = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TrainingInfoProto initialization.\n * @member {onnx.IGraphProto|null|undefined} initialization\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n 
TrainingInfoProto.prototype.initialization = null;\n\n /**\n * TrainingInfoProto algorithm.\n * @member {onnx.IGraphProto|null|undefined} algorithm\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n TrainingInfoProto.prototype.algorithm = null;\n\n /**\n * TrainingInfoProto initializationBinding.\n * @member {Array.} initializationBinding\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n TrainingInfoProto.prototype.initializationBinding = $util.emptyArray;\n\n /**\n * TrainingInfoProto updateBinding.\n * @member {Array.} updateBinding\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n TrainingInfoProto.prototype.updateBinding = $util.emptyArray;\n\n /**\n * Creates a new TrainingInfoProto instance using the specified properties.\n * @function create\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.ITrainingInfoProto=} [properties] Properties to set\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto instance\n */\n TrainingInfoProto.create = function create(properties) {\n return new TrainingInfoProto(properties);\n };\n\n /**\n * Encodes the specified TrainingInfoProto message. Does not implicitly {@link onnx.TrainingInfoProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.ITrainingInfoProto} message TrainingInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TrainingInfoProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.initialization != null && Object.hasOwnProperty.call(message, \"initialization\"))\n $root.onnx.GraphProto.encode(message.initialization, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.algorithm != null && Object.hasOwnProperty.call(message, \"algorithm\"))\n $root.onnx.GraphProto.encode(message.algorithm, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n if (message.initializationBinding != null && message.initializationBinding.length)\n for (var i = 0; i < message.initializationBinding.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.initializationBinding[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();\n if (message.updateBinding != null && message.updateBinding.length)\n for (var i = 0; i < message.updateBinding.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.updateBinding[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TrainingInfoProto message, length delimited. 
Does not implicitly {@link onnx.TrainingInfoProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.ITrainingInfoProto} message TrainingInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TrainingInfoProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TrainingInfoProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TrainingInfoProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TrainingInfoProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.initialization = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 2: {\n message.algorithm = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 3: {\n if (!(message.initializationBinding && message.initializationBinding.length))\n message.initializationBinding = [];\n message.initializationBinding.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n case 4: {\n if (!(message.updateBinding && message.updateBinding.length))\n message.updateBinding = [];\n message.updateBinding.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TrainingInfoProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TrainingInfoProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TrainingInfoProto message.\n * @function verify\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TrainingInfoProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.initialization != null && message.hasOwnProperty(\"initialization\")) {\n var error = $root.onnx.GraphProto.verify(message.initialization);\n if (error)\n return \"initialization.\" + error;\n }\n if (message.algorithm != null && message.hasOwnProperty(\"algorithm\")) {\n var error = $root.onnx.GraphProto.verify(message.algorithm);\n if (error)\n return \"algorithm.\" + error;\n }\n if 
(message.initializationBinding != null && message.hasOwnProperty(\"initializationBinding\")) {\n if (!Array.isArray(message.initializationBinding))\n return \"initializationBinding: array expected\";\n for (var i = 0; i < message.initializationBinding.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.initializationBinding[i]);\n if (error)\n return \"initializationBinding.\" + error;\n }\n }\n if (message.updateBinding != null && message.hasOwnProperty(\"updateBinding\")) {\n if (!Array.isArray(message.updateBinding))\n return \"updateBinding: array expected\";\n for (var i = 0; i < message.updateBinding.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.updateBinding[i]);\n if (error)\n return \"updateBinding.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a TrainingInfoProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto\n */\n TrainingInfoProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TrainingInfoProto)\n return object;\n var message = new $root.onnx.TrainingInfoProto();\n if (object.initialization != null) {\n if (typeof object.initialization !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.initialization: object expected\");\n message.initialization = $root.onnx.GraphProto.fromObject(object.initialization);\n }\n if (object.algorithm != null) {\n if (typeof object.algorithm !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.algorithm: object expected\");\n message.algorithm = $root.onnx.GraphProto.fromObject(object.algorithm);\n }\n if (object.initializationBinding) {\n if (!Array.isArray(object.initializationBinding))\n throw TypeError(\".onnx.TrainingInfoProto.initializationBinding: array expected\");\n message.initializationBinding = [];\n for (var i = 0; i < object.initializationBinding.length; ++i) {\n if (typeof object.initializationBinding[i] !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.initializationBinding: object expected\");\n message.initializationBinding[i] = $root.onnx.StringStringEntryProto.fromObject(object.initializationBinding[i]);\n }\n }\n if (object.updateBinding) {\n if (!Array.isArray(object.updateBinding))\n throw TypeError(\".onnx.TrainingInfoProto.updateBinding: array expected\");\n message.updateBinding = [];\n for (var i = 0; i < object.updateBinding.length; ++i) {\n if (typeof object.updateBinding[i] !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.updateBinding: object expected\");\n message.updateBinding[i] = $root.onnx.StringStringEntryProto.fromObject(object.updateBinding[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TrainingInfoProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.TrainingInfoProto} message TrainingInfoProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TrainingInfoProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.initializationBinding = [];\n object.updateBinding = [];\n }\n if (options.defaults) {\n object.initialization = null;\n object.algorithm = null;\n }\n if (message.initialization != null && message.hasOwnProperty(\"initialization\"))\n object.initialization = $root.onnx.GraphProto.toObject(message.initialization, options);\n if (message.algorithm != null && message.hasOwnProperty(\"algorithm\"))\n object.algorithm = $root.onnx.GraphProto.toObject(message.algorithm, options);\n if (message.initializationBinding && message.initializationBinding.length) {\n object.initializationBinding = [];\n for (var j = 0; j < message.initializationBinding.length; ++j)\n object.initializationBinding[j] = $root.onnx.StringStringEntryProto.toObject(message.initializationBinding[j], options);\n }\n if (message.updateBinding && message.updateBinding.length) {\n object.updateBinding = [];\n for (var j = 0; j < message.updateBinding.length; ++j)\n object.updateBinding[j] = $root.onnx.StringStringEntryProto.toObject(message.updateBinding[j], options);\n }\n return object;\n };\n\n /**\n * Converts this TrainingInfoProto to JSON.\n * @function toJSON\n * @memberof onnx.TrainingInfoProto\n * @instance\n * @returns {Object.} JSON object\n */\n TrainingInfoProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TrainingInfoProto\n * @function getTypeUrl\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TrainingInfoProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TrainingInfoProto\";\n };\n\n return TrainingInfoProto;\n })();\n\n onnx.ModelProto = (function() {\n\n /**\n * Properties of a ModelProto.\n * @memberof onnx\n * @interface IModelProto\n * @property {number|Long|null} [irVersion] ModelProto irVersion\n * @property {Array.|null} [opsetImport] ModelProto opsetImport\n * @property {string|null} [producerName] ModelProto producerName\n * @property {string|null} [producerVersion] ModelProto producerVersion\n * @property {string|null} [domain] ModelProto domain\n * @property {number|Long|null} [modelVersion] ModelProto modelVersion\n * @property {string|null} [docString] ModelProto docString\n * @property {onnx.IGraphProto|null} [graph] ModelProto graph\n * @property {Array.|null} [metadataProps] ModelProto metadataProps\n * @property {Array.|null} [trainingInfo] ModelProto trainingInfo\n * @property {Array.|null} [functions] ModelProto functions\n */\n\n /**\n * Constructs a new ModelProto.\n * @memberof onnx\n * @classdesc Represents a ModelProto.\n * @implements IModelProto\n * @constructor\n * @param {onnx.IModelProto=} [properties] Properties to set\n */\n function ModelProto(properties) {\n this.opsetImport = [];\n this.metadataProps = [];\n this.trainingInfo = [];\n 
this.functions = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * ModelProto irVersion.\n * @member {number|Long} irVersion\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.irVersion = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * ModelProto opsetImport.\n * @member {Array.} opsetImport\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.opsetImport = $util.emptyArray;\n\n /**\n * ModelProto producerName.\n * @member {string} producerName\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.producerName = \"\";\n\n /**\n * ModelProto producerVersion.\n * @member {string} producerVersion\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.producerVersion = \"\";\n\n /**\n * ModelProto domain.\n * @member {string} domain\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.domain = \"\";\n\n /**\n * ModelProto modelVersion.\n * @member {number|Long} modelVersion\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.modelVersion = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * ModelProto docString.\n * @member {string} docString\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.docString = \"\";\n\n /**\n * ModelProto graph.\n * @member {onnx.IGraphProto|null|undefined} graph\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.graph = null;\n\n /**\n * ModelProto metadataProps.\n * @member {Array.} metadataProps\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.metadataProps = $util.emptyArray;\n\n /**\n * ModelProto trainingInfo.\n * @member {Array.} trainingInfo\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.trainingInfo = $util.emptyArray;\n\n /**\n * ModelProto functions.\n * @member {Array.} functions\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.functions = $util.emptyArray;\n\n /**\n * Creates a new ModelProto instance using the specified properties.\n * @function create\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.IModelProto=} [properties] Properties to set\n * @returns {onnx.ModelProto} ModelProto instance\n */\n ModelProto.create = function create(properties) {\n return new ModelProto(properties);\n };\n\n /**\n * Encodes the specified ModelProto message. 
Does not implicitly {@link onnx.ModelProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.IModelProto} message ModelProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ModelProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.irVersion != null && Object.hasOwnProperty.call(message, \"irVersion\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int64(message.irVersion);\n if (message.producerName != null && Object.hasOwnProperty.call(message, \"producerName\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.producerName);\n if (message.producerVersion != null && Object.hasOwnProperty.call(message, \"producerVersion\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.producerVersion);\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 4, wireType 2 =*/34).string(message.domain);\n if (message.modelVersion != null && Object.hasOwnProperty.call(message, \"modelVersion\"))\n writer.uint32(/* id 5, wireType 0 =*/40).int64(message.modelVersion);\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.docString);\n if (message.graph != null && Object.hasOwnProperty.call(message, \"graph\"))\n $root.onnx.GraphProto.encode(message.graph, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim();\n if (message.opsetImport != null && message.opsetImport.length)\n for (var i = 0; i < message.opsetImport.length; ++i)\n $root.onnx.OperatorSetIdProto.encode(message.opsetImport[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim();\n if (message.metadataProps != null && message.metadataProps.length)\n for (var i = 0; i < message.metadataProps.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.metadataProps[i], writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim();\n if (message.trainingInfo != null && message.trainingInfo.length)\n for (var i = 0; i < message.trainingInfo.length; ++i)\n $root.onnx.TrainingInfoProto.encode(message.trainingInfo[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim();\n if (message.functions != null && message.functions.length)\n for (var i = 0; i < message.functions.length; ++i)\n $root.onnx.FunctionProto.encode(message.functions[i], writer.uint32(/* id 25, wireType 2 =*/202).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified ModelProto message, length delimited. 
Does not implicitly {@link onnx.ModelProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.IModelProto} message ModelProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ModelProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a ModelProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.ModelProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.ModelProto} ModelProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ModelProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.ModelProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.irVersion = reader.int64();\n break;\n }\n case 8: {\n if (!(message.opsetImport && message.opsetImport.length))\n message.opsetImport = [];\n message.opsetImport.push($root.onnx.OperatorSetIdProto.decode(reader, reader.uint32()));\n break;\n }\n case 2: {\n message.producerName = reader.string();\n break;\n }\n case 3: {\n message.producerVersion = reader.string();\n break;\n }\n case 4: {\n message.domain = reader.string();\n break;\n }\n case 5: {\n message.modelVersion = reader.int64();\n break;\n }\n case 6: {\n message.docString = reader.string();\n break;\n }\n case 7: {\n message.graph = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 14: {\n if (!(message.metadataProps && message.metadataProps.length))\n message.metadataProps = [];\n message.metadataProps.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n case 20: {\n if (!(message.trainingInfo && message.trainingInfo.length))\n message.trainingInfo = [];\n message.trainingInfo.push($root.onnx.TrainingInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 25: {\n if (!(message.functions && message.functions.length))\n message.functions = [];\n message.functions.push($root.onnx.FunctionProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a ModelProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.ModelProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.ModelProto} ModelProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ModelProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a ModelProto message.\n * @function verify\n * @memberof onnx.ModelProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n ModelProto.verify = function verify(message) {\n if 
(typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.irVersion != null && message.hasOwnProperty(\"irVersion\"))\n if (!$util.isInteger(message.irVersion) && !(message.irVersion && $util.isInteger(message.irVersion.low) && $util.isInteger(message.irVersion.high)))\n return \"irVersion: integer|Long expected\";\n if (message.opsetImport != null && message.hasOwnProperty(\"opsetImport\")) {\n if (!Array.isArray(message.opsetImport))\n return \"opsetImport: array expected\";\n for (var i = 0; i < message.opsetImport.length; ++i) {\n var error = $root.onnx.OperatorSetIdProto.verify(message.opsetImport[i]);\n if (error)\n return \"opsetImport.\" + error;\n }\n }\n if (message.producerName != null && message.hasOwnProperty(\"producerName\"))\n if (!$util.isString(message.producerName))\n return \"producerName: string expected\";\n if (message.producerVersion != null && message.hasOwnProperty(\"producerVersion\"))\n if (!$util.isString(message.producerVersion))\n return \"producerVersion: string expected\";\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n if (message.modelVersion != null && message.hasOwnProperty(\"modelVersion\"))\n if (!$util.isInteger(message.modelVersion) && !(message.modelVersion && $util.isInteger(message.modelVersion.low) && $util.isInteger(message.modelVersion.high)))\n return \"modelVersion: integer|Long expected\";\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.graph != null && message.hasOwnProperty(\"graph\")) {\n var error = $root.onnx.GraphProto.verify(message.graph);\n if (error)\n return \"graph.\" + error;\n }\n if (message.metadataProps != null && message.hasOwnProperty(\"metadataProps\")) {\n if (!Array.isArray(message.metadataProps))\n return \"metadataProps: array expected\";\n for (var i = 0; i < message.metadataProps.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.metadataProps[i]);\n if (error)\n return \"metadataProps.\" + error;\n }\n }\n if (message.trainingInfo != null && message.hasOwnProperty(\"trainingInfo\")) {\n if (!Array.isArray(message.trainingInfo))\n return \"trainingInfo: array expected\";\n for (var i = 0; i < message.trainingInfo.length; ++i) {\n var error = $root.onnx.TrainingInfoProto.verify(message.trainingInfo[i]);\n if (error)\n return \"trainingInfo.\" + error;\n }\n }\n if (message.functions != null && message.hasOwnProperty(\"functions\")) {\n if (!Array.isArray(message.functions))\n return \"functions: array expected\";\n for (var i = 0; i < message.functions.length; ++i) {\n var error = $root.onnx.FunctionProto.verify(message.functions[i]);\n if (error)\n return \"functions.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a ModelProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.ModelProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.ModelProto} ModelProto\n */\n ModelProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.ModelProto)\n return object;\n var message = new $root.onnx.ModelProto();\n if (object.irVersion != null)\n if ($util.Long)\n (message.irVersion = $util.Long.fromValue(object.irVersion)).unsigned = false;\n else if (typeof object.irVersion === \"string\")\n message.irVersion = parseInt(object.irVersion, 10);\n else if (typeof object.irVersion === \"number\")\n message.irVersion = object.irVersion;\n else if (typeof object.irVersion === \"object\")\n message.irVersion = new $util.LongBits(object.irVersion.low >>> 0, object.irVersion.high >>> 0).toNumber();\n if (object.opsetImport) {\n if (!Array.isArray(object.opsetImport))\n throw TypeError(\".onnx.ModelProto.opsetImport: array expected\");\n message.opsetImport = [];\n for (var i = 0; i < object.opsetImport.length; ++i) {\n if (typeof object.opsetImport[i] !== \"object\")\n throw TypeError(\".onnx.ModelProto.opsetImport: object expected\");\n message.opsetImport[i] = $root.onnx.OperatorSetIdProto.fromObject(object.opsetImport[i]);\n }\n }\n if (object.producerName != null)\n message.producerName = String(object.producerName);\n if (object.producerVersion != null)\n message.producerVersion = String(object.producerVersion);\n if (object.domain != null)\n message.domain = String(object.domain);\n if (object.modelVersion != null)\n if ($util.Long)\n (message.modelVersion = $util.Long.fromValue(object.modelVersion)).unsigned = false;\n else if (typeof object.modelVersion === \"string\")\n message.modelVersion = parseInt(object.modelVersion, 10);\n else if (typeof object.modelVersion === \"number\")\n message.modelVersion = object.modelVersion;\n else if (typeof object.modelVersion === \"object\")\n message.modelVersion = new $util.LongBits(object.modelVersion.low >>> 0, object.modelVersion.high >>> 0).toNumber();\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.graph != null) {\n if (typeof object.graph !== \"object\")\n throw TypeError(\".onnx.ModelProto.graph: object expected\");\n message.graph = $root.onnx.GraphProto.fromObject(object.graph);\n }\n if (object.metadataProps) {\n if (!Array.isArray(object.metadataProps))\n throw TypeError(\".onnx.ModelProto.metadataProps: array expected\");\n message.metadataProps = [];\n for (var i = 0; i < object.metadataProps.length; ++i) {\n if (typeof object.metadataProps[i] !== \"object\")\n throw TypeError(\".onnx.ModelProto.metadataProps: object expected\");\n message.metadataProps[i] = $root.onnx.StringStringEntryProto.fromObject(object.metadataProps[i]);\n }\n }\n if (object.trainingInfo) {\n if (!Array.isArray(object.trainingInfo))\n throw TypeError(\".onnx.ModelProto.trainingInfo: array expected\");\n message.trainingInfo = [];\n for (var i = 0; i < object.trainingInfo.length; ++i) {\n if (typeof object.trainingInfo[i] !== \"object\")\n throw TypeError(\".onnx.ModelProto.trainingInfo: object expected\");\n message.trainingInfo[i] = $root.onnx.TrainingInfoProto.fromObject(object.trainingInfo[i]);\n }\n }\n if (object.functions) {\n if (!Array.isArray(object.functions))\n throw TypeError(\".onnx.ModelProto.functions: array expected\");\n message.functions = [];\n for (var i = 0; i < object.functions.length; ++i) {\n if (typeof object.functions[i] !== 
\"object\")\n throw TypeError(\".onnx.ModelProto.functions: object expected\");\n message.functions[i] = $root.onnx.FunctionProto.fromObject(object.functions[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a ModelProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.ModelProto} message ModelProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n ModelProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.opsetImport = [];\n object.metadataProps = [];\n object.trainingInfo = [];\n object.functions = [];\n }\n if (options.defaults) {\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.irVersion = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.irVersion = options.longs === String ? \"0\" : 0;\n object.producerName = \"\";\n object.producerVersion = \"\";\n object.domain = \"\";\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.modelVersion = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.modelVersion = options.longs === String ? \"0\" : 0;\n object.docString = \"\";\n object.graph = null;\n }\n if (message.irVersion != null && message.hasOwnProperty(\"irVersion\"))\n if (typeof message.irVersion === \"number\")\n object.irVersion = options.longs === String ? String(message.irVersion) : message.irVersion;\n else\n object.irVersion = options.longs === String ? $util.Long.prototype.toString.call(message.irVersion) : options.longs === Number ? new $util.LongBits(message.irVersion.low >>> 0, message.irVersion.high >>> 0).toNumber() : message.irVersion;\n if (message.producerName != null && message.hasOwnProperty(\"producerName\"))\n object.producerName = message.producerName;\n if (message.producerVersion != null && message.hasOwnProperty(\"producerVersion\"))\n object.producerVersion = message.producerVersion;\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n if (message.modelVersion != null && message.hasOwnProperty(\"modelVersion\"))\n if (typeof message.modelVersion === \"number\")\n object.modelVersion = options.longs === String ? String(message.modelVersion) : message.modelVersion;\n else\n object.modelVersion = options.longs === String ? $util.Long.prototype.toString.call(message.modelVersion) : options.longs === Number ? 
new $util.LongBits(message.modelVersion.low >>> 0, message.modelVersion.high >>> 0).toNumber() : message.modelVersion;\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.graph != null && message.hasOwnProperty(\"graph\"))\n object.graph = $root.onnx.GraphProto.toObject(message.graph, options);\n if (message.opsetImport && message.opsetImport.length) {\n object.opsetImport = [];\n for (var j = 0; j < message.opsetImport.length; ++j)\n object.opsetImport[j] = $root.onnx.OperatorSetIdProto.toObject(message.opsetImport[j], options);\n }\n if (message.metadataProps && message.metadataProps.length) {\n object.metadataProps = [];\n for (var j = 0; j < message.metadataProps.length; ++j)\n object.metadataProps[j] = $root.onnx.StringStringEntryProto.toObject(message.metadataProps[j], options);\n }\n if (message.trainingInfo && message.trainingInfo.length) {\n object.trainingInfo = [];\n for (var j = 0; j < message.trainingInfo.length; ++j)\n object.trainingInfo[j] = $root.onnx.TrainingInfoProto.toObject(message.trainingInfo[j], options);\n }\n if (message.functions && message.functions.length) {\n object.functions = [];\n for (var j = 0; j < message.functions.length; ++j)\n object.functions[j] = $root.onnx.FunctionProto.toObject(message.functions[j], options);\n }\n return object;\n };\n\n /**\n * Converts this ModelProto to JSON.\n * @function toJSON\n * @memberof onnx.ModelProto\n * @instance\n * @returns {Object.} JSON object\n */\n ModelProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for ModelProto\n * @function getTypeUrl\n * @memberof onnx.ModelProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n ModelProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.ModelProto\";\n };\n\n return ModelProto;\n })();\n\n onnx.StringStringEntryProto = (function() {\n\n /**\n * Properties of a StringStringEntryProto.\n * @memberof onnx\n * @interface IStringStringEntryProto\n * @property {string|null} [key] StringStringEntryProto key\n * @property {string|null} [value] StringStringEntryProto value\n */\n\n /**\n * Constructs a new StringStringEntryProto.\n * @memberof onnx\n * @classdesc Represents a StringStringEntryProto.\n * @implements IStringStringEntryProto\n * @constructor\n * @param {onnx.IStringStringEntryProto=} [properties] Properties to set\n */\n function StringStringEntryProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * StringStringEntryProto key.\n * @member {string} key\n * @memberof onnx.StringStringEntryProto\n * @instance\n */\n StringStringEntryProto.prototype.key = \"\";\n\n /**\n * StringStringEntryProto value.\n * @member {string} value\n * @memberof onnx.StringStringEntryProto\n * @instance\n */\n StringStringEntryProto.prototype.value = \"\";\n\n /**\n * Creates a new StringStringEntryProto instance using the specified properties.\n * @function create\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.IStringStringEntryProto=} [properties] Properties to set\n * @returns {onnx.StringStringEntryProto} 
StringStringEntryProto instance\n */\n StringStringEntryProto.create = function create(properties) {\n return new StringStringEntryProto(properties);\n };\n\n /**\n * Encodes the specified StringStringEntryProto message. Does not implicitly {@link onnx.StringStringEntryProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.IStringStringEntryProto} message StringStringEntryProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n StringStringEntryProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.key != null && Object.hasOwnProperty.call(message, \"key\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.key);\n if (message.value != null && Object.hasOwnProperty.call(message, \"value\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.value);\n return writer;\n };\n\n /**\n * Encodes the specified StringStringEntryProto message, length delimited. Does not implicitly {@link onnx.StringStringEntryProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.IStringStringEntryProto} message StringStringEntryProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n StringStringEntryProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a StringStringEntryProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.StringStringEntryProto} StringStringEntryProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n StringStringEntryProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.StringStringEntryProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.key = reader.string();\n break;\n }\n case 2: {\n message.value = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a StringStringEntryProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.StringStringEntryProto} StringStringEntryProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n StringStringEntryProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a StringStringEntryProto message.\n * @function verify\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n StringStringEntryProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.key != null && message.hasOwnProperty(\"key\"))\n if (!$util.isString(message.key))\n return \"key: string expected\";\n if (message.value != null && message.hasOwnProperty(\"value\"))\n if (!$util.isString(message.value))\n return \"value: string expected\";\n return null;\n };\n\n /**\n * Creates a StringStringEntryProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.StringStringEntryProto} StringStringEntryProto\n */\n StringStringEntryProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.StringStringEntryProto)\n return object;\n var message = new $root.onnx.StringStringEntryProto();\n if (object.key != null)\n message.key = String(object.key);\n if (object.value != null)\n message.value = String(object.value);\n return message;\n };\n\n /**\n * Creates a plain object from a StringStringEntryProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.StringStringEntryProto} message StringStringEntryProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n StringStringEntryProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.key = \"\";\n object.value = \"\";\n }\n if (message.key != null && message.hasOwnProperty(\"key\"))\n object.key = message.key;\n if (message.value != null && message.hasOwnProperty(\"value\"))\n object.value = message.value;\n return object;\n };\n\n /**\n * Converts this StringStringEntryProto to JSON.\n * @function toJSON\n * @memberof onnx.StringStringEntryProto\n * @instance\n * @returns {Object.} JSON object\n */\n StringStringEntryProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for StringStringEntryProto\n * @function getTypeUrl\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n StringStringEntryProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.StringStringEntryProto\";\n };\n\n return StringStringEntryProto;\n })();\n\n onnx.TensorAnnotation = (function() {\n\n /**\n * Properties of a TensorAnnotation.\n * @memberof onnx\n * @interface ITensorAnnotation\n * @property {string|null} [tensorName] TensorAnnotation tensorName\n * @property {Array.|null} [quantParameterTensorNames] TensorAnnotation quantParameterTensorNames\n */\n\n /**\n * Constructs a new TensorAnnotation.\n * @memberof onnx\n * @classdesc Represents a TensorAnnotation.\n * @implements ITensorAnnotation\n * @constructor\n * @param {onnx.ITensorAnnotation=} [properties] Properties to set\n */\n function TensorAnnotation(properties) {\n this.quantParameterTensorNames = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TensorAnnotation tensorName.\n * @member {string} tensorName\n * @memberof onnx.TensorAnnotation\n * @instance\n */\n TensorAnnotation.prototype.tensorName = \"\";\n\n /**\n * TensorAnnotation quantParameterTensorNames.\n * @member {Array.} quantParameterTensorNames\n * @memberof onnx.TensorAnnotation\n * @instance\n */\n TensorAnnotation.prototype.quantParameterTensorNames = $util.emptyArray;\n\n /**\n * Creates a new TensorAnnotation instance using the specified properties.\n * @function create\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.ITensorAnnotation=} [properties] Properties to set\n * @returns {onnx.TensorAnnotation} TensorAnnotation instance\n */\n TensorAnnotation.create = function create(properties) {\n return new TensorAnnotation(properties);\n };\n\n /**\n * Encodes the specified TensorAnnotation message. 
Does not implicitly {@link onnx.TensorAnnotation.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.ITensorAnnotation} message TensorAnnotation message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorAnnotation.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.tensorName != null && Object.hasOwnProperty.call(message, \"tensorName\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.tensorName);\n if (message.quantParameterTensorNames != null && message.quantParameterTensorNames.length)\n for (var i = 0; i < message.quantParameterTensorNames.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.quantParameterTensorNames[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TensorAnnotation message, length delimited. Does not implicitly {@link onnx.TensorAnnotation.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.ITensorAnnotation} message TensorAnnotation message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorAnnotation.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TensorAnnotation message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorAnnotation} TensorAnnotation\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorAnnotation.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TensorAnnotation();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.tensorName = reader.string();\n break;\n }\n case 2: {\n if (!(message.quantParameterTensorNames && message.quantParameterTensorNames.length))\n message.quantParameterTensorNames = [];\n message.quantParameterTensorNames.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TensorAnnotation message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorAnnotation} TensorAnnotation\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorAnnotation.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TensorAnnotation message.\n * @function verify\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TensorAnnotation.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.tensorName != null && message.hasOwnProperty(\"tensorName\"))\n if (!$util.isString(message.tensorName))\n return \"tensorName: string expected\";\n if (message.quantParameterTensorNames != null && message.hasOwnProperty(\"quantParameterTensorNames\")) {\n if (!Array.isArray(message.quantParameterTensorNames))\n return \"quantParameterTensorNames: array expected\";\n for (var i = 0; i < message.quantParameterTensorNames.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.quantParameterTensorNames[i]);\n if (error)\n return \"quantParameterTensorNames.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a TensorAnnotation message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorAnnotation} TensorAnnotation\n */\n TensorAnnotation.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorAnnotation)\n return object;\n var message = new $root.onnx.TensorAnnotation();\n if (object.tensorName != null)\n message.tensorName = String(object.tensorName);\n if (object.quantParameterTensorNames) {\n if (!Array.isArray(object.quantParameterTensorNames))\n throw TypeError(\".onnx.TensorAnnotation.quantParameterTensorNames: array expected\");\n message.quantParameterTensorNames = [];\n for (var i = 0; i < object.quantParameterTensorNames.length; ++i) {\n if (typeof object.quantParameterTensorNames[i] !== \"object\")\n throw TypeError(\".onnx.TensorAnnotation.quantParameterTensorNames: object expected\");\n message.quantParameterTensorNames[i] = $root.onnx.StringStringEntryProto.fromObject(object.quantParameterTensorNames[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TensorAnnotation message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.TensorAnnotation} message TensorAnnotation\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TensorAnnotation.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults)\n object.quantParameterTensorNames = [];\n if (options.defaults)\n object.tensorName = \"\";\n if (message.tensorName != null && message.hasOwnProperty(\"tensorName\"))\n object.tensorName = message.tensorName;\n if (message.quantParameterTensorNames && message.quantParameterTensorNames.length) {\n object.quantParameterTensorNames = [];\n for (var j = 0; j < message.quantParameterTensorNames.length; ++j)\n object.quantParameterTensorNames[j] = $root.onnx.StringStringEntryProto.toObject(message.quantParameterTensorNames[j], options);\n }\n return object;\n };\n\n /**\n * Converts this TensorAnnotation to JSON.\n * @function toJSON\n * @memberof onnx.TensorAnnotation\n * @instance\n * @returns {Object.} JSON object\n */\n TensorAnnotation.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TensorAnnotation\n * @function getTypeUrl\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TensorAnnotation.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorAnnotation\";\n };\n\n return TensorAnnotation;\n })();\n\n onnx.GraphProto = (function() {\n\n /**\n * Properties of a GraphProto.\n * @memberof onnx\n * @interface IGraphProto\n * @property {Array.|null} [node] GraphProto node\n * @property {string|null} [name] GraphProto name\n * @property {Array.|null} [initializer] GraphProto initializer\n * @property {Array.|null} [sparseInitializer] GraphProto sparseInitializer\n * @property {string|null} [docString] GraphProto docString\n * @property {Array.|null} [input] GraphProto input\n * @property {Array.|null} [output] GraphProto output\n * @property {Array.|null} [valueInfo] GraphProto valueInfo\n * @property {Array.|null} [quantizationAnnotation] GraphProto quantizationAnnotation\n */\n\n /**\n * Constructs a new GraphProto.\n * @memberof onnx\n * @classdesc Represents a GraphProto.\n * @implements IGraphProto\n * @constructor\n * @param {onnx.IGraphProto=} [properties] Properties to set\n */\n function GraphProto(properties) {\n this.node = [];\n this.initializer = [];\n this.sparseInitializer = [];\n this.input = [];\n this.output = [];\n this.valueInfo = [];\n this.quantizationAnnotation = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * GraphProto node.\n * @member {Array.} node\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.node = $util.emptyArray;\n\n /**\n * GraphProto name.\n * @member {string} name\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.name = \"\";\n\n /**\n * GraphProto initializer.\n * @member {Array.} initializer\n * @memberof onnx.GraphProto\n * @instance\n */\n 
GraphProto.prototype.initializer = $util.emptyArray;\n\n /**\n * GraphProto sparseInitializer.\n * @member {Array.} sparseInitializer\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.sparseInitializer = $util.emptyArray;\n\n /**\n * GraphProto docString.\n * @member {string} docString\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.docString = \"\";\n\n /**\n * GraphProto input.\n * @member {Array.} input\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.input = $util.emptyArray;\n\n /**\n * GraphProto output.\n * @member {Array.} output\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.output = $util.emptyArray;\n\n /**\n * GraphProto valueInfo.\n * @member {Array.} valueInfo\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.valueInfo = $util.emptyArray;\n\n /**\n * GraphProto quantizationAnnotation.\n * @member {Array.} quantizationAnnotation\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.quantizationAnnotation = $util.emptyArray;\n\n /**\n * Creates a new GraphProto instance using the specified properties.\n * @function create\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.IGraphProto=} [properties] Properties to set\n * @returns {onnx.GraphProto} GraphProto instance\n */\n GraphProto.create = function create(properties) {\n return new GraphProto(properties);\n };\n\n /**\n * Encodes the specified GraphProto message. Does not implicitly {@link onnx.GraphProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.IGraphProto} message GraphProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n GraphProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.node != null && message.node.length)\n for (var i = 0; i < message.node.length; ++i)\n $root.onnx.NodeProto.encode(message.node[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.name);\n if (message.initializer != null && message.initializer.length)\n for (var i = 0; i < message.initializer.length; ++i)\n $root.onnx.TensorProto.encode(message.initializer[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 10, wireType 2 =*/82).string(message.docString);\n if (message.input != null && message.input.length)\n for (var i = 0; i < message.input.length; ++i)\n $root.onnx.ValueInfoProto.encode(message.input[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();\n if (message.output != null && message.output.length)\n for (var i = 0; i < message.output.length; ++i)\n $root.onnx.ValueInfoProto.encode(message.output[i], writer.uint32(/* id 12, wireType 2 =*/98).fork()).ldelim();\n if (message.valueInfo != null && message.valueInfo.length)\n for (var i = 0; i < message.valueInfo.length; ++i)\n $root.onnx.ValueInfoProto.encode(message.valueInfo[i], writer.uint32(/* id 13, wireType 2 =*/106).fork()).ldelim();\n if (message.quantizationAnnotation != null && message.quantizationAnnotation.length)\n for (var i = 0; i < message.quantizationAnnotation.length; ++i)\n $root.onnx.TensorAnnotation.encode(message.quantizationAnnotation[i], 
writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim();\n if (message.sparseInitializer != null && message.sparseInitializer.length)\n for (var i = 0; i < message.sparseInitializer.length; ++i)\n $root.onnx.SparseTensorProto.encode(message.sparseInitializer[i], writer.uint32(/* id 15, wireType 2 =*/122).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified GraphProto message, length delimited. Does not implicitly {@link onnx.GraphProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.IGraphProto} message GraphProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n GraphProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a GraphProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.GraphProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.GraphProto} GraphProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n GraphProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.GraphProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.node && message.node.length))\n message.node = [];\n message.node.push($root.onnx.NodeProto.decode(reader, reader.uint32()));\n break;\n }\n case 2: {\n message.name = reader.string();\n break;\n }\n case 5: {\n if (!(message.initializer && message.initializer.length))\n message.initializer = [];\n message.initializer.push($root.onnx.TensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 15: {\n if (!(message.sparseInitializer && message.sparseInitializer.length))\n message.sparseInitializer = [];\n message.sparseInitializer.push($root.onnx.SparseTensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 10: {\n message.docString = reader.string();\n break;\n }\n case 11: {\n if (!(message.input && message.input.length))\n message.input = [];\n message.input.push($root.onnx.ValueInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 12: {\n if (!(message.output && message.output.length))\n message.output = [];\n message.output.push($root.onnx.ValueInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 13: {\n if (!(message.valueInfo && message.valueInfo.length))\n message.valueInfo = [];\n message.valueInfo.push($root.onnx.ValueInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 14: {\n if (!(message.quantizationAnnotation && message.quantizationAnnotation.length))\n message.quantizationAnnotation = [];\n message.quantizationAnnotation.push($root.onnx.TensorAnnotation.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a GraphProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.GraphProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.GraphProto} GraphProto\n * 
@throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n GraphProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a GraphProto message.\n * @function verify\n * @memberof onnx.GraphProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n GraphProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.node != null && message.hasOwnProperty(\"node\")) {\n if (!Array.isArray(message.node))\n return \"node: array expected\";\n for (var i = 0; i < message.node.length; ++i) {\n var error = $root.onnx.NodeProto.verify(message.node[i]);\n if (error)\n return \"node.\" + error;\n }\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.initializer != null && message.hasOwnProperty(\"initializer\")) {\n if (!Array.isArray(message.initializer))\n return \"initializer: array expected\";\n for (var i = 0; i < message.initializer.length; ++i) {\n var error = $root.onnx.TensorProto.verify(message.initializer[i]);\n if (error)\n return \"initializer.\" + error;\n }\n }\n if (message.sparseInitializer != null && message.hasOwnProperty(\"sparseInitializer\")) {\n if (!Array.isArray(message.sparseInitializer))\n return \"sparseInitializer: array expected\";\n for (var i = 0; i < message.sparseInitializer.length; ++i) {\n var error = $root.onnx.SparseTensorProto.verify(message.sparseInitializer[i]);\n if (error)\n return \"sparseInitializer.\" + error;\n }\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.input != null && message.hasOwnProperty(\"input\")) {\n if (!Array.isArray(message.input))\n return \"input: array expected\";\n for (var i = 0; i < message.input.length; ++i) {\n var error = $root.onnx.ValueInfoProto.verify(message.input[i]);\n if (error)\n return \"input.\" + error;\n }\n }\n if (message.output != null && message.hasOwnProperty(\"output\")) {\n if (!Array.isArray(message.output))\n return \"output: array expected\";\n for (var i = 0; i < message.output.length; ++i) {\n var error = $root.onnx.ValueInfoProto.verify(message.output[i]);\n if (error)\n return \"output.\" + error;\n }\n }\n if (message.valueInfo != null && message.hasOwnProperty(\"valueInfo\")) {\n if (!Array.isArray(message.valueInfo))\n return \"valueInfo: array expected\";\n for (var i = 0; i < message.valueInfo.length; ++i) {\n var error = $root.onnx.ValueInfoProto.verify(message.valueInfo[i]);\n if (error)\n return \"valueInfo.\" + error;\n }\n }\n if (message.quantizationAnnotation != null && message.hasOwnProperty(\"quantizationAnnotation\")) {\n if (!Array.isArray(message.quantizationAnnotation))\n return \"quantizationAnnotation: array expected\";\n for (var i = 0; i < message.quantizationAnnotation.length; ++i) {\n var error = $root.onnx.TensorAnnotation.verify(message.quantizationAnnotation[i]);\n if (error)\n return \"quantizationAnnotation.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a GraphProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.GraphProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.GraphProto} GraphProto\n */\n GraphProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.GraphProto)\n return object;\n var message = new $root.onnx.GraphProto();\n if (object.node) {\n if (!Array.isArray(object.node))\n throw TypeError(\".onnx.GraphProto.node: array expected\");\n message.node = [];\n for (var i = 0; i < object.node.length; ++i) {\n if (typeof object.node[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.node: object expected\");\n message.node[i] = $root.onnx.NodeProto.fromObject(object.node[i]);\n }\n }\n if (object.name != null)\n message.name = String(object.name);\n if (object.initializer) {\n if (!Array.isArray(object.initializer))\n throw TypeError(\".onnx.GraphProto.initializer: array expected\");\n message.initializer = [];\n for (var i = 0; i < object.initializer.length; ++i) {\n if (typeof object.initializer[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.initializer: object expected\");\n message.initializer[i] = $root.onnx.TensorProto.fromObject(object.initializer[i]);\n }\n }\n if (object.sparseInitializer) {\n if (!Array.isArray(object.sparseInitializer))\n throw TypeError(\".onnx.GraphProto.sparseInitializer: array expected\");\n message.sparseInitializer = [];\n for (var i = 0; i < object.sparseInitializer.length; ++i) {\n if (typeof object.sparseInitializer[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.sparseInitializer: object expected\");\n message.sparseInitializer[i] = $root.onnx.SparseTensorProto.fromObject(object.sparseInitializer[i]);\n }\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.input) {\n if (!Array.isArray(object.input))\n throw TypeError(\".onnx.GraphProto.input: array expected\");\n message.input = [];\n for (var i = 0; i < object.input.length; ++i) {\n if (typeof object.input[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.input: object expected\");\n message.input[i] = $root.onnx.ValueInfoProto.fromObject(object.input[i]);\n }\n }\n if (object.output) {\n if (!Array.isArray(object.output))\n throw TypeError(\".onnx.GraphProto.output: array expected\");\n message.output = [];\n for (var i = 0; i < object.output.length; ++i) {\n if (typeof object.output[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.output: object expected\");\n message.output[i] = $root.onnx.ValueInfoProto.fromObject(object.output[i]);\n }\n }\n if (object.valueInfo) {\n if (!Array.isArray(object.valueInfo))\n throw TypeError(\".onnx.GraphProto.valueInfo: array expected\");\n message.valueInfo = [];\n for (var i = 0; i < object.valueInfo.length; ++i) {\n if (typeof object.valueInfo[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.valueInfo: object expected\");\n message.valueInfo[i] = $root.onnx.ValueInfoProto.fromObject(object.valueInfo[i]);\n }\n }\n if (object.quantizationAnnotation) {\n if (!Array.isArray(object.quantizationAnnotation))\n throw TypeError(\".onnx.GraphProto.quantizationAnnotation: array expected\");\n message.quantizationAnnotation = [];\n for (var i = 0; i < object.quantizationAnnotation.length; ++i) {\n if (typeof object.quantizationAnnotation[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.quantizationAnnotation: object expected\");\n message.quantizationAnnotation[i] = 
$root.onnx.TensorAnnotation.fromObject(object.quantizationAnnotation[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a GraphProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.GraphProto} message GraphProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n GraphProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.node = [];\n object.initializer = [];\n object.input = [];\n object.output = [];\n object.valueInfo = [];\n object.quantizationAnnotation = [];\n object.sparseInitializer = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.docString = \"\";\n }\n if (message.node && message.node.length) {\n object.node = [];\n for (var j = 0; j < message.node.length; ++j)\n object.node[j] = $root.onnx.NodeProto.toObject(message.node[j], options);\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.initializer && message.initializer.length) {\n object.initializer = [];\n for (var j = 0; j < message.initializer.length; ++j)\n object.initializer[j] = $root.onnx.TensorProto.toObject(message.initializer[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.input && message.input.length) {\n object.input = [];\n for (var j = 0; j < message.input.length; ++j)\n object.input[j] = $root.onnx.ValueInfoProto.toObject(message.input[j], options);\n }\n if (message.output && message.output.length) {\n object.output = [];\n for (var j = 0; j < message.output.length; ++j)\n object.output[j] = $root.onnx.ValueInfoProto.toObject(message.output[j], options);\n }\n if (message.valueInfo && message.valueInfo.length) {\n object.valueInfo = [];\n for (var j = 0; j < message.valueInfo.length; ++j)\n object.valueInfo[j] = $root.onnx.ValueInfoProto.toObject(message.valueInfo[j], options);\n }\n if (message.quantizationAnnotation && message.quantizationAnnotation.length) {\n object.quantizationAnnotation = [];\n for (var j = 0; j < message.quantizationAnnotation.length; ++j)\n object.quantizationAnnotation[j] = $root.onnx.TensorAnnotation.toObject(message.quantizationAnnotation[j], options);\n }\n if (message.sparseInitializer && message.sparseInitializer.length) {\n object.sparseInitializer = [];\n for (var j = 0; j < message.sparseInitializer.length; ++j)\n object.sparseInitializer[j] = $root.onnx.SparseTensorProto.toObject(message.sparseInitializer[j], options);\n }\n return object;\n };\n\n /**\n * Converts this GraphProto to JSON.\n * @function toJSON\n * @memberof onnx.GraphProto\n * @instance\n * @returns {Object.} JSON object\n */\n GraphProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for GraphProto\n * @function getTypeUrl\n * @memberof onnx.GraphProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n GraphProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.GraphProto\";\n };\n\n return GraphProto;\n })();\n\n onnx.TensorProto = 
(function() {\n\n /**\n * Properties of a TensorProto.\n * @memberof onnx\n * @interface ITensorProto\n * @property {Array.|null} [dims] TensorProto dims\n * @property {number|null} [dataType] TensorProto dataType\n * @property {onnx.TensorProto.ISegment|null} [segment] TensorProto segment\n * @property {Array.|null} [floatData] TensorProto floatData\n * @property {Array.|null} [int32Data] TensorProto int32Data\n * @property {Array.|null} [stringData] TensorProto stringData\n * @property {Array.|null} [int64Data] TensorProto int64Data\n * @property {string|null} [name] TensorProto name\n * @property {string|null} [docString] TensorProto docString\n * @property {Uint8Array|null} [rawData] TensorProto rawData\n * @property {Array.|null} [externalData] TensorProto externalData\n * @property {onnx.TensorProto.DataLocation|null} [dataLocation] TensorProto dataLocation\n * @property {Array.|null} [doubleData] TensorProto doubleData\n * @property {Array.|null} [uint64Data] TensorProto uint64Data\n */\n\n /**\n * Constructs a new TensorProto.\n * @memberof onnx\n * @classdesc Represents a TensorProto.\n * @implements ITensorProto\n * @constructor\n * @param {onnx.ITensorProto=} [properties] Properties to set\n */\n function TensorProto(properties) {\n this.dims = [];\n this.floatData = [];\n this.int32Data = [];\n this.stringData = [];\n this.int64Data = [];\n this.externalData = [];\n this.doubleData = [];\n this.uint64Data = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TensorProto dims.\n * @member {Array.} dims\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.dims = $util.emptyArray;\n\n /**\n * TensorProto dataType.\n * @member {number} dataType\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.dataType = 0;\n\n /**\n * TensorProto segment.\n * @member {onnx.TensorProto.ISegment|null|undefined} segment\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.segment = null;\n\n /**\n * TensorProto floatData.\n * @member {Array.} floatData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.floatData = $util.emptyArray;\n\n /**\n * TensorProto int32Data.\n * @member {Array.} int32Data\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.int32Data = $util.emptyArray;\n\n /**\n * TensorProto stringData.\n * @member {Array.} stringData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.stringData = $util.emptyArray;\n\n /**\n * TensorProto int64Data.\n * @member {Array.} int64Data\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.int64Data = $util.emptyArray;\n\n /**\n * TensorProto name.\n * @member {string} name\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.name = \"\";\n\n /**\n * TensorProto docString.\n * @member {string} docString\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.docString = \"\";\n\n /**\n * TensorProto rawData.\n * @member {Uint8Array} rawData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.rawData = $util.newBuffer([]);\n\n /**\n * TensorProto externalData.\n * @member {Array.} externalData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.externalData = $util.emptyArray;\n\n /**\n * TensorProto dataLocation.\n * @member {onnx.TensorProto.DataLocation} dataLocation\n * @memberof 
onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.dataLocation = 0;\n\n /**\n * TensorProto doubleData.\n * @member {Array.} doubleData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.doubleData = $util.emptyArray;\n\n /**\n * TensorProto uint64Data.\n * @member {Array.} uint64Data\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.uint64Data = $util.emptyArray;\n\n /**\n * Creates a new TensorProto instance using the specified properties.\n * @function create\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.ITensorProto=} [properties] Properties to set\n * @returns {onnx.TensorProto} TensorProto instance\n */\n TensorProto.create = function create(properties) {\n return new TensorProto(properties);\n };\n\n /**\n * Encodes the specified TensorProto message. Does not implicitly {@link onnx.TensorProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.ITensorProto} message TensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.dims != null && message.dims.length) {\n writer.uint32(/* id 1, wireType 2 =*/10).fork();\n for (var i = 0; i < message.dims.length; ++i)\n writer.int64(message.dims[i]);\n writer.ldelim();\n }\n if (message.dataType != null && Object.hasOwnProperty.call(message, \"dataType\"))\n writer.uint32(/* id 2, wireType 0 =*/16).int32(message.dataType);\n if (message.segment != null && Object.hasOwnProperty.call(message, \"segment\"))\n $root.onnx.TensorProto.Segment.encode(message.segment, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();\n if (message.floatData != null && message.floatData.length) {\n writer.uint32(/* id 4, wireType 2 =*/34).fork();\n for (var i = 0; i < message.floatData.length; ++i)\n writer.float(message.floatData[i]);\n writer.ldelim();\n }\n if (message.int32Data != null && message.int32Data.length) {\n writer.uint32(/* id 5, wireType 2 =*/42).fork();\n for (var i = 0; i < message.int32Data.length; ++i)\n writer.int32(message.int32Data[i]);\n writer.ldelim();\n }\n if (message.stringData != null && message.stringData.length)\n for (var i = 0; i < message.stringData.length; ++i)\n writer.uint32(/* id 6, wireType 2 =*/50).bytes(message.stringData[i]);\n if (message.int64Data != null && message.int64Data.length) {\n writer.uint32(/* id 7, wireType 2 =*/58).fork();\n for (var i = 0; i < message.int64Data.length; ++i)\n writer.int64(message.int64Data[i]);\n writer.ldelim();\n }\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 8, wireType 2 =*/66).string(message.name);\n if (message.rawData != null && Object.hasOwnProperty.call(message, \"rawData\"))\n writer.uint32(/* id 9, wireType 2 =*/74).bytes(message.rawData);\n if (message.doubleData != null && message.doubleData.length) {\n writer.uint32(/* id 10, wireType 2 =*/82).fork();\n for (var i = 0; i < message.doubleData.length; ++i)\n writer.double(message.doubleData[i]);\n writer.ldelim();\n }\n if (message.uint64Data != null && message.uint64Data.length) {\n writer.uint32(/* id 11, wireType 2 =*/90).fork();\n for (var i = 0; i < message.uint64Data.length; ++i)\n writer.uint64(message.uint64Data[i]);\n writer.ldelim();\n }\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n 
writer.uint32(/* id 12, wireType 2 =*/98).string(message.docString);\n if (message.externalData != null && message.externalData.length)\n for (var i = 0; i < message.externalData.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.externalData[i], writer.uint32(/* id 13, wireType 2 =*/106).fork()).ldelim();\n if (message.dataLocation != null && Object.hasOwnProperty.call(message, \"dataLocation\"))\n writer.uint32(/* id 14, wireType 0 =*/112).int32(message.dataLocation);\n return writer;\n };\n\n /**\n * Encodes the specified TensorProto message, length delimited. Does not implicitly {@link onnx.TensorProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.ITensorProto} message TensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TensorProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorProto} TensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TensorProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.dims && message.dims.length))\n message.dims = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.dims.push(reader.int64());\n } else\n message.dims.push(reader.int64());\n break;\n }\n case 2: {\n message.dataType = reader.int32();\n break;\n }\n case 3: {\n message.segment = $root.onnx.TensorProto.Segment.decode(reader, reader.uint32());\n break;\n }\n case 4: {\n if (!(message.floatData && message.floatData.length))\n message.floatData = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.floatData.push(reader.float());\n } else\n message.floatData.push(reader.float());\n break;\n }\n case 5: {\n if (!(message.int32Data && message.int32Data.length))\n message.int32Data = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.int32Data.push(reader.int32());\n } else\n message.int32Data.push(reader.int32());\n break;\n }\n case 6: {\n if (!(message.stringData && message.stringData.length))\n message.stringData = [];\n message.stringData.push(reader.bytes());\n break;\n }\n case 7: {\n if (!(message.int64Data && message.int64Data.length))\n message.int64Data = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.int64Data.push(reader.int64());\n } else\n message.int64Data.push(reader.int64());\n break;\n }\n case 8: {\n message.name = reader.string();\n break;\n }\n case 12: {\n message.docString = reader.string();\n break;\n }\n case 9: {\n message.rawData = reader.bytes();\n break;\n }\n case 13: {\n if (!(message.externalData && 
message.externalData.length))\n message.externalData = [];\n message.externalData.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n case 14: {\n message.dataLocation = reader.int32();\n break;\n }\n case 10: {\n if (!(message.doubleData && message.doubleData.length))\n message.doubleData = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.doubleData.push(reader.double());\n } else\n message.doubleData.push(reader.double());\n break;\n }\n case 11: {\n if (!(message.uint64Data && message.uint64Data.length))\n message.uint64Data = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.uint64Data.push(reader.uint64());\n } else\n message.uint64Data.push(reader.uint64());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TensorProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorProto} TensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TensorProto message.\n * @function verify\n * @memberof onnx.TensorProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TensorProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.dims != null && message.hasOwnProperty(\"dims\")) {\n if (!Array.isArray(message.dims))\n return \"dims: array expected\";\n for (var i = 0; i < message.dims.length; ++i)\n if (!$util.isInteger(message.dims[i]) && !(message.dims[i] && $util.isInteger(message.dims[i].low) && $util.isInteger(message.dims[i].high)))\n return \"dims: integer|Long[] expected\";\n }\n if (message.dataType != null && message.hasOwnProperty(\"dataType\"))\n if (!$util.isInteger(message.dataType))\n return \"dataType: integer expected\";\n if (message.segment != null && message.hasOwnProperty(\"segment\")) {\n var error = $root.onnx.TensorProto.Segment.verify(message.segment);\n if (error)\n return \"segment.\" + error;\n }\n if (message.floatData != null && message.hasOwnProperty(\"floatData\")) {\n if (!Array.isArray(message.floatData))\n return \"floatData: array expected\";\n for (var i = 0; i < message.floatData.length; ++i)\n if (typeof message.floatData[i] !== \"number\")\n return \"floatData: number[] expected\";\n }\n if (message.int32Data != null && message.hasOwnProperty(\"int32Data\")) {\n if (!Array.isArray(message.int32Data))\n return \"int32Data: array expected\";\n for (var i = 0; i < message.int32Data.length; ++i)\n if (!$util.isInteger(message.int32Data[i]))\n return \"int32Data: integer[] expected\";\n }\n if (message.stringData != null && message.hasOwnProperty(\"stringData\")) {\n if (!Array.isArray(message.stringData))\n return \"stringData: array expected\";\n for (var i = 0; i < message.stringData.length; ++i)\n if (!(message.stringData[i] && typeof message.stringData[i].length === 
\"number\" || $util.isString(message.stringData[i])))\n return \"stringData: buffer[] expected\";\n }\n if (message.int64Data != null && message.hasOwnProperty(\"int64Data\")) {\n if (!Array.isArray(message.int64Data))\n return \"int64Data: array expected\";\n for (var i = 0; i < message.int64Data.length; ++i)\n if (!$util.isInteger(message.int64Data[i]) && !(message.int64Data[i] && $util.isInteger(message.int64Data[i].low) && $util.isInteger(message.int64Data[i].high)))\n return \"int64Data: integer|Long[] expected\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.rawData != null && message.hasOwnProperty(\"rawData\"))\n if (!(message.rawData && typeof message.rawData.length === \"number\" || $util.isString(message.rawData)))\n return \"rawData: buffer expected\";\n if (message.externalData != null && message.hasOwnProperty(\"externalData\")) {\n if (!Array.isArray(message.externalData))\n return \"externalData: array expected\";\n for (var i = 0; i < message.externalData.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.externalData[i]);\n if (error)\n return \"externalData.\" + error;\n }\n }\n if (message.dataLocation != null && message.hasOwnProperty(\"dataLocation\"))\n switch (message.dataLocation) {\n default:\n return \"dataLocation: enum value expected\";\n case 0:\n case 1:\n break;\n }\n if (message.doubleData != null && message.hasOwnProperty(\"doubleData\")) {\n if (!Array.isArray(message.doubleData))\n return \"doubleData: array expected\";\n for (var i = 0; i < message.doubleData.length; ++i)\n if (typeof message.doubleData[i] !== \"number\")\n return \"doubleData: number[] expected\";\n }\n if (message.uint64Data != null && message.hasOwnProperty(\"uint64Data\")) {\n if (!Array.isArray(message.uint64Data))\n return \"uint64Data: array expected\";\n for (var i = 0; i < message.uint64Data.length; ++i)\n if (!$util.isInteger(message.uint64Data[i]) && !(message.uint64Data[i] && $util.isInteger(message.uint64Data[i].low) && $util.isInteger(message.uint64Data[i].high)))\n return \"uint64Data: integer|Long[] expected\";\n }\n return null;\n };\n\n /**\n * Creates a TensorProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorProto} TensorProto\n */\n TensorProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorProto)\n return object;\n var message = new $root.onnx.TensorProto();\n if (object.dims) {\n if (!Array.isArray(object.dims))\n throw TypeError(\".onnx.TensorProto.dims: array expected\");\n message.dims = [];\n for (var i = 0; i < object.dims.length; ++i)\n if ($util.Long)\n (message.dims[i] = $util.Long.fromValue(object.dims[i])).unsigned = false;\n else if (typeof object.dims[i] === \"string\")\n message.dims[i] = parseInt(object.dims[i], 10);\n else if (typeof object.dims[i] === \"number\")\n message.dims[i] = object.dims[i];\n else if (typeof object.dims[i] === \"object\")\n message.dims[i] = new $util.LongBits(object.dims[i].low >>> 0, object.dims[i].high >>> 0).toNumber();\n }\n if (object.dataType != null)\n message.dataType = object.dataType | 0;\n if (object.segment != null) {\n if (typeof object.segment !== \"object\")\n throw TypeError(\".onnx.TensorProto.segment: object expected\");\n message.segment = $root.onnx.TensorProto.Segment.fromObject(object.segment);\n }\n if (object.floatData) {\n if (!Array.isArray(object.floatData))\n throw TypeError(\".onnx.TensorProto.floatData: array expected\");\n message.floatData = [];\n for (var i = 0; i < object.floatData.length; ++i)\n message.floatData[i] = Number(object.floatData[i]);\n }\n if (object.int32Data) {\n if (!Array.isArray(object.int32Data))\n throw TypeError(\".onnx.TensorProto.int32Data: array expected\");\n message.int32Data = [];\n for (var i = 0; i < object.int32Data.length; ++i)\n message.int32Data[i] = object.int32Data[i] | 0;\n }\n if (object.stringData) {\n if (!Array.isArray(object.stringData))\n throw TypeError(\".onnx.TensorProto.stringData: array expected\");\n message.stringData = [];\n for (var i = 0; i < object.stringData.length; ++i)\n if (typeof object.stringData[i] === \"string\")\n $util.base64.decode(object.stringData[i], message.stringData[i] = $util.newBuffer($util.base64.length(object.stringData[i])), 0);\n else if (object.stringData[i].length >= 0)\n message.stringData[i] = object.stringData[i];\n }\n if (object.int64Data) {\n if (!Array.isArray(object.int64Data))\n throw TypeError(\".onnx.TensorProto.int64Data: array expected\");\n message.int64Data = [];\n for (var i = 0; i < object.int64Data.length; ++i)\n if ($util.Long)\n (message.int64Data[i] = $util.Long.fromValue(object.int64Data[i])).unsigned = false;\n else if (typeof object.int64Data[i] === \"string\")\n message.int64Data[i] = parseInt(object.int64Data[i], 10);\n else if (typeof object.int64Data[i] === \"number\")\n message.int64Data[i] = object.int64Data[i];\n else if (typeof object.int64Data[i] === \"object\")\n message.int64Data[i] = new $util.LongBits(object.int64Data[i].low >>> 0, object.int64Data[i].high >>> 0).toNumber();\n }\n if (object.name != null)\n message.name = String(object.name);\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.rawData != null)\n if (typeof object.rawData === \"string\")\n $util.base64.decode(object.rawData, message.rawData = $util.newBuffer($util.base64.length(object.rawData)), 0);\n else if (object.rawData.length >= 0)\n message.rawData = object.rawData;\n if (object.externalData) {\n if (!Array.isArray(object.externalData))\n throw 
TypeError(\".onnx.TensorProto.externalData: array expected\");\n message.externalData = [];\n for (var i = 0; i < object.externalData.length; ++i) {\n if (typeof object.externalData[i] !== \"object\")\n throw TypeError(\".onnx.TensorProto.externalData: object expected\");\n message.externalData[i] = $root.onnx.StringStringEntryProto.fromObject(object.externalData[i]);\n }\n }\n switch (object.dataLocation) {\n default:\n if (typeof object.dataLocation === \"number\") {\n message.dataLocation = object.dataLocation;\n break;\n }\n break;\n case \"DEFAULT\":\n case 0:\n message.dataLocation = 0;\n break;\n case \"EXTERNAL\":\n case 1:\n message.dataLocation = 1;\n break;\n }\n if (object.doubleData) {\n if (!Array.isArray(object.doubleData))\n throw TypeError(\".onnx.TensorProto.doubleData: array expected\");\n message.doubleData = [];\n for (var i = 0; i < object.doubleData.length; ++i)\n message.doubleData[i] = Number(object.doubleData[i]);\n }\n if (object.uint64Data) {\n if (!Array.isArray(object.uint64Data))\n throw TypeError(\".onnx.TensorProto.uint64Data: array expected\");\n message.uint64Data = [];\n for (var i = 0; i < object.uint64Data.length; ++i)\n if ($util.Long)\n (message.uint64Data[i] = $util.Long.fromValue(object.uint64Data[i])).unsigned = true;\n else if (typeof object.uint64Data[i] === \"string\")\n message.uint64Data[i] = parseInt(object.uint64Data[i], 10);\n else if (typeof object.uint64Data[i] === \"number\")\n message.uint64Data[i] = object.uint64Data[i];\n else if (typeof object.uint64Data[i] === \"object\")\n message.uint64Data[i] = new $util.LongBits(object.uint64Data[i].low >>> 0, object.uint64Data[i].high >>> 0).toNumber(true);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TensorProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.TensorProto} message TensorProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TensorProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.dims = [];\n object.floatData = [];\n object.int32Data = [];\n object.stringData = [];\n object.int64Data = [];\n object.doubleData = [];\n object.uint64Data = [];\n object.externalData = [];\n }\n if (options.defaults) {\n object.dataType = 0;\n object.segment = null;\n object.name = \"\";\n if (options.bytes === String)\n object.rawData = \"\";\n else {\n object.rawData = [];\n if (options.bytes !== Array)\n object.rawData = $util.newBuffer(object.rawData);\n }\n object.docString = \"\";\n object.dataLocation = options.enums === String ? \"DEFAULT\" : 0;\n }\n if (message.dims && message.dims.length) {\n object.dims = [];\n for (var j = 0; j < message.dims.length; ++j)\n if (typeof message.dims[j] === \"number\")\n object.dims[j] = options.longs === String ? String(message.dims[j]) : message.dims[j];\n else\n object.dims[j] = options.longs === String ? $util.Long.prototype.toString.call(message.dims[j]) : options.longs === Number ? 
new $util.LongBits(message.dims[j].low >>> 0, message.dims[j].high >>> 0).toNumber() : message.dims[j];\n }\n if (message.dataType != null && message.hasOwnProperty(\"dataType\"))\n object.dataType = message.dataType;\n if (message.segment != null && message.hasOwnProperty(\"segment\"))\n object.segment = $root.onnx.TensorProto.Segment.toObject(message.segment, options);\n if (message.floatData && message.floatData.length) {\n object.floatData = [];\n for (var j = 0; j < message.floatData.length; ++j)\n object.floatData[j] = options.json && !isFinite(message.floatData[j]) ? String(message.floatData[j]) : message.floatData[j];\n }\n if (message.int32Data && message.int32Data.length) {\n object.int32Data = [];\n for (var j = 0; j < message.int32Data.length; ++j)\n object.int32Data[j] = message.int32Data[j];\n }\n if (message.stringData && message.stringData.length) {\n object.stringData = [];\n for (var j = 0; j < message.stringData.length; ++j)\n object.stringData[j] = options.bytes === String ? $util.base64.encode(message.stringData[j], 0, message.stringData[j].length) : options.bytes === Array ? Array.prototype.slice.call(message.stringData[j]) : message.stringData[j];\n }\n if (message.int64Data && message.int64Data.length) {\n object.int64Data = [];\n for (var j = 0; j < message.int64Data.length; ++j)\n if (typeof message.int64Data[j] === \"number\")\n object.int64Data[j] = options.longs === String ? String(message.int64Data[j]) : message.int64Data[j];\n else\n object.int64Data[j] = options.longs === String ? $util.Long.prototype.toString.call(message.int64Data[j]) : options.longs === Number ? new $util.LongBits(message.int64Data[j].low >>> 0, message.int64Data[j].high >>> 0).toNumber() : message.int64Data[j];\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.rawData != null && message.hasOwnProperty(\"rawData\"))\n object.rawData = options.bytes === String ? $util.base64.encode(message.rawData, 0, message.rawData.length) : options.bytes === Array ? Array.prototype.slice.call(message.rawData) : message.rawData;\n if (message.doubleData && message.doubleData.length) {\n object.doubleData = [];\n for (var j = 0; j < message.doubleData.length; ++j)\n object.doubleData[j] = options.json && !isFinite(message.doubleData[j]) ? String(message.doubleData[j]) : message.doubleData[j];\n }\n if (message.uint64Data && message.uint64Data.length) {\n object.uint64Data = [];\n for (var j = 0; j < message.uint64Data.length; ++j)\n if (typeof message.uint64Data[j] === \"number\")\n object.uint64Data[j] = options.longs === String ? String(message.uint64Data[j]) : message.uint64Data[j];\n else\n object.uint64Data[j] = options.longs === String ? $util.Long.prototype.toString.call(message.uint64Data[j]) : options.longs === Number ? new $util.LongBits(message.uint64Data[j].low >>> 0, message.uint64Data[j].high >>> 0).toNumber(true) : message.uint64Data[j];\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.externalData && message.externalData.length) {\n object.externalData = [];\n for (var j = 0; j < message.externalData.length; ++j)\n object.externalData[j] = $root.onnx.StringStringEntryProto.toObject(message.externalData[j], options);\n }\n if (message.dataLocation != null && message.hasOwnProperty(\"dataLocation\"))\n object.dataLocation = options.enums === String ? $root.onnx.TensorProto.DataLocation[message.dataLocation] === undefined ? 
message.dataLocation : $root.onnx.TensorProto.DataLocation[message.dataLocation] : message.dataLocation;\n return object;\n };\n\n /**\n * Converts this TensorProto to JSON.\n * @function toJSON\n * @memberof onnx.TensorProto\n * @instance\n * @returns {Object.} JSON object\n */\n TensorProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TensorProto\n * @function getTypeUrl\n * @memberof onnx.TensorProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TensorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorProto\";\n };\n\n /**\n * DataType enum.\n * @name onnx.TensorProto.DataType\n * @enum {number}\n * @property {number} UNDEFINED=0 UNDEFINED value\n * @property {number} FLOAT=1 FLOAT value\n * @property {number} UINT8=2 UINT8 value\n * @property {number} INT8=3 INT8 value\n * @property {number} UINT16=4 UINT16 value\n * @property {number} INT16=5 INT16 value\n * @property {number} INT32=6 INT32 value\n * @property {number} INT64=7 INT64 value\n * @property {number} STRING=8 STRING value\n * @property {number} BOOL=9 BOOL value\n * @property {number} FLOAT16=10 FLOAT16 value\n * @property {number} DOUBLE=11 DOUBLE value\n * @property {number} UINT32=12 UINT32 value\n * @property {number} UINT64=13 UINT64 value\n * @property {number} COMPLEX64=14 COMPLEX64 value\n * @property {number} COMPLEX128=15 COMPLEX128 value\n * @property {number} BFLOAT16=16 BFLOAT16 value\n * @property {number} FLOAT8E4M3FN=17 FLOAT8E4M3FN value\n * @property {number} FLOAT8E4M3FNUZ=18 FLOAT8E4M3FNUZ value\n * @property {number} FLOAT8E5M2=19 FLOAT8E5M2 value\n * @property {number} FLOAT8E5M2FNUZ=20 FLOAT8E5M2FNUZ value\n */\n TensorProto.DataType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"UNDEFINED\"] = 0;\n values[valuesById[1] = \"FLOAT\"] = 1;\n values[valuesById[2] = \"UINT8\"] = 2;\n values[valuesById[3] = \"INT8\"] = 3;\n values[valuesById[4] = \"UINT16\"] = 4;\n values[valuesById[5] = \"INT16\"] = 5;\n values[valuesById[6] = \"INT32\"] = 6;\n values[valuesById[7] = \"INT64\"] = 7;\n values[valuesById[8] = \"STRING\"] = 8;\n values[valuesById[9] = \"BOOL\"] = 9;\n values[valuesById[10] = \"FLOAT16\"] = 10;\n values[valuesById[11] = \"DOUBLE\"] = 11;\n values[valuesById[12] = \"UINT32\"] = 12;\n values[valuesById[13] = \"UINT64\"] = 13;\n values[valuesById[14] = \"COMPLEX64\"] = 14;\n values[valuesById[15] = \"COMPLEX128\"] = 15;\n values[valuesById[16] = \"BFLOAT16\"] = 16;\n values[valuesById[17] = \"FLOAT8E4M3FN\"] = 17;\n values[valuesById[18] = \"FLOAT8E4M3FNUZ\"] = 18;\n values[valuesById[19] = \"FLOAT8E5M2\"] = 19;\n values[valuesById[20] = \"FLOAT8E5M2FNUZ\"] = 20;\n return values;\n })();\n\n TensorProto.Segment = (function() {\n\n /**\n * Properties of a Segment.\n * @memberof onnx.TensorProto\n * @interface ISegment\n * @property {number|Long|null} [begin] Segment begin\n * @property {number|Long|null} [end] Segment end\n */\n\n /**\n * Constructs a new Segment.\n * @memberof onnx.TensorProto\n * @classdesc Represents a Segment.\n * @implements ISegment\n * @constructor\n * @param {onnx.TensorProto.ISegment=} [properties] Properties to set\n */\n function Segment(properties) {\n if 
(properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Segment begin.\n * @member {number|Long} begin\n * @memberof onnx.TensorProto.Segment\n * @instance\n */\n Segment.prototype.begin = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * Segment end.\n * @member {number|Long} end\n * @memberof onnx.TensorProto.Segment\n * @instance\n */\n Segment.prototype.end = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * Creates a new Segment instance using the specified properties.\n * @function create\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.ISegment=} [properties] Properties to set\n * @returns {onnx.TensorProto.Segment} Segment instance\n */\n Segment.create = function create(properties) {\n return new Segment(properties);\n };\n\n /**\n * Encodes the specified Segment message. Does not implicitly {@link onnx.TensorProto.Segment.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.ISegment} message Segment message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Segment.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.begin != null && Object.hasOwnProperty.call(message, \"begin\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int64(message.begin);\n if (message.end != null && Object.hasOwnProperty.call(message, \"end\"))\n writer.uint32(/* id 2, wireType 0 =*/16).int64(message.end);\n return writer;\n };\n\n /**\n * Encodes the specified Segment message, length delimited. Does not implicitly {@link onnx.TensorProto.Segment.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.ISegment} message Segment message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Segment.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Segment message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorProto.Segment} Segment\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Segment.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TensorProto.Segment();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.begin = reader.int64();\n break;\n }\n case 2: {\n message.end = reader.int64();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Segment message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorProto.Segment} Segment\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Segment.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Segment message.\n * @function verify\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Segment.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.begin != null && message.hasOwnProperty(\"begin\"))\n if (!$util.isInteger(message.begin) && !(message.begin && $util.isInteger(message.begin.low) && $util.isInteger(message.begin.high)))\n return \"begin: integer|Long expected\";\n if (message.end != null && message.hasOwnProperty(\"end\"))\n if (!$util.isInteger(message.end) && !(message.end && $util.isInteger(message.end.low) && $util.isInteger(message.end.high)))\n return \"end: integer|Long expected\";\n return null;\n };\n\n /**\n * Creates a Segment message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorProto.Segment} Segment\n */\n Segment.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorProto.Segment)\n return object;\n var message = new $root.onnx.TensorProto.Segment();\n if (object.begin != null)\n if ($util.Long)\n (message.begin = $util.Long.fromValue(object.begin)).unsigned = false;\n else if (typeof object.begin === \"string\")\n message.begin = parseInt(object.begin, 10);\n else if (typeof object.begin === \"number\")\n message.begin = object.begin;\n else if (typeof object.begin === \"object\")\n message.begin = new $util.LongBits(object.begin.low >>> 0, object.begin.high >>> 0).toNumber();\n if (object.end != null)\n if ($util.Long)\n (message.end = $util.Long.fromValue(object.end)).unsigned = false;\n else if (typeof object.end === \"string\")\n message.end = parseInt(object.end, 10);\n else if (typeof object.end === \"number\")\n message.end = object.end;\n else if (typeof object.end === \"object\")\n message.end = new $util.LongBits(object.end.low >>> 0, object.end.high >>> 0).toNumber();\n return message;\n };\n\n /**\n * Creates a plain object from a Segment message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.Segment} message Segment\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Segment.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.begin = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.begin = options.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.end = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.end = options.longs === String ? \"0\" : 0;\n }\n if (message.begin != null && message.hasOwnProperty(\"begin\"))\n if (typeof message.begin === \"number\")\n object.begin = options.longs === String ? String(message.begin) : message.begin;\n else\n object.begin = options.longs === String ? $util.Long.prototype.toString.call(message.begin) : options.longs === Number ? new $util.LongBits(message.begin.low >>> 0, message.begin.high >>> 0).toNumber() : message.begin;\n if (message.end != null && message.hasOwnProperty(\"end\"))\n if (typeof message.end === \"number\")\n object.end = options.longs === String ? String(message.end) : message.end;\n else\n object.end = options.longs === String ? $util.Long.prototype.toString.call(message.end) : options.longs === Number ? new $util.LongBits(message.end.low >>> 0, message.end.high >>> 0).toNumber() : message.end;\n return object;\n };\n\n /**\n * Converts this Segment to JSON.\n * @function toJSON\n * @memberof onnx.TensorProto.Segment\n * @instance\n * @returns {Object.} JSON object\n */\n Segment.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Segment\n * @function getTypeUrl\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Segment.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorProto.Segment\";\n };\n\n return Segment;\n })();\n\n /**\n * DataLocation enum.\n * @name onnx.TensorProto.DataLocation\n * @enum {number}\n * @property {number} DEFAULT=0 DEFAULT value\n * @property {number} EXTERNAL=1 EXTERNAL value\n */\n TensorProto.DataLocation = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"DEFAULT\"] = 0;\n values[valuesById[1] = \"EXTERNAL\"] = 1;\n return values;\n })();\n\n return TensorProto;\n })();\n\n onnx.SparseTensorProto = (function() {\n\n /**\n * Properties of a SparseTensorProto.\n * @memberof onnx\n * @interface ISparseTensorProto\n * @property {onnx.ITensorProto|null} [values] SparseTensorProto values\n * @property {onnx.ITensorProto|null} [indices] SparseTensorProto indices\n * @property {Array.|null} [dims] SparseTensorProto dims\n */\n\n /**\n * Constructs a new SparseTensorProto.\n * @memberof onnx\n * @classdesc Represents a SparseTensorProto.\n * @implements ISparseTensorProto\n * @constructor\n * @param {onnx.ISparseTensorProto=} [properties] Properties to 
set\n */\n function SparseTensorProto(properties) {\n this.dims = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * SparseTensorProto values.\n * @member {onnx.ITensorProto|null|undefined} values\n * @memberof onnx.SparseTensorProto\n * @instance\n */\n SparseTensorProto.prototype.values = null;\n\n /**\n * SparseTensorProto indices.\n * @member {onnx.ITensorProto|null|undefined} indices\n * @memberof onnx.SparseTensorProto\n * @instance\n */\n SparseTensorProto.prototype.indices = null;\n\n /**\n * SparseTensorProto dims.\n * @member {Array.} dims\n * @memberof onnx.SparseTensorProto\n * @instance\n */\n SparseTensorProto.prototype.dims = $util.emptyArray;\n\n /**\n * Creates a new SparseTensorProto instance using the specified properties.\n * @function create\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.ISparseTensorProto=} [properties] Properties to set\n * @returns {onnx.SparseTensorProto} SparseTensorProto instance\n */\n SparseTensorProto.create = function create(properties) {\n return new SparseTensorProto(properties);\n };\n\n /**\n * Encodes the specified SparseTensorProto message. Does not implicitly {@link onnx.SparseTensorProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.ISparseTensorProto} message SparseTensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensorProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.values != null && Object.hasOwnProperty.call(message, \"values\"))\n $root.onnx.TensorProto.encode(message.values, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.indices != null && Object.hasOwnProperty.call(message, \"indices\"))\n $root.onnx.TensorProto.encode(message.indices, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n if (message.dims != null && message.dims.length) {\n writer.uint32(/* id 3, wireType 2 =*/26).fork();\n for (var i = 0; i < message.dims.length; ++i)\n writer.int64(message.dims[i]);\n writer.ldelim();\n }\n return writer;\n };\n\n /**\n * Encodes the specified SparseTensorProto message, length delimited. 
Does not implicitly {@link onnx.SparseTensorProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.ISparseTensorProto} message SparseTensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensorProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a SparseTensorProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.SparseTensorProto} SparseTensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensorProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.SparseTensorProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.values = $root.onnx.TensorProto.decode(reader, reader.uint32());\n break;\n }\n case 2: {\n message.indices = $root.onnx.TensorProto.decode(reader, reader.uint32());\n break;\n }\n case 3: {\n if (!(message.dims && message.dims.length))\n message.dims = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.dims.push(reader.int64());\n } else\n message.dims.push(reader.int64());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a SparseTensorProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.SparseTensorProto} SparseTensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensorProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a SparseTensorProto message.\n * @function verify\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n SparseTensorProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.values != null && message.hasOwnProperty(\"values\")) {\n var error = $root.onnx.TensorProto.verify(message.values);\n if (error)\n return \"values.\" + error;\n }\n if (message.indices != null && message.hasOwnProperty(\"indices\")) {\n var error = $root.onnx.TensorProto.verify(message.indices);\n if (error)\n return \"indices.\" + error;\n }\n if (message.dims != null && message.hasOwnProperty(\"dims\")) {\n if (!Array.isArray(message.dims))\n return \"dims: array expected\";\n for (var i = 0; i < message.dims.length; ++i)\n if (!$util.isInteger(message.dims[i]) && !(message.dims[i] && 
$util.isInteger(message.dims[i].low) && $util.isInteger(message.dims[i].high)))\n return \"dims: integer|Long[] expected\";\n }\n return null;\n };\n\n /**\n * Creates a SparseTensorProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.SparseTensorProto} SparseTensorProto\n */\n SparseTensorProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.SparseTensorProto)\n return object;\n var message = new $root.onnx.SparseTensorProto();\n if (object.values != null) {\n if (typeof object.values !== \"object\")\n throw TypeError(\".onnx.SparseTensorProto.values: object expected\");\n message.values = $root.onnx.TensorProto.fromObject(object.values);\n }\n if (object.indices != null) {\n if (typeof object.indices !== \"object\")\n throw TypeError(\".onnx.SparseTensorProto.indices: object expected\");\n message.indices = $root.onnx.TensorProto.fromObject(object.indices);\n }\n if (object.dims) {\n if (!Array.isArray(object.dims))\n throw TypeError(\".onnx.SparseTensorProto.dims: array expected\");\n message.dims = [];\n for (var i = 0; i < object.dims.length; ++i)\n if ($util.Long)\n (message.dims[i] = $util.Long.fromValue(object.dims[i])).unsigned = false;\n else if (typeof object.dims[i] === \"string\")\n message.dims[i] = parseInt(object.dims[i], 10);\n else if (typeof object.dims[i] === \"number\")\n message.dims[i] = object.dims[i];\n else if (typeof object.dims[i] === \"object\")\n message.dims[i] = new $util.LongBits(object.dims[i].low >>> 0, object.dims[i].high >>> 0).toNumber();\n }\n return message;\n };\n\n /**\n * Creates a plain object from a SparseTensorProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.SparseTensorProto} message SparseTensorProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n SparseTensorProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults)\n object.dims = [];\n if (options.defaults) {\n object.values = null;\n object.indices = null;\n }\n if (message.values != null && message.hasOwnProperty(\"values\"))\n object.values = $root.onnx.TensorProto.toObject(message.values, options);\n if (message.indices != null && message.hasOwnProperty(\"indices\"))\n object.indices = $root.onnx.TensorProto.toObject(message.indices, options);\n if (message.dims && message.dims.length) {\n object.dims = [];\n for (var j = 0; j < message.dims.length; ++j)\n if (typeof message.dims[j] === \"number\")\n object.dims[j] = options.longs === String ? String(message.dims[j]) : message.dims[j];\n else\n object.dims[j] = options.longs === String ? $util.Long.prototype.toString.call(message.dims[j]) : options.longs === Number ? 
new $util.LongBits(message.dims[j].low >>> 0, message.dims[j].high >>> 0).toNumber() : message.dims[j];\n }\n return object;\n };\n\n /**\n * Converts this SparseTensorProto to JSON.\n * @function toJSON\n * @memberof onnx.SparseTensorProto\n * @instance\n * @returns {Object.} JSON object\n */\n SparseTensorProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for SparseTensorProto\n * @function getTypeUrl\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n SparseTensorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.SparseTensorProto\";\n };\n\n return SparseTensorProto;\n })();\n\n onnx.TensorShapeProto = (function() {\n\n /**\n * Properties of a TensorShapeProto.\n * @memberof onnx\n * @interface ITensorShapeProto\n * @property {Array.|null} [dim] TensorShapeProto dim\n */\n\n /**\n * Constructs a new TensorShapeProto.\n * @memberof onnx\n * @classdesc Represents a TensorShapeProto.\n * @implements ITensorShapeProto\n * @constructor\n * @param {onnx.ITensorShapeProto=} [properties] Properties to set\n */\n function TensorShapeProto(properties) {\n this.dim = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TensorShapeProto dim.\n * @member {Array.} dim\n * @memberof onnx.TensorShapeProto\n * @instance\n */\n TensorShapeProto.prototype.dim = $util.emptyArray;\n\n /**\n * Creates a new TensorShapeProto instance using the specified properties.\n * @function create\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.ITensorShapeProto=} [properties] Properties to set\n * @returns {onnx.TensorShapeProto} TensorShapeProto instance\n */\n TensorShapeProto.create = function create(properties) {\n return new TensorShapeProto(properties);\n };\n\n /**\n * Encodes the specified TensorShapeProto message. Does not implicitly {@link onnx.TensorShapeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.ITensorShapeProto} message TensorShapeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorShapeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.dim != null && message.dim.length)\n for (var i = 0; i < message.dim.length; ++i)\n $root.onnx.TensorShapeProto.Dimension.encode(message.dim[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TensorShapeProto message, length delimited. 
Does not implicitly {@link onnx.TensorShapeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.ITensorShapeProto} message TensorShapeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorShapeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TensorShapeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorShapeProto} TensorShapeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorShapeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TensorShapeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.dim && message.dim.length))\n message.dim = [];\n message.dim.push($root.onnx.TensorShapeProto.Dimension.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TensorShapeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorShapeProto} TensorShapeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorShapeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TensorShapeProto message.\n * @function verify\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TensorShapeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.dim != null && message.hasOwnProperty(\"dim\")) {\n if (!Array.isArray(message.dim))\n return \"dim: array expected\";\n for (var i = 0; i < message.dim.length; ++i) {\n var error = $root.onnx.TensorShapeProto.Dimension.verify(message.dim[i]);\n if (error)\n return \"dim.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a TensorShapeProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorShapeProto} TensorShapeProto\n */\n TensorShapeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorShapeProto)\n return object;\n var message = new $root.onnx.TensorShapeProto();\n if (object.dim) {\n if (!Array.isArray(object.dim))\n throw TypeError(\".onnx.TensorShapeProto.dim: array expected\");\n message.dim = [];\n for (var i = 0; i < object.dim.length; ++i) {\n if (typeof object.dim[i] !== \"object\")\n throw TypeError(\".onnx.TensorShapeProto.dim: object expected\");\n message.dim[i] = $root.onnx.TensorShapeProto.Dimension.fromObject(object.dim[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TensorShapeProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.TensorShapeProto} message TensorShapeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TensorShapeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults)\n object.dim = [];\n if (message.dim && message.dim.length) {\n object.dim = [];\n for (var j = 0; j < message.dim.length; ++j)\n object.dim[j] = $root.onnx.TensorShapeProto.Dimension.toObject(message.dim[j], options);\n }\n return object;\n };\n\n /**\n * Converts this TensorShapeProto to JSON.\n * @function toJSON\n * @memberof onnx.TensorShapeProto\n * @instance\n * @returns {Object.} JSON object\n */\n TensorShapeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TensorShapeProto\n * @function getTypeUrl\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TensorShapeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorShapeProto\";\n };\n\n TensorShapeProto.Dimension = (function() {\n\n /**\n * Properties of a Dimension.\n * @memberof onnx.TensorShapeProto\n * @interface IDimension\n * @property {number|Long|null} [dimValue] Dimension dimValue\n * @property {string|null} [dimParam] Dimension dimParam\n * @property {string|null} [denotation] Dimension denotation\n */\n\n /**\n * Constructs a new Dimension.\n * @memberof onnx.TensorShapeProto\n * @classdesc Represents a Dimension.\n * @implements IDimension\n * @constructor\n * @param {onnx.TensorShapeProto.IDimension=} [properties] Properties to set\n */\n function Dimension(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Dimension dimValue.\n * @member {number|Long|null|undefined} dimValue\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Dimension.prototype.dimValue = null;\n\n /**\n * Dimension dimParam.\n * @member {string|null|undefined} dimParam\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Dimension.prototype.dimParam = null;\n\n /**\n * Dimension 
denotation.\n * @member {string} denotation\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Dimension.prototype.denotation = \"\";\n\n // OneOf field names bound to virtual getters and setters\n var $oneOfFields;\n\n /**\n * Dimension value.\n * @member {\"dimValue\"|\"dimParam\"|undefined} value\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Object.defineProperty(Dimension.prototype, \"value\", {\n get: $util.oneOfGetter($oneOfFields = [\"dimValue\", \"dimParam\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n /**\n * Creates a new Dimension instance using the specified properties.\n * @function create\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.IDimension=} [properties] Properties to set\n * @returns {onnx.TensorShapeProto.Dimension} Dimension instance\n */\n Dimension.create = function create(properties) {\n return new Dimension(properties);\n };\n\n /**\n * Encodes the specified Dimension message. Does not implicitly {@link onnx.TensorShapeProto.Dimension.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.IDimension} message Dimension message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Dimension.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.dimValue != null && Object.hasOwnProperty.call(message, \"dimValue\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int64(message.dimValue);\n if (message.dimParam != null && Object.hasOwnProperty.call(message, \"dimParam\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.dimParam);\n if (message.denotation != null && Object.hasOwnProperty.call(message, \"denotation\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.denotation);\n return writer;\n };\n\n /**\n * Encodes the specified Dimension message, length delimited. Does not implicitly {@link onnx.TensorShapeProto.Dimension.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.IDimension} message Dimension message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Dimension.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Dimension message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorShapeProto.Dimension} Dimension\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Dimension.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TensorShapeProto.Dimension();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.dimValue = reader.int64();\n break;\n }\n case 2: {\n message.dimParam = reader.string();\n break;\n }\n case 3: {\n message.denotation = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Dimension message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorShapeProto.Dimension} Dimension\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Dimension.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Dimension message.\n * @function verify\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Dimension.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n var properties = {};\n if (message.dimValue != null && message.hasOwnProperty(\"dimValue\")) {\n properties.value = 1;\n if (!$util.isInteger(message.dimValue) && !(message.dimValue && $util.isInteger(message.dimValue.low) && $util.isInteger(message.dimValue.high)))\n return \"dimValue: integer|Long expected\";\n }\n if (message.dimParam != null && message.hasOwnProperty(\"dimParam\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n if (!$util.isString(message.dimParam))\n return \"dimParam: string expected\";\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n if (!$util.isString(message.denotation))\n return \"denotation: string expected\";\n return null;\n };\n\n /**\n * Creates a Dimension message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorShapeProto.Dimension} Dimension\n */\n Dimension.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorShapeProto.Dimension)\n return object;\n var message = new $root.onnx.TensorShapeProto.Dimension();\n if (object.dimValue != null)\n if ($util.Long)\n (message.dimValue = $util.Long.fromValue(object.dimValue)).unsigned = false;\n else if (typeof object.dimValue === \"string\")\n message.dimValue = parseInt(object.dimValue, 10);\n else if (typeof object.dimValue === \"number\")\n message.dimValue = object.dimValue;\n else if (typeof object.dimValue === \"object\")\n message.dimValue = new $util.LongBits(object.dimValue.low >>> 0, object.dimValue.high >>> 0).toNumber();\n if (object.dimParam != null)\n message.dimParam = String(object.dimParam);\n if (object.denotation != null)\n message.denotation = String(object.denotation);\n return message;\n };\n\n /**\n * Creates a plain object from a Dimension message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.Dimension} message Dimension\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Dimension.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.denotation = \"\";\n if (message.dimValue != null && message.hasOwnProperty(\"dimValue\")) {\n if (typeof message.dimValue === \"number\")\n object.dimValue = options.longs === String ? String(message.dimValue) : message.dimValue;\n else\n object.dimValue = options.longs === String ? $util.Long.prototype.toString.call(message.dimValue) : options.longs === Number ? new $util.LongBits(message.dimValue.low >>> 0, message.dimValue.high >>> 0).toNumber() : message.dimValue;\n if (options.oneofs)\n object.value = \"dimValue\";\n }\n if (message.dimParam != null && message.hasOwnProperty(\"dimParam\")) {\n object.dimParam = message.dimParam;\n if (options.oneofs)\n object.value = \"dimParam\";\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n object.denotation = message.denotation;\n return object;\n };\n\n /**\n * Converts this Dimension to JSON.\n * @function toJSON\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n * @returns {Object.} JSON object\n */\n Dimension.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Dimension\n * @function getTypeUrl\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Dimension.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorShapeProto.Dimension\";\n };\n\n return Dimension;\n })();\n\n return TensorShapeProto;\n })();\n\n onnx.TypeProto = (function() {\n\n /**\n * Properties of a TypeProto.\n * @memberof onnx\n * @interface ITypeProto\n * @property {onnx.TypeProto.ITensor|null} [tensorType] TypeProto tensorType\n * @property {onnx.TypeProto.ISequence|null} [sequenceType] TypeProto sequenceType\n * @property {onnx.TypeProto.IMap|null} [mapType] TypeProto mapType\n * @property {onnx.TypeProto.IOptional|null} [optionalType] TypeProto optionalType\n * @property {onnx.TypeProto.ISparseTensor|null} [sparseTensorType] TypeProto sparseTensorType\n * @property {string|null} [denotation] TypeProto denotation\n */\n\n /**\n * Constructs a new TypeProto.\n * @memberof onnx\n * @classdesc Represents a TypeProto.\n * @implements ITypeProto\n * @constructor\n * @param {onnx.ITypeProto=} [properties] Properties to set\n */\n function TypeProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TypeProto tensorType.\n * @member {onnx.TypeProto.ITensor|null|undefined} tensorType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.tensorType = null;\n\n /**\n * TypeProto sequenceType.\n * @member {onnx.TypeProto.ISequence|null|undefined} sequenceType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.sequenceType = null;\n\n /**\n * TypeProto 
mapType.\n * @member {onnx.TypeProto.IMap|null|undefined} mapType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.mapType = null;\n\n /**\n * TypeProto optionalType.\n * @member {onnx.TypeProto.IOptional|null|undefined} optionalType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.optionalType = null;\n\n /**\n * TypeProto sparseTensorType.\n * @member {onnx.TypeProto.ISparseTensor|null|undefined} sparseTensorType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.sparseTensorType = null;\n\n /**\n * TypeProto denotation.\n * @member {string} denotation\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.denotation = \"\";\n\n // OneOf field names bound to virtual getters and setters\n var $oneOfFields;\n\n /**\n * TypeProto value.\n * @member {\"tensorType\"|\"sequenceType\"|\"mapType\"|\"optionalType\"|\"sparseTensorType\"|undefined} value\n * @memberof onnx.TypeProto\n * @instance\n */\n Object.defineProperty(TypeProto.prototype, \"value\", {\n get: $util.oneOfGetter($oneOfFields = [\"tensorType\", \"sequenceType\", \"mapType\", \"optionalType\", \"sparseTensorType\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n /**\n * Creates a new TypeProto instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.ITypeProto=} [properties] Properties to set\n * @returns {onnx.TypeProto} TypeProto instance\n */\n TypeProto.create = function create(properties) {\n return new TypeProto(properties);\n };\n\n /**\n * Encodes the specified TypeProto message. Does not implicitly {@link onnx.TypeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.ITypeProto} message TypeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TypeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.tensorType != null && Object.hasOwnProperty.call(message, \"tensorType\"))\n $root.onnx.TypeProto.Tensor.encode(message.tensorType, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.sequenceType != null && Object.hasOwnProperty.call(message, \"sequenceType\"))\n $root.onnx.TypeProto.Sequence.encode(message.sequenceType, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();\n if (message.mapType != null && Object.hasOwnProperty.call(message, \"mapType\"))\n $root.onnx.TypeProto.Map.encode(message.mapType, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.denotation != null && Object.hasOwnProperty.call(message, \"denotation\"))\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.denotation);\n if (message.sparseTensorType != null && Object.hasOwnProperty.call(message, \"sparseTensorType\"))\n $root.onnx.TypeProto.SparseTensor.encode(message.sparseTensorType, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim();\n if (message.optionalType != null && Object.hasOwnProperty.call(message, \"optionalType\"))\n $root.onnx.TypeProto.Optional.encode(message.optionalType, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TypeProto message, length delimited. 
Does not implicitly {@link onnx.TypeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.ITypeProto} message TypeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TypeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TypeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto} TypeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TypeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TypeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.tensorType = $root.onnx.TypeProto.Tensor.decode(reader, reader.uint32());\n break;\n }\n case 4: {\n message.sequenceType = $root.onnx.TypeProto.Sequence.decode(reader, reader.uint32());\n break;\n }\n case 5: {\n message.mapType = $root.onnx.TypeProto.Map.decode(reader, reader.uint32());\n break;\n }\n case 9: {\n message.optionalType = $root.onnx.TypeProto.Optional.decode(reader, reader.uint32());\n break;\n }\n case 8: {\n message.sparseTensorType = $root.onnx.TypeProto.SparseTensor.decode(reader, reader.uint32());\n break;\n }\n case 6: {\n message.denotation = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TypeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto} TypeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TypeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TypeProto message.\n * @function verify\n * @memberof onnx.TypeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TypeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n var properties = {};\n if (message.tensorType != null && message.hasOwnProperty(\"tensorType\")) {\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Tensor.verify(message.tensorType);\n if (error)\n return \"tensorType.\" + error;\n }\n }\n if (message.sequenceType != null && message.hasOwnProperty(\"sequenceType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Sequence.verify(message.sequenceType);\n if (error)\n return \"sequenceType.\" + error;\n }\n }\n if (message.mapType != null && 
message.hasOwnProperty(\"mapType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Map.verify(message.mapType);\n if (error)\n return \"mapType.\" + error;\n }\n }\n if (message.optionalType != null && message.hasOwnProperty(\"optionalType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Optional.verify(message.optionalType);\n if (error)\n return \"optionalType.\" + error;\n }\n }\n if (message.sparseTensorType != null && message.hasOwnProperty(\"sparseTensorType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.SparseTensor.verify(message.sparseTensorType);\n if (error)\n return \"sparseTensorType.\" + error;\n }\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n if (!$util.isString(message.denotation))\n return \"denotation: string expected\";\n return null;\n };\n\n /**\n * Creates a TypeProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto} TypeProto\n */\n TypeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto)\n return object;\n var message = new $root.onnx.TypeProto();\n if (object.tensorType != null) {\n if (typeof object.tensorType !== \"object\")\n throw TypeError(\".onnx.TypeProto.tensorType: object expected\");\n message.tensorType = $root.onnx.TypeProto.Tensor.fromObject(object.tensorType);\n }\n if (object.sequenceType != null) {\n if (typeof object.sequenceType !== \"object\")\n throw TypeError(\".onnx.TypeProto.sequenceType: object expected\");\n message.sequenceType = $root.onnx.TypeProto.Sequence.fromObject(object.sequenceType);\n }\n if (object.mapType != null) {\n if (typeof object.mapType !== \"object\")\n throw TypeError(\".onnx.TypeProto.mapType: object expected\");\n message.mapType = $root.onnx.TypeProto.Map.fromObject(object.mapType);\n }\n if (object.optionalType != null) {\n if (typeof object.optionalType !== \"object\")\n throw TypeError(\".onnx.TypeProto.optionalType: object expected\");\n message.optionalType = $root.onnx.TypeProto.Optional.fromObject(object.optionalType);\n }\n if (object.sparseTensorType != null) {\n if (typeof object.sparseTensorType !== \"object\")\n throw TypeError(\".onnx.TypeProto.sparseTensorType: object expected\");\n message.sparseTensorType = $root.onnx.TypeProto.SparseTensor.fromObject(object.sparseTensorType);\n }\n if (object.denotation != null)\n message.denotation = String(object.denotation);\n return message;\n };\n\n /**\n * Creates a plain object from a TypeProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.TypeProto} message TypeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TypeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.denotation = \"\";\n if (message.tensorType != null && message.hasOwnProperty(\"tensorType\")) {\n object.tensorType = $root.onnx.TypeProto.Tensor.toObject(message.tensorType, options);\n if (options.oneofs)\n object.value = \"tensorType\";\n }\n if (message.sequenceType != null && message.hasOwnProperty(\"sequenceType\")) {\n object.sequenceType = $root.onnx.TypeProto.Sequence.toObject(message.sequenceType, options);\n if (options.oneofs)\n object.value = \"sequenceType\";\n }\n if (message.mapType != null && message.hasOwnProperty(\"mapType\")) {\n object.mapType = $root.onnx.TypeProto.Map.toObject(message.mapType, options);\n if (options.oneofs)\n object.value = \"mapType\";\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n object.denotation = message.denotation;\n if (message.sparseTensorType != null && message.hasOwnProperty(\"sparseTensorType\")) {\n object.sparseTensorType = $root.onnx.TypeProto.SparseTensor.toObject(message.sparseTensorType, options);\n if (options.oneofs)\n object.value = \"sparseTensorType\";\n }\n if (message.optionalType != null && message.hasOwnProperty(\"optionalType\")) {\n object.optionalType = $root.onnx.TypeProto.Optional.toObject(message.optionalType, options);\n if (options.oneofs)\n object.value = \"optionalType\";\n }\n return object;\n };\n\n /**\n * Converts this TypeProto to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto\n * @instance\n * @returns {Object.} JSON object\n */\n TypeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TypeProto\n * @function getTypeUrl\n * @memberof onnx.TypeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TypeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto\";\n };\n\n TypeProto.Tensor = (function() {\n\n /**\n * Properties of a Tensor.\n * @memberof onnx.TypeProto\n * @interface ITensor\n * @property {number|null} [elemType] Tensor elemType\n * @property {onnx.ITensorShapeProto|null} [shape] Tensor shape\n */\n\n /**\n * Constructs a new Tensor.\n * @memberof onnx.TypeProto\n * @classdesc Represents a Tensor.\n * @implements ITensor\n * @constructor\n * @param {onnx.TypeProto.ITensor=} [properties] Properties to set\n */\n function Tensor(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Tensor elemType.\n * @member {number} elemType\n * @memberof onnx.TypeProto.Tensor\n * @instance\n */\n Tensor.prototype.elemType = 0;\n\n /**\n * Tensor shape.\n * @member {onnx.ITensorShapeProto|null|undefined} shape\n * @memberof onnx.TypeProto.Tensor\n * @instance\n */\n Tensor.prototype.shape = null;\n\n /**\n * Creates a new Tensor instance using the specified properties.\n * 
@function create\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.ITensor=} [properties] Properties to set\n * @returns {onnx.TypeProto.Tensor} Tensor instance\n */\n Tensor.create = function create(properties) {\n return new Tensor(properties);\n };\n\n /**\n * Encodes the specified Tensor message. Does not implicitly {@link onnx.TypeProto.Tensor.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.ITensor} message Tensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Tensor.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int32(message.elemType);\n if (message.shape != null && Object.hasOwnProperty.call(message, \"shape\"))\n $root.onnx.TensorShapeProto.encode(message.shape, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Tensor message, length delimited. Does not implicitly {@link onnx.TypeProto.Tensor.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.ITensor} message Tensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Tensor.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Tensor message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Tensor} Tensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Tensor.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Tensor();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = reader.int32();\n break;\n }\n case 2: {\n message.shape = $root.onnx.TensorShapeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Tensor message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Tensor} Tensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Tensor.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Tensor message.\n * @function verify\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Tensor.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n if (!$util.isInteger(message.elemType))\n return \"elemType: integer expected\";\n if (message.shape != null && message.hasOwnProperty(\"shape\")) {\n var error = $root.onnx.TensorShapeProto.verify(message.shape);\n if (error)\n return \"shape.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a Tensor message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Tensor} Tensor\n */\n Tensor.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Tensor)\n return object;\n var message = new $root.onnx.TypeProto.Tensor();\n if (object.elemType != null)\n message.elemType = object.elemType | 0;\n if (object.shape != null) {\n if (typeof object.shape !== \"object\")\n throw TypeError(\".onnx.TypeProto.Tensor.shape: object expected\");\n message.shape = $root.onnx.TensorShapeProto.fromObject(object.shape);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a Tensor message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.Tensor} message Tensor\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Tensor.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.elemType = 0;\n object.shape = null;\n }\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = message.elemType;\n if (message.shape != null && message.hasOwnProperty(\"shape\"))\n object.shape = $root.onnx.TensorShapeProto.toObject(message.shape, options);\n return object;\n };\n\n /**\n * Converts this Tensor to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Tensor\n * @instance\n * @returns {Object.} JSON object\n */\n Tensor.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Tensor\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Tensor.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Tensor\";\n };\n\n return Tensor;\n })();\n\n TypeProto.Sequence = (function() {\n\n /**\n * Properties of a Sequence.\n * @memberof onnx.TypeProto\n * @interface ISequence\n * @property {onnx.ITypeProto|null} [elemType] Sequence elemType\n */\n\n /**\n * Constructs a new Sequence.\n * @memberof onnx.TypeProto\n * @classdesc Represents a Sequence.\n * @implements ISequence\n * @constructor\n * @param {onnx.TypeProto.ISequence=} [properties] Properties to set\n */\n function Sequence(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Sequence elemType.\n * @member {onnx.ITypeProto|null|undefined} elemType\n * @memberof onnx.TypeProto.Sequence\n * @instance\n */\n Sequence.prototype.elemType = null;\n\n /**\n * Creates a new Sequence instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.ISequence=} [properties] Properties to set\n * @returns {onnx.TypeProto.Sequence} Sequence instance\n */\n Sequence.create = function create(properties) {\n return new Sequence(properties);\n };\n\n /**\n * Encodes the specified Sequence message. Does not implicitly {@link onnx.TypeProto.Sequence.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.ISequence} message Sequence message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Sequence.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n $root.onnx.TypeProto.encode(message.elemType, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Sequence message, length delimited. 
Does not implicitly {@link onnx.TypeProto.Sequence.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.ISequence} message Sequence message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Sequence.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Sequence message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Sequence} Sequence\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Sequence.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Sequence();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Sequence message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Sequence} Sequence\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Sequence.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Sequence message.\n * @function verify\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Sequence.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\")) {\n var error = $root.onnx.TypeProto.verify(message.elemType);\n if (error)\n return \"elemType.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a Sequence message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Sequence} Sequence\n */\n Sequence.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Sequence)\n return object;\n var message = new $root.onnx.TypeProto.Sequence();\n if (object.elemType != null) {\n if (typeof object.elemType !== \"object\")\n throw TypeError(\".onnx.TypeProto.Sequence.elemType: object expected\");\n message.elemType = $root.onnx.TypeProto.fromObject(object.elemType);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a Sequence message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.Sequence} message Sequence\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Sequence.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.elemType = null;\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = $root.onnx.TypeProto.toObject(message.elemType, options);\n return object;\n };\n\n /**\n * Converts this Sequence to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Sequence\n * @instance\n * @returns {Object.} JSON object\n */\n Sequence.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Sequence\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Sequence.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Sequence\";\n };\n\n return Sequence;\n })();\n\n TypeProto.Map = (function() {\n\n /**\n * Properties of a Map.\n * @memberof onnx.TypeProto\n * @interface IMap\n * @property {number|null} [keyType] Map keyType\n * @property {onnx.ITypeProto|null} [valueType] Map valueType\n */\n\n /**\n * Constructs a new Map.\n * @memberof onnx.TypeProto\n * @classdesc Represents a Map.\n * @implements IMap\n * @constructor\n * @param {onnx.TypeProto.IMap=} [properties] Properties to set\n */\n function Map(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Map keyType.\n * @member {number} keyType\n * @memberof onnx.TypeProto.Map\n * @instance\n */\n Map.prototype.keyType = 0;\n\n /**\n * Map valueType.\n * @member {onnx.ITypeProto|null|undefined} valueType\n * @memberof onnx.TypeProto.Map\n * @instance\n */\n Map.prototype.valueType = null;\n\n /**\n * Creates a new Map instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.IMap=} [properties] Properties to set\n * @returns {onnx.TypeProto.Map} Map instance\n */\n Map.create = function create(properties) {\n return new Map(properties);\n };\n\n /**\n * Encodes the specified Map message. 
Does not implicitly {@link onnx.TypeProto.Map.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.IMap} message Map message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Map.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.keyType != null && Object.hasOwnProperty.call(message, \"keyType\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int32(message.keyType);\n if (message.valueType != null && Object.hasOwnProperty.call(message, \"valueType\"))\n $root.onnx.TypeProto.encode(message.valueType, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Map message, length delimited. Does not implicitly {@link onnx.TypeProto.Map.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.IMap} message Map message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Map.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Map message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Map} Map\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Map.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Map();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.keyType = reader.int32();\n break;\n }\n case 2: {\n message.valueType = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Map message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Map} Map\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Map.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Map message.\n * @function verify\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Map.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.keyType != null && message.hasOwnProperty(\"keyType\"))\n if (!$util.isInteger(message.keyType))\n return \"keyType: integer expected\";\n if (message.valueType != null && message.hasOwnProperty(\"valueType\")) {\n var error = $root.onnx.TypeProto.verify(message.valueType);\n if (error)\n return \"valueType.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a Map message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Map} Map\n */\n Map.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Map)\n return object;\n var message = new $root.onnx.TypeProto.Map();\n if (object.keyType != null)\n message.keyType = object.keyType | 0;\n if (object.valueType != null) {\n if (typeof object.valueType !== \"object\")\n throw TypeError(\".onnx.TypeProto.Map.valueType: object expected\");\n message.valueType = $root.onnx.TypeProto.fromObject(object.valueType);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a Map message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.Map} message Map\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Map.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.keyType = 0;\n object.valueType = null;\n }\n if (message.keyType != null && message.hasOwnProperty(\"keyType\"))\n object.keyType = message.keyType;\n if (message.valueType != null && message.hasOwnProperty(\"valueType\"))\n object.valueType = $root.onnx.TypeProto.toObject(message.valueType, options);\n return object;\n };\n\n /**\n * Converts this Map to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Map\n * @instance\n * @returns {Object.} JSON object\n */\n Map.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Map\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Map.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Map\";\n };\n\n return Map;\n })();\n\n TypeProto.Optional = (function() {\n\n /**\n * Properties of an Optional.\n * @memberof onnx.TypeProto\n * @interface IOptional\n * @property {onnx.ITypeProto|null} [elemType] Optional elemType\n */\n\n /**\n * Constructs a new Optional.\n * @memberof onnx.TypeProto\n * @classdesc Represents an Optional.\n * @implements IOptional\n * @constructor\n * @param {onnx.TypeProto.IOptional=} [properties] Properties to set\n */\n function Optional(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Optional elemType.\n * @member {onnx.ITypeProto|null|undefined} elemType\n * @memberof onnx.TypeProto.Optional\n * @instance\n */\n Optional.prototype.elemType = null;\n\n /**\n * Creates a new Optional instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.IOptional=} [properties] Properties to set\n * @returns {onnx.TypeProto.Optional} Optional instance\n */\n Optional.create = function create(properties) {\n return new Optional(properties);\n };\n\n /**\n * Encodes the specified Optional message. Does not implicitly {@link onnx.TypeProto.Optional.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.IOptional} message Optional message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Optional.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n $root.onnx.TypeProto.encode(message.elemType, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Optional message, length delimited. 
Does not implicitly {@link onnx.TypeProto.Optional.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.IOptional} message Optional message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Optional.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes an Optional message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Optional} Optional\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Optional.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Optional();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes an Optional message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Optional} Optional\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Optional.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies an Optional message.\n * @function verify\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Optional.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\")) {\n var error = $root.onnx.TypeProto.verify(message.elemType);\n if (error)\n return \"elemType.\" + error;\n }\n return null;\n };\n\n /**\n * Creates an Optional message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Optional} Optional\n */\n Optional.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Optional)\n return object;\n var message = new $root.onnx.TypeProto.Optional();\n if (object.elemType != null) {\n if (typeof object.elemType !== \"object\")\n throw TypeError(\".onnx.TypeProto.Optional.elemType: object expected\");\n message.elemType = $root.onnx.TypeProto.fromObject(object.elemType);\n }\n return message;\n };\n\n /**\n * Creates a plain object from an Optional message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.Optional} message Optional\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Optional.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.elemType = null;\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = $root.onnx.TypeProto.toObject(message.elemType, options);\n return object;\n };\n\n /**\n * Converts this Optional to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Optional\n * @instance\n * @returns {Object.} JSON object\n */\n Optional.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Optional\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Optional.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Optional\";\n };\n\n return Optional;\n })();\n\n TypeProto.SparseTensor = (function() {\n\n /**\n * Properties of a SparseTensor.\n * @memberof onnx.TypeProto\n * @interface ISparseTensor\n * @property {number|null} [elemType] SparseTensor elemType\n * @property {onnx.ITensorShapeProto|null} [shape] SparseTensor shape\n */\n\n /**\n * Constructs a new SparseTensor.\n * @memberof onnx.TypeProto\n * @classdesc Represents a SparseTensor.\n * @implements ISparseTensor\n * @constructor\n * @param {onnx.TypeProto.ISparseTensor=} [properties] Properties to set\n */\n function SparseTensor(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * SparseTensor elemType.\n * @member {number} elemType\n * @memberof onnx.TypeProto.SparseTensor\n * @instance\n */\n SparseTensor.prototype.elemType = 0;\n\n /**\n * SparseTensor shape.\n * @member {onnx.ITensorShapeProto|null|undefined} shape\n * @memberof onnx.TypeProto.SparseTensor\n * @instance\n */\n SparseTensor.prototype.shape = null;\n\n /**\n * Creates a new SparseTensor instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.ISparseTensor=} [properties] Properties to set\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor instance\n */\n SparseTensor.create = function create(properties) {\n return new SparseTensor(properties);\n };\n\n /**\n * Encodes the specified SparseTensor message. 
Does not implicitly {@link onnx.TypeProto.SparseTensor.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.ISparseTensor} message SparseTensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensor.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int32(message.elemType);\n if (message.shape != null && Object.hasOwnProperty.call(message, \"shape\"))\n $root.onnx.TensorShapeProto.encode(message.shape, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified SparseTensor message, length delimited. Does not implicitly {@link onnx.TypeProto.SparseTensor.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.ISparseTensor} message SparseTensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensor.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a SparseTensor message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensor.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TypeProto.SparseTensor();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = reader.int32();\n break;\n }\n case 2: {\n message.shape = $root.onnx.TensorShapeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a SparseTensor message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensor.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a SparseTensor message.\n * @function verify\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n SparseTensor.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n if (!$util.isInteger(message.elemType))\n return \"elemType: integer expected\";\n if (message.shape != null && message.hasOwnProperty(\"shape\")) {\n var error = $root.onnx.TensorShapeProto.verify(message.shape);\n if (error)\n return \"shape.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a SparseTensor message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor\n */\n SparseTensor.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.SparseTensor)\n return object;\n var message = new $root.onnx.TypeProto.SparseTensor();\n if (object.elemType != null)\n message.elemType = object.elemType | 0;\n if (object.shape != null) {\n if (typeof object.shape !== \"object\")\n throw TypeError(\".onnx.TypeProto.SparseTensor.shape: object expected\");\n message.shape = $root.onnx.TensorShapeProto.fromObject(object.shape);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a SparseTensor message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.SparseTensor} message SparseTensor\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n SparseTensor.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.elemType = 0;\n object.shape = null;\n }\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = message.elemType;\n if (message.shape != null && message.hasOwnProperty(\"shape\"))\n object.shape = $root.onnx.TensorShapeProto.toObject(message.shape, options);\n return object;\n };\n\n /**\n * Converts this SparseTensor to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.SparseTensor\n * @instance\n * @returns {Object.} JSON object\n */\n SparseTensor.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for SparseTensor\n * @function getTypeUrl\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n SparseTensor.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.SparseTensor\";\n };\n\n return SparseTensor;\n })();\n\n return TypeProto;\n })();\n\n onnx.OperatorSetIdProto = (function() {\n\n /**\n * Properties of an OperatorSetIdProto.\n * @memberof onnx\n * @interface IOperatorSetIdProto\n * @property {string|null} [domain] OperatorSetIdProto domain\n * @property {number|Long|null} [version] OperatorSetIdProto version\n */\n\n /**\n * Constructs a new OperatorSetIdProto.\n * @memberof onnx\n * @classdesc Represents an OperatorSetIdProto.\n * @implements IOperatorSetIdProto\n * @constructor\n * @param {onnx.IOperatorSetIdProto=} [properties] Properties to set\n */\n function OperatorSetIdProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * OperatorSetIdProto domain.\n * @member {string} domain\n * @memberof onnx.OperatorSetIdProto\n * @instance\n */\n OperatorSetIdProto.prototype.domain = \"\";\n\n /**\n * OperatorSetIdProto version.\n * @member {number|Long} version\n * @memberof onnx.OperatorSetIdProto\n * @instance\n */\n OperatorSetIdProto.prototype.version = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * Creates a new OperatorSetIdProto instance using the specified properties.\n * @function create\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.IOperatorSetIdProto=} [properties] Properties to set\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto instance\n */\n OperatorSetIdProto.create = function create(properties) {\n return new OperatorSetIdProto(properties);\n };\n\n /**\n * Encodes the specified OperatorSetIdProto message. 
Does not implicitly {@link onnx.OperatorSetIdProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.IOperatorSetIdProto} message OperatorSetIdProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n OperatorSetIdProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.domain);\n if (message.version != null && Object.hasOwnProperty.call(message, \"version\"))\n writer.uint32(/* id 2, wireType 0 =*/16).int64(message.version);\n return writer;\n };\n\n /**\n * Encodes the specified OperatorSetIdProto message, length delimited. Does not implicitly {@link onnx.OperatorSetIdProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.IOperatorSetIdProto} message OperatorSetIdProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n OperatorSetIdProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes an OperatorSetIdProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n OperatorSetIdProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.OperatorSetIdProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.domain = reader.string();\n break;\n }\n case 2: {\n message.version = reader.int64();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes an OperatorSetIdProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n OperatorSetIdProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies an OperatorSetIdProto message.\n * @function verify\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n OperatorSetIdProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n if (message.version != null && message.hasOwnProperty(\"version\"))\n if (!$util.isInteger(message.version) && !(message.version && $util.isInteger(message.version.low) && $util.isInteger(message.version.high)))\n return \"version: integer|Long expected\";\n return null;\n };\n\n /**\n * Creates an OperatorSetIdProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto\n */\n OperatorSetIdProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.OperatorSetIdProto)\n return object;\n var message = new $root.onnx.OperatorSetIdProto();\n if (object.domain != null)\n message.domain = String(object.domain);\n if (object.version != null)\n if ($util.Long)\n (message.version = $util.Long.fromValue(object.version)).unsigned = false;\n else if (typeof object.version === \"string\")\n message.version = parseInt(object.version, 10);\n else if (typeof object.version === \"number\")\n message.version = object.version;\n else if (typeof object.version === \"object\")\n message.version = new $util.LongBits(object.version.low >>> 0, object.version.high >>> 0).toNumber();\n return message;\n };\n\n /**\n * Creates a plain object from an OperatorSetIdProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.OperatorSetIdProto} message OperatorSetIdProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n OperatorSetIdProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.domain = \"\";\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.version = options.longs === String ? 
long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.version = options.longs === String ? \"0\" : 0;\n }\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n if (message.version != null && message.hasOwnProperty(\"version\"))\n if (typeof message.version === \"number\")\n object.version = options.longs === String ? String(message.version) : message.version;\n else\n object.version = options.longs === String ? $util.Long.prototype.toString.call(message.version) : options.longs === Number ? new $util.LongBits(message.version.low >>> 0, message.version.high >>> 0).toNumber() : message.version;\n return object;\n };\n\n /**\n * Converts this OperatorSetIdProto to JSON.\n * @function toJSON\n * @memberof onnx.OperatorSetIdProto\n * @instance\n * @returns {Object.} JSON object\n */\n OperatorSetIdProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for OperatorSetIdProto\n * @function getTypeUrl\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n OperatorSetIdProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.OperatorSetIdProto\";\n };\n\n return OperatorSetIdProto;\n })();\n\n /**\n * OperatorStatus enum.\n * @name onnx.OperatorStatus\n * @enum {number}\n * @property {number} EXPERIMENTAL=0 EXPERIMENTAL value\n * @property {number} STABLE=1 STABLE value\n */\n onnx.OperatorStatus = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"EXPERIMENTAL\"] = 0;\n values[valuesById[1] = \"STABLE\"] = 1;\n return values;\n })();\n\n onnx.FunctionProto = (function() {\n\n /**\n * Properties of a FunctionProto.\n * @memberof onnx\n * @interface IFunctionProto\n * @property {string|null} [name] FunctionProto name\n * @property {Array.|null} [input] FunctionProto input\n * @property {Array.|null} [output] FunctionProto output\n * @property {Array.|null} [attribute] FunctionProto attribute\n * @property {Array.|null} [attributeProto] FunctionProto attributeProto\n * @property {Array.|null} [node] FunctionProto node\n * @property {string|null} [docString] FunctionProto docString\n * @property {Array.|null} [opsetImport] FunctionProto opsetImport\n * @property {string|null} [domain] FunctionProto domain\n */\n\n /**\n * Constructs a new FunctionProto.\n * @memberof onnx\n * @classdesc Represents a FunctionProto.\n * @implements IFunctionProto\n * @constructor\n * @param {onnx.IFunctionProto=} [properties] Properties to set\n */\n function FunctionProto(properties) {\n this.input = [];\n this.output = [];\n this.attribute = [];\n this.attributeProto = [];\n this.node = [];\n this.opsetImport = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * FunctionProto name.\n * @member {string} name\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.name = \"\";\n\n /**\n * FunctionProto input.\n * @member {Array.} input\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.input = $util.emptyArray;\n\n /**\n * FunctionProto output.\n * @member 
{Array.} output\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.output = $util.emptyArray;\n\n /**\n * FunctionProto attribute.\n * @member {Array.} attribute\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.attribute = $util.emptyArray;\n\n /**\n * FunctionProto attributeProto.\n * @member {Array.} attributeProto\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.attributeProto = $util.emptyArray;\n\n /**\n * FunctionProto node.\n * @member {Array.} node\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.node = $util.emptyArray;\n\n /**\n * FunctionProto docString.\n * @member {string} docString\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.docString = \"\";\n\n /**\n * FunctionProto opsetImport.\n * @member {Array.} opsetImport\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.opsetImport = $util.emptyArray;\n\n /**\n * FunctionProto domain.\n * @member {string} domain\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.domain = \"\";\n\n /**\n * Creates a new FunctionProto instance using the specified properties.\n * @function create\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.IFunctionProto=} [properties] Properties to set\n * @returns {onnx.FunctionProto} FunctionProto instance\n */\n FunctionProto.create = function create(properties) {\n return new FunctionProto(properties);\n };\n\n /**\n * Encodes the specified FunctionProto message. Does not implicitly {@link onnx.FunctionProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.IFunctionProto} message FunctionProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n FunctionProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);\n if (message.input != null && message.input.length)\n for (var i = 0; i < message.input.length; ++i)\n writer.uint32(/* id 4, wireType 2 =*/34).string(message.input[i]);\n if (message.output != null && message.output.length)\n for (var i = 0; i < message.output.length; ++i)\n writer.uint32(/* id 5, wireType 2 =*/42).string(message.output[i]);\n if (message.attribute != null && message.attribute.length)\n for (var i = 0; i < message.attribute.length; ++i)\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.attribute[i]);\n if (message.node != null && message.node.length)\n for (var i = 0; i < message.node.length; ++i)\n $root.onnx.NodeProto.encode(message.node[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 8, wireType 2 =*/66).string(message.docString);\n if (message.opsetImport != null && message.opsetImport.length)\n for (var i = 0; i < message.opsetImport.length; ++i)\n $root.onnx.OperatorSetIdProto.encode(message.opsetImport[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim();\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 10, wireType 2 =*/82).string(message.domain);\n if (message.attributeProto != null && message.attributeProto.length)\n for (var i = 0; i < 
message.attributeProto.length; ++i)\n $root.onnx.AttributeProto.encode(message.attributeProto[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified FunctionProto message, length delimited. Does not implicitly {@link onnx.FunctionProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.IFunctionProto} message FunctionProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n FunctionProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a FunctionProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.FunctionProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.FunctionProto} FunctionProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n FunctionProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.FunctionProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.name = reader.string();\n break;\n }\n case 4: {\n if (!(message.input && message.input.length))\n message.input = [];\n message.input.push(reader.string());\n break;\n }\n case 5: {\n if (!(message.output && message.output.length))\n message.output = [];\n message.output.push(reader.string());\n break;\n }\n case 6: {\n if (!(message.attribute && message.attribute.length))\n message.attribute = [];\n message.attribute.push(reader.string());\n break;\n }\n case 11: {\n if (!(message.attributeProto && message.attributeProto.length))\n message.attributeProto = [];\n message.attributeProto.push($root.onnx.AttributeProto.decode(reader, reader.uint32()));\n break;\n }\n case 7: {\n if (!(message.node && message.node.length))\n message.node = [];\n message.node.push($root.onnx.NodeProto.decode(reader, reader.uint32()));\n break;\n }\n case 8: {\n message.docString = reader.string();\n break;\n }\n case 9: {\n if (!(message.opsetImport && message.opsetImport.length))\n message.opsetImport = [];\n message.opsetImport.push($root.onnx.OperatorSetIdProto.decode(reader, reader.uint32()));\n break;\n }\n case 10: {\n message.domain = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a FunctionProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.FunctionProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.FunctionProto} FunctionProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n FunctionProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a FunctionProto message.\n * @function verify\n * @memberof onnx.FunctionProto\n * @static\n * 
@param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n FunctionProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.input != null && message.hasOwnProperty(\"input\")) {\n if (!Array.isArray(message.input))\n return \"input: array expected\";\n for (var i = 0; i < message.input.length; ++i)\n if (!$util.isString(message.input[i]))\n return \"input: string[] expected\";\n }\n if (message.output != null && message.hasOwnProperty(\"output\")) {\n if (!Array.isArray(message.output))\n return \"output: array expected\";\n for (var i = 0; i < message.output.length; ++i)\n if (!$util.isString(message.output[i]))\n return \"output: string[] expected\";\n }\n if (message.attribute != null && message.hasOwnProperty(\"attribute\")) {\n if (!Array.isArray(message.attribute))\n return \"attribute: array expected\";\n for (var i = 0; i < message.attribute.length; ++i)\n if (!$util.isString(message.attribute[i]))\n return \"attribute: string[] expected\";\n }\n if (message.attributeProto != null && message.hasOwnProperty(\"attributeProto\")) {\n if (!Array.isArray(message.attributeProto))\n return \"attributeProto: array expected\";\n for (var i = 0; i < message.attributeProto.length; ++i) {\n var error = $root.onnx.AttributeProto.verify(message.attributeProto[i]);\n if (error)\n return \"attributeProto.\" + error;\n }\n }\n if (message.node != null && message.hasOwnProperty(\"node\")) {\n if (!Array.isArray(message.node))\n return \"node: array expected\";\n for (var i = 0; i < message.node.length; ++i) {\n var error = $root.onnx.NodeProto.verify(message.node[i]);\n if (error)\n return \"node.\" + error;\n }\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.opsetImport != null && message.hasOwnProperty(\"opsetImport\")) {\n if (!Array.isArray(message.opsetImport))\n return \"opsetImport: array expected\";\n for (var i = 0; i < message.opsetImport.length; ++i) {\n var error = $root.onnx.OperatorSetIdProto.verify(message.opsetImport[i]);\n if (error)\n return \"opsetImport.\" + error;\n }\n }\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n return null;\n };\n\n /**\n * Creates a FunctionProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.FunctionProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.FunctionProto} FunctionProto\n */\n FunctionProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.FunctionProto)\n return object;\n var message = new $root.onnx.FunctionProto();\n if (object.name != null)\n message.name = String(object.name);\n if (object.input) {\n if (!Array.isArray(object.input))\n throw TypeError(\".onnx.FunctionProto.input: array expected\");\n message.input = [];\n for (var i = 0; i < object.input.length; ++i)\n message.input[i] = String(object.input[i]);\n }\n if (object.output) {\n if (!Array.isArray(object.output))\n throw TypeError(\".onnx.FunctionProto.output: array expected\");\n message.output = [];\n for (var i = 0; i < object.output.length; ++i)\n message.output[i] = String(object.output[i]);\n }\n if (object.attribute) {\n if (!Array.isArray(object.attribute))\n throw TypeError(\".onnx.FunctionProto.attribute: array expected\");\n message.attribute = [];\n for (var i = 0; i < object.attribute.length; ++i)\n message.attribute[i] = String(object.attribute[i]);\n }\n if (object.attributeProto) {\n if (!Array.isArray(object.attributeProto))\n throw TypeError(\".onnx.FunctionProto.attributeProto: array expected\");\n message.attributeProto = [];\n for (var i = 0; i < object.attributeProto.length; ++i) {\n if (typeof object.attributeProto[i] !== \"object\")\n throw TypeError(\".onnx.FunctionProto.attributeProto: object expected\");\n message.attributeProto[i] = $root.onnx.AttributeProto.fromObject(object.attributeProto[i]);\n }\n }\n if (object.node) {\n if (!Array.isArray(object.node))\n throw TypeError(\".onnx.FunctionProto.node: array expected\");\n message.node = [];\n for (var i = 0; i < object.node.length; ++i) {\n if (typeof object.node[i] !== \"object\")\n throw TypeError(\".onnx.FunctionProto.node: object expected\");\n message.node[i] = $root.onnx.NodeProto.fromObject(object.node[i]);\n }\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.opsetImport) {\n if (!Array.isArray(object.opsetImport))\n throw TypeError(\".onnx.FunctionProto.opsetImport: array expected\");\n message.opsetImport = [];\n for (var i = 0; i < object.opsetImport.length; ++i) {\n if (typeof object.opsetImport[i] !== \"object\")\n throw TypeError(\".onnx.FunctionProto.opsetImport: object expected\");\n message.opsetImport[i] = $root.onnx.OperatorSetIdProto.fromObject(object.opsetImport[i]);\n }\n }\n if (object.domain != null)\n message.domain = String(object.domain);\n return message;\n };\n\n /**\n * Creates a plain object from a FunctionProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.FunctionProto} message FunctionProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n FunctionProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.input = [];\n object.output = [];\n object.attribute = [];\n object.node = [];\n object.opsetImport = [];\n object.attributeProto = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.docString = \"\";\n object.domain = \"\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.input && message.input.length) {\n object.input = [];\n for (var j = 0; j < message.input.length; ++j)\n object.input[j] = message.input[j];\n }\n if (message.output && message.output.length) {\n object.output = [];\n for (var j = 0; j < message.output.length; ++j)\n object.output[j] = message.output[j];\n }\n if (message.attribute && message.attribute.length) {\n object.attribute = [];\n for (var j = 0; j < message.attribute.length; ++j)\n object.attribute[j] = message.attribute[j];\n }\n if (message.node && message.node.length) {\n object.node = [];\n for (var j = 0; j < message.node.length; ++j)\n object.node[j] = $root.onnx.NodeProto.toObject(message.node[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.opsetImport && message.opsetImport.length) {\n object.opsetImport = [];\n for (var j = 0; j < message.opsetImport.length; ++j)\n object.opsetImport[j] = $root.onnx.OperatorSetIdProto.toObject(message.opsetImport[j], options);\n }\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n if (message.attributeProto && message.attributeProto.length) {\n object.attributeProto = [];\n for (var j = 0; j < message.attributeProto.length; ++j)\n object.attributeProto[j] = $root.onnx.AttributeProto.toObject(message.attributeProto[j], options);\n }\n return object;\n };\n\n /**\n * Converts this FunctionProto to JSON.\n * @function toJSON\n * @memberof onnx.FunctionProto\n * @instance\n * @returns {Object.} JSON object\n */\n FunctionProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for FunctionProto\n * @function getTypeUrl\n * @memberof onnx.FunctionProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n FunctionProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.FunctionProto\";\n };\n\n return FunctionProto;\n })();\n\n return onnx;\n})();\n\nmodule.exports = $root;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {flatbuffers} from 'flatbuffers';\nimport Long from 'long';\n\nimport {Graph} from './graph';\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {Tensor} from './tensor';\n\n// check the inputs shape before running an OP.\n// return true when the inputs pass the check\n// return false when the inputs do not fit the requirement\n// throw exception when fatal error or not implemented\nexport function checkInputsShape(inputs: Tensor[], ...expectedDimensions: number[]): boolean {\n if (!inputs || inputs.length !== expectedDimensions.length) {\n return false;\n }\n for (let i = 0; i < inputs.length; i++) {\n if (!inputs[i].dims || inputs[i].dims.length !== expectedDimensions[i]) {\n return false;\n }\n }\n return true;\n}\n\n// Evaluates the given expression and asserts error message if condition is unmet.\nexport function assert(expr: boolean, msg: () => string) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? msg : msg());\n }\n}\n\nexport class ArrayUtil {\n /**\n * Verifies if 2 input arrays contain the same elements.\n * @param n1 Array 1\n * @param n2 Array 2\n * @returns Whether these 2 are equal\n */\n static arraysEqual(\n n1: readonly number[]|Int8Array|Uint8Array|Int16Array|Uint16Array|Int32Array|Uint32Array|Uint8ClampedArray|\n Float32Array|Float64Array,\n n2: readonly number[]|Int8Array|Uint8Array|Int16Array|Uint16Array|Int32Array|Uint32Array|Uint8ClampedArray|\n Float32Array|Float64Array) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n }\n}\n\nexport class MatMulUtil {\n /**\n * Fix the input shapes for MatMul operation if they need fixing\n * @param dimsA The shape of tensor A. Should be an array of positive integers\n * @param dimsB The shape of tensor B. Should be an array of positive integers\n * @returns A tuple containing the preprocessed input shapes as required by ONNX specifications\n */\n static preprocessInputShapes(dimsA: readonly number[], dimsB: readonly number[]):\n [readonly number[], readonly number[]] {\n // If the first argument is 1-D, it is promoted to a matrix by prepending\n // a 1 to its dimensions. After matrix multiplication the prepended 1 is\n // removed.\n const a = (dimsA.length === 1) ? [1, dimsA[0]] : dimsA;\n\n // If the second argument is 1-D, it is promoted to a matrix by appending\n // a 1 to its dimensions. After matrix multiplication the appended 1 is\n // removed.\n const b = (dimsB.length === 1) ? [dimsB[0], 1] : dimsB;\n\n return [a, b];\n }\n\n /**\n * Fix the output shape computed for MatMul operation if it needs fixing\n * @param outputShape The computed outputShape. Should be an array (atleast of length 2) of positive integers.\n * This will be mutated.\n * @param aRank The rank of tensor A.\n * @param bRank The rank of tensor B.\n */\n static postprocessOutputShape(outputShape: number[], aRank: number, bRank: number) {\n // Remove prepended dimension if first input is 1d\n if (aRank === 1) {\n // outputShape = outputShape.slice(0, outputShape.length - 2).concat(outputShape.slice(outputShape.length - 1));\n outputShape.splice(outputShape.length - 2, 1);\n }\n // Remove appended dimension if second input is 1d\n if (bRank === 1) {\n outputShape.pop();\n }\n }\n\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. 
Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n cdims[crank - i] = Math.max(aLen, bLen);\n }\n\n return cdims;\n }\n\n /**\n * Given the indices of a broadcasted tensor, calculate the original indices\n * @param broadcastedIndices The given indices of the broadcasted tensor.\n * @param originalShape The original shape of the tensor before broadcas\n * @returns The calculated indices that maps to the original tensor.\n */\n static index(broadcastedIndices: readonly number[], originalShape: readonly number[]): number[] {\n // NOTE 1: we assume the parameter broadcastedIndices is valid. ie. it should have the same\n // length as the broadcasted shape, and for each dimension the index should\n // not be out of range.\n const originalIndices = new Array(originalShape.length);\n BroadcastUtil.fillIndex(broadcastedIndices, originalShape, originalIndices);\n return originalIndices;\n }\n\n /**\n * Given the indices of a broadcasted tensor, calculate the original indices\n * @param broadcastedIndices The given indices of the broadcasted tensor.\n * @param originalShape The original shape of the tensor before broadcast\n * @param originalIndices The mapping of broadcastedIndices to the originalIndices (output parameter - will be\n * mutated).\n */\n static fillIndex(broadcastedIndices: readonly number[], originalShape: readonly number[], originalIndices: number[]) {\n // NOTE 1: we assume the parameter broadcastedIndices is valid. ie. 
it should have the same length as the\n // broadcasted shape, and for each dimension the index should not be out of range.\n // NOTE 2: we assume the parameter originalIndices has the same length as the originalShape\n const dimOffset = broadcastedIndices.length - originalShape.length;\n for (let i = 0; i < originalShape.length; i++) {\n originalIndices[i] = broadcastedIndices[dimOffset + i] % originalShape[i];\n }\n }\n\n /**\n * Perform the broadcasting operation on the specific operator\n * @param a The input tensor A\n * @param b The input tensor B\n * @param op The operator lambda function\n * @param inplace Whether to write the result back to A.\n * @returns The result tensor, or undefined if input not broadcastable.\n */\n static calc(\n a: Tensor, b: Tensor, op: (a: string|number, b: string|number) => (string | number), inplace: boolean,\n resultType?: Tensor.DataType): Tensor|undefined {\n const outputShape = BroadcastUtil.calcShape(a.dims, b.dims);\n\n if (outputShape) {\n if (inplace && !ShapeUtil.areEqual(outputShape, a.dims)) {\n // B is not broadcastable to A, failed to calculate inplace.\n return undefined;\n }\n\n const size = ShapeUtil.size(outputShape);\n const c = inplace ? a : new Tensor(outputShape, resultType || a.type);\n\n // both inputs are scalars\n if (outputShape.length === 0) {\n c.set([], op(a.get([]) as number, b.get([]) as number));\n }\n\n // atleast one input is a non-scalar\n else {\n const outputIndices = new Array(outputShape.length);\n const originalIndicesA = new Array(a.dims.length);\n const originalIndicesB = new Array(b.dims.length);\n let valA: string|number = 0;\n let valB: string|number = 0;\n let isAScalar = false;\n let isBScalar = false;\n if (a.dims.length === 0) {\n valA = a.get([]) as number;\n isAScalar = true;\n }\n if (b.dims.length === 0) {\n valB = b.get([]) as number;\n isBScalar = true;\n }\n let rest: number;\n for (let i = 0; i < size; i++) {\n // traversal indices\n rest = i;\n for (let j = outputShape.length - 1; j >= 0; j--) {\n outputIndices[j] = rest % outputShape[j];\n rest = Math.floor(rest / outputShape[j]);\n }\n\n if (!isAScalar) {\n // map outputIndices (which is actually broadcasted) to the originalIndices\n BroadcastUtil.fillIndex(outputIndices, a.dims, originalIndicesA);\n valA = a.get(originalIndicesA) as number;\n }\n if (!isBScalar) {\n BroadcastUtil.fillIndex(outputIndices, b.dims, originalIndicesB);\n valB = b.get(originalIndicesB) as number;\n }\n\n c.set(outputIndices, op(valA, valB));\n }\n }\n\n return c;\n }\n\n return undefined;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n\n /**\n * Determine the broadcasted dims in input shape based on the given output shape.\n * Note that this function only returns the broadcasted dims.\n * @param inputShape The input shape\n * @param outputShape The output shape\n * @returns The broadcasted dims in input shape.\n */\n static getBroadcastDims(inputShape: readonly number[], outputShape: readonly number[]): number[] {\n 
const inRank = inputShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inputShape[dim] || 1;\n const b = outputShape[outputShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n }\n}\n\n// copy array helper\n// mimics memcpy as much as possible\nexport function arrayCopyHelper(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] = source[sourceIndex + offset];\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\nexport class ProtoUtil {\n static tensorDataTypeFromProto(typeProto: onnx.TensorProto.DataType|\n onnxruntime.experimental.fbs.TensorDataType): Tensor.DataType {\n switch (typeProto) {\n case onnx.TensorProto.DataType.INT8:\n return 'int8';\n case onnx.TensorProto.DataType.UINT8:\n return 'uint8';\n case onnx.TensorProto.DataType.BOOL:\n return 'bool';\n case onnx.TensorProto.DataType.INT16:\n return 'int16';\n case onnx.TensorProto.DataType.UINT16:\n return 'uint16';\n case onnx.TensorProto.DataType.INT32:\n return 'int32';\n case onnx.TensorProto.DataType.UINT32:\n return 'uint32';\n case onnx.TensorProto.DataType.FLOAT:\n return 'float32';\n case onnx.TensorProto.DataType.DOUBLE:\n return 'float64';\n case onnx.TensorProto.DataType.STRING:\n return 'string';\n\n // For INT64/UINT64, reduce their value to 32-bits.\n // Should throw exception when overflow\n case onnx.TensorProto.DataType.INT64:\n return 'int32';\n case onnx.TensorProto.DataType.UINT64:\n return 'uint32';\n\n default:\n throw new Error(`unsupported data type: ${onnx.TensorProto.DataType[typeProto]}`);\n }\n }\n\n static tensorDataTypeStringToEnum(type: string): onnx.TensorProto.DataType {\n switch (type) {\n case 'int8':\n return onnx.TensorProto.DataType.INT8;\n case 'uint8':\n return 
onnx.TensorProto.DataType.UINT8;\n case 'bool':\n return onnx.TensorProto.DataType.BOOL;\n case 'int16':\n return onnx.TensorProto.DataType.INT16;\n case 'uint16':\n return onnx.TensorProto.DataType.UINT16;\n case 'int32':\n return onnx.TensorProto.DataType.INT32;\n case 'uint32':\n return onnx.TensorProto.DataType.UINT32;\n case 'float32':\n return onnx.TensorProto.DataType.FLOAT;\n case 'float64':\n return onnx.TensorProto.DataType.DOUBLE;\n case 'string':\n return onnx.TensorProto.DataType.STRING;\n case 'int64':\n return onnx.TensorProto.DataType.INT64;\n case 'uint64':\n return onnx.TensorProto.DataType.UINT64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n }\n\n static tensorDimsFromProto(dims: Array): number[] {\n // get rid of Long type for dims\n return dims.map(d => Long.isLong(d) ? d.toNumber() : d);\n }\n\n static tensorValueTypeFromProto(valueType: onnx.TypeProto.ITensor): Graph.ValueType {\n return {\n tensorType: ProtoUtil.tensorDataTypeFromProto(valueType.elemType!),\n shape: {dims: ProtoUtil.tensorDimsFromProto(valueType.shape!.dim!.map(d => d.dimValue!))}\n };\n }\n\n static tensorDimsFromORTFormat(tensor: onnxruntime.experimental.fbs.Tensor) {\n const dims = [];\n for (let i = 0; i < tensor.dimsLength(); i++) {\n dims.push(LongUtil.longToNumber(tensor.dims(i)!));\n }\n return dims;\n }\n\n static tensorAttributesFromORTFormat(node: onnxruntime.experimental.fbs.Node) {\n const attributes = [];\n for (let i = 0; i < node.attributesLength(); i++) {\n attributes.push(node.attributes(i)!);\n }\n return attributes;\n }\n}\n\nexport class LongUtil {\n // This function is called to get a number from long type of data for attribute, dim, and ir version,\n // which values are signed integers.\n // To make it more generic, add an optional parameter to convert to a unsigned number.\n static longToNumber(n: Long|flatbuffers.Long|number, unsigned?: boolean) {\n if (Long.isLong(n)) {\n return n.toNumber();\n } else if (n instanceof flatbuffers.Long) {\n return Long.fromValue({low: n.low, high: n.high, unsigned: unsigned ?? false}).toNumber();\n }\n return n;\n }\n static isLong(n: unknown) {\n return Long.isLong(n) || n instanceof flatbuffers.Long;\n }\n}\n\nexport class ShapeUtil {\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n // `axis` inclusive\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n // `axis` exclusive\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be 0 or negative.\n if (dims[i] <= 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. 
Most likely the range contains 0 or negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n static transpose(dims: readonly number[]): readonly number[] {\n const copy = dims.slice();\n return copy.reverse();\n }\n\n static indicesToOffset(indices: readonly number[], strides: readonly number[], axis?: number): number {\n if (axis === undefined) {\n axis = indices.length;\n }\n let offset = 0;\n for (let i = 0; i < axis; ++i) {\n offset += strides[i] * indices[i];\n }\n return offset;\n }\n\n static offsetToIndices(offset: number, strides: readonly number[]): readonly number[] {\n const rank = strides.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [offset * strides[0]];\n }\n const indices: number[] = new Array(strides.length);\n for (let i = 0; i < indices.length - 1; ++i) {\n indices[i] = Math.floor(offset / strides[i]);\n offset -= indices[i] * strides[i];\n }\n indices[indices.length - 1] = offset;\n return indices;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank));\n }\n\n // Increment an index into a tensor (in lexicographic\n // ordering), wrapping around the specified upper_bound.\n /**\n * Increment an index into a tensor (in lexicographic ordering), wrapping around the specified upper_bound.\n * @param index Given index to increment (Will be mutated)\n * @param dims The dimensions of the tensor for which the given index corresponds to\n * @param axisToIncrementOn The 1-indexed axis to increment on. 
If undefined, axisToIncrementOn == rank\n */\n static incrementIndex(index: number[], dims: readonly number[], axisToIncrementOn?: number) {\n if (dims.length === 0 || index.length === 0) {\n throw new Error('Index incrementing unsupported for scalar Tensor');\n }\n if (axisToIncrementOn === undefined) {\n axisToIncrementOn = dims.length;\n } else {\n if (axisToIncrementOn <= 0 || axisToIncrementOn > dims.length) {\n throw new Error('Incorrect axis to increment on');\n }\n }\n\n for (let k = axisToIncrementOn - 1; k >= 0; --k) {\n index[k]++;\n if (index[k] < dims[k]) {\n break;\n }\n index[k] = 0;\n }\n }\n\n /**\n * Produces a new dimensions array based on the values in the 'originalDimensions' and 'shape' array\n * Used in Reshape\n * @param originalDims Original Shape array\n * @param shapeHints array containing values to compute the new dimensions\n * For example:\n * originalDims = [2,2] and shapeHints = [0,-1] will return [2,2]\n * originalDims = [2,2] and shapeHints = [4] will return [4]\n * originalDims = [2,2] and shapeHints = [5] will throw an exception\n * https://github.com/onnx/onnx/blob/main/docs/Operators.md#Reshape\n */\n\n static calculateReshapedDims(originalDims: readonly number[], shapeHints: ArrayLike): number[] {\n // reshape to a Scalar Tensor\n if (shapeHints.length === 0) {\n if (originalDims.length === 0 || ShapeUtil.size(originalDims) === 1) {\n return [];\n } else {\n throw new Error('cannot reshape to a scalar Tensor');\n }\n }\n\n const nDims = shapeHints.length;\n const reshapedDims = new Array(nDims);\n let unknownDimension = -1;\n let newTensorSize = 1;\n for (let i = 0; i < nDims; i++) {\n if (shapeHints[i] < -1) {\n throw new Error('a dimension in shape hints cannot be less than -1');\n }\n if (shapeHints[i] === -1) {\n if (unknownDimension !== -1) {\n throw new Error('at most one dimension in shape hints can be -1');\n }\n unknownDimension = i;\n } else {\n if (shapeHints[i] === 0) {\n if (i >= originalDims.length) {\n throw new Error('the dimension with value zero exceeds the dimension size of the input tensor');\n }\n reshapedDims[i] = originalDims[i];\n } else {\n reshapedDims[i] = shapeHints[i];\n }\n newTensorSize *= reshapedDims[i];\n }\n }\n\n const oldTensorSize = ShapeUtil.size(originalDims);\n if (unknownDimension !== -1) {\n if (oldTensorSize % newTensorSize !== 0) {\n throw new Error(`the input tensor cannot be reshaped to the requested shape. 
Input shape: [${\n originalDims}] Output shape: [${shapeHints}]`);\n }\n reshapedDims[unknownDimension] = oldTensorSize / newTensorSize;\n }\n // validate sizes from originalDims and reshapedDims match\n else {\n if (newTensorSize !== oldTensorSize) {\n throw new Error('reshapedDims and originalDims don\\'t have matching sizes');\n }\n }\n return reshapedDims;\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n\n /**\n * Validates if the given `dims` or `shape` is valid in ONNX.js context and returns data size\n * @param dims - input `dims` that needs to be checked\n */\n static validateDimsAndCalcSize(dims: readonly number[]): number {\n if (dims.length > 6) {\n throw new TypeError('Only rank 0 to 6 is supported for tensor shape.');\n }\n let size = 1;\n for (const n of dims) {\n if (!Number.isInteger(n)) {\n throw new TypeError(`Invalid shape: ${n} is not an integer`);\n }\n if (n < 0 || n > 2147483647) {\n throw new TypeError(`Invalid shape: length ${n} is not allowed`);\n }\n size *= n;\n }\n return size;\n }\n\n /**\n * Determines the shape of output tensor y = flatten(x, axis)\n * @param dims - shape of input tensor\n * @param axis - flatten axis, in the range [-r, r]\n */\n static flattenShape(dims: readonly number[], axis: number): readonly number[] {\n if (axis < 0) {\n axis += dims.length;\n }\n const total = dims.reduce((x, y) => x * y, 1);\n const right = dims.slice(axis).reduce((x, y) => x * y, 1);\n const outputDims = [total / right, right];\n\n return outputDims;\n }\n\n /**\n * Determines the shape of output tensor y = squeeze(x, axes)\n * @param dims - shape of input tensor\n * @param axes - squeeze axes\n */\n static squeezeShape(dims: readonly number[], axes: readonly number[]): readonly number[] {\n const outputDims = new Array();\n\n // sanity check\n axes = ShapeUtil.normalizeAxes(axes, dims.length);\n\n for (let i = 0; i < dims.length; i++) {\n const inSqueezeList = axes.indexOf(i) >= 0;\n if (inSqueezeList && dims[i] !== 1) {\n throw new Error('squeeze an axis of size different than 1');\n }\n\n if ((axes.length === 0 && dims[i] > 1) || (axes.length > 0 && !inSqueezeList)) {\n outputDims.push(dims[i]);\n }\n }\n\n return outputDims;\n }\n\n /**\n * Determines the shape of output tensor y = unsqueeze(x, axes)\n * @param dims - shape of input tensor\n * @param axes - unsqueeze axes\n */\n static unsqueezeShape(dims: readonly number[], axes: readonly number[]): readonly number[] {\n const outputDims = new Array(dims.length + axes.length);\n\n // initialize the array elements to 0\n outputDims.fill(0);\n\n // set all axes 
indices to 1 in outputDims and check for duplicates\n for (let i = 0; i < axes.length; i++) {\n const axis = ShapeUtil.normalizeAxis(axes[i], outputDims.length);\n if (axis >= outputDims.length) {\n throw new Error('\\'axes\\' has an out of range axis');\n }\n if (outputDims[axis] !== 0) {\n throw new Error('\\'axes\\' has a duplicate axis');\n }\n\n outputDims[axis] = 1;\n }\n\n // fill in the zero entries of outputDims with the input tensor's shape\n let inputDimsIterator = 0;\n for (let i = 0; i < outputDims.length; i++) {\n if (outputDims[i] === 0) {\n outputDims[i] = dims[inputDimsIterator++];\n }\n }\n\n // sanity check assertion. 'inputDimsIterator'\n // should be equal to the length of 'dims'\n if (inputDimsIterator !== dims.length) {\n throw new Error('the unsqueezed dimension could not be established');\n }\n\n return outputDims;\n }\n}\n\n// bunch of helper methods that do a variety of math operations\nexport class MathUtil {\n // y = (x*x) + y\n static sqr(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] += Math.pow(source[sourceIndex + offset], 2);\n }\n }\n\n // y = ax + y\n static axpy(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number, alpha: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] += (alpha * source[sourceIndex + offset]);\n }\n }\n\n // y = pow(x, b)\n static powx(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number, b: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] = Math.pow(source[sourceIndex + offset], b);\n }\n }\n\n // y = x * y\n static mul(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of 
bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] = (source[sourceIndex + offset] * target[targetIndex + offset]);\n }\n }\n}\n\nexport class SplitUtil {\n /**\n * Calculates new Shapes from existing one and the splits given along the axis provides\n * @param dims Shape of the Tensor to be splitted into two or more Shapes\n * @param axis The dimension along which the Tensor will be split\n * @param splits Offsets for the start of each split\n */\n static splitShape(dims: readonly number[], axis: number, split: number[], numOutputs?: number):\n [number[][], number[]] {\n if (split.length === 0) {\n if (!numOutputs) {\n throw new Error('need to know number of outputs when the \\'split\\' attribute is not specified');\n }\n SplitUtil.determineSplit(dims[axis], numOutputs, split);\n }\n\n const shapes: number[][] = [];\n const offsets = [0];\n for (let i = 0; i < split.length; ++i) {\n if (i !== 0) {\n offsets.push(offsets[i - 1] + split[i - 1]);\n }\n const shape = dims.slice();\n shape[axis] = split[i];\n shapes.push(shape);\n }\n return [shapes, offsets];\n }\n\n static determineSplit(numElementsAlongAxis: number, numOutputs: number, split: number[]) {\n // If 'split' is not specified by the user, we need to partition the number of elements equally among the outputs\n if (numElementsAlongAxis % numOutputs !== 0) {\n throw new Error('cannot split tensor to equal sized parts');\n }\n for (let i = 0; i < numOutputs; ++i) {\n split.push(numElementsAlongAxis / numOutputs);\n }\n }\n}\n\nexport class ReduceUtil {\n /**\n * Perform reduce operations on the specific operator\n * @param a Input tensor data\n * @param axes The dimensions along which the Tensor will be reduced\n * @param keepdims If set to true, the axes which are reduced are left in the\n * result as dimensions with size one.\n * @param op1 The operation to be performed on each element in the tensor\n * @param op2 The operation to be performed between elements in the tensor\n */\n static calcReduce(\n a: Tensor, axes: number[], keepdims: boolean, op1: (b: number) => number,\n op2: (a: number, b: number) => number): Tensor {\n const dims = a.dims.slice(0);\n // if axes is not set, perform reduce on all axes\n if (axes.length === 0) {\n dims.forEach((_d, ind) => axes.push(ind));\n }\n // get a temporary broadcastable output shape\n const outputDims = ReduceUtil.calcReduceShape(dims, axes, true);\n\n // loop through the output and calculate result one by one\n const size = ShapeUtil.size(outputDims);\n const y = new Tensor(outputDims, a.type);\n const strides = ShapeUtil.computeStrides(outputDims);\n const inputStrides = ShapeUtil.computeStrides(dims);\n const indicesY = new Array(dims.length);\n for (let i = 0; i < size; i++) {\n const indices = ShapeUtil.offsetToIndices(i, strides);\n // map index\n BroadcastUtil.fillIndex(indices, dims, indicesY);\n y.set(\n indices,\n ReduceUtil.calcReduceByAxis(\n a.numberData, axes, dims, 0, ShapeUtil.indicesToOffset(indicesY, inputStrides), op1, op2));\n }\n\n if (keepdims) {\n return y;\n } else {\n // keepdims == 0, calculate the expected shape\n return new Tensor(\n ReduceUtil.calcReduceShape(dims, 
axes, keepdims), y.type, undefined, undefined, y.data, y.dataId);\n }\n }\n\n /**\n * Perform reduce operations on the specific operator on specific axes\n * @param a Input tensor data\n * @param axes The dimensions along which the Tensor will be reduced\n * @param dims The input dimension.\n * @param curAxisInd Index in axes specifying the current dimension along\n * which the tensor will be reduced\n * @param pos The current index of element to perform operation\n * @param op1 The operation to be performed on each element in the tensor\n * @param op2 The operation to be performed between elements in the tensor\n */\n static calcReduceByAxis(\n input: Tensor.NumberType, axes: number[], dims: number[], curAxisInd: number, pos: number,\n op1: (b: number) => number, op2: (a: number, b: number) => number): number {\n let res = 0;\n if (curAxisInd >= axes.length) {\n return op1(input[pos]);\n }\n const axis = axes[curAxisInd];\n const step = axis >= dims.length ? 1 : ShapeUtil.size(dims.slice(axis + 1));\n for (let i = 0; i < dims[axis]; i++) {\n res = i === 0 ? ReduceUtil.calcReduceByAxis(input, axes, dims, curAxisInd + 1, pos, op1, op2) :\n op2(res, ReduceUtil.calcReduceByAxis(input, axes, dims, curAxisInd + 1, pos, op1, op2));\n pos += step;\n }\n return res;\n }\n\n /**\n * Calculate the expected shape of a reduce operation\n * @param dims The input tensor dimension\n * @param axes The dimensions along which the Tensor will be reduced\n * @param keepdims If set to true, the axes which are reduced are left in the\n * result as dimensions with size one.\n */\n static calcReduceShape(dims: readonly number[], axes: readonly number[], keepDims: boolean): number[] {\n const outputDims = dims.slice();\n for (let i = 0; i < axes.length; i++) {\n if (keepDims) {\n outputDims[axes[i]] = 1;\n } else {\n outputDims[axes[i]] = 0;\n }\n }\n return outputDims.filter(dim => dim !== 0);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. 
Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]) {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. 
(inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n\nexport function decodeUtf8String(buffer: Uint8Array): string {\n return new TextDecoder().decode(buffer);\n}\n", "// Copyright (c) Microsoft Corporation. 
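For reference, a minimal standalone sketch of the output-size arithmetic used by the explicit-pads branch of PoolConvUtil.adjustPadAndReturnShape above; the helper name and the example numbers are illustrative and not part of the embedded source:

// Hypothetical helper mirroring the explicit-pads (non-autoPad) branch above.
function outputSizeAlongAxis(
    inSize: number, stride: number, dilation: number, kernel: number, padHead: number, padTail: number): number {
  const dkernel = dilation * (kernel - 1) + 1; // effective (dilated) kernel extent
  return Math.floor((inSize + padHead + padTail - dkernel) / stride + 1);
}

// Example: width 224, 3x3 kernel, stride 2, dilation 1, pads 1/1 -> 112.
console.log(outputSizeAlongAxis(224, 2, 1, 3, 1, 1)); // 112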
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Guid} from 'guid-typescript';\nimport Long from 'long';\n\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {decodeUtf8String, ProtoUtil, ShapeUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport declare namespace Tensor {\n export interface DataTypeMap {\n bool: Uint8Array;\n float32: Float32Array;\n float64: Float64Array;\n string: string[];\n int8: Int8Array;\n uint8: Uint8Array;\n int16: Int16Array;\n uint16: Uint16Array;\n int32: Int32Array;\n uint32: Uint32Array;\n int64: BigInt64Array;\n }\n\n export type DataType = keyof DataTypeMap;\n\n export type StringType = Tensor.DataTypeMap['string'];\n export type BooleanType = Tensor.DataTypeMap['bool'];\n export type IntegerType = Tensor.DataTypeMap['int8']|Tensor.DataTypeMap['uint8']|Tensor.DataTypeMap['int16']|\n Tensor.DataTypeMap['uint16']|Tensor.DataTypeMap['int32']|Tensor.DataTypeMap['uint32'];\n export type FloatType = Tensor.DataTypeMap['float32']|Tensor.DataTypeMap['float64'];\n export type NumberType = BooleanType|IntegerType|FloatType;\n\n export type Id = Guid;\n}\n\ntype TensorData = Tensor.DataTypeMap[Tensor.DataType];\n\ntype DataProvider = (id: Tensor.Id) => TensorData;\ntype AsyncDataProvider = (id: Tensor.Id) => Promise;\n\nexport class Tensor {\n /**\n * get the underlying tensor data\n */\n get data(): TensorData {\n if (this.cache === undefined) {\n const data = this.dataProvider!(this.dataId);\n if (data.length !== this.size) {\n throw new Error('Length of data provided by the Data Provider is inconsistent with the dims of this Tensor.');\n }\n this.cache = data;\n }\n return this.cache;\n }\n\n /**\n * get the underlying string tensor data. Should only use when type is STRING\n */\n get stringData() {\n if (this.type !== 'string') {\n throw new TypeError('data type is not string');\n }\n\n return this.data as Tensor.StringType;\n }\n\n /**\n * get the underlying integer tensor data. Should only use when type is one of the following: (UINT8, INT8, UINT16,\n * INT16, INT32, UINT32, BOOL)\n */\n get integerData() {\n switch (this.type) {\n case 'uint8':\n case 'int8':\n case 'uint16':\n case 'int16':\n case 'int32':\n case 'uint32':\n case 'bool':\n return this.data as Tensor.IntegerType;\n\n default:\n throw new TypeError('data type is not integer (uint8, int8, uint16, int16, int32, uint32, bool)');\n }\n }\n\n /**\n * get the underlying float tensor data. Should only use when type is one of the following: (FLOAT, DOUBLE)\n */\n get floatData() {\n switch (this.type) {\n case 'float32':\n case 'float64':\n return this.data as Tensor.FloatType;\n\n default:\n throw new TypeError('data type is not float (float32, float64)');\n }\n }\n\n /**\n * get the underlying number tensor data. 
Should only use when type is one of the following: (UINT8, INT8, UINT16,\n * INT16, INT32, UINT32, BOOL, FLOAT, DOUBLE)\n */\n get numberData() {\n if (this.type !== 'string') {\n return this.data as Tensor.NumberType;\n }\n throw new TypeError('type cannot be non-number (string)');\n }\n\n /**\n * get value of an element at the given indices\n */\n get(indices: readonly number[]): Tensor.DataTypeMap[Tensor.DataType][number] {\n return this.data[ShapeUtil.indicesToOffset(indices, this.strides)];\n }\n\n /**\n * set value of an element at the given indices\n */\n set(indices: readonly number[], value: Tensor.DataTypeMap[Tensor.DataType][number]) {\n this.data[ShapeUtil.indicesToOffset(indices, this.strides)] = value;\n }\n\n /**\n * get the underlying tensor data asynchronously\n */\n async getData(): Promise {\n if (this.cache === undefined) {\n this.cache = await this.asyncDataProvider!(this.dataId);\n }\n return this.cache;\n }\n\n /**\n * get the number of elements in the tensor\n */\n public readonly size: number;\n\n private _strides: readonly number[];\n /**\n * get the strides for each dimension\n */\n get strides(): readonly number[] {\n if (!this._strides) {\n this._strides = ShapeUtil.computeStrides(this.dims);\n }\n return this._strides;\n }\n\n constructor(\n /**\n * get the dimensions of the tensor\n */\n public readonly dims: readonly number[],\n /**\n * get the type of the tensor\n */\n public readonly type: Tensor.DataType, private dataProvider?: DataProvider,\n private asyncDataProvider?: AsyncDataProvider, private cache?: TensorData,\n /**\n * get the data ID that used to map to a tensor data\n */\n public readonly dataId: Guid = Guid.create()) {\n this.size = ShapeUtil.validateDimsAndCalcSize(dims);\n const size = this.size;\n const empty = (dataProvider === undefined && asyncDataProvider === undefined && cache === undefined);\n\n if (cache !== undefined) {\n if (cache.length !== size) {\n throw new RangeError('Input dims doesn\\'t match data length.');\n }\n }\n\n if (type === 'string') {\n if (cache !== undefined && (!Array.isArray(cache) || !cache.every(i => typeof i === 'string'))) {\n throw new TypeError('cache should be a string array');\n }\n\n if (empty) {\n this.cache = new Array(size);\n }\n } else {\n if (cache !== undefined) {\n const constructor = dataviewConstructor(type);\n if (!(cache instanceof constructor)) {\n throw new TypeError(`cache should be type ${constructor.name}`);\n }\n }\n\n if (empty) {\n const buf = new ArrayBuffer(size * sizeof(type));\n this.cache = createView(buf, type);\n }\n }\n }\n\n /**\n * Construct new Tensor from a ONNX Tensor object\n * @param tensorProto the ONNX Tensor\n */\n static fromProto(tensorProto: onnx.ITensorProto): Tensor {\n if (!tensorProto) {\n throw new Error('cannot construct Value from an empty tensor');\n }\n const type = ProtoUtil.tensorDataTypeFromProto(tensorProto.dataType!);\n const dims = ProtoUtil.tensorDimsFromProto(tensorProto.dims!);\n\n const value = new Tensor(dims, type);\n\n if (type === 'string') {\n // When it's STRING type, the value should always be stored in field\n // 'stringData'\n tensorProto.stringData!.forEach((str, i) => {\n value.data[i] = decodeUtf8String(str);\n });\n\n } else if (\n tensorProto.rawData && typeof tensorProto.rawData.byteLength === 'number' &&\n tensorProto.rawData.byteLength > 0) {\n // NOT considering segment for now (IMPORTANT)\n\n // populate value from rawData\n const dataDest = value.data;\n const dataSource =\n new DataView(tensorProto.rawData.buffer, 
tensorProto.rawData.byteOffset, tensorProto.rawData.byteLength);\n const elementSize = sizeofProto(tensorProto.dataType!);\n const length = tensorProto.rawData.byteLength / elementSize;\n\n if (tensorProto.rawData.byteLength % elementSize !== 0) {\n throw new Error('invalid buffer length');\n }\n if (dataDest.length !== length) {\n throw new Error('buffer length mismatch');\n }\n\n for (let i = 0; i < length; i++) {\n const n = readProto(dataSource, tensorProto.dataType!, i * elementSize);\n dataDest[i] = n;\n }\n } else {\n // populate value from array\n let array: Array;\n switch (tensorProto.dataType) {\n case onnx.TensorProto.DataType.FLOAT:\n array = tensorProto.floatData!;\n break;\n case onnx.TensorProto.DataType.INT32:\n case onnx.TensorProto.DataType.INT16:\n case onnx.TensorProto.DataType.UINT16:\n case onnx.TensorProto.DataType.INT8:\n case onnx.TensorProto.DataType.UINT8:\n case onnx.TensorProto.DataType.BOOL:\n array = tensorProto.int32Data!;\n break;\n case onnx.TensorProto.DataType.INT64:\n array = tensorProto.int64Data!;\n break;\n case onnx.TensorProto.DataType.DOUBLE:\n array = tensorProto.doubleData!;\n break;\n case onnx.TensorProto.DataType.UINT32:\n case onnx.TensorProto.DataType.UINT64:\n array = tensorProto.uint64Data!;\n break;\n default:\n // should never run here\n throw new Error('unspecific error');\n }\n\n if (array === null || array === undefined) {\n throw new Error('failed to populate data from a tensorproto value');\n }\n\n const data = value.data;\n if (data.length !== array.length) {\n throw new Error('array length mismatch');\n }\n\n for (let i = 0; i < array.length; i++) {\n const element = array[i];\n if (Long.isLong(element)) {\n data[i] = longToNumber(element, tensorProto.dataType);\n } else {\n data[i] = element;\n }\n }\n }\n\n return value;\n }\n\n /**\n * Construct new Tensor from raw data\n * @param data the raw data object. 
Should be a string array for 'string' tensor, and the corresponding typed array\n * for other types of tensor.\n * @param dims the dimensions of the tensor\n * @param type the type of the tensor\n */\n static fromData(data: Tensor.DataTypeMap[Tensor.DataType], dims: readonly number[], type: Tensor.DataType) {\n return new Tensor(dims, type, undefined, undefined, data);\n }\n\n static fromOrtTensor(ortTensor: ortFbs.Tensor) {\n if (!ortTensor) {\n throw new Error('cannot construct Value from an empty tensor');\n }\n const dims = ProtoUtil.tensorDimsFromORTFormat(ortTensor);\n const type = ProtoUtil.tensorDataTypeFromProto(ortTensor.dataType());\n\n const value = new Tensor(dims, type);\n\n if (type === 'string') {\n // When it's STRING type, the value should always be stored in field\n // 'stringData'\n for (let i = 0; i < ortTensor.stringDataLength(); i++) {\n value.data[i] = ortTensor.stringData(i);\n }\n\n } else if (\n ortTensor.rawDataArray() && typeof ortTensor.rawDataLength() === 'number' && ortTensor.rawDataLength() > 0) {\n // NOT considering segment for now (IMPORTANT)\n\n // populate value from rawData\n const dataDest = value.data;\n const dataSource = new DataView(\n ortTensor.rawDataArray()!.buffer, ortTensor.rawDataArray()!.byteOffset, ortTensor.rawDataLength());\n const elementSize = sizeofProto(ortTensor.dataType());\n const length = ortTensor.rawDataLength() / elementSize;\n\n if (ortTensor.rawDataLength() % elementSize !== 0) {\n throw new Error('invalid buffer length');\n }\n if (dataDest.length !== length) {\n throw new Error('buffer length mismatch');\n }\n\n for (let i = 0; i < length; i++) {\n const n = readProto(dataSource, ortTensor.dataType(), i * elementSize);\n dataDest[i] = n;\n }\n }\n return value;\n }\n}\n\nfunction sizeof(type: Tensor.DataType): number {\n switch (type) {\n case 'bool':\n case 'int8':\n case 'uint8':\n return 1;\n case 'int16':\n case 'uint16':\n return 2;\n case 'int32':\n case 'uint32':\n case 'float32':\n return 4;\n case 'float64':\n return 8;\n default:\n throw new Error(`cannot calculate sizeof() on type ${type}`);\n }\n}\n\nfunction sizeofProto(type: onnx.TensorProto.DataType|ortFbs.TensorDataType): number {\n switch (type) {\n case onnx.TensorProto.DataType.UINT8:\n case onnx.TensorProto.DataType.INT8:\n case onnx.TensorProto.DataType.BOOL:\n return 1;\n case onnx.TensorProto.DataType.UINT16:\n case onnx.TensorProto.DataType.INT16:\n return 2;\n case onnx.TensorProto.DataType.FLOAT:\n case onnx.TensorProto.DataType.INT32:\n case onnx.TensorProto.DataType.UINT32:\n return 4;\n case onnx.TensorProto.DataType.INT64:\n case onnx.TensorProto.DataType.DOUBLE:\n case onnx.TensorProto.DataType.UINT64:\n return 8;\n default:\n throw new Error(`cannot calculate sizeof() on type ${onnx.TensorProto.DataType[type]}`);\n }\n}\n\nfunction createView(dataBuffer: ArrayBuffer, type: Tensor.DataType) {\n return new (dataviewConstructor(type))(dataBuffer);\n}\n\nfunction dataviewConstructor(type: Tensor.DataType) {\n switch (type) {\n case 'bool':\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'int16':\n return Int16Array;\n case 'uint16':\n return Uint16Array;\n case 'int32':\n return Int32Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'float32':\n return Float32Array;\n case 'float64':\n return Float64Array;\n default:\n // should never run to here\n throw new Error('unspecified error');\n }\n}\n\n// convert a long number to a 32-bit integer (cast-down)\nfunction 
longToNumber(i: Long, type: onnx.TensorProto.DataType|ortFbs.TensorDataType): number {\n // INT64, UINT32, UINT64\n if (type === onnx.TensorProto.DataType.INT64 || type === ortFbs.TensorDataType.INT64) {\n if (i.greaterThanOrEqual(2147483648) || i.lessThan(-2147483648)) {\n throw new TypeError('int64 is not supported');\n }\n } else if (\n type === onnx.TensorProto.DataType.UINT32 || type === ortFbs.TensorDataType.UINT32 ||\n type === onnx.TensorProto.DataType.UINT64 || type === ortFbs.TensorDataType.UINT64) {\n if (i.greaterThanOrEqual(4294967296) || i.lessThan(0)) {\n throw new TypeError('uint64 is not supported');\n }\n } else {\n throw new TypeError(`not a LONG type: ${onnx.TensorProto.DataType[type]}`);\n }\n\n return i.toNumber();\n}\n\n// read one value from TensorProto\nfunction readProto(view: DataView, type: onnx.TensorProto.DataType|ortFbs.TensorDataType, byteOffset: number): number {\n switch (type) {\n case onnx.TensorProto.DataType.BOOL:\n case onnx.TensorProto.DataType.UINT8:\n return view.getUint8(byteOffset);\n case onnx.TensorProto.DataType.INT8:\n return view.getInt8(byteOffset);\n case onnx.TensorProto.DataType.UINT16:\n return view.getUint16(byteOffset, true);\n case onnx.TensorProto.DataType.INT16:\n return view.getInt16(byteOffset, true);\n case onnx.TensorProto.DataType.FLOAT:\n return view.getFloat32(byteOffset, true);\n case onnx.TensorProto.DataType.INT32:\n return view.getInt32(byteOffset, true);\n case onnx.TensorProto.DataType.UINT32:\n return view.getUint32(byteOffset, true);\n case onnx.TensorProto.DataType.INT64:\n return longToNumber(\n Long.fromBits(view.getUint32(byteOffset, true), view.getUint32(byteOffset + 4, true), false), type);\n case onnx.TensorProto.DataType.DOUBLE:\n return view.getFloat64(byteOffset, true);\n case onnx.TensorProto.DataType.UINT64:\n return longToNumber(\n Long.fromBits(view.getUint32(byteOffset, true), view.getUint32(byteOffset + 4, true), true), type);\n default:\n throw new Error(`cannot read from DataView for type ${onnx.TensorProto.DataType[type]}`);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * represent a version irrelevant abstraction of for GLSL source code\n */\nexport interface Glsl {\n readonly version: string;\n readonly attribute: string;\n readonly varyingVertex: string;\n readonly varyingFrag: string;\n readonly texture2D: string;\n readonly output: string;\n readonly outputDeclaration: string;\n}\n\nconst GLSL_ES_2_0: Glsl = {\n version: '',\n attribute: 'attribute',\n varyingVertex: 'varying',\n varyingFrag: 'varying',\n texture2D: 'texture2D',\n output: 'gl_FragColor',\n outputDeclaration: '',\n};\nconst GLSL_ES_3_0: Glsl = {\n version: '#version 300 es',\n attribute: 'in',\n varyingVertex: 'out',\n varyingFrag: 'in',\n texture2D: 'texture',\n output: 'outputColor',\n outputDeclaration: 'out vec4 outputColor;',\n};\n\nexport function getGlsl(version: 1|2) {\n return version === 1 ? 
GLSL_ES_2_0 : GLSL_ES_3_0;\n}\n\nexport function getVertexShaderSource(version: 1|2): string {\n const glsl = getGlsl(version);\n return `${glsl.version}\n precision highp float;\n ${glsl.attribute} vec3 position;\n ${glsl.attribute} vec2 textureCoord;\n\n ${glsl.varyingVertex} vec2 TexCoords;\n\n void main()\n {\n gl_Position = vec4(position, 1.0);\n TexCoords = textureCoord;\n }`;\n}\n\nexport function getFragShaderPreamble(version: 1|2): string {\n const glsl = getGlsl(version);\n return `${glsl.version}\n precision highp float;\n precision highp int;\n precision highp sampler2D;\n ${glsl.varyingFrag} vec2 TexCoords;\n ${glsl.outputDeclaration}\n const vec2 halfCR = vec2(0.5, 0.5);\n\n // Custom vector types to handle higher dimenalities.\n struct ivec5\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n };\n\n struct ivec6\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n };\n\n int imod(int x, int y) {\n return x - y * (x / y);\n }\n\n `;\n}\n\nexport function getDefaultFragShaderMain(version: 1|2, outputShapeLength: number): string {\n const glsl = getGlsl(version);\n return `\n void main() {\n int indices[${outputShapeLength}];\n toVec(TexCoords, indices);\n vec4 result = vec4(process(indices));\n ${glsl.output} = result;\n }\n `;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../tensor';\n\n/**\n * Layout info is used for mapping n-dimensional array to 2D textures\n * The layout is created by the TextureLayoutStrategy based on\n * the Tensor's dimensions and strides\n */\nexport interface TextureLayout {\n width: number;\n height: number;\n /**\n * specify the number of value that encoded in a single pixel\n */\n channels: 1|2|3|4;\n /**\n * whether in packed mode or not\n */\n isPacked?: boolean;\n /**\n * the normalized shape\n */\n shape: readonly number[];\n /**\n * the stride of each dimensions, calculated according to shape\n */\n strides: readonly number[];\n /**\n * the original shape(dims) of the corresponding tensor\n */\n unpackedShape: readonly number[];\n\n reversedWH?: boolean;\n}\nexport interface TextureData extends TextureLayout {\n tensor: Tensor;\n texture: WebGLTexture;\n}\n\nexport enum TextureType {\n unpacked, // <-- normal unpacked texture\n unpackedReversed, // <-- unpacked texture used in old ONNX.js implementation (deprecated)\n packed, // <-- normal packed texture\n downloadUint8AsFloat, // <-- ONLY used in texture downloading for iOS devices\n packedLastDimension // <-- ONLY used in old ONNX.js Conv implementation for input W (deprecated)\n}\n\nexport interface TensorInfo {\n id?: Tensor.Id;\n dims: readonly number[];\n type: Tensor.DataType;\n textureType: TextureType;\n}\n\nexport interface ProgramVariable {\n type: 'float'|'int';\n name: string;\n arrayLength?: number;\n data: number|number[];\n}\n\n/**\n * A set of metadata of a shader program.\n */\nexport interface ProgramMetadata {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n /**\n * texture types for each input\n */\n inputTypes: TextureType[];\n /**\n * names of each input\n */\n inputNames: string[];\n /**\n * an optional string as a cache hint in the artifact cache\n */\n cacheHint?: string;\n}\n\n/**\n * A ProgramInfoLoader allows\n */\nexport interface ProgramInfoLoader extends ProgramMetadata {\n /**\n * a function to get the program info\n */\n get(): ProgramInfo;\n}\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo extends ProgramMetadata {\n /**\n * information of uniform variables\n */\n variables?: ProgramVariable[];\n /**\n * tensor info for output\n */\n output: TensorInfo;\n /**\n * the shader's processing source code\n */\n shaderSource: string;\n /**\n * whether the shader source contains a customized main function implementation\n */\n hasMain?: boolean;\n}\n\nexport interface VariableInfo {\n type: 'float'|'int';\n name: string;\n arrayLength?: number;\n}\n\nexport interface ProgramVariable {\n type: 'float'|'int';\n name: string;\n arrayLength?: number;\n data: number|number[];\n}\n\n/**\n * Information of uniforms that shader uses\n */\nexport interface UniformInfo {\n type: 'sampler2D'|VariableInfo['type'];\n name: string;\n arrayLength?: number;\n}\n\nexport interface UniformLocation extends UniformInfo {\n location: WebGLUniformLocation;\n}\n\n/**\n * Artifact is the result of compilation\n * It does not contain input of output data\n * However anything that could be run as a \"program\"\n */\nexport interface Artifact {\n programInfo: ProgramInfo;\n program: WebGLProgram;\n uniformLocations: UniformLocation[];\n attribLocations: {position: number; textureCoord: number};\n}\nexport declare namespace Artifact {\n type UniformLocations = Artifact['uniformLocations'];\n type AttribLocations = Artifact['attribLocations'];\n}\n\nexport interface UniformData {\n [name: string]: number|number[];\n}\n", "// Copyright (c) Microsoft Corporation. 
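As an aside, a small sketch of the deferred-construction pattern that ProgramInfoLoader above describes; the Mini* interfaces are simplified stand-ins, not the interfaces defined in this file:

// Simplified stand-in types (assumption): enough to show metadata-now, shader-later.
interface MiniProgramInfo { name: string; shaderSource: string; }
interface MiniProgramInfoLoader { name: string; cacheHint?: string; get(): MiniProgramInfo; }

const loader: MiniProgramInfoLoader = {
  name: 'example-op',
  cacheHint: '2,3,4',
  get: () => ({ name: 'example-op', shaderSource: 'void main() { /* generated on demand */ }' }),
};

// An artifact cache can be keyed on name + cacheHint without ever calling get(); the
// (potentially expensive) shader source is only built on a cache miss.
console.log(`${loader.name}[${loader.cacheHint}]`);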
All rights reserved.\n// Licensed under the MIT License.\n\nimport {assert} from '../../util';\n/**\n * Given a non RGBA shape calculate the R version\n * It is assumed that the dimensions are multiples of given channels\n * NOTE: it is always the last dim that gets packed.\n * @param unpackedShape original shape to create a packed version from\n */\nexport function getPackedShape(unpackedShape: readonly number[]): readonly number[] {\n const len = unpackedShape.length;\n return unpackedShape.slice(0, len - 1).concat(unpackedShape[len - 1] / 4);\n}\n\nexport async function repeatedTry(\n checkFn: () => boolean, delayFn = (_counter: number) => 0, maxCounter?: number): Promise {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n\n tryCount++;\n\n const nextBackoff = delayFn(tryCount);\n\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n\n tryFn();\n });\n}\n\n/**\n * Generates the function name from an input sampler name.\n * @param samplerName Name of the sampler.\n */\nexport function generateShaderFuncNameFromInputSamplerName(samplerName: string): string {\n assert(typeof samplerName !== 'undefined' && samplerName.length !== 0, () => 'empty string found for sampler name');\n return 'get' + samplerName.charAt(0).toUpperCase() + samplerName.slice(1);\n}\n\n/**\n * Generates the function name from an input sampler name at output coordinates.\n * @param samplerName Name of the sampler.\n */\nexport function generateShaderFuncNameFromInputSamplerNameAtOutCoords(samplerName: string): string {\n assert(typeof samplerName !== 'undefined' && samplerName.length !== 0, () => 'empty string found for sampler name');\n return 'get' + samplerName.charAt(0).toUpperCase() + samplerName.slice(1) + 'AtOutCoords';\n}\n\n/** Returns a new input shape (a copy) that has a squeezed logical shape. */\nexport function squeezeInputShape(inputShape: readonly number[], squeezedShape: number[]): number[] {\n // Deep copy.\n let newInputShape: number[] = JSON.parse(JSON.stringify(inputShape));\n newInputShape = squeezedShape;\n return newInputShape;\n}\n\n/** Returns a list of squeezed parameters for shader functions */\nexport function getSqueezedParams(params: string[], keptDims: number[]): string {\n return keptDims.map(d => params[d]).join(', ');\n}\n\n/** Returns the data type for different ranks. */\nexport function getCoordsDataType(rank: number): string {\n if (rank <= 1) {\n return 'int';\n } else if (rank === 2) {\n return 'ivec2';\n } else if (rank === 3) {\n return 'ivec3';\n } else if (rank === 4) {\n return 'ivec4';\n } else if (rank === 5) {\n return 'ivec5';\n } else if (rank === 6) {\n return 'ivec6';\n } else {\n throw Error(`GPU for rank ${rank} is not yet supported`);\n }\n}\n\nexport function getGlChannels(rank = 6): string[] {\n return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank);\n}\n", "// Copyright (c) Microsoft Corporation. 
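A quick illustrative sketch of how a tensor rank maps to a GLSL coordinate type and per-channel accessors, re-implemented locally (under the assumption that it mirrors getCoordsDataType and getGlChannels above) so the snippet stands alone:

// Valid for rank 0 to 6 only; bounds checks from the original are omitted here.
const coordsType = (rank: number): string =>
    rank <= 1 ? 'int' : ['ivec2', 'ivec3', 'ivec4', 'ivec5', 'ivec6'][rank - 2];
const channels = (name: string, rank: number): string[] =>
    ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank).map(d => `${name}.${d}`);

console.log(coordsType(3));     // 'ivec3'
console.log(channels('rc', 3)); // ['rc.x', 'rc.y', 'rc.z']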
All rights reserved.\n// Licensed under the MIT License.\n\nimport {getGlChannels} from '../utils';\n\nexport function getVecChannels(name: string, rank: number): string[] {\n return getGlChannels(rank).map(d => `${name}.${d}`);\n}\n\nexport function getChannels(name: string, rank: number): string[] {\n if (rank === 1) {\n return [name];\n }\n return getVecChannels(name, rank);\n}\n\nexport function unpackFromChannel(): string {\n return `\n float getChannel(vec4 frag, int dim) {\n int modCoord = imod(dim, 2);\n return modCoord == 0 ? frag.r : frag.g;\n }\n\n float getChannel(vec4 frag, vec2 innerDims) {\n vec2 modCoord = mod(innerDims, 2.);\n return modCoord.x == 0. ?\n (modCoord.y == 0. ? frag.r : frag.g) :\n (modCoord.y == 0. ? frag.b : frag.a);\n }\n `;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\nimport {getCoordsDataType} from '../utils';\n\nimport {getChannels} from './packing-utils';\n\nconst packProgramMetadata = {\n name: 'pack',\n inputNames: ['A'],\n inputTypes: [TextureType.unpackedReversed]\n};\n\nconst createPackProgramInfo = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfo => {\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const inputShape = input.dims;\n\n const inputRank = inputShape.length;\n // createTextureLayoutFromShape won't change output rank. Need to verify by running tests\n const outputRank = input.dims.length;\n\n const coordsDataType = getCoordsDataType(outputRank);\n const channels = getChannels('rc', outputRank);\n const setup = getSetup(outputRank, channels, inputShape[inputShape.length - 2], inputShape[inputShape.length - 1]);\n\n let reversedInputWH;\n if (inputRank === 0) {\n reversedInputWH = [1, 1];\n } else if (inputRank === 1) {\n reversedInputWH = [inputShape[0], 1];\n } else {\n reversedInputWH = [inputShape[outputRank - 1], inputShape[outputRank - 2]];\n }\n const outOfBoundsCondition = getOutOfBoundsCondition(outputRank, reversedInputWH, channels);\n const output = getOutput(inputShape, channels);\n\n const shaderSource = `\n void main() {\n ${coordsDataType} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n ${glsl.output} = vec4(0);\n } else {\n ${setup}\n\n ${glsl.output} = vec4(${output});\n }\n }\n `;\n return {\n ...packProgramMetadata,\n hasMain: true,\n output: {dims: input.dims, type: input.type, textureType: TextureType.packed},\n shaderSource\n };\n};\n\nexport const createPackProgramInfoLoader = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfoLoader =>\n ({...packProgramMetadata, get: () => createPackProgramInfo(handler, input)});\n\n/**\n * check output coordinate location and return false if it is outside input's width/height boundary\n */\nfunction getOutOfBoundsCondition(rank: number, shape: readonly number[], dims: string[]): string {\n if (rank === 0) {\n return 'false';\n }\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i - rank + 2]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n\n return cond;\n}\n\n/**\n * code snippet to sample input texture with output coordinates\n */\nfunction getOutput(shape: readonly number[], dims: string[]): string {\n const rank = shape.length;\n\n if (rank === 0) {\n 
return 'getA(), 0, 0, 0';\n }\n\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n\n const coord00 = 'r, c';\n const coord01 = 'r, cp1';\n const coord10 = 'rp1, c';\n const coord11 = 'rp1, cp1';\n let D = '';\n if (rank > 2) {\n for (let i = 0; i < rank - 2; ++i) {\n D = D + `${dims[i]},`;\n }\n }\n return `getA(${D}${coord00}),\n rEdge ? 0. : getA(${D}${coord10}),\n cEdge ? 0. : getA(${D}${coord01}),\n rEdge || cEdge ? 0. : getA(${D}${coord11})`;\n}\n\n/**\n * code snippet to setup 4 coordinates and edge conditions\n */\nfunction getSetup(rank: number, dims: string[], rows: number, cols: number): string {\n if (rank === 0 || rank === 1) {\n return '';\n }\n // rank >= 2 for width+height pack.\n else {\n const setup = `\n int r = ${dims[rank - 2]};\n int c = ${dims[rank - 1]};\n int rp1 = ${dims[rank - 2]} + 1;\n int cp1 = ${dims[rank - 1]} + 1;\n bool rEdge = rp1 >= ${cols};\n bool cEdge = cp1 >= ${rows};\n `;\n return setup;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {unpackFromChannel} from './packing-utils';\n\nconst createPackedReshape3DProgramMetadata = (outputShape3D: readonly number[]) =>\n ({name: 'Reshape (packed)', inputTypes: [TextureType.packed], inputNames: ['A'], cacheHint: `${outputShape3D}`});\n\nconst createPackedReshape3DProgramInfo =\n (handler: WebGLInferenceHandler, input3D: Tensor, metadata: ProgramMetadata, outputShape3D: readonly number[]):\n ProgramInfo => {\n const inputShape3D = input3D.dims as [number, number, number];\n const squeezedOutputShape = outputShape3D as [number, number, number];\n\n let mainLoop = '';\n for (let i = 0; i < 4; i++) {\n let outputCoords = '';\n switch (i) {\n case 0:\n outputCoords = 'outputCoords = rc;';\n break;\n case 1:\n outputCoords = 'outputCoords = ivec3(rc.x, rc.y+1, rc.z);';\n break;\n case 2:\n outputCoords = 'outputCoords = ivec3(rc.x, rc.y, rc.z+1);';\n break;\n case 3:\n outputCoords = 'outputCoords = ivec3(rc.x, rc.y+1, rc.z+1);';\n break;\n default:\n throw new Error();\n }\n\n mainLoop += `\n ${outputCoords}\n ${i > 0 ? 'if(outputCoords.y < rows && outputCoords.z < cols){' : ''}\n int flattenedIndex = getFlattenedIndex(outputCoords);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flattenedIndex);\n vec2 innerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] = getChannel(getA(inputRC.x, inputRC.y, inputRC.z), innerDims);\n\n ${i > 0 ? 
'}' : ''}\n `;\n }\n const glsl = getGlsl(handler.session.backend.glContext.version);\n\n const shaderSource = `\n ${getReshapedInputCoords(inputShape3D)}\n ${getFlattenedIndexFrom3D(squeezedOutputShape)}\n ${unpackFromChannel()}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.0);\n\n ivec3 outputCoords;\n int rows = ${squeezedOutputShape[2]};\n int cols = ${squeezedOutputShape[1]};\n\n ${mainLoop}\n ${glsl.output} = result;\n }\n `;\n\n return {\n ...metadata,\n output: {dims: squeezedOutputShape, type: input3D.type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const createPackedReshape3DProgramInfoLoader =\n (handler: WebGLInferenceHandler, input3D: Tensor, outputShape3D: readonly number[]): ProgramInfoLoader => {\n const metadata = createPackedReshape3DProgramMetadata(outputShape3D);\n return {...metadata, get: () => createPackedReshape3DProgramInfo(handler, input3D, metadata, outputShape3D)};\n };\n\nexport function processDims3D(shape: ArrayLike): [number, number, number] {\n if (shape.length === 0) {\n return [1, 1, 1];\n }\n // TODO: squeeze other shapes to 2D case\n let batch = 1;\n for (let i = 0; i < shape.length - 2; ++i) {\n batch *= shape[i];\n }\n return [batch, shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]];\n}\n\n// For packed reshape, we need to re-arrange texel data for output shape.\n// Our pack is designed to pack a 2x2 tile in last h and w dimension, so\n// for the reshaped new tensor, we just need to re-arrange the last h and\n// w dimension. For any shape that is not in 3D, i.e. [batch, W, H], we\n// first convert it to 3D by collapsing other dimension to batch dim, then\n// process with the last two dimensions.\n// Note: we only need the shape tensor to calculate output shape, so the\n// content in shape tensor is never uploaded to GPU. 
It is always kept in CPU.\n// TODO: optimize the algorithm -- in some cases, if the last two dims are\n// the same between input shape and output shape, the packed reshape can be\n// treated as no-op.\nexport function isReshapeCheap(dims: readonly number[], reshapedDims: readonly number[]) {\n let isCheapReshape = false;\n if (dims.length === 0 || reshapedDims.length === 0) { // scalar\n isCheapReshape = true;\n } else if (dims.length < 2 || reshapedDims.length < 2) { // 1D\n isCheapReshape = dims[dims.length - 1] === reshapedDims[reshapedDims.length - 1];\n } else { // 2D +\n isCheapReshape = dims[dims.length - 1] === reshapedDims[reshapedDims.length - 1] &&\n dims[dims.length - 2] === reshapedDims[reshapedDims.length - 2];\n }\n\n return isCheapReshape;\n}\n\nfunction getReshapedInputCoords(shape: [number, number, number]): string {\n const strides = ShapeUtil.computeStrides(shape);\n const coords = ['b', 'r', 'c'];\n const index = 'index';\n const coordsFromIndexSnippet = strides\n .map((stride, i) => {\n const line1 = `int ${coords[i]} = ${index} / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coords[i + 1]} = ${index} - ${coords[i]} * ${stride}` :\n `index -= ${coords[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(b, r, c);\n }\n `;\n}\n\nfunction getFlattenedIndexFrom3D(shape: [number, number, number]): string {\n const strides = ShapeUtil.computeStrides(shape);\n\n return `\n int getFlattenedIndex(ivec3 coords) {\n // reverse y, z order\n return coords.x * ${strides[0]} + coords.z * ${strides[1]} + coords.y;\n }\n`;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {TextureData, TextureType} from '../types';\n\nexport const encodeAsUint8 = (inferenceHandler: WebGLInferenceHandler, input: TextureData): TextureData => {\n const outputShape = input.shape;\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n /**\n * https://github.com/tensorflow/tfjs-core/blob/master/src/kernels/webgl/encode_float_gpu.ts\n */\n const shaderSource = `\n const float FLOAT_MAX = 1.70141184e38;\n const float FLOAT_MIN = 1.17549435e-38;\n\n bool isNaN(float val) {\n return (val < 1.0 || 0.0 < val || val == 0.0) ? 
false : true;\n }\n\n highp vec4 encodeAsUint8(highp float v) {\n if (isNaN(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n }\n\n void main() {\n float value = ${glsl.texture2D}(X,TexCoords).r;\n ${glsl.output} = encodeAsUint8(value);\n }`;\n const programInfo = {\n name: 'Uint8Encode',\n inputTypes: [TextureType.unpacked],\n inputNames: ['X'],\n output: {dims: outputShape, type: input.tensor.type, textureType: TextureType.downloadUint8AsFloat},\n shaderSource,\n hasMain: true\n };\n return inferenceHandler.executeProgram(programInfo, [input.tensor]);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\nimport {getCoordsDataType} from '../utils';\n\nimport {getChannels, unpackFromChannel} from './packing-utils';\n\nconst unpackProgramMetadata = {\n name: 'unpack',\n inputNames: ['A'],\n inputTypes: [TextureType.packed]\n};\n\nexport const createUnpackProgramInfo = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfo => {\n const rank = input.dims.length;\n\n const channels = getChannels('rc', rank);\n const innerDims = channels.slice(-2);\n const coordsDataType = getCoordsDataType(rank);\n const unpackChannel = unpackFromChannel();\n const isScalar = (input.dims.length === 0);\n const sourceCoords = isScalar ? '' : getSourceCoords(rank, channels);\n const coords = rank <= 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const shaderSource = `\n ${unpackChannel}\n void main() {\n ${coordsDataType} rc = getOutputCoords();\n\n // Sample the texture with the coords to get the rgba channel value.\n vec4 packedInput = getA(${sourceCoords});\n\n ${glsl.output} = vec4(getChannel(packedInput, ${coords}), 0, 0, 0);\n }\n `;\n\n return {\n ...unpackProgramMetadata,\n hasMain: true,\n output: {dims: input.dims, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n};\n\nexport const createUnpackProgramInfoLoader = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfoLoader =>\n ({...unpackProgramMetadata, get: () => createUnpackProgramInfo(handler, input)});\n\nfunction getSourceCoords(rank: number, dims: string[]): string {\n if (rank === 1) {\n return 'rc';\n }\n\n let coords = '';\n for (let i = 0; i < rank; i++) {\n coords += dims[i];\n if (i < rank - 1) {\n coords += ',';\n }\n }\n return coords;\n}\n", "// Copyright (c) Microsoft Corporation. 
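A CPU-side companion sketch for the encodeAsUint8 shader above, assuming the four channels it writes correspond to the little-endian IEEE-754 bytes of the value (which is how a downloadUint8AsFloat readback would be reinterpreted); this helper is illustrative and not part of the source:

// Assumption: the RGBA bytes read back from the Uint8Encode output are the raw float32 bits.
function bytesToFloat32(bytes: Uint8Array): number {
  if (bytes.length !== 4) {
    throw new Error('expected exactly 4 bytes');
  }
  return new DataView(bytes.buffer, bytes.byteOffset, 4).getFloat32(0, true); // little-endian
}

// Round trip on the CPU as a quick sanity check.
const encoded = new Uint8Array(new Float32Array([3.14]).buffer);
console.log(bytesToFloat32(encoded)); // ~3.14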
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../instrument';\n\nexport declare namespace Encoder {\n export interface DataTypeMap {\n float: Float32Array;\n byte: Uint8Array;\n int: Uint32Array;\n }\n export type DataType = keyof DataTypeMap;\n type DataArrayType = DataTypeMap[DataType];\n}\n\n/* eslint-disable @typescript-eslint/naming-convention */\nexport const enum EncoderUsage {\n Default = 0,\n UploadOnly,\n Download4BytesAsFloat32,\n}\n/* eslint-enable @typescript-eslint/naming-convention */\n\n/**\n * Abstraction for mapping data types to texture texlets\n * Encoding means how a Float32 is mapped to 1 or 4 channels for each texlet\n * Decoding means how a texlet's channels are mapped to a resulting Float32\n */\nexport interface DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize: number;\n encode(src: Encoder.DataArrayType, textureSize: number): Encoder.DataArrayType;\n allocate(size: number): Encoder.DataArrayType;\n decode(buffer: Encoder.DataArrayType, dataSize: number): Encoder.DataArrayType;\n}\n/**\n * WebGL2 data encoder\n * Uses R32F as the format for texlet\n */\nexport class RedFloat32DataEncoder implements DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize: number;\n constructor(gl: WebGL2RenderingContext, channels = 1) {\n if (channels === 1) {\n this.internalFormat = gl.R32F;\n this.format = gl.RED;\n this.textureType = gl.FLOAT;\n this.channelSize = channels;\n } else if (channels === 4) {\n this.internalFormat = gl.RGBA32F;\n this.format = gl.RGBA;\n this.textureType = gl.FLOAT;\n this.channelSize = channels;\n } else {\n throw new Error(`Invalid number of channels: ${channels}`);\n }\n }\n encode(src: Encoder.DataArrayType, textureSize: number): Encoder.DataArrayType {\n let result: Float32Array;\n let source: Float32Array;\n if (src.constructor !== Float32Array) {\n Logger.warning('Encoder', 'data was not of type Float32; creating new Float32Array');\n source = new Float32Array(src);\n }\n if (textureSize * this.channelSize > src.length) {\n Logger.warning('Encoder', 'Source data too small. 
Allocating larger array');\n source = src as Float32Array;\n result = this.allocate(textureSize * this.channelSize) as Float32Array;\n source.forEach((v, i) => result[i] = v);\n } else {\n source = src as Float32Array;\n result = source;\n }\n return result;\n }\n allocate(size: number): Encoder.DataArrayType {\n return new Float32Array(size * 4);\n }\n decode(buffer: Encoder.DataArrayType, dataSize: number): Float32Array {\n if (this.channelSize === 1) {\n const filteredData = (buffer as Float32Array).filter((_value, index) => index % 4 === 0).subarray(0, dataSize);\n return filteredData;\n }\n return buffer.subarray(0, dataSize) as Float32Array;\n }\n}\n/**\n * Data encoder for WebGL 1 with support for floating point texture\n */\nexport class RGBAFloatDataEncoder implements DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize: number;\n constructor(gl: WebGLRenderingContext, channels = 1, textureType?: number) {\n if (channels !== 1 && channels !== 4) {\n throw new Error(`Invalid number of channels: ${channels}`);\n }\n this.internalFormat = gl.RGBA;\n this.format = gl.RGBA;\n this.channelSize = channels;\n this.textureType = textureType || gl.FLOAT;\n }\n encode(src: Float32Array, textureSize: number): Encoder.DataArrayType {\n let dest = src;\n if (this.channelSize === 1) {\n Logger.verbose('Encoder', 'Exploding into a larger array');\n dest = this.allocate(textureSize) as Float32Array;\n src.forEach((v, i) => dest[i * 4] = v);\n }\n return dest;\n }\n allocate(size: number): Encoder.DataArrayType {\n return new Float32Array(size * 4);\n }\n decode(buffer: Encoder.DataArrayType, dataSize: number): Float32Array {\n if (this.channelSize === 1) {\n const filteredData = (buffer as Float32Array).filter((_value, index) => index % 4 === 0).subarray(0, dataSize);\n return filteredData;\n }\n return buffer.subarray(0, dataSize) as Float32Array;\n }\n}\n\nexport class Uint8DataEncoder implements DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize = 4;\n constructor(gl: WebGLRenderingContext, channels = 1) {\n if (channels === 1) {\n this.internalFormat = gl.ALPHA;\n this.format = gl.ALPHA; // not tested\n this.textureType = gl.UNSIGNED_BYTE;\n this.channelSize = channels;\n } else if (channels === 4) {\n this.internalFormat = gl.RGBA;\n this.format = gl.RGBA;\n this.textureType = gl.UNSIGNED_BYTE;\n this.channelSize = channels;\n } else {\n throw new Error(`Invalid number of channels: ${channels}`);\n }\n }\n encode(src: Uint8Array, _textureSize: number): Encoder.DataArrayType {\n return new Uint8Array(src.buffer, src.byteOffset, src.byteLength);\n }\n allocate(size: number): Encoder.DataArrayType {\n return new Uint8Array(size * this.channelSize);\n }\n decode(buffer: Encoder.DataArrayType, dataSize: number): Uint8Array {\n if (buffer instanceof Uint8Array) {\n return buffer.subarray(0, dataSize);\n }\n throw new Error(`Invalid array type: ${buffer.constructor}`);\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
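A small self-contained sketch of the single-channel decode path in RedFloat32DataEncoder above: readback arrives as RGBA texels, and only the red component of each texel carries data. The sample values are illustrative:

// Mirrors RedFloat32DataEncoder.decode for channelSize === 1: keep every 4th float, then trim.
function decodeRedChannel(buffer: Float32Array, dataSize: number): Float32Array {
  return buffer.filter((_value, index) => index % 4 === 0).subarray(0, dataSize);
}

const readback = new Float32Array([1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0]);
console.log(Array.from(decodeRedChannel(readback, 3))); // [1, 2, 3]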
All rights reserved.\n// Licensed under the MIT License.\n\nimport {ShapeUtil} from '../../util';\n\nimport {TextureLayoutStrategy, WidthHeightPrefs} from './texture-layout-strategy';\nimport {TextureLayout, TextureType} from './types';\n\nexport const createTextureLayoutFromTextureType =\n (textureLayoutStrategy: TextureLayoutStrategy, shape: readonly number[],\n textureType: TextureType): TextureLayout => {\n const channel = (textureType === TextureType.unpacked || textureType === TextureType.unpackedReversed) ? 1 : 4;\n const isPacked = textureType === TextureType.packed;\n const reverseWH = (textureType === TextureType.unpackedReversed || textureType === TextureType.packed);\n const breakAxis = textureType === TextureType.packedLastDimension ? shape.length - 1 : undefined;\n const unpackedShape = textureType === TextureType.packedLastDimension ?\n shape.map((d, i) => i === shape.length - 1 ? d * 4 : d) :\n undefined;\n return createTextureLayoutFromShape(\n textureLayoutStrategy, shape, channel, unpackedShape, {isPacked, reverseWH, breakAxis});\n };\n\nexport const calculateTextureWidthAndHeight =\n (textureLayoutStrategy: TextureLayoutStrategy, shape: readonly number[], textureType: TextureType):\n [number, number] => {\n const layout = createTextureLayoutFromTextureType(textureLayoutStrategy, shape, textureType);\n return [layout.width, layout.height];\n };\n\n/**\n * Create a TextureLayout object from shape.\n */\nexport const createTextureLayoutFromShape =\n (textureLayoutStrategy: TextureLayoutStrategy, shape: readonly number[], channels: 1|4 = 1,\n unpackedShape?: readonly number[], prefs?: WidthHeightPrefs): TextureLayout => {\n const isPacked = !!(prefs && prefs.isPacked);\n const [width, height] = textureLayoutStrategy.computeTextureWH(isPacked ? unpackedShape || shape : shape, prefs);\n const rank = shape.length;\n let inferredDims = shape.slice(0);\n if (rank === 0) {\n inferredDims = [1];\n }\n if (channels === 1) {\n // unpackedShape will take `shape` and not `inferredDims` so as to create a scalar Tensor if need be\n unpackedShape = shape;\n } else if (isPacked) {\n if (channels !== 4) {\n throw new Error('a packed texture must be 4-channel');\n }\n unpackedShape = shape;\n if (rank > 0) {\n inferredDims[rank - 1] = Math.ceil(inferredDims[rank - 1] / 2);\n }\n if (rank > 1) {\n inferredDims[rank - 2] = Math.ceil(inferredDims[rank - 2] / 2);\n }\n } else if (!unpackedShape) {\n throw new Error('Unpacked shape is needed when using channels > 1');\n }\n return {\n width,\n height,\n channels,\n isPacked,\n shape: inferredDims,\n strides: ShapeUtil.computeStrides(inferredDims),\n unpackedShape,\n reversedWH: (prefs && prefs.reverseWH)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceHandler} from '../../backend';\nimport {Logger} from '../../instrument';\nimport {Tensor} from '../../tensor';\nimport {ShapeUtil} from '../../util';\n\nimport {createPackProgramInfoLoader} from './ops/pack';\nimport {createPackedReshape3DProgramInfoLoader, isReshapeCheap, processDims3D} from './ops/reshape-packed';\nimport {encodeAsUint8} from './ops/uint8-encode';\nimport {createUnpackProgramInfoLoader} from './ops/unpack';\nimport {WebGLSessionHandler} from './session-handler';\nimport {EncoderUsage} from './texture-data-encoder';\nimport {calculateTextureWidthAndHeight, createTextureLayoutFromShape, createTextureLayoutFromTextureType} from './texture-layout';\nimport {Artifact, ProgramInfo, ProgramInfoLoader, TextureData, TextureLayout, TextureType} from './types';\n\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo|ProgramInfoLoader, inputTextureDatas: TextureData[]): string => {\n const inputs =\n inputTextureDatas.map(texture => `${texture.unpackedShape.join(',')};${texture.width}x${texture.height}`)\n .join('_');\n let key = programInfo.name;\n if (programInfo.cacheHint) {\n key += '[' + programInfo.cacheHint + ']';\n }\n key += ':' + inputs;\n return key;\n };\n\nexport class WebGLInferenceHandler implements InferenceHandler {\n private packedTextureDataCache: Map;\n private unpackedTextureDataCache: Map;\n constructor(public session: WebGLSessionHandler) {\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache = new Map();\n }\n\n /**\n * @returns [width, height]\n */\n calculateTextureWidthAndHeight(shape: readonly number[], textureType: TextureType): [number, number] {\n return calculateTextureWidthAndHeight(this.session.layoutStrategy, shape, textureType);\n }\n\n executeProgram(program: ProgramInfo|ProgramInfoLoader, inputs: readonly Tensor[]): TextureData {\n if (inputs.length < program.inputNames.length) {\n throw new Error(`Input size mustn't be less than ${program.inputNames.length}.`);\n }\n if (program.inputNames.length !== program.inputTypes.length) {\n throw new Error('input names size does not match input types');\n }\n\n // create texture info for input\n const inputTextureDatas: TextureData[] = [];\n for (let i = 0; i < program.inputNames.length; ++i) {\n inputTextureDatas[i] = this.getOrCreateTextureData(inputs[i], program.inputTypes[i]);\n }\n\n const key = getProgramInfoUniqueKey(program, inputTextureDatas);\n let artifact = this.session.programManager.getArtifact(key);\n const programInfo = artifact ?\n artifact.programInfo :\n (typeof (program as ProgramInfoLoader).get === 'function' ? 
(program as ProgramInfoLoader).get() :\n (program as ProgramInfo));\n\n // create texture info for output\n const outputTextureLayout = createTextureLayoutFromTextureType(\n this.session.layoutStrategy, programInfo.output.dims, programInfo.output.textureType);\n const outputTextureData = this.createTextureData(outputTextureLayout, programInfo.output.type);\n\n if (!artifact) {\n artifact = this.session.programManager.build(programInfo, inputTextureDatas, outputTextureData);\n this.session.programManager.setArtifact(key, artifact);\n }\n\n this.runProgram(artifact, inputTextureDatas, outputTextureData);\n return outputTextureData;\n }\n\n run(program: ProgramInfoLoader, inputs: readonly Tensor[]): Tensor {\n const outputTextureData = this.executeProgram(program, inputs);\n return outputTextureData.tensor;\n }\n\n private runProgram(artifact: Artifact, inputs: TextureData[], output: TextureData): void {\n // input should match\n for (let i = 0; i < inputs.length; ++i) {\n if (!!inputs[i].isPacked !== (artifact.programInfo.inputTypes[i] === TextureType.packed)) {\n throw new Error(`input[${i}] property packed inconsistent`);\n }\n }\n\n // output should match\n if (!!output.isPacked !== (artifact.programInfo.output.textureType === TextureType.packed)) {\n throw new Error('output property packed inconsistent');\n }\n\n this.session.programManager.run(artifact, inputs, output);\n }\n\n /**\n * Create a TextureData object from a tensor.\n * Usage = EncoderUsage.UploadOnly.\n * If a related texture data is found in cache, returns it;\n * Otherwise:\n * Creates a new texture layout if not provided;\n * Creates WebGLTexture with the layout;\n * Upload tensor data to the texture;\n * Creates a texture data object associated with the given tensor.\n * @param tensor the tensor with data to upload\n */\n private getOrCreateTextureData(tensor: Tensor, textureType: TextureType) {\n let td = this.getTextureData(tensor.dataId, textureType === TextureType.packed);\n\n if (!td) {\n // check if we have texture data in different type\n td = this.getTextureData(tensor.dataId, textureType !== TextureType.packed);\n if (td) {\n if (textureType === TextureType.packed) {\n return this.pack(td);\n } else {\n return this.unpack(td);\n }\n }\n }\n\n if (!td) {\n const layout = createTextureLayoutFromTextureType(this.session.layoutStrategy, tensor.dims, textureType);\n\n if (textureType === TextureType.packedLastDimension) {\n const group = 1;\n const channels = 4;\n const shape = tensor.dims;\n if (shape.length === 4) {\n // pre-processing for kernel data of Conv.\n //\n // TODO: currently this is a hacking to overwrite Conv's weight. The correct way to do this should be:\n // 1. implement texture based const-folding\n // 2. create a WebGL program \"preprocessConvWeight\" to do the same work as below\n // 3. 
run the program before dotProduct.\n //\n const adjustedKernelShape = [shape[0], Math.ceil((shape[1] * shape[2] * shape[3]) / channels)];\n const adjustedLayout =\n createTextureLayoutFromTextureType(this.session.layoutStrategy, adjustedKernelShape, textureType);\n let buffer = tensor.numberData;\n if (shape[1] * shape[2] * shape[3] % channels !== 0) {\n const numFeatureMaps = shape[0];\n const oldRowSize = shape[1] * shape[2] * shape[3];\n const newRowSize = Math.ceil(oldRowSize * group / channels) * channels;\n const newSize = numFeatureMaps * newRowSize;\n buffer = new Float32Array(newSize);\n for (let f = 0; f < numFeatureMaps; ++f) {\n const oldOffset = f * oldRowSize;\n const newOffset = f * newRowSize + f % group * oldRowSize;\n buffer.set(tensor.numberData.subarray(oldOffset, oldOffset + oldRowSize), newOffset);\n }\n }\n return this.createTextureData(adjustedLayout, tensor.type, buffer, tensor, EncoderUsage.UploadOnly);\n }\n }\n\n if (textureType === TextureType.packed) {\n const unpackedTextureLayout =\n createTextureLayoutFromShape(this.session.layoutStrategy, tensor.dims, 1, [], {reverseWH: true});\n const unpackedTextureData = this.createTextureData(\n unpackedTextureLayout, tensor.type, tensor.numberData, tensor, EncoderUsage.UploadOnly);\n td = this.pack(unpackedTextureData);\n } else {\n td = this.createTextureData(layout, tensor.type, tensor.numberData, tensor, EncoderUsage.UploadOnly);\n }\n }\n return td;\n }\n\n /**\n * Create a TextureData object using the given data and bind to the given tensor.\n * Usage = EncoderUsage.UploadOnly.\n * NOTE: this function is a hack for Conv implementation. should remove this function, after rewriting Conv\n * implementation by Graph.Transformer\n * @param dataType the tensor data type\n * @param data the actual data to upload\n * @param tensor the tensor to bind. tensor's data is ignored.\n */\n createTextureDataFromLayoutBindTensor(\n layout: TextureLayout, dataType: Tensor.DataType, data: Tensor.NumberType, tensor: Tensor): TextureData {\n return this.createTextureData(layout, dataType, data, tensor, EncoderUsage.UploadOnly);\n }\n\n private createTextureData(\n layout: TextureLayout, dataType: Tensor.DataType, data?: Tensor.NumberType, tensor?: Tensor,\n usage?: EncoderUsage): TextureData {\n Logger.verbose('InferenceHandler', `Creating TextureData: layout:[${JSON.stringify(layout)}]`);\n const texture = this.session.textureManager.createTextureFromLayout(dataType, layout, data, usage);\n return this.createTextureDataFromTexture(layout, dataType, texture, tensor);\n }\n\n reshapeUnpacked(input: Tensor, reshapedDims: readonly number[]): Tensor {\n const inputTD = this.getOrCreateTextureData(input, TextureType.unpacked);\n const newTextureLayout: TextureLayout = {\n channels: inputTD.channels,\n height: inputTD.height,\n width: inputTD.width,\n // handle reshaping into scalar Tensors\n shape: reshapedDims.length !== 0 ? 
reshapedDims : [1],\n strides: ShapeUtil.computeStrides(reshapedDims),\n unpackedShape: reshapedDims,\n };\n const newTextureData = this.createTextureDataFromTexture(newTextureLayout, input.type, inputTD.texture);\n return newTextureData.tensor;\n }\n\n reshapePacked(input: Tensor, reshapedDims: readonly number[]): Tensor {\n const inputTD = this.getOrCreateTextureData(input, TextureType.packed);\n\n // check if the reshape is 'cheap'\n if (isReshapeCheap(input.dims, reshapedDims)) {\n const newTextureLayout: TextureLayout = {\n channels: inputTD.channels,\n height: inputTD.height,\n width: inputTD.width,\n // handle reshaping into scalar Tensors\n shape: reshapedDims.length !== 0 ? reshapedDims : [1],\n strides: ShapeUtil.computeStrides(reshapedDims),\n unpackedShape: reshapedDims,\n isPacked: true\n };\n const newTextureData = this.createTextureDataFromTexture(newTextureLayout, input.type, inputTD.texture);\n return newTextureData.tensor;\n }\n\n const squeezedInputShape = processDims3D(input.dims);\n const squeezedOutputShape = processDims3D(reshapedDims);\n\n const squeezedInputTensor = this.reshapePacked(input, squeezedInputShape);\n const squeezedOutputTensor = this.run(\n createPackedReshape3DProgramInfoLoader(this, squeezedInputTensor, squeezedOutputShape), [squeezedInputTensor]);\n const outputTensor = this.reshapePacked(squeezedOutputTensor, reshapedDims);\n return outputTensor;\n }\n\n cast(input: Tensor, type: Tensor.DataType): Tensor {\n const inputTD = this.getOrCreateTextureData(input, TextureType.unpacked);\n const newTextureData = this.createTextureDataFromTexture(inputTD as TextureLayout, type, inputTD.texture);\n return newTextureData.tensor;\n }\n\n private createTextureDataFromTexture(\n layout: TextureLayout, dataType: Tensor.DataType, texture: WebGLTexture, tensor?: Tensor, tensorId?: Tensor.Id) {\n const textureData: TextureData = {\n ...layout,\n tensor: tensor ||\n new Tensor(\n layout.unpackedShape, dataType, (_id: Tensor.Id) => this.readTexture(textureData),\n async (_id: Tensor.Id) => this.readTextureAsync(textureData), undefined, tensorId),\n texture\n };\n this.setTextureData(textureData.tensor.dataId, textureData, layout.isPacked);\n return textureData;\n }\n\n private getTextureData(tensorId: Tensor.Id, isPacked = false): TextureData|undefined {\n return this.session.isInitializer(tensorId) ? this.session.getTextureData(tensorId, isPacked) :\n isPacked ? this.packedTextureDataCache.get(tensorId) :\n this.unpackedTextureDataCache.get(tensorId);\n }\n setTextureData(tensorId: Tensor.Id, td: TextureData, isPacked = false): void {\n if (this.session.isInitializer(tensorId)) {\n this.session.setTextureData(tensorId, td, isPacked);\n } else {\n (isPacked ? 
this.packedTextureDataCache : this.unpackedTextureDataCache).set(tensorId, td);\n }\n }\n isTextureLayoutCached(tensor: Tensor, isPacked = false): boolean {\n return !!this.getTextureData(tensor.dataId, isPacked);\n }\n\n dispose(): void {\n this.session.textureManager.clearActiveTextures();\n this.packedTextureDataCache.forEach(td => this.session.textureManager.releaseTexture(td));\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache.forEach(td => this.session.textureManager.releaseTexture(td));\n this.unpackedTextureDataCache = new Map();\n }\n\n readTexture(textureData: TextureData): Tensor.NumberType {\n if (textureData.isPacked) {\n return this.readTexture(this.unpack(textureData));\n }\n if (!this.session.backend.glContext.isFloat32DownloadSupported) {\n return this.session.textureManager.readUint8TextureAsFloat(encodeAsUint8(this, textureData));\n }\n return this.session.textureManager.readTexture(textureData, textureData.tensor.type, textureData.channels);\n }\n\n async readTextureAsync(textureData: TextureData): Promise {\n if (textureData.isPacked) {\n return this.readTextureAsync(this.unpack(textureData));\n }\n if (!this.session.backend.glContext.isFloat32DownloadSupported) {\n return this.session.textureManager.readUint8TextureAsFloat(encodeAsUint8(this, textureData));\n }\n return this.session.textureManager.readTextureAsync(textureData, textureData.tensor.type, textureData.channels);\n }\n\n pack(input: TextureData): TextureData {\n const outputTextureData = this.executeProgram(createPackProgramInfoLoader(this, input.tensor), [input.tensor]);\n return outputTextureData;\n }\n\n unpack(input: TextureData): TextureData {\n const outputTextureData = this.executeProgram(createUnpackProgramInfoLoader(this, input.tensor), [input.tensor]);\n return outputTextureData;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface BatchNormalizationAttributes extends AttributeWithCacheKey {\n epsilon: number;\n momentum: number;\n spatial: number;\n}\n\nconst batchNormalizationProgramMetadata = {\n name: 'BatchNormalization',\n inputNames: ['A', 'Scale', 'B', 'Mean', 'Variance'],\n inputTypes:\n [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked, TextureType.unpacked, TextureType.unpacked]\n};\n\nexport const batchNormalization: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: BatchNormalizationAttributes): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...batchNormalizationProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createBatchNormalizationProgramInfo(inferenceHandler, inputs, attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseBatchNormalizationAttributes: OperatorInitialization =\n (node: Graph.Node): BatchNormalizationAttributes => {\n const epsilon = node.attributes.getFloat('epsilon', 1e-5);\n const momentum = node.attributes.getFloat('momentum', 0.9);\n const spatial = node.attributes.getInt('spatial', 1);\n return createAttributeWithCacheKey({epsilon, momentum, spatial});\n };\n\nconst createBatchNormalizationProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: BatchNormalizationAttributes):\n ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const rank = inputs[0].dims.length;\n const [scaleWidth, scaleHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(inputs[1].dims, TextureType.unpacked);\n const shaderSource = `\n float process(int[${rank}] indices) {\n vec2 position = offsetToCoords(indices[1], ${scaleWidth}, ${scaleHeight});\n float scale = getColorAsFloat(${glsl.texture2D}(Scale, position));\n float mean = getColorAsFloat(${glsl.texture2D}(Mean, position));\n float variance = getColorAsFloat(${glsl.texture2D}(Variance, position));\n float b = getColorAsFloat(${glsl.texture2D}(B, position));\n\n return scale * ( (_A(indices) - mean) / sqrt(variance + float(${attributes.epsilon})) ) + b;\n }`;\n return {\n ...batchNormalizationProgramMetadata,\n output: {dims: inputs[0].dims, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs.');\n }\n\n const X = inputs[0];\n const scale = inputs[1];\n const B = inputs[2];\n const mean = inputs[3];\n const var_ = inputs[4];\n\n // input should atleast have three dimensions - N,C,dim1,...,dimn\n // other inputs can have only one dimensions\n if (X.dims.length < 3 || scale.dims.length !== 1 || B.dims.length !== 1 || mean.dims.length !== 1 ||\n var_.dims.length !== 1) {\n throw new Error('invalid input shape.');\n }\n if (scale.dims[0] !== X.dims[1] || B.dims[0] !== X.dims[1] || mean.dims[0] !== X.dims[1] ||\n var_.dims[0] !== X.dims[1]) {\n throw new Error('invalid input 
shape.');\n }\n if ((X.type !== 'float32' && X.type !== 'float64') || (scale.type !== 'float32' && scale.type !== 'float64') ||\n (B.type !== 'float32' && B.type !== 'float64') || (mean.type !== 'float32' && mean.type !== 'float64') ||\n (var_.type !== 'float32' && var_.type !== 'float64')) {\n throw new Error('invalid input tensor types.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {ProgramInfo, TextureLayout} from './types';\nimport {WebGLContext} from './webgl-context';\n\n/* eslint-disable @typescript-eslint/naming-convention */\nexport enum FunctionType {\n ValueBased,\n Positional\n}\nexport interface GlslFunction {\n body: string;\n name: string;\n type: T;\n}\nexport type GlslValueFunction = GlslFunction;\nexport interface GlslPositionalFunction extends GlslFunction {\n inputShape: readonly number[];\n outputShape: readonly number[];\n}\n\nexport class GlslContext {\n constructor(\n public glContext: WebGLContext, public programInfo: ProgramInfo, public inputTextureLayouts: TextureLayout[],\n public outputTextureLayout: TextureLayout) {}\n}\nexport abstract class GlslLib {\n constructor(public context: GlslContext) {}\n abstract getFunctions(): {[name: string]: GlslLibRoutine};\n abstract getCustomTypes(): {[name: string]: string};\n}\n\n// abstraction to represent a GLSL library routine and it's dependencies\nexport class GlslLibRoutine {\n constructor(public routineBody: string, public dependencies?: string[]) {}\n}\n\n// abstraction to represent a GLSL library routine and it's dependencies AS GRAPH Nodes\n// this level of abstraction is used to topologically sort routines before fragment shade inclusion\nexport class GlslLibRoutineNode {\n dependencies: GlslLibRoutineNode[];\n routineBody: string;\n constructor(public name: string, routineBody?: string, dependencies?: GlslLibRoutineNode[]) {\n if (dependencies) {\n this.dependencies = dependencies;\n } else {\n this.dependencies = [];\n }\n\n if (routineBody) {\n this.routineBody = routineBody;\n }\n }\n addDependency(node: GlslLibRoutineNode) {\n if (node) {\n this.dependencies.push(node);\n }\n }\n}\n\n// topologically sort GLSL library routines (graph nodes abstraction) before shader script inclusion\nexport class TopologicalSortGlslRoutines {\n static returnOrderedNodes(nodes: GlslLibRoutineNode[]): GlslLibRoutineNode[] {\n if (!nodes || nodes.length === 0) {\n return [];\n }\n\n if (nodes.length === 1) {\n return nodes;\n }\n\n const cycleCheck = new Set();\n const alreadyTraversed = new Set();\n const result = new Array();\n\n this.createOrderedNodes(nodes, cycleCheck, alreadyTraversed, result);\n return result;\n }\n\n private static createOrderedNodes(\n graphNodes: GlslLibRoutineNode[], cycleCheck: Set, alreadyTraversed: Set,\n result: GlslLibRoutineNode[]) {\n for (let i = 0; i < graphNodes.length; ++i) {\n this.dfsTraverse(graphNodes[i], cycleCheck, alreadyTraversed, result);\n }\n }\n\n private static dfsTraverse(\n root: GlslLibRoutineNode, cycleCheck: Set, alreadyTraversed: Set, result: GlslLibRoutineNode[]) {\n // if this root has already been traversed return\n if (!root || alreadyTraversed.has(root.name)) {\n return;\n }\n\n // cyclic dependency has been detected\n if (cycleCheck.has(root.name)) {\n throw new Error('Cyclic dependency detected. 
Can\\'t topologically sort routines needed for shader.');\n }\n\n // hold this node to detect cycles if any\n cycleCheck.add(root.name);\n\n // traverse children in a dfs fashion\n const dependencies = root.dependencies;\n if (dependencies && dependencies.length > 0) {\n for (let i = 0; i < dependencies.length; ++i) {\n this.dfsTraverse(dependencies[i], cycleCheck, alreadyTraversed, result);\n }\n }\n\n // add to result holder\n result.push(root);\n\n // mark this node as traversed so that we don't traverse from this again\n alreadyTraversed.add(root.name);\n\n // release the hold\n cycleCheck.delete(root.name);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {BroadcastUtil, ShapeUtil} from '../../../util';\nimport {FunctionType, GlslValueFunction} from '../glsl-definitions';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\n\nexport function glslAdd(): GlslValueFunction {\n const name = 'add_';\n const body = `\n float ${name}(float a, float b) {\n return a + b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 + v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslDiv(): GlslValueFunction {\n const name = 'div_';\n const body = `\n float ${name}(float a, float b) {\n return a / b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 / v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslMul(): GlslValueFunction {\n const name = 'mul_';\n const body = `\n float ${name}(float a, float b) {\n return a * b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 * v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSub(): GlslValueFunction {\n const name = 'sub_';\n const body = `\n float ${name}(float a, float b) {\n return a - b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 - v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslEqual(): GlslValueFunction {\n const name = 'equal_';\n const body = `\n float ${name}(float a, float b) {\n return float(a == b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4(equal(v1, v2));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslGreater(): GlslValueFunction {\n const name = 'greater_';\n const body = `\n float ${name}(float a, float b) {\n return float(a > b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4( v1.r > v2.r ,\n v1.g > v2.g,\n v1.b > v2.b,\n v1.a > v2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslLess(): GlslValueFunction {\n const name = 'less_';\n const body = `\n float ${name}(float a, float b) {\n return float(a < b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4( v1.r < v2.r ,\n v1.g < v2.g,\n v1.b < v2.b,\n v1.a < v2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslAnd(): GlslValueFunction {\n const name = 'and_';\n const body = `\n float ${name}(float a, float b) {\n return float( bool(a) && bool(b) );\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n bvec4 b1 = bvec4(v1);\n bvec4 b2 = bvec4(v2);\n return vec4( b1.r && b2.r ,\n b1.g && b2.g,\n b1.b && b2.b,\n b1.a && b2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslOr(): GlslValueFunction {\n const name = 'or_';\n const 
body = `\n float ${name}(float a, float b) {\n return float( bool(a) || bool(b) );\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n bvec4 b1 = bvec4(v1);\n bvec4 b2 = bvec4(v2);\n return vec4( b1.r || b2.r ,\n b1.g || b2.g,\n b1.b || b2.b,\n b1.a || b2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslXor(): GlslValueFunction {\n const name = 'xor_';\n const body = `\n float ${name}(float a, float b) {\n return float( bool(a) ^^ bool(b) );\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n bvec4 b1 = bvec4(v1);\n bvec4 b2 = bvec4(v2);\n return vec4( b1.r ^^ b2.r ,\n b1.g ^^ b2.g,\n b1.b ^^ b2.b,\n b1.a ^^ b2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslPow(): GlslValueFunction {\n return glslBuiltinBinary('pow');\n}\nexport function glslPRelu(): GlslValueFunction {\n const name = 'prelu_';\n const body = `\n float ${name}(float a, float b) {\n return a < 0.0 ? a * b: a;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4(\n v1.r < 0.0 ? v1.r * v2.r: v1.r,\n v1.g < 0.0 ? v1.g * v2.g: v1.g,\n v1.b < 0.0 ? v1.b * v2.b: v1.b,\n v1.a < 0.0 ? v1.a * v2.a: v1.a\n );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\n\nfunction glslBuiltinBinary(fname: string): GlslValueFunction {\n const name = `${fname}_`;\n const body = `\n float ${name}(float a, float b) {\n return ${fname}(a, b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return ${fname}(v1, v2);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\n\nconst createBinaryProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], glslFunc: GlslValueFunction,\n outputTensorType: Tensor.DataType = inputs[0].type, cacheKey?: string): ProgramInfoLoader => {\n const textureType = handler.session.pack ? TextureType.packed : TextureType.unpacked;\n return {\n name: glslFunc.name,\n inputNames: ['A', 'B'],\n inputTypes: [textureType, textureType],\n cacheHint: cacheKey,\n get: () => createBinaryProgramInfo(handler, inputs, glslFunc, outputTensorType)\n };\n };\n\nconst createBinaryProgramInfo =\n (handler: WebGLInferenceHandler, inputs: Tensor[], glslFunc: GlslValueFunction,\n outputTensorType: Tensor.DataType = inputs[0].type): ProgramInfo => {\n const textureType = handler.session.pack ? TextureType.packed : TextureType.unpacked;\n const isBroadcast = !ShapeUtil.areEqual(inputs[0].dims, inputs[1].dims);\n let outputShape = inputs[0].dims;\n\n const usePackedTexture = handler.session.pack;\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(inputs[0].dims, inputs[1].dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n const outputRank = outputShape.length;\n const aRank = inputs[0].dims.length !== 0 ? inputs[0].dims.length : 1;\n const bRank = inputs[1].dims.length !== 0 ? inputs[1].dims.length : 1;\n const aBcast = inputs[0].dims.length !== 0 ? 'bcastIndices_A(indices, aindices);' : 'aindices[0] = 0;';\n const bBcast = inputs[1].dims.length !== 0 ? 'bcastIndices_B(indices, bindices);' : 'bindices[0] = 0;';\n\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const shaderSource = usePackedTexture ? 
`\n ${glslFunc.body}\n void main() {\n vec4 a = getAAtOutCoords();\n vec4 b = getBAtOutCoords();\n vec4 result = ${glslFunc.name}(a, b);\n ${glsl.output} = result;\n }` :\n `\n ${glslFunc.body}\n float process(int indices[${outputRank}]) {\n int aindices[${aRank}];\n int bindices[${bRank}];\n ${aBcast}\n ${bBcast}\n return ${glslFunc.name}(_A(aindices), _B(bindices));\n }`;\n\n return {\n name: glslFunc.name,\n inputNames: ['A', 'B'],\n inputTypes: [textureType, textureType],\n output: {dims: outputShape, type: outputTensorType, textureType},\n shaderSource,\n hasMain: usePackedTexture\n };\n }\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const shaderSource = `\n ${glslFunc.body}\n void main() {\n vec4 v1 = ${glsl.texture2D}(A, TexCoords);\n vec4 v2 = ${glsl.texture2D}(B, TexCoords);\n vec4 result = ${glslFunc.name}(v1, v2);\n ${glsl.output} = result;\n }\n `;\n\n return {\n name: glslFunc.name,\n inputNames: ['A', 'B'],\n inputTypes: [textureType, textureType],\n output: {dims: inputs[0].dims, type: outputTensorType, textureType},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const add = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslAdd()), inputs)];\n\nexport const and = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslAnd(), 'bool'), inputs)];\n\nexport const div = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslDiv()), inputs)];\n\nexport const equal = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslEqual(), 'bool'), inputs)];\n\nexport const greater = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslGreater(), 'bool'), inputs)];\n\nexport const less = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslLess(), 'bool'), inputs)];\n\nexport const mul = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslMul()), inputs)];\n\nexport const or = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslOr(), 'bool'), inputs)];\n\nexport const pow = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslPow()), inputs)];\n\nexport const pRelu = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslPRelu()), inputs)];\n\nexport const sub = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslSub()), inputs)];\n\nexport const xor = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslXor(), 'bool'), inputs)];\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ProtoUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const cast: OperatorImplementation =\n (handler: WebGLInferenceHandler, inputs: Tensor[], to: Tensor.DataType): Tensor[] => {\n validateInputs(inputs);\n return [handler.cast(inputs[0], to)];\n };\n\nexport const parseCastAttributes: OperatorInitialization = (node: Graph.Node): Tensor.DataType =>\n ProtoUtil.tensorDataTypeFromProto(node.attributes.getInt('to'));\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Cast requires 1 input.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('Invalid input type.');\n }\n};", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\nimport {getCoordsDataType, getGlChannels} from '../utils';\n\nimport {ConcatAttributes} from './concat';\nimport {getChannels, unpackFromChannel} from './packing-utils';\n\nconst createPackedConcatProgramMetadata = (inputCount: number, cacheHint: string) => ({\n name: 'Concat (packed)',\n inputNames: Array.from({length: inputCount}, (_v, i) => `X${i}`),\n inputTypes: Array(inputCount).fill(TextureType.packed),\n cacheHint\n});\n\nconst createPackedConcatProgramInfo =\n (handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n if (axis >= inputShape.length || axis < (-1 * inputShape.length)) {\n throw new Error('axis specified for concat doesn\\'t match input dimensionality');\n }\n if (axis < 0) {\n axis = inputShape.length + axis;\n }\n // ensure all of the non-concatenated axes match each other\n // calculate the shape of the output tensor while we do that\n const outputShape = inputShape.slice(0);\n for (let i = 1; i < inputs.length; i++) {\n const dataNShape = inputs[i].dims.slice();\n for (let axisIndex = 0; axisIndex < inputShape.length; axisIndex++) {\n // add to the placeholder for computing output shape\n if (axisIndex === axis) {\n outputShape[axis] += dataNShape[axisIndex];\n }\n // ensure all non-cancatenated axes match each other\n else if (inputShape[axisIndex] !== dataNShape[axisIndex]) {\n throw new Error('non concat dimensions must match');\n }\n }\n }\n\n const rank = outputShape.length;\n const coords = getChannels('coords', rank);\n const dtype = getCoordsDataType(rank);\n const unpackChannel = unpackFromChannel();\n\n const shapes = inputs.map(i => i.dims);\n const channels = getGlChannels(rank);\n const offsets: number[] = new Array(shapes.length - 1);\n\n offsets[0] = shapes[0][axis];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][axis];\n }\n\n const channel = channels[axis];\n const lastChannels = channels.slice(-2);\n const allChannels = channels.join();\n\n let getValueSnippet = `if (${channel} < ${offsets[0]}) {\n return getChannel(\n getX0(${allChannels}), vec2(${lastChannels.join()}));\n }`;\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n 
getValueSnippet += `\n if (${channel} < ${offsets[i]} && ${channel} >= ${offsets[i - 1]}) {\n return getChannel(\n getX${i}(${getShiftedChannelsSnippet(channels, channel, shift)}),\n vec2(${getShiftedChannelsSnippet(lastChannels, channel, shift)}));\n }`;\n }\n const lastIndex = offsets.length;\n const shift = offsets[offsets.length - 1];\n getValueSnippet += `\n return getChannel(\n getX${lastIndex}(${getShiftedChannelsSnippet(channels, channel, shift)}),\n vec2(${getShiftedChannelsSnippet(lastChannels, channel, shift)}));`;\n\n const glsl = getGlsl(handler.session.backend.glContext.version);\n\n const shaderSource = `\n ${unpackChannel}\n float getValue(${channels.map(x => 'int ' + x)}) {\n ${getValueSnippet}\n }\n\n void main() {\n ${dtype} coords = getOutputCoords();\n int lastDim = coords.${channels[rank - 1]};\n coords.${channels[rank - 1]} = coords.${channels[rank - 2]};\n coords.${channels[rank - 2]} = lastDim;\n\n vec4 result = vec4(getValue(${coords}), 0., 0., 0.);\n\n ${coords[rank - 1]} = ${coords[rank - 1]} + 1;\n if (${coords[rank - 1]} < ${outputShape[rank - 1]}) {\n result.g = getValue(${coords});\n }\n\n ${coords[rank - 2]} = ${coords[rank - 2]} + 1;\n if (${coords[rank - 2]} < ${outputShape[rank - 2]}) {\n result.a = getValue(${coords});\n }\n\n ${coords[rank - 1]} = ${coords[rank - 1]} - 1;\n if (${coords[rank - 2]} < ${outputShape[rank - 2]} &&\n ${coords[rank - 1]} < ${outputShape[rank - 1]}) {\n result.b = getValue(${coords});\n }\n ${glsl.output} = result;\n }\n `;\n\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true,\n };\n };\n\nexport const createPackedConcatProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConcatAttributes): ProgramInfoLoader => {\n const metadata = createPackedConcatProgramMetadata(inputs.length, attributes.cacheKey);\n return {...metadata, get: () => createPackedConcatProgramInfo(handler, metadata, inputs, attributes.axis)};\n };\n\nconst getShiftedChannelsSnippet = (channels: string[], channel: string, shift: number): string => {\n const channelIdx = channels.indexOf(channel);\n const res = channels.map((c, idx) => {\n if (idx === channelIdx) {\n return `${c} - ${shift}`;\n } else {\n return c;\n }\n });\n return res.join();\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {createPackedConcatProgramInfoLoader} from './concat-packed';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nexport const concat: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConcatAttributes): Tensor[] => {\n validateInputs(inputs);\n if (inferenceHandler.session.pack && inputs[0].dims.length > 1) {\n const output =\n inferenceHandler.run(createPackedConcatProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n } else {\n const output =\n inferenceHandler.run(createUnpackedConcatProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n }\n };\n\nconst createUnpackedConcatProgramMetadata = (inputCount: number, cacheHint: string) => ({\n name: 'Concat',\n inputNames: Array.from({length: inputCount}, (_v, i) => `X${i}`),\n inputTypes: Array(inputCount).fill(TextureType.unpacked),\n cacheHint\n});\n\nconst createUnpackedConcatProgramInfo =\n (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n if (axis >= inputShape.length || axis < (-1 * inputShape.length)) {\n throw new Error('axis specified for concat doesn\\'t match input dimensionality');\n }\n if (axis < 0) {\n axis = inputShape.length + axis;\n }\n // ensure all of the non-concatenated axes match each other\n // calculate the shape of the output tensor while we do that\n const outputShape = inputShape.slice(0);\n for (let i = 1; i < inputs.length; i++) {\n const dataNShape = inputs[i].dims.slice();\n for (let axisIndex = 0; axisIndex < inputShape.length; axisIndex++) {\n // add to the placeholder for computing output shape\n if (axisIndex === axis) {\n outputShape[axis] += dataNShape[axisIndex];\n }\n // ensure all non-cancatenated axes match each other\n else if (inputShape[axisIndex] !== dataNShape[axisIndex]) {\n throw new Error('non concat dimensions must match');\n }\n }\n }\n\n const rank = outputShape.length;\n\n const sizeInConcatAxis = new Array(inputs.length);\n let previousSum = 0;\n for (let i = 0; i < sizeInConcatAxis.length; ++i) {\n previousSum += inputs[i].dims[axis];\n sizeInConcatAxis[i] = previousSum;\n }\n\n let getTextureIndexWhereDataResidesMethod = '';\n // in most cases linear search is sufficient, as in most scenarios, only 2 tensors are concatenated\n if (inputs.length < 5) {\n getTextureIndexWhereDataResidesMethod = getTextureIndexWhereDataResidesLinearSearch(sizeInConcatAxis);\n } else {\n getTextureIndexWhereDataResidesMethod = getTextureIndexWhereDataResidesBinarySearch(sizeInConcatAxis);\n }\n\n const fetchDataFromCorrectTextureMethod = getFetchDataFromCorrectTextureMethod(inputs.length, rank);\n const getSizeInConcatAxisValueFromIndexMethod = getGetSizeInConcatAxisValueFromIndexMethod(sizeInConcatAxis);\n const shaderSource = `\n ${fetchDataFromCorrectTextureMethod}\n ${getSizeInConcatAxisValueFromIndexMethod}\n ${getTextureIndexWhereDataResidesMethod}\n float 
process(int indices[${rank}]) {\n int textureIndex = getTextureWhereDataResides (indices[${axis}]);\n\n if(textureIndex != 0) {\n indices[${axis}] = indices[${axis}] - int(getSizeInConcatAxisValueFromIndex(textureIndex-int(1)));\n }\n\n return fetchDataFromCorrectTexture(textureIndex, indices);\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n };\n };\n\nconst createUnpackedConcatProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConcatAttributes): ProgramInfoLoader => {\n const metadata = createUnpackedConcatProgramMetadata(inputs.length, attributes.cacheKey);\n return {...metadata, get: () => createUnpackedConcatProgramInfo(handler, metadata, inputs, attributes.axis)};\n };\n\nconst getTextureIndexWhereDataResidesLinearSearch = (sizeInConcatAxis: number[]): string => {\n const searchAxis = sizeInConcatAxis.map((size, i) => `if(index<${size}) {return ${i};}\n`);\n return `int getTextureWhereDataResides(int index) {\n ${searchAxis.join('')}\n }`;\n};\n\n// TODO: Implement BinarySearch in GLSL\nconst getTextureIndexWhereDataResidesBinarySearch = (sizeInConcatAxis: number[]): string =>\n getTextureIndexWhereDataResidesLinearSearch(sizeInConcatAxis);\n\nconst getFetchDataFromCorrectTextureMethod = (numberOfTensors: number, tensorRank: number) => {\n const codeLines: string[] = [`float fetchDataFromCorrectTexture(int textureIndex, int indices[${tensorRank}]) {`];\n for (let i = 0; i < numberOfTensors; ++i) {\n if (i === 0) {\n codeLines.push(\n '\\t' +\n `if (textureIndex == ${i}) { return _X${i}(indices); }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(\n '\\t' +\n `else { return _X${i}(indices); }`);\n } else {\n codeLines.push(\n '\\t' +\n `else if (textureIndex == ${i}) { return _X${i}(indices); }`);\n }\n }\n codeLines.push(\n '\\t' +\n '}');\n return codeLines.join('\\n');\n};\n\nconst getGetSizeInConcatAxisValueFromIndexMethod = (sizeInConcatAxis: number[]): string => {\n const codeLines: string[] = ['int getSizeInConcatAxisValueFromIndex(int index) {'];\n for (let i = 0; i < sizeInConcatAxis.length; ++i) {\n if (i === 0) {\n codeLines.push(\n '\\t' +\n `if (index == ${i}) { return ${sizeInConcatAxis[i]}; }`);\n } else if (i === sizeInConcatAxis.length - 1) {\n codeLines.push(\n '\\t' +\n `else { return ${sizeInConcatAxis[i]}; }`);\n } else {\n codeLines.push(\n '\\t' +\n `else if (index == ${i}) { return ${sizeInConcatAxis[i]}; }`);\n }\n }\n codeLines.push(\n '\\t' +\n '}');\n\n return codeLines.join('\\n');\n};\n\nexport const parseConcatAttributes: OperatorInitialization = (node: Graph.Node): ConcatAttributes =>\n createAttributeWithCacheKey({axis: node.attributes.getInt('axis')});\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n\n const inputType = inputs[0].type;\n const inputDimensionality = inputs[0].dims.length;\n\n // TODO: Support string concat\n if (inputType === 'string') {\n throw new Error('string tensor is not supported yet');\n }\n\n for (const input of inputs) {\n // make sure types of all inputs match\n if (input.type !== inputType) {\n throw new Error('input tensors should be one type');\n }\n\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputDimensionality) {\n throw new Error('input tensors should have the same shape');\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {Tensor} from '../../../tensor';\nimport {MAX_CLIP, MIN_CLIP} from '../../../util';\nimport {FunctionType, GlslValueFunction} from '../glsl-definitions';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport function glslAbs(): GlslValueFunction {\n return glslBuiltinUnary('abs');\n}\nexport function glslAcos(): GlslValueFunction {\n return glslBuiltinUnary('acos');\n}\nexport function glslAsin(): GlslValueFunction {\n return glslBuiltinUnary('asin');\n}\nexport function glslAtan(): GlslValueFunction {\n return glslBuiltinUnary('atan');\n}\nexport function glslCeil(): GlslValueFunction {\n return glslBuiltinUnary('ceil');\n}\nexport function glslCos(): GlslValueFunction {\n return glslBuiltinUnary('cos');\n}\nexport function glslElu(alpha: number): GlslValueFunction {\n const name = 'elu';\n const body = `\n const float alpha = float(${alpha});\n\n float ${name}_(float a) {\n return a >= 0.0 ? a: (exp(a) - 1.0) * alpha;\n }\n vec4 ${name}_(vec4 v) {\n return vec4(${name}_(v.x), ${name}_(v.y), ${name}_(v.z), ${name}_(v.w));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslExp(): GlslValueFunction {\n return glslBuiltinUnary('exp');\n}\nexport function glslFloor(): GlslValueFunction {\n return glslBuiltinUnary('floor');\n}\nexport function glslClip(min: number, max: number): GlslValueFunction {\n const name = 'clip';\n const body = `\n const float min = float(${min});\n const float max = float(${max});\n\n float ${name}_(float a) {\n return clamp(a, min, max);\n }\n vec4 ${name}_(vec4 v) {\n return clamp(v, min, max);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslIdentity(): GlslValueFunction {\n const name = 'indentity';\n const body = `\n float ${name}_(float a) {\n return a;\n }\n vec4 ${name}_(vec4 v) {\n return v;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslLeakyRelu(alpha: number): GlslValueFunction {\n const name = 'leakyRelu';\n const body = `\n const float alpha = float(${alpha});\n\n float ${name}_(float a) {\n return a < 0.0 ? a * alpha : a;\n }\n vec4 ${name}_(vec4 v) {\n return vec4(${name}_(v.x), ${name}_(v.y), ${name}_(v.z), ${name}_(v.w));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslLog(): GlslValueFunction {\n return glslBuiltinUnary('log');\n}\nexport function glslNeg(): GlslValueFunction {\n const name = 'neg';\n const body = `\n float ${name}_(float a) {\n return -a;\n }\n vec4 ${name}_(vec4 v) {\n return -v;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslNot(): GlslValueFunction {\n const name = 'not';\n const body = `\n float ${name}_(float a) {\n return float( ! 
bool(a) );\n }\n bool ${name}_(bool a) {\n return !a;\n }\n vec4 ${name}_(vec4 v) {\n return vec4(!bool(v.x), !bool(v.y), !bool(v.z), !bool(v.w));\n }\n bvec4 ${name}_(bvec4 v) {\n return bvec4(!v.x, !v.y, !v.z, !v.w);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSin(): GlslValueFunction {\n return glslBuiltinUnary('sin');\n}\nexport function glslRelu(): GlslValueFunction {\n const name = 'relu';\n const body = `\n float ${name}_(float a) {\n return max( a, 0.0 );\n }\n vec4 ${name}_(vec4 v) {\n return max( v, 0.0 );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSigmoid(): GlslValueFunction {\n const name = 'sigmoid';\n const body = `\n float ${name}_(float a) {\n return 1.0 / (1.0 + exp(-a));\n }\n vec4 ${name}_(vec4 v) {\n return 1.0 / (1.0 + exp(-v));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSqrt(): GlslValueFunction {\n return glslBuiltinUnary('sqrt');\n}\nexport function glslTan(): GlslValueFunction {\n return glslBuiltinUnary('tan');\n}\nexport function glslTanh(): GlslValueFunction {\n const name = 'tanh';\n const body = `\n float ${name}_(float a) {\n a = clamp(a, -10., 10.);\n a = exp(2.*a);\n return (a - 1.) / (a + 1.);\n }\n vec4 ${name}_(vec4 v) {\n v = clamp(v, -10., 10.);\n v = exp(2.*v);\n return (v - 1.) / (v + 1.);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nfunction glslBuiltinUnary(name: string): GlslValueFunction {\n const body = `\n float ${name}_(float a) {\n return ${name}(a);\n }\n vec4 ${name}_(vec4 v) {\n return ${name}(v);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\n\n/////\n/////\n/////\n\nconst createElementwiseProgramInfo =\n (handler: WebGLInferenceHandler, metadata: ProgramMetadata, input: Tensor, glslFunc: GlslValueFunction):\n ProgramInfo => {\n const textureType = handler.session.pack ? TextureType.packed : TextureType.unpacked;\n const glsl = getGlsl(handler.session.backend.glContext.version);\n return {\n ...metadata,\n output: {dims: input.dims, type: input.type, textureType},\n shaderSource: `\n ${glslFunc.body}\n void main() {\n vec4 v = ${glsl.texture2D}(A, TexCoords);\n v = ${glslFunc.name}_(v);\n ${glsl.output} = v;\n }\n `,\n hasMain: true\n };\n };\n\nconst createElementwiseProgramInfoLoader =\n (handler: WebGLInferenceHandler, input: Tensor, glslFunc: GlslValueFunction, cacheKey?: string):\n ProgramInfoLoader => {\n const textureType = handler.session.pack ? 
TextureType.packed : TextureType.unpacked;\n const metadata = {name: glslFunc.name, inputTypes: [textureType], inputNames: ['A'], cacheHint: cacheKey};\n return {...metadata, get: () => createElementwiseProgramInfo(handler, metadata, input, glslFunc)};\n };\n\nexport const abs = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAbs()), inputs)];\n\nexport const acos = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAcos()), inputs)];\n\nexport const asin = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAsin()), inputs)];\n\nexport const atan = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAtan()), inputs)];\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nexport const clip =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ClipAttributes): Tensor[] => [handler.run(\n createElementwiseProgramInfoLoader(\n handler, inputs[0], glslClip(attributes.min, attributes.max), attributes.cacheKey),\n inputs)];\n\nexport const parseClipAttributes = (node: Graph.Node): ClipAttributes => createAttributeWithCacheKey(\n {min: node.attributes.getFloat('min', MIN_CLIP), max: node.attributes.getFloat('max', MAX_CLIP)});\n\nexport const clipV11 = (handler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n const attributes = generateClipAttributesFromInputs(handler, inputs);\n return clip(handler, [inputs[0]], attributes);\n};\n\nconst generateClipAttributesFromInputs = (handler: WebGLInferenceHandler, inputs: Tensor[]): ClipAttributes => {\n if (inputs.length >= 3 &&\n (!handler.session.isInitializer(inputs[1].dataId) || !handler.session.isInitializer(inputs[2].dataId))) {\n throw new Error('dynamic clip attributes are not allowed');\n }\n\n const min = (inputs.length >= 3) ? inputs[1].numberData[0] : MIN_CLIP;\n const max = (inputs.length >= 3) ? 
inputs[2].numberData[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const ceil = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslCeil()), inputs)];\n\nexport const cos = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslCos()), inputs)];\n\nexport interface EluAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const elu =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: EluAttributes): Tensor[] => [handler.run(\n createElementwiseProgramInfoLoader(handler, inputs[0], glslElu(attributes.alpha), attributes.cacheKey),\n inputs)];\n\nexport const parseEluAttributes = (node: Graph.Node): EluAttributes =>\n createAttributeWithCacheKey({alpha: node.attributes.getFloat('alpha', 1.0)});\n\nexport const exp = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslExp()), inputs)];\n\nexport const floor = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslFloor()), inputs)];\n\nexport const identity = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslIdentity()), inputs)];\n\nexport interface LeakyReluAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const leakyRelu =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: LeakyReluAttributes): Tensor[] => [handler.run(\n createElementwiseProgramInfoLoader(handler, inputs[0], glslLeakyRelu(attributes.alpha), attributes.cacheKey),\n inputs)];\n\nexport const parseLeakyReluAttributes = (node: Graph.Node): LeakyReluAttributes =>\n createAttributeWithCacheKey({alpha: node.attributes.getFloat('alpha', 0.01)});\n\nexport const log = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslLog()), inputs)];\n\nexport const neg = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslNeg()), inputs)];\n\nexport const not = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslNot()), inputs)];\n\nexport const relu = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslRelu()), inputs)];\n\nexport const sigmoid = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslSigmoid()), inputs)];\n\nexport const sin = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslSin()), inputs)];\n\nexport const sqrt = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslSqrt()), inputs)];\n\nexport const tan = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslTan()), inputs)];\n\nexport const tanh = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => 
[handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslTanh()), inputs)];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Attribute} from '../../../attribute';\nimport {MAX_CLIP, MIN_CLIP} from '../../../util';\nimport {GlslValueFunction} from '../glsl-definitions';\n\nimport {glslClip, glslRelu, glslSigmoid} from './unary-op';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly activationCacheKey: string;\n}\n\nexport function getActivationSnippet(attributes: InternalActivationAttributes) {\n let func: GlslValueFunction;\n switch (attributes.activation) {\n case 'Relu':\n func = glslRelu();\n break;\n case 'Sigmoid':\n func = glslSigmoid();\n break;\n case 'Clip':\n func = glslClip(attributes.clipMin!, attributes.clipMax!);\n break;\n // TODO: adding other activations that can be fused.\n default:\n return {activationFunction: '', applyActivation: ''};\n }\n\n const activationName = func.name;\n const activationFunction = func.body;\n const applyActivation = `value = ${activationName}_(value);`;\n return {activationFunction, applyActivation};\n}\n\nexport const parseInternalActivationAttributes = (attributes: Attribute): InternalActivationAttributes => {\n const activation = attributes.getString('activation', '');\n\n if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes.getFloats('activation_params', [MIN_CLIP, MAX_CLIP]);\n return {activation, clipMax, clipMin, activationCacheKey: `${activation}:${clipMin},${clipMax}`};\n }\n return {activation, activationCacheKey: activation};\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../../instrument';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {getActivationSnippet} from './fuse-utils';\n\nconst createUnpackedGroupedConvProgramMetadata = (hasBias: boolean, cacheHint: string): ProgramMetadata => ({\n name: 'GroupedConv',\n inputNames: hasBias ? ['X', 'W', 'Bias'] : ['X', 'W'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n cacheHint\n});\n\nconst createUnpackedGroupedConvProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], metadata: ProgramMetadata,\n attributes: ConvAttributes): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 
'value += getBias(output_channel);' : '';\n const xShape = inputs[0].dims.slice();\n const wShape = inputs[1].dims.slice();\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n Logger.verbose(\n 'GroupedConv',\n `autpPad:${attributes.autoPad}, dilations:${attributes.dilations}, group:${attributes.group}, kernelShape:${\n attributes.kernelShape}, pads:${attributes.pads}, strides:${attributes.strides}`);\n const outputShape =\n calculateOutputShape(xShape, wShape, attributes.dilations, attributes.pads, attributes.strides);\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const {activationFunction, applyActivation} = getActivationSnippet(attributes);\n\n const shaderSource = `\n const ivec2 strides = ivec2(${attributes.strides[0]}, ${attributes.strides[1]});\n const ivec2 pads = ivec2(${attributes.pads[0]}, ${attributes.pads[1]});\n ${activationFunction}\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n int output_channel = coords.y;\n ivec2 xRCCorner = coords.zw * strides - pads;\n int group_id = output_channel / ${outputChannelsPerGroup};\n\n float value = 0.0;\n for (int wInChannel = 0; wInChannel < ${wShape[1]}; wInChannel++) {\n int input_channel = group_id * ${wShape[1]} + wInChannel;\n for (int wHeight = 0; wHeight < ${wShape[2]}; wHeight++) {\n int xHeight = xRCCorner.x + wHeight * ${attributes.dilations[0]};\n\n if (xHeight < 0 || xHeight >= ${xShape[2]}) {\n continue;\n }\n\n for (int wWidth = 0; wWidth < ${wShape[3]}; wWidth++) {\n int xWidth = xRCCorner.y + wWidth * ${attributes.dilations[1]};\n if (xWidth < 0 || xWidth >= ${xShape[3]}) {\n continue;\n }\n\n float xVal = getX(batch, input_channel, xWidth, xHeight);\n float wVal = getW(output_channel, wInChannel, wWidth, wHeight);\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${glsl.output} = vec4(value, .0, .0, .0);\n }\n`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n hasMain: true,\n };\n };\n\nexport const createUnpackedGroupedConvProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes):\n ProgramInfoLoader => {\n const metadata = createUnpackedGroupedConvProgramMetadata(inputs.length > 2, attributes.cacheKey);\n return {\n ...metadata,\n get: () => createUnpackedGroupedConvProgramInfo(inferenceHandler, inputs, metadata, attributes)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {ConvAttributes} from './conv';\nimport {unpackFromChannel} from './packing-utils';\n\nconst createPackedIm2ColProgramMetadata = (cacheHint: string) => ({\n name: 'Im2Col (packed)',\n inputNames: ['A'],\n inputTypes: [TextureType.packed],\n cacheHint,\n});\n\nconst createPackedIm2ColProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, x: Tensor, w: Tensor,\n outputShape: readonly number[], attributes: ConvAttributes): ProgramInfo => {\n const xshape = x.dims;\n const wshape = w.dims;\n const rowDim = 2;\n const colDim = 3;\n const rank = outputShape.length;\n const im2colShape = [wshape[1] * wshape[2] * wshape[3], outputShape[2] * outputShape[3]];\n const kernelSize = wshape[2] * wshape[3];\n const unpackChannel = unpackFromChannel();\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n let unrolled = '';\n\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n unrolled += `\n blockIndex = rc.x + ${col};\n pos = rc.y + ${row};\n\n if(blockIndex < ${im2colShape[1]} && pos < ${im2colShape[0]}) {\n offsetY = int(blockIndex / (${outputShape[rank - 1]})) * ${attributes.strides[0]} -\n ${attributes.pads[0]};\n d0 = offsetY + ${attributes.dilations[0]} * (imod(pos, ${kernelSize}) / ${wshape[2]});\n\n if(d0 < ${xshape[rowDim]} && d0 >= 0) {\n offsetX = imod(blockIndex, ${outputShape[rank - 1]}) * ${attributes.strides[1]} -\n ${attributes.pads[1]};\n d1 = offsetX + ${attributes.dilations[1]} * imod(imod(pos, ${kernelSize}), ${wshape[2]});\n\n if(d1 < ${xshape[colDim]} && d1 >= 0) {\n\n ch = int(float(pos)/ ${kernelSize}.);\n innerDims = vec2(d0, d1);\n result[${row * 2 + col}] = getChannel(\n getA(0, ch, int(innerDims.x),\n int(innerDims.y)), innerDims);\n }\n }\n }\n\n `;\n }\n }\n\n const shaderSource = `\n ${unpackChannel}\n\n void main() {\n ivec2 rc = getOutputCoords();\n vec4 result = vec4(0.0);\n int blockIndex, pos, offsetY, d0, offsetX, d1, ch;\n vec2 innerDims;\n ${unrolled}\n ${glsl.output} = result;\n }\n `;\n return {\n ...metadata,\n output: {dims: im2colShape, type: x.type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const createPackedIm2ColProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, x: Tensor, w: Tensor, outputShape: readonly number[],\n attributes: ConvAttributes): ProgramInfoLoader => {\n const metadata = createPackedIm2ColProgramMetadata(attributes.cacheKey);\n return {\n ...metadata,\n get: () => createPackedIm2ColProgramInfo(inferenceHandler, metadata, x, w, outputShape, attributes)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {BroadcastUtil, ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\nimport {getCoordsDataType, getGlChannels} from '../utils';\n\nimport {getActivationSnippet, InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createPackedMatmulProgramInfoLoader} from './matmul-pack';\n\nexport const matMul: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: InternalActivationAttributes): Tensor[] => {\n validateInputs(inputs);\n\n if (inferenceHandler.session.pack) {\n return [inferenceHandler.run(\n createPackedMatmulProgramInfoLoader(inferenceHandler, inputs, attributes), inputs)];\n } else {\n return [inferenceHandler.run(createMatmulProgramInfoLoader(inputs, attributes), inputs)];\n }\n };\n\nexport const parseMatMulAttributes: OperatorInitialization =\n (node: Graph.Node): InternalActivationAttributes => parseInternalActivationAttributes(node.attributes);\n\nconst createMatmulProgramMetadata = (hasBias: boolean, cacheHint: string) => ({\n name: 'MatMul',\n inputNames: hasBias ? ['A', 'B', 'Bias'] : ['A', 'B'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n cacheHint\n});\n\nfunction createMatmulProgramInfo(\n metadata: ProgramMetadata, inputs: Tensor[], activationAttributes: InternalActivationAttributes): ProgramInfo {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outputShape = BroadcastUtil.calcShape(aShape, bShape, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const coordsDataType = getCoordsDataType(outputShape.length);\n const allGlChannels = getGlChannels();\n const {activationFunction, applyActivation} = getActivationSnippet(activationAttributes);\n\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += getBiasForMatmul();' : '';\n const getBiasForMatmulSnippet =\n hasBias ? 
`${getBiasForMatmul(coordsDataType, allGlChannels, inputs[2].dims, outputShape, false)}` : '';\n\n const rank = outputShape.length;\n const arank = aShape.length;\n const brank = bShape.length;\n const sharedDim = aShape[aShape.length - 1];\n const shaderSource = `\n ${activationFunction}\n ${getBiasForMatmulSnippet}\n float process(int indices[${rank}]) {\n int a[${arank}];\n int b[${brank}];\n bcastMatmulIndices_A(indices, a);\n bcastMatmulIndices_B(indices, b);\n\n float value;\n for (int k=0; k<${sharedDim}; ++k) {\n a[${arank - 1}] = k;\n b[${brank - 2}] = k;\n value += _A(a) * _B(b);\n }\n ${processBias}\n ${applyActivation}\n return value;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n };\n}\n\nexport function createMatmulProgramInfoLoader(\n inputs: Tensor[], activationAttributes: InternalActivationAttributes): ProgramInfoLoader {\n const metadata = createMatmulProgramMetadata(inputs.length > 2, activationAttributes.activationCacheKey);\n return {...metadata, get: () => createMatmulProgramInfo(metadata, inputs, activationAttributes)};\n}\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n\n if ((inputs[0].type !== 'float32' && inputs[0].type !== 'float64') ||\n (inputs[1].type !== 'float32' && inputs[1].type !== 'float64')) {\n throw new Error('inputs should be float type');\n }\n\n if (inputs[0].type !== inputs[1].type) {\n throw new Error('inputs types should match');\n }\n};\n\nexport function getBiasForMatmul(\n coordsDataType: string, allGlChannels: readonly string[], inShape: readonly number[], outShape: readonly number[],\n isPacked: boolean): string {\n let unpackedCoordsSnippet = '';\n const inRank = inShape.length;\n const outRank = outShape.length;\n const rankDiff = outRank - inRank;\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inShape.map((_s, i) => `coords.${allGlChannels[i + rankDiff]}`).join(', ');\n }\n const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);\n const coordsSnippet = broadcastDims.map(d => `coords.${allGlChannels[d + rankDiff]} = 0;`).join('\\n');\n const inSize = ShapeUtil.size(inShape);\n const isInputScalar = inSize === 1;\n let output = 'vec4(outputValue.xx, outputValue.yy)';\n if (isInputScalar) {\n output = 'vec4(outputValue.x)';\n }\n const getBiasForMatmulSource = isPacked ? `\nvec4 getBiasForMatmul() {\n ${coordsDataType} coords = getOutputCoords();\n ${coordsSnippet}\n vec4 outputValue = getBias(${unpackedCoordsSnippet});\n return ${output};\n}` :\n `\nfloat getBiasForMatmul() {\n ${coordsDataType} coords = getOutputCoords();\n ${coordsSnippet}\n return getBias(coords.x);\n}`;\n\n return getBiasForMatmulSource;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {BroadcastUtil, ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\nimport {getCoordsDataType, getGlChannels} from '../utils';\n\nimport {getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\nimport {getBiasForMatmul} from './matmul';\n\nconst createPackedMatmulProgramMetadata = (hasBias: boolean, cacheHint: string) => ({\n name: 'MatMul (packed)',\n inputNames: hasBias ? ['A', 'B', 'Bias'] : ['A', 'B'],\n inputTypes: hasBias ? [TextureType.packed, TextureType.packed, TextureType.packed] :\n [TextureType.packed, TextureType.packed],\n cacheHint\n});\n\nconst createPackedMatmulProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[],\n activationAttributes: InternalActivationAttributes): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += getBiasForMatmul();' : '';\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outputShape = BroadcastUtil.calcShape(aShape, bShape, true);\n const isBroadcast = !ShapeUtil.areEqual(inputs[0].dims, inputs[1].dims);\n\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const sharedDim = aShape[aShape.length - 1];\n const sharedDimIndex = Math.ceil(sharedDim / 2);\n const aRank = aShape.length;\n const bRank = bShape.length;\n\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const coordsDataType = getCoordsDataType(outputShape.length);\n const outRank = outputShape.length;\n const allGlChannels = getGlChannels();\n const {activationFunction, applyActivation} = getActivationSnippet(activationAttributes);\n\n const getBiasForMatmulSnippet =\n hasBias ? `${getBiasForMatmul(coordsDataType, allGlChannels, inputs[2].dims, outputShape, true)}` : '';\n\n const getBcastedSamplerForMatmulSnippet =\n isBroadcast ? `${getBcastSamplerForMatmul(coordsDataType, allGlChannels, inputs, outputShape)}` : '';\n\n const getSamplerAInLoopSnippet = isBroadcast ? 'getAAtOutCoordsMatmul(i)' : `getA(${getA(allGlChannels, aRank)})`;\n const getSamplerBInLoopSnippet = isBroadcast ? 'getBAtOutCoordsMatmul(i)' : `getB(${getB(allGlChannels, bRank)})`;\n const getOutputCoordsSnippet = isBroadcast ? 
'' : `${coordsDataType} rc =\n getOutputCoords(); int lastDim = rc.${allGlChannels[outRank - 1]}; rc.${allGlChannels[outRank - 1]} =\n rc.${allGlChannels[outRank - 2]}; rc.${allGlChannels[outRank - 2]} = lastDim;\n `;\n const shaderSource = `\n ${getBcastedSamplerForMatmulSnippet}\n ${getBiasForMatmulSnippet}\n ${activationFunction}\n void main() {\n ${getOutputCoordsSnippet}\n\n vec4 value = vec4(0);\n for (int i = 0; i < ${sharedDimIndex}; i++) {\n vec4 a = ${getSamplerAInLoopSnippet};\n vec4 b = ${getSamplerBInLoopSnippet};\n\n value += (a.rrbb * b.rgrg);\n value += (a.ggaa * b.baba);\n }\n ${processBias}\n ${applyActivation}\n ${glsl.output} = value;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const createPackedMatmulProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[],\n activationAttributes: InternalActivationAttributes): ProgramInfoLoader => {\n const metadata = createPackedMatmulProgramMetadata(inputs.length > 2, activationAttributes.activationCacheKey);\n return {\n ...metadata,\n get: () => createPackedMatmulProgramInfo(inferenceHandler, metadata, inputs, activationAttributes)\n };\n };\n\nfunction getBcastSamplerForMatmul(\n coordsDataType: string, allGlChannels: readonly string[], inputs: Tensor[], outShape: readonly number[]): string {\n let unpackedACoordsSnippet = [];\n let unpackedBCoordsSnippet = [];\n\n const inAShape = inputs[0].dims;\n const inBShape = inputs[1].dims;\n\n const inARank = inAShape.length;\n const inBRank = inBShape.length;\n\n const outRank = outShape.length;\n const rankADiff = outRank - inARank;\n const rankBDiff = outRank - inBRank;\n\n unpackedACoordsSnippet = inAShape.map((_s, i) => `coords.${allGlChannels[i + rankADiff]}`);\n unpackedACoordsSnippet[inARank - 1] = 'i*2';\n unpackedACoordsSnippet.join(', ');\n unpackedBCoordsSnippet = inBShape.map((_s, i) => `coords.${allGlChannels[i + rankBDiff]}`);\n unpackedBCoordsSnippet[inBRank - 2] = 'i*2';\n unpackedBCoordsSnippet.join(', ');\n\n const broadcastADims = BroadcastUtil.getBroadcastDims(inAShape, outShape);\n const broadcastBDims = BroadcastUtil.getBroadcastDims(inBShape, outShape);\n\n const coordsASnippet = broadcastADims.map(d => `coords.${allGlChannels[d + rankADiff]} = 0;`).join('\\n');\n const coordsBSnippet = broadcastBDims.map(d => `coords.${allGlChannels[d + rankBDiff]} = 0;`).join('\\n');\n const swapDimSnippet = `int lastDim = coords.${allGlChannels[outRank - 1]};\n coords.${allGlChannels[outRank - 1]} = coords.${allGlChannels[outRank - 2]};\n coords.${allGlChannels[outRank - 2]} = lastDim;`;\n\n const getBcastSamplerMatmulSource = `\nvec4 getAAtOutCoordsMatmul(int i) {\n ${coordsDataType} coords = getOutputCoords();\n ${swapDimSnippet}\n ${coordsASnippet}\n vec4 outputValue = getA(${unpackedACoordsSnippet});\n return outputValue;\n}\n\nvec4 getBAtOutCoordsMatmul(int i) {\n ${coordsDataType} coords = getOutputCoords();\n ${swapDimSnippet}\n ${coordsBSnippet}\n vec4 outputValue = getB(${unpackedBCoordsSnippet});\n return outputValue;\n}`;\n\n return getBcastSamplerMatmulSource;\n}\n\nfunction getA(allGlChannels: string[], rank: number): string {\n let res = '';\n for (let i = 0; i < rank - 2; i++) {\n res += `rc.${allGlChannels[i]}, `;\n }\n res += `rc.${allGlChannels[rank - 2]}, ` +\n 'i*2';\n return res;\n}\n\nfunction getB(allGlChannels: string[], rank: number): string {\n let res = '';\n for (let i = 0; i < rank - 2; i++) {\n res += 
`rc.${allGlChannels[i]}, `;\n }\n res += 'i*2, ' +\n `rc.${allGlChannels[rank - 1]}`;\n return res;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {createPackedIm2ColProgramInfoLoader} from './im2col-pack';\nimport {createPackedMatmulProgramInfoLoader} from './matmul-pack';\n\nexport const conv2DPackedPointwise =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n const reshapedX = inferenceHandler.reshapePacked(inputs[0], [xshape[1], xshape[2] * xshape[3]]);\n const reshapedK = inferenceHandler.reshapePacked(inputs[1], [kshape[0], kshape[1]]);\n\n const matmulInputs = inputs.length > 2 ? [reshapedK, reshapedX, inputs[2]] : [reshapedK, reshapedX];\n const matmulOutput = inferenceHandler.run(\n createPackedMatmulProgramInfoLoader(inferenceHandler, matmulInputs, attributes), matmulInputs);\n return inferenceHandler.reshapePacked(matmulOutput, outputShape);\n };\n\nexport const conv2DPacked =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n\n // run im2col\n const im2colOutput = inferenceHandler.run(\n createPackedIm2ColProgramInfoLoader(inferenceHandler, inputs[0], inputs[1], outputShape, attributes),\n [inputs[0]]);\n\n // reshape kernel\n const kernelReshaped = inferenceHandler.reshapePacked(inputs[1], [kshape[0], kshape[1] * kshape[2] * kshape[3]]);\n\n // run matmul\n const matmulInputs =\n (inputs.length === 3) ? [kernelReshaped, im2colOutput, inputs[2]] : [kernelReshaped, im2colOutput];\n const matmulOutput = inferenceHandler.run(\n createPackedMatmulProgramInfoLoader(inferenceHandler, matmulInputs, attributes), matmulInputs);\n\n // reshape output\n const outputReshaped = inferenceHandler.reshapePacked(matmulOutput, outputShape);\n return outputReshaped;\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {ConvAttributes} from './conv';\n\nconst createIm2ColProgramMetadata = (cacheHint: string) => ({\n name: 'Im2Col',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n cacheHint,\n});\n\nconst createIm2ColProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, x: Tensor, w: Tensor,\n outputShape: readonly number[], attributes: ConvAttributes): ProgramInfo => {\n const xshape = x.dims;\n const wshape = w.dims;\n\n const rank = outputShape.length;\n const im2colDims = calculateIm2ColDims(xshape, wshape, outputShape, 4);\n\n const shaderSource = `\n const int XC = ${xshape[1]};\n const int XH = ${xshape[2]};\n const int XW = ${xshape[3]};\n const int KH = ${attributes.kernelShape[0]};\n const int KW = ${attributes.kernelShape[1]};\n const int dilationH = ${attributes.dilations[0]};\n const int dilationW = ${attributes.dilations[1]};\n const int strideH = ${attributes.strides[0]};\n const int strideW = ${attributes.strides[1]};\n const int padH = ${attributes.pads[0]};\n const int padW = ${attributes.pads[1]};\n const int KHKW = KH*KW;\n const int XCKHKW = XC * KHKW;\n const int outputChannels = 4;\n vec4 process(int indices[${rank}]) {\n int b = indices[0]; // batch size\n int oh = indices[1] * strideH - padH; //output height\n int ow = indices[2] * strideW - padW; //output width\n int p = indices[3] * outputChannels; //patch\n vec4 value = vec4(0.0);\n for(int i=0; i < outputChannels; ++i) {\n if(p < XCKHKW) {\n int patchC = p / KHKW;\n int patchH = (p - patchC*KHKW) / KW;\n int patchW = (p - patchC*KHKW) - patchH * KW;\n int xh2 = oh + patchH * dilationH;\n int xw2 = ow + patchW * dilationW;\n int x[${xshape.length}];\n x[0] = b;\n x[1] = patchC;\n x[2] = xh2;\n x[3] = xw2;\n if(xh2 >= 0 &&\n xh2 < XH &&\n xw2 >= 0 &&\n xw2 < XW) {\n value[i] = _X(x);\n }\n }\n ++p;\n }\n return value;\n }\n `;\n return {\n ...metadata,\n output: {dims: im2colDims, type: x.type, textureType: TextureType.packedLastDimension},\n shaderSource\n };\n };\n\nexport const createIm2ColProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, x: Tensor, w: Tensor, outputShape: readonly number[],\n attributes: ConvAttributes): ProgramInfoLoader => {\n const metadata = createIm2ColProgramMetadata(attributes.cacheKey);\n return {\n ...metadata,\n get: () => createIm2ColProgramInfo(inferenceHandler, metadata, x, w, outputShape, attributes)\n };\n };\n\n\nexport const calculateIm2ColDims =\n (inputShape: readonly number[], kernelShape: readonly number[], outputShape: readonly number[], channels = 4):\n number[] =>\n [outputShape[0], outputShape[2], outputShape[3],\n Math.ceil(inputShape[1] * kernelShape[2] * kernelShape[3] / channels)];\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\nimport {calculateIm2ColDims} from './im2col';\n\nconst createDotProductProgramMetadata = (hasBias: boolean, attributes: InternalActivationAttributes) => ({\n name: 'ConvDotProduct',\n inputNames: hasBias ? ['Im2Col', 'K', 'B'] : ['Im2Col', 'K'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.packedLastDimension, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.packedLastDimension],\n cacheKey: attributes.activationCacheKey\n});\n\nconst createDotProductProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: readonly Tensor[],\n outputShape: number[], attributes: InternalActivationAttributes): ProgramInfo => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const adjustedKernelShape = [kshape[0], Math.ceil((xshape[1] * kshape[2] * kshape[3]) / 4)];\n const im2colShape = calculateIm2ColDims(xshape, kshape, outputShape);\n const [kWidth, kHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(adjustedKernelShape, TextureType.packedLastDimension);\n\n const im2colStrides = ShapeUtil.computeStrides(im2colShape);\n const [im2colWidth, im2colHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(im2colShape, TextureType.packedLastDimension);\n const rank = outputShape.length;\n\n const initValue = (inputs.length < 3) ? '0.0' : '_B(b)';\n const sharedDim = Math.ceil(xshape[1] * kshape[2] * kshape[3] / 4);\n const {activationFunction, applyActivation} = getActivationSnippet(attributes);\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const shaderSource = `\n${activationFunction}\nfloat process(int indices[${rank}]) {\n int b[1];\n b[0] = indices[1];\n int im2col[4];\n im2col[0] = indices[0];\n im2col[1] = indices[2];\n im2col[2] = indices[3];\n int im2colOffset = im2col[0] * ${im2colStrides[0]} + im2col[1] * ${im2colStrides[1]} + im2col[2] * ${\n im2colStrides[2]};\n int kernelOffset = indices[1] * ${adjustedKernelShape[1]};\n float value = ${initValue};\n for (int i = 0; i < ${sharedDim}; ++i) {\n vec2 im2colCoords = offsetToCoords(im2colOffset, ${im2colWidth}, ${im2colHeight});\n vec2 kernelCoords = offsetToCoords(kernelOffset, ${kWidth}, ${kHeight});\n value += dot(${glsl.texture2D}(Im2Col, im2colCoords), ${glsl.texture2D}(K, kernelCoords));\n ++im2colOffset;\n ++kernelOffset;\n }\n ${applyActivation}\n return value;\n}`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nexport const createDotProductProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], outputShape: number[],\n attributes: InternalActivationAttributes): ProgramInfoLoader => {\n const metadata = createDotProductProgramMetadata(inputs.length > 2, attributes);\n return {\n ...metadata,\n get: () => createDotProductProgramInfo(inferenceHandler, metadata, inputs, outputShape, attributes)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {InferenceHandler} from '../../../backend';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {PoolConvUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nimport {createUnpackedGroupedConvProgramInfoLoader} from './conv-grouped';\nimport {conv2DPacked} from './conv-pack';\nimport {createDotProductProgramInfoLoader} from './dot-product';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createIm2ColProgramInfoLoader} from './im2col';\nimport {createMatmulProgramInfoLoader} from './matmul';\n\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[]): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(2);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputSpatialShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n const outputShape = [batchSize, outChannels].concat(...outputSpatialShape);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n}\n\nexport const conv: OperatorImplementation =\n (inferenceHandler: InferenceHandler, inputs: Tensor[], attributes: ConvAttributes): Tensor[] => {\n validateInputs(inputs, attributes); // currently will fail if not conv2D\n return conv2d(inferenceHandler, inputs, attributes);\n };\n\nconst conv2d: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConvAttributes): Tensor[] => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n const packMode = inferenceHandler.session.pack;\n const isPointwise = adjustedAttributes.kernelShape[0] === 1 && adjustedAttributes.kernelShape[1] === 1;\n if (adjustedAttributes.group > 1) {\n const result = inferenceHandler.run(\n createUnpackedGroupedConvProgramInfoLoader(inferenceHandler, inputs, adjustedAttributes), inputs);\n return [result];\n } else if (isPointwise && packMode) {\n return [conv2DUnpackedPointwise(inferenceHandler, inputs, adjustedAttributes)];\n } else if (packMode && inputs[0].dims.length === 4 && inputs[0].dims[0] === 1 && !isPointwise) {\n return [conv2DPacked(inferenceHandler, inputs, adjustedAttributes)];\n } else {\n return [conv2DUnpacked(inferenceHandler, inputs, adjustedAttributes)];\n }\n };\n\nconst conv2DUnpackedPointwise =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const 
outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n const reshapedX = inferenceHandler.reshapeUnpacked(inputs[0], [xshape[1], xshape[2] * xshape[3]]);\n const reshapedK = inferenceHandler.reshapeUnpacked(inputs[1], [kshape[0], kshape[1]]);\n\n const matmulInputs = inputs.length > 2 ? [reshapedK, reshapedX, inputs[2]] : [reshapedK, reshapedX];\n const matmulOutput = inferenceHandler.run(createMatmulProgramInfoLoader(matmulInputs, attributes), matmulInputs);\n return inferenceHandler.reshapeUnpacked(matmulOutput, outputShape);\n };\n\nconst conv2DUnpacked =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n const xIm2Col = inferenceHandler.run(\n createIm2ColProgramInfoLoader(inferenceHandler, inputs[0], inputs[1], outputShape, attributes), [inputs[0]]);\n\n const dotProductInputs = inputs.length === 3 ? [xIm2Col, inputs[1], inputs[2]] : [xIm2Col, inputs[1]];\n const output = inferenceHandler.run(\n createDotProductProgramInfoLoader(inferenceHandler, inputs, outputShape, attributes), dotProductInputs);\n return output;\n };\n\nconst getAdjustedConvAttributes = (attributes: T, inputs: Tensor[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0) {\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, cacheKey: attributes.cacheKey});\n return newAttributes;\n};\n\nexport const parseConvAttributes: OperatorInitialization = (node: Graph.Node): ConvAttributes => {\n const attributes = node.attributes;\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const autoPad = attributes.getString('auto_pad', 'NOTSET');\n const dilations = attributes.getInts('dilations', [1, 1]);\n const group = attributes.getInt('group', 1);\n const kernelShape = attributes.getInts('kernel_shape', []);\n const pads = attributes.getInts('pads', [0, 0, 0, 0]);\n const strides = attributes.getInts('strides', [1, 1]);\n\n return createAttributeWithCacheKey({autoPad, dilations, group, kernelShape, pads, strides, ...activationAttributes});\n};\n\nconst validateInputs = (inputs: Tensor[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 || inputs[1].dims.length !== 4) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = 
inputs[0].dims[1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // TODO : Need to add support for float64\n if (inputs[0].type !== 'float32' || inputs[1].type !== 'float32') {\n throw new Error('Conv input(X,W) should be float tensor');\n }\n\n if (inputs.length === 3 && inputs[2].type !== 'float32') {\n throw new Error('Conv input(bias) should be float tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {InferenceHandler} from '../../../backend';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {ConvAttributes} from './conv';\nimport {getActivationSnippet, parseInternalActivationAttributes} from './fuse-utils';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n pads: number[], strides: readonly number[], outputPadding: readonly number[], outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateShape = outputShape.length === 0;\n for (let i = 0; i < spatialRank; ++i) {\n const outSize = updateShape ? 
inputShape[i + 2] * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inputShape[i + 2], strides[i], pads[i], kernelShape[i], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateShape) {\n outputShape.push(\n strides[i] * (inputShape[i + 2] - 1) + outputPadding[i] + (kernelShape[i] - 1) * dilations[i] + 1 -\n pads[i] - pads[i + spatialRank]);\n }\n }\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nexport const convTranspose: OperatorImplementation =\n (inferenceHandler: InferenceHandler, inputs: Tensor[], attributes: ConvTransposeAttributes): Tensor[] => {\n validateInputs(inputs, attributes); // currently will fail if not convTranspose2D\n return convTranspose2d(inferenceHandler, inputs, attributes);\n };\n\nconst convTranspose2d: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConvTransposeAttributes): Tensor[] => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n return [convTranspose2DUnpacked(inferenceHandler, inputs, adjustedAttributes)];\n };\n\nconst createConvTransposeProgramMetadata = (hasBias: boolean, cacheHint: string) => ({\n name: 'ConvTranspose',\n inputNames: hasBias ? ['X', 'W', 'B'] : ['X', 'W'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n cacheHint\n});\n\nconst createUnpackedConvTransposeProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], metadata: ProgramMetadata,\n attributes: ConvTransposeAttributes): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const valueInit = hasBias ? 
'getB(output_channel)' : '0.0';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[1];\n const inputChannelsPerGroup = wShape[0] / attributes.group;\n const outputShape = [inputs[0].dims[0], inputs[1].dims[1] * attributes.group, ...attributes.outputShape];\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const {activationFunction, applyActivation} = getActivationSnippet(attributes);\n\n const shaderSource = `\n const ivec2 strides = ivec2(${attributes.strides[0]}, ${attributes.strides[1]});\n const ivec2 pads = ivec2(${attributes.pads[0]}, ${attributes.pads[1]});\n ${activationFunction}\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n int output_channel = coords.y;\n\n ivec2 loc = coords.zw + pads;\n\n int group_id = output_channel / ${outputChannelsPerGroup};\n int wOutChannel = output_channel - group_id * ${outputChannelsPerGroup};\n\n float value = ${valueInit};\n for (int inChannelOffset = 0; inChannelOffset < ${inputChannelsPerGroup}; inChannelOffset++) {\n int input_channel = group_id * ${inputChannelsPerGroup} + inChannelOffset;\n for (int wWOff = 0; wWOff < ${wShape[2]}; wWOff++) {\n for (int wHOff = 0; wHOff < ${wShape[3]}; wHOff++) {\n ivec2 wOff = ivec2(wWOff * ${attributes.dilations[0]}, wHOff * ${attributes.dilations[1]});\n ivec2 wLoc = loc - wOff;\n ivec2 wLocIn = wLoc / strides;\n if (\n wLocIn * strides == wLoc &&\n wLocIn.x >= 0 && wLocIn.x < ${xShape[2]} &&\n wLocIn.y >= 0 && wLocIn.y < ${xShape[3]}\n ) {\n float xVal = getX(batch, input_channel, wLocIn.y, wLocIn.x);\n float wVal = getW(input_channel, wOutChannel, wHOff, wWOff);\n value += xVal * wVal;\n }\n }\n }\n }\n ${applyActivation}\n ${glsl.output} = vec4(value, .0, .0, .0);\n }\n`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n hasMain: true,\n };\n };\n\nconst createUnpackedConvTransposeProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvTransposeAttributes):\n ProgramInfoLoader => {\n const metadata = createConvTransposeProgramMetadata(inputs.length > 2, attributes.cacheKey);\n return {\n ...metadata,\n get: () => createUnpackedConvTransposeProgramInfo(inferenceHandler, inputs, metadata, attributes)\n };\n };\n\n\nconst convTranspose2DUnpacked =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvTransposeAttributes):\n Tensor => {\n const result = inferenceHandler.run(\n createUnpackedConvTransposeProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return result;\n };\n\nconst getAdjustedConvTransposeAttributes = (attributes: T, inputs: Tensor[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0) {\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const inputShape = inputs[0].dims;\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, attributes.dilations, attributes.autoPad, pads, attributes.strides,\n attributes.outputPadding, outputShape);\n\n // always 
return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputShape, cacheKey: attributes.cacheKey});\n return newAttributes;\n};\n\nexport const parseConvTransposeAttributes: OperatorInitialization =\n (node: Graph.Node): ConvTransposeAttributes => {\n const attributes = node.attributes;\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const autoPad = attributes.getString('auto_pad', 'NOTSET');\n const dilations = attributes.getInts('dilations', [1, 1]);\n const group = attributes.getInt('group', 1);\n const kernelShape = attributes.getInts('kernel_shape', []);\n const outputPadding = attributes.getInts('output_padding', [0, 0]);\n const outputShape = attributes.getInts('output_shape', []);\n const pads = attributes.getInts('pads', [0, 0, 0, 0]);\n const strides = attributes.getInts('strides', [1, 1]);\n\n return createAttributeWithCacheKey(\n {autoPad, dilations, group, kernelShape, outputPadding, outputShape, pads, strides, ...activationAttributes});\n };\n\nconst validateInputs = (inputs: Tensor[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 || inputs[1].dims.length !== 4) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n\n // TODO : Need to add support for float64\n if 
(inputs[0].type !== 'float32' || inputs[1].type !== 'float32') {\n throw new Error('ConvTranspose input(X,W) should be float tensor');\n }\n\n if (inputs.length === 3 && inputs[2].type !== 'float32') {\n throw new Error('ConvTranspose input(bias) should be float tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst transposeProgramMetadata = {\n name: 'Transpose',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked]\n};\n\nexport const transpose: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: TransposeAttributes): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...transposeProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createTransposeProgramInfo(inferenceHandler, inputs[0], attributes.perm)\n },\n inputs);\n return [output];\n };\n\nexport const parseTransposeAttributes: OperatorInitialization =\n (node: Graph.Node): TransposeAttributes => createAttributeWithCacheKey({perm: node.attributes.getInts('perm', [])});\n\nconst createTransposeProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, input: Tensor, perm: number[]): ProgramInfo => {\n const inputShape = input.dims;\n perm = getAdjustedPerm(inputShape, perm);\n const unpackedOutputShape = getOutputShape(inputShape, perm);\n const rank = inputShape.length;\n // A dims=[${inputs[0].dims.toString()}]\n // out Dims=[${unpackedOutputShape.toString()}]\n // based on perm=[${perm.toString()}]\n const shaderSource = `\n ${getPermFunctionBody('perm', perm, rank)}\n float process(int indices[${rank}]) {\n int a[${rank}];\n perm(a, indices);\n return _A(a);\n }`;\n return {\n ...transposeProgramMetadata,\n output: {dims: unpackedOutputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst getAdjustedPerm = (inputShape: readonly number[], perm: number[]): number[] => {\n if (perm && perm.length !== inputShape.length) {\n perm = [...(inputShape.keys())].reverse();\n }\n return perm;\n};\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] => {\n perm = getAdjustedPerm(inputShape, perm);\n return ShapeUtil.sortBasedOnPerm(inputShape, perm);\n};\n\nconst getPermFunctionBody = (name: string, perm: number[], rank: number): string => {\n const reverseFunc = [];\n reverseFunc.push(`void ${name}(out int a[${rank}], int src[${rank}]) {`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(`\\ta[${perm[i]}]=src[${i}];`);\n }\n reverseFunc.push('\\t}');\n return reverseFunc.join('\\n');\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('input should be float tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nimport {transpose, TransposeAttributes} from './transpose';\n\nexport interface DepthToSpaceAttributes {\n mode: 'DCR'|'CRD';\n blocksize: number;\n}\n\nexport const depthToSpace: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: DepthToSpaceAttributes): Tensor[] => {\n validateInputs(inputs);\n const blocksize = attributes.blocksize;\n const blocksizeSqr = blocksize * blocksize;\n const transposePerm = attributes.mode === 'DCR' ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n const firstReshapeShape = attributes.mode === 'DCR' ?\n [\n inputs[0].dims[0], blocksize, blocksize, inputs[0].dims[1] / blocksizeSqr, inputs[0].dims[2],\n inputs[0].dims[3]\n ] :\n [\n inputs[0].dims[0], inputs[0].dims[1] / blocksizeSqr, blocksize, blocksize, inputs[0].dims[2],\n inputs[0].dims[3]\n ];\n\n // const transpose = new WebGLTranspose();\n // const attributes = new Attribute(undefined);\n // attributes.set('perm', 'ints', transposePerm);\n // transpose.initialize(attributes);\n\n // First reshape\n const firstReshapedTensor = inferenceHandler.reshapeUnpacked(inputs[0], firstReshapeShape);\n\n // transpose\n const transposeAttributes: TransposeAttributes = {perm: transposePerm, cacheKey: `${transposePerm}`};\n const [transposeOutput] = transpose(inferenceHandler, [firstReshapedTensor], transposeAttributes);\n\n // Second reshape\n const secondReshapeShape = [\n inputs[0].dims[0], inputs[0].dims[1] / blocksizeSqr, inputs[0].dims[2] * blocksize,\n inputs[0].dims[3] * blocksize\n ];\n const result = inferenceHandler.reshapeUnpacked(transposeOutput, secondReshapeShape);\n return [result];\n };\n\nexport const parseDepthToSpaceAttributes: OperatorInitialization =\n (node: Graph.Node): DepthToSpaceAttributes => {\n // processing node attributes\n const blocksize = node.attributes.getInt('blocksize');\n if (blocksize < 1) {\n throw new Error(`blocksize must be >= 1, but got : ${blocksize} for DepthToSpace`);\n }\n const mode = node.attributes.getString('mode', 'DCR');\n if (mode !== 'DCR' && mode !== 'CRD') {\n throw new Error(`unrecognized mode: ${mode} for DepthToSpace`);\n }\n return {mode, blocksize};\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (inputs.length !== 1) {\n throw new Error(`DepthToSpace expect 1 inputs, but got ${inputs.length}`);\n }\n\n // Input has to be a 4-D tensor\n // TODO: Support string depth-to-space.\n if (inputs[0].type === 'string' || inputs[0].dims.length !== 4) {\n throw new TypeError('DepthToSpace input should be a 4-D numeric tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const flatten: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axis: number): Tensor[] => {\n validateInputs(inputs, axis);\n\n const outputDims = ShapeUtil.flattenShape(inputs[0].dims, axis);\n return [inferenceHandler.reshapeUnpacked(inputs[0], outputDims)];\n };\n\nexport const parseFlattenAttributes: OperatorInitialization = (node: Graph.Node): number =>\n node.attributes.getInt('axis', 1); // default axis is 1\n\nconst validateInputs = (inputs: Tensor[], axis: number): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Flatten requires 1 input.');\n }\n\n const r = inputs[0].dims.length;\n if (r === 0) {\n throw new Error('scalar tensor is not supported.');\n }\n\n if (axis < -r || axis > r) {\n throw new Error('Invalid axis');\n }\n\n // TODO: Support string type\n if (inputs[0].type === 'string') {\n throw new Error('string tensor is not supported.');\n }\n};", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceHandler} from './backend';\nimport {Graph} from './graph';\nimport {Tensor} from './tensor';\n\nexport type OperatorImplementation = (inferenceHandler: InferenceHandler, inputs: Tensor[], context: T) => Tensor[];\nexport type OperatorInitialization = (node: Graph.Node, graph: Graph) => T;\n\nexport interface Operator {\n readonly impl: OperatorImplementation;\n readonly context: Graph.Node|unknown;\n}\n\nexport const NUMBER_TYPES: readonly Tensor.DataType[] =\n ['float32', 'float64', 'int32', 'int16', 'int8', 'uint16', 'uint32', 'uint8'];\nexport const INT_TYPES: readonly Tensor.DataType[] = ['int32', 'int16', 'int8', 'uint16', 'uint32', 'uint8'];\nexport const FLOAT_TYPES: readonly Tensor.DataType[] = ['float32', 'float64'];\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {NUMBER_TYPES, OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\ninterface GatherAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nexport const gather: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: GatherAttributes): Tensor[] => {\n validateInputs(inputs, attributes.axis);\n const output = inferenceHandler.run(createGatherProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n };\n\nexport const parseGatherAttributes: OperatorInitialization = (node: Graph.Node): GatherAttributes =>\n createAttributeWithCacheKey({axis: node.attributes.getInt('axis', 0)});\n\nconst gatherProgramMetadata = {\n name: 'Gather',\n inputNames: ['A', 'B'],\n inputTypes: [TextureType.unpacked, TextureType.unpacked],\n};\n\nconst createGatherProgramInfo =\n (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n const indexDataShape = inputs[1].dims.slice();\n const outputShape = new Array(inputShape.length + indexDataShape.length - 1);\n\n axis = ShapeUtil.normalizeAxis(axis, inputShape.length);\n const indexCopyOps: string[] = [];\n for (let i = 0; i < outputShape.length; i++) {\n // outputShape is divided into three parts: A, B, C\n // |0 axis| axis + indexDataShape.length | end|\n // | A | B | C |\n //\n // inputIdx: [A, inputs[1][B], C]\n if (i < axis) { // A\n outputShape[i] = inputShape[i];\n indexCopyOps.push(`inputIdx[${i}] = outputIdx[${i}];`);\n } else {\n if (i < axis + indexDataShape.length) { // B\n outputShape[i] = indexDataShape[i - axis];\n indexCopyOps.push(`indexDataIdx[${i - axis}] = outputIdx[${i}];`);\n } else { // C\n outputShape[i] = inputShape[i - indexDataShape.length + 1]; // skip 1 for axis\n indexCopyOps.push(`inputIdx[${i - indexDataShape.length + 1}] = outputIdx[${i}];`);\n }\n }\n }\n\n const orank = outputShape.length || 1;\n const irank = inputShape.length;\n const iDrank = indexDataShape.length || 1;\n const shaderSource = `\n float process(int outputIdx[${orank}]) {\n int inputIdx[${irank}];\n int indexDataIdx[${iDrank}];\n indexDataIdx[0] = 0;\n ${indexCopyOps.join('\\n ')}\n int idx = int(_B(indexDataIdx));\n inputIdx[${axis}] = idx < 0 ? 
idx + ${inputShape[axis]} : idx;\n return _A(inputIdx);\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst createGatherProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: GatherAttributes): ProgramInfoLoader => {\n const metadata = {...gatherProgramMetadata, cacheHint: attributes.cacheKey};\n return {...metadata, get: () => createGatherProgramInfo(handler, metadata, inputs, attributes.axis)};\n };\n\nconst validateInputs = (inputs: Tensor[], axis: number): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n const tensorRank = inputs[0].dims.length;\n if (tensorRank < 1) {\n throw new Error('Invalid input shape.');\n }\n if (axis < -tensorRank || axis > tensorRank - 1) {\n throw new Error('Invalid axis.');\n }\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n if (inputs[1].type !== 'int32' && inputs[1].type !== 'int16') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {GemmUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n isOptionalC: boolean; // in opset 11, C becomes optional\n}\n\nexport const gemm: OperatorImplementation<GemmAttributes> =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: GemmAttributes): Tensor[] => {\n validateInputs(inputs, attributes);\n const output = inferenceHandler.run(createGemmProgramInfoLoader(inputs, attributes), inputs);\n return [output];\n };\n\nconst parseGemmAttributes = (node: Graph.Node, isOptionalC: boolean): GemmAttributes => {\n const transA = node.attributes.getInt('transA', 0) !== 0;\n const transB = node.attributes.getInt('transB', 0) !== 0;\n const alpha = node.attributes.getFloat('alpha', 1.0);\n const beta = node.attributes.getFloat('beta', 1.0);\n return createAttributeWithCacheKey({transA, transB, alpha, beta, isOptionalC});\n};\n\nexport const parseGemmAttributesV7: OperatorInitialization<GemmAttributes> = (node: Graph.Node): GemmAttributes =>\n parseGemmAttributes(node, false);\n\nexport const parseGemmAttributesV11: OperatorInitialization<GemmAttributes> = (node: Graph.Node): GemmAttributes =>\n parseGemmAttributes(node, true);\n\nconst createGemmProgramInfoLoader = (inputs: Tensor[], attributes: GemmAttributes): ProgramInfoLoader => {\n const metadata = {\n name: 'Gemm',\n inputNames: inputs.length === 3 ? ['A', 'B', 'C'] : ['A', 'B'],\n inputTypes: inputs.length === 3 ? 
[TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n key: attributes.cacheKey\n };\n\n return {...metadata, get: () => createGemmProgramInfo(metadata, inputs, attributes)};\n};\n\nconst createGemmProgramInfo =\n (metadata: ProgramMetadata, inputs: Tensor[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n let sharedDim = aShape[aShape.length - 1];\n let line = '';\n if (attributes.transA) {\n sharedDim = aShape[0];\n }\n if (attributes.transA && attributes.transB) {\n line = 'value += _A_T(a) * _B_T(b);';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += _A_T(a) * _B(b);';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += _A(a) * _B_T(b);';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += _A(a) * _B(b);';\n }\n const rank = outputShape.length;\n const declareC = inputs.length === 3 ? `int c[${inputs[2].dims.length}];` : '';\n const broadcastC = inputs.length === 3 ? 'bcastIndices_C(indices, c);' : '';\n const calculateC = inputs.length === 3 ? 'value += beta * _C(c);' : '';\n const shaderSource = `\n float process(int indices[${rank}]) {\n int a[${rank}];\n int b[${rank}];\n ${declareC}\n\n copyVec(indices, a);\n copyVec(indices, b);\n ${broadcastC}\n\n float value = 0.0;\n for (int k=0; k<${sharedDim}; ++k) {\n a[${rank - 1}] = k;\n b[${rank - 2}] = k;\n ${line}\n }\n\n value = value * alpha;\n ${calculateC}\n return value;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n variables: [\n {name: 'alpha', type: 'float', data: attributes.alpha}, {name: 'beta', type: 'float', data: attributes.beta}\n ],\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[], attributes: GemmAttributes): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (attributes.isOptionalC && (inputs.length < 2 || inputs.length > 3)) {\n throw new Error('Invaid input shape.');\n }\n if (!attributes.isOptionalC && inputs.length !== 3) {\n throw new Error('Gemm requires 3 inputs');\n }\n\n // 'C' can be of dimensionality 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length !== 1 && inputs[2].dims.length !== 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].type !== 'float32' && inputs[0].type !== 'float64') ||\n (inputs[1].type !== 'float32' && inputs[1].type !== 'float64') ||\n (inputs.length === 3 && inputs[2].type !== 'float32' && inputs[2].type !== 'float64')) {\n throw new Error('Invalid input type.');\n }\n\n if ((inputs[0].type !== inputs[1].type) || (inputs.length === 3 && inputs[0].type !== inputs[2].type)) {\n throw new Error('Input types are mismatched');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport interface ImageScalerAttributes extends AttributeWithCacheKey {\n scale: number;\n bias: number[];\n}\n\nexport const imageScaler: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ImageScalerAttributes): Tensor[] => {\n validateInputs(inputs);\n const output =\n inferenceHandler.run(createImageScalerProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n };\n\nexport const parseImageScalerAttributes: OperatorInitialization =\n (node: Graph.Node): ImageScalerAttributes => {\n const scale = node.attributes.getFloat('scale');\n const bias = node.attributes.getFloats('bias');\n return createAttributeWithCacheKey({scale, bias});\n };\n\nconst imageScalerProgramMetadata = {\n name: 'ImageScaler',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n};\n\nconst createImageScalerProgramInfo =\n (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], attributes: ImageScalerAttributes):\n ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n const rank = outputShape.length;\n const getBiasMethod = createGetBiasMethod(attributes.bias.length);\n const shaderSource = `\n ${getBiasMethod}\n float process(int indices[${rank}]) {\n return _X(indices) * scale + getBias(bias, indices[1]);\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n variables: [\n {name: 'bias', type: 'float', arrayLength: attributes.bias.length, data: attributes.bias},\n {name: 'scale', type: 'float', data: attributes.scale}\n ],\n shaderSource\n };\n };\n\nconst createImageScalerProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ImageScalerAttributes): ProgramInfoLoader => {\n const metadata = {...imageScalerProgramMetadata, cacheHint: attributes.cacheKey};\n return {...metadata, get: () => createImageScalerProgramInfo(handler, metadata, inputs, attributes)};\n };\n\nconst createGetBiasMethod = (numChannels: number): string => {\n const codeLines: string[] = [`float getBias(float bias[${numChannels}], int channel) {`];\n for (let i = 0; i < numChannels; ++i) {\n if (i === 0) {\n codeLines.push(\n '\\t' +\n `if (channel == ${i}) { return bias[${i}]; }`);\n } else if (i === numChannels - 1) {\n codeLines.push(\n '\\t' +\n `else { return bias[${i}]; }`);\n } else {\n codeLines.push(\n '\\t' +\n `else if (channel == ${i}) { return bias[${i}]; }`);\n }\n }\n codeLines.push(\n '\\t' +\n '}');\n return codeLines.join('\\n');\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('ImageScaler requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('Invalid input shape.');\n }\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport const instanceNormalization: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], epsilon: number): Tensor[] => {\n validateInputs(inputs);\n\n const meanAndVariance = inferenceHandler.run(createMeanAndVarianceProgramInfoLoader(inputs[0]), inputs);\n const output = inferenceHandler.run(\n createComputeOutputProgramInfoLoader(inferenceHandler, inputs[0], epsilon, meanAndVariance.dims),\n [inputs[0], meanAndVariance, inputs[1], inputs[2]]);\n return [output];\n };\n\nexport const parseInstanceNormalizationAttributes: OperatorInitialization = (node: Graph.Node): number =>\n node.attributes.getFloat('epsilon', 1e-5);\n\nconst meanAndVarianceProgramMetadata = {\n name: 'InstanceNormalization_MeanAndVariance',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n};\n\nconst createMeanAndVarianceProgramInfo = (metadata: ProgramMetadata, input: Tensor): ProgramInfo => {\n const xDims = input.dims.slice();\n const channel = xDims[1];\n const channelSize = xDims[2] * xDims[3];\n const outputShape = [xDims[0], channel];\n\n const shaderSource = `\n vec4 process(int[2] indices) {\n vec4 v = vec4(0.0);\n int a[4];\n a[0] = indices[0];\n a[1] = indices[1];\n float temp = 0.0;\n for(int a2=0; a2<${xDims[2]}; a2++) {\n a[2] = a2;\n for(int a3=0; a3<${xDims[3]}; a3++) {\n a[3] = a3;\n float x = _X(a);\n temp += x;\n }\n }\n float mean = temp / float(${channelSize});\n temp = 0.0;\n for(int a2=0; a2<${xDims[2]}; a2++) {\n a[2] = a2;\n for(int a3=0; a3<${xDims[3]}; a3++) {\n a[3] = a3;\n float x = _X(a);\n temp += (x - mean) * (x - mean);\n }\n }\n v.r = mean;\n v.g = temp / float(${channelSize});\n\n return v;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.packedLastDimension},\n shaderSource\n };\n};\n\nconst createMeanAndVarianceProgramInfoLoader = (input: Tensor): ProgramInfoLoader => ({\n ...meanAndVarianceProgramMetadata,\n get: () => createMeanAndVarianceProgramInfo(meanAndVarianceProgramMetadata, input)\n});\n\nconst computeOutputProgramMetadata = {\n name: 'InstanceNormalization_ComputeOutput',\n inputNames: ['X', 'MeanAndVariance', 'Scale', 'B'],\n inputTypes: [TextureType.unpacked, TextureType.packedLastDimension, TextureType.unpacked, TextureType.unpacked],\n};\n\nconst createComputeOutputProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, input: Tensor, epsilon: number,\n meanAndVarianceShape: readonly number[]): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(meanAndVarianceShape, TextureType.packedLastDimension);\n const [meanAndVarianceWidth, meanAndVarianceHeight] = [textureWidth / 4, textureHeight];\n const shaderSource = `\n vec4 get_MeanAndVariance(int[2] mv) {\n int offset = indicesToOffset_MeanAndVariance(mv);\n vec2 coords = offsetToCoords(offset, ${meanAndVarianceWidth}, ${meanAndVarianceHeight});\n return ${glsl.texture2D}(MeanAndVariance, coords);\n }\n\n float process(int[4] indices) {\n int mv[2];\n 
mv[0] = indices[0];\n mv[1] = indices[1];\n vec4 mean_and_variance = get_MeanAndVariance(mv);\n float mean = mean_and_variance.r;\n float variance = mean_and_variance.g;\n\n int sb[1];\n sb[0] = indices[1];\n float scale = _Scale(sb);\n float b = _B(sb);\n\n return scale * (_X(indices) - mean) / sqrt(variance + epsilon) + b;\n }`;\n return {\n ...metadata,\n output: {dims: input.dims, type: input.type, textureType: TextureType.unpacked},\n variables: [{name: 'epsilon', type: 'float', data: epsilon}],\n shaderSource\n };\n };\n\nconst createComputeOutputProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, epsilon: number, meanAndVarianceShape: readonly number[]):\n ProgramInfoLoader => {\n const metadata = {...computeOutputProgramMetadata, cacheHint: `${epsilon}`};\n return {\n ...metadata,\n get: () => createComputeOutputProgramInfo(inferenceHandler, metadata, input, epsilon, meanAndVarianceShape)\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 3) {\n throw new Error('InstanceNormalization requires 3 inputs.');\n }\n\n const X = inputs[0];\n const scale = inputs[1];\n const B = inputs[2];\n\n // input should at least have three dimensions - N,C,dim1,...,dimn\n // other inputs can have only one dimensions\n if (X.dims.length < 3 || scale.dims.length !== 1 || B.dims.length !== 1) {\n throw new Error('Invalid input shape.');\n }\n if (scale.dims[0] !== X.dims[1] || B.dims[0] !== X.dims[1]) {\n throw new Error('Input shapes are mismatched.');\n }\n if ((X.type !== 'float32' && X.type !== 'float64') || (scale.type !== 'float32' && scale.type !== 'float64') ||\n (B.type !== 'float32' && B.type !== 'float64')) {\n throw new Error('Invalid input type.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('Only support 4-D input shape.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\n\nexport interface LrnAttributes extends AttributeWithCacheKey {\n alpha: number;\n beta: number;\n bias: number;\n size: number;\n}\n\nexport const lrn: OperatorImplementation<LrnAttributes> =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: LrnAttributes): Tensor[] => {\n validateInputs(inputs);\n\n // if (inferenceHandler.session.pack) {\n // return [inferenceHandler.run(createPackedLrnProgramInfoLoader(inferenceHandler, inputs, attributes),\n // inputs)];\n // } else {\n return [inferenceHandler.run(createLrnProgramInfoLoader(inputs, attributes), inputs)];\n //}\n };\n\nexport const parseLrnAttributes: OperatorInitialization<LrnAttributes> = (node: Graph.Node): LrnAttributes => {\n const alpha = node.attributes.getFloat('alpha', 0.0001);\n const beta = node.attributes.getFloat('beta', 0.75);\n const bias = node.attributes.getFloat('bias', 1.0);\n const size = node.attributes.getInt('size');\n\n return createAttributeWithCacheKey({alpha, beta, bias, size});\n};\n\nconst lrnProgramMetadata = {\n name: 'LRN',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked]\n};\n\nfunction createLrnProgramInfo(inputs: Tensor[], attributes: LrnAttributes): ProgramInfo {\n const C = inputs[0].dims[1];\n const rank = inputs[0].dims.length;\n const from = -Math.floor((attributes.size - 1) / 2);\n const to = Math.ceil((attributes.size - 1) / 2);\n const alpha = `float(${attributes.alpha}) / float(${attributes.size})`;\n const bias = `float(${attributes.bias})`;\n const beta = `float(${attributes.beta})`;\n\n const shaderSource = `\n float process(int indices[${rank}]) {\n int c = indices[1];\n float x = _X(indices);\n float square_sum = 0.0;\n\n for (int i = ${from}; i <= ${to}; i++) {\n int idx = c + i;\n if (idx >= 0 && idx < ${C}) {\n indices[1] = idx;\n float j = _X(indices);\n square_sum += j * j;\n }\n }\n return x / pow(${bias} + ${alpha} * square_sum, ${beta});\n }`;\n return {\n ...lrnProgramMetadata,\n cacheHint: attributes.cacheKey,\n output: {dims: inputs[0].dims, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n };\n}\n\nexport function createLrnProgramInfoLoader(inputs: Tensor[], attributes: LrnAttributes): ProgramInfoLoader {\n return {...lrnProgramMetadata, cacheHint: attributes.cacheKey, get: () => createLrnProgramInfo(inputs, attributes)};\n}\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('LRN requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('currently only supports LRN for input with \"NCHW\" format');\n }\n if (inputs[0].type !== 'float32') {\n throw new Error('input should be float type');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl, Glsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface PadAttributes extends AttributeWithCacheKey {\n readonly mode: string;\n readonly pads: number[];\n readonly value: number;\n}\n\nconst padProgramMetadata = {\n name: 'Pad',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n};\n\nexport const padV2: OperatorImplementation<PadAttributes> =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: PadAttributes): Tensor[] => {\n validateInputsV2(inputs);\n const output = inferenceHandler.run(\n {\n ...padProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createPadProgramInfo(inferenceHandler, inputs[0], attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parsePadAttributesV2: OperatorInitialization<PadAttributes> = (node: Graph.Node): PadAttributes => {\n const mode = node.attributes.getString('mode', 'constant');\n const value = node.attributes.getFloat('value', 0.0);\n const pads = node.attributes.getInts('pads');\n return createAttributeWithCacheKey({mode, value, pads});\n};\n\nexport const padV11: OperatorImplementation<string> =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], mode: string): Tensor[] => {\n validateInputsV11(inputs);\n const attributes = generatePadAttributesFromInputs(inferenceHandler, inputs, mode);\n return padV2(inferenceHandler, [inputs[0]], attributes);\n };\n\nexport const parsePadAttributesV11: OperatorInitialization<string> = (node: Graph.Node): string =>\n node.attributes.getString('mode', 'constant');\n\nconst generatePadAttributesFromInputs =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], mode: string): PadAttributes => {\n if (!inferenceHandler.session.isInitializer(inputs[1].dataId) ||\n (inputs.length >= 3 && !inferenceHandler.session.isInitializer(inputs[2].dataId))) {\n throw new Error('dynamic pad attributes are not allowed');\n }\n\n const pads = Array.from(inputs[1].integerData);\n const value = (inputs.length >= 3) ? 
inputs[2].floatData[0] : 0.0;\n\n return createAttributeWithCacheKey({mode, pads, value});\n };\n\nconst createPadProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(input.dims.slice(), attributes.pads);\n const rank = outputShape.length;\n const padFunction = getPadFunction(inferenceHandler, input, attributes);\n const shaderSource = `\n ${padFunction}\n float process(int[${rank}] indices) {\n return padA(indices);\n }`;\n return {\n name: 'Pad',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputsV2 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Pad requires 1 input');\n }\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n};\n\nconst validateInputsV11 = (inputs: Tensor[]): void => {\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Pad requires 2 or 3 inputs');\n }\n if (inputs[1].type !== 'int32') {\n throw new Error('Invalid input type.');\n }\n if (inputs.length >= 3 && inputs[2].type === 'string') {\n throw new Error('Invalid input type.');\n }\n};\n\nconst getPadFunction = (inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: PadAttributes): string => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [width, height] = inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const strides = ShapeUtil.computeStrides(input.dims);\n\n switch (attributes.mode) {\n case 'constant':\n return getPadConstant(glsl, input.dims, strides, width, height, attributes.pads, attributes.value);\n case 'reflect':\n return getPadReflect(glsl, input.dims, strides, width, height, attributes.pads);\n case 'edge':\n return getPadEdge(glsl, input.dims, strides, width, height, attributes.pads);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst getPadConstant =\n (glsl: Glsl, shape: readonly number[], strides: readonly number[], width: number, height: number, pads: number[],\n value: number): string => {\n const rank = shape.length;\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n k = m[${i}] - ${pads[i]};\n if (k < 0) return constant;\n if (k >= ${shape[i]}) return constant;\n offset += k * ${strides[i]};\n `;\n }\n return `\n float padA(int m[${rank}]) {\n const float constant = float(${value});\n int offset = 0;\n int k = 0;\n ${block}\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(A, coords));\n return value;\n }\n `;\n };\n\nconst getPadReflect =\n (glsl: Glsl, shape: readonly number[], strides: readonly number[], width: number, height: number, pads: number[]):\n string => {\n const rank = shape.length;\n\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n k = m[${i}] - ${pads[i]};\n if (k < 0) { k = -k; }\n {\n const int _2n_1 = ${2 * (shape[i] - 1)};\n k = int( mod( float(k), float(_2n_1) ) ) ;\n if(k >= ${shape[i]}) { k = _2n_1 - k; }\n }\n offset += k * ${strides[i]};\n `;\n }\n return `\n float padA(int m[${rank}]) {\n int offset = 0;\n int k = 0;\n ${block}\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(A, coords));\n return value;\n }\n `;\n };\n\nconst 
getPadEdge =\n (glsl: Glsl, shape: readonly number[], strides: readonly number[], width: number, height: number, pads: number[]):\n string => {\n const rank = shape.length;\n\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n k = m[${i}] - ${pads[i]};\n if (k < 0) k = 0;\n if (k >= ${shape[i]}) k = ${shape[i] - 1};\n offset += k * ${strides[i]};\n `;\n }\n return `\n float padA(int m[${rank}]) {\n int offset = 0;\n int k = 0;\n ${block}\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(A, coords));\n return value;\n }\n `;\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {PoolConvUtil, ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport interface AveragePoolAttributes extends AttributeWithCacheKey {\n readonly autoPad: string;\n readonly ceilMode: number;\n readonly countIncludePad: boolean;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nexport const averagePool: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: AveragePoolAttributes): Tensor[] => {\n validateInputs(inputs);\n const metadata =\n {name: 'AveragePool', inputNames: ['X'], inputTypes: [TextureType.unpacked], cacheHint: attributes.cacheKey};\n const output = inferenceHandler.run(\n {...metadata, get: () => createAveragePoolProgramInfo(inputs, metadata, false, attributes)}, inputs);\n return [output];\n };\n\nexport const parseAveragePoolAttributes: OperatorInitialization =\n (node: Graph.Node): AveragePoolAttributes => {\n const autoPad = node.attributes.getString('auto_pad', 'NOTSET');\n const ceilMode = node.attributes.getInt('ceil_mode', 0);\n const countIncludePad = (node.attributes.getInt('count_include_pad', 0) === 0 ? 
false : true);\n const kernelShape = node.attributes.getInts('kernel_shape');\n const strides = node.attributes.getInts('strides', []);\n const pads = node.attributes.getInts('pads', []);\n\n // TODO: support attribute 'ceil_mode'\n if (ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n\n return createAttributeWithCacheKey({autoPad, ceilMode, countIncludePad, kernelShape, strides, pads});\n };\n\nconst createAveragePoolProgramInfo =\n (inputs: Tensor[], metadata: ProgramMetadata, isGlobalOperator: boolean, attributes: AveragePoolAttributes):\n ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(inputs, attributes, isGlobalOperator);\n const kernelSize = ShapeUtil.size(adjustedAttributes.kernelShape);\n const op1 = 'value += _X(x);';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= float(${kernelSize});`;\n } else {\n op2 += `value /= float(${kernelSize} - pad);`;\n }\n const poolingCode = generatePoolingCode(inputs[0].dims, adjustedAttributes, op1, op2, '0.0');\n const shaderSource = `\n ${poolingCode}\n `;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nexport const globalAveragePool: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: AveragePoolAttributes): Tensor[] => {\n validateInputs(inputs);\n const metadata = {\n name: 'GlobalAveragePool',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n cacheHint: `${attributes.countIncludePad}`\n };\n const output = inferenceHandler.run(\n {...metadata, get: () => createAveragePoolProgramInfo(inputs, metadata, true, attributes)}, inputs);\n return [output];\n };\n\nexport const parseGlobalAveragePoolAttributes: OperatorInitialization =\n (node: Graph.Node): AveragePoolAttributes => {\n const countIncludePad = (node.attributes.getInt('count_include_pad', 0) === 0 ? 
false : true);\n return createAttributeWithCacheKey(\n {autoPad: '', ceilMode: 0, countIncludePad, kernelShape: [], strides: [], pads: []});\n };\n\nexport interface MaxPoolAttributes extends AveragePoolAttributes {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nexport const maxPool: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: MaxPoolAttributes): Tensor[] => {\n validateInputs(inputs);\n const metadata =\n {name: 'MaxPool', inputNames: ['X'], inputTypes: [TextureType.unpacked], cacheHint: attributes.cacheKey};\n const output = inferenceHandler.run(\n {...metadata, get: () => createMaxPoolProgramInfo(inputs, metadata, false, attributes)}, inputs);\n return [output];\n };\n\nexport const parseMaxPoolAttributes: OperatorInitialization =\n (node: Graph.Node): MaxPoolAttributes => {\n const autoPad = node.attributes.getString('auto_pad', 'NOTSET');\n const ceilMode = node.attributes.getInt('ceil_mode', 0);\n const kernelShape = node.attributes.getInts('kernel_shape');\n const strides = node.attributes.getInts('strides', []);\n const pads = node.attributes.getInts('pads', []);\n const storageOrder = node.attributes.getInt('storage_order', 0);\n const dilations = node.attributes.getInts('dilations', []);\n\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n\n return createAttributeWithCacheKey(\n {autoPad, ceilMode, countIncludePad: false, kernelShape, strides, pads, storageOrder, dilations});\n };\n\nconst createMaxPoolProgramInfo =\n (inputs: Tensor[], metadata: ProgramMetadata, isGlobalOperator: boolean, attributes: MaxPoolAttributes):\n ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(inputs, attributes, isGlobalOperator);\n const op1 = `\n value = max(_X(x), value);\n `;\n const op2 = '';\n const poolingCode = generatePoolingCode(inputs[0].dims, adjustedAttributes, op1, op2, '-1e5');\n const shaderSource = `\n ${poolingCode}\n `;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst getAdjustedPoolAttributesAndOutputShape =\n (inputs: Tensor[], attributes: AveragePoolAttributes|MaxPoolAttributes, isGlobalOperator: boolean):\n [AveragePoolAttributes|MaxPoolAttributes, number[]] => {\n const inputShape = inputs[0].dims.slice();\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? 
(attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShape, kernelShape, strides, dilations, pads);\n\n const outputShape = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShape, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n return [newAttributes, outputShape];\n };\n\nconst globalMaxPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: [],\n cacheKey: ''\n};\n\nconst globalMaxPoolMetadata = {\n name: 'GlobalMaxPool',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked]\n};\n\nexport const globalMaxPool = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...globalMaxPoolMetadata,\n get: () => createMaxPoolProgramInfo(inputs, globalMaxPoolMetadata, true, globalMaxPoolAttributes)\n },\n inputs);\n return [output];\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Pool ops requires 1 input.');\n }\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n};\n\nconst generatePoolingCode =\n (inputDims: readonly number[], attributes: AveragePoolAttributes, op1: string, op2: string, start: string):\n string => {\n const rank = inputDims.length;\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const dimW = inputDims[rank - 1];\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n if (pwStart + pwEnd !== 0) {\n codeW = `\n for (int i = 0; i < ${kw}; i++) {\n x[${rank} - 1] = indices[${rank} - 1] * ${sw} - ${pwStart} + i;\n if (x[${rank} - 1] < 0 || x[${rank} - 1] >= ${dimW}) {\n pad++;\n continue;\n }\n ${op1}\n }`;\n } else {\n codeW = `\n for (int i = 0; i < ${kw}; i++) {\n x[${rank} - 1] = indices[${rank} - 1] * ${sw} - ${pwStart} + i;\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n const dimH = inputDims[rank - 2];\n if (phStart + phEnd !== 0) {\n codeH = `\n for (int j = 0; j < ${kh}; j++) {\n x[${rank} - 2] = indices[${rank} - 2] * ${sh} - ${phStart} + j;\n if (x[${rank} - 2] < 0 || x[${rank} - 2] >= ${dimH}) {\n pad+= ${kw};\n continue;\n }\n `;\n } else {\n codeH = `\n for (int j = 0; j < ${kh}; j++) {\n x[${rank} - 2] = indices[${rank} - 2] * ${sh} - ${phStart} + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n float process(int indices[${rank}]) {\n int x[${rank}];\n copyVec(indices, x);\n\n float value = ${start};\n int pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n return value;\n 
}\n `;\n return poolingCode;\n } else {\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n const stridesRank = kernelStrides.length;\n const padsRank = attributes.pads.length;\n const offsetToIndicesFunction = offsetToIndices(stridesRank);\n const copyInputDims = copyArray(inputDims, 'inputDims');\n const copyPads = copyArray(attributes.pads, 'pads');\n const copyKernelStrides = copyArray(kernelStrides, 'kernelStrides');\n const copyStrides = copyArray(attributes.strides, 'strides');\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (x[j] >= inputDims[j] || x[j] < 0) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n ${op1}\n }`;\n } else {\n padCode = `\n }\n ${op1}\n `;\n }\n const poolingCode = `\n ${offsetToIndicesFunction}\n float process(int indices[${rank}]) {\n int x[${rank}];\n copyVec(indices, x);\n int offset[${stridesRank}];\n int pads[${padsRank}];\n int inputDims[${rank}];\n int kernelStrides[${stridesRank}];\n int strides[${stridesRank}];\n ${copyPads}\n ${copyInputDims}\n ${copyStrides}\n ${copyKernelStrides}\n\n float value = ${start};\n int pad = 0;\n bool isPad = false;\n for (int i = 0; i < ${kernelSize}; i++) {\n offsetToIndices(i, kernelStrides, offset);\n isPad = false;\n for (int j = ${rank} - ${stridesRank}; j < ${rank}; j++) {\n x[j] = indices[j] * strides[j - ${rank} + ${stridesRank}]\n + offset[j - ${rank} + ${stridesRank}] - pads[j - 2];\n ${padCode}\n }\n ${op2}\n\n return value;\n }\n `;\n return poolingCode;\n }\n };\n\nconst copyArray = (array: readonly number[], arrayName: string): string => {\n let block = '';\n for (let i = 0; i < array.length; i++) {\n block += `\n ${arrayName}[${i}] = ${array[i]};\n `;\n }\n return block;\n};\n\nconst offsetToIndices = (rank: number): string => `\n void offsetToIndices(int offset, int[${rank}] strides, out int[${rank}] indices) {\n if (${rank} == 0) {\n return;\n }\n for (int i = 0; i < ${rank} - 1; ++i) {\n indices[i] = offset / strides[i];\n offset -= indices[i] * strides[i];\n }\n indices[${rank} - 1] = offset;\n }`;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {NUMBER_TYPES, OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n readonly axes: number[];\n readonly keepDims: boolean;\n}\n\n// return [init ops, reduce ops, final ops]\ntype ReduceOp = (inputs: Tensor[], axes: number[]) => string[];\n\nconst reduce =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes, name: string,\n reduceOp: ReduceOp): Tensor[] => {\n validateInputs(inputs);\n\n const reduceProgramMetadata = {\n name,\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n };\n\n const output = inferenceHandler.run(\n {\n ...reduceProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () =>\n createReduceProgramInfo(inferenceHandler, inputs, attributes, name, reduceOp, reduceProgramMetadata)\n },\n inputs);\n return [output];\n };\n\nexport const parseReduceAttributes: OperatorInitialization = (node: Graph.Node): ReduceAttributes => {\n const axes = node.attributes.getInts('axes', []);\n const keepDims = node.attributes.getInt('keepdims', 1) === 1;\n return createAttributeWithCacheKey({axes, keepDims});\n};\n\nconst createReduceProgramInfo =\n (_handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes, _name: string, reduceOp: ReduceOp,\n reduceProgramMetadata: ProgramMetadata): ProgramInfo => {\n const outputShape: number[] = [];\n const iRank = inputs[0].dims.length || 1;\n\n const idxCopy = []; // copy output indexes to input indexes\n\n const axes = ShapeUtil.normalizeAxes(attributes.axes, inputs[0].dims.length);\n const ops = reduceOp(inputs, axes);\n let reduceOps = ops[1];\n\n for (let k = 0; k < inputs[0].dims.length; k++) {\n // if this axis is reduced\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n if (attributes.keepDims) {\n outputShape.push(1);\n } // else { remove the axis from outputShape; }\n\n // loop over the d-th axis\n reduceOps = `\n for(int j${k} = 0; j${k} < ${inputs[0].dims[k]}; j${k}++) {\n inputIdx[${k}] = j${k};\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`inputIdx[${k}] = outputIdx[${outputShape.length}];`);\n\n outputShape.push(inputs[0].dims[k]);\n }\n }\n\n const oRank = outputShape.length || 1;\n\n const shaderSource = `\n float process(int outputIdx[${oRank}]) {\n float value; // final result\n int inputIdx[${iRank}]; // addressing input data\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${reduceOps}\n ${ops[2]} // final computation for reduce mean\n return value;\n }`;\n\n return {\n ...reduceProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n // TODO: support Reduce* operators with 2 inputs.\n if (!inputs || inputs.length !== 1) {\n throw new Error('Reduce op requires 1 input.');\n }\n\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport const reduceSum: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: 
ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['value = 0.0;', 'value += _A(inputIdx);', ''];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceSum', reduceOp);\n };\n\nexport const reduceMean: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (inputs: Tensor[], axes: number[]): string[] => {\n let size = 1.0;\n for (let k = 0; k < inputs[0].dims.length; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n size *= inputs[0].dims[k];\n }\n }\n\n return ['value = 0.0;', 'value += _A(inputIdx);', `value /= ${size}.;`]; // ensure real number with `.`\n };\n return reduce(inferenceHandler, inputs, attributes, 'ReduceMean', reduceOp);\n };\n\nexport const reduceMax: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (inputs: Tensor[], axes: number[]): string[] => {\n const idxZero = [];\n for (let k = 0; k < inputs[0].dims.length; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`inputIdx[${k}] = 0;`); // first element\n }\n }\n\n return [`${idxZero.join('\\n')}\\nvalue = _A(inputIdx);`, 'value = max(value, _A(inputIdx));', ''];\n };\n return reduce(inferenceHandler, inputs, attributes, 'ReduceMax', reduceOp);\n };\n\nexport const reduceMin: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (inputs: Tensor[], axes: number[]): string[] => {\n const idxZero = [];\n for (let k = 0; k < inputs[0].dims.length; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`inputIdx[${k}] = 0;`); // first element\n }\n }\n\n return [`${idxZero.join('\\n')}\\nvalue = _A(inputIdx);`, 'value = min(value, _A(inputIdx));', ''];\n };\n return reduce(inferenceHandler, inputs, attributes, 'ReduceMin', reduceOp);\n };\n\nexport const reduceProd: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['value = 1.0;', 'value *= _A(inputIdx);', ''];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceProd', reduceOp);\n };\n\nexport const reduceLogSum: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['value = 0.0;', 'value += _A(inputIdx);', 'value = log(value);'];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceLogSum', reduceOp);\n };\n\nexport const reduceLogSumSquare: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['float t; value = 0.0;', 't = _A(inputIdx); value += t * t;', ''];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceLogSumSquare', reduceOp);\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const reshape = (handler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n const reshapedDims = ShapeUtil.calculateReshapedDims(inputs[0].dims, inputs[1].integerData);\n if (handler.session.pack) {\n return [handler.reshapePacked(inputs[0], reshapedDims)];\n } else {\n return [handler.reshapeUnpacked(inputs[0], reshapedDims)];\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface UpsampleAttributes extends AttributeWithCacheKey {\n readonly opset: number;\n readonly isResize: boolean;\n readonly mode: string;\n readonly scales: number[];\n readonly extrapolationValue: number;\n readonly coordinateTransformMode: string;\n readonly useExtrapolation: boolean;\n readonly needRoiInput: boolean;\n readonly nearestMode: string;\n readonly cubicCoefficientA: number;\n readonly excludeOutside: boolean;\n readonly useNearest2xOptimization: boolean;\n readonly roiInputIdx: number;\n readonly scalesInputIdx: number;\n readonly sizesInputIdx: number;\n}\n\nconst upsampleProgramMetadata = {\n name: 'Upsample',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n};\n\nexport const upsample: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): Tensor[] => {\n validateInputs(inputs, attributes);\n const output = inferenceHandler.run(\n {\n ...upsampleProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createUpsampleProgramInfo(inferenceHandler, inputs, attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseUpsampleAttributesV7: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 7);\n\nexport const parseUpsampleAttributesV9: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 9);\n\nexport const parseUpsampleAttributes = (node: Graph.Node, opset: number): UpsampleAttributes => {\n const isResize = (opset >= 10);\n\n // processing node attributes\n const mode = node.attributes.getString('mode', 'nearest');\n if (mode !== 'nearest' && mode !== 'linear' && (opset < 11 || mode !== 'cubic')) {\n throw new Error(`unrecognized mode: ${mode}`);\n }\n\n let scales: number[] = [];\n if (opset < 9) {\n scales = node.attributes.getFloats('scales');\n scalesValidation(scales, mode, isResize);\n }\n\n const extrapolationValue = node.attributes.getFloat('extrapolation_value', 0.0);\n\n const coordinateTransformMode =\n opset > 10 ? 
node.attributes.getString('coordinate_transformation_mode', 'half_pixel') : 'asymmetric';\n if ([\n 'asymmetric', 'pytorch_half_pixel', 'tf_half_pixel_for_nn', 'align_corners', 'tf_crop_and_resize', 'half_pixel'\n ].indexOf(coordinateTransformMode) === -1) {\n throw new Error(`coordinate_transform_mode '${coordinateTransformMode}' is not supported`);\n }\n const needRoiInput = (coordinateTransformMode === 'tf_crop_and_resize');\n const useExtrapolation = needRoiInput;\n\n const nearestMode =\n (mode === 'nearest' && opset >= 11) ? node.attributes.getString('nearest_mode', 'round_prefer_floor') : '';\n if (['round_prefer_floor', 'round_prefer_ceil', 'floor', 'ceil', ''].indexOf(nearestMode) === -1) {\n throw new Error(`nearest_mode '${nearestMode}' is not supported`);\n }\n\n const cubicCoefficientA = node.attributes.getFloat('cubic_coeff_a', -0.75);\n const excludeOutside = node.attributes.getInt('exclude_outside', 0) !== 0;\n if (excludeOutside && mode !== 'cubic') {\n throw new Error('exclude_outside can be set to 1 only when mode is CUBIC.');\n }\n\n const useNearest2xOptimization =\n (opset < 11) ? true : (mode === 'nearest' && coordinateTransformMode === 'asymmetric' && nearestMode === 'floor');\n\n let roiInputIdx = 0;\n let scalesInputIdx = 0;\n let sizesInputIdx = 0;\n\n if (opset > 10) {\n // handle when roiInput is not given\n if (node.inputs.length > 2) {\n roiInputIdx = 1;\n scalesInputIdx = 2;\n sizesInputIdx = 3;\n } else {\n scalesInputIdx = 1;\n sizesInputIdx = 2;\n }\n } else if (opset === 9) {\n scalesInputIdx = 1;\n }\n\n return createAttributeWithCacheKey({\n opset,\n isResize,\n mode,\n scales,\n extrapolationValue,\n coordinateTransformMode,\n useExtrapolation,\n needRoiInput,\n nearestMode,\n cubicCoefficientA,\n excludeOutside,\n useNearest2xOptimization,\n roiInputIdx,\n scalesInputIdx,\n sizesInputIdx\n });\n};\n\nconst createUpsampleProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [inputWidth, inputHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(inputs[0].dims, TextureType.unpacked);\n\n const outputShape = inputs[0].dims.map((dim, i) => Math.floor(dim * attributes.scales[i]));\n const [outputWidth, outputHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(outputShape, TextureType.unpacked);\n const dim = outputShape.length;\n\n const outputPitches = new Array(dim);\n const inputPitches = new Array(dim);\n let precalculatedPitches = `\n int output_pitches[${dim}];\n int input_pitches[${dim}];\n `;\n for (let d = dim - 1; d >= 0; d--) {\n outputPitches[d] = (d === dim - 1) ? 1 : outputPitches[d + 1] * outputShape[d + 1];\n inputPitches[d] = (d === dim - 1) ? 
1 : inputPitches[d + 1] * inputs[0].dims[d + 1];\n\n precalculatedPitches += `\n output_pitches[${d}] = ${outputPitches[d]};\n input_pitches[${d}] = ${inputPitches[d]};\n `;\n }\n const getInputFloatFunction = `\n float getInputFloat(int index) {\n vec2 coords = offsetToCoords(index, ${inputWidth}, ${inputHeight});\n float value = getColorAsFloat(${glsl.texture2D}(X, coords));\n return value;\n }\n `;\n\n const shaderSource = attributes.mode === 'nearest' ?\n // nearest\n `\n ${getInputFloatFunction}\n float process(int indices[${dim}]) {\n int input_index = 0;\n int output_index = coordsToOffset(TexCoords, ${outputWidth}, ${outputHeight});\n\n ${precalculatedPitches}\n\n int d, m;\n for (int dim = 0; dim < ${dim}; ++dim) {\n d = output_index / output_pitches[dim];\n m = output_index - d * output_pitches[dim];\n output_index = m;\n\n if (scales[dim] != 1 && d > 0) {\n int d2 = d / scales[dim];\n m = d - d2 * scales[dim];\n d = d2;\n }\n input_index += input_pitches[dim] * d;\n }\n\n return getInputFloat(input_index);\n }` :\n dim === 4 ?\n // bilinear 4D\n `\n ${getInputFloatFunction}\n float process(int indices[4]) {\n int input_index = 0;\n int output_index = coordsToOffset(TexCoords, ${outputWidth}, ${outputHeight});\n\n ${precalculatedPitches}\n\n int m;\n int index_of_dim0, index_of_dim1, index_of_dim2, index_of_dim3;\n index_of_dim0 = output_index / output_pitches[0];\n m = output_index - index_of_dim0 * output_pitches[0];\n index_of_dim1 = m / output_pitches[1];\n m = m - index_of_dim1 * output_pitches[1];\n index_of_dim2 = m / output_pitches[2];\n m = m - index_of_dim2 * output_pitches[2];\n index_of_dim3 = m;\n\n int index_of_input_dim2, index_of_input_dim3, x_offset, y_offset;\n index_of_input_dim2 = index_of_dim2 / scales[2];\n y_offset = index_of_dim2 - index_of_input_dim2 * scales[2];\n index_of_input_dim3 = index_of_dim3 / scales[3];\n x_offset = index_of_dim3 - index_of_input_dim3 * scales[3];\n\n input_index = index_of_dim0 * input_pitches[0] +\n index_of_dim1 * input_pitches[1] +\n index_of_input_dim2 * input_pitches[2] +\n index_of_input_dim3;\n\n float x00 = getInputFloat(input_index);\n float x10, x01, x11;\n\n bool end_of_dim2 = false;\n if (index_of_input_dim2 == (${inputs[0].dims[2]} - 1)) {\n // It's the end in dimension 2\n x01 = x00;\n end_of_dim2 = true;\n } else {\n x01 = getInputFloat(input_index + input_pitches[2]);\n }\n\n if (index_of_input_dim3 == (input_pitches[2] - 1)) {\n // It's the end in dimension 3\n x10 = x00;\n x11 = x01;\n }\n else {\n x10 = getInputFloat(input_index + 1);\n x11 = end_of_dim2 ? 
x10 : getInputFloat(input_index + input_pitches[2] + 1);\n }\n\n float y0 = x00 + float(y_offset) * (x01 - x00) / float(scales[2]);\n float y1 = x10 + float(y_offset) * (x11 - x10) / float(scales[2]);\n return y0 + float(x_offset) * (y1 - y0) / float(scales[3]);\n }` :\n // bilinear 2D\n `\n ${getInputFloatFunction}\n float process(int indices[2]) {\n int input_index = 0;\n int output_index = coordsToOffset(TexCoords, ${outputWidth}, ${outputHeight});\n\n ${precalculatedPitches}\n\n int m;\n int index_of_dim0, index_of_dim1;\n index_of_dim0 = output_index / output_pitches[0];\n m = output_index - index_of_dim0 * output_pitches[0];\n index_of_dim1 = m;\n\n int index_of_input_dim0, index_of_input_dim1, x_offset, y_offset;\n index_of_input_dim0 = index_of_dim0 / scales[0];\n y_offset = index_of_dim0 - index_of_input_dim0 * scales[0];\n index_of_input_dim1 = index_of_dim1 / scales[1];\n x_offset = index_of_dim1 - index_of_input_dim1 * scales[1];\n\n input_index = index_of_input_dim0 * input_pitches[0] + index_of_input_dim1;\n\n float x00 = getInputFloat(input_index);\n float x10, x01, x11;\n\n bool end_of_dim0 = false;\n if (index_of_input_dim0 == (${inputs[0].dims[0]} - 1)) {\n // It's the end in dimension 0\n x01 = x00;\n end_of_dim0 = true;\n } else {\n x01 = getInputFloat(input_index + input_pitches[0]);\n }\n\n if (index_of_input_dim1 == (input_pitches[0] - 1)) {\n // It's the end in dimension 1\n x10 = x00;\n x11 = x01;\n }\n else {\n x10 = getInputFloat(input_index + 1);\n x11 = end_of_dim0 ? x10 : getInputFloat(input_index + input_pitches[0] + 1);\n }\n\n float y0 = x00 + float(y_offset) * (x01 - x00) / float(scales[0]);\n float y1 = x10 + float(y_offset) * (x11 - x10) / float(scales[0]);\n return y0 + float(x_offset) * (y1 - y0) / float(scales[1]);\n }`;\n return {\n ...upsampleProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n variables: [{\n name: 'scales',\n type: 'int',\n arrayLength: attributes.scales.length,\n data: attributes.scales.map(x => Math.ceil(x))\n }]\n };\n };\n\nexport const validateInputs = (inputs: Tensor[], attribute: UpsampleAttributes): void => {\n if (!inputs || (attribute.opset < 9 && inputs.length !== 1) ||\n (attribute.opset >= 9 && attribute.opset < 11 && inputs.length !== 2) ||\n (attribute.opset >= 11 && inputs.length < 2)) {\n throw new Error('invalid inputs.');\n }\n\n if (attribute.scales.length > 0 && inputs[0].dims.length !== attribute.scales.length) {\n throw new Error('Invalid input shape.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('Invalid input tensor types.');\n }\n};\n\nexport const scalesValidation = (scales: number[], mode: string, isResize: boolean): void => {\n if (!isResize) {\n for (const scale of scales) {\n if (scale < 1) {\n throw new Error('Scale value should be greater than or equal to 1.');\n }\n }\n } else {\n for (const scale of scales) {\n if (scale <= 0) {\n throw new Error('Scale value should be greater than 0.');\n }\n }\n }\n if (mode === 'linear' || mode === 'cubic') {\n if (scales.length !== 2 && (scales.length !== 4 || scales[0] !== 1 || scales[1] !== 1)) {\n throw new Error(`'Linear' mode and 'Cubic' mode only support 2-D inputs ('Bilinear', 'Bicubic') \\\n or 4-D inputs with the corresponding outermost 2 scale values being 1 \\\n in the ${isResize ? 'Resize' : 'Upsample'} opeartor.`);\n }\n }\n};", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\nimport {getCoordsDataType} from '../utils';\n\nimport {unpackFromChannel} from './packing-utils';\nimport {parseUpsampleAttributes, scalesValidation, UpsampleAttributes, validateInputs} from './upsample';\n\nconst resizeProgramMetadata = {\n name: 'Resize',\n inputNames: ['A'],\n inputTypes: [TextureType.packed]\n};\n\nexport const resize: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): Tensor[] => {\n validateInputs(inputs, attributes);\n const output = inferenceHandler.run(\n {\n ...resizeProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createPackedResizeProgramInfo(inferenceHandler, inputs, attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseResizeAttributesV10: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 10);\n\nexport const parseResizeAttributesV11: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 11);\n\nconst createPackedResizeProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [scales, outputShape] = prepareInputs(inputs, attributes);\n\n const isSame =\n scales.every((s: number) => s === 1) && attributes.coordinateTransformMode !== 'tf_crop_and_resize';\n if (isSame) {\n return {\n ...resizeProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n hasMain: true,\n shaderSource: `void main() {\n vec4 v = ${glsl.texture2D}(X, TexCoords);\n ${glsl.output} = v;\n }`\n };\n }\n\n const dim = outputShape.length;\n if (dim < 2) {\n throw new Error(`output dimension should be at least 2, but got ${dim}`);\n }\n\n const outputHeight = outputShape[dim - 2];\n const outputWidth = outputShape[dim - 1];\n\n const inputShape = inputs[0].dims;\n if (dim !== inputShape.length) {\n throw new Error(`output dimension should match input ${inputShape.length}, but got ${dim}`);\n }\n const inputHeight = inputShape[dim - 2];\n const inputWidth = inputShape[dim - 1];\n\n const scalesHeight = scales[dim - 2];\n const scalesWidth = scales[dim - 1];\n\n let getSourceFracIndex = '';\n\n if (attributes.mode !== 'linear') {\n // TODO: support other modes\n throw new Error(`resize (packed) does not support mode: '${attributes.mode}'`);\n }\n switch (attributes.coordinateTransformMode) {\n case 'asymmetric':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n return vec4(coords) / scaleWHWH;\n }\n `;\n break;\n case 'half_pixel':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n return (vec4(coords) + 0.5) / scaleWHWH - 0.5;\n }\n `;\n break;\n case 'pytorch_half_pixel':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n vec4 fcoords = vec4(coords);\n return vec4(\n ${outputWidth}.0 > 1.0 ? (fcoords.x + 0.5) / scaleWHWH.x - 0.5 : 0.0,\n ${outputHeight}.0 > 1.0 ? (fcoords.y + 0.5) / scaleWHWH.y - 0.5 : 0.0,\n ${outputWidth}.0 > 1.0 ? 
(fcoords.z + 0.5) / scaleWHWH.z - 0.5 : 0.0,\n ${outputHeight}.0 > 1.0 ? (fcoords.w + 0.5) / scaleWHWH.w - 0.5 : 0.0\n );\n }\n `;\n break;\n case 'align_corners':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n vec4 resized = vec4(${outputWidth}.0 - 1.0, ${outputHeight}.0 - 1.0, ${outputWidth}.0 - 1.0,\n ${outputHeight}.0 - 1.0);\n vec4 original = vec4(${inputWidth}.0 - 1.0, ${inputHeight}.0 - 1.0, ${inputWidth}.0 - 1.0,\n ${inputHeight}.0 - 1.0);\n vec4 new_scale = original / resized;\n return vec4(coords) * new_scale;\n }\n `;\n break;\n default:\n // TODO:supporting other coordinateTransformModes\n throw new Error(`resize (packed) does not support coordinateTransformMode: \\\n '${attributes.coordinateTransformMode}'`);\n }\n\n const coordsDataType = getCoordsDataType(dim);\n const unpackChannel = unpackFromChannel();\n const shaderSource = `\n const vec2 inputWH = vec2(${inputHeight}.0, ${inputWidth}.0);\n const vec4 scaleWHWH = vec4(float(${scalesHeight}), float(${scalesWidth}), float(${scalesHeight}), float(${\n scalesWidth}));\n ${unpackChannel}\n ${getSourceFracIndex}\n float getAValue(int x10, int r, int c, int d) {\n return getChannel(getA(x10, r, c, d), vec2(c, d));\n }\n void main() {\n ${coordsDataType} rc = getOutputCoords();\n\n int batch = rc[0];\n int depth = rc[1];\n\n // retrieve the 4 coordinates that is used in the 4 packed output values.\n ivec4 coords = ivec4(rc.wz, rc.w + 1, rc.z + 1);\n\n // calculate the source index in fraction\n vec4 sourceFrac = getSourceFracIndex(coords);\n\n // get the lower and upper bound of the 4 values that will be packed into one texel.\n ivec4 x00 = ivec4(max(sourceFrac.xy, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.xy)));\n ivec4 x01 = ivec4(max(sourceFrac.xw, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.xw)));\n ivec4 x10 = ivec4(max(sourceFrac.zy, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.zy)));\n ivec4 x11 = ivec4(max(sourceFrac.zw, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.zw)));\n\n bool hasNextRow = rc.w < ${outputHeight - 1};\n bool hasNextCol = rc.z < ${outputWidth - 1};\n\n // pack x00, x01, x10, x11's top-left corner into one vec4 structure\n vec4 topLeft = vec4(\n getAValue(batch, depth, x00.x, x00.y),\n hasNextCol ? getAValue(batch, depth, x01.x, x01.y) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.x, x10.y) : 0.0,\n (hasNextRow && hasNextCol) ? getAValue(batch, depth, x11.x, x11.y) : 0.0);\n\n // pack x00, x01, x10, x11's top-right corner into one vec4 structure\n vec4 topRight = vec4(\n getAValue(batch, depth, x00.x, x00.w),\n hasNextCol ? getAValue(batch, depth, x01.x, x01.w) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.x, x10.w) : 0.0,\n (hasNextRow && hasNextCol) ? getAValue(batch, depth, x11.x, x11.w) : 0.0);\n\n // pack x00, x01, x10, x11's bottom-left corner into one vec4 structure\n vec4 bottomLeft = vec4(\n getAValue(batch, depth, x00.z, x00.y),\n hasNextCol ? getAValue(batch, depth, x01.z, x01.y) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.z, x10.y) : 0.0,\n (hasNextRow && hasNextCol) ? getAValue(batch, depth, x11.z, x11.y) : 0.0);\n\n // pack x00, x01, x10, x11's bottom-right corner into one vec4 structure\n vec4 bottomRight = vec4(\n getAValue(batch, depth, x00.z, x00.w),\n hasNextCol ? getAValue(batch, depth, x01.z, x01.w) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.z, x10.w) : 0.0,\n (hasNextRow && hasNextCol) ? 
getAValue(batch, depth, x11.z, x11.w) : 0.0);\n\n // calculate the interpolation fraction on u and v direction\n vec4 frac = vec4(sourceFrac) - floor(sourceFrac);\n vec4 clampFrac = clamp(frac, vec4(0.0), vec4(1.0));\n\n vec4 top = mix(topLeft, topRight, clampFrac.ywyw);\n vec4 bottom = mix(bottomLeft, bottomRight, clampFrac.ywyw);\n vec4 newValue = mix(top, bottom, clampFrac.xxzz);\n\n ${glsl.output} = vec4(newValue);\n }\n `;\n return {\n ...resizeProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n hasMain: true,\n shaderSource\n };\n };\n\n\nconst prepareInputs = (inputs: Tensor[], attributes: UpsampleAttributes): [readonly number[], readonly number[]] => {\n const x = inputs[0];\n const xDims = x.dims;\n\n let scales = attributes.scales;\n let outputSizes: number[]|undefined;\n if (scales.length === 0) {\n const scalesTensor = inputs[attributes.scalesInputIdx];\n if (scalesTensor && scalesTensor.size !== 0) {\n if (inputs[attributes.sizesInputIdx]) {\n throw new Error('Only one of scales or sizes must be provided as input.');\n }\n scales = parseScalesData(scalesTensor, attributes.mode, attributes.isResize);\n } else {\n const sizesTensor = inputs[attributes.sizesInputIdx];\n if (!sizesTensor || sizesTensor.size === 0) {\n throw new Error('Either scales or sizes MUST be provided as input.');\n }\n\n outputSizes = Array.from(sizesTensor.integerData);\n scales = parseScalesDataFromOutputSize(outputSizes, xDims, attributes.mode, attributes.isResize);\n }\n } else {\n if (inputs[attributes.sizesInputIdx]) {\n throw new Error('Only one of scales or sizes must be provided as input.');\n }\n }\n\n const yDims = outputSizes || (xDims.map((dim, i) => Math.floor(dim * scales[i])));\n\n return [scales, yDims];\n};\n\nconst parseScalesData = (scale: Tensor, mode: string, isResize: boolean): number[] => {\n const scales = Array.from(scale.floatData);\n scalesValidation(scales, mode, isResize);\n return scales;\n};\n\nconst parseScalesDataFromOutputSize =\n (yDims: readonly number[], xDims: readonly number[], mode: string, isResize: boolean): number[] => {\n const length = xDims.length;\n const scales = new Array(length);\n\n for (let i = 0, end = length; i < end; i++) {\n if (xDims[i] === 0) {\n if (yDims[i] !== 0) {\n throw new Error('Input dim is zero but required output dim is non-zero.');\n }\n scales[i] = 1;\n } else {\n scales[i] = yDims[i] / xDims[i];\n }\n }\n scalesValidation(scales, mode, isResize);\n return scales;\n };\n\n// roi data is not used yet. but leave here for future usage.\n// const getRoi = (inputs: Tensor[], attributes: UpsampleAttributes) : number[] => {\n// let roi: number[] = [];\n// if (attributes.needRoiInput) {\n// if (attributes.roiInputIdx <= 0) {\n// throw new Error('Invalid roi input index.');\n// }\n// const roiTensor = inputs[attributes.roiInputIdx];\n// roi = roiTensor.size > 0 ? Array.from(roiTensor.floatData) : [];\n// } else {\n// roi = new Array(inputs[0].dims.length * 2).fill(0);\n// }\n// return roi;\n// };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const shape = (_inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n return [new Tensor([inputs[0].dims.length], 'int32', undefined, undefined, new Int32Array(inputs[0].dims))];\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Shape requires 1 input.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {NUMBER_TYPES, OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly axes: number[];\n readonly ends: number[];\n readonly starts: number[];\n}\n\nconst sliceProgramMetadata = {\n name: 'Slice',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked]\n};\n\nexport const slice: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SliceAttributes): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...sliceProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createSliceProgramInfo(inferenceHandler, inputs[0], attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseSliceAttributes: OperatorInitialization = (node: Graph.Node): SliceAttributes => {\n const starts = node.attributes.getInts('starts');\n const ends = node.attributes.getInts('ends');\n const axes = node.attributes.getInts('axes', []);\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n\nconst createSliceProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SliceAttributes): ProgramInfo => {\n const axes = (attributes.axes.length === 0) ? 
input.dims.slice(0).map((_val, i) => i) : attributes.axes;\n const normalizedAxes = ShapeUtil.normalizeAxes(axes, input.dims.length);\n const starts = attributes.starts.map((start, i) => {\n if (start > input.dims[normalizedAxes[i]] - 1) {\n return input.dims[normalizedAxes[i]];\n }\n return ShapeUtil.normalizeAxis(start, input.dims[normalizedAxes[i]]);\n });\n const ends = attributes.ends.map((end, i) => {\n if (end > input.dims[normalizedAxes[i]] - 1) {\n return input.dims[normalizedAxes[i]];\n }\n return ShapeUtil.normalizeAxis(end, input.dims[normalizedAxes[i]]);\n });\n\n const outputShape = input.dims.slice();\n\n const sliceOps: string[] = [];\n for (let i = 0; i < normalizedAxes.length; i++) {\n outputShape[normalizedAxes[i]] = ends[i] - starts[i];\n if (starts[i] > 0) {\n sliceOps.push(`outputIdx[${normalizedAxes[i]}] += ${starts[i]};`);\n } // else { sliceOps.push(`outputIdx[${normalizedAxes[i]}] += 0;`); }\n }\n\n const rank = outputShape.length;\n const shaderSource = `\n float process(int outputIdx[${rank}]) {\n ${sliceOps.join('\\n ')}\n return _A(outputIdx);\n }`;\n return {\n ...sliceProgramMetadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Slice requires 1 input.');\n }\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport const sliceV10 = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputsV10(inputs);\n const attributes = generateSliceAttributesFromInputs(inferenceHandler, inputs);\n const output = inferenceHandler.run(\n {\n ...sliceProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createSliceProgramInfo(inferenceHandler, inputs[0], attributes)\n },\n [inputs[0]]);\n return [output];\n};\n\nconst generateSliceAttributesFromInputs =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): SliceAttributes => {\n if (!inferenceHandler.session.isInitializer(inputs[1].dataId) ||\n !inferenceHandler.session.isInitializer(inputs[2].dataId) ||\n (inputs.length >= 4 && !inferenceHandler.session.isInitializer(inputs[3].dataId)) ||\n (inputs.length >= 5 && !inferenceHandler.session.isInitializer(inputs[4].dataId))) {\n throw new Error('dynamic slice attributes are not allowed');\n }\n\n if (inputs.length >= 5 && inputs[4].integerData.some((i: number) => i !== 1)) {\n throw new Error('currently non-1 steps is not supported for Slice');\n }\n\n const starts = Array.from(inputs[1].integerData);\n const ends = Array.from(inputs[2].integerData);\n const axes = inputs.length >= 4 ? 
Array.from(inputs[3].integerData) : [];\n const cacheKey = `${axes};${starts};${ends}`;\n return {starts, ends, axes, cacheKey};\n };\n\nconst validateInputsV10 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length < 3 || inputs.length > 5) {\n throw new Error('Invalid input number.');\n }\n if (inputs[1].type !== 'int32' || inputs[1].dims.length !== 1) {\n throw new Error('Invalid input type.');\n }\n if (inputs[2].type !== 'int32' || inputs[2].dims.length !== 1) {\n throw new Error('Invalid input type.');\n }\n if (inputs.length >= 4 && (inputs[3].type !== 'int32' || inputs[3].dims.length !== 1)) {\n throw new Error('Invalid input type.');\n }\n if (inputs.length >= 5 && (inputs[4].type !== 'int32' || inputs[4].dims.length !== 1)) {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nimport {transpose, TransposeAttributes} from './transpose';\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst softmaxComputeMaxProgramMetadata = {\n name: 'SoftmaxComputeMax',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n};\n\nconst softmaxComputeScaleProgramMetadata = {\n name: 'SoftmaxComputeScale',\n inputNames: ['A', 'Max'],\n inputTypes: [TextureType.unpacked, TextureType.unpacked],\n};\n\nconst softmaxProgramMetadata = {\n name: 'SoftMax',\n inputNames: ['A', 'Max', 'Norm'],\n inputTypes: [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked],\n};\n\nexport const softmax: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SoftmaxAttributes): Tensor[] => {\n validateInputs(inputs);\n\n const inputShape = inputs[0].dims.slice();\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const logicalRowCount = ShapeUtil.sizeToDimension(inputShape, axis);\n const featureCount = ShapeUtil.sizeFromDimension(inputShape, axis);\n\n const output = computeSoftmax(inferenceHandler, inputs, attributes, logicalRowCount, featureCount);\n return output;\n };\n\nexport const parseSoftmaxAttributes: OperatorInitialization =\n (node: Graph.Node): SoftmaxAttributes => createAttributeWithCacheKey({axis: node.attributes.getInt('axis', 1)});\n\nexport const parseSoftmaxAttributesV13: OperatorInitialization =\n (node: Graph.Node): SoftmaxAttributes => createAttributeWithCacheKey({axis: node.attributes.getInt('axis', -1)});\n\n// The \"semantic\" meaning of axis has changed in opset-13.\n// Please compare: https://github.com/onnx/onnx/blob/main/docs/Operators.md#Softmax\n// with https://github.com/onnx/onnx/blob/main/docs/Changelog.md#Softmax-11 for detailed explanations\n// To account for the opset-13 behavior, our plan will be to transpose the \"axis\" dim to the innermost dim\n// and perform softmax and then reverse the transpose. 
We can skip the transposing aspect if the axis is already\n// the innermost dim\nexport const softmaxV13: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SoftmaxAttributes): Tensor[] => {\n validateInputs(inputs);\n\n const inputShape = inputs[0].dims.slice();\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const rank = inputShape.length;\n\n const isTransposeRequired = (axis !== rank - 1) ? true : false;\n const transposedInputShape: number[] = [];\n let perm: number[] = [];\n let transposedInputs: Tensor[] = [];\n let transposeAttribute: TransposeAttributes;\n\n if (isTransposeRequired) {\n perm = Array.from({length: rank}).map((_, i) => i);\n\n // swap the innermost dim with the dim corresponding to axis\n perm[axis] = rank - 1;\n perm[rank - 1] = axis;\n\n perm.map(p => transposedInputShape.push(inputShape[p]));\n\n transposeAttribute = createAttributeWithCacheKey({perm});\n transposedInputs = transpose(inferenceHandler, inputs, transposeAttribute);\n }\n\n const logicalRowCount = isTransposeRequired ? ShapeUtil.sizeToDimension(transposedInputShape, rank - 1) :\n ShapeUtil.sizeToDimension(inputShape, rank - 1);\n const featureCount = isTransposeRequired ? ShapeUtil.sizeFromDimension(transposedInputShape, rank - 1) :\n ShapeUtil.sizeFromDimension(inputShape, rank - 1);\n\n const output = computeSoftmax(\n inferenceHandler, isTransposeRequired ? transposedInputs : inputs, attributes, logicalRowCount, featureCount);\n\n if (isTransposeRequired) {\n const reversedOutput = transpose(inferenceHandler, output, transposeAttribute!);\n return reversedOutput;\n } else {\n return output;\n }\n };\n\nconst computeSoftmax =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SoftmaxAttributes, logicalRowCount: number,\n featureCount: number): Tensor[] => {\n const computeMaxProgramInfo =\n createComputeMaxProgramInfo(inferenceHandler, inputs[0], logicalRowCount, featureCount, [logicalRowCount]);\n const max = inferenceHandler.run(\n {...softmaxComputeMaxProgramMetadata, cacheHint: attributes.cacheKey, get: () => computeMaxProgramInfo},\n inputs);\n\n const computeScaleProgramInfo = createComputScaleProgramInfo(\n inferenceHandler, inputs[0], logicalRowCount, featureCount, computeMaxProgramInfo.output.dims,\n [logicalRowCount]);\n const scale = inferenceHandler.run(\n {...softmaxComputeScaleProgramMetadata, cacheHint: attributes.cacheKey, get: () => computeScaleProgramInfo},\n [inputs[0], max]);\n\n const softMaxProgramInfo = createSoftMaxProgramInfo(\n inferenceHandler, inputs[0], logicalRowCount, featureCount, computeMaxProgramInfo.output.dims,\n computeScaleProgramInfo.output.dims);\n const output = inferenceHandler.run(\n {...softmaxProgramMetadata, cacheHint: attributes.cacheKey, get: () => softMaxProgramInfo},\n [inputs[0], max, scale]);\n return [output];\n };\n\n/**\n * Create a texture that contains the maximum value of each of the 'N' rows\n */\nconst createComputeMaxProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, logicalRowCount: number, featureCount: number,\n outputShape: number[]): ProgramInfo => {\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const rank = outputShape.length;\n\n if (logicalRowCount < 1 || featureCount < 1) {\n throw new Error('Logical row count N and feature count D must be greater than or equal to 1');\n }\n\n if (outputShape.length !== 1) {\n throw new 
Error('Dimensionality of the output should be 1');\n }\n\n if (outputShape[0] !== logicalRowCount) {\n throw new Error('Shape of the output should be equal to logical row count');\n }\n\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const shaderSource = `\n float process(int[${rank}] indices) {\n int logical_row_start_offset = indices[0] * ${featureCount};\n\n float max = getColorAsFloat(${glsl.texture2D}(A, offsetToCoords(logical_row_start_offset, ${textureWidth},\n ${textureHeight} )));\n for(int i=1; i<${featureCount}; ++i)\n {\n float current = getColorAsFloat(${glsl.texture2D}(A, offsetToCoords(logical_row_start_offset + i,\n ${textureWidth}, ${textureHeight})));\n if(current > max)\n max = current;\n }\n\n return max;\n }`;\n return {\n ...softmaxComputeMaxProgramMetadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\n/**\n * Create a texture that contains the normalization factor for each of the 'N' rows\n */\nconst createComputScaleProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, logicalRowCount: number, featureCount: number,\n maxElementPerLogicalRow: readonly number[], outputShape: number[]): ProgramInfo => {\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const rank = outputShape.length;\n\n if (logicalRowCount < 1 || featureCount < 1) {\n throw new Error('Logical row count N and feature count D must be greater than or equal to 1');\n }\n\n if (outputShape.length !== 1) {\n throw new Error('Dimensionality of the output should be 1');\n }\n\n if (outputShape[0] !== logicalRowCount) {\n throw new Error('Shape of the output should be equal to logical row count');\n }\n\n if (maxElementPerLogicalRow.length !== 1) {\n throw new Error('Dimensionality of the intermediate results should be 1');\n }\n\n if (maxElementPerLogicalRow[0] !== logicalRowCount) {\n throw new Error('Shape of the intermediate results should be equal to logical row count');\n }\n\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const shaderSource = `\n float process(int[${rank}] indices) {\n int logical_row_start_offset = indices[0] * ${featureCount};\n\n float norm_factor = 0.0;\n float max = _Max(indices);\n for(int i=0; i<${featureCount}; ++i)\n {\n norm_factor += exp(getColorAsFloat(${glsl.texture2D}(A, offsetToCoords(logical_row_start_offset + i,\n ${textureWidth}, ${textureHeight}))) - max);\n }\n\n return norm_factor;\n }`;\n return {\n ...softmaxComputeScaleProgramMetadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst createSoftMaxProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, logicalRowCount: number, featureCount: number,\n maxElementPerLogicalRow: readonly number[], normalizationPerLogicalRow: readonly number[]): ProgramInfo => {\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const rank = input.dims.length;\n\n if (logicalRowCount < 1 || featureCount < 1) {\n throw new Error('Logical row count N and feature count D must be greater than or equal to 1');\n }\n\n if (maxElementPerLogicalRow.length !== 1 || normalizationPerLogicalRow.length !== 1) {\n throw new Error('Dimensionality of the intermediate results should be 1');\n }\n\n if (maxElementPerLogicalRow[0] !== logicalRowCount || 
normalizationPerLogicalRow[0] !== logicalRowCount) {\n throw new Error('Shape of the intermediate results should be equal to logical row count');\n }\n\n const shaderSource = `\n float process(int[${rank}] indices) {\n\n // get offset of current logical tensor index from the 2-D texture coordinates (TexCoords)\n int offset = coordsToOffset(TexCoords, ${textureWidth}, ${textureHeight});\n\n //determine the logical row for this index\n int logical_row_index[1];\n logical_row_index[0] = offset / ${featureCount};\n\n float norm_factor = _Norm(logical_row_index);\n\n // avoid possible division by 0\n // if norm_facor is 0, all elements are zero\n // if so, return 0\n if(norm_factor == 0.0)\n return 0.0;\n\n return exp(_A(indices) - _Max(logical_row_index)) / norm_factor;\n }`;\n return {\n ...softmaxProgramMetadata,\n output: {dims: input.dims, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax requires 1 input.');\n }\n\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil, SplitUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly split: number[];\n readonly numOutputs: number;\n}\n\nconst splitProgramMetadata = {\n name: 'Split',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n};\n\nexport const split: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SplitAttributes): Tensor[] => {\n validateInputs(inputs);\n\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputs[0].dims.length);\n const count = getProgramCount(inferenceHandler, inputs, axis, attributes);\n const output: Tensor[] = [];\n for (let i = 0; i < count; ++i) {\n output.push(inferenceHandler.run(\n {\n ...splitProgramMetadata,\n cacheHint: `${attributes.cacheKey};${i}`,\n get: () => createSplitProgramInfo(inferenceHandler, inputs[0], attributes, axis, i)\n },\n inputs));\n }\n\n return output;\n };\n\nexport const parseSplitAttributes: OperatorInitialization = (node: Graph.Node): SplitAttributes => {\n const axis = node.attributes.getInt('axis', 0);\n const split = node.attributes.getInts('split', []);\n const numOutputs = node.outputs.length;\n return createAttributeWithCacheKey({axis, split, numOutputs});\n};\n\nconst getProgramCount =\n (_inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axis: number, attributes: SplitAttributes): number => {\n const [, offsets] = SplitUtil.splitShape(inputs[0].dims, axis, attributes.split, attributes.numOutputs);\n return offsets.length;\n };\n\nconst createSplitProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SplitAttributes, axis: number, index: number):\n ProgramInfo => {\n const [shapes, offsets] = SplitUtil.splitShape(input.dims, axis, attributes.split, attributes.numOutputs);\n const offset = 
offsets[index];\n const outputShape = shapes[index];\n const rank = outputShape.length;\n const shaderSource = `\n float process(int indices[${rank}]) {\n indices[${axis}] += ${offset};\n return _A(indices);\n }\n `;\n return {\n ...splitProgramMetadata,\n cacheHint: `${attributes.cacheKey}:${index}`,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Split requires one input.');\n }\n\n if (inputs[0].type !== 'int8' && inputs[0].type !== 'uint8' && inputs[0].type !== 'int16' &&\n inputs[0].type !== 'uint16' && inputs[0].type !== 'int32' && inputs[0].type !== 'uint32' &&\n inputs[0].type !== 'float32' && inputs[0].type !== 'float64' && inputs[0].type !== 'bool') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const squeeze: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axes: number[]): Tensor[] => {\n validateInputs(inputs);\n const outputShape = ShapeUtil.squeezeShape(inputs[0].dims, axes);\n const output = inferenceHandler.reshapeUnpacked(inputs[0], outputShape);\n return [output];\n };\n\nexport const squeezeV13 = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputsV13(inputs);\n return squeeze(inferenceHandler, [inputs[0]], Array.from(inputs[1].integerData));\n};\n\nexport const parseSqueezeAttributes: OperatorInitialization = (node: Graph.Node): number[] =>\n node.attributes.getInts('axes');\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Squeeze requires 1 input.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('invalid input tensor types.');\n }\n};\n\nconst validateInputsV13 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Squeeze requires 2 inputs.');\n }\n\n if (inputs[1].type !== 'int32') {\n throw new Error('Invalid input type.');\n }\n};", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport const sum = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n\n const sumProgramMetadata = {\n name: 'Sum',\n inputNames: inputs.map((_v, i) => `X${i}`),\n inputTypes: new Array(inputs.length).fill(TextureType.unpacked)\n };\n\n const output = inferenceHandler.run(\n {...sumProgramMetadata, get: () => createSumProgramInfo(inferenceHandler, inputs, sumProgramMetadata)}, inputs);\n return [output];\n};\n\nconst createSumProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], sumProgramMetadata: ProgramMetadata): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const outputShape = inputs[0].dims.slice();\n const sumLine = inputs.map((_v, i) => `${glsl.texture2D}(X${i},TexCoords)`).join(' + ');\n const shaderSource = `\n void main() {\n vec4 result = ${sumLine};\n ${glsl.output} = result;\n }\n `;\n return {\n ...sumProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n hasMain: true,\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length === 0) {\n throw new Error('Sum requires inputs.');\n }\n\n const length = inputs[0].dims.length;\n for (let i = 1; i < inputs.length; i++) {\n if (length !== inputs[i].dims.length) {\n throw new Error('Input shapes are mismatched.');\n }\n\n for (let j = 0; j < length; j++) {\n if (inputs[0].dims[j] !== inputs[i].dims[j]) {\n throw new Error('Input shapes are not matched.');\n }\n }\n }\n\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n for (let i = 1; i < inputs.length; i++) {\n if (inputs[0].type !== inputs[i].type) {\n throw new Error('Input types are not matched.');\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {NUMBER_TYPES} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport const tile = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n\n const tileProgramMetadata = {\n name: 'Tile',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n };\n\n const output = inferenceHandler.run(\n {...tileProgramMetadata, get: () => createTileProgramInfo(inferenceHandler, inputs, tileProgramMetadata)},\n inputs);\n return [output];\n};\n\nconst createTileProgramInfo =\n (_handler: WebGLInferenceHandler, inputs: Tensor[], tileProgramMetadata: ProgramMetadata): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n const outputShape = new Array(inputShape.length);\n\n const tileOps: string[] = [];\n for (let i = 0; i < inputShape.length; i++) {\n outputShape[i] = inputShape[i] * inputs[1].numberData[i];\n tileOps.push(`inputIdx[${i}] = int(mod(float(outputIdx[${i}]), ${inputShape[i]}.));`);\n }\n\n const rank = outputShape.length;\n const shaderSource = `\n float process(int outputIdx[${rank}]) {\n int inputIdx[${rank}];\n ${tileOps.join('\\n')}\n return _A(inputIdx);\n }\n `;\n return {\n ...tileProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 input.');\n }\n if (inputs[1].dims.length !== 1) {\n throw new Error('The second input shape must 1 dimension.');\n }\n if (inputs[1].dims[0] !== inputs[0].dims.length) {\n throw new Error('Invalid input shape.');\n }\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n if (inputs[1].type !== 'int32' && inputs[1].type !== 'int16') {\n throw new Error('Invalid repeat type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const unsqueeze: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axes: number[]): Tensor[] => {\n validateInputs(inputs);\n const outputShape = ShapeUtil.unsqueezeShape(inputs[0].dims, axes);\n const output = inferenceHandler.reshapeUnpacked(inputs[0], outputShape);\n return [output];\n };\n\nexport const unsqueezeV13 = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputsV13(inputs);\n return unsqueeze(inferenceHandler, [inputs[0]], Array.from(inputs[1].integerData));\n};\n\nexport const parseUnsqueezeAttributes: OperatorInitialization = (node: Graph.Node): number[] =>\n node.attributes.getInts('axes');\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Unsqueeze requires 1 input.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('invalid input tensor types.');\n }\n};\n\nconst validateInputsV13 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Unsqueeze requires 2 inputs.');\n }\n\n if (inputs[1].type !== 'int32') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OpSet} from '../../opset';\n\nimport {batchNormalization, parseBatchNormalizationAttributes} from './ops/batch-normalization';\nimport * as binaryOps from './ops/binary-op';\nimport {cast, parseCastAttributes} from './ops/cast';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {flatten, parseFlattenAttributes} from './ops/flatten';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gemm, parseGemmAttributesV11, parseGemmAttributesV7} from './ops/gemm';\nimport {imageScaler, parseImageScalerAttributes} from './ops/image-scaler';\nimport {instanceNormalization, parseInstanceNormalizationAttributes} from './ops/instance-normalization';\nimport {lrn, parseLrnAttributes} from './ops/lrn';\nimport {matMul, parseMatMulAttributes} from './ops/matmul';\nimport {padV11, padV2, parsePadAttributesV11, parsePadAttributesV2} from './ops/pad';\nimport {averagePool, globalAveragePool, globalMaxPool, maxPool, parseAveragePoolAttributes, parseGlobalAveragePoolAttributes, parseMaxPoolAttributes} from './ops/pool';\nimport {parseReduceAttributes, reduceLogSum, reduceLogSumSquare, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum} from './ops/reduce';\nimport {reshape} from './ops/reshape';\nimport {parseResizeAttributesV10, parseResizeAttributesV11, resize} from './ops/resize-packed';\nimport {shape} from './ops/shape';\nimport {parseSliceAttributes, slice, sliceV10} from './ops/slice';\nimport {parseSoftmaxAttributes, parseSoftmaxAttributesV13, softmax, softmaxV13} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {parseSqueezeAttributes, squeeze, squeezeV13} from './ops/squeeze';\nimport {sum} from 
'./ops/sum';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {parseUnsqueezeAttributes, unsqueeze, unsqueezeV13} from './ops/unsqueeze';\nimport {parseUpsampleAttributesV7, parseUpsampleAttributesV9, upsample} from './ops/upsample';\n\nexport const WEBGL_OP_RESOLVE_RULES: readonly OpSet.ResolveRule[] = [\n ['Abs', '', '6+', unaryOps.abs],\n ['Acos', '', '7+', unaryOps.acos],\n ['Add', '', '7+', binaryOps.add],\n ['And', '', '7+', binaryOps.and],\n ['Asin', '', '7+', unaryOps.asin],\n ['Atan', '', '7+', unaryOps.atan],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', '', '7+', averagePool, parseAveragePoolAttributes],\n ['BatchNormalization', '', '7+', batchNormalization, parseBatchNormalizationAttributes],\n ['Cast', '', '6+', cast, parseCastAttributes],\n ['Ceil', '', '6+', unaryOps.ceil],\n ['Clip', '', '6-10', unaryOps.clip, unaryOps.parseClipAttributes],\n ['Clip', '', '11+', unaryOps.clipV11],\n ['Concat', '', '4+', concat, parseConcatAttributes],\n ['Conv', '', '1+', conv, parseConvAttributes],\n ['ConvTranspose', '', '1+', convTranspose, parseConvTransposeAttributes],\n ['Cos', '', '7+', unaryOps.cos],\n ['Div', '', '7+', binaryOps.div],\n ['Dropout', '', '7+', unaryOps.identity],\n ['DepthToSpace', '', '1+', depthToSpace, parseDepthToSpaceAttributes],\n ['Equal', '', '7+', binaryOps.equal],\n ['Elu', '', '6+', unaryOps.elu, unaryOps.parseEluAttributes],\n ['Exp', '', '6+', unaryOps.exp],\n ['Flatten', '', '1+', flatten, parseFlattenAttributes],\n ['Floor', '', '6+', unaryOps.floor],\n ['FusedConv', 'com.microsoft', '1+', conv, parseConvAttributes],\n ['Gather', '', '1+', gather, parseGatherAttributes],\n ['Gemm', '', '7-10', gemm, parseGemmAttributesV7],\n ['Gemm', '', '11+', gemm, parseGemmAttributesV11],\n ['GlobalAveragePool', '', '1+', globalAveragePool, parseGlobalAveragePoolAttributes],\n ['GlobalMaxPool', '', '1+', globalMaxPool],\n ['Greater', '', '7+', binaryOps.greater],\n ['Identity', '', '1+', unaryOps.identity],\n ['ImageScaler', '', '1+', imageScaler, parseImageScalerAttributes],\n ['InstanceNormalization', '', '6+', instanceNormalization, parseInstanceNormalizationAttributes],\n ['LeakyRelu', '', '6+', unaryOps.leakyRelu, unaryOps.parseLeakyReluAttributes],\n ['Less', '', '7+', binaryOps.less],\n ['LRN', '', '1+', lrn, parseLrnAttributes],\n ['Log', '', '6+', unaryOps.log],\n ['MatMul', '', '1+', matMul, parseMatMulAttributes],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', '', '1+', maxPool, parseMaxPoolAttributes],\n ['Mul', '', '7+', binaryOps.mul],\n ['Neg', '', '6+', unaryOps.neg],\n ['Not', '', '1+', unaryOps.not],\n ['Or', '', '7+', binaryOps.or],\n ['Pad', '', '2-10', padV2, parsePadAttributesV2],\n ['Pad', '', '11+', padV11, parsePadAttributesV11],\n ['Pow', '', '7+', binaryOps.pow],\n ['PRelu', '', '7+', binaryOps.pRelu],\n ['ReduceLogSum', '', '1+', reduceLogSum, parseReduceAttributes],\n ['ReduceMax', '', '1+', reduceMax, parseReduceAttributes],\n ['ReduceMean', '', '1+', reduceMean, parseReduceAttributes],\n ['ReduceMin', '', '1+', reduceMin, parseReduceAttributes],\n ['ReduceProd', '', '1+', reduceProd, parseReduceAttributes],\n ['ReduceSum', '', '1-12', reduceSum, parseReduceAttributes],\n ['ReduceSumSquare', '', '1+', reduceLogSumSquare, parseReduceAttributes],\n ['Relu', '', '6+', unaryOps.relu],\n ['Reshape', '', '5+', reshape],\n ['Resize', '', '10', resize, parseResizeAttributesV10],\n 
['Resize', '', '11+', resize, parseResizeAttributesV11],\n ['Shape', '', '1+', shape],\n ['Sigmoid', '', '6+', unaryOps.sigmoid],\n ['Sin', '', '7+', unaryOps.sin],\n ['Slice', '', '10+', sliceV10], // TODO: support 'steps' for Slice-10\n ['Slice', '', '1-9', slice, parseSliceAttributes],\n // The \"semantic\" meaning of axis has changed in opset-13.\n ['Softmax', '', '1-12', softmax, parseSoftmaxAttributes],\n ['Softmax', '', '13+', softmaxV13, parseSoftmaxAttributesV13],\n // 'Split' operator has an optional attribute 'split'\n // this attribute determines how the specified axis of input data is split.\n // When the attribute is missing, we need the count of number of outputs\n // so that we can determine the 'split' attribute from the runtime input to the Operator\n ['Split', '', '2-12', split, parseSplitAttributes],\n ['Sqrt', '', '6+', unaryOps.sqrt],\n ['Squeeze', '', '1-12', squeeze, parseSqueezeAttributes],\n ['Squeeze', '', '13+', squeezeV13],\n ['Sub', '', '7+', binaryOps.sub],\n ['Sum', '', '6+', sum],\n ['Tan', '', '7+', unaryOps.tan],\n ['Tanh', '', '6+', unaryOps.tanh],\n ['Tile', '', '6+', tile],\n ['Transpose', '', '1+', transpose, parseTransposeAttributes],\n ['Upsample', '', '7-8', upsample, parseUpsampleAttributesV7],\n ['Upsample', '', '9', upsample, parseUpsampleAttributesV9],\n ['Unsqueeze', '', '1-12', unsqueeze, parseUnsqueezeAttributes],\n ['Unsqueeze', '', '13+', unsqueezeV13],\n ['Xor', '', '7+', binaryOps.xor],\n];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nconst INLINE_FUNC_DEF_REGEX = /@inline[\\s\\n\\r]+(\\w+)[\\s\\n\\r]+([0-9a-zA-Z_]+)\\s*\\(([^)]*)\\)\\s*{(([^}]|[\\n\\r])*)}/gm;\nconst FUNC_CALL_REGEX = '(\\\\w+)?\\\\s+([_0-9a-zA-Z]+)\\\\s+=\\\\s+__FUNC__\\\\((.*)\\\\)\\\\s*;';\n/**\n * GLSL preprocessor responsible for resolving @inline directives\n */\nexport function replaceInlines(script: string): string {\n const inlineDefs: {[name: string]: {params: Array<{type: string; name: string}|null>; body: string}} = {};\n let match;\n while ((match = INLINE_FUNC_DEF_REGEX.exec(script)) !== null) {\n const params = match[3]\n .split(',')\n .map(s => {\n const tokens = s.trim().split(' ');\n if (tokens && tokens.length === 2) {\n return {type: tokens[0], name: tokens[1]};\n }\n return null;\n })\n .filter(v => v !== null);\n inlineDefs[match[2]] = {params, body: match[4]};\n }\n for (const name in inlineDefs) {\n const regexString = FUNC_CALL_REGEX.replace('__FUNC__', name);\n const regex = new RegExp(regexString, 'gm');\n while ((match = regex.exec(script)) !== null) {\n const type = match[1];\n const variable = match[2];\n const params = match[3].split(',');\n const declLine = (type) ? `${type} ${variable};` : '';\n let newBody: string = inlineDefs[name].body;\n let paramRedecLine = '';\n inlineDefs[name].params.forEach((v, i) => {\n if (v) {\n paramRedecLine += `${v.type} ${v.name} = ${params[i]};\\n`;\n }\n });\n newBody = `${paramRedecLine}\\n ${newBody}`;\n newBody = newBody.replace('return', `${variable} = `);\n const replacement = `\n ${declLine}\n {\n ${newBody}\n }\n `;\n script = script.replace(match[0], replacement);\n }\n }\n script = script.replace(INLINE_FUNC_DEF_REGEX, '');\n return script;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../instrument';\nimport {assert} from '../../util';\n\n/** Layout preferences */\nexport interface WidthHeightPrefs {\n breakAxis?: number;\n isPacked?: boolean;\n reverseWH?: boolean;\n}\n/**\n * TextureLayoutStrategy is an abstraction for different plans\n * for mapping n-dimensional arrays to 2D textures (and back)\n */\nexport interface TextureLayoutStrategy {\n computeTextureWH(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number];\n}\n\n/**\n * This strategy try to find the minimal max(W,H) that fulfills (W * H == totalSize)\n */\nexport class AlwaysKeepOriginalSizeStrategy implements TextureLayoutStrategy {\n constructor(public maxTextureSize: number) {}\n computeTextureWH(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number] {\n // scalar tensor\n if (shape.length === 0) {\n return [1, 1];\n }\n const maxTextureSize = this.maxTextureSize;\n if (prefs && prefs.breakAxis !== undefined) {\n // check to see if dims fit\n const wsize = prefs.breakAxis >= shape.length ? 1 : shape.slice(prefs.breakAxis).reduce((a, b) => a * b);\n const hsize = prefs.breakAxis <= 0 ? 1 : shape.slice(0, prefs.breakAxis).reduce((a, b) => a * b);\n if (wsize > maxTextureSize || hsize > maxTextureSize) {\n // ignore preferences\n // continue with default layout\n Logger.verbose(\n 'TextureLayout',\n `Given width/height preferences were unattainable: shape:${shape}, breakAxis:${prefs.breakAxis}`);\n } else {\n return [wsize, hsize];\n }\n }\n const totalSize = shape.reduce((a, b) => a * b);\n\n let width = Math.floor(Math.sqrt(totalSize));\n\n for (; width < maxTextureSize && width < totalSize; width++) {\n if (totalSize % width === 0) {\n break;\n }\n }\n\n if (width >= maxTextureSize || totalSize % width !== 0) {\n throw new Error(`The given dimensions are outside this GPU's boundaries: ${shape}`);\n }\n return [width, totalSize / width];\n }\n}\n\nexport class PreferLogicalStrategy implements TextureLayoutStrategy {\n constructor(public maxTextureSize: number) {}\n computeTextureWH(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number] {\n const wh = this.computeTexture(shape, prefs);\n if (prefs && prefs.isPacked) {\n wh[0] /= 2;\n wh[1] /= 2;\n }\n if (prefs && prefs.reverseWH) {\n return [wh[1], wh[0]];\n }\n return wh;\n }\n\n computeTexture(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number] {\n const isPacked = prefs && prefs.isPacked;\n // scalar tensor\n if (shape.length === 0) {\n return isPacked ? [2, 2] : [1, 1];\n }\n let maxTextureSize = this.maxTextureSize;\n if (prefs && prefs.breakAxis !== undefined) {\n // check to see if dims fit\n const wsize = prefs.breakAxis >= shape.length ? 1 : shape.slice(prefs.breakAxis).reduce((a, b) => a * b);\n const hsize = prefs.breakAxis <= 0 ? 1 : shape.slice(0, prefs.breakAxis).reduce((a, b) => a * b);\n if (wsize > maxTextureSize || hsize > maxTextureSize) {\n // ignore preferences\n // continue with default layout\n Logger.verbose(\n 'TextureLayout',\n `Given width/height preferences were unattainable: shape:${shape}, breakAxis:${prefs.breakAxis}`);\n } else {\n return [wsize, hsize];\n }\n }\n let logShape = shape.slice(0);\n if (isPacked) {\n maxTextureSize = maxTextureSize * 2;\n\n // This logic ensures we accurately count the number of packed texels needed\n // to accommodate the tensor. We can only pack values in the same texel if\n // they are from adjacent pairs of rows/cols within the same batch. 
So if a\n // tensor has 3 rows, we pretend it has 4 rows in order to account for the\n // fact that the texels containing the third row are half empty.\n logShape = logShape.map(\n (_d, i) => i >= logShape.length - 2 ? (logShape[i] % 2 === 0 ? logShape[i] : logShape[i] + 1) : logShape[i]);\n\n // Packed texture height is at least 2 (the channel height of a single\n // texel).\n if (logShape.length === 1) {\n logShape = [2, logShape[0]];\n }\n }\n\n // If logical shape is 2, we don't squeeze, since we want to match physical.\n if (logShape.length !== 2) {\n const squeezeResult = squeezeShape(logShape);\n logShape = squeezeResult.newShape;\n }\n\n const size = sizeFromShape(logShape);\n if (logShape.length <= 1 && size <= maxTextureSize) {\n return [1, size];\n } else if (logShape.length === 2 && logShape[0] <= maxTextureSize && logShape[1] <= maxTextureSize) {\n return logShape as [number, number];\n } else if (logShape.length === 3 && logShape[0] * logShape[1] <= maxTextureSize && logShape[2] <= maxTextureSize) {\n return [logShape[0] * logShape[1], logShape[2]];\n } else if (logShape.length === 3 && logShape[0] <= maxTextureSize && logShape[1] * logShape[2] <= maxTextureSize) {\n return [logShape[0], logShape[1] * logShape[2]];\n } else if (\n logShape.length === 4 && logShape[0] * logShape[1] * logShape[2] <= maxTextureSize &&\n logShape[3] <= maxTextureSize) {\n return [logShape[0] * logShape[1] * logShape[2], logShape[3]];\n } else if (\n logShape.length === 4 && logShape[0] <= maxTextureSize &&\n logShape[1] * logShape[2] * logShape[3] <= maxTextureSize) {\n return [logShape[0], logShape[1] * logShape[2] * logShape[3]];\n } else {\n if (isPacked) {\n // For packed textures size equals the number of channels required to\n // accommodate the texture data. However in order to squarify such that\n // inner dimensions stay even, we rewrite size to equal the number of\n // texels. Then in the return statement we rehydrate the squarified\n // dimensions to channel units.\n return sizeToSquarishShape(size / 4).map(d => d * 2) as [number, number];\n }\n return sizeToSquarishShape(size);\n }\n }\n}\n\nexport function squeezeShape(shape: number[], axis?: number[]): {newShape: number[]; keptDims: number[]} {\n const newShape: number[] = [];\n const keptDims: number[] = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = (axis == null || isEmptyArray) ? null : parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axes != null) {\n if (axes[j] === i && shape[i] !== 1) {\n throw new Error(`Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axes[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return {newShape, keptDims};\n}\n\nexport function parseAxisParam(axis: number|number[], shape: number[]): number[] {\n const rank = shape.length;\n\n // Normalize input\n axis = axis == null ? 
shape.map((_s, i) => i) : ([] as number[]).concat(axis);\n\n // Check for valid range\n assert(\n axis.every(ax => ax >= -rank && ax < rank),\n () => `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n\n // Check for only integers\n assert(\n axis.every(isInt),\n () => 'All values in axis param must be integers but ' +\n `got axis ${axis}`);\n\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\nexport function isInt(a: number): boolean {\n return a % 1 === 0;\n}\nexport function sizeFromShape(shape: number[]): number {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\nexport function getRowsCols(shape: number[]): [number, number] {\n if (shape.length === 0) {\n throw Error('Cannot get rows and columns of an empty shape array.');\n }\n\n return [shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]];\n}\nexport function sizeToSquarishShape(size: number): [number, number] {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\nexport function getBatchDim(shape: number[], dimsToSkip = 2): number {\n return sizeFromShape(shape.slice(0, shape.length - dimsToSkip));\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {ArrayUtil, BroadcastUtil, ShapeUtil} from '../../util';\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\nimport {getGlsl} from './glsl-source';\nimport {squeezeShape} from './texture-layout-strategy';\nimport {TextureLayout} from './types';\nimport {generateShaderFuncNameFromInputSamplerName, generateShaderFuncNameFromInputSamplerNameAtOutCoords, getCoordsDataType, getGlChannels, getSqueezedParams, squeezeInputShape} from './utils';\n\n/**\n * GLSL Library responsible for data types and routines for manipulating\n * coordinates and mapping to/from tensor indices\n */\nexport class CoordsGlslLib extends GlslLib {\n returnType: string;\n\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {\n ...this.offsetToCoords(),\n ...this.coordsToOffset(),\n ...this.toVec(),\n ...this.valueFrom(),\n // TODO return these only when packing is enabled.\n ...this.getCommonUtilFuncs(),\n ...this.getInputsSamplingSnippets(),\n ...this.getOutputSamplingSnippet()\n };\n }\n getCustomTypes() {\n return {};\n }\n /**\n * Produces a function that can map from\n * 2D normalzied coordinates (s,t) to a flat offset\n */\n protected offsetToCoords(): {[name: string]: GlslLibRoutine} {\n const funcName = 'offsetToCoords';\n return {\n offsetToCoords: new GlslLibRoutine(`\n vec2 ${funcName}(int offset, int width, int height) {\n int t = offset / width;\n int s = offset - t*width;\n vec2 coords = (vec2(s,t) + vec2(0.5,0.5)) / vec2(width, height);\n return coords;\n }\n `)\n };\n }\n\n /**\n * Produces a function that can map from\n * 2D normalzied coordinates (s,t) to a flat offset\n */\n protected coordsToOffset(): {[name: string]: GlslLibRoutine} {\n const funcName = 'coordsToOffset';\n return {\n coordsToOffset: new GlslLibRoutine(`\n int ${funcName}(vec2 coords, int width, int height) {\n float s = coords.s * float(width);\n float t = coords.t * float(height);\n int offset = int(t) * width + int(s);\n return offset;\n }\n `)\n };\n }\n\n /**\n * Generates code for output sampler.\n */\n\n protected 
getOutputSamplingSnippet(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n if (outputLayout.isPacked) {\n return this.getPackedOutputSamplingSnippet(outputLayout);\n } else {\n return this.getUnpackedOutputSamplingSnippet(outputLayout);\n }\n }\n\n /**\n * Generates code for packed output sampler.\n */\n protected getPackedOutputSamplingSnippet(outputLayout: TextureLayout): {[name: string]: GlslLibRoutine} {\n const outShape = outputLayout.unpackedShape;\n const outTexShape = [outputLayout.width, outputLayout.height];\n const result: {[name: string]: GlslLibRoutine} = {};\n const funcName = 'getOutputCoords';\n switch (outShape.length) {\n case 0:\n result[funcName] = this.getOutputScalarCoords();\n break;\n case 1:\n result[funcName] = this.getOutputPacked1DCoords(outShape as [number], outTexShape as [number, number]);\n break;\n case 2:\n result[funcName] = this.getOutputPacked2DCoords(outShape as [number, number], outTexShape as [number, number]);\n break;\n case 3:\n result[funcName] =\n this.getOutputPacked3DCoords(outShape as [number, number, number], outTexShape as [number, number]);\n break;\n default:\n result[funcName] = this.getOutputPackedNDCoords(outShape, outTexShape as [number, number]);\n }\n const glsl = getGlsl(this.context.glContext.version);\n // TODO we need this to properly return a packed vec4 from kernels.\n // Replace all '{glsl.output} = result' with 'setOutput(result)' in all kernels.\n const floatTextureSetRGBASource = `\n void setOutput(vec4 val) {\n ${glsl.output} = val;\n }\n `;\n const floatTextureSetRGBAFuncName = 'floatTextureSetRGBA';\n result[floatTextureSetRGBAFuncName] = new GlslLibRoutine(floatTextureSetRGBASource);\n return result;\n }\n\n /**\n * Generates code for unpacked output sampler.\n */\n protected getUnpackedOutputSamplingSnippet(outputLayout: TextureLayout): {[name: string]: GlslLibRoutine} {\n const outShape = outputLayout.unpackedShape;\n const outTexShape = [outputLayout.width, outputLayout.height];\n const result: {[name: string]: GlslLibRoutine} = {};\n const funcName = 'getOutputCoords';\n switch (outShape.length) {\n case 0:\n result[funcName] = this.getOutputScalarCoords();\n break;\n case 1:\n result[funcName] = this.getOutputUnpacked1DCoords(outShape as [number], outTexShape as [number, number]);\n break;\n case 2:\n result[funcName] =\n this.getOutputUnpacked2DCoords(outShape as [number, number], outTexShape as [number, number]);\n break;\n case 3:\n result[funcName] =\n this.getOutputUnpacked3DCoords(outShape as [number, number, number], outTexShape as [number, number]);\n break;\n case 4:\n result[funcName] = this.getOutputUnpacked4DCoords(\n outShape as [number, number, number, number], outTexShape as [number, number]);\n break;\n case 5:\n result[funcName] = this.getOutputUnpacked5DCoords(\n outShape as [number, number, number, number, number], outTexShape as [number, number]);\n break;\n case 6:\n result[funcName] = this.getOutputUnpacked6DCoords(\n outShape as [number, number, number, number, number, number], outTexShape as [number, number]);\n break;\n default:\n throw new Error(`Unsupported output dimensionality: ${outShape.length}`);\n }\n const glsl = getGlsl(this.context.glContext.version);\n // TODO we need this to properly return a packed vec4 from kernels.\n // Replace all '{glsl.output} = result' with 'setOutput(result)' in all kernels.\n const floatTextureSetRSource = `\n void setOutput(float val) {\n ${glsl.output} = vec4(val, 0, 0, 0);\n }\n `;\n const 
floatTextureSetRFuncName = 'floatTextureSetR';\n result[floatTextureSetRFuncName] = new GlslLibRoutine(floatTextureSetRSource);\n return result;\n }\n\n /**\n * Scalar output coordinates.\n */\n protected getOutputScalarCoords(): GlslLibRoutine {\n return new GlslLibRoutine(`\n int getOutputCoords() {\n return 0;\n }\n `);\n }\n\n /**\n * 1D packed output coordinates.\n */\n protected getOutputPacked1DCoords(_shape: [number], texShape: [number, number]): GlslLibRoutine {\n const packedTexShape = texShape;\n let source = '';\n if (packedTexShape[0] === 1) {\n source = `\n int getOutputCoords() {\n return 2 * int(TexCoords.y * ${packedTexShape[1]}.0);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n if (packedTexShape[1] === 1) {\n source = `\n int getOutputCoords() {\n return 2 * int(TexCoords.x * ${packedTexShape[0]}.0);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n source = `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n return 2 * (resTexRC.y * ${packedTexShape[0]} + resTexRC.x);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * 2D packed output coordinates.\n */\n protected getOutputPacked2DCoords(shape: [number, number], texShape: [number, number]): GlslLibRoutine {\n let source = '';\n if (ArrayUtil.arraysEqual(shape, texShape)) {\n source = `\n ivec2 getOutputCoords() {\n return 2 * ivec2(TexCoords.xy * vec2(${texShape[0]}, ${texShape[1]}));\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n const packedTexShape = texShape;\n // texels needed to accommodate a logical row\n const texelsInLogicalRow = Math.ceil(shape[1] / 2);\n\n /**\n * getOutputCoords\n *\n * resTexRC: The rows and columns of the texels. If you move over one\n * texel to the right in the packed texture, you are moving over one column\n * (not two).\n *\n * index: The texel index\n */\n source = `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n\n int index = resTexRC.y * ${packedTexShape[0]} + resTexRC.x;\n\n // reverse r and c order for packed texture\n int r = imod(index, ${texelsInLogicalRow}) * 2;\n int c = 2 * (index / ${texelsInLogicalRow});\n\n return ivec2(r, c);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * 3D packed output coordinates.\n */\n protected getOutputPacked3DCoords(shape: [number, number, number], texShape: [number, number]): GlslLibRoutine {\n const packedTexShape = [texShape[0], texShape[1]];\n const texelsInLogicalRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2);\n const source = `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.y * ${packedTexShape[0]} + resTexRC.x;\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n // reverse r and c order for packed texture\n int r = imod(index, ${texelsInLogicalRow}) * 2;\n int c = 2 * (index / ${texelsInLogicalRow});\n\n return ivec3(b, r, c);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * ND packed output coordinates.\n */\n protected getOutputPackedNDCoords(shape: readonly number[], texShape: [number, number]): GlslLibRoutine {\n const packedTexShape = [texShape[0], texShape[1]];\n\n const texelsInLogicalRow = Math.ceil(shape[shape.length - 1] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[shape.length - 2] / 2);\n let texelsInBatchN = texelsInBatch;\n let 
batches = '';\n let coords = 'b, r, c';\n\n for (let b = 2; b < shape.length - 1; b++) {\n texelsInBatchN *= shape[shape.length - b - 1];\n batches = `\n int b${b} = index / ${texelsInBatchN};\n index -= b${b} * ${texelsInBatchN};\n ` + batches;\n coords = `b${b}, ` + coords;\n }\n const source = `\n ivec${shape.length} getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.y * ${packedTexShape[0]} + resTexRC.x;\n\n ${batches}\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n // reverse r and c order for packed texture\n int r = imod(index, ${texelsInLogicalRow}) * 2;\n int c = 2 * (index / ${texelsInLogicalRow});\n\n return ivec${shape.length}(${coords});\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 1D output coordinates.\n */\n protected getOutputUnpacked1DCoords(_shape: [number], texShape: [number, number]): GlslLibRoutine {\n const source = `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n return resTexRC.y * ${texShape[0]} + resTexRC.x;\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 2D output coordinates.\n */\n protected getOutputUnpacked2DCoords(shape: [number, number], texShape: [number, number]): GlslLibRoutine {\n const source = `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n int r = index / ${shape[1]};\n int c = index - r * ${shape[1]};\n return ivec2(r, c);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 3D output coordinates.\n */\n protected getOutputUnpacked3DCoords(shape: [number, number, number], texShape: [number, number]): GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n `index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 4D output coordinates.\n */\n protected getOutputUnpacked4DCoords(shape: [number, number, number, number], texShape: [number, number]):\n GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd', 'd2'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n 
`index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec4(r, c, d, d2);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 5D output coordinates.\n */\n protected getOutputUnpacked5DCoords(shape: [number, number, number, number, number], texShape: [number, number]):\n GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd', 'd2', 'd3'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n `index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec5 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec5(r, c, d, d2, d3);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 6D output coordinates.\n */\n protected getOutputUnpacked6DCoords(shape: [number, number, number, number, number, number], texShape: [\n number, number\n ]): GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd', 'd2', 'd3', 'd4'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n `index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec6 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec6(r, c, d, d2, d3, d4);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Generates code for common UV coords computation utility functions.\n */\n protected getCommonUtilFuncs(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n let funcName = 'uvFromFlat';\n result[funcName] = new GlslLibRoutine(`\n vec2 uvFromFlat(int texNumR, int texNumC, int index) {\n int texC = index / texNumR;\n int texR = index - texC * texNumR;\n // TODO: swap texR, texC order in following function so row is corresponding to u and column is corresponding to\n // v.\n return (vec2(texR, texC) + halfCR) / vec2(texNumR, texNumC);\n }\n `);\n funcName = 'packedUVfrom1D';\n result[funcName] = new GlslLibRoutine(`\n vec2 packedUVfrom1D(int texNumR, int texNumC, int index) {\n int texelIndex = index / 2;\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * 
texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n }\n `);\n funcName = 'packedUVfrom2D';\n result[funcName] = new GlslLibRoutine(`\n vec2 packedUVfrom2D(int texNumR, int texNumC, int texelsInLogicalRow, int row, int col) {\n int texelIndex = (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n }\n `);\n funcName = 'packedUVfrom3D';\n result[funcName] = new GlslLibRoutine(`\n vec2 packedUVfrom3D(int texNumR, int texNumC,\n int texelsInBatch, int texelsInLogicalRow, int b,\n int row, int col) {\n int index = b * texelsInBatch + (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n }\n `);\n funcName = 'sampleTexture';\n const glsl = getGlsl(this.context.glContext.version);\n result[funcName] = new GlslLibRoutine(`\n float sampleTexture(sampler2D textureSampler, vec2 uv) {\n return ${glsl.texture2D}(textureSampler, uv).r;\n }`);\n return result;\n }\n\n /**\n * Constructing snippets for inputs\n */\n protected getInputsSamplingSnippets(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n const outputLayout = this.context.outputTextureLayout;\n this.context.programInfo.inputNames.forEach((samplerName, i) => {\n const inputLayout = this.context.inputTextureLayouts[i];\n const funcName = generateShaderFuncNameFromInputSamplerName(samplerName);\n if (inputLayout.isPacked) {\n result[funcName] = this.getPackedSamplerFromInput(funcName, samplerName, inputLayout);\n } else {\n result[funcName] = this.getUnpackedSamplerFromInput(funcName, samplerName, inputLayout);\n }\n\n const outCoordFuncName = generateShaderFuncNameFromInputSamplerNameAtOutCoords(samplerName);\n if (inputLayout.unpackedShape.length <= outputLayout.unpackedShape.length) {\n if (inputLayout.isPacked) {\n result[outCoordFuncName] =\n this.getPackedSamplerAtOutputCoords(outCoordFuncName, inputLayout, outputLayout, samplerName);\n } else {\n result[outCoordFuncName] =\n this.getUnpackedSamplerAtOutputCoords(outCoordFuncName, inputLayout, outputLayout, samplerName);\n }\n }\n });\n\n return result;\n }\n\n /**\n * Constructing snippets for output coordinates of samplers\n */\n protected getPackedSamplerAtOutputCoords(\n funcName: string, inputLayout: TextureLayout, outputLayout: TextureLayout, name: string): GlslLibRoutine {\n const inShape = inputLayout.unpackedShape;\n const outShape = outputLayout.unpackedShape;\n const texName = name;\n const texFuncSnippet = generateShaderFuncNameFromInputSamplerName(texName);\n\n const inRank = inShape.length;\n const outRank = outShape.length;\n\n const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);\n\n const type = getCoordsDataType(outRank);\n const rankDiff = outRank - inRank;\n let coordsSnippet: string;\n const fields = getGlChannels();\n\n if (inRank === 0) {\n coordsSnippet = '';\n } else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n } else {\n coordsSnippet = broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`).join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inShape.map((_s, i) => `coords.${fields[i + rankDiff]}`).join(', ');\n }\n\n let output = 'return outputValue;';\n const inSize = 
ShapeUtil.size(inShape);\n const isInputScalar = inSize === 1;\n const outSize = ShapeUtil.size(outShape);\n const isOutputScalar = outSize === 1;\n\n if (inRank === 1 && !isInputScalar && !isOutputScalar) {\n output = `\n return vec4(outputValue.xy, outputValue.xy);\n `;\n } else if (isInputScalar && !isOutputScalar) {\n if (outRank === 1) {\n output = `\n return vec4(outputValue.x, outputValue.x, 0., 0.);\n `;\n } else {\n output = `\n return vec4(outputValue.x);\n `;\n }\n } else if (broadcastDims.length) {\n const rows = inRank - 2;\n const cols = inRank - 1;\n\n if (broadcastDims.indexOf(rows) > -1 && broadcastDims.indexOf(cols) > -1) {\n output = 'return vec4(outputValue.x);';\n } else if (broadcastDims.indexOf(rows) > -1) {\n output = 'return vec4(outputValue.x, outputValue.y, ' +\n 'outputValue.x, outputValue.y);';\n } else if (broadcastDims.indexOf(cols) > -1) {\n output = 'return vec4(outputValue.xx, outputValue.zz);';\n }\n }\n\n const swapLastDimsSnippet = `\n int lastDim = coords.${fields[outRank - 1]};\n coords.${fields[outRank - 1]} = coords.${fields[outRank - 2]};\n coords.${fields[outRank - 2]} = lastDim;\n `;\n const source = `\n vec4 ${funcName}() {\n ${type} coords = getOutputCoords();\n ${swapLastDimsSnippet}\n ${coordsSnippet}\n vec4 outputValue = ${texFuncSnippet}(${unpackedCoordsSnippet});\n ${output}\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.getOutputCoords']);\n }\n\n /**\n * Constructing snippets for unpacked output coordinates of samplers\n */\n protected getUnpackedSamplerAtOutputCoords(\n funcName: string, inputLayout: TextureLayout, outputLayout: TextureLayout, name: string): GlslLibRoutine {\n const outTexShape = [outputLayout.width, outputLayout.height];\n const inTexShape = [inputLayout.width, inputLayout.height];\n const inRank = inputLayout.unpackedShape.length;\n const outRank = outputLayout.unpackedShape.length;\n const inShape = inputLayout.unpackedShape;\n const outShape = outputLayout.unpackedShape;\n const texFuncSnippet = generateShaderFuncNameFromInputSamplerName(name);\n\n if (inRank === outRank && ArrayUtil.arraysEqual(inTexShape, outTexShape)) {\n const source = `\n float ${funcName}() {\n return sampleTexture(${name}, TexCoords);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const type = getCoordsDataType(outRank);\n const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);\n const rankDiff = outRank - inRank;\n let coordsSnippet: string;\n const fields = getGlChannels();\n\n if (inRank === 0) {\n coordsSnippet = '';\n } else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n } else {\n coordsSnippet = broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`).join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inputLayout.unpackedShape.map((_s, i) => `coords.${fields[i + rankDiff]}`).join(', ');\n }\n const source = `\n float ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n return ${texFuncSnippet}(${unpackedCoordsSnippet});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.getOutputCoords']);\n }\n\n /**\n * Constructing snippets for packed operations.\n */\n protected getPackedSamplerFromInput(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n switch (inputLayout.unpackedShape.length) {\n case 0:\n return this.getPackedSamplerScalar(funcName, name);\n case 1:\n return 
this.getPackedSampler1D(funcName, name, inputLayout);\n case 2:\n return this.getPackedSampler2D(funcName, name, inputLayout);\n case 3:\n return this.getPackedSampler3D(funcName, name, inputLayout);\n default:\n return this.getPackedSamplerND(funcName, name, inputLayout);\n }\n }\n\n /**\n * Constructing snippets for unpacked operations.\n */\n protected getUnpackedSamplerFromInput(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n switch (shape.length) {\n case 0:\n return this.getUnpackedSamplerScalar(funcName, name, inputLayout);\n case 1:\n return this.getUnpackedSampler1D(funcName, name, inputLayout);\n case 2:\n return this.getUnpackedSampler2D(funcName, name, inputLayout);\n case 3:\n return this.getUnpackedSampler3D(funcName, name, inputLayout);\n case 4:\n return this.getUnpackedSampler4D(funcName, name, inputLayout);\n case 5:\n return this.getUnpackedSampler5D(funcName, name, inputLayout);\n case 6:\n return this.getUnpackedSampler6D(funcName, name, inputLayout);\n default:\n // TODO support more dimensionalities\n throw new Error(`Unsupported dimension ${shape.length}-D`);\n }\n }\n\n /**\n * Packed scalar snippet.\n */\n protected getPackedSamplerScalar(funcName: string, name: string): GlslLibRoutine {\n const glsl = getGlsl(this.context.glContext.version);\n const source = `\n vec4 ${funcName}() {\n return ${glsl.texture2D}(${name}, halfCR);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Packed 1D snippet.\n */\n protected getPackedSampler1D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const texShape = [inputLayout.width, inputLayout.height];\n const packedTexShape = [texShape[1], texShape[0]];\n const glsl = getGlsl(this.context.glContext.version);\n\n const packedSampler = `vec4 ${funcName}(int index) {\n vec2 uv = packedUVfrom1D(\n ${packedTexShape[0]}, ${packedTexShape[1]}, index);\n return ${glsl.texture2D}(${name}, uv);\n }`;\n const source = packedSampler;\n return new GlslLibRoutine(source, ['coordinates.packedUVfrom1D']);\n }\n\n /**\n * Packed 2D snippet.\n */\n protected getPackedSampler2D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const texShape = [inputLayout.width, inputLayout.height];\n const glsl = getGlsl(this.context.glContext.version);\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n\n if (texShape != null && ArrayUtil.arraysEqual(shape, texShape)) {\n const packedSampler = `vec4 ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return ${glsl.texture2D}(${name}, uv);\n }`;\n\n return new GlslLibRoutine(packedSampler);\n }\n const packedTexShape = texShape;\n const valuesPerRow = Math.ceil(shape[1] / 2);\n const packedSampler = `vec4 ${funcName}(int row, int col) {\n vec2 uv = packedUVfrom2D(${packedTexShape[1]}, ${packedTexShape[0]}, ${valuesPerRow}, row, col);\n return ${glsl.texture2D}(${name}, uv);\n }`;\n const source = packedSampler;\n return new GlslLibRoutine(source, ['coordinates.packedUVfrom2D']);\n }\n\n /**\n * Packed 3D snippet.\n */\n protected getPackedSampler3D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const texShape = [inputLayout.width, inputLayout.height];\n const packedTexShape = [texShape[0], texShape[1]];\n const glsl = getGlsl(this.context.glContext.version);\n\n if (shape[0] === 1) 
{\n const squeezedShape = shape.slice(1);\n const keptDims = [1, 2];\n const newInputShape = squeezeInputShape(shape, squeezedShape);\n const params = ['b', 'row', 'col'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n const samplerRoutine = this.getPackedSamplerFromInput(funcName, name, newInputLayout);\n const packedSampler = `${samplerRoutine.routineBody}\n vec4 ${funcName}(int b, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n } `;\n const source = packedSampler;\n return new GlslLibRoutine(source, samplerRoutine.dependencies);\n }\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n\n const valuesPerRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[1] / 2);\n\n const packedSampler = `vec4 ${funcName}(int b, int row, int col) {\n vec2 uv = packedUVfrom3D(\n ${texNumC}, ${texNumR}, ${texelsInBatch}, ${valuesPerRow}, b, row, col);\n return ${glsl.texture2D}(${name}, uv);}`;\n const source = packedSampler;\n return new GlslLibRoutine(source, ['coordinates.packedUVfrom3D']);\n }\n /*\n * Packed ND snippet.\n */\n protected getPackedSamplerND(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const rank = shape.length;\n const texShape = [inputLayout.width, inputLayout.height];\n const glsl = getGlsl(this.context.glContext.version);\n\n const packedTexShape = [texShape[0], texShape[1]];\n const texNumR = packedTexShape[1];\n const texNumC = packedTexShape[0];\n const valuesPerRow = Math.ceil(shape[rank - 1] / 2);\n let texelsInBatch = valuesPerRow * Math.ceil(shape[rank - 2] / 2);\n let params = 'int b, int row, int col';\n let index = `b * ${texelsInBatch} + (row / 2) * ${valuesPerRow} + (col / 2)`;\n for (let b = 2; b < rank - 1; b++) {\n params = `int b${b}, ` + params;\n texelsInBatch *= shape[rank - b - 1];\n index = `b${b} * ${texelsInBatch} + ` + index;\n }\n const packedSampler = `vec4 ${funcName}(${params}) {\n int index = ${index};\n int texR = index / ${texNumC};\n int texC = index - texR * ${texNumC};\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}, ${texNumR});\n return ${glsl.texture2D}(${name}, uv);\n }`;\n const source = packedSampler;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked scalar snippet.\n */\n protected getUnpackedSamplerScalar(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const [texNumR, texNumC] = [inputLayout.width, inputLayout.height];\n if (texNumR === 1 && texNumC === 1) {\n const source = `\n float ${funcName}() {\n return sampleTexture(${name}, halfCR);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const source = `\n float ${funcName}() {\n int offset_${name} = coordsToOffset(TexCoords, ${texNumR}, ${texNumC});\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, offset_${name});\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * Unpacked 1D snippet.\n */\n protected getUnpackedSampler1D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const tNumR = inputLayout.width;\n const tNumC = inputLayout.height;\n\n if (tNumC === 1 && tNumR === 1) {\n const source = `\n float ${funcName}(int index) {\n return 
sampleTexture(${name}, halfCR);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n if (tNumC === 1) {\n const source = `\n float ${funcName}(int index) {\n vec2 uv = vec2((float(index) + 0.5) / ${tNumR}.0, 0.5);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n if (tNumR === 1) {\n const source = `\n float ${funcName}(int index) {\n vec2 uv = vec2(0.5, (float(index) + 0.5) / ${tNumC}.0);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n const source = `\n float ${funcName}(int index) {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture']);\n }\n\n /**\n * Unpacked 2D snippet.\n */\n\n protected getUnpackedSampler2D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n\n // TODO: modify row/col order for other dimensions.\n const texShape = [inputLayout.height, inputLayout.width];\n\n if (texShape != null && ArrayUtil.arraysEqual(shape, texShape)) {\n const texNumR = texShape[1];\n const texNumC = texShape[0];\n const source = `\n float ${funcName}(int row, int col) {\n vec2 uv = (vec2(row, col) + halfCR) / vec2(${texNumR}.0, ${texNumC}.0);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const {newShape, keptDims} = squeezeShape(shape as number[]);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, squeezedShape);\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n\n const params = ['col', 'row'];\n const source = `\n ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n float ${funcName}(int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const texNumR = texShape[1];\n const texNumC = texShape[0];\n if (texNumC === 1) {\n const source = `\n float ${funcName}(int row, int col) {\n int offset_${name} = coordsToOffset(TexCoords, ${texNumR}, ${texNumC});\n float index = dot(vec3(row, col, offset_${name}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2(0.5, (index + 0.5) / ${texNumR}.0);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n if (texNumR === 1) {\n const source = `\n float ${funcName}(int row, int col) {\n int offset_${name} = coordsToOffset(TexCoords, ${texNumR}, ${texNumC});\n float index = dot(vec3(row, col, offset_${name}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2((index + 0.5) / ${texNumC}.0, 0.5);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n const source = `\n float ${funcName}(int row, int col) {\n int index = col * ${shape[1]} + row;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * Unpacked 3D snippet.\n 
*/\n\n protected getUnpackedSampler3D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride0 = shape[1] * shape[2];\n const stride1 = shape[2];\n\n const {newShape, keptDims} = squeezeShape(shape as number[]);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, squeezedShape);\n const params = ['batch', 'col', 'row'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n const routine = this.getUnpackedSamplerFromInput(funcName, name, newInputLayout);\n // TODO: revisit the logic here to make it simpler\n const revDims = keptDims.reverse();\n const source = `\n ${routine.routineBody}\n float ${funcName}(int batch, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, revDims)});\n }\n `;\n return new GlslLibRoutine(source, routine.dependencies);\n }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int depth, int row, int col) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = depth * ${stride0} + col * ${stride1} + row;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * Unpacked 4D snippet.\n */\n\n protected getUnpackedSampler4D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n //\n // TODO: re-enable this shortcut once the index calculation bug is fixed.\n //\n // const {newShape, keptDims} = squeezeShape(shape as number[]);\n // if (newShape.length < shape.length) {\n // const newInputShape = squeezeInputShape(shape, newShape);\n // const params = ['row', 'col', 'depth', 'depth2'];\n // // Deep copy of input texture layout.\n // const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n // newInputLayout.unpackedShape = newInputShape;\n // const source = `\n // ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n // float ${funcName}(int row, int col, int depth, int depth2) {\n // return ${funcName}(${getSqueezedParams(params, keptDims)});\n // }\n // `;\n // return new GlslLibRoutine(\n // source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n // }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int row, int col, int depth, int depth2) {\n int index = row * ${stride0} + col * ${stride1} +\n depth2 * ${stride2} + depth;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture']);\n }\n\n /**\n * Unpacked 5D snippet.\n */\n protected getUnpackedSampler5D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride3 = shape[4];\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n const {newShape, keptDims} = 
squeezeShape(shape as number[]);\n if (newShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n\n const source = `\n ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.uvFromFlat']);\n }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth3 * ${stride3} + depth2;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.uvFromFlat']);\n }\n\n /**\n * Unpacked 6D snippet.\n */\n protected getUnpackedSampler6D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride4 = shape[5];\n const stride3 = shape[4] * stride4;\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n const {newShape, keptDims} = squeezeShape(shape as number[]);\n if (newShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3', 'depth4'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n\n const source = `\n ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.uvFromFlat']);\n }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 * ${stride4} + depth4;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * This is the main function to map from the given texture coordinates (s,t)\n * to logical indices for the output\n * There will only be one single variation of this\n * Also see coordsToOffset and offsetToIndices for input-specific versions\n */\n protected toVec(): {[name: string]: GlslLibRoutine} {\n const output = this.context.outputTextureLayout;\n const rank = output.shape.length;\n const strides = output.strides;\n const xScale = output.width;\n const yScale = output.height;\n\n const stridesBlock = [];\n for (let i = 0; i < rank - 1; ++i) {\n stridesBlock.push(`\n c[${i}] = offset / ${strides[i]};`);\n stridesBlock.push(`\n offset -= c[${i}] * 
${strides[i]};`);\n }\n stridesBlock.push(`\n c[${rank - 1}] = offset;`);\n const body = `\n void toVec(vec2 texCoords, out int c[${rank}]) {\n int offset = coordsToOffset(texCoords, ${xScale}, ${yScale});\n ${stridesBlock.join('')}\n }\n void toVec(int offset, out int c[${rank}]) {\n ${stridesBlock.join('')}\n }\n `;\n return {toVec: new GlslLibRoutine(body, ['coordinates.coordsToOffset'])};\n }\n /**\n * These are value getter functions generated for each input\n * Each function is hardwired to the name and dimensions of the input\n * An '_T' variation is also produced which accesses values as if the\n * input was transposed\n */\n protected valueFrom(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const layout = this.context.inputTextureLayouts[i];\n const shape = layout.unpackedShape.length > 0 ? layout.unpackedShape : layout.shape;\n const rank = shape.length;\n let funcName = `_${name}`;\n result[funcName] = new GlslLibRoutine(\n this.getValueFromSingle(name, rank, layout.width, layout.height, false),\n [`shapeUtils.indicesToOffset${funcName}`, 'coordinates.offsetToCoords', 'fragcolor.getColorAsFloat']);\n funcName = funcName + '_T';\n result[funcName] = new GlslLibRoutine(\n this.getValueFromSingle(name, rank, layout.width, layout.height, true),\n [`shapeUtils.indicesToOffset${funcName}`, 'coordinates.offsetToCoords', 'fragcolor.getColorAsFloat']);\n });\n return result;\n }\n /**\n * Produces one value getter function for the name and rank given\n * If a transpose is set proper offsetToCoords mapping will be used\n * @param name name of the function\n * @param rank rank of the input\n * @param transpose whether or not should generate a transpose variation\n */\n protected getValueFromSingle(varName: string, rank: number, width: number, height: number, transpose: boolean):\n string {\n let name = `_${varName}`;\n if (transpose) {\n name = name + '_T';\n }\n const glsl = getGlsl(this.context.glContext.version);\n return `\n float ${name}(int m[${rank}]) {\n int offset = indicesToOffset${name}(m);\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(${varName}, coords));\n return value;\n }\n `;\n }\n\n /**\n * Produces a packed value getter function for the name and rank given\n * If a transpose is set proper offsetToCoords mapping will be used\n * @param name name of the function\n * @param rank rank of the input\n * @param transpose whether or not should generate a transpose variation\n */\n protected getPackedValueFrom(varName: string, rank: number, width: number, height: number, transpose: boolean):\n string {\n let name = `_${varName}_Pack`;\n if (transpose) {\n name = name + '_T';\n }\n const glsl = getGlsl(this.context.glContext.version);\n return `\n vec4 ${name}(int m[${rank}]) {\n int offset = indicesToOffset_${varName}(m);\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n return ${glsl.texture2D}(${varName}, coords);\n }\n `;\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\n\n/**\n * This GLSL library handles routines converting\n * float32 to/from Unsigned byte or float 16\n */\nexport class EncodingGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {...this.encodeFloat32(), ...this.decodeFloat32()};\n }\n getCustomTypes(): {[name: string]: string} {\n return {};\n }\n protected encodeFloat32(): {[name: string]: GlslLibRoutine} {\n return {\n encode: new GlslLibRoutine(`highp vec4 encode(highp float f) {\n return vec4(f, 0.0, 0.0, 0.0);\n }\n `)\n };\n }\n protected decodeFloat32(): {[name: string]: GlslLibRoutine} {\n return {\n decode: new GlslLibRoutine(`highp float decode(highp vec4 rgba) {\n return rgba.r;\n }\n `)\n };\n }\n /**\n * returns the routine to encode encode a 32bit float to a vec4 (of unsigned bytes)\n * @credit: https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float\n */\n protected encodeUint8(): {[name: string]: GlslLibRoutine} {\n const endianness = EncodingGlslLib.isLittleEndian() ? 'rgba.rgba=rgba.abgr;' : '';\n return {\n encode: new GlslLibRoutine(`\n highp vec4 encode(highp float f) {\n highp float F = abs(f);\n highp float Sign = step(0.0,-f);\n highp float Exponent = floor(log2(F));\n highp float Mantissa = (exp2(- Exponent) * F);\n Exponent = floor(log2(F) + 127.0) + floor(log2(Mantissa));\n highp vec4 rgba;\n rgba[0] = 128.0 * Sign + floor(Exponent*exp2(-1.0));\n rgba[1] = 128.0 * mod(Exponent,2.0) + mod(floor(Mantissa*128.0),128.0);\n rgba[2] = floor(mod(floor(Mantissa*exp2(23.0 -8.0)),exp2(8.0)));\n rgba[3] = floor(exp2(23.0)*mod(Mantissa,exp2(-15.0)));\n ${endianness}\n rgba = rgba / 255.0; // values need to be normalized to [0,1]\n return rgba;\n }\n `)\n };\n }\n /**\n * returns the routine to encode a vec4 of unsigned bytes to float32\n * @credit: https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float\n */\n protected decodeUint8(): {[name: string]: GlslLibRoutine} {\n const endianness = EncodingGlslLib.isLittleEndian() ? 'rgba.rgba=rgba.abgr;' : '';\n return {\n decode: new GlslLibRoutine(`\n highp float decode(highp vec4 rgba) {\n rgba = rgba * 255.0; // values need to be de-normalized from [0,1] to [0,255]\n ${endianness}\n highp float Sign = 1.0 - step(128.0,rgba[0])*2.0;\n highp float Exponent = 2.0 * mod(rgba[0],128.0) + step(128.0,rgba[1]) - 127.0;\n highp float Mantissa = mod(rgba[1],128.0)*65536.0 + rgba[2]*256.0 +rgba[3] + float(0x800000);\n highp float Result = Sign * exp2(Exponent) * (Mantissa * exp2(-23.0 ));\n return Result;\n }\n `)\n };\n }\n /**\n * Determines if the machine is little endian or not\n * @credit: https://gist.github.com/TooTallNate/4750953\n */\n static isLittleEndian(): boolean {\n const b = new ArrayBuffer(4);\n const a = new Uint32Array(b);\n const c = new Uint8Array(b);\n a[0] = 0xdeadbeef;\n if (c[0] === 0xef) {\n return true;\n }\n if (c[0] === 0xde) {\n return false;\n }\n throw new Error('unknown endianness');\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\nimport {getGlsl} from './glsl-source';\n\n/**\n * This GLSL library handles routines around reading a texlet and writing to it\n * Reading and writing could be more than just dealing with one channel\n * It may require encoding/decoding to/from 4 channels into one\n */\nexport class FragColorGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {...this.setFragColor(), ...this.getColorAsFloat()};\n }\n getCustomTypes(): {[name: string]: string} {\n return {};\n }\n protected setFragColor(): {[name: string]: GlslLibRoutine} {\n const glsl = getGlsl(this.context.glContext.version);\n return {\n setFragColor: new GlslLibRoutine(\n `\n void setFragColor(float value) {\n ${glsl.output} = encode(value);\n }\n `,\n ['encoding.encode'])\n };\n }\n protected getColorAsFloat(): {[name: string]: GlslLibRoutine} {\n return {\n getColorAsFloat: new GlslLibRoutine(\n `\n float getColorAsFloat(vec4 color) {\n return decode(color);\n }\n `,\n ['encoding.decode'])\n };\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\n\n/**\n * GLSL Library responsible for data types and routines for manipulating\n * coordinates and mapping to/from tensor indices\n */\nexport class ShapeUtilsGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {\n ...this.bcastIndex(),\n ...this.bcastMatmulIndex(),\n ...this.offsetToIndices(),\n ...this.indicesToOffset(),\n ...this.incrementIndices()\n };\n }\n getCustomTypes() {\n return {};\n }\n protected bcastIndex(): {[name: string]: GlslLibRoutine} {\n const outputRank = this.context.outputTextureLayout.shape.length;\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].unpackedShape;\n if (shape.length <= outputRank) {\n const rank = shape.length;\n const dimOffset = outputRank - rank;\n const funcName = `bcastIndices_${name}`;\n let block = '';\n for (let i = 0; i < rank; ++i) {\n block += `\n realIndices[${i}] = int( mod(float(bcastedIndices[${dimOffset + i}]), ${shape[i]}.0) );\n `;\n }\n const body = `\n void ${funcName} (int bcastedIndices[${outputRank}], out int realIndices[${rank}]) {\n ${block}\n }\n `;\n result[funcName] = new GlslLibRoutine(body);\n }\n });\n return result;\n }\n protected bcastMatmulIndex(): {[name: string]: GlslLibRoutine} {\n const outputRank = this.context.outputTextureLayout.shape.length;\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n if (!(shape.length < 2 || shape.length > outputRank)) {\n const rank = shape.length;\n const dimOffset = outputRank - rank;\n const funcName = `bcastMatmulIndices_${name}`;\n let block = '';\n for (let i = 0; i < rank - 2; ++i) {\n block += `\n realIndices[${i}] = int( mod(float(bcastedIndices[${dimOffset + i}]), ${shape[i]}.0) );\n `;\n }\n const body = `\n void ${funcName}(int bcastedIndices[${outputRank}], out int realIndices[${rank}]) {\n ${block}\n realIndices[${rank - 1}] = bcastedIndices[${outputRank - 1}];\n 
realIndices[${rank - 2}] = bcastedIndices[${outputRank - 2}];\n }\n `;\n result[funcName] = new GlslLibRoutine(body);\n }\n });\n return result;\n }\n protected indicesToOffset(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n const strides = this.context.inputTextureLayouts[i].strides;\n const rank = shape.length;\n let funcName = `indicesToOffset_${name}`;\n result[funcName] = new GlslLibRoutine(ShapeUtilsGlslLib.indexToOffsetSingle(funcName, rank, strides));\n funcName = `indicesToOffset_${name}_T`;\n result[funcName] =\n new GlslLibRoutine(ShapeUtilsGlslLib.indexToOffsetSingle(funcName, rank, strides.slice().reverse()));\n });\n return result;\n }\n static indexToOffsetSingle(name: string, rank: number, strides: readonly number[]): string {\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n offset += indices[${i}] * ${strides[i]};\n `;\n }\n return `\n int ${name}(int indices[${rank}]) {\n int offset = 0;\n ${block}\n return offset;\n }\n `;\n }\n protected offsetToIndices(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n const strides = this.context.inputTextureLayouts[i].strides;\n const rank = shape.length;\n let funcName = `offsetToIndices_${name}`;\n result[funcName] = new GlslLibRoutine(ShapeUtilsGlslLib.offsetToIndicesSingle(funcName, rank, strides));\n funcName = `offsetToIndices_${name}_T`;\n result[funcName] =\n new GlslLibRoutine(ShapeUtilsGlslLib.offsetToIndicesSingle(funcName, rank, strides.slice().reverse()));\n });\n return result;\n }\n static offsetToIndicesSingle(name: string, rank: number, strides: readonly number[]): string {\n const stridesBlock = [];\n for (let i = 0; i < rank - 1; ++i) {\n stridesBlock.push(`\n indices[${i}] = offset / ${strides[i]};`);\n stridesBlock.push(`\n offset -= indices[${i}] * ${strides[i]};`);\n }\n stridesBlock.push(`\n indices[${rank - 1}] = offset;`);\n return `\n void ${name}(int offset, out int indices[${rank}]) {\n ${stridesBlock.join('')}\n }\n `;\n }\n protected incrementIndices(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n const rank = shape.length;\n const funcName = `incrementIndices_${name}`;\n let shapeInit = '';\n for (let i = 0; i < rank; ++i) {\n shapeInit += `\n shape[${i}] = ${shape[i]};`;\n }\n const body = `\n void ${funcName}(int axis, out int indices[${rank}]) {\n int shape[${rank}];\n ${shapeInit};\n for(int i = ${rank} -1 ; i >= 0; --i) {\n if(i > axis) continue;\n indices[i] += 1;\n if(indices[i] < shape[i]) {\n break;\n }\n indices[i] = 0;\n }\n }\n `;\n result[funcName] = new GlslLibRoutine(body);\n });\n return result;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\n\n/**\n * GLSL Library responsible for vec routines\n * Vec is an varible length int array. 
The length is fixed at the time of\n * generating the library functions from the dimensions of the output.\n */\nexport class VecGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getCustomTypes(): {[name: string]: string} {\n return {};\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {...this.binaryVecFunctions(), ...this.copyVec(), ...this.setVecItem(), ...this.getVecItem()};\n }\n protected binaryVecFunctions(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n const nameOp: {[name: string]: string} = {add: '+=', sub: '-=', mul: '*=', div: '/='};\n const result: {[name: string]: GlslLibRoutine} = {};\n for (const name in nameOp) {\n const fname = `${name}Vec`;\n let assignmentBlock = '';\n for (let i = 0; i < rank; ++i) {\n assignmentBlock += `\n dest[${i}] ${nameOp[name]} src[${i}];\n `;\n }\n const body = `\n void ${fname}(int src[${rank}], out int dest[${rank}]) {\n ${assignmentBlock}\n }\n `;\n result[fname] = new GlslLibRoutine(body);\n }\n\n return result;\n }\n protected copyVec(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n let assignmentBlock = '';\n for (let i = 0; i < rank; ++i) {\n assignmentBlock += `\n dest[${i}] = src[${i}];\n `;\n }\n const body = `\n void copyVec(int src[${rank}], out int dest[${rank}]) {\n ${assignmentBlock}\n }\n `;\n return {copyVec: new GlslLibRoutine(body)};\n }\n\n protected setVecItem(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n let block = `\n if(index < 0)\n index =${rank} + index;\n if (index == 0)\n m[0] = value;\n `;\n for (let i = 1; i < rank - 1; ++i) {\n block += `\n else if (index == ${i})\n m[${i}] = value;\n `;\n }\n block += `\n else\n m[${rank - 1}] = value;\n `;\n const body = `\n void setVecItem(out int m[${rank}], int index, int value) {\n ${block}\n }\n `;\n return {setVecItem: new GlslLibRoutine(body)};\n }\n protected getVecItem(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n let block = `\n if(index < 0)\n index = ${rank} + index;\n if (index == 0)\n return m[0];\n `;\n for (let i = 1; i < rank - 1; ++i) {\n block += `\n else if (index == ${i})\n return m[${i}];\n `;\n }\n block += `\n else\n return m[${rank - 1}];\n `;\n const body = `\n int getVecItem(int m[${rank}], int index) {\n ${block}\n }\n `;\n return {getVecItem: new GlslLibRoutine(body)};\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CoordsGlslLib} from './glsl-coordinate-lib';\nimport {GlslContext, GlslLib} from './glsl-definitions';\nimport {EncodingGlslLib} from './glsl-encoding-lib';\nimport {FragColorGlslLib} from './glsl-fragcolor-lib';\nimport {ShapeUtilsGlslLib} from './glsl-shape-utils-lib';\nimport {VecGlslLib} from './glsl-vec-lib';\n\nexport const glslRegistry: {[name: string]: new (context: GlslContext) => GlslLib} = {\n 'encoding': EncodingGlslLib,\n 'fragcolor': FragColorGlslLib,\n 'vec': VecGlslLib,\n 'shapeUtils': ShapeUtilsGlslLib,\n 'coordinates': CoordsGlslLib,\n // 'arrays': ArrayGlslSLib\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutineNode, TopologicalSortGlslRoutines} from './glsl-definitions';\nimport {replaceInlines} from './glsl-function-inliner';\nimport {glslRegistry} from './glsl-registered-libs';\nimport {getDefaultFragShaderMain, getFragShaderPreamble} from './glsl-source';\nimport {ProgramInfo, TextureLayout, VariableInfo} from './types';\nimport {WebGLContext} from './webgl-context';\n\n/**\n * Preprocessor for the additions to the GLSL language\n * It deals with:\n * @include directives\n * @inline\n * Loop unrolling (not implemented)\n * Macro resolution (not implemented)\n */\nexport class GlslPreprocessor {\n readonly context: GlslContext;\n readonly libs: {[name: string]: GlslLib} = {};\n readonly glslLibRoutineDependencyGraph: {[routineName: string]: GlslLibRoutineNode} = {};\n\n constructor(\n glContext: WebGLContext, programInfo: ProgramInfo, inputTextureLayouts: TextureLayout[],\n outputTextureLayout: TextureLayout) {\n this.context = new GlslContext(glContext, programInfo, inputTextureLayouts, outputTextureLayout);\n\n // construct GlslLibs\n Object.keys(glslRegistry).forEach((name: string) => {\n const lib = new glslRegistry[name](this.context);\n this.libs[name] = lib;\n });\n\n // construct GlslRoutineDependencyGraph\n const map = this.glslLibRoutineDependencyGraph;\n for (const libName in this.libs) {\n const lib = this.libs[libName];\n const routinesInLib = lib.getFunctions();\n for (const routine in routinesInLib) {\n const key = libName + '.' + routine;\n let currentNode: GlslLibRoutineNode;\n if (map[key]) {\n currentNode = map[key];\n currentNode.routineBody = routinesInLib[routine].routineBody;\n } else {\n currentNode = new GlslLibRoutineNode(key, routinesInLib[routine].routineBody);\n map[key] = currentNode;\n }\n const dependencies = routinesInLib[routine].dependencies;\n if (dependencies) {\n for (let i = 0; i < dependencies.length; ++i) {\n if (!map[dependencies[i]]) {\n const node = new GlslLibRoutineNode(dependencies[i]);\n map[dependencies[i]] = node;\n currentNode.addDependency(node);\n } else {\n currentNode.addDependency(map[dependencies[i]]);\n }\n }\n }\n }\n }\n }\n\n preprocess(): string {\n const programInfo = this.context.programInfo;\n let source = programInfo.shaderSource;\n\n // append main() function\n if (!this.context.programInfo.hasMain) {\n source = `${source}\n ${getDefaultFragShaderMain(this.context.glContext.version, this.context.outputTextureLayout.shape.length)}`;\n }\n // replace inlines\n source = replaceInlines(source);\n\n // concat final source string\n return `${getFragShaderPreamble(this.context.glContext.version)}\n ${this.getUniforms(programInfo.inputNames, programInfo.variables)}\n ${this.getImports(source)}\n ${source}`;\n }\n\n protected getImports(script: string): string {\n const routinesIncluded = this.selectGlslLibRoutinesToBeIncluded(script);\n\n if (routinesIncluded.length === 0) {\n return '';\n }\n\n let routines = '';\n for (let i = 0; i < routinesIncluded.length; ++i) {\n if (routinesIncluded[i].routineBody) {\n routines += routinesIncluded[i].routineBody + '\\n';\n } else {\n throw new Error(`Missing body for the Glsl Library routine: ${routinesIncluded[i].name}`);\n }\n }\n\n return routines;\n }\n private selectGlslLibRoutinesToBeIncluded(script: string): GlslLibRoutineNode[] {\n const nodes: GlslLibRoutineNode[] = [];\n\n Object.keys(this.glslLibRoutineDependencyGraph).forEach(classAndRoutine => {\n const routine = 
classAndRoutine.split('.')[1];\n if (script.indexOf(routine) !== -1) {\n nodes.push(this.glslLibRoutineDependencyGraph[classAndRoutine]);\n }\n });\n\n return TopologicalSortGlslRoutines.returnOrderedNodes(nodes);\n }\n\n protected getUniforms(samplers?: string[], variables?: VariableInfo[]): string {\n const uniformLines: string[] = [];\n if (samplers) {\n for (const sampler of samplers) {\n uniformLines.push(`uniform sampler2D ${sampler};`);\n }\n }\n if (variables) {\n for (const variable of variables) {\n uniformLines.push(\n `uniform ${variable.type} ${variable.name}${variable.arrayLength ? `[${variable.arrayLength}]` : ''};`);\n }\n }\n return uniformLines.join('\\n');\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {Logger, Profiler} from '../../instrument';\n\nimport {GlslPreprocessor} from './glsl-preprocessor';\nimport {getVertexShaderSource} from './glsl-source';\nimport {TextureLayoutStrategy} from './texture-layout-strategy';\nimport {Artifact, ProgramInfo, ProgramVariable, TextureData, TextureLayout, VariableInfo} from './types';\nimport {WebGLContext} from './webgl-context';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n vertexShader: WebGLShader;\n attributesBound: boolean;\n\n constructor(\n public profiler: Readonly, public glContext: WebGLContext,\n public textureLayoutStrategy: TextureLayoutStrategy) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: TextureData[], output: TextureData): void {\n this.profiler.event('op', `ProgramManager.run ${buildArtifact.programInfo.name ?? 'unknown kernel'}`, () => {\n const gl = this.glContext.gl;\n const program = buildArtifact.program;\n gl.useProgram(program);\n try {\n this.bindOutput(output);\n if (!this.attributesBound) {\n this.bindAttributes(buildArtifact.attribLocations);\n }\n this.bindUniforms(buildArtifact.uniformLocations, buildArtifact.programInfo.variables ?? 
[], inputs);\n } catch (err) {\n Logger.error('ProgramManager', buildArtifact.programInfo.shaderSource);\n throw err;\n }\n this.profiler.event('backend', 'GlContext.draw()', () => {\n this.glContext.draw();\n });\n }, this.glContext);\n }\n dispose(): void {\n if (this.vertexShader) {\n this.glContext.deleteShader(this.vertexShader);\n }\n this.repo.forEach(a => this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, inputTextureLayouts: TextureLayout[], outputTextureLayout: TextureLayout): Artifact {\n return this.profiler.event('backend', 'ProgramManager.build', () => {\n const preprocessor = new GlslPreprocessor(this.glContext, programInfo, inputTextureLayouts, outputTextureLayout);\n const fragScript = preprocessor.preprocess();\n const program = this.compile(fragScript);\n const artifact = {\n programInfo,\n program,\n uniformLocations: this.getUniformLocations(\n program, preprocessor.context.programInfo.inputNames, preprocessor.context.programInfo.variables),\n attribLocations: this.getAttribLocations(program)\n };\n return artifact;\n });\n }\n protected compile(fragShaderScript: string): WebGLProgram {\n if (!this.vertexShader) {\n Logger.verbose('ProrgramManager', 'Compiling and caching Vertex shader for the first time');\n const vertexShaderScript = getVertexShaderSource(this.glContext.version);\n this.vertexShader = this.glContext.compileShader(vertexShaderScript, this.glContext.gl.VERTEX_SHADER);\n }\n if (env.debug) {\n Logger.verbose('ProrgramManager', `FragShader:\n${fragShaderScript}\n`);\n }\n const fragShader = this.glContext.compileShader(fragShaderScript, this.glContext.gl.FRAGMENT_SHADER);\n const program = this.glContext.createProgram(this.vertexShader, fragShader);\n this.glContext.deleteShader(fragShader);\n return program;\n }\n bindOutput(td: TextureData): void {\n const width = td.width;\n const height = td.height;\n Logger.verbose(\n 'ProrgramManager',\n `Binding output texture to Framebuffer: w/h=${width}/${height}, shape=${td.shape}, type=${td.tensor.type}`);\n this.glContext.attachFramebuffer(td.texture, width, height);\n }\n bindAttributes(attribLocations: Artifact.AttribLocations): void {\n const positionHandle = attribLocations.position;\n const textureCoordHandle = attribLocations.textureCoord;\n this.glContext.setVertexAttributes(positionHandle, textureCoordHandle);\n this.attributesBound = true;\n }\n bindUniforms(uniformLocations: Artifact.UniformLocations, variables: ProgramVariable[], textures: TextureData[]):\n void {\n const gl = this.glContext.gl;\n let texturePosition = 0;\n for (const {name, type, location, arrayLength} of uniformLocations) {\n const value = variables.find(v => v.name === name)?.data;\n if (type !== 'sampler2D' && !value) {\n throw new Error(`variable '${name}' does not have data defined in program info`);\n }\n switch (type) {\n case 'sampler2D':\n this.bindTexture(textures[texturePosition], location, texturePosition);\n texturePosition++;\n break;\n case 'float':\n if (arrayLength) {\n gl.uniform1fv(location, value as number[]);\n } else {\n gl.uniform1f(location, value as number);\n }\n break;\n case 'int':\n if (arrayLength) {\n gl.uniform1iv(location, value as number[]);\n } else {\n gl.uniform1i(location, value as number);\n }\n break;\n default:\n throw new Error(`Uniform not implemented: ${type}`);\n }\n }\n }\n bindTexture(td: TextureData, uniformHandle: WebGLUniformLocation, position: number): void {\n this.glContext.bindTextureToUniform(td.texture, position, uniformHandle);\n }\n 
getAttribLocations(program: WebGLProgram): Artifact.AttribLocations {\n return {\n position: this.getAttribLocation(program, 'position'),\n textureCoord: this.getAttribLocation(program, 'textureCoord')\n };\n }\n getUniformLocations(program: WebGLProgram, samplers?: string[], variables?: VariableInfo[]):\n Artifact.UniformLocations {\n const uniformLocations: Artifact.UniformLocations = [];\n if (samplers) {\n for (const sampler of samplers) {\n uniformLocations.push({name: sampler, type: 'sampler2D', location: this.getUniformLocation(program, sampler)});\n }\n }\n if (variables) {\n for (const variable of variables) {\n uniformLocations.push({...variable, location: this.getUniformLocation(program, variable.name)});\n }\n }\n return uniformLocations;\n }\n getUniformLocation(program: WebGLProgram, name: string): WebGLUniformLocation {\n const gl = this.glContext.gl;\n const reference = gl.getUniformLocation(program, name);\n if (reference === null) {\n throw new Error(`Uniform ${name} not found.`);\n }\n return reference;\n }\n getAttribLocation(program: WebGLProgram, name: string): number {\n const gl = this.glContext.gl;\n const attributeLocation: number = gl.getAttribLocation(program, name);\n return attributeLocation;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger, Profiler} from '../../instrument';\nimport {Tensor} from '../../tensor';\n\nimport {Encoder, EncoderUsage} from './texture-data-encoder';\nimport {TextureLayoutStrategy} from './texture-layout-strategy';\nimport {TextureData, TextureLayout} from './types';\nimport {WebGLContext} from './webgl-context';\n\nexport interface TextureManagerConfig {\n reuseTextures?: boolean;\n}\n\n/**\n * TextureManager is the mainly responsible for caching Textures\n * Textures are cached in 2 levels:\n * 1. the texures which are associated with a dataId (from Tensor)\n * Caching these is crucial to performance. These are In-use Textures\n * 2. textures which are not in use by any current ProgramInfo/Tensor\n * These are called Free Textures\n * TextureManager is also used to help creating textures. 
For this it\n * uses WebGLContext and TextureLayoutStrategy\n */\nexport class TextureManager {\n private readonly inUseTextures: Map;\n private readonly idleTextures: Map;\n private readonly textureLookup: Map;\n private readonly pendingRead: Map void>> = new Map();\n\n constructor(\n public glContext: WebGLContext, public layoutStrategy: TextureLayoutStrategy, public profiler: Readonly,\n private config: TextureManagerConfig) {\n if (config.reuseTextures) {\n this.inUseTextures = new Map();\n this.idleTextures = new Map();\n this.textureLookup = new Map();\n }\n }\n createTextureFromLayout(\n dataType: Tensor.DataType, layout: TextureLayout, data?: Tensor.NumberType, usage?: EncoderUsage) {\n const textureDataType = this.toEncoderType(dataType);\n\n const encoder = this.glContext.getEncoder(textureDataType, layout.channels || 1, usage);\n if (layout.isPacked && usage === EncoderUsage.UploadOnly) {\n throw new Error('not implemented');\n }\n const width = layout.width;\n const height = layout.height;\n\n let key: string|undefined;\n let inUseTextures: WebGLTexture[]|undefined;\n if (this.config.reuseTextures) {\n key = `${width}x${height}_${encoder.format}_${encoder.internalFormat}_${encoder.textureType}`;\n inUseTextures = this.inUseTextures.get(key);\n if (!inUseTextures) {\n inUseTextures = [];\n this.inUseTextures.set(key, inUseTextures);\n }\n\n const idleTextures = this.idleTextures.get(key);\n if (idleTextures && idleTextures.length > 0) {\n const texture = idleTextures.pop()!;\n inUseTextures.push(texture);\n if (usage === EncoderUsage.UploadOnly) {\n this.glContext.updateTexture(texture, width, height, encoder, this.toTextureData(dataType, data)!);\n }\n return texture;\n }\n }\n\n Logger.verbose('TextureManager', `Creating new texture of size ${layout.width}x${layout.height}`);\n const texture = this.glContext.allocateTexture(width, height, encoder, this.toTextureData(dataType, data));\n\n if (this.config.reuseTextures) {\n inUseTextures!.push(texture);\n this.textureLookup.set(texture, key!);\n }\n return texture;\n }\n readTexture(td: TextureData, dataType: Tensor.DataType, channels?: number): Tensor.NumberType {\n if (!channels) {\n channels = 1;\n }\n return this.profiler.event('backend', 'TextureManager.readTexture', () => {\n const dataSize = td.shape.reduce((a, b) => a * b) * channels!;\n const data = this.glContext.readTexture(\n td.texture, td.width, td.height, dataSize, this.toEncoderType(dataType), channels!);\n return this.toTensorData(dataType, data);\n });\n }\n async readTextureAsync(td: TextureData, dataType: Tensor.DataType, channels?: number): Promise {\n const dataId = td.tensor.dataId;\n if (!channels) {\n channels = 1;\n }\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise(resolve => subscribers?.push(resolve));\n }\n return this.profiler.event('backend', 'TextureManager.readTextureAsync', async () => {\n this.pendingRead.set(dataId, []);\n const dataSize = td.shape.reduce((a, b) => a * b) * channels!;\n // add a fence waiting for the data to be ready\n await this.glContext.createAndWaitForFence();\n const data = this.glContext.readTexture(\n td.texture, td.width, td.height, dataSize, this.toEncoderType(dataType), channels!);\n const tensorData = this.toTensorData(dataType, data);\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n subscribers?.forEach(resolve => resolve(tensorData));\n return tensorData;\n });\n }\n readUint8TextureAsFloat(td: TextureData): 
Float32Array {\n return this.profiler.event('backend', 'TextureManager.readUint8TextureAsFloat', () => {\n const dataSize = td.shape.reduce((a, b) => a * b);\n const data = this.glContext.readTexture(td.texture, td.width, td.height, dataSize * 4, 'byte', 4);\n return new Float32Array(data.buffer, data.byteOffset, dataSize);\n });\n }\n releaseTexture(textureData: TextureData, deleteTexture?: boolean): void {\n let key: string|undefined;\n if (this.config.reuseTextures) {\n key = this.textureLookup.get(textureData.texture);\n if (key) {\n if (deleteTexture) {\n this.textureLookup.delete(key);\n }\n const inUseTextures = this.inUseTextures.get(key);\n if (inUseTextures) {\n const index = inUseTextures.indexOf(textureData.texture);\n if (index !== -1) {\n inUseTextures.splice(index, 1);\n let idleTextures = this.idleTextures.get(key);\n if (!idleTextures) {\n idleTextures = [];\n this.idleTextures.set(key, idleTextures);\n }\n idleTextures.push(textureData.texture);\n }\n }\n }\n }\n\n if (!key || deleteTexture) {\n Logger.verbose('TextureManager', `Deleting texture of size ${textureData.width}x${textureData.height}`);\n this.glContext.deleteTexture(textureData.texture);\n }\n }\n toTensorData(dataType: Tensor.DataType, data: Encoder.DataArrayType): Tensor.NumberType {\n switch (dataType) {\n case 'int16':\n return data instanceof Int16Array ? data : Int16Array.from(data);\n case 'int32':\n return data instanceof Int32Array ? data : Int32Array.from(data);\n case 'int8':\n return data instanceof Int8Array ? data : Int8Array.from(data);\n case 'uint16':\n return data instanceof Uint16Array ? data : Uint16Array.from(data);\n case 'uint32':\n return data instanceof Uint32Array ? data : Uint32Array.from(data);\n case 'uint8':\n case 'bool':\n return data instanceof Uint8Array ? data : Uint8Array.from(data);\n case 'float32':\n return data instanceof Float32Array ? data : Float32Array.from(data);\n case 'float64':\n return data instanceof Float64Array ? data : Float64Array.from(data);\n default:\n throw new Error(`TensorData type ${dataType} is not supported`);\n }\n }\n toTextureData(_dataType: Tensor.DataType, data: Tensor.NumberType|undefined): Encoder.DataArrayType|undefined {\n if (!data) {\n return undefined;\n }\n return (data instanceof Float32Array) ? data : new Float32Array(data);\n /*\n switch (dataType) {\n case 'int16':\n case 'int32':\n case 'uint16':\n case 'uint32':\n return (data.constructor === Uint32Array) ? data as Uint32Array : new Uint32Array(data);\n case 'int8':\n case 'uint8':\n case 'bool':\n return (data.constructor === Uint8Array) ? data as Uint8Array : new Uint8Array(data);\n case 'float32':\n case 'float64':\n return (data.constructor === Float32Array) ? data as Float32Array : new Float32Array(data);\n default:\n throw new Error(`TensorData type ${dataType} is not supported`);\n }\n */\n }\n toEncoderType(_dataType: Tensor.DataType): Encoder.DataType {\n return 'float';\n // switch (dataType) {\n // case 'int16':\n // case 'int32':\n // case 'uint16':\n // case 'uint32':\n // return 'int';\n // case 'uint8':\n // case 'bool':\n // return 'byte';\n // case 'float32':\n // case 'float64':\n // return 'float';\n // default:\n // throw new Error(`TensorData type ${dataType} is not supported`);\n // }\n }\n clearActiveTextures(): void {\n this.glContext.clearActiveTextures();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {SessionHandler} from '../../backend';\nimport {Graph} from '../../graph';\nimport {Logger} from '../../instrument';\nimport {Operator} from '../../operators';\nimport {OpSet, resolveOperator} from '../../opset';\nimport {Session} from '../../session';\nimport {Tensor} from '../../tensor';\nimport {WebGLBackend} from '../backend-webgl';\n\nimport {WebGLInferenceHandler} from './inference-handler';\nimport {WEBGL_OP_RESOLVE_RULES} from './op-resolve-rules';\nimport {ProgramManager} from './program-manager';\nimport {PreferLogicalStrategy, TextureLayoutStrategy} from './texture-layout-strategy';\nimport {TextureManager} from './texture-manager';\nimport {TextureData} from './types';\n\nexport class WebGLSessionHandler implements SessionHandler {\n programManager: ProgramManager;\n textureManager: TextureManager;\n layoutStrategy: TextureLayoutStrategy;\n packedTextureDataCache: Map;\n unpackedTextureDataCache: Map;\n pack2unpackMap: Map;\n unpack2packMap: Map;\n initializers: Set;\n pack?: boolean;\n\n constructor(public readonly backend: WebGLBackend, public readonly context: Session.Context) {\n this.layoutStrategy = new PreferLogicalStrategy(backend.glContext.maxTextureSize);\n this.programManager = new ProgramManager(this.context.profiler, backend.glContext, this.layoutStrategy);\n this.textureManager = new TextureManager(\n backend.glContext, this.layoutStrategy, this.context.profiler,\n {reuseTextures: backend.textureCacheMode === 'full'});\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache = new Map();\n this.pack = backend.pack;\n this.pack2unpackMap = new Map();\n this.unpack2packMap = new Map();\n }\n\n createInferenceHandler() {\n return new WebGLInferenceHandler(this);\n }\n onGraphInitialized(graph: Graph): void {\n const initializers = graph.getValues().filter(v => v.from === -1 && v.tensor).map(v => v.tensor!.dataId);\n this.initializers = new Set(initializers);\n }\n isInitializer(tensorId: Tensor.Id): boolean {\n return this.initializers ? this.initializers.has(tensorId) : false;\n }\n addInitializer(tensorId: Tensor.Id): void {\n this.initializers.add(tensorId);\n }\n getTextureData(tensorId: Tensor.Id, isPacked: boolean): TextureData|undefined {\n if (isPacked) {\n return this.packedTextureDataCache.get(tensorId);\n } else {\n return this.unpackedTextureDataCache.get(tensorId);\n }\n }\n setTextureData(tensorId: Tensor.Id, textureData: TextureData, isPacked = false): void {\n Logger.verbose('WebGLSessionHandler', 'Storing Texture data in cache');\n if (isPacked) {\n this.packedTextureDataCache.set(tensorId, textureData);\n } else {\n this.unpackedTextureDataCache.set(tensorId, textureData);\n }\n }\n dispose(): void {\n this.programManager.dispose();\n this.textureManager.clearActiveTextures();\n this.packedTextureDataCache.forEach(td => this.textureManager.releaseTexture(td, true));\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache.forEach(td => this.textureManager.releaseTexture(td, true));\n this.unpackedTextureDataCache = new Map();\n }\n resolve(node: Graph.Node, opsets: readonly OpSet[], graph: Graph): Operator {\n const op = resolveOperator(node, opsets, WEBGL_OP_RESOLVE_RULES);\n return {impl: op.opImpl, context: op.opInit ? op.opInit(node, graph) : node};\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport * as DataEncoders from './texture-data-encoder';\nimport {DataEncoder, Encoder, EncoderUsage} from './texture-data-encoder';\nimport {repeatedTry} from './utils';\n\nexport interface FenceContext {\n query: WebGLSync|null;\n isFencePassed(): boolean;\n}\n\ntype PollItem = {\n isDoneFn: () => boolean; resolveFn: () => void;\n};\n\nexport function linearSearchLastTrue(arr: Array<() => boolean>): number {\n let i = 0;\n for (; i < arr.length; ++i) {\n const isDone = arr[i]();\n if (!isDone) {\n break;\n }\n }\n return i - 1;\n}\n\n/**\n * Abstraction and wrapper around WebGLRenderingContext and its operations\n */\nexport class WebGLContext {\n gl: WebGLRenderingContext;\n version: 1|2;\n\n private vertexbuffer: WebGLBuffer;\n private framebuffer: WebGLFramebuffer;\n\n // WebGL flags and vital parameters\n private isFloatTextureAttachableToFrameBuffer: boolean;\n isFloat32DownloadSupported: boolean;\n isRenderFloat32Supported: boolean;\n isBlendSupported: boolean;\n maxTextureSize: number;\n // private maxCombinedTextureImageUnits: number;\n private maxTextureImageUnits: number;\n // private maxCubeMapTextureSize: number;\n // private shadingLanguageVersion: string;\n // private webglVendor: string;\n // private webglVersion: string;\n\n // WebGL2 flags and vital parameters\n // private max3DTextureSize: number;\n // private maxArrayTextureLayers: number;\n // private maxColorAttachments: number;\n // private maxDrawBuffers: number;\n\n // WebGL extensions\n // eslint-disable-next-line camelcase\n textureFloatExtension: OES_texture_float|null;\n // eslint-disable-next-line camelcase\n textureHalfFloatExtension: OES_texture_half_float|null;\n\n // WebGL2 extensions\n colorBufferFloatExtension: unknown|null;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n disjointTimerQueryWebgl2Extension: {TIME_ELAPSED_EXT: GLenum; GPU_DISJOINT_EXT: GLenum}|null;\n\n private disposed: boolean;\n private frameBufferBound = false;\n\n constructor(gl: WebGLRenderingContext, version: 1|2) {\n this.gl = gl;\n this.version = version;\n\n this.getExtensions();\n this.vertexbuffer = this.createVertexbuffer();\n this.framebuffer = this.createFramebuffer();\n this.queryVitalParameters();\n }\n\n allocateTexture(width: number, height: number, encoder: DataEncoder, data?: Encoder.DataArrayType): WebGLTexture {\n const gl = this.gl;\n // create the texture\n const texture = gl.createTexture();\n // bind the texture so the following methods effect this texture.\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n const buffer = data ? 
encoder.encode(data, width * height) : null;\n gl.texImage2D(\n gl.TEXTURE_2D,\n 0, // Level of detail.\n encoder.internalFormat, width, height,\n 0, // Always 0 in OpenGL ES.\n encoder.format, encoder.textureType, buffer);\n this.checkError();\n return texture as WebGLTexture;\n }\n updateTexture(\n texture: WebGLTexture, width: number, height: number, encoder: DataEncoder, data: Encoder.DataArrayType): void {\n const gl = this.gl;\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const buffer = encoder.encode(data, width * height);\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0, // level\n 0, // xoffset\n 0, // yoffset\n width, height, encoder.format, encoder.textureType, buffer);\n this.checkError();\n }\n attachFramebuffer(texture: WebGLTexture, width: number, height: number): void {\n const gl = this.gl;\n // Make it the target for framebuffer operations - including rendering.\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture,\n 0); // 0, we aren't using MIPMAPs\n this.checkError();\n gl.viewport(0, 0, width, height);\n gl.scissor(0, 0, width, height);\n }\n readTexture(\n texture: WebGLTexture, width: number, height: number, dataSize: number, dataType: Encoder.DataType,\n channels: number): Encoder.DataArrayType {\n const gl = this.gl;\n if (!channels) {\n channels = 1;\n }\n if (!this.frameBufferBound) {\n this.attachFramebuffer(texture, width, height);\n }\n const encoder = this.getEncoder(dataType, channels);\n const buffer = encoder.allocate(width * height);\n // bind texture to framebuffer\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture,\n 0); // 0, we aren't using MIPMAPs\n // TODO: Check if framebuffer is ready\n gl.readPixels(0, 0, width, height, gl.RGBA, encoder.textureType, buffer);\n this.checkError();\n // unbind FB\n return encoder.decode(buffer, dataSize);\n }\n\n isFramebufferReady(): boolean {\n // TODO: Implement logic to check if the framebuffer is ready\n return true;\n }\n getActiveTexture(): string {\n const gl = this.gl;\n const n = gl.getParameter(this.gl.ACTIVE_TEXTURE);\n return `TEXTURE${(n - gl.TEXTURE0)}`;\n }\n getTextureBinding(): WebGLTexture {\n return this.gl.getParameter(this.gl.TEXTURE_BINDING_2D);\n }\n getFramebufferBinding(): WebGLFramebuffer {\n return this.gl.getParameter(this.gl.FRAMEBUFFER_BINDING);\n }\n setVertexAttributes(positionHandle: number, textureCoordHandle: number): void {\n const gl = this.gl;\n gl.vertexAttribPointer(positionHandle, 3, gl.FLOAT, false, 20, 0);\n gl.enableVertexAttribArray(positionHandle);\n if (textureCoordHandle !== -1) {\n gl.vertexAttribPointer(textureCoordHandle, 2, gl.FLOAT, false, 20, 12);\n gl.enableVertexAttribArray(textureCoordHandle);\n }\n this.checkError();\n }\n createProgram(\n vertexShader: WebGLShader,\n fragShader: WebGLShader,\n ): WebGLProgram {\n const gl = this.gl;\n const program = gl.createProgram()!;\n\n // the program consists of our shaders\n gl.attachShader(program, vertexShader);\n gl.attachShader(program, fragShader);\n gl.linkProgram(program);\n return program;\n }\n compileShader(shaderSource: string, shaderType: number): WebGLShader {\n const gl = this.gl;\n const shader = gl.createShader(shaderType);\n if (!shader) {\n throw new Error(`createShader() returned null with type ${shaderType}`);\n }\n\n gl.shaderSource(shader, shaderSource);\n gl.compileShader(shader);\n if 
(gl.getShaderParameter(shader, gl.COMPILE_STATUS) === false) {\n throw new Error(`Failed to compile shader: ${gl.getShaderInfoLog(shader)}\nShader source:\n${shaderSource}`);\n }\n return shader;\n }\n deleteShader(shader: WebGLShader): void {\n this.gl.deleteShader(shader);\n }\n bindTextureToUniform(texture: WebGLTexture, position: number, uniformHandle: WebGLUniformLocation): void {\n const gl = this.gl;\n gl.activeTexture(gl.TEXTURE0 + position);\n this.checkError();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n this.checkError();\n gl.uniform1i(uniformHandle, position);\n this.checkError();\n }\n draw(): void {\n this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);\n this.checkError();\n }\n checkError(): void {\n if (env.debug) {\n const gl = this.gl;\n const error = gl.getError();\n let label = '';\n switch (error) {\n case (gl.NO_ERROR):\n return;\n case (gl.INVALID_ENUM):\n label = 'INVALID_ENUM';\n break;\n case (gl.INVALID_VALUE):\n label = 'INVALID_VALUE';\n break;\n case (gl.INVALID_OPERATION):\n label = 'INVALID_OPERATION';\n break;\n case (gl.INVALID_FRAMEBUFFER_OPERATION):\n label = 'INVALID_FRAMEBUFFER_OPERATION';\n break;\n case (gl.OUT_OF_MEMORY):\n label = 'OUT_OF_MEMORY';\n break;\n case (gl.CONTEXT_LOST_WEBGL):\n label = 'CONTEXT_LOST_WEBGL';\n break;\n default:\n label = `Unknown WebGL Error: ${error.toString(16)}`;\n }\n throw new Error(label);\n }\n }\n deleteTexture(texture: WebGLTexture): void {\n this.gl.deleteTexture(texture);\n }\n deleteProgram(program: WebGLProgram): void {\n this.gl.deleteProgram(program);\n }\n getEncoder(dataType: Encoder.DataType, channels: number, usage: EncoderUsage = EncoderUsage.Default): DataEncoder {\n if (this.version === 2) {\n return new DataEncoders.RedFloat32DataEncoder(this.gl as WebGL2RenderingContext, channels);\n }\n\n switch (dataType) {\n case 'float':\n if (usage === EncoderUsage.UploadOnly || this.isRenderFloat32Supported) {\n return new DataEncoders.RGBAFloatDataEncoder(this.gl, channels);\n } else {\n return new DataEncoders.RGBAFloatDataEncoder(\n this.gl, channels, this.textureHalfFloatExtension!.HALF_FLOAT_OES);\n }\n case 'int':\n throw new Error('not implemented');\n case 'byte':\n return new DataEncoders.Uint8DataEncoder(this.gl, channels);\n default:\n throw new Error(`Invalid dataType: ${dataType}`);\n }\n }\n clearActiveTextures(): void {\n const gl = this.gl;\n for (let unit = 0; unit < this.maxTextureImageUnits; ++unit) {\n gl.activeTexture(gl.TEXTURE0 + unit);\n gl.bindTexture(gl.TEXTURE_2D, null);\n }\n }\n dispose(): void {\n if (this.disposed) {\n return;\n }\n const gl = this.gl;\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteFramebuffer(this.framebuffer);\n gl.bindBuffer(gl.ARRAY_BUFFER, null);\n gl.deleteBuffer(this.vertexbuffer);\n gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);\n gl.finish();\n this.disposed = true;\n }\n\n private createDefaultGeometry(): Float32Array {\n // Sets of x,y,z(=0),s,t coordinates.\n return new Float32Array([\n -1.0, 1.0, 0.0, 0.0, 1.0, // upper left\n -1.0, -1.0, 0.0, 0.0, 0.0, // lower left\n 1.0, 1.0, 0.0, 1.0, 1.0, // upper right\n 1.0, -1.0, 0.0, 1.0, 0.0 // lower right\n ]);\n }\n private createVertexbuffer(): WebGLBuffer {\n const gl = this.gl;\n const buffer = gl.createBuffer();\n if (!buffer) {\n throw new Error('createBuffer() returned null');\n }\n const geometry = this.createDefaultGeometry();\n gl.bindBuffer(gl.ARRAY_BUFFER, buffer);\n gl.bufferData(gl.ARRAY_BUFFER, geometry, gl.STATIC_DRAW);\n this.checkError();\n return buffer;\n }\n private 
createFramebuffer(): WebGLFramebuffer {\n const fb = this.gl.createFramebuffer();\n if (!fb) {\n throw new Error('createFramebuffer returned null');\n }\n return fb;\n }\n\n private queryVitalParameters(): void {\n const gl = this.gl;\n\n this.isFloatTextureAttachableToFrameBuffer = this.checkFloatTextureAttachableToFrameBuffer();\n this.isRenderFloat32Supported = this.checkRenderFloat32();\n this.isFloat32DownloadSupported = this.checkFloat32Download();\n\n if (this.version === 1 && !this.textureHalfFloatExtension && !this.isRenderFloat32Supported) {\n throw new Error('both float32 and float16 TextureType are not supported');\n }\n\n this.isBlendSupported = !this.isRenderFloat32Supported || this.checkFloat32Blend();\n\n // this.maxCombinedTextureImageUnits = gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS);\n this.maxTextureSize = gl.getParameter(gl.MAX_TEXTURE_SIZE);\n this.maxTextureImageUnits = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);\n // this.maxCubeMapTextureSize = gl.getParameter(gl.MAX_CUBE_MAP_TEXTURE_SIZE);\n // this.shadingLanguageVersion = gl.getParameter(gl.SHADING_LANGUAGE_VERSION);\n // this.webglVendor = gl.getParameter(gl.VENDOR);\n // this.webglVersion = gl.getParameter(gl.VERSION);\n\n if (this.version === 2) {\n // this.max3DTextureSize = gl.getParameter(WebGL2RenderingContext.MAX_3D_TEXTURE_SIZE);\n // this.maxArrayTextureLayers = gl.getParameter(WebGL2RenderingContext.MAX_ARRAY_TEXTURE_LAYERS);\n // this.maxColorAttachments = gl.getParameter(WebGL2RenderingContext.MAX_COLOR_ATTACHMENTS);\n // this.maxDrawBuffers = gl.getParameter(WebGL2RenderingContext.MAX_DRAW_BUFFERS);\n }\n }\n private getExtensions(): void {\n if (this.version === 2) {\n this.colorBufferFloatExtension = this.gl.getExtension('EXT_color_buffer_float');\n this.disjointTimerQueryWebgl2Extension = this.gl.getExtension('EXT_disjoint_timer_query_webgl2');\n } else {\n this.textureFloatExtension = this.gl.getExtension('OES_texture_float');\n this.textureHalfFloatExtension = this.gl.getExtension('OES_texture_half_float');\n }\n }\n\n private checkFloatTextureAttachableToFrameBuffer(): boolean {\n // test whether Float32 texture is supported:\n // STEP.1 create a float texture\n const gl = this.gl;\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n // eslint-disable-next-line @typescript-eslint/naming-convention\n const internalFormat = this.version === 2 ? 
(gl as unknown as {RGBA32F: number}).RGBA32F : gl.RGBA;\n gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, 1, 1, 0, gl.RGBA, gl.FLOAT, null);\n // STEP.2 bind a frame buffer\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n // STEP.3 attach texture to framebuffer\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n // STEP.4 test whether framebuffer is complete\n const isComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isComplete;\n }\n\n private checkRenderFloat32(): boolean {\n if (this.version === 2) {\n if (!this.colorBufferFloatExtension) {\n return false;\n }\n } else {\n if (!this.textureFloatExtension) {\n return false;\n }\n }\n return this.isFloatTextureAttachableToFrameBuffer;\n }\n\n private checkFloat32Download(): boolean {\n if (this.version === 2) {\n if (!this.colorBufferFloatExtension) {\n return false;\n }\n } else {\n if (!this.textureFloatExtension) {\n return false;\n }\n if (!this.gl.getExtension('WEBGL_color_buffer_float')) {\n return false;\n }\n }\n return this.isFloatTextureAttachableToFrameBuffer;\n }\n\n /**\n * Check whether GL_BLEND is supported\n */\n private checkFloat32Blend(): boolean {\n // it looks like currently (2019-05-08) there is no easy way to detect whether BLEND is supported\n // https://github.com/microsoft/onnxjs/issues/145\n\n const gl = this.gl;\n\n let texture: WebGLTexture|null|undefined;\n let frameBuffer: WebGLFramebuffer|null|undefined;\n let vertexShader: WebGLShader|null|undefined;\n let fragmentShader: WebGLShader|null|undefined;\n let program: WebGLProgram|null|undefined;\n\n try {\n texture = gl.createTexture();\n frameBuffer = gl.createFramebuffer();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n\n // eslint-disable-next-line @typescript-eslint/naming-convention\n const internalFormat = this.version === 2 ? 
(gl as unknown as {RGBA32F: number}).RGBA32F : gl.RGBA;\n gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, 1, 1, 0, gl.RGBA, gl.FLOAT, null);\n\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n gl.enable(gl.BLEND);\n\n vertexShader = gl.createShader(gl.VERTEX_SHADER);\n if (!vertexShader) {\n return false;\n }\n gl.shaderSource(vertexShader, 'void main(){}');\n gl.compileShader(vertexShader);\n\n fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);\n if (!fragmentShader) {\n return false;\n }\n gl.shaderSource(fragmentShader, 'precision highp float;void main(){gl_FragColor=vec4(0.5);}');\n gl.compileShader(fragmentShader);\n\n program = gl.createProgram();\n if (!program) {\n return false;\n }\n gl.attachShader(program, vertexShader);\n gl.attachShader(program, fragmentShader);\n gl.linkProgram(program);\n gl.useProgram(program);\n\n gl.drawArrays(gl.POINTS, 0, 1);\n return gl.getError() === gl.NO_ERROR;\n\n } finally {\n gl.disable(gl.BLEND);\n\n if (program) {\n gl.deleteProgram(program);\n }\n if (vertexShader) {\n gl.deleteShader(vertexShader);\n }\n if (fragmentShader) {\n gl.deleteShader(fragmentShader);\n }\n if (frameBuffer) {\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteFramebuffer(frameBuffer);\n }\n if (texture) {\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.deleteTexture(texture);\n }\n }\n }\n\n beginTimer(): WebGLQuery {\n if (this.version === 2 && this.disjointTimerQueryWebgl2Extension) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.disjointTimerQueryWebgl2Extension;\n\n const query = gl2.createQuery() as WebGLQuery;\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported.');\n }\n }\n\n endTimer() {\n if (this.version === 2 && this.disjointTimerQueryWebgl2Extension) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.disjointTimerQueryWebgl2Extension;\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported');\n }\n }\n\n isTimerResultAvailable(query: WebGLQuery): boolean {\n let available = false, disjoint = false;\n if (this.version === 2 && this.disjointTimerQueryWebgl2Extension) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.disjointTimerQueryWebgl2Extension;\n\n available = gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n disjoint = gl2.getParameter(ext.GPU_DISJOINT_EXT);\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported');\n }\n\n return available && !disjoint;\n }\n\n getTimerResult(query: WebGLQuery): number {\n let timeElapsed = 0;\n if (this.version === 2) {\n const gl2 = this.gl as WebGL2RenderingContext;\n timeElapsed = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n gl2.deleteQuery(query);\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported');\n }\n // return miliseconds\n return timeElapsed / 1000000;\n }\n\n async waitForQueryAndGetTime(query: WebGLQuery): Promise {\n await repeatedTry(() => this.isTimerResultAvailable(query));\n return this.getTimerResult(query);\n }\n\n public async createAndWaitForFence(): Promise {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n\n private createFence(gl: WebGLRenderingContext): FenceContext 
{\n let isFencePassed: () => boolean;\n const gl2 = gl as WebGL2RenderingContext;\n const query = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n if (query === null) {\n isFencePassed = () => true;\n } else {\n isFencePassed = () => {\n const status = gl2.clientWaitSync(query, 0, 0);\n return status === gl2.ALREADY_SIGNALED || status === gl2.CONDITION_SATISFIED;\n };\n }\n return {query, isFencePassed};\n }\n\n async pollFence(fenceContext: FenceContext) {\n return new Promise(resolve => {\n void this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n\n private itemsToPoll: PollItem[] = [];\n\n pollItems(): void {\n // Find the last query that has finished.\n const index = linearSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const {resolveFn} = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n\n private async addItemToPoll(isDoneFn: () => boolean, resolveFn: () => void) {\n this.itemsToPoll.push({isDoneFn, resolveFn});\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n await repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../instrument';\n\nimport {WebGLContext} from './webgl-context';\n\nconst cache: {[contextId: string]: WebGLContext} = {};\n\n/**\n * This factory function creates proper WebGLRenderingContext based on\n * the current browsers capabilities\n * The order is from higher/most recent versions to most basic\n */\nexport function createWebGLContext(contextId?: 'webgl'|'webgl2'): WebGLContext {\n let context: WebGLContext|undefined;\n if ((!contextId || contextId === 'webgl2') && 'webgl2' in cache) {\n context = cache.webgl2;\n } else if ((!contextId || contextId === 'webgl') && 'webgl' in cache) {\n context = cache.webgl;\n }\n\n if (!context) {\n try {\n // try to create webgl context from an offscreen canvas\n const offscreenCanvas = createOffscreenCanvas();\n context = createNewWebGLContext(offscreenCanvas, contextId);\n } catch (e) {\n // if failed, fallback to try to use a normal canvas element\n const canvas = createCanvas();\n context = createNewWebGLContext(canvas, contextId);\n }\n }\n\n contextId = contextId || context.version === 1 ? 
'webgl' : 'webgl2';\n const gl = context.gl;\n\n cache[contextId] = context;\n\n if (gl.isContextLost()) {\n delete cache[contextId];\n return createWebGLContext(contextId);\n }\n\n gl.disable(gl.DEPTH_TEST);\n gl.disable(gl.STENCIL_TEST);\n gl.disable(gl.BLEND);\n gl.disable(gl.DITHER);\n gl.disable(gl.POLYGON_OFFSET_FILL);\n gl.disable(gl.SAMPLE_COVERAGE);\n gl.enable(gl.SCISSOR_TEST);\n gl.enable(gl.CULL_FACE);\n gl.cullFace(gl.BACK);\n\n return context;\n}\n\nexport function createNewWebGLContext(canvas: HTMLCanvasElement, contextId?: 'webgl'|'webgl2'): WebGLContext {\n const contextAttributes: WebGLContextAttributes = {\n alpha: false,\n depth: false,\n antialias: false,\n stencil: false,\n preserveDrawingBuffer: false,\n premultipliedAlpha: false,\n failIfMajorPerformanceCaveat: false\n };\n let gl: WebGLRenderingContext|null;\n const ca = contextAttributes;\n if (!contextId || contextId === 'webgl2') {\n gl = canvas.getContext('webgl2', ca);\n if (gl) {\n try {\n return new WebGLContext(gl, 2);\n } catch (err) {\n Logger.warning('GlContextFactory', `failed to create WebGLContext using contextId 'webgl2'. Error: ${err}`);\n }\n }\n }\n if (!contextId || contextId === 'webgl') {\n gl = canvas.getContext('webgl', ca) || canvas.getContext('experimental-webgl', ca) as WebGLRenderingContext;\n if (gl) {\n try {\n return new WebGLContext(gl, 1);\n } catch (err) {\n Logger.warning(\n 'GlContextFactory',\n `failed to create WebGLContext using contextId 'webgl' or 'experimental-webgl'. Error: ${err}`);\n }\n }\n }\n\n throw new Error('WebGL is not supported');\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\ndeclare let OffscreenCanvas: {new (width: number, height: number): HTMLCanvasElement};\n\nfunction createCanvas(): HTMLCanvasElement {\n if (typeof document === 'undefined') {\n throw new TypeError('failed to create canvas: document is not supported');\n }\n const canvas: HTMLCanvasElement = document.createElement('canvas');\n canvas.width = 1;\n canvas.height = 1;\n return canvas;\n}\n\nfunction createOffscreenCanvas(): HTMLCanvasElement {\n if (typeof OffscreenCanvas === 'undefined') {\n throw new TypeError('failed to create offscreen canvas: OffscreenCanvas is not supported');\n }\n return new OffscreenCanvas(1, 1);\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {Backend, SessionHandler} from '../backend';\nimport {Logger} from '../instrument';\nimport {Session} from '../session';\n\nimport {WebGLSessionHandler} from './webgl/session-handler';\nimport {WebGLContext} from './webgl/webgl-context';\nimport {createWebGLContext} from './webgl/webgl-context-factory';\n\n/**\n * WebGLBackend is the entry point for all WebGL opeartions\n * When it starts it created the WebGLRenderingContext\n * and other main framework components such as Program and Texture Managers\n */\nexport class WebGLBackend implements Backend {\n glContext: WebGLContext;\n\n get contextId(): 'webgl'|'webgl2'|undefined {\n return env.webgl.contextId;\n }\n set contextId(value: 'webgl'|'webgl2'|undefined) {\n env.webgl.contextId = value;\n }\n\n get matmulMaxBatchSize(): number|undefined {\n return env.webgl.matmulMaxBatchSize;\n }\n set matmulMaxBatchSize(value: number|undefined) {\n env.webgl.matmulMaxBatchSize = value;\n }\n\n get textureCacheMode(): 'initializerOnly'|'full'|undefined {\n return env.webgl.textureCacheMode;\n }\n set textureCacheMode(value: 'initializerOnly'|'full'|undefined) {\n env.webgl.textureCacheMode = value;\n }\n\n get pack(): boolean|undefined {\n return env.webgl.pack;\n }\n set pack(value: boolean|undefined) {\n env.webgl.pack = value;\n }\n\n get async(): boolean|undefined {\n return env.webgl.async;\n }\n set async(value: boolean|undefined) {\n env.webgl.async = value;\n }\n\n initialize(): boolean {\n try {\n this.glContext = createWebGLContext(this.contextId);\n if (typeof this.matmulMaxBatchSize !== 'number') {\n this.matmulMaxBatchSize = 16;\n }\n if (typeof this.textureCacheMode !== 'string') {\n this.textureCacheMode = 'full';\n }\n if (typeof this.pack !== 'boolean') {\n this.pack = false;\n }\n if (typeof this.async !== 'boolean') {\n this.async = false;\n }\n\n Logger.setWithEnv(env);\n\n if (!env.webgl.context) {\n Object.defineProperty(env.webgl, 'context', {value: this.glContext.gl});\n }\n\n Logger.verbose(\n 'WebGLBackend',\n `Created WebGLContext: ${typeof this.glContext} with matmulMaxBatchSize: ${\n this.matmulMaxBatchSize}; textureCacheMode: ${this.textureCacheMode}; pack: ${this.pack}; async: ${\n this.async}.`);\n return true;\n } catch (e) {\n Logger.warning('WebGLBackend', `Unable to initialize WebGLBackend. ${e}`);\n return false;\n }\n }\n createSessionHandler(context: Session.Context): SessionHandler {\n return new WebGLSessionHandler(this, context);\n }\n dispose(): void {\n this.glContext.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGLBackend} from './backends/backend-webgl';\nimport {Graph} from './graph';\nimport {Operator} from './operators';\nimport {OpSet} from './opset';\nimport {Session} from './session';\n\nexport interface InferenceHandler {\n /**\n * dispose the inference handler. it will be called as the last step in Session.run()\n */\n dispose(): void;\n}\n\nexport interface SessionHandler {\n /**\n * transform the graph at initialization time\n * @param graphTransformer the graph transformer to manipulate the model graph\n */\n transformGraph?(graphTransformer: Graph.Transformer): void;\n\n /**\n * create an instance of InferenceHandler to use in a Session.run() call\n */\n createInferenceHandler(): InferenceHandler;\n\n /**\n * dispose the session handler. 
it will be called when a session is being disposed explicitly\n */\n dispose(): void;\n\n /**\n * Resolves the operator from the name and opset version; backend specific\n * @param node the node to resolve\n * @param opsets a list of opsets that exported from the model\n * @param graph the completely initialized graph\n */\n resolve(node: Graph.Node, opsets: readonly OpSet[], graph: Graph): Operator;\n\n /**\n * This method let's the sessionHandler know that the graph initialization is complete\n * @param graph the completely initialized graph\n */\n onGraphInitialized?(graph: Graph): void;\n\n /**\n * a reference to the corresponding backend\n */\n readonly backend: Backend;\n\n /**\n * a reference to the session context\n */\n readonly context: Session.Context;\n}\n\nexport interface Backend {\n /**\n * initialize the backend. will be called only once, when the first time the\n * backend it to be used\n */\n initialize(): boolean|Promise;\n\n /**\n * create an instance of SessionHandler to use in a Session object's lifecycle\n */\n createSessionHandler(context: Session.Context): SessionHandler;\n\n /**\n * dispose the backend. currently this will not be called\n */\n dispose(): void;\n}\n\n// caches all initialized backend instances\nconst backendsCache: Map = new Map();\n\nexport const backend: {[name: string]: Backend} = {\n webgl: new WebGLBackend()\n};\n\n/**\n * Resolve a reference to the backend. If a hint is specified, the corresponding\n * backend will be used.\n */\nexport async function resolveBackend(hint?: string|readonly string[]): Promise {\n if (!hint) {\n return resolveBackend(['webgl']);\n } else {\n const hints = typeof hint === 'string' ? [hint] : hint;\n\n for (const backendHint of hints) {\n const cache = backendsCache.get(backendHint);\n if (cache) {\n return cache;\n }\n\n const backend = await tryLoadBackend(backendHint);\n if (backend) {\n return backend;\n }\n }\n }\n\n throw new Error('no available backend to use');\n}\n\nasync function tryLoadBackend(backendHint: string): Promise {\n const backendObj = backend;\n\n if (typeof backendObj[backendHint] !== 'undefined' && isBackend(backendObj[backendHint])) {\n const backend = backendObj[backendHint];\n let init = backend.initialize();\n if (typeof init === 'object' && 'then' in init) {\n init = await init;\n }\n if (init) {\n backendsCache.set(backendHint, backend);\n return backend;\n }\n }\n\n return undefined;\n}\n\nfunction isBackend(obj: unknown) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const o = obj as any;\n\n // check if an object is a Backend instance\n if (\n 'initialize' in o && typeof o.initialize === 'function' && // initialize()\n 'createSessionHandler' in o && typeof o.createSessionHandler === 'function' && // createSessionHandler()\n 'dispose' in o && typeof o.dispose === 'function' // dispose()\n ) {\n return true;\n }\n\n return false;\n}\n\nexport type BackendType = Backend;\nexport type SessionHandlerType = ReturnType;\nexport type InferenceHandlerType = ReturnType;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {SessionHandler} from './backend';\nimport {Graph} from './graph';\nimport {Logger, Profiler} from './instrument';\nimport {Operator} from './operators';\nimport {Tensor} from './tensor';\n\nclass KernelOp {\n constructor(public op: Operator, public node: Graph.Node) {}\n}\n\nexport class ExecutionPlan {\n constructor(private graph: Graph, ops: Operator[], private profiler: Readonly) {\n this.initialize(ops);\n }\n\n initialize(ops: Operator[]) {\n this.profiler.event('session', 'ExecutionPlan.initialize', () => {\n const graphNodes = this.graph.getNodes();\n if (graphNodes.length !== ops.length) {\n throw new Error('The size of nodes and OPs do not match.');\n }\n\n this._ops = ops.map((op, i) => new KernelOp(op, graphNodes[i]));\n this.reset();\n\n // look for starter node(s)\n this._starter = [];\n this._ops.forEach((op, i) => {\n let resolved = true;\n for (const input of op.node.inputs) {\n if (\n !this._values[input] // not an initialized input\n && this.graph.getInputIndices().indexOf(input) === -1 // not model input\n ) {\n resolved = false;\n break;\n }\n }\n if (resolved) {\n this._starter.push(i);\n }\n });\n });\n }\n\n reset() {\n this._values = this.graph.getValues().map(i => i.tensor);\n }\n\n async execute(sessionHandler: SessionHandler, modelInputs: Tensor[]): Promise {\n return this.profiler.event('session', 'ExecutionPlan.execute', async () => {\n // reset mediem result\n this.reset();\n\n // create inference handler\n const inferenceHandler = sessionHandler.createInferenceHandler();\n\n // populate inputs value\n const graphInputs = this.graph.getInputIndices();\n if (modelInputs.length !== graphInputs.length) {\n throw new Error(`number of input tensors don't match the number of inputs to the model: actual: ${\n modelInputs.length} expected: ${graphInputs.length}`);\n }\n\n modelInputs.forEach((input, i) => {\n const index = graphInputs[i];\n this._values[index] = input;\n });\n\n // prepare running sequence\n const sequence: number[] = this._starter.slice(0);\n\n // execution iterations\n const graphValues = this.graph.getValues();\n const graphNodes = this.graph.getNodes();\n\n let rear = 0;\n while (rear < sequence.length) {\n const thisOpIndex = sequence[rear++];\n const thisOp = this._ops[thisOpIndex];\n\n // check input\n const inputList = thisOp.node.inputs.map(i => this._values[i]);\n if (inputList.indexOf(undefined) !== -1) {\n throw new Error(`unresolved input detected: op: ${thisOp.node}`);\n }\n\n // run\n const inputTensors = inputList as Tensor[];\n Logger.verbose(\n 'ExecPlan',\n `Running op:${thisOp.node.name} (${\n inputTensors.map((t, i) => `'${thisOp.node.inputs[i]}': ${t.type}[${t.dims.join(',')}]`).join(', ')})`);\n\n const outputList = await this.profiler.event(\n 'node', thisOp.node.name, async () => thisOp.op.impl(inferenceHandler, inputTensors, thisOp.op.context));\n\n // check output\n if (outputList.length !== thisOp.node.outputs.length) {\n throw new Error('the size of output does not match model definition.');\n }\n\n // fill value\n outputList.forEach((output, i) => {\n const j = thisOp.node.outputs[i];\n if (this._values[j]) {\n throw new Error(`output [${j}] already has value: op:${thisOp.node.name}`);\n }\n this._values[j] = output;\n });\n\n // resolve downstream nodes\n const downstreamNodes = new Set();\n outputList.forEach((_output, i) => {\n const j = thisOp.node.outputs[i];\n for (const currentDownstreamNodeIndex of graphValues[j].to) {\n const 
currentDownstreamNode = graphNodes[currentDownstreamNodeIndex];\n let resolved = true;\n for (const k of currentDownstreamNode.inputs) {\n if (!this._values[k]) {\n resolved = false;\n break;\n }\n }\n if (resolved) {\n downstreamNodes.add(currentDownstreamNodeIndex);\n }\n }\n });\n sequence.push(...downstreamNodes);\n }\n\n const output: Tensor[] = [];\n for (let i = 0; i < this.graph.getOutputIndices().length; i++) {\n const outputIndex = this.graph.getOutputIndices()[i];\n const outputTensor = this._values[outputIndex];\n if (outputTensor === undefined) {\n throw new Error(`required output [${outputIndex}] does not have value`);\n }\n if (outputIndex === 0) {\n await outputTensor.getData();\n } else {\n // eslint-disable-next-line no-unused-expressions\n outputTensor.data;\n }\n output.push(outputTensor);\n }\n Logger.verbose('ExecPlan', 'disposing of inferenceHandler');\n inferenceHandler.dispose();\n return output;\n });\n }\n\n _values: Array;\n _ops: KernelOp[];\n _starter: number[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport Long from 'long';\n\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {Tensor} from './tensor';\nimport {decodeUtf8String, LongUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport declare namespace Attribute {\n export interface DataTypeMap {\n float: number;\n int: number;\n string: string;\n tensor: Tensor;\n floats: number[];\n ints: number[];\n strings: string[];\n tensors: Tensor[];\n }\n\n export type DataType = keyof DataTypeMap;\n}\n\ntype ValueTypes = Attribute.DataTypeMap[Attribute.DataType];\n\ntype Value = [ValueTypes, Attribute.DataType];\n\nexport class Attribute {\n constructor(attributes: onnx.IAttributeProto[]|ortFbs.Attribute[]|null|undefined) {\n this._attributes = new Map();\n if (attributes !== null && attributes !== undefined) {\n for (const attr of attributes) {\n if (attr instanceof onnx.AttributeProto) {\n this._attributes.set(attr.name, [Attribute.getValue(attr), Attribute.getType(attr)]);\n } else if (attr instanceof ortFbs.Attribute) {\n this._attributes.set(attr.name()!, [Attribute.getValue(attr), Attribute.getType(attr)]);\n }\n }\n if (this._attributes.size < attributes.length) {\n throw new Error('duplicated attribute names');\n }\n }\n }\n\n set(key: string, type: Attribute.DataType, value: ValueTypes): void {\n this._attributes.set(key, [value, type]);\n }\n delete(key: string): void {\n this._attributes.delete(key);\n }\n getFloat(key: string, defaultValue?: Attribute.DataTypeMap['float']) {\n return this.get(key, 'float', defaultValue);\n }\n\n getInt(key: string, defaultValue?: Attribute.DataTypeMap['int']) {\n return this.get(key, 'int', defaultValue);\n }\n\n getString(key: string, defaultValue?: Attribute.DataTypeMap['string']) {\n return this.get(key, 'string', defaultValue);\n }\n\n getTensor(key: string, defaultValue?: Attribute.DataTypeMap['tensor']) {\n return this.get(key, 'tensor', defaultValue);\n }\n\n getFloats(key: string, defaultValue?: Attribute.DataTypeMap['floats']) {\n return this.get(key, 'floats', defaultValue);\n }\n\n getInts(key: string, defaultValue?: Attribute.DataTypeMap['ints']) {\n return this.get(key, 'ints', defaultValue);\n }\n\n getStrings(key: string, defaultValue?: Attribute.DataTypeMap['strings']) {\n return this.get(key, 'strings', defaultValue);\n }\n\n getTensors(key: string, defaultValue?: 
Attribute.DataTypeMap['tensors']) {\n return this.get(key, 'tensors', defaultValue);\n }\n\n private get(\n key: string, type: Attribute.DataType, defaultValue?: V): V {\n const valueAndType = this._attributes.get(key);\n if (valueAndType === undefined) {\n if (defaultValue !== undefined) {\n return defaultValue;\n }\n throw new Error(`required attribute not found: ${key}`);\n }\n if (valueAndType[1] !== type) {\n throw new Error(`type mismatch: expected ${type} but got ${valueAndType[1]}`);\n }\n return valueAndType[0] as V;\n }\n\n private static getType(attr: onnx.IAttributeProto|ortFbs.Attribute): Attribute.DataType {\n const type = attr instanceof onnx.AttributeProto ? (attr).type : (attr as ortFbs.Attribute).type();\n switch (type) {\n case onnx.AttributeProto.AttributeType.FLOAT:\n return 'float';\n case onnx.AttributeProto.AttributeType.INT:\n return 'int';\n case onnx.AttributeProto.AttributeType.STRING:\n return 'string';\n case onnx.AttributeProto.AttributeType.TENSOR:\n return 'tensor';\n case onnx.AttributeProto.AttributeType.FLOATS:\n return 'floats';\n case onnx.AttributeProto.AttributeType.INTS:\n return 'ints';\n case onnx.AttributeProto.AttributeType.STRINGS:\n return 'strings';\n case onnx.AttributeProto.AttributeType.TENSORS:\n return 'tensors';\n default:\n throw new Error(`attribute type is not supported yet: ${onnx.AttributeProto.AttributeType[type]}`);\n }\n }\n\n private static getValue(attr: onnx.IAttributeProto|ortFbs.Attribute) {\n const attrType = attr instanceof onnx.AttributeProto ? attr.type : (attr as ortFbs.Attribute).type();\n if (attrType === onnx.AttributeProto.AttributeType.GRAPH || attrType === onnx.AttributeProto.AttributeType.GRAPHS) {\n throw new Error('graph attribute is not supported yet');\n }\n\n const value = this.getValueNoCheck(attr);\n\n // cast LONG to number\n if (attrType === onnx.AttributeProto.AttributeType.INT && LongUtil.isLong(value)) {\n return LongUtil.longToNumber(value as Long | flatbuffers.Long);\n }\n\n // cast LONG[] to number[]\n if (attrType === onnx.AttributeProto.AttributeType.INTS) {\n const arr = (value as Array);\n const numberValue: number[] = new Array(arr.length);\n\n for (let i = 0; i < arr.length; i++) {\n const maybeLong = arr[i];\n numberValue[i] = LongUtil.longToNumber(maybeLong);\n }\n\n return numberValue;\n }\n\n // cast onnx.TensorProto to onnxjs.Tensor\n if (attrType === onnx.AttributeProto.AttributeType.TENSOR) {\n return attr instanceof onnx.AttributeProto ? Tensor.fromProto(value as onnx.ITensorProto) :\n Tensor.fromOrtTensor(value as ortFbs.Tensor);\n }\n\n // cast onnx.TensorProto[] to onnxjs.Tensor[]\n if (attrType === onnx.AttributeProto.AttributeType.TENSORS) {\n if (attr instanceof onnx.AttributeProto) {\n const tensorProtos = value as onnx.ITensorProto[];\n return tensorProtos.map(value => Tensor.fromProto(value));\n } else if (attr instanceof ortFbs.Attribute) {\n const tensorProtos = value as ortFbs.Tensor[];\n return tensorProtos.map(value => Tensor.fromOrtTensor(value));\n }\n }\n\n // cast Uint8Array to string\n if (attrType === onnx.AttributeProto.AttributeType.STRING) {\n // string in onnx attribute is of uint8array type, so we need to convert it to string below. 
While in ort format,\n // string attributes are returned as string, so no conversion is needed.\n if (attr instanceof onnx.AttributeProto) {\n const utf8String = value as Uint8Array;\n return decodeUtf8String(utf8String);\n }\n }\n\n // cast Uint8Array[] to string[]\n if (attrType === onnx.AttributeProto.AttributeType.STRINGS) {\n // strings in onnx attribute is returned as uint8array[], so we need to convert it to string[] below. While in ort\n // format strings attributes are returned as string[], so no conversion is needed.\n if (attr instanceof onnx.AttributeProto) {\n const utf8Strings = value as Uint8Array[];\n return utf8Strings.map(decodeUtf8String);\n }\n }\n\n return value as ValueTypes;\n }\n\n private static getValueNoCheck(attr: onnx.IAttributeProto|ortFbs.Attribute) {\n return attr instanceof (onnx.AttributeProto) ? this.getValueNoCheckFromOnnxFormat(attr) :\n this.getValueNoCheckFromOrtFormat(attr as ortFbs.Attribute);\n }\n\n private static getValueNoCheckFromOnnxFormat(attr: onnx.IAttributeProto) {\n switch (attr.type!) {\n case onnx.AttributeProto.AttributeType.FLOAT:\n return attr.f;\n case onnx.AttributeProto.AttributeType.INT:\n return attr.i;\n case onnx.AttributeProto.AttributeType.STRING:\n return attr.s;\n case onnx.AttributeProto.AttributeType.TENSOR:\n return attr.t;\n case onnx.AttributeProto.AttributeType.GRAPH:\n return attr.g;\n case onnx.AttributeProto.AttributeType.FLOATS:\n return attr.floats;\n case onnx.AttributeProto.AttributeType.INTS:\n return attr.ints;\n case onnx.AttributeProto.AttributeType.STRINGS:\n return attr.strings;\n case onnx.AttributeProto.AttributeType.TENSORS:\n return attr.tensors;\n case onnx.AttributeProto.AttributeType.GRAPHS:\n return attr.graphs;\n default:\n throw new Error(`unsupported attribute type: ${onnx.AttributeProto.AttributeType[attr.type!]}`);\n }\n }\n\n private static getValueNoCheckFromOrtFormat(attr: ortFbs.Attribute) {\n switch (attr.type()) {\n case ortFbs.AttributeType.FLOAT:\n return attr.f();\n case ortFbs.AttributeType.INT:\n return attr.i();\n case ortFbs.AttributeType.STRING:\n return attr.s();\n case ortFbs.AttributeType.TENSOR:\n return attr.t();\n case ortFbs.AttributeType.GRAPH:\n return attr.g();\n case ortFbs.AttributeType.FLOATS:\n return attr.floatsArray();\n case ortFbs.AttributeType.INTS: {\n const ints = [];\n for (let i = 0; i < attr.intsLength(); i++) {\n ints.push(attr.ints(i)!);\n }\n return ints;\n }\n case ortFbs.AttributeType.STRINGS: {\n const strings = [];\n for (let i = 0; i < attr.stringsLength(); i++) {\n strings.push(attr.strings(i));\n }\n return strings;\n }\n case ortFbs.AttributeType.TENSORS: {\n const tensors = [];\n for (let i = 0; i < attr.tensorsLength(); i++) {\n tensors.push(attr.tensors(i)!);\n }\n return tensors;\n }\n // case ortFbs.AttributeType.GRAPHS:\n // TODO: Subgraph not supported yet.\n // const graphs = [];\n // for (let i = 0; i < attr.graphsLength(); i++) {\n // graphs.push(attr.graphs(i)!);\n // }\n // return graphs;\n default:\n throw new Error(`unsupported attribute type: ${ortFbs.AttributeType[attr.type()]}`);\n }\n }\n\n protected _attributes: Map;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Attribute} from './attribute';\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {Tensor} from './tensor';\nimport {LongUtil, MAX_CLIP, MIN_CLIP, ProtoUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport declare namespace Graph {\n export interface Shape {\n readonly dims: readonly number[];\n }\n export interface ValueType {\n readonly tensorType: Tensor.DataType;\n readonly shape: Shape;\n }\n export interface Value {\n // the tensor data. empty for non-initialized inputs\n readonly tensor?: Tensor;\n\n // index to the Node where the value comes from. -1 for initializer.\n readonly from: number;\n\n // indices to the Nodes where the values go to.\n readonly to: readonly number[];\n\n // value type specification. empty for non-input values.\n readonly type?: ValueType;\n }\n export interface Node {\n // name of the node\n readonly name: string;\n\n // the operator type\n readonly opType: string;\n\n // indices to the Values where the inputs come from.\n readonly inputs: readonly number[];\n\n // indices to the Values where the outpus go to.\n readonly outputs: readonly number[];\n\n // the attributes that used by the operator\n readonly attributes: Attribute;\n }\n\n /**\n * a Transformer is an instance that allows all possible transformation operations that applied to a graph\n */\n export interface Transformer {\n removeAllIdentityNodes(): void;\n removeAllDropoutNodes(): void;\n fuseConvActivationNodes(): void;\n // TODO: add generic functions to manipulate the graph\n }\n\n // an initializer can use transformer to transform the graph\n export interface Initializer {\n transformGraph(transformer: Transformer): void;\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface Graph {\n getInputIndices(): readonly number[];\n getInputNames(): readonly string[];\n getOutputIndices(): readonly number[];\n getOutputNames(): readonly string[];\n getValues(): readonly Graph.Value[];\n getNodes(): readonly Graph.Node[];\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-redeclare\nexport const Graph = {\n /**\n * construct a graph from a graph protobuf type\n */\n from: (graphProto: onnx.IGraphProto|ortFbs.Graph, initializer?: Graph.Initializer) =>\n new GraphImpl(graphProto, initializer),\n};\n\nclass Value implements Graph.Value {\n constructor(valueInfo?: onnx.IValueInfoProto) {\n this._from = undefined;\n this._to = [];\n this.tensor = undefined;\n this.type = undefined;\n\n if (valueInfo) {\n this.type = ProtoUtil.tensorValueTypeFromProto(valueInfo.type!.tensorType!);\n }\n }\n\n _from?: number; // -1 represent from initializer\n get from() {\n return this._from!;\n }\n _to: number[];\n get to() {\n return this._to;\n }\n type?: Graph.ValueType;\n tensor?: Tensor;\n}\n\nclass Node implements Graph.Node {\n constructor(_nodeProto: onnx.INodeProto|ortFbs.Node, name?: string) {\n if (_nodeProto instanceof onnx.NodeProto) {\n this.name = _nodeProto.name;\n this.opType = _nodeProto.opType;\n this.attributes = new Attribute(_nodeProto.attribute);\n } else if (_nodeProto instanceof ortFbs.Node) {\n this.name = name ?? 
_nodeProto.name()!;\n this.opType = _nodeProto.opType()!;\n this.attributes = new Attribute(ProtoUtil.tensorAttributesFromORTFormat(_nodeProto));\n }\n\n this.inputs = [];\n this.outputs = [];\n this.executeNode = true;\n }\n\n name: string;\n opType: string;\n inputs: number[];\n outputs: number[];\n attributes: Attribute;\n executeNode: boolean;\n}\n\nclass GraphImpl implements Graph, Graph.Transformer {\n private _allData: Value[];\n\n private _allInputIndices: number[];\n private _allInputNames: string[];\n\n private _allOutputIndices: number[];\n private _allOutputNames: string[];\n\n private _nodes: Node[];\n\n constructor(graph: onnx.IGraphProto|ortFbs.Graph, graphInitializer?: Graph.Initializer) {\n if (!graph) {\n throw new TypeError('graph is empty');\n }\n\n // build the graph - will throw exceptions if something fatal is detected\n this.buildGraph(graph);\n\n // execute any transformation logic for the graph (if applicable)\n this.transformGraph(graphInitializer);\n\n // check for cycles and other inconsistencies - will throw exceptions if something fatal is detected\n this.checkIsAcyclic();\n }\n\n getInputIndices(): readonly number[] {\n return this._allInputIndices;\n }\n\n getInputNames(): readonly string[] {\n return this._allInputNames;\n }\n\n getOutputIndices(): readonly number[] {\n return this._allOutputIndices;\n }\n\n getOutputNames(): readonly string[] {\n return this._allOutputNames;\n }\n\n getValues(): readonly Graph.Value[] {\n return this._allData;\n }\n\n getNodes(): readonly Graph.Node[] {\n return this._nodes;\n }\n\n private buildGraph(graph: onnx.IGraphProto|ortFbs.Graph) {\n // build the graph - will throw exceptions if something fatal is detected\n if (graph instanceof onnx.GraphProto) {\n this.buildGraphFromOnnxFormat(graph);\n } else if (graph instanceof ortFbs.Graph) {\n this.buildGraphFromOrtFormat(graph);\n } else {\n throw new TypeError('Graph type is not supported.');\n }\n }\n private buildGraphFromOnnxFormat(graph: onnx.IGraphProto) {\n const dataIndices = new Map();\n this._allData = [];\n\n this._allInputIndices = [];\n this._allInputNames = [];\n\n this._allOutputIndices = [];\n this._allOutputNames = [];\n\n this._nodes = [];\n\n const nodesIndices = new Map();\n\n // scan all inputs\n if (!graph.input) {\n throw new Error('missing information in graph: input');\n }\n const inputValueNames = [];\n for (const i of graph.input) {\n if (dataIndices.has(i.name!)) {\n throw new Error(`duplicated input name: ${i.name}`);\n }\n const currentIndex = this._allData.push(new Value(i)) - 1;\n dataIndices.set(i.name!, currentIndex);\n inputValueNames.push(i.name!);\n }\n\n // scan all initializers\n if (!graph.initializer) {\n throw new Error('missing information in graph: initializer');\n }\n for (const i of graph.initializer) {\n let index = dataIndices.get(i.name!);\n if (index === undefined) {\n const value = new Value();\n value.type = {\n shape: {dims: ProtoUtil.tensorDimsFromProto(i.dims!)},\n tensorType: ProtoUtil.tensorDataTypeFromProto(i.dataType!)\n };\n index = this._allData.push(value) - 1;\n dataIndices.set(i.name!, index);\n }\n this._allData[index]._from = -1;\n this._allData[index].tensor = Tensor.fromProto(i);\n }\n\n // filter out input indices\n for (let i = 0; i < this._allData.length; i++) {\n if (!this._allData[i].tensor) {\n this._allInputIndices.push(i);\n this._allInputNames.push(inputValueNames[i]);\n }\n }\n\n // scan all outputs\n if (!graph.output) {\n throw new Error('missing information in graph: output');\n }\n for 
(const i of graph.output) {\n if (dataIndices.has(i.name!)) {\n throw new Error(`duplicated output name: ${i.name}`);\n }\n const currentIndex = this._allData.push(new Value(i)) - 1;\n dataIndices.set(i.name!, currentIndex);\n this._allOutputIndices.push(currentIndex);\n this._allOutputNames.push(i.name!);\n }\n\n // scan all nodes\n if (!graph.node) {\n throw new Error('missing information in graph: node');\n }\n for (const nodeProto of graph.node) {\n if (!nodeProto.name) {\n // assign a name to the node if it doesn't have one\n for (let pick = 0;; pick++) {\n const name = `unnamed_${nodeProto.opType}_${pick}`;\n if (!nodesIndices.has(name)) {\n nodeProto.name = name;\n break;\n }\n }\n }\n\n if (nodesIndices.has(nodeProto.name)) {\n throw new Error(`duplicated node name: ${nodeProto.name}`);\n }\n const currentIndex = this._nodes.push(new Node(nodeProto)) - 1;\n nodesIndices.set(nodeProto.name, currentIndex);\n }\n\n // scan node's outputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.node[i];\n if (!nodeProto.output) {\n throw new Error(`missing output for node: ${nodeProto.name}`);\n }\n for (const output of nodeProto.output) {\n let dataIndex = dataIndices.get(output);\n if (typeof dataIndex === 'undefined') {\n dataIndex = this._allData.push(new Value()) - 1;\n dataIndices.set(output, dataIndex);\n }\n node.outputs.push(dataIndex);\n\n if (this._allData[dataIndex]._from !== undefined) {\n throw new Error(`multiple nodes output to one data value: ${dataIndex}`);\n }\n this._allData[dataIndex]._from = i;\n\n // for the 'Constant' operator, just create a new edge in the graph corresponding to the 'output' of the\n // operator and ignore the node from the graph\n if (nodeProto.opType === 'Constant') {\n if (!nodeProto.attribute || nodeProto.attribute.length !== 1 || !nodeProto.attribute[0].t) {\n throw new Error('missing attributes or missing tensor value in attributes for this Constant operator');\n }\n if (!nodeProto.output || nodeProto.output.length !== 1) {\n throw new Error('missing output or incorrect number of outputs for this Constant operator');\n }\n node.outputs.pop();\n node.executeNode = false;\n\n this._allData[dataIndex]._from = -1;\n this._allData[dataIndex].tensor = Tensor.fromProto(nodeProto.attribute[0].t);\n }\n }\n }\n\n // scan node's inputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.node[i];\n\n if (!nodeProto.input) {\n throw new Error(`missing input for node: ${nodeProto.name}`);\n }\n for (const input of nodeProto.input) {\n const dataIndex = dataIndices.get(input);\n if (typeof dataIndex === 'undefined') {\n // handle exception when opset > 9 and roi / scales not given\n if (input === '' && (nodeProto.input.length === 3 || nodeProto.input.length === 4) &&\n nodeProto.opType === 'Resize') {\n continue;\n }\n throw new Error(`unrecognized input '${input}' for node: ${nodeProto.name}`);\n }\n node.inputs.push(dataIndex);\n\n this._allData[dataIndex]._to.push(i);\n }\n }\n\n return true;\n }\n\n private buildGraphFromOrtFormat(graph: ortFbs.Graph) {\n const dataIndices = new Map();\n this._allData = [];\n\n this._allInputIndices = [];\n this._allInputNames = [];\n\n this._allOutputIndices = [];\n this._allOutputNames = [];\n\n this._nodes = [];\n\n const nodesIndices = new Map();\n\n // scan all inputs\n const inputValueNames = [];\n for (let i = 0; i < graph.inputsLength(); i++) {\n const inputName = graph.inputs(i);\n if 
(dataIndices.has(inputName)) {\n throw new Error(`duplicated input name: ${inputName}`);\n }\n // Find the input typeInfo from nodeargs\n for (let j = 0; j < graph.nodeArgsLength(); j++) {\n if (graph.nodeArgs(j)?.name() === inputName) {\n const value = new Value();\n const valueType = graph.nodeArgs(j)?.type()?.valueType();\n if (valueType !== ortFbs.TypeInfoValue.tensor_type) {\n throw new Error('Unexpected value type for the nodeArg.');\n }\n const valueInfo = graph.nodeArgs(j)!.type()!.value(new ortFbs.TensorTypeAndShape())!;\n const type = ProtoUtil.tensorDataTypeFromProto(valueInfo.elemType());\n const shape = valueInfo.shape()!;\n const dims = [];\n for (let k = 0; k < shape.dimLength()!; k++) {\n dims.push(LongUtil.longToNumber(shape.dim(k)!.value()!.dimValue()!));\n }\n value.type = {shape: {dims}, tensorType: type};\n const currentIndex = this._allData.push(value) - 1;\n dataIndices.set(inputName, currentIndex);\n inputValueNames.push(inputName);\n }\n }\n }\n // check initializers\n for (let i = 0; i < graph.initializersLength(); i++) {\n const initializer = graph.initializers(i)!;\n let index = dataIndices.get(initializer.name()!);\n if (index === undefined) {\n const value = new Value();\n const dims = ProtoUtil.tensorDimsFromORTFormat(initializer);\n const type = ProtoUtil.tensorDataTypeFromProto(initializer.dataType());\n value.type = {shape: {dims}, tensorType: type};\n index = this._allData.push(value) - 1;\n dataIndices.set(initializer.name()!, index);\n }\n this._allData[index]._from = -1;\n this._allData[index].tensor = Tensor.fromOrtTensor(initializer);\n }\n\n // filter out input indices\n for (let i = 0; i < this._allData.length; i++) {\n if (!this._allData[i].tensor) {\n this._allInputIndices.push(i);\n this._allInputNames.push(inputValueNames[i]);\n }\n }\n\n // scan all outputs\n for (let i = 0; i < graph.outputsLength(); i++) {\n const outputName = graph.outputs(i);\n if (dataIndices.has(outputName)) {\n throw new Error(`duplicated output name: ${outputName}`);\n }\n const currentIndex = this._allData.push(new Value()) - 1;\n dataIndices.set(outputName, currentIndex);\n this._allOutputIndices.push(currentIndex);\n this._allOutputNames.push(outputName);\n }\n\n // scan all nodes\n if (!graph.nodes) {\n throw new Error('missing information in graph: node');\n }\n for (let i = 0; i < graph.nodesLength(); i++) {\n const nodeProto = graph.nodes(i);\n let name = nodeProto!.name();\n if (!name) {\n // assign a name to the node if it doesn't have one\n for (let pick = 0;; pick++) {\n name = `unnamed_${nodeProto!.opType()}_${pick}`;\n if (!nodesIndices.has(name)) {\n // an unique name is found. 
break.\n break;\n }\n }\n }\n\n if (nodesIndices.has(name)) {\n throw new Error(`duplicated node name: ${name}`);\n }\n const currentIndex = this._nodes.push(new Node(nodeProto!, name)) - 1;\n nodesIndices.set(name, currentIndex);\n }\n\n // scan node's outputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.nodes(i);\n if (nodeProto == null) {\n throw new Error(`No node exists at index ${i}`);\n }\n if (nodeProto?.outputsLength() === 0) {\n throw new Error(`missing output for node: ${nodeProto.name}`);\n }\n for (let j = 0; j < nodeProto?.outputsLength(); j++) {\n const output = nodeProto?.outputs(j);\n let dataIndex = dataIndices.get(output);\n if (typeof dataIndex === 'undefined') {\n dataIndex = this._allData.push(new Value()) - 1;\n dataIndices.set(output, dataIndex);\n }\n node.outputs.push(dataIndex);\n\n if (this._allData[dataIndex]._from !== undefined) {\n throw new Error(`multiple nodes output to one data value: ${dataIndex}`);\n }\n this._allData[dataIndex]._from = i;\n\n // for the 'Constant' operator, just create a new edge in the graph corresponding to the 'output' of the\n // operator and ignore the node from the graph\n if (nodeProto.opType() === 'Constant') {\n if (nodeProto.attributesLength() !== 1 || !nodeProto.attributes(0)!.t()) {\n throw new Error('missing attributes or missing tensor value in attributes for this Constant operator');\n }\n if (nodeProto.outputsLength() !== 1) {\n throw new Error('missing output or incorrect number of outputs for this Constant operator');\n }\n node.outputs.pop();\n node.executeNode = false;\n\n this._allData[dataIndex]._from = -1;\n this._allData[dataIndex].tensor = Tensor.fromOrtTensor(nodeProto.attributes(0)!.t()!);\n }\n }\n }\n\n // scan node's inputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.nodes(i)!;\n\n if (nodeProto.inputsLength() === 0) {\n throw new Error(`missing input for node: ${nodeProto.name}`);\n }\n for (let j = 0; j < nodeProto.inputsLength()!; j++) {\n const input = nodeProto.inputs(j)!;\n const dataIndex = dataIndices.get(input);\n if (typeof dataIndex === 'undefined') {\n throw new Error(`unrecognized input '${input}' for node: ${nodeProto!.name()}`);\n }\n node.inputs.push(dataIndex);\n\n this._allData[dataIndex]._to.push(i);\n }\n }\n }\n\n private checkIsAcyclic() {\n // go through the graph and check for cycles or other fatal inconsistencies\n const starters: Set = new Set();\n this._allInputIndices.forEach(i => {\n const data = this._allData[i];\n data._to.forEach(j => {\n starters.add(j);\n });\n });\n\n // Iterative DFS to check for cycles\n const nodesStack = Array.from(starters);\n const nodesState = new Array(this._nodes.length).fill('white');\n\n while (nodesStack.length > 0) {\n const nodeIndex = nodesStack.pop()!;\n // this node has now been processed completely. Mark this node 'black' to denote this.\n if (nodesState[nodeIndex] === 'gray') {\n nodesState[nodeIndex] = 'black';\n } else {\n // this node is under processing stage. 
mark this node 'gray' to denote this.\n nodesStack.push(nodeIndex);\n nodesState[nodeIndex] = 'gray';\n\n this._nodes[nodeIndex].outputs.forEach((outgoingEdgeIndex) => {\n const data = this._allData[outgoingEdgeIndex];\n if (typeof data.tensor !== 'undefined') {\n throw new Error('node outputs should not be initialized');\n }\n if (data._from !== nodeIndex) {\n throw new Error('from property of the Value object doesn\\'t match index of Node being processed');\n }\n data._to.forEach((downstreamNodeIndex) => {\n // back edge found - cyclic\n if (nodesState[downstreamNodeIndex] === 'gray') {\n throw new Error('model graph is cyclic');\n }\n // tree edge found - continue processing by adding it to stack\n else if (nodesState[downstreamNodeIndex] === 'white') {\n nodesStack.push(downstreamNodeIndex);\n }\n });\n });\n }\n }\n }\n\n private transformGraph(graphInitializer?: Graph.Initializer): void {\n // apply common transform\n this.removeAllIdentityNodes();\n this.removeAllDropoutNodes();\n this.fuseConvActivationNodes();\n // apply initializer specific transform\n if (graphInitializer) {\n graphInitializer.transformGraph(this);\n }\n\n // finalize graph\n this.finalizeGraph();\n }\n\n /**\n * finalize the graph.\n *\n * this function should be called after all the transformation completed.\n * this function removes all unnecessary nodes and values from the graph\n */\n finalizeGraph() {\n let offset = 0;\n // delete all nodes that are not being executed\n // The graph is represented using these two arrays\n // this._nodes - Array holding the kernels to execute - each entry is a kernel pointing to this._allData\n // this._allData - hold 2 fields - to [] & from - these feileds hold the graph map for inputs and outputs per node\n // newIndices - remapping the graph after reading the flag 'executeNode'\n const newIndices = new Array(this._nodes.length, 0);\n let nodePossition = 0;\n\n for (let i = 0; i < this._nodes.length; i++) {\n // giving new indexes to the nodes based on execution flag\n newIndices[i] = nodePossition;\n if (this._nodes[i].executeNode) {\n if (nodePossition !== i) {\n this._nodes[nodePossition] = this._nodes[i];\n }\n nodePossition++;\n\n } else {\n // delete all output values\n this._nodes[i].outputs.forEach(ind => {\n this._allData[ind]._from = -2;\n });\n }\n }\n\n // removing the unused nodes\n this._nodes.splice(nodePossition, this._nodes.length - nodePossition);\n\n // Updating this._allData according to the new this._nodes\n for (let i = 0; i < this._allData.length; i++) {\n const currentData = this._allData[i];\n if (currentData._from !== undefined && currentData._from !== -1 && currentData._from !== -2) {\n currentData._from = newIndices[currentData._from];\n }\n\n for (let j = 0; j < currentData._to.length; j++) {\n if (currentData._to[j] >= 0) {\n currentData._to[j] = newIndices[currentData._to[j]];\n } else {\n throw new Error('Trying to update a removed node');\n }\n }\n }\n\n offset = 0;\n // delete all values that are not being referenced\n for (let i = 0; i < this._allData.length; i++) {\n // if current value is neither linked to next node, nor an output value, remove it.\n if (this._allData[i].from === -2 && this._allOutputIndices.indexOf(i + offset) === -1) {\n offset++;\n this._allData.splice(i, 1);\n i--;\n continue;\n }\n if (offset > 0) {\n let ind = -1;\n // if current value is neither an input value nor an initializer, find the node it's\n // coming from and update the corresponding node output\n if (this._allData[i].from !== undefined && 
this._allData[i].from !== -1) {\n ind = this._nodes[this._allData[i].from].outputs.indexOf(i + offset);\n if (ind !== -1) {\n this._nodes[this._allData[i].from].outputs[ind] = i;\n }\n } else {\n // if current value is an input value, update its reference in inputIndices\n ind = this._allInputIndices.indexOf(i + offset);\n if (ind !== -1) {\n this._allInputIndices[ind] = i;\n }\n }\n\n // find the node that the current value is linking to and update its input reference\n this._allData[i].to.forEach(node => {\n ind = this._nodes[node].inputs.indexOf(i + offset);\n if (ind !== -1) {\n this._nodes[node].inputs[ind] = i;\n }\n });\n if (this._allData[i].to.length === 0) {\n // if current value is a graph output, update its reference in outputIndices\n ind = this._allOutputIndices.indexOf(i + offset);\n if (ind !== -1) {\n this._allOutputIndices[ind] = i;\n }\n }\n }\n }\n }\n\n /**\n * Delete the specified node. Assume the node has one incoming input and the first output connected to other nodes.\n * An input validation must be done before calling this function.\n * @param nodeIndex The index of node to be deleted\n */\n private deleteNode(nodeIndex: number) {\n const node = this._nodes[nodeIndex];\n if (node.outputs.length > 1) {\n for (let i = 1; i < node.outputs.length; i++) {\n if (this._allData[node.outputs[i]].to.length > 0) {\n throw new Error('Node deletion with more than one output connected to other nodes is not supported. ');\n }\n }\n }\n\n // this node wil not be executed\n node.executeNode = false;\n const inputValueIndex = node.inputs[0];\n const outputValueIndex = node.outputs[0];\n const nodesConsumingOutput = this._allData[outputValueIndex].to;\n\n // remove this node from the to property of the input Value\n for (let i = 0; i < node.inputs.length; i++) {\n const delIndex = this._allData[node.inputs[i]].to.indexOf(nodeIndex);\n // should not happen\n if (delIndex === -1) {\n throw new Error('The Value object doesn\\'t have the current Node in it\\'s \\'to\\' property ');\n }\n this._allData[node.inputs[i]].to.splice(delIndex, 1);\n }\n\n // clear node indices consuming this output Value\n this._allData[outputValueIndex]._to = [];\n\n // if the output of this node is a graph output, adjust the index appropriately\n const index = this._allOutputIndices.indexOf(outputValueIndex);\n if (index !== -1) {\n this._allOutputIndices[index] = inputValueIndex;\n }\n\n // override the inputs for nodes consuming this node's output with the input to this node\n if (nodesConsumingOutput && nodesConsumingOutput.length > 0) {\n for (const nodeIndex of nodesConsumingOutput) {\n const replaceIndex = this._nodes[nodeIndex].inputs.indexOf(outputValueIndex);\n // should not happen\n if (replaceIndex === -1) {\n throw new Error('The Node object doesn\\'t have the output Value in it\\'s \\'inputs\\' property ');\n }\n this._nodes[nodeIndex].inputs[replaceIndex] = inputValueIndex;\n this._allData[inputValueIndex].to.push(nodeIndex);\n }\n }\n }\n\n removeAllDropoutNodes() {\n let nodeIndex = 0;\n for (const node of this._nodes) {\n // weed out 'Dropout' nodes so that no time is wasted in execution\n if (node.opType === 'Dropout') {\n // the node should have exactly 1 input and 1 or 2 outputs\n if (node.inputs.length !== 1) {\n throw new Error('Dropout nodes should only contain one input. 
');\n }\n if (node.outputs.length !== 1 && node.outputs.length !== 2) {\n throw new Error('Dropout nodes should contain either 1 or 2 output(s)');\n }\n // the second output should not be referenced by any other node\n if (node.outputs.length === 2 && this._allData[node.outputs[1]]._to.length !== 0) {\n throw new Error('Dropout nodes\\'s second output should not be referenced by other nodes');\n }\n this.deleteNode(nodeIndex);\n }\n nodeIndex++;\n }\n }\n\n removeAllIdentityNodes() {\n let nodeIndex = 0;\n for (const node of this._nodes) {\n // weed out 'Identity' nodes so that no time is wasted in execution\n if (node.opType === 'Identity') {\n this.deleteNode(nodeIndex);\n }\n nodeIndex++;\n }\n }\n\n isActivation(n: Node): boolean {\n switch (n.opType) {\n // TODO: add other activation methods\n case 'Relu':\n case 'Sigmoid':\n case 'Clip':\n return true;\n default:\n return false;\n }\n }\n\n fuseConvActivationNodes() {\n for (const node of this._nodes) {\n if (node.opType === 'Conv') {\n const next = this._allData[node.outputs[0]]._to;\n if (next.length === 1 && this.isActivation(this._nodes[next[0]])) {\n const child = this._nodes[next[0]];\n if (child.opType === 'Clip') {\n if (child.inputs.length === 1) {\n try {\n node.attributes.set(\n 'activation_params', 'floats',\n [child.attributes.getFloat('min'), child.attributes.getFloat('max')]);\n } catch (e) {\n node.attributes.set('activation_params', 'floats', [MIN_CLIP, MAX_CLIP]);\n }\n } else if (\n child.inputs.length >= 3 && this._allData[child.inputs[1]].tensor !== undefined &&\n this._allData[child.inputs[2]].tensor !== undefined) {\n node.attributes.set('activation_params', 'floats', [\n this._allData[child.inputs[1]].tensor!.floatData[0], this._allData[child.inputs[2]].tensor!.floatData[0]\n ]);\n } else {\n // Skip fusion with clip node since clip min and clip max are not coming from initializer\n continue;\n }\n }\n node.attributes.set('activation', 'string', (child.opType));\n this.deleteNode(next[0]);\n }\n }\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
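// --- Slightly simplified, standalone sketch of the white/gray/black iterative DFS that the
// --- checkIsAcyclic() logic above uses for cycle detection. It operates on a plain adjacency
// --- list rather than the GraphImpl value arrays, so these types are illustration-only
// --- assumptions: gray means "visited but not finalized", and meeting a gray neighbor means a
// --- back edge, i.e. the graph is cyclic.
type DfsColor = 'white' | 'gray' | 'black';

function hasCycle(adjacency: number[][], starters: number[]): boolean {
  const state: DfsColor[] = new Array(adjacency.length).fill('white');
  const stack = [...starters];
  while (stack.length > 0) {
    const node = stack.pop()!;
    if (state[node] === 'gray') {
      // Second time the node is popped: all of its descendants are done.
      state[node] = 'black';
    } else if (state[node] === 'white') {
      // First visit: re-push the node so it is finalized after its children.
      stack.push(node);
      state[node] = 'gray';
      for (const next of adjacency[node]) {
        if (state[next] === 'gray') {
          return true; // back edge found - cyclic
        }
        if (state[next] === 'white') {
          stack.push(next);
        }
      }
    }
  }
  return false;
}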
All rights reserved.\n// Licensed under the MIT License.\n\nimport {flatbuffers} from 'flatbuffers';\n\nimport {Graph} from './graph';\nimport {OpSet} from './opset';\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {LongUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport class Model {\n // empty model\n constructor() {}\n\n load(buf: Uint8Array, graphInitializer?: Graph.Initializer, isOrtFormat?: boolean): void {\n let onnxError: Error|undefined;\n if (!isOrtFormat) {\n // isOrtFormat === false || isOrtFormat === undefined\n try {\n this.loadFromOnnxFormat(buf, graphInitializer);\n return;\n } catch (e) {\n if (isOrtFormat !== undefined) {\n throw e;\n }\n onnxError = e;\n }\n }\n\n try {\n this.loadFromOrtFormat(buf, graphInitializer);\n } catch (e) {\n if (isOrtFormat !== undefined) {\n throw e;\n }\n // Tried both formats and failed (when isOrtFormat === undefined)\n throw new Error(`Failed to load model as ONNX format: ${onnxError}\\nas ORT format: ${e}`);\n }\n }\n\n private loadFromOnnxFormat(buf: Uint8Array, graphInitializer?: Graph.Initializer): void {\n const modelProto = onnx.ModelProto.decode(buf);\n const irVersion = LongUtil.longToNumber(modelProto.irVersion);\n if (irVersion < 3) {\n throw new Error('only support ONNX model with IR_VERSION>=3');\n }\n\n this._opsets =\n modelProto.opsetImport.map(i => ({domain: i.domain as string, version: LongUtil.longToNumber(i.version!)}));\n\n this._graph = Graph.from(modelProto.graph!, graphInitializer);\n }\n\n private loadFromOrtFormat(buf: Uint8Array, graphInitializer?: Graph.Initializer): void {\n const fb = new flatbuffers.ByteBuffer(buf);\n const ortModel = ortFbs.InferenceSession.getRootAsInferenceSession(fb).model()!;\n const irVersion = LongUtil.longToNumber(ortModel.irVersion());\n if (irVersion < 3) {\n throw new Error('only support ONNX model with IR_VERSION>=3');\n }\n this._opsets = [];\n for (let i = 0; i < ortModel.opsetImportLength(); i++) {\n const opsetId = ortModel.opsetImport(i)!;\n this._opsets.push({domain: opsetId?.domain() as string, version: LongUtil.longToNumber(opsetId.version()!)});\n }\n\n this._graph = Graph.from(ortModel.graph()!, graphInitializer);\n }\n\n private _graph: Graph;\n get graph(): Graph {\n return this._graph;\n }\n\n private _opsets: OpSet[];\n get opsets(): readonly OpSet[] {\n return this._opsets;\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
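// --- Hedged sketch of the Model.load() fallback pattern above. The names parseOnnx/parseOrt are
// --- placeholders (not APIs of this bundle): when the format flag is undefined, the ONNX protobuf
// --- parser is tried first and the ORT flatbuffer parser is used as a fallback, with both errors
// --- surfaced if neither format can be parsed.
function loadEither<T>(
    buf: Uint8Array, isOrtFormat: boolean | undefined,
    parseOnnx: (b: Uint8Array) => T, parseOrt: (b: Uint8Array) => T): T {
  let onnxError: unknown;
  if (!isOrtFormat) {
    try {
      return parseOnnx(buf);
    } catch (e) {
      if (isOrtFormat !== undefined) {
        throw e; // caller explicitly requested ONNX format only
      }
      onnxError = e;
    }
  }
  try {
    return parseOrt(buf);
  } catch (e) {
    if (isOrtFormat !== undefined) {
      throw e; // caller explicitly requested ORT format only
    }
    throw new Error(`Failed to load model as ONNX format: ${onnxError}\nas ORT format: ${e}`);
  }
}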
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackend, SessionHandlerType} from './backend';\nimport {ExecutionPlan} from './execution-plan';\nimport {Graph} from './graph';\nimport {Profiler} from './instrument';\nimport {Model} from './model';\nimport {Operator} from './operators';\nimport {Tensor} from './tensor';\n\nexport declare namespace Session {\n export interface Config {\n backendHint?: string;\n profiler?: Profiler.Config;\n }\n\n export interface Context {\n profiler: Readonly;\n graphInputTypes?: Tensor.DataType[];\n graphInputDims?: Array;\n }\n}\n\nexport class Session {\n constructor(config: Session.Config = {}) {\n this._initialized = false;\n this.backendHint = config.backendHint;\n this.profiler = Profiler.create(config.profiler);\n this.context = {profiler: this.profiler, graphInputTypes: [], graphInputDims: []};\n }\n\n get inputNames(): readonly string[] {\n return this._model.graph.getInputNames();\n }\n get outputNames(): readonly string[] {\n return this._model.graph.getOutputNames();\n }\n\n startProfiling() {\n this.profiler.start();\n }\n\n endProfiling() {\n this.profiler.stop();\n }\n\n async loadModel(uri: string): Promise;\n async loadModel(buffer: ArrayBuffer, byteOffset?: number, length?: number): Promise;\n async loadModel(buffer: Uint8Array): Promise;\n async loadModel(arg: string|ArrayBuffer|Uint8Array, byteOffset?: number, length?: number): Promise {\n await this.profiler.event('session', 'Session.loadModel', async () => {\n // resolve backend and session handler\n const backend = await resolveBackend(this.backendHint);\n this.sessionHandler = backend.createSessionHandler(this.context);\n\n this._model = new Model();\n if (typeof arg === 'string') {\n const isOrtFormat = arg.endsWith('.ort');\n if (typeof process !== 'undefined' && process.versions && process.versions.node) {\n // node\n const {readFile} = require('node:fs/promises');\n const buf = await readFile(arg);\n this.initialize(buf, isOrtFormat);\n } else {\n // browser\n const response = await fetch(arg);\n const buf = await response.arrayBuffer();\n this.initialize(new Uint8Array(buf), isOrtFormat);\n }\n } else if (!ArrayBuffer.isView(arg)) {\n // load model from ArrayBuffer\n const arr = new Uint8Array(arg, byteOffset || 0, length || arg.byteLength);\n this.initialize(arr);\n } else {\n // load model from Uint8array\n this.initialize(arg);\n }\n });\n }\n\n private initialize(modelProtoBlob: Uint8Array, isOrtFormat?: boolean): void {\n if (this._initialized) {\n throw new Error('already initialized');\n }\n\n this.profiler.event('session', 'Session.initialize', () => {\n // load graph\n const graphInitializer =\n this.sessionHandler.transformGraph ? 
this.sessionHandler as Graph.Initializer : undefined;\n this._model.load(modelProtoBlob, graphInitializer, isOrtFormat);\n\n // graph is completely initialzied at this stage , let the interested handlers know\n if (this.sessionHandler.onGraphInitialized) {\n this.sessionHandler.onGraphInitialized(this._model.graph);\n }\n // initialize each operator in the graph\n this.initializeOps(this._model.graph);\n\n // instantiate an ExecutionPlan object to be used by the Session object\n this._executionPlan = new ExecutionPlan(this._model.graph, this._ops, this.profiler);\n });\n\n this._initialized = true;\n }\n\n async run(inputs: Map|Tensor[]): Promise> {\n if (!this._initialized) {\n throw new Error('session not initialized yet');\n }\n\n return this.profiler.event('session', 'Session.run', async () => {\n const inputTensors = this.normalizeAndValidateInputs(inputs);\n\n const outputTensors = await this._executionPlan.execute(this.sessionHandler, inputTensors);\n\n return this.createOutput(outputTensors);\n });\n }\n\n private normalizeAndValidateInputs(inputs: Map|Tensor[]): Tensor[] {\n const modelInputNames = this._model.graph.getInputNames();\n\n // normalize inputs\n // inputs: Tensor[]\n if (Array.isArray(inputs)) {\n if (inputs.length !== modelInputNames.length) {\n throw new Error(`incorrect input array length: expected ${modelInputNames.length} but got ${inputs.length}`);\n }\n }\n // convert map to array\n // inputs: Map\n else {\n if (inputs.size !== modelInputNames.length) {\n throw new Error(`incorrect input map size: expected ${modelInputNames.length} but got ${inputs.size}`);\n }\n\n const sortedInputs = new Array(inputs.size);\n let sortedInputsIndex = 0;\n for (let i = 0; i < modelInputNames.length; ++i) {\n const tensor = inputs.get(modelInputNames[i]);\n if (!tensor) {\n throw new Error(`missing input tensor for: '${name}'`);\n }\n sortedInputs[sortedInputsIndex++] = tensor;\n }\n\n inputs = sortedInputs;\n }\n\n // validate dims requirements\n // First session run - graph input data is not cached for the session\n if (!this.context.graphInputTypes || this.context.graphInputTypes.length === 0 || !this.context.graphInputDims ||\n this.context.graphInputDims.length === 0) {\n const modelInputIndices = this._model.graph.getInputIndices();\n const modelValues = this._model.graph.getValues();\n\n const graphInputDims = new Array(modelInputIndices.length);\n\n for (let i = 0; i < modelInputIndices.length; ++i) {\n const graphInput = modelValues[modelInputIndices[i]];\n graphInputDims[i] = graphInput.type!.shape.dims;\n\n // cached for second and subsequent runs.\n // Some parts of the framework works on the assumption that the graph and types and shapes are static\n this.context.graphInputTypes!.push(graphInput.type!.tensorType);\n this.context.graphInputDims!.push(inputs[i].dims);\n }\n\n this.validateInputTensorDims(graphInputDims, inputs, true);\n }\n\n // Second and subsequent session runs - graph input data is cached for the session\n else {\n this.validateInputTensorDims(this.context.graphInputDims, inputs, false);\n }\n\n // validate types requirement\n this.validateInputTensorTypes(this.context.graphInputTypes!, inputs);\n\n return inputs;\n }\n\n private validateInputTensorTypes(graphInputTypes: Tensor.DataType[], givenInputs: Tensor[]) {\n for (let i = 0; i < givenInputs.length; i++) {\n const expectedType = graphInputTypes[i];\n const actualType = givenInputs[i].type;\n if (expectedType !== actualType) {\n throw new Error(`input tensor[${i}] check failed: expected type 
'${expectedType}' but got ${actualType}`);\n }\n }\n }\n\n private validateInputTensorDims(\n graphInputDims: Array, givenInputs: Tensor[], noneDimSupported: boolean) {\n for (let i = 0; i < givenInputs.length; i++) {\n const expectedDims = graphInputDims[i];\n const actualDims = givenInputs[i].dims;\n if (!this.compareTensorDims(expectedDims, actualDims, noneDimSupported)) {\n throw new Error(`input tensor[${i}] check failed: expected shape '[${expectedDims.join(',')}]' but got [${\n actualDims.join(',')}]`);\n }\n }\n }\n\n private compareTensorDims(expectedDims: readonly number[], actualDims: readonly number[], noneDimSupported: boolean):\n boolean {\n if (expectedDims.length !== actualDims.length) {\n return false;\n }\n\n for (let i = 0; i < expectedDims.length; ++i) {\n if (expectedDims[i] !== actualDims[i] && (!noneDimSupported || expectedDims[i] !== 0)) {\n // data shape mis-match AND not a 'None' dimension.\n return false;\n }\n }\n\n return true;\n }\n\n private createOutput(outputTensors: Tensor[]): Map {\n const modelOutputNames = this._model.graph.getOutputNames();\n if (outputTensors.length !== modelOutputNames.length) {\n throw new Error('expected number of outputs do not match number of generated outputs');\n }\n\n const output = new Map();\n for (let i = 0; i < modelOutputNames.length; ++i) {\n output.set(modelOutputNames[i], outputTensors[i]);\n }\n\n return output;\n }\n\n private initializeOps(graph: Graph): void {\n const nodes = graph.getNodes();\n this._ops = new Array(nodes.length);\n\n for (let i = 0; i < nodes.length; i++) {\n this._ops[i] = this.sessionHandler.resolve(nodes[i], this._model.opsets, graph);\n }\n }\n\n private _model: Model;\n private _initialized: boolean;\n\n private _ops: Operator[];\n private _executionPlan: ExecutionPlan;\n\n private backendHint?: string;\n\n private sessionHandler: SessionHandlerType;\n private context: Session.Context;\n private profiler: Readonly;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor} from 'onnxruntime-common';\n\nimport {Session} from './session';\nimport {Tensor as OnnxjsTensor} from './tensor';\n\nexport class OnnxjsSessionHandler implements InferenceSessionHandler {\n constructor(private session: Session) {\n this.inputNames = this.session.inputNames;\n this.outputNames = this.session.outputNames;\n }\n\n async dispose(): Promise {}\n inputNames: readonly string[];\n outputNames: readonly string[];\n async run(\n feeds: SessionHandler.FeedsType, _fetches: SessionHandler.FetchesType,\n _options: InferenceSession.RunOptions): Promise {\n const inputMap = new Map();\n for (const name in feeds) {\n if (Object.hasOwnProperty.call(feeds, name)) {\n const feed = feeds[name];\n inputMap.set(\n name,\n new OnnxjsTensor(\n feed.dims, feed.type as OnnxjsTensor.DataType, undefined, undefined,\n feed.data as OnnxjsTensor.NumberType));\n }\n }\n const outputMap = await this.session.run(inputMap);\n const output: SessionHandler.ReturnType = {};\n outputMap.forEach((tensor, name) => {\n output[name] = new Tensor(tensor.type, tensor.data, tensor.dims);\n });\n return output;\n }\n startProfiling(): void {\n this.session.startProfiling();\n }\n endProfiling(): void {\n this.session.endProfiling();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
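// --- Self-contained sketch (not the Session class above) of the shape check used when validating
// --- inputs: on the first run a graph dimension of 0 is treated as a free ("None") dimension,
// --- while on later runs the cached concrete shapes must match exactly. dimsMatch is a
// --- hypothetical helper name used here for illustration.
function dimsMatch(
    expected: readonly number[], actual: readonly number[], noneDimSupported: boolean): boolean {
  if (expected.length !== actual.length) {
    return false;
  }
  for (let i = 0; i < expected.length; ++i) {
    if (expected[i] !== actual[i] && (!noneDimSupported || expected[i] !== 0)) {
      return false; // mismatch that is not an allowed 'None' dimension
    }
  }
  return true;
}

// Example: a leading 0 accepts any batch size on the first run.
// dimsMatch([0, 3, 224, 224], [8, 3, 224, 224], true) === true
// dimsMatch([0, 3, 224, 224], [8, 3, 224, 224], false) === false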
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable import/no-internal-modules */\nimport {Backend, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {Session} from './onnxjs/session';\nimport {OnnxjsSessionHandler} from './onnxjs/session-handler-inference';\n\nclass OnnxjsBackend implements Backend {\n // eslint-disable-next-line @typescript-eslint/no-empty-function\n async init(): Promise {}\n\n async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise {\n // NOTE: Session.Config(from onnx.js) is not compatible with InferenceSession.SessionOptions(from\n // onnxruntime-common).\n // In future we should remove Session.Config and use InferenceSession.SessionOptions.\n // Currently we allow this to happen to make test runner work.\n const session = new Session(options as unknown as Session.Config);\n\n // typescript cannot merge method override correctly (so far in 4.2.3). need if-else to call the method.\n if (typeof pathOrBuffer === 'string') {\n await session.loadModel(pathOrBuffer);\n } else {\n await session.loadModel(pathOrBuffer);\n }\n\n return new OnnxjsSessionHandler(session);\n }\n}\n\nexport const onnxjsBackend = new OnnxjsBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nexport const isNode = !!(typeof process !== 'undefined' && process.versions && process.versions.node);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/// \n\n//\n// * type hack for \"HTMLImageElement\"\n//\n// in typescript, the type of \"HTMLImageElement\" is defined in lib.dom.d.ts, which is conflict with lib.webworker.d.ts.\n// when we use webworker, the lib.webworker.d.ts will be used, which does not have HTMLImageElement defined.\n//\n// we will get the following errors complaining that HTMLImageElement is not defined:\n//\n// ====================================================================================================================\n//\n// ../common/dist/cjs/tensor-factory.d.ts:187:29 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 187 fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n// Promise | TypedTensor<'uint8'>>;\n// ~~~~~~~~~~~~~~~~\n//\n// node_modules/@webgpu/types/dist/index.d.ts:83:7 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 83 | HTMLImageElement\n// ~~~~~~~~~~~~~~~~\n//\n// ====================================================================================================================\n//\n// `HTMLImageElement` is only used in type declaration and not in real code. So we define it as `unknown` here to\n// bypass the type check.\n\n//\n// * type hack for \"document\"\n//\n// in typescript, the type of \"document\" is defined in lib.dom.d.ts, so it's not available in webworker.\n//\n// we will get the following errors complaining that document is not defined:\n//\n// ====================================================================================================================\n//\n// lib/wasm/wasm-utils-import.ts:7:33 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? 
(document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:61 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:88 - error TS2552: Cannot find name 'HTMLScriptElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~~~~~~~~~~\n// ====================================================================================================================\n//\n// `document` is used to get the current script URL, which is not available in webworker. This file is served as a\n// \"dual\" file for entries of both webworker and the esm module.\n//\ndeclare global {\n type HTMLImageElement = unknown;\n type HTMLScriptElement = {src?: string};\n const document: undefined|{currentScript?: HTMLScriptElement};\n}\n\n/**\n * @summary\n *\n * This file is served as a \"dual\" file for both entries of the following:\n * - The proxy worker itself.\n * - When used as a worker, it listens to the messages from the main thread and performs the corresponding operations.\n * - Should be imported directly using `new Worker()` in the main thread.\n *\n * - The ESM module that creates the proxy worker (as a worker launcher).\n * - When used as a worker launcher, it creates the proxy worker and returns it.\n * - Should be imported using `import()` in the main thread, with the query parameter `import=1`.\n *\n * This file will be always compiling into ESM format.\n */\n\nimport type {OrtWasmMessage, SerializableTensorMetadata} from '../proxy-messages.js';\nimport {createSession, copyFromExternalBuffer, endProfiling, extractTransferableBuffers, initEp, initRuntime, releaseSession, run} from '../wasm-core-impl.js';\nimport {initializeWebAssembly} from '../wasm-factory.js';\nimport {scriptSrc} from '../wasm-utils-import.js';\n\nconst WORKER_NAME = 'ort-wasm-proxy-worker';\nconst isProxyWorker = globalThis.self?.name === WORKER_NAME;\n\nif (isProxyWorker) {\n // Worker thread\n self.onmessage = (ev: MessageEvent): void => {\n const {type, in : message} = ev.data;\n try {\n switch (type) {\n case 'init-wasm':\n initializeWebAssembly(message!.wasm)\n .then(\n () => {\n initRuntime(message!).then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n },\n err => {\n postMessage({type, err});\n });\n break;\n case 'init-ep': {\n const {epName, env} = message!;\n initEp(env, epName)\n .then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'copy-from': {\n const {buffer} = message!;\n const bufferData = copyFromExternalBuffer(buffer);\n postMessage({type, out: bufferData} as OrtWasmMessage);\n break;\n }\n case 'create': {\n const {model, options} = message!;\n createSession(model, options)\n .then(\n sessionMetadata => {\n postMessage({type, out: sessionMetadata} as OrtWasmMessage);\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'release':\n releaseSession(message!);\n postMessage({type});\n break;\n case 'run': {\n const {sessionId, inputIndices, inputs, outputIndices, options} = message!;\n run(sessionId, inputIndices, inputs, outputIndices, new Array(outputIndices.length).fill(null), 
options)\n .then(\n outputs => {\n if (outputs.some(o => o[3] !== 'cpu')) {\n postMessage({type, err: 'Proxy does not support non-cpu tensor location.'});\n } else {\n postMessage(\n {type, out: outputs} as OrtWasmMessage,\n extractTransferableBuffers([...inputs, ...outputs] as SerializableTensorMetadata[]));\n }\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'end-profiling':\n endProfiling(message!);\n postMessage({type});\n break;\n default:\n }\n } catch (err) {\n postMessage({type, err} as OrtWasmMessage);\n }\n };\n}\n\nexport default isProxyWorker ?\n null :\n (urlOverride?: string) =>\n new Worker(urlOverride ?? scriptSrc!, {type: BUILD_DEFS.IS_ESM ? 'module' : 'classic', name: WORKER_NAME});\n", "var r,e=(r=import.meta.url,async function(e={}){function t(){return k.buffer!=R.buffer&&Y(),R}function n(){return k.buffer!=R.buffer&&Y(),P}function a(){return k.buffer!=R.buffer&&Y(),D}function o(){return k.buffer!=R.buffer&&Y(),F}function i(){return k.buffer!=R.buffer&&Y(),B}function s(){return k.buffer!=R.buffer&&Y(),I}function u(){return k.buffer!=R.buffer&&Y(),U}function f(){return k.buffer!=R.buffer&&Y(),G}var l,c,d=Object.assign({},e),b=new Promise(((r,e)=>{l=r,c=e})),g=\"object\"==typeof window,m=\"function\"==typeof importScripts,p=m&&\"em-pthread\"==self.name;d.mountExternalData=(r,e)=>{(d.Cb||(d.Cb=new Map)).set(r,e)},d.unmountExternalData=()=>{delete d.Cb};var h=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let v=()=>{const r=(r,e,t)=>(...n)=>{const a=Ye,o=e?.();n=r(...n);const i=e?.();return o!==i&&(r=i,t(o),e=t=null),Ye!=a?new Promise(((r,e)=>{Qe={resolve:r,reject:e}})):n},e=r=>async(...e)=>{try{if(d.Bb)throw Error(\"Session already started\");const t=d.Bb={Zb:e[0],errors:[]},n=await r(...e);if(d.Bb!==t)throw Error(\"Session mismatch\");d.Jb?.flush();const a=t.errors;if(0r)),0d._OrtCreateSession),(r=>d._OrtCreateSession=r)),d._OrtRun=e(r(d._OrtRun,(()=>d._OrtRun),(r=>d._OrtRun=r))),d._OrtRunWithBinding=e(r(d._OrtRunWithBinding,(()=>d._OrtRunWithBinding),(r=>d._OrtRunWithBinding=r))),d._OrtBindInput=r(d._OrtBindInput,(()=>d._OrtBindInput),(r=>d._OrtBindInput=r)),v=void 0};d.jsepInit=(r,e)=>{if(v?.(),\"webgpu\"===r){[d.Jb,d.Qb,d.Ub,d.Kb,d.Tb,d.gb,d.Vb,d.Xb,d.Rb,d.Sb,d.Wb]=e;const r=d.Jb;d.jsepRegisterBuffer=(e,t,n,a)=>r.registerBuffer(e,t,n,a),d.jsepGetBuffer=e=>r.getBuffer(e),d.jsepCreateDownloader=(e,t,n)=>r.createDownloader(e,t,n),d.jsepOnReleaseSession=e=>{r.onReleaseSession(e)},d.jsepOnRunStart=e=>r.onRunStart(e)}};var y,w,A=Object.assign({},d),_=\"./this.program\",C=(r,e)=>{throw e},O=\"\";(g||m)&&(m?O=self.location.href:\"undefined\"!=typeof document&&document.currentScript&&(O=document.currentScript.src),r&&(O=r),O=O.startsWith(\"blob:\")?\"\":O.substr(0,O.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1),r=>{var e=new XMLHttpRequest;return e.open(\"GET\",r,!1),e.send(null),e.responseText},m&&(w=r=>{var e=new XMLHttpRequest;return e.open(\"GET\",r,!1),e.responseType=\"arraybuffer\",e.send(null),new Uint8Array(e.response)}),y=(r,e,t)=>{var n=new XMLHttpRequest;n.open(\"GET\",r,!0),n.responseType=\"arraybuffer\",n.onload=()=>{200==n.status||0==n.status&&n.response?e(n.response):t()},n.onerror=t,n.send(null)});var T=console.log.bind(console),S=console.error.bind(console),W=T,E=S;if(Object.assign(d,A),A=null,p){var x,M=!1;function Dn(r){try{var e=r.data,t=e.cmd;if(\"load\"===t){let r=[];self.onmessage=e=>r.push(e),self.startWorker=()=>{postMessage({cmd:\"loaded\"});for(let e of 
r)Dn(e);self.onmessage=Dn};for(const r of e.handlers)d[r]&&!d[r].proxy||(d[r]=(...e)=>{postMessage({Ib:\"callHandler\",hc:r,args:e})},\"print\"==r&&(W=d[r]),\"printErr\"==r&&(E=d[r]));k=e.wasmMemory,Y(),x(e.wasmModule)}else if(\"run\"===t){wn(e.pthread_ptr,0,0,1,0,0),He(e.pthread_ptr),Tr(),Ar(),M||(mn(),M=!0);try{Sr(e.start_routine,e.arg)}catch(r){if(\"unwind\"!=r)throw r}}else\"cancel\"===t?hn()&&On(-1):\"setimmediate\"!==e.target&&(\"checkMailbox\"===t?M&&Re():t&&(E(`worker: received unknown command ${t}`),E(e)))}catch(r){throw An(),r}}E=function(...r){r=r.join(\" \"),console.error(r)},self.alert=function(...r){postMessage({Ib:\"alert\",text:r.join(\" \"),jc:hn()})},d.instantiateWasm=(r,e)=>new Promise((r=>{x=t=>{t=new WebAssembly.Instance(t,or()),e(t),r()}})),self.onunhandledrejection=r=>{throw r.reason||r},self.onmessage=Dn}var k,N,H,R,P,D,F,B,I,U,j,$,G,z=!1;function Y(){var r=k.buffer;d.HEAP8=R=new Int8Array(r),d.HEAP16=D=new Int16Array(r),d.HEAPU8=P=new Uint8Array(r),d.HEAPU16=F=new Uint16Array(r),d.HEAP32=B=new Int32Array(r),d.HEAPU32=I=new Uint32Array(r),d.HEAPF32=U=new Float32Array(r),d.HEAPF64=G=new Float64Array(r),d.HEAP64=j=new BigInt64Array(r),d.HEAPU64=$=new BigUint64Array(r)}if(!p){if(d.wasmMemory)k=d.wasmMemory;else if(!((k=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof h))throw E(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\"),Error(\"bad memory\");Y()}var L=[],V=[],q=[],J=0,X=null,Q=null;function Z(){if(0==--J&&(null!==X&&(clearInterval(X),X=null),Q)){var r=Q;Q=null,r()}}function K(r){throw E(r=\"Aborted(\"+r+\")\"),z=!0,H=1,r=new WebAssembly.RuntimeError(r+\". 
Build with -sASSERTIONS for more info.\"),c(r),r}var rr,er=r=>r.startsWith(\"data:application/octet-stream;base64,\"),tr=r=>r.startsWith(\"file://\");function nr(r){if(w)return w(r);throw\"both async and sync fetching of the wasm failed\"}function ar(r,e,t){return function(r){if(g||m){if(\"function\"==typeof fetch&&!tr(r))return fetch(r,{credentials:\"same-origin\"}).then((e=>{if(!e.ok)throw`failed to load wasm binary file at '${r}'`;return e.arrayBuffer()})).catch((()=>nr(r)));if(y)return new Promise(((e,t)=>{y(r,(r=>e(new Uint8Array(r))),t)}))}return Promise.resolve().then((()=>nr(r)))}(r).then((r=>WebAssembly.instantiate(r,e))).then(t,(r=>{E(`failed to asynchronously prepare wasm: ${r}`),K(r)}))}function or(){return{a:{wa:sr,b:Er,Y:Mr,y:Rr,ma:Pr,U:Ir,W:Ur,na:jr,ka:$r,da:Gr,ja:zr,I:Yr,V:Lr,S:Vr,la:qr,T:Jr,sa:Zr,C:oe,M:se,L:me,B:he,s:ve,p:ye,D:we,x:Ee,N:xe,ra:Me,ga:ke,Q:Pe,Z:Fe,E:Be,fa:He,pa:Ie,u:$e,A:rt,o:tt,k:ot,c:le,n:st,j:ct,xa:dt,r:bt,d:gt,v:mt,m:pt,g:ht,l:vt,i:yt,h:wt,e:At,aa:_t,ba:St,ca:Wt,_:Et,$:xt,P:Mt,f:Ht,K:Rt,F:Pt,J:Dt,ta:Ft,oa:It,R:Ut,t:Bt,w:jt,O:$t,va:Lt,ua:Vt,ha:Qt,ia:Zt,X:mr,z:Kt,H:rn,ea:en,G:nn,a:k,qa:un,q:fn}}}var ir={1336340:(r,e,t,a)=>{if(void 0===d||!d.Cb)return 1;if((r=Hr(r>>>0)).startsWith(\"./\")&&(r=r.substring(2)),!(r=d.Cb.get(r)))return 2;if(a>>>=0,(e>>>=0)+(t>>>=0)>r.byteLength)return 3;try{return n().set(r.subarray(e,e+t),a>>>0),0}catch{return 4}},1336841:()=>{d.Rb()},1336872:()=>{d.Sb()},1336901:()=>{d.Wb()},1336926:r=>d.Qb(r),1336959:r=>d.Ub(r),1336991:(r,e,t)=>{d.Kb(r,e,t,!0)},1337030:(r,e,t)=>{d.Kb(r,e,t)},1337063:()=>\"undefined\"!=typeof wasmOffsetConverter,1337120:r=>{d.gb(\"Abs\",r,void 0)},1337171:r=>{d.gb(\"Neg\",r,void 0)},1337222:r=>{d.gb(\"Floor\",r,void 0)},1337275:r=>{d.gb(\"Ceil\",r,void 0)},1337327:r=>{d.gb(\"Reciprocal\",r,void 0)},1337385:r=>{d.gb(\"Sqrt\",r,void 0)},1337437:r=>{d.gb(\"Exp\",r,void 0)},1337488:r=>{d.gb(\"Erf\",r,void 0)},1337539:r=>{d.gb(\"Sigmoid\",r,void 0)},1337594:(r,e,t)=>{d.gb(\"HardSigmoid\",r,{alpha:e,beta:t})},1337673:r=>{d.gb(\"Log\",r,void 0)},1337724:r=>{d.gb(\"Sin\",r,void 0)},1337775:r=>{d.gb(\"Cos\",r,void 0)},1337826:r=>{d.gb(\"Tan\",r,void 0)},1337877:r=>{d.gb(\"Asin\",r,void 0)},1337929:r=>{d.gb(\"Acos\",r,void 0)},1337981:r=>{d.gb(\"Atan\",r,void 0)},1338033:r=>{d.gb(\"Sinh\",r,void 0)},1338085:r=>{d.gb(\"Cosh\",r,void 0)},1338137:r=>{d.gb(\"Asinh\",r,void 0)},1338190:r=>{d.gb(\"Acosh\",r,void 0)},1338243:r=>{d.gb(\"Atanh\",r,void 0)},1338296:r=>{d.gb(\"Tanh\",r,void 0)},1338348:r=>{d.gb(\"Not\",r,void 0)},1338399:(r,e,t)=>{d.gb(\"Clip\",r,{min:e,max:t})},1338468:r=>{d.gb(\"Clip\",r,void 0)},1338520:(r,e)=>{d.gb(\"Elu\",r,{alpha:e})},1338578:r=>{d.gb(\"Relu\",r,void 0)},1338630:(r,e)=>{d.gb(\"LeakyRelu\",r,{alpha:e})},1338694:(r,e)=>{d.gb(\"ThresholdedRelu\",r,{alpha:e})},1338764:(r,e)=>{d.gb(\"Cast\",r,{to:e})},1338822:r=>{d.gb(\"Add\",r,void 0)},1338873:r=>{d.gb(\"Sub\",r,void 0)},1338924:r=>{d.gb(\"Mul\",r,void 0)},1338975:r=>{d.gb(\"Div\",r,void 0)},1339026:r=>{d.gb(\"Pow\",r,void 0)},1339077:r=>{d.gb(\"Equal\",r,void 0)},1339130:r=>{d.gb(\"Greater\",r,void 0)},1339185:r=>{d.gb(\"GreaterOrEqual\",r,void 0)},1339247:r=>{d.gb(\"Less\",r,void 0)},1339299:r=>{d.gb(\"LessOrEqual\",r,void 
0)},1339358:(r,e,t,n,a)=>{d.gb(\"ReduceMean\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339517:(r,e,t,n,a)=>{d.gb(\"ReduceMax\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339675:(r,e,t,n,a)=>{d.gb(\"ReduceMin\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339833:(r,e,t,n,a)=>{d.gb(\"ReduceProd\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339992:(r,e,t,n,a)=>{d.gb(\"ReduceSum\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340150:(r,e,t,n,a)=>{d.gb(\"ReduceL1\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340307:(r,e,t,n,a)=>{d.gb(\"ReduceL2\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340464:(r,e,t,n,a)=>{d.gb(\"ReduceLogSum\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340625:(r,e,t,n,a)=>{d.gb(\"ReduceSumSquare\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340789:(r,e,t,n,a)=>{d.gb(\"ReduceLogSumExp\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340953:r=>{d.gb(\"Where\",r,void 0)},1341006:(r,e,t)=>{d.gb(\"Transpose\",r,{perm:e?Array.from(i().subarray(e>>>0,t>>>0)):[]})},1341114:(r,e,t,n)=>{d.gb(\"DepthToSpace\",r,{blocksize:e,mode:Hr(t),format:n?\"NHWC\":\"NCHW\"})},1341247:(r,e,t,n)=>{d.gb(\"DepthToSpace\",r,{blocksize:e,mode:Hr(t),format:n?\"NHWC\":\"NCHW\"})},1341380:(r,e,n,a,o,s,u,f,l,c,b,g,m,p,h)=>{d.gb(\"ConvTranspose\",r,{format:l?\"NHWC\":\"NCHW\",autoPad:e,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,g>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Hr(h)})},1341781:(r,e,n,a,o,s,u,f,l,c,b,g,m,p)=>{d.gb(\"ConvTranspose\",r,{format:f?\"NHWC\":\"NCHW\",autoPad:e,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:g?Array.from(i().subarray(g>>>0,m>>>0)):[],activation:Hr(p)})},1342346:(r,e,n,a,o,s,u,f,l,c,b,g,m,p,h)=>{d.gb(\"ConvTranspose\",r,{format:l?\"NHWC\":\"NCHW\",autoPad:e,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,g>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Hr(h)})},1342747:(r,e,n,a,o,s,u,f,l,c,b,g,m,p)=>{d.gb(\"ConvTranspose\",r,{format:f?\"NHWC\":\"NCHW\",autoPad:e,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:g?Array.from(i().subarray(g>>>0,m>>>0)):[],activation:Hr(p)})},1343312:(r,e)=>{d.gb(\"GlobalAveragePool\",r,{format:e?\"NHWC\":\"NCHW\"})},1343403:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"AveragePool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides
:[g,m]})},1343687:(r,e)=>{d.gb(\"GlobalAveragePool\",r,{format:e?\"NHWC\":\"NCHW\"})},1343778:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"AveragePool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1344062:(r,e)=>{d.gb(\"GlobalMaxPool\",r,{format:e?\"NHWC\":\"NCHW\"})},1344149:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"MaxPool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1344429:(r,e)=>{d.gb(\"GlobalMaxPool\",r,{format:e?\"NHWC\":\"NCHW\"})},1344516:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"MaxPool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1344796:(r,e,t,n,a)=>{d.gb(\"Gemm\",r,{alpha:e,beta:t,transA:n,transB:a})},1344900:r=>{d.gb(\"MatMul\",r,void 0)},1344954:(r,e,t,n)=>{d.gb(\"ArgMax\",r,{keepDims:!!e,selectLastIndex:!!t,axis:n})},1345062:(r,e,t,n)=>{d.gb(\"ArgMin\",r,{keepDims:!!e,selectLastIndex:!!t,axis:n})},1345170:(r,e)=>{d.gb(\"Softmax\",r,{axis:e})},1345233:(r,e)=>{d.gb(\"Concat\",r,{axis:e})},1345293:(r,e,t,n,a)=>{d.gb(\"Split\",r,{axis:e,numOutputs:t,splitSizes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1345433:r=>{d.gb(\"Expand\",r,void 0)},1345487:(r,e)=>{d.gb(\"Gather\",r,{axis:Number(e)})},1345558:(r,e)=>{d.gb(\"GatherElements\",r,{axis:Number(e)})},1345637:(r,e,t,n,a,o,s,u,f,l,c)=>{d.gb(\"Resize\",r,{antialias:e,axes:t?Array.from(i().subarray(t>>>0,n>>>0)):[],coordinateTransformMode:Hr(a),cubicCoeffA:o,excludeOutside:s,extrapolationValue:u,keepAspectRatioPolicy:Hr(f),mode:Hr(l),nearestMode:Hr(c)})},1345983:(r,e,t,n,a,o,s)=>{d.gb(\"Slice\",r,{starts:e?Array.from(i().subarray(e>>>0,t>>>0)):[],ends:n?Array.from(i().subarray(n>>>0,a>>>0)):[],axes:o?Array.from(i().subarray(o>>>0,s>>>0)):[]})},1346199:r=>{d.gb(\"Tile\",r,void 0)},1346251:(r,e,t)=>{d.gb(\"InstanceNormalization\",r,{epsilon:e,format:t?\"NHWC\":\"NCHW\"})},1346365:(r,e,t)=>{d.gb(\"InstanceNormalization\",r,{epsilon:e,format:t?\"NHWC\":\"NCHW\"})},1346479:r=>{d.gb(\"Range\",r,void 0)},1346532:(r,e)=>{d.gb(\"Einsum\",r,{equation:Hr(e)})},1346613:(r,e,t,n,a)=>{d.gb(\"Pad\",r,{mode:e,value:t,pads:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1346740:(r,e,t,n,a,o)=>{d.gb(\"BatchNormalization\",r,{epsilon:e,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1346909:(r,e,t,n,a,o)=>{d.gb(\"BatchNormalization\",r,{epsilon:e,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1347078:(r,e,t)=>{d.gb(\"CumSum\",r,{exclusive:Number(e),reverse:Number(t)})},1347175:(r,e,t,n,a,o,s,u,f)=>{d.gb(\"Attention\",r,{numHeads:e,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o,qkvHiddenSizes:s?Array.from(i().subarray(Number(u)>>>0,Number(u)+s>>>0)):[],pastPresentShareBuffer:!!f})},1347447:r=>{d.gb(\"BiasAdd\",r,void 0)},1347502:r=>{d.gb(\"BiasSplitGelu\",r,void 0)},1347563:r=>{d.gb(\"FastGelu\",r,void 0)},1347619:(r,e,n,a,o,s,f,l,c,b,g,m,p,h,v,y)=>{d.gb(\"Conv\",r,{format:m?\"NHWC\":\"NCHW\",auto_pad:e,dilations:n?Array.from(i().subarray(n>>>0,a>>>0)):[],group:o,kernel_shape:s?Array.from(i().subarray(s>>>0,f>>>0)):[],pads:l?Array.from(i().subarray(l>>>0,c>>>0)):[],strides:b?Array.from(i().subarray(b>>>0,g>>>0)):[],w_is_const:()=>!!t()[p>>>0],activation:Hr(h),activation_params:v?Array.from(u().subarray(v>>>0,y>>>0)):[]})},1348115:r=>{d.gb(\"Gelu\",r,void 
0)},1348167:(r,e,t,n)=>{d.gb(\"GroupQueryAttention\",r,{numHeads:e,kvNumHeads:t,scale:n})},1348280:(r,e,t,n)=>{d.gb(\"LayerNormalization\",r,{axis:e,epsilon:t,simplified:!!n})},1348391:(r,e,t,n)=>{d.gb(\"LayerNormalization\",r,{axis:e,epsilon:t,simplified:!!n})},1348502:(r,e,t,n,a,o)=>{d.gb(\"MatMulNBits\",r,{k:e,n:t,accuracyLevel:n,bits:a,blockSize:o})},1348629:(r,e,t,n,a,o)=>{d.gb(\"MultiHeadAttention\",r,{numHeads:e,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o})},1348788:(r,e)=>{d.gb(\"QuickGelu\",r,{alpha:e})},1348852:(r,e,t,n,a)=>{d.gb(\"RotaryEmbedding\",r,{interleaved:!!e,numHeads:t,rotaryEmbeddingDim:n,scale:a})},1348991:(r,e,t)=>{d.gb(\"SkipLayerNormalization\",r,{epsilon:e,simplified:!!t})},1349093:r=>{d.Vb(r)},1349127:(r,e)=>d.Xb(r,e,d.Bb.Zb,d.Bb.errors),1349239:(r,e,t)=>{d.gb(\"SkipLayerNormalization\",r,{epsilon:e,simplified:!!t})}};function sr(r,e,t){return Ke((async()=>{await d.Tb(r,e,t)}))}function ur(r){this.name=\"ExitStatus\",this.message=`Program terminated with exit(${r})`,this.status=r}var fr=r=>{r.terminate(),r.onmessage=()=>{}},lr=r=>{0==pr.length&&(Cr(),_r(pr[0]));var e=pr.pop();if(!e)return 6;hr.push(e),yr[r.xb]=e,e.xb=r.xb;var t={cmd:\"run\",start_routine:r.$b,arg:r.Mb,pthread_ptr:r.xb};return e.postMessage(t,r.ec),0},cr=0,dr=(r,e,...t)=>{for(var n=2*t.length,a=xn(),o=En(8*n),i=o>>>3,s=0;s>>0]=u)}return r=_n(r,0,n,o,e),Wn(a),r};function br(r){if(p)return dr(0,1,r);if(H=r,!(0{if(H=r,p)throw gr(r),\"unwind\";br(r)},pr=[],hr=[],vr=[],yr={},wr=r=>{var e=r.xb;delete yr[e],pr.push(r),hr.splice(hr.indexOf(r),1),r.xb=0,Cn(e)};function Ar(){vr.forEach((r=>r()))}var _r=r=>new Promise((e=>{r.onmessage=t=>{var n=(t=t.data).cmd;if(t.targetThread&&t.targetThread!=hn()){var a=yr[t.targetThread];a?a.postMessage(t,t.transferList):E(`Internal error! Worker sent a message \"${n}\" to target pthread ${t.targetThread}, but that thread no longer exists!`)}else\"checkMailbox\"===n?Re():\"spawnThread\"===n?lr(t):\"cleanupThread\"===n?wr(yr[t.thread]):\"killThread\"===n?(t=t.thread,n=yr[t],delete yr[t],fr(n),Cn(t),hr.splice(hr.indexOf(n),1),n.xb=0):\"cancelThread\"===n?yr[t.thread].postMessage({cmd:\"cancel\"}):\"loaded\"===n?(r.loaded=!0,e(r)):\"alert\"===n?alert(`Thread ${t.threadId}: ${t.text}`):\"setimmediate\"===t.target?r.postMessage(t):\"callHandler\"===n?d[t.handler](...t.args):n&&E(`worker sent an unknown command ${n}`)},r.onerror=r=>{throw E(`worker sent an error! 
${r.filename}:${r.lineno}: ${r.message}`),r};var t,n=[];for(t of[\"onExit\"])d.hasOwnProperty(t)&&n.push(t);r.postMessage({cmd:\"load\",handlers:n,wasmMemory:k,wasmModule:N})}));function Cr(){var r=new Worker(new URL(import.meta.url),{type:\"module\",workerData:\"em-pthread\",name:\"em-pthread\"});pr.push(r)}var Or=r=>{for(;0{var r=hn(),e=s()[r+52>>>2>>>0];r=s()[r+56>>>2>>>0],Sn(e,e-r),Wn(e)},Sr=(r,e)=>{cr=0,r=Mn(r,e),0>>=0);throw e>>>=0,t>>>=0,s()[n.Fb+16>>>2>>>0]=0,s()[n.Fb+4>>>2>>>0]=e,s()[n.Fb+8>>>2>>>0]=t,r}function xr(r,e,t,n){return p?dr(2,1,r,e,t,n):Mr(r,e,t,n)}function Mr(r,e,t,n){if(r>>>=0,e>>>=0,t>>>=0,n>>>=0,void 0===h)return E(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\"),6;var a=[];return p&&0===a.length?xr(r,e,t,n):(r={$b:t,xb:r,Mb:n,ec:a},p?(r.Ib=\"spawnThread\",postMessage(r,a),0):lr(r))}var kr=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf8\"):void 0,Nr=(r,e,t)=>{var n=(e>>>=0)+t;for(t=e;r[t]&&!(t>=n);)++t;if(16(a=224==(240&a)?(15&a)<<12|o<<6|i:(7&a)<<18|o<<12|i<<6|63&r[e++])?n+=String.fromCharCode(a):(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a))}}else n+=String.fromCharCode(a)}return n},Hr=(r,e)=>(r>>>=0)?Nr(n(),r,e):\"\";function Rr(r,e,t){return p?dr(3,1,r,e,t):0}function Pr(r,e){if(p)return dr(4,1,r,e)}var Dr=r=>{for(var e=0,t=0;t=n?e++:2047>=n?e+=2:55296<=n&&57343>=n?(e+=4,++t):e+=3}return e},Fr=(r,e,t,n)=>{if(!(0>>=0;n=t+n-1;for(var o=0;o=i&&(i=65536+((1023&i)<<10)|1023&r.charCodeAt(++o)),127>=i){if(t>=n)break;e[t++>>>0]=i}else{if(2047>=i){if(t+1>=n)break;e[t++>>>0]=192|i>>6}else{if(65535>=i){if(t+2>=n)break;e[t++>>>0]=224|i>>12}else{if(t+3>=n)break;e[t++>>>0]=240|i>>18,e[t++>>>0]=128|i>>12&63}e[t++>>>0]=128|i>>6&63}e[t++>>>0]=128|63&i}}return e[t>>>0]=0,t-a},Br=(r,e,t)=>Fr(r,n(),e,t);function Ir(r,e){if(p)return dr(5,1,r,e)}function Ur(r,e,t){if(p)return dr(6,1,r,e,t)}function jr(r,e,t){return p?dr(7,1,r,e,t):0}function $r(r,e){if(p)return dr(8,1,r,e)}function Gr(r,e,t){if(p)return dr(9,1,r,e,t)}function zr(r,e,t,n){if(p)return dr(10,1,r,e,t,n)}function Yr(r,e,t,n){if(p)return dr(11,1,r,e,t,n)}function Lr(r,e,t,n){if(p)return dr(12,1,r,e,t,n)}function Vr(r){if(p)return dr(13,1,r)}function qr(r,e){if(p)return dr(14,1,r,e)}function Jr(r,e,t){if(p)return dr(15,1,r,e,t)}var Xr,Qr,Zr=()=>{K(\"\")},Kr=r=>{for(var e=\"\";n()[r>>>0];)e+=Xr[n()[r++>>>0]];return e},re={},ee={},te={};function ne(r,e,t={}){if(!(\"argPackAdvance\"in e))throw new TypeError(\"registerType registeredInstance requires argPackAdvance\");return function(r,e,t={}){var n=e.name;if(!r)throw new Qr(`type \"${n}\" must have a positive integer typeid pointer`);if(ee.hasOwnProperty(r)){if(t.Ob)return;throw new Qr(`Cannot register type '${n}' twice`)}ee[r]=e,delete te[r],re.hasOwnProperty(r)&&(e=re[r],delete re[r],e.forEach((r=>r())))}(r,e,t)}var ae=(r,e,u)=>{switch(e){case 1:return u?r=>t()[r>>>0]:r=>n()[r>>>0];case 2:return u?r=>a()[r>>>1>>>0]:r=>o()[r>>>1>>>0];case 4:return u?r=>i()[r>>>2>>>0]:r=>s()[r>>>2>>>0];case 8:return u?r=>j[r>>>3]:r=>$[r>>>3];default:throw new TypeError(`invalid integer width (${e}): ${r}`)}};function oe(r,e,t){t>>>=0,ne(r>>>=0,{name:e=Kr(e>>>0),fromWireType:r=>r,toWireType:function(r,e){if(\"bigint\"!=typeof e&&\"number\"!=typeof e)throw e=null===e?\"null\":\"object\"==(r=typeof e)||\"array\"===r||\"function\"===r?e.toString():\"\"+e,new TypeError(`Cannot convert \"${e}\" to ${this.name}`);return\"number\"==typeof e&&(e=BigInt(e)),e},argPackAdvance:ie,readValueFromPointer:ae(e,t,-1==e.indexOf(\"u\")),Ab:null})}var 
ie=8;function se(r,e,t,a){ne(r>>>=0,{name:e=Kr(e>>>0),fromWireType:function(r){return!!r},toWireType:function(r,e){return e?t:a},argPackAdvance:ie,readValueFromPointer:function(r){return this.fromWireType(n()[r>>>0])},Ab:null})}var ue=[],fe=[];function le(r){9<(r>>>=0)&&0==--fe[r+1]&&(fe[r]=void 0,ue.push(r))}var ce=r=>{if(!r)throw new Qr(\"Cannot use deleted val. handle = \"+r);return fe[r]},de=r=>{switch(r){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:const e=ue.pop()||fe.length;return fe[e]=r,fe[e+1]=1,e}};function be(r){return this.fromWireType(s()[r>>>2>>>0])}var ge={name:\"emscripten::val\",fromWireType:r=>{var e=ce(r);return le(r),e},toWireType:(r,e)=>de(e),argPackAdvance:ie,readValueFromPointer:be,Ab:null};function me(r){return ne(r>>>0,ge)}var pe=(r,e)=>{switch(e){case 4:return function(r){return this.fromWireType(u()[r>>>2>>>0])};case 8:return function(r){return this.fromWireType(f()[r>>>3>>>0])};default:throw new TypeError(`invalid float width (${e}): ${r}`)}};function he(r,e,t){t>>>=0,ne(r>>>=0,{name:e=Kr(e>>>0),fromWireType:r=>r,toWireType:(r,e)=>e,argPackAdvance:ie,readValueFromPointer:pe(e,t),Ab:null})}function ve(r,e,t,n,a){if(r>>>=0,t>>>=0,e=Kr(e>>>0),-1===a&&(a=4294967295),a=r=>r,0===n){var o=32-8*t;a=r=>r<>>o}var i=e.includes(\"unsigned\")?function(r,e){return e>>>0}:function(r,e){return e};ne(r,{name:e,fromWireType:a,toWireType:i,argPackAdvance:ie,readValueFromPointer:ae(e,t,0!==n),Ab:null})}function ye(r,e,n){function a(r){var e=s()[r>>>2>>>0];return r=s()[r+4>>>2>>>0],new o(t().buffer,r,e)}var o=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][e];ne(r>>>=0,{name:n=Kr(n>>>0),fromWireType:a,argPackAdvance:ie,readValueFromPointer:a},{Ob:!0})}function we(r,e){r>>>=0;var t=\"std::string\"===(e=Kr(e>>>0));ne(r,{name:e,fromWireType:function(r){var e=s()[r>>>2>>>0],a=r+4;if(t)for(var o=a,i=0;i<=e;++i){var u=a+i;if(i==e||0==n()[u>>>0]){if(o=Hr(o,u-o),void 0===f)var f=o;else f+=String.fromCharCode(0),f+=o;o=u+1}}else{for(f=Array(e),i=0;i>>0]);f=f.join(\"\")}return yn(r),f},toWireType:function(r,e){e instanceof ArrayBuffer&&(e=new Uint8Array(e));var a=\"string\"==typeof e;if(!(a||e instanceof Uint8Array||e instanceof Uint8ClampedArray||e instanceof Int8Array))throw new Qr(\"Cannot pass non-string to std::string\");var o=t&&a?Dr(e):e.length,i=vn(4+o+1),u=i+4;if(s()[i>>>2>>>0]=o,t&&a)Br(e,u,o+1);else if(a)for(a=0;a>>0]=f}else for(a=0;a>>0]=e[a];return null!==r&&r.push(yn,i),i},argPackAdvance:ie,readValueFromPointer:be,Ab(r){yn(r)}})}var Ae=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf-16le\"):void 0,_e=(r,e)=>{for(var t=r>>1,i=t+e/2;!(t>=i)&&o()[t>>>0];)++t;if(32<(t<<=1)-r&&Ae)return Ae.decode(n().slice(r,t));for(t=\"\",i=0;!(i>=e/2);++i){var s=a()[r+2*i>>>1>>>0];if(0==s)break;t+=String.fromCharCode(s)}return t},Ce=(r,e,t)=>{if(t??=2147483647,2>t)return 0;var n=e;t=(t-=2)<2*r.length?t/2:r.length;for(var o=0;o>>1>>>0]=i,e+=2}return a()[e>>>1>>>0]=0,e-n},Oe=r=>2*r.length,Te=(r,e)=>{for(var t=0,n=\"\";!(t>=e/4);){var a=i()[r+4*t>>>2>>>0];if(0==a)break;++t,65536<=a?(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a)):n+=String.fromCharCode(a)}return n},Se=(r,e,t)=>{if(e>>>=0,t??=2147483647,4>t)return 0;var n=e;t=n+t-4;for(var a=0;a=o&&(o=65536+((1023&o)<<10)|1023&r.charCodeAt(++a)),i()[e>>>2>>>0]=o,(e+=4)+4>t)break}return i()[e>>>2>>>0]=0,e-n},We=r=>{for(var e=0,t=0;t=n&&++t,e+=4}return e};function Ee(r,e,t){if(r>>>=0,e>>>=0,t=Kr(t>>>=0),2===e)var 
n=_e,a=Ce,i=Oe,u=r=>o()[r>>>1>>>0];else 4===e&&(n=Te,a=Se,i=We,u=r=>s()[r>>>2>>>0]);ne(r,{name:t,fromWireType:r=>{for(var t,a=s()[r>>>2>>>0],o=r+4,i=0;i<=a;++i){var f=r+4+i*e;i!=a&&0!=u(f)||(o=n(o,f-o),void 0===t?t=o:(t+=String.fromCharCode(0),t+=o),o=f+e)}return yn(r),t},toWireType:(r,n)=>{if(\"string\"!=typeof n)throw new Qr(`Cannot pass non-string to C++ string type ${t}`);var o=i(n),u=vn(4+o+e);return s()[u>>>2>>>0]=o/e,a(n,u+4,o+e),null!==r&&r.push(yn,u),u},argPackAdvance:ie,readValueFromPointer:be,Ab(r){yn(r)}})}function xe(r,e){ne(r>>>=0,{Pb:!0,name:e=Kr(e>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var Me=()=>1;function ke(r){wn(r>>>0,!m,1,!g,131072,!1),Ar()}var Ne=r=>{if(!z)try{if(r(),!(0>>=0,\"function\"==typeof Atomics.fc&&(Atomics.fc(i(),r>>>2,r).value.then(Re),r+=128,Atomics.store(i(),r>>>2,1))}var Re=()=>{var r=hn();r&&(He(r),Ne(Tn))};function Pe(r,e){(r>>>=0)==e>>>0?setTimeout(Re):p?postMessage({targetThread:r,cmd:\"checkMailbox\"}):(r=yr[r])&&r.postMessage({cmd:\"checkMailbox\"})}var De=[];function Fe(r,e,t,n,a){for(e>>>=0,n/=2,De.length=n,t=a>>>0>>>3,a=0;a>>0];return(e?ir[e]:dn[r])(...De)}function Be(r){r>>>=0,p?postMessage({cmd:\"cleanupThread\",thread:r}):wr(yr[r])}function Ie(r){}var Ue=(r,e)=>{var t=ee[r];if(void 0===t)throw r=gn(r),t=Kr(r),yn(r),new Qr(`${e} has unknown type ${t}`);return t},je=(r,e,t)=>{var n=[];return r=r.toWireType(n,t),n.length&&(s()[e>>>2>>>0]=de(n)),r};function $e(r,e,t){return e>>>=0,t>>>=0,r=ce(r>>>0),e=Ue(e,\"emval::as\"),je(e,t,r)}var Ge=r=>{try{r()}catch(r){K(r)}},ze=0,Ye=null,Le=0,Ve=[],qe={},Je={},Xe=0,Qe=null,Ze=[];function Ke(r){return function(r){if(!z){if(0===ze){var e=!1,t=!1;r(((r=0)=>{if(!z&&(Le=r,e=!0,t)){ze=2,Ge((()=>Hn(Ye))),\"undefined\"!=typeof Browser&&Browser.Gb.Nb&&Browser.Gb.resume(),r=!1;try{var n=function(){var r=i()[Ye+8>>>2>>>0];return r=bn[Je[r]],--cr,r()}()}catch(e){n=e,r=!0}var a=!1;if(!Ye){var o=Qe;o&&(Qe=null,(r?o.reject:o.resolve)(n),a=!0)}if(r&&!a)throw n}})),t=!0,e||(ze=1,Ye=function(){var r=vn(65548),e=r+12;s()[r>>>2>>>0]=e,s()[r+4>>>2>>>0]=e+65536,e=Ve[0];var t=qe[e];return void 0===t&&(t=Xe++,qe[e]=t,Je[t]=e),e=t,i()[r+8>>>2>>>0]=e,r}(),\"undefined\"!=typeof Browser&&Browser.Gb.Nb&&Browser.Gb.pause(),Ge((()=>kn(Ye))))}else 2===ze?(ze=0,Ge(Rn),yn(Ye),Ye=null,Ze.forEach(Ne)):K(`invalid state: ${ze}`);return Le}}((e=>{r().then(e)}))}function rt(r){return r>>>=0,Ke((()=>(r=ce(r)).then(de)))}var et=[];function tt(r,e,t,n){return t>>>=0,n>>>=0,(r=et[r>>>0])(null,e=ce(e>>>0),t,n)}var nt={},at=r=>{var e=nt[r];return void 0===e?Kr(r):e};function ot(r,e,t,n,a){return t>>>=0,n>>>=0,a>>>=0,(r=et[r>>>0])(e=ce(e>>>0),e[t=at(t)],n,a)}var it=()=>\"object\"==typeof globalThis?globalThis:Function(\"return this\")();function st(r){return 0==(r>>>=0)?de(it()):(r=at(r),de(it()[r]))}var ut=r=>{var e=et.length;return et.push(r),e},ft=(r,e)=>{for(var t=Array(r),n=0;n>>2>>>0],\"parameter \"+n);return t},lt=(r,e)=>Object.defineProperty(e,\"name\",{value:r});function ct(r,e,t){var n=(e=ft(r,e>>>0)).shift();r--;var a=\"return function (obj, func, destructorsRef, args) {\\n\",o=0,i=[];0===t&&i.push(\"obj\");for(var s=[\"retType\"],u=[n],f=0;fr.name)).join(\", \")}) => ${n.name}>`,ut(lt(t,r))}function dt(r){return r=at(r>>>0),de(d[r])}function bt(r,e){return e>>>=0,r=ce(r>>>0),e=ce(e),de(r[e])}function gt(r){9<(r>>>=0)&&(fe[r+1]+=1)}function mt(){return de([])}function pt(r){r=ce(r>>>0);for(var e=Array(r.length),t=0;t>>0))}function vt(){return de({})}function yt(r){for(var e=ce(r>>>=0);e.length;){var 
t=e.pop();e.pop()(t)}le(r)}function wt(r,e,t){e>>>=0,t>>>=0,r=ce(r>>>0),e=ce(e),t=ce(t),r[e]=t}function At(r,e){return e>>>=0,r=(r=Ue(r>>>0,\"_emval_take_value\")).readValueFromPointer(e),de(r)}function _t(r,e){r=-9007199254740992>r||9007199254740992>>=0,r=new Date(1e3*r),i()[e>>>2>>>0]=r.getUTCSeconds(),i()[e+4>>>2>>>0]=r.getUTCMinutes(),i()[e+8>>>2>>>0]=r.getUTCHours(),i()[e+12>>>2>>>0]=r.getUTCDate(),i()[e+16>>>2>>>0]=r.getUTCMonth(),i()[e+20>>>2>>>0]=r.getUTCFullYear()-1900,i()[e+24>>>2>>>0]=r.getUTCDay(),r=(r.getTime()-Date.UTC(r.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[e+28>>>2>>>0]=r}var Ct=r=>0==r%4&&(0!=r%100||0==r%400),Ot=[0,31,60,91,121,152,182,213,244,274,305,335],Tt=[0,31,59,90,120,151,181,212,243,273,304,334];function St(r,e){r=-9007199254740992>r||9007199254740992>>=0,r=new Date(1e3*r),i()[e>>>2>>>0]=r.getSeconds(),i()[e+4>>>2>>>0]=r.getMinutes(),i()[e+8>>>2>>>0]=r.getHours(),i()[e+12>>>2>>>0]=r.getDate(),i()[e+16>>>2>>>0]=r.getMonth(),i()[e+20>>>2>>>0]=r.getFullYear()-1900,i()[e+24>>>2>>>0]=r.getDay();var t=(Ct(r.getFullYear())?Ot:Tt)[r.getMonth()]+r.getDate()-1|0;i()[e+28>>>2>>>0]=t,i()[e+36>>>2>>>0]=-60*r.getTimezoneOffset(),t=new Date(r.getFullYear(),6,1).getTimezoneOffset();var n=new Date(r.getFullYear(),0,1).getTimezoneOffset();r=0|(t!=n&&r.getTimezoneOffset()==Math.min(n,t)),i()[e+32>>>2>>>0]=r}function Wt(r){r>>>=0;var e=new Date(i()[r+20>>>2>>>0]+1900,i()[r+16>>>2>>>0],i()[r+12>>>2>>>0],i()[r+8>>>2>>>0],i()[r+4>>>2>>>0],i()[r>>>2>>>0],0),t=i()[r+32>>>2>>>0],n=e.getTimezoneOffset(),a=new Date(e.getFullYear(),6,1).getTimezoneOffset(),o=new Date(e.getFullYear(),0,1).getTimezoneOffset(),s=Math.min(o,a);return 0>t?i()[r+32>>>2>>>0]=Number(a!=o&&s==n):0>>2>>>0]=e.getDay(),t=(Ct(e.getFullYear())?Ot:Tt)[e.getMonth()]+e.getDate()-1|0,i()[r+28>>>2>>>0]=t,i()[r>>>2>>>0]=e.getSeconds(),i()[r+4>>>2>>>0]=e.getMinutes(),i()[r+8>>>2>>>0]=e.getHours(),i()[r+12>>>2>>>0]=e.getDate(),i()[r+16>>>2>>>0]=e.getMonth(),i()[r+20>>>2>>>0]=e.getYear(),r=e.getTime(),BigInt(isNaN(r)?-1:r/1e3)}function Et(r,e,t,n,a,o,i){return p?dr(16,1,r,e,t,n,a,o,i):-52}function xt(r,e,t,n,a,o){if(p)return dr(17,1,r,e,t,n,a,o)}function Mt(r,e,t,n){r>>>=0,e>>>=0,t>>>=0,n>>>=0;var a=(new Date).getFullYear(),o=new Date(a,0,1),u=new Date(a,6,1);a=o.getTimezoneOffset();var f=u.getTimezoneOffset(),l=Math.max(a,f);s()[r>>>2>>>0]=60*l,i()[e>>>2>>>0]=Number(a!=f),o=(r=r=>r.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:\"short\"}).split(\" \")[1])(o),u=r(u),f{kt.length=0;for(var t;t=n()[r++>>>0];){var a=105!=t;e+=(a&=112!=t)&&e%8?4:0,kt.push(112==t?s()[e>>>2>>>0]:106==t?j[e>>>3]:105==t?i()[e>>>2>>>0]:f()[e>>>3>>>0]),e+=a?8:4}return kt};function Ht(r,e,t){return r>>>=0,e=Nt(e>>>0,t>>>0),ir[r](...e)}function Rt(r,e,t){return r>>>=0,e=Nt(e>>>0,t>>>0),ir[r](...e)}var Pt=()=>{},Dt=()=>Date.now();function Ft(r,e){return E(Hr(r>>>0,e>>>0))}var Bt,It=()=>{throw cr+=1,\"unwind\"};function Ut(){return 4294901760}Bt=()=>performance.timeOrigin+performance.now();var jt=()=>navigator.hardwareConcurrency;function $t(r){r>>>=0;var e=n().length;if(r<=e||4294901760=t;t*=2){var a=e*(1+.2/t);a=Math.min(a,r+100663296);var o=Math;a=Math.max(r,a);r:{o=(o.min.call(o,4294901760,a+(65536-a%65536)%65536)-k.buffer.byteLength+65535)/65536;try{k.grow(o),Y();var i=1;break r}catch(r){}i=void 0}if(i)return!0}return!1}var Gt=()=>(K(\"Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER\"),0),zt={},Yt=r=>{r.forEach((r=>{var e=Gt();e&&(zt[e]=r)}))};function Lt(){var 
r=Error().stack.toString().split(\"\\n\");return\"Error\"==r[0]&&r.shift(),Yt(r),zt.Lb=Gt(),zt.Yb=r,zt.Lb}function Vt(r,e,t){if(r>>>=0,e>>>=0,zt.Lb==r)var n=zt.Yb;else\"Error\"==(n=Error().stack.toString().split(\"\\n\"))[0]&&n.shift(),Yt(n);for(var a=3;n[a]&&Gt()!=r;)++a;for(r=0;r>>2>>>0]=Gt();return r}var qt,Jt={},Xt=()=>{if(!qt){var r,e={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(\"object\"==typeof navigator&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:_||\"./this.program\"};for(r in Jt)void 0===Jt[r]?delete e[r]:e[r]=Jt[r];var t=[];for(r in e)t.push(`${r}=${e[r]}`);qt=t}return qt};function Qt(r,e){if(p)return dr(18,1,r,e);r>>>=0,e>>>=0;var n=0;return Xt().forEach(((a,o)=>{var i=e+n;for(o=s()[r+4*o>>>2>>>0]=i,i=0;i>>0]=a.charCodeAt(i);t()[o>>>0]=0,n+=a.length+1})),0}function Zt(r,e){if(p)return dr(19,1,r,e);r>>>=0,e>>>=0;var t=Xt();s()[r>>>2>>>0]=t.length;var n=0;return t.forEach((r=>n+=r.length+1)),s()[e>>>2>>>0]=n,0}function Kt(r){return p?dr(20,1,r):52}function rn(r,e,t,n){return p?dr(21,1,r,e,t,n):52}function en(r,e,t,n){return p?dr(22,1,r,e,t,n):70}var tn=[null,[],[]];function nn(r,e,t,a){if(p)return dr(23,1,r,e,t,a);e>>>=0,t>>>=0,a>>>=0;for(var o=0,i=0;i>>2>>>0],f=s()[e+4>>>2>>>0];e+=8;for(var l=0;l>>0],d=tn[r];0===c||10===c?((1===r?W:E)(Nr(d,0)),d.length=0):d.push(c)}o+=f}return s()[a>>>2>>>0]=o,0}var an=[31,29,31,30,31,30,31,31,30,31,30,31],on=[31,28,31,30,31,30,31,31,30,31,30,31],sn=(r,e)=>{t().set(r,e>>>0)};function un(r,e,t,n){function a(r,e,t){for(r=\"number\"==typeof r?r.toString():r||\"\";r.lengthr?-1:0n-r.getDate())){r.setDate(r.getDate()+e);break}e-=n-r.getDate()+1,r.setDate(1),11>t?r.setMonth(t+1):(r.setMonth(0),r.setFullYear(r.getFullYear()+1))}return t=new Date(r.getFullYear()+1,0,4),e=f(new Date(r.getFullYear(),0,4)),t=f(t),0>=u(e,r)?0>=u(t,r)?r.getFullYear()+1:r.getFullYear():r.getFullYear()-1}r>>>=0,e>>>=0,t>>>=0,n>>>=0;var c=s()[n+40>>>2>>>0];for(var d in n={cc:i()[n>>>2>>>0],bc:i()[n+4>>>2>>>0],Db:i()[n+8>>>2>>>0],Hb:i()[n+12>>>2>>>0],Eb:i()[n+16>>>2>>>0],zb:i()[n+20>>>2>>>0],rb:i()[n+24>>>2>>>0],yb:i()[n+28>>>2>>>0],kc:i()[n+32>>>2>>>0],ac:i()[n+36>>>2>>>0],dc:c?Hr(c):\"\"},t=Hr(t),c={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"})t=t.replace(new RegExp(d,\"g\"),c[d]);var b=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),g=\"January February March April May June July August September October November December\".split(\" \");for(d in c={\"%a\":r=>b[r.rb].substring(0,3),\"%A\":r=>b[r.rb],\"%b\":r=>g[r.Eb].substring(0,3),\"%B\":r=>g[r.Eb],\"%C\":r=>o((r.zb+1900)/100|0,2),\"%d\":r=>o(r.Hb,2),\"%e\":r=>a(r.Hb,2,\" \"),\"%g\":r=>l(r).toString().substring(2),\"%G\":l,\"%H\":r=>o(r.Db,2),\"%I\":r=>(0==(r=r.Db)?r=12:12{for(var e=0,t=0;t<=r.Eb-1;e+=(Ct(r.zb+1900)?an:on)[t++]);return o(r.Hb+e,3)},\"%m\":r=>o(r.Eb+1,2),\"%M\":r=>o(r.bc,2),\"%n\":()=>\"\\n\",\"%p\":r=>0<=r.Db&&12>r.Db?\"AM\":\"PM\",\"%S\":r=>o(r.cc,2),\"%t\":()=>\"\\t\",\"%u\":r=>r.rb||7,\"%U\":r=>o(Math.floor((r.yb+7-r.rb)/7),2),\"%V\":r=>{var 
e=Math.floor((r.yb+7-(r.rb+6)%7)/7);if(2>=(r.rb+371-r.yb-2)%7&&e++,e)53==e&&(4==(t=(r.rb+371-r.yb)%7)||3==t&&Ct(r.zb)||(e=1));else{e=52;var t=(r.rb+7-r.yb-1)%7;(4==t||5==t&&Ct(r.zb%400-1))&&e++}return o(e,2)},\"%w\":r=>r.rb,\"%W\":r=>o(Math.floor((r.yb+7-(r.rb+6)%7)/7),2),\"%y\":r=>(r.zb+1900).toString().substring(2),\"%Y\":r=>r.zb+1900,\"%z\":r=>{var e=0<=(r=r.ac);return r=Math.abs(r)/60,(e?\"+\":\"-\")+String(\"0000\"+(r/60*100+r%60)).slice(-4)},\"%Z\":r=>r.dc,\"%%\":()=>\"%\"},t=t.replace(/%%/g,\"\\0\\0\"),c)t.includes(d)&&(t=t.replace(new RegExp(d,\"g\"),c[d](n)));return d=function(r){var e=Array(Dr(r)+1);return Fr(r,e,0,e.length),e}(t=t.replace(/\\0\\0/g,\"%\")),d.length>e?0:(sn(d,r),d.length-1)}function fn(r,e,t,n){return un(r>>>0,e>>>0,t>>>0,n>>>0)}p||function(){for(var r=d.numThreads-1;r--;)Cr();L.unshift((()=>{J++,function(r){p?r():Promise.all(pr.map(_r)).then(r)}((()=>Z()))}))}();for(var ln=Array(256),cn=0;256>cn;++cn)ln[cn]=String.fromCharCode(cn);Xr=ln,Qr=d.BindingError=class extends Error{constructor(r){super(r),this.name=\"BindingError\"}},d.InternalError=class extends Error{constructor(r){super(r),this.name=\"InternalError\"}},fe.push(0,1,void 0,1,null,1,!0,1,!1,1),d.count_emval_handles=()=>fe.length/2-5-ue.length;var dn=[br,gr,xr,Rr,Pr,Ir,Ur,jr,$r,Gr,zr,Yr,Lr,Vr,qr,Jr,Et,xt,Qt,Zt,Kt,rn,en,nn],bn=function(){function r(r,e){return bn=r.exports,bn=function(){var r=bn,e={};for(let[t,n]of Object.entries(r))e[t]=\"function\"==typeof n?(...r)=>{Ve.push(t);try{return n(...r)}finally{z||(Ve.pop(),Ye&&1===ze&&0===Ve.length&&(ze=0,cr+=1,Ge(Nn),\"undefined\"!=typeof Fibers&&Fibers.lc()))}}:n;return e}(),bn=function(){var r=bn,e=r=>e=>r(e)>>>0,t=r=>()=>r()>>>0;return(r=Object.assign({},r)).za=e(r.za),r.cb=t(r.cb),r.db=e(r.db),r.emscripten_main_runtime_thread_id=t(r.emscripten_main_runtime_thread_id),r.pb=e(r.pb),r.qb=t(r.qb),r}(),vr.push(bn.fb),V.unshift(bn.ya),N=e,Z(),bn}var e=or();if(J++,d.instantiateWasm)try{return d.instantiateWasm(e,r)}catch(r){E(`Module.instantiateWasm callback failed with error: ${r}`),c(r)}return rr||=d.locateFile?er(\"ort-wasm-simd-threaded.jsep.wasm\")?\"ort-wasm-simd-threaded.jsep.wasm\":d.locateFile?d.locateFile(\"ort-wasm-simd-threaded.jsep.wasm\",O):O+\"ort-wasm-simd-threaded.jsep.wasm\":new URL(\"ort-wasm-simd-threaded.jsep.wasm\",import.meta.url).href,function(r,e){var t=rr;return\"function\"!=typeof WebAssembly.instantiateStreaming||er(t)||tr(t)||\"function\"!=typeof fetch?ar(t,r,e):fetch(t,{credentials:\"same-origin\"}).then((n=>WebAssembly.instantiateStreaming(n,r).then(e,(function(n){return E(`wasm streaming compile failed: ${n}`),E(\"falling back to ArrayBuffer 
instantiation\"),ar(t,r,e)}))))}(e,(function(e){r(e.instance,e.module)})).catch(c),{}}(),gn=r=>(gn=bn.za)(r),mn=()=>(mn=bn.Aa)();d._OrtInit=(r,e)=>(d._OrtInit=bn.Ba)(r,e),d._OrtGetLastError=(r,e)=>(d._OrtGetLastError=bn.Ca)(r,e),d._OrtCreateSessionOptions=(r,e,t,n,a,o,i,s,u,f)=>(d._OrtCreateSessionOptions=bn.Da)(r,e,t,n,a,o,i,s,u,f),d._OrtAppendExecutionProvider=(r,e)=>(d._OrtAppendExecutionProvider=bn.Ea)(r,e),d._OrtAddFreeDimensionOverride=(r,e,t)=>(d._OrtAddFreeDimensionOverride=bn.Fa)(r,e,t),d._OrtAddSessionConfigEntry=(r,e,t)=>(d._OrtAddSessionConfigEntry=bn.Ga)(r,e,t),d._OrtReleaseSessionOptions=r=>(d._OrtReleaseSessionOptions=bn.Ha)(r),d._OrtCreateSession=(r,e,t)=>(d._OrtCreateSession=bn.Ia)(r,e,t),d._OrtReleaseSession=r=>(d._OrtReleaseSession=bn.Ja)(r),d._OrtGetInputOutputCount=(r,e,t)=>(d._OrtGetInputOutputCount=bn.Ka)(r,e,t),d._OrtGetInputName=(r,e)=>(d._OrtGetInputName=bn.La)(r,e),d._OrtGetOutputName=(r,e)=>(d._OrtGetOutputName=bn.Ma)(r,e),d._OrtFree=r=>(d._OrtFree=bn.Na)(r),d._OrtCreateTensor=(r,e,t,n,a,o)=>(d._OrtCreateTensor=bn.Oa)(r,e,t,n,a,o),d._OrtGetTensorData=(r,e,t,n,a)=>(d._OrtGetTensorData=bn.Pa)(r,e,t,n,a),d._OrtReleaseTensor=r=>(d._OrtReleaseTensor=bn.Qa)(r),d._OrtCreateRunOptions=(r,e,t,n)=>(d._OrtCreateRunOptions=bn.Ra)(r,e,t,n),d._OrtAddRunConfigEntry=(r,e,t)=>(d._OrtAddRunConfigEntry=bn.Sa)(r,e,t),d._OrtReleaseRunOptions=r=>(d._OrtReleaseRunOptions=bn.Ta)(r),d._OrtCreateBinding=r=>(d._OrtCreateBinding=bn.Ua)(r),d._OrtBindInput=(r,e,t)=>(d._OrtBindInput=bn.Va)(r,e,t),d._OrtBindOutput=(r,e,t,n)=>(d._OrtBindOutput=bn.Wa)(r,e,t,n),d._OrtClearBoundOutputs=r=>(d._OrtClearBoundOutputs=bn.Xa)(r),d._OrtReleaseBinding=r=>(d._OrtReleaseBinding=bn.Ya)(r),d._OrtRunWithBinding=(r,e,t,n,a)=>(d._OrtRunWithBinding=bn.Za)(r,e,t,n,a),d._OrtRun=(r,e,t,n,a,o,i,s)=>(d._OrtRun=bn._a)(r,e,t,n,a,o,i,s),d._OrtEndProfiling=r=>(d._OrtEndProfiling=bn.$a)(r),d._JsepOutput=(r,e,t)=>(d._JsepOutput=bn.ab)(r,e,t),d._JsepGetNodeName=r=>(d._JsepGetNodeName=bn.bb)(r);var pn,hn=()=>(hn=bn.cb)(),vn=d._malloc=r=>(vn=d._malloc=bn.db)(r),yn=d._free=r=>(yn=d._free=bn.eb)(r),wn=(r,e,t,n,a,o)=>(wn=bn.hb)(r,e,t,n,a,o),An=()=>(An=bn.ib)(),_n=(r,e,t,n,a)=>(_n=bn.jb)(r,e,t,n,a),Cn=r=>(Cn=bn.kb)(r),On=r=>(On=bn.lb)(r),Tn=()=>(Tn=bn.mb)(),Sn=(r,e)=>(Sn=bn.nb)(r,e),Wn=r=>(Wn=bn.ob)(r),En=r=>(En=bn.pb)(r),xn=()=>(xn=bn.qb)(),Mn=d.dynCall_ii=(r,e)=>(Mn=d.dynCall_ii=bn.sb)(r,e),kn=r=>(kn=bn.tb)(r),Nn=()=>(Nn=bn.ub)(),Hn=r=>(Hn=bn.vb)(r),Rn=()=>(Rn=bn.wb)();function Pn(){if(!(0xn(),d.stackRestore=r=>Wn(r),d.stackAlloc=r=>En(r),d.UTF8ToString=Hr,d.stringToUTF8=Br,d.lengthBytesUTF8=Dr,Q=function r(){pn||Pn(),pn||(Q=r)},Pn(),b});export default e;\"em-pthread\"===globalThis.self?.name&&e();", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {isNode} from './wasm-utils-env';\n\n/**\n * The classic script source URL. This is not always available in non ESModule environments.\n *\n * In Node.js, this is undefined.\n */\nexport const scriptSrc =\n // if Nodejs, return undefined\n isNode ? undefined :\n // if It's ESM, use import.meta.url\n BUILD_DEFS.ESM_IMPORT_META_URL ??\n // use `document.currentScript.src` if available\n (typeof document !== 'undefined' ? (document.currentScript as HTMLScriptElement)?.src :\n // use `self.location.href` if available\n (typeof self !== 'undefined' ? 
self.location?.href : undefined));\n\n/**\n * The origin of the current location.\n *\n * In Node.js, this is undefined.\n */\nconst origin = isNode || typeof location === 'undefined' ? undefined : location.origin;\n\n/**\n * Check if the given filename with prefix is from the same origin.\n */\nconst isSameOrigin = (filename: string, prefixOverride?: string) => {\n try {\n const baseUrl = prefixOverride ?? scriptSrc;\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.origin === origin;\n } catch {\n return false;\n }\n};\n\n/**\n * Normalize the inputs to an absolute URL with the given prefix override. If failed, return undefined.\n */\nconst normalizeUrl = (filename: string, prefixOverride?: string) => {\n const baseUrl = prefixOverride ?? scriptSrc;\n try {\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.href;\n } catch {\n return undefined;\n }\n};\n\n/**\n * Create a fallback URL if an absolute URL cannot be created by the normalizeUrl function.\n */\nconst fallbackUrl = (filename: string, prefixOverride?: string) => `${prefixOverride ?? './'}${filename}`;\n\n/**\n * This helper function is used to preload a module from a URL.\n *\n * If the origin of the worker URL is different from the current origin, the worker cannot be loaded directly.\n * See discussions in https://github.com/webpack-contrib/worker-loader/issues/154\n *\n * In this case, we will fetch the worker URL and create a new Blob URL with the same origin as a workaround.\n *\n * @param absoluteUrl - The absolute URL to preload.\n *\n * @returns - A promise that resolves to a new Blob URL\n */\nconst preload = async(absoluteUrl: string): Promise => {\n const response = await fetch(absoluteUrl, {credentials: 'same-origin'});\n const blob = await response.blob();\n return URL.createObjectURL(blob);\n};\n\n/**\n * This helper function is used to dynamically import a module from a URL.\n *\n * The build script has special handling for this function to ensure that the URL is not bundled into the final output.\n *\n * @param url - The URL to import.\n *\n * @returns - A promise that resolves to the default export of the module.\n */\nconst dynamicImportDefault = async(url: string): Promise => (await import(/* webpackIgnore: true */ url)).default;\n\n/**\n * The proxy worker factory imported from the proxy worker module.\n *\n * This is only available when the WebAssembly proxy is not disabled.\n */\nconst createProxyWorker: ((urlOverride?: string) => Worker)|undefined =\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n BUILD_DEFS.DISABLE_WASM_PROXY ? undefined : require('./proxy-worker/main').default;\n\n/**\n * Import the proxy worker.\n *\n * This function will perform the following steps:\n * 1. If a preload is needed, it will preload the module and return the object URL.\n * 2. 
Use the proxy worker factory to create the proxy worker.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The proxy worker.\n */\nexport const importProxyWorker = async(): Promise<[undefined | string, Worker]> => {\n if (!scriptSrc) {\n throw new Error('Failed to load proxy worker: cannot determine the script source URL.');\n }\n\n // If the script source is from the same origin, we can use the embedded proxy module directly.\n if (isSameOrigin(scriptSrc)) {\n return [undefined, createProxyWorker!()];\n }\n\n // Otherwise, need to preload\n const url = await preload(scriptSrc);\n return [url, createProxyWorker!(url)];\n};\n\n/**\n * The embedded WebAssembly module.\n *\n * This is only available in ESM and when embedding is not disabled.\n */\nconst embeddedWasmModule: EmscriptenModuleFactory|undefined =\n BUILD_DEFS.IS_ESM && BUILD_DEFS.DISABLE_DYNAMIC_IMPORT ?\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n require(\n !BUILD_DEFS.DISABLE_TRAINING ? '../../dist/ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? '../../dist/ort-wasm-simd-threaded.jsep.mjs' :\n '../../dist/ort-wasm-simd-threaded.mjs')\n .default :\n undefined;\n\n/**\n * Import the WebAssembly module.\n *\n * This function will perform the following steps:\n * 1. If BUILD_DEFS.DISABLE_DYNAMIC_IMPORT is true, use the embedded module.\n * 2. If a preload is needed, it will preload the module and return the object URL.\n * 3. Otherwise, it will perform a dynamic import of the module.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The default export of the module, which is a factory function to create the WebAssembly module.\n */\nexport const importWasmModule = async(\n urlOverride: string|undefined, prefixOverride: string|undefined,\n isMultiThreaded: boolean): Promise<[undefined | string, EmscriptenModuleFactory]> => {\n if (BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n return [undefined, embeddedWasmModule!];\n } else {\n const wasmModuleFilename = !BUILD_DEFS.DISABLE_TRAINING ? 'ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? 'ort-wasm-simd-threaded.jsep.mjs' :\n 'ort-wasm-simd-threaded.mjs';\n const wasmModuleUrl = urlOverride ?? normalizeUrl(wasmModuleFilename, prefixOverride);\n // need to preload if all of the following conditions are met:\n // 1. not in Node.js.\n // - Node.js does not have the same origin policy for creating workers.\n // 2. multi-threaded is enabled.\n // - If multi-threaded is disabled, no worker will be created. So we don't need to preload the module.\n // 3. the absolute URL is available.\n // - If the absolute URL is failed to be created, the origin cannot be determined. In this case, we will not\n // preload the module.\n // 4. the worker URL is not from the same origin.\n // - If the worker URL is from the same origin, we can create the worker directly.\n const needPreload = !isNode && isMultiThreaded && wasmModuleUrl && !isSameOrigin(wasmModuleUrl, prefixOverride);\n const url = needPreload ? (await preload(wasmModuleUrl)) :\n (wasmModuleUrl ?? fallbackUrl(wasmModuleFilename, prefixOverride));\n return [needPreload ? url : undefined, await dynamicImportDefault>(url)];\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {importWasmModule} from './wasm-utils-import';\n\nlet wasm: OrtWasmModule|undefined;\nlet initialized = false;\nlet initializing = false;\nlet aborted = false;\n\nconst isMultiThreadSupported = (): boolean => {\n // If 'SharedArrayBuffer' is not available, WebAssembly threads will not work.\n if (typeof SharedArrayBuffer === 'undefined') {\n return false;\n }\n\n try {\n // Test for transferability of SABs (for browsers. needed for Firefox)\n // https://groups.google.com/forum/#!msg/mozilla.dev.platform/IHkBZlHETpA/dwsMNchWEQAJ\n if (typeof MessageChannel !== 'undefined') {\n new MessageChannel().port1.postMessage(new SharedArrayBuffer(1));\n }\n\n // Test for WebAssembly threads capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing threaded instructions.\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 5,\n 4, 1, 3, 1, 1, 10, 11, 1, 9, 0, 65, 0, 254, 16, 2, 0, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nconst isSimdSupported = (): boolean => {\n try {\n // Test for WebAssembly SIMD capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing SIMD instructions.\n\n // The binary data is generated from the following code by wat2wasm:\n //\n // (module\n // (type $t0 (func))\n // (func $f0 (type $t0)\n // (drop\n // (i32x4.dot_i16x8_s\n // (i8x16.splat\n // (i32.const 0))\n // (v128.const i32x4 0x00000000 0x00000000 0x00000000 0x00000000)))))\n\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 10, 30, 1, 28, 0, 65, 0,\n 253, 15, 253, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 253, 186, 1, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nexport const initializeWebAssembly = async(flags: Env.WebAssemblyFlags): Promise => {\n if (initialized) {\n return Promise.resolve();\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initializeWebAssembly()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initializeWebAssembly()\\' failed.');\n }\n\n initializing = true;\n\n // wasm flags are already initialized\n const timeout = flags.initTimeout!;\n let numThreads = flags.numThreads!;\n\n // ensure SIMD is supported\n if (!isSimdSupported()) {\n throw new Error('WebAssembly SIMD is not supported in the current environment.');\n }\n\n // check if multi-threading is supported\n const multiThreadSupported = isMultiThreadSupported();\n if (numThreads > 1 && !multiThreadSupported) {\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n // eslint-disable-next-line no-console\n console.warn(\n 'env.wasm.numThreads is set to ' + numThreads +\n ', but this will not work unless you enable crossOriginIsolated mode. ' +\n 'See https://web.dev/cross-origin-isolation-guide/ for more info.');\n }\n\n // eslint-disable-next-line no-console\n console.warn(\n 'WebAssembly multi-threading is not supported in the current environment. ' +\n 'Falling back to single-threading.');\n\n // set flags.numThreads to 1 so that OrtInit() will not create a global thread pool.\n flags.numThreads = numThreads = 1;\n }\n\n const wasmPaths = flags.wasmPaths;\n const wasmPrefixOverride = typeof wasmPaths === 'string' ? 
wasmPaths : undefined;\n const mjsPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.mjs;\n const mjsPathOverride = (mjsPathOverrideFlag as URL)?.href ?? mjsPathOverrideFlag;\n const wasmPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.wasm;\n const wasmPathOverride = (wasmPathOverrideFlag as URL)?.href ?? wasmPathOverrideFlag;\n\n const [objectUrl, ortWasmFactory] = (await importWasmModule(mjsPathOverride, wasmPrefixOverride, numThreads > 1));\n\n let isTimeout = false;\n\n const tasks: Array> = [];\n\n // promise for timeout\n if (timeout > 0) {\n tasks.push(new Promise((resolve) => {\n setTimeout(() => {\n isTimeout = true;\n resolve();\n }, timeout);\n }));\n }\n\n // promise for module initialization\n tasks.push(new Promise((resolve, reject) => {\n const config: Partial = {\n /**\n * The number of threads. WebAssembly will create (Module.numThreads - 1) workers. If it is 1, no worker will be\n * created.\n */\n numThreads,\n };\n\n if (wasmPathOverride || wasmPrefixOverride) {\n /**\n * A callback function to locate the WebAssembly file. The function should return the full path of the file.\n *\n * Since Emscripten 3.1.58, this function is only called for the .wasm file.\n */\n config.locateFile = (fileName, scriptDirectory) =>\n wasmPathOverride ?? (wasmPrefixOverride ?? scriptDirectory) + fileName;\n }\n\n ortWasmFactory(config).then(\n // wasm module initialized successfully\n module => {\n initializing = false;\n initialized = true;\n wasm = module;\n resolve();\n if (objectUrl) {\n URL.revokeObjectURL(objectUrl);\n }\n },\n // wasm module failed to initialize\n (what) => {\n initializing = false;\n aborted = true;\n reject(what);\n });\n }));\n\n await Promise.race(tasks);\n\n if (isTimeout) {\n throw new Error(`WebAssembly backend initializing failed due to timeout: ${timeout}ms`);\n }\n};\n\nexport const getInstance = (): OrtWasmModule => {\n if (initialized && wasm) {\n return wasm;\n }\n\n throw new Error('WebAssembly is not initialized yet.');\n};\n\nexport const dispose = (): void => {\n if (initialized && !initializing && !aborted) {\n // TODO: currently \"PThread.terminateAllThreads()\" is not exposed in the wasm module.\n // And this function is not yet called by any code.\n // If it is needed in the future, we should expose it in the wasm module and uncomment the following line.\n\n // wasm?.PThread?.terminateAllThreads();\n wasm = undefined;\n\n initializing = false;\n initialized = false;\n aborted = true;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getInstance} from './wasm-factory';\n\nexport const allocWasmString = (data: string, allocs: number[]): number => {\n const wasm = getInstance();\n\n const dataLength = wasm.lengthBytesUTF8(data) + 1;\n const dataOffset = wasm._malloc(dataLength);\n wasm.stringToUTF8(data, dataOffset, dataLength);\n allocs.push(dataOffset);\n\n return dataOffset;\n};\n\ninterface ExtraOptionsHandler {\n (name: string, value: string): void;\n}\n\nexport const iterateExtraOptions =\n (options: Record, prefix: string, seen: WeakSet>,\n handler: ExtraOptionsHandler): void => {\n if (typeof options == 'object' && options !== null) {\n if (seen.has(options)) {\n throw new Error('Circular reference in options');\n } else {\n seen.add(options);\n }\n }\n\n Object.entries(options).forEach(([key, value]) => {\n const name = (prefix) ? 
prefix + key : key;\n if (typeof value === 'object') {\n iterateExtraOptions(value as Record, name + '.', seen, handler);\n } else if (typeof value === 'string' || typeof value === 'number') {\n handler(name, value.toString());\n } else if (typeof value === 'boolean') {\n handler(name, (value) ? '1' : '0');\n } else {\n throw new Error(`Can't handle extra config type: ${typeof value}`);\n }\n });\n };\n\n/**\n * check web assembly API's last error and throw error if any error occurred.\n * @param message a message used when an error occurred.\n */\nexport const checkLastError = (message: string): void => {\n const wasm = getInstance();\n\n const stack = wasm.stackSave();\n try {\n const paramsOffset = wasm.stackAlloc(8);\n wasm._OrtGetLastError(paramsOffset, paramsOffset + 4);\n const errorCode = wasm.HEAP32[paramsOffset / 4];\n const errorMessagePointer = wasm.HEAPU32[paramsOffset / 4 + 1];\n const errorMessage = errorMessagePointer ? wasm.UTF8ToString(errorMessagePointer) : '';\n throw new Error(`${message} ERROR_CODE: ${errorCode}, ERROR_MESSAGE: ${errorMessage}`);\n } finally {\n wasm.stackRestore(stack);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nexport const setRunOptions = (options: InferenceSession.RunOptions): [number, number[]] => {\n const wasm = getInstance();\n let runOptionsHandle = 0;\n const allocs: number[] = [];\n\n const runOptions: InferenceSession.RunOptions = options || {};\n\n try {\n if (options?.logSeverityLevel === undefined) {\n runOptions.logSeverityLevel = 2; // Default to warning\n } else if (\n typeof options.logSeverityLevel !== 'number' || !Number.isInteger(options.logSeverityLevel) ||\n options.logSeverityLevel < 0 || options.logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${options.logSeverityLevel}`);\n }\n\n if (options?.logVerbosityLevel === undefined) {\n runOptions.logVerbosityLevel = 0; // Default to 0\n } else if (typeof options.logVerbosityLevel !== 'number' || !Number.isInteger(options.logVerbosityLevel)) {\n throw new Error(`log verbosity level is not valid: ${options.logVerbosityLevel}`);\n }\n\n if (options?.terminate === undefined) {\n runOptions.terminate = false;\n }\n\n let tagDataOffset = 0;\n if (options?.tag !== undefined) {\n tagDataOffset = allocWasmString(options.tag, allocs);\n }\n\n runOptionsHandle = wasm._OrtCreateRunOptions(\n runOptions.logSeverityLevel!, runOptions.logVerbosityLevel!, !!runOptions.terminate!, tagDataOffset);\n if (runOptionsHandle === 0) {\n checkLastError('Can\\'t create run options.');\n }\n\n if (options?.extra !== undefined) {\n iterateExtraOptions(options.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddRunConfigEntry(runOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(`Can't set a run config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [runOptionsHandle, allocs];\n } catch (e) {\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nconst getGraphOptimzationLevel = (graphOptimizationLevel: string|unknown): number => {\n switch (graphOptimizationLevel) {\n case 'disabled':\n return 0;\n case 'basic':\n return 1;\n case 'extended':\n return 2;\n case 'all':\n return 99;\n default:\n throw new Error(`unsupported graph optimization level: ${graphOptimizationLevel}`);\n }\n};\n\nconst getExecutionMode = (executionMode: 'sequential'|'parallel'): number => {\n switch (executionMode) {\n case 'sequential':\n return 0;\n case 'parallel':\n return 1;\n default:\n throw new Error(`unsupported execution mode: ${executionMode}`);\n }\n};\n\nconst appendDefaultOptions = (options: InferenceSession.SessionOptions): void => {\n if (!options.extra) {\n options.extra = {};\n }\n if (!options.extra.session) {\n options.extra.session = {};\n }\n const session = options.extra.session as Record;\n if (!session.use_ort_model_bytes_directly) {\n // eslint-disable-next-line camelcase\n session.use_ort_model_bytes_directly = '1';\n }\n\n // if using JSEP with WebGPU, always disable memory pattern\n if (options.executionProviders &&\n options.executionProviders.some(ep => (typeof ep === 'string' ? ep : ep.name) === 'webgpu')) {\n options.enableMemPattern = false;\n }\n};\n\nconst setExecutionProviders =\n (sessionOptionsHandle: number, executionProviders: readonly InferenceSession.ExecutionProviderConfig[],\n allocs: number[]): void => {\n for (const ep of executionProviders) {\n let epName = typeof ep === 'string' ? ep : ep.name;\n\n // check EP name\n switch (epName) {\n case 'webnn':\n epName = 'WEBNN';\n if (typeof ep !== 'string') {\n const webnnOptions = ep as InferenceSession.WebNNExecutionProviderOption;\n // const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n if (deviceType) {\n const keyDataOffset = allocWasmString('deviceType', allocs);\n const valueDataOffset = allocWasmString(deviceType, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(`Can't set a session config entry: 'deviceType' - ${deviceType}.`);\n }\n }\n }\n break;\n case 'webgpu':\n epName = 'JS';\n if (typeof ep !== 'string') {\n const webgpuOptions = ep as InferenceSession.WebGpuExecutionProviderOption;\n if (webgpuOptions?.preferredLayout) {\n if (webgpuOptions.preferredLayout !== 'NCHW' && webgpuOptions.preferredLayout !== 'NHWC') {\n throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${webgpuOptions.preferredLayout}`);\n }\n const keyDataOffset = allocWasmString('preferredLayout', allocs);\n const valueDataOffset = allocWasmString(webgpuOptions.preferredLayout, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(\n `Can't set a session config entry: 'preferredLayout' - ${webgpuOptions.preferredLayout}.`);\n }\n }\n }\n break;\n case 'wasm':\n case 'cpu':\n continue;\n default:\n throw new Error(`not supported execution provider: ${epName}`);\n }\n\n const epNameDataOffset = allocWasmString(epName, allocs);\n if (getInstance()._OrtAppendExecutionProvider(sessionOptionsHandle, epNameDataOffset) !== 0) {\n 
checkLastError(`Can't append execution provider: ${epName}.`);\n }\n }\n };\n\nexport const setSessionOptions = (options?: InferenceSession.SessionOptions): [number, number[]] => {\n const wasm = getInstance();\n let sessionOptionsHandle = 0;\n const allocs: number[] = [];\n\n const sessionOptions: InferenceSession.SessionOptions = options || {};\n appendDefaultOptions(sessionOptions);\n\n try {\n const graphOptimizationLevel = getGraphOptimzationLevel(sessionOptions.graphOptimizationLevel ?? 'all');\n const executionMode = getExecutionMode(sessionOptions.executionMode ?? 'sequential');\n const logIdDataOffset =\n typeof sessionOptions.logId === 'string' ? allocWasmString(sessionOptions.logId, allocs) : 0;\n\n const logSeverityLevel = sessionOptions.logSeverityLevel ?? 2; // Default to 2 - warning\n if (!Number.isInteger(logSeverityLevel) || logSeverityLevel < 0 || logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${logSeverityLevel}`);\n }\n\n const logVerbosityLevel = sessionOptions.logVerbosityLevel ?? 0; // Default to 0 - verbose\n if (!Number.isInteger(logVerbosityLevel) || logVerbosityLevel < 0 || logVerbosityLevel > 4) {\n throw new Error(`log verbosity level is not valid: ${logVerbosityLevel}`);\n }\n\n const optimizedModelFilePathOffset = typeof sessionOptions.optimizedModelFilePath === 'string' ?\n allocWasmString(sessionOptions.optimizedModelFilePath, allocs) :\n 0;\n\n sessionOptionsHandle = wasm._OrtCreateSessionOptions(\n graphOptimizationLevel, !!sessionOptions.enableCpuMemArena, !!sessionOptions.enableMemPattern, executionMode,\n !!sessionOptions.enableProfiling, 0, logIdDataOffset, logSeverityLevel, logVerbosityLevel,\n optimizedModelFilePathOffset);\n if (sessionOptionsHandle === 0) {\n checkLastError('Can\\'t create session options.');\n }\n\n if (sessionOptions.executionProviders) {\n setExecutionProviders(sessionOptionsHandle, sessionOptions.executionProviders, allocs);\n }\n\n if (sessionOptions.enableGraphCapture !== undefined) {\n if (typeof sessionOptions.enableGraphCapture !== 'boolean') {\n throw new Error(`enableGraphCapture must be a boolean value: ${sessionOptions.enableGraphCapture}`);\n }\n const keyDataOffset = allocWasmString('enableGraphCapture', allocs);\n const valueDataOffset = allocWasmString(sessionOptions.enableGraphCapture.toString(), allocs);\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(\n `Can't set a session config entry: 'enableGraphCapture' - ${sessionOptions.enableGraphCapture}.`);\n }\n }\n\n if (sessionOptions.freeDimensionOverrides) {\n for (const [name, value] of Object.entries(sessionOptions.freeDimensionOverrides)) {\n if (typeof name !== 'string') {\n throw new Error(`free dimension override name must be a string: ${name}`);\n }\n if (typeof value !== 'number' || !Number.isInteger(value) || value < 0) {\n throw new Error(`free dimension override value must be a non-negative integer: ${value}`);\n }\n const nameOffset = allocWasmString(name, allocs);\n if (wasm._OrtAddFreeDimensionOverride(sessionOptionsHandle, nameOffset, value) !== 0) {\n checkLastError(`Can't set a free dimension override: ${name} - ${value}.`);\n }\n }\n }\n\n if (sessionOptions.extra !== undefined) {\n iterateExtraOptions(sessionOptions.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, 
valueDataOffset) !== 0) {\n checkLastError(`Can't set a session config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [sessionOptionsHandle, allocs];\n } catch (e) {\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// This file includes common definitions. They do NOT have dependency on the WebAssembly instance.\n\n/**\n * Copied from ONNX definition. Use this to drop dependency 'onnx_proto' to decrease compiled .js file size.\n */\nexport const enum DataType {\n undefined = 0,\n float = 1,\n uint8 = 2,\n int8 = 3,\n uint16 = 4,\n int16 = 5,\n int32 = 6,\n int64 = 7,\n string = 8,\n bool = 9,\n float16 = 10,\n double = 11,\n uint32 = 12,\n uint64 = 13,\n complex64 = 14,\n complex128 = 15,\n bfloat16 = 16\n}\n\n/**\n * Map string tensor data to enum value\n */\nexport const tensorDataTypeStringToEnum = (type: string): DataType => {\n switch (type) {\n case 'int8':\n return DataType.int8;\n case 'uint8':\n return DataType.uint8;\n case 'bool':\n return DataType.bool;\n case 'int16':\n return DataType.int16;\n case 'uint16':\n return DataType.uint16;\n case 'int32':\n return DataType.int32;\n case 'uint32':\n return DataType.uint32;\n case 'float16':\n return DataType.float16;\n case 'float32':\n return DataType.float;\n case 'float64':\n return DataType.double;\n case 'string':\n return DataType.string;\n case 'int64':\n return DataType.int64;\n case 'uint64':\n return DataType.uint64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n};\n\n/**\n * Map enum value to string tensor data\n */\nexport const tensorDataTypeEnumToString = (typeProto: DataType): Tensor.Type => {\n switch (typeProto) {\n case DataType.int8:\n return 'int8';\n case DataType.uint8:\n return 'uint8';\n case DataType.bool:\n return 'bool';\n case DataType.int16:\n return 'int16';\n case DataType.uint16:\n return 'uint16';\n case DataType.int32:\n return 'int32';\n case DataType.uint32:\n return 'uint32';\n case DataType.float16:\n return 'float16';\n case DataType.float:\n return 'float32';\n case DataType.double:\n return 'float64';\n case DataType.string:\n return 'string';\n case DataType.int64:\n return 'int64';\n case DataType.uint64:\n return 'uint64';\n\n default:\n throw new Error(`unsupported data type: ${typeProto}`);\n }\n};\n\n/**\n * get tensor element size in bytes by the given data type\n * @returns size in integer or undefined if the data type is not supported\n */\nexport const getTensorElementSize = (dateType: number): number|\n undefined => [undefined, 4, 1, 1, 2, 2, 4, 8, undefined, 1, 2, 8, 4, 8, undefined, undefined, undefined][dateType];\n\n/**\n * get typed array constructor by the given tensor type\n */\nexport const tensorTypeToTypedArrayConstructor = (type: Tensor.Type): Float32ArrayConstructor|Uint8ArrayConstructor|\n Int8ArrayConstructor|Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|\n Uint8ArrayConstructor|Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor => {\n switch (type) {\n case 'float16':\n // allow 
Float16Array polyfill.\n return typeof Float16Array !== 'undefined' && Float16Array.from ? Float16Array : Uint16Array;\n case 'float32':\n return Float32Array;\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'uint16':\n return Uint16Array;\n case 'int16':\n return Int16Array;\n case 'int32':\n return Int32Array;\n case 'bool':\n return Uint8Array;\n case 'float64':\n return Float64Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'uint64':\n return BigUint64Array;\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n };\n\n/**\n * Map string log level to integer value\n */\nexport const logLevelStringToEnum = (logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal'): number => {\n switch (logLevel) {\n case 'verbose':\n return 0;\n case 'info':\n return 1;\n case 'warning':\n return 2;\n case 'error':\n return 3;\n case 'fatal':\n return 4;\n default:\n throw new Error(`unsupported logging level: ${logLevel}`);\n }\n};\n\n/**\n * Check whether the given tensor type is supported by GPU buffer\n */\nexport const isGpuBufferSupportedType = (type: Tensor.Type): type is Tensor.GpuBufferDataTypes => type === 'float32' ||\n type === 'float16' || type === 'int32' || type === 'int64' || type === 'uint32' || type === 'uint8' ||\n type === 'bool';\n\n/**\n * Map string data location to integer value\n */\nexport const dataLocationStringToEnum = (location: Tensor.DataLocation): number => {\n switch (location) {\n case 'none':\n return 0;\n case 'cpu':\n return 1;\n case 'cpu-pinned':\n return 2;\n case 'texture':\n return 3;\n case 'gpu-buffer':\n return 4;\n default:\n throw new Error(`unsupported data location: ${location}`);\n }\n};\n\n/**\n * Map integer data location to string value\n */\nexport const dataLocationEnumToString = (location: number): Tensor.DataLocation|undefined =>\n (['none', 'cpu', 'cpu-pinned', 'texture', 'gpu-buffer'] as const)[location];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {isNode} from './wasm-utils-env';\n\n/**\n * Load a file into a Uint8Array.\n *\n * @param file - the file to load. Can be a URL/path, a Blob, an ArrayBuffer, or a Uint8Array.\n * @returns a Uint8Array containing the file data.\n */\nexport const loadFile = async(file: string|Blob|ArrayBufferLike|Uint8Array): Promise => {\n if (typeof file === 'string') {\n if (isNode) {\n // load file into ArrayBuffer in Node.js\n try {\n const {readFile} = require('node:fs/promises');\n return new Uint8Array(await readFile(file));\n } catch (e) {\n if (e.code === 'ERR_FS_FILE_TOO_LARGE') {\n // file is too large, use fs.createReadStream instead\n const {createReadStream} = require('node:fs');\n const stream = createReadStream(file);\n const chunks: Uint8Array[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n return new Uint8Array(Buffer.concat(chunks));\n }\n throw e;\n }\n } else {\n // load file into ArrayBuffer in browsers\n const response = await fetch(file);\n if (!response.ok) {\n throw new Error(`failed to load external data file: ${file}`);\n }\n const contentLengthHeader = response.headers.get('Content-Length');\n const fileSize = contentLengthHeader ? parseInt(contentLengthHeader, 10) : 0;\n if (fileSize < 1073741824 /* 1GB */) {\n // when Content-Length header is not set, we cannot determine the file size. 
We assume it is small enough to\n // load into memory.\n return new Uint8Array(await response.arrayBuffer());\n } else {\n // file is too large, use stream instead\n if (!response.body) {\n throw new Error(`failed to load external data file: ${file}, no response body.`);\n }\n const reader = response.body.getReader();\n\n let buffer;\n try {\n // try to create ArrayBuffer directly\n buffer = new ArrayBuffer(fileSize);\n } catch (e) {\n if (e instanceof RangeError) {\n // use WebAssembly Memory to allocate larger ArrayBuffer\n const pages = Math.ceil(fileSize / 65536);\n buffer = new WebAssembly.Memory({initial: pages, maximum: pages}).buffer;\n } else {\n throw e;\n }\n }\n\n let offset = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const {done, value} = await reader.read();\n if (done) {\n break;\n }\n const chunkSize = value.byteLength;\n const chunk = new Uint8Array(buffer, offset, chunkSize);\n chunk.set(value);\n offset += chunkSize;\n }\n return new Uint8Array(buffer, 0, fileSize);\n }\n }\n\n } else if (file instanceof Blob) {\n return new Uint8Array(await file.arrayBuffer());\n } else if (file instanceof Uint8Array) {\n return file;\n } else {\n return new Uint8Array(file);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {logLevelStringToEnum} from '../wasm-common';\n\ntype LogLevel = NonNullable;\ntype MessageString = string;\ntype MessageFunction = () => string;\ntype Message = MessageString|MessageFunction;\n\nconst logLevelPrefix = ['V', 'I', 'W', 'E', 'F'];\n\nconst doLog = (level: number, message: string): void => {\n // eslint-disable-next-line no-console\n console.log(`[${logLevelPrefix[level]},${new Date().toISOString()}]${message}`);\n};\n\nlet configLogLevel: LogLevel|undefined;\nlet debug: boolean|undefined;\n\nexport const configureLogger = ($configLogLevel: LogLevel, $debug: boolean): void => {\n configLogLevel = $configLogLevel;\n debug = $debug;\n};\n\n/**\n * A simple logging utility to log messages to the console.\n */\nexport const LOG = (logLevel: LogLevel, msg: Message): void => {\n const messageLevel = logLevelStringToEnum(logLevel);\n const configLevel = logLevelStringToEnum(configLogLevel);\n if (messageLevel >= configLevel) {\n doLog(messageLevel, typeof msg === 'function' ? msg() : msg);\n }\n};\n\n/**\n * A simple logging utility to log messages to the console. Only logs when debug is enabled.\n */\nexport const LOG_DEBUG: typeof LOG = (...args: Parameters) => {\n if (debug) {\n LOG(...args);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\nimport {tensorTypeToTypedArrayConstructor} from '../wasm-common';\n\nexport const createView = (dataBuffer: ArrayBuffer, type: Tensor.Type): Int32Array|Uint32Array|BigInt64Array|\n BigUint64Array|Uint8Array|Float32Array|Float64Array|Int8Array|Int16Array|Uint16Array =>\n new (tensorTypeToTypedArrayConstructor(type))(dataBuffer);\n\n/**\n * a TensorView does not own the data.\n */\nexport interface TensorView {\n readonly data: number;\n readonly dataType: number;\n readonly dims: readonly number[];\n\n /**\n * get a Float32Array data view of the tensor data. tensor data must be on CPU.\n */\n getFloat32Array(): Float32Array;\n\n /**\n * get a BigInt64Array data view of the tensor data. 
tensor data must be on CPU.\n */\n getBigInt64Array(): BigInt64Array;\n\n /**\n * get a Int32Array data view of the tensor data. tensor data must be on CPU.\n */\n getInt32Array(): Int32Array;\n\n /**\n * create a new tensor view with the same data but different dimensions.\n */\n reshape(newDims: readonly number[]): TensorView;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../wasm-common';\nimport {TensorView} from '../tensor-view';\n\nimport {ShaderHelper} from './ops/common';\n\nexport type SessionState = 'default'|'capturing'|'replaying';\n\nexport enum GpuDataType {\n default = 0,\n upload = 1,\n profile = 2\n}\nexport type GpuDataId = number;\n\nexport type GpuArchitecture = 'ampere';\nexport type GpuVendor = 'amd'|'intel'|'nvidia';\nexport interface AdapterInfo {\n isArchitecture: (architecture: GpuArchitecture) => boolean;\n isVendor: (vendor: GpuVendor) => boolean;\n}\n\nexport interface GpuData {\n type: GpuDataType;\n id: GpuDataId;\n buffer: GPUBuffer;\n}\n\nexport interface TensorInfo {\n dims: readonly number[];\n dataType: number;\n}\n\nexport interface ProgramUniform {\n type: DataType;\n data: number|readonly number[];\n}\n\nexport type ProgramUniformVariableInfo = [type: DataType, length: number];\n\n/**\n * Represent the dependency of a program on a specific input tensor.\n *\n * - 'none': the shader/uniform does not depend on this input's info\n * - 'type': the shader/uniform depends on data type of this input\n * - 'rank': the shader/uniform depends on data type and the rank of this input\n * - 'dims': the shader/uniform depends on data type and the dims of this input\n * - 'data': the shader/uniform depends on data type, the dims and the data of this input\n */\nexport type ProgramInputTensorInfoDependency = 'none'|'type'|'rank'|'dims'|'data';\n\n/**\n * Represent information about a program's cache for shader.\n */\nexport interface ProgramShaderCacheInfo {\n /**\n * an optional string as a cache hint in the artifact cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains initializing-time information, such as the attributes or any information of\n * initializers. It should NOT contain any runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'dims' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n/**\n * Represent information about a program's cache for uniform.\n */\nexport interface ProgramUniformCacheInfo {\n /**\n * an optional string as a cache hint in the uniform cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'none' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n\n /**\n * an optional object describing the cache information of the program shader.\n *\n * If this is not specified, assume hint is empty and inputDependencies are ['dims'] for all inputs.\n */\n shaderCache?: ProgramShaderCacheInfo;\n\n /**\n * the shader's processing source code.\n *\n * This function will be called when shader cache missed.\n */\n getShaderSource: (shaderHelper: ShaderHelper) => string;\n\n /**\n * A function to get run data required to run the program.\n *\n * This function will be called every time the program is executed. Should keep this function as simple as possible.\n */\n getRunData: (inputs: readonly TensorView[]) => {\n outputs: readonly TensorInfo[];\n dispatchGroup: {x: number; y?: number; z?: number};\n programUniforms?: readonly ProgramUniform[];\n };\n}\n\nexport interface Artifact {\n programInfo: ProgramInfo;\n computePipeline: GPUComputePipeline;\n uniformVariablesInfo: readonly ProgramUniformVariableInfo[]|undefined;\n}\n\nexport interface ComputeContextInputsOutputsMapping {\n /**\n * specify the mapping to the program's inputs. the value can be a number or a tensor view.\n * - if it's a number, it's the index of the kernel's input\n * - if it's a tensor view, it's an existing tensor view that will be used as the input\n *\n * if inputs is not specified, the mapping will be the kernel's inputs in order.\n */\n readonly inputs?: ReadonlyArray;\n /**\n * specify the mapping to the program's outputs. the value must be a number.\n * - if it's a non-negative number, it's the index of the kernel's output\n * - if it's -1, it's an output that will be created as a temporary value. this value will be released after\n * the kernel is executed.\n * - if it's -2, it's an output that will be created as a persistent value. this value will be released when the\n * kernel is released.\n *\n * if outputs is not specified, the mapping will be the kernel's outputs in order.\n */\n readonly outputs?: readonly number[];\n}\n\n/**\n * A ComputeContext instance carries the states that representing the current running of a kernel.\n */\nexport interface ComputeContext {\n /**\n * gpu adapter info\n */\n readonly adapterInfo: AdapterInfo;\n\n /**\n * stores the pointer to OpKernelContext\n */\n readonly opKernelContext: number;\n\n /**\n * a list of inputs, each input is an instance of TensorView\n */\n readonly inputs: readonly TensorView[];\n\n /**\n * a custom data object that can be used to store any data that is needed by the kernel\n */\n readonly kernelCustomData: {[key: string]: unknown};\n\n /**\n * a buffer that can be used to access custom data created each time the kernel is executed\n */\n readonly customDataBuffer: Uint8Array;\n\n /**\n * a number of outputs for the node\n */\n readonly outputCount: number;\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[];\n output(index: number, dims: readonly number[]): number;\n getMaxComputeWorkgroupSizes(): [number, number, number];\n getMaxComputeWorkgroupStoragesize(): number;\n}\n\nexport type TimestampQuery = 'none'|'inside-passes'|'at-passes';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {GpuData, GpuDataId, GpuDataType} from './types';\n\n/**\n * manages GpuDataId -> GpuBuffer\n */\nexport interface GpuDataManager {\n /**\n * copy data from CPU to GPU.\n */\n upload(id: GpuDataId, data: Uint8Array): void;\n /**\n * copy data from GPU to GPU.\n */\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void;\n /**\n * create new data on GPU.\n */\n create(size: number, usage?: number): GpuData;\n /**\n * get GPU data by ID.\n */\n get(id: GpuDataId): GpuData|undefined;\n /**\n * release the data on GPU by ID.\n *\n * @return size of the data released\n */\n release(id: GpuDataId): number;\n /**\n * copy data from GPU to CPU.\n */\n download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise;\n\n /**\n * refresh the buffers that marked for release.\n *\n * when release() is called, the buffer is not released immediately. this is because we need to wait for the commands\n * to be submitted to the GPU. this function is called after the commands are submitted so that the buffers can be\n * actually released.\n */\n refreshPendingBuffers(): void;\n\n /**\n * register an external buffer for IO Binding. If the buffer is already registered, return the existing GPU data ID.\n *\n * GPU data manager only manages a mapping between the buffer and the GPU data ID. It will not manage the lifecycle of\n * the external buffer.\n */\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number;\n\n /**\n * unregister an external buffer for IO Binding.\n */\n unregisterExternalBuffer(buffer: GPUBuffer): void;\n\n /**\n * destroy all gpu buffers.\n */\n dispose(): void;\n\n /**\n * release session related data.\n * @param sessionId - specify the session ID.\n */\n onReleaseSession(sessionId: number): void;\n}\n\ninterface StorageCacheValue {\n gpuData: GpuData;\n originalSize: number;\n}\n\nconst bucketFreelist: Map = new Map([\n [64, 250],\n [128, 200],\n [256, 200],\n [512, 200],\n [2048, 230],\n [4096, 200],\n [8192, 50],\n [16384, 50],\n [32768, 50],\n [65536, 50],\n [131072, 50],\n [262144, 50],\n [524288, 50],\n [1048576, 50],\n [2097152, 30],\n [4194304, 20],\n [8388608, 10],\n [12582912, 10],\n [16777216, 10],\n [26214400, 15],\n [33554432, 22],\n [44236800, 2],\n [58982400, 6],\n // we don't want to cache the bucket sizes below but not caching them\n // results in some major performance hits for models like sd-turbo.\n [67108864, 6],\n [134217728, 6],\n [167772160, 6],\n]);\n\nconst bucketArr: number[] = [];\n\n/**\n * normalize the buffer size so that it fits the 128-bits (16 bytes) alignment.\n */\nconst calcNormalizedBufferSize = (size: number) => Math.ceil(size / 16) * 16;\n\n/**\n * calculate the buffer size so that it fits into buckets.\n */\nconst calcBucketBufferSize = (size: number) => {\n for (let idx = 0; idx < bucketArr.length; idx++) {\n const sizeForBucket = bucketArr[idx];\n if (size <= sizeForBucket) {\n return sizeForBucket;\n }\n }\n // not in bucket list -> caller will not cache, round up to 16.\n return Math.ceil(size / 16) * 16;\n};\n\nlet guid = 1;\nconst createNewGpuDataId = () => guid++;\n\n/**\n * exported standard download function. 
This function is used by the session to download the data from GPU, and also by\n * factory to create GPU tensors with the capacity of downloading data from GPU.\n *\n * @param backend - the WebGPU backend\n * @param gpuBuffer - the GPU buffer to download\n * @param originalSize - the original size of the data\n * @param getTargetBuffer - optional. If provided, the data will be copied to the target buffer. Otherwise, a new buffer\n * will be created and returned.\n */\nexport const downloadGpuData =\n async(backend: WebGpuBackend, gpuBuffer: GPUBuffer, originalSize: number, getTargetBuffer?: () => Uint8Array):\n Promise => {\n const bufferSize = calcNormalizedBufferSize(originalSize);\n const gpuReadBuffer = backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: bufferSize, usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ});\n try {\n const commandEncoder = backend.getCommandEncoder();\n backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n gpuBuffer /* source buffer */, 0 /* source offset */, gpuReadBuffer /* destination buffer */,\n 0 /* destination offset */, bufferSize /* size */\n );\n backend.flush();\n\n await gpuReadBuffer.mapAsync(GPUMapMode.READ);\n\n const arrayBuffer = gpuReadBuffer.getMappedRange();\n if (getTargetBuffer) {\n // if we already have a CPU buffer to accept the data, no need to clone the ArrayBuffer.\n const targetBuffer = getTargetBuffer();\n targetBuffer.set(new Uint8Array(arrayBuffer, 0, originalSize));\n return targetBuffer;\n } else {\n // the mapped ArrayBuffer will be released when the GPU buffer is destroyed. Need to clone the\n // ArrayBuffer.\n return new Uint8Array(arrayBuffer.slice(0, originalSize));\n }\n } finally {\n gpuReadBuffer.destroy();\n }\n };\n\nclass GpuDataManagerImpl implements GpuDataManager {\n // GPU Data ID => GPU Data ( storage buffer )\n private storageCache: Map;\n\n // pending buffers for uploading ( data is unmapped )\n private buffersForUploadingPending: GPUBuffer[];\n // pending buffers for computing\n private buffersPending: GPUBuffer[];\n\n // The reusable storage buffers for computing.\n private freeBuffers: Map;\n // The reusable uniform buffers\n private freeUniformBuffers: Map;\n\n // The external buffers registered users for IO Binding.\n private externalBuffers: Map;\n\n // The pendingBuffers for capture graph.\n // a SessionID -> GPUBuffer[] mapping.\n private capturedPendingBuffers: Map;\n\n constructor(private backend: WebGpuBackend) {\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.buffersForUploadingPending = [];\n this.buffersPending = [];\n this.externalBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n\n for (const [key, ] of bucketFreelist) {\n bucketArr.push(key);\n this.freeBuffers.set(key, []);\n this.freeUniformBuffers.set(key, []);\n }\n }\n\n upload(id: GpuDataId, data: Uint8Array): void {\n const srcArrayBuffer = data.buffer;\n const srcOffset = data.byteOffset;\n const srcLength = data.byteLength;\n const size = calcNormalizedBufferSize(srcLength);\n\n // get destination gpu buffer\n const gpuDataCache = this.storageCache.get(id);\n if (!gpuDataCache) {\n throw new Error('gpu data for uploading does not exist');\n }\n if (gpuDataCache.originalSize !== srcLength) {\n throw new Error(`inconsistent data size. 
gpu data size=${gpuDataCache.originalSize}, data size=${srcLength}`);\n }\n\n // create gpu buffer\n const gpuBufferForUploading = this.backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {mappedAtCreation: true, size, usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC});\n\n // copy (upload) data\n const arrayBuffer = gpuBufferForUploading.getMappedRange();\n new Uint8Array(arrayBuffer).set(new Uint8Array(srcArrayBuffer, srcOffset, srcLength));\n gpuBufferForUploading.unmap();\n\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(gpuBufferForUploading, 0, gpuDataCache.gpuData.buffer, 0, size);\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.upload(id=${id})`);\n\n this.buffersForUploadingPending.push(gpuBufferForUploading);\n }\n\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void {\n // get source gpu buffer\n const sourceGpuDataCache = this.storageCache.get(sourceId);\n if (!sourceGpuDataCache) {\n throw new Error('source gpu data for memcpy does not exist');\n }\n // get destination gpu buffer\n const destinationGpuDataCache = this.storageCache.get(destinationId);\n if (!destinationGpuDataCache) {\n throw new Error('destination gpu data for memcpy does not exist');\n }\n if (sourceGpuDataCache.originalSize !== destinationGpuDataCache.originalSize) {\n throw new Error('inconsistent source and destination gpu data size');\n }\n\n const size = calcNormalizedBufferSize(sourceGpuDataCache.originalSize);\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n sourceGpuDataCache.gpuData.buffer, 0, destinationGpuDataCache.gpuData.buffer, 0, size);\n }\n\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number {\n let id: number|undefined;\n if (previousBuffer) {\n id = this.externalBuffers.get(previousBuffer);\n if (id === undefined) {\n throw new Error('previous buffer is not registered');\n }\n if (buffer === previousBuffer) {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${\n id}, buffer is the same, skip.`);\n return id;\n } else if (this.backend.capturedCommandList.has(this.backend.currentSessionId!)) {\n throw new Error(`Registering a different external buffer under graph capture mode is not supported yet.\n Please use the previous external buffer!`);\n }\n this.externalBuffers.delete(previousBuffer);\n } else {\n id = createNewGpuDataId();\n }\n\n this.storageCache.set(id, {gpuData: {id, type: GpuDataType.default, buffer}, originalSize});\n this.externalBuffers.set(buffer, id);\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${id}, registered.`);\n return id;\n }\n\n unregisterExternalBuffer(buffer: GPUBuffer): void {\n const id = this.externalBuffers.get(buffer);\n if (id !== undefined) {\n this.storageCache.delete(id);\n this.externalBuffers.delete(buffer);\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${id}`);\n }\n }\n\n // eslint-disable-next-line no-bitwise\n create(size: number, usage = GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST): GpuData {\n const bufferSize = calcBucketBufferSize(size);\n\n let gpuBuffer;\n // Currently, only storage buffers are reused.\n // eslint-disable-next-line no-bitwise\n const isStorage = 
(usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE;\n // eslint-disable-next-line no-bitwise\n const isUniform = (usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM;\n if (isStorage || isUniform) {\n const freeBuffers = isStorage ? this.freeBuffers : this.freeUniformBuffers;\n const buffers = freeBuffers.get(bufferSize);\n if (!buffers) {\n // no such bucket/freelist - create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n } else {\n if (buffers.length > 0) {\n // in freelist, use it\n gpuBuffer = buffers.pop() as GPUBuffer;\n } else {\n // bucket empty, create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n }\n } else {\n // create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n\n const gpuData = {id: createNewGpuDataId(), type: GpuDataType.default, buffer: gpuBuffer};\n this.storageCache.set(gpuData.id, {gpuData, originalSize: size});\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.create(size=${size}) => id=${gpuData.id}`);\n return gpuData;\n }\n\n get(id: GpuDataId): GpuData|undefined {\n return this.storageCache.get(id)?.gpuData;\n }\n\n release(id: GpuDataId): number {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('releasing data does not exist');\n }\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.release(id=${id}), gpuDataId=${cachedData.gpuData.id}`);\n\n this.storageCache.delete(id);\n this.buffersPending.push(cachedData.gpuData.buffer);\n // cachedData.gpuData.buffer.destroy();\n\n return cachedData.originalSize;\n }\n\n async download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('data does not exist');\n }\n await downloadGpuData(this.backend, cachedData.gpuData.buffer, cachedData.originalSize, getTargetBuffer);\n }\n\n refreshPendingBuffers(): void {\n for (const buffer of this.buffersForUploadingPending) {\n // upload buffer is only useful in the session creation time. 
So we don't need to reuse them in session running.\n buffer.destroy();\n }\n this.buffersForUploadingPending = [];\n\n if (this.buffersPending.length === 0) {\n return;\n }\n\n if (this.backend.sessionStatus === 'default') {\n for (const buffer of this.buffersPending) {\n const maxInFreeList = bucketFreelist.get(buffer.size);\n\n // eslint-disable-next-line no-bitwise\n if ((buffer.usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE) {\n // Put the pending buffer to freeBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n // eslint-disable-next-line no-bitwise\n } else if ((buffer.usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM) {\n // Put the pending buffer to freeUniformBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeUniformBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n } else {\n buffer.destroy();\n }\n }\n this.buffersPending = [];\n } else {\n // Don't release intermediate tensors in non-default mode.\n // TODO: reuse the storage buffers in non-default mode.\n let capturedBuffers = this.capturedPendingBuffers.get(this.backend.currentSessionId!);\n if (!capturedBuffers) {\n capturedBuffers = [];\n this.capturedPendingBuffers.set(this.backend.currentSessionId!, capturedBuffers);\n }\n for (const buffer of this.buffersPending) {\n capturedBuffers.push(buffer);\n }\n this.buffersPending = [];\n }\n }\n\n dispose() {\n this.freeBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.freeUniformBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n\n this.storageCache.forEach((storage) => {\n storage.gpuData.buffer.destroy();\n });\n\n this.capturedPendingBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n }\n\n onReleaseSession(sessionId: number) {\n // release the captured pending buffers.\n const pendingBuffers = this.capturedPendingBuffers.get(sessionId);\n if (pendingBuffers) {\n pendingBuffers.forEach(buffer => {\n buffer.destroy();\n });\n this.capturedPendingBuffers.delete(sessionId);\n }\n }\n}\n\nexport const createGpuDataManager = (...args: ConstructorParameters): GpuDataManager =>\n new GpuDataManagerImpl(...args);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\n/**\n * create a new object from the given attribute, and add a cacheKey property to it\n */\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable no-param-reassign */\n\nexport class MatMulUtil {\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n const max = Math.max(aLen, bLen);\n if (aLen && bLen) {\n cdims[crank - i] = Math.max(aLen, bLen);\n } else {\n // when either aLen or bLen is 0, the other should be either 0 or 1, otherwise it is not broadcastable.\n if (max > 1) {\n return undefined;\n }\n cdims[crank - i] = 0;\n }\n }\n\n return cdims;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n}\n\n\nexport class ShapeUtil {\n /**\n * calculate the size (number of elements)\n */\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n /**\n * convert dims corresponding to type change to pack. ex. 
uint8 data to uint32\n */\n static convertShape(dims: readonly number[], size = 4): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n }\n const newDims = new Array(rank);\n let i = rank - 1;\n while (i >= 0) {\n if (dims[i] % size === 0) {\n newDims[i] = dims[i] / size;\n break;\n }\n if (size % dims[i] !== 0) {\n throw new Error('cannot convert shape');\n }\n newDims[i] = 1;\n size /= dims[i];\n i--;\n }\n for (i--; i >= 0; i--) {\n newDims[i] = dims[i];\n }\n return newDims;\n }\n\n /**\n * calculate the size (number of elements) from the given axis (inclusive)\n */\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n /**\n * calculate the size (number of elements) to the given axis (exclusive)\n */\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n /**\n * calculate the size (number of elements) from and to the given axis [start, end)\n */\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be negative.\n if (dims[i] < 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. Most likely the range contains negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank?: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank ?? 
axes.length));\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]): void {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], 
isChannelLast: boolean, autoPad?: string): void {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + (isChannelLast ? 1 : 2)], strides[dim], dilations[dim], kernelShape[dim], pads, dim,\n dim + inputDims.length - 2, autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {ShapeUtil} from '../../util';\nimport {ProgramUniform, ProgramUniformVariableInfo} from '../types';\n\n/**\n * constant value for a workgroup size.\n *\n * We definitely can do further optimization in future, but for now we use 64.\n *\n * rule of thumb: Use [a workgroup size of] 64 unless you know what GPU you are targeting or that your workload\n * needs something different.\n *\n * from: https://surma.dev/things/webgpu/\n **/\nexport const WORKGROUP_SIZE = 64;\n\ninterface IndicesHelperTypes {\n /**\n * WGSL type of indices expression\n */\n readonly indices: string;\n\n /**\n * WGSL type of a value\n */\n readonly value: string;\n\n /**\n * WGSL type of storage type representing a value\n *\n * This is usually the same to `value`, but for some type (eg. bool), we need to use `u32` as storage type for\n * value type `vec4`\n */\n readonly storage: string;\n\n /**\n * tensor type as represented in TensorView\n */\n readonly tensor: number;\n}\n\n/**\n * A helper class for generating WGSL code for manipulating indices and data for a shader's input or output.\n *\n * This class is designed to offer a unified way to generate WGSL code for manipulating indices and data for a shader's\n * input or output.\n *\n * The following is a list of terminologies used in this class:\n * - `offset`: a uint32 value representing the offset of an element in the data buffer.\n * - `indices`: an abstraction of a multi-dimensional array's indices representing the data's index on each dimension.\n * - `value`: a value of a data element.\n *\n * Users are expected to create an instance of this class for each shader's input or output, and use the instance to\n * generate WGSL code for manipulating indices and data. The following 2 exported functions are for users to call to\n * create an instance of an indices helper:\n * - `inputVariable()`: create an indices helper instance for an input.\n * - `outputVariable()`: create an indices helper instance for an output.\n * - `internalVariable()`: create an indices helper instance for an internal variable.\n *\n * An indices helper instance contains helper functions for the following operations:\n * - access readonly basic information, including: `name`(the name of the input or output), `usage`(whether it's an\n * input, an output or an internal variable) and `shape`(the passed in shape).\n * - `type`: access readonly type information, including: `indices`(the type of indices), `value`(the type of value at\n * runtime), `storage`(the type of value at storage) and `tensor`(the tensor type as represented in TensorView).\n * - generate WGSL code for getting indices from offset. 
Use `offsetToIndices()` for WGSL code snippet to calculate\n * indices from offset, and use `indicesToOffset()` for WGSL code snippet to calculate offset from indices.\n * - to manipulate an instance of indices, use `setIndices()` and `getIndices()` to set and get the indices on an\n * indices variable.\n * - to manipulate data, use `set()`/`get()` to access data at the given indices from parameter list, use\n * `setByIndices()`/`getByIndices()` to access data at the given indices from an indices variable, and use\n * `setByOffset()`/`getByOffset()` to access data at the given offset.\n * - `impl`: get WGSL code of function implementation for the util functions mentioned above.\n */\nexport interface IndicesHelper {\n /**\n * get WGSL code of function implementation for the util functions.\n *\n */\n readonly impl: () => string;\n\n /**\n * get type info\n */\n readonly type: IndicesHelperTypes;\n\n /**\n * WGSL code of a expression for getting indices from offset.\n *\n * @param varOffset - a u32 expression representing the offset.\n *\n * @returns an `type.indices` expression\n */\n readonly offsetToIndices: (varOffset: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting offset from indices.\n *\n * @param varIndices - a `type.indices` expression representing the indices.\n *\n * @returns an `u32` expression\n */\n readonly indicesToOffset: (varIndices: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting original offset from broadcasted indices.\n *\n * @param varIndices - a `type.indices` expression representing the output indices.\n * @param output - output IndicesHelper.\n *\n * @returns an `u32` expression\n */\n readonly broadcastedIndicesToOffset: (varIndices: string, output: IndicesHelper) => string;\n\n /**\n * WGSL code of generating an indices literal\n *\n * @param init - initial value.\n */\n readonly indices: (...init: ReadonlyArray) => string;\n\n /**\n * WGSL code of a statement for setting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to set. can be a number or a string (WGSL `u32` expression).\n * @param value - the value to set. can be a number or a string (WGSL `u32` expression).\n *\n * @returns a WGSL statement\n */\n readonly indicesSet: (varIndices: string, idx: number|string, value: number|string) => void;\n\n /**\n * WGSL code of an `u32` expression for getting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to get. can be a number or a string (WGSL `u32` expression).\n *\n * @returns an `u32` expression\n */\n readonly indicesGet: (varIndices: string, idx: number|string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices.\n *\n * @param indicesAndValue - an array of numbers or strings (WGSL `u32` expression) representing the indices, followed\n * by the value to set. This array should have exactly `shape.length + 1` elements.\n */\n readonly set: (...indicesAndValue: ReadonlyArray) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n * @param value - the value to set. 
should be a WGSL expression.\n */\n readonly setByIndices: (varIndices: string, value: string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n * @param value - the value to set. should be a WGSL expression.\n */\n readonly setByOffset: (offset: number|string, value: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices.\n *\n * @param indices - an array of numbers or strings (WGSL `u32` expression) representing the indices.\n */\n readonly get: (...indices: ReadonlyArray) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n */\n readonly getByIndices: (varIndices: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n */\n readonly getByOffset: (offset: number|string) => string;\n\n /**\n * name of the data variable\n */\n readonly name: string;\n\n /**\n * whether the helper is for an input, an output or an internal variable.\n */\n readonly usage: 'input'|'output'|'internal';\n\n /**\n * the rank of the input or output.\n */\n readonly rank: number;\n\n /**\n * a string representing the variable name for the shape of the input or output.\n */\n readonly shape: string;\n\n /**\n * a string representing the variable name for the strides of the input or output.\n */\n readonly strides: string;\n}\n\nconst getWgslMappedType = (type: number, components: 1|2|3|4): string|[string, string] => {\n if (components === 3) {\n throw new Error('vec3 has same alignment as vec4, use vec4 instead');\n }\n\n // return type is [ storage type, runtime type ] or a single string for both\n switch (type) {\n case DataType.float16:\n return components > 1 ? `vec${components}` : 'f16';\n case DataType.float:\n return components > 1 ? `vec${components}` : 'f32';\n case DataType.int32:\n return components > 1 ? `vec${components}` : 'i32';\n case DataType.uint32:\n return components > 1 ? `vec${components}` : 'u32';\n case DataType.int64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'i32'];\n case DataType.uint64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'u32'];\n case DataType.bool:\n if (components !== 4) {\n throw new Error('bool must be vec4');\n }\n return ['u32', 'vec4'];\n\n default:\n throw new Error(`Unknown data type: ${type}`);\n }\n};\n\nexport const tensorTypeToWsglStorageType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? mappedType : mappedType[0];\n};\n\nexport const tensorTypeToWsglValueType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? 
mappedType : mappedType[1];\n};\n\nexport const createTensorShapeVariables = (...dims: ReadonlyArray): ProgramUniform[] => {\n const programUniforms: ProgramUniform[] = [];\n dims.forEach(dim => {\n if (dim.length !== 0) {\n programUniforms.push(\n {type: DataType.uint32, data: dim}, {type: DataType.uint32, data: ShapeUtil.computeStrides(dim)});\n }\n });\n return programUniforms;\n};\n\n/**\n * A helper function to get maximum vector size for specified data length\n * @param size\n */\nexport const getMaxComponents = (size: number) => {\n // we cannot use vec3 type since it has alignment of 16 bytes\n if (size % 4 === 0) {\n return 4;\n } else if (size % 2 === 0) {\n return 2;\n }\n\n return 1;\n};\n\n/**\n * A helper function that initializes variable as a scalar or vector. e.g. f32(0) or vec4f(0,0,0,0)\n * @param dataType\n * @param components\n * @param value\n */\nexport const fillVector = (dataType = 'f32', components?: number, value = '0') => {\n if (!components || components === 1) {\n return `${dataType}(${value})`;\n }\n\n return `vec${components}<${dataType}>(${value})`;\n};\n\n/**\n * A helper function that casts value or vector to f32\n * @param dataType\n * @param components\n * @param value\n */\nexport const castToF32 = (dataType: string, components: number, value: string) => {\n if (dataType === 'f32') {\n return value;\n }\n if (components === 1) {\n return `f32(${value})`;\n }\n\n return `vec${components}(${value})`;\n};\n\n/**\n * A helper function that returns scalar or sums all components of a vector\n * @param name\n * @param components\n */\nexport const sumVector = (name: string, components: number) => {\n if (components === 4) {\n return `(${name}.x + ${name}.y + ${name}.z + ${name}.w)`;\n } else if (components === 2) {\n return `(${name}.x + ${name}.y)`;\n } else if (components === 3) {\n return `(${name}.x + ${name}.y + ${name}.z)`;\n }\n\n return name;\n};\n\n/**\n * A helper function that returns variable element at index.\n * @param name - the name of variable.\n * @param index - the index of variable element.\n * @param length - the length of variable.\n * @param type - the type of variable, optional.\n */\nexport const getElementAt =\n (name: string, index: number|string, length: number, type?: UniformDataElementType): string => {\n if (name.startsWith('uniforms.') && length > 4) {\n if (typeof (index) === 'string') {\n if (type === 'f16') {\n return `${name}[(${index}) / 8][(${index}) % 8 / 4][(${index}) % 8 % 4]`;\n } else {\n return `${name}[(${index}) / 4][(${index}) % 4]`;\n }\n } else {\n if (type === 'f16') {\n return `${name}[${Math.floor(index / 8)}][${Math.floor(index % 8 / 4)}][${index % 8 % 4}]`;\n } else {\n return `${name}[${Math.floor(index / 4)}][${index % 4}]`;\n }\n }\n } else {\n return length > 1 ? `${name}[${index}]` : name;\n }\n };\n\n/**\n * A helper function to get a IndicesHelper for a given input or output.\n *\n * @param name - the name of the input or output.\n * @param tensorType - the tensor type of the input or output.\n * @param shapeOrRank - the tensor shape or the rank of the input or output.\n * @param usage - the usage of the indices helper.\n * @param components - indicates the number of components of each element. 
1 for scalar, 2 for vec2, 3 for vec3, 4 for\n * vec4.\n */\nconst createIndicesHelper =\n (name: string, tensorType: number, shapeOrRank: number|readonly number[], usage: IndicesHelper['usage'],\n components: 1|2|3|4): IndicesHelper => {\n const useUniform = typeof shapeOrRank === 'number';\n const rank = useUniform ? shapeOrRank : shapeOrRank.length;\n const rankIdentity = [...new Array(rank).keys()];\n const indicesType = rank < 2 ? 'u32' : rank <= 4 ? `vec${rank}` : `array`;\n const mappedType = getWgslMappedType(tensorType, components);\n const valueType = typeof mappedType === 'string' ? mappedType : mappedType[1];\n const storageType = typeof mappedType === 'string' ? mappedType : mappedType[0];\n const type = {indices: indicesType, value: valueType, storage: storageType, tensor: tensorType};\n\n const normalizeDim = (dim: number|string): string => typeof dim === 'string' ? dim : `${dim}u`;\n\n const implementationUsed = {\n offsetToIndices: false,\n indicesToOffset: false,\n broadcastedIndicesToOffset: false,\n set: false,\n setByIndices: false,\n get: false,\n getByIndices: false,\n };\n\n const uniformPrefix = useUniform ? 'uniforms.' : '';\n const shape = `${uniformPrefix}${name}_shape`;\n const strides = `${uniformPrefix}${name}_strides`;\n\n let o2iSnippet = '';\n for (let i = 0; i < rank - 1; i++) {\n o2iSnippet += `\n let dim${i} = current / ${getElementAt(strides, i, rank)};\n let rest${i} = current % ${getElementAt(strides, i, rank)};\n indices[${i}] = dim${i};\n current = rest${i};\n `;\n }\n o2iSnippet += `indices[${rank - 1}] = current;`;\n\n const offsetToIndicesImplementation = rank < 2 ? '' : `\n fn o2i_${name}(offset: u32) -> ${type.indices} {\n var indices: ${type.indices};\n var current = offset;\n ${o2iSnippet}\n return indices;\n }`;\n\n const offsetToIndices = (varOffset: string) => {\n implementationUsed.offsetToIndices = true;\n return rank < 2 ? varOffset : `o2i_${name}(${varOffset})`;\n };\n\n const offsets: string[] = [];\n if (rank >= 2) {\n for (let i = rank - 1; i >= 0; i--) {\n offsets.push(`${getElementAt(strides, i, rank)} * (indices[${i}])`);\n }\n }\n\n const indicesToOffsetImplementation = rank < 2 ? '' : `\n fn i2o_${name}(indices: ${type.indices}) -> u32 {\n return ${offsets.join('+')};\n }`;\n\n const indicesToOffset = (varIndices: string) => {\n implementationUsed.indicesToOffset = true;\n return rank < 2 ? varIndices : `i2o_${name}(${varIndices})`;\n };\n\n const indices = (...init: ReadonlyArray) =>\n rank === 0 ? 
'0u' : `${type.indices}(${init.map(normalizeDim).join(',')})`;\n\n const indicesGet = (varIndices: string, idx: number|string) => {\n if (rank < 2) {\n return `${varIndices}`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}`;\n }\n };\n\n const indicesSet = (varIndices: string, idx: number|string, value: string) => {\n if (rank < 2) {\n return `${varIndices}=${value};`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}=${value};`;\n }\n };\n\n const broadcastedIndicesToOffsetImplementation: {[key: string]: string} = {};\n const broadcastedIndicesToOffset = (varIndices: string, output: IndicesHelper) => {\n implementationUsed.broadcastedIndicesToOffset = true;\n const implKey = `${output.name}broadcastedIndicesTo${name}Offset`;\n if (implKey in broadcastedIndicesToOffsetImplementation) {\n return `${implKey}(${varIndices})`;\n }\n const offsets = [];\n for (let i = rank - 1; i >= 0; i--) {\n const idx = output.indicesGet('outputIndices', i + output.rank - rank);\n offsets.push(`${indicesGet(strides, i)} * (${idx} % ${indicesGet(shape, i)})`);\n }\n broadcastedIndicesToOffsetImplementation[implKey] =\n `fn ${implKey}(outputIndices: ${output.type.indices}) -> u32 {\n return ${offsets.length > 0 ? offsets.join('+') : '0u'};\n }`;\n\n return `${implKey}(${varIndices})`;\n };\n\n const setByOffset = (offset: number|string, value: string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]=${value};`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), select(0u, 0xFFFFFFFFu, ${value} < 0));`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), 0u);`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `${name}[${offset}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${value}));`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByOffset = (offset: number|string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `i32(${name}[${offset}].x)`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `u32(${name}[${offset}].x)`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `vec4(bool(${name}[${offset}] & 0xFFu), bool(${name}[${offset}] & 0xFF00u), bool(${name}[${\n offset}] & 0xFF0000u), bool(${name}[${offset}] & 0xFF000000u))`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByIndicesImplementation = rank < 2 ? '' : `\n fn get_${name}ByIndices(indices: ${type.indices}) -> ${valueType} {\n return ${getByOffset(`i2o_${name}(indices)`)};\n }`;\n\n const getImplementation = rank < 2 ? 
'' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn get_${name}(${functionParams}) -> ${valueType} {\n return get_${name}ByIndices(${indices(dimsParams)});\n }`;\n })();\n\n const get = (...indices: ReadonlyArray) => {\n if (indices.length !== rank) {\n throw new Error(`indices length must be ${rank}`);\n }\n\n const normalizedIndices = indices.map(normalizeDim).join(',');\n\n if (rank === 0) {\n return getByOffset('0u');\n } else if (rank === 1) {\n return getByOffset(normalizedIndices[0]);\n } else {\n implementationUsed.get = true;\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}(${normalizedIndices})`;\n }\n };\n\n const getByIndices = (varIndices: string) => {\n if (rank < 2) {\n return getByOffset(varIndices);\n } else {\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}ByIndices(${varIndices})`;\n }\n };\n\n const setByIndicesImplementation = rank < 2 ? '' : `\n fn set_${name}ByIndices(indices: ${type.indices}, value: ${valueType}) {\n ${setByOffset(`i2o_${name}(indices)`, 'value')}\n }`;\n\n const setImplementation = rank < 2 ? '' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn set_${name}(${functionParams}, value: ${valueType}) {\n set_${name}ByIndices(${indices(dimsParams)}, value);\n }`;\n })();\n\n const set = (...indicesAndValue: ReadonlyArray) => {\n if (indicesAndValue.length !== rank + 1) {\n throw new Error(`indices length must be ${rank}`);\n }\n const value = indicesAndValue[rank];\n if (typeof value !== 'string') {\n throw new Error('value must be string');\n }\n\n const normalizedIndices = indicesAndValue.slice(0, rank).map(normalizeDim).join(',');\n\n if (rank === 0) {\n return setByOffset('0u', value);\n } else if (rank === 1) {\n return setByOffset(normalizedIndices[0], value);\n } else {\n implementationUsed.set = true;\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}(${normalizedIndices}, ${value})`;\n }\n };\n\n const setByIndices = (varIndices: string, value: string) => {\n if (rank < 2) {\n return setByOffset(varIndices, value);\n } else {\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}ByIndices(${varIndices}, ${value});`;\n }\n };\n\n const impl = () => {\n const impls = [];\n let needShapeStrides = false;\n if (implementationUsed.offsetToIndices) {\n impls.push(offsetToIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.indicesToOffset) {\n impls.push(indicesToOffsetImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.broadcastedIndicesToOffset) {\n Object.values(broadcastedIndicesToOffsetImplementation).forEach(impl => impls.push(impl));\n needShapeStrides = true;\n }\n if (implementationUsed.set) {\n impls.push(setImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.setByIndices) {\n impls.push(setByIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.get) {\n impls.push(getImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.getByIndices) {\n impls.push(getByIndicesImplementation);\n needShapeStrides = true;\n }\n if (!useUniform && needShapeStrides) {\n impls.unshift(\n `const 
${shape} = ${type.indices}(${shapeOrRank.join(',')});`,\n `const ${strides} = ${type.indices}(${ShapeUtil.computeStrides(shapeOrRank).join(',')});`);\n }\n return impls.join('\\n');\n };\n\n return {\n impl,\n type,\n offsetToIndices,\n indicesToOffset,\n broadcastedIndicesToOffset,\n indices,\n indicesGet,\n indicesSet,\n set,\n setByOffset,\n setByIndices,\n get,\n getByOffset,\n getByIndices,\n // isVec4,\n usage,\n name,\n strides,\n shape,\n rank\n };\n };\n\n/**\n * Create a IndicesHelper for an input.\n *\n * @param name - the name of the input.\n * @param type - the tensor type of the input.\n * @param shapeOrRank - the tensor shape or the rank of the input.\n * @param components - the number of components of the input. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the input.\n */\nexport const inputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'input', components);\n\n/**\n * Create a IndicesHelper for an output.\n *\n * @param name - the name of the output.\n * @param type - the tensor type of the output.\n * @param shapeOrRank - the tensor shape or the rank of the output.\n * @param components - the number of components of the output. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the output.\n */\nexport const outputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'output', components);\n\n/**\n * Create a IndicesHelper for an internal variable.\n *\n * @param name - the name of the variable.\n * @param type - the tensor type of the variable.\n * @param shapeOrRank - the tensor shape or the rank of the variable.\n * @param components - the number of components of the variable. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the variable.\n */\nexport const internalVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'internal', components);\n\nexport type UniformDataElementType = 'u32'|'f16'|'f32'|'i32';\nexport type UniformsArrayType = Array<{name: string; type: UniformDataElementType; length?: number}>;\n\n/**\n * A ShaderHelper is a helper class for generating WGSL code.\n */\nexport interface ShaderHelper {\n /**\n * A helper function to generate the start of main function in WGSL source code.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param workgroupSize - an optional workgroup size. default is WORKGROUP_SIZE.\n */\n mainStart(workgroupSize?: number|[number, number, number]): string;\n\n /**\n * A helper function to generate the code snippet for guarding against out-of-bounds size.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n *\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param size - the size of the data to guard against. 
can be a number or a string (WGSL `u32` expression).\n */\n guardAgainstOutOfBoundsWorkgroupSizes(size: unknown): string;\n\n /**\n * A helper function to generate the code snippet for declaring multiple inputs or outputs.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n declareVariables(...variables: IndicesHelper[]): string;\n\n /**\n * A helper function to register one uniform. Can be called multiple times to register multiple uniforms.\n *\n * @param name - the name of the uniform.\n * @param type - the type of the uniform.\n * @param length - the length of the uniform, default to 1 when it is not provided.\n */\n registerUniform(name: string, type: string, length?: number): ShaderHelper;\n\n /**\n * A helper function to register multiple uniforms. Can be called multiple times to register multiple uniforms.\n *\n * @param uniforms - an array of uniforms. Each element of the array is an object with 2 properties: `name` and\n * `type`.\n */\n registerUniforms(uniforms: UniformsArrayType): ShaderHelper;\n\n /**\n * A helper function to register multiple internal variables. Can be called multiple times to register multiple\n * internal variables.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper;\n}\n\nclass ShaderHelperImpl implements ShaderHelper {\n constructor(private normalizedDispatchGroup: [number, number, number], private limits: GPUSupportedLimits) {}\n\n guardAgainstOutOfBoundsWorkgroupSizes(size: number|string): string {\n // Guard against out-of-bounds work group sizes\n const sizeInCode = typeof size === 'number' ? `${size}u` : size;\n return `if (global_idx >= ${sizeInCode}) { return; }`;\n }\n\n mainStart(workgroupSize: number|[number, number, number] = WORKGROUP_SIZE) {\n const workgroupSizeX = typeof workgroupSize === 'number' ? workgroupSize : workgroupSize[0];\n const workgroupSizeY = typeof workgroupSize === 'number' ? 1 : workgroupSize[1];\n const workgroupSizeZ = typeof workgroupSize === 'number' ? 1 : workgroupSize[2];\n\n if (workgroupSizeX > this.limits.maxComputeWorkgroupSizeX ||\n workgroupSizeY > this.limits.maxComputeWorkgroupSizeY ||\n workgroupSizeZ > this.limits.maxComputeWorkgroupSizeZ) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${\n this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);\n }\n\n if (workgroupSizeX * workgroupSizeY * workgroupSizeZ > this.limits.maxComputeInvocationsPerWorkgroup) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup invocations ${\n this.limits.maxComputeInvocationsPerWorkgroup}.`);\n }\n\n const is1DimensionDispatch = this.normalizedDispatchGroup[1] === 1 && this.normalizedDispatchGroup[2] === 1;\n const paramList = is1DimensionDispatch ? 
`@builtin(global_invocation_id) global_id : vec3,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(local_invocation_id) local_id : vec3` :\n `@builtin(global_invocation_id) global_id : vec3,\n @builtin(local_invocation_id) local_id : vec3,\n @builtin(local_invocation_index) local_idx : u32,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(num_workgroups) num_workgroups : vec3`;\n const globalIdxDefinition = is1DimensionDispatch ?\n 'let global_idx = global_id.x; let local_idx = local_id.x;' :\n `let global_idx = (workgroup_id.z * num_workgroups[0] * num_workgroups[1] +\n workgroup_id.y * num_workgroups[0] + workgroup_id.x) * ${\n workgroupSizeX * workgroupSizeY * workgroupSizeZ}u + local_idx;`;\n\n return `@compute @workgroup_size(${workgroupSizeX}, ${workgroupSizeY}, ${workgroupSizeZ})\n fn main(${paramList}) {\n ${globalIdxDefinition}\n `;\n }\n\n private appendVariableUniforms(variable: IndicesHelper): void {\n if (variable.rank !== 0) {\n if (variable.shape.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.shape.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n if (variable.strides.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.strides.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n }\n }\n\n private declareVariable(variable: IndicesHelper, bindingIndex: number): string {\n if (variable.usage === 'internal') {\n throw new Error('cannot use internal variable with declareVariable(). use registerInternalVariables() instead.');\n }\n this.variables.push(variable);\n this.appendVariableUniforms(variable);\n\n const access = variable.usage === 'input' ? 'read' : 'read_write';\n const storageType = variable.type.storage;\n return `@group(0) @binding(${bindingIndex}) var ${variable.name}: array<${storageType}>;`;\n }\n\n declareVariables(...variables: IndicesHelper[]): string {\n return variables.map(v => this.declareVariable(v, this.variableIndex++)).join('\\n');\n }\n\n private registerInternalVariable(variable: IndicesHelper): void {\n if (variable.usage !== 'internal') {\n throw new Error(\n 'cannot use input or output variable with registerInternalVariable(). use declareVariables() instead.');\n }\n\n this.internalVariables.push(variable);\n this.appendVariableUniforms(variable);\n }\n\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper {\n variables.forEach(v => this.registerInternalVariable(v));\n return this;\n }\n\n registerUniform(name: string, type: UniformDataElementType, length = 1): ShaderHelper {\n this.uniforms.push({name, type, length});\n return this;\n }\n\n registerUniforms(additionalUniforms: UniformsArrayType): ShaderHelper {\n this.uniforms = this.uniforms.concat(additionalUniforms);\n return this;\n }\n\n private internalVariables: IndicesHelper[] = [];\n private variables: IndicesHelper[] = [];\n private uniforms: UniformsArrayType = [];\n private uniformDeclaration(): string {\n if (this.uniforms.length === 0) {\n return '';\n }\n\n const uniformSnippets: string[] = [];\n for (const {name, type, length} of this.uniforms) {\n if (length && length > 4) {\n if (type === 'f16') {\n uniformSnippets.push(`@align(16) ${name}:array, ${Math.ceil(length / 8)}>`);\n } else {\n uniformSnippets.push(`${name}:array, ${Math.ceil(length / 4)}>`);\n }\n } else {\n const typeTemp = length == null || length === 1 ? 
type : `vec${length}<${type}>`;\n uniformSnippets.push(`${name}:${typeTemp}`);\n }\n }\n\n return `\n struct Uniforms { ${uniformSnippets.join(', ')} };\n @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`;\n }\n private variableIndex = 0;\n\n /**\n * Get additional implementation that needs to be added to the shader source.\n */\n get additionalImplementations(): string {\n return this.uniformDeclaration() + this.variables.map(i => i.impl()).join('\\n') +\n this.internalVariables.map(i => i.impl()).join('\\n');\n }\n\n /**\n * Get the variable info of the shader program.\n */\n get variablesInfo(): ProgramUniformVariableInfo[]|undefined {\n if (this.uniforms.length === 0) {\n return undefined;\n }\n\n const uniformWgslTypeToDataType = (type: UniformDataElementType) =>\n ([DataType.uint32, DataType.float16, DataType.float,\n DataType.int32][['u32', 'f16', 'f32', 'i32'].indexOf(type)]);\n return this.uniforms.map(u => ([uniformWgslTypeToDataType(u.type), u.length ?? 1]));\n }\n}\n\nexport const createShaderHelper = (dispatchGroup: [number, number, number], limits: GPUSupportedLimits) =>\n new ShaderHelperImpl(dispatchGroup, limits);\n\n/**\n * This function comes from https://github.com/tensorflow/tfjs/blob/master/tfjs-core/src/ops/broadcast_util.ts#L18-L40\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport const getBroadcastDims = (inShape: readonly number[], outShape: readonly number[]): number[] => {\n const inRank = inShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n};\n\nconst getAdjustedPerm = (inputRank: number, perm: number[]): number[] =>\n (perm && perm.length !== inputRank) ? 
[...(new Array(inputRank).keys())].reverse() : perm;\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] =>\n ShapeUtil.sortBasedOnPerm(inputShape, getAdjustedPerm(inputShape.length, perm));\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nexport const createTransposeProgramInfo = (inputTensor: TensorView, permAttr: number[]): ProgramInfo => {\n const inputDataType = inputTensor.dataType;\n const inputRank = inputTensor.dims.length;\n const perm = getAdjustedPerm(inputRank, permAttr);\n const outputShape = getOutputShape(inputTensor.dims, perm);\n const output = outputVariable('output', inputDataType, outputShape.length);\n const input = inputVariable('a', inputDataType, inputRank);\n let getShaderSource;\n if (perm.length === 2 && perm[0] === 1 && perm[1] === 0) {\n const wgslType = output.type.value;\n const workgroupSize: [number, number, number] = [16, 16, 1];\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n var tile : array, ${workgroupSize[0]}>;\n ${shaderHelper.mainStart(workgroupSize)}\n var x = workgroup_id.x * ${workgroupSize[0]}u + local_id.x;\n var y = workgroup_id.y * ${workgroupSize[0]}u + local_id.y;\n let width = uniforms.output_shape[0];\n let height = uniforms.output_shape[1];\n if (x < width && y < height) {\n tile[local_id.y][local_id.x] = ${input.getByOffset('y * width + x')};\n }\n workgroupBarrier();\n x = workgroup_id.y * ${workgroupSize[0]}u + local_id.x;\n y = workgroup_id.x * ${workgroupSize[0]}u + local_id.y;\n if (x < height && y < width) {\n ${output.setByOffset('y * height + x', 'tile[local_id.x][local_id.y]')}\n }\n }`;\n } else {\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${permFunctionBody(perm, inputRank, input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${output.setByOffset('global_idx', input.getByIndices('aIndices'))}\n }`;\n }\n return {\n name: 'Transpose',\n shaderCache: {hint: `${permAttr}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputSize = ShapeUtil.size(outputShape);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const transpose = (context: ComputeContext, attributes: TransposeAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createTransposeProgramInfo(context.inputs[0], attributes.perm));\n};\n\nexport const parseTransposeAttributes = (attributes: Record): TransposeAttributes =>\n createAttributeWithCacheKey({perm: attributes.perm as number[]});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\nimport {createReduceAttributesFromInputs, ReduceAttributes} from './reduce';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst reduceOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate * candidate',\n logSumExp: 'bestValue + exp(candidate)',\n l1: 'bestValue + abs(candidate)',\n l2: 'bestValue + candidate * candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceSharedOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate',\n logSumExp: 'bestValue + candidate',\n l1: 'bestValue + candidate',\n l2: 'bestValue + candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceInitValues: {[key: string]: string} = {\n max: '_A[offset]',\n min: '_A[offset]',\n mean: '0',\n sum: '0',\n prod: '1',\n sumSquare: '0',\n logSumExp: '0',\n l1: '0',\n l2: '0',\n logSum: '0'\n};\n\nconst reduceOutputValues: {[key: string]: string} = {\n max: 'bestValue',\n min: 'bestValue',\n sum: 'bestValue',\n prod: 'bestValue',\n sumSquare: 'bestValue',\n logSumExp: 'log(bestValue)',\n l1: 'bestValue',\n l2: 'sqrt(bestValue)',\n logSum: 'log(bestValue)'\n};\n\nconst getInnerMostAxes = (numInnerAxes: number, rank: number): number[] => {\n const res = [];\n for (let i = rank - numInnerAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n};\n\nconst computeOutAndReduceShapes = (shape: readonly number[], axes: readonly number[]): [number[], number[]] => {\n const outputShape = [];\n const rank = shape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputShape.push(shape[dim]);\n }\n }\n const reduceShape = axes.map(dim => shape[dim]);\n return [outputShape, reduceShape];\n};\n\nconst expandShapeToKeepDim = (shape: number[], axes: number[]): number[] => {\n const rank = shape.length + axes.length;\n const expandShape = [];\n let shapeIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n expandShape.push(shape[shapeIdx++]);\n } else {\n expandShape.push(1);\n }\n }\n return expandShape;\n};\n\nconst areAxesInnerMostDims = (axes: number[], rank: number): boolean => {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n};\n\nconst getAxesPermutation = (axes: number[], rank: number): number[] => {\n const res = [];\n if (!areAxesInnerMostDims(axes, rank)) {\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n res.push(i);\n }\n }\n axes.forEach(axis => res.push(axis));\n }\n return res;\n};\n\nexport const createReduceSharedProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceType: string,\n outputDataType: DataType, outputShape: number[], reduceShape: number[]): ProgramInfo 
=> {\n const inputShape = inputs[0].dims;\n\n const outputSize = ShapeUtil.size(outputShape);\n const reduceSize = ShapeUtil.size(reduceShape);\n\n const input = inputVariable('_A', inputs[0].dataType, inputShape);\n const output = outputVariable('output', outputDataType, outputShape);\n\n const workgroupSize = 32;\n\n const sharedMemorySnippet = `\n var aBestValues : array;\n `;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('reduceSize', 'u32').declareVariables(input, output)}\n ${sharedMemorySnippet}\n fn DIV_CEIL(a : u32, b : u32) -> u32 {\n return ((a - 1u) / b + 1u);\n }\n ${shaderHelper.mainStart(workgroupSize)}\n\n let outputIndex = global_idx / ${workgroupSize};\n let offset = outputIndex * uniforms.reduceSize;\n\n var bestValue = f32(${reduceInitValues[reduceType]});\n let Length = uniforms.reduceSize;\n for (var k = local_idx; k < Length; k = k + ${workgroupSize}) {\n let candidate = f32(${input.getByOffset('offset + k')});\n bestValue = ${reduceOps[reduceType]};\n }\n aBestValues[local_idx] = bestValue;\n workgroupBarrier();\n\n var reduceSize = min(Length, ${workgroupSize}u);\n for (var currentSize = reduceSize / 2u; reduceSize > 1u;\n currentSize = reduceSize / 2u) {\n let interval = DIV_CEIL(reduceSize, 2u);\n if (local_idx < currentSize) {\n let candidate = aBestValues[local_idx + interval];\n bestValue = ${reduceSharedOps[reduceType]};\n aBestValues[local_idx] = bestValue;\n }\n reduceSize = interval;\n workgroupBarrier();\n }\n\n if (local_idx == 0u) {\n ${\n output.setByOffset(\n 'outputIndex',\n `${\n reduceType === 'mean' ? `${output.type.storage}(bestValue / f32(uniforms.reduceSize))` :\n `${output.type.storage}(${reduceOutputValues[reduceType]})`}`)};\n }\n }`;\n\n // One work group is responsible for only one element of output.\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: outputSize},\n programUniforms: [{type: DataType.uint32, data: reduceSize}]\n }),\n };\n };\n\nconst reduceCommon =\n (context: ComputeContext, name: string, attributes: ReduceAttributes,\n reduceType: 'sum'|'sumSquare'|'prod'|'min'|'max'|'mean'|'logSumExp'|'l1'|'l2'|'logSum'): void => {\n const updatedAttributes: ReduceAttributes =\n context.inputs.length === 1 ? 
attributes : createReduceAttributesFromInputs(context.inputs, attributes);\n\n let updatedAxes = updatedAttributes.axes;\n if (updatedAxes.length === 0 && !updatedAttributes.noopWithEmptyAxes) {\n updatedAxes = context.inputs[0].dims.map((_dim, i) => i);\n }\n const normalizeAxes = ShapeUtil.normalizeAxes(updatedAxes, context.inputs[0].dims.length);\n\n let axes = normalizeAxes;\n let input = context.inputs[0];\n const permutedAxes = getAxesPermutation(axes, context.inputs[0].dims.length);\n if (permutedAxes.length > 0) {\n input = context.compute(\n createTransposeProgramInfo(context.inputs[0], permutedAxes), {inputs: [0], outputs: [-1]})[0];\n axes = getInnerMostAxes(axes.length, input.dims.length);\n }\n\n const [outputShape, reduceShape] = computeOutAndReduceShapes(input.dims, axes);\n let finalOutputShape = outputShape;\n if (updatedAttributes.keepDims) {\n finalOutputShape = expandShapeToKeepDim(outputShape, normalizeAxes);\n }\n\n context.compute(\n createReduceSharedProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['type']}, [input], reduceType,\n context.inputs[0].dataType, finalOutputShape, reduceShape),\n {inputs: [input]});\n };\n\nexport const reduceMeanShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMeanShared', attributes, 'mean');\n};\n\nexport const reduceL1Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL1Shared', attributes, 'l1');\n};\n\nexport const reduceL2Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL2Shared', attributes, 'l2');\n};\n\nexport const reduceLogSumExpShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumExpShared', attributes, 'logSumExp');\n};\n\nexport const reduceMaxShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMaxShared', attributes, 'max');\n};\n\nexport const reduceMinShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMinShared', attributes, 'min');\n};\n\nexport const reduceProdShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceProdShared', attributes, 'prod');\n};\n\nexport const reduceSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumShared', attributes, 'sum');\n};\n\nexport const reduceSumSquareShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumSquareShared', attributes, 'sumSquare');\n};\n\nexport const reduceLogSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumShared', attributes, 'logSum');\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\nimport {reduceL1Shared, reduceL2Shared, reduceLogSumExpShared, reduceLogSumShared, reduceMaxShared, reduceMeanShared, reduceMinShared, reduceProdShared, reduceSumShared, reduceSumSquareShared} from './reduce-shared';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('Reduce op requires 1 or 2 inputs.');\n }\n\n if (inputs.length === 2 && inputs[1].dims.length !== 1) {\n throw new Error('Invalid axes input dims.');\n }\n};\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n noopWithEmptyAxes: boolean;\n axes: number[];\n}\n\nexport type ReduceOp =\n (input: IndicesHelper, output: IndicesHelper,\n axes: readonly number[]) => [string, string, string, string, ...string[]];\n\nconst noOp: ReduceOp = (input) => ['', '', `var value = ${input.getByIndices('input_indices')};`, ''];\nexport const createReduceProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceOp: ReduceOp,\n axesInput: number[], outputDataType: DataType, keepDims = false, noopWithEmptyAxes = false): ProgramInfo => {\n const outputShape: number[] = [];\n const inputShape = inputs[0].dims;\n const inputRank = inputShape.length;\n const axes = ShapeUtil.normalizeAxes(axesInput, inputRank);\n const reduceOnAllAxes = !noopWithEmptyAxes && axes.length === 0;\n inputShape.forEach((d, i) => {\n if (reduceOnAllAxes || axes.indexOf(i) >= 0) {\n if (keepDims) {\n outputShape.push(1);\n } // else { // skip this axis}\n } else {\n outputShape.push(d);\n }\n });\n const outputRank = outputShape.length;\n const outputSize = ShapeUtil.size(outputShape);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = []; // copy output indexes to input indexes\n\n const input = inputVariable('_A', inputs[0].dataType, inputRank);\n const output = outputVariable('output', outputDataType, outputRank);\n const ops = reduceOp(input, output, axes);\n let reduceOps = ops[2];\n\n for (let k = 0, l = 0; k < inputRank; k++) {\n // if this axis is reduced\n if (reduceOnAllAxes || axes.indexOf(k) >= 0) {\n if (keepDims) {\n l++;\n }\n // loop over the d-th axis\n reduceOps = `for(var j${k}: u32 = 0; j${k} < ${inputShape[k]}; j${k}++) {\n ${ops[2].includes('last_index') ? `let last_index = j${k};` : ''}\n ${input.indicesSet('input_indices', k, `j${k}`)}\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`${input.indicesSet('input_indices', k, output.indicesGet('output_indices', l))};`);\n l++;\n }\n }\n return `\n\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var input_indices: ${input.type.indices};\n let output_indices = ${output.offsetToIndices('global_idx')};\n\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${ops[1]}\n ${reduceOps}\n ${ops[3]}\n ${ops.length === 4 ? 
output.setByOffset('global_idx', 'value') : ops.slice(4).join('\\n')}\n }`;\n };\n\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)]\n }),\n };\n };\n\nexport const createReduceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: ReduceAttributes): ReduceAttributes => {\n const axes: number[] = [];\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => axes.push(Number(v)));\n }\n return createAttributeWithCacheKey(\n {axes, keepDims: attributes.keepDims, noopWithEmptyAxes: attributes.noopWithEmptyAxes});\n };\n\nconst runReduceProgram =\n (context: ComputeContext, name: string, attributes: ReduceAttributes, reduceOp: ReduceOp): void => {\n const inputs = context.inputs;\n const updatedAttributes: ReduceAttributes =\n inputs.length === 1 ? attributes : createReduceAttributesFromInputs(inputs, attributes);\n\n context.compute(\n createReduceProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['rank']}, [inputs[0]],\n updatedAttributes.noopWithEmptyAxes && updatedAttributes.axes.length === 0 ? noOp : reduceOp,\n updatedAttributes.axes, inputs[0].dataType, updatedAttributes.keepDims,\n updatedAttributes.noopWithEmptyAxes),\n {inputs: [0]});\n };\n\nconst reduceLogSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSum', attributes, reduceOp);\n};\n\nconst reduceL1Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += abs(${input.getByIndices('input_indices')});`,\n '',\n ];\n runReduceProgram(context, 'ReduceL1', attributes, reduceOp);\n};\n\nconst reduceL2Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += (t * t);`,\n 'value = sqrt(value);',\n ];\n runReduceProgram(context, 'ReduceL2', attributes, reduceOp);\n};\n\nconst reduceLogSumExpNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += exp(${input.getByIndices('input_indices')});`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSumExp', attributes, reduceOp);\n};\n\nconst reduceMaxNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(input.indicesSet('input_indices', k, 0));\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = max(value, 
${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMax', attributes, reduceOp);\n};\n\nconst reduceMeanNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output, axes) => {\n let size = 1.0;\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n // TODO: this depends on the input dims. If we want to use uniform, this need to be updated.\n size *= context.inputs[0].dims[k];\n }\n }\n\n return [\n 'var sum = f32(0);',\n '',\n `sum += f32(${input.getByIndices('input_indices')});`,\n `let value = ${output.type.value}(sum / ${size});`,\n ];\n };\n runReduceProgram(context, 'ReduceMean', attributes, reduceOp);\n};\n\nconst reduceMinNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = min(value, ${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMin', attributes, reduceOp);\n};\n\nconst reduceProdNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(1);`,\n '',\n `value *= ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceProd', attributes, reduceOp);\n};\n\nconst reduceSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceSum', attributes, reduceOp);\n};\n\nconst reduceSumSquareNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += t * t;`,\n '',\n ];\n runReduceProgram(context, 'ReduceSumSquare', attributes, reduceOp);\n};\n\nconst useNaiveReduceMethod =\n (shape: readonly number[], axes: readonly number[], noopWithEmptyAxes: boolean): boolean => {\n if (axes.length === 0) {\n return noopWithEmptyAxes;\n }\n\n let outputSize = 1;\n let reduceSize = 1;\n for (let dim = 0; dim < axes.length; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputSize *= shape[dim];\n } else {\n reduceSize *= shape[dim];\n }\n }\n\n // The condition data is very rough, although considering the count of Execution Unit (EU), the potential\n // work groups in a EU and the counts of loops in the naive and shared methods, also doing experiments\n // on some machines.\n return reduceSize < 32 && outputSize > 1024;\n };\n\nexport const reduceMean = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMeanNaive(context, attributes);\n } else {\n reduceMeanShared(context, attributes);\n }\n};\n\nexport const reduceL1 = (context: 
ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL1Naive(context, attributes);\n } else {\n reduceL1Shared(context, attributes);\n }\n};\n\nexport const reduceL2 = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL2Naive(context, attributes);\n } else {\n reduceL2Shared(context, attributes);\n }\n};\n\nexport const reduceLogSumExp = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumExpNaive(context, attributes);\n } else {\n reduceLogSumExpShared(context, attributes);\n }\n};\n\nexport const reduceMax = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMaxNaive(context, attributes);\n } else {\n reduceMaxShared(context, attributes);\n }\n};\n\nexport const reduceMin = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMinNaive(context, attributes);\n } else {\n reduceMinShared(context, attributes);\n }\n};\n\nexport const reduceProd = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceProdNaive(context, attributes);\n } else {\n reduceProdShared(context, attributes);\n }\n};\n\nexport const reduceSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumNaive(context, attributes);\n } else {\n reduceSumShared(context, attributes);\n }\n};\n\nexport const reduceSumSquare = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumSquareNaive(context, attributes);\n } else {\n reduceSumSquareShared(context, attributes);\n }\n};\n\nexport const reduceLogSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumNaive(context, attributes);\n } else {\n reduceLogSumShared(context, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. 
Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createReduceProgramInfo, ReduceOp} from './reduce';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('ArgMinMaxOp op requires 1 or 2 inputs.');\n }\n if (inputs[0].dataType !== DataType.float) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport interface ArgMinMaxAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n axis: number;\n selectLastIndex: number;\n}\n\nexport const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '<=' : '<'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'ArgMin', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const argMax = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '>=' : '>'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'argMax', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const parseArgMinMaxAttributes = (attributes: Record): ArgMinMaxAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext, GpuDataType, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, tensorTypeToWsglValueType, UniformDataElementType, UniformsArrayType} from './common';\n\nexport const enum AttentionQkvFormat {\n unknown, // enum value not set, or depends on qkv projection implementation details\n qkvBNSH, // for non-packed qkv, permuted\n qkvBSNH, // for non-packed qkv, not permuted, used by memory efficient attention or MultiHeadAttention\n qkvBSN3H, // for TRT fused attention, qkv are packed\n qkvBNSHqkvBS3NH, // for TRT fused causal attention, data has two formats (qkv is 3BNSH, gemm_buffer is BS3NH)\n qKvBSNHxBSN2H, // for TRT fused cross attention, kv are packed\n qkvTNH, // for memory efficient attention, qkv are not packed, and paddings are removed.\n qkvTN3H, // for TRT fused attention, qkv are packed and paddings are removed\n}\n\nexport const enum AttentionMaskType {\n none, // No mask\n mask1dKeySeqLen, // [batch_size], key sequence length\n mask1dEndStart, // [2 * batch_size] with end positions and start positions\n mask1DKeySeqLenStart, // [3 * batch_size + 2] with [key_len[0], ..., key_len[batch_size - 1], query_start[0],\n // ..., query_start[batch_size - 1], query_end[batch_size - 1], key_start[0], ...,\n // key_start[batch_size - 1], key_end[batch_size - 1]]\n mask2dDummy, // dummy mask with shape [1, 1] or [batch_size, 1]. It has same effect as no mask.\n mask2dKeyPadding, // [batch_size, total_sequence_length]\n mask3dAttention, // [batch_size, sequence_length, total_sequence_length]\n mask4dMegatron, // Megatron causal mask with shape [batch_size, 1, max_sequence_length, max_sequence_length]\n maskUnknown\n}\n\nexport interface AttentionParameters {\n batchSize: number;\n sequenceLength: number;\n pastSequenceLength: number;\n kvSequenceLength: number;\n totalSequenceLength: number;\n maxSequenceLength: number;\n inputHiddenSize: number;\n hiddenSize: number;\n vHiddenSize: number;\n headSize: number;\n vHeadSize: number;\n numHeads: number;\n kvNumHeads?: number;\n nReps?: number;\n isUnidirectional?: boolean;\n pastPresentShareBuffer: boolean;\n maskFilterValue?: number;\n maskType: AttentionMaskType;\n scale: number;\n broadcastResPosBias: boolean;\n passPastInKv: boolean;\n qkvFormat: AttentionQkvFormat;\n isPastkvBSNH?: boolean;\n}\n\nexport interface AttentionAttrs {\n numHeads: number;\n kvNumHeads?: number;\n isUnidirectional?: number;\n maskFilterValue?: number;\n scale: number;\n doRotary: number;\n qkvHiddenSizes: number[];\n pastPresentShareBuffer: boolean;\n}\n\nconst validateAttentionInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = 
num_heads * v_head_size\n\n // When past state is used, Q, K and V should have same hidden size (unless we split it into past_key and past_value).\n\n // Input shapes:\n // input (Q/K/V) : (B, S, D_i)\n // weights (Q/K/V) : (D_i, D + D + D_v)\n // bias (Q/K/V) : (D + D + D_v)\n // mask_index : see below\n // past (K/V) : (2, B, N, P, H) or NULL\n // relative_position_bias : (B, N, S, T) or NULL\n\n // For mask_index, the following shapes are supported:\n // NULL, (B, 1), (1, 1)\n // (B), (2 * B), (3 * B + 2)\n // (B, T)\n // (B, S, T)\n // (B, 1, M, M)\n //\n // When a model is pruned (like some attention heads are removed in Q/K/V), input_hidden_size could be larger\n // than hidden dimension of Q, K and V.\n\n const input = inputs[0];\n const weights = inputs[1];\n const bias = inputs[2];\n const maskIndex = inputs[3];\n const past = inputs[4];\n const relativePositionBias = inputs[5];\n\n if (past && relativePositionBias) {\n throw new Error('Attention cannot have both past and relative_position_bias');\n }\n\n if (input.dims.length !== 3) {\n throw new Error('Input \"input\" must have 3 dimensions');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[1];\n const inputHiddenSize = input.dims[2];\n\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimensions');\n }\n\n if (weights.dims.length !== 2) {\n throw new Error('Input \"weights\" is expected to have 2 dimensions');\n }\n\n if (weights.dims[0] !== inputHiddenSize) {\n throw new Error('Input 1 dimension 0 should have same length as dimension 2 of input 0');\n }\n\n if (bias.dims[0] !== weights.dims[1]) {\n throw new Error('Input \"bias\" dimension 0 should have same length as dimension 1 of input \"weights\"');\n }\n\n let qHiddenSize = bias.dims[0] / 3;\n let kHiddenSize = qHiddenSize;\n let vHiddenSize = kHiddenSize;\n if (attributes.qkvHiddenSizes.length > 0) {\n if (attributes.qkvHiddenSizes.length !== 3) {\n throw new Error('qkv_hidden_sizes attribute should have 3 elements');\n }\n for (const sz of attributes.qkvHiddenSizes) {\n if (sz % attributes.numHeads !== 0) {\n throw new Error('qkv_hidden_sizes should be divisible by num_heads');\n }\n }\n\n qHiddenSize = attributes.qkvHiddenSizes[0];\n kHiddenSize = attributes.qkvHiddenSizes[1];\n vHiddenSize = attributes.qkvHiddenSizes[2];\n }\n\n const kvSequenceLength = sequenceLength;\n\n if (qHiddenSize !== kHiddenSize) {\n throw new Error('qkv_hidden_sizes first element should be same as the second');\n }\n\n if (bias.dims[0] !== qHiddenSize + kHiddenSize + vHiddenSize) {\n throw new Error('Input \"bias\" dimension 0 should have same length as sum of Q/K/V hidden sizes');\n }\n\n let pastSequenceLength = 0;\n if (past) {\n if (kHiddenSize !== vHiddenSize) {\n throw new Error('Input \"past\" expect k_hidden_size == v_hidden_size');\n }\n if (past.dims.length !== 5) {\n throw new Error('Input \"past\" must have 5 dimensions');\n }\n if (past.dims[0] !== 2) {\n throw new Error('Input \"past\" first dimension must be 2');\n }\n if (past.dims[1] !== batchSize) {\n throw new Error('Input \"past\" second dimension must be batch_size');\n }\n if (past.dims[2] !== attributes.numHeads) {\n throw new Error('Input \"past\" third dimension must be num_heads');\n }\n if (past.dims[4] !== kHiddenSize / attributes.numHeads) {\n throw new Error('Input \"past\" fifth dimension must be k_hidden_size / num_heads');\n }\n\n if (!attributes.pastPresentShareBuffer) {\n pastSequenceLength = past.dims[3];\n }\n // TODO: handle 
past_seq_len\n }\n\n const totalSequenceLength = kvSequenceLength + pastSequenceLength;\n const maxSequenceLength = -1;\n\n const maskType = AttentionMaskType.none;\n if (maskIndex) {\n // maskType = AttentionMaskType.MASK_UNKNOWN;\n // TODO: handle mask\n throw new Error('Mask not supported');\n }\n\n if (past) {\n throw new Error('past is not supported');\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize,\n hiddenSize: qHiddenSize,\n vHiddenSize,\n headSize: Math.floor(qHiddenSize / attributes.numHeads),\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias: false,\n passPastInKv: false,\n qkvFormat: AttentionQkvFormat.qkvBNSH,\n };\n};\n\nconst createInPlaceSoftmaxProgramInfo = (_context: ComputeContext, input: TensorView, n: number, d: number) => {\n const components = getMaxComponents(d);\n let WG = 64;\n const dComp = d / components;\n if (dComp < WG) {\n WG = 1;\n } else if (dComp / 8 < 64) {\n WG = Math.ceil(dComp / 8);\n }\n const elementsPerThread = Math.ceil(d / components / WG);\n const programUniforms: ProgramUniform[] = [\n {type: input.dataType, data: 1 / d}, {type: DataType.uint32, data: dComp},\n {type: DataType.uint32, data: elementsPerThread}\n ];\n const dataType = tensorTypeToWsglStorageType(input.dataType, components);\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = outputVariable('x', input.dataType, input.dims, components);\n const elemValueType = tensorTypeToWsglValueType(input.dataType);\n const uniforms: UniformsArrayType = [\n {name: 'd_inv', type: elemValueType as UniformDataElementType}, {name: 'd_comp', type: 'u32'},\n {name: 'elements_per_thread', type: 'u32'}\n ];\n\n return `\n var thread_max: array;\n var thread_sum: array;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(inputHelper)}\n ${shaderHelper.mainStart([\n WG, 1, 1\n ])}\n let local_offset = local_idx * uniforms.elements_per_thread;\n let offset = workgroup_id.x * uniforms.d_comp + local_offset;\n\n var thread_max_vector = ${f32Type}(-3.402823e+38f);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n thread_max_vector = max(${f32Type}(x[offset + i]), thread_max_vector);\n }\n thread_max[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'thread_max_vector';\n case 2:\n return 'max(thread_max_vector.x, thread_max_vector.y)';\n case 4:\n return 'max(max(thread_max_vector.x, thread_max_vector.y), max(thread_max_vector.z, thread_max_vector.w))';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var max_value = f32(-3.402823e+38f);\n for (var i = 0u; i < ${WG}; i++) {\n max_value = max(thread_max[i], max_value);\n }\n\n var sum_vector = ${f32Type}(0);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n sum_vector += exp(${f32Type}(x[offset + i]) - max_value);\n }\n thread_sum[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'sum_vector';\n case 2:\n return 'sum_vector.x + sum_vector.y';\n case 4:\n return 'sum_vector.x + sum_vector.y + sum_vector.z + sum_vector.w';\n default:\n throw 
new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var sum: f32 = 0;\n for (var i = 0u; i < ${WG}; i++) {\n sum += thread_sum[i];\n }\n\n if (sum == 0) {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n x[offset + i] = ${inputHelper.type.value}(uniforms.d_inv);\n }\n } else {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n var f32input = ${f32Type}(x[offset + i]);\n x[offset + i] = ${inputHelper.type.value}(exp(f32input - max_value) / sum);\n }\n }\n }`;\n };\n\n return {\n name: 'AttentionProbsSoftmax',\n shaderCache: {hint: `${WG};${dataType};${components}`},\n getShaderSource,\n getRunData: () => ({outputs: [], dispatchGroup: {x: n}, programUniforms}),\n };\n};\n\nconst createAttentionProbsProgramInfo =\n (context: ComputeContext, q: TensorView, key: TensorView, pastKey: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs,\n pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n const probsShape = [parameters.batchSize, parameters.numHeads, parameters.sequenceLength, totalSequenceLength];\n const presentKey = parameters.kvNumHeads === undefined && context.outputCount > 1;\n const presentKeyShape = presentKey ?\n [parameters.batchSize, parameters.numHeads, totalSequenceLength, parameters.headSize] :\n undefined;\n\n // TODO: handle mask\n\n const alpha = attributes.scale === 0 ? 1.0 / Math.sqrt(parameters.headSize) : attributes.scale;\n const components = getMaxComponents(parameters.headSize);\n const vectorizedHeadSize = parameters.headSize / components;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(totalSequenceLength / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: parameters.sequenceLength}, {type: DataType.uint32, data: vectorizedHeadSize},\n {type: DataType.uint32, data: totalSequenceLength}, {type: DataType.uint32, data: parameters.numHeads},\n {type: DataType.float, data: alpha}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: parameters.kvSequenceLength}\n ];\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (pastKey) {\n inputDependencies.push('type');\n }\n if (relativePositionBias) {\n inputDependencies.push('type');\n }\n const outputs = [{dims: probsShape, dataType: q.dataType, gpuDataType: GpuDataType.default}];\n if (presentKey) {\n outputs.push({dims: presentKeyShape!, dataType: q.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const qInput = inputVariable('q', q.dataType, q.dims, components);\n const kInput = inputVariable('key', key.dataType, key.dims, components);\n const inputVars = [qInput, kInput];\n if (pastKey) {\n const pastKeyInput = inputVariable('past_key', pastKey.dataType, pastKey.dims, components);\n inputVars.push(pastKeyInput);\n }\n if (relativePositionBias) {\n inputVars.push(\n inputVariable('relative_position_bias', relativePositionBias.dataType, relativePositionBias.dims));\n }\n const output = outputVariable('output', q.dataType, probsShape);\n const outputVars = [output];\n if (presentKey) {\n outputVars.push(outputVariable('present_key', q.dataType, presentKeyShape!, 
components));\n }\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'alpha', type: 'f32' as UniformDataElementType},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n\n var tileQ: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n // x holds the N and y holds the M\n let headIdx = workgroup_id.z;\n let m = workgroup_id.y * TILE_SIZE;\n let n = workgroup_id.x * TILE_SIZE;\n let qOffset = uniforms.M * uniforms.K * headIdx + m * uniforms.K;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n let kOffset = uniforms.kv_sequence_length * uniforms.K * headIdx;\n let pastKeyOffset = uniforms.past_sequence_length * uniforms.K * headIdx;`;\n } else {\n return `\n let kOffset = uniforms.N * uniforms.K * headIdx + n * uniforms.K;`;\n }\n })()}\n ${presentKey ? 'let presentKeyOffset = headIdx * uniforms.N * uniforms.K;' : ''}\n var value = ${f32Type}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (global_id.y < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = q[qOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n if (n + local_id.y < uniforms.N && w + local_id.x < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n if (n + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_key[pastKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x];\n } else {\n tileK[idx] =\n key[kOffset + (n + local_id.y - uniforms.past_sequence_length) * uniforms.K + w + local_id.x];\n }`;\n } else {\n return 'tileK[idx] = key[kOffset + local_id.y * uniforms.K + w + local_id.x];';\n }\n })()}\n ${\n presentKey ?\n 'present_key[presentKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x] = tileK[idx];' :\n ''}\n }\n workgroupBarrier();\n\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += ${f32Type}(tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * local_id.x + k]);\n }\n\n workgroupBarrier();\n }\n\n let headOffset = headIdx * uniforms.M * uniforms.N;\n if (global_id.y < uniforms.M && global_id.x < uniforms.N) {\n let outputIdx = headOffset + global_id.y * uniforms.N + global_id.x;\n var sum: f32 = ${(() => {\n switch (components) {\n case 1:\n return 'value';\n case 2:\n return 'value.x + value.y';\n case 4:\n return 'value.x + value.y + value.z + value.w';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n output[outputIdx] = ${output.type.value} (sum * uniforms.alpha) + ${\n relativePositionBias ? 
'relative_position_bias[outputIdx]' : '0.0'};\n }\n }`;\n };\n return {\n name: 'AttentionProbs',\n shaderCache: {\n hint: `${components};${relativePositionBias !== undefined};${pastKey !== undefined};${context.outputCount}`,\n inputDependencies\n },\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\n\nconst createVxAttentionScoreProgramInfo =\n (context: ComputeContext, probs: TensorView, v: TensorView, pastValue: TensorView|undefined,\n params: AttentionParameters, pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + params.kvSequenceLength;\n const nReps = params.nReps ? params.nReps : 1;\n const repeatedVHiddenSize = params.vHiddenSize * nReps;\n const presentValue = params.kvNumHeads == null && context.outputCount > 1;\n const presentValueShape =\n presentValue ? [params.batchSize, params.numHeads, totalSequenceLength, params.headSize] : undefined;\n const outputShape = [params.batchSize, params.sequenceLength, repeatedVHiddenSize];\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(params.vHeadSize / TILE_SIZE),\n y: Math.ceil(params.sequenceLength / TILE_SIZE),\n z: params.batchSize * params.numHeads\n };\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: params.sequenceLength}, {type: DataType.uint32, data: totalSequenceLength},\n {type: DataType.uint32, data: params.vHeadSize}, {type: DataType.uint32, data: params.numHeads},\n {type: DataType.uint32, data: repeatedVHiddenSize}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] =\n pastValue ? ['type', 'type', 'type'] : ['type', 'type'];\n const outputs = [{dims: outputShape, dataType: probs.dataType, gpuDataType: GpuDataType.default}];\n if (presentValue) {\n outputs.push({dims: presentValueShape!, dataType: probs.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const probsHelper = inputVariable('probs', probs.dataType, probs.dims);\n const vHelper = inputVariable('v', v.dataType, v.dims);\n const inputVars = [probsHelper, vHelper];\n if (pastValue) {\n inputVars.push(inputVariable('past_value', pastValue.dataType, pastValue.dims));\n }\n const output = outputVariable('output', probs.dataType, outputShape);\n const outputVars = [output];\n if (presentValue) {\n outputVars.push(outputVariable('present_value', probs.dataType, presentValueShape!));\n }\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'v_hidden_size', type: 'u32'},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileQ: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let headIdx = workgroup_id.z;\n let m = global_id.y;\n let n = global_id.x;\n\n let offsetA = headIdx * (uniforms.M * uniforms.K) + m * uniforms.K;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n let pastValueOffset = headIdx * uniforms.N * uniforms.past_sequence_length + n;\n let vOffset = headIdx * uniforms.N * uniforms.kv_sequence_length + 
n;\n `;\n } else {\n return `\n let offsetB = headIdx * uniforms.N * uniforms.K + n;\n `;\n }\n })()}\n ${presentValue ? 'let presentValueOffset = headIdx * uniforms.N * uniforms.K + n;' : ''}\n var value = ${probsHelper.type.storage}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = probs[offsetA + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n if (w + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_value[pastValueOffset + (w + local_id.y) * uniforms.N];\n } else {\n tileK[idx] = v[vOffset + (w + local_id.y - uniforms.past_sequence_length) * uniforms.N];\n }\n `;\n } else {\n return `\n tileK[idx] = v[offsetB + (w + local_id.y) * uniforms.N];\n `;\n }\n })()}\n ${presentValue ? 'present_value[presentValueOffset + (w + local_id.y) * uniforms.N] = tileK[idx];' : ''}\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * k + local_id.x];\n }\n workgroupBarrier();\n }\n\n // we need to transpose output from BNSH_v to BSND_v\n let batchIdx = workgroup_id.z / uniforms.num_heads;\n let currentBatchHeadNumber = workgroup_id.z % uniforms.num_heads;\n if (m < uniforms.M && n < uniforms.N) {\n let outputIdx = batchIdx * uniforms.M * uniforms.v_hidden_size + m * uniforms.v_hidden_size\n + currentBatchHeadNumber * uniforms.N + n;\n output[outputIdx] = value;\n }\n }`;\n };\n\n return {\n name: 'AttentionScore',\n shaderCache: {hint: `${pastValue !== undefined};${context.outputCount}`, inputDependencies},\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const applyAttention =\n (context: ComputeContext, q: TensorView, k: TensorView, v: TensorView, _maskIndex: TensorView|undefined,\n _past: TensorView|undefined, pastKey: TensorView|undefined, pastValue: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs) => {\n const outputCount = context.outputCount;\n const pastSequenceLength =\n parameters.kvNumHeads !== undefined || outputCount > 1 ? parameters.pastSequenceLength : 0;\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n\n const inputsK = (parameters.kvNumHeads === undefined && outputCount > 1 && pastKey) ? [q, k, pastKey] : [q, k];\n if (relativePositionBias) {\n inputsK.push(relativePositionBias);\n }\n\n // Run AttentionProbs\n const probs = context.compute(\n createAttentionProbsProgramInfo(\n context, q, k, outputCount > 1 ? pastKey : undefined, relativePositionBias, parameters, attributes,\n pastSequenceLength),\n {inputs: inputsK, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [-1, 1] : [-1]})[0];\n\n // Run Softmax\n context.compute(\n createInPlaceSoftmaxProgramInfo(\n context, probs, parameters.batchSize * parameters.numHeads * parameters.sequenceLength,\n totalSequenceLength),\n {inputs: [probs], outputs: []});\n\n // Run AttrionScore\n const inputsV =\n (parameters.kvNumHeads === undefined && outputCount > 1 && pastValue) ? [probs, v, pastValue] : [probs, v];\n context.compute(\n createVxAttentionScoreProgramInfo(\n context, probs, v, outputCount > 1 && pastValue ? 
pastValue : undefined, parameters, pastSequenceLength),\n {inputs: inputsV, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [0, 2] : [0]});\n };\n\nconst prepare = (context: ComputeContext, parameters: AttentionParameters) => {\n const outputShape = [\n parameters.batchSize,\n parameters.numHeads,\n parameters.sequenceLength,\n parameters.headSize,\n ];\n const M = parameters.sequenceLength;\n const K = parameters.inputHiddenSize;\n const N = parameters.headSize;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(parameters.headSize / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const inputs = [context.inputs[0], context.inputs[1], context.inputs[2]];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: M}, {type: DataType.uint32, data: K}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: parameters.numHeads}, {type: DataType.uint32, data: parameters.headSize},\n {type: DataType.uint32, data: parameters.hiddenSize},\n {type: DataType.uint32, data: parameters.hiddenSize + parameters.hiddenSize + parameters.vHiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const outputQ = outputVariable('output_q', inputs[0].dataType, outputShape);\n const outputK = outputVariable('output_k', inputs[0].dataType, outputShape);\n const outputV = outputVariable('output_v', inputs[0].dataType, outputShape);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims);\n const weight = inputVariable('weight', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const dataType = input.type.storage;\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'num_heads', type: 'u32'},\n {name: 'head_size', type: 'u32'}, {name: 'hidden_size', type: 'u32'}, {name: 'ldb', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileInput: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightQ: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightK: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightV: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, weight, bias, outputQ, outputK, outputV)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let batchIndex = workgroup_id.z / uniforms.num_heads;\n let headNumber = workgroup_id.z % uniforms.num_heads;\n let m = global_id.y;\n let n = global_id.x;\n\n let inputOffset = batchIndex * (uniforms.M * uniforms.K) + m * uniforms.K;\n let biasOffsetQ = headNumber * uniforms.head_size;\n let biasOffsetK = uniforms.hidden_size + biasOffsetQ;\n let biasOffsetV = uniforms.hidden_size + biasOffsetK;\n\n var valueQ = ${dataType}(0);\n var valueK = ${dataType}(0);\n var valueV = ${dataType}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileInput[TILE_SIZE * local_id.y + local_id.x] = input[inputOffset + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n let offset = n + (w + local_id.y) * uniforms.ldb;\n tileWeightQ[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetQ + offset];\n tileWeightK[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetK + offset];\n tileWeightV[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetV + offset];\n }\n 
workgroupBarrier();\n for (var k: u32 = 0u; k ({\n outputs: [\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n ],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n },\n {inputs, outputs: [-1, -1, -1]});\n};\n\nexport const attention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateAttentionInputs(context.inputs, attributes);\n\n const [q, k, v] = prepare(context, params);\n\n return applyAttention(\n context, q, k, v, context.inputs[4], undefined, undefined, undefined, context.inputs[5], params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface BatchNormAttributes extends AttributeWithCacheKey {\n readonly epsilon: number;\n readonly momentum: number;\n readonly spatial: boolean;\n readonly trainingMode: boolean;\n readonly format: 'NHWC'|'NCHW';\n readonly outputCount: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: BatchNormAttributes): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs');\n }\n\n const checkShapeEqual = (actual: readonly number[], expected: readonly number[], message: string) => {\n const r = expected.length;\n if (r !== actual.length) {\n throw new Error(`${message}: num dimensions != ${r}`);\n }\n expected.forEach((v, i) => {\n if (v !== actual[i]) {\n throw new Error(`${message}: dim[${i}] do not match`);\n }\n });\n };\n\n if (inputs[0].dims.length > 1) {\n const shape = attributes.format === 'NHWC' ?\n (attributes.spatial ? inputs[0].dims.slice(-1) :\n inputs[0].dims.slice(-1).concat(inputs[0].dims.slice(1, inputs[0].dims.length - 1))) :\n inputs[0].dims.slice(1, attributes.spatial ? 2 : undefined);\n checkShapeEqual(inputs[1].dims, shape, 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, shape, 'Invalid input B');\n checkShapeEqual(inputs[3].dims, shape, 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, shape, 'Invalid input var');\n } else {\n checkShapeEqual(inputs[1].dims, [1], 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, [1], 'Invalid input B');\n checkShapeEqual(inputs[3].dims, [1], 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, [1], 'Invalid input var');\n }\n};\n\nconst createBatchNormInferenceProgramInfo =\n (inputs: readonly TensorView[], attributes: BatchNormAttributes): ProgramInfo => {\n const {epsilon, spatial, format} = attributes;\n const yShape = inputs[0].dims;\n const components = spatial ? getMaxComponents(yShape[yShape.length - 1]) : 1;\n const cComponents = format === 'NHWC' && yShape.length > 1 ? 
components : 1;\n const outputSize = ShapeUtil.size(yShape) / components;\n // Only support uniforms for opset version >= 9 (spatial = true).\n const useShapesUniforms = spatial;\n const shapeOrRank = useShapesUniforms ? yShape.length : yShape;\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims, cComponents);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims, cComponents);\n const inputMean = inputVariable('inputMean', inputs[3].dataType, inputs[3].dims, cComponents);\n const inputVar = inputVariable('inputVar', inputs[4].dataType, inputs[4].dims, cComponents);\n const y = outputVariable('y', inputs[0].dataType, shapeOrRank, components);\n // TODO: support inputs with different data type. Current we need to make sure all inputs have the same data type.\n // Otherwise, the shader compilation will fail.\n const calcCOffset = (): string => {\n let cOffset = '';\n if (spatial) {\n cOffset = `let cOffset = ${\n yShape.length === 1 ? '0u' :\n format === 'NHWC' ? `outputIndices[${yShape.length - 1}] / ${components}` :\n 'outputIndices[1]'};`;\n } else {\n if (format === 'NCHW') {\n cOffset = `\n ${y.indicesSet('outputIndices', '0', '0')}\n let cOffset = ${y.indicesToOffset('outputIndices')};`;\n } else {\n // update C channel.\n cOffset = `var cIndices = ${scale.type.indices}(0);\n cIndices[0] = outputIndices[${yShape.length - 1}];`;\n // update D1 x ... x Dn channels.\n for (let i = 1; i < scale.rank; i++) {\n cOffset += `cIndices[${i}] = outputIndices[${i}];`;\n }\n cOffset += `let cOffset = ${scale.indicesToOffset('cIndices')};`;\n }\n }\n return cOffset;\n };\n const getInferenceModeShaderSource = (helper: ShaderHelper) => `\n const epsilon = ${epsilon};\n ${helper.registerUniform('outputSize', 'u32').declareVariables(x, scale, bias, inputMean, inputVar, y)}\n ${helper.mainStart()}\n ${helper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${y.offsetToIndices(`global_idx * ${components}`)};\n ${calcCOffset()}\n let scale = ${scale.getByOffset('cOffset')};\n let bias = ${bias.getByOffset('cOffset')};\n let inputMean = ${inputMean.getByOffset('cOffset')};\n let inputVar = ${inputVar.getByOffset('cOffset')};\n let x = ${x.getByOffset('global_idx')};\n let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias;\n ${y.setByOffset('global_idx', 'value')}\n }`;\n return {\n name: 'BatchNormalization',\n shaderCache: {\n hint: `${attributes.epsilon}_${attributes.format}_${spatial}_${components}`,\n inputDependencies: useShapesUniforms ? 
['rank', 'type', 'type', 'type', 'type'] : undefined,\n },\n getShaderSource: getInferenceModeShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: useShapesUniforms ?\n [\n {type: DataType.uint32, data: outputSize},\n ...createTensorShapeVariables(yShape),\n ] :\n [\n {type: DataType.uint32, data: outputSize},\n ],\n }),\n };\n };\n\nexport const parseBatchNormAttributes = (attributes: Record): BatchNormAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n\nexport const batchNorm = (context: ComputeContext, attributes: Record): void => {\n const {inputs, outputCount} = context;\n const updatedAttributes = parseBatchNormAttributes({...attributes, outputCount});\n if (env.webgpu.validateInputContent) {\n validateInputs(inputs, updatedAttributes);\n }\n if (attributes.trainingMode) {\n throw new Error('BatchNormalization trainingMode is not supported yet.');\n } else {\n context.compute(createBatchNormInferenceProgramInfo(inputs, updatedAttributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![320, 640, 1280].includes(inputs[0].dims[2])) {\n throw new Error('number of channels should be 320, 640 or 1280');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasAddProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims;\n\n const channels = inputs[0].dims[2];\n // since channel number can be only 320/640/1280, it's always divisable by 4\n const outputSize = ShapeUtil.size(outputShape) / 4;\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, outputShape, 4);\n const bias = inputVariable('bias', dataType, [channels], 4);\n const residual = inputVariable('residual', dataType, outputShape, 4);\n const output = outputVariable('output', dataType, outputShape, 4);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const channels = ${channels}u / 4;\n ${shaderHelper.declareVariables(input, bias, residual, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let value = ${input.getByOffset('global_idx')}\n + ${bias.getByOffset('global_idx % channels')} + ${residual.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', 'value')}\n }`;\n\n return {\n name: 'BiasAdd',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasAdd = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasAddProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {MAX_CLIP, MIN_CLIP, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType} from './common';\n\ntype BuiltinFunctionName = string;\ntype ElementwiseCustomExpression = (expression: string) => string;\ntype ElementwiseFunctionCall = BuiltinFunctionName|ElementwiseCustomExpression;\n\nconst createElementwiseProgramShader =\n (shaderHelper: ShaderHelper, datasize: number, inputDataType: number, outputDataType: number,\n funcCall: ElementwiseFunctionCall, additionalImplementation?: string): string => {\n const vecSize = Math.ceil(datasize / 4);\n\n let expression = '';\n if (typeof funcCall === 'string') {\n expression = `${funcCall}(a)`;\n } else {\n expression = funcCall('a');\n }\n\n const input = inputVariable('inputData', inputDataType, [vecSize], 4);\n const output = outputVariable('outputData', outputDataType, [vecSize], 4);\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n\n let a = ${input.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', expression)}\n }`;\n };\n\nconst createElementwiseProgramInfo =\n (input: TensorView, name: string, funcCall: ElementwiseFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType: number = input.dataType): ProgramInfo => ({\n name,\n shaderCache: {hint: cacheKey, inputDependencies: ['type']},\n getShaderSource: shaderHelper => createElementwiseProgramShader(\n shaderHelper, ShapeUtil.size(input.dims), input.dataType, outputDataType, funcCall, additionalImplementation),\n getRunData: (inputTensors) => ({\n outputs: [{dims: input.dims, dataType: outputDataType}],\n dispatchGroup:\n {x: Math.ceil(ShapeUtil.size(inputTensors[0].dims) / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(input.dims) / 4)},\n ],\n })\n });\n\nexport const abs = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Abs', 'abs'));\n};\n\nexport const acos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acos', 'acos'));\n};\n\nexport const acosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acosh', 'acosh'));\n};\n\nexport const asin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asin', 'asin'));\n};\n\nexport const asinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asinh', 'asinh'));\n};\n\nexport const atan = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atan', 'atan'));\n};\nexport const atanh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atanh', 'atanh'));\n};\n\nexport interface CastAttributes extends AttributeWithCacheKey {\n readonly to: number;\n readonly saturate?: boolean;\n}\n\nexport const 
parseCastAttributes = (attributes: Record): CastAttributes =>\n createAttributeWithCacheKey(attributes as {to: number});\n\n\nexport const cast = (context: ComputeContext, attributes: CastAttributes): void => {\n let func: ElementwiseFunctionCall;\n switch (attributes.to) {\n case DataType.float16:\n func = 'vec4';\n break;\n case DataType.float:\n func = 'vec4';\n break;\n case DataType.uint32:\n func = 'vec4';\n break;\n case DataType.int32:\n func = 'vec4';\n break;\n case DataType.bool:\n func = 'vec4';\n break;\n default:\n throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${attributes.to}`);\n }\n context.compute(\n createElementwiseProgramInfo(context.inputs[0], 'Cast', func, undefined, attributes.cacheKey, attributes.to));\n};\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nconst generateClipAttributesFromInputs = (inputs: readonly TensorView[]): ClipAttributes => {\n const min = (inputs.length >= 2 && inputs[1].data !== 0) ? inputs[1].getFloat32Array()[0] : MIN_CLIP;\n const max = (inputs.length >= 3 && inputs[2].data !== 0) ? inputs[2].getFloat32Array()[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const clip = (context: ComputeContext, clipAttributes: ClipAttributes): void => {\n const attributes = context.inputs.length === 1 ? clipAttributes : generateClipAttributesFromInputs(context.inputs);\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(\n createElementwiseProgramInfo(\n context.inputs[0], 'Clip', a => `clamp(${a}, clip_min_, clip_max_)`, `\n const clip_min_: vec4<${dataType}> = vec4(${dataType}(${attributes.min}));\n const clip_max_: vec4<${dataType}> = vec4(${dataType}(${attributes.max}));\n`,\n attributes.cacheKey),\n {inputs: [0]});\n};\n\nexport const ceil = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Ceil', 'ceil'));\n};\n\nexport const cos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cos', 'cos'));\n};\n\nexport const cosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cosh', 'cosh'));\n};\n\nexport interface AlphaAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const parseAlphaAttributes = (attributes: Record): AlphaAttributes =>\n createAttributeWithCacheKey(attributes as {alpha: number});\n\nexport const elu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Elu', a => `elu_vf32(${a})`, `\n const elu_alpha_ = ${dataType}(${attributes.alpha});\n\n fn elu_f32(a: ${dataType}) -> ${dataType} {\n return select((exp(a) - 1.0) * elu_alpha_, a, a >= 0.0);\n }\n\n fn elu_vf32(v: vec4<${dataType}>) -> vec4<${dataType}> {\n return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w));\n }`,\n attributes.cacheKey));\n};\n\nexport const erfImpl = (varType = 'f32') => `\nconst r0: ${varType} = 0.3275911;\nconst r1: ${varType} = 0.254829592;\nconst r2: ${varType} = -0.284496736;\nconst r3: ${varType} = 1.421413741;\nconst r4: ${varType} = -1.453152027;\nconst r5: ${varType} = 1.061405429;\n\nfn erf_vf32(v: vec4<${varType}>) -> vec4<${varType}> {\n let absv = abs(v);\n let x = 1.0 / (1.0 + r0 * absv);\n return sign(v) 
* (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv));\n}`;\n\nexport const erf = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Erf', a => `erf_vf32(${a})`, erfImpl(dataType)));\n};\n\nexport const exp = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Exp', 'exp'));\n};\n\nexport const floor = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Floor', 'floor'));\n};\n\nexport const gelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Gelu', a => `0.5 * ${a} * (1.0 + erf_vf32(${a} * 0.7071067811865475))`, erfImpl(dataType)));\n};\n\nexport const leakyRelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'LeakyRelu', a => `select(leaky_relu_alpha_ * ${a}, ${a}, ${a} >= vec4<${dataType}>(0.0))`,\n `const leaky_relu_alpha_ = ${dataType}(${attributes.alpha});`, attributes.cacheKey));\n};\n\nexport const not = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Not', a => `!${a}`));\n};\n\nexport const neg = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Neg', a => `-${a}`));\n};\n\nexport const reciprocal = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Reciprocal', a => `1.0/${a}`));\n};\n\nexport const relu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Relu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > vec4<${dataType}>(0.0))`));\n};\n\nexport const sigmoid = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sigmoid', a => `(1.0 / (1.0 + exp(-${a})))`));\n};\n\nexport interface HardSigmoidAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n readonly beta: number;\n}\n\nexport const parseHardSigmoidAttributes = (attributes: Record): HardSigmoidAttributes =>\n createAttributeWithCacheKey(attributes as {\n alpha: number;\n beta: number;\n });\n\nexport const hardSigmoid = (context: ComputeContext, attributes: HardSigmoidAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'HardSigmoid',\n a => `max(vec4<${dataType}>(0.0), min(vec4<${dataType}>(1.0), ${attributes.alpha} * ${a} + vec4<${dataType}>(${\n attributes.beta})))`,\n undefined, attributes.cacheKey));\n};\n\nexport const sin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sin', 'sin'));\n};\n\nexport const sinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sinh', 'sinh'));\n};\n\nexport const sqrt = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sqrt', 'sqrt'));\n};\n\nexport const tan = (context: ComputeContext): void => {\n 
context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tan', 'tan'));\n};\n\nexport const tanhExpression = (a: string) => `sign(${a}) * (1 - exp(-2 * abs(${a}))) / (1 + exp(-2 * abs(${a})))`;\n\nexport const tanh = (context: ComputeContext): void => {\n // TODO: revisit after https://github.com/gpuweb/gpuweb/issues/4458 is resolved\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tanh', tanhExpression));\n};\n\nexport const fastGeluImpl = (varType = 'f32') => `\nconst fast_gelu_a: ${varType} = 0.5;\nconst fast_gelu_b: ${varType} = 0.7978845608028654;\nconst fast_gelu_c: ${varType} = 0.035677408136300125;\n\nfn tanh_v(v: vec4<${varType}>) -> vec4<${varType}> {\n return ${tanhExpression('v')};\n}\n`;\n\nexport const fastGeluExpression = (x: string) =>\n `(fast_gelu_a + fast_gelu_a * tanh_v(${x} * (fast_gelu_c * ${x} * ${x} + fast_gelu_b))) * ${x}`;\n\nexport const fastGelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'FastGelu', fastGeluExpression, fastGeluImpl(dataType), undefined,\n context.inputs[0].dataType));\n};\n\nexport const thresholdedRelu = (context: ComputeContext, attributes: AlphaAttributes): number => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'ThresholdedRelu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > thresholded_relu_alpha_)`,\n `const thresholded_relu_alpha_ = vec4<${dataType}>(${attributes.alpha});`, attributes.cacheKey));\n return 0;\n};\n\nexport const log = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Log', 'log'));\n};\n\nexport const quickGeluImpl = (varType: string, alpha: number) => `\nconst alpha = vec4<${varType}>(${alpha});\nconst one = ${varType}(1.0);\nconst zero = ${varType}(0.0);\n\nfn quick_gelu_impl(x: vec4<${varType}>) -> vec4<${varType}> {\n let v = x *alpha;\n var x1 : vec4<${varType}>;\n for (var i = 0; i < 4; i = i + 1) {\n if (v[i] >= zero) {\n x1[i] = one / (one + exp(-v[i]));\n } else {\n x1[i] = one - one / (one + exp(v[i]));\n }\n }\n return x * x1;\n}\n`;\n\nexport const quickGeluExpression = (x: string) => `quick_gelu_impl(${x})`;\n\nexport const quickgelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'QuickGelu', quickGeluExpression, quickGeluImpl(dType, attributes.alpha), attributes.cacheKey,\n context.inputs[0].dataType));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType} from './common';\nimport {erfImpl} from './unary-op';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![2560, 5120, 10240].includes(inputs[0].dims[2])) {\n throw new Error('hidden state should be 2560, 5120 or 10240');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasSplitGeluProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n outputShape[2] = outputShape[2] / 2;\n\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims, 4);\n const bias = inputVariable('bias', inputs[0].dataType, [inputs[0].dims[2]], 4);\n const output = outputVariable('output', inputs[0].dataType, outputShape, 4);\n\n const outputSize = ShapeUtil.size(outputShape) / 4;\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const M_SQRT2 = sqrt(2.0);\n const halfChannels = ${inputs[0].dims[2] / 4 / 2}u;\n\n ${shaderHelper.declareVariables(input, bias, output)}\n\n ${erfImpl(dataType)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let biasIdx = global_idx % halfChannels;\n let batchIndex = global_idx / halfChannels;\n let inputOffset = biasIdx + batchIndex * halfChannels * 2;\n let valueLeft = input[inputOffset] + bias[biasIdx];\n let valueRight = input[inputOffset + halfChannels] + bias[biasIdx + halfChannels];\n let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1);\n\n ${output.setByOffset('global_idx', 'valueLeft * geluRight')}\n }`;\n\n return {\n name: 'BiasSplitGelu',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasSplitGelu = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasSplitGeluProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype BuiltinFunctionName = string;\ntype BinaryCustomExpression = (expressionA: string, expressionB: string) => string;\ntype BinaryFunctionCall = BuiltinFunctionName|BinaryCustomExpression|{\n scalar: BinaryCustomExpression;\n vector: BinaryCustomExpression;\n};\n\nconst createBinaryOpProgramShader =\n (shaderHelper: ShaderHelper, dimsA: readonly number[], dimsB: readonly number[], dimsOutput: readonly number[],\n vectorize: boolean, doBroadcast: boolean, sharedDimensionDivisibleBy4: boolean, funcCall: BinaryFunctionCall,\n typeA: number, typeB: number, typeOutput: number, additionalImplementation?: string) => {\n let expressionScalar: BinaryCustomExpression;\n let expressionVector: BinaryCustomExpression;\n if (typeof funcCall === 'string') {\n expressionScalar = expressionVector = (a, b) => `${funcCall}((${a}),(${b}))`;\n } else if (typeof funcCall === 'function') {\n expressionScalar = expressionVector = funcCall;\n } else {\n expressionScalar = funcCall.scalar;\n expressionVector = funcCall.vector;\n }\n\n const output = outputVariable('outputData', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('aData', typeA, dimsA.length, 4);\n const b = inputVariable('bData', typeB, dimsB.length, 4);\n\n let assignment: string;\n if (vectorize) {\n if (doBroadcast) {\n const isAOneElement = ShapeUtil.size(dimsA) === 1;\n const isBOneElement = ShapeUtil.size(dimsB) === 1;\n const aLastDimDivisibleBy4 = dimsA.length > 0 && dimsA[dimsA.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = dimsB.length > 0 && dimsB[dimsB.length - 1] % 4 === 0;\n if (isAOneElement || isBOneElement) {\n assignment = output.setByOffset(\n 'global_idx',\n expressionVector(\n isAOneElement ? `${a.type.value}(${a.getByOffset('0')}.x)` : a.getByOffset('global_idx'),\n isBOneElement ? 
`${b.type.value}(${b.getByOffset('0')}.x)` : b.getByOffset('global_idx')));\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx * 4u')};\n let offsetA = ${a.broadcastedIndicesToOffset('outputIndices', output)};\n let offsetB = ${b.broadcastedIndicesToOffset('outputIndices', output)};\n ${\n output.setByOffset(\n 'global_idx',\n expressionVector(\n sharedDimensionDivisibleBy4 || aLastDimDivisibleBy4 ?\n a.getByOffset('offsetA / 4u') :\n `${a.type.value}(${a.getByOffset('offsetA / 4u')}[offsetA % 4u])`,\n sharedDimensionDivisibleBy4 || bLastDimDivisibleBy4 ?\n b.getByOffset('offsetB / 4u') :\n `${b.type.value}(${b.getByOffset('offsetB / 4u')}[offsetB % 4u])`))}\n `;\n }\n } else {\n assignment = output.setByOffset(\n 'global_idx', expressionVector(a.getByOffset('global_idx'), b.getByOffset('global_idx')));\n }\n } else {\n if (!doBroadcast) {\n throw new Error('no necessary to use scalar implementation for element-wise binary op implementation.');\n }\n\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `aData[indexA${x}][componentA${x}]`;\n const expressionB = `bData[indexB${x}][componentB${x}]`;\n return `\n let outputIndices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offsetA${x} = ${a.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let offsetB${x} = ${b.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let indexA${x} = offsetA${x} / 4u;\n let indexB${x} = offsetB${x} / 4u;\n let componentA${x} = offsetA${x} % 4u;\n let componentB${x} = offsetB${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expressionScalar(expressionA, expressionB)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n outputData[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('outputData[global_idx]', 0)}\n ${singleAssignment('outputData[global_idx]', 1)}\n ${singleAssignment('outputData[global_idx]', 2)}\n ${singleAssignment('outputData[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(a, b, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createBinaryOpProgramInfo =\n (name: string, cacheKey: string, a: TensorView, b: TensorView, funcCall: BinaryFunctionCall,\n additionalImplementation?: string, outputDataType: number = a.dataType): ProgramInfo => {\n const isBroadcast = !ShapeUtil.areEqual(a.dims, b.dims);\n let outputShape = a.dims;\n let outputSize = ShapeUtil.size(a.dims);\n\n let vectorize = false;\n let sharedDimensionDivisibleBy4 = false;\n\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n const cacheKeyAux = [isBroadcast];\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(a.dims, b.dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n const isAOneElement = ShapeUtil.size(a.dims) === 1;\n const isBOneElement = ShapeUtil.size(b.dims) === 1;\n const aLastDimDivisibleBy4 = a.dims.length > 0 && a.dims[a.dims.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = b.dims.length > 0 && b.dims[b.dims.length - 1] % 4 === 0;\n cacheKeyAux.push(isAOneElement);\n cacheKeyAux.push(isBOneElement);\n cacheKeyAux.push(aLastDimDivisibleBy4);\n cacheKeyAux.push(bLastDimDivisibleBy4);\n // check whether vectorize can be enabled\n let sharedDimension = 1;\n for (let i = 1; i < outputShape.length; i++) {\n const dimA = a.dims[a.dims.length - i] ?? 1;\n const dimB = b.dims[b.dims.length - i] ?? 1;\n if (dimA === dimB) {\n sharedDimension *= dimA;\n } else {\n break;\n }\n }\n if (sharedDimension % 4 === 0) {\n sharedDimensionDivisibleBy4 = true;\n vectorize = true;\n } else if (isAOneElement || isBOneElement || aLastDimDivisibleBy4 || bLastDimDivisibleBy4) {\n vectorize = true;\n }\n } else {\n // element-wise\n vectorize = true;\n }\n cacheKeyAux.push(vectorize);\n\n return {\n name,\n shaderCache: {\n hint: cacheKey + cacheKeyAux.map((x) => x.toString()).join('_'),\n inputDependencies: ['rank', 'rank'],\n },\n getShaderSource: (shaderHelper) => createBinaryOpProgramShader(\n shaderHelper, a.dims, b.dims, outputShape, vectorize, isBroadcast, sharedDimensionDivisibleBy4, funcCall,\n a.dataType, b.dataType, outputDataType, additionalImplementation),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* component size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(outputShape) / 4)},\n ...createTensorShapeVariables(a.dims, b.dims, outputShape)\n ],\n }),\n };\n };\n\nconst runBinaryOp =\n (context: ComputeContext, name: string, funcCall: BinaryFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType?: number): void => {\n context.compute(createBinaryOpProgramInfo(\n name, cacheKey ?? '', context.inputs[0], context.inputs[1], funcCall, additionalImplementation,\n outputDataType));\n };\n\nexport const add = (context: ComputeContext): void => {\n runBinaryOp(context, 'Add', (a, b) => `${a}+${b}`);\n};\n\nexport const div = (context: ComputeContext): void => {\n runBinaryOp(context, 'Div', (a, b) => `${a}/${b}`);\n};\n\nexport const equal = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Equal', ({scalar: (a, b) => `u32(${a}==${b})`, vector: (a, b) => `vec4(${a}==${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const mul = (context: ComputeContext): void => {\n runBinaryOp(context, 'Mul', (a, b) => `${a}*${b}`);\n};\n\nexport const pow = (context: ComputeContext): void => {\n const type = inputVariable('input', context.inputs[0].dataType, context.inputs[0].dims).type.value;\n const roundStr = type === 'i32' ? 
'round' : '';\n runBinaryOp(\n context, 'Pow', ({scalar: (a, b) => `pow_custom(${a},${b})`, vector: (a, b) => `pow_vector_custom(${a},${b})`}),\n `\n fn pow_custom(a : ${type}, b : ${type}) -> ${type} {\n if (b == ${type}(0.0)) {\n return ${type}(1.0);\n } else if (a < ${type}(0.0) && f32(b) != floor(f32(b))) {\n return ${type}(pow(f32(a), f32(b))); // NaN\n }\n return select(sign(a), ${type}(1.0), round(f32(abs(b) % ${type}(2.0))) != 1.0) * ${type}(${\n roundStr}(pow(f32(abs(a)), f32(b))));\n }\n fn pow_vector_custom(a : vec4<${type}>, b : vec4<${type}>) -> vec4<${type}> {\n // TODO: implement vectorized pow\n return vec4<${type}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w));\n }\n `);\n};\n\nexport const sub = (context: ComputeContext): void => {\n runBinaryOp(context, 'Sub', (a, b) => `${a}-${b}`);\n};\n\nexport const greater = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Greater', ({scalar: (a, b) => `u32(${a}>${b})`, vector: (a, b) => `vec4(${a}>${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const less = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Less', ({scalar: (a, b) => `u32(${a}<${b})`, vector: (a, b) => `vec4(${a}<${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const greaterOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'GreaterOrEqual', ({scalar: (a, b) => `u32(${a}>=${b})`, vector: (a, b) => `vec4(${a}>=${b})`}),\n undefined, undefined, DataType.bool);\n};\n\nexport const lessOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'LessOrEqual', ({scalar: (a, b) => `u32(${a}<=${b})`, vector: (a, b) => `vec4(${a}<=${b})`}),\n undefined, undefined, DataType.bool);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], axis: number): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n const referenceIndex = 0;\n const referenceInput = inputs[referenceIndex];\n const inputType = referenceInput.dataType;\n const inputRank = referenceInput.dims.length;\n inputs.forEach((input, i) => {\n if (i === referenceIndex) {\n return;\n }\n // make sure types of all inputs match\n if (input.dataType !== inputType) {\n throw new Error('input tensors should be one type');\n }\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputRank) {\n throw new Error('input tensors should have the same shape');\n }\n input.dims.forEach((dim, i) => {\n if (i !== axis && dim !== referenceInput.dims[i]) {\n throw new Error('non concat dimensions must match');\n }\n });\n });\n};\n\nconst calculateInputIndexImpl = (numberOfTensors: number, sizeInConcatAxisStr: string): string => `\n fn calculateInputIndex(index: u32) -> u32 {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n for (var i: u32 = 0u; i < ${numberOfTensors}; i += 1u ) {\n if (index < sizeInConcatAxis[i]) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n }`;\n\nconst assignOutputData = (inputs: readonly IndicesHelper[], output: IndicesHelper) => {\n const numberOfTensors = inputs.length;\n\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = output.setByOffset('global_idx', inputs[i].getByIndices('indices'));\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (inputIndex == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (inputIndex == ${i}) { ${returnSnippet} }`);\n }\n }\n return codeLines.join('\\n');\n};\n\nconst createConcatProgramInfo =\n (inputs: readonly TensorView[], adjustedAxis: number, outputShape: number[], dataType: DataType): ProgramInfo => {\n const outputSize = ShapeUtil.size(outputShape);\n\n const sizeInConcatAxis = new Array(inputs.length);\n const inputVars = new Array(inputs.length);\n\n let previousSum = 0;\n const inputDependencies: ProgramInputTensorInfoDependency[] = [];\n const inputRanks = [];\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: outputSize}];\n for (let i = 0; i < inputs.length; ++i) {\n previousSum += inputs[i].dims[adjustedAxis];\n sizeInConcatAxis[i] = previousSum;\n inputRanks.push(inputs[i].dims.length);\n inputVars[i] = inputVariable(`input${i}`, dataType, inputRanks[i]);\n inputDependencies.push('rank');\n programUniforms.push({type: DataType.uint32, data: sizeInConcatAxis[i]});\n }\n for (let i = 0; i < inputs.length; ++i) {\n programUniforms.push(...createTensorShapeVariables(inputs[i].dims));\n }\n 
programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const output = outputVariable('output', dataType, outputShape.length);\n const indicesAxis = output.indicesGet('indices', adjustedAxis);\n const sizeInConcatAxisStr =\n Array.from(Array(sizeInConcatAxis.length).keys()).map(i => `uniforms.sizeInConcatAxis${i}`).join(',');\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${(() => {\n shaderHelper.registerUniform('outputSize', 'u32');\n for (let i = 0; i < inputs.length; i++) {\n shaderHelper.registerUniform(`sizeInConcatAxis${i}`, 'u32');\n }\n return shaderHelper.declareVariables(...inputVars, output);\n })()}\n\n ${calculateInputIndexImpl(sizeInConcatAxis.length, sizeInConcatAxisStr)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n var indices = ${output.offsetToIndices('global_idx')};\n\n let inputIndex = calculateInputIndex(${indicesAxis});\n if (inputIndex != 0u) {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n ${indicesAxis} -= sizeInConcatAxis[inputIndex - 1u];\n }\n\n ${assignOutputData(inputVars, output)}\n }`;\n\n return {\n name: 'Concat',\n shaderCache: {hint: `${adjustedAxis}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const concat = (context: ComputeContext, attributes: ConcatAttributes): void => {\n const inputs = context.inputs;\n const inputShape = inputs[0].dims;\n const adjustedAxis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n validateInputs(inputs, adjustedAxis);\n const outputShape = inputShape.slice();\n outputShape[adjustedAxis] =\n inputs.reduce((sum, input) => sum + (input.dims.length > adjustedAxis ? input.dims[adjustedAxis] : 0), 0);\n // 0 length tensors are valid for concat, remove them\n const nonEmptyInputs = inputs.filter(input => ShapeUtil.size(input.dims) > 0);\n context.compute(\n createConcatProgramInfo(nonEmptyInputs, adjustedAxis, outputShape, inputs[0].dataType), {inputs: nonEmptyInputs});\n};\n\nexport const parseConcatAttributes = (attributes: Record): ConcatAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {MAX_CLIP, MIN_CLIP} from '../../util';\nimport {ProgramUniform} from '../types';\n\nimport {UniformsArrayType} from './common';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly alpha?: number;\n readonly beta?: number;\n}\n\nexport const getActivationSnippet =\n (attributes: InternalActivationAttributes, valueType: string, baseType = 'f32'): string => {\n switch (attributes.activation) {\n case 'Relu':\n return `value = max(value, ${valueType}(0.0));`;\n case 'Sigmoid':\n return `value = (${valueType}(1.0) / (${valueType}(1.0) + exp(-value)));`;\n case 'Clip':\n return `value = clamp(value, ${valueType}(${baseType}(uniforms.clip_min)), ${valueType}(${\n baseType}(uniforms.clip_max)));`;\n case 'HardSigmoid':\n return `value = max(${valueType}(0.0), min(${valueType}(1.0), ${baseType}(uniforms.alpha) * value + ${\n baseType}(uniforms.beta)));`;\n case 'LeakyRelu':\n return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;\n case '':\n return '';\n // TODO: adding other activations that can be fused.\n default:\n throw new Error(`Unsupported activation ${attributes.activation}`);\n }\n };\n\nexport const appendActivationUniformsData =\n (attributes: InternalActivationAttributes, programUniform: ProgramUniform[]) => {\n if (attributes.activation === 'Clip') {\n programUniform.push(\n {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});\n } else if (attributes.activation === 'HardSigmoid') {\n programUniform.push(\n {type: DataType.float, data: attributes.alpha!}, {type: DataType.float, data: attributes.beta!});\n } else if (attributes.activation === 'LeakyRelu') {\n programUniform.push({type: DataType.float, data: attributes.alpha!});\n }\n };\n\nexport const appendActivationUniforms = (attributes: InternalActivationAttributes, uniforms: UniformsArrayType) => {\n if (attributes.activation === 'Clip') {\n uniforms.push({name: 'clip_max', type: 'f32'}, {name: 'clip_min', type: 'f32'});\n } else if (attributes.activation === 'HardSigmoid') {\n uniforms.push({name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'});\n } else if (attributes.activation === 'LeakyRelu') {\n uniforms.push({name: 'alpha', type: 'f32'});\n }\n};\n\nexport const parseInternalActivationAttributes =\n (attributes: Record|undefined): InternalActivationAttributes => {\n const activation = attributes?.activation as string || '';\n if (activation === 'HardSigmoid') {\n const [alpha, beta] = attributes?.activation_params as [number, number] || [0.2, 0.5];\n return {activation, alpha, beta};\n } else if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes?.activation_params as [number, number] || [MIN_CLIP, MAX_CLIP];\n return {activation, clipMax, clipMin};\n } else if (activation === 'LeakyRelu') {\n const [alpha] = attributes?.activation_params as [number] || [0.01];\n return {activation, alpha};\n }\n return {activation};\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/activation_util.ts\n//\n// modified to fit the needs of the project\n\nexport const typeSnippet = (component: number, dataType: string) => {\n switch (component) {\n case 1:\n return dataType;\n case 2:\n return `vec2<${dataType}>`;\n case 3:\n return `vec3<${dataType}>`;\n case 4:\n return `vec4<${dataType}>`;\n default:\n throw new Error(`${component}-component is not supported.`);\n }\n};\n\nexport const biasSnippet = (hasBias: boolean): string => `\n ${hasBias ? 'value = value + getBiasByOutputCoords(coords);' : ''}\n `;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-core/src/ops/conv_util.ts\n//\n// modified to fit the needs of the project\n\nexport const utilFunctions = (strideStr: string) => (`\nfn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 {\n return dot(coords, vec4(\n shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1));\n}\nfn getOutputIndexFromCoords(coords : vec4) -> i32 {\n return dot(coords, vec4(\n i32(${strideStr}.x), i32(${strideStr}.y), i32(${strideStr}.z), 1));\n}\n`);\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/matmul_packed_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getBroadcastDims, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from '../fuse-utils';\n\nimport {typeSnippet} from './activation_util';\n\nconst writeDataToSubAVec4Snippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRow + innerRow,\n kStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst calculateResultSnippet = (transposeA: boolean, innerElementSize: number) => {\n if (transposeA) {\n return `\n let ACached0 = mm_Asub[k * innerElementSize][localRow];\n let ACached1 = mm_Asub[k * innerElementSize + 1][localRow];\n let ACached2 = mm_Asub[k * innerElementSize + 2][localRow];\n ${innerElementSize === 3 ? '' : 'let ACached3 = mm_Asub[k * innerElementSize + 3][localRow];'}\n for (var i = 0; i < rowPerThread; i = i + 1) {\n acc[i] = BCached0 * ACached0[i] + acc[i];\n acc[i] = BCached1 * ACached1[i] + acc[i];\n acc[i] = BCached2 * ACached2[i] + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached3[i] + acc[i];'}\n }`;\n } else {\n return `\n for (var i = 0; i < rowPerThread; i = i + 1) {\n let ACached = mm_Asub[tileRow + i][k];\n acc[i] = BCached0 * ACached.x + acc[i];\n acc[i] = BCached1 * ACached.y + acc[i];\n acc[i] = BCached2 * ACached.z + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached.w + acc[i];'}\n }`;\n }\n};\n\nexport const makeMatMulPackedVec4Source =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32): string => {\n const tileAOuter = workgroupSize[1] * workPerThread[1];\n const tileBOuter = workgroupSize[0] * workPerThread[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? 
tileInner : tileAOuter;\n const innerElementSize = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n\n if (!(((transposeA && innerElementSize === 4 && workPerThread[1] === 4) ||\n (!transposeA && (innerElementSize === 3 || innerElementSize === 4))) &&\n tileAWidth % workgroupSize[0] === 0 && tileInner % workgroupSize[1] === 0 && workPerThread[0] === 4)) {\n throw new Error(`If transposeA ${transposeA} is true, innerElementSize ${\n innerElementSize} and workPerThread[1] ${workPerThread[1]} must be 4.\n Otherwise, innerElementSize ${innerElementSize} must be 3 or 4.\n tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${workgroupSize[0]}. tileInner ${\n tileInner} must be divisible by workgroupSize[1] ${workgroupSize[1]}. colPerThread ${\n workPerThread[0]} must be 4.`);\n }\n return `\nvar mm_Asub: array, ${tileAWidth / innerElementSize}>, ${tileAHight}>;\nvar mm_Bsub: array, ${tileBOuter / workPerThread[0]}>, ${tileInner}>;\n\nconst rowPerThread = ${workPerThread[1]};\nconst colPerThread = ${workPerThread[0]};\nconst innerElementSize = ${innerElementSize};\nconst tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let localRow = i32(localId.y);\n let tileRow = localRow * rowPerThread;\n let tileCol = i32(localId.x);\n\n let globalRow =i32(globalId.y) * rowPerThread;\n let globalCol = i32(globalId.x);\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\n let num_tiles = ${splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc: array, rowPerThread>;\n\n // Loop over shared dimension.\n let tileRowB = localRow * ${rowPerThreadB};\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let inputRow = tileRow + innerRow;\n let inputCol = tileCol;\n ${writeDataToSubAVec4Snippet(transposeA, batchDims)}\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch, kStart + inputRow, globalCol${\n batchDims ? ', batchIndices' : ''});\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n for (var k = 0; k < tileInner / innerElementSize; k = k + 1) {\n let BCached0 = mm_Bsub[k * innerElementSize][tileCol];\n let BCached1 = mm_Bsub[k * innerElementSize + 1][tileCol];\n let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol];\n ${innerElementSize === 3 ? 
'' : 'let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];'}\n\n ${calculateResultSnippet(transposeA, innerElementSize)}\n }\n\n workgroupBarrier();\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n mm_write(batch, globalRow + innerRow, globalCol, acc[innerRow]);\n }\n}`;\n };\n\nconst writeDataToSubASnippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRowStart + inputRow,\n kStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst readDataFromSubASnippet = (transposeA: boolean) =>\n transposeA ? 'let ACached = mm_Asub[k][tileRow + innerRow];' : 'let ACached = mm_Asub[tileRow + innerRow][k];';\n\n// sequentialAccessByThreads means sequential data in memory is accessed by\n// threads, instead of a single thread (default behavior).\nexport const makeMatMulPackedSource =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32,\n sequentialAccessByThreads = false): string => {\n const tileAOuter = workPerThread[1] * workgroupSize[1];\n const tileBOuter = workPerThread[0] * workgroupSize[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? tileInner : tileAOuter;\n\n if (!(tileAHight % workgroupSize[1] === 0 && tileAWidth % workgroupSize[0] === 0 &&\n tileInner % workgroupSize[1] === 0)) {\n throw new Error(`tileAHight ${tileAHight} must be divisible by workgroupSize[1]${\n workgroupSize[1]}, tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${\n workgroupSize[0]}, tileInner ${tileInner} must be divisible by workgroupSize[1]${workgroupSize[1]}`);\n }\n const rowPerThreadA = tileAHight / workgroupSize[1];\n const colPerThreadA = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n const matmulSnippet = sequentialAccessByThreads ?\n `\n let localRow = i32(localId.y);\n let localCol = i32(localId.x);\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n let globalColStart = i32(workgroupId.x) * ${tileBOuter};\n\n // Loop over shared dimension.\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var inputRow = localRow; inputRow < ${tileAHight}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileAWidth}; inputCol = inputCol + ${workgroupSize[0]}) {\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n // Load one tile of B into local memory.\n for (var inputRow = localRow; inputRow < ${tileInner}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileBOuter}; inputCol = inputCol + ${workgroupSize[0]}) {\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalColStart + inputCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][localCol + inner * ${workgroupSize[0]}];\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let ACached = ${\n transposeA ? `mm_Asub[k][localRow + innerRow * ${workgroupSize[1]}];` :\n `mm_Asub[localRow + innerRow * ${workgroupSize[1]}][k];`}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] +\n ACached * BCached[innerCol];\n }\n }\n }\n workgroupBarrier();\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let gRow = globalRowStart + localRow + innerRow * ${workgroupSize[1]};\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let gCol = globalColStart + localCol + innerCol * ${workgroupSize[0]};\n mm_write(batch, gRow, gCol, acc[innerRow][innerCol]);\n }\n }\n ` :\n `\nlet tileRow = i32(localId.y) * rowPerThread;\nlet tileCol = i32(localId.x) * colPerThread;\n\nlet globalRow = i32(globalId.y) * rowPerThread;\nlet globalCol = i32(globalId.x) * colPerThread;\nlet globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\nlet tileRowA = i32(localId.y) * ${rowPerThreadA};\nlet tileColA = i32(localId.x) * ${colPerThreadA};\nlet tileRowB = i32(localId.y) * ${rowPerThreadB};\n// Loop over shared dimension.\nfor (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadA}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < ${colPerThreadA}; innerCol = innerCol + 1) {\n let inputRow = tileRowA + innerRow;\n let inputCol = tileColA + innerCol;\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol + innerCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalCol + innerCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][tileCol + inner];\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n ${readDataFromSubASnippet(transposeA)}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol];\n }\n }\n }\n\n workgroupBarrier();\n}\n\nfor (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n mm_write(batch, globalRow + innerRow, globalCol + innerCol,\n acc[innerRow][innerCol]);\n }\n}\n`;\n\n return `\n var mm_Asub : array, ${tileAHight}>;\n var mm_Bsub : array, ${tileInner}>;\n const rowPerThread = ${workPerThread[1]};\n const colPerThread = ${workPerThread[0]};\n const tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let num_tiles = ${\n splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc : array, rowPerThread>;\n ${matmulSnippet}\n }\n`;\n };\n\nconst matMulReadWriteFnSource =\n (component: number, hasBias: boolean, applyActivation: string, variables: IndicesHelper[],\n batchShapes: Array, isChannelsLast = false): string => {\n const [batchAShape, batchBShape, batchShape] = batchShapes;\n const [batchVariable, aVariable, bVariable, outputVariable] = variables;\n const broadCastADims = getBroadcastDims(batchAShape, batchShape);\n const broadCastBDims = getBroadcastDims(batchBShape, batchShape);\n const dataType = tensorTypeToWsglStorageType(variables[0].type.tensor);\n const getAIndices = () => {\n const aRank = aVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var aIndices: ${aVariable.type.indices};`;\n for (let i = aRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\naIndices[${i}] = ${batchRank > 1 ? `batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastADims.forEach(i => {\n resStr += `\\naIndices[${i}] = 0;`;\n });\n resStr += `\\naIndices[${aRank - 2}] = u32(row);\n aIndices[${aRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const getBIndices = () => {\n const bRank = bVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var bIndices: ${bVariable.type.indices};`;\n for (let i = bRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\nbIndices[${i}] = ${batchRank > 1 ? 
`batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastBDims.forEach(i => {\n resStr += `\\nbIndices[${i}] = 0;`;\n });\n resStr += `\\nbIndices[${bRank - 2}] = u32(row);\n bIndices[${bRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const source = `\n fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_a_outer && col < uniforms.dim_inner)\n {\n ${getAIndices()}\n value = ${aVariable.getByIndices('aIndices')};\n }\n return value;\n }\n\n fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_inner && col < uniforms.dim_b_outer)\n {\n ${getBIndices()}\n value = ${bVariable.getByIndices('bIndices')};\n }\n return value;\n }\n\n fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${typeSnippet(component, dataType)}) {\n let col = colIn * ${component};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueIn;\n let coords = vec3(batch, row, colIn);\n ${\n hasBias ?\n `value = value + ${isChannelsLast ? 'bias[colIn]' : `${typeSnippet(component, dataType)}(bias[row])`};` :\n '' }\n ${applyActivation}\n ${outputVariable.setByIndices('vec3(coords)', 'value')}\n }\n }\n `;\n return source;\n };\n\nexport const createMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const dimAOuter = aShape[aShape.length - 2];\n const dimInner = aShape[aShape.length - 1];\n const dimBOuter = bShape[bShape.length - 1];\n const isVec4 = dimInner % 4 === 0 && dimBOuter % 4 === 0;\n\n // TODO: fine tune size\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const workgroupSize: [number, number, number] = [8, 8, 1];\n const dispatch = [\n Math.ceil(dimBOuter / workgroupSize[0] / elementsPerThread[0]),\n Math.ceil(dimAOuter / workgroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workgroupSize[2] / elementsPerThread[2])\n ];\n\n const components = isVec4 ? 
4 : 1;\n const aShapeTemp = [...outerDimsA, dimAOuter, dimInner / components];\n const aRank = aShapeTemp.length;\n const bShapeTemp = [...outerDimsB, dimInner, dimBOuter / components];\n const bRank = bShapeTemp.length;\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShapeTemp, bShapeTemp));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n const hasBias = inputs.length > 2;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchRank = outerDims.length;\n const batchDims = internalVariable('batchDims', inputs[0].dataType, batchRank, 1);\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const A = inputVariable('a', inputs[0].dataType, aRank, components);\n const B = inputVariable('b', inputs[1].dataType, bRank, components);\n const output = outputVariable('result', inputs[0].dataType, outputShapeTemp.length, components);\n const inputVariables = [A, B];\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n }\n const uniforms: UniformsArrayType =\n [{name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'}];\n appendActivationUniforms(activationAttributes, uniforms);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const declareFunctions = matMulReadWriteFnSource(\n components, hasBias, applyActivation, [batchDims, A, B, output], [outerDimsA, outerDimsB, outerDims],\n isChannelsLast);\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${declareFunctions}\n ${\n isVec4 ? makeMatMulPackedVec4Source(elementsPerThread, workgroupSize, dataType, batchDims) :\n makeMatMulPackedSource(elementsPerThread, workgroupSize, dataType, batchDims)}\n `;\n };\n return {\n name: 'MatMul',\n shaderCache: {\n hint: `${elementsPerThread};${activationAttributes.activation};${isVec4};${isChannelsLast}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
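createMatmulProgramInfo above pairs a fixed [8, 8, 1] workgroup with [4, 1, 1] or [4, 4, 1] elements per thread, so each workgroup produces an (8 * elementsPerThread[1]) x (8 * elementsPerThread[0]) output tile and the dispatch grid is the output divided by that tile. A minimal sketch of that sizing (matmulDispatch is a hypothetical helper name):

// Same sizing rule as createMatmulProgramInfo: 4 columns per thread, and 4 rows per
// thread once the output has more than 8 rows.
const matmulDispatch = (dimAOuter: number, dimBOuter: number, batchSize: number): [number, number, number] => {
  const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];
  const workgroupSize = [8, 8, 1];
  return [
    Math.ceil(dimBOuter / workgroupSize[0] / elementsPerThread[0]),  // output columns
    Math.ceil(dimAOuter / workgroupSize[1] / elementsPerThread[1]),  // output rows
    Math.ceil(batchSize / workgroupSize[2] / elementsPerThread[2]),  // broadcast batch
  ];
};

// A 512x512 matmul with batch size 1: 16 x 16 x 1 workgroups, each producing a 32x32 tile.
console.log(matmulDispatch(512, 512, 1)); // [16, 16, 1]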
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv2d_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet, typeSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dCommonSnippet =\n (isChannelsLast: boolean, fitAOuter: boolean, fitBOuter: boolean, fitInner: boolean, addBias = false,\n attributes: ConvAttributes, innerElementSizeX = 4, innerElementSizeW = 4, innerElementSize = 4,\n dataType = 'f32'): string => {\n const getXSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'resData = x[xIndex];';\n case 3:\n return `resData = vec3<${dataType}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;\n case 4:\n return 'resData = x[xIndex / 4];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[row * i32(uniforms.w_shape[3]) + colIn];';\n case 4:\n return 'return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, xRow, xCol, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, xRow, xCol);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n const readXSnippet = `\n let inChannels = i32(uniforms.w_shape[2]);\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (i32(uniforms.w_shape[1]) * inChannels);\n let WCol = ${col} / inChannels % i32(uniforms.w_shape[1]);\n let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0];\n let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1];\n let xCh = ${col} % inChannels;\n var resData = ${typeSnippet(innerElementSizeX, dataType)}(0.0);\n // The bounds checking is always needed since we use it to pad zero for\n // the 'same' padding type.\n if (xRow >= 0 && xRow < ${xHeight} && xCol >= 0 && xCol < ${xWidth}) {\n ${coordASnippet}\n let xIndex = getIndexFromCoords4D(coord, vec4(uniforms.x_shape));\n ${getXSnippet(innerElementSizeX)}\n }\n return resData;`;\n\n const sampleX = isChannelsLast ? (fitAOuter && fitInner ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`) :\n (fitInner && fitBOuter ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`);\n\n const sampleW = `${getWSnippet(innerElementSizeW)}`;\n\n const resType = typeSnippet(innerElementSize, dataType);\n const aType =\n isChannelsLast ? typeSnippet(innerElementSizeX, dataType) : typeSnippet(innerElementSizeW, dataType);\n const bType =\n isChannelsLast ? typeSnippet(innerElementSizeW, dataType) : typeSnippet(innerElementSizeX, dataType);\n const applyActivation = getActivationSnippet(attributes, resType, dataType);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${aType} {\n ${isChannelsLast ? sampleX : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${bType} {\n ${isChannelsLast ? sampleW : sampleX}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueIn : ${resType}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer)\n {\n var value = valueIn;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value);\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[], dimAOuter: number,\n dimBOuter: number, dimInner: number, hasBias: boolean, sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 || inChannels % 3 === 0) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv2d_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const tileAOuter = workGroupSize[1] * elementsPerThread[1];\n const tileBOuter = workGroupSize[0] * elementsPerThread[0];\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const fitAOuter = dimAOuter % tileAOuter === 0;\n const fitBOuter = dimBOuter % tileBOuter === 0;\n const fitInner = dimInner % tileInner === 0;\n const elementsSize = isVec4 ? [innerElementSize, 4, 4] : [1, 1, 1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.int32, data: attributes.strides}, {type: DataType.int32, data: attributes.dilations}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'pad', type: 'i32', length: 2}, {name: 'stride', type: 'i32', length: 2},\n {name: 'dilation', type: 'i32', length: 2}\n ];\n appendActivationUniforms(attributes, uniforms);\n\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n result[flatIndex] = ${isVec4 ? `vec4<${t}>` : t}(value);\n }\n fn setOutputAtCoords(d0 : i32, d1 : i32, d2 : i32, d3 : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n let flatIndex = getOutputIndexFromCoords(vec4(d0, d1, d2, d3));\n setOutputAtIndex(flatIndex ${isVec4 ? '/ 4' : ''}, value);\n }`;\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n return `\n ${utilFunctions('uniforms.result_strides')}\n //struct Uniforms { xShape : vec4, wShape : vec4, outShape : vec4,\n // outShapeStrides: vec3, filterDims : vec2, pad : vec2, stride : vec2,\n // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 };\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n ${\n conv2dCommonSnippet(\n isChannelsLast, fitAOuter, fitBOuter, fitInner, hasBias, attributes, elementsSize[0], elementsSize[1],\n elementsSize[2], t)}\n ${\n isVec4 ?\n makeMatMulPackedVec4Source(elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner, false, undefined,\n sequentialAccessByThreads)}`;\n };\n return {\n name: 'Conv2DMatMul',\n shaderCache: {\n hint: `${attributes.cacheKey};${innerElementSize};${isVec4};${fitAOuter};${fitBOuter};${fitInner};${\n tileAOuter};${tileBOuter};${tileInner}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv3d_naive_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\n\nconst arrayProduct = (arr: number[]) => {\n let product = 1;\n for (let i = 0; i < arr.length; i++) {\n product *= arr[i];\n }\n return product;\n};\n\nconst parse3TupleParam = (param: number|[number, number, number]): [number, number, number] =>\n typeof param === 'number' ? 
[param, param, param] : param;\n\nconst getEffectiveFilterSize = (filterSize: number, dilation: number): number => {\n if (dilation <= 1) {\n return filterSize;\n }\n\n return filterSize + (filterSize - 1) * (dilation - 1);\n};\n\nconst computeDefaultPad =\n (inputShape: [number, number]|[number, number, number, number], fieldSize: number, stride: number, dilation = 1):\n number => {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n };\n\nconst computeOutputShape4D =\n (inShape: [number, number, number, number], filterShape: [number, number, number], outChannels: number,\n strides: [number, number, number], zeroPad?: number): [number, number, number, number] => {\n if (zeroPad == null) {\n // eslint-disable-next-line no-param-reassign\n zeroPad = computeDefaultPad(inShape, filterShape[0], strides[0]);\n }\n const outShape: [number, number, number, number] = [0, 0, 0, outChannels];\n for (let index = 0; index < 3; index++) {\n if (inShape[index] + 2 * zeroPad >= filterShape[index]) {\n outShape[index] = Math.trunc((inShape[index] - filterShape[index] + 2 * zeroPad) / strides[index] + 1);\n }\n }\n return outShape;\n };\n\nconst get3DPadAndOutInfo =\n (pad: number|string|number[], inDepth: number, inHeight: number, inWidth: number, strideDepth: number,\n strideHeight: number, strideWidth: number, filterDepth: number, filterHeight: number,\n filterWidth: number): {padInfo: PadInfo3D; outDepth: number; outHeight: number; outWidth: number} => {\n let padInfo: PadInfo3D;\n let outDepth: number;\n let outHeight: number;\n let outWidth: number;\n\n if (pad === 'VALID') {\n // eslint-disable-next-line no-param-reassign\n pad = 0;\n }\n\n if (typeof pad === 'number') {\n padInfo = {top: pad, bottom: pad, left: pad, right: pad, front: pad, back: pad};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (Array.isArray(pad)) {\n if (!pad.every((val, _, arr) => val === arr[0])) {\n throw Error(`Unsupported padding parameter: ${pad}`);\n }\n padInfo = {top: pad[0], bottom: pad[1], left: pad[2], right: pad[3], front: pad[4], back: pad[5]};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad[0]);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (pad === 'SAME_UPPER') {\n // TODO: support 'SAME_LOWER'.\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n\n padInfo = {top, bottom, left, right, front, back};\n } else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return {padInfo, outDepth, outHeight, outWidth};\n };\n\ntype PadInfo3D = {\n top: 
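The 'SAME_UPPER' branch of get3DPadAndOutInfo above sizes each output dimension as ceil(in / stride) and splits the required padding so that the odd element, if any, lands at the upper end. A minimal sketch reduced to one spatial dimension (sameUpperPad is a hypothetical helper, not in the source):

// One-dimensional version of the 'SAME_UPPER' padding computed by get3DPadAndOutInfo.
const sameUpperPad = (inSize: number, stride: number, effectiveFilterSize: number) => {
  const outSize = Math.ceil(inSize / stride);
  const padAlong = (outSize - 1) * stride + effectiveFilterSize - inSize;
  const begin = Math.floor(padAlong / 2);  // front / top / left
  const end = padAlong - begin;            // back / bottom / right gets the odd element
  return {outSize, begin, end};
};

// Size 5, stride 2, 2-tap filter: output size 3; the single pad element goes to the end.
console.log(sameUpperPad(5, 2, 2)); // { outSize: 3, begin: 0, end: 1 }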
number; left: number; right: number; bottom: number; front: number; back: number;\n};\n\nexport type Conv3DInfo = {\n batchSize: number; inDepth: number; inHeight: number; inWidth: number; inChannels: number; outDepth: number;\n outHeight: number;\n outWidth: number;\n outChannels: number;\n dataFormat: 'channelsFirst' | 'channelsLast';\n strideDepth: number;\n strideHeight: number;\n strideWidth: number;\n dilationDepth: number;\n dilationHeight: number;\n dilationWidth: number;\n filterDepth: number;\n filterHeight: number;\n filterWidth: number;\n effectiveFilterDepth: number;\n effectiveFilterHeight: number;\n effectiveFilterWidth: number;\n padInfo: PadInfo3D;\n inShape: [number, number, number, number, number];\n outShape: [number, number, number, number, number];\n filterShape: [number, number, number, number, number];\n};\n\nexport const computeConv3DInfo =\n (inShape: [number, number, number, number, number], filterShape: [number, number, number, number, number],\n strides: number|[number, number, number], dilations: number|[number, number, number], pad: number|string|number[],\n depthwise = false, dataFormat: 'channelsFirst'|'channelsLast' = 'channelsLast'): Conv3DInfo => {\n let batchSize, inDepth, inHeight, inWidth, inChannels;\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n } else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n } else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterChannels, , filterDepth, filterHeight, filterWidth] = filterShape;\n\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const {padInfo, outDepth, outHeight, outWidth} = get3DPadAndOutInfo(\n pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth,\n effectiveFilterHeight, effectiveFilterWidth);\n\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n\n let outShape: [number, number, number, number, number] = [0, 0, 0, 0, 0];\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n } else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n };\n\nexport const createConv3DNaiveProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[],\n filterDims: readonly number[], pads: readonly number[], dataFormat: string): ProgramInfo => {\n const isChannelsLast = dataFormat === 'channelsLast';\n const inChannels = isChannelsLast ? 
inputs[0].dims[3] : inputs[0].dims[1];\n // TODO: enable vec4.\n const isVec4 = false;\n const workGroupSize: [number, number, number] = [64, 1, 1];\n const dispatchLayout = {x: outputShape.map((_, i) => i)};\n const dispatch = [Math.ceil(arrayProduct(dispatchLayout.x.map(d => outputShape[d])) / (workGroupSize[0])), 1, 1];\n\n LOG_DEBUG('verbose', () => `[conv3d_naive_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: filterDims},\n {type: DataType.uint32, data: pads}, {type: DataType.uint32, data: attributes.strides},\n {type: DataType.uint32, data: attributes.dilations}\n ];\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'pads', type: 'u32', length: pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length},\n {name: 'dilations', type: 'u32', length: attributes.dilations.length}\n ];\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : array) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[${isChannelsLast ? getElementAt('coords', 4, 5) : getElementAt('coords', 1, 5)}${\n isVec4 ? '/ 4' : ''}];\n }`;\n }\n\n return `\n ${declareFunctions}\n fn getX(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${x.getByIndices('aIndices')};\n }\n fn getW(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${w.getByIndices('aIndices')};\n }\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let coords = ${output.offsetToIndices('global_idx')};\n let batch = ${getElementAt('coords', 0, x.rank)};\n let d2 = ${\n isChannelsLast ? getElementAt('coords', x.rank - 1, x.rank) : getElementAt('coords', 1, x.rank)};\n let xFRCCorner = vec3(${\n isChannelsLast ? getElementAt('coords', 1, x.rank) : getElementAt('coords', 2, x.rank)},\n ${isChannelsLast ? getElementAt('coords', 2, x.rank) : getElementAt('coords', 3, x.rank)},\n ${\n isChannelsLast ? 
getElementAt('coords', 3, x.rank) :\n getElementAt('coords', 4, x.rank)}) * uniforms.strides - uniforms.pads;\n let xFCorner = xFRCCorner.x;\n let xRCorner = xFRCCorner.y;\n let xCCorner = xFRCCorner.z;\n let xShapeY = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 1, x.rank) : getElementAt('uniforms.x_shape', 2, x.rank)};\n let xShapeZ = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 2, x.rank) : getElementAt('uniforms.x_shape', 3, x.rank)};\n let xShapeW = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 3, x.rank) : getElementAt('uniforms.x_shape', 4, x.rank)};\n let xShapeU = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 4, x.rank) : getElementAt('uniforms.x_shape', 1, x.rank)};\n let inputDepthNearestVec4 = (xShapeU / 4) * 4;\n let inputDepthVec4Remainder = xShapeU % 4;\n\n var dotProd = 0.0;\n for (var wF = 0u; wF < uniforms.filter_dims[0]; wF++) {\n let xF = xFCorner + wF * uniforms.dilations[0];\n if (xF < 0 || xF >= xShapeY) {\n continue;\n }\n\n for (var wR = 0u; wR < uniforms.filter_dims[1]; wR++) {\n let xR = xRCorner + wR * uniforms.dilations[1];\n if (xR < 0 || xR >= xShapeZ) {\n continue;\n }\n\n for (var wC = 0u; wC < uniforms.filter_dims[2]; wC++) {\n let xC = xCCorner + wC * uniforms.dilations[2];\n if (xC < 0 || xC >= xShapeW) {\n continue;\n }\n\n for (var d1 = 0u; d1 < inputDepthNearestVec4; d1 += 4) {\n ${\n isChannelsLast ? `let xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3));\n ` :\n `let xValues = vec4(\n getX(batch, d1, xF, xR, xC),\n getX(batch, d1 + 1, xF, xR, xC),\n getX(batch, d1 + 2, xF, xR, xC),\n getX(batch, d1 + 3, xF, xR, xC));\n `}\n let wValues = vec4(\n getW(d2, d1, wF, wR, wC),\n getW(d2, d1 + 1, wF, wR, wC),\n getW(d2, d1 + 2, wF, wR, wC),\n getW(d2, d1 + 3, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n if (inputDepthVec4Remainder == 1) {\n ${\n isChannelsLast ? `dotProd += getX(batch, xF, xR, xC, inputDepthNearestVec4)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);` :\n `dotProd += getX(batch, inputDepthNearestVec4, xF, xR, xC)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);`}\n } else if (inputDepthVec4Remainder == 2) {\n ${\n isChannelsLast ? `let xValues = vec2(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1));\n ` :\n `let xValues = vec2(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC));\n `}\n let wValues = vec2(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n } else if (inputDepthVec4Remainder == 3) {\n ${\n isChannelsLast ? `let xValues = vec3(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 2));\n ` :\n `let xValues = vec3(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 2, xF, xR, xC));\n `}\n let wValues = vec3(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 2, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n ${hasBias ? 
'dotProd = dotProd + getBiasByOutputCoords(coords)' : ''};\n result[global_idx] = f32(dotProd);\n }`;\n };\n return {\n name: 'Conv3DNaive',\n shaderCache:\n {hint: `${attributes.cacheKey};${isChannelsLast};${innerElementSize};${hasBias}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from './fuse-utils';\n\n/**\n * naive grouped conv implementation, supports 1d/2d conv\n * @param squeezeOutputShapeFunction - an optional function to squeeze the output shape, only used in conv1d\n */\nexport const createGroupedConvProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += b[output_channel];' : '';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n\n const isChannelLast = attributes.format === 'NHWC';\n const outputShape = calculateOutputShape(\n xShape, wShape, attributes.dilations, attributes.pads, attributes.strides, isChannelLast);\n const outputSize = ShapeUtil.size(outputShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.dilations},\n {type: DataType.uint32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.uint32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.uint32, data: outputChannelsPerGroup}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length);\n const w = inputVariable('w', inputs[1].dataType, wShape.length);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims.length));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'dilations', type: 'u32', length: attributes.dilations.length},\n {name: 'strides', type: 'u32', length: 2}, {name: 'pads', type: 'u32', length: 
2},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch: u32 = outputIndices[0];\n let output_channel: u32 = outputIndices[${isChannelLast ? 3 : 1}];\n let xRCCorner: vec2 = vec2(outputIndices[${isChannelLast ? 1 : 2}], outputIndices[${\n isChannelLast ? 2 : 3}]) * uniforms.strides - uniforms.pads;\n let group_id: u32 = output_channel / uniforms.output_channels_per_group;\n\n var value: ${output.type.value} = ${output.type.value}(0);\n for (var wInChannel: u32 = 0u; wInChannel < uniforms.w_shape[1]; wInChannel++) {\n let input_channel = group_id * uniforms.w_shape[1] + wInChannel;\n for (var wHeight: u32 = 0u; wHeight < uniforms.w_shape[2]; wHeight++) {\n let xHeight = xRCCorner.x + wHeight * uniforms.dilations[0];\n\n if (xHeight < 0u || xHeight >= uniforms.x_shape[${isChannelLast ? 1 : 2}]) {\n continue;\n }\n\n for (var wWidth: u32 = 0u; wWidth < uniforms.w_shape[3]; wWidth++) {\n let xWidth = xRCCorner.y + wWidth * uniforms.dilations[1];\n if (xWidth < 0u || xWidth >= uniforms.x_shape[${isChannelLast ? 2 : 3}]) {\n continue;\n }\n\n let xVal = ${\n isChannelLast ? x.get('batch', 'xHeight', 'xWidth', 'input_channel') :\n x.get('batch', 'input_channel', 'xHeight', 'xWidth')};\n let wVal = ${w.get('output_channel', 'wInChannel', 'wHeight', 'wWidth')};\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${output.setByOffset('global_idx', 'value')}\n }`;\n };\n return {\n name: 'GroupedConv',\n shaderCache: {hint: attributes.cacheKey, inputDependencies},\n getRunData: () => ({\n outputs: [{\n dims: squeezeOutputShapeFunction ? 
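The GroupedConv shader above maps an output channel to its group via output_channel / uniforms.output_channels_per_group and then visits only that group's input channels (group_id * uniforms.w_shape[1] + wInChannel). A minimal sketch of that channel bookkeeping (groupedConvChannels is a hypothetical helper name):

// Channel mapping used by the GroupedConv shader;
// wShape is [outChannels, inChannelsPerGroup, kH, kW].
const groupedConvChannels = (outputChannel: number, wShape: readonly number[], group: number) => {
  const outputChannelsPerGroup = wShape[0] / group;
  const groupId = Math.floor(outputChannel / outputChannelsPerGroup);
  const inChannelsPerGroup = wShape[1];
  // Input channels visited by the shader's wInChannel loop for this output channel.
  const inputChannels =
      Array.from({length: inChannelsPerGroup}, (_, wInChannel) => groupId * inChannelsPerGroup + wInChannel);
  return {groupId, inputChannels};
};

// 8 output channels in 4 groups, weight shape [8, 2, 3, 3]: output channel 5 sits in
// group 2 and convolves input channels 4 and 5.
console.log(groupedConvChannels(5, [8, 2, 3, 3], 4)); // { groupId: 2, inputChannels: [ 4, 5 ] }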
squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const createGroupedConvVectorizeProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const components = getMaxComponents(outputShape[3]);\n const outputNumber = getMaxComponents(outputShape[2]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const xShape = [inputs[0].dims[0], inputs[0].dims[1], inputs[0].dims[2], inputs[0].dims[3] / components];\n const wShape = [inputs[1].dims[0], inputs[1].dims[1], inputs[1].dims[2], inputs[1].dims[3] / components];\n const outputShapeInShader = [outputShape[0], outputShape[1], outputShape[2], outputShape[3] / components];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.int32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape, outputShapeInShader));\n const xNumber = (outputNumber - 1) * attributes.strides[1] + wShape[1];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length, components);\n const w = inputVariable('w', inputs[1].dataType, wShape.length, components);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims, components));\n }\n const processBias = hasBias ? 
'value += b[output_channel];' : '';\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'strides', type: 'i32', length: 2},\n {name: 'pads', type: 'i32', length: 2},\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let width0 = uniforms.output_shape[3];\n let output_channel = global_idx % width0;\n var index1 = global_idx / width0;\n let width1 = uniforms.output_shape[2] / ${outputNumber}u;\n let col = (index1 % width1) * ${outputNumber}u;\n index1 = index1 / width1;\n let row = index1 % uniforms.output_shape[1];\n let batch = index1 / uniforms.output_shape[1];\n\n let x_corner = vec2(i32(row), i32(col)) * uniforms.strides - uniforms.pads;\n\n var x_vals: array<${x.type.value}, ${xNumber}>;\n var values: array<${output.type.value}, ${outputNumber}>;\n let input_channel = output_channel;\n // Use constant instead of uniform can give better performance for w's height/width.\n for (var w_height: u32 = 0u; w_height < ${wShape[0]}; w_height++) {\n let x_height = x_corner.x + i32(w_height);\n if (x_height >= 0 && u32(x_height) < uniforms.x_shape[1]) {\n for (var i = 0; i < ${xNumber}; i++) {\n let x_width = x_corner.y + i;\n if (x_width >= 0 && u32(x_width) < uniforms.x_shape[2]) {\n x_vals[i] = ${x.get('batch', 'u32(x_height)', 'u32(x_width)', 'input_channel')};\n } else {\n x_vals[i] = ${x.type.value}(0);\n }\n }\n for (var w_width: u32 = 0u; w_width < ${wShape[1]}; w_width++) {\n let w_val = ${w.get('w_height', 'w_width', '0', 'output_channel')};\n for (var i = 0u; i < ${outputNumber}u; i++) {\n values[i] = fma(x_vals[i * u32(uniforms.strides[1]) + w_width], w_val, values[i]);\n }\n }\n }\n }\n\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n ${output.set('batch', 'row', 'col + i', 'output_channel', 'value')};\n }\n }`;\n };\n\n return {\n name: 'GroupedConv-Vectorize',\n shaderCache: {\n hint: `${attributes.cacheKey};${components};${outputNumber};${xNumber};${wShape[0]};${wShape[1]}`,\n inputDependencies: hasBias ? ['rank', 'rank', 'type'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createTensorShapeVariables, getBroadcastDims, getMaxComponents, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\n\nexport const createNaiveMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n\n const M = aShape[aShape.length - 2];\n const N = bShape[bShape.length - 1];\n const K = aShape[aShape.length - 1];\n const components = getMaxComponents(N);\n const aComponents = getMaxComponents(K);\n const outputNumber = getMaxComponents(M);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const hasBias = inputs.length > 2;\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const outputShapeInShader = [batchSize, M, N];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShape, bShape));\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeInShader));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchDims = internalVariable('batch_dims', inputs[0].dataType, outerDims.length);\n const a = inputVariable('a', inputs[0].dataType, aShape.length, aComponents);\n const b = inputVariable('b', inputs[1].dataType, bShape.length, components);\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const inputVariables = [a, b];\n let processBias = '';\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n processBias = `${\n isChannelsLast ? 
`value += bias[col / ${biasComponents}];` :\n `value += ${output.type.value}(bias[row + i]);`}`;\n }\n\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const broadCastADims = getBroadcastDims(outerDimsA, outerDims);\n const broadCastBDims = getBroadcastDims(outerDimsB, outerDims);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'K', type: 'u32'}\n ];\n appendActivationUniforms(activationAttributes, uniforms);\n\n const getIndices = (variable: IndicesHelper, broadCastDims: number[]) => {\n const rank = variable.rank;\n const name = variable.name;\n if (rank === 2) {\n return `var ${name}_indices = ${variable.type.indices}(0u, 0u);`;\n }\n const batchRank = batchDims.rank;\n let resStr = `var ${name}_indices: ${variable.type.indices};`;\n for (let i = rank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\n${name}_indices[${i}] = ${batchRank > 1 ? `batch_indices[${j}]` : 'batch_indices'};`;\n }\n broadCastDims.forEach(i => {\n resStr += `\\n${name}_indices[${i}] = 0;`;\n });\n resStr += `${name}_indices[${rank - 2}] = 0u;\n ${name}_indices[${rank - 1}] = 0u;`;\n return resStr;\n };\n\n const calcResult = (): string => {\n let calcStr = `var a_data: ${a.type.value};`;\n for (let i = 0; i < aComponents; i++) {\n calcStr += `\n let b_data${i} = b[(b_offset + (k + ${i}) * uniforms.N + col) / ${components}];`;\n }\n for (let i = 0; i < outputNumber; i++) {\n calcStr += `a_data = a[(a_offset + (row + ${i}) * uniforms.K + k) / ${aComponents}];`;\n\n for (let j = 0; j < aComponents; j++) {\n calcStr += `\n values[${i}] = fma(${b.type.value}(a_data${aComponents === 1 ? '' : `[${j}]`}), b_data${j}, values[${\n i}]);\\n`;\n }\n }\n return calcStr;\n };\n\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let col = (global_idx % (uniforms.N / ${components})) * ${components};\n var index1 = global_idx / (uniforms.N / ${components});\n let stride1 = uniforms.M / ${outputNumber};\n let row = (index1 % stride1) * ${outputNumber};\n let batch = index1 / stride1;\n\n ${outputShape.length === 2 ? '' : `let batch_indices = ${batchDims.offsetToIndices('batch')};`}\n ${getIndices(a, broadCastADims)}\n let a_offset = ${a.indicesToOffset('a_indices')};\n ${getIndices(b, broadCastBDims)}\n let b_offset = ${b.indicesToOffset('b_indices')};\n var values: array<${output.type.value}, ${outputNumber}>;\n for (var k: u32 = 0u; k < uniforms.K; k = k + ${aComponents}) {\n ${calcResult()}\n }\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n let cur_indices = ${output.type.indices}(batch, row + i, col);\n let offset = ${output.indicesToOffset('cur_indices')};\n ${output.setByOffset(`offset / ${components}`, 'value')};\n }\n }\n `;\n };\n return {\n name: 'MatMulNaive',\n shaderCache: {\n hint: `${activationAttributes.activation};${components};${aComponents};${outputNumber};${isChannelsLast}`,\n inputDependencies: hasBias ? 
['rank', 'rank', 'rank'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n};\n\nexport const matMul = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n const outputShape = BroadcastUtil.calcShape(context.inputs[0].dims, context.inputs[1].dims, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const N = outputShape[outputShape.length - 1];\n const K = context.inputs[0].dims[context.inputs[0].dims.length - 1];\n if (N < 8 && K < 8) {\n context.compute(createNaiveMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n } else {\n context.compute(createMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DMatMulProgramInfo} from './3rd-party/conv2d_mm_webgpu';\nimport {computeConv3DInfo, createConv3DNaiveProgramInfo} from './3rd-party/conv3d_naive_webgpu';\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createGroupedConvProgramInfo, createGroupedConvVectorizeProgramInfo} from './conv-grouped';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createNaiveMatmulProgramInfo} from './matmul';\nimport {createTransposeProgramInfo} from './transpose';\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[], isChannelLast: boolean): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(isChannelLast ? 1 : 2, isChannelLast ? 3 : 4);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 
3 : 1, 0, outChannels);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly format: 'NHWC'|'NCHW';\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n readonly wIsConst: boolean;\n}\n\n// for transposing weight tensor from [M, C/group, KH, KW] to [KH, KW, C/group, M]\nconst weightTransposeAttribute = [2, 3, 1, 0];\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/master/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n if (inputs[0].dims.length > 5) {\n throw new Error('greater than 5D is not supported');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n};\n\nconst getAdjustedConvAttributes = <T extends ConvAttributes>(attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n if (kernelShape[i - 2] === 0) {\n kernelShape[i - 2] = inputs[1].dims[i];\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.format === 'NHWC',\n attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads});\n return newAttributes;\n};\n\nexport const parseConvAttributes = (attributes: Record<string, unknown>): ConvAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default 
attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad = ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number];\n const dilations = attributes.dilations as number[];\n const group = attributes.group as number;\n const kernelShape = attributes.kernel_shape as number[];\n const pads = attributes.pads as number[];\n const strides = attributes.strides as number[];\n const wIsConst = (attributes.w_is_const as () => boolean)();\n\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst conv2d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n\n // check attributes\n\n // const hasPreluActivationWeights = false; /* TODO: add support for prelu activation weights */\n const isChannelsLast = attributes.format === 'NHWC';\n if (attributes.group !== 1) {\n // NVIDIA GPU with ampere architecture fails with below 2 cases, but we couldn't repro them with any other\n // GPUs. So just disable vectorize on NVIDIA ampere to ensure always correct outputs.\n // [webgpu]Conv - conv - vectorize group - B\n // [webgpu]Conv - conv - vectorize group - D\n const enableGroupedConvVectorize = !context.adapterInfo.isArchitecture('ampere');\n if (enableGroupedConvVectorize && isChannelsLast && inputs[1].dims[0] === attributes.group &&\n inputs[1].dims[1] === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1) {\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n const convInputs = [inputs[0], transposedWeight];\n if (inputs.length === 3) {\n convInputs.push(inputs[2]);\n }\n context.compute(\n createGroupedConvVectorizeProgramInfo(convInputs, adjustedAttributes, outputShape), {inputs: convInputs});\n } else {\n context.compute(createGroupedConvProgramInfo(inputs, adjustedAttributes));\n }\n return;\n }\n\n const hasBias = inputs.length === 3;\n const inputHeight = inputs[0].dims[isChannelsLast ? 1 : 2];\n const inputWidth = inputs[0].dims[isChannelsLast ? 2 : 3];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const outChannels = outputShape[isChannelsLast ? 
3 : 1];\n\n const sameSize = isChannelsLast && weightHeight === inputHeight && weightWidth === inputWidth &&\n attributes.pads[0] === 0 && attributes.pads[1] === 0;\n if (sameSize ||\n (weightHeight === 1 && weightWidth === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1 &&\n attributes.strides[0] === 1 && attributes.strides[1] === 1 && attributes.pads[0] === 0 &&\n attributes.pads[1] === 0)) {\n // conv2dByMatMul\n const batch = outputShape[0];\n let xReshaped, wReshaped, matmulOutputShape;\n const matmulInputs = [];\n if (isChannelsLast) {\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n if (sameSize) {\n const sharedDim = inputHeight * inputWidth * inputChannels;\n xReshaped = inputs[0].reshape([1, batch, sharedDim]);\n wReshaped = transposedWeight.reshape([1, sharedDim, outChannels]);\n matmulOutputShape = [1, batch, outChannels];\n } else {\n xReshaped = inputs[0].reshape([batch, inputHeight * inputWidth, inputChannels]);\n wReshaped = transposedWeight.reshape([1, inputChannels, outChannels]);\n matmulOutputShape = [batch, outHeight * outWidth, outChannels];\n }\n matmulInputs.push(xReshaped);\n matmulInputs.push(wReshaped);\n } else {\n xReshaped = inputs[0].reshape([batch, inputChannels, inputHeight * inputWidth]);\n wReshaped = inputs[1].reshape([1, outChannels, inputChannels]);\n matmulOutputShape = [batch, outChannels, outHeight * outWidth];\n matmulInputs.push(wReshaped);\n matmulInputs.push(xReshaped);\n }\n if (hasBias) {\n matmulInputs.push(inputs[2]);\n }\n const N = matmulOutputShape[2];\n const K = matmulInputs[0].dims[matmulInputs[0].dims.length - 1];\n // Tune the threshold.\n if (N < 8 && K < 8) {\n context.compute(\n createNaiveMatmulProgramInfo(\n matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n } else {\n context.compute(\n createMatmulProgramInfo(matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n }\n return;\n }\n\n // TODO: implement conv2dWithIm2Col()\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convInputs = [inputs[0], transposedWeight];\n if (hasBias) {\n convInputs.push(inputs[2]);\n }\n\n // STEP.3: compute matmul\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n context.compute(\n createConv2DMatMulProgramInfo(\n convInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convInputs});\n};\n\nconst conv1d = (context: ComputeContext, attributes: ConvAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n const pads = [0, attributes.pads[0], 0, attributes.pads[1]];\n const strides = [1].concat(attributes.strides);\n const dilations = [1].concat(attributes.dilations);\n const kernelShape = [1].concat(attributes.kernelShape);\n const adjustedAttributes = getAdjustedConvAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createGroupedConvProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] : []));\n};\n\nconst conv3d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const format = attributes.format === 'NHWC' ? 'channelsLast' : 'channelsFirst';\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n const pads = attributes.autoPad === 'NOTSET' ? attributes.pads : attributes.autoPad;\n const convInfo = computeConv3DInfo(\n inputs[0].dims as [number, number, number, number, number],\n inputs[1].dims as [number, number, number, number, number],\n attributes.strides as number | [number, number, number],\n attributes.dilations as number | [number, number, number], pads as string | number[], false, format);\n context.compute(createConv3DNaiveProgramInfo(\n inputs, adjustedAttributes, convInfo.outShape,\n [convInfo.filterDepth, convInfo.filterHeight, convInfo.filterWidth],\n [convInfo.padInfo.front, convInfo.padInfo.top, convInfo.padInfo.left], format));\n};\n\nexport const conv = (context: ComputeContext, attributes: ConvAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n conv1d(context, attributes);\n } else if (context.inputs[0].dims.length === 5) {\n conv3d(context, context.inputs, attributes);\n } else {\n conv2d(context, context.inputs, attributes);\n }\n};\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dTransposeCommonSnippet =\n (isChannelsLast: boolean, addBias = false, attributes: ConvTransposeAttributes, type: string,\n innerElementSize = 4): string => {\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];';\n case 4:\n return `\n let coord1 = vec4(coordX, coordY, col + 1, rowInner);\n let coord2 = vec4(coordX, coordY, col + 2, rowInner);\n let coord3 = vec4(coordX, coordY, col + 3, rowInner);\n let v0 = w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\n let v1 = w[getIndexFromCoords4D(coord1, vec4(uniforms.w_shape))];\n let v2 = w[getIndexFromCoords4D(coord2, vec4(uniforms.w_shape))];\n let v3 = w[getIndexFromCoords4D(coord3, vec4(uniforms.w_shape))];\n return ${type}(v0, v1, v2, v3);\n `;\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, iXR, iXC, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, iXR, iXC);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n\n const readASnippet = `\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (uniforms.filter_dims[1] * inChannels);\n let WCol = ${col} / inChannels % uniforms.filter_dims[1];\n let xR = f32(outRow - uniforms.pads[0] + uniforms.dilations[0] * WRow) / f32(uniforms.strides[0]);\n let xC = f32(outCol - uniforms.pads[1] + uniforms.dilations[1] * WCol) / f32(uniforms.strides[1]);\n if (xR < 0.0 || xR >= f32(${xHeight}) || fract(xR) > 0.0) {\n return ${type}(0.0);\n }\n if (xC < 0.0 || xC >= f32(${xWidth}) || fract(xC) > 0.0) {\n return ${type}(0.0);\n }\n let iXR = i32(xR);\n let iXC = i32(xC);\n let xCh = ${col} % inChannels;\n ${coordASnippet}\n return x[getIndexFromCoords4D(coord, vec4(uniforms.x_shape))/${innerElementSize}];`;\n\n const sampleA = isChannelsLast ? `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readASnippet}\n }\n return ${type}(0.0);` :\n `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readASnippet}\n }\n return ${type}(0.0);`;\n\n const sampleW = `\n let col = colIn * ${innerElementSize};\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let coordX = uniforms.filter_dims[0] - 1 - row / (uniforms.filter_dims[1] * inChannels);\n let coordY = uniforms.filter_dims[1] - 1 - (row / inChannels) % uniforms.filter_dims[1];\n if (${\n isChannelsLast ? 'row < uniforms.dim_inner && col < uniforms.dim_b_outer' :\n 'row < uniforms.dim_inner && col < uniforms.dim_a_outer'} && coordX >= 0 && coordY >= 0) {\n let rowInner = row % inChannels;\n let coord = vec4(coordX, coordY, col, rowInner);\n ${getWSnippet(innerElementSize)}\n }\n return ${type}(0.0);\n `;\n\n const applyActivation = getActivationSnippet(attributes, type);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleA : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleW : sampleA}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueInput : ${type}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueInput;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${innerElementSize}] = value;\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DTransposeMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes, outputShape: readonly number[],\n dimAOuter: number, dimBOuter: number, dimInner: number, hasBias: boolean,\n sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 && inChannels % 3) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv_backprop_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? 4 : 1;\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const components = isVec4 ? 4 : 1;\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const effectiveFilterDims = [\n filterDims[0] + (attributes.dilations[0] <= 1 ? 0 : (filterDims[0] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] + (attributes.dilations[1] <= 1 ? 0 : (filterDims[1] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor((attributes.pads[1] + attributes.pads[3]) / 2)\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: attributes.strides},\n {type: DataType.int32, data: attributes.dilations}, {type: DataType.int32, data: filterDims},\n {type: DataType.int32, data: pads}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims.length, components);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, 1);\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n const inputVariables = [x, w];\n\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${bias.type.value} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'strides', type: 'i32', length: 2}, {name: 'dilations', type: 'i32', length: 2},\n {name: 'filter_dims', type: 'i32', length: filterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}\n ];\n appendActivationUniforms(attributes, uniforms);\n const elemType = tensorTypeToWsglStorageType(inputs[0].dataType, 1);\n if (elemType !== 'f16' && elemType !== 'f32') {\n throw new Error(`elemType ${elemType} is not supported.`);\n }\n return `\n ${utilFunctions('uniforms.result_strides')}\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)};\n ${declareFunctions}\n ${conv2dTransposeCommonSnippet(isChannelsLast, hasBias, attributes, x.type.value, innerElementSize)}\n ${\n isVec4 ? makeMatMulPackedVec4Source(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner, false,\n undefined, sequentialAccessByThreads)}`;\n };\n\n return {\n name: 'Conv2DTransposeMatMul',\n shaderCache:\n {hint: `${attributes.cacheKey};${elementsPerThread};${workGroupSize};${isVec4}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_webgpu.ts\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\n\nconst createConvTranspose2DOpProgramShaderSource =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], outputShape: readonly number[], hasBias: boolean,\n is1DimensionDispatch: boolean, isVec4 = false, dataType: string, uniforms: UniformsArrayType,\n isChannelsLast = false): string => {\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n const workPerThread = isVec4 ? 2 : 1;\n\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : u32, value : ${isVec4 ? `vec4<${dataType}>` : dataType}) {\n result[flatIndex] = ${isVec4 ? 
`vec4<${dataType}>` : dataType}(value);\n }`;\n if (hasBias) {\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${dataType}>` : dataType} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? '/ 4' : ''}];\n }`;\n }\n const components = isVec4 ? 4 : 1;\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const dy = inputVariable('Dy', inputs[0].dataType, inputs[0].dims.length, components);\n const inputVariables = [dy, w];\n if (hasBias) {\n inputVariables.push(inputVariable('bias', inputs[2].dataType, [outputShape[channelDim]].length, components));\n }\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n\n const codeSnippet4 = `{\n let batch: u32 = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} / uniforms.result_shape[1];\n let r = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} % uniforms.result_shape[1];\n let c = ${is1DimensionDispatch ? 'global_id.y' : 'workgroup_id.y'} * ${workPerThread};\n let d1: u32 = ${is1DimensionDispatch ? 'global_id.x' : 'workgroup_id.x'} * 4;\n\n let dyCorner = vec2(i32(r), i32(c)) - vec2(uniforms.pads);\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd: array, ${workPerThread}>;\n for (var i = 0; i < ${workPerThread}; i++) {\n dotProd[i] = vec4<${dataType}>(0.0);\n }\n for (var wR: u32 = 0; wR < uniforms.filter_dims[0]; wR = wR + 1) {\n var dyR = (${dataType}(dyCorner.x) + ${dataType}(wR)) / ${dataType}(uniforms.strides.x);\n let wRPerm = uniforms.filter_dims[0] - 1 - wR;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[1]) ||\n fract(dyR) > 0.0 || wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.filter_dims[1]; wC = wC + 1) {\n let dyC = (${dataType}(dyCorner.y) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let dyC2 = (${dataType}(dyCorner.y) + 1.0 + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims[1] - 1 - wC;\n if (wCPerm < 0) {\n continue;\n }\n var bDyCVal = true;\n var bDyCVal2 = true;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC) > 0.0) {\n bDyCVal = false;\n }\n if (dyC2 < 0.0 || dyC2 >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC2) > 0.0) {\n bDyCVal2 = false;\n }\n\n let idyC: u32 = u32(dyC);\n let idyC2: u32 = u32(dyC2);\n if (bDyCVal && bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2 :u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n\n xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n\n dotProd[1] = dotProd[1] + vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n }\n } else if (bDyCVal) {\n let d2Length = uniforms.Dy_shape[${channelDim}];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = 
${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n }\n } else if (bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[1] = dotProd[1] + tmpval;\n }\n }\n }\n }\n\n for (var i: u32 = 0; i < ${workPerThread}; i = i + 1) {\n let value = dotProd[i] + ${hasBias ? 'bias[c+i]' : `vec4<${dataType}>(0.0)`};\n ${output.set('batch', 'r', 'c + i', 'd1', 'value')};\n }\n }`;\n const codeSnippet = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch = ${output.indicesGet('outputIndices', 0)};\n let d1 = ${output.indicesGet('outputIndices', channelDim)};\n let r = ${output.indicesGet('outputIndices', rowDim)};\n let c = ${output.indicesGet('outputIndices', colDim)};\n let dyCorner = vec2(i32(r), i32(c)) - uniforms.pads;\n let dyRCorner = dyCorner.x;\n let dyCCorner = dyCorner.y;\n let groupId = d1 / uniforms.output_channels_per_group;\n let wOutChannel = d1 - groupId * uniforms.output_channels_per_group;\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd = ${dataType}(0.0);\n for (var wR: u32 = 0; wR < uniforms.effective_filter_dims.x; wR = wR + 1) {\n if (wR % uniforms.dilations.x != 0) {\n continue;\n }\n let dyR = (${dataType}(dyRCorner) + ${dataType}(wR)) / ${dataType}(uniforms.strides[0]);\n let wRPerm = uniforms.filter_dims.x - 1 - wR / uniforms.dilations.x;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[${rowDim}]) || fract(dyR) > 0.0 ||\n wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.effective_filter_dims.y; wC = wC + 1) {\n if (wC % uniforms.dilations.y != 0) {\n continue;\n }\n let dyC = (${dataType}(dyCCorner) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims.y - 1 - wC / uniforms.dilations.y;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[${colDim}]) ||\n fract(dyC) > 0.0 || wCPerm < 0) {\n continue;\n }\n let idyC: u32 = u32(dyC);\n var inputChannel = groupId * uniforms.input_channels_per_group;\n for (var d2: u32 = 0; d2 < uniforms.input_channels_per_group; d2 = d2 + 1) {\n let xValue = ${\n isChannelsLast ? dy.get('batch', 'idyR', 'idyC', 'inputChannel') :\n dy.get('batch', 'inputChannel', 'idyR', 'idyC')};\n let wValue = ${w.get('inputChannel', 'wOutChannel', 'u32(wRPerm)', 'u32(wCPerm)')};\n dotProd = dotProd + xValue * wValue;\n inputChannel = inputChannel + 1;\n }\n }\n }\n let value = dotProd + ${hasBias ? 
'bias[d1]' : `${dataType}(0.0)`};\n ${output.setByOffset('global_idx', 'value')};\n `;\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')};\n ${isVec4 ? codeSnippet4 : codeSnippet}}`;\n };\n\nexport const createConvTranspose2DProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n // const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = attributes.outputShape;\n const outputSize = ShapeUtil.size(outputShape);\n\n // const inChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // TODO Enable isVec4 for performance\n // Disabled due to weight matrix layout issue\n // const isVec4 = attributes.group === 1 && isChannelsLast && inChannels % 4 === 0 && outChannels % 4 === 0;\n const dispatch = [\n Math.ceil(outputSize / 64),\n 1,\n 1,\n ];\n LOG_DEBUG('verbose', () => `[conv2d_backprop_webgpu] dispatch = ${dispatch}`);\n\n const isChannelsLast = attributes.format === 'NHWC';\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const strides = [attributes.strides[0], attributes.strides[1]];\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const dilations = [attributes.dilations[0], attributes.dilations[1]];\n const effectiveFilterDims = [\n filterDims[0] +\n (attributes.dilations[0] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 1 : 2] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] +\n (attributes.dilations[1] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 
2 : 3] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor(attributes.pads[1] + attributes.pads[3]) / 2\n ];\n\n const isVec4 = false;\n const group = attributes.group;\n const wShape = inputs[1].dims;\n const inputChannelsPerGroup = wShape[0] / group;\n const outputChannelsPerGroup = wShape[1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: strides},\n {type: DataType.uint32, data: filterDims}, {type: DataType.uint32, data: dilations},\n {type: DataType.uint32, data: effectiveFilterDims}, {type: DataType.int32, data: pads},\n {type: DataType.uint32, data: inputChannelsPerGroup}, {type: DataType.uint32, data: outputChannelsPerGroup},\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims)\n ];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const is1DimensionDispatch = dispatch[1] === 1 && dispatch[2] === 1;\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'strides', type: 'u32', length: strides.length},\n {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'dilations', type: 'u32', length: filterDims.length},\n {name: 'effective_filter_dims', type: 'u32', length: effectiveFilterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}, {name: 'input_channels_per_group', type: 'u32'},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n return `${\n createConvTranspose2DOpProgramShaderSource(\n shaderHelper, inputs, outputShape, hasBias, is1DimensionDispatch, isVec4, dataType, uniforms,\n isChannelsLast)}`;\n };\n return {\n name: 'ConvTranspose2D',\n shaderCache: {hint: `${attributes.cacheKey};`, inputDependencies},\n getRunData: () => ({\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n outputs: [{\n dims: squeezeOutputShapeFunction ? squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n programUniforms\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DTransposeMatMulProgramInfo} from './3rd-party/conv_backprop_mm_webgpu';\nimport {createConvTranspose2DProgramInfo} from './3rd-party/conv_backprop_webgpu';\nimport {ConvAttributes} from './conv';\nimport {parseInternalActivationAttributes} from './fuse-utils';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n group: number, pads: number[], strides: readonly number[], isChannelLast: boolean, outputPadding: number[],\n outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateOutputShape = outputShape.length === 0;\n if (outputPadding.length === 0) {\n for (let i = 0; i < spatialRank; ++i) {\n outputPadding.push(0);\n }\n }\n const batchSize = inputShape[0];\n const outChannels = kernelShape[isChannelLast ? 3 : 1] * group;\n for (let i = 0, j = inputShape.length - spatialRank - (isChannelLast ? 1 : 0); i < spatialRank; ++i, ++j) {\n const inSize = inputShape[j];\n const outSize = updateOutputShape ? inSize * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inSize, strides[i], pads[i], kernelShape[j], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateOutputShape) {\n outputShape.push(\n strides[i] * (inSize - 1) + outputPadding[i] + (kernelShape[j] - 1) * dilations[i] + 1 - pads[i] -\n pads[i + spatialRank]);\n }\n }\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 3 : 1, 0, outChannels);\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nconst getAdjustedConvTransposeAttributes =\n (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0 || attributes.kernelShape.reduce((a, b) => a * b, 1) === 0) {\n kernelShape.length = 0;\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const isChannelsLast = attributes.format === 'NHWC';\n kernelShape.splice(0, 0, inputs[1].dims[0]);\n kernelShape.splice(isChannelsLast ? 
3 : 1, 0, inputs[1].dims[1]);\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const outputPadding = attributes.outputPadding.slice();\n const inputShape = inputs[0].dims;\n let dilations = attributes.dilations.slice();\n if (dilations.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n dilations = new Array(spatialRank).fill(1);\n }\n let strides = attributes.strides.slice();\n if (strides.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n strides = new Array(spatialRank).fill(1);\n }\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, dilations, attributes.autoPad, attributes.group, pads, strides, isChannelsLast,\n outputPadding, outputShape);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputPadding, outputShape, dilations, strides});\n return newAttributes;\n };\n\nexport const parseConvTransposeAttributes = (attributes: Record): ConvTransposeAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad =\n ['NOTSET', 'VALID', 'SAME_UPPER',\n 'SAME_LOWER'][typeof attributes.autoPad == 'undefined' ? 0 : attributes.autoPad as number];\n const dilations = attributes.dilations as [number, number];\n const group = attributes.group as number;\n const kernelShape = attributes.kernelShape as [number, number];\n const pads = attributes.pads as [number, number, number, number];\n const strides = attributes.strides as [number, number];\n const wIsConst = (attributes.wIsConst as () => boolean)();\n const outputPadding = attributes.outputPadding as [number, number, number, number];\n const outputShape = attributes.outputShape as [number, number];\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n outputPadding,\n outputShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#ConvTranspose\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 && inputs[0].dims.length !== 3) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? 
inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n const dilationsSet = attributes.dilations.reduce((a, b) => a + b, 0) > 0;\n // wrong dilations dimension\n if (dilationsSet && attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n const stridesSet = attributes.strides.reduce((a, b) => a + b, 0) > 0;\n // Wrong strides dimension\n if (stridesSet && attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n const padsSet = attributes.pads.reduce((a, b) => a + b, 0) > 0;\n if (padsSet && attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank && attributes.outputPadding.length !== 0) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n const kernelShapeSet = attributes.kernelShape.reduce((a, b) => a + b, 0) > 0;\n if (kernelShapeSet && attributes.kernelShape.length !== 0 &&\n attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n};\n\n// for transposing weight tensor from [C, M/group, KH, KW] to [KH, KW, M/group, C]\nconst weightTransposePerm = [2, 3, 1, 0];\n\nconst convTranspose2d =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = adjustedAttributes.outputShape;\n const outChannels = outputShape[isChannelsLast ? 3 : 1];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // Switch to naive method when outChannels and inputChannels are very small. It's because that in this case it's\n // not suitable for matmul version since matmul uses tile size 32x32 resulting the underlying execution unit\n // utilization rate is very low.\n if (adjustedAttributes.group !== 1 || (outChannels === 1 && inputChannels === 1)) {\n context.compute(createConvTranspose2DProgramInfo(inputs, adjustedAttributes));\n return;\n }\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposePerm),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convTransposeInputs = [inputs[0], transposedWeight];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n if (!isChannelsLast && inputs[2].dims.length === 1) {\n convTransposeInputs.push(inputs[2].reshape([inputs[2].dims[0], 1, 1]));\n } else {\n convTransposeInputs.push(inputs[2]);\n }\n }\n\n // STEP.3: compute matmul\n context.compute(\n createConv2DTransposeMatMulProgramInfo(\n convTransposeInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convTransposeInputs});\n };\n\nconst convTranspose1d = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n let kernelShape = attributes.kernelShape;\n if (kernelShape.length === 0 || kernelShape[0] === 0) {\n kernelShape = [context.inputs[1].dims[2]];\n }\n let dilations = attributes.dilations;\n if (dilations.length === 0 || dilations[0] === 0) {\n dilations = [1];\n }\n let strides = attributes.strides;\n if (strides.length === 0 || strides[0] === 0) {\n strides = [1];\n }\n let pads = attributes.pads;\n if (pads.length === 0) {\n pads = [0, 0];\n }\n pads = [0, pads[0], 0, pads[1]];\n strides = [1].concat(strides);\n dilations = [1].concat(dilations);\n kernelShape = [1].concat(kernelShape);\n const adjustedAttributes =\n getAdjustedConvTransposeAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createConvTranspose2DProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] :\n [outputShape[0], outputShape[1], outputShape[3]]));\n};\n\nexport const convTranspose = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n convTranspose1d(context, attributes);\n } else {\n convTranspose2d(context, context.inputs, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper} from './common';\n\n\nexport interface CumSumAttributes extends AttributeWithCacheKey {\n readonly exclusive: boolean;\n readonly reverse: boolean;\n}\nconst createCumsumProgramInfo =\n (inputType: number, inputShape: readonly number[], axisInput: TensorView, attributes: CumSumAttributes):\n ProgramInfo => {\n const outputSize = ShapeUtil.size(inputShape); // outputShape is same as inputShape.\n const rank = inputShape.length; // input/output rank\n const input = inputVariable('input', inputType, rank);\n const output = outputVariable('output', inputType, rank);\n const axisValue = axisInput.dataType === DataType.int32 ? axisInput.getInt32Array()[0] :\n Number(axisInput.getBigInt64Array()[0]);\n const axis = ShapeUtil.normalizeAxis(axisValue, rank);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const index = ` i32(${input.indicesGet('inputIndices', 'uniforms.axis')}) `;\n const max = getElementAt('uniforms.input_shape', 'uniforms.axis', rank);\n const lowerLimit = attributes.reverse ? index + (attributes.exclusive ? ' + 1' : '') : '0';\n const upperLimit = attributes.reverse ? max : index + (attributes.exclusive ? '' : ' + 1');\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var inputIndices = ${output.offsetToIndices('global_idx')};\n var sum = ${output.type.value}(0);\n let first : i32 = ${lowerLimit};\n let last : i32 = ${upperLimit};\n for (var i : i32 = first; i < last; i++) {\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(i)')};\n sum = sum + ${input.getByIndices('inputIndices')};\n }\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'CumSum',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: inputShape, dataType: inputType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: axis},\n ...createTensorShapeVariables(inputShape, inputShape)\n ]\n\n }),\n getShaderSource\n };\n };\n\n\nexport const cumsum = (context: ComputeContext, attributes: CumSumAttributes): void => {\n const inputShape = context.inputs[0].dims;\n const inputType = context.inputs[0].dataType;\n const axis = context.inputs[1];\n context.compute(createCumsumProgramInfo(inputType, inputShape, axis, attributes), {inputs: [0]});\n};\n\nexport const parseCumSumAttributes = (attributes: Record): CumSumAttributes => {\n const exclusive = attributes.exclusive as number === 1;\n const reverse = attributes.reverse as number === 1;\n return createAttributeWithCacheKey({exclusive, reverse});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface DepthToSpaceAttributes extends FormatAttributes, AttributeWithCacheKey {\n readonly blocksize: number;\n readonly mode: string;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('DepthToSpace requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('DepthToSpace requires 4D input.');\n }\n};\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nconst createDepthToSpaceProgramInfo = (inputTensor: TensorView, attributes: DepthToSpaceAttributes): ProgramInfo => {\n let n: number, h: number, w: number, c: number;\n let shape: number[];\n let perm: number[];\n const isChannelLast = attributes.format === 'NHWC';\n const blocksize = attributes.blocksize;\n const isDCRmode = attributes.mode === 'DCR';\n if (isChannelLast) {\n [n, h, w, c] = inputTensor.dims;\n shape = isDCRmode ? [n, h, w, blocksize, blocksize, c / (blocksize ** 2)] :\n [n, h, w, c / (blocksize ** 2), blocksize, blocksize];\n perm = isDCRmode ? [0, 1, 3, 2, 4, 5] : [0, 1, 4, 2, 5, 3];\n } else {\n [n, h, w, c] = [inputTensor.dims[0], inputTensor.dims[2], inputTensor.dims[3], inputTensor.dims[1]];\n shape = isDCRmode ? [n, blocksize, blocksize, c / (blocksize ** 2), h, w] :\n [n, c / (blocksize ** 2), blocksize, blocksize, h, w];\n perm = isDCRmode ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n }\n const reshapedInputTensor = inputTensor.reshape(shape);\n const reshapedInputRank = reshapedInputTensor.dims.length;\n const inputDataType = inputTensor.dataType;\n\n const reshapedInput = inputVariable('a', inputDataType, reshapedInputRank);\n const permedOutput = outputVariable('output', inputDataType, reshapedInputRank);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(reshapedInput, permedOutput)}\n\n ${permFunctionBody(perm, reshapedInputRank, reshapedInput, permedOutput)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${permedOutput.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${permedOutput.setByOffset('global_idx', reshapedInput.getByIndices('aIndices'))}\n }`;\n\n return {\n name: 'DepthToSpace',\n shaderCache: {hint: `${inputTensor.dims};${attributes.blocksize};${attributes.mode}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputShape = isChannelLast ? 
[n, h * blocksize, w * blocksize, c / (blocksize ** 2)] :\n [n, c / (blocksize ** 2), h * blocksize, w * blocksize];\n const outputSize = ShapeUtil.size(outputShape);\n const shapeBeforePerm = reshapedInputTensor.dims;\n const shapeAfterPerm = ShapeUtil.sortBasedOnPerm(shapeBeforePerm, perm);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(shapeBeforePerm, shapeAfterPerm)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const depthToSpace = (context: ComputeContext, attributes: DepthToSpaceAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createDepthToSpaceProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseDepthToSpaceAttributes = (attributes: Record): DepthToSpaceAttributes =>\n createAttributeWithCacheKey({\n blocksize: attributes.blocksize as number,\n mode: attributes.mode as string,\n format: attributes.format as 'NHWC' | 'NCHW'\n });\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface EinsumAttributes extends AttributeWithCacheKey {\n readonly equation: string;\n}\n// The equation attribute value is a string which consists of left hand side (LHS) and optionally right hand side (RHS)\n// separated by '->'. Ex. \"ij,jk -> ik\" expresses matrix multiplication\n// \"ij->ji\" expresses matrix transpose\n// \"ii->i\" diagonal elements of a square matrix\n// LHS consists of a sequence of terms separated by commas. Each term corresponds to an input variable.\n// Each symbol corresponds to a dimension in the input variable. The symbol can be either a letter, 'a' to 'z' or 'A' to\n// 'Z' or '...' 
to represent arbitrary dimensions.\n\nconst symbolPattern =\n '[a-zA-Z]|\\\\.\\\\.\\\\.'; // The pattern each symbol in each term in the symbolic equation should match\nconst termPattern = '(' + symbolPattern + ')+'; // The pattern each term in the symbolic equation should match\nconst termPatternOnly = '^' + termPattern + '$'; // The patterns only matchs a term begin to end.\nconst lhsPattern = '(' + termPattern + ',)*' + termPattern; // The pattern the LHS should match\nconst lhsPatternOnly = '^' + lhsPattern + '$'; // The patterns only matchs a LHS begin to end.\n\ninterface SymbolInfo {\n count: number; // Symbol corresponding to a dimmension of an input\n inputIndices: number[]; // Number of input variables the symbol corresponds to\n dimValue: number; // Number of dimensions the symbol corresponds to\n}\n\nclass EinsumTerm {\n constructor(inputIndex = -1) {\n this.symbolToIndices = new Map();\n this.inputIndex = inputIndex;\n }\n\n // Add a symbol to the term\n addSymbol(symbol: string, index: number) {\n let value = this.symbolToIndices.get(symbol);\n if (value === undefined) {\n value = [index];\n } else {\n value.push(index);\n }\n this.symbolToIndices.set(symbol, value);\n }\n\n symbolToIndices: Map; // Map from symbol to dimensions of the input corresponding to the term\n inputIndex: number; // -1 for output and 0, 1, 2, ... for inputs\n}\n\nclass EinsumEquation {\n constructor(inputs: readonly TensorView[], public readonly equation: string) {\n this.hasEllipsis = false;\n this.symbolToInfo = new Map();\n this.lhs = new Array();\n this.outputDims = [];\n // As rhs needs to be updated allow using let instead of const for both lhs and rhs.\n // eslint-disable-next-line prefer-const\n let [lhs, rhs] = equation.includes('->') ? equation.split('->', 2) : [equation, ''];\n if (!lhs.match(RegExp(lhsPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const inputTerms = lhs.split(',');\n inputTerms.forEach((inputTerm, index) => {\n const dims = inputs[index].dims.slice();\n if (!inputTerm.match(RegExp(termPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const einsumTerm = this.processTerm(inputTerm, true, dims, index);\n this.lhs.push(einsumTerm);\n });\n\n // Initialize the RHS if not specified\n if (rhs === '') {\n // Construct RHS from LHS terms/symbols\n rhs += [...this.symbolToInfo.entries()]\n .filter(([sym, info]) => (info.count === 1 || sym === '...'))\n .map(([sym]) => sym)\n .join('');\n } else {\n if (!rhs.match(RegExp(termPattern))) {\n throw new Error('Invalid RHS');\n }\n }\n\n // Compute output dims\n const rhsSymbols = rhs.match(RegExp(symbolPattern, 'g'));\n rhsSymbols?.forEach((symbol) => {\n if (symbol === '...') {\n this.outputDims = this.outputDims.concat(this.ellipsisDims);\n } else {\n const info = this.symbolToInfo.get(symbol);\n if (info === undefined) {\n throw new Error('Invalid RHS symbol');\n }\n this.outputDims.push(info.dimValue);\n }\n });\n this.rhs = this.processTerm(rhs, false, this.outputDims);\n } // End of EinsumEqation constructor\n\n // Add a symbol to the equation\n addSymbol(symbol: string, dimValue: number, inputIndex: number) {\n let info = this.symbolToInfo.get(symbol);\n if (info !== undefined) {\n if (info.dimValue !== dimValue && info.count !== 1) {\n throw new Error('Dimension mismatch');\n } else {\n info.count++;\n info.inputIndices.push(inputIndex);\n }\n } else {\n info = {count: 1, dimValue, inputIndices: [inputIndex]};\n }\n this.symbolToInfo.set(symbol, info);\n }\n\n // Process one input/output term\n 
processTerm(term: string, isInput: boolean, dims: readonly number[], index = -1): EinsumTerm {\n const rank = dims.length;\n let ellipsis = false;\n let ellipsisDims = [];\n let nextDim = 0;\n // For output empty string is allowed because the output may be reduced to a scalar value\n if (!term.match(RegExp(termPatternOnly)) && (!isInput && term !== '')) {\n throw new Error('Invalid LHS term');\n }\n const indexSymbols = term.match(RegExp(symbolPattern, 'g'));\n const einsumTerm = new EinsumTerm(index);\n // symbol can be either a lettre, 'a' to 'z' or 'A' to 'Z', or '...'\n indexSymbols?.forEach((symbol: string, i: number) => {\n if (symbol === '...') {\n if (ellipsis) {\n throw new Error('Only one ellipsis is allowed per input term');\n }\n ellipsis = true;\n const ellipsisDimLength = rank - indexSymbols.length + 1;\n if (ellipsisDimLength < 0) {\n throw new Error('Ellipsis out of bounds');\n }\n ellipsisDims = dims.slice(nextDim, nextDim + ellipsisDimLength);\n if (this.hasEllipsis) {\n if (this.ellipsisDims.length !== ellipsisDims.length ||\n this.ellipsisDims.toString() !== ellipsisDims.toString()) {\n throw new Error('Ellipsis dimensions mismatch');\n }\n } else if (isInput) {\n this.hasEllipsis = true;\n this.ellipsisDims = ellipsisDims;\n } else {\n throw new Error('Ellipsis must be specified in the LHS');\n }\n // Add '0', '1', '2', '3', '4', etc to represent ellipsis dimensions to avoid special handling\n for (let j = 0; j < ellipsisDims.length; j++) {\n const symbol = String.fromCharCode('0'.charCodeAt(0) + j);\n einsumTerm.addSymbol(symbol, i + j);\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n } else {\n einsumTerm.addSymbol(symbol, i + (this.hasEllipsis ? this.ellipsisDims.length - 1 : 0));\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n });\n return einsumTerm;\n }\n\n symbolToInfo: Map; // All symbols in the equation\n hasEllipsis: boolean; // The equation has ellipsis or not\n ellipsisDims: number[]; // The dimensions of the equation ellipsis corresponds to.\n lhs: EinsumTerm[]; // Terms on the left-hand side of the equation\n rhs: EinsumTerm; // Term on the right-hand side of the equation\n outputDims: number[]; // Output dimensions of the equation\n} // End of class EinsumEquation\n\nconst appendMax = (name: string): string => name + '_max';\n\nconst createEinsumProgramInfo =\n (inputShapes: Array, dataType: number, einsumEquation: EinsumEquation,\n outputShape: readonly number[]): ProgramInfo => {\n const ranks = inputShapes.map((dims) => dims.length);\n const inputVars = ranks.map((rank, index) => inputVariable(`input${index}`, dataType, rank));\n const outputSize = ShapeUtil.size(outputShape);\n const output = outputVariable('output', dataType, outputShape.length);\n const uniformsSymbols =\n [...einsumEquation.symbolToInfo.keys()].filter((symbol) => !einsumEquation.rhs.symbolToIndices.has(symbol));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = [];\n const initProd = 'var prod = 1.0;';\n const initSum = 'var sum = 0.0;';\n const updateSum = 'sum += prod;';\n const reduceOpsSetIndices: string[] = [];\n const reduceOpsLoopHeaders: string[] = [];\n const reduceOpsLoopFooters: string[] = [];\n const reduceOpCompute: string[] = [];\n const isReduceOpsWithoutLoop = einsumEquation.symbolToInfo.size === einsumEquation.rhs.symbolToIndices.size;\n einsumEquation.symbolToInfo.forEach((info, symbol) => {\n if (einsumEquation.rhs.symbolToIndices.has(symbol)) {\n const outputIndex = 
einsumEquation.rhs.symbolToIndices.get(symbol)?.[0];\n if (outputIndex !== undefined) {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n idxCopy.push(`${\n inputVars[i].indicesSet(\n `input${i}Indices`, index, output.indicesGet('outputIndices', outputIndex))}`);\n });\n }\n });\n }\n } else {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n reduceOpsSetIndices.push(`${inputVars[i].indicesSet(`input${i}Indices`, index, `${symbol}`)}`);\n });\n reduceOpCompute.push(`prod *= ${inputVars[i].getByIndices(`input${i}Indices`)};`);\n }\n });\n reduceOpsLoopHeaders.push(\n `for(var ${symbol}: u32 = 0; ${symbol} < uniforms.${appendMax(symbol)}; ${symbol}++) {`);\n reduceOpsLoopFooters.push('}');\n }\n });\n const reduceOps = isReduceOpsWithoutLoop ?\n [\n ...idxCopy,\n `let sum = ${inputVars.map((inputVar, i) => inputVar.getByIndices(`input${i}Indices`)).join(' * ')};`\n ] :\n [\n ...idxCopy,\n initSum,\n ...reduceOpsLoopHeaders,\n ...reduceOpsSetIndices,\n initProd,\n ...reduceOpCompute,\n updateSum,\n ...reduceOpsLoopFooters,\n ];\n return `\n ${\n shaderHelper\n .registerUniforms(uniformsSymbols.map((symbol) => ({name: `${appendMax(symbol)}`, type: 'u32'})))\n .registerUniform('outputSize', 'u32')\n .declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${output.offsetToIndices('global_idx')};\n ${inputVars.map((_var, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\\n')}\n ${reduceOps.join('\\n')};\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'Einsum',\n shaderCache: {hint: einsumEquation.equation, inputDependencies: inputShapes.map(() => 'rank')},\n getRunData: () => {\n // The symbols from uniformSymbols array are guaranteed to exist in einsumEquations.symbolToInfo map. 
The\n // filter is added to make sure that dimValue is never 0.\n const programUniformsInit: ProgramUniform[] =\n uniformsSymbols.filter((symbol) => einsumEquation.symbolToInfo.has(symbol))\n .map(\n (symbol) =>\n ({type: DataType.uint32, data: einsumEquation.symbolToInfo.get(symbol)?.dimValue || 0}));\n programUniformsInit.push({type: DataType.uint32, data: outputSize});\n const programUniforms: ProgramUniform[] =\n inputShapes.map((dims, _) => [...createTensorShapeVariables(dims)])\n .reduce((acc, inputProgramUniforms) => acc.concat(inputProgramUniforms), programUniformsInit);\n programUniforms.push(...createTensorShapeVariables(outputShape));\n return ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n });\n },\n getShaderSource,\n };\n };\n\nexport const einsum = (context: ComputeContext, attributes: EinsumAttributes): void => {\n const einsumEquation = new EinsumEquation(context.inputs, attributes.equation);\n const outputShape = einsumEquation.outputDims;\n const inputShapes = context.inputs.map((input, _) => input.dims);\n context.compute(createEinsumProgramInfo(inputShapes, context.inputs[0].dataType, einsumEquation, outputShape));\n};\n\nexport const parseEinsumAttributes = (attributes: Record): EinsumAttributes => {\n const equation = (attributes.equation as string).replace(/\\s+/g, '');\n return createAttributeWithCacheKey({equation});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Expand requires 2 input.');\n }\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n\n let shapeIndex = shape.length < inputShape.length ? 0 : shape.length - inputShape.length;\n let inputShapeIndex = inputShape.length < shape.length ? 0 : inputShape.length - shape.length;\n for (; shapeIndex < shape.length && inputShapeIndex < inputShape.length; ++shapeIndex, ++inputShapeIndex) {\n if (shape[shapeIndex] !== inputShape[inputShapeIndex] && shape[shapeIndex] !== 1 &&\n inputShape[inputShapeIndex] !== 1) {\n throw new Error('Expand requires shape to be broadcastable to input');\n }\n }\n};\n\nconst getAdjustedShape = (shape1: readonly number[], shape2: readonly number[]): number[] => {\n const diff = shape1.length - shape2.length;\n const shape: number[] = [];\n for (let i = 0; i < diff; ++i) {\n shape.push(shape1[i]);\n }\n for (let i = 0; i < shape2.length; ++i) {\n shape.push(shape2[i] === 1 ? shape1[i + diff] : shape2[i]);\n }\n return shape;\n};\n\nconst calculateOutputShape = (inputShape: readonly number[], shape: readonly number[]): number[] =>\n (inputShape.length > shape.length) ? 
getAdjustedShape(inputShape, shape) : getAdjustedShape(shape, inputShape);\n\n\nconst createExpandProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n const outputShape: number[] = calculateOutputShape(inputShape, shape);\n const dataType = inputs[0].dataType;\n const components = dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', dataType, inputShape.length, components);\n const output = outputVariable('output', dataType, outputShape.length, components);\n let assignment: string;\n if (dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n let offset${x} = ${input.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${input.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n ${output.setByOffset('global_idx', 'data')}\n }`;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let inputOffset = ${input.broadcastedIndicesToOffset('outputIndices', output)};\n ${output.setByOffset('global_idx', input.getByOffset('inputOffset'))}\n }`;\n }\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}`;\n };\n\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)];\n return {\n name: 'Expand',\n shaderCache: {hint: `${outputShape.length}`, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const expand = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createExpandProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType, UniformsArrayType, WORKGROUP_SIZE} from './common';\nimport * as unary from './unary-op';\n\n// GELU is defined as Y=0.5*X*(1+tanh(0.797885*X+0.035677*X*X*X)), where X may pre-add a bias.\n\nconst createFastGeluProgramInfo = (inputTensors: readonly TensorView[]): ProgramInfo => {\n const dataType = inputTensors[0].dataType;\n const outputSize = ShapeUtil.size(inputTensors[0].dims);\n const biasLength = ShapeUtil.size(inputTensors[1].dims);\n // can only use vec4 when bias length is multiple of 4\n const useVec4 = biasLength % 4 === 0;\n const getShaderSource = (shaderHelper: ShaderHelper): string => {\n const x = inputVariable('x', dataType, [1], 4);\n const bias = inputVariable('bias', dataType, [1], 4);\n const y = outputVariable('y', dataType, [1], 4);\n\n const uniforms: UniformsArrayType = [{name: 'output_vec_size', type: 'u32'}, {name: 'bias_size', type: 'u32'}];\n\n const singleElementBias = (i: 0|1|2|3) => `\n let bias${i}_offset: u32 = (global_idx * 4 + ${i}) % uniforms.bias_size;\n let bias${i} = ${bias.getByOffset(`bias${i}_offset / 4`)}[bias${i}_offset % 4];`;\n const biasGetExpression = useVec4 ?\n `\n let bias = ${bias.getByOffset('global_idx % (uniforms.bias_size / 4)')};` :\n `${singleElementBias(0)}${singleElementBias(1)}${singleElementBias(2)}${singleElementBias(3)}\n let bias = ${x.type.value}(bias0, bias1, bias2, bias3);`;\n\n return `${shaderHelper.registerUniforms(uniforms).declareVariables(x, bias, y)}\n\n ${unary.fastGeluImpl(tensorTypeToWsglValueType(dataType))}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_vec_size')}\n\n let x = ${x.getByOffset('global_idx')};\n ${biasGetExpression}\n let x_in = x + bias;\n ${y.setByOffset('global_idx', unary.fastGeluExpression('x_in'))}\n }`;\n };\n\n return {\n name: 'FastGeluWithBias',\n shaderCache: {hint: `${useVec4}`, inputDependencies: ['type', 'type']},\n getShaderSource,\n getRunData: (inputs) => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n programUniforms:\n [{type: DataType.uint32, data: Math.ceil(outputSize / 4)}, {type: DataType.uint32, data: biasLength}],\n dispatchGroup: {x: Math.ceil(outputSize / WORKGROUP_SIZE / 4)}\n })\n };\n};\n\nexport const fastGelu = (context: ComputeContext): void => {\n if (context.inputs.length < 2 || ShapeUtil.size(context.inputs[1].dims) === 0) {\n unary.fastGelu(context);\n } else {\n context.compute(createFastGeluProgramInfo(context.inputs));\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n};\n\nconst createGatherProgramInfo = (inputs: readonly TensorView[], attributes: GatherAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const indicesShape = inputs[1].dims;\n\n const inputRank = inputShape.length;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n\n const outputShape = inputShape.slice(0);\n outputShape.splice(axis, 1, ...indicesShape);\n\n const axisDimLimit = inputShape[axis];\n const components = inputs[0].dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}, ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const data = inputVariable('data', inputs[0].dataType, inputs[0].dims.length, components);\n const indices = inputVariable('inputIndices', inputs[1].dataType, inputs[1].dims.length);\n const output = outputVariable('output', inputs[0].dataType, outputShape.length, components);\n\n const calcDataIndices = (x: number|string): string => {\n const indicesRank = indicesShape.length;\n let calcStr = `var indicesIndices${x} = ${indices.type.indices}(0);`;\n for (let i = 0; i < indicesRank; i++) {\n calcStr += `${indicesRank > 1 ? `indicesIndices${x}[${i}]` : `indicesIndices${x}`} = ${\n outputShape.length > 1 ? `outputIndices${x}[uniforms.axis + ${i}]` : `outputIndices${x}`};`;\n }\n calcStr += `\n var idx${x} = ${indices.getByIndices(`indicesIndices${x}`)};\n if (idx${x} < 0) {\n idx${x} = idx${x} + uniforms.axisDimLimit;\n }\n var dataIndices${x} : ${data.type.indices};\n `;\n for (let i = 0, j = 0; i < inputRank; i++) {\n if (i === axis) {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = u32(idx${x});`;\n j += indicesRank;\n } else {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = ${\n outputShape.length > 1 ? 
`outputIndices${x}[${j}]` : `outputIndices${x}`};`;\n j++;\n }\n }\n return calcStr;\n };\n let assignment: string;\n if (inputs[0].dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n ${calcDataIndices(x)};\n let offset${x} = ${data.indicesToOffset(`dataIndices${x}`)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${data.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var value = vec4(0);\n ${singleAssignment('value', 0, 'u32')}\n ${singleAssignment('value', 1, 'u32')}\n ${singleAssignment('value', 2, 'u32')}\n ${singleAssignment('value', 3, 'u32')}\n ${output.setByOffset('global_idx', 'value')}\n `;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n ${calcDataIndices('')};\n let value = ${data.getByIndices('dataIndices')};\n ${output.setByOffset('global_idx', 'value')};\n `;\n }\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(data, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n ${assignment}\n }`;\n };\n return {\n name: 'Gather',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank', 'rank']},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGatherAttributes = (attributes: Record): GatherAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gather = (context: ComputeContext, attributes: GatherAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherElementsAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('GatherElements requires 2 inputs.');\n }\n\n if (inputs[0].dims.length < 1) {\n throw new Error('GatherElements requires that the data input be rank >= 1.');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error(`GatherElements requires that the data input and\n indices input tensors be of same rank.`);\n }\n};\n\nconst createGatherElementsProgramInfo =\n (inputs: readonly TensorView[], attributes: GatherElementsAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputOutputDataType = inputs[0].dataType;\n const inputRank = inputShape.length;\n\n const indicesShape = inputs[1].dims;\n const indicesDataType = inputs[1].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n const axisDimLimit = inputShape[axis];\n\n const outputShape = indicesShape.slice(0);\n const outputSize = ShapeUtil.size(outputShape);\n\n const input = inputVariable('input', inputOutputDataType, inputRank);\n const indices = inputVariable('indicesInput', indicesDataType, indicesShape.length);\n const output = outputVariable('output', inputOutputDataType, outputShape.length);\n\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}\n ];\n programUniforms.push(...createTensorShapeVariables(inputShape, indicesShape, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n // int64 indices would be treated as little endian i32 with assumption they fall in i32 limits\n // That assumption is safe as it's not possible to allocate >2gb buffer for input tensor\n // Input data will be treated as u32 or two u32 for 8-byte tensors\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n\n var idx = ${indices.getByOffset('global_idx')};\n if (idx < 0) {\n idx = idx + uniforms.axisDimLimit;\n }\n var inputIndices = ${input.type.indices}(outputIndices);\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(idx)')};\n let value = ${input.getByIndices('inputIndices')};\n\n ${output.setByOffset('global_idx', 'value')};\n }`;\n\n return {\n name: 'GatherElements',\n shaderCache: {inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const 
parseGatherElementsAttributes = (attributes: Record): GatherElementsAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gatherElements = (context: ComputeContext, attributes: GatherElementsAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherElementsProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {GemmUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (inputs.length < 2 || inputs.length > 3) {\n throw new Error('Invaid input number.');\n }\n\n // 'C' can be of dimensionality 0, 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length > 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].dataType !== inputs[1].dataType) ||\n (inputs.length === 3 && inputs[0].dataType !== inputs[2].dataType)) {\n throw new Error('Input types are mismatched');\n }\n};\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n}\n\nconst createGemmProgramInfo = (inputs: readonly TensorView[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N, K] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}, {type: DataType.float, data: attributes.alpha},\n {type: DataType.float, data: attributes.beta}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (inputs.length === 3) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n let line = '';\n if (attributes.transA && attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[n * uniforms.K + k];';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[k * uniforms.N + n];';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[n * uniforms.K + k];';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[k * uniforms.N + n];';\n }\n\n const calculateAlpha = attributes.alpha === 1 ? 
'' : 'value *= uniforms.alpha;';\n const a = inputVariable('a', inputs[0].dataType, inputs[0].dims);\n const b = inputVariable('b', inputs[1].dataType, inputs[1].dims);\n const dataType = a.type.value;\n let c: IndicesHelper|null = null;\n const variables = [a, b];\n if (inputs.length === 3) {\n c = inputVariable('c', inputs[2].dataType, inputs[2].dims.length);\n variables.push(c);\n }\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n variables.push(output);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'K', type: 'u32'},\n {name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let m = global_idx / uniforms.N;\n let n = global_idx % uniforms.N;\n\n var value = ${dataType}(0);\n for (var k: u32 = 0u; k < uniforms.K; k++) {\n ${line}\n }\n\n ${calculateAlpha}\n ${(() => {\n if (c != null) {\n return `let cOffset = ${c.broadcastedIndicesToOffset('vec2(m, n)', output)}; value += ${\n dataType}(uniforms.beta) * ${c.getByOffset('cOffset')};`;\n }\n return '';\n })()}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Gemm',\n shaderCache: {hint: `${attributes.cacheKey}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGemmAttributes = (attributes: Record): GemmAttributes => {\n const transA = attributes.transA as boolean;\n const transB = attributes.transB as boolean;\n const alpha = attributes.alpha as number;\n const beta = attributes.beta as number;\n return {transA, transB, alpha, beta, cacheKey: `${attributes.transA};${attributes.transB};${attributes.alpha === 1}`};\n};\n\nexport const gemm = (context: ComputeContext, attributes: GemmAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createGemmProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, GpuDataType, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nconst getInput = (inputs: readonly TensorView[], i: number) =>\n (inputs.length > i) && (inputs[i].dims.length > 0) && (ShapeUtil.size(inputs[i].dims)) > 0 ? 
inputs[i] : undefined;\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = getInput(inputs, 1);\n const value = getInput(inputs, 2);\n const bias = getInput(inputs, 3);\n const keyPaddingMask = getInput(inputs, 4);\n const relativePositionBias = getInput(inputs, 5);\n const pastKey = getInput(inputs, 6);\n const pastValue = getInput(inputs, 7);\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // key_padding_mask (K/V) : (B) or (2*B + 1) or (B, L) or None\n // relative_position_bias : (B, 1, S, L)\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // bias (Q/K/V) : (D + D + D_v)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // bias (Q/K/V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n // bias (Q/K/V) : None or (D + D + D_v)\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? 
query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n if (pastKey && pastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastKey.dims[0] !== batchSize || pastKey.dims[1] !== attributes.numHeads || pastKey.dims[3] !== headSize) {\n throw new Error('Input \"past_key\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastValue.dims[0] !== batchSize || pastValue.dims[1] !== attributes.numHeads ||\n pastValue.dims[3] !== headSize) {\n throw new Error('Input \"past_value\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastKey.dims[2] !== pastValue.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have same dim 2 (past_sequence_length)');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n } else if (pastKey || pastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (key.dims[2] !== query.dims[2]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 2 (hidden_size)');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n }\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n if (bias) {\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimension');\n }\n\n if (value) {\n if (query.dims.length === 5 && query.dims[3] === 2) {\n throw new Error('bias is not allowed for packed kv.');\n }\n }\n }\n\n let maskType: AttentionMaskType = 
AttentionMaskType.none;\n if (keyPaddingMask) {\n maskType = AttentionMaskType.maskUnknown;\n const maskDims = keyPaddingMask.dims;\n if (maskDims.length === 1) {\n if (maskDims[0] === batchSize) {\n maskType = AttentionMaskType.mask1dKeySeqLen;\n } else if (maskDims[0] === 3 * batchSize + 2) {\n maskType = AttentionMaskType.mask1DKeySeqLenStart;\n }\n } else if (maskDims.length === 2 && maskDims[0] === batchSize && maskDims[1] === kvSequenceLength) {\n maskType = AttentionMaskType.mask2dKeyPadding;\n }\n if (maskType === AttentionMaskType.maskUnknown) {\n throw new Error('Input \"key_padding_mask\" shape shall be (batch_size) or (batch_size, kv_sequence_length)');\n }\n throw new Error('Mask not supported');\n }\n\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n if (keyPaddingMask) {\n throw new Error('Key padding mask is not supported');\n }\n\n if (relativePositionBias) {\n if (relativePositionBias.dims.length !== 4) {\n throw new Error('Input \"relative_position_bias\" is expected to have 4 dimensions');\n }\n if ((relativePositionBias.dims[0] !== batchSize && relativePositionBias.dims[0] !== 1) ||\n relativePositionBias.dims[1] !== attributes.numHeads || relativePositionBias.dims[2] !== sequenceLength ||\n relativePositionBias.dims[3] !== totalSequenceLength) {\n throw new Error('Input \"relative_position_bias\" shape (batch_size, 1, sequence_length, kv_sequence_length)');\n }\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n };\n};\n\nexport const parseMultiHeadAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst addBiasTranspose =\n (context: ComputeContext, qkv: TensorView, bias: TensorView, batchSize: number, sequenceLength: number,\n hiddenSize: number, biasOffset: number) => {\n const outputShape = [batchSize, sequenceLength, hiddenSize];\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: biasOffset},\n {type: DataType.uint32, data: hiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: 
ShaderHelper) => {\n const output = outputVariable('qkv_with_bias', qkv.dataType, outputShape);\n const qkvInput = inputVariable('qkv', qkv.dataType, outputShape);\n const biasInput = inputVariable('bias', bias.dataType, outputShape);\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'bias_offset', type: 'u32'}, {name: 'hidden_size', type: 'u32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(qkvInput, biasInput, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset;\n\n qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx];\n }`;\n };\n\n return context.compute(\n {\n name: 'MultiHeadAttentionAddBias',\n shaderCache: {inputDependencies: ['type', 'type']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: qkv.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [qkv, bias], outputs: [-1]})[0];\n };\n\nexport const maybeTransposeToBNSHAndAddBias =\n (context: ComputeContext, batchSize: number, numHeads: number, sequenceLength: number, headSize: number,\n input: TensorView, bias?: TensorView, biasOffset?: number) => {\n // const newDims = [];\n\n let reshapedInput = input;\n if (!bias) {\n if (input.dims.length === 3) {\n reshapedInput = input.reshape([batchSize, sequenceLength, numHeads, headSize]);\n }\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n } else {\n if (sequenceLength === 1) {\n throw new Error('AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV');\n } else {\n reshapedInput =\n addBiasTranspose(context, input, bias, batchSize, sequenceLength, numHeads * headSize, biasOffset!);\n reshapedInput = reshapedInput.reshape([batchSize, sequenceLength, numHeads, headSize]);\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n }\n }\n };\n\nexport const multiHeadAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n const query = context.inputs[0];\n const key = getInput(context.inputs, 1);\n const value = getInput(context.inputs, 2);\n const bias = getInput(context.inputs, 3);\n const keyPaddingMask = getInput(context.inputs, 4);\n const relativePositionBias = getInput(context.inputs, 5);\n const pastKey = getInput(context.inputs, 6);\n const pastValue = getInput(context.inputs, 7);\n if (query.dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (key?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n // applyAttention expects BNSH inputs\n const kvBNSH = key && value && key.dims.length === 4 && value.dims.length === 4;\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, query, bias, 0);\n\n if (kvBNSH) {\n return applyAttention(\n context, Q, key, value, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params,\n attributes);\n }\n if (!key || !value) {\n throw new Error('key and value must be provided');\n }\n const K = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.headSize, key, bias,\n params.hiddenSize);\n\n const V = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.vHeadSize, value, bias,\n 2 * params.hiddenSize);\n\n applyAttention(\n context, Q, K, V, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst getRepeats = (repeatsTensorView: TensorView): readonly number[] =>\n Array.from(repeatsTensorView.getBigInt64Array(), Number);\n\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 inputs.');\n }\n\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16 &&\n inputs[0].dataType !== DataType.int32 && inputs[0].dataType !== DataType.uint32) {\n throw new Error('Tile only support float, float16, int32, and uint32 data types');\n }\n\n if (inputs[1].dataType !== DataType.int64) {\n throw new Error('Tile `repeats` input should be of int64 data type');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('Tile `repeats` input should be 1-D');\n }\n\n const repeats: readonly number[] = getRepeats(inputs[1]);\n\n if (repeats.length !== inputs[0].dims.length) {\n throw new Error('Tile `repeats` input should have same number of elements as rank of input data tensor');\n }\n};\n\nconst getOutputShape = (inputShape: readonly number[], repeats: readonly number[]): readonly number[] => {\n const outputShape: number[] = [];\n\n for (let i = 0; i < inputShape.length; ++i) {\n outputShape.push(inputShape[i] * repeats[i]);\n }\n\n return outputShape;\n};\n\nexport const createTileProgramInfo = (inputs: readonly TensorView[], shape?: number[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const repeats: readonly number[] = shape == null ? 
getRepeats(inputs[1]) : shape;\n const outputShape = getOutputShape(inputShape, repeats);\n const outputSize = ShapeUtil.size(outputShape);\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, inputShape.length);\n const output = outputVariable('output', dataType, outputShape.length);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const inputShape = ${input.indices(...inputShape)};\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n for (var i = 0; i < ${inputShape.length}; i++) {\n let input_dim_i = ${input.indicesGet('uniforms.input_shape', 'i')};\n let input_dim_value = ${output.indicesGet('output_indices', 'i')} % input_dim_i;\n\n ${input.indicesSet('input_indices', 'i', 'input_dim_value')}\n }\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n\n return {\n name: 'Tile',\n shaderCache: {hint: `${repeats}`, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n }),\n getShaderSource,\n };\n};\n\nexport const tile = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createTileProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {maybeTransposeToBNSHAndAddBias} from './multihead-attention';\nimport {createTileProgramInfo} from './tile';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nexport const validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = inputs[1];\n const value = inputs[2];\n const pastKey = inputs[3];\n const pastValue = inputs[4];\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, 
S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n const hasPastKey = pastKey && pastKey.dims.length !== 0;\n const hasPastValue = pastValue && pastValue.dims.length !== 0;\n // TODO : this should be from attributes.\n const isPastkvBSNH = true;\n if (hasPastKey && hasPastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n if (isPastkvBSNH) {\n // For BSNH\n pastSequenceLength = pastKey.dims[1];\n maxSequenceLength = pastKey.dims[1];\n } else {\n // For BNSH\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n }\n } else if (hasPastKey || hasPastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (query.dims[2] % key.dims[2] !== 0) {\n throw new Error('Dimension 2 of \"query\" should be a multiple of \"key\"');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n 
}\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n const maskType: AttentionMaskType = AttentionMaskType.none;\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.kvNumHeads!),\n numHeads: attributes.numHeads,\n kvNumHeads: attributes.kvNumHeads,\n nReps: attributes.numHeads / attributes.kvNumHeads!,\n pastPresentShareBuffer: false,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n isPastkvBSNH,\n };\n};\n\nconst createConcatProgramInfo =\n (a: TensorView, b: TensorView|undefined, dataType: DataType, params: AttentionParameters): ProgramInfo => {\n const outputShape = [params.batchSize, params.totalSequenceLength, params.kvNumHeads!, params.headSize];\n const component = 4;\n const outputSize = ShapeUtil.size(outputShape) / component;\n const presentSequenceLength = params.totalSequenceLength;\n const output = outputVariable('present_kv', dataType, outputShape.length, component);\n const inputA = inputVariable('new_kv', a.dataType, a.dims.length, component);\n const inputB = b ? inputVariable('past_kv', b.dataType, b.dims.length, component) : undefined;\n\n const H = Math.ceil(params.headSize / component);\n const dispatch = {x: presentSequenceLength, y: a.dims[0], z: 1};\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = b ? 
['rank', 'rank'] : ['rank'];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: params.pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength},\n {type: DataType.uint32, data: params.totalSequenceLength}\n ];\n\n const inputs = [inputA];\n if (inputB) {\n programUniforms.push(\n ...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(b!.dims),\n ...createTensorShapeVariables(outputShape));\n inputs.push(inputB);\n } else {\n programUniforms.push(...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(outputShape));\n }\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'past_seqlen', type: 'u32'}, {name: 'new_seqlen', type: 'u32'},\n {name: 'present_seqlen', type: 'u32'}\n ];\n\n const pastStr = ` let past_batch_stride = uniforms.past_seqlen * num_heads * H;\n var past_head_stride = uniforms.past_seqlen * H;\n if (is_bsnh) {\n past_head_stride = H;\n }\n let in_offset = b * past_batch_stride + s * row_stride + n * past_head_stride + h;\n present_kv[out_offset] = past_kv[in_offset];`;\n const newStr = ` let new_batch_stride = uniforms.new_seqlen * num_heads * H;\n let new_row_stride = num_heads * H;\n let new_head_stride = H;\n let in_offset = b * new_batch_stride + (s - past_seqlen) * new_row_stride + n * new_head_stride + h;\n present_kv[out_offset] = new_kv[in_offset];`;\n const concatStr = b ? `if (s < past_seqlen) {\n ${pastStr}\n } else if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }` :\n `if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }`;\n\n // TODO: handle H * params.kvNumHeads greater than maxComputeInvocationsPerWorkgroup limit.\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputs, output)}\n ${shaderHelper.mainStart([\n H, params.kvNumHeads!, 1\n ])}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var indices = ${output.offsetToIndices('global_idx')};\n let h = local_id.x;\n let n = local_id.y;\n let s = workgroup_id.x;\n let b = workgroup_id.y;\n let num_heads = ${params.kvNumHeads!}u;\n let H = ${H}u;\n\n let present_seqlen = uniforms.present_seqlen;\n let present_batch_stride = present_seqlen * num_heads * H;\n var row_stride = H;\n let is_bsnh = ${params.isPastkvBSNH};\n\n if (is_bsnh) {\n row_stride = num_heads * H;\n }\n var present_head_stride = present_seqlen * H;\n if (is_bsnh) {\n present_head_stride = H;\n }\n\n let past_seqlen = uniforms.past_seqlen;\n\n let out_offset = b * present_batch_stride + s * row_stride + n * present_head_stride + h;\n ${concatStr}\n }`;\n\n return {\n name: 'ConcatPastNew',\n shaderCache: {hint: `${params.kvNumHeads!}${H}${!!b}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: dispatch,\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const parseGroupQueryAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst maybeExpandAndTransposeToBNSH =\n (context: ComputeContext, input: TensorView, pastKV: TensorView|undefined, params: AttentionParameters,\n outputIndex: number) => {\n let reshapedInput = input;\n const numHeads = params.kvNumHeads!;\n const nReps = params.nReps!;\n if (input.dims.length === 3 && 
params.kvSequenceLength !== 0) {\n reshapedInput = input.reshape([params.batchSize, params.kvSequenceLength, numHeads, params.headSize]);\n }\n\n if (pastKV) {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, pastKV, reshapedInput.dataType, params),\n {inputs: [reshapedInput, pastKV], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n } else {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, undefined, reshapedInput.dataType, params),\n {inputs: [reshapedInput], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n }\n if (nReps !== 1) {\n reshapedInput = context.compute(\n createTileProgramInfo([reshapedInput], [1, 1, 1, nReps]), {inputs: [reshapedInput], outputs: [-1]})[0];\n reshapedInput =\n reshapedInput.reshape([params.batchSize, params.totalSequenceLength, numHeads * nReps, params.headSize]);\n }\n\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n };\n\nexport const groupQueryAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (context.inputs[1]?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, context.inputs[0], undefined,\n 0);\n const pastKey = context.inputs[3] && context.inputs[3].dims.length !== 0 ? context.inputs[3] : undefined;\n const pastValue = context.inputs[4] && context.inputs[4].dims.length !== 0 ? context.inputs[4] : undefined;\n const K = maybeExpandAndTransposeToBNSH(context, context.inputs[1], pastKey, params, 1);\n const V = maybeExpandAndTransposeToBNSH(context, context.inputs[2], pastValue, params, 2);\n applyAttention(context, Q, K, V, undefined, undefined, undefined, undefined, undefined, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface InstanceNormAttributes {\n epsilon: number;\n format: 'NHWC'|'NCHW';\n}\n\nconst createInstanceNormProgramInfo =\n (inputs: readonly TensorView[], attributes: InstanceNormAttributes): ProgramInfo => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const axis = 2;\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n const components = getMaxComponents(normSize);\n const normPackedSize = normSize / components;\n const inputShape = [xShape[0], xShape[1], normPackedSize];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'type', 'type'];\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: normSize}, {type: DataType.uint32, data: normPackedSize}];\n programUniforms.push(...createTensorShapeVariables(inputShape, inputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputShape.length, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const output = outputVariable('output', inputs[0].dataType, inputShape.length, components);\n const variables = [x, scale, bias, output];\n const dataType = x.type.value;\n const f32Type = components === 1 ? 
'f32' : `vec${components}`;\n const workgroupSize = 64;\n\n const uniforms: UniformsArrayType = [{name: 'normSize', type: 'u32'}, {name: 'normPackedSize', type: 'u32'}];\n return `\n var meanShared : f32;\n var squaredNormShared : f32;\n var workgroupShared : array<${f32Type}, ${workgroupSize}>;\n const workgroupSize = ${workgroupSize}u;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart(workgroupSize)}\n let norm = global_idx / workgroupSize;\n let batch = norm / uniforms.x_shape[1];\n let channel = norm % uniforms.x_shape[1];\n let localIndex = local_id.x;\n\n // initialize workgroup memory\n var initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n initial = initial + ${f32Type}(${x.get('batch', 'channel', 'h')});\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the mean of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n meanShared = ${sumVector('workgroupShared[0]', components)} / f32(uniforms.normSize);\n }\n workgroupBarrier();\n\n // reinitialize workgroup memory.\n initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let deviation = ${f32Type}(${x.get('batch', 'channel', 'h')}) - ${f32Type}(meanShared);\n initial = initial + deviation * deviation;\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the sum of square of deviation of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n squaredNormShared = ${sumVector('workgroupShared[0]', components)};\n }\n workgroupBarrier();\n\n let invStdDev = inverseSqrt(squaredNormShared / f32(uniforms.normSize) + f32(${attributes.epsilon}));\n let channelScale = invStdDev * f32(${scale.getByOffset('channel')});\n let channelShift = f32(${bias.getByOffset('channel')}) - meanShared * channelScale;\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let value = ${x.get('batch', 'channel', 'h')} * ${dataType}(${f32Type}(channelScale)) + ${dataType}(${\n f32Type}(channelShift));\n ${output.set('batch', 'channel', 'h', 'value')};\n }\n }`;\n };\n return {\n ...{name: 'InstanceNormalization'},\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${attributes.epsilon};${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: normCount},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nconst computeMean =\n (context: ComputeContext, input: TensorView, scale: TensorView, bias: TensorView, n: number, h: number, c: number,\n epsilon: number) => {\n const components = getMaxComponents(c);\n const WG = 64;\n // we will store channel scale and channel shift in [2, components] matrix\n // or in vec2 when components == 1\n const outputType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const sumCastType = components === 1 ? 
'f32' : `vec${components}f`;\n const setOutputValue = (var1: string, var2: string) => `${outputType}(${var1}, ${var2})`;\n const unitsOfWork = n * c / components;\n const wgSize = Math.ceil(h / WG);\n\n const meanInputDependencies: ProgramInputTensorInfoDependency[] = ['type'];\n const meanProgramUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: wgSize}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(h * c / components)}\n ];\n\n const getMeanShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = inputVariable('input', input.dataType, input.dims, components);\n return `\n ${shaderHelper.declareVariables(inputHelper)}\n @group(0) @binding(1) var output : array<${outputType}>;\n struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32};\n @group(0) @binding(2) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart(WG)}\n let currentImageNumber = global_idx / ${WG} / uniforms.C;\n let currentChannelNumber = (global_idx / ${WG}) % uniforms.C;\n let wgOffset = local_id.x * uniforms.wg_size;\n if (wgOffset >= uniforms.H) {\n return;\n }\n let wgMax = min(wgOffset + uniforms.wg_size, uniforms.H);\n\n let offset = currentImageNumber * uniforms.image_size + currentChannelNumber;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = wgOffset; i < wgMax; i++) {\n let value = ${sumCastType}(input[offset + i * uniforms.C]);\n sum += value;\n squaredSum += value * value;\n }\n output[global_idx] = ${setOutputValue('sum', 'squaredSum')};\n }`;\n };\n\n const meanValues = context.compute(\n {\n name: 'InstanceNormComputeMean',\n shaderCache: {hint: `${components}`, inputDependencies: meanInputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, WG, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: n * c / components},\n programUniforms: meanProgramUniforms\n }),\n getShaderSource: getMeanShaderSource,\n },\n {inputs: [input], outputs: [-1]})[0];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: unitsOfWork}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(WG * c / components)}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type', 'type'];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const scaleHelper = inputVariable('scale', scale.dataType, scale.dims, components);\n const biasHelper = inputVariable('bias', bias.dataType, bias.dims, components);\n return `\n @group(0) @binding(0) var input : array<${outputType}>;\n @group(0) @binding(1) var scale : array<${scaleHelper.type.storage}>;\n @group(0) @binding(2) var bias : array<${biasHelper.type.storage}>;\n @group(0) @binding(3) var output : array<${outputType}>;\n struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32};\n @group(0) @binding(4) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.units_of_work')}\n let currentImageNumber = global_idx / uniforms.C;\n let currentChannelNumber = global_idx % uniforms.C;\n\n let offset = currentImageNumber * uniforms.image_size;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = 0; i < min(${WG}, uniforms.H); i++) {\n let value = input[offset + i + currentChannelNumber * ${WG}];\n sum += 
value[0];\n squaredSum += value[1];\n }\n sum = sum / f32(uniforms.H);\n squaredSum = squaredSum / f32(uniforms.H);\n let invStdDev = inverseSqrt(squaredSum - sum * sum + f32(${epsilon}));\n let channelScale = invStdDev * ${sumCastType}(scale[currentChannelNumber]);\n let channelShift = ${sumCastType}(bias[currentChannelNumber]) - sum * channelScale;\n\n output[global_idx] = ${setOutputValue('channelScale', 'channelShift')};\n }`;\n };\n return context.compute(\n {\n name: 'InstanceNormComputeChannelScaleShift',\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${components};${epsilon}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: Math.ceil(unitsOfWork / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [meanValues, scale, bias], outputs: [-1]})[0];\n };\n\nconst createInstanceNormNHWCProgramInfo =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: InstanceNormAttributes) => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const N = xShape[0];\n const C = xShape[xShape.length - 1];\n const H = ShapeUtil.sizeFromDimension(xShape, 1) / C;\n const components = getMaxComponents(C);\n const outputSize = ShapeUtil.size(outputShape) / components;\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: H}, {type: DataType.uint32, data: Math.floor(C / components)}];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n // first compute mean\n const channelScaleShift = computeMean(context, inputs[0], inputs[1], inputs[2], N, H, C, attributes.epsilon);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const scaleType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const scaleCastType = components === 1 ? 
dataType : `vec${components}<${dataType}>`;\n\n const inputHelper = inputVariable('input', inputs[0].dataType, inputs[0].dims, components);\n const outputHelper = outputVariable('output', inputs[0].dataType, outputShape, components);\n\n return `\n @group(0) @binding(0) var input : array<${inputHelper.type.storage}>;\n @group(0) @binding(1) var scaleInput : array<${scaleType}>;\n @group(0) @binding(2) var output : array<${outputHelper.type.storage}>;\n struct Uniforms {H: u32, C : u32};\n @group(0) @binding(3) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n let currentImageNumber = global_idx / (uniforms.C * uniforms.H);\n let currentChannelNumber = global_idx % uniforms.C;\n\n let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber;\n let scale = scaleInput[scaleOffset];\n output[global_idx] = fma(input[global_idx], ${scaleCastType}(scale[0]), ${scaleCastType}(scale[1]));\n }`;\n };\n context.compute(\n {\n name: 'InstanceNormalizationNHWC',\n shaderCache: {hint: `${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [inputs[0], channelScaleShift]});\n };\n\nexport const instanceNorm = (context: ComputeContext, attributes: InstanceNormAttributes): void => {\n if (attributes.format === 'NHWC') {\n createInstanceNormNHWCProgramInfo(context, context.inputs, attributes);\n } else {\n context.compute(createInstanceNormProgramInfo(context.inputs, attributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {castToF32, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType,} from './common';\n\ninterface LayerNormAttributes {\n simplified: boolean;\n axis: number;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 2) {\n throw new Error('layerNorm requires at least 2 inputs.');\n }\n};\n\nconst createLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: LayerNormAttributes, outputCount: number): ProgramInfo => {\n const simplified = attributes.simplified;\n\n const xShape = inputs[0].dims;\n const scale = inputs[1];\n const bias = !simplified && inputs[2];\n\n const outputShape = xShape;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, xShape.length);\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n\n const scaleSize = ShapeUtil.size(scale.dims);\n const biasSize = bias ? 
ShapeUtil.size(bias.dims) : 0;\n if (scaleSize !== normSize || (bias && biasSize !== normSize)) {\n throw new Error(`Size of X.shape()[axis:] == ${normSize}.\n Size of scale and bias (if provided) must match this.\n Got scale size of ${scaleSize} and bias size of ${biasSize}`);\n }\n\n const meanInvStdDevDim: number[] = [];\n for (let i = 0; i < xShape.length; ++i) {\n if (i < axis) {\n meanInvStdDevDim.push(xShape[i]);\n } else {\n meanInvStdDevDim.push(1);\n }\n }\n const components = getMaxComponents(normSize);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: normCount}, {type: DataType.float, data: normSize},\n {type: DataType.uint32, data: Math.floor(normSize / components)},\n {type: DataType.float, data: attributes.epsilon}\n ];\n if (bias) {\n inputDependencies.push('type');\n }\n const hasMeanDataOutput = outputCount > 1;\n const hasInvStdOutput = outputCount > 2;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('scale', scale.dataType, scale.dims, components),\n ];\n if (bias) {\n variables.push(inputVariable('bias', bias.dataType, bias.dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanDataOutput) {\n variables.push(outputVariable('mean_data_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'norm_count', type: 'u32'}, {name: 'norm_size', type: 'f32'},\n {name: 'norm_size_vectorized', type: 'u32'}, {name: 'epsilon', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.norm_count')}\n let offset = global_idx * uniforms.norm_size_vectorized;\n var mean_vector = ${fillVector('f32', components)};\n var mean_square_vector = ${fillVector('f32', components)};\n\n for (var h: u32 = 0u; h < uniforms.norm_size_vectorized; h++) {\n let value = ${castToF32(dataType, components, 'x[h + offset]')};\n mean_vector += value;\n mean_square_vector += value * value;\n }\n let mean = ${sumVector('mean_vector', components)} / uniforms.norm_size;\n let inv_std_dev = inverseSqrt(${sumVector('mean_square_vector', components)} / uniforms.norm_size ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n\n for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) {\n let f32input = ${castToF32(dataType, components, 'x[j + offset]')};\n let f32scale = ${castToF32(dataType, components, 'scale[j]')};\n output[j + offset] = ${variables[0].type.value}((f32input ${simplified ? '' : '- mean'}) * inv_std_dev * f32scale\n ${bias ? `+ ${castToF32(dataType, components, 'bias[j]')}` : ''}\n );\n }\n\n ${hasMeanDataOutput ? 'mean_data_output[global_idx] = mean' : ''};\n ${hasInvStdOutput ? 
'inv_std_output[global_idx] = inv_std_dev' : ''};\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (hasMeanDataOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (hasInvStdOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n\n return {\n name: 'LayerNormalization',\n shaderCache: {hint: `${components};${outputCount};${simplified}`, inputDependencies},\n getRunData: () =>\n ({outputs, dispatchGroup: {x: Math.ceil(normCount / 64 /* workgroup size */)}, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const layerNorm = (context: ComputeContext, attributes: LayerNormAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createLayerNormProgramInfo(context.inputs, attributes, context.outputCount));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType, getTensorElementSize} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\n// TODO support quantization bits not equal to 4\nexport interface MatMulNBitsAttributes extends AttributeWithCacheKey {\n k: number;\n n: number;\n accuracyLevel: number;\n bits: number;\n blockSize: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes): void => {\n if (inputs.length < 3 || inputs.length > 4) {\n throw new Error('MatMulNBits requires 3 or 4 inputs');\n }\n const a = inputs[0];\n const aRank = a.dims.length;\n if (a.dims[aRank - 1] !== attributes.k) {\n throw new Error('The last dim of input shape does not match the k value');\n }\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const b = inputs[1];\n if (!ShapeUtil.areEqual(b.dims, [attributes.n, nBlocksPerCol, blobSize])) {\n throw new Error('The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize');\n }\n const scales = inputs[2];\n const scalesShape = scales.dims;\n if (ShapeUtil.size(scalesShape) !== attributes.n * nBlocksPerCol) {\n throw new Error('scales input size error.');\n }\n if (inputs.length === 4) {\n const zeroPoints = inputs[3];\n const zeroPointsShape = zeroPoints.dims;\n const expectedZeroPointsSize =\n attributes.bits > 4 ? 
(attributes.n * nBlocksPerCol) : attributes.n * Math.floor((nBlocksPerCol + 1) / 2);\n if (ShapeUtil.size(zeroPointsShape) !== expectedZeroPointsSize) {\n throw new Error('zeroPoints input size error.');\n }\n }\n};\n\nexport const createMatMulNBitsProgramInfo =\n (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes,\n maxComputeWorkgroupSizes: [number, number, number], maxComputeWorkgroupStorageSize: number): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const aRank = inputShape.length;\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const dimAOuter = inputShape[aRank - 2];\n const dimInner = attributes.k;\n const dimBOuter = attributes.n;\n const batchDims = inputShape.slice(0, aRank - 2);\n const batchSize = ShapeUtil.size(batchDims);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const blobSizeInWords = blobSize / 4;\n const dataType = inputs[0].dataType;\n const outputNumber = getMaxComponents(dimAOuter);\n const aComponents = getMaxComponents(attributes.k);\n const bComponents = getMaxComponents(blobSizeInWords);\n const elementSize = getTensorElementSize(dataType)!;\n const workgroupOutputSize = dimAOuter * nBlocksPerCol * elementSize;\n const maxNumberOfComponents = Math.floor(maxComputeWorkgroupStorageSize / workgroupOutputSize);\n const useBlockwiseMatMulNBits = nBlocksPerCol <= maxComputeWorkgroupSizes[0] && maxNumberOfComponents > 0;\n const components = (!useBlockwiseMatMulNBits || maxNumberOfComponents >= 4) ? getMaxComponents(dimBOuter) :\n ((maxNumberOfComponents >= 2) && getMaxComponents(dimBOuter) >= 2) ? 2 :\n 1;\n const outputShape = batchDims.concat([dimAOuter, dimBOuter]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n\n const programUniforms: ProgramUniform[] = useBlockwiseMatMulNBits ?\n [] :\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.blockSize}];\n const inputShapeTemp = [batchSize, dimAOuter, dimInner / aComponents];\n const bShape = ShapeUtil.convertShape(inputs[1].dims).slice();\n bShape.splice(-1, 1, blobSizeInWords / bComponents);\n programUniforms.push(...createTensorShapeVariables(inputShapeTemp));\n programUniforms.push(...createTensorShapeVariables(bShape));\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n if (inputs.length === 4) {\n programUniforms.push(...createTensorShapeVariables(ShapeUtil.convertShape(inputs[3].dims)));\n }\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputRank = inputShapeTemp.length;\n const a = inputVariable('a', inputs[0].dataType, inputRank, aComponents);\n const b = inputVariable('b', DataType.uint32, bShape.length, bComponents);\n const scales = inputVariable('scales', inputs[2].dataType, inputs[2].dims.length);\n const inputVariables = [a, b, scales];\n const zeroPoints =\n inputs.length === 4 ? 
inputVariable('zero_points', DataType.uint32, inputs[3].dims.length) : undefined;\n if (zeroPoints) {\n inputVariables.push(zeroPoints);\n }\n const outputRank = outputShapeTemp.length;\n const output = outputVariable('output', inputs[0].dataType, outputRank, components);\n const uniforms: UniformsArrayType = [{name: 'output_size', type: 'u32'}, {name: 'block_size', type: 'u32'}];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const qDqDataType = (() => {\n switch (aComponents) {\n case 1:\n return `array<${dataType}, 8>`;\n case 2:\n return `mat4x2<${dataType}>`;\n case 4:\n return `mat2x4<${dataType}>`;\n default:\n throw new Error(`${aComponents}-component is not supported.`);\n }\n })();\n\n const processOneBlock = `\n for (var word: u32 = 0; word < ${blobSizeInWords}; word += ${bComponents}) {\n ${b.indicesSet('b_indices', '2', 'word')};\n let b_data = ${b.getByIndices('b_indices')};\n for (var i: u32 = 0; i < ${bComponents}; i++) {\n let b_value: u32 = ${bComponents === 1 ? 'b_data' : 'b_data[word + i]'};\n let b_mask: u32 = 0x0F0F0F0Fu;\n let b_value_lower: vec4 = unpack4xU8(b_value & b_mask);\n let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask);\n let b_quantized_values = ${qDqDataType}(${\n Array.from({length: 4}, (_, i) => `${dataType}(b_value_lower[${i}]), ${dataType}(b_value_upper[${i}])`)\n .join(', ')});\n let b_dequantized_values = ${(() => {\n if (aComponents === 1) {\n return `${qDqDataType}(${\n Array.from({length: 8}, (_, i) => `(b_quantized_values[${i}] - zero_point) * scale`).join(', ')});`;\n } else {\n return `(b_quantized_values - ${qDqDataType}(${Array(8).fill('zero_point').join(',')})) * scale;`;\n }\n })()};\n // Number of B elements per 32-bit word is 32/bits = 32/4 = 8\n for (var m: u32 = 0; m < ${useBlockwiseMatMulNBits ? dimAOuter : outputNumber}u; m++) {\n ${a.indicesSet('a_indices', inputRank - 2, useBlockwiseMatMulNBits ? 'm' : `row * ${outputNumber} + m`)};\n ${a.indicesSet('a_indices', inputRank - 1, 'word_offset')};\n var input_offset = ${a.indicesToOffset('a_indices')};\n var a_data: ${qDqDataType};\n for (var j: u32 = 0; j < ${8 / aComponents}; j++) {\n a_data[j] = ${a.getByOffset('input_offset')};\n input_offset++;\n }\n ${useBlockwiseMatMulNBits ? 'workgroup_shared[workgroup_shared_offset + m]' : 'output_values[m]'}${\n components > 1 ? '[c]' : ''} += ${\n Array\n .from(\n {length: 8 / aComponents},\n (_, i) => `${\n aComponents === 1 ? `a_data[${i}] * b_dequantized_values[${i}]` :\n `dot(a_data[${i}], b_dequantized_values[${i}])`}`)\n .join(' + ')};\n }\n word_offset += ${8 / aComponents};\n }\n }`;\n const updateZeroPointIndex = zeroPoints ? `\n zero_point_offset += 4;\n if (zero_point_offset == 32) {\n zero_point_offset = 0;\n zero_point_index++;\n zero_point_word = ${zeroPoints.getByOffset('zero_point_index')};\n }` :\n '';\n\n return useBlockwiseMatMulNBits ? `\n var workgroup_shared: array<${output.type.value}, ${dimAOuter * nBlocksPerCol}>;\n ${shaderHelper.declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart([\n nBlocksPerCol, 1, 1\n ])}\n var a_indices: ${a.type.indices};\n var block = local_id.x;\n var col = workgroup_id.y;\n var batch = workgroup_id.z;\n ${a.indicesSet('a_indices', '0', 'batch')};\n // Two zero points are packed into one byte when uniforms.bits is 4.\n for (var c: u32 = 0; c < ${components}; c++) {\n let col_times_components_plus_c = col * ${components} + c;\n ${\n zeroPoints ? 
`\n var zero_point_bytes_per_col: u32 = (${nBlocksPerCol} + 1) / 2;\n var zero_point_byte_count: u32 = col_times_components_plus_c * zero_point_bytes_per_col + (block >> 0x1u);\n var zero_point_word_index: u32 = zero_point_byte_count >> 0x2u;\n var zero_point_byte_offset: u32 = zero_point_byte_count & 0x3u;\n var zero_point_nibble_offset: u32 = block & 0x1u;\n var zero_point_bits_offset: u32 = (zero_point_byte_offset << 3) + (zero_point_nibble_offset << 2);\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_word_index')} >> zero_point_bits_offset;` :\n ''}\n var b_indices: ${b.type.indices};\n ${b.indicesSet('b_indices', '0', 'col_times_components_plus_c')};\n // The scale and zero points are computed per block.\n var scales_index = col_times_components_plus_c * ${nBlocksPerCol} + block;\n let scale = ${scales.getByOffset('scales_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? '(zero_point_word) & 0xFu' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block * ${attributes.blockSize / aComponents};\n var workgroup_shared_offset: u32 = block * ${dimAOuter};\n ${processOneBlock}\n }\n workgroupBarrier();\n var output_indices: ${output.type.indices};\n var elements_per_thread: u32 = ${Math.ceil(dimAOuter / nBlocksPerCol)};\n ${output.indicesSet('output_indices', '0', 'batch')};\n ${output.indicesSet('output_indices', outputRank - 1, 'col')};\n ${output.indicesSet('output_indices', outputRank - 2, 'local_id.x * elements_per_thread')};\n var output_offset = ${output.indicesToOffset('output_indices')};\n for (var m: u32 = 0u; m < elements_per_thread; m++) {\n var row = m + local_id.x * elements_per_thread;\n if (row < ${dimAOuter}) {\n var output_value: ${output.type.value} = ${output.type.value}(0);\n var workgroup_shared_offset: u32 = row;\n for (var b: u32 = 0u; b < ${nBlocksPerCol}u; b++) {\n output_value += workgroup_shared[workgroup_shared_offset];\n workgroup_shared_offset += ${dimAOuter};\n }\n ${output.setByOffset('output_offset', 'output_value')};\n output_offset += ${dimBOuter / components};\n }\n }\n }` :\n `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var output_values: array<${output.type.value}, ${outputNumber}>;\n var output_indices = ${output.offsetToIndices('global_idx')};\n var col = ${output.indicesGet('output_indices', outputRank - 1)};\n var row = ${output.indicesGet('output_indices', outputRank - 2)};\n var a_indices: ${a.type.indices} = output_indices;\n // Two zero points are packed into one byte because uniforms.bits <= 4.\n // zero_point_offset is either 0 or 4. It is bit offset within one byte.\n // TODO support zero_point_offset for bits > 4\n ${\n zeroPoints ? 
`\n var zero_point_abs_offset = col * ${components} * ((${nBlocksPerCol} + 1) / 2);\n var zero_point_index: u32 = zero_point_abs_offset / 4;\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_index')};\n var zero_point_offset: u32 = (zero_point_abs_offset % 4) * 8;` :\n ''}\n var scale_index = col * ${nBlocksPerCol * components};\n var b_indices: ${b.type.indices};\n for (var c: u32 = 0; c < ${components}; c++) {\n ${b.indicesSet('b_indices', '0', `col * ${components} + c`)};\n var block_offset: u32 = 0;\n for (var block: u32 = 0; block < ${nBlocksPerCol}; block++) {\n // The scale and zero points are computed per block.\n let scale = ${scales.getByOffset('scale_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? 'extractBits(zero_point_word, zero_point_offset, 4)' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block_offset;\n ${processOneBlock}\n scale_index++;\n ${updateZeroPointIndex}\n block_offset += uniforms.block_size / ${aComponents};\n }\n // Drop the trailing 4 bits if the zero_poit_offset is not a byte boundary to align with the next byte.\n ${\n zeroPoints ? `if (zero_point_offset % 8 > 0) {\n ${updateZeroPointIndex}\n }` :\n ''}\n }\n for (var k: u32 = 0u; k < ${outputNumber}u; k++) {\n ${output.indicesSet('output_indices', outputRank - 2, `${outputNumber} * row + k`)};\n ${output.setByIndices('output_indices', 'output_values[k]')}\n }\n }`;\n };\n return {\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n shaderCache: {\n hint: `${attributes.cacheKey};${dimAOuter};${dataType};${inputs.length}`,\n inputDependencies: Array(inputs.length).fill('rank')\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n dispatchGroup: useBlockwiseMatMulNBits ? {x: 1, y: Math.ceil(dimBOuter / components), z: batchSize} :\n {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nexport const matMulNBits = (context: ComputeContext, attributes: MatMulNBitsAttributes): void => {\n validateInputs(context.inputs, attributes);\n const maxComputeWorkgroupSizes: [number, number, number] = context.getMaxComputeWorkgroupSizes();\n const maxComputeWorkgroupStorageSize = context.getMaxComputeWorkgroupStoragesize();\n context.compute(createMatMulNBitsProgramInfo(\n context.inputs, attributes, maxComputeWorkgroupSizes, maxComputeWorkgroupStorageSize));\n};\n\nexport const parseMatMulNBitsAttributes = (attributes: Record): MatMulNBitsAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\ninterface PadAttributes {\n // 0-constant, 1-reflect, 2-edge, 3-wrap\n readonly mode: number;\n readonly value: number;\n readonly pads: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('Too few inputs');\n }\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16) {\n throw new Error('Input type must be float or float16.');\n }\n\n if (inputs.length >= 2) {\n let validPads = inputs[0].dims.length * 2 === inputs[1].dims[0];\n if (inputs.length === 4) {\n validPads = inputs[3].dims[0] * 2 === inputs[1].dims[0];\n }\n if (!validPads) {\n throw new Error('The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].');\n }\n }\n};\n\nconst getPadConstant = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n break;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n break;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n value = ${output.type.value}(uniforms.constant_value);\n for (var i = 0; i < 1; i++) {\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n }\n `;\n};\n\nconst getPadReflect = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = -k;\n }\n {\n let _2n_1 = 2 * (i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1);\n k = k % _2n_1;\n if(k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = _2n_1 - k;\n }\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadEdge = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = 0;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadWrap = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k += i32(${getElementAt('uniforms.x_shape', i, inputRank)}]);\n }\n if (k >= 
i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k -= i32(${getElementAt('uniforms.x_shape', i, inputRank)});\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadSnippet = (output: IndicesHelper, inputRank: number, attributes: PadAttributes): string => {\n switch (attributes.mode) {\n case 0:\n return getPadConstant(output, inputRank, attributes.pads.length);\n case 1:\n return getPadReflect(output, inputRank, attributes.pads.length);\n case 2:\n return getPadEdge(output, inputRank, attributes.pads.length);\n case 3:\n return getPadWrap(output, inputRank, attributes.pads.length);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst createPadProgramInfo = (inputs: readonly TensorView[], attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(inputs[0].dims.slice(), attributes.pads);\n const inputDims = inputs[0].dims;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: attributes.pads}];\n if (attributes.mode === 0) {\n programUniforms.push({type: inputs[0].dataType, data: attributes.value});\n }\n\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('x', inputs[0].dataType, inputDims.length);\n const dataType = input.type.value;\n const padSnippet = getPadSnippet(output, inputDims.length, attributes);\n const uniforms: UniformsArrayType =\n [{name: 'output_size', type: 'u32'}, {name: 'pads', type: 'i32', length: attributes.pads.length}];\n if (attributes.mode === 0) {\n uniforms.push({name: 'constant_value', type: dataType as UniformDataElementType});\n }\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(0);\n ${padSnippet}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Pad',\n shaderCache: {hint: `${attributes.mode}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nconst createPadAttributesFromInputs = (inputs: readonly TensorView[], attributes: PadAttributes): PadAttributes => {\n if (inputs.length > 1) {\n const bigInt64Pads = inputs[1].getBigInt64Array();\n const value = (inputs.length >= 3 && inputs[2].data) ? 
inputs[2].getFloat32Array()[0] : 0.0;\n\n const inputRank = inputs[0].dims.length;\n const updatePads = new Int32Array(2 * inputRank).fill(0);\n if (inputs.length >= 4) {\n const axes = inputs[3].getBigInt64Array();\n for (let i = 0; i < axes.length; i++) {\n updatePads[Number(axes[i])] = Number(bigInt64Pads[i]);\n updatePads[Number(axes[i]) + inputRank] = Number(bigInt64Pads[i + axes.length]);\n }\n } else {\n bigInt64Pads.forEach((v, i) => updatePads[Number(i)] = (Number(v)));\n }\n\n const pads: number[] = [];\n updatePads.forEach(v => pads.push(v));\n\n return {mode: attributes.mode, value, pads};\n } else {\n return attributes;\n }\n};\n\nexport const pad = (context: ComputeContext, attributes: PadAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes = createPadAttributesFromInputs(context.inputs, attributes);\n context.compute(createPadProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\n// TODO: support:\n// - ceil_mode \"test_maxpool_2d_ceil\"\n// - storage_order \"test_maxpool_with_argmax_2d_precomputed_strides\"\n// - [MaxPool] dilations \"test_maxpool_2d_dilations\"\n// - [MaxPool] output[1] \"test_maxpool_with_argmax_2d_precomputed_pads\"\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (env.webgpu.validateInputContent && (!inputs || inputs.length !== 1)) {\n throw new Error('Pool ops requires 1 input.');\n }\n};\n\nconst getAdjustedPoolAttributesAndOutputShape = (\n input: TensorView, attributes: AttributeType, isGlobalOperator: boolean): [AttributeType, number[]] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inputShapeAsChannelFirst = input.dims.slice();\n if (isChannelsLast) {\n inputShapeAsChannelFirst.splice(1, 0, inputShapeAsChannelFirst.pop()!); // Move channel to the second position.\n }\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? (attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShapeAsChannelFirst, kernelShape, strides, dilations, pads);\n\n const outputShapeAsChannelFirst = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShapeAsChannelFirst, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n const outputShapeAsChannelLast = outputShapeAsChannelFirst.slice();\n outputShapeAsChannelLast.push(outputShapeAsChannelLast.splice(1, 1)[0]);\n return [newAttributes, isChannelsLast ? 
outputShapeAsChannelLast : outputShapeAsChannelFirst];\n};\n\nconst getUniformAndPadInfo = (\n outputShape: readonly number[],\n attributes: AttributeType): [ProgramUniform[], UniformsArrayType, boolean, boolean, boolean] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const outputSize = ShapeUtil.size(outputShape);\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: kernelSize}];\n const uniforms: UniformsArrayType = [{name: 'outputSize', type: 'u32'}, {name: 'kernelSize', type: 'u32'}];\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const pwStartEndNotZero = !!(pwStart + pwEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kw},\n {type: DataType.uint32, data: sw},\n {type: DataType.uint32, data: pwStart},\n {type: DataType.uint32, data: pwEnd},\n );\n uniforms.push(\n {name: 'kw', type: 'u32'}, {name: 'sw', type: 'u32'}, {name: 'pwStart', type: 'u32'},\n {name: 'pwEnd', type: 'u32'});\n\n let phStartEndNotZero = false;\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n phStartEndNotZero = !!(phStart + phEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kh}, {type: DataType.uint32, data: sh}, {type: DataType.uint32, data: phStart},\n {type: DataType.uint32, data: phEnd});\n\n uniforms.push(\n {name: 'kh', type: 'u32'}, {name: 'sh', type: 'u32'}, {name: 'phStart', type: 'u32'},\n {name: 'phEnd', type: 'u32'});\n }\n return [programUniforms, uniforms, true, pwStartEndNotZero, phStartEndNotZero];\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n programUniforms.push(\n {type: DataType.uint32, data: kernelStrides}, {type: DataType.uint32, data: attributes.pads},\n {type: DataType.uint32, data: attributes.strides});\n uniforms.push(\n {name: 'kernelStrides', type: 'u32', length: kernelStrides.length},\n {name: 'pads', type: 'u32', length: attributes.pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length});\n\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n return [programUniforms, uniforms, !!hasPads, false, false];\n }\n};\n\nconst generatePoolingCode = (\n shaderHelper: ShaderHelper, x: IndicesHelper, rank: number, outputShapeRank: number, attributes: AttributeType,\n op1: string, op2: string, start: number, uniforms: UniformsArrayType, hasPads: boolean, pwStartEndNotZero: boolean,\n phStartEndNotZero: boolean): string => {\n const isChannelsLast = attributes.format === 'NHWC';\n const dataType = x.type.value;\n const output = outputVariable('output', x.type.tensor, outputShapeRank);\n\n if (attributes.kernelShape.length <= 2) {\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n const dimIdxW = rank - (isChannelsLast ? 
2 : 1);\n if (pwStartEndNotZero) {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n if (xIndices[${dimIdxW}] < 0 || xIndices[${dimIdxW}]\n >= uniforms.x_shape[${dimIdxW}]) {\n pad++;\n continue;\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const dimIdxH = rank - (isChannelsLast ? 3 : 2);\n if (phStartEndNotZero) {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n if (xIndices[${dimIdxH}] < 0 || xIndices[${dimIdxH}] >= uniforms.x_shape[${dimIdxH}]) {\n pad += i32(uniforms.kw);\n continue;\n }\n `;\n } else {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(${start});\n var pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const stridesRank = attributes.kernelShape.length;\n const padsRank = attributes.pads.length;\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (xIndices[j] >= uniforms.x_shape[j]) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n padCode = `\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n `;\n }\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var offsets: array;\n\n var value = ${dataType}(${start});\n var pad = 0;\n var isPad = false;\n\n for (var i: u32 = 0u; i < uniforms.kernelSize; i++) {\n var offset = i;\n for (var j = 0u; j < ${stridesRank - 1}u; j++) {\n offsets[j] = offset / ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n offset -= offsets[j] * ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n }\n offsets[${stridesRank - 1}] = offset;\n\n isPad = false;\n for (var j = ${rank - stridesRank}u; j < ${rank}u; j++) {\n xIndices[j] = indices[j] * ${\n getElementAt('uniforms.strides', `j - ${rank - stridesRank}u`, stridesRank)}\n + offsets[j - ${rank - stridesRank}u] - ${getElementAt('uniforms.pads', 'j - 2u', padsRank)};\n ${padCode}\n }\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n }\n};\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface PoolCommonAttributes extends FormatAttributes {\n readonly autoPad: string;\n readonly ceilMode: 
number;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nconst createShaderKeyFromAttributes = (attributes: PoolCommonAttributes): string =>\n (`${attributes.format};${attributes.ceilMode};${attributes.autoPad};${attributes.kernelShape.length}`);\n\nconst createAveragePoolShaderKeyFromAttributes = (attributes: AveragePoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.countIncludePad}`);\n\nconst createMaxPoolShaderKeyFromAttributes = (attributes: MaxPoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.storageOrder};${attributes.dilations}`);\n\nconst parsePoolCommonAttributes = (attributes: Record): PoolCommonAttributes => ({\n format: attributes.format as FormatAttributes['format'],\n autoPad: ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number],\n ceilMode: attributes.ceil_mode as number,\n kernelShape: attributes.kernel_shape as [number, number],\n strides: attributes.strides as [number, number],\n pads: attributes.pads as [number, number, number, number]\n});\n\nexport interface AveragePoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly countIncludePad: boolean;\n}\n\nconst createAveragePoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: AveragePoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const x = inputVariable('x', input.dataType, input.dims.length);\n const dataType = x.type.value;\n\n const op1 = 'value += x_val;';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= ${dataType}(uniforms.kernelSize);`;\n } else {\n op2 += `value /= ${dataType}(i32(uniforms.kernelSize) - pad);`;\n }\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2, 0.0, uniforms,\n hasPads, pwStartEndNotZero, phStartEndNotZero),\n };\n };\n\nexport const parseAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const countIncludePad = (attributes.count_include_pad as number) === 0 ? 
false : true;\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode'\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n const averagePoolAttributes = {countIncludePad, ...attr, cacheKey: ''};\n return {...averagePoolAttributes, cacheKey: createAveragePoolShaderKeyFromAttributes(averagePoolAttributes)};\n};\n\nexport const averagePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('AveragePool', context.inputs[0], false, attributes));\n};\n\nconst globalPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: []\n};\n\nexport const parseGlobalAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalAveragePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('GlobalAveragePool', context.inputs[0], true, attributes));\n};\n\nexport interface MaxPoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nconst createMaxPoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: MaxPoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const op1 = `\n value = max(x_val, value);\n `;\n const op2 = '';\n const x = inputVariable('x', input.dataType, input.dims.length);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2,\n (input.dataType === DataType.float16) ? 
-65504 : -1e5, uniforms, hasPads, pwStartEndNotZero,\n phStartEndNotZero),\n };\n };\n\nexport const maxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('MaxPool', context.inputs[0], false, attributes));\n};\n\nexport const parseMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const storageOrder = attributes.storage_order as number;\n const dilations = attributes.dilations as [number, number];\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n const maxPoolAttributes = {storageOrder, dilations, ...attr, cacheKey: ''};\n return {...maxPoolAttributes, cacheKey: createMaxPoolShaderKeyFromAttributes(maxPoolAttributes)};\n};\n\nexport const parseGlobalMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalMaxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('GlobalMaxPool', context.inputs[0], true, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\nconst validateInputsContent = (start: number, limit: number, delta: number): void => {\n const sameStartLimit = start === limit;\n const increasingRangeNegativeStep = start < limit && delta < 0;\n const decreasingRangePositiveStep = start > limit && delta > 0;\n\n if (sameStartLimit || increasingRangeNegativeStep || decreasingRangePositiveStep) {\n throw new Error('Range these inputs\\' contents are invalid.');\n }\n};\n\nconst createRangeProgramInfo = (start: number, limit: number, delta: number, dataType: DataType): ProgramInfo => {\n const numElements = Math.abs(Math.ceil((limit - start) / delta));\n const outputShape: number[] = [numElements];\n const outputSize = numElements;\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: dataType, data: start}, {type: dataType, data: delta},\n ...createTensorShapeVariables(outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', dataType, outputShape.length);\n const wgslType = output.type.value;\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'start', type: wgslType as UniformDataElementType},\n {name: 'delta', type: wgslType as UniformDataElementType}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n output[global_idx] = uniforms.start + ${wgslType}(global_idx) * uniforms.delta;\n }`;\n };\n\n return {\n name: 'Range',\n shaderCache: {hint: `${dataType}`},\n getShaderSource,\n 
getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const range = (context: ComputeContext): void => {\n let start = 0;\n let limit = 0;\n let delta = 0;\n if (context.inputs[0].dataType === DataType.int32) {\n start = context.inputs[0].getInt32Array()[0];\n limit = context.inputs[1].getInt32Array()[0];\n delta = context.inputs[2].getInt32Array()[0];\n } else if (context.inputs[0].dataType === DataType.float) {\n start = context.inputs[0].getFloat32Array()[0];\n limit = context.inputs[1].getFloat32Array()[0];\n delta = context.inputs[2].getFloat32Array()[0];\n }\n if (env.webgpu.validateInputContent) {\n validateInputsContent(start, limit, delta);\n }\n\n context.compute(createRangeProgramInfo(start, limit, delta, context.inputs[0].dataType), {inputs: []});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype CoordinateTransformMode = 'half_pixel'|'asymmetric'|'pytorch_half_pixel'|'tf_half_pixel_for_nn'|'align_corners'|\n 'tf_crop_and_resize'|'half_pixel_symmetric';\n\ntype KeepAspectRatioPolicy = 'stretch'|'not_smaller'|'not_larger';\n\ntype Mode = 'nearest'|'linear'|'cubic';\n\ntype NearestMode = 'round_prefer_floor'|'round_prefer_ceil'|'floor'|'ceil'|'simple';\n\nexport interface ResizeAttributes extends AttributeWithCacheKey {\n antialias: number;\n axes: number[];\n coordinateTransformMode: CoordinateTransformMode;\n cubicCoeffA: number;\n excludeOutside: boolean;\n extrapolationValue: number;\n keepAspectRatioPolicy: KeepAspectRatioPolicy;\n mode: Mode;\n nearestMode: NearestMode;\n}\n\nconst validateScales = (scales: number[], attributes: ResizeAttributes): void => {\n scales.every((value) => value > 0 || (() => {\n throw new Error('Resize requires scales input values to be positive');\n }));\n // Check scales dims based on mode: LINEAR, CUBIC\n if (scales.length > 0) {\n if (attributes.mode === 'linear') {\n if (!(scales.length === 2 || scales.length === 3 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1) ||\n (scales.length === 5 && scales[0] === 1 && scales[1] === 1))) {\n throw new Error(\n `For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and\n one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`);\n }\n } else if (attributes.mode === 'cubic') {\n if (!(scales.length === 2 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1))) {\n throw new Error('Resize requires scales input size to be 2 or 4 for cubic mode');\n }\n }\n }\n};\n\nconst updateScales = (scales: readonly number[], axes: readonly number[], rank: number): number[] => {\n axes.every((value) => value >= 0 && value < rank || (() => {\n throw new Error('Resize requires axes input values to be positive and less than rank');\n }));\n const newScales = new Array(rank).fill(1.0);\n 
axes.forEach((value, index) => newScales[value] = scales[index]);\n return newScales;\n};\n\nconst validateInputs =\n (inputs: readonly TensorView[], attributes: ResizeAttributes, opsetVersion: number, scales: number[],\n sizes: number[], roi: number[]): void => {\n const [roiInputIndex, scalesInputIndex, sizesInputIndex] =\n (opsetVersion > 10) ? [1, 2, 3] : [-1, (inputs.length > 1) ? 1 : -1, -1];\n const rank = inputs[0].dims.length;\n if (roiInputIndex > 0 && inputs.length > roiInputIndex && inputs[roiInputIndex].dims.length > 0) {\n inputs[roiInputIndex].getFloat32Array().forEach((value) => roi.push(value));\n } else if (attributes.coordinateTransformMode === 'tf_crop_and_resize') {\n throw new Error('Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize');\n }\n\n if (scalesInputIndex > 0 && inputs.length > scalesInputIndex && inputs[scalesInputIndex].dims.length > 0) {\n inputs[scalesInputIndex].getFloat32Array().forEach((value) => scales.push(value));\n if (scales.length !== 0 &&\n (scales.length !== rank && (opsetVersion >= 18 && scales.length !== attributes.axes.length))) {\n throw new Error(\n 'Resize requires scales input size to be same as input rank or axes size for opset 18 and up');\n }\n validateScales(scales, attributes);\n if (attributes.axes.length > 0) {\n updateScales(scales, attributes.axes, rank).forEach((value, index) => scales[index] = value);\n }\n }\n if (sizesInputIndex > 0 && inputs.length > sizesInputIndex) {\n inputs[sizesInputIndex].getBigInt64Array().forEach((value) => sizes.push(Number(value)));\n if (sizes.length !== rank || (opsetVersion >= 18 && sizes.length === attributes.axes.length)) {\n throw new Error('Resize requires sizes input size to be same as input rank or axes size for opset 18 and up');\n }\n }\n\n if (attributes.axes.length > 0) {\n if (scales.length !== attributes.axes.length) {\n throw new Error('Resize requires \"scales\" input size to be of axes rank when axes attributes is specified');\n }\n if (sizes.length !== attributes.axes.length) {\n throw new Error(\n 'Resize requires \"sizes\" input size to be of rank axes rank when axes attributes is specified');\n }\n }\n if (typeof scales !== 'undefined' && typeof sizes !== 'undefined' && scales.length > 0 && sizes.length > rank) {\n throw new Error('Resize requires only of scales or sizes to be specified');\n }\n };\n\nconst getOriginalCoordinateFromResizedCoordinate =\n (coordinateTransferMode: CoordinateTransformMode, dType: string): string =>\n `fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32,\n lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${dType} { ` +\n (() => {\n switch (coordinateTransferMode) {\n case 'asymmetric':\n return `return ${dType}(xResized) / ${dType}(xScale);`;\n case 'pytorch_half_pixel':\n return `if (lengthResized > 1) {\n return (${dType}(xResized) + 0.5) / ${dType}(xScale) - 0.5;\n } else {\n return 0.0;\n }`;\n case 'tf_half_pixel_for_nn':\n return `return (${dType}(xResized) + 0.5) / ${dType}(xScale);`;\n case 'align_corners':\n return `if (lengthResized == 1) {\n return 0.0;\n } else {\n // The whole part and the fractional part are calculated separately due to inaccuracy of floating\n // point division. As an example, f32(21) / f32(7) may evaluate to 2.99... 
instead of 3, causing an\n // offset-by-one error later in floor().\n let whole = ${dType}(xResized * (lengthOriginal - 1) / (lengthResized - 1));\n let fract =\n ${dType}(xResized * (lengthOriginal - 1) % (lengthResized - 1)) / ${dType}(lengthResized - 1);\n return whole + fract;\n }`;\n case 'tf_crop_and_resize':\n return `if (lengthResized > 1) {\n return ${dType}(roiStart) * ${dType}(lengthOriginal - 1) +\n (${dType}(xResized) * ${dType}(roiEnd - roiStart) * ${dType}(lengthOriginal - 1)) /\n ${dType}(lengthResized - 1);\n } else {\n return 0.5 * ${dType}(roiStart + roiEnd) * ${dType}(lengthOriginal - 1);\n }`;\n case 'half_pixel_symmetric':\n return `const outputWidth = ${dType}xScale * ${dType}(lengthResized);\n const adjustment = ${dType}(lengthResized) / outputWidth;\n const center = ${dType}(lengthOriginal) / 2;\n const offset = center * (1 - adjustment);\n return offset + ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n case 'half_pixel':\n return `return ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n default:\n throw new Error(`Coordinate transform mode ${coordinateTransferMode} is not supported`);\n }\n })() +\n '}';\n\nconst getNearestPixelFromOriginal = (nearestMode: NearestMode, opsetVersion: number, dType: string): string =>\n `fn getNearestPixelFromOriginal(xOriginal: ${dType}, isDownSample: bool) -> ${dType} {` + (() => {\n switch (nearestMode) {\n case 'round_prefer_ceil':\n return 'if (fract(xOriginal) == 0.5) { \\\n return ceil(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'floor':\n return 'return floor(xOriginal);';\n case 'ceil':\n return 'return ceil(xOriginal);';\n case 'round_prefer_floor':\n return 'if (fract(xOriginal) == 0.5) { \\\n return floor(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'simple':\n default:\n if (opsetVersion < 11) {\n return 'if (isDownSample) \\\n { \\\n return ceil(xOriginal); \\\n } else { \\\n return xOriginal; \\\n }';\n }\n throw new Error(`Nearest mode ${nearestMode} is not supported`);\n }\n })() +\n '}';\n\nconst updateRoI = (roi: readonly number[], axes: readonly number[], rank: number): number[] => {\n const roiTmp = new Array(rank).fill(0).concat(new Array(rank).fill(1));\n const roiLocal = roi.length === 0 ? roiTmp : roi.slice();\n if (axes.length > 0) {\n axes.forEach((v, i) => {\n roiTmp[v] = roiLocal[i];\n roiTmp[i + rank] = roiLocal[axes.length + i];\n });\n return roiTmp;\n }\n return roiLocal;\n};\n\nconst initOutputShape =\n (inputShape: readonly number[], scales: readonly number[], sizes: readonly number[], axes: readonly number[]):\n number[] => {\n let outputShape: number[] = [];\n if (sizes.length > 0) {\n if (axes.length > 0) {\n inputShape.forEach((v) => outputShape.push(v));\n if (Math.max(...axes) > inputShape.length) {\n throw new Error('axes is out of bound');\n }\n axes.forEach((v, i) => outputShape[v] = sizes[i]);\n } else {\n sizes.forEach((v) => outputShape.push(v));\n }\n } else {\n if (scales.length === 0) {\n throw new Error('Resize requires either scales or sizes.');\n } else {\n outputShape = inputShape.map((value, index) => Math.round(value * scales[index]));\n }\n }\n return outputShape;\n };\n\nconst adjustOutputShape = (inputShape: readonly number[], scales: number[], attributes: ResizeAttributes) => {\n const scaleInPolicy = (() => {\n switch (attributes.keepAspectRatioPolicy) {\n case 'not_larger':\n return attributes.axes.length > 0 ? 
Math.min(...attributes.axes.map(i => scales[i]), Number.MAX_VALUE) :\n Math.min(...scales, Number.MAX_VALUE);\n case 'not_smaller':\n return attributes.axes.length > 0 ? Math.max(...attributes.axes.map(i => scales[i]), Number.MIN_VALUE) :\n Math.max(...scales, Number.MIN_VALUE);\n default:\n throw new Error(`Keep aspect ratio policy ${attributes.keepAspectRatioPolicy} is not supported`);\n }\n })();\n scales.fill(1.0, 0, scales.length);\n const adjustedOutputShape = inputShape.slice();\n if (attributes.axes.length > 0) {\n attributes.axes.forEach((v) => scales[v] = scaleInPolicy);\n attributes.axes.forEach((v) => adjustedOutputShape[v] = Math.round(inputShape[v] * scales[v]));\n } else {\n scales.fill(scaleInPolicy, 0, scales.length);\n adjustedOutputShape.forEach((v, i) => adjustedOutputShape[i] = Math.round(v * scales[i]));\n }\n return adjustedOutputShape;\n};\n\nconst calculateOriginalIndicesFromOutputIndices =\n (output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[], scalesLength: number,\n roiLength: number): string => `\n fn calculateOriginalIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> array<${\n output.type.value}, ${outputShape.length}> {\n var original_indices: array<${output.type.value}, ${outputShape.length}>;\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n if (scale == 1.0) {\n original_indices[i] = ${output.type.value}(output_index);\n } else {\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n original_indices[i] = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n }\n }\n return original_indices;\n }`;\n\nconst calculateInputIndicesFromOutputIndices =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scalesLength: number, roiLength: number, useExtrapolation: boolean): string => `\n fn calculateInputIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var input_index: u32;\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n if (scale == 1.0) {\n input_index = output_index;\n } else {\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n var original_idx = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n if (!${useExtrapolation} || (original_idx >= 0 && original_idx < ${output.type.value}(input_shape_i))) {\n if (original_idx < 0) {\n input_index = 0;\n } else if (original_idx > ${output.type.value}(input_shape_i - 1)) {\n input_index = input_shape_i - 1;\n } else {\n input_index = 
u32(getNearestPixelFromOriginal(original_idx, scale < 1));\n }\n } else {\n input_index = u32(original_idx);\n }\n }\n ${input.indicesSet('input_indices', 'i', ' input_index')}\n }\n return input_indices;\n }`;\nconst checkInputIndices = (input: IndicesHelper, inputShape: readonly number[]): string => `\n fn checkInputIndices(input_indices: ${input.type.indices}) -> bool {\n for (var i:u32 = 0; i < ${inputShape.length}; i++) {\n var input_index = ${input.indicesGet('input_indices', 'i')};\n if (input_index < 0 || input_index >= ${getElementAt('uniforms.input_shape', 'i', inputShape.length)}) {\n return false;\n }\n }\n return true;\n }`;\n\nconst setChannelAndBatchIndices =\n (input: IndicesHelper, channelIdx: number, batchIdx: number, spacialDims: number): string =>\n input.rank > spacialDims ? `\n ${input.indicesSet('input_indices', channelIdx, 'channel')};\n ${input.indicesSet('input_indices', batchIdx, 'batch')};\n` :\n '';\n\nconst bilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 2 ? [-1, 0, 1, -1] : (isNchw ? [0, 2, 3, 1] : [0, 1, 2, 3]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(row, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(col, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 2)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn bilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var row:${dType} = originalIndices[${heightIdx}];\n var col:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ?\n `if (row < 0 || row > (${inputShape[heightIdx]} - 1) || col < 0 || col > (${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n row = max(0, min(row, ${inputShape[heightIdx]} - 1));\n col = max(0, min(col, ${inputShape[widthIdx]} - 1));\n var row1: u32 = u32(row);\n var col1: u32 = u32(col);\n var row2: u32 = u32(row + 1);\n var col2: u32 = u32(col + 1);\n var channel: u32 = ${inputShape.length > 2 ? `u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 2 ? 
`u32(originalIndices[${batchIdx}])` : '0'};\n var x11: ${dType} = getInputValue(batch, channel, row1, col1);\n var x12: ${dType} = getInputValue(batch, channel, row1, col2);\n var x21: ${dType} = getInputValue(batch, channel, row2, col1);\n var x22: ${dType} = getInputValue(batch, channel, row2, col2);\n var dx1: ${dType} = abs(row - ${dType}(row1));\n var dx2: ${dType} = abs(${dType}(row2) - row);\n var dy1: ${dType} = abs(col - ${dType}(col1));\n var dy2: ${dType} = abs(${dType}(col2) - col);\n if (row1 == row2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (col1 == col2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1);\n }`;\n };\n\nconst bicubicInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scales: readonly number[], roi: readonly number[], cubicCoeffA: number, useExtrapolation: boolean,\n extrapolationValue: number, excludeOutside: boolean): string => {\n const is2D = inputShape.length === 2;\n const isNchw = true;\n const [heightIdx, widthIdx] = is2D ? [0, 1] : isNchw ? [2, 3] : [1, 2];\n const dType = input.type.value;\n const createCubicInterpolationFunction = (idx: number): string => {\n const direction = idx === heightIdx ? 'row' : 'col';\n return `\n fn ${direction}CubicInterpolation(input_indices: ${input.type.indices}, output_indices: ${\n output.type.indices}) -> ${dType} {\n var output_index = ${output.indicesGet('output_indices', idx)};\n var originalIdx: ${dType} = getOriginalCoordinateFromResizedCoordinate(output_index, ${scales[idx]},\n ${outputShape[idx]}, ${inputShape[idx]}, ${roi[idx]}, ${roi[idx]} + ${inputShape.length});\n var fractOriginalIdx: ${dType} = originalIdx - floor(originalIdx);\n var coefs = getCubicInterpolationCoefs(fractOriginalIdx);\n\n if (${useExtrapolation} && (originalIdx < 0 || originalIdx > (${inputShape[idx]} - 1))) {\n return ${extrapolationValue};\n }\n var data: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n for (var i: i32 = -1; i < 3; i++) {\n var ${direction}: ${dType} = originalIdx + ${dType}(i);\n if (${direction} < 0 || ${direction} >= ${inputShape[idx]}) {\n ${(() => {\n if (excludeOutside) {\n return `coefs[i + 1] = 0.0;\n continue;`;\n } else if (useExtrapolation) {\n return `return ${extrapolationValue};`;\n } else {\n return `${direction} = max(0, min(${direction}, ${inputShape[idx]} - 1));`;\n }\n })()};\n }\n var input_indices_copy: ${input.type.indices} = input_indices;\n ${input.indicesSet('input_indices_copy', idx, `u32(${direction})`)};\n data[i + 1] = ${\n idx === heightIdx ? 
input.getByIndices('input_indices_copy') :\n 'rowCubicInterpolation(input_indices_copy, output_indices)'};\n }\n return cubicInterpolation1D(data, coefs);\n }`;\n };\n\n return `\n ${createCubicInterpolationFunction(heightIdx)};\n ${createCubicInterpolationFunction(widthIdx)};\n fn getCubicInterpolationCoefs(s: ${dType}) -> array<${dType}, 4> {\n var absS = abs(s);\n var coeffs: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n var oneMinusAbsS: ${dType} = 1.0 - absS;\n var twoMinusAbsS: ${dType} = 2.0 - absS;\n var onePlusAbsS: ${dType} = 1.0 + absS;\n coeffs[0] = ((${cubicCoeffA} * onePlusAbsS - 5 * ${cubicCoeffA}) * onePlusAbsS + 8 * ${\n cubicCoeffA}) * onePlusAbsS - 4 * ${cubicCoeffA};\n coeffs[1] = ((${cubicCoeffA} + 2) * absS - (${cubicCoeffA} + 3)) * absS * absS + 1;\n coeffs[2] = ((${cubicCoeffA} + 2) * oneMinusAbsS - (${cubicCoeffA} + 3)) * oneMinusAbsS * oneMinusAbsS + 1;\n coeffs[3] = ((${cubicCoeffA} * twoMinusAbsS - 5 * ${cubicCoeffA}) * twoMinusAbsS + 8 * ${\n cubicCoeffA}) * twoMinusAbsS - 4 * ${cubicCoeffA};\n return coeffs;\n }\n\n fn cubicInterpolation1D(x: array<${dType}, 4>, coefs: array<${dType}, 4>) -> ${dType} {\n var coefsSum: ${dType} = coefs[0] + coefs[1] + coefs[2] + coefs[3];\n return (x[0] * coefs[0] + x[1] * coefs[1]+ x[2] * coefs[2]+ x[3] * coefs[3]) / coefsSum;\n }\n\n fn bicubicInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var input_indices: ${input.type.indices} = output_indices;\n return colCubicInterpolation(input_indices, output_indices);\n }\n `;\n };\n\nconst trilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, depthIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 3 ? [-1, 0, 1, 2, -1] : (isNchw ? [0, 2, 3, 4, 1] : [0, 1, 2, 3, 4]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', depthIdx, `max(0, min(depth, ${inputShape[depthIdx]} - 1))`)};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(height, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(width, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 3)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn trilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var depth:${dType} = originalIndices[${depthIdx}];\n var height:${dType} = originalIndices[${heightIdx}];\n var width:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ? `if (depth < 0 || depth > (${inputShape[depthIdx]} - 1) || height < 0 || height > (${\n inputShape[heightIdx]} - 1) || width < 0 || (width > ${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n\n depth = max(0, min(depth, ${inputShape[depthIdx]} - 1));\n height = max(0, min(height, ${inputShape[heightIdx]} - 1));\n width = max(0, min(width, ${inputShape[widthIdx]} - 1));\n var depth1: u32 = u32(depth);\n var height1: u32 = u32(height);\n var width1: u32 = u32(width);\n var depth2: u32 = u32(depth + 1);\n var height2: u32 = u32(height + 1);\n var width2: u32 = u32(width + 1);\n var channel: u32 = ${inputShape.length > 3 ? 
`u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 3 ? `u32(originalIndices[${batchIdx}])` : '0'};\n\n var x111: ${dType} = getInputValue(batch, channel, depth1, height1, width1);\n var x112: ${dType} = getInputValue(batch, channel, depth1, height1, width2);\n var x121: ${dType} = getInputValue(batch, channel, depth1, height2, width1);\n var x122: ${dType} = getInputValue(batch, channel, depth1, height2, width2);\n var x211: ${dType} = getInputValue(batch, channel, depth2, height1, width1);\n var x212: ${dType} = getInputValue(batch, channel, depth2, height1, width2);\n var x221: ${dType} = getInputValue(batch, channel, depth2, height2, width1);\n var x222: ${dType} = getInputValue(batch, channel, depth2, height2, width2);\n var dx1: ${dType} = abs(depth - ${dType}(depth1));\n var dx2: ${dType} = abs(${dType}(depth2) - depth);\n var dy1: ${dType} = abs(height - ${dType}(height1));\n var dy2: ${dType} = abs(${dType}(height2) - height);\n var dz1: ${dType} = abs(width - ${dType}(width1));\n var dz2: ${dType} = abs(${dType}(width2) - width);\n if (depth1 == depth2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (height1 == height2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n if (width1 == width2) {\n dz1 = 0.5;\n dz2 = 0.5;\n }\n return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 +\n x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1);\n }`;\n };\n\nconst createResizeProgramInfo =\n (inputTensor: TensorView, attributes: ResizeAttributes, opsetVersion: number, scalesInput: readonly number[],\n sizes: readonly number[], roiInput: readonly number[]): ProgramInfo => {\n const inputShape = inputTensor.dims;\n const roi = updateRoI(roiInput, attributes.axes, inputShape.length);\n\n let outputShape = initOutputShape(inputShape, scalesInput, sizes, attributes.axes);\n let scales = scalesInput.slice();\n if (scalesInput.length === 0) {\n scales = inputShape.map((value, index) => value === 0 ? 1.0 : outputShape[index] / value);\n if (attributes.keepAspectRatioPolicy !== 'stretch') {\n outputShape = adjustOutputShape(inputShape, scales, attributes);\n }\n }\n const output = outputVariable('output', inputTensor.dataType, outputShape.length);\n const input = inputVariable('input', inputTensor.dataType, inputShape.length);\n const outputSize = ShapeUtil.size(outputShape);\n const noScale = inputShape.length === outputShape.length && inputShape.every((d, i) => d === outputShape[i]);\n const useExtrapolation = attributes.coordinateTransformMode === 'tf_crop_and_resize';\n const extrapolationValue = attributes.extrapolationValue;\n const dataType = input.type.value;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${noScale ? 
'' : `\n ${getOriginalCoordinateFromResizedCoordinate(attributes.coordinateTransformMode, dataType)};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `\n ${checkInputIndices(input, inputShape)};\n ${getNearestPixelFromOriginal(attributes.nearestMode, opsetVersion, dataType)};\n ${\n calculateInputIndicesFromOutputIndices(\n input, output, inputShape, outputShape, scales.length, roi.length, useExtrapolation)};\n `;\n case 'linear':\n return `\n ${calculateOriginalIndicesFromOutputIndices(output, inputShape, outputShape, scales.length, roi.length)};\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${bilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else if (inputShape.length === 3 || inputShape.length === 5) {\n return `${trilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else {\n throw Error('Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.');\n }\n })()};\n `;\n case 'cubic':\n return `\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${\n bicubicInterpolation(\n input, output, inputShape, outputShape, scales, roi, attributes.cubicCoeffA, useExtrapolation,\n attributes.extrapolationValue, attributes.excludeOutside)}`;\n } else {\n throw Error('Cubic mode only supports input dims 2 and 4 are supported in linear mode.');\n }\n })()};\n `;\n default:\n throw Error('Invalid resize mode');\n }\n })()};\n `}\n ${\n shaderHelper.registerUniform('output_size', 'u32')\n .registerUniform('scales', 'f32', scales.length)\n .registerUniform('roi', 'f32', roi.length)\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n ${noScale ? 'output[global_idx] = input[global_idx];' : `\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `input_indices = calculateInputIndicesFromOutputIndices(output_indices);\n if (checkInputIndices(input_indices)) {\n output[global_idx] = ${input.getByIndices('input_indices')};\n } else {\n output[global_idx] = ${attributes.extrapolationValue};\n }`;\n case 'linear':\n return `output[global_idx] = ${\n (inputShape.length === 2 || inputShape.length === 4) ? 'bilinearInterpolation' :\n 'trilinearInterpolation'}(output_indices);`;\n case 'cubic':\n return 'output[global_idx] = bicubicInterpolation(output_indices);';\n default:\n throw Error(`Unsupported resize mode: ${attributes.mode}`);\n }\n })()};\n`}\n }`;\n\n return {\n name: 'Resize',\n shaderCache: {\n hint: `${attributes.cacheKey}|${opsetVersion}|${scales.length > 0 ? scales : ''}|${\n sizes.length > 0 ? sizes : ''}|${roi.length > 0 ? 
roi : ''}|${noScale}|${inputShape}`,\n inputDependencies: ['rank']\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputTensor.dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.float, data: scales},\n {type: DataType.float, data: roi}, ...createTensorShapeVariables(inputShape, outputShape)\n ]\n })\n };\n };\n\nconst getOpsetVersionFromCustomDataBuffer = (context: ComputeContext): number => {\n const customDataBuffer = context.customDataBuffer;\n const customDataBuffer32 = new Uint32Array(customDataBuffer, customDataBuffer.byteOffset, 1);\n const opsetVersion = customDataBuffer32[0];\n return opsetVersion;\n};\n\nexport const resize = (context: ComputeContext, attributes: ResizeAttributes): void => {\n const scales: number[] = [];\n const sizes: number[] = [];\n const roi: number[] = [];\n\n // Note that scales in resize are always f32. roi can be f32 or f16.\n // TODO: Currently this code does not support f16 for roi when passed as optional input.\n\n const opsetVersion = getOpsetVersionFromCustomDataBuffer(context);\n if (attributes.antialias !== 0) {\n throw Error('Only default value (0) for Antialias attribute is supported');\n }\n validateInputs(context.inputs, attributes, opsetVersion, scales, sizes, roi);\n context.compute(\n createResizeProgramInfo(context.inputs[0], attributes, opsetVersion, scales, sizes, roi), {inputs: [0]});\n};\n\nexport const parseResizeAttributes = (attributes: Record): ResizeAttributes => {\n const antialias = attributes.antialias as number;\n const axes = attributes.axes as number[];\n const coordinateTransformMode: CoordinateTransformMode =\n attributes.coordinateTransformMode as CoordinateTransformMode;\n const cubicCoeffA = attributes.cubicCoeffA as number;\n const excludeOutside = attributes.excludeOutside as number !== 0;\n const extrapolationValue = attributes.extrapolationValue as number;\n const keepAspectRatioPolicy: KeepAspectRatioPolicy = attributes.keepAspectRatioPolicy as KeepAspectRatioPolicy;\n const mode: Mode = attributes.mode as Mode;\n // If nearestMode is not specified, use simple mode.\n const nearestMode: NearestMode = (attributes.nearestMode === '' ? 'simple' : attributes.nearestMode) as NearestMode;\n return createAttributeWithCacheKey({\n antialias,\n axes,\n coordinateTransformMode,\n cubicCoeffA,\n excludeOutside,\n extrapolationValue,\n keepAspectRatioPolicy,\n mode,\n nearestMode\n });\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, WORKGROUP_SIZE} from './common';\n\nexport interface RotaryEmbeddingAttributes {\n readonly interleaved: boolean;\n readonly numHeads: number;\n readonly rotaryEmbeddingDim: number;\n readonly scale: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): void => {\n const [input, positionIds, cosCache, sinCache] = inputs;\n const {numHeads, rotaryEmbeddingDim} = attributes;\n\n if (input.dims.length !== 3 && input.dims.length !== 4) {\n throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${input.dims.length}`);\n }\n if (!ShapeUtil.areEqual(positionIds.dims, []) && !ShapeUtil.areEqual(positionIds.dims, [1]) &&\n positionIds.dims.length !== 2) {\n throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${positionIds.dims.length}`);\n }\n if (cosCache.dims.length !== 2) {\n throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${cosCache.dims.length}`);\n }\n if (sinCache.dims.length !== 2) {\n throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${sinCache.dims.length}`);\n }\n if (!ShapeUtil.areEqual(cosCache.dims, sinCache.dims)) {\n throw new Error('Inputs \\'cos_cache\\' and \\'sin_cache\\' are expected to have the same shape');\n }\n\n if (rotaryEmbeddingDim > 0 && numHeads === 0) {\n throw new Error('num_heads must be provided if rotary_embedding_dim is specified');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[input.dims.length - 2];\n const maxSequenceLength = cosCache.dims[0];\n const hiddenSize = ShapeUtil.sizeFromDimension(input.dims, 1) / sequenceLength;\n const headSize = rotaryEmbeddingDim === 0 ? 
cosCache.dims[1] * 2 : hiddenSize / numHeads;\n if (rotaryEmbeddingDim > headSize) {\n throw new Error('rotary_embedding_dim must be less than or equal to head_size');\n }\n\n if (positionIds.dims.length === 2) {\n if (batchSize !== positionIds.dims[0]) {\n throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${positionIds.dims[0]}`);\n }\n if (sequenceLength !== positionIds.dims[1]) {\n throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${positionIds.dims[1]}`);\n }\n }\n\n if (headSize / 2 !== cosCache.dims[1] && rotaryEmbeddingDim / 2 !== cosCache.dims[1]) {\n throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${\n cosCache.dims[1]}`);\n }\n\n if (sequenceLength > maxSequenceLength) {\n throw new Error('Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported');\n }\n};\n\nconst createRotaryEmbeddingProgramInfo =\n (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): ProgramInfo => {\n const {interleaved, numHeads, rotaryEmbeddingDim, scale} = attributes;\n const batchSize = inputs[0].dims[0];\n const batchStride = ShapeUtil.sizeFromDimension(inputs[0].dims, 1);\n const sequenceLength = inputs[0].dims[inputs[0].dims.length - 2];\n const hiddenSize = batchStride / sequenceLength;\n const halfRotaryEmbeddingDim = inputs[2].dims[1];\n const headSize = rotaryEmbeddingDim === 0 ? halfRotaryEmbeddingDim * 2 : hiddenSize / numHeads;\n\n // Rotary embeddings will be calculated in a pair-wise fashion. In accordance, use the shape\n // [batch size, sequence length, num of heads, num of pairs to rotate + num of dims to copy]\n // to unfold the global index in shader.\n const globalShape =\n new Array(batchSize, sequenceLength, hiddenSize / headSize, headSize - halfRotaryEmbeddingDim);\n const globalStrides = ShapeUtil.computeStrides(globalShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.float, data: scale},\n {type: DataType.uint32, data: globalShape},\n {type: DataType.uint32, data: globalStrides},\n\n // strides for addressing the input/output tensor, in permutated order to align with the unfolded global index,\n // i.e. 
BSNH\n ...(inputs[0].dims.length === 3 ?\n new Array({type: DataType.uint32, data: [batchStride, hiddenSize, headSize, 1]}) :\n []),\n ...(inputs[0].dims.length === 4 ?\n new Array(\n {type: DataType.uint32, data: [batchStride, headSize, sequenceLength * headSize, 1]}) :\n []),\n\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, inputs[2].dims, inputs[3].dims, inputs[0].dims),\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const positionIds = inputVariable('position_ids', inputs[1].dataType, inputs[1].dims.length);\n const cosCache = inputVariable('cos_cache', inputs[2].dataType, inputs[2].dims.length);\n const sinCache = inputVariable('sin_cache', inputs[3].dataType, inputs[3].dims.length);\n const output = outputVariable('output', inputs[0].dataType, inputs[0].dims.length);\n\n shaderHelper.registerUniforms([\n {name: 'scale', type: 'f32'},\n {name: 'global_shape', type: 'u32', length: globalShape.length},\n {name: 'global_strides', type: 'u32', length: globalStrides.length},\n {name: 'input_output_strides', type: 'u32', length: globalStrides.length},\n ]);\n\n return `\n ${shaderHelper.declareVariables(input, positionIds, cosCache, sinCache, output)}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n let half_rotary_emb_dim = uniforms.${cosCache.name}_shape[1];\n let bsnh = global_idx / uniforms.global_strides % uniforms.global_shape;\n let size = uniforms.global_shape[0] * uniforms.global_strides[0];\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('size')}\n\n if (bsnh[3] < half_rotary_emb_dim) {\n let position_ids_idx =\n ${positionIds.broadcastedIndicesToOffset('bsnh.xy', outputVariable('', positionIds.type.tensor, 2))};\n let position_id =\n u32(${positionIds.getByOffset('position_ids_idx')}) + select(0, bsnh[1], position_ids_idx == 0);\n let i = dot(bsnh, uniforms.input_output_strides) + select(0, bsnh[3], ${interleaved});\n let j = i + select(half_rotary_emb_dim, 1, ${interleaved});\n let re = ${input.getByOffset('i')} * ${cosCache.get('position_id', 'bsnh[3]')} -\n ${input.getByOffset('j')} * ${sinCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('i', 're')}\n let im = ${input.getByOffset('i')} * ${sinCache.get('position_id', 'bsnh[3]')} +\n ${input.getByOffset('j')} * ${cosCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('j', 'im')}\n } else {\n let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim;\n ${output.setByOffset('k', input.getByOffset('k'))}\n }\n }`;\n };\n\n return {\n name: 'RotaryEmbedding',\n shaderCache: {\n hint: createAttributeWithCacheKey({\n interleaved,\n }).cacheKey,\n inputDependencies: ['rank', 'rank', 'rank', 'rank'],\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(globalShape) / WORKGROUP_SIZE)},\n programUniforms,\n }),\n };\n };\n\nexport const rotaryEmbedding = (context: ComputeContext, attributes: RotaryEmbeddingAttributes): void => {\n validateInputs(context.inputs, attributes);\n context.compute(createRotaryEmbeddingProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {castToF32, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface SkipLayerNormAttributes {\n simplified: boolean;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 3) {\n throw new Error('layerNorm requires at least 3 inputs.');\n }\n\n const input: TensorView = inputs[0];\n const skip: TensorView = inputs[1];\n const gamma: TensorView = inputs[2];\n\n if (input.dataType !== skip.dataType || input.dataType !== gamma.dataType) {\n throw new Error('All inputs must have the same data type');\n }\n\n if (input.dims.length !== 3 && input.dims.length !== 2) {\n throw new Error('Input must be 2D or 3D');\n }\n\n if (skip.dims.length !== 3 && skip.dims.length !== 2) {\n throw new Error('Skip must be 2D or 3D');\n }\n\n const hiddenSize = input.dims[input.dims.length - 1];\n const sequenceLength = input.dims[input.dims.length - 2];\n if (skip.dims[skip.dims.length - 1] !== hiddenSize) {\n throw new Error('Skip must have the same hidden size as input');\n }\n if (skip.dims[skip.dims.length - 2] !== sequenceLength) {\n throw new Error('Skip must have the same sequence length as input');\n }\n\n if (gamma.dims.length !== 1) {\n throw new Error('Gamma must be 1D');\n }\n if (gamma.dims[gamma.dims.length - 1] !== hiddenSize) {\n throw new Error('Gamma must have the same hidden size as input');\n }\n if (inputs.length > 3) {\n const beta: TensorView = inputs[3];\n if (beta.dims.length !== 1) {\n throw new Error('Beta must be 1D');\n }\n if (beta.dims[beta.dims.length - 1] !== hiddenSize) {\n throw new Error('Beta must have the same hidden size as input');\n }\n }\n if (inputs.length > 4) {\n const bias: TensorView = inputs[4];\n if (bias.dims.length !== 1) {\n throw new Error('Bias must be 1D');\n }\n if (bias.dims[bias.dims.length - 1] !== hiddenSize) {\n throw new Error('Bias must have the same hidden size as input');\n }\n }\n};\n\nconst createSkipLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: SkipLayerNormAttributes, outputCount: number, isTraining: boolean):\n ProgramInfo => {\n const simplified = attributes.simplified;\n\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const outputShape = inputShape;\n const outputSize = inputSize;\n const hiddenSize = inputShape.slice(-1)[0];\n const meanInvStdDevDim = isTraining ? 
inputShape.slice(0, -1).concat(1) : [];\n const hasBetaInput = !simplified && inputs.length > 3;\n const hasBiasInput = inputs.length > 4;\n const hasMeanOutput = isTraining && outputCount > 1;\n const hasInvStdDevOutput = isTraining && outputCount > 2;\n const hasInputSkipBiasSumOutput = outputCount > 3;\n const workgroupSize = 64;\n\n const components = getMaxComponents(hiddenSize);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.uint32, data: components},\n {type: DataType.uint32, data: hiddenSize},\n {type: DataType.float, data: attributes.epsilon},\n ];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniformsArray: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'components', type: 'u32'},\n {name: 'hidden_size', type: 'u32'},\n {name: 'epsilon', type: 'f32'},\n ];\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('skip', inputs[1].dataType, inputs[1].dims, components),\n inputVariable('gamma', inputs[2].dataType, inputs[2].dims, components),\n ];\n if (hasBetaInput) {\n variables.push(inputVariable('beta', inputs[3].dataType, inputs[3].dims, components));\n }\n if (hasBiasInput) {\n variables.push(inputVariable('bias', inputs[4].dataType, inputs[4].dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanOutput) {\n variables.push(outputVariable('mean_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdDevOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInputSkipBiasSumOutput) {\n variables.push(outputVariable('input_skip_bias_sum', inputs[0].dataType, outputShape, components));\n }\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const vecDataType = tensorTypeToWsglStorageType(DataType.float, components);\n return `\n\n ${shaderHelper.registerUniforms(uniformsArray).declareVariables(...variables)}\n var sum_shared : array<${vecDataType}, ${workgroupSize}>;\n var sum_squared_shared : array<${vecDataType}, ${workgroupSize}>;\n\n ${shaderHelper.mainStart([\n workgroupSize, 1, 1\n ])}\n let ix = local_id.x;\n let iy = global_id.x / ${workgroupSize};\n\n let hidden_size_vectorized: u32 = uniforms.hidden_size / uniforms.components;\n var stride = hidden_size_vectorized / ${workgroupSize};\n let offset = ix * stride + iy * hidden_size_vectorized;\n let offset1d = stride * ix;\n if (ix == ${workgroupSize - 1}) {\n stride = hidden_size_vectorized - stride * ix;\n }\n for (var i: u32 = 0; i < stride; i++) {\n let skip_value = skip[offset + i];\n let bias_value = ${hasBiasInput ? 'bias[offset1d + i]' : dataType + '(0.0)'};\n let input_value = x[offset + i];\n let value = input_value + skip_value + bias_value;\n ${hasInputSkipBiasSumOutput ? 
'input_skip_bias_sum[offset + i] = value;' : ''}\n output[offset + i] = value;\n let f32_value = ${castToF32(dataType, components, 'value')};\n sum_shared[ix] += f32_value;\n sum_squared_shared[ix] += f32_value * f32_value;\n }\n workgroupBarrier();\n\n var reduce_size : u32 = ${workgroupSize};\n for (var curr_size = reduce_size >> 1; curr_size > 0; curr_size = reduce_size >> 1) {\n reduce_size = curr_size + (reduce_size & 1);\n if (ix < curr_size) {\n sum_shared[ix] += sum_shared[ix + reduce_size];\n sum_squared_shared[ix] += sum_squared_shared[ix + reduce_size];\n }\n workgroupBarrier();\n }\n\n let sum = sum_shared[0];\n let square_sum = sum_squared_shared[0];\n let mean = ${sumVector('sum', components)} / f32(uniforms.hidden_size);\n let inv_std_dev = inverseSqrt(${sumVector('square_sum', components)} / f32(uniforms.hidden_size) ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n ${hasMeanOutput ? 'mean_output[global_idx] = mean;' : ''}\n ${hasInvStdDevOutput ? 'inv_std_output[global_idx] = inv_std_dev;' : ''}\n\n for (var i: u32 = 0; i < stride; i++) {\n output[offset + i] = (output[offset + i] ${simplified ? '' : `- ${dataType}(mean)`}) *\n ${dataType}(inv_std_dev) * gamma[offset1d + i]\n ${hasBetaInput ? '+ beta[offset1d + i]' : ''};\n }\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (outputCount > 1) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 2) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 3) {\n outputs.push({dims: inputShape, dataType: inputs[0].dataType});\n }\n return {\n name: 'SkipLayerNormalization',\n shaderCache: {\n hint: `${components};${hasMeanOutput};${hasInvStdDevOutput};${hasInputSkipBiasSumOutput}`,\n inputDependencies: inputs.map((_input, _index) => 'type')\n },\n getShaderSource,\n getRunData: () => ({\n outputs,\n dispatchGroup: {\n x: Math.ceil(outputSize / hiddenSize),\n },\n programUniforms\n }),\n };\n };\n\nexport const skipLayerNorm = (context: ComputeContext, attributes: SkipLayerNormAttributes): void => {\n // TODO: initialize isTraining from ComputeContext\n const isTraining = false;\n validateInputs(context.inputs);\n // Mean and InvStdDev are only used in training mode and are not required for inference.\n // They are added here for completeness only.\n const outputs = [0];\n if (context.outputCount > 1) {\n outputs.push(isTraining ? 1 : -3);\n }\n if (context.outputCount > 2) {\n outputs.push(isTraining ? 2 : -3);\n }\n if (context.outputCount > 3) {\n outputs.push(3);\n }\n context.compute(\n createSkipLayerNormProgramInfo(context.inputs, attributes, context.outputCount, isTraining), {outputs});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly starts: number[];\n readonly ends: number[];\n readonly axes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: SliceAttributes): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n if (attributes.axes.length !== 0) {\n if (attributes.axes.length !== attributes.starts.length || attributes.axes.length !== attributes.ends.length) {\n throw new Error('axes, starts and ends must have the same length');\n }\n } else if (attributes.starts.length !== attributes.ends.length) {\n throw new Error('starts and ends must have the same length');\n }\n inputs.slice(1).forEach((_, idx) => {\n if (inputs[idx + 1].dataType !== DataType.int32 && inputs[idx + 1].dataType !== DataType.int64) {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n });\n};\n\nconst readInput = (inputs: readonly TensorView[], idx: number): number[] => {\n const input: number[] = [];\n if (inputs.length > idx) {\n if (inputs[idx].dataType === DataType.int64) {\n inputs[idx].getBigInt64Array().forEach(v => input.push(Number(v)));\n } else if (inputs[idx].dataType === DataType.int32) {\n inputs[idx].getInt32Array().forEach(v => input.push(Number(v)));\n } else {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n }\n return input;\n};\n\nconst createSliceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SliceAttributes): SliceAttributes => {\n if (inputs.length > 1) {\n const starts: number[] = readInput(inputs, 1);\n const ends: number[] = readInput(inputs, 2);\n let axes: number[] = readInput(inputs, 3);\n if (axes.length === 0) {\n axes = [...Array(inputs[0].dims.length).keys()];\n }\n return createAttributeWithCacheKey({starts, ends, axes});\n } else {\n return attributes;\n }\n };\n\nconst fixStartEndValues =\n (value: number, index: number, inputShape: readonly number[], axes: readonly number[], steps: readonly number[]):\n number => {\n let newValue = value;\n if (value < 0) {\n newValue += inputShape[axes[index]];\n }\n if (steps[index] < 0) {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]] - 1));\n } else {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]]));\n }\n };\n\nconst calculateInputIndicesImpl =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[]): string =>\n `fn calculateInputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n var carry = 0u;\n for (var i = ${inputShape.length}; i >= 0; i--) {\n let input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n let steps_i = ${getElementAt('uniforms.steps', 'i', inputShape.length)};\n let signs_i = ${getElementAt('uniforms.signs', 'i', inputShape.length)};\n let starts_i = ${getElementAt('uniforms.starts', 'i', inputShape.length)};\n var output_index = ${output.indicesGet('output_indices', 
'i')};\n var input_index = output_index * steps_i + starts_i + carry;\n carry = input_index / input_shape_i;\n input_index = input_index % input_shape_i;\n if (signs_i < 0) {\n input_index = input_shape_i - input_index - 1u + starts_i;\n }\n ${input.indicesSet('input_indices', 'i', 'input_index')};\n }\n return input_indices;\n }`;\n\nconst createSliceProgramInfo = (inputs: readonly TensorView[], attributes: SliceAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const axes = (attributes.axes.length > 0) ? ShapeUtil.normalizeAxes(attributes.axes, inputShape.length) :\n [...Array(inputShape.length).keys()];\n let steps = readInput(inputs, 4);\n steps.forEach((step) => step !== 0 || (() => {\n throw new Error('step cannot be 0');\n }));\n if (steps.length === 0) {\n steps = Array(axes.length).fill(1);\n }\n const starts = attributes.starts.map((start, i) => fixStartEndValues(start, i, inputShape, axes, steps));\n\n const ends = attributes.ends.map((end, i) => fixStartEndValues(end, i, inputShape, axes, steps));\n\n if (axes.length !== starts.length || axes.length !== ends.length) {\n throw new Error('start, ends and axes should have the same number of elements');\n }\n\n if (axes.length !== inputShape.length) {\n for (let i = 0; i < inputShape.length; ++i) {\n if (!axes.includes(i)) {\n starts.splice(i, 0, 0);\n ends.splice(i, 0, inputShape[i]);\n steps.splice(i, 0, 1);\n }\n }\n }\n const signs = steps.map(step => Math.sign(step));\n // Convert negative steps to positive steps and reverse starts and ends\n steps.forEach((step, i, array) => {\n if (step < 0) {\n const numSteps = (ends[i] - starts[i]) / step;\n const newEnd = starts[i];\n const newStart = newEnd + numSteps * steps[i];\n starts[i] = newStart;\n ends[i] = newEnd;\n array[i] = -step;\n }\n });\n // Output rank is expected to be less than or equal to the input rank.\n const outputShape = inputShape.slice(0);\n axes.forEach((axis, _) => {\n outputShape[axis] = Math.ceil((ends[axis] - starts[axis]) / steps[axis]);\n });\n const outputTensorInfo: TensorInfo = {dims: outputShape, dataType: inputs[0].dataType};\n\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const outputSize = ShapeUtil.size(outputShape);\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'starts', type: 'u32', length: starts.length},\n {name: 'signs', type: 'i32', length: signs.length}, {name: 'steps', type: 'u32', length: steps.length}\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: starts},\n {type: DataType.int32, data: signs}, {type: DataType.uint32, data: steps},\n ...createTensorShapeVariables(inputs[0].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${calculateInputIndicesImpl(input, output, inputShape)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n let input_indices = calculateInputIndices(output_indices);\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n return {\n name: 'Slice',\n shaderCache: {hint: `${signs.length}_${starts.length}_${steps.length}`, inputDependencies: ['rank']},\n 
getShaderSource,\n getRunData: () => ({\n outputs: [outputTensorInfo],\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const slice = (context: ComputeContext, attributes: SliceAttributes): void => {\n validateInputs(context.inputs, attributes);\n const updatedAttributes = createSliceAttributesFromInputs(context.inputs, attributes);\n context.compute(createSliceProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n // if (ShapeUtil.size(program.outputs[0].dims) > 0) {\n // context.compute(programInfoLoader, {inputs: [0]});\n // } else {\n // // TODO: support empty output\n // throw new Error('slice: output size is 0');\n // }\n};\n\nexport const parseSliceAttributes = (attributes: Record): SliceAttributes => {\n const starts = attributes.starts as number[];\n const ends = attributes.ends as number[];\n const axes = attributes.axes as number[];\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax op requires 1 input.');\n }\n};\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst createSoftmaxProgramInfo = (input: TensorView, attributes: SoftmaxAttributes): ProgramInfo => {\n const shape = input.dims;\n const outputSize = ShapeUtil.size(shape);\n const WG = 64;\n let axis = attributes.axis;\n if (axis < 0) {\n axis = shape.length + axis;\n }\n if (axis < shape.length - 1) {\n throw new Error('softmax only supports last axis for now.');\n }\n\n const cols = shape[axis];\n const rows = outputSize / cols;\n const components = getMaxComponents(cols);\n const packedCols = cols / components;\n\n const maxVector = (name: string, components: number) => {\n if (components === 4) {\n return `max(max(${name}.x, ${name}.y), max(${name}.z, ${name}.w))`;\n } else if (components === 2) {\n return `max(${name}.x, ${name}.y)`;\n } else if (components === 3) {\n return `max(max(${name}.x, ${name}.y), ${name}.z)`;\n }\n\n return name;\n };\n const x = inputVariable('x', input.dataType, input.dims, components);\n const output = outputVariable('result', input.dataType, input.dims, components);\n const valueType = x.type.value;\n // 6.2.4 in wgsl spec\n const threadMaxDecl = tensorTypeToWsglStorageType(input.dataType) === 'f32' ?\n `var threadMax = ${valueType}(-3.402823e+38f);` :\n `var threadMax = ${valueType}(-65504.0h);`;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n var rowMaxShared : ${valueType};\n var rowSumShared : ${valueType};\n var threadShared : array<${valueType}, ${WG}>;\n\n fn getValue(row: i32, col: i32, row_stride: i32) -> ${valueType} {\n let index = row * row_stride + col;\n return x[index];\n }\n\n fn setValue(row: 
i32, col: i32, row_stride: i32, value: ${valueType}) {\n let index = row * row_stride + col;\n result[index] = value;\n }\n ${shaderHelper.registerUniform('packedCols', 'i32').declareVariables(x, output)}\n ${shaderHelper.mainStart()}\n let gindex = i32(global_idx);\n let lindex = i32(local_idx);\n const wg = ${WG};\n let row = gindex / wg;\n let cols = uniforms.packedCols;\n let row_stride : i32 = uniforms.packedCols;\n\n // find the rows max\n ${threadMaxDecl}\n for (var col = lindex; col < cols; col += wg) {\n let value = getValue(row, col, row_stride);\n threadMax = max(threadMax, value);\n }\n if (lindex < cols) {\n threadShared[lindex] = threadMax;\n }\n workgroupBarrier();\n\n var reduceSize = min(cols, wg);\n for (var currSize = reduceSize >> 1; currSize > 0; currSize = reduceSize >> 1) {\n reduceSize = currSize + (reduceSize & 1);\n if (lindex < currSize) {\n threadShared[lindex] = max(threadShared[lindex], threadShared[lindex + reduceSize]);\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowMaxShared = ${valueType}(${maxVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // find the rows sum\n var threadSum = ${valueType}(0.0);\n for (var col = lindex; col < cols; col += wg) {\n let subExp = exp(getValue(row, col, row_stride) - rowMaxShared);\n threadSum += subExp;\n }\n threadShared[lindex] = threadSum;\n workgroupBarrier();\n\n for (var currSize = wg >> 1; currSize > 0; currSize = currSize >> 1) {\n if (lindex < currSize) {\n threadShared[lindex] = threadShared[lindex] + threadShared[lindex + currSize];\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowSumShared = ${valueType}(${sumVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // calculate final value for each element in the row\n for (var col = lindex; col < cols; col += wg) {\n let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared;\n setValue(row, col, row_stride, value);\n }\n }`;\n return {\n name: 'Softmax',\n shaderCache: {hint: `${components}`, inputDependencies: ['type']},\n getRunData: () => ({\n outputs: [{dims: shape, dataType: input.dataType}],\n dispatchGroup: {x: rows},\n programUniforms: [{type: DataType.int32, data: packedCols}]\n }),\n getShaderSource,\n };\n};\n\nexport const softmax = (context: ComputeContext, attributes: SoftmaxAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createSoftmaxProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseSoftmaxAttributes = (attributes: Record): SoftmaxAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly numOutputs: number;\n readonly splitSizes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n};\n\nconst createSplitAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SplitAttributes): SplitAttributes => {\n const splitSizes: number[] = [];\n let numOutputs: number = attributes.numOutputs;\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => splitSizes.push(Number(v)));\n numOutputs = splitSizes.length;\n }\n return createAttributeWithCacheKey({numOutputs, axis: attributes.axis, splitSizes});\n };\n\nconst calculateOutputIndexImpl = (numberOfTensors: number): string => `\nfn calculateOutputIndex(index: u32) -> u32 {\n for (var i: u32 = 0u; i < ${numberOfTensors}u; i += 1u ) {\n if (index < ${getElementAt('uniforms.size_in_split_axis', 'i', numberOfTensors)}) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n}`;\nconst writeBufferDataImpl = (outputs: readonly IndicesHelper[]) => {\n const numberOfTensors = outputs.length;\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = outputs[i].setByIndices('indices', 'input[global_idx]');\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (output_number == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (output_number == ${i}) { ${returnSnippet} }`);\n }\n }\n return `\n fn writeBufferData(output_number: u32, indices: ${outputs[0].type.indices}, global_idx: u32) {\n ${codeLines.join('\\n')}\n }`;\n};\n\nconst createSplitProgramInfo = (inputs: readonly TensorView[], attributes: SplitAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const dataType = inputs[0].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const outputs = new Array(attributes.numOutputs);\n const input = inputVariable('input', dataType, inputShape.length);\n const sizeInSplitAxis = new Array(attributes.numOutputs);\n const outputsTensorInfo: TensorInfo[] = [];\n const outputShapes: number[][] = [];\n let previousSum = 0;\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: inputSize}];\n for (let i = 0; i < attributes.numOutputs; i++) {\n previousSum += attributes.splitSizes[i];\n sizeInSplitAxis[i] = previousSum;\n const outputShape = inputShape.slice();\n outputShape[attributes.axis] = attributes.splitSizes[i];\n outputShapes.push(outputShape);\n outputs[i] = outputVariable(`output${i}`, dataType, outputShape.length);\n outputsTensorInfo.push({dims: outputShapes[i], dataType: inputs[0].dataType});\n }\n programUniforms.push(\n {type: DataType.uint32, data: sizeInSplitAxis}, 
...createTensorShapeVariables(inputShape, ...outputShapes));\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('input_size', 'u32')\n .registerUniform('size_in_split_axis', 'u32', sizeInSplitAxis.length)\n .declareVariables(input, ...outputs)}\n ${calculateOutputIndexImpl(sizeInSplitAxis.length)}\n ${writeBufferDataImpl(outputs)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.input_size')}\n\n var indices = ${input.offsetToIndices('global_idx')};\n var index = ${input.indicesGet('indices', axis)};\n let output_number = calculateOutputIndex(index);\n if (output_number != 0) {\n index -= ${getElementAt('uniforms.size_in_split_axis', 'output_number - 1u', sizeInSplitAxis.length)};\n ${input.indicesSet('indices', axis, 'index')};\n }\n writeBufferData(output_number, indices, global_idx);\n }`;\n return {\n name: 'Split',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: outputsTensorInfo,\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const split = (context: ComputeContext, attributes: SplitAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes =\n context.inputs.length === 1 ? attributes : createSplitAttributesFromInputs(context.inputs, attributes);\n context.compute(createSplitProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n\nexport const parseSplitAttributes = (attributes: Record): SplitAttributes => {\n const axis = attributes.axis as number;\n const splitSizes: number[] = attributes.splitSizes as number[];\n const numOutputs = attributes.numOutputs as number < 0 ? splitSizes.length : attributes.numOutputs as number;\n if (numOutputs !== splitSizes.length) {\n throw new Error('numOutputs and splitSizes lengh must be equal');\n }\n return createAttributeWithCacheKey({axis, numOutputs, splitSizes});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst createWhereOpProgramShader =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], dimsOutput: readonly number[], isBroadcast: boolean,\n typeOutput: number) => {\n const output = outputVariable('output_data', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('a_data', inputs[1].dataType, inputs[1].dims.length, 4);\n const b = inputVariable('b_data', inputs[2].dataType, inputs[2].dims.length, 4);\n const c = inputVariable('c_data', inputs[0].dataType, inputs[0].dims.length, 4);\n\n let assignment: string;\n const expression = (a: string, b: string, c: string) => `select(${b}, ${a}, ${c})`;\n if (!isBroadcast) {\n assignment = output.setByOffset(\n 'global_idx',\n expression(a.getByOffset('global_idx'), b.getByOffset('global_idx'), c.getByOffset('global_idx')));\n } else {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `a_data[index_a${x}][component_a${x}]`;\n const expressionB = `b_data[index_b${x}][component_b${x}]`;\n // eslint-disable-next-line no-bitwise\n const expressionC = `bool(c_data[index_c${x}] & (0xffu << (component_c${x} * 8)))`;\n return `\n let output_indices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offset_a${x} = ${a.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_b${x} = ${b.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_c${x} = ${c.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let index_a${x} = offset_a${x} / 4u;\n let index_b${x} = offset_b${x} / 4u;\n let index_c${x} = offset_c${x} / 4u;\n let component_a${x} = offset_a${x} % 4u;\n let component_b${x} = offset_b${x} % 4u;\n let component_c${x} = offset_c${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expression(expressionA, expressionB, expressionC)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n output_data[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('output_data[global_idx]', 0)}\n ${singleAssignment('output_data[global_idx]', 1)}\n ${singleAssignment('output_data[global_idx]', 2)}\n ${singleAssignment('output_data[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(c, a, b, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createWhereOpProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const dimsA = inputs[1].dims;\n const dimsB = inputs[2].dims;\n const dimsC = inputs[0].dims;\n const outputDataType = inputs[1].dataType;\n\n const isBroadcast = !(ShapeUtil.areEqual(dimsA, dimsB) && ShapeUtil.areEqual(dimsB, dimsC));\n let outputShape = dimsA;\n let outputSize = ShapeUtil.size(dimsA);\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(BroadcastUtil.calcShape(dimsA, dimsB, false)!, dimsC, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform where op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n }\n\n const vecSize = Math.ceil(outputSize / 4);\n\n return {\n name: 'Where',\n shaderCache: {inputDependencies: ['rank', 'rank', 'rank']},\n getShaderSource: (shaderHelper) =>\n createWhereOpProgramShader(shaderHelper, inputs, outputShape, isBroadcast, outputDataType),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms:\n [{type: DataType.uint32, data: vecSize}, ...createTensorShapeVariables(dimsC, dimsA, dimsB, outputShape)],\n }),\n };\n};\n\nexport const where = (context: ComputeContext): void => {\n context.compute(createWhereOpProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {argMax, argMin, parseArgMinMaxAttributes} from './ops/argminmax';\nimport {attention} from './ops/attention';\nimport {batchNorm} from './ops/batch-norm';\nimport {biasAdd} from './ops/bias-add';\nimport {biasSplitGelu} from './ops/bias-split-gelu';\nimport * as binaryOps from './ops/binary-op';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {cumsum, parseCumSumAttributes} from './ops/cumsum';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {einsum, parseEinsumAttributes} from './ops/einsum';\nimport {expand} from './ops/expand';\nimport {fastGelu} from './ops/fast-gelu';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gatherElements, parseGatherElementsAttributes} from './ops/gather-elements';\nimport {gemm, parseGemmAttributes} from './ops/gemm';\nimport {groupQueryAttention, parseGroupQueryAttentionAttributes} from './ops/group-query-attention';\nimport {instanceNorm} from './ops/instance-norm';\nimport {layerNorm} from './ops/layer-norm';\nimport {matMul} from './ops/matmul';\nimport {matMulNBits, parseMatMulNBitsAttributes} from './ops/matmulnbits';\nimport {multiHeadAttention, parseMultiHeadAttentionAttributes} from './ops/multihead-attention';\nimport {pad} from './ops/pad';\nimport * as pool from './ops/pool';\nimport {range} from './ops/range';\nimport {reduceL1, reduceL2, reduceLogSum, reduceLogSumExp, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum, reduceSumSquare} from './ops/reduce';\nimport {parseResizeAttributes, resize} from './ops/resize';\nimport {rotaryEmbedding} from './ops/rotary-embedding';\nimport {skipLayerNorm} from './ops/skip-layer-norm';\nimport {parseSliceAttributes, slice} from './ops/slice';\nimport {parseSoftmaxAttributes, softmax} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {where} from './ops/where';\nimport {ComputeContext} from './types';\n\nexport type RunFunction = (context: ComputeContext, attribute?: unknown) => void;\nexport type ParseAttributeFunction = (attributeRaw: unknown) => unknown;\nexport type 
OperatorImplementation = [RunFunction]|[RunFunction, ParseAttributeFunction];\n\nexport const WEBGPU_OP_RESOLVE_RULES: Map = new Map([\n ['Abs', [unaryOps.abs]],\n ['Acos', [unaryOps.acos]],\n ['Acosh', [unaryOps.acosh]],\n ['Add', [binaryOps.add]],\n ['ArgMax', [argMax, parseArgMinMaxAttributes]],\n ['ArgMin', [argMin, parseArgMinMaxAttributes]],\n ['Asin', [unaryOps.asin]],\n ['Asinh', [unaryOps.asinh]],\n ['Atan', [unaryOps.atan]],\n ['Atanh', [unaryOps.atanh]],\n ['Attention', [attention]],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', [pool.averagePool, pool.parseAveragePoolAttributes]],\n ['BatchNormalization', [batchNorm]],\n ['BiasAdd', [biasAdd]],\n ['BiasSplitGelu', [biasSplitGelu]],\n ['Cast', [unaryOps.cast, unaryOps.parseCastAttributes]],\n ['Ceil', [unaryOps.ceil]],\n ['Clip', [unaryOps.clip]],\n ['Concat', [concat, parseConcatAttributes]],\n ['Conv', [conv, parseConvAttributes]],\n ['ConvTranspose', [convTranspose, parseConvTransposeAttributes]],\n ['Cos', [unaryOps.cos]],\n ['Cosh', [unaryOps.cosh]],\n ['CumSum', [cumsum, parseCumSumAttributes]],\n ['DepthToSpace', [depthToSpace, parseDepthToSpaceAttributes]],\n ['Div', [binaryOps.div]],\n ['Einsum', [einsum, parseEinsumAttributes]],\n ['Elu', [unaryOps.elu, unaryOps.parseAlphaAttributes]],\n ['Equal', [binaryOps.equal]],\n ['Erf', [unaryOps.erf]],\n ['Exp', [unaryOps.exp]],\n ['Expand', [expand]],\n ['FastGelu', [fastGelu]],\n ['Floor', [unaryOps.floor]],\n ['FusedConv', [conv, parseConvAttributes]],\n ['Gather', [gather, parseGatherAttributes]],\n ['GatherElements', [gatherElements, parseGatherElementsAttributes]],\n ['Gelu', [unaryOps.gelu]],\n ['Gemm', [gemm, parseGemmAttributes]],\n ['GlobalAveragePool', [pool.globalAveragePool, pool.parseGlobalAveragePoolAttributes]],\n ['GlobalMaxPool', [pool.globalMaxPool, pool.parseGlobalMaxPoolAttributes]],\n ['Greater', [binaryOps.greater]],\n ['GreaterOrEqual', [binaryOps.greaterOrEqual]],\n ['GroupQueryAttention', [groupQueryAttention, parseGroupQueryAttentionAttributes]],\n ['HardSigmoid', [unaryOps.hardSigmoid, unaryOps.parseHardSigmoidAttributes]],\n ['InstanceNormalization', [instanceNorm]],\n ['LayerNormalization', [layerNorm]],\n ['LeakyRelu', [unaryOps.leakyRelu, unaryOps.parseAlphaAttributes]],\n ['Less', [binaryOps.less]],\n ['LessOrEqual', [binaryOps.lessOrEqual]],\n ['Log', [unaryOps.log]],\n ['MatMul', [matMul]],\n ['MatMulNBits', [matMulNBits, parseMatMulNBitsAttributes]],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', [pool.maxPool, pool.parseMaxPoolAttributes]],\n ['Mul', [binaryOps.mul]],\n ['MultiHeadAttention', [multiHeadAttention, parseMultiHeadAttentionAttributes]],\n ['Neg', [unaryOps.neg]],\n ['Not', [unaryOps.not]],\n ['Pad', [pad]],\n ['Pow', [binaryOps.pow]],\n ['QuickGelu', [unaryOps.quickgelu, unaryOps.parseAlphaAttributes]],\n ['Range', [range]],\n ['Reciprocal', [unaryOps.reciprocal]],\n ['ReduceMin', [reduceMin]],\n ['ReduceMean', [reduceMean]],\n ['ReduceMax', [reduceMax]],\n ['ReduceSum', [reduceSum]],\n ['ReduceProd', [reduceProd]],\n ['ReduceL1', [reduceL1]],\n ['ReduceL2', [reduceL2]],\n ['ReduceLogSum', [reduceLogSum]],\n ['ReduceLogSumExp', [reduceLogSumExp]],\n ['ReduceSumSquare', [reduceSumSquare]],\n ['Relu', [unaryOps.relu]],\n ['Resize', [resize, parseResizeAttributes]],\n ['RotaryEmbedding', [rotaryEmbedding]],\n ['Sigmoid', [unaryOps.sigmoid]],\n ['Sin', [unaryOps.sin]],\n ['Sinh', [unaryOps.sinh]],\n ['Slice', [slice, parseSliceAttributes]],\n ['SkipLayerNormalization', 
[skipLayerNorm]],\n ['Split', [split, parseSplitAttributes]],\n ['Sqrt', [unaryOps.sqrt]],\n ['Softmax', [softmax, parseSoftmaxAttributes]],\n ['Sub', [binaryOps.sub]],\n ['Tan', [unaryOps.tan]],\n ['Tanh', [unaryOps.tanh]],\n ['ThresholdedRelu', [unaryOps.thresholdedRelu, unaryOps.parseAlphaAttributes]],\n ['Tile', [tile]],\n ['Transpose', [transpose, parseTransposeAttributes]],\n ['Where', [where]],\n]);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {createShaderHelper} from './ops/common';\nimport {Artifact, GpuData, ProgramInfo} from './types';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n attributesBound: boolean;\n\n constructor(private backend: WebGpuBackend) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: GpuData[], outputs: GpuData[], dispatchGroup: [number, number, number],\n uniformBufferBinding: GPUBindingResource|undefined): void {\n TRACE_FUNC_BEGIN(buildArtifact.programInfo.name);\n const device = this.backend.device;\n const computePassEncoder = this.backend.getComputePassEncoder();\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2);\n const entries = [];\n for (const input of inputs) {\n entries.push({binding: entries.length, resource: {buffer: input.buffer}});\n }\n for (const output of outputs) {\n entries.push({binding: entries.length, resource: {buffer: output.buffer}});\n }\n if (uniformBufferBinding) {\n entries.push({binding: entries.length, resource: uniformBufferBinding});\n }\n const bindGroup = device.createBindGroup(\n {layout: buildArtifact.computePipeline.getBindGroupLayout(0), entries, label: buildArtifact.programInfo.name});\n\n if (this.backend.sessionStatus === 'capturing') {\n const commandInfo = {\n kernelId: this.backend.currentKernelId!,\n computePipeline: buildArtifact.computePipeline,\n bindGroup,\n dispatchGroup\n };\n const sessionCommandList = this.backend.capturedCommandList.get(this.backend.currentSessionId!);\n sessionCommandList!.push(commandInfo);\n }\n\n computePassEncoder.setPipeline(buildArtifact.computePipeline);\n computePassEncoder.setBindGroup(0, bindGroup);\n computePassEncoder.dispatchWorkgroups(...dispatchGroup);\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2 + 1);\n this.backend.pendingDispatchNumber++;\n\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber ||\n this.backend.queryType === 'at-passes') {\n this.backend.endComputePass();\n }\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber) {\n this.backend.flush();\n }\n TRACE_FUNC_END(buildArtifact.programInfo.name);\n }\n dispose(): void {\n // this.repo.forEach(a => 
this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, normalizedDispatchGroupSize: [number, number, number]): Artifact {\n TRACE_FUNC_BEGIN(programInfo.name);\n const device = this.backend.device;\n const extensions: string[] = [];\n if (device.features.has('shader-f16')) {\n extensions.push('enable f16;');\n }\n const shaderHelper = createShaderHelper(normalizedDispatchGroupSize, this.backend.device.limits);\n const userCode = programInfo.getShaderSource(shaderHelper);\n const code = `${extensions.join('\\n')}\\n${shaderHelper.additionalImplementations}\\n${userCode}`;\n const shaderModule = device.createShaderModule({code, label: programInfo.name});\n LOG_DEBUG('verbose', () => `[WebGPU] ${programInfo.name} shader code: ${code}`);\n\n const computePipeline = device.createComputePipeline(\n {compute: {module: shaderModule, entryPoint: 'main'}, layout: 'auto', label: programInfo.name});\n\n TRACE_FUNC_END(programInfo.name);\n return {programInfo, computePipeline, uniformVariablesInfo: shaderHelper.variablesInfo};\n }\n\n normalizeDispatchGroupSize(dispatchGroup: ReturnType['dispatchGroup']):\n [number, number, number] {\n const x = typeof dispatchGroup === 'number' ? dispatchGroup : dispatchGroup.x;\n const y = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.y || 1);\n const z = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.z || 1);\n const limitPerDimension = this.backend.device.limits.maxComputeWorkgroupsPerDimension;\n if (x <= limitPerDimension && y <= limitPerDimension && z <= limitPerDimension) {\n return [x, y, z];\n }\n const size = x * y * z;\n let dispatchAverage = Math.ceil(Math.sqrt(size));\n if (dispatchAverage > limitPerDimension) {\n dispatchAverage = Math.ceil(Math.cbrt(size));\n if (dispatchAverage > limitPerDimension) {\n throw new Error('Total dispatch size exceeds WebGPU maximum.');\n }\n return [dispatchAverage, dispatchAverage, dispatchAverage];\n } else {\n return [dispatchAverage, dispatchAverage, 1];\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env, Tensor, TRACE, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {DataType, tensorDataTypeEnumToString} from '../wasm-common';\n\nimport {configureLogger, LOG_DEBUG} from './log';\nimport {createView, TensorView} from './tensor-view';\nimport {createGpuDataManager, downloadGpuData, GpuDataManager} from './webgpu/gpu-data-manager';\nimport {RunFunction, WEBGPU_OP_RESOLVE_RULES} from './webgpu/op-resolve-rules';\nimport {ProgramManager} from './webgpu/program-manager';\nimport {AdapterInfo, ComputeContext, GpuArchitecture, GpuData, GpuVendor, ProgramInfo, ProgramInputTensorInfoDependency, SessionState, TimestampQuery} from './webgpu/types';\n\ninterface CommandInfo {\n readonly kernelId: number;\n readonly computePipeline: GPUComputePipeline;\n readonly bindGroup: GPUBindGroup;\n readonly dispatchGroup: [number, number, number];\n}\n\ninterface KernelInfo {\n readonly kernelType: string;\n readonly kernelName: string;\n readonly kernelEntry: RunFunction;\n readonly attributes: [((attribute: unknown) => unknown)|undefined, unknown];\n}\n\ninterface PendingKernelInfo {\n readonly kernelId: number;\n readonly programName: string;\n readonly inputTensorViews: readonly TensorView[];\n readonly outputTensorViews: readonly TensorView[];\n}\n\nconst getProgramInputTensorInfoDependencyKey =\n (inputTensors: readonly TensorView[], inputDependencies: readonly ProgramInputTensorInfoDependency[]): string => {\n if (inputDependencies.length !== inputTensors.length) {\n throw new Error(`inputDependencies length ${inputDependencies.length} is not equal to inputTensors length ${\n inputTensors.length}.`);\n }\n\n const inputInfos: string[] = [];\n for (let i = 0; i < inputTensors.length; ++i) {\n const type = inputTensors[i].dataType;\n switch (inputDependencies[i]) {\n case 'none': {\n inputInfos.push('');\n break;\n }\n case 'type': {\n inputInfos.push(`${type}`);\n break;\n }\n case 'rank': {\n const rank = inputTensors[i].dims.length;\n inputInfos.push(`${type};${rank}`);\n break;\n }\n case 'dims': {\n const dims = inputTensors[i].dims.join(',');\n inputInfos.push(`${type};${dims}`);\n break;\n }\n default:\n throw new Error(`unsupported input dependency: ${inputDependencies[i]}`);\n }\n }\n\n return inputInfos.join('|');\n };\n\n/**\n * get a unique key representing the program from the program info, input shapes and types.\n *\n * @returns a unique key is a shorter string than the shader source, which contains all the information to identify a\n * program. 
if the key is the same, the program shader source should be the same, so we can reuse the program.\n *\n */\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo, inputTensors: readonly TensorView[], is1DimensionDispatch: boolean): string => {\n // final key format:\n // []:is1DimensionDispatch:||...\n let key = programInfo.name;\n if (programInfo.shaderCache?.hint) {\n key += '[' + programInfo.shaderCache.hint + ']';\n }\n key += ':' + is1DimensionDispatch +\n `:${\n getProgramInputTensorInfoDependencyKey(\n inputTensors,\n programInfo.shaderCache?.inputDependencies ??\n new Array(inputTensors.length).fill('dims'))}`;\n return key;\n };\n\nclass AdapterInfoImpl implements AdapterInfo {\n readonly architecture?: string;\n readonly vendor?: string;\n\n constructor(adapterInfo: GPUAdapterInfo) {\n if (adapterInfo) {\n this.architecture = adapterInfo.architecture;\n this.vendor = adapterInfo.vendor;\n }\n }\n\n isArchitecture(architecture: GpuArchitecture): boolean {\n return this.architecture === architecture;\n }\n\n isVendor(vendor: GpuVendor): boolean {\n return this.vendor === vendor;\n }\n}\n\n/**\n * this class is designed to store status and being used as a singleton for JSEP. It will be passed to jsepInit() as\n * the first parameter so that it is stored for future use.\n */\nexport class WebGpuBackend {\n adapterInfo: AdapterInfoImpl;\n device: GPUDevice;\n /**\n * an instance of GpuDataManager to manage a GpuDataId -> GpuBuffer mapping\n */\n gpuDataManager: GpuDataManager;\n /**\n * an instance of ProgramManager to build and run WebGPU compute shader program, and manage a ProgramKey -> Program\n * artifacts mapping\n */\n programManager: ProgramManager;\n\n /**\n * representing the session ID of which is currently being run.\n * `null` means no session is being run.\n * only valid when session.run is executed.\n */\n currentSessionId: number|null = null;\n\n /**\n * representing the kernel ID of which is currently being computed (CPU code perspective).\n * `null` means no kernel is being computed.\n * only one kernel can be computed at a moment.\n */\n currentKernelId: number|null = null;\n /**\n * a list of temporary GPU data for the current kernel. should release when the kernel done computation.\n */\n private temporaryData: GpuData[];\n /**\n * a KernelID -> a GPU data list, which stores persistent GPU data owned by the specific kernel.\n */\n private kernelPersistentData: Map;\n /**\n * a KernelID -> a custom data, which stores custom data owned by the specific kernel.\n */\n private kernelCustomData: Map;\n /**\n * get the custom data of the current kernel\n */\n get currentKernelCustomData(): {[key: string]: unknown} {\n if (this.currentKernelId === null) {\n throw new Error('currentKernelCustomData(): currentKernelId is null. 
(should not happen)');\n }\n\n let data = this.kernelCustomData.get(this.currentKernelId);\n if (!data) {\n data = {};\n this.kernelCustomData.set(this.currentKernelId, data);\n }\n\n return data;\n }\n\n // KernelID -> kernelInfo mapping\n kernels: Map;\n private commandEncoder: GPUCommandEncoder|null = null;\n private computePassEncoder: GPUComputePassEncoder|null = null;\n maxDispatchNumber = 16;\n pendingDispatchNumber = 0;\n\n // info of kernels pending submission for a single batch\n private pendingKernels: PendingKernelInfo[] = [];\n // queryReadBuffer -> pendingKernels mapping for all the batches\n private pendingQueries: Map = new Map();\n private queryResolveBuffer?: GPUBuffer;\n private querySet?: GPUQuerySet;\n private queryTimeBase?: bigint;\n queryType: TimestampQuery;\n\n env: Env;\n sessionStatus: SessionState = 'default';\n /**\n * a SessionID -> CommandInfo[] mapping. It's used to record all GPU commands for corresponding session.\n */\n capturedCommandList: Map = new Map();\n\n /**\n * a SessionID -> PendingKernelInfo[] mapping for profiling.\n */\n private capturedPendingKernels: Map = new Map();\n\n /**\n * a SessionID -> a Map of (InputOutputIndex -> [ID, GPUBuffer]) mapping.\n */\n sessionExternalDataMapping: Map> = new Map();\n\n async initialize(env: Env, adapter: GPUAdapter): Promise {\n this.env = env;\n const requiredFeatures: GPUFeatureName[] = [];\n const deviceDescriptor: GPUDeviceDescriptor = {\n requiredLimits: {\n maxComputeWorkgroupStorageSize: adapter.limits.maxComputeWorkgroupStorageSize,\n maxComputeWorkgroupsPerDimension: adapter.limits.maxComputeWorkgroupsPerDimension,\n maxStorageBufferBindingSize: adapter.limits.maxStorageBufferBindingSize,\n maxBufferSize: adapter.limits.maxBufferSize,\n maxComputeInvocationsPerWorkgroup: adapter.limits.maxComputeInvocationsPerWorkgroup,\n maxComputeWorkgroupSizeX: adapter.limits.maxComputeWorkgroupSizeX,\n maxComputeWorkgroupSizeY: adapter.limits.maxComputeWorkgroupSizeY,\n maxComputeWorkgroupSizeZ: adapter.limits.maxComputeWorkgroupSizeZ,\n },\n requiredFeatures,\n };\n\n if (adapter.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n requiredFeatures.push('chromium-experimental-timestamp-query-inside-passes' as GPUFeatureName);\n } else if (adapter.features.has('timestamp-query')) {\n requiredFeatures.push('timestamp-query');\n }\n if (adapter.features.has('shader-f16')) {\n requiredFeatures.push('shader-f16');\n }\n\n this.device = await adapter.requestDevice(deviceDescriptor);\n this.adapterInfo = new AdapterInfoImpl(adapter.info || await adapter.requestAdapterInfo());\n this.gpuDataManager = createGpuDataManager(this);\n this.programManager = new ProgramManager(this);\n this.kernels = new Map();\n this.kernelPersistentData = new Map();\n this.kernelCustomData = new Map();\n\n // set up flags for logger\n configureLogger(env.logLevel!, !!env.debug);\n\n // TODO: set up flags\n\n this.device.onuncapturederror = ev => {\n if (ev.error instanceof GPUValidationError) {\n // eslint-disable-next-line no-console\n console.error(`An uncaught WebGPU validation error was raised: ${ev.error.message}`);\n }\n };\n\n Object.defineProperty(\n this.env.webgpu, 'device', {value: this.device, writable: false, enumerable: true, configurable: false});\n Object.defineProperty(\n this.env.webgpu, 'adapter', {value: adapter, writable: false, enumerable: true, configurable: false});\n\n // init queryType, which is necessary for InferenceSession.create\n this.setQueryType();\n }\n\n dispose(): void {\n if 
(typeof this.querySet !== 'undefined') {\n this.querySet.destroy();\n }\n this.gpuDataManager.dispose();\n }\n\n getCommandEncoder(): GPUCommandEncoder {\n if (!this.commandEncoder) {\n this.commandEncoder = this.device.createCommandEncoder();\n }\n return this.commandEncoder;\n }\n\n getComputePassEncoder(): GPUComputePassEncoder {\n if (!this.computePassEncoder) {\n const commandEncoder = this.getCommandEncoder();\n const computePassDescriptor: GPUComputePassDescriptor = {};\n\n if (this.queryType === 'at-passes') {\n computePassDescriptor.timestampWrites = {\n querySet: this.querySet!,\n beginningOfPassWriteIndex: this.pendingDispatchNumber * 2,\n endOfPassWriteIndex: this.pendingDispatchNumber * 2 + 1,\n };\n }\n\n this.computePassEncoder = commandEncoder.beginComputePass(computePassDescriptor);\n }\n return this.computePassEncoder;\n }\n\n endComputePass(): void {\n if (this.computePassEncoder) {\n this.computePassEncoder.end();\n this.computePassEncoder = null;\n }\n }\n\n flush(): void {\n if (!this.commandEncoder) {\n return;\n }\n\n TRACE_FUNC_BEGIN();\n\n this.endComputePass();\n let queryReadBuffer: GPUBuffer;\n if (this.queryType !== 'none') {\n this.commandEncoder.resolveQuerySet(\n this.querySet!, 0, this.pendingDispatchNumber * 2, this.queryResolveBuffer!, 0);\n\n queryReadBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.pendingDispatchNumber * 2 * 8, usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST});\n\n this.pendingQueries.set(queryReadBuffer, this.pendingKernels);\n this.pendingKernels = [];\n this.commandEncoder.copyBufferToBuffer(\n this.queryResolveBuffer!, 0, queryReadBuffer, 0, this.pendingDispatchNumber * 2 * 8);\n }\n\n this.device.queue.submit([this.commandEncoder.finish()]);\n this.gpuDataManager.refreshPendingBuffers();\n this.commandEncoder = null;\n this.pendingDispatchNumber = 0;\n\n if (this.queryType !== 'none') {\n void queryReadBuffer!.mapAsync(GPUMapMode.READ).then(() => {\n const mappedData = new BigUint64Array(queryReadBuffer.getMappedRange());\n const pendingKernels = this.pendingQueries.get(queryReadBuffer)!;\n for (let i = 0; i < mappedData.length / 2; i++) {\n const pendingKernelInfo = pendingKernels[i];\n const kernelId = pendingKernelInfo.kernelId;\n const kernelInfo = this.kernels.get(kernelId)!;\n const kernelType = kernelInfo.kernelType;\n const kernelName = kernelInfo.kernelName;\n const programName = pendingKernelInfo.programName;\n const inputTensorViews = pendingKernelInfo.inputTensorViews;\n const outputTensorViews = pendingKernelInfo.outputTensorViews;\n const startTimeU64 = mappedData[i * 2];\n const endTimeU64 = mappedData[i * 2 + 1];\n\n if (typeof this.queryTimeBase === 'undefined') {\n this.queryTimeBase = startTimeU64;\n }\n\n const startTime = Number(startTimeU64 - this.queryTimeBase);\n const endTime = Number(endTimeU64 - this.queryTimeBase);\n\n if (!Number.isSafeInteger(startTime) || !Number.isSafeInteger(endTime)) {\n throw new RangeError('incorrect timestamp range');\n }\n\n if (this.env.webgpu.profiling?.ondata) {\n this.env.webgpu.profiling.ondata({\n version: 1,\n inputsMetadata: inputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n outputsMetadata: outputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n kernelId,\n kernelType,\n kernelName,\n programName,\n startTime,\n endTime,\n });\n } else {\n // if no callback is provided, print the profiling message to 
console\n let inputShapes = '';\n inputTensorViews.forEach((value, i) => {\n inputShapes += `input[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n let outputShapes = '';\n outputTensorViews.forEach((value, i) => {\n outputShapes += `output[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n // eslint-disable-next-line no-console\n console.log(`[profiling] kernel \"${kernelId}|${kernelType}|${kernelName}|${programName}\" ${inputShapes}${\n outputShapes}execution time: ${endTime - startTime} ns`);\n }\n TRACE('GPU', `${programName}::${startTimeU64}::${endTimeU64}`);\n }\n queryReadBuffer.unmap();\n this.pendingQueries.delete(queryReadBuffer);\n });\n }\n TRACE_FUNC_END();\n }\n\n /**\n * run a WebGPU program.\n * @param program a ProgramInfo instance\n * @param inputTensorViews a TensorView array. each element represents a value already exists in GPU.\n * @param outputIndices an indices array. each element can be either -1 (temporary data), -2 (persistent data) or an\n * index to the kernel's output.\n * @param createKernelOutput a callback function that create a value to kernel's output with the given index\n * @param createIntermediateOutput a callback function that create a value as a intermediate value, either temporary\n * or persistent (owned by the current kernel)\n * @returns a TensorView array representing the result.\n */\n run(program: ProgramInfo, inputTensorViews: readonly TensorView[], outputIndices: readonly number[],\n createKernelOutput: (index: number, dataType: number, dims: readonly number[]) => TensorView,\n createIntermediateOutput: (dataType: number, dims: readonly number[]) => TensorView,\n outputCount: number): TensorView[] {\n TRACE_FUNC_BEGIN(program.name);\n // create info for inputs\n const inputDatas: GpuData[] = [];\n for (let i = 0; i < inputTensorViews.length; ++i) {\n const data = inputTensorViews[i].data;\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(data);\n if (!gpuData) {\n throw new Error(`no GPU data for input: ${data}`);\n }\n inputDatas.push(gpuData);\n }\n\n const {outputs, dispatchGroup, programUniforms} = program.getRunData(inputTensorViews);\n\n // check output indices\n const validatedOutputIndices = outputIndices.length === 0 ? outputs.map((_, i) => i) : outputIndices;\n if (validatedOutputIndices.length !== outputs.length) {\n throw new Error(`Output size ${validatedOutputIndices.length} must be equal to ${outputs.length}.`);\n }\n\n // create info for outputs\n const outputTensorViews: TensorView[] = [];\n const outputDatas: GpuData[] = [];\n for (let i = 0; i < outputs.length; ++i) {\n // value -1 and -2 are used for creating temporary and persistent outputs.\n // value -3 is used for placeholder output. So -3, -2, -1 and 0, 1, 2, ... are valid\n // output indices. 
see type definition of ComputeContextInputsOutputsMapping for more details.\n if (!Number.isInteger(validatedOutputIndices[i]) || validatedOutputIndices[i] < -3 ||\n validatedOutputIndices[i] >= outputCount) {\n throw new Error(`Invalid output index: ${validatedOutputIndices[i]}`);\n }\n if (validatedOutputIndices[i] === -3) {\n continue;\n }\n const isTemporary = validatedOutputIndices[i] === -1;\n const isPersistent = validatedOutputIndices[i] === -2;\n const tensorView = (isTemporary || isPersistent) ?\n createIntermediateOutput(outputs[i].dataType, outputs[i].dims) :\n createKernelOutput(validatedOutputIndices[i], outputs[i].dataType, outputs[i].dims);\n outputTensorViews.push(tensorView);\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (tensorView.data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(tensorView.data);\n if (!gpuData) {\n throw new Error(`no GPU data for output: ${tensorView.data}`);\n }\n if (isTemporary) {\n this.temporaryData.push(gpuData);\n }\n if (isPersistent) {\n let persistentData = this.kernelPersistentData.get(this.currentKernelId!);\n if (!persistentData) {\n persistentData = [];\n this.kernelPersistentData.set(this.currentKernelId!, persistentData);\n }\n persistentData.push(gpuData);\n }\n outputDatas.push(gpuData);\n }\n\n // when there are any zero-sized tensor in the inputs or outputs, we should report error unless all outputs are\n // zero-sized tensors.\n if (inputDatas.length !== inputTensorViews.length || outputDatas.length !== outputTensorViews.length) {\n // if all outputs are zero-sized tensors, there is no need to run the program.\n if (outputDatas.length === 0) {\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n // if some outputs are zero-sized tensors, report an error.\n //\n // TODO: so far we don't see any use case that outputs include both zero-sized tensors and non-zero-sized tensors.\n // If we see such use case, we need to make a change here to support it.\n throw new Error(\n `Program ${program.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`);\n }\n\n // load uniforms\n // TODO: add cache for uniform (is it necessary?)\n //\n let uniformBufferBinding: GPUBindingResource|undefined;\n if (programUniforms) {\n let currentOffset = 0;\n const offsets: number[] = [];\n\n programUniforms.forEach(v => {\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (data.length === 0) {\n return;\n }\n // https://www.w3.org/TR/WGSL/#alignof\n const sizeOfElement = v.type === DataType.float16 ? 2 : 4;\n let sizeOfVecOrMat;\n let baseAlignment;\n if (v.type === DataType.float16) {\n baseAlignment = data.length > 4 ? 16 : (data.length > 2 ? 8 : data.length * sizeOfElement);\n sizeOfVecOrMat = data.length > 4 ? 16 : sizeOfElement * data.length;\n } else {\n baseAlignment = data.length <= 2 ? data.length * sizeOfElement : 16;\n sizeOfVecOrMat = 16;\n }\n currentOffset = Math.ceil(currentOffset / baseAlignment) * baseAlignment;\n offsets.push(currentOffset);\n // For non-float16 type, when data.length > 4, the uniform variable is of type array,N>, where\n // N = Math.ceil(data.length / 4) and SizeOf(vec4) = 16. The total byte length is N *\n // SizeOf(vec4). For float16 type, when data.length > 4, the uniform variable is of type\n // array,N>, where N = Math.ceil(data.length / 8) and SizeOf(mat2x4) = 16. The total byte\n // length is N * SizeOf(mat2x4).\n const elementPerVecOrMat = v.type === DataType.float16 ? 
8 : 4;\n currentOffset += data.length > 4 ? Math.ceil(data.length / elementPerVecOrMat) * sizeOfVecOrMat :\n data.length * sizeOfElement;\n });\n\n // Meet alignment of struct here: https://www.w3.org/TR/WGSL/#alignment-and-size. For simplicity, set\n // maxAlignmentOfField to 16 since the underlying buffer has been rounded up to 16.\n const maxAlignmentOfField = 16;\n currentOffset = Math.ceil(currentOffset / maxAlignmentOfField) * maxAlignmentOfField;\n const arrayBuffer = new ArrayBuffer(currentOffset);\n programUniforms.forEach((v, i) => {\n const offset = offsets[i];\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (v.type === DataType.int32) {\n new Int32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.uint32) {\n new Uint32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float16) {\n // TODO: use Float16Array.\n new Uint16Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float) {\n new Float32Array(arrayBuffer, offset, data.length).set(data);\n } else {\n throw new Error(`Unsupported uniform type: ${tensorDataTypeEnumToString(v.type)}`);\n }\n });\n\n const uniformBufferData =\n // eslint-disable-next-line no-bitwise\n this.gpuDataManager.create(currentOffset, GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM);\n this.device.queue.writeBuffer(uniformBufferData.buffer, 0, arrayBuffer, 0, currentOffset);\n this.gpuDataManager.release(uniformBufferData.id);\n uniformBufferBinding = {offset: 0, size: currentOffset, buffer: uniformBufferData.buffer};\n }\n\n const normalizedDispatchGroup = this.programManager.normalizeDispatchGroupSize(dispatchGroup);\n const is1DimensionDispatch = normalizedDispatchGroup[1] === 1 && normalizedDispatchGroup[2] === 1;\n // get program info\n const key = getProgramInfoUniqueKey(program, inputTensorViews, is1DimensionDispatch);\n let artifact = this.programManager.getArtifact(key);\n if (!artifact) {\n artifact = this.programManager.build(program, normalizedDispatchGroup);\n this.programManager.setArtifact(key, artifact);\n LOG_DEBUG('info', () => `[artifact] key: ${key}, programName: ${program.name}`);\n }\n\n // validate uniform variables\n if (programUniforms && artifact.uniformVariablesInfo) {\n if (programUniforms.length !== artifact.uniformVariablesInfo.length) {\n throw new Error(`Uniform variables count mismatch: expect ${artifact.uniformVariablesInfo.length}, got ${\n programUniforms.length} in program \"${artifact.programInfo.name}\".`);\n }\n for (let i = 0; i < programUniforms.length; i++) {\n const uniform = programUniforms[i];\n const actualType = uniform.type;\n const actualLength = typeof uniform.data === 'number' ? 
1 : uniform.data.length;\n const [type, length] = artifact.uniformVariablesInfo[i];\n if (actualType !== type || actualLength !== length) {\n throw new Error(`Uniform variable ${i} mismatch: expect type ${type} with size ${length}, got type ${\n actualType} with size ${actualLength} in program \"${artifact.programInfo.name}\".`);\n }\n }\n }\n\n LOG_DEBUG(\n 'info',\n () => `[ProgramManager] run \"${program.name}\" (key=${key}) with ${normalizedDispatchGroup[0]}x${\n normalizedDispatchGroup[1]}x${normalizedDispatchGroup[2]}`);\n\n if (this.queryType !== 'none' || this.sessionStatus === 'capturing') {\n const pendingKernelInfo: PendingKernelInfo = {\n kernelId: this.currentKernelId!,\n programName: artifact.programInfo.name,\n inputTensorViews,\n outputTensorViews,\n };\n this.pendingKernels.push(pendingKernelInfo);\n\n if (this.sessionStatus === 'capturing') {\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n sessionPendingKernels!.push(pendingKernelInfo);\n }\n }\n\n this.programManager.run(artifact, inputDatas, outputDatas, normalizedDispatchGroup, uniformBufferBinding);\n\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n\n upload(gpuDataId: number, data: Uint8Array): void {\n this.gpuDataManager.upload(gpuDataId, data);\n }\n\n memcpy(src: number, dst: number): void {\n this.gpuDataManager.memcpy(src, dst);\n }\n\n async download(gpuDataId: number, getTargetBuffer: () => Uint8Array): Promise {\n // the underlying buffer may be changed after the async function is called. so we use a getter function to make sure\n // the buffer is up-to-date.\n await this.gpuDataManager.download(gpuDataId, getTargetBuffer);\n }\n\n alloc(size: number): number {\n return this.gpuDataManager.create(size).id;\n }\n\n free(ptr: number): number {\n return this.gpuDataManager.release(ptr);\n }\n\n createKernel(kernelType: string, kernelId: number, attribute: unknown, kernelName: string): void {\n const op = WEBGPU_OP_RESOLVE_RULES.get(kernelType);\n if (!op) {\n throw new Error(`kernel not implemented: ${kernelType}`);\n }\n\n const kernelInfo: KernelInfo = {\n kernelType,\n kernelName,\n kernelEntry: op[0],\n attributes: [op[1], attribute],\n };\n this.kernels.set(kernelId, kernelInfo);\n }\n\n releaseKernel(kernelId: number): void {\n const persistentData = this.kernelPersistentData.get(kernelId);\n if (persistentData) {\n for (const data of persistentData) {\n this.gpuDataManager.release(data.id);\n }\n this.kernelPersistentData.delete(kernelId);\n }\n\n this.kernelCustomData.delete(kernelId);\n this.kernels.delete(kernelId);\n }\n\n computeKernel(kernelId: number, context: ComputeContext, errors: Array>): number {\n const kernel = this.kernels.get(kernelId);\n if (!kernel) {\n throw new Error(`kernel not created: ${kernelId}`);\n }\n const kernelType = kernel.kernelType;\n const kernelName = kernel.kernelName;\n const kernelEntry = kernel.kernelEntry;\n const attributes = kernel.attributes;\n if (this.currentKernelId !== null) {\n throw new Error(`kernel \"[${kernelType}] ${kernelName}\" is not allowed to be called recursively`);\n }\n this.currentKernelId = kernelId;\n\n // parse attributes if necessary\n if (attributes[0]) {\n attributes[1] = attributes[0](attributes[1]);\n attributes[0] = undefined;\n }\n\n LOG_DEBUG('info', () => `[WebGPU] Start to run kernel \"[${kernelType}] ${kernelName}\"...`);\n\n const useErrorScope = this.env.debug;\n\n this.temporaryData = [];\n try {\n if (useErrorScope) {\n this.device.pushErrorScope('validation');\n 
}\n\n kernelEntry(context, attributes[1]);\n return 0; // ORT_OK\n } catch (e) {\n errors.push(Promise.resolve(`[WebGPU] Kernel \"[${kernelType}] ${kernelName}\" failed. ${e}`));\n return 1; // ORT_FAIL\n } finally {\n if (useErrorScope) {\n errors.push(this.device.popErrorScope().then(\n err => err ? `GPU validation error for kernel \"[${kernelType}] ${kernelName}\": ${err.message}` : null));\n }\n\n for (const data of this.temporaryData) {\n this.gpuDataManager.release(data.id);\n }\n this.temporaryData = [];\n this.currentKernelId = null;\n }\n }\n\n // #region external buffer\n registerBuffer(sessionId: number, index: number, buffer: GPUBuffer, size: number): number {\n let sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (!sessionInputOutputMapping) {\n sessionInputOutputMapping = new Map();\n this.sessionExternalDataMapping.set(sessionId, sessionInputOutputMapping);\n }\n\n const previousBuffer = sessionInputOutputMapping.get(index);\n const id = this.gpuDataManager.registerExternalBuffer(buffer, size, previousBuffer?.[1]);\n sessionInputOutputMapping.set(index, [id, buffer]);\n return id;\n }\n unregisterBuffers(sessionId: number): void {\n const sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (sessionInputOutputMapping) {\n sessionInputOutputMapping.forEach(bufferInfo => this.gpuDataManager.unregisterExternalBuffer(bufferInfo[1]));\n this.sessionExternalDataMapping.delete(sessionId);\n }\n }\n getBuffer(gpuDataId: number): GPUBuffer {\n const gpuData = this.gpuDataManager.get(gpuDataId);\n if (!gpuData) {\n throw new Error(`no GPU data for buffer: ${gpuDataId}`);\n }\n return gpuData.buffer;\n }\n createDownloader(gpuBuffer: GPUBuffer, size: number, type: Tensor.GpuBufferDataTypes):\n () => Promise {\n return async () => {\n const data = await downloadGpuData(this, gpuBuffer, size);\n return createView(data.buffer, type);\n };\n }\n // #endregion\n writeTimestamp(index: number): void {\n if (this.queryType !== 'inside-passes') {\n return;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (this.computePassEncoder as any).writeTimestamp(this.querySet, index);\n }\n setQueryType(): void {\n this.queryType = 'none';\n if (this.env.webgpu.profiling?.mode === 'default' ||\n (typeof this.env.trace === 'undefined' ? 
this.env.wasm.trace : this.env.trace)) {\n if (this.device.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n this.queryType = 'inside-passes';\n } else if (this.device.features.has('timestamp-query')) {\n this.queryType = 'at-passes';\n }\n\n if (this.queryType !== 'none' && typeof this.querySet === 'undefined') {\n this.querySet = this.device.createQuerySet({\n type: 'timestamp',\n count: this.maxDispatchNumber * 2,\n });\n this.queryResolveBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.maxDispatchNumber * 2 * 8, usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE});\n }\n }\n }\n\n captureBegin(): void {\n LOG_DEBUG('info', 'captureBegin');\n if (!this.capturedCommandList.get(this.currentSessionId!)) {\n this.capturedCommandList.set(this.currentSessionId!, []);\n }\n if (!this.capturedPendingKernels.get(this.currentSessionId!)) {\n this.capturedPendingKernels.set(this.currentSessionId!, []);\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'capturing';\n }\n captureEnd(): void {\n LOG_DEBUG('info', 'captureEnd');\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n replay(): void {\n LOG_DEBUG('info', 'replay');\n this.sessionStatus = 'replaying';\n const sessionCommandList = this.capturedCommandList.get(this.currentSessionId!);\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n const length = sessionCommandList!.length;\n this.pendingKernels = [];\n for (let i = 0; i < length; i++) {\n const computePassEncoder = this.getComputePassEncoder();\n const command = sessionCommandList![i];\n this.writeTimestamp(this.pendingDispatchNumber * 2);\n computePassEncoder.setPipeline(command.computePipeline);\n computePassEncoder.setBindGroup(0, command.bindGroup);\n computePassEncoder.dispatchWorkgroups(...command.dispatchGroup);\n this.writeTimestamp(this.pendingDispatchNumber * 2 + 1);\n this.pendingDispatchNumber++;\n if (this.queryType !== 'none') {\n this.pendingKernels.push(sessionPendingKernels![i]);\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber || this.queryType === 'at-passes') {\n this.endComputePass();\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber) {\n this.flush();\n }\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n\n onReleaseSession(sessionId: number): void {\n this.unregisterBuffers(sessionId);\n if (this.capturedCommandList.has(sessionId)) {\n this.capturedCommandList.delete(sessionId);\n }\n if (this.capturedPendingKernels.has(sessionId)) {\n this.capturedPendingKernels.delete(sessionId);\n }\n this.gpuDataManager.onReleaseSession(sessionId);\n }\n\n onRunStart(sessionId: number): void {\n this.currentSessionId = sessionId;\n this.setQueryType();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from '../wasm-types';\nimport {DataType, getTensorElementSize} from '../wasm-common';\n\nimport {WebGpuBackend} from './backend-webgpu';\nimport {LOG_DEBUG} from './log';\nimport {TensorView} from './tensor-view';\nimport {ShapeUtil} from './util';\nimport {AdapterInfo, ComputeContext, ComputeContextInputsOutputsMapping, ProgramInfo} from './webgpu/types';\n\n/* eslint-disable no-bitwise */\n\nclass TensorViewImpl implements TensorView {\n constructor(\n private module: OrtWasmModule, public readonly dataType: number, public readonly data: number,\n public readonly dims: readonly number[]) {}\n\n getFloat32Array(): Float32Array {\n if (this.dataType !== DataType.float) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Float32Array() :\n new Float32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getBigInt64Array(): BigInt64Array {\n if (this.dataType !== DataType.int64) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new BigInt64Array() :\n new BigInt64Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getInt32Array(): Int32Array {\n if (this.dataType !== DataType.int32) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Int32Array() : new Int32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n reshape(newDims: readonly number[]): TensorView {\n if (ShapeUtil.size(newDims) !== ShapeUtil.size(this.dims)) {\n throw new Error('Invalid new shape');\n }\n return new TensorViewImpl(this.module, this.dataType, this.data, newDims);\n }\n}\n\nclass ComputeContextImpl implements ComputeContext {\n readonly adapterInfo: AdapterInfo;\n readonly opKernelContext: number;\n readonly inputs: readonly TensorView[];\n readonly outputCount: number;\n get kernelCustomData(): {[key: string]: unknown} {\n return this.backend.currentKernelCustomData;\n }\n get customDataBuffer(): Uint8Array {\n return this.module.HEAPU8.subarray(this.customDataOffset, this.customDataOffset + this.customDataSize);\n }\n private customDataOffset = 0;\n private customDataSize = 0;\n constructor(private module: OrtWasmModule, private backend: WebGpuBackend, contextDataOffset: number) {\n this.adapterInfo = backend.adapterInfo;\n const heapU32 = module.HEAPU32;\n\n // extract context data\n let dataIndex = (contextDataOffset >>> 2);\n this.opKernelContext = heapU32[dataIndex++];\n const inputCount = heapU32[dataIndex++];\n this.outputCount = heapU32[dataIndex++];\n this.customDataOffset = heapU32[dataIndex++];\n this.customDataSize = heapU32[dataIndex++];\n\n const inputs: TensorView[] = [];\n for (let i = 0; i < inputCount; i++) {\n const dataType = heapU32[dataIndex++];\n const data = heapU32[dataIndex++];\n const dim = heapU32[dataIndex++];\n const dims: number[] = [];\n for (let d = 0; d < dim; d++) {\n dims.push(heapU32[dataIndex++]);\n }\n inputs.push(new TensorViewImpl(module, dataType, data, dims));\n }\n this.inputs = inputs;\n }\n\n getMaxComputeWorkgroupSizes(): [number, number, number] {\n return [\n this.backend.device.limits.maxComputeWorkgroupSizeX, this.backend.device.limits.maxComputeWorkgroupSizeY,\n this.backend.device.limits.maxComputeWorkgroupSizeZ\n ];\n }\n\n getMaxComputeWorkgroupStoragesize(): number 
{\n return this.backend.device.limits.maxComputeWorkgroupStorageSize;\n }\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[] {\n // prepare inputs. inputs should always be valid data.\n const mappedInputs =\n inputsOutputsMapping?.inputs?.map(i => typeof i === 'number' ? this.inputs[i] : i) ?? this.inputs;\n // prepare outputs.\n const outputIndices = inputsOutputsMapping?.outputs ?? [];\n const createKernelOutput = (index: number, dataType: number, dims: readonly number[]): TensorView =>\n new TensorViewImpl(this.module, dataType, this.output(index, dims), dims);\n const createTemporaryOutput = (dataType: number, dims: readonly number[]): TensorView => {\n const elementSize = getTensorElementSize(dataType);\n if (!elementSize) {\n throw new Error(`Unsupported data type: ${dataType}`);\n }\n const bufferSize = elementSize * ShapeUtil.size(dims);\n const gpuDataId = bufferSize > 0 ? this.backend.gpuDataManager.create(bufferSize).id : 0;\n return new TensorViewImpl(this.module, dataType, gpuDataId, dims);\n };\n return this.backend.run(\n program, mappedInputs, outputIndices, createKernelOutput, createTemporaryOutput, this.outputCount);\n }\n\n output(index: number, dims: readonly number[]): number {\n const stack = this.module.stackSave();\n try {\n const data = this.module.stackAlloc((1 + dims.length) * 4 /* sizeof(size_t) */);\n let offset = data >> 2;\n this.module.HEAPU32[offset++] = dims.length;\n for (let i = 0; i < dims.length; i++) {\n this.module.HEAPU32[offset++] = dims[i];\n }\n return this.module._JsepOutput!(this.opKernelContext, index, data);\n } catch (e) {\n throw new Error(\n `Failed to generate kernel's output[${index}] with dims [${dims}]. ` +\n 'If you are running with pre-allocated output, please make sure the output type/dims are correct. ' +\n `Error: ${e}`);\n } finally {\n this.module.stackRestore(stack);\n }\n }\n}\n\n/**\n * Initialize JSEP with WebGPU backend.\n *\n * This function will be called after the WebAssembly module is loaded and initialized (\"_OrtInit\" is called), once for\n * each of the following EPs if they are specified:\n * - \"webgpu\"\n * - \"webnn\"\n *\n * For WebGPU, this function expects:\n * - WebGPU is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebGPU is available in current environment. (a valid GPUAdapter is passed in)\n *\n * For WebNN, this function expects:\n * - WebNN is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebNN is available in current environment. (navigator.ml is not undefined)\n *\n * If the WebAssembly module is not built with JSEP support, this function will throw an error. This will invalidate\n * 'webgpu'/'webnn' backend.\n *\n * @param name - the name of the EP, either \"webgpu\" or \"webnn\"\n * @param module - the ORT WebAssembly module\n * @param env - the ORT environment variable (ort.env)\n * @param gpuAdapter - the pre-created GPU adapter\n */\nexport const init =\n async(name: 'webgpu'|'webnn', module: OrtWasmModule, env: Env, gpuAdapter?: GPUAdapter): Promise => {\n const jsepInit = module.jsepInit;\n if (!jsepInit) {\n throw new Error('Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.');\n }\n\n if (name === 'webgpu') {\n const backend = new WebGpuBackend();\n await backend.initialize(env, gpuAdapter!);\n\n jsepInit('webgpu', [\n // backend\n backend,\n\n // jsepAlloc()\n (size: number) => backend.alloc(size),\n\n // jsepFree()\n (ptr: number) => backend.free(ptr),\n\n // jsepCopy(src, dst, size, isSourceGpu)\n (src: number, dst: number, size: number, isSourceGpu = false) => {\n if (isSourceGpu) {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyGpuToGpu: src=${src}, dst=${dst}, size=${size}`);\n backend.memcpy(src, dst);\n } else {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyCpuToGpu: dataOffset=${src}, gpuDataId=${dst}, size=${size}`);\n const data = module.HEAPU8.subarray(src >>> 0, (src >>> 0) + size);\n backend.upload(dst, data);\n }\n },\n\n // jsepCopyAsync(src, dst, size)\n async(gpuDataId: number, dataOffset: number, size: number):\n Promise => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepCopyGpuToCpu: gpuDataId=${gpuDataId}, dataOffset=${dataOffset}, size=${size}`);\n\n await backend.download(\n gpuDataId, () => module.HEAPU8.subarray(dataOffset >>> 0, (dataOffset >>> 0) + size));\n },\n\n // jsepCreateKernel\n (kernelType: string, kernelId: number, attribute: unknown) => backend.createKernel(\n kernelType, kernelId, attribute, module.UTF8ToString(module._JsepGetNodeName!(kernelId))),\n\n // jsepReleaseKernel\n (kernel: number) => backend.releaseKernel(kernel),\n\n // jsepRun\n (kernel: number, contextDataOffset: number, sessionHandle: number, errors: Array>) => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepRun: sessionHandle=${sessionHandle}, kernel=${kernel}, contextDataOffset=${\n contextDataOffset}`);\n const context = new ComputeContextImpl(module, backend, contextDataOffset);\n return backend.computeKernel(kernel, context, errors);\n },\n // jsepCaptureBegin\n () => backend.captureBegin(),\n // jsepCaptureEnd\n () => backend.captureEnd(),\n // jsepReplay\n () => backend.replay()\n ]);\n } else {\n jsepInit('webnn');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// WebNN API currently does not have a TypeScript definition file. This file is a workaround with types generated from\n// WebNN API specification.\n// https://github.com/webmachinelearning/webnn/issues/677\n/// \n\nimport {Env, InferenceSession, Tensor} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport {setRunOptions} from './run-options';\nimport {setSessionOptions} from './session-options';\nimport {dataLocationStringToEnum, getTensorElementSize, isGpuBufferSupportedType, logLevelStringToEnum, tensorDataTypeEnumToString, tensorDataTypeStringToEnum, tensorTypeToTypedArrayConstructor} from './wasm-common';\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError} from './wasm-utils';\nimport {loadFile} from './wasm-utils-load-file';\n\n// #region Initializations\n\n/**\n * There are 4 different \"initialization\" steps for ORT. They happen in different places and different time.\n *\n * 1. JavaScript initialization for onnxruntime-common and onnxruntime-web.\n * This is the first initialization step. In this step, onnxruntime-web calls onnxruntime-common's registerBackend()\n * function multiple times to register all the available backends. The backend registration is very fast. 
It only\n * registers the backend name with the uninitialized backend object. No heavy initialization is done in this step.\n * Refer to web/lib/index.ts for the backend registration.\n *\n * 2. WebAssembly artifact initialization.\n * This happens when any registered wasm backend is used for the first time (ie. `ort.InferenceSession.create()` or\n * `ort.TrainingSession.create()` is called). In this step, onnxruntime-web does the followings:\n * - create a proxy worker and make sure the proxy worker is ready to receive messages, if proxy is enabled.\n * - perform feature detection, locate correct WebAssembly artifact path and call the Emscripten generated\n * JavaScript code to initialize the WebAssembly runtime.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-wasm'.\n * - downloading the 'ort-wasm{...}.wasm' file is done in this step.\n * - if multi-thread is enabled, one or more webworker will be created to initialize the PThread threadpool.\n *\n * 3. ORT environment initialization.\n * This happens after step 2. In this step, onnxruntime-web performs ONNX Runtime environment initialization.\n * Function `_OrtInit()` is called in this step.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-ort'.\n * - logging level (ort.env.logLevel) and thread number (ort.env.wasm.numThreads) are set in this step.\n *\n * 4. Session initialization.\n * This happens when `ort.InferenceSession.create()` or `ort.TrainingSession.create()` is called. Unlike the first 3\n * steps (they only called once), this step will be done for each session. In this step, onnxruntime-web does the\n * followings:\n * If the parameter is a URL:\n * - download the model data from the URL.\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - dereference the model buffer. This step allows the original ArrayBuffer to be garbage collected.\n * - call `_OrtCreateSession()` to create the session. (proxy: 'create')\n *\n * If the parameter is a Uint8Array object:\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - call `_OrtCreateSession()` to create the session. 
(proxy: 'create')\n *\n *\n */\n\n/**\n * initialize ORT environment.\n *\n * @param numThreads SetGlobalIntraOpNumThreads(numThreads)\n * @param loggingLevel CreateEnv(static_cast(logging_level))\n */\nconst initOrt = (numThreads: number, loggingLevel: number): void => {\n const errorCode = getInstance()._OrtInit(numThreads, loggingLevel);\n if (errorCode !== 0) {\n checkLastError('Can\\'t initialize onnxruntime.');\n }\n};\n\n/**\n * initialize runtime environment.\n * @param env passed in the environment config object.\n */\nexport const initRuntime = async(env: Env): Promise => {\n // init ORT\n initOrt(env.wasm.numThreads!, logLevelStringToEnum(env.logLevel));\n};\n\n/**\n * perform EP specific initialization.\n *\n * @param env\n * @param epName\n */\nexport const initEp = async(env: Env, epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_JSEP) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n const initJsep = require('./jsep/init').init;\n\n if (epName === 'webgpu') {\n // perform WebGPU availability check\n if (typeof navigator === 'undefined' || !navigator.gpu) {\n throw new Error('WebGPU is not supported in current environment');\n }\n\n let adapter = env.webgpu.adapter as GPUAdapter | null;\n if (!adapter) {\n // if adapter is not set, request a new adapter.\n const powerPreference = env.webgpu.powerPreference;\n if (powerPreference !== undefined && powerPreference !== 'low-power' &&\n powerPreference !== 'high-performance') {\n throw new Error(`Invalid powerPreference setting: \"${powerPreference}\"`);\n }\n const forceFallbackAdapter = env.webgpu.forceFallbackAdapter;\n if (forceFallbackAdapter !== undefined && typeof forceFallbackAdapter !== 'boolean') {\n throw new Error(`Invalid forceFallbackAdapter setting: \"${forceFallbackAdapter}\"`);\n }\n adapter = await navigator.gpu.requestAdapter({powerPreference, forceFallbackAdapter});\n if (!adapter) {\n throw new Error(\n 'Failed to get GPU adapter. ' +\n 'You may need to enable flag \"--enable-unsafe-webgpu\" if you are using Chrome.');\n }\n } else {\n // if adapter is set, validate it.\n if (typeof adapter.limits !== 'object' || typeof adapter.features !== 'object' ||\n typeof adapter.requestDevice !== 'function') {\n throw new Error('Invalid GPU adapter set in `env.webgpu.adapter`. 
It must be a GPUAdapter object.');\n }\n }\n\n await initJsep('webgpu', getInstance(), env, adapter);\n }\n if (epName === 'webnn') {\n // perform WebNN availability check\n if (typeof navigator === 'undefined' || !(navigator as unknown as {ml: unknown}).ml) {\n throw new Error('WebNN is not supported in current environment');\n }\n\n await initJsep('webnn', getInstance(), env);\n }\n }\n};\n\n// #endregion Initializations\n\n/**\n * valid data locations for input/output tensors.\n */\ntype SupportedTensorDataLocationForInputOutput = 'cpu'|'cpu-pinned'|'gpu-buffer';\n\ntype IOBindingState = {\n /**\n * the handle of IO binding.\n */\n readonly handle: number;\n\n /**\n * the preferred location for each output tensor.\n *\n * value is one of 'cpu', 'cpu-pinned', 'gpu-buffer'.\n */\n readonly outputPreferredLocations: readonly SupportedTensorDataLocationForInputOutput[];\n\n /**\n * enum value of the preferred location for each output tensor.\n */\n readonly outputPreferredLocationsEncoded: readonly number[];\n};\n\n/**\n * tuple elements are: InferenceSession ID; inputNamesUTF8Encoded; outputNamesUTF8Encoded; bindingState\n */\ntype SessionMetadata = [\n inferenceSessionId: number, inputNamesUTF8Encoded: number[], outputNamesUTF8Encoded: number[],\n bindingState: IOBindingState|null, enableGraphCapture: boolean, inputOutputBound: boolean\n];\n\nconst activeSessions = new Map();\n\n/**\n * get the input/output count of the session.\n * @param sessionHandle the handle representing the session. should be non-zero.\n * @returns a tuple including 2 numbers, representing the input count and output count.\n */\nconst getSessionInputOutputCount = (sessionHandle: number): [number, number] => {\n const wasm = getInstance();\n const stack = wasm.stackSave();\n try {\n const dataOffset = wasm.stackAlloc(8);\n const errorCode = wasm._OrtGetInputOutputCount(sessionHandle, dataOffset, dataOffset + 4);\n if (errorCode !== 0) {\n checkLastError('Can\\'t get session input/output count.');\n }\n return [wasm.HEAP32[dataOffset / 4], wasm.HEAP32[dataOffset / 4 + 1]];\n } finally {\n wasm.stackRestore(stack);\n }\n};\n\n/**\n * allocate the memory and memcpy the external buffer.\n *\n * @param model - the external buffer containing the model data. Must not be the same buffer as the WASM heap.\n * @returns a 2-elements tuple - the pointer and size of the allocated buffer\n */\nexport const copyFromExternalBuffer = (model: Uint8Array): [number, number] => {\n const wasm = getInstance();\n const modelDataOffset = wasm._malloc(model.byteLength);\n if (modelDataOffset === 0) {\n throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${model.byteLength}.`);\n }\n wasm.HEAPU8.set(model, modelDataOffset);\n return [modelDataOffset, model.byteLength];\n};\n\n/**\n * create an inference session from a model data buffer.\n *\n * @param modelData - either a Uint8Array object representing the model data, or a 2-elements tuple containing the\n * pointer and size of the model data buffer.\n * @param options an optional session options object.\n * @returns a 3-elements tuple containing [session handle, input names, output names]\n */\nexport const createSession = async(\n modelData: Uint8Array|SerializableInternalBuffer,\n options?: InferenceSession.SessionOptions): Promise => {\n let modelDataOffset: number, modelDataLength: number;\n const wasm = getInstance();\n\n if (Array.isArray(modelData)) {\n // if model data is an array, it must be a 2-elements tuple containing the pointer and size of the model data\n [modelDataOffset, modelDataLength] = modelData;\n } else if (modelData.buffer === wasm.HEAPU8.buffer) {\n // if model data uses the same buffer as the WASM heap, we don't need to copy it.\n [modelDataOffset, modelDataLength] = [modelData.byteOffset, modelData.byteLength];\n } else {\n // otherwise, copy the model data to the WASM heap.\n [modelDataOffset, modelDataLength] = copyFromExternalBuffer(modelData);\n }\n\n let sessionHandle = 0;\n let sessionOptionsHandle = 0;\n let ioBindingHandle = 0;\n let allocs: number[] = [];\n const inputNamesUTF8Encoded = [];\n const outputNamesUTF8Encoded = [];\n\n try {\n [sessionOptionsHandle, allocs] = setSessionOptions(options);\n\n if (options?.externalData && wasm.mountExternalData) {\n const loadingPromises = [];\n for (const file of options.externalData) {\n const path = typeof file === 'string' ? file : file.path;\n loadingPromises.push(loadFile(typeof file === 'string' ? file : file.data).then(data => {\n wasm.mountExternalData!(path, data);\n }));\n }\n\n // wait for all external data files to be loaded\n await Promise.all(loadingPromises);\n }\n\n for (const provider of options?.executionProviders ?? []) {\n const providerName = typeof provider === 'string' ? 
provider : provider.name;\n if (providerName === 'webnn') {\n if (wasm.currentContext) {\n throw new Error('WebNN execution provider is already set.');\n }\n if (typeof provider !== 'string') {\n const webnnOptions = provider as InferenceSession.WebNNExecutionProviderOption;\n const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const gpuDevice = (webnnOptions as InferenceSession.WebNNOptionsWebGpu)?.gpuDevice;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n const numThreads = (webnnOptions as InferenceSession.WebNNContextOptions)?.numThreads;\n const powerPreference = (webnnOptions as InferenceSession.WebNNContextOptions)?.powerPreference;\n if (context) {\n wasm.currentContext = context as MLContext;\n } else if (gpuDevice) {\n wasm.currentContext = await navigator.ml.createContext(gpuDevice);\n } else {\n wasm.currentContext = await navigator.ml.createContext({deviceType, numThreads, powerPreference});\n }\n } else {\n wasm.currentContext = await navigator.ml.createContext();\n }\n break;\n }\n }\n\n sessionHandle = await wasm._OrtCreateSession(modelDataOffset, modelDataLength, sessionOptionsHandle);\n if (sessionHandle === 0) {\n checkLastError('Can\\'t create a session.');\n }\n\n // clear current MLContext after session creation\n if (wasm.currentContext) {\n wasm.currentContext = undefined;\n }\n\n const [inputCount, outputCount] = getSessionInputOutputCount(sessionHandle);\n\n const enableGraphCapture = !!options?.enableGraphCapture;\n\n const inputNames = [];\n const outputNames = [];\n const outputPreferredLocations: SupportedTensorDataLocationForInputOutput[] = [];\n for (let i = 0; i < inputCount; i++) {\n const name = wasm._OrtGetInputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an input name.');\n }\n inputNamesUTF8Encoded.push(name);\n inputNames.push(wasm.UTF8ToString(name));\n }\n for (let i = 0; i < outputCount; i++) {\n const name = wasm._OrtGetOutputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an output name.');\n }\n outputNamesUTF8Encoded.push(name);\n const nameString = wasm.UTF8ToString(name);\n outputNames.push(nameString);\n\n if (!BUILD_DEFS.DISABLE_JSEP) {\n if (enableGraphCapture && options?.preferredOutputLocation === undefined) {\n outputPreferredLocations.push('gpu-buffer');\n continue;\n }\n const location = typeof options?.preferredOutputLocation === 'string' ?\n options.preferredOutputLocation :\n options?.preferredOutputLocation?.[nameString] ?? 'cpu';\n if (location !== 'cpu' && location !== 'cpu-pinned' && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${location}.`);\n }\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${\n location}. 
Only 'gpu-buffer' location is supported when enableGraphCapture is true.`);\n }\n outputPreferredLocations.push(location);\n }\n }\n\n // use IO binding only when at least one output is preffered to be on GPU.\n let bindingState: IOBindingState|null = null;\n if (!BUILD_DEFS.DISABLE_JSEP && outputPreferredLocations.some(l => l === 'gpu-buffer')) {\n ioBindingHandle = wasm._OrtCreateBinding(sessionHandle);\n if (ioBindingHandle === 0) {\n checkLastError('Can\\'t create IO binding.');\n }\n\n bindingState = {\n handle: ioBindingHandle,\n outputPreferredLocations,\n outputPreferredLocationsEncoded: outputPreferredLocations.map(l => dataLocationStringToEnum(l)),\n };\n }\n\n activeSessions.set(\n sessionHandle,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, bindingState, enableGraphCapture, false]);\n return [sessionHandle, inputNames, outputNames];\n } catch (e) {\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n\n if (ioBindingHandle !== 0) {\n wasm._OrtReleaseBinding(ioBindingHandle);\n }\n\n if (sessionHandle !== 0) {\n wasm._OrtReleaseSession(sessionHandle);\n }\n throw e;\n } finally {\n wasm._free(modelDataOffset);\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n\n // unmount external data if necessary\n wasm.unmountExternalData?.();\n }\n};\n\nexport const releaseSession = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot release session. invalid session id: ${sessionId}`);\n }\n const [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture] = session;\n\n if (ioBindingState) {\n if (enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n }\n wasm._OrtReleaseBinding(ioBindingState.handle);\n }\n\n wasm.jsepOnReleaseSession?.(sessionId);\n\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n wasm._OrtReleaseSession(sessionHandle);\n activeSessions.delete(sessionId);\n};\n\nexport const prepareInputOutputTensor =\n (tensor: TensorMetadata|null, tensorHandles: number[], allocs: number[], sessionId: number, index: number,\n enableGraphCapture = false): void => {\n if (!tensor) {\n tensorHandles.push(0);\n return;\n }\n\n const wasm = getInstance();\n\n const dataType = tensor[0];\n const dims = tensor[1];\n const location = tensor[3];\n\n let rawData: number;\n let dataByteLength: number;\n\n if (dataType === 'string' && location === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(\n `External buffer must be provided for input/output index ${index} when enableGraphCapture is true.`);\n }\n\n if (location === 'gpu-buffer') {\n const gpuBuffer = tensor[2].gpuBuffer as GPUBuffer;\n const elementSizeInBytes = getTensorElementSize(tensorDataTypeStringToEnum(dataType))!;\n dataByteLength = dims.reduce((a, b) => a * b, 1) * elementSizeInBytes;\n\n const registerBuffer = wasm.jsepRegisterBuffer;\n if (!registerBuffer) {\n throw new Error('Tensor location \"gpu-buffer\" is not supported without using WebGPU.');\n }\n rawData = registerBuffer(sessionId, index, gpuBuffer, dataByteLength);\n } else {\n const data = tensor[2];\n\n if (Array.isArray(data)) {\n // string tensor\n 
dataByteLength = 4 * data.length;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n let dataIndex = rawData / 4;\n for (let i = 0; i < data.length; i++) {\n if (typeof data[i] !== 'string') {\n throw new TypeError(`tensor data at index ${i} is not a string`);\n }\n wasm.HEAPU32[dataIndex++] = allocWasmString(data[i], allocs);\n }\n } else {\n dataByteLength = data.byteLength;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n wasm.HEAPU8.set(new Uint8Array(data.buffer, data.byteOffset, dataByteLength), rawData);\n }\n }\n\n const stack = wasm.stackSave();\n const dimsOffset = wasm.stackAlloc(4 * dims.length);\n try {\n let dimIndex = dimsOffset / 4;\n dims.forEach(d => wasm.HEAP32[dimIndex++] = d);\n const tensor = wasm._OrtCreateTensor(\n tensorDataTypeStringToEnum(dataType), rawData, dataByteLength, dimsOffset, dims.length,\n dataLocationStringToEnum(location));\n if (tensor === 0) {\n checkLastError(`Can't create tensor for input/output. session=${sessionId}, index=${index}.`);\n }\n tensorHandles.push(tensor);\n } finally {\n wasm.stackRestore(stack);\n }\n };\n\n/**\n * perform inference run\n */\nexport const run = async(\n sessionId: number, inputIndices: number[], inputTensors: TensorMetadata[], outputIndices: number[],\n outputTensors: Array, options: InferenceSession.RunOptions): Promise => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot run inference. invalid session id: ${sessionId}`);\n }\n const sessionHandle = session[0];\n const inputNamesUTF8Encoded = session[1];\n const outputNamesUTF8Encoded = session[2];\n const ioBindingState = session[3];\n const enableGraphCapture = session[4];\n const inputOutputBound = session[5];\n\n const inputCount = inputIndices.length;\n const outputCount = outputIndices.length;\n\n let runOptionsHandle = 0;\n let runOptionsAllocs: number[] = [];\n\n const inputTensorHandles: number[] = [];\n const outputTensorHandles: number[] = [];\n const inputOutputAllocs: number[] = [];\n\n const beforeRunStack = wasm.stackSave();\n const inputValuesOffset = wasm.stackAlloc(inputCount * 4);\n const inputNamesOffset = wasm.stackAlloc(inputCount * 4);\n const outputValuesOffset = wasm.stackAlloc(outputCount * 4);\n const outputNamesOffset = wasm.stackAlloc(outputCount * 4);\n\n try {\n [runOptionsHandle, runOptionsAllocs] = setRunOptions(options);\n\n // create input tensors\n for (let i = 0; i < inputCount; i++) {\n prepareInputOutputTensor(\n inputTensors[i], inputTensorHandles, inputOutputAllocs, sessionId, inputIndices[i], enableGraphCapture);\n }\n\n // create output tensors\n for (let i = 0; i < outputCount; i++) {\n prepareInputOutputTensor(\n outputTensors[i], outputTensorHandles, inputOutputAllocs, sessionId, inputCount + outputIndices[i],\n enableGraphCapture);\n }\n\n let inputValuesIndex = inputValuesOffset / 4;\n let inputNamesIndex = inputNamesOffset / 4;\n let outputValuesIndex = outputValuesOffset / 4;\n let outputNamesIndex = outputNamesOffset / 4;\n for (let i = 0; i < inputCount; i++) {\n wasm.HEAPU32[inputValuesIndex++] = inputTensorHandles[i];\n wasm.HEAPU32[inputNamesIndex++] = inputNamesUTF8Encoded[inputIndices[i]];\n }\n for (let i = 0; i < outputCount; i++) {\n wasm.HEAPU32[outputValuesIndex++] = outputTensorHandles[i];\n wasm.HEAPU32[outputNamesIndex++] = outputNamesUTF8Encoded[outputIndices[i]];\n }\n\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState && !inputOutputBound) {\n const {handle, outputPreferredLocations, 
outputPreferredLocationsEncoded} = ioBindingState;\n\n if (inputNamesUTF8Encoded.length !== inputCount) {\n throw new Error(`input count from feeds (${\n inputCount}) is expected to be always equal to model's input count (${inputNamesUTF8Encoded.length}).`);\n }\n\n // process inputs\n for (let i = 0; i < inputCount; i++) {\n const index = inputIndices[i];\n const errorCode = await wasm._OrtBindInput(handle, inputNamesUTF8Encoded[index], inputTensorHandles[i]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind input[${i}] for session=${sessionId}.`);\n }\n }\n\n // process pre-allocated outputs\n for (let i = 0; i < outputCount; i++) {\n const index = outputIndices[i];\n const location = outputTensors[i]?.[3]; // undefined means output is not pre-allocated.\n\n if (location) {\n // output is pre-allocated. bind the tensor.\n const errorCode = wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], outputTensorHandles[i], 0);\n if (errorCode !== 0) {\n checkLastError(`Can't bind pre-allocated output[${i}] for session=${sessionId}.`);\n }\n } else {\n // output is not pre-allocated. reset preferred location.\n const errorCode =\n wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], 0, outputPreferredLocationsEncoded[index]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind output[${i}] to ${outputPreferredLocations[i]} for session=${sessionId}.`);\n }\n }\n }\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, true]);\n }\n\n wasm.jsepOnRunStart?.(sessionHandle);\n let errorCode: number;\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState) {\n errorCode = await wasm._OrtRunWithBinding(\n sessionHandle, ioBindingState.handle, outputCount, outputValuesOffset, runOptionsHandle);\n } else {\n errorCode = await wasm._OrtRun(\n sessionHandle, inputNamesOffset, inputValuesOffset, inputCount, outputNamesOffset, outputCount,\n outputValuesOffset, runOptionsHandle);\n }\n\n if (errorCode !== 0) {\n checkLastError('failed to call OrtRun().');\n }\n\n const output: TensorMetadata[] = [];\n\n for (let i = 0; i < outputCount; i++) {\n const tensor = wasm.HEAPU32[outputValuesOffset / 4 + i];\n if (tensor === outputTensorHandles[i]) {\n // output tensor is pre-allocated. 
no need to copy data.\n output.push(outputTensors[i]!);\n continue;\n }\n\n const beforeGetTensorDataStack = wasm.stackSave();\n // stack allocate 4 pointer value\n const tensorDataOffset = wasm.stackAlloc(4 * 4);\n\n let keepOutputTensor = false;\n let type: Tensor.Type|undefined, dataOffset = 0;\n try {\n const errorCode = wasm._OrtGetTensorData(\n tensor, tensorDataOffset, tensorDataOffset + 4, tensorDataOffset + 8, tensorDataOffset + 12);\n if (errorCode !== 0) {\n checkLastError(`Can't access output tensor data on index ${i}.`);\n }\n let tensorDataIndex = tensorDataOffset / 4;\n const dataType = wasm.HEAPU32[tensorDataIndex++];\n dataOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsLength = wasm.HEAPU32[tensorDataIndex++];\n const dims = [];\n for (let i = 0; i < dimsLength; i++) {\n dims.push(wasm.HEAPU32[dimsOffset / 4 + i]);\n }\n wasm._OrtFree(dimsOffset);\n\n const size = dims.reduce((a, b) => a * b, 1);\n type = tensorDataTypeEnumToString(dataType);\n\n const preferredLocation = ioBindingState?.outputPreferredLocations[outputIndices[i]];\n\n if (type === 'string') {\n if (preferredLocation === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n const stringData: string[] = [];\n let dataIndex = dataOffset / 4;\n for (let i = 0; i < size; i++) {\n const offset = wasm.HEAPU32[dataIndex++];\n const maxBytesToRead = i === size - 1 ? undefined : wasm.HEAPU32[dataIndex] - offset;\n stringData.push(wasm.UTF8ToString(offset, maxBytesToRead));\n }\n output.push([type, dims, stringData, 'cpu']);\n } else {\n // If a certain output's preferred location is GPU but the tensor is empty, we still need to create a CPU\n // tensor for it. There is no mapping GPU buffer for an empty tensor.\n if (preferredLocation === 'gpu-buffer' && size > 0) {\n const getBuffer = wasm.jsepGetBuffer;\n if (!getBuffer) {\n throw new Error('preferredLocation \"gpu-buffer\" is not supported without using WebGPU.');\n }\n const gpuBuffer = getBuffer(dataOffset);\n const elementSize = getTensorElementSize(dataType);\n if (elementSize === undefined || !isGpuBufferSupportedType(type)) {\n throw new Error(`Unsupported data type: ${type}`);\n }\n\n // do not release the tensor right now. 
it will be released when user calls tensor.dispose().\n keepOutputTensor = true;\n\n output.push([\n type, dims, {\n gpuBuffer,\n download: wasm.jsepCreateDownloader!(gpuBuffer, size * elementSize, type),\n dispose: () => {\n wasm._OrtReleaseTensor(tensor);\n }\n },\n 'gpu-buffer'\n ]);\n } else {\n const typedArrayConstructor = tensorTypeToTypedArrayConstructor(type);\n const data = new typedArrayConstructor(size);\n new Uint8Array(data.buffer, data.byteOffset, data.byteLength)\n .set(wasm.HEAPU8.subarray(dataOffset, dataOffset + data.byteLength));\n output.push([type, dims, data, 'cpu']);\n }\n }\n } finally {\n wasm.stackRestore(beforeGetTensorDataStack);\n if (type === 'string' && dataOffset) {\n wasm._free(dataOffset);\n }\n if (!keepOutputTensor) {\n wasm._OrtReleaseTensor(tensor);\n }\n }\n }\n\n if (ioBindingState && !enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, false]);\n }\n return output;\n } finally {\n wasm.stackRestore(beforeRunStack);\n\n inputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n outputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n inputOutputAllocs.forEach(p => wasm._free(p));\n\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n runOptionsAllocs.forEach(p => wasm._free(p));\n }\n};\n\n/**\n * end profiling\n */\nexport const endProfiling = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error('invalid session id');\n }\n const sessionHandle = session[0];\n\n // profile file name is not used yet, but it must be freed.\n const profileFileName = wasm._OrtEndProfiling(sessionHandle);\n if (profileFileName === 0) {\n checkLastError('Can\\'t get an profile file name.');\n }\n wasm._OrtFree(profileFileName);\n};\n\nexport const extractTransferableBuffers = (tensors: readonly SerializableTensorMetadata[]): ArrayBufferLike[] => {\n const buffers: ArrayBufferLike[] = [];\n for (const tensor of tensors) {\n const data = tensor[2];\n if (!Array.isArray(data) && 'buffer' in data) {\n buffers.push(data.buffer);\n }\n }\n return buffers;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env, InferenceSession} from 'onnxruntime-common';\n\nimport {OrtWasmMessage, SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport * as core from './wasm-core-impl';\nimport {initializeWebAssembly} from './wasm-factory';\nimport {importProxyWorker} from './wasm-utils-import';\n\nconst isProxy = (): boolean => !!env.wasm.proxy && typeof document !== 'undefined';\nlet proxyWorker: Worker|undefined;\nlet initializing = false;\nlet initialized = false;\nlet aborted = false;\nlet temporaryObjectUrl: string|undefined;\n\ntype PromiseCallbacks = [resolve: (result: T) => void, reject: (reason: unknown) => void];\nlet initWasmCallbacks: PromiseCallbacks;\nconst queuedCallbacks: Map>> = new Map();\n\nconst enqueueCallbacks = (type: OrtWasmMessage['type'], callbacks: PromiseCallbacks): void => {\n const queue = queuedCallbacks.get(type);\n if (queue) {\n queue.push(callbacks);\n } else {\n queuedCallbacks.set(type, [callbacks]);\n }\n};\n\nconst ensureWorker = (): void => {\n if (initializing || !initialized || aborted || !proxyWorker) {\n throw new Error('worker not ready');\n }\n};\n\nconst onProxyWorkerMessage = (ev: MessageEvent): void => {\n switch (ev.data.type) {\n case 'init-wasm':\n initializing = false;\n if (ev.data.err) {\n aborted = true;\n initWasmCallbacks[1](ev.data.err);\n } else {\n initialized = true;\n initWasmCallbacks[0]();\n }\n if (temporaryObjectUrl) {\n URL.revokeObjectURL(temporaryObjectUrl);\n temporaryObjectUrl = undefined;\n }\n break;\n case 'init-ep':\n case 'copy-from':\n case 'create':\n case 'release':\n case 'run':\n case 'end-profiling': {\n const callbacks = queuedCallbacks.get(ev.data.type)!;\n if (ev.data.err) {\n callbacks.shift()![1](ev.data.err);\n } else {\n callbacks.shift()![0](ev.data.out!);\n }\n break;\n }\n default:\n }\n};\n\n\nexport const initializeWebAssemblyAndOrtRuntime = async(): Promise => {\n if (initialized) {\n return;\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initWasm()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initWasm()\\' failed.');\n }\n\n initializing = true;\n\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n return new Promise((resolve, reject) => {\n proxyWorker?.terminate();\n\n void importProxyWorker().then(([objectUrl, worker]) => {\n try {\n proxyWorker = worker;\n proxyWorker.onerror = (ev: ErrorEvent) => reject(ev);\n proxyWorker.onmessage = onProxyWorkerMessage;\n initWasmCallbacks = [resolve, reject];\n const message: OrtWasmMessage = {type: 'init-wasm', in : env};\n proxyWorker.postMessage(message);\n temporaryObjectUrl = objectUrl;\n } catch (e) {\n reject(e);\n }\n }, reject);\n });\n\n } else {\n try {\n await initializeWebAssembly(env.wasm);\n await core.initRuntime(env);\n initialized = true;\n } catch (e) {\n aborted = true;\n throw e;\n } finally {\n initializing = false;\n }\n }\n};\n\nexport const initializeOrtEp = async(epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('init-ep', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'init-ep', in : {epName, env}};\n proxyWorker!.postMessage(message);\n });\n } else {\n await core.initEp(env, epName);\n }\n};\n\nexport const copyFromExternalBuffer = async(buffer: Uint8Array): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) 
{\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('copy-from', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'copy-from', in : {buffer}};\n proxyWorker!.postMessage(message, [buffer.buffer]);\n });\n } else {\n return core.copyFromExternalBuffer(buffer);\n }\n};\n\nexport const createSession =\n async(model: SerializableInternalBuffer|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check unsupported options\n if (options?.preferredOutputLocation) {\n throw new Error('session option \"preferredOutputLocation\" is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('create', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'create', in : {model, options: {...options}}};\n const transferable: Transferable[] = [];\n if (model instanceof Uint8Array) {\n transferable.push(model.buffer);\n }\n proxyWorker!.postMessage(message, transferable);\n });\n } else {\n return core.createSession(model, options);\n }\n };\n\nexport const releaseSession = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('release', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'release', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.releaseSession(sessionId);\n }\n};\n\nexport const run = async(\n sessionId: number, inputIndices: number[], inputs: TensorMetadata[], outputIndices: number[],\n outputs: Array, options: InferenceSession.RunOptions): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check inputs location\n if (inputs.some(t => t[3] !== 'cpu')) {\n throw new Error('input tensor on GPU is not supported for proxy.');\n }\n // check outputs location\n if (outputs.some(t => t)) {\n throw new Error('pre-allocated output tensor is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('run', [resolve, reject]);\n const serializableInputs = inputs as SerializableTensorMetadata[]; // every input is on CPU.\n const message: OrtWasmMessage =\n {type: 'run', in : {sessionId, inputIndices, inputs: serializableInputs, outputIndices, options}};\n proxyWorker!.postMessage(message, core.extractTransferableBuffers(serializableInputs));\n });\n } else {\n return core.run(sessionId, inputIndices, inputs, outputIndices, outputs, options);\n }\n};\n\nexport const endProfiling = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('end-profiling', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'end-profiling', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.endProfiling(sessionId);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, TensorMetadata} from './proxy-messages';\nimport {copyFromExternalBuffer, createSession, endProfiling, releaseSession, run} from './proxy-wrapper';\nimport {isGpuBufferSupportedType} from './wasm-common';\nimport {isNode} from './wasm-utils-env';\nimport {loadFile} from './wasm-utils-load-file';\n\nexport const encodeTensorMetadata = (tensor: Tensor, getName: () => string): TensorMetadata => {\n switch (tensor.location) {\n case 'cpu':\n return [tensor.type, tensor.dims, tensor.data, 'cpu'];\n case 'gpu-buffer':\n return [tensor.type, tensor.dims, {gpuBuffer: tensor.gpuBuffer}, 'gpu-buffer'];\n default:\n throw new Error(`invalid data location: ${tensor.location} for ${getName()}`);\n }\n};\n\nexport const decodeTensorMetadata = (tensor: TensorMetadata): Tensor => {\n switch (tensor[3]) {\n case 'cpu':\n return new Tensor(tensor[0], tensor[2], tensor[1]);\n case 'gpu-buffer': {\n const dataType = tensor[0];\n if (!isGpuBufferSupportedType(dataType)) {\n throw new Error(`not supported data type: ${dataType} for deserializing GPU tensor`);\n }\n const {gpuBuffer, download, dispose} = tensor[2];\n return Tensor.fromGpuBuffer(gpuBuffer, {dataType, dims: tensor[1], download, dispose});\n }\n default:\n throw new Error(`invalid data location: ${tensor[3]}`);\n }\n};\n\nexport class OnnxruntimeWebAssemblySessionHandler implements InferenceSessionHandler {\n private sessionId: number;\n\n inputNames: string[];\n outputNames: string[];\n\n async fetchModelAndCopyToWasmMemory(path: string): Promise {\n // fetch model from url and move to wasm heap.\n return copyFromExternalBuffer(await loadFile(path));\n }\n\n async loadModel(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n let model: Parameters[0];\n\n if (typeof pathOrBuffer === 'string') {\n if (isNode) {\n // node\n model = await loadFile(pathOrBuffer);\n } else {\n // browser\n // fetch model and copy to wasm heap.\n model = await this.fetchModelAndCopyToWasmMemory(pathOrBuffer);\n }\n } else {\n model = pathOrBuffer;\n }\n\n [this.sessionId, this.inputNames, this.outputNames] = await createSession(model, options);\n TRACE_FUNC_END();\n }\n\n async dispose(): Promise {\n return releaseSession(this.sessionId);\n }\n\n async run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType, options: InferenceSession.RunOptions):\n Promise {\n TRACE_FUNC_BEGIN();\n const inputArray: Tensor[] = [];\n const inputIndices: number[] = [];\n Object.entries(feeds).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.inputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid input '${name}'`);\n }\n inputArray.push(tensor);\n inputIndices.push(index);\n });\n\n const outputArray: Array = [];\n const outputIndices: number[] = [];\n Object.entries(fetches).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.outputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid output '${name}'`);\n }\n outputArray.push(tensor);\n outputIndices.push(index);\n });\n\n const inputs =\n inputArray.map((t, i) => encodeTensorMetadata(t, () => `input \"${this.inputNames[inputIndices[i]]}\"`));\n const outputs = outputArray.map(\n (t, i) => t ? 
encodeTensorMetadata(t, () => `output \"${this.outputNames[outputIndices[i]]}\"`) : null);\n\n const results = await run(this.sessionId, inputIndices, inputs, outputIndices, outputs, options);\n\n const resultMap: SessionHandler.ReturnType = {};\n for (let i = 0; i < results.length; i++) {\n resultMap[this.outputNames[outputIndices[i]]] = outputArray[i] ?? decodeTensorMetadata(results[i]);\n }\n TRACE_FUNC_END();\n return resultMap;\n }\n\n startProfiling(): void {\n // TODO: implement profiling\n }\n\n endProfiling(): void {\n void endProfiling(this.sessionId);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend, env, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {initializeOrtEp, initializeWebAssemblyAndOrtRuntime} from './wasm/proxy-wrapper';\nimport {OnnxruntimeWebAssemblySessionHandler} from './wasm/session-handler-inference';\nimport {scriptSrc} from './wasm/wasm-utils-import';\n\n/**\n * This function initializes all flags for WebAssembly.\n *\n * Those flags are accessible from `ort.env.wasm`. Users are allow to set those flags before the first inference session\n * being created, to override default value.\n */\nexport const initializeFlags = (): void => {\n if (typeof env.wasm.initTimeout !== 'number' || env.wasm.initTimeout < 0) {\n env.wasm.initTimeout = 0;\n }\n\n if (env.wasm.simd === false) {\n // eslint-disable-next-line no-console\n console.warn(\n 'Deprecated property \"env.wasm.simd\" is set to false. ' +\n 'non-SIMD build is no longer provided, and this setting will be ignored.');\n }\n\n if (typeof env.wasm.proxy !== 'boolean') {\n env.wasm.proxy = false;\n }\n\n if (typeof env.wasm.trace !== 'boolean') {\n env.wasm.trace = false;\n }\n\n if (typeof env.wasm.numThreads !== 'number' || !Number.isInteger(env.wasm.numThreads) || env.wasm.numThreads <= 0) {\n // The following logic only applies when `ort.env.wasm.numThreads` is not set by user. We will always honor user's\n // setting if it is provided.\n\n // Browser: when crossOriginIsolated is false, SharedArrayBuffer is not available so WebAssembly threads will not\n // work. In this case, we will set numThreads to 1.\n //\n // There is an exception: when the browser is configured to force-enable SharedArrayBuffer (e.g. Chromuim with\n // --enable-features=SharedArrayBuffer), it is possible that `self.crossOriginIsolated` is false and\n // SharedArrayBuffer is available at the same time. This is usually for testing. In this case, we will still set\n // numThreads to 1 here. If we want to enable multi-threading in test, we should set `ort.env.wasm.numThreads` to a\n // value greater than 1.\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n env.wasm.numThreads = 1;\n } else {\n const numCpuLogicalCores =\n typeof navigator === 'undefined' ? require('node:os').cpus().length : navigator.hardwareConcurrency;\n env.wasm.numThreads = Math.min(4, Math.ceil((numCpuLogicalCores || 1) / 2));\n }\n }\n\n if (!BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n // overwrite wasm paths override if not set\n if (env.wasm.wasmPaths === undefined && scriptSrc && scriptSrc.indexOf('blob:') !== 0) {\n env.wasm.wasmPaths = scriptSrc.substring(0, scriptSrc.lastIndexOf('/') + 1);\n }\n }\n};\n\nexport class OnnxruntimeWebAssemblyBackend implements Backend {\n /**\n * This function initializes the WebAssembly backend.\n *\n * This function will be called only once for each backend name. 
It will be called the first time when\n * `ort.InferenceSession.create()` is called with a registered backend name.\n *\n * @param backendName - the registered backend name.\n */\n async init(backendName: string): Promise {\n // populate wasm flags\n initializeFlags();\n\n // init wasm\n await initializeWebAssemblyAndOrtRuntime();\n\n // performe EP specific initialization\n await initializeOrtEp(backendName);\n }\n createInferenceSessionHandler(path: string, options?: InferenceSession.SessionOptions):\n Promise;\n createInferenceSessionHandler(buffer: Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise {\n const handler = new OnnxruntimeWebAssemblySessionHandler();\n await handler.loadModel(pathOrBuffer, options);\n return Promise.resolve(handler);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OnnxruntimeWebAssemblyBackend} from './backend-wasm';\nexport const wasmBackend = new OnnxruntimeWebAssemblyBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports */\n\n// We use \"require\" instead of \"import\" here because import statement must be put in top level. Our current code does\n// not allow bundler to tree-shaking code as expected because some codes are treated as having side effects.\n// So we import code inside the if-clause to allow bundler remove the code safely.\n\nexport * from 'onnxruntime-common';\nimport * as ort from 'onnxruntime-common';\nexport default ort;\n\nimport {registerBackend, env} from 'onnxruntime-common';\nimport {version} from './version';\n\nif (!BUILD_DEFS.DISABLE_WEBGL) {\n const onnxjsBackend = require('./backend-onnxjs').onnxjsBackend;\n registerBackend('webgl', onnxjsBackend, -10);\n}\n\nif (!BUILD_DEFS.DISABLE_WASM) {\n const wasmBackend = BUILD_DEFS.DISABLE_TRAINING ? require('./backend-wasm-inference').wasmBackend :\n require('./backend-wasm-training').wasmBackend;\n if (!BUILD_DEFS.DISABLE_JSEP) {\n registerBackend('webgpu', wasmBackend, 5);\n registerBackend('webnn', wasmBackend, 5);\n }\n registerBackend('cpu', wasmBackend, 10);\n registerBackend('wasm', wasmBackend, 10);\n}\n\nObject.defineProperty(env.versions, 'web', {value: version, enumerable: true});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n"], - "mappings": ";;;;;05BAAA,IAgBMA,GACAC,GAYOC,GAwCPC,GAwCOC,GA7GbC,GAAAC,EAAA,kBAgBMN,GAAqC,IAAI,IACzCC,GAAqC,CAAA,EAY9BC,GAAkB,CAACK,EAAcC,EAAkBC,IAA0B,CACxF,GAAID,GAAW,OAAOA,EAAQ,MAAS,YAAc,OAAOA,EAAQ,+BAAkC,WAAY,CAChH,IAAME,EAAiBV,GAAS,IAAIO,CAAI,EACxC,GAAIG,IAAmB,OACrBV,GAAS,IAAIO,EAAM,CAAC,QAAAC,EAAS,SAAAC,CAAQ,CAAC,MACjC,IAAIC,EAAe,SAAWD,EAEnC,OACK,GAAIC,EAAe,WAAaD,GACjCC,EAAe,UAAYF,EAC7B,MAAM,IAAI,MAAM,4BAA4BD,CAAI,oBAAoBE,CAAQ,EAAE,EAIlF,GAAIA,GAAY,EAAG,CACjB,IAAME,EAAIV,GAAyB,QAAQM,CAAI,EAC3CI,IAAM,IACRV,GAAyB,OAAOU,EAAG,CAAC,EAGtC,QAAS,EAAI,EAAG,EAAIV,GAAyB,OAAQ,IACnD,GAAID,GAAS,IAAIC,GAAyB,CAAC,CAAC,EAAG,UAAYQ,EAAU,CACnER,GAAyB,OAAO,EAAG,EAAGM,CAAI,EAC1C,OAGJN,GAAyB,KAAKM,CAAI,EAEpC,OAGF,MAAM,IAAI,UAAU,qBAAqB,CAC3C,EAQMJ,GAAiC,MAAMS,GAAgD,CAC3F,IAAMC,EAAcb,GAAS,IAAIY,CAAW,EAC5C,GAAI,CAACC,EACH,MAAO,qBAGT,GAAIA,EAAY,YACd,OAAOA,EAAY,QACd,GAAIA,EAAY,QACrB,OAAOA,EAAY,MACd,CACL,IAAMC,EAAiB,CAAC,CAACD,EAAY,YACrC,GAAI,CACF,OAAKC,IACHD,EAAY,YAAcA,EAAY,QAAQ,KAAKD,CAAW,GAEhE,MAAMC,EAAY,YAClBA,EAAY,YAAc,GACnBA,EAAY,cACZE,EAAG,CACV,OAAKD,IACHD,EAAY,MAAQ,GAAGE,CAAC,GACxBF,EAAY,QAAU,IAEjBA,EAAY,cAEnB,OAAOA,EAAY,aAGzB,EAWaT,GAAsC,MAAMY,GACmB,CAEtE,IAAMC,EAAMD,EAAQ,oBAAsB,CAAA,EACpCE,EAAeD,EAAI,IAAIN,GAAK,OAAOA,GAAM,SAAWA,EAAIA,EAAE,IAAI,EAC9DQ,EAAeD,EAAa,SAAW,EAAIjB,GAA2BiB,EAGxEV,EACEY,EAAS,CAAA,EACTC,EAAwB,IAAI,IAClC,QAAWT,KAAeO,EAAc,CACtC,IAAMG,EAAgB,MAAMnB,GAA+BS,CAAW,EAClE,OAAOU,GAAkB,SAC3BF,EAAO,KAAK,CAAC,KAAMR,EAAa,IAAKU,CAAa,CAAC,GAE9Cd,IACHA,EAAUc,GAERd,IAAYc,GACdD,EAAsB,IAAIT,CAAW,GAM3C,GAAI,CAACJ,EACH,MAAM,IAAI,MAAM,oCAAoCY,EAAO,IAAIL,GAAK,IAAIA,EAAE,IAAI,KAAKA,EAAE,GAAG,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,EAI1G,OAAW,CAAC,KAAAR,EAAM,IAAAgB,CAAG,IAAKH,EACpBF,EAAa,SAASX,CAAI,GAE5B,QAAQ,KAAK,0CACTA,CAAI,uDAAuDgB,CAAG,EAAE,EAIxE,IAAMC,EAAcP,EAAI,OAAON,GAAKU,EAAsB,IAAI,OAAOV,GAAM,SAAWA,EAAIA,EAAE,IAAI,CAAC,EAEjG,MAAO,CACLH,EAAS,IAAI,MAAMQ,EAAS,CAC1B,IAAK,CAACS,EAAQC,IACRA,IAAS,qBACJF,EAEF,QAAQ,IAAIC,EAAQC,CAAI,EAElC,EAEL,IChKJ,IAAAC,GAAAC,EAAA,kBAoFAC,OCpFA,IAMaC,GANbC,GAAAC,EAAA,kBAMaF,GAAU,WCNvB,IAQIG,GAESC,GAVbC,GAAAC,EAAA,kBAIAC,KAIIJ,GAAwC,UAE/BC,GAAW,CACtB,KAAM,CAAA,EACN,MAAO,CAAA,EACP,OAAQ,CAAA,EACR,SAAU,CAAC,OAAQI,EAAO,EAE1B,IAAI,SAASC,EAAmB,CAC9B,GAAIA,IAAU,OAGd,IAAI,OAAOA,GAAU,UAAY,CAAC,UAAW,OAAQ,UAAW,QAAS,OAAO,EAAE,QAAQA,CAAK,IAAM,GACnG,MAAM,IAAI,MAAM,8BAA8BA,CAAK,EAAE,EAEvDN,GAAgBM,EAClB,EACA,IAAI,UAAQ,CACV,OAAON,EACT,GAIF,OAAO,eAAeC,GAAK,WAAY,CAAC,WAAY,EAAI,CAAC,IC/BzD,IAmRaM,GAnRbC,GAAAC,EAAA,kBAGAC,KAgRaH,GAAWA,KCnRxB,IASaI,GA+FAC,GAxGbC,GAAAC,EAAA,kBASaH,GAAkB,CAACI,EAAgBC,IAA4C,CAC1F,IAAMC,EAAS,OAAO,SAAa,IAAc,SAAS,cAAc,QAAQ,EAAK,IAAI,gBAAgB,EAAG,CAAC,EAC7GA,EAAO,MAAQF,EAAO,KAAK,CAAC,EAC5BE,EAAO,OAASF,EAAO,KAAK,CAAC,EAC7B,IAAMG,EACFD,EAAO,WAAW,IAAI,EAE1B,GAAIC,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAJ,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,IAEtBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,GAGxB,IAAMM,EAAcL,GAAS,SAAW,OAAYA,EAAQ,OAAS,MAE/DM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,
SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EAEpBO,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5B,QAASK,EAAI,EAAGA,EAAIV,EAAQU,IAC1B,QAASC,EAAI,EAAGA,EAAIZ,EAAOY,IAAK,CAC9B,IAAMC,GAAMjB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EU,GAAMlB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EW,GAAMnB,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EY,EAAIN,IAAmB,GACzB,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,EAE1EL,EAAgB,UAAY,QAAUc,EAAI,IAAMC,EAAI,IAAMC,EAAI,IAAMC,EAAI,IACxEjB,EAAgB,SAASa,EAAGD,EAAG,EAAG,CAAC,EAGvC,GAAI,cAAeb,EACjB,OAAOA,EAAO,UAAS,EAEvB,MAAM,IAAI,MAAM,4BAA4B,MAG9C,OAAM,IAAI,MAAM,2BAA2B,CAE/C,EAKaL,GAAoB,CAACG,EAAgBC,IAAiD,CACjG,IAAME,EAAkB,OAAO,SAAa,IACxC,SAAS,cAAc,QAAQ,EAAE,WAAW,IAAI,EAChD,IAAI,gBAAgB,EAAG,CAAC,EAAE,WAAW,IAAI,EACzCkB,EACJ,GAAIlB,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAiB,EACArB,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,IAExBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,GAE1B,IAAMM,EAAcL,IAAY,QAAaA,EAAQ,SAAW,OAAYA,EAAQ,OAAkB,MAEhGM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,GAAG,EACrDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EACxB,GAAIH,IAAY,SACVA,EAAQ,SAAW,QAAcqB,IAAa,GAAKrB,EAAQ,SAAW,QACrEqB,IAAa,GAAMrB,EAAQ,SAAW,OAASA,EAAQ,SAAW,OACrE,MAAM,IAAI,MAAM,+CAAgD,EAKpE,IAAMsB,EAAO,EACTC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACzEhB,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5BW,EAAQlB,EAAgB,gBAAgBC,EAAOC,CAAM,EAErD,QAASU,EAAI,EAAGA,EAAIV,EAASD,EACxBoB,GAAiBD,EAAME,GAAiBF,EAAMG,GAAiBH,EAAMI,GAAiBJ,EAAMR,IAC/FM,EAAM,KAAKG,CAAa,GAAMxB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKI,CAAa,GAAMzB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKK,CAAa,GAAM1B,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKM,CAAa,EAAIb,IAAmB,GAC3C,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,MAI5E,OAAM,IAAI,MAAM,2BAA2B,EAE7C,OAAOa,CACT,ICtMA,IAiBaO,GAkFAC,GAgKAC,GAWAC,GASAC,GAvRbC,GAAAC,EAAA,kBAIAC,KAaaP,GAAiB,CAACQ,EAAqCC,IAA0C,CAC5G,GAAID,IAAW,OACb,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAIC,EAAQ,SAAW,QAAaA,EAAQ,QAAU,OACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAM,CAAC,OAAAC,EAAQ,MAAAC,CAAK,EAAIF,EAElBG,EAAOH,EAAQ,MAAQ,CAAC,KAAM,IAAK,KAAM,CAAC,EAC5CI,EACAC,EAEA,OAAQF,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAA
I,EAEtDC,EAAW,CAACD,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,GAAG,EAG3E,OAAQA,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,EAEtDE,EAAW,CAACF,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,CAAC,EAG7E,IAAMG,EAAcN,EAAQ,SAAW,OAAYA,EAAQ,OAAS,OAG9DO,EACFP,EAAQ,eAAiB,QAAaA,EAAQ,eAAiB,OAAYA,EAAQ,aAAwB,MACzGQ,EAASP,EAASC,EAClBO,EAAcF,IAAiB,OAAS,IAAI,aAAaC,EAAS,CAAC,EAAI,IAAI,aAAaA,EAAS,CAAC,EAGpGE,EAAO,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACnFC,EAAiB,EAAGC,EAAiBR,EAAQS,EAAiBT,EAAS,EAAGU,EAAiB,GAG3FZ,IAAgB,QAClBI,EAAO,EACPC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,IAIdP,IAAiB,OACnBW,EAAiBV,EAAS,EACjBD,IAAiB,OAC1BQ,EAAiB,EACjBE,EAAiBT,EACjBQ,EAAiBR,EAAS,GACjBD,IAAiB,QAC1BU,EAAiB,EACjBD,EAAiBR,EACjBO,EAAiBP,EAAS,GAG5B,QAASW,EAAI,EAAGA,EAAIX,EACfW,IAAKR,GAAiBD,EAAMG,GAAiBH,EAAME,GAAiBF,EAAMI,GAAiBJ,EAC9FD,EAAYM,GAAgB,GAAKhB,EAAOY,CAAa,EAAIN,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYO,GAAgB,GAAKjB,EAAOa,CAAa,EAAIP,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYQ,GAAgB,GAAKlB,EAAOc,CAAa,EAAIR,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC9Ec,IAAmB,IAAMJ,IAAkB,KAC7CL,EAAYS,GAAgB,GAAKnB,EAAOe,CAAa,EAAIT,EAAS,CAAC,GAAKD,EAAS,CAAC,GAOtF,OAFqBG,IAAiB,OAAS,IAAIa,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,EACxD,IAAIkB,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,CAEzG,EAKaV,GAAkB,MAC3B6B,EACArB,IACyC,CAE3C,IAAMsB,EAAiB,OAAQ,iBAAsB,KAAeD,aAAiB,iBAC/EE,EAAiB,OAAQ,UAAe,KAAeF,aAAiB,UACxEG,EAAgB,OAAQ,YAAiB,KAAeH,aAAiB,YACzEI,EAAW,OAAOJ,GAAU,SAE9BK,EACAC,EAA+C3B,GAAW,CAAA,EAExD4B,EAAe,IAAK,CACxB,GAAI,OAAO,SAAa,IACtB,OAAO,SAAS,cAAc,QAAQ,EACjC,GAAI,OAAO,gBAAoB,IACpC,OAAO,IAAI,gBAAgB,EAAG,CAAC,EAE/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,EACMC,EAAuBC,GACvBA,aAAkB,mBAEXA,aAAkB,gBADpBA,EAAO,WAAW,IAAI,EAItB,KAIX,GAAIR,EAAgB,CAElB,IAAMQ,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAI9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MAMlB,GALIrB,IAAY,QAAaA,EAAQ,gBAAkB,QAAaA,EAAQ,eAAiB,SAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,cAGdA,IAAY,OAAW,CAEzB,GADA2B,EAAwB3B,EACpBA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,6DAA6D,EAE7E2B,EAAsB,aAAe,OAEvCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,OAE9ByB,EAAsB,aAAe,OACrCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAGhC6B,EAAgB,UAAUV,EAAO,EAAG,CAAC,EACrCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,UAEpCsB,EAAgB,CACzB,IAAItB,EACAC,EAiBJ,GAfIF,IAAY,QAAaA,EAAQ,eAAiB,QAAaA,EAAQ,gBAAkB,QAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,eAEhBC,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,OAGZrB,IAAY,SACd2B,EAAwB3B,GAE1B2B,EAAsB,OAAS,OAC/BA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAE1BF,IAAY,OAAW,CACzB,IAAMgC,EAAaJ,EAAY,EAE/BI,EAAW,MAAQ9B,EACnB8B,EAAW,OAAS/B,EAEpB,IAAM8B,EAAkBF,EAAoBG,CAAU,EAEtD,GAAID,GAAmB,KACrBA,EAAgB,aAAaV,EAAO,EAAG,CAAC,EACxCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,OAG7CyB,EAAOL,EAAM,aAENG,EAAe,CAExB,GAAIxB,IAAY,OACd,MAAM,IAAI,MAAM,yDAAyD,EAG3E,IAAM8B,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAM9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MACpB,OAAAU,EAAgB,UAAUV,EAAO,EAAG,EAAGnB,EAAOD,CAAM,EACpDyB,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,KACzD0B,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EACvBX,GAAemC,EAAMC,CAAqB,MAEjD,OAAM,IAAI,MAAM,2BAA2B,MAExC,IAAIF,EACT,OAAO,IAAI,QAAQ,CAACQ,EAASC,IAAU,CACrC,IAAMJ,EAASF,EAAY,EACrBO,EAAUN,EAAoBC,CAAM,EAC1C,GAAI,CAACT,GAAS,CAACc,EACb,OAAOD,EAAM,EAEf,IAAME,EAAW,IAAI,MACrBA,EAAS,YAAc,YACvBA,EAAS,IAAMf,EACfe,EAAS,OAAS,IAAK,CACrBN,EAAO,MAAQM,EAAS,M
ACxBN,EAAO,OAASM,EAAS,OACzBD,EAAQ,UAAUC,EAAU,EAAG,EAAGN,EAAO,MAAOA,EAAO,MAAM,EAC7D,IAAMO,EAAMF,EAAQ,aAAa,EAAG,EAAGL,EAAO,MAAOA,EAAO,MAAM,EAElEH,EAAsB,OAASG,EAAO,OACtCH,EAAsB,MAAQG,EAAO,MACrCG,EAAQ1C,GAAe8C,EAAI,KAAMV,CAAqB,CAAC,CACzD,CACF,CAAC,EAED,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAID,IAAS,OACX,OAAOnC,GAAemC,EAAMC,CAAqB,EAEjD,MAAM,IAAI,MAAM,gEAAgE,CAEpF,EAKalC,GAAoB,CAC7B6C,EAAsCtC,IAAgD,CACxF,GAAM,CAAC,MAAAE,EAAO,OAAAD,EAAQ,SAAAsC,EAAU,QAAAC,CAAO,EAAIxC,EAErCyC,EAAO,CAAC,EAAGxC,EAAQC,EAAO,CAAC,EACjC,OAAO,IAAIkB,GAAO,CAAC,SAAU,UAAW,KAAM,UAAW,QAAAkB,EAAS,KAAAG,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC5F,EAKa9C,GAAsB,CAC/BgD,EAA0C1C,IAAkD,CAC9F,GAAM,CAAC,SAAA2C,EAAU,KAAAF,EAAM,SAAAF,EAAU,QAAAC,CAAO,EAAIxC,EAC5C,OAAO,IAAIoB,GAAO,CAAC,SAAU,aAAc,KAAMuB,GAAY,UAAW,UAAAD,EAAW,KAAAD,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC7G,EAKa7C,GAAyB,CAClCiD,EAAS7C,EAAwC0C,IACjD,IAAIrB,GAAO,CAAC,SAAU,aAAc,KAAAwB,EAAM,KAAM7C,EAAQ,KAAM0C,GAAQ,CAAC1C,EAAO,MAAM,CAAC,CAAC,ICzR1F,IAWa8C,GAaAC,GAoBTC,GACSC,GA7CbC,GAAAC,EAAA,kBAWaL,GAAwC,IAAI,IAA6C,CACpG,CAAC,UAAW,YAAY,EACxB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,SAAS,EAClB,CAAC,SAAU,WAAW,EACtB,CAAC,QAAS,UAAU,EACpB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,UAAU,EACnB,CAAC,UAAW,YAAY,EACxB,CAAC,SAAU,WAAW,EACvB,EAGYC,GAAwC,IAAI,IAAkD,CACzG,CAAC,aAAc,SAAS,EACxB,CAAC,WAAY,OAAO,EACpB,CAAC,UAAW,MAAM,EAClB,CAAC,YAAa,QAAQ,EACtB,CAAC,WAAY,OAAO,EACpB,CAAC,WAAY,OAAO,EACpB,CAAC,aAAc,SAAS,EACxB,CAAC,YAAa,QAAQ,EACvB,EAWGC,GAAsB,GACbC,GAAkB,IAAK,CAClC,GAAI,CAACD,GAAqB,CACxBA,GAAsB,GACtB,IAAMI,EAA2B,OAAO,cAAkB,KAAe,cAAc,KACjFC,EAA4B,OAAO,eAAmB,KAAe,eAAe,KACpFC,EAA0B,OAAO,aAAiB,KAAe,aAAa,KAEhFF,IACFN,GAAsC,IAAI,QAAS,aAAa,EAChEC,GAAsC,IAAI,cAAe,OAAO,GAE9DM,IACFP,GAAsC,IAAI,SAAU,cAAc,EAClEC,GAAsC,IAAI,eAAgB,QAAQ,GAEhEO,GACFR,GAAsC,IAAI,UAAW,YAAY,EACjEC,GAAsC,IAAI,aAAc,SAAS,GAGjED,GAAsC,IAAI,UAAW,WAAW,EAGtE,ICpEA,IAWaS,GAkBAC,GA7BbC,GAAAC,EAAA,kBAIAC,KAOaJ,GAAiBK,GAAoC,CAChE,IAAIC,EAAO,EACX,QAASC,EAAI,EAAGA,EAAIF,EAAK,OAAQE,IAAK,CACpC,IAAMC,EAAMH,EAAKE,CAAC,EAClB,GAAI,OAAOC,GAAQ,UAAY,CAAC,OAAO,cAAcA,CAAG,EACtD,MAAM,IAAI,UAAU,QAAQD,CAAC,8BAA8BC,CAAG,EAAE,EAElE,GAAIA,EAAM,EACR,MAAM,IAAI,WAAW,QAAQD,CAAC,0CAA0CC,CAAG,EAAE,EAE/EF,GAAQE,EAEV,OAAOF,CACT,EAKaL,GAAgB,CAACQ,EAAgBJ,IAAmC,CAC/E,OAAQI,EAAO,SAAU,CACvB,IAAK,MACH,OAAO,IAAIC,GAAOD,EAAO,KAAMA,EAAO,KAAMJ,CAAI,EAClD,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,KAAMD,EAAO,KACb,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,UACH,OAAO,IAAIK,GAAO,CAChB,SAAU,UACV,QAASD,EAAO,QAChB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,UAAWD,EAAO,UAClB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,QACE,MAAM,IAAI,MAAM,kCAAkCI,EAAO,QAAQ,mBAAmB,EAE1F,ICzDA,IAwBaE,GAxBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAgBaN,GAAP,KAAa,CAyCjB,YACIO,EAEAC,EAA8EC,EAAwB,CAExGC,GAAe,EAEf,IAAIC,EACAC,EAEJ,GAAI,OAAOL,GAAS,UAAY,aAAcA,EAO5C,OAHA,KAAK,aAAeA,EAAK,SACzBI,EAAOJ,EAAK,KACZK,EAAOL,EAAK,KACJA,EAAK,SAAU,CACrB,IAAK,aAAc,CACjB,IAAMM,EAAgCC,GAAsC,IAAIH,CAAI,EACpF,GAAI,CAACE,EACH,MAAM,IAAI,UAAU,qBAAqBF,CAAI,uCAAuC,EAEtF,GAAI,EAAEJ,EAAK,gBAAgBM,GACzB,MAAM,IAAI,UAAU,4BAA4BA,EAA8B,IAAI,EAAE,EAEtF,KAAK,QAAUN,EAAK,KACpB,MAEF,IAAK,UAAW,CACd,GAAII,IAAS,UACX,MAAM,IAAI,UAAU,qBAAqBA,CAAI,iCAAiC,EAEhF,KAAK,eAAiBJ,EAAK,QAC3B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,IAAK,aAAc,CACjB,GAAKI,IAAS,WAAaA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAC7FA,IAAS,SAAWA,IAAS,OAChC,MAAM,IAAI,UAAU,qBAAqBA,CAAI,oCAAoC,EAEnF,KAAK,cAAgBJ,EAAK,UAC1B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,QACE,MAAM,IAAI,MAAM,6CAA6C,KAAK,YAAY,GAAG,MAEhF,CAIL,IAAIQ,EACAC,EAEJ,GAAI,OAAOT,GAAS,SAMlB,GAFAI,EAAOJ,EACPS,EAAYP,EACRF,IAAS,SAAU,CAErB,GAAI,
CAAC,MAAM,QAAQC,CAAI,EACrB,MAAM,IAAI,UAAU,gDAAiD,EAIvEO,EAAOP,MACF,CAEL,IAAMS,EAAwBH,GAAsC,IAAIP,CAAI,EAC5E,GAAIU,IAA0B,OAC5B,MAAM,IAAI,UAAU,4BAA4BV,CAAI,GAAG,EAEzD,GAAI,MAAM,QAAQC,CAAI,EAAG,CACvB,GAAID,IAAS,WAAaU,IAA0B,YAMlD,MAAM,IAAI,UACN,+FAA+F,EAC1FV,IAAS,UAAYA,IAAS,QAYvCQ,EAAQE,EAA8B,KAAKT,EAAM,MAAM,EAIvDO,EAAQE,EAA8B,KAAKT,CAAI,UAExCA,aAAgBS,EACzBF,EAAOP,MAEP,OAAM,IAAI,UAAU,KAAKG,CAAI,kCAAkCM,CAAqB,EAAE,UAO1FD,EAAYR,EACR,MAAM,QAAQD,CAAI,EAAG,CAEvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qDAAqD,EAE3E,IAAMW,EAAmB,OAAOX,EAAK,CAAC,EACtC,GAAIW,IAAqB,SACvBP,EAAO,SACPI,EAAOR,UACEW,IAAqB,UAC9BP,EAAO,OAIPI,EAAO,WAAW,KAAKR,CAAa,MAEpC,OAAM,IAAI,UAAU,uCAAuCW,CAAgB,GAAG,MAE3E,CAEL,IAAMC,EACFC,GAAsC,IAAIb,EAAK,WAA8C,EACjG,GAAIY,IAAe,OACjB,MAAM,IAAI,UAAU,qCAAqCZ,EAAK,WAAW,GAAG,EAE9EI,EAAOQ,EACPJ,EAAOR,EAKX,GAAIS,IAAc,OAEhBA,EAAY,CAACD,EAAK,MAAM,UACf,CAAC,MAAM,QAAQC,CAAS,EACjC,MAAM,IAAI,UAAU,wCAAyC,EAE/DJ,EAAOI,EAEP,KAAK,QAAUD,EACf,KAAK,aAAe,MAItB,IAAMM,EAAOC,GAAcV,CAAI,EAE/B,GAAI,KAAK,SAAWS,IAAS,KAAK,QAAQ,OACxC,MAAM,IAAI,MAAM,iBAAiBA,CAAI,gCAAgC,KAAK,QAAQ,MAAM,IAAI,EAG9F,KAAK,KAAOV,EACZ,KAAK,KAAOC,EACZ,KAAK,KAAOS,CACd,CAIA,aAAa,UACTE,EACAC,EACoB,CACtB,OAAOC,GAAgBF,EAAOC,CAAO,CACvC,CAEA,OAAO,YACHE,EAA4BF,EAAoC,CAClE,OAAOG,GAAkBD,EAASF,CAAO,CAC3C,CAEA,OAAO,cACHI,EAAgCJ,EAAsC,CACxE,OAAOK,GAAoBD,EAAWJ,CAAO,CAC/C,CAEA,OAAO,iBACHb,EAASmB,EAAwClB,EAAwB,CAC3E,OAAOmB,GAAuBpB,EAAMmB,EAAQlB,CAAI,CAClD,CAKA,UAAUY,EAAgC,CACxC,OAAOQ,GAAgB,KAAMR,CAAO,CACtC,CAEA,YAAYA,EAAkC,CAC5C,OAAOS,GAAkB,KAAMT,CAAO,CACxC,CAgDA,IAAI,MAAI,CAEN,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,QACR,MAAM,IAAI,MACN,gJAC2E,EAEjF,OAAO,KAAK,OACd,CAEA,IAAI,UAAQ,CACV,OAAO,KAAK,YACd,CAEA,IAAI,SAAO,CAET,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,eACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,cACd,CAEA,IAAI,WAAS,CAEX,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,cACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,aACd,CAKA,MAAM,QAAQU,EAAqB,CAEjC,OADA,KAAK,YAAW,EACR,KAAK,aAAc,CACzB,IAAK,MACL,IAAK,aACH,OAAO,KAAK,KACd,IAAK,UACL,IAAK,aAAc,CACjB,GAAI,CAAC,KAAK,WACR,MAAM,IAAI,MAAM,qEAAqE,EAEvF,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAE3D,GAAI,CACF,KAAK,cAAgB,GACrB,IAAMnB,EAAO,MAAM,KAAK,WAAU,EAClC,YAAK,WAAa,OAClB,KAAK,aAAe,MACpB,KAAK,QAAUA,EAEXmB,GAAe,KAAK,WACtB,KAAK,SAAQ,EACb,KAAK,SAAW,QAGXnB,UAGP,KAAK,cAAgB,IAGzB,QACE,MAAM,IAAI,MAAM,kCAAkC,KAAK,YAAY,EAAE,EAE3E,CAEA,SAAO,CACL,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAGvD,KAAK,WACP,KAAK,SAAQ,EACb,KAAK,SAAW,QAElB,KAAK,QAAU,OACf,KAAK,eAAiB,OACtB,KAAK,cAAgB,OACrB,KAAK,WAAa,OAClB,KAAK,cAAgB,OAErB,KAAK,aAAe,MACtB,CAKQ,aAAW,CACjB,GAAI,KAAK,eAAiB,OACxB,MAAM,IAAI,MAAM,yBAAyB,CAE7C,CAEA,QAAQH,EAAuB,CAE7B,GADA,KAAK,YAAW,EACZ,KAAK,YAAc,KAAK,SAC1B,MAAM,IAAI,MAAM,iDAAiD,EAEnE,OAAOuB,GAAc,KAAMvB,CAAI,CACjC,KCpaF,IAwUawB,GAxUbC,GAAAC,EAAA,kBAIAC,KAoUaH,GAASA,KCxUtB,IAQaI,GAQPC,GAqBOC,GAUAC,GA/CbC,GAAAC,EAAA,kBAGAC,KAKaN,GAAQ,CAACO,EAAoBC,IAAiB,EACrD,OAAOC,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAI9D,QAAQ,UAAU,GAAGF,CAAU,UAAUC,CAAK,EAAE,CAClD,EAEMP,GAAa,CAACS,EAAaC,IAAqB,CACpD,IAAMC,EAAQ,IAAI,MAAK,EAAG,OAAO,MAAM,aAAa,GAAK,CAAA,EACrDC,EAAe,GACnB,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,GAAID,GAAgB,CAACD,EAAME,CAAC,EAAE,SAAS,YAAY,EAAG,CACpD,IAAIN,EAAQ,QAAQE,CAAG,KAAKE,EAAME,CAAC,EAAE,KAAI,EAAG,MAAM,GAAG,EAAE,CAAC,CAAC,GACrDH,IACFH,GAAS,KAAKG,CAAQ,IAExBX,GAAM,MAAOQ,CAAK,EAClB,OAEEI,EAAME,CAAC,EAAE,SAAS,YAAY,IAChCD,EAAe,IAGrB,EAKaX,GAAoBS,GAAqB,EAChD,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,QAASU,CAAQ,CAC9B,EAKaR,GAAkBQ,GAAqB,EAC9C,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,MAAOU,CAAQ,CAC5B,ICpDA,IAgBaI,GAhBbC,GAAAC,EAAA,kBAGAC,
KAIAC,KACAC,KAQaL,GAAP,MAAOM,CAAgB,CAC3B,YAAoBC,EAAgC,CAClD,KAAK,QAAUA,CACjB,CAGA,MAAM,IAAIC,EAAkBC,EAA+BC,EAAiB,CAC1EC,GAAgB,EAChB,IAAMC,EAA4C,CAAA,EAC9CC,EAAsB,CAAA,EAE1B,GAAI,OAAOL,GAAU,UAAYA,IAAU,MAAQA,aAAiBM,IAAU,MAAM,QAAQN,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIO,EAAiB,GAErB,GAAI,OAAON,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBK,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQL,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DM,EAAiB,GAEjB,QAAWC,KAAQP,EAAM,CACvB,GAAI,OAAOO,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAI,KAAK,YAAY,QAAQA,CAAI,IAAM,GACrC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEJ,EAAQI,CAAI,EAAI,KAGlB,GAAI,OAAON,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIO,EAAY,GACVC,EAAW,OAAO,oBAAoBT,CAAI,EAChD,QAAWO,KAAQ,KAAK,YACtB,GAAIE,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKV,EAA4DO,CAAI,GACvEG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBH,EAAQI,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOP,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDG,EAAUJ,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWO,KAAQ,KAAK,WACtB,GAAI,OAAOR,EAAMQ,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQ,KAAK,YACtBJ,EAAQI,CAAI,EAAI,KAMpB,IAAMI,EAAU,MAAM,KAAK,QAAQ,IAAIZ,EAAOI,EAASC,CAAO,EACxDQ,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAAC,GAAc,EACPH,CACT,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,CAOA,aAAa,OACTI,EAAyChB,EAA8BC,EACvEgB,EAAqB,CACvBf,GAAgB,EAEhB,IAAIgB,EACAd,EAA0B,CAAA,EAE9B,GAAI,OAAOY,GAAS,UAElB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7CgB,aAAgB,YAEzB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAGpDgB,aAAgB,aACf,OAAO,kBAAsB,KAAeA,aAAgB,kBAAoB,CACnF,IAAMG,EAASH,EACXI,EAAa,EACbC,EAAaL,EAAK,WACtB,GAAI,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,GAAS,SAAU,CAEnC,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,EAAa,GAAKA,GAAcD,EAAO,WACzC,MAAM,IAAI,WAAW,oCAAoCA,EAAO,UAAU,IAAI,EAGhF,GADAE,EAAaL,EAAK,WAAaI,EAC3B,OAAOnB,GAAS,SAAU,CAE5B,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,GAAc,GAAKD,EAAaC,EAAaF,EAAO,WACtD,MAAM,IAAI,WAAW,oCAAoCA,EAAO,WAAaC,CAAU,IAAI,EAE7F,GAAI,OAAOH,GAAS,UAAYA,IAAS,KACvCb,EAAUa,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7C,OAAOhB,EAAS,IACzB,MAAM,IAAI,UAAU,gCAAkC,UAE/C,OAAOD,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,EAEtDkB,EAAuB,IAAI,WAAWC,EAAQC,EAAYC,CAAU,MAEpE,OAAM,IAAI,UAAU,qDAAyD,EAI/E,GAAM,CAACC,EAASC,CAAuB,EAAI,MAAMC,GAAoCpB,CAAO,EACtFN,EAAU,MAAMwB,EAAQ,8BAA8BJ,EAAsBK,CAAuB,EACzG,OAAAR,GAAc,EACP,IAAIlB,EAAiBC,CAAO,CACrC,CAEA,gBAAc,CACZ,KAAK,QAAQ,eAAc,CAC7B,CACA,cAAY,CACV,KAAK,QAAQ,aAAY,CAC3B,CAEA,IAAI,YAAU,CACZ,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,aAAW,CACb,OAAO,KAAK,QAAQ,WACtB,KCxNF,IA8hBa2B,GA9hBbC,GAAAC,EAAA,kBAGAC,KA2hBaH,GAA4CA,KC9hBzD,IAAAI,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAgBMC,GAGOC,GAnBbC,GAAAC,EAAA,kBAGAC,KAIAC,KASML,GAA0B,gHAGnBC,GAAP,MAAOK,CAAe,CAC1B,YAAoBC,EAAiCC,EAA4BC,EAAqB,CACpG,KAAK,QAAUF,EACf,KAAK,kBAAoBC,EACzB,KAAK,aAAeC,CACtB,CAKA,IAAI,oBAAkB,CACpB,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,qBAAmB,CACrB,OAAO,KAAK,QAAQ,WACtB,CAEA,IAAI,gBAAc,CAChB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,eAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CACA,IAAI,iBAAe,C
ACjB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,gBAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CAEA,aAAa,OAAOC,EAA+CC,EAA+B,CAEhG,IAAMC,EAA+BF,EAAgB,WAAa,GAC5DG,EAAoCH,EAAgB,gBAAkB,GACtEI,EAA0BH,GAAkB,CAAA,EAG5C,CAACI,EAASC,CAAuB,EAAI,MAAMC,GAAoCH,CAAO,EAC5F,GAAIC,EAAQ,6BAA8B,CACxC,IAAMR,EAAU,MAAMQ,EAAQ,6BAC1BL,EAAgB,gBAAiBA,EAAgB,WAAYE,EAAWC,EACxEG,CAAuB,EAC3B,OAAO,IAAIV,EAAgBC,EAAS,CAAC,CAACG,EAAgB,eAAgB,CAAC,CAACA,EAAgB,SAAS,MAEjG,OAAM,IAAI,MAAMV,EAAe,CAEnC,CAeA,wBACIkB,EAA+BC,EAAgCC,EAAkBC,EACjFC,EAAiB,CACnB,IAAMC,EAA4C,CAAA,EAC9CT,EAAsB,CAAA,EAE1B,GAAI,OAAOM,GAAU,UAAYA,IAAU,MAAQA,aAAiBI,IAAU,MAAM,QAAQJ,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIK,EAAiB,GAErB,GAAI,OAAOJ,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBG,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQH,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DI,EAAiB,GAEjB,QAAWC,KAAQL,EAAM,CACvB,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAIP,EAAY,QAAQO,CAAI,IAAM,GAChC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEH,EAAQG,CAAI,EAAI,KAGlB,GAAI,OAAOJ,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIK,EAAY,GACVC,EAAW,OAAO,oBAAoBP,CAAI,EAChD,QAAWK,KAAQP,EACjB,GAAIS,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKR,EAAmDK,CAAI,GAC9DG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBF,EAAQG,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOL,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDR,EAAUO,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWK,KAAQR,EACjB,GAAI,OAAOE,EAAMM,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQP,EACjBI,EAAQG,CAAI,EAAI,KAIpB,MAAO,CAACH,EAAST,CAAO,CAC1B,CASA,uCAAuCgB,EAAkC,CACvE,IAAMC,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAOF,CACT,CAEA,MAAM,eAAa,CACjB,MAAM,KAAK,QAAQ,cAAa,CAClC,CAIA,MAAM,aAAaX,EAAkBC,EAA+BC,EAAiB,CACnF,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,mBAAoB,KAAK,oBAAqBM,EAAOC,EAAMC,CAAI,EAC/FQ,EAAU,MAAM,KAAK,QAAQ,aAAaV,EAAOG,EAAST,CAAO,EACvE,OAAO,KAAK,uCAAuCgB,CAAO,CAC5D,CAEA,MAAM,iBAAiBhB,EAA+C,CACpE,GAAI,KAAK,kBACP,MAAM,KAAK,QAAQ,iBAAiBA,GAAW,CAAA,CAAE,MAEjD,OAAM,IAAI,MAAM,oDAAoD,CAExE,CAIA,MAAM,YAAYM,EAAkBC,EAA+BC,EAAiB,CAClF,GAAI,KAAK,aAAc,CACrB,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,eAAgB,KAAK,gBAAiBM,EAAOC,EAAMC,CAAI,EACvFQ,EAAU,MAAM,KAAK,QAAQ,YAAYV,EAAOG,EAAST,CAAO,EACtE,OAAO,KAAK,uCAAuCgB,CAAO,MAE1D,OAAM,IAAI,MAAM,+CAA+C,CAEnE,CAEA,MAAM,kBAAkBI,EAAgB,GAAI,CAC1C,OAAO,KAAK,QAAQ,kBAAkBA,CAAa,CACrD,CAEA,MAAM,qBAAqBC,EAAmBD,EAAgB,GAAI,CAChE,IAAME,EAAa,MAAM,KAAK,kBAAkBF,CAAa,EAG7D,GAAIC,EAAM,SAAW,EAAIC,EACvB,MAAM,IAAI,MACN,qJAC0D,EAEhE,OAAO,KAAK,QAAQ,qBAAqBD,EAAOD,CAAa,CAC/D,CAEA,MAAM,wBAAwBA,EAAgB,GAAI,CAChD,OAAO,KAAK,QAAQ,wBAAwBA,CAAa,CAC3D,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,KCzPF,IAmMaG,GAnMbC,GAAAC,EAAA,kBAKAC,KA8LaH,GAA0CA,KCnMvD,IAAAI,GAAA,GAAAC,GAAAD,GAAA,sBAAAE,GAAA,UAAAC,GAAA,qBAAAC,GAAA,mBAAAC,GAAA,WAAAC,GAAA,oBAAAC,GAAA,QAAAC,GAAA,oBAAAC,KAAA,IAAAC,GAAAC,EAAA,kBAmBAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,OCmHA,SAASC,GACLC,EAA8BC,EAAeC,EAAsBC,EAA8C,CACnH,GAAIF,IAAS,OAEX,OAAOG,GAAwBJ,CAAI,EAC9B,GAAIE,IAAS,OAElBG,GAAYL,EAAyBC,EAAM,CAAC,UACnC,OAAOC,GAAS,UAAYC,IAAS,OAE9CE,GAAYL,EAAyBC,EAAMC,CAAI,UACtC,OAAOA,GAAS,UAAYC,IAAS,OAE9CE,GAAYL,EAAyBE,EAAM,EAAGD,CAAI,UACzC,OAAOC,GAAS,UAAY,OAAOC,GAAS,SAErDE,GAAYL,EAAyBE,EAAMC,EAAMF,CAAI,MAErD,OAAM,IAAI,UAAU,gBAAgB,CAExC,CAEA,SAASG,GAAwBE,EAA4C,CAC3E,MA
AO,CACL,QAASP,GAAI,QAAQ,KAAK,KAAMO,CAAQ,EACxC,KAAMP,GAAI,KAAK,KAAK,KAAMO,CAAQ,EAClC,QAASP,GAAI,QAAQ,KAAK,KAAMO,CAAQ,EACxC,MAAOP,GAAI,MAAM,KAAK,KAAMO,CAAQ,EACpC,MAAOP,GAAI,MAAM,KAAK,KAAMO,CAAQ,CACtC,CACF,CAKA,SAASD,GAAYE,EAA2BC,EAAiBC,EAAgBH,EAAmB,CAClG,IAAMI,EAASC,GAAkBL,GAAY,EAAE,GAAKK,GAAkB,EAAE,EACpEC,GAAeL,CAAQ,EAAIK,GAAeF,EAAO,eAAe,IAIhEA,EAAO,cACTF,EAAU,GAAG,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIA,CAAO,IAG9CE,EAAO,kBAIXG,GAAoBH,EAAO,QAAQ,EAAE,IAAIH,EAAUC,EAASF,CAAQ,EACtE,CAjMA,IAyFMQ,GAKAC,GAwBAH,GAQAC,GAIAG,GAMFL,GAsHSM,GAkBPC,GAmBAC,GAKOC,GAsJAC,GA9bbC,GAAAC,EAAA,kBAyFMT,GAAN,KAAmD,CACjD,IAAIU,EAA4BC,EAAkBC,EAAoB,CAEtE,CACF,EACMX,GAAN,KAAsD,CACpD,IAAIR,EAA2BC,EAAiBF,EAAmB,CAEjE,QAAQ,IAAI,GAAG,KAAK,MAAMC,CAAQ,CAAC,IAAID,EAAW,WAAaA,EAAW,WAAa,EAAE,GAAGE,CAAO,EAAE,CACvG,CAEQ,MAAMD,EAA2B,CACvC,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,sBACT,IAAK,OACH,MAAO,mBACT,IAAK,UACH,MAAO,sBACT,IAAK,QACH,MAAO,sBACT,IAAK,QACH,MAAO,oBACT,QACE,MAAM,IAAI,MAAM,yBAAyBA,CAAQ,EAAE,CACvD,CACF,CACF,EAEMK,GAAiB,CACrB,QAAS,IACT,KAAM,IACN,QAAS,IACT,MAAO,IACP,MAAO,GACT,EAEMC,GAA+E,CAClF,KAAS,IAAIC,GACb,QAAY,IAAIC,EACnB,EACMC,GAAwB,CAC5B,SAAU,UACV,gBAAiB,UACjB,YAAa,GACb,kBAAmB,EACrB,EACIL,GAC0D,CAAE,GAAKK,EAAgD,GA2D3GjB,GAAV,CAGS,SAAS4B,EAAQ3B,EAAcC,EAAe,CACnDF,EAAI,UAAWC,EAAMC,CAAI,CAC3B,CAFOF,EAAS,QAAA4B,EAKT,SAASC,EAAK5B,EAAcC,EAAe,CAChDF,EAAI,OAAQC,EAAMC,CAAI,CACxB,CAFOF,EAAS,KAAA6B,EAKT,SAASC,EAAQ7B,EAAcC,EAAe,CACnDF,EAAI,UAAWC,EAAMC,CAAI,CAC3B,CAFOF,EAAS,QAAA8B,EAKT,SAASC,EAAM9B,EAAcC,EAAe,CACjDF,EAAI,QAASC,EAAMC,CAAI,CACzB,CAFOF,EAAS,MAAA+B,EAKT,SAASC,EAAM/B,EAAcC,EAAe,CACjDF,EAAI,QAASC,EAAMC,CAAI,CACzB,CAFOF,EAAS,MAAAgC,EAIT,SAASC,EAAMtB,EAA8B,CAClDC,GAAoB,CAAC,EACrBsB,EAAI,GAAIvB,GAAU,CAAC,CAAC,CACtB,CAHOX,EAAS,MAAAiC,EAIT,SAASC,EAAI3B,EAAkBI,EAA6B,CACjE,GAAIJ,IAAa,IACf0B,EAAMtB,CAAM,MACP,CACL,IAAMwB,EAAiBvB,GAAkBL,CAAQ,GAAKU,GACtDL,GAAkBL,CAAQ,EAAI,CAC5B,SAAUI,EAAO,UAAYwB,EAAe,SAC5C,gBAAiBxB,EAAO,iBAAmBwB,EAAe,gBAC1D,YAAcxB,EAAO,cAAgB,OAAawB,EAAe,YAAcxB,EAAO,YACtF,kBAAoBA,EAAO,oBAAsB,OAAawB,EAAe,kBACfxB,EAAO,iBACvE,CACF,CAGF,CAfOX,EAAS,IAAAkC,EAiBT,SAASE,EAAWC,EAAgB,CACzC,IAAM1B,EAAwB,CAAC,EAC3B0B,EAAI,WACN1B,EAAO,gBAAkB0B,EAAI,UAE/BH,EAAI,GAAIvB,CAAM,CAChB,CANOX,EAAS,WAAAoC,IAhDRpC,KAAA,IA0DGkB,GAAiBlB,GAkBxBmB,GAAN,KAAsC,CACpC,YACWZ,EAAyC+B,EAAqBC,EAC7DC,EAAsDC,EAA2BC,EAAoB,CADtG,cAAAnC,EAAyC,UAAA+B,EAAqB,eAAAC,EAC7D,iBAAAC,EAAsD,WAAAC,EAA2B,SAAAC,CAAqB,CAElH,MAAM,KAAM,CACV,OAAO,KAAK,YAAY,IAAI,CAC9B,CAEA,MAAM,YAA8B,CAClC,GAAI,KAAK,MAAQ,QAAa,KAAK,QAAU,OAC3C,MAAM,IAAI,MAAM,sBAAsB,EAEtC,YAAK,IAAI,SAAS,EACX,KAAK,IAAI,uBAAuB,KAAK,KAAK,CAErD,CACF,EAEMtB,GAAN,KAAkB,CAChB,YACWb,EAAyC+B,EAAqBC,EAA0BI,EAAiB,CAAzG,cAAApC,EAAyC,UAAA+B,EAAqB,eAAAC,EAA0B,aAAAI,CAAkB,CACvH,EAEatB,GAAN,KAAe,CAQZ,YAAYuB,EAA0BC,EAAyBC,EAAsC,CA+H7G,KAAQ,SAAW,GASnB,KAAQ,cAAgB,EAvItB,KAAK,SAAW,GAChB,KAAK,iBAAmBF,IAAoB,OAAY,IAAQA,EAChE,KAAK,gBAAkBC,IAAmB,OAAY,GAAKA,EAC3D,KAAK,6BAA+BC,IAAgC,OAAY,IAAOA,CACzF,CAZA,OAAO,OAAOnC,EAAoC,CAChD,OAAIA,IAAW,OACN,IAAI,KAEN,IAAI,KAAKA,EAAO,gBAAiBA,EAAO,eAAgBA,EAAO,2BAA2B,CACnG,CAUA,OAAQ,CACN,KAAK,SAAW,GAChB,KAAK,cAAgB,CAAC,EACtB,KAAK,WAAaW,GAAI,EACtB,KAAK,cAAgB,CACvB,CAGA,MAAO,CAEL,IADA,KAAK,SAAW,GACT,KAAK,cAAgB,KAAK,cAAc,OAAQ,KAAK,gBAC1D,KAAK,YAAY,KAAK,cAAc,KAAK,aAAa,CAAC,CAE3D,CAMA,MAASf,EAAkC+B,EAAcS,EAA4BL,EACrE,CACd,IAAMM,EAAQ,KAAK,SAAW,KAAK,MAAMzC,EAAU+B,EAAMI,CAAG,EAAI,OAC5DO,EAAY,GAEVC,EAAMH,EAAK,EAGjB,GAAIG,GAAO,OAAQA,EAAmB,MAAS,WAC7C,OAAAD,EAAY,GACL,IAAI,QAAW,CAACE,EAASC,IAAW,CACxCF,EACI,KACG,MAAMG,GAAS,CACTL,GACF,MAAMA,EAAM,IAAI,EAElBG,EAAQE,CAAK,CACf,EACA,MAAMC,GAAU,CACVN,GACF,MAAMA,EAAM,IAAI,EAElBI,EAAOE,CAAM,C
ACf,CAAC,CACX,CAAC,EAEH,GAAI,CAACL,GAAaD,EAAO,CACvB,IAAMO,EAAWP,EAAM,IAAI,EAC3B,GAAIO,GAAY,OAAOA,EAAS,MAAS,WACvC,OAAO,IAAI,QAAW,CAACJ,EAASC,IAAW,CACxCG,EAAU,KACP,IAAM,CACJJ,EAAQD,CAAG,CACb,EACCI,GAAW,CACVF,EAAOE,CAAM,CACf,CAAC,CACP,CAAC,CAEL,CACA,OAAOJ,CACT,CAGA,MAAM3C,EAAkC+B,EAAcI,EAA2B,CAC/E,GAAI,CAAC,KAAK,SACR,MAAM,IAAI,MAAM,6BAA6B,EAE/C,GAAIA,IAAQ,OAAW,CACrB,IAAMH,EAAYjB,GAAI,EACtB,YAAK,MAAMiB,CAAS,EACb,IAAIpB,GAAMZ,EAAU+B,EAAMC,EAAWiB,GAAK,KAAK,QAAQA,CAAC,CAAC,CAClE,KAAO,CACL,IAAMf,EAAoBC,EAAI,WAAW,EACzC,OAAO,IAAIvB,GAAMZ,EAAU+B,EAAM,EAAG,MAAMkB,GAAK,KAAK,IAAIA,CAAC,EAAGf,EAAOC,CAAG,CACxE,CACF,CAGA,MAAc,IAAIM,EAA6B,CAC7C,IAAML,EAAkB,MAAMK,EAAM,WAAW,EAC3C,KAAK,cAAc,OAAS,KAAK,mBACnC,KAAK,cAAc,KAAK,IAAI5B,GAAY4B,EAAM,SAAUA,EAAM,KAAMA,EAAM,UAAWL,CAAO,CAAC,EAC7F,KAAK,MAAMA,CAAO,EAEtB,CAEQ,QAAQK,EAAoB,CAClC,IAAML,EAAkBrB,GAAI,EACxB,KAAK,cAAc,OAAS,KAAK,mBACnC,KAAK,cAAc,KAAK,IAAIF,GAAY4B,EAAM,SAAUA,EAAM,KAAMA,EAAM,UAAWL,CAAO,CAAC,EAC7F,KAAK,MAAMA,CAAO,EAEtB,CAEQ,YAAYK,EAAoB,CACtC9B,GAAO,QACH,YAAY8B,EAAM,QAAQ,GAC1B,IAAIA,EAAM,QAAUA,EAAM,WAAW,QAAQ,CAAC,CAAC,gBAAgBA,EAAM,IAAI,QAAQA,EAAM,QAAQ,QAAQ,CAAC,CAAC,EAAE,CACjH,CAEQ,MAAMS,EAAqB,CACjC,GAAI,KAAK,cAAc,OAAS,KAAK,eAAiB,KAAK,iBACvDA,EAAc,KAAK,YAAc,KAAK,6BAA8B,CAGtE,QAAWC,EAAkB,KAAK,cAAe,KAAK,cAAgBA,EAAkB,KAAK,iBACxF,KAAK,cAAgB,KAAK,cAAc,OACxC,KAAK,gBACR,KAAK,YAAY,KAAK,cAAc,KAAK,aAAa,CAAC,EAGzD,KAAK,WAAapC,GAAI,CACxB,CACF,CAEA,IAAI,SAAU,CACZ,OAAO,KAAK,QACd,CAWF,EAKaA,GAAO,OAAO,YAAgB,KAAe,YAAY,IAAO,IAAM,YAAY,IAAI,EAAI,KAAK,MCtarG,SAASqC,GAAgBC,EAAkBC,EAA0BC,EAAqC,CAC/G,QAAWC,KAAQD,EAAO,CACxB,IAAME,EAASD,EAAK,CAAC,EACfE,EAASF,EAAK,CAAC,EACfG,EAAkBH,EAAK,CAAC,EACxBI,EAASJ,EAAK,CAAC,EACfK,EAASL,EAAK,CAAC,EAErB,GAAIH,EAAK,SAAWI,GAClB,QAAWK,KAASR,EAElB,IAAIQ,EAAM,SAAWJ,GAAWI,EAAM,SAAW,WAAaJ,IAAW,KACnEK,GAAcD,EAAM,QAASH,CAAe,EAC9C,MAAO,CAAC,OAAAC,EAAQ,OAAAC,CAAM,EAKhC,CAEA,MAAM,IAAI,UAAU,4BAA4BR,EAAK,MAAM,kBACvDC,EAAO,IAAIU,GAAO,GAAGA,EAAI,QAAU,SAAS,KAAKA,EAAI,OAAO,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,CAClF,CAEA,SAASD,GAAcE,EAAiBC,EAA2B,CACjE,GAAIA,EAAS,SAAS,GAAG,EAAG,CAE1B,IAAMC,EAAa,OAAO,SAASD,EAAS,UAAU,EAAGA,EAAS,OAAS,CAAC,EAAG,EAAE,EACjF,MAAO,CAAC,MAAMC,CAAU,GAAKA,GAAcF,CAC7C,SAAWC,EAAS,MAAM,GAAG,EAAE,SAAW,EAAG,CAE3C,IAAME,EAAOF,EAAS,MAAM,GAAG,EACzBC,EAAa,OAAO,SAASC,EAAK,CAAC,EAAG,EAAE,EACxCC,EAAW,OAAO,SAASD,EAAK,CAAC,EAAG,EAAE,EAC5C,MAAO,CAAC,MAAMD,CAAU,GAAK,CAAC,MAAME,CAAQ,GAAKF,GAAcF,GAAWA,GAAWI,CACvF,KAEE,QAAO,OAAO,SAASH,EAAU,EAAE,IAAMD,CAE7C,CA/DA,IAAAK,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,GAAAC,IAAA,cACAA,GAAQ,WAAa,GACrB,IAAIC,GAAsB,UAAY,CAClC,SAASA,EAAKC,EAAM,CAChB,GAAI,CAACA,EACD,MAAM,IAAI,UAAU,yCAAyC,EAEjE,KAAK,MAAQD,EAAK,MACdC,GAAQD,EAAK,OAAOC,CAAI,IACxB,KAAK,MAAQA,EAErB,CACA,OAAAD,EAAK,OAAS,SAAUC,EAAM,CAC1B,IAAIC,EAAQD,EAAK,SAAS,EAC1B,OAAOA,IAASA,aAAgBD,GAAQA,EAAK,UAAU,KAAKE,CAAK,EACrE,EACAF,EAAK,OAAS,UAAY,CACtB,OAAO,IAAIA,EAAK,CAACA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC,CAC/F,EACAA,EAAK,YAAc,UAAY,CAC3B,OAAO,IAAIA,EAAK,WAAW,CAC/B,EACAA,EAAK,MAAQ,SAAUC,EAAM,CACzB,OAAO,IAAID,EAAKC,CAAI,CACxB,EACAD,EAAK,IAAM,UAAY,CACnB,MAAO,CAACA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,CAAC,EAAE,KAAK,GAAG,CACrF,EACAA,EAAK,IAAM,SAAUG,EAAO,CAExB,QADIC,EAAM,GACDC,EAAI,EAAGA,EAAIF,EAAOE,IAEvBD,KAAU,EAAI,KAAK,OAAO,GAAK,MAAW,GAAG,SAAS,EAAE,EAAE,UAAU,CAAC,EAEzE,OAAOA,CACX,EACAJ,EAAK,UAAU,OAAS,SAAUM,EAAO,CAGrC,OAAON,EAAK,OAAOM,CAAK,GAAK,KAAK,QAAUA,EAAM,SAAS,CAC/D,EACAN,EAAK,UAAU,QAAU,UAAY,CACjC,OAAO,KAAK,QAAUA,EAAK,KAC/B,EACAA,EAAK,UAAU,S
AAW,UAAY,CAClC,OAAO,KAAK,KAChB,EACAA,EAAK,UAAU,OAAS,UAAY,CAChC,MAAO,CACH,MAAO,KAAK,KAChB,CACJ,EACAA,EAAK,UAAY,IAAI,OAAO,iEAAkE,GAAG,EACjGA,EAAK,MAAQ,uCACNA,CACX,EAAE,EACFD,GAAQ,KAAOC,KChBf,SAASO,GAAKC,EAAKC,EAAMC,EAAU,CAMjC,KAAK,IAAMF,EAAM,EAMjB,KAAK,KAAOC,EAAO,EAMnB,KAAK,SAAW,CAAC,CAACC,CACpB,CAmCA,SAASC,GAAOC,EAAK,CACnB,OAAQA,GAAOA,EAAI,cAAmB,EACxC,CAQA,SAASC,GAAMC,EAAO,CACpB,IAAIC,EAAI,KAAK,MAAMD,EAAQ,CAACA,CAAK,EACjC,OAAOA,EAAQ,GAAKC,EAAIA,CAC1B,CA8BA,SAASC,GAAQF,EAAOJ,EAAU,CAChC,IAAIE,EAAKK,EAAWC,EACpB,OAAIR,GACFI,KAAW,GACPI,EAAS,GAAKJ,GAASA,EAAQ,OACjCG,EAAYE,GAAWL,CAAK,EACxBG,GACKA,GAEXL,EAAMQ,GAASN,EAAO,EAAG,EAAI,EACzBI,IACFC,GAAWL,CAAK,EAAIF,GACfA,KAEPE,GAAS,GACLI,EAAS,MAAQJ,GAASA,EAAQ,OACpCG,EAAYI,GAAUP,CAAK,EACvBG,GACKA,GAEXL,EAAMQ,GAASN,EAAOA,EAAQ,EAAI,GAAK,EAAG,EAAK,EAC3CI,IACFG,GAAUP,CAAK,EAAIF,GACdA,GAEX,CAiBA,SAASU,GAAWR,EAAOJ,EAAU,CACnC,GAAI,MAAMI,CAAK,EACb,OAAOJ,EAAWa,GAAQC,GAC5B,GAAId,EAAU,CACZ,GAAII,EAAQ,EACV,OAAOS,GACT,GAAIT,GAASW,GACX,OAAOC,EACX,KAAO,CACL,GAAIZ,GAAS,CAACa,GACZ,OAAOC,GACT,GAAId,EAAQ,GAAKa,GACf,OAAOE,EACX,CACA,OAAIf,EAAQ,EACHQ,GAAW,CAACR,EAAOJ,CAAQ,EAAE,IAAI,EACnCU,GAAUN,EAAQgB,GAAkB,EAAIhB,EAAQgB,GAAkB,EAAGpB,CAAQ,CACtF,CAkBA,SAASU,GAASW,EAASC,EAAUtB,EAAU,CAC7C,OAAO,IAAIH,GAAKwB,EAASC,EAAUtB,CAAQ,CAC7C,CA6BA,SAASuB,GAAWC,EAAKxB,EAAUyB,EAAO,CACxC,GAAID,EAAI,SAAW,EACjB,MAAM,MAAM,cAAc,EAQ5B,GAPI,OAAOxB,GAAa,UAEtByB,EAAQzB,EACRA,EAAW,IAEXA,EAAW,CAAC,CAACA,EAEXwB,IAAQ,OAASA,IAAQ,YAAcA,IAAQ,aAAeA,IAAQ,YACxE,OAAOxB,EAAWa,GAAQC,GAE5B,GADAW,EAAQA,GAAS,GACbA,EAAQ,GAAK,GAAKA,EACpB,MAAM,WAAW,OAAO,EAE1B,IAAIC,EACJ,IAAKA,EAAIF,EAAI,QAAQ,GAAG,GAAK,EAC3B,MAAM,MAAM,iBAAiB,EAC1B,GAAIE,IAAM,EACb,OAAOH,GAAWC,EAAI,UAAU,CAAC,EAAGxB,EAAUyB,CAAK,EAAE,IAAI,EAQ3D,QAHIE,EAAef,GAAWgB,GAAQH,EAAO,CAAC,CAAC,EAE3CI,EAASf,GACJgB,EAAI,EAAGA,EAAIN,EAAI,OAAQM,GAAK,EAAG,CACtC,IAAIC,EAAO,KAAK,IAAI,EAAGP,EAAI,OAASM,CAAC,EACnC1B,EAAQ,SAASoB,EAAI,UAAUM,EAAGA,EAAIC,CAAI,EAAGN,CAAK,EACpD,GAAIM,EAAO,EAAG,CACZ,IAAIC,EAAQpB,GAAWgB,GAAQH,EAAOM,CAAI,CAAC,EAC3CF,EAASA,EAAO,IAAIG,CAAK,EAAE,IAAIpB,GAAWR,CAAK,CAAC,CAClD,MACEyB,EAASA,EAAO,IAAIF,CAAY,EAChCE,EAASA,EAAO,IAAIjB,GAAWR,CAAK,CAAC,CAEzC,CACA,OAAAyB,EAAO,SAAW7B,EACX6B,CACT,CAmBA,SAASI,GAAUC,EAAKlC,EAAU,CAChC,OAAI,OAAOkC,GAAQ,SACVtB,GAAWsB,EAAKlC,CAAQ,EAC7B,OAAOkC,GAAQ,SACVX,GAAWW,EAAKlC,CAAQ,EAE1BU,GAASwB,EAAI,IAAKA,EAAI,KAAM,OAAOlC,GAAa,UAAYA,EAAWkC,EAAI,QAAQ,CAC5F,CAxTA,IAqBIC,GAqGAxB,GAOAF,GA2GAmB,GA+FAQ,GAOAC,GAOAjB,GAOAL,GAOAE,GAOAqB,GAMAxB,GAYAD,GAYA0B,GAYAC,GAYAC,GAYAtB,GAYAH,GAYAE,GAYAwB,EAs+BGC,GA17CPC,GAAAC,EAAA,KAqBIV,GAAO,KACX,GAAI,CACFA,GAAO,IAAI,YAAY,SAAS,IAAI,YAAY,OAAO,IAAI,WAAW,CACpE,EAAG,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,IAAK,IAAK,IAAK,EAAG,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,IAAK,EAAG,GAAI,EAAG,GAAI,EAAG,GAAI,EAAG,EAAG,IAAK,IAAK,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,IAAK,IAAK,IAAK,EAAG,EAAG,GAAI,IAAK,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EA
AG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,EAC5nC,CAAC,CAAC,EAAG,CAAC,CAAC,EAAE,OACX,MAAY,CAEZ,CAwDAtC,GAAK,UAAU,WAEf,OAAO,eAAeA,GAAK,UAAW,aAAc,CAAE,MAAO,EAAK,CAAC,EA6BnEA,GAAK,OAASI,GAOVU,GAAY,CAAC,EAObF,GAAa,CAAC,EA0ClBZ,GAAK,QAAUS,GAkCfT,GAAK,WAAae,GAsBlBf,GAAK,SAAWa,GASZkB,GAAU,KAAK,IA4DnB/B,GAAK,WAAa0B,GAyBlB1B,GAAK,UAAYoC,GAUbG,GAAiB,MAOjBC,GAAiB,GAAK,GAOtBjB,GAAiBgB,GAAiBA,GAOlCrB,GAAiBK,GAAiBA,GAOlCH,GAAiBF,GAAiB,EAOlCuB,GAAahC,GAAQ+B,EAAc,EAMnCvB,GAAOR,GAAQ,CAAC,EAMpBT,GAAK,KAAOiB,GAMRD,GAAQP,GAAQ,EAAG,EAAI,EAM3BT,GAAK,MAAQgB,GAMT0B,GAAMjC,GAAQ,CAAC,EAMnBT,GAAK,IAAM0C,GAMPC,GAAOlC,GAAQ,EAAG,EAAI,EAM1BT,GAAK,KAAO2C,GAMRC,GAAUnC,GAAQ,EAAE,EAMxBT,GAAK,QAAU4C,GAMXtB,GAAYT,GAAS,GAAgB,WAAgB,EAAK,EAM9Db,GAAK,UAAYsB,GAMbH,GAAqBN,GAAS,GAAgB,GAAgB,EAAI,EAMtEb,GAAK,mBAAqBmB,GAMtBE,GAAYR,GAAS,EAAG,YAAgB,EAAK,EAMjDb,GAAK,UAAYqB,GAMbwB,EAAgB7C,GAAK,UAOzB6C,EAAc,MAAQ,UAAiB,CACrC,OAAO,KAAK,SAAW,KAAK,MAAQ,EAAI,KAAK,GAC/C,EAOAA,EAAc,SAAW,UAAoB,CAC3C,OAAI,KAAK,UACE,KAAK,OAAS,GAAKtB,IAAmB,KAAK,MAAQ,GACvD,KAAK,KAAOA,IAAkB,KAAK,MAAQ,EACpD,EAUAsB,EAAc,SAAW,SAAkBjB,EAAO,CAEhD,GADAA,EAAQA,GAAS,GACbA,EAAQ,GAAK,GAAKA,EACpB,MAAM,WAAW,OAAO,EAC1B,GAAI,KAAK,OAAO,EACd,MAAO,IACT,GAAI,KAAK,WAAW,EAClB,GAAI,KAAK,GAAGP,EAAS,EAAG,CAGtB,IAAI4B,EAAYlC,GAAWa,CAAK,EAC9BsB,EAAM,KAAK,IAAID,CAAS,EACxBE,EAAOD,EAAI,IAAID,CAAS,EAAE,IAAI,IAAI,EACpC,OAAOC,EAAI,SAAStB,CAAK,EAAIuB,EAAK,MAAM,EAAE,SAASvB,CAAK,CAC1D,KACE,OAAO,IAAM,KAAK,IAAI,EAAE,SAASA,CAAK,EAQ1C,QAHIE,EAAef,GAAWgB,GAAQH,EAAO,CAAC,EAAG,KAAK,QAAQ,EAC5DwB,EAAM,KACJpB,EAAS,KACA,CACX,IAAIqB,EAASD,EAAI,IAAItB,CAAY,EAC/BwB,EAASF,EAAI,IAAIC,EAAO,IAAIvB,CAAY,CAAC,EAAE,MAAM,IAAM,EACvDyB,EAASD,EAAO,SAAS1B,CAAK,EAEhC,GADAwB,EAAMC,EACFD,EAAI,OAAO,EACb,OAAOG,EAASvB,EAEhB,KAAOuB,EAAO,OAAS,GACrBA,EAAS,IAAMA,EACjBvB,EAAS,GAAKuB,EAASvB,CAE3B,CACF,EAOAa,EAAc,YAAc,UAAuB,CACjD,OAAO,KAAK,IACd,EAOAA,EAAc,oBAAsB,UAA+B,CACjE,OAAO,KAAK,OAAS,CACvB,EAOAA,EAAc,WAAa,UAAsB,CAC/C,OAAO,KAAK,GACd,EAOAA,EAAc,mBAAqB,UAA8B,CAC/D,OAAO,KAAK,MAAQ,CACtB,EAOAA,EAAc,cAAgB,UAAyB,CACrD,GAAI,KAAK,WAAW,EAClB,OAAO,KAAK,GAAGxB,EAAS,EAAI,GAAK,KAAK,IAAI,EAAE,cAAc,EAE5D,QADIgB,EAAM,KAAK,MAAQ,EAAI,KAAK,KAAO,KAAK,IACnCmB,EAAM,GAAIA,EAAM,GAClB,EAAAnB,EAAO,GAAKmB,GADSA,IAC1B,CAEF,OAAO,KAAK,MAAQ,EAAIA,EAAM,GAAKA,EAAM,CAC3C,EAOAX,EAAc,OAAS,UAAkB,CACvC,OAAO,KAAK,OAAS,GAAK,KAAK,MAAQ,CACzC,EAMAA,EAAc,IAAMA,EAAc,OAOlCA,EAAc,WAAa,UAAsB,CAC/C,MAAO,CAAC,KAAK,UAAY,KAAK,KAAO,CACvC,EAOAA,EAAc,WAAa,UAAsB,CAC/C,OAAO,KAAK,UAAY,KAAK,MAAQ,CACvC,EAOAA,EAAc,MAAQ,UAAiB,CACrC,OAAQ,KAAK,IAAM,KAAO,CAC5B,EAOAA,EAAc,OAAS,UAAkB,CACvC,OAAQ,KAAK,IAAM,KAAO,CAC5B,EAQAA,EAAc,OAAS,SAAgBY,EAAO,CAG5C,OAFKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GACrB,KAAK,WAAaA,EAAM,UAAa,KAAK,OAAS,KAAQ,GAAMA,EAAM,OAAS,KAAQ,EACnF,GACF,KAAK,OAASA,EAAM,MAAQ,KAAK,MAAQA,EAAM,GACxD,EAQAZ,EAAc,GAAKA,EAAc,OAQjCA,EAAc,UAAY,SAAmBY,EAAO,CAClD,MAAO,CAAC,KAAK,GAAmBA,CAAK,CACvC,EAQAZ,EAAc,IAAMA,EAAc,UAQlCA,EAAc,GAAKA,EAAc,UAQjCA,EAAc,SAAW,SAAkBY,EAAO,CAChD,OAAO,KAAK,KAAqBA,CAAK,EAAI,CAC5C,EAQAZ,EAAc,GAAKA,EAAc,SAQjCA,EAAc,gBAAkB,SAAyBY,EAAO,CAC9D,OAAO,KAAK,KAAqBA,CAAK,GAAK,CAC7C,EAQAZ,EAAc,IAAMA,EAAc,gBAQlCA,EAAc,GAAKA,EAAc,gBAQjCA,EAAc,YAAc,SAAqBY,EAAO,CACtD,OAAO,KAAK,KAAqBA,CAAK,
EAAI,CAC5C,EAQAZ,EAAc,GAAKA,EAAc,YAQjCA,EAAc,mBAAqB,SAA4BY,EAAO,CACpE,OAAO,KAAK,KAAqBA,CAAK,GAAK,CAC7C,EAQAZ,EAAc,IAAMA,EAAc,mBAQlCA,EAAc,GAAKA,EAAc,mBASjCA,EAAc,QAAU,SAAiBY,EAAO,CAG9C,GAFKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GACrB,KAAK,GAAGA,CAAK,EACf,MAAO,GACT,IAAIC,EAAU,KAAK,WAAW,EAC5BC,EAAWF,EAAM,WAAW,EAC9B,OAAIC,GAAW,CAACC,EACP,GACL,CAACD,GAAWC,EACP,EAEJ,KAAK,SAGFF,EAAM,OAAS,EAAM,KAAK,OAAS,GAAOA,EAAM,OAAS,KAAK,MAASA,EAAM,MAAQ,EAAM,KAAK,MAAQ,EAAM,GAAK,EAFlH,KAAK,IAAIA,CAAK,EAAE,WAAW,EAAI,GAAK,CAG/C,EASAZ,EAAc,KAAOA,EAAc,QAOnCA,EAAc,OAAS,UAAkB,CACvC,MAAI,CAAC,KAAK,UAAY,KAAK,GAAGxB,EAAS,EAC9BA,GACF,KAAK,IAAI,EAAE,IAAIqB,EAAG,CAC3B,EAOAG,EAAc,IAAMA,EAAc,OAQlCA,EAAc,IAAM,SAAae,EAAQ,CAClCxD,GAAOwD,CAAM,IAChBA,EAASxB,GAAUwB,CAAM,GAI3B,IAAIC,EAAM,KAAK,OAAS,GACpBC,EAAM,KAAK,KAAO,MAClBC,EAAM,KAAK,MAAQ,GACnBC,EAAM,KAAK,IAAM,MAEjBC,EAAML,EAAO,OAAS,GACtBM,EAAMN,EAAO,KAAO,MACpBO,EAAMP,EAAO,MAAQ,GACrBQ,EAAMR,EAAO,IAAM,MAEnBS,EAAM,EAAGC,EAAM,EAAGC,EAAM,EAAGC,EAAM,EACrC,OAAAA,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMI,EACbI,GAAO,MACAxD,GAAU0D,GAAO,GAAMC,EAAMH,GAAO,GAAMC,EAAK,KAAK,QAAQ,CACrE,EAQAzB,EAAc,SAAW,SAAkB4B,EAAY,CACrD,OAAKrE,GAAOqE,CAAU,IACpBA,EAAarC,GAAUqC,CAAU,GAC5B,KAAK,IAAIA,EAAW,IAAI,CAAC,CAClC,EAQA5B,EAAc,IAAMA,EAAc,SAQlCA,EAAc,SAAW,SAAkB6B,EAAY,CACrD,GAAI,KAAK,OAAO,EACd,OAAO,KAKT,GAJKtE,GAAOsE,CAAU,IACpBA,EAAatC,GAAUsC,CAAU,GAG/BpC,GAAM,CACR,IAAIrC,EAAMqC,GAAK,IAAO,KAAK,IACzB,KAAK,KACLoC,EAAW,IACXA,EAAW,IAAI,EACjB,OAAO7D,GAASZ,EAAKqC,GAAK,SAAY,EAAG,KAAK,QAAQ,CACxD,CAEA,GAAIoC,EAAW,OAAO,EACpB,OAAO,KAAK,SAAW1D,GAAQC,GACjC,GAAI,KAAK,GAAGI,EAAS,EACnB,OAAOqD,EAAW,MAAM,EAAIrD,GAAYJ,GAC1C,GAAIyD,EAAW,GAAGrD,EAAS,EACzB,OAAO,KAAK,MAAM,EAAIA,GAAYJ,GAEpC,GAAI,KAAK,WAAW,EAClB,OAAIyD,EAAW,WAAW,EACjB,KAAK,IAAI,EAAE,IAAIA,EAAW,IAAI,CAAC,EAE/B,KAAK,IAAI,EAAE,IAAIA,CAAU,EAAE,IAAI,EACnC,GAAIA,EAAW,WAAW,EAC/B,OAAO,KAAK,IAAIA,EAAW,IAAI,CAAC,EAAE,IAAI,EAGxC,GAAI,KAAK,GAAGjC,EAAU,GAAKiC,EAAW,GAAGjC,EAAU,EACjD,OAAO1B,GAAW,KAAK,SAAS,EAAI2D,EAAW,SAAS,EAAG,KAAK,QAAQ,EAK1E,IAAIb,EAAM,KAAK,OAAS,GACpBC,EAAM,KAAK,KAAO,MAClBC,EAAM,KAAK,MAAQ,GACnBC,EAAM,KAAK,IAAM,MAEjBC,EAAMS,EAAW,OAAS,GAC1BR,EAAMQ,EAAW,KAAO,MACxBP,EAAMO,EAAW,MAAQ,GACzBN,EAAMM,EAAW,IAAM,MAEvBL,EAAM,EAAGC,EAAM,EAAGC,EAAM,EAAGC,EAAM,EACrC,OAAAA,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMK,EACbE,GAAOC,IAAQ,GACfA,GAAO,MACPA,GAAOP,EAAMG,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMM,EACbC,GAAOC,IAAQ,GACfA,GAAO,MACPA,GAAOP,EAAMI,EACbE,GAAOC,IAAQ,GACfA,GAAO,MACPA,GAAON,EAAME,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMO,EAAMN,EAAMK,EAAMJ,EAAMG,EAAMF,EAAMC,EACjDI,GAAO,MACAxD,GAAU0D,GAAO,GAAMC,EAAMH,GAAO,GAAMC,EAAK,KAAK,QAAQ,CACrE,EAQAzB,EAAc,IAAMA,EAAc,SASlCA,EAAc,OAAS,SAAgB8B,EAAS,CAG9C,GAFKvE,GAAOuE,CAAO,IACjBA,EAAUvC,GAAUuC,CAAO,GACzBA,EAAQ,OAAO,EACjB,MAAM,MAAM,kBAAkB,EAGhC,GAAIrC,GAAM,CAIR,GAAI,CAAC,KAAK,UACR,KAAK,OAAS,aACdqC,EAAQ,MAAQ,IAAMA,EAAQ,OAAS,GAEvC,OAAO,KAET,IAAI1E,GAAO,KAAK,SAAWqC,GAAK,MAAWA,GAAK,OAC9C,KAAK,IACL,KAAK,KACLqC,EAAQ,IACRA,EAAQ,IACV,EACA,OAAO9D,GAASZ,EAAKqC,GAAK,SAAY,EAAG,KAAK,QAAQ,CACxD,CAEA,GAAI,KAAK,OAAO,EACd,OAAO,KAAK,SAAWtB,GAAQC,GACjC,IAAI2D,EAAQxB,EAAKyB,EACjB,GAAK,KAAK,SA6BH,CAKL,GAFKF,EAAQ,WACXA,EAAUA,EAAQ,WAAW,GAC3BA,EAAQ,GAAG,IAAI,EACjB,OAAO3D,GACT,GAAI2D,EAAQ,GAAG,KAAK,KAAK,CAAC,CAAC,EACzB,OAAOhC,GACTkC,EAAM7D,EACR,KAvCoB,CAGlB,GAAI,KAAK,GAAGK,EAAS,EAAG,CACtB,GAAIsD,EAAQ,GAAGjC,EAAG,GAAKiC,EAAQ,GAAG/B,EAAO,EACvC,OAAOvB,GACJ,GAAIsD,EAAQ,GAAGtD,EAAS,EAC3B,OAAOqB,GAGP,IAAIoC,EAAW,KAAK,IAAI,CAAC,EAEzB,OADAF
,EAASE,EAAS,IAAIH,CAAO,EAAE,IAAI,CAAC,EAChCC,EAAO,GAAG3D,EAAI,EACT0D,EAAQ,WAAW,EAAIjC,GAAME,IAEpCQ,EAAM,KAAK,IAAIuB,EAAQ,IAAIC,CAAM,CAAC,EAClCC,EAAMD,EAAO,IAAIxB,EAAI,IAAIuB,CAAO,CAAC,EAC1BE,EAGb,SAAWF,EAAQ,GAAGtD,EAAS,EAC7B,OAAO,KAAK,SAAWL,GAAQC,GACjC,GAAI,KAAK,WAAW,EAClB,OAAI0D,EAAQ,WAAW,EACd,KAAK,IAAI,EAAE,IAAIA,EAAQ,IAAI,CAAC,EAC9B,KAAK,IAAI,EAAE,IAAIA,CAAO,EAAE,IAAI,EAC9B,GAAIA,EAAQ,WAAW,EAC5B,OAAO,KAAK,IAAIA,EAAQ,IAAI,CAAC,EAAE,IAAI,EACrCE,EAAM5D,EACR,CAkBA,IADAmC,EAAM,KACCA,EAAI,IAAIuB,CAAO,GAAG,CAGvBC,EAAS,KAAK,IAAI,EAAG,KAAK,MAAMxB,EAAI,SAAS,EAAIuB,EAAQ,SAAS,CAAC,CAAC,EAWpE,QAPII,EAAO,KAAK,KAAK,KAAK,IAAIH,CAAM,EAAI,KAAK,GAAG,EAC9CI,EAASD,GAAQ,GAAM,EAAIhD,GAAQ,EAAGgD,EAAO,EAAE,EAI/CE,EAAYlE,GAAW6D,CAAM,EAC7BM,EAAYD,EAAU,IAAIN,CAAO,EAC5BO,EAAU,WAAW,GAAKA,EAAU,GAAG9B,CAAG,GAC/CwB,GAAUI,EACVC,EAAYlE,GAAW6D,EAAQ,KAAK,QAAQ,EAC5CM,EAAYD,EAAU,IAAIN,CAAO,EAK/BM,EAAU,OAAO,IACnBA,EAAYvC,IAEdmC,EAAMA,EAAI,IAAII,CAAS,EACvB7B,EAAMA,EAAI,IAAI8B,CAAS,CACzB,CACA,OAAOL,CACT,EAQAhC,EAAc,IAAMA,EAAc,OAQlCA,EAAc,OAAS,SAAgB8B,EAAS,CAK9C,GAJKvE,GAAOuE,CAAO,IACjBA,EAAUvC,GAAUuC,CAAO,GAGzBrC,GAAM,CACR,IAAIrC,GAAO,KAAK,SAAWqC,GAAK,MAAWA,GAAK,OAC9C,KAAK,IACL,KAAK,KACLqC,EAAQ,IACRA,EAAQ,IACV,EACA,OAAO9D,GAASZ,EAAKqC,GAAK,SAAY,EAAG,KAAK,QAAQ,CACxD,CAEA,OAAO,KAAK,IAAI,KAAK,IAAIqC,CAAO,EAAE,IAAIA,CAAO,CAAC,CAChD,EAQA9B,EAAc,IAAMA,EAAc,OAQlCA,EAAc,IAAMA,EAAc,OAOlCA,EAAc,IAAM,UAAe,CACjC,OAAOhC,GAAS,CAAC,KAAK,IAAK,CAAC,KAAK,KAAM,KAAK,QAAQ,CACtD,EAOAgC,EAAc,kBAAoB,UAA6B,CAC7D,OAAO,KAAK,KAAO,KAAK,MAAM,KAAK,IAAI,EAAI,KAAK,MAAM,KAAK,GAAG,EAAI,EACpE,EAQAA,EAAc,IAAMA,EAAc,kBAOlCA,EAAc,mBAAqB,UAA8B,CAC/D,OAAO,KAAK,IAAMvC,GAAM,KAAK,GAAG,EAAIA,GAAM,KAAK,IAAI,EAAI,EACzD,EAQAuC,EAAc,IAAMA,EAAc,mBAQlCA,EAAc,IAAM,SAAaY,EAAO,CACtC,OAAKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GAClB5C,GAAS,KAAK,IAAM4C,EAAM,IAAK,KAAK,KAAOA,EAAM,KAAM,KAAK,QAAQ,CAC7E,EAQAZ,EAAc,GAAK,SAAYY,EAAO,CACpC,OAAKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GAClB5C,GAAS,KAAK,IAAM4C,EAAM,IAAK,KAAK,KAAOA,EAAM,KAAM,KAAK,QAAQ,CAC7E,EAQAZ,EAAc,IAAM,SAAaY,EAAO,CACtC,OAAKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GAClB5C,GAAS,KAAK,IAAM4C,EAAM,IAAK,KAAK,KAAOA,EAAM,KAAM,KAAK,QAAQ,CAC7E,EAQAZ,EAAc,UAAY,SAAmBsC,EAAS,CAGpD,OAFI/E,GAAO+E,CAAO,IAChBA,EAAUA,EAAQ,MAAM,IACrBA,GAAW,MAAQ,EACf,KACAA,EAAU,GACVtE,GAAS,KAAK,KAAOsE,EAAU,KAAK,MAAQA,EAAY,KAAK,MAAS,GAAKA,EAAW,KAAK,QAAQ,EAEnGtE,GAAS,EAAG,KAAK,KAAQsE,EAAU,GAAK,KAAK,QAAQ,CAChE,EAQAtC,EAAc,IAAMA,EAAc,UAQlCA,EAAc,WAAa,SAAoBsC,EAAS,CAGtD,OAFI/E,GAAO+E,CAAO,IAChBA,EAAUA,EAAQ,MAAM,IACrBA,GAAW,MAAQ,EACf,KACAA,EAAU,GACVtE,GAAU,KAAK,MAAQsE,EAAY,KAAK,MAAS,GAAKA,EAAW,KAAK,MAAQA,EAAS,KAAK,QAAQ,EAEpGtE,GAAS,KAAK,MAASsE,EAAU,GAAK,KAAK,MAAQ,EAAI,EAAI,GAAI,KAAK,QAAQ,CACvF,EAQAtC,EAAc,IAAMA,EAAc,WAQlCA,EAAc,mBAAqB,SAA4BsC,EAAS,CAEtE,OADI/E,GAAO+E,CAAO,IAAGA,EAAUA,EAAQ,MAAM,IACxCA,GAAW,MAAQ,EAAU,KAC9BA,EAAU,GAAWtE,GAAU,KAAK,MAAQsE,EAAY,KAAK,MAAS,GAAKA,EAAW,KAAK,OAASA,EAAS,KAAK,QAAQ,EAC1HA,IAAY,GAAWtE,GAAS,KAAK,KAAM,EAAG,KAAK,QAAQ,EACxDA,GAAS,KAAK,OAAUsE,EAAU,GAAK,EAAG,KAAK,QAAQ,CAChE,EAQAtC,EAAc,KAAOA,EAAc,mBAQnCA,EAAc,MAAQA,EAAc,mBAQpCA,EAAc,WAAa,SAAoBsC,EAAS,CACtD,IAAIC,EAEJ,OADIhF,GAAO+E,CAAO,IAAGA,EAAUA,EAAQ,MAAM,IACxCA,GAAW,MAAQ,EAAU,KAC9BA,IAAY,GAAWtE,GAAS,KAAK,KAAM,KAAK,IAAK,KAAK,QAAQ,EAClEsE,EAAU,IACZC,EAAK,GAAKD,EACHtE,GAAW,KAAK,KAAOsE,EAAY,KAAK,OAASC,EAAO,KAAK,MAAQD,EAAY,KAAK,MAAQC,EAAK,KAAK,QAAQ,IAEzHD,GAAW,GACXC,EAAK,GAAKD,EACHtE,GAAW,KAAK,MAAQsE,EAAY,KAAK,MAAQC,EAAO,KAAK,KAAOD,EAAY,KAAK,OAASC,EAAK,KAAK,QAAQ,EACzH,EAOAvC,EAAc,KAAOA,EAAc,WAQnCA,EAAc,YAAc,SAAqBsC,EAAS,CACxD,IAAIC,EAEJ,OADIhF,GAAO+E,CAAO,IAAGA,EAAUA,EAAQ,MAAM,IACxCA,GAAW,MA
AQ,EAAU,KAC9BA,IAAY,GAAWtE,GAAS,KAAK,KAAM,KAAK,IAAK,KAAK,QAAQ,EAClEsE,EAAU,IACZC,EAAK,GAAKD,EACHtE,GAAW,KAAK,MAAQuE,EAAM,KAAK,MAAQD,EAAa,KAAK,KAAOC,EAAM,KAAK,OAASD,EAAW,KAAK,QAAQ,IAEzHA,GAAW,GACXC,EAAK,GAAKD,EACHtE,GAAW,KAAK,KAAOuE,EAAM,KAAK,OAASD,EAAa,KAAK,MAAQC,EAAM,KAAK,MAAQD,EAAW,KAAK,QAAQ,EACzH,EAOAtC,EAAc,KAAOA,EAAc,YAOnCA,EAAc,SAAW,UAAoB,CAC3C,OAAK,KAAK,SAEHhC,GAAS,KAAK,IAAK,KAAK,KAAM,EAAK,EADjC,IAEX,EAOAgC,EAAc,WAAa,UAAsB,CAC/C,OAAI,KAAK,SACA,KACFhC,GAAS,KAAK,IAAK,KAAK,KAAM,EAAI,CAC3C,EAQAgC,EAAc,QAAU,SAAiBwC,EAAI,CAC3C,OAAOA,EAAK,KAAK,UAAU,EAAI,KAAK,UAAU,CAChD,EAOAxC,EAAc,UAAY,UAAqB,CAC7C,IAAIyC,EAAK,KAAK,KACZC,EAAK,KAAK,IACZ,MAAO,CACLA,EAAK,IACLA,IAAO,EAAI,IACXA,IAAO,GAAK,IACZA,IAAO,GACPD,EAAK,IACLA,IAAO,EAAI,IACXA,IAAO,GAAK,IACZA,IAAO,EACT,CACF,EAOAzC,EAAc,UAAY,UAAqB,CAC7C,IAAIyC,EAAK,KAAK,KACZC,EAAK,KAAK,IACZ,MAAO,CACLD,IAAO,GACPA,IAAO,GAAK,IACZA,IAAO,EAAI,IACXA,EAAK,IACLC,IAAO,GACPA,IAAO,GAAK,IACZA,IAAO,EAAI,IACXA,EAAK,GACP,CACF,EASAvF,GAAK,UAAY,SAAmBwF,EAAOrF,EAAUkF,EAAI,CACvD,OAAOA,EAAKrF,GAAK,YAAYwF,EAAOrF,CAAQ,EAAIH,GAAK,YAAYwF,EAAOrF,CAAQ,CAClF,EAQAH,GAAK,YAAc,SAAqBwF,EAAOrF,EAAU,CACvD,OAAO,IAAIH,GACTwF,EAAM,CAAC,EACPA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,EACPA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZrF,CACF,CACF,EAQAH,GAAK,YAAc,SAAqBwF,EAAOrF,EAAU,CACvD,OAAO,IAAIH,GACTwF,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,EACPA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,EACPrF,CACF,CACF,EAEO2C,GAAQ9C,KC17Cf,IAiBIyF,EAjBJC,GAAAC,EAAA,KAiBIF,EAAc,CAAC,EAKnBA,EAAY,OAQZA,EAAY,MAMZA,EAAY,aAAe,EAM3BA,EAAY,WAAa,EAMzBA,EAAY,uBAAyB,EAMrCA,EAAY,mBAAqB,EAKjCA,EAAY,SAAW,CACrB,WAAY,EACZ,aAAc,CAChB,EAMAA,EAAY,MAAQ,IAAI,WAAW,CAAC,EAMpCA,EAAY,QAAU,IAAI,aAAaA,EAAY,MAAM,MAAM,EAM/DA,EAAY,QAAU,IAAI,aAAaA,EAAY,MAAM,MAAM,EAM/DA,EAAY,eAAiB,IAAI,YAAY,IAAI,WAAW,CAAC,EAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,IAAM,EASnFA,EAAY,KAAO,SAASG,EAAKC,EAAM,CAKrC,KAAK,IAAMD,EAAM,EAMjB,KAAK,KAAOC,EAAO,CACrB,EAOAJ,EAAY,KAAK,OAAS,SAASG,EAAKC,EAAM,CAE5C,OAAOD,GAAO,GAAKC,GAAQ,EAAIJ,EAAY,KAAK,KAAO,IAAIA,EAAY,KAAKG,EAAKC,CAAI,CACvF,EAKAJ,EAAY,KAAK,UAAU,UAAY,UAAW,CAChD,OAAQ,KAAK,MAAQ,GAAK,KAAK,KAAO,UACxC,EAMAA,EAAY,KAAK,UAAU,OAAS,SAASK,EAAO,CAClD,OAAO,KAAK,KAAOA,EAAM,KAAO,KAAK,MAAQA,EAAM,IACrD,EAMAL,EAAY,KAAK,KAAO,IAAIA,EAAY,KAAK,EAAG,CAAC,EAUjDA,EAAY,QAAU,SAASM,EAAkB,CAC/C,GAAKA,EAGH,IAAIC,EAAeD,MAFnB,KAAIC,EAAe,KASrB,KAAK,GAAKP,EAAY,WAAW,SAASO,CAAY,EAQtD,KAAK,MAAQA,EAQb,KAAK,SAAW,EAQhB,KAAK,OAAS,KAQd,KAAK,cAAgB,EAQrB,KAAK,SAAW,GAQhB,KAAK,aAAe,EAQpB,KAAK,QAAU,CAAC,EAQhB,KAAK,iBAAmB,EAQxB,KAAK,eAAiB,EACxB,EAEAP,EAAY,QAAQ,UAAU,MAAQ,UAAW,CAC/C,KAAK,GAAG,MAAM,EACd,KAAK,MAAQ,KAAK,GAAG,SAAS,EAC9B,KAAK,SAAW,EAChB,KAAK,OAAS,KACd,KAAK,cAAgB,EACrB,KAAK,SAAW,GAChB,KAAK,aAAe,EACpB,KAAK,QAAU,CAAC,EAChB,KAAK,iBAAmB,EACxB,KAAK,eAAiB,EACxB,EASAA,EAAY,QAAQ,UAAU,cAAgB,SAASQ,EAAe,CACpE,KAAK,eAAiBA,CACxB,EASAR,EAAY,QAAQ,UAAU,WAAa,UAAW,CACpD,OAAO,KAAK,EACd,EAQAA,EAAY,QAAQ,UAAU,aAAe,UAAW,CACtD,OAAO,KAAK,GAAG,MAAM,EAAE,SAAS,KAAK,GAAG,SAAS,EAAG,KAAK,GAAG,SAAS,EAAI,KAAK,OAAO,CAAC,CACxF,EAYAA,EAAY,QAAQ,UAAU,KAAO,SAASS,EAAMC,EAAkB,CAEhED,EAAO,KAAK,WACd,KAAK,SAAWA,GAQlB,QAHIE,EAAe,EAAE,KAAK,GAAG,SAAS,EAAI,KAAK,MAAQD,GAAqB,EAAMD,EAAO,EAGlF,KAAK,MAAQE,EAAaF,EAAOC,GAAkB,CACxD,IAAIE,EAAe,KAAK,GAAG,SAAS,EACpC,KAAK,GAAKZ,EAAY,QAAQ,eAAe,KAAK,EAAE,EACpD,KAAK,OAAS,KAAK,GAAG,SAAS,EAAIY,CACrC,CAEA,KAAK,IAAID,CAAU,CACrB,EAKAX,EAAY,QAAQ,UAAU,IAAM,SAASa,EAAW,CACtD,QAASC,EAAI,EAAGA,EAAID,EAAWC,IAC7B,KAAK,GAAG,UAAU,EAAE,KAAK,MAAO,CAAC,CAErC,EAKAd,EAAY,QAAQ,UA
AU,UAAY,SAASe,EAAO,CACxD,KAAK,GAAG,UAAU,KAAK,OAAS,EAAGA,CAAK,CAC1C,EAKAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,GAAG,WAAW,KAAK,OAAS,EAAGA,CAAK,CAC3C,EAKAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,GAAG,WAAW,KAAK,OAAS,EAAGA,CAAK,CAC3C,EAKAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,GAAG,WAAW,KAAK,OAAS,EAAGA,CAAK,CAC3C,EAKAf,EAAY,QAAQ,UAAU,aAAe,SAASe,EAAO,CAC3D,KAAK,GAAG,aAAa,KAAK,OAAS,EAAGA,CAAK,CAC7C,EAKAf,EAAY,QAAQ,UAAU,aAAe,SAASe,EAAO,CAC3D,KAAK,GAAG,aAAa,KAAK,OAAS,EAAGA,CAAK,CAC7C,EAOAf,EAAY,QAAQ,UAAU,QAAU,SAASe,EAAO,CACtD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,UAAUA,CAAK,CACtB,EAMAf,EAAY,QAAQ,UAAU,SAAW,SAASe,EAAO,CACvD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,WAAWA,CAAK,CACvB,EAMAf,EAAY,QAAQ,UAAU,SAAW,SAASe,EAAO,CACvD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,WAAWA,CAAK,CACvB,EAMAf,EAAY,QAAQ,UAAU,SAAW,SAASe,EAAO,CACvD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,WAAWA,CAAK,CACvB,EAMAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,aAAaA,CAAK,CACzB,EAMAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,aAAaA,CAAK,CACzB,EAQAf,EAAY,QAAQ,UAAU,aAAe,SAASgB,EAASD,EAAOE,EAAc,EAC9E,KAAK,gBAAkBF,GAASE,KAClC,KAAK,QAAQF,CAAK,EAClB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,cAAgB,SAASgB,EAASD,EAAOE,EAAc,EAC/E,KAAK,gBAAkBF,GAASE,KAClC,KAAK,SAASF,CAAK,EACnB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,cAAgB,SAASgB,EAASD,EAAOE,EAAc,EAC/E,KAAK,gBAAkBF,GAASE,KAClC,KAAK,SAASF,CAAK,EACnB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,cAAgB,SAASgB,EAASD,EAAOE,EAAc,EAC/E,KAAK,gBAAkB,CAACF,EAAM,OAAOE,CAAY,KACnD,KAAK,SAASF,CAAK,EACnB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,gBAAkB,SAASgB,EAASD,EAAOE,EAAc,EACjF,KAAK,gBAAkBF,GAASE,KAClC,KAAK,WAAWF,CAAK,EACrB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,gBAAkB,SAASgB,EAASD,EAAOE,EAAc,EACjF,KAAK,gBAAkBF,GAASE,KAClC,KAAK,WAAWF,CAAK,EACrB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,eAAiB,SAASgB,EAASD,EAAOE,EAAc,EAChF,KAAK,gBAAkBF,GAASE,KAClC,KAAK,UAAUF,CAAK,EACpB,KAAK,KAAKC,CAAO,EAErB,EASAhB,EAAY,QAAQ,UAAU,eAAiB,SAASgB,EAASD,EAAOE,EAAc,CAChFF,GAASE,IACX,KAAK,OAAOF,CAAK,EACjB,KAAK,KAAKC,CAAO,EAErB,EASAhB,EAAY,QAAQ,UAAU,OAAS,SAASkB,EAAK,CACnD,GAAIA,GAAO,KAAK,OAAO,EACrB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,EAMAlB,EAAY,QAAQ,UAAU,UAAY,UAAW,CACnD,GAAI,KAAK,SACP,MAAM,IAAI,MAAM,uDAAuD,CAE3E,EAOAA,EAAY,QAAQ,UAAU,KAAO,SAASgB,EAAS,CACrD,KAAK,OAAOA,CAAO,EAAI,KAAK,OAAO,CACrC,EAKAhB,EAAY,QAAQ,UAAU,OAAS,UAAW,CAChD,OAAO,KAAK,GAAG,SAAS,EAAI,KAAK,KACnC,EAcAA,EAAY,QAAQ,eAAiB,SAASmB,EAAI,CAChD,IAAIP,EAAeO,EAAG,SAAS,EAG/B,GAAIP,EAAe,WACjB,MAAM,IAAI,MAAM,qDAAqD,EAGvE,IAAIQ,EAAeR,GAAgB,EAC/BS,EAAMrB,EAAY,WAAW,SAASoB,CAAY,EACtD,OAAAC,EAAI,YAAYD,EAAeR,CAAY,EAC3CS,EAAI,MAAM,EAAE,IAAIF,EAAG,MAAM,EAAGC,EAAeR,CAAY,EAChDS,CACT,EAQArB,EAAY,QAAQ,UAAU,UAAY,SAASsB,EAAQ,CACzD,KAAK,KAAKtB,EAAY,WAAY,CAAC,EACnC,KAAK,WAAW,KAAK,OAAO,EAAIsB,EAAStB,EAAY,UAAU,CACjE,EAUAA,EAAY,QAAQ,UAAU,YAAc,SAASuB,EAAW,CAC9D,KAAK,UAAU,EACX,KAAK,QAAU,OACjB,KAAK,OAAS,CAAC,GAEjB,KAAK,cAAgBA,EACrB,QAAST,EAAI,EAAGA,EAAIS,EAAWT,IAC7B,KAAK,OAAOA,CAAC,EAAI,EAEnB,KAAK,SAAW,GAChB,KAAK,aAAe,KAAK,OAAO,CAClC,EAOAd,EAAY,QAAQ,UAAU,UAAY,UAAW,CACnD,GAAI,KAAK,QAAU,MAAQ,CAAC,KAAK,SAC/B,MAAM,IAAI,MAAM,mDAAmD,EAGrE,KAAK,SAAS,CAAC,EAKf,QAJIwB,EAAY,KAAK,OAAO,EAGxBV,EAAI,KAAK,cAAgB,EACtBA,GAAK,GAAK,KAAK,OAAOA,CAAC,GAAK,EAAGA,IAAK,CAI3C,QAHIW,EAAeX,EAAI,EAGhBA,GAAK,EAAGA,IAEb,KAAK,SAAS,KAAK,OAAOA,CAAC,GAAK,EAAIU,EAAY,KAAK,OAAOV,CAAC,EAAI,CAAC,EAGpE,IAAIY,EAAkB,EACtB,KAAK,SAASF,EAAY,KAAK,YAAY,EAC3C,IAAIG,GAAOF,EAAeC,GAAmB1B,EAAY,aACzD,KAAK,SAAS2B,CAAG,EAGjB,IAAIC,EAAkB,EAClBC,EAAM,KAAK,MACjBC,EACE,IAAKhB,EAAI,EAAGA,EAAI,KAAK,QAAQ,OAAQA,IAAK,CACxC,IAAIiB,EAAM,KAAK,GA
AG,SAAS,EAAI,KAAK,QAAQjB,CAAC,EAC7C,GAAIa,GAAO,KAAK,GAAG,UAAUI,CAAG,EAAG,CACjC,QAASC,EAAIhC,EAAY,aAAcgC,EAAIL,EAAKK,GAAKhC,EAAY,aAC/D,GAAI,KAAK,GAAG,UAAU6B,EAAMG,CAAC,GAAK,KAAK,GAAG,UAAUD,EAAMC,CAAC,EACzD,SAASF,EAGbF,EAAkB,KAAK,QAAQd,CAAC,EAChC,KACF,CACF,CAEA,OAAIc,GAGF,KAAK,MAAQ,KAAK,GAAG,SAAS,EAAIJ,EAGlC,KAAK,GAAG,WAAW,KAAK,MAAOI,EAAkBJ,CAAS,IAI1D,KAAK,QAAQ,KAAK,KAAK,OAAO,CAAC,EAG/B,KAAK,GAAG,WAAW,KAAK,GAAG,SAAS,EAAIA,EAAW,KAAK,OAAO,EAAIA,CAAS,GAG9E,KAAK,SAAW,GACTA,CACT,EAUAxB,EAAY,QAAQ,UAAU,OAAS,SAASiC,EAAYC,EAAqBC,EAAiB,CAChG,IAAIC,EAAcD,EAAkBnC,EAAY,mBAAqB,EACrE,GAAIkC,EAAqB,CACvB,IAAIG,EAAkBH,EAGtB,GAFA,KAAK,KAAK,KAAK,SAAUlC,EAAY,WACnCA,EAAY,uBAAyBoC,CAAW,EAC9CC,EAAgB,QAAUrC,EAAY,uBACxC,MAAM,IAAI,MAAM,+CACdA,EAAY,sBAAsB,EAEtC,QAAS,EAAIA,EAAY,uBAAyB,EAAG,GAAK,EAAG,IAC3D,KAAK,UAAUqC,EAAgB,WAAW,CAAC,CAAC,CAEhD,CACA,KAAK,KAAK,KAAK,SAAUrC,EAAY,WAAaoC,CAAW,EAC7D,KAAK,UAAUH,CAAU,EACrBG,GACF,KAAK,SAAS,KAAK,GAAG,SAAS,EAAI,KAAK,KAAK,EAE/C,KAAK,GAAG,YAAY,KAAK,KAAK,CAChC,EAQApC,EAAY,QAAQ,UAAU,mBAAqB,SAAUiC,EAAYC,EAAqB,CAC5F,KAAK,OAAOD,EAAYC,EAAqB,EAAI,CACnD,EAUAlC,EAAY,QAAQ,UAAU,cAAgB,SAASsC,EAAOC,EAAO,CACnE,IAAIC,EAAc,KAAK,GAAG,SAAS,EAAIF,EACnCG,EAAeD,EAAc,KAAK,GAAG,UAAUA,CAAW,EAC1DE,EAAK,KAAK,GAAG,UAAUD,EAAeF,CAAK,GAAK,EAGpD,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,sBAAwBH,EAAQ,cAAc,CAElE,EAWAvC,EAAY,QAAQ,UAAU,YAAc,SAAS2C,EAAWC,EAAWC,EAAW,CACpF,KAAK,UAAU,EACf,KAAK,iBAAmBD,EACxB,KAAK,KAAK5C,EAAY,WAAY2C,EAAYC,CAAS,EACvD,KAAK,KAAKC,EAAWF,EAAYC,CAAS,CAC5C,EASA5C,EAAY,QAAQ,UAAU,UAAY,UAAW,CACnD,YAAK,WAAW,KAAK,gBAAgB,EAC9B,KAAK,OAAO,CACrB,EAUAA,EAAY,QAAQ,UAAU,aAAe,SAAS8C,EAAG,CACvD,GAAIA,aAAa,WACf,IAAIC,EAAOD,MAKX,SAHIC,EAAO,CAAC,EACRjC,EAAI,EAEDA,EAAIgC,EAAE,QAAQ,CACnB,IAAIE,EAGAC,EAAIH,EAAE,WAAWhC,GAAG,EACxB,GAAImC,EAAI,OAAUA,GAAK,MACrBD,EAAYC,MACP,CACL,IAAIC,EAAIJ,EAAE,WAAWhC,GAAG,EACxBkC,GAAaC,GAAK,IAAMC,GAAK,MAAW,SAAgB,MAC1D,CAGIF,EAAY,IACdD,EAAK,KAAKC,CAAS,GAEfA,EAAY,KACdD,EAAK,KAAOC,GAAa,EAAK,GAAQ,GAAI,GAEtCA,EAAY,MACdD,EAAK,KAAOC,GAAa,GAAM,GAAQ,GAAI,EAE3CD,EAAK,KACDC,GAAa,GAAM,EAAQ,IAC3BA,GAAa,GAAM,GAAQ,GAAI,EAErCD,EAAK,KAAOC,GAAa,EAAK,GAAQ,GAAI,GAE5CD,EAAK,KAAMC,EAAY,GAAQ,GAAI,EAEvC,CAGF,KAAK,QAAQ,CAAC,EACd,KAAK,YAAY,EAAGD,EAAK,OAAQ,CAAC,EAClC,KAAK,GAAG,YAAY,KAAK,OAASA,EAAK,MAAM,EAC7C,QAASjC,EAAI,EAAGQ,EAAS,KAAK,MAAO6B,EAAQ,KAAK,GAAG,MAAM,EAAGrC,EAAIiC,EAAK,OAAQjC,IAC7EqC,EAAM7B,GAAQ,EAAIyB,EAAKjC,CAAC,EAE1B,OAAO,KAAK,UAAU,CACxB,EASAd,EAAY,QAAQ,UAAU,WAAa,SAASG,EAAKC,EAAM,CAC7D,OAAOJ,EAAY,KAAK,OAAOG,EAAKC,CAAI,CAC1C,EASAJ,EAAY,WAAa,SAASmD,EAAO,CAKvC,KAAK,OAASA,EAMd,KAAK,UAAY,CACnB,EAQAnD,EAAY,WAAW,SAAW,SAASa,EAAW,CACpD,OAAO,IAAIb,EAAY,WAAW,IAAI,WAAWa,CAAS,CAAC,CAC7D,EAEAb,EAAY,WAAW,UAAU,MAAQ,UAAW,CAClD,KAAK,UAAY,CACnB,EAOAA,EAAY,WAAW,UAAU,MAAQ,UAAW,CAClD,OAAO,KAAK,MACd,EAOAA,EAAY,WAAW,UAAU,SAAW,UAAW,CACrD,OAAO,KAAK,SACd,EAOAA,EAAY,WAAW,UAAU,YAAc,SAASoD,EAAU,CAChE,KAAK,UAAYA,CACnB,EAOApD,EAAY,WAAW,UAAU,SAAW,UAAW,CACrD,OAAO,KAAK,OAAO,MACrB,EAMAA,EAAY,WAAW,UAAU,SAAW,SAASsB,EAAQ,CAC3D,OAAO,KAAK,UAAUA,CAAM,GAAK,IAAM,EACzC,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,KAAK,OAAOA,CAAM,CAC3B,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,KAAK,WAAWA,CAAM,GAAK,IAAM,EAC1C,EAMAtB,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAO,KAAK,OAAOA,CAAM,EAAI,KAAK,OAAOA,EAAS,CAAC,GAAK,CAC1D,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,KAAK,OAAOA,CAAM,EAAI,KAAK,OAAOA,EAAS,CAAC,GAAK,EAAI,KAAK,OAAOA,EAAS,CAAC,GAAK,GAAK,KAAK,OAAOA,EAAS,CAAC,GAAK,EACzH,EAMAtB,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAO,KAAK,UAAUA,CAAM,IAAM,CACpC,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,IAAItB,EAAY,KAAK,KAAK,UAAUsB,CAAM,EAAG,KAAK,UAAU
A,EAAS,CAAC,CAAC,CAChF,EAMAtB,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAO,IAAItB,EAAY,KAAK,KAAK,WAAWsB,CAAM,EAAG,KAAK,WAAWA,EAAS,CAAC,CAAC,CAClF,EAMAtB,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQ,CAC9D,OAAAtB,EAAY,MAAM,CAAC,EAAI,KAAK,UAAUsB,CAAM,EACrCtB,EAAY,QAAQ,CAAC,CAC9B,EAMAA,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQ,CAC9D,OAAAtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,EAAI,KAAK,UAAUsB,CAAM,EAC7EtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,EAAI,KAAK,UAAUsB,EAAS,CAAC,EAC1EtB,EAAY,QAAQ,CAAC,CAC9B,EAMAA,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQP,EAAO,CACnE,KAAK,OAAOO,CAAM,EAA0BP,CAC9C,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,OAAOO,CAAM,EAAIP,CACxB,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,CACrC,EAMAf,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQP,EAAO,CACnE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,CACvC,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,GACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACrC,EAMAf,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQP,EAAO,CACnE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,GACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACvC,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,WAAWO,EAAQP,EAAM,GAAG,EACjC,KAAK,WAAWO,EAAS,EAAGP,EAAM,IAAI,CACxC,EAMAf,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQP,EAAO,CACnE,KAAK,YAAYO,EAAQP,EAAM,GAAG,EAClC,KAAK,YAAYO,EAAS,EAAGP,EAAM,IAAI,CAC3C,EAMAf,EAAY,WAAW,UAAU,aAAe,SAASsB,EAAQP,EAAO,CACtEf,EAAY,QAAQ,CAAC,EAAIe,EACzB,KAAK,WAAWO,EAAQtB,EAAY,MAAM,CAAC,CAAC,CAC9C,EAMAA,EAAY,WAAW,UAAU,aAAe,SAASsB,EAAQP,EAAO,CACtEf,EAAY,QAAQ,CAAC,EAAIe,EACzB,KAAK,WAAWO,EAAQtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,CAAC,EAC7E,KAAK,WAAWsB,EAAS,EAAGtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,CAAC,CACnF,EAQAA,EAAY,WAAW,UAAU,oBAAsB,UAAW,CAChE,GAAI,KAAK,OAAO,OAAS,KAAK,UAAYA,EAAY,WAClDA,EAAY,uBACd,MAAM,IAAI,MACN,gEAAgE,EAGtE,QADIqD,EAAS,GACJvC,EAAI,EAAGA,EAAId,EAAY,uBAAwBc,IACtDuC,GAAU,OAAO,aACb,KAAK,SAAS,KAAK,UAAYrD,EAAY,WAAac,CAAC,CAAC,EAEhE,OAAOuC,CACT,EAUArD,EAAY,WAAW,UAAU,SAAW,SAASsD,EAAQC,EAAe,CAC1E,IAAIC,EAASF,EAAS,KAAK,UAAUA,CAAM,EAC3C,OAAOC,EAAgB,KAAK,UAAUC,CAAM,EAAI,KAAK,UAAUA,EAASD,CAAa,EAAI,CAC3F,EASAvD,EAAY,WAAW,UAAU,QAAU,SAASyD,EAAGnC,EAAQ,CAC7D,OAAAmC,EAAE,OAASnC,EAAS,KAAK,UAAUA,CAAM,EACzCmC,EAAE,GAAK,KACAA,CACT,EAeAzD,EAAY,WAAW,UAAU,SAAW,SAASsB,EAAQoC,EAAc,CACzEpC,GAAU,KAAK,UAAUA,CAAM,EAE/B,IAAIqC,EAAS,KAAK,UAAUrC,CAAM,EAC9B+B,EAAS,GACTvC,EAAI,EAIR,GAFAQ,GAAUtB,EAAY,WAElB0D,IAAiB1D,EAAY,SAAS,WACxC,OAAO,KAAK,OAAO,SAASsB,EAAQA,EAASqC,CAAM,EAGrD,KAAO7C,EAAI6C,GAAQ,CACjB,IAAIX,EAGAC,EAAI,KAAK,UAAU3B,EAASR,GAAG,EACnC,GAAImC,EAAI,IACND,EAAYC,MACP,CACL,IAAIC,EAAI,KAAK,UAAU5B,EAASR,GAAG,EACnC,GAAImC,EAAI,IACND,GACIC,EAAI,KAAS,EACdC,EAAI,OACF,CACL,IAAIU,EAAI,KAAK,UAAUtC,EAASR,GAAG,EACnC,GAAImC,EAAI,IACND,GACIC,EAAI,KAAS,IACbC,EAAI,KAAS,EACdU,EAAI,OACF,CACL,IAAIC,EAAI,KAAK,UAAUvC,EAASR,GAAG,EACnCkC,GACIC,EAAI,IAAS,IACbC,EAAI,KAAS,IACbU,EAAI,KAAS,EACdC,EAAI,EACT,CACF,CACF,CAGIb,EAAY,MACdK,GAAU,OAAO,aAAaL,CAAS,GAEvCA,GAAa,MACbK,GAAU,OAAO,cACdL,GAAa,IAAM,OACnBA,EAAc,KAAW,GAAM,KAAM,EAE5C,CAEA,OAAOK,CACT,EAOArD,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAOA,EAAS,KAAK,UAAUA,CAAM,CACvC,EAQAtB,EAAY,WAAW,UAAU,SAAW,SAASsB,EAAQ,CAC3D,OAAOA,EAAS,KAAK,UAAUA,CAAM,EAAItB,EAAY,UACvD,EAQAA,EAAY,WAAW,UAAU,aAAe,SAASsB,EAAQ,CAC/D,OAAO,KAAK,UAAUA,EAAS,KAAK,UAAUA,CAAM,CAAC,CACvD,EAMAtB,EAAY,WAAW,UAAU,iBAAmB,SAAS8D,EAAO,CAClE,GAAIA,EAAM,QAAU9D,EAAY,uBAC9B,MAAM,IAAI,MAAM,+CACAA,EAAY,sBAAsB,EAEpD,QAASc,EAAI,EAAGA,EAAId,EAAY,uBAAwBc,IACtD,GAAIg
D,EAAM,WAAWhD,CAAC,GAAK,KAAK,SAAS,KAAK,UAAYd,EAAY,WAAac,CAAC,EAClF,MAAO,GAGX,MAAO,EACT,EASAd,EAAY,WAAW,UAAU,WAAa,SAASG,EAAKC,EAAM,CAChE,OAAOJ,EAAY,KAAK,OAAOG,EAAKC,CAAI,CAC1C,ICpuCA,IAQiB2D,GARjBC,GAAAC,EAAA,kBAGAC,MAKiBH,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKC,OACVA,IAAA,UAAY,GAAZ,YACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,IAAM,GAAN,MACAA,IAAA,OAAS,GAAT,SACAA,IAAA,OAAS,GAAT,SACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,OAAS,GAAT,SACAA,IAAA,KAAO,GAAP,OACAA,IAAA,QAAU,GAAV,UACAA,IAAA,QAAU,GAAV,UACAA,IAAA,OAAS,IAAT,SACAA,IAAA,cAAgB,IAAhB,gBACAA,IAAA,eAAiB,IAAjB,mBAbUA,EAAAD,EAAA,sBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAqBAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKE,OAAoBA,IAAA,QAAU,GAAV,UAAaA,IAAA,MAAQ,GAAR,QAAWA,IAAA,MAAQ,GAAR,UAA5CA,EAAAF,EAAA,2BAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAOAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKG,OACVA,IAAA,UAAY,GAAZ,YACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,KAAO,GAAP,OACAA,IAAA,OAAS,GAAT,SACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,OAAS,GAAT,SACAA,IAAA,KAAO,GAAP,OACAA,IAAA,QAAU,IAAV,UACAA,IAAA,OAAS,IAAT,SACAA,IAAA,OAAS,IAAT,SACAA,IAAA,OAAS,IAAT,SACAA,IAAA,UAAY,IAAZ,YACAA,IAAA,WAAa,IAAb,aACAA,IAAA,SAAW,IAAX,WACAA,IAAA,aAAe,IAAf,eACAA,IAAA,eAAiB,IAAjB,iBACAA,IAAA,WAAa,IAAb,aACAA,IAAA,eAAiB,IAAjB,mBArBUA,EAAAH,EAAA,uBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA6BAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKI,OAAUA,IAAA,UAAY,GAAZ,YAAeA,IAAA,MAAQ,GAAR,UAAzBA,EAAAJ,EAAA,iBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAOAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKK,OAAeA,IAAA,KAAO,GAAP,OAAUA,IAAA,YAAc,GAAd,cAAiBA,IAAA,cAAgB,GAAhB,gBAAmBA,IAAA,SAAW,GAAX,aAA7DA,EAAAL,EAAA,sBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAOAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMM,CAAM,CAAZ,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOC,EAAWC,EAAmC,CACnD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,eAAeA,EAA4BC,EAAoB,CACpE,OAAQA,GAAO,IAAIH,GAAS,OAAOE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,OAAO,2BAA2BA,EAA4BC,EAAoB,CAChF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIH,GAAS,OAAOE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,IAAIG,EAAeF,EAA2F,CAC5G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,WACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,WAAoB,CAClB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,WAAWC,EAA8B,CAC9CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,OAAOA,EAA8BC,EAA+B,CACzED,EAAQ,eAAe,EAAGC,EAAW,CAAC,CACxC,CAOA,OAAO,gBAAgBD,EAA8BE,EAAgD,CACnGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,eAAeA,EAA8BG,EAAkB,CACpEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,SAASH,EAAkD,CAEhE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,YAAYA,EAA8BC,EAAmD,CAClG,OAAAR,EAAM,WAAWO,CAAO,EACxBP,EAAM,OAAOO,EAASC,CAAS,EACxBR,EAAM,SAASO,CAAO,CAC/B,CACF,CAxGOb,EAAM,MAAAM,IAD2BN,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA8GAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMiB,CAAU,CAAhB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOV,EAAWC,EAAuC,CACvD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,mBAAmBA,EAA4BC,EAA4B,CAChF,OAAQA,GAAO,IAAIQ,GAAa,OAAOT,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAOA,OAAO,+BAA+BA,EAA4BC,EAA4B,CAC5F,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIQ,GAAa,OAAOT,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAA
S,EAAGA,CAAE,CACxF,CAMA,MAAMC,EAAqG,CACzG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,gBACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAQA,WAAWM,EAAgD,CACzD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,OAAO,eAAeL,EAA8B,CAClDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,SAASA,EAA8BM,EAAiC,CAC7EN,EAAQ,eAAe,EAAGM,EAAa,CAAC,CAC1C,CAMA,OAAO,cAAcN,EAA8BO,EAAsC,CACvFP,EAAQ,eAAe,EAAGO,EAAkB,CAAC,CAC/C,CAMA,OAAO,aAAaP,EAAkD,CAEpE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,gBACHA,EAA8BM,EAC9BC,EAA0D,CAC5D,OAAAH,EAAU,eAAeJ,CAAO,EAChCI,EAAU,SAASJ,EAASM,CAAW,EACvCF,EAAU,cAAcJ,EAASO,CAAgB,EAC1CH,EAAU,aAAaJ,CAAO,CACvC,CACF,CAhGOb,EAAM,UAAAiB,IAD2BjB,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMqB,CAAe,CAArB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOd,EAAWC,EAA4C,CAC5D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,wBAAwBA,EAA4BC,EAAsC,CAC/F,OAAQA,GAAO,IAAIY,GAAkB,OAAOb,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC7F,CAOA,OAAO,oCAAoCA,EAA4BC,EAAsC,CAC3G,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIY,GAAkB,OAAOb,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC7F,CAKA,SAA2D,CACzD,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC/C,CAClB,CAKA,UAA6B,CAC3B,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAQA,SAASM,EAAgD,CACvD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,OAAO,oBAAoBL,EAA8B,CACvDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BS,EAA0D,CACxGT,EAAQ,aAAa,EAAGS,EAAS,CAAuD,CAC1F,CAMA,OAAO,YAAYT,EAA8BU,EAA4B,CAC3EV,EAAQ,cAAc,EAAGU,EAAUV,EAAQ,WAAW,EAAG,CAAC,CAAC,CAC7D,CAMA,OAAO,YAAYA,EAA8BW,EAAoC,CACnFX,EAAQ,eAAe,EAAGW,EAAgB,CAAC,CAC7C,CAMA,OAAO,kBAAkBX,EAAkD,CAEzE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,qBACHA,EAA8BS,EAC9BC,EAA4BC,EAAwD,CACtF,OAAAH,EAAe,oBAAoBR,CAAO,EAC1CQ,EAAe,WAAWR,EAASS,CAAO,EAC1CD,EAAe,YAAYR,EAASU,CAAQ,EAC5CF,EAAe,YAAYR,EAASW,CAAc,EAC3CH,EAAe,kBAAkBR,CAAO,CACjD,CACF,CA/GOb,EAAM,eAAAqB,IAD2BrB,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAqHAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMyB,CAAmB,CAAzB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOlB,EAAWC,EAAgD,CAChE,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,4BAA4BA,EAA4BC,EAA8C,CAC3G,OAAQA,GAAO,IAAIgB,GAAsB,OAAOjB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACjG,CAOA,OAAO,wCAAwCA,EAA4BC,EACpD,CACrB,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIgB,GAAsB,OAAOjB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACjG,CAKA,UAAwD,CACtD,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,MAAMH,EAAmF,CACvF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,wBAAwBC,EAA8B,CAC3DA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,YAAYA,EAA8Ba,EAAuD,CACtGb,EAAQ,cAAc,EAAGa,EAAU,CAAqD,CAC1F,CAMA,OAAO,SAASb,EAA8Bc,EAAiC,CAC7Ed,EAAQ,eAAe,EAAGc,EAAa,CAAC,CAC1C,CAMA,OAAO,sBAAsBd,EAAkD,CAE7E,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,yBACHA,EAA8Ba,EAC9BC,EAAqD,CACvD,OAAAF,EAAmB,wBAAwBZ,CAAO,EAClDY,EAAmB,YAAYZ,EAASa,CAAQ,EAChDD,EAAmB,SAASZ,EAASc,CAAW,EACzCF,EAAmB,sBAAsBZ,CAAO,CACzD,CACF,CA/FOb,EAAM,mBAAAyB,IAD2BzB,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAqGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM4B,CAAQ,CAAd,cACL,QAAkC,KAElC,YAAS,EAMT,O
AAOrB,EAAWC,EAAqC,CACrD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,iBAAiBA,EAA4BC,EAAwB,CAC1E,OAAQA,GAAO,IAAImB,GAAW,OAAOpB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACtF,CAOA,OAAO,6BAA6BA,EAA4BC,EAAwB,CACtF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAImB,GAAW,OAAOpB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACtF,CAKA,SAAuD,CACrD,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,UAAUH,EAAyF,CACjG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,aAAaC,EAA8B,CAChDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BgB,EAAsD,CACpGhB,EAAQ,cAAc,EAAGgB,EAAS,CAAqD,CACzF,CAMA,OAAO,aAAahB,EAA8BiB,EAAqC,CACrFjB,EAAQ,eAAe,EAAGiB,EAAiB,CAAC,CAC9C,CAMA,OAAO,WAAWjB,EAAkD,CAElE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,cACHA,EAA8BgB,EAC9BC,EAAyD,CAC3D,OAAAF,EAAQ,aAAaf,CAAO,EAC5Be,EAAQ,WAAWf,EAASgB,CAAO,EACnCD,EAAQ,aAAaf,EAASiB,CAAe,EACtCF,EAAQ,WAAWf,CAAO,CACnC,CACF,CA9FOb,EAAM,QAAA4B,IAD2B5B,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAoGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM+B,CAAa,CAAnB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOxB,EAAWC,EAA0C,CAC1D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,sBAAsBA,EAA4BC,EAAkC,CACzF,OAAQA,GAAO,IAAIsB,GAAgB,OAAOvB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAOA,OAAO,kCAAkCA,EAA4BC,EAAkC,CACrG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIsB,GAAgB,OAAOvB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAMA,SAASC,EAAyF,CAChG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,kBAAkBC,EAA8B,CACrDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,YAAYA,EAA8BmB,EAAoC,CACnFnB,EAAQ,eAAe,EAAGmB,EAAgB,CAAC,CAC7C,CAMA,OAAO,gBAAgBnB,EAAkD,CAEvE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,mBAAmBA,EAA8BmB,EAAwD,CAC9G,OAAAD,EAAa,kBAAkBlB,CAAO,EACtCkB,EAAa,YAAYlB,EAASmB,CAAc,EACzCD,EAAa,gBAAgBlB,CAAO,CAC7C,CACF,CA1EOb,EAAM,aAAA+B,IAD2B/B,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAgFAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMiC,CAAQ,CAAd,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO1B,EAAWC,EAAqC,CACrD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAKA,WAAoB,CAClB,OAAO,KAAK,GAAI,WAAW,KAAK,MAAM,CACxC,CAKA,aAAsB,CACpB,OAAO,KAAK,GAAI,UAAU,KAAK,OAAS,CAAC,CAC3C,CAKA,aAAsB,CACpB,OAAO,KAAK,GAAI,UAAU,KAAK,OAAS,CAAC,CAC3C,CASA,OAAO,cACHK,EAA8BqB,EAAoBC,EAClDC,EAA2C,CAC7C,OAAAvB,EAAQ,KAAK,EAAG,EAAE,EAClBA,EAAQ,WAAWuB,CAAa,EAChCvB,EAAQ,WAAWsB,CAAa,EAChCtB,EAAQ,WAAWqB,CAAU,EACtBrB,EAAQ,OAAO,CACxB,CACF,CApDOb,EAAM,QAAAiC,IAD2BjC,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0DAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMqC,CAAS,CAAf,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO9B,EAAWC,EAAsC,CACtD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,kBAAkBA,EAA4BC,EAA0B,CAC7E,OAAQA,GAAO,IAAI4B,GAAY,OAAO7B,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAOA,OAAO,8BAA8BA,EAA4BC,EAA0B,CACzF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI4B,GAAY,OAAO7B,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAKA,WAAoB,CAClB,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,OAASA,CAAM,EAAI,CAC9D,CAOA,WAAWD,EAAeF,EAAuF,CAC/G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,SACpC,OAAO,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,GAAI,KAAK,EAAG,EAC1E,IAClB,CAKA,kBAA2B,CACzB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS
,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,YAAYD,EAAeF,EAAuF,CAChH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,SACpC,OAAO,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,GAAI,KAAK,EAAG,EAC1E,IAClB,CAKA,mBAA4B,CAC1B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,cAAcC,EAA8B,CACjDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,aAAaA,EAA8ByB,EAAmB,CACnEzB,EAAQ,cAAc,EAAGyB,EAAW,CAAC,CACvC,CAMA,OAAO,cAAczB,EAA8B0B,EAAsC,CACvF1B,EAAQ,eAAe,EAAG0B,EAAkB,CAAC,CAC/C,CAMA,OAAO,sBAAsB1B,EAA8BG,EAAkB,CAC3EH,EAAQ,YAAY,GAAIG,EAAU,CAAC,CACrC,CAMA,OAAO,eAAeH,EAA8B2B,EAAuC,CACzF3B,EAAQ,eAAe,EAAG2B,EAAmB,CAAC,CAChD,CAMA,OAAO,uBAAuB3B,EAA8BG,EAAkB,CAC5EH,EAAQ,YAAY,GAAIG,EAAU,CAAC,CACrC,CAMA,OAAO,YAAYH,EAAkD,CAEnE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,eACHA,EAA8ByB,EAAmBC,EACjDC,EAA2D,CAC7D,OAAAH,EAAS,cAAcxB,CAAO,EAC9BwB,EAAS,aAAaxB,EAASyB,CAAS,EACxCD,EAAS,cAAcxB,EAAS0B,CAAgB,EAChDF,EAAS,eAAexB,EAAS2B,CAAiB,EAC3CH,EAAS,YAAYxB,CAAO,CACrC,CACF,CAnJOb,EAAM,SAAAqC,IAD2BrC,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAyJAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMyC,CAAK,CAAX,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOlC,EAAWC,EAAkC,CAClD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,cAAcA,EAA4BC,EAAkB,CACjE,OAAQA,GAAO,IAAIgC,GAAQ,OAAOjC,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnF,CAOA,OAAO,0BAA0BA,EAA4BC,EAAkB,CAC7E,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIgC,GAAQ,OAAOjC,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,OAAOA,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,cAAuB,CACrB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,CAC7D,CAKA,OAAgB,CACd,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,OAASA,CAAM,EAAI,CAC9D,CAQA,OAAOM,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,MAA8C,CAC5C,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAQA,sBAAsBM,EAAgD,CACpE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CASA,OAAOP,EAAeO,EAAgD,CACpE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,cAAuB,CACrB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,QAAQD,EAAeO,EAAgD,CACrE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,eAAwB,CACtB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,WAAWD,EAAeF,EAChB,CACR,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,WACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,kBAA2B,CACzB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAMA,eAAeD,EAA4B,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KA
AK,OAASA,CAAM,EAAID,EAAQ,CAAC,EAAI,CAC5F,CAKA,sBAA+B,CAC7B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,qBAAuC,CACrC,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EACH,IAAI,WACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CASA,eAAeD,EAAeO,EAAgD,CAC5E,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,sBAA+B,CAC7B,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,UAAUC,EAA8B,CAC7CA,EAAQ,YAAY,EAAE,CACxB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,UAAU9B,EAA8B+B,EAAkC,CAC/E/B,EAAQ,eAAe,EAAG+B,EAAc,CAAC,CAC3C,CAMA,OAAO,gBAAgB/B,EAA8BgC,EAAsB,CACzEhC,EAAQ,cAAc,EAAGgC,EAAc,CAAC,CAC1C,CAMA,OAAO,SAAShC,EAA8BF,EAAe,CAC3DE,EAAQ,cAAc,EAAGF,EAAO,CAAC,CACnC,CAMA,OAAO,UAAUE,EAA8BiC,EAAkC,CAC/EjC,EAAQ,eAAe,EAAGiC,EAAc,CAAC,CAC3C,CAMA,OAAO,QAAQjC,EAA8BkC,EAA6C,CACxFlC,EAAQ,cAAc,EAAGkC,EAAM,CAA+C,CAChF,CAMA,OAAO,yBAAyBlC,EAA8BmC,EAAiD,CAC7GnC,EAAQ,eAAe,EAAGmC,EAA6B,CAAC,CAC1D,CAMA,OAAO,UAAUnC,EAA8BoC,EAAkC,CAC/EpC,EAAQ,eAAe,EAAGoC,EAAc,CAAC,CAC3C,CAOA,OAAO,mBAAmBpC,EAA8BE,EAAgD,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8BqC,EAAmC,CACjFrC,EAAQ,eAAe,EAAGqC,EAAe,CAAC,CAC5C,CAOA,OAAO,oBAAoBrC,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,cAAcH,EAA8BsC,EAAsC,CACvFtC,EAAQ,eAAe,GAAIsC,EAAkB,CAAC,CAChD,CAOA,OAAO,uBAAuBtC,EAA8BE,EAAgD,CAC1GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,sBAAsBA,EAA8BG,EAAkB,CAC3EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,kBAAkBH,EAA8BuC,EAA0C,CAC/FvC,EAAQ,eAAe,GAAIuC,EAAsB,CAAC,CACpD,CAOA,OAAO,2BAA2BvC,EAA8BE,EAA+C,CAC7GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,0BAA0BA,EAA8BG,EAAkB,CAC/EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,kBAAkBH,EAA8BwC,EAA0C,CAC/FxC,EAAQ,eAAe,GAAIwC,EAAsB,CAAC,CACpD,CAOA,OAAO,2BAA2BxC,EAA8BE,EAAgD,CAC9GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,0BAA0BA,EAA8BG,EAAkB,CAC/EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,QAAQH,EAAkD,CAE/D,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,WACHA,EAA8B6B,EAAgCC,EAC9DC,EAAkCC,EAAsBlC,EAAemC,EACvEC,EAA6CC,EAC7CC,EAAkCC,EAAmCC,EACrEC,EAA0CC,EAA8D,CAC1G,OAAAZ,EAAK,UAAU5B,CAAO,EACtB4B,EAAK,QAAQ5B,EAAS6B,CAAU,EAChCD,EAAK,aAAa5B,EAAS8B,CAAe,EAC1CF,EAAK,UAAU5B,EAAS+B,CAAY,EACpCH,EAAK,gBAAgB5B,EAASgC,CAAY,EAC1CJ,EAAK,SAAS5B,EAASF,CAAK,EAC5B8B,EAAK,UAAU5B,EAASiC,CAAY,EACpCL,EAAK,QAAQ5B,EAASkC,CAAI,EAC1BN,EAAK,yBAAyB5B,EAASmC,CAA2B,EAClEP,EAAK,UAAU5B,EAASoC,CAAY,EACpCR,EAAK,WAAW5B,EAASqC,CAAa,EACtCT,EAAK,cAAc5B,EAASsC,CAAgB,EAC5CV,EAAK,kBAAkB5B,EAASuC,CAAoB,EACpDX,EAAK,kBAAkB5B,EAASwC,CAAoB,EAC7CZ,EAAK,QAAQ5B,CAAO,CAC7B,CACF,CAvdOb,EAAM,KAAAyC,IA
D2BzC,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA6dAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMsD,CAAU,CAAhB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO/C,EAAWC,EAAuC,CACvD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,mBAAmBA,EAA4BC,EAA4B,CAChF,OAAQA,GAAO,IAAI6C,GAAa,OAAO9C,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAOA,OAAO,+BAA+BA,EAA4BC,EAA4B,CAC5F,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI6C,GAAa,OAAO9C,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,KAAKT,EAAyF,CAC5F,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,eAAeC,EAA8B,CAClDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,QAAQ9B,EAA8B0C,EAAgC,CAC3E1C,EAAQ,eAAe,EAAG0C,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa1C,EAAkD,CAEpE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,gBACHA,EAA8B6B,EAAgCC,EAC9DY,EAAoD,CACtD,OAAAD,EAAU,eAAezC,CAAO,EAChCyC,EAAU,QAAQzC,EAAS6B,CAAU,EACrCY,EAAU,aAAazC,EAAS8B,CAAe,EAC/CW,EAAU,QAAQzC,EAAS0C,CAAU,EAC9BD,EAAU,aAAazC,CAAO,CACvC,CACF,CApHOb,EAAM,UAAAsD,IAD2BtD,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0HAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMwD,CAAS,CAAf,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOjD,EAAWC,EAAsC,CACtD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,kBAAkBA,EAA4BC,EAA0B,CAC7E,OAAQA,GAAO,IAAI+C,GAAY,OAAOhD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAOA,OAAO,8BAA8BA,EAA4BC,EAA0B,CACzF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI+C,GAAY,OAAOhD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAQA,WAAWU,EAAgD,CACzD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,WAAwD,CACtD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,MAAmCH,EAAgB,CACjD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,QAAQH,EAAK,KAAK,OAASG,CAAM,EAAI,IAChE,CAKA,OAAO,cAAcC,EAA8B,CACjDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,cAAcA,EAA8BO,EAAsC,CACvFP,EAAQ,eAAe,EAAGO,EAAkB,CAAC,CAC/C,CAMA,OAAO,aAAaP,EAA8B4C,EAAuD,CACvG5C,EAAQ,aAAa,EAAG4C,EAAW,CAA+C,CACpF,CAMA,OAAO,SAAS5C,EAA8BM,EAAiC,CAC7EN,EAAQ,eAAe,EAAGM,EAAa,CAAC,CAC1C,CAMA,OAAO,YAAYN,EAAkD,CAEnE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,eACHA,EAA8BO,EAC9BqC,EAAuDtC,EAAqD,CAC9G,OAAAqC,EAAS,cAAc3C,CAAO,EAC9B2C,EAAS,cAAc3C,EAASO,CAAgB,EAChDoC,EAAS,aAAa3C,EAAS4C,CAAS,EACxCD,EAAS,SAAS3C,EAASM,CAAW,EAC/BqC,EAAS,YAAY3C,CAAO,CACrC,CACF,CAhHOb,EAAM,SAAAwD,IAD2BxD,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsHAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM0D,CAAc,CAApB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOnD,EAAWC,EAA2C,CAC3D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,uBAAuBA,EAA4BC,EAAoC,CAC5F,OAAQA,GAAO,IAAIiD,GAAiB,OAAOlD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC5F,CAOA,OAAO,mCAAmCA,EAA4BC,EAAoC,CACxG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIiD,GAAiB,OAAOlD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC5F,CAQA,OAAOU,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,SAA4B,CAC1B,IAAIN,E
AAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAKA,OAAO,mBAAmBC,EAA8B,CACtDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,UAAUA,EAA8B+B,EAAkC,CAC/E/B,EAAQ,eAAe,EAAG+B,EAAc,CAAC,CAC3C,CAMA,OAAO,WAAW/B,EAA8B8C,EAA2B,CACzE9C,EAAQ,cAAc,EAAG8C,EAAS9C,EAAQ,WAAW,EAAG,CAAC,CAAC,CAC5D,CAMA,OAAO,iBAAiBA,EAAkD,CAExE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,oBACHA,EAA8B+B,EAAkCe,EAA+C,CACjH,OAAAD,EAAc,mBAAmB7C,CAAO,EACxC6C,EAAc,UAAU7C,EAAS+B,CAAY,EAC7Cc,EAAc,WAAW7C,EAAS8C,CAAO,EAClCD,EAAc,iBAAiB7C,CAAO,CAC/C,CACF,CA5FOb,EAAM,cAAA0D,IAD2B1D,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAkGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM4D,CAAO,CAAb,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOrD,EAAWC,EAAoC,CACpD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,gBAAgBA,EAA4BC,EAAsB,CACvE,OAAQA,GAAO,IAAImD,GAAU,OAAOpD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACrF,CAOA,OAAO,4BAA4BA,EAA4BC,EAAsB,CACnF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAImD,GAAU,OAAOpD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACrF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,KAAKP,EAAsC,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACtE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,YAAqB,CACnB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,UAAwD,CACtD,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,QAAQD,EAA4B,CAClC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,CAAK,EAAI,CACxF,CAKA,eAAwB,CACtB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,cAAgC,CAC9B,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EACH,IAAI,WACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CASA,WAAWD,EAAeO,EAAgD,CACxE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,kBAA2B,CACzB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,YAAYC,EAA8B,CAC/CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,QAAQ9B,EAA8BgD,EAAgC,CAC3EhD,EAAQ,eAAe,EAAGgD,EAAY,CAAC,CACzC,CAOA,OAAO,iBAAiBhD,EAA8BE,EAA8C,CAClGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,gBAAgBA,EAA8BG,EAAkB,CACrEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,YAAYH,EAA8BiD,EAAuD,CACtGjD,EAAQ,cAAc,EAAGiD,EAAU,CAAqD,CAC1F,CAMA,OAAO,WAAWjD,EAA8BkD,EAAmC,CACjFlD,EAAQ,eAAe,EAAGkD,EAAe,CAAC,CAC5C,CAOA,OAAO,oBAAoBlD,EAA8BE,EAA+C,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,QAAQE,EAAKR,CAAC,CAAC,EAEzB,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,cAAcH,EAA8BmD,EAAsC,CACvFnD,EAAQ,eAAe,EAAGmD,EAAkB,CAAC,CAC/C,CAOA,OAAO,uBAAuBnD,EAA8BE,EAAgD,CAC1GF,EAAQ,YAAY,EAAGE,
EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,sBAAsBA,EAA8BG,EAAkB,CAC3EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,UAAUH,EAAkD,CAEjE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,aACHA,EAA8B6B,EAAgCC,EAC9DkB,EAAgCC,EAChCC,EAAmCC,EAA0D,CAC/F,OAAAJ,EAAO,YAAY/C,CAAO,EAC1B+C,EAAO,QAAQ/C,EAAS6B,CAAU,EAClCkB,EAAO,aAAa/C,EAAS8B,CAAe,EAC5CiB,EAAO,QAAQ/C,EAASgD,CAAU,EAClCD,EAAO,YAAY/C,EAASiD,CAAQ,EACpCF,EAAO,WAAW/C,EAASkD,CAAa,EACxCH,EAAO,cAAc/C,EAASmD,CAAgB,EACvCJ,EAAO,UAAU/C,CAAO,CACjC,CACF,CAhROb,EAAM,OAAA4D,IAD2B5D,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsRAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMiE,CAAa,CAAnB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO1D,EAAWC,EAA0C,CAC1D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,sBAAsBA,EAA4BC,EAAkC,CACzF,OAAQA,GAAO,IAAIwD,GAAgB,OAAOzD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAOA,OAAO,kCAAkCA,EAA4BC,EAAkC,CACrG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIwD,GAAgB,OAAOzD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAMA,OAAOC,EAAqF,CAC1F,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,QAAQH,EAAqF,CAC3F,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,KAAKD,EAAsC,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACtE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,YAAqB,CACnB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,kBAAkBC,EAA8B,CACrDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,UAAUA,EAA8BqD,EAAkC,CAC/ErD,EAAQ,eAAe,EAAGqD,EAAc,CAAC,CAC3C,CAMA,OAAO,WAAWrD,EAA8BsD,EAAmC,CACjFtD,EAAQ,eAAe,EAAGsD,EAAe,CAAC,CAC5C,CAMA,OAAO,QAAQtD,EAA8BgD,EAAgC,CAC3EhD,EAAQ,eAAe,EAAGgD,EAAY,CAAC,CACzC,CAOA,OAAO,iBAAiBhD,EAA8BE,EAA8C,CAClGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,gBAAgBA,EAA8BG,EAAkB,CACrEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAAkD,CAEvE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,mBACHA,EAA8BqD,EAAkCC,EAChEN,EAAoD,CACtD,OAAAI,EAAa,kBAAkBpD,CAAO,EACtCoD,EAAa,UAAUpD,EAASqD,CAAY,EAC5CD,EAAa,WAAWpD,EAASsD,CAAa,EAC9CF,EAAa,QAAQpD,EAASgD,CAAU,EACjCI,EAAa,gBAAgBpD,CAAO,CAC7C,CACF,CAhJOb,EAAM,aAAAiE,IAD2BjE,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsJAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMoE,CAAU,CAAhB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO7D,EAAWC,EAAuC,CACvD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,mBAAmBA,EAA4BC,EAA4B,CAChF,OAAQA,GAAO,IAAI2D,GAAa,OAAO5D,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAOA,OAAO,+BAA+BA,EAA4BC,EAA4B,CAC5F,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI2D,GAAa,OAAO5D,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,MAAmD,CACjD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAKA,GAAY,CACV,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,YAAY,KAAK,OAASA,CAAM,EAAI,CAC/D,CAKA,GAAsB,CACpB,IAAIA,EA
AS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAQA,EAAEM,EAAgD,CAChD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,EAAET,EAAqF,CACrF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,EAAEH,EAAmF,CACnF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,OAAOD,EAA4B,CACjC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,YAAY,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EAAI,CAC9F,CAKA,cAAuB,CACrB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,aAAiC,CAC/B,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EACH,IAAI,aACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CAMA,KAAKD,EAAsC,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACtE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,YAAqB,CACnB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,QAAQD,EAAeO,EAAgD,CACrE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,eAAwB,CACtB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,QAAQD,EAAeF,EAAqF,CAC1G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,eAAwB,CACtB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,OAAOD,EAAeF,EAAmF,CACvG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,cAAuB,CACrB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,eAAeC,EAA8B,CAClDA,EAAQ,YAAY,EAAE,CACxB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,QAAQ9B,EAA8BkC,EAAkD,CAC7FlC,EAAQ,cAAc,EAAGkC,EAAM,CAAoD,CACrF,CAMA,OAAO,KAAKlC,EAA8BwD,EAAW,CACnDxD,EAAQ,gBAAgB,EAAGwD,EAAG,CAAG,CACnC,CAMA,OAAO,KAAKxD,EAA8BN,EAAqB,CAC7DM,EAAQ,cAAc,EAAGN,EAAGM,EAAQ,WAAW,EAAG,CAAC,CAAC,CACtD,CAMA,OAAO,KAAKA,EAA8ByD,EAA6B,CACrEzD,EAAQ,eAAe,EAAGyD,EAAS,CAAC,CACtC,CAMA,OAAO,KAAKzD,EAA8B0D,EAA6B,CACrE1D,EAAQ,eAAe,EAAG0D,EAAS,CAAC,CACtC,CAMA,OAAO,KAAK1D,EAA8B2D,EAA6B,CACrE3D,EAAQ,eAAe,EAAG2D,EAAS,CAAC,CACtC,CAMA,OAAO,UAAU3D,EAA8B4D,EAAkC,CAC/E5D,EAAQ,eAAe,EAAG4D,EAAc,CAAC,CAC3C,CAOA,OAAO,mBAAmB5D,EAA8BE,EAA+C,CACrGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,WAAWE,EAAKR,CAAC,CAAC,EAE5B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,QAAQH,EAA8B6D,EAAgC,CAC3E7D,EAAQ,eAAe,EAAG6D,EAAY,CAAC,CACzC,CAOA,OAAO,iBAAiB7D,EAA8BE,EAA8C,CAClGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,
OAAO,gBAAgBA,EAA8BG,EAAkB,CACrEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8B8D,EAAmC,CACjF9D,EAAQ,eAAe,GAAI8D,EAAe,CAAC,CAC7C,CAOA,OAAO,oBAAoB9D,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8B+D,EAAmC,CACjF/D,EAAQ,eAAe,GAAI+D,EAAe,CAAC,CAC7C,CAOA,OAAO,oBAAoB/D,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,UAAUH,EAA8BgE,EAAkC,CAC/EhE,EAAQ,eAAe,GAAIgE,EAAc,CAAC,CAC5C,CAOA,OAAO,mBAAmBhE,EAA8BE,EAAgD,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,aAAaH,EAAkD,CAEpE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,gBACHA,EAA8B6B,EAAgCC,EAC9DI,EAAkDsB,EAAW9D,EAAqB+D,EAClFC,EAA6BC,EAA6BC,EAC1DC,EAAgCC,EAAmCC,EACnEC,EAAsD,CACxD,OAAAT,EAAU,eAAevD,CAAO,EAChCuD,EAAU,QAAQvD,EAAS6B,CAAU,EACrC0B,EAAU,aAAavD,EAAS8B,CAAe,EAC/CyB,EAAU,QAAQvD,EAASkC,CAAI,EAC/BqB,EAAU,KAAKvD,EAASwD,CAAC,EACzBD,EAAU,KAAKvD,EAASN,CAAC,EACzB6D,EAAU,KAAKvD,EAASyD,CAAO,EAC/BF,EAAU,KAAKvD,EAAS0D,CAAO,EAC/BH,EAAU,KAAKvD,EAAS2D,CAAO,EAC/BJ,EAAU,UAAUvD,EAAS4D,CAAY,EACzCL,EAAU,QAAQvD,EAAS6D,CAAU,EACrCN,EAAU,WAAWvD,EAAS8D,CAAa,EAC3CP,EAAU,WAAWvD,EAAS+D,CAAa,EAC3CR,EAAU,UAAUvD,EAASgE,CAAY,EAClCT,EAAU,aAAavD,CAAO,CACvC,CACF,CApdOb,EAAM,UAAAoE,IAD2BpE,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0dAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM8E,CAAM,CAAZ,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOvE,EAAWC,EAAmC,CACnD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,eAAeA,EAA4BC,EAAoB,CACpE,OAAQA,GAAO,IAAIqE,GAAS,OAAOtE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,OAAO,2BAA2BA,EAA4BC,EAAoB,CAChF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIqE,GAAS,OAAOtE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,aAAaG,EAAeF,EAAqF,CAC/G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,oBAA6B,CAC3B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,SAASD,EAAeF,EAA2F,CACjH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,WACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,gBAAyB,CACvB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,MAAMD,EAAeF,EAAiF,CACpG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,MACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,aAAsB,CACpB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,cAAuB,CACrB,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,OAASA,CAAM,EAAI,CAC9D,CAOA,UAAUD,EAAeF,EAAyF,CAChH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,iBAA0B,CACxB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK
,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,OAAOD,EAAeO,EAAgD,CACpE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,cAAuB,CACrB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,QAAQD,EAAeO,EAAgD,CACrE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,eAAwB,CACtB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,mBAAmBD,EAAeF,EACiB,CACjD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,cACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,0BAAmC,CACjC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,WAAWC,EAA8B,CAC9CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,gBAAgBA,EAA8BkE,EAAwC,CAC3FlE,EAAQ,eAAe,EAAGkE,EAAoB,CAAC,CACjD,CAOA,OAAO,yBAAyBlE,EAA8BE,EAAgD,CAC5GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,wBAAwBA,EAA8BG,EAAkB,CAC7EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,YAAYH,EAA8BmE,EAAoC,CACnFnE,EAAQ,eAAe,EAAGmE,EAAgB,CAAC,CAC7C,CAOA,OAAO,qBAAqBnE,EAA8BE,EAAgD,CACxGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,oBAAoBA,EAA8BG,EAAkB,CACzEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,SAASH,EAA8BoE,EAAiC,CAC7EpE,EAAQ,eAAe,EAAGoE,EAAa,CAAC,CAC1C,CAOA,OAAO,kBAAkBpE,EAA8BE,EAAgD,CACrGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,iBAAiBA,EAA8BG,EAAkB,CACtEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAA8BqE,EAAsB,CACzErE,EAAQ,cAAc,EAAGqE,EAAc,CAAC,CAC1C,CAMA,OAAO,aAAarE,EAA8BsE,EAAqC,CACrFtE,EAAQ,eAAe,EAAGsE,EAAiB,CAAC,CAC9C,CAOA,OAAO,sBAAsBtE,EAA8BE,EAAgD,CACzGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,qBAAqBA,EAA8BG,EAAkB,CAC1EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,UAAUH,EAA8BoC,EAAkC,CAC/EpC,EAAQ,eAAe,EAAGoC,EAAc,CAAC,CAC3C,CAOA,OAAO,mBAAmBpC,EAA8BE,EAAgD,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8BqC,EAAmC,CACjFrC,EAAQ,eAAe,EAAGqC,EAAe,CAAC,CAC5C,CAOA,OAAO,oBAAoBrC,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,sBAAsBH,EAA8BuE,EAA8C,CACvGvE,EAAQ,eAAe,EAAGuE,EAA0B,CAAC,CACvD,CAOA,OAAO,+BAA+BvE,EAA8BE,EAC7C,CACrBF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,8BAA8BA,EAA8BG,EAAkB,CACnFH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,SAASH,EAAkD,CAEhE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,YACHA,EAA8BkE,EAAwCC,EACtEC,EAAiCC,EAAsBC,EACvDlC,EAAkCC,EAClCkC,EAAkE,CACpE,OAAAN,EAAM,WAAWjE,CAAO,EACxBiE,EAAM,gBAAgBjE,EAASkE,CAAkB,EACjDD,EAAM,YAAYjE,EAASmE,CAA
c,EACzCF,EAAM,SAASjE,EAASoE,CAAW,EACnCH,EAAM,gBAAgBjE,EAASqE,CAAY,EAC3CJ,EAAM,aAAajE,EAASsE,CAAe,EAC3CL,EAAM,UAAUjE,EAASoC,CAAY,EACrC6B,EAAM,WAAWjE,EAASqC,CAAa,EACvC4B,EAAM,sBAAsBjE,EAASuE,CAAwB,EACtDN,EAAM,SAASjE,CAAO,CAC/B,CACF,CA3aOb,EAAM,MAAA8E,IAD2B9E,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAibAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMqF,CAAM,CAAZ,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO9E,EAAWC,EAAmC,CACnD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,eAAeA,EAA4BC,EAAoB,CACpE,OAAQA,GAAO,IAAI4E,GAAS,OAAO7E,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,OAAO,2BAA2BA,EAA4BC,EAAoB,CAChF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI4E,GAAS,OAAO7E,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAKA,WAA8B,CAC5B,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAOA,YAAYD,EAAeF,EACyB,CAClD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,eACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,mBAA4B,CAC1B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAQA,aAAaM,EAAgD,CAC3D,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,gBAAgBA,EAAgD,CAC9D,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,OAAOA,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,cAAiC,CAC/B,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAQA,UAAUM,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,MAAMT,EAAmF,CACvF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAQA,eAAeM,EAAgD,CAC7D,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,OAAO,WAAWL,EAA8B,CAC9CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,aAAaA,EAA8ByE,EAA6B,CAC7EzE,EAAQ,cAAc,EAAGyE,EAAWzE,EAAQ,WAAW,EAAG,CAAC,CAAC,CAC9D,CAMA,OAAO,eAAeA,EAA8B0E,EAAuC,CACzF1E,EAAQ,eAAe,EAAG0E,EAAmB,CAAC,CAChD,CAOA,OAAO,wBAAwB1E,EAA8BE,EAAgD,CAC3GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,uBAAuBA,EAA8BG,EAAkB,CAC5EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAA8B2E,EAAwC,CAC3F3E,EAAQ,eAAe,EAAG2E,EAAoB,CAAC,CACjD,CAMA,OAAO,mBAAmB3E,EAA8B4E,EAA2C,CACjG5E,EAAQ,eAAe,EAAG4E,EAAuB,CAAC,CACpD,CAMA,OAAO,UAAU5E,EAA8B+B,EAAkC,CAC/E/B,EAAQ,eAAe,EAAG+B,EAAc,CAAC,CAC3C,CAMA,OAAO,gBAAgB/B,EAA8B6E,EAAgC,CACnF7E,EAAQ,cAAc,EAAG6E,EAAc7E,EAAQ,WAAW,EAAG,CAAC,CAAC,CACjE,CAMA,OAAO,aAAaA,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,SAAS9B,EAA8B8E,EAAiC,CAC7E9E,EAAQ,eAAe,EAAG8E,EAAa,CAAC,CAC1C,CAMA,OAAO,kBAAkB9E,EAA8B+E,EAA0C,CAC/F/E,EAAQ,eAAe,EAAG+E,EAAsB,CAAC,CACnD,CAMA,OAAO,SAAS/E,EAAkD,CAEhE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,YACHA,EAA8ByE,EAA6BC,EAC3DC,EAAwCC,EACxC7C,EAAkC8C,EAAgC/C,EAClEgD,EAAiCC,EAA8D,CACjG,OAAAP,EAAM,WAAWxE,CAAO,EACxBwE,EAAM,aAAaxE,EAASyE,CAAS,EACrCD,EAAM,eAAexE,EAAS0E,CAAiB,EAC/CF,EAAM,gBAAgBxE,EAAS2E,CAAkB,EACjDH,EAAM,mBAAmBxE,EAAS4E,CAAqB,EACvDJ,EAAM,UAAUxE,EAAS+B,CAAY,EACrCyC,EAAM,gBAAgBxE,EAAS6
E,CAAY,EAC3CL,EAAM,aAAaxE,EAAS8B,CAAe,EAC3C0C,EAAM,SAASxE,EAAS8E,CAAW,EACnCN,EAAM,kBAAkBxE,EAAS+E,CAAoB,EAC9CP,EAAM,SAASxE,CAAO,CAC/B,CACF,CAvQOb,EAAM,MAAAqF,IAD2BrF,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA6QAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM6F,CAAkB,CAAxB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOtF,EAAWC,EAA+C,CAC/D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,2BAA2BA,EAA4BC,EAA4C,CACxG,OAAQA,GAAO,IAAIoF,GAAqB,OAAOrF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAChG,CAOA,OAAO,uCAAuCA,EAA4BC,EACpD,CACpB,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIoF,GAAqB,OAAOrF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAChG,CAMA,YAAYG,EAA4B,CACtC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EAAI,CAC7F,CAKA,mBAA4B,CAC1B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,kBAAqC,CACnC,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EACH,IAAI,YACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CAMA,gBAAgBD,EAAsC,CACpD,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACvE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,uBAAgC,CAC9B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,uBAAuBC,EAA8B,CAC1DA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,eAAeA,EAA8BiF,EAAuC,CACzFjF,EAAQ,eAAe,EAAGiF,EAAmB,CAAC,CAChD,CAOA,OAAO,wBAAwBjF,EAA8BE,EAA+C,CAC1GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,uBAAuBA,EAA8BG,EAAkB,CAC5EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,mBAAmBH,EAA8BkF,EAA2C,CACjGlF,EAAQ,eAAe,EAAGkF,EAAuB,CAAC,CACpD,CAOA,OAAO,4BAA4BlF,EAA8BE,EAA8C,CAC7GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,2BAA2BA,EAA8BG,EAAkB,CAChFH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,qBAAqBH,EAAkD,CAE5E,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,wBACHA,EAA8BiF,EAC9BC,EAA+D,CACjE,OAAAF,EAAkB,uBAAuBhF,CAAO,EAChDgF,EAAkB,eAAehF,EAASiF,CAAiB,EAC3DD,EAAkB,mBAAmBhF,EAASkF,CAAqB,EAC5DF,EAAkB,qBAAqBhF,CAAO,CACvD,CACF,CApKOb,EAAM,kBAAA6F,IAD2B7F,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0KAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMgG,CAAqB,CAA3B,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOzF,EAAWC,EAAkD,CAClE,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,8BAA8BA,EAA4BC,EAAkD,CACjH,OAAQA,GAAO,IAAIuF,GAAwB,OAAOxF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnG,CAOA,OAAO,0CAA0CA,EAA4BC,EACpD,CACvB,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIuF,GAAwB,OAAOxF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnG,CAQA,QAAQU,EAAgD,CACtD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,aAAaT,EAAiG,CAC5G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,cACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,0BAA0BC,EAA8B,CAC7DA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BoF,EAAmC,CACjFpF,EAAQ,eAAe,EAAGoF,EAAe,CAAC,CAC5C,CAMA,OAAO,gBAAgBpF,EAA8BqF,EAAwC,CAC3FrF,EAAQ,eAAe,EAAGqF,EAAoB,CAAC,CACjD,CAMA,OAAO,wBAAwBrF,EAAkD,CAC/E,IAAID,EAASC,EAAQ,UAAU,EAC/B,OAAAA,EAAQ,cAAcD,EAAQ,CAAC,EACxBA,CACT,CAEA,OAAO,2BACHC,EAA8BoF,EAC9BC,EAA4D,CAC9D,OA
AAF,EAAqB,0BAA0BnF,CAAO,EACtDmF,EAAqB,WAAWnF,EAASoF,CAAa,EACtDD,EAAqB,gBAAgBnF,EAASqF,CAAkB,EACzDF,EAAqB,wBAAwBnF,CAAO,CAC7D,CACF,CAlGOb,EAAM,qBAAAgG,IAD2BhG,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAwGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMmG,CAAa,CAAnB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO5F,EAAWC,EAA0C,CAC1D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,sBAAsBA,EAA4BC,EAAkC,CACzF,OAAQA,GAAO,IAAI0F,GAAgB,OAAO3F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAOA,OAAO,kCAAkCA,EAA4BC,EAAkC,CACrG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI0F,GAAgB,OAAO3F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAMA,QAAQC,EAA2G,CACjH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,mBACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAOA,sBAAsBD,EAAeF,EACsB,CACzD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,sBACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,6BAAsC,CACpC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,kBAAkBC,EAA8B,CACrDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BuF,EAAmC,CACjFvF,EAAQ,eAAe,EAAGuF,EAAe,CAAC,CAC5C,CAMA,OAAO,yBAAyBvF,EAA8BwF,EAAiD,CAC7GxF,EAAQ,eAAe,EAAGwF,EAA6B,CAAC,CAC1D,CAOA,OAAO,kCAAkCxF,EAA8BE,EAChD,CACrBF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,iCAAiCA,EAA8BG,EAAkB,CACtFH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAAkD,CAEvE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,mBACHA,EAA8BuF,EAC9BC,EAAqE,CACvE,OAAAF,EAAa,kBAAkBtF,CAAO,EACtCsF,EAAa,WAAWtF,EAASuF,CAAa,EAC9CD,EAAa,yBAAyBtF,EAASwF,CAA2B,EACnEF,EAAa,gBAAgBtF,CAAO,CAC7C,CACF,CAhIOb,EAAM,aAAAmG,IAD2BnG,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsIAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMsG,CAAiB,CAAvB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO/F,EAAWC,EAA8C,CAC9D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,0BAA0BA,EAA4BC,EAA0C,CACrG,OAAQA,GAAO,IAAI6F,GAAoB,OAAO9F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC/F,CAOA,OAAO,sCAAsCA,EAA4BC,EAA0C,CACjH,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI6F,GAAoB,OAAO9F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC/F,CAMA,OAAO,oBAAoBA,EAAqC,CAC9D,OAAOA,EAAG,iBAAiB,MAAM,CACnC,CAQA,WAAWU,EAAgD,CACzD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,MAAMT,EAAmF,CACvF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,aAAaH,EAAiG,CAC5G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,cACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,sBAAsBC,EAA8B,CACzDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,cAAcA,EAA8B0F,EAAsC,CACvF1F,EAAQ,eAAe,EAAG0F,EAAkB,CAAC,CAC/C,CAMA,OAAO,SAAS1F,EAA8B2F,EAAiC,CAC7E3F,EAAQ,eAAe,EAAG2F,EAAa,CAAC,CAC1C,CAMA,OAAO,gBAAgB3F,EAA8BqF,EAAwC,CAC3FrF,EAAQ,eAAe,EAAGqF,EAAoB,CAAC,CACjD,CAMA,OAAO,oBAAoBrF,EAAkD,CAE3E,OADaA,EAAQ,UAAU,CAEjC,CAMA,OAAO,6BAA6BA,EAA8BD,EAA4B,CAC5FC,EAAQ,OAAOD,EAAQ,MAAM,CAC/B,CAMA,OAAO,yCAAyCC,EAA8BD,EAA4B,CACxGC,EAAQ,OAAOD,EAAQ,OAAQ,EAAI,CACrC,CAEA,OAAO,uBACHC,EAA8B0F,EAAsCC,EACpEN,EAA4D,CAC9D,OAAAI,EAAiB,sBAAsBzF,CAAO,EAC9CyF,EAAiB,cAAczF,EAAS0F,CAAgB,EACxDD,EAAiB,SAASzF,EAAS2F,CAAW,EAC9CF,EAAiB,gBAAgBzF,EAASqF,CAAkB,EACrDI,
EAAiB,oBAAoBzF,CAAO,CACrD,CACF,CA5IOb,EAAM,iBAAAsG,IAD2BtG,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,MC/oHjB,IAAA8G,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAmBjB,SAASA,GAAUC,EAAIC,EAAmB,CAKtC,QAJIC,EAAU,IAAI,MAAM,UAAU,OAAS,CAAC,EACxCC,EAAU,EACVC,EAAU,EACVC,EAAU,GACPD,EAAQ,UAAU,QACrBF,EAAOC,GAAQ,EAAI,UAAUC,GAAO,EACxC,OAAO,IAAI,QAAQ,SAAkBE,EAASC,EAAQ,CAClDL,EAAOC,CAAM,EAAI,SAAkBK,EAAmB,CAClD,GAAIH,EAEA,GADAA,EAAU,GACNG,EACAD,EAAOC,CAAG,MACT,CAGD,QAFIN,EAAS,IAAI,MAAM,UAAU,OAAS,CAAC,EACvCC,EAAS,EACNA,EAASD,EAAO,QACnBA,EAAOC,GAAQ,EAAI,UAAUA,CAAM,EACvCG,EAAQ,MAAM,KAAMJ,CAAM,CAC9B,CAER,EACA,GAAI,CACAF,EAAG,MAAMC,GAAO,KAAMC,CAAM,CAChC,OAASM,EAAK,CACNH,IACAA,EAAU,GACVE,EAAOC,CAAG,EAElB,CACJ,CAAC,CACL,ICnDA,IAAAC,GAAAC,GAAAC,IAAA,cAOA,IAAIC,GAASD,GAObC,GAAO,OAAS,SAAgBC,EAAQ,CACpC,IAAIC,EAAID,EAAO,OACf,GAAI,CAACC,EACD,MAAO,GAEX,QADIC,EAAI,EACD,EAAED,EAAI,EAAI,GAAKD,EAAO,OAAOC,CAAC,IAAM,KACvC,EAAEC,EACN,OAAO,KAAK,KAAKF,EAAO,OAAS,CAAC,EAAI,EAAIE,CAC9C,EAGA,IAAIC,GAAM,IAAI,MAAM,EAAE,EAGlBC,GAAM,IAAI,MAAM,GAAG,EAGvB,IAASC,GAAI,EAAGA,GAAI,IAChBD,GAAID,GAAIE,EAAC,EAAIA,GAAI,GAAKA,GAAI,GAAKA,GAAI,GAAKA,GAAI,GAAKA,GAAI,GAAKA,GAAI,EAAIA,GAAI,GAAK,EAAE,EAAIA,KAD5E,IAAAA,GAUTN,GAAO,OAAS,SAAgBO,EAAQC,EAAOC,EAAK,CAMhD,QALIC,EAAQ,KACRC,EAAQ,CAAC,EACTL,EAAI,EACJM,EAAI,EACJC,EACGL,EAAQC,GAAK,CAChB,IAAIK,EAAIP,EAAOC,GAAO,EACtB,OAAQI,EAAG,CACP,IAAK,GACDD,EAAML,GAAG,EAAIF,GAAIU,GAAK,CAAC,EACvBD,GAAKC,EAAI,IAAM,EACfF,EAAI,EACJ,MACJ,IAAK,GACDD,EAAML,GAAG,EAAIF,GAAIS,EAAIC,GAAK,CAAC,EAC3BD,GAAKC,EAAI,KAAO,EAChBF,EAAI,EACJ,MACJ,IAAK,GACDD,EAAML,GAAG,EAAIF,GAAIS,EAAIC,GAAK,CAAC,EAC3BH,EAAML,GAAG,EAAIF,GAAIU,EAAI,EAAE,EACvBF,EAAI,EACJ,KACR,CACIN,EAAI,QACHI,IAAUA,EAAQ,CAAC,IAAI,KAAK,OAAO,aAAa,MAAM,OAAQC,CAAK,CAAC,EACrEL,EAAI,EAEZ,CAOA,OANIM,IACAD,EAAML,GAAG,EAAIF,GAAIS,CAAC,EAClBF,EAAML,GAAG,EAAI,GACTM,IAAM,IACND,EAAML,GAAG,EAAI,KAEjBI,GACIJ,GACAI,EAAM,KAAK,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAAC,EAC5DI,EAAM,KAAK,EAAE,GAEjB,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAC9D,EAEA,IAAIS,GAAkB,mBAUtBf,GAAO,OAAS,SAAgBC,EAAQM,EAAQS,EAAQ,CAIpD,QAHIR,EAAQQ,EACRJ,EAAI,EACJC,EACKP,EAAI,EAAGA,EAAIL,EAAO,QAAS,CAChC,IAAIgB,EAAIhB,EAAO,WAAWK,GAAG,EAC7B,GAAIW,IAAM,IAAML,EAAI,EAChB,MACJ,IAAKK,EAAIZ,GAAIY,CAAC,KAAO,OACjB,MAAM,MAAMF,EAAe,EAC/B,OAAQH,EAAG,CACP,IAAK,GACDC,EAAII,EACJL,EAAI,EACJ,MACJ,IAAK,GACDL,EAAOS,GAAQ,EAAIH,GAAK,GAAKI,EAAI,KAAO,EACxCJ,EAAII,EACJL,EAAI,EACJ,MACJ,IAAK,GACDL,EAAOS,GAAQ,GAAKH,EAAI,KAAO,GAAKI,EAAI,KAAO,EAC/CJ,EAAII,EACJL,EAAI,EACJ,MACJ,IAAK,GACDL,EAAOS,GAAQ,GAAKH,EAAI,IAAM,EAAII,EAClCL,EAAI,EACJ,KACR,CACJ,CACA,GAAIA,IAAM,EACN,MAAM,MAAMG,EAAe,EAC/B,OAAOC,EAASR,CACpB,EAOAR,GAAO,KAAO,SAAcC,EAAQ,CAChC,MAAO,mEAAmE,KAAKA,CAAM,CACzF,IC1IA,IAAAiB,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAQjB,SAASA,IAAe,CAOpB,KAAK,WAAa,CAAC,CACvB,CASAA,GAAa,UAAU,GAAK,SAAYC,EAAKC,EAAIC,EAAK,CAClD,OAAC,KAAK,WAAWF,CAAG,IAAM,KAAK,WAAWA,CAAG,EAAI,CAAC,IAAI,KAAK,CACvD,GAAMC,EACN,IAAMC,GAAO,IACjB,CAAC,EACM,IACX,EAQAH,GAAa,UAAU,IAAM,SAAaC,EAAKC,EAAI,CAC/C,GAAID,IAAQ,OACR,KAAK,WAAa,CAAC,UAEfC,IAAO,OACP,KAAK,WAAWD,CAAG,EAAI,CAAC,MAGxB,SADIG,EAAY,KAAK,WAAWH,CAAG,EAC1BI,EAAI,EAAGA,EAAID,EAAU,QACtBA,EAAUC,CAAC,EAAE,KAAOH,EACpBE,EAAU,OAAOC,EAAG,CAAC,EAErB,EAAEA,EAGlB,OAAO,IACX,EAQAL,GAAa,UAAU,KAAO,SAAcC,EAAK,CAC7C,IAAIG,EAAY,KAAK,WAAWH,CAAG,EACnC,GAAIG,EAAW,CAGX,QAFIE,EAAO,CAAC,EACRD,EAAI,EACDA,EAAI,UAAU,QACjBC,EAAK,KAAK,UAAUD,GAAG,CAAC,EAC5B,IAAKA,EAAI,EAAGA,EAAID,EAAU,QACtBA,EAAUC,CAAC,EAAE,GAAG,MAAMD,EAAUC,GAAG,EAAE,IAAKC,CAAI,CACtD,CACA,OAAO,IACX,IC3EA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cAEAA,GAAO,QAAUC,GAAQA,EAAO,EAqFhC,SAASA,G
AAQF,EAAS,CAGtB,OAAI,OAAO,aAAiB,IAAc,UAAW,CAEjD,IAAIG,EAAM,IAAI,aAAa,CAAE,EAAG,CAAC,EAC7BC,EAAM,IAAI,WAAWD,EAAI,MAAM,EAC/BE,EAAMD,EAAI,CAAC,IAAM,IAErB,SAASE,EAAmBC,EAAKC,EAAKC,EAAK,CACvCN,EAAI,CAAC,EAAII,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAEA,SAASM,EAAmBH,EAAKC,EAAKC,EAAK,CACvCN,EAAI,CAAC,EAAII,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAGAJ,EAAQ,aAAeK,EAAKC,EAAqBI,EAEjDV,EAAQ,aAAeK,EAAKK,EAAqBJ,EAEjD,SAASK,EAAkBH,EAAKC,EAAK,CACjC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbN,EAAI,CAAC,CAChB,CAEA,SAASS,EAAkBJ,EAAKC,EAAK,CACjC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbN,EAAI,CAAC,CAChB,CAGAH,EAAQ,YAAcK,EAAKM,EAAoBC,EAE/CZ,EAAQ,YAAcK,EAAKO,EAAoBD,CAGnD,EAAG,EAAS,UAAW,CAEnB,SAASE,EAAmBC,EAAWP,EAAKC,EAAKC,EAAK,CAClD,IAAIM,EAAOR,EAAM,EAAI,EAAI,EAGzB,GAFIQ,IACAR,EAAM,CAACA,GACPA,IAAQ,EACRO,EAAU,EAAIP,EAAM,EAAmB,EAAqB,WAAYC,EAAKC,CAAG,UAC3E,MAAMF,CAAG,EACdO,EAAU,WAAYN,EAAKC,CAAG,UACzBF,EAAM,qBACXO,GAAWC,GAAQ,GAAK,cAAgB,EAAGP,EAAKC,CAAG,UAC9CF,EAAM,sBACXO,GAAWC,GAAQ,GAAK,KAAK,MAAMR,EAAM,oBAAqB,KAAO,EAAGC,EAAKC,CAAG,MAC/E,CACD,IAAIO,EAAW,KAAK,MAAM,KAAK,IAAIT,CAAG,EAAI,KAAK,GAAG,EAC9CU,EAAW,KAAK,MAAMV,EAAM,KAAK,IAAI,EAAG,CAACS,CAAQ,EAAI,OAAO,EAAI,QACpEF,GAAWC,GAAQ,GAAKC,EAAW,KAAO,GAAKC,KAAc,EAAGT,EAAKC,CAAG,CAC5E,CACJ,CAEAT,EAAQ,aAAea,EAAmB,KAAK,KAAMK,EAAW,EAChElB,EAAQ,aAAea,EAAmB,KAAK,KAAMM,EAAW,EAEhE,SAASC,EAAkBC,EAAUb,EAAKC,EAAK,CAC3C,IAAIa,EAAOD,EAASb,EAAKC,CAAG,EACxBM,GAAQO,GAAQ,IAAM,EAAI,EAC1BN,EAAWM,IAAS,GAAK,IACzBL,EAAWK,EAAO,QACtB,OAAON,IAAa,IACdC,EACA,IACAF,EAAO,MACPC,IAAa,EACbD,EAAO,qBAAwBE,EAC/BF,EAAO,KAAK,IAAI,EAAGC,EAAW,GAAG,GAAKC,EAAW,QAC3D,CAEAjB,EAAQ,YAAcoB,EAAkB,KAAK,KAAMG,EAAU,EAC7DvB,EAAQ,YAAcoB,EAAkB,KAAK,KAAMI,EAAU,CAEjE,EAAG,EAGC,OAAO,aAAiB,IAAc,UAAW,CAEjD,IAAIC,EAAM,IAAI,aAAa,CAAC,EAAE,CAAC,EAC3BrB,EAAM,IAAI,WAAWqB,EAAI,MAAM,EAC/BpB,EAAMD,EAAI,CAAC,IAAM,IAErB,SAASsB,EAAoBnB,EAAKC,EAAKC,EAAK,CACxCgB,EAAI,CAAC,EAAIlB,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAEA,SAASuB,EAAoBpB,EAAKC,EAAKC,EAAK,CACxCgB,EAAI,CAAC,EAAIlB,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAGAJ,EAAQ,cAAgBK,EAAKqB,EAAsBC,EAEnD3B,EAAQ,cAAgBK,EAAKsB,EAAsBD,EAEnD,SAASE,EAAmBpB,EAAKC,EAAK,CAClC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbgB,EAAI,CAAC,CAChB,CAEA,SAASI,EAAmBrB,EAAKC,EAAK,CAClC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAA
I,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbgB,EAAI,CAAC,CAChB,CAGAzB,EAAQ,aAAeK,EAAKuB,EAAqBC,EAEjD7B,EAAQ,aAAeK,EAAKwB,EAAqBD,CAGrD,EAAG,EAAS,UAAW,CAEnB,SAASE,EAAoBhB,EAAWiB,EAAMC,EAAMzB,EAAKC,EAAKC,EAAK,CAC/D,IAAIM,EAAOR,EAAM,EAAI,EAAI,EAGzB,GAFIQ,IACAR,EAAM,CAACA,GACPA,IAAQ,EACRO,EAAU,EAAGN,EAAKC,EAAMsB,CAAI,EAC5BjB,EAAU,EAAIP,EAAM,EAAmB,EAAqB,WAAYC,EAAKC,EAAMuB,CAAI,UAChF,MAAMzB,CAAG,EAChBO,EAAU,EAAGN,EAAKC,EAAMsB,CAAI,EAC5BjB,EAAU,WAAYN,EAAKC,EAAMuB,CAAI,UAC9BzB,EAAM,sBACbO,EAAU,EAAGN,EAAKC,EAAMsB,CAAI,EAC5BjB,GAAWC,GAAQ,GAAK,cAAgB,EAAGP,EAAKC,EAAMuB,CAAI,MACvD,CACH,IAAIf,EACJ,GAAIV,EAAM,uBACNU,EAAWV,EAAM,OACjBO,EAAUG,IAAa,EAAGT,EAAKC,EAAMsB,CAAI,EACzCjB,GAAWC,GAAQ,GAAKE,EAAW,cAAgB,EAAGT,EAAKC,EAAMuB,CAAI,MAClE,CACH,IAAIhB,EAAW,KAAK,MAAM,KAAK,IAAIT,CAAG,EAAI,KAAK,GAAG,EAC9CS,IAAa,OACbA,EAAW,MACfC,EAAWV,EAAM,KAAK,IAAI,EAAG,CAACS,CAAQ,EACtCF,EAAUG,EAAW,mBAAqB,EAAGT,EAAKC,EAAMsB,CAAI,EAC5DjB,GAAWC,GAAQ,GAAKC,EAAW,MAAQ,GAAKC,EAAW,QAAU,WAAa,EAAGT,EAAKC,EAAMuB,CAAI,CACxG,CACJ,CACJ,CAEAhC,EAAQ,cAAgB8B,EAAoB,KAAK,KAAMZ,GAAa,EAAG,CAAC,EACxElB,EAAQ,cAAgB8B,EAAoB,KAAK,KAAMX,GAAa,EAAG,CAAC,EAExE,SAASc,EAAmBZ,EAAUU,EAAMC,EAAMxB,EAAKC,EAAK,CACxD,IAAIyB,EAAKb,EAASb,EAAKC,EAAMsB,CAAI,EAC7BI,EAAKd,EAASb,EAAKC,EAAMuB,CAAI,EAC7BjB,GAAQoB,GAAM,IAAM,EAAI,EACxBnB,EAAWmB,IAAO,GAAK,KACvBlB,EAAW,YAAckB,EAAK,SAAWD,EAC7C,OAAOlB,IAAa,KACdC,EACA,IACAF,EAAO,MACPC,IAAa,EACbD,EAAO,OAASE,EAChBF,EAAO,KAAK,IAAI,EAAGC,EAAW,IAAI,GAAKC,EAAW,iBAC5D,CAEAjB,EAAQ,aAAeiC,EAAmB,KAAK,KAAMV,GAAY,EAAG,CAAC,EACrEvB,EAAQ,aAAeiC,EAAmB,KAAK,KAAMT,GAAY,EAAG,CAAC,CAEzE,EAAG,EAEIxB,CACX,CAIA,SAASkB,GAAYX,EAAKC,EAAKC,EAAK,CAChCD,EAAIC,CAAO,EAAKF,EAAa,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,EAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,GAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,EAC5B,CAEA,SAASY,GAAYZ,EAAKC,EAAKC,EAAK,CAChCD,EAAIC,CAAO,EAAKF,IAAQ,GACxBC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,GAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,EAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,EAAa,GACjC,CAEA,SAASgB,GAAWf,EAAKC,EAAK,CAC1B,OAAQD,EAAIC,CAAO,EACXD,EAAIC,EAAM,CAAC,GAAK,EAChBD,EAAIC,EAAM,CAAC,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,MAAQ,CACpC,CAEA,SAASe,GAAWhB,EAAKC,EAAK,CAC1B,OAAQD,EAAIC,CAAO,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,EAChBD,EAAIC,EAAM,CAAC,KAAO,CAC9B,IC9UA,IAAA2B,GAAAC,GAAA,gCACA,OAAO,QAAU,QAQjB,SAAS,QAAQ,WAAY,CACzB,GAAI,CACA,IAAI,IAAM,KAAK,QAAQ,QAAQ,IAAI,IAAI,CAAC,EAAE,UAAU,EACpD,GAAI,MAAQ,IAAI,QAAU,OAAO,KAAK,GAAG,EAAE,QACvC,OAAO,GACf,OAASC,EAAG,CAAC,CACb,OAAO,IACX,IChBA,IAAAC,GAAAC,GAAAC,IAAA,cAOA,IAAIC,GAAOD,GAOXC,GAAK,OAAS,SAAqBC,EAAQ,CAGvC,QAFIC,EAAM,EACNC,EAAI,EACCC,EAAI,EAAGA,EAAIH,EAAO,OAAQ,EAAEG,EACjCD,EAAIF,EAAO,WAAWG,CAAC,EACnBD,EAAI,IACJD,GAAO,EACFC,EAAI,KACTD,GAAO,GACDC,EAAI,SAAY,QAAWF,EAAO,WAAWG,EAAI,CAAC,EAAI,SAAY,OACxE,EAAEA,EACFF,GAAO,GAEPA,GAAO,EAEf,OAAOA,CACX,EASAF,GAAK,KAAO,SAAmBK,EAAQC,EAAOC,EAAK,CAC/C,IAAIL,EAAMK,EAAMD,EAChB,GAAIJ,EAAM,EACN,MAAO,GAKX,QAJIM,EAAQ,KACRC,EAAQ,CAAC,EACTL,EAAI,EACJM,EACGJ,EAAQC,GACXG,EAAIL,EAAOC,GAAO,EACdI,EAAI,IACJD,EAAML,GAAG,EAAIM,EACRA,EAAI,KAAOA,EAAI,IACpBD,EAAML,GAAG,GAAKM,EAAI,KAAO,EAAIL,EAAOC,GAAO,EAAI,GAC1CI,EAAI,KAAOA,EAAI,KACpBA,IAAMA,EAAI,IAAM,IAAML,EAAOC,GAAO,EAAI,KAAO,IAAMD,EAAOC,GAAO,EAAI,KAAO,EAAID,EAAOC,GAAO,EAAI,IAAM,MAC1GG,EAAML,GAAG,EAAI,OAAUM,GAAK,IAC5BD,EAAML,GAAG,EAAI,OAAUM,EAAI,OAE3BD,EAAML,GAAG,GAAKM,EAAI,KAAO,IAAML,EAAOC,GAAO,EAAI,KAAO,EAAID,EAAOC,GAAO,EAAI,GAC9EF,EAAI,QACHI,IAAUA,EAAQ,CAAC,IAAI,KAAK,OA
AO,aAAa,MAAM,OAAQC,CAAK,CAAC,EACrEL,EAAI,GAGZ,OAAII,GACIJ,GACAI,EAAM,KAAK,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAAC,EAC5DI,EAAM,KAAK,EAAE,GAEjB,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAC9D,EASAJ,GAAK,MAAQ,SAAoBC,EAAQI,EAAQM,EAAQ,CAIrD,QAHIL,EAAQK,EACRC,EACAC,EACKT,EAAI,EAAGA,EAAIH,EAAO,OAAQ,EAAEG,EACjCQ,EAAKX,EAAO,WAAWG,CAAC,EACpBQ,EAAK,IACLP,EAAOM,GAAQ,EAAIC,EACZA,EAAK,MACZP,EAAOM,GAAQ,EAAIC,GAAM,EAAU,IACnCP,EAAOM,GAAQ,EAAIC,EAAW,GAAK,MAC3BA,EAAK,SAAY,SAAYC,EAAKZ,EAAO,WAAWG,EAAI,CAAC,GAAK,SAAY,OAClFQ,EAAK,QAAYA,EAAK,OAAW,KAAOC,EAAK,MAC7C,EAAET,EACFC,EAAOM,GAAQ,EAAIC,GAAM,GAAU,IACnCP,EAAOM,GAAQ,EAAIC,GAAM,GAAK,GAAK,IACnCP,EAAOM,GAAQ,EAAIC,GAAM,EAAK,GAAK,IACnCP,EAAOM,GAAQ,EAAIC,EAAW,GAAK,MAEnCP,EAAOM,GAAQ,EAAIC,GAAM,GAAU,IACnCP,EAAOM,GAAQ,EAAIC,GAAM,EAAK,GAAK,IACnCP,EAAOM,GAAQ,EAAIC,EAAW,GAAK,KAG3C,OAAOD,EAASL,CACpB,ICxGA,IAAAQ,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GA6BjB,SAASA,GAAKC,EAAOC,EAAOC,EAAM,CAC9B,IAAIC,EAASD,GAAQ,KACjBE,EAASD,IAAS,EAClBE,EAAS,KACTC,EAASH,EACb,OAAO,SAAoBD,EAAM,CAC7B,GAAIA,EAAO,GAAKA,EAAOE,EACnB,OAAOJ,EAAME,CAAI,EACjBI,EAASJ,EAAOC,IAChBE,EAAOL,EAAMG,CAAI,EACjBG,EAAS,GAEb,IAAIC,EAAMN,EAAM,KAAKI,EAAMC,EAAQA,GAAUJ,CAAI,EACjD,OAAII,EAAS,IACTA,GAAUA,EAAS,GAAK,GACrBC,CACX,CACJ,IC/CA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAO,KAUX,SAASD,GAASE,EAAIC,EAAI,CAStB,KAAK,GAAKD,IAAO,EAMjB,KAAK,GAAKC,IAAO,CACrB,CAOA,IAAIC,GAAOJ,GAAS,KAAO,IAAIA,GAAS,EAAG,CAAC,EAE5CI,GAAK,SAAW,UAAW,CAAE,MAAO,EAAG,EACvCA,GAAK,SAAWA,GAAK,SAAW,UAAW,CAAE,OAAO,IAAM,EAC1DA,GAAK,OAAS,UAAW,CAAE,MAAO,EAAG,EAOrC,IAAIC,GAAWL,GAAS,SAAW,mBAOnCA,GAAS,WAAa,SAAoBM,EAAO,CAC7C,GAAIA,IAAU,EACV,OAAOF,GACX,IAAIG,EAAOD,EAAQ,EACfC,IACAD,EAAQ,CAACA,GACb,IAAIJ,EAAKI,IAAU,EACfH,GAAMG,EAAQJ,GAAM,aAAe,EACvC,OAAIK,IACAJ,EAAK,CAACA,IAAO,EACbD,EAAK,CAACA,IAAO,EACT,EAAEA,EAAK,aACPA,EAAK,EACD,EAAEC,EAAK,aACPA,EAAK,KAGV,IAAIH,GAASE,EAAIC,CAAE,CAC9B,EAOAH,GAAS,KAAO,SAAcM,EAAO,CACjC,GAAI,OAAOA,GAAU,SACjB,OAAON,GAAS,WAAWM,CAAK,EACpC,GAAIL,GAAK,SAASK,CAAK,EAEnB,GAAIL,GAAK,KACLK,EAAQL,GAAK,KAAK,WAAWK,CAAK,MAElC,QAAON,GAAS,WAAW,SAASM,EAAO,EAAE,CAAC,EAEtD,OAAOA,EAAM,KAAOA,EAAM,KAAO,IAAIN,GAASM,EAAM,MAAQ,EAAGA,EAAM,OAAS,CAAC,EAAIF,EACvF,EAOAJ,GAAS,UAAU,SAAW,SAAkBQ,EAAU,CACtD,GAAI,CAACA,GAAY,KAAK,KAAO,GAAI,CAC7B,IAAIN,EAAK,CAAC,KAAK,GAAK,IAAM,EACtBC,EAAK,CAAC,KAAK,KAAW,EAC1B,OAAKD,IACDC,EAAKA,EAAK,IAAM,GACb,EAAED,EAAKC,EAAK,WACvB,CACA,OAAO,KAAK,GAAK,KAAK,GAAK,UAC/B,EAOAH,GAAS,UAAU,OAAS,SAAgBQ,EAAU,CAClD,OAAOP,GAAK,KACN,IAAIA,GAAK,KAAK,KAAK,GAAK,EAAG,KAAK,GAAK,EAAG,EAAQO,CAAS,EAEzD,CAAE,IAAK,KAAK,GAAK,EAAG,KAAM,KAAK,GAAK,EAAG,SAAU,EAAQA,CAAU,CAC7E,EAEA,IAAIC,GAAa,OAAO,UAAU,WAOlCT,GAAS,SAAW,SAAkBU,EAAM,CACxC,OAAIA,IAASL,GACFD,GACJ,IAAIJ,IACLS,GAAW,KAAKC,EAAM,CAAC,EACvBD,GAAW,KAAKC,EAAM,CAAC,GAAK,EAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,GAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,MAAQ,GAEpCD,GAAW,KAAKC,EAAM,CAAC,EACvBD,GAAW,KAAKC,EAAM,CAAC,GAAK,EAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,GAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,MAAQ,CAC1C,CACJ,EAMAV,GAAS,UAAU,OAAS,UAAkB,CAC1C,OAAO,OAAO,aACV,KAAK,GAAY,IACjB,KAAK,KAAO,EAAK,IACjB,KAAK,KAAO,GAAK,IACjB,KAAK,KAAO,GACZ,KAAK,GAAY,IACjB,KAAK,KAAO,EAAK,IACjB,KAAK,KAAO,GAAK,IACjB,KAAK,KAAO,EAChB,CACJ,EAMAA,GAAS,UAAU,SAAW,UAAoB,CAC9C,IAAIW,EAAS,KAAK,IAAM,GACxB,YAAK,KAAQ,KAAK,IAAM,EAAI,KAAK,KAAO,IAAMA,KAAU,EACxD,KAAK,IAAQ,KAAK,IAAM,EAAsBA,KAAU,EACjD,IACX,EAMAX,GAAS,UAAU,SAAW,UAAoB,CAC9C,IAAIW,EAAO,EAAE,KAAK,GAAK,GACvB,YAAK,KAAQ,KAAK,KAAO,EAAI,KAAK,IAAM,IAAMA,KAAU,EACxD,KAAK,IAAQ,KAAK,KAAO,EAAqBA,KAAU,EACjD,IACX,EAMAX,GAAS,UAAU,OAAS,UAAkB,CAC1C,IAAIY,EAAS,KAAK,GACdC,GAAS,KAAK,KAAO,GAAK,KAAK,IAAM,KA
AO,EAC5CC,EAAS,KAAK,KAAO,GACzB,OAAOA,IAAU,EACVD,IAAU,EACRD,EAAQ,MACNA,EAAQ,IAAM,EAAI,EAClBA,EAAQ,QAAU,EAAI,EACxBC,EAAQ,MACNA,EAAQ,IAAM,EAAI,EAClBA,EAAQ,QAAU,EAAI,EAC1BC,EAAQ,IAAM,EAAI,EAC7B,ICvMA,IAAAC,GAAAC,GAAAC,IAAA,cACA,IAAIC,GAAOD,GAGXC,GAAK,UAAY,KAGjBA,GAAK,OAAS,KAGdA,GAAK,aAAe,KAGpBA,GAAK,MAAQ,KAGbA,GAAK,QAAU,KAGfA,GAAK,KAAO,KAGZA,GAAK,KAAO,KAGZA,GAAK,SAAW,KAOhBA,GAAK,OAAS,GAAQ,OAAO,OAAW,KAClB,QACA,OAAO,SACP,OAAO,QAAQ,UACf,OAAO,QAAQ,SAAS,MAO9CA,GAAK,OAASA,GAAK,QAAU,QACf,OAAO,OAAW,KAAe,QACjC,OAAO,KAAW,KAAe,MACjCD,GAQdC,GAAK,WAAa,OAAO,OAAS,OAAO,OAAO,CAAC,CAAC,EAA+B,CAAC,EAOlFA,GAAK,YAAc,OAAO,OAAS,OAAO,OAAO,CAAC,CAAC,EAA+B,CAAC,EAQnFA,GAAK,UAAY,OAAO,WAAwC,SAAmBC,EAAO,CACtF,OAAO,OAAOA,GAAU,UAAY,SAASA,CAAK,GAAK,KAAK,MAAMA,CAAK,IAAMA,CACjF,EAOAD,GAAK,SAAW,SAAkBC,EAAO,CACrC,OAAO,OAAOA,GAAU,UAAYA,aAAiB,MACzD,EAOAD,GAAK,SAAW,SAAkBC,EAAO,CACrC,OAAOA,GAAS,OAAOA,GAAU,QACrC,EAUAD,GAAK,MAQLA,GAAK,MAAQ,SAAeE,EAAKC,EAAM,CACnC,IAAIF,EAAQC,EAAIC,CAAI,EACpB,OAAIF,GAAS,MAAQC,EAAI,eAAeC,CAAI,EACjC,OAAOF,GAAU,WAAa,MAAM,QAAQA,CAAK,EAAIA,EAAM,OAAS,OAAO,KAAKA,CAAK,EAAE,QAAU,EACrG,EACX,EAaAD,GAAK,OAAU,UAAW,CACtB,GAAI,CACA,IAAII,EAASJ,GAAK,QAAQ,QAAQ,EAAE,OAEpC,OAAOI,EAAO,UAAU,UAAYA,EAAoC,IAC5E,MAAY,CAER,OAAO,IACX,CACJ,EAAG,EAGHJ,GAAK,aAAe,KAGpBA,GAAK,oBAAsB,KAO3BA,GAAK,UAAY,SAAmBK,EAAa,CAE7C,OAAO,OAAOA,GAAgB,SACxBL,GAAK,OACDA,GAAK,oBAAoBK,CAAW,EACpC,IAAIL,GAAK,MAAMK,CAAW,EAC9BL,GAAK,OACDA,GAAK,aAAaK,CAAW,EAC7B,OAAO,WAAe,IAClBA,EACA,IAAI,WAAWA,CAAW,CAC5C,EAMAL,GAAK,MAAQ,OAAO,WAAe,IAAc,WAAwC,MAezFA,GAAK,KAAkCA,GAAK,OAAO,SAAsCA,GAAK,OAAO,QAAQ,MACtEA,GAAK,OAAO,MACvCA,GAAK,QAAQ,MAAM,EAO/BA,GAAK,OAAS,mBAOdA,GAAK,QAAU,wBAOfA,GAAK,QAAU,6CAOfA,GAAK,WAAa,SAAoBC,EAAO,CACzC,OAAOA,EACDD,GAAK,SAAS,KAAKC,CAAK,EAAE,OAAO,EACjCD,GAAK,SAAS,QACxB,EAQAA,GAAK,aAAe,SAAsBM,EAAMC,EAAU,CACtD,IAAIC,EAAOR,GAAK,SAAS,SAASM,CAAI,EACtC,OAAIN,GAAK,KACEA,GAAK,KAAK,SAASQ,EAAK,GAAIA,EAAK,GAAID,CAAQ,EACjDC,EAAK,SAAS,EAAQD,CAAS,CAC1C,EAUA,SAASE,GAAMC,EAAKC,EAAKC,EAAU,CAC/B,QAASC,EAAO,OAAO,KAAKF,CAAG,EAAGG,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,GACpDJ,EAAIG,EAAKC,CAAC,CAAC,IAAM,QAAa,CAACF,KAC/BF,EAAIG,EAAKC,CAAC,CAAC,EAAIH,EAAIE,EAAKC,CAAC,CAAC,GAClC,OAAOJ,CACX,CAEAV,GAAK,MAAQS,GAObT,GAAK,QAAU,SAAiBe,EAAK,CACjC,OAAOA,EAAI,OAAO,CAAC,EAAE,YAAY,EAAIA,EAAI,UAAU,CAAC,CACxD,EAQA,SAASC,GAASC,EAAM,CAEpB,SAASC,EAAYC,EAASC,EAAY,CAEtC,GAAI,EAAE,gBAAgBF,GAClB,OAAO,IAAIA,EAAYC,EAASC,CAAU,EAK9C,OAAO,eAAe,KAAM,UAAW,CAAE,IAAK,UAAW,CAAE,OAAOD,CAAS,CAAE,CAAC,EAG1E,MAAM,kBACN,MAAM,kBAAkB,KAAMD,CAAW,EAEzC,OAAO,eAAe,KAAM,QAAS,CAAE,MAAO,IAAI,MAAM,EAAE,OAAS,EAAG,CAAC,EAEvEE,GACAX,GAAM,KAAMW,CAAU,CAC9B,CAEA,OAAAF,EAAY,UAAY,OAAO,OAAO,MAAM,UAAW,CACnD,YAAa,CACT,MAAOA,EACP,SAAU,GACV,WAAY,GACZ,aAAc,EAClB,EACA,KAAM,CACF,IAAK,UAAe,CAAE,OAAOD,CAAM,EACnC,IAAK,OACL,WAAY,GAKZ,aAAc,EAClB,EACA,SAAU,CACN,MAAO,UAAiB,CAAE,OAAO,KAAK,KAAO,KAAO,KAAK,OAAS,EAClE,SAAU,GACV,WAAY,GACZ,aAAc,EAClB,CACJ,CAAC,EAEMC,CACX,CAEAlB,GAAK,SAAWgB,GAmBhBhB,GAAK,cAAgBgB,GAAS,eAAe,EAoB7ChB,GAAK,YAAc,SAAkBqB,EAAY,CAE7C,QADIC,EAAW,CAAC,EACPR,EAAI,EAAGA,EAAIO,EAAW,OAAQ,EAAEP,EACrCQ,EAASD,EAAWP,CAAC,CAAC,EAAI,EAO9B,OAAO,UAAW,CACd,QAASD,EAAO,OAAO,KAAK,IAAI,EAAG,EAAIA,EAAK,OAAS,EAAG,EAAI,GAAI,EAAE,EAC9D,GAAIS,EAAST,EAAK,CAAC,CAAC,IAAM,GAAK,KAAKA,EAAK,CAAC,CAAC,IAAM,QAAa,KAAKA,EAAK,CAAC,CAAC,IAAM,KAC5E,OAAOA,EAAK,CAAC,CACzB,CACJ,EAeAb,GAAK,YAAc,SAAkBqB,EAAY,CAQ7C,OAAO,SAASJ,EAAM,CAClB,QAASH,EAAI,EAAGA,EAAIO,EAAW,OAAQ,EAAEP,EACjCO,EAAWP,CAAC,IAAMG,GAClB,OAAO,KAAKI,EAAWP,CAAC,CAAC,CACrC,CACJ,EAkBAd,GAAK,cAAgB,CACjB,MAAO,OACP,MAAO,OACP,MAAO,OACP,KAAM,EACV,EAGAA,GAAK,WAAa,UAAW,CACzB,IAAII,EAASJ,GAAK,OAElB,GAAI,CAACI,EAAQ,CACTJ
,GAAK,aAAeA,GAAK,oBAAsB,KAC/C,MACJ,CAGAA,GAAK,aAAeI,EAAO,OAAS,WAAW,MAAQA,EAAO,MAE1D,SAAqBH,EAAOsB,EAAU,CAClC,OAAO,IAAInB,EAAOH,EAAOsB,CAAQ,CACrC,EACJvB,GAAK,oBAAsBI,EAAO,aAE9B,SAA4BoB,EAAM,CAC9B,OAAO,IAAIpB,EAAOoB,CAAI,CAC1B,CACR,ICrbA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAY,KAEZC,GAEAC,GAAYF,GAAK,SACjBG,GAAYH,GAAK,OACjBI,GAAYJ,GAAK,KAWrB,SAASK,GAAGC,EAAIC,EAAKC,EAAK,CAMtB,KAAK,GAAKF,EAMV,KAAK,IAAMC,EAMX,KAAK,KAAO,OAMZ,KAAK,IAAMC,CACf,CAGA,SAASC,IAAO,CAAC,CAUjB,SAASC,GAAMC,EAAQ,CAMnB,KAAK,KAAOA,EAAO,KAMnB,KAAK,KAAOA,EAAO,KAMnB,KAAK,IAAMA,EAAO,IAMlB,KAAK,KAAOA,EAAO,MACvB,CAOA,SAASZ,IAAS,CAMd,KAAK,IAAM,EAMX,KAAK,KAAO,IAAIM,GAAGI,GAAM,EAAG,CAAC,EAM7B,KAAK,KAAO,KAAK,KAMjB,KAAK,OAAS,IAOlB,CAEA,IAAIG,GAAS,UAAkB,CAC3B,OAAOZ,GAAK,OACN,UAA+B,CAC7B,OAAQD,GAAO,OAAS,UAAyB,CAC7C,OAAO,IAAIE,EACf,GAAG,CACP,EAEE,UAAwB,CACtB,OAAO,IAAIF,EACf,CACR,EAOAA,GAAO,OAASa,GAAO,EAOvBb,GAAO,MAAQ,SAAec,EAAM,CAChC,OAAO,IAAIb,GAAK,MAAMa,CAAI,CAC9B,EAIIb,GAAK,QAAU,QACfD,GAAO,MAAQC,GAAK,KAAKD,GAAO,MAAOC,GAAK,MAAM,UAAU,QAAQ,GAUxED,GAAO,UAAU,MAAQ,SAAcO,EAAIC,EAAKC,EAAK,CACjD,YAAK,KAAO,KAAK,KAAK,KAAO,IAAIH,GAAGC,EAAIC,EAAKC,CAAG,EAChD,KAAK,KAAOD,EACL,IACX,EAEA,SAASO,GAAUN,EAAKO,EAAKC,EAAK,CAC9BD,EAAIC,CAAG,EAAIR,EAAM,GACrB,CAEA,SAASS,GAAcT,EAAKO,EAAKC,EAAK,CAClC,KAAOR,EAAM,KACTO,EAAIC,GAAK,EAAIR,EAAM,IAAM,IACzBA,KAAS,EAEbO,EAAIC,CAAG,EAAIR,CACf,CAWA,SAASU,GAASX,EAAKC,EAAK,CACxB,KAAK,IAAMD,EACX,KAAK,KAAO,OACZ,KAAK,IAAMC,CACf,CAEAU,GAAS,UAAY,OAAO,OAAOb,GAAG,SAAS,EAC/Ca,GAAS,UAAU,GAAKD,GAOxBlB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CAGnD,YAAK,MAAQ,KAAK,KAAO,KAAK,KAAK,KAAO,IAAID,IACzCC,EAAQA,IAAU,GACT,IAAY,EACpBA,EAAQ,MAAY,EACpBA,EAAQ,QAAY,EACpBA,EAAQ,UAAY,EACA,EAC1BA,CAAK,GAAG,IACD,IACX,EAQApB,GAAO,UAAU,MAAQ,SAAqBoB,EAAO,CACjD,OAAOA,EAAQ,EACT,KAAK,MAAMC,GAAe,GAAIlB,GAAS,WAAWiB,CAAK,CAAC,EACxD,KAAK,OAAOA,CAAK,CAC3B,EAOApB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,OAAO,KAAK,QAAQA,GAAS,EAAIA,GAAS,MAAQ,CAAC,CACvD,EAEA,SAASC,GAAcZ,EAAKO,EAAKC,EAAK,CAClC,KAAOR,EAAI,IACPO,EAAIC,GAAK,EAAIR,EAAI,GAAK,IAAM,IAC5BA,EAAI,IAAMA,EAAI,KAAO,EAAIA,EAAI,IAAM,MAAQ,EAC3CA,EAAI,MAAQ,EAEhB,KAAOA,EAAI,GAAK,KACZO,EAAIC,GAAK,EAAIR,EAAI,GAAK,IAAM,IAC5BA,EAAI,GAAKA,EAAI,KAAO,EAExBO,EAAIC,GAAK,EAAIR,EAAI,EACrB,CAQAT,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,IAAIE,EAAOnB,GAAS,KAAKiB,CAAK,EAC9B,OAAO,KAAK,MAAMC,GAAeC,EAAK,OAAO,EAAGA,CAAI,CACxD,EASAtB,GAAO,UAAU,MAAQA,GAAO,UAAU,OAQ1CA,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,IAAIE,EAAOnB,GAAS,KAAKiB,CAAK,EAAE,SAAS,EACzC,OAAO,KAAK,MAAMC,GAAeC,EAAK,OAAO,EAAGA,CAAI,CACxD,EAOAtB,GAAO,UAAU,KAAO,SAAoBoB,EAAO,CAC/C,OAAO,KAAK,MAAML,GAAW,EAAGK,EAAQ,EAAI,CAAC,CACjD,EAEA,SAASG,GAAad,EAAKO,EAAKC,EAAK,CACjCD,EAAIC,CAAO,EAAKR,EAAc,IAC9BO,EAAIC,EAAM,CAAC,EAAKR,IAAQ,EAAM,IAC9BO,EAAIC,EAAM,CAAC,EAAKR,IAAQ,GAAM,IAC9BO,EAAIC,EAAM,CAAC,EAAKR,IAAQ,EAC5B,CAOAT,GAAO,UAAU,QAAU,SAAuBoB,EAAO,CACrD,OAAO,KAAK,MAAMG,GAAc,EAAGH,IAAU,CAAC,CAClD,EAQApB,GAAO,UAAU,SAAWA,GAAO,UAAU,QAQ7CA,GAAO,UAAU,QAAU,SAAuBoB,EAAO,CACrD,IAAIE,EAAOnB,GAAS,KAAKiB,CAAK,EAC9B,OAAO,KAAK,MAAMG,GAAc,EAAGD,EAAK,EAAE,EAAE,MAAMC,GAAc,EAAGD,EAAK,EAAE,CAC9E,EASAtB,GAAO,UAAU,SAAWA,GAAO,UAAU,QAQ7CA,GAAO,UAAU,MAAQ,SAAqBoB,EAAO,CACjD,OAAO,KAAK,MAAMnB,GAAK,MAAM,aAAc,EAAGmB,CAAK,CACvD,EAQApB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,OAAO,KAAK,MAAMnB,GAAK,MAAM,cAAe,EAAGmB,CAAK,CACxD,EAEA,IAAII,GAAavB,GAAK,MAAM,UAAU,IAChC,SAAwBQ,EAAKO,EAAKC,EAAK,CACrCD,EAAI,IAAIP,EAAKQ,CAAG,CACpB,EAEE,SAAwBR,EAAKO,EAAKC,EAAK,CACrC,QAASQ,EAAI,EAAGA,EAAIhB,EAAI,OAAQ,EAAEgB,EAC9BT,EAAIC,EAAMQ,CAAC,EAAIhB,EAAIgB,CAAC,CAC5B,EAOJzB,GAAO,UAAU,MAAQ,SAAqBoB,EAAO,CACjD,IAAIZ,EAAMY,EAAM,SAAW,EAC3B,GAAI,CAACZ,EACD,OAAO,KAAK,
MAAMO,GAAW,EAAG,CAAC,EACrC,GAAId,GAAK,SAASmB,CAAK,EAAG,CACtB,IAAIJ,EAAMhB,GAAO,MAAMQ,EAAMJ,GAAO,OAAOgB,CAAK,CAAC,EACjDhB,GAAO,OAAOgB,EAAOJ,EAAK,CAAC,EAC3BI,EAAQJ,CACZ,CACA,OAAO,KAAK,OAAOR,CAAG,EAAE,MAAMgB,GAAYhB,EAAKY,CAAK,CACxD,EAOApB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,IAAIZ,EAAMH,GAAK,OAAOe,CAAK,EAC3B,OAAOZ,EACD,KAAK,OAAOA,CAAG,EAAE,MAAMH,GAAK,MAAOG,EAAKY,CAAK,EAC7C,KAAK,MAAML,GAAW,EAAG,CAAC,CACpC,EAOAf,GAAO,UAAU,KAAO,UAAgB,CACpC,YAAK,OAAS,IAAIW,GAAM,IAAI,EAC5B,KAAK,KAAO,KAAK,KAAO,IAAIL,GAAGI,GAAM,EAAG,CAAC,EACzC,KAAK,IAAM,EACJ,IACX,EAMAV,GAAO,UAAU,MAAQ,UAAiB,CACtC,OAAI,KAAK,QACL,KAAK,KAAS,KAAK,OAAO,KAC1B,KAAK,KAAS,KAAK,OAAO,KAC1B,KAAK,IAAS,KAAK,OAAO,IAC1B,KAAK,OAAS,KAAK,OAAO,OAE1B,KAAK,KAAO,KAAK,KAAO,IAAIM,GAAGI,GAAM,EAAG,CAAC,EACzC,KAAK,IAAO,GAET,IACX,EAMAV,GAAO,UAAU,OAAS,UAAkB,CACxC,IAAI0B,EAAO,KAAK,KACZC,EAAO,KAAK,KACZnB,EAAO,KAAK,IAChB,YAAK,MAAM,EAAE,OAAOA,CAAG,EACnBA,IACA,KAAK,KAAK,KAAOkB,EAAK,KACtB,KAAK,KAAOC,EACZ,KAAK,KAAOnB,GAET,IACX,EAMAR,GAAO,UAAU,OAAS,UAAkB,CAIxC,QAHI0B,EAAO,KAAK,KAAK,KACjBV,EAAO,KAAK,YAAY,MAAM,KAAK,GAAG,EACtCC,EAAO,EACJS,GACHA,EAAK,GAAGA,EAAK,IAAKV,EAAKC,CAAG,EAC1BA,GAAOS,EAAK,IACZA,EAAOA,EAAK,KAGhB,OAAOV,CACX,EAEAhB,GAAO,WAAa,SAAS4B,EAAe,CACxC1B,GAAe0B,EACf5B,GAAO,OAASa,GAAO,EACvBX,GAAa,WAAW,CAC5B,IChdA,IAAA2B,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAGjB,IAAIC,GAAS,MACZD,GAAa,UAAY,OAAO,OAAOC,GAAO,SAAS,GAAG,YAAcD,GAEzE,IAAIE,GAAO,KAQX,SAASF,IAAe,CACpBC,GAAO,KAAK,IAAI,CACpB,CAEAD,GAAa,WAAa,UAAY,CAOlCA,GAAa,MAAQE,GAAK,oBAE1BF,GAAa,iBAAmBE,GAAK,QAAUA,GAAK,OAAO,qBAAqB,YAAcA,GAAK,OAAO,UAAU,IAAI,OAAS,MAC3H,SAA8BC,EAAKC,EAAKC,EAAK,CAC7CD,EAAI,IAAID,EAAKE,CAAG,CAElB,EAEE,SAA+BF,EAAKC,EAAKC,EAAK,CAC9C,GAAIF,EAAI,KACNA,EAAI,KAAKC,EAAKC,EAAK,EAAGF,EAAI,MAAM,MAC7B,SAASG,EAAI,EAAGA,EAAIH,EAAI,QAC3BC,EAAIC,GAAK,EAAIF,EAAIG,GAAG,CACxB,CACR,EAMAN,GAAa,UAAU,MAAQ,SAA4BO,EAAO,CAC1DL,GAAK,SAASK,CAAK,IACnBA,EAAQL,GAAK,aAAaK,EAAO,QAAQ,GAC7C,IAAIC,EAAMD,EAAM,SAAW,EAC3B,YAAK,OAAOC,CAAG,EACXA,GACA,KAAK,MAAMR,GAAa,iBAAkBQ,EAAKD,CAAK,EACjD,IACX,EAEA,SAASE,GAAkBN,EAAKC,EAAKC,EAAK,CAClCF,EAAI,OAAS,GACbD,GAAK,KAAK,MAAMC,EAAKC,EAAKC,CAAG,EACxBD,EAAI,UACTA,EAAI,UAAUD,EAAKE,CAAG,EAEtBD,EAAI,MAAMD,EAAKE,CAAG,CAC1B,CAKAL,GAAa,UAAU,OAAS,SAA6BO,EAAO,CAChE,IAAIC,EAAMN,GAAK,OAAO,WAAWK,CAAK,EACtC,YAAK,OAAOC,CAAG,EACXA,GACA,KAAK,MAAMC,GAAmBD,EAAKD,CAAK,EACrC,IACX,EAUAP,GAAa,WAAW,ICpFxB,IAAAU,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAY,KAEZC,GAEAC,GAAYF,GAAK,SACjBG,GAAYH,GAAK,KAGrB,SAASI,GAAgBC,EAAQC,EAAa,CAC1C,OAAO,WAAW,uBAAyBD,EAAO,IAAM,OAASC,GAAe,GAAK,MAAQD,EAAO,GAAG,CAC3G,CAQA,SAASN,GAAOQ,EAAQ,CAMpB,KAAK,IAAMA,EAMX,KAAK,IAAM,EAMX,KAAK,IAAMA,EAAO,MACtB,CAEA,IAAIC,GAAe,OAAO,WAAe,IACnC,SAA4BD,EAAQ,CAClC,GAAIA,aAAkB,YAAc,MAAM,QAAQA,CAAM,EACpD,OAAO,IAAIR,GAAOQ,CAAM,EAC5B,MAAM,MAAM,gBAAgB,CAChC,EAEE,SAAsBA,EAAQ,CAC5B,GAAI,MAAM,QAAQA,CAAM,EACpB,OAAO,IAAIR,GAAOQ,CAAM,EAC5B,MAAM,MAAM,gBAAgB,CAChC,EAEAE,GAAS,UAAkB,CAC3B,OAAOT,GAAK,OACN,SAA6BO,EAAQ,CACnC,OAAQR,GAAO,OAAS,SAAuBQ,EAAQ,CACnD,OAAOP,GAAK,OAAO,SAASO,CAAM,EAC5B,IAAIN,GAAaM,CAAM,EAEvBC,GAAaD,CAAM,CAC7B,GAAGA,CAAM,CACb,EAEEC,EACV,EASAT,GAAO,OAASU,GAAO,EAEvBV,GAAO,UAAU,OAASC,GAAK,MAAM,UAAU,UAAuCA,GAAK,MAAM,UAAU,MAO3GD,GAAO,UAAU,OAAU,UAA6B,CACpD,IAAIW,EAAQ,WACZ,OAAO,UAAuB,CAKgC,GAJ1DA,GAAkB,KAAK,IAAI,KAAK,GAAG,EAAI,OAAgB,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,MACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GAAG,EAAI,MAAS,KAAO,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,OACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQ,MAAQ,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,OACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQ,MAAQ,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,OACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GA
AG,EAAK,KAAO,MAAQ,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,KAAK,OAAOA,EAGjG,IAAK,KAAK,KAAO,GAAK,KAAK,IACvB,WAAK,IAAM,KAAK,IACVN,GAAgB,KAAM,EAAE,EAElC,OAAOM,CACX,CACJ,EAAG,EAMHX,GAAO,UAAU,MAAQ,UAAsB,CAC3C,OAAO,KAAK,OAAO,EAAI,CAC3B,EAMAA,GAAO,UAAU,OAAS,UAAuB,CAC7C,IAAIW,EAAQ,KAAK,OAAO,EACxB,OAAOA,IAAU,EAAI,EAAEA,EAAQ,GAAK,CACxC,EAIA,SAASC,IAAiB,CAEtB,IAAIC,EAAO,IAAIV,GAAS,EAAG,CAAC,EACxBW,EAAI,EACR,GAAI,KAAK,IAAM,KAAK,IAAM,EAAG,CACzB,KAAOA,EAAI,EAAG,EAAEA,EAGZ,GADAD,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,KAAO,EAC1D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,EAKf,GAFAA,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQ,MAAQ,EAC3DA,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAS,KAAO,EACvD,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOA,EACXC,EAAI,CACR,KAAO,CACH,KAAOA,EAAI,EAAG,EAAEA,EAAG,CAEf,GAAI,KAAK,KAAO,KAAK,IACjB,MAAMT,GAAgB,IAAI,EAG9B,GADAQ,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,KAAO,EAC1D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,CACf,CAEA,OAAAA,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,KAAK,EAAI,MAAQC,EAAI,KAAO,EACzDD,CACX,CACA,GAAI,KAAK,IAAM,KAAK,IAAM,GACtB,KAAOC,EAAI,EAAG,EAAEA,EAGZ,GADAD,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,EAAI,KAAO,EAC9D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,MAGf,MAAOC,EAAI,EAAG,EAAEA,EAAG,CAEf,GAAI,KAAK,KAAO,KAAK,IACjB,MAAMT,GAAgB,IAAI,EAG9B,GADAQ,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,EAAI,KAAO,EAC9D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,CACf,CAGJ,MAAM,MAAM,yBAAyB,CACzC,CA6BAb,GAAO,UAAU,KAAO,UAAqB,CACzC,OAAO,KAAK,OAAO,IAAM,CAC7B,EAEA,SAASe,GAAgBC,EAAKC,EAAK,CAC/B,OAAQD,EAAIC,EAAM,CAAC,EACXD,EAAIC,EAAM,CAAC,GAAK,EAChBD,EAAIC,EAAM,CAAC,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,MAAQ,CACpC,CAMAjB,GAAO,UAAU,QAAU,UAAwB,CAG/C,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,OAAOU,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,CAClD,EAMAf,GAAO,UAAU,SAAW,UAAyB,CAGjD,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,OAAOU,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,EAAI,CACtD,EAIA,SAASG,IAAgC,CAGrC,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMb,GAAgB,KAAM,CAAC,EAEjC,OAAO,IAAIF,GAASY,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,EAAGA,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,CAAC,CAC1G,CAuBAf,GAAO,UAAU,MAAQ,UAAsB,CAG3C,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,IAAIM,EAAQV,GAAK,MAAM,YAAY,KAAK,IAAK,KAAK,GAAG,EACrD,YAAK,KAAO,EACLU,CACX,EAOAX,GAAO,UAAU,OAAS,UAAuB,CAG7C,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,IAAIM,EAAQV,GAAK,MAAM,aAAa,KAAK,IAAK,KAAK,GAAG,EACtD,YAAK,KAAO,EACLU,CACX,EAMAX,GAAO,UAAU,MAAQ,UAAsB,CAC3C,IAAImB,EAAS,KAAK,OAAO,EACrBC,EAAS,KAAK,IACdH,EAAS,KAAK,IAAME,EAGxB,GAAIF,EAAM,KAAK,IACX,MAAMZ,GAAgB,KAAMc,CAAM,EAGtC,GADA,KAAK,KAAOA,EACR,MAAM,QAAQ,KAAK,GAAG,EACtB,OAAO,KAAK,IAAI,MAAMC,EAAOH,CAAG,EAEpC,GAAIG,IAAUH,EAAK,CACf,IAAII,EAAepB,GAAK,OACxB,OAAOoB,EACDA,EAAa,MAAM,CAAC,EACpB,IAAI,KAAK,IAAI,YAAY,CAAC,CACpC,CACA,OAAO,KAAK,OAAO,KAAK,KAAK,IAAKD,EAAOH,CAAG,CAChD,EAMAjB,GAAO,UAAU,OAAS,UAAuB,CAC7C,IAAIsB,EAAQ,KAAK,MAAM,EACvB,OAAOlB,GAAK,KAAKkB,EAAO,EAAGA,EAAM,MAAM,CAC3C,EAOAtB,GAAO,UAAU,KAAO,SAAcmB,EAAQ,CAC1C,GAAI,OAAOA,GAAW,SAAU,CAE5B,GAAI,KAAK,IAAMA,EAAS,KAAK,IACzB,MAAMd,GAAgB,KAAMc,CAAM,EACtC,KAAK,KAAOA,CAChB,KACI,GAEI,IAAI,KAAK,KAAO,KAAK,IACjB,MAAMd,GAAgB,IAAI,QACzB,KAAK,IAAI,KAAK,KAAK,EAAI,KAEpC,OAAO,IACX,EAOAL,GAAO,UAAU,SAAW,SAASuB,EAAU,CAC3C,OAAQA,EAAU,CACd,IAAK,GACD,KAAK,KAAK,EACV,MACJ,IAAK,GACD,KAAK,KAAK,CAAC,EACX,MACJ,IAAK,GACD,KAAK,KAAK,KAAK,OAAO,CAAC,EACvB,MACJ,IAAK,GACD,MAAQA,EAAW,KAAK,OAAO,EAAI,KAAO,GACtC,KAAK,SAASA,CAAQ,EAE1B,MACJ,IAAK,GACD,KAAK,KAAK,CAAC,EACX,MAGJ,QACI,MAAM,MAAM,qBAAuBA,EAAW,cAAgB,KAAK,GAAG,CAC9E,CACA,OAAO,IACX,EAEAvB,
GAAO,WAAa,SAASwB,EAAe,CACxCtB,GAAesB,EACfxB,GAAO,OAASU,GAAO,EACvBR,GAAa,WAAW,EAExB,IAAIuB,EAAKxB,GAAK,KAAO,SAAsC,WAC3DA,GAAK,MAAMD,GAAO,UAAW,CAEzB,MAAO,UAAsB,CACzB,OAAOY,GAAe,KAAK,IAAI,EAAEa,CAAE,EAAE,EAAK,CAC9C,EAEA,OAAQ,UAAuB,CAC3B,OAAOb,GAAe,KAAK,IAAI,EAAEa,CAAE,EAAE,EAAI,CAC7C,EAEA,OAAQ,UAAuB,CAC3B,OAAOb,GAAe,KAAK,IAAI,EAAE,SAAS,EAAEa,CAAE,EAAE,EAAK,CACzD,EAEA,QAAS,UAAwB,CAC7B,OAAOP,GAAY,KAAK,IAAI,EAAEO,CAAE,EAAE,EAAI,CAC1C,EAEA,SAAU,UAAyB,CAC/B,OAAOP,GAAY,KAAK,IAAI,EAAEO,CAAE,EAAE,EAAK,CAC3C,CAEJ,CAAC,CACL,IC/ZA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAGjB,IAAIC,GAAS,MACZD,GAAa,UAAY,OAAO,OAAOC,GAAO,SAAS,GAAG,YAAcD,GAEzE,IAAIE,GAAO,KASX,SAASF,GAAaG,EAAQ,CAC1BF,GAAO,KAAK,KAAME,CAAM,CAO5B,CAEAH,GAAa,WAAa,UAAY,CAE9BE,GAAK,SACLF,GAAa,UAAU,OAASE,GAAK,OAAO,UAAU,MAC9D,EAMAF,GAAa,UAAU,OAAS,UAA8B,CAC1D,IAAII,EAAM,KAAK,OAAO,EACtB,OAAO,KAAK,IAAI,UACV,KAAK,IAAI,UAAU,KAAK,IAAK,KAAK,IAAM,KAAK,IAAI,KAAK,IAAMA,EAAK,KAAK,GAAG,CAAC,EAC1E,KAAK,IAAI,SAAS,QAAS,KAAK,IAAK,KAAK,IAAM,KAAK,IAAI,KAAK,IAAMA,EAAK,KAAK,GAAG,CAAC,CAC5F,EASAJ,GAAa,WAAW,IClDxB,IAAAK,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAO,MAGVD,GAAQ,UAAY,OAAO,OAAOC,GAAK,aAAa,SAAS,GAAG,YAAcD,GAmC/E,SAASA,GAAQE,EAASC,EAAkBC,EAAmB,CAE3D,GAAI,OAAOF,GAAY,WACnB,MAAM,UAAU,4BAA4B,EAEhDD,GAAK,aAAa,KAAK,IAAI,EAM3B,KAAK,QAAUC,EAMf,KAAK,iBAAmB,EAAQC,EAMhC,KAAK,kBAAoB,EAAQC,CACrC,CAaAJ,GAAQ,UAAU,QAAU,SAASK,EAAQC,EAAQC,EAAaC,EAAcC,EAASC,EAAU,CAE/F,GAAI,CAACD,EACD,MAAM,UAAU,2BAA2B,EAE/C,IAAIE,EAAO,KACX,GAAI,CAACD,EACD,OAAOT,GAAK,UAAUI,EAASM,EAAML,EAAQC,EAAaC,EAAcC,CAAO,EAEnF,GAAI,CAACE,EAAK,QAAS,CACf,WAAW,UAAW,CAAED,EAAS,MAAM,eAAe,CAAC,CAAG,EAAG,CAAC,EAC9D,MACJ,CAEA,GAAI,CACA,OAAOC,EAAK,QACRL,EACAC,EAAYI,EAAK,iBAAmB,kBAAoB,QAAQ,EAAEF,CAAO,EAAE,OAAO,EAClF,SAAqBG,EAAKC,EAAU,CAEhC,GAAID,EACA,OAAAD,EAAK,KAAK,QAASC,EAAKN,CAAM,EACvBI,EAASE,CAAG,EAGvB,GAAIC,IAAa,KAAM,CACnBF,EAAK,IAAqB,EAAI,EAC9B,MACJ,CAEA,GAAI,EAAEE,aAAoBL,GACtB,GAAI,CACAK,EAAWL,EAAaG,EAAK,kBAAoB,kBAAoB,QAAQ,EAAEE,CAAQ,CAC3F,OAASD,EAAK,CACV,OAAAD,EAAK,KAAK,QAASC,EAAKN,CAAM,EACvBI,EAASE,CAAG,CACvB,CAGJ,OAAAD,EAAK,KAAK,OAAQE,EAAUP,CAAM,EAC3BI,EAAS,KAAMG,CAAQ,CAClC,CACJ,CACJ,OAASD,EAAK,CACVD,EAAK,KAAK,QAASC,EAAKN,CAAM,EAC9B,WAAW,UAAW,CAAEI,EAASE,CAAG,CAAG,EAAG,CAAC,EAC3C,MACJ,CACJ,EAOAZ,GAAQ,UAAU,IAAM,SAAac,EAAY,CAC7C,OAAI,KAAK,UACAA,GACD,KAAK,QAAQ,KAAM,KAAM,IAAI,EACjC,KAAK,QAAU,KACf,KAAK,KAAK,KAAK,EAAE,IAAI,GAElB,IACX,IC7IA,IAAAC,GAAAC,GAAAC,IAAA,cAMA,IAAIC,GAAMD,GA6BVC,GAAI,QAAU,OCnCd,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAU,CAAC,ICDlB,IAAAC,GAAAC,GAAAC,IAAA,cACA,IAAIC,GAAWD,GAQfC,GAAS,MAAQ,UAGjBA,GAAS,OAAe,KACxBA,GAAS,aAAe,KACxBA,GAAS,OAAe,KACxBA,GAAS,aAAe,KAGxBA,GAAS,KAAe,KACxBA,GAAS,IAAe,KACxBA,GAAS,MAAe,KACxBA,GAAS,UAAeC,GAOxB,SAASA,IAAY,CACjBD,GAAS,KAAK,WAAW,EACzBA,GAAS,OAAO,WAAWA,GAAS,YAAY,EAChDA,GAAS,OAAO,WAAWA,GAAS,YAAY,CACpD,CAGAC,GAAU,ICnCV,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cAGAA,GAAO,QAAU,OCHjB,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cAGA,IAAIC,GAAY,KAGZC,EAAUD,GAAU,OAAQE,GAAUF,GAAU,OAAQG,EAAQH,GAAU,KAG1EI,EAAQJ,GAAU,MAAM,UAAeA,GAAU,MAAM,QAAa,CAAC,GAEzEI,EAAM,KAAQ,UAAW,CAOrB,IAAIC,EAAO,CAAC,EAiBZ,OAAAA,EAAK,QAAW,UAAW,CACvB,IAAIC,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,gBAAgB,EAAI,EAC3CC,EAAOD,EAAW,CAAC,EAAI,uBAAuB,EAAI,EAClDC,EAAOD,EAAW,CAAC,EAAI,uBAAuB,EAAI,EAClDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,qBAAqB,EAAI,EAChDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,YAAY,EAAI,
EAChCC,CACX,EAAG,EAEHF,EAAK,eAAkB,UAAW,CAkC9B,SAASG,EAAeC,EAAY,CAQhC,GAPA,KAAK,OAAS,CAAC,EACf,KAAK,KAAO,CAAC,EACb,KAAK,QAAU,CAAC,EAChB,KAAK,QAAU,CAAC,EAChB,KAAK,OAAS,CAAC,EACf,KAAK,cAAgB,CAAC,EACtB,KAAK,WAAa,CAAC,EACfA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAH,EAAe,UAAU,KAAO,GAQhCA,EAAe,UAAU,YAAc,GAQvCA,EAAe,UAAU,UAAY,GAQrCA,EAAe,UAAU,KAAO,EAQhCA,EAAe,UAAU,EAAI,EAQ7BA,EAAe,UAAU,EAAIL,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAQ3EK,EAAe,UAAU,EAAIL,EAAM,UAAU,CAAC,CAAC,EAQ/CK,EAAe,UAAU,EAAI,KAQ7BA,EAAe,UAAU,EAAI,KAQ7BA,EAAe,UAAU,aAAe,KAQxCA,EAAe,UAAU,GAAK,KAQ9BA,EAAe,UAAU,OAASL,EAAM,WAQxCK,EAAe,UAAU,KAAOL,EAAM,WAQtCK,EAAe,UAAU,QAAUL,EAAM,WAQzCK,EAAe,UAAU,QAAUL,EAAM,WAQzCK,EAAe,UAAU,OAASL,EAAM,WAQxCK,EAAe,UAAU,cAAgBL,EAAM,WAQ/CK,EAAe,UAAU,WAAaL,EAAM,WAU5CK,EAAe,OAAS,SAAgBC,EAAY,CAChD,OAAO,IAAID,EAAeC,CAAU,CACxC,EAWAD,EAAe,OAAS,SAAgBI,EAASC,EAAQ,CAerD,GAdKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,CAAC,EACxDA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,CAAC,EACxDA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,CAAC,EACxDA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DR,EAAM,KAAK,YAAY,OAAOQ,EAAQ,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACjGD,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DR,EAAM,KAAK,WAAW,OAAOQ,EAAQ,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAChGD,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OAAQ,CACjDC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,OAAO,OAAQ,EAAE,EACzCC,EAAO,MAAMD,EAAQ,OAAO,CAAC,CAAC,EAClCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OAAQ,CAC7CC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,KAAK,OAAQ,EAAE,EACvCC,EAAO,MAAMD,EAAQ,KAAK,CAAC,CAAC,EAChCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,SAAW,MAAQA,EAAQ,QAAQ,OAC3C,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAC1CC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,QAAQ,CAAC,CAAC,EACzE,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,QAAQ,OAC3C,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAC1CR,EAAM,KAAK,YAAY,OAAOQ,EAAQ,QAAQ,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnH,GAAID,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OACzC,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzCR,EAAM,KAAK,WAAW,OAAOQ,EAAQ,OAAO,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAKjH,GAJID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA+B,GAAG,EAAE,OAAOD,EAAQ,SAAS,EACnEA,EAAQ,IAAM,MAAQ,OAAO,eAAe,KAAKA,EAAS,IAAI,GAC9DR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,GAAIC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAClGD,EAAQ,YAAc,MAAQA,EAAQ,WAAW,OACjD,QAAS,EAAI,EAAG,EAAIA,EAAQ,WAAW,OAAQ,EAAE,EAC7CR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,WAAW,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAOrH,GANID,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA+B,GAAG,EAAE,MAAMD,EAAQ,IAAI,EAC7DA,EAAQ,aAAe,MAAQ,OAAO,eAAe,KAAKA,EAAS,aAAa,GAChFC,EAAO,OAA+B,GAAG,EAAE,OAAOD,EAAQ,WAAW,EACrEA,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFR,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,aAAcC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACpHD,EAAQ,eAAiB,MAAQA,EAAQ,cAAc,OACvD,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAChDR,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,cAAc,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAChI,OAAOA,CACX,EAWAL,EAAe,gBAAkB,SAAyBI,EAASC,EAAQ,CACvE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAL,EAAe,OAAS,SAAgBM,EAAQC,EAAQ,CAC9CD,aAAkBb,IACpBa,EAASb,EA
AQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,eACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,YAAcE,EAAO,OAAO,EACpC,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,KAAOE,EAAO,MAAM,EAC5B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIE,EAAO,MAAM,EACzB,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIE,EAAO,MAAM,EACzB,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIE,EAAO,MAAM,EACzB,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACjE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAChE,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,aAAeR,EAAM,KAAK,kBAAkB,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAClF,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,GAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAChE,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,IACjBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,OAAO,KAAKE,EAAO,MAAM,CAAC,OAEtCF,EAAQ,OAAO,KAAKE,EAAO,MAAM,CAAC,EACtC,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,IACfK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,OAEpCF,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,EACpC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,SAAWA,EAAQ,QAAQ,SACrCA,EAAQ,QAAU,CAAC,GACvBA,EAAQ,QAAQ,KAAKE,EAAO,MAAM,CAAC,EACnC,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,SAAWA,EAAQ,QAAQ,SACrCA,EAAQ,QAAU,CAAC,GACvBA,EAAQ,QAAQ,KAAKR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC3E,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,GACtBA,EAAQ,OAAO,KAAKR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACzE,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,eAAiBA,EAAQ,cAAc,SACjDA,EAAQ,cAAgB,CAAC,GAC7BA,EAAQ,cAAc,KAAKR,EAAM,KAAK,kBAAkB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACvF,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,YAAcA,EAAQ,WAAW,SAC3CA,EAAQ,WAAa,CAAC,GAC1BA,EAAQ,WAAW,KAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC5E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAJ,EAAe,gBAAkB,SAAyBM,EAAQ,CAC9D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAN,EAAe,OAAS,SAAgBI,EAAS,CAC7C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,GAC/D,CAACT,EAAM,SAASS,EAAQ,WAAW,EACnC,MAAO,+BACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EACrD,OAAQA,EAAQ,KAAM,CACtB,QACI,MAAO,4BACX,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,IACL,IAAK,IACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,IACL,IAAK,IACL,IAAK,IACD,KACJ,CACJ,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,GAC3C,OAAOA,EAAQ,GAAM,SACrB,MAAO,qBACf,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,GAC3C,CAACT,EAAM,UAAUS,EAAQ,CAAC,GAAK,EAAEA,EAAQ,GAAKT,EAAM,UAAUS,EAAQ,EAAE,GAAG,GAAKT,EAAM,UAAUS,EAAQ,EAAE,IAAI,GAC9G,MAAO,2BACf,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,GAC3C,EAAEA,EAAQ,GAAK,OAAOA,EAAQ,EAAE,QAAW,UAAYT,EAAM,SAASS,EAAQ,CAAC,GAC/E,MAAO,qBACf,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,EAAG,CAClD,IAAIO,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,CAAC,EACnD,GAAIO,EACA,MAAO,KAAOA,CACtB,CACA,GAAIP,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,EAAG,CAClD,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,CAAC,EAClD,GAAIO,EACA,MAAO,KAAOA,CACtB,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,IAAIO,EAAQf,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,YAAY,EACpE,GAAIO,EACA,MAAO,gBAAkBA,CACjC
,CACA,GAAIP,EAAQ,IAAM,MAAQA,EAAQ,eAAe,IAAI,EAAG,CACpD,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,EAAE,EAClD,GAAIO,EACA,MAAO,MAAQA,CACvB,CACA,GAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzC,GAAI,OAAOA,EAAQ,OAAO,CAAC,GAAM,SAC7B,MAAO,2BACnB,CACA,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,KAAK,OAAQ,EAAE,EACvC,GAAI,CAACT,EAAM,UAAUS,EAAQ,KAAK,CAAC,CAAC,GAAK,EAAEA,EAAQ,KAAK,CAAC,GAAKT,EAAM,UAAUS,EAAQ,KAAK,CAAC,EAAE,GAAG,GAAKT,EAAM,UAAUS,EAAQ,KAAK,CAAC,EAAE,IAAI,GACtI,MAAO,+BACnB,CACA,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,GAAI,CAAC,MAAM,QAAQA,EAAQ,OAAO,EAC9B,MAAO,0BACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAC1C,GAAI,EAAEA,EAAQ,QAAQ,CAAC,GAAK,OAAOA,EAAQ,QAAQ,CAAC,EAAE,QAAW,UAAYT,EAAM,SAASS,EAAQ,QAAQ,CAAC,CAAC,GAC1G,MAAO,4BACnB,CACA,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,GAAI,CAAC,MAAM,QAAQA,EAAQ,OAAO,EAC9B,MAAO,0BACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAAG,CAC7C,IAAIO,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,QAAQ,CAAC,CAAC,EAC5D,GAAIO,EACA,MAAO,WAAaA,CAC5B,CACJ,CACA,GAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EAAG,CAC5C,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,OAAO,CAAC,CAAC,EAC1D,GAAIO,EACA,MAAO,UAAYA,CAC3B,CACJ,CACA,GAAIP,EAAQ,eAAiB,MAAQA,EAAQ,eAAe,eAAe,EAAG,CAC1E,GAAI,CAAC,MAAM,QAAQA,EAAQ,aAAa,EACpC,MAAO,gCACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAAG,CACnD,IAAIO,EAAQf,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,cAAc,CAAC,CAAC,EACxE,GAAIO,EACA,MAAO,iBAAmBA,CAClC,CACJ,CACA,GAAIP,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpE,GAAI,CAAC,MAAM,QAAQA,EAAQ,UAAU,EACjC,MAAO,6BACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,WAAW,OAAQ,EAAE,EAAG,CAChD,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,WAAW,CAAC,CAAC,EAC7D,GAAIO,EACA,MAAO,cAAgBA,CAC/B,CACJ,CACA,OAAO,IACX,EAUAX,EAAe,WAAa,SAAoBY,EAAQ,CACpD,GAAIA,aAAkBhB,EAAM,KAAK,eAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,eAO7B,OANIgB,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,aAAe,OACtBR,EAAQ,YAAc,OAAOQ,EAAO,WAAW,GAC/CA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GACvCA,EAAO,KAAM,CACrB,QACI,GAAI,OAAOA,EAAO,MAAS,SAAU,CACjCR,EAAQ,KAAOQ,EAAO,KACtB,KACJ,CACA,MACJ,IAAK,YACL,IAAK,GACDR,EAAQ,KAAO,EACf,MACJ,IAAK,QACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,MACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,SACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,SACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,QACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,gBACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,aACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,SACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,OACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,UACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,UACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,SACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,iBACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,cACL,IAAK,IACDA,EAAQ,KAAO,GACf,KACJ,CAiBA,GAhBIQ,EAAO,GAAK,OACZR,EAAQ,EAAI,OAAOQ,EAAO,CAAC,GAC3BA,EAAO,GAAK,OACRjB,EAAM,MACLS,EAAQ,EAAIT,EAAM,KAAK,UAAUiB,EAAO,CAAC,GAAG,SAAW,GACnD,OAAOA,EAAO,GAAM,SACzBR,EAAQ,EAAI,SAASQ,EAAO,EAAG,EAAE,EAC5B,OAAOA,EAAO,GAAM,SACzBR,EAAQ,EAAIQ,EAAO,EACd,OAAOA,EAAO,GAAM,WACzBR,EAAQ,EAAI,IAAIT,EAAM,SAASiB,EAAO,EAAE,MAAQ,EAAGA,EAAO,EAAE,OAAS,CAAC,EAAE,SAAS,IACrFA,EAAO,GAAK,OACR,OAAOA,EAAO,GAAM,SACpBjB,EAAM,OAAO,OAAOiB,EAAO,EAAGR,EAAQ,EAAIT,EAAM,UAAUA,EAAM,OAAO,OAAOiB,EAAO,CAAC,CAAC,EAAG,CAAC,EACtFA,EAAO,EAAE,QAAU,IACxBR,EAAQ,EAAIQ,EAAO,IACvBA,EAAO,GAAK,KAAM,CAClB,GAAI,OAAOA,EAAO,GAAM,SACpB,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,EAAIR,EAAM,KAAK,YAAY,WAAWgB,EAAO,CAAC,CAC1D,CACA,GAAI
A,EAAO,GAAK,KAAM,CAClB,GAAI,OAAOA,EAAO,GAAM,SACpB,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,EAAIR,EAAM,KAAK,WAAW,WAAWgB,EAAO,CAAC,CACzD,CACA,GAAIA,EAAO,cAAgB,KAAM,CAC7B,GAAI,OAAOA,EAAO,cAAiB,SAC/B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,aAAeR,EAAM,KAAK,kBAAkB,WAAWgB,EAAO,YAAY,CACtF,CACA,GAAIA,EAAO,IAAM,KAAM,CACnB,GAAI,OAAOA,EAAO,IAAO,SACrB,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,GAAKR,EAAM,KAAK,UAAU,WAAWgB,EAAO,EAAE,CAC1D,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EACxCR,EAAQ,OAAO,CAAC,EAAI,OAAOQ,EAAO,OAAO,CAAC,CAAC,CACnD,CACA,GAAIA,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAClCjB,EAAM,MACLS,EAAQ,KAAK,CAAC,EAAIT,EAAM,KAAK,UAAUiB,EAAO,KAAK,CAAC,CAAC,GAAG,SAAW,GAC/D,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAI,SAASQ,EAAO,KAAK,CAAC,EAAG,EAAE,EACxC,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAIQ,EAAO,KAAK,CAAC,EAC1B,OAAOA,EAAO,KAAK,CAAC,GAAM,WAC/BR,EAAQ,KAAK,CAAC,EAAI,IAAIT,EAAM,SAASiB,EAAO,KAAK,CAAC,EAAE,MAAQ,EAAGA,EAAO,KAAK,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAC/G,CACA,GAAIA,EAAO,QAAS,CAChB,GAAI,CAAC,MAAM,QAAQA,EAAO,OAAO,EAC7B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,QAAU,CAAC,EACnB,QAAS,EAAI,EAAG,EAAIQ,EAAO,QAAQ,OAAQ,EAAE,EACrC,OAAOA,EAAO,QAAQ,CAAC,GAAM,SAC7BjB,EAAM,OAAO,OAAOiB,EAAO,QAAQ,CAAC,EAAGR,EAAQ,QAAQ,CAAC,EAAIT,EAAM,UAAUA,EAAM,OAAO,OAAOiB,EAAO,QAAQ,CAAC,CAAC,CAAC,EAAG,CAAC,EACjHA,EAAO,QAAQ,CAAC,EAAE,QAAU,IACjCR,EAAQ,QAAQ,CAAC,EAAIQ,EAAO,QAAQ,CAAC,EACjD,CACA,GAAIA,EAAO,QAAS,CAChB,GAAI,CAAC,MAAM,QAAQA,EAAO,OAAO,EAC7B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,QAAU,CAAC,EACnB,QAAS,EAAI,EAAG,EAAIQ,EAAO,QAAQ,OAAQ,EAAE,EAAG,CAC5C,GAAI,OAAOA,EAAO,QAAQ,CAAC,GAAM,SAC7B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,QAAQ,CAAC,EAAIR,EAAM,KAAK,YAAY,WAAWgB,EAAO,QAAQ,CAAC,CAAC,CAC5E,CACJ,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EAAG,CAC3C,GAAI,OAAOA,EAAO,OAAO,CAAC,GAAM,SAC5B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,OAAO,CAAC,EAAIR,EAAM,KAAK,WAAW,WAAWgB,EAAO,OAAO,CAAC,CAAC,CACzE,CACJ,CACA,GAAIA,EAAO,cAAe,CACtB,GAAI,CAAC,MAAM,QAAQA,EAAO,aAAa,EACnC,MAAM,UAAU,oDAAoD,EACxER,EAAQ,cAAgB,CAAC,EACzB,QAAS,EAAI,EAAG,EAAIQ,EAAO,cAAc,OAAQ,EAAE,EAAG,CAClD,GAAI,OAAOA,EAAO,cAAc,CAAC,GAAM,SACnC,MAAM,UAAU,qDAAqD,EACzER,EAAQ,cAAc,CAAC,EAAIR,EAAM,KAAK,kBAAkB,WAAWgB,EAAO,cAAc,CAAC,CAAC,CAC9F,CACJ,CACA,GAAIA,EAAO,WAAY,CACnB,GAAI,CAAC,MAAM,QAAQA,EAAO,UAAU,EAChC,MAAM,UAAU,iDAAiD,EACrER,EAAQ,WAAa,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIQ,EAAO,WAAW,OAAQ,EAAE,EAAG,CAC/C,GAAI,OAAOA,EAAO,WAAW,CAAC,GAAM,SAChC,MAAM,UAAU,kDAAkD,EACtER,EAAQ,WAAW,CAAC,EAAIR,EAAM,KAAK,UAAU,WAAWgB,EAAO,WAAW,CAAC,CAAC,CAChF,CACJ,CACA,OAAOR,CACX,EAWAJ,EAAe,SAAW,SAAkBI,EAASS,EAAS,CACrDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAUd,IATIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,OAAS,CAAC,EACjBA,EAAO,KAAO,CAAC,EACfA,EAAO,QAAU,CAAC,EAClBA,EAAO,QAAU,CAAC,EAClBA,EAAO,OAAS,CAAC,EACjBA,EAAO,WAAa,CAAC,EACrBA,EAAO,cAAgB,CAAC,GAExBC,EAAQ,SAAU,CAGlB,GAFAD,EAAO,KAAO,GACdA,EAAO,EAAI,EACPjB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,EAAIC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CACzG,MACIF,EAAO,EAAIC,EAAQ,QAAU,OAAS,IAAM,EAC5CA,EAAQ,QAAU,OAClBD,EAAO,EAAI,IAEXA,EAAO,EAAI,CAAC,EACRC,EAAQ,QAAU,QAClBD,EAAO,EAAIjB,EAAM,UAAUiB,EAAO,CAAC,IAE3CA,EAAO,EAAI,KACXA,EAAO,EAAI,KACXA,EAAO,UAAY,GACnBA,EAAO,GAAK,KACZA,EAAO,KAAOC,EAAQ,QAAU,OAAS,YAAc,EACvDD,EAAO,YAAc,GACrBA,EAAO,aAAe,IAC1B,CAgBA,GAfIR,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IA
CrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIC,EAAQ,MAAQ,CAAC,SAAST,EAAQ,CAAC,EAAI,OAAOA,EAAQ,CAAC,EAAIA,EAAQ,GAC9EA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC3C,OAAOA,EAAQ,GAAM,SACrBQ,EAAO,EAAIC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,CAAC,EAAIA,EAAQ,EAElEQ,EAAO,EAAIC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,CAAC,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,EAAE,MAAQ,EAAGA,EAAQ,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,GACxMA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIC,EAAQ,QAAU,OAASlB,EAAM,OAAO,OAAOS,EAAQ,EAAG,EAAGA,EAAQ,EAAE,MAAM,EAAIS,EAAQ,QAAU,MAAQ,MAAM,UAAU,MAAM,KAAKT,EAAQ,CAAC,EAAIA,EAAQ,GACtKA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIhB,EAAM,KAAK,YAAY,SAASQ,EAAQ,EAAGS,CAAO,GAC7DT,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIhB,EAAM,KAAK,WAAW,SAASQ,EAAQ,EAAGS,CAAO,GAC5DT,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAIF,EAAQ,MAAQ,CAAC,SAAST,EAAQ,OAAOW,CAAC,CAAC,EAAI,OAAOX,EAAQ,OAAOW,CAAC,CAAC,EAAIX,EAAQ,OAAOW,CAAC,CACtH,CACA,GAAIX,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACnC,OAAOX,EAAQ,KAAKW,CAAC,GAAM,SAC3BH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAAS,OAAOT,EAAQ,KAAKW,CAAC,CAAC,EAAIX,EAAQ,KAAKW,CAAC,EAEpFH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,KAAKW,CAAC,CAAC,EAAIF,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,KAAKW,CAAC,EAAE,MAAQ,EAAGX,EAAQ,KAAKW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIX,EAAQ,KAAKW,CAAC,CAC9O,CACA,GAAIX,EAAQ,SAAWA,EAAQ,QAAQ,OAAQ,CAC3CQ,EAAO,QAAU,CAAC,EAClB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,QAAQ,OAAQ,EAAEW,EAC1CH,EAAO,QAAQG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,OAAO,OAAOS,EAAQ,QAAQW,CAAC,EAAG,EAAGX,EAAQ,QAAQW,CAAC,EAAE,MAAM,EAAIF,EAAQ,QAAU,MAAQ,MAAM,UAAU,MAAM,KAAKT,EAAQ,QAAQW,CAAC,CAAC,EAAIX,EAAQ,QAAQW,CAAC,CAC3N,CACA,GAAIX,EAAQ,SAAWA,EAAQ,QAAQ,OAAQ,CAC3CQ,EAAO,QAAU,CAAC,EAClB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,QAAQ,OAAQ,EAAEW,EAC1CH,EAAO,QAAQG,CAAC,EAAInB,EAAM,KAAK,YAAY,SAASQ,EAAQ,QAAQW,CAAC,EAAGF,CAAO,CACvF,CACA,GAAIT,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAInB,EAAM,KAAK,WAAW,SAASQ,EAAQ,OAAOW,CAAC,EAAGF,CAAO,CACpF,CAKA,GAJIT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,IAAM,MAAQA,EAAQ,eAAe,IAAI,IACjDQ,EAAO,GAAKhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,GAAIS,CAAO,GAC7DT,EAAQ,YAAcA,EAAQ,WAAW,OAAQ,CACjDQ,EAAO,WAAa,CAAC,EACrB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,WAAW,OAAQ,EAAEW,EAC7CH,EAAO,WAAWG,CAAC,EAAInB,EAAM,KAAK,UAAU,SAASQ,EAAQ,WAAWW,CAAC,EAAGF,CAAO,CAC3F,CAOA,GANIT,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOC,EAAQ,QAAU,OAASjB,EAAM,KAAK,eAAe,cAAcQ,EAAQ,IAAI,IAAM,OAAYA,EAAQ,KAAOR,EAAM,KAAK,eAAe,cAAcQ,EAAQ,IAAI,EAAIA,EAAQ,MAC9LA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,IACnEQ,EAAO,YAAcR,EAAQ,aAC7BA,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAehB,EAAM,KAAK,kBAAkB,SAASQ,EAAQ,aAAcS,CAAO,GACzFT,EAAQ,eAAiBA,EAAQ,cAAc,OAAQ,CACvDQ,EAAO,cAAgB,CAAC,EACxB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,cAAc,OAAQ,EAAEW,EAChDH,EAAO,cAAcG,CAAC,EAAInB,EAAM,KAAK,kBAAkB,SAASQ,EAAQ,cAAcW,CAAC,EAAGF,CAAO,CACzG,CACA,OAAOD,CACX,EASAZ,EAAe,UAAU,OAAS,UAAkB,CAChD,OAAO,KAAK,YAAY,SAAS,KAAMR,GAAU,KAAK,aAAa,CACvE,EAUAQ,EAAe,WAAa,SAAoBgB,EAAe,CAC3D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,sBAC3B,EAsBAhB,EAAe,cAAiB,UAAW,CACvC,IAAIF,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,WAAW,EAAI,EACtCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,KAAK,EAAI,EAChCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,OAAO,
EAAI,EAClCC,EAAOD,EAAW,EAAE,EAAI,eAAe,EAAI,GAC3CC,EAAOD,EAAW,EAAE,EAAI,YAAY,EAAI,GACxCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,MAAM,EAAI,EACjCC,EAAOD,EAAW,CAAC,EAAI,SAAS,EAAI,EACpCC,EAAOD,EAAW,CAAC,EAAI,SAAS,EAAI,EACpCC,EAAOD,EAAW,EAAE,EAAI,QAAQ,EAAI,GACpCC,EAAOD,EAAW,EAAE,EAAI,gBAAgB,EAAI,GAC5CC,EAAOD,EAAW,EAAE,EAAI,aAAa,EAAI,GAClCC,CACX,EAAG,EAEIC,CACX,EAAG,EAEHH,EAAK,eAAkB,UAAW,CAmB9B,SAASoB,EAAehB,EAAY,CAChC,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAc,EAAe,UAAU,KAAO,GAQhCA,EAAe,UAAU,KAAO,KAQhCA,EAAe,UAAU,UAAY,GAUrCA,EAAe,OAAS,SAAgBhB,EAAY,CAChD,OAAO,IAAIgB,EAAehB,CAAU,CACxC,EAWAgB,EAAe,OAAS,SAAgBb,EAASC,EAAQ,CACrD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAMC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAClGD,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EAC9DC,CACX,EAWAY,EAAe,gBAAkB,SAAyBb,EAASC,EAAQ,CACvE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAY,EAAe,OAAS,SAAgBX,EAAQC,EAAQ,CAC9CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,eACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,KAAOR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAClE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAa,EAAe,gBAAkB,SAAyBX,EAAQ,CAC9D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAW,EAAe,OAAS,SAAgBb,EAAS,CAC7C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,IAAI,EACpD,GAAIO,EACA,MAAO,QAAUA,CACzB,CACA,OAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EAC1B,6BACR,IACX,EAUAa,EAAe,WAAa,SAAoBL,EAAQ,CACpD,GAAIA,aAAkBhB,EAAM,KAAK,eAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,eAG7B,GAFIgB,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,MAAQ,KAAM,CACrB,GAAI,OAAOA,EAAO,MAAS,SACvB,MAAM,UAAU,4CAA4C,EAChER,EAAQ,KAAOR,EAAM,KAAK,UAAU,WAAWgB,EAAO,IAAI,CAC9D,CACA,OAAIA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GACxCR,CACX,EAWAa,EAAe,SAAW,SAAkBb,EAASS,EAAS,CACrDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,KAAO,GACdA,EAAO,KAAO,KACdA,EAAO,UAAY,IAEnBR,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,KAAMS,CAAO,GACjET,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WACxBQ,CACX,EASAK,EAAe,UAAU,OAAS,UAAkB,CAChD,OAAO,KAAK,YAAY,SAAS,KAAMzB,GAAU,KAAK,aAAa,CACvE,EAUAyB,EAAe,WAAa,SAAoBD,EAAe,CAC3D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,sBAC3B,EAEOC,CACX,EAAG,EAEHpB,EAAK,UAAa,UAAW,CAuBzB,SAASqB,EAAUjB,EAAY,CAI3B,GAHA,KAAK,MAAQ,CAAC,EACd,KAAK,OAAS,CAAC,EACf,KAAK,UAAY,CAAC,EACdA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAe,EAAU,UAAU,MAAQvB,EAAM,WAQlCuB,EAAU,UAAU,OAASvB,EAAM,WAQnCuB,EAAU,UAAU,KAAO,GAQ3BA,EAAU,UAAU,OAAS,GAQ7BA,EAAU,UAAU,OAAS,GAQ7BA,EAAU,UAAU,UAAYvB,EAAM
[base64-VLQ source map "mappings" data for the bundled dist asset; machine-generated and not human-reviewable]
C3B,EAEAW,EAAiB,UAAa,UAAW,CAmBrC,SAASC,EAAU3B,EAAY,CAC3B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQAyB,EAAU,UAAU,SAAW,KAQ/BA,EAAU,UAAU,SAAW,KAQ/BA,EAAU,UAAU,WAAa,GAGjC,IAAIC,EAQJ,cAAO,eAAeD,EAAU,UAAW,QAAS,CAChD,IAAKjC,EAAM,YAAYkC,EAAe,CAAC,WAAY,UAAU,CAAC,EAC9D,IAAKlC,EAAM,YAAYkC,CAAY,CACvC,CAAC,EAUDD,EAAU,OAAS,SAAgB3B,EAAY,CAC3C,OAAO,IAAI2B,EAAU3B,CAAU,CACnC,EAWA2B,EAAU,OAAS,SAAgBxB,EAASC,EAAQ,CAChD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,QAAQ,EAC9DA,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,QAAQ,EAChEA,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,EAC/DC,CACX,EAWAuB,EAAU,gBAAkB,SAAyBxB,EAASC,EAAQ,CAClE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAuB,EAAU,OAAS,SAAgBtB,EAAQC,EAAQ,CACzCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,iBAAiB,UACtGU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,SAAWE,EAAO,OAAO,EACjC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,WAAaE,EAAO,OAAO,EACnC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAwB,EAAU,gBAAkB,SAAyBtB,EAAQ,CACzD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAsB,EAAU,OAAS,SAAgBxB,EAAS,CACxC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,IAAIH,EAAa,CAAC,EAClB,GAAIG,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DH,EAAW,MAAQ,EACf,CAACN,EAAM,UAAUS,EAAQ,QAAQ,GAAK,EAAEA,EAAQ,UAAYT,EAAM,UAAUS,EAAQ,SAAS,GAAG,GAAKT,EAAM,UAAUS,EAAQ,SAAS,IAAI,IAC1I,MAAO,kCAEf,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,EAAG,CAChE,GAAIH,EAAW,QAAU,EACrB,MAAO,yBAEX,GADAA,EAAW,MAAQ,EACf,CAACN,EAAM,SAASS,EAAQ,QAAQ,EAChC,MAAO,2BACf,CACA,OAAIA,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,GAC7D,CAACT,EAAM,SAASS,EAAQ,UAAU,EAC3B,8BACR,IACX,EAUAwB,EAAU,WAAa,SAAoBhB,EAAQ,CAC/C,GAAIA,aAAkBhB,EAAM,KAAK,iBAAiB,UAC9C,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,iBAAiB,UAC9C,OAAIgB,EAAO,UAAY,OACfjB,EAAM,MACLS,EAAQ,SAAWT,EAAM,KAAK,UAAUiB,EAAO,QAAQ,GAAG,SAAW,GACjE,OAAOA,EAAO,UAAa,SAChCR,EAAQ,SAAW,SAASQ,EAAO,SAAU,EAAE,EAC1C,OAAOA,EAAO,UAAa,SAChCR,EAAQ,SAAWQ,EAAO,SACrB,OAAOA,EAAO,UAAa,WAChCR,EAAQ,SAAW,IAAIT,EAAM,SAASiB,EAAO,SAAS,MAAQ,EAAGA,EAAO,SAAS,OAAS,CAAC,EAAE,SAAS,IAC1GA,EAAO,UAAY,OACnBR,EAAQ,SAAW,OAAOQ,EAAO,QAAQ,GACzCA,EAAO,YAAc,OACrBR,EAAQ,WAAa,OAAOQ,EAAO,UAAU,GAC1CR,CACX,EAWAwB,EAAU,SAAW,SAAkBxB,EAASS,EAAS,CAChDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,WAAa,IACpBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IACzD,OAAOA,EAAQ,UAAa,SAC5BQ,EAAO,SAAWC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,QAAQ,EAAIA,EAAQ,SAEhFQ,EAAO,SAAWC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,QAAQ,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,SAAS,MAAQ,EAAGA,EAAQ,SAAS,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,SAChOS,EAAQ,SACRD,EAAO,MAAQ,aAEnBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,SACtBS,EAAQ,SACRD,EAAO,MAAQ,aAEnBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAaR,EAAQ,YACzBQ,CACX,EASAgB,EAAU,UAAU,OAAS,UAAkB,CAC3C,OAAO,KAAK,YAAY,SAAS,KAAMpC,GAAU,KAAK,aAAa,CACvE,EAUAoC,EAAU,WAAa,SAAoBZ,EAAe,CACtD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,kCAC3B,EAEOY,CACX,EAAG,EAEID,CACX,EAAG,EAEH9B,EAAK,UAAa,UAAW,CAsBzB,SAASiC,EAAU7B,EAAY,CAC3B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAG,EAAI,EAAG,EAAIC,EAAK,OAAQ,EAAE,EAC3DD,EAAWC,EAAK,CAAC,CAAC,GAAK,OACvB,KAAKA,EAAK,CAAC,CAA
C,EAAID,EAAWC,EAAK,CAAC,CAAC,EAClD,CAQA4B,EAAU,UAAU,WAAa,KAQjCA,EAAU,UAAU,aAAe,KAQnCA,EAAU,UAAU,QAAU,KAQ9BA,EAAU,UAAU,aAAe,KAQnCA,EAAU,UAAU,iBAAmB,KAQvCA,EAAU,UAAU,WAAa,GAGjC,IAAID,EAQJ,cAAO,eAAeC,EAAU,UAAW,QAAS,CAChD,IAAKnC,EAAM,YAAYkC,EAAe,CAAC,aAAc,eAAgB,UAAW,eAAgB,kBAAkB,CAAC,EACnH,IAAKlC,EAAM,YAAYkC,CAAY,CACvC,CAAC,EAUDC,EAAU,OAAS,SAAgB7B,EAAY,CAC3C,OAAO,IAAI6B,EAAU7B,CAAU,CACnC,EAWA6B,EAAU,OAAS,SAAgB1B,EAASC,EAAQ,CAChD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9ER,EAAM,KAAK,UAAU,OAAO,OAAOQ,EAAQ,WAAYC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC/GD,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFR,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,aAAcC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnHD,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxER,EAAM,KAAK,UAAU,IAAI,OAAOQ,EAAQ,QAASC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACzGD,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,EAClEA,EAAQ,kBAAoB,MAAQ,OAAO,eAAe,KAAKA,EAAS,kBAAkB,GAC1FR,EAAM,KAAK,UAAU,aAAa,OAAOQ,EAAQ,iBAAkBC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC3HD,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFR,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,aAAcC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAChHA,CACX,EAWAyB,EAAU,gBAAkB,SAAyB1B,EAASC,EAAQ,CAClE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAyB,EAAU,OAAS,SAAgBxB,EAAQC,EAAQ,CACzCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,WAAaR,EAAM,KAAK,UAAU,OAAO,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC/E,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACnF,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUR,EAAM,KAAK,UAAU,IAAI,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACzE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACnF,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,iBAAmBR,EAAM,KAAK,UAAU,aAAa,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC3F,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,WAAaE,EAAO,OAAO,EACnC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA0B,EAAU,gBAAkB,SAAyBxB,EAAQ,CACzD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAwB,EAAU,OAAS,SAAgB1B,EAAS,CACxC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,IAAIH,EAAa,CAAC,EAClB,GAAIG,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpEH,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,OAAO,OAAOQ,EAAQ,UAAU,EACjE,GAAIO,EACA,MAAO,cAAgBA,CAC/B,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,YAAY,EACrE,GAAIO,EACA,MAAO,gBAAkBA,CACjC,CACJ,CACA,GAAIP,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,IAAI,OAAOQ,EAAQ,OAAO,EAC3D,GAAIO,EACA,MAAO,WAAaA,CAC5B,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,YAAY,EACrE,GAAIO,EACA,MAAO,gBAAkBA,CACjC,CACJ,CACA,GAAIP,EAAQ,kBAAoB,MAAQA,EAAQ,eAAe,kBAAkB,EAAG,CAChF,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,aAAa,OAAOQ,EAAQ,gBAAgB,EAC7E,GAAIO,EACA,MAAO,oBAAsBA,CACrC,CACJ,CACA,OAAIP,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,GAC7D,CAACT,EAAM,SAASS,EAAQ,UAAU,EAC3B,8BACR,IACX,EAUA0B,EAAU,WAAa,SAAoBlB,EAAQ,CAC/C,GAAIA,aAAkBhB,EAAM,KAAK,UAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAC7B,GAAIgB,EAAO,YAAc,KAAM,CAC3B,GAAI,OAAOA,EAAO,YAAe,SAC7
B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,WAAaR,EAAM,KAAK,UAAU,OAAO,WAAWgB,EAAO,UAAU,CACjF,CACA,GAAIA,EAAO,cAAgB,KAAM,CAC7B,GAAI,OAAOA,EAAO,cAAiB,SAC/B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,WAAWgB,EAAO,YAAY,CACvF,CACA,GAAIA,EAAO,SAAW,KAAM,CACxB,GAAI,OAAOA,EAAO,SAAY,SAC1B,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,QAAUR,EAAM,KAAK,UAAU,IAAI,WAAWgB,EAAO,OAAO,CACxE,CACA,GAAIA,EAAO,cAAgB,KAAM,CAC7B,GAAI,OAAOA,EAAO,cAAiB,SAC/B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,WAAWgB,EAAO,YAAY,CACvF,CACA,GAAIA,EAAO,kBAAoB,KAAM,CACjC,GAAI,OAAOA,EAAO,kBAAqB,SACnC,MAAM,UAAU,mDAAmD,EACvER,EAAQ,iBAAmBR,EAAM,KAAK,UAAU,aAAa,WAAWgB,EAAO,gBAAgB,CACnG,CACA,OAAIA,EAAO,YAAc,OACrBR,EAAQ,WAAa,OAAOQ,EAAO,UAAU,GAC1CR,CACX,EAWA0B,EAAU,SAAW,SAAkB1B,EAASS,EAAS,CAChDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,WAAa,IACpBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAahB,EAAM,KAAK,UAAU,OAAO,SAASQ,EAAQ,WAAYS,CAAO,EAChFA,EAAQ,SACRD,EAAO,MAAQ,eAEnBR,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAehB,EAAM,KAAK,UAAU,SAAS,SAASQ,EAAQ,aAAcS,CAAO,EACtFA,EAAQ,SACRD,EAAO,MAAQ,iBAEnBR,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUhB,EAAM,KAAK,UAAU,IAAI,SAASQ,EAAQ,QAASS,CAAO,EACvEA,EAAQ,SACRD,EAAO,MAAQ,YAEnBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAaR,EAAQ,YAC5BA,EAAQ,kBAAoB,MAAQA,EAAQ,eAAe,kBAAkB,IAC7EQ,EAAO,iBAAmBhB,EAAM,KAAK,UAAU,aAAa,SAASQ,EAAQ,iBAAkBS,CAAO,EAClGA,EAAQ,SACRD,EAAO,MAAQ,qBAEnBR,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAehB,EAAM,KAAK,UAAU,SAAS,SAASQ,EAAQ,aAAcS,CAAO,EACtFA,EAAQ,SACRD,EAAO,MAAQ,iBAEhBA,CACX,EASAkB,EAAU,UAAU,OAAS,UAAkB,CAC3C,OAAO,KAAK,YAAY,SAAS,KAAMtC,GAAU,KAAK,aAAa,CACvE,EAUAsC,EAAU,WAAa,SAAoBd,EAAe,CACtD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,iBAC3B,EAEAc,EAAU,OAAU,UAAW,CAkB3B,SAASC,EAAO9B,EAAY,CACxB,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA4B,EAAO,UAAU,SAAW,EAQ5BA,EAAO,UAAU,MAAQ,KAUzBA,EAAO,OAAS,SAAgB9B,EAAY,CACxC,OAAO,IAAI8B,EAAO9B,CAAU,CAChC,EAWA8B,EAAO,OAAS,SAAgB3B,EAASC,EAAQ,CAC7C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,QAAQ,EAC9DA,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpER,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,MAAOC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACvGA,CACX,EAWA0B,EAAO,gBAAkB,SAAyB3B,EAASC,EAAQ,CAC/D,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA0B,EAAO,OAAS,SAAgBzB,EAAQC,EAAQ,CACtCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,OAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,MAAQR,EAAM,KAAK,iBAAiB,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC1E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA2B,EAAO,gBAAkB,SAAyBzB,EAAQ,CACtD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAyB,EAAO,OAAS,SAAgB3B,EAAS,CACrC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,GACzD,CAACT,EAAM,UAAUS,EAAQ,QAAQ,EACjC,MAAO,6BACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,IAAIO,EAAQf,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,KAAK,EAC5D,GAAIO,EACA,MAAO,SAAWA,CAC1B,CACA,OAAO,IACX,EAUAoB,EAAO,WAAa,SAAoBnB,EAAQ,CAC5C,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,OACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,OAGvC,GAFIgB,EAAO,UAAY,OACnBR,EAAQ,SAAWQ,EAAO,SAAW,GACrCA,EAAO,OAAS,KAAM,CACtB,GAAI,OAAOA,EAAO,OAAU,SACxB,MAAM,UAAU,+CAA+C,EACnER,EAAQ,
MAAQR,EAAM,KAAK,iBAAiB,WAAWgB,EAAO,KAAK,CACvE,CACA,OAAOR,CACX,EAWA2B,EAAO,SAAW,SAAkB3B,EAASS,EAAS,CAC7CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,EAClBA,EAAO,MAAQ,MAEfR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,UAC1BA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACvDQ,EAAO,MAAQhB,EAAM,KAAK,iBAAiB,SAASQ,EAAQ,MAAOS,CAAO,GACvED,CACX,EASAmB,EAAO,UAAU,OAAS,UAAkB,CACxC,OAAO,KAAK,YAAY,SAAS,KAAMvC,GAAU,KAAK,aAAa,CACvE,EAUAuC,EAAO,WAAa,SAAoBf,EAAe,CACnD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,wBAC3B,EAEOe,CACX,EAAG,EAEHD,EAAU,SAAY,UAAW,CAiB7B,SAASE,EAAS/B,EAAY,CAC1B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA6B,EAAS,UAAU,SAAW,KAU9BA,EAAS,OAAS,SAAgB/B,EAAY,CAC1C,OAAO,IAAI+B,EAAS/B,CAAU,CAClC,EAWA+B,EAAS,OAAS,SAAgB5B,EAASC,EAAQ,CAC/C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1ER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,SAAUC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnGA,CACX,EAWA2B,EAAS,gBAAkB,SAAyB5B,EAASC,EAAQ,CACjE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA2B,EAAS,OAAS,SAAgB1B,EAAQC,EAAQ,CACxCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,SAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACtE,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA4B,EAAS,gBAAkB,SAAyB1B,EAAQ,CACxD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA0B,EAAS,OAAS,SAAgB5B,EAAS,CACvC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,EAAG,CAChE,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,QAAQ,EACxD,GAAIO,EACA,MAAO,YAAcA,CAC7B,CACA,OAAO,IACX,EAUAqB,EAAS,WAAa,SAAoBpB,EAAQ,CAC9C,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,SACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,SACvC,GAAIgB,EAAO,UAAY,KAAM,CACzB,GAAI,OAAOA,EAAO,UAAa,SAC3B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,SAAWR,EAAM,KAAK,UAAU,WAAWgB,EAAO,QAAQ,CACtE,CACA,OAAOR,CACX,EAWA4B,EAAS,SAAW,SAAkB5B,EAASS,EAAS,CAC/CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,MAClBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,SAAUS,CAAO,GACtED,CACX,EASAoB,EAAS,UAAU,OAAS,UAAkB,CAC1C,OAAO,KAAK,YAAY,SAAS,KAAMxC,GAAU,KAAK,aAAa,CACvE,EAUAwC,EAAS,WAAa,SAAoBhB,EAAe,CACrD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,0BAC3B,EAEOgB,CACX,EAAG,EAEHF,EAAU,IAAO,UAAW,CAkBxB,SAASG,EAAIhC,EAAY,CACrB,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA8B,EAAI,UAAU,QAAU,EAQxBA,EAAI,UAAU,UAAY,KAU1BA,EAAI,OAAS,SAAgBhC,EAAY,CACrC,OAAO,IAAIgC,EAAIhC,CAAU,CAC7B,EAWAgC,EAAI,OAAS,SAAgB7B,EAASC,EAAQ,CAC1C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxEC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,OAAO,EAC7DA,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5ER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,UAAWC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACpGA,CACX,EAWA4B,EAAI,gBAAkB,SAAyB7B,EAASC,EAAQ,CAC5D,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA4B,EAAI,OAAS,SAAgB3B,EAAQC,EAAQ,CACnCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,IAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,QAAUE,EAAO,MAAM,EAC/
B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACvE,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA6B,EAAI,gBAAkB,SAAyB3B,EAAQ,CACnD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA2B,EAAI,OAAS,SAAgB7B,EAAS,CAClC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,GACvD,CAACT,EAAM,UAAUS,EAAQ,OAAO,EAChC,MAAO,4BACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,SAAS,EACzD,GAAIO,EACA,MAAO,aAAeA,CAC9B,CACA,OAAO,IACX,EAUAsB,EAAI,WAAa,SAAoBrB,EAAQ,CACzC,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,IACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,IAGvC,GAFIgB,EAAO,SAAW,OAClBR,EAAQ,QAAUQ,EAAO,QAAU,GACnCA,EAAO,WAAa,KAAM,CAC1B,GAAI,OAAOA,EAAO,WAAc,SAC5B,MAAM,UAAU,gDAAgD,EACpER,EAAQ,UAAYR,EAAM,KAAK,UAAU,WAAWgB,EAAO,SAAS,CACxE,CACA,OAAOR,CACX,EAWA6B,EAAI,SAAW,SAAkB7B,EAASS,EAAS,CAC1CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,QAAU,EACjBA,EAAO,UAAY,MAEnBR,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUR,EAAQ,SACzBA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,UAAWS,CAAO,GACxED,CACX,EASAqB,EAAI,UAAU,OAAS,UAAkB,CACrC,OAAO,KAAK,YAAY,SAAS,KAAMzC,GAAU,KAAK,aAAa,CACvE,EAUAyC,EAAI,WAAa,SAAoBjB,EAAe,CAChD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,qBAC3B,EAEOiB,CACX,EAAG,EAEHH,EAAU,SAAY,UAAW,CAiB7B,SAASI,EAASjC,EAAY,CAC1B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA+B,EAAS,UAAU,SAAW,KAU9BA,EAAS,OAAS,SAAgBjC,EAAY,CAC1C,OAAO,IAAIiC,EAASjC,CAAU,CAClC,EAWAiC,EAAS,OAAS,SAAgB9B,EAASC,EAAQ,CAC/C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1ER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,SAAUC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnGA,CACX,EAWA6B,EAAS,gBAAkB,SAAyB9B,EAASC,EAAQ,CACjE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA6B,EAAS,OAAS,SAAgB5B,EAAQC,EAAQ,CACxCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,SAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACtE,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA8B,EAAS,gBAAkB,SAAyB5B,EAAQ,CACxD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA4B,EAAS,OAAS,SAAgB9B,EAAS,CACvC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,EAAG,CAChE,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,QAAQ,EACxD,GAAIO,EACA,MAAO,YAAcA,CAC7B,CACA,OAAO,IACX,EAUAuB,EAAS,WAAa,SAAoBtB,EAAQ,CAC9C,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,SACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,SACvC,GAAIgB,EAAO,UAAY,KAAM,CACzB,GAAI,OAAOA,EAAO,UAAa,SAC3B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,SAAWR,EAAM,KAAK,UAAU,WAAWgB,EAAO,QAAQ,CACtE,CACA,OAAOR,CACX,EAWA8B,EAAS,SAAW,SAAkB9B,EAASS,EAAS,CAC/CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,MAClBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,SAAUS,CAAO,GACtED,CACX,EASAsB,EAAS,UAAU,OAAS,UAAkB,CAC1C,OAAO,KAAK,YAAY,SAAS,KAAM1C,GAAU,KAAK,aAAa,CACvE,EAUA0C,EAAS,WAAa,SAAoBlB,EAAe,CACrD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,0BAC3B,EAEOkB,CACX,EAAG,EAEHJ,EAAU,aAAgB,UAAW,CAkBjC,SAASK,EAAalC,EAAY,CAC9B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD
,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAgC,EAAa,UAAU,SAAW,EAQlCA,EAAa,UAAU,MAAQ,KAU/BA,EAAa,OAAS,SAAgBlC,EAAY,CAC9C,OAAO,IAAIkC,EAAalC,CAAU,CACtC,EAWAkC,EAAa,OAAS,SAAgB/B,EAASC,EAAQ,CACnD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,QAAQ,EAC9DA,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpER,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,MAAOC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACvGA,CACX,EAWA8B,EAAa,gBAAkB,SAAyB/B,EAASC,EAAQ,CACrE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA8B,EAAa,OAAS,SAAgB7B,EAAQC,EAAQ,CAC5CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,aAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,MAAQR,EAAM,KAAK,iBAAiB,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC1E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA+B,EAAa,gBAAkB,SAAyB7B,EAAQ,CAC5D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA6B,EAAa,OAAS,SAAgB/B,EAAS,CAC3C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,GACzD,CAACT,EAAM,UAAUS,EAAQ,QAAQ,EACjC,MAAO,6BACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,IAAIO,EAAQf,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,KAAK,EAC5D,GAAIO,EACA,MAAO,SAAWA,CAC1B,CACA,OAAO,IACX,EAUAwB,EAAa,WAAa,SAAoBvB,EAAQ,CAClD,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,aACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,aAGvC,GAFIgB,EAAO,UAAY,OACnBR,EAAQ,SAAWQ,EAAO,SAAW,GACrCA,EAAO,OAAS,KAAM,CACtB,GAAI,OAAOA,EAAO,OAAU,SACxB,MAAM,UAAU,qDAAqD,EACzER,EAAQ,MAAQR,EAAM,KAAK,iBAAiB,WAAWgB,EAAO,KAAK,CACvE,CACA,OAAOR,CACX,EAWA+B,EAAa,SAAW,SAAkB/B,EAASS,EAAS,CACnDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,EAClBA,EAAO,MAAQ,MAEfR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,UAC1BA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACvDQ,EAAO,MAAQhB,EAAM,KAAK,iBAAiB,SAASQ,EAAQ,MAAOS,CAAO,GACvED,CACX,EASAuB,EAAa,UAAU,OAAS,UAAkB,CAC9C,OAAO,KAAK,YAAY,SAAS,KAAM3C,GAAU,KAAK,aAAa,CACvE,EAUA2C,EAAa,WAAa,SAAoBnB,EAAe,CACzD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,8BAC3B,EAEOmB,CACX,EAAG,EAEIL,CACX,EAAG,EAEHjC,EAAK,mBAAsB,UAAW,CAkBlC,SAASuC,EAAmBnC,EAAY,CACpC,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAiC,EAAmB,UAAU,OAAS,GAQtCA,EAAmB,UAAU,QAAUzC,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAUrFyC,EAAmB,OAAS,SAAgBnC,EAAY,CACpD,OAAO,IAAImC,EAAmBnC,CAAU,CAC5C,EAWAmC,EAAmB,OAAS,SAAgBhC,EAASC,EAAQ,CACzD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,EAC9DA,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxEC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,OAAO,EAC3DC,CACX,EAWA+B,EAAmB,gBAAkB,SAAyBhC,EAASC,EAAQ,CAC3E,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA+B,EAAmB,OAAS,SAAgB9B,EAAQC,EAAQ,CAClDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,mBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUE,EAAO,MAAM,EAC/B,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAgC,EAAmB,gBAAkB,SAAyB9B,EAAQ,CAClE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA8B,EAAmB,OAAS,SAAgBhC,EAAS,CACjD,OA
AI,OAAOA,GAAY,UAAYA,IAAY,KACpC,kBACPA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EACvB,0BACXA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,GACvD,CAACT,EAAM,UAAUS,EAAQ,OAAO,GAAK,EAAEA,EAAQ,SAAWT,EAAM,UAAUS,EAAQ,QAAQ,GAAG,GAAKT,EAAM,UAAUS,EAAQ,QAAQ,IAAI,GAC/H,iCACR,IACX,EAUAgC,EAAmB,WAAa,SAAoBxB,EAAQ,CACxD,GAAIA,aAAkBhB,EAAM,KAAK,mBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,mBAC7B,OAAIgB,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GACrCA,EAAO,SAAW,OACdjB,EAAM,MACLS,EAAQ,QAAUT,EAAM,KAAK,UAAUiB,EAAO,OAAO,GAAG,SAAW,GAC/D,OAAOA,EAAO,SAAY,SAC/BR,EAAQ,QAAU,SAASQ,EAAO,QAAS,EAAE,EACxC,OAAOA,EAAO,SAAY,SAC/BR,EAAQ,QAAUQ,EAAO,QACpB,OAAOA,EAAO,SAAY,WAC/BR,EAAQ,QAAU,IAAIT,EAAM,SAASiB,EAAO,QAAQ,MAAQ,EAAGA,EAAO,QAAQ,OAAS,CAAC,EAAE,SAAS,IACpGR,CACX,EAWAgC,EAAmB,SAAW,SAAkBhC,EAASS,EAAS,CACzDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,GAAIC,EAAQ,SAER,GADAD,EAAO,OAAS,GACZjB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,QAAUC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CAC/G,MACIF,EAAO,QAAUC,EAAQ,QAAU,OAAS,IAAM,EAE1D,OAAIT,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACxBA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IACvD,OAAOA,EAAQ,SAAY,SAC3BQ,EAAO,QAAUC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,OAAO,EAAIA,EAAQ,QAE9EQ,EAAO,QAAUC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,OAAO,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,QAAQ,MAAQ,EAAGA,EAAQ,QAAQ,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,SAC7NQ,CACX,EASAwB,EAAmB,UAAU,OAAS,UAAkB,CACpD,OAAO,KAAK,YAAY,SAAS,KAAM5C,GAAU,KAAK,aAAa,CACvE,EAUA4C,EAAmB,WAAa,SAAoBpB,EAAe,CAC/D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,0BAC3B,EAEOoB,CACX,EAAG,EASHvC,EAAK,eAAkB,UAAW,CAC9B,IAAIC,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,cAAc,EAAI,EACzCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EAC5BC,CACX,EAAG,EAEHF,EAAK,cAAiB,UAAW,CAyB7B,SAASwC,EAAcpC,EAAY,CAO/B,GANA,KAAK,MAAQ,CAAC,EACd,KAAK,OAAS,CAAC,EACf,KAAK,UAAY,CAAC,EAClB,KAAK,eAAiB,CAAC,EACvB,KAAK,KAAO,CAAC,EACb,KAAK,YAAc,CAAC,EAChBA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAkC,EAAc,UAAU,KAAO,GAQ/BA,EAAc,UAAU,MAAQ1C,EAAM,WAQtC0C,EAAc,UAAU,OAAS1C,EAAM,WAQvC0C,EAAc,UAAU,UAAY1C,EAAM,WAQ1C0C,EAAc,UAAU,eAAiB1C,EAAM,WAQ/C0C,EAAc,UAAU,KAAO1C,EAAM,WAQrC0C,EAAc,UAAU,UAAY,GAQpCA,EAAc,UAAU,YAAc1C,EAAM,WAQ5C0C,EAAc,UAAU,OAAS,GAUjCA,EAAc,OAAS,SAAgBpC,EAAY,CAC/C,OAAO,IAAIoC,EAAcpC,CAAU,CACvC,EAWAoC,EAAc,OAAS,SAAgBjC,EAASC,EAAQ,CAKpD,GAJKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,OAAS,MAAQA,EAAQ,MAAM,OACvC,QAAS,EAAI,EAAG,EAAIA,EAAQ,MAAM,OAAQ,EAAE,EACxCC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,CAAC,CAAC,EACxE,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OACzC,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzCC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,OAAO,CAAC,CAAC,EACzE,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAC/C,QAAS,EAAI,EAAG,EAAIA,EAAQ,UAAU,OAAQ,EAAE,EAC5CC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,CAAC,CAAC,EAC5E,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OACrC,QAAS,EAAI,EAAG,EAAIA,EAAQ,KAAK,OAAQ,EAAE,EACvCR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAK,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAG7G,GAFID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EACjEA,EAAQ,aAAe,MAAQA,EAAQ,YAAY,OACnD,QAAS,EAAI,EAAG,EAAIA,EAAQ,YAAY,OAAQ,EAAE,EAC9CR,EAAM,KAAK,mBAAmB,OAAOQ,EAAQ,YAAY,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAG7H,GAFID,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA+B,EAAE,EAAE,OAAOD,E
AAQ,MAAM,EAC/DA,EAAQ,gBAAkB,MAAQA,EAAQ,eAAe,OACzD,QAAS,EAAI,EAAG,EAAIA,EAAQ,eAAe,OAAQ,EAAE,EACjDR,EAAM,KAAK,eAAe,OAAOQ,EAAQ,eAAe,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC7H,OAAOA,CACX,EAWAgC,EAAc,gBAAkB,SAAyBjC,EAASC,EAAQ,CACtE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAgC,EAAc,OAAS,SAAgB/B,EAAQC,EAAQ,CAC7CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,cACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,OAASA,EAAQ,MAAM,SACjCA,EAAQ,MAAQ,CAAC,GACrBA,EAAQ,MAAM,KAAKE,EAAO,OAAO,CAAC,EAClC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,GACtBA,EAAQ,OAAO,KAAKE,EAAO,OAAO,CAAC,EACnC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,GACzBA,EAAQ,UAAU,KAAKE,EAAO,OAAO,CAAC,EACtC,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,gBAAkBA,EAAQ,eAAe,SACnDA,EAAQ,eAAiB,CAAC,GAC9BA,EAAQ,eAAe,KAAKR,EAAM,KAAK,eAAe,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACrF,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,GACpBA,EAAQ,KAAK,KAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,aAAeA,EAAQ,YAAY,SAC7CA,EAAQ,YAAc,CAAC,GAC3BA,EAAQ,YAAY,KAAKR,EAAM,KAAK,mBAAmB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtF,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAiC,EAAc,gBAAkB,SAAyB/B,EAAQ,CAC7D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA+B,EAAc,OAAS,SAAgBjC,EAAS,CAC5C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,GAAI,CAAC,MAAM,QAAQA,EAAQ,KAAK,EAC5B,MAAO,wBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,MAAM,OAAQ,EAAED,EACxC,GAAI,CAACR,EAAM,SAASS,EAAQ,MAAMD,CAAC,CAAC,EAChC,MAAO,0BACnB,CACA,GAAIC,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,OAAO,OAAQ,EAAED,EACzC,GAAI,CAACR,EAAM,SAASS,EAAQ,OAAOD,CAAC,CAAC,EACjC,MAAO,2BACnB,CACA,GAAIC,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAC5C,GAAI,CAACR,EAAM,SAASS,EAAQ,UAAUD,CAAC,CAAC,EACpC,MAAO,8BACnB,CACA,GAAIC,EAAQ,gBAAkB,MAAQA,EAAQ,eAAe,gBAAgB,EAAG,CAC5E,GAAI,CAAC,MAAM,QAAQA,EAAQ,cAAc,EACrC,MAAO,iCACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,eAAe,OAAQ,EAAED,EAAG,CACpD,IAAIQ,EAAQf,EAAM,KAAK,eAAe,OAAOQ,EAAQ,eAAeD,CAAC,CAAC,EACtE,GAAIQ,EACA,MAAO,kBAAoBA,CACnC,CACJ,CACA,GAAIP,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,KAAK,OAAQ,EAAED,EAAG,CAC1C,IAAIQ,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAKD,CAAC,CAAC,EACvD,GAAIQ,EACA,MAAO,QAAUA,CACzB,CACJ,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,EAAG,CACtE,GAAI,CAAC,MAAM,QAAQA,EAAQ,WAAW,EAClC,MAAO,8BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,YAAY,OAAQ,EAAED,EAAG,CACjD,IAAIQ,EAAQf,EAAM,KAAK,mBAAmB,OAAOQ,EAAQ,YAAYD,CAAC,CAAC,EACvE,GAAIQ,EACA,MAAO,eAAiBA,CAChC,CACJ,CACA,OAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EACvB,0BACR,IACX,EAUAiC,EAAc,WAAa,SAAoBzB,EAAQ,CACnD,GAAIA,aAAkBhB,EAAM,KAAK,cAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,cAG7B,GAFIgB,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,MAAO,CAC
d,GAAI,CAAC,MAAM,QAAQA,EAAO,KAAK,EAC3B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,MAAQ,CAAC,EACjB,QAAS,EAAI,EAAG,EAAIQ,EAAO,MAAM,OAAQ,EAAE,EACvCR,EAAQ,MAAM,CAAC,EAAI,OAAOQ,EAAO,MAAM,CAAC,CAAC,CACjD,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,4CAA4C,EAChER,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EACxCR,EAAQ,OAAO,CAAC,EAAI,OAAOQ,EAAO,OAAO,CAAC,CAAC,CACnD,CACA,GAAIA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAC3CR,EAAQ,UAAU,CAAC,EAAI,OAAOQ,EAAO,UAAU,CAAC,CAAC,CACzD,CACA,GAAIA,EAAO,eAAgB,CACvB,GAAI,CAAC,MAAM,QAAQA,EAAO,cAAc,EACpC,MAAM,UAAU,oDAAoD,EACxER,EAAQ,eAAiB,CAAC,EAC1B,QAAS,EAAI,EAAG,EAAIQ,EAAO,eAAe,OAAQ,EAAE,EAAG,CACnD,GAAI,OAAOA,EAAO,eAAe,CAAC,GAAM,SACpC,MAAM,UAAU,qDAAqD,EACzER,EAAQ,eAAe,CAAC,EAAIR,EAAM,KAAK,eAAe,WAAWgB,EAAO,eAAe,CAAC,CAAC,CAC7F,CACJ,CACA,GAAIA,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAAG,CACzC,GAAI,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC1B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,KAAK,CAAC,EAAIR,EAAM,KAAK,UAAU,WAAWgB,EAAO,KAAK,CAAC,CAAC,CACpE,CACJ,CAGA,GAFIA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GAC3CA,EAAO,YAAa,CACpB,GAAI,CAAC,MAAM,QAAQA,EAAO,WAAW,EACjC,MAAM,UAAU,iDAAiD,EACrER,EAAQ,YAAc,CAAC,EACvB,QAAS,EAAI,EAAG,EAAIQ,EAAO,YAAY,OAAQ,EAAE,EAAG,CAChD,GAAI,OAAOA,EAAO,YAAY,CAAC,GAAM,SACjC,MAAM,UAAU,kDAAkD,EACtER,EAAQ,YAAY,CAAC,EAAIR,EAAM,KAAK,mBAAmB,WAAWgB,EAAO,YAAY,CAAC,CAAC,CAC3F,CACJ,CACA,OAAIA,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GAClCR,CACX,EAWAiC,EAAc,SAAW,SAAkBjC,EAASS,EAAS,CACpDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAgBd,IAfIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,MAAQ,CAAC,EAChBA,EAAO,OAAS,CAAC,EACjBA,EAAO,UAAY,CAAC,EACpBA,EAAO,KAAO,CAAC,EACfA,EAAO,YAAc,CAAC,EACtBA,EAAO,eAAiB,CAAC,GAEzBC,EAAQ,WACRD,EAAO,KAAO,GACdA,EAAO,UAAY,GACnBA,EAAO,OAAS,IAEhBR,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,OAASA,EAAQ,MAAM,OAAQ,CACvCQ,EAAO,MAAQ,CAAC,EAChB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,MAAM,OAAQ,EAAEW,EACxCH,EAAO,MAAMG,CAAC,EAAIX,EAAQ,MAAMW,CAAC,CACzC,CACA,GAAIX,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAIX,EAAQ,OAAOW,CAAC,CAC3C,CACA,GAAIX,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAAU,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAIX,EAAQ,UAAUW,CAAC,CACjD,CACA,GAAIX,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACvCH,EAAO,KAAKG,CAAC,EAAInB,EAAM,KAAK,UAAU,SAASQ,EAAQ,KAAKW,CAAC,EAAGF,CAAO,CAC/E,CAGA,GAFIT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,aAAeA,EAAQ,YAAY,OAAQ,CACnDQ,EAAO,YAAc,CAAC,EACtB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,YAAY,OAAQ,EAAEW,EAC9CH,EAAO,YAAYG,CAAC,EAAInB,EAAM,KAAK,mBAAmB,SAASQ,EAAQ,YAAYW,CAAC,EAAGF,CAAO,CACtG,CAGA,GAFIT,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACxBA,EAAQ,gBAAkBA,EAAQ,eAAe,OAAQ,CACzDQ,EAAO,eAAiB,CAAC,EACzB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,eAAe,OAAQ,EAAEW,EACjDH,EAAO,eAAeG,CAAC,EAAInB,EAAM,KAAK,eAAe,SAASQ,EAAQ,eAAeW,CAAC,EAAGF,CAAO,CACxG,CACA,OAAOD,CACX,EASAyB,EAAc,UAAU,OAAS,UAAkB,CAC/C,OAAO,KAAK,YAAY,SAAS,KAAM7C,GAAU,KAAK,aAAa,CACvE,EAUA6C,EAAc,WAAa,SAAoBrB,EAAe,CAC1D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,qBAC3B,EAEOqB,CACX,EAAG,EAEIxC,CACX,EAAG,EAEHN,GAAO,QAAUK,IC78OV,SAAS0C,GAAOC,EAAeC,EAAmB,CACvD,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,OAAOC,GAAQ,SAAWA,EAAMA,EAAI,CAAC,CAEzD,CAosCO,SAASC,GAAiBC,EAA4B,CAC3D,OAAO,IAAI,YAAY,EAAE,OAAOA,CAAM,CAC
xC,CAtuCA,IAQAC,GA0BaC,GAwBAC,GAoDAC,GAwNAC,GAiDAC,GAkGAC,GAiBAC,EAuaAC,GAsIAC,GA0MAC,GACAC,GAluCbC,GAAAC,EAAA,kBAGAC,KACAC,KAIAf,GAAmB,SACnBgB,KAyBaf,GAAN,KAAgB,CAOrB,OAAO,YACHgB,EAEAC,EAC2B,CAC7B,GAAID,EAAG,SAAWC,EAAG,OACnB,MAAO,GAET,QAASC,EAAI,EAAGA,EAAIF,EAAG,OAAQE,IAC7B,GAAIF,EAAGE,CAAC,IAAMD,EAAGC,CAAC,EAChB,MAAO,GAGX,MAAO,EACT,CACF,EAEajB,GAAN,KAAiB,CAOtB,OAAO,sBAAsBkB,EAA0BC,EACZ,CAIzC,IAAMC,EAAKF,EAAM,SAAW,EAAK,CAAC,EAAGA,EAAM,CAAC,CAAC,EAAIA,EAK3CG,EAAKF,EAAM,SAAW,EAAK,CAACA,EAAM,CAAC,EAAG,CAAC,EAAIA,EAEjD,MAAO,CAACC,EAAGC,CAAC,CACd,CASA,OAAO,uBAAuBC,EAAuBC,EAAeC,EAAe,CAE7ED,IAAU,GAEZD,EAAY,OAAOA,EAAY,OAAS,EAAG,CAAC,EAG1CE,IAAU,GACZF,EAAY,IAAI,CAEpB,CAQA,OAAO,gBAAgBF,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAEapB,GAAN,MAAMwB,CAAc,CAQzB,OAAO,UAAUC,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFjC,GAAW,gBAAgB,CAAC0B,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAAShB,EAAIW,EAAW,EAAI,EAAGX,GAAKc,EAAOd,IAAK,CAC9C,IAAMiB,EAAOL,EAAQZ,EAAI,EAAI,EAAIS,EAAMG,EAAQZ,CAAC,EAC1CkB,EAAOL,EAAQb,EAAI,EAAI,EAAIU,EAAMG,EAAQb,CAAC,EAEhD,GAAIiB,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEFH,EAAMD,EAAQd,CAAC,EAAI,KAAK,IAAIiB,EAAMC,CAAI,CACxC,CAEA,OAAOH,CACT,CAQA,OAAO,MAAMI,EAAuCC,EAA4C,CAI9F,IAAMC,EAAkB,IAAI,MAAMD,EAAc,MAAM,EACtD,OAAAZ,EAAc,UAAUW,EAAoBC,EAAeC,CAAe,EACnEA,CACT,CASA,OAAO,UAAUF,EAAuCC,EAAkCC,EAA2B,CAInH,IAAMC,EAAYH,EAAmB,OAASC,EAAc,OAC5D,QAAS,EAAI,EAAG,EAAIA,EAAc,OAAQ,IACxCC,EAAgB,CAAC,EAAIF,EAAmBG,EAAY,CAAC,EAAIF,EAAc,CAAC,CAE5E,CAUA,OAAO,KACHjB,EAAWC,EAAWmB,EAA+DC,EACrFC,EAAgD,CAClD,IAAMpB,EAAcG,EAAc,UAAUL,EAAE,KAAMC,EAAE,IAAI,EAE1D,GAAIC,EAAa,CACf,GAAImB,GAAW,CAACpC,EAAU,SAASiB,EAAaF,EAAE,IAAI,EAEpD,OAGF,IAAMuB,EAAOtC,EAAU,KAAKiB,CAAW,EACjCsB,EAAIH,EAAUrB,EAAI,IAAIyB,GAAOvB,EAAaoB,GAActB,EAAE,IAAI,EAGpE,GAAIE,EAAY,SAAW,EACzBsB,EAAE,IAAI,CAAC,EAAGJ,EAAGpB,EAAE,IAAI,CAAC,CAAC,EAAaC,EAAE,IAAI,CAAC,CAAC,CAAW,CAAC,MAInD,CACH,IAAMyB,EAAgB,IAAI,MAAcxB,EAAY,MAAM,EACpDyB,EAAmB,IAAI,MAAM3B,EAAE,KAAK,MAAM,EAC1C4B,EAAmB,IAAI,MAAM3B,EAAE,KAAK,MAAM,EAC5C4B,EAAsB,EACtBC,EAAsB,EACtBC,EAAY,GACZC,EAAY,GACZhC,EAAE,KAAK,SAAW,IACpB6B,EAAO7B,EAAE,IAAI,CAAC,CAAC,EACf+B,EAAY,IAEV9B,EAAE,KAAK,SAAW,IACpB6B,EAAO7B,EAAE,IAAI,CAAC,CAAC,EACf+B,EAAY,IAEd,IAAIC,EACJ,QAASpC,EAAI,EAAGA,EAAI0B,EAAM1B,IAAK,CAE7BoC,EAAOpC,EACP,QAASqC,EAAIhC,EAAY,OAAS,EAAGgC,GAAK,EAAGA,IAC3CR,EAAcQ,CAAC,EAAID,EAAO/B,EAAYgC,CAAC,EACvCD,EAAO,KAAK,MAAMA,EAAO/B,EAAYgC,CAAC,CAAC,EAGpCH,IAEH1B,EAAc,UAAUqB,EAAe1B,EAAE,KAAM2B,CAAgB,EAC/DE,EAAO7B,EAAE,IAAI2B,CAAgB,GAE1BK,IACH3B,EAAc,UAAUqB,EAAezB,EAAE,KAAM2B,CAAgB,EAC/DE,EAAO7B,EAAE,IAAI2B,CAAgB,GAG/BJ,EAAE,IAAIE,EAAeN,EAAGS,EAAMC,CAAI,CAAC,CACrC,CACF,CAEA,OAAON,CACT,CAGF,CAOA,OAAO,iBAAiBW,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAAS,EAAI,EAAG,GAAKD,EAAW,IAC9B,GAAIF,EAAME,EAAY,CAAC,IAAM,GAAKF,EAAME,EAAY,CAAC,IAAMD,EAAWE,EAAY,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CASA,OAAO,iBAAiBC,EAA+BrC,EAA0C,CAC/F,IAAMsC,EAASD,EAAW,OACpBE,EAAiB,CAAC,EACxB,QAAS,EAAI,EAAG,EAAID,EAAQ,IAAK,CAC/B,IAAME,EAAMF,EAAS,EAAI,EACnB,EAAID,EAAWG,CAAG,GAAK,GACnBxC,EAAYA,EAAY,OAAS,EAAI,CAAC,GAAK,GAC7C,GAAK,IAAM,GACjBuC,EAAK,QAAQC,CAAG,CAEpB,CACA,OAAOD,CACT,CACF,EAyBa3D,
GAAN,KAAe,CAIpB,OAAO,qBACH6D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAEalE,GAAN,MAAMqE,CAAU,CACrB,OAAO,wBAAwBC,EAC8D,CAC3F,OAAQA,EAAW,CACjB,KAAK,QAAK,YAAY,SAAS,KAC7B,MAAO,OACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,KAC7B,MAAO,OACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,UACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,UACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SAIT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0B,QAAK,YAAY,SAASA,CAAS,CAAC,EAAE,CACpF,CACF,CAEA,OAAO,2BAA2BC,EAAyC,CACzE,OAAQA,EAAM,CACZ,IAAK,OACH,OAAO,QAAK,YAAY,SAAS,KACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,OACH,OAAO,QAAK,YAAY,SAAS,KACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,UACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,UACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OAEnC,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,CAEA,OAAO,oBAAoBb,EAAoC,CAE7D,OAAOA,EAAK,IAAIc,GAAKC,GAAK,OAAOD,CAAC,EAAIA,EAAE,SAAS,EAAIA,CAAC,CACxD,CAEA,OAAO,yBAAyBE,EAAoD,CAClF,MAAO,CACL,WAAYL,EAAU,wBAAwBK,EAAU,QAAS,EACjE,MAAO,CAAC,KAAML,EAAU,oBAAoBK,EAAU,MAAO,IAAK,IAAIF,GAAKA,EAAE,QAAS,CAAC,CAAC,CAC1F,CACF,CAEA,OAAO,wBAAwBG,EAA6C,CAC1E,IAAMjB,EAAO,CAAC,EACd,QAAS5C,EAAI,EAAGA,EAAI6D,EAAO,WAAW,EAAG7D,IACvC4C,EAAK,KAAKzD,GAAS,aAAa0E,EAAO,KAAK7D,CAAC,CAAE,CAAC,EAElD,OAAO4C,CACT,CAEA,OAAO,8BAA8BkB,EAAyC,CAC5E,IAAMC,EAAa,CAAC,EACpB,QAAS/D,EAAI,EAAGA,EAAI8D,EAAK,iBAAiB,EAAG9D,IAC3C+D,EAAW,KAAKD,EAAK,WAAW9D,CAAC,CAAE,EAErC,OAAO+D,CACT,CACF,EAEa5E,GAAN,KAAe,CAIpB,OAAO,aAAa6E,EAAiCC,EAAoB,CACvE,OAAIN,GAAK,OAAOK,CAAC,EACRA,EAAE,SAAS,EACTA,aAAaE,EAAY,KAC3BP,GAAK,UAAU,CAAC,IAAKK,EAAE,IAAK,KAAMA,EAAE,KAAM,SAAUC,GAAY,EAAK,CAAC,EAAE,SAAS,EAEnFD,CACT,CACA,OAAO,OAAOA,EAAY,CACxB,OAAOL,GAAK,OAAOK,CAAC,GAAKA,aAAaE,EAAY,IACpD,CACF,EAEa9E,EAAN,MAAM+E,CAAU,CACrB,OAAO,KAAKvB,EAAiC,CAC3C,OAAOuB,EAAU,0BAA0BvB,EAAM,EAAGA,EAAK,MAAM,CACjE,CAGA,OAAO,kBAAkBA,EAAyBwB,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOxB,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBwB,CAAI,wCAAwCxB,EAAK,MAAM,cAAc,EAE/G,OAAOuB,EAAU,0BAA0BvB,EAAMwB,EAAMxB,EAAK,MAAM,CACpE,CAGA,OAAO,gBAAgBA,EAAyBwB,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOxB,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBwB,CAAI,sCAAsCxB,EAAK,MAAM,cAAc,EAE7G,OAAOuB,EAAU,0BAA0BvB,EAAM,EAAGwB,CAAI,CAC1D,CAEA,OAAO,0BAA0BxB,EAAyByB,EAAeC,EAAqB,CAC5F,IAAI5C,EAAO,EACX,QAAS,EAAI2C,EAAO,EAAIC,EAAK,IAAK,CAGhC,GAAI1B,EAAK,CAAC,GAAK,EACb,MAAM,IAAI,MAEN,oHAAoH,EAE1HlB,GAAQkB,EAAK,CAAC,CAChB,CACA,OAAOlB,CACT,CAEA,OAAO,eAAekB,EAA4C,CAChE,IAAM2B,EAAO3B,EAAK,OAClB,GAAI2B,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC9BC,EAAQD,EAAO,CAAC,EAAI,EACpBC,EAAQD,EAAO,CAAC,EAAI3B,EAAK2B,EAAO,CAAC,EACjC,QAASvE,EAAIuE,EAAO,EAAGvE,GAAK,EAAG,EAAEA,EAC/BwE,EAAQxE,CAAC,EAAIwE,EAAQxE,EAAI,CAAC,E
AAI4C,EAAK5C,EAAI,CAAC,EAE1C,OAAOwE,CACT,CAEA,OAAO,UAAU5B,EAA4C,CAE3D,OADaA,EAAK,MAAM,EACZ,QAAQ,CACtB,CAEA,OAAO,gBAAgB6B,EAA4BD,EAA4BJ,EAAuB,CAChGA,IAAS,SACXA,EAAOK,EAAQ,QAEjB,IAAIC,EAAS,EACb,QAAS,EAAI,EAAG,EAAIN,EAAM,EAAE,EAC1BM,GAAUF,EAAQ,CAAC,EAAIC,EAAQ,CAAC,EAElC,OAAOC,CACT,CAEA,OAAO,gBAAgBA,EAAgBF,EAA+C,CACpF,IAAMD,EAAOC,EAAQ,OACrB,GAAID,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAACG,EAASF,EAAQ,CAAC,CAAC,EAE7B,IAAMC,EAAoB,IAAI,MAAMD,EAAQ,MAAM,EAClD,QAAS,EAAI,EAAG,EAAIC,EAAQ,OAAS,EAAG,EAAE,EACxCA,EAAQ,CAAC,EAAI,KAAK,MAAMC,EAASF,EAAQ,CAAC,CAAC,EAC3CE,GAAUD,EAAQ,CAAC,EAAID,EAAQ,CAAC,EAElC,OAAAC,EAAQA,EAAQ,OAAS,CAAC,EAAIC,EACvBD,CACT,CAKA,OAAO,cAAcL,EAAcO,EAA4B,CAC7D,GAAIP,EAAO,CAACO,GAAcP,GAAQO,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOP,EAAO,EAAIA,EAAOO,EAAaP,CACxC,CAEA,OAAO,cAAcQ,EAAyBD,EAA8B,CAC1E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,CAAU,CAAC,CACxD,CAUA,OAAO,eAAeG,EAAiBlC,EAAyBmC,EAA4B,CAC1F,GAAInC,EAAK,SAAW,GAAKkC,EAAM,SAAW,EACxC,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIC,IAAsB,OACxBA,EAAoBnC,EAAK,eAErBmC,GAAqB,GAAKA,EAAoBnC,EAAK,OACrD,MAAM,IAAI,MAAM,gCAAgC,EAIpD,QAASoC,EAAID,EAAoB,EAAGC,GAAK,IACvCF,EAAME,CAAC,IACH,EAAAF,EAAME,CAAC,EAAIpC,EAAKoC,CAAC,IAFqB,EAAEA,EAK5CF,EAAME,CAAC,EAAI,CAEf,CAcA,OAAO,sBAAsBC,EAAiCC,EAAyC,CAErG,GAAIA,EAAW,SAAW,EAAG,CAC3B,GAAID,EAAa,SAAW,GAAKd,EAAU,KAAKc,CAAY,IAAM,EAChE,MAAO,CAAC,EAER,MAAM,IAAI,MAAM,mCAAmC,CAEvD,CAEA,IAAME,EAAQD,EAAW,OACnBE,EAAe,IAAI,MAAcD,CAAK,EACxCE,EAAmB,GACnBC,EAAgB,EACpB,QAAStF,EAAI,EAAGA,EAAImF,EAAOnF,IAAK,CAC9B,GAAIkF,EAAWlF,CAAC,EAAI,GAClB,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIkF,EAAWlF,CAAC,IAAM,GAAI,CACxB,GAAIqF,IAAqB,GACvB,MAAM,IAAI,MAAM,gDAAgD,EAElEA,EAAmBrF,CACrB,KAAO,CACL,GAAIkF,EAAWlF,CAAC,IAAM,EAAG,CACvB,GAAIA,GAAKiF,EAAa,OACpB,MAAM,IAAI,MAAM,8EAA8E,EAEhGG,EAAapF,CAAC,EAAIiF,EAAajF,CAAC,CAClC,MACEoF,EAAapF,CAAC,EAAIkF,EAAWlF,CAAC,EAEhCsF,GAAiBF,EAAapF,CAAC,CACjC,CACF,CAEA,IAAMuF,EAAgBpB,EAAU,KAAKc,CAAY,EACjD,GAAII,IAAqB,GAAI,CAC3B,GAAIE,EAAgBD,IAAkB,EACpC,MAAM,IAAI,MAAM,6EACZL,CAAY,oBAAoBC,CAAU,GAAG,EAEnDE,EAAaC,CAAgB,EAAIE,EAAgBD,CACnD,SAGMA,IAAkBC,EACpB,MAAM,IAAI,MAAM,yDAA0D,EAG9E,OAAOH,CACT,CAQA,OAAO,gBAAgBjF,EAAsBqF,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAMtF,EAAEsF,CAAC,CAAC,EAEpBtF,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASyC,EAAyB8C,EAA2C,CAClF,IAAMnB,EAAO3B,EAAK,OAClB,OAAOA,EAAK,IAAI,CAAC6C,EAAG,IAAMA,EAAIC,EAAI,CAAC,EAAIA,EAAI,EAAInB,CAAI,CAAC,CACtD,CAOA,OAAO,SAASoB,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGzF,IAAMyF,IAAMG,EAAO5F,CAAC,CAAC,CAC/C,CAMA,OAAO,wBAAwB4C,EAAiC,CAC9D,GAAIA,EAAK,OAAS,EAChB,MAAM,IAAI,UAAU,iDAAiD,EAEvE,IAAIlB,EAAO,EACX,QAAWsC,KAAKpB,EAAM,CACpB,GAAI,CAAC,OAAO,UAAUoB,CAAC,EACrB,MAAM,IAAI,UAAU,kBAAkBA,CAAC,oBAAoB,EAE7D,GAAIA,EAAI,GAAKA,EAAI,WACf,MAAM,IAAI,UAAU,yBAAyBA,CAAC,iBAAiB,EAEjEtC,GAAQsC,CACV,CACA,OAAOtC,CACT,CAOA,OAAO,aAAakB,EAAyBwB,EAAiC,CACxEA,EAAO,IACTA,GAAQxB,EAAK,QAEf,IAAMiD,EAAQjD,EAAK,OAAO,CAACiC,EAAGiB,IAAMjB,EAAIiB,EAAG,CAAC,EACtCC,EAAQnD,EAAK,MAAMwB,CAAI,EAAE,OAAO,CAACS,EAAGiB,IAAMjB,EAAIiB,EAAG,CAAC,EAGxD,MAFmB,CAACD,EAAQE,EAAOA,CAAK,CAG1C,CAOA,OAAO,aAAanD,EAAyBgC,EAA4C,CACvF,IAAMoB,EAAa,IAAI,MAGvBpB,EAAOT,EAAU,cAAcS,EAAMhC,EAAK,MAAM,EAEhD,QAAS5C,EAAI,EAAGA,EAAI4C,EAAK,OAAQ5C,IAAK,CACpC,IAAMiG,EAAgBrB,EAAK,QAAQ5E,CAAC,GAAK,EACzC,GAAIiG,GAAiBrD,EAAK5C,CAAC,IAAM,EAC/B,MAAM,IAAI,MAAM,0CAA0C,GAGvD4E,EAAK,SAAW,GAAKhC,EAAK5C,CAAC,EAAI,GAAO4E,EAAK,OAAS,GAAK,CAACqB,IAC7DD,EAAW,KAAKpD,EAAK5C,CAAC,CAAC,CAE3B,CAEA,OAAOgG,CACT,CAOA,OAAO,eAAepD,EAAyBgC,EAA4C,CACzF,IAAMoB,EAAa,IAAI,MAAcpD,EAAK,OAASgC,EAAK,MAAM,EAG9DoB,EAAW,KAAK,CAAC,EAGjB,QAAS,EAAI,EAAG,EAAIpB,EAAK,OAAQ,I
AAK,CACpC,IAAMR,EAAOD,EAAU,cAAcS,EAAK,CAAC,EAAGoB,EAAW,MAAM,EAC/D,GAAI5B,GAAQ4B,EAAW,OACrB,MAAM,IAAI,MAAM,iCAAmC,EAErD,GAAIA,EAAW5B,CAAI,IAAM,EACvB,MAAM,IAAI,MAAM,6BAA+B,EAGjD4B,EAAW5B,CAAI,EAAI,CACrB,CAGA,IAAI8B,EAAoB,EACxB,QAAS,EAAI,EAAG,EAAIF,EAAW,OAAQ,IACjCA,EAAW,CAAC,IAAM,IACpBA,EAAW,CAAC,EAAIpD,EAAKsD,GAAmB,GAM5C,GAAIA,IAAsBtD,EAAK,OAC7B,MAAM,IAAI,MAAM,mDAAmD,EAGrE,OAAOoD,CACT,CACF,EA6Fa3G,GAAN,MAAM8G,CAAU,CAOrB,OAAO,WAAWvD,EAAyBwB,EAAcgC,EAAiBC,EAC/C,CACzB,GAAID,EAAM,SAAW,EAAG,CACtB,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,4EAA8E,EAEhGF,EAAU,eAAevD,EAAKwB,CAAI,EAAGiC,EAAYD,CAAK,CACxD,CAEA,IAAME,EAAqB,CAAC,EACtBC,EAAU,CAAC,CAAC,EAClB,QAASvG,EAAI,EAAGA,EAAIoG,EAAM,OAAQ,EAAEpG,EAAG,CACjCA,IAAM,GACRuG,EAAQ,KAAKA,EAAQvG,EAAI,CAAC,EAAIoG,EAAMpG,EAAI,CAAC,CAAC,EAE5C,IAAMsC,EAAQM,EAAK,MAAM,EACzBN,EAAM8B,CAAI,EAAIgC,EAAMpG,CAAC,EACrBsG,EAAO,KAAKhE,CAAK,CACnB,CACA,MAAO,CAACgE,EAAQC,CAAO,CACzB,CAEA,OAAO,eAAeC,EAA8BH,EAAoBD,EAAiB,CAEvF,GAAII,EAAuBH,IAAe,EACxC,MAAM,IAAI,MAAM,0CAA0C,EAE5D,QAASrG,EAAI,EAAGA,EAAIqG,EAAY,EAAErG,EAChCoG,EAAM,KAAKI,EAAuBH,CAAU,CAEhD,CACF,EAgGa/G,GAAN,MAAMmH,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBpC,EAChFqC,EAAqBC,EAAgB,CACvC,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,MAAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAAS7D,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IACxCA,GAAO+D,EAAY,OACrBA,EAAY,KAAKD,EAAU9D,EAAM,CAAC,CAAC,EAEnC+D,EAAY/D,CAAG,EAAI8D,EAAU9D,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAM+D,EAAY,OAAQ/D,IAC1C,GAAIA,EAAM2B,EAAQ,QAChB,GAAIA,EAAQ3B,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhE2B,EAAQ,KAAK,CAAC,EAKlB,QAAS3B,EAAM,EAAGA,EAAM+D,EAAY,OAAQ/D,IAC1C,GAAIA,EAAMgE,EAAU,QAClB,GAAIA,EAAUhE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEgE,EAAU,KAAK,CAAC,EAKpB,QAAShE,EAAM,EAAGA,EAAM+D,EAAY,OAAS,EAAG/D,IAC9C,GAAIA,EAAMiE,EAAK,QACb,GAAIA,EAAKjE,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DiE,EAAK,KAAK,CAAC,EAKf,QAASjE,EAAM,EAAGA,EAAM+D,EAAY,OAAQ/D,IAAO,CACjD,GAAI+D,EAAY/D,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIiE,EAAKjE,CAAG,GAAK+D,EAAY/D,CAAG,GAAKiE,EAAKjE,EAAM+D,EAAY,MAAM,GAAKA,EAAY/D,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACH8D,EAA8BnC,EAA4BqC,EAC1DD,EAAgCE,EAAgBC,EAAkB,CACpE,GAAKA,EAIL,IAAID,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAInC,EAAQ,SAAYmC,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAAS9D,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IAC5C4D,EAAa,wBACTE,EAAU9D,EAAM,CAAC,EAAG2B,EAAQ3B,CAAG,EAAGgE,EAAUhE,CAAG,EAAG+D,EAAY/D,CAAG,EAAGiE,EAAMjE,EAAKA,EAAM8D,EAAU,OAAS,EACxGI,CAAO,EAEf,CAaA,OAAO,uBACHL,EAA2BC,EAA8BnC,EAAmBqC,EAC5ED,EAAuBE,EAAgBC,EAA4B,CACrE,GAAIJ,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMX,EAAa,CAACW,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWX,EAAYxB,EAASqC,EAAWD,EAAaE,EAAMC,CAAO,EACpFf,CACT,CAYA,OAAO,uBACHW,EAA8BK,EAA+BxC,EAAmBqC,EAChFD,EAAuBE,EAAgBC,EAA4B,CACrE,GAAIJ,EAAU,QAAU,GAAKK,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMhB,EAAa,CAACW,EAAU,CAAC,EAAGK,EAAW,CAAC,CAAC,EAE/C,OAAAP,EAAa,mBAAmB,GAAOE,EAAWX,EAAYxB,EAASqC,EAAWD,EAAaE,EAAMC,CAAO,EACrGf,CACT,CAKA,OAAe,mBACXU,EAA2BC,EAA8BX,EAAsBxB,EAC/EqC,EAA8BD,EAAgCE,EAAgBC,EAAkB,CAClG,GAAIL,EACF,QAAS7D,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IAC5CmD,EAAW,KAAK,CAAC,MAGnB,SAASnD,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IAC5CmD,EAAW,KAAKS,EAAa,wBACzBE,EAAU9D,EAAM,CAAC,EAAG2B,EAAQ3B,CAAG,EAAGgE,EAAUhE,CAAG,EAAG+D,EAAY/D,CAAG,EAAGiE,EAAMjE,EAAKA,EAAM8D,EAAU,OAAS,EACxGI,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXE,EAAgBC,EAAgBC,EAAkBC,EAAgBN,EAAgBO,EAClFC,EAAsBP,EAA0B,CAClD,IAAMQ,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIL,GAAWA,IAAY,S
ACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAD,EAAKO,CAAY,EAAI,EACrBP,EAAKQ,CAAY,EAAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAH,EAAKO,CAAY,EACgB,KAAK,MAAjCN,IAAY,cAA4BS,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/DV,EAAKQ,CAAY,EAAIE,EAAYV,EAAKO,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASH,EAAKO,CAAY,EAAIP,EAAKQ,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEa3H,GAAW,sBACXC,GAAW,uBCn4BxB,SAASiI,GAAOC,EAA+B,CAC7C,OAAQA,EAAM,CACZ,IAAK,OACL,IAAK,OACL,IAAK,QACH,MAAO,GACT,IAAK,QACL,IAAK,SACH,MAAO,GACT,IAAK,QACL,IAAK,SACL,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,qCAAqCA,CAAI,EAAE,CAC/D,CACF,CAEA,SAASC,GAAYD,EAA+D,CAClF,OAAQA,EAAM,CACZ,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,KAC/B,KAAK,QAAK,YAAY,SAAS,KAC7B,MAAO,GACT,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,GACT,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,GACT,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,GACT,QACE,MAAM,IAAI,MAAM,qCAAqC,QAAK,YAAY,SAASA,CAAI,CAAC,EAAE,CAC1F,CACF,CAEA,SAASE,GAAWC,EAAyBH,EAAuB,CAClE,OAAO,IAAKI,GAAoBJ,CAAI,GAAGG,CAAU,CACnD,CAEA,SAASC,GAAoBJ,EAAuB,CAClD,OAAQA,EAAM,CACZ,IAAK,OACL,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,QACH,OAAO,WACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,UACH,OAAO,aACT,IAAK,UACH,OAAO,aACT,QAEE,MAAM,IAAI,MAAM,mBAAmB,CACvC,CACF,CAGA,SAASK,GAAaC,EAASN,EAA+D,CAE5F,GAAIA,IAAS,QAAK,YAAY,SAAS,OAASA,IAASO,GAAO,eAAe,OAC7E,GAAID,EAAE,mBAAmB,UAAU,GAAKA,EAAE,SAAS,WAAW,EAC5D,MAAM,IAAI,UAAU,wBAAwB,UAG5CN,IAAS,QAAK,YAAY,SAAS,QAAUA,IAASO,GAAO,eAAe,QAC5EP,IAAS,QAAK,YAAY,SAAS,QAAUA,IAASO,GAAO,eAAe,QAC9E,GAAID,EAAE,mBAAmB,UAAU,GAAKA,EAAE,SAAS,CAAC,EAClD,MAAM,IAAI,UAAU,yBAAyB,MAG/C,OAAM,IAAI,UAAU,oBAAoB,QAAK,YAAY,SAASN,CAAI,CAAC,EAAE,EAG3E,OAAOM,EAAE,SAAS,CACpB,CAGA,SAASE,GAAUC,EAAgBT,EAAuDU,EAA4B,CACpH,OAAQV,EAAM,CACZ,KAAK,QAAK,YAAY,SAAS,KAC/B,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOS,EAAK,SAASC,CAAU,EACjC,KAAK,QAAK,YAAY,SAAS,KAC7B,OAAOD,EAAK,QAAQC,CAAU,EAChC,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOD,EAAK,UAAUC,EAAY,EAAI,EACxC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOD,EAAK,SAASC,EAAY,EAAI,EACvC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOD,EAAK,WAAWC,EAAY,EAAI,EACzC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOD,EAAK,SAASC,EAAY,EAAI,EACvC,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOD,EAAK,UAAUC,EAAY,EAAI,EACxC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOL,GACHM,GAAK,SAASF,EAAK,UAAUC,EAAY,EAAI,EAAGD,EAAK,UAAUC,EAAa,EAAG,EAAI,EAAG,EAAK,EAAGV,CAAI,EACxG,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOS,EAAK,WAAWC,EAAY,EAAI,EACzC,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOL,GACHM,GAAK,SAASF,EAAK,UAAUC,EAAY,EAAI,EAAGD,EAAK,UAAUC,EAAa,EAAG,EAAI,EAAG,EAAI,EAAGV,CAAI,EACvG,QACE,MAAM,IAAI,MAAM,sCAAsC,QAAK,YAAY,SAASA,CAAI,CAAC,EAAE,CAC3F,CACF,CAzdA,IAGAY,GAIAC,GAGON,GAkCMO,GA5CbC,GAAAC,EAAA,kBAGAJ,GAAmB,SACnBK,KAEAC,KACAL,GAAmB,SACnBM,KAEOZ,GAASa,GAAY,aAAa,IAkC5BN,GAAN,MAAMO,CAAO,CA+GlB,YAIoBC,EAIAtB,EAA+BuB,EACvCC,EAA+CC,EAIvCC,EAAe,QAAK,OAAO,EAAG,CAT9B,UAAAJ,EAIA,UAAAtB,EAA+B,kBAAAuB,EACvC,uBAAAC,EAA+C,WAAAC,EAIvC,YAAAC,EAClB,KAAK,KAAOC,EAAU,wBAAwBL,CAAI,EAClD,IAAMM,EAAO,KAAK,KACZC,EAASN,IAAiB,QAAaC,IAAsB,QAAaC,IAAU,OAE1F,GAAIA,IAAU,QACRA,EAAM,SAAWG,EACnB,MAAM,IAAI,WAAW,uCAAwC,EAIjE,GAAI5B,IAAS,SAAU,CACrB,GAAIyB,IAAU,SAAc,CAAC,MAAM,QAAQA,CAAK,GAAK,CAACA,EAAM,MAAMnB,GAAK,OAAOA,GAAM,QAAQ,GAC1F,MAAM,IAAI,UAAU,gCAAgC,EAGlDuB,IACF,KAAK,MAAQ,IAAI,MAAcD,CAAI,EAEvC,KAAO,CACL,GAAIH,IAAU,OAAW,CACvB,IAAMK,EAAc1B
,GAAoBJ,CAAI,EAC5C,GAAI,EAAEyB,aAAiBK,GACrB,MAAM,IAAI,UAAU,wBAAwBA,EAAY,IAAI,EAAE,CAElE,CAEA,GAAID,EAAO,CACT,IAAME,EAAM,IAAI,YAAYH,EAAO7B,GAAOC,CAAI,CAAC,EAC/C,KAAK,MAAQE,GAAW6B,EAAK/B,CAAI,CACnC,CACF,CACF,CAxJA,IAAI,MAAmB,CACrB,GAAI,KAAK,QAAU,OAAW,CAC5B,IAAMgC,EAAO,KAAK,aAAc,KAAK,MAAM,EAC3C,GAAIA,EAAK,SAAW,KAAK,KACvB,MAAM,IAAI,MAAM,4FAA4F,EAE9G,KAAK,MAAQA,CACf,CACA,OAAO,KAAK,KACd,CAKA,IAAI,YAAa,CACf,GAAI,KAAK,OAAS,SAChB,MAAM,IAAI,UAAU,yBAAyB,EAG/C,OAAO,KAAK,IACd,CAMA,IAAI,aAAc,CAChB,OAAQ,KAAK,KAAM,CACjB,IAAK,QACL,IAAK,OACL,IAAK,SACL,IAAK,QACL,IAAK,QACL,IAAK,SACL,IAAK,OACH,OAAO,KAAK,KAEd,QACE,MAAM,IAAI,UAAU,4EAA4E,CACpG,CACF,CAKA,IAAI,WAAY,CACd,OAAQ,KAAK,KAAM,CACjB,IAAK,UACL,IAAK,UACH,OAAO,KAAK,KAEd,QACE,MAAM,IAAI,UAAU,2CAA2C,CACnE,CACF,CAMA,IAAI,YAAa,CACf,GAAI,KAAK,OAAS,SAChB,OAAO,KAAK,KAEd,MAAM,IAAI,UAAU,oCAAoC,CAC1D,CAKA,IAAIC,EAAyE,CAC3E,OAAO,KAAK,KAAKN,EAAU,gBAAgBM,EAAS,KAAK,OAAO,CAAC,CACnE,CAKA,IAAIA,EAA4BC,EAAoD,CAClF,KAAK,KAAKP,EAAU,gBAAgBM,EAAS,KAAK,OAAO,CAAC,EAAIC,CAChE,CAKA,MAAM,SAA+B,CACnC,OAAI,KAAK,QAAU,SACjB,KAAK,MAAQ,MAAM,KAAK,kBAAmB,KAAK,MAAM,GAEjD,KAAK,KACd,CAWA,IAAI,SAA6B,CAC/B,OAAK,KAAK,WACR,KAAK,SAAWP,EAAU,eAAe,KAAK,IAAI,GAE7C,KAAK,QACd,CAqDA,OAAO,UAAUQ,EAAwC,CACvD,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,6CAA6C,EAE/D,IAAMnC,EAAOoC,GAAU,wBAAwBD,EAAY,QAAS,EAC9Db,EAAOc,GAAU,oBAAoBD,EAAY,IAAK,EAEtDD,EAAQ,IAAIb,EAAOC,EAAMtB,CAAI,EAEnC,GAAIA,IAAS,SAGXmC,EAAY,WAAY,QAAQ,CAACE,EAAK/B,IAAM,CAC1C4B,EAAM,KAAK5B,CAAC,EAAIgC,GAAiBD,CAAG,CACtC,CAAC,UAGCF,EAAY,SAAW,OAAOA,EAAY,QAAQ,YAAe,UACjEA,EAAY,QAAQ,WAAa,EAAG,CAItC,IAAMI,EAAWL,EAAM,KACjBM,EACF,IAAI,SAASL,EAAY,QAAQ,OAAQA,EAAY,QAAQ,WAAYA,EAAY,QAAQ,UAAU,EACrGM,EAAcxC,GAAYkC,EAAY,QAAS,EAC/CO,EAASP,EAAY,QAAQ,WAAaM,EAEhD,GAAIN,EAAY,QAAQ,WAAaM,IAAgB,EACnD,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIF,EAAS,SAAWG,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,QAASpC,EAAI,EAAGA,EAAIoC,EAAQpC,IAAK,CAC/B,IAAMqC,EAAInC,GAAUgC,EAAYL,EAAY,SAAW7B,EAAImC,CAAW,EACtEF,EAASjC,CAAC,EAAIqC,CAChB,CACF,KAAO,CAEL,IAAIC,EACJ,OAAQT,EAAY,SAAU,CAC5B,KAAK,QAAK,YAAY,SAAS,MAC7BS,EAAQT,EAAY,UACpB,MACF,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,KAC/B,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,KAC7BS,EAAQT,EAAY,UACpB,MACF,KAAK,QAAK,YAAY,SAAS,MAC7BS,EAAQT,EAAY,UACpB,MACF,KAAK,QAAK,YAAY,SAAS,OAC7BS,EAAQT,EAAY,WACpB,MACF,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,OAC7BS,EAAQT,EAAY,WACpB,MACF,QAEE,MAAM,IAAI,MAAM,kBAAkB,CACtC,CAEA,GAAIS,GAAU,KACZ,MAAM,IAAI,MAAM,kDAAkD,EAGpE,IAAMZ,EAAOE,EAAM,KACnB,GAAIF,EAAK,SAAWY,EAAM,OACxB,MAAM,IAAI,MAAM,uBAAuB,EAGzC,QAAStC,EAAI,EAAGA,EAAIsC,EAAM,OAAQtC,IAAK,CACrC,IAAMuC,EAAUD,EAAMtC,CAAC,EACnBK,GAAK,OAAOkC,CAAO,EACrBb,EAAK1B,CAAC,EAAID,GAAawC,EAASV,EAAY,QAAQ,EAEpDH,EAAK1B,CAAC,EAAIuC,CAEd,CACF,CAEA,OAAOX,CACT,CASA,OAAO,SAASF,EAA2CV,EAAyBtB,EAAuB,CACzG,OAAO,IAAIqB,EAAOC,EAAMtB,EAAM,OAAW,OAAWgC,CAAI,CAC1D,CAEA,OAAO,cAAcc,EAA0B,CAC7C,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,6CAA6C,EAE/D,IAAMxB,EAAOc,GAAU,wBAAwBU,CAAS,EAClD9C,EAAOoC,GAAU,wBAAwBU,EAAU,SAAS,CAAC,EAE7DZ,EAAQ,IAAIb,EAAOC,EAAMtB,CAAI,EAEnC,GAAIA,IAAS,SAGX,QAAS,EAAI,EAAG,EAAI8C,EAAU,iBAAiB,EAAG,IAChDZ,EAAM,KAAK,CAAC,EAAIY,EAAU,WAAW,CAAC,UAItCA,EAAU,aAAa,GAAK,OAAOA,EAAU,cAAc,GAAM,UAAYA,EAAU,cAAc,EAAI,EAAG,CAI9G,IAAMP,EAAWL,EAAM,KACjBM,EAAa,IAAI,SACnBM,EAAU,aAAa,EAAG,OAAQA,EAAU,aAAa,EAAG,WAAYA,EAAU,cAAc,CAAC,EAC/FL,EAAcxC,GAAY6C,EAAU,SAAS,CAAC,EAC9CJ,EAASI,EAAU,cAAc,EAAIL,EAE3C,GAAIK,EAAU,cAAc,EAAIL,IAAgB,EAC9C,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIF,EAAS,SAAWG,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,QAASpC,EAAI,EAAGA,EAAIoC,EAAQpC,IAAK,CAC/B,IAAMqC,EAAInC,GAAUgC,EAAYM,EAAU,SAA
S,EAAGxC,EAAImC,CAAW,EACrEF,EAASjC,CAAC,EAAIqC,CAChB,CACF,CACA,OAAOT,CACT,CACF,IC1TO,SAASa,GAAQC,EAAc,CACpC,OAAOA,IAAY,EAAIC,GAAcC,EACvC,CAEO,SAASC,GAAsBH,EAAsB,CAC1D,IAAMI,EAAOL,GAAQC,CAAO,EAC5B,MAAO,GAAGI,EAAK,OAAO;AAAA;AAAA,QAEhBA,EAAK,SAAS;AAAA,QACdA,EAAK,SAAS;AAAA;AAAA,QAEdA,EAAK,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAO1B,CAEO,SAASC,GAAsBL,EAAsB,CAC1D,IAAMI,EAAOL,GAAQC,CAAO,EAC5B,MAAO,GAAGI,EAAK,OAAO;AAAA;AAAA;AAAA;AAAA,MAIlBA,EAAK,WAAW;AAAA,MAChBA,EAAK,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KA4B5B,CAEO,SAASE,GAAyBN,EAAcO,EAAmC,CACxF,IAAMH,EAAOL,GAAQC,CAAO,EAC5B,MAAO;AAAA;AAAA,kBAESO,CAAiB;AAAA;AAAA;AAAA,MAG7BH,EAAK,MAAM;AAAA;AAAA,GAGjB,CAtGA,IAgBMH,GASAC,GAzBNM,GAAAC,EAAA,kBAgBMR,GAAoB,CACxB,QAAS,GACT,UAAW,YACX,cAAe,UACf,YAAa,UACb,UAAW,YACX,OAAQ,eACR,kBAAmB,EACrB,EACMC,GAAoB,CACxB,QAAS,kBACT,UAAW,KACX,cAAe,MACf,YAAa,KACb,UAAW,UACX,OAAQ,cACR,kBAAmB,uBACrB,ICjCA,IAAAQ,GAAAC,EAAA,oBCeA,eAAsBC,GAClBC,EAAwBC,EAAWC,GAAqB,EAAGC,EAAoC,CACjG,OAAO,IAAI,QAAc,CAACC,EAASC,IAAW,CAC5C,IAAIC,EAAW,EAETC,EAAQ,IAAM,CAClB,GAAIP,EAAQ,EAAG,CACbI,EAAQ,EACR,MACF,CAEAE,IAEA,IAAME,EAAcP,EAAQK,CAAQ,EAEpC,GAAIH,GAAc,MAAQG,GAAYH,EAAY,CAChDE,EAAO,EACP,MACF,CACA,WAAWE,EAAOC,CAAW,CAC/B,EAEAD,EAAM,CACR,CAAC,CACH,CAMO,SAASE,GAA2CC,EAA6B,CACtF,OAAAC,GAAO,OAAOD,EAAgB,KAAeA,EAAY,SAAW,EAAG,IAAM,qCAAqC,EAC3G,MAAQA,EAAY,OAAO,CAAC,EAAE,YAAY,EAAIA,EAAY,MAAM,CAAC,CAC1E,CAMO,SAASE,GAAsDF,EAA6B,CACjG,OAAAC,GAAO,OAAOD,EAAgB,KAAeA,EAAY,SAAW,EAAG,IAAM,qCAAqC,EAC3G,MAAQA,EAAY,OAAO,CAAC,EAAE,YAAY,EAAIA,EAAY,MAAM,CAAC,EAAI,aAC9E,CAGO,SAASG,GAAkBC,EAA+BC,EAAmC,CAElG,IAAIC,EAA0B,KAAK,MAAM,KAAK,UAAUF,CAAU,CAAC,EACnE,OAAAE,EAAgBD,EACTC,CACT,CAGO,SAASC,GAAkBC,EAAkBC,EAA4B,CAC9E,OAAOA,EAAS,IAAIC,GAAKF,EAAOE,CAAC,CAAC,EAAE,KAAK,IAAI,CAC/C,CAGO,SAASC,GAAkBC,EAAsB,CACtD,GAAIA,GAAQ,EACV,MAAO,MACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QAEP,MAAM,MAAM,gBAAgBA,CAAI,uBAAuB,CAE3D,CAEO,SAASC,GAAcD,EAAO,EAAa,CAChD,MAAO,CAAC,IAAK,IAAK,IAAK,IAAK,IAAK,GAAG,EAAE,MAAM,EAAGA,CAAI,CACrD,CA7FA,IAAAE,GAAAC,EAAA,kBAGAC,OCEO,SAASC,GAAeC,EAAcC,EAAwB,CACnE,OAAOC,GAAcD,CAAI,EAAE,IAAIE,GAAK,GAAGH,CAAI,IAAIG,CAAC,EAAE,CACpD,CAEO,SAASC,GAAYJ,EAAcC,EAAwB,CAChE,OAAIA,IAAS,EACJ,CAACD,CAAI,EAEPD,GAAeC,EAAMC,CAAI,CAClC,CAEO,SAASI,IAA4B,CAC1C,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaT,CA9BA,IAAAC,GAAAC,EAAA,kBAGAC,OCgEA,SAASC,GAAwBC,EAAcC,EAA0BC,EAAwB,CAC/F,GAAIF,IAAS,EACX,MAAO,QAET,GAAIA,IAAS,EACX,MAAO,QAAQC,EAAM,CAAC,CAAC,GAGzB,IAAIE,EAAO,GACX,QAASC,EAAIJ,EAAO,EAAGI,EAAIJ,EAAMI,IAC/BD,GAAQ,GAAGD,EAAKE,CAAC,CAAC,OAAOH,EAAMG,EAAIJ,EAAO,CAAC,CAAC,GACxCI,EAAIJ,EAAO,IACbG,GAAQ,MAIZ,OAAOA,CACT,CAKA,SAASE,GAAUJ,EAA0BC,EAAwB,CACnE,IAAMF,EAAOC,EAAM,OAEnB,GAAID,IAAS,EACX,MAAO,kBAGT,GAAIA,IAAS,EACX,MAAO;AAAA,wBACaC,EAAM,CAAC,CAAC;AAAA,kBAI9B,IAAMK,EAAU,OACVC,EAAU,SACVC,EAAU,SACVC,EAAU,WACZC,EAAI,GACR,GAAIV,EAAO,EACT,QAASI,EAAI,EAAGA,EAAIJ,EAAO,EAAG,EAAEI,EAC9BM,EAAIA,EAAI,GAAGR,EAAKE,CAAC,CAAC,IAGtB,MAAO,QAAQM,CAAC,GAAGJ,CAAO;AAAA,8BACEI,CAAC,GAAGF,CAAO;AAAA,8BACXE,CAAC,GAAGH,CAAO;AAAA,uCACFG,CAAC,GAAGD,CAAO,GAClD,CAKA,SAASE,GAASX,EAAcE,EAAgBU,EAAcC,EAAsB,CAClF,OAAIb,IAAS,GAAKA,IAAS,EAClB,GAIO;AAAA,cACJE,EAAKF,EAAO,CAAC,CAAC;AAAA,cACdE,EAAKF,EAAO,CAAC,CAAC;AAAA,gBACZE,EAAKF,EAAO,CAAC,CAAC;AAAA,gBACdE,EAAKF,EAAO,CAAC,CAAC;AAAA,0BACJa,CAAI;AAAA,0BACJD,CAAI;AAAA,KAI9B,CAzIA,IAWME,GAMAC,GA4COC,GA7DbC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAEAC,KAEMR,GAAsB,CAC1B,KAAM,OACN,WAAY,CAA
C,GAAG,EAChB,WAAY,EAA6B,CAC3C,EAEMC,GAAwB,CAACQ,EAAgCC,IAA+B,CAC5F,IAAMC,EAAOC,GAAQH,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDI,EAAaH,EAAM,KAEnBI,EAAYD,EAAW,OAEvBE,EAAaL,EAAM,KAAK,OAExBM,EAAiBC,GAAkBF,CAAU,EAC7CG,EAAWC,GAAY,KAAMJ,CAAU,EACvCK,EAAQvB,GAASkB,EAAYG,EAAUL,EAAWA,EAAW,OAAS,CAAC,EAAGA,EAAWA,EAAW,OAAS,CAAC,CAAC,EAE7GQ,EACAP,IAAc,EAChBO,EAAkB,CAAC,EAAG,CAAC,EACdP,IAAc,EACvBO,EAAkB,CAACR,EAAW,CAAC,EAAG,CAAC,EAEnCQ,EAAkB,CAACR,EAAWE,EAAa,CAAC,EAAGF,EAAWE,EAAa,CAAC,CAAC,EAE3E,IAAMO,EAAuBrC,GAAwB8B,EAAYM,EAAiBH,CAAQ,EACpFK,EAAShC,GAAUsB,EAAYK,CAAQ,EAEvCM,EAAe;AAAA;AAAA,YAEXR,CAAc;AAAA;AAAA,eAEXM,CAAoB;AAAA,cACrBX,EAAK,MAAM;AAAA;AAAA,cAEXS,CAAK;AAAA;AAAA,cAELT,EAAK,MAAM,WAAWY,CAAM;AAAA;AAAA;AAAA,QAIxC,MAAO,CACL,GAAGvB,GACH,QAAS,GACT,OAAQ,CAAC,KAAMU,EAAM,KAAM,KAAMA,EAAM,KAAM,aAA+B,EAC5E,aAAAc,CACF,CACF,EAEatB,GAA8B,CAACO,EAAgCC,KACvE,CAAC,GAAGV,GAAqB,IAAK,IAAMC,GAAsBQ,EAASC,CAAK,CAAC,KC0BvE,SAASe,GAAcC,EAAoD,CAChF,GAAIA,EAAM,SAAW,EACnB,MAAO,CAAC,EAAG,EAAG,CAAC,EAGjB,IAAIC,EAAQ,EACZ,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAS,EAAG,EAAEE,EACtCD,GAASD,EAAME,CAAC,EAElB,MAAO,CAACD,EAAOD,EAAM,OAAS,EAAIA,EAAMA,EAAM,OAAS,CAAC,EAAI,EAAGA,EAAMA,EAAM,OAAS,CAAC,CAAC,CACxF,CAaO,SAASG,GAAeC,EAAyBC,EAAiC,CACvF,IAAIC,EAAiB,GACrB,OAAIF,EAAK,SAAW,GAAKC,EAAa,SAAW,EAC/CC,EAAiB,GACRF,EAAK,OAAS,GAAKC,EAAa,OAAS,EAClDC,EAAiBF,EAAKA,EAAK,OAAS,CAAC,IAAMC,EAAaA,EAAa,OAAS,CAAC,EAE/EC,EAAiBF,EAAKA,EAAK,OAAS,CAAC,IAAMC,EAAaA,EAAa,OAAS,CAAC,GAC3ED,EAAKA,EAAK,OAAS,CAAC,IAAMC,EAAaA,EAAa,OAAS,CAAC,EAG7DC,CACT,CAEA,SAASC,GAAuBP,EAAyC,CACvE,IAAMQ,EAAUC,EAAU,eAAeT,CAAK,EACxCU,EAAS,CAAC,IAAK,IAAK,GAAG,EACvBC,EAAQ,QAWd,MAAO;AAAA;AAAA,QAVwBH,EACK,IAAI,CAACI,EAAQV,IAAM,CAClB,IAAMW,EAAQ,OAAOH,EAAOR,CAAC,CAAC,MAAMS,CAAK,MAAMC,CAAM,GAC/CE,EAAQZ,IAAMM,EAAQ,OAAS,EACjC,OAAOE,EAAOR,EAAI,CAAC,CAAC,MAAMS,CAAK,MAAMD,EAAOR,CAAC,CAAC,MAAMU,CAAM,GAC1D,YAAYF,EAAOR,CAAC,CAAC,MAAMU,CAAM,GACrC,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,CAIf;AAAA;AAAA;AAAA,GAI9B,CAEA,SAASC,GAAwBf,EAAyC,CACxE,IAAMQ,EAAUC,EAAU,eAAeT,CAAK,EAE9C,MAAO;AAAA;AAAA;AAAA,wBAGeQ,EAAQ,CAAC,CAAC,iBAAiBA,EAAQ,CAAC,CAAC;AAAA;AAAA,CAG7D,CA5JA,IAWMQ,GAGAC,GAoEOC,GAlFbC,GAAAC,EAAA,kBAIAC,KACAC,KAEAC,KAEAC,KAEMR,GAAwCS,IACzC,CAAC,KAAM,mBAAoB,WAAY,EAAmB,EAAG,WAAY,CAAC,GAAG,EAAG,UAAW,GAAGA,CAAa,EAAE,GAE5GR,GACF,CAACS,EAAgCC,EAAiBC,EAA2BH,IAC1D,CACb,IAAMI,EAAeF,EAAQ,KACvBG,EAAsBL,EAExBM,EAAW,GACf,QAAS7B,EAAI,EAAGA,EAAI,EAAGA,IAAK,CAC1B,IAAI8B,EAAe,GACnB,OAAQ9B,EAAG,CACT,IAAK,GACH8B,EAAe,qBACf,MACF,IAAK,GACHA,EAAe,4CACf,MACF,IAAK,GACHA,EAAe,4CACf,MACF,IAAK,GACHA,EAAe,8CACf,MACF,QACE,MAAM,IAAI,KACd,CAEAD,GAAY;AAAA,UACdC,CAAY;AAAA,UACZ9B,EAAI,EAAI,sDAAwD,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAMzDA,CAAC;AAAA;AAAA,UAEVA,EAAI,EAAI,IAAM,EAAE;AAAA,OAEhB,CACA,IAAM+B,EAAOC,GAAQR,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EAExDS,EAAe;AAAA,QACvB5B,GAAuBsB,CAAY,CAAC;AAAA,QACpCd,GAAwBe,CAAmB,CAAC;AAAA,QAC5CM,GAAkB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQNN,EAAoB,CAAC,CAAC;AAAA,qBACtBA,EAAoB,CAAC,CAAC;AAAA;AAAA,UAEjCC,CAAQ;AAAA,UACRE,EAAK,MAAM;AAAA;AAAA,MAIX,MAAO,CACL,GAAGL,EACH,OAAQ,CAAC,KAAME,EAAqB,KAAMH,EAAQ,KAAM,aAA+B,EACvF,aAAAQ,EACA,QAAS,EACX,CACF,EAEKjB,GACT,CAACQ,EAAgCC,EAAiBF,IAAwD,CACxG,IAAMG,EAAWZ,GAAqCS,CAAa,EACnE,MAAO,CAAC,GAAGG,EAAU,IAAK,IAAMX,GAAiCS,EAASC,EAASC,EAAUH,CAAa,CAAC,CAC7G,ICtFJ,IAOaY,GAPbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEaJ,GAAgB,CAACK,EAAyCC,IAAoC,CACzG,IAAMC,EAAcD,EAAM,MACpBE,EAAOC,GAAQJ,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EAIjEK,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBA6CDF,EAAK,SAAS;AAAA,QAC5BA,EAAK,MAAM;AAAA,OAEXG,EAAc,CAClB,KAAM,cACN,WAAY,EAAqB,EACjC,WAAY,CAAC,GAAG,EAChB,OAAQ,CAAC,KAAMJ,EAAa,KAAMD,EAAM,OAAO,KAAM,aAA6C,EAClG,aAAAI,EACA,QAAS,EACX,EACA,OAAOL,EAAiB,eAAeM,EAAa,CAACL,EAAM,MAAM,CAAC,CACpE,ICnBA,SAASM,GAAgBC,EAAcC,EAAwB,CAC7D,GAAID,IAAS,EACX,MAAO,KAGT,IAAIE,EAAS,GACb,QAASC,EAAI,EAAGA,EAAIH,EAAMG,IACxBD,GAAUD,EAAKE,CAAC,EACZA,EAAIH,EAAO,IACbE,GAAU,KAGd,OAAOA,CACT,CAhEA,IAWME,GAMOC,GA+BAC,GAhDbC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAEAC,KAEMR,GAAwB,CAC5B,KAAM,SACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAmB,CACjC,EAEaC,GAA0B,CAACQ,EAAgCC,IAA+B,CACrG,IAAMd,EAAOc,EAAM,KAAK,OAElBC,EAAWC,GAAY,KAAMhB,CAAI,EACjCiB,EAAYF,EAAS,MAAM,EAAE,EAC7BG,EAAiBC,GAAkBnB,CAAI,EACvCoB,EAAgBC,GAAkB,EAElCC,EADYR,EAAM,KAAK,SAAW,EACR,GAAKf,GAAgBC,EAAMe,CAAQ,EAC7Db,EAASF,GAAQ,EAAI,KAAO,QAAQiB,EAAU,KAAK,GAAG,CAAC,IACvDM,EAAOC,GAAQX,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDY,EAAe;AAAA,MACjBL,CAAa;AAAA;AAAA,QAEXF,CAAc;AAAA;AAAA;AAAA,iCAGWI,CAAY;AAAA;AAAA,SAEpCC,EAAK,MAAM,mCAAmCrB,CAAM;AAAA;AAAA,KAI3D,MAAO,CACL,GAAGE,GACH,QAAS,GACT,OAAQ,CAAC,KAAMU,EAAM,KAAM,KAAMA,EAAM,KAAM,aAAiC,EAC9E,aAAAW,CACF,CACF,EAEanB,GAAgC,CAACO,EAAgCC,KACzE,CAAC,GAAGV,GAAuB,IAAK,IAAMC,GAAwBQ,EAASC,CAAK,CAAC,KCjDlF,IAyCaY,GAoDAC,GAmCAC,GAhIbC,GAAAC,EAAA,kBAGAC,KAsCaL,GAAN,KAAmD,CAKxD,YAAYM,EAA4BC,EAAW,EAAG,CACpD,GAAIA,IAAa,EACf,KAAK,eAAiBD,EAAG,KACzB,KAAK,OAASA,EAAG,IACjB,KAAK,YAAcA,EAAG,MACtB,KAAK,YAAcC,UACVA,IAAa,EACtB,KAAK,eAAiBD,EAAG,QACzB,KAAK,OAASA,EAAG,KACjB,KAAK,YAAcA,EAAG,MACtB,KAAK,YAAcC,MAEnB,OAAM,IAAI,MAAM,+BAA+BA,CAAQ,EAAE,CAE7D,CACA,OAAOC,EAA4BC,EAA4C,CAC7E,IAAIC,EACAC,EACJ,OAAIH,EAAI,cAAgB,eACtBI,GAAO,QAAQ,UAAW,yDAAyD,EACnFD,EAAS,IAAI,aAAaH,CAAG,GAE3BC,EAAc,KAAK,YAAcD,EAAI,QACvCI,GAAO,QAAQ,UAAW,gDAAgD,EAC1ED,EAASH,EACTE,EAAS,KAAK,SAASD,EAAc,KAAK,WAAW,EACrDE,EAAO,QAAQ,CAACE,EAAGC,IAAMJ,EAAOI,CAAC,EAAID,CAAC,IAEtCF,EAASH,EACTE,EAASC,GAEJD,CACT,CACA,SAASK,EAAqC,CAC5C,OAAO,IAAI,aAAaA,EAAO,CAAC,CAClC,CACA,OAAOC,EAA+BC,EAAgC,CACpE,OAAI,KAAK,cAAgB,EACDD,EAAwB,OAAO,CAACE,EAAQC,IAAUA,EAAQ,IAAM,CAAC,EAAE,SAAS,EAAGF,CAAQ,EAGxGD,EAAO,SAAS,EAAGC,CAAQ,CACpC,CACF,EAIahB,GAAN,KAAkD,CAKvD,YAAYK,EAA2BC,EAAW,EAAGa,EAAsB,CACzE,GAAIb,IAAa,GAAKA,IAAa,EACjC,MAAM,IAAI,MAAM,+BAA+BA,CAAQ,EAAE,EAE3D,KAAK,eAAiBD,EAAG,KACzB,KAAK,OAASA,EAAG,KACjB,KAAK,YAAcC,EACnB,KAAK,YAAca,GAAed,EAAG,KACvC,CACA,OAAOE,EAAmBC,EAA4C,CACpE,IAAIY,EAAOb,EACX,OAAI,KAAK,cAAgB,IACvBI,GAAO,QAAQ,UAAW,+BAA+B,EACzDS,EAAO,KAAK,SAASZ,CAAW,EAChCD,EAAI,QAAQ,CAACK,EAAG,IAAMQ,EAAK,EAAI,CAAC,EAAIR,CAAC,GAEhCQ,CACT,CACA,SAASN,EAAqC,CAC5C,OAAO,IAAI,aAAaA,EAAO,CAAC,CAClC,CACA,OAAOC,EAA+BC,EAAgC,CACpE,OAAI,KAAK,cAAgB,EACDD,EAAwB,OAAO,CAACE,EAAQC,IAAUA,EAAQ,IAAM,CAAC,EAAE,SAAS,EAAGF,CAAQ,EAGxGD,EAAO,SAAS,EAAGC,CAAQ,CACpC,CACF,EAEaf,GAAN,KAA8C,CAKnD,YAAYI,EAA2BC,EAAW,EAAG,CADrD,iBAAc,EAEZ,GAAIA,IAAa,EACf,KAAK,eAAiBD,EAAG,MACzB,KAAK,OAASA,EAAG,MACjB,KAAK,YAAcA,EAAG,cACtB,KAAK,YAAcC,UACVA,IAAa,EACtB,KAAK,eAAiBD,EAAG,KACzB,KAAK,OAASA,EAAG,KACjB,KAAK,YAAcA,EAAG,cACtB,KAAK,YAAcC,MAEnB,OAAM,IAAI,MAAM,+BAA+BA,CAAQ,EAAE,CAE7D,CACA,OAAOC,EAAiBc,EAA6C,CACnE,OAAO,IAAI,WAAWd,EAAI,OAAQA,EAAI,WAAYA,EAAI,UAAU,CAClE,CACA,SAASO,EAAqC,CAC5C,OAAO,IAAI,WAAWA,EAAO,KAAK,WAAW,CAC/C,CACA,OAAOC,EAA+BC,EAA8B,CAClE,GAAID,aAAkB,WACpB,OAAOA,EAAO,SAAS,EAAGC,CAAQ,EAEpC,MAAM,IAAI,MAAM,uBAAuBD,EAAO,WAAW,EAAE,CAC7D,CACF,IChKA,IAQaO,GAcAC,GAUAC,GAhCbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEaN,GACT,CAACO,EAA8CC,EAC9CC,IAA4C,CAC3C,IAAMC,EAAWD,IAAgB,GAAwBA,IAAgB,EAAgC,EAAI,EACvGE,EAAWF,IAAgB,EAC3BG,EAAaH,IAAgB,GAAgCA,IAAgB,EAC7EI,EAAYJ,IAAgB,EAAkCD,EAAM,OAAS,EAAI,OACjFM,EAAgBL,IAAgB,EA
ClCD,EAAM,IAAI,CAACO,EAAGC,IAAMA,IAAMR,EAAM,OAAS,EAAIO,EAAI,EAAIA,CAAC,EACtD,OACJ,OAAOb,GACHK,EAAuBC,EAAOE,EAASI,EAAe,CAAC,SAAAH,EAAU,UAAAC,EAAW,UAAAC,CAAS,CAAC,CAC5F,EAESZ,GACT,CAACM,EAA8CC,EAA0BC,IACjD,CAClB,IAAMQ,EAASjB,GAAmCO,EAAuBC,EAAOC,CAAW,EAC3F,MAAO,CAACQ,EAAO,MAAOA,EAAO,MAAM,CACrC,EAKKf,GACT,CAACK,EAA8CC,EAA0BU,EAAgB,EACxFJ,EAAmCK,IAA4C,CAC9E,IAAMR,EAAW,CAAC,EAAEQ,GAASA,EAAM,UAC7B,CAACC,EAAOC,CAAM,EAAId,EAAsB,iBAAiBI,GAAWG,GAAiBN,EAAeW,CAAK,EACzGG,EAAOd,EAAM,OACfe,EAAef,EAAM,MAAM,CAAC,EAIhC,GAHIc,IAAS,IACXC,EAAe,CAAC,CAAC,GAEfL,IAAa,EAEfJ,EAAgBN,UACPG,EAAU,CACnB,GAAIO,IAAa,EACf,MAAM,IAAI,MAAM,oCAAoC,EAEtDJ,EAAgBN,EACZc,EAAO,IACTC,EAAaD,EAAO,CAAC,EAAI,KAAK,KAAKC,EAAaD,EAAO,CAAC,EAAI,CAAC,GAE3DA,EAAO,IACTC,EAAaD,EAAO,CAAC,EAAI,KAAK,KAAKC,EAAaD,EAAO,CAAC,EAAI,CAAC,EAEjE,SAAW,CAACR,EACV,MAAM,IAAI,MAAM,kDAAkD,EAEpE,MAAO,CACL,MAAAM,EACA,OAAAC,EACA,SAAAH,EACA,SAAAP,EACA,MAAOY,EACP,QAASC,EAAU,eAAeD,CAAY,EAC9C,cAAAT,EACA,WAAaK,GAASA,EAAM,SAC9B,CACF,ICrEJ,IAiBMM,GAaOC,GA9BbC,GAAAC,EAAA,kBAIAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMb,GACF,CAACc,EAA4CC,IAA6C,CACxF,IAAMC,EACFD,EAAkB,IAAIE,GAAW,GAAGA,EAAQ,cAAc,KAAK,GAAG,CAAC,IAAIA,EAAQ,KAAK,IAAIA,EAAQ,MAAM,EAAE,EACnG,KAAK,GAAG,EACbC,EAAMJ,EAAY,KACtB,OAAIA,EAAY,YACdI,GAAO,IAAMJ,EAAY,UAAY,KAEvCI,GAAO,IAAMF,EACNE,CACT,EAESjB,GAAN,KAAwD,CAG7D,YAAmBkB,EAA8B,CAA9B,aAAAA,EACjB,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAA2B,IAAI,GACtC,CAKA,+BAA+BC,EAA0BC,EAA4C,CACnG,OAAOC,GAA+B,KAAK,QAAQ,eAAgBF,EAAOC,CAAW,CACvF,CAEA,eAAeE,EAAwCP,EAAwC,CAC7F,GAAIA,EAAO,OAASO,EAAQ,WAAW,OACrC,MAAM,IAAI,MAAM,mCAAmCA,EAAQ,WAAW,MAAM,GAAG,EAEjF,GAAIA,EAAQ,WAAW,SAAWA,EAAQ,WAAW,OACnD,MAAM,IAAI,MAAM,6CAA6C,EAI/D,IAAMR,EAAmC,CAAC,EAC1C,QAASS,EAAI,EAAGA,EAAID,EAAQ,WAAW,OAAQ,EAAEC,EAC/CT,EAAkBS,CAAC,EAAI,KAAK,uBAAuBR,EAAOQ,CAAC,EAAGD,EAAQ,WAAWC,CAAC,CAAC,EAGrF,IAAMN,EAAMlB,GAAwBuB,EAASR,CAAiB,EAC1DU,EAAW,KAAK,QAAQ,eAAe,YAAYP,CAAG,EACpDJ,EAAcW,EAChBA,EAAS,YACR,OAAQF,EAA8B,KAAQ,WAAcA,EAA8B,IAAI,EAClCA,EAG3DG,EAAsBC,GACxB,KAAK,QAAQ,eAAgBb,EAAY,OAAO,KAAMA,EAAY,OAAO,WAAW,EAClFc,EAAoB,KAAK,kBAAkBF,EAAqBZ,EAAY,OAAO,IAAI,EAE7F,OAAKW,IACHA,EAAW,KAAK,QAAQ,eAAe,MAAMX,EAAaC,EAAmBa,CAAiB,EAC9F,KAAK,QAAQ,eAAe,YAAYV,EAAKO,CAAQ,GAGvD,KAAK,WAAWA,EAAUV,EAAmBa,CAAiB,EACvDA,CACT,CAEA,IAAIL,EAA4BP,EAAmC,CAEjE,OAD0B,KAAK,eAAeO,EAASP,CAAM,EACpC,MAC3B,CAEQ,WAAWS,EAAoBT,EAAuBa,EAA2B,CAEvF,QAASL,EAAI,EAAGA,EAAIR,EAAO,OAAQ,EAAEQ,EACnC,GAAI,CAAC,CAACR,EAAOQ,CAAC,EAAE,WAAcC,EAAS,YAAY,WAAWD,CAAC,IAAM,GACnE,MAAM,IAAI,MAAM,SAASA,CAAC,gCAAgC,EAK9D,GAAI,CAAC,CAACK,EAAO,WAAcJ,EAAS,YAAY,OAAO,cAAgB,GACrE,MAAM,IAAI,MAAM,qCAAqC,EAGvD,KAAK,QAAQ,eAAe,IAAIA,EAAUT,EAAQa,CAAM,CAC1D,CAaQ,uBAAuBC,EAAgBT,EAA0B,CACvE,IAAIU,EAAK,KAAK,eAAeD,EAAO,OAAQT,IAAgB,CAAkB,EAE9E,GAAI,CAACU,IAEHA,EAAK,KAAK,eAAeD,EAAO,OAAQT,IAAgB,CAAkB,EACtEU,GACF,OAAIV,IAAgB,EACX,KAAK,KAAKU,CAAE,EAEZ,KAAK,OAAOA,CAAE,EAK3B,GAAI,CAACA,EAAI,CACP,IAAMC,EAASL,GAAmC,KAAK,QAAQ,eAAgBG,EAAO,KAAMT,CAAW,EAEvG,GAAIA,IAAgB,EAAiC,CAGnD,IAAMD,EAAQU,EAAO,KACrB,GAAIV,EAAM,SAAW,EAAG,CAQtB,IAAMa,EAAsB,CAACb,EAAM,CAAC,EAAG,KAAK,KAAMA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAK,CAAQ,CAAC,EACvFc,EACFP,GAAmC,KAAK,QAAQ,eAAgBM,EAAqBZ,CAAW,EAChGc,EAASL,EAAO,WACpB,GAAIV,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAI,IAAa,EAAG,CACnD,IAAMgB,EAAiBhB,EAAM,CAAC,EACxBiB,EAAajB,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAC1CkB,EAAa,KAAK,KAAKD,EAAa,EAAQ,CAAQ,EAAI,EACxDE,EAAUH,EAAiBE,EACjCH,EAAS,IAAI,aAAaI,CAAO,EACjC,QAASC,EAAI,EAAGA,EAAIJ,EAAgB,EAAEI,EAAG,CACvC,IAAMC,EAAYD,EAAIH,EAChBK,EAAYF,EAAIF,EAAaE,EAAI,EAAQH,EAC/CF,EAAO,IAAIL,EAAO,WAAW,SAASW,EAAWA,EAAYJ,CAAU,EAAGK,CAAS,CACrF,C
ACF,CACA,OAAO,KAAK,kBAAkBR,EAAgBJ,EAAO,KAAMK,EAAQL,GAA+B,CACpG,CACF,CAEA,GAAIT,IAAgB,EAAoB,CACtC,IAAMsB,EACFC,GAA6B,KAAK,QAAQ,eAAgBd,EAAO,KAAM,EAAG,CAAC,EAAG,CAAC,UAAW,EAAI,CAAC,EAC7Fe,EAAsB,KAAK,kBAC7BF,EAAuBb,EAAO,KAAMA,EAAO,WAAYA,GAA+B,EAC1FC,EAAK,KAAK,KAAKc,CAAmB,CACpC,MACEd,EAAK,KAAK,kBAAkBC,EAAQF,EAAO,KAAMA,EAAO,WAAYA,GAA+B,CAEvG,CACA,OAAOC,CACT,CAWA,sCACIC,EAAuBc,EAA2BC,EAAyBjB,EAA6B,CAC1G,OAAO,KAAK,kBAAkBE,EAAQc,EAAUC,EAAMjB,GAA+B,CACvF,CAEQ,kBACJE,EAAuBc,EAA2BC,EAA0BjB,EAC5EkB,EAAmC,CACrCC,GAAO,QAAQ,mBAAoB,iCAAiC,KAAK,UAAUjB,CAAM,CAAC,GAAG,EAC7F,IAAMf,EAAU,KAAK,QAAQ,eAAe,wBAAwB6B,EAAUd,EAAQe,EAAMC,CAAK,EACjG,OAAO,KAAK,6BAA6BhB,EAAQc,EAAU7B,EAASa,CAAM,CAC5E,CAEA,gBAAgBoB,EAAeC,EAAyC,CACtE,IAAMC,EAAU,KAAK,uBAAuBF,GAA2B,EACjEG,EAAkC,CACtC,SAAUD,EAAQ,SAClB,OAAQA,EAAQ,OAChB,MAAOA,EAAQ,MAEf,MAAOD,EAAa,SAAW,EAAIA,EAAe,CAAC,CAAC,EACpD,QAASG,EAAU,eAAeH,CAAY,EAC9C,cAAeA,CACjB,EAEA,OADuB,KAAK,6BAA6BE,EAAkBH,EAAM,KAAME,EAAQ,OAAO,EAChF,MACxB,CAEA,cAAcF,EAAeC,EAAyC,CACpE,IAAMC,EAAU,KAAK,uBAAuBF,GAAyB,EAGrE,GAAIK,GAAeL,EAAM,KAAMC,CAAY,EAAG,CAC5C,IAAME,EAAkC,CACtC,SAAUD,EAAQ,SAClB,OAAQA,EAAQ,OAChB,MAAOA,EAAQ,MAEf,MAAOD,EAAa,SAAW,EAAIA,EAAe,CAAC,CAAC,EACpD,QAASG,EAAU,eAAeH,CAAY,EAC9C,cAAeA,EACf,SAAU,EACZ,EAEA,OADuB,KAAK,6BAA6BE,EAAkBH,EAAM,KAAME,EAAQ,OAAO,EAChF,MACxB,CAEA,IAAMI,EAAqBC,GAAcP,EAAM,IAAI,EAC7CQ,EAAsBD,GAAcN,CAAY,EAEhDQ,EAAsB,KAAK,cAAcT,EAAOM,CAAkB,EAClEI,EAAuB,KAAK,IAC9BC,GAAuC,KAAMF,EAAqBD,CAAmB,EAAG,CAACC,CAAmB,CAAC,EAEjH,OADqB,KAAK,cAAcC,EAAsBT,CAAY,CAE5E,CAEA,KAAKD,EAAeY,EAA+B,CACjD,IAAMV,EAAU,KAAK,uBAAuBF,GAA2B,EAEvE,OADuB,KAAK,6BAA6BE,EAA0BU,EAAMV,EAAQ,OAAO,EAClF,MACxB,CAEQ,6BACJpB,EAAuBc,EAA2B7B,EAAuBa,EAAiBiC,EAAsB,CAClH,IAAMC,EAA2B,CAC/B,GAAGhC,EACH,OAAQF,GACJ,IAAImC,GACIjC,EAAO,cAAec,EAAWoB,GAAmB,KAAK,YAAYF,CAAW,EAChF,MAAOE,GAAmB,KAAK,iBAAiBF,CAAW,EAAG,OAAWD,CAAQ,EAC7F,QAAA9C,CACF,EACA,YAAK,eAAe+C,EAAY,OAAO,OAAQA,EAAahC,EAAO,QAAQ,EACpEgC,CACT,CAEQ,eAAeD,EAAqBI,EAAW,GAA8B,CACnF,OAAO,KAAK,QAAQ,cAAcJ,CAAQ,EAAI,KAAK,QAAQ,eAAeA,EAAUI,CAAQ,EACxFA,EAA0C,KAAK,uBAAuB,IAAIJ,CAAQ,EACxC,KAAK,yBAAyB,IAAIA,CAAQ,CAC1F,CACA,eAAeA,EAAqBhC,EAAiBoC,EAAW,GAAa,CACvE,KAAK,QAAQ,cAAcJ,CAAQ,EACrC,KAAK,QAAQ,eAAeA,EAAUhC,EAAIoC,CAAQ,GAEjDA,EAAW,KAAK,uBAAyB,KAAK,0BAA0B,IAAIJ,EAAUhC,CAAE,CAE7F,CACA,sBAAsBD,EAAgBqC,EAAW,GAAgB,CAC/D,MAAO,CAAC,CAAC,KAAK,eAAerC,EAAO,OAAQqC,CAAQ,CACtD,CAEA,SAAgB,CACd,KAAK,QAAQ,eAAe,oBAAoB,EAChD,KAAK,uBAAuB,QAAQpC,GAAM,KAAK,QAAQ,eAAe,eAAeA,CAAE,CAAC,EACxF,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAAyB,QAAQA,GAAM,KAAK,QAAQ,eAAe,eAAeA,CAAE,CAAC,EAC1F,KAAK,yBAA2B,IAAI,GACtC,CAEA,YAAYiC,EAA6C,CACvD,OAAIA,EAAY,SACP,KAAK,YAAY,KAAK,OAAOA,CAAW,CAAC,EAE7C,KAAK,QAAQ,QAAQ,UAAU,2BAG7B,KAAK,QAAQ,eAAe,YAAYA,EAAaA,EAAY,OAAO,KAAMA,EAAY,QAAQ,EAFhG,KAAK,QAAQ,eAAe,wBAAwBI,GAAc,KAAMJ,CAAW,CAAC,CAG/F,CAEA,MAAM,iBAAiBA,EAAsD,CAC3E,OAAIA,EAAY,SACP,KAAK,iBAAiB,KAAK,OAAOA,CAAW,CAAC,EAElD,KAAK,QAAQ,QAAQ,UAAU,2BAG7B,KAAK,QAAQ,eAAe,iBAAiBA,EAAaA,EAAY,OAAO,KAAMA,EAAY,QAAQ,EAFrG,KAAK,QAAQ,eAAe,wBAAwBI,GAAc,KAAMJ,CAAW,CAAC,CAG/F,CAEA,KAAKd,EAAiC,CAEpC,OAD0B,KAAK,eAAemB,GAA4B,KAAMnB,EAAM,MAAM,EAAG,CAACA,EAAM,MAAM,CAAC,CAE/G,CAEA,OAAOA,EAAiC,CAEtC,OAD0B,KAAK,eAAeoB,GAA8B,KAAMpB,EAAM,MAAM,EAAG,CAACA,EAAM,MAAM,CAAC,CAEjH,CACF,IC1TA,IAGMqB,GAmBOC,GAtBbC,GAAAC,EAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EAMaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,ICvB3C,IAiBME,GAOOC,GAaAC,GAQPC,GAwBAC,GArENC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAQMT,GAAoC,CACxC,KAAM,qBACN,WA
AY,CAAC,IAAK,QAAS,IAAK,OAAQ,UAAU,EAClD,WACI,UAA6G,CACnH,EAEaC,GACT,CAACS,EAAyCC,EAAkBC,KAC1DR,GAAeO,CAAM,EAQd,CAPQD,EAAiB,IAC5B,CACE,GAAGV,GACH,UAAWY,EAAW,SACtB,IAAK,IAAMT,GAAoCO,EAAkBC,EAAQC,CAAU,CACrF,EACAD,CAAM,CACI,GAGPT,GACRW,GAAmD,CAClD,IAAMC,EAAUD,EAAK,WAAW,SAAS,UAAW,IAAI,EAClDE,EAAWF,EAAK,WAAW,SAAS,WAAY,EAAG,EACnDG,EAAUH,EAAK,WAAW,OAAO,UAAW,CAAC,EACnD,OAAOI,GAA4B,CAAC,QAAAH,EAAS,SAAAC,EAAU,QAAAC,CAAO,CAAC,CACjE,EAEEb,GACF,CAACO,EAAyCC,EAAkBC,IACzC,CACb,IAAMM,EAAOC,GAAQT,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEU,EAAOT,EAAO,CAAC,EAAE,KAAK,OACtB,CAACU,EAAYC,CAAW,EAC1BZ,EAAiB,+BAA+BC,EAAO,CAAC,EAAE,MAA0B,EAClFY,EAAe;AAAA,sBACTH,CAAI;AAAA,iDACuBC,CAAU,KAAKC,CAAW;AAAA,oCACvCJ,EAAK,SAAS;AAAA,mCACfA,EAAK,SAAS;AAAA,uCACVA,EAAK,SAAS;AAAA,gCACrBA,EAAK,SAAS;AAAA;AAAA,oEAEsBN,EAAW,OAAO;AAAA,KAE5E,MAAO,CACL,GAAGZ,GACH,OAAQ,CAAC,KAAMW,EAAO,CAAC,EAAE,KAAM,KAAMA,EAAO,CAAC,EAAE,KAAM,aAAiC,EACtF,aAAAY,CACF,CACF,EAEFnB,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMa,EAAIb,EAAO,CAAC,EACZc,EAAQd,EAAO,CAAC,EAChBe,EAAIf,EAAO,CAAC,EACZgB,EAAOhB,EAAO,CAAC,EACfiB,EAAOjB,EAAO,CAAC,EAIrB,GAAIa,EAAE,KAAK,OAAS,GAAKC,EAAM,KAAK,SAAW,GAAKC,EAAE,KAAK,SAAW,GAAKC,EAAK,KAAK,SAAW,GAC5FC,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIH,EAAM,KAAK,CAAC,IAAMD,EAAE,KAAK,CAAC,GAAKE,EAAE,KAAK,CAAC,IAAMF,EAAE,KAAK,CAAC,GAAKG,EAAK,KAAK,CAAC,IAAMH,EAAE,KAAK,CAAC,GACnFI,EAAK,KAAK,CAAC,IAAMJ,EAAE,KAAK,CAAC,EAC3B,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAKA,EAAE,OAAS,WAAaA,EAAE,OAAS,WAAeC,EAAM,OAAS,WAAaA,EAAM,OAAS,WAC7FC,EAAE,OAAS,WAAaA,EAAE,OAAS,WAAeC,EAAK,OAAS,WAAaA,EAAK,OAAS,WAC3FC,EAAK,OAAS,WAAaA,EAAK,OAAS,UAC5C,MAAM,IAAI,MAAM,6BAA6B,CAEjD,IC/FA,IAsBaC,GAKSC,GAOTC,EAMAC,GAsBAC,GA9DbC,GAAAC,EAAA,kBAsBaN,GAAN,KAAkB,CACvB,YACWO,EAAgCC,EAAiCC,EACjEC,EAAoC,CADpC,eAAAH,EAAgC,iBAAAC,EAAiC,yBAAAC,EACjE,yBAAAC,CAAqC,CAClD,EACsBT,GAAf,KAAuB,CAC5B,YAAmBU,EAAsB,CAAtB,aAAAA,CAAuB,CAG5C,EAGaT,EAAN,KAAqB,CAC1B,YAAmBU,EAA4BC,EAAyB,CAArD,iBAAAD,EAA4B,kBAAAC,CAA0B,CAC3E,EAIaV,GAAN,KAAyB,CAG9B,YAAmBW,EAAcF,EAAsBC,EAAqC,CAAzE,UAAAC,EACbD,EACF,KAAK,aAAeA,EAEpB,KAAK,aAAe,CAAC,EAGnBD,IACF,KAAK,YAAcA,EAEvB,CACA,cAAcG,EAA0B,CAClCA,GACF,KAAK,aAAa,KAAKA,CAAI,CAE/B,CACF,EAGaX,GAAN,KAAkC,CACvC,OAAO,mBAAmBY,EAAmD,CAC3E,GAAI,CAACA,GAASA,EAAM,SAAW,EAC7B,MAAO,CAAC,EAGV,GAAIA,EAAM,SAAW,EACnB,OAAOA,EAGT,IAAMC,EAAa,IAAI,IACjBC,EAAmB,IAAI,IACvBC,EAAS,IAAI,MAEnB,YAAK,mBAAmBH,EAAOC,EAAYC,EAAkBC,CAAM,EAC5DA,CACT,CAEA,OAAe,mBACXC,EAAkCH,EAAyBC,EAC3DC,EAA8B,CAChC,QAAS,EAAI,EAAG,EAAIC,EAAW,OAAQ,EAAE,EACvC,KAAK,YAAYA,EAAW,CAAC,EAAGH,EAAYC,EAAkBC,CAAM,CAExE,CAEA,OAAe,YACXE,EAA0BJ,EAAyBC,EAA+BC,EAA8B,CAElH,GAAI,CAACE,GAAQH,EAAiB,IAAIG,EAAK,IAAI,EACzC,OAIF,GAAIJ,EAAW,IAAII,EAAK,IAAI,EAC1B,MAAM,IAAI,MAAM,kFAAmF,EAIrGJ,EAAW,IAAII,EAAK,IAAI,EAGxB,IAAMR,EAAeQ,EAAK,aAC1B,GAAIR,GAAgBA,EAAa,OAAS,EACxC,QAASS,EAAI,EAAGA,EAAIT,EAAa,OAAQ,EAAES,EACzC,KAAK,YAAYT,EAAaS,CAAC,EAAGL,EAAYC,EAAkBC,CAAM,EAK1EA,EAAO,KAAKE,CAAI,EAGhBH,EAAiB,IAAIG,EAAK,IAAI,EAG9BJ,EAAW,OAAOI,EAAK,IAAI,CAC7B,CACF,IC9GO,SAASE,IAA6B,CAC3C,IAAMC,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASC,IAA6B,CAC3C,IAAMD,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASE,IAA6B,CAC3C,IAAMF,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASG,IAA6B,CAC3C,IAAMH,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASI,IAA+B,CAC7C,IAAMJ,EAAO
,SASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASK,IAAiC,CAC/C,IAAML,EAAO,WAYb,MAAO,CAAC,KAXK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASM,IAA8B,CAC5C,IAAMN,EAAO,QAYb,MAAO,CAAC,KAXK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASO,IAA6B,CAC3C,IAAMP,EAAO,OAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASQ,IAA4B,CAC1C,IAAMR,EAAO,MAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASS,IAA6B,CAC3C,IAAMT,EAAO,OAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASU,IAA6B,CAC3C,OAAOC,GAAkB,KAAK,CAChC,CACO,SAASC,IAA+B,CAC7C,IAAMZ,EAAO,SAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CAEA,SAASW,GAAkBE,EAAkC,CAC3D,IAAMb,EAAO,GAAGa,CAAK,IASrB,MAAO,CAAC,KARK;AAAA,UACLb,CAAI;AAAA,aACDa,CAAK;AAAA;AAAA,SAETb,CAAI;AAAA,aACAa,CAAK;AAAA;AAAA,IAGF,KAAAb,EAAM,MAA6B,CACnD,CAvLA,IAyLMc,GAaAC,GAsEOC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GA7SbC,GAAAC,EAAA,kBAIAC,KACAC,KACAC,KAEAC,KAiLMnB,GACF,CAACoB,EAAgCC,EAAkBC,EAClDC,EAAoCF,EAAO,CAAC,EAAE,KAAMG,IAAyC,CAC5F,IAAMC,EAAcL,EAAQ,QAAQ,SACpC,MAAO,CACL,KAAME,EAAS,KACf,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,CAACG,EAAaA,CAAW,EACrC,UAAWD,EACX,IAAK,IAAMvB,GAAwBmB,EAASC,EAAQC,EAAUC,CAAgB,CAChF,CACF,EAEEtB,GACF,CAACmB,EAAgCC,EAAkBC,EAClDC,EAAoCF,EAAO,CAAC,EAAE,OAAsB,CACnE,IAAMI,EAAcL,EAAQ,QAAQ,SAC9BM,EAAc,CAACC,EAAU,SAASN,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,EAClEO,EAAcP,EAAO,CAAC,EAAE,KAEtBQ,EAAmBT,EAAQ,QAAQ,KAEzC,GAAIM,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUV,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAM,EAAK,EACrF,GAAI,CAACS,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEF,EAAcE,EACd,IAAME,EAAaJ,EAAY,OACzBK,EAAQZ,EAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC9Da,EAAQb,EAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC9Dc,EAASd,EAAO,CAAC,EAAE,KAAK,SAAW,EAAI,qCAAuC,mBAC9Ee,EAASf,EAAO,CAAC,EAAE,KAAK,SAAW,EAAI,qCAAuC,mBAE9EgB,EAAOC,GAAQlB,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDmB,EAAeV,EAAmB;AAAA,QACxCP,EAAS,IAAI;AAAA;AAAA;AAAA;AAAA,wBAIGA,EAAS,IAAI;AAAA,UAC3Be,EAAK,MAAM;AAAA,SAE2B;AAAA,QACxCf,EAAS,IAAI;AAAA,kCACaU,CAAU;AAAA,uBACrBC,CAAK;AAAA,uBACLC,CAAK;AAAA,UAClBC,CAAM;AAAA,UACNC,CAAM;AAAA,iBACCd,EAAS,IAAI;AAAA,SAGtB,MAAO,CACL,KAAMA,EAAS,KACf,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,CAACG,EAAaA,CAAW,EACrC,OAAQ,CAAC,KAAMG,EAAa,KAAML,EAAkB,YAAAE,CAAW,EAC/D,aAAAc,EACA,QAASV,CACX,CACF,CACA,IAAMQ,EAAOC,GAAQlB,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDmB,EAAe;AAAA,MACrBjB,EAAS,IAAI;AAAA;AAAA,kBAEDe,EAAK,SAAS;AAAA,kBACdA,EAAK,SAAS;AAAA,sBACVf,EAAS,IAAI;AAAA,QAC3Be,EAAK,MAAM;AAAA;AAAA,MAIb,MAAO,CACL,KAAMf,EAAS,KACf,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,CAACG,EAAaA,CAAW,EACrC,OAAQ,CAAC,KAAMJ,EAAO,CAAC,EAAE,KAAM,KAAME,EAAkB,YAAAE,CAAW,EAClE,aAAAc,EACA,QAAS,EACX,CACF,EAESrC,GAAM,CAACkB,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQpC,GAAQ,CAAC,EAAGoC,CAAM,CAAC,EAElFlB,GAAM,CAACiB,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ5B,GAAQ,EAAG,MAAM,EAAG4B,CAAM,CAAC,EAE1FjB,GAAM,CAACgB,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQlC,GAAQ,CAAC,EAAGkC,CAAM,CAAC,EAElFhB,GAAQ,CAACe,EAAgCC,IACtC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ/B,GAAU,EAAG,MAAM,EAAG+B,CAAM,CAAC,EAE5Ff,GAAU,CAACc
,EAAgCC,IACxC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ9B,GAAY,EAAG,MAAM,EAAG8B,CAAM,CAAC,EAE9Fd,GAAO,CAACa,EAAgCC,IACrC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ7B,GAAS,EAAG,MAAM,EAAG6B,CAAM,CAAC,EAE3Fb,GAAM,CAACY,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQjC,GAAQ,CAAC,EAAGiC,CAAM,CAAC,EAElFZ,GAAK,CAACW,EAAgCC,IACnC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ3B,GAAO,EAAG,MAAM,EAAG2B,CAAM,CAAC,EAEzFX,GAAM,CAACU,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQzB,GAAQ,CAAC,EAAGyB,CAAM,CAAC,EAElFV,GAAQ,CAACS,EAAgCC,IACtC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQvB,GAAU,CAAC,EAAGuB,CAAM,CAAC,EAEpFT,GAAM,CAACQ,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQhC,GAAQ,CAAC,EAAGgC,CAAM,CAAC,EAElFR,GAAM,CAACO,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ1B,GAAQ,EAAG,MAAM,EAAG0B,CAAM,CAAC,IC9SvG,IASamB,GAMAC,GAGPC,GAlBNC,GAAAC,EAAA,kBAMAC,KAGaL,GACT,CAACM,EAAgCC,EAAkBC,KACjDN,GAAeK,CAAM,EACd,CAACD,EAAQ,KAAKC,EAAO,CAAC,EAAGC,CAAE,CAAC,GAG5BP,GAAgEQ,GACzEC,GAAU,wBAAwBD,EAAK,WAAW,OAAO,IAAI,CAAC,EAE5DP,GAAkBK,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIA,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC1BA,IAYMI,GAOAC,GA2GOC,GAMPC,GApINC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAGAC,KAEMT,GAAoC,CAACU,EAAoBC,KAAuB,CACpF,KAAM,kBACN,WAAY,MAAM,KAAK,CAAC,OAAQD,CAAU,EAAG,CAACE,EAAIC,IAAM,IAAIA,CAAC,EAAE,EAC/D,WAAY,MAAMH,CAAU,EAAE,MAAuB,EACrD,UAAAC,CACF,GAEMV,GACF,CAACa,EAAgCC,EAA2BC,EAAkBC,IAA8B,CAC1G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KAAK,MAAM,EACxC,GAAIC,GAAQC,EAAW,QAAUD,EAAQ,GAAKC,EAAW,OACvD,MAAM,IAAI,MAAM,8DAA+D,EAE7ED,EAAO,IACTA,EAAOC,EAAW,OAASD,GAI7B,IAAME,EAAcD,EAAW,MAAM,CAAC,EACtC,QAASL,EAAI,EAAGA,EAAIG,EAAO,OAAQH,IAAK,CACtC,IAAMO,EAAaJ,EAAOH,CAAC,EAAE,KAAK,MAAM,EACxC,QAASQ,EAAY,EAAGA,EAAYH,EAAW,OAAQG,IAErD,GAAIA,IAAcJ,EAChBE,EAAYF,CAAI,GAAKG,EAAWC,CAAS,UAGlCH,EAAWG,CAAS,IAAMD,EAAWC,CAAS,EACrD,MAAM,IAAI,MAAM,kCAAkC,CAGxD,CAEA,IAAMC,EAAOH,EAAY,OACnBI,EAASC,GAAY,SAAUF,CAAI,EACnCG,EAAQC,GAAkBJ,CAAI,EAC9BK,EAAgBC,GAAkB,EAElCC,EAASb,EAAO,IAAIH,GAAKA,EAAE,IAAI,EAC/BiB,EAAWC,GAAcT,CAAI,EAC7BU,EAAoB,IAAI,MAAMH,EAAO,OAAS,CAAC,EAErDG,EAAQ,CAAC,EAAIH,EAAO,CAAC,EAAEZ,CAAI,EAC3B,QAASJ,EAAI,EAAGA,EAAImB,EAAQ,OAAQnB,IAClCmB,EAAQnB,CAAC,EAAImB,EAAQnB,EAAI,CAAC,EAAIgB,EAAOhB,CAAC,EAAEI,CAAI,EAG9C,IAAMgB,EAAUH,EAASb,CAAI,EACvBiB,EAAeJ,EAAS,MAAM,EAAE,EAChCK,EAAcL,EAAS,KAAK,EAE9BM,EAAkB,OAAOH,CAAO,MAAMD,EAAQ,CAAC,CAAC;AAAA;AAAA,oBAEtCG,CAAW,WAAWD,EAAa,KAAK,CAAC;AAAA,WAEvD,QAASrB,EAAI,EAAGA,EAAImB,EAAQ,OAAQnB,IAAK,CACvC,IAAMwB,EAAQL,EAAQnB,EAAI,CAAC,EAC3BuB,GAAmB;AAAA,kBACTH,CAAO,MAAMD,EAAQnB,CAAC,CAAC,QAAQoB,CAAO,OAAOD,EAAQnB,EAAI,CAAC,CAAC;AAAA;AAAA,sBAEvDA,CAAC,IAAIV,GAA0B2B,EAAUG,EAASI,CAAK,CAAC;AAAA,uBACvDlC,GAA0B+B,EAAcD,EAASI,CAAK,CAAC;AAAA,cAExE,CACA,IAAMC,EAAYN,EAAQ,OACpBK,EAAQL,EAAQA,EAAQ,OAAS,CAAC,EACxCI,GAAmB;AAAA;AAAA,oBAELE,CAAS,IAAInC,GAA0B2B,EAAUG,EAASI,CAAK,CAAC;AAAA,qBAC/DlC,GAA0B+B,EAAcD,EAASI,CAAK,CAAC,MAEtE,IAAME,EAAOC,GAAQ1B,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EAExD2B,EAAe;AAAA,YACfd,CAAa;AAAA,2BACEG,EAAS,IAAIY,GAAK,OAASA,CAAC,CAAC;AAAA,cAC1CN,CAAe;AAAA;AAAA;AAAA;AAAA,cAIfX,CAAK;AAAA,mCACgBK,EAASR,EAAO,CAAC,CAAC;AAAA,qBAChCQ,EAASR,EAAO,CAAC,CAAC,aAAaQ,EAASR,EAAO,CAAC,CAAC;AAAA,qBACjDQ,EAASR,EAAO,CAAC,CAAC;AAAA;AAAA,0CAEGC,CAAM;AAAA;AAAA,cAElCA,EAAOD,EAAO,CAAC,CAAC,MAAMC,EAAOD,EAAO,CAAC,CAAC;AAAA,kBAClCC,EAAOD,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,oCACzBC,CAAM;AAAA;AAAA;AAAA,cAG5BA,EAAOD,EAAO,CAAC,CAAC,MAAMC,EAAOD,EAAO,CAAC,CAAC;AAAA,kBAClCC,EAAOD,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,oCACzBC,CAAM;AAAA;AAAA;AAAA,cAG5BA,EAAOD,EAAO,CAAC,CAAC,MAAMC,EAAOD,EAAO,CAAC,CAAC;AAAA,kBAClCC,EAAO
D,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,kBAC3CC,EAAOD,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,oCACzBC,CAAM;AAAA;AAAA,cAE5BgB,EAAK,MAAM;AAAA;AAAA,UAInB,MAAO,CACL,GAAGxB,EACH,OAAQ,CAAC,KAAMI,EAAa,KAAMH,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,aAAAyB,EACA,QAAS,EACX,CACF,EAESvC,GACT,CAACY,EAAgCE,EAAkB2B,IAAoD,CACrG,IAAM5B,EAAWf,GAAkCgB,EAAO,OAAQ2B,EAAW,QAAQ,EACrF,MAAO,CAAC,GAAG5B,EAAU,IAAK,IAAMd,GAA8Ba,EAASC,EAAUC,EAAQ2B,EAAW,IAAI,CAAC,CAC3G,EAEExC,GAA4B,CAAC2B,EAAoBG,EAAiBI,IAA0B,CAChG,IAAMO,EAAad,EAAS,QAAQG,CAAO,EAQ3C,OAPYH,EAAS,IAAI,CAACe,EAAGC,IACvBA,IAAQF,EACH,GAAGC,CAAC,MAAMR,CAAK,GAEfQ,CAEV,EACU,KAAK,CAClB,IC9IA,IAgBaE,GAcPC,GAOAC,GAiEAC,GAMAC,GASAC,GAGAC,GAuBAC,GAwBOC,GAGPC,GA1KNC,GAAAC,EAAA,kBAGAC,KAKAC,KAEAC,KAMad,GACT,CAACe,EAAyCC,EAAkBC,KAC1DR,GAAeO,CAAM,EACjBD,EAAiB,QAAQ,MAAQC,EAAO,CAAC,EAAE,KAAK,OAAS,EAGpD,CADHD,EAAiB,IAAIG,GAAoCH,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CAC5F,EAIP,CADHD,EAAiB,IAAIZ,GAAsCY,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CAC9F,GAIhBf,GAAsC,CAACkB,EAAoBC,KAAuB,CACtF,KAAM,SACN,WAAY,MAAM,KAAK,CAAC,OAAQD,CAAU,EAAG,CAACE,EAAIC,IAAM,IAAIA,CAAC,EAAE,EAC/D,WAAY,MAAMH,CAAU,EAAE,MAAyB,EACvD,UAAAC,CACF,GAEMlB,GACF,CAACqB,EAAiCC,EAA2BR,EAAkBS,IAA8B,CAC3G,IAAMC,EAAaV,EAAO,CAAC,EAAE,KAAK,MAAM,EACxC,GAAIS,GAAQC,EAAW,QAAUD,EAAQ,GAAKC,EAAW,OACvD,MAAM,IAAI,MAAM,8DAA+D,EAE7ED,EAAO,IACTA,EAAOC,EAAW,OAASD,GAI7B,IAAME,EAAcD,EAAW,MAAM,CAAC,EACtC,QAASJ,EAAI,EAAGA,EAAIN,EAAO,OAAQM,IAAK,CACtC,IAAMM,EAAaZ,EAAOM,CAAC,EAAE,KAAK,MAAM,EACxC,QAASO,EAAY,EAAGA,EAAYH,EAAW,OAAQG,IAErD,GAAIA,IAAcJ,EAChBE,EAAYF,CAAI,GAAKG,EAAWC,CAAS,UAGlCH,EAAWG,CAAS,IAAMD,EAAWC,CAAS,EACrD,MAAM,IAAI,MAAM,kCAAkC,CAGxD,CAEA,IAAMC,EAAOH,EAAY,OAEnBI,EAAmB,IAAI,MAAcf,EAAO,MAAM,EACpDgB,EAAc,EAClB,QAASV,EAAI,EAAGA,EAAIS,EAAiB,OAAQ,EAAET,EAC7CU,GAAehB,EAAOM,CAAC,EAAE,KAAKG,CAAI,EAClCM,EAAiBT,CAAC,EAAIU,EAGxB,IAAIC,EAAwC,GAExCjB,EAAO,OAAS,EAClBiB,EAAwC7B,GAA4C2B,CAAgB,EAEpGE,EAAwC5B,GAA4C0B,CAAgB,EAGtG,IAAMG,EAAoC5B,GAAqCU,EAAO,OAAQc,CAAI,EAC5FK,EAA0C5B,GAA2CwB,CAAgB,EACrGK,EAAe;AAAA,UACjBF,CAAiC;AAAA,UACjCC,CAAuC;AAAA,UACvCF,CAAqC;AAAA,oCACXH,CAAI;AAAA,mEAC2BL,CAAI;AAAA;AAAA;AAAA,sBAGjDA,CAAI,eAAeA,CAAI;AAAA;AAAA;AAAA;AAAA,WAKvC,MAAO,CACL,GAAGD,EACH,OAAQ,CAAC,KAAMG,EAAa,KAAMX,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAoB,CACF,CACF,EAEEjC,GACF,CAACkC,EAAgCrB,EAAkBC,IAAoD,CACrG,IAAMO,EAAWvB,GAAoCe,EAAO,OAAQC,EAAW,QAAQ,EACvF,MAAO,CAAC,GAAGO,EAAU,IAAK,IAAMtB,GAAgCmC,EAASb,EAAUR,EAAQC,EAAW,IAAI,CAAC,CAC7G,EAEEb,GAA+C2B,GAG5C;AAAA,QAFYA,EAAiB,IAAI,CAACO,EAAMhB,IAAM,YAAYgB,CAAI,aAAahB,CAAC;AAAA,CACpF,EAEkB,KAAK,EAAE,CAAC;AAAA,OAKrBjB,GAA+C0B,GACjD3B,GAA4C2B,CAAgB,EAE1DzB,GAAuC,CAACiC,EAAyBC,IAAuB,CAC5F,IAAMC,EAAsB,CAAC,mEAAmED,CAAU,MAAM,EAChH,QAASlB,EAAI,EAAGA,EAAIiB,EAAiB,EAAEjB,EACjCA,IAAM,EACRmB,EAAU,KACN,wBACuBnB,CAAC,gBAAgBA,CAAC,cAAc,EAClDA,IAAMiB,EAAkB,EACjCE,EAAU,KACN,oBACmBnB,CAAC,cAAc,EAEtCmB,EAAU,KACN,6BAC4BnB,CAAC,gBAAgBA,CAAC,cAAc,EAGpE,OAAAmB,EAAU,KACN,IACG,EACAA,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMlC,GAA8CwB,GAAuC,CACzF,IAAMU,EAAsB,CAAC,oDAAoD,EACjF,QAASnB,EAAI,EAAGA,EAAIS,EAAiB,OAAQ,EAAET,EACzCA,IAAM,EACRmB,EAAU,KACN,iBACgBnB,CAAC,cAAcS,EAAiBT,CAAC,CAAC,KAAK,EAClDA,IAAMS,EAAiB,OAAS,EACzCU,EAAU,KACN,kBACiBV,EAAiBT,CAAC,CAAC,KAAK,EAE7CmB,EAAU,KACN,sBACqBnB,CAAC,cAAcS,EAAiBT,CAAC,CAAC,KAAK,EAGpE,OAAAmB,EAAU,KACN,IACG,EAEAA,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEajC,GAAmEkC,GAC5EC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,MAAM,CAAC,CAAC,EAEhEjC,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAGlC,IAAM4B,EAAY5B,EAAO,CAAC,EAAE,KACtB6B,EAAsB7B,EAAO,CAAC,EAAE,KAAK,OAG3C,GAAI4B,IAAc,SAChB,MAAM,IAAI,MAAM,oCAAoC,EAGtD,QAAWE,KAAS9B,EAAQ,CAE1B
,GAAI8B,EAAM,OAASF,EACjB,MAAM,IAAI,MAAM,kCAAkC,EAIpD,GAAIE,EAAM,KAAK,SAAWD,EACxB,MAAM,IAAI,MAAM,0CAA0C,CAE9D,CACF,ICtLO,SAASE,IAA6B,CAC3C,OAAOC,GAAiB,KAAK,CAC/B,CACO,SAASC,IAA8B,CAC5C,OAAOD,GAAiB,MAAM,CAChC,CACO,SAASE,IAA8B,CAC5C,OAAOF,GAAiB,MAAM,CAChC,CACO,SAASG,IAA8B,CAC5C,OAAOH,GAAiB,MAAM,CAChC,CACO,SAASI,IAA8B,CAC5C,OAAOJ,GAAiB,MAAM,CAChC,CACO,SAASK,IAA6B,CAC3C,OAAOL,GAAiB,KAAK,CAC/B,CACO,SAASM,GAAQC,EAAkC,CACxD,IAAMC,EAAO,MAWb,MAAO,CAAC,KAVK;AAAA,8BACeD,CAAK;AAAA;AAAA,UAEzBC,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA,kBACKA,CAAI,WAAWA,CAAI,WAAWA,CAAI,WAAWA,CAAI;AAAA;AAAA,IAGnD,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASC,IAA6B,CAC3C,OAAOT,GAAiB,KAAK,CAC/B,CACO,SAASU,IAA+B,CAC7C,OAAOV,GAAiB,OAAO,CACjC,CACO,SAASW,GAASC,EAAaC,EAAgC,CACpE,IAAML,EAAO,OAYb,MAAO,CAAC,KAXK;AAAA,4BACaI,CAAG;AAAA,4BACHC,CAAG;AAAA;AAAA,UAErBL,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASM,IAAkC,CAChD,IAAMN,EAAO,YASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASO,GAAcR,EAAkC,CAC9D,IAAMC,EAAO,YAWb,MAAO,CAAC,KAVK;AAAA,8BACeD,CAAK;AAAA;AAAA,UAEzBC,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA,kBACKA,CAAI,WAAWA,CAAI,WAAWA,CAAI,WAAWA,CAAI;AAAA;AAAA,IAGnD,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASQ,IAA6B,CAC3C,OAAOhB,GAAiB,KAAK,CAC/B,CACO,SAASiB,IAA6B,CAC3C,IAAMT,EAAO,MASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASU,IAA6B,CAC3C,IAAMV,EAAO,MAeb,MAAO,CAAC,KAdK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,SAGJA,CAAI;AAAA;AAAA;AAAA,UAGHA,CAAI;AAAA;AAAA;AAAA,IAIE,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASW,IAA6B,CAC3C,OAAOnB,GAAiB,KAAK,CAC/B,CACO,SAASoB,IAA8B,CAC5C,IAAMZ,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASa,IAAiC,CAC/C,IAAMb,EAAO,UASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASc,IAA8B,CAC5C,OAAOtB,GAAiB,MAAM,CAChC,CACO,SAASuB,IAA6B,CAC3C,OAAOvB,GAAiB,KAAK,CAC/B,CACO,SAASwB,IAA8B,CAC5C,IAAMhB,EAAO,OAab,MAAO,CAAC,KAZK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,SAKLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,IAMG,KAAAA,EAAM,MAA6B,CACnD,CACA,SAASR,GAAiBQ,EAAiC,CASzD,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA,aACDA,CAAI;AAAA;AAAA,SAERA,CAAI;AAAA,aACAA,CAAI;AAAA;AAAA,IAGD,KAAAA,EAAM,MAA6B,CACnD,CAvLA,IA6LMiB,GAoBAC,GAQOC,GAGAC,GAGAC,GAGAC,GAQAC,GAMAC,GAGAC,GAKPC,GAWOC,GAGAC,GAOAC,GAKAC,GAGAC,GAGAC,GAGAC,GAOAC,GAKAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAlUbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEAC,KAmLMlC,GACF,CAACmC,EAAgCC,EAA2BC,EAAeC,IACxD,CACb,IAAMC,EAAcJ,EAAQ,QAAQ,SAC9BK,EAAOC,GAAQN,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EAC9D,MAAO,CACL,GAAGC,EACH,OAAQ,CAAC,KAAMC,EAAM,KAAM,KAAMA,EAAM,KAAM,YAAAE,CAAW,EACxD,aAAc;AAAA,OACnBD,EAAS,IAAI;AAAA;AAAA,kBAEFE,EAAK,SAAS;AAAA,aACnBF,EAAS,IAAI;AAAA,SACjBE,EAAK,MAAM;AAAA;AAAA,OAGR,QAAS,EACX,CACF,EAEFvC,GACF,CAACkC,EAAgCE,EAAeC,EAA6BI,IACpD,CACnB,IAAMH,EAAcJ,EAAQ,QAAQ,SAC9BC,EAAW,CAAC,KAAME,EAAS,KAAM,WAAY,CAACC,CAAW,EAAG,WAAY,CAAC,GAAG,EAAG,UAAWG,CAAQ,EACxG,MAAO,CAAC,GAAGN,EAAU,IAAK,IAAMpC,GAA6BmC,EAASC,EAAUC,EAAOC,CAAQ,CAAC,CAClG,EAEKpC,GAAM,CAACiC,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGrE,GAAQ,CAAC,EAAGqE,CAAM,CAAC,EAE1FxC,GAAO,CAACgC,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGnE,GAAS,CAAC,EAAGmE,CAAM,CAAC,EAE3FvC,GAAO,CAAC+B,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGlE,GAAS,CAAC,EAAGkE,CAAM,CAAC,EAE3FtC,GAAO,CAAC8B,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGj
E,GAAS,CAAC,EAAGiE,CAAM,CAAC,EAO3FrC,GACT,CAAC6B,EAAgCQ,EAAkBC,IAAyC,CAACT,EAAQ,IACjGlC,GACIkC,EAASQ,EAAO,CAAC,EAAGzD,GAAS0D,EAAW,IAAKA,EAAW,GAAG,EAAGA,EAAW,QAAQ,EACrFD,CAAM,CAAC,EAEFpC,GAAuBsC,GAAqCC,GACrE,CAAC,IAAKD,EAAK,WAAW,SAAS,MAAOE,EAAQ,EAAG,IAAKF,EAAK,WAAW,SAAS,MAAOG,EAAQ,CAAC,CAAC,EAEvFxC,GAAU,CAAC2B,EAAgCQ,IAA+B,CACrF,IAAMC,EAAanC,GAAiC0B,EAASQ,CAAM,EACnE,OAAOrC,GAAK6B,EAAS,CAACQ,EAAO,CAAC,CAAC,EAAGC,CAAU,CAC9C,EAEMnC,GAAmC,CAAC0B,EAAgCQ,IAAqC,CAC7G,GAAIA,EAAO,QAAU,IAChB,CAACR,EAAQ,QAAQ,cAAcQ,EAAO,CAAC,EAAE,MAAM,GAAK,CAACR,EAAQ,QAAQ,cAAcQ,EAAO,CAAC,EAAE,MAAM,GACtG,MAAM,IAAI,MAAM,yCAAyC,EAG3D,IAAMxD,EAAOwD,EAAO,QAAU,EAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,EAAII,GACvD3D,EAAOuD,EAAO,QAAU,EAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,EAAIK,GAC7D,OAAOF,GAA4B,CAAC,IAAA3D,EAAK,IAAAC,CAAG,CAAC,CAC/C,EAEasB,GAAO,CAACyB,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGhE,GAAS,CAAC,EAAGgE,CAAM,CAAC,EAE3FhC,GAAM,CAACwB,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG/D,GAAQ,CAAC,EAAG+D,CAAM,CAAC,EAM1F/B,GACT,CAACuB,EAAgCQ,EAAkBC,IAAwC,CAACT,EAAQ,IAChGlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG9D,GAAQ+D,EAAW,KAAK,EAAGA,EAAW,QAAQ,EACrGD,CAAM,CAAC,EAEF9B,GAAsBgC,GAC/BC,GAA4B,CAAC,MAAOD,EAAK,WAAW,SAAS,QAAS,CAAG,CAAC,CAAC,EAElE/B,GAAM,CAACqB,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG3D,GAAQ,CAAC,EAAG2D,CAAM,CAAC,EAE1F5B,GAAQ,CAACoB,EAAgCQ,IACtC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG1D,GAAU,CAAC,EAAG0D,CAAM,CAAC,EAE5F3B,GAAW,CAACmB,EAAgCQ,IACzC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGtD,GAAa,CAAC,EAAGsD,CAAM,CAAC,EAM/F1B,GACT,CAACkB,EAAgCQ,EAAkBC,IAA8C,CAACT,EAAQ,IACtGlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGrD,GAAcsD,EAAW,KAAK,EAAGA,EAAW,QAAQ,EAC3GD,CAAM,CAAC,EAEFzB,GAA4B2B,GACrCC,GAA4B,CAAC,MAAOD,EAAK,WAAW,SAAS,QAAS,GAAI,CAAC,CAAC,EAEnE1B,GAAM,CAACgB,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGpD,GAAQ,CAAC,EAAGoD,CAAM,CAAC,EAE1FvB,GAAM,CAACe,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGnD,GAAQ,CAAC,EAAGmD,CAAM,CAAC,EAE1FtB,GAAM,CAACc,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGlD,GAAQ,CAAC,EAAGkD,CAAM,CAAC,EAE1FrB,GAAO,CAACa,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGhD,GAAS,CAAC,EAAGgD,CAAM,CAAC,EAE3FpB,GAAU,CAACY,EAAgCQ,IACxC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG/C,GAAY,CAAC,EAAG+C,CAAM,CAAC,EAE9FnB,GAAM,CAACW,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGjD,GAAQ,CAAC,EAAGiD,CAAM,CAAC,EAE1FlB,GAAO,CAACU,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG9C,GAAS,CAAC,EAAG8C,CAAM,CAAC,EAE3FjB,GAAM,CAACS,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG7C,GAAQ,CAAC,EAAG6C,CAAM,CAAC,EAE1FhB,GAAO,CAACQ,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG5C,GAAS,CAAC,EAAG4C,CAAM,CAAC,ICnTjG,SAASM,GAAqBC,EAA0C,CAC7E,IAAIC,EACJ,OAAQD,EAAW,WAAY,CAC7B,IAAK,OACHC,EAAOC,GAAS,EAChB,MACF,IAAK,UACHD,EAAOE,GAAY,EACnB,MACF,IAAK,OACHF,EAAOG,GAASJ,EAAW,QAAUA,EAAW,OAAQ,EACxD,MAEF,QACE,MAAO,CAAC,mBAAoB,GAAI,gBAAiB,EAAE,CACvD,CAEA,IAAMK,EAAiBJ,EAAK,KACtBK,EAAqBL,EAAK,KAC1BM,EAAkB,WAAWF,CAAc,YACjD,MAAO,CAAC,mBAAAC,EAAoB,gBAAAC,CAAe,CAC7C,CArCA,IAuCaC,GAvCbC,GAAAC,EAAA,kBAIAC,KAGAC,KAgCaJ,GAAqCR,GAAwD,CACxG,IAAMa,EAAab,EAAW,UAAU,aAAc,EAAE,EAExD,GAAIa,IAAe,OAAQ,CACzB,GAAM,CAACC,EAASC,CAAO,EAAIf,EAAW,UAAU,oBAAqB,CAACgB,GAAUC,EAAQ,CAAC,EACzF,MAAO,CAAC,WAAAJ,EAAY,QAAAE,EAAS,QAAAD,EAAS,mBAAoB,GAAGD,CAAU,IAAIC,CAAO,IAAIC,CAAO,EAAE,CACjG,CACA,MAAO,CAAC,WAAAF,EAAY,mBAAoBA,CAAU,CACpD,IC/CA,IAYMK,GAQAC,GA+DOC,GAnFbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KACAC,KAEMT,GAA2C,CAACU,EAAkBC,KAAwC,CAC1G,KAAM,cACN,WAAYD,EAAU,CA
AC,IAAK,IAAK,MAAM,EAAI,CAAC,IAAK,GAAG,EACpD,WAAYA,EAAU,MAAiE,EACjE,IAA2C,EACjE,UAAAC,CACF,GAEMV,GACF,CAACW,EAAyCC,EAA2BC,EACpEC,IAA4C,CAE3C,IAAMC,EADUH,EAAO,OAAS,EACF,oCAAsC,GAC9DI,EAASJ,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BK,EAASL,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BM,EAAyBD,EAAO,CAAC,EAAIH,EAAW,MACtDK,GAAO,QACH,cACA,WAAWL,EAAW,OAAO,eAAeA,EAAW,SAAS,WAAWA,EAAW,KAAK,iBACvFA,EAAW,WAAW,UAAUA,EAAW,IAAI,aAAaA,EAAW,OAAO,EAAE,EACxF,IAAMM,EACFC,GAAqBL,EAAQC,EAAQH,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAC5FQ,EAAOC,GAAQZ,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAAC,mBAAAa,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBZ,CAAU,EAEvEa,EAAe;AAAA,gCACKb,EAAW,QAAQ,CAAC,CAAC,KAAKA,EAAW,QAAQ,CAAC,CAAC;AAAA,6BAClDA,EAAW,KAAK,CAAC,CAAC,KAAKA,EAAW,KAAK,CAAC,CAAC;AAAA,IAClEU,CAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAMgBN,CAAsB;AAAA;AAAA;AAAA,4CAGhBD,EAAO,CAAC,CAAC;AAAA,uCACdA,EAAO,CAAC,CAAC;AAAA,wCACRA,EAAO,CAAC,CAAC;AAAA,gDACDH,EAAW,UAAU,CAAC,CAAC;AAAA;AAAA,wCAE/BE,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,wCAITC,EAAO,CAAC,CAAC;AAAA,gDACDH,EAAW,UAAU,CAAC,CAAC;AAAA,wCAC/BE,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAU3CD,CAAW;AAAA,MACXU,CAAe;AAAA,MACfH,EAAK,MAAM;AAAA;AAAA,EAGX,MAAO,CACL,GAAGT,EACH,OAAQ,CAAC,KAAMO,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAe,EACA,QAAS,EACX,CACF,EAES1B,GACT,CAACU,EAAyCC,EAA2BE,IAC5C,CACnB,IAAMD,EAAWd,GAAyCa,EAAO,OAAS,EAAGE,EAAW,QAAQ,EAChG,MAAO,CACL,GAAGD,EACH,IAAK,IAAMb,GAAqCW,EAAkBC,EAAQC,EAAUC,CAAU,CAChG,CACF,IC3FR,IAWMc,GAOAC,GAiEOC,GAnFbC,GAAAC,EAAA,kBAIAC,KAEAC,KAGAC,KAEMP,GAAqCQ,IAAuB,CAChE,KAAM,kBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAmB,EAC/B,UAAAA,CACF,GAEMP,GACF,CAACQ,EAAyCC,EAA2BC,EAAWC,EAC/EC,EAAgCC,IAA4C,CAC3E,IAAMC,EAASJ,EAAE,KACXK,EAASJ,EAAE,KACXK,EAAS,EACTC,EAAS,EACTC,EAAON,EAAY,OACnBO,EAAc,CAACJ,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAGH,EAAY,CAAC,EAAIA,EAAY,CAAC,CAAC,EACjFQ,EAAaL,EAAO,CAAC,EAAIA,EAAO,CAAC,EACjCM,EAAgBC,GAAkB,EAClCC,EAAOC,GAAQhB,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACnEiB,EAAW,GAEf,QAASC,EAAM,EAAGA,GAAO,EAAGA,IAC1B,QAASC,EAAM,EAAGA,GAAO,EAAGA,IAC1BF,GAAY;AAAA,kCACYE,CAAG;AAAA,2BACVD,CAAG;AAAA;AAAA,8BAEAP,EAAY,CAAC,CAAC,aAAaA,EAAY,CAAC,CAAC;AAAA,4CAC3BP,EAAYM,EAAO,CAAC,CAAC,QAAQL,EAAW,QAAQ,CAAC,CAAC;AAAA,kBAC5EA,EAAW,KAAK,CAAC,CAAC;AAAA,+BACLA,EAAW,UAAU,CAAC,CAAC,iBAAiBO,CAAU,OAAOL,EAAO,CAAC,CAAC;AAAA;AAAA,wBAEzED,EAAOE,CAAM,CAAC;AAAA,6CACOJ,EAAYM,EAAO,CAAC,CAAC,OAAOL,EAAW,QAAQ,CAAC,CAAC;AAAA,oBAC1EA,EAAW,KAAK,CAAC,CAAC;AAAA,iCACLA,EAAW,UAAU,CAAC,CAAC,qBAAqBO,CAAU,MAAML,EAAO,CAAC,CAAC;AAAA;AAAA,0BAE5ED,EAAOG,CAAM,CAAC;AAAA;AAAA,yCAECG,CAAU;AAAA;AAAA,6BAEtBM,EAAM,EAAIC,CAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAWpC,IAAMC,EAAe;AAAA,QACnBP,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAOTI,CAAQ;AAAA,YACRF,EAAK,MAAM;AAAA;AAAA,cAGjB,MAAO,CACL,GAAGd,EACH,OAAQ,CAAC,KAAMU,EAAa,KAAMT,EAAE,KAAM,aAA+B,EACzE,aAAAkB,EACA,QAAS,EACX,CACF,EAES3B,GACT,CAACO,EAAyCE,EAAWC,EAAWC,EAC/DC,IAAkD,CACjD,IAAMJ,EAAWV,GAAkCc,EAAW,QAAQ,EACtE,MAAO,CACL,GAAGJ,EACH,IAAK,IAAMT,GAA8BQ,EAAkBC,EAAUC,EAAGC,EAAGC,EAAaC,CAAU,CACpG,CACF,ICtDJ,SAASgB,GACLC,EAA2BC,EAAkBC,EAAiE,CAChH,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAASH,EAAO,CAAC,EAAE,KACnBI,EAAcC,GAAc,UAAUH,EAAQC,EAAQ,EAAI,EAChE,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAME,EAAiBC,GAAkBH,EAAY,MAAM,EACrDI,EAAgBC,GAAc,EAC9B,CAAC,mBAAAC,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBX,CAAoB,EAEjFY,EAAUb,EAAO,OAAS,EAC1Bc,EAAcD,EAAU,+BAAiC,GACzDE,EACFF,EAAU,GAAGG,GAAiBV,EAAgBE,EAAeR,EAAO,CAAC,EAAE,KAAMI,EAAa,EAAK,CAAC,GAAK,GAEnGa,EAAOb,EAAY,OACnBc,EAAQhB,EAAO,OACfiB,EAAQhB,EAAO,OACfiB,EAAYlB,EAAOA,EAAO,OAAS,CAAC,EACpCmB,EAAe;AAAA,MACjBX,CAAkB;AAAA,MAClBK,CAAuB;AAAA,gCACGE,CAAI;AAAA,gBACpBC,CAAK;AAAA,
gBACLC,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKKC,CAAS;AAAA,gBACnBF,EAAQ,CAAC;AAAA,gBACTC,EAAQ,CAAC;AAAA;AAAA;AAAA,UAGfL,CAAW;AAAA,UACXH,CAAe;AAAA;AAAA,OAGvB,MAAO,CACL,GAAGZ,EACH,OAAQ,CAAC,KAAMK,EAAa,KAAMJ,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAqB,CACF,CACF,CAEO,SAASC,GACZtB,EAAkBC,EAAuE,CAC3F,IAAMF,EAAWwB,GAA4BvB,EAAO,OAAS,EAAGC,EAAqB,kBAAkB,EACvG,MAAO,CAAC,GAAGF,EAAU,IAAK,IAAMD,GAAwBC,EAAUC,EAAQC,CAAoB,CAAC,CACjG,CAqBO,SAASe,GACZV,EAAwBE,EAAkCgB,EAA4BC,EACtFC,EAA2B,CAC7B,IAAIC,EAAwB,GACtBC,EAASJ,EAAQ,OACjBK,EAAUJ,EAAS,OACnBK,EAAWD,EAAUD,EACvBC,EAAU,GAAKD,EAAS,EAC1BD,EAAwB,SAExBA,EAAwBH,EAAQ,IAAI,CAACO,EAAIC,IAAM,UAAUxB,EAAcwB,EAAIF,CAAQ,CAAC,EAAE,EAAE,KAAK,IAAI,EAGnG,IAAMG,EADgB5B,GAAc,iBAAiBmB,EAASC,CAAQ,EAClC,IAAIS,GAAK,UAAU1B,EAAc0B,EAAIJ,CAAQ,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EAE9FK,EADSC,EAAU,KAAKZ,CAAO,IACJ,EAC7Ba,EAAS,uCACb,OAAIF,IACFE,EAAS,uBAEoBX,EAAW;AAAA;AAAA,IAExCpB,CAAc;AAAA,IACd2B,CAAa;AAAA,+BACcN,CAAqB;AAAA,WACzCU,CAAM;AAAA,GAE2B;AAAA;AAAA,IAExC/B,CAAc;AAAA,IACd2B,CAAa;AAAA;AAAA,EAKjB,CAhJA,IAcaK,GAYAC,GAGPhB,GA6DAiB,GA1FNC,GAAAC,EAAA,kBAMAC,KAEAC,KACAC,KAEAC,KACAC,KAEaT,GACT,CAACU,EAAyChD,EAAkBiD,KAC1DT,GAAexC,CAAM,EAEjBgD,EAAiB,QAAQ,KACpB,CAACA,EAAiB,IACrBE,GAAoCF,EAAkBhD,EAAQiD,CAAU,EAAGjD,CAAM,CAAC,EAE/E,CAACgD,EAAiB,IAAI1B,GAA8BtB,EAAQiD,CAAU,EAAGjD,CAAM,CAAC,GAIlFuC,GACRY,GAAmDC,GAAkCD,EAAK,UAAU,EAEnG5B,GAA8B,CAACV,EAAkBwC,KAAuB,CAC5E,KAAM,SACN,WAAYxC,EAAU,CAAC,IAAK,IAAK,MAAM,EAAI,CAAC,IAAK,GAAG,EACpD,WAAYA,EAAU,MAAiE,EACjE,IAA2C,EACjE,UAAAwC,CACF,GAuDMb,GAAkBxC,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAKA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WACnDA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACtD,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,OAASA,EAAO,CAAC,EAAE,KAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,ICbA,SAASsD,GACLC,EAAwBC,EAAkCC,EAAkBC,EAAqC,CACnH,IAAIC,EAAyB,CAAC,EAC1BC,EAAyB,CAAC,EAExBC,EAAWJ,EAAO,CAAC,EAAE,KACrBK,EAAWL,EAAO,CAAC,EAAE,KAErBM,EAAUF,EAAS,OACnBG,EAAUF,EAAS,OAEnBG,EAAUP,EAAS,OACnBQ,EAAYD,EAAUF,EACtBI,EAAYF,EAAUD,EAE5BL,EAAyBE,EAAS,IAAI,CAACO,EAAIC,IAAM,UAAUb,EAAca,EAAIH,CAAS,CAAC,EAAE,EACzFP,EAAuBI,EAAU,CAAC,EAAI,MACtCJ,EAAuB,KAAK,IAAI,EAChCC,EAAyBE,EAAS,IAAI,CAACM,EAAIC,IAAM,UAAUb,EAAca,EAAIF,CAAS,CAAC,EAAE,EACzFP,EAAuBI,EAAU,CAAC,EAAI,MACtCJ,EAAuB,KAAK,IAAI,EAEhC,IAAMU,EAAiBC,GAAc,iBAAiBV,EAAUH,CAAQ,EAClEc,EAAiBD,GAAc,iBAAiBT,EAAUJ,CAAQ,EAElEe,EAAiBH,EAAe,IAAII,GAAK,UAAUlB,EAAckB,EAAIR,CAAS,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EACjGS,EAAiBH,EAAe,IAAIE,GAAK,UAAUlB,EAAckB,EAAIP,CAAS,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EACjGS,EAAiB,wBAAwBpB,EAAcS,EAAU,CAAC,CAAC;AAAA,WAChET,EAAcS,EAAU,CAAC,CAAC,aAAaT,EAAcS,EAAU,CAAC,CAAC;AAAA,WACjET,EAAcS,EAAU,CAAC,CAAC,cAmBnC,MAjBoC;AAAA;AAAA,IAElCV,CAAc;AAAA,IACdqB,CAAc;AAAA,IACdH,CAAc;AAAA,4BACUd,CAAsB;AAAA;AAAA;AAAA;AAAA;AAAA,IAK9CJ,CAAc;AAAA,IACdqB,CAAc;AAAA,IACdD,CAAc;AAAA,4BACUf,CAAsB;AAAA;AAAA,EAKlD,CAEA,SAASiB,GAAKrB,EAAyBsB,EAAsB,CAC3D,IAAIC,EAAM,GACV,QAASV,EAAI,EAAGA,EAAIS,EAAO,EAAGT,IAC5BU,GAAO,MAAMvB,EAAca,CAAC,CAAC,KAE/B,OAAAU,GAAO,MAAMvB,EAAcsB,EAAO,CAAC,CAAC,QAE7BC,CACT,CAEA,SAASC,GAAKxB,EAAyBsB,EAAsB,CAC3D,IAAIC,EAAM,GACV,QAASV,EAAI,EAAGA,EAAIS,EAAO,EAAGT,IAC5BU,GAAO,MAAMvB,EAAca,CAAC,CAAC,KAE/B,OAAAU,GAAO,WACGvB,EAAcsB,EAAO,CAAC,CAAC,GAC1BC,CACT,CAnKA,IAaME,GAQAC,GA+DOC,GApFbC,GAAAC,EAAA,kBAIAC,KACAC,KAEAC,KACAC,KAEAC,KACAC,KAEMV,GAAoC,CAACW,EAAkBC,KAAuB,CAClF,KAAM,kBACN,WAAYD,EAAU,CAAC,IAAK,IAAK,MAAM,EAAI,CAAC,IAAK,GAAG,EACpD,WAAYA,EAAU
,MAA2D,EAC3D,IAAuC,EAC7D,UAAAC,CACF,GAEMX,GACF,CAACY,EAAyCC,EAA2BtC,EACpEuC,IAAoE,CACnE,IAAMJ,EAAUnC,EAAO,OAAS,EAC1BwC,EAAcL,EAAU,+BAAiC,GACzDM,EAASzC,EAAO,CAAC,EAAE,KACnB0C,EAAS1C,EAAO,CAAC,EAAE,KACnB2C,EAAc7B,GAAc,UAAU2B,EAAQC,EAAQ,EAAI,EAC1DE,EAAc,CAACC,EAAU,SAAS7C,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,EAEtE,GAAI,CAAC2C,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMG,EAAYL,EAAOA,EAAO,OAAS,CAAC,EACpCM,EAAiB,KAAK,KAAKD,EAAY,CAAC,EACxCE,EAAQP,EAAO,OACfQ,EAAQP,EAAO,OAEfQ,EAAOC,GAAQd,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEvC,EAAiBsD,GAAkBT,EAAY,MAAM,EACrDnC,EAAUmC,EAAY,OACtB5C,EAAgBsD,GAAc,EAC9B,CAAC,mBAAAC,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBjB,CAAoB,EAEjFkB,EACFtB,EAAU,GAAGuB,GAAiB5D,EAAgBC,EAAeC,EAAO,CAAC,EAAE,KAAM2C,EAAa,EAAI,CAAC,GAAK,GAElGgB,EACFf,EAAc,GAAG/C,GAAyBC,EAAgBC,EAAeC,EAAQ2C,CAAW,CAAC,GAAK,GAEhGiB,EAA2BhB,EAAc,2BAA6B,QAAQxB,GAAKrB,EAAeiD,CAAK,CAAC,IACxGa,EAA2BjB,EAAc,2BAA6B,QAAQrB,GAAKxB,EAAekD,CAAK,CAAC,IACxGa,EAAyBlB,EAAc,GAAK,GAAG9C,CAAc;AAAA,gDACzBC,EAAcS,EAAU,CAAC,CAAC,QAAQT,EAAcS,EAAU,CAAC,CAAC;AAAA,eAC7FT,EAAcS,EAAU,CAAC,CAAC,QAAQT,EAAcS,EAAU,CAAC,CAAC;AAAA,QAE/DuD,EAAe;AAAA,cACbJ,CAAiC;AAAA,cACjCF,CAAuB;AAAA,cACvBH,CAAkB;AAAA;AAAA,gBAEhBQ,CAAsB;AAAA;AAAA;AAAA,oCAGFf,CAAc;AAAA,2BACvBa,CAAwB;AAAA,2BACxBC,CAAwB;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKnCrB,CAAW;AAAA,gBACXe,CAAe;AAAA,gBACfL,EAAK,MAAM;AAAA,eAErB,MAAO,CACL,GAAGZ,EACH,OAAQ,CAAC,KAAMK,EAAa,KAAM3C,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,aAAA+D,EACA,QAAS,EACX,CACF,EAESrC,GACT,CAACW,EAAyCrC,EACzCuC,IAA0E,CACzE,IAAMD,EAAWd,GAAkCxB,EAAO,OAAS,EAAGuC,EAAqB,kBAAkB,EAC7G,MAAO,CACL,GAAGD,EACH,IAAK,IAAMb,GAA8BY,EAAkBC,EAAUtC,EAAQuC,CAAoB,CACnG,CACF,IC5FJ,IAyBayB,GAzBbC,GAAAC,EAAA,kBAMAC,KACAC,KACAC,KAiBaL,GACT,CAACM,EAAyCC,EAA2BC,IAAuC,CAC1G,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAASH,EAAO,CAAC,EAAE,KACnBI,EACFC,GAAqBH,EAAQC,EAAQF,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAG5FK,EAAeP,EAAiB,IAClCQ,GAAoCR,EAAkBC,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGI,EAAaH,CAAU,EACnG,CAACD,EAAO,CAAC,CAAC,CAAC,EAGTQ,EAAiBT,EAAiB,cAAcC,EAAO,CAAC,EAAG,CAACG,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAIA,EAAO,CAAC,CAAC,CAAC,EAGzGM,EACDT,EAAO,SAAW,EAAK,CAACQ,EAAgBF,EAAcN,EAAO,CAAC,CAAC,EAAI,CAACQ,EAAgBF,CAAY,EAC/FI,EAAeX,EAAiB,IAClCY,GAAoCZ,EAAkBU,EAAcR,CAAU,EAAGQ,CAAY,EAIjG,OADuBV,EAAiB,cAAcW,EAAcN,CAAW,CAEjF,ICjDJ,IASMQ,GAOAC,GA6DOC,GAWAC,GAxFbC,GAAAC,EAAA,kBAKAC,KAIMN,GAA+BO,IAAuB,CAC1D,KAAM,SACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,EACjC,UAAAA,CACF,GAEMN,GACF,CAACO,EAA0CC,EAA2BC,EAAWC,EAChFC,EAAgCC,IAA4C,CAC3E,IAAMC,EAASJ,EAAE,KACXK,EAASJ,EAAE,KAEXK,EAAOJ,EAAY,OACnBK,EAAad,GAAoBW,EAAQC,EAAQH,EAAa,CAAC,EAE/DM,EAAe;AAAA,yBACFJ,EAAO,CAAC,CAAC;AAAA,yBACTA,EAAO,CAAC,CAAC;AAAA,yBACTA,EAAO,CAAC,CAAC;AAAA,yBACTD,EAAW,YAAY,CAAC,CAAC;AAAA,yBACzBA,EAAW,YAAY,CAAC,CAAC;AAAA,gCAClBA,EAAW,UAAU,CAAC,CAAC;AAAA,gCACvBA,EAAW,UAAU,CAAC,CAAC;AAAA,8BACzBA,EAAW,QAAQ,CAAC,CAAC;AAAA,8BACrBA,EAAW,QAAQ,CAAC,CAAC;AAAA,2BACxBA,EAAW,KAAK,CAAC,CAAC;AAAA,2BAClBA,EAAW,KAAK,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,mCAIVG,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAajBF,EAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAiB7B,MAAO,CACL,GAAGL,EACH,OAAQ,CAAC,KAAMQ,EAAY,KAAMP,EAAE,KAAM,aAA4C,EACrF,aAAAQ,CACF,CACF,EAEShB,GACT,CAACiB,EAAyCT,EAAWC,EAAWC,EAC/DC,IAAkD,CACjD,IAAMJ,EAAWT,GAA4Ba,EAAW,QAAQ,EAChE,MAAO,CACL,GAAGJ,EACH,IAAK,IAAMR,GAAwBkB,EAAkBV,EAAUC,EAAGC,EAAGC,EAAaC,CAAU,CAC9F,CACF,EAGSV,GACT,CAACiB,EAA+BC,EAAgCT,EAAgCU,EAAW,IAEnG,CAACV,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAC7C,KAAK,KAAKQ,EAAW,CAAC,EAAIC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAIC,CAAQ,CAAC,IC5FlF,IAYMC,GAQAC,GAiDOC
,GArEbC,GAAAC,EAAA,kBAIAC,KACAC,KAEAC,KAEAC,KACAC,KAEMT,GAAkC,CAACU,EAAkBC,KAA8C,CACvG,KAAM,iBACN,WAAYD,EAAU,CAAC,SAAU,IAAK,GAAG,EAAI,CAAC,SAAU,GAAG,EAC3D,WAAYA,EAAU,MAA4E,EAC5E,IAAsD,EAC5E,SAAUC,EAAW,kBACvB,GAEMV,GACF,CAACW,EAAyCC,EAA2BC,EACpEC,EAAuBJ,IAA0D,CAChF,IAAMK,EAASF,EAAO,CAAC,EAAE,KACnBG,EAASH,EAAO,CAAC,EAAE,KACnBI,EAAsB,CAACD,EAAO,CAAC,EAAG,KAAK,KAAMD,EAAO,CAAC,EAAIC,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAK,CAAC,CAAC,EACpFE,EAAcC,GAAoBJ,EAAQC,EAAQF,CAAW,EAC7D,CAACM,EAAQC,CAAO,EAClBV,EAAiB,+BAA+BM,GAAoD,EAElGK,EAAgBC,EAAU,eAAeL,CAAW,EACpD,CAACM,EAAaC,CAAY,EAC5Bd,EAAiB,+BAA+BO,GAA4C,EAC1FQ,EAAOZ,EAAY,OAEnBa,EAAad,EAAO,OAAS,EAAK,MAAQ,QAC1Ce,EAAY,KAAK,KAAKb,EAAO,CAAC,EAAIC,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAI,CAAC,EAC3D,CAAC,mBAAAa,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBrB,CAAU,EACvEsB,EAAOC,GAAQtB,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEuB,EAAe;AAAA,EACzBL,CAAkB;AAAA,4BACQH,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAOGJ,EAAc,CAAC,CAAC,kBAAkBA,EAAc,CAAC,CAAC,kBAC3EA,EAAc,CAAC,CAAC;AAAA,oCACUL,EAAoB,CAAC,CAAC;AAAA,kBACxCU,CAAS;AAAA,wBACHC,CAAS;AAAA,uDACsBJ,CAAW,KAAKC,CAAY;AAAA,uDAC5BL,CAAM,KAAKC,CAAO;AAAA,mBACtDW,EAAK,SAAS,2BAA2BA,EAAK,SAAS;AAAA;AAAA;AAAA;AAAA,IAItEF,CAAe;AAAA;AAAA,GAGb,MAAO,CACL,GAAGlB,EACH,OAAQ,CAAC,KAAME,EAAa,KAAMD,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAqB,CACF,CACF,EAESjC,GACT,CAACU,EAAyCE,EAA2BC,EACpEJ,IAAgE,CAC/D,IAAME,EAAWb,GAAgCc,EAAO,OAAS,EAAGH,CAAU,EAC9E,MAAO,CACL,GAAGE,EACH,IAAK,IAAMZ,GAA4BW,EAAkBC,EAAUC,EAAQC,EAAaJ,CAAU,CACpG,CACF,IC7EJ,IAmBayB,GAyBAC,GAMPC,GAkBAC,GAcAC,GAeAC,GAkBOC,GAcPC,GAjINC,GAAAC,EAAA,kBAGAC,KAKAC,KAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAGajB,GACT,CAACkB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,IAAyC,CACvE,IAAMC,EAAYL,EAAW,CAAC,EACxBM,EAAoBN,EAAW,MAAM,CAAC,EACtCO,EAAcD,EAAkB,OAChCE,EAAcP,EAAY,CAAC,EAE3BQ,EADqBR,EAAY,MAAM,CAAC,EACA,IAAI,CAACS,EAAGC,IAAMD,GAAKA,EAAI,IAAMR,EAAUS,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIP,EAAWQ,CAAC,EAAIR,EAAWQ,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIP,EAAQO,CAAC,GAAKP,EAAQO,CAAC,CAAC,CAAC,EAE5G,MADoB,CAACN,EAAWG,CAAW,EAAE,OAAO,GAAGI,CAAkB,CAE3E,EAWS7B,GACT,CAAC8B,EAAoCC,EAAkBC,KACrD1B,GAAeyB,EAAQC,CAAU,EAC1B/B,GAAO6B,EAAkBC,EAAQC,CAAU,GAGlD/B,GACF,CAAC6B,EAAyCC,EAAkBC,IAAyC,CACnG,IAAMC,EAAqB7B,GAA0B4B,EAAYD,CAAM,EACjEG,EAAWJ,EAAiB,QAAQ,KACpCK,EAAcF,EAAmB,YAAY,CAAC,IAAM,GAAKA,EAAmB,YAAY,CAAC,IAAM,EACrG,OAAIA,EAAmB,MAAQ,EAGtB,CAFQH,EAAiB,IAC5BM,GAA2CN,EAAkBC,EAAQE,CAAkB,EAAGF,CAAM,CACtF,EACLI,GAAeD,EACjB,CAAChC,GAAwB4B,EAAkBC,EAAQE,CAAkB,CAAC,EACpEC,GAAYH,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAK,CAACI,EACzE,CAACE,GAAaP,EAAkBC,EAAQE,CAAkB,CAAC,EAE3D,CAAC9B,GAAe2B,EAAkBC,EAAQE,CAAkB,CAAC,CAExE,EAEE/B,GACF,CAAC4B,EAAyCC,EAA2BC,IAAuC,CAC1G,IAAMM,EAASP,EAAO,CAAC,EAAE,KACnBQ,EAASR,EAAO,CAAC,EAAE,KACnBS,EACFzC,GAAqBuC,EAAQC,EAAQP,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAC5FS,EAAYX,EAAiB,gBAAgBC,EAAO,CAAC,EAAG,CAACO,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAIA,EAAO,CAAC,CAAC,CAAC,EAC1FI,EAAYZ,EAAiB,gBAAgBC,EAAO,CAAC,EAAG,CAACQ,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,CAAC,EAE9EI,EAAeZ,EAAO,OAAS,EAAI,CAACW,EAAWD,EAAWV,EAAO,CAAC,CAAC,EAAI,CAACW,EAAWD,CAAS,EAC5FG,EAAed,EAAiB,IAAIe,GAA8BF,EAAcX,CAAU,EAAGW,CAAY,EAC/G,OAAOb,EAAiB,gBAAgBc,EAAcJ,CAAW,CACnE,EAEErC,GACF,CAAC2B,EAAyCC,EAA2BC,IAAuC,CAC1G,IAAMM,EAASP,EAAO,CAAC,EAAE,KACnBQ,EAASR,EAAO,CAAC,EAAE,KACnBS,EACFzC,GAAqBuC,EAAQC,EAAQP,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAC5Fc,EAAUhB,EAAiB,IAC7BiB,GAA8BjB,EAAkBC,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGS,EAAaR,CAAU,EAAG,CAACD,EAAO,CAAC,CAAC,CAAC,EAEzGiB,EAAmBjB,EAAO,SAAW,EAAI,CAACe,EAASf,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EAAI,CAACe,EAASf
,EAAO,CAAC,CAAC,EAGpG,OAFeD,EAAiB,IAC5BmB,GAAkCnB,EAAkBC,EAAQS,EAAaR,CAAU,EAAGgB,CAAgB,CAE5G,EAEE5C,GAA4B,CAA2B4B,EAAeD,IAAwB,CAClG,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,EACpC,QAAS,EAAI,EAAG,EAAID,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAE,EAC3Cb,EAAY,KAAKa,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAGtC,IAAMmB,EAAOlB,EAAW,KAAK,MAAM,EACnCmB,GAAa,yBACTpB,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAagC,EAAMlB,EAAW,OAAO,EAGnG,IAAMoB,EAAmB,OAAO,OAAO,CAAC,EAAGpB,CAAU,EACrD,cAAO,OAAOoB,EAAe,CAAC,YAAAlC,EAAa,KAAAgC,EAAM,SAAUlB,EAAW,QAAQ,CAAC,EACxEoB,CACT,EAEa/C,GAA+DgD,GAAqC,CAC/G,IAAMrB,EAAaqB,EAAK,WAClBC,EAAuBC,GAAkCvB,CAAU,EAEnEwB,EAAUxB,EAAW,UAAU,WAAY,QAAQ,EACnDb,EAAYa,EAAW,QAAQ,YAAa,CAAC,EAAG,CAAC,CAAC,EAClDyB,EAAQzB,EAAW,OAAO,QAAS,CAAC,EACpCd,EAAcc,EAAW,QAAQ,eAAgB,CAAC,CAAC,EACnDkB,EAAOlB,EAAW,QAAQ,OAAQ,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,EAC9CX,EAAUW,EAAW,QAAQ,UAAW,CAAC,EAAG,CAAC,CAAC,EAEpD,OAAO0B,GAA4B,CAAC,QAAAF,EAAS,UAAArC,EAAW,MAAAsC,EAAO,YAAAvC,EAAa,KAAAgC,EAAM,QAAA7B,EAAS,GAAGiC,CAAoB,CAAC,CACrH,EAEMhD,GAAiB,CAACyB,EAAkBC,IAAqC,CAG7E,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAI7D,IAAM4B,EAAc5B,EAAO,CAAC,EAAE,KAAK,CAAC,EAC9B6B,EAAkB7B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAI2B,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAI7B,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMP,EAAcO,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWR,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIQ,EAAW,QAAQ,SAAWR,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIQ,EAAW,KAAK,SAAWR,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIQ,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAAS,UAC5C,MAAM,IAAI,MAAM,yCAAyC,CAE7D,ICvLA,IAeM8B,GAIAC,GAWAC,GAsBOC,GAMPC,GAMAC,GAQAC,GA2DAC,GAWAC,GAQAC,GAwBOC,GAkBPC,GAhMNC,GAAAC,EAAA,kBAGAC,KAKAC,KAEAC,KAGAC,KAEMjB,GACF,CAACkB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DtB,GAAoB,CAACuB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEM3B,GACF,CAAC4B,EAA+BC,EAAgCC,EAA8BP,EAC7FC,EAAgBO,EAA4BC,EAAkCC,IAA0B,CACvG,IAAMC,EAAcN,EAAW,OAAS,EAClCO,EAAcF,EAAY,SAAW,EAC3C,QAASG,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EAAG,CACpC,IAAMf,EAAUc,EAAcP,EAAWQ,EAAI,CAAC,EAAIL,EAAQK,CAAC,EAAIH,EAAYG,CAAC,EACtEd,EAAWxB,GAAgB8B,EAAWQ,EAAI,CAAC,EAAGL,EAAQK,CAAC,EAAGZ,EAAKY,CAAC,EAAGP,EAAYO,CAAC,EAAGN,EAAUM,CAAC,EAAGf,CAAO,EAC9GtB,GAAkBuB,EAAUC,EAASC,EAAMY,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRF,EAAQK,CAAC,GAAKR,EAAWQ,EAAI,CAAC,EAAI,GAAKJ,EAAcI,CAAC,GAAKP,EAAYO,CAAC,EAAI,GAAKN,EAAUM,CAAC,EAAI,EAChGZ,EAAKY,CAAC,EAAIZ,EAAKY,EAAIF,CAAW,CAAC,CAEvC,CACF,EAOSjC,GACT,CAACoC,EAAoCC,EAAkBC,KACrD9B,GAAe6B,EAAQC,CAAU,EAC1BrC,GAAgBmC,EAAkBC,EAAQC,CAAU,GAG3DrC,GACF,CAACmC,EAAyCC,EAAkBC,IAAkD,CAC5G,IAAMC,EAAqBjC,GAAmCgC,EAAYD,CAAM,EAChF,MAAO,CAAChC,GAAwB+B,EAAkBC,EAAQE,CAAkB,CAAC,CAC/E,EAEErC,GAAqC,CAACsC,EAAkBC,KAAuB,CACnF,KAAM,gBACN,WAAYD,EAAU,CAAC,IAAK,IAAK,GAAG,EAAI,CAAC,IAAK,GAAG,EACjD,WAAYA,EAAU,MAAiE,EACjE,IAA2C,EACjE,UAAAC,CACF,GAEMtC,GACF,CAACiC,EAAyCC,EAA2BK,EACpEJ,IAAqD,CAEpD,IAAMK,EADUN,EAAO,OAAS,EACJ,uBAAyB,MAC/CO,EAASP,EAAO,
CAAC,EAAE,KACnBQ,EAASR,EAAO,CAAC,EAAE,KACnBS,EAAyBD,EAAO,CAAC,EACjCE,EAAwBF,EAAO,CAAC,EAAIP,EAAW,MAC/CN,EAAc,CAACK,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MAAO,GAAGA,EAAW,WAAW,EACjGU,EAAOC,GAAQb,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAAC,mBAAAc,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBd,CAAU,EAEvEe,EAAe;AAAA,gCACKf,EAAW,QAAQ,CAAC,CAAC,KAAKA,EAAW,QAAQ,CAAC,CAAC;AAAA,6BAClDA,EAAW,KAAK,CAAC,CAAC,KAAKA,EAAW,KAAK,CAAC,CAAC;AAAA,IAClEY,CAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAQgBJ,CAAsB;AAAA,oDACRA,CAAsB;AAAA;AAAA,oBAEtDH,CAAS;AAAA,sDACyBI,CAAqB;AAAA,uCACpCA,CAAqB;AAAA,oCACxBF,EAAO,CAAC,CAAC;AAAA,sCACPA,EAAO,CAAC,CAAC;AAAA,uCACRP,EAAW,UAAU,CAAC,CAAC,aAAaA,EAAW,UAAU,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0CAKxDM,EAAO,CAAC,CAAC;AAAA,0CACTA,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAS7CO,CAAe;AAAA,MACfH,EAAK,MAAM;AAAA;AAAA,EAGX,MAAO,CACL,GAAGN,EACH,OAAQ,CAAC,KAAMV,EAAa,KAAMK,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAgB,EACA,QAAS,EACX,CACF,EAEEjD,GACF,CAACgC,EAAyCC,EAA2BC,IAC5C,CACnB,IAAMI,EAAWxC,GAAmCmC,EAAO,OAAS,EAAGC,EAAW,QAAQ,EAC1F,MAAO,CACL,GAAGI,EACH,IAAK,IAAMvC,GAAuCiC,EAAkBC,EAAQK,EAAUJ,CAAU,CAClG,CACF,EAGFjC,GACF,CAAC+B,EAAyCC,EAA2BC,IAEhDF,EAAiB,IAC5BhC,GAA6CgC,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,EAIlG/B,GAAqC,CAAoCgC,EAAeD,IAAwB,CACpH,IAAMT,EAAcU,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,EACpC,QAASH,EAAI,EAAGA,EAAIE,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEF,EAC3CP,EAAY,KAAKS,EAAO,CAAC,EAAE,KAAKF,CAAC,CAAC,EAItC,IAAMZ,EAAOe,EAAW,KAAK,MAAM,EAC7BN,EAAcM,EAAW,YAAY,MAAM,EAC3CX,EAAaU,EAAO,CAAC,EAAE,KAG7BtC,GACI4B,EAAYC,EAAaU,EAAW,UAAWA,EAAW,QAASf,EAAMe,EAAW,QACpFA,EAAW,cAAeN,CAAW,EAGzC,IAAMsB,EAAmB,OAAO,OAAO,CAAC,EAAGhB,CAAU,EACrD,cAAO,OAAOgB,EAAe,CAAC,YAAA1B,EAAa,KAAAL,EAAM,YAAAS,EAAa,SAAUM,EAAW,QAAQ,CAAC,EACrFgB,CACT,EAEa/C,GACRgD,GAA8C,CAC7C,IAAMjB,EAAaiB,EAAK,WAClBC,EAAuBC,GAAkCnB,CAAU,EAEnEhB,EAAUgB,EAAW,UAAU,WAAY,QAAQ,EACnDT,EAAYS,EAAW,QAAQ,YAAa,CAAC,EAAG,CAAC,CAAC,EAClDoB,EAAQpB,EAAW,OAAO,QAAS,CAAC,EACpCV,EAAcU,EAAW,QAAQ,eAAgB,CAAC,CAAC,EACnDP,EAAgBO,EAAW,QAAQ,iBAAkB,CAAC,EAAG,CAAC,CAAC,EAC3DN,EAAcM,EAAW,QAAQ,eAAgB,CAAC,CAAC,EACnDf,EAAOe,EAAW,QAAQ,OAAQ,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,EAC9CR,EAAUQ,EAAW,QAAQ,UAAW,CAAC,EAAG,CAAC,CAAC,EAEpD,OAAOqB,GACH,CAAC,QAAArC,EAAS,UAAAO,EAAW,MAAA6B,EAAO,YAAA9B,EAAa,cAAAG,EAAe,YAAAC,EAAa,KAAAT,EAAM,QAAAO,EAAS,GAAG0B,CAAoB,CAAC,CAClH,EAEEhD,GAAiB,CAAC6B,EAAkBC,IAA8C,CAGtF,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAI7D,IAAMuB,EAAcvB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC9BwB,EAAkBxB,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIuB,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAczB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MAGnD,GAAID,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMyB,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAM7B,EAAcI,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWL,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIK,EAAW,QAAQ,SAAWL,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIK,EAAW,KAAK,SAAWL,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIK,EAAW,cAAc,SAAWL,EACtC,MAAM,IAAI,MAAM,4BAA4BA,CAAW,GAAG,EAK5D,GAAIK,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIC,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAAS,UAC5C,MAAM,IAAI,MAAM,kDAAkD,CAEtE,IClQA,IAeM0B,GAMOC,GAaAC,GAGPC,GAu
BAC,GAOAC,GAKAC,GAUAC,GAlFNC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAMMZ,GAA2B,CAC/B,KAAM,YACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACY,EAAyCC,EAAkBC,KAC1DR,GAAeO,CAAM,EAQd,CAPQD,EAAiB,IAC5B,CACE,GAAGb,GACH,UAAWe,EAAW,SACtB,IAAK,IAAMZ,GAA2BU,EAAkBC,EAAO,CAAC,EAAGC,EAAW,IAAI,CACpF,EACAD,CAAM,CACI,GAGPZ,GACRc,GAA0CC,GAA4B,CAAC,KAAMD,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,CAAC,CAAC,EAEhHb,GACF,CAACe,EAA0CC,EAAeC,IAAgC,CACxF,IAAMC,EAAaF,EAAM,KACzBC,EAAOhB,GAAgBiB,EAAYD,CAAI,EACvC,IAAME,EAAsBjB,GAAegB,EAAYD,CAAI,EACrDG,EAAOF,EAAW,OAIlBG,EAAe;AAAA,QACnBlB,GAAoB,OAAQc,EAAMG,CAAI,CAAC;AAAA,kCACbA,CAAI;AAAA,gBACtBA,CAAI;AAAA;AAAA;AAAA,SAId,MAAO,CACL,GAAGvB,GACH,OAAQ,CAAC,KAAMsB,EAAqB,KAAMH,EAAM,KAAM,aAAiC,EACvF,aAAAK,CACF,CACF,EAEEpB,GAAkB,CAACiB,EAA+BD,KAClDA,GAAQA,EAAK,SAAWC,EAAW,SACrCD,EAAO,CAAC,GAAIC,EAAW,KAAK,CAAE,EAAE,QAAQ,GAEnCD,GAGHf,GAAiB,CAACgB,EAA+BD,KACrDA,EAAOhB,GAAgBiB,EAAYD,CAAI,EAChCK,EAAU,gBAAgBJ,EAAYD,CAAI,GAG7Cd,GAAsB,CAACoB,EAAcN,EAAgBG,IAAyB,CAClF,IAAMI,EAAc,CAAC,EACrBA,EAAY,KAAK,QAAQD,CAAI,cAAcH,CAAI,cAAcA,CAAI,MAAM,EACvE,QAASK,EAAI,EAAGA,EAAIL,EAAM,EAAEK,EAC1BD,EAAY,KAAK,MAAOP,EAAKQ,CAAC,CAAC,SAASA,CAAC,IAAI,EAE/C,OAAAD,EAAY,KAAK,IAAK,EACfA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMpB,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,8BAA8B,CAElD,IC1FA,IAeae,GAqCAC,GAcPC,GAlENC,GAAAC,EAAA,kBAQAC,KAOaL,GACT,CAACM,EAAyCC,EAAkBC,IAAiD,CAC3GN,GAAeK,CAAM,EACrB,IAAME,EAAYD,EAAW,UACvBE,EAAeD,EAAYA,EAC3BE,EAAgBH,EAAW,OAAS,MAAQ,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAClFI,EAAoBJ,EAAW,OAAS,MAC1C,CACED,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGE,EAAWA,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIG,EAAcH,EAAO,CAAC,EAAE,KAAK,CAAC,EAC3FA,EAAO,CAAC,EAAE,KAAK,CAAC,CAClB,EACA,CACEA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIG,EAAcD,EAAWA,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAC3FA,EAAO,CAAC,EAAE,KAAK,CAAC,CAClB,EAQEM,EAAsBP,EAAiB,gBAAgBC,EAAO,CAAC,EAAGK,CAAiB,EAGnFE,EAA2C,CAAC,KAAMH,EAAe,SAAU,GAAGA,CAAa,EAAE,EAC7F,CAACI,CAAe,EAAIC,GAAUV,EAAkB,CAACO,CAAmB,EAAGC,CAAmB,EAG1FG,EAAqB,CACzBV,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIG,EAAcH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIE,EACzEF,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIE,CACtB,EAEA,MAAO,CADQH,EAAiB,gBAAgBS,EAAiBE,CAAkB,CACrE,CAChB,EAEShB,GACRiB,GAA6C,CAE5C,IAAMT,EAAYS,EAAK,WAAW,OAAO,WAAW,EACpD,GAAIT,EAAY,EACd,MAAM,IAAI,MAAM,qCAAqCA,CAAS,mBAAmB,EAEnF,IAAMU,EAAOD,EAAK,WAAW,UAAU,OAAQ,KAAK,EACpD,GAAIC,IAAS,OAASA,IAAS,MAC7B,MAAM,IAAI,MAAM,sBAAsBA,CAAI,mBAAmB,EAE/D,MAAO,CAAC,KAAAA,EAAM,UAAAV,CAAS,CACzB,EAEEP,GAAkBK,GAA2B,CACjD,GAAIA,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyCA,EAAO,MAAM,EAAE,EAK1E,GAAIA,EAAO,CAAC,EAAE,OAAS,UAAYA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,UAAU,mDAAmD,CAE3E,IC5EA,IASaa,GAQAC,GAGPC,GApBNC,GAAAC,EAAA,kBAMAC,KAGaL,GACT,CAACM,EAAyCC,EAAkBC,IAA2B,CACrFN,GAAeK,EAAQC,CAAI,EAE3B,IAAMC,EAAaC,EAAU,aAAaH,EAAO,CAAC,EAAE,KAAMC,CAAI,EAC9D,MAAO,CAACF,EAAiB,gBAAgBC,EAAO,CAAC,EAAGE,CAAU,CAAC,CACjE,EAESR,GAA0DU,GACnEA,EAAK,WAAW,OAAO,OAAQ,CAAC,EAE9BT,GAAiB,CAACK,EAAkBC,IAAuB,CAC/D,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,IAAMK,EAAIL,EAAO,CAAC,EAAE,KAAK,OACzB,GAAIK,IAAM,EACR,MAAM,IAAI,MAAM,iCAAiC,EAGnD,GAAIJ,EAAO,CAACI,GAAKJ,EAAOI,EACtB,MAAM,IAAI,MAAM,cAAc,EAIhC,GAAIL,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,iCAAiC,CAErD,ICtCA,IAeaM,GAfbC,GAAAC,EAAA,kBAeaF,GACT,CAAC,UAAW,UAAW,QAAS,QAAS,OAAQ,SAAU,SAAU,OAAO,IChBhF,IAeaG,GAOAC,GAGPC,GAMAC,GAgDAC,GAMAC,GArFNC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KAMaX,GACT,CAACY,EAAyCC,E
AAkBC,KAC1DT,GAAeQ,EAAQC,EAAW,IAAI,EAE/B,CADQF,EAAiB,IAAIR,GAA8BQ,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CACjG,GAGPZ,GAAmEc,GAC5EC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,OAAQ,CAAC,CAAC,CAAC,EAEnEb,GAAwB,CAC5B,KAAM,SACN,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,IAA2C,CACzD,EAEMC,GACF,CAACc,EAAiCC,EAA2BL,EAAkBM,IAA8B,CAC3G,IAAMC,EAAaP,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCQ,EAAiBR,EAAO,CAAC,EAAE,KAAK,MAAM,EACtCS,EAAc,IAAI,MAAMF,EAAW,OAASC,EAAe,OAAS,CAAC,EAE3EF,EAAOI,EAAU,cAAcJ,EAAMC,EAAW,MAAM,EACtD,IAAMI,EAAyB,CAAC,EAChC,QAASC,EAAI,EAAGA,EAAIH,EAAY,OAAQG,IAMlCA,EAAIN,GACNG,EAAYG,CAAC,EAAIL,EAAWK,CAAC,EAC7BD,EAAa,KAAK,YAAYC,CAAC,iBAAiBA,CAAC,IAAI,GAEjDA,EAAIN,EAAOE,EAAe,QAC5BC,EAAYG,CAAC,EAAIJ,EAAeI,EAAIN,CAAI,EACxCK,EAAa,KAAK,gBAAgBC,EAAIN,CAAI,iBAAiBM,CAAC,IAAI,IAEhEH,EAAYG,CAAC,EAAIL,EAAWK,EAAIJ,EAAe,OAAS,CAAC,EACzDG,EAAa,KAAK,YAAYC,EAAIJ,EAAe,OAAS,CAAC,iBAAiBI,CAAC,IAAI,GAKvF,IAAMC,EAAQJ,EAAY,QAAU,EAC9BK,EAAQP,EAAW,OACnBQ,EAASP,EAAe,QAAU,EAClCQ,EAAe;AAAA,oCACSH,CAAK;AAAA,uBAClBC,CAAK;AAAA,2BACDC,CAAM;AAAA;AAAA,UAEvBJ,EAAa,KAAK;AAAA,SAAY,CAAC;AAAA;AAAA,mBAEtBL,CAAI,uBAAuBC,EAAWD,CAAI,CAAC;AAAA;AAAA,SAGxD,MAAO,CACL,GAAGD,EACH,OAAQ,CAAC,KAAMI,EAAa,KAAMT,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAgB,CACF,CACF,EAEEzB,GACF,CAAC0B,EAAgCjB,EAAkBC,IAAoD,CACrG,IAAMI,EAAW,CAAC,GAAGhB,GAAuB,UAAWY,EAAW,QAAQ,EAC1E,MAAO,CAAC,GAAGI,EAAU,IAAK,IAAMf,GAAwB2B,EAASZ,EAAUL,EAAQC,EAAW,IAAI,CAAC,CACrG,EAEET,GAAiB,CAACQ,EAAkBM,IAAuB,CAC/D,GAAI,CAACN,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAE7C,IAAMkB,EAAalB,EAAO,CAAC,EAAE,KAAK,OAClC,GAAIkB,EAAa,EACf,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIZ,EAAO,CAACY,GAAcZ,EAAOY,EAAa,EAC5C,MAAM,IAAI,MAAM,eAAe,EAEjC,GAAIC,GAAa,QAAQnB,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,oBAAoB,EAEtC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,QACnD,MAAM,IAAI,MAAM,oBAAoB,CAExC,ICtGA,IAmBaoB,GAOPC,GAQOC,GAGAC,GAGPC,GAYAC,GA2DAC,GA/GNC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAUaX,GACT,CAACY,EAAyCC,EAAkBC,KAC1DR,GAAeO,EAAQC,CAAU,EAE1B,CADQF,EAAiB,IAAIR,GAA4BS,EAAQC,CAAU,EAAGD,CAAM,CAC7E,GAGdZ,GAAsB,CAACc,EAAkBC,IAAyC,CACtF,IAAMC,EAASF,EAAK,WAAW,OAAO,SAAU,CAAC,IAAM,EACjDG,EAASH,EAAK,WAAW,OAAO,SAAU,CAAC,IAAM,EACjDI,EAAQJ,EAAK,WAAW,SAAS,QAAS,CAAG,EAC7CK,EAAOL,EAAK,WAAW,SAAS,OAAQ,CAAG,EACjD,OAAOM,GAA4B,CAAC,OAAAJ,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,YAAAJ,CAAW,CAAC,CAC/E,EAEad,GAAiEa,GAC1Ed,GAAoBc,EAAM,EAAK,EAEtBZ,GAAkEY,GAC3Ed,GAAoBc,EAAM,EAAI,EAE5BX,GAA8B,CAACS,EAAkBC,IAAkD,CACvG,IAAMQ,EAAW,CACf,KAAM,OACN,WAAYT,EAAO,SAAW,EAAI,CAAC,IAAK,IAAK,GAAG,EAAI,CAAC,IAAK,GAAG,EAC7D,WAAYA,EAAO,SAAW,EAAI,MAAiE,EACjE,IAA2C,EAC7E,IAAKC,EAAW,QAClB,EAEA,MAAO,CAAC,GAAGQ,EAAU,IAAK,IAAMjB,GAAsBiB,EAAUT,EAAQC,CAAU,CAAC,CACrF,EAEMT,GACF,CAACiB,EAA2BT,EAAkBC,IAA4C,CACxF,IAAMS,EAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BW,EAASX,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACY,EAAGC,CAAC,EAAIC,GAAS,qBACpBJ,EAAQT,EAAW,OAAQU,EAAQV,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGe,EAAc,CAACH,EAAGC,CAAC,EACzB,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAIC,EAAYN,EAAOA,EAAO,OAAS,CAAC,EACpCO,EAAO,GACPhB,EAAW,SACbe,EAAYN,EAAO,CAAC,GAElBT,EAAW,QAAUA,EAAW,OAClCgB,EAAO,8BACEhB,EAAW,QAAU,CAACA,EAAW,OAC1CgB,EAAO,4BACE,CAAChB,EAAW,QAAUA,EAAW,OAC1CgB,EAAO,4BACE,CAAChB,EAAW,QAAU,CAACA,EAAW,SAC3CgB,EAAO,2BAET,IAAMC,EAAOH,EAAY,OACnBI,EAAWnB,EAAO,SAAW,EAAI,SAASA,EAAO,CAAC,EAAE,KAAK,MAAM,KAAO,GACtEoB,EAAapB,EAAO,SAAW,EAAI,8BAAgC,GACnEqB,EAAarB,EAAO,SAAW,EAAI,yBAA2B,GAC9DsB,EAAe;AAAA,kCACOJ,CAAI;AAAA,kBACpBA,CAAI;AAAA,kBACJA,CAAI;AAAA,YACVC,CAAQ;AAAA;AAAA;AAAA;AAAA,YAIRC,CAAU;AAAA;AAAA;AAAA,4BAGMJ,CAAS;AAAA,kBACnBE,EAAO,CAAC;AAAA,kBACRA,EAAO,CAAC;AAAA,gBACVD,CAAI;AAAA;AAAA;AAAA;AAAA,YAIRI,CAA
U;AAAA;AAAA,SAGhB,MAAO,CACL,GAAGZ,EACH,OAAQ,CAAC,KAAMM,EAAa,KAAMf,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,UAAW,CACT,CAAC,KAAM,QAAS,KAAM,QAAS,KAAMC,EAAW,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,QAAS,KAAMA,EAAW,IAAI,CAC7G,EACA,aAAAqB,CACF,CACF,EAEE7B,GAAiB,CAACO,EAAkBC,IAAqC,CAC7E,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIC,EAAW,cAAgBD,EAAO,OAAS,GAAKA,EAAO,OAAS,GAClE,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAI,CAACC,EAAW,aAAeD,EAAO,SAAW,EAC/C,MAAM,IAAI,MAAM,wBAAwB,EAI1C,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAClF,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WACnDA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WACnDA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UAC7E,MAAM,IAAI,MAAM,qBAAqB,EAGvC,GAAKA,EAAO,CAAC,EAAE,OAASA,EAAO,CAAC,EAAE,MAAUA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAASA,EAAO,CAAC,EAAE,KAC9F,MAAM,IAAI,MAAM,4BAA4B,CAEhD,ICxIA,IAeauB,GAQAC,GAOPC,GAMAC,GAsBAC,GAMAC,GAuBAC,GAvFNC,GAAAC,EAAA,kBAGAC,KAKAC,KAOaV,GACT,CAACW,EAAyCC,EAAkBC,KAC1DP,GAAeM,CAAM,EAGd,CADHD,EAAiB,IAAIP,GAAmCO,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CAC3F,GAGPX,GACRa,GAA4C,CAC3C,IAAMC,EAAQD,EAAK,WAAW,SAAS,OAAO,EACxCE,EAAOF,EAAK,WAAW,UAAU,MAAM,EAC7C,OAAOG,GAA4B,CAAC,MAAAF,EAAO,KAAAC,CAAI,CAAC,CAClD,EAEEd,GAA6B,CACjC,KAAM,cACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEMC,GACF,CAACe,EAAiCC,EAA2BP,EAAkBC,IAC5D,CACb,IAAMO,EAAcR,EAAO,CAAC,EAAE,KAAK,MAAM,EACnCS,EAAOD,EAAY,OAEnBE,EAAe;AAAA,QADCjB,GAAoBQ,EAAW,KAAK,MAAM,CAErD;AAAA,kCACaQ,CAAI;AAAA;AAAA,SAG5B,MAAO,CACL,GAAGF,EACH,OAAQ,CAAC,KAAMC,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,UAAW,CACT,CAAC,KAAM,OAAQ,KAAM,QAAS,YAAaC,EAAW,KAAK,OAAQ,KAAMA,EAAW,IAAI,EACxF,CAAC,KAAM,QAAS,KAAM,QAAS,KAAMA,EAAW,KAAK,CACvD,EACA,aAAAS,CACF,CACF,EAEFlB,GACF,CAACmB,EAAgCX,EAAkBC,IAAyD,CAC1G,IAAMM,EAAW,CAAC,GAAGjB,GAA4B,UAAWW,EAAW,QAAQ,EAC/E,MAAO,CAAC,GAAGM,EAAU,IAAK,IAAMhB,GAA6BoB,EAASJ,EAAUP,EAAQC,CAAU,CAAC,CACrG,EAEER,GAAuBmB,GAAgC,CAC3D,IAAMC,EAAsB,CAAC,4BAA4BD,CAAW,mBAAmB,EACvF,QAASE,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EAC7BA,IAAM,EACRD,EAAU,KACN,mBACkBC,CAAC,mBAAmBA,CAAC,MAAM,EACxCA,IAAMF,EAAc,EAC7BC,EAAU,KACN,uBACsBC,CAAC,MAAM,EAEjCD,EAAU,KACN,wBACuBC,CAAC,mBAAmBA,CAAC,MAAM,EAG1D,OAAAD,EAAU,KACN,IACG,EACAA,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMnB,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,+BAA+B,EAEjD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,CAEzC,ICjGA,IAUae,GAWAC,GAGPC,GAMAC,GA2CAC,GAKAC,GAMAC,GAqCAC,GAUAC,GAnINC,GAAAC,EAAA,kBAMAC,KAEAC,KAEaZ,GACT,CAACa,EAAyCC,EAAkBC,IAA8B,CACxFP,GAAeM,CAAM,EAErB,IAAME,EAAkBH,EAAiB,IAAIT,GAAuCU,EAAO,CAAC,CAAC,EAAGA,CAAM,EAItG,MAAO,CAHQD,EAAiB,IAC5BN,GAAqCM,EAAkBC,EAAO,CAAC,EAAGC,EAASC,EAAgB,IAAI,EAC/F,CAACF,EAAO,CAAC,EAAGE,EAAiBF,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,CAAC,CACxC,CAChB,EAESb,GAAwEgB,GACjFA,EAAK,WAAW,SAAS,UAAW,IAAI,EAEtCf,GAAiC,CACrC,KAAM,wCACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEMC,GAAmC,CAACe,EAA2BC,IAA+B,CAClG,IAAMC,EAAQD,EAAM,KAAK,MAAM,EACzBE,EAAUD,EAAM,CAAC,EACjBE,EAAcF,EAAM,CAAC,EAAIA,EAAM,CAAC,EAChCG,EAAc,CAACH,EAAM,CAAC,EAAGC,CAAO,EAEhCG,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOIJ,EAAM,CAAC,CAAC;AAAA;AAAA,6BAENA,EAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAMDE,CAAW;AAAA;AAAA,2BAEpBF,EAAM,CAAC,CAAC;AAAA;AAAA,6BAENA,EAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAORE,CAAW;AAAA;AAAA;AAAA,SAItC,MAAO,CACL,GAAGJ,EACH,OAAQ,CAAC,KAAMK,EAAa,KAAMJ,EAAM,KAAM,aAA4C,EAC1F,aAAAK,CACF,CACF,EAEMpB,GAA0Ce,IAAsC,CACpF,GAAGjB,GACH,IAAK,IAAMC,GAAiCD,GAAgCiB,CAAK,CACnF,GAEMd,GAA+B
,CACnC,KAAM,sCACN,WAAY,CAAC,IAAK,kBAAmB,QAAS,GAAG,EACjD,WAAY,QAAkG,CAChH,EAEMC,GACF,CAACO,EAAyCK,EAA2BC,EAAeJ,EACnFU,IAAyD,CACxD,IAAMC,EAAOC,GAAQd,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACe,EAAcC,CAAa,EAC9BhB,EAAiB,+BAA+BY,GAAqD,EACnG,CAACK,EAAsBC,CAAqB,EAAI,CAACH,EAAe,EAAGC,CAAa,EAChFL,EAAe;AAAA;AAAA;AAAA,+CAGoBM,CAAoB,KAAKC,CAAqB;AAAA,iBAC5EL,EAAK,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAkBzB,MAAO,CACL,GAAGR,EACH,OAAQ,CAAC,KAAMC,EAAM,KAAM,KAAMA,EAAM,KAAM,aAAiC,EAC9E,UAAW,CAAC,CAAC,KAAM,UAAW,KAAM,QAAS,KAAMJ,CAAO,CAAC,EAC3D,aAAAS,CACF,CACF,EAEEjB,GACF,CAACM,EAAyCM,EAAeJ,EAAiBU,IACjD,CACnB,IAAMP,EAAW,CAAC,GAAGb,GAA8B,UAAW,GAAGU,CAAO,EAAE,EAC1E,MAAO,CACL,GAAGG,EACH,IAAK,IAAMZ,GAA+BO,EAAkBK,EAAUC,EAAOJ,EAASU,CAAoB,CAC5G,CACF,EAEFjB,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMkB,EAAIlB,EAAO,CAAC,EACZmB,EAAQnB,EAAO,CAAC,EAChBoB,EAAIpB,EAAO,CAAC,EAIlB,GAAIkB,EAAE,KAAK,OAAS,GAAKC,EAAM,KAAK,SAAW,GAAKC,EAAE,KAAK,SAAW,EACpE,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAID,EAAM,KAAK,CAAC,IAAMD,EAAE,KAAK,CAAC,GAAKE,EAAE,KAAK,CAAC,IAAMF,EAAE,KAAK,CAAC,EACvD,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAKA,EAAE,OAAS,WAAaA,EAAE,OAAS,WAAeC,EAAM,OAAS,WAAaA,EAAM,OAAS,WAC7FC,EAAE,OAAS,WAAaA,EAAE,OAAS,UACtC,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIpB,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,+BAA+B,CAEnD,IC/GA,SAASqB,GAAqBC,EAAkBC,EAAwC,CACtF,IAAMC,EAAIF,EAAO,CAAC,EAAE,KAAK,CAAC,EACpBG,EAAOH,EAAO,CAAC,EAAE,KAAK,OACtBI,EAAO,CAAC,KAAK,OAAOH,EAAW,KAAO,GAAK,CAAC,EAC5CI,EAAK,KAAK,MAAMJ,EAAW,KAAO,GAAK,CAAC,EACxCK,EAAQ,SAASL,EAAW,KAAK,aAAaA,EAAW,IAAI,IAC7DM,EAAO,SAASN,EAAW,IAAI,IAC/BO,EAAO,SAASP,EAAW,IAAI,IAE/BQ,EAAe;AAAA,gCACSN,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,uBAKbC,CAAI,UAAUC,CAAE;AAAA;AAAA,8BAETH,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAMNK,CAAI,MAAMD,CAAK,kBAAkBE,CAAI;AAAA,OAE5D,MAAO,CACL,GAAGE,GACH,UAAWT,EAAW,SACtB,OAAQ,CAAC,KAAMD,EAAO,CAAC,EAAE,KAAM,KAAMA,EAAO,CAAC,EAAE,KAAM,aAAiC,EACtF,aAAAS,CACF,CACF,CAEO,SAASE,GAA2BX,EAAkBC,EAA8C,CACzG,MAAO,CAAC,GAAGS,GAAoB,UAAWT,EAAW,SAAU,IAAK,IAAMF,GAAqBC,EAAQC,CAAU,CAAC,CACpH,CA/EA,IAiBaW,GAYAC,GASPH,GA2CAI,GAjFNC,GAAAC,EAAA,kBAGAC,KAKAC,KASaN,GACT,CAACO,EAAyCnB,EAAkBC,KAC1Da,GAAed,CAAM,EAMd,CAACmB,EAAiB,IAAIR,GAA2BX,EAAQC,CAAU,EAAGD,CAAM,CAAC,GAI7Ea,GAA6DO,GAAoC,CAC5G,IAAMd,EAAQc,EAAK,WAAW,SAAS,QAAS,IAAM,EAChDZ,EAAOY,EAAK,WAAW,SAAS,OAAQ,GAAI,EAC5Cb,EAAOa,EAAK,WAAW,SAAS,OAAQ,CAAG,EAC3CC,EAAOD,EAAK,WAAW,OAAO,MAAM,EAE1C,OAAOE,GAA4B,CAAC,MAAAhB,EAAO,KAAAE,EAAM,KAAAD,EAAM,KAAAc,CAAI,CAAC,CAC9D,EAEMX,GAAqB,CACzB,KAAM,MACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAuCMI,GAAkBd,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,yDAAyD,EAE3E,GAAIA,EAAO,CAAC,EAAE,OAAS,UACrB,MAAM,IAAI,MAAM,4BAA4B,CAEhD,IC3FA,IAkBMuB,GAMOC,GAaAC,GAOAC,GAOAC,GAGPC,GAaAC,GAmBAC,GASAC,GAYAC,GAiBAC,GA0BAC,GA8BAC,GApLNC,GAAAC,EAAA,kBAGAC,KAIAC,KACAC,KAEAC,KAQMlB,GAAqB,CACzB,KAAM,MACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACkB,EAAyCC,EAAkBC,KAC1Dd,GAAiBa,CAAM,EAQhB,CAPQD,EAAiB,IAC5B,CACE,GAAGnB,GACH,UAAWqB,EAAW,SACtB,IAAK,IAAMf,GAAqBa,EAAkBC,EAAO,CAAC,EAAGC,CAAU,CACzE,EACAD,CAAM,CACI,GAGPlB,GAA+DoB,GAAoC,CAC9G,IAAMC,EAAOD,EAAK,WAAW,UAAU,OAAQ,UAAU,EACnDE,EAAQF,EAAK,WAAW,SAAS,QAAS,CAAG,EAC7CG,EAAOH,EAAK,WAAW,QAAQ,MAAM,EAC3C,OAAOI,GAA4B,CAAC,KAAAH,EAAM,MAAAC,EAAO,KAAAC,CAAI,CAAC,CACxD,EAEatB,GACT,CAACgB,EAAyCC,EAAkBG,IAA2B,CACrFf,GAAkBY,CAAM,EACxB,IAAMO,EAAatB,GAAgCc,EAAkBC,EAAQG,CAAI,EACjF,OAAOtB,GAAMkB,EAAkB,CAACC,EAAO,CAAC,CAAC,EAAGO,CAAU,CACxD,EAESvB,GAAyDkB,GAClEA,EAAK,WAAW,U
AAU,OAAQ,UAAU,EAE1CjB,GACF,CAACc,EAAyCC,EAAkBG,IAAgC,CAC1F,GAAI,CAACJ,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GACvDA,EAAO,QAAU,GAAK,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,EACjF,MAAM,IAAI,MAAM,wCAAwC,EAG1D,IAAMK,EAAO,MAAM,KAAKL,EAAO,CAAC,EAAE,WAAW,EACvCI,EAASJ,EAAO,QAAU,EAAKA,EAAO,CAAC,EAAE,UAAU,CAAC,EAAI,EAE9D,OAAOM,GAA4B,CAAC,KAAAH,EAAM,KAAAE,EAAM,MAAAD,CAAK,CAAC,CACxD,EAEElB,GACF,CAACa,EAAyCS,EAAeP,IAA2C,CAClG,IAAMQ,EAAcC,EAAU,SAASF,EAAM,KAAK,MAAM,EAAGP,EAAW,IAAI,EACpEU,EAAOF,EAAY,OAEnBG,EAAe;AAAA,QADDvB,GAAeU,EAAkBS,EAAOP,CAAU,CAEzD;AAAA,0BACOU,CAAI;AAAA;AAAA,SAGxB,MAAO,CACL,KAAM,MACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,EACjC,OAAQ,CAAC,KAAMF,EAAa,KAAMD,EAAM,KAAM,aAAiC,EAC/E,aAAAI,CACF,CACF,EAEEzB,GAAoBa,GAA2B,CACnD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEMZ,GAAqBY,GAA2B,CACpD,GAAI,CAACA,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,4BAA4B,EAE9C,GAAIA,EAAO,CAAC,EAAE,OAAS,QACrB,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,SAC3C,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEMX,GAAiB,CAACU,EAAyCS,EAAeP,IAAsC,CACpH,IAAMY,EAAOC,GAAQf,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACgB,EAAOC,CAAM,EAAIjB,EAAiB,+BAA+BS,EAAM,MAA0B,EAClGS,EAAUP,EAAU,eAAeF,EAAM,IAAI,EAEnD,OAAQP,EAAW,KAAM,CACvB,IAAK,WACH,OAAOX,GAAeuB,EAAML,EAAM,KAAMS,EAASF,EAAOC,EAAQf,EAAW,KAAMA,EAAW,KAAK,EACnG,IAAK,UACH,OAAOV,GAAcsB,EAAML,EAAM,KAAMS,EAASF,EAAOC,EAAQf,EAAW,IAAI,EAChF,IAAK,OACH,OAAOT,GAAWqB,EAAML,EAAM,KAAMS,EAASF,EAAOC,EAAQf,EAAW,IAAI,EAC7E,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMX,GACF,CAACuB,EAAYK,EAA0BD,EAA4BF,EAAeC,EAAgBX,EACjGD,IAA0B,CACzB,IAAMO,EAAOO,EAAM,OACfC,EAAQ,GACZ,QAASC,EAAIT,EAAO,EAAGS,GAAK,EAAG,EAAEA,EAC/BD,GAAS;AAAA,gBACDC,CAAC,OAAOf,EAAKe,CAAC,CAAC;AAAA;AAAA,mBAEZF,EAAME,CAAC,CAAC;AAAA,wBACHH,EAAQG,CAAC,CAAC;AAAA,UAG5B,MAAO;AAAA,yBACYT,CAAI;AAAA,uCACUP,CAAK;AAAA;AAAA;AAAA,UAGlCe,CAAK;AAAA,+CACgCJ,CAAK,KAAKC,CAAM;AAAA,wCACvBH,EAAK,SAAS;AAAA;AAAA;AAAA,OAIlD,EAEEtB,GACF,CAACsB,EAAYK,EAA0BD,EAA4BF,EAAeC,EAAgBX,IACpF,CACR,IAAMM,EAAOO,EAAM,OAEfC,EAAQ,GACZ,QAASC,EAAIT,EAAO,EAAGS,GAAK,EAAG,EAAEA,EAC/BD,GAAS;AAAA,gBACLC,CAAC,OAAOf,EAAKe,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGD,GAAKF,EAAME,CAAC,EAAI,EAAE;AAAA;AAAA,oBAE5BF,EAAME,CAAC,CAAC;AAAA;AAAA,wBAEJH,EAAQG,CAAC,CAAC;AAAA,UAGxB,MAAO;AAAA,yBACQT,CAAI;AAAA;AAAA;AAAA,UAGnBQ,CAAK;AAAA,+CACgCJ,CAAK,KAAKC,CAAM;AAAA,wCACvBH,EAAK,SAAS;AAAA;AAAA;AAAA,OAI9C,EAEFrB,GACF,CAACqB,EAAYK,EAA0BD,EAA4BF,EAAeC,EAAgBX,IACpF,CACR,IAAMM,EAAOO,EAAM,OAEfC,EAAQ,GACZ,QAASC,EAAIT,EAAO,EAAGS,GAAK,EAAG,EAAEA,EAC/BD,GAAS;AAAA,gBACLC,CAAC,OAAOf,EAAKe,CAAC,CAAC;AAAA;AAAA,mBAEZF,EAAME,CAAC,CAAC,SAASF,EAAME,CAAC,EAAI,CAAC;AAAA,wBACxBH,EAAQG,CAAC,CAAC;AAAA,QAGxB,MAAO;AAAA,yBACQT,CAAI;AAAA;AAAA;AAAA,UAGnBQ,CAAK;AAAA,+CACgCJ,CAAK,KAAKC,CAAM;AAAA,wCACvBH,EAAK,SAAS;AAAA;AAAA;AAAA,OAI9C,IC5MR,IAoBaQ,GAUAC,GAiBPC,GAwBOC,GAcAC,GAYAC,GAUAC,GAsBPC,GAoBAC,GAuBAC,GAYAC,GAMOC,GAWPC,GASAC,GAwIAC,GAUAC,GApWNC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAWapB,GACT,CAACqB,EAAyCC,EAAkBC,IAAgD,CAC1GX,GAAeU,CAAM,EACrB,IAAME,EACF,CAAC,KAAM,cAAe,WAAY,CAAC,GAAG,EAAG,WAAY,EAAqB,EAAG,UAAWD,EAAW,QAAQ,EAG/G,MAAO,CAFQF,EAAiB,IAC5B,CAAC,GAAGG,EAAU,IAAK,IAAMtB,GAA6BoB,EAAQE,EAAU,GAAOD,CAAU,CAAC,EAAGD,CAAM,CACzF,CAChB,EAESrB,GACRwB,GAA4C,CAC3C,IAAMC,EAAUD,EAAK,WAAW,UAAU,WAAY,QAAQ,EACxDE,EAAWF,EAAK,WAAW,OAAO,YAAa,CAAC,EAChDG,EAAmBH,EAAK,WAAW,OAAO,oBAAqB,CAAC,IAAM,EACtEI,EAAcJ,EAAK,WAAW,QAAQ,cAAc,EACpDK,EAAUL,EAAK,WAAW,QAAQ,UAAW,CAAC,CAAC,EAC/CM,EAAON,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EAG/C,GAAIE,IAAa,EACf,MAAM,IAAI,MAAM,wEAAwE,EAG1F,OAAOK,G
AA4B,CAAC,QAAAN,EAAS,SAAAC,EAAU,gBAAAC,EAAiB,YAAAC,EAAa,QAAAC,EAAS,KAAAC,CAAI,CAAC,CACrG,EAEE7B,GACF,CAACoB,EAAkBE,EAA2BS,EAA2BV,IACtD,CACb,GAAM,CAACW,EAAoBC,CAAW,EAClC3B,GAAwCc,EAAQC,EAAYU,CAAgB,EAC1EG,EAAaC,EAAU,KAAKH,EAAmB,WAAW,EAC1DI,EAAM,kBACRC,EAAM,GACNL,EAAmB,gBACrBK,GAAO,kBAAkBH,CAAU,KAEnCG,GAAO,kBAAkBH,CAAU,WAGrC,IAAMI,EAAe;AAAA,UADD3B,GAAoBS,EAAO,CAAC,EAAE,KAAMY,EAAoBI,EAAKC,EAAK,KAAK,CAEhF;AAAA,QAEX,MAAO,CACL,GAAGf,EACH,OAAQ,CAAC,KAAMW,EAAa,KAAMb,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAkB,CACF,CACF,EAEKrC,GACT,CAACkB,EAAyCC,EAAkBC,IAAgD,CAC1GX,GAAeU,CAAM,EACrB,IAAME,EAAW,CACf,KAAM,oBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,EACjC,UAAW,GAAGD,EAAW,eAAe,EAC1C,EAGA,MAAO,CAFQF,EAAiB,IAC5B,CAAC,GAAGG,EAAU,IAAK,IAAMtB,GAA6BoB,EAAQE,EAAU,GAAMD,CAAU,CAAC,EAAGD,CAAM,CACxF,CAChB,EAESlB,GACRqB,GAA4C,CAC3C,IAAMG,EAAmBH,EAAK,WAAW,OAAO,oBAAqB,CAAC,IAAM,EAC5E,OAAOO,GACH,CAAC,QAAS,GAAI,SAAU,EAAG,gBAAAJ,EAAiB,YAAa,CAAC,EAAG,QAAS,CAAC,EAAG,KAAM,CAAC,CAAC,CAAC,CACzF,EAOSvB,GACT,CAACgB,EAAyCC,EAAkBC,IAA4C,CACtGX,GAAeU,CAAM,EACrB,IAAME,EACF,CAAC,KAAM,UAAW,WAAY,CAAC,GAAG,EAAG,WAAY,EAAqB,EAAG,UAAWD,EAAW,QAAQ,EAG3G,MAAO,CAFQF,EAAiB,IAC5B,CAAC,GAAGG,EAAU,IAAK,IAAMjB,GAAyBe,EAAQE,EAAU,GAAOD,CAAU,CAAC,EAAGD,CAAM,CACrF,CAChB,EAEShB,GACRmB,GAAwC,CACvC,IAAMC,EAAUD,EAAK,WAAW,UAAU,WAAY,QAAQ,EACxDE,EAAWF,EAAK,WAAW,OAAO,YAAa,CAAC,EAChDI,EAAcJ,EAAK,WAAW,QAAQ,cAAc,EACpDK,EAAUL,EAAK,WAAW,QAAQ,UAAW,CAAC,CAAC,EAC/CM,EAAON,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EACzCgB,EAAehB,EAAK,WAAW,OAAO,gBAAiB,CAAC,EACxDiB,EAAYjB,EAAK,WAAW,QAAQ,YAAa,CAAC,CAAC,EAGzD,GAAIgB,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAId,IAAa,EACf,MAAM,IAAI,MAAM,oEAAoE,EAGtF,OAAOK,GACH,CAAC,QAAAN,EAAS,SAAAC,EAAU,gBAAiB,GAAO,YAAAE,EAAa,QAAAC,EAAS,KAAAC,EAAM,aAAAU,EAAc,UAAAC,CAAS,CAAC,CACtG,EAEEnC,GACF,CAACe,EAAkBE,EAA2BS,EAA2BV,IACtD,CACb,GAAM,CAACW,EAAoBC,CAAW,EAClC3B,GAAwCc,EAAQC,EAAYU,CAAgB,EAC1EK,EAAM;AAAA;AAAA,MAGNC,EAAM,GAENC,EAAe;AAAA,QADD3B,GAAoBS,EAAO,CAAC,EAAE,KAAMY,EAAoBI,EAAKC,EAAK,MAAM,CAEnF;AAAA,MAET,MAAO,CACL,GAAGf,EACH,OAAQ,CAAC,KAAMW,EAAa,KAAMb,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAkB,CACF,CACF,EAEFhC,GACF,CAACc,EAAkBC,EAAqDU,IACb,CACrD,IAAMU,EAAarB,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCsB,EAAe,OAAO,eAAe,KAAKrB,EAAY,WAAW,EACjEM,EAAcN,EAAW,YAAY,MAAM,EAC3CO,EAAUP,EAAW,QAAQ,MAAM,EACnCmB,EAAsBE,EAAgBrB,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCsB,GAAa,qBAAqBZ,EAAkBU,EAAYd,EAAaC,EAASY,EAAWX,CAAI,EAErG,IAAMI,EAAcU,GAAa,uBAC7BZ,EAAkBU,EAAYb,EAASY,EAAWb,EAAaE,EAAMR,EAAW,OAAO,EAErFuB,EAAgB,OAAO,OAAO,CAAC,EAAGvB,CAAU,EAClD,OAAIqB,EACF,OAAO,OAAOE,EAAe,CAAC,YAAAjB,EAAa,QAAAC,EAAS,KAAAC,EAAM,UAAAW,EAAW,SAAUnB,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOuB,EAAe,CAAC,YAAAjB,EAAa,QAAAC,EAAS,KAAAC,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAEnF,CAACuB,EAAeX,CAAW,CACpC,EAEF1B,GAA0B,CAC9B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,EACZ,SAAU,EACZ,EAEMC,GAAwB,CAC5B,KAAM,gBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GAAgB,CAACU,EAAyCC,KACrEV,GAAeU,CAAM,EAOd,CANQD,EAAiB,IAC5B,CACE,GAAGX,GACH,IAAK,IAAMH,GAAyBe,EAAQZ,GAAuB,GAAMD,EAAuB,CAClG,EACAa,CAAM,CACI,GAGVV,GAAkBU,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,4BAA4B,EAE9C,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEMT,GACF,CAACkC,EAA8BxB,EAAmCe,EAAaC,EAAaS,IAC9E,CACR,IAAMC,EAAOF,EAAU,OACvB,GAAIxB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAM2B,EAAK3B,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D4B,EAAK5B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD6B,EAAU7B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD8B,EAAQ9B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClD+B,EAAOP,EAAUE,EAAO,C
AAC,EAC3BM,EAAQ,GACRC,EAAQ,GACRC,EAAW,GAmBf,GAlBIL,EAAUC,IAAU,EACtBE,EAAQ;AAAA,gCACUL,CAAE;AAAA,gBAClBD,CAAI,mBAAmBA,CAAI,WAAWE,CAAE,MAAMC,CAAO;AAAA,oBACjDH,CAAI,kBAAkBA,CAAI,YAAYK,CAAI;AAAA;AAAA;AAAA;AAAA,cAIhDhB,CAAG;AAAA,aAGHiB,EAAQ;AAAA,gCACUL,CAAE;AAAA,gBAClBD,CAAI,mBAAmBA,CAAI,WAAWE,CAAE,MAAMC,CAAO;AAAA,cACvDd,CAAG;AAAA,aAIDf,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMmC,EAAKnC,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoC,EAAKpC,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqC,EAAUrC,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsC,EAAQtC,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuC,EAAOf,EAAUE,EAAO,CAAC,EAC3BW,EAAUC,IAAU,EACtBL,EAAQ;AAAA,kCACUE,CAAE;AAAA,kBAClBT,CAAI,mBAAmBA,CAAI,WAAWU,CAAE,MAAMC,CAAO;AAAA,sBACjDX,CAAI,kBAAkBA,CAAI,YAAYa,CAAI;AAAA,wBACxCZ,CAAE;AAAA;AAAA;AAAA,YAKVM,EAAQ;AAAA,kCACUE,CAAE;AAAA,kBAClBT,CAAI,mBAAmBA,CAAI,WAAWU,CAAE,MAAMC,CAAO;AAAA,cAGzDH,EAAW;AAAA;AAAA,SAGb,CAgBA,MAdoB;AAAA,oCACIR,CAAI;AAAA,kBACtBA,CAAI;AAAA;AAAA;AAAA,0BAGID,CAAK;AAAA;AAAA,YAEnBQ,CAAK;AAAA,YACLD,CAAK;AAAA,YACLE,CAAQ;AAAA,YACRlB,CAAG;AAAA;AAAA;AAAA,OAKL,KAAO,CACL,IAAMH,EAAaC,EAAU,KAAKd,EAAW,WAAW,EAClDwC,EAAgB1B,EAAU,eAAed,EAAW,WAAW,EAC/DyC,EAAcD,EAAc,OAC5BE,EAAW1C,EAAW,KAAK,OAC3B2C,EAA0BnD,GAAgBiD,CAAW,EACrDG,EAAgBrD,GAAUiC,EAAW,WAAW,EAChDqB,EAAWtD,GAAUS,EAAW,KAAM,MAAM,EAC5C8C,EAAoBvD,GAAUiD,EAAe,eAAe,EAC5DO,EAAcxD,GAAUS,EAAW,QAAS,SAAS,EACrDgD,EAAUhD,EAAW,KAAK,OAAO,CAACiD,EAAKC,IAAQD,EAAMC,CAAG,EAC1DC,EAAU,GACd,OAAIH,EACFG,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAQVpC,CAAG;AAAA,aAGHoC,EAAU;AAAA;AAAA,YAEZpC,CAAG;AAAA,UAGiB;AAAA,UACtB4B,CAAuB;AAAA,oCACGjB,CAAI;AAAA,kBACtBA,CAAI;AAAA;AAAA,uBAECe,CAAW;AAAA,qBACbC,CAAQ;AAAA,0BACHhB,CAAI;AAAA,8BACAe,CAAW;AAAA,wBACjBA,CAAW;AAAA,YACvBI,CAAQ;AAAA,YACRD,CAAa;AAAA,YACbG,CAAW;AAAA,YACXD,CAAiB;AAAA;AAAA,0BAEHrB,CAAK;AAAA;AAAA;AAAA,gCAGCZ,CAAU;AAAA;AAAA;AAAA,2BAGfa,CAAI,MAAMe,CAAW,SAASf,CAAI;AAAA,gDACbA,CAAI,MAAMe,CAAW;AAAA,+BACtCf,CAAI,MAAMe,CAAW;AAAA,gBACpCU,CAAO;AAAA;AAAA,YAEXnC,CAAG;AAAA;AAAA;AAAA;AAAA,OAML,CACF,EAEFzB,GAAY,CAAC6D,EAA0BC,IAA8B,CACzE,IAAIC,EAAQ,GACZ,QAASC,EAAI,EAAGA,EAAIH,EAAM,OAAQG,IAChCD,GAAS;AAAA,QACLD,CAAS,IAAIE,CAAC,OAAOH,EAAMG,CAAC,CAAC;AAAA,MAGnC,OAAOD,CACT,EAEM9D,GAAmBkC,GAAyB;AAAA,yCACTA,CAAI,sBAAsBA,CAAI;AAAA,UAC7DA,CAAI;AAAA;AAAA;AAAA,0BAGYA,CAAI;AAAA;AAAA;AAAA;AAAA,cAIhBA,CAAI;OC7WlB,IAmBM8B,GAsBOC,GAMPC,GAoDAC,GAWOC,GAMAC,GAeAC,GAeAC,GAeAC,GAMAC,GAMAC,GA7KbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KAUMhB,GACF,CAACiB,EAAyCC,EAAkBC,EAA8BC,EACzFC,IAAiC,CAChClB,GAAee,CAAM,EAErB,IAAMI,EAAwB,CAC5B,KAAAF,EACA,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAUA,MAAO,CARQH,EAAiB,IAC5B,CACE,GAAGK,EACH,UAAWH,EAAW,SACtB,IAAK,IACDjB,GAAwBe,EAAkBC,EAAQC,EAAYC,EAAMC,EAAUC,CAAqB,CACzG,EACAJ,CAAM,CACI,CAChB,EAESjB,GAAmEsB,GAAuC,CACrH,IAAMC,EAAOD,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EACzCE,EAAWF,EAAK,WAAW,OAAO,WAAY,CAAC,IAAM,EAC3D,OAAOG,GAA4B,CAAC,KAAAF,EAAM,SAAAC,CAAQ,CAAC,CACrD,EAEMvB,GACF,CAACyB,EAAiCT,EAAkBC,EAA8BS,EAAeP,EAChGC,IAAwD,CACvD,IAAMO,EAAwB,CAAC,EACzBC,EAAQZ,EAAO,CAAC,EAAE,KAAK,QAAU,EAEjCa,EAAU,CAAC,EAEXP,EAAOQ,EAAU,cAAcb,EAAW,KAAMD,EAAO,CAAC,EAAE,KAAK,MAAM,EACrEe,EAAMZ,EAASH,EAAQM,CAAI,EAC7BU,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,IAErCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,GACtCL,EAAW,UACbU,EAAY,KAAK,CAAC,EAIpBK,EAAY;AAAA,qBACDC,CAAC,UAAUA,CAAC,MAAMjB,EAAO,CAAC,EAAE,KAAKiB,CAAC,CAAC,MAAMA,CAAC;AAAA,uBACxCA,CAAC,QAAQA,CAAC;AAAA,cACnBD,CAAS;AAAA,eAGbH,EAAQ,KAAK,YAAYI,CAAC,iBAAiBN,EAAY,MAAM,IAAI,EAEjEA,EAAY,KAAKX,EAAO,CAAC,EAAE,KAAKiB,CAAC,CAAC,GAMtC,IAAMC,EAAe;AAAA,oCAFPP,EAAY,QAAU,CAGD;AAAA;AAAA,uBAElBC,CAAK;AAAA,UAClBC,EAAQ,KAAK;AAAA,CAAI,CA
AC;AAAA,UAClBE,EAAI,CAAC,CAAC;AAAA,UACNC,CAAS;AAAA,UACTD,EAAI,CAAC,CAAC;AAAA;AAAA,SAIV,MAAO,CACL,GAAGX,EACH,OAAQ,CAAC,KAAMO,EAAa,KAAMX,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAkB,CACF,CACF,EAEEjC,GAAkBe,GAA2B,CAEjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAImB,GAAa,QAAQnB,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEad,GACT,CAACa,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,YADzB,IAAgB,CAAC,eAAgB,yBAA0B,EAAE,CACf,EAGlEd,GACT,CAACY,EAAyCC,EAAkBC,IAWnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,aAVzB,CAACD,EAAkBM,IAA6B,CACzE,IAAIc,EAAO,EACX,QAASH,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,KACrCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,KAC1Cc,GAAQpB,EAAO,CAAC,EAAE,KAAKiB,CAAC,GAI5B,MAAO,CAAC,eAAgB,yBAA0B,YAAYG,CAAI,IAAI,CACxE,CAC0E,EAGnEhC,GACT,CAACW,EAAyCC,EAAkBC,IAWnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,YAVzB,CAACD,EAAkBM,IAA6B,CACzE,IAAMe,EAAU,CAAC,EACjB,QAASJ,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,KACrCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,IAC1Ce,EAAQ,KAAK,YAAYJ,CAAC,QAAQ,EAItC,MAAO,CAAC,GAAGI,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,uBAA2B,oCAAqC,EAAE,CACjG,CACyE,EAGlEhC,GACT,CAACU,EAAyCC,EAAkBC,IAWnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,YAVzB,CAACD,EAAkBM,IAA6B,CACzE,IAAMe,EAAU,CAAC,EACjB,QAASJ,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,KACrCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,IAC1Ce,EAAQ,KAAK,YAAYJ,CAAC,QAAQ,EAItC,MAAO,CAAC,GAAGI,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,uBAA2B,oCAAqC,EAAE,CACjG,CACyE,EAGlE/B,GACT,CAACS,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,aADzB,IAAgB,CAAC,eAAgB,yBAA0B,EAAE,CACd,EAGnEV,GACT,CAACQ,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,eADzB,IAAgB,CAAC,eAAgB,yBAA0B,qBAAqB,CAC/B,EAGrET,GACT,CAACO,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,qBADzB,IAAgB,CAAC,wBAAyB,oCAAqC,EAAE,CAC1B,IChLxF,IAOaqB,GAPbC,GAAAC,EAAA,kBAIAC,KAGaH,GAAU,CAACI,EAAgCC,IAA+B,CACrF,IAAMC,EAAeC,EAAU,sBAAsBF,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,WAAW,EAC1F,OAAID,EAAQ,QAAQ,KACX,CAACA,EAAQ,cAAcC,EAAO,CAAC,EAAGC,CAAY,CAAC,EAE/C,CAACF,EAAQ,gBAAgBC,EAAO,CAAC,EAAGC,CAAY,CAAC,CAE5D,ICdA,IA6BME,GAMOC,GAaAC,GAGAC,GAGAC,GA+EPC,GAmLOC,GAgBAC,GAxUbC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAoBMZ,GAA0B,CAC9B,KAAM,WACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACY,EAAyCC,EAAkBC,KAC1DT,GAAeQ,EAAQC,CAAU,EAQ1B,CAPQF,EAAiB,IAC5B,CACE,GAAGb,GACH,UAAWe,EAAW,SACtB,IAAK,IAAMV,GAA0BQ,EAAkBC,EAAQC,CAAU,CAC3E,EACAD,CAAM,CACI,GAGPZ,GACRc,GAAyCZ,GAAwBY,EAAM,CAAC,EAEhEb,GACRa,GAAyCZ,GAAwBY,EAAM,CAAC,EAEhEZ,GAA0B,CAACY,EAAkBC,IAAsC,CAC9F,IAAMC,EAAYD,GAAS,GAGrBE,EAAOH,EAAK,WAAW,UAAU,OAAQ,SAAS,EACxD,GAAIG,IAAS,WAAaA,IAAS,WAAaF,EAAQ,IAAME,IAAS,SACrE,MAAM,IAAI,MAAM,sBAAsBA,CAAI,EAAE,EAG9C,IAAIC,EAAmB,CAAC,EACpBH,EAAQ,IACVG,EAASJ,EAAK,WAAW,UAAU,QAAQ,EAC3CT,GAAiBa,EAAQD,EAAMD,CAAQ,GAGzC,IAAMG,EAAqBL,EAAK,WAAW,SAAS,sBAAuB,CAAG,EAExEM,EACFL,EAAQ,GAAKD,EAAK,WAAW,UAAU,iCAAkC,YAAY,EAAI,aAC7F,GAAI,CACE,aAAc,qBAAsB,uBAAwB,gBAAiB,qBAAsB,YACrG,EAAE,QAAQM,CAAuB,IAAM,GACzC,MAAM,IAAI,MAAM,8BAA8BA,CAAuB,oBAAoB,EAE3F,IAAMC,EAAgBD,IAA4B,qBAC5CE,EAAmBD,EAEnBE,EACDN,IAAS,WAAaF,GAAS,GAAMD,EAAK,WAAW,UAAU,eAAgB,oBAAoB,EAAI,GAC5G,GAAI,CAAC,qBAAsB,oBAAqB,QAAS,OAAQ,EAAE,EAAE,QAAQS,CAAW,IAAM,GAC5F,MAAM,IAAI,MAAM,iBAAiBA,CAAW,oBAAoB,EAGlE,IAAMC,EAAoBV,EAAK,WAAW,SAAS,gBAAiB,IAAK,EACnEW,EAAiBX,EAAK,WAAW,OAAO,kBAAmB,CAAC,IAAM,EACxE,GAAIW,GAAkBR,IAAS,QAC7B,MAAM,IAAI,MAAM,0DAA0D,EAG5E,IAAMS,EACDX,EAAQ,GAAM,GAAQE,IAAS,WAAaG,IAA4B,cAAgBG,IAAgB,QAEzGI,EAAc,EACdC,EAAiB,EACjBC,EAAgB,EAEpB,OAAId,EAAQ,GAEND,EAAK,OAAO,OAAS,GACvBa,EAAc,EACdC,EAAiB,EACjBC,EAAgB,IAEhBD,EAAiB,EACjBC,EAAgB,GAETd,IAAU,IACnBa,EAAiB,GAGZE,GAA4B,CACjC,MAAAf,EACA,SAAAC,EACA,KAAAC,EACA,OAAAC,E
ACA,mBAAAC,EACA,wBAAAC,EACA,iBAAAE,EACA,aAAAD,EACA,YAAAE,EACA,kBAAAC,EACA,eAAAC,EACA,yBAAAC,EACA,YAAAC,EACA,eAAAC,EACA,cAAAC,CACF,CAAC,CACH,EAEM1B,GACF,CAACQ,EAAyCC,EAAkBC,IAAgD,CAC1G,IAAMkB,EAAOC,GAAQrB,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACsB,EAAYC,CAAW,EAC1BvB,EAAiB,+BAA+BC,EAAO,CAAC,EAAE,MAA0B,EAElFuB,EAAcvB,EAAO,CAAC,EAAE,KAAK,IAAI,CAACwB,EAAKC,IAAM,KAAK,MAAMD,EAAMvB,EAAW,OAAOwB,CAAC,CAAC,CAAC,EACnF,CAACC,EAAaC,CAAY,EAC5B5B,EAAiB,+BAA+BwB,GAAiC,EAC/EC,EAAMD,EAAY,OAElBK,EAAgB,IAAI,MAAcJ,CAAG,EACrCK,EAAe,IAAI,MAAcL,CAAG,EACtCM,EAAuB;AAAA,2BACNN,CAAG;AAAA,0BACJA,CAAG;AAAA,QAEvB,QAASO,EAAIP,EAAM,EAAGO,GAAK,EAAGA,IAC5BH,EAAcG,CAAC,EAAKA,IAAMP,EAAM,EAAK,EAAII,EAAcG,EAAI,CAAC,EAAIR,EAAYQ,EAAI,CAAC,EACjFF,EAAaE,CAAC,EAAKA,IAAMP,EAAM,EAAK,EAAIK,EAAaE,EAAI,CAAC,EAAI/B,EAAO,CAAC,EAAE,KAAK+B,EAAI,CAAC,EAElFD,GAAwB;AAAA,yBACPC,CAAC,OAAOH,EAAcG,CAAC,CAAC;AAAA,wBACzBA,CAAC,OAAOF,EAAaE,CAAC,CAAC;AAAA,UAGzC,IAAMC,EAAwB;AAAA;AAAA,8CAEUX,CAAU,KAAKC,CAAW;AAAA,wCAChCH,EAAK,SAAS;AAAA;AAAA;AAAA,QAK1Cc,EAAehC,EAAW,OAAS,UAErC;AAAA,MACJ+B,CAAqB;AAAA,gCACKR,CAAG;AAAA;AAAA,qDAEkBE,CAAW,KAAKC,CAAY;AAAA;AAAA,QAEzEG,CAAoB;AAAA;AAAA;AAAA,gCAGIN,CAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAezBA,IAAQ,EAEJ;AAAA,MACRQ,CAAqB;AAAA;AAAA;AAAA,qDAG0BN,CAAW,KAAKC,CAAY;AAAA;AAAA,QAEzEG,CAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCA2BQ9B,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAuBvC;AAAA,MACRgC,CAAqB;AAAA;AAAA;AAAA,qDAG0BN,CAAW,KAAKC,CAAY;AAAA;AAAA,QAEzEG,CAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAoBQ9B,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAsB/C,MAAO,CACL,GAAGd,GACH,OAAQ,CAAC,KAAMqC,EAAa,KAAMvB,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAiC,EACA,UAAW,CAAC,CACV,KAAM,SACN,KAAM,MACN,YAAahC,EAAW,OAAO,OAC/B,KAAMA,EAAW,OAAO,IAAIiC,GAAK,KAAK,KAAKA,CAAC,CAAC,CAC/C,CAAC,CACH,CACF,EAES1C,GAAiB,CAACQ,EAAkBmC,IAAwC,CACvF,GAAI,CAACnC,GAAWmC,EAAU,MAAQ,GAAKnC,EAAO,SAAW,GACpDmC,EAAU,OAAS,GAAKA,EAAU,MAAQ,IAAMnC,EAAO,SAAW,GAClEmC,EAAU,OAAS,IAAMnC,EAAO,OAAS,EAC5C,MAAM,IAAI,MAAM,iBAAiB,EAGnC,GAAImC,EAAU,OAAO,OAAS,GAAKnC,EAAO,CAAC,EAAE,KAAK,SAAWmC,EAAU,OAAO,OAC5E,MAAM,IAAI,MAAM,sBAAsB,EAGxC,GAAInC,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEaP,GAAmB,CAACa,EAAkBD,EAAcD,IAA4B,CAC3F,GAAKA,GAOH,QAAWgC,KAAS9B,EAClB,GAAI8B,GAAS,EACX,MAAM,IAAI,MAAM,uCAAuC,MAR3D,SAAWA,KAAS9B,EAClB,GAAI8B,EAAQ,EACV,MAAM,IAAI,MAAM,mDAAmD,EAUzE,IAAI/B,IAAS,UAAYA,IAAS,UAC5BC,EAAO,SAAW,IAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAClF,MAAM,IAAI,MAAM,+KAELF,EAAW,SAAW,UAAU,YAAY,CAG7D,IC7VA,IAcMiC,GAMOC,GAaAC,GAGAC,GAGPC,GAyKAC,GAiCAC,GAMAC,GAvPNC,GAAAC,EAAA,kBAMAC,KAEAC,KACAC,KAEAC,KACAC,KAEMd,GAAwB,CAC5B,KAAM,SACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAmB,CACjC,EAEaC,GACT,CAACc,EAAyCC,EAAkBC,KAC1DC,GAAeF,EAAQC,CAAU,EAQ1B,CAPQF,EAAiB,IAC5B,CACE,GAAGf,GACH,UAAWiB,EAAW,SACtB,IAAK,IAAMb,GAA8BW,EAAkBC,EAAQC,CAAU,CAC/E,EACAD,CAAM,CACI,GAGPd,GACRiB,GAAyCC,GAAwBD,EAAM,EAAE,EAEjEhB,GACRgB,GAAyCC,GAAwBD,EAAM,EAAE,EAExEf,GACF,CAACW,EAAyCC,EAAkBC,IAAgD,CAC1G,IAAMI,EAAOC,GAAQP,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACQ,EAAQC,CAAW,EAAInB,GAAcW,EAAQC,CAAU,EAI9D,GADIM,EAAO,MAAOE,GAAcA,IAAM,CAAC,GAAKR,EAAW,0BAA4B,qBAEjF,MAAO,CACL,GAAGjB,GACH,OAAQ,CAAC,KAAMwB,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,QAAS,GACT,
aAAc;AAAA,+BACOK,EAAK,SAAS;AAAA,sBACvBA,EAAK,MAAM;AAAA,kBAEzB,EAGF,IAAMK,EAAMF,EAAY,OACxB,GAAIE,EAAM,EACR,MAAM,IAAI,MAAM,kDAAkDA,CAAG,EAAE,EAGzE,IAAMC,EAAeH,EAAYE,EAAM,CAAC,EAClCE,EAAcJ,EAAYE,EAAM,CAAC,EAEjCG,EAAab,EAAO,CAAC,EAAE,KAC7B,GAAIU,IAAQG,EAAW,OACrB,MAAM,IAAI,MAAM,uCAAuCA,EAAW,MAAM,aAAaH,CAAG,EAAE,EAE5F,IAAMI,EAAcD,EAAWH,EAAM,CAAC,EAChCK,EAAaF,EAAWH,EAAM,CAAC,EAE/BM,EAAeT,EAAOG,EAAM,CAAC,EAC7BO,EAAcV,EAAOG,EAAM,CAAC,EAE9BQ,EAAqB,GAEzB,GAAIjB,EAAW,OAAS,SAEtB,MAAM,IAAI,MAAM,2CAA2CA,EAAW,IAAI,GAAG,EAE/E,OAAQA,EAAW,wBAAyB,CAC1C,IAAK,aACHiB,EAAqB;AAAA;AAAA;AAAA;AAAA,kBAKrB,MACF,IAAK,aACHA,EAAqB;AAAA;AAAA;AAAA;AAAA,kBAKrB,MACF,IAAK,qBACHA,EAAqB;AAAA;AAAA;AAAA;AAAA,8BAIDN,CAAW;AAAA,8BACXD,CAAY;AAAA,8BACZC,CAAW;AAAA,8BACXD,CAAY;AAAA;AAAA;AAAA,kBAIhC,MACF,IAAK,gBACHO,EAAqB;AAAA;AAAA,8CAEeN,CAAW,aAAaD,CAAY,aAAaC,CAAW;AAAA,8BAC5ED,CAAY;AAAA,+CACKI,CAAU,aAAaD,CAAW,aAAaC,CAAU;AAAA,8BAC1ED,CAAW;AAAA;AAAA;AAAA;AAAA,kBAK/B,MACF,QAEE,MAAM,IAAI,MAAM,8FACSb,EAAW,uBAAuB,GAAG,CAClE,CAEA,IAAMkB,EAAiBC,GAAkBV,CAAG,EACtCW,EAAgBC,GAAkB,EAClCC,EAAe;AAAA,wCACaT,CAAW,OAAOC,CAAU;AAAA,gDACpBC,CAAY,YAAYC,CAAW,YAAYD,CAAY,YACjGC,CAAW;AAAA,cACPI,CAAa;AAAA,cACbH,CAAkB;AAAA;AAAA;AAAA;AAAA;AAAA,kBAKdC,CAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2CAiBWR,EAAe,CAAC;AAAA,2CAChBC,EAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAsCxCP,EAAK,MAAM;AAAA;AAAA,UAGvB,MAAO,CACL,GAAGrB,GACH,OAAQ,CAAC,KAAMwB,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,QAAS,GACT,aAAAuB,CACF,CACF,EAGElC,GAAgB,CAACW,EAAkBC,IAA2E,CAElH,IAAMuB,EADIxB,EAAO,CAAC,EACF,KAEZO,EAASN,EAAW,OACpBwB,EACJ,GAAIlB,EAAO,SAAW,EAAG,CACvB,IAAMmB,EAAe1B,EAAOC,EAAW,cAAc,EACrD,GAAIyB,GAAgBA,EAAa,OAAS,EAAG,CAC3C,GAAI1B,EAAOC,EAAW,aAAa,EACjC,MAAM,IAAI,MAAM,wDAAwD,EAE1EM,EAASjB,GAAgBoC,EAAczB,EAAW,KAAMA,EAAW,QAAQ,CAC7E,KAAO,CACL,IAAM0B,EAAc3B,EAAOC,EAAW,aAAa,EACnD,GAAI,CAAC0B,GAAeA,EAAY,OAAS,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAGrEF,EAAc,MAAM,KAAKE,EAAY,WAAW,EAChDpB,EAAShB,GAA8BkC,EAAaD,EAAOvB,EAAW,KAAMA,EAAW,QAAQ,CACjG,CACF,SACMD,EAAOC,EAAW,aAAa,EACjC,MAAM,IAAI,MAAM,wDAAwD,EAI5E,IAAM2B,EAAQH,GAAgBD,EAAM,IAAI,CAACd,EAAKmB,IAAM,KAAK,MAAMnB,EAAMH,EAAOsB,CAAC,CAAC,CAAC,EAE/E,MAAO,CAACtB,EAAQqB,CAAK,CACvB,EAEMtC,GAAkB,CAACwC,EAAeC,EAAcC,IAAgC,CACpF,IAAMzB,EAAS,MAAM,KAAKuB,EAAM,SAAS,EACzC,OAAAG,GAAiB1B,EAAQwB,EAAMC,CAAQ,EAChCzB,CACT,EAEMhB,GACF,CAACqC,EAA0BJ,EAA0BO,EAAcC,IAAgC,CACjG,IAAME,EAASV,EAAM,OACfjB,EAAS,IAAI,MAAc2B,CAAM,EAEvC,QAASL,EAAI,EAAGM,EAAMD,EAAQL,EAAIM,EAAKN,IACrC,GAAIL,EAAMK,CAAC,IAAM,EAAG,CAClB,GAAID,EAAMC,CAAC,IAAM,EACf,MAAM,IAAI,MAAM,wDAAwD,EAE1EtB,EAAOsB,CAAC,EAAI,CACd,MACEtB,EAAOsB,CAAC,EAAID,EAAMC,CAAC,EAAIL,EAAMK,CAAC,EAGlC,OAAAI,GAAiB1B,EAAQwB,EAAMC,CAAQ,EAChCzB,CACT,ICxQJ,IAMa6B,GAKPC,GAXNC,GAAAC,EAAA,kBAGAC,KAGaJ,GAAQ,CAACK,EAA0CC,KAC9DL,GAAeK,CAAM,EACd,CAAC,IAAIC,GAAO,CAACD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAG,QAAS,OAAW,OAAW,IAAI,WAAWA,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,GAGtGL,GAAkBK,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,ICfA,IAiBME,GAMOC,GAaAC,GAOPC,GAwCAC,GASOC,GAaPC,GAoBAC,GA7HNC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KAQMb,GAAuB,CAC3B,KAAM,QACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACa,EAAyCC,EAAkBC,KAC1DZ,GAAeW,CAAM,EAQd,CAPQD,EAAiB,IAC5B,CACE,GAAGd,GACH,UAAWgB,EAAW,SACtB,IAAK,IAAMb,GAAuBW,EAAkBC,EAAO,CAAC,EAAGC,CAAU,CAC3E,EACAD,CAAM,CACI,GAGPb,GAAiEe,GAAsC,CAClH,IAAMC,EAASD,EAAK,WAAW,QAAQ,QAAQ,EACzCE,EAAOF,EAAK,WAAW,QAAQ,MAAM,EA
CrCG,EAAOH,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EAC/C,OAAOI,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,EAEMjB,GACF,CAACmB,EAA0CC,EAAeP,IAA6C,CACrG,IAAMI,EAAQJ,EAAW,KAAK,SAAW,EAAKO,EAAM,KAAK,MAAM,CAAC,EAAE,IAAI,CAACC,EAAMC,IAAMA,CAAC,EAAIT,EAAW,KAC7FU,EAAiBC,EAAU,cAAcP,EAAMG,EAAM,KAAK,MAAM,EAChEL,EAASF,EAAW,OAAO,IAAI,CAACY,EAAOH,IACvCG,EAAQL,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAAI,EACnCF,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAE9BE,EAAU,cAAcC,EAAOL,EAAM,KAAKG,EAAeD,CAAC,CAAC,CAAC,CACpE,EACKN,EAAOH,EAAW,KAAK,IAAI,CAACa,EAAKJ,IACjCI,EAAMN,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAAI,EACjCF,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAE9BE,EAAU,cAAcE,EAAKN,EAAM,KAAKG,EAAeD,CAAC,CAAC,CAAC,CAClE,EAEKK,EAAcP,EAAM,KAAK,MAAM,EAE/BQ,EAAqB,CAAC,EAC5B,QAASN,EAAI,EAAGA,EAAIC,EAAe,OAAQD,IACzCK,EAAYJ,EAAeD,CAAC,CAAC,EAAIN,EAAKM,CAAC,EAAIP,EAAOO,CAAC,EAC/CP,EAAOO,CAAC,EAAI,GACdM,EAAS,KAAK,aAAaL,EAAeD,CAAC,CAAC,QAAQP,EAAOO,CAAC,CAAC,GAAG,EAKpE,IAAMO,EAAe;AAAA,oCADRF,EAAY,MAES;AAAA,UAC9BC,EAAS,KAAK;AAAA,OAAU,CAAC;AAAA;AAAA,SAG7B,MAAO,CACL,GAAG/B,GACH,OAAQ,CAAC,KAAM8B,EAAa,KAAMP,EAAM,KAAM,aAAiC,EAC/E,aAAAS,CACF,CACF,EAEE5B,GAAkBW,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAE3C,GAAIkB,GAAa,QAAQlB,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEaV,GAAW,CAACS,EAAyCC,IAA+B,CAC/FR,GAAkBQ,CAAM,EACxB,IAAMC,EAAaV,GAAkCQ,EAAkBC,CAAM,EAQ7E,MAAO,CAPQD,EAAiB,IAC5B,CACE,GAAGd,GACH,UAAWgB,EAAW,SACtB,IAAK,IAAMb,GAAuBW,EAAkBC,EAAO,CAAC,EAAGC,CAAU,CAC3E,EACA,CAACD,EAAO,CAAC,CAAC,CAAC,CACD,CAChB,EAEMT,GACF,CAACQ,EAAyCC,IAAsC,CAC9E,GAAI,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GACxD,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GACvDA,EAAO,QAAU,GAAK,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GAC9EA,EAAO,QAAU,GAAK,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,EACjF,MAAM,IAAI,MAAM,0CAA0C,EAG5D,GAAIA,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,YAAY,KAAMU,GAAcA,IAAM,CAAC,EACzE,MAAM,IAAI,MAAM,kDAAkD,EAGpE,IAAMP,EAAS,MAAM,KAAKH,EAAO,CAAC,EAAE,WAAW,EACzCI,EAAO,MAAM,KAAKJ,EAAO,CAAC,EAAE,WAAW,EACvCK,EAAOL,EAAO,QAAU,EAAI,MAAM,KAAKA,EAAO,CAAC,EAAE,WAAW,EAAI,CAAC,EACjEmB,EAAW,GAAGd,CAAI,IAAIF,CAAM,IAAIC,CAAI,GAC1C,MAAO,CAAC,OAAAD,EAAQ,KAAAC,EAAM,KAAAC,EAAM,SAAAc,CAAQ,CACtC,EAEE3B,GAAqBQ,GAA2B,CACpD,GAAI,CAACA,GAAUA,EAAO,OAAS,GAAKA,EAAO,OAAS,EAClD,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC1D,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC1D,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,QAAU,IAAMA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,GACjF,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,QAAU,IAAMA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,GACjF,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC7IA,IAkBMoB,GAMAC,GAMAC,GAMOC,GAaAC,GAGAC,GASAC,GA2CPC,GA4BAC,GA8CAC,GAiDAC,GA8CAC,GAjRNC,GAAAC,EAAA,kBAGAC,KAIAC,KACAC,KAEAC,KAEAC,KAMMlB,GAAmC,CACvC,KAAM,oBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEMC,GAAqC,CACzC,KAAM,sBACN,WAAY,CAAC,IAAK,KAAK,EACvB,WAAY,IAA2C,CACzD,EAEMC,GAAyB,CAC7B,KAAM,UACN,WAAY,CAAC,IAAK,MAAO,MAAM,EAC/B,WAAY,MAAiE,CAC/E,EAEaC,GACT,CAACgB,EAAyCC,EAAkBC,IAA4C,CACtGV,GAAeS,CAAM,EAErB,IAAME,EAAaF,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCG,EAAOC,EAAU,cAAcH,EAAW,KAAMC,EAAW,MAAM,EACjEG,EAAkBD,EAAU,gBAAgBF,EAAYC,CAAI,EAC5DG,EAAeF,EAAU,kBAAkBF,EAAYC,CAAI,EAGjE,OADehB,GAAeY,EAAkBC,EAAQC,EAAYI,EAAiBC,CAAY,CAEnG,EAEStB,GACRuB,GAAwCC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,OAAQ,CAAC,CAAC,CAAC,EAErGtB,GACRsB,GAAwCC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,OAAQ,EAAE,CAAC,CAAC,EAQtGrB,GACT,CAACa,EAAyCC,EAAkBC,IAA4C,CACtGV,GAAeS,CAAM,EAErB,IAAME,EAAaF,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCG,EAAOC,EAAU,cAAcH,EAAW,KAAMC,EAAW,MAA
M,EACjEO,EAAOP,EAAW,OAElBQ,EAAuBP,IAASM,EAAO,EACvCE,EAAiC,CAAC,EACpCC,EAAiB,CAAC,EAClBC,EAA6B,CAAC,EAC9BC,EAEAJ,IACFE,EAAO,MAAM,KAAK,CAAC,OAAQH,CAAI,CAAC,EAAE,IAAI,CAACM,EAAGC,IAAMA,CAAC,EAGjDJ,EAAKT,CAAI,EAAIM,EAAO,EACpBG,EAAKH,EAAO,CAAC,EAAIN,EAEjBS,EAAK,IAAIK,GAAKN,EAAqB,KAAKT,EAAWe,CAAC,CAAC,CAAC,EAEtDH,EAAqBN,GAA4B,CAAC,KAAAI,CAAI,CAAC,EACvDC,EAAmBK,GAAUnB,EAAkBC,EAAQc,CAAkB,GAG3E,IAAMT,EAAkBK,EAAsBN,EAAU,gBAAgBO,EAAsBF,EAAO,CAAC,EACxDL,EAAU,gBAAgBF,EAAYO,EAAO,CAAC,EACtFH,EAAeI,EAAsBN,EAAU,kBAAkBO,EAAsBF,EAAO,CAAC,EAC1DL,EAAU,kBAAkBF,EAAYO,EAAO,CAAC,EAErFU,EAAShC,GACXY,EAAkBW,EAAsBG,EAAmBb,EAAQC,EAAYI,EAAiBC,CAAY,EAEhH,OAAII,EACqBQ,GAAUnB,EAAkBoB,EAAQL,CAAmB,EAGvEK,CAEX,EAEEhC,GACF,CAACY,EAAyCC,EAAkBC,EAA+BI,EAC1FC,IAAmC,CAClC,IAAMc,EACFhC,GAA4BW,EAAkBC,EAAO,CAAC,EAAGK,EAAiBC,EAAc,CAACD,CAAe,CAAC,EACvGgB,EAAMtB,EAAiB,IACzB,CAAC,GAAGnB,GAAkC,UAAWqB,EAAW,SAAU,IAAK,IAAMmB,CAAqB,EACtGpB,CAAM,EAEJsB,EAA0BjC,GAC5BU,EAAkBC,EAAO,CAAC,EAAGK,EAAiBC,EAAcc,EAAsB,OAAO,KACzF,CAACf,CAAe,CAAC,EACfkB,EAAQxB,EAAiB,IAC3B,CAAC,GAAGlB,GAAoC,UAAWoB,EAAW,SAAU,IAAK,IAAMqB,CAAuB,EAC1G,CAACtB,EAAO,CAAC,EAAGqB,CAAG,CAAC,EAEdG,EAAqBlC,GACvBS,EAAkBC,EAAO,CAAC,EAAGK,EAAiBC,EAAcc,EAAsB,OAAO,KACzFE,EAAwB,OAAO,IAAI,EAIvC,MAAO,CAHQvB,EAAiB,IAC5B,CAAC,GAAGjB,GAAwB,UAAWmB,EAAW,SAAU,IAAK,IAAMuB,CAAkB,EACzF,CAACxB,EAAO,CAAC,EAAGqB,EAAKE,CAAK,CAAC,CACb,CAChB,EAKEnC,GACF,CAACW,EAAyC0B,EAAepB,EAAyBC,EACjFoB,IAAuC,CACtC,GAAM,CAACC,EAAcC,CAAa,EAC9B7B,EAAiB,+BAA+B0B,EAAM,MAA0B,EAC9EhB,EAAOiB,EAAY,OAEzB,GAAIrB,EAAkB,GAAKC,EAAe,EACxC,MAAM,IAAI,MAAM,4EAA4E,EAG9F,GAAIoB,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,GAAIA,EAAY,CAAC,IAAMrB,EACrB,MAAM,IAAI,MAAM,0DAA0D,EAG5E,IAAMwB,EAAOC,GAAQ/B,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEgC,EAAe;AAAA,0BACDtB,CAAI;AAAA,sDACwBH,CAAY;AAAA;AAAA,sCAE5BuB,EAAK,SAAS,gDAAgDF,CAAY;AAAA,UACtGC,CAAa;AAAA,yBACEtB,CAAY;AAAA;AAAA,4CAEOuB,EAAK,SAAS;AAAA,cAC5CF,CAAY,KAAKC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOtC,MAAO,CACL,GAAGhD,GACH,OAAQ,CAAC,KAAM8C,EAAa,KAAMD,EAAM,KAAM,aAAiC,EAC/E,aAAAM,CACF,CACF,EAKE1C,GACF,CAACU,EAAyC0B,EAAepB,EAAyBC,EACjF0B,EAA4CN,IAAuC,CAClF,GAAM,CAACC,EAAcC,CAAa,EAC9B7B,EAAiB,+BAA+B0B,EAAM,MAA0B,EAC9EhB,EAAOiB,EAAY,OAEzB,GAAIrB,EAAkB,GAAKC,EAAe,EACxC,MAAM,IAAI,MAAM,4EAA4E,EAG9F,GAAIoB,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,GAAIA,EAAY,CAAC,IAAMrB,EACrB,MAAM,IAAI,MAAM,0DAA0D,EAG5E,GAAI2B,EAAwB,SAAW,EACrC,MAAM,IAAI,MAAM,wDAAwD,EAG1E,GAAIA,EAAwB,CAAC,IAAM3B,EACjC,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAMwB,EAAOC,GAAQ/B,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEgC,EAAe;AAAA,0BACDtB,CAAI;AAAA,sDACwBH,CAAY;AAAA;AAAA;AAAA;AAAA,yBAIzCA,CAAY;AAAA;AAAA,+CAEUuB,EAAK,SAAS;AAAA,cAC/CF,CAAY,KAAKC,CAAa;AAAA;AAAA;AAAA;AAAA,SAKtC,MAAO,CACL,GAAG/C,GACH,OAAQ,CAAC,KAAM6C,EAAa,KAAMD,EAAM,KAAM,aAAiC,EAC/E,aAAAM,CACF,CACF,EAEEzC,GACF,CAACS,EAAyC0B,EAAepB,EAAyBC,EACjF0B,EAA4CC,IAA+D,CAC1G,GAAM,CAACN,EAAcC,CAAa,EAC9B7B,EAAiB,+BAA+B0B,EAAM,MAA0B,EAC9EhB,EAAOgB,EAAM,KAAK,OAExB,GAAIpB,EAAkB,GAAKC,EAAe,EACxC,MAAM,IAAI,MAAM,4EAA4E,EAG9F,GAAI0B,EAAwB,SAAW,GAAKC,EAA2B,SAAW,EAChF,MAAM,IAAI,MAAM,wDAAwD,EAG1E,GAAID,EAAwB,CAAC,IAAM3B,GAAmB4B,EAA2B,CAAC,IAAM5B,EACtF,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAM0B,EAAe;AAAA,0BACDtB,CAAI;AAAA;AAAA;AAAA,+CAGiBkB,CAAY,KAAKC,CAAa;AAAA;AAAA;AAAA;AAAA,wCAIrCtB,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAY9C,MAAO,CACL,GAAGxB,GACH,OAAQ,CAAC,KAAM2C,EAAM,KAAM,KAAMA,EAAM,KAAM,aAAiC,EAC9E,aAAAM,CACF,CACF,EAEExC,GAAkBS,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,oBAAoB,CAExC,ICzRA,IAiBMkC,GAMOC,GAoBAC,GA
OPC,GAMAC,GAqBAC,GA7ENC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAQMV,GAAuB,CAC3B,KAAM,QACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACU,EAAyCC,EAAkBC,IAA0C,CACpGR,GAAeO,CAAM,EAErB,IAAME,EAAOC,EAAU,cAAcF,EAAW,KAAMD,EAAO,CAAC,EAAE,KAAK,MAAM,EACrEI,EAAQb,GAAgBQ,EAAkBC,EAAQE,EAAMD,CAAU,EAClEI,EAAmB,CAAC,EAC1B,QAASC,EAAI,EAAGA,EAAIF,EAAO,EAAEE,EAC3BD,EAAO,KAAKN,EAAiB,IACzB,CACE,GAAGX,GACH,UAAW,GAAGa,EAAW,QAAQ,IAAIK,CAAC,GACtC,IAAK,IAAMd,GAAuBO,EAAkBC,EAAO,CAAC,EAAGC,EAAYC,EAAMI,CAAC,CACpF,EACAN,CAAM,CAAC,EAGb,OAAOK,CACT,EAESf,GAAiEiB,GAAsC,CAClH,IAAML,EAAOK,EAAK,WAAW,OAAO,OAAQ,CAAC,EACvClB,EAAQkB,EAAK,WAAW,QAAQ,QAAS,CAAC,CAAC,EAC3CC,EAAaD,EAAK,QAAQ,OAChC,OAAOE,GAA4B,CAAC,KAAAP,EAAM,MAAAb,EAAO,WAAAmB,CAAU,CAAC,CAC9D,EAEMjB,GACF,CAACmB,EAA0CV,EAAkBE,EAAcD,IAAwC,CACjH,GAAM,CAAC,CAAEU,CAAO,EAAIC,GAAU,WAAWZ,EAAO,CAAC,EAAE,KAAME,EAAMD,EAAW,MAAOA,EAAW,UAAU,EACtG,OAAOU,EAAQ,MACjB,EAEEnB,GACF,CAACkB,EAA0CG,EAAeZ,EAA6BC,EAAcY,IAClF,CACb,GAAM,CAACC,EAAQJ,CAAO,EAAIC,GAAU,WAAWC,EAAM,KAAMX,EAAMD,EAAW,MAAOA,EAAW,UAAU,EAClGe,EAASL,EAAQG,CAAK,EACtBG,EAAcF,EAAOD,CAAK,EAE1BI,EAAe;AAAA,kCADRD,EAAY,MAEG;AAAA,kBACpBf,CAAI,QAAQc,CAAM;AAAA;AAAA;AAAA,MAI1B,MAAO,CACL,GAAG5B,GACH,UAAW,GAAGa,EAAW,QAAQ,IAAIa,CAAK,GAC1C,OAAQ,CAAC,KAAMG,EAAa,KAAMJ,EAAM,KAAM,aAAiC,EAC/E,aAAAK,CACF,CACF,EAEFzB,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,OAAS,QAAUA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,SAC9EA,EAAO,CAAC,EAAE,OAAS,UAAYA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,UAChFA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,OACrF,MAAM,IAAI,MAAM,qBAAqB,CAEzC,ICvFA,IASamB,GAQAC,GAKAC,GAGPC,GAUAC,GAnCNC,GAAAC,EAAA,kBAMAC,KAGaP,GACT,CAACQ,EAAyCC,EAAkBC,IAA6B,CACvFP,GAAeM,CAAM,EACrB,IAAME,EAAcC,EAAU,aAAaH,EAAO,CAAC,EAAE,KAAMC,CAAI,EAE/D,MAAO,CADQF,EAAiB,gBAAgBC,EAAO,CAAC,EAAGE,CAAW,CACxD,CAChB,EAESV,GAAa,CAACO,EAAyCC,KAClEL,GAAkBK,CAAM,EACjBT,GAAQQ,EAAkB,CAACC,EAAO,CAAC,CAAC,EAAG,MAAM,KAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,GAGpEP,GAA4DW,GACrEA,EAAK,WAAW,QAAQ,MAAM,EAE5BV,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEML,GAAqBK,GAA2B,CACpD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,4BAA4B,EAG9C,GAAIA,EAAO,CAAC,EAAE,OAAS,QACrB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC3CA,IAQaK,GAcPC,GAmBAC,GAzCNC,GAAAC,EAAA,kBAIAC,KAEAC,KAEaN,GAAM,CAACO,EAAyCC,IAA+B,CAC1FN,GAAeM,CAAM,EAErB,IAAMC,EAAqB,CACzB,KAAM,MACN,WAAYD,EAAO,IAAI,CAACE,EAAI,IAAM,IAAI,CAAC,EAAE,EACzC,WAAY,IAAI,MAAMF,EAAO,MAAM,EAAE,MAAyB,CAChE,EAIA,MAAO,CAFQD,EAAiB,IAC5B,CAAC,GAAGE,EAAoB,IAAK,IAAMR,GAAqBM,EAAkBC,EAAQC,CAAkB,CAAC,EAAGD,CAAM,CACpG,CAChB,EAEMP,GACF,CAACM,EAAyCC,EAAkBC,IAAqD,CAC/G,IAAME,EAAOC,GAAQL,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEM,EAAcL,EAAO,CAAC,EAAE,KAAK,MAAM,EAEnCM,EAAe;AAAA;AAAA,wBADLN,EAAO,IAAI,CAACE,EAAIK,IAAM,GAAGJ,EAAK,SAAS,KAAKI,CAAC,aAAa,EAAE,KAAK,KAAK,CAG7D;AAAA,UACrBJ,EAAK,MAAM;AAAA;AAAA,MAGf,MAAO,CACL,GAAGF,EACH,OAAQ,CAAC,KAAMI,EAAa,KAAML,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,QAAS,GACT,aAAAM,CACF,CACF,EAEEZ,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAMQ,EAASR,EAAO,CAAC,EAAE,KAAK,OAC9B,QAASO,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IAAK,CACtC,GAAIC,IAAWR,EAAOO,CAAC,EAAE,KAAK,OAC5B,MAAM,IAAI,MAAM,8BAA8B,EAGhD,QAASE,EAAI,EAAGA,EAAID,EAAQC,IAC1B,GAAIT,EAAO,CAAC,EAAE,KAAKS,CAAC,IAAMT,EAAOO,CAAC,EAAE,KAAKE,CAAC,EACxC,MAAM,IAAI,MAAM,+BAA+B,CAGrD,CAEA,GAAIT,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,EAEvC,QAASO,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjC,GAAIP,EAAO,
CAAC,EAAE,OAASA,EAAOO,CAAC,EAAE,KAC/B,MAAM,IAAI,MAAM,8BAA8B,CAGpD,ICnEA,IAQaG,GAePC,GA0BAC,GAjDNC,GAAAC,EAAA,kBAGAC,KAGAC,KAEaN,GAAO,CAACO,EAAyCC,IAA+B,CAC3FN,GAAeM,CAAM,EAErB,IAAMC,EAAsB,CAC1B,KAAM,OACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAKA,MAAO,CAHQF,EAAiB,IAC5B,CAAC,GAAGE,EAAqB,IAAK,IAAMR,GAAsBM,EAAkBC,EAAQC,CAAmB,CAAC,EACxGD,CAAM,CACI,CAChB,EAEMP,GACF,CAACS,EAAiCF,EAAkBC,IAAsD,CACxG,IAAME,EAAaH,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCI,EAAc,IAAI,MAAMD,EAAW,MAAM,EAEzCE,EAAoB,CAAC,EAC3B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQG,IACrCF,EAAYE,CAAC,EAAIH,EAAWG,CAAC,EAAIN,EAAO,CAAC,EAAE,WAAWM,CAAC,EACvDD,EAAQ,KAAK,YAAYC,CAAC,+BAA+BA,CAAC,OAAOH,EAAWG,CAAC,CAAC,MAAM,EAGtF,IAAMC,EAAOH,EAAY,OACnBI,EAAe;AAAA,oCACSD,CAAI;AAAA,uBACjBA,CAAI;AAAA,UACjBF,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA;AAAA;AAAA,MAItB,MAAO,CACL,GAAGJ,EACH,OAAQ,CAAC,KAAMG,EAAa,KAAMJ,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAQ,CACF,CACF,EAEEd,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,wBAAwB,EAE1C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,0CAA0C,EAE5D,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,OACvC,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIS,GAAa,QAAQT,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,QACnD,MAAM,IAAI,MAAM,sBAAsB,CAE1C,ICjEA,IASaU,GAQAC,GAKAC,GAGPC,GAUAC,GAnCNC,GAAAC,EAAA,kBAMAC,KAGaP,GACT,CAACQ,EAAyCC,EAAkBC,IAA6B,CACvFP,GAAeM,CAAM,EACrB,IAAME,EAAcC,EAAU,eAAeH,EAAO,CAAC,EAAE,KAAMC,CAAI,EAEjE,MAAO,CADQF,EAAiB,gBAAgBC,EAAO,CAAC,EAAGE,CAAW,CACxD,CAChB,EAESV,GAAe,CAACO,EAAyCC,KACpEL,GAAkBK,CAAM,EACjBT,GAAUQ,EAAkB,CAACC,EAAO,CAAC,CAAC,EAAG,MAAM,KAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,GAGtEP,GAA8DW,GACvEA,EAAK,WAAW,QAAQ,MAAM,EAE5BV,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEML,GAAqBK,GAA2B,CACpD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,EAGhD,GAAIA,EAAO,CAAC,EAAE,OAAS,QACrB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC3CA,IAoCaK,GApCbC,GAAAC,EAAA,kBAKAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEahC,GAAuD,CAClE,CAAC,MAAO,GAAI,KAAeiC,EAAG,EAC9B,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAEhC,CAAC,cAAe,GAAI,KAAMC,GAAaC,EAA0B,EACjE,CAAC,qBAAsB,GAAI,KAAMC,GAAoBC,EAAiC,EACtF,CAAC,OAAQ,GAAI,KAAMC,GAAMC,EAAmB,EAC5C,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,OAAQ,GAAI,OAAiBC,GAAeC,EAAmB,EAChE,CAAC,OAAQ,GAAI,MAAgBC,EAAO,EACpC,CAAC,SAAU,GAAI,KAAMC,GAAQC,EAAqB,EAClD,CAAC,OAAQ,GAAI,KAAMC,GAAMC,EAAmB,EAC5C,CAAC,gBAAiB,GAAI,KAAMC,GAAeC,EAA4B,EACvE,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,UAAW,GAAI,KAAeC,EAAQ,EACvC,CAAC,eAAgB,GAAI,KAAMC,GAAcC,EAA2B,EACpE,CAAC,QAAS,GAAI,KAAgBC,EAAK,EACnC,CAAC,MAAO,GAAI,KAAeC,GAAcC,EAAkB,EAC3D,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,UAAW,GAAI,KAAMC,GAASC,EAAsB,EACrD,CAAC,QAAS,GAAI,KAAeC,EAAK,EAClC,CAAC,YAAa,gBAAiB,KAAMf,GAAMC,EAAmB,EAC9D,CAAC,SAAU,GAAI,KAAMe,GAAQC,EAAqB,EAClD,CAAC,OAAQ,GAAI,OAAQC,GAAMC,EAAqB,EAChD,CAAC,OAAQ,GAAI,MAAOD,GAAME,EAAsB,EAChD,CAAC,oBAAqB,GAAI,KAAMC,GAAmBC,EAAgC,EACnF,CAAC,gBAAiB,GAAI,KAAMC,EAAa,EACzC,CAAC,UAAW,GAAI,KAAgBC,EAAO,EACvC,CAAC,WAAY,GAAI,KAAelB,EAAQ,EACxC,CAAC,cAAe,GAAI,KAAMmB,GAAaC,EAA0B,EACjE,CAAC,wBAAyB,GAAI,KAAMC,GAAuBC,EAAoC,EAC/F,CAAC,YAAa,GAAI,KAAeC,GAAoBC,EAAwB,EAC7E,CAAC,OAAQ,GAAI,KAAgBC,EAAI,EACjC,CAAC,MAAO,GAAI,KAAMC,GAAKC,EAAkB,EACzC,
CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,SAAU,GAAI,KAAMC,GAAQC,EAAqB,EAElD,CAAC,UAAW,GAAI,KAAMC,GAASC,EAAsB,EACrD,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,KAAM,GAAI,KAAgBC,EAAE,EAC7B,CAAC,MAAO,GAAI,OAAQC,GAAOC,EAAoB,EAC/C,CAAC,MAAO,GAAI,MAAOC,GAAQC,EAAqB,EAChD,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,QAAS,GAAI,KAAgBC,EAAK,EACnC,CAAC,eAAgB,GAAI,KAAMC,GAAcC,EAAqB,EAC9D,CAAC,YAAa,GAAI,KAAMC,GAAWD,EAAqB,EACxD,CAAC,aAAc,GAAI,KAAME,GAAYF,EAAqB,EAC1D,CAAC,YAAa,GAAI,KAAMG,GAAWH,EAAqB,EACxD,CAAC,aAAc,GAAI,KAAMI,GAAYJ,EAAqB,EAC1D,CAAC,YAAa,GAAI,OAAQK,GAAWL,EAAqB,EAC1D,CAAC,kBAAmB,GAAI,KAAMM,GAAoBN,EAAqB,EACvE,CAAC,OAAQ,GAAI,KAAeO,EAAI,EAChC,CAAC,UAAW,GAAI,KAAMC,EAAO,EAC7B,CAAC,SAAU,GAAI,KAAMC,GAAQC,EAAwB,EACrD,CAAC,SAAU,GAAI,MAAOD,GAAQE,EAAwB,EACtD,CAAC,QAAS,GAAI,KAAMC,EAAK,EACzB,CAAC,UAAW,GAAI,KAAeC,EAAO,EACtC,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,QAAS,GAAI,MAAOC,EAAQ,EAC7B,CAAC,QAAS,GAAI,MAAOC,GAAOC,EAAoB,EAEhD,CAAC,UAAW,GAAI,OAAQC,GAASC,EAAsB,EACvD,CAAC,UAAW,GAAI,MAAOC,GAAYC,EAAyB,EAK5D,CAAC,QAAS,GAAI,OAAQC,GAAOC,EAAoB,EACjD,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,UAAW,GAAI,OAAQC,GAASC,EAAsB,EACvD,CAAC,UAAW,GAAI,MAAOC,EAAU,EACjC,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,MAAO,GAAI,KAAMC,EAAG,EACrB,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,OAAQ,GAAI,KAAMC,EAAI,EACvB,CAAC,YAAa,GAAI,KAAMC,GAAWC,EAAwB,EAC3D,CAAC,WAAY,GAAI,MAAOC,GAAUC,EAAyB,EAC3D,CAAC,WAAY,GAAI,IAAKD,GAAUE,EAAyB,EACzD,CAAC,YAAa,GAAI,OAAQC,GAAWC,EAAwB,EAC7D,CAAC,YAAa,GAAI,MAAOC,EAAY,EACrC,CAAC,MAAO,GAAI,KAAgBC,EAAG,CACjC,ICrHO,SAASC,GAAeC,EAAwB,CACrD,IAAMC,EAAiG,CAAC,EACpGC,EACJ,MAAQA,EAAQC,GAAsB,KAAKH,CAAM,KAAO,MAAM,CAC5D,IAAMI,EAASF,EAAM,CAAC,EACF,MAAM,GAAG,EACT,IAAIG,GAAK,CACR,IAAMC,EAASD,EAAE,KAAK,EAAE,MAAM,GAAG,EACjC,OAAIC,GAAUA,EAAO,SAAW,EACvB,CAAC,KAAMA,EAAO,CAAC,EAAG,KAAMA,EAAO,CAAC,CAAC,EAEnC,IACT,CAAC,EACA,OAAOC,GAAKA,IAAM,IAAI,EAC1CN,EAAWC,EAAM,CAAC,CAAC,EAAI,CAAC,OAAAE,EAAQ,KAAMF,EAAM,CAAC,CAAC,CAChD,CACA,QAAWM,KAAQP,EAAY,CAC7B,IAAMQ,EAAcC,GAAgB,QAAQ,WAAYF,CAAI,EACtDG,EAAQ,IAAI,OAAOF,EAAa,IAAI,EAC1C,MAAQP,EAAQS,EAAM,KAAKX,CAAM,KAAO,MAAM,CAC5C,IAAMY,EAAOV,EAAM,CAAC,EACdW,EAAWX,EAAM,CAAC,EAClBE,EAASF,EAAM,CAAC,EAAE,MAAM,GAAG,EAC3BY,EAAYF,EAAQ,GAAGA,CAAI,IAAIC,CAAQ,IAAM,GAC/CE,EAAkBd,EAAWO,CAAI,EAAE,KACnCQ,EAAiB,GACrBf,EAAWO,CAAI,EAAE,OAAO,QAAQ,CAACD,EAAGU,IAAM,CACpCV,IACFS,GAAkB,GAAGT,EAAE,IAAI,IAAIA,EAAE,IAAI,MAAMH,EAAOa,CAAC,CAAC;AAAA,EAExD,CAAC,EACDF,EAAU,GAAGC,CAAc;AAAA,GAAMD,CAAO,GACxCA,EAAUA,EAAQ,QAAQ,SAAU,GAAGF,CAAQ,KAAK,EACpD,IAAMK,EAAc;AAAA,QAClBJ,CAAQ;AAAA;AAAA,UAENC,CAAO;AAAA;AAAA,QAGXf,EAASA,EAAO,QAAQE,EAAM,CAAC,EAAGgB,CAAW,CAC/C,CACF,CACA,OAAAlB,EAASA,EAAO,QAAQG,GAAuB,EAAE,EAC1CH,CACT,CApDA,IAGMG,GACAO,GAJNS,GAAAC,EAAA,kBAGMjB,GAAwB,qFACxBO,GAAkB,+DCqJjB,SAASW,GAAaC,EAAiBC,EAA2D,CACvG,IAAMC,EAAqB,CAAC,EACtBC,EAAqB,CAAC,EACtBC,EAAeH,GAAQ,MAAQ,MAAM,QAAQA,CAAI,GAAKA,EAAK,SAAW,EACtEI,EAAQJ,GAAQ,MAAQG,EAAgB,KAAOE,GAAeL,EAAMD,CAAK,EAAE,KAAK,EAClFO,EAAI,EACR,QAASC,EAAI,EAAGA,EAAIR,EAAM,OAAQ,EAAEQ,EAAG,CACrC,GAAIH,GAAQ,KAAM,CAChB,GAAIA,EAAKE,CAAC,IAAMC,GAAKR,EAAMQ,CAAC,IAAM,EAChC,MAAM,IAAI,MAAM,sBAAsBA,CAAC,mBAAmBR,EAAMQ,CAAC,CAAC,YAAY,GAE3EH,EAAKE,CAAC,GAAK,MAAQF,EAAKE,CAAC,EAAIC,IAAMR,EAAMQ,CAAC,IAAM,IACnDN,EAAS,KAAKF,EAAMQ,CAAC,CAAC,EACtBL,EAAS,KAAKK,CAAC,GAEbH,EAAKE,CAAC,GAAKC,GACbD,GAEJ,CACIP,EAAMQ,CAAC,IAAM,IACfN,EAAS,KAAKF,EAAMQ,CAAC,CAAC,EACtBL,EAAS,KAAKK,CAAC,EAEnB,CACA,MAAO,CAAC,SAAAN,EAAU,SAAAC,CAAQ,CAC5B,CAEO,SAASG,GAAeL,EAAuBD,EAA2B,CAC/E,IAAMS,EAAOT,EAAM,OAGnB,OAAAC,EAAOA,GAAQ,KAAOD,EAAM,IAAI,CAACU,EAAIF,IAAMA,CAAC,EAAK,CAAC,EAAe,OAAOP,C
AAI,EAG5EU,GACIV,EAAK,MAAMW,GAAMA,GAAM,CAACH,GAAQG,EAAKH,CAAI,EACzC,IAAM,+CAA+CA,CAAI,KAAKA,CAAI,kBAClDR,CAAI,EAAE,EAG1BU,GACIV,EAAK,MAAMY,EAAK,EAChB,IAAM,0DACUZ,CAAI,EAAE,EAGnBA,EAAK,IAAIa,GAAKA,EAAI,EAAIL,EAAOK,EAAIA,CAAC,CAC3C,CACO,SAASD,GAAMC,EAAoB,CACxC,OAAOA,EAAI,IAAM,CACnB,CACO,SAASC,GAAcf,EAAyB,CACrD,GAAIA,EAAM,SAAW,EAEnB,MAAO,GAET,IAAIgB,EAAOhB,EAAM,CAAC,EAClB,QAASQ,EAAI,EAAGA,EAAIR,EAAM,OAAQQ,IAChCQ,GAAQhB,EAAMQ,CAAC,EAEjB,OAAOQ,CACT,CAQO,SAASC,GAAoBD,EAAgC,CAClE,IAAME,EAAQ,KAAK,KAAK,KAAK,KAAKF,CAAI,CAAC,EACvC,MAAO,CAACE,EAAO,KAAK,KAAKF,EAAOE,CAAK,CAAC,CACxC,CAjOA,IA8DaC,GA9DbC,GAAAC,EAAA,kBAGAC,KACAC,KA0DaJ,GAAN,KAA6D,CAClE,YAAmBK,EAAwB,CAAxB,oBAAAA,CAAyB,CAC5C,iBAAiBxB,EAA0ByB,EAA4C,CACrF,IAAMC,EAAK,KAAK,eAAe1B,EAAOyB,CAAK,EAK3C,OAJIA,GAASA,EAAM,WACjBC,EAAG,CAAC,GAAK,EACTA,EAAG,CAAC,GAAK,GAEPD,GAASA,EAAM,UACV,CAACC,EAAG,CAAC,EAAGA,EAAG,CAAC,CAAC,EAEfA,CACT,CAEA,eAAe1B,EAA0ByB,EAA4C,CACnF,IAAME,EAAWF,GAASA,EAAM,SAEhC,GAAIzB,EAAM,SAAW,EACnB,OAAO2B,EAAW,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAElC,IAAIH,EAAiB,KAAK,eAC1B,GAAIC,GAASA,EAAM,YAAc,OAAW,CAE1C,IAAMG,EAAQH,EAAM,WAAazB,EAAM,OAAS,EAAIA,EAAM,MAAMyB,EAAM,SAAS,EAAE,OAAO,CAACX,EAAGe,IAAMf,EAAIe,CAAC,EACjGC,EAAQL,EAAM,WAAa,EAAI,EAAIzB,EAAM,MAAM,EAAGyB,EAAM,SAAS,EAAE,OAAO,CAACX,EAAGe,IAAMf,EAAIe,CAAC,EAC/F,GAAID,EAAQJ,GAAkBM,EAAQN,EAGpCO,GAAO,QACH,gBACA,2DAA2D/B,CAAK,eAAeyB,EAAM,SAAS,EAAE,MAEpG,OAAO,CAACG,EAAOE,CAAK,CAExB,CACA,IAAIE,EAAWhC,EAAM,MAAM,CAAC,EACxB2B,IACFH,EAAiBA,EAAiB,EAOlCQ,EAAWA,EAAS,IAChB,CAACC,EAAIzB,IAAMA,GAAKwB,EAAS,OAAS,EAAKA,EAASxB,CAAC,EAAI,IAAM,EAAIwB,EAASxB,CAAC,EAAIwB,EAASxB,CAAC,EAAI,EAAKwB,EAASxB,CAAC,CAAC,EAI3GwB,EAAS,SAAW,IACtBA,EAAW,CAAC,EAAGA,EAAS,CAAC,CAAC,IAK1BA,EAAS,SAAW,IAEtBA,EADsBjC,GAAaiC,CAAQ,EAClB,UAG3B,IAAMhB,EAAOD,GAAciB,CAAQ,EACnC,OAAIA,EAAS,QAAU,GAAKhB,GAAQQ,EAC3B,CAAC,EAAGR,CAAI,EACNgB,EAAS,SAAW,GAAKA,EAAS,CAAC,GAAKR,GAAkBQ,EAAS,CAAC,GAAKR,EAC3EQ,EACEA,EAAS,SAAW,GAAKA,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,GAAkBQ,EAAS,CAAC,GAAKR,EACzF,CAACQ,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EACrCA,EAAS,SAAW,GAAKA,EAAS,CAAC,GAAKR,GAAkBQ,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,EACzF,CAACQ,EAAS,CAAC,EAAGA,EAAS,CAAC,EAAIA,EAAS,CAAC,CAAC,EAE5CA,EAAS,SAAW,GAAKA,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,GACpEQ,EAAS,CAAC,GAAKR,EACV,CAACQ,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAE1DA,EAAS,SAAW,GAAKA,EAAS,CAAC,GAAKR,GACxCQ,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,EACtC,CAACQ,EAAS,CAAC,EAAGA,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,CAAC,EAExDL,EAMKV,GAAoBD,EAAO,CAAC,EAAE,IAAIkB,GAAKA,EAAI,CAAC,EAE9CjB,GAAoBD,CAAI,CAEnC,CACF,ICvJA,IAeamB,GAfbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAEAC,KAMaP,GAAN,cAA4BQ,EAAQ,CAGzC,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CACL,GAAG,KAAK,eAAe,EACvB,GAAG,KAAK,eAAe,EACvB,GAAG,KAAK,MAAM,EACd,GAAG,KAAK,UAAU,EAElB,GAAG,KAAK,mBAAmB,EAC3B,GAAG,KAAK,0BAA0B,EAClC,GAAG,KAAK,yBAAyB,CACnC,CACF,CACA,gBAAiB,CACf,MAAO,CAAC,CACV,CAKU,gBAAmD,CAC3D,IAAMC,EAAW,iBACjB,MAAO,CACL,eAAgB,IAAIC,EAAe;AAAA,aAC5BD,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMd,CACH,CACF,CAMU,gBAAmD,CAC3D,IAAMA,EAAW,iBACjB,MAAO,CACL,eAAgB,IAAIC,EAAe;AAAA,YAC7BD,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMb,CACH,CACF,CAMU,0BAA6D,CACrE,IAAME,EAAe,KAAK,QAAQ,oBAClC,OAAIA,EAAa,SACR,KAAK,+BAA+BA,CAAY,EAEhD,KAAK,iCAAiCA,CAAY,CAE7D,CAKU,+BAA+BA,EAA+D,CACtG,IAAMC,EAAWD,EAAa,cACxBE,EAAc,CAACF,EAAa,MAAOA,EAAa,MAAM,EACtDG,EAA2C,CAAC,EAC5CL,EAAW,kBACjB,OAAQG,EAAS,OAAQ,CACvB,IAAK,GACHE,EAAOL,CAAQ,EAAI,KAAK,sBAAsB,EAC9C,MACF,IAAK,GACHK,EAAOL,CAAQ,EAAI,KAAK,wBAAwBG,EAAsBC,CAA+B,EACrG,MACF,IAAK,GACHC,EAAOL,CAAQ,EA
AI,KAAK,wBAAwBG,EAA8BC,CAA+B,EAC7G,MACF,IAAK,GACHC,EAAOL,CAAQ,EACX,KAAK,wBAAwBG,EAAsCC,CAA+B,EACtG,MACF,QACEC,EAAOL,CAAQ,EAAI,KAAK,wBAAwBG,EAAUC,CAA+B,CAC7F,CAIA,IAAME,EAA4B;AAAA;AAAA,UAHrBC,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAKxC,MAAM;AAAA;AAAA,MAGXC,EAA8B,sBACpC,OAAAH,EAAOG,CAA2B,EAAI,IAAIP,EAAeK,CAAyB,EAC3ED,CACT,CAKU,iCAAiCH,EAA+D,CACxG,IAAMC,EAAWD,EAAa,cACxBE,EAAc,CAACF,EAAa,MAAOA,EAAa,MAAM,EACtDG,EAA2C,CAAC,EAC5CL,EAAW,kBACjB,OAAQG,EAAS,OAAQ,CACvB,IAAK,GACHE,EAAOL,CAAQ,EAAI,KAAK,sBAAsB,EAC9C,MACF,IAAK,GACHK,EAAOL,CAAQ,EAAI,KAAK,0BAA0BG,EAAsBC,CAA+B,EACvG,MACF,IAAK,GACHC,EAAOL,CAAQ,EACX,KAAK,0BAA0BG,EAA8BC,CAA+B,EAChG,MACF,IAAK,GACHC,EAAOL,CAAQ,EACX,KAAK,0BAA0BG,EAAsCC,CAA+B,EACxG,MACF,IAAK,GACHC,EAAOL,CAAQ,EAAI,KAAK,0BACpBG,EAA8CC,CAA+B,EACjF,MACF,IAAK,GACHC,EAAOL,CAAQ,EAAI,KAAK,0BACpBG,EAAsDC,CAA+B,EACzF,MACF,IAAK,GACHC,EAAOL,CAAQ,EAAI,KAAK,0BACpBG,EAA8DC,CAA+B,EACjG,MACF,QACE,MAAM,IAAI,MAAM,sCAAsCD,EAAS,MAAM,EAAE,CAC3E,CAIA,IAAMM,EAAyB;AAAA;AAAA,YAHlBF,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAKtC,MAAM;AAAA;AAAA,MAGbG,EAA2B,mBACjC,OAAAL,EAAOK,CAAwB,EAAI,IAAIT,EAAeQ,CAAsB,EACrEJ,CACT,CAKU,uBAAwC,CAChD,OAAO,IAAIJ,EAAe;AAAA;AAAA;AAAA;AAAA,KAIzB,CACH,CAKU,wBAAwBU,EAAkBC,EAA4C,CAC9F,IAAMC,EAAiBD,EACnBE,EAAS,GACb,OAAID,EAAe,CAAC,IAAM,GACxBC,EAAS;AAAA;AAAA,2CAE4BD,EAAe,CAAC,CAAC;AAAA;AAAA,UAG/C,IAAIZ,EAAea,CAAM,GAG9BD,EAAe,CAAC,IAAM,GACxBC,EAAS;AAAA;AAAA,2CAE4BD,EAAe,CAAC,CAAC;AAAA;AAAA,UAG/C,IAAIZ,EAAea,CAAM,IAGlCA,EAAS;AAAA;AAAA;AAAA,wCAG2BD,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,qCAC1CA,EAAe,CAAC,CAAC;AAAA;AAAA,QAG3C,IAAIZ,EAAea,CAAM,EAClC,CAKU,wBAAwBC,EAAyBH,EAA4C,CACrG,IAAIE,EAAS,GACb,GAAIE,GAAU,YAAYD,EAAOH,CAAQ,EACvC,OAAAE,EAAS;AAAA;AAAA,iDAEkCF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA;AAAA,QAG/D,IAAIX,EAAea,CAAM,EAGlC,IAAMD,EAAiBD,EAEjBK,EAAqB,KAAK,KAAKF,EAAM,CAAC,EAAI,CAAC,EAWjD,OAAAD,EAAS;AAAA;AAAA;AAAA,uCAG0BD,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA;AAAA,qCAEzCA,EAAe,CAAC,CAAC;AAAA;AAAA;AAAA,gCAGtBI,CAAkB;AAAA,iCACjBA,CAAkB;AAAA;AAAA;AAAA;AAAA,QAKxC,IAAIhB,EAAea,CAAM,CAClC,CAKU,wBAAwBC,EAAiCH,EAA4C,CAC7G,IAAMC,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CK,EAAqB,KAAK,KAAKF,EAAM,CAAC,EAAI,CAAC,EAC3CG,EAAgBD,EAAqB,KAAK,KAAKF,EAAM,CAAC,EAAI,CAAC,EAC3DD,EAAS;AAAA;AAAA;AAAA,uCAGoBD,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,qCACzCA,EAAe,CAAC,CAAC;AAAA;AAAA,4BAE1BK,CAAa;AAAA,yBAChBA,CAAa;AAAA;AAAA;AAAA,gCAGND,CAAkB;AAAA,iCACjBA,CAAkB;AAAA;AAAA;AAAA;AAAA,QAK/C,OAAO,IAAIhB,EAAea,CAAM,CAClC,CAKU,wBAAwBC,EAA0BH,EAA4C,CACtG,IAAMC,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAE1CK,EAAqB,KAAK,KAAKF,EAAMA,EAAM,OAAS,CAAC,EAAI,CAAC,EAC1DG,EAAgBD,EAAqB,KAAK,KAAKF,EAAMA,EAAM,OAAS,CAAC,EAAI,CAAC,EAC5EI,EAAiBD,EACjBE,EAAU,GACVC,EAAS,UAEb,QAASC,EAAI,EAAGA,EAAIP,EAAM,OAAS,EAAGO,IACpCH,GAAkBJ,EAAMA,EAAM,OAASO,EAAI,CAAC,EAC5CF,EAAU;AAAA,aACHE,CAAC,cAAcH,CAAc;AAAA,kBACxBG,CAAC,MAAMH,CAAc;AAAA,MAC/BC,EACFC,EAAS,IAAIC,CAAC,KAAOD,EAEvB,IAAMP,EAAS;AAAA,YACPC,EAAM,MAAM;AAAA;AAAA,qCAEaF,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,mCACzCA,EAAe,CAAC,CAAC;AAAA;AAAA,UAE1CO,CAAO;AAAA;AAAA,0BAESF,CAAa;AAAA,uBAChBA,CAAa;AAAA;AAAA;AAAA,8BAGND,CAAkB;AAAA,+BACjBA,CAAkB;AAAA;AAAA,qBAE5BF,EAAM,MAAM,IAAIM,CAAM;AAAA;AAAA,MAGvC,OAAO,IAAIpB,EAAea,CAAM,CAClC,CAKU,0BAA0BH,EAAkBC,EAA4C,CAChG,IAAME,EAAS;AAAA;AAAA;AAAA,uCAGoBF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,gCAClCA,EAAS,CAAC,CAAC;AAAA;AAAA,QAGvC,OAAO,IAAIX,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAyBH,EAA4C,CACvG,IAAME,EAAS;AAAA;AAAA;AAAA,uCAGoBF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,4BACpBG,EAAM,CAAC,CAAC;AAAA,gCACJA,EAAM,CAAC,CAAC;AAAA;AAAA;AAAA,QAIpC,OAAO,IAAId,EAAea,CA
AM,CAClC,CAKU,0BAA0BC,EAAiCH,EAA4C,CAC/G,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,GAAG,EAChCC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,uCAG0BF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,YACpCe,CAAsB;AAAA;AAAA;AAAA,QAIvB,IAAI1B,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAyCH,EAC1D,CACjB,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,IAAK,IAAI,EACtCC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,uCAG0BF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,YACpCe,CAAsB;AAAA;AAAA;AAAA,QAIvB,IAAI1B,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAiDH,EAClE,CACjB,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,IAAK,KAAM,IAAI,EAC5CC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,uCAG0BF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,YACpCe,CAAsB;AAAA;AAAA;AAAA,QAIvB,IAAI1B,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAyDH,EAE1E,CACjB,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,IAAK,KAAM,KAAM,IAAI,EAClDC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,sCAGyBF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,oCAC7BA,EAAS,CAAC,CAAC;AAAA,WACpCe,CAAsB;AAAA;AAAA;AAAA,OAItB,IAAI1B,EAAea,CAAM,CAClC,CAKU,oBAAuD,CAC/D,IAAMT,EAA2C,CAAC,EAC9CL,EAAW,aACfK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAQrC,EACDD,EAAW,iBACXK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAOnC,EACHD,EAAW,iBACXK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAOnC,EACHD,EAAW,iBACXK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OASnC,EACHD,EAAW,gBACX,IAAM+
B,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,OAAAF,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA,qBAErB8B,EAAK,SAAS;AAAA,UACzB,EACC1B,CACT,CAKU,2BAA8D,CACtE,IAAMA,EAA2C,CAAC,EAC5CH,EAAe,KAAK,QAAQ,oBAClC,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAAC8B,EAAa,IAAM,CAC9D,IAAMC,EAAc,KAAK,QAAQ,oBAAoB,CAAC,EAChDjC,EAAWkC,GAA2CF,CAAW,EACnEC,EAAY,SACd5B,EAAOL,CAAQ,EAAI,KAAK,0BAA0BA,EAAUgC,EAAaC,CAAW,EAEpF5B,EAAOL,CAAQ,EAAI,KAAK,4BAA4BA,EAAUgC,EAAaC,CAAW,EAGxF,IAAME,EAAmBC,GAAsDJ,CAAW,EACtFC,EAAY,cAAc,QAAU/B,EAAa,cAAc,SAC7D+B,EAAY,SACd5B,EAAO8B,CAAgB,EACnB,KAAK,+BAA+BA,EAAkBF,EAAa/B,EAAc8B,CAAW,EAEhG3B,EAAO8B,CAAgB,EACnB,KAAK,iCAAiCA,EAAkBF,EAAa/B,EAAc8B,CAAW,EAGxG,CAAC,EAEM3B,CACT,CAKU,+BACNL,EAAkBiC,EAA4B/B,EAA6BmC,EAA8B,CAC3G,IAAMC,EAAUL,EAAY,cACtB9B,EAAWD,EAAa,cAExBqC,EAAiBL,GADPG,CACyD,EAEnEG,EAASF,EAAQ,OACjBG,EAAUtC,EAAS,OAEnBuC,EAAgBC,GAAc,iBAAiBL,EAASnC,CAAQ,EAEhEyC,EAAOC,GAAkBJ,CAAO,EAChCK,EAAWL,EAAUD,EACvBO,EACEC,EAASC,GAAc,EAEzBT,IAAW,EACbO,EAAgB,GACPN,EAAU,GAAKC,EAAc,QAAU,EAChDK,EAAgB,cAEhBA,EAAgBL,EAAc,IAAIQ,GAAK,UAAUF,EAAOE,EAAIJ,CAAQ,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EAEzF,IAAIK,EAAwB,GACxBV,EAAU,GAAKD,EAAS,EAC1BW,EAAwB,SAExBA,EAAwBb,EAAQ,IAAI,CAACc,EAAI3B,IAAM,UAAUuB,EAAOvB,EAAIqB,CAAQ,CAAC,EAAE,EAAE,KAAK,IAAI,EAG5F,IAAIO,EAAS,sBAEPC,EADSC,EAAU,KAAKjB,CAAO,IACJ,EAE3BkB,EADUD,EAAU,KAAKpD,CAAQ,IACJ,EAEnC,GAAIqC,IAAW,GAAK,CAACc,GAAiB,CAACE,EACrCH,EAAS;AAAA;AAAA,gBAGAC,GAAiB,CAACE,EACvBf,IAAY,EACdY,EAAS;AAAA;AAAA,UAITA,EAAS;AAAA;AAAA,kBAIFX,EAAc,OAAQ,CAC/B,IAAMe,EAAOjB,EAAS,EAChBkB,EAAOlB,EAAS,EAElBE,EAAc,QAAQe,CAAI,EAAI,IAAMf,EAAc,QAAQgB,CAAI,EAAI,GACpEL,EAAS,8BACAX,EAAc,QAAQe,CAAI,EAAI,GACvCJ,EAAS,2EAEAX,EAAc,QAAQgB,CAAI,EAAI,KACvCL,EAAS,+CAEb,CAEA,IAAMM,EAAsB;AAAA,+BACDX,EAAOP,EAAU,CAAC,CAAC;AAAA,iBACjCO,EAAOP,EAAU,CAAC,CAAC,aAAaO,EAAOP,EAAU,CAAC,CAAC;AAAA,iBACnDO,EAAOP,EAAU,CAAC,CAAC;AAAA,QAE1B3B,EAAS;AAAA,aACNd,CAAQ;AAAA,UACX4C,CAAI;AAAA,UACJe,CAAmB;AAAA,UACnBZ,CAAa;AAAA,6BACMR,CAAc,IAAIY,CAAqB;AAAA,UAC1DE,CAAM;AAAA;AAAA,MAGZ,OAAO,IAAIpD,EAAea,EAAQ,CAAC,6BAA6B,CAAC,CACnE,CAKU,iCACNd,EAAkBiC,EAA4B/B,EAA6BmC,EAA8B,CAC3G,IAAMjC,EAAc,CAACF,EAAa,MAAOA,EAAa,MAAM,EACtD0D,EAAa,CAAC3B,EAAY,MAAOA,EAAY,MAAM,EACnDO,EAASP,EAAY,cAAc,OACnCQ,EAAUvC,EAAa,cAAc,OACrCoC,EAAUL,EAAY,cACtB9B,EAAWD,EAAa,cACxBqC,EAAiBL,GAA2CG,CAAI,EAEtE,GAAIG,IAAWC,GAAWzB,GAAU,YAAY4C,EAAYxD,CAAW,EAAG,CACxE,IAAMU,EAAS;AAAA,kBACHd,CAAQ;AAAA,mCACSqC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,IAAM8B,EAAOC,GAAkBJ,CAAO,EAChCC,EAAgBC,GAAc,iBAAiBL,EAASnC,CAAQ,EAChE2C,EAAWL,EAAUD,EACvBO,EACEC,EAASC,GAAc,EAEzBT,IAAW,EACbO,EAAgB,GACPN,EAAU,GAAKC,EAAc,QAAU,EAChDK,EAAgB,cAEhBA,EAAgBL,EAAc,IAAIQ,GAAK,UAAUF,EAAOE,EAAIJ,CAAQ,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EAEzF,IAAIK,EAAwB,GACxBV,EAAU,GAAKD,EAAS,EAC1BW,EAAwB,SAExBA,EAAwBlB,EAAY,cAAc,IAAI,CAACmB,EAAI3B,IAAM,UAAUuB,EAAOvB,EAAIqB,CAAQ,CAAC,EAAE,EAAE,KAAK,IAAI,EAE9G,IAAMhC,EAAS;AAAA,gBACHd,CAAQ;AAAA,YACZ4C,CAAI;AAAA,YACJG,CAAa;AAAA,mBACNR,CAAc,IAAIY,CAAqB;AAAA;AAAA,QAGtD,OAAO,IAAIlD,EAAea,EAAQ,CAAC,6BAA6B,CAAC,CACnE,CAKU,0BAA0Bd,EAAkBqC,EAAcJ,EAA4C,CAC9G,OAAQA,EAAY,cAAc,OAAQ,CACxC,IAAK,GACH,OAAO,KAAK,uBAAuBjC,EAAUqC,CAAI,EACnD,IAAK,GACH,OAAO,KAAK,mBAAmBrC,EAAUqC,EAAMJ,CAAW,EAC5D,IAAK,GACH,OAAO,KAAK,mBAAmBjC,EAAUqC,EAAMJ,CAAW,EAC5D,IAAK,GACH,OAAO,KAAK,mBAAmBjC,EAAUqC,EAAMJ,CAAW,EAC5D,QACE,OAAO,KAAK,mBAAmBjC,EAAUqC,EAAMJ,CAAW,CAC9D,CACF,CAKU,4BAA4BjC,EAAkBqC,EAAcJ,EAA4C,CAChH,IAAMlB,EAAQkB,EAAY,cAC1B,OAAQlB,EAAM,OAAQ,CACpB,IAAK,GACH,OAAO,KAAK,yBAAyBf,EAAUqC,EAAMJ,CAAW,EAClE,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAq
BjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,QAEE,MAAM,IAAI,MAAM,yBAAyBlB,EAAM,MAAM,IAAI,CAC7D,CACF,CAKU,uBAAuBf,EAAkBqC,EAA8B,CAC/E,IAAMN,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAC7CO,EAAS;AAAA,iBACFd,CAAQ;AAAA,qBACJ+B,EAAK,SAAS,IAAIM,CAAI;AAAA;AAAA,UAGvC,OAAO,IAAIpC,EAAea,CAAM,CAClC,CAKU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMrB,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDpB,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CmB,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAO7CO,EALgB,QAAQd,CAAQ;AAAA;AAAA,QAElCa,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,eAChCkB,EAAK,SAAS,IAAIM,CAAI;AAAA,OAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA4B,CAAC,CAClE,CAKU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMlB,EAAQkB,EAAY,cACpBrB,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDF,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAC7CsD,EAAUjD,EAAS,CAAC,EACpBkD,EAAUlD,EAAS,CAAC,EAE1B,GAAIA,GAAY,MAAQI,GAAU,YAAYD,EAAOH,CAAQ,EAAG,CAC9D,IAAMmD,EAAgB,QAAQ/D,CAAQ;AAAA,qDACS8D,CAAO,OAAOD,CAAO;AAAA,iBACzD9B,EAAK,SAAS,IAAIM,CAAI;AAAA,SAGjC,OAAO,IAAIpC,EAAe8D,CAAa,CACzC,CACA,IAAMlD,EAAiBD,EACjBoD,EAAe,KAAK,KAAKjD,EAAM,CAAC,EAAI,CAAC,EAKrCD,EAJgB,QAAQd,CAAQ;AAAA,iCACTa,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC,KAAKmD,CAAY;AAAA,eAC1EjC,EAAK,SAAS,IAAIM,CAAI;AAAA,OAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA4B,CAAC,CAClE,CAKU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMlB,EAAQkB,EAAY,cACpBrB,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDpB,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CmB,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAEnD,GAAIQ,EAAM,CAAC,IAAM,EAAG,CAClB,IAAMkD,EAAgBlD,EAAM,MAAM,CAAC,EAC7BmD,EAAW,CAAC,EAAG,CAAC,EAChBC,EAAgBC,GAAkBrD,EAAOkD,CAAa,EACtDI,EAAS,CAAC,IAAK,MAAO,KAAK,EAE3BC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAC/B,IAAMI,EAAiB,KAAK,0BAA0BvE,EAAUqC,EAAMiC,CAAc,EAK9ExD,EAJgB,GAAGyD,EAAe,WAAW;AAAA,aAC5CvE,CAAQ;AAAA,iBACJA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA,UAG1D,OAAO,IAAIjE,EAAea,EAAQyD,EAAe,YAAY,CAC/D,CACA,IAAMV,EAAUhD,EAAe,CAAC,EAC1BiD,EAAUjD,EAAe,CAAC,EAE1BmD,EAAe,KAAK,KAAKjD,EAAM,CAAC,EAAI,CAAC,EACrCG,EAAgB8C,EAAe,KAAK,KAAKjD,EAAM,CAAC,EAAI,CAAC,EAMrDD,EAJgB,QAAQd,CAAQ;AAAA;AAAA,UAEhC8D,CAAO,KAAKD,CAAO,KAAK3C,CAAa,KAAK8C,CAAY;AAAA,eACjDjC,EAAK,SAAS,IAAIM,CAAI,UAEjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA4B,CAAC,CAClE,CAIU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMlB,EAAQkB,EAAY,cACpBV,EAAOR,EAAM,OACbH,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDF,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAE7CM,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CiD,EAAUhD,EAAe,CAAC,EAC1BiD,EAAUjD,EAAe,CAAC,EAC1BmD,EAAe,KAAK,KAAKjD,EAAMQ,EAAO,CAAC,EAAI,CAAC,EAC9CL,EAAgB8C,EAAe,KAAK,KAAKjD,EAAMQ,EAAO,CAAC,EAAI,CAAC,EAC5D8C,EAAS,0BACTI,EAAQ,OAAOvD,CAAa,kBAAkB8C,CAAY,eAC9D,QAAS1C,EAAI,EAAGA,EAAIC,EAAO,EAAGD,IAC5B+C,EAAS,QAAQ/C,CAAC,KAAO+C,EACzBnD,GAAiBH,EAAMQ,EAAOD,EAAI,CAAC,EACnCmD,EAAQ,IAAInD,CAAC,MAAMJ,CAAa,MAAQuD,EAS1C,IAAM3D,EAPgB,QAAQd,CAAQ,IAAIqE,CAAM;AAAA,oBAChCI,CAAK;AAAA,2BACEX,CAAO;AAAA,kCACAA,CAAO;AAAA,qDACYA,CAAO,KAAKD,CAAO;AAAA,eACzD9B,EAAK,SAAS,IAAIM,CAAI;AAAA,OAGjC,OAAO,IAAIpC,EAAea,CAAM,CAClC,CAKU,yBAAyBd,EAAkBqC,EAAcJ,EAA4C,CAC7G,GAAM,CAAC4B,EAASC,CAAO,EAAI,CAAC7B,EAAY,MAAOA,EAAY,MAAM,EACjE,GAAI4B,IAAY,GAAKC,IAAY,EAAG,CAClC,IAAMhD,EAAS;AAAA,kBACHd,CAAQ;AAAA,mCACSqC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,IAAMA,EAAS;AAAA,gBACHd,CAAQ;AAAA,uBACDqC,CAAI,gCAAgCwB,CAAO,KAAKC,CAAO;AAAA,iCAC7CD,CAAO,KAAKC,CAAO,YAAYzB,CAAI;AAAA,iCACnCA,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAKU,qBAAqBd,EAAkBqC,EAAcJ,EAA4
C,CACzG,IAAMyC,EAAQzC,EAAY,MACpB0C,EAAQ1C,EAAY,OAE1B,GAAI0C,IAAU,GAAKD,IAAU,EAAG,CAC9B,IAAM5D,EAAS;AAAA,gBACLd,CAAQ;AAAA,iCACSqC,CAAI;AAAA;AAAA,QAG/B,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,GAAI6D,IAAU,EAAG,CACf,IAAM7D,EAAS;AAAA,kBACHd,CAAQ;AAAA,oDAC0B0E,CAAK;AAAA,mCACtBrC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CACA,GAAI4D,IAAU,EAAG,CACf,IAAM5D,EAAS;AAAA,kBACHd,CAAQ;AAAA,yDAC+B2E,CAAK;AAAA,mCAC3BtC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CACA,IAAMA,EAAS;AAAA,gBACHd,CAAQ;AAAA,iCACS0E,CAAK,KAAKC,CAAK;AAAA,iCACftC,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,yBAA0B,2BAA2B,CAAC,CAC3F,CAMU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cAGpBrB,EAAW,CAACqB,EAAY,OAAQA,EAAY,KAAK,EAEvD,GAAIrB,GAAY,MAAQI,GAAU,YAAYD,EAAOH,CAAQ,EAAG,CAC9D,IAAMiD,EAAUjD,EAAS,CAAC,EACpBkD,EAAUlD,EAAS,CAAC,EACpBE,EAAS;AAAA,kBACHd,CAAQ;AAAA,yDAC+B6D,CAAO,OAAOC,CAAO;AAAA,mCAC3CzB,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,GAAM,CAAC,SAAA8D,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EACrDkD,EAAgBW,EACtB,GAAIX,EAAc,OAASlD,EAAM,OAAQ,CACvC,IAAMoD,EAAgBC,GAAkBrD,EAAOkD,CAAa,EAEtDK,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAE/B,IAAME,EAAS,CAAC,MAAO,KAAK,EACtBvD,EAAS;AAAA,YACT,KAAK,4BAA4Bd,EAAUqC,EAAMiC,CAAc,EAAE,WAAW;AAAA,kBACtEtE,CAAQ;AAAA,qBACLA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA;AAAA,UAG9D,OAAO,IAAIjE,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,IAAM+C,EAAUjD,EAAS,CAAC,EACpBkD,EAAUlD,EAAS,CAAC,EAC1B,GAAIkD,IAAY,EAAG,CACjB,IAAMhD,EAAS;AAAA,kBACHd,CAAQ;AAAA,yBACDqC,CAAI,gCAAgCwB,CAAO,KAAKC,CAAO;AAAA,sDAC1BzB,CAAI,WAAWtB,EAAM,CAAC,CAAC;AAAA,kDAC3B8C,CAAO;AAAA,mCACtBxB,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA6B,4BAA4B,CAAC,CAC/F,CAEA,GAAI+C,IAAY,EAAG,CACjB,IAAM/C,EAAS;AAAA,kBACHd,CAAQ;AAAA,yBACDqC,CAAI,gCAAgCwB,CAAO,KAAKC,CAAO;AAAA,sDAC1BzB,CAAI,WAAWtB,EAAM,CAAC,CAAC;AAAA,6CAChC+C,CAAO;AAAA,mCACjBzB,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA6B,4BAA4B,CAAC,CAC/F,CAEA,IAAMA,EAAS;AAAA,gBACHd,CAAQ;AAAA,8BACMe,EAAM,CAAC,CAAC;AAAA,iCACL8C,CAAO,KAAKC,CAAO;AAAA,iCACnBzB,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAMU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpB6C,EAAU/D,EAAM,CAAC,EAAIA,EAAM,CAAC,EAC5BgE,EAAUhE,EAAM,CAAC,EAEjB,CAAC,SAAA6D,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EACrDkD,EAAgBW,EACtB,GAAIX,EAAc,OAASlD,EAAM,OAAQ,CACvC,IAAMoD,EAAgBC,GAAkBrD,EAAOkD,CAAa,EACtDI,EAAS,CAAC,QAAS,MAAO,KAAK,EAE/BC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAC/B,IAAMa,EAAU,KAAK,4BAA4BhF,EAAUqC,EAAMiC,CAAc,EAEzEW,EAAUf,EAAS,QAAQ,EAC3BpD,EAAS;AAAA,YACTkE,EAAQ,WAAW;AAAA,kBACbhF,CAAQ;AAAA,qBACLA,CAAQ,IAAIwE,GAAkBH,EAAQY,CAAO,CAAC;AAAA;AAAA,UAG7D,OAAO,IAAIhF,EAAea,EAAQkE,EAAQ,YAAY,CACxD,CAEA,IAAMnB,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,kBACDd,CAAQ;AAAA;AAAA,kCAEQ8E,CAAO,YAAYC,CAAO;AAAA,mCACzBlB,CAAO,KAAKC,CAAO;AAAA,mCACnBzB,CAAI;AAAA;AAAA,QAGnC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAMU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpBiD,EAAUnE,EAAM,CAAC,EACjBgE,EAAUhE,EAAM,CAAC,EAAImE,EACrBJ,EAAU/D,EAAM,CAAC,EAAIgE,EAsBrBlB,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,gBACHd,CAAQ;AAAA,8BACM8E,CAAO,YAAYC,CAAO;AAAA,yBAC/BG,CAAO;AAAA,iCACCrB,CAAO,KAAKC,CAAO;AAAA,iCACnBzB,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,yBAA0B,2BAA2B,CAAC,CAC3F,CAKU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpBkD,EAAUpE,EAAM,CAAC,EACjBmE,EAAUnE,EAAM,CAAC,EAAIoE,EACrBJ,EAAUhE,EAAM,CAAC,EAAImE,EACrBJ,EAAU/D,EAAM,CAAC,EAAIg
E,EAErB,CAAC,SAAAH,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EAC3D,GAAI6D,EAAS,OAAS7D,EAAM,OAAQ,CAClC,IAAMoD,EAAgBC,GAAkBrD,EAAO6D,CAAQ,EACjDP,EAAS,CAAC,MAAO,MAAO,QAAS,SAAU,QAAQ,EAEnDC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAE/B,IAAMrD,EAAS;AAAA,YACT,KAAK,4BAA4Bd,EAAUqC,EAAMiC,CAAc,EAAE,WAAW;AAAA,kBACtEtE,CAAQ;AAAA,qBACLA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA;AAAA,UAG9D,OAAO,IAAIjE,EAAea,EAAQ,CAAC,4BAA6B,wBAAwB,CAAC,CAC3F,CAEA,IAAM+C,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,gBACHd,CAAQ;AAAA,8BACM8E,CAAO,YAAYC,CAAO,cAAcG,CAAO;AAAA,qBACxDC,CAAO;AAAA,iCACKtB,CAAO,KAAKC,CAAO;AAAA,iCACnBzB,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA6B,wBAAwB,CAAC,CAC3F,CAKU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpBmD,EAAUrE,EAAM,CAAC,EACjBoE,EAAUpE,EAAM,CAAC,EAAIqE,EACrBF,EAAUnE,EAAM,CAAC,EAAIoE,EACrBJ,EAAUhE,EAAM,CAAC,EAAImE,EACrBJ,EAAU/D,EAAM,CAAC,EAAIgE,EAErB,CAAC,SAAAH,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EAC3D,GAAI6D,EAAS,OAAS7D,EAAM,OAAQ,CAClC,IAAMoD,EAAgBC,GAAkBrD,EAAO6D,CAAQ,EACjDP,EAAS,CAAC,MAAO,MAAO,QAAS,SAAU,SAAU,QAAQ,EAE7DC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAE/B,IAAMrD,EAAS;AAAA,cACP,KAAK,4BAA4Bd,EAAUqC,EAAMiC,CAAc,EAAE,WAAW;AAAA,oBACtEtE,CAAQ;AAAA;AAAA,uBAELA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA;AAAA,YAGhE,OAAO,IAAIjE,EAAea,EAAQ,CAAC,4BAA6B,wBAAwB,CAAC,CAC3F,CAEA,IAAM+C,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,kBACDd,CAAQ;AAAA;AAAA,gCAEM8E,CAAO,YAAYC,CAAO,cAAcG,CAAO;AAAA,uBACxDC,CAAO,eAAeC,CAAO;AAAA,mCACjBvB,CAAO,KAAKC,CAAO;AAAA,mCACnBzB,CAAI;AAAA;AAAA,UAGnC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAQU,OAA0C,CAClD,IAAMuC,EAAS,KAAK,QAAQ,oBACtB9B,EAAO8B,EAAO,MAAM,OACpB7B,EAAU6B,EAAO,QACjBgC,EAAShC,EAAO,MAChBiC,EAASjC,EAAO,OAEhBkC,EAAe,CAAC,EACtB,QAAS9D,EAAI,EAAGA,EAAIF,EAAO,EAAG,EAAEE,EAC9B8D,EAAa,KAAK;AAAA,YACZ9D,CAAC,gBAAgBD,EAAQC,CAAC,CAAC,GAAG,EACpC8D,EAAa,KAAK;AAAA,sBACF9D,CAAC,OAAOD,EAAQC,CAAC,CAAC,GAAG,EAEvC8D,EAAa,KAAK;AAAA,YACVhE,EAAO,CAAC,aAAa,EAC7B,IAAMiE,EAAO;AAAA,6CAC4BjE,CAAI;AAAA,iDACA8D,CAAM,KAAKC,CAAM;AAAA,UACxDC,EAAa,KAAK,EAAE,CAAC;AAAA;AAAA,yCAEUhE,CAAI;AAAA,UACnCgE,EAAa,KAAK,EAAE,CAAC;AAAA;AAAA,MAG3B,MAAO,CAAC,MAAO,IAAItF,EAAeuF,EAAM,CAAC,4BAA4B,CAAC,CAAC,CACzE,CAOU,WAA8C,CACtD,IAAMnF,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACgC,EAAMZ,IAAM,CACvD,IAAMgE,EAAS,KAAK,QAAQ,oBAAoBhE,CAAC,EAE3CF,GADQkE,EAAO,cAAc,OAAS,EAAIA,EAAO,cAAgBA,EAAO,OAC3D,OACfzF,EAAW,IAAIqC,CAAI,GACvBhC,EAAOL,CAAQ,EAAI,IAAIC,EACnB,KAAK,mBAAmBoC,EAAMd,EAAMkE,EAAO,MAAOA,EAAO,OAAQ,EAAK,EACtE,CAAC,6BAA6BzF,CAAQ,GAAI,6BAA8B,2BAA2B,CAAC,EACxGA,EAAWA,EAAW,KACtBK,EAAOL,CAAQ,EAAI,IAAIC,EACnB,KAAK,mBAAmBoC,EAAMd,EAAMkE,EAAO,MAAOA,EAAO,OAAQ,EAAI,EACrE,CAAC,6BAA6BzF,CAAQ,GAAI,6BAA8B,2BAA2B,CAAC,CAC1G,CAAC,EACMK,CACT,CAQU,mBAAmBqF,EAAiBnE,EAAcoE,EAAeC,EAAgBC,EAChF,CACT,IAAIxD,EAAO,IAAIqD,CAAO,GAClBG,IACFxD,EAAOA,EAAO,MAEhB,IAAMN,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,MAAO;AAAA,gBACK8B,CAAI,UAAUd,CAAI;AAAA,wCACMc,CAAI;AAAA,iDACKsD,CAAK,KAAKC,CAAM;AAAA,0CACvB7D,EAAK,SAAS,IAAI2D,CAAO;AAAA;AAAA;AAAA,SAIjE,CASU,mBAAmBA,EAAiBnE,EAAcoE,EAAeC,EAAgBC,EAChF,CACT,IAAIxD,EAAO,IAAIqD,CAAO,QAClBG,IACFxD,EAAOA,EAAO,MAEhB,IAAMN,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,MAAO;AAAA,eACI8B,CAAI,UAAUd,CAAI;AAAA,yCACQmE,CAAO;AAAA,iDACCC,CAAK,KAAKC,CAAM;AAAA,mBAC9C7D,EAAK,SAAS,IAAI2D,CAAO;AAAA;AAAA,SAG1C,CACF,ICzzCA,IASaI,GATbC,GAAAC,EAAA,kBAGAC,KAMaH,GAAN,MAAMI,UAAwBC,EAAQ,CAC3C,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CAAC,GAAG,KAAK,cAAc,EAAG,GAAG,KAAK,cAAc,CAAC,CAC1D,CACA,gBAA2C,CACzC,MAAO,CAAC,CACV,CACU,eAAkD,CAC1D,MAAO,CACL,OAAQ
,IAAIC,EAAe;AAAA;AAAA;AAAA,SAGxB,CACL,CACF,CACU,eAAkD,CAC1D,MAAO,CACL,OAAQ,IAAIA,EAAe;AAAA;AAAA;AAAA,SAGxB,CACL,CACF,CAKU,aAAgD,CACxD,IAAMC,EAAaJ,EAAgB,eAAe,EAAI,uBAAyB,GAC/E,MAAO,CACL,OAAQ,IAAIG,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAYvBC,CAAU;AAAA;AAAA;AAAA;AAAA,SAIX,CACL,CACF,CAKU,aAAgD,CACxD,IAAMA,EAAaJ,EAAgB,eAAe,EAAI,uBAAyB,GAC/E,MAAO,CACL,OAAQ,IAAIG,EAAe;AAAA;AAAA;AAAA,YAGrBC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOb,CACL,CACF,CAKA,OAAO,gBAA0B,CAC/B,IAAMC,EAAI,IAAI,YAAY,CAAC,EACrBC,EAAI,IAAI,YAAYD,CAAC,EACrBE,EAAI,IAAI,WAAWF,CAAC,EAE1B,GADAC,EAAE,CAAC,EAAI,WACHC,EAAE,CAAC,IAAM,IACX,MAAO,GAET,GAAIA,EAAE,CAAC,IAAM,IACX,MAAO,GAET,MAAM,IAAI,MAAM,oBAAoB,CACtC,CACF,IClGA,IAWaC,GAXbC,GAAAC,EAAA,kBAGAC,KACAC,KAOaJ,GAAN,cAA+BK,EAAQ,CAC5C,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CAAC,GAAG,KAAK,aAAa,EAAG,GAAG,KAAK,gBAAgB,CAAC,CAC3D,CACA,gBAA2C,CACzC,MAAO,CAAC,CACV,CACU,cAAiD,CACzD,IAAMC,EAAOC,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,MAAO,CACL,aAAc,IAAIC,EACd;AAAA;AAAA,cAEIF,EAAK,MAAM;AAAA;AAAA,UAGf,CAAC,iBAAiB,CAAC,CACzB,CACF,CACU,iBAAoD,CAC5D,MAAO,CACL,gBAAiB,IAAIE,EACjB;AAAA;AAAA;AAAA;AAAA,UAKA,CAAC,iBAAiB,CAAC,CACzB,CACF,CACF,IC5CA,IASaC,GATbC,GAAAC,EAAA,kBAGAC,KAMaH,GAAN,MAAMI,UAA0BC,EAAQ,CAC7C,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CACL,GAAG,KAAK,WAAW,EACnB,GAAG,KAAK,iBAAiB,EACzB,GAAG,KAAK,gBAAgB,EACxB,GAAG,KAAK,gBAAgB,EACxB,GAAG,KAAK,iBAAiB,CAC3B,CACF,CACA,gBAAiB,CACf,MAAO,CAAC,CACV,CACU,YAA+C,CACvD,IAAMC,EAAa,KAAK,QAAQ,oBAAoB,MAAM,OACpDC,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,cAClD,GAAIC,EAAM,QAAUJ,EAAY,CAC9B,IAAMK,EAAOD,EAAM,OACbE,EAAYN,EAAaK,EACzBE,EAAW,gBAAgBL,CAAI,GACjCM,EAAQ,GACZ,QAASL,EAAI,EAAGA,EAAIE,EAAM,EAAEF,EAC1BK,GAAS;AAAA,wBACKL,CAAC,qCAAqCG,EAAYH,CAAC,OAAOC,EAAMD,CAAC,CAAC;AAAA,YAGlF,IAAMM,EAAO;AAAA,eACNF,CAAQ,wBAAwBP,CAAU,0BAA0BK,CAAI;AAAA,YAC3EG,CAAK;AAAA;AAAA,UAGTP,EAAOM,CAAQ,EAAI,IAAIG,EAAeD,CAAI,CAC5C,CACF,CAAC,EACMR,CACT,CACU,kBAAqD,CAC7D,IAAMD,EAAa,KAAK,QAAQ,oBAAoB,MAAM,OACpDC,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAClD,GAAI,EAAEC,EAAM,OAAS,GAAKA,EAAM,OAASJ,GAAa,CACpD,IAAMK,EAAOD,EAAM,OACbE,EAAYN,EAAaK,EACzBE,EAAW,sBAAsBL,CAAI,GACvCM,EAAQ,GACZ,QAASL,EAAI,EAAGA,EAAIE,EAAO,EAAG,EAAEF,EAC9BK,GAAS;AAAA,wBACKL,CAAC,qCAAqCG,EAAYH,CAAC,OAAOC,EAAMD,CAAC,CAAC;AAAA,YAGlF,IAAMM,EAAO;AAAA,eACNF,CAAQ,uBAAuBP,CAAU,0BAA0BK,CAAI;AAAA,YAC1EG,CAAK;AAAA,wBACOH,EAAO,CAAC,sBAAsBL,EAAa,CAAC;AAAA,wBAC5CK,EAAO,CAAC,sBAAsBL,EAAa,CAAC;AAAA;AAAA,UAG5DC,EAAOM,CAAQ,EAAI,IAAIG,EAAeD,CAAI,CAC5C,CACF,CAAC,EACMR,CACT,CACU,iBAAoD,CAC5D,IAAMA,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAC5CQ,EAAU,KAAK,QAAQ,oBAAoBR,CAAC,EAAE,QAC9CE,EAAOD,EAAM,OACfG,EAAW,mBAAmBL,CAAI,GACtCD,EAAOM,CAAQ,EAAI,IAAIG,EAAeb,EAAkB,oBAAoBU,EAAUF,EAAMM,CAAO,CAAC,EACpGJ,EAAW,mBAAmBL,CAAI,KAClCD,EAAOM,CAAQ,EACX,IAAIG,EAAeb,EAAkB,oBAAoBU,EAAUF,EAAMM,EAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,CACzG,CAAC,EACMV,CACT,CACA,OAAO,oBAAoBC,EAAcG,EAAcM,EAAoC,CACzF,IAAIH,EAAQ,GACZ,QAAS,EAAIH,EAAO,EAAG,GAAK,EAAG,EAAE,EAC/BG,GAAS;AAAA,4BACa,CAAC,OAAOG,EAAQ,CAAC,CAAC;AAAA,UAG1C,MAAO;AAAA,YACCT,CAAI,gBAAgBG,CAAI;AAAA;AAAA,UAE1BG,CAAK;AAAA;AAAA;AAAA,OAIb,CACU,iBAAoD,CAC5D,IAAMP,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAC5CQ,EAAU,KAAK,QAAQ,oBAAoBR,CAAC,EAAE,QAC9CE,EAAOD,EAAM,OACfG,EAAW,mBAAmBL,CAAI,GACtCD,EAAOM,CAAQ,EAAI,IAAIG,EAAeb,EAAkB,sBAAsBU,EAAUF,EAAMM
,CAAO,CAAC,EACtGJ,EAAW,mBAAmBL,CAAI,KAClCD,EAAOM,CAAQ,EACX,IAAIG,EAAeb,EAAkB,sBAAsBU,EAAUF,EAAMM,EAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,CAC3G,CAAC,EACMV,CACT,CACA,OAAO,sBAAsBC,EAAcG,EAAcM,EAAoC,CAC3F,IAAMC,EAAe,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIP,EAAO,EAAG,EAAE,EAC9BO,EAAa,KAAK;AAAA,gBACR,CAAC,gBAAgBD,EAAQ,CAAC,CAAC,GAAG,EACxCC,EAAa,KAAK;AAAA,4BACI,CAAC,OAAOD,EAAQ,CAAC,CAAC,GAAG,EAE7C,OAAAC,EAAa,KAAK;AAAA,gBACNP,EAAO,CAAC,aAAa,EAC1B;AAAA,aACEH,CAAI,gCAAgCG,CAAI;AAAA,UAC3CO,EAAa,KAAK,EAAE,CAAC;AAAA;AAAA,OAG7B,CACU,kBAAqD,CAC7D,IAAMX,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAC5CE,EAAOD,EAAM,OACbG,EAAW,oBAAoBL,CAAI,GACrCW,EAAY,GAChB,QAASV,EAAI,EAAGA,EAAIE,EAAM,EAAEF,EAC1BU,GAAa;AAAA,gBACLV,CAAC,OAAOC,EAAMD,CAAC,CAAC,IAE1B,IAAMM,EAAO;AAAA,eACJF,CAAQ,8BAA8BF,CAAI;AAAA,sBACnCA,CAAI;AAAA,YACdQ,CAAS;AAAA,wBACGR,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAUtBJ,EAAOM,CAAQ,EAAI,IAAIG,EAAeD,CAAI,CAC5C,CAAC,EACMR,CACT,CACF,ICrKA,IAUaa,GAVbC,GAAAC,EAAA,kBAGAC,KAOaH,GAAN,cAAyBI,EAAQ,CACtC,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,gBAA2C,CACzC,MAAO,CAAC,CACV,CACA,cAAiD,CAC/C,MAAO,CAAC,GAAG,KAAK,mBAAmB,EAAG,GAAG,KAAK,QAAQ,EAAG,GAAG,KAAK,WAAW,EAAG,GAAG,KAAK,WAAW,CAAC,CACrG,CACU,oBAAuD,CAE/D,IAAMC,EADe,KAAK,QAAQ,oBACR,MAAM,OAC1BC,EAAmC,CAAC,IAAK,KAAM,IAAK,KAAM,IAAK,KAAM,IAAK,IAAI,EAC9EC,EAA2C,CAAC,EAClD,QAAWC,KAAQF,EAAQ,CACzB,IAAMG,EAAQ,GAAGD,CAAI,MACjBE,EAAkB,GACtB,QAASC,EAAI,EAAGA,EAAIN,EAAM,EAAEM,EAC1BD,GAAmB;AAAA,iBACVC,CAAC,KAAKL,EAAOE,CAAI,CAAC,QAAQG,CAAC;AAAA,YAGtC,IAAMC,EAAO;AAAA,eACJH,CAAK,YAAYJ,CAAI,mBAAmBA,CAAI;AAAA,YAC/CK,CAAe;AAAA;AAAA,UAGrBH,EAAOE,CAAK,EAAI,IAAII,EAAeD,CAAI,CACzC,CAEA,OAAOL,CACT,CACU,SAA4C,CAEpD,IAAMF,EADe,KAAK,QAAQ,oBACR,MAAM,OAC5BK,EAAkB,GACtB,QAAS,EAAI,EAAG,EAAIL,EAAM,EAAE,EAC1BK,GAAmB;AAAA,eACV,CAAC,WAAW,CAAC;AAAA,UAGxB,IAAME,EAAO;AAAA,6BACYP,CAAI,mBAAmBA,CAAI;AAAA,UAC9CK,CAAe;AAAA;AAAA,QAGrB,MAAO,CAAC,QAAS,IAAIG,EAAeD,CAAI,CAAC,CAC3C,CAEU,YAA+C,CAEvD,IAAMP,EADe,KAAK,QAAQ,oBACR,MAAM,OAC5BS,EAAQ;AAAA;AAAA,qBAEKT,CAAI;AAAA;AAAA;AAAA,UAIrB,QAAS,EAAI,EAAG,EAAIA,EAAO,EAAG,EAAE,EAC9BS,GAAS;AAAA,4BACa,CAAC;AAAA,gBACb,CAAC;AAAA,cAGbA,GAAS;AAAA;AAAA,gBAEGT,EAAO,CAAC;AAAA,UAEpB,IAAMO,EAAO;AAAA,kCACiBP,CAAI;AAAA,UAC5BS,CAAK;AAAA;AAAA,UAGX,MAAO,CAAC,WAAY,IAAID,EAAeD,CAAI,CAAC,CAC9C,CACU,YAA+C,CAEvD,IAAMP,EADe,KAAK,QAAQ,oBACR,MAAM,OAC5BS,EAAQ;AAAA;AAAA,sBAEMT,CAAI;AAAA;AAAA;AAAA,QAItB,QAAS,EAAI,EAAG,EAAIA,EAAO,EAAG,EAAE,EAC9BS,GAAS;AAAA,4BACa,CAAC;AAAA,uBACN,CAAC;AAAA,QAGpBA,GAAS;AAAA;AAAA,uBAEUT,EAAO,CAAC;AAAA,UAE3B,IAAMO,EAAO;AAAA,6BACYP,CAAI;AAAA,UACvBS,CAAK;AAAA;AAAA,MAGX,MAAO,CAAC,WAAY,IAAID,EAAeD,CAAI,CAAC,CAC9C,CACF,IChHA,IAUaG,GAVbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KACAC,KAEaP,GAAwE,CACnF,SAAYQ,GACZ,UAAaC,GACb,IAAOC,GACP,WAAcC,GACd,YAAeC,EAEjB,ICjBA,IAkBaC,GAlBbC,GAAAC,EAAA,kBAGAC,KACAC,KACAC,KACAC,KAYaN,GAAN,KAAuB,CAK5B,YACIO,EAAyBC,EAA0BC,EACnDC,EAAoC,CALxC,KAAS,KAAkC,CAAC,EAC5C,KAAS,8BAA6E,CAAC,EAKrF,KAAK,QAAU,IAAIC,GAAYJ,EAAWC,EAAaC,EAAqBC,CAAmB,EAG/F,OAAO,KAAKE,EAAY,EAAE,QAASC,GAAiB,CAClD,IAAMC,EAAM,IAAIF,GAAaC,CAAI,EAAE,KAAK,OAAO,EAC/C,KAAK,KAAKA,CAAI,EAAIC,CACpB,CAAC,EAGD,IAAMC,EAAM,KAAK,8BACjB,QAAWC,KAAW,KAAK,KAAM,CAE/B,IAAMC,EADM,KAAK,KAAKD,CAAO,EACH,aAAa,EACvC,QAAWE,KAAWD,EAAe,CACnC,IAAME,EAAMH,EAAU,IAAME,EACxBE,EACAL,EAAII,CAAG,GACTC,EAAcL,EAAII,CAAG,EACrBC,EAAY,YAAcH,EAAcC,CAAO,EAAE,cAEjDE,EAAc,IAAIC,GAAmBF,EAAKF,EAAcC,CAAO,EAAE,WAAW,EAC5EH,EAAII,CAAG,EAAIC,GAEb,IAAME,EAAeL,EAAcC,CAAO,EAAE,aAC5C,GAAII,EACF,QAASC,EAAI,EAAGA,EAAID,EAAa,OAAQ,EAAEC,EACzC,GAAKR,EAAIO,EAAaC,CAAC,CAAC,EAKtBH,EAAY,cAAcL,EAAIO,EAAaC,CA
AC,CAAC,CAAC,MALrB,CACzB,IAAMC,EAAO,IAAIH,GAAmBC,EAAaC,CAAC,CAAC,EACnDR,EAAIO,EAAaC,CAAC,CAAC,EAAIC,EACvBJ,EAAY,cAAcI,CAAI,CAChC,CAKN,CACF,CACF,CAEA,YAAqB,CACnB,IAAMhB,EAAc,KAAK,QAAQ,YAC7BiB,EAASjB,EAAY,aAGzB,OAAK,KAAK,QAAQ,YAAY,UAC5BiB,EAAS,GAAGA,CAAM;AAAA,QAChBC,GAAyB,KAAK,QAAQ,UAAU,QAAS,KAAK,QAAQ,oBAAoB,MAAM,MAAM,CAAC,IAG3GD,EAASE,GAAeF,CAAM,EAGvB,GAAGG,GAAsB,KAAK,QAAQ,UAAU,OAAO,CAAC;AAAA,MAC7D,KAAK,YAAYpB,EAAY,WAAYA,EAAY,SAAS,CAAC;AAAA,MAC/D,KAAK,WAAWiB,CAAM,CAAC;AAAA,MACvBA,CAAM,EACV,CAEU,WAAWI,EAAwB,CAC3C,IAAMC,EAAmB,KAAK,kCAAkCD,CAAM,EAEtE,GAAIC,EAAiB,SAAW,EAC9B,MAAO,GAGT,IAAIC,EAAW,GACf,QAASR,EAAI,EAAGA,EAAIO,EAAiB,OAAQ,EAAEP,EAC7C,GAAIO,EAAiBP,CAAC,EAAE,YACtBQ,GAAYD,EAAiBP,CAAC,EAAE,YAAc;AAAA,MAE9C,OAAM,IAAI,MAAM,8CAA8CO,EAAiBP,CAAC,EAAE,IAAI,EAAE,EAI5F,OAAOQ,CACT,CACQ,kCAAkCF,EAAsC,CAC9E,IAAMG,EAA8B,CAAC,EAErC,cAAO,KAAK,KAAK,6BAA6B,EAAE,QAAQC,GAAmB,CACzE,IAAMf,EAAUe,EAAgB,MAAM,GAAG,EAAE,CAAC,EACxCJ,EAAO,QAAQX,CAAO,IAAM,IAC9Bc,EAAM,KAAK,KAAK,8BAA8BC,CAAe,CAAC,CAElE,CAAC,EAEMC,GAA4B,mBAAmBF,CAAK,CAC7D,CAEU,YAAYG,EAAqBC,EAAoC,CAC7E,IAAMC,EAAyB,CAAC,EAChC,GAAIF,EACF,QAAWG,KAAWH,EACpBE,EAAa,KAAK,qBAAqBC,CAAO,GAAG,EAGrD,GAAIF,EACF,QAAWG,KAAYH,EACrBC,EAAa,KACT,WAAWE,EAAS,IAAI,IAAIA,EAAS,IAAI,GAAGA,EAAS,YAAc,IAAIA,EAAS,WAAW,IAAM,EAAE,GAAG,EAG9G,OAAOF,EAAa,KAAK;AAAA,CAAI,CAC/B,CACF,IClIA,IAsBaG,GAtBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAcaN,GAAN,KAAqB,CAK1B,YACWO,EAAqCC,EACrCC,EAA8C,CAD9C,cAAAF,EAAqC,eAAAC,EACrC,2BAAAC,EACT,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAuBC,EAA2B,CAC7E,KAAK,SAAS,MAAM,KAAM,sBAAsBF,EAAc,YAAY,MAAQ,gBAAgB,GAAI,IAAM,CAC1G,IAAMG,EAAK,KAAK,UAAU,GACpBC,EAAUJ,EAAc,QAC9BG,EAAG,WAAWC,CAAO,EACrB,GAAI,CACF,KAAK,WAAWF,CAAM,EACjB,KAAK,iBACR,KAAK,eAAeF,EAAc,eAAe,EAEnD,KAAK,aAAaA,EAAc,iBAAkBA,EAAc,YAAY,WAAa,CAAC,EAAGC,CAAM,CACrG,OAASI,EAAK,CACZ,MAAAC,GAAO,MAAM,iBAAkBN,EAAc,YAAY,YAAY,EAC/DK,CACR,CACA,KAAK,SAAS,MAAM,UAAW,mBAAoB,IAAM,CACvD,KAAK,UAAU,KAAK,CACtB,CAAC,CACH,EAAG,KAAK,SAAS,CACnB,CACA,SAAgB,CACV,KAAK,cACP,KAAK,UAAU,aAAa,KAAK,YAAY,EAE/C,KAAK,KAAK,QAAQE,GAAK,KAAK,UAAU,cAAcA,EAAE,OAAO,CAAC,CAChE,CACA,MAAMC,EAA0BC,EAAsCC,EAA8C,CAClH,OAAO,KAAK,SAAS,MAAM,UAAW,uBAAwB,IAAM,CAClE,IAAMC,EAAe,IAAIC,GAAiB,KAAK,UAAWJ,EAAaC,EAAqBC,CAAmB,EACzGG,EAAaF,EAAa,WAAW,EACrCP,EAAU,KAAK,QAAQS,CAAU,EAQvC,MAPiB,CACf,YAAAL,EACA,QAAAJ,EACA,iBAAkB,KAAK,oBACnBA,EAASO,EAAa,QAAQ,YAAY,WAAYA,EAAa,QAAQ,YAAY,SAAS,EACpG,gBAAiB,KAAK,mBAAmBP,CAAO,CAClD,CAEF,CAAC,CACH,CACU,QAAQU,EAAwC,CACxD,GAAI,CAAC,KAAK,aAAc,CACtBR,GAAO,QAAQ,kBAAmB,wDAAwD,EAC1F,IAAMS,EAAqBC,GAAsB,KAAK,UAAU,OAAO,EACvE,KAAK,aAAe,KAAK,UAAU,cAAcD,EAAoB,KAAK,UAAU,GAAG,aAAa,CACtG,CACIE,GAAI,OACNX,GAAO,QAAQ,kBAAmB;AAAA,EACtCQ,CAAgB;AAAA,CACjB,EAEG,IAAMI,EAAa,KAAK,UAAU,cAAcJ,EAAkB,KAAK,UAAU,GAAG,eAAe,EAC7FV,EAAU,KAAK,UAAU,cAAc,KAAK,aAAcc,CAAU,EAC1E,YAAK,UAAU,aAAaA,CAAU,EAC/Bd,CACT,CACA,WAAWe,EAAuB,CAChC,IAAMC,EAAQD,EAAG,MACXE,EAASF,EAAG,OAClBb,GAAO,QACH,kBACA,8CAA8Cc,CAAK,IAAIC,CAAM,WAAWF,EAAG,KAAK,UAAUA,EAAG,OAAO,IAAI,EAAE,EAC9G,KAAK,UAAU,kBAAkBA,EAAG,QAASC,EAAOC,CAAM,CAC5D,CACA,eAAeC,EAAiD,CAC9D,IAAMC,EAAiBD,EAAgB,SACjCE,EAAqBF,EAAgB,aAC3C,KAAK,UAAU,oBAAoBC,EAAgBC,CAAkB,EACrE,KAAK,gBAAkB,EACzB,CACA,aAAaC,EAA6CC,EAA8BC,EAC/E,CACP,IAAMxB,EAAK,KAAK,UAAU,GACtByB,EAAkB,EACtB,OAAW,CAAC,KAAAC,EAAM,KAAAC,EAAM,SAAAC,EAAU,YAAAC,CAAW,IAAKP,EAAkB,CAClE,IAAMQ,EAAQP,EAAU,KAAKQ,GAAKA,EAAE,OAASL,CAAI,GAAG,KACpD,GAAIC,IAAS,aAAe,CAACG,EAC3B,MAAM,IAAI,MAAM,aAAaJ,CAAI,8CAA8C,EAEjF,OAAQC,EAAM,CACZ,IAAK,YACH,KAAK,YAAYH,EAASC,CAAe,EAAG
G,EAAUH,CAAe,EACrEA,IACA,MACF,IAAK,QACCI,EACF7B,EAAG,WAAW4B,EAAUE,CAAiB,EAEzC9B,EAAG,UAAU4B,EAAUE,CAAe,EAExC,MACF,IAAK,MACCD,EACF7B,EAAG,WAAW4B,EAAUE,CAAiB,EAEzC9B,EAAG,UAAU4B,EAAUE,CAAe,EAExC,MACF,QACE,MAAM,IAAI,MAAM,4BAA4BH,CAAI,EAAE,CACtD,CACF,CACF,CACA,YAAYX,EAAiBgB,EAAqCC,EAAwB,CACxF,KAAK,UAAU,qBAAqBjB,EAAG,QAASiB,EAAUD,CAAa,CACzE,CACA,mBAAmB/B,EAAiD,CAClE,MAAO,CACL,SAAU,KAAK,kBAAkBA,EAAS,UAAU,EACpD,aAAc,KAAK,kBAAkBA,EAAS,cAAc,CAC9D,CACF,CACA,oBAAoBA,EAAuBiC,EAAqBX,EAClC,CAC5B,IAAMD,EAA8C,CAAC,EACrD,GAAIY,EACF,QAAWC,KAAWD,EACpBZ,EAAiB,KAAK,CAAC,KAAMa,EAAS,KAAM,YAAa,SAAU,KAAK,mBAAmBlC,EAASkC,CAAO,CAAC,CAAC,EAGjH,GAAIZ,EACF,QAAWa,KAAYb,EACrBD,EAAiB,KAAK,CAAC,GAAGc,EAAU,SAAU,KAAK,mBAAmBnC,EAASmC,EAAS,IAAI,CAAC,CAAC,EAGlG,OAAOd,CACT,CACA,mBAAmBrB,EAAuByB,EAAoC,CAE5E,IAAMW,EADK,KAAK,UAAU,GACL,mBAAmBpC,EAASyB,CAAI,EACrD,GAAIW,IAAc,KAChB,MAAM,IAAI,MAAM,WAAWX,CAAI,aAAa,EAE9C,OAAOW,CACT,CACA,kBAAkBpC,EAAuByB,EAAsB,CAG7D,OAFW,KAAK,UAAU,GACW,kBAAkBzB,EAASyB,CAAI,CAEtE,CACF,ICpLA,IAyBaY,GAzBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAmBaJ,GAAN,KAAqB,CAM1B,YACWK,EAAgCC,EAA8CC,EAC7EC,EAA8B,CAD/B,eAAAH,EAAgC,oBAAAC,EAA8C,cAAAC,EAC7E,YAAAC,EAJZ,KAAiB,YAAuE,IAAI,IAKtFA,EAAO,gBACT,KAAK,cAAgB,IAAI,IACzB,KAAK,aAAe,IAAI,IACxB,KAAK,cAAgB,IAAI,IAE7B,CACA,wBACIC,EAA2BC,EAAuBC,EAA0BC,EAAsB,CACpG,IAAMC,EAAkB,KAAK,cAAcJ,CAAQ,EAE7CK,EAAU,KAAK,UAAU,WAAWD,EAAiBH,EAAO,UAAY,EAAGE,CAAK,EACtF,GAAIF,EAAO,UAAYE,IAAU,EAC/B,MAAM,IAAI,MAAM,iBAAiB,EAEnC,IAAMG,EAAQL,EAAO,MACfM,EAASN,EAAO,OAElBO,EACAC,EACJ,GAAI,KAAK,OAAO,cAAe,CAC7BD,EAAM,GAAGF,CAAK,IAAIC,CAAM,IAAIF,EAAQ,MAAM,IAAIA,EAAQ,cAAc,IAAIA,EAAQ,WAAW,GAC3FI,EAAgB,KAAK,cAAc,IAAID,CAAG,EACrCC,IACHA,EAAgB,CAAC,EACjB,KAAK,cAAc,IAAID,EAAKC,CAAa,GAG3C,IAAMC,EAAe,KAAK,aAAa,IAAIF,CAAG,EAC9C,GAAIE,GAAgBA,EAAa,OAAS,EAAG,CAC3C,IAAMC,EAAUD,EAAa,IAAI,EACjC,OAAAD,EAAc,KAAKE,CAAO,EACtBR,IAAU,GACZ,KAAK,UAAU,cAAcQ,EAASL,EAAOC,EAAQF,EAAS,KAAK,cAAcL,EAAUE,CAAI,CAAE,EAE5FS,CACT,CACF,CAEAC,GAAO,QAAQ,iBAAkB,gCAAgCX,EAAO,KAAK,IAAIA,EAAO,MAAM,EAAE,EAChG,IAAMU,EAAU,KAAK,UAAU,gBAAgBL,EAAOC,EAAQF,EAAS,KAAK,cAAcL,EAAUE,CAAI,CAAC,EAEzG,OAAI,KAAK,OAAO,gBACdO,EAAe,KAAKE,CAAO,EAC3B,KAAK,cAAc,IAAIA,EAASH,CAAI,GAE/BG,CACT,CACA,YAAYE,EAAiBb,EAA2Bc,EAAsC,CAC5F,OAAKA,IACHA,EAAW,GAEN,KAAK,SAAS,MAAM,UAAW,6BAA8B,IAAM,CACxE,IAAMC,EAAWF,EAAG,MAAM,OAAO,CAACG,EAAGC,IAAMD,EAAIC,CAAC,EAAIH,EAC9CZ,EAAO,KAAK,UAAU,YACxBW,EAAG,QAASA,EAAG,MAAOA,EAAG,OAAQE,EAAU,KAAK,cAAcf,CAAQ,EAAGc,CAAS,EACtF,OAAO,KAAK,aAAad,EAAUE,CAAI,CACzC,CAAC,CACH,CACA,MAAM,iBAAiBW,EAAiBb,EAA2Bc,EAA+C,CAChH,IAAMI,EAASL,EAAG,OAAO,OAIzB,GAHKC,IACHA,EAAW,GAET,KAAK,YAAY,IAAII,CAAM,EAAG,CAChC,IAAMC,EAAc,KAAK,YAAY,IAAID,CAAM,EAC/C,OAAO,IAAI,QAA2BE,GAAWD,GAAa,KAAKC,CAAO,CAAC,CAC7E,CACA,OAAO,KAAK,SAAS,MAAM,UAAW,kCAAmC,SAAY,CACnF,KAAK,YAAY,IAAIF,EAAQ,CAAC,CAAC,EAC/B,IAAMH,EAAWF,EAAG,MAAM,OAAO,CAACG,EAAGC,IAAMD,EAAIC,CAAC,EAAIH,EAEpD,MAAM,KAAK,UAAU,sBAAsB,EAC3C,IAAMZ,EAAO,KAAK,UAAU,YACxBW,EAAG,QAASA,EAAG,MAAOA,EAAG,OAAQE,EAAU,KAAK,cAAcf,CAAQ,EAAGc,CAAS,EAChFO,EAAa,KAAK,aAAarB,EAAUE,CAAI,EAC7CiB,EAAc,KAAK,YAAY,IAAID,CAAM,EAC/C,YAAK,YAAY,OAAOA,CAAM,EAC9BC,GAAa,QAAQC,GAAWA,EAAQC,CAAU,CAAC,EAC5CA,CACT,CAAC,CACH,CACA,wBAAwBR,EAA+B,CACrD,OAAO,KAAK,SAAS,MAAM,UAAW,yCAA0C,IAAM,CACpF,IAAME,EAAWF,EAAG,MAAM,OAAO,CAACG,EAAGC,IAAMD,EAAIC,CAAC,EAC1Cf,EAAO,KAAK,UAAU,YAAYW,EAAG,QAASA,EAAG,MAAOA,EAAG,OAAQE,EAAW,EAAG,OAAQ,CAAC,EAChG,OAAO,IAAI,aAAab,EAAK,OAAQA,EAAK,WAAYa,CAAQ,CAChE,CAAC,CACH,CACA,eAAeO,EAA0BC,EAA+B,CACtE,IAAIf,EACJ,GAAI,KAAK,OAAO,gBACdA,EAAM,KAAK,cAAc,IAAIc,EAAY,OAAO,EAC5Cd,GAAK,CACHe,GACF,KAAK,cAAc,OAAOf,CAAG,EAE/B,IAAMC,EAAgB,KAAK,cAAc,IAAID,CAAG,EAChD,GAAIC,EAAe,CACjB,IAAMe,
EAAQf,EAAc,QAAQa,EAAY,OAAO,EACvD,GAAIE,IAAU,GAAI,CAChBf,EAAc,OAAOe,EAAO,CAAC,EAC7B,IAAId,EAAe,KAAK,aAAa,IAAIF,CAAG,EACvCE,IACHA,EAAe,CAAC,EAChB,KAAK,aAAa,IAAIF,EAAKE,CAAY,GAEzCA,EAAa,KAAKY,EAAY,OAAO,CACvC,CACF,CACF,EAGE,CAACd,GAAOe,KACVX,GAAO,QAAQ,iBAAkB,4BAA4BU,EAAY,KAAK,IAAIA,EAAY,MAAM,EAAE,EACtG,KAAK,UAAU,cAAcA,EAAY,OAAO,EAEpD,CACA,aAAatB,EAA2BE,EAAgD,CACtF,OAAQF,EAAU,CAChB,IAAK,QACH,OAAOE,aAAgB,WAAaA,EAAO,WAAW,KAAKA,CAAI,EACjE,IAAK,QACH,OAAOA,aAAgB,WAAaA,EAAO,WAAW,KAAKA,CAAI,EACjE,IAAK,OACH,OAAOA,aAAgB,UAAYA,EAAO,UAAU,KAAKA,CAAI,EAC/D,IAAK,SACH,OAAOA,aAAgB,YAAcA,EAAO,YAAY,KAAKA,CAAI,EACnE,IAAK,SACH,OAAOA,aAAgB,YAAcA,EAAO,YAAY,KAAKA,CAAI,EACnE,IAAK,QACL,IAAK,OACH,OAAOA,aAAgB,WAAaA,EAAO,WAAW,KAAKA,CAAI,EACjE,IAAK,UACH,OAAOA,aAAgB,aAAeA,EAAO,aAAa,KAAKA,CAAI,EACrE,IAAK,UACH,OAAOA,aAAgB,aAAeA,EAAO,aAAa,KAAKA,CAAI,EACrE,QACE,MAAM,IAAI,MAAM,mBAAmBF,CAAQ,mBAAmB,CAClE,CACF,CACA,cAAcyB,EAA4BvB,EAAoE,CAC5G,GAAKA,EAGL,OAAQA,aAAgB,aAAgBA,EAAO,IAAI,aAAaA,CAAI,CAmBtE,CACA,cAAcuB,EAA8C,CAC1D,MAAO,OAgBT,CACA,qBAA4B,CAC1B,KAAK,UAAU,oBAAoB,CACrC,CACF,IC3NA,IAmBaC,GAnBbC,GAAAC,EAAA,kBAKAC,KAEAC,KAKAC,KACAC,KACAC,KACAC,KACAC,KAGaT,GAAN,KAAoD,CAWzD,YAA4BU,EAAuCC,EAA0B,CAAjE,aAAAD,EAAuC,aAAAC,EACjE,KAAK,eAAiB,IAAIC,GAAsBF,EAAQ,UAAU,cAAc,EAChF,KAAK,eAAiB,IAAIG,GAAe,KAAK,QAAQ,SAAUH,EAAQ,UAAW,KAAK,cAAc,EACtG,KAAK,eAAiB,IAAII,GACtBJ,EAAQ,UAAW,KAAK,eAAgB,KAAK,QAAQ,SACrD,CAAC,cAAeA,EAAQ,mBAAqB,MAAM,CAAC,EACxD,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAA2B,IAAI,IACpC,KAAK,KAAOA,EAAQ,KACpB,KAAK,eAAiB,IAAI,IAC1B,KAAK,eAAiB,IAAI,GAC5B,CAEA,wBAAyB,CACvB,OAAO,IAAIK,GAAsB,IAAI,CACvC,CACA,mBAAmBC,EAAoB,CACrC,IAAMC,EAAeD,EAAM,UAAU,EAAE,OAAOE,GAAKA,EAAE,OAAS,IAAMA,EAAE,MAAM,EAAE,IAAIA,GAAKA,EAAE,OAAQ,MAAM,EACvG,KAAK,aAAe,IAAI,IAAID,CAAY,CAC1C,CACA,cAAcE,EAA8B,CAC1C,OAAO,KAAK,aAAe,KAAK,aAAa,IAAIA,CAAQ,EAAI,EAC/D,CACA,eAAeA,EAA2B,CACxC,KAAK,aAAa,IAAIA,CAAQ,CAChC,CACA,eAAeA,EAAqBC,EAA0C,CAC5E,OAAIA,EACK,KAAK,uBAAuB,IAAID,CAAQ,EAExC,KAAK,yBAAyB,IAAIA,CAAQ,CAErD,CACA,eAAeA,EAAqBE,EAA0BD,EAAW,GAAa,CACpFE,GAAO,QAAQ,sBAAuB,+BAA+B,EACjEF,EACF,KAAK,uBAAuB,IAAID,EAAUE,CAAW,EAErD,KAAK,yBAAyB,IAAIF,EAAUE,CAAW,CAE3D,CACA,SAAgB,CACd,KAAK,eAAe,QAAQ,EAC5B,KAAK,eAAe,oBAAoB,EACxC,KAAK,uBAAuB,QAAQE,GAAM,KAAK,eAAe,eAAeA,EAAI,EAAI,CAAC,EACtF,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAAyB,QAAQA,GAAM,KAAK,eAAe,eAAeA,EAAI,EAAI,CAAC,EACxF,KAAK,yBAA2B,IAAI,GACtC,CACA,QAAQC,EAAkBC,EAA0BT,EAAwB,CAC1E,IAAMU,EAAKC,GAAgBH,EAAMC,EAAQG,EAAsB,EAC/D,MAAO,CAAC,KAAMF,EAAG,OAAQ,QAASA,EAAG,OAASA,EAAG,OAAOF,EAAMR,CAAK,EAAIQ,CAAI,CAC7E,CACF,ICjEO,SAASK,GAAqBC,EAAmC,CACtE,IAAIC,EAAI,EACR,KAAOA,EAAID,EAAI,QACEA,EAAIC,CAAC,EAAE,EADD,EAAEA,EAEvB,CAIF,OAAOA,EAAI,CACb,CA3BA,IAgCaC,GAhCbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAA,KACAC,KAyBaL,GAAN,KAAmB,CAwCxB,YAAYM,EAA2BC,EAAc,CAFrD,KAAQ,iBAAmB,GAogB3B,KAAQ,YAA0B,CAAC,EAjgBjC,KAAK,GAAKD,EACV,KAAK,QAAUC,EAEf,KAAK,cAAc,EACnB,KAAK,aAAe,KAAK,mBAAmB,EAC5C,KAAK,YAAc,KAAK,kBAAkB,EAC1C,KAAK,qBAAqB,CAC5B,CAEA,gBAAgBC,EAAeC,EAAgBC,EAAsBC,EAA4C,CAC/G,IAAML,EAAK,KAAK,GAEVM,EAAUN,EAAG,cAAc,EAEjCA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrCN,EAAG,cAAcA,EAAG,WAAYA,EAAG,mBAAoBA,EAAG,OAAO,EACjEA,EAAG,cAAcA,EAAG,WAAYA,EAAG,mBAAoBA,EAAG,OAAO,EACjEA,EAAG,cAAcA,EAAG,WAAYA,EAAG,eAAgBA,EAAG,aAAa,EACnEA,EAAG,cAAcA,EAAG,WAAYA,EAAG,eAAgBA,EAAG,aAAa,EACnE,IAAMO,EAASF,EAAOD,EAAQ,OAAOC,EAAMH,EAAQC,CAAM,EAAI,KAC7D,OAAAH,EAAG,WACCA,EAAG,WACH,EACAI,EAAQ,eAAgBF,EAAOC,EAC/B,EACAC,EAAQ,OAAQA,EAAQ,YAAaG,CAAM,EAC/C,KAAK,WAAW,EACTD,CACT,CACA,cACIA,EAAuBJ,EAAeC,EAAgBC,EAAsBC,EAAmC,CACjH,IAAML,EAAK,KAAK,GAChBA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrC,IAAMC,EAASH,EAAQ,OAAOC,EAAMH,EAAQC,CAAM,EAClDH,EAAG,cACCA,EAAG,WA
CH,EACA,EACA,EACAE,EAAOC,EAAQC,EAAQ,OAAQA,EAAQ,YAAaG,CAAM,EAC9D,KAAK,WAAW,CAClB,CACA,kBAAkBD,EAAuBJ,EAAeC,EAAsB,CAC5E,IAAMH,EAAK,KAAK,GAEhBA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrCN,EAAG,gBAAgBA,EAAG,YAAa,KAAK,WAAW,EACnDA,EAAG,qBACCA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EACrD,CAAC,EACL,KAAK,WAAW,EAChBN,EAAG,SAAS,EAAG,EAAGE,EAAOC,CAAM,EAC/BH,EAAG,QAAQ,EAAG,EAAGE,EAAOC,CAAM,CAChC,CACA,YACIG,EAAuBJ,EAAeC,EAAgBK,EAAkBC,EACxEC,EAAyC,CAC3C,IAAMV,EAAK,KAAK,GACXU,IACHA,EAAW,GAER,KAAK,kBACR,KAAK,kBAAkBJ,EAASJ,EAAOC,CAAM,EAE/C,IAAMC,EAAU,KAAK,WAAWK,EAAUC,CAAQ,EAC5CH,EAASH,EAAQ,SAASF,EAAQC,CAAM,EAE9C,OAAAH,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrCN,EAAG,qBACCA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EACrD,CAAC,EAELN,EAAG,WAAW,EAAG,EAAGE,EAAOC,EAAQH,EAAG,KAAMI,EAAQ,YAAaG,CAAM,EACvE,KAAK,WAAW,EAETH,EAAQ,OAAOG,EAAQC,CAAQ,CACxC,CAEA,oBAA8B,CAE5B,MAAO,EACT,CACA,kBAA2B,CACzB,IAAMR,EAAK,KAAK,GAEhB,MAAO,UADGA,EAAG,aAAa,KAAK,GAAG,cAAc,EAC1BA,EAAG,QAAS,EACpC,CACA,mBAAkC,CAChC,OAAO,KAAK,GAAG,aAAa,KAAK,GAAG,kBAAkB,CACxD,CACA,uBAA0C,CACxC,OAAO,KAAK,GAAG,aAAa,KAAK,GAAG,mBAAmB,CACzD,CACA,oBAAoBW,EAAwBC,EAAkC,CAC5E,IAAMZ,EAAK,KAAK,GAChBA,EAAG,oBAAoBW,EAAgB,EAAGX,EAAG,MAAO,GAAO,GAAI,CAAC,EAChEA,EAAG,wBAAwBW,CAAc,EACrCC,IAAuB,KACzBZ,EAAG,oBAAoBY,EAAoB,EAAGZ,EAAG,MAAO,GAAO,GAAI,EAAE,EACrEA,EAAG,wBAAwBY,CAAkB,GAE/C,KAAK,WAAW,CAClB,CACA,cACIC,EACAC,EACgB,CAClB,IAAMd,EAAK,KAAK,GACVe,EAAUf,EAAG,cAAc,EAGjC,OAAAA,EAAG,aAAae,EAASF,CAAY,EACrCb,EAAG,aAAae,EAASD,CAAU,EACnCd,EAAG,YAAYe,CAAO,EACfA,CACT,CACA,cAAcC,EAAsBC,EAAiC,CACnE,IAAMjB,EAAK,KAAK,GACVkB,EAASlB,EAAG,aAAaiB,CAAU,EACzC,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,0CAA0CD,CAAU,EAAE,EAKxE,GAFAjB,EAAG,aAAakB,EAAQF,CAAY,EACpChB,EAAG,cAAckB,CAAM,EACnBlB,EAAG,mBAAmBkB,EAAQlB,EAAG,cAAc,IAAM,GACvD,MAAM,IAAI,MAAM,6BAA6BA,EAAG,iBAAiBkB,CAAM,CAAC;AAAA;AAAA,EAE5EF,CAAY,EAAE,EAEZ,OAAOE,CACT,CACA,aAAaA,EAA2B,CACtC,KAAK,GAAG,aAAaA,CAAM,CAC7B,CACA,qBAAqBZ,EAAuBa,EAAkBC,EAA2C,CACvG,IAAMpB,EAAK,KAAK,GAChBA,EAAG,cAAcA,EAAG,SAAWmB,CAAQ,EACvC,KAAK,WAAW,EAChBnB,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrC,KAAK,WAAW,EAChBN,EAAG,UAAUoB,EAAeD,CAAQ,EACpC,KAAK,WAAW,CAClB,CACA,MAAa,CACX,KAAK,GAAG,WAAW,KAAK,GAAG,eAAgB,EAAG,CAAC,EAC/C,KAAK,WAAW,CAClB,CACA,YAAmB,CACjB,GAAIE,GAAI,MAAO,CACb,IAAMrB,EAAK,KAAK,GACVsB,EAAQtB,EAAG,SAAS,EACtBuB,EAAQ,GACZ,OAAQD,EAAO,CACb,KAAMtB,EAAG,SACP,OACF,KAAMA,EAAG,aACPuB,EAAQ,eACR,MACF,KAAMvB,EAAG,cACPuB,EAAQ,gBACR,MACF,KAAMvB,EAAG,kBACPuB,EAAQ,oBACR,MACF,KAAMvB,EAAG,8BACPuB,EAAQ,gCACR,MACF,KAAMvB,EAAG,cACPuB,EAAQ,gBACR,MACF,KAAMvB,EAAG,mBACPuB,EAAQ,qBACR,MACF,QACEA,EAAQ,wBAAwBD,EAAM,SAAS,EAAE,CAAC,EACtD,CACA,MAAM,IAAI,MAAMC,CAAK,CACvB,CACF,CACA,cAAcjB,EAA6B,CACzC,KAAK,GAAG,cAAcA,CAAO,CAC/B,CACA,cAAcS,EAA6B,CACzC,KAAK,GAAG,cAAcA,CAAO,CAC/B,CACA,WAAWN,EAA4BC,EAAkBc,IAAyD,CAChH,GAAI,KAAK,UAAY,EACnB,OAAO,IAAiBC,GAAsB,KAAK,GAA8Bf,CAAQ,EAG3F,OAAQD,EAAU,CAChB,IAAK,QACH,OAAIe,IAAU,GAA2B,KAAK,yBACrC,IAAiBE,GAAqB,KAAK,GAAIhB,CAAQ,EAEvD,IAAiBgB,GACpB,KAAK,GAAIhB,EAAU,KAAK,0BAA2B,cAAc,EAEzE,IAAK,MACH,MAAM,IAAI,MAAM,iBAAiB,EACnC,IAAK,OACH,OAAO,IAAiBiB,GAAiB,KAAK,GAAIjB,CAAQ,EAC5D,QACE,MAAM,IAAI,MAAM,qBAAqBD,CAAQ,EAAE,CACnD,CACF,CACA,qBAA4B,CAC1B,IAAMT,EAAK,KAAK,GAChB,QAAS4B,EAAO,EAAGA,EAAO,KAAK,qBAAsB,EAAEA,EACrD5B,EAAG,cAAcA,EAAG,SAAW4B,CAAI,EACnC5B,EAAG,YAAYA,EAAG,WAAY,IAAI,CAEtC,CACA,SAAgB,CACd,GAAI,KAAK,SACP,OAEF,IAAMA,EAAK,KAAK,GAChBA,EAAG,gBAAgBA,EAAG,YAAa,IAAI,EACvCA,EAAG,kBAAkB,KAAK,WAAW,EACrCA,EAAG,WAAWA,EAAG,aAAc,IAAI,EACnCA,EAAG,aAAa,KAAK,YAAY,EACjCA,EAAG,WAAWA,EAAG,qBAAsB,IAAI,EAC3CA,EAAG,OAAO,EACV,KAAK,SAAW,EAClB,CAEQ,uBAAsC,CAE5C,OAAO,IAAI,aAAa,CACtB,GAAM,EAAM,EAAK,EAAK,EACtB,GAAM,GAAM,EAAK,EAAK,EACtB,EAAM,EAAM,
EAAK,EAAK,EACtB,EAAM,GAAM,EAAK,EAAK,CACxB,CAAC,CACH,CACQ,oBAAkC,CACxC,IAAMA,EAAK,KAAK,GACVO,EAASP,EAAG,aAAa,EAC/B,GAAI,CAACO,EACH,MAAM,IAAI,MAAM,8BAA8B,EAEhD,IAAMsB,EAAW,KAAK,sBAAsB,EAC5C,OAAA7B,EAAG,WAAWA,EAAG,aAAcO,CAAM,EACrCP,EAAG,WAAWA,EAAG,aAAc6B,EAAU7B,EAAG,WAAW,EACvD,KAAK,WAAW,EACTO,CACT,CACQ,mBAAsC,CAC5C,IAAMuB,EAAK,KAAK,GAAG,kBAAkB,EACrC,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,iCAAiC,EAEnD,OAAOA,CACT,CAEQ,sBAA6B,CACnC,IAAM9B,EAAK,KAAK,GAMhB,GAJA,KAAK,sCAAwC,KAAK,yCAAyC,EAC3F,KAAK,yBAA2B,KAAK,mBAAmB,EACxD,KAAK,2BAA6B,KAAK,qBAAqB,EAExD,KAAK,UAAY,GAAK,CAAC,KAAK,2BAA6B,CAAC,KAAK,yBACjE,MAAM,IAAI,MAAM,wDAAwD,EAG1E,KAAK,iBAAmB,CAAC,KAAK,0BAA4B,KAAK,kBAAkB,EAGjF,KAAK,eAAiBA,EAAG,aAAaA,EAAG,gBAAgB,EACzD,KAAK,qBAAuBA,EAAG,aAAaA,EAAG,uBAAuB,EAMlE,KAAK,OAMX,CACQ,eAAsB,CACxB,KAAK,UAAY,GACnB,KAAK,0BAA4B,KAAK,GAAG,aAAa,wBAAwB,EAC9E,KAAK,kCAAoC,KAAK,GAAG,aAAa,iCAAiC,IAE/F,KAAK,sBAAwB,KAAK,GAAG,aAAa,mBAAmB,EACrE,KAAK,0BAA4B,KAAK,GAAG,aAAa,wBAAwB,EAElF,CAEQ,0CAAoD,CAG1D,IAAMA,EAAK,KAAK,GACVM,EAAUN,EAAG,cAAc,EACjCA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EAErC,IAAMyB,EAAiB,KAAK,UAAY,EAAK/B,EAAoC,QAAUA,EAAG,KAC9FA,EAAG,WAAWA,EAAG,WAAY,EAAG+B,EAAgB,EAAG,EAAG,EAAG/B,EAAG,KAAMA,EAAG,MAAO,IAAI,EAEhF,IAAMgC,EAAchC,EAAG,kBAAkB,EACzCA,EAAG,gBAAgBA,EAAG,YAAagC,CAAW,EAE9ChC,EAAG,qBAAqBA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EAAS,CAAC,EAEvF,IAAM2B,EAAajC,EAAG,uBAAuBA,EAAG,WAAW,IAAMA,EAAG,qBACpE,OAAAA,EAAG,YAAYA,EAAG,WAAY,IAAI,EAClCA,EAAG,gBAAgBA,EAAG,YAAa,IAAI,EACvCA,EAAG,cAAcM,CAAO,EACxBN,EAAG,kBAAkBgC,CAAW,EACzBC,CACT,CAEQ,oBAA8B,CACpC,GAAI,KAAK,UAAY,GACnB,GAAI,CAAC,KAAK,0BACR,MAAO,WAGL,CAAC,KAAK,sBACR,MAAO,GAGX,OAAO,KAAK,qCACd,CAEQ,sBAAgC,CACtC,GAAI,KAAK,UAAY,GACnB,GAAI,CAAC,KAAK,0BACR,MAAO,WAGL,CAAC,KAAK,uBAGN,CAAC,KAAK,GAAG,aAAa,0BAA0B,EAClD,MAAO,GAGX,OAAO,KAAK,qCACd,CAKQ,mBAA6B,CAInC,IAAMjC,EAAK,KAAK,GAEZM,EACA0B,EACAnB,EACAqB,EACAnB,EAEJ,GAAI,CACFT,EAAUN,EAAG,cAAc,EAC3BgC,EAAchC,EAAG,kBAAkB,EACnCA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EAGrC,IAAMyB,EAAiB,KAAK,UAAY,EAAK/B,EAAoC,QAAUA,EAAG,KAuB9F,OAtBAA,EAAG,WAAWA,EAAG,WAAY,EAAG+B,EAAgB,EAAG,EAAG,EAAG/B,EAAG,KAAMA,EAAG,MAAO,IAAI,EAEhFA,EAAG,gBAAgBA,EAAG,YAAagC,CAAW,EAC9ChC,EAAG,qBAAqBA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EAAS,CAAC,EAEvFN,EAAG,OAAOA,EAAG,KAAK,EAElBa,EAAeb,EAAG,aAAaA,EAAG,aAAa,EAC3C,CAACa,IAGLb,EAAG,aAAaa,EAAc,eAAe,EAC7Cb,EAAG,cAAca,CAAY,EAE7BqB,EAAiBlC,EAAG,aAAaA,EAAG,eAAe,EAC/C,CAACkC,KAGLlC,EAAG,aAAakC,EAAgB,4DAA4D,EAC5FlC,EAAG,cAAckC,CAAc,EAE/BnB,EAAUf,EAAG,cAAc,EACvB,CAACe,GACI,IAETf,EAAG,aAAae,EAASF,CAAY,EACrCb,EAAG,aAAae,EAASmB,CAAc,EACvClC,EAAG,YAAYe,CAAO,EACtBf,EAAG,WAAWe,CAAO,EAErBf,EAAG,WAAWA,EAAG,OAAQ,EAAG,CAAC,EACtBA,EAAG,SAAS,IAAMA,EAAG,SAE9B,QAAE,CACAA,EAAG,QAAQA,EAAG,KAAK,EAEfe,GACFf,EAAG,cAAce,CAAO,EAEtBF,GACFb,EAAG,aAAaa,CAAY,EAE1BqB,GACFlC,EAAG,aAAakC,CAAc,EAE5BF,IACFhC,EAAG,gBAAgBA,EAAG,YAAa,IAAI,EACvCA,EAAG,kBAAkBgC,CAAW,GAE9B1B,IACFN,EAAG,YAAYA,EAAG,WAAY,IAAI,EAClCA,EAAG,cAAcM,CAAO,EAE5B,CACF,CAEA,YAAyB,CACvB,GAAI,KAAK,UAAY,GAAK,KAAK,kCAAmC,CAChE,IAAM6B,EAAM,KAAK,GACXC,EAAM,KAAK,kCAEXC,EAAQF,EAAI,YAAY,EAC9B,OAAAA,EAAI,WAAWC,EAAI,iBAAkBC,CAAK,EACnCA,CACT,KAEE,OAAM,IAAI,MAAM,2CAA2C,CAE/D,CAEA,UAAW,CACT,GAAI,KAAK,UAAY,GAAK,KAAK,kCAAmC,CAChE,IAAMF,EAAM,KAAK,GACXC,EAAM,KAAK,kCACjBD,EAAI,SAASC,EAAI,gBAAgB,EACjC,MACF,KAEE,OAAM,IAAI,MAAM,0CAA0C,CAE9D,CAEA,uBAAuBC,EAA4B,CACjD,IAAIC,EAAY,GAAOC,EAAW,GAClC,GAAI,KAAK,UAAY,GAAK,KAAK,kCAAmC,CAChE,IAAMJ,EAAM,KAAK,GACXC,EAAM,KAAK,kCAEjBE,EAAYH,EAAI,kBAAkBE,EAAOF,EAAI,sBAAsB,EACnEI,EAAWJ,EAAI,aAAaC,EAAI,gBAAgB,CAClD,KAEE,OAAM,IAAI,MAAM,0CAA0C,EAG5D,OAAOE,GAAa,CAACC,CACvB,CAEA,eAAeF,EAA2B,CACxC,IAAIG,EAAc,EAClB,GAAI,KAAK,UAA
Y,EAAG,CACtB,IAAML,EAAM,KAAK,GACjBK,EAAcL,EAAI,kBAAkBE,EAAOF,EAAI,YAAY,EAC3DA,EAAI,YAAYE,CAAK,CACvB,KAEE,OAAM,IAAI,MAAM,0CAA0C,EAG5D,OAAOG,EAAc,GACvB,CAEA,MAAM,uBAAuBH,EAAoC,CAC/D,aAAMI,GAAY,IAAM,KAAK,uBAAuBJ,CAAK,CAAC,EACnD,KAAK,eAAeA,CAAK,CAClC,CAEA,MAAa,uBAAuC,CAClD,IAAMK,EAAe,KAAK,YAAY,KAAK,EAAE,EAC7C,OAAO,KAAK,UAAUA,CAAY,CACpC,CAEQ,YAAY1C,EAAyC,CAC3D,IAAI2C,EACER,EAAMnC,EACNqC,EAAQF,EAAI,UAAUA,EAAI,2BAA4B,CAAC,EAC7D,OAAAnC,EAAG,MAAM,EACLqC,IAAU,KACZM,EAAgB,IAAM,GAEtBA,EAAgB,IAAM,CACpB,IAAMC,EAAST,EAAI,eAAeE,EAAO,EAAG,CAAC,EAC7C,OAAOO,IAAWT,EAAI,kBAAoBS,IAAWT,EAAI,mBAC3D,EAEK,CAAC,MAAAE,EAAO,cAAAM,CAAa,CAC9B,CAEA,MAAM,UAAUD,EAA4B,CAC1C,OAAO,IAAI,QAAcG,GAAW,CAC7B,KAAK,cAAc,IAAMH,EAAa,cAAc,EAAG,IAAMG,EAAQ,CAAC,CAC7E,CAAC,CACH,CAIA,WAAkB,CAEhB,IAAMC,EAAQvD,GAAqB,KAAK,YAAY,IAAIwD,GAAKA,EAAE,QAAQ,CAAC,EACxE,QAAStD,EAAI,EAAGA,GAAKqD,EAAO,EAAErD,EAAG,CAC/B,GAAM,CAAC,UAAAuD,CAAS,EAAI,KAAK,YAAYvD,CAAC,EACtCuD,EAAU,CACZ,CACA,KAAK,YAAc,KAAK,YAAY,MAAMF,EAAQ,CAAC,CACrD,CAEA,MAAc,cAAcG,EAAyBD,EAAuB,CAC1E,KAAK,YAAY,KAAK,CAAC,SAAAC,EAAU,UAAAD,CAAS,CAAC,EACvC,OAAK,YAAY,OAAS,IAK9B,MAAMP,GAAY,KAChB,KAAK,UAAU,EAER,KAAK,YAAY,SAAW,EACpC,CACH,CACF,ICrlBO,SAASS,GAAmBC,EAA4C,CAC7E,IAAIC,EAOJ,IANK,CAACD,GAAaA,IAAc,WAAa,WAAYE,GACxDD,EAAUC,GAAM,QACN,CAACF,GAAaA,IAAc,UAAY,UAAWE,KAC7DD,EAAUC,GAAM,OAGd,CAACD,EACH,GAAI,CAEF,IAAME,EAAkBC,GAAsB,EAC9CH,EAAUI,GAAsBF,EAAiBH,CAAS,CAC5D,MAAY,CAEV,IAAMM,EAASC,GAAa,EAC5BN,EAAUI,GAAsBC,EAAQN,CAAS,CACnD,CAGFA,EAAYA,GAAaC,EAAQ,UAAY,EAAI,QAAU,SAC3D,IAAMO,EAAKP,EAAQ,GAInB,OAFAC,GAAMF,CAAS,EAAIC,EAEfO,EAAG,cAAc,GACnB,OAAON,GAAMF,CAAS,EACfD,GAAmBC,CAAS,IAGrCQ,EAAG,QAAQA,EAAG,UAAU,EACxBA,EAAG,QAAQA,EAAG,YAAY,EAC1BA,EAAG,QAAQA,EAAG,KAAK,EACnBA,EAAG,QAAQA,EAAG,MAAM,EACpBA,EAAG,QAAQA,EAAG,mBAAmB,EACjCA,EAAG,QAAQA,EAAG,eAAe,EAC7BA,EAAG,OAAOA,EAAG,YAAY,EACzBA,EAAG,OAAOA,EAAG,SAAS,EACtBA,EAAG,SAASA,EAAG,IAAI,EAEZP,EACT,CAEO,SAASI,GAAsBC,EAA2BN,EAA4C,CAC3G,IAAMS,EAA4C,CAChD,MAAO,GACP,MAAO,GACP,UAAW,GACX,QAAS,GACT,sBAAuB,GACvB,mBAAoB,GACpB,6BAA8B,EAChC,EACID,EACEE,EAAKD,EACX,IAAI,CAACT,GAAaA,IAAc,YAC9BQ,EAAKF,EAAO,WAAW,SAAUI,CAAE,EAC/BF,GACF,GAAI,CACF,OAAO,IAAIG,GAAaH,EAAI,CAAC,CAC/B,OAASI,EAAK,CACZC,GAAO,QAAQ,mBAAoB,kEAAkED,CAAG,EAAE,CAC5G,CAGJ,IAAI,CAACZ,GAAaA,IAAc,WAC9BQ,EAAKF,EAAO,WAAW,QAASI,CAAE,GAAKJ,EAAO,WAAW,qBAAsBI,CAAE,EAC7EF,GACF,GAAI,CACF,OAAO,IAAIG,GAAaH,EAAI,CAAC,CAC/B,OAASI,EAAK,CACZC,GAAO,QACH,mBACA,yFAAyFD,CAAG,EAAE,CACpG,CAIJ,MAAM,IAAI,MAAM,wBAAwB,CAC1C,CAKA,SAASL,IAAkC,CACzC,GAAI,OAAO,SAAa,IACtB,MAAM,IAAI,UAAU,oDAAoD,EAE1E,IAAMD,EAA4B,SAAS,cAAc,QAAQ,EACjE,OAAAA,EAAO,MAAQ,EACfA,EAAO,OAAS,EACTA,CACT,CAEA,SAASF,IAA2C,CAClD,GAAI,OAAO,gBAAoB,IAC7B,MAAM,IAAI,UAAU,qEAAqE,EAE3F,OAAO,IAAI,gBAAgB,EAAG,CAAC,CACjC,CAjHA,IAOMF,GAPNY,GAAAC,EAAA,kBAGAC,KAEAC,KAEMf,GAA6C,CAAC,ICPpD,IAkBagB,GAlBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAGAC,KAEAC,KAOaN,GAAN,KAAsC,CAG3C,IAAI,WAAwC,CAC1C,OAAOO,GAAI,MAAM,SACnB,CACA,IAAI,UAAUC,EAAmC,CAC/CD,GAAI,MAAM,UAAYC,CACxB,CAEA,IAAI,oBAAuC,CACzC,OAAOD,GAAI,MAAM,kBACnB,CACA,IAAI,mBAAmBC,EAAyB,CAC9CD,GAAI,MAAM,mBAAqBC,CACjC,CAEA,IAAI,kBAAuD,CACzD,OAAOD,GAAI,MAAM,gBACnB,CACA,IAAI,iBAAiBC,EAA2C,CAC9DD,GAAI,MAAM,iBAAmBC,CAC/B,CAEA,IAAI,MAA0B,CAC5B,OAAOD,GAAI,MAAM,IACnB,CACA,IAAI,KAAKC,EAA0B,CACjCD,GAAI,MAAM,KAAOC,CACnB,CAEA,IAAI,OAA2B,CAC7B,OAAOD,GAAI,MAAM,KACnB,CACA,IAAI,MAAMC,EAA0B,CAClCD,GAAI,MAAM,MAAQC,CACpB,CAEA,YAAsB,CACpB,GAAI,CACF,YAAK,UAAYC,GAAmB,KAAK,SAAS,EAC9C,OAAO,KAAK,oBAAuB,WACrC,KAAK,mBAAqB,IAExB,OAAO,KAAK,kBAAqB,WACnC,KAAK,iBAAmB,QAEtB,OAAO,KAAK,MAAS,YACvB,KAAK,KAAO,IAEV,OAAO,KAAK,OAAU,YACxB,KAAK,MAAQ,IAGfC,GAAO,WAAWH,EAAG,EAEhBA,GAAI,MAAM,SACb,OAAO,eAAeA,GAA
I,MAAO,UAAW,CAAC,MAAO,KAAK,UAAU,EAAE,CAAC,EAGxEG,GAAO,QACH,eACA,yBAAyB,OAAO,KAAK,SAAS,6BAC1C,KAAK,kBAAkB,uBAAuB,KAAK,gBAAgB,WAAW,KAAK,IAAI,YACvF,KAAK,KAAK,GAAG,EACd,EACT,OAAS,EAAG,CACV,OAAAA,GAAO,QAAQ,eAAgB,sCAAsC,CAAC,EAAE,EACjE,EACT,CACF,CACA,qBAAqBC,EAA0C,CAC7D,OAAO,IAAIC,GAAoB,KAAMD,CAAO,CAC9C,CACA,SAAgB,CACd,KAAK,UAAU,QAAQ,CACzB,CACF,ICRA,eAAsBE,GAAeC,EAAmD,CACtF,GAAKA,EAEE,CACL,IAAMC,EAAQ,OAAOD,GAAS,SAAW,CAACA,CAAI,EAAIA,EAElD,QAAWE,KAAeD,EAAO,CAC/B,IAAME,EAAQC,GAAc,IAAIF,CAAW,EAC3C,GAAIC,EACF,OAAOA,EAGT,IAAME,EAAU,MAAMC,GAAeJ,CAAW,EAChD,GAAIG,EACF,OAAOA,CAEX,CACF,KAfE,QAAON,GAAe,CAAC,OAAO,CAAC,EAiBjC,MAAM,IAAI,MAAM,6BAA6B,CAC/C,CAEA,eAAeO,GAAeJ,EAAiD,CAC7E,IAAMK,EAAaF,GAEnB,GAAI,OAAOE,EAAWL,CAAW,EAAM,KAAeM,GAAUD,EAAWL,CAAW,CAAC,EAAG,CACxF,IAAMG,EAAUE,EAAWL,CAAW,EAClCO,EAAOJ,EAAQ,WAAW,EAI9B,GAHI,OAAOI,GAAS,UAAY,SAAUA,IACxCA,EAAO,MAAMA,GAEXA,EACF,OAAAL,GAAc,IAAIF,EAAaG,CAAO,EAC/BA,CAEX,CAGF,CAEA,SAASG,GAAUE,EAAc,CAE/B,IAAMC,EAAID,EAGV,MACI,eAAgBC,GAAK,OAAOA,EAAE,YAAe,YAC7C,yBAA0BA,GAAK,OAAOA,EAAE,sBAAyB,YACjE,YAAaA,GAAK,OAAOA,EAAE,SAAY,UAM7C,CA7IA,IA6EMP,GAEOC,GA/EbO,GAAAC,EAAA,kBAGAC,KA0EMV,GAAsC,IAAI,IAEnCC,GAAqC,CAChD,MAAO,IAAIU,EACb,ICjFA,IASMC,GAIOC,GAbbC,GAAAC,EAAA,kBAKAC,KAIMJ,GAAN,KAAe,CACb,YAAmBK,EAAqBC,EAAkB,CAAvC,QAAAD,EAAqB,UAAAC,CAAmB,CAC7D,EAEaL,GAAN,KAAoB,CACzB,YAAoBM,EAAcC,EAAyBC,EAA8B,CAArE,WAAAF,EAAuC,cAAAE,EACzD,KAAK,WAAWD,CAAG,CACrB,CAEA,WAAWA,EAAiB,CAC1B,KAAK,SAAS,MAAM,UAAW,2BAA4B,IAAM,CAC/D,IAAME,EAAa,KAAK,MAAM,SAAS,EACvC,GAAIA,EAAW,SAAWF,EAAI,OAC5B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,KAAK,KAAOA,EAAI,IAAI,CAACH,EAAIM,IAAM,IAAIX,GAASK,EAAIK,EAAWC,CAAC,CAAC,CAAC,EAC9D,KAAK,MAAM,EAGX,KAAK,SAAW,CAAC,EACjB,KAAK,KAAK,QAAQ,CAACN,EAAIM,IAAM,CAC3B,IAAIC,EAAW,GACf,QAAWC,KAASR,EAAG,KAAK,OAC1B,GACI,CAAC,KAAK,QAAQQ,CAAK,GAChB,KAAK,MAAM,gBAAgB,EAAE,QAAQA,CAAK,IAAM,GACrD,CACAD,EAAW,GACX,KACF,CAEEA,GACF,KAAK,SAAS,KAAKD,CAAC,CAExB,CAAC,CACH,CAAC,CACH,CAEA,OAAQ,CACN,KAAK,QAAU,KAAK,MAAM,UAAU,EAAE,IAAIA,GAAKA,EAAE,MAAM,CACzD,CAEA,MAAM,QAAQG,EAAgCC,EAA0C,CACtF,OAAO,KAAK,SAAS,MAAM,UAAW,wBAAyB,SAAY,CAEzE,KAAK,MAAM,EAGX,IAAMC,EAAmBF,EAAe,uBAAuB,EAGzDG,EAAc,KAAK,MAAM,gBAAgB,EAC/C,GAAIF,EAAY,SAAWE,EAAY,OACrC,MAAM,IAAI,MAAM,kFACZF,EAAY,MAAM,cAAcE,EAAY,MAAM,EAAE,EAG1DF,EAAY,QAAQ,CAACF,EAAOF,IAAM,CAChC,IAAMO,EAAQD,EAAYN,CAAC,EAC3B,KAAK,QAAQO,CAAK,EAAIL,CACxB,CAAC,EAGD,IAAMM,EAAqB,KAAK,SAAS,MAAM,CAAC,EAG1CC,EAAc,KAAK,MAAM,UAAU,EACnCV,EAAa,KAAK,MAAM,SAAS,EAEnCW,EAAO,EACX,KAAOA,EAAOF,EAAS,QAAQ,CAC7B,IAAMG,EAAcH,EAASE,GAAM,EAC7BE,EAAS,KAAK,KAAKD,CAAW,EAG9BE,EAAYD,EAAO,KAAK,OAAO,IAAIZ,GAAK,KAAK,QAAQA,CAAC,CAAC,EAC7D,GAAIa,EAAU,QAAQ,MAAS,IAAM,GACnC,MAAM,IAAI,MAAM,kCAAkCD,EAAO,IAAI,EAAE,EAIjE,IAAME,EAAeD,EACrBE,GAAO,QACH,WACA,cAAcH,EAAO,KAAK,IAAI,KAC1BE,EAAa,IAAI,CAACE,EAAGhB,IAAM,IAAIY,EAAO,KAAK,OAAOZ,CAAC,CAAC,MAAMgB,EAAE,IAAI,IAAIA,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,GAAG,EAE9G,IAAMC,EAAa,MAAM,KAAK,SAAS,MACnC,OAAQL,EAAO,KAAK,KAAM,SAAYA,EAAO,GAAG,KAAKP,EAAkBS,EAAcF,EAAO,GAAG,OAAO,CAAC,EAG3G,GAAIK,EAAW,SAAWL,EAAO,KAAK,QAAQ,OAC5C,MAAM,IAAI,MAAM,qDAAqD,EAIvEK,EAAW,QAAQ,CAACC,EAAQlB,IAAM,CAChC,IAAMmB,EAAIP,EAAO,KAAK,QAAQZ,CAAC,EAC/B,GAAI,KAAK,QAAQmB,CAAC,EAChB,MAAM,IAAI,MAAM,WAAWA,CAAC,2BAA2BP,EAAO,KAAK,IAAI,EAAE,EAE3E,KAAK,QAAQO,CAAC,EAAID,CACpB,CAAC,EAGD,IAAME,EAAkB,IAAI,IAC5BH,EAAW,QAAQ,CAACI,EAASrB,IAAM,CACjC,IAAMmB,EAAIP,EAAO,KAAK,QAAQZ,CAAC,EAC/B,QAAWsB,KAA8Bb,EAAYU,CAAC,EAAE,GAAI,CAC1D,IAAMI,EAAwBxB,EAAWuB,CAA0B,EAC/DrB,EAAW,GACf,QAAWuB,KAAKD,EAAsB,OACpC,GAAI,CAAC,KAAK,QAAQC,CAAC,EAAG,CACpBvB,EAAW,GACX,KACF,CAEEA,GACFmB,EAAgB,IAAIE,CAA0B,CAElD,CACF,CAAC,EACDd,EAAS,KAAK,GAAGY,CAAe,CAClC,CA
EA,IAAMF,EAAmB,CAAC,EAC1B,QAASlB,EAAI,EAAGA,EAAI,KAAK,MAAM,iBAAiB,EAAE,OAAQA,IAAK,CAC7D,IAAMyB,EAAc,KAAK,MAAM,iBAAiB,EAAEzB,CAAC,EAC7C0B,EAAe,KAAK,QAAQD,CAAW,EAC7C,GAAIC,IAAiB,OACnB,MAAM,IAAI,MAAM,oBAAoBD,CAAW,uBAAuB,EAEpEA,IAAgB,EAClB,MAAMC,EAAa,QAAQ,EAG3BA,EAAa,KAEfR,EAAO,KAAKQ,CAAY,CAC1B,CACA,OAAAX,GAAO,QAAQ,WAAY,+BAA+B,EAC1DV,EAAiB,QAAQ,EAClBa,CACT,CAAC,CACH,CAKF,IC/JA,IAMAS,GAIOC,GAqBMC,GA/BbC,GAAAC,EAAA,kBAKAC,KACAL,GAAmB,SACnBM,KACAC,KAEON,GAASO,GAAY,aAAa,IAqB5BN,GAAN,MAAMO,CAAU,CACrB,YAAYC,EAAsE,CAEhF,GADA,KAAK,YAAc,IAAI,IACnBA,GAAe,KAAkC,CACnD,QAAWC,KAAQD,EACbC,aAAgB,QAAK,eACvB,KAAK,YAAY,IAAIA,EAAK,KAAM,CAACF,EAAU,SAASE,CAAI,EAAGF,EAAU,QAAQE,CAAI,CAAC,CAAC,EAC1EA,aAAgBV,GAAO,WAChC,KAAK,YAAY,IAAIU,EAAK,KAAK,EAAI,CAACF,EAAU,SAASE,CAAI,EAAGF,EAAU,QAAQE,CAAI,CAAC,CAAC,EAG1F,GAAI,KAAK,YAAY,KAAOD,EAAW,OACrC,MAAM,IAAI,MAAM,4BAA4B,CAEhD,CACF,CAEA,IAAIE,EAAaC,EAA0BC,EAAyB,CAClE,KAAK,YAAY,IAAIF,EAAK,CAACE,EAAOD,CAAI,CAAC,CACzC,CACA,OAAOD,EAAmB,CACxB,KAAK,YAAY,OAAOA,CAAG,CAC7B,CACA,SAASA,EAAaG,EAA+C,CACnE,OAAO,KAAK,IAAIH,EAAK,QAASG,CAAY,CAC5C,CAEA,OAAOH,EAAaG,EAA6C,CAC/D,OAAO,KAAK,IAAIH,EAAK,MAAOG,CAAY,CAC1C,CAEA,UAAUH,EAAaG,EAAgD,CACrE,OAAO,KAAK,IAAIH,EAAK,SAAUG,CAAY,CAC7C,CAEA,UAAUH,EAAaG,EAAgD,CACrE,OAAO,KAAK,IAAIH,EAAK,SAAUG,CAAY,CAC7C,CAEA,UAAUH,EAAaG,EAAgD,CACrE,OAAO,KAAK,IAAIH,EAAK,SAAUG,CAAY,CAC7C,CAEA,QAAQH,EAAaG,EAA8C,CACjE,OAAO,KAAK,IAAIH,EAAK,OAAQG,CAAY,CAC3C,CAEA,WAAWH,EAAaG,EAAiD,CACvE,OAAO,KAAK,IAAIH,EAAK,UAAWG,CAAY,CAC9C,CAEA,WAAWH,EAAaG,EAAiD,CACvE,OAAO,KAAK,IAAIH,EAAK,UAAWG,CAAY,CAC9C,CAEQ,IACJH,EAAaC,EAA0BE,EAAqB,CAC9D,IAAMC,EAAe,KAAK,YAAY,IAAIJ,CAAG,EAC7C,GAAII,IAAiB,OAAW,CAC9B,GAAID,IAAiB,OACnB,OAAOA,EAET,MAAM,IAAI,MAAM,iCAAiCH,CAAG,EAAE,CACxD,CACA,GAAII,EAAa,CAAC,IAAMH,EACtB,MAAM,IAAI,MAAM,2BAA2BA,CAAI,YAAYG,EAAa,CAAC,CAAC,EAAE,EAE9E,OAAOA,EAAa,CAAC,CACvB,CAEA,OAAe,QAAQL,EAAiE,CACtF,IAAME,EAAOF,aAAgB,QAAK,eAAkBA,EAAM,KAAQA,EAA0B,KAAK,EACjG,OAAQE,EAAM,CACZ,KAAK,QAAK,eAAe,cAAc,MACrC,MAAO,QACT,KAAK,QAAK,eAAe,cAAc,IACrC,MAAO,MACT,KAAK,QAAK,eAAe,cAAc,OACrC,MAAO,SACT,KAAK,QAAK,eAAe,cAAc,OACrC,MAAO,SACT,KAAK,QAAK,eAAe,cAAc,OACrC,MAAO,SACT,KAAK,QAAK,eAAe,cAAc,KACrC,MAAO,OACT,KAAK,QAAK,eAAe,cAAc,QACrC,MAAO,UACT,KAAK,QAAK,eAAe,cAAc,QACrC,MAAO,UACT,QACE,MAAM,IAAI,MAAM,wCAAwC,QAAK,eAAe,cAAcA,CAAI,CAAC,EAAE,CACrG,CACF,CAEA,OAAe,SAASF,EAA6C,CACnE,IAAMM,EAAWN,aAAgB,QAAK,eAAiBA,EAAK,KAAQA,EAA0B,KAAK,EACnG,GAAIM,IAAa,QAAK,eAAe,cAAc,OAASA,IAAa,QAAK,eAAe,cAAc,OACzG,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMH,EAAQ,KAAK,gBAAgBH,CAAI,EAGvC,GAAIM,IAAa,QAAK,eAAe,cAAc,KAAOC,GAAS,OAAOJ,CAAK,EAC7E,OAAOI,GAAS,aAAaJ,CAAgC,EAI/D,GAAIG,IAAa,QAAK,eAAe,cAAc,KAAM,CACvD,IAAME,EAAOL,EACPM,EAAwB,IAAI,MAAcD,EAAI,MAAM,EAE1D,QAASE,EAAI,EAAGA,EAAIF,EAAI,OAAQE,IAAK,CACnC,IAAMC,EAAYH,EAAIE,CAAC,EACvBD,EAAYC,CAAC,EAAIH,GAAS,aAAaI,CAAS,CAClD,CAEA,OAAOF,CACT,CAGA,GAAIH,IAAa,QAAK,eAAe,cAAc,OACjD,OAAON,aAAgB,QAAK,eAAiBY,GAAO,UAAUT,CAA0B,EAC3CS,GAAO,cAAcT,CAAsB,EAI1F,GAAIG,IAAa,QAAK,eAAe,cAAc,QAAS,CAC1D,GAAIN,aAAgB,QAAK,eAEvB,OADqBG,EACD,IAAIA,GAASS,GAAO,UAAUT,CAAK,CAAC,EACnD,GAAIH,aAAgBV,GAAO,UAEhC,OADqBa,EACD,IAAIA,GAASS,GAAO,cAAcT,CAAK,CAAC,CAEhE,CAGA,OAAIG,IAAa,QAAK,eAAe,cAAc,QAG7CN,aAAgB,QAAK,eAEhBa,GADYV,CACe,EAKlCG,IAAa,QAAK,eAAe,cAAc,SAG7CN,aAAgB,QAAK,eACHG,EACD,IAAIU,EAAgB,EAIpCV,CACT,CAEA,OAAe,gBAAgBH,EAA6C,CAC1E,OAAOA,aAAiB,QAAK,eAAkB,KAAK,8BAA8BA,CAAI,EACvC,KAAK,6BAA6BA,CAAwB,CAC3G,CAEA,OAAe,8BAA8BA,EAA4B,CACvE,OAAQA,EAAK,KAAO,CAClB,KAAK,QAAK,eAAe,cAAc,MACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,IACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc
,MACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,OACd,KAAK,QAAK,eAAe,cAAc,KACrC,OAAOA,EAAK,KACd,KAAK,QAAK,eAAe,cAAc,QACrC,OAAOA,EAAK,QACd,KAAK,QAAK,eAAe,cAAc,QACrC,OAAOA,EAAK,QACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,OACd,QACE,MAAM,IAAI,MAAM,+BAA+B,QAAK,eAAe,cAAcA,EAAK,IAAK,CAAC,EAAE,CAClG,CACF,CAEA,OAAe,6BAA6BA,EAAwB,CAClE,OAAQA,EAAK,KAAK,EAAG,CACnB,KAAKV,GAAO,cAAc,MACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,IACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,OACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,OACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,MACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,OACxB,OAAOU,EAAK,YAAY,EAC1B,KAAKV,GAAO,cAAc,KAAM,CAC9B,IAAMwB,EAAO,CAAC,EACd,QAASJ,EAAI,EAAGA,EAAIV,EAAK,WAAW,EAAGU,IACrCI,EAAK,KAAKd,EAAK,KAAKU,CAAC,CAAE,EAEzB,OAAOI,CACT,CACA,KAAKxB,GAAO,cAAc,QAAS,CACjC,IAAMyB,EAAU,CAAC,EACjB,QAASL,EAAI,EAAGA,EAAIV,EAAK,cAAc,EAAGU,IACxCK,EAAQ,KAAKf,EAAK,QAAQU,CAAC,CAAC,EAE9B,OAAOK,CACT,CACA,KAAKzB,GAAO,cAAc,QAAS,CACjC,IAAM0B,EAAU,CAAC,EACjB,QAASN,EAAI,EAAGA,EAAIV,EAAK,cAAc,EAAGU,IACxCM,EAAQ,KAAKhB,EAAK,QAAQU,CAAC,CAAE,EAE/B,OAAOM,CACT,CAQA,QACE,MAAM,IAAI,MAAM,+BAA+B1B,GAAO,cAAcU,EAAK,KAAK,CAAC,CAAC,EAAE,CACtF,CACF,CAGF,IC/QA,IAKAiB,GAIOC,GAmEMC,GAQPC,GAwBAC,GAyBAC,GArINC,GAAAC,EAAA,kBAGAC,KACAC,KACAT,GAAmB,SACnBU,KACAC,KAEOV,GAASW,GAAY,aAAa,IAmE5BV,GAAQ,CAInB,KAAM,CAACW,EAA2CC,IAC9C,IAAIT,GAAUQ,EAAYC,CAAW,CAC3C,EAEMX,GAAN,KAAmC,CACjC,YAAYY,EAAkC,CAC5C,KAAK,MAAQ,OACb,KAAK,IAAM,CAAC,EACZ,KAAK,OAAS,OACd,KAAK,KAAO,OAERA,IACF,KAAK,KAAOC,GAAU,yBAAyBD,EAAU,KAAM,UAAW,EAE9E,CAGA,IAAI,MAAO,CACT,OAAO,KAAK,KACd,CAEA,IAAI,IAAK,CACP,OAAO,KAAK,GACd,CAGF,EAEMX,GAAN,KAAiC,CAC/B,YAAYa,EAAyCC,EAAe,CAC9DD,aAAsB,QAAK,WAC7B,KAAK,KAAOA,EAAW,KACvB,KAAK,OAASA,EAAW,OACzB,KAAK,WAAa,IAAIE,GAAUF,EAAW,SAAS,GAC3CA,aAAsBhB,GAAO,OACtC,KAAK,KAAOiB,GAAQD,EAAW,KAAK,EACpC,KAAK,OAASA,EAAW,OAAO,EAChC,KAAK,WAAa,IAAIE,GAAUH,GAAU,8BAA8BC,CAAU,CAAC,GAGrF,KAAK,OAAS,CAAC,EACf,KAAK,QAAU,CAAC,EAChB,KAAK,YAAc,EACrB,CAQF,EAEMZ,GAAN,KAAoD,CAWlD,YAAYe,EAAsCC,EAAsC,CACtF,GAAI,CAACD,EACH,MAAM,IAAI,UAAU,gBAAgB,EAItC,KAAK,WAAWA,CAAK,EAGrB,KAAK,eAAeC,CAAgB,EAGpC,KAAK,eAAe,CACtB,CAEA,iBAAqC,CACnC,OAAO,KAAK,gBACd,CAEA,eAAmC,CACjC,OAAO,KAAK,cACd,CAEA,kBAAsC,CACpC,OAAO,KAAK,iBACd,CAEA,gBAAoC,CAClC,OAAO,KAAK,eACd,CAEA,WAAoC,CAClC,OAAO,KAAK,QACd,CAEA,UAAkC,CAChC,OAAO,KAAK,MACd,CAEQ,WAAWD,EAAsC,CAEvD,GAAIA,aAAiB,QAAK,WACxB,KAAK,yBAAyBA,CAAK,UAC1BA,aAAiBnB,GAAO,MACjC,KAAK,wBAAwBmB,CAAK,MAElC,OAAM,IAAI,UAAU,8BAA8B,CAEtD,CACQ,yBAAyBA,EAAyB,CACxD,IAAME,EAAc,IAAI,IACxB,KAAK,SAAW,CAAC,EAEjB,KAAK,iBAAmB,CAAC,EACzB,KAAK,eAAiB,CAAC,EAEvB,KAAK,kBAAoB,CAAC,EAC1B,KAAK,gBAAkB,CAAC,EAExB,KAAK,OAAS,CAAC,EAEf,IAAMC,EAAe,IAAI,IAGzB,GAAI,CAACH,EAAM,MACT,MAAM,IAAI,MAAM,qCAAqC,EAEvD,IAAMI,EAAkB,CAAC,EACzB,QAAW,KAAKJ,EAAM,MAAO,CAC3B,GAAIE,EAAY,IAAI,EAAE,IAAK,EACzB,MAAM,IAAI,MAAM,0BAA0B,EAAE,IAAI,EAAE,EAEpD,IAAMG,EAAe,KAAK,SAAS,KAAK,IAAItB,GAAM,CAAC,CAAC,EAAI,EACxDmB,EAAY,IAAI,EAAE,KAAOG,CAAY,EACrCD,EAAgB,KAAK,EAAE,IAAK,CAC9B,CAGA,GAAI,CAACJ,EAAM,YACT,MAAM,IAAI,MAAM,2CAA2C,EAE7D,QAAW,KAAKA,EAAM,YAAa,CACjC,IAAIM,EAAQJ,EAAY,IAAI,EAAE,IAAK,EACnC,GAAII,IAAU,OAAW,CACvB,IAAMC,EAAQ,IAAIxB,GAClBwB,EAAM,KAAO,CACX,MAAO,CAAC,KAAMX,GAAU,oBAAoB,EAAE,IAAK,CAAC,EACpD,WAAYA,GAAU,wBAAwB,EAAE,QAAS,CAC3D,EACAU,EAAQ,KAAK,SAAS,KAAKC,CAAK,EAAI,EACpCL,EAAY,IAAI,EAAE,KAAOI,CAAK,CAChC,CACA,KAAK,SAASA,CAAK,EAAE,MAAQ,GAC7B,KAAK,SAASA,CAAK,EAAE,OAASE,GAAO,UAAU,CAAC,CAClD,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,SAAS,OAAQ,IACnC,KAAK,SAAS,CAAC,EAAE,SACpB,KAAK,iBAAiB,KAAK,CAAC,EAC5B,KAAK,eAAe,KAAKJ,EAAgB,CAAC,CAAC,GAK/C,GAAI,CAACJ,EAAM,OACT,MAAM,IAAI,MAAM,sCAAsC,EAExD,QAAW,KAAKA,EAAM,OAAQ,CA
C5B,GAAIE,EAAY,IAAI,EAAE,IAAK,EACzB,MAAM,IAAI,MAAM,2BAA2B,EAAE,IAAI,EAAE,EAErD,IAAMG,EAAe,KAAK,SAAS,KAAK,IAAItB,GAAM,CAAC,CAAC,EAAI,EACxDmB,EAAY,IAAI,EAAE,KAAOG,CAAY,EACrC,KAAK,kBAAkB,KAAKA,CAAY,EACxC,KAAK,gBAAgB,KAAK,EAAE,IAAK,CACnC,CAGA,GAAI,CAACL,EAAM,KACT,MAAM,IAAI,MAAM,oCAAoC,EAEtD,QAAWS,KAAaT,EAAM,KAAM,CAClC,GAAI,CAACS,EAAU,KAEb,QAASC,EAAO,GAAIA,IAAQ,CAC1B,IAAMZ,EAAO,WAAWW,EAAU,MAAM,IAAIC,CAAI,GAChD,GAAI,CAACP,EAAa,IAAIL,CAAI,EAAG,CAC3BW,EAAU,KAAOX,EACjB,KACF,CACF,CAGF,GAAIK,EAAa,IAAIM,EAAU,IAAI,EACjC,MAAM,IAAI,MAAM,yBAAyBA,EAAU,IAAI,EAAE,EAE3D,IAAMJ,EAAe,KAAK,OAAO,KAAK,IAAIrB,GAAKyB,CAAS,CAAC,EAAI,EAC7DN,EAAa,IAAIM,EAAU,KAAMJ,CAAY,CAC/C,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAMM,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,KAAK,CAAC,EAC9B,GAAI,CAACS,EAAU,OACb,MAAM,IAAI,MAAM,4BAA4BA,EAAU,IAAI,EAAE,EAE9D,QAAWG,KAAUH,EAAU,OAAQ,CACrC,IAAII,EAAYX,EAAY,IAAIU,CAAM,EAOtC,GANI,OAAOC,EAAc,MACvBA,EAAY,KAAK,SAAS,KAAK,IAAI9B,EAAO,EAAI,EAC9CmB,EAAY,IAAIU,EAAQC,CAAS,GAEnCF,EAAK,QAAQ,KAAKE,CAAS,EAEvB,KAAK,SAASA,CAAS,EAAE,QAAU,OACrC,MAAM,IAAI,MAAM,4CAA4CA,CAAS,EAAE,EAMzE,GAJA,KAAK,SAASA,CAAS,EAAE,MAAQ,EAI7BJ,EAAU,SAAW,WAAY,CACnC,GAAI,CAACA,EAAU,WAAaA,EAAU,UAAU,SAAW,GAAK,CAACA,EAAU,UAAU,CAAC,EAAE,EACtF,MAAM,IAAI,MAAM,qFAAqF,EAEvG,GAAI,CAACA,EAAU,QAAUA,EAAU,OAAO,SAAW,EACnD,MAAM,IAAI,MAAM,0EAA0E,EAE5FE,EAAK,QAAQ,IAAI,EACjBA,EAAK,YAAc,GAEnB,KAAK,SAASE,CAAS,EAAE,MAAQ,GACjC,KAAK,SAASA,CAAS,EAAE,OAASL,GAAO,UAAUC,EAAU,UAAU,CAAC,EAAE,CAAC,CAC7E,CACF,CACF,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAME,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,KAAK,CAAC,EAE9B,GAAI,CAACS,EAAU,MACb,MAAM,IAAI,MAAM,2BAA2BA,EAAU,IAAI,EAAE,EAE7D,QAAWK,KAASL,EAAU,MAAO,CACnC,IAAMI,EAAYX,EAAY,IAAIY,CAAK,EACvC,GAAI,OAAOD,EAAc,IAAa,CAEpC,GAAIC,IAAU,KAAOL,EAAU,MAAM,SAAW,GAAKA,EAAU,MAAM,SAAW,IAC5EA,EAAU,SAAW,SACvB,SAEF,MAAM,IAAI,MAAM,uBAAuBK,CAAK,eAAeL,EAAU,IAAI,EAAE,CAC7E,CACAE,EAAK,OAAO,KAAKE,CAAS,EAE1B,KAAK,SAASA,CAAS,EAAE,IAAI,KAAK,CAAC,CACrC,CACF,CAEA,MAAO,EACT,CAEQ,wBAAwBb,EAAqB,CACnD,IAAME,EAAc,IAAI,IACxB,KAAK,SAAW,CAAC,EAEjB,KAAK,iBAAmB,CAAC,EACzB,KAAK,eAAiB,CAAC,EAEvB,KAAK,kBAAoB,CAAC,EAC1B,KAAK,gBAAkB,CAAC,EAExB,KAAK,OAAS,CAAC,EAEf,IAAMC,EAAe,IAAI,IAGnBC,EAAkB,CAAC,EACzB,QAAS,EAAI,EAAG,EAAIJ,EAAM,aAAa,EAAG,IAAK,CAC7C,IAAMe,EAAYf,EAAM,OAAO,CAAC,EAChC,GAAIE,EAAY,IAAIa,CAAS,EAC3B,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,EAGvD,QAASC,EAAI,EAAGA,EAAIhB,EAAM,eAAe,EAAGgB,IAC1C,GAAIhB,EAAM,SAASgB,CAAC,GAAG,KAAK,IAAMD,EAAW,CAC3C,IAAMR,EAAQ,IAAIxB,GAElB,GADkBiB,EAAM,SAASgB,CAAC,GAAG,KAAK,GAAG,UAAU,IACrCnC,GAAO,cAAc,YACrC,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMc,EAAYK,EAAM,SAASgB,CAAC,EAAG,KAAK,EAAG,MAAM,IAAInC,GAAO,kBAAoB,EAC5EoC,EAAOrB,GAAU,wBAAwBD,EAAU,SAAS,CAAC,EAC7DuB,EAAQvB,EAAU,MAAM,EACxBwB,EAAO,CAAC,EACd,QAASC,EAAI,EAAGA,EAAIF,EAAM,UAAU,EAAIE,IACtCD,EAAK,KAAKE,GAAS,aAAaH,EAAM,IAAIE,CAAC,EAAG,MAAM,EAAG,SAAS,CAAE,CAAC,EAErEb,EAAM,KAAO,CAAC,MAAO,CAAC,KAAAY,CAAI,EAAG,WAAYF,CAAI,EAC7C,IAAMZ,EAAe,KAAK,SAAS,KAAKE,CAAK,EAAI,EACjDL,EAAY,IAAIa,EAAWV,CAAY,EACvCD,EAAgB,KAAKW,CAAS,CAChC,CAEJ,CAEA,QAAS,EAAI,EAAG,EAAIf,EAAM,mBAAmB,EAAG,IAAK,CACnD,IAAMN,EAAcM,EAAM,aAAa,CAAC,EACpCM,EAAQJ,EAAY,IAAIR,EAAY,KAAK,CAAE,EAC/C,GAAIY,IAAU,OAAW,CACvB,IAAMC,EAAQ,IAAIxB,GACZoC,EAAOvB,GAAU,wBAAwBF,CAAW,EACpDuB,EAAOrB,GAAU,wBAAwBF,EAAY,SAAS,CAAC,EACrEa,EAAM,KAAO,CAAC,MAAO,CAAC,KAAAY,CAAI,EAAG,WAAYF,CAAI,EAC7CX,EAAQ,KAAK,SAAS,KAAKC,CAAK,EAAI,EACpCL,EAAY,IAAIR,EAAY,KAAK,EAAIY,CAAK,CAC5C,CACA,KAAK,SAASA,CAAK,EAAE,MAAQ,GAC7B,KAAK,SAASA,CAAK,EAAE,OAASE,GAAO,cAAcd,CAAW,CAChE,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,SAAS,OAAQ,IACnC,KAAK,SAAS,CAAC,EAAE,SACpB,KAAK,iBAAiB,KAAK,CAAC,EAC5B
,KAAK,eAAe,KAAKU,EAAgB,CAAC,CAAC,GAK/C,QAAS,EAAI,EAAG,EAAIJ,EAAM,cAAc,EAAG,IAAK,CAC9C,IAAMsB,EAAatB,EAAM,QAAQ,CAAC,EAClC,GAAIE,EAAY,IAAIoB,CAAU,EAC5B,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,EAEzD,IAAMjB,EAAe,KAAK,SAAS,KAAK,IAAItB,EAAO,EAAI,EACvDmB,EAAY,IAAIoB,EAAYjB,CAAY,EACxC,KAAK,kBAAkB,KAAKA,CAAY,EACxC,KAAK,gBAAgB,KAAKiB,CAAU,CACtC,CAGA,GAAI,CAACtB,EAAM,MACT,MAAM,IAAI,MAAM,oCAAoC,EAEtD,QAAS,EAAI,EAAG,EAAIA,EAAM,YAAY,EAAG,IAAK,CAC5C,IAAMS,EAAYT,EAAM,MAAM,CAAC,EAC3BF,EAAOW,EAAW,KAAK,EAC3B,GAAI,CAACX,EAEH,QAASY,EAAO,EACdZ,EAAO,WAAWW,EAAW,OAAO,CAAC,IAAIC,CAAI,GACzC,EAACP,EAAa,IAAIL,CAAI,EAFRY,IAElB,CAOJ,GAAIP,EAAa,IAAIL,CAAI,EACvB,MAAM,IAAI,MAAM,yBAAyBA,CAAI,EAAE,EAEjD,IAAMO,EAAe,KAAK,OAAO,KAAK,IAAIrB,GAAKyB,EAAYX,CAAI,CAAC,EAAI,EACpEK,EAAa,IAAIL,EAAMO,CAAY,CACrC,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAMM,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,MAAM,CAAC,EAC/B,GAAIS,GAAa,KACf,MAAM,IAAI,MAAM,2BAA2B,CAAC,EAAE,EAEhD,GAAIA,GAAW,cAAc,IAAM,EACjC,MAAM,IAAI,MAAM,4BAA4BA,EAAU,IAAI,EAAE,EAE9D,QAASO,EAAI,EAAGA,EAAIP,GAAW,cAAc,EAAGO,IAAK,CACnD,IAAMJ,EAASH,GAAW,QAAQO,CAAC,EAC/BH,EAAYX,EAAY,IAAIU,CAAM,EAOtC,GANI,OAAOC,EAAc,MACvBA,EAAY,KAAK,SAAS,KAAK,IAAI9B,EAAO,EAAI,EAC9CmB,EAAY,IAAIU,EAAQC,CAAS,GAEnCF,EAAK,QAAQ,KAAKE,CAAS,EAEvB,KAAK,SAASA,CAAS,EAAE,QAAU,OACrC,MAAM,IAAI,MAAM,4CAA4CA,CAAS,EAAE,EAMzE,GAJA,KAAK,SAASA,CAAS,EAAE,MAAQ,EAI7BJ,EAAU,OAAO,IAAM,WAAY,CACrC,GAAIA,EAAU,iBAAiB,IAAM,GAAK,CAACA,EAAU,WAAW,CAAC,EAAG,EAAE,EACpE,MAAM,IAAI,MAAM,qFAAqF,EAEvG,GAAIA,EAAU,cAAc,IAAM,EAChC,MAAM,IAAI,MAAM,0EAA0E,EAE5FE,EAAK,QAAQ,IAAI,EACjBA,EAAK,YAAc,GAEnB,KAAK,SAASE,CAAS,EAAE,MAAQ,GACjC,KAAK,SAASA,CAAS,EAAE,OAASL,GAAO,cAAcC,EAAU,WAAW,CAAC,EAAG,EAAE,CAAE,CACtF,CACF,CACF,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAME,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,MAAM,CAAC,EAE/B,GAAIS,EAAU,aAAa,IAAM,EAC/B,MAAM,IAAI,MAAM,2BAA2BA,EAAU,IAAI,EAAE,EAE7D,QAASO,EAAI,EAAGA,EAAIP,EAAU,aAAa,EAAIO,IAAK,CAClD,IAAMF,EAAQL,EAAU,OAAOO,CAAC,EAC1BH,EAAYX,EAAY,IAAIY,CAAK,EACvC,GAAI,OAAOD,EAAc,IACvB,MAAM,IAAI,MAAM,uBAAuBC,CAAK,eAAeL,EAAW,KAAK,CAAC,EAAE,EAEhFE,EAAK,OAAO,KAAKE,CAAS,EAE1B,KAAK,SAASA,CAAS,EAAE,IAAI,KAAK,CAAC,CACrC,CACF,CACF,CAEQ,gBAAiB,CAEvB,IAAMU,EAAwB,IAAI,IAClC,KAAK,iBAAiB,QAAQC,GAAK,CACpB,KAAK,SAASA,CAAC,EACvB,IAAI,QAAQR,GAAK,CACpBO,EAAS,IAAIP,CAAC,CAChB,CAAC,CACH,CAAC,EAGD,IAAMS,EAAa,MAAM,KAAKF,CAAQ,EAChCG,EAAa,IAAI,MAAc,KAAK,OAAO,MAAM,EAAE,KAAK,OAAO,EAErE,KAAOD,EAAW,OAAS,GAAG,CAC5B,IAAME,EAAYF,EAAW,IAAI,EAE7BC,EAAWC,CAAS,IAAM,OAC5BD,EAAWC,CAAS,EAAI,SAGxBF,EAAW,KAAKE,CAAS,EACzBD,EAAWC,CAAS,EAAI,OAExB,KAAK,OAAOA,CAAS,EAAE,QAAQ,QAASC,GAAsB,CAC5D,IAAMC,EAAO,KAAK,SAASD,CAAiB,EAC5C,GAAI,OAAOC,EAAK,OAAW,IACzB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,QAAUF,EACjB,MAAM,IAAI,MAAM,+EAAgF,EAElGE,EAAK,IAAI,QAASC,GAAwB,CAExC,GAAIJ,EAAWI,CAAmB,IAAM,OACtC,MAAM,IAAI,MAAM,uBAAuB,EAGhCJ,EAAWI,CAAmB,IAAM,SAC3CL,EAAW,KAAKK,CAAmB,CAEvC,CAAC,CACH,CAAC,EAEL,CACF,CAEQ,eAAe7B,EAA4C,CAEjE,KAAK,uBAAuB,EAC5B,KAAK,sBAAsB,EAC3B,KAAK,wBAAwB,EAEzBA,GACFA,EAAiB,eAAe,IAAI,EAItC,KAAK,cAAc,CACrB,CAQA,eAAgB,CACd,IAAI8B,EAAS,EAMPC,EAAa,IAAI,MAAc,KAAK,OAAO,OAAQ,CAAC,EACtDC,EAAgB,EAEpB,QAAST,EAAI,EAAGA,EAAI,KAAK,OAAO,OAAQA,IAEtCQ,EAAWR,CAAC,EAAIS,EACZ,KAAK,OAAOT,CAAC,EAAE,aACbS,IAAkBT,IACpB,KAAK,OAAOS,CAAa,EAAI,KAAK,OAAOT,CAAC,GAE5CS,KAIA,KAAK,OAAOT,CAAC,EAAE,QAAQ,QAAQU,GAAO,CACpC,KAAK,SAASA,CAAG,EAAE,MAAQ,EAC7B,CAAC,EAKL,KAAK,OAAO,OAAOD,EAAe,KAAK,OAAO,OAASA,CAAa,EAGpE,QAAST,EAAI,EAAGA,EAAI,KAAK,SAAS,OAAQA,IAAK,CAC7C,IAAMW,EAAc,KAAK,SAASX,CAAC,EAC/BW,EAAY,QAAU,QAAaA,EAAY,QAAU,IAAMA,EAAY,QAAU,KACvFA,EAAY,MAAQH,EAAWG,EAAY,KAAK,GAGlD,QAASnB,EAAI,EAAGA,EAAImB
,EAAY,IAAI,OAAQnB,IAC1C,GAAImB,EAAY,IAAInB,CAAC,GAAK,EACxBmB,EAAY,IAAInB,CAAC,EAAIgB,EAAWG,EAAY,IAAInB,CAAC,CAAC,MAElD,OAAM,IAAI,MAAM,iCAAiC,CAGvD,CAEAe,EAAS,EAET,QAASP,EAAI,EAAGA,EAAI,KAAK,SAAS,OAAQA,IAAK,CAE7C,GAAI,KAAK,SAASA,CAAC,EAAE,OAAS,IAAM,KAAK,kBAAkB,QAAQA,EAAIO,CAAM,IAAM,GAAI,CACrFA,IACA,KAAK,SAAS,OAAOP,EAAG,CAAC,EACzBA,IACA,QACF,CACA,GAAIO,EAAS,EAAG,CACd,IAAIG,EAAM,GAGN,KAAK,SAASV,CAAC,EAAE,OAAS,QAAa,KAAK,SAASA,CAAC,EAAE,OAAS,IACnEU,EAAM,KAAK,OAAO,KAAK,SAASV,CAAC,EAAE,IAAI,EAAE,QAAQ,QAAQA,EAAIO,CAAM,EAC/DG,IAAQ,KACV,KAAK,OAAO,KAAK,SAASV,CAAC,EAAE,IAAI,EAAE,QAAQU,CAAG,EAAIV,KAIpDU,EAAM,KAAK,iBAAiB,QAAQV,EAAIO,CAAM,EAC1CG,IAAQ,KACV,KAAK,iBAAiBA,CAAG,EAAIV,IAKjC,KAAK,SAASA,CAAC,EAAE,GAAG,QAAQb,GAAQ,CAClCuB,EAAM,KAAK,OAAOvB,CAAI,EAAE,OAAO,QAAQa,EAAIO,CAAM,EAC7CG,IAAQ,KACV,KAAK,OAAOvB,CAAI,EAAE,OAAOuB,CAAG,EAAIV,EAEpC,CAAC,EACG,KAAK,SAASA,CAAC,EAAE,GAAG,SAAW,IAEjCU,EAAM,KAAK,kBAAkB,QAAQV,EAAIO,CAAM,EAC3CG,IAAQ,KACV,KAAK,kBAAkBA,CAAG,EAAIV,GAGpC,CACF,CACF,CAOQ,WAAWG,EAAmB,CACpC,IAAMhB,EAAO,KAAK,OAAOgB,CAAS,EAClC,GAAIhB,EAAK,QAAQ,OAAS,GACxB,QAASa,EAAI,EAAGA,EAAIb,EAAK,QAAQ,OAAQa,IACvC,GAAI,KAAK,SAASb,EAAK,QAAQa,CAAC,CAAC,EAAE,GAAG,OAAS,EAC7C,MAAM,IAAI,MAAM,qFAAqF,EAM3Gb,EAAK,YAAc,GACnB,IAAMyB,EAAkBzB,EAAK,OAAO,CAAC,EAC/B0B,EAAmB1B,EAAK,QAAQ,CAAC,EACjC2B,EAAuB,KAAK,SAASD,CAAgB,EAAE,GAG7D,QAASb,EAAI,EAAGA,EAAIb,EAAK,OAAO,OAAQa,IAAK,CAC3C,IAAMe,EAAW,KAAK,SAAS5B,EAAK,OAAOa,CAAC,CAAC,EAAE,GAAG,QAAQG,CAAS,EAEnE,GAAIY,IAAa,GACf,MAAM,IAAI,MAAM,uEAA2E,EAE7F,KAAK,SAAS5B,EAAK,OAAOa,CAAC,CAAC,EAAE,GAAG,OAAOe,EAAU,CAAC,CACrD,CAGA,KAAK,SAASF,CAAgB,EAAE,IAAM,CAAC,EAGvC,IAAM/B,EAAQ,KAAK,kBAAkB,QAAQ+B,CAAgB,EAM7D,GALI/B,IAAU,KACZ,KAAK,kBAAkBA,CAAK,EAAI8B,GAI9BE,GAAwBA,EAAqB,OAAS,EACxD,QAAWX,KAAaW,EAAsB,CAC5C,IAAME,EAAe,KAAK,OAAOb,CAAS,EAAE,OAAO,QAAQU,CAAgB,EAE3E,GAAIG,IAAiB,GACnB,MAAM,IAAI,MAAM,0EAA8E,EAEhG,KAAK,OAAOb,CAAS,EAAE,OAAOa,CAAY,EAAIJ,EAC9C,KAAK,SAASA,CAAe,EAAE,GAAG,KAAKT,CAAS,CAClD,CAEJ,CAEA,uBAAwB,CACtB,IAAIA,EAAY,EAChB,QAAWhB,KAAQ,KAAK,OAAQ,CAE9B,GAAIA,EAAK,SAAW,UAAW,CAE7B,GAAIA,EAAK,OAAO,SAAW,EACzB,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIA,EAAK,QAAQ,SAAW,GAAKA,EAAK,QAAQ,SAAW,EACvD,MAAM,IAAI,MAAM,sDAAsD,EAGxE,GAAIA,EAAK,QAAQ,SAAW,GAAK,KAAK,SAASA,EAAK,QAAQ,CAAC,CAAC,EAAE,IAAI,SAAW,EAC7E,MAAM,IAAI,MAAM,uEAAwE,EAE1F,KAAK,WAAWgB,CAAS,CAC3B,CACAA,GACF,CACF,CAEA,wBAAyB,CACvB,IAAIA,EAAY,EAChB,QAAWhB,KAAQ,KAAK,OAElBA,EAAK,SAAW,YAClB,KAAK,WAAWgB,CAAS,EAE3BA,GAEJ,CAEA,aAAac,EAAkB,CAC7B,OAAQA,EAAE,OAAQ,CAEhB,IAAK,OACL,IAAK,UACL,IAAK,OACH,MAAO,GACT,QACE,MAAO,EACX,CACF,CAEA,yBAA0B,CACxB,QAAW9B,KAAQ,KAAK,OACtB,GAAIA,EAAK,SAAW,OAAQ,CAC1B,IAAM+B,EAAO,KAAK,SAAS/B,EAAK,QAAQ,CAAC,CAAC,EAAE,IAC5C,GAAI+B,EAAK,SAAW,GAAK,KAAK,aAAa,KAAK,OAAOA,EAAK,CAAC,CAAC,CAAC,EAAG,CAChE,IAAMC,EAAQ,KAAK,OAAOD,EAAK,CAAC,CAAC,EACjC,GAAIC,EAAM,SAAW,OACnB,GAAIA,EAAM,OAAO,SAAW,EAC1B,GAAI,CACFhC,EAAK,WAAW,IACZ,oBAAqB,SACrB,CAACgC,EAAM,WAAW,SAAS,KAAK,EAAGA,EAAM,WAAW,SAAS,KAAK,CAAC,CAAC,CAC1E,MAAY,CACVhC,EAAK,WAAW,IAAI,oBAAqB,SAAU,CAACiC,GAAUC,EAAQ,CAAC,CACzE,SAEEF,EAAM,OAAO,QAAU,GAAK,KAAK,SAASA,EAAM,OAAO,CAAC,CAAC,EAAE,SAAW,QACtE,KAAK,SAASA,EAAM,OAAO,CAAC,CAAC,EAAE,SAAW,OAC5ChC,EAAK,WAAW,IAAI,oBAAqB,SAAU,CACjD,KAAK,SAASgC,EAAM,OAAO,CAAC,CAAC,EAAE,OAAQ,UAAU,CAAC,EAAG,KAAK,SAASA,EAAM,OAAO,CAAC,CAAC,EAAE,OAAQ,UAAU,CAAC,CACzG,CAAC,MAGD,UAGJhC,EAAK,WAAW,IAAI,aAAc,SAAWgC,EAAM,MAAO,EAC1D,KAAK,WAAWD,EAAK,CAAC,CAAC,CACzB,CACF,CAEJ,CACF,ICtyBA,IAQAI,GAGOC,GAEMC,GAbbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAP,GAAmB,SACnBQ,KAEOP,GAASQ,GAAY,aAAa,IAE5BP,GAAN,KAAY,CAEjB,aAAc,CAAC,CAEf,KAAKQ,EAAiBC,EAAsCC,EAA6B,CACvF,IAAIC,EACJ,GAAI,CAACD,EAEH,GAAI,CACF,KAAK,mBAAmB
F,EAAKC,CAAgB,EAC7C,MACF,OAASG,EAAG,CACV,GAAIF,IAAgB,OAClB,MAAME,EAERD,EAAYC,CACd,CAGF,GAAI,CACF,KAAK,kBAAkBJ,EAAKC,CAAgB,CAC9C,OAASG,EAAG,CACV,MAAIF,IAAgB,OACZE,EAGF,IAAI,MAAM,wCAAwCD,CAAS;AAAA,iBAAoBC,CAAC,EAAE,CAC1F,CACF,CAEQ,mBAAmBJ,EAAiBC,EAA4C,CACtF,IAAMI,EAAa,QAAK,WAAW,OAAOL,CAAG,EAE7C,GADkBM,GAAS,aAAaD,EAAW,SAAS,EAC5C,EACd,MAAM,IAAI,MAAM,4CAA4C,EAG9D,KAAK,QACDA,EAAW,YAAY,IAAI,IAAM,CAAC,OAAQ,EAAE,OAAkB,QAASC,GAAS,aAAa,EAAE,OAAQ,CAAC,EAAE,EAE9G,KAAK,OAASC,GAAM,KAAKF,EAAW,MAAQJ,CAAgB,CAC9D,CAEQ,kBAAkBD,EAAiBC,EAA4C,CACrF,IAAMO,EAAK,IAAIC,EAAY,WAAWT,CAAG,EACnCU,EAAWnB,GAAO,iBAAiB,0BAA0BiB,CAAE,EAAE,MAAM,EAE7E,GADkBF,GAAS,aAAaI,EAAS,UAAU,CAAC,EAC5C,EACd,MAAM,IAAI,MAAM,4CAA4C,EAE9D,KAAK,QAAU,CAAC,EAChB,QAASC,EAAI,EAAGA,EAAID,EAAS,kBAAkB,EAAGC,IAAK,CACrD,IAAMC,EAAUF,EAAS,YAAYC,CAAC,EACtC,KAAK,QAAQ,KAAK,CAAC,OAAQC,GAAS,OAAO,EAAa,QAASN,GAAS,aAAaM,EAAQ,QAAQ,CAAE,CAAC,CAAC,CAC7G,CAEA,KAAK,OAASL,GAAM,KAAKG,EAAS,MAAM,EAAIT,CAAgB,CAC9D,CAGA,IAAI,OAAe,CACjB,OAAO,KAAK,MACd,CAGA,IAAI,QAA2B,CAC7B,OAAO,KAAK,OACd,CACF,ICjFA,IAwBaY,GAxBbC,GAAAC,EAAA,kBAGAC,KACAC,KAEAC,KACAC,KAiBaN,GAAN,KAAc,CACnB,YAAYO,EAAyB,CAAC,EAAG,CACvC,KAAK,aAAe,GACpB,KAAK,YAAcA,EAAO,YAC1B,KAAK,SAAWC,GAAS,OAAOD,EAAO,QAAQ,EAC/C,KAAK,QAAU,CAAC,SAAU,KAAK,SAAU,gBAAiB,CAAC,EAAG,eAAgB,CAAC,CAAC,CAClF,CAEA,IAAI,YAAgC,CAClC,OAAO,KAAK,OAAO,MAAM,cAAc,CACzC,CACA,IAAI,aAAiC,CACnC,OAAO,KAAK,OAAO,MAAM,eAAe,CAC1C,CAEA,gBAAiB,CACf,KAAK,SAAS,MAAM,CACtB,CAEA,cAAe,CACb,KAAK,SAAS,KAAK,CACrB,CAKA,MAAM,UAAUE,EAAoCC,EAAqBC,EAAgC,CACvG,MAAM,KAAK,SAAS,MAAM,UAAW,oBAAqB,SAAY,CAEpE,IAAMC,EAAU,MAAMC,GAAe,KAAK,WAAW,EAIrD,GAHA,KAAK,eAAiBD,EAAQ,qBAAqB,KAAK,OAAO,EAE/D,KAAK,OAAS,IAAIE,GACd,OAAOL,GAAQ,SAAU,CAC3B,IAAMM,EAAcN,EAAI,SAAS,MAAM,EAMhC,CAGL,IAAMO,EAAM,MADK,MAAM,MAAMP,CAAG,GACL,YAAY,EACvC,KAAK,WAAW,IAAI,WAAWO,CAAG,EAAGD,CAAW,CAClD,CACF,SAAY,YAAY,OAAON,CAAG,EAMhC,KAAK,WAAWA,CAAG,MANgB,CAEnC,IAAMQ,EAAM,IAAI,WAAWR,EAAKC,GAAc,EAAGC,GAAUF,EAAI,UAAU,EACzE,KAAK,WAAWQ,CAAG,CACrB,CAIF,CAAC,CACH,CAEQ,WAAWC,EAA4BH,EAA6B,CAC1E,GAAI,KAAK,aACP,MAAM,IAAI,MAAM,qBAAqB,EAGvC,KAAK,SAAS,MAAM,UAAW,qBAAsB,IAAM,CAEzD,IAAMI,EACF,KAAK,eAAe,eAAiB,KAAK,eAAsC,OACpF,KAAK,OAAO,KAAKD,EAAgBC,EAAkBJ,CAAW,EAG1D,KAAK,eAAe,oBACtB,KAAK,eAAe,mBAAmB,KAAK,OAAO,KAAK,EAG1D,KAAK,cAAc,KAAK,OAAO,KAAK,EAGpC,KAAK,eAAiB,IAAIK,GAAc,KAAK,OAAO,MAAO,KAAK,KAAM,KAAK,QAAQ,CACrF,CAAC,EAED,KAAK,aAAe,EACtB,CAEA,MAAM,IAAIC,EAAoE,CAC5E,GAAI,CAAC,KAAK,aACR,MAAM,IAAI,MAAM,6BAA6B,EAG/C,OAAO,KAAK,SAAS,MAAM,UAAW,cAAe,SAAY,CAC/D,IAAMC,EAAe,KAAK,2BAA2BD,CAAM,EAErDE,EAAgB,MAAM,KAAK,eAAe,QAAQ,KAAK,eAAgBD,CAAY,EAEzF,OAAO,KAAK,aAAaC,CAAa,CACxC,CAAC,CACH,CAEQ,2BAA2BF,EAAgD,CACjF,IAAMG,EAAkB,KAAK,OAAO,MAAM,cAAc,EAIxD,GAAI,MAAM,QAAQH,CAAM,GACtB,GAAIA,EAAO,SAAWG,EAAgB,OACpC,MAAM,IAAI,MAAM,0CAA0CA,EAAgB,MAAM,YAAYH,EAAO,MAAM,EAAE,MAK1G,CACH,GAAIA,EAAO,OAASG,EAAgB,OAClC,MAAM,IAAI,MAAM,sCAAsCA,EAAgB,MAAM,YAAYH,EAAO,IAAI,EAAE,EAGvG,IAAMI,EAAe,IAAI,MAAcJ,EAAO,IAAI,EAC9CK,EAAoB,EACxB,QAAS,EAAI,EAAG,EAAIF,EAAgB,OAAQ,EAAE,EAAG,CAC/C,IAAMG,EAASN,EAAO,IAAIG,EAAgB,CAAC,CAAC,EAC5C,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,8BAA8B,IAAI,GAAG,EAEvDF,EAAaC,GAAmB,EAAIC,CACtC,CAEAN,EAASI,CACX,CAIA,GAAI,CAAC,KAAK,QAAQ,iBAAmB,KAAK,QAAQ,gBAAgB,SAAW,GAAK,CAAC,KAAK,QAAQ,gBAC5F,KAAK,QAAQ,eAAe,SAAW,EAAG,CAC5C,IAAMG,EAAoB,KAAK,OAAO,MAAM,gBAAgB,EACtDC,EAAc,KAAK,OAAO,MAAM,UAAU,EAE1CC,EAAiB,IAAI,MAAyBF,EAAkB,MAAM,EAE5E,QAASG,EAAI,EAAGA,EAAIH,EAAkB,OAAQ,EAAEG,EAAG,CACjD,IAAMC,EAAaH,EAAYD,EAAkBG,CAAC,CAAC,EACnDD,EAAeC,CAAC,EAAIC,EAAW,KAAM,MAAM,KAI3C,KAAK,QAAQ,gBAAiB,KAAKA,EAAW,KAAM,UAAU,EAC9D,KAAK,QAAQ,eAAgB,KAAKX,EAAOU,CAAC,EAAE,IAAI,CAClD,CAEA,KAAK,wBAAwBD,EAAgBT,EAAQ,EAAI
,CAC3D,MAIE,KAAK,wBAAwB,KAAK,QAAQ,eAAgBA,EAAQ,EAAK,EAIzE,YAAK,yBAAyB,KAAK,QAAQ,gBAAkBA,CAAM,EAE5DA,CACT,CAEQ,yBAAyBY,EAAoCC,EAAuB,CAC1F,QAASH,EAAI,EAAGA,EAAIG,EAAY,OAAQH,IAAK,CAC3C,IAAMI,EAAeF,EAAgBF,CAAC,EAChCK,EAAaF,EAAYH,CAAC,EAAE,KAClC,GAAII,IAAiBC,EACnB,MAAM,IAAI,MAAM,gBAAgBL,CAAC,kCAAkCI,CAAY,aAAaC,CAAU,EAAE,CAE5G,CACF,CAEQ,wBACJN,EAA0CI,EAAuBG,EAA2B,CAC9F,QAASN,EAAI,EAAGA,EAAIG,EAAY,OAAQH,IAAK,CAC3C,IAAMO,EAAeR,EAAeC,CAAC,EAC/BQ,EAAaL,EAAYH,CAAC,EAAE,KAClC,GAAI,CAAC,KAAK,kBAAkBO,EAAcC,EAAYF,CAAgB,EACpE,MAAM,IAAI,MAAM,gBAAgBN,CAAC,oCAAoCO,EAAa,KAAK,GAAG,CAAC,eACvFC,EAAW,KAAK,GAAG,CAAC,GAAG,CAE/B,CACF,CAEQ,kBAAkBD,EAAiCC,EAA+BF,EAC9E,CACV,GAAIC,EAAa,SAAWC,EAAW,OACrC,MAAO,GAGT,QAASR,EAAI,EAAGA,EAAIO,EAAa,OAAQ,EAAEP,EACzC,GAAIO,EAAaP,CAAC,IAAMQ,EAAWR,CAAC,IAAM,CAACM,GAAoBC,EAAaP,CAAC,IAAM,GAEjF,MAAO,GAIX,MAAO,EACT,CAEQ,aAAaR,EAA8C,CACjE,IAAMiB,EAAmB,KAAK,OAAO,MAAM,eAAe,EAC1D,GAAIjB,EAAc,SAAWiB,EAAiB,OAC5C,MAAM,IAAI,MAAM,qEAAqE,EAGvF,IAAMC,EAAS,IAAI,IACnB,QAASV,EAAI,EAAGA,EAAIS,EAAiB,OAAQ,EAAET,EAC7CU,EAAO,IAAID,EAAiBT,CAAC,EAAGR,EAAcQ,CAAC,CAAC,EAGlD,OAAOU,CACT,CAEQ,cAAcC,EAAoB,CACxC,IAAMC,EAAQD,EAAM,SAAS,EAC7B,KAAK,KAAO,IAAI,MAAMC,EAAM,MAAM,EAElC,QAASZ,EAAI,EAAGA,EAAIY,EAAM,OAAQZ,IAChC,KAAK,KAAKA,CAAC,EAAI,KAAK,eAAe,QAAQY,EAAMZ,CAAC,EAAG,KAAK,OAAO,OAAQW,CAAK,CAElF,CAaF,IC/PA,IAQaE,GARbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEaJ,GAAN,KAA8D,CACnE,YAAoBK,EAAkB,CAAlB,aAAAA,EAClB,KAAK,WAAa,KAAK,QAAQ,WAC/B,KAAK,YAAc,KAAK,QAAQ,WAClC,CAEA,MAAM,SAAyB,CAAC,CAGhC,MAAM,IACFC,EAAiCC,EACjCC,EAA2E,CAC7E,IAAMC,EAAW,IAAI,IACrB,QAAWC,KAAQJ,EACjB,GAAI,OAAO,eAAe,KAAKA,EAAOI,CAAI,EAAG,CAC3C,IAAMC,EAAOL,EAAMI,CAAI,EACvBD,EAAS,IACLC,EACA,IAAIE,GACAD,EAAK,KAAMA,EAAK,KAA+B,OAAW,OAC1DA,EAAK,IAA+B,CAAC,CAC/C,CAEF,IAAME,EAAY,MAAM,KAAK,QAAQ,IAAIJ,CAAQ,EAC3CK,EAAoC,CAAC,EAC3C,OAAAD,EAAU,QAAQ,CAACE,EAAQL,IAAS,CAClCI,EAAOJ,CAAI,EAAI,IAAIE,GAAOG,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,CACjE,CAAC,EACMD,CACT,CACA,gBAAuB,CACrB,KAAK,QAAQ,eAAe,CAC9B,CACA,cAAqB,CACnB,KAAK,QAAQ,aAAa,CAC5B,CACF,IC5CA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,mBAAAE,KAAA,IASMC,GAuBOD,GAhCbE,GAAAC,EAAA,kBAMAC,KACAC,KAEMJ,GAAN,KAAuC,CAErC,MAAM,MAAsB,CAAC,CAE7B,MAAM,8BAA8BK,EAAiCC,EAChC,CAKnC,IAAMC,EAAU,IAAIC,GAAQF,CAAoC,EAGhE,OAAI,OAAOD,GAAiB,SAC1B,MAAME,EAAQ,UAAUF,CAAY,EAEpC,MAAME,EAAQ,UAAUF,CAAY,EAG/B,IAAII,GAAqBF,CAAO,CACzC,CACF,EAEaR,GAAgB,IAAIC,KChCjC,IAAAU,GAAAC,EAAA,oBCAA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IA0FMC,GACAC,GAwFCF,GAnLPG,GAAAC,EAAA,kBAsFAC,KACAC,KACAC,KAEMN,GAAc,wBACdC,GAAgB,WAAW,MAAM,OAASD,GAE5CC,KAEF,KAAK,UAAaM,GAA2C,CAC3D,GAAM,CAAC,KAAAC,EAAM,GAAKC,CAAO,EAAIF,EAAG,KAChC,GAAI,CACF,OAAQC,EAAM,CACZ,IAAK,YACHE,GAAsBD,EAAS,IAAI,EAC9B,KACG,IAAM,CACJE,GAAYF,CAAQ,EAAE,KAClB,IAAM,CACJ,YAAY,CAAC,KAAAD,CAAI,CAAC,CACpB,EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,CACP,EACAA,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,MACF,IAAK,UAAW,CACd,GAAM,CAAC,OAAAC,EAAQ,IAAAC,CAAG,EAAIL,EACtBM,GAAOD,EAAKD,CAAM,EACb,KACG,IAAM,CACJ,YAAY,CAAC,KAAAL,CAAI,CAAC,CACpB,EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,YAAa,CAChB,GAAM,CAAC,OAAAI,CAAM,EAAIP,EACXQ,EAAaC,GAAuBF,CAAM,EAChD,YAAY,CAAC,KAAAR,EAAM,IAAKS,CAAU,CAAmB,EACrD,KACF,CACA,IAAK,SAAU,CACb,GAAM,CAAC,MAAAE,EAAO,QAAAC,CAAO,EAAIX,EACzBY,GAAcF,EAAOC,CAAO,EACvB,KACGE,GAAmB,CACjB,YAAY,CAAC,KAAAd,EAAM,IAAKc,CAAe,CAAmB,CAC5D,EACAV,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,UACHW,GAAed,CAAQ,EACvB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,IAAK,MAAO,CACV,GAAM,CAAC,UAAAgB,EAAW,aAAAC,EAAc,OAAAC,EAAQ,cAAAC,EAAe,QAAAP,CAAO,EAAIX,EAClEmB,G
AAIJ,EAAWC,EAAcC,EAAQC,EAAe,IAAI,MAAMA,EAAc,MAAM,EAAE,KAAK,IAAI,EAAGP,CAAO,EAClG,KACGS,GAAW,CACLA,EAAQ,KAAKC,GAAKA,EAAE,CAAC,IAAM,KAAK,EAClC,YAAY,CAAC,KAAAtB,EAAM,IAAK,iDAAiD,CAAC,EAE1E,YACI,CAAC,KAAAA,EAAM,IAAKqB,CAAO,EACnBE,GAA2B,CAAC,GAAGL,EAAQ,GAAGG,CAAO,CAAiC,CAAC,CAE3F,EACAjB,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,gBACHoB,GAAavB,CAAQ,EACrB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,QACF,CACF,OAASI,EAAK,CACZ,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAmB,CAC3C,CACF,GAGKb,GAAQE,GACX,KACCgC,GACG,IAAI,OAAOA,GAAeC,GAAY,CAAC,KAA0B,SAAsB,KAAMlC,EAAW,CAAC,ICtLjH,IAAAmC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IAAIC,GAAEC,GAA6h0CF,GAAni0CG,GAAAC,EAAA,kBAAMF,IAAGD,GAAE,YAAY,IAAI,eAAeC,EAAE,CAAC,EAAE,CAAC,SAASG,GAAG,CAAC,OAAOC,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAED,EAAC,CAAC,SAAS,GAAG,CAAC,OAAOD,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEC,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOJ,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEG,EAAC,CAAC,SAAS,GAAG,CAAC,OAAOL,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEI,EAAC,CAAC,SAAS,GAAG,CAAC,OAAON,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEK,EAAC,CAAC,SAAS,GAAG,CAAC,OAAOP,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEM,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOT,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEQ,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOX,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEU,EAAC,CAAC,IAAI,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAElB,CAAC,EAAEmB,EAAE,IAAI,QAAS,CAACpB,EAAEC,IAAI,CAAC,EAAED,EAAEkB,EAAEjB,CAAC,CAAE,EAAEoB,EAAY,OAAO,QAAjB,SAAwBC,EAAc,OAAO,eAAnB,WAAiCC,EAAED,GAAiB,KAAK,MAAnB,aAAwBH,EAAE,kBAAkB,CAACnB,EAAEC,IAAI,EAAEkB,EAAE,KAAKA,EAAE,GAAG,IAAI,MAAM,IAAInB,EAAEC,CAAC,CAAC,EAAEkB,EAAE,oBAAoB,IAAI,CAAC,OAAOA,EAAE,EAAE,EAAE,IAAIK,EAAE,WAAW,mBAAmB,IAAI,YAAY,OAAO,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,YAAY,IAAIC,EAAE,IAAI,CAAC,IAAMzB,EAAE,CAACA,EAAEC,EAAEG,IAAI,IAAIsB,IAAI,CAAC,IAAMjB,EAAEkB,GAAGC,EAAE3B,IAAI,EAAEyB,EAAE1B,EAAE,GAAG0B,CAAC,EAAE,IAAMG,EAAE5B,IAAI,EAAE,OAAO2B,IAAIC,IAAI7B,EAAE6B,EAAEzB,EAAEwB,CAAC,EAAE3B,EAAEG,EAAE,MAAMuB,IAAIlB,EAAE,IAAI,QAAS,CAACT,EAAEC,KAAI,CAAC6B,GAAG,CAAC,QAAQ9B,EAAE,OAAOC,EAAC,CAAC,CAAE,EAAEyB,CAAC,EAAEzB,EAAED,GAAG,SAASC,IAAI,CAAC,GAAG,CAAC,GAAGkB,EAAE,GAAG,MAAM,MAAM,yBAAyB,EAAE,IAAMf,EAAEe,EAAE,GAAG,CAAC,GAAGlB,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAEyB,EAAE,MAAM1B,EAAE,GAAGC,CAAC,EAAE,GAAGkB,EAAE,KAAKf,EAAE,MAAM,MAAM,kBAAkB,EAAEe,EAAE,IAAI,MAAM,EAAE,IAAMV,EAAEL,EAAE,OAAO,GAAG,EAAEK,EAAE,OAAO,CAAC,IAAIT,EAAE,MAAM,QAAQ,IAAIS,CAAC,EAAE,GAAGT,EAAEA,EAAE,OAAQA,GAAGA,CAAE,EAAE,EAAEA,EAAE,OAAO,MAAM,MAAMA,EAAE,KAAK;AAAA,CAAI,CAAC,CAAC,CAAC,OAAO0B,CAAC,QAAC,CAAQP,EAAE,GAAG,IAAI,CAAC,EAAEA,EAAE,kBAAkBnB,EAAEmB,EAAE,kBAAmB,IAAIA,EAAE,kBAAoBnB,GAAGmB,EAAE,kBAAkBnB,CAAE,EAAEmB,EAAE,QAAQlB,EAAED,EAAEmB,EAAE,QAAS,IAAIA,EAAE,QAAUnB,GAAGmB,EAAE,QAAQnB,CAAE,CAAC,EAAEmB,EAAE,mBAAmBlB,EAAED,EAAEmB,EAAE,mBAAoB,IAAIA,EAAE,mBAAqBnB,GAAGmB,EAAE,mBAAmBnB,CAAE,CAAC,EAAEmB,EAAE,cAAcnB,EAAEmB,EAAE,cAAe,IAAIA,EAAE,cAAgBnB,GAAGmB,EAAE,cAAcnB,CAAE,EAAEyB,EAAE,MAAM,EAAEN,EAAE,SAAS,CAACnB,EAAEC,IAAI,CAAC,GAAGwB,IAAI,EAAazB,IAAX,SAAa,CAAC,CAACmB,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,EAAE,EAAElB,EAAE,IAAMD,EAAEmB,EAAE,GAAGA,EAAE,mBAAmB,CAAClB,EAAEG,EAAEsB,EAAEjB,IAAIT,EAAE,eAAeC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEU,EAAE,cAAclB,GAAGD,EAAE,UAAUC,CAAC,EAAEkB,EAAE,qBAAqB,CAAClB,EAAEG,EAAEsB,IAAI1B,EAAE,iBAAiBC,EAAEG,EAAEsB,CAAC,EAAEP,EAAE,qBAAqBlB,GAAG,CAACD,EAAE,iBAAiBC,CAAC,CAAC,EAAEkB,EAAE,eAAelB,GAAGD,EAAE,WAAWC,CAAC,CAAC,CAAC,EAAE,IAAI8B,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAEd,CAAC,EAAEe,EAAE,iBAAiBC,EAAE,CAACnC,EAAEC,IAAI,CAAC,MAAMA,CAAC,EAAEmC,EAAE,IAAIf,GAAGC,KAAKA,EAAEc
,EAAE,KAAK,SAAS,KAAkB,OAAO,SAApB,KAA8B,SAAS,gBAAgBA,EAAE,SAAS,cAAc,KAAKpC,KAAIoC,EAAEpC,IAAGoC,EAAEA,EAAE,WAAW,OAAO,EAAE,GAAGA,EAAE,OAAO,EAAEA,EAAE,QAAQ,SAAS,EAAE,EAAE,YAAY,GAAG,EAAE,CAAC,EAAsFd,IAAIU,EAAEhC,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAOA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,aAAa,cAAcA,EAAE,KAAK,IAAI,EAAE,IAAI,WAAWA,EAAE,QAAQ,CAAC,GAAG8B,EAAE,CAAC/B,EAAEC,EAAEG,IAAI,CAAC,IAAIsB,EAAE,IAAI,eAAeA,EAAE,KAAK,MAAM1B,EAAE,EAAE,EAAE0B,EAAE,aAAa,cAAcA,EAAE,OAAO,IAAI,CAAMA,EAAE,QAAP,KAAkBA,EAAE,QAAL,GAAaA,EAAE,SAASzB,EAAEyB,EAAE,QAAQ,EAAEtB,EAAE,CAAC,EAAEsB,EAAE,QAAQtB,EAAEsB,EAAE,KAAK,IAAI,CAAC,GAAG,IAAIW,EAAE,QAAQ,IAAI,KAAK,OAAO,EAAEC,EAAE,QAAQ,MAAM,KAAK,OAAO,EAAEC,EAAEF,EAAEG,EAAEF,EAAE,GAAG,OAAO,OAAOnB,EAAEc,CAAC,EAAEA,EAAE,KAAKV,EAAE,CAAY,IAASkB,EAAT,SAAYzC,EAAE,CAAC,GAAG,CAAC,IAAIC,EAAED,EAAE,KAAKI,EAAEH,EAAE,IAAI,GAAYG,IAAT,OAAW,CAAC,IAAIJ,EAAE,CAAC,EAAE,KAAK,UAAUC,GAAGD,EAAE,KAAKC,CAAC,EAAE,KAAK,YAAY,IAAI,CAAC,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAE,QAAQA,KAAKD,EAAEyC,EAAGxC,CAAC,EAAE,KAAK,UAAUwC,CAAE,EAAE,QAAUzC,KAAKC,EAAE,SAASkB,EAAEnB,CAAC,GAAG,CAACmB,EAAEnB,CAAC,EAAE,QAAQmB,EAAEnB,CAAC,EAAE,IAAIC,IAAI,CAAC,YAAY,CAAC,GAAG,cAAc,GAAGD,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAWD,GAAT,UAAauC,EAAEpB,EAAEnB,CAAC,GAAeA,GAAZ,aAAgBwC,EAAErB,EAAEnB,CAAC,IAAIK,GAAEJ,EAAE,WAAWM,GAAE,EAAEmC,GAAEzC,EAAE,UAAU,CAAC,SAAiBG,IAAR,MAAU,CAACuC,GAAG1C,EAAE,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE2C,GAAG3C,EAAE,WAAW,EAAE4C,GAAG,EAAEC,GAAG,EAAEC,KAAIC,GAAG,EAAED,GAAE,IAAI,GAAG,CAACE,GAAGhD,EAAE,cAAcA,EAAE,GAAG,CAAC,OAAOD,EAAE,CAAC,GAAaA,GAAV,SAAY,MAAMA,CAAC,CAAC,MAAgBI,IAAX,SAAa8C,GAAG,GAAGC,GAAG,EAAE,EAAmBlD,EAAE,SAAnB,iBAA6CG,IAAjB,eAAmB2C,IAAGK,GAAG,EAAEhD,IAAIoC,EAAE,oCAAoCpC,CAAC,EAAE,EAAEoC,EAAEvC,CAAC,GAAG,OAAOD,EAAE,CAAC,MAAMqD,GAAG,EAAErD,CAAC,CAAC,EAAjqB,IAAAyC,KAAhBC,GAAEK,GAAE,GAA8qBP,EAAE,YAAYxC,EAAE,CAACA,EAAEA,EAAE,KAAK,GAAG,EAAE,QAAQ,MAAMA,CAAC,CAAC,EAAE,KAAK,MAAM,YAAYA,EAAE,CAAC,YAAY,CAAC,GAAG,QAAQ,KAAKA,EAAE,KAAK,GAAG,EAAE,GAAGkD,GAAG,CAAC,CAAC,CAAC,EAAE/B,EAAE,gBAAgB,CAACnB,EAAEC,IAAI,IAAI,QAASD,GAAG,CAAC0C,GAAEtC,GAAG,CAACA,EAAE,IAAI,YAAY,SAASA,EAAEkD,GAAG,CAAC,EAAErD,EAAEG,CAAC,EAAEJ,EAAE,CAAC,CAAC,CAAE,EAAE,KAAK,qBAAqBA,GAAG,CAAC,MAAMA,EAAE,QAAQA,CAAC,EAAE,KAAK,UAAUyC,CAAE,CAAC,IAAIpC,GAAEkD,GAAEC,EAAElD,GAAEE,GAAEE,GAAEC,GAAEC,GAAEC,GAAEE,GAAE0C,EAAEC,GAAEzC,GAAE0C,GAAE,GAAG,SAASpD,IAAG,CAAC,IAAIP,EAAEK,GAAE,OAAOc,EAAE,MAAMb,GAAE,IAAI,UAAUN,CAAC,EAAEmB,EAAE,OAAOT,GAAE,IAAI,WAAWV,CAAC,EAAEmB,EAAE,OAAOX,GAAE,IAAI,WAAWR,CAAC,EAAEmB,EAAE,QAAQR,GAAE,IAAI,YAAYX,CAAC,EAAEmB,EAAE,OAAOP,GAAE,IAAI,WAAWZ,CAAC,EAAEmB,EAAE,QAAQN,GAAE,IAAI,YAAYb,CAAC,EAAEmB,EAAE,QAAQJ,GAAE,IAAI,aAAaf,CAAC,EAAEmB,EAAE,QAAQF,GAAE,IAAI,aAAajB,CAAC,EAAEmB,EAAE,OAAOsC,EAAE,IAAI,cAAczD,CAAC,EAAEmB,EAAE,QAAQuC,GAAE,IAAI,eAAe1D,CAAC,CAAC,CAAC,GAAG,CAACuB,EAAE,CAAC,GAAGJ,EAAE,WAAWd,GAAEc,EAAE,mBAAmB,GAAGd,GAAE,IAAI,YAAY,OAAO,CAAC,QAAQ,IAAI,QAAQ,MAAM,OAAO,EAAE,CAAC,GAAG,kBAAkBmB,GAAG,MAAMgB,EAAE,6NAA6N,EAAE,MAAM,YAAY,EAAEjC,GAAE,CAAC,CAAC,IAAIqD,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,EAAEC,GAAE,KAAKC,GAAE,KAAK,SAASC,IAAG,CAAC,GAAM,EAAEH,IAAL,IAAgBC,KAAP,OAAW,cAAcA,EAAC,EAAEA,GAAE,MAAMC,IAAG,CAAC,IAAIjE,EAAEiE,GAAEA,GAAE,KAAKjE,EAAE,CAAC,CAAC,CAAC,SAASmE,GAAEnE,EAAE,CAAC,MAAMwC,EAAExC,EAAE,WAAWA,EAAE,GAAG,EAAE2D,GAAE,GAAGH,EAAE,EAAExD,EAAE,IAAI,YAAY,aAAaA,EAAE,0CAA0C,EAAEkB,EAAElB,CAAC,EAAEA,CAAC,CAAC,IAAIoE,GAAGC,GAAGrE,GAAGA,EAAE,WAAW,uCAAuC,EAAEsE,GAAGtE,GAAGA,EAAE,WAAW,SAAS,EAAE,SAASuE,GAAGvE,EAAE,CAAC,GAAGgC,EAAE,OAAOA,EAAEhC,CAAC,EAAE,KAAK,iDAAiD,CAAC,SAASwE,GAAGxE,EAAEC,EAAEG,EAAE,CAAC,OAAO,SAASJ,EAAE,CAAC,GAAGqB,GAAGC,EAAE,CAAC,GAA
e,OAAO,OAAnB,YAA0B,CAACgD,GAAGtE,CAAC,EAAE,OAAO,MAAMA,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMC,GAAG,CAAC,GAAG,CAACA,EAAE,GAAG,KAAK,uCAAuCD,CAAC,IAAI,OAAOC,EAAE,YAAY,CAAC,CAAE,EAAE,MAAO,IAAIsE,GAAGvE,CAAC,CAAE,EAAE,GAAG+B,EAAE,OAAO,IAAI,QAAS,CAAC9B,EAAEG,IAAI,CAAC2B,EAAE/B,EAAGA,GAAGC,EAAE,IAAI,WAAWD,CAAC,CAAC,EAAGI,CAAC,CAAC,CAAE,CAAC,CAAC,OAAO,QAAQ,QAAQ,EAAE,KAAM,IAAImE,GAAGvE,CAAC,CAAE,CAAC,EAAEA,CAAC,EAAE,KAAMA,GAAG,YAAY,YAAYA,EAAEC,CAAC,CAAE,EAAE,KAAKG,EAAGJ,GAAG,CAACwC,EAAE,0CAA0CxC,CAAC,EAAE,EAAEmE,GAAEnE,CAAC,CAAC,CAAE,CAAC,CAAC,SAASsD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,GAAGmB,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG3D,GAAM,GAAG4D,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAE7I,GAAE,GAAG8I,GAAG,EAAEC,EAAE,CAAC,CAAC,CAAC,IAAIC,GAAG,CAAC,QAAQ,CAACrJ,EAAEC,EAAEG,EAAEK,IAAI,CAAC,GAAYU,IAAT,QAAY,CAACA,EAAE,GAAG,MAAO,GAAE,IAAInB,EAAEsJ,GAAGtJ,IAAI,CAAC,GAAG,WAAW,IAAI,IAAIA,EAAEA,EAAE,UAAU,CAAC,GAAG,EAAEA,EAAEmB,EAAE,GAAG,IAAInB,CAAC,GAAG,MAAO,GAAE,GAAGS,KAAK,GAAGR,KAAK,IAAIG,KAAK,GAAGJ,EAAE,WAAW,MAAO,GAAE,GAAG,CAAC,OAAO,EAAE,EAAE,IAAIA,EAAE,SAASC,EAAEA,EAAEG,CAAC,EAAEK,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,MAAO,EAAC,CAAC,EAAE,QAAQ,IAAI,CAACU,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQnB,GAAGmB,EAAE,GAAGnB,CAAC,EAAE,QAAQA,GAAGmB,EAAE,GAAGnB,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAGnB,EAAEC,EAAEG,EAAE,EAAE,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAGnB,EAAEC,EAAEG,CAAC,CAAC,EAAE,QAAQ,IAAiB,OAAO,oBAApB,IAAwC,QAAQJ,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,aAAanB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,UAAUnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,cAAcnB,EAAE,CAAC,MAAMC,EAAE,KAAKG,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,M
AAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,OAAOnB,EAAE,CAAC,IAAIC,EAAE,IAAIG,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,MAAMnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,YAAYnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,OAAOnB,EAAE,CAAC,GAAGC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,UAAUnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,iBAAiBnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,cAAcnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,aAAanB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,aAAanB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,WAAWnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,WAAWnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,eAAenB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,YAAYnB,EAAE,CAAC,KAAKC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEG,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,eAAenB,EAAE,CAAC,UAAUC,EAAE,KAAKqJ,GAAGlJ,CAAC,
EAAE,OAAOsB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,eAAenB,EAAE,CAAC,UAAUC,EAAE,KAAKqJ,GAAGlJ,CAAC,EAAE,OAAOsB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE2H,EAAEzI,EAAEE,EAAEwI,EAAEtI,EAAEE,GAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOwJ,EAAE,OAAO,OAAO,QAAQvJ,EAAE,UAAU,CAACyB,CAAC,EAAE,MAAMjB,EAAE,YAAY,CAACmB,CAAC,EAAE,KAAK,CAAC2H,EAAEzI,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAACZ,EAAE,EAAEc,IAAI,CAAC,EAAE,cAAcE,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW+H,GAAG9H,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE2H,EAAEzI,EAAEE,EAAEwI,EAAEtI,EAAEE,GAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOgB,EAAE,OAAO,OAAO,QAAQf,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASyB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAMjB,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASmB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAAS2H,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAASzI,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACV,EAAE,EAAEoJ,IAAI,CAAC,EAAE,cAActI,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWgI,GAAG/H,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACvB,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE2H,EAAEzI,EAAEE,EAAEwI,EAAEtI,EAAEE,GAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOwJ,EAAE,OAAO,OAAO,QAAQvJ,EAAE,UAAU,CAACyB,CAAC,EAAE,MAAMjB,EAAE,YAAY,CAACmB,CAAC,EAAE,KAAK,CAAC2H,EAAEzI,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAACZ,EAAE,EAAEc,IAAI,CAAC,EAAE,cAAcE,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW+H,GAAG9H,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE2H,EAAEzI,EAAEE,EAAEwI,EAAEtI,EAAEE,GAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOgB,EAAE,OAAO,OAAO,QAAQf,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASyB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAMjB,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASmB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAAS2H,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAASzI,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACV,EAAE,EAAEoJ,IAAI,CAAC,EAAE,cAActI,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWgI,GAAG/H,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACvB,EAAEC,IAAI,CAACkB,EAAE,GAAG,oBAAoBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,EAAEzI,EAAEE,EAAEwI,GAAEtI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC0H,EAAEzI,CAAC,EAAE,KAAK,CAACE,EAAEwI,GAAEtI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,IAAI,CAACkB,EAAE,GAAG,oBAAoBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,EAAEzI,EAAEE,EAAEwI,GAAEtI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC0H,EAAEzI,CAAC,EAAE,KAAK,CAACE,EAAEwI,GAAEtI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CA
AC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,IAAI,CAACkB,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,EAAEzI,EAAEE,EAAEwI,GAAEtI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC0H,EAAEzI,CAAC,EAAE,KAAK,CAACE,EAAEwI,GAAEtI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,IAAI,CAACkB,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,EAAEzI,EAAEE,EAAEwI,GAAEtI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC0H,EAAEzI,CAAC,EAAE,KAAK,CAACE,EAAEwI,GAAEtI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,OAAOnB,EAAE,CAAC,MAAMC,EAAE,KAAKG,EAAE,OAAOsB,EAAE,OAAOjB,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACmB,EAAE,GAAG,SAASnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,SAASnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACG,EAAE,KAAKsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,SAASnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACG,EAAE,KAAKsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,IAAI,CAACkB,EAAE,GAAG,UAAUnB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,SAASnB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,QAAQnB,EAAE,CAAC,KAAKC,EAAE,WAAWG,EAAE,WAAWsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACmB,EAAE,GAAG,SAASnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,SAASnB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,iBAAiBnB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE2H,EAAEzI,EAAEE,EAAEwI,EAAEtI,KAAI,CAACC,EAAE,GAAG,SAASnB,EAAE,CAAC,UAAUC,EAAE,KAAKG,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEsB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,wBAAwB4H,GAAG7I,CAAC,EAAE,YAAYmB,EAAE,eAAe2H,EAAE,mBAAmBzI,EAAE,sBAAsBwI,GAAGtI,CAAC,EAAE,KAAKsI,GAAGE,CAAC,EAAE,YAAYF,GAAGpI,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAClB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE2H,IAAI,CAACpI,EAAE,GAAG,QAAQnB,EAAE,CAAC,OAAOC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEG,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKmB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE2H,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQvJ,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,wBAAwBnB,EAAE,CAAC,QAAQC,EAAE,OAAOG,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,wBAAwBnB,EAAE,CAAC,QAAQC,EAAE,OAAOG,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,SAASnB,EAAE,CAAC,SAASsJ,GAAGrJ,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,MAAMnB,EAAE,CAAC,KAAKC,EAAE,MAAMG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,QAAQC,EAAE,SAASG,EAAE,QAAQ,CAAC,CAACK,EAAE,aAAa,CAAC,CAACiB,EAAE,OAAOE,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE
,GAAG,qBAAqBnB,EAAE,CAAC,QAAQC,EAAE,SAASG,EAAE,QAAQ,CAAC,CAACK,EAAE,aAAa,CAAC,CAACiB,EAAE,OAAOE,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,SAASnB,EAAE,CAAC,UAAU,OAAOC,CAAC,EAAE,QAAQ,OAAOG,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE2H,EAAEzI,EAAEE,IAAI,CAACG,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAASC,EAAE,iBAAiBG,EAAE,gBAAgBsB,EAAE,MAAMjB,EAAE,SAASmB,EAAE,eAAe2H,EAAE,MAAM,KAAK,EAAE,EAAE,SAAS,OAAOzI,CAAC,IAAI,EAAE,OAAOA,CAAC,EAAEyI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,uBAAuB,CAAC,CAACvI,CAAC,CAAC,CAAC,EAAE,QAAQhB,GAAG,CAACmB,EAAE,GAAG,UAAUnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,gBAAgBnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,WAAWnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE2H,EAAEvI,EAAEwI,EAAEtI,EAAEE,EAAEC,GAAEC,GAAEC,GAAEC,EAAEC,GAAEM,KAAI,CAACZ,EAAE,GAAG,OAAOnB,EAAE,CAAC,OAAOsB,GAAE,OAAO,OAAO,SAASrB,EAAE,UAAUyB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,MAAMmB,EAAE,aAAa2H,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEvI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKwI,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEtI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,IAAI,CAAC,CAACjB,EAAE,EAAEmB,KAAI,CAAC,EAAE,WAAW+H,GAAG9H,CAAC,EAAE,kBAAkBC,GAAE,MAAM,KAAKX,EAAE,EAAE,SAASW,KAAI,EAAEM,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ/B,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,sBAAsBnB,EAAE,CAAC,SAASC,EAAE,WAAWG,EAAE,MAAMsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,KAAKC,EAAE,QAAQG,EAAE,WAAW,CAAC,CAACsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,KAAKC,EAAE,QAAQG,EAAE,WAAW,CAAC,CAACsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,cAAcnB,EAAE,CAAC,EAAEC,EAAE,EAAEG,EAAE,cAAcsB,EAAE,KAAKjB,EAAE,UAAUmB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,SAASC,EAAE,iBAAiBG,EAAE,gBAAgBsB,EAAE,MAAMjB,EAAE,SAASmB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,IAAI,CAACkB,EAAE,GAAG,YAAYnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,YAAY,CAAC,CAACC,EAAE,SAASG,EAAE,mBAAmBsB,EAAE,MAAMjB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,yBAAyBnB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAACG,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAGnB,CAAC,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAIkB,EAAE,GAAGnB,EAAEC,EAAEkB,EAAE,GAAG,GAAGA,EAAE,GAAG,MAAM,EAAE,QAAQ,CAACnB,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,yBAAyBnB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAACG,CAAC,CAAC,CAAC,CAAC,EAAE,SAASqE,GAAGzE,EAAEC,EAAEG,EAAE,CAAC,OAAOqJ,GAAI,SAAS,CAAC,MAAMtI,EAAE,GAAGnB,EAAEC,EAAEG,CAAC,CAAC,CAAE,CAAC,CAAC,SAASsJ,GAAG1J,EAAE,CAAC,KAAK,KAAK,aAAa,KAAK,QAAQ,gCAAgCA,CAAC,IAAI,KAAK,OAAOA,CAAC,CAAC,IAAI2J,GAAG3J,GAAG,CAACA,EAAE,UAAU,EAAEA,EAAE,UAAU,IAAI,CAAC,CAAC,EAAE4J,GAAG5J,GAAG,CAAI6J,GAAG,QAAN,IAAeC,GAAG,EAAEC,GAAGF,GAAG,CAAC,CAAC,GAAG,IAAI5J,EAAE4J,GAAG,IAAI,EAAE,GAAG,CAAC5J,EAAE,MAAO,GAAE+J,GAAG,KAAK/J,CAAC,EAAEgK,GAAGjK,EAAE,EAAE,EAAEC,EAAEA,EAAE,GAAGD,EAAE,GAAG,IAAII,EAAE,CAAC,IAAI,MAAM,cAAcJ,EAAE,GAAG,IAAIA,EAAE,GAAG,YAAYA,EAAE,EAAE,EAAE,OAAOC,EAAE,YAAYG,EAAEJ,EAAE,EAAE,EAAE,CAAC,EAAEkK,GAAG,EAAEC,GAAG,CAACnK,EAAEC,KAAKG,IAAI,CAAC,QAAQsB,EAAE,EAAEtB,EAAE,OAAOK,EAAE2J,GAAG,EAAExI,EAAEyI,GAAG,EAAE3I,CAAC,EAAEG,EAAED,IAAI,EAAE2H,EAAE,EAAEA,EAAEnJ,EAAE,OAAOmJ,IAAI,CAAC,IAAIzI,EAAEV,EAAEmJ,C
AAC,EAAY,OAAOzI,GAAjB,UAAoB2C,EAAE5B,EAAE,EAAE0H,CAAC,EAAE,GAAG9F,EAAE5B,EAAE,EAAE0H,EAAE,CAAC,EAAEzI,IAAI2C,EAAE5B,EAAE,EAAE0H,CAAC,EAAE,GAAGvI,EAAE,EAAEa,EAAE,EAAE0H,EAAE,IAAI,CAAC,EAAEzI,EAAE,CAAC,OAAOd,EAAEsK,GAAGtK,EAAE,EAAE0B,EAAEE,EAAE3B,CAAC,EAAEsK,GAAG9J,CAAC,EAAET,CAAC,EAAE,SAASwK,GAAGxK,EAAE,CAAC,GAAGuB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,CAAC,EAAE,GAAGwD,EAAExD,EAAE,EAAE,EAAEkK,IAAI,CAAC,QAAQjK,KAAK+J,GAAGL,GAAG1J,CAAC,EAAE,IAAIA,KAAK4J,GAAGF,GAAG1J,CAAC,EAAE4J,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE9I,EAAE,SAASnB,CAAC,EAAE2D,GAAE,EAAE,CAACxB,EAAEnC,EAAE,IAAI0J,GAAG1J,CAAC,CAAC,CAAC,CAAC,SAASyK,GAAGzK,EAAE,CAAC,GAAGuB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,CAAC,EAAE8I,GAAG9I,CAAC,CAAC,CAAC,IAAI8I,GAAG9I,GAAG,CAAC,GAAGwD,EAAExD,EAAEuB,EAAE,MAAMkJ,GAAGzK,CAAC,EAAE,SAASwK,GAAGxK,CAAC,CAAC,EAAE6J,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAEU,GAAG,CAAC,EAAET,GAAG,CAAC,EAAEU,GAAG3K,GAAG,CAAC,IAAIC,EAAED,EAAE,GAAG,OAAOiK,GAAGhK,CAAC,EAAE4J,GAAG,KAAK7J,CAAC,EAAEgK,GAAG,OAAOA,GAAG,QAAQhK,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,EAAE4K,GAAG3K,CAAC,CAAC,EAAE,SAAS6C,IAAI,CAAC4H,GAAG,QAAS1K,GAAGA,EAAE,CAAE,CAAC,CAAC,IAAI+J,GAAG/J,GAAG,IAAI,QAASC,GAAG,CAACD,EAAE,UAAUI,GAAG,CAAC,IAAIsB,GAAGtB,EAAEA,EAAE,MAAM,IAAI,GAAGA,EAAE,cAAcA,EAAE,cAAc8C,GAAG,EAAE,CAAC,IAAIzC,EAAEwJ,GAAG7J,EAAE,YAAY,EAAEK,EAAEA,EAAE,YAAYL,EAAEA,EAAE,YAAY,EAAEoC,EAAE,0CAA0Cd,CAAC,uBAAuBtB,EAAE,YAAY,qCAAqC,CAAC,MAAsBsB,IAAjB,eAAmB0B,GAAG,EAAkB1B,IAAhB,cAAkBkI,GAAGxJ,CAAC,EAAoBsB,IAAlB,gBAAoBiJ,GAAGV,GAAG7J,EAAE,MAAM,CAAC,EAAiBsB,IAAf,cAAkBtB,EAAEA,EAAE,OAAOsB,EAAEuI,GAAG7J,CAAC,EAAE,OAAO6J,GAAG7J,CAAC,EAAEuJ,GAAGjI,CAAC,EAAEkJ,GAAGxK,CAAC,EAAE4J,GAAG,OAAOA,GAAG,QAAQtI,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,GAAoBA,IAAjB,eAAmBuI,GAAG7J,EAAE,MAAM,EAAE,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAasB,IAAX,UAAc1B,EAAE,OAAO,GAAGC,EAAED,CAAC,GAAa0B,IAAV,QAAY,MAAM,UAAUtB,EAAE,QAAQ,KAAKA,EAAE,IAAI,EAAE,EAAmBA,EAAE,SAAnB,eAA0BJ,EAAE,YAAYI,CAAC,EAAkBsB,IAAhB,cAAkBP,EAAEf,EAAE,OAAO,EAAE,GAAGA,EAAE,IAAI,EAAEsB,GAAGc,EAAE,kCAAkCd,CAAC,EAAE,CAAC,EAAE1B,EAAE,QAAQA,GAAG,CAAC,MAAMwC,EAAE,yBAAyBxC,EAAE,QAAQ,IAAIA,EAAE,MAAM,KAAKA,EAAE,OAAO,EAAE,EAAEA,CAAC,EAAE,IAAII,EAAEsB,EAAE,CAAC,EAAE,IAAItB,IAAI,CAAC,QAAQ,EAAEe,EAAE,eAAef,CAAC,GAAGsB,EAAE,KAAKtB,CAAC,EAAEJ,EAAE,YAAY,CAAC,IAAI,OAAO,SAAS0B,EAAE,WAAWrB,GAAE,WAAWkD,EAAC,CAAC,CAAC,CAAE,EAAE,SAASuG,IAAI,CAAC,IAAI9J,EAAE,IAAI,OAAO,IAAI,IAAI,YAAY,GAAG,EAAE,CAAC,KAAK,SAAS,WAAW,aAAa,KAAK,YAAY,CAAC,EAAE6J,GAAG,KAAK7J,CAAC,CAAC,CAAC,IAAI6K,GAAG7K,GAAG,CAAC,KAAK,EAAEA,EAAE,QAAQA,EAAE,MAAM,EAAEmB,CAAC,CAAC,EAAE0B,GAAG,IAAI,CAAC,IAAI7C,EAAEkD,GAAG,EAAEjD,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEA,EAAE,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE8K,GAAG7K,EAAEA,EAAED,CAAC,EAAEuK,GAAGtK,CAAC,CAAC,EAAEgD,GAAG,CAACjD,EAAEC,IAAI,CAACiK,GAAG,EAAElK,EAAE+K,GAAG/K,EAAEC,CAAC,EAAE,EAAEiK,GAAG1G,EAAExD,EAAEmD,GAAGnD,CAAC,CAAC,EAAE,MAAMgL,EAAE,CAAC,YAAYhL,EAAE,CAAC,KAAK,GAAGA,EAAE,EAAE,CAAC,CAAC,SAAS0E,GAAG1E,EAAEC,EAAEG,EAAE,CAAC,IAAIsB,EAAE,IAAIsJ,GAAGhL,KAAK,CAAC,EAAE,MAAMC,KAAK,EAAEG,KAAK,EAAE,EAAE,EAAEsB,EAAE,GAAG,KAAK,IAAI,CAAC,EAAE,EAAE,EAAE,EAAEA,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEzB,EAAE,EAAE,EAAEyB,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEtB,EAAEJ,CAAC,CAAC,SAASiL,GAAGjL,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOH,EAAE4I,GAAG,EAAE,EAAEnK,EAAEC,EAAEG,EAAEsB,CAAC,EAAEiD,GAAG3E,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAASiD,GAAG3E,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAG1B,KAAK,EAAEC,KAAK,EAAEG,KAAK,EAAEsB,KAAK,EAAWF,IAAT,OAAW,OAAOgB,EAAE,qFAAqF,EAAE,EAAE,IAAI/B,EAAE,CAAC,EAAE,OAAOc,GAAOd,EAAE,SAAN,EAAawK,GAAGjL,EAAEC,EAAEG,EAAEsB,CAAC,GAAG1B,EAAE,CAAC,GAAGI,EAAE,GAAGJ,EAAE,GAAG0B,EAAE,GAAGjB,CAAC,EAAEc,GAAGvB,EAAE,
GAAG,cAAc,YAAYA,EAAES,CAAC,EAAE,GAAGmJ,GAAG5J,CAAC,EAAE,CAAC,IAAIkL,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,MAAM,EAAE,OAAOC,GAAG,CAACnL,EAAEC,EAAEG,IAAI,CAAC,IAAIsB,GAAGzB,KAAK,GAAGG,EAAE,IAAIA,EAAEH,EAAED,EAAEI,CAAC,GAAG,EAAEA,GAAGsB,IAAI,EAAEtB,EAAE,GAAG,GAAGA,EAAEH,GAAGD,EAAE,QAAQkL,GAAG,OAAOA,GAAG,OAAOlL,EAAE,kBAAkBwB,EAAExB,EAAE,MAAMC,EAAEG,CAAC,EAAEJ,EAAE,SAASC,EAAEG,CAAC,CAAC,EAAE,IAAIsB,EAAE,GAAGzB,EAAEG,GAAG,CAAC,IAAIK,EAAET,EAAEC,GAAG,EAAE,GAAG,IAAIQ,EAAE,CAAC,IAAImB,EAAE,GAAG5B,EAAEC,GAAG,EAAE,IAAS,IAAIQ,IAAV,IAAaiB,GAAG,OAAO,cAAc,GAAGjB,IAAI,EAAEmB,CAAC,MAAM,CAAC,IAAIC,EAAE,GAAG7B,EAAEC,GAAG,EAAE,OAAOQ,GAAQ,IAAIA,IAAV,KAAc,GAAGA,IAAI,GAAGmB,GAAG,EAAEC,GAAG,EAAEpB,IAAI,GAAGmB,GAAG,GAAGC,GAAG,EAAE,GAAG7B,EAAEC,GAAG,GAAGyB,GAAG,OAAO,aAAajB,CAAC,GAAGA,GAAG,MAAMiB,GAAG,OAAO,aAAa,MAAMjB,GAAG,GAAG,MAAM,KAAKA,CAAC,EAAE,CAAC,MAAMiB,GAAG,OAAO,aAAajB,CAAC,CAAC,CAAC,OAAOiB,CAAC,EAAE4H,GAAG,CAACtJ,EAAEC,KAAKD,KAAK,GAAGmL,GAAG,EAAE,EAAEnL,EAAEC,CAAC,EAAE,GAAG,SAAS2E,GAAG5E,EAAEC,EAAEG,EAAE,CAAC,OAAOmB,EAAE4I,GAAG,EAAE,EAAEnK,EAAEC,EAAEG,CAAC,EAAE,CAAC,CAAC,SAASyE,GAAG7E,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,EAAEC,CAAC,CAAC,CAAC,IAAImL,GAAGpL,GAAG,CAAC,QAAQC,EAAE,EAAEG,EAAE,EAAEA,EAAEJ,EAAE,OAAO,EAAEI,EAAE,CAAC,IAAIsB,EAAE1B,EAAE,WAAWI,CAAC,EAAE,KAAKsB,EAAEzB,IAAI,MAAMyB,EAAEzB,GAAG,EAAE,OAAOyB,GAAG,OAAOA,GAAGzB,GAAG,EAAE,EAAEG,GAAGH,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEoL,GAAG,CAACrL,EAAEC,EAAEG,EAAEsB,IAAI,CAAC,GAAG,EAAE,EAAEA,GAAG,MAAO,GAAE,IAAIjB,EAAEL,KAAK,EAAEsB,EAAEtB,EAAEsB,EAAE,EAAE,QAAQE,EAAE,EAAEA,EAAE5B,EAAE,OAAO,EAAE4B,EAAE,CAAC,IAAIC,EAAE7B,EAAE,WAAW4B,CAAC,EAAE,GAAG,OAAOC,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK7B,EAAE,WAAW,EAAE4B,CAAC,GAAG,KAAKC,EAAE,CAAC,GAAGzB,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAEyB,CAAC,KAAK,CAAC,GAAG,MAAMA,EAAE,CAAC,GAAGzB,EAAE,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,CAAC,KAAK,CAAC,GAAG,OAAOA,EAAE,CAAC,GAAGzB,EAAE,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,EAAE,KAAK,CAAC,GAAGzB,EAAE,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,GAAG5B,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,GAAG,EAAE,CAAC5B,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,EAAE,EAAE,CAAC5B,EAAEG,MAAM,CAAC,EAAE,IAAI,GAAGyB,CAAC,CAAC,CAAC,OAAO5B,EAAEG,IAAI,CAAC,EAAE,EAAEA,EAAEK,CAAC,EAAE6K,GAAG,CAACtL,EAAEC,EAAEG,IAAIiL,GAAGrL,EAAE,EAAE,EAAEC,EAAEG,CAAC,EAAE,SAAS0E,GAAG9E,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,EAAEC,CAAC,CAAC,CAAC,SAAS8E,GAAG/E,EAAEC,EAAEG,EAAE,CAAC,GAAGmB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,EAAEC,EAAEG,CAAC,CAAC,CAAC,SAAS4E,GAAGhF,EAAEC,EAAEG,EAAE,CAAC,OAAOmB,EAAE4I,GAAG,EAAE,EAAEnK,EAAEC,EAAEG,CAAC,EAAE,CAAC,CAAC,SAAS6E,GAAGjF,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,EAAEC,CAAC,CAAC,CAAC,SAASiF,GAAGlF,EAAEC,EAAEG,EAAE,CAAC,GAAGmB,EAAE,OAAO4I,GAAG,EAAE,EAAEnK,EAAEC,EAAEG,CAAC,CAAC,CAAC,SAAS+E,GAAGnF,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAGH,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAAS0D,GAAGpF,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAGH,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAAS2D,GAAGrF,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAGH,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAAS4D,GAAGtF,EAAE,CAAC,GAAGuB,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,CAAC,CAAC,CAAC,SAASuF,GAAGvF,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,CAAC,CAAC,CAAC,SAASuF,GAAGxF,EAAEC,EAAEG,EAAE,CAAC,GAAGmB,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,CAAC,CAAC,CAAC,IAAImL,GAAGC,GAAG/F,GAAG,IAAI,CAACtB,GAAE,EAAE,CAAC,EAAEsH,GAAGzL,GAAG,CAAC,QAAQC,EAAE,GAAG,EAAE,EAAED,IAAI,CAAC,GAAGC,GAAGsL,GAAG,EAAE,EAAEvL,MAAM,CAAC,CAAC,EAAE,OAAOC,CAAC,EAAE
yL,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASC,GAAG7L,EAAEC,EAAEG,EAAE,CAAC,EAAE,CAAC,GAAG,EAAE,mBAAmBH,GAAG,MAAM,IAAI,UAAU,yDAAyD,EAAE,OAAO,SAASD,EAAEC,EAAEG,EAAE,CAAC,EAAE,CAAC,IAAIsB,EAAEzB,EAAE,KAAK,GAAG,CAACD,EAAE,MAAM,IAAIwL,GAAG,SAAS9J,CAAC,+CAA+C,EAAE,GAAGiK,GAAG,eAAe3L,CAAC,EAAE,CAAC,GAAGI,EAAE,GAAG,OAAO,MAAM,IAAIoL,GAAG,yBAAyB9J,CAAC,SAAS,CAAC,CAACiK,GAAG3L,CAAC,EAAEC,EAAE,OAAO2L,GAAG5L,CAAC,EAAE0L,GAAG,eAAe1L,CAAC,IAAIC,EAAEyL,GAAG1L,CAAC,EAAE,OAAO0L,GAAG1L,CAAC,EAAEC,EAAE,QAASD,GAAGA,EAAE,CAAE,EAAE,EAAEA,EAAEC,EAAEG,CAAC,CAAC,CAAC,IAAI0L,GAAG,CAAC9L,EAAEC,EAAEa,IAAI,CAAC,OAAOb,EAAE,CAAC,IAAK,GAAE,OAAOa,EAAEd,GAAGI,EAAE,EAAEJ,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOc,EAAEd,GAAGS,EAAE,EAAET,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOc,EAAEd,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOc,EAAEd,GAAGyD,EAAEzD,IAAI,CAAC,EAAEA,GAAG0D,GAAE1D,IAAI,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,0BAA0BC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS0F,GAAG1F,EAAEC,EAAEG,EAAE,CAACA,KAAK,EAAEyL,GAAG7L,KAAK,EAAE,CAAC,KAAKC,EAAEwL,GAAGxL,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,GAAa,OAAOA,GAAjB,UAA8B,OAAOA,GAAjB,SAAmB,MAAMA,EAASA,IAAP,KAAS,QAAkBD,EAAE,OAAOC,IAApB,UAAkCD,IAAV,SAA0BA,IAAb,WAAeC,EAAE,SAAS,EAAE,GAAGA,EAAE,IAAI,UAAU,mBAAmBA,CAAC,QAAQ,KAAK,IAAI,EAAE,EAAE,OAAgB,OAAOA,GAAjB,WAAqBA,EAAE,OAAOA,CAAC,GAAGA,CAAC,EAAE,eAAe8L,GAAG,qBAAqBD,GAAG7L,EAAEG,EAAMH,EAAE,QAAQ,GAAG,GAAjB,EAAkB,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI8L,GAAG,EAAE,SAASpG,GAAG3F,EAAEC,EAAEG,EAAEK,EAAE,CAACoL,GAAG7L,KAAK,EAAE,CAAC,KAAKC,EAAEwL,GAAGxL,IAAI,CAAC,EAAE,aAAa,SAASD,EAAE,CAAC,MAAM,CAAC,CAACA,CAAC,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,OAAOA,EAAEG,EAAEK,CAAC,EAAE,eAAesL,GAAG,qBAAqB,SAAS/L,EAAE,CAAC,OAAO,KAAK,aAAa,EAAE,EAAEA,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAIgM,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASpF,GAAG7G,EAAE,CAAC,GAAGA,KAAK,IAAO,EAAEiM,GAAGjM,EAAE,CAAC,GAAX,IAAeiM,GAAGjM,CAAC,EAAE,OAAOgM,GAAG,KAAKhM,CAAC,EAAE,CAAC,IAAIkM,GAAGlM,GAAG,CAAC,GAAG,CAACA,EAAE,MAAM,IAAIwL,GAAG,oCAAoCxL,CAAC,EAAE,OAAOiM,GAAGjM,CAAC,CAAC,EAAEmM,GAAGnM,GAAG,CAAC,OAAOA,EAAE,CAAC,KAAK,OAAO,MAAO,GAAE,KAAK,KAAK,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,QAAQ,IAAMC,EAAE+L,GAAG,IAAI,GAAGC,GAAG,OAAO,OAAOA,GAAGhM,CAAC,EAAED,EAAEiM,GAAGhM,EAAE,CAAC,EAAE,EAAEA,CAAC,CAAC,EAAE,SAASmM,GAAGpM,EAAE,CAAC,OAAO,KAAK,aAAa,EAAE,EAAEA,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIqM,GAAG,CAAC,KAAK,kBAAkB,aAAarM,GAAG,CAAC,IAAIC,EAAEiM,GAAGlM,CAAC,EAAE,OAAO6G,GAAG7G,CAAC,EAAEC,CAAC,EAAE,WAAW,CAACD,EAAEC,IAAIkM,GAAGlM,CAAC,EAAE,eAAe8L,GAAG,qBAAqBK,GAAG,GAAG,IAAI,EAAE,SAASxG,GAAG5F,EAAE,CAAC,OAAO6L,GAAG7L,IAAI,EAAEqM,EAAE,CAAC,CAAC,IAAIC,GAAG,CAACtM,EAAEC,IAAI,CAAC,OAAOA,EAAE,CAAC,IAAK,GAAE,OAAO,SAASD,EAAE,CAAC,OAAO,KAAK,aAAac,EAAE,EAAEd,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,IAAK,GAAE,OAAO,SAASA,EAAE,CAAC,OAAO,KAAK,aAAagB,EAAE,EAAEhB,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,wBAAwBC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS6F,GAAG7F,EAAEC,EAAEG,EAAE,CAACA,KAAK,EAAEyL,GAAG7L,KAAK,EAAE,CAAC,KAAKC,EAAEwL,GAAGxL,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,CAACA,EAAEC,IAAIA,EAAE,eAAe8L,GAAG,qBAAqBO,GAAGrM,EAAEG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAAS0F,GAAG9F,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAE,CAAC,GAAGT,KAAK,EAAEI,KAAK,EAAEH,EAAEwL,GAAGxL,IAAI,CAAC,EAAOQ,IAAL,KAASA,EAAE,YAAYA,EAAET,GAAGA,EAAM0B,IAAJ,EAAM,CAAC,IAAIE,EAAE,GAAG,EAAExB,EAAEK,EAAET,GAAGA,GAAG4B,IAAIA,CAAC,CAAC,IAAIC,EAAE5B,EAAE,SAAS,UAAU,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,IAAI,CAAC,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,CAAC,EAAE
4L,GAAG7L,EAAE,CAAC,KAAKC,EAAE,aAAaQ,EAAE,WAAWoB,EAAE,eAAekK,GAAG,qBAAqBD,GAAG7L,EAAEG,EAAMsB,IAAJ,CAAK,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAASqE,GAAG/F,EAAEC,EAAEyB,EAAE,CAAC,SAASjB,EAAET,EAAE,CAAC,IAAIC,EAAE,EAAE,EAAED,IAAI,IAAI,CAAC,EAAE,OAAOA,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,IAAI4B,EAAExB,EAAE,EAAE,OAAOJ,EAAEC,CAAC,CAAC,CAAC,IAAI2B,EAAE,CAAC,UAAU,WAAW,WAAW,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,cAAc,EAAE3B,CAAC,EAAE4L,GAAG7L,KAAK,EAAE,CAAC,KAAK0B,EAAE+J,GAAG/J,IAAI,CAAC,EAAE,aAAajB,EAAE,eAAesL,GAAG,qBAAqBtL,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,SAASuF,GAAGhG,EAAEC,EAAE,CAACD,KAAK,EAAE,IAAII,GAAmBH,EAAEwL,GAAGxL,IAAI,CAAC,KAA3B,cAA8B4L,GAAG7L,EAAE,CAAC,KAAKC,EAAE,aAAa,SAASD,EAAE,CAAC,IAAIC,EAAE,EAAE,EAAED,IAAI,IAAI,CAAC,EAAES,EAAET,EAAE,EAAE,GAAGI,EAAE,QAAQwB,EAAEnB,EAAEoB,EAAE,EAAEA,GAAG5B,EAAE,EAAE4B,EAAE,CAAC,IAAIf,EAAEL,EAAEoB,EAAE,GAAGA,GAAG5B,GAAM,EAAE,EAAEa,IAAI,CAAC,GAAZ,EAAc,CAAC,GAAGc,EAAE0H,GAAG1H,EAAEd,EAAEc,CAAC,EAAWZ,IAAT,OAAW,IAAIA,EAAEY,OAAOZ,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGY,EAAEA,EAAEd,EAAE,CAAC,CAAC,KAAK,CAAC,IAAIE,EAAE,MAAMf,CAAC,EAAE4B,EAAE,EAAEA,EAAE5B,EAAE,EAAE4B,EAAEb,EAAEa,CAAC,EAAE,OAAO,aAAa,EAAE,EAAEpB,EAAEoB,IAAI,CAAC,CAAC,EAAEb,EAAEA,EAAE,KAAK,EAAE,CAAC,CAAC,OAAOuL,GAAGvM,CAAC,EAAEgB,CAAC,EAAE,WAAW,SAAShB,EAAEC,EAAE,CAACA,aAAa,cAAcA,EAAE,IAAI,WAAWA,CAAC,GAAG,IAAIQ,EAAY,OAAOR,GAAjB,SAAmB,GAAG,EAAEQ,GAAGR,aAAa,YAAYA,aAAa,mBAAmBA,aAAa,WAAW,MAAM,IAAIuL,GAAG,uCAAuC,EAAE,IAAI5J,EAAExB,GAAGK,EAAE2K,GAAGnL,CAAC,EAAEA,EAAE,OAAO4B,EAAE2K,GAAG,EAAE5K,EAAE,CAAC,EAAEd,EAAEe,EAAE,EAAE,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAED,EAAExB,GAAGK,EAAE6K,GAAGrL,EAAEa,EAAEc,EAAE,CAAC,UAAUnB,EAAE,IAAIA,EAAE,EAAEA,EAAEmB,EAAE,EAAEnB,EAAE,CAAC,IAAIO,EAAEf,EAAE,WAAWQ,CAAC,EAAE,GAAG,IAAIO,EAAE,MAAMuL,GAAGzL,CAAC,EAAE,IAAI0K,GAAG,wDAAwD,EAAE,EAAE,EAAE1K,EAAEL,IAAI,CAAC,EAAEO,CAAC,KAAM,KAAIP,EAAE,EAAEA,EAAEmB,EAAE,EAAEnB,EAAE,EAAE,EAAEK,EAAEL,IAAI,CAAC,EAAER,EAAEQ,CAAC,EAAE,OAAcT,IAAP,MAAUA,EAAE,KAAKuM,GAAG1K,CAAC,EAAEA,CAAC,EAAE,eAAekK,GAAG,qBAAqBK,GAAG,GAAGpM,EAAE,CAACuM,GAAGvM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAIyM,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,UAAU,EAAE,OAAOC,GAAG,CAAC1M,EAAEC,IAAI,CAAC,QAAQG,EAAEJ,GAAG,EAAE6B,EAAEzB,EAAEH,EAAE,EAAE,EAAEG,GAAGyB,IAAI,EAAE,EAAEzB,IAAI,CAAC,GAAG,EAAEA,EAAE,GAAG,IAAIA,IAAI,GAAGJ,GAAGyM,GAAG,OAAOA,GAAG,OAAO,EAAE,EAAE,MAAMzM,EAAEI,CAAC,CAAC,EAAE,IAAIA,EAAE,GAAGyB,EAAE,EAAE,EAAEA,GAAG5B,EAAE,GAAG,EAAE4B,EAAE,CAAC,IAAI0H,EAAE9I,EAAE,EAAET,EAAE,EAAE6B,IAAI,IAAI,CAAC,EAAE,GAAM0H,GAAH,EAAK,MAAMnJ,GAAG,OAAO,aAAamJ,CAAC,CAAC,CAAC,OAAOnJ,CAAC,EAAEuM,GAAG,CAAC3M,EAAEC,EAAEG,IAAI,CAAC,GAAGA,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIsB,EAAEzB,EAAEG,GAAGA,GAAG,GAAG,EAAEJ,EAAE,OAAOI,EAAE,EAAEJ,EAAE,OAAO,QAAQ4B,EAAE,EAAEA,EAAExB,EAAE,EAAEwB,EAAE,CAAC,IAAIC,EAAE7B,EAAE,WAAW4B,CAAC,EAAEnB,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE4B,EAAE5B,GAAG,CAAC,CAAC,OAAOQ,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEyB,CAAC,EAAEkL,GAAG5M,GAAG,EAAEA,EAAE,OAAO6M,GAAG,CAAC7M,EAAEC,IAAI,CAAC,QAAQG,EAAE,EAAEsB,EAAE,GAAG,EAAEtB,GAAGH,EAAE,IAAI,CAAC,IAAIQ,EAAE,EAAE,EAAET,EAAE,EAAEI,IAAI,IAAI,CAAC,EAAE,GAAMK,GAAH,EAAK,MAAM,EAAEL,EAAE,OAAOK,GAAGA,GAAG,MAAMiB,GAAG,OAAO,aAAa,MAAMjB,GAAG,GAAG,MAAM,KAAKA,CAAC,GAAGiB,GAAG,OAAO,aAAajB,CAAC,CAAC,CAAC,OAAOiB,CAAC,EAAEoL,GAAG,CAAC9M,EAAEC,EAAEG,IAAI,CAAC,GAAGH,KAAK,EAAEG,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIsB,EAAEzB,EAAEG,EAAEsB,EAAEtB,EAAE,EAAE,QAAQK,EAAE,EAAEA,EAAET,EAAE,OAAO,EAAES,EAAE,CAAC,IAAImB,EAAE5B,EAAE,WAAWS,CAAC,EAAE,GAAG,OAAOmB,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK5B,EAAE,WAAW,EAAES,CAAC,GAAG,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE2B,GAAG3B,GAAG,GAAG,EAAEG,EAAE,KAAK,CAAC,O
AAO,EAAE,EAAEH,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEyB,CAAC,EAAEqL,GAAG/M,GAAG,CAAC,QAAQC,EAAE,EAAEG,EAAE,EAAEA,EAAEJ,EAAE,OAAO,EAAEI,EAAE,CAAC,IAAIsB,EAAE1B,EAAE,WAAWI,CAAC,EAAE,OAAOsB,GAAG,OAAOA,GAAG,EAAEtB,EAAEH,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAE,SAASgG,GAAGjG,EAAEC,EAAEG,EAAE,CAAC,GAAGJ,KAAK,EAAEC,KAAK,EAAEG,EAAEqL,GAAGrL,KAAK,CAAC,EAAMH,IAAJ,EAAM,IAAIyB,EAAEgL,GAAGjM,EAAEkM,GAAG9K,EAAE+K,GAAG9L,EAAEd,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,OAAWC,IAAJ,IAAQyB,EAAEmL,GAAGpM,EAAEqM,GAAGjL,EAAEkL,GAAGjM,EAAEd,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,GAAG6L,GAAG7L,EAAE,CAAC,KAAKI,EAAE,aAAaJ,GAAG,CAAC,QAAQI,EAAEK,EAAE,EAAE,EAAET,IAAI,IAAI,CAAC,EAAE4B,GAAE5B,EAAE,EAAE6B,GAAE,EAAEA,IAAGpB,EAAE,EAAEoB,GAAE,CAAC,IAAIb,GAAEhB,EAAE,EAAE6B,GAAE5B,EAAE4B,IAAGpB,GAAMK,EAAEE,EAAC,GAAN,IAAUY,GAAEF,EAAEE,GAAEZ,GAAEY,EAAC,EAAWxB,IAAT,OAAWA,EAAEwB,IAAGxB,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGwB,IAAGA,GAAEZ,GAAEf,EAAE,CAAC,OAAOsM,GAAGvM,CAAC,EAAEI,CAAC,EAAE,WAAW,CAACJ,EAAE0B,IAAI,CAAC,GAAa,OAAOA,GAAjB,SAAmB,MAAM,IAAI8J,GAAG,6CAA6CpL,CAAC,EAAE,EAAE,IAAIwB,EAAEC,EAAEH,CAAC,EAAEZ,GAAE0L,GAAG,EAAE5K,EAAE3B,CAAC,EAAE,OAAO,EAAE,EAAEa,KAAI,IAAI,CAAC,EAAEc,EAAE3B,EAAEQ,EAAEiB,EAAEZ,GAAE,EAAEc,EAAE3B,CAAC,EAASD,IAAP,MAAUA,EAAE,KAAKuM,GAAGzL,EAAC,EAAEA,EAAC,EAAE,eAAeiL,GAAG,qBAAqBK,GAAG,GAAGpM,EAAE,CAACuM,GAAGvM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAASkG,GAAGlG,EAAEC,EAAE,CAAC4L,GAAG7L,KAAK,EAAE,CAAC,GAAG,GAAG,KAAKC,EAAEwL,GAAGxL,IAAI,CAAC,EAAE,eAAe,EAAE,aAAa,IAAI,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAIkG,GAAG,IAAI,EAAE,SAASC,GAAGpG,EAAE,CAAC2C,GAAG3C,IAAI,EAAE,CAACsB,EAAE,EAAE,CAACD,EAAE,OAAO,EAAE,EAAEyB,GAAG,CAAC,CAAC,IAAIkK,GAAGhN,GAAG,CAAC,GAAG,CAAC2D,GAAE,GAAG,CAAC,GAAG3D,EAAE,EAAE,EAAE,EAAEkK,IAAI,GAAG,CAAC3I,EAAE4B,GAAGK,CAAC,EAAEsF,GAAGtF,CAAC,CAAC,OAAOxD,EAAE,CAACA,aAAa0J,IAAc1J,GAAV,UAAamC,EAAE,EAAEnC,CAAC,CAAC,CAAC,OAAOA,EAAE,CAACA,aAAa0J,IAAc1J,GAAV,UAAamC,EAAE,EAAEnC,CAAC,CAAC,CAAC,EAAE,SAAS4C,GAAG5C,EAAE,CAACA,KAAK,EAAc,OAAO,QAAQ,IAA3B,aAAgC,QAAQ,GAAG,EAAE,EAAEA,IAAI,EAAEA,CAAC,EAAE,MAAM,KAAKoD,EAAE,EAAEpD,GAAG,IAAI,QAAQ,MAAM,EAAE,EAAEA,IAAI,EAAE,CAAC,EAAE,CAAC,IAAIoD,GAAG,IAAI,CAAC,IAAIpD,EAAEkD,GAAG,EAAElD,IAAI4C,GAAG5C,CAAC,EAAEgN,GAAGC,EAAE,EAAE,EAAE,SAAS5G,GAAGrG,EAAEC,EAAE,EAAED,KAAK,IAAIC,IAAI,EAAE,WAAWmD,EAAE,EAAE7B,EAAE,YAAY,CAAC,aAAavB,EAAE,IAAI,cAAc,CAAC,GAAGA,EAAEiK,GAAGjK,CAAC,IAAIA,EAAE,YAAY,CAAC,IAAI,cAAc,CAAC,CAAC,CAAC,IAAIkN,GAAG,CAAC,EAAE,SAAS5G,GAAGtG,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAE,CAAC,IAAIR,KAAK,EAAEyB,GAAG,EAAEwL,GAAG,OAAOxL,EAAEtB,EAAEK,IAAI,IAAI,EAAEA,EAAE,EAAEA,EAAEiB,EAAEjB,IAAIyM,GAAGzM,CAAC,EAAEgD,EAAErD,EAAE,EAAEK,CAAC,EAAEgD,EAAErD,EAAE,EAAEK,EAAE,CAAC,EAAEO,EAAE,EAAEZ,EAAE,EAAEK,EAAE,IAAI,CAAC,EAAE,OAAOR,EAAEoJ,GAAGpJ,CAAC,EAAEkN,GAAGnN,CAAC,GAAG,GAAGkN,EAAE,CAAC,CAAC,SAAS3G,GAAGvG,EAAE,CAACA,KAAK,EAAEuB,EAAE,YAAY,CAAC,IAAI,gBAAgB,OAAOvB,CAAC,CAAC,EAAE2K,GAAGV,GAAGjK,CAAC,CAAC,CAAC,CAAC,SAASwG,GAAGxG,EAAE,CAAC,CAAC,IAAIoN,GAAG,CAACpN,EAAEC,IAAI,CAAC,IAAIG,EAAEuL,GAAG3L,CAAC,EAAE,GAAYI,IAAT,OAAW,MAAMJ,EAAEqN,GAAGrN,CAAC,EAAEI,EAAEqL,GAAGzL,CAAC,EAAEuM,GAAGvM,CAAC,EAAE,IAAIwL,GAAG,GAAGvL,CAAC,qBAAqBG,CAAC,EAAE,EAAE,OAAOA,CAAC,EAAEkN,GAAG,CAACtN,EAAEC,EAAEG,IAAI,CAAC,IAAIsB,EAAE,CAAC,EAAE,OAAO1B,EAAEA,EAAE,WAAW0B,EAAEtB,CAAC,EAAEsB,EAAE,SAAS,EAAE,EAAEzB,IAAI,IAAI,CAAC,EAAEkM,GAAGzK,CAAC,GAAG1B,CAAC,EAAE,SAASyG,GAAGzG,EAAEC,EAAEG,EAAE,CAAC,OAAOH,KAAK,EAAEG,KAAK,EAAEJ,EAAEkM,GAAGlM,IAAI,CAAC,EAAEC,EAAEmN,GAAGnN,EAAE,WAAW,EAAEqN,GAAGrN,EAAEG,EAAEJ,CAAC,CAAC,CAAC,IAAIuN,GAAGvN,GAAG,CAAC,GAAG,CAACA,EAAE,CAAC,OAAOA,EAAE,CAACmE,GAAEnE,CAAC,CAAC,CAAC,EAAEwN,GAAG,EAAE7L,GAAG,KAAK8L,GAAG,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC
,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAE/L,GAAG,KAAKgM,GAAG,CAAC,EAAE,SAASrE,GAAGzJ,EAAE,CAAC,OAAO,SAASA,EAAE,CAAC,GAAG,CAAC2D,GAAE,CAAC,GAAO6J,KAAJ,EAAO,CAAC,IAAIvN,EAAE,GAAGG,EAAE,GAAGJ,EAAG,CAACA,EAAE,IAAI,CAAC,GAAG,CAAC2D,KAAI8J,GAAGzN,EAAEC,EAAE,GAAGG,GAAG,CAACoN,GAAG,EAAED,GAAI,IAAIQ,GAAGpM,EAAE,CAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAE3B,EAAE,GAAG,GAAG,CAAC,IAAI0B,EAAE,UAAU,CAAC,IAAI1B,EAAE,EAAE,EAAE2B,GAAG,IAAI,IAAI,CAAC,EAAE,OAAO3B,EAAEgO,GAAGJ,GAAG5N,CAAC,CAAC,EAAE,EAAEkK,GAAGlK,EAAE,CAAC,EAAE,CAAC,OAAOC,EAAE,CAACyB,EAAEzB,EAAED,EAAE,EAAE,CAAC,IAAIS,EAAE,GAAG,GAAG,CAACkB,GAAG,CAAC,IAAIC,EAAEE,GAAGF,IAAIE,GAAG,MAAM9B,EAAE4B,EAAE,OAAOA,EAAE,SAASF,CAAC,EAAEjB,EAAE,GAAG,CAAC,GAAGT,GAAG,CAACS,EAAE,MAAMiB,CAAC,CAAC,CAAE,EAAEtB,EAAE,GAAGH,IAAIuN,GAAG,EAAE7L,GAAG,UAAU,CAAC,IAAI3B,EAAEwM,GAAG,KAAK,EAAEvM,EAAED,EAAE,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAEC,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,MAAMA,EAAEyN,GAAG,CAAC,EAAE,IAAItN,EAAEuN,GAAG1N,CAAC,EAAE,OAAgBG,IAAT,SAAaA,EAAEyN,KAAKF,GAAG1N,CAAC,EAAEG,EAAEwN,GAAGxN,CAAC,EAAEH,GAAGA,EAAEG,EAAE,EAAE,EAAEJ,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAED,CAAC,EAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,MAAM,EAAEuN,GAAI,IAAIU,GAAGtM,EAAE,CAAE,EAAE,MAAU6L,KAAJ,GAAQA,GAAG,EAAED,GAAGW,EAAE,EAAE3B,GAAG5K,EAAE,EAAEA,GAAG,KAAKmM,GAAG,QAAQd,EAAE,GAAG7I,GAAE,kBAAkBqJ,EAAE,EAAE,EAAE,OAAOC,EAAE,CAAC,EAAGxN,GAAG,CAACD,EAAE,EAAE,KAAKC,CAAC,CAAC,CAAE,CAAC,CAAC,SAASyG,GAAG1G,EAAE,CAAC,OAAOA,KAAK,EAAEyJ,GAAI,KAAKzJ,EAAEkM,GAAGlM,CAAC,GAAG,KAAKmM,EAAE,CAAE,CAAC,CAAC,IAAIgC,GAAG,CAAC,EAAE,SAASxH,GAAG3G,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOtB,KAAK,EAAEsB,KAAK,GAAG1B,EAAEmO,GAAGnO,IAAI,CAAC,GAAG,KAAKC,EAAEiM,GAAGjM,IAAI,CAAC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,IAAI0M,GAAG,CAAC,EAAEC,GAAGrO,GAAG,CAAC,IAAIC,EAAEmO,GAAGpO,CAAC,EAAE,OAAgBC,IAAT,OAAWwL,GAAGzL,CAAC,EAAEC,CAAC,EAAE,SAAS2G,GAAG5G,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAE,CAAC,OAAOL,KAAK,EAAEsB,KAAK,EAAEjB,KAAK,GAAGT,EAAEmO,GAAGnO,IAAI,CAAC,GAAGC,EAAEiM,GAAGjM,IAAI,CAAC,EAAEA,EAAEG,EAAEiO,GAAGjO,CAAC,CAAC,EAAEsB,EAAEjB,CAAC,CAAC,CAAC,IAAI6N,GAAG,IAAc,OAAO,YAAjB,SAA4B,WAAW,SAAS,aAAa,EAAE,EAAE,SAASxH,GAAG9G,EAAE,CAAC,OAAWA,KAAK,IAAT,EAAYmM,GAAGmC,GAAG,CAAC,GAAGtO,EAAEqO,GAAGrO,CAAC,EAAEmM,GAAGmC,GAAG,EAAEtO,CAAC,CAAC,EAAE,CAAC,IAAIuO,GAAGvO,GAAG,CAAC,IAAIC,EAAEkO,GAAG,OAAO,OAAOA,GAAG,KAAKnO,CAAC,EAAEC,CAAC,EAAEuO,GAAG,CAACxO,EAAEC,IAAI,CAAC,QAAQG,EAAE,MAAMJ,CAAC,EAAE0B,EAAE,EAAEA,EAAE1B,EAAE,EAAE0B,EAAEtB,EAAEsB,CAAC,EAAE0L,GAAG,EAAE,EAAEnN,EAAE,EAAEyB,IAAI,IAAI,CAAC,EAAE,aAAaA,CAAC,EAAE,OAAOtB,CAAC,EAAEqO,GAAG,CAACzO,EAAEC,IAAI,OAAO,eAAeA,EAAE,OAAO,CAAC,MAAMD,CAAC,CAAC,EAAE,SAAS+G,GAAG/G,EAAEC,EAAEG,EAAE,CAAC,IAAIsB,GAAGzB,EAAEuO,GAAGxO,EAAEC,IAAI,CAAC,GAAG,MAAM,EAAED,IAAI,IAAIS,EAAE;AAAA,EAAwDmB,EAAE,EAAEC,EAAE,CAAC,EAAMzB,IAAJ,GAAOyB,EAAE,KAAK,KAAK,EAAE,QAAQ0H,EAAE,CAAC,SAAS,EAAEzI,EAAE,CAACY,CAAC,EAAEV,EAAE,EAAEA,EAAEhB,EAAE,EAAEgB,EAAEa,EAAE,KAAK,MAAMb,CAAC,EAAEuI,EAAE,KAAK,UAAUvI,CAAC,EAAEF,EAAE,KAAKb,EAAEe,CAAC,CAAC,EAAEP,GAAG,YAAYO,CAAC,aAAaA,CAAC,6BAA6BY,EAAE,IAAIA,EAAE,EAAE;AAAA,EAAOA,GAAG3B,EAAEe,CAAC,EAAE,eAAe,OAAOP,GAAG,cAAkBL,IAAJ,EAAM,WAAW,WAAW,IAAIyB,EAAE,KAAK,IAAI,CAAC;AAAA,EAAOH,EAAE,KAAK6H,EAAE,KAAK,mBAAmB,EAAEzI,EAAE,KAAKwM,EAAE,EAAE7M,GAAG;AAAA,GAA8D8I,EAAE,KAAK9I,EAAE;AAAA,CAAM,EAAET,EAAE,SAASA,GAAE,CAAC,IAAIC,GAAE,SAAS,GAAG,EAAEA,cAAa,UAAU,MAAM,IAAI,UAAU,qCAAqC,OAAOA,EAAC,0BAA0B,EAAE,IAAIG,GAAEqO,GAAGxO,GAAE,MAAM,sBAAuB,UAAU,CAAC,CAAE,EAAE,OAAOG,GAAE,UAAUH,GAAE,UAAUG,GAAE,IAAIA,IAAGJ,GAAEC,GAAE,MAAMG,GAAEJ,EAAC,aAAa,OAAOA,GAAEI,EAAC,EAAEmJ,CAAC,EAAE,GAAGzI,CAAC,EAAEV,EAAE,iBAAiBH,EAAE,IAAKD,IAAGA,GAAE
,IAAK,EAAE,KAAK,IAAI,CAAC,QAAQ0B,EAAE,IAAI,IAAI6M,GAAGE,GAAGrO,EAAEJ,CAAC,CAAC,CAAC,CAAC,SAASgH,GAAGhH,EAAE,CAAC,OAAOA,EAAEqO,GAAGrO,IAAI,CAAC,EAAEmM,GAAGhL,EAAEnB,CAAC,CAAC,CAAC,CAAC,SAASiH,GAAGjH,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,EAAEkM,GAAGlM,IAAI,CAAC,EAAEC,EAAEiM,GAAGjM,CAAC,EAAEkM,GAAGnM,EAAEC,CAAC,CAAC,CAAC,CAAC,SAASiH,GAAGlH,EAAE,CAAC,GAAGA,KAAK,KAAKiM,GAAGjM,EAAE,CAAC,GAAG,EAAE,CAAC,SAASmH,IAAI,CAAC,OAAOgF,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS/E,GAAGpH,EAAE,CAACA,EAAEkM,GAAGlM,IAAI,CAAC,EAAE,QAAQC,EAAE,MAAMD,EAAE,MAAM,EAAEI,EAAE,EAAEA,EAAEJ,EAAE,OAAOI,IAAIH,EAAEG,CAAC,EAAEJ,EAAEI,CAAC,EAAE,OAAO+L,GAAGlM,CAAC,CAAC,CAAC,SAASoH,GAAGrH,EAAE,CAAC,OAAOmM,GAAGkC,GAAGrO,IAAI,CAAC,CAAC,CAAC,CAAC,SAASsH,IAAI,CAAC,OAAO6E,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS5E,GAAGvH,EAAE,CAAC,QAAQC,EAAEiM,GAAGlM,KAAK,CAAC,EAAEC,EAAE,QAAQ,CAAC,IAAIG,EAAEH,EAAE,IAAI,EAAEA,EAAE,IAAI,EAAEG,CAAC,CAAC,CAACyG,GAAG7G,CAAC,CAAC,CAAC,SAASwH,GAAGxH,EAAEC,EAAEG,EAAE,CAACH,KAAK,EAAEG,KAAK,EAAEJ,EAAEkM,GAAGlM,IAAI,CAAC,EAAEC,EAAEiM,GAAGjM,CAAC,EAAEG,EAAE8L,GAAG9L,CAAC,EAAEJ,EAAEC,CAAC,EAAEG,CAAC,CAAC,SAASqH,GAAGzH,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,GAAGA,EAAEoN,GAAGpN,IAAI,EAAE,mBAAmB,GAAG,qBAAqBC,CAAC,EAAEkM,GAAGnM,CAAC,CAAC,CAAC,SAAS0H,GAAG1H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,eAAe,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,UAAU,EAAEA,GAAGA,EAAE,QAAQ,EAAE,KAAK,IAAIA,EAAE,eAAe,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,MAAM,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,IAAI0O,GAAG1O,GAAMA,EAAE,GAAL,IAAYA,EAAE,KAAL,GAAaA,EAAE,KAAL,GAAU2O,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAE,SAASjH,GAAG3H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,QAAQ,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,OAAO,EAAE,IAAII,GAAGsO,GAAG1O,EAAE,YAAY,CAAC,EAAE2O,GAAGC,IAAI5O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAEG,EAAE,EAAE,EAAEH,EAAE,KAAK,IAAI,CAAC,EAAE,IAAID,EAAE,kBAAkB,EAAEI,EAAE,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE,IAAI0B,EAAE,IAAI,KAAK1B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEA,EAAE,GAAGI,GAAGsB,GAAG1B,EAAE,kBAAkB,GAAG,KAAK,IAAI0B,EAAEtB,CAAC,GAAG,EAAE,EAAEH,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,SAAS4H,GAAG5H,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,IAAI,KAAK,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAE,KAAK,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,CAAC,EAAEI,EAAE,EAAE,EAAEJ,EAAE,KAAK,IAAI,CAAC,EAAE0B,EAAEzB,EAAE,kBAAkB,EAAEQ,EAAE,IAAI,KAAKR,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE2B,EAAE,IAAI,KAAK3B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEsJ,EAAE,KAAK,IAAI3H,EAAEnB,CAAC,EAAE,MAAO,GAAEL,EAAE,EAAE,EAAEJ,EAAE,KAAK,IAAI,CAAC,EAAE,EAAOS,GAAGm
B,GAAG2H,GAAG7H,GAAG,EAAEtB,IAAImJ,GAAG7H,KAAKjB,EAAE,KAAK,IAAImB,EAAEnB,CAAC,EAAER,EAAE,QAAQA,EAAE,QAAQ,EAAE,MAAM,EAAEG,EAAEmJ,EAAE9I,GAAGiB,EAAE,GAAG,EAAE,EAAE1B,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,OAAO,EAAEG,GAAGsO,GAAGzO,EAAE,YAAY,CAAC,EAAE0O,GAAGC,IAAI3O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEI,EAAE,EAAE,EAAEJ,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAED,EAAEC,EAAE,QAAQ,EAAE,OAAO,MAAMD,CAAC,EAAE,GAAGA,EAAE,GAAG,CAAC,CAAC,SAAS6H,GAAG7H,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE,CAAC,OAAON,EAAE4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,CAAC,EAAE,GAAG,CAAC,SAASiG,GAAG9H,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE,CAAC,GAAGL,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,CAAC,CAAC,CAAC,SAASmG,GAAG/H,EAAEC,EAAEG,EAAEsB,EAAE,CAAC1B,KAAK,EAAEC,KAAK,EAAEG,KAAK,EAAEsB,KAAK,EAAE,IAAIjB,EAAG,IAAI,OAAM,YAAY,EAAEmB,EAAE,IAAI,KAAKnB,EAAE,EAAE,CAAC,EAAEK,EAAE,IAAI,KAAKL,EAAE,EAAE,CAAC,EAAEA,EAAEmB,EAAE,kBAAkB,EAAE,IAAIZ,EAAEF,EAAE,kBAAkB,EAAE0I,EAAE,KAAK,IAAI/I,EAAEO,CAAC,EAAE,EAAE,EAAEhB,IAAI,IAAI,CAAC,EAAE,GAAGwJ,EAAE,EAAE,EAAEvJ,IAAI,IAAI,CAAC,EAAE,EAAOQ,GAAGO,GAAGY,GAAG5B,EAAEA,GAAGA,EAAE,mBAAmB,OAAO,CAAC,OAAO,GAAG,aAAa,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC,GAAG4B,CAAC,EAAEd,EAAEd,EAAEc,CAAC,EAAEE,EAAEP,GAAG6K,GAAG1J,EAAExB,EAAE,EAAE,EAAEkL,GAAGxK,EAAEY,EAAE,EAAE,IAAI4J,GAAG1J,EAAEF,EAAE,EAAE,EAAE4J,GAAGxK,EAAEV,EAAE,EAAE,EAAE,CAAC,IAAIyO,GAAG,CAAC,EAAEC,GAAG,CAAC9O,EAAEC,IAAI,CAAC4O,GAAG,OAAO,EAAE,QAAQzO,EAAEA,EAAE,EAAE,EAAEJ,MAAM,CAAC,GAAG,CAAC,IAAIS,EAAOL,GAAL,IAAOH,IAAIQ,GAAQL,GAAL,MAASH,EAAE,EAAE,EAAE,EAAE4O,GAAG,KAAUzO,GAAL,IAAO,EAAE,EAAEH,IAAI,IAAI,CAAC,EAAOG,GAAL,IAAOqD,EAAExD,IAAI,CAAC,EAAOG,GAAL,IAAO,EAAE,EAAEH,IAAI,IAAI,CAAC,EAAEe,EAAE,EAAEf,IAAI,IAAI,CAAC,CAAC,EAAEA,GAAGQ,EAAE,EAAE,CAAC,CAAC,OAAOoO,EAAE,EAAE,SAAS7G,GAAGhI,EAAEC,EAAEG,EAAE,CAAC,OAAOJ,KAAK,EAAEC,EAAE6O,GAAG7O,IAAI,EAAEG,IAAI,CAAC,EAAEiJ,GAAGrJ,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,SAASgI,GAAGjI,EAAEC,EAAEG,EAAE,CAAC,OAAOJ,KAAK,EAAEC,EAAE6O,GAAG7O,IAAI,EAAEG,IAAI,CAAC,EAAEiJ,GAAGrJ,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,IAAIiI,GAAG,IAAI,CAAC,EAAEC,GAAG,IAAI,KAAK,IAAI,EAAE,SAASC,GAAGpI,EAAEC,EAAE,CAAC,OAAOuC,EAAE8G,GAAGtJ,IAAI,EAAEC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIsI,GAAGF,GAAG,IAAI,CAAC,MAAM6B,IAAI,EAAE,QAAQ,EAAE,SAAS5B,IAAI,CAAC,MAAO,WAAU,CAACC,GAAG,IAAI,YAAY,WAAW,YAAY,IAAI,EAAE,IAAIC,GAAG,IAAI,UAAU,oBAAoB,SAASC,GAAGzI,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,EAAE,EAAE,OAAO,GAAGD,GAAGC,GAAG,WAAWD,EAAE,MAAM,GAAG,QAAQI,EAAE,EAAE,GAAGA,EAAEA,GAAG,EAAE,CAAC,IAAIK,EAAER,GAAG,EAAE,GAAGG,GAAGK,EAAE,KAAK,IAAIA,EAAET,EAAE,SAAS,EAAE,IAAI4B,EAAE,KAAKnB,EAAE,KAAK,IAAIT,EAAES,CAAC,EAAET,EAAE,CAAC4B,GAAGA,EAAE,IAAI,KAAKA,EAAE,WAAWnB,GAAG,MAAMA,EAAE,OAAO,KAAK,EAAEJ,GAAE,OAAO,WAAW,OAAO,MAAM,GAAG,CAACA,GAAE,KAAKuB,CAAC,EAAErB,GAAE,EAAE,IAAIsB,EAAE,EAAE,MAAM7B,CAAC,MAAS,CAAC,CAAC6B,EAAE,MAAM,CAAC,GAAGA,EAAE,MAAM,EAAE,CAAC,MAAM,EAAE,CAAC,IAAIkN,GAAG,KAAK5K,GAAE,iGAAiG,EAAE,GAAG6K,GAAG,CAAC,EAAEC,GAAGjP,GAAG,CAACA,EAAE,QAASA,GAAG,CAAC,IAAIC,EAAE8O,GAAG,EAAE9O,IAAI+O,GAAG/O,CAAC,EAAED,EAAE,CAAE,CAAC,EAAE,SAAS0I,IAAI,CAAC,IAAI1I,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,EAAE,OAAeA,EAAE,CAAC,GAAZ,SAAeA,EAAE,MAAM,EAAEiP,GAAGjP,CAAC,EAAEgP,GAAG,GAAGD,GAAG,EAAEC,GAAG,GAAGhP,EAAEgP,GAAG,EAAE,CAAC,SAASrG,GAAG3I,EAAEC,EAAEG,EAAE,CAAC,GAAGJ,KAAK,EAAEC,KAAK,EAAE+O,GAA
G,IAAIhP,EAAE,IAAI0B,EAAEsN,GAAG,QAAiBtN,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,GAAG,CAAC,GAAnD,SAAsDA,EAAE,MAAM,EAAEuN,GAAGvN,CAAC,EAAE,QAAQjB,EAAE,EAAEiB,EAAEjB,CAAC,GAAGsO,GAAG,GAAG/O,GAAG,EAAES,EAAE,IAAIT,EAAE,EAAEA,EAAEI,GAAGsB,EAAE1B,EAAES,CAAC,EAAE,EAAET,EAAE,EAAE,EAAEC,EAAE,EAAED,IAAI,IAAI,CAAC,EAAE+O,GAAG,EAAE,OAAO/O,CAAC,CAAC,IAAIkP,GAAGC,GAAG,CAAC,EAAEC,GAAG,IAAI,CAAC,GAAG,CAACF,GAAG,CAAC,IAAIlP,EAAEC,EAAE,CAAC,KAAK,WAAW,QAAQ,WAAW,KAAK,IAAI,IAAI,IAAI,KAAK,iBAAiB,MAAgB,OAAO,WAAjB,UAA4B,UAAU,WAAW,UAAU,UAAU,CAAC,GAAG,KAAK,QAAQ,IAAI,GAAG,EAAE,SAAS,EAAEiC,GAAG,gBAAgB,EAAE,IAAIlC,KAAKmP,GAAYA,GAAGnP,CAAC,IAAb,OAAe,OAAOC,EAAED,CAAC,EAAEC,EAAED,CAAC,EAAEmP,GAAGnP,CAAC,EAAE,IAAII,EAAE,CAAC,EAAE,IAAIJ,KAAKC,EAAEG,EAAE,KAAK,GAAGJ,CAAC,IAAIC,EAAED,CAAC,CAAC,EAAE,EAAEkP,GAAG9O,CAAC,CAAC,OAAO8O,EAAE,EAAE,SAAStG,GAAG5I,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIyB,EAAE,EAAE,OAAO0N,GAAG,EAAE,QAAS,CAAC3O,EAAEmB,IAAI,CAAC,IAAIC,EAAE5B,EAAEyB,EAAE,IAAIE,EAAE,EAAE,EAAE5B,EAAE,EAAE4B,IAAI,IAAI,CAAC,EAAEC,EAAEA,EAAE,EAAEA,EAAEpB,EAAE,OAAO,EAAEoB,EAAEzB,EAAE,EAAEwB,MAAM,CAAC,EAAEnB,EAAE,WAAWoB,CAAC,EAAEzB,EAAE,EAAEwB,IAAI,CAAC,EAAE,EAAEF,GAAGjB,EAAE,OAAO,CAAC,CAAE,EAAE,CAAC,CAAC,SAASoI,GAAG7I,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIG,EAAEgP,GAAG,EAAE,EAAE,EAAEpP,IAAI,IAAI,CAAC,EAAEI,EAAE,OAAO,IAAIsB,EAAE,EAAE,OAAOtB,EAAE,QAASJ,GAAG0B,GAAG1B,EAAE,OAAO,CAAE,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAEyB,EAAE,CAAC,CAAC,SAASqH,GAAG/I,EAAE,CAAC,OAAOuB,EAAE4I,GAAG,GAAG,EAAEnK,CAAC,EAAE,EAAE,CAAC,SAASgJ,GAAGhJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOH,EAAE4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,CAAC,EAAE,EAAE,CAAC,SAASuH,GAAGjJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOH,EAAE4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEsB,CAAC,EAAE,EAAE,CAAC,IAAI2N,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,SAASnG,GAAGlJ,EAAEC,EAAEG,EAAEK,EAAE,CAAC,GAAGc,EAAE,OAAO4I,GAAG,GAAG,EAAEnK,EAAEC,EAAEG,EAAEK,CAAC,EAAER,KAAK,EAAEG,KAAK,EAAEK,KAAK,EAAE,QAAQmB,EAAE,EAAEC,EAAE,EAAEA,EAAEzB,EAAEyB,IAAI,CAAC,IAAIf,EAAE,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAEe,EAAE,EAAE,EAAEf,EAAE,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,QAAQuJ,EAAE,EAAEA,EAAExI,EAAEwI,IAAI,CAAC,IAAItI,EAAE,EAAE,EAAEJ,EAAE0I,IAAI,CAAC,EAAErI,GAAEkO,GAAGrP,CAAC,EAAMkB,IAAJ,GAAYA,IAAL,KAAalB,IAAJ,EAAMuC,EAAEC,GAAG2I,GAAGhK,GAAE,CAAC,CAAC,EAAEA,GAAE,OAAO,GAAGA,GAAE,KAAKD,CAAC,CAAC,CAACU,GAAGZ,CAAC,CAAC,OAAO,EAAE,EAAEP,IAAI,IAAI,CAAC,EAAEmB,EAAE,CAAC,CAAC,IAAI0N,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAACxP,EAAEC,IAAI,CAACG,EAAE,EAAE,IAAIJ,EAAEC,IAAI,CAAC,CAAC,EAAE,SAASkJ,GAAGnJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,SAASjB,EAAET,EAAEC,GAAEG,GAAE,CAAC,IAAIJ,EAAY,OAAOA,GAAjB,SAAmBA,EAAE,SAAS,EAAEA,GAAG,GAAGA,EAAE,OAAOC,IAAGD,EAAEI,GAAE,CAAC,EAAEJ,EAAE,OAAOA,CAAC,CAAC,SAAS4B,EAAE5B,EAAEC,GAAE,CAAC,OAAOQ,EAAET,EAAEC,GAAE,GAAG,CAAC,CAAC,SAASa,EAAEd,EAAEC,GAAE,CAAC,SAASG,GAAEJ,GAAE,CAAC,MAAO,GAAEA,GAAE,GAAG,EAAEA,GAAE,EAAE,CAAC,CAAC,IAAI0B,GAAE,OAAYA,GAAEtB,GAAEJ,EAAE,YAAY,EAAEC,GAAE,YAAY,CAAC,KAAxC,IAAiDyB,GAAEtB,GAAEJ,EAAE,SAAS,EAAEC,GAAE,SAAS,CAAC,KAAlC,IAAuCyB,GAAEtB,GAAEJ,EAAE,QAAQ,EAAEC,GAAE,QAAQ,CAAC,GAAGyB,EAAC,CAAC,SAASV,EAAEhB,EAAE,CAAC,OAAOA,EAAE,OAAO,EAAE,CAAC,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAOA,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,G
AAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAASwJ,EAAExJ,EAAE,CAAC,IAAIC,GAAED,EAAE,GAAG,IAAIA,EAAE,IAAI,KAAK,IAAI,KAAKA,EAAE,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,EAAE,EAAEC,IAAG,CAAC,IAAIG,GAAEJ,EAAE,SAAS,EAAE0B,IAAGgN,GAAG1O,EAAE,YAAY,CAAC,EAAEsP,GAAGC,IAAInP,EAAC,EAAE,GAAG,EAAEH,GAAEyB,GAAE1B,EAAE,QAAQ,GAAG,CAACA,EAAE,QAAQA,EAAE,QAAQ,EAAEC,EAAC,EAAE,KAAK,CAACA,IAAGyB,GAAE1B,EAAE,QAAQ,EAAE,EAAEA,EAAE,QAAQ,CAAC,EAAE,GAAGI,GAAEJ,EAAE,SAASI,GAAE,CAAC,GAAGJ,EAAE,SAAS,CAAC,EAAEA,EAAE,YAAYA,EAAE,YAAY,EAAE,CAAC,EAAE,CAAC,OAAOI,GAAE,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,EAAEC,GAAEe,EAAE,IAAI,KAAKhB,EAAE,YAAY,EAAE,EAAE,CAAC,CAAC,EAAEI,GAAEY,EAAEZ,EAAC,EAAE,GAAGU,EAAEb,GAAED,CAAC,EAAE,GAAGc,EAAEV,GAAEJ,CAAC,EAAEA,EAAE,YAAY,EAAE,EAAEA,EAAE,YAAY,EAAEA,EAAE,YAAY,EAAE,CAAC,CAACA,KAAK,EAAEC,KAAK,EAAEG,KAAK,EAAEsB,KAAK,EAAE,IAAIR,EAAE,EAAE,EAAEQ,EAAE,KAAK,IAAI,CAAC,EAAE,QAAQP,MAAKO,EAAE,CAAC,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAGR,EAAEoI,GAAGpI,CAAC,EAAE,EAAE,EAAEd,EAAEkJ,GAAGlJ,CAAC,EAAEc,EAAE,CAAC,KAAK,uBAAuB,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK,KAAK,cAAc,KAAK,QAAQ,KAAK,WAAW,KAAK,WAAW,KAAK,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,WAAW,MAAM,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,IAAI,EAAEd,EAAEA,EAAE,QAAQ,IAAI,OAAOe,GAAE,GAAG,EAAED,EAAEC,EAAC,CAAC,EAAE,IAAIC,GAAE,2DAA2D,MAAM,GAAG,EAAEC,GAAE,wFAAwF,MAAM,GAAG,EAAE,IAAIF,MAAKD,EAAE,CAAC,KAAKlB,GAAGoB,GAAEpB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGoB,GAAEpB,EAAE,EAAE,EAAE,KAAKA,GAAGqB,GAAErB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGqB,GAAErB,EAAE,EAAE,EAAE,KAAKA,GAAG4B,GAAG5B,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAKA,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAKA,GAAGS,EAAET,EAAE,GAAG,EAAE,GAAG,EAAE,KAAKA,GAAGwJ,EAAExJ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKwJ,EAAE,KAAKxJ,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAKA,KAAQA,EAAEA,EAAE,KAAR,EAAYA,EAAE,GAAG,GAAGA,IAAIA,GAAG,IAAI4B,EAAE5B,EAAE,CAAC,GAAG,KAAKA,GAAG,CAAC,QAAQC,GAAE,EAAEG,GAAE,EAAEA,IAAGJ,EAAE,GAAG,EAAEC,KAAIyO,GAAG1O,EAAE,GAAG,IAAI,EAAEsP,GAAGC,IAAInP,IAAG,EAAE,CAAC,OAAOwB,EAAE5B,EAAE,GAAGC,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAG4B,EAAE5B,EAAE,GAAG,EAAE,CAAC,EAAE,KAAKA,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,EAAK,KAAKA,GAAG,GAAGA,EAAE,IAAI,GAAGA,EAAE,GAAG,KAAK,KAAK,KAAKA,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI,IAAK,KAAKA,GAAGA,EAAE,IAAI,EAAE,KAAKA,GAAG4B,EAAE,KAAK,OAAO5B,EAAE,GAAG,EAAEA,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,KAAKA,GAAG,CAAC,IAAIC,GAAE,KAAK,OAAOD,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,GAAG,IAAIA,EAAE,GAAG,IAAIA,EAAE,GAAG,GAAG,GAAGC,KAAIA,GAAMA,IAAJ,MAAYG,IAAGJ,EAAE,GAAG,IAAIA,EAAE,IAAI,IAAtB,GAA6BI,IAAH,GAAMsO,GAAG1O,EAAE,EAAE,IAAIC,GAAE,QAAQ,CAACA,GAAE,GAAG,IAAIG,IAAGJ,EAAE,GAAG,EAAEA,EAAE,GAAG,GAAG,GAAMI,IAAH,GAASA,IAAH,GAAMsO,GAAG1O,EAAE,GAAG,IAAI,CAAC,IAAIC,IAAG,CAAC,OAAO2B,EAAE3B,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAGA,EAAE,GAAG,KAAKA,GAAG4B,EAAE,KAAK,OAAO5B,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,KAAKA,IAAIA,EAAE,GAAG,MAAM,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,KAAKA,GAAG,CAAC,IAAIC,GAAE,IAAID,EAAEA,EAAE,IAAI,OAAOA,EAAE
,KAAK,IAAIA,CAAC,EAAE,IAAIC,GAAE,IAAI,MAAY,QAAQD,EAAE,GAAG,IAAIA,EAAE,KAAK,MAAM,EAAE,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,IAAI,GAAG,EAAEI,EAAEA,EAAE,QAAQ,MAAM,MAAM,EAAEc,EAAEd,EAAE,SAASe,EAAC,IAAIf,EAAEA,EAAE,QAAQ,IAAI,OAAOe,GAAE,GAAG,EAAED,EAAEC,EAAC,EAAEO,CAAC,CAAC,GAAG,OAAOP,GAAE,SAASnB,EAAE,CAAC,IAAIC,GAAE,MAAMmL,GAAGpL,CAAC,EAAE,CAAC,EAAE,OAAOqL,GAAGrL,EAAEC,GAAE,EAAEA,GAAE,MAAM,EAAEA,EAAC,EAAEG,EAAEA,EAAE,QAAQ,QAAQ,GAAG,CAAC,EAAEe,GAAE,OAAOlB,EAAE,GAAGuP,GAAGrO,GAAEnB,CAAC,EAAEmB,GAAE,OAAO,EAAE,CAAC,SAASiI,GAAGpJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOyH,GAAGnJ,IAAI,EAAEC,IAAI,EAAEG,IAAI,EAAEsB,IAAI,CAAC,CAAC,CAACH,GAAG,UAAU,CAAC,QAAQvB,EAAEmB,EAAE,WAAW,EAAEnB,KAAK8J,GAAG,EAAElG,GAAE,QAAS,IAAI,CAACG,KAAI,SAAS/D,EAAE,CAACuB,EAAEvB,EAAE,EAAE,QAAQ,IAAI6J,GAAG,IAAIE,EAAE,CAAC,EAAE,KAAK/J,CAAC,CAAC,EAAG,IAAIkE,GAAE,CAAE,CAAC,CAAE,CAAC,EAAE,EAAE,QAAQuL,GAAG,MAAM,GAAG,EAAEC,GAAG,EAAE,IAAIA,GAAG,EAAEA,GAAGD,GAAGC,EAAE,EAAE,OAAO,aAAaA,EAAE,EAAEnE,GAAGkE,GAAGjE,GAAGrK,EAAE,aAAa,cAAc,KAAK,CAAC,YAAYnB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,cAAc,CAAC,EAAEmB,EAAE,cAAc,cAAc,KAAK,CAAC,YAAYnB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,eAAe,CAAC,EAAEiM,GAAG,KAAK,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE9K,EAAE,oBAAoB,IAAI8K,GAAG,OAAO,EAAE,EAAED,GAAG,OAAO,IAAImB,GAAG,CAAC3C,GAAGC,GAAGQ,GAAGrG,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGqC,GAAGC,GAAGc,GAAGC,GAAGE,GAAGC,GAAGC,GAAGC,EAAE,EAAE8E,GAAG,UAAU,CAAC,SAAShO,EAAEA,EAAEC,EAAE,CAAC,OAAO+N,GAAGhO,EAAE,QAAQgO,GAAG,UAAU,CAAC,IAAIhO,EAAEgO,GAAG/N,EAAE,CAAC,EAAE,OAAO,CAACG,EAAEsB,CAAC,IAAI,OAAO,QAAQ1B,CAAC,EAAEC,EAAEG,CAAC,EAAc,OAAOsB,GAAnB,WAAqB,IAAI1B,IAAI,CAAC0N,GAAG,KAAKtN,CAAC,EAAE,GAAG,CAAC,OAAOsB,EAAE,GAAG1B,CAAC,CAAC,QAAC,CAAQ2D,KAAI+J,GAAG,IAAI,EAAE/L,IAAQ6L,KAAJ,GAAYE,GAAG,SAAP,IAAgBF,GAAG,EAAEtD,IAAI,EAAEqD,GAAGoC,EAAE,EAAe,OAAO,OAApB,KAA4B,OAAO,GAAG,GAAG,CAAC,EAAEjO,EAAE,OAAOzB,CAAC,EAAE,EAAE+N,GAAG,UAAU,CAAC,IAAIhO,EAAEgO,GAAG/N,EAAED,GAAGC,GAAGD,EAAEC,CAAC,IAAI,EAAEG,EAAEJ,GAAG,IAAIA,EAAE,IAAI,EAAE,OAAOA,EAAE,OAAO,OAAO,CAAC,EAAEA,CAAC,GAAG,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAGI,EAAEJ,EAAE,EAAE,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,kCAAkCI,EAAEJ,EAAE,iCAAiC,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAGI,EAAEJ,EAAE,EAAE,EAAEA,CAAC,EAAE,EAAE0K,GAAG,KAAKsD,GAAG,EAAE,EAAEnK,GAAE,QAAQmK,GAAG,EAAE,EAAEzK,GAAEtD,EAAEiE,GAAE,EAAE8J,EAAE,CAAC,IAAI/N,EAAEqD,GAAG,EAAE,GAAGS,KAAI5C,EAAE,gBAAgB,GAAG,CAAC,OAAOA,EAAE,gBAAgBlB,EAAED,CAAC,CAAC,OAAOA,EAAE,CAACwC,EAAE,sDAAsDxC,CAAC,EAAE,EAAEkB,EAAElB,CAAC,CAAC,CAAC,OAAOoE,KAAKjD,EAAE,WAAWkD,GAAG,kCAAkC,EAAE,mCAAmClD,EAAE,WAAWA,EAAE,WAAW,mCAAmCiB,CAAC,EAAEA,EAAE,mCAAmC,IAAI,IAAI,mCAAmC,YAAY,GAAG,EAAE,KAAK,SAASpC,EAAEC,EAAE,CAAC,IAAIG,EAAEgE,GAAG,OAAkB,OAAO,YAAY,sBAA/B,YAAqDC,GAAGjE,CAAC,GAAGkE,GAAGlE,CAAC,GAAe,OAAO,OAAnB,WAAyBoE,GAAGpE,EAAEJ,EAAEC,CAAC,EAAE,MAAMG,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMsB,GAAG,YAAY,qBAAqBA,EAAE1B,CAAC,EAAE,KAAKC,EAAG,SAASyB,EAAE,CAAC,OAAOc,EAAE,kCAAkCd,CAAC,EAAE,EAAEc,EAAE,2CAA2C,EAAEgC,GAAGpE,EAAEJ,EAAEC,CAAC,CAAC,CAAE,CAAE,CAAC,EAAEA,EAAG,SAASA,EAAE,CAACD,EAAEC,EAAE,SAASA,EAAE,MAAM,CAAC,CAAE,EAAE,MAAMiB,CAAC,EAAE,CAAC,CAAC,EAAE,EAAEmM,GAAGrN,IAAIqN,GAAGW,GAAG,IAAIhO,CAAC,EAAEgD,GAAG,KAAKA,GAAGgL,GAAG,IAAI,EAAE7M,EAAE,SAAS,CAACnB,EAAEC,KAAKkB,EAAE,SAAS6M,GAAG,IAAIhO,EAAEC,CAAC,EAAEkB,EAAE,iBAAiB,CAACnB,EAAEC,KAAKkB,EAAE,iBAAiB6M,GAAG,IAAIhO,EAAEC,CAAC,EAAEkB,EAAE,yBAAyB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,EAAEzI,EAAEE,KAAKG,EAAE,yBAAyB6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,EAAEzI,EAAEE,CAAC,EAAEG,EAAE,4BAA4B
,CAACnB,EAAEC,KAAKkB,EAAE,4BAA4B6M,GAAG,IAAIhO,EAAEC,CAAC,EAAEkB,EAAE,6BAA6B,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,6BAA6B6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,0BAA0B,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,0BAA0B6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,0BAA0BnB,IAAImB,EAAE,0BAA0B6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,kBAAkB,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,kBAAkB6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,mBAAmBnB,IAAImB,EAAE,mBAAmB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,wBAAwB,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,wBAAwB6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,iBAAiB,CAACnB,EAAEC,KAAKkB,EAAE,iBAAiB6M,GAAG,IAAIhO,EAAEC,CAAC,EAAEkB,EAAE,kBAAkB,CAACnB,EAAEC,KAAKkB,EAAE,kBAAkB6M,GAAG,IAAIhO,EAAEC,CAAC,EAAEkB,EAAE,SAASnB,IAAImB,EAAE,SAAS6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,iBAAiB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,KAAKT,EAAE,iBAAiB6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,CAAC,EAAET,EAAE,kBAAkB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,KAAKU,EAAE,kBAAkB6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEU,EAAE,kBAAkBnB,IAAImB,EAAE,kBAAkB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,qBAAqB,CAACnB,EAAEC,EAAEG,EAAEsB,KAAKP,EAAE,qBAAqB6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,CAAC,EAAEP,EAAE,sBAAsB,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,sBAAsB6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,sBAAsBnB,IAAImB,EAAE,sBAAsB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,kBAAkBnB,IAAImB,EAAE,kBAAkB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,cAAc,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,cAAc6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,eAAe,CAACnB,EAAEC,EAAEG,EAAEsB,KAAKP,EAAE,eAAe6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,CAAC,EAAEP,EAAE,sBAAsBnB,IAAImB,EAAE,sBAAsB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,mBAAmBnB,IAAImB,EAAE,mBAAmB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,mBAAmB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,KAAKU,EAAE,mBAAmB6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEU,EAAE,QAAQ,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,KAAKpI,EAAE,QAAQ6M,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE0H,CAAC,EAAEpI,EAAE,iBAAiBnB,IAAImB,EAAE,iBAAiB6M,GAAG,IAAIhO,CAAC,EAAEmB,EAAE,YAAY,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,YAAY6M,GAAG,IAAIhO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,iBAAiBnB,IAAImB,EAAE,iBAAiB6M,GAAG,IAAIhO,CAAC,EAAE,IAAI4P,GAAG1M,GAAG,KAAKA,GAAG8K,GAAG,IAAI,EAAExB,GAAGrL,EAAE,QAAQnB,IAAIwM,GAAGrL,EAAE,QAAQ6M,GAAG,IAAIhO,CAAC,EAAEuM,GAAGpL,EAAE,MAAMnB,IAAIuM,GAAGpL,EAAE,MAAM6M,GAAG,IAAIhO,CAAC,EAAE2C,GAAG,CAAC3C,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,KAAKe,GAAGqL,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,CAAC,EAAEyB,GAAG,KAAKA,GAAG2K,GAAG,IAAI,EAAE1D,GAAG,CAACtK,EAAEC,EAAEG,EAAEsB,EAAEjB,KAAK6J,GAAG0D,GAAG,IAAIhO,EAAEC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEmK,GAAG5K,IAAI4K,GAAGoD,GAAG,IAAIhO,CAAC,EAAEmD,GAAGnD,IAAImD,GAAG6K,GAAG,IAAIhO,CAAC,EAAEiN,GAAG,KAAKA,GAAGe,GAAG,IAAI,EAAElD,GAAG,CAAC9K,EAAEC,KAAK6K,GAAGkD,GAAG,IAAIhO,EAAEC,CAAC,EAAEsK,GAAGvK,IAAIuK,GAAGyD,GAAG,IAAIhO,CAAC,EAAEqK,GAAGrK,IAAIqK,GAAG2D,GAAG,IAAIhO,CAAC,EAAEoK,GAAG,KAAKA,GAAG4D,GAAG,IAAI,EAAEjD,GAAG5J,EAAE,WAAW,CAACnB,EAAEC,KAAK8K,GAAG5J,EAAE,WAAW6M,GAAG,IAAIhO,EAAEC,CAAC,EAAEgO,GAAGjO,IAAIiO,GAAGD,GAAG,IAAIhO,CAAC,EAAE2P,GAAG,KAAKA,GAAG3B,GAAG,IAAI,EAAED,GAAG/N,IAAI+N,GAAGC,GAAG,IAAIhO,CAAC,EAAEkO,GAAG,KAAKA,GAAGF,GAAG,IAAI,EAAE,SAAS6B,IAAI,CAAC,GAAG,EAAE,EAAE9L,IAAG,GAAGxC,EAAE,EAAEJ,CAAC,EAAEI,GAAGsJ,GAAGhH,EAAC,EAAE,YAAY1C,CAAC,MAAM,CAAC,GAAGA,EAAE,OAAO,IAAgB,OAAOA,EAAE,QAArB,aAA8BA,EAAE,OAAO,CAACA,EAAE,MAAM,GAAGA,EAAE,OAAO,QAAQyC,GAAE,QAAQzC,EAAE,OAAO,MAAM,CAAC,EAAE0J,GAAGjH,EAAC,EAAE,EAAEG,IAAG6L,KAAKA,GAAG,GAAGzO,EAAE,UAAU,GAAGwC,KAAIpC,GAAGsJ,GAAGhH,EAAC,EAAE,EAAE1C,CAAC,EAAEI,GAAGsJ,GAAG/G,EAAC,GAAG,CAAC,CAAC,OAAO3C,EAAE,eAAe,QAAQA,EAAE,cAAc,QAAQA,EAAE,UAAU,IAAIiJ,GAAG,EAAEjJ,EA
AE,aAAanB,GAAGuK,GAAGvK,CAAC,EAAEmB,EAAE,WAAWnB,GAAGqK,GAAGrK,CAAC,EAAEmB,EAAE,aAAamI,GAAGnI,EAAE,aAAamK,GAAGnK,EAAE,gBAAgBiK,GAAGnH,GAAE,SAASjE,GAAG,CAAC4P,IAAIC,GAAG,EAAED,KAAK3L,GAAEjE,EAAE,EAAE6P,GAAG,EAAEzO,CAAC,GAAUrB,GAAQE,GAAiB,WAAW,MAAM,OAAhC,cAAsCA,GAAE,ICArl0C,IAWa6P,GAePC,GAKAC,GAwCAC,GAsBAC,GAeOC,GAoBPC,GAsBOC,GAtJbC,GAAAC,EAAA,kBAIAC,KAOaV,GAET,GAAS,OAEA,kBAEJ,OAAO,SAAa,IAAe,SAAS,eAAqC,IAE9C,OAAO,KAAS,IAAc,KAAK,UAAU,KAAO,QAO1FC,GAAS,IAAU,OAAO,SAAa,IAAc,OAAY,SAAS,OAK1EC,GAAe,CAACS,EAAkBC,IAA4B,CAClE,GAAI,CACF,IAAMC,EAAUD,GAAkBZ,GAElC,OADYa,EAAU,IAAI,IAAIF,EAAUE,CAAO,EAAI,IAAI,IAAIF,CAAQ,GACxD,SAAWV,EACxB,MAAQ,CACN,MAAO,EACT,CACF,EAgCME,GAAU,MAAMW,GAAyC,CAE7D,IAAMC,EAAO,MADI,MAAM,MAAMD,EAAa,CAAC,YAAa,aAAa,CAAC,GAC1C,KAAK,EACjC,OAAO,IAAI,gBAAgBC,CAAI,CACjC,EAkBMX,GAE0C,cAA+B,QAalEC,GAAoB,SAAkD,CACjF,GAAI,CAACL,GACH,MAAM,IAAI,MAAM,sEAAsE,EAIxF,GAAIE,GAAaF,EAAS,EACxB,MAAO,CAAC,OAAWI,GAAmB,CAAC,EAIzC,IAAMY,EAAM,MAAMb,GAAQH,EAAS,EACnC,MAAO,CAACgB,EAAKZ,GAAmBY,CAAG,CAAC,CACtC,EAOMV,GAGF,cAIK,QAeIC,GAAmB,MAC5BU,EAA+BL,EAC/BM,IAEO,CAAC,OAAWZ,EAAmB,IC1J1C,IAQIa,GACAC,GACAC,GACAC,GAEEC,GAwBAC,GAyBOC,GA+GAC,GA7KbC,GAAAC,EAAA,kBAMAC,KAGIT,GAAc,GACdC,GAAe,GACfC,GAAU,GAERC,GAAyB,IAAe,CAE5C,GAAI,OAAO,kBAAsB,IAC/B,MAAO,GAGT,GAAI,CAGF,OAAI,OAAO,eAAmB,KAC5B,IAAI,eAAe,EAAE,MAAM,YAAY,IAAI,kBAAkB,CAAC,CAAC,EAK1D,YAAY,SAAS,IAAI,WAAW,CACzC,EAAG,GAAI,IAAK,IAAK,EAAG,EAAI,EAAI,EAAG,EAAG,EAAG,EAAI,GAAI,EAAK,EAAI,EAAG,EAAG,EAAI,EAAG,EACnE,EAAG,EAAI,EAAK,EAAK,EAAG,GAAI,GAAI,EAAG,EAAG,EAAG,GAAI,EAAI,IAAK,GAAI,EAAG,EAAG,GAAI,EAClE,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEMC,GAAkB,IAAe,CACrC,GAAI,CAeF,OAAO,YAAY,SAAS,IAAI,WAAW,CACzC,EAAK,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,GAAI,EAAK,GAAK,EAAG,GAAI,EACvF,IAAK,GAAI,IAAK,GAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAI,IAAK,IAAK,EAAG,GAAI,EACzF,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEaC,GAAwB,MAAMK,GAA+C,CACxF,GAAIV,GACF,OAAO,QAAQ,QAAQ,EAEzB,GAAIC,GACF,MAAM,IAAI,MAAM,uDAAyD,EAE3E,GAAIC,GACF,MAAM,IAAI,MAAM,oDAAsD,EAGxED,GAAe,GAGf,IAAMU,EAAUD,EAAM,YAClBE,EAAaF,EAAM,WAGvB,GAAI,CAACN,GAAgB,EACnB,MAAM,IAAI,MAAM,+DAA+D,EAIjF,IAAMS,EAAuBV,GAAuB,EAChDS,EAAa,GAAK,CAACC,IACjB,OAAO,KAAS,KAAe,CAAC,KAAK,qBAEvC,QAAQ,KACJ,iCAAmCD,EACnC,uIACkE,EAIxE,QAAQ,KACJ,4GACmC,EAGvCF,EAAM,WAAaE,EAAa,GAGlC,IAAME,EAAYJ,EAAM,UAClBK,EAAqB,OAAOD,GAAc,SAAWA,EAAY,OACjEE,EAAuBF,GAAiC,IACxDG,EAAmBD,GAA6B,MAAQA,EACxDE,EAAwBJ,GAAiC,KACzDK,EAAoBD,GAA8B,MAAQA,EAE1D,CAACE,EAAWC,CAAc,EAAK,MAAMC,GAAiBL,EAAiBF,EAAoBH,EAAa,CAAC,EAE3GW,EAAY,GAEVC,EAA8B,CAAC,EAqDrC,GAlDIb,EAAU,GACZa,EAAM,KAAK,IAAI,QAASC,GAAY,CAClC,WAAW,IAAM,CACfF,EAAY,GACZE,EAAQ,CACV,EAAGd,CAAO,CACZ,CAAC,CAAC,EAIJa,EAAM,KAAK,IAAI,QAAQ,CAACC,EAASC,IAAW,CAC1C,IAAMC,EAAiC,CAKrC,WAAAf,CACF,GAEIO,GAAoBJ,KAMtBY,EAAO,WAAa,CAACC,EAAUC,IAC3BV,IAAqBJ,GAAsBc,GAAmBD,GAGpEP,EAAeM,CAAM,EAAE,KAEnBG,GAAU,CACR7B,GAAe,GACfD,GAAc,GACdD,GAAO+B,EACPL,EAAQ,EACJL,GACF,IAAI,gBAAgBA,CAAS,CAEjC,EAECW,GAAS,CACR9B,GAAe,GACfC,GAAU,GACVwB,EAAOK,CAAI,CACb,CAAC,CACP,CAAC,CAAC,EAEF,MAAM,QAAQ,KAAKP,CAAK,EAEpBD,EACF,MAAM,IAAI,MAAM,2DAA2DZ,CAAO,IAAI,CAE1F,EAEaL,GAAc,IAAqB,CAC9C,GAAIN,IAAeD,GACjB,OAAOA,GAGT,MAAM,IAAI,MAAM,qCAAqC,CACvD,ICnLA,IAKaiC,GAeAC,GA6BAC,GAjDbC,GAAAC,EAAA,kBAGAC,KAEaL,GAAkB,CAACM,EAAcC,IAA6B,CACzE,IAAMC,EAAOC,GAAY,EAEnBC,EAAaF,EAAK,gBAAgBF,CAAI,EAAI,EAC1CK,EAAaH,EAAK,QAAQE,CAAU,EAC1C,OAAAF,EAAK,aAAaF,EAAMK,EAAYD,CAAU,EAC9CH,EAAO,KAAKI,CAAU,EAEfA,CACT,EAMaV,GACT,CAACW,EAAkCC,EAAgBC,EAClDC,IAAuC,CACtC,GAAI,OAAOH,GAAW,UAAYA,IAAY,KAAM,CAClD,GAAIE,EAAK,IAAIF,CAAO,EAClB
,MAAM,IAAI,MAAM,+BAA+B,EAE/CE,EAAK,IAAIF,CAAO,CAEpB,CAEA,OAAO,QAAQA,CAAO,EAAE,QAAQ,CAAC,CAACI,EAAKC,CAAK,IAAM,CAChD,IAAMC,EAAQL,EAAUA,EAASG,EAAMA,EACvC,GAAI,OAAOC,GAAU,SACnBhB,GAAoBgB,EAAkCC,EAAO,IAAKJ,EAAMC,CAAO,UACtE,OAAOE,GAAU,UAAY,OAAOA,GAAU,SACvDF,EAAQG,EAAMD,EAAM,SAAS,CAAC,UACrB,OAAOA,GAAU,UAC1BF,EAAQG,EAAOD,EAAS,IAAM,GAAG,MAEjC,OAAM,IAAI,MAAM,mCAAmC,OAAOA,CAAK,EAAE,CAErE,CAAC,CACH,EAMSf,GAAkBiB,GAA0B,CACvD,IAAMX,EAAOC,GAAY,EAEnBW,EAAQZ,EAAK,UAAU,EAC7B,GAAI,CACF,IAAMa,EAAeb,EAAK,WAAW,CAAC,EACtCA,EAAK,iBAAiBa,EAAcA,EAAe,CAAC,EACpD,IAAMC,EAAYd,EAAK,OAAOa,EAAe,CAAC,EACxCE,EAAsBf,EAAK,QAAQa,EAAe,EAAI,CAAC,EACvDG,EAAeD,EAAsBf,EAAK,aAAae,CAAmB,EAAI,GACpF,MAAM,IAAI,MAAM,GAAGJ,CAAO,gBAAgBG,CAAS,oBAAoBE,CAAY,EAAE,CACvF,QAAE,CACAhB,EAAK,aAAaY,CAAK,CACzB,CACF,IC/DA,IAQaK,GARbC,GAAAC,EAAA,kBAKAC,KACAC,KAEaJ,GAAiBK,GAA6D,CACzF,IAAMC,EAAOC,GAAY,EACrBC,EAAmB,EACjBC,EAAmB,CAAC,EAEpBC,EAA0CL,GAAW,CAAC,EAE5D,GAAI,CACF,GAAIA,GAAS,mBAAqB,OAChCK,EAAW,iBAAmB,UAE5B,OAAOL,EAAQ,kBAAqB,UAAY,CAAC,OAAO,UAAUA,EAAQ,gBAAgB,GAC1FA,EAAQ,iBAAmB,GAAKA,EAAQ,iBAAmB,EAC7D,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,gBAAgB,EAAE,EAGjF,GAAIA,GAAS,oBAAsB,OACjCK,EAAW,kBAAoB,UACtB,OAAOL,EAAQ,mBAAsB,UAAY,CAAC,OAAO,UAAUA,EAAQ,iBAAiB,EACrG,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,iBAAiB,EAAE,EAG9EA,GAAS,YAAc,SACzBK,EAAW,UAAY,IAGzB,IAAIC,EAAgB,EACpB,OAAIN,GAAS,MAAQ,SACnBM,EAAgBC,GAAgBP,EAAQ,IAAKI,CAAM,GAGrDD,EAAmBF,EAAK,qBACpBI,EAAW,iBAAmBA,EAAW,kBAAoB,CAAC,CAACA,EAAW,UAAYC,CAAa,EACnGH,IAAqB,GACvBK,GAAe,2BAA4B,EAGzCR,GAAS,QAAU,QACrBS,GAAoBT,EAAQ,MAAO,GAAI,IAAI,QAAoC,CAACU,EAAKC,IAAU,CAC7F,IAAMC,EAAgBL,GAAgBG,EAAKN,CAAM,EAC3CS,EAAkBN,GAAgBI,EAAOP,CAAM,EAEjDH,EAAK,sBAAsBE,EAAkBS,EAAeC,CAAe,IAAM,GACnFL,GAAe,iCAAiCE,CAAG,MAAMC,CAAK,GAAG,CAErE,CAAC,EAGI,CAACR,EAAkBC,CAAM,CAClC,OAASU,EAAG,CACV,MAAIX,IAAqB,GACvBF,EAAK,sBAAsBE,CAAgB,EAE7CC,EAAO,QAAQW,GAASd,EAAK,MAAMc,CAAK,CAAC,EACnCD,CACR,CACF,IChEA,IAQME,GAeAC,GAWAC,GAoBAC,GAwDOC,GA9GbC,GAAAC,EAAA,kBAKAC,KACAC,KAEMR,GAA4BS,GAAmD,CACnF,OAAQA,EAAwB,CAC9B,IAAK,WACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,IAAK,MACH,MAAO,IACT,QACE,MAAM,IAAI,MAAM,yCAAyCA,CAAsB,EAAE,CACrF,CACF,EAEMR,GAAoBS,GAAmD,CAC3E,OAAQA,EAAe,CACrB,IAAK,aACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,+BAA+BA,CAAa,EAAE,CAClE,CACF,EAEMR,GAAwBS,GAAmD,CAC1EA,EAAQ,QACXA,EAAQ,MAAQ,CAAC,GAEdA,EAAQ,MAAM,UACjBA,EAAQ,MAAM,QAAU,CAAC,GAE3B,IAAMC,EAAUD,EAAQ,MAAM,QACzBC,EAAQ,+BAEXA,EAAQ,6BAA+B,KAIrCD,EAAQ,oBACRA,EAAQ,mBAAmB,KAAKE,IAAO,OAAOA,GAAO,SAAWA,EAAKA,EAAG,QAAU,QAAQ,IAC5FF,EAAQ,iBAAmB,GAE/B,EAEMR,GACF,CAACW,EAA8BC,EAC9BC,IAA2B,CAC1B,QAAWH,KAAME,EAAoB,CACnC,IAAIE,EAAS,OAAOJ,GAAO,SAAWA,EAAKA,EAAG,KAG9C,OAAQI,EAAQ,CACd,IAAK,QAEH,GADAA,EAAS,QACL,OAAOJ,GAAO,SAAU,CAG1B,IAAMK,EAFeL,GAEsD,WAC3E,GAAIK,EAAY,CACd,IAAMC,EAAgBC,GAAgB,aAAcJ,CAAM,EACpDK,EAAkBD,GAAgBF,EAAYF,CAAM,EACtDM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GAAe,oDAAoDL,CAAU,GAAG,CAEpF,CACF,CACA,MACF,IAAK,SAEH,GADAD,EAAS,KACL,OAAOJ,GAAO,SAAU,CAC1B,IAAMW,EAAgBX,EACtB,GAAIW,GAAe,gBAAiB,CAClC,GAAIA,EAAc,kBAAoB,QAAUA,EAAc,kBAAoB,OAChF,MAAM,IAAI,MAAM,oDAAoDA,EAAc,eAAe,EAAE,EAErG,IAAML,EAAgBC,GAAgB,kBAAmBJ,CAAM,EACzDK,EAAkBD,GAAgBI,EAAc,gBAAiBR,CAAM,EACzEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GACI,yDAAyDC,EAAc,eAAe,GAAG,CAEjG,CACF,CACA,MACF,IAAK,OACL,IAAK,MACH,SACF,QACE,MAAM,IAAI,MAAM,qCAAqCP,CAAM,EAAE,CACjE,CAEA,IAAMQ,EAAmBL,GAAgBH,EAAQD,CAAM,EACnDM,GAAY,EAAE,4BAA4BR,EAAsBW,CAAgB,IAAM,GACxFF,GAAe,oCAAoCN,CAAM,GAAG,CAEhE,CACF,EAESb,GAAqBO,GAAkE,CAClG,IAAMe,EAAOJ,GAAY,EACrBR,EAAuB,EACrBE,EAAmB,CAAC,EAEpBW,EAAkDhB,GAAW,CAAC,EACpET,GAAqByB,CAAc,EAEnC,GAAI,CACF,IAAMlB,EAAyBT,GAAyB2B,E
AAe,wBAA0B,KAAK,EAChGjB,EAAgBT,GAAiB0B,EAAe,eAAiB,YAAY,EAC7EC,EACF,OAAOD,EAAe,OAAU,SAAWP,GAAgBO,EAAe,MAAOX,CAAM,EAAI,EAEzFa,EAAmBF,EAAe,kBAAoB,EAC5D,GAAI,CAAC,OAAO,UAAUE,CAAgB,GAAKA,EAAmB,GAAKA,EAAmB,EACpF,MAAM,IAAI,MAAM,qCAAqCA,CAAgB,EAAE,EAGzE,IAAMC,EAAoBH,EAAe,mBAAqB,EAC9D,GAAI,CAAC,OAAO,UAAUG,CAAiB,GAAKA,EAAoB,GAAKA,EAAoB,EACvF,MAAM,IAAI,MAAM,qCAAqCA,CAAiB,EAAE,EAG1E,IAAMC,EAA+B,OAAOJ,EAAe,wBAA2B,SAClFP,GAAgBO,EAAe,uBAAwBX,CAAM,EAC7D,EAcJ,GAZAF,EAAuBY,EAAK,yBACxBjB,EAAwB,CAAC,CAACkB,EAAe,kBAAmB,CAAC,CAACA,EAAe,iBAAkBjB,EAC/F,CAAC,CAACiB,EAAe,gBAAiB,EAAGC,EAAiBC,EAAkBC,EACxEC,CAA4B,EAC5BjB,IAAyB,GAC3BS,GAAe,+BAAgC,EAG7CI,EAAe,oBACjBxB,GAAsBW,EAAsBa,EAAe,mBAAoBX,CAAM,EAGnFW,EAAe,qBAAuB,OAAW,CACnD,GAAI,OAAOA,EAAe,oBAAuB,UAC/C,MAAM,IAAI,MAAM,+CAA+CA,EAAe,kBAAkB,EAAE,EAEpG,IAAMR,EAAgBC,GAAgB,qBAAsBJ,CAAM,EAC5DK,EAAkBD,GAAgBO,EAAe,mBAAmB,SAAS,EAAGX,CAAM,EACxFU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GACI,4DAA4DI,EAAe,kBAAkB,GAAG,CAExG,CAEA,GAAIA,EAAe,uBACjB,OAAW,CAACK,EAAMC,CAAK,IAAK,OAAO,QAAQN,EAAe,sBAAsB,EAAG,CACjF,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,MAAM,kDAAkDA,CAAI,EAAE,EAE1E,GAAI,OAAOC,GAAU,UAAY,CAAC,OAAO,UAAUA,CAAK,GAAKA,EAAQ,EACnE,MAAM,IAAI,MAAM,iEAAiEA,CAAK,EAAE,EAE1F,IAAMC,EAAad,GAAgBY,EAAMhB,CAAM,EAC3CU,EAAK,6BAA6BZ,EAAsBoB,EAAYD,CAAK,IAAM,GACjFV,GAAe,wCAAwCS,CAAI,MAAMC,CAAK,GAAG,CAE7E,CAGF,OAAIN,EAAe,QAAU,QAC3BQ,GAAoBR,EAAe,MAAO,GAAI,IAAI,QAAoC,CAACS,EAAKH,IAAU,CACpG,IAAMd,EAAgBC,GAAgBgB,EAAKpB,CAAM,EAC3CK,EAAkBD,GAAgBa,EAAOjB,CAAM,EAEjDU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GAAe,qCAAqCa,CAAG,MAAMH,CAAK,GAAG,CAEzE,CAAC,EAGI,CAACnB,EAAsBE,CAAM,CACtC,OAASqB,EAAG,CACV,MAAIvB,IAAyB,GAC3BY,EAAK,0BAA0BZ,CAAoB,EAErDE,EAAO,QAAQsB,GAASZ,EAAK,MAAMY,CAAK,CAAC,EACnCD,CACR,CACF,ICpMA,IAuCaE,GAqCAC,GAsCAC,GAMAC,GAqCAC,GAoBAC,GAOAC,GAxLbC,GAAAC,EAAA,kBAuCaR,GAA8BS,GAA2B,CACpE,OAAQA,EAAM,CACZ,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IACT,IAAK,UACH,MAAO,IACT,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,IACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,EAKaR,GAA8BS,GAAqC,CAC9E,OAAQA,EAAW,CACjB,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,UACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,CACzD,CACF,EAMaR,GAAwBS,GACpB,CAAC,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,OAAW,MAAS,EAAEA,CAAQ,EAKxGR,GAAqCM,GAEoD,CAChG,OAAQA,EAAM,CACZ,IAAK,UAEH,OAAO,OAAO,aAAiB,KAAe,aAAa,KAAO,aAAe,YACnF,IAAK,UACH,OAAO,aACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,WACT,IAAK,UACH,OAAO,aACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,SACH,OAAO,eACT,QACE,MAAM,IAAI,MAAM,qBAAqBA,CAAI,EAAE,CAC/C,CACF,EAKSL,GAAwBQ,GAAkE,CACrG,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,EAKaP,GAA4BI,GAAyDA,IAAS,WACvGA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAAYA,IAAS,SAC5FA,IAAS,OAKAH,GAA4BO,GAA0C,CACjF,OAAQA,EAAU,CAChB,IAAK,OACH,MAAO,GACT,IAAK,MACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,ICvMA,IAWaC,GAXbC,GAAAC,EAAA,kBAGAC,KAQaH,GAAW,MAAMI,GAAsE,CAClG,GAAI,OAAOA,GAAS,SAClB,GAAI,GAEF,GAAI,CACF,GAAM,CA
AC,SAAAC,CAAQ,EAAI,GAAQ,kBAAkB,EAC7C,OAAO,IAAI,WAAW,MAAMA,EAASD,CAAI,CAAC,CAC5C,OAAS,EAAG,CACV,GAAI,EAAE,OAAS,wBAAyB,CAEtC,GAAM,CAAC,iBAAAE,CAAgB,EAAI,GAAQ,SAAS,EACtCC,EAASD,EAAiBF,CAAI,EAC9BI,EAAuB,CAAC,EAC9B,cAAiBC,KAASF,EACxBC,EAAO,KAAKC,CAAK,EAEnB,OAAO,IAAI,WAAW,OAAO,OAAOD,CAAM,CAAC,CAC7C,CACA,MAAM,CACR,KACK,CAEL,IAAME,EAAW,MAAM,MAAMN,CAAI,EACjC,GAAI,CAACM,EAAS,GACZ,MAAM,IAAI,MAAM,sCAAsCN,CAAI,EAAE,EAE9D,IAAMO,EAAsBD,EAAS,QAAQ,IAAI,gBAAgB,EAC3DE,EAAWD,EAAsB,SAASA,EAAqB,EAAE,EAAI,EAC3E,GAAIC,EAAW,WAGb,OAAO,IAAI,WAAW,MAAMF,EAAS,YAAY,CAAC,EAC7C,CAEL,GAAI,CAACA,EAAS,KACZ,MAAM,IAAI,MAAM,sCAAsCN,CAAI,qBAAqB,EAEjF,IAAMS,EAASH,EAAS,KAAK,UAAU,EAEnCI,EACJ,GAAI,CAEFA,EAAS,IAAI,YAAYF,CAAQ,CACnC,OAASG,EAAG,CACV,GAAIA,aAAa,WAAY,CAE3B,IAAMC,EAAQ,KAAK,KAAKJ,EAAW,KAAK,EACxCE,EAAS,IAAI,YAAY,OAAO,CAAC,QAASE,EAAO,QAASA,CAAK,CAAC,EAAE,MACpE,KACE,OAAMD,CAEV,CAEA,IAAIE,EAAS,EAEb,OAAa,CACX,GAAM,CAAC,KAAAC,EAAM,MAAAC,CAAK,EAAI,MAAMN,EAAO,KAAK,EACxC,GAAIK,EACF,MAEF,IAAME,EAAYD,EAAM,WACV,IAAI,WAAWL,EAAQG,EAAQG,CAAS,EAChD,IAAID,CAAK,EACfF,GAAUG,CACZ,CACA,OAAO,IAAI,WAAWN,EAAQ,EAAGF,CAAQ,CAC3C,CACF,KAEK,QAAIR,aAAgB,KAClB,IAAI,WAAW,MAAMA,EAAK,YAAY,CAAC,EACrCA,aAAgB,WAClBA,EAEA,IAAI,WAAWA,CAAI,CAE9B,ICvFA,IAYMiB,GAEAC,GAKFC,GACAC,GAESC,GAQAC,GAWAC,GAzCbC,GAAAC,EAAA,kBAKAC,KAOMT,GAAiB,CAAC,IAAK,IAAK,IAAK,IAAK,GAAG,EAEzCC,GAAQ,CAACS,EAAeC,IAA0B,CAEtD,QAAQ,IAAI,IAAIX,GAAeU,CAAK,CAAC,IAAI,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIC,CAAO,EAAE,CAChF,EAKaP,GAAkB,CAACQ,EAA2BC,IAA0B,CACnFX,GAAiBU,EACjBT,GAAQU,CACV,EAKaR,GAAM,CAACS,EAAoBC,IAAuB,CAC7D,IAAMC,EAAeC,GAAqBH,CAAQ,EAC5CI,EAAcD,GAAqBf,EAAc,EACnDc,GAAgBE,GAClBjB,GAAMe,EAAc,OAAOD,GAAQ,WAAaA,EAAI,EAAIA,CAAG,CAE/D,EAKaT,GAAwB,IAAIa,IAAiC,CACpEhB,IACFE,GAAI,GAAGc,CAAI,CAEf,IC7CA,IAOaC,GAPbC,GAAAC,EAAA,kBAKAC,KAEaH,GAAa,CAACI,EAAyBC,IAE5C,IAAKC,GAAkCD,CAAI,GAAGD,CAAU,ICThE,IAAAG,GAAAC,EAAA,oBCAA,IA8EMC,GA+BAC,GAKAC,GAKAC,GAWFC,GACEC,GAYOC,GAkCPC,GAoSOC,GArdbC,GAAAC,EAAA,kBAIAC,KAEAC,KAwEMZ,GAAsC,IAAI,IAAI,CAClD,CAAC,GAAI,GAAG,EACR,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,EAAE,EACT,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,CAAC,EACZ,CAAC,SAAU,CAAC,EAGZ,CAAC,SAAU,CAAC,EACZ,CAAC,UAAW,CAAC,EACb,CAAC,UAAW,CAAC,CACf,CAAC,EAEKC,GAAsB,CAAC,EAKvBC,GAA4BW,GAAiB,KAAK,KAAKA,EAAO,EAAE,EAAI,GAKpEV,GAAwBU,GAAiB,CAC7C,QAASC,EAAM,EAAGA,EAAMb,GAAU,OAAQa,IAAO,CAC/C,IAAMC,EAAgBd,GAAUa,CAAG,EACnC,GAAID,GAAQE,EACV,OAAOA,CAEX,CAEA,OAAO,KAAK,KAAKF,EAAO,EAAE,EAAI,EAChC,EAEIT,GAAO,EACLC,GAAqB,IAAMD,KAYpBE,GACT,MAAMU,EAAwBC,EAAsBC,EAAsBC,IAC/C,CACrB,IAAMC,EAAalB,GAAyBgB,CAAY,EAClDG,EAAgBL,EAAQ,OAAO,aAEjC,CAAC,KAAMI,EAAY,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAChF,GAAI,CACF,IAAME,EAAiBN,EAAQ,kBAAkB,EACjDA,EAAQ,eAAe,EACvBM,EAAe,mBACXL,EAA+B,EAAuBI,EACtD,EAA4BD,CAChC,EACAJ,EAAQ,MAAM,EAEd,MAAMK,EAAc,SAAS,WAAW,IAAI,EAE5C,IAAME,EAAcF,EAAc,eAAe,EACjD,GAAIF,EAAiB,CAEnB,IAAMK,EAAeL,EAAgB,EACrC,OAAAK,EAAa,IAAI,IAAI,WAAWD,EAAa,EAAGL,CAAY,CAAC,EACtDM,CACT,KAGE,QAAO,IAAI,WAAWD,EAAY,MAAM,EAAGL,CAAY,CAAC,CAE5D,QAAE,CACAG,EAAc,QAAQ,CACxB,CACF,EAEFd,GAAN,KAAmD,CAqBjD,YAAoBS,EAAwB,CAAxB,aAAAA,EAClB,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,2BAA6B,CAAC,EACnC,KAAK,eAAiB,CAAC,EACvB,KAAK,gBAAkB,IAAI,IAC3B,KAAK,uBAAyB,IAAI,IAElC,OAAW,CAACS,CAAK,IAAKzB,GACpBC,GAAU,KAAKwB,CAAG,EAClB,KAAK,YAAY,IAAIA,EAAK,CAAC
,CAAC,EAC5B,KAAK,mBAAmB,IAAIA,EAAK,CAAC,CAAC,CAEvC,CAEA,OAAOC,EAAeC,EAAwB,CAC5C,IAAMC,EAAiBD,EAAK,OACtBE,EAAYF,EAAK,WACjBG,EAAYH,EAAK,WACjBd,EAAOX,GAAyB4B,CAAS,EAGzCC,EAAe,KAAK,aAAa,IAAIL,CAAE,EAC7C,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,uCAAuC,EAEzD,GAAIA,EAAa,eAAiBD,EAChC,MAAM,IAAI,MAAM,yCAAyCC,EAAa,YAAY,eAAeD,CAAS,EAAE,EAI9G,IAAME,EAAwB,KAAK,QAAQ,OAAO,aAE9C,CAAC,iBAAkB,GAAM,KAAAnB,EAAM,MAAO,eAAe,UAAY,eAAe,QAAQ,CAAC,EAGvFU,EAAcS,EAAsB,eAAe,EACzD,IAAI,WAAWT,CAAW,EAAE,IAAI,IAAI,WAAWK,EAAgBC,EAAWC,CAAS,CAAC,EACpFE,EAAsB,MAAM,EAI5B,IAAMV,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBAAmBU,EAAuB,EAAGD,EAAa,QAAQ,OAAQ,EAAGlB,CAAI,EAEhGoB,GAAU,UAAW,IAAM,qCAAqCP,CAAE,GAAG,EAErE,KAAK,2BAA2B,KAAKM,CAAqB,CAC5D,CAEA,OAAOE,EAAqBC,EAAgC,CAE1D,IAAMC,EAAqB,KAAK,aAAa,IAAIF,CAAQ,EACzD,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,2CAA2C,EAG7D,IAAMC,EAA0B,KAAK,aAAa,IAAIF,CAAa,EACnE,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAID,EAAmB,eAAiBC,EAAwB,aAC9D,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMxB,EAAOX,GAAyBkC,EAAmB,YAAY,EAG/Dd,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBACXc,EAAmB,QAAQ,OAAQ,EAAGC,EAAwB,QAAQ,OAAQ,EAAGxB,CAAI,CAC3F,CAEA,uBAAuByB,EAAmBpB,EAAsBqB,EAAoC,CAClG,IAAIb,EACJ,GAAIa,EAAgB,CAElB,GADAb,EAAK,KAAK,gBAAgB,IAAIa,CAAc,EACxCb,IAAO,OACT,MAAM,IAAI,MAAM,mCAAmC,EAErD,GAAIY,IAAWC,EACb,OAAAN,GACI,UACA,IAAM,uDAAuDf,CAAY,WACrEQ,CAAE,6BAA6B,EAChCA,EACF,GAAI,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC5E,MAAM,IAAI,MAAM;AAAA,sDAC8B,EAEhD,KAAK,gBAAgB,OAAOa,CAAc,CAC5C,MACEb,EAAKrB,GAAmB,EAG1B,YAAK,aAAa,IAAIqB,EAAI,CAAC,QAAS,CAAC,GAAAA,EAAI,OAA2B,OAAAY,CAAM,EAAG,aAAApB,CAAY,CAAC,EAC1F,KAAK,gBAAgB,IAAIoB,EAAQZ,CAAE,EACnCO,GACI,UACA,IAAM,uDAAuDf,CAAY,WAAWQ,CAAE,eAAe,EAClGA,CACT,CAEA,yBAAyBY,EAAyB,CAChD,IAAMZ,EAAK,KAAK,gBAAgB,IAAIY,CAAM,EACtCZ,IAAO,SACT,KAAK,aAAa,OAAOA,CAAE,EAC3B,KAAK,gBAAgB,OAAOY,CAAM,EAClCL,GAAU,UAAW,IAAM,4DAA4DP,CAAE,EAAE,EAE/F,CAGA,OAAOb,EAAc2B,EAAQ,eAAe,QAAU,eAAe,SAAW,eAAe,SAAmB,CAChH,IAAMpB,EAAajB,GAAqBU,CAAI,EAExCI,EAGEwB,GAAaD,EAAQ,eAAe,WAAa,eAAe,QAEhEE,GAAaF,EAAQ,eAAe,WAAa,eAAe,QACtE,GAAIC,GAAaC,EAAW,CAE1B,IAAMC,GADcF,EAAY,KAAK,YAAc,KAAK,oBAC5B,IAAIrB,CAAU,EACrCuB,EAICA,EAAQ,OAAS,EAEnB1B,EAAY0B,EAAQ,IAAI,EAGxB1B,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAPxEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,CAU1E,MAEEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAGxE,IAAMI,EAAU,CAAC,GAAIvC,GAAmB,EAAG,OAA2B,OAAQY,CAAS,EACvF,YAAK,aAAa,IAAI2B,EAAQ,GAAI,CAAC,QAAAA,EAAS,aAAc/B,CAAI,CAAC,EAE/DoB,GAAU,UAAW,IAAM,uCAAuCpB,CAAI,WAAW+B,EAAQ,EAAE,EAAE,EACtFA,CACT,CAEA,IAAIlB,EAAkC,CACpC,OAAO,KAAK,aAAa,IAAIA,CAAE,GAAG,OACpC,CAEA,QAAQA,EAAuB,CAC7B,IAAMmB,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAAZ,GAAU,UAAW,IAAM,sCAAsCP,CAAE,gBAAgBmB,EAAW,QAAQ,EAAE,EAAE,EAE1G,KAAK,aAAa,OAAOnB,CAAE,EAC3B,KAAK,eAAe,KAAKmB,EAAW,QAAQ,MAAM,EAG3CA,EAAW,YACpB,CAEA,MAAM,SAASnB,EAAeP,EAAkD,CAC9E,IAAM0B,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,qBAAqB,EAEvC,MAAMvC,GAAgB,KAAK,QAASuC,EAAW,QAAQ,OAAQA,EAAW,aAAc1B,CAAe,CACzG,CAEA,uBAA8B,CAC5B,QAAWmB,KAAU,KAAK,2BAExBA,EAAO,QAAQ,EAIjB,GAFA,KAAK,2BAA6B,CAAC,EAE/B,KAAK,eAAe,SAAW,EAInC,GAAI,KAAK,QAAQ,gBAAkB,UAAW,CAC5C,QAAWA,KAAU,KAAK,eAAgB,CACxC,IAAMQ,EAAgB9C,GAAe,IAAIsC,EAAO,IAAI,EAGpD,IAAKA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAEtE,IAAMS,EAAW,KAAK,YAAY,IAAIT,EAAO,IAAI,GAAK,CAAC,EACnDQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAGxB,UAAYA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAE7E,IAAMS,EAAW,KAAK,mBAAmB,IAAIT,EAAO,IAAI,GAAK,CAAC,EAC1DQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAE
fS,EAAS,KAAKT,CAAM,CAExB,MACEA,EAAO,QAAQ,CAEnB,CACA,KAAK,eAAiB,CAAC,CACzB,KAAO,CAGL,IAAIU,EAAkB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,gBAAiB,EAC/EA,IACHA,EAAkB,CAAC,EACnB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,iBAAmBA,CAAe,GAEjF,QAAWV,KAAU,KAAK,eACxBU,EAAgB,KAAKV,CAAM,EAE7B,KAAK,eAAiB,CAAC,CACzB,CACF,CAEA,SAAU,CACR,KAAK,YAAY,QAASK,GAAY,CACpCA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,mBAAmB,QAASK,GAAY,CAC3CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EAED,KAAK,aAAa,QAASW,GAAY,CACrCA,EAAQ,QAAQ,OAAO,QAAQ,CACjC,CAAC,EAED,KAAK,uBAAuB,QAASN,GAAY,CAC/CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,uBAAyB,IAAI,GACpC,CAEA,iBAAiBY,EAAmB,CAElC,IAAMC,EAAiB,KAAK,uBAAuB,IAAID,CAAS,EAC5DC,IACFA,EAAe,QAAQb,GAAU,CAC/BA,EAAO,QAAQ,CACjB,CAAC,EACD,KAAK,uBAAuB,OAAOY,CAAS,EAEhD,CACF,EAEa1C,GAAuB,IAAI4C,IACpC,IAAI7C,GAAmB,GAAG6C,CAAI,ICtdlC,IAGMC,GAsBOC,GAzBbC,GAAAC,EAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EASaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,IC1B3C,IAKaE,GAaAC,GA6EAC,EA6IAC,GA0MAC,GAkDAC,GACAC,GAzebC,GAAAC,EAAA,kBAKaR,GAAN,KAAiB,CAOtB,OAAO,gBAAgBS,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAGaT,GAAN,KAAoB,CAQzB,OAAO,UAAUU,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFlB,GAAW,gBAAgB,CAACW,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAASC,EAAIN,EAAW,EAAI,EAAGM,GAAKH,EAAOG,IAAK,CAC9C,IAAMC,EAAON,EAAQK,EAAI,EAAI,EAAIR,EAAMG,EAAQK,CAAC,EAC1CE,EAAON,EAAQI,EAAI,EAAI,EAAIP,EAAMG,EAAQI,CAAC,EAEhD,GAAIC,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEF,IAAMC,EAAM,KAAK,IAAIF,EAAMC,CAAI,EAC/B,GAAID,GAAQC,EACVJ,EAAMD,EAAQG,CAAC,EAAI,KAAK,IAAIC,EAAMC,CAAI,MACjC,CAEL,GAAIC,EAAM,EACR,OAEFL,EAAMD,EAAQG,CAAC,EAAI,CACrB,CACF,CAEA,OAAOF,CACT,CAOA,OAAO,iBAAiBM,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAAS,EAAI,EAAG,GAAKD,EAAW,IAC9B,GAAIF,EAAME,EAAY,CAAC,IAAM,GAAKF,EAAME,EAAY,CAAC,IAAMD,EAAWE,EAAY,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CACF,EAGaxB,EAAN,MAAMyB,CAAU,CAIrB,OAAO,KAAKC,EAAiC,CAC3C,OAAOD,EAAU,0BAA0BC,EAAM,EAAGA,EAAK,MAAM,CACjE,CAKA,OAAO,aAAaA,EAAyBC,EAAO,EAAsB,CACxE,IAAMC,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EAEV,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC1B,EAAIA,EAAO,EACf,KAAO,GAAK,GAAG,CACb,GAAIF,EAAK,CAAC,EAAIC,IAAS,EAAG,CACxBE,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAAIC,EACvB,KACF,CACA,GAAIA,EAAOD,EAAK,CAAC,IAAM,EACrB,MAAM,IAAI,MAAM,sBAAsB,EAExCG,EAAQ,CAAC,EAAI,EACbF,GAAQD,EAAK,CAAC,EACd,GACF,CACA,IAAK,IAAK,GAAK,EAAG,IAChBG,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAErB,OAAOG,CACT,CAKA,OAAO,kBAAkBH,EAAyBI,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,wCAAwCJ,EAAK,MAAM,cAAc,EAE/G,OAAOD,EAAU,0BAA0BC,EAAMI,EAAMJ,EAAK,MAAM,CACpE,CAKA,OAAO,gBAAgBA,EAAyBI,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,sCAAsCJ,EAAK,MAAM,cAAc,EAE7G,OAAOD,EAAU,0BAA0BC,EAAM,EAAGI,CAAI,CAC1D,CAKA,OAAO,0BAA0BJ,EAAyBK,EAAeC,EAAqB,CAC5F,IAAIL,EAAO,EACX,QAAS,EAAII,EAAO,EAAIC,EAAK,IAAK,CAGhC,GAAIN,EAA
K,CAAC,EAAI,EACZ,MAAM,IAAI,MAEN,+GAA+G,EAErHC,GAAQD,EAAK,CAAC,CAChB,CACA,OAAOC,CACT,CAEA,OAAO,eAAeD,EAA4C,CAChE,IAAME,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMK,EAAU,IAAI,MAAML,CAAI,EAC9BK,EAAQL,EAAO,CAAC,EAAI,EACpBK,EAAQL,EAAO,CAAC,EAAIF,EAAKE,EAAO,CAAC,EACjC,QAASX,EAAIW,EAAO,EAAGX,GAAK,EAAG,EAAEA,EAC/BgB,EAAQhB,CAAC,EAAIgB,EAAQhB,EAAI,CAAC,EAAIS,EAAKT,EAAI,CAAC,EAE1C,OAAOgB,CACT,CAKA,OAAO,cAAcH,EAAcI,EAA4B,CAC7D,GAAIJ,EAAO,CAACI,GAAcJ,GAAQI,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOJ,EAAO,EAAIA,EAAOI,EAAaJ,CACxC,CAEA,OAAO,cAAcK,EAAyBD,EAA+B,CAC3E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,GAAcC,EAAK,MAAM,CAAC,CACvE,CAQA,OAAO,gBAAgB5B,EAAsB8B,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAM/B,EAAE+B,CAAC,CAAC,EAEpB/B,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASmB,EAAyBa,EAA2C,CAClF,IAAMX,EAAOF,EAAK,OAClB,OAAOA,EAAK,IAAI,CAACY,EAAG,IAAMA,EAAIC,EAAI,CAAC,EAAIA,EAAI,EAAIX,CAAI,CAAC,CACtD,CAOA,OAAO,SAASY,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGrB,IAAMqB,IAAMG,EAAOxB,CAAC,CAAC,CAC/C,CACF,EAEahB,GAAN,MAAMyC,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBZ,EAChFa,EAAqBC,EAAsB,CAC7C,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,MAAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IACxCA,GAAOH,EAAY,OACrBA,EAAY,KAAKD,EAAUI,EAAM,CAAC,CAAC,EAEnCH,EAAYG,CAAG,EAAIJ,EAAUI,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMf,EAAQ,QAChB,GAAIA,EAAQe,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhEf,EAAQ,KAAK,CAAC,EAKlB,QAASe,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMF,EAAU,QAClB,GAAIA,EAAUE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEF,EAAU,KAAK,CAAC,EAKpB,QAASE,EAAM,EAAGA,EAAMH,EAAY,OAAS,EAAGG,IAC9C,GAAIA,EAAMD,EAAK,QACb,GAAIA,EAAKC,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DD,EAAK,KAAK,CAAC,EAKf,QAASC,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAAO,CACjD,GAAIH,EAAYG,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAID,EAAKC,CAAG,GAAKH,EAAYG,CAAG,GAAKD,EAAKC,EAAMH,EAAY,MAAM,GAAKA,EAAYG,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACHJ,EAA8BX,EAA4Ba,EAC1DD,EAAgCE,EAAgBE,EAAwBC,EAAwB,CAClG,GAAKA,EAIL,IAAIH,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIX,EAAQ,SAAYW,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAASI,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CN,EAAa,wBACTE,EAAUI,GAAOC,EAAgB,EAAI,EAAE,EAAGhB,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAChGA,EAAMJ,EAAU,OAAS,EAAGM,CAAO,EAE3C,CAaA,OAAO,uBACHP,EAA2BC,EAA8BX,EAAmBa,EAC5ED,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMO,EAAa,CAACP,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACpFC,CACT,CAYA,OAAO,uBACHP,EAA8BQ,EAA+BnB,EAAmBa,EAChFD,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,GAAKQ,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMD,EAAa,CAACP,EAAU,CAAC,EAAGQ,EAAW,CAAC,CAAC,EAE/C,OAAAV,EAAa,mBAAmB,GAAOE,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACrGC,CACT,CAKA,OAAe,mBACXR,EAA2BC,EAA8BO,EAAsBlB,EAC/Ea,EAA8BD,EAAgCE,EAAgBG,EAAkB,CAClG,GAAIP,EACF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAK,CAAC,MAGnB,SAASH,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAKT,EAAa,wBACzBE,EAAUI,EAAM,CAAC,EAAGf,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAAKA,EAAMJ,EAAU,OAAS,EACxGM,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXG,EAAgBC,EAAgBC,EAAkBC,EAAgBT,EAAgBU,EAClFC,EAAsBR,EAA0B,CAClD,IAAMS,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIN,GAAWA,IAAY,SACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAH,EAAKU,CAAY,EAAI,EACrBV,EAAKW,CAAY,E
AAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAN,EAAKU,CAAY,EACgB,KAAK,MAAjCP,IAAY,cAA4BU,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/Db,EAAKW,CAAY,EAAIE,EAAYb,EAAKU,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASN,EAAKU,CAAY,EAAIV,EAAKW,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEapD,GAAN,KAAe,CAIpB,OAAO,qBACH2D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAGahE,GAAW,sBACXC,GAAW,uBCzexB,IAiBakE,GAsMPC,GAoCOC,GAKAC,GAKAC,EAeAC,GAiBAC,GAcAC,GAgBAC,GAmBAC,EA+BPC,GAiTOC,EAaAC,EAaAC,GAgFPC,GAwJOC,GAaAC,GAr7BbC,GAAAC,EAAA,kBAGAC,KACAC,KAaapB,GAAiB,GAsMxBC,GAAoB,CAACoB,EAAcC,IAAiD,CACxF,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,OAAQD,EAAM,CACZ,QACE,OAAOC,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,QACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,QACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,OACE,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mBAAmB,EAErC,MAAO,CAAC,MAAO,YAAY,EAE7B,QACE,MAAM,IAAI,MAAM,sBAAsBD,CAAI,EAAE,CAChD,CACF,EAEanB,GAA8B,CAACmB,EAAgBC,EAAsB,IAAM,CACtF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEapB,GAA4B,CAACkB,EAAgBC,EAAsB,IAAM,CACpF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEanB,EAA6B,IAAIoB,IAA6D,CACzG,IAAMC,EAAoC,CAAC,EAC3C,OAAAD,EAAK,QAAQE,GAAO,CACdA,EAAI,SAAW,GACjBD,EAAgB,KACZ,CAAC,QAAuB,KAAMC,CAAG,EAAG,CAAC,QAAuB,KAAMC,EAAU,eAAeD,CAAG,CAAC,CAAC,CAExG,CAAC,EACMD,CACT,EAMapB,GAAoBuB,GAE3BA,EAAO,IAAM,EACR,EACEA,EAAO,IAAM,EACf,EAGF,EASItB,GAAa,CAACuB,EAAW,MAAOP,EAAqBQ,EAAQ,MACpE,CAACR,GAAcA,IAAe,EACzB,GAAGO,CAAQ,IAAIC,CAAK,IAGtB,MAAMR,CAAU,IAAIO,CAAQ,KAAKC,CAAK,IASlCvB,GAAY,CAACsB,EAAkBP,EAAoBQ,IAC1DD,IAAa,MACRC,EAELR,IAAe,EACV,OAAOQ,CAAK,IAGd,MAAMR,CAAU,SAASQ,CAAK,IAQ1BtB,GAAY,CAACuB,EAAcT,IAClCA,IAAe,EACV,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAC1CT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,MAClBT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAGlCA,EAUItB,EACT,CAACsB,EAAcC,EAAsBC,EAAgBZ,IAC/CU,EAAK,WAAW,WAAW,GAAKE,EAAS,EACvC,OAAQD,GAAW,SACjBX,IAAS,MACJ,GAAGU,CAAI,KAAKC,CAAK,WAAWA,CAAK,eAAeA,CAAK,aAErD,GAAGD,CAAI,KAAKC,CAAK,WAAWA,CAAK,SAGtCX,IAAS,MACJ,GAAGU,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAK,KAAK,MAAMA,EAAQ,EAAI,CAAC,CAAC,KAAKA,EAAQ,EAAI,CAAC,IAEhF,GAAGD,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAKA,EAAQ,CAAC,IAIlDC,EAAS,EAAI,GAAGF,CAAI,IAAIC,CAAK,IAAMD,EAc5CrB,GACF,CAACqB,EAAcG,EAAoBC,EAAuCC,EACzEd,IAAuC,CACtC,IAAMe,EAAa,OAAOF,GAAgB,SACpCG,EAAOD,EAAaF,EAAcA,EAAY,OAC9CI,EAAe,CAAC,GAAG,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAC,EACzCE,EAAcF,EAAO,EAAI,MAAQA,GAAQ,EAAI,MAAMA,CAAI,QAAU,cAAcA,CAAI,IACnFf,EAAatB,GAAkBiC,EAAYZ,CAAU,EACrDmB,EAAY,OAAOlB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACtEmB,EAAc,OAAOnB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACxEF,EAAO,CAAC,QAASmB,EAAa,M
AAOC,EAAW,QAASC,EAAa,OAAQR,CAAU,EAExFS,EAAgBjB,GAA+B,OAAOA,GAAQ,SAAWA,EAAM,GAAGA,CAAG,IAErFkB,EAAqB,CACzB,gBAAiB,GACjB,gBAAiB,GACjB,2BAA4B,GAC5B,IAAK,GACL,aAAc,GACd,IAAK,GACL,aAAc,EAChB,EAEMC,EAAgBR,EAAa,YAAc,GAC3CS,EAAQ,GAAGD,CAAa,GAAGd,CAAI,SAC/BgB,EAAU,GAAGF,CAAa,GAAGd,CAAI,WAEnCiB,EAAa,GACjB,QAASC,EAAI,EAAGA,EAAIX,EAAO,EAAGW,IAC5BD,GAAc;AAAA,aACTC,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC9CW,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC/CW,CAAC,UAAUA,CAAC;AAAA,oBACNA,CAAC;AAAA,MAGfD,GAAc,WAAWV,EAAO,CAAC,eAEjC,IAAMY,EAAgCZ,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,oBAAoBV,EAAK,OAAO;AAAA,mBAC5BA,EAAK,OAAO;AAAA;AAAA,MAEzB2B,CAAU;AAAA;AAAA,KAIJG,EAAmBC,IACvBR,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIc,EAAY,OAAOrB,CAAI,IAAIqB,CAAS,KAGlDC,EAAoB,CAAC,EAC3B,GAAIf,GAAQ,EACV,QAASW,EAAIX,EAAO,EAAGW,GAAK,EAAGA,IAC7BI,EAAQ,KAAK,GAAG5C,EAAasC,EAASE,EAAGX,CAAI,CAAC,eAAeW,CAAC,IAAI,EAItE,IAAMK,EAAgChB,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,aAAaV,EAAK,OAAO;AAAA,aAC3BgC,EAAQ,KAAK,GAAG,CAAC;AAAA,KAGlBE,EAAmBC,IACvBZ,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIkB,EAAa,OAAOzB,CAAI,IAAIyB,CAAU,KAGpDC,EAAU,IAAIC,IAChBpB,IAAS,EAAI,KAAO,GAAGjB,EAAK,OAAO,IAAIqC,EAAK,IAAIf,CAAY,EAAE,KAAK,GAAG,CAAC,IAErEgB,EAAa,CAACH,EAAoBI,KAClCtB,EAAO,EACF,GAAGkB,CAAU,GAEb,GAAG/C,EAAa+C,EAAYI,GAAKtB,CAAI,CAAC,GAI3CuB,EAAa,CAACL,EAAoBI,GAAoB9B,KACtDQ,EAAO,EACF,GAAGkB,CAAU,IAAI1B,EAAK,IAEtB,GAAGrB,EAAa+C,EAAYI,GAAKtB,CAAI,CAAC,IAAIR,EAAK,IAIpDgC,EAAoE,CAAC,EACrEC,GAA6B,CAACP,EAAoBQ,KAA0B,CAChFpB,EAAmB,2BAA6B,GAChD,IAAMqB,GAAU,GAAGD,GAAO,IAAI,uBAAuBjC,CAAI,SACzD,GAAIkC,MAAWH,EACb,MAAO,GAAGG,EAAO,IAAIT,CAAU,IAEjC,IAAMH,GAAU,CAAC,EACjB,QAASJ,GAAIX,EAAO,EAAGW,IAAK,EAAGA,KAAK,CAClC,IAAMW,GAAMI,GAAO,WAAW,gBAAiBf,GAAIe,GAAO,KAAO1B,CAAI,EACrEe,GAAQ,KAAK,GAAGM,EAAWZ,EAASE,EAAC,CAAC,OAAOW,EAAG,MAAMD,EAAWb,EAAOG,EAAC,CAAC,GAAG,CAC/E,CACA,OAAAa,EAAyCG,EAAO,EAC5C,MAAMA,EAAO,mBAAmBD,GAAO,KAAK,OAAO;AAAA,sBACzCX,GAAQ,OAAS,EAAIA,GAAQ,KAAK,GAAG,EAAI,IAAI;AAAA,cAGpD,GAAGY,EAAO,IAAIT,CAAU,GACjC,EAEMU,GAAc,CAACC,EAAuBrC,MAAmB,IAAM,CACnE,GAAIT,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,KAAKrC,EAAK,IAC7B,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,EAAK,8BAA8BA,EAAK,UAC9E,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,EAAK,UAC3C,GAAIT,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,8DAA8DrC,EAAK,MAE3F,MAAM,IAAI,MAAM,6CAA6CT,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEG+C,GAAeD,IAA2B,IAAM,CACpD,GAAI9C,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,IACnB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,mBAAmBU,CAAI,IAAIoC,CAAM,oBAAoBpC,CAAI,IAAIoC,CAAM,sBAAsBpC,CAAI,IAChGoC,CAAM,wBAAwBpC,CAAI,IAAIoC,CAAM,oBAEhD,MAAM,IAAI,MAAM,6CAA6C9C,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEGgD,GAA6B/B,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,QAAQoB,CAAS;AAAA,aACrD2B,GAAY,OAAOrC,CAAI,WAAW,CAAC;AAAA,KAGpCuC,EAAoBhC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,GAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,QAAQ9B,CAAS;AAAA,iBACjCV,CAAI,aAAa0B,EAAQe,EAAU,CAAC;AAAA,IAE/C,GAAG,EAEGC,GAAM,IAAIhB,IAA0C,CACxD,GAAIA,EAAQ,SAAWnB,EACrB,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAGlD,IAAMoC,GAAoBjB,EAAQ,IAAId,CAAY,EAAE,KAAK,GAAG,EAE5D,OAAIL,IAAS,EACJ8B,GAAY,IAAI,EACd9B,IAAS,EACX8B,GAAYM,GAAkB,CAAC,CAAC,GAEvC9B,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,
IAE3C,EAEMC,GAAgBnB,GAChBlB,EAAO,EACF8B,GAAYZ,CAAU,GAE7BZ,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAIvCoB,GAA6BtC,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,YAAYoB,CAAS;AAAA,MAChEyB,GAAY,OAAOnC,CAAI,YAAa,OAAO,CAAC;AAAA,KAGtC8C,GAAoBvC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,GAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,YAAY9B,CAAS;AAAA,UAC5CV,CAAI,aAAa0B,EAAQe,EAAU,CAAC;AAAA,IAExC,GAAG,EA0EH,MAAO,CACL,KAxCW,IAAM,CACjB,IAAMM,EAAQ,CAAC,EACXC,GAAmB,GACvB,OAAInC,EAAmB,kBACrBkC,EAAM,KAAK5B,CAA6B,EACxC6B,GAAmB,IAEjBnC,EAAmB,kBACrBkC,EAAM,KAAKxB,CAA6B,EACxCyB,GAAmB,IAEjBnC,EAAmB,6BACrB,OAAO,OAAOkB,CAAwC,EAAE,QAAQkB,IAAQF,EAAM,KAAKE,EAAI,CAAC,EACxFD,GAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKD,EAAiB,EAC5BE,GAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKF,EAA0B,EACrCG,GAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKR,CAAiB,EAC5BS,GAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKT,EAA0B,EACrCU,GAAmB,IAEjB,CAAC1C,GAAc0C,IACjBD,EAAM,QACF,SAAShC,CAAK,MAAMzB,EAAK,OAAO,IAAIc,EAAY,KAAK,GAAG,CAAC,KACzD,SAASY,CAAO,MAAM1B,EAAK,OAAO,IAAIM,EAAU,eAAeQ,CAAW,EAAE,KAAK,GAAG,CAAC,IAAI,EAExF2C,EAAM,KAAK;AAAA,CAAI,CACxB,EAIE,KAAAzD,EACA,gBAAA8B,EACA,gBAAAI,EACA,2BAAAQ,GACA,QAAAN,EACA,WAAAE,EACA,WAAAE,EACA,IAjFU,IAAIoB,IAAkD,CAChE,GAAIA,EAAgB,SAAW3C,EAAO,EACpC,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAElD,IAAMR,GAAQmD,EAAgB3C,CAAI,EAClC,GAAI,OAAOR,IAAU,SACnB,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAM4C,GAAoBO,EAAgB,MAAM,EAAG3C,CAAI,EAAE,IAAIK,CAAY,EAAE,KAAK,GAAG,EAEnF,OAAIL,IAAS,EACJ4B,GAAY,KAAMpC,EAAK,EACrBQ,IAAS,EACX4B,GAAYQ,GAAkB,CAAC,EAAG5C,EAAK,GAE9Cc,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,KAAK5C,EAAK,IAErD,EA6DE,YAAAoC,GACA,aA5DmB,CAACV,EAAoB1B,KACpCQ,EAAO,EACF4B,GAAYV,EAAY1B,EAAK,GAEpCc,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAAK1B,EAAK,MAuDrD,IAAA2C,GACA,YAAAL,GACA,aAAAO,GAEA,MAAAvC,EACA,KAAAL,EACA,QAAAgB,EACA,MAAAD,EACA,KAAAR,CACF,CACF,EAWS3B,EACT,CAACoB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,QAASb,CAAU,EAW3DV,EACT,CAACmB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,SAAUb,CAAU,EAW5DT,GACT,CAACkB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,WAAYb,CAAU,EA8ErER,GAAN,KAA+C,CAC7C,YAAoBoE,EAA2DC,EAA4B,CAAvF,6BAAAD,EAA2D,YAAAC,EAoG/E,KAAQ,kBAAqC,CAAC,EAC9C,KAAQ,UAA6B,CAAC,EACtC,KAAQ,SAA8B,CAAC,EAwBvC,KAAQ,cAAgB,CA9HoF,CAE5G,sCAAsCvD,EAA6B,CAGjE,MAAO,qBADY,OAAOA,GAAS,SAAW,GAAGA,CAAI,IAAMA,CACrB,eACxC,CAEA,UAAUwD,EAAiDpF,GAAgB,CACzE,IAAMqF,EAAiB,OAAOD,GAAkB,SAAWA,EAAgBA,EAAc,CAAC,EACpFE,EAAiB,OAAOF,GAAkB,SAAW,EAAIA,EAAc,CAAC,EACxEG,EAAiB,OAAOH,GAAkB,SAAW,EAAIA,EAAc,CAAC,EAE9E,GAAIC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,yBAC/B,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,yCAAyC,KAAK,OAAO,wBAAwB,KAC3F,KAAK,OAAO,wBAAwB,KAAK,KAAK,OAAO,wBAAwB,IAAI,EAGvF,GAAIF,EAAiBC,EAAiBC,EAAiB,KAAK,OAAO,kCACjE,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,+CACd,KAAK,OAAO,iCAAiC,GAAG,EAGtD,IAAMC,EAAuB,KAAK,wBAAwB,CAAC,IAAM,GAAK,KAAK,wBAAwB,CAAC,IAAM,EACpGC,EAAYD,EAAuB;AAAA;AAAA,wDAGA;AAAA;AAAA;AAAA;AAAA,yDAKnCE,EAAsBF,EACxB,4DACA;AAAA,mEAEIH,EAAiBC,EAAiBC,CAAc,iBAExD,MAAO,4BAA4BF,CAAc,KAAKC,CAAc,KAAKC,CAAc;AAAA,YAC/EE,CAAS;AAAA,MACfC,CAAmB;AAAA,GAEvB,CAEQ,uBAAuBC,EAA+B,CACxDA,EAAS,OAAS,IAChBA,EAAS,MAAM,WAAW,WAAW,GACvC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,MAAM,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAEpGA,EAAS,QAAQ,WAAW,WAAW,GACzC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,QAAQ,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAG9G,C
AEQ,gBAAgBA,EAAyBC,EAA8B,CAC7E,GAAID,EAAS,QAAU,WACrB,MAAM,IAAI,MAAM,+FAA+F,EAEjH,KAAK,UAAU,KAAKA,CAAQ,EAC5B,KAAK,uBAAuBA,CAAQ,EAEpC,IAAME,EAASF,EAAS,QAAU,QAAU,OAAS,aAC/CjD,EAAciD,EAAS,KAAK,QAClC,MAAO,sBAAsBC,CAAY,kBAAkBC,CAAM,KAAKF,EAAS,IAAI,WAAWjD,CAAW,IAC3G,CAEA,oBAAoBoD,EAAoC,CACtD,OAAOA,EAAU,IAAIC,GAAK,KAAK,gBAAgBA,EAAG,KAAK,eAAe,CAAC,EAAE,KAAK;AAAA,CAAI,CACpF,CAEQ,yBAAyBJ,EAA+B,CAC9D,GAAIA,EAAS,QAAU,WACrB,MAAM,IAAI,MACN,sGAAsG,EAG5G,KAAK,kBAAkB,KAAKA,CAAQ,EACpC,KAAK,uBAAuBA,CAAQ,CACtC,CAEA,6BAA6BG,EAA0C,CACrE,OAAAA,EAAU,QAAQC,GAAK,KAAK,yBAAyBA,CAAC,CAAC,EAChD,IACT,CAEA,gBAAgBhE,EAAcV,EAA8BY,EAAS,EAAiB,CACpF,YAAK,SAAS,KAAK,CAAC,KAAAF,EAAM,KAAAV,EAAM,OAAAY,CAAM,CAAC,EAChC,IACT,CAEA,iBAAiB+D,EAAqD,CACpE,YAAK,SAAW,KAAK,SAAS,OAAOA,CAAkB,EAChD,IACT,CAKQ,oBAA6B,CACnC,GAAI,KAAK,SAAS,SAAW,EAC3B,MAAO,GAGT,IAAMC,EAA4B,CAAC,EACnC,OAAW,CAAC,KAAAlE,EAAM,KAAAV,EAAM,OAAAY,CAAM,IAAK,KAAK,SACtC,GAAIA,GAAUA,EAAS,EACjBZ,IAAS,MACX4E,EAAgB,KAAK,cAAclE,CAAI,iBAAiBV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,EAE1FgE,EAAgB,KAAK,GAAGlE,CAAI,eAAeV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,MAE1E,CACL,IAAMiE,EAAWjE,GAAU,MAAQA,IAAW,EAAIZ,EAAO,MAAMY,CAAM,IAAIZ,CAAI,IAC7E4E,EAAgB,KAAK,GAAGlE,CAAI,IAAImE,CAAQ,EAAE,CAC5C,CAGF,MAAO;AAAA,0BACeD,EAAgB,KAAK,IAAI,CAAC;AAAA,2BACzB,KAAK,aAAa,oCAC3C,CAMA,IAAI,2BAAoC,CACtC,OAAO,KAAK,mBAAmB,EAAI,KAAK,UAAU,IAAIhD,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,EAC1E,KAAK,kBAAkB,IAAIA,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,CACzD,CAKA,IAAI,eAAwD,CAC1D,GAAI,KAAK,SAAS,SAAW,EAC3B,OAGF,IAAMkD,EAA6B9E,GAC9B,UACe,EAAE,CAAC,MAAO,MAAO,MAAO,KAAK,EAAE,QAAQA,CAAI,CAAC,EAChE,OAAO,KAAK,SAAS,IAAI+E,GAAM,CAACD,EAA0BC,EAAE,IAAI,EAAGA,EAAE,QAAU,CAAC,CAAE,CACpF,CACF,EAEarF,GAAqB,CAACsF,EAAyClB,IACxE,IAAIrE,GAAiBuF,EAAelB,CAAM,EAYjCnE,GAAmB,CAACsF,EAA4BC,IAA0C,CACrG,IAAMC,EAASF,EAAQ,OACjB9E,EAAiB,CAAC,EACxB,QAASyB,EAAI,EAAGA,EAAIuD,EAAQvD,IAAK,CAC/B,IAAMvB,EAAM8E,EAAS,EAAIvD,EACnBwD,EAAIH,EAAQ5E,CAAG,GAAK,GAChB6E,EAASA,EAAS,OAAS,EAAItD,CAAC,GAAK,GACvC,GAAKwD,IAAM,GACjBjF,EAAK,QAAQE,CAAG,CAEpB,CACA,OAAOF,CACT,ICj8BA,IAeMkF,GAMAC,GAGAC,GAGAC,GAWOC,GA4DAC,GAKAC,GAvGbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEMZ,GAAkB,CAACa,EAAmBC,IACvCA,GAAQA,EAAK,SAAWD,EAAa,CAAC,GAAI,IAAI,MAAMA,CAAS,EAAE,KAAK,CAAE,EAAE,QAAQ,EAAIC,EAEnFb,GAAiB,CAACc,EAA+BD,IACnDE,EAAU,gBAAgBD,EAAYf,GAAgBe,EAAW,OAAQD,CAAI,CAAC,EAE5EZ,GAAmB,CAACY,EAAgBG,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKJ,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAM,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEajB,GAA6B,CAACkB,EAAyBC,IAAoC,CACtG,IAAMC,EAAgBF,EAAY,SAC5BR,EAAYQ,EAAY,KAAK,OAC7BP,EAAOd,GAAgBa,EAAWS,CAAQ,EAC1CE,EAAcvB,GAAeoB,EAAY,KAAMP,CAAI,EACnDK,EAASM,EAAe,SAAUF,EAAeC,EAAY,MAAM,EACnEN,EAAQQ,EAAc,IAAKH,EAAeV,CAAS,EACrDc,EACJ,GAAIb,EAAK,SAAW,GAAKA,EAAK,CAAC,IAAM,GAAKA,EAAK,CAAC,IAAM,EAAG,CACvD,IAAMc,EAAWT,EAAO,KAAK,MACvBU,EAA0C,CAAC,GAAI,GAAI,CAAC,EAC1DF,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA,sCAChDS,CAAQ,KAAKC,EAAc,CAAC,EAAI,CAAC,MAAMA,EAAc,CAAC,CAAC;AAAA,IACzFC,EAAa,UAAUD,CAAa,CAAC;AAAA,+BACVA,EAAc,CAAC,CAAC;AAAA,+BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,uCAIRX,EAAM,YAAY,eAAe,CAAC;AAAA;AAAA;AAAA,2BAG9CW,EAAc,CAAC,CAAC;AAAA,2BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA,QAEnCV,EAAO,YAAY,iBAAkB,8BAA8B,CAAC;AAAA;AAAA,IAG1E,MACEQ,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiB
Z,EAAOC,CAAM,CAAC;AAAA;AAAA,IAElFjB,GAAiBY,EAAMD,EAAWK,EAAOC,CAAM,CAAC;AAAA;AAAA,IAEhDW,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DX,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGlDA,EAAO,YAAY,aAAcD,EAAM,aAAa,UAAU,CAAC,CAAC;AAAA,KAGpE,MAAO,CACL,KAAM,YACN,YAAa,CAAC,KAAM,GAAGI,CAAQ,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC9D,WAAaV,GAAW,CACtB,IAAMmB,EAAaf,EAAU,KAAKQ,CAAW,EAC7C,MAAO,CACL,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKmB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGC,EAA2BpB,EAAO,CAAC,EAAE,KAAMY,CAAW,CAAC,CAC5G,CACF,EACA,gBAAAG,CACF,CACF,EAEavB,GAAY,CAAC6B,EAAyBC,IAA0C,CAC3FnC,GAAekC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ9B,GAA2B8B,EAAQ,OAAO,CAAC,EAAGC,EAAW,IAAI,CAAC,CAChF,EAEa7B,GAA4B6B,GACrCC,GAA4B,CAAC,KAAMD,EAAW,IAAgB,CAAC,ICxGnE,IAYME,GAaAC,GAaAC,GAaAC,GAYAC,GAQAC,GAYAC,GAcAC,GASAC,GAaOC,GAyEPC,GAkCOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAtQbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAEM3B,GAAqC,CACzC,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,oCACX,UAAW,6BACX,GAAI,6BACJ,GAAI,oCACJ,OAAQ,uBACV,EAEMC,GAA2C,CAC/C,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,wBACX,UAAW,wBACX,GAAI,wBACJ,GAAI,wBACJ,OAAQ,uBACV,EAEMC,GAA4C,CAChD,IAAK,aACL,IAAK,aACL,KAAM,IACN,IAAK,IACL,KAAM,IACN,UAAW,IACX,UAAW,IACX,GAAI,IACJ,GAAI,IACJ,OAAQ,GACV,EAEMC,GAA8C,CAClD,IAAK,YACL,IAAK,YACL,IAAK,YACL,KAAM,YACN,UAAW,YACX,UAAW,iBACX,GAAI,YACJ,GAAI,kBACJ,OAAQ,gBACV,EAEMC,GAAmB,CAACwB,EAAsBC,IAA2B,CACzE,IAAMC,EAAM,CAAC,EACb,QAASC,EAAIF,EAAOD,EAAcG,EAAIF,EAAM,EAAEE,EAC5CD,EAAI,KAAKC,CAAC,EAEZ,OAAOD,CACT,EAEMzB,GAA4B,CAAC2B,EAA0BC,IAAkD,CAC7G,IAAMC,EAAc,CAAC,EACfL,EAAOG,EAAM,OACnB,QAASG,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,IACxBD,EAAY,KAAKF,EAAMG,CAAG,CAAC,EAG/B,IAAMC,EAAcH,EAAK,IAAIE,GAAOH,EAAMG,CAAG,CAAC,EAC9C,MAAO,CAACD,EAAaE,CAAW,CAClC,EAEM9B,GAAuB,CAAC0B,EAAiBC,IAA6B,CAC1E,IAAMJ,EAAOG,EAAM,OAASC,EAAK,OAC3BI,EAAc,CAAC,EACjBC,EAAW,EACf,QAASH,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,GACxBE,EAAY,KAAKL,EAAMM,GAAU,CAAC,EAElCD,EAAY,KAAK,CAAC,EAGtB,OAAOA,CACT,EAEM9B,GAAuB,CAAC0B,EAAgBJ,IAA0B,CACtE,QAASE,EAAI,EAAGA,EAAIE,EAAK,OAAQ,EAAEF,EACjC,GAAIE,EAAKA,EAAK,OAASF,EAAI,CAAC,IAAMF,EAAO,EAAIE,EAC3C,MAAO,GAGX,MAAO,EACT,EAEMvB,GAAqB,CAACyB,EAAgBJ,IAA2B,CACrE,IAAMC,EAAM,CAAC,EACb,GAAI,CAACvB,GAAqB0B,EAAMJ,CAAI,EAAG,CACrC,QAASE,EAAI,EAAGA,EAAIF,EAAM,EAAEE,EACtBE,EAAK,QAAQF,CAAC,IAAM,IACtBD,EAAI,KAAKC,CAAC,EAGdE,EAAK,QAAQM,GAAQT,EAAI,KAAKS,CAAI,CAAC,CACrC,CACA,OAAOT,CACT,EAEarB,GACT,CAAC+B,EAAcC,EAAqCC,EAA+BC,EAClFC,EAA0BV,EAAuBE,IAAuC,CACvF,IAAMS,EAAaH,EAAO,CAAC,EAAE,KAEvBI,EAAaC,EAAU,KAAKb,CAAW,EACvCc,EAAaD,EAAU,KAAKX,CAAW,EAEvCa,EAAQC,EAAc,KAAMR,EAAO,CAAC,EAAE,SAAUG,CAAU,EAC1DM,EAASC,EAAe,SAAUR,EAAgBV,CAAW,EAE7DmB,EAAgB,GAEhBC,EAAsB;AAAA,oDACkBD,CAAa;AAAA,SA+C3D,MAAO,CACL,KAAAb,EACA,YAAAC,EACA,gBA/CuBc,GAA+B;AAAA,UACpDA,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBN,EAAOE,CAAM,CAAC;AAAA,UACjFG,CAAmB;AAAA;AAAA;AAAA;AAAA,WAIlBC,EAAa,UAAUF,CAAa,CAAC;AAAA;AAAA,2CAELA,CAAa;AAAA;AAAA;AAAA,gCAGxBnD,GAAiByC,CAAU,CAAC;AAAA;AAAA,wDAEJU,CAAa;AAAA,iCACpCJ,EAAM,YAAY,YAAY,CAAC;AAAA,yBACvCjD,GAAU2C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,wCAKNU,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAM3BpD,GAAgB0C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAS3CQ,EAAO,YACH,cACA,GACIR,IAAe,OAAS,GAAGQ,EAAO,KAAK,OAAO,yCACtB,GAAGA,EAAO,KAAK,OAAO,IAAIhD,GAAmBwC,CAAU,CAAC,GAAG,EAAE,CAAC;AAAA;AAAA,WAShG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUU,CAAc,CAAC,EACvD,cAAe,CAAC,EAAGE,CAAU,EAC7B,gBAAiB,CAAC,CAAC,QAAuB,KAAME,CAAU,CAAC,CAC7D,EACF,C
ACF,EAEEtC,GACF,CAAC8C,EAAyBhB,EAAciB,EACvCd,IAAiG,CAChG,IAAMe,EACFF,EAAQ,OAAO,SAAW,EAAIC,EAAaE,GAAiCH,EAAQ,OAAQC,CAAU,EAEtGG,EAAcF,EAAkB,KAChCE,EAAY,SAAW,GAAK,CAACF,EAAkB,oBACjDE,EAAcJ,EAAQ,OAAO,CAAC,EAAE,KAAK,IAAI,CAACK,EAAM9B,IAAMA,CAAC,GAEzD,IAAM+B,EAAgBf,EAAU,cAAca,EAAaJ,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EAEpFvB,EAAO6B,EACPb,EAAQO,EAAQ,OAAO,CAAC,EACtBO,EAAevD,GAAmByB,EAAMuB,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EACvEO,EAAa,OAAS,IACxBd,EAAQO,EAAQ,QACZQ,GAA2BR,EAAQ,OAAO,CAAC,EAAGO,CAAY,EAAG,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAChG9B,EAAO7B,GAAiB6B,EAAK,OAAQgB,EAAM,KAAK,MAAM,GAGxD,GAAM,CAACf,EAAaE,CAAW,EAAI/B,GAA0B4C,EAAM,KAAMhB,CAAI,EACzEgC,EAAmB/B,EACnBwB,EAAkB,WACpBO,EAAmB3D,GAAqB4B,EAAa4B,CAAa,GAGpEN,EAAQ,QACJ/C,GACI+B,EAAM,CAAC,KAAMkB,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACT,CAAK,EAAGN,EAChFa,EAAQ,OAAO,CAAC,EAAE,SAAUS,EAAkB7B,CAAW,EAC7D,CAAC,OAAQ,CAACa,CAAK,CAAC,CAAC,CACvB,EAEStC,GAAmB,CAAC6C,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEa7C,GAAiB,CAAC4C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa5C,GAAiB,CAAC2C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa3C,GAAwB,CAAC0C,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEa1C,GAAkB,CAACyC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEazC,GAAkB,CAACwC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEaxC,GAAmB,CAACuC,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEavC,GAAkB,CAACsC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEatC,GAAwB,CAACqC,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEarC,GAAqB,CAACoC,EAAyBC,IAAuC,CACjG/C,GAAa8C,EAAS,qBAAsBC,EAAY,QAAQ,CAClE,ICxQA,IAYMS,GAoBAC,GACOC,GA2EAC,GAUPC,GAeAC,GAWAC,GAWAC,GAWAC,GAWAC,GAoBAC,GAqBAC,GAoBAC,GAWAC,GAWAC,GAWAC,GAsBOC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GA7WbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KACAC,KAEMhC,GAAkBiC,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,0BAA0B,CAE9C,EAYMhC,GAAkBiC,GAAU,CAAC,GAAI,GAAI,eAAeA,EAAM,aAAa,eAAe,CAAC,IAAK,EAAE,EACvFhC,GACT,CAACiC,EAAcC,EAAqCH,EAA+BI,EAClFC,EAAqBC,EAA0BC,EAAW,GAAOC,EAAoB,KAAuB,CAC3G,IAAMC,EAAwB,CAAC,EACzBC,EAAaV,EAAO,CAAC,EAAE,KACvBW,EAAYD,EAAW,OACvBE,EAAOC,EAAU,cAAcR,EAAWM,CAAS,EACnDG,EAAkB,CAACN,GAAqBI,EAAK,SAAW,EAC9DF,EAAW,QAAQ,CAACK,EAAGC,IAAM,CACvBF,GAAmBF,EAAK,QAAQI,CAAC,GAAK,EACpCT,GACFE,EAAY,KAAK,CAAC,EAGpBA,EAAY,KAAKM,CAAC,CAEtB,CAAC,EACD,IAAME,EAAaR,EAAY,OACzBS,EAAaL,EAAU,KAAKJ,CAAW,EA4C7C,MAAO,CACL,KAAAP,EACA,YAAAC,EACA,gBA9CuBgB,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EAErBnB,EAAQoB,EAAc,KAAMrB,EAAO,CAAC,EAAE,SAAUW,CAAS,EACzDW,EAASC,EAAe,SAAUjB,EAAgBW,CAAU,EAC5DO,EAAMpB,EAASH,EAAOqB,EAAQV,CAAI,EACpCa,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGC,EAAI,EAAGD,EAAIf,EAAWe,IAEhCZ,GAAmBF,EAAK,QAAQc,CAAC,GAAK,GACpCnB,GACFoB,IAGFF,EAAY,YAAYC,CAAC,eAAeA,CAAC,MAAMhB,EAAWgB,CAAC,CAAC,MAAMA,CAAC;AAAA,oBAC3DF,EAAI,CAAC,EAAE,SAAS,YAAY,EAAI,qBAAqBE,CAAC,IAAM,EAAE;AAAA,oBAC9DzB,EAAM,WAAW,gBAAiByB,EAAG,IAAIA,CAAC,EAAE,CAAC;AAAA,oBAC7CD,CAAS;AAAA,qBAGjBL,EAAQ,KAAK,GAAGnB,EAAM,WAAW,gBAAiByB,EAAGJ,EAAO,WAAW,iBAAkBK,CAAC,CAAC,CAAC,GAAG,EAC/FA,KAGJ,MAAO;AAAA;AAAA,UAELR,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBlB,EAAOqB,CAAM,CAAC;AAAA;AAAA,UAElFH,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,+BACvDlB,EAAM,KAAK,OAAO;AAAA,iCAChBqB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDF,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,YAClBI,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,CAAC,CAAC;AAAA,YACNC,CAAS;AAAA,YACTD,EAAI,CAAC,CAAC;AAAA,YACNA,E
AAI,SAAW,EAAIF,EAAO,YAAY,aAAc,OAAO,EAAIE,EAAI,MAAM,CAAC,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,UAE5F,EAME,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUH,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGU,EAA2BlB,EAAYD,CAAW,CAAC,CACxG,EACF,CACF,EAESvC,GACT,CAAC8B,EAA+B6B,IAAmD,CACjF,IAAMjB,EAAiB,CAAC,EACxB,OAAIZ,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,GACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQ8B,GAAKlB,EAAK,KAAK,OAAOkB,CAAC,CAAC,CAAC,EAEzDC,GACH,CAAC,KAAAnB,EAAM,SAAUiB,EAAW,SAAU,kBAAmBA,EAAW,iBAAiB,CAAC,CAC5F,EAEE1D,GACF,CAAC6D,EAAyB9B,EAAc2B,EAA8BzB,IAA6B,CACjG,IAAMJ,EAASgC,EAAQ,OACjBC,EACFjC,EAAO,SAAW,EAAI6B,EAAa3D,GAAiC8B,EAAQ6B,CAAU,EAE1FG,EAAQ,QACJ/D,GACIiC,EAAM,CAAC,KAAM+B,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACjC,EAAO,CAAC,CAAC,EACjFiC,EAAkB,mBAAqBA,EAAkB,KAAK,SAAW,EAAIjE,GAAOoC,EACpF6B,EAAkB,KAAMjC,EAAO,CAAC,EAAE,SAAUiC,EAAkB,SAC9DA,EAAkB,iBAAiB,EACvC,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEE7D,GAAoB,CAAC4D,EAAyBH,IAAuC,CACzF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,eAAgBH,EANf,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,qBACL,CAC8D,CAChE,EAEM5B,GAAgB,CAAC2D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,EACL,CAC0D,CAC5D,EAEM3B,GAAgB,CAAC0D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,sBAC1C,sBACL,CAC0D,CAC5D,EAEM1B,GAAuB,CAACyD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,qBACL,CACiE,CACnE,EAEMzB,GAAiB,CAACwD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAKlC,EAAM,WAAW,gBAAiByB,EAAG,CAAC,CAAC,EAIxD,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMxB,GAAkB,CAACuD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAiB7B7D,GAAiB6D,EAAS,aAAcH,EAhBb,CAAC5B,EAAOqB,EAAQV,IAAS,CAClD,IAAIwB,EAAO,EACX,QAASV,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,KAE1CwB,GAAQJ,EAAQ,OAAO,CAAC,EAAE,KAAKN,CAAC,GAIpC,MAAO,CACL,oBACA,GACA,cAAczB,EAAM,aAAa,eAAe,CAAC,KACjD,eAAeqB,EAAO,KAAK,KAAK,UAAUc,CAAI,IAChD,CACF,CAC4D,CAC9D,EAEM1D,GAAiB,CAACsD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAK,iBAAiBT,CAAC,QAAQ,EAI3C,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMtB,GAAkB,CAACqD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,aAAcH,EANb,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC4D,CAC9D,EAEMrB,GAAiB,CAACoD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,YAAaH,EANZ,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC2D,CAC7D,EAEMpB,GAAuB,CAACmD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,a
AAa,eAAe,CAAC,oBAC1C,EACL,CACiE,CACnE,EAEMnB,GACF,CAACuD,EAA0BzB,EAAyBJ,IAAwC,CAC1F,GAAII,EAAK,SAAW,EAClB,OAAOJ,EAGT,IAAIU,EAAa,EACboB,EAAa,EACjB,QAASC,EAAM,EAAGA,EAAM3B,EAAK,OAAQ2B,IAC/B3B,EAAK,QAAQ2B,CAAG,IAAM,GACxBrB,GAAcmB,EAAME,CAAG,EAEvBD,GAAcD,EAAME,CAAG,EAO3B,OAAOD,EAAa,IAAMpB,EAAa,IACzC,EAESnC,GAAa,CAACiD,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FpD,GAAgBuD,EAASH,CAAU,EAEnCW,GAAiBR,EAASH,CAAU,CAExC,EAEa7C,GAAW,CAACgD,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FxD,GAAc2D,EAASH,CAAU,EAEjCY,GAAeT,EAASH,CAAU,CAEtC,EAEa5C,GAAW,CAAC+C,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FvD,GAAc0D,EAASH,CAAU,EAEjCa,GAAeV,EAASH,CAAU,CAEtC,EAEa3C,GAAkB,CAAC8C,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FtD,GAAqByD,EAASH,CAAU,EAExCc,GAAsBX,EAASH,CAAU,CAE7C,EAEa1C,GAAY,CAAC6C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FrD,GAAewD,EAASH,CAAU,EAElCe,GAAgBZ,EAASH,CAAU,CAEvC,EAEazC,GAAY,CAAC4C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FnD,GAAesD,EAASH,CAAU,EAElCgB,GAAgBb,EAASH,CAAU,CAEvC,EAEaxC,GAAa,CAAC2C,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FlD,GAAgBqD,EAASH,CAAU,EAEnCiB,GAAiBd,EAASH,CAAU,CAExC,EAEavC,GAAY,CAAC0C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FjD,GAAeoD,EAASH,CAAU,EAElCkB,GAAgBf,EAASH,CAAU,CAEvC,EAEatC,GAAkB,CAACyC,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FhD,GAAqBmD,EAASH,CAAU,EAExCmB,GAAsBhB,EAASH,CAAU,CAE7C,EAEarC,GAAe,CAACwC,EAAyBH,IAAuC,CACvF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FzD,GAAkB4D,EAASH,CAAU,EAErCoB,GAAmBjB,EAASH,CAAU,CAE1C,ICnXA,IAcMqB,GAeOC,GA0BAC,GA0BAC,GAjFbC,GAAAC,EAAA,kBAOAC,KAEAC,KAGAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAQaR,GAAS,CAACS,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaT,GAAS,CAACQ,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaR,GAA4BQ,GACrCQ,GAA4BR,CAAoE,IClFpG,IAuEMS,GAmKAC,GAsGAC,GA2JAC,GA0HOC,GAqCPC,GAmHOC,GA7vBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAgEMX,GAA0B,CAACY,EAA+BC,IAAoD
,CAmClH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAUH,EAAO,CAAC,EAClBI,EAAOJ,EAAO,CAAC,EACfK,EAAYL,EAAO,CAAC,EACpBM,EAAON,EAAO,CAAC,EACfO,EAAuBP,EAAO,CAAC,EAErC,GAAIM,GAAQC,EACV,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIL,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMM,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAkBR,EAAM,KAAK,CAAC,EAEpC,GAAIE,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIA,EAAQ,KAAK,CAAC,IAAMO,EACtB,MAAM,IAAI,MAAM,uEAAuE,EAGzF,GAAIN,EAAK,KAAK,CAAC,IAAMD,EAAQ,KAAK,CAAC,EACjC,MAAM,IAAI,MAAM,oFAAoF,EAGtG,IAAIQ,EAAcP,EAAK,KAAK,CAAC,EAAI,EAC7BQ,EAAcD,EACdE,EAAcD,EAClB,GAAIX,EAAW,eAAe,OAAS,EAAG,CACxC,GAAIA,EAAW,eAAe,SAAW,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAErE,QAAWa,KAAMb,EAAW,eAC1B,GAAIa,EAAKb,EAAW,WAAa,EAC/B,MAAM,IAAI,MAAM,mDAAmD,EAIvEU,EAAcV,EAAW,eAAe,CAAC,EACzCW,EAAcX,EAAW,eAAe,CAAC,EACzCY,EAAcZ,EAAW,eAAe,CAAC,CAC3C,CAEA,IAAMc,EAAmBN,EAEzB,GAAIE,IAAgBC,EAClB,MAAM,IAAI,MAAM,6DAA6D,EAG/E,GAAIR,EAAK,KAAK,CAAC,IAAMO,EAAcC,EAAcC,EAC/C,MAAM,IAAI,MAAM,+EAA+E,EAGjG,IAAIG,EAAqB,EACzB,GAAIV,EAAM,CACR,GAAIM,IAAgBC,EAClB,MAAM,IAAI,MAAM,oDAAoD,EAEtE,GAAIP,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,qCAAqC,EAEvD,GAAIA,EAAK,KAAK,CAAC,IAAM,EACnB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,KAAK,CAAC,IAAME,EACnB,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIF,EAAK,KAAK,CAAC,IAAML,EAAW,SAC9B,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAIK,EAAK,KAAK,CAAC,IAAMM,EAAcX,EAAW,SAC5C,MAAM,IAAI,MAAM,gEAAgE,EAG7EA,EAAW,yBACde,EAAqBV,EAAK,KAAK,CAAC,EAGpC,CAEA,IAAMW,EAAsBF,EAAmBC,EACzCE,EAAoB,GAEpBC,EAAW,EACjB,GAAId,EAGF,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAIC,EACF,MAAM,IAAI,MAAM,uBAAuB,EAGzC,MAAO,CACL,UAAAE,EACA,eAAAC,EACA,mBAAAO,EACA,iBAAAD,EACA,oBAAAE,EACA,kBAAAC,EACA,gBAAAR,EACA,WAAYC,EACZ,YAAAE,EACA,SAAU,KAAK,MAAMF,EAAcV,EAAW,QAAQ,EACtD,UAAW,KAAK,MAAMY,EAAcZ,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAqB,GACrB,aAAc,GACd,UAAW,CACb,CACF,EAEMZ,GAAkC,CAAC+B,EAA0BlB,EAAmB,EAAWmB,IAAc,CAC7G,IAAMC,EAAaC,GAAiBF,CAAC,EACjCG,EAAK,GACHC,EAAQJ,EAAIC,EACdG,EAAQD,EACVA,EAAK,EACIC,EAAQ,EAAI,KACrBD,EAAK,KAAK,KAAKC,EAAQ,CAAC,GAE1B,IAAMC,EAAoB,KAAK,KAAKL,EAAIC,EAAaE,CAAE,EACjDG,EAAoC,CACxC,CAAC,KAAMzB,EAAM,SAAU,KAAM,EAAImB,CAAC,EAAG,CAAC,QAAuB,KAAMI,CAAK,EACxE,CAAC,QAAuB,KAAMC,CAAiB,CACjD,EACME,EAAWC,GAA4B3B,EAAM,SAAUoB,CAAU,EACjEQ,EAAUC,KAA0CT,CAAU,EAE9DU,EAAmBC,GAA+B,CACtD,IAAMC,EAAcC,EAAe,IAAKjC,EAAM,SAAUA,EAAM,KAAMoB,CAAU,EAExEc,EAA8B,CAClC,CAAC,KAAM,QAAS,KAFIL,GAA0B7B,EAAM,QAAQ,CAEC,EAAG,CAAC,KAAM,SAAU,KAAM,KAAK,EAC5F,CAAC,KAAM,sBAAuB,KAAM,KAAK,CAC3C,EAEA,MAAO;AAAA,0CAC+BsB,CAAE;AAAA,0CACFA,CAAE;AAAA,IACxCS,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBF,CAAW,CAAC;AAAA,IACrED,EAAa,UAAU,CACrBT,EAAI,EAAG,CACT,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,8BAIwBM,CAAO;AAAA;AAAA,gCAELA,CAAO;AAAA;AAAA,+BAER,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,gDACT,IAAK,GACH,MAAO,oGACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA,uBAINM,CAAO;AAAA;AAAA,0BAEJA,CAAO;AAAA;AAAA,+BAEF,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,aACT,IAAK,GACH,MAAO,8BACT,IAAK,GACH,MAAO,4DACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAMHU,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,yBAIvBJ,CAAO;AAAA,0BACNI,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA,IAI9C,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CAAC,KAAM,GAAGV,CAAE,IAAII,CAAQ,IAAIN,CAAU,EAAE,EACrD,gBAAAU,EACA,WAAY,KAAO,CAAC,QAAS,CAAC,EAAG,cAAe,CAAC,EAAG,CAAC,EAAG,gBAAAL,CAAe,EACzE,CACF,EAEMrC,GACF,CAAC+C,EAAyBC,EAAeC,EAAiB
C,EACzDjC,EAA4CkC,EAAiCxC,EAC7Ee,IAA+B,CAC9B,IAAMC,EAAsBD,EAAqByB,EAAW,iBACtDC,EAAa,CAACD,EAAW,UAAWA,EAAW,SAAUA,EAAW,eAAgBxB,CAAmB,EACvG0B,EAAaF,EAAW,aAAe,QAAaJ,EAAQ,YAAc,EAC1EO,EAAkBD,EACpB,CAACF,EAAW,UAAWA,EAAW,SAAUxB,EAAqBwB,EAAW,QAAQ,EACpF,OAIEI,EAAQ5C,EAAW,QAAU,EAAI,EAAM,KAAK,KAAKwC,EAAW,QAAQ,EAAIxC,EAAW,MACnFqB,EAAaC,GAAiBkB,EAAW,QAAQ,EACjDK,EAAqBL,EAAW,SAAWnB,EAC3CyB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAK/B,EAAsB8B,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACMd,EAAoC,CACxC,CAAC,QAAuB,KAAMc,EAAW,cAAc,EAAG,CAAC,QAAuB,KAAMK,CAAkB,EAC1G,CAAC,QAAuB,KAAM7B,CAAmB,EAAG,CAAC,QAAuB,KAAMwB,EAAW,QAAQ,EACrG,CAAC,OAAsB,KAAMI,CAAK,EAAG,CAAC,QAAuB,KAAM7B,CAAkB,EACrF,CAAC,QAAuB,KAAMyB,EAAW,gBAAgB,CAC3D,EAEMQ,EAAwD,CAAC,OAAQ,MAAM,EACzET,GACFS,EAAkB,KAAK,MAAM,EAE3B1C,GACF0C,EAAkB,KAAK,MAAM,EAE/B,IAAMC,EAAU,CAAC,CAAC,KAAMR,EAAY,SAAUJ,EAAE,SAAU,aAAgC,CAAC,EACvFK,GACFO,EAAQ,KAAK,CAAC,KAAMN,EAAkB,SAAUN,EAAE,SAAU,aAAgC,CAAC,EAE/F,IAAMN,EAAmBC,GAA+B,CACtD,IAAMkB,EAASC,EAAc,IAAKd,EAAE,SAAUA,EAAE,KAAMhB,CAAU,EAC1D+B,EAASD,EAAc,MAAOb,EAAI,SAAUA,EAAI,KAAMjB,CAAU,EAChEgC,EAAY,CAACH,EAAQE,CAAM,EACjC,GAAIb,EAAS,CACX,IAAMe,GAAeH,EAAc,WAAYZ,EAAQ,SAAUA,EAAQ,KAAMlB,CAAU,EACzFgC,EAAU,KAAKC,EAAY,CAC7B,CACIhD,GACF+C,EAAU,KACNF,EAAc,yBAA0B7C,EAAqB,SAAUA,EAAqB,IAAI,CAAC,EAEvG,IAAMiD,EAASrB,EAAe,SAAUG,EAAE,SAAUI,CAAU,EACxDe,EAAa,CAACD,CAAM,EACtBb,GACFc,EAAW,KAAKtB,EAAe,cAAeG,EAAE,SAAUM,EAAkBtB,CAAU,CAAC,EAEzF,IAAMQ,EAAUC,KAA0CT,CAAU,EAE9Dc,GAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAM,KAA+B,EACvF,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA;AAAA,gCAECI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,gCAC7CI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,IACzEd,EAAa,iBAAiBG,EAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMH,IACOP,GAAWG,EACN;AAAA;AAAA,+EAIA;AAAA,wEAGR,CAAC;AAAA,MACNA,EAAa,4DAA8D,EAAE;AAAA,kBACjEb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOhB,IACKU,GAAWG,EACN;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAQA,yEAER,CAAC;AAAA,QAEAA,EACI,+FACA,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA,mBAKCb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBASF,IAAM,CACpB,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,wCACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA,8BACkBkC,EAAO,KAAK,KAAK,6BACnCjD,EAAuB,oCAAsC,KAAK;AAAA;AAAA,IAGxE,EACA,MAAO,CACL,KAAM,iBACN,YAAa,CACX,KAAM,GAAGe,CAAU,IAAIf,IAAyB,MAAS,IAAIiC,IAAY,MAAS,IAAIH,EAAQ,WAAW,GACzG,kBAAAY,CACF,EACA,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAGEzC,GACF,CAAC8C,EAAyBqB,EAAmBC,EAAeC,EAC3DC,EAA6B7C,IAA+B,CAC3D,IAAMC,EAAsBD,EAAqB6C,EAAO,iBAClDC,EAAQD,EAAO,MAAQA,EAAO,MAAQ,EACtCE,EAAsBF,EAAO,YAAcC,EAC3CE,EAAeH,EAAO,YAAc,MAAQxB,EAAQ,YAAc,EAClE4B,EACFD,EAAe,CAACH,EAAO,UAAWA,EAAO,SAAU5C,EAAqB4C,EAAO,QAAQ,EAAI,OACzFK,EAAc,CAACL,EAAO,UAAWA,EAAO,eAAgBE,CAAmB,EAC3EhB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKa,EAAO,UAAYd,CAAS,EACzC,EAAG,KAAK,KAAKc,EAAO,eAAiBd,CAAS,EAC9C,EAAGc,EAAO,UAAYA,EAAO,QAC/B,EAEMlC,EAAoC,CACxC,CAAC,QAAuB,KAAMkC,EAAO,cAAc,EAAG,CAAC,QAAuB,KAAM5C,CAAmB,EACvG,CAAC,QAAuB,KAAM4C,EAAO,SAAS,EAAG,CAAC,QAAuB,KAAMA,EAAO,QAAQ,EAC9F,CAAC,QAAuB,KAAME,CAAmB,EAAG,CAAC,QAAuB,KAAM/C,CAAkB,EACpG,CAAC,QAAuB,KAAM6C,EAAO,gBAAgB,CACvD,EACMZ,EACFW,EAAY,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,EACpDV,EAAU,CAAC,CAAC,KAAMgB,EAAa,SAAUR,EAAM,SAAU,aAAgC,CAAC,EAC5FM,GACFd,EAAQ,KAAK,CAAC,KAAMe,EAAoB,
SAAUP,EAAM,SAAU,aAAgC,CAAC,EAErG,IAAM1B,EAAmBC,GAA+B,CACtD,IAAMkC,EAAcf,EAAc,QAASM,EAAM,SAAUA,EAAM,IAAI,EAC/DU,EAAUhB,EAAc,IAAKO,EAAE,SAAUA,EAAE,IAAI,EAC/CL,EAAY,CAACa,EAAaC,CAAO,EACnCR,GACFN,EAAU,KAAKF,EAAc,aAAcQ,EAAU,SAAUA,EAAU,IAAI,CAAC,EAGhF,IAAMH,EAAa,CADJtB,EAAe,SAAUuB,EAAM,SAAUQ,CAAW,CACzC,EACtBF,GACFP,EAAW,KAAKtB,EAAe,gBAAiBuB,EAAM,SAAUO,CAAkB,CAAC,EAErF,IAAM7B,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,gBAAiB,KAAM,KAAK,EACrE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA,gCACCoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,gCAChDoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,IAC5Ed,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMJ,IACQa,GAAaI,EACR;AAAA;AAAA;AAAA,QAKA;AAAA;AAAA,eAIR,CAAC;AAAA,MACNA,EAAe,kEAAoE,EAAE;AAAA,iBAC1EG,EAAY,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAO9B,IACGP,GAAaI,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQA;AAAA;AAAA,SAIR,CAAC;AAAA,UACFA,EAAe,kFAAoF,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAkBzG,EAEA,MAAO,CACL,KAAM,iBACN,YAAa,CAAC,KAAM,GAAGJ,IAAc,MAAS,IAAIvB,EAAQ,WAAW,GAAI,kBAAAY,CAAiB,EAC1F,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAESxC,GACT,CAAC6C,EAAyBC,EAAe+B,EAAeV,EAAeW,EACtEC,EAA6B/B,EAA+BoB,EAC5DrD,EAA4CkC,EAAiCxC,IAA+B,CAC3G,IAAMuE,EAAcnC,EAAQ,YACtBrB,EACFyB,EAAW,aAAe,QAAa+B,EAAc,EAAI/B,EAAW,mBAAqB,EACvFxB,EAAsBD,EAAqByB,EAAW,iBAEtDgC,EAAWhC,EAAW,aAAe,QAAa+B,EAAc,GAAKhC,EAAW,CAACF,EAAG+B,EAAG7B,CAAO,EAAI,CAACF,EAAG+B,CAAC,EACzG9D,GACFkE,EAAQ,KAAKlE,CAAoB,EAInC,IAAMmD,EAAQrB,EAAQ,QAClB/C,GACI+C,EAASC,EAAG+B,EAAGG,EAAc,EAAIhC,EAAU,OAAWjC,EAAsBkC,EAAYxC,EACxFe,CAAkB,EACtB,CAAC,OAAQyD,EAAS,QAAUhC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,GAAI,CAAC,EAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAG5GnC,EAAQ,QACJhD,GACIgD,EAASqB,EAAOjB,EAAW,UAAYA,EAAW,SAAWA,EAAW,eACxExB,CAAmB,EACvB,CAAC,OAAQ,CAACyC,CAAK,EAAG,QAAS,CAAC,CAAC,CAAC,EAGlC,IAAMgB,EACDjC,EAAW,aAAe,QAAa+B,EAAc,GAAKZ,EAAa,CAACF,EAAOC,EAAGC,CAAS,EAAI,CAACF,EAAOC,CAAC,EAC7GtB,EAAQ,QACJ9C,GACI8C,EAASqB,EAAOC,EAAGa,EAAc,GAAKZ,EAAYA,EAAY,OAAWnB,EAAYzB,CAAkB,EAC3G,CAAC,OAAQ0D,EAAS,QAAUjC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,EAAG,CAAC,EAAI,CAAC,CAAC,CAAC,CAAC,CACzG,EAEE/E,GAAU,CAAC4C,EAAyBI,IAAoC,CAC5E,IAAMyB,EAAc,CAClBzB,EAAW,UACXA,EAAW,SACXA,EAAW,eACXA,EAAW,QACb,EACMkC,EAAIlC,EAAW,eACfmC,EAAInC,EAAW,gBACfoC,EAAIpC,EAAW,SACfM,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKP,EAAW,SAAWM,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACMzC,EAAS,CAACqC,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,CAAC,EACjEV,EAAoC,CACxC,CAAC,QAAuB,KAAMgD,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EACnG,CAAC,QAAuB,KAAMpC,EAAW,QAAQ,EAAG,CAAC,QAAuB,KAAMA,EAAW,QAAQ,EACrG,CAAC,QAAuB,KAAMA,EAAW,UAAU,EACnD,CAAC,QAAuB,KAAMA,EAAW,WAAaA,EAAW,WAAaA,EAAW,WAAW,CACtG,EAEMT,EAAmBC,GAA+B,CACtD,IAAM6C,EAAU3C,EAAe,WAAYnC,EAAO,CAAC,EAAE,SAAUkE,CAAW,EACpEa,EAAU5C,EAAe,WAAYnC,EAAO,CAAC,EAAE,SAAUkE,CAAW,EACpEc,EAAU7C,EAAe,WAAYnC,EAAO,CAAC,EAAE,SAAUkE,CAAW,EACpEhE,EAAQkD,EAAc,QAASpD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEiF,EAAS7B,EAAc,SAAUpD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACnEI,EAAOgD,EAAc,OAAQpD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/D4B,EAAW1B,EAAM,KAAK,QAEtBkC,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAA
K,EAC7G,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,MAAO,KAAM,KAAK,CACjG,EACA,MAAO;AAAA,sBACWW,CAAS;AAAA,oCACKnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAChCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,IACpEd,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBlC,EAAO+E,EAAQ7E,EAAM0E,EAASC,EAASC,CAAO,CAAC;AAAA,IACxG/C,EAAa,UAAU,CACrBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWanB,CAAQ;AAAA,mBACRA,CAAQ;AAAA,mBACRA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAoCzB,EAEA,OAAOS,EAAQ,QACX,CACE,KAAM,mBACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,CAC5F,EACA,cAAeW,EACf,gBAAArB,CACF,GACA,gBAAAK,CACF,EACA,CAAC,OAAAhC,EAAQ,QAAS,CAAC,GAAI,GAAI,EAAE,CAAC,CAAC,CACrC,EAEaN,GAAY,CAAC2C,EAAyBpC,IAAqC,CACtF,IAAM4D,EAASzE,GAAwBiD,EAAQ,OAAQpC,CAAU,EAE3D,CAACqC,EAAG+B,EAAGV,CAAC,EAAIlE,GAAQ4C,EAASwB,CAAM,EAEzC,OAAOrE,GACH6C,EAASC,EAAG+B,EAAGV,EAAGtB,EAAQ,OAAO,CAAC,EAAG,OAAW,OAAW,OAAWA,EAAQ,OAAO,CAAC,EAAGwB,EAAQ5D,CAAU,CACjH,ICpwBA,IAsBMiF,GAkCAC,GAgFOC,GAGAC,GA3IbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAGAC,KAWMV,GAAiB,CAACW,EAA+BC,IAA0C,CAC/F,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAME,EAAkB,CAACC,EAA2BC,EAA6BC,IAAoB,CACnG,IAAMC,EAAIF,EAAS,OACnB,GAAIE,IAAMH,EAAO,OACf,MAAM,IAAI,MAAM,GAAGE,CAAO,uBAAuBC,CAAC,EAAE,EAEtDF,EAAS,QAAQ,CAACG,EAAGC,IAAM,CACzB,GAAID,IAAMJ,EAAOK,CAAC,EAChB,MAAM,IAAI,MAAM,GAAGH,CAAO,SAASG,CAAC,gBAAgB,CAExD,CAAC,CACH,EAEA,GAAIR,EAAO,CAAC,EAAE,KAAK,OAAS,EAAG,CAC7B,IAAMS,EAAQR,EAAW,SAAW,OAC/BA,EAAW,QAAUD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EACvBA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EAAE,OAAOA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,CAAC,EACxGA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGC,EAAW,QAAU,EAAI,MAAS,EAC9DC,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,qBAAqB,EAC5DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,iBAAiB,EACxDP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,oBAAoB,EAC3DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,mBAAmB,CAC5D,MACEP,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,qBAAqB,EAC1DE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,iBAAiB,EACtDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,oBAAoB,EACzDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,mBAAmB,CAE5D,EAEMV,GACF,CAACU,EAA+BC,IAAiD,CAC/E,GAAM,CAAC,QAAAS,EAAS,QAAAC,EAAS,OAAAC,CAAM,EAAIX,EAC7BY,EAASb,EAAO,CAAC,EAAE,KACnBc,EAAaH,EAAUI,GAAiBF,EAAOA,EAAO,OAAS,CAAC,CAAC,EAAI,EACrEG,EAAcJ,IAAW,QAAUC,EAAO,OAAS,EAAIC,EAAa,EACpEG,EAAaC,EAAU,KAAKL,CAAM,EAAIC,EAEtCK,EAAoBR,EACpBS,EAAcD,EAAoBN,EAAO,OAASA,EAClDQ,EAAIC,EAAc,IAAKtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMc,CAAU,EACrES,EAAQD,EAAc,QAAStB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC9EQ,EAAOF,EAAc,OAAQtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC5ES,EAAYH,EAAc,YAAatB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACtFU,EAAWJ,EAAc,WAAYtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACpFW,EAAIC,EAAe,IAAK5B,EAAO,CAAC,EAAE,SAAUoB,EAAaN,CAAU,EAGnEe,EAAc,IAAc,CAChC,IAAIC,EAAU,GACd,GAAInB,EACFmB,EAAU,iBACNjB,EAAO,SAAW,EAAM,KACpBD,IAAW,OAAS,iBAAiBC,EAAO,OAAS,CAAC,OAAOC,CAAU,GACnD,kBAAkB,YAE1CF,IAAW,OACbkB,EAAU;AAAA,cACRH,EAAE,WAAW,gBAAiB,IAAK,GAAG,CAAC;AAAA,4BACzBA,EAAE,gBAAgB,eAAe,CAAC,QAC7C,CAELG,EAAU,kBAAkBP,EAAM,KAAK,OAAO;AAAA,qDACLV,EAAO,OAAS,CAAC,KAE1D,QA
ASL,EAAI,EAAGA,EAAIe,EAAM,KAAMf,IAC9BsB,GAAW,YAAYtB,CAAC,qBAAqBA,CAAC,KAEhDsB,GAAW,iBAAiBP,EAAM,gBAAgB,UAAU,CAAC,GAC/D,CAEF,OAAOO,CACT,EACMC,EAAgCC,GAAyB;AAAA,oBACjDtB,CAAO;AAAA,IACvBsB,EAAO,gBAAgB,aAAc,KAAK,EAAE,iBAAiBX,EAAGE,EAAOC,EAAMC,EAAWC,EAAUC,CAAC,CAAC;AAAA,IACpGK,EAAO,UAAU,CAAC;AAAA,IAClBA,EAAO,sCAAsC,qBAAqB,CAAC;AAAA,0BAC7CL,EAAE,gBAAgB,gBAAgBb,CAAU,EAAE,CAAC;AAAA,MACnEe,EAAY,CAAC;AAAA,kBACDN,EAAM,YAAY,SAAS,CAAC;AAAA,iBAC7BC,EAAK,YAAY,SAAS,CAAC;AAAA,sBACtBC,EAAU,YAAY,SAAS,CAAC;AAAA,qBACjCC,EAAS,YAAY,SAAS,CAAC;AAAA,cACtCL,EAAE,YAAY,YAAY,CAAC;AAAA;AAAA,MAEnCM,EAAE,YAAY,aAAc,OAAO,CAAC;AAAA,KAEpC,MAAO,CACL,KAAM,qBACN,YAAa,CACX,KAAM,GAAG1B,EAAW,OAAO,IAAIA,EAAW,MAAM,IAAIU,CAAO,IAAIG,CAAU,GACzE,kBAAmBK,EAAoB,CAAC,OAAQ,OAAQ,OAAQ,OAAQ,MAAM,EAAI,MACpF,EACA,gBAAiBY,EACjB,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM/B,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKiB,EAAa,EAAuB,CAAC,EAClE,gBAAiBE,EACb,CACE,CAAC,QAAuB,KAAMF,CAAU,EACxC,GAAGgB,EAA2BpB,CAAM,CACtC,EACA,CACE,CAAC,QAAuB,KAAMI,CAAU,CAC1C,CACN,EACF,CACF,EAES1B,GAA4BU,GACrCiC,GAA4BjC,CAAoE,EAEvFT,GAAY,CAAC2C,EAAyBlC,IAA8C,CAC/F,GAAM,CAAC,OAAAD,EAAQ,YAAAoC,CAAW,EAAID,EACxBE,EAAoB9C,GAAyB,CAAC,GAAGU,EAAY,YAAAmC,CAAW,CAAC,EAI/E,GAHIE,GAAI,OAAO,sBACbjD,GAAeW,EAAQqC,CAAiB,EAEtCpC,EAAW,aACb,MAAM,IAAI,MAAM,uDAAuD,EAEvEkC,EAAQ,QAAQ7C,GAAoCU,EAAQqC,CAAiB,CAAC,CAElF,ICtJA,IASME,GAkBAC,GAkCOC,GA7DbC,GAAAC,EAAA,kBAIAC,KAGAC,KAEMN,GAAkBO,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,IAAK,IAAK,IAAI,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC9C,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMN,GAA4BM,GAA+C,CAC/E,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAExBE,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAE3BG,EAAaC,EAAU,KAAKH,CAAW,EAAI,EAE3CI,EAAWL,EAAO,CAAC,EAAE,SACrBM,EAAQC,EAAc,QAASF,EAAUJ,EAAa,CAAC,EACvDO,EAAOD,EAAc,OAAQF,EAAU,CAACH,CAAQ,EAAG,CAAC,EACpDO,EAAWF,EAAc,WAAYF,EAAUJ,EAAa,CAAC,EAC7DS,EAASC,EAAe,SAAUN,EAAUJ,EAAa,CAAC,EAahE,MAAO,CACL,KAAM,UACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,CACpE,GACA,gBAjBuBS,GAA+B;AAAA,qBACrCV,CAAQ;AAAA,IACzBU,EAAa,iBAAiBN,EAAOE,EAAMC,EAAUC,CAAM,CAAC;AAAA;AAAA,IAE5DE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCT,CAAU,CAAC;AAAA,kBAClDG,EAAM,YAAY,YAAY,CAAC;AAAA,UACvCE,EAAK,YAAY,uBAAuB,CAAC,MAAMC,EAAS,YAAY,YAAY,CAAC;AAAA,MACrFC,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAU7C,CACF,EAEaf,GAAWkB,GAAkC,CACxDpB,GAAeoB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQnB,GAAyBmB,EAAQ,MAAM,CAAC,CAC1D,IChEA,IAeMC,GA4BAC,GAiBOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAGAC,GASAC,GAIAC,GA8BPC,GAMOC,GAaAC,GAIAC,GAIAC,GAQAC,GAGAC,GAgBAC,GAcAC,GAKAC,GAIAC,GAIAC,GAMAC,GAOAC,GAIAC,GAIAC,GAIAC,GAMAC,GASAC,GAMAC,GASAC,GAIAC,GAIAC,GAIAC,GAIAC,GAEAC,GAKAC,GAUAC,GAGAC,GAOAC,GAQAC,GAIAC,GAmBAC,GAEAC,GAlVbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMlD,GACF,CAACmD,EAA4BC,EAAkBC,EAAuBC,EACrEC,EAAmCC,IAA8C,CAChF,IAAMC,EAAU,KAAK,KAAKL,EAAW,CAAC,EAElCM,EAAa,GACb,OAAOH,GAAa,SACtBG,EAAa,GAAGH,CAAQ,MAExBG,EAAaH,EAAS,GAAG,EAG3B,IAAMI,EAAQC,EAAc,YAAaP,EAAe,CAACI,CAAO,EAAG,CAAC,EAC9DI,EAASC,EAAe,aAAcR,EAAgB,CAACG,CAAO,EAAG,CAAC,EAExE,MAAO;AAAA,QACLN,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBQ,EAAOE,CAAM,CAAC;AAAA;AAAA,IAEnFL,GAA4B,EAAE;AAAA;AAAA,IAE9BL,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA;AAAA,cAE/DQ,EAAM,YAAY,YAAY,CAAC;AAAA,MACvCE,EAAO,YAAY,aAAcH,CAAU,CAAC;AAAA,IAE9C,EAEEzD,GACF,CAAC0D,EAAmBI,EAAcR,EAAmCC,EACpEQ,EAAmBV,EAA
yBK,EAAM,YAA2B,CAC5E,KAAAI,EACA,YAAa,CAAC,KAAMC,EAAU,kBAAmB,CAAC,MAAM,CAAC,EACzD,gBAAiBb,GAAgBnD,GAC7BmD,EAAcc,EAAU,KAAKN,EAAM,IAAI,EAAGA,EAAM,SAAUL,EAAgBC,EAAUC,CAAwB,EAChH,WAAaU,IAAkB,CAC7B,QAAS,CAAC,CAAC,KAAMP,EAAM,KAAM,SAAUL,CAAc,CAAC,EACtD,cACI,CAAC,EAAG,KAAK,KAAKW,EAAU,KAAKC,EAAa,CAAC,EAAE,IAAI,EAAI,GAA0B,CAAgB,CAAC,EACpG,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKD,EAAU,KAAKN,EAAM,IAAI,EAAI,CAAC,CAAC,CACzE,CACF,EACF,GAESzD,GAAOiE,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahE,GAAQgE,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa/D,GAAS+D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa9D,GAAQ8D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa7D,GAAS6D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa5D,GAAQ4D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EACa3D,GAAS2D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAOa1D,GAAuB2D,GAChCC,GAA4BD,CAA0B,EAG7C1D,GAAO,CAACyD,EAAyBC,IAAqC,CACjF,IAAIE,EACJ,OAAQF,EAAW,GAAI,CACrB,QACEE,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,QACEA,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,OACEA,EAAO,aACP,MACF,QACE,MAAM,IAAI,WAAW,0EAA0EF,EAAW,EAAE,EAAE,CAClH,CACAD,EAAQ,QACJlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQG,EAAM,OAAWF,EAAW,SAAUA,EAAW,EAAE,CAAC,CAClH,EAOMzD,GAAoC4D,GAAkD,CAC1F,IAAMC,EAAOD,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAIE,GACtFC,EAAOH,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAII,GAC5F,OAAON,GAA4B,CAAC,IAAAG,EAAK,IAAAE,CAAG,CAAC,CAC/C,EAEa9D,GAAO,CAACuD,EAAyBS,IAAyC,CACrF,IAAMR,EAAaD,EAAQ,OAAO,SAAW,EAAIS,EAAiBjE,GAAiCwD,EAAQ,MAAM,EAC3GU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QACJlE,GACIkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,0BAA2B;AAAA,4BACnDF,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,4BAC9CS,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,EAEhEA,EAAW,QAAQ,EACvB,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEavD,GAAQsD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEarD,GAAOqD,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEapD,GAAQoD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAManD,GAAwBoD,GACjCC,GAA4BD,CAA6B,EAEhDnD,GAAM,CAACkD,EAAyBC,IAAsC,CACjF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK;AAAA,uBAChCF,CAAQ,IAAIT,EAAW,KAAK;AAAA;AAAA,kBAEjCS,CAAQ,QAAQA,CAAQ;AAAA;AAAA;AAAA;AAAA,wBAIlBA,CAAQ,cAAcA,CAAQ;AAAA;AAAA,KAGhDT,EAAW,QAAQ,CAAC,CAC1B,EAEalD,GAAU,CAAC8D,EAAU,QAAU;AAAA,YAChCA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA;AAAA,sBAEGA,CAAO,cAAcA,CAAO;AAAA;AAAA;AAAA;AAAA,GAMrC7D,GAAOgD,GAAkC,CACpD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK7D,GAAQ2D,CAAQ,CAAC,CAAC,CAClH,EAEazD,GAAO+C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa9C,GAAS8C,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa7C,GAAQ6C,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,sBAAsBA,CAAC,0BAA2B7D,GAAQ2D,CAAQ,CAAC,CAAC,CACpH,EAEatD,GAAY,CAAC4C,EAAyBC,IAAsC,CACvF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaY,GAAK,8BAA8BA,CAAC,KAAKA,CAAC,KAAKA,CAAC,YAAYF,CAAQ,UACpG,6BAA6BA,C
AAQ,IAAIT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,CACzF,EAEa5C,GAAO2C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEatD,GAAO0C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEarD,GAAcyC,GAAkC,CAC3DA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,aAAcY,GAAK,OAAOA,CAAC,EAAE,CAAC,CAChG,EAEapD,GAAQwC,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,WAAWF,CAAQ,SAAS,CAAC,CAC5G,EAEajD,GAAWuC,GAAkC,CACxDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,UAAWY,GAAK,sBAAsBA,CAAC,KAAK,CAAC,CAC/G,EAOalD,GAA8BuC,GACvCC,GAA4BD,CAG3B,EAEQtC,GAAc,CAACqC,EAAyBC,IAA4C,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,cACnBY,GAAK,YAAYF,CAAQ,oBAAoBA,CAAQ,WAAWT,EAAW,KAAK,MAAMW,CAAC,WAAWF,CAAQ,KACtGT,EAAW,IAAI,MACnB,OAAWA,EAAW,QAAQ,CAAC,CACrC,EAEarC,GAAOoC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEanC,GAAQmC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEalC,GAAQkC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEajC,GAAOiC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahC,GAAkB4C,GAAc,QAAQA,CAAC,yBAAyBA,CAAC,2BAA2BA,CAAC,MAE/F3C,GAAQ+B,GAAkC,CAErDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQhC,EAAc,CAAC,CACzF,EAEaE,GAAe,CAAC2C,EAAU,QAAU;AAAA,qBAC5BA,CAAO;AAAA,qBACPA,CAAO;AAAA,qBACPA,CAAO;AAAA;AAAA,oBAERA,CAAO,cAAcA,CAAO;AAAA,WACrC7C,GAAe,GAAG,CAAC;AAAA;AAAA,EAIjBG,GAAsB2C,GAC/B,uCAAuCA,CAAC,qBAAqBA,CAAC,MAAMA,CAAC,uBAAuBA,CAAC,GAEpF1C,GAAY4B,GAAkC,CACzD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,WAAY7B,GAAoBD,GAAawC,CAAQ,EAAG,OAC3EV,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,EAEa3B,GAAkB,CAAC2B,EAAyBC,IAAwC,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrE,OAAAA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,kBAAmBY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,8BACpF,wCAAwCF,CAAQ,KAAKT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,EAC5F,CACT,EAEa3B,GAAO0B,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEazB,GAAgB,CAACsC,EAAiBE,IAAkB;AAAA,qBAC5CF,CAAO,KAAKE,CAAK;AAAA,cACxBF,CAAO;AAAA,eACNA,CAAO;AAAA;AAAA,6BAEOA,CAAO,cAAcA,CAAO;AAAA;AAAA,kBAEvCA,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYZrC,GAAuBsC,GAAc,mBAAmBA,CAAC,IAEzDrC,GAAY,CAACuB,EAAyBC,IAAsC,CACvF,IAAMe,EAAQL,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAClEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaxB,GAAqBD,GAAcyC,EAAOf,EAAW,KAAK,EAAGA,EAAW,SACxGD,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,ICvVA,IAUMiB,GAkBAC,GAyCOC,GArEbC,GAAAC,EAAA,kBAIAC,KAGAC,KACAC,KAEMP,GAAkBQ,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,KAAM,KAAM,KAAK,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EACjD,MAAM,IAAI,MAAM,4CAA4C,EAG9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMP,GAAkCO,GAA+C,CACrF,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAAK,MAAM,EACzCC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAI,EAElC,IAAMC,EAAQC,EAAc,QAASH,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM,CAAC,EACpEI,EAAOD,EAAc,OAAQH,EAAO,CAAC,EAAE,SAAU,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAAG,CAAC,EACvEK,EAASC,EAAe,SAAUN,EAAO,CAAC,EAAE,SAAUC,EAAa,CAAC,EAEpEM,EAAaC,EAAU,KAAKP,CAAW,EAAI,EAC3CQ,EAAWC,GAA4BV,EAAO,CAAC,EAAE,QAAQ,EAsB/D,MAAO,CACL,KAAM,gBACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ
,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKO,EAAa,EAAuB,CAAC,CACpE,GACA,gBA1BuBI,GAA+B;AAAA;AAAA,yBAEjCX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,EAAI,CAAC;AAAA;AAAA,IAE9CW,EAAa,iBAAiBT,EAAOE,EAAMC,CAAM,CAAC;AAAA;AAAA,IAElDO,GAAQH,CAAQ,CAAC;AAAA;AAAA,IAEjBE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCJ,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQ9DF,EAAO,YAAY,aAAc,uBAAuB,CAAC;AAAA,IAU7D,CACF,EAEaX,GAAiBmB,GAAkC,CAC9DrB,GAAeqB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQpB,GAA+BoB,EAAQ,MAAM,CAAC,CAChE,ICxEA,IAiBMC,GAqGAC,GAsEAC,GAQOC,GAIAC,GAIAC,GAMAC,GAIAC,GAsBAC,GAIAC,GAMAC,GAMAC,GAMAC,GAlQbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KASMjB,GACF,CAACkB,EAA4BC,EAA0BC,EAA0BC,EAChFC,EAAoBC,EAAsBC,EAAsCC,EAChFC,EAAeC,EAAeC,EAAoBC,IAAsC,CACvF,IAAIC,EACAC,EACA,OAAON,GAAa,SACtBK,EAAmBC,EAAmB,CAACC,EAAGC,IAAM,GAAGR,CAAQ,KAAKO,CAAC,MAAMC,CAAC,KAC/D,OAAOR,GAAa,WAC7BK,EAAmBC,EAAmBN,GAEtCK,EAAmBL,EAAS,OAC5BM,EAAmBN,EAAS,QAG9B,IAAMS,EAASC,EAAe,aAAcP,EAAYP,EAAW,OAAQ,CAAC,EACtEW,EAAII,EAAc,QAASV,EAAOP,EAAM,OAAQ,CAAC,EACjDc,EAAIG,EAAc,QAAST,EAAOP,EAAM,OAAQ,CAAC,EAEnDiB,EACJ,GAAIf,EACF,GAAIC,EAAa,CACf,IAAMe,EAAgBC,EAAU,KAAKpB,CAAK,IAAM,EAC1CqB,EAAgBD,EAAU,KAAKnB,CAAK,IAAM,EAC1CqB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC3EuB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC7EkB,GAAiBE,EACnBH,EAAaH,EAAO,YAChB,aACAH,EACIO,EAAgB,GAAGN,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,EACvFQ,EAAgB,GAAGP,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,CAAC,CAAC,EAEjGI,EAAa;AAAA,kCACSH,EAAO,gBAAgB,iBAAiB,CAAC;AAAA,4BAC/CF,EAAE,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,4BACrDD,EAAE,2BAA2B,gBAAiBC,CAAM,CAAC;AAAA,cAEjEA,EAAO,YACH,aACAH,EACIP,GAA+BiB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,kBACpDR,GAA+BkB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAAA,WAGvF,MACEI,EAAaH,EAAO,YAChB,aAAcH,EAAiBC,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAEzF,CACL,GAAI,CAACV,EACH,MAAM,IAAI,MAAM,sFAAsF,EAGxG,IAAMoB,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,eAAeF,CAAC,eAAeA,CAAC,IAC9CG,EAAc,eAAeH,CAAC,eAAeA,CAAC,IACpD,MAAO;AAAA,+BACcA,CAAC,MAAMX,EAAO,gBAAgB,qBAAqBW,CAAC,GAAG,CAAC;AAAA,yBAC9DA,CAAC,MAAMb,EAAE,2BAA2B,gBAAgBa,CAAC,GAAIX,CAAM,CAAC;AAAA,yBAChEW,CAAC,MAAMZ,EAAE,2BAA2B,gBAAgBY,CAAC,GAAIX,CAAM,CAAC;AAAA,wBACjEW,CAAC,aAAaA,CAAC;AAAA,wBACfA,CAAC,aAAaA,CAAC;AAAA,4BACXA,CAAC,aAAaA,CAAC;AAAA,4BACfA,CAAC,aAAaA,CAAC;AAAA,cAC7BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIhB,EAAiBiB,EAAaC,CAAW,CAAC;AAAA,WAE9E,EACIpB,IAAe,EACjBS,EAAa;AAAA;AAAA,cAETM,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,uGAGtCN,EAAa;AAAA,cACTM,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,WAGrD,CAEA,MAAO;AAAA,UACHzB,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBc,EAAGC,EAAGC,CAAM,CAAC;AAAA;AAAA,UAE9EL,GAA4B,EAAE;AAAA;AAAA,UAE9BX,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEmB,CAAU;AAAA,QAEhB,EAEEpC,GACF,CAACgD,EAAcC,EAAkBlB,EAAeC,EAAeR,EAC9DI,EAAmCsB,EAAyBnB,EAAE,WAA0B,CACvF,IAAMoB,EAAc,CAACb,EAAU,SAASP,EAAE,KAAMC,EAAE,IAAI,EAClDoB,EAAcrB,EAAE,KAChBsB,EAAaf,EAAU,KAAKP,EAAE,IAAI,EAElCV,EAAY,GACZE,EAA8B,GAG5B+B,EAAc,CAACH,CAAW,EAChC,GAAIA,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUzB,EAAE,KAAMC,EAAE,KAAM,EAAK,EACrE,GAAI,CAACuB,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEH,EAAcG,EACdF,EAAaf,EAAU,KAAKc,CAAW,EACvC,IAAMf,EAAgBC,EAAU,KAAKP,EAAE,IAAI,IAAM,EAC3CQ,EAAgBD,EAAU,KAAKN,EAAE,IAAI,IAAM,EAC3CQ,EAAuBT,EAAE,KAAK,OAAS,GAAKA
,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EAC9EU,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EACpFsB,EAAY,KAAKjB,CAAa,EAC9BiB,EAAY,KAAKf,CAAa,EAC9Be,EAAY,KAAKd,CAAoB,EACrCc,EAAY,KAAKb,CAAoB,EAErC,IAAIgB,EAAkB,EACtB,QAASC,EAAI,EAAGA,EAAIN,EAAY,OAAQM,IAAK,CAC3C,IAAMC,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS2B,CAAC,GAAK,EACpCE,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS0B,CAAC,GAAK,EAC1C,GAAIC,IAASC,EACXH,GAAmBE,MAEnB,MAEJ,CACIF,EAAkB,IAAM,GAC1BlC,EAA8B,GAC9BF,EAAY,KACHgB,GAAiBE,GAAiBC,GAAwBC,KACnEpB,EAAY,GAEhB,MAEEA,EAAY,GAEd,OAAAiC,EAAY,KAAKjC,CAAS,EAEnB,CACL,KAAA2B,EACA,YAAa,CACX,KAAMC,EAAWK,EAAY,IAAKV,GAAMA,EAAE,SAAS,CAAC,EAAE,KAAK,GAAG,EAC9D,kBAAmB,CAAC,OAAQ,MAAM,CACpC,EACA,gBAAkB3B,GAAiBlB,GAC/BkB,EAAcc,EAAE,KAAMC,EAAE,KAAMoB,EAAa/B,EAAW8B,EAAa5B,EAA6BC,EAChGO,EAAE,SAAUC,EAAE,SAAUkB,EAAgBtB,CAAwB,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAsB,CAAC,EAC3F,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKf,EAAU,KAAKc,CAAW,EAAI,CAAC,CAAC,EACxE,GAAGS,EAA2B9B,EAAE,KAAMC,EAAE,KAAMoB,CAAW,CAC3D,CACF,EACF,CACF,EAEEnD,GACF,CAAC6D,EAAyBd,EAAcxB,EAA8BI,EACrEqB,EAAmBC,IAAkC,CACpDY,EAAQ,QAAQ9D,GACZgD,EAAMC,GAAY,GAAIa,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGtC,EAAUI,EACtEsB,CAAc,CAAC,CACrB,EAEShD,GAAO4D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa7B,GAAO2D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa5B,GAAS0D,GAAkC,CACtD7D,GACI6D,EAAS,QAAU,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEa3B,GAAOyD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa1B,GAAOwD,GAAkC,CACpD,IAAMC,EAAO5B,EAAc,QAAS2B,EAAQ,OAAO,CAAC,EAAE,SAAUA,EAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,MAE7F7D,GACI6D,EAAS,MAAQ,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,cAAcD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,qBAAqBD,CAAC,IAAIC,CAAC,GAAG,EAC7G;AAAA,wBACkB+B,CAAI,SAASA,CAAI,QAAQA,CAAI;AAAA,iBACpCA,CAAI;AAAA,iBACJA,CAAI;AAAA,uBACEA,CAAI;AAAA,iBACVA,CAAI;AAAA;AAAA,+BAEUA,CAAI,6BAA6BA,CAAI,qBAAqBA,CAAI,IAV1EA,IAAS,MAAQ,QAAU,EAW5B;AAAA;AAAA,oCAEkBA,CAAI,eAAeA,CAAI,cAAcA,CAAI;AAAA;AAAA,oBAEzDA,CAAI;AAAA;AAAA,OAEjB,CACP,EAEaxD,GAAOuD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEaxB,GAAWsD,GAAkC,CACxD7D,GACI6D,EAAS,UAAY,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEavB,GAAQqD,GAAkC,CACrD7D,GACI6D,EAAS,OAAS,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACnG,QAAwB,CAC9B,EAEatB,GAAkBoD,GAAkC,CAC/D7D,GACI6D,EAAS,iBAAmB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAC3G,OAAW,QAAwB,CACzC,EAEarB,GAAemD,GAAkC,CAC5D7D,GACI6D,EAAS,cAAgB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EACxG,OAAW,QAAwB,CACzC,ICtQA,IAeMgC,GA4BAC,GAWAC,GAmBAC,GAkEOC,GAcAC,GAzJbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMX,GAAiB,CAACY,EAA+BC,IAAuB,CAC5E,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,IAAME,EAAiB,EACjBC,EAAiBH,EAAOE,CAAc,EACtCE,EAAYD,EAAe,SAC3BE,EAAYF,EAAe,KAAK,OACtCH,EAAO,QAAQ,CAACM,EAAOC,IAAM,CAC3B,GAAIA,IAAML,EAIV,IAAII,EAAM,WAAaF,EACrB,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIE,EAAM,KAAK,SAAWD,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAE5DC,EAAM,KAAK,QAAQ,CAACE,EAAKD,IAAM,CAC7B,GAAIA,IAAMN,GAAQO,IAAQL,EAAe,KAAKI,CAAC,EAC7C,MAAM,IAAI,M
AAM,kCAAkC,CAEtD,CAAC,EACH,CAAC,CACH,EAEMlB,GAA0B,CAACoB,EAAyBC,IAAwC;AAAA;AAAA,wCAE1DD,CAAe,MAAMC,CAAmB;AAAA,gCAChDD,CAAe;AAAA;AAAA;AAAA;AAAA;AAAA,aAKlCA,CAAe;AAAA,KAGtBnB,GAAmB,CAACU,EAAkCW,IAA0B,CACpF,IAAMF,EAAkBT,EAAO,OAEzBY,EAAsB,CAAC,EAC7B,QAASL,EAAI,EAAGA,EAAIE,EAAiB,EAAEF,EAAG,CACxC,IAAMM,EAAgBF,EAAO,YAAY,aAAcX,EAAOO,CAAC,EAAE,aAAa,SAAS,CAAC,EACpFE,IAAoB,EACtBG,EAAU,KAAKC,CAAa,EACnBN,IAAM,EACfK,EAAU,KAAK,qBAAqBL,CAAC,QAAQM,CAAa,IAAI,EACrDN,IAAME,EAAkB,EACjCG,EAAU,KAAK,UAAUC,CAAa,IAAI,EAE1CD,EAAU,KAAK,0BAA0BL,CAAC,OAAOM,CAAa,IAAI,CAEtE,CACA,OAAOD,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMrB,GACF,CAACS,EAA+Bc,EAAsBC,EAAuBC,IAAoC,CAC/G,IAAMC,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAmB,IAAI,MAAcnB,EAAO,MAAM,EAClDoB,EAAY,IAAI,MAAqBpB,EAAO,MAAM,EAEpDqB,EAAc,EACZC,EAAwD,CAAC,EACzDC,EAAa,CAAC,EACdC,EAAoC,CAAC,CAAC,QAAuB,KAAMP,CAAU,CAAC,EACpF,QAASV,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCc,GAAerB,EAAOO,CAAC,EAAE,KAAKO,CAAY,EAC1CK,EAAiBZ,CAAC,EAAIc,EACtBE,EAAW,KAAKvB,EAAOO,CAAC,EAAE,KAAK,MAAM,EACrCa,EAAUb,CAAC,EAAIkB,EAAc,QAAQlB,CAAC,GAAIS,EAAUO,EAAWhB,CAAC,CAAC,EACjEe,EAAkB,KAAK,MAAM,EAC7BE,EAAgB,KAAK,CAAC,QAAuB,KAAML,EAAiBZ,CAAC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCiB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAOO,CAAC,EAAE,IAAI,CAAC,EAEpEiB,EAAgB,KAAK,GAAGE,EAA2BX,CAAW,CAAC,EAE/D,IAAMJ,EAASgB,EAAe,SAAUX,EAAUD,EAAY,MAAM,EAC9Da,EAAcjB,EAAO,WAAW,UAAWG,CAAY,EACvDJ,EACF,MAAM,KAAK,MAAMS,EAAiB,MAAM,EAAE,KAAK,CAAC,EAAE,IAAIZ,GAAK,4BAA4BA,CAAC,EAAE,EAAE,KAAK,GAAG,EAClGsB,EAAmBC,GAA+B;AAAA;AAAA,KAEzD,IAAM,CACHA,EAAa,gBAAgB,aAAc,KAAK,EAChD,QAASvB,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjCuB,EAAa,gBAAgB,mBAAmBvB,CAAC,GAAI,KAAK,EAE5D,OAAOuB,EAAa,iBAAiB,GAAGV,EAAWT,CAAM,CAC3D,GAAG,CAAC;AAAA;AAAA,IAENtB,GAAwB8B,EAAiB,OAAQT,CAAmB,CAAC;AAAA;AAAA,IAErEoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DnB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,2CAEbiB,CAAW;AAAA;AAAA,0CAEZT,EAAiB,MAAM,MAAMT,CAAmB;AAAA,QAClFkB,CAAW;AAAA;AAAA;AAAA,MAGbtC,GAAiB8B,EAAWT,CAAM,CAAC;AAAA,KAGnC,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGG,CAAY,GAAI,kBAAAQ,CAAiB,EACxD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAa,SAAAC,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKC,EAAa,EAAuB,CAAC,EAClE,gBAAAO,CACF,GACA,gBAAAK,CACF,CACF,EAESrC,GAAS,CAACuC,EAAyBC,IAAuC,CACrF,IAAMhC,EAAS+B,EAAQ,OACjBE,EAAajC,EAAO,CAAC,EAAE,KACvBc,EAAeI,EAAU,cAAcc,EAAW,KAAMC,EAAW,MAAM,EAC/E7C,GAAeY,EAAQc,CAAY,EACnC,IAAMC,EAAckB,EAAW,MAAM,EACrClB,EAAYD,CAAY,EACpBd,EAAO,OAAO,CAACkC,EAAK5B,IAAU4B,GAAO5B,EAAM,KAAK,OAASQ,EAAeR,EAAM,KAAKQ,CAAY,EAAI,GAAI,CAAC,EAE5G,IAAMqB,EAAiBnC,EAAO,OAAOM,GAASY,EAAU,KAAKZ,EAAM,IAAI,EAAI,CAAC,EAC5EyB,EAAQ,QACJxC,GAAwB4C,EAAgBrB,EAAcC,EAAaf,EAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQmC,CAAc,CAAC,CACtH,EAEa1C,GAAyBuC,GAClCI,GAA4B,CAAC,KAAMJ,EAAW,IAAc,CAAC,IC1JjE,IAiBaK,GAuBAC,GAaAC,GAUAC,GA/DbC,GAAAC,EAAA,kBAGAC,KACAC,KAaaP,GACT,CAACQ,EAA0CC,EAAmBC,EAAW,QAAkB,CACzF,OAAQF,EAAW,WAAY,CAC7B,IAAK,OACH,MAAO,sBAAsBC,CAAS,UACxC,IAAK,UACH,MAAO,YAAYA,CAAS,YAAYA,CAAS,yBACnD,IAAK,OACH,MAAO,wBAAwBA,CAAS,IAAIC,CAAQ,yBAAyBD,CAAS,IAClFC,CAAQ,yBACd,IAAK,cACH,MAAO,eAAeD,CAAS,cAAcA,CAAS,UAAUC,CAAQ,8BACpEA,CAAQ,qBACd,IAAK,YACH,MAAO,kBAAkBA,CAAQ,6CAA6CD,CAAS,UACzF,IAAK,GACH,MAAO,GAET,QACE,MAAM,IAAI,MAAM,0BAA0BD,EAAW,UAAU,EAAE,CACrE,CACF,EAESP,GACT,CAACO,EAA0CG,IAAqC,CAC1EH,EAAW,aAAe,OAC5BG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,OAAQ,EAAG,CAAC,OAAsB,KAAMA,EAAW,OAAQ,CAAC,EAC/FA,EAAW,aAAe,cACnCG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,KAAM,EAAG,CAAC,OAAsB,KAAMA,EAAW,IAAK,CAAC,EAC1FA,EAAW,aAAe,aACnCG,EAAe,KAAK,CAAC,OAAsB,KAAMH,EAAW,KAAM,CAAC,CAEvE,EAESN,GAA2B,CAACM,EAA0CI,IAAgC,CAC7GJ,EAAW,aAAe,OAC5BI,EAAS,KAAK,CAAC,KA
AM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,WAAY,KAAM,KAAK,CAAC,EACrEJ,EAAW,aAAe,cACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAAC,EAC9DJ,EAAW,aAAe,aACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAE9C,EAEaT,GACRK,GAAgF,CAC/E,IAAMK,EAAaL,GAAY,YAAwB,GACvD,GAAIK,IAAe,cAAe,CAChC,GAAM,CAACC,EAAOC,CAAI,EAAIP,GAAY,mBAAyC,CAAC,GAAK,EAAG,EACpF,MAAO,CAAC,WAAAK,EAAY,MAAAC,EAAO,KAAAC,CAAI,CACjC,SAAWF,IAAe,OAAQ,CAChC,GAAM,CAACG,EAASC,CAAO,EAAIT,GAAY,mBAAyC,CAACU,GAAUC,EAAQ,EACnG,MAAO,CAAC,WAAAN,EAAY,QAAAI,EAAS,QAAAD,CAAO,CACtC,SAAWH,IAAe,YAAa,CACrC,GAAM,CAACC,CAAK,EAAIN,GAAY,mBAAiC,CAAC,GAAI,EAClE,MAAO,CAAC,WAAAK,EAAY,MAAAC,CAAK,CAC3B,CACA,MAAO,CAAC,WAAAD,CAAU,CACpB,IC7EJ,IAqBaO,GAeAC,GApCbC,GAAAC,EAAA,kBAqBaH,GAAc,CAACI,EAAmBC,IAAqB,CAClE,OAAQD,EAAW,CACjB,IAAK,GACH,OAAOC,EACT,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,QACE,MAAM,IAAI,MAAM,GAAGD,CAAS,8BAA8B,CAC9D,CACF,EAEaH,GAAeK,GAA6B;AAAA,QACjDA,EAAU,iDAAmD,EAAE;UCrCvE,IAqBaC,GArBbC,GAAAC,EAAA,kBAqBaF,GAAiBG,GAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAO3CA,CAAS,YAAYA,CAAS,YAAYA,CAAS;AAAA;IC5B7D,IA8BMC,GAiBAC,GAyBOC,GAuFPC,GAiBAC,GAKOC,GA0JPC,GA8EOC,GA7ZbC,GAAAC,EAAA,kBAqBAC,KAEAC,KAEAC,KACAC,KAEAC,KAEMd,GAA6B,CAACe,EAAoBC,IAClDD,EACK;AAAA;AAAA;AAAA,wDAG6CC,EAAY,iBAAmB,EAAE;AAAA,UAI9E;AAAA;AAAA;AAAA,gDAGqCA,EAAY,iBAAmB,EAAE;AAAA,UAK3Ef,GAAyB,CAACgB,EAAqBC,IAC/CD,EACK;AAAA;AAAA;AAAA;AAAA,UAIDC,IAAqB,EAAI,GAAK,6DAA6D;AAAA;AAAA;AAAA;AAAA;AAAA,YAKzFA,IAAqB,EAAI,GAAK,2CAA2C;AAAA,WAG1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMCA,IAAqB,EAAI,GAAK,yCAAyC;AAAA,WAKtEhB,GACT,CAACiB,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,KAAe,CACpF,IAAMC,EAAaL,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CO,EAAaN,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CQ,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EACtCP,EAAmBS,EAAaP,EAAc,CAAC,EAC/CS,EAAgBP,EAAYF,EAAc,CAAC,EAEjD,GAAI,GAAIH,GAAcC,IAAqB,GAAKC,EAAc,CAAC,IAAM,GAC7D,CAACF,IAAeC,IAAqB,GAAKA,IAAqB,KACjES,EAAaP,EAAc,CAAC,IAAM,GAAKE,EAAYF,EAAc,CAAC,IAAM,GAAKD,EAAc,CAAC,IAAM,GACtG,MAAM,IAAI,MAAM,iBAAiBF,CAAU,8BACvCC,CAAgB,yBAAyBC,EAAc,CAAC,CAAC;AAAA,oCACjCD,CAAgB;AAAA,eACrCS,CAAU,yCAAyCP,EAAc,CAAC,CAAC,eACtEE,CAAS,0CAA0CF,EAAc,CAAC,CAAC,kBACnED,EAAc,CAAC,CAAC,aAAa,EAEnC,MAAO;AAAA,yCAC4BD,CAAgB,IAAIG,CAAI,MAAMM,EAAaT,CAAgB,MAAMU,CAAU;AAAA,2CACzEP,CAAI,MAAMK,EAAaP,EAAc,CAAC,CAAC,MAAMG,CAAS;AAAA;AAAA,uBAE1EH,EAAc,CAAC,CAAC;AAAA,uBAChBA,EAAc,CAAC,CAAC;AAAA,2BACZD,CAAgB;AAAA,oBACvBI,CAAS;AAAA;AAAA,2BAEFF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAUrEG,EAAS,IAAM,iBAAiB;AAAA,IAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,8CACvCS,CAAU;AAAA;AAAA,oBAEpCF,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,iBACpGC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,wBAE9CH,CAAI;AAAA;AAAA;AAAA,8BAGEQ,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAM/B7B,GAA2BiB,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,0CAInBa,CAAa;AAAA;AAAA;AAAA,sFAI7Cb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAU/BE,IAAqB,EAAI,GAAK,4DAA4D;AAAA;AAAA,YAE1FjB,GAAuBgB,EAAYC,CAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5D,EAEEf,GAAyB,CAACY,EAAoBC,IAC9CD,EACK;AAAA;AAAA;AAAA,yCAG8BC,EAAY,iBAAmB,EAAE;AAAA,cAI/D;AAAA;AAAA;AAAA,iCAGsBA,EAAY,iBAAmB,EAAE;AAAA,cAK5DZ,GAA2Ba,GAC7BA,EAAa,gDAAkD,gDAItDZ,GACT,CAACc,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,GACtEM,EAA4B,KAAkB,CAC7C,IAAML,EAAaN,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CM,EAAaP,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CO,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EAE5C,GAAI,EAAE
G,EAAaR,EAAc,CAAC,IAAM,GAAKO,EAAaP,EAAc,CAAC,IAAM,GACzEE,EAAYF,EAAc,CAAC,IAAM,GACrC,MAAM,IAAI,MAAM,cAAcQ,CAAU,yCACpCR,EAAc,CAAC,CAAC,gBAAgBO,CAAU,yCAC1CP,EAAc,CAAC,CAAC,eAAeE,CAAS,yCAAyCF,EAAc,CAAC,CAAC,EAAE,EAEzG,IAAMW,EAAgBH,EAAaR,EAAc,CAAC,EAC5CY,EAAgBL,EAAaP,EAAc,CAAC,EAC5CS,EAAgBP,EAAYF,EAAc,CAAC,EAC3Ca,EAAgBH,EAClB;AAAA;AAAA;AAAA,gDAGsCL,CAAU;AAAA,gDACVC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA,iDAKTE,CAAU,2BAA2BR,EAAc,CAAC,CAAC;AAAA,mDACnDO,CAAU,2BAA2BP,EAAc,CAAC,CAAC;AAAA,YAC5FjB,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,iDAIRM,CAAS,2BAA2BF,EAAc,CAAC,CAAC;AAAA,uDAC9CM,CAAU,2BAA2BN,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,uCAGrEJ,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAO5CK,CAAI;AAAA;AAAA;AAAA,2DAG2BD,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,0BAI7DH,EAAa,oCAAoCG,EAAc,CAAC,CAAC,KACpD,iCAAiCA,EAAc,CAAC,CAAC,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAUzBA,EAAc,CAAC,CAAC;AAAA;AAAA,4DAEdA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,MAKlE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4CAMkCK,CAAU;AAAA;AAAA,kCAEpBM,CAAa;AAAA,kCACbC,CAAa;AAAA,kCACbH,CAAa;AAAA;AAAA;AAAA;AAAA,sCAITE,CAAa;AAAA,wCACXC,CAAa;AAAA;AAAA;AAAA,QAG7C7B,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sCAKfa,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMrBb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOvCK,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOpBjB,GAAwBa,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBrC,MAAO;AAAA,yCAC4BI,CAAI,KAAKM,CAAU,MAAMC,CAAU;AAAA,yCACnCP,CAAI,KAAKK,CAAU,MAAMJ,CAAS;AAAA,yBAClDH,EAAc,CAAC,CAAC;AAAA,yBAChBA,EAAc,CAAC,CAAC;AAAA,sBACnBG,CAAS;AAAA;AAAA,2BAEJF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,kBAInEG,EAAS,IAAM,iBAAiB;AAAA,MAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,sBAE7EO,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,mBACxFC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,4BAE5CH,CAAI;AAAA,MAC1BY,CAAa;AAAA;AAAA,CAGf,EAEE3B,GACF,CAAC4B,EAAmBC,EAAkBC,EAAyBC,EAC9DC,EAAuCC,EAAiB,KAAkB,CACzE,GAAM,CAACC,EAAaC,EAAaC,CAAU,EAAIJ,EACzC,CAACK,EAAeC,EAAWC,EAAWC,CAAc,EAAIT,EACxDU,EAAiBC,GAAiBR,EAAaE,CAAU,EACzDO,EAAiBD,GAAiBP,EAAaC,CAAU,EACzDQ,EAAWC,GAA4Bd,EAAU,CAAC,EAAE,KAAK,MAAM,EAC/De,EAAc,IAAM,CACxB,IAAMC,EAAQT,EAAU,KAClBU,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBX,EAAU,KAAK,OAAO,IACpD,QAASY,EAAIH,EAAQ,EAAI,EAAGI,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAV,EAAe,QAAQS,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcF,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBE,CACT,EACMG,EAAc,IAAM,CACxB,IAAMC,EAAQd,EAAU,KAClBS,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBV,EAAU,KAAK,OAAO,IACpD,QAASW,EAAIG,EAAQ,EAAI,EAAGF,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAR,EAAe,QAAQO,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcI,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBJ,CACT,EAwCA,MAvCe;AAAA,kEAC6CZ,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBkB,EAAY,CAAC;AAAA,kBACLR,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kEAKcD,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBwB,EAAY,CAAC;AAAA,kBACLb,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6DAKSe,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BACnEhB,CAAS;AAAA;AAAA;AAAA;AAAA,UAKzBC,EACI,mBAAmBI,EAAiB,cAAgB,GAAGqB,GAAY1B,EAAWgB,CAAQ,CAAC,aAAa,IAChE,EAAsC;AAAA,UAC9Ed,CAAe;AAAA,UACfU,EAAe,aAAa,oBAAqB,OAAO,CAAC;AAAA;AAAA;AAAA,KAK/D,EAESvC,GACT,C
AACsD,EAA+BC,EAAoDC,EACnFC,EACAzB,EAAiB,KAAyD,CACzE,IAAM0B,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAASL,EAAO,CAAC,EAAE,KACnBM,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAYL,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAC5FO,EAAYC,EAAU,KAAKF,CAAS,EACpCG,EAAYP,EAAOA,EAAO,OAAS,CAAC,EACpCQ,EAAWR,EAAOA,EAAO,OAAS,CAAC,EACnCS,EAAYR,EAAOA,EAAO,OAAS,CAAC,EACpCS,EAASF,EAAW,IAAM,GAAKC,EAAY,IAAM,EAGjDE,EAAoBJ,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDpD,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDyD,EAAW,CACf,KAAK,KAAKH,EAAYtD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKJ,EAAYpD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKN,EAAYlD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,CAC/D,EAEME,EAAaH,EAAS,EAAI,EAC1BI,EAAa,CAAC,GAAGZ,EAAYK,EAAWC,EAAWK,CAAU,EAC7DzB,EAAQ0B,EAAW,OACnBC,EAAa,CAAC,GAAGZ,EAAYK,EAAUC,EAAYI,CAAU,EAC7DnB,EAAQqB,EAAW,OACnBC,EAAkB,CAACX,EAAWE,EAAWE,EAAYI,CAAU,EAC/DI,EAAoC,CACxC,CAAC,OAAsB,KAAMV,CAAS,EAAG,CAAC,OAAsB,KAAME,CAAS,EAC/E,CAAC,OAAsB,KAAMD,CAAQ,CACvC,EACAU,GAA6BrB,EAAsBoB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2Bf,EAAWU,EAAYC,CAAU,CAAC,EACrF,IAAMK,EAAwD,CAAC,OAAQ,MAAM,EAEvElD,EAAU0B,EAAO,OAAS,EAC5B1B,IACF+C,EAAgB,KAAK,GAAGE,EAA2BvB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEwB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BH,CAAe,CAAC,EAEnE,IAAMK,EAAmBC,IAA+B,CACtD,IAAMjC,GAAYe,EAAU,OACtBrD,GAAYwE,GAAiB,YAAa3B,EAAO,CAAC,EAAE,SAAUP,GAAW,CAAC,EAC1EJ,GAAWC,GAA4BU,EAAO,CAAC,EAAE,QAAQ,EAEzD4B,EAAIC,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUR,EAAOyB,CAAU,EAC5Da,GAAID,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUF,EAAOmB,CAAU,EAC5Dc,GAAS9C,EAAe,SAAUe,EAAO,CAAC,EAAE,SAAUoB,EAAgB,OAAQH,CAAU,EACxFe,GAAiB,CAACJ,EAAGE,EAAC,EAC5B,GAAIxD,EAAS,CACX,IAAM2D,EAAiBvD,EAAiBuC,EAAa,EACrDe,GAAe,KAAKH,EAAc,OAAQ7B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQiC,CAAc,CAAC,CACtG,CACA,IAAMC,GACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAC7GC,GAAyBlC,EAAsBiC,EAAQ,EACvD,IAAME,GAAW9C,GAA4ByC,GAAO,KAAK,MAAM,EACzDxD,GAAkB8D,GAAqBpC,EAAsB8B,GAAO,KAAK,MAAOK,EAAQ,EACxFE,GAAmB7F,GACrBwE,EAAY3C,EAASC,GAAiB,CAACpB,GAAWyE,EAAGE,GAAGC,EAAM,EAAG,CAACzB,EAAYC,EAAYC,CAAS,EACnG9B,CAAc,EAClB,MAAO;AAAA,IAEHgD,GAAa,iBAAiBQ,EAAQ,EAAE,0BAA0B/E,EAAS,EAAE,iBACzE,GAAG6E,GAAgBD,EAAM,CAAC;AAAA,IACtCO,EAAgB;AAAA,IAERxB,EAASzE,GAA2B0E,EAAmBxD,EAAe8B,GAAUlC,EAAS,EAChFX,GAAuBuE,EAAmBxD,EAAe8B,GAAUlC,EAAS,CAAC;AAAA,oBAE5F,EACA,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAG4D,CAAiB,IAAId,EAAqB,UAAU,IAAIa,CAAM,IAAIpC,CAAc,GACzF,kBAAA8C,CACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGgB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAK,CACF,GACA,gBAAAI,CACF,CACF,ICtfJ,IAiCMc,GA4HOC,GA7JbC,GAAAC,EAAA,kBAqBAC,KACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAoBC,EAAoBC,EAAmBC,EAAU,GAC9FC,EAA4BC,EAAoB,EAAGC,EAAoB,EAAGC,EAAmB,EAC7FC,EAAW,QAAkB,CAC5B,IAAMC,EAAeF,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,uBACT,IAAK,GACH,MAAO,kBAAkBC,CAAQ,8CACnC,IAAK,GACH,MAAO,2BACT,QACE,MAAM,IAAI,MAAM,oBAAoBD,CAAgB,oBAAoB,CAC5E,CACF,EACMG,EAAeH,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,oDACT,IAAK,GACH,MAAO,wDACT,QACE,MAAM,IAAI,MAAM,oBAAoBA,CAAgB,oBAAoB,CAC5E,CACF,EACMI,EAAgBZ,EAAiB;AAAA;AAAA,MAGA;AAAA;AAAA,MAIjCa,EAAkBb,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCc,EAAUd,EAAiB,2BAA6B,2BACxDe,EAASf,EAAiB,2BAA6B,2BACvDgB,EAAMhB,EAAiB,MAAQ,MAC/BiB,EAAMjB,EAAiB,MAAQ,MAC/BkB,EAAe;AAAA;AAAA,qBAENlB,EAAiB,gCAAkC,+BAA+B;AAAA,mBACpFgB,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA,iBAELC,CAAG;AAAA,iBACHA,CAAG;AAAA;AAAA;AAAA,gBAGJA,CAAG;
AAAA,oBACCE,GAAYb,EAAmBG,CAAQ,CAAC;AAAA;AAAA;AAAA,8BAG9BK,CAAO,2BAA2BC,CAAM;AAAA,QAC9DH,CAAa;AAAA;AAAA,QAEbF,EAAYJ,CAAiB,CAAC;AAAA;AAAA,qBAI1Bc,EAAUpB,EAAkBC,GAAaE,EAAW;AAAA,wBACxCG,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SACbN,GAAYD,EAAY;AAAA,wBACxCI,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SAEzCY,EAAU,GAAGV,EAAYJ,CAAiB,CAAC,GAE3Ce,EAAUH,GAAYX,EAAkBC,CAAQ,EAChDc,EACFvB,EAAiBmB,GAAYb,EAAmBG,CAAQ,EAAIU,GAAYZ,EAAmBE,CAAQ,EACjGe,EACFxB,EAAiBmB,GAAYZ,EAAmBE,CAAQ,EAAIU,GAAYb,EAAmBG,CAAQ,EACjGgB,EAAkBC,GAAqBrB,EAAYiB,EAASb,CAAQ,EAsB1E,MArBiB;AAAA,yDACkCc,CAAK;AAAA,QACtDvB,EAAiBoB,EAAUC,CAAO;AAAA;AAAA;AAAA,yDAGeG,CAAK;AAAA,QACtDxB,EAAiBqB,EAAUD,CAAO;AAAA;AAAA;AAAA,gEAGsBE,CAAO;AAAA,0BAC7Cd,CAAgB;AAAA;AAAA;AAAA;AAAA,uBAInBR,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGa,CAAe;AAAA,QACfc,GAAYvB,CAAO,CAAC;AAAA,QACpBqB,CAAe;AAAA;AAAA;AAAA,MAKnB,EAESnC,GACT,CAACsC,EAA+BvB,EAA4BwB,EAAgCC,EAC3FC,EAAmBC,EAAkBC,EAAkBC,IAAoD,CAC1G,IAAMlC,EAAiBK,EAAW,SAAW,OACvC8B,EAAanC,EAAiB4B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAWrC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAYtC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAcvC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAASxC,IAAmBmC,EAAa,IAAM,GAAKA,EAAa,IAAM,IAAMI,EAAc,IAAM,EAGjGE,EAAYzC,EAAiBuC,EAAcF,EAAWC,EACtDI,EAAY1C,EAAiBqC,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,iCAAiCD,CAAQ,EAAE,EAEtE,IAAMrC,EAAmBgC,EAAUxC,GAAkBmC,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EY,EAAaJ,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDI,EAAaL,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDK,EAAY,KAAK,IAAIN,EAAc,CAAC,EAAInC,EAAkBmC,EAAc,CAAC,CAAC,EAC1E1C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAW6B,EAAWiB,IAAc,EACpCC,EAAeV,EAAS,CAAChC,EAAkB,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EAE3D2C,GAAoC,CACxC,CAAC,OAAsB,KAAMrB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAM,CAAC3B,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EAC7G,CAAC,OAAsB,KAAMA,EAAW,OAAO,EAAG,CAAC,OAAsB,KAAMA,EAAW,SAAS,CACrG,EACA+C,GAA6B/C,EAAY8C,EAAe,EACxDA,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAM0B,GAAwD,CAAC,OAAQ,MAAM,EACzErB,IACFkB,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE0B,GAAkB,KAAK,MAAM,GAE/BH,GAAgB,KAAK,GAAGE,EAA2BxB,CAAW,CAAC,EAE/D,IAAM0B,GAAmBC,IAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,MAAO,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ,CAAC,EAC9E,CAAC,KAAM,WAAY,KAAM,MAAO,OAAQ,CAAC,CAC3C,EACAC,GAAyBrD,EAAYoD,CAAQ,EAG7C,IAAME,GAAanB,EAAS,EAAI,EAC1BoB,GAAIC,GAA4BjC,EAAO,CAAC,EAAE,QAAQ,EACpDkC,GAAmB;AAAA,qDACsBtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,8BAChDpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,6EAEsBpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,qCAEjEpB,EAAS,MAAQ,EAAE;AAAA,SAE1CuB,GAAIC,EACN,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQpB,IAAqB,EAAI,EAAIA,CAAgB,EAC3FyD,GAAID,EAAc,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EAC5EO,GAAiB,CAACH,GAAGE,EAAC,EACtBE,GAASC,EAAe,SAAUxC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQ8B,EAAU,EAC1F,GAAI1B,EAAS,CACX,IAAMoC,EAAOL,EAAc,OAAQpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EAC
xFO,GAAe,KAAKG,CAAI,EACxBP,IAAoB;AAAA,0DAC4BtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,+BACpD5D,EAAiB,IAAM,GAAG,GAAGwC,EAAS,MAAQ,EAAE;AAAA,UAEvE,CAEA,MAAO;AAAA,UACL8B,GAAc,yBAAyB,CAAC;AAAA;AAAA;AAAA;AAAA,UAIxCd,GAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGS,GAAgBC,EAAM,CAAC;AAAA,UACnFL,EAAgB;AAAA,UAEdzE,GACIW,EAAgBC,EAAWC,EAAWC,EAAU8B,EAAS5B,EAAY6C,EAAa,CAAC,EAAGA,EAAa,CAAC,EACpGA,EAAa,CAAC,EAAGU,EAAC,CAAC;AAAA,UAEvBpB,EACI+B,GAA2B3B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,CAAS,EACrGuB,GACI5B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,EAAW,GAAO,OACnFf,CAAyB,CAAC,EACxC,EACA,MAAO,CACL,KAAM,eACN,YAAa,CACX,KAAM,GAAG7B,EAAW,QAAQ,IAAIG,CAAgB,IAAIgC,CAAM,IAAIvC,CAAS,IAAIC,CAAS,IAAIC,CAAQ,IAC5F4C,CAAU,IAAIC,CAAU,IAAIC,CAAS,GACzC,kBAAAK,EACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMzB,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,EACF,GACA,gBAAAI,EACF,CACF,IC9QJ,IA6BMkB,GAQAC,GAGAC,GAQAC,GAOAC,GAgBAC,GAmFOC,GA+DAC,GAzNbC,GAAAC,EAAA,kBAqBAC,KACAC,KAEAC,KAEAC,KAGMb,GAAgBc,GAAkB,CACtC,IAAIC,EAAU,EACd,QAASC,EAAI,EAAGA,EAAIF,EAAI,OAAQE,IAC9BD,GAAWD,EAAIE,CAAC,EAElB,OAAOD,CACT,EAEMd,GAAoBgB,GACtB,OAAOA,GAAU,SAAW,CAACA,EAAOA,EAAOA,CAAK,EAAIA,EAElDf,GAAyB,CAACgB,EAAoBC,IAC9CA,GAAY,EACPD,EAGFA,GAAcA,EAAa,IAAMC,EAAW,GAG/ChB,GACF,CAACiB,EAA+DC,EAAmBC,EAAgBH,EAAW,IAChG,CACR,IAAMI,EAAqBrB,GAAuBmB,EAAWF,CAAQ,EACrE,OAAO,KAAK,OAAOC,EAAW,CAAC,GAAKE,EAAS,GAAKA,EAASC,GAAsB,CAAC,CACpF,EAEFnB,GACF,CAACoB,EAA2CC,EAAuCC,EAClFC,EAAmCC,IAAuD,CACrFA,GAAW,OAEbA,EAAUzB,GAAkBqB,EAASC,EAAY,CAAC,EAAGE,EAAQ,CAAC,CAAC,GAEjE,IAAME,EAA6C,CAAC,EAAG,EAAG,EAAGH,CAAW,EACxE,QAASI,EAAQ,EAAGA,EAAQ,EAAGA,IACzBN,EAAQM,CAAK,EAAI,EAAIF,GAAWH,EAAYK,CAAK,IACnDD,EAASC,CAAK,EAAI,KAAK,OAAON,EAAQM,CAAK,EAAIL,EAAYK,CAAK,EAAI,EAAIF,GAAWD,EAAQG,CAAK,EAAI,CAAC,GAGzG,OAAOD,CACT,EAEExB,GACF,CAAC0B,EAA6BC,EAAiBC,EAAkBC,EAAiBC,EACjFC,EAAsBC,EAAqBC,EAAqBC,EAChEC,IAAqG,CACpG,IAAIC,EACAC,EACAC,EACAC,EAOJ,GALIb,IAAQ,UAEVA,EAAM,GAGJ,OAAOA,GAAQ,SAAU,CAC3BU,EAAU,CAAC,IAAKV,EAAK,OAAQA,EAAK,KAAMA,EAAK,MAAOA,EAAK,MAAOA,EAAK,KAAMA,CAAG,EAC9E,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,CAAG,EACjDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAW,MAAM,QAAQE,CAAG,EAAG,CAC7B,GAAI,CAACA,EAAI,MAAM,CAACc,EAAKC,EAAGhC,IAAQ+B,IAAQ/B,EAAI,CAAC,CAAC,EAC5C,MAAM,MAAM,kCAAkCiB,CAAG,EAAE,EAErDU,EAAU,CAAC,IAAKV,EAAI,CAAC,EAAG,OAAQA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,CAAC,EAChG,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,EAAI,CAAC,CAAC,EACpDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAWE,IAAQ,aAAc,CAE/BW,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1CQ,EAAY,KAAK,KAAKV,EAAWG,CAAY,EAC7CQ,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1C,IAAMU,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,GAAkBL,EAAY,GAAKP,EAAeG,EAAeN,EACjEgB,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,EAAQ,KAAK,MAAMH,EAAgB,CAAC,EACpCI,EAAOJ,EAAgBG,EACvBE,EAAM,KAAK,MAAMJ,EAAiB,CAAC,EACnCK,EAASL,EAAiBI,EAC1BE,EAAO,KAAK,MAAML,EAAgB,CAAC,EACnCM,EAAQN,EAAgBK,EAE9Bb,EAAU,CAAC,IAAAW,EAAK,OAAAC,EAAQ,KAAAC,EAAM,MAAAC,EAAO,MAAAL,EAAO,KAAAC,CAAI,CAClD,KACE,OAAM,MAAM,8BAA8BpB,CAAG,EAAE,EAEjD,MAAO,CAAC,QAAAU,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,CAAQ,CAChD,EA8BStC,GACT,CAACkB,EAAmDC,EACnDE,EAA0C6B,EAA4CzB,EACtF0B,EAAY,GAAOC,EAA6C,iBAA+B,CAC9F,IAAIC,EAAW3B,EAASC,EAAUC,EAAS0B,EAC3C,GAAIF,IAAe,eACjB,CAACC,EAAW3B,EAASC,EAAUC,EAAS0B,CAAU,EAAIpC,UAC7CkC,IAAe,gBACxB,CAACC,EAAWC,EAAY5B
,EAASC,EAAUC,CAAO,EAAIV,MAEtD,OAAM,IAAI,MAAM,sBAAsBkC,CAAU,EAAE,EAEpD,GAAM,CAACG,EAAgB,CAAEvB,EAAaC,EAAcC,CAAW,EAAIf,EAE7D,CAACU,EAAaC,EAAcC,CAAW,EAAIpC,GAAiB0B,CAAO,EACnE,CAACmC,EAAeC,EAAgBC,CAAa,EAAI/D,GAAiBuD,CAAS,EAE3ES,EAAuB/D,GAAuBoC,EAAawB,CAAa,EACxEI,EAAwBhE,GAAuBqC,EAAcwB,CAAc,EAC3EI,EAAuBjE,GAAuBsC,EAAawB,CAAa,EACxE,CAAC,QAAAvB,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,EAAQ,EAAIvC,GAC7C0B,EAAKC,EAASC,EAAUC,EAASC,EAAaC,EAAcC,EAAa4B,EACzEC,EAAuBC,CAAoB,EAEzCzC,GAAc+B,EAAYI,EAAiBD,EAAaC,EAE1DhC,GAAqD,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACvE,OAAI6B,IAAe,gBACjB7B,GAAW,CAAC8B,EAAWjC,GAAagB,EAAUC,EAAWC,EAAQ,EACxDc,IAAe,iBACxB7B,GAAW,CAAC8B,EAAWjB,EAAUC,EAAWC,GAAUlB,EAAW,GAG5D,CACL,UAAAiC,EACA,WAAAD,EACA,QAAA1B,EACA,SAAAC,EACA,QAAAC,EACA,WAAA0B,EACA,SAAAlB,EACA,UAAAC,EACA,SAAAC,GACA,YAAAlB,GACA,QAAAe,EACA,YAAAN,EACA,aAAAC,EACA,YAAAC,EACA,YAAAC,EACA,aAAAC,EACA,YAAAC,EACA,qBAAAyB,EACA,sBAAAC,EACA,qBAAAC,EACA,cAAAL,EACA,eAAAC,EACA,cAAAC,EACA,QAAAxC,EACA,SAAAK,GACA,YAAAJ,CACF,CACF,EAESlB,GACT,CAAC6D,EAA+BC,EAA4BC,EAC3DC,EAA+BC,EAAyBd,IAAoC,CAC3F,IAAMe,EAAiBf,IAAe,eAChCE,EAAaa,EAAiBL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAElEM,EAAS,GACTC,EAA0C,CAAC,GAAI,EAAG,CAAC,EACnDC,EAAiB,CAAC,EAAGN,EAAY,IAAI,CAACxB,EAAG9B,IAAMA,CAAC,CAAC,EACjD6D,EAAW,CAAC,KAAK,KAAK7E,GAAa4E,EAAe,EAAE,IAAIE,GAAKR,EAAYQ,CAAC,CAAC,CAAC,EAAKH,EAAc,CAAC,CAAE,EAAG,EAAG,CAAC,EAE/GI,GAAU,UAAW,IAAM,oCAAoCF,CAAQ,EAAE,EAEzE,IAAMG,EAAmBN,EAAUD,GAAkBb,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EqB,EAAaC,EAAU,KAAKZ,CAAW,EACvCa,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,CAAU,EACnF,CAAC,QAAuB,KAAMC,CAAI,EAAG,CAAC,QAAuB,KAAMH,EAAW,OAAO,EACrF,CAAC,QAAuB,KAAMA,EAAW,SAAS,CACpD,EACAc,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAMiB,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAUlB,EAAO,SAAW,EAC9BkB,IACFH,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEiB,EAAkB,KAAK,MAAM,GAE/BF,EAAgB,KAAK,GAAGC,EAA2Bd,CAAW,CAAC,EAE/D,IAAMiB,EAAmBC,GAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQlB,EAAW,MAAM,EAChG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAC/C,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQH,EAAW,QAAQ,MAAM,EAChE,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,UAAU,MAAM,CACtE,EAEMqB,EAAahB,EAAS,EAAI,EAC1BiB,EAAIC,GAA4BxB,EAAO,CAAC,EAAE,QAAQ,EAElDyB,EAAIC,EACN,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQY,IAAqB,EAAI,EAAIA,CAAgB,EAC3Fe,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EAC5EM,EAAiB,CAACH,EAAGE,CAAC,EACtBE,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUE,EAAY,OAAQoB,CAAU,EACtFS,EAAmB,GACvB,GAAIb,EAAS,CACX,IAAMc,EAAON,EAAc,OAAQ1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EACxFM,EAAe,KAAKI,CAAI,EACxBD,GAAoB;AAAA,8DACgCzB,EAAS,QAAQiB,CAAC,IAAMA,CAAC;AAAA,wBAC/DlB,EAAiB4B,EAAa,SAAU,EAAG,CAAC,EAAIA,EAAa,SAAU,EAAG,CAAC,CAAC,GACtF3B,EAAS,MAAQ,EAAE;AAAA,UAEzB,CAEA,MAAO;AAAA,cACDyB,CAAgB;AAAA;AAAA;AAAA,uBAGPN,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA,uBAI1BE,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA,YAErCP,EAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGO,EAAgBC,CAAM,CAAC;AAAA,YACnFT,EAAa,UAAU,CAAC;AAAA,YACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACzDS,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACrCI,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,yBAEjDpB,EAAiB4B,EAAa,SAAUR,EAAE,KAAO,EAAGA,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,2CAE/FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAClFpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAE1FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAChCQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA,8BAKlDpB,EAAiB4B,EAAa,
mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAyB1GpB,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAUjBA,EAAiB;AAAA,0EAEA;AAAA,yEAC4C;AAAA;AAAA,wBAG7DA,EAAiB;AAAA;AAAA;AAAA,wBAIA;AAAA;AAAA;AAAA,qBAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOTA,EAAiB;AAAA;AAAA;AAAA;AAAA,wBAKA;AAAA;AAAA;AAAA;AAAA,qBAIR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAULa,EAAU,oDAAsD,EAAE;AAAA;AAAA,YAG5E,EACA,MAAO,CACL,KAAM,cACN,YACI,CAAC,KAAM,GAAGjB,EAAW,QAAQ,IAAII,CAAc,IAAIO,CAAgB,IAAIM,CAAO,GAAI,kBAAAD,CAAiB,EACvG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGS,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,CACF,GACA,gBAAAI,CACF,CACF,ICtZJ,IAgBae,GAuGAC,GAvHbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAMaR,GACT,CAACS,EAA+BC,EAC/BC,IAAqF,CACpF,IAAMC,EAAUH,EAAO,OAAS,EAC1BI,EAAcD,EAAU,8BAAgC,GACxDE,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KACnBO,EAAyBD,EAAO,CAAC,EAAIL,EAAW,MAEhDO,EAAgBP,EAAW,SAAW,OACtCQ,EAAcC,GAChBL,EAAQC,EAAQL,EAAW,UAAWA,EAAW,KAAMA,EAAW,QAASO,CAAa,EACtFG,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,EAAW,SAAS,EAC7F,CAAC,QAAuB,KAAM,CAACA,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC5E,CAAC,QAAuB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EACtE,CAAC,QAAuB,KAAMM,CAAsB,CACtD,EACAO,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,CAAM,CAAC,EAClE,IAAMU,EAAwD,CAAC,OAAQ,MAAM,EACzEb,IACFU,EAAgB,KAAK,GAAGE,EAA2Bf,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEgB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BN,CAAW,CAAC,EAE/D,IAAMQ,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEY,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,MAAM,EACxDsB,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,MAAM,EACxDsB,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,CAAC,EAG9E,IAAM6B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ5B,EAAW,UAAU,MAAM,EACxG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,EAChF,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACA,OAAA6B,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA;AAAA,IAE9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,0BAEtDC,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,8CAEhBX,EAAgB,EAAI,CAAC;AAAA,yDACVA,EAAgB,EAAI,CAAC,oBAClEA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA,iBAGhBW,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMCX,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMrBA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA,uBAKnEA,EAAgBiB,EAAE,IAAI,QAAS,UAAW,SAAU,eAAe,EACnDA,EAAE,IAAI,QAAS,gBAAiB,UAAW,QAAQ,CAAC;AAAA,uBACzDE,EAAE,IAAI,iBAAkB,aAAc,UAAW,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,MAK3EvB,CAAW;AAAA,MACXmB,CAAe;AAAA,MACfJ,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAEzC,EACA,MAAO,CACL,KAAM,cACN,YAAa,CAAC,KAAMlB,EAAW,SAAU,kBAAAe,CAAiB,EAC1D,WAAY,KAAO,CACjB,QAAS,CAAC,CACR,KAAMd,EAA6BA,EAA2BO,CAAW,EAAIA,EAC7E,SAAUT,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,cAAe
,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,EAESzB,GACT,CAACQ,EAA+BC,EAA4BQ,IAAgD,CAC1G,IAAMN,EAAUH,EAAO,OAAS,EAC1B+B,EAAaC,GAAiBvB,EAAY,CAAC,CAAC,EAC5CwB,EAAeD,GAAiBvB,EAAY,CAAC,CAAC,EAC9CE,EAAaC,EAAU,KAAKH,CAAW,EAAIsB,EAAaE,EACxD5B,EAAS,CAACL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGzB,EAAS,CAACN,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGG,EAAsB,CAACzB,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAIsB,CAAU,EAElGlB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EACxC,CAAC,OAAsB,KAAM,CAACV,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC3E,CAAC,OAAsB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,CACvE,EACAa,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,EAAQ4B,CAAmB,CAAC,EACvF,IAAMC,GAAWF,EAAe,GAAKhC,EAAW,QAAQ,CAAC,EAAIK,EAAO,CAAC,EAC/DW,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUkC,EAAoB,OAAQH,CAAU,EAC5FV,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQ0B,CAAU,EACpEJ,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQyB,CAAU,EACpEH,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM+B,CAAU,CAAC,EAEnF,IAAM3B,EAAcD,EAAU,8BAAgC,GACxD0B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EACxC,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,CACvC,EACA,OAAAC,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA,IAC9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,8CAIlCe,CAAY;AAAA,oCACtBA,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOxBR,EAAE,KAAK,KAAK,KAAKU,CAAO;AAAA,wBACxBhB,EAAO,KAAK,KAAK,KAAKc,CAAY;AAAA;AAAA;AAAA,8CAGZ3B,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGzB6B,CAAO;AAAA;AAAA;AAAA,0BAGXV,EAAE,IAAI,QAAS,gBAAiB,eAAgB,eAAe,CAAC;AAAA;AAAA,0BAEhEA,EAAE,KAAK,KAAK;AAAA;AAAA;AAAA,gDAGUnB,EAAO,CAAC,CAAC;AAAA,wBACjCqB,EAAE,IAAI,WAAY,UAAW,IAAK,gBAAgB,CAAC;AAAA,iCAC1CM,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOlBA,CAAY;AAAA;AAAA,QAE/B7B,CAAW;AAAA,QACXmB,CAAe;AAAA,QACfJ,EAAO,IAAI,QAAS,MAAO,UAAW,iBAAkB,OAAO,CAAC;AAAA;AAAA,IAGlE,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CACX,KAAM,GAAGlB,EAAW,QAAQ,IAAI8B,CAAU,IAAIE,CAAY,IAAIE,CAAO,IAAI7B,EAAO,CAAC,CAAC,IAAIA,EAAO,CAAC,CAAC,GAC/F,kBAAmBH,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMM,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,ICvNJ,IAYamB,GA6IPC,GAUOC,GAnKbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAEaT,GACT,CAACU,EAA+BC,EAAoDC,EACnFC,EACAC,EAAiB,KAAyD,CACzE,IAAMC,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KAEnBO,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIJ,EAAOA,EAAO,OAAS,CAAC,EAC5BK,EAAaC,GAAiBH,CAAC,EAC/BI,EAAcD,GAAiBF,CAAC,EAChCI,EAAeF,GAAiBJ,CAAC,EACjCO,EAAaC,EAAU,KAAKb,CAAW,EAAIQ,EAAaG,EACxDG,EAAUhB,EAAO,OAAS,EAC1BiB,EAAYd,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAE5FgB,EAAsB,CADVH,EAAU,KAAKE,CAAS,EACFV,EAAGC,CAAC,EAEtCW,EAAoC,CACxC,CAAC,QAAuB,KAAML,CAAU,EAAG,CAAC,QAAuB,KAAMP,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,CACjC,EACAW,GAA6BnB,EAAsBkB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2BJ,EAAWZ,EAAQC,CAAM,CAAC,EACzEU,GACFG,EAAgB,KAAK,GAAGE,EAA2BrB,EAAO,CAAC,EAAE,IAAI,CAAC,EAEpEmB,EAAgB,KAAK,GAAGE,EAA2BH,CAAmB,CAAC,EAEv
E,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAYC,GAAiB,aAAczB,EAAO,CAAC,EAAE,SAAUiB,EAAU,MAAM,EAC/ES,EAAIC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQO,CAAW,EACrEgB,EAAID,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQI,CAAU,EACpEmB,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUkB,EAAoB,OAAQR,CAAU,EAC5FqB,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBjC,EAAsB4B,EAAO,KAAK,MAAOE,CAAQ,EACxFI,EAAiB,CAACT,EAAGE,CAAC,EACxBQ,GAAc,GAClB,GAAIpB,EAAS,CACX,IAAMqB,GAAiBjC,EAAiBM,EAAa,EACrDyB,EAAe,KAAKR,EAAc,OAAQ3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQqC,EAAc,CAAC,EACpGD,GAAc,GACVhC,EAAiB,uBAAuBiC,EAAc,KACrC,YAAYR,EAAO,KAAK,KAAK,kBAAkB,EACtE,CAEA,IAAMS,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAiBC,GAAiBH,GAAYrB,CAAS,EACvDyB,EAAiBD,GAAiBF,GAAYtB,CAAS,EACvD0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EACrF,CAAC,KAAM,IAAK,KAAM,KAAK,CACzB,EACAC,GAAyB3C,EAAsB0C,EAAQ,EAEvD,IAAME,GAAa,CAACC,GAAyBC,KAA4B,CACvE,IAAMC,GAAOF,GAAS,KAChBG,GAAOH,GAAS,KACtB,GAAIE,KAAS,EACX,MAAO,OAAOC,EAAI,cAAcH,GAAS,KAAK,OAAO,YAEvD,IAAMI,EAAY1B,EAAU,KACxB2B,GAAS,OAAOF,EAAI,aAAaH,GAAS,KAAK,OAAO,IAC1D,QAASM,GAAIJ,GAAO,EAAI,EAAGK,GAAIH,EAAY,EAAGE,IAAK,EAAGA,KAAKC,KACzDF,IAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,OAAOF,EAAY,EAAI,iBAAiBG,EAAC,IAAM,eAAe,IAEhG,OAAAN,GAAc,QAAQK,IAAK,CACzBD,IAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,QAClC,CAAC,EACDD,IAAU,GAAGF,EAAI,YAAYD,GAAO,CAAC;AAAA,uBACxBC,EAAI,YAAYD,GAAO,CAAC,UAC9BG,EACT,EAEMG,GAAa,IAAc,CAC/B,IAAIC,GAAU,eAAe7B,EAAE,KAAK,KAAK,IACzC,QAAS0B,GAAI,EAAGA,GAAIxC,EAAawC,KAC/BG,IAAW;AAAA,0BACGH,EAAC,yBAAyBA,EAAC,2BAA2B1C,CAAU,KAEhF,QAAS0C,GAAI,EAAGA,GAAIvC,EAAcuC,KAAK,CACrCG,IAAW,iCAAiCH,EAAC,yBAAyBxC,CAAW,KAEjF,QAASyC,GAAI,EAAGA,GAAIzC,EAAayC,KAC/BE,IAAW;AAAA,qBACJH,EAAC,WAAWxB,EAAE,KAAK,KAAK,UAAUhB,IAAgB,EAAI,GAAK,IAAIyC,EAAC,GAAG,YAAYA,EAAC,YACnFD,EAAC;AAAA,CAET,CACA,OAAOG,EACT,EAEA,MAAO;AAAA,IAEHhC,EAAa,iBAAiBoB,EAAQ,EAAE,0BAA0BnB,CAAS,EAAE,iBACzE,GAAGW,EAAgBN,CAAM,CAAC;AAAA,IACtCN,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,4CACpCb,CAAU,QAAQA,CAAU;AAAA,8CAC1BA,CAAU;AAAA,iCACvBG,CAAY;AAAA,qCACRA,CAAY;AAAA;AAAA;AAAA,MAG3CX,EAAY,SAAW,EAAI,GAAK,uBAAuBsB,EAAU,gBAAgB,OAAO,CAAC,GAAG;AAAA,MAC5FqB,GAAWnB,EAAGc,EAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,MAC7CmB,GAAWjB,EAAGc,CAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,wBAC3BC,EAAO,KAAK,KAAK,KAAKhB,CAAY;AAAA,oDACND,CAAW;AAAA,QACvD0C,GAAW,CAAC;AAAA;AAAA,2BAEOzC,CAAY;AAAA;AAAA,QAE/BuB,EAAW;AAAA,QACXH,CAAe;AAAA,0BACGJ,EAAO,KAAK,OAAO;AAAA,qBACxBA,EAAO,gBAAgB,aAAa,CAAC;AAAA,QAClDA,EAAO,YAAY,YAAYnB,CAAU,GAAI,OAAO,CAAC;AAAA;AAAA;AAAA,GAIvD,EACA,MAAO,CACL,KAAM,cACN,YAAa,CACX,KAAM,GAAGT,EAAqB,UAAU,IAAIS,CAAU,IAAIE,CAAW,IAAIC,CAAY,IAAIT,CAAc,GACvG,kBAAmBY,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMd,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAClE,gBAAAK,CACF,GACA,gBAAAG,CACF,CACF,EAEE/B,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,CAEtD,EAEaR,GAAUgE,GAAkC,CACvDjE,GAAeiE,EAAQ,MAAM,EAC7B,IAAMtD,EAAcuD,GAAc,UAAUD,EAAQ,OAAO,CAAC,EAAE,KAAMA,EAAQ,OAAO,CAAC,EAAE,KAAM,EAAI,EAChG,GAAI,CAACtD,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMM,EAAIN,EAAYA,EAAY,OAAS,CAAC,EACtCO,EAAI+C,EAAQ,OAAO,CAAC,EAAE,KAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EAC9DhD,EAAI,GAAKC,EAAI,EACf+C,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,EAE3FsD,EAAQ,QAAQE,G
AAwBF,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,CAE1F,IChLA,IAgBayD,GA6BPC,GAEAC,GAkDAC,GAmBOC,GA0BPC,GAyIAC,GA0BAC,GAeOC,GAhUbC,GAAAC,EAAA,kBAIAC,KAIAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEalB,GACT,CAACmB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,EAA4BC,IAAqC,CAC/F,IAAMC,EAAYN,EAAW,CAAC,EACxBO,EAAoBP,EAAW,MAAMK,EAAgB,EAAI,EAAGA,EAAgB,EAAI,CAAC,EACjFG,EAAcD,EAAkB,OAChCE,EAAcR,EAAY,CAAC,EAE3BS,EADqBT,EAAY,MAAM,CAAC,EACA,IAAI,CAACU,EAAGC,IAAMD,GAAKA,EAAI,IAAMT,EAAUU,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIR,EAAWS,CAAC,EAAIT,EAAWS,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIR,EAAQQ,CAAC,GAAKR,EAAQQ,CAAC,CAAC,CAAC,EAC5G,OAAAC,EAAY,OAAO,EAAG,EAAGP,CAAS,EAClCO,EAAY,OAAOR,EAAgB,EAAI,EAAG,EAAGI,CAAW,EACjDI,CACT,EAcE/B,GAA2B,CAAC,EAAG,EAAG,EAAG,CAAC,EAEtCC,GAAiB,CAAC+B,EAA+BC,IAAqC,CAG1F,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAME,EAAcF,EAAO,CAAC,EAAE,KAAKC,EAAW,SAAW,OAASD,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFG,EAAkBH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAIC,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAIH,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMN,EAAcM,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWP,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIO,EAAW,QAAQ,SAAWP,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIO,EAAW,KAAK,SAAWP,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIO,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAEM9B,GAA4B,CAA2B+B,EAAeD,IAAqC,CAC/G,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,QAAS,EAAI,EAAG,EAAID,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAE,EACvCb,EAAY,EAAI,CAAC,IAAM,IACzBA,EAAY,EAAI,CAAC,EAAIa,EAAO,CAAC,EAAE,KAAK,CAAC,GAGzC,IAAMI,EAAOH,EAAW,KAAK,MAAM,EACnCI,GAAa,yBACTL,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAaiB,EAAMH,EAAW,SAAW,OACnGA,EAAW,OAAO,EAGtB,IAAMK,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAiB,CAAI,CAAC,EACzCE,CACT,EAEanC,GAAuB8B,GAAwD,CAC1F,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBS,EAAU,CAAC,SAAU,QAAS,aAAc,YAAY,EAAET,EAAW,QAAkB,EACvFb,EAAYa,EAAW,UACvBU,EAAQV,EAAW,MACnBd,EAAcc,EAAW,aACzBG,EAAOH,EAAW,KAClBX,EAAUW,EAAW,QACrBW,EAAYX,EAAW,WAA6B,EAE1D,MAAO,CACL,QAAAS,EACA,OAAAD,EACA,UAAArB,EACA,MAAAuB,EACA,YAAAxB,EACA,KAAAiB,EACA,QAAAd,EACA,SAAAsB,EACA,GAAGL,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEMnC,GAAS,CAACyC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EAKjEe,EAAiBd,EAAW,SAAW,OAC7C,GAAIA,EAAW,QAAU,EAAG,CAM1B,GADmC,CAACY,EAAQ,YAAY,eAAe,QAAQ,GAC7CE,GAAkBf,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMC,EAAW,OACjFD,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAKC,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,EAAG,CAC7F,IAAMF,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZC,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAEhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3ChB,EAAO,SAAW,GACpBkB,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAE3Ba,EAAQ,QACJM,GAAsCD,EAAYJ,EAAoBf,CAAW,EAAG,CAAC,OAAQmB,CAAU,CAAC,CAC9G,MACEL,EAAQ,QAAQO,GAA6BpB,EAAQc,CAAkB,CAAC,EAE1E,MACF,CAEA,IAAMO,EAAUrB,EAAO,SAAW,EAC5BsB,EAActB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACnDQ,
EAAavB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EAClDS,EAAgBxB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACrDU,EAAezB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/B0B,EAAc1B,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BD,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZY,EAAY5B,EAAYgB,EAAiB,EAAI,CAAC,EAC9Ca,EAAW7B,EAAYgB,EAAiB,EAAI,CAAC,EAC7CpB,EAAcI,EAAYgB,EAAiB,EAAI,CAAC,EAEhDc,EAAWd,GAAkBU,IAAiBH,GAAeI,IAAgBH,GAC/EtB,EAAW,KAAK,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,EACvD,GAAI4B,GACCJ,IAAiB,GAAKC,IAAgB,GAAKzB,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,GACxGA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,GACrFA,EAAW,KAAK,CAAC,IAAM,EAAI,CAE9B,IAAM6B,EAAQ/B,EAAY,CAAC,EACvBgC,EAAWC,EAAWC,EACpBC,EAAe,CAAC,EACtB,GAAInB,EAAgB,CAClB,IAAMC,GAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAIlE,GAHIA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,IAE5Ba,EAAU,CACZ,IAAMM,GAAYb,EAAcC,EAAaC,EAC7CO,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAG8B,EAAOK,EAAS,CAAC,EACnDH,EAAYhB,GAAiB,QAAQ,CAAC,EAAGmB,GAAWxC,CAAW,CAAC,EAChEsC,EAAoB,CAAC,EAAGH,EAAOnC,CAAW,CAC5C,MACEoC,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAOR,EAAcC,EAAYC,CAAa,CAAC,EAC9EQ,EAAYhB,GAAiB,QAAQ,CAAC,EAAGQ,EAAe7B,CAAW,CAAC,EACpEsC,EAAoB,CAACH,EAAOH,EAAYC,EAAUjC,CAAW,EAE/DuC,EAAa,KAAKH,CAAS,EAC3BG,EAAa,KAAKF,CAAS,CAC7B,MACED,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAON,EAAeF,EAAcC,CAAU,CAAC,EAC9ES,EAAYhC,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAGL,EAAa6B,CAAa,CAAC,EAC7DS,EAAoB,CAACH,EAAOnC,EAAagC,EAAYC,CAAQ,EAC7DM,EAAa,KAAKF,CAAS,EAC3BE,EAAa,KAAKH,CAAS,EAEzBV,GACFa,EAAa,KAAKlC,EAAO,CAAC,CAAC,EAE7B,IAAMoC,EAAIH,EAAkB,CAAC,EACvBI,GAAIH,EAAa,CAAC,EAAE,KAAKA,EAAa,CAAC,EAAE,KAAK,OAAS,CAAC,EAE1DE,EAAI,GAAKC,GAAI,EACfxB,EAAQ,QACJyB,GACIJ,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACpF,CAAC,OAAQmB,CAAY,CAAC,EAE1BrB,EAAQ,QACJ0B,GAAwBL,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACxG,CAAC,OAAQmB,CAAY,CAAC,EAE5B,MACF,CAIA,IAAMM,EAAgE,GAGhExB,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAIhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3CK,GACFH,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAI3B,IAAMyC,EAAY1B,EAAiBY,EAAYC,EAAWjC,EACpD+C,EAAY3B,EAAiBpB,EAAcgC,EAAYC,EACvDe,EAAWlB,EAAeC,EAAcF,EAC9CX,EAAQ,QACJ+B,GACI1B,EAAYJ,EAAoBf,EAAa0C,EAAWC,EAAWC,EAAUtB,EAC7EmB,CAAyB,EAC7B,CAAC,OAAQtB,CAAU,CAAC,CAC1B,EAEM7C,GAAS,CAACwC,EAAyBZ,IAAqC,CAE5E,IAAMV,EAAgBU,EAAW,SAAW,OACtCD,EAAS,CACba,EAAQ,OAAO,CAAC,EAAE,QACdtB,EAEI,CAACsB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5Bb,EAAO,KAAKa,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAMT,EAAO,CAAC,EAAGH,EAAW,KAAK,CAAC,EAAG,EAAGA,EAAW,KAAK,CAAC,CAAC,EACpDX,EAAU,CAAC,CAAC,EAAE,OAAOW,EAAW,OAAO,EACvCb,EAAY,CAAC,CAAC,EAAE,OAAOa,EAAW,SAAS,EAC3Cd,EAAc,CAAC,CAAC,EAAE,OAAOc,EAAW,WAAW,EAC/Ca,EAAqB5C,GAA0B,CAAC,GAAG+B,EAAY,KAAAG,EAAM,QAAAd,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGa,CAAM,EACnHa,EAAQ,QAAQO,GACZpB,EAAQc,EACRf,GAAeR,EAAgB,CAACQ,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAAI,CAAC,CAAC,CAAC,CAC3F,EAEMzB,GAAS,CAACuC,EAAyBb,EAA+BC,
IAAqC,CAC3G,IAAMQ,EAASR,EAAW,SAAW,OAAS,eAAiB,gBACzDa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EACjEI,EAAOH,EAAW,UAAY,SAAWA,EAAW,KAAOA,EAAW,QACtE4C,EAAWC,GACb9C,EAAO,CAAC,EAAE,KACVA,EAAO,CAAC,EAAE,KACVC,EAAW,QACXA,EAAW,UAAgDG,EAA2B,GAAOK,CAAM,EACvGI,EAAQ,QAAQkC,GACZ/C,EAAQc,EAAoB+B,EAAS,SACrC,CAACA,EAAS,YAAaA,EAAS,aAAcA,EAAS,WAAW,EAClE,CAACA,EAAS,QAAQ,MAAOA,EAAS,QAAQ,IAAKA,EAAS,QAAQ,IAAI,EAAGpC,CAAM,CAAC,CACpF,EAEalC,GAAO,CAACsC,EAAyBZ,IAAqC,CACjFhC,GAAe4C,EAAQ,OAAQZ,CAAU,EACrCY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpCxC,GAAOwC,EAASZ,CAAU,EACjBY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAC3CvC,GAAOuC,EAASA,EAAQ,OAAQZ,CAAU,EAE1C7B,GAAOyC,EAASA,EAAQ,OAAQZ,CAAU,CAE9C,ICzUA,IAiCM+C,GA2HOC,GA5JbC,GAAAC,EAAA,kBAqBAC,KACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAU,GAAOC,EAAqCC,EAC/EC,EAAmB,IAAc,CAChC,IAAMC,EAAeD,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,sEACT,IAAK,GACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQED,CAAI;AAAA,cAEf,QACE,MAAM,IAAI,MAAM,oBAAoBC,CAAgB,oBAAoB,CAC5E,CACF,EACME,EAAgBN,EAAiB;AAAA;AAAA,QAGA;AAAA;AAAA,QAIjCO,EAAkBP,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCQ,EAAUR,EAAiB,2BAA6B,2BACxDS,EAAST,EAAiB,2BAA6B,2BACvDU,EAAMV,EAAiB,MAAQ,MAC/BW,EAAMX,EAAiB,MAAQ,MAE/BY,EAAe;AAAA,yBACFZ,EAAiB,2BAA6B,0BAA0B;AAAA,uBAC1EA,EAAiB,gCAAkC,+BAA+B;AAAA,qBACpFU,CAAG;AAAA,qBACHA,CAAG;AAAA;AAAA,mBAELC,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA;AAAA,kCAGYH,CAAO;AAAA,iBACxBL,CAAI;AAAA;AAAA,kCAEaM,CAAM;AAAA,iBACvBN,CAAI;AAAA;AAAA;AAAA;AAAA,kBAIHQ,CAAG;AAAA,QACbL,CAAa;AAAA,0EACqDF,CAAgB,KAE9ES,EAAUb,EAAiB;AAAA,0BACbI,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SACoB;AAAA,0BACbC,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SAEPW,EAAU;AAAA,0BACIV,CAAgB;AAAA,yBACjBJ,EAAiB,2BAA6B,0BAA0B;AAAA;AAAA;AAAA,YAIvFA,EAAiB,yDACA,wDAAwD;AAAA;AAAA;AAAA,UAGzEK,EAAYD,CAAgB,CAAC;AAAA;AAAA,eAExBD,CAAI;AAAA,QAGPY,EAAkBC,GAAqBd,EAAYC,CAAI,EAqB7D,MApBiB;AAAA,uDACgCA,CAAI;AAAA,MACrDH,EAAiBa,EAAUC,CAAO;AAAA;AAAA;AAAA,uDAGeX,CAAI;AAAA,MACrDH,EAAiBc,EAAUD,CAAO;AAAA;AAAA;AAAA,iEAGyBV,CAAI;AAAA,wBAC7CC,CAAgB;AAAA;AAAA;AAAA,uBAGjBJ,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGO,CAAe;AAAA,QACfU,GAAYhB,CAAO,CAAC;AAAA,QACpBc,CAAe;AAAA,8EACuDX,CAAgB;AAAA;AAAA,IAI1F,EAESd,GACT,CAAC4B,EAA+BhB,EAAqCiB,EACpEC,EAAmBC,EAAmBC,EAAkBC,EACxDC,IAAoD,CACnD,IAAMxB,EAAiBE,EAAW,SAAW,OACvCuB,EAAazB,EAAiBkB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAW3B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAY5B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAc7B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAAS9B,GAAmByB,EAAa,IAAM,GAAKA,EAAa,GAAMI,EAAc,IAAM,EAG3FE,EAAY/B,EAAiB6B,EAAcF,EAAWC,EACtDI,EAAYhC,EAAiB2B,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,wCAAwCD,CAAQ,EAAE,EAE7E,IAAM/B,EAAmB0B,EAAS,EAAI,EAChCO,EAAY,KAAK,IAAIJ,EAAc,CAAC,EAAI7B,EAAkB6B,EAAc,CAAC,CAAC,EAC1EK,EAAaR,EAAS,EAAI,EAC1BS,EACF,CAACrC,EAAW,YAAYF,EAAiB,EAAI,CAAC,EAAGE,EAAW,YAAYF,EAAiB,EAAI,CAAC,CAAC,EAC7FwC,EAAsB,CAC1BD,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,IACrGqC,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,GACvG,EACMuC,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFsC,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KA
AK,CAAC,GAAK,CAAC,CACvF,EAEMwC,EAAoC,CACxC,CAAC,OAAsB,KAAMtB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAMpB,EAAW,OAAO,EACvF,CAAC,OAAsB,KAAMA,EAAW,SAAS,EAAG,CAAC,OAAsB,KAAMqC,CAAU,EAC3F,CAAC,OAAsB,KAAME,CAAI,CACnC,EACAE,GAA6BzC,EAAYwC,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAElF,IAAM2B,EAAwD,CAAC,OAAQ,MAAM,EACzEtB,IACFmB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE2B,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BzB,CAAW,CAAC,EAE/D,IAAM2B,GAAmBC,IAA+B,CACtD,IAAMC,GAAIC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EAC5EY,GAAID,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACnEiC,EAASC,EAAe,SAAUlC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQmB,CAAU,EACpFe,GAAiB,CAACL,GAAGE,EAAC,EAExBI,GAAmB,GACvB,GAAI/B,EAAS,CACX,IAAMgC,GAAON,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EACxFe,GAAe,KAAKE,EAAI,EACxBD,IAAoB;AAAA,4DAC8BC,GAAK,KAAK,KAAK;AAAA,iCAC1CvD,EAAiB,IAAM,GAAG,GAAG8B,EAAS,MAAQ,EAAE;AAAA,YAEzE,CAEA,IAAM0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ,CAAC,EACrF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQjB,EAAW,MAAM,EAC5D,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQE,EAAK,MAAM,CACjD,EACAgB,GAAyBvD,EAAYsD,EAAQ,EAC7C,IAAME,GAAWC,GAA4BzC,EAAO,CAAC,EAAE,SAAU,CAAC,EAClE,GAAIwC,KAAa,OAASA,KAAa,MACrC,MAAM,IAAI,MAAM,YAAYA,EAAQ,oBAAoB,EAE1D,MAAO;AAAA,UACLE,GAAc,yBAAyB,CAAC;AAAA,UACxCb,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGH,GAAgBF,CAAM,CAAC;AAAA,UACnFG,EAAgB;AAAA,UAChBjE,GAA6BW,EAAgBuB,EAASrB,EAAY8C,GAAE,KAAK,MAAO5C,CAAgB,CAAC;AAAA,UAE/F0B,EAAS+B,GACI3B,EAAmBD,EAAeyB,GAAU,OAAW,CAAC1D,EAAgBqC,CAAS,EACrFyB,GACI5B,EAAmBD,EAAeyB,GAAU,OAAW,CAAC1D,EAAgBqC,EAAW,GACnF,OAAWb,CAAyB,CAAC,EACxD,EAEA,MAAO,CACL,KAAM,wBACN,YACI,CAAC,KAAM,GAAGtB,EAAW,QAAQ,IAAIgC,CAAiB,IAAID,CAAa,IAAIH,CAAM,GAAI,kBAAAe,CAAiB,EACtG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM1B,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAO,CACF,GACA,gBAAAI,EACF,CACF,ICvQJ,IA2BMiB,GAiMOC,GA5NbC,GAAAC,EAAA,kBAmBAC,KACAC,KAEAC,KAEAC,KAGMP,GACF,CAACQ,EAA4BC,EAA+BC,EAAgCC,EAC3FC,EAA+BC,EAAS,GAAOC,EAAkBC,EACjEC,EAAiB,KAAkB,CAClC,IAAMC,EAASD,EAAiB,EAAI,EAC9BE,EAASF,EAAiB,EAAI,EAC9BG,EAAaH,EAAiB,EAAI,EAClCI,EAAgBP,EAAS,EAAI,EAE/BQ,EAAmB;AAAA,iDACoBR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,0BAC9DD,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,KAEvDH,IACFU,GAAoB;AAAA,sDAC0BR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,2BAClEE,EAAiB,IAAM,GAAG,GAAGH,EAAS,MAAQ,EAAE;AAAA,QAGrE,IAAMS,EAAaT,EAAS,EAAI,EAC1BU,EAAIC,EAAc,IAAKf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC5EG,EAAKD,EAAc,KAAMf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC9EI,EAAiB,CAACD,EAAIF,CAAC,EACzBZ,GACFe,EAAe,KAAKF,EAAc,OAAQf,EAAO,CAAC,EAAE,SAAU,CAACC,EAAYS,CAAU,CAAC,EAAE,OAAQG,CAAU,CAAC,EAE7G,IAAMK,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQY,CAAU,EAEpFO,EAAe;AAAA,2BACAjB,EAAuB,cAAgB,gBAAgB;AAAA,kBAChEA,EAAuB,cAAgB,gBAAgB;AAAA,kBACvDA,EAAuB,cAAgB,gBAAgB,MAAMQ,CAAa;AAAA,wBACpER,EAAuB,cAAgB,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM7CE,CAAQ,MAAMM,CAAa;AAAA,8BAC/BA,CAAa;AAAA,8BACbN,CAAQ;AAAA;AAAA;AAAA,uBAGfA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,oCAExCA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAOnBA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA,0BACpDA,CAAQ,wBAAwBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAO/CA,CAAQ;AAAA;AAAA;AAAA;AAAA,wCAINA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAUhBS,EAAE,IAAI,
cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAMhBW,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA;AAAA,iDAEjBX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iDAMRK,CAAU;AAAA;AAAA,gCAE3BI,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCASZS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA,oCACjCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAUTM,CAAa;AAAA,qCACXT,EAAU,YAAc,QAAQG,CAAQ,QAAQ;AAAA,YACzEa,EAAO,IAAI,QAAS,IAAK,QAAS,KAAM,OAAO,CAAC;AAAA;AAAA,SAGhDG,EAAc;AAAA,gCACMH,EAAO,gBAAgB,YAAY,CAAC;AAAA,wBAC5CA,EAAO,WAAW,gBAAiB,CAAC,CAAC;AAAA,qBACxCA,EAAO,WAAW,gBAAiBR,CAAU,CAAC;AAAA,oBAC/CQ,EAAO,WAAW,gBAAiBV,CAAM,CAAC;AAAA,oBAC1CU,EAAO,WAAW,gBAAiBT,CAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAQpCJ,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,yBAKTA,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,sCAEvCA,CAAQ,sBAAsBG,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAU/CH,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,wCAEvCA,CAAQ,sBAAsBI,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAQlEF,EAAiBS,EAAG,IAAI,QAAS,OAAQ,OAAQ,cAAc,EAC9CA,EAAG,IAAI,QAAS,eAAgB,OAAQ,MAAM,CAAC;AAAA,+BAC3CF,EAAE,IAAI,eAAgB,cAAe,cAAe,aAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM/DZ,EAAU,WAAa,GAAGG,CAAQ,OAAO;AAAA,YAC/Da,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,UAG/C,MAAO;AAAA,IACTnB,EAAa,iBAAiBO,CAAQ,EAAE,iBAAiB,GAAGW,EAAgBC,CAAM,CAAC;AAAA,IACnFN,CAAgB;AAAA;AAAA,MAEdb,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,IAC5EK,EAASgB,EAAeC,CAAW,GACnC,EAES7B,GACT,CAACQ,EAA+BsB,EAC/BC,IAAqF,CACpF,IAAMrB,EAAUF,EAAO,OAAS,EAE1BC,EAAcqB,EAAW,YACzBE,EAAaC,EAAU,KAAKxB,CAAW,EAMvCyB,EAAW,CACf,KAAK,KAAKF,EAAa,EAAE,EACzB,EACA,CACF,EACAG,GAAU,UAAW,IAAM,uCAAuCD,CAAQ,EAAE,EAE5E,IAAMnB,EAAiBe,EAAW,SAAW,OACvCM,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAU,CAACP,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,EACvDQ,EACF,CAACR,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAGe,EAAW,YAAYf,EAAiB,EAAI,CAAC,CAAC,EAC7FwB,EAAY,CAACT,EAAW,UAAU,CAAC,EAAGA,EAAW,UAAU,CAAC,CAAC,EAC7DU,EAAsB,CAC1BF,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,IAC3FQ,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,GAC7F,EACMW,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFU,EAAoB,CAAC,EAAI,EAAI,KAAK,MAAMV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,CAAC,EAAI,CACrF,EAEMlB,EAAS,GACT8B,EAAQZ,EAAW,MACnBa,EAASnC,EAAO,CAAC,EAAE,KACnBoC,EAAwBD,EAAO,CAAC,EAAID,EACpCG,EAAyBF,EAAO,CAAC,EAEjCG,EAAoC,CACxC,CAAC,QAAuB,KAAMd,CAAU,EAAG,CAAC,QAAuB,KAAMK,CAAO,EAChF,CAAC,QAAuB,KAAMC,CAAU,EAAG,CAAC,QAAuB,KAAMC,CAAS,EAClF,CAAC,QAAuB,KAAMC,CAAmB,EAAG,CAAC,OAAsB,KAAMC,CAAI,EACrF,CAAC,QAAuB,KAAMG,CAAqB,EAAG,CAAC,QAAuB,KAAMC,CAAsB,EAC1G,GAAGE,EAA2BvC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9D,EACIE,IACFoC,EAAgB,KAAK,GAAGC,EAA2BvC,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE4B,EAAkB,KAAK,MAAM,GAE/BU,EAAgB,KAAK,GAAGC,EAA2BtC,CAAW,CAAC,EAE/D,IAAME,EAAuBuB,EAAS,CAAC,IAAM,GAAKA
,EAAS,CAAC,IAAM,EAC5Dc,EAAmBzC,GAA+B,CACtD,IAAMO,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQuB,EAAQ,MAAM,EACzF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQC,EAAW,MAAM,EAC5D,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,MAAM,EAC1D,CAAC,KAAM,wBAAyB,KAAM,MAAO,OAAQE,EAAoB,MAAM,EAC/E,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAAG,CAAC,KAAM,2BAA4B,KAAM,KAAK,EAChG,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACM5B,EAAWoC,GAA4BzC,EAAO,CAAC,EAAE,QAAQ,EAC/D,MAAO,GACHT,GACIQ,EAAcC,EAAQC,EAAaC,EAASC,EAAsBC,EAAQC,EAAUC,EACpFC,CAAc,CAAC,EACzB,EACA,MAAO,CACL,KAAM,kBACN,YAAa,CAAC,KAAM,GAAGe,EAAW,QAAQ,IAAK,kBAAAM,CAAiB,EAChE,WAAY,KAAO,CACjB,cAAe,CAAC,EAAGF,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,QAAS,CAAC,CACR,KAAMH,EAA6BA,EAA2BtB,CAAW,EAAIA,EAC7E,SAAUD,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,gBAAAsC,CACF,GACA,gBAAAE,CACF,CACF,ICpTJ,IAYME,GAIAC,GAWAC,GAiCAC,GAwCOC,GA+BPC,GAqEAC,GAEAC,GAsDAC,GA6COC,GA7SbC,GAAAC,EAAA,kBAMAC,KACAC,KAEAC,KACAC,KAEMf,GACF,CAACgB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DpB,GAAoB,CAACqB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEMzB,GACF,CAAC0B,EAA+BC,EAAgCC,EAA8BP,EAC7FQ,EAAeP,EAAgBQ,EAA4BC,EAAwBC,EACnFC,IAA0B,CACzB,IAAMC,EAAcR,EAAW,OAAS,EAClCS,EAAoBF,EAAY,SAAW,EACjD,GAAID,EAAc,SAAW,EAC3B,QAASI,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EACjCJ,EAAc,KAAK,CAAC,EAGxB,IAAMK,EAAYX,EAAW,CAAC,EACxBY,EAAcX,EAAYI,EAAgB,EAAI,CAAC,EAAIF,EACzD,QAASO,EAAI,EAAGG,EAAIb,EAAW,OAASQ,GAAeH,EAAgB,EAAI,GAAIK,EAAIF,EAAa,EAAEE,EAAG,EAAEG,EAAG,CACxG,IAAMC,EAASd,EAAWa,CAAC,EACrBpB,EAAUgB,EAAoBK,EAASV,EAAQM,CAAC,EAAIH,EAAYG,CAAC,EACjEhB,EAAWtB,GAAgB0C,EAAQV,EAAQM,CAAC,EAAGd,EAAKc,CAAC,EAAGT,EAAYY,CAAC,EAAGX,EAAUQ,CAAC,EAAGjB,CAAO,EACnGpB,GAAkBqB,EAAUC,EAASC,EAAMc,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRH,EAAQM,CAAC,GAAKI,EAAS,GAAKR,EAAcI,CAAC,GAAKT,EAAYY,CAAC,EAAI,GAAKX,EAAUQ,CAAC,EAAI,EAAId,EAAKc,CAAC,EAC/Fd,EAAKc,EAAIF,CAAW,CAAC,CAE7B,CACAD,EAAY,OAAO,EAAG,EAAGI,CAAS,EAClCJ,EAAY,OAAOF,EAAgB,EAAI,EAAG,EAAGO,CAAW,CAC1D,EAOErC,GACF,CAAoCwC,EAAeC,IAAqC,CACtF,IAAMf,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAClGjB,EAAY,OAAS,EACrB,QAASS,EAAI,EAAGA,EAAIM,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEN,EAC3CT,EAAY,KAAKe,EAAO,CAAC,EAAE,KAAKN,CAAC,CAAC,CAEtC,CACA,IAAMS,EAAiBJ,EAAW,SAAW,OAC7Cd,EAAY,OAAO,EAAG,EAAGe,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC1Cf,EAAY,OAAOkB,EAAiB,EAAI,EAAG,EAAGH,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAE/D,IAAMpB,EAAOmB,EAAW,KAAK,MAAM,EAC7BR,EAAcQ,EAAW,YAAY,MAAM,EAC3CT,EAAgBS,EAAW,cAAc,MAAM,EAC/Cf,EAAagB,EAAO,CAAC,EAAE,KACzBd,EAAYa,EAAW,UAAU,MAAM,EAC3C,GAAIb,EAAU,OAAO,CAACe,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC9C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5Cd,EAAY,IAAI,MAAMM,CAAW,EAAE,KAAK,CAAC,CAC3C,CACA,IAAIJ,EAAUW,EAAW,QAAQ,MAAM,EACvC,GAAIX,EAAQ,OAAO,CAACa,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC5C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5CZ,EAAU,IAAI,MAAMI,CAAW,EAAE,KAAK,CAAC,CACzC,CAGAlC,GACI0B,EAAYC,EAAaC,EAAWa,EAAW,QAASA,EAAW,MAAOnB,EAAMQ,EAASe,EACzFb,EAAeC,CAAW,EAG9B,IAAMa,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAL,EAAM,cAAAU,EAAe,YAAAC,EAAa,UAAAL,EAAW,QAAAE,CAAO,CAAC,EACzFgB,CACT,EAES5C,GAAgCuC,GAAiE,CAC5G,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBpB,EACF,CAAC,SAAU,QAAS,aACnB,YAAY,EAAE,OAAOoB,EAAW,QAAW,IAAc,EAAIA,EAAW,OAAiB,EACxFb,EAAYa,EAAW,UACvBZ,EAAQY,EAAW,MACnBd,EAAcc,EAAW,YACzBnB,EAAOmB,EAAW,KA
ClBX,EAAUW,EAAW,QACrBS,EAAYT,EAAW,SAA2B,EAClDT,EAAgBS,EAAW,cAC3BR,EAAcQ,EAAW,YAC/B,MAAO,CACL,QAAApB,EACA,OAAA4B,EACA,UAAArB,EACA,MAAAC,EACA,YAAAF,EACA,cAAAK,EACA,YAAAC,EACA,KAAAX,EACA,QAAAQ,EACA,SAAAoB,EACA,GAAGH,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEM5C,GAAiB,CAACuC,EAA+BD,IAA8C,CAGnG,GAAI,CAACC,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAG7D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAMS,EAAcT,EAAO,CAAC,EAAE,KAAKD,EAAW,SAAW,OAASC,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFU,EAAkBV,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIS,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAcX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAID,EAAW,MAGnD,GAAIC,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMW,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMnB,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAG5C,GAFqBD,EAAW,UAAU,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEnDH,EAAW,UAAU,SAAWP,EAClD,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAKvD,GAFmBO,EAAW,QAAQ,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEjDH,EAAW,QAAQ,SAAWP,EAC9C,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAKrD,GADgBO,EAAW,KAAK,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAC9CH,EAAW,KAAK,SAAWP,EAAc,EACtD,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIO,EAAW,cAAc,SAAWP,GAAeO,EAAW,cAAc,SAAW,EACzF,MAAM,IAAI,MAAM,4BAA4BP,CAAW,GAAG,EAM5D,GADuBO,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GACrDH,EAAW,YAAY,SAAW,GACpDA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5D,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAID,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAGMtC,GAAsB,CAAC,EAAG,EAAG,EAAG,CAAC,EAEjCC,GACF,CAACiD,EAAyBZ,EAA+BD,IAA8C,CACrG,IAAMc,EAAqBtD,GAAmCwC,EAAYC,CAAM,EAC1EG,EAAiBJ,EAAW,SAAW,OACvCR,EAAcsB,EAAmB,YACjCjB,EAAcL,EAAYY,EAAiB,EAAI,CAAC,EAChDW,EAAgBd,EAAO,CAAC,EAAE,KAAKG,EAAiB,EAAI,CAAC,EAI3D,GAAIU,EAAmB,QAAU,GAAMjB,IAAgB,GAAKkB,IAAkB,EAAI,CAChFF,EAAQ,QAAQG,GAAiCf,EAAQa,CAAkB,CAAC,EAC5E,MACF,CACA,IAAMG,EAAYzB,EAAYY,EAAiB,EAAI,CAAC,EAC9Cc,EAAW1B,EAAYY,EAAiB,EAAI,CAAC,EAC7Ce,EAAelB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/BmB,EAAcnB,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BoB,EAAYjB,EAAiBa,EAAYC,EAAWrB,EACpDyB,EAAYlB,EAAiBP,EAAcoB,EAAYC,EACvDK,EAAWJ,EAAeC,EAAcL,EAExCS,EAAgE,GAIhEC,EAAoBZ,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJa,GAA2BzB,EAAO,CAAC,EAAGtC,EAAmB,EACzD,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACqC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACa,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKY,GAIhC,IAAME,EAAsB,CAAC1B,EAAO,CAAC,EAAGwB,CAAgB,EAClDG,EAAU3B,EAAO,SAAW,EAC9B2B,IACE,CAACxB,GAAkBH,EAAO,CAAC,EAAE,KAAK,SAAW,EAC/C0B,EAAoB,KAAK1B,EAAO,CAAC,EAAE,QAAQ,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,CAAC,CAAC,EAErE0B,EAAoB,KAAK1B,EAAO,CAAC,CAAC,GAKtCY,EAAQ,QACJgB,GACIF,EAAqBb,EAAoBtB,EAAa6B,EAAWC,EAAWC,EAAUK,EACtFJ,CAAyB,EAC7B,CAAC,OAAQG,CAAmB,CAAC,CACnC,EAEE9D,GAAkB,CAACgD,EAAyBb,IAA8C,CAE9F,IAAMV,EAAgBU,EAAW,SAAW,OAEtCC,EAAS,CACbY,EAAQ,OAAO,CAAC,EAAE,QACdvB,EAEI,CAACuB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5BZ,EAAO,KAAKY,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAI3B,EAAcc,EAAW,aACzBd,EAAY,SAAW,GAAKA,EAAY,CAAC,IAAM,KA
CjDA,EAAc,CAAC2B,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,GAE1C,IAAI1B,EAAYa,EAAW,WACvBb,EAAU,SAAW,GAAKA,EAAU,CAAC,IAAM,KAC7CA,EAAY,CAAC,CAAC,GAEhB,IAAIE,EAAUW,EAAW,SACrBX,EAAQ,SAAW,GAAKA,EAAQ,CAAC,IAAM,KACzCA,EAAU,CAAC,CAAC,GAEd,IAAIR,EAAOmB,EAAW,KAClBnB,EAAK,SAAW,IAClBA,EAAO,CAAC,EAAG,CAAC,GAEdA,EAAO,CAAC,EAAGA,EAAK,CAAC,EAAG,EAAGA,EAAK,CAAC,CAAC,EAC9BQ,EAAU,CAAC,CAAC,EAAE,OAAOA,CAAO,EAC5BF,EAAY,CAAC,CAAC,EAAE,OAAOA,CAAS,EAChCD,EAAc,CAAC,CAAC,EAAE,OAAOA,CAAW,EACpC,IAAM4B,EACFtD,GAAmC,CAAC,GAAGwC,EAAY,KAAAnB,EAAM,QAAAQ,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGe,CAAM,EACrGY,EAAQ,QAAQG,GACZf,EAAQa,EACRtB,GAAeF,EAAgB,CAACE,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAC/C,CAACA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,CAAC,CAAC,CACtF,EAEa1B,GAAgB,CAAC+C,EAAyBb,IAA8C,CACnGtC,GAAemD,EAAQ,OAAQb,CAAU,EACrCa,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpChD,GAAgBgD,EAASb,CAAU,EAEnCpC,GAAgBiD,EAASA,EAAQ,OAAQb,CAAU,CAEvD,ICpTA,IAgBM8B,GAkDOC,GAOAC,GAzEbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAOMR,GACF,CAACS,EAAmBC,EAA+BC,EAAuBC,IACvD,CACb,IAAMC,EAAaC,EAAU,KAAKJ,CAAU,EACtCK,EAAOL,EAAW,OAClBM,EAAQC,EAAc,QAASR,EAAWM,CAAI,EAC9CG,EAASC,EAAe,SAAUV,EAAWM,CAAI,EACjDK,EAAYT,EAAU,WAAa,EAAiBA,EAAU,cAAc,EAAE,CAAC,EAC3B,OAAOA,EAAU,iBAAiB,EAAE,CAAC,CAAC,EAC1FU,EAAOP,EAAU,cAAcM,EAAWL,CAAI,EAC9CO,EAAmBC,GAA+B,CACtD,IAAMC,EAAQ,QAAQR,EAAM,WAAW,eAAgB,eAAe,CAAC,KACjES,EAAMC,EAAa,uBAAwB,gBAAiBX,CAAI,EAChEY,EAAaf,EAAW,QAAUY,GAASZ,EAAW,UAAY,OAAS,IAAM,IACjFgB,EAAahB,EAAW,QAAUa,EAAMD,GAASZ,EAAW,UAAY,GAAK,QACnF,MAAO;AAAA,kBAEHW,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,CAAM,CAAC;AAAA,kBAClCK,EAAa,UAAU,CAAC;AAAA,oBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,uCACtDL,EAAO,gBAAgB,YAAY,CAAC;AAAA,8BAC7CA,EAAO,KAAK,KAAK;AAAA,sCACTS,CAAU;AAAA,qCACXC,CAAU;AAAA;AAAA,sBAEzBZ,EAAM,WAAW,eAAgB,gBAAiB,QAAQ,CAAC;AAAA,kCAC/CA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,oBAEhDE,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,kBAEjD,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMF,EAAY,SAAUD,CAAS,CAAC,EACjD,cAAe,CAAC,EAAG,KAAK,KAAKI,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,QAAuB,KAAMQ,CAAI,EAC7E,GAAGQ,EAA2BnB,EAAYA,CAAU,CACtD,CAEF,GACA,gBAAAY,CACF,CACF,EAGKrB,GAAS,CAAC6B,EAAyBlB,IAAuC,CACrF,IAAMF,EAAaoB,EAAQ,OAAO,CAAC,EAAE,KAC/BrB,EAAYqB,EAAQ,OAAO,CAAC,EAAE,SAC9BT,EAAOS,EAAQ,OAAO,CAAC,EAC7BA,EAAQ,QAAQ9B,GAAwBS,EAAWC,EAAYW,EAAMT,CAAU,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACjG,EAEaV,GAAyBU,GAA0D,CAC9F,IAAMmB,EAAYnB,EAAW,YAAwB,EAC/CoB,EAAUpB,EAAW,UAAsB,EACjD,OAAOqB,GAA4B,CAAC,UAAAF,EAAW,QAAAC,CAAO,CAAC,CACzD,IC7EA,IAoBME,GASAC,GAWAC,GA2DOC,GAKAC,GAxGbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAWMV,GAAkBW,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,gCAAgC,EAElD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,iCAAiC,CAErD,EAEMV,GAAmB,CAACW,EAAgBC,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKF,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAI,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMd,GAAgC,CAACe,EAAyBC,IAAoD,CAClH,IAAI,EAAWC,EAAWC,EAAWC,EACjCC,EACAV,EACEW,EAAgBL,EAAW,SAAW,OACtCM,EAAYN,EAAW,UACvBO,EAAYP,EAAW,OAAS,MAClCK,GACF,CAAC,EAAGJ,EAAGC,EAAGC,CAAC,EAAIJ,EAAY,KAC3BK,EAAQG,EAAY,CAAC,EAAGN,EAAGC,EAAGI,EAAWA,EAAWH,EAAKG,GAAa,CAAE,EACpD,CAAC,EAAGL,EAAGC,EAAGC,EAAKG,GAAa,EAAIA,EAAWA,CAAS,EACxEZ,EAAOa,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,IAEzD,CAAC,EAAGN,EAAGC,EAAGC,CAAC,EAAI,CAACJ
,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,CAAC,EAClGK,EAAQG,EAAY,CAAC,EAAGD,EAAWA,EAAWH,EAAKG,GAAa,EAAIL,EAAGC,CAAC,EACpD,CAAC,EAAGC,EAAKG,GAAa,EAAIA,EAAWA,EAAWL,EAAGC,CAAC,EACxER,EAAOa,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,GAE3D,IAAMC,EAAsBT,EAAY,QAAQK,CAAK,EAC/CK,EAAoBD,EAAoB,KAAK,OAC7CE,EAAgBX,EAAY,SAE5BY,EAAgBC,EAAc,IAAKF,EAAeD,CAAiB,EACnEI,EAAeC,EAAe,SAAUJ,EAAeD,CAAiB,EAExEM,EAAmBC,GAA+B;AAAA,IACtDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEhG9B,GAAiBW,EAAMe,EAAmBE,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEtEG,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DH,EAAa,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGxDA,EAAa,YAAY,aAAcF,EAAc,aAAa,UAAU,CAAC,CAAC;AAAA,KAGlF,MAAO,CACL,KAAM,eACN,YAAa,CAAC,KAAM,GAAGZ,EAAY,IAAI,IAAIC,EAAW,SAAS,IAAIA,EAAW,IAAI,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACjH,WAAaP,GAAW,CACtB,IAAMwB,EAAcZ,EAAgB,CAAC,EAAGJ,EAAIK,EAAWJ,EAAII,EAAWH,EAAKG,GAAa,CAAE,EACtD,CAAC,EAAGH,EAAKG,GAAa,EAAIL,EAAIK,EAAWJ,EAAII,CAAS,EACpFY,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAkBZ,EAAoB,KACtCa,EAAiBF,EAAU,gBAAgBC,EAAiB1B,CAAI,EACtE,MAAO,CACL,QAAS,CAAC,CAAC,KAAMuB,EAAa,SAAUxB,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKyB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGI,EAA2BF,EAAiBC,CAAc,CAAC,CAChH,CACF,EACA,gBAAAN,CACF,CACF,EAEa9B,GAAe,CAACsC,EAAyBvB,IAA6C,CACjGlB,GAAeyC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAA8BuC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CAC9E,EAEad,GAA+Bc,GACxCwB,GAA4B,CAC1B,UAAWxB,EAAW,UACtB,KAAMA,EAAW,KACjB,OAAQA,EAAW,MACrB,CAAC,IC7GL,IAsBMyB,GAEAC,GACAC,GACAC,GACAC,GAQAC,GAqBAC,GA4HAC,GAEAC,GA+GOC,GAOAC,GA5SbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAaMhB,GACF,qBACEC,GAAc,IAAMD,GAAgB,KACpCE,GAAkB,IAAMD,GAAc,IACtCE,GAAa,IAAMF,GAAc,MAAQA,GACzCG,GAAiB,IAAMD,GAAa,IAQpCE,GAAN,KAAiB,CACf,YAAYY,EAAa,GAAI,CAC3B,KAAK,gBAAkB,IAAI,IAC3B,KAAK,WAAaA,CACpB,CAGA,UAAUC,EAAgBC,EAAe,CACvC,IAAIC,EAAQ,KAAK,gBAAgB,IAAIF,CAAM,EACvCE,IAAU,OACZA,EAAQ,CAACD,CAAK,EAEdC,EAAM,KAAKD,CAAK,EAElB,KAAK,gBAAgB,IAAID,EAAQE,CAAK,CACxC,CAIF,EAEMd,GAAN,KAAqB,CACnB,YAAYe,EAA+CC,EAAkB,CAAlB,cAAAA,EACzD,KAAK,YAAc,GACnB,KAAK,aAAe,IAAI,IACxB,KAAK,IAAM,IAAI,MACf,KAAK,WAAa,CAAC,EAGnB,GAAI,CAACC,EAAKC,CAAG,EAAIF,EAAS,SAAS,IAAI,EAAIA,EAAS,MAAM,KAAM,CAAC,EAAI,CAACA,EAAU,EAAE,EAClF,GAAI,CAACC,EAAI,MAAM,OAAOnB,EAAc,CAAC,EACnC,MAAM,IAAI,MAAM,kBAAkB,EAapC,GAXmBmB,EAAI,MAAM,GAAG,EACrB,QAAQ,CAACE,EAAWN,IAAU,CACvC,IAAMO,EAAOL,EAAOF,CAAK,EAAE,KAAK,MAAM,EACtC,GAAI,CAACM,EAAU,MAAM,OAAOvB,EAAe,CAAC,EAC1C,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMyB,EAAa,KAAK,YAAYF,EAAW,GAAMC,EAAMP,CAAK,EAChE,KAAK,IAAI,KAAKQ,CAAU,CAC1B,CAAC,EAGGH,IAAQ,GAEVA,GAAO,CAAC,GAAG,KAAK,aAAa,QAAQ,CAAC,EAC1B,OAAO,CAAC,CAACI,EAAKC,CAAI,IAAOA,EAAK,QAAU,GAAKD,IAAQ,KAAM,EAC3D,IAAI,CAAC,CAACA,CAAG,IAAMA,CAAG,EAClB,KAAK,EAAE,UAEf,CAACJ,EAAI,MAAM,OAAOvB,EAAW,CAAC,EAChC,MAAM,IAAI,MAAM,aAAa,EAKduB,EAAI,MAAM,OAAOxB,GAAe,GAAG,CAAC,GAC3C,QAASkB,GAAW,CAC9B,GAAIA,IAAW,MACb,KAAK,WAAa,KAAK,WAAW,OAAO,KAAK,YAAY,MACrD,CACL,IAAMW,EAAO,KAAK,aAAa,IAAIX,CAAM,EACzC,GAAIW,IAAS,OACX,MAAM,IAAI,MAAM,oBAAoB,EAEtC,KAAK,WAAW,KAAKA,EAAK,QAAQ,CACpC,CACF,CAAC,EACD,KAAK,IAAM,KAAK,YAAYL,EAAK,GAAO,KAAK,UAAU,CACzD,CAGA,UAAUN,EAAgBY,EAAkBb,EAAoB,CAC9D,IAAIY,EAAO,KAAK,aAAa,IAAIX,CAAM,EACvC,GAAIW,IAAS,OAAW,CACtB,GAAIA,EAAK,WAAaC,GAAYD,EAAK,QAAU,EAC/C,MAAM,IAAI,MAAM,oBAAoB,EAEpCA,EAAK,QACLA,EAAK,aAAa,KAAKZ,CAAU,CAErC,MACEY,EAAO,CAAC,MAAO,EAAG,SAAAC,EAAU,aAAc,CAACb,CAAU,CAAC,EAExD,KAAK,aAAa,IAAIC,EAAQW,CAAI,CACpC,CAGA,YAAYE,EAAcC,EAAkBN,EAAyBP,EAAQ,GAAgB,CAC3F,IAAMc,EAAOP,EAAK,OACdQ,EAAW,GACXC,EAAe,CAAC,EAChBC,EAAU,EAE
d,GAAI,CAACL,EAAK,MAAM,OAAO7B,EAAe,CAAC,GAAM,CAAC8B,GAAWD,IAAS,GAChE,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMM,EAAeN,EAAK,MAAM,OAAO/B,GAAe,GAAG,CAAC,EACpD2B,EAAa,IAAItB,GAAWc,CAAK,EAEvC,OAAAkB,GAAc,QAAQ,CAACnB,EAAgBoB,IAAc,CACnD,GAAIpB,IAAW,MAAO,CACpB,GAAIgB,EACF,MAAM,IAAI,MAAM,6CAA6C,EAE/DA,EAAW,GACX,IAAMK,EAAoBN,EAAOI,EAAa,OAAS,EACvD,GAAIE,EAAoB,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GADAJ,EAAeT,EAAK,MAAMU,EAASA,EAAUG,CAAiB,EAC1D,KAAK,aACP,GAAI,KAAK,aAAa,SAAWJ,EAAa,QAC1C,KAAK,aAAa,SAAS,IAAMA,EAAa,SAAS,EACzD,MAAM,IAAI,MAAM,8BAA8B,UAEvCH,EACT,KAAK,YAAc,GACnB,KAAK,aAAeG,MAEpB,OAAM,IAAI,MAAM,uCAAuC,EAGzD,QAASK,EAAI,EAAGA,EAAIL,EAAa,OAAQK,IAAK,CAC5C,IAAMtB,EAAS,OAAO,aAAa,IAAI,WAAW,CAAC,EAAIsB,CAAC,EACxDb,EAAW,UAAUT,EAAQoB,EAAIE,CAAC,EAClC,KAAK,UAAUtB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAC/C,CACF,MACEQ,EAAW,UAAUT,EAAQoB,GAAK,KAAK,YAAc,KAAK,aAAa,OAAS,EAAI,EAAE,EACtF,KAAK,UAAUpB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAEjD,CAAC,EACMQ,CACT,CAQF,EAEMpB,GAAakC,GAAyBA,EAAO,OAE7CjC,GACF,CAACkC,EAAuCC,EAAkBC,EACzDC,IAAgD,CAE/C,IAAMC,EADQJ,EAAY,IAAKhB,GAASA,EAAK,MAAM,EAC3B,IAAI,CAACO,EAAMd,IAAU4B,EAAc,QAAQ5B,CAAK,GAAIwB,EAAUV,CAAI,CAAC,EACrFe,EAAaC,EAAU,KAAKJ,CAAW,EACvCK,EAASC,EAAe,SAAUR,EAAUE,EAAY,MAAM,EAC9DO,EACF,CAAC,GAAGR,EAAe,aAAa,KAAK,CAAC,EAAE,OAAQ1B,GAAW,CAAC0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,CAAC,EACxGmC,EAAmBC,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EACrBC,EAAW,kBACXC,EAAU,iBACVC,EAAY,eACZC,EAAgC,CAAC,EACjCC,EAAiC,CAAC,EAClCC,EAAiC,CAAC,EAClCC,EAA4B,CAAC,EAC7BC,EAAyBnB,EAAe,aAAa,OAASA,EAAe,IAAI,gBAAgB,KACvGA,EAAe,aAAa,QAAQ,CAACf,EAAMX,IAAW,CACpD,GAAI0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,EAAG,CAClD,IAAM8C,EAAcpB,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,IAAI,CAAC,EAClE8C,IAAgB,QAClBpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBoC,EAAQ,KAAK,GACTT,EAAUR,CAAC,EAAE,WACT,QAAQA,CAAC,UAAWnB,EAAO+B,EAAO,WAAW,gBAAiBc,CAAW,CAAC,CAAC,EAAE,CACvF,CAAC,CACH,CACF,CAAC,CAEL,MACEpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBwC,EAAoB,KAAK,GAAGb,EAAUR,CAAC,EAAE,WAAW,QAAQA,CAAC,UAAWnB,EAAO,GAAGD,CAAM,EAAE,CAAC,EAAE,CAC/F,CAAC,EACD4C,EAAgB,KAAK,WAAWhB,EAAUR,CAAC,EAAE,aAAa,QAAQA,CAAC,SAAS,CAAC,GAAG,CAClF,CACF,CAAC,EACDsB,EAAqB,KACjB,WAAW1C,CAAM,cAAcA,CAAM,eAAeX,GAAUW,CAAM,CAAC,KAAKA,CAAM,OAAO,EAC3F2C,EAAqB,KAAK,GAAG,CAEjC,CAAC,EACD,IAAMK,EAAYH,EACd,CACE,GAAGR,EACH,aAAaT,EAAU,IAAI,CAACqB,EAAU7B,IAAM6B,EAAS,aAAa,QAAQ7B,CAAC,SAAS,CAAC,EAAE,KAAK,KAAK,CAAC,GACpG,EACA,CACE,GAAGiB,EACHE,EACA,GAAGG,EACH,GAAGD,EACHH,EACA,GAAGM,EACHJ,EACA,GAAGG,CACL,EACJ,MAAO;AAAA,cAEHP,EACK,iBAAiBF,EAAgB,IAAKlC,IAAY,CAAC,KAAM,GAAGX,GAAUW,CAAM,CAAC,GAAI,KAAM,KAAK,EAAE,CAAC,EAC/F,gBAAgB,aAAc,KAAK,EACnC,iBAAiB,GAAG4B,EAAWI,CAAM,CAAC;AAAA;AAAA,cAEzCI,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,kCACrDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA,cACxDJ,EAAU,IAAI,CAACsB,EAAM9B,IAAM,YAAYA,CAAC,YAAYQ,EAAUR,CAAC,EAAE,KAAK,OAAO,GAAG,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,cAC5F4B,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,cACpBhB,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,YAE/C,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAe,SAAU,kBAAmBF,EAAY,IAAI,IAAM,MAAM,CAAC,EAC7F,WAAY,IAAM,CAGhB,IAAM2B,EACFjB,EAAgB,OAAQlC,GAAW0B,EAAe,aAAa,IAAI1B,CAAM,CAAC,EACrE,IACIA,IACI,CAAC,QAAuB,KAAM0B,EAAe,aAAa,IAAI1B,CAAM,GAAG,UAAY,CAAC,EAAE,EACvGmD,EAAoB,KAAK,CAAC,QAAuB,KAAMrB,CAAU,CAAC,EAClE,IAAMsB,EACF5B,EAAY,IAAI,CAAChB,EAAM6C,IAAM,CAAC,GAAGC,EAA2B9C,CAAI,CAAC,CAAC,EAC7D,OAAO,CAAC+C,EAAKC,I
AAyBD,EAAI,OAAOC,CAAoB,EAAGL,CAAmB,EACpG,OAAAC,EAAgB,KAAK,GAAGE,EAA2B3B,CAAW,CAAC,EACvD,CACN,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAAsB,CACF,CACF,EACA,gBAAAjB,CACF,CACF,EAES5C,GAAS,CAACkE,EAAyBC,IAAuC,CACrF,IAAMhC,EAAiB,IAAItC,GAAeqE,EAAQ,OAAQC,EAAW,QAAQ,EACvE/B,EAAcD,EAAe,WAC7BF,EAAciC,EAAQ,OAAO,IAAI,CAACE,EAAON,IAAMM,EAAM,IAAI,EAC/DF,EAAQ,QAAQnE,GAAwBkC,EAAaiC,EAAQ,OAAO,CAAC,EAAE,SAAU/B,EAAgBC,CAAW,CAAC,CAC/G,EAEanC,GAAyBkE,GAA0D,CAC9F,IAAMtD,EAAYsD,EAAW,SAAoB,QAAQ,OAAQ,EAAE,EACnE,OAAOE,GAA4B,CAAC,SAAAxD,CAAQ,CAAC,CAC/C,IC/SA,IAUMyD,GAiBAC,GAYAC,GAIAC,GAyDOC,GApGbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0BAA0B,EAE5C,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EAEzDG,EAAaD,EAAM,OAASD,EAAW,OAAS,EAAIC,EAAM,OAASD,EAAW,OAC9EG,EAAkBH,EAAW,OAASC,EAAM,OAAS,EAAID,EAAW,OAASC,EAAM,OACvF,KAAOC,EAAaD,EAAM,QAAUE,EAAkBH,EAAW,OAAQ,EAAEE,EAAY,EAAEC,EACvF,GAAIF,EAAMC,CAAU,IAAMF,EAAWG,CAAe,GAAKF,EAAMC,CAAU,IAAM,GAC3EF,EAAWG,CAAe,IAAM,EAClC,MAAM,IAAI,MAAM,oDAAoD,CAG1E,EAEMb,GAAmB,CAACc,EAA2BC,IAAwC,CAC3F,IAAMC,EAAOF,EAAO,OAASC,EAAO,OAC9BJ,EAAkB,CAAC,EACzB,QAASM,EAAI,EAAGA,EAAID,EAAM,EAAEC,EAC1BN,EAAM,KAAKG,EAAOG,CAAC,CAAC,EAEtB,QAASA,EAAI,EAAGA,EAAIF,EAAO,OAAQ,EAAEE,EACnCN,EAAM,KAAKI,EAAOE,CAAC,IAAM,EAAIH,EAAOG,EAAID,CAAI,EAAID,EAAOE,CAAC,CAAC,EAE3D,OAAON,CACT,EAEMV,GAAuB,CAACS,EAA+BC,IACxDD,EAAW,OAASC,EAAM,OAAUX,GAAiBU,EAAYC,CAAK,EAAIX,GAAiBW,EAAOD,CAAU,EAG3GR,GAA2BO,GAA+C,CAC9E,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EACvDS,EAAwBjB,GAAqBS,EAAYC,CAAK,EAC9DQ,EAAWV,EAAO,CAAC,EAAE,SACrBW,EAAaD,IAAa,EAAgB,EAAI,EAC9CE,EAAa,KAAK,KAAKC,EAAU,KAAKJ,CAAW,EAAIE,CAAU,EAE/DG,EAAmBC,GAA+B,CACtD,IAAMC,EAAQC,EAAc,QAASP,EAAUT,EAAW,OAAQU,CAAU,EACtEO,EAASC,EAAe,SAAUT,EAAUD,EAAY,OAAQE,CAAU,EAC5ES,EACJ,GAAIV,IAAa,EAAe,CAC9B,IAAMW,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO;AAAA,6BAChDD,CAAC,MAAML,EAAO,gBAAgB,kBAAkBK,CAAC,GAAG,CAAC;AAAA,sBAC5DA,CAAC,MAAMP,EAAM,2BAA2B,gBAAgBO,CAAC,GAAIL,CAAM,CAAC;AAAA,qBACrEK,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIR,EAAM,YAAY,QAAQO,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAEhFH,EAAa;AAAA,0CACuBT,CAAU;AAAA;AAAA,UAE1CU,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCH,EAAO,YAAY,aAAc,MAAM,CAAC;AAAA,QAE9C,MACEE,EAAa;AAAA,8BACWF,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACtCF,EAAM,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,UAC3EA,EAAO,YAAY,aAAcF,EAAM,YAAY,aAAa,CAAC,CAAC;AAAA,SAGxE,MAAO;AAAA,MACLD,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBC,EAAOE,CAAM,CAAC;AAAA,MAC/EH,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,MACvEK,CAAU,EACd,EAEMK,EACF,CAAC,CAAC,QAAuB,KAAMb,CAAU,EAAG,GAAGc,EAA2BzB,EAAYQ,CAAW,CAAC,EACtG,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGA,EAAY,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACxE,gBAAAK,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAML,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBAAAa,CACF,EACF,CACF,EAEa/B,GAAUiC,GAAkC,CACvDrC,GAAeqC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAwBkC,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxE,ICvGA,IAaMC,GAiDOC,GA9DbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KAIMP,GAA6BQ,GAAqD,CACtF,IAAMC,EAAWD,EAAa,CAAC,EAAE,SAC3BE,EAAaC,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAChDI,EAAaD,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAEhDK,EAAUD,EAAa,IAAM,EAC7BE,EAAmBC,GAAuC,CAC9D,IAAMC,EAAIC,EAAc,IAAKR,EAAU,CAAC,CAAC,EAAG,CAAC,EACvCS,EAAOD,EAAc,OAAQR,EAAU,CAAC,C
AAC,EAAG,CAAC,EAC7CU,EAAIC,EAAe,IAAKX,EAAU,CAAC,CAAC,EAAG,CAAC,EAExCY,EAA8B,CAAC,CAAC,KAAM,kBAAmB,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAEvGC,EAAqBC,GAAe;AAAA,gBAC9BA,CAAC,oCAAoCA,CAAC;AAAA,gBACtCA,CAAC,MAAML,EAAK,YAAY,OAAOK,CAAC,aAAa,CAAC,QAAQA,CAAC,gBAC7DC,EAAoBX,EACtB;AAAA,mBACWK,EAAK,YAAY,uCAAuC,CAAC,IACpE,GAAGI,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC;AAAA,mBACjFN,EAAE,KAAK,KAAK,gCAE3B,MAAO,GAAGD,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBL,EAAGE,EAAMC,CAAC,CAAC;AAAA;AAAA,MAEtEM,GAAaC,GAA0BjB,CAAQ,CAAC,CAAC;AAAA;AAAA,MAEvDM,EAAa,UAAUY,EAAc,CAAC;AAAA,QACpCZ,EAAa,sCAAsC,0BAA0B,CAAC;AAAA;AAAA,gBAEtEC,EAAE,YAAY,YAAY,CAAC;AAAA,QACnCQ,CAAiB;AAAA;AAAA,QAEjBL,EAAE,YAAY,aAAoBS,GAAmB,MAAM,CAAC,CAAC;AAAA,MAEnE,EAEA,MAAO,CACL,KAAM,mBACN,YAAa,CAAC,KAAM,GAAGf,CAAO,GAAI,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACrE,gBAAAC,EACA,WAAae,IAAY,CACvB,QAAS,CAAC,CAAC,KAAMA,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,gBACI,CAAC,CAAC,QAAuB,KAAM,KAAK,KAAKnB,EAAa,CAAC,CAAC,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACxG,cAAe,CAAC,EAAG,KAAK,KAAKF,EAAaiB,GAAiB,CAAC,CAAC,CAC/D,EACF,CACF,EAEa1B,GAAY6B,GAAkC,CACrDA,EAAQ,OAAO,OAAS,GAAKnB,EAAU,KAAKmB,EAAQ,OAAO,CAAC,EAAE,IAAI,IAAM,EACpE7B,GAAS6B,CAAO,EAEtBA,EAAQ,QAAQ9B,GAA0B8B,EAAQ,MAAM,CAAC,CAE7D,ICpEA,IAeMC,GAMAC,GAsGOC,GAGAC,GA9HbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,EAEMT,GAA0B,CAACS,EAA+BC,IAA8C,CAC5G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAeH,EAAO,CAAC,EAAE,KAEzBI,EAAYF,EAAW,OACvBG,EAAOC,EAAU,cAAcL,EAAW,KAAMG,CAAS,EAEzDG,EAAcL,EAAW,MAAM,CAAC,EACtCK,EAAY,OAAOF,EAAM,EAAG,GAAGF,CAAY,EAE3C,IAAMK,EAAeN,EAAWG,CAAI,EAC9BI,EAAaT,EAAO,CAAC,EAAE,WAAa,EAAgB,EAAI,EACxDU,EAAa,KAAK,KAAKJ,EAAU,KAAKC,CAAW,EAAIE,CAAU,EAE/DE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMH,CAAI,EAAG,GAAGO,EAA2BZ,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMO,CAAW,CAChH,EAEMM,EAAmBC,GAA+B,CACtD,IAAMC,EAAOC,EAAc,OAAQhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQS,CAAU,EAClFQ,EAAUD,EAAc,eAAgBhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACjFkB,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUO,EAAY,OAAQE,CAAU,EAEpFW,EAAmBC,GAA6B,CACpD,IAAMC,EAAcnB,EAAa,OAC7BoB,EAAU,qBAAqBF,CAAC,OAAOJ,EAAQ,KAAK,OAAO,OAC/D,QAASO,EAAI,EAAGA,EAAIF,EAAaE,IAC/BD,GAAW,GAAGD,EAAc,EAAI,iBAAiBD,CAAC,IAAIG,CAAC,IAAM,iBAAiBH,CAAC,EAAE,MAC7Ed,EAAY,OAAS,EAAI,gBAAgBc,CAAC,oBAAoBG,CAAC,IAAM,gBAAgBH,CAAC,EAAE,IAE9FE,GAAW;AAAA,mBACEF,CAAC,MAAMJ,EAAQ,aAAa,iBAAiBI,CAAC,EAAE,CAAC;AAAA,mBACjDA,CAAC;AAAA,iBACHA,CAAC,SAASA,CAAC;AAAA;AAAA,2BAEDA,CAAC,MAAMN,EAAK,KAAK,OAAO;AAAA,UAE7C,QAASS,EAAI,EAAGC,EAAI,EAAGD,EAAIpB,EAAWoB,IAChCA,IAAMnB,GACRkB,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,aAAaA,CAAC,KACvFI,GAAKH,IAELC,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,MACrEd,EAAY,OAAS,EAAI,gBAAgBc,CAAC,IAAII,CAAC,IAAM,gBAAgBJ,CAAC,EAAE,IAC5EI,KAGJ,OAAOF,CACT,EACIG,EACJ,GAAI1B,EAAO,CAAC,EAAE,WAAa,EAAe,CACxC,IAAM2B,EAAmB,CAACC,EAAgBP,EAAWQ,EAAW,KAAO;AAAA,6BAChDR,CAAC,MAAMH,EAAO,gBAAgB,kBAAkBG,CAAC,GAAG,CAAC;AAAA,YACtED,EAAgBC,CAAC,CAAC;AAAA,sBACRA,CAAC,MAAMN,EAAK,gBAAgB,cAAcM,CAAC,EAAE,CAAC;AAAA,qBAC/CA,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BO,CAAM,IAAIP,CAAC,OAAOQ,CAAQ,IAAId,EAAK,YAAY,QAAQM,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAE/EK,EAAa;AAAA,0CACuBjB,CAAU;AAAA;AAAA,UAE1CkB,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCT,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,OAE/C,MACEQ,EAAa;A
AAA,4BACSR,EAAO,gBAAgB,YAAY,CAAC;AAAA,QACxDE,EAAgB,EAAE,CAAC;AAAA,oBACPL,EAAK,aAAa,aAAa,CAAC;AAAA,QAC5CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,QAG7C,MAAO;AAAA,QAEHJ,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBC,EAAME,EAASC,CAAM,CAAC;AAAA,QAC5CJ,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,UACzEY,CAAU;AAAA,QAElB,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMzB,EAAW,SAAU,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMM,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAE,CACF,CACF,EAEarB,GAAyBS,GAClC6B,GAA4B,CAAC,KAAM7B,EAAW,IAAc,CAAC,EAEpDR,GAAS,CAACsC,EAAyB9B,IAAuC,CACrF,IAAMD,EAAS+B,EAAQ,OACvBzC,GAAeU,CAAM,EACrB+B,EAAQ,QAAQxC,GAAwBwC,EAAQ,OAAQ9B,CAAU,CAAC,CACrE,IClIA,IAeM+B,GAeAC,GA+DOC,GAGAC,GAhGbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM;AAAA,4DACwC,CAE5D,EAEMT,GACF,CAACS,EAA+BC,IAAsD,CACpF,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAsBH,EAAO,CAAC,EAAE,SAChCI,EAAYF,EAAW,OAEvBG,EAAeL,EAAO,CAAC,EAAE,KACzBM,EAAkBN,EAAO,CAAC,EAAE,SAC5BO,EAAOC,EAAU,cAAcP,EAAW,KAAMG,CAAS,EACzDK,EAAeP,EAAWK,CAAI,EAE9BG,EAAcL,EAAa,MAAM,CAAC,EAClCM,EAAaH,EAAU,KAAKE,CAAW,EAEvCE,EAAQC,EAAc,QAASV,EAAqBC,CAAS,EAC7DU,EAAUD,EAAc,eAAgBP,EAAiBD,EAAa,MAAM,EAC5EU,EAASC,EAAe,SAAUb,EAAqBO,EAAY,MAAM,EAGzEO,EAAoC,CACxC,CAAC,QAAuB,KAAMN,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMF,CAAI,CACpC,EACA,OAAAU,EAAgB,KAAK,GAAGC,EAA2BhB,EAAYG,EAAcK,CAAW,CAAC,EA4BlF,CACL,KAAM,iBACN,YAAa,CAAC,kBA7B8C,CAAC,OAAQ,MAAM,CA6B5C,EAC/B,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAM,CACF,GACA,gBA9BuBE,GAA+B;AAAA,QAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,EAASC,CAAM,CAAC;AAAA,QAC/CI,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,4BAErDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,kBAE9CD,EAAQ,YAAY,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA,2BAIxBF,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,eAAgB,gBAAiB,UAAU,CAAC;AAAA,oBACjDA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,QAE9CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAY3C,CACF,EAESvB,GAAiCS,GAC1CmB,GAA4B,CAAC,KAAMnB,EAAW,IAAc,CAAC,EAEpDR,GAAiB,CAAC4B,EAAyBpB,IAA+C,CACrG,IAAMD,EAASqB,EAAQ,OACvB/B,GAAeU,CAAM,EACrBqB,EAAQ,QAAQ9B,GAAgC8B,EAAQ,OAAQpB,CAAU,CAAC,CAC7E,ICpGA,IAWMqB,GA0BAC,GAwFOC,GAQAC,GArIbC,GAAAC,EAAA,kBAGAC,KAEAC,KAIAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,UACjCA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,SAC3D,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EASMR,GAAwB,CAACQ,EAA+BC,IAA4C,CACxG,IAAMC,EAASF,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BG,EAASH,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACI,EAAGC,EAAGC,CAAC,EAAIC,GAAS,qBACvBL,EAAQD,EAAW,OAAQE,EAAQF,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGQ,EAAc,CAACJ,EAAGC,CAAC,EACzB,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAMC,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAML,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,OAAsB,KAAML,EAAW,KAAK,EAC/E,CAAC,OAAsB,KAAMA,EAAW,IAAI,CAC9C,EACMW,EAAwD,CAAC,OAAQ,MAAM,EACzEZ,EAAO,SAAW,IACpBW,EAAgB,KAAK,GAAGE,EAA2Bb,
EAAO,CAAC,EAAE,IAAI,CAAC,EAClEY,EAAkB,KAAK,MAAM,GAE/BD,EAAgB,KAAK,GAAGE,EAA2BL,CAAW,CAAC,EAE/D,IAAMM,EAAmBC,GAA+B,CACtD,IAAIC,EAAO,GACPf,EAAW,QAAUA,EAAW,OAClCe,EAAO,0DACEf,EAAW,QAAU,CAACA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAUA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAU,CAACA,EAAW,SAC3Ce,EAAO,2DAGT,IAAMC,EAAiBhB,EAAW,QAAU,EAAI,GAAK,2BAC/CiB,EAAIC,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDoB,EAAID,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDqB,EAAWH,EAAE,KAAK,MACpBI,EAAwB,KACtBC,EAAY,CAACL,EAAGE,CAAC,EACnBpB,EAAO,SAAW,IACpBsB,EAAIH,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAChEuB,EAAU,KAAKD,CAAC,GAElB,IAAME,EAASC,EAAe,SAAUzB,EAAO,CAAC,EAAE,SAAUQ,EAAY,MAAM,EAC9Ee,EAAU,KAAKC,CAAM,EACrB,IAAME,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC/G,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAC1D,EACA,MAAO;AAAA,IACPX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA;AAAA,IAEtER,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kBAK9DM,CAAQ;AAAA;AAAA,QAElBL,CAAI;AAAA;AAAA;AAAA,MAGNC,CAAc;AAAA,OACb,IACGK,GAAK,KACA,iBAAiBA,EAAE,2BAA2B,aAAcE,CAAM,CAAC,cACtEH,CAAQ,qBAAqBC,EAAE,YAAY,SAAS,CAAC,IAEpD,IACN,CAAC;AAAA;AAAA,IAGN,EAEA,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGrB,EAAW,QAAQ,GAAI,kBAAAW,CAAiB,EAC/D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAUR,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKS,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAG,CACF,CACF,EAEarB,GAAuBQ,GAAwD,CAC1F,IAAM0B,EAAS1B,EAAW,OACpB2B,EAAS3B,EAAW,OACpB4B,EAAQ5B,EAAW,MACnB6B,EAAO7B,EAAW,KACxB,MAAO,CAAC,OAAA0B,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,SAAU,GAAG7B,EAAW,MAAM,IAAIA,EAAW,MAAM,IAAIA,EAAW,QAAU,CAAC,EAAE,CACtH,EAEaP,GAAO,CAACqC,EAAyB9B,IAAqC,CACjFV,GAAewC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAAsBuC,EAAQ,OAAQ9B,CAAU,CAAC,CACnE,ICxIA,IAaM+B,GAGAC,GAiOOC,GAGPC,GAEAC,GA0COC,GA2BAC,GA3TbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMf,GAAW,CAACgB,EAA+BC,IAC5CD,EAAO,OAASC,GAAOD,EAAOC,CAAC,EAAE,KAAK,OAAS,GAAOC,EAAU,KAAKF,EAAOC,CAAC,EAAE,IAAI,EAAK,EAAID,EAAOC,CAAC,EAAI,OAEvGhB,GAAiB,CAACe,EAA+BG,IAAoD,CACzG,IAAMC,EAAQJ,EAAO,CAAC,EAChBK,EAAMrB,GAASgB,EAAQ,CAAC,EACxBM,EAAQtB,GAASgB,EAAQ,CAAC,EAC1BO,EAAOvB,GAASgB,EAAQ,CAAC,EACzBQ,EAAiBxB,GAASgB,EAAQ,CAAC,EACnCS,EAAuBzB,GAASgB,EAAQ,CAAC,EACzCU,EAAU1B,GAASgB,EAAQ,CAAC,EAC5BW,EAAY3B,GAASgB,EAAQ,CAAC,EAoCpC,GAAII,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMQ,EAAe,GACfC,EAAYT,EAAM,KAAK,CAAC,EACxBU,EAAiBV,EAAM,KAAK,CAAC,EAC7BW,EAAaX,EAAM,KAAK,SAAW,EAAKQ,EAAeR,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3EY,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaZ,EAAW,QAAQ,EAC5D,GAAIO,GAAWC,EAAW,CACxB,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIA,EAAQ,KAAK,CAAC,IAAMG,GAAaH,EAAQ,KAAK,CAAC,IAAMP,EAAW,UAAYO,EAAQ,KAAK,CAAC,IAAMS,EAClG,MAAM,IAAI,MAAM,iFAAiF,EAEnG,GAAIR,EAAU,KAAK,CAAC,IAAME,GAAaF,EAAU,KAAK,CAAC,IAAMR,EAAW,UACpEQ,EAAU,KAAK,CAAC,IAAMQ,EACxB,MAAM,IAAI,MAAM,mFAAmF,EAErG,GAAIT,EAAQ,KAAK,CAAC,IAAMC,EAAU,KAAK,CAAC,EACtC,MAAM,IAAI,MAAM,gFAAgF,EAElG,GAAIA,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEvEM,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,CACpC,SAAWA,GAAWC,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIS,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,E
AAI,KAAK,SAAW,EAAG,CACzB,GAAIA,EAAI,KAAK,CAAC,IAAMD,EAAM,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,6DAA6D,EAE/EgB,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMc,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIb,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMc,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GC,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,GAAIb,EAAM,CACR,GAAIA,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8CAA8C,EAGhE,GAAID,GACEF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,CAAC,IAAM,EAC/C,MAAM,IAAI,MAAM,oCAAoC,CAG1D,CAEA,IAAIiB,IACJ,GAAIb,EAAgB,CAClBa,EAAW,EACX,IAAMC,EAAWd,EAAe,KAUhC,MATIc,EAAS,SAAW,EAClBA,EAAS,CAAC,IAAMT,EAClBQ,EAAW,EACFC,EAAS,CAAC,IAAM,EAAIT,EAAY,IACzCQ,EAAW,GAEJC,EAAS,SAAW,GAAKA,EAAS,CAAC,IAAMT,GAAaS,EAAS,CAAC,IAAMN,IAC/EK,EAAW,GAETA,IAAa,EACT,IAAI,MAAM,0FAA0F,EAEtG,IAAI,MAAM,oBAAoB,CACtC,CAEA,IAAIE,EAAe,GACfC,EAAcT,EAClB,GAAIT,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FkB,EAAclB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGkB,EAAclB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CiB,EAAe,EACjB,CACF,CAEA,IAAME,EAAsBR,EAAqBD,EAC3CU,EAAsB,GAE5B,GAAIlB,EACF,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIC,EAAsB,CACxB,GAAIA,EAAqB,KAAK,SAAW,EACvC,MAAM,IAAI,MAAM,iEAAiE,EAEnF,GAAKA,EAAqB,KAAK,CAAC,IAAMI,GAAaJ,EAAqB,KAAK,CAAC,IAAM,GAChFA,EAAqB,KAAK,CAAC,IAAMN,EAAW,UAAYM,EAAqB,KAAK,CAAC,IAAMK,GACzFL,EAAqB,KAAK,CAAC,IAAMgB,EACnC,MAAM,IAAI,MAAM,2FAA2F,CAE/G,CAEA,MAAO,CACL,UAAAZ,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAS,EACA,kBAAAP,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAS,EACA,SAAAL,EACA,UAAW,KAAK,MAAMK,EAAcrB,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAuB,EACA,aAAAH,EACA,UAAAH,CACF,CACF,EAEalC,GAAqCiB,GAC9CwB,GAA4B,CAAC,GAAGxB,CAAU,CAAC,EAEzChB,GAAgDwC,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhGvC,GACF,CAACwC,EAAyBC,EAAiBtB,EAAkBM,EAAmBC,EAC/EC,EAAoBe,IAAuB,CAC1C,IAAMC,EAAc,CAAClB,EAAWC,EAAgBC,CAAU,EACpDiB,EAAa9B,EAAU,KAAK6B,CAAW,EACvCE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,QAAuB,KAAMF,CAAU,EACnF,CAAC,QAAuB,KAAMf,CAAU,CAC1C,EAEMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,gBAAiBR,EAAI,SAAUE,CAAW,EAClEO,EAAWC,EAAc,MAAOV,EAAI,SAAUE,CAAW,EACzDS,EAAYD,EAAc,OAAQhC,EAAK,SAAUwB,CAAW,EAE5DU,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,CAC3G,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBH,EAAUE,EAAWJ,CAAM,CAAC;AAAA,IACrFD,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,IAK1E,EAEA,OAAOP,EAAQ,QACX,CACE,KAAM,4BACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACjD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMG,EAAa,SAAUF,EAAI,SAAU,aAAgC,CAAC,EACvF,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAC,CACF,EACA,CAAC,OAAQ,CAACL,EAAKtB,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC7C,EAESlB,GACT,CAACuC,EAAyBf,EAAmB6B,EAAkB5B,EAAwBK,EACtFwB,EAAmBpC,EAAmBuB,IAAwB,CAG7D,IAAIc,EAAgBD,EACpB,GAAKpC,EAOE,CACL,GAAIO,IAAmB,EACrB,MAAM,IAAI,MAAM,mFAA
mF,EAEnG,OAAA8B,EACIxD,GAAiBwC,EAASe,EAAOpC,EAAMM,EAAWC,EAAgB4B,EAAWvB,EAAUW,CAAW,EACtGc,EAAgBA,EAAc,QAAQ,CAAC/B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,EAC9ES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAEnD,KAjBE,QAAID,EAAM,KAAK,SAAW,IACxBC,EAAgBD,EAAM,QAAQ,CAAC9B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,GAExES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAanD,EAEStD,GAAqB,CAACsC,EAAyBzB,IAAqC,CAC/F,IAAM2C,EAAS7D,GAAe2C,EAAQ,OAAQzB,CAAU,EAClDC,EAAQwB,EAAQ,OAAO,CAAC,EACxBvB,EAAMrB,GAAS4C,EAAQ,OAAQ,CAAC,EAChCtB,EAAQtB,GAAS4C,EAAQ,OAAQ,CAAC,EAClCrB,EAAOvB,GAAS4C,EAAQ,OAAQ,CAAC,EACjCpB,EAAiBxB,GAAS4C,EAAQ,OAAQ,CAAC,EAC3CnB,EAAuBzB,GAAS4C,EAAQ,OAAQ,CAAC,EACjDlB,EAAU1B,GAAS4C,EAAQ,OAAQ,CAAC,EACpCjB,EAAY3B,GAAS4C,EAAQ,OAAQ,CAAC,EAC5C,GAAIxB,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIC,GAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8BAA8B,EAIhD,IAAM0C,EAAS1C,GAAOC,GAASD,EAAI,KAAK,SAAW,GAAKC,EAAM,KAAK,SAAW,EAExE0C,EAAI3D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAU1C,EAAOG,EAAM,CAAC,EAEtG,GAAIwC,EACF,OAAOE,GACHrB,EAASoB,EAAG3C,EAAKC,EAAOE,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAC7F3C,CAAU,EAEhB,GAAI,CAACE,GAAO,CAACC,EACX,MAAM,IAAI,MAAM,gCAAgC,EAElD,IAAM4C,EAAI7D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,SAAUzC,EAAKE,EAC3FuC,EAAO,UAAU,EAEfK,EAAI9D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,UAAWxC,EAAOC,EAC9F,EAAIuC,EAAO,UAAU,EAEzBG,GACIrB,EAASoB,EAAGE,EAAGC,EAAG3C,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAAQ3C,CAAU,CAC/G,ICrWA,IAUMiD,GAIAC,GAyBAC,GAUOC,GAuCAC,GAxFbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMT,GAAcU,GAChB,MAAM,KAAKA,EAAkB,iBAAiB,EAAG,MAAM,EAGrDT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,IAChEA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,oCAAoC,EAKtD,GAFmCX,GAAWW,EAAO,CAAC,CAAC,EAE3C,SAAWA,EAAO,CAAC,EAAE,KAAK,OACpC,MAAM,IAAI,MAAM,uFAAuF,CAE3G,EAEMT,GAAiB,CAACU,EAA+BC,IAAkD,CACvG,IAAMC,EAAwB,CAAC,EAE/B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQ,EAAEG,EACvCD,EAAY,KAAKF,EAAWG,CAAC,EAAIF,EAAQE,CAAC,CAAC,EAG7C,OAAOD,CACT,EAEaX,GAAwB,CAACQ,EAA+BK,IAAkC,CACrG,IAAMJ,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAA6BG,GAAgBhB,GAAWW,EAAO,CAAC,CAAC,EACjEG,EAAcZ,GAAeU,EAAYC,CAAO,EAChDI,EAAaC,EAAU,KAAKJ,CAAW,EAEvCK,EAAWR,EAAO,CAAC,EAAE,SACrBS,EAAQC,EAAc,QAASF,EAAUP,EAAW,MAAM,EAC1DU,EAASC,EAAe,SAAUJ,EAAUL,EAAY,MAAM,EAE9DU,EAAmBC,GAA+B;AAAA,2BAC/BL,EAAM,QAAQ,GAAGR,CAAU,CAAC;AAAA,QAC/Ca,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAOE,CAAM,CAAC;AAAA,QAClFG,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACrDH,EAAO,gBAAgB,YAAY,CAAC;AAAA,2BACtCF,EAAM,KAAK,OAAO;AAAA,4BACjBR,EAAW,MAAM;AAAA,4BACjBQ,EAAM,WAAW,uBAAwB,GAAG,CAAC;AAAA,gCACzCE,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,UAE9DF,EAAM,WAAW,gBAAiB,IAAK,iBAAiB,CAAC;AAAA;AAAA,QAE3DE,EAAO,YAAY,aAAcF,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,OAG3E,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGP,CAAO,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC7D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKM,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGS,EAA2Bf,EAAO,CAAC,EAAE,KAAMG,CAAW,CAAC,CAC5G,GACA,gBAAAU,CACF,CACF,EAEapB,GAAQuB,GAAkC,CACrD1B,GAAe0B,EAAQ,MAAM,EAC7BA,EAAQ,QAAQxB,GAAsBwB,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACtE,IC3FA,IAeaC,GA6KPC,GAuGOC,GAGPC,GAEAC,GA+BOC,GAvUbC,GAAAC,EAAA,kBAG
AC,KAEAC,KACAC,KAGAC,KACAC,KACAC,KACAC,KACAC,KAEaf,GAAiB,CAACgB,EAA+BC,IAAoD,CAChH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAMH,EAAO,CAAC,EACdI,EAAQJ,EAAO,CAAC,EAChBK,EAAUL,EAAO,CAAC,EAClBM,EAAYN,EAAO,CAAC,EA+B1B,GAAIE,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMK,EAAe,GACfC,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAaR,EAAM,KAAK,SAAW,EAAKK,EAAeL,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3ES,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaT,EAAW,QAAQ,EACtDc,EAAaV,GAAWA,EAAQ,KAAK,SAAW,EAChDW,EAAeV,GAAaA,EAAU,KAAK,SAAW,EAEtDW,EAAe,GACrB,GAAIF,GAAcC,EAAc,CAC9B,GAAIX,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIC,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEnEW,GAEFL,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,IAGlCO,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,EAEtC,SAAWU,GAAcC,EACvB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIE,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAID,EAAM,KAAK,CAAC,EAAIC,EAAI,KAAK,CAAC,IAAM,EAClC,MAAM,IAAI,MAAM,sDAAsD,EAExEe,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMW,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIV,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMW,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GI,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,IAAMC,IACFC,EAAe,GACfC,EAAcX,EAClB,GAAIN,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FiB,EAAcjB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGiB,EAAcjB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CgB,EAAe,EACjB,CACF,CACA,IAAME,EAAsBV,EAAqBD,EAC3CY,EAAsB,GAE5B,MAAO,CACL,UAAAf,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAW,EACA,kBAAAT,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAW,EACA,SAAAP,EACA,UAAW,KAAK,MAAMO,EAAcpB,EAAW,UAAW,EAC1D,SAAUA,EAAW,SACrB,WAAYA,EAAW,WACvB,MAAOA,EAAW,SAAWA,EAAW,WACxC,uBAAwB,GACxB,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAsB,EACA,aAAAH,EACA,UAAAF,EACA,aAAAD,CACF,CACF,EAEMhC,GACF,CAACuC,EAAeC,EAAyBC,EAAoBC,IAA6C,CACxG,IAAMC,EAAc,CAACD,EAAO,UAAWA,EAAO,oBAAqBA,EAAO,WAAaA,EAAO,QAAQ,EAChGE,EAAY,EACZC,EAAaC,EAAU,KAAKH,CAAW,EAAIC,EAC3CG,EAAwBL,EAAO,oBAC/BM,EAASC,EAAe,aAAcR,EAAUE,EAAY,OAAQC,CAAS,EAC7EM,EAASC,EAAc,SAAUZ,EAAE,SAAUA,EAAE,KAAK,OAAQK,CAAS,EACrEQ,EAASZ,EAAIW,EAAc,UAAWX,EAAE,SAAUA,EAAE,KAAK,OAAQI,CAAS,EAAI,OAE9ES,EAAI,KAAK,KAAKX,EAAO,SAAWE,CAAS,EACzCU,EAAW,CAAC,EAAGP,EAAuB,EAAGR,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,EAExDgB,EAAwDf,EAAI,CAAC,OAAQ,MAAM,EAAI,CAAC,MAAM,EAEtFgB,EAAoC,CACxC,CAAC,QAAuB,KAAMX,CAAU,EAAG,CAAC,QAAuB,KAAMH,EAAO,kBAAkB,EAClG,CAAC,QAAuB,KAAMA,EAAO,gBAAgB,EACrD,CAAC,QAAuB,KAAMA,EAAO,mBAAmB,CAC1D,EAEM3B,EAAS,CAACmC,CAAM,EAClBE,GACFI,EAAgB,KACZ,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2BjB,EAAG,IAAI,EAC5E,GAAGiB,EAA2Bd,CAAW,CAAC,EAC9C5B,EAAO,KAAKqC,CAAM,GAEl
BI,EAAgB,KAAK,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2Bd,CAAW,CAAC,EAExG,IAAMe,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,EACxG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CACtC,EAEMC,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sDAOVC,EAAS;AAAA;AAAA;AAAA;AAAA,qDAKTC,EAAYrB,EAAI;AAAA,UAClBmB,CAAO;AAAA;AAAA,UAEPC,CAAM;AAAA,WAEY;AAAA,YAChBA,CAAM;AAAA,WAINE,EAAmBC,GAA+B;AAAA;AAAA,IAE1DA,EAAa,iBAAiBL,CAAQ,EAAE,iBAAiB,GAAG3C,EAAQiC,CAAM,CAAC;AAAA,IAC3Ee,EAAa,UAAU,CACnBV,EAAGX,EAAO,WAAa,CACzB,CAAC,CAAC;AAAA,MACFqB,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,oBAC5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKlCN,EAAO,UAAW;AAAA,cAC1BW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKKX,EAAO,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAajCmB,CAAS;AAAA,KAGT,MAAO,CACL,KAAM,gBACN,YAAa,CAAC,KAAM,GAAGnB,EAAO,UAAW,GAAGW,CAAC,GAAG,CAAC,CAACb,CAAC,GAAI,kBAAAe,CAAiB,EACxE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMZ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAea,EACf,gBAAAE,CACF,GACA,gBAAAM,CACF,CACF,EAES7D,GAAsCe,GAC/CgD,GAA4B,CAAC,GAAGhD,CAAU,CAAC,EAEzCd,GAAgD8D,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhG7D,GACF,CAAC8D,EAAyBC,EAAmBC,EAA8BzB,EAC1E0B,IAAwB,CACvB,IAAIC,EAAgBH,EACdI,EAAW5B,EAAO,WAClB6B,EAAQ7B,EAAO,MACrB,OAAIwB,EAAM,KAAK,SAAW,GAAKxB,EAAO,mBAAqB,IACzD2B,EAAgBH,EAAM,QAAQ,CAACxB,EAAO,UAAWA,EAAO,iBAAkB4B,EAAU5B,EAAO,QAAQ,CAAC,GAGlGyB,EACFE,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAeF,EAAQE,EAAc,SAAU3B,CAAM,EAC7E,CAAC,OAAQ,CAAC2B,EAAeF,CAAM,EAAG,QAAS,CAACzB,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAE3FC,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAe,OAAWA,EAAc,SAAU3B,CAAM,EAChF,CAAC,OAAQ,CAAC2B,CAAa,EAAG,QAAS,CAAC3B,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjFG,IAAU,IACZF,EAAgBJ,EAAQ,QACpBO,GAAsB,CAACH,CAAa,EAAG,CAAC,EAAG,EAAG,EAAGE,CAAK,CAAC,EAAG,CAAC,OAAQ,CAACF,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EACzGA,EACIA,EAAc,QAAQ,CAAC3B,EAAO,UAAWA,EAAO,oBAAqB4B,EAAWC,EAAO7B,EAAO,QAAQ,CAAC,GAGtGuB,EAAQ,QACXQ,GAA2BJ,EAAenE,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACmE,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CACjD,EAESjE,GAAsB,CAAC6D,EAAyBjD,IAAqC,CAChG,IAAM0B,EAAS3C,GAAekE,EAAQ,OAAQjD,CAAU,EACxD,GAAIiD,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpC,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIA,EAAQ,OAAO,CAAC,GAAG,KAAK,SAAW,EACrC,MAAM,IAAI,MAAM,8BAA8B,EAGhD,IAAMS,EAAIC,GACNV,EAASvB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAUuB,EAAQ,OAAO,CAAC,EAAG,OACvG,CAAC,EACC7C,EAAU6C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OACzF5C,EAAY4C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OAC3FW,EAAIzE,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG7C,EAASsB,EAAQ,CAAC,EAChFmC,EAAI1E,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG5C,EAAWqB,EAAQ,CAAC,EACxFoC,GAAeb,EAASS,EAAGE,EAAGC,EAAG,OAAW,OAAW,OAAW,OAAW,OAAWnC,EAAQ1B,CAAU,CAC5G,ICzVA,IAeM+D,GAwGAC,GAwHAC,GAoDOC,GAnSbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAOMR,GACF,CAACS,EAA+BC,IAAoD,CAClF,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdE,EAAO,EACPC,EAAYC,EAAU,gBAAgBJ,EAAQE,CAAI,EAClDG,EAAWD,EAAU,kBAAkBJ,EAAQE,CAAI,EACnDI,EAAaC,GAAiBF,CAAQ,EACtCG,EAAiBH,EAAWC,EAC5BG,EAAa,CAACT,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGQ,CAAc,EAClDE,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EC,EACF,CAAC,CAAC,QAAuB,KAAMN,CAAQ,EAAG,CAAC,QAAuB,KAAMG,CAAc,CAAC,EAC3FG,EAAgB,KAAK,GAAGC,EAA2BH,EAAYA,CAAU,CAAC,EAE1E,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAKlB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACxEW,EAAQD,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEoB,EAAOF,EAAc,OAAQlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/DqB,EAASC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACnFe,EAAY,CAACN,EAAGE,
EAAOC,EAAMC,CAAM,EACnCG,EAAWP,EAAE,KAAK,MAClBQ,EAAUjB,IAAe,EAAI,MAAQ,MAAMA,CAAU,QACrDkB,EAAgB,GAEhBC,EAA8B,CAAC,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CAAC,EAC3G,MAAO;AAAA;AAAA;AAAA,2CAG4BF,CAAO,KAAKC,CAAa;AAAA,0BAC1CA,CAAa;AAAA,IACnCV,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGJ,CAAS,CAAC;AAAA,IACtEP,EAAa,UAAUU,CAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOrBD,CAAO;AAAA;AAAA,4BAECA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAahDW,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKhDiB,CAAO;AAAA;AAAA,yBAEEA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,OAAOQ,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAcpDG,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,mFAIYP,EAAW,OAAO;AAAA,yCAC5DkB,EAAM,YAAY,SAAS,CAAC;AAAA,6BACxCC,EAAK,YAAY,SAAS,CAAC;AAAA;AAAA,oBAEpCH,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,MAAMO,CAAQ,IAAIC,CAAO,qBAAqBD,CAAQ,IAC5FC,CAAO;AAAA,QACXJ,EAAO,IAAI,QAAS,UAAW,IAAK,OAAO,CAAC;AAAA;AAAA,IAG9C,EACA,MAAO,CACD,KAAM,wBAEV,YAAa,CAAC,KAAM,GAAGpB,EAAW,OAAO,IAAIO,CAAU,GAAI,kBAAAI,CAAiB,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAGK,CAAS,EAC5B,gBAAAQ,CACF,GACA,gBAAAE,CACF,CACF,EAEEvB,GACF,CAACqC,EAAyBC,EAAmBX,EAAmBC,EAAkBW,EAAWC,EAAWC,EACvGC,IAAoB,CACnB,IAAM1B,EAAaC,GAAiBwB,CAAC,EAC/BE,EAAK,GAGLC,EAAa5B,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC5D6B,EAAc7B,IAAe,EAAI,MAAQ,MAAMA,CAAU,IACzD8B,EAAiB,CAACC,EAAcC,IAAiB,GAAGJ,CAAU,IAAIG,CAAI,KAAKC,CAAI,IAC/EC,EAAcV,EAAIE,EAAIzB,EACtBkC,EAAS,KAAK,KAAKV,EAAIG,CAAE,EAEzBQ,EAA4D,CAAC,MAAM,EACnEC,EAAwC,CAC5C,CAAC,QAAuB,KAAMF,CAAM,EAAG,CAAC,QAAuB,KAAMV,CAAC,EACtE,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAMwB,EAAIC,EAAIzB,CAAU,CAAC,CAC9D,EAEMqC,EAAuB7B,GAA+B,CAC1D,IAAM8B,EAAc5B,EAAc,QAASY,EAAM,SAAUA,EAAM,KAAMtB,CAAU,EACjF,MAAO;AAAA,IACXQ,EAAa,iBAAiB8B,CAAW,CAAC;AAAA,kEACoBV,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAUmB,CAAE,CAAC;AAAA,4CACcA,CAAE;AAAA,+CACCA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAQjCY,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA;AAAA,sBAE9B6B,CAAW;AAAA;AAAA;AAAA;AAAA,2BAINC,EAAe,MAAO,YAAY,CAAC;AAAA,IAExD,EAEMU,EAAanB,EAAQ,QACvB,CACE,KAAM,0BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAmBmC,CAAqB,EAC7E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACZ,EAAGE,EAAGE,EAAI,CAAC,EAAG,UAAwB,CAChD,EACA,cAAe,CAAC,EAAGJ,EAAIE,EAAIzB,CAAU,EACrC,gBAAiBoC,CACnB,GACA,gBAAiBC,CACnB,EACA,CAAC,OAAQ,CAACf,CAAK,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjCjB,EAAoC,CACxC,CAAC,QAAuB,KAAM4B,CAAW,EAAG,CAAC,QAAuB,KAAMT,CAAC,EAC3E,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAM2B,EAAKF,EAAIzB,CAAU,CAAC,CAC/D,EACMI,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EG,EAAmBC,GAA+B,CACtD,IAAMiC,EAAc/B,EAAc,QAASC,EAAM,SAAUA,EAAM,KAAMX,CAAU,EAC3E0C,EAAahC,EAAc,OAAQE,EAAK,SAAUA,EAAK,KAAMZ,CAAU,EAC7E,MAAO;AAAA,2DAC4C4B,CAAU;AAAA,2DACVa,EAAY,KAAK,OAAO;AAAA,0DACzBC,EAAW,KAAK,OAAO;AAAA,kEACfd,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,wBAAwB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKlE+B,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA,mCACjB2B,CAAE;AAAA,gEAC2BA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+DAMHD,CAAO;AAAA,qCACjCG,CAAW;AAAA,yBACvBA,CAAW;AAAA;AAAA,2BAETC,EAAe,eAAgB,cAAc,CAAC;AAAA,IAEnE,EACA,OAAOT,EAAQ,QACX,CACE,KAAM,uCAEN,YAAa,CAAC,KAAM,GAAGrB,CAAU,IAAI0B,CAAO,GAAI,kBAAAtB,CAAiB,EACjE,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACmB,EAAGE,EAAG,CAAC,EAAG,UAAwB,CAC5C,EACA,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAc,EAAuB,CAAC,EACnE,gBAAA5B,CACF,GACA,gBAAAE,CACF,EACA,CAAC,O
AAQ,CAACiC,EAAY7B,EAAOC,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC3D,EAEE3B,GACF,CAACoC,EAAyB7B,EAA+BC,IAAuC,CAC9F,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdiD,EAAIjD,EAAO,CAAC,EACZkD,EAAIlD,EAAOA,EAAO,OAAS,CAAC,EAC5BmD,EAAI/C,EAAU,kBAAkBJ,EAAQ,CAAC,EAAIkD,EAC7C5C,EAAaC,GAAiB2C,CAAC,EAC/BE,EAAahD,EAAU,KAAKH,CAAW,EAAIK,EAC3CK,EACF,CAAC,CAAC,QAAuB,KAAMwC,CAAC,EAAG,CAAC,QAAuB,KAAM,KAAK,MAAMD,EAAI5C,CAAU,CAAC,CAAC,EAC1FI,EAAwD,CAAC,OAAQ,MAAM,EAEvE2C,EAAoB/D,GAAYqC,EAAS7B,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGmD,EAAGE,EAAGD,EAAGnD,EAAW,OAAO,EACrGc,EAAmBC,GAA+B,CACtD,IAAMQ,EAAWgC,GAA4BxD,EAAO,CAAC,EAAE,QAAQ,EACzDyD,EAAYjD,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC3DkD,EAAgBlD,IAAe,EAAIgB,EAAW,MAAMhB,CAAU,IAAIgB,CAAQ,IAE1EsB,EAAc5B,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMQ,CAAU,EACnFmD,EAAerC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUG,EAAaK,CAAU,EAEzF,MAAO;AAAA,2DAC4CsC,EAAY,KAAK,OAAO;AAAA,gEACnBW,CAAS;AAAA,kEACPE,EAAa,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,IAIvF3C,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kDAMsB0C,CAAa,eAAeA,CAAa;AAAA,IAErF,EACA7B,EAAQ,QACJ,CACE,KAAM,4BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAAI,CAAiB,EACtD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKsD,EAAa,EAAuB,CAAC,EAClE,gBAAAzC,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACf,EAAO,CAAC,EAAGuD,CAAiB,CAAC,CAAC,CAC9C,EAES7D,GAAe,CAACmC,EAAyB5B,IAA6C,CAC7FA,EAAW,SAAW,OACxBR,GAAkCoC,EAASA,EAAQ,OAAQ5B,CAAU,EAErE4B,EAAQ,QAAQtC,GAA8BsC,EAAQ,OAAQ5B,CAAU,CAAC,CAE7E,ICzSA,IAgBM2D,GAMAC,GA6GOC,GAnIbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAQMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,CAE3D,EAEMP,GACF,CAACO,EAA+BC,EAAiCC,IAAqC,CACpG,IAAMC,EAAaF,EAAW,WAExBG,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAAQL,EAAO,CAAC,EAChBM,EAAO,CAACH,GAAcH,EAAO,CAAC,EAE9BO,EAAcH,EACdI,EAAOC,EAAU,cAAcR,EAAW,KAAMG,EAAO,MAAM,EAC7DM,EAAYD,EAAU,gBAAgBL,EAAQI,CAAI,EAClDG,EAAWF,EAAU,kBAAkBL,EAAQI,CAAI,EAEnDI,EAAYH,EAAU,KAAKJ,EAAM,IAAI,EACrCQ,EAAWP,EAAOG,EAAU,KAAKH,EAAK,IAAI,EAAI,EACpD,GAAIM,IAAcD,GAAaL,GAAQO,IAAaF,EAClD,MAAM,IAAI,MAAM,+BAA+BA,CAAQ;AAAA;AAAA,2BAEpCC,CAAS,qBAAqBC,CAAQ,EAAE,EAG7D,IAAMC,EAA6B,CAAC,EACpC,QAASC,EAAI,EAAGA,EAAIX,EAAO,OAAQ,EAAEW,EAC/BA,EAAIP,EACNM,EAAiB,KAAKV,EAAOW,CAAC,CAAC,EAE/BD,EAAiB,KAAK,CAAC,EAG3B,IAAME,EAAaC,GAAiBN,CAAQ,EACtCO,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAoC,CACxC,CAAC,QAAuB,KAAMT,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAQ,EAC/E,CAAC,QAAuB,KAAM,KAAK,MAAMA,EAAWK,CAAU,CAAC,EAC/D,CAAC,OAAsB,KAAMf,EAAW,OAAO,CACjD,EACIK,GACFY,EAAkB,KAAK,MAAM,EAE/B,IAAME,EAAoBlB,EAAc,EAClCmB,EAAkBnB,EAAc,EAEhCoB,EAAmBC,GAA+B,CACtD,IAAMC,EAAWC,GAA4BzB,EAAO,CAAC,EAAE,QAAQ,EACzD0B,EAAY,CAChBC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAU,EACjEW,EAAc,QAAStB,EAAM,SAAUA,EAAM,KAAMW,CAAU,CAC/D,EACIV,GACFoB,EAAU,KAAKC,EAAc,OAAQrB,EAAK,SAAUA,EAAK,KAAMU,CAAU,CAAC,EAE5EU,EAAU,KAAKE,EAAe,SAAU5B,EAAO,CAAC,EAAE,SAAUO,EAAaS,CAAU,CAAC,EAChFI,GACFM,EAAU,KAAKE,EAAe,qBAAoCd,CAAgB,CAAC,EAEjFO,GACFK,EAAU,KAAKE,EAAe,mBAAkCd,CAAgB,CAAC,EAGnF,IAAMe,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAClE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,CAC5E,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA,IACtEH,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,wBAEvDO,GAAW,MAAOd,CAAU,CAAC;AAAA,+BACtBc,GAAW,MAAOd,CAAU,CAAC;AAAA;AAAA;AAAA,oBAGxCe,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA,iBAInDgB,GAAU,cAAehB,CAAU,CAAC;AAAA,oCACjBgB,GAAU,qBAAsBhB,CAAU,CAAC,yBACnEb,EAAa,GAAK,eAAe;AAAA;AAAA;AAAA,uBAGtB4B,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA,uBAChDe,GAAUP,EAAU
R,EAAY,UAAU,CAAC;AAAA,6BACrCU,EAAU,CAAC,EAAE,KAAK,KAAK,cAAcvB,EAAa,GAAK,QAAQ;AAAA,UAClFG,EAAO,KAAKyB,GAAUP,EAAUR,EAAY,SAAS,CAAC,GAAK,EAAE;AAAA;AAAA;AAAA;AAAA,MAIjEI,EAAoB,sCAAwC,EAAE;AAAA,MAC9DC,EAAkB,2CAA6C,EAAE;AAAA,IAEjE,EACMY,EAAU,CAAC,CAAC,KAAM1B,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIoB,GACFa,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAE7DO,GACFY,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAG1D,CACL,KAAM,qBACN,YAAa,CAAC,KAAM,GAAGE,CAAU,IAAId,CAAW,IAAIC,CAAU,GAAI,kBAAAe,CAAiB,EACnF,WAAY,KACP,CAAC,QAAAe,EAAS,cAAe,CAAC,EAAG,KAAK,KAAKvB,EAAY,EAAuB,CAAC,EAAG,gBAAAS,CAAe,GAClG,gBAAAG,CACF,CACF,EAES5B,GAAY,CAACwC,EAAyBjC,IAA0C,CAC3FT,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQzC,GAA2ByC,EAAQ,OAAQjC,EAAYiC,EAAQ,WAAW,CAAC,CAC7F,ICtIA,IAoBMC,GA+BOC,GAqPAC,GAQAC,GAhTbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAWMT,GAAiB,CAACU,EAA+BC,IAA4C,CACjG,GAAID,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,oCAAoC,EAEtD,IAAME,EAAIF,EAAO,CAAC,EACZG,EAAQD,EAAE,KAAK,OACrB,GAAIA,EAAE,KAAKC,EAAQ,CAAC,IAAMF,EAAW,EACnC,MAAM,IAAI,MAAM,wDAAwD,EAE1E,IAAMG,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3FI,EAAWJ,EAAW,UAAY,EAAIA,EAAW,KACjDK,EAAIN,EAAO,CAAC,EAClB,GAAI,CAACO,EAAU,SAASD,EAAE,KAAM,CAACL,EAAW,EAAGG,EAAeC,CAAQ,CAAC,EACrE,MAAM,IAAI,MAAM,6EAA6E,EAG/F,IAAMG,EADSR,EAAO,CAAC,EACI,KAC3B,GAAIO,EAAU,KAAKC,CAAW,IAAMP,EAAW,EAAIG,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAE5C,GAAIJ,EAAO,SAAW,EAAG,CAEvB,IAAMS,EADaT,EAAO,CAAC,EACQ,KAC7BU,EACFT,EAAW,KAAO,EAAKA,EAAW,EAAIG,EAAiBH,EAAW,EAAI,KAAK,OAAOG,EAAgB,GAAK,CAAC,EAC5G,GAAIG,EAAU,KAAKE,CAAe,IAAMC,EACtC,MAAM,IAAI,MAAM,8BAA8B,CAElD,CACF,EAEanB,GACT,CAACS,EAA+BC,EAC/BU,EAAoDC,IAAwD,CAC3G,IAAMC,EAAab,EAAO,CAAC,EAAE,KACvBG,EAAQU,EAAW,OACnBT,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3Fa,EAAYD,EAAWV,EAAQ,CAAC,EAChCY,EAAWd,EAAW,EACtBe,EAAYf,EAAW,EACvBgB,EAAYJ,EAAW,MAAM,EAAGV,EAAQ,CAAC,EACzCe,EAAYX,EAAU,KAAKU,CAAS,EAEpCE,EADWlB,EAAW,UAAY,EAAIA,EAAW,KACpB,EAC7BmB,EAAWpB,EAAO,CAAC,EAAE,SACrBqB,EAAeC,GAAiBR,CAAS,EACzCS,EAAcD,GAAiBrB,EAAW,CAAC,EAC3CuB,EAAcF,GAAiBH,CAAe,EAC9CM,EAAcC,GAAqBN,CAAQ,EAC3CO,EAAsBb,EAAYV,EAAgBqB,EAClDG,EAAwB,KAAK,MAAMhB,EAAiCe,CAAmB,EACvFE,EAA0BzB,GAAiBO,EAAyB,CAAC,GAAKiB,EAAwB,EAClGE,EAAc,CAACD,GAA2BD,GAAyB,EAAKN,GAAiBN,CAAS,EAClGY,GAAyB,GAAMN,GAAiBN,CAAS,GAAK,EAAU,EACA,EACxEe,EAAcd,EAAU,OAAO,CAACH,EAAWE,CAAS,CAAC,EACrDgB,EAAazB,EAAU,KAAKwB,CAAW,EAAID,EAAaT,EAExDY,EAAoCJ,EACtC,CAAC,EACD,CAAC,CAAC,QAAuB,KAAMG,CAAU,EAAG,CAAC,QAAuB,KAAM/B,EAAW,SAAS,CAAC,EAC7FiC,EAAiB,CAAChB,EAAWJ,EAAWC,EAAWQ,CAAW,EAC9DY,EAAS5B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAC5DmC,EAAO,OAAO,GAAI,EAAGhB,EAAkBK,CAAW,EAClDS,EAAgB,KAAK,GAAGG,EAA2BF,CAAc,CAAC,EAClED,EAAgB,KAAK,GAAGG,EAA2BD,CAAM,CAAC,EAC1DF,EAAgB,KAAK,GAAGG,EAA2BpC,EAAO,CAAC,EAAE,IAAI,CAAC,EAC9DA,EAAO,SAAW,GACpBiC,EAAgB,KAAK,GAAGG,EAA2B7B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAE5F,IAAMqC,GAAkB,CAACnB,EAAWJ,EAAWE,EAAYc,CAAU,EACrEG,EAAgB,KAAK,GAAGG,EAA2BC,EAAe,CAAC,EACnE,IAAMC,GAAmBC,IAA+B,CACtD,IAAMC,GAAYN,EAAe,OAC3BhC,EAAIuC,EAAc,IAAKzC,EAAO,CAAC,EAAE,SAAUwC,GAAWjB,CAAW,EACjEjB,GAAImC,EAAc,OAAsBN,EAAO,OAAQX,CAAW,EAClEkB,GAASD,EAAc,SAAUzC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC1E2C,GAAiB,CAACzC,EAAGI,GAAGoC,EAAM,EAC9BE,GACF5C,EAAO,SAAW,EAAIyC,EAAc,iBAAgCzC,EAAO,CAAC,EAAE,KAAK,MAAM,EAAI,OAC7F4C,IACFD,GAAe,KAAKC,EAAU,EAEhC,IAAMC,GAAaR,GAAgB,OAC7BS,GAASC,EAAe,SAAU/C,EAAO,CAAC,EAAE,SAAU6C,GAAYf,CAAU,EAC5EkB,GAA8B,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACpG5B,EAAW6B,GAA4BjD,EAAO,CAAC,EAAE,QAAQ,EAEzDkD,IAAe,IAAM,CACzB,OAAQ3B,EAAa,CACnB,IAAK,GACH,MAAO,SAASH,CAAQ,OAC1B,IAAK,GACH,MAAO,UAAUA,C
AAQ,IAC3B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,QACE,MAAM,IAAI,MAAM,GAAGG,CAAW,8BAA8B,CAChE,CACF,GAAG,EAEG4B,GAAkB;AAAA,yCACShC,CAAe,aAAaK,CAAW;AAAA,YACpElB,GAAE,WAAW,YAAa,IAAK,MAAM,CAAC;AAAA,yBACzBA,GAAE,aAAa,WAAW,CAAC;AAAA,qCACfkB,CAAW;AAAA,iCACfA,IAAgB,EAAI,SAAW,kBAAkB;AAAA;AAAA;AAAA;AAAA,uCAI3C0B,EAAW,IACtC,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,GAAGjC,CAAQ,kBAAkBiC,EAAC,OAAOjC,CAAQ,kBAAkBiC,EAAC,IAAI,EACjG,KAAK,IAAI,CAAC;AAAA,0CACe,IAC5B9B,IAAgB,EACX,GAAG2B,EAAW,IACjB,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,uBAAuBA,EAAC,yBAAyB,EAAE,KAAK,IAAI,CAAC,KAE5F,yBAAyBH,EAAW,IAAI,MAAM,CAAC,EAAE,KAAK,YAAY,EAAE,KAAK,GAAG,CAAC,eAErF,CAAC;AAAA;AAAA,uCAE2BrB,EAA0Bf,EAAYO,CAAY;AAAA,gBACzEnB,EAAE,WAAW,YAAasC,GAAY,EAAGX,EAA0B,IAAM,SAASR,CAAY,MAAM,CAAC;AAAA,gBACrGnB,EAAE,WAAW,YAAasC,GAAY,EAAG,aAAa,CAAC;AAAA,mCACpCtC,EAAE,gBAAgB,WAAW,CAAC;AAAA,4BACrCgD,EAAW;AAAA,yCACE,EAAI3B,CAAW;AAAA,8BAC1BrB,EAAE,YAAY,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG3C2B,EAA0B,gDAAkD,kBAAkB,GAClGC,EAAa,EAAI,MAAQ,EAAE,OAC3B,MACK,KACG,CAAC,OAAQ,EAAIP,CAAW,EACxB,CAAC6B,GAAGC,KAAM,GACN9B,IAAgB,EAAI,UAAU8B,EAAC,4BAA4BA,EAAC,IACxC,cAAcA,EAAC,2BAA2BA,EAAC,IAAI,EAAE,EAC5E,KAAK,KAAK,CAAC;AAAA;AAAA,6BAEC,EAAI9B,CAAW;AAAA;AAAA,WAG9B+B,GAAuBV,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKlBA,GAAW,YAAY,kBAAkB,CAAC;AAAA,aAExB,GAE1C,OAAOf,EAA0B;AAAA,iDACQiB,GAAO,KAAK,KAAK,KAAKhC,EAAYV,CAAa;AAAA,UACtFmC,GAAa,iBAAiB,GAAGI,GAAgBG,EAAM,CAAC;AAAA,UACxDP,GAAa,UAAU,CACvBnC,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA,2BACiBF,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAI7BA,EAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,qCAEd4B,CAAU;AAAA,sDACOA,CAAU;AAAA,gBAEnBc,GAAa;AAAA,mDACPxC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yCAMvBwC,GAAW,YAAY,uBAAuB,CAAC,8BAC9B,EAAE;AAAA,6BAC/BtC,GAAE,KAAK,OAAO;AAAA,cAC7BA,GAAE,WAAW,YAAa,IAAK,6BAA6B,CAAC;AAAA;AAAA,+DAEZF,CAAa;AAAA,0BAClDsC,GAAO,YAAY,cAAc,CAAC;AAAA;AAAA,+BAE7BtB,CAAQ,IAAIwB,GAAa,2BAA6B,CAAG;AAAA,cAC1EtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA,6CACRL,EAAW,UAAYsB,CAAW;AAAA,yDACtBT,CAAS;AAAA,cACpDqC,EAAe;AAAA;AAAA;AAAA,gCAGGL,GAAO,KAAK,OAAO;AAAA,2CACR,KAAK,KAAKhC,EAAYV,CAAa,CAAC;AAAA,YACnE0C,GAAO,WAAW,iBAAkB,IAAK,OAAO,CAAC;AAAA,YACjDA,GAAO,WAAW,iBAAkBD,GAAa,EAAG,KAAK,CAAC;AAAA,YAC1DC,GAAO,WAAW,iBAAkBD,GAAa,EAAG,kCAAkC,CAAC;AAAA,gCACnEC,GAAO,gBAAgB,gBAAgB,CAAC;AAAA;AAAA;AAAA,wBAGhDhC,CAAS;AAAA,kCACCgC,GAAO,KAAK,KAAK,MAAMA,GAAO,KAAK,KAAK;AAAA;AAAA,0CAEhC1C,CAAa;AAAA;AAAA,6CAEVU,CAAS;AAAA;AAAA,gBAEtCgC,GAAO,YAAY,gBAAiB,cAAc,CAAC;AAAA,iCAClC9B,EAAYc,CAAU;AAAA;AAAA;AAAA,WAId;AAAA,UAC/BS,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGL,GAAgBG,EAAM,CAAC;AAAA,UACnFP,GAAa,UAAU,CAAC;AAAA,YACtBA,GAAa,sCAAsC,sBAAsB,CAAC;AAAA,qCACjDO,GAAO,KAAK,KAAK,KAAKzB,CAAY;AAAA,iCACtCyB,GAAO,gBAAgB,YAAY,CAAC;AAAA,sBAC/CA,GAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,sBACnDC,GAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,2BAC9C3C,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAKI0C,GAAa;AAAA,8CACZd,CAAU,QAAQ1B,CAAa;AAAA;AAAA,uCAEtCwC,GAAW,YAAY,kBAAkB,CAAC;AAAA,yEAEvB,EAAE;AAAA,oCACxBxC,EAAgB0B,CAAU;AAAA,2BACnCxB,GAAE,KAAK,OAAO;AAAA,qCACJwB,CAAU;AAAA,cACjCxB,GAAE,WAAW,YAAa,IAAK,SAASwB,CAAU,MAAM,CAAC;AAAA;AAAA,+CAExB1B,CAAa;AAAA;AAAA,4BAEhCsC,GAAO,YAAY,aAAa,CAAC;AAAA;AAAA,iCAE5BtB,CAAQ,IAAIwB,GAAa,qDAAuD,CAAG;AAAA,gBACpGtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,gBAEvC6C,EAAe;AAAA;AAAA,gBAEfG,EAAoB;AAAA,sDACkB/B,CAAW;AAAA;AAAA;AAAA,cAIpBqB,GAAa;AAAA,kBACxCU,EAAoB;AAAA,iBAEoB,EAAE;AAAA;AAAA,wCAEpBjC,CAAY;AAAA,gBACpCyB,GAAO,WAAW,iBAAkBD,GAAa,EAAG,GAAGxB,CAAY,YAAY,CAAC;AAAA,gBAChFyB,GAAO,aAAa,iBAAkB,kBAAkB,CAAC;AAAA;AAAA,UAGnE,EACA,MAAO,CACL,KAAMjB,EAA0B,uBAAyB,cACzD,YAAa,CACX,KAAM,GAAG5B,EAAW,QAAQ,IAAIa,CAAS,IAAIM,CAAQ,IAAIpB,EAAO,
MAAM,GACtE,kBAAmB,MAAMA,EAAO,MAAM,EAAE,KAAK,MAAM,CACrD,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM+B,EAAa,SAAAX,CAAQ,CAAC,EACvC,KAAMS,EAA0B,uBAAyB,cACzD,cAAeA,EAA0B,CAAC,EAAG,EAAG,EAAG,KAAK,KAAKb,EAAYc,CAAU,EAAG,EAAGZ,CAAS,EACzD,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAC5F,gBAAAC,CACF,GACA,gBAAAK,EACF,CACF,EAES9C,GAAc,CAAC+D,EAAyBtD,IAA4C,CAC/FX,GAAeiE,EAAQ,OAAQtD,CAAU,EACzC,IAAMU,EAAqD4C,EAAQ,4BAA4B,EACzF3C,EAAiC2C,EAAQ,kCAAkC,EACjFA,EAAQ,QAAQhE,GACZgE,EAAQ,OAAQtD,EAAYU,EAA0BC,CAA8B,CAAC,CAC3F,EAEanB,GAA8BQ,GACvCuD,GAA4BvD,CAAsE,ICjTtG,IAiBMwD,GAmBAC,GA0BAC,GA2BAC,GAuBAC,GAuBAC,GAeAC,GAiDAC,GA0BOC,GAjObC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KASMb,GAAkBc,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,sCAAsC,EAGxD,GAAIA,EAAO,QAAU,EAAG,CACtB,IAAIC,EAAYD,EAAO,CAAC,EAAE,KAAK,OAAS,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EAI9D,GAHIA,EAAO,SAAW,IACpBC,EAAYD,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAEpD,CAACC,EACH,MAAM,IAAI,MAAM,6EAA6E,CAEjG,CACF,EAEMd,GAAiB,CAACe,EAAuBC,EAAmBC,IAA+B,CAC/F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,sBACSH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,2BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA,gCAGzCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,UAI9E,MAAO;AAAA,oBACWD,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,cAIvBG,CAAK;AAAA;AAAA;AAAA,OAInB,EAEMjB,GAAgB,CAACc,EAAuBC,EAAmBC,IAA+B,CAC9F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,yCAKnEG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,gCAEvDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,oCAI1CI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMhB,GAAa,CAACa,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,+BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,4BACjDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEtCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMf,GAAa,CAACY,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA,6BAE/EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,+BAE5CI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,6BAChDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEvCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMd,GAAgB,CAACW,EAAuBC,EAAmBK,IAAsC,CACrG,OAAQA,EAAW,KAAM,CACvB,IAAK,GACH,OAAOrB,GAAee,EAAQC,EAAWK,EAAW,KAAK,MAAM,EACjE,IAAK,GACH,OAAOpB,GAAcc,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAChE,IAAK,GACH,OAAOnB,GAAWa,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,IAAK,GACH,OAAOlB,GAAWY,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMhB,GAAuB,CAACQ,EAA+BQ,IAA2C,CACtG,IAAMC,EAAcC,EAAU,SAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGQ,EAAW,IAAI,EACxEG,EAAYX,EAAO,CAAC,EAAE,KACtBY,EAAaF,EAAU,KAAKD,CAAW,EACvCI,EACF,CAAC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMJ,EAAW,IAAI,CAAC,EACzFA,EAAW,OAAS,GACtBK,EAAgB,KAAK,CAAC,KAAMb,EAAO,CAAC,EAAE,SAAU,KAAMQ,EAAW,KAAK,CAAC,EAGzEK,EAAgB,KAAK,GAAGC,EAA2Bd,EAAO,CAAC,EAAE,KAAMS,CAAW,CAAC,EAC/E,IAAMM,EAAwD,CAAC,MAAM,EAE/DC,EAAmBC,GAA+B,CACtD,IAAMf,EAASgB,EAAe,SAAUlB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEU,EAAQC,EAAc,IAAKpB,EAAO,CAAC,EAAE,SAAUW,EAAU,MAAM,EAC/DU,EAAWF,E
AAM,KAAK,MACtBG,EAAa/B,GAAcW,EAAQS,EAAU,OAAQH,CAAU,EAC/De,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQf,EAAW,KAAK,MAAM,CAAC,EACpG,OAAIA,EAAW,OAAS,GACtBe,EAAS,KAAK,CAAC,KAAM,iBAAkB,KAAMF,CAAkC,CAAC,EAG3E;AAAA,cACGJ,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBJ,EAAOjB,CAAM,CAAC;AAAA,cACvEe,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,4BAE5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,0BAEtCmB,CAAQ;AAAA,cACpBC,CAAU;AAAA;AAAA,UAGtB,EAEA,MAAO,CACL,KAAM,MACN,YAAa,CAAC,KAAM,GAAGd,EAAW,IAAI,GAAI,kBAAAO,CAAiB,EAC3D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMN,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAU,KAAKD,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAAG,CACF,CACF,EAEMvB,GAAgC,CAACO,EAA+BQ,IAA6C,CACjH,GAAIR,EAAO,OAAS,EAAG,CACrB,IAAMwB,EAAexB,EAAO,CAAC,EAAE,iBAAiB,EAC1CyB,EAASzB,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,KAAQA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAI,EAElFG,EAAYH,EAAO,CAAC,EAAE,KAAK,OAC3B0B,EAAa,IAAI,WAAW,EAAIvB,CAAS,EAAE,KAAK,CAAC,EACvD,GAAIH,EAAO,QAAU,EAAG,CACtB,IAAM2B,EAAO3B,EAAO,CAAC,EAAE,iBAAiB,EACxC,QAASM,EAAI,EAAGA,EAAIqB,EAAK,OAAQrB,IAC/BoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,CAAC,EAAI,OAAOkB,EAAalB,CAAC,CAAC,EACpDoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,EAAIH,CAAS,EAAI,OAAOqB,EAAalB,EAAIqB,EAAK,MAAM,CAAC,CAElF,MACEH,EAAa,QAAQ,CAACI,EAAGtB,IAAMoB,EAAW,OAAOpB,CAAC,CAAC,EAAK,OAAOsB,CAAC,CAAE,EAGpE,IAAMC,EAAiB,CAAC,EACxB,OAAAH,EAAW,QAAQE,GAAKC,EAAK,KAAKD,CAAC,CAAC,EAE7B,CAAC,KAAMpB,EAAW,KAAM,MAAAiB,EAAO,KAAAI,CAAI,CAC5C,KACE,QAAOrB,CAEX,EAEad,GAAM,CAACoC,EAAyBtB,IAAoC,CAC/EtB,GAAe4C,EAAQ,MAAM,EAC7B,IAAMC,EAAoBtC,GAA8BqC,EAAQ,OAAQtB,CAAU,EAClFsB,EAAQ,QAAQtC,GAAqBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxF,ICrOA,IAmBMC,GAMAC,GA4BAC,GA2DAC,GAsJAC,GAGAC,GAGAC,GAGAC,GAaAC,GAiCOC,GAYAC,GAKPC,GAWOC,GAKAC,GAUPC,GA6BOC,GAKAC,GAgBAC,GAKAC,GA/ZbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAIAC,KAQMxB,GAAkByB,GAAwC,CAC9D,GAAIC,GAAI,OAAO,uBAAyB,CAACD,GAAUA,EAAO,SAAW,GACnE,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EAEMxB,GAA0C,CAC5C0B,EAAmBC,EAA2BC,IAAyD,CACzG,IAAMC,EAAiBF,EAAW,SAAW,OACvCG,EAA2BJ,EAAM,KAAK,MAAM,EAC9CG,GACFC,EAAyB,OAAO,EAAG,EAAGA,EAAyB,IAAI,CAAE,EAEvE,IAAMC,EAAe,OAAO,eAAe,KAAKJ,EAAY,WAAW,EACjEK,EAAcL,EAAW,YAAY,MAAM,EAC3CM,EAAUN,EAAW,QAAQ,MAAM,EACnCO,EAAsBH,EAAgBJ,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCS,GAAa,qBAAqBR,EAAkBE,EAA0BE,EAAaC,EAASC,EAAWC,CAAI,EAEnH,IAAME,EAA4BD,GAAa,uBAC3CR,EAAkBE,EAA0BG,EAASC,EAAWF,EAAaG,EAAMR,EAAW,OAAO,EAEnGW,EAAgB,OAAO,OAAO,CAAC,EAAGX,CAAU,EAC9CI,EACF,OAAO,OAAOO,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,UAAAD,EAAW,SAAUP,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOW,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAE1F,IAAMY,EAA2BF,EAA0B,MAAM,EACjE,OAAAE,EAAyB,KAAKA,EAAyB,OAAO,EAAG,CAAC,EAAE,CAAC,CAAC,EAC/D,CAACD,EAAeT,EAAiBU,EAA2BF,CAAyB,CAC9F,EAEMpC,GAAuB,CACzBuC,EACAb,IAAgG,CAClG,IAAME,EAAiBF,EAAW,SAAW,OACvCc,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAaD,EAAU,KAAKf,EAAW,WAAW,EAClDiB,EACF,CAAC,CAAC,QAAuB,KAAMH,CAAU,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACnFE,EAA8B,CAAC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACzG,GAAIlB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAMmB,EAAKnB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoB,EAAKpB,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqB,EAAUrB,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsB,EAAQtB,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuB,EAAoB,CAAC,EAAEF,EAAUC,GACvCL,EAAgB,KACZ,CAAC,QAAuB,KAAME,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAO,EACrC,CAAC,QAAuB,KAAMC,CAAK,CACvC,EACAJ,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC
,EAEhC,IAAIM,EAAoB,GACxB,GAAIxB,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMyB,EAAKzB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D0B,EAAK1B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD2B,EAAU3B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD4B,EAAQ5B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EACxDwB,EAAoB,CAAC,EAAEG,EAAUC,GACjCX,EAAgB,KACZ,CAAC,QAAuB,KAAMQ,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAO,EAC3G,CAAC,QAAuB,KAAMC,CAAK,CAAC,EAExCV,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAClC,CACA,MAAO,CAACD,EAAiBC,EAAU,GAAMK,EAAmBC,CAAiB,CAC/E,KAAO,CACL,GAAItB,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM2B,EAAgBd,EAAU,eAAef,EAAW,WAAW,EACrEiB,EAAgB,KACZ,CAAC,QAAuB,KAAMY,CAAa,EAAG,CAAC,QAAuB,KAAM7B,EAAW,IAAI,EAC3F,CAAC,QAAuB,KAAMA,EAAW,OAAO,CAAC,EACrDkB,EAAS,KACL,CAAC,KAAM,gBAAiB,KAAM,MAAO,OAAQW,EAAc,MAAM,EACjE,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ7B,EAAW,KAAK,MAAM,EAC1D,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQA,EAAW,QAAQ,MAAM,CAAC,EAErE,IAAM8B,EAAU9B,EAAW,KAAK,OAAO,CAAC+B,EAAKC,IAAQD,EAAMC,CAAG,EAC9D,MAAO,CAACf,EAAiBC,EAAU,CAAC,CAACY,EAAS,GAAO,EAAK,CAC5D,CACF,EAEMvD,GAAsB,CACxB0D,EAA4BC,EAAkBC,EAAcC,EAAyBpC,EACrFqC,EAAaC,EAAaC,EAAerB,EAA6BY,EAAkBP,EACxFC,IAAuC,CACzC,IAAMtB,EAAiBF,EAAW,SAAW,OACvCwC,EAAWN,EAAE,KAAK,MAClBO,EAASC,EAAe,SAAUR,EAAE,KAAK,OAAQE,CAAe,EAEtE,GAAIpC,EAAW,YAAY,QAAU,EAAG,CACtC,IAAI2C,EAAQ,GACRC,EAAQ,GACRC,EAAW,GACTC,EAAUX,GAAQjC,EAAiB,EAAI,GAsB7C,GArBIqB,EACFoB,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO;AAAA,4CACxBA,CAAO;AAAA;AAAA;AAAA;AAAA,kCAIjBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAGjBM,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,kCACxBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAIfrC,EAAW,YAAY,SAAW,EAAG,CACvC,IAAM+C,EAAUZ,GAAQjC,EAAiB,EAAI,GACzCsB,EACFoB,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO,yBAAyBA,CAAO;AAAA;AAAA;AAAA;AAAA,gBAM5FH,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,kBAGpDF,EAAW;AAAA;AAAA,aAGb,CAoBA,MAlBoB;AAAA,cACVZ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,8BAE3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,4BAEvCD,CAAQ,IAAID,CAAK;AAAA;AAAA,gBAE7BK,CAAK;AAAA,gBACLD,CAAK;AAAA,gBACLE,CAAQ;AAAA,gBACRP,CAAG;AAAA;AAAA;AAAA,cAKjB,KAAO,CACL,GAAIpC,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM8C,EAAchD,EAAW,YAAY,OACrCiD,EAAWjD,EAAW,KAAK,OAC7BkD,EAAU,GACd,OAAIpB,EACFoB,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAQgBhB,EAAE,gBAAgB,UAAU,CAAC;AAAA,kBAC3CG,CAAG;AAAA,iBAGfa,EAAU;AAAA;AAAA,8BAEchB,EAAE,gBAAgB,UAAU,CAAC;AAAA,gBAC3CG,CAAG;AAAA,cAGK;AAAA,cACVJ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,8BAC3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,wCAE3BO,CAAW;AAAA;AAAA,4BAEvBR,CAAQ,IAAID,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uCAMNS,EAAc,CAAC;AAAA,0CACZG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA,2CACvDG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA;AAAA,0BAEzEA,EAAc,CAAC;AAAA;AAAA;AAAA,+BAGVb,EAAOa,CAAW,UAAUb,CAAI;AAAA,+CAEvDgB,EAAa,mBAAoB,OAAOhB,EAAOa,CAAW,IAAKA,CAAW,CAAC;AAAA,oCAC/Cb,EAAOa,CAAW,QAAQG,EAAa,gBAAiB,SAAUF,CAAQ,CAAC;AAAA,oBAC3FC,CAAO;AAAA;AAAA,gBAEXZ,CAAG;AAAA;AAAA;AAAA,cAKjB,CACF,EAcM9D,GAAiCwB,GAClC,GAAGA,EAAW,MAAM,IAAIA,EAAW,QAAQ,IAAIA,EAAW,OAAO,IAAIA,EAAW,YAAY,MAAM,GAEjGvB,GAA4CuB,GAC7C,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,eAAe,GAEzEtB,GAAwCsB,GACzC,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,YAAY,IAAIA,EAAW,SAAS,GAE9FrB,GAA6BqB,IAA+D,CAChG,OAAQA,EAAW,OACnB,QAAS,CAAC,SAAU,QAAS,aAAc,YAAY,EA
AEA,EAAW,QAAkB,EACtF,SAAUA,EAAW,UACrB,YAAaA,EAAW,aACxB,QAASA,EAAW,QACpB,KAAMA,EAAW,IACnB,GAMMpB,GACF,CAACwE,EAAcrD,EAAmBE,EAA2BD,IAAmD,CAC9G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEiC,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyC,EAAWN,EAAE,KAAK,MAElBG,EAAM,kBACRC,EAAM,GACNe,EAAmB,gBACrBf,GAAO,YAAYE,CAAQ,yBAE3BF,GAAO,YAAYE,CAAQ,oCAE7B,GAAM,CAACvB,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxDpC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EAC3E,IAAM2C,EAAwD,CAAC,MAAM,EACrE,MAAO,CACL,KAAAJ,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAAK,EAAKpB,EAC3FY,EAASP,EAAmBC,CAAiB,CACnD,CACF,EAES3C,GAA8BmB,GAA+D,CACxG,IAAMyD,EAAmBzD,EAAW,oBAAiC,EAE/D0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI0D,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAE1F,IAAMC,EAAwB,CAAC,gBAAAF,EAAiB,GAAGC,EAAM,SAAU,EAAE,EACrE,MAAO,CAAC,GAAGC,EAAuB,SAAUlF,GAAyCkF,CAAqB,CAAC,CAC7G,EAEa7E,GAAc,CAAC8E,EAAyB5D,IAA4C,CAC/F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,cAAegF,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CACnG,EAEMjB,GAAuB,CAC3B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,CACd,EAEaC,GAAoCgB,GAA+D,CAC9G,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEa5E,GAAoB,CAAC2E,EAAyB5D,IAA4C,CACrG5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,oBAAqBgF,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CACxG,EAOMd,GACF,CAACkE,EAAcrD,EAAmBE,EAA2BD,IAA+C,CAC1G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEoC,EAAM;AAAA;AAAA,MAGNC,EAAM,GACNJ,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyD,EAAwD,CAAC,MAAM,EAC/D,CAACvC,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxD,OAAApC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EACpE,CACL,KAAAuC,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAChFvC,EAAM,WAAa,GAAoB,OAAS,KAAMmB,EAAUY,EAASP,EAC1EC,CAAiB,CACvB,CACF,EAESrC,GAAU,CAACyE,EAAyB5D,IAAwC,CACvF5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,UAAW0E,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CAC3F,EAEaZ,GAA0BY,GAA2D,CAChG,IAAM8D,EAAe9D,EAAW,cAC1BO,EAAYP,EAAW,UAEvB0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI8D,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAIJ,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,oEAAoE,EAEtF,IAAMK,EAAoB,CAAC,aAAAD,EAAc,UAAAvD,EAAW,GAAGmD,EAAM,SAAU,EAAE,EACzE,MAAO,CAAC,GAAGK,EAAmB,SAAUrF,GAAqCqF,CAAiB,CAAC,CACjG,EAEa1E,GAAgCW,GAA2D,CACtG,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEavE,GAAgB,CAACsE,EAAyB5D,IAAwC,CAC7F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,gBAAiB0E,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CAChG,IClaA,IAUMgE,GAUAC,GAoCOC,GAxDbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMP,GAAwB,CAACQ,EAAeC,EAAeC,IAAwB,CACnF,IAAMC,EAAiBH,IAAUC,EAC3BG,EAA8BJ,EAAQC,GAASC,EAAQ,EACvDG,EAA8BL,EAAQC,GAASC,EAAQ,EAE7D,GAAIC,GAAkBC,GAA+BC,EACnD,MAAM,IAAI,MAAM,2CAA4C,CAEhE,EAEMZ,GAAyB,CAACO,EAAeC,EAAeC,EAAeI,IAAoC,CAC/G,IAAMC,EAAc,KAAK,IAAI,KAAK,MAAMN,EAAQD,GAASE,CAAK,CAAC,EACzDM,EAAwB,CAACD,CAAW,EACpCE,EAAaF,EACbG,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAA
U,EAAG,CAAC,KAAMH,EAAU,KAAMN,CAAK,EAAG,CAAC,KAAMM,EAAU,KAAMJ,CAAK,EACtG,GAAGS,EAA2BH,CAAW,CAC3C,EAEMI,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUT,EAAUE,EAAY,MAAM,EAC9DQ,EAAWF,EAAO,KAAK,MACvBG,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAMD,CAAkC,EAC3F,CAAC,KAAM,QAAS,KAAMA,CAAkC,CAC1D,EACA,MAAO;AAAA,UACDH,EAAa,iBAAiBI,CAAQ,EAAE,iBAAiBH,CAAM,CAAC;AAAA,UAChED,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,gDACnCG,CAAQ;AAAA,QAEtD,EAEA,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGV,CAAQ,EAAE,EACjC,gBAAAM,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,EACF,CACF,EAEahB,GAASwB,GAAkC,CACtD,IAAIlB,EAAQ,EACRC,EAAQ,EACRC,EAAQ,EACRgB,EAAQ,OAAO,CAAC,EAAE,WAAa,GACjClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,GAClCA,EAAQ,OAAO,CAAC,EAAE,WAAa,IACxClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,GAE3CC,GAAI,OAAO,sBACb3B,GAAsBQ,EAAOC,EAAOC,CAAK,EAG3CgB,EAAQ,QAAQzB,GAAuBO,EAAOC,EAAOC,EAAOgB,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CACvG,IC1EA,IAiCME,GAuBAC,GASAC,GA6CAC,GAkDAC,GAkCAC,GAaAC,GAwBAC,GAyBAC,GAuBAC,GAkCAC,GAWAC,GAQAC,GAsDAC,GA6EAC,GAwEAC,GAoHAC,GAOOC,GAiBAC,GAnqBbC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAGAC,KAuBMxB,GAAiB,CAACyB,EAAkBC,IAAuC,CAK/E,GAJAD,EAAO,MAAOE,GAAUA,EAAQ,IAAM,IAAM,CAClB,MAAM,IAAI,MAAM,oDAAoD,CACtE,EAAE,EAEtBF,EAAO,OAAS,GAClB,GAAIC,EAAW,OAAS,UACtB,GAAI,EAAED,EAAO,SAAW,GAAKA,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACtGA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACxDA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MACN;AAAA,oGACwF,UAErFC,EAAW,OAAS,SACzB,EAAED,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC/EA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MAAM,+DAA+D,EAIvF,EAEMxB,GAAe,CAACwB,EAA2BG,EAAyBC,IAA2B,CACnGD,EAAK,MAAOD,GAAUA,GAAS,GAAKA,EAAQE,IAAS,IAAM,CACnC,MAAM,IAAI,MAAM,qEAAqE,CACvF,EAAE,EACxB,IAAMC,EAAY,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAG,EAC1C,OAAAD,EAAK,QAAQ,CAACD,EAAOI,IAAUD,EAAUH,CAAK,EAAIF,EAAOM,CAAK,CAAC,EACxDD,CACT,EAEM5B,GACF,CAAC8B,EAA+BN,EAA8BO,EAAsBR,EACnFS,EAAiBC,IAAwB,CACxC,GAAM,CAACC,EAAeC,EAAkBC,CAAe,EAClDL,EAAe,GAAM,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,GAAKD,EAAO,OAAS,EAAK,EAAI,GAAI,EAAE,EACrEH,EAAOG,EAAO,CAAC,EAAE,KAAK,OAC5B,GAAII,EAAgB,GAAKJ,EAAO,OAASI,GAAiBJ,EAAOI,CAAa,EAAE,KAAK,OAAS,EAC5FJ,EAAOI,CAAa,EAAE,gBAAgB,EAAE,QAAST,GAAUQ,EAAI,KAAKR,CAAK,CAAC,UACjED,EAAW,0BAA4B,qBAChD,MAAM,IAAI,MAAM,2FAA2F,EAG7G,GAAIW,EAAmB,GAAKL,EAAO,OAASK,GAAoBL,EAAOK,CAAgB,EAAE,KAAK,OAAS,EAAG,CAExG,GADAL,EAAOK,CAAgB,EAAE,gBAAgB,EAAE,QAASV,GAAUF,EAAO,KAAKE,CAAK,CAAC,EAC5EF,EAAO,SAAW,GACjBA,EAAO,SAAWI,GAASI,GAAgB,IAAMR,EAAO,SAAWC,EAAW,KAAK,OACtF,MAAM,IAAI,MACN,6FAA6F,EAEnG1B,GAAeyB,EAAQC,CAAU,EAC7BA,EAAW,KAAK,OAAS,GAC3BzB,GAAawB,EAAQC,EAAW,KAAMG,CAAI,EAAE,QAAQ,CAACF,EAAOI,IAAUN,EAAOM,CAAK,EAAIJ,CAAK,CAE/F,CACA,GAAIW,EAAkB,GAAKN,EAAO,OAASM,IACzCN,EAAOM,CAAe,EAAE,iBAAiB,EAAE,QAASX,GAAUO,EAAM,KAAK,OAAOP,CAAK,CAAC,CAAC,EACnFO,EAAM,SAAWL,GAASI,GAAgB,IAAMC,EAAM,SAAWR,EAAW,KAAK,QACnF,MAAM,IAAI,MAAM,4FAA4F,EAIhH,GAAIA,EAAW,KAAK,OAAS,EAAG,CAC9B,GAAID,EAAO,SAAWC,EAAW,KAAK,OACpC,MAAM,IAAI,MAAM,0FAA0F,EAE5G,GAAIQ,EAAM,SAAWR,EAAW,KAAK,OACnC,MAAM,IAAI,MACN,8FAA8F,CAEtG,CACA,GAAI,OAAOD,EAAW,KAAe,OAAOS,EAAU,KAAeT,EAAO,OAAS,GAAKS,EAAM,OAASL,EACvG,MAAM,IAAI,MAAM,yDAAyD,CAE7E,EAEE1B,GACF,CAACoC,EAAiD
C,IAC9C;AAAA,2DACmDA,CAAK,OAC3D,IAAM,CACD,OAAQD,EAAwB,CAC9B,IAAK,aACH,MAAO,UAAUC,CAAK,gBAAgBA,CAAK,YAC7C,IAAK,qBACH,MAAO;AAAA,8BACSA,CAAK,uBAAuBA,CAAK;AAAA;AAAA;AAAA,qBAInD,IAAK,uBACH,MAAO,WAAWA,CAAK,uBAAuBA,CAAK,YACrD,IAAK,gBACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMaA,CAAK;AAAA;AAAA,0BAEbA,CAAK,6DAA6DA,CAAK;AAAA;AAAA,qBAGrF,IAAK,qBACH,MAAO;AAAA,6BACQA,CAAK,gBAAgBA,CAAK;AAAA,2BAC5BA,CAAK,gBAAgBA,CAAK,yBAAyBA,CAAK;AAAA,0BACzDA,CAAK;AAAA;AAAA,mCAEIA,CAAK,yBAAyBA,CAAK;AAAA,qBAE1D,IAAK,uBACH,MAAO,uBAAuBA,CAAK,YAAYA,CAAK;AAAA,uCAC3BA,CAAK;AAAA,mCACTA,CAAK;AAAA;AAAA,sCAEFA,CAAK,uBAAuBA,CAAK,mBAC3D,IAAK,aACH,MAAO,YAAYA,CAAK,uBAAuBA,CAAK,mBACtD,QACE,MAAM,IAAI,MAAM,6BAA6BD,CAAsB,mBAAmB,CAC1F,CACF,GAAG,EACP,IAEEnC,GAA8B,CAACqC,EAA0BR,EAAsBO,IACjF,6CAA6CA,CAAK,4BAA4BA,CAAK,MAAQ,IAAM,CAC/F,OAAQC,EAAa,CACnB,IAAK,oBACH,MAAO,yIAKT,IAAK,QACH,MAAO,2BACT,IAAK,OACH,MAAO,0BACT,IAAK,qBACH,MAAO,0KAKT,IAAK,SACL,QACE,GAAIR,EAAe,GACjB,MAAO,mLAOT,MAAM,IAAI,MAAM,gBAAgBQ,CAAW,mBAAmB,CAClE,CACF,GAAG,EACH,IAEEpC,GAAY,CAAC8B,EAAwBP,EAAyBC,IAA2B,CAC7F,IAAMa,EAAS,IAAI,MAAMb,CAAI,EAAE,KAAK,CAAC,EAAE,OAAO,IAAI,MAAMA,CAAI,EAAE,KAAK,CAAC,CAAC,EAC/Dc,EAAWR,EAAI,SAAW,EAAIO,EAASP,EAAI,MAAM,EACvD,OAAIP,EAAK,OAAS,GAChBA,EAAK,QAAQ,CAACgB,EAAGC,IAAM,CACrBH,EAAOE,CAAC,EAAID,EAASE,CAAC,EACtBH,EAAOG,EAAIhB,CAAI,EAAIc,EAASf,EAAK,OAASiB,CAAC,CAC7C,CAAC,EACMH,GAEFC,CACT,EAEMrC,GACF,CAACwC,EAA+BrB,EAA2BS,EAA0BN,IACrE,CACV,IAAImB,EAAwB,CAAC,EAC7B,GAAIb,EAAM,OAAS,EACjB,GAAIN,EAAK,OAAS,EAAG,CAEnB,GADAkB,EAAW,QAASF,GAAMG,EAAY,KAAKH,CAAC,CAAC,EACzC,KAAK,IAAI,GAAGhB,CAAI,EAAIkB,EAAW,OACjC,MAAM,IAAI,MAAM,sBAAsB,EAExClB,EAAK,QAAQ,CAACgB,EAAGC,IAAME,EAAYH,CAAC,EAAIV,EAAMW,CAAC,CAAC,CAClD,MACEX,EAAM,QAASU,GAAMG,EAAY,KAAKH,CAAC,CAAC,MAErC,CACL,GAAInB,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyC,EAEzDsB,EAAcD,EAAW,IAAI,CAACnB,EAAOI,IAAU,KAAK,MAAMJ,EAAQF,EAAOM,CAAK,CAAC,CAAC,CAEpF,CACA,OAAOgB,CACT,EAEFxC,GAAoB,CAACuC,EAA+BrB,EAAkBC,IAAiC,CAC3G,IAAMsB,GAAiB,IAAM,CAC3B,OAAQtB,EAAW,sBAAuB,CACxC,IAAK,aACH,OAAOA,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,IAAK,cACH,OAAOC,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,QACE,MAAM,IAAI,MAAM,4BAA4BC,EAAW,qBAAqB,mBAAmB,CACnG,CACF,GAAG,EACHD,EAAO,KAAK,EAAK,EAAGA,EAAO,MAAM,EACjC,IAAMwB,EAAsBH,EAAW,MAAM,EAC7C,OAAIpB,EAAW,KAAK,OAAS,GAC3BA,EAAW,KAAK,QAASkB,GAAMnB,EAAOmB,CAAC,EAAII,CAAa,EACxDtB,EAAW,KAAK,QAASkB,GAAMK,EAAoBL,CAAC,EAAI,KAAK,MAAME,EAAWF,CAAC,EAAInB,EAAOmB,CAAC,CAAC,CAAC,IAE7FnB,EAAO,KAAKuB,EAAe,EAAGvB,EAAO,MAAM,EAC3CwB,EAAoB,QAAQ,CAACL,EAAGC,IAAMI,EAAoBJ,CAAC,EAAI,KAAK,MAAMD,EAAInB,EAAOoB,CAAC,CAAC,CAAC,GAEnFI,CACT,EAEMzC,GACF,CAAC0C,EAAuBJ,EAA+BC,EAAgCI,EACtFC,IAA8B;AAAA,mEACgCF,EAAO,KAAK,OAAO,cAC9EA,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,oCACZG,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,gCAC5CA,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA,sBAC/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA,wBAChDE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,uBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA;AAAA,kCAExDF,EAAO,KAAK,KAAK;AAAA;AAAA,gCAEnBG,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAQzFtC,GACF,CAAC6C,EAAsBJ,EAAuBJ,EAA+BC,EAC5EI,EAAsBC,EAAmBG,IAAsC;AAAA,gEACpBL,EAAO,KAAK,OAAO,QAAQI,EAAM,KAAK,OAAO;AAAA,2BAClFA,EAAM,KAAK,OAAO;AAAA,gCACbP,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,sBAE/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA;AAAA;AAAA;AAAA,0BAI9CE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,yBAC7CC,EAA
a,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA,gCAC5DC,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA,iBAG9EQ,CAAgB,4CAA4CL,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA,wCAGtDA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAS/CI,EAAM,WAAW,gBAAiB,IAAK,cAAc,CAAC;AAAA;AAAA;AAAA,OAI1D5C,GAAoB,CAAC4C,EAAsBR,IAA0C;AAAA,0CACjDQ,EAAM,KAAK,OAAO;AAAA,gCAC5BR,EAAW,MAAM;AAAA,4BACrBQ,EAAM,WAAW,gBAAiB,GAAG,CAAC;AAAA,gDAClBD,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,OAOtGnC,GACF,CAAC2C,EAAsBE,EAAoBC,EAAkBC,IACzDJ,EAAM,KAAOI,EAAc;AAAA,MAC7BJ,EAAM,WAAW,gBAAiBE,EAAY,SAAS,CAAC;AAAA,MACxDF,EAAM,WAAW,gBAAiBG,EAAU,OAAO,CAAC;AAAA,EAEvB,GAE7B7C,GACF,CAAC0C,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUG,EAAWC,EAAUL,CAAU,EAC5CV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,CAAC,EAC9DN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wEAC2Dd,CAAK;AAAA,2BAClDc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBM,EAAW,mBAAmBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QAC9FN,EAAM,WAAW,gBAAiBO,EAAU,mBAAmBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC5FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,+CAGHJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,gBAE/DA,CAAK,sBAAsBoB,CAAS;AAAA,gBACpCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAEzCN,EACI,yBAAyBT,EAAWc,CAAS,CAAC,8BAA8Bd,EAAWe,CAAQ,CAAC;AAAA,iBAC7FF,CAAkB;AAAA,SAErB,EAAE;AAAA,8BACcb,EAAWc,CAAS,CAAC;AAAA,8BACrBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,2BAKvBf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA,iBAC1EjB,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWjC,EAEE3B,GACF,CAACyC,EAAsBJ,EAAuBJ,EAA+BC,EAC5EtB,EAA2BU,EAAwB2B,EAAqBP,EACxEI,EAA4BI,IAAoC,CAC/D,IAAMC,EAAOlB,EAAW,SAAW,EAC7BmB,EAAS,GACT,CAACL,EAAWC,CAAQ,EAAIG,EAAO,CAAC,EAAG,CAAC,EAAIC,EAAS,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAC/DzB,EAAQc,EAAM,KAAK,MACnBY,EAAoCC,GAAwB,CAChE,IAAMC,EAAYD,IAAQP,EAAY,MAAQ,MAC9C,MAAO;AAAA,WACJQ,CAAS,qCAAqCd,EAAM,KAAK,OAAO,qBAC/DJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,6BACfU,EAAO,WAAW,iBAAkBiB,CAAG,CAAC;AAAA,2BAC1C3B,CAAK,+DAA+Df,EAAO0C,CAAG,CAAC;AAAA,UAChGpB,EAAYoB,CAAG,CAAC,KAAKrB,EAAWqB,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,MAAMrB,EAAW,MAAM;AAAA,gCAC/DN,CAAK;AAAA;AAAA;AAAA,cAGvBe,CAAgB,0CAA0CT,EAAWqB,CAAG,CAAC;AAAA,mBACpER,CAAkB;AAAA;AAAA,0BAEXnB,CAAK,gBAAgBA,CAAK;AAAA;AAAA,gBAEpC4B,CAAS,KAAK5B,CAAK,oBAAoBA,CAAK;AAAA,gBAC5C4B,CAAS,WAAWA,CAAS,OAAOtB,EAAWqB,CAAG,CAAC;AAAA,eACpD,IACDJ,EACK;AAAA,mCAEER,EACF,UAAUI,CAAkB,IAE5B,GAAGS,CAAS,iBAAiBA,CAAS,KAAKtB,EAAWqB,CAAG,CAAC,WAElE,CAAC;AAAA;AAAA,kCAEsBb,EAAM,KAAK,OAAO;AAAA,YACxCA,EAAM,WAAW,qBAAsBa,EAAK,OAAOC,CAAS,GAAG,CAAC;AAAA,0BAEhED,IAAQP,EAAYN,EAAM,aAAa,oBAAoB,EACvC,2DAA2D;AAAA;AAAA;AAAA,QAIrF,EAEA,MAAO;AAAA,MACPY,EAAiCN,CAAS,CAAC;AAAA,MAC3CM,EAAiCL,CAAQ,CAAC;AAAA,qCACXrB,CAAK,cAAcA,CAAK;AAAA;AAAA,wBAErCA,CAAK,gBAAgBA,CAAK;AAAA,wBAC1BA,CAAK;AAAA,wBACLA,CAAK;AAAA,uBACNA,CAAK;AAAA,oBACRsB,CAAW,wBAAwBA,CAAW,yBACxDA,CAAW,yBAAyBA,CAAW;AAAA,oBACrCA,CAAW,mBAAmBA,CAAW;AAAA,oBACzCA,CAAW,2BAA2BA,CAAW;AAAA,oBACjDA,CAAW,yBAAyBA,CAAW,0BACzDA,CAAW,0BAA0BA,CAAW;AAAA;AAAA;AAAA;AAAA,qCAIrBtB,CAAK,sBAAsBA,CAAK,YAAYA,CAAK;AAAA,oBAClEA,CAAK;AAAA;AAAA;AAAA;AAAA,4CAImBU,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,yBACnDc,EAAM,KAAK,OAAO;AAAA;AAAA;AAAA,KAIvC,EAEExC,GACF,CAACwC,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUY,EAAUT,EAAWC,EAAUL,CAAU,EACtDV,EAAW,SAAW,EAAI,
CAAC,GAAI,EAAG,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACpEN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wFAC2Ed,CAAK;AAAA,2BAClEc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBe,EAAU,qBAAqBvB,EAAWuB,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9Ff,EAAM,WAAW,gBAAiBM,EAAW,sBAAsBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QACjGN,EAAM,WAAW,gBAAiBO,EAAU,qBAAqBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,gDAGFJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,kBAE9DA,CAAK,sBAAsB6B,CAAQ;AAAA,mBAClC7B,CAAK,sBAAsBoB,CAAS;AAAA,kBACrCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAE3CN,EAAmB,6BAA6BT,EAAWuB,CAAQ,CAAC,oCAC7CvB,EAAWc,CAAS,CAAC,kCAAkCd,EAAWe,CAAQ,CAAC;AAAA,eAC7FF,CAAkB;AAAA,WAEJ,EAAE;AAAA;AAAA,gCAECb,EAAWuB,CAAQ,CAAC;AAAA,oCAChBvB,EAAWc,CAAS,CAAC;AAAA,kCACvBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAO3Bf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA;AAAA,kBAEzEjB,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,iBACNA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,mBAAmBA,CAAK;AAAA,iBAC7BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAgBjC,EAEEzB,GACF,CAACuD,EAAyB5C,EAA8BO,EAAsBsC,EAC7ErC,EAA0BsC,IAA6C,CACtE,IAAM1B,EAAawB,EAAY,KACzBnC,EAAM9B,GAAUmE,EAAU9C,EAAW,KAAMoB,EAAW,MAAM,EAE9DC,EAAczC,GAAgBwC,EAAYyB,EAAarC,EAAOR,EAAW,IAAI,EAC7ED,EAAS8C,EAAY,MAAM,EAC3BA,EAAY,SAAW,IACzB9C,EAASqB,EAAW,IAAI,CAACnB,EAAOI,IAAUJ,IAAU,EAAI,EAAMoB,EAAYhB,CAAK,EAAIJ,CAAK,EACpFD,EAAW,wBAA0B,YACvCqB,EAAcxC,GAAkBuC,EAAYrB,EAAQC,CAAU,IAGlE,IAAMwB,EAASuB,EAAe,SAAUH,EAAY,SAAUvB,EAAY,MAAM,EAC1EO,EAAQoB,EAAc,QAASJ,EAAY,SAAUxB,EAAW,MAAM,EACtE6B,EAAaC,EAAU,KAAK7B,CAAW,EACvC8B,EAAU/B,EAAW,SAAWC,EAAY,QAAUD,EAAW,MAAM,CAACgC,EAAGjC,IAAMiC,IAAM/B,EAAYF,CAAC,CAAC,EACrGU,EAAmB7B,EAAW,0BAA4B,qBAC1DiC,EAAqBjC,EAAW,mBAChCqD,EAAWzB,EAAM,KAAK,MACtB0B,EAAmBC,GAA+B;AAAA,QACtDJ,EAAU,GAAK;AAAA,QACf1E,GAA2CuB,EAAW,wBAAyBqD,CAAQ,CAAC;AAAA,SACvF,IAAM,CACP,OAAQrD,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA,gBACHhB,GAAkB4C,EAAOR,CAAU,CAAC;AAAA,gBACpC1C,GAA4BsB,EAAW,YAAaO,EAAc8C,CAAQ,CAAC;AAAA,gBAE3EtE,GACI6C,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,OAAQoB,CAAgB,CAAC;AAAA,gBAE9F,IAAK,SACH,MAAO;AAAA,gBACH/C,GAA0C0C,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,MAAM,CAAC;AAAA,iBACpG,IAAM,CACT,GAAIW,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GAAGlC,GAAsB0C,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAC3F,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EAC1D,MAAO,GAAGhC,GAAuBwC,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAEjG,MAAM,MAAM,kFAAkF,CAElG,GAAG,CAAC;AAAA,cAEN,IAAK,QACH,MAAO;AAAA,eACJ,IAAM,CACP,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GACHjC,GACIyC,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAQU,EAAKT,EAAW,YAAa6B,EAC7E7B,EAAW,mBAAoBA,EAAW,cAAc,CAAC,GAEjE,MAAM,MAAM,2EAA2E,CAE3F,GAAG,CAAC;AAAA,cAEN,QACE,MAAM,MAAM,qBAAqB,CACrC,CACF,GAAG,CAAC;AAAA,OACH;AAAA,QAEGuD,EAAa,gBAAgB,cAAe,KAAK,EAC5C,gBAAgB,SAAU,MAAOxD,EAAO,MAAM,EAC9C,gBAAgB,MAAO,MAAOU,EAAI,MAAM,EACxC,iBAAiBmB,EAAOJ,CAAM,CAAC;AAAA,QACtC+B,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,UAC1EJ,EAAU,0CAA4C;AAAA,+BACjC3B,EAAO,gBAAgB,YAAY,CAAC;AAAA,6BACtCI,EAAM,KAAK,OAAO;AAAA,WACpC,IAAM,CACT,OAAQ5B,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA;AAAA,yCAEsB4B,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,yCAEnC5B,EAAW,kBAAkB;AAAA,mBAE5D,IAAK,SACH,MAAO,wBACFoB,EAAW,SAAW,GAAKA,EAAW,SAAW,EAAK,wBACA,wBAAwB,oBACrF,IAAK,QACH,MAAO,6DACT,QACE,MAAM,MAAM,4BAA4BpB,EAAW,IAAI,EAAE,CAC7D,CACF,GAAG,CAAC;AAAA,CACT;
AAAA,SAGK,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAGA,EAAW,QAAQ,IAAIO,CAAY,IAAIR,EAAO,OAAS,EAAIA,EAAS,EAAE,IAC3ES,EAAM,OAAS,EAAIA,EAAQ,EAAE,IAAIC,EAAI,OAAS,EAAIA,EAAM,EAAE,IAAI0C,CAAO,IAAI/B,CAAU,GACvF,kBAAmB,CAAC,MAAM,CAC5B,EACA,gBAAAkC,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMjC,EAAa,SAAUuB,EAAY,QAAQ,CAAC,EAC7D,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,OAAsB,KAAMlD,CAAM,EAC9E,CAAC,OAAsB,KAAMU,CAAG,EAAG,GAAG+C,EAA2BpC,EAAYC,CAAW,CAC1F,CACF,EACF,CACF,EAEE/B,GAAuCmE,GAAoC,CAC/E,IAAMC,EAAmBD,EAAQ,iBAGjC,OAF2B,IAAI,YAAYC,EAAkBA,EAAiB,WAAY,CAAC,EACnD,CAAC,CAE3C,EAEanE,GAAS,CAACkE,EAAyBzD,IAAuC,CACrF,IAAMD,EAAmB,CAAC,EACpBS,EAAkB,CAAC,EACnBC,EAAgB,CAAC,EAKjBF,EAAejB,GAAoCmE,CAAO,EAChE,GAAIzD,EAAW,YAAc,EAC3B,MAAM,MAAM,6DAA6D,EAE3ExB,GAAeiF,EAAQ,OAAQzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAC3EgD,EAAQ,QACJpE,GAAwBoE,EAAQ,OAAO,CAAC,EAAGzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC7G,EAEajB,GAAyBQ,GAA0D,CAC9F,IAAM2D,EAAY3D,EAAW,UACvBE,EAAOF,EAAW,KAClB4D,EACF5D,EAAW,wBACToC,EAAcpC,EAAW,YACzBqC,EAAiBrC,EAAW,iBAA6B,EACzDiC,EAAqBjC,EAAW,mBAChC6D,EAA+C7D,EAAW,sBAC1D8D,EAAa9D,EAAW,KAExBe,EAA4Bf,EAAW,cAAgB,GAAK,SAAWA,EAAW,YACxF,OAAO+D,GAA4B,CACjC,UAAAJ,EACA,KAAAzD,EACA,wBAAA0D,EACA,YAAAxB,EACA,eAAAC,EACA,mBAAAJ,EACA,sBAAA4B,EACA,KAAAC,EACA,YAAA/C,CACF,CAAC,CACH,IC1rBA,IAkBMiD,GAqDAC,GA+FOC,GAtKbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KASMR,GAAiB,CAACS,EAA+BC,IAAgD,CACrG,GAAM,CAACC,EAAOC,EAAaC,EAAUC,CAAQ,EAAIL,EAC3C,CAAC,SAAAM,EAAU,mBAAAC,CAAkB,EAAIN,EAEvC,GAAIC,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wDAAwDA,EAAM,KAAK,MAAM,EAAE,EAE7F,GAAI,CAACM,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,GAAK,CAACK,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,CAAC,GACtFA,EAAY,KAAK,SAAW,EAC9B,MAAM,IAAI,MAAM,uEAAuEA,EAAY,KAAK,MAAM,EAAE,EAElH,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAI,CAACG,EAAU,SAASJ,EAAS,KAAMC,EAAS,IAAI,EAClD,MAAM,IAAI,MAAM,wEAA4E,EAG9F,GAAIE,EAAqB,GAAKD,IAAa,EACzC,MAAM,IAAI,MAAM,iEAAiE,EAGnF,IAAMG,EAAYP,EAAM,KAAK,CAAC,EACxBQ,EAAiBR,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACjDS,EAAoBP,EAAS,KAAK,CAAC,EACnCQ,EAAaJ,EAAU,kBAAkBN,EAAM,KAAM,CAAC,EAAIQ,EAC1DG,EAAWN,IAAuB,EAAIH,EAAS,KAAK,CAAC,EAAI,EAAIQ,EAAaN,EAChF,GAAIC,EAAqBM,EACvB,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIV,EAAY,KAAK,SAAW,EAAG,CACjC,GAAIM,IAAcN,EAAY,KAAK,CAAC,EAClC,MAAM,IAAI,MAAM,sEAAsEA,EAAY,KAAK,CAAC,CAAC,EAAE,EAE7G,GAAIO,IAAmBP,EAAY,KAAK,CAAC,EACvC,MAAM,IAAI,MAAM,2EAA2EA,EAAY,KAAK,CAAC,CAAC,EAAE,CAEpH,CAEA,GAAIU,EAAW,IAAMT,EAAS,KAAK,CAAC,GAAKG,EAAqB,IAAMH,EAAS,KAAK,CAAC,EACjF,MAAM,IAAI,MAAM,kGACZA,EAAS,KAAK,CAAC,CAAC,EAAE,EAGxB,GAAIM,EAAiBC,EACnB,MAAM,IAAI,MAAM,gFAAgF,CAEpG,EAEMnB,GACF,CAACQ,EAA+BC,IAAuD,CACrF,GAAM,CAAC,YAAAa,EAAa,SAAAR,EAAU,mBAAAC,EAAoB,MAAAQ,CAAK,EAAId,EACrDQ,EAAYT,EAAO,CAAC,EAAE,KAAK,CAAC,EAC5BgB,EAAcR,EAAU,kBAAkBR,EAAO,CAAC,EAAE,KAAM,CAAC,EAC3DU,EAAiBV,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACzDY,EAAaI,EAAcN,EAC3BO,EAAyBjB,EAAO,CAAC,EAAE,KAAK,CAAC,EACzCa,EAAWN,IAAuB,EAAIU,EAAyB,EAAIL,EAAaN,EAKhFY,EACF,IAAI,MAAcT,EAAWC,EAAgBE,EAAaC,EAAUA,EAAWI,CAAsB,EACnGE,EAAgBX,EAAU,eAAeU,CAAW,EAEpDE,EAAoC,CACxC,CAAC,OAAsB,KAAML,CAAK,EAClC,CAAC,QAAuB,KAAMG,CAAW,EACzC,CAAC,QAAuB,KAAMC,CAAa,EAI3C,GAAInB,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MAAsB,CAAC,QAAuB,KAAM,CAACgB,EAAaJ,EAAYC,EAAU,CAAC,CAAC,CAAC,EAC/F,CAAC,EACT,GAAIb,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MACA,CAAC,QAAuB,KAAM,CAACgB,EAAaH,EAAUH,EAAiBG,EAAU,CAAC,CAAC,CAAC,EACxF,CAAC,EAET,GAAGQ,EAA2BrB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,E
AAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9G,EAEMsB,EAAmBC,GAA+B,CACtD,IAAMrB,EAAQsB,EAAc,QAASxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEG,EAAcqB,EAAc,eAAgBxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACrFI,EAAWoB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EK,EAAWmB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EyB,EAASC,EAAe,SAAU1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAEjF,OAAAuB,EAAa,iBAAiB,CAC5B,CAAC,KAAM,QAAS,KAAM,KAAK,EAC3B,CAAC,KAAM,eAAgB,KAAM,MAAO,OAAQL,EAAY,MAAM,EAC9D,CAAC,KAAM,iBAAkB,KAAM,MAAO,OAAQC,EAAc,MAAM,EAClE,CAAC,KAAM,uBAAwB,KAAM,MAAO,OAAQA,EAAc,MAAM,CAC1E,CAAC,EAEM;AAAA,UACLI,EAAa,iBAAiBrB,EAAOC,EAAaC,EAAUC,EAAUoB,CAAM,CAAC;AAAA;AAAA,UAE7EF,EAAa,UAAUI,EAAc,CAAC;AAAA,+CACDvB,EAAS,IAAI;AAAA;AAAA;AAAA,YAGhDmB,EAAa,sCAAsC,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA,kBAIpDpB,EAAY,2BAA2B,UAAWuB,EAAe,GAAIvB,EAAY,KAAK,OAAQ,CAAC,CAAC,CAAC;AAAA;AAAA,sBAE7FA,EAAY,YAAY,kBAAkB,CAAC;AAAA,oFACmBW,CAAW;AAAA,yDACtCA,CAAW;AAAA,uBAC7CZ,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEF,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEoB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA,uBACpBvB,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEH,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEqB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA;AAAA;AAAA,cAG7BA,EAAO,YAAY,IAAKvB,EAAM,YAAY,GAAG,CAAC,CAAC;AAAA;AAAA,UAGvD,EAEA,MAAO,CACL,KAAM,kBACN,YAAa,CACX,KAAM0B,GAA4B,CAC1B,YAAAd,CACF,CAAC,EAAE,SACT,kBAAmB,CAAC,OAAQ,OAAQ,OAAQ,MAAM,CACpD,EACA,gBAAAQ,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAU,KAAKU,CAAW,EAAIS,EAAc,CAAC,EAC1E,gBAAAP,CACF,EACF,CACF,EAES3B,GAAkB,CAACoC,EAAyB5B,IAAgD,CACvGV,GAAesC,EAAQ,OAAQ5B,CAAU,EACzC4B,EAAQ,QAAQrC,GAAiCqC,EAAQ,OAAQ5B,CAAU,CAAC,CAC9E,ICzKA,IAeM6B,GAwDAC,GA4IOC,GAnNbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAOMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMC,EAAoBD,EAAO,CAAC,EAC5BE,EAAmBF,EAAO,CAAC,EAC3BG,EAAoBH,EAAO,CAAC,EAElC,GAAIC,EAAM,WAAaC,EAAK,UAAYD,EAAM,WAAaE,EAAM,SAC/D,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIC,EAAK,KAAK,SAAW,GAAKA,EAAK,KAAK,SAAW,EACjD,MAAM,IAAI,MAAM,uBAAuB,EAGzC,IAAME,EAAaH,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EAC7CI,EAAiBJ,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACvD,GAAIC,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAME,EACtC,MAAM,IAAI,MAAM,8CAA8C,EAEhE,GAAIF,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMG,EACtC,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIF,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,IAAMC,EACxC,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBN,EAAO,CAAC,EACjC,GAAIM,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMF,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACA,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMO,EAAmBP,EAAO,CAAC,EACjC,GAAIO,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMH,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACF,EAEMX,GACF,CAACO,EAA+BQ,EAAqCC,EAAqBC,IACvE,CACb,IAAMC,EAAaH,EAAW,WAExBI,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAcH,EACdI,EAAaH,EACbT,EAAaQ,EAAW,MAAM,EAAE,EAAE,CAAC,EACnCK,EAAmBP,EAAaE,EAAW,MAAM,EAAG,EAAE,EAAE,OAAO,CAAC,EAAI,CAAC,EACrEM,EAAe,CAACP,GAAcX,EAAO,OAAS,EAC9CmB,EAAenB,EAAO,OAAS,EAC/BoB,EAAgBV,GAAcD,EAAc,EAC5CY,EAAqBX,GAAcD,EAAc,EACjDa,EAA4Bb,EAAc,EAC1Cc,EAAgB,GAEhBC,EAAaC,GAAiBrB,CAAU,EAExCsB,EAAoC,CACxC,CAAC,QAAuB,KAAMV,CAAU,EACxC,
CAAC,QAAuB,KAAMQ,CAAU,EACxC,CAAC,QAAuB,KAAMpB,CAAU,EACxC,CAAC,OAAsB,KAAMI,EAAW,OAAO,CACjD,EACMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAAmC,CACvC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,aAAc,KAAM,KAAK,EAChC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,KAAK,CAC/B,EACMC,EAAY,CAChBC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACjEO,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACpEO,EAAc,QAAS/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CACvE,EACIN,GACFY,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAElFL,GACFW,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAEtFM,EAAU,KAAKE,EAAe,SAAUhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAChFJ,GACFU,EAAU,KAAKE,EAAe,gBAA+Bf,CAAgB,CAAC,EAE5EI,GACFS,EAAU,KAAKE,EAAe,mBAAkCf,CAAgB,CAAC,EAE/EK,GACFQ,EAAU,KAAKE,EAAe,sBAAuBhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAEnG,IAAMS,EAAWC,GAA4BlC,EAAO,CAAC,EAAE,QAAQ,EACzDmC,EAAcD,KAA4CV,CAAU,EAC1E,MAAO;AAAA;AAAA,QAEXI,EAAa,iBAAiBC,CAAa,EAAE,iBAAiB,GAAGC,CAAS,CAAC;AAAA,0CACzCK,CAAW,KAAKZ,CAAa;AAAA,kDACrBY,CAAW,KAAKZ,CAAa;AAAA;AAAA,QAEvEK,EAAa,UAAU,CACjBL,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA;AAAA,iCAEmBA,CAAa;AAAA;AAAA;AAAA,gDAGEA,CAAa;AAAA;AAAA;AAAA,oBAGzCA,EAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKRJ,EAAe,qBAAuBc,EAAW,OAAO;AAAA;AAAA;AAAA,YAGzEX,EAA4B,2CAA6C,EAAE;AAAA;AAAA,4BAE3Dc,GAAUH,EAAUT,EAAY,OAAO,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMlCD,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAY1Bc,GAAU,MAAOb,CAAU,CAAC;AAAA,wCACTa,GAAU,aAAcb,CAAU,CAAC,gCAC3Db,EAAa,GAAK,eAAe;AAAA,UACvCS,EAAgB,kCAAoC,EAAE;AAAA,UACtDC,EAAqB,4CAA8C,EAAE;AAAA;AAAA;AAAA,qDAG1BV,EAAa,GAAK,KAAKsB,CAAQ,QAAQ;AAAA,cAC9EA,CAAQ;AAAA,cACRf,EAAe,uBAAyB,EAAE;AAAA;AAAA,QAG9C,EACMoB,EAAU,CAAC,CAAC,KAAMvB,EAAa,SAAUf,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIS,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAM1B,EAAY,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAExD,CACL,KAAM,yBACN,YAAa,CACX,KAAM,GAAGwB,CAAU,IAAIJ,CAAa,IAAIC,CAAkB,IAAIC,CAAyB,GACvF,kBAAmBtB,EAAO,IAAI,CAACuC,EAAQC,IAAW,MAAM,CAC1D,EACA,gBAAAb,EACA,WAAY,KAAO,CACjB,QAAAW,EACA,cAAe,CACb,EAAG,KAAK,KAAKtB,EAAaZ,CAAU,CACtC,EACA,gBAAAsB,CACF,EACF,CACF,EAEKhC,GAAgB,CAAC+C,EAAyBjC,IAA8C,CAGnGhB,GAAeiD,EAAQ,MAAM,EAG7B,IAAMH,EAAU,CAAC,CAAC,EACdG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAK,CAAC,EAEhBG,EAAQ,QACJhD,GAA+BgD,EAAQ,OAAQjC,EAAYiC,EAAQ,YAAa,EAAU,EAAG,CAAC,QAAAH,CAAO,CAAC,CAC5G,ICrOA,IAiBMI,GAkBAC,GAcAC,GAeAC,GAcAC,GAsBAC,GAmFOC,GAYAC,GAnMbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAQMb,GAAiB,CAACc,EAA+BC,IAAsC,CAC3F,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIC,EAAW,KAAK,SAAW,GAC7B,GAAIA,EAAW,KAAK,SAAWA,EAAW,OAAO,QAAUA,EAAW,KAAK,SAAWA,EAAW,KAAK,OACpG,MAAM,IAAI,MAAM,iDAAiD,UAE1DA,EAAW,OAAO,SAAWA,EAAW,KAAK,OACtD,MAAM,IAAI,MAAM,2CAA2C,EAE7DD,EAAO,MAAM,CAAC,EAAE,QAAQ,CAACE,EAAGC,IAAQ,CAClC,GAAIH,EAAOG,EAAM,CAAC,EAAE,WAAa,GAAkBH,EAAOG,EAAM,CAAC,EAAE,WAAa,EAC9E,MAAM,IAAI,MAAM,SAASA,CAAG,qCAAqC,CAErE,CAAC,CACH,EAEMhB,GAAY,CAACa,EAA+BG,IAA0B,CAC1E,IAAMC,EAAkB,CAAC,EACzB,GAAIJ,EAAO,OAASG,EAClB,GAAIH,EAAOG,CAAG,EAAE,WAAa,EAC3BH,EAAOG,CAAG,EAAE,iBAAiB,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,UACxDL,EAAOG,CAAG,EAAE,WAAa,EAClCH,EAAOG,CAAG,EAAE,cAAc,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,MAE9D,OAAM,IAAI,MAAM,SAASF,CAAG,qCAAqC,EAGrE,OAAOC,CACT,EAEMhB,GACF,CAACY,EAA+BC,IAAiD,CAC/E,GAAID,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBnB,GAAUa,EAAQ,CAAC,EACtC
O,EAAiBpB,GAAUa,EAAQ,CAAC,EACtCQ,EAAiBrB,GAAUa,EAAQ,CAAC,EACxC,OAAIQ,EAAK,SAAW,IAClBA,EAAO,CAAC,GAAG,MAAMR,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,KAAK,CAAC,GAEzCS,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,KACE,QAAOP,CAEX,EAEEZ,GACF,CAACqB,EAAeC,EAAeC,EAA+BJ,EAAyBK,IACzE,CACR,IAAIC,EAAWJ,EAIf,OAHIA,EAAQ,IACVI,GAAYF,EAAWJ,EAAKG,CAAK,CAAC,GAEhCE,EAAMF,CAAK,EAAI,EACV,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,EAAI,CAAC,CAAC,EAE3D,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,CAAC,CAAC,CAElE,EAEFrB,GACF,CAACc,EAAsBW,EAAuBH,IAC1C,4CAA4CG,EAAO,KAAK,OAAO,QAAQX,EAAM,KAAK,OAAO;AAAA,+BAClEA,EAAM,KAAK,OAAO;AAAA;AAAA,yBAExBQ,EAAW,MAAM;AAAA,kCACRI,EAAa,uBAAwB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BAClEI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BACtDI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,6BACrDI,EAAa,kBAAmB,IAAKJ,EAAW,MAAM,CAAC;AAAA,iCACnDG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAO3DX,EAAM,WAAW,gBAAiB,IAAK,aAAa,CAAC;AAAA;AAAA;AAAA,SAK7Db,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBiB,EAAYC,EAAU,KAAKN,CAAU,EACrCJ,EAAQP,EAAW,KAAK,OAAS,EAAKiB,EAAU,cAAcjB,EAAW,KAAMW,EAAW,MAAM,EAC1D,CAAC,GAAG,MAAMA,EAAW,MAAM,EAAE,KAAK,CAAC,EAC3EC,EAAQ1B,GAAUa,EAAQ,CAAC,EAC/Ba,EAAM,QAASM,GAASA,IAAS,IAAM,IAAM,CACnB,MAAM,IAAI,MAAM,kBAAkB,CACpC,EAAE,EACtBN,EAAM,SAAW,IACnBA,EAAQ,MAAML,EAAK,MAAM,EAAE,KAAK,CAAC,GAEnC,IAAMF,EAASL,EAAW,OAAO,IAAI,CAACmB,EAAOC,IAAMhC,GAAkB+B,EAAOC,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAEjGN,EAAON,EAAW,KAAK,IAAI,CAACqB,EAAKD,IAAMhC,GAAkBiC,EAAKD,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAE/F,GAAIL,EAAK,SAAWF,EAAO,QAAUE,EAAK,SAAWD,EAAK,OACxD,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIC,EAAK,SAAWI,EAAW,OAC7B,QAASS,EAAI,EAAGA,EAAIT,EAAW,OAAQ,EAAES,EAClCb,EAAK,SAASa,CAAC,IAClBf,EAAO,OAAOe,EAAG,EAAG,CAAC,EACrBd,EAAK,OAAOc,EAAG,EAAGT,EAAWS,CAAC,CAAC,EAC/BR,EAAM,OAAOQ,EAAG,EAAG,CAAC,GAI1B,IAAME,EAAQV,EAAM,IAAIM,GAAQ,KAAK,KAAKA,CAAI,CAAC,EAE/CN,EAAM,QAAQ,CAACM,EAAME,EAAGG,IAAU,CAChC,GAAIL,EAAO,EAAG,CACZ,IAAMM,GAAYlB,EAAKc,CAAC,EAAIf,EAAOe,CAAC,GAAKF,EACnCO,EAASpB,EAAOe,CAAC,EACjBM,EAAWD,EAASD,EAAWZ,EAAMQ,CAAC,EAC5Cf,EAAOe,CAAC,EAAIM,EACZpB,EAAKc,CAAC,EAAIK,EACVF,EAAMH,CAAC,EAAI,CAACF,CACd,CACF,CAAC,EAED,IAAMS,EAAchB,EAAW,MAAM,CAAC,EACtCJ,EAAK,QAAQ,CAACqB,EAAM3B,IAAM,CACxB0B,EAAYC,CAAI,EAAI,KAAK,MAAMtB,EAAKsB,CAAI,EAAIvB,EAAOuB,CAAI,GAAKhB,EAAMgB,CAAI,CAAC,CACzE,CAAC,EACD,IAAMC,EAA+B,CAAC,KAAMF,EAAa,SAAU5B,EAAO,CAAC,EAAE,QAAQ,EAE/Ee,EAASgB,EAAe,SAAU/B,EAAO,CAAC,EAAE,SAAU4B,EAAY,MAAM,EACxExB,EAAQ4B,EAAc,QAAShC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEiC,EAAaf,EAAU,KAAKU,CAAW,EACvCM,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ5B,EAAO,MAAM,EACtF,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQiB,EAAM,MAAM,EAAG,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQV,EAAM,MAAM,CACvG,EAEMsB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAM3B,CAAM,EAC/E,CAAC,OAAsB,KAAMiB,CAAK,EAAG,CAAC,QAAuB,KAAMV,CAAK,EACxE,GAAGuB,EAA2BpC,EAAO,CAAC,EAAE,KAAM4B,CAAW,CAC3D,EAEMS,EAAmBC,GAA+B;AAAA,QAClDA,EAAa,iBAAiBJ,CAAQ,EAAE,iBAAiB9B,EAAOW,CAAM,CAAC;AAAA,UACrEzB,GAA0Bc,EAAOW,EAAQH,CAAU,CAAC;AAAA,UACpD0B,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,iCACpDvB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDA,EAAO,YAAY,aAAcX,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,SAE/E,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGmB,EAAM,MAAM,IAAIjB,EAAO,MAAM,IAAIO,EAAM,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACnG,gBAAAwB,EACA,WAAY,KAAO,CACjB,QAAS,CAACP,CAAgB,EAC1B,cAAe,CAAC,EAAG,KAAK,KAAKb,EAAY,EAAuB,CAAC,EACjE,gBAAAkB,CACF,EACF,CACF,EAEa3C,GAAQ,CAAC+C,EAAyBtC,IAAsC,CACnFf,GAAeqD,EAAQ,OAAQtC,CAAU,EACzC,IAAMuC,EAAoBpD,GAAgCmD,EAAQ,OAAQtC,CAAU,EACpFsC,EAAQ,QAAQh
D,GAAuBgD,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAO1F,EAEa/C,GAAwBQ,GAAyD,CAC5F,IAAMK,EAASL,EAAW,OACpBM,EAAON,EAAW,KAClBO,EAAOP,EAAW,KACxB,OAAOQ,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,ICxMA,IAeMiC,GAUAC,GAwHOC,GAKAC,GAtJbC,GAAAC,EAAA,kBAOAC,KAEAC,KACAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,CAElD,EAMMT,GAA2B,CAACU,EAAmBC,IAA+C,CAClG,IAAMC,EAAQF,EAAM,KACdG,EAAaC,EAAU,KAAKF,CAAK,EACjCG,EAAK,GACPC,EAAOL,EAAW,KAItB,GAHIK,EAAO,IACTA,EAAOJ,EAAM,OAASI,GAEpBA,EAAOJ,EAAM,OAAS,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMK,EAAOL,EAAMI,CAAI,EACjBE,EAAOL,EAAaI,EACpBE,EAAaC,GAAiBH,CAAI,EAClCI,EAAaJ,EAAOE,EAEpBG,EAAY,CAACC,EAAcJ,IAC3BA,IAAe,EACV,WAAWI,CAAI,OAAOA,CAAI,YAAYA,CAAI,OAAOA,CAAI,OACnDJ,IAAe,EACjB,OAAOI,CAAI,OAAOA,CAAI,MACpBJ,IAAe,EACjB,WAAWI,CAAI,OAAOA,CAAI,QAAQA,CAAI,MAGxCA,EAEHC,EAAIC,EAAc,IAAKf,EAAM,SAAUA,EAAM,KAAMS,CAAU,EAC7DO,EAASC,EAAe,SAAUjB,EAAM,SAAUA,EAAM,KAAMS,CAAU,EACxES,EAAYJ,EAAE,KAAK,MAEnBK,EAAgBC,GAA4BpB,EAAM,QAAQ,IAAM,MAClE,mBAAmBkB,CAAS,oBAC5B,mBAAmBA,CAAS,eAC1BG,EAAmBC,GAA+B;AAAA,sCACpBJ,CAAS;AAAA,sCACTA,CAAS;AAAA,4CACHA,CAAS,KAAKb,CAAE;AAAA;AAAA,4DAEAa,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA,gEAKLA,CAAS;AAAA;AAAA;AAAA;AAAA,QAIjEI,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBR,EAAGE,CAAM,CAAC;AAAA,QAC7EM,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA,qBAGXjB,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAMbc,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAmBID,CAAS,IAAIN,EAAU,kBAAmBH,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKtDS,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAeRA,CAAS,IAAIK,GAAU,kBAAmBd,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAU9E,MAAO,CACL,KAAM,UACN,YAAa,CAAC,KAAM,GAAGA,CAAU,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAChE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAO,SAAUF,EAAM,QAAQ,CAAC,EACjD,cAAe,CAAC,EAAGQ,CAAI,EACvB,gBAAiB,CAAC,CAAC,OAAsB,KAAMG,CAAU,CAAC,CAC5D,GACA,gBAAAU,CACF,CACF,EAEa9B,GAAU,CAACiC,EAAyBvB,IAAwC,CACvFZ,GAAemC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAyBkC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CACzE,EAEaT,GAA0BS,GACnCwB,GAA4B,CAAC,KAAMxB,EAAW,IAAc,CAAC,ICvJjE,IAiBMyB,GAMAC,GAWAC,GASAC,GAqBAC,GAuDOC,GAOAC,GA9HbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAQMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,CAEpC,EAEMZ,GACF,CAACY,EAA+BC,IAAiD,CAC/E,IAAMC,EAAuB,CAAC,EAC1BC,EAAqBF,EAAW,WACpC,OAAID,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQI,GAAKF,EAAW,KAAK,OAAOE,CAAC,CAAC,CAAC,EACpED,EAAaD,EAAW,QAEnBG,GAA4B,CAAC,WAAAF,EAAY,KAAMF,EAAW,KAAM,WAAAC,CAAU,CAAC,CACpF,EAEEb,GAA4BiB,GAAoC;AAAA;AAAA,gCAEtCA,CAAe;AAAA,kBAC7BC,EAAa,8BAA+B,IAAKD,CAAe,CAAC;AAAA;AAAA;AAAA;AAAA,aAItEA,CAAe;AAAA,GAEtBhB,GAAuBkB,GAAsC,CACjE,IAAMF,EAAkBE,EAAQ,OAC1BC,EAAsB,CAAC,EAC7B,QAASC,EAAI,EAAGA,EAAIJ,EAAiB,EAAEI,EAAG,CACxC,IAAMC,EAAgBH,EAAQE,CAAC,EAAE,aAAa,UAAW,mBAAmB,EACxEJ,IAAoB,EACtBG,EAAU,KAAKE,CAAa,EACnBD,IAAM,EACfD,EAAU,KAAK,wBAAwBC,CAAC,QAAQC,CAAa,IAAI,EACxDD,IAAMJ,EAAkB,EACjCG,EAAU,KAAK,UAAUE,CAAa,IAAI,EAE1CF,EAAU,KAAK,6BAA6BC,CAAC,OAAOC,CAAa,IAAI,CAEzE,CACA,MAAO;AAAA,wDAC+CH,EAAQ,CAAC,EAAE,KAAK,OAAO;AAAA,UACrEC,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,QAE9B,EAEMlB,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAWf,EAAO,CAAC,EAAE,SACrBgB,EAAOF,EAAU,cAAcb,EAAW,KAAMW,EAAW,MAAM,EACjEJ,EAAU,IAAI,MAAqBP,EAAW,UAAU,EACxDgB,EAAQC,EAAc,QAASH,EAAUH,EAAW,MAAM,EAC1DO,EAAkB,IAAI,MAAclB,EAAW,UAAU,EACzDmB,EAAkC,CAAC,EACnCC,EAA2B,CAAC,EAC9BC,EAAc,EACZC,EAAoC,CAAC,CAAC,QAAuB,KAAMV,CAAS,CAAC,EACnF,QAASH,EAAI,EAA
GA,EAAIT,EAAW,WAAYS,IAAK,CAC9CY,GAAerB,EAAW,WAAWS,CAAC,EACtCS,EAAgBT,CAAC,EAAIY,EACrB,IAAME,EAAcZ,EAAW,MAAM,EACrCY,EAAYvB,EAAW,IAAI,EAAIA,EAAW,WAAWS,CAAC,EACtDW,EAAa,KAAKG,CAAW,EAC7BhB,EAAQE,CAAC,EAAIe,EAAe,SAASf,CAAC,GAAIK,EAAUS,EAAY,MAAM,EACtEJ,EAAkB,KAAK,CAAC,KAAMC,EAAaX,CAAC,EAAG,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,CAC9E,CACAuB,EAAgB,KACZ,CAAC,QAAuB,KAAMJ,CAAe,EAAG,GAAGO,EAA2Bd,EAAY,GAAGS,CAAY,CAAC,EAC9G,IAAMM,EAAmBC,GAA+B;AAAA,IAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,qBAAsB,MAAOT,EAAgB,MAAM,EACnE,iBAAiBF,EAAO,GAAGT,CAAO,CAAC;AAAA,IAC1CnB,GAAyB8B,EAAgB,MAAM,CAAC;AAAA,IAChD7B,GAAoBkB,CAAO,CAAC;AAAA;AAAA,IAE5BoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DX,EAAM,gBAAgB,YAAY,CAAC;AAAA,kBACrCA,EAAM,WAAW,UAAWD,CAAI,CAAC;AAAA;AAAA;AAAA,iBAGlCT,EAAa,8BAA+B,qBAAsBY,EAAgB,MAAM,CAAC;AAAA,QAClGF,EAAM,WAAW,UAAWD,EAAM,OAAO,CAAC;AAAA;AAAA;AAAA,KAIhD,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAMf,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,gBAAA0B,EACA,WAAY,KAAO,CACjB,QAASP,EACT,cAAe,CAAC,EAAG,KAAK,KAAKP,EAAY,EAAuB,CAAC,EACjE,gBAAAU,CACF,EACF,CACF,EAEa/B,GAAQ,CAACqC,EAAyB5B,IAAsC,CACnFd,GAAe0C,EAAQ,MAAM,EAC7B,IAAMC,EACFD,EAAQ,OAAO,SAAW,EAAI5B,EAAab,GAAgCyC,EAAQ,OAAQ5B,CAAU,EACzG4B,EAAQ,QAAQtC,GAAuBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC1F,EAEarC,GAAwBQ,GAAyD,CAC5F,IAAMe,EAAOf,EAAW,KAClBC,EAAuBD,EAAW,WAClCE,EAAaF,EAAW,WAAuB,EAAIC,EAAW,OAASD,EAAW,WACxF,GAAIE,IAAeD,EAAW,OAC5B,MAAM,IAAI,MAAM,+CAA+C,EAEjE,OAAOG,GAA4B,CAAC,KAAAW,EAAM,WAAAb,EAAY,WAAAD,CAAU,CAAC,CACnE,ICtIA,IAUM6B,GA4DAC,GAoCOC,GA1GbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMP,GACF,CAACQ,EAA4BC,EAA+BC,EAA+BC,EAC1FC,IAAuB,CACtB,IAAMC,EAASC,EAAe,cAAeF,EAAYF,EAAW,OAAQ,CAAC,EACvEK,EAAIC,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEQ,EAAID,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxES,EAAIF,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EAE1EU,EACEC,EAAa,CAACL,EAAWE,EAAWC,IAAc,UAAUD,CAAC,KAAKF,CAAC,KAAKG,CAAC,IAC/E,GAAI,CAACP,EACHQ,EAAaN,EAAO,YAChB,aACAO,EAAWL,EAAE,YAAY,YAAY,EAAGE,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAChG,CACL,IAAMG,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,iBAAiBF,CAAC,gBAAgBA,CAAC,IACjDG,EAAc,iBAAiBH,CAAC,gBAAgBA,CAAC,IAEjDI,EAAc,sBAAsBJ,CAAC,6BAA6BA,CAAC,UACzE,MAAO;AAAA,gCACeA,CAAC,MAAMV,EAAO,gBAAgB,qBAAqBU,CAAC,GAAG,CAAC;AAAA,0BAC9DA,CAAC,MAAMR,EAAE,2BAA2B,iBAAiBQ,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMN,EAAE,2BAA2B,iBAAiBM,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAML,EAAE,2BAA2B,iBAAiBK,CAAC,GAAIV,CAAM,CAAC;AAAA,yBAClEU,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,6BACZA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,cAC/BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIJ,EAAWK,EAAaC,EAAaC,CAAW,CAAC;AAAA,WAErF,EACIf,IAAe,EACjBO,EAAa;AAAA;AAAA,cAETE,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,wGAGtCF,EAAa;AAAA,cACTE,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,WAGtD,CAEA,MAAO;AAAA,UACHb,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBU,EAAGH,EAAGE,EAAGJ,CAAM,CAAC;AAAA,UACjFL,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEW,CAAU;AAAA,QAEhB,EAEElB,GAA4BQ,GAA+C,CAC/E,IAAMmB,EAAQnB,EAAO,CAAC,EAAE,KAClBoB,EAAQpB,EAAO,CAAC,EAAE,KAClBqB,EAAQrB,EAAO,CAAC,EAAE,KAClBsB,EAAiBtB,EAAO,CAAC,EAAE,SAE3BE,EAAc,EAAEqB,EAAU,SAASJ,EAAOC,CAAK,GAAKG,EAAU,SAASH,EAAOC,CAAK,GACrFG,EAAcL,EACdM,EAAaF,EAAU,KAAKJ,CAAK,EAGrC,G
AAIjB,EAAa,CACf,IAAMwB,EAAkBC,GAAc,UAAUA,GAAc,UAAUR,EAAOC,EAAO,EAAK,EAAIC,EAAO,EAAK,EAC3G,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,6CAA8C,EAEhEF,EAAcE,EACdD,EAAaF,EAAU,KAAKC,CAAW,CACzC,CAEA,IAAMI,EAAU,KAAK,KAAKH,EAAa,CAAC,EAExC,MAAO,CACL,KAAM,QACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,gBAAkB1B,GACdR,GAA2BQ,EAAcC,EAAQwB,EAAatB,EAAaoB,CAAc,EAC7F,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAME,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAgB,CAAC,EACrF,gBACI,CAAC,CAAC,QAAuB,KAAMG,CAAO,EAAG,GAAGC,EAA2BR,EAAOF,EAAOC,EAAOI,CAAW,CAAC,CAC9G,EACF,CACF,EAEa/B,GAASqC,GAAkC,CACtDA,EAAQ,QAAQtC,GAAyBsC,EAAQ,MAAM,CAAC,CAC1D,IC5GA,IA8CaC,GA9CbC,GAAAC,EAAA,kBAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAOavC,GAA+D,IAAI,IAAI,CAClF,CAAC,MAAO,CAAUwC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAwB,CAAC,EAC7C,CAAC,SAAU,CAACC,GAAQD,EAAwB,CAAC,EAC7C,CAAC,OAAQ,CAAUE,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACC,EAAS,CAAC,EAEzB,CAAC,cAAe,CAAMC,GAAkBC,EAA0B,CAAC,EACnE,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,UAAW,CAACC,EAAO,CAAC,EACrB,CAAC,gBAAiB,CAACC,EAAa,CAAC,EACjC,CAAC,OAAQ,CAAUC,GAAeC,EAAmB,CAAC,EACtD,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,gBAAiB,CAACC,GAAeC,EAA4B,CAAC,EAC/D,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,eAAgB,CAACC,GAAcC,EAA2B,CAAC,EAC5D,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,MAAO,CAAUC,GAAcC,EAAoB,CAAC,EACrD,CAAC,QAAS,CAAWC,EAAK,CAAC,EAC3B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACpB,GAAMC,EAAmB,CAAC,EACzC,CAAC,SAAU,CAACoB,GAAQC,EAAqB,CAAC,EAC1C,CAAC,iBAAkB,CAACC,GAAgBC,EAA6B,CAAC,EAClE,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,oBAAqB,CAAMC,GAAwBC,EAAgC,CAAC,EACrF,CAAC,gBAAiB,CAAMC,GAAoBC,EAA4B,CAAC,EACzE,CAAC,UAAW,CAAWC,EAAO,CAAC,EAC/B,CAAC,iBAAkB,CAAWC,EAAc,CAAC,EAC7C,CAAC,sBAAuB,CAACC,GAAqBC,EAAkC,CAAC,EACjF,CAAC,cAAe,CAAUC,GAAsBC,EAA0B,CAAC,EAC3E,CAAC,wBAAyB,CAACC,EAAY,CAAC,EACxC,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,YAAa,CAAUC,GAAoB1B,EAAoB,CAAC,EACjE,CAAC,OAAQ,CAAW2B,EAAI,CAAC,EACzB,CAAC,cAAe,CAAWC,EAAW,CAAC,EACvC,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,cAAe,CAACC,GAAaC,EAA0B,CAAC,EAEzD,CAAC,UAAW,CAAMC,GAAcC,EAAsB,CAAC,EACvD,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,qBAAsB,CAACC,GAAoBC,EAAiC,CAAC,EAC9E,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAACC,EAAG,CAAC,EACb,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,YAAa,CAAUC,GAAoB1C,EAAoB,CAAC,EACjE,CAAC,QAAS,CAAC2C,EAAK,CAAC,EACjB,CAAC,aAAc,CAAUC,EAAU,CAAC,EACpC,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,eAAgB,CAACC,EAAY,CAAC,EAC/B,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,kBAAmB,CAACC,EAAe,CAAC
,EACrC,CAAC,UAAW,CAAUC,EAAO,CAAC,EAC9B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,yBAA0B,CAACC,EAAa,CAAC,EAC1C,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,UAAW,CAACC,GAASC,EAAsB,CAAC,EAC7C,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,kBAAmB,CAAUC,GAA0BzE,EAAoB,CAAC,EAC7E,CAAC,OAAQ,CAAC0E,EAAI,CAAC,EACf,CAAC,YAAa,CAACC,GAAWC,EAAwB,CAAC,EACnD,CAAC,QAAS,CAACC,EAAK,CAAC,CACnB,CAAC,IC5ID,IAoBaC,GApBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEAC,KAYaL,GAAN,KAAqB,CAI1B,YAAoBM,EAAwB,CAAxB,aAAAA,EAClB,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAmBC,EAAoBC,EAChEC,EAA0D,CAC5DC,GAAiBL,EAAc,YAAY,IAAI,EAC/C,IAAMM,EAAS,KAAK,QAAQ,OACtBC,EAAqB,KAAK,QAAQ,sBAAsB,EAC9D,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,CAAC,EAClE,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAASR,EAClBO,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQC,EAAM,MAAM,CAAC,CAAC,EAE1E,QAAWC,KAAUR,EACnBM,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQE,EAAO,MAAM,CAAC,CAAC,EAEvEN,GACFI,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAUJ,CAAoB,CAAC,EAExE,IAAMO,EAAYL,EAAO,gBACrB,CAAC,OAAQN,EAAc,gBAAgB,mBAAmB,CAAC,EAAG,QAAAQ,EAAS,MAAOR,EAAc,YAAY,IAAI,CAAC,EAEjH,GAAI,KAAK,QAAQ,gBAAkB,YAAa,CAC9C,IAAMY,EAAc,CAClB,SAAU,KAAK,QAAQ,gBACvB,gBAAiBZ,EAAc,gBAC/B,UAAAW,EACA,cAAAR,CACF,EAC2B,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC1E,KAAKS,CAAW,CACtC,CAEAL,EAAmB,YAAYP,EAAc,eAAe,EAC5DO,EAAmB,aAAa,EAAGI,CAAS,EAC5CJ,EAAmB,mBAAmB,GAAGJ,CAAa,EACtD,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,EAAI,CAAC,EACtE,KAAK,QAAQ,yBAET,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACnD,KAAK,QAAQ,YAAc,cAC7B,KAAK,QAAQ,eAAe,EAE1B,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACrD,KAAK,QAAQ,MAAM,EAErBU,GAAeb,EAAc,YAAY,IAAI,CAC/C,CACA,SAAgB,CAEhB,CACA,MAAMc,EAA0BC,EAAiE,CAC/FV,GAAiBS,EAAY,IAAI,EACjC,IAAMR,EAAS,KAAK,QAAQ,OACtBU,EAAuB,CAAC,EAC1BV,EAAO,SAAS,IAAI,YAAY,GAClCU,EAAW,KAAK,aAAa,EAE/B,IAAMC,EAAeC,GAAmBH,EAA6B,KAAK,QAAQ,OAAO,MAAM,EACzFI,EAAWL,EAAY,gBAAgBG,CAAY,EACnDG,EAAO,GAAGJ,EAAW,KAAK;AAAA,CAAI,CAAC;AAAA,EAAKC,EAAa,yBAAyB;AAAA,EAAKE,CAAQ,GACvFE,EAAef,EAAO,mBAAmB,CAAC,KAAAc,EAAM,MAAON,EAAY,IAAI,CAAC,EAC9EQ,GAAU,UAAW,IAAM,YAAYR,EAAY,IAAI,iBAAiBM,CAAI,EAAE,EAE9E,IAAMG,EAAkBjB,EAAO,sBAC3B,CAAC,QAAS,CAAC,OAAQe,EAAc,WAAY,MAAM,EAAG,OAAQ,OAAQ,MAAOP,EAAY,IAAI,CAAC,EAElG,OAAAD,GAAeC,EAAY,IAAI,EACxB,CAAC,YAAAA,EAAa,gBAAAS,EAAiB,qBAAsBN,EAAa,aAAa,CACxF,CAEA,2BAA2Bd,EACE,CAC3B,IAAMqB,EAAI,OAAOrB,GAAkB,SAAWA,EAAgBA,EAAc,EACtEsB,EAAI,OAAOtB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEuB,EAAI,OAAOvB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEwB,EAAoB,KAAK,QAAQ,OAAO,OAAO,iCACrD,GAAIH,GAAKG,GAAqBF,GAAKE,GAAqBD,GAAKC,EAC3D,MAAO,CAACH,EAAGC,EAAGC,CAAC,EAEjB,IAAME,EAAOJ,EAAIC,EAAIC,EACjBG,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EAC/C,GAAIC,EAAkBF,EAAmB,CAEvC,GADAE,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EACvCC,EAAkBF,EACpB,MAAM,IAAI,MAAM,6CAA6C,EAE/D,MAAO,CAACE,EAAiBA,EAAiBA,CAAe,CAC3D,KACE,OAAO,CAACA,EAAiBA,EAAiB,CAAC,CAE/C,CACF,IC3HA,IAmCMC,GA4CAC,GAiBAC,GAwBOC,GAxHbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KACAC,KACAC,KACAC,KAwBMZ,GACF,CAACa,EAAqCC,IAA2E,CAC/G,GAAIA,EAAkB,SAAWD,EAAa,OAC5C,MAAM,IAAI,MAAM,4BAA4BC,EAAkB,MAAM,wCAChED,EAAa,MAAM,GAAG,EAG5B,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIH,EAAa,OAAQ,EAAEG,EAAG,CAC5C,IAAMC,EAAOJ,EAAaG,CAAC,EAAE,SAC7B,OAAQF,EAAkBE,CAAC,EAAG,CAC5B,IAAK,OAAQ,CACXD,EAAW,KAAK,EAAE,EAClB,KACF,CACA,IAAK,OAAQ,CACXA,EAAW,KAAK,GAAGE,CAAI,EAAE,EACzB,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAOL,EAAaG,CAAC,EAAE,KAAK,OAClCD,EAAW,K
AAK,GAAGE,CAAI,IAAIC,CAAI,EAAE,EACjC,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAON,EAAaG,CAAC,EAAE,KAAK,KAAK,GAAG,EAC1CD,EAAW,KAAK,GAAGE,CAAI,IAAIE,CAAI,EAAE,EACjC,KACF,CACA,QACE,MAAM,IAAI,MAAM,iCAAiCL,EAAkBE,CAAC,CAAC,EAAE,CAC3E,CACF,CAEA,OAAOD,EAAW,KAAK,GAAG,CAC5B,EASEd,GACF,CAACmB,EAA0BP,EAAqCQ,IAA0C,CAGxG,IAAIC,EAAMF,EAAY,KACtB,OAAIA,EAAY,aAAa,OAC3BE,GAAO,IAAMF,EAAY,YAAY,KAAO,KAE9CE,GAAO,IAAMD,EACT,IACOrB,GACIa,EACAO,EAAY,aAAa,mBACrB,IAAI,MAAwCP,EAAa,MAAM,EAAE,KAAK,MAAM,CAAC,CAAC,GAC1FS,CACT,EAEEpB,GAAN,KAA6C,CAI3C,YAAYqB,EAA6B,CACnCA,IACF,KAAK,aAAeA,EAAY,aAChC,KAAK,OAASA,EAAY,OAE9B,CAEA,eAAeC,EAAwC,CACrD,OAAO,KAAK,eAAiBA,CAC/B,CAEA,SAASC,EAA4B,CACnC,OAAO,KAAK,SAAWA,CACzB,CACF,EAMatB,GAAN,KAAoB,CAApB,cAkBL,sBAAgC,KAOhC,qBAA+B,KAgC/B,KAAQ,eAAyC,KACjD,KAAQ,mBAAiD,KACzD,uBAAoB,GACpB,2BAAwB,EAGxB,KAAQ,eAAsC,CAAC,EAE/C,KAAQ,eAAsD,IAAI,IAOlE,mBAA8B,UAI9B,yBAAkD,IAAI,IAKtD,KAAQ,uBAA2D,IAAI,IAKvE,gCAA4E,IAAI,IA7ChF,IAAI,yBAAoD,CACtD,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,yEAAyE,EAG3F,IAAIuB,EAAO,KAAK,iBAAiB,IAAI,KAAK,eAAe,EACzD,OAAKA,IACHA,EAAO,CAAC,EACR,KAAK,iBAAiB,IAAI,KAAK,gBAAiBA,CAAI,GAG/CA,CACT,CAmCA,MAAM,WAAWC,EAAUC,EAAoC,CAC7D,KAAK,IAAMD,EACX,IAAME,EAAqC,CAAC,EACtCC,EAAwC,CAC5C,eAAgB,CACd,+BAAgCF,EAAQ,OAAO,+BAC/C,iCAAkCA,EAAQ,OAAO,iCACjD,4BAA6BA,EAAQ,OAAO,4BAC5C,cAAeA,EAAQ,OAAO,cAC9B,kCAAmCA,EAAQ,OAAO,kCAClD,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,wBAC3C,EACA,iBAAAC,CACF,EAEID,EAAQ,SAAS,IAAI,qDAAqD,EAC5EC,EAAiB,KAAK,qDAAuE,EACpFD,EAAQ,SAAS,IAAI,iBAAiB,GAC/CC,EAAiB,KAAK,iBAAiB,EAErCD,EAAQ,SAAS,IAAI,YAAY,GACnCC,EAAiB,KAAK,YAAY,EAGpC,KAAK,OAAS,MAAMD,EAAQ,cAAcE,CAAgB,EAC1D,KAAK,YAAc,IAAI5B,GAAgB0B,EAAQ,MAAQ,MAAMA,EAAQ,mBAAmB,CAAC,EACzF,KAAK,eAAiBG,GAAqB,IAAI,EAC/C,KAAK,eAAiB,IAAIC,GAAe,IAAI,EAC7C,KAAK,QAAU,IAAI,IACnB,KAAK,qBAAuB,IAAI,IAChC,KAAK,iBAAmB,IAAI,IAG5BC,GAAgBN,EAAI,SAAW,CAAC,CAACA,EAAI,KAAK,EAI1C,KAAK,OAAO,kBAAoBO,GAAM,CAChCA,EAAG,iBAAiB,oBAEtB,QAAQ,MAAM,mDAAmDA,EAAG,MAAM,OAAO,EAAE,CAEvF,EAEA,OAAO,eACH,KAAK,IAAI,OAAQ,SAAU,CAAC,MAAO,KAAK,OAAQ,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAC3G,OAAO,eACH,KAAK,IAAI,OAAQ,UAAW,CAAC,MAAON,EAAS,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAGxG,KAAK,aAAa,CACpB,CAEA,SAAgB,CACV,OAAO,KAAK,SAAa,KAC3B,KAAK,SAAS,QAAQ,EAExB,KAAK,eAAe,QAAQ,CAC9B,CAEA,mBAAuC,CACrC,OAAK,KAAK,iBACR,KAAK,eAAiB,KAAK,OAAO,qBAAqB,GAElD,KAAK,cACd,CAEA,uBAA+C,CAC7C,GAAI,CAAC,KAAK,mBAAoB,CAC5B,IAAMO,EAAiB,KAAK,kBAAkB,EACxCC,EAAkD,CAAC,EAErD,KAAK,YAAc,cACrBA,EAAsB,gBAAkB,CACtC,SAAU,KAAK,SACf,0BAA2B,KAAK,sBAAwB,EACxD,oBAAqB,KAAK,sBAAwB,EAAI,CACxD,GAGF,KAAK,mBAAqBD,EAAe,iBAAiBC,CAAqB,CACjF,CACA,OAAO,KAAK,kBACd,CAEA,gBAAuB,CACjB,KAAK,qBACP,KAAK,mBAAmB,IAAI,EAC5B,KAAK,mBAAqB,KAE9B,CAEA,OAAc,CACZ,GAAI,CAAC,KAAK,eACR,OAGFC,GAAiB,EAEjB,KAAK,eAAe,EACpB,IAAIC,EACA,KAAK,YAAc,SACrB,KAAK,eAAe,gBAChB,KAAK,SAAW,EAAG,KAAK,sBAAwB,EAAG,KAAK,mBAAqB,CAAC,EAElFA,EAAkB,KAAK,OAAO,aAE1B,CAAC,KAAM,KAAK,sBAAwB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAExG,KAAK,eAAe,IAAIA,EAAiB,KAAK,cAAc,EAC5D,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAe,mBAChB,KAAK,mBAAqB,EAAGA,EAAiB,EAAG,KAAK,sBAAwB,EAAI,CAAC,GAGzF,KAAK,OAAO,MAAM,OAAO,CAAC,KAAK,eAAe,OAAO,CAAC,CAAC,EACvD,KAAK,eAAe,sBAAsB,EAC1C,KAAK,eAAiB,KACtB,KAAK,sBAAwB,EAEzB,KAAK,YAAc,QAChBA,EAAiB,SAAS,WAAW,IAAI,EAAE,KAAK,IAAM,CACzD,IAAMC,EAAa,IAAI,eAAeD,EAAgB,eAAe,CAAC,EAChEE,EAAiB,KAAK,eAAe,IAAIF,CAAe,EAC9D,QAAStB,EAAI,EAAGA,EAAIuB,EAAW,OAAS,EAAGvB,IAAK,CAC9C,IAAMyB,EAAoBD,EAAexB,CAAC,EACpC0B,EAAWD,EAAkB,SAC7BE,EAAa,KAAK,QAAQ,IAAID,CAAQ,EACtCE,EAAaD,EAAW,WACxBE,EAAaF,EAAW,WACxBG,EAAcL,EAAkB,YAChCM,EAAmBN,EAAkB,iBACrCO,EAAoBP,EAAkB,kBACtCQ,EAAeV,EAAWvB,EAAI,CAAC,EAC/BkC,EAAaX
,EAAWvB,EAAI,EAAI,CAAC,EAEnC,OAAO,KAAK,cAAkB,MAChC,KAAK,cAAgBiC,GAGvB,IAAME,EAAY,OAAOF,EAAe,KAAK,aAAa,EACpDG,EAAU,OAAOF,EAAa,KAAK,aAAa,EAEtD,GAAI,CAAC,OAAO,cAAcC,CAAS,GAAK,CAAC,OAAO,cAAcC,CAAO,EACnE,MAAM,IAAI,WAAW,2BAA2B,EAGlD,GAAI,KAAK,IAAI,OAAO,WAAW,OAC7B,KAAK,IAAI,OAAO,UAAU,OAAO,CAC/B,QAAS,EACT,eAAgBL,EAAiB,IAC7BM,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,gBAAiBL,EAAkB,IAC/BK,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,SAAAX,EACA,WAAAE,EACA,WAAAC,EACA,YAAAC,EACA,UAAAK,EACA,QAAAC,CACF,CAAC,MACI,CAEL,IAAIG,EAAc,GAClBR,EAAiB,QAAQ,CAACM,EAAOrC,IAAM,CACrCuC,GAAe,SAASvC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC7F,CAAC,EACD,IAAIG,EAAe,GACnBR,EAAkB,QAAQ,CAACK,EAAOrC,IAAM,CACtCwC,GAAgB,UAAUxC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC/F,CAAC,EAED,QAAQ,IAAI,uBAAuBX,CAAQ,IAAIE,CAAU,IAAIC,CAAU,IAAIC,CAAW,KAAKS,CAAW,GAClGC,CAAY,mBAAmBJ,EAAUD,CAAS,KAAK,CAC7D,CACAM,GAAM,MAAO,GAAGX,CAAW,KAAKG,CAAY,KAAKC,CAAU,EAAE,CAC/D,CACAZ,EAAgB,MAAM,EACtB,KAAK,eAAe,OAAOA,CAAe,CAC5C,CAAC,EAEHoB,GAAe,CACjB,CAaA,IAAIC,EAAsBZ,EAAyCa,EAC/DC,EACAC,EACAC,EAAmC,CACrC1B,GAAiBsB,EAAQ,IAAI,EAE7B,IAAMK,EAAwB,CAAC,EAC/B,QAAShD,EAAI,EAAGA,EAAI+B,EAAiB,OAAQ,EAAE/B,EAAG,CAChD,IAAMU,EAAOqB,EAAiB/B,CAAC,EAAE,KAEjC,GAAIU,IAAS,EACX,SAEF,IAAMuC,EAAU,KAAK,eAAe,IAAIvC,CAAI,EAC5C,GAAI,CAACuC,EACH,MAAM,IAAI,MAAM,0BAA0BvC,CAAI,EAAE,EAElDsC,EAAW,KAAKC,CAAO,CACzB,CAEA,GAAM,CAAC,QAAAC,EAAS,cAAAC,EAAe,gBAAAC,CAAe,EAAIT,EAAQ,WAAWZ,CAAgB,EAG/EsB,EAAyBT,EAAc,SAAW,EAAIM,EAAQ,IAAI,CAACI,EAAGtD,IAAMA,CAAC,EAAI4C,EACvF,GAAIS,EAAuB,SAAWH,EAAQ,OAC5C,MAAM,IAAI,MAAM,eAAeG,EAAuB,MAAM,qBAAqBH,EAAQ,MAAM,GAAG,EAIpG,IAAMlB,EAAkC,CAAC,EACnCuB,EAAyB,CAAC,EAChC,QAASvD,EAAI,EAAGA,EAAIkD,EAAQ,OAAQ,EAAElD,EAAG,CAIvC,GAAI,CAAC,OAAO,UAAUqD,EAAuBrD,CAAC,CAAC,GAAKqD,EAAuBrD,CAAC,EAAI,IAC5EqD,EAAuBrD,CAAC,GAAK+C,EAC/B,MAAM,IAAI,MAAM,yBAAyBM,EAAuBrD,CAAC,CAAC,EAAE,EAEtE,GAAIqD,EAAuBrD,CAAC,IAAM,GAChC,SAEF,IAAMwD,EAAcH,EAAuBrD,CAAC,IAAM,GAC5CyD,EAAeJ,EAAuBrD,CAAC,IAAM,GAC7C0D,EAAcF,GAAeC,EAC/BX,EAAyBI,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAC7D6C,EAAmBQ,EAAuBrD,CAAC,EAAGkD,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAGtF,GAFAgC,EAAkB,KAAK0B,CAAU,EAE7BA,EAAW,OAAS,EACtB,SAEF,IAAMT,EAAU,KAAK,eAAe,IAAIS,EAAW,IAAI,EACvD,GAAI,CAACT,EACH,MAAM,IAAI,MAAM,2BAA2BS,EAAW,IAAI,EAAE,EAK9D,GAHIF,GACF,KAAK,cAAc,KAAKP,CAAO,EAE7BQ,EAAc,CAChB,IAAIE,EAAiB,KAAK,qBAAqB,IAAI,KAAK,eAAgB,EACnEA,IACHA,EAAiB,CAAC,EAClB,KAAK,qBAAqB,IAAI,KAAK,gBAAkBA,CAAc,GAErEA,EAAe,KAAKV,CAAO,CAC7B,CACAM,EAAY,KAAKN,CAAO,CAC1B,CAIA,GAAID,EAAW,SAAWjB,EAAiB,QAAUwB,EAAY,SAAWvB,EAAkB,OAAQ,CAEpG,GAAIuB,EAAY,SAAW,EACzB,OAAAb,GAAeC,EAAQ,IAAI,EACpBX,EAMT,MAAM,IAAI,MACN,WAAWW,EAAQ,IAAI,4EAA4E,CACzG,CAKA,IAAIiB,EACJ,GAAIR,EAAiB,CACnB,IAAIS,EAAgB,EACdC,EAAoB,CAAC,EAE3BV,EAAgB,QAAQW,GAAK,CAC3B,IAAMrD,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIrD,EAAK,SAAW,EAClB,OAGF,IAAMsD,EAAgBD,EAAE,OAAS,GAAmB,EAAI,EACpDE,EACAC,GACAH,EAAE,OAAS,IACbG,GAAgBxD,EAAK,OAAS,EAAI,GAAMA,EAAK,OAAS,EAAI,EAAIA,EAAK,OAASsD,EAC5EC,EAAiBvD,EAAK,OAAS,EAAI,GAAKsD,EAAgBtD,EAAK,SAE7DwD,GAAgBxD,EAAK,QAAU,EAAIA,EAAK,OAASsD,EAAgB,GACjEC,EAAiB,IAEnBJ,EAAgB,KAAK,KAAKA,EAAgBK,EAAa,EAAIA,GAC3DJ,EAAQ,KAAKD,CAAa,EAM1B,IAAMM,GAAqBJ,EAAE,OAAS,GAAmB,EAAI,EAC7DF,GAAiBnD,EAAK,OAAS,EAAI,KAAK,KAAKA,EAAK,OAASyD,EAAkB,EAAIF,EAC9CvD,EAAK,OAASsD,CACnD,CAAC,EAID,IAAMI,EAAsB,GAC5BP,EAAgB,KAAK,KAAKA,EAAgBO,CAAmB,EAAIA,EACjE,IAAMC,EAAc,IAAI,YAAYR,CAAa,EACjDT,EAAgB,QAAQ,CAACW,EAAG/D,IAAM,CAChC,IAAMsE,EAASR,EAAQ9D,CAAC,EAClBU,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIA,EA
AE,OAAS,EACb,IAAI,WAAWM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UAChDqD,EAAE,OAAS,GACpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,GAEpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,EACpB,IAAI,aAAaM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,MAE3D,OAAM,IAAI,MAAM,6BAA6B4B,GAA2ByB,EAAE,IAAI,CAAC,EAAE,CAErF,CAAC,EAED,IAAMQ,EAEF,KAAK,eAAe,OAAOV,EAAe,eAAe,SAAW,eAAe,OAAO,EAC9F,KAAK,OAAO,MAAM,YAAYU,EAAkB,OAAQ,EAAGF,EAAa,EAAGR,CAAa,EACxF,KAAK,eAAe,QAAQU,EAAkB,EAAE,EAChDX,EAAuB,CAAC,OAAQ,EAAG,KAAMC,EAAe,OAAQU,EAAkB,MAAM,CAC1F,CAEA,IAAMC,EAA0B,KAAK,eAAe,2BAA2BrB,CAAa,EACtF9C,EAAuBmE,EAAwB,CAAC,IAAM,GAAKA,EAAwB,CAAC,IAAM,EAE1FlE,EAAMrB,GAAwB0D,EAASZ,EAAkB1B,CAAoB,EAC/EoE,EAAW,KAAK,eAAe,YAAYnE,CAAG,EAQlD,GAPKmE,IACHA,EAAW,KAAK,eAAe,MAAM9B,EAAS6B,CAAuB,EACrE,KAAK,eAAe,YAAYlE,EAAKmE,CAAQ,EAC7CC,GAAU,OAAQ,IAAM,mBAAmBpE,CAAG,kBAAkBqC,EAAQ,IAAI,EAAE,GAI5ES,GAAmBqB,EAAS,qBAAsB,CACpD,GAAIrB,EAAgB,SAAWqB,EAAS,qBAAqB,OAC3D,MAAM,IAAI,MAAM,4CAA4CA,EAAS,qBAAqB,MAAM,SAC5FrB,EAAgB,MAAM,gBAAgBqB,EAAS,YAAY,IAAI,IAAI,EAEzE,QAASzE,EAAI,EAAGA,EAAIoD,EAAgB,OAAQpD,IAAK,CAC/C,IAAM2E,EAAUvB,EAAgBpD,CAAC,EAC3B4E,EAAaD,EAAQ,KACrBE,EAAe,OAAOF,EAAQ,MAAS,SAAW,EAAIA,EAAQ,KAAK,OACnE,CAAC1E,EAAM6E,CAAM,EAAIL,EAAS,qBAAqBzE,CAAC,EACtD,GAAI4E,IAAe3E,GAAQ4E,IAAiBC,EAC1C,MAAM,IAAI,MAAM,oBAAoB9E,CAAC,0BAA0BC,CAAI,cAAc6E,CAAM,cACnFF,CAAU,cAAcC,CAAY,gBAAgBJ,EAAS,YAAY,IAAI,IAAI,CAEzF,CACF,CAOA,GALAC,GACI,OACA,IAAM,yBAAyB/B,EAAQ,IAAI,UAAUrC,CAAG,UAAUkE,EAAwB,CAAC,CAAC,IACxFA,EAAwB,CAAC,CAAC,IAAIA,EAAwB,CAAC,CAAC,EAAE,EAE9D,KAAK,YAAc,QAAU,KAAK,gBAAkB,YAAa,CACnE,IAAM/C,EAAuC,CAC3C,SAAU,KAAK,gBACf,YAAagD,EAAS,YAAY,KAClC,iBAAA1C,EACA,kBAAAC,CACF,EACA,KAAK,eAAe,KAAKP,CAAiB,EAEtC,KAAK,gBAAkB,aACK,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC7D,KAAKA,CAAiB,CAEjD,CAEA,YAAK,eAAe,IAAIgD,EAAUzB,EAAYO,EAAaiB,EAAyBZ,CAAoB,EAExGlB,GAAeC,EAAQ,IAAI,EACpBX,CACT,CAEA,OAAO+C,EAAmBrE,EAAwB,CAChD,KAAK,eAAe,OAAOqE,EAAWrE,CAAI,CAC5C,CAEA,OAAOsE,EAAaC,EAAmB,CACrC,KAAK,eAAe,OAAOD,EAAKC,CAAG,CACrC,CAEA,MAAM,SAASF,EAAmBG,EAAkD,CAGlF,MAAM,KAAK,eAAe,SAASH,EAAWG,CAAe,CAC/D,CAEA,MAAMC,EAAsB,CAC1B,OAAO,KAAK,eAAe,OAAOA,CAAI,EAAE,EAC1C,CAEA,KAAKC,EAAqB,CACxB,OAAO,KAAK,eAAe,QAAQA,CAAG,CACxC,CAEA,aAAaxD,EAAoBF,EAAkB2D,EAAoBxD,EAA0B,CAC/F,IAAMyD,EAAKC,GAAwB,IAAI3D,CAAU,EACjD,GAAI,CAAC0D,EACH,MAAM,IAAI,MAAM,2BAA2B1D,CAAU,EAAE,EAGzD,IAAMD,EAAyB,CAC7B,WAAAC,EACA,WAAAC,EACA,YAAayD,EAAG,CAAC,EACjB,WAAY,CAACA,EAAG,CAAC,EAAGD,CAAS,CAC/B,EACA,KAAK,QAAQ,IAAI3D,EAAUC,CAAU,CACvC,CAEA,cAAcD,EAAwB,CACpC,IAAMiC,EAAiB,KAAK,qBAAqB,IAAIjC,CAAQ,EAC7D,GAAIiC,EAAgB,CAClB,QAAWjD,KAAQiD,EACjB,KAAK,eAAe,QAAQjD,EAAK,EAAE,EAErC,KAAK,qBAAqB,OAAOgB,CAAQ,CAC3C,CAEA,KAAK,iBAAiB,OAAOA,CAAQ,EACrC,KAAK,QAAQ,OAAOA,CAAQ,CAC9B,CAEA,cAAcA,EAAkB8D,EAAyBC,EAA6C,CACpG,IAAMC,EAAS,KAAK,QAAQ,IAAIhE,CAAQ,EACxC,GAAI,CAACgE,EACH,MAAM,IAAI,MAAM,uBAAuBhE,CAAQ,EAAE,EAEnD,IAAME,EAAa8D,EAAO,WACpB7D,EAAa6D,EAAO,WACpBC,EAAcD,EAAO,YACrBE,EAAaF,EAAO,WAC1B,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,YAAY9D,CAAU,KAAKC,CAAU,2CAA2C,EAElG,KAAK,gBAAkBH,EAGnBkE,EAAW,CAAC,IACdA,EAAW,CAAC,EAAIA,EAAW,CAAC,EAAEA,EAAW,CAAC,CAAC,EAC3CA,EAAW,CAAC,EAAI,QAGlBlB,GAAU,OAAQ,IAAM,kCAAkC9C,CAAU,KAAKC,CAAU,MAAM,EAEzF,IAAMgE,EAAgB,KAAK,IAAI,MAE/B,KAAK,cAAgB,CAAC,EACtB,GAAI,CACF,OAAIA,GACF,KAAK,OAAO,eAAe,YAAY,EAGzCF,EAAYH,EAASI,EAAW,CAAC,CAAC,EAC3B,CACT,OAASE,EAAG,CACV,OAAAL,EAAO,KAAK,QAAQ,QAAQ,qBAAqB7D,CAAU,KAAKC,CAAU,aAAaiE,CAAC,EAAE,CAAC,EACpF,CACT,QAAE,CACID,GACFJ,EAAO,KAAK,KAAK,OAAO,cAAc,EAAE,KACpCM,GAAOA,EAAM,qCAAqCnE,CAAU,KAAKC,CAAU,MAAMkE,EAAI,OAAO,GAAK,IAAI,CAAC,EAG5G,QAAWrF,KAAQ,KAAK,cACtB,KAAK,eAAe,QAAQA,EAAK,EAAE,EAErC,
KAAK,cAAgB,CAAC,EACtB,KAAK,gBAAkB,IACzB,CACF,CAGA,eAAesF,EAAmBC,EAAeC,EAAmBf,EAAsB,CACxF,IAAIgB,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EACxEG,IACHA,EAA4B,IAAI,IAChC,KAAK,2BAA2B,IAAIH,EAAWG,CAAyB,GAG1E,IAAMC,EAAiBD,EAA0B,IAAIF,CAAK,EACpDI,EAAK,KAAK,eAAe,uBAAuBH,EAAQf,EAAMiB,IAAiB,CAAC,CAAC,EACvF,OAAAD,EAA0B,IAAIF,EAAO,CAACI,EAAIH,CAAM,CAAC,EAC1CG,CACT,CACA,kBAAkBL,EAAyB,CACzC,IAAMG,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EAC3EG,IACFA,EAA0B,QAAQG,GAAc,KAAK,eAAe,yBAAyBA,EAAW,CAAC,CAAC,CAAC,EAC3G,KAAK,2BAA2B,OAAON,CAAS,EAEpD,CACA,UAAUjB,EAA8B,CACtC,IAAM9B,EAAU,KAAK,eAAe,IAAI8B,CAAS,EACjD,GAAI,CAAC9B,EACH,MAAM,IAAI,MAAM,2BAA2B8B,CAAS,EAAE,EAExD,OAAO9B,EAAQ,MACjB,CACA,iBAAiBsD,EAAsBpB,EAAclF,EAClB,CACjC,MAAO,UAAY,CACjB,IAAMS,EAAO,MAAM8F,GAAgB,KAAMD,EAAWpB,CAAI,EACxD,OAAOsB,GAAW/F,EAAK,OAAQT,CAAI,CACrC,CACF,CAEA,eAAegG,EAAqB,CAC9B,KAAK,YAAc,iBAKtB,KAAK,mBAA2B,eAAe,KAAK,SAAUA,CAAK,CACtE,CACA,cAAqB,CACnB,KAAK,UAAY,QACb,KAAK,IAAI,OAAO,WAAW,OAAS,YACnC,OAAO,KAAK,IAAI,MAAU,IAAc,KAAK,IAAI,KAAK,MAAQ,KAAK,IAAI,UACtE,KAAK,OAAO,SAAS,IAAI,qDAAqD,EAChF,KAAK,UAAY,gBACR,KAAK,OAAO,SAAS,IAAI,iBAAiB,IACnD,KAAK,UAAY,aAGf,KAAK,YAAc,QAAU,OAAO,KAAK,SAAa,MACxD,KAAK,SAAW,KAAK,OAAO,eAAe,CACzC,KAAM,YACN,MAAO,KAAK,kBAAoB,CAClC,CAAC,EACD,KAAK,mBAAqB,KAAK,OAAO,aAElC,CAAC,KAAM,KAAK,kBAAoB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,aAAa,CAAC,GAG/G,CAEA,cAAqB,CACnBvB,GAAU,OAAQ,cAAc,EAC3B,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,GACtD,KAAK,oBAAoB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAEpD,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,GACzD,KAAK,uBAAuB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAG5D,KAAK,MAAM,EACX,KAAK,cAAgB,WACvB,CACA,YAAmB,CACjBA,GAAU,OAAQ,YAAY,EAE9B,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CACA,QAAe,CACbA,GAAU,OAAQ,QAAQ,EAC1B,KAAK,cAAgB,YACrB,IAAMgC,EAAqB,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,EACxEC,EAAwB,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC9E7B,EAAS4B,EAAoB,OACnC,KAAK,eAAiB,CAAC,EACvB,QAAS1G,EAAI,EAAGA,EAAI8E,EAAQ9E,IAAK,CAC/B,IAAM4G,EAAqB,KAAK,sBAAsB,EAChDC,EAAUH,EAAoB1G,CAAC,EACrC,KAAK,eAAe,KAAK,sBAAwB,CAAC,EAClD4G,EAAmB,YAAYC,EAAQ,eAAe,EACtDD,EAAmB,aAAa,EAAGC,EAAQ,SAAS,EACpDD,EAAmB,mBAAmB,GAAGC,EAAQ,aAAa,EAC9D,KAAK,eAAe,KAAK,sBAAwB,EAAI,CAAC,EACtD,KAAK,wBACD,KAAK,YAAc,QACrB,KAAK,eAAe,KAAKF,EAAuB3G,CAAC,CAAC,GAEhD,KAAK,uBAAyB,KAAK,mBAAqB,KAAK,YAAc,cAC7E,KAAK,eAAe,EAElB,KAAK,uBAAyB,KAAK,mBACrC,KAAK,MAAM,CAEf,CAEA,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CAEA,iBAAiBgG,EAAyB,CACxC,KAAK,kBAAkBA,CAAS,EAC5B,KAAK,oBAAoB,IAAIA,CAAS,GACxC,KAAK,oBAAoB,OAAOA,CAAS,EAEvC,KAAK,uBAAuB,IAAIA,CAAS,GAC3C,KAAK,uBAAuB,OAAOA,CAAS,EAE9C,KAAK,eAAe,iBAAiBA,CAAS,CAChD,CAEA,WAAWA,EAAyB,CAClC,KAAK,iBAAmBA,EACxB,KAAK,aAAa,CACpB,CACF,ICx0BA,IAAAc,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAgBMC,GAuCAC,GAoHOF,GA3KbG,GAAAC,EAAA,kBAMAC,KAEAC,KACAC,KAEAC,KAKMP,GAAN,MAAMQ,CAAqC,CACzC,YACYC,EAAuCC,EAAkCC,EACjEC,EAAyB,CADjC,YAAAH,EAAuC,cAAAC,EAAkC,UAAAC,EACjE,UAAAC,CAA0B,CAE9C,iBAAgC,CAC9B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMC,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,aACJ,IAAI,aAAa,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CAChG,CAEA,kBAAkC,CAChC,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,cACJ,IAAI,cAAc,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjG,CAEA,eAA4B,CAC1B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,WAAe,IAAI,WAAW,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjH,CAEA,QAAQE,EAAwC,CAC9C,GAAID,EAAU,KAAKC,CAAO,IAAMD,EAAU,KAAK,KAAK,IAAI,EACtD,MAAM,IAAI,MAAM,mBAAmB,EAErC,OAAO,IAAIN,EAAe,KAAK,OAAQ,KAAK,SAAU,KAAK,KAAMO,CAAO,CAC1E,CACF,EAEMd,GAAN,KAAmD,CAajD,YAAoBQ,EAA+BO,EAAwBC,EAA2B,CAAlF,YAAAR,EAA+B,aAAAO,EAFnD,KAAQ,iBAAmB,E
AC3B,KAAQ,eAAiB,EAEvB,KAAK,YAAcA,EAAQ,YAC3B,IAAME,EAAUT,EAAO,QAGnBU,EAAaF,IAAsB,EACvC,KAAK,gBAAkBC,EAAQC,GAAW,EAC1C,IAAMC,EAAaF,EAAQC,GAAW,EACtC,KAAK,YAAcD,EAAQC,GAAW,EACtC,KAAK,iBAAmBD,EAAQC,GAAW,EAC3C,KAAK,eAAiBD,EAAQC,GAAW,EAEzC,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIF,EAAYE,IAAK,CACnC,IAAMZ,EAAWQ,EAAQC,GAAW,EAC9BR,EAAOO,EAAQC,GAAW,EAC1BI,EAAML,EAAQC,GAAW,EACzBP,EAAiB,CAAC,EACxB,QAASY,EAAI,EAAGA,EAAID,EAAKC,IACvBZ,EAAK,KAAKM,EAAQC,GAAW,CAAC,EAEhCE,EAAO,KAAK,IAAIrB,GAAeS,EAAQC,EAAUC,EAAMC,CAAI,CAAC,CAC9D,CACA,KAAK,OAASS,CAChB,CAhCA,IAAI,kBAA6C,CAC/C,OAAO,KAAK,QAAQ,uBACtB,CACA,IAAI,kBAA+B,CACjC,OAAO,KAAK,OAAO,OAAO,SAAS,KAAK,iBAAkB,KAAK,iBAAmB,KAAK,cAAc,CACvG,CA6BA,6BAAwD,CACtD,MAAO,CACL,KAAK,QAAQ,OAAO,OAAO,yBAA0B,KAAK,QAAQ,OAAO,OAAO,yBAChF,KAAK,QAAQ,OAAO,OAAO,wBAC7B,CACF,CAEA,mCAA4C,CAC1C,OAAO,KAAK,QAAQ,OAAO,OAAO,8BACpC,CAEA,QAAQI,EAAsBC,EAAyE,CAErG,IAAMC,EACFD,GAAsB,QAAQ,IAAIJ,GAAK,OAAOA,GAAM,SAAW,KAAK,OAAOA,CAAC,EAAIA,CAAC,GAAK,KAAK,OAEzFM,EAAgBF,GAAsB,SAAW,CAAC,EAClDG,EAAqB,CAACC,EAAepB,EAAkBE,IACzD,IAAIZ,GAAe,KAAK,OAAQU,EAAU,KAAK,OAAOoB,EAAOlB,CAAI,EAAGA,CAAI,EACtEmB,EAAwB,CAACrB,EAAkBE,IAAwC,CACvF,IAAMoB,EAAcC,GAAqBvB,CAAQ,EACjD,GAAI,CAACsB,EACH,MAAM,IAAI,MAAM,0BAA0BtB,CAAQ,EAAE,EAEtD,IAAMwB,EAAaF,EAAclB,EAAU,KAAKF,CAAI,EAC9CuB,EAAYD,EAAa,EAAI,KAAK,QAAQ,eAAe,OAAOA,CAAU,EAAE,GAAK,EACvF,OAAO,IAAIlC,GAAe,KAAK,OAAQU,EAAUyB,EAAWvB,CAAI,CAClE,EACA,OAAO,KAAK,QAAQ,IAChBa,EAASE,EAAcC,EAAeC,EAAoBE,EAAuB,KAAK,WAAW,CACvG,CAEA,OAAOD,EAAelB,EAAiC,CACrD,IAAMwB,EAAQ,KAAK,OAAO,UAAU,EACpC,GAAI,CACF,IAAMzB,EAAO,KAAK,OAAO,YAAY,EAAIC,EAAK,QAAU,CAAsB,EAC1EyB,EAAS1B,GAAQ,EACrB,KAAK,OAAO,QAAQ0B,GAAQ,EAAIzB,EAAK,OACrC,QAASU,EAAI,EAAGA,EAAIV,EAAK,OAAQU,IAC/B,KAAK,OAAO,QAAQe,GAAQ,EAAIzB,EAAKU,CAAC,EAExC,OAAO,KAAK,OAAO,YAAa,KAAK,gBAAiBQ,EAAOnB,CAAI,CACnE,OAAS2B,EAAG,CACV,MAAM,IAAI,MACN,sCAAsCR,CAAK,gBAAgBlB,CAAI,8GAErD0B,CAAC,EAAE,CACnB,QAAE,CACA,KAAK,OAAO,aAAaF,CAAK,CAChC,CACF,CACF,EA0BarC,GACT,MAAMwC,EAAwB9B,EAAuB+B,EAAUC,IAA2C,CAC5G,IAAMC,EAAWjC,EAAO,SACxB,GAAI,CAACiC,EACH,MAAM,IAAI,MAAM,mFAAmF,EAGrG,GAAIH,IAAS,SAAU,CACrB,IAAMvB,EAAU,IAAI2B,GACpB,MAAM3B,EAAQ,WAAWwB,EAAKC,CAAW,EAEzCC,EAAS,SAAU,CAEjB1B,EAGC4B,GAAiB5B,EAAQ,MAAM4B,CAAI,EAGnCC,GAAgB7B,EAAQ,KAAK6B,CAAG,EAGjC,CAACC,EAAaC,EAAaH,EAAcI,EAAc,KAAU,CAC/D,GAAIA,EACFC,GAAU,UAAW,IAAM,kCAAkCH,CAAG,SAASC,CAAG,UAAUH,CAAI,EAAE,EAC5F5B,EAAQ,OAAO8B,EAAKC,CAAG,MAClB,CACLE,GAAU,UAAW,IAAM,yCAAyCH,CAAG,eAAeC,CAAG,UAAUH,CAAI,EAAE,EACzG,IAAMjC,EAAOF,EAAO,OAAO,SAASqC,IAAQ,GAAIA,IAAQ,GAAKF,CAAI,EACjE5B,EAAQ,OAAO+B,EAAKpC,CAAI,CAC1B,CACF,EAGA,MAAMwB,EAAmBe,EAAoBN,IACxB,CACfK,GACI,UACA,IAAM,wCAAwCd,CAAS,gBAAgBe,CAAU,UAAUN,CAAI,EAAE,EAErG,MAAM5B,EAAQ,SACVmB,EAAW,IAAM1B,EAAO,OAAO,SAASyC,IAAe,GAAIA,IAAe,GAAKN,CAAI,CAAC,CAC1F,EAGJ,CAACO,EAAoBC,EAAkBC,IAAuBrC,EAAQ,aAClEmC,EAAYC,EAAUC,EAAW5C,EAAO,aAAaA,EAAO,iBAAkB2C,CAAQ,CAAC,CAAC,EAG3FE,GAAmBtC,EAAQ,cAAcsC,CAAM,EAGhD,CAACA,EAAgBrC,EAA2BsC,EAAuBC,IAAwC,CACzGP,GACI,UACA,IAAM,mCAAmCM,CAAa,YAAYD,CAAM,uBACpErC,CAAiB,EAAE,EAC3B,IAAMwC,EAAU,IAAIxD,GAAmBQ,EAAQO,EAASC,CAAiB,EACzE,OAAOD,EAAQ,cAAcsC,EAAQG,EAASD,CAAM,CACtD,EAEA,IAAMxC,EAAQ,aAAa,EAE3B,IAAMA,EAAQ,WAAW,EAEzB,IAAMA,EAAQ,OAAO,CACvB,CAAC,CACH,MACE0B,EAAS,OAAO,CAEpB,ICjPA,IAoEMgB,GAWOC,GAWAC,GAoFPC,GAOAC,GAqBOC,GAkBAC,GAmKAC,GAuBAC,GA+EAC,GA6OAC,GAgBAC,GAluBbC,GAAAC,EAAA,kBAWAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAoDMnB,GAAU,CAACoB,EAAoBC,IAA+B,CAChDC,GAAY,EAAE,SAASF,EAAYC,CAAY,IAC/C,GAChBE,GAAe,+BAAgC,CAEnD,EAMatB,GAAc,MAAMuB,GAA4B,CAE3DxB,GAAQwB,EAAI,KAAK,WAAaC,GAAqBD,EAAI,QAAQ,CAAC,CAClE,EAQatB,GAAS,MAAMsB,EAAUE,IAAkC,CACxC,CAE5B,IAAMC,EAAW,cAAuB,KAExC,GAAID,IAAW,SAAU,CAEvB,GAA
I,OAAO,UAAc,KAAe,CAAC,UAAU,IACjD,MAAM,IAAI,MAAM,gDAAgD,EAGlE,IAAIE,EAAUJ,EAAI,OAAO,QACzB,GAAKI,GAmBH,GAAI,OAAOA,EAAQ,QAAW,UAAY,OAAOA,EAAQ,UAAa,UAClE,OAAOA,EAAQ,eAAkB,WACnC,MAAM,IAAI,MAAM,kFAAkF,MArBxF,CAEZ,IAAMC,EAAkBL,EAAI,OAAO,gBACnC,GAAIK,IAAoB,QAAaA,IAAoB,aACrDA,IAAoB,mBACtB,MAAM,IAAI,MAAM,qCAAqCA,CAAe,GAAG,EAEzE,IAAMC,EAAuBN,EAAI,OAAO,qBACxC,GAAIM,IAAyB,QAAa,OAAOA,GAAyB,UACxE,MAAM,IAAI,MAAM,0CAA0CA,CAAoB,GAAG,EAGnF,GADAF,EAAU,MAAM,UAAU,IAAI,eAAe,CAAC,gBAAAC,EAAiB,qBAAAC,CAAoB,CAAC,EAChF,CAACF,EACH,MAAM,IAAI,MACN,0GAC+E,CAEvF,CAQA,MAAMD,EAAS,SAAUL,GAAY,EAAGE,EAAKI,CAAO,CACtD,CACA,GAAIF,IAAW,QAAS,CAEtB,GAAI,OAAO,UAAc,KAAe,CAAE,UAAuC,GAC/E,MAAM,IAAI,MAAM,+CAA+C,EAGjE,MAAMC,EAAS,QAASL,GAAY,EAAGE,CAAG,CAC5C,CACF,CACF,EAoCMrB,GAAiB,IAAI,IAOrBC,GAA8B2B,GAA4C,CAC9E,IAAMC,EAAOV,GAAY,EACnBW,EAAQD,EAAK,UAAU,EAC7B,GAAI,CACF,IAAME,EAAaF,EAAK,WAAW,CAAC,EAEpC,OADkBA,EAAK,wBAAwBD,EAAeG,EAAYA,EAAa,CAAC,IACtE,GAChBX,GAAe,uCAAwC,EAElD,CAACS,EAAK,OAAOE,EAAa,CAAC,EAAGF,EAAK,OAAOE,EAAa,EAAI,CAAC,CAAC,CACtE,QAAE,CACAF,EAAK,aAAaC,CAAK,CACzB,CACF,EAQa5B,GAA0B8B,GAAwC,CAC7E,IAAMH,EAAOV,GAAY,EACnBc,EAAkBJ,EAAK,QAAQG,EAAM,UAAU,EACrD,GAAIC,IAAoB,EACtB,MAAM,IAAI,MAAM,+DAA+DD,EAAM,UAAU,GAAG,EAEpG,OAAAH,EAAK,OAAO,IAAIG,EAAOC,CAAe,EAC/B,CAACA,EAAiBD,EAAM,UAAU,CAC3C,EAUa7B,GAAgB,MACzB+B,EACAC,IAAoF,CACtF,IAAIF,EAAyBG,EACvBP,EAAOV,GAAY,EAErB,MAAM,QAAQe,CAAS,EAEzB,CAACD,EAAiBG,CAAe,EAAIF,EAC5BA,EAAU,SAAWL,EAAK,OAAO,OAE1C,CAACI,EAAiBG,CAAe,EAAI,CAACF,EAAU,WAAYA,EAAU,UAAU,EAGhF,CAACD,EAAiBG,CAAe,EAAIlC,GAAuBgC,CAAS,EAGvE,IAAIN,EAAgB,EAChBS,EAAuB,EACvBC,EAAkB,EAClBC,EAAmB,CAAC,EAClBC,EAAwB,CAAC,EACzBC,EAAyB,CAAC,EAEhC,GAAI,CAGF,GAFA,CAACJ,EAAsBE,CAAM,EAAIG,GAAkBP,CAAO,EAEtDA,GAAS,cAAgBN,EAAK,kBAAmB,CACnD,IAAMc,EAAkB,CAAC,EACzB,QAAWC,KAAQT,EAAQ,aAAc,CACvC,IAAMU,EAAO,OAAOD,GAAS,SAAWA,EAAOA,EAAK,KACpDD,EAAgB,KAAKG,GAAS,OAAOF,GAAS,SAAWA,EAAOA,EAAK,IAAI,EAAE,KAAKG,GAAQ,CACtFlB,EAAK,kBAAmBgB,EAAME,CAAI,CACpC,CAAC,CAAC,CACJ,CAGA,MAAM,QAAQ,IAAIJ,CAAe,CACnC,CAEA,QAAWK,KAAYb,GAAS,oBAAsB,CAAC,EAErD,IADqB,OAAOa,GAAa,SAAWA,EAAWA,EAAS,QACnD,QAAS,CAC5B,GAAInB,EAAK,eACP,MAAM,IAAI,MAAM,0CAA0C,EAE5D,GAAI,OAAOmB,GAAa,SAAU,CAChC,IAAMC,EAAeD,EACfE,EAAWD,GAA6D,QACxEE,EAAaF,GAAsD,UACnEG,EAAcH,GAAuD,WACrEhC,EAAcgC,GAAuD,WACrEvB,EAAmBuB,GAAuD,gBAC5EC,EACFrB,EAAK,eAAiBqB,EACbC,EACTtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAcsB,CAAS,EAEhEtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,CAAC,WAAAuB,EAAY,WAAAnC,EAAY,gBAAAS,CAAe,CAAC,CAEpG,MACEG,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,EAEzD,KACF,CAGFD,EAAgB,MAAMC,EAAK,kBAAkBI,EAAiBG,EAAiBC,CAAoB,EAC/FT,IAAkB,GACpBR,GAAe,yBAA0B,EAIvCS,EAAK,iBACPA,EAAK,eAAiB,QAGxB,GAAM,CAACwB,EAAYC,CAAW,EAAIrD,GAA2B2B,CAAa,EAEpE2B,EAAqB,CAAC,CAACpB,GAAS,mBAEhCqB,EAAa,CAAC,EACdC,EAAc,CAAC,EACfC,EAAwE,CAAC,EAC/E,QAASC,EAAI,EAAGA,EAAIN,EAAYM,IAAK,CACnC,IAAMC,EAAO/B,EAAK,iBAAiBD,EAAe+B,CAAC,EAC/CC,IAAS,GACXxC,GAAe,0BAA2B,EAE5CoB,EAAsB,KAAKoB,CAAI,EAC/BJ,EAAW,KAAK3B,EAAK,aAAa+B,CAAI,CAAC,CACzC,CACA,QAASD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMC,EAAO/B,EAAK,kBAAkBD,EAAe+B,CAAC,EAChDC,IAAS,GACXxC,GAAe,2BAA4B,EAE7CqB,EAAuB,KAAKmB,CAAI,EAChC,IAAMC,EAAahC,EAAK,aAAa+B,CAAI,EACzCH,EAAY,KAAKI,CAAU,EAEG,CAC5B,GAAIN,GAAsBpB,GAAS,0BAA4B,OAAW,CACxEuB,EAAyB,KAAK,YAAY,EAC1C,QACF,CACA,IAAMI,EAAW,OAAO3B,GAAS,yBAA4B,SACzDA,EAAQ,wBACRA,GAAS,0BAA0B0B,CAAU,GAAK,MACtD,GAAIC,IAAa,OAASA,IAAa,cAAgBA,IAAa,aAClE,MAAM,IAAI,MAAM,4CAA4CA,CAAQ,GAAG,EAEzE,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MAAM,4CACZA,CAAQ,4EAA4E,EAE1FJ,EAAyB,KAAKI,CAAQ,CACxC,CACF,CAGA,IAAIC,EAAoC,KACxC,OAAgCL,EAAyB,KAAKM,GAAKA,IAAM,YAAY,IACnF1B,EAAkBT,EAAK,kBAAkBD,CAAa,EAClDU,IAAoB,GACtBlB,GAAe,0BAA2B,EAG5C2C,EAAe,CACb,OAAQzB
,EACR,yBAAAoB,EACA,gCAAiCA,EAAyB,IAAIM,GAAKC,GAAyBD,CAAC,CAAC,CAChG,GAGFhE,GAAe,IACX4B,EACA,CAACA,EAAeY,EAAuBC,EAAwBsB,EAAcR,EAAoB,EAAK,CAAC,EACpG,CAAC3B,EAAe4B,EAAYC,CAAW,CAChD,OAASS,EAAG,CACV,MAAA1B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EAEpD7B,IAAoB,GACtBT,EAAK,mBAAmBS,CAAe,EAGrCV,IAAkB,GACpBC,EAAK,mBAAmBD,CAAa,EAEjCsC,CACR,QAAE,CACArC,EAAK,MAAMI,CAAe,EACtBI,IAAyB,GAC3BR,EAAK,0BAA0BQ,CAAoB,EAErDE,EAAO,QAAQ6B,GAASvC,EAAK,MAAMuC,CAAK,CAAC,EAGzCvC,EAAK,sBAAsB,CAC7B,CACF,EAEazB,GAAkBiE,GAA4B,CACzD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,+CAA+CD,CAAS,EAAE,EAE5E,GAAM,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,CAAkB,EAAIe,EAEvGC,IACEhB,GACF1B,EAAK,sBAAsB0C,EAAe,MAAM,EAElD1C,EAAK,mBAAmB0C,EAAe,MAAM,GAG/C1C,EAAK,uBAAuBwC,CAAS,EAErC7B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACxDtC,EAAK,mBAAmBD,CAAa,EACrC5B,GAAe,OAAOqE,CAAS,CACjC,EAEahE,GACT,CAACmE,EAA6BC,EAAyBlC,EAAkB8B,EAAmBK,EAC3FnB,EAAqB,KAAgB,CACpC,GAAI,CAACiB,EAAQ,CACXC,EAAc,KAAK,CAAC,EACpB,MACF,CAEA,IAAM5C,EAAOV,GAAY,EAEnBwD,EAAWH,EAAO,CAAC,EACnBI,EAAOJ,EAAO,CAAC,EACfV,EAAWU,EAAO,CAAC,EAErBK,EACAC,EAEJ,GAAIH,IAAa,UAAYb,IAAa,aACxC,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MACN,2DAA2DY,CAAK,mCAAmC,EAGzG,GAAIZ,IAAa,aAAc,CAC7B,IAAMiB,EAAYP,EAAO,CAAC,EAAE,UACtBQ,EAAqBC,GAAqBC,GAA2BP,CAAQ,CAAC,EACpFG,EAAiBF,EAAK,OAAO,CAACO,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAIJ,EAEnD,IAAMK,EAAiBxD,EAAK,mBAC5B,GAAI,CAACwD,EACH,MAAM,IAAI,MAAM,qEAAqE,EAEvFR,EAAUQ,EAAehB,EAAWK,EAAOK,EAAWD,CAAc,CACtE,KAAO,CACL,IAAM/B,EAAOyB,EAAO,CAAC,EAErB,GAAI,MAAM,QAAQzB,CAAI,EAAG,CAEvB+B,EAAiB,EAAI/B,EAAK,OAC1B8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnB,IAAIS,EAAYT,EAAU,EAC1B,QAASlB,EAAI,EAAGA,EAAIZ,EAAK,OAAQY,IAAK,CACpC,GAAI,OAAOZ,EAAKY,CAAC,GAAM,SACrB,MAAM,IAAI,UAAU,wBAAwBA,CAAC,kBAAkB,EAEjE9B,EAAK,QAAQyD,GAAW,EAAIC,GAAgBxC,EAAKY,CAAC,EAAGpB,CAAM,CAC7D,CACF,MACEuC,EAAiB/B,EAAK,WACtB8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnBhD,EAAK,OAAO,IAAI,IAAI,WAAWkB,EAAK,OAAQA,EAAK,WAAY+B,CAAc,EAAGD,CAAO,CAEzF,CAEA,IAAM/C,EAAQD,EAAK,UAAU,EACvB2D,EAAa3D,EAAK,WAAW,EAAI+C,EAAK,MAAM,EAClD,GAAI,CACF,IAAIa,EAAWD,EAAa,EAC5BZ,EAAK,QAAQc,GAAK7D,EAAK,OAAO4D,GAAU,EAAIC,CAAC,EAC7C,IAAMlB,EAAS3C,EAAK,iBAChBqD,GAA2BP,CAAQ,EAAGE,EAASC,EAAgBU,EAAYZ,EAAK,OAChFX,GAAyBH,CAAQ,CAAC,EAClCU,IAAW,GACbpD,GAAe,iDAAiDiD,CAAS,WAAWK,CAAK,GAAG,EAE9FD,EAAc,KAAKD,CAAM,CAC3B,QAAE,CACA3C,EAAK,aAAaC,CAAK,CACzB,CACF,EAKSxB,GAAM,MACf+D,EAAmBsB,EAAwBC,EAAgCC,EAC3EC,EAA2C3D,IAAoE,CACjH,IAAMN,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,6CAA6CD,CAAS,EAAE,EAE1E,IAAMzC,EAAgB0C,EAAQ,CAAC,EACzB9B,EAAwB8B,EAAQ,CAAC,EACjC7B,EAAyB6B,EAAQ,CAAC,EAClCC,EAAiBD,EAAQ,CAAC,EAC1Bf,EAAqBe,EAAQ,CAAC,EAC9ByB,EAAmBzB,EAAQ,CAAC,EAE5BjB,EAAasC,EAAa,OAC1BrC,EAAcuC,EAAc,OAE9BG,EAAmB,EACnBC,EAA6B,CAAC,EAE5BC,EAA+B,CAAC,EAChCC,EAAgC,CAAC,EACjCC,EAA8B,CAAC,EAE/BC,EAAiBxE,EAAK,UAAU,EAChCyE,EAAoBzE,EAAK,WAAWwB,EAAa,CAAC,EAClDkD,EAAmB1E,EAAK,WAAWwB,EAAa,CAAC,EACjDmD,EAAqB3E,EAAK,WAAWyB,EAAc,CAAC,EACpDmD,EAAoB5E,EAAK,WAAWyB,EAAc,CAAC,EAEzD,GAAI,CACF,CAAC0C,EAAkBC,CAAgB,EAAIS,GAAcvE,CAAO,EAG5D,QAASwB,EAAI,EAAGA,EAAIN,EAAYM,IAC9BtD,GACIuF,EAAajC,CAAC,EAAGuC,EAAoBE,EAAmB/B,EAAWsB,EAAahC,CAAC,EAAGJ,CAAkB,EAI5G,QAASI,EAAI,EAAGA,EAAIL,EAAaK,IAC/BtD,GACIyF,EAAcnC,CAAC,EAAGwC,EAAqBC,EAAmB/B,EAAWhB,EAAawC,EAAclC,CAAC,EACjGJ,CAAkB,EAGxB,IAAIoD,EAAmBL,EAAoB,EACvCM,EAAkBL,EAAmB,EACrCM,GAAoBL,EAAqB,EACzCM,GAAmBL,EAAoB,EAC3C,QAAS9C,EAAI,EAAGA,EAAIN,EAAYM,
IAC9B9B,EAAK,QAAQ8E,GAAkB,EAAIT,EAAmBvC,CAAC,EACvD9B,EAAK,QAAQ+E,GAAiB,EAAIpE,EAAsBmD,EAAahC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIL,EAAaK,IAC/B9B,EAAK,QAAQgF,IAAmB,EAAIV,EAAoBxC,CAAC,EACzD9B,EAAK,QAAQiF,IAAkB,EAAIrE,EAAuBoD,EAAclC,CAAC,CAAC,EAG5E,GAAgCY,GAAkB,CAACwB,EAAkB,CACnE,GAAM,CAAC,OAAAgB,EAAQ,yBAAArD,GAA0B,gCAAAsD,EAA+B,EAAIzC,EAE5E,GAAI/B,EAAsB,SAAWa,EACnC,MAAM,IAAI,MAAM,2BACZA,CAAU,4DAA4Db,EAAsB,MAAM,IAAI,EAI5G,QAASmB,GAAI,EAAGA,GAAIN,EAAYM,KAAK,CACnC,IAAMe,GAAQiB,EAAahC,EAAC,EACV,MAAM9B,EAAK,cAAckF,EAAQvE,EAAsBkC,EAAK,EAAGwB,EAAmBvC,EAAC,CAAC,IACpF,GAChBvC,GAAe,oBAAoBuC,EAAC,iBAAiBU,CAAS,GAAG,CAErE,CAGA,QAASV,GAAI,EAAGA,GAAIL,EAAaK,KAAK,CACpC,IAAMe,GAAQmB,EAAclC,EAAC,EACZmC,EAAcnC,EAAC,IAAI,CAAC,EAIjB9B,EAAK,eAAekF,EAAQtE,EAAuBiC,EAAK,EAAGyB,EAAoBxC,EAAC,EAAG,CAAC,IACpF,GAChBvC,GAAe,mCAAmCuC,EAAC,iBAAiBU,CAAS,GAAG,EAK9ExC,EAAK,eAAekF,EAAQtE,EAAuBiC,EAAK,EAAG,EAAGsC,GAAgCtC,EAAK,CAAC,IACtF,GAChBtD,GAAe,qBAAqBuC,EAAC,QAAQD,GAAyBC,EAAC,CAAC,gBAAgBU,CAAS,GAAG,CAG1G,CACArE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAI,CAAC,CAC9G,CAEA1B,EAAK,iBAAiBD,CAAa,EACnC,IAAIqF,GAC4B1C,EAC9B0C,GAAY,MAAMpF,EAAK,mBACnBD,EAAe2C,EAAe,OAAQjB,EAAakD,EAAoBR,CAAgB,EAE3FiB,GAAY,MAAMpF,EAAK,QACnBD,EAAe2E,EAAkBD,EAAmBjD,EAAYoD,EAAmBnD,EACnFkD,EAAoBR,CAAgB,EAGtCiB,KAAc,GAChB7F,GAAe,0BAA0B,EAG3C,IAAM8F,GAA2B,CAAC,EAElC,QAASvD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMa,GAAS3C,EAAK,QAAQ2E,EAAqB,EAAI7C,CAAC,EACtD,GAAIa,KAAW2B,EAAoBxC,CAAC,EAAG,CAErCuD,GAAO,KAAKpB,EAAcnC,CAAC,CAAE,EAC7B,QACF,CAEA,IAAMwD,GAA2BtF,EAAK,UAAU,EAE1CuF,GAAmBvF,EAAK,WAAW,EAAI,CAAC,EAE1CwF,GAAmB,GACnBC,GAA6BvF,GAAa,EAC9C,GAAI,CACgBF,EAAK,kBACnB2C,GAAQ4C,GAAkBA,GAAmB,EAAGA,GAAmB,EAAGA,GAAmB,EAAE,IAC7E,GAChBhG,GAAe,4CAA4CuC,CAAC,GAAG,EAEjE,IAAI4D,EAAkBH,GAAmB,EACnCzC,GAAW9C,EAAK,QAAQ0F,GAAiB,EAC/CxF,GAAaF,EAAK,QAAQ0F,GAAiB,EAC3C,IAAM/B,GAAa3D,EAAK,QAAQ0F,GAAiB,EAC3CC,GAAa3F,EAAK,QAAQ0F,GAAiB,EAC3C3C,GAAO,CAAC,EACd,QAASjB,GAAI,EAAGA,GAAI6D,GAAY7D,KAC9BiB,GAAK,KAAK/C,EAAK,QAAQ2D,GAAa,EAAI7B,EAAC,CAAC,EAE5C9B,EAAK,SAAS2D,EAAU,EAExB,IAAMiC,GAAO7C,GAAK,OAAO,CAACO,GAAGC,KAAMD,GAAIC,GAAG,CAAC,EAC3CkC,GAAOI,GAA2B/C,EAAQ,EAE1C,IAAMgD,GAAoBpD,GAAgB,yBAAyBsB,EAAclC,CAAC,CAAC,EAEnF,GAAI2D,KAAS,SAAU,CACrB,GAAIK,KAAsB,aACxB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMC,GAAuB,CAAC,EAC1BtC,GAAYvD,GAAa,EAC7B,QAAS4B,GAAI,EAAGA,GAAI8D,GAAM9D,KAAK,CAC7B,IAAMkE,GAAShG,EAAK,QAAQyD,IAAW,EACjCwC,GAAiBnE,KAAM8D,GAAO,EAAI,OAAY5F,EAAK,QAAQyD,EAAS,EAAIuC,GAC9ED,GAAW,KAAK/F,EAAK,aAAagG,GAAQC,EAAc,CAAC,CAC3D,CACAZ,GAAO,KAAK,CAACI,GAAM1C,GAAMgD,GAAY,KAAK,CAAC,CAC7C,SAGMD,KAAsB,cAAgBF,GAAO,EAAG,CAClD,IAAMM,GAAYlG,EAAK,cACvB,GAAI,CAACkG,GACH,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAMhD,GAAYgD,GAAUhG,EAAU,EAChCiG,GAAc/C,GAAqBN,EAAQ,EACjD,GAAIqD,KAAgB,QAAa,CAACC,GAAyBX,EAAI,EAC7D,MAAM,IAAI,MAAM,0BAA0BA,EAAI,EAAE,EAIlDD,GAAmB,GAEnBH,GAAO,KAAK,CACVI,GAAM1C,GAAM,CACV,UAAAG,GACA,SAAUlD,EAAK,qBAAsBkD,GAAW0C,GAAOO,GAAaV,EAAI,EACxE,QAAS,IAAM,CACbzF,EAAK,kBAAkB2C,EAAM,CAC/B,CACF,EACA,YACF,CAAC,CACH,KAAO,CACL,IAAM0D,GAAwBC,GAAkCb,EAAI,EAC9DvE,GAAO,IAAImF,GAAsBT,EAAI,EAC3C,IAAI,WAAW1E,GAAK,OAAQA,GAAK,WAAYA,GAAK,UAAU,EACvD,IAAIlB,EAAK,OAAO,SAASE,GAAYA,GAAagB,GAAK,UAAU,CAAC,EACvEmE,GAAO,KAAK,CAACI,GAAM1C,GAAM7B,GAAM,KAAK,CAAC,CACvC,CAEJ,QAAE,CACAlB,EAAK,aAAasF,EAAwB,EACtCG,KAAS,UAAYvF,IACvBF,EAAK,MAAME,EAAU,EAElBsF,IACHxF,EAAK,kBAAkB2C,EAAM,CAEjC,CACF,CAEA,OAAID,GAAkB,CAAChB,IACrB1B,EAAK,sBAAsB0C,EAAe,MAAM,EAChDvE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAK,CAAC,GAExG2D,EACT,QAAE,CACArF,EAAK,aAAawE,CAAc,EAEhCH,EAAmB,QAAQkC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EACzDjC,EAAoB,QAAQiC,
GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EAC1DhC,EAAkB,QAAQiC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,EAExCrC,IAAqB,GACvBnE,EAAK,sBAAsBmE,CAAgB,EAE7CC,EAAiB,QAAQoC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,CAC7C,CACF,EAKa9H,GAAgB8D,GAA4B,CACvD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,oBAAoB,EAEtC,IAAM1C,EAAgB0C,EAAQ,CAAC,EAGzBgE,EAAkBzG,EAAK,iBAAiBD,CAAa,EACvD0G,IAAoB,GACtBlH,GAAe,iCAAkC,EAEnDS,EAAK,SAASyG,CAAe,CAC/B,EAEa9H,GAA8B+H,GAAsE,CAC/G,IAAMC,EAA6B,CAAC,EACpC,QAAWhE,KAAU+D,EAAS,CAC5B,IAAMxF,EAAOyB,EAAO,CAAC,EACjB,CAAC,MAAM,QAAQzB,CAAI,GAAK,WAAYA,GACtCyF,EAAQ,KAAKzF,EAAK,MAAM,CAE5B,CACA,OAAOyF,CACT,IC3uBA,IAUMC,GACFC,GACAC,GACAC,GACAC,GACAC,GAGAC,GACEC,GAEAC,GASAC,GAMAC,GAmCOC,GA8CAC,GAaAC,GAaAC,GAuBAC,GAaAC,GAyBAC,GA5MbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEMvB,GAAU,IAAe,CAAC,CAACwB,GAAI,KAAK,OAAS,OAAO,SAAa,IAEnEtB,GAAe,GACfC,GAAc,GACdC,GAAU,GAKRG,GAAiF,IAAI,IAErFC,GAAmB,CAACiB,EAA8BC,IAA+C,CACrG,IAAMC,EAAQpB,GAAgB,IAAIkB,CAAI,EAClCE,EACFA,EAAM,KAAKD,CAAS,EAEpBnB,GAAgB,IAAIkB,EAAM,CAACC,CAAS,CAAC,CAEzC,EAEMjB,GAAe,IAAY,CAC/B,GAAIP,IAAgB,CAACC,IAAeC,IAAW,CAACH,GAC9C,MAAM,IAAI,MAAM,kBAAkB,CAEtC,EAEMS,GAAwBkB,GAA2C,CACvE,OAAQA,EAAG,KAAK,KAAM,CACpB,IAAK,YACH1B,GAAe,GACX0B,EAAG,KAAK,KACVxB,GAAU,GACVE,GAAkB,CAAC,EAAEsB,EAAG,KAAK,GAAG,IAEhCzB,GAAc,GACdG,GAAkB,CAAC,EAAE,GAEnBD,KACF,IAAI,gBAAgBA,EAAkB,EACtCA,GAAqB,QAEvB,MACF,IAAK,UACL,IAAK,YACL,IAAK,SACL,IAAK,UACL,IAAK,MACL,IAAK,gBAAiB,CACpB,IAAMqB,EAAYnB,GAAgB,IAAIqB,EAAG,KAAK,IAAI,EAC9CA,EAAG,KAAK,IACVF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAG,EAEjCF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAI,EAEpC,KACF,CACA,QACF,CACF,EAGajB,GAAqC,SAA0B,CAC1E,GAAI,CAAAR,GAGJ,IAAID,GACF,MAAM,IAAI,MAAM,0CAA4C,EAE9D,GAAIE,GACF,MAAM,IAAI,MAAM,uCAAyC,EAK3D,GAFAF,GAAe,GAEuBF,GAAQ,EAC5C,OAAO,IAAI,QAAc,CAAC6B,EAASC,IAAW,CAC5C7B,IAAa,UAAU,EAElB8B,GAAkB,EAAE,KAAK,CAAC,CAACC,EAAWC,CAAM,IAAM,CACrD,GAAI,CACFhC,GAAcgC,EACdhC,GAAY,QAAW2B,GAAmBE,EAAOF,CAAE,EACnD3B,GAAY,UAAYS,GACxBJ,GAAoB,CAACuB,EAASC,CAAM,EACpC,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAKV,EAAG,EAC5DvB,GAAY,YAAYiC,CAAO,EAC/B7B,GAAqB2B,CACvB,OAASG,EAAG,CACVL,EAAOK,CAAC,CACV,CACF,EAAGL,CAAM,CACX,CAAC,EAGD,GAAI,CACF,MAAMM,GAAsBZ,GAAI,IAAI,EACpC,MAAWa,GAAYb,EAAG,EAC1BrB,GAAc,EAChB,OAASgC,EAAG,CACV,MAAA/B,GAAU,GACJ+B,CACR,QAAE,CACAjC,GAAe,EACjB,EAEJ,EAEaU,GAAkB,MAAM0B,GAAkC,CACrE,GAAsCtC,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAK,CAAC,OAAAI,EAAQ,IAAAd,EAAG,CAAC,EACpEvB,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAED,MAAWK,GAAOf,GAAKc,CAAM,CAEjC,EAEazB,GAAyB,MAAM2B,GACJxC,GAAQ,GAC5CS,GAAa,EACN,IAAI,QAAoC,CAACoB,EAASC,IAAW,CAClEtB,GAAiB,YAAa,CAACqB,EAASC,CAAM,CAAC,EAC/C,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAK,CAAC,OAAAM,CAAM,CAAC,EACjEvC,GAAa,YAAYiC,EAAS,CAACM,EAAO,MAAM,CAAC,CACnD,CAAC,GAEW3B,GAAuB2B,CAAM,EAIhC1B,GACT,MAAM2B,EAA8CC,IACR,CACtC,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI0C,GAAS,wBACX,MAAM,IAAI,MAAM,sEAAsE,EAExF,OAAAjC,GAAa,EACN,IAAI,QAAqC,CAACoB,EAASC,IAAW,CACnEtB,GAAiB,SAAU,CAACqB,EAASC,CAAM,CAAC,EAC5C,IAAMI,EAA0B,CAAC,KAAM,SAAU,GAAK,CAAC,MAAAO,EAAO,QAAS,CAAC,GAAGC,CAAO,CAAC,CAAC,EAC9EC,EAA+B,CAAC,EAClCF,aAAiB,YACnBE,EAAa,KAAKF,EAAM,MAAM,EAEhCxC,GAAa,YAAYiC,EAASS,CAAY,CAChD,CAAC,CACH,KACE,QAAY7B,GAAc2B,EAAOC,CAAO,CAE5C,EAEK3B,GAAiB,MAAM6B,GAAqC,CACvE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAKU,CAAS,EAChE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEInB,GAAe6B,CAAS,CAEjC,EAEa5B,GAAM,MACf4B,EAAmBC,EAAwBC,EAA0BC,EACrEC,EAAqCN,IAAoE,CAC3G,GAAsC1C,GAAQ,EAAG,CAE/C,
GAAI8C,EAAO,KAAKG,GAAKA,EAAE,CAAC,IAAM,KAAK,EACjC,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAID,EAAQ,KAAKC,GAAKA,CAAC,EACrB,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAAxC,GAAa,EACN,IAAI,QAAsC,CAACoB,EAASC,IAAW,CACpEtB,GAAiB,MAAO,CAACqB,EAASC,CAAM,CAAC,EACzC,IAAMoB,EAAqBJ,EACrBZ,EACF,CAAC,KAAM,MAAO,GAAK,CAAC,UAAAU,EAAW,aAAAC,EAAc,OAAQK,EAAoB,cAAAH,EAAe,QAAAL,CAAO,CAAC,EACpGzC,GAAa,YAAYiC,EAAciB,GAA2BD,CAAkB,CAAC,CACvF,CAAC,CACH,KACE,QAAYlC,GAAI4B,EAAWC,EAAcC,EAAQC,EAAeC,EAASN,CAAO,CAEpF,EAEazB,GAAe,MAAM2B,GAAqC,CACrE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,gBAAiB,CAACqB,EAASC,CAAM,CAAC,EACnD,IAAMI,EAA0B,CAAC,KAAM,gBAAiB,GAAKU,CAAS,EACtE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEIjB,GAAa2B,CAAS,CAE/B,ICvNA,IAWaQ,GAWAC,GAiBAC,GAvCbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KACAC,KAEaT,GAAuB,CAACU,EAAgBC,IAA0C,CAC7F,OAAQD,EAAO,SAAU,CACvB,IAAK,MACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAMA,EAAO,KAAM,KAAK,EACtD,IAAK,aACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAM,CAAC,UAAWA,EAAO,SAAS,EAAG,YAAY,EAC/E,QACE,MAAM,IAAI,MAAM,0BAA0BA,EAAO,QAAQ,QAAQC,EAAQ,CAAC,EAAE,CAChF,CACF,EAEaV,GAAwBS,GAAmC,CACtE,OAAQA,EAAO,CAAC,EAAG,CACjB,IAAK,MACH,OAAO,IAAIE,GAAOF,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EACnD,IAAK,aAAc,CACjB,IAAMG,EAAWH,EAAO,CAAC,EACzB,GAAI,CAACI,GAAyBD,CAAQ,EACpC,MAAM,IAAI,MAAM,4BAA4BA,CAAQ,+BAA+B,EAErF,GAAM,CAAC,UAAAE,EAAW,SAAAC,EAAU,QAAAC,CAAO,EAAIP,EAAO,CAAC,EAC/C,OAAOE,GAAO,cAAcG,EAAW,CAAC,SAAAF,EAAU,KAAMH,EAAO,CAAC,EAAG,SAAAM,EAAU,QAAAC,CAAO,CAAC,CACvF,CACA,QACE,MAAM,IAAI,MAAM,0BAA0BP,EAAO,CAAC,CAAC,EAAE,CACzD,CACF,EAEaR,GAAN,KAA8E,CAMnF,MAAM,8BAA8BgB,EAAmD,CAErF,OAAOC,GAAuB,MAAMC,GAASF,CAAI,CAAC,CACpD,CAEA,MAAM,UAAUG,EAAiCC,EAA0D,CACzGC,GAAiB,EACjB,IAAIC,EAEA,OAAOH,GAAiB,SACtB,GAEFG,EAAQ,MAAMJ,GAASC,CAAY,EAInCG,EAAQ,MAAM,KAAK,8BAA8BH,CAAY,EAG/DG,EAAQH,EAGV,CAAC,KAAK,UAAW,KAAK,WAAY,KAAK,WAAW,EAAI,MAAMI,GAAcD,EAAOF,CAAO,EACxFI,GAAe,CACjB,CAEA,MAAM,SAAyB,CAC7B,OAAOC,GAAe,KAAK,SAAS,CACtC,CAEA,MAAM,IAAIC,EAAiCC,EAAqCP,EACzC,CACrCC,GAAiB,EACjB,IAAMO,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAChC,OAAO,QAAQH,CAAK,EAAE,QAAQI,GAAO,CACnC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,WAAW,QAAQD,CAAI,EAC1C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,kBAAkBD,CAAI,GAAG,EAE3CH,EAAW,KAAKpB,CAAM,EACtBqB,EAAa,KAAKG,CAAK,CACzB,CAAC,EAED,IAAMC,EAAkC,CAAC,EACnCC,EAA0B,CAAC,EACjC,OAAO,QAAQP,CAAO,EAAE,QAAQG,GAAO,CACrC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,YAAY,QAAQD,CAAI,EAC3C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,mBAAmBD,CAAI,GAAG,EAE5CE,EAAY,KAAKzB,CAAM,EACvB0B,EAAc,KAAKF,CAAK,CAC1B,CAAC,EAED,IAAMG,EACFP,EAAW,IAAI,CAACQ,EAAGC,IAAMvC,GAAqBsC,EAAG,IAAM,UAAU,KAAK,WAAWP,EAAaQ,CAAC,CAAC,CAAC,GAAG,CAAC,EACnGC,EAAUL,EAAY,IACxB,CAACG,EAAGC,IAAMD,EAAItC,GAAqBsC,EAAG,IAAM,WAAW,KAAK,YAAYF,EAAcG,CAAC,CAAC,CAAC,GAAG,EAAI,IAAI,EAElGE,EAAU,MAAMC,GAAI,KAAK,UAAWX,EAAcM,EAAQD,EAAeI,EAASlB,CAAO,EAEzFqB,EAAuC,CAAC,EAC9C,QAASJ,EAAI,EAAGA,EAAIE,EAAQ,OAAQF,IAClCI,EAAU,KAAK,YAAYP,EAAcG,CAAC,CAAC,CAAC,EAAIJ,EAAYI,CAAC,GAAKtC,GAAqBwC,EAAQF,CAAC,CAAC,EAEnG,OAAAb,GAAe,EACRiB,CACT,CAEA,gBAAuB,CAEvB,CAEA,cAAqB,CACdC,GAAa,KAAK,SAAS,CAClC,CACF,IC9HA,IAeaC,GAiDAC,GAhEbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAQaP,GAAkB,IAAY,CAoBzC,IAnBI,OAAOQ,GAAI,KAAK,aAAgB,UAAYA,GAAI,KAAK,YAAc,KACrEA,GAAI,KAAK,YAAc,GAGrBA,GAAI,KAAK,OAAS,IAEpB,QAAQ,KACJ,8HACyE,EAG3E,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,YAAe,UAAY,CAAC,OAAO,UAAUA,GAAI,KAAK,UAAU,GAAKA,GAAI,KAAK,YAAc,EAY9G,GAAI,OAAO,KAAS,KAAe,CAAC,KAAK,oBACvCA,GAAI,KAAK,WAAa,MACjB,CACL,IAAMC,EACF,OAAO,UAAc,IAAc,GAAQ,SAAS,EAAE,KAAK,EAAE,OAAS,UAAU,oBA
CpFD,GAAI,KAAK,WAAa,KAAK,IAAI,EAAG,KAAK,MAAMC,GAAsB,GAAK,CAAC,CAAC,CAC5E,CASJ,EAEaR,GAAN,KAAuD,CAS5D,MAAM,KAAKS,EAAoC,CAE7CV,GAAgB,EAGhB,MAAMW,GAAmC,EAGzC,MAAMC,GAAgBF,CAAW,CACnC,CAKA,MAAM,8BAA8BG,EAAiCC,EAChC,CACnC,IAAMC,EAAU,IAAIC,GACpB,aAAMD,EAAQ,UAAUF,EAAcC,CAAO,EACtC,QAAQ,QAAQC,CAAO,CAChC,CACF,IC7FA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,KAAA,IAIaA,GAJbC,GAAAC,EAAA,kBAGAC,KACaH,GAAc,IAAII,KCK/BC,KACAA,KAGAA,KCPO,IAAMC,GAAU,SDKvB,IAAOC,GAAQC,GAKgB,CAC7B,IAAMC,EAAgB,cAA4B,cAClDC,GAAgB,QAASD,EAAe,GAAG,CAC7C,CAE8B,CAC5B,IAAME,EAA4C,cAAoC,YAGpFD,GAAgB,SAAUC,EAAa,CAAC,EACxCD,GAAgB,QAASC,EAAa,CAAC,EAEzCD,GAAgB,MAAOC,EAAa,EAAE,EACtCD,GAAgB,OAAQC,EAAa,EAAE,CACzC,CAEA,OAAO,eAAeC,GAAI,SAAU,MAAO,CAAC,MAAOC,GAAS,WAAY,EAAI,CAAC", - "names": ["backends", "backendsSortedByPriority", "registerBackend", "tryResolveAndInitializeBackend", "resolveBackendAndExecutionProviders", "init_backend_impl", "__esmMin", "name", "backend", "priority", "currentBackend", "i", "backendName", "backendInfo", "isInitializing", "e", "options", "eps", "backendHints", "backendNames", "errors", "availableBackendNames", "resolveResult", "err", "filteredEps", "target", "prop", "init_backend", "__esmMin", "init_backend_impl", "version", "init_version", "__esmMin", "logLevelValue", "env", "init_env_impl", "__esmMin", "init_version", "version", "value", "env", "init_env", "__esmMin", "init_env_impl", "tensorToDataURL", "tensorToImageData", "init_tensor_conversion_impl", "__esmMin", "tensor", "options", "canvas", "pixels2DContext", "width", "height", "inputformat", "norm", "normMean", "normBias", "stride", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "j", "R", "G", "B", "A", "image", "channels", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "bufferToTensor", "tensorFromImage", "tensorFromTexture", "tensorFromGpuBuffer", "tensorFromPinnedBuffer", "init_tensor_factory_impl", "__esmMin", "init_tensor_impl", "buffer", "options", "height", "width", "norm", "normMean", "normBias", "inputformat", "outputformat", "stride", "float32Data", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "Tensor", "image", "isHTMLImageEle", "isImageDataEle", "isImageBitmap", "isString", "data", "bufferToTensorOptions", "createCanvas", "createCanvasContext", "canvas", "pixels2DContext", "tempCanvas", "resolve", "reject", "context", "newImage", "img", "texture", "download", "dispose", "dims", "gpuBuffer", "dataType", "type", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "isTypedArrayChecked", "checkTypedArray", "init_tensor_impl_type_mapping", "__esmMin", "isBigInt64ArrayAvailable", "isBigUint64ArrayAvailable", "isFloat16ArrayAvailable", "calculateSize", "tensorReshape", "init_tensor_utils_impl", "__esmMin", "init_tensor_impl", "dims", "size", "i", "dim", "tensor", "Tensor", "Tensor", "init_tensor_impl", "__esmMin", "init_tensor_conversion_impl", "init_tensor_factory_impl", "init_tensor_impl_type_mapping", "init_tensor_utils_impl", "arg0", "arg1", "arg2", "checkTypedArray", "type", "dims", "expectedTypedArrayConstructor", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "data", "maybeDims", "typedArrayConstructor", "firstElementType", "mappedType", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "size", "calculateSize", "image", "options", "tensorFromImage", "texture", "tensorFromTexture", "gpuBuffer", "tensorFromGpuBuffer", "buffer", "tensorFromPinnedBuffer", "tensorToDataURL", 
"tensorToImageData", "releaseData", "tensorReshape", "Tensor", "init_tensor", "__esmMin", "init_tensor_impl", "TRACE", "TRACE_FUNC", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "init_trace", "__esmMin", "init_env_impl", "deviceType", "label", "env", "msg", "extraMsg", "stack", "hasTraceFunc", "i", "InferenceSession", "init_inference_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "init_trace", "_InferenceSession", "handler", "feeds", "arg1", "arg2", "TRACE_FUNC_BEGIN", "fetches", "options", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "TRACE_FUNC_END", "arg0", "arg3", "filePathOrUint8Array", "buffer", "byteOffset", "byteLength", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "InferenceSession", "init_inference_session", "__esmMin", "init_inference_session_impl", "init_tensor_conversion", "__esmMin", "init_tensor_factory", "__esmMin", "init_onnx_model", "__esmMin", "init_onnx_value", "__esmMin", "noBackendErrMsg", "TrainingSession", "init_training_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "_TrainingSession", "handler", "hasOptimizerModel", "hasEvalModel", "trainingOptions", "sessionOptions", "evalModel", "optimizerModel", "options", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "inputNames", "outputNames", "feeds", "arg1", "arg2", "fetches", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "trainableOnly", "array", "paramsSize", "TrainingSession", "init_training_session", "__esmMin", "init_training_session_impl", "esm_exports", "__export", "InferenceSession", "TRACE", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "Tensor", "TrainingSession", "env", "registerBackend", "init_esm", "__esmMin", "init_backend", "init_env", "init_inference_session", "init_tensor", "init_tensor_conversion", "init_tensor_factory", "init_trace", "init_onnx_model", "init_onnx_value", "init_training_session", "log", "arg0", "arg1", "arg2", "arg3", "createCategorizedLogger", "logInternal", "category", "severity", "content", "_stack", "config", "LOGGER_CONFIG_MAP", "SEVERITY_VALUE", "LOGGER_PROVIDER_MAP", "NoOpLoggerProvider", "ConsoleLoggerProvider", "LOGGER_DEFAULT_CONFIG", "Logger", "Event", "EventRecord", "Profiler", "now", "init_instrument", "__esmMin", "_severity", "_content", "_category", "verbose", "info", "warning", "error", "fatal", "reset", "set", "previousConfig", "setWithEnv", "env", "name", "startTime", "endCallback", "timer", "ctx", "endTime", "maxNumberEvents", "flushBatchSize", "flushIntervalInMilliseconds", "func", "event", "isPromise", "res", "resolve", "reject", "value", "reason", "eventRes", "e", "currentTime", "previousPointer", "resolveOperator", "node", "opsets", "rules", "rule", "opType", "domain", "versionSelector", "opImpl", "opInit", "opset", "matchSelector", "set", "version", "selector", "rangeStart", "pair", "rangeEnd", "init_opset", "__esmMin", "require_guid", "__commonJSMin", "exports", "Guid", "guid", "value", "count", "out", "i", "other", "Long", "low", "high", "unsigned", "isLong", "obj", "ctz32", "value", "c", "fromInt", "cachedObj", "cache", "UINT_CACHE", "fromBits", "INT_CACHE", "fromNumber", "UZERO", "ZERO", "TWO_PWR_64_DBL", "MAX_UNSIGNED_VALUE", "TWO_PWR_63_DBL", "MIN_VALUE", "MAX_VALUE", "TWO_PWR_32_DBL", "lowBits", "highBits", "fromString", "str", "radix", "p", "radixToPower", "pow_dbl", "result", "i", "size", "power", "fromValue", "val", "wasm", "TWO_PWR_16_DBL", 
"TWO_PWR_24_DBL", "TWO_PWR_24", "ONE", "UONE", "NEG_ONE", "LongPrototype", "long_default", "init_long", "__esmMin", "radixLong", "div", "rem1", "rem", "remDiv", "intval", "digits", "bit", "other", "thisNeg", "otherNeg", "addend", "a48", "a32", "a16", "a00", "b48", "b32", "b16", "b00", "c48", "c32", "c16", "c00", "subtrahend", "multiplier", "divisor", "approx", "res", "halfThis", "log2", "delta", "approxRes", "approxRem", "numBits", "b", "le", "hi", "lo", "bytes", "flatbuffers", "init_flatbuffers", "__esmMin", "low", "high", "other", "opt_initial_size", "initial_size", "forceDefaults", "size", "additional_bytes", "align_size", "old_buf_size", "byte_size", "i", "value", "voffset", "defaultValue", "obj", "bb", "new_buf_size", "nbb", "offset", "numfields", "vtableloc", "trimmed_size", "standard_fields", "len", "existing_vtable", "vt1", "outer_loop", "vt2", "j", "root_table", "opt_file_identifier", "opt_size_prefix", "size_prefix", "file_identifier", "table", "field", "table_start", "vtable_start", "ok", "elem_size", "num_elems", "alignment", "s", "utf8", "codePoint", "a", "b", "bytes", "position", "result", "bb_pos", "vtable_offset", "vtable", "t", "opt_encoding", "length", "c", "d", "ident", "onnxruntime", "init_ort_generated", "__esmMin", "init_flatbuffers", "experimental", "fbs", "AttributeType", "DimensionValueType", "TensorDataType", "NodeType", "TypeInfoValue", "Shape", "i", "bb", "obj", "flatbuffers", "index", "offset", "builder", "dimOffset", "data", "numElems", "Dimension", "optionalEncoding", "valueOffset", "denotationOffset", "DimensionValue", "dimType", "dimValue", "dimParamOffset", "TensorTypeAndShape", "elemType", "shapeOffset", "MapType", "keyType", "valueTypeOffset", "SequenceType", "elemTypeOffset", "EdgeEnd", "node_index", "src_arg_index", "dst_arg_index", "NodeEdge", "nodeIndex", "inputEdgesOffset", "outputEdgesOffset", "Node", "nameOffset", "docStringOffset", "domainOffset", "sinceVersion", "opTypeOffset", "type", "executionProviderTypeOffset", "inputsOffset", "outputsOffset", "attributesOffset", "inputArgCountsOffset", "implicitInputsOffset", "ValueInfo", "typeOffset", "TypeInfo", "valueType", "OperatorSetId", "version", "Tensor", "dimsOffset", "dataType", "rawDataOffset", "stringDataOffset", "SparseTensor", "valuesOffset", "indicesOffset", "Attribute", "f", "sOffset", "tOffset", "gOffset", "floatsOffset", "intsOffset", "stringsOffset", "tensorsOffset", "graphsOffset", "Graph", "initializersOffset", "nodeArgsOffset", "nodesOffset", "maxNodeIndex", "nodeEdgesOffset", "sparseInitializersOffset", "Model", "irVersion", "opsetImportOffset", "producerNameOffset", "producerVersionOffset", "modelVersion", "graphOffset", "graphDocStringOffset", "KernelCreateInfos", "nodeIndicesOffset", "kernelDefHashesOffset", "SubGraphSessionState", "graphIdOffset", "sessionStateOffset", "SessionState", "kernelsOffset", "subGraphSessionStatesOffset", "InferenceSession", "ortVersionOffset", "modelOffset", "require_aspromise", "__commonJSMin", "exports", "module", "asPromise", "fn", "ctx", "params", "offset", "index", "pending", "resolve", "reject", "err", "require_base64", "__commonJSMin", "exports", "base64", "string", "p", "n", "b64", "s64", "i", "buffer", "start", "end", "parts", "chunk", "j", "t", "b", "invalidEncoding", "offset", "c", "require_eventemitter", "__commonJSMin", "exports", "module", "EventEmitter", "evt", "fn", "ctx", "listeners", "i", "args", "require_float", "__commonJSMin", "exports", "module", "factory", "f32", "f8b", "le", "writeFloat_f32_cpy", "val", "buf", "pos", 
"writeFloat_f32_rev", "readFloat_f32_cpy", "readFloat_f32_rev", "writeFloat_ieee754", "writeUint", "sign", "exponent", "mantissa", "writeUintLE", "writeUintBE", "readFloat_ieee754", "readUint", "uint", "readUintLE", "readUintBE", "f64", "writeDouble_f64_cpy", "writeDouble_f64_rev", "readDouble_f64_cpy", "readDouble_f64_rev", "writeDouble_ieee754", "off0", "off1", "readDouble_ieee754", "lo", "hi", "require_inquire", "__commonJSMin", "e", "require_utf8", "__commonJSMin", "exports", "utf8", "string", "len", "c", "i", "buffer", "start", "end", "parts", "chunk", "t", "offset", "c1", "c2", "require_pool", "__commonJSMin", "exports", "module", "pool", "alloc", "slice", "size", "SIZE", "MAX", "slab", "offset", "buf", "require_longbits", "__commonJSMin", "exports", "module", "LongBits", "util", "lo", "hi", "zero", "zeroHash", "value", "sign", "unsigned", "charCodeAt", "hash", "mask", "part0", "part1", "part2", "require_minimal", "__commonJSMin", "exports", "util", "value", "obj", "prop", "Buffer", "sizeOrArray", "hash", "unsigned", "bits", "merge", "dst", "src", "ifNotSet", "keys", "i", "str", "newError", "name", "CustomError", "message", "properties", "fieldNames", "fieldMap", "encoding", "size", "require_writer", "__commonJSMin", "exports", "module", "Writer", "util", "BufferWriter", "LongBits", "base64", "utf8", "Op", "fn", "len", "val", "noop", "State", "writer", "create", "size", "writeByte", "buf", "pos", "writeVarint32", "VarintOp", "value", "writeVarint64", "bits", "writeFixed32", "writeBytes", "i", "head", "tail", "BufferWriter_", "require_writer_buffer", "__commonJSMin", "exports", "module", "BufferWriter", "Writer", "util", "val", "buf", "pos", "i", "value", "len", "writeStringBuffer", "require_reader", "__commonJSMin", "exports", "module", "Reader", "util", "BufferReader", "LongBits", "utf8", "indexOutOfRange", "reader", "writeLength", "buffer", "create_array", "create", "value", "readLongVarint", "bits", "i", "readFixed32_end", "buf", "end", "readFixed64", "length", "start", "nativeBuffer", "bytes", "wireType", "BufferReader_", "fn", "require_reader_buffer", "__commonJSMin", "exports", "module", "BufferReader", "Reader", "util", "buffer", "len", "require_service", "__commonJSMin", "exports", "module", "Service", "util", "rpcImpl", "requestDelimited", "responseDelimited", "rpcCall", "method", "requestCtor", "responseCtor", "request", "callback", "self", "err", "response", "endedByRPC", "require_rpc", "__commonJSMin", "exports", "rpc", "require_roots", "__commonJSMin", "exports", "module", "require_index_minimal", "__commonJSMin", "exports", "protobuf", "configure", "require_minimal", "__commonJSMin", "exports", "module", "require_onnx", "__commonJSMin", "exports", "module", "$protobuf", "$Reader", "$Writer", "$util", "$root", "onnx", "valuesById", "values", "AttributeProto", "properties", "keys", "i", "message", "writer", "reader", "length", "end", "tag", "end2", "error", "object", "options", "long", "j", "typeUrlPrefix", "ValueInfoProto", "NodeProto", "TrainingInfoProto", "ModelProto", "StringStringEntryProto", "TensorAnnotation", "GraphProto", "TensorProto", "Segment", "SparseTensorProto", "TensorShapeProto", "Dimension", "$oneOfFields", "TypeProto", "Tensor", "Sequence", "Map", "Optional", "SparseTensor", "OperatorSetIdProto", "FunctionProto", "assert", "expr", "msg", "decodeUtf8String", "buffer", "import_onnx", "ArrayUtil", "MatMulUtil", "BroadcastUtil", "GemmUtil", "ProtoUtil", "LongUtil", "ShapeUtil", "SplitUtil", "PoolConvUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", 
"init_flatbuffers", "init_long", "init_tensor", "n1", "n2", "i", "dimsA", "dimsB", "a", "b", "outputShape", "aRank", "bRank", "_BroadcastUtil", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "aLen", "bLen", "broadcastedIndices", "originalShape", "originalIndices", "dimOffset", "op", "inplace", "resultType", "size", "c", "Tensor", "outputIndices", "originalIndicesA", "originalIndicesB", "valA", "valB", "isAScalar", "isBScalar", "rest", "j", "shape", "finalShape", "inputRank", "finalRank", "inputShape", "inRank", "dims", "dim", "leftShape", "transLeft", "rightShape", "transRight", "biasShape", "M", "K", "N", "kDim", "_ProtoUtil", "typeProto", "type", "d", "long_default", "valueType", "tensor", "node", "attributes", "n", "unsigned", "flatbuffers", "_ShapeUtil", "axis", "start", "end", "rank", "strides", "indices", "offset", "tensorRank", "axes", "x", "index", "axisToIncrementOn", "k", "originalDims", "shapeHints", "nDims", "reshapedDims", "unknownDimension", "newTensorSize", "oldTensorSize", "perm", "v", "pad", "shape1", "shape2", "total", "y", "right", "outputDims", "inSqueezeList", "inputDimsIterator", "_SplitUtil", "split", "numOutputs", "shapes", "offsets", "numElementsAlongAxis", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "autoPad", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "sizeof", "type", "sizeofProto", "createView", "dataBuffer", "dataviewConstructor", "longToNumber", "i", "ortFbs", "readProto", "view", "byteOffset", "long_default", "import_guid_typescript", "import_onnx", "Tensor", "init_tensor", "__esmMin", "init_long", "init_ort_generated", "init_util", "onnxruntime", "_Tensor", "dims", "dataProvider", "asyncDataProvider", "cache", "dataId", "ShapeUtil", "size", "empty", "constructor", "buf", "data", "indices", "value", "tensorProto", "ProtoUtil", "str", "decodeUtf8String", "dataDest", "dataSource", "elementSize", "length", "n", "array", "element", "ortTensor", "getGlsl", "version", "GLSL_ES_2_0", "GLSL_ES_3_0", "getVertexShaderSource", "glsl", "getFragShaderPreamble", "getDefaultFragShaderMain", "outputShapeLength", "init_glsl_source", "__esmMin", "init_types", "__esmMin", "repeatedTry", "checkFn", "delayFn", "_counter", "maxCounter", "resolve", "reject", "tryCount", "tryFn", "nextBackoff", "generateShaderFuncNameFromInputSamplerName", "samplerName", "assert", "generateShaderFuncNameFromInputSamplerNameAtOutCoords", "squeezeInputShape", "inputShape", "squeezedShape", "newInputShape", "getSqueezedParams", "params", "keptDims", "d", "getCoordsDataType", "rank", "getGlChannels", "init_utils", "__esmMin", "init_util", "getVecChannels", "name", "rank", "getGlChannels", "d", "getChannels", "unpackFromChannel", "init_packing_utils", "__esmMin", "init_utils", "getOutOfBoundsCondition", "rank", "shape", "dims", "cond", "i", "getOutput", "coord00", "coord01", "coord10", "coord11", "D", "getSetup", "rows", "cols", "packProgramMetadata", "createPackProgramInfo", "createPackProgramInfoLoader", "init_pack", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "handler", "input", "glsl", "getGlsl", "inputShape", "inputRank", "outputRank", "coordsDataType", "getCoordsDataType", "channels", "getChannels", "setup", "reversedInputWH", "outOfBoundsCondition", "output", "shaderSource", "processDims3D", "shape", "batch", "i", "isReshapeCheap", "dims", "reshapedDims", "isCheapReshape", "getReshapedInputCoords", "strides", 
"ShapeUtil", "coords", "index", "stride", "line1", "line2", "getFlattenedIndexFrom3D", "createPackedReshape3DProgramMetadata", "createPackedReshape3DProgramInfo", "createPackedReshape3DProgramInfoLoader", "init_reshape_packed", "__esmMin", "init_util", "init_glsl_source", "init_types", "init_packing_utils", "outputShape3D", "handler", "input3D", "metadata", "inputShape3D", "squeezedOutputShape", "mainLoop", "outputCoords", "glsl", "getGlsl", "shaderSource", "unpackFromChannel", "encodeAsUint8", "init_uint8_encode", "__esmMin", "init_glsl_source", "init_types", "inferenceHandler", "input", "outputShape", "glsl", "getGlsl", "shaderSource", "programInfo", "getSourceCoords", "rank", "dims", "coords", "i", "unpackProgramMetadata", "createUnpackProgramInfo", "createUnpackProgramInfoLoader", "init_unpack", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "handler", "input", "channels", "getChannels", "innerDims", "coordsDataType", "getCoordsDataType", "unpackChannel", "unpackFromChannel", "sourceCoords", "glsl", "getGlsl", "shaderSource", "RedFloat32DataEncoder", "RGBAFloatDataEncoder", "Uint8DataEncoder", "init_texture_data_encoder", "__esmMin", "init_instrument", "gl", "channels", "src", "textureSize", "result", "source", "Logger", "v", "i", "size", "buffer", "dataSize", "_value", "index", "textureType", "dest", "_textureSize", "createTextureLayoutFromTextureType", "calculateTextureWidthAndHeight", "createTextureLayoutFromShape", "init_texture_layout", "__esmMin", "init_util", "init_types", "textureLayoutStrategy", "shape", "textureType", "channel", "isPacked", "reverseWH", "breakAxis", "unpackedShape", "d", "i", "layout", "channels", "prefs", "width", "height", "rank", "inferredDims", "ShapeUtil", "getProgramInfoUniqueKey", "WebGLInferenceHandler", "init_inference_handler", "__esmMin", "init_instrument", "init_tensor", "init_util", "init_pack", "init_reshape_packed", "init_uint8_encode", "init_unpack", "init_texture_data_encoder", "init_texture_layout", "init_types", "programInfo", "inputTextureDatas", "inputs", "texture", "key", "session", "shape", "textureType", "calculateTextureWidthAndHeight", "program", "i", "artifact", "outputTextureLayout", "createTextureLayoutFromTextureType", "outputTextureData", "output", "tensor", "td", "layout", "adjustedKernelShape", "adjustedLayout", "buffer", "numFeatureMaps", "oldRowSize", "newRowSize", "newSize", "f", "oldOffset", "newOffset", "unpackedTextureLayout", "createTextureLayoutFromShape", "unpackedTextureData", "dataType", "data", "usage", "Logger", "input", "reshapedDims", "inputTD", "newTextureLayout", "ShapeUtil", "isReshapeCheap", "squeezedInputShape", "processDims3D", "squeezedOutputShape", "squeezedInputTensor", "squeezedOutputTensor", "createPackedReshape3DProgramInfoLoader", "type", "tensorId", "textureData", "Tensor", "_id", "isPacked", "encodeAsUint8", "createPackProgramInfoLoader", "createUnpackProgramInfoLoader", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "batchNormalizationProgramMetadata", "batchNormalization", "parseBatchNormalizationAttributes", "createBatchNormalizationProgramInfo", "validateInputs", "init_batch_normalization", "__esmMin", "init_attribute_with_cache_key", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "attributes", "node", "epsilon", "momentum", "spatial", "createAttributeWithCacheKey", "glsl", "getGlsl", "rank", "scaleWidth", "scaleHeight", "shaderSource", "X", "scale", "B", "mean", 
"var_", "GlslContext", "GlslLib", "GlslLibRoutine", "GlslLibRoutineNode", "TopologicalSortGlslRoutines", "init_glsl_definitions", "__esmMin", "glContext", "programInfo", "inputTextureLayouts", "outputTextureLayout", "context", "routineBody", "dependencies", "name", "node", "nodes", "cycleCheck", "alreadyTraversed", "result", "graphNodes", "root", "i", "glslAdd", "name", "glslDiv", "glslMul", "glslSub", "glslEqual", "glslGreater", "glslLess", "glslAnd", "glslOr", "glslXor", "glslPow", "glslBuiltinBinary", "glslPRelu", "fname", "createBinaryProgramInfoLoader", "createBinaryProgramInfo", "add", "and", "div", "equal", "greater", "less", "mul", "or", "pow", "pRelu", "sub", "xor", "init_binary_op", "__esmMin", "init_util", "init_glsl_definitions", "init_glsl_source", "init_types", "handler", "inputs", "glslFunc", "outputTensorType", "cacheKey", "textureType", "isBroadcast", "ShapeUtil", "outputShape", "usePackedTexture", "calculatedShape", "BroadcastUtil", "outputRank", "aRank", "bRank", "aBcast", "bBcast", "glsl", "getGlsl", "shaderSource", "cast", "parseCastAttributes", "validateInputs", "init_cast", "__esmMin", "init_util", "handler", "inputs", "to", "node", "ProtoUtil", "createPackedConcatProgramMetadata", "createPackedConcatProgramInfo", "createPackedConcatProgramInfoLoader", "getShiftedChannelsSnippet", "init_concat_packed", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "inputCount", "cacheHint", "_v", "i", "handler", "metadata", "inputs", "axis", "inputShape", "outputShape", "dataNShape", "axisIndex", "rank", "coords", "getChannels", "dtype", "getCoordsDataType", "unpackChannel", "unpackFromChannel", "shapes", "channels", "getGlChannels", "offsets", "channel", "lastChannels", "allChannels", "getValueSnippet", "shift", "lastIndex", "glsl", "getGlsl", "shaderSource", "x", "attributes", "channelIdx", "c", "idx", "concat", "createUnpackedConcatProgramMetadata", "createUnpackedConcatProgramInfo", "createUnpackedConcatProgramInfoLoader", "getTextureIndexWhereDataResidesLinearSearch", "getTextureIndexWhereDataResidesBinarySearch", "getFetchDataFromCorrectTextureMethod", "getGetSizeInConcatAxisValueFromIndexMethod", "parseConcatAttributes", "validateInputs", "init_concat", "__esmMin", "init_attribute_with_cache_key", "init_types", "init_concat_packed", "inferenceHandler", "inputs", "attributes", "createPackedConcatProgramInfoLoader", "inputCount", "cacheHint", "_v", "i", "_handler", "metadata", "axis", "inputShape", "outputShape", "dataNShape", "axisIndex", "rank", "sizeInConcatAxis", "previousSum", "getTextureIndexWhereDataResidesMethod", "fetchDataFromCorrectTextureMethod", "getSizeInConcatAxisValueFromIndexMethod", "shaderSource", "handler", "size", "numberOfTensors", "tensorRank", "codeLines", "node", "createAttributeWithCacheKey", "inputType", "inputDimensionality", "input", "glslAbs", "glslBuiltinUnary", "glslAcos", "glslAsin", "glslAtan", "glslCeil", "glslCos", "glslElu", "alpha", "name", "glslExp", "glslFloor", "glslClip", "min", "max", "glslIdentity", "glslLeakyRelu", "glslLog", "glslNeg", "glslNot", "glslSin", "glslRelu", "glslSigmoid", "glslSqrt", "glslTan", "glslTanh", "createElementwiseProgramInfo", "createElementwiseProgramInfoLoader", "abs", "acos", "asin", "atan", "clip", "parseClipAttributes", "clipV11", "generateClipAttributesFromInputs", "ceil", "cos", "elu", "parseEluAttributes", "exp", "floor", "identity", "leakyRelu", "parseLeakyReluAttributes", "log", "neg", "not", "relu", "sigmoid", "sin", "sqrt", "tan", "tanh", "init_unary_op", 
"__esmMin", "init_attribute_with_cache_key", "init_util", "init_glsl_definitions", "init_glsl_source", "init_types", "handler", "metadata", "input", "glslFunc", "textureType", "glsl", "getGlsl", "cacheKey", "inputs", "attributes", "node", "createAttributeWithCacheKey", "MIN_CLIP", "MAX_CLIP", "getActivationSnippet", "attributes", "func", "glslRelu", "glslSigmoid", "glslClip", "activationName", "activationFunction", "applyActivation", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_util", "init_unary_op", "activation", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "createUnpackedGroupedConvProgramMetadata", "createUnpackedGroupedConvProgramInfo", "createUnpackedGroupedConvProgramInfoLoader", "init_conv_grouped", "__esmMin", "init_instrument", "init_glsl_source", "init_types", "init_conv", "init_fuse_utils", "hasBias", "cacheHint", "inferenceHandler", "inputs", "metadata", "attributes", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "Logger", "outputShape", "calculateOutputShape", "glsl", "getGlsl", "activationFunction", "applyActivation", "getActivationSnippet", "shaderSource", "createPackedIm2ColProgramMetadata", "createPackedIm2ColProgramInfo", "createPackedIm2ColProgramInfoLoader", "init_im2col_pack", "__esmMin", "init_glsl_source", "init_types", "init_packing_utils", "cacheHint", "inferenceHandler", "metadata", "x", "w", "outputShape", "attributes", "xshape", "wshape", "rowDim", "colDim", "rank", "im2colShape", "kernelSize", "unpackChannel", "unpackFromChannel", "glsl", "getGlsl", "unrolled", "row", "col", "shaderSource", "createMatmulProgramInfo", "metadata", "inputs", "activationAttributes", "aShape", "bShape", "outputShape", "BroadcastUtil", "coordsDataType", "getCoordsDataType", "allGlChannels", "getGlChannels", "activationFunction", "applyActivation", "getActivationSnippet", "hasBias", "processBias", "getBiasForMatmulSnippet", "getBiasForMatmul", "rank", "arank", "brank", "sharedDim", "shaderSource", "createMatmulProgramInfoLoader", "createMatmulProgramMetadata", "inShape", "outShape", "isPacked", "unpackedCoordsSnippet", "inRank", "outRank", "rankDiff", "_s", "i", "coordsSnippet", "d", "isInputScalar", "ShapeUtil", "output", "matMul", "parseMatMulAttributes", "validateInputs", "init_matmul", "__esmMin", "init_util", "init_types", "init_utils", "init_fuse_utils", "init_matmul_pack", "inferenceHandler", "attributes", "createPackedMatmulProgramInfoLoader", "node", "parseInternalActivationAttributes", "cacheHint", "getBcastSamplerForMatmul", "coordsDataType", "allGlChannels", "inputs", "outShape", "unpackedACoordsSnippet", "unpackedBCoordsSnippet", "inAShape", "inBShape", "inARank", "inBRank", "outRank", "rankADiff", "rankBDiff", "_s", "i", "broadcastADims", "BroadcastUtil", "broadcastBDims", "coordsASnippet", "d", "coordsBSnippet", "swapDimSnippet", "getA", "rank", "res", "getB", "createPackedMatmulProgramMetadata", "createPackedMatmulProgramInfo", "createPackedMatmulProgramInfoLoader", "init_matmul_pack", "__esmMin", "init_util", "init_glsl_source", "init_types", "init_utils", "init_fuse_utils", "init_matmul", "hasBias", "cacheHint", "inferenceHandler", "metadata", "activationAttributes", "processBias", "aShape", "bShape", "outputShape", "isBroadcast", "ShapeUtil", "sharedDim", "sharedDimIndex", "aRank", "bRank", "glsl", "getGlsl", "getCoordsDataType", "getGlChannels", "activationFunction", "applyActivation", "getActivationSnippet", "getBiasForMatmulSnippet", "getBiasForMatmul", "getBcastedSamplerForMatmulSnippet", "getSamplerAInLoopSnippet", 
"getSamplerBInLoopSnippet", "getOutputCoordsSnippet", "shaderSource", "conv2DPacked", "init_conv_pack", "__esmMin", "init_conv", "init_im2col_pack", "init_matmul_pack", "inferenceHandler", "inputs", "attributes", "xshape", "kshape", "outputShape", "calculateOutputShape", "im2colOutput", "createPackedIm2ColProgramInfoLoader", "kernelReshaped", "matmulInputs", "matmulOutput", "createPackedMatmulProgramInfoLoader", "createIm2ColProgramMetadata", "createIm2ColProgramInfo", "createIm2ColProgramInfoLoader", "calculateIm2ColDims", "init_im2col", "__esmMin", "init_types", "cacheHint", "_inferenceHandler", "metadata", "x", "w", "outputShape", "attributes", "xshape", "wshape", "rank", "im2colDims", "shaderSource", "inferenceHandler", "inputShape", "kernelShape", "channels", "createDotProductProgramMetadata", "createDotProductProgramInfo", "createDotProductProgramInfoLoader", "init_dot_product", "__esmMin", "init_util", "init_glsl_source", "init_types", "init_fuse_utils", "init_im2col", "hasBias", "attributes", "inferenceHandler", "metadata", "inputs", "outputShape", "xshape", "kshape", "adjustedKernelShape", "im2colShape", "calculateIm2ColDims", "kWidth", "kHeight", "im2colStrides", "ShapeUtil", "im2colWidth", "im2colHeight", "rank", "initValue", "sharedDim", "activationFunction", "applyActivation", "getActivationSnippet", "glsl", "getGlsl", "shaderSource", "calculateOutputShape", "conv", "conv2d", "conv2DUnpackedPointwise", "conv2DUnpacked", "getAdjustedConvAttributes", "parseConvAttributes", "validateInputs", "init_conv", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_conv_grouped", "init_conv_pack", "init_dot_product", "init_fuse_utils", "init_im2col", "init_matmul", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputSpatialShape", "inferenceHandler", "inputs", "attributes", "adjustedAttributes", "packMode", "isPointwise", "createUnpackedGroupedConvProgramInfoLoader", "conv2DPacked", "xshape", "kshape", "outputShape", "reshapedX", "reshapedK", "matmulInputs", "matmulOutput", "createMatmulProgramInfoLoader", "xIm2Col", "createIm2ColProgramInfoLoader", "dotProductInputs", "createDotProductProgramInfoLoader", "pads", "PoolConvUtil", "newAttributes", "node", "activationAttributes", "parseInternalActivationAttributes", "autoPad", "group", "createAttributeWithCacheKey", "dataChannel", "filterInChannel", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "convTranspose", "convTranspose2d", "createConvTransposeProgramMetadata", "createUnpackedConvTransposeProgramInfo", "createUnpackedConvTransposeProgramInfoLoader", "convTranspose2DUnpacked", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "init_conv_transpose", "__esmMin", "init_attribute_with_cache_key", "init_glsl_source", "init_types", "init_fuse_utils", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "strides", "outputPadding", "outputShape", "spatialRank", "updateShape", "i", "inferenceHandler", "inputs", "attributes", "adjustedAttributes", "hasBias", "cacheHint", "metadata", "valueInit", "xShape", "wShape", "outputChannelsPerGroup", "inputChannelsPerGroup", "glsl", "getGlsl", "activationFunction", "applyActivation", "getActivationSnippet", "shaderSource", "newAttributes", "node", "activationAttributes", "parseInternalActivationAttributes", 
"group", "createAttributeWithCacheKey", "dataChannel", "filterInChannel", "featureMaps", "transposeProgramMetadata", "transpose", "parseTransposeAttributes", "createTransposeProgramInfo", "getAdjustedPerm", "getOutputShape", "getPermFunctionBody", "validateInputs", "init_transpose", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "createAttributeWithCacheKey", "_inferenceHandler", "input", "perm", "inputShape", "unpackedOutputShape", "rank", "shaderSource", "ShapeUtil", "name", "reverseFunc", "i", "depthToSpace", "parseDepthToSpaceAttributes", "validateInputs", "init_depth_to_space", "__esmMin", "init_transpose", "inferenceHandler", "inputs", "attributes", "blocksize", "blocksizeSqr", "transposePerm", "firstReshapeShape", "firstReshapedTensor", "transposeAttributes", "transposeOutput", "transpose", "secondReshapeShape", "node", "mode", "flatten", "parseFlattenAttributes", "validateInputs", "init_flatten", "__esmMin", "init_util", "inferenceHandler", "inputs", "axis", "outputDims", "ShapeUtil", "node", "r", "NUMBER_TYPES", "init_operators", "__esmMin", "gather", "parseGatherAttributes", "gatherProgramMetadata", "createGatherProgramInfo", "createGatherProgramInfoLoader", "validateInputs", "init_gather", "__esmMin", "init_attribute_with_cache_key", "init_operators", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "createAttributeWithCacheKey", "_handler", "metadata", "axis", "inputShape", "indexDataShape", "outputShape", "ShapeUtil", "indexCopyOps", "i", "orank", "irank", "iDrank", "shaderSource", "handler", "tensorRank", "NUMBER_TYPES", "gemm", "parseGemmAttributes", "parseGemmAttributesV7", "parseGemmAttributesV11", "createGemmProgramInfoLoader", "createGemmProgramInfo", "validateInputs", "init_gemm", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "isOptionalC", "transA", "transB", "alpha", "beta", "createAttributeWithCacheKey", "metadata", "aShape", "bShape", "M", "N", "GemmUtil", "outputShape", "sharedDim", "line", "rank", "declareC", "broadcastC", "calculateC", "shaderSource", "imageScaler", "parseImageScalerAttributes", "imageScalerProgramMetadata", "createImageScalerProgramInfo", "createImageScalerProgramInfoLoader", "createGetBiasMethod", "validateInputs", "init_image_scaler", "__esmMin", "init_attribute_with_cache_key", "init_types", "inferenceHandler", "inputs", "attributes", "node", "scale", "bias", "createAttributeWithCacheKey", "_handler", "metadata", "outputShape", "rank", "shaderSource", "handler", "numChannels", "codeLines", "i", "instanceNormalization", "parseInstanceNormalizationAttributes", "meanAndVarianceProgramMetadata", "createMeanAndVarianceProgramInfo", "createMeanAndVarianceProgramInfoLoader", "computeOutputProgramMetadata", "createComputeOutputProgramInfo", "createComputeOutputProgramInfoLoader", "validateInputs", "init_instance_normalization", "__esmMin", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "epsilon", "meanAndVariance", "node", "metadata", "input", "xDims", "channel", "channelSize", "outputShape", "shaderSource", "meanAndVarianceShape", "glsl", "getGlsl", "textureWidth", "textureHeight", "meanAndVarianceWidth", "meanAndVarianceHeight", "X", "scale", "B", "createLrnProgramInfo", "inputs", "attributes", "C", "rank", "from", "to", "alpha", "bias", "beta", "shaderSource", "lrnProgramMetadata", "createLrnProgramInfoLoader", "lrn", "parseLrnAttributes", 
"validateInputs", "init_lrn", "__esmMin", "init_attribute_with_cache_key", "init_types", "inferenceHandler", "node", "size", "createAttributeWithCacheKey", "padProgramMetadata", "padV2", "parsePadAttributesV2", "padV11", "parsePadAttributesV11", "generatePadAttributesFromInputs", "createPadProgramInfo", "validateInputsV2", "validateInputsV11", "getPadFunction", "getPadConstant", "getPadReflect", "getPadEdge", "init_pad", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "attributes", "node", "mode", "value", "pads", "createAttributeWithCacheKey", "attrubutes", "input", "outputShape", "ShapeUtil", "rank", "shaderSource", "glsl", "getGlsl", "width", "height", "strides", "shape", "block", "i", "averagePool", "parseAveragePoolAttributes", "createAveragePoolProgramInfo", "globalAveragePool", "parseGlobalAveragePoolAttributes", "maxPool", "parseMaxPoolAttributes", "createMaxPoolProgramInfo", "getAdjustedPoolAttributesAndOutputShape", "globalMaxPoolAttributes", "globalMaxPoolMetadata", "globalMaxPool", "validateInputs", "generatePoolingCode", "copyArray", "offsetToIndices", "init_pool", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "metadata", "node", "autoPad", "ceilMode", "countIncludePad", "kernelShape", "strides", "pads", "createAttributeWithCacheKey", "isGlobalOperator", "adjustedAttributes", "outputShape", "kernelSize", "ShapeUtil", "op1", "op2", "shaderSource", "storageOrder", "dilations", "inputShape", "hasDilations", "PoolConvUtil", "newAttributes", "inputDims", "start", "rank", "kw", "sw", "pwStart", "pwEnd", "dimW", "codeW", "codeH", "codeHEnd", "kh", "sh", "phStart", "phEnd", "dimH", "kernelStrides", "stridesRank", "padsRank", "offsetToIndicesFunction", "copyInputDims", "copyPads", "copyKernelStrides", "copyStrides", "hasPads", "sum", "cur", "padCode", "array", "arrayName", "block", "i", "reduce", "parseReduceAttributes", "createReduceProgramInfo", "validateInputs", "reduceSum", "reduceMean", "reduceMax", "reduceMin", "reduceProd", "reduceLogSum", "reduceLogSumSquare", "init_reduce", "__esmMin", "init_attribute_with_cache_key", "init_operators", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "name", "reduceOp", "reduceProgramMetadata", "node", "axes", "keepDims", "createAttributeWithCacheKey", "_handler", "_name", "outputShape", "iRank", "idxCopy", "ShapeUtil", "ops", "reduceOps", "k", "shaderSource", "NUMBER_TYPES", "size", "idxZero", "reshape", "init_reshape", "__esmMin", "init_util", "handler", "inputs", "reshapedDims", "ShapeUtil", "upsampleProgramMetadata", "upsample", "parseUpsampleAttributesV7", "parseUpsampleAttributesV9", "parseUpsampleAttributes", "createUpsampleProgramInfo", "validateInputs", "scalesValidation", "init_upsample", "__esmMin", "init_attribute_with_cache_key", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "attributes", "node", "opset", "isResize", "mode", "scales", "extrapolationValue", "coordinateTransformMode", "needRoiInput", "useExtrapolation", "nearestMode", "cubicCoefficientA", "excludeOutside", "useNearest2xOptimization", "roiInputIdx", "scalesInputIdx", "sizesInputIdx", "createAttributeWithCacheKey", "glsl", "getGlsl", "inputWidth", "inputHeight", "outputShape", "dim", "i", "outputWidth", "outputHeight", "outputPitches", "inputPitches", "precalculatedPitches", "d", "getInputFloatFunction", "shaderSource", "x", "attribute", "scale", "resizeProgramMetadata", "resize", 
"parseResizeAttributesV10", "parseResizeAttributesV11", "createPackedResizeProgramInfo", "prepareInputs", "parseScalesData", "parseScalesDataFromOutputSize", "init_resize_packed", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "init_upsample", "inferenceHandler", "inputs", "attributes", "validateInputs", "node", "parseUpsampleAttributes", "glsl", "getGlsl", "scales", "outputShape", "s", "dim", "outputHeight", "outputWidth", "inputShape", "inputHeight", "inputWidth", "scalesHeight", "scalesWidth", "getSourceFracIndex", "coordsDataType", "getCoordsDataType", "unpackChannel", "unpackFromChannel", "shaderSource", "xDims", "outputSizes", "scalesTensor", "sizesTensor", "yDims", "i", "scale", "mode", "isResize", "scalesValidation", "length", "end", "shape", "validateInputs", "init_shape", "__esmMin", "init_tensor", "_inferenceHandler", "inputs", "Tensor", "sliceProgramMetadata", "slice", "parseSliceAttributes", "createSliceProgramInfo", "validateInputs", "sliceV10", "generateSliceAttributesFromInputs", "validateInputsV10", "init_slice", "__esmMin", "init_attribute_with_cache_key", "init_operators", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "starts", "ends", "axes", "createAttributeWithCacheKey", "_inferenceHandler", "input", "_val", "i", "normalizedAxes", "ShapeUtil", "start", "end", "outputShape", "sliceOps", "shaderSource", "NUMBER_TYPES", "cacheKey", "softmaxComputeMaxProgramMetadata", "softmaxComputeScaleProgramMetadata", "softmaxProgramMetadata", "softmax", "parseSoftmaxAttributes", "parseSoftmaxAttributesV13", "softmaxV13", "computeSoftmax", "createComputeMaxProgramInfo", "createComputScaleProgramInfo", "createSoftMaxProgramInfo", "validateInputs", "init_softmax", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_glsl_source", "init_types", "init_transpose", "inferenceHandler", "inputs", "attributes", "inputShape", "axis", "ShapeUtil", "logicalRowCount", "featureCount", "node", "createAttributeWithCacheKey", "rank", "isTransposeRequired", "transposedInputShape", "perm", "transposedInputs", "transposeAttribute", "_", "i", "p", "transpose", "output", "computeMaxProgramInfo", "max", "computeScaleProgramInfo", "scale", "softMaxProgramInfo", "input", "outputShape", "textureWidth", "textureHeight", "glsl", "getGlsl", "shaderSource", "maxElementPerLogicalRow", "normalizationPerLogicalRow", "splitProgramMetadata", "split", "parseSplitAttributes", "getProgramCount", "createSplitProgramInfo", "validateInputs", "init_split", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "axis", "ShapeUtil", "count", "output", "i", "node", "numOutputs", "createAttributeWithCacheKey", "_inferenceHandler", "offsets", "SplitUtil", "input", "index", "shapes", "offset", "outputShape", "shaderSource", "squeeze", "squeezeV13", "parseSqueezeAttributes", "validateInputs", "validateInputsV13", "init_squeeze", "__esmMin", "init_util", "inferenceHandler", "inputs", "axes", "outputShape", "ShapeUtil", "node", "sum", "createSumProgramInfo", "validateInputs", "init_sum", "__esmMin", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "sumProgramMetadata", "_v", "glsl", "getGlsl", "outputShape", "shaderSource", "i", "length", "j", "tile", "createTileProgramInfo", "validateInputs", "init_tile", "__esmMin", "init_operators", "init_types", "inferenceHandler", "inputs", "tileProgramMetadata", "_handler", "inputShape", "outputShape", "tileOps", "i", "rank", "shaderSource", 
"NUMBER_TYPES", "unsqueeze", "unsqueezeV13", "parseUnsqueezeAttributes", "validateInputs", "validateInputsV13", "init_unsqueeze", "__esmMin", "init_util", "inferenceHandler", "inputs", "axes", "outputShape", "ShapeUtil", "node", "WEBGL_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_batch_normalization", "init_binary_op", "init_cast", "init_concat", "init_conv", "init_conv_transpose", "init_depth_to_space", "init_flatten", "init_gather", "init_gemm", "init_image_scaler", "init_instance_normalization", "init_lrn", "init_matmul", "init_pad", "init_pool", "init_reduce", "init_reshape", "init_resize_packed", "init_shape", "init_slice", "init_softmax", "init_split", "init_squeeze", "init_sum", "init_tile", "init_transpose", "init_unary_op", "init_unsqueeze", "init_upsample", "abs", "acos", "add", "and", "asin", "atan", "averagePool", "parseAveragePoolAttributes", "batchNormalization", "parseBatchNormalizationAttributes", "cast", "parseCastAttributes", "ceil", "clip", "parseClipAttributes", "clipV11", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "div", "identity", "depthToSpace", "parseDepthToSpaceAttributes", "equal", "elu", "parseEluAttributes", "exp", "flatten", "parseFlattenAttributes", "floor", "gather", "parseGatherAttributes", "gemm", "parseGemmAttributesV7", "parseGemmAttributesV11", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "greater", "imageScaler", "parseImageScalerAttributes", "instanceNormalization", "parseInstanceNormalizationAttributes", "leakyRelu", "parseLeakyReluAttributes", "less", "lrn", "parseLrnAttributes", "log", "matMul", "parseMatMulAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "neg", "not", "or", "padV2", "parsePadAttributesV2", "padV11", "parsePadAttributesV11", "pow", "pRelu", "reduceLogSum", "parseReduceAttributes", "reduceMax", "reduceMean", "reduceMin", "reduceProd", "reduceSum", "reduceLogSumSquare", "relu", "reshape", "resize", "parseResizeAttributesV10", "parseResizeAttributesV11", "shape", "sigmoid", "sin", "sliceV10", "slice", "parseSliceAttributes", "softmax", "parseSoftmaxAttributes", "softmaxV13", "parseSoftmaxAttributesV13", "split", "parseSplitAttributes", "sqrt", "squeeze", "parseSqueezeAttributes", "squeezeV13", "sub", "sum", "tan", "tanh", "tile", "transpose", "parseTransposeAttributes", "upsample", "parseUpsampleAttributesV7", "parseUpsampleAttributesV9", "unsqueeze", "parseUnsqueezeAttributes", "unsqueezeV13", "xor", "replaceInlines", "script", "inlineDefs", "match", "INLINE_FUNC_DEF_REGEX", "params", "s", "tokens", "v", "name", "regexString", "FUNC_CALL_REGEX", "regex", "type", "variable", "declLine", "newBody", "paramRedecLine", "i", "replacement", "init_glsl_function_inliner", "__esmMin", "squeezeShape", "shape", "axis", "newShape", "keptDims", "isEmptyArray", "axes", "parseAxisParam", "j", "i", "rank", "_s", "assert", "ax", "isInt", "a", "sizeFromShape", "size", "sizeToSquarishShape", "width", "PreferLogicalStrategy", "init_texture_layout_strategy", "__esmMin", "init_instrument", "init_util", "maxTextureSize", "prefs", "wh", "isPacked", "wsize", "b", "hsize", "Logger", "logShape", "_d", "d", "CoordsGlslLib", "init_glsl_coordinate_lib", "__esmMin", "init_util", "init_glsl_definitions", "init_glsl_source", "init_texture_layout_strategy", "init_utils", "GlslLib", "context", "funcName", "GlslLibRoutine", "outputLayout", "outShape", "outTexShape", "result", "floatTextureSetRGBASource", "getGlsl", 
"floatTextureSetRGBAFuncName", "floatTextureSetRSource", "floatTextureSetRFuncName", "_shape", "texShape", "packedTexShape", "source", "shape", "ArrayUtil", "texelsInLogicalRow", "texelsInBatch", "texelsInBatchN", "batches", "coords", "b", "rank", "strides", "i", "coordsToCompute", "coordsFromIndexSnippet", "stride", "line1", "line2", "glsl", "samplerName", "inputLayout", "generateShaderFuncNameFromInputSamplerName", "outCoordFuncName", "generateShaderFuncNameFromInputSamplerNameAtOutCoords", "name", "inShape", "texFuncSnippet", "inRank", "outRank", "broadcastDims", "BroadcastUtil", "type", "getCoordsDataType", "rankDiff", "coordsSnippet", "fields", "getGlChannels", "d", "unpackedCoordsSnippet", "_s", "output", "isInputScalar", "ShapeUtil", "isOutputScalar", "rows", "cols", "swapLastDimsSnippet", "inTexShape", "texNumR", "texNumC", "packedSampler", "valuesPerRow", "squeezedShape", "keptDims", "newInputShape", "squeezeInputShape", "params", "newInputLayout", "samplerRoutine", "getSqueezedParams", "index", "tNumR", "tNumC", "newShape", "squeezeShape", "stride0", "stride1", "routine", "revDims", "stride2", "stride3", "stride4", "xScale", "yScale", "stridesBlock", "body", "layout", "varName", "width", "height", "transpose", "EncodingGlslLib", "init_glsl_encoding_lib", "__esmMin", "init_glsl_definitions", "_EncodingGlslLib", "GlslLib", "context", "GlslLibRoutine", "endianness", "b", "a", "c", "FragColorGlslLib", "init_glsl_fragcolor_lib", "__esmMin", "init_glsl_definitions", "init_glsl_source", "GlslLib", "context", "glsl", "getGlsl", "GlslLibRoutine", "ShapeUtilsGlslLib", "init_glsl_shape_utils_lib", "__esmMin", "init_glsl_definitions", "_ShapeUtilsGlslLib", "GlslLib", "context", "outputRank", "result", "name", "i", "shape", "rank", "dimOffset", "funcName", "block", "body", "GlslLibRoutine", "strides", "stridesBlock", "shapeInit", "VecGlslLib", "init_glsl_vec_lib", "__esmMin", "init_glsl_definitions", "GlslLib", "context", "rank", "nameOp", "result", "name", "fname", "assignmentBlock", "i", "body", "GlslLibRoutine", "block", "glslRegistry", "init_glsl_registered_libs", "__esmMin", "init_glsl_coordinate_lib", "init_glsl_encoding_lib", "init_glsl_fragcolor_lib", "init_glsl_shape_utils_lib", "init_glsl_vec_lib", "EncodingGlslLib", "FragColorGlslLib", "VecGlslLib", "ShapeUtilsGlslLib", "CoordsGlslLib", "GlslPreprocessor", "init_glsl_preprocessor", "__esmMin", "init_glsl_definitions", "init_glsl_function_inliner", "init_glsl_registered_libs", "init_glsl_source", "glContext", "programInfo", "inputTextureLayouts", "outputTextureLayout", "GlslContext", "glslRegistry", "name", "lib", "map", "libName", "routinesInLib", "routine", "key", "currentNode", "GlslLibRoutineNode", "dependencies", "i", "node", "source", "getDefaultFragShaderMain", "replaceInlines", "getFragShaderPreamble", "script", "routinesIncluded", "routines", "nodes", "classAndRoutine", "TopologicalSortGlslRoutines", "samplers", "variables", "uniformLines", "sampler", "variable", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_instrument", "init_glsl_preprocessor", "init_glsl_source", "profiler", "glContext", "textureLayoutStrategy", "key", "artifact", "buildArtifact", "inputs", "output", "gl", "program", "err", "Logger", "a", "programInfo", "inputTextureLayouts", "outputTextureLayout", "preprocessor", "GlslPreprocessor", "fragScript", "fragShaderScript", "vertexShaderScript", "getVertexShaderSource", "env", "fragShader", "td", "width", "height", "attribLocations", "positionHandle", "textureCoordHandle", 
"uniformLocations", "variables", "textures", "texturePosition", "name", "type", "location", "arrayLength", "value", "v", "uniformHandle", "position", "samplers", "sampler", "variable", "reference", "TextureManager", "init_texture_manager", "__esmMin", "init_instrument", "init_texture_data_encoder", "glContext", "layoutStrategy", "profiler", "config", "dataType", "layout", "data", "usage", "textureDataType", "encoder", "width", "height", "key", "inUseTextures", "idleTextures", "texture", "Logger", "td", "channels", "dataSize", "a", "b", "dataId", "subscribers", "resolve", "tensorData", "textureData", "deleteTexture", "index", "_dataType", "WebGLSessionHandler", "init_session_handler", "__esmMin", "init_instrument", "init_opset", "init_inference_handler", "init_op_resolve_rules", "init_program_manager", "init_texture_layout_strategy", "init_texture_manager", "backend", "context", "PreferLogicalStrategy", "ProgramManager", "TextureManager", "WebGLInferenceHandler", "graph", "initializers", "v", "tensorId", "isPacked", "textureData", "Logger", "td", "node", "opsets", "op", "resolveOperator", "WEBGL_OP_RESOLVE_RULES", "linearSearchLastTrue", "arr", "i", "WebGLContext", "init_webgl_context", "__esmMin", "init_esm", "init_texture_data_encoder", "init_utils", "gl", "version", "width", "height", "encoder", "data", "texture", "buffer", "dataSize", "dataType", "channels", "positionHandle", "textureCoordHandle", "vertexShader", "fragShader", "program", "shaderSource", "shaderType", "shader", "position", "uniformHandle", "env", "error", "label", "usage", "RedFloat32DataEncoder", "RGBAFloatDataEncoder", "Uint8DataEncoder", "unit", "geometry", "fb", "internalFormat", "frameBuffer", "isComplete", "fragmentShader", "gl2", "ext", "query", "available", "disjoint", "timeElapsed", "repeatedTry", "fenceContext", "isFencePassed", "status", "resolve", "index", "x", "resolveFn", "isDoneFn", "createWebGLContext", "contextId", "context", "cache", "offscreenCanvas", "createOffscreenCanvas", "createNewWebGLContext", "canvas", "createCanvas", "gl", "contextAttributes", "ca", "WebGLContext", "err", "Logger", "init_webgl_context_factory", "__esmMin", "init_instrument", "init_webgl_context", "WebGLBackend", "init_backend_webgl", "__esmMin", "init_esm", "init_instrument", "init_session_handler", "init_webgl_context_factory", "env", "value", "createWebGLContext", "Logger", "context", "WebGLSessionHandler", "resolveBackend", "hint", "hints", "backendHint", "cache", "backendsCache", "backend", "tryLoadBackend", "backendObj", "isBackend", "init", "obj", "o", "init_backend", "__esmMin", "init_backend_webgl", "WebGLBackend", "KernelOp", "ExecutionPlan", "init_execution_plan", "__esmMin", "init_instrument", "op", "node", "graph", "ops", "profiler", "graphNodes", "i", "resolved", "input", "sessionHandler", "modelInputs", "inferenceHandler", "graphInputs", "index", "sequence", "graphValues", "rear", "thisOpIndex", "thisOp", "inputList", "inputTensors", "Logger", "t", "outputList", "output", "j", "downstreamNodes", "_output", "currentDownstreamNodeIndex", "currentDownstreamNode", "k", "outputIndex", "outputTensor", "import_onnx", "ortFbs", "Attribute", "init_attribute", "__esmMin", "init_ort_generated", "init_tensor", "init_util", "onnxruntime", "_Attribute", "attributes", "attr", "key", "type", "value", "defaultValue", "valueAndType", "attrType", "LongUtil", "arr", "numberValue", "i", "maybeLong", "Tensor", "decodeUtf8String", "ints", "strings", "tensors", "import_onnx", "ortFbs", "Graph", "Value", "Node", "GraphImpl", 
"init_graph", "__esmMin", "init_attribute", "init_ort_generated", "init_tensor", "init_util", "onnxruntime", "graphProto", "initializer", "valueInfo", "ProtoUtil", "_nodeProto", "name", "Attribute", "graph", "graphInitializer", "dataIndices", "nodesIndices", "inputValueNames", "currentIndex", "index", "value", "Tensor", "nodeProto", "pick", "node", "output", "dataIndex", "input", "inputName", "j", "type", "shape", "dims", "k", "LongUtil", "outputName", "starters", "i", "nodesStack", "nodesState", "nodeIndex", "outgoingEdgeIndex", "data", "downstreamNodeIndex", "offset", "newIndices", "nodePossition", "ind", "currentData", "inputValueIndex", "outputValueIndex", "nodesConsumingOutput", "delIndex", "replaceIndex", "n", "next", "child", "MIN_CLIP", "MAX_CLIP", "import_onnx", "ortFbs", "Model", "init_model", "__esmMin", "init_flatbuffers", "init_graph", "init_ort_generated", "init_util", "onnxruntime", "buf", "graphInitializer", "isOrtFormat", "onnxError", "e", "modelProto", "LongUtil", "Graph", "fb", "flatbuffers", "ortModel", "i", "opsetId", "Session", "init_session", "__esmMin", "init_backend", "init_execution_plan", "init_instrument", "init_model", "config", "Profiler", "arg", "byteOffset", "length", "backend", "resolveBackend", "Model", "isOrtFormat", "buf", "arr", "modelProtoBlob", "graphInitializer", "ExecutionPlan", "inputs", "inputTensors", "outputTensors", "modelInputNames", "sortedInputs", "sortedInputsIndex", "tensor", "modelInputIndices", "modelValues", "graphInputDims", "i", "graphInput", "graphInputTypes", "givenInputs", "expectedType", "actualType", "noneDimSupported", "expectedDims", "actualDims", "modelOutputNames", "output", "graph", "nodes", "OnnxjsSessionHandler", "init_session_handler_inference", "__esmMin", "init_esm", "init_tensor", "session", "feeds", "_fetches", "_options", "inputMap", "name", "feed", "Tensor", "outputMap", "output", "tensor", "backend_onnxjs_exports", "__export", "onnxjsBackend", "OnnxjsBackend", "init_backend_onnxjs", "__esmMin", "init_session", "init_session_handler_inference", "pathOrBuffer", "options", "session", "Session", "OnnxjsSessionHandler", "init_wasm_utils_env", "__esmMin", "main_exports", "__export", "main_default", "WORKER_NAME", "isProxyWorker", "init_main", "__esmMin", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "ev", "type", "message", "initializeWebAssembly", "initRuntime", "err", "epName", "env", "initEp", "buffer", "bufferData", "copyFromExternalBuffer", "model", "options", "createSession", "sessionMetadata", "releaseSession", "sessionId", "inputIndices", "inputs", "outputIndices", "run", "outputs", "o", "extractTransferableBuffers", "endProfiling", "urlOverride", "scriptSrc", "ort_wasm_simd_threaded_jsep_exports", "__export", "ort_wasm_simd_threaded_jsep_default", "r", "e", "init_ort_wasm_simd_threaded_jsep", "__esmMin", "t", "k", "R", "Y", "P", "a", "D", "F", "B", "I", "u", "U", "f", "G", "c", "d", "b", "g", "m", "p", "h", "v", "n", "Ye", "o", "i", "Qe", "y", "w", "A", "_", "C", "O", "T", "S", "W", "E", "Dn", "x", "wn", "He", "Tr", "Ar", "M", "mn", "Sr", "hn", "On", "Re", "An", "or", "N", "H", "j", "$", "z", "L", "V", "q", "J", "X", "Q", "Z", "K", "rr", "er", "tr", "nr", "ar", "sr", "Er", "Mr", "Rr", "Pr", "Ir", "Ur", "jr", "$r", "Gr", "zr", "Yr", "Lr", "Vr", "qr", "Jr", "Zr", "oe", "se", "me", "he", "ve", "ye", "we", "Ee", "xe", "Me", "ke", "Pe", "Fe", "Be", "Ie", "$e", "rt", "tt", "ot", "le", "st", "ct", "dt", "bt", "gt", "mt", "pt", "ht", "vt", "yt", "wt", "At", "_t", "St", "Wt", "Et", "xt", "Mt", 
"Ht", "Rt", "Pt", "Dt", "Ft", "It", "Ut", "Bt", "jt", "$t", "Lt", "Vt", "Qt", "Zt", "mr", "Kt", "rn", "en", "nn", "un", "fn", "ir", "Hr", "s", "l", "Ke", "ur", "fr", "lr", "pr", "Cr", "_r", "hr", "yr", "cr", "dr", "xn", "En", "_n", "Wn", "br", "gr", "vr", "wr", "Cn", "Or", "Sn", "Mn", "Wr", "xr", "kr", "Nr", "Dr", "Fr", "Br", "Xr", "Qr", "Kr", "re", "ee", "te", "ne", "ae", "ie", "ue", "fe", "ce", "de", "be", "ge", "pe", "yn", "vn", "Ae", "_e", "Ce", "Oe", "Te", "Se", "We", "Ne", "Tn", "De", "dn", "Ue", "gn", "je", "Ge", "ze", "Le", "Ve", "qe", "Je", "Xe", "Ze", "Hn", "bn", "kn", "Rn", "et", "nt", "at", "it", "ut", "ft", "lt", "Ct", "Ot", "Tt", "kt", "Nt", "Gt", "zt", "Yt", "qt", "Jt", "Xt", "tn", "an", "on", "sn", "ln", "cn", "Nn", "pn", "Pn", "scriptSrc", "origin", "isSameOrigin", "preload", "createProxyWorker", "importProxyWorker", "embeddedWasmModule", "importWasmModule", "init_wasm_utils_import", "__esmMin", "init_wasm_utils_env", "filename", "prefixOverride", "baseUrl", "absoluteUrl", "blob", "url", "urlOverride", "isMultiThreaded", "wasm", "initialized", "initializing", "aborted", "isMultiThreadSupported", "isSimdSupported", "initializeWebAssembly", "getInstance", "init_wasm_factory", "__esmMin", "init_wasm_utils_import", "flags", "timeout", "numThreads", "multiThreadSupported", "wasmPaths", "wasmPrefixOverride", "mjsPathOverrideFlag", "mjsPathOverride", "wasmPathOverrideFlag", "wasmPathOverride", "objectUrl", "ortWasmFactory", "importWasmModule", "isTimeout", "tasks", "resolve", "reject", "config", "fileName", "scriptDirectory", "module", "what", "allocWasmString", "iterateExtraOptions", "checkLastError", "init_wasm_utils", "__esmMin", "init_wasm_factory", "data", "allocs", "wasm", "getInstance", "dataLength", "dataOffset", "options", "prefix", "seen", "handler", "key", "value", "name", "message", "stack", "paramsOffset", "errorCode", "errorMessagePointer", "errorMessage", "setRunOptions", "init_run_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "options", "wasm", "getInstance", "runOptionsHandle", "allocs", "runOptions", "tagDataOffset", "allocWasmString", "checkLastError", "iterateExtraOptions", "key", "value", "keyDataOffset", "valueDataOffset", "e", "alloc", "getGraphOptimzationLevel", "getExecutionMode", "appendDefaultOptions", "setExecutionProviders", "setSessionOptions", "init_session_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "graphOptimizationLevel", "executionMode", "options", "session", "ep", "sessionOptionsHandle", "executionProviders", "allocs", "epName", "deviceType", "keyDataOffset", "allocWasmString", "valueDataOffset", "getInstance", "checkLastError", "webgpuOptions", "epNameDataOffset", "wasm", "sessionOptions", "logIdDataOffset", "logSeverityLevel", "logVerbosityLevel", "optimizedModelFilePathOffset", "name", "value", "nameOffset", "iterateExtraOptions", "key", "e", "alloc", "tensorDataTypeStringToEnum", "tensorDataTypeEnumToString", "getTensorElementSize", "tensorTypeToTypedArrayConstructor", "logLevelStringToEnum", "isGpuBufferSupportedType", "dataLocationStringToEnum", "init_wasm_common", "__esmMin", "type", "typeProto", "dateType", "logLevel", "location", "loadFile", "init_wasm_utils_load_file", "__esmMin", "init_wasm_utils_env", "file", "readFile", "createReadStream", "stream", "chunks", "chunk", "response", "contentLengthHeader", "fileSize", "reader", "buffer", "e", "pages", "offset", "done", "value", "chunkSize", "logLevelPrefix", "doLog", "configLogLevel", "debug", "configureLogger", "LOG", "LOG_DEBUG", "init_log", 
"__esmMin", "init_wasm_common", "level", "message", "$configLogLevel", "$debug", "logLevel", "msg", "messageLevel", "logLevelStringToEnum", "configLevel", "args", "createView", "init_tensor_view", "__esmMin", "init_wasm_common", "dataBuffer", "type", "tensorTypeToTypedArrayConstructor", "init_types", "__esmMin", "bucketFreelist", "bucketArr", "calcNormalizedBufferSize", "calcBucketBufferSize", "guid", "createNewGpuDataId", "downloadGpuData", "GpuDataManagerImpl", "createGpuDataManager", "init_gpu_data_manager", "__esmMin", "init_log", "init_types", "size", "idx", "sizeForBucket", "backend", "gpuBuffer", "originalSize", "getTargetBuffer", "bufferSize", "gpuReadBuffer", "commandEncoder", "arrayBuffer", "targetBuffer", "key", "id", "data", "srcArrayBuffer", "srcOffset", "srcLength", "gpuDataCache", "gpuBufferForUploading", "LOG_DEBUG", "sourceId", "destinationId", "sourceGpuDataCache", "destinationGpuDataCache", "buffer", "previousBuffer", "usage", "isStorage", "isUniform", "buffers", "gpuData", "cachedData", "maxInFreeList", "freelist", "capturedBuffers", "storage", "sessionId", "pendingBuffers", "args", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "MatMulUtil", "BroadcastUtil", "ShapeUtil", "PoolConvUtil", "GemmUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", "a", "b", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "i", "aLen", "bLen", "max", "shape", "finalShape", "inputRank", "finalRank", "_ShapeUtil", "dims", "size", "rank", "newDims", "axis", "start", "end", "strides", "tensorRank", "axes", "x", "perm", "v", "pad", "shape1", "shape2", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "dim", "isChannelLast", "autoPad", "outputDims", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "leftShape", "transLeft", "rightShape", "transRight", "biasShape", "M", "K", "N", "kDim", "WORKGROUP_SIZE", "getWgslMappedType", "tensorTypeToWsglStorageType", "tensorTypeToWsglValueType", "createTensorShapeVariables", "getMaxComponents", "fillVector", "castToF32", "sumVector", "getElementAt", "createIndicesHelper", "inputVariable", "outputVariable", "internalVariable", "ShaderHelperImpl", "createShaderHelper", "getBroadcastDims", "init_common", "__esmMin", "init_wasm_common", "init_util", "type", "components", "mappedType", "dims", "programUniforms", "dim", "ShapeUtil", "size", "dataType", "value", "name", "index", "length", "tensorType", "shapeOrRank", "usage", "useUniform", "rank", "rankIdentity", "indicesType", "valueType", "storageType", "normalizeDim", "implementationUsed", "uniformPrefix", "shape", "strides", "o2iSnippet", "i", "offsetToIndicesImplementation", "offsetToIndices", "varOffset", "offsets", "indicesToOffsetImplementation", "indicesToOffset", "varIndices", "indices", "init", "indicesGet", "idx", "indicesSet", "broadcastedIndicesToOffsetImplementation", "broadcastedIndicesToOffset", "output", "implKey", "setByOffset", "offset", "getByOffset", "getByIndicesImplementation", "getImplementation", "functionParams", "dimsParams", "get", "normalizedIndices", "getByIndices", "setByIndicesImplementation", "setImplementation", "impls", "needShapeStrides", "impl", "indicesAndValue", "normalizedDispatchGroup", "limits", "workgroupSize", "workgroupSizeX", "workgroupSizeY", "workgroupSizeZ", "is1DimensionDispatch", "paramList", "globalIdxDefinition", "variable", "bindingIndex", "access", 
"variables", "v", "additionalUniforms", "uniformSnippets", "typeTemp", "uniformWgslTypeToDataType", "u", "dispatchGroup", "inShape", "outShape", "inRank", "a", "validateInputs", "getAdjustedPerm", "getOutputShape", "permFunctionBody", "createTransposeProgramInfo", "transpose", "parseTransposeAttributes", "init_transpose", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "inputRank", "perm", "inputShape", "ShapeUtil", "rank", "input", "output", "reverseFunc", "inputTensor", "permAttr", "inputDataType", "outputShape", "outputVariable", "inputVariable", "getShaderSource", "wgslType", "workgroupSize", "shaderHelper", "outputSize", "createTensorShapeVariables", "context", "attributes", "createAttributeWithCacheKey", "reduceOps", "reduceSharedOps", "reduceInitValues", "reduceOutputValues", "getInnerMostAxes", "computeOutAndReduceShapes", "expandShapeToKeepDim", "areAxesInnerMostDims", "getAxesPermutation", "createReduceSharedProgramInfo", "reduceCommon", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "init_reduce_shared", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_reduce", "init_transpose", "numInnerAxes", "rank", "res", "i", "shape", "axes", "outputShape", "dim", "reduceShape", "expandShape", "shapeIdx", "axis", "name", "shaderCache", "inputs", "reduceType", "outputDataType", "inputShape", "outputSize", "ShapeUtil", "reduceSize", "input", "inputVariable", "output", "outputVariable", "workgroupSize", "sharedMemorySnippet", "shaderHelper", "context", "attributes", "updatedAttributes", "createReduceAttributesFromInputs", "updatedAxes", "_dim", "normalizeAxes", "permutedAxes", "createTransposeProgramInfo", "finalOutputShape", "validateInputs", "noOp", "createReduceProgramInfo", "createReduceAttributesFromInputs", "runReduceProgram", "reduceLogSumNaive", "reduceL1Naive", "reduceL2Naive", "reduceLogSumExpNaive", "reduceMaxNaive", "reduceMeanNaive", "reduceMinNaive", "reduceProdNaive", "reduceSumNaive", "reduceSumSquareNaive", "useNaiveReduceMethod", "reduceMean", "reduceL1", "reduceL2", "reduceLogSumExp", "reduceMax", "reduceMin", "reduceProd", "reduceSum", "reduceSumSquare", "reduceLogSum", "init_reduce", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "init_reduce_shared", "inputs", "input", "name", "shaderCache", "reduceOp", "axesInput", "outputDataType", "keepDims", "noopWithEmptyAxes", "outputShape", "inputShape", "inputRank", "axes", "ShapeUtil", "reduceOnAllAxes", "d", "i", "outputRank", "outputSize", "shaderHelper", "idxCopy", "inputVariable", "output", "outputVariable", "ops", "reduceOps", "k", "l", "createTensorShapeVariables", "attributes", "v", "createAttributeWithCacheKey", "context", "updatedAttributes", "_output", "idxZero", "size", "shape", "reduceSize", "dim", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "validateInputs", "argMin", "argMax", "parseArgMinMaxAttributes", "init_argminmax", "__esmMin", "init_wasm_common", "init_attribute_with_cache_key", "init_reduce", "inputs", "context", "attributes", "argMinMaxOp", "input", "output", "axes", "idxZero", "k", "createReduceProgramInfo", "createAttributeWithCacheKey", "validateAttentionInputs", 
"createInPlaceSoftmaxProgramInfo", "createAttentionProbsProgramInfo", "createVxAttentionScoreProgramInfo", "applyAttention", "prepare", "attention", "init_attention", "__esmMin", "init_wasm_common", "init_types", "init_common", "inputs", "attributes", "input", "weights", "bias", "maskIndex", "past", "relativePositionBias", "batchSize", "sequenceLength", "inputHiddenSize", "qHiddenSize", "kHiddenSize", "vHiddenSize", "sz", "kvSequenceLength", "pastSequenceLength", "totalSequenceLength", "maxSequenceLength", "maskType", "_context", "d", "components", "getMaxComponents", "WG", "dComp", "elementsPerThread", "programUniforms", "dataType", "tensorTypeToWsglStorageType", "f32Type", "tensorTypeToWsglValueType", "getShaderSource", "shaderHelper", "inputHelper", "outputVariable", "uniforms", "context", "q", "key", "pastKey", "parameters", "probsShape", "presentKey", "presentKeyShape", "alpha", "vectorizedHeadSize", "TILE_SIZE", "dispatch", "inputDependencies", "outputs", "qInput", "inputVariable", "kInput", "inputVars", "pastKeyInput", "output", "outputVars", "probs", "v", "pastValue", "params", "nReps", "repeatedVHiddenSize", "presentValue", "presentValueShape", "outputShape", "probsHelper", "vHelper", "k", "_maskIndex", "_past", "outputCount", "inputsK", "inputsV", "M", "K", "N", "outputQ", "outputK", "outputV", "weight", "validateInputs", "createBatchNormInferenceProgramInfo", "parseBatchNormAttributes", "batchNorm", "init_batch_norm", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "checkShapeEqual", "actual", "expected", "message", "r", "v", "i", "shape", "epsilon", "spatial", "format", "yShape", "components", "getMaxComponents", "cComponents", "outputSize", "ShapeUtil", "useShapesUniforms", "shapeOrRank", "x", "inputVariable", "scale", "bias", "inputMean", "inputVar", "y", "outputVariable", "calcCOffset", "cOffset", "getInferenceModeShaderSource", "helper", "createTensorShapeVariables", "createAttributeWithCacheKey", "context", "outputCount", "updatedAttributes", "env", "validateInputs", "createBiasAddProgramInfo", "biasAdd", "init_bias_add", "__esmMin", "init_util", "init_common", "inputs", "outputShape", "channels", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "bias", "residual", "output", "outputVariable", "shaderHelper", "context", "createElementwiseProgramShader", "createElementwiseProgramInfo", "abs", "acos", "acosh", "asin", "asinh", "atan", "atanh", "parseCastAttributes", "cast", "generateClipAttributesFromInputs", "clip", "ceil", "cos", "cosh", "parseAlphaAttributes", "elu", "erfImpl", "erf", "exp", "floor", "gelu", "leakyRelu", "not", "neg", "reciprocal", "relu", "sigmoid", "parseHardSigmoidAttributes", "hardSigmoid", "sin", "sinh", "sqrt", "tan", "tanhExpression", "tanh", "fastGeluImpl", "fastGeluExpression", "fastGelu", "thresholdedRelu", "log", "quickGeluImpl", "quickGeluExpression", "quickgelu", "init_unary_op", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "shaderHelper", "datasize", "inputDataType", "outputDataType", "funcCall", "additionalImplementation", "vecSize", "expression", "input", "inputVariable", "output", "outputVariable", "name", "cacheKey", "ShapeUtil", "inputTensors", "context", "attributes", "createAttributeWithCacheKey", "func", "inputs", "min", "MIN_CLIP", "max", "MAX_CLIP", "clipAttributes", "dataType", "tensorTypeToWsglValueType", "a", "varType", "x", "alpha", "dType", "validateInputs", 
"createBiasSplitGeluProgramInfo", "biasSplitGelu", "init_bias_split_gelu", "__esmMin", "init_util", "init_common", "init_unary_op", "inputs", "outputShape", "input", "inputVariable", "bias", "output", "outputVariable", "outputSize", "ShapeUtil", "dataType", "tensorTypeToWsglStorageType", "shaderHelper", "erfImpl", "context", "createBinaryOpProgramShader", "createBinaryOpProgramInfo", "runBinaryOp", "add", "div", "equal", "mul", "pow", "sub", "greater", "less", "greaterOrEqual", "lessOrEqual", "init_binary_op", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "dimsA", "dimsB", "dimsOutput", "vectorize", "doBroadcast", "sharedDimensionDivisibleBy4", "funcCall", "typeA", "typeB", "typeOutput", "additionalImplementation", "expressionScalar", "expressionVector", "a", "b", "output", "outputVariable", "inputVariable", "assignment", "isAOneElement", "ShapeUtil", "isBOneElement", "aLastDimDivisibleBy4", "bLastDimDivisibleBy4", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "name", "cacheKey", "outputDataType", "isBroadcast", "outputShape", "outputSize", "cacheKeyAux", "calculatedShape", "BroadcastUtil", "sharedDimension", "i", "dimA", "dimB", "createTensorShapeVariables", "context", "type", "validateInputs", "calculateInputIndexImpl", "assignOutputData", "createConcatProgramInfo", "concat", "parseConcatAttributes", "init_concat", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "axis", "referenceIndex", "referenceInput", "inputType", "inputRank", "input", "i", "dim", "numberOfTensors", "sizeInConcatAxisStr", "output", "codeLines", "returnSnippet", "adjustedAxis", "outputShape", "dataType", "outputSize", "ShapeUtil", "sizeInConcatAxis", "inputVars", "previousSum", "inputDependencies", "inputRanks", "programUniforms", "inputVariable", "createTensorShapeVariables", "outputVariable", "indicesAxis", "getShaderSource", "shaderHelper", "context", "attributes", "inputShape", "sum", "nonEmptyInputs", "createAttributeWithCacheKey", "getActivationSnippet", "appendActivationUniformsData", "appendActivationUniforms", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_wasm_common", "init_util", "attributes", "valueType", "baseType", "programUniform", "uniforms", "activation", "alpha", "beta", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "typeSnippet", "biasSnippet", "init_activation_util", "__esmMin", "component", "dataType", "hasBias", "utilFunctions", "init_conv_util", "__esmMin", "strideStr", "writeDataToSubAVec4Snippet", "calculateResultSnippet", "makeMatMulPackedVec4Source", "writeDataToSubASnippet", "readDataFromSubASnippet", "makeMatMulPackedSource", "matMulReadWriteFnSource", "createMatmulProgramInfo", "init_matmul_packed_webgpu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_fuse_utils", "init_activation_util", "transpose", "batchDims", "transposeA", "innerElementSize", "workPerThread", "workgroupSize", "type", "tileInner", "splitK", "splitedDimInner", "tileAOuter", "tileBOuter", "tileAWidth", "tileAHight", "rowPerThreadB", "sequentialAccessByThreads", "rowPerThreadA", "colPerThreadA", "matmulSnippet", "component", "hasBias", "applyActivation", "variables", "batchShapes", "isChannelsLast", "batchAShape", "batchBShape", "batchShape", "batchVariable", "aVariable", "bVariable", "outputVariable", "broadCastADims", "getBroadcastDims", "broadCastBDims", "dataType", "tensorTypeToWsglStorageType", "getAIndices", "aRank", "batchRank", "resStr", "i", "j", 
"getBIndices", "bRank", "typeSnippet", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "aShape", "bShape", "outerDimsA", "outerDimsB", "outerDims", "batchSize", "ShapeUtil", "dimAOuter", "dimInner", "dimBOuter", "isVec4", "elementsPerThread", "dispatch", "components", "aShapeTemp", "bShapeTemp", "outputShapeTemp", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "internalVariable", "A", "inputVariable", "B", "output", "inputVariables", "biasComponents", "uniforms", "appendActivationUniforms", "baseType", "getActivationSnippet", "declareFunctions", "conv2dCommonSnippet", "createConv2DMatMulProgramInfo", "init_conv2d_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "fitAOuter", "fitBOuter", "fitInner", "addBias", "attributes", "innerElementSizeX", "innerElementSizeW", "innerElementSize", "dataType", "getXSnippet", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readXSnippet", "typeSnippet", "sampleX", "sampleW", "resType", "aType", "bType", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileAOuter", "tileBOuter", "tileInner", "elementsSize", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "uniforms", "appendActivationUniforms", "components", "t", "tensorTypeToWsglStorageType", "declareFunctions", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "bias", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "arrayProduct", "parse3TupleParam", "getEffectiveFilterSize", "computeDefaultPad", "computeOutputShape4D", "get3DPadAndOutInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "init_conv3d_naive_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "arr", "product", "i", "param", "filterSize", "dilation", "inputShape", "fieldSize", "stride", "effectiveFieldSize", "inShape", "filterShape", "outChannels", "strides", "zeroPad", "outShape", "index", "pad", "inDepth", "inHeight", "inWidth", "strideDepth", "strideHeight", "strideWidth", "filterDepth", "filterHeight", "filterWidth", "padInfo", "outDepth", "outHeight", "outWidth", "val", "_", "padAlongDepth", "padAlongHeight", "padAlongWidth", "front", "back", "top", "bottom", "left", "right", "dilations", "depthwise", "dataFormat", "batchSize", "inChannels", "filterChannels", "dilationDepth", "dilationHeight", "dilationWidth", "effectiveFilterDepth", "effectiveFilterHeight", "effectiveFilterWidth", "inputs", "attributes", "outputShape", "filterDims", "pads", "isChannelsLast", "isVec4", "workGroupSize", "dispatchLayout", "dispatch", "d", "LOG_DEBUG", "innerElementSize", "outputSize", "ShapeUtil", "programUniforms", "createTensorShapeVariables", "inputDependencies", "hasBias", "getShaderSource", "shaderHelper", "uniforms", "components", "t", "tensorTypeToWsglStorageType", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "declareFunctions", "bias", "getElementAt", "createGroupedConvProgramInfo", 
"createGroupedConvVectorizeProgramInfo", "init_conv_grouped", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_conv", "init_fuse_utils", "inputs", "attributes", "squeezeOutputShapeFunction", "hasBias", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "isChannelLast", "outputShape", "calculateOutputShape", "outputSize", "ShapeUtil", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "x", "inputVariable", "w", "inputVars", "uniforms", "appendActivationUniforms", "components", "getMaxComponents", "outputNumber", "outputShapeInShader", "xNumber", "createNaiveMatmulProgramInfo", "validateInputs", "matMul", "init_matmul", "__esmMin", "init_wasm_common", "init_util", "init_matmul_packed_webgpu", "init_common", "init_fuse_utils", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "isChannelsLast", "aShape", "bShape", "M", "N", "K", "components", "getMaxComponents", "aComponents", "outputNumber", "outputSize", "ShapeUtil", "hasBias", "outerDims", "outputShapeInShader", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "batchDims", "internalVariable", "a", "inputVariable", "b", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "inputVariables", "processBias", "biasComponents", "outerDimsA", "outerDimsB", "broadCastADims", "getBroadcastDims", "broadCastBDims", "uniforms", "appendActivationUniforms", "getIndices", "variable", "broadCastDims", "rank", "name", "batchRank", "resStr", "i", "j", "calcResult", "calcStr", "context", "BroadcastUtil", "createMatmulProgramInfo", "calculateOutputShape", "weightTransposeAttribute", "validateInputs", "getAdjustedConvAttributes", "parseConvAttributes", "conv2d", "conv1d", "conv3d", "conv", "init_conv", "__esmMin", "init_util", "init_conv2d_mm_webgpu", "init_conv3d_naive_webgpu", "init_matmul_packed_webgpu", "init_conv_grouped", "init_fuse_utils", "init_matmul", "init_transpose", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "isChannelLast", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputShape", "inputs", "attributes", "dataChannel", "filterInChannel", "pads", "PoolConvUtil", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "autoPad", "group", "wIsConst", "context", "adjustedAttributes", "isChannelsLast", "transposedWeight", "createTransposeProgramInfo", "convInputs", "createGroupedConvVectorizeProgramInfo", "createGroupedConvProgramInfo", "hasBias", "inputHeight", "inputWidth", "inputChannels", "weightHeight", "weightWidth", "outHeight", "outWidth", "sameSize", "batch", "xReshaped", "wReshaped", "matmulOutputShape", "matmulInputs", "sharedDim", "N", "K", "createNaiveMatmulProgramInfo", "createMatmulProgramInfo", "sequentialAccessByThreads", "dimAOuter", "dimBOuter", "dimInner", "createConv2DMatMulProgramInfo", "convInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "conv2dTransposeCommonSnippet", "createConv2DTransposeMatMulProgramInfo", "init_conv_backprop_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "addBias", "attributes", "type", 
"innerElementSize", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readASnippet", "sampleA", "sampleW", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileInner", "components", "filterDims", "effectiveFilterDims", "pads", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "x", "inputVariable", "w", "output", "outputVariable", "inputVariables", "declareFunctions", "bias", "uniforms", "appendActivationUniforms", "elemType", "tensorTypeToWsglStorageType", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "createConvTranspose2DOpProgramShaderSource", "createConvTranspose2DProgramInfo", "init_conv_backprop_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "shaderHelper", "inputs", "outputShape", "hasBias", "is1DimensionDispatch", "isVec4", "dataType", "uniforms", "isChannelsLast", "rowDim", "colDim", "channelDim", "workPerThread", "declareFunctions", "components", "w", "inputVariable", "dy", "inputVariables", "output", "outputVariable", "codeSnippet4", "codeSnippet", "attributes", "squeezeOutputShapeFunction", "outputSize", "ShapeUtil", "dispatch", "LOG_DEBUG", "inputDependencies", "strides", "filterDims", "dilations", "effectiveFilterDims", "pads", "group", "wShape", "inputChannelsPerGroup", "outputChannelsPerGroup", "programUniforms", "createTensorShapeVariables", "getShaderSource", "tensorTypeToWsglStorageType", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "weightTransposePerm", "convTranspose2d", "convTranspose1d", "convTranspose", "init_conv_transpose", "__esmMin", "init_conv_backprop_mm_webgpu", "init_conv_backprop_webgpu", "init_fuse_utils", "init_transpose", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "group", "strides", "isChannelLast", "outputPadding", "outputShape", "spatialRank", "updateOutputShape", "i", "batchSize", "outChannels", "j", "inSize", "attributes", "inputs", "a", "b", "isChannelsLast", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "wIsConst", "dataChannel", "filterInChannel", "featureMaps", "context", "adjustedAttributes", "inputChannels", "createConvTranspose2DProgramInfo", "outHeight", "outWidth", "weightHeight", "weightWidth", "dimAOuter", "dimBOuter", "dimInner", "sequentialAccessByThreads", "transposedWeight", "createTransposeProgramInfo", "convTransposeInputs", "hasBias", "createConv2DTransposeMatMulProgramInfo", "createCumsumProgramInfo", "cumsum", "parseCumSumAttributes", "init_cumsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputType", "inputShape", "axisInput", "attributes", "outputSize", "ShapeUtil", "rank", "input", "inputVariable", "output", "outputVariable", "axisValue", "axis", "getShaderSource", "shaderHelper", "index", "max", "getElementAt", "lowerLimit", "upperLimit", "createTensorShapeVariables", "context", "exclusive", "reverse", "createAttributeWithCacheKey", "validateInputs", 
"permFunctionBody", "createDepthToSpaceProgramInfo", "depthToSpace", "parseDepthToSpaceAttributes", "init_depth_to_space", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "perm", "rank", "input", "output", "reverseFunc", "inputTensor", "attributes", "h", "w", "c", "shape", "isChannelLast", "blocksize", "isDCRmode", "reshapedInputTensor", "reshapedInputRank", "inputDataType", "reshapedInput", "inputVariable", "permedOutput", "outputVariable", "getShaderSource", "shaderHelper", "outputShape", "outputSize", "ShapeUtil", "shapeBeforePerm", "shapeAfterPerm", "createTensorShapeVariables", "context", "createAttributeWithCacheKey", "symbolPattern", "termPattern", "termPatternOnly", "lhsPattern", "lhsPatternOnly", "EinsumTerm", "EinsumEquation", "appendMax", "createEinsumProgramInfo", "einsum", "parseEinsumAttributes", "init_einsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputIndex", "symbol", "index", "value", "inputs", "equation", "lhs", "rhs", "inputTerm", "dims", "einsumTerm", "sym", "info", "dimValue", "term", "isInput", "rank", "ellipsis", "ellipsisDims", "nextDim", "indexSymbols", "i", "ellipsisDimLength", "j", "name", "inputShapes", "dataType", "einsumEquation", "outputShape", "inputVars", "inputVariable", "outputSize", "ShapeUtil", "output", "outputVariable", "uniformsSymbols", "getShaderSource", "shaderHelper", "idxCopy", "initProd", "initSum", "updateSum", "reduceOpsSetIndices", "reduceOpsLoopHeaders", "reduceOpsLoopFooters", "reduceOpCompute", "isReduceOpsWithoutLoop", "outputIndex", "indices", "reduceOps", "inputVar", "_var", "programUniformsInit", "programUniforms", "_", "createTensorShapeVariables", "acc", "inputProgramUniforms", "context", "attributes", "input", "createAttributeWithCacheKey", "validateInputs", "getAdjustedShape", "calculateOutputShape", "createExpandProgramInfo", "expand", "init_expand", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "inputShape", "shape", "shapeIndex", "inputShapeIndex", "shape1", "shape2", "diff", "i", "outputShape", "dataType", "components", "outputSize", "ShapeUtil", "getShaderSource", "shaderHelper", "input", "inputVariable", "output", "outputVariable", "assignment", "singleAssignment", "resStr", "x", "typeCast", "programUniforms", "createTensorShapeVariables", "context", "createFastGeluProgramInfo", "fastGelu", "init_fast_gelu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_unary_op", "inputTensors", "dataType", "outputSize", "ShapeUtil", "biasLength", "useVec4", "getShaderSource", "shaderHelper", "x", "inputVariable", "bias", "y", "outputVariable", "uniforms", "singleElementBias", "i", "biasGetExpression", "fastGeluImpl", "tensorTypeToWsglValueType", "WORKGROUP_SIZE", "fastGeluExpression", "inputs", "context", "validateInputs", "createGatherProgramInfo", "parseGatherAttributes", "gather", "init_gather", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "indicesShape", "inputRank", "axis", "ShapeUtil", "outputShape", "axisDimLimit", "components", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "data", "inputVariable", "indices", "output", "outputVariable", "calcDataIndices", "x", "indicesRank", "calcStr", "i", "j", "assignment", "singleAssignment", "resStr", "typeCast", "createAttributeWithCacheKey", "context", "validateInputs", "createGatherElementsProgramInfo", 
"parseGatherElementsAttributes", "gatherElements", "init_gather_elements", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "inputOutputDataType", "inputRank", "indicesShape", "indicesDataType", "axis", "ShapeUtil", "axisDimLimit", "outputShape", "outputSize", "input", "inputVariable", "indices", "output", "outputVariable", "programUniforms", "createTensorShapeVariables", "shaderHelper", "createAttributeWithCacheKey", "context", "validateInputs", "createGemmProgramInfo", "parseGemmAttributes", "gemm", "init_gemm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "aShape", "bShape", "M", "N", "K", "GemmUtil", "outputShape", "outputSize", "ShapeUtil", "programUniforms", "inputDependencies", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "line", "calculateAlpha", "a", "inputVariable", "b", "dataType", "c", "variables", "output", "outputVariable", "uniforms", "transA", "transB", "alpha", "beta", "context", "getInput", "validateInputs", "parseMultiHeadAttentionAttributes", "weightTransposeAttribute", "addBiasTranspose", "maybeTransposeToBNSHAndAddBias", "multiHeadAttention", "init_multihead_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_types", "init_attention", "init_common", "init_transpose", "inputs", "i", "ShapeUtil", "attributes", "query", "key", "value", "bias", "keyPaddingMask", "relativePositionBias", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "qkvFormat", "maskType", "maskDims", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "createAttributeWithCacheKey", "context", "qkv", "biasOffset", "outputShape", "outputSize", "programUniforms", "getShaderSource", "shaderHelper", "output", "outputVariable", "qkvInput", "inputVariable", "biasInput", "uniforms", "numHeads", "input", "reshapedInput", "createTransposeProgramInfo", "params", "kvBNSH", "Q", "applyAttention", "K", "V", "getRepeats", "validateInputs", "getOutputShape", "createTileProgramInfo", "tile", "init_tile", "__esmMin", "init_wasm_common", "init_util", "init_common", "repeatsTensorView", "inputs", "inputShape", "repeats", "outputShape", "i", "shape", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "output", "outputVariable", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "validateInputs", "createConcatProgramInfo", "parseGroupQueryAttentionAttributes", "weightTransposeAttribute", "maybeExpandAndTransposeToBNSH", "groupQueryAttention", "init_group_query_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_attention", "init_common", "init_multihead_attention", "init_tile", "init_transpose", "inputs", "attributes", "query", "key", "value", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "hasPastKey", "hasPastValue", "isPastkvBSNH", "qkvFormat", "maskType", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "a", "b", "dataType", "params", "outputShape", "component", "outputSize", "ShapeUtil", "presentSequenceLength", "output", "outputVariable", "inputA", "inputVariable", "inputB", "H", "dispatch", "inputDependencies", "programUniforms", "createTensorShapeVariables", "uniforms", "pastStr", 
"newStr", "concatStr", "getShaderSource", "shaderHelper", "createAttributeWithCacheKey", "context", "input", "pastKV", "outputIndex", "reshapedInput", "numHeads", "nReps", "createTileProgramInfo", "createTransposeProgramInfo", "Q", "maybeTransposeToBNSHAndAddBias", "K", "V", "applyAttention", "createInstanceNormProgramInfo", "computeMean", "createInstanceNormNHWCProgramInfo", "instanceNorm", "init_instance_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "xShape", "outputShape", "axis", "normCount", "ShapeUtil", "normSize", "components", "getMaxComponents", "normPackedSize", "inputShape", "inputDependencies", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "x", "inputVariable", "scale", "bias", "output", "outputVariable", "variables", "dataType", "f32Type", "workgroupSize", "uniforms", "sumVector", "context", "input", "n", "h", "c", "epsilon", "WG", "outputType", "sumCastType", "setOutputValue", "var1", "var2", "unitsOfWork", "wgSize", "meanInputDependencies", "meanProgramUniforms", "getMeanShaderSource", "inputHelper", "fillVector", "meanValues", "scaleHelper", "biasHelper", "N", "C", "H", "outputSize", "channelScaleShift", "tensorTypeToWsglStorageType", "scaleType", "scaleCastType", "outputHelper", "validateInputs", "createLayerNormProgramInfo", "layerNorm", "init_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "outputCount", "simplified", "xShape", "scale", "bias", "outputShape", "axis", "ShapeUtil", "normCount", "normSize", "scaleSize", "biasSize", "meanInvStdDevDim", "i", "components", "getMaxComponents", "inputDependencies", "programUniforms", "hasMeanDataOutput", "hasInvStdOutput", "getShaderSource", "shaderHelper", "dataType", "tensorTypeToWsglStorageType", "variables", "inputVariable", "outputVariable", "uniforms", "fillVector", "castToF32", "sumVector", "outputs", "context", "validateInputs", "createMatMulNBitsProgramInfo", "matMulNBits", "parseMatMulNBitsAttributes", "init_matmulnbits", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "a", "aRank", "nBlocksPerCol", "blobSize", "b", "ShapeUtil", "scalesShape", "zeroPointsShape", "expectedZeroPointsSize", "maxComputeWorkgroupSizes", "maxComputeWorkgroupStorageSize", "inputShape", "dimAOuter", "dimInner", "dimBOuter", "batchDims", "batchSize", "blobSizeInWords", "dataType", "outputNumber", "getMaxComponents", "aComponents", "bComponents", "elementSize", "getTensorElementSize", "workgroupOutputSize", "maxNumberOfComponents", "useBlockwiseMatMulNBits", "components", "outputShape", "outputSize", "programUniforms", "inputShapeTemp", "bShape", "createTensorShapeVariables", "outputShapeTemp", "getShaderSource", "shaderHelper", "inputRank", "inputVariable", "scales", "inputVariables", "zeroPoints", "outputRank", "output", "outputVariable", "uniforms", "tensorTypeToWsglStorageType", "qDqDataType", "processOneBlock", "_", "i", "updateZeroPointIndex", "context", "createAttributeWithCacheKey", "validateInputs", "getPadConstant", "getPadReflect", "getPadEdge", "getPadWrap", "getPadSnippet", "createPadProgramInfo", "createPadAttributesFromInputs", "pad", "init_pad", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "validPads", "output", "inputRank", "padsLength", "block", "i", "getElementAt", "attributes", "outputShape", "ShapeUtil", "inputDims", "outputSize", "programUniforms", "createTensorShapeVariables", "inputDependencies", 
"getShaderSource", "shaderHelper", "outputVariable", "input", "inputVariable", "dataType", "padSnippet", "uniforms", "bigInt64Pads", "value", "updatePads", "axes", "v", "pads", "context", "updatedAttributes", "validateInputs", "getAdjustedPoolAttributesAndOutputShape", "getUniformAndPadInfo", "generatePoolingCode", "createShaderKeyFromAttributes", "createAveragePoolShaderKeyFromAttributes", "createMaxPoolShaderKeyFromAttributes", "parsePoolCommonAttributes", "createAveragePoolProgramInfo", "parseAveragePoolAttributes", "averagePool", "globalPoolAttributes", "parseGlobalAveragePoolAttributes", "globalAveragePool", "createMaxPoolProgramInfo", "maxPool", "parseMaxPoolAttributes", "parseGlobalMaxPoolAttributes", "globalMaxPool", "init_pool", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_common", "inputs", "env", "input", "attributes", "isGlobalOperator", "isChannelsLast", "inputShapeAsChannelFirst", "hasDilations", "kernelShape", "strides", "dilations", "pads", "PoolConvUtil", "outputShapeAsChannelFirst", "newAttributes", "outputShapeAsChannelLast", "outputShape", "outputSize", "ShapeUtil", "kernelSize", "programUniforms", "uniforms", "kw", "sw", "pwStart", "pwEnd", "pwStartEndNotZero", "phStartEndNotZero", "kh", "sh", "phStart", "phEnd", "kernelStrides", "hasPads", "sum", "cur", "shaderHelper", "x", "rank", "outputShapeRank", "op1", "op2", "start", "dataType", "output", "outputVariable", "codeW", "codeH", "codeHEnd", "dimIdxW", "dimIdxH", "stridesRank", "padsRank", "padCode", "getElementAt", "name", "adjustedAttributes", "inputVariable", "createTensorShapeVariables", "inputDependencies", "countIncludePad", "attr", "averagePoolAttributes", "context", "format", "storageOrder", "maxPoolAttributes", "validateInputsContent", "createRangeProgramInfo", "range", "init_range", "__esmMin", "init_esm", "init_wasm_common", "init_common", "start", "limit", "delta", "sameStartLimit", "increasingRangeNegativeStep", "decreasingRangePositiveStep", "dataType", "numElements", "outputShape", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "output", "outputVariable", "wgslType", "uniforms", "context", "env", "validateScales", "updateScales", "validateInputs", "getOriginalCoordinateFromResizedCoordinate", "getNearestPixelFromOriginal", "updateRoI", "initOutputShape", "adjustOutputShape", "calculateOriginalIndicesFromOutputIndices", "calculateInputIndicesFromOutputIndices", "checkInputIndices", "setChannelAndBatchIndices", "bilinearInterpolation", "bicubicInterpolation", "trilinearInterpolation", "createResizeProgramInfo", "getOpsetVersionFromCustomDataBuffer", "resize", "parseResizeAttributes", "init_resize", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "scales", "attributes", "value", "axes", "rank", "newScales", "index", "inputs", "opsetVersion", "sizes", "roi", "roiInputIndex", "scalesInputIndex", "sizesInputIndex", "coordinateTransferMode", "dType", "nearestMode", "roiTmp", "roiLocal", "v", "i", "inputShape", "outputShape", "scaleInPolicy", "adjustedOutputShape", "output", "scalesLength", "roiLength", "getElementAt", "input", "useExtrapolation", "channelIdx", "batchIdx", "spacialDims", "extrapolationValue", "heightIdx", "widthIdx", "cubicCoeffA", "excludeOutside", "is2D", "isNchw", "createCubicInterpolationFunction", "idx", "direction", "depthIdx", "inputTensor", "scalesInput", "roiInput", "outputVariable", "inputVariable", "outputSize", "ShapeUtil", "noScale", "d", "dataType", "getShaderSource", 
"shaderHelper", "createTensorShapeVariables", "context", "customDataBuffer", "antialias", "coordinateTransformMode", "keepAspectRatioPolicy", "mode", "createAttributeWithCacheKey", "validateInputs", "createRotaryEmbeddingProgramInfo", "rotaryEmbedding", "init_rotary_embedding", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "input", "positionIds", "cosCache", "sinCache", "numHeads", "rotaryEmbeddingDim", "ShapeUtil", "batchSize", "sequenceLength", "maxSequenceLength", "hiddenSize", "headSize", "interleaved", "scale", "batchStride", "halfRotaryEmbeddingDim", "globalShape", "globalStrides", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "inputVariable", "output", "outputVariable", "WORKGROUP_SIZE", "createAttributeWithCacheKey", "context", "validateInputs", "createSkipLayerNormProgramInfo", "skipLayerNorm", "init_skip_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "input", "skip", "gamma", "hiddenSize", "sequenceLength", "beta", "bias", "attributes", "outputCount", "isTraining", "simplified", "inputShape", "inputSize", "ShapeUtil", "outputShape", "outputSize", "meanInvStdDevDim", "hasBetaInput", "hasBiasInput", "hasMeanOutput", "hasInvStdDevOutput", "hasInputSkipBiasSumOutput", "workgroupSize", "components", "getMaxComponents", "programUniforms", "getShaderSource", "shaderHelper", "uniformsArray", "variables", "inputVariable", "outputVariable", "dataType", "tensorTypeToWsglStorageType", "vecDataType", "castToF32", "sumVector", "outputs", "_input", "_index", "context", "validateInputs", "readInput", "createSliceAttributesFromInputs", "fixStartEndValues", "calculateInputIndicesImpl", "createSliceProgramInfo", "slice", "parseSliceAttributes", "init_slice", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "_", "idx", "input", "v", "starts", "ends", "axes", "createAttributeWithCacheKey", "value", "index", "inputShape", "steps", "newValue", "output", "getElementAt", "inputSize", "ShapeUtil", "step", "start", "i", "end", "signs", "array", "numSteps", "newEnd", "newStart", "outputShape", "axis", "outputTensorInfo", "outputVariable", "inputVariable", "outputSize", "uniforms", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "validateInputs", "createSoftmaxProgramInfo", "softmax", "parseSoftmaxAttributes", "init_softmax", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "input", "attributes", "shape", "outputSize", "ShapeUtil", "WG", "axis", "cols", "rows", "components", "getMaxComponents", "packedCols", "maxVector", "name", "x", "inputVariable", "output", "outputVariable", "valueType", "threadMaxDecl", "tensorTypeToWsglStorageType", "getShaderSource", "shaderHelper", "sumVector", "context", "createAttributeWithCacheKey", "validateInputs", "createSplitAttributesFromInputs", "calculateOutputIndexImpl", "writeBufferDataImpl", "createSplitProgramInfo", "split", "parseSplitAttributes", "init_split", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "splitSizes", "numOutputs", "v", "createAttributeWithCacheKey", "numberOfTensors", "getElementAt", "outputs", "codeLines", "i", "returnSnippet", "inputShape", "inputSize", "ShapeUtil", "dataType", "axis", "input", "inputVariable", "sizeInSplitAxis", "outputsTensorInfo", 
"outputShapes", "previousSum", "programUniforms", "outputShape", "outputVariable", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "createWhereOpProgramShader", "createWhereOpProgramInfo", "where", "init_where", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "inputs", "dimsOutput", "isBroadcast", "typeOutput", "output", "outputVariable", "a", "inputVariable", "b", "c", "assignment", "expression", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "expressionC", "dimsA", "dimsB", "dimsC", "outputDataType", "ShapeUtil", "outputShape", "outputSize", "calculatedShape", "BroadcastUtil", "vecSize", "createTensorShapeVariables", "context", "WEBGPU_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_argminmax", "init_attention", "init_batch_norm", "init_bias_add", "init_bias_split_gelu", "init_binary_op", "init_concat", "init_conv", "init_conv_transpose", "init_cumsum", "init_depth_to_space", "init_einsum", "init_expand", "init_fast_gelu", "init_gather", "init_gather_elements", "init_gemm", "init_group_query_attention", "init_instance_norm", "init_layer_norm", "init_matmul", "init_matmulnbits", "init_multihead_attention", "init_pad", "init_pool", "init_range", "init_reduce", "init_resize", "init_rotary_embedding", "init_skip_layer_norm", "init_slice", "init_softmax", "init_split", "init_tile", "init_transpose", "init_unary_op", "init_where", "abs", "acos", "acosh", "add", "argMax", "parseArgMinMaxAttributes", "argMin", "asin", "asinh", "atan", "atanh", "attention", "averagePool", "parseAveragePoolAttributes", "batchNorm", "biasAdd", "biasSplitGelu", "cast", "parseCastAttributes", "ceil", "clip", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "cosh", "cumsum", "parseCumSumAttributes", "depthToSpace", "parseDepthToSpaceAttributes", "div", "einsum", "parseEinsumAttributes", "elu", "parseAlphaAttributes", "equal", "erf", "exp", "expand", "fastGelu", "floor", "gather", "parseGatherAttributes", "gatherElements", "parseGatherElementsAttributes", "gelu", "gemm", "parseGemmAttributes", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "parseGlobalMaxPoolAttributes", "greater", "greaterOrEqual", "groupQueryAttention", "parseGroupQueryAttentionAttributes", "hardSigmoid", "parseHardSigmoidAttributes", "instanceNorm", "layerNorm", "leakyRelu", "less", "lessOrEqual", "log", "matMul", "matMulNBits", "parseMatMulNBitsAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "multiHeadAttention", "parseMultiHeadAttentionAttributes", "neg", "not", "pad", "pow", "quickgelu", "range", "reciprocal", "reduceMin", "reduceMean", "reduceMax", "reduceSum", "reduceProd", "reduceL1", "reduceL2", "reduceLogSum", "reduceLogSumExp", "reduceSumSquare", "relu", "resize", "parseResizeAttributes", "rotaryEmbedding", "sigmoid", "sin", "sinh", "slice", "parseSliceAttributes", "skipLayerNorm", "split", "parseSplitAttributes", "sqrt", "softmax", "parseSoftmaxAttributes", "sub", "tan", "tanh", "thresholdedRelu", "tile", "transpose", "parseTransposeAttributes", "where", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_log", "init_common", "backend", "key", "artifact", "buildArtifact", "inputs", "outputs", "dispatchGroup", "uniformBufferBinding", "TRACE_FUNC_BEGIN", "device", "computePassEncoder", "entries", "input", "output", "bindGroup", "commandInfo", "TRACE_FUNC_END", "programInfo", 
"normalizedDispatchGroupSize", "extensions", "shaderHelper", "createShaderHelper", "userCode", "code", "shaderModule", "LOG_DEBUG", "computePipeline", "x", "y", "z", "limitPerDimension", "size", "dispatchAverage", "getProgramInputTensorInfoDependencyKey", "getProgramInfoUniqueKey", "AdapterInfoImpl", "WebGpuBackend", "init_backend_webgpu", "__esmMin", "init_esm", "init_wasm_common", "init_log", "init_tensor_view", "init_gpu_data_manager", "init_op_resolve_rules", "init_program_manager", "inputTensors", "inputDependencies", "inputInfos", "i", "type", "rank", "dims", "programInfo", "is1DimensionDispatch", "key", "adapterInfo", "architecture", "vendor", "data", "env", "adapter", "requiredFeatures", "deviceDescriptor", "createGpuDataManager", "ProgramManager", "configureLogger", "ev", "commandEncoder", "computePassDescriptor", "TRACE_FUNC_BEGIN", "queryReadBuffer", "mappedData", "pendingKernels", "pendingKernelInfo", "kernelId", "kernelInfo", "kernelType", "kernelName", "programName", "inputTensorViews", "outputTensorViews", "startTimeU64", "endTimeU64", "startTime", "endTime", "value", "tensorDataTypeEnumToString", "inputShapes", "outputShapes", "TRACE", "TRACE_FUNC_END", "program", "outputIndices", "createKernelOutput", "createIntermediateOutput", "outputCount", "inputDatas", "gpuData", "outputs", "dispatchGroup", "programUniforms", "validatedOutputIndices", "_", "outputDatas", "isTemporary", "isPersistent", "tensorView", "persistentData", "uniformBufferBinding", "currentOffset", "offsets", "v", "sizeOfElement", "sizeOfVecOrMat", "baseAlignment", "elementPerVecOrMat", "maxAlignmentOfField", "arrayBuffer", "offset", "uniformBufferData", "normalizedDispatchGroup", "artifact", "LOG_DEBUG", "uniform", "actualType", "actualLength", "length", "gpuDataId", "src", "dst", "getTargetBuffer", "size", "ptr", "attribute", "op", "WEBGPU_OP_RESOLVE_RULES", "context", "errors", "kernel", "kernelEntry", "attributes", "useErrorScope", "e", "err", "sessionId", "index", "buffer", "sessionInputOutputMapping", "previousBuffer", "id", "bufferInfo", "gpuBuffer", "downloadGpuData", "createView", "sessionCommandList", "sessionPendingKernels", "computePassEncoder", "command", "init_exports", "__export", "init", "TensorViewImpl", "ComputeContextImpl", "init_init", "__esmMin", "init_wasm_common", "init_backend_webgpu", "init_log", "init_util", "_TensorViewImpl", "module", "dataType", "data", "dims", "elementCount", "ShapeUtil", "newDims", "backend", "contextDataOffset", "heapU32", "dataIndex", "inputCount", "inputs", "i", "dim", "d", "program", "inputsOutputsMapping", "mappedInputs", "outputIndices", "createKernelOutput", "index", "createTemporaryOutput", "elementSize", "getTensorElementSize", "bufferSize", "gpuDataId", "stack", "offset", "e", "name", "env", "gpuAdapter", "jsepInit", "WebGpuBackend", "size", "ptr", "src", "dst", "isSourceGpu", "LOG_DEBUG", "dataOffset", "kernelType", "kernelId", "attribute", "kernel", "sessionHandle", "errors", "context", "initOrt", "initRuntime", "initEp", "activeSessions", "getSessionInputOutputCount", "copyFromExternalBuffer", "createSession", "releaseSession", "prepareInputOutputTensor", "run", "endProfiling", "extractTransferableBuffers", "init_wasm_core_impl", "__esmMin", "init_run_options", "init_session_options", "init_wasm_common", "init_wasm_factory", "init_wasm_utils", "init_wasm_utils_load_file", "numThreads", "loggingLevel", "getInstance", "checkLastError", "env", "logLevelStringToEnum", "epName", "initJsep", "adapter", "powerPreference", "forceFallbackAdapter", 
"sessionHandle", "wasm", "stack", "dataOffset", "model", "modelDataOffset", "modelData", "options", "modelDataLength", "sessionOptionsHandle", "ioBindingHandle", "allocs", "inputNamesUTF8Encoded", "outputNamesUTF8Encoded", "setSessionOptions", "loadingPromises", "file", "path", "loadFile", "data", "provider", "webnnOptions", "context", "gpuDevice", "deviceType", "inputCount", "outputCount", "enableGraphCapture", "inputNames", "outputNames", "outputPreferredLocations", "i", "name", "nameString", "location", "bindingState", "l", "dataLocationStringToEnum", "e", "buf", "alloc", "sessionId", "session", "ioBindingState", "tensor", "tensorHandles", "index", "dataType", "dims", "rawData", "dataByteLength", "gpuBuffer", "elementSizeInBytes", "getTensorElementSize", "tensorDataTypeStringToEnum", "a", "b", "registerBuffer", "dataIndex", "allocWasmString", "dimsOffset", "dimIndex", "d", "inputIndices", "inputTensors", "outputIndices", "outputTensors", "inputOutputBound", "runOptionsHandle", "runOptionsAllocs", "inputTensorHandles", "outputTensorHandles", "inputOutputAllocs", "beforeRunStack", "inputValuesOffset", "inputNamesOffset", "outputValuesOffset", "outputNamesOffset", "setRunOptions", "inputValuesIndex", "inputNamesIndex", "outputValuesIndex", "outputNamesIndex", "handle", "outputPreferredLocationsEncoded", "errorCode", "output", "beforeGetTensorDataStack", "tensorDataOffset", "keepOutputTensor", "type", "tensorDataIndex", "dimsLength", "size", "tensorDataTypeEnumToString", "preferredLocation", "stringData", "offset", "maxBytesToRead", "getBuffer", "elementSize", "isGpuBufferSupportedType", "typedArrayConstructor", "tensorTypeToTypedArrayConstructor", "v", "p", "profileFileName", "tensors", "buffers", "isProxy", "proxyWorker", "initializing", "initialized", "aborted", "temporaryObjectUrl", "initWasmCallbacks", "queuedCallbacks", "enqueueCallbacks", "ensureWorker", "onProxyWorkerMessage", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "copyFromExternalBuffer", "createSession", "releaseSession", "run", "endProfiling", "init_proxy_wrapper", "__esmMin", "init_esm", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "env", "type", "callbacks", "queue", "ev", "resolve", "reject", "importProxyWorker", "objectUrl", "worker", "message", "e", "initializeWebAssembly", "initRuntime", "epName", "initEp", "buffer", "model", "options", "transferable", "sessionId", "inputIndices", "inputs", "outputIndices", "outputs", "t", "serializableInputs", "extractTransferableBuffers", "encodeTensorMetadata", "decodeTensorMetadata", "OnnxruntimeWebAssemblySessionHandler", "init_session_handler_inference", "__esmMin", "init_esm", "init_proxy_wrapper", "init_wasm_common", "init_wasm_utils_env", "init_wasm_utils_load_file", "tensor", "getName", "Tensor", "dataType", "isGpuBufferSupportedType", "gpuBuffer", "download", "dispose", "path", "copyFromExternalBuffer", "loadFile", "pathOrBuffer", "options", "TRACE_FUNC_BEGIN", "model", "createSession", "TRACE_FUNC_END", "releaseSession", "feeds", "fetches", "inputArray", "inputIndices", "kvp", "name", "index", "outputArray", "outputIndices", "inputs", "t", "i", "outputs", "results", "run", "resultMap", "endProfiling", "initializeFlags", "OnnxruntimeWebAssemblyBackend", "init_backend_wasm", "__esmMin", "init_esm", "init_proxy_wrapper", "init_session_handler_inference", "init_wasm_utils_import", "env", "numCpuLogicalCores", "backendName", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "pathOrBuffer", "options", "handler", 
"OnnxruntimeWebAssemblySessionHandler", "backend_wasm_inference_exports", "__export", "wasmBackend", "init_backend_wasm_inference", "__esmMin", "init_backend_wasm", "OnnxruntimeWebAssemblyBackend", "init_esm", "version", "lib_default", "esm_exports", "onnxjsBackend", "registerBackend", "wasmBackend", "env", "version"] + "sourcesContent": ["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend} from './backend.js';\nimport {InferenceSession} from './inference-session.js';\n\ninterface BackendInfo {\n backend: Backend;\n priority: number;\n\n initPromise?: Promise;\n initialized?: boolean;\n aborted?: boolean;\n error?: string;\n}\n\nconst backends: Map = new Map();\nconst backendsSortedByPriority: string[] = [];\n\n/**\n * Register a backend.\n *\n * @param name - the name as a key to lookup as an execution provider.\n * @param backend - the backend object.\n * @param priority - an integer indicating the priority of the backend. Higher number means higher priority. if priority\n * < 0, it will be considered as a 'beta' version and will not be used as a fallback backend by default.\n *\n * @ignore\n */\nexport const registerBackend = (name: string, backend: Backend, priority: number): void => {\n if (backend && typeof backend.init === 'function' && typeof backend.createInferenceSessionHandler === 'function') {\n const currentBackend = backends.get(name);\n if (currentBackend === undefined) {\n backends.set(name, {backend, priority});\n } else if (currentBackend.priority > priority) {\n // same name is already registered with a higher priority. skip registeration.\n return;\n } else if (currentBackend.priority === priority) {\n if (currentBackend.backend !== backend) {\n throw new Error(`cannot register backend \"${name}\" using priority ${priority}`);\n }\n }\n\n if (priority >= 0) {\n const i = backendsSortedByPriority.indexOf(name);\n if (i !== -1) {\n backendsSortedByPriority.splice(i, 1);\n }\n\n for (let i = 0; i < backendsSortedByPriority.length; i++) {\n if (backends.get(backendsSortedByPriority[i])!.priority <= priority) {\n backendsSortedByPriority.splice(i, 0, name);\n return;\n }\n }\n backendsSortedByPriority.push(name);\n }\n return;\n }\n\n throw new TypeError('not a valid backend');\n};\n\n/**\n * Try to resolve and initialize a backend.\n *\n * @param backendName - the name of the backend.\n * @returns the backend instance if resolved and initialized successfully, or an error message if failed.\n */\nconst tryResolveAndInitializeBackend = async(backendName: string): Promise => {\n const backendInfo = backends.get(backendName);\n if (!backendInfo) {\n return 'backend not found.';\n }\n\n if (backendInfo.initialized) {\n return backendInfo.backend;\n } else if (backendInfo.aborted) {\n return backendInfo.error!;\n } else {\n const isInitializing = !!backendInfo.initPromise;\n try {\n if (!isInitializing) {\n backendInfo.initPromise = backendInfo.backend.init(backendName);\n }\n await backendInfo.initPromise;\n backendInfo.initialized = true;\n return backendInfo.backend;\n } catch (e) {\n if (!isInitializing) {\n backendInfo.error = `${e}`;\n backendInfo.aborted = true;\n }\n return backendInfo.error!;\n } finally {\n delete backendInfo.initPromise;\n }\n }\n};\n\n/**\n * Resolve execution providers from the specific session options.\n *\n * @param options - the session options object.\n * @returns a promise that resolves to a tuple of an initialized backend instance and a session options object with\n * filtered EP 
list.\n *\n * @ignore\n */\nexport const resolveBackendAndExecutionProviders = async(options: InferenceSession.SessionOptions):\n Promise<[backend: Backend, options: InferenceSession.SessionOptions]> => {\n // extract backend hints from session options\n const eps = options.executionProviders || [];\n const backendHints = eps.map(i => typeof i === 'string' ? i : i.name);\n const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;\n\n // try to resolve and initialize all requested backends\n let backend: Backend|undefined;\n const errors = [];\n const availableBackendNames = new Set();\n for (const backendName of backendNames) {\n const resolveResult = await tryResolveAndInitializeBackend(backendName);\n if (typeof resolveResult === 'string') {\n errors.push({name: backendName, err: resolveResult});\n } else {\n if (!backend) {\n backend = resolveResult;\n }\n if (backend === resolveResult) {\n availableBackendNames.add(backendName);\n }\n }\n }\n\n // if no backend is available, throw error.\n if (!backend) {\n throw new Error(`no available backend found. ERR: ${errors.map(e => `[${e.name}] ${e.err}`).join(', ')}`);\n }\n\n // for each explicitly requested backend, if it's not available, output warning message.\n for (const {name, err} of errors) {\n if (backendHints.includes(name)) {\n // eslint-disable-next-line no-console\n console.warn(`removing requested execution provider \"${\n name}\" from session options because it is not available: ${err}`);\n }\n }\n\n const filteredEps = eps.filter(i => availableBackendNames.has(typeof i === 'string' ? i : i.name));\n\n return [\n backend, new Proxy(options, {\n get: (target, prop) => {\n if (prop === 'executionProviders') {\n return filteredEps;\n }\n return Reflect.get(target, prop);\n }\n })\n ];\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession} from './training-session.js';\n\n/**\n * @ignore\n */\nexport declare namespace SessionHandler {\n type FeedsType = {[name: string]: OnnxValue};\n type FetchesType = {[name: string]: OnnxValue | null};\n type ReturnType = {[name: string]: OnnxValue};\n}\n\n/**\n * Represents shared SessionHandler functionality\n *\n * @ignore\n */\ninterface SessionHandler {\n dispose(): Promise;\n\n readonly inputNames: readonly string[];\n readonly outputNames: readonly string[];\n}\n\n/**\n * Represent a handler instance of an inference session.\n *\n * @ignore\n */\nexport interface InferenceSessionHandler extends SessionHandler {\n startProfiling(): void;\n endProfiling(): void;\n\n run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n}\n\n/**\n * Represent a handler instance of a training inference session.\n *\n * @ignore\n */\nexport interface TrainingSessionHandler extends SessionHandler {\n readonly evalInputNames: readonly string[];\n readonly evalOutputNames: readonly string[];\n\n lazyResetGrad(): Promise;\n runTrainStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n runOptimizerStep(options: InferenceSession.RunOptions): Promise;\n runEvalStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n\n getParametersSize(trainableOnly: boolean): Promise;\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n getContiguousParameters(trainableOnly: boolean): Promise;\n}\n\n/**\n * Represent a backend that provides implementation of model inferencing.\n *\n * @ignore\n */\nexport interface Backend {\n /**\n * Initialize the backend asynchronously. Should throw when failed.\n */\n init(backendName: string): Promise;\n\n createInferenceSessionHandler(uriOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n\n createTrainingSessionHandler?\n (checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,\n evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,\n options: InferenceSession.SessionOptions): Promise;\n}\n\nexport {registerBackend} from './backend-impl.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from './env.js';\nimport {version} from './version.js';\n\ntype LogLevelType = Env['logLevel'];\n\nlet logLevelValue: Required = 'warning';\n\nexport const env: Env = {\n wasm: {} as Env.WebAssemblyFlags,\n webgl: {} as Env.WebGLFlags,\n webgpu: {} as Env.WebGpuFlags,\n versions: {common: version},\n\n set logLevel(value: LogLevelType) {\n if (value === undefined) {\n return;\n }\n if (typeof value !== 'string' || ['verbose', 'info', 'warning', 'error', 'fatal'].indexOf(value) === -1) {\n throw new Error(`Unsupported logging level: ${value}`);\n }\n logLevelValue = value;\n },\n get logLevel(): Required {\n return logLevelValue;\n },\n};\n\n// set property 'logLevel' so that they can be correctly transferred to worker by `postMessage()`.\nObject.defineProperty(env, 'logLevel', {enumerable: true});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env as envImpl} from './env-impl.js';\n\nexport declare namespace Env {\n export type WasmPathPrefix = string;\n export interface WasmFilePaths {\n /**\n * Specify the override path for the main .wasm file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .wasm file is:\n * - `ort-wasm-simd-threaded.wasm` for default build\n * - `ort-wasm-simd-threaded.jsep.wasm` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.wasm` for training build\n */\n wasm?: URL|string;\n /**\n * Specify the override path for the main .mjs file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .mjs file is:\n * - `ort-wasm-simd-threaded.mjs` for default build\n * - `ort-wasm-simd-threaded.jsep.mjs` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.mjs` for training build\n */\n mjs?: URL|string;\n }\n export type WasmPrefixOrFilePaths = WasmPathPrefix|WasmFilePaths;\n export interface WebAssemblyFlags {\n /**\n * set or get number of thread(s). If omitted or set to 0, number of thread(s) will be determined by system. If set\n * to 1, no worker thread will be spawned.\n *\n * This setting is available only when WebAssembly multithread feature is available in current context.\n *\n * @defaultValue `0`\n */\n numThreads?: number;\n\n /**\n * set or get a boolean value indicating whether to enable SIMD. If set to false, SIMD will be forcely disabled.\n *\n * This setting is available only when WebAssembly SIMD feature is available in current context.\n *\n * @deprecated This property is deprecated. Since SIMD is supported by all major JavaScript engines, non-SIMD\n * build is no longer provided. This property will be removed in future release.\n * @defaultValue `true`\n */\n simd?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @deprecated Use `env.trace` instead. If `env.trace` is set, this property will be ignored.\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Set or get a number specifying the timeout for initialization of WebAssembly backend, in milliseconds. A zero\n * value indicates no timeout is set.\n *\n * @defaultValue `0`\n */\n initTimeout?: number;\n\n /**\n * Set a custom URL prefix to the .wasm/.mjs files, or an object of overrides for both .wasm/.mjs file. 
The override\n * path should be an absolute path.\n */\n wasmPaths?: WasmPrefixOrFilePaths;\n\n /**\n * Set or get a boolean value indicating whether to proxy the execution of main thread to a worker thread.\n *\n * @defaultValue `false`\n */\n proxy?: boolean;\n }\n\n export interface WebGLFlags {\n /**\n * Set or get the WebGL Context ID (webgl or webgl2).\n *\n * @defaultValue `'webgl2'`\n */\n contextId?: 'webgl'|'webgl2';\n /**\n * Get the WebGL rendering context.\n */\n readonly context: WebGLRenderingContext;\n /**\n * Set or get the maximum batch size for matmul. 0 means to disable batching.\n *\n * @deprecated\n */\n matmulMaxBatchSize?: number;\n /**\n * Set or get the texture cache mode.\n *\n * @defaultValue `'full'`\n */\n textureCacheMode?: 'initializerOnly'|'full';\n /**\n * Set or get the packed texture mode\n *\n * @defaultValue `false`\n */\n pack?: boolean;\n /**\n * Set or get whether enable async download.\n *\n * @defaultValue `false`\n */\n async?: boolean;\n }\n\n export interface WebGpuProfilingDataV1TensorMetadata {\n dims: readonly number[];\n dataType: string;\n }\n export interface WebGpuProfilingDataV1 {\n version: 1;\n inputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n outputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n kernelId: number;\n kernelType: string;\n kernelName: string;\n programName: string;\n startTime: number;\n endTime: number;\n }\n\n export type WebGpuProfilingData = WebGpuProfilingDataV1;\n\n export interface WebGpuFlags {\n /**\n * Set or get the profiling mode.\n *\n * @deprecated Use `env.webgpu.profiling.mode` instead. If `env.webgpu.profiling.mode` is set, this property will be\n * ignored.\n */\n profilingMode?: 'off'|'default';\n /**\n * Set or get the profiling configuration.\n */\n profiling?: {\n /**\n * Set or get the profiling mode.\n *\n * @defaultValue `'off'`\n */\n mode?: 'off'|'default';\n\n /**\n * Set or get a callback function when a profiling data is received. If not set, the profiling data will be\n * printed to console.\n */\n ondata?: (data: WebGpuProfilingData) => void;\n };\n /**\n * Set or get the power preference.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n powerPreference?: 'low-power'|'high-performance';\n /**\n * Set or get the force fallback adapter flag.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n forceFallbackAdapter?: boolean;\n /**\n * Set or get the adapter for WebGPU.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as the GPU adapter for the underlying WebGPU backend to create GPU device.\n *\n * If this property is not set, it will be available to get after the first WebGPU inference session is created. 
The\n * value will be the GPU adapter that created by the underlying WebGPU backend.\n *\n * When use with TypeScript, the type of this property is `GPUAdapter` defined in \"@webgpu/types\".\n * Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType}\n */\n adapter: unknown;\n /**\n * Get the device for WebGPU.\n *\n * This property is only available after the first WebGPU inference session is created.\n *\n * When use with TypeScript, the type of this property is `GPUDevice` defined in \"@webgpu/types\".\n * Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType} for more details about why not use types defined in \"@webgpu/types\".\n */\n readonly device: unknown;\n /**\n * Set or get whether validate input content.\n *\n * @defaultValue `false`\n */\n validateInputContent?: boolean;\n }\n}\n\nexport interface Env {\n /**\n * set the severity level for logging.\n *\n * @defaultValue `'warning'`\n */\n logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal';\n\n /**\n * Indicate whether run in debug mode.\n *\n * @defaultValue `false`\n */\n debug?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Get version of the current package.\n */\n readonly versions: {\n readonly common: string;\n readonly web?: string;\n readonly node?: string;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n readonly 'react-native'?: string;\n };\n\n /**\n * Represent a set of flags for WebAssembly\n */\n readonly wasm: Env.WebAssemblyFlags;\n\n /**\n * Represent a set of flags for WebGL\n */\n readonly webgl: Env.WebGLFlags;\n\n /**\n * Represent a set of flags for WebGPU\n */\n readonly webgpu: Env.WebGpuFlags;\n\n [name: string]: unknown;\n}\n\n/**\n * Represent a set of flags as a global singleton.\n */\nexport const env: Env = envImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {Tensor} from './tensor.js';\n\n/**\n * implementation of Tensor.toDataURL()\n */\nexport const tensorToDataURL = (tensor: Tensor, options?: TensorToDataUrlOptions): string => {\n const canvas = typeof document !== 'undefined' ? document.createElement('canvas') : (new OffscreenCanvas(1, 1));\n canvas.width = tensor.dims[3];\n canvas.height = tensor.dims[2];\n const pixels2DContext =\n canvas.getContext('2d') as (CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D | null);\n\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n }\n\n const inputformat = options?.format !== undefined ? 
options.format : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 0];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n // Default pointer assignments\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < height; i++) {\n for (let j = 0; j < width; j++) {\n const R = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n const G = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n const B = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n const A = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n // eslint-disable-next-line @typescript-eslint/restrict-plus-operands\n pixels2DContext.fillStyle = 'rgba(' + R + ',' + G + ',' + B + ',' + A + ')';\n pixels2DContext.fillRect(j, i, 1, 1);\n }\n }\n if ('toDataURL' in canvas) {\n return canvas.toDataURL();\n } else {\n throw new Error('toDataURL is not supported');\n }\n } else {\n throw new Error('Can not access image data');\n }\n};\n\n/**\n * implementation of Tensor.toImageData()\n */\nexport const tensorToImageData = (tensor: Tensor, options?: TensorToImageDataOptions): ImageData => {\n const pixels2DContext = typeof document !== 'undefined' ?\n document.createElement('canvas').getContext('2d') :\n new OffscreenCanvas(1, 1).getContext('2d') as OffscreenCanvasRenderingContext2D;\n let image: ImageData;\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n let channels: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[1];\n channels = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n channels = tensor.dims[1];\n }\n const inputformat = options !== undefined ? (options.format !== undefined ? 
options.format : 'RGB') : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 255];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n if (options !== undefined) {\n if (options.format !== undefined && (channels === 4 && options.format !== 'RGBA') ||\n (channels === 3 && (options.format !== 'RGB' && options.format !== 'BGR'))) {\n throw new Error('Tensor format doesn\\'t match input tensor dims');\n }\n }\n\n // Default pointer assignments\n const step = 4;\n let rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n image = pixels2DContext.createImageData(width, height);\n\n for (let i = 0; i < height * width;\n rImagePointer += step, gImagePointer += step, bImagePointer += step, aImagePointer += step, i++) {\n image.data[rImagePointer] = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n image.data[gImagePointer] = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n image.data[bImagePointer] = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n image.data[aImagePointer] = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n }\n\n } else {\n throw new Error('Can not access image data');\n }\n return image;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsDimensions, OptionsFormat, OptionsNormalizationParameters, OptionsTensorFormat, OptionsTensorLayout, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\ninterface BufferToTensorOptions extends OptionsDimensions, OptionsTensorLayout, OptionsNormalizationParameters,\n OptionsFormat, OptionsTensorFormat {}\n\n/**\n * Create a new tensor object from image object\n *\n * @param buffer - Extracted image buffer data - assuming RGBA format\n * @param imageFormat - input image configuration - required configurations height, width, format\n * @param tensorFormat - output tensor configuration - Default is RGB format\n */\nexport const bufferToTensor = (buffer: Uint8ClampedArray|undefined, options: BufferToTensorOptions): Tensor => {\n if (buffer === undefined) {\n throw new Error('Image buffer must be defined');\n }\n if (options.height === undefined || options.width === undefined) {\n throw new Error('Image height and width must be defined');\n }\n if (options.tensorLayout === 'NHWC') {\n throw new Error('NHWC Tensor layout is not supported yet');\n }\n\n const {height, width} = options;\n\n const norm = options.norm ?? {mean: 255, bias: 0};\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean![0], norm.mean![1], norm.mean![2], norm.mean![3] ?? 255];\n }\n\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias![0], norm.bias![1], norm.bias![2], norm.bias![3] ?? 0];\n }\n\n const inputformat = options.format !== undefined ? options.format : 'RGBA';\n // default value is RGBA since imagedata and HTMLImageElement uses it\n\n const outputformat =\n options.tensorFormat !== undefined ? (options.tensorFormat !== undefined ? options.tensorFormat : 'RGB') : 'RGB';\n const stride = height * width;\n const float32Data = outputformat === 'RGBA' ? 
new Float32Array(stride * 4) : new Float32Array(stride * 3);\n\n // Default pointer assignments\n let step = 4, rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGB') {\n step = 3;\n rImagePointer = 0;\n gImagePointer = 1;\n bImagePointer = 2;\n aImagePointer = -1;\n }\n\n // Updating the pointer assignments based on the output tensor format\n if (outputformat === 'RGBA') {\n aTensorPointer = stride * 3;\n } else if (outputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n } else if (outputformat === 'BGR') {\n bTensorPointer = 0;\n gTensorPointer = stride;\n rTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < stride;\n i++, rImagePointer += step, bImagePointer += step, gImagePointer += step, aImagePointer += step) {\n float32Data[rTensorPointer++] = (buffer[rImagePointer] + normBias[0]) / normMean[0];\n float32Data[gTensorPointer++] = (buffer[gImagePointer] + normBias[1]) / normMean[1];\n float32Data[bTensorPointer++] = (buffer[bImagePointer] + normBias[2]) / normMean[2];\n if (aTensorPointer !== -1 && aImagePointer !== -1) {\n float32Data[aTensorPointer++] = (buffer[aImagePointer] + normBias[3]) / normMean[3];\n }\n }\n\n // Float32Array -> ort.Tensor\n const outputTensor = outputformat === 'RGBA' ? new Tensor('float32', float32Data, [1, 4, height, width]) :\n new Tensor('float32', float32Data, [1, 3, height, width]);\n return outputTensor;\n};\n\n/**\n * implementation of Tensor.fromImage().\n */\nexport const tensorFromImage = async(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise => {\n // checking the type of image object\n const isHTMLImageEle = typeof (HTMLImageElement) !== 'undefined' && image instanceof HTMLImageElement;\n const isImageDataEle = typeof (ImageData) !== 'undefined' && image instanceof ImageData;\n const isImageBitmap = typeof (ImageBitmap) !== 'undefined' && image instanceof ImageBitmap;\n const isString = typeof image === 'string';\n\n let data: Uint8ClampedArray|undefined;\n let bufferToTensorOptions: BufferToTensorOptions = options ?? 
{};\n\n const createCanvas = () => {\n if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n } else if (typeof OffscreenCanvas !== 'undefined') {\n return new OffscreenCanvas(1, 1);\n } else {\n throw new Error('Canvas is not supported');\n }\n };\n const createCanvasContext = (canvas: HTMLCanvasElement|OffscreenCanvas) => {\n if (canvas instanceof HTMLCanvasElement) {\n return canvas.getContext('2d');\n } else if (canvas instanceof OffscreenCanvas) {\n return canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n } else {\n return null;\n }\n };\n // filling and checking image configuration options\n if (isHTMLImageEle) {\n // HTMLImageElement - image object - format is RGBA by default\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n let height = image.height;\n let width = image.width;\n if (options !== undefined && options.resizedHeight !== undefined && options.resizedWidth !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n if (options.tensorFormat !== undefined) {\n throw new Error('Image input config format must be RGBA for HTMLImageElement');\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n }\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n }\n\n pixels2DContext.drawImage(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isImageDataEle) {\n let height: number;\n let width: number;\n\n if (options !== undefined && options.resizedWidth !== undefined && options.resizedHeight !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n } else {\n height = image.height;\n width = image.width;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n }\n bufferToTensorOptions.format = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n\n if (options !== undefined) {\n const tempCanvas = createCanvas();\n\n tempCanvas.width = width;\n tempCanvas.height = height;\n\n const pixels2DContext = createCanvasContext(tempCanvas);\n\n if (pixels2DContext != null) {\n pixels2DContext.putImageData(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else {\n data = image.data;\n }\n } else if (isImageBitmap) {\n // ImageBitmap - image object - format must be provided by user\n if (options === undefined) {\n throw new Error('Please provide image config with format for Imagebitmap');\n }\n\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n const height = image.height;\n const width = image.width;\n pixels2DContext.drawImage(image, 0, 0, width, height);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isString) {\n return 
new Promise((resolve, reject) => {\n const canvas = createCanvas();\n const context = createCanvasContext(canvas);\n if (!image || !context) {\n return reject();\n }\n const newImage = new Image();\n newImage.crossOrigin = 'Anonymous';\n newImage.src = image;\n newImage.onload = () => {\n canvas.width = newImage.width;\n canvas.height = newImage.height;\n context.drawImage(newImage, 0, 0, canvas.width, canvas.height);\n const img = context.getImageData(0, 0, canvas.width, canvas.height);\n\n bufferToTensorOptions.height = canvas.height;\n bufferToTensorOptions.width = canvas.width;\n resolve(bufferToTensor(img.data, bufferToTensorOptions));\n };\n });\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n\n if (data !== undefined) {\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n};\n\n/**\n * implementation of Tensor.fromTexture().\n */\nexport const tensorFromTexture = (\n texture: TensorInterface.TextureType, options: TensorFromTextureOptions): Tensor => {\n const {width, height, download, dispose} = options;\n // Always assume RGBAF32. TODO: support different texture format\n const dims = [1, height, width, 4];\n return new Tensor({location: 'texture', type: 'float32', texture, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromGpuBuffer().\n */\nexport const tensorFromGpuBuffer = (\n gpuBuffer: TensorInterface.GpuBufferType, options: TensorFromGpuBufferOptions): Tensor => {\n const {dataType, dims, download, dispose} = options;\n return new Tensor({location: 'gpu-buffer', type: dataType ?? 'float32', gpuBuffer, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromPinnedBuffer().\n */\nexport const tensorFromPinnedBuffer = (\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor =>\n new Tensor({location: 'cpu-pinned', type, data: buffer, dims: dims ?? [buffer.length]});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type SupportedTypedArrayConstructors = Float32ArrayConstructor|Uint8ArrayConstructor|Int8ArrayConstructor|\n Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|Uint8ArrayConstructor|\n Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor;\nexport type SupportedTypedArray = InstanceType;\n\n// a runtime map that maps type string to TypedArray constructor. Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP = new Map([\n ['float32', Float32Array],\n ['uint8', Uint8Array],\n ['int8', Int8Array],\n ['uint16', Uint16Array],\n ['int16', Int16Array],\n ['int32', Int32Array],\n ['bool', Uint8Array],\n ['float64', Float64Array],\n ['uint32', Uint32Array],\n]);\n\n// a runtime map that maps type string to TypedArray constructor. 
Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP = new Map([\n [Float32Array, 'float32'],\n [Uint8Array, 'uint8'],\n [Int8Array, 'int8'],\n [Uint16Array, 'uint16'],\n [Int16Array, 'int16'],\n [Int32Array, 'int32'],\n [Float64Array, 'float64'],\n [Uint32Array, 'uint32'],\n]);\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// the following code allows delaying execution of BigInt/Float16Array checking. This allows lazy initialization for\n// NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP and NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, which allows BigInt/Float16Array\n// polyfill if available.\nlet isTypedArrayChecked = false;\nexport const checkTypedArray = () => {\n if (!isTypedArrayChecked) {\n isTypedArrayChecked = true;\n const isBigInt64ArrayAvailable = typeof BigInt64Array !== 'undefined' && BigInt64Array.from;\n const isBigUint64ArrayAvailable = typeof BigUint64Array !== 'undefined' && BigUint64Array.from;\n const isFloat16ArrayAvailable = typeof Float16Array !== 'undefined' && Float16Array.from;\n\n if (isBigInt64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('int64', BigInt64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigInt64Array, 'int64');\n }\n if (isBigUint64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('uint64', BigUint64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigUint64Array, 'uint64');\n }\n if (isFloat16ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Float16Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(Float16Array, 'float16');\n } else {\n // if Float16Array is not available, use 'Uint16Array' to store the data.\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Uint16Array);\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TextureConstructorParameters} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\n\n/**\n * calculate size from dims.\n *\n * @param dims the dims array. 
May be an illegal input.\n */\nexport const calculateSize = (dims: readonly unknown[]): number => {\n let size = 1;\n for (let i = 0; i < dims.length; i++) {\n const dim = dims[i];\n if (typeof dim !== 'number' || !Number.isSafeInteger(dim)) {\n throw new TypeError(`dims[${i}] must be an integer, got: ${dim}`);\n }\n if (dim < 0) {\n throw new RangeError(`dims[${i}] must be a non-negative integer, got: ${dim}`);\n }\n size *= dim;\n }\n return size;\n};\n\n/**\n * implementation of Tensor.reshape()\n */\nexport const tensorReshape = (tensor: Tensor, dims: readonly number[]): Tensor => {\n switch (tensor.location) {\n case 'cpu':\n return new Tensor(tensor.type, tensor.data, dims);\n case 'cpu-pinned':\n return new Tensor({\n location: 'cpu-pinned',\n data: tensor.data as CpuPinnedConstructorParameters['data'],\n type: tensor.type as CpuPinnedConstructorParameters['type'],\n dims,\n });\n case 'texture':\n return new Tensor({\n location: 'texture',\n texture: tensor.texture,\n type: tensor.type as TextureConstructorParameters['type'],\n dims,\n });\n case 'gpu-buffer':\n return new Tensor({\n location: 'gpu-buffer',\n gpuBuffer: tensor.gpuBuffer,\n type: tensor.type as GpuBufferConstructorParameters['type'],\n dims,\n });\n default:\n throw new Error(`tensorReshape: tensor location ${tensor.location} is not supported`);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {tensorToDataURL, tensorToImageData} from './tensor-conversion-impl.js';\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {tensorFromGpuBuffer, tensorFromImage, tensorFromPinnedBuffer, tensorFromTexture} from './tensor-factory-impl.js';\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions, TextureConstructorParameters} from './tensor-factory.js';\nimport {checkTypedArray, NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP, NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, SupportedTypedArray, SupportedTypedArrayConstructors} from './tensor-impl-type-mapping.js';\nimport {calculateSize, tensorReshape} from './tensor-utils-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\n// type aliases for those exported from Tensor interface\n\ntype TensorType = TensorInterface.Type;\ntype TensorDataType = TensorInterface.DataType;\ntype TensorDataLocation = TensorInterface.DataLocation;\ntype TensorTextureType = TensorInterface.TextureType;\ntype TensorGpuBufferType = TensorInterface.GpuBufferType;\n\n/**\n * the implementation of Tensor interface.\n *\n * @ignore\n */\nexport class Tensor implements TensorInterface {\n // #region constructors\n\n /**\n * Construct a new CPU tensor object from the given type, data and dims.\n */\n constructor(\n type: TensorType, data: TensorDataType|readonly string[]|readonly number[]|readonly boolean[],\n dims?: readonly number[]);\n /**\n * Construct a new CPU tensor object from the given data and dims. 
Type is inferred from data.\n */\n constructor(data: TensorDataType|readonly string[]|readonly boolean[], dims?: readonly number[]);\n /**\n * Construct a new tensor object from the pinned CPU data with the given type and dims.\n *\n * Tensor's location will be set to 'cpu-pinned'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: CpuPinnedConstructorParameters);\n /**\n * Construct a new tensor object from the WebGL texture with the given type and dims.\n *\n * Tensor's location will be set to 'texture'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: TextureConstructorParameters);\n /**\n * Construct a new tensor object from the WebGPU buffer with the given type and dims.\n *\n * Tensor's location will be set to 'gpu-buffer'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: GpuBufferConstructorParameters);\n\n /**\n * implementation.\n */\n constructor(\n arg0: TensorType|TensorDataType|readonly string[]|readonly boolean[]|CpuPinnedConstructorParameters|\n TextureConstructorParameters|GpuBufferConstructorParameters,\n arg1?: TensorDataType|readonly number[]|readonly string[]|readonly boolean[], arg2?: readonly number[]) {\n // perform one-time check for BigInt/Float16Array support\n checkTypedArray();\n\n let type: TensorType;\n let dims: readonly number[];\n\n if (typeof arg0 === 'object' && 'location' in arg0) {\n //\n // constructing tensor from specific location\n //\n this.dataLocation = arg0.location;\n type = arg0.type;\n dims = arg0.dims;\n switch (arg0.location) {\n case 'cpu-pinned': {\n const expectedTypedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(type);\n if (!expectedTypedArrayConstructor) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from pinned buffer`);\n }\n if (!(arg0.data instanceof expectedTypedArrayConstructor)) {\n throw new TypeError(`buffer should be of type ${expectedTypedArrayConstructor.name}`);\n }\n this.cpuData = arg0.data;\n break;\n }\n case 'texture': {\n if (type !== 'float32') {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from texture`);\n }\n this.gpuTextureData = arg0.texture;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n case 'gpu-buffer': {\n if ((type !== 'float32' && type !== 'float16' && type !== 'int32' && type !== 'int64' && type !== 'uint32' &&\n type !== 'uint8' && type !== 'bool')) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from gpu buffer`);\n }\n this.gpuBufferData = arg0.gpuBuffer;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n default:\n throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`);\n }\n } else {\n //\n // constructing tensor of location 'cpu'\n //\n let data: TensorDataType;\n let maybeDims: typeof arg1|typeof arg2;\n // check whether arg0 is type or data\n if (typeof arg0 === 'string') {\n //\n // Override: constructor(type, data, ...)\n //\n type = arg0;\n maybeDims = arg2;\n if (arg0 === 'string') {\n // string tensor\n if (!Array.isArray(arg1)) {\n throw new TypeError('A string tensor\\'s data must be a string array.');\n }\n // we don't check whether every element in the array is string; this is too slow. 
we assume it's correct and\n // error will be populated at inference\n data = arg1;\n } else {\n // numeric tensor\n const typedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(arg0);\n if (typedArrayConstructor === undefined) {\n throw new TypeError(`Unsupported tensor type: ${arg0}.`);\n }\n if (Array.isArray(arg1)) {\n if (arg0 === 'float16' && typedArrayConstructor === Uint16Array) {\n // When no Float16Array polyfill is used, we cannot create 'float16' tensor from number array.\n //\n // Throw error here because when user try to use number array as data,\n // e.g. new Tensor('float16', [1, 2, 3, 4], dims)), it will actually call\n // Uint16Array.from(arg1) which generates wrong data.\n throw new TypeError(\n 'Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.');\n } else if (arg0 === 'uint64' || arg0 === 'int64') {\n // use 'as any' here because:\n // 1. TypeScript's check on type of 'Array.isArray()' does not work with readonly arrays.\n // see https://github.com/microsoft/TypeScript/issues/17002\n // 2. TypeScript's check on union type of '(BigInt64ArrayConstructor|BigUint64ArrayConstructor).from()'\n // does not accept parameter mapFn.\n // 3. parameters of 'SupportedTypedArrayConstructors.from()' does not match the requirement of the union\n // type.\n\n // assume 'arg1' is of type \"readonly number[]|readonly bigint[]\" here.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1, BigInt);\n } else {\n // assume 'arg1' is of type \"readonly number[]\" here.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1);\n }\n } else if (arg1 instanceof typedArrayConstructor) {\n data = arg1;\n } else {\n throw new TypeError(`A ${type} tensor's data must be type of ${typedArrayConstructor}`);\n }\n }\n } else {\n //\n // Override: constructor(data, ...)\n //\n maybeDims = arg1;\n if (Array.isArray(arg0)) {\n // only boolean[] and string[] is supported\n if (arg0.length === 0) {\n throw new TypeError('Tensor type cannot be inferred from an empty array.');\n }\n const firstElementType = typeof arg0[0];\n if (firstElementType === 'string') {\n type = 'string';\n data = arg0;\n } else if (firstElementType === 'boolean') {\n type = 'bool';\n // 'arg0' is of type 'boolean[]'. Uint8Array.from(boolean[]) actually works, but typescript thinks this is\n // wrong type. 
We use 'as any' to make it happy.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = Uint8Array.from(arg0 as any[]);\n } else {\n throw new TypeError(`Invalid element type of data array: ${firstElementType}.`);\n }\n } else {\n // get tensor type from TypedArray\n const mappedType =\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.get(arg0.constructor as SupportedTypedArrayConstructors);\n if (mappedType === undefined) {\n throw new TypeError(`Unsupported type for tensor data: ${arg0.constructor}.`);\n }\n type = mappedType;\n data = arg0 as SupportedTypedArray;\n }\n }\n\n // type and data is processed, now processing dims\n if (maybeDims === undefined) {\n // assume 1-D tensor if dims omitted\n maybeDims = [data.length];\n } else if (!Array.isArray(maybeDims)) {\n throw new TypeError('A tensor\\'s dims must be a number array');\n }\n dims = maybeDims as readonly number[];\n\n this.cpuData = data;\n this.dataLocation = 'cpu';\n }\n\n // perform check on dims\n const size = calculateSize(dims);\n // if data is on CPU, check whether data length matches tensor size\n if (this.cpuData && size !== this.cpuData.length) {\n throw new Error(`Tensor's size(${size}) does not match data length(${this.cpuData.length}).`);\n }\n\n this.type = type;\n this.dims = dims;\n this.size = size;\n }\n // #endregion\n\n // #region factory\n static async fromImage(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise {\n return tensorFromImage(image, options);\n }\n\n static fromTexture(\n texture: TensorTextureType, options: TensorFromTextureOptions): TensorInterface {\n return tensorFromTexture(texture, options);\n }\n\n static fromGpuBuffer(\n gpuBuffer: TensorGpuBufferType, options: TensorFromGpuBufferOptions): TensorInterface {\n return tensorFromGpuBuffer(gpuBuffer, options);\n }\n\n static fromPinnedBuffer(\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor {\n return tensorFromPinnedBuffer(type, buffer, dims);\n }\n\n // #endregion\n\n // #region conversions\n toDataURL(options?: TensorToDataUrlOptions): string {\n return tensorToDataURL(this, options);\n }\n\n toImageData(options?: TensorToImageDataOptions): ImageData {\n return tensorToImageData(this, options);\n }\n // #endregion\n\n // #region public fields\n readonly dims: readonly number[];\n readonly type: TensorType;\n readonly size: number;\n // #endregion\n\n // #region private fields\n\n /**\n * stores the location of the data.\n */\n private dataLocation: TensorDataLocation;\n\n /**\n * stores the data on CPU, if location is 'cpu' or 'cpu-pinned'. otherwise empty.\n */\n private cpuData?: TensorDataType;\n\n /**\n * stores the underlying texture when location is 'texture'. otherwise empty.\n */\n private gpuTextureData?: TensorTextureType;\n\n /**\n * stores the underlying GPU buffer when location is 'gpu-buffer'. 
otherwise empty.\n */\n private gpuBufferData?: TensorGpuBufferType;\n\n /**\n * stores an optional downloader function to download data from GPU to CPU.\n */\n private downloader?(): Promise;\n\n /**\n * a flag indicating whether the data is being downloaded from GPU to CPU.\n */\n private isDownloading?: boolean;\n\n /**\n * stores an optional disposer function to dispose the underlying data.\n */\n private disposer?(): void;\n // #endregion\n\n // #region properties\n get data(): TensorDataType {\n this.ensureValid();\n if (!this.cpuData) {\n throw new Error(\n 'The data is not on CPU. Use `getData()` to download GPU data to CPU, ' +\n 'or use `texture` or `gpuBuffer` property to access the GPU data directly.');\n }\n return this.cpuData;\n }\n\n get location(): TensorDataLocation {\n return this.dataLocation;\n }\n\n get texture(): TensorTextureType {\n this.ensureValid();\n if (!this.gpuTextureData) {\n throw new Error('The data is not stored as a WebGL texture.');\n }\n return this.gpuTextureData;\n }\n\n get gpuBuffer(): TensorGpuBufferType {\n this.ensureValid();\n if (!this.gpuBufferData) {\n throw new Error('The data is not stored as a WebGPU buffer.');\n }\n return this.gpuBufferData;\n }\n // #endregion\n\n // #region methods\n\n async getData(releaseData?: boolean): Promise {\n this.ensureValid();\n switch (this.dataLocation) {\n case 'cpu':\n case 'cpu-pinned':\n return this.data;\n case 'texture':\n case 'gpu-buffer': {\n if (!this.downloader) {\n throw new Error('The current tensor is not created with a specified data downloader.');\n }\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n try {\n this.isDownloading = true;\n const data = await this.downloader();\n this.downloader = undefined;\n this.dataLocation = 'cpu';\n this.cpuData = data;\n\n if (releaseData && this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n\n return data;\n\n } finally {\n this.isDownloading = false;\n }\n }\n default:\n throw new Error(`cannot get data from location: ${this.dataLocation}`);\n }\n }\n\n dispose(): void {\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n\n if (this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n this.cpuData = undefined;\n this.gpuTextureData = undefined;\n this.gpuBufferData = undefined;\n this.downloader = undefined;\n this.isDownloading = undefined;\n\n this.dataLocation = 'none';\n }\n\n // #endregion\n\n // #region tensor utilities\n private ensureValid(): void {\n if (this.dataLocation === 'none') {\n throw new Error('The tensor is disposed.');\n }\n }\n\n reshape(dims: readonly number[]): TensorInterface {\n this.ensureValid();\n if (this.downloader || this.disposer) {\n throw new Error('Cannot reshape a tensor that owns GPU resource.');\n }\n return tensorReshape(this, dims);\n }\n // #endregion\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorFactory} from './tensor-factory.js';\nimport {Tensor as TensorImpl} from './tensor-impl.js';\nimport {TypedTensorUtils} from './tensor-utils.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\n/**\n * represent a basic tensor with specified dimensions and data type.\n */\ninterface TypedTensorBase {\n /**\n * Get the dimensions of the tensor.\n */\n readonly dims: readonly number[];\n /**\n * Get the data type of the tensor.\n */\n readonly type: T;\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is not on CPU (eg. it's in the form of WebGL texture or WebGPU buffer), throw error.\n */\n readonly data: Tensor.DataTypeMap[T];\n /**\n * Get the location of the data.\n */\n readonly location: Tensor.DataLocation;\n /**\n * Get the WebGL texture that holds the tensor data.\n *\n * If the data is not on GPU as WebGL texture, throw error.\n */\n readonly texture: Tensor.TextureType;\n /**\n * Get the WebGPU buffer that holds the tensor data.\n *\n * If the data is not on GPU as WebGPU buffer, throw error.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is on CPU, returns the data immediately.\n * If the data is on GPU, downloads the data and returns the promise.\n *\n * @param releaseData - whether release the data on GPU. Ignore if data is already on CPU.\n */\n getData(releaseData?: boolean): Promise;\n\n /**\n * Dispose the tensor data.\n *\n * If the data is on CPU, remove its internal reference to the underlying data.\n * If the data is on GPU, release the data on GPU.\n *\n * After calling this function, the tensor is considered no longer valid. Its location will be set to 'none'.\n */\n dispose(): void;\n}\n\nexport declare namespace Tensor {\n interface DataTypeMap {\n float32: Float32Array;\n uint8: Uint8Array;\n int8: Int8Array;\n uint16: Uint16Array;\n int16: Int16Array;\n int32: Int32Array;\n int64: BigInt64Array;\n string: string[];\n bool: Uint8Array;\n float16: Uint16Array; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: Float64Array;\n uint32: Uint32Array;\n uint64: BigUint64Array;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n interface ElementTypeMap {\n float32: number;\n uint8: number;\n int8: number;\n uint16: number;\n int16: number;\n int32: number;\n int64: bigint;\n string: string;\n bool: boolean;\n float16: number; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: number;\n uint32: number;\n uint64: bigint;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n type DataType = DataTypeMap[Type];\n type ElementType = ElementTypeMap[Type];\n\n /**\n * supported data types for constructing a tensor from a pinned CPU buffer\n */\n export type CpuPinnedDataTypes = Exclude;\n\n /**\n * type alias for WebGL texture\n */\n export type TextureType = WebGLTexture;\n\n /**\n * supported data types for constructing a tensor from a WebGL texture\n */\n export type TextureDataTypes = 'float32';\n\n /**\n * type alias for WebGPU buffer\n *\n * The reason why we don't use type \"GPUBuffer\" defined in webgpu.d.ts from @webgpu/types is because \"@webgpu/types\"\n * requires \"@types/dom-webcodecs\" as peer dependency when using TypeScript < v5.1 and its version need to be chosen\n * carefully according to the TypeScript version being used. 
This means so far there is not a way to keep every\n * TypeScript version happy. It turns out that we will easily broke users on some TypeScript version.\n *\n * for more info see https://github.com/gpuweb/types/issues/127\n */\n export type GpuBufferType = {size: number; mapState: 'unmapped' | 'pending' | 'mapped'};\n\n /**\n * supported data types for constructing a tensor from a WebGPU buffer\n */\n export type GpuBufferDataTypes = 'float32'|'float16'|'int32'|'int64'|'uint32'|'uint8'|'bool';\n\n /**\n * represent where the tensor data is stored\n */\n export type DataLocation = 'none'|'cpu'|'cpu-pinned'|'texture'|'gpu-buffer';\n\n /**\n * represent the data type of a tensor\n */\n export type Type = keyof DataTypeMap;\n}\n\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface TypedTensor extends TypedTensorBase, TypedTensorUtils {}\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface Tensor extends TypedTensorBase, TypedTensorUtils {}\n\n/**\n * type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.\n */\nexport interface TensorConstructor extends TensorFactory {\n // #region CPU tensor - specify element type\n /**\n * Construct a new string tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'string', data: Tensor.DataTypeMap['string']|readonly string[],\n dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'bool', data: Tensor.DataTypeMap['bool']|readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new 64-bit integer typed tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(\n type: T, data: Tensor.DataTypeMap[T]|readonly bigint[]|readonly number[],\n dims?: readonly number[]): TypedTensor;\n\n /**\n * Construct a new numeric tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new>(\n type: T, data: Tensor.DataTypeMap[T]|readonly number[], dims?: readonly number[]): TypedTensor;\n // #endregion\n\n // #region CPU tensor - infer element types\n\n /**\n * Construct a new float32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float32Array, dims?: readonly number[]): TypedTensor<'float32'>;\n\n /**\n * Construct a new int8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(data: Int8Array, dims?: readonly number[]): TypedTensor<'int8'>;\n\n /**\n * Construct a new uint8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint8Array, dims?: readonly number[]): TypedTensor<'uint8'>;\n\n /**\n * Construct a new uint16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint16Array, dims?: readonly number[]): TypedTensor<'uint16'>;\n\n /**\n * Construct a new int16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int16Array, dims?: readonly number[]): TypedTensor<'int16'>;\n\n /**\n * Construct a new int32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int32Array, dims?: readonly number[]): TypedTensor<'int32'>;\n\n /**\n * Construct a new int64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigInt64Array, dims?: readonly number[]): TypedTensor<'int64'>;\n\n /**\n * Construct a new string tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly string[], dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new float64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float64Array, dims?: readonly number[]): TypedTensor<'float64'>;\n\n /**\n * Construct a new uint32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint32Array, dims?: readonly number[]): TypedTensor<'uint32'>;\n\n /**\n * Construct a new uint64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigUint64Array, dims?: readonly number[]): TypedTensor<'uint64'>;\n\n // #endregion\n\n // #region CPU tensor - fall back to non-generic tensor type declaration\n\n /**\n * Construct a new tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(type: Tensor.Type, data: Tensor.DataType|readonly number[]|readonly string[]|readonly bigint[]|readonly boolean[],\n dims?: readonly number[]): Tensor;\n\n /**\n * Construct a new tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Tensor.DataType, dims?: readonly number[]): Tensor;\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const Tensor = TensorImpl as TensorConstructor;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from './env-impl.js';\n\n/**\n * @ignore\n */\nexport const TRACE = (deviceType: string, label: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n // eslint-disable-next-line no-console\n console.timeStamp(`${deviceType}::ORT::${label}`);\n};\n\nconst TRACE_FUNC = (msg: string, extraMsg?: string) => {\n const stack = new Error().stack?.split(/\\r\\n|\\r|\\n/g) || [];\n let hasTraceFunc = false;\n for (let i = 0; i < stack.length; i++) {\n if (hasTraceFunc && !stack[i].includes('TRACE_FUNC')) {\n let label = `FUNC_${msg}::${stack[i].trim().split(' ')[1]}`;\n if (extraMsg) {\n label += `::${extraMsg}`;\n }\n TRACE('CPU', label);\n return;\n }\n if (stack[i].includes('TRACE_FUNC')) {\n hasTraceFunc = true;\n }\n }\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_BEGIN = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('BEGIN', extraMsg);\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_END = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('END', extraMsg);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {InferenceSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSessionInterface} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from './trace.js';\n\ntype SessionOptions = InferenceSessionInterface.SessionOptions;\ntype RunOptions = InferenceSessionInterface.RunOptions;\ntype FeedsType = InferenceSessionInterface.FeedsType;\ntype FetchesType = InferenceSessionInterface.FetchesType;\ntype ReturnType = InferenceSessionInterface.ReturnType;\n\nexport class InferenceSession implements InferenceSessionInterface {\n private constructor(handler: InferenceSessionHandler) {\n this.handler = handler;\n }\n run(feeds: FeedsType, options?: RunOptions): Promise;\n run(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async run(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n TRACE_FUNC_BEGIN();\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (this.outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of this.outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSessionInterface.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of this.inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output 
names list\n if (isFetchesEmpty) {\n for (const name of this.outputNames) {\n fetches[name] = null;\n }\n }\n\n // feeds, fetches and options are prepared\n\n const results = await this.handler.run(feeds, fetches, options);\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n TRACE_FUNC_END();\n return returnValue;\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n\n static create(path: string, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: SessionOptions):\n Promise;\n static create(buffer: Uint8Array, options?: SessionOptions): Promise;\n static async create(\n arg0: string|ArrayBufferLike|Uint8Array, arg1?: SessionOptions|number, arg2?: number,\n arg3?: SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n // either load from a file or buffer\n let filePathOrUint8Array: string|Uint8Array;\n let options: SessionOptions = {};\n\n if (typeof arg0 === 'string') {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (arg0 instanceof Uint8Array) {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (\n arg0 instanceof ArrayBuffer ||\n (typeof SharedArrayBuffer !== 'undefined' && arg0 instanceof SharedArrayBuffer)) {\n const buffer = arg0;\n let byteOffset = 0;\n let byteLength = arg0.byteLength;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 === 'number') {\n byteOffset = arg1;\n if (!Number.isSafeInteger(byteOffset)) {\n throw new RangeError('\\'byteOffset\\' must be an integer.');\n }\n if (byteOffset < 0 || byteOffset >= buffer.byteLength) {\n throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);\n }\n byteLength = arg0.byteLength - byteOffset;\n if (typeof arg2 === 'number') {\n byteLength = arg2;\n if (!Number.isSafeInteger(byteLength)) {\n throw new RangeError('\\'byteLength\\' must be an integer.');\n }\n if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {\n throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);\n }\n if (typeof arg3 === 'object' && arg3 !== null) {\n options = arg3;\n } else if (typeof arg3 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'byteLength\\' must be a number.');\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);\n } else {\n throw new TypeError('Unexpected argument[0]: must be \\'path\\' or \\'buffer\\'.');\n }\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n const handler = await 
backend.createInferenceSessionHandler(filePathOrUint8Array, optionsWithValidatedEPs);\n TRACE_FUNC_END();\n return new InferenceSession(handler);\n }\n\n startProfiling(): void {\n this.handler.startProfiling();\n }\n endProfiling(): void {\n this.handler.endProfiling();\n }\n\n get inputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get outputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n private handler: InferenceSessionHandler;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession as InferenceSessionImpl} from './inference-session-impl.js';\nimport {OnnxModelOptions} from './onnx-model.js';\nimport {OnnxValue, OnnxValueDataLocation} from './onnx-value.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace InferenceSession {\n // #region input/output types\n\n type OnnxValueMapType = {readonly [name: string]: OnnxValue};\n type NullableOnnxValueMapType = {readonly [name: string]: OnnxValue | null};\n\n /**\n * A feeds (model inputs) is an object that uses input names as keys and OnnxValue as corresponding values.\n */\n type FeedsType = OnnxValueMapType;\n\n /**\n * A fetches (model outputs) could be one of the following:\n *\n * - Omitted. Use model's output names definition.\n * - An array of string indicating the output names.\n * - An object that use output names as keys and OnnxValue or null as corresponding values.\n *\n * @remark\n * different from input argument, in output, OnnxValue is optional. If an OnnxValue is present it will be\n * used as a pre-allocated value by the inference engine; if omitted, inference engine will allocate buffer\n * internally.\n */\n type FetchesType = readonly string[]|NullableOnnxValueMapType;\n\n /**\n * A inferencing return type is an object that uses output names as keys and OnnxValue as corresponding values.\n */\n type ReturnType = OnnxValueMapType;\n\n // #endregion\n\n // #region session options\n\n /**\n * A set of configurations for session behavior.\n */\n export interface SessionOptions extends OnnxModelOptions {\n /**\n * An array of execution provider options.\n *\n * An execution provider option can be a string indicating the name of the execution provider,\n * or an object of corresponding type.\n */\n executionProviders?: readonly ExecutionProviderConfig[];\n\n /**\n * The intra OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n intraOpNumThreads?: number;\n\n /**\n * The inter OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n interOpNumThreads?: number;\n\n /**\n * The free dimension override.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n freeDimensionOverrides?: {readonly [dimensionName: string]: number};\n\n /**\n * The optimization level.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n graphOptimizationLevel?: 'disabled'|'basic'|'extended'|'all';\n\n /**\n * Whether enable CPU memory arena.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n enableCpuMemArena?: boolean;\n\n /**\n * Whether enable memory pattern.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n 
enableMemPattern?: boolean;\n\n /**\n * Execution mode.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n executionMode?: 'sequential'|'parallel';\n\n /**\n * Optimized model file path.\n *\n * If this setting is specified, the optimized model will be dumped. In browser, a blob will be created\n * with a pop-up window.\n */\n optimizedModelFilePath?: string;\n\n /**\n * Whether enable profiling.\n *\n * This setting is a placeholder for a future use.\n */\n enableProfiling?: boolean;\n\n /**\n * File prefix for profiling.\n *\n * This setting is a placeholder for a future use.\n */\n profileFilePrefix?: string;\n\n /**\n * Log ID.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logId?: string;\n\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Specify string as a preferred data location for all outputs, or an object that use output names as keys and a\n * preferred data location as corresponding values.\n *\n * This setting is available only in ONNXRuntime Web for WebGL and WebGPU EP.\n */\n preferredOutputLocation?: OnnxValueDataLocation|{readonly [outputName: string]: OnnxValueDataLocation};\n\n /**\n * Whether enable graph capture.\n * This setting is available only in ONNXRuntime Web for WebGPU EP.\n */\n enableGraphCapture?: boolean;\n\n /**\n * Store configurations for a session. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_session_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
Will support Node.js binding and react-native later\n *\n * @example\n * ```js\n * extra: {\n * session: {\n * set_denormal_as_zero: \"1\",\n * disable_prepacking: \"1\"\n * },\n * optimization: {\n * enable_gelu_approximation: \"1\"\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #region execution providers\n\n // Currently, we have the following backends to support execution providers:\n // Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).\n // Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.\n // Backend ONNX.js: supports 'webgl'.\n // Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).\n interface ExecutionProviderOptionMap {\n coreml: CoreMLExecutionProviderOption;\n cpu: CpuExecutionProviderOption;\n cuda: CudaExecutionProviderOption;\n dml: DmlExecutionProviderOption;\n nnapi: NnapiExecutionProviderOption;\n tensorrt: TensorRtExecutionProviderOption;\n wasm: WebAssemblyExecutionProviderOption;\n webgl: WebGLExecutionProviderOption;\n webgpu: WebGpuExecutionProviderOption;\n webnn: WebNNExecutionProviderOption;\n qnn: QnnExecutionProviderOption;\n xnnpack: XnnpackExecutionProviderOption;\n }\n\n type ExecutionProviderName = keyof ExecutionProviderOptionMap;\n type ExecutionProviderConfig =\n ExecutionProviderOptionMap[ExecutionProviderName]|ExecutionProviderOption|ExecutionProviderName|string;\n\n export interface ExecutionProviderOption {\n readonly name: string;\n }\n export interface CpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cpu';\n useArena?: boolean;\n }\n export interface CudaExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cuda';\n deviceId?: number;\n }\n export interface DmlExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'dml';\n deviceId?: number;\n }\n export interface TensorRtExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'tensorrt';\n deviceId?: number;\n }\n export interface WebAssemblyExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'wasm';\n }\n export interface WebGLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgl';\n // TODO: add flags\n }\n export interface XnnpackExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'xnnpack';\n }\n export interface WebGpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgpu';\n preferredLayout?: 'NCHW'|'NHWC';\n }\n\n // #region WebNN options\n\n interface WebNNExecutionProviderName extends ExecutionProviderOption {\n readonly name: 'webnn';\n }\n\n /**\n * Represents a set of options for creating a WebNN MLContext.\n *\n * @see https://www.w3.org/TR/webnn/#dictdef-mlcontextoptions\n */\n export interface WebNNContextOptions {\n deviceType?: 'cpu'|'gpu'|'npu';\n numThreads?: number;\n powerPreference?: 'default'|'low-power'|'high-performance';\n }\n\n /**\n * Represents a set of options for WebNN execution provider without MLContext.\n */\n export interface WebNNOptionsWithoutMLContext extends WebNNExecutionProviderName, WebNNContextOptions {\n context?: never;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext.\n *\n * When MLContext is provided, the deviceType is also required so that the WebNN EP can determine the preferred\n * channel layout.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext\n */\n export interface WebNNOptionsWithMLContext extends 
WebNNExecutionProviderName,\n Omit,\n Required> {\n context: unknown /* MLContext */;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext which is created from GPUDevice.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext-gpudevice\n */\n export interface WebNNOptionsWebGpu extends WebNNExecutionProviderName {\n context: unknown /* MLContext */;\n gpuDevice: unknown /* GPUDevice */;\n }\n\n /**\n * Options for WebNN execution provider.\n */\n export type WebNNExecutionProviderOption = WebNNOptionsWithoutMLContext|WebNNOptionsWithMLContext|WebNNOptionsWebGpu;\n\n // #endregion\n\n export interface QnnExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'qnn';\n // TODO add flags\n }\n export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'coreml';\n /**\n * The bit flags for CoreML execution provider.\n *\n * ```\n * COREML_FLAG_USE_CPU_ONLY = 0x001\n * COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002\n * COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004\n * COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008\n * COREML_FLAG_CREATE_MLPROGRAM = 0x010\n * ```\n *\n * See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.\n *\n * This flag is available only in ONNXRuntime (Node.js binding).\n */\n coreMlFlags?: number;\n /**\n * Specify whether to use CPU only in CoreML EP.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n useCPUOnly?: boolean;\n /**\n * Specify whether to enable CoreML EP on subgraph.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n enableOnSubgraph?: boolean;\n /**\n * Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n onlyEnableDeviceWithANE?: boolean;\n }\n export interface NnapiExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'nnapi';\n useFP16?: boolean;\n useNCHW?: boolean;\n cpuDisabled?: boolean;\n cpuOnly?: boolean;\n }\n // #endregion\n\n // #endregion\n\n // #region run options\n\n /**\n * A set of configurations for inference run behavior\n */\n export interface RunOptions {\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Terminate all incomplete OrtRun calls as soon as possible if true\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n terminate?: boolean;\n\n /**\n * A tag for the Run() calls using this\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n tag?: string;\n\n /**\n * Set a single run configuration entry. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_run_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
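// Editor's note: a minimal sketch (not from the source) of how the SessionOptions and RunOptions
// shapes documented above compose; the chosen option values and the 'onnxruntime-common' entry
// point are assumptions for illustration.
import { InferenceSession } from 'onnxruntime-common';

const sessionOptions: InferenceSession.SessionOptions = {
  // execution providers accept either a name string or an option object with a `name` field
  executionProviders: [{ name: 'webgpu', preferredLayout: 'NHWC' }, 'wasm'],
  executionMode: 'sequential',
  logSeverityLevel: 2,
  // free-form session configuration entries (WebAssembly backend only, per the docs above)
  extra: { session: { disable_prepacking: '1' } },
};

const runOptions: InferenceSession.RunOptions = {
  tag: 'warm-up',
  extra: { memory: { enable_memory_arena_shrinkage: '1' } },
};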
Will support Node.js binding and react-native later\n *\n * @example\n *\n * ```js\n * extra: {\n * memory: {\n * enable_memory_arena_shrinkage: \"1\",\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #endregion\n\n // #region value metadata\n\n // eslint-disable-next-line @typescript-eslint/no-empty-interface\n interface ValueMetadata {\n // TBD\n }\n\n // #endregion\n}\n\n/**\n * Represent a runtime instance of an ONNX model.\n */\nexport interface InferenceSession {\n // #region run()\n\n /**\n * Execute the model asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Execute the model asynchronously with the given feeds, fetches and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param fetches - Representation of the model output. See type description of `InferenceSession.OutputType` for\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n\n // #endregion\n\n // #region profiling\n\n /**\n * Start profiling.\n */\n startProfiling(): void;\n\n /**\n * End profiling.\n */\n endProfiling(): void;\n\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded model.\n */\n readonly inputNames: readonly string[];\n\n /**\n * Get output names of the loaded model.\n */\n readonly outputNames: readonly string[];\n\n // /**\n // * Get input metadata of the loaded model.\n // */\n // readonly inputMetadata: ReadonlyArray>;\n\n // /**\n // * Get output metadata of the loaded model.\n // */\n // readonly outputMetadata: ReadonlyArray>;\n\n // #endregion\n}\n\nexport interface InferenceSessionFactory {\n // #region create()\n\n /**\n * Create a new inference session and load model asynchronously from an ONNX model file.\n *\n * @param uri - The URI or file path of the model to load.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(uri: string, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from segment of an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param byteOffset - The 
beginning of the specified portion of the array buffer.\n * @param byteLength - The length in bytes of the array buffer.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: InferenceSession.SessionOptions):\n Promise;\n\n /**\n * Create a new inference session and load model asynchronously from a Uint8Array.\n *\n * @param buffer - A Uint8Array representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: Uint8Array, options?: InferenceSession.SessionOptions): Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const InferenceSession: InferenceSessionFactory = InferenceSessionImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsFormat, OptionsNormalizationParameters, OptionsTensorLayout} from './tensor-factory.js';\n\nexport interface TensorToDataUrlOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface TensorToImageDataOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface ConversionUtils {\n /**\n * creates a DataURL instance from tensor\n *\n * @param options - An optional object representing options for creating a DataURL instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns a DataURL string representing the image converted from tensor data\n */\n toDataURL(options?: TensorToDataUrlOptions): string;\n\n /**\n * creates an ImageData instance from tensor\n *\n * @param options - An optional object representing options for creating an ImageData instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns an ImageData instance representing the image converted from tensor data\n */\n toImageData(options?: TensorToImageDataOptions): ImageData;\n}\n", "// Copyright (c) Microsoft Corporation. 
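// Editor's note: a usage sketch for the create()/run()/release() members documented above, assuming
// the public 'onnxruntime-common' entry point; the model path, input name and shape are illustrative.
import { InferenceSession, Tensor } from 'onnxruntime-common';

async function runOnce() {
  // create() also accepts an ArrayBuffer, an (ArrayBuffer, byteOffset, byteLength) segment, or a Uint8Array.
  const session = await InferenceSession.create('model.onnx', { executionProviders: ['wasm'] });

  const feeds: InferenceSession.FeedsType = {
    input: new Tensor('float32', new Float32Array(3 * 224 * 224), [1, 3, 224, 224]),
  };

  // Without an explicit `fetches` argument, every model output is returned, keyed by output name.
  const results = await session.run(feeds);
  console.log(session.inputNames, session.outputNames, Object.keys(results));

  await session.release();
}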
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor, TypedTensor} from './tensor.js';\n\nexport type ImageFormat = 'RGB'|'RGBA'|'BGR'|'RBG';\nexport type ImageTensorLayout = 'NHWC'|'NCHW';\n\n// the following region contains type definitions for constructing tensor from a specific location.\n\n// #region types for constructing a tensor from a specific location\n\n/**\n * represent common properties of the parameter for constructing a tensor from a specific location.\n */\ninterface CommonConstructorParameters extends Pick {\n /**\n * Specify the data type of the tensor.\n */\n readonly type: T;\n}\n\n/**\n * represent the parameter for constructing a tensor from a GPU resource.\n */\ninterface GpuResourceConstructorParameters {\n /**\n * an optional callback function to download data from GPU to CPU.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n download?(): Promise;\n\n /**\n * an optional callback function that will be called when the tensor is disposed.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n dispose?(): void;\n}\n\n/**\n * represent the parameter for constructing a tensor from a pinned CPU buffer\n */\nexport interface CpuPinnedConstructorParameters extends\n CommonConstructorParameters {\n /**\n * Specify the location of the data to be 'cpu-pinned'.\n */\n readonly location: 'cpu-pinned';\n /**\n * Specify the CPU pinned buffer that holds the tensor data.\n */\n readonly data: Tensor.DataTypeMap[T];\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGL texture\n */\nexport interface TextureConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'texture'.\n */\n readonly location: 'texture';\n /**\n * Specify the WebGL texture that holds the tensor data.\n */\n readonly texture: Tensor.TextureType;\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGPU buffer\n */\nexport interface GpuBufferConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'gpu-buffer'.\n */\n readonly location: 'gpu-buffer';\n /**\n * Specify the WebGPU buffer that holds the tensor data.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n}\n\n// #endregion\n\n// the following region contains type definitions of each individual options.\n// the tensor factory functions use a composition of those options as the parameter type.\n\n// #region Options fields\n\nexport interface OptionsFormat {\n /**\n * Describes the image format represented in RGBA color space.\n */\n format?: ImageFormat;\n}\n\nexport interface OptionsTensorFormat {\n /**\n * Describes the image format of the tensor.\n *\n * NOTE: this is different from option 'format'. While option 'format' represents the original image, 'tensorFormat'\n * represents the target format of the tensor. 
A transpose will be performed if they are different.\n */\n tensorFormat?: ImageFormat;\n}\n\nexport interface OptionsTensorDataType {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: 'float32'|'uint8';\n}\n\nexport interface OptionsTensorLayout {\n /**\n * Describes the tensor layout when representing data of one or more image(s).\n */\n tensorLayout?: ImageTensorLayout;\n}\n\nexport interface OptionsDimensions {\n /**\n * Describes the image height in pixel\n */\n height?: number;\n /**\n * Describes the image width in pixel\n */\n width?: number;\n}\n\nexport interface OptionResizedDimensions {\n /**\n * Describes the resized height. If omitted, original height will be used.\n */\n resizedHeight?: number;\n /**\n * Describes resized width - can be accessed via tensor dimensions as well\n */\n resizedWidth?: number;\n}\n\nexport interface OptionsNormalizationParameters {\n /**\n * Describes normalization parameters when preprocessing the image as model input.\n *\n * Data element are ranged from 0 to 255.\n */\n norm?: {\n /**\n * The 'bias' value for image normalization.\n * - If omitted, use default value 0.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n bias?: number|[number, number, number]|[number, number, number, number];\n /**\n * The 'mean' value for image normalization.\n * - If omitted, use default value 255.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n mean?: number | [number, number, number] | [number, number, number, number];\n };\n}\n\n// #endregion\n\n// #region Options composition\n\nexport interface TensorFromImageDataOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromImageElementOptions extends OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromUrlOptions extends OptionsDimensions, OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromImageBitmapOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromTextureOptions extends\n Required, OptionsFormat, GpuResourceConstructorParameters/* TODO: add more */ {}\n\nexport interface TensorFromGpuBufferOptions extends\n Pick, GpuResourceConstructorParameters {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: T;\n}\n\n// #endregion\n\n/**\n * type TensorFactory defines the factory functions of 'Tensor' to create tensor instances from existing data or\n * resources.\n */\nexport interface TensorFactory {\n /**\n * create a tensor from an ImageData object\n *\n * @param imageData - the ImageData object to create tensor from\n * @param options - An optional object representing options for creating tensor from ImageData.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n 
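// Editor's note: a sketch of Tensor.fromImage() using the option fields documented above; the resize
// target and normalization values are illustrative. Per the docs, input data elements are in the
// 0..255 range, norm.mean defaults to 255 and norm.bias to 0.
import { Tensor } from 'onnxruntime-common';

async function imageDataToTensor(imageData: ImageData) {
  return Tensor.fromImage(imageData, {
    tensorFormat: 'RGB',   // channel order of the produced tensor
    tensorLayout: 'NCHW',  // documented default
    dataType: 'float32',   // documented default
    resizedWidth: 224,
    resizedHeight: 224,
    norm: {
      mean: 255,           // documented default
      bias: [0, 0, 0],     // a 3- or 4-element array is applied per channel
    },
  });
}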
*/\n fromImage(imageData: ImageData, options?: TensorFromImageDataOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a HTMLImageElement object\n *\n * @param imageElement - the HTMLImageElement object to create tensor from\n * @param options - An optional object representing options for creating tensor from HTMLImageElement.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from URL\n *\n * @param urlSource - a string as a URL to the image or a data URL containing the image data.\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(urlSource: string, options?: TensorFromUrlOptions): Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from an ImageBitmap object\n *\n * @param bitmap - the ImageBitmap object to create tensor from\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(bitmap: ImageBitmap, options: TensorFromImageBitmapOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a WebGL texture\n *\n * @param texture - the WebGLTexture object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGL texture.\n *\n * The options include following properties:\n * - `width`: the width of the texture. Required.\n * - `height`: the height of the texture. Required.\n * - `format`: the format of the texture. If omitted, assume 'RGBA'.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromTexture(\n texture: Tensor.TextureType, options: TensorFromTextureOptions): TypedTensor<'float32'>;\n\n /**\n * create a tensor from a WebGPU buffer\n *\n * @param buffer - the GPUBuffer object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGPU buffer.\n *\n * The options include following properties:\n * - `dataType`: the data type of the tensor. If omitted, assume 'float32'.\n * - `dims`: the dimension of the tensor. Required.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. 
If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromGpuBuffer(\n buffer: Tensor.GpuBufferType, options: TensorFromGpuBufferOptions): TypedTensor;\n\n /**\n * create a tensor from a pre-allocated buffer. The buffer will be used as a pinned buffer.\n *\n * @param type - the tensor element type.\n * @param buffer - a TypedArray corresponding to the type.\n * @param dims - specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n *\n * @returns a tensor object\n */\n fromPinnedBuffer>(\n type: T, buffer: Tensor.DataTypeMap[T], dims?: readonly number[]): TypedTensor;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * A string that represents a file's URL or path.\n *\n * Path is vailable only in onnxruntime-node or onnxruntime-web running in Node.js.\n */\nexport type FileUrlOrPath = string;\n\n/**\n * A Blob object that represents a file.\n */\nexport type FileBlob = Blob;\n\n/**\n * A Uint8Array, ArrayBuffer or SharedArrayBuffer object that represents a file content.\n *\n * When it is an ArrayBuffer or SharedArrayBuffer, the whole buffer is assumed to be the file content.\n */\nexport type FileData = Uint8Array|ArrayBufferLike;\n\n/**\n * Represents a file that can be loaded by the ONNX Runtime JavaScript API.\n */\nexport type FileType = FileUrlOrPath|FileBlob|FileData;\n\n/**\n * Represents an external data file.\n */\nexport interface ExternalDataFileDescription {\n /**\n * Specify the external data file.\n */\n data: FileType;\n /**\n * Specify the file path.\n */\n path: string;\n}\n\n/**\n * Represents an external data file.\n *\n * When using a string, it should be a file URL or path that in the same directory as the model file.\n */\nexport type ExternalDataFileType = ExternalDataFileDescription|FileUrlOrPath;\n\n/**\n * Options for model loading.\n */\nexport interface OnnxModelOptions {\n /**\n * Specifying a list of files that represents the external data.\n */\n externalData?: readonly ExternalDataFileType[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type NonTensorType = never;\n\n/**\n * Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.\n *\n * NOTE: currently not support non-tensor\n */\nexport type OnnxValue = Tensor|NonTensorType;\n\n/**\n * Type OnnxValueDataLocation represents the location of the data of an OnnxValue.\n */\nexport type OnnxValueDataLocation = Tensor.DataLocation;\n", "// Copyright (c) Microsoft Corporation. 
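// Editor's note: a sketch of the location-specific factory methods documented above; the GPU buffer,
// its dims and the element counts are assumptions. The `download`/`dispose` callbacks, normally
// provided by a GPU backend for inference outputs, are omitted here.
import { Tensor } from 'onnxruntime-common';

// CPU-pinned buffer: the given typed array is used directly as the tensor data.
const pinned = Tensor.fromPinnedBuffer('float32', new Float32Array(2 * 3), [2, 3]);

// WebGPU buffer: `dims` is required; `dataType` defaults to 'float32' when omitted.
declare const myGpuBuffer: Tensor.GpuBufferType;
const gpuTensor = Tensor.fromGpuBuffer(myGpuBuffer, { dataType: 'float32', dims: [1, 1000] });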
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {SessionHandler, TrainingSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TrainingSession as TrainingSessionInterface, TrainingSessionCreateOptions} from './training-session.js';\n\ntype SessionOptions = InferenceSession.SessionOptions;\ntype FeedsType = InferenceSession.FeedsType;\ntype FetchesType = InferenceSession.FetchesType;\ntype ReturnType = InferenceSession.ReturnType;\ntype RunOptions = InferenceSession.RunOptions;\n\nconst noBackendErrMsg: string = 'Training backend could not be resolved. ' +\n 'Make sure you\\'re using the correct configuration & WebAssembly files.';\n\nexport class TrainingSession implements TrainingSessionInterface {\n private constructor(handler: TrainingSessionHandler, hasOptimizerModel: boolean, hasEvalModel: boolean) {\n this.handler = handler;\n this.hasOptimizerModel = hasOptimizerModel;\n this.hasEvalModel = hasEvalModel;\n }\n private handler: TrainingSessionHandler;\n private hasOptimizerModel: boolean;\n private hasEvalModel: boolean;\n\n get trainingInputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get trainingOutputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n get evalInputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalInputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n get evalOutputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalOutputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n\n static async create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: SessionOptions):\n Promise {\n const evalModel: string|Uint8Array = trainingOptions.evalModel || '';\n const optimizerModel: string|Uint8Array = trainingOptions.optimizerModel || '';\n const options: SessionOptions = sessionOptions || {};\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n if (backend.createTrainingSessionHandler) {\n const handler = await backend.createTrainingSessionHandler(\n trainingOptions.checkpointState, trainingOptions.trainModel, evalModel, optimizerModel,\n optionsWithValidatedEPs);\n return new TrainingSession(handler, !!trainingOptions.optimizerModel, !!trainingOptions.evalModel);\n } else {\n throw new Error(noBackendErrMsg);\n }\n }\n\n /**\n * Helper function for runTrainStep and future runStep methods that handles the type-narrowing conversion from\n * the given parameters to SessionHandler.FetchesType and RunOptions.\n *\n * @param inputNames the feeds object is checked that they contain all input names in the provided list of input\n * names.\n * @param outputNames the fetches object is checked that their keys match up with valid names in the list of output\n * names.\n * @param feeds the required input\n * @param arg1 narrowed & converted into the SessionHandler.FetchesType or RunOptions object\n * @param arg2 optional RunOptions object.\n * @returns\n */\n typeNarrowingForRunStep(\n inputNames: readonly string[], outputNames: readonly string[], feeds: FeedsType, arg1?: FetchesType|RunOptions,\n arg2?: 
RunOptions): [SessionHandler.FetchesType, RunOptions] {\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSession.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output names list\n if (isFetchesEmpty) {\n for (const name of outputNames) {\n fetches[name] = null;\n }\n }\n\n return [fetches, options];\n }\n\n /**\n * Helper method for runTrainStep and any other runStep methods. 
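// Editor's note: the call shapes that typeNarrowingForRunStep() above distinguishes, shown as a
// sketch; the session, feeds and the 'loss' output name are hypothetical.
import { InferenceSession, TrainingSession } from 'onnxruntime-common';

async function runStepCallShapes(session: TrainingSession, feeds: InferenceSession.FeedsType) {
  await session.runTrainStep(feeds);                             // no arg1: all outputs are fetched
  await session.runTrainStep(feeds, { tag: 'step' });            // arg1 has no output-name keys, so it is RunOptions
  await session.runTrainStep(feeds, ['loss'], { tag: 'step' });  // arg1 is fetches, arg2 is RunOptions
}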
Takes the ReturnType result from the SessionHandler\n * and changes it into a map of Tensors.\n *\n * @param results\n * @returns\n */\n convertHandlerReturnTypeToMapOfTensors(results: SessionHandler.ReturnType): ReturnType {\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n return returnValue;\n }\n\n async lazyResetGrad(): Promise {\n await this.handler.lazyResetGrad();\n }\n\n runTrainStep(feeds: FeedsType, options?: RunOptions): Promise;\n runTrainStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async runTrainStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.trainingInputNames, this.trainingOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runTrainStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n }\n\n async runOptimizerStep(options?: InferenceSession.RunOptions|undefined): Promise {\n if (this.hasOptimizerModel) {\n await this.handler.runOptimizerStep(options || {});\n } else {\n throw new Error('This TrainingSession has no OptimizerModel loaded.');\n }\n }\n\n runEvalStep(feeds: FeedsType, options?: RunOptions|undefined): Promise;\n runEvalStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions|undefined): Promise;\n async runEvalStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n if (this.hasEvalModel) {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.evalInputNames, this.evalOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runEvalStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n } else {\n throw new Error('This TrainingSession has no EvalModel loaded.');\n }\n }\n\n async getParametersSize(trainableOnly = true): Promise {\n return this.handler.getParametersSize(trainableOnly);\n }\n\n async loadParametersBuffer(array: Uint8Array, trainableOnly = true): Promise {\n const paramsSize = await this.getParametersSize(trainableOnly);\n // checking that the size of the Uint8Array is equivalent to the byte length of a Float32Array of the number\n // of parameters\n if (array.length !== 4 * paramsSize) {\n throw new Error(\n 'Size of the buffer passed into loadParametersBuffer must match the number of parameters in ' +\n 'the model. Please use getParametersSize method to check.');\n }\n return this.handler.loadParametersBuffer(array, trainableOnly);\n }\n\n async getContiguousParameters(trainableOnly = true): Promise {\n return this.handler.getContiguousParameters(trainableOnly);\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
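// Editor's note: a sketch of the parameter-copy helpers implemented above; per loadParametersBuffer(),
// the buffer must hold 4 bytes (one float32) per parameter reported by getParametersSize(). Where the
// replacement weights come from is left as an assumption.
import { TrainingSession } from 'onnxruntime-common';

async function swapParameters(session: TrainingSession, newWeights: Uint8Array) {
  const count = await session.getParametersSize(true);            // trainable parameters only
  if (newWeights.length !== 4 * count) {
    throw new Error(`expected ${4 * count} bytes, got ${newWeights.length}`);
  }
  const previous = await session.getContiguousParameters(true);   // float32 OnnxValue snapshot
  await session.loadParametersBuffer(newWeights, true);
  return previous;
}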
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession as TrainingSessionImpl} from './training-session-impl.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace TrainingSession {\n /**\n * Either URI file path (string) or Uint8Array containing model or checkpoint information.\n */\n type UriOrBuffer = string|Uint8Array;\n}\n\n/**\n * Represent a runtime instance of an ONNX training session,\n * which contains a model that can be trained, and, optionally,\n * an eval and optimizer model.\n */\nexport interface TrainingSession {\n // #region run()\n\n /**\n * Lazily resets the gradients of all trainable parameters to zero. Should happen after the invocation of\n * runOptimizerStep.\n */\n lazyResetGrad(): Promise;\n\n /**\n * Run TrainStep asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for\n detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n runTrainStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single train step with the given inputs and options.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runTrainStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Runs a single optimizer step, which performs weight updates for the trainable parameters using the optimizer model.\n *\n * @param options - Optional. A set of options that controls the behavior of model optimizing.\n */\n runOptimizerStep(options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region copy parameters\n\n /**\n * Retrieves the size of all parameters for the training state. 
Calculates the total number of primitive (datatype of\n * the parameters) elements of all the parameters in the training state.\n *\n * @param trainableOnly - When set to true, the size is calculated for trainable params only. Default value is true.\n */\n getParametersSize(trainableOnly: boolean): Promise;\n\n /**\n * Copies parameter values from the given buffer to the training state. Currently, only supporting models with\n * parameters of type Float32.\n *\n * @param buffer - A Uint8Array representation of Float32 parameters.\n * @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.\n */\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n\n /**\n * Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.\n * Currently, only supporting models with parameters of type Float32.\n *\n * @param trainableOnly - When set to true, only trainable parameters are copied. Trainable parameters are parameters\n * for which requires_grad is set to true. Default value is true.\n * @returns A promise that resolves to a Float32 OnnxValue of the requested parameters.\n */\n getContiguousParameters(trainableOnly: boolean): Promise;\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded training model.\n */\n readonly trainingInputNames: readonly string[];\n\n /**\n * Get output names of the loaded training model.\n */\n readonly trainingOutputNames: readonly string[];\n\n /**\n * Get input names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalInputNames: readonly string[];\n\n /**\n * Get output names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalOutputNames: readonly string[];\n\n // #endregion\n}\n\n/**\n * Represents the optional parameters that can be passed into the TrainingSessionFactory.\n */\nexport interface TrainingSessionCreateOptions {\n /**\n * URI or buffer for a .ckpt file that contains the checkpoint for the training model.\n */\n checkpointState: TrainingSession.UriOrBuffer;\n /**\n * URI or buffer for the .onnx training file.\n */\n trainModel: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx optimizer model file.\n */\n optimizerModel?: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx eval model file.\n */\n evalModel?: TrainingSession.UriOrBuffer;\n}\n\n/**\n * Defines method overload possibilities for creating a TrainingSession.\n */\nexport interface TrainingSessionFactory {\n // #region create()\n\n /**\n * Creates a new TrainingSession and asynchronously loads any models passed in through trainingOptions\n *\n * @param trainingOptions specify models and checkpoints to load into the Training Session\n * @param sessionOptions specify configuration for training session behavior\n *\n * @returns Promise that resolves to a TrainingSession object\n */\n create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: InferenceSession.SessionOptions):\n Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const TrainingSession: TrainingSessionFactory = TrainingSessionImpl;\n", "// Copyright (c) Microsoft Corporation. 
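// Editor's note: a sketch of TrainingSessionCreateOptions plus one train/eval iteration using the
// interface documented above; the artifact file names and the feeds are assumptions.
import { InferenceSession, TrainingSession } from 'onnxruntime-common';

async function oneTrainingIteration(feeds: InferenceSession.FeedsType) {
  const session = await TrainingSession.create(
    {
      checkpointState: 'checkpoint.ckpt',
      trainModel: 'training_model.onnx',
      optimizerModel: 'optimizer_model.onnx',  // optional
      evalModel: 'eval_model.onnx',            // optional
    },
    { executionProviders: ['wasm'] },
  );

  const trainOutputs = await session.runTrainStep(feeds);
  await session.runOptimizerStep();
  await session.lazyResetGrad();               // documented to follow runOptimizerStep
  const evalOutputs = await session.runEvalStep(feeds);

  await session.release();
  return { trainOutputs, evalOutputs };
}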
All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * # ONNX Runtime JavaScript API\n *\n * ONNX Runtime JavaScript API is a unified API for all JavaScript usages, including the following NPM packages:\n *\n * - [onnxruntime-node](https://www.npmjs.com/package/onnxruntime-node)\n * - [onnxruntime-web](https://www.npmjs.com/package/onnxruntime-web)\n * - [onnxruntime-react-native](https://www.npmjs.com/package/onnxruntime-react-native)\n *\n * See also:\n * - [Get Started](https://onnxruntime.ai/docs/get-started/with-javascript/)\n * - [Inference examples](https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js)\n *\n * @packageDocumentation\n */\n\nexport * from './backend.js';\nexport * from './env.js';\nexport * from './inference-session.js';\nexport * from './tensor.js';\nexport * from './tensor-conversion.js';\nexport * from './tensor-factory.js';\nexport * from './trace.js';\nexport * from './onnx-model.js';\nexport * from './onnx-value.js';\nexport * from './training-session.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {WebGLContext} from './backends/webgl/webgl-context';\n\nexport declare namespace Logger {\n export interface SeverityTypeMap {\n verbose: 'v';\n info: 'i';\n warning: 'w';\n error: 'e';\n fatal: 'f';\n }\n\n export type Severity = keyof SeverityTypeMap;\n\n export type Provider = 'none'|'console';\n\n /**\n * Logging config that used to control the behavior of logger\n */\n export interface Config {\n /**\n * Specify the logging provider. 'console' by default\n */\n provider?: Provider;\n /**\n * Specify the minimal logger serverity. 'warning' by default\n */\n minimalSeverity?: Logger.Severity;\n /**\n * Whether to output date time in log. true by default\n */\n logDateTime?: boolean;\n /**\n * Whether to output source information (Not yet supported). false by default\n */\n logSourceLocation?: boolean;\n }\n\n export interface CategorizedLogger {\n verbose(content: string): void;\n info(content: string): void;\n warning(content: string): void;\n error(content: string): void;\n fatal(content: string): void;\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface Logger {\n (category: string): Logger.CategorizedLogger;\n\n verbose(content: string): void;\n verbose(category: string, content: string): void;\n info(content: string): void;\n info(category: string, content: string): void;\n warning(content: string): void;\n warning(category: string, content: string): void;\n error(content: string): void;\n error(category: string, content: string): void;\n fatal(content: string): void;\n fatal(category: string, content: string): void;\n\n /**\n * Reset the logger configuration.\n * @param config specify an optional default config\n */\n reset(config?: Logger.Config): void;\n /**\n * Set the logger's behavior on the given category\n * @param category specify a category string. If '*' is specified, all previous configuration will be overwritten. If\n * '' is specified, the default behavior will be updated.\n * @param config the config object to indicate the logger's behavior\n */\n set(category: string, config: Logger.Config): void;\n\n /**\n * Set the logger's behavior from ort-common env\n * @param env the env used to set logger. 
Currently only setting loglevel is supported through Env.\n */\n setWithEnv(env: Env): void;\n}\n\ninterface LoggerProvider {\n log(severity: Logger.Severity, content: string, category?: string): void;\n}\nclass NoOpLoggerProvider implements LoggerProvider {\n log(_severity: Logger.Severity, _content: string, _category?: string) {\n // do nothing\n }\n}\nclass ConsoleLoggerProvider implements LoggerProvider {\n log(severity: Logger.Severity, content: string, category?: string) {\n // eslint-disable-next-line no-console\n console.log(`${this.color(severity)} ${category ? '\\x1b[35m' + category + '\\x1b[0m ' : ''}${content}`);\n }\n\n private color(severity: Logger.Severity) {\n switch (severity) {\n case 'verbose':\n return '\\x1b[34;40mv\\x1b[0m';\n case 'info':\n return '\\x1b[32mi\\x1b[0m';\n case 'warning':\n return '\\x1b[30;43mw\\x1b[0m';\n case 'error':\n return '\\x1b[31;40me\\x1b[0m';\n case 'fatal':\n return '\\x1b[101mf\\x1b[0m';\n default:\n throw new Error(`unsupported severity: ${severity}`);\n }\n }\n}\n\nconst SEVERITY_VALUE = {\n verbose: 1000,\n info: 2000,\n warning: 4000,\n error: 5000,\n fatal: 6000\n};\n\nconst LOGGER_PROVIDER_MAP: {readonly [provider: string]: Readonly} = {\n ['none']: new NoOpLoggerProvider(),\n ['console']: new ConsoleLoggerProvider()\n};\nconst LOGGER_DEFAULT_CONFIG = {\n provider: 'console',\n minimalSeverity: 'warning',\n logDateTime: true,\n logSourceLocation: false\n};\nlet LOGGER_CONFIG_MAP:\n {[category: string]: Readonly>} = {['']: LOGGER_DEFAULT_CONFIG as Required};\n\nfunction log(category: string): Logger.CategorizedLogger;\nfunction log(severity: Logger.Severity, content: string): void;\nfunction log(severity: Logger.Severity, category: string, content: string): void;\nfunction log(severity: Logger.Severity, arg1: string, arg2?: string): void;\nfunction log(\n arg0: string|Logger.Severity, arg1?: string, arg2?: string|number, arg3?: number): Logger.CategorizedLogger|void {\n if (arg1 === undefined) {\n // log(category: string): Logger.CategorizedLogger;\n return createCategorizedLogger(arg0);\n } else if (arg2 === undefined) {\n // log(severity, content);\n logInternal(arg0 as Logger.Severity, arg1, 1);\n } else if (typeof arg2 === 'number' && arg3 === undefined) {\n // log(severity, content, stack)\n logInternal(arg0 as Logger.Severity, arg1, arg2);\n } else if (typeof arg2 === 'string' && arg3 === undefined) {\n // log(severity, category, content)\n logInternal(arg0 as Logger.Severity, arg2, 1, arg1);\n } else if (typeof arg2 === 'string' && typeof arg3 === 'number') {\n // log(severity, category, content, stack)\n logInternal(arg0 as Logger.Severity, arg2, arg3, arg1);\n } else {\n throw new TypeError('input is valid');\n }\n}\n\nfunction createCategorizedLogger(category: string): Logger.CategorizedLogger {\n return {\n verbose: log.verbose.bind(null, category),\n info: log.info.bind(null, category),\n warning: log.warning.bind(null, category),\n error: log.error.bind(null, category),\n fatal: log.fatal.bind(null, category)\n };\n}\n\n// NOTE: argument 'category' is put the last parameter beacause typescript\n// doesn't allow optional argument put in front of required argument. 
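// Editor's note: a usage sketch for the internal Logger above (part of onnxruntime-web's
// instrumentation, not the public package API); the './instrument' import path and the category
// names are assumptions.
import { Logger } from './instrument';

Logger.set('', { minimalSeverity: 'info' });            // '' updates the default behaviour
Logger.set('Backend', { minimalSeverity: 'verbose' });  // per-category override
Logger.warning('Backend', 'falling back to the CPU implementation');
const backendLog = Logger('Backend');                   // categorized logger bound to one category
backendLog.verbose('WebGL context initialized');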
This\n// order is different from a usual logging API.\nfunction logInternal(severity: Logger.Severity, content: string, _stack: number, category?: string) {\n const config = LOGGER_CONFIG_MAP[category || ''] || LOGGER_CONFIG_MAP[''];\n if (SEVERITY_VALUE[severity] < SEVERITY_VALUE[config.minimalSeverity]) {\n return;\n }\n\n if (config.logDateTime) {\n content = `${new Date().toISOString()}|${content}`;\n }\n\n if (config.logSourceLocation) {\n // TODO: calculate source location from 'stack'\n }\n\n LOGGER_PROVIDER_MAP[config.provider].log(severity, content, category);\n}\n\n// eslint-disable-next-line @typescript-eslint/no-namespace\nnamespace log {\n export function verbose(content: string): void;\n export function verbose(category: string, content: string): void;\n export function verbose(arg0: string, arg1?: string) {\n log('verbose', arg0, arg1);\n }\n export function info(content: string): void;\n export function info(category: string, content: string): void;\n export function info(arg0: string, arg1?: string) {\n log('info', arg0, arg1);\n }\n export function warning(content: string): void;\n export function warning(category: string, content: string): void;\n export function warning(arg0: string, arg1?: string) {\n log('warning', arg0, arg1);\n }\n export function error(content: string): void;\n export function error(category: string, content: string): void;\n export function error(arg0: string, arg1?: string) {\n log('error', arg0, arg1);\n }\n export function fatal(content: string): void;\n export function fatal(category: string, content: string): void;\n export function fatal(arg0: string, arg1?: string) {\n log('fatal', arg0, arg1);\n }\n\n export function reset(config?: Logger.Config): void {\n LOGGER_CONFIG_MAP = {};\n set('', config || {});\n }\n export function set(category: string, config: Logger.Config): void {\n if (category === '*') {\n reset(config);\n } else {\n const previousConfig = LOGGER_CONFIG_MAP[category] || LOGGER_DEFAULT_CONFIG;\n LOGGER_CONFIG_MAP[category] = {\n provider: config.provider || previousConfig.provider,\n minimalSeverity: config.minimalSeverity || previousConfig.minimalSeverity,\n logDateTime: (config.logDateTime === undefined) ? previousConfig.logDateTime : config.logDateTime,\n logSourceLocation: (config.logSourceLocation === undefined) ? 
previousConfig.logSourceLocation :\n config.logSourceLocation\n };\n }\n\n // TODO: we want to support wildcard or regex?\n }\n\n export function setWithEnv(env: Env): void {\n const config: Logger.Config = {};\n if (env.logLevel) {\n config.minimalSeverity = env.logLevel as Logger.Severity;\n }\n set('', config);\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare, @typescript-eslint/naming-convention\nexport const Logger: Logger = log;\n\nexport declare namespace Profiler {\n export interface Config {\n maxNumberEvents?: number;\n flushBatchSize?: number;\n flushIntervalInMilliseconds?: number;\n }\n\n export type EventCategory = 'session'|'node'|'op'|'backend';\n\n export interface Event {\n end(): void|Promise;\n }\n}\n// TODO\n// class WebGLEvent implements Profiler.Event {}\n\nclass Event implements Profiler.Event {\n constructor(\n public category: Profiler.EventCategory, public name: string, public startTime: number,\n private endCallback: (e: Event) => void|Promise, public timer?: WebGLQuery, public ctx?: WebGLContext) {}\n\n async end() {\n return this.endCallback(this);\n }\n\n async checkTimer(): Promise {\n if (this.ctx === undefined || this.timer === undefined) {\n throw new Error('No webgl timer found');\n } else {\n this.ctx.endTimer();\n return this.ctx.waitForQueryAndGetTime(this.timer);\n }\n }\n}\n\nclass EventRecord {\n constructor(\n public category: Profiler.EventCategory, public name: string, public startTime: number, public endTime: number) {}\n}\n\nexport class Profiler {\n static create(config?: Profiler.Config): Profiler {\n if (config === undefined) {\n return new this();\n }\n return new this(config.maxNumberEvents, config.flushBatchSize, config.flushIntervalInMilliseconds);\n }\n\n private constructor(maxNumberEvents?: number, flushBatchSize?: number, flushIntervalInMilliseconds?: number) {\n this._started = false;\n this._maxNumberEvents = maxNumberEvents === undefined ? 10000 : maxNumberEvents;\n this._flushBatchSize = flushBatchSize === undefined ? 10 : flushBatchSize;\n this._flushIntervalInMilliseconds = flushIntervalInMilliseconds === undefined ? 5000 : flushIntervalInMilliseconds;\n }\n\n // start profiling\n start() {\n this._started = true;\n this._timingEvents = [];\n this._flushTime = now();\n this._flushPointer = 0;\n }\n\n // stop profiling\n stop() {\n this._started = false;\n for (; this._flushPointer < this._timingEvents.length; this._flushPointer++) {\n this.logOneEvent(this._timingEvents[this._flushPointer]);\n }\n }\n\n // create an event scope for the specific function\n event(category: Profiler.EventCategory, name: string, func: () => T, ctx?: WebGLContext): T;\n event(category: Profiler.EventCategory, name: string, func: () => Promise, ctx?: WebGLContext): Promise;\n\n event(category: Profiler.EventCategory, name: string, func: () => T | Promise, ctx?: WebGLContext): T\n |Promise {\n const event = this._started ? 
this.begin(category, name, ctx) : undefined;\n let isPromise = false;\n\n const res = func();\n\n // we consider a then-able object is a promise\n if (res && typeof (res as Promise).then === 'function') {\n isPromise = true;\n return new Promise((resolve, reject) => {\n (res as Promise)\n .then(\n async value => { // fulfilled\n if (event) {\n await event.end();\n }\n resolve(value);\n },\n async reason => { // rejected\n if (event) {\n await event.end();\n }\n reject(reason);\n });\n });\n }\n if (!isPromise && event) {\n const eventRes = event.end();\n if (eventRes && typeof eventRes.then === 'function') {\n return new Promise((resolve, reject) => {\n (eventRes).then(\n () => { // fulfilled\n resolve(res);\n },\n (reason) => { // rejected\n reject(reason);\n });\n });\n }\n }\n return res;\n }\n\n // begin an event\n begin(category: Profiler.EventCategory, name: string, ctx?: WebGLContext): Event {\n if (!this._started) {\n throw new Error('profiler is not started yet');\n }\n if (ctx === undefined) {\n const startTime = now();\n this.flush(startTime);\n return new Event(category, name, startTime, e => this.endSync(e));\n } else {\n const timer: WebGLQuery = ctx.beginTimer();\n return new Event(category, name, 0, async e => this.end(e), timer, ctx);\n }\n }\n\n // end the specific event\n private async end(event: Event): Promise {\n const endTime: number = await event.checkTimer();\n if (this._timingEvents.length < this._maxNumberEvents) {\n this._timingEvents.push(new EventRecord(event.category, event.name, event.startTime, endTime));\n this.flush(endTime);\n }\n }\n\n private endSync(event: Event): void {\n const endTime: number = now();\n if (this._timingEvents.length < this._maxNumberEvents) {\n this._timingEvents.push(new EventRecord(event.category, event.name, event.startTime, endTime));\n this.flush(endTime);\n }\n }\n\n private logOneEvent(event: EventRecord) {\n Logger.verbose(\n `Profiler.${event.category}`,\n `${(event.endTime - event.startTime).toFixed(2)}ms on event '${event.name}' at ${event.endTime.toFixed(2)}`);\n }\n\n private flush(currentTime: number) {\n if (this._timingEvents.length - this._flushPointer >= this._flushBatchSize ||\n currentTime - this._flushTime >= this._flushIntervalInMilliseconds) {\n // should flush when either batch size accumlated or interval elepsed\n\n for (const previousPointer = this._flushPointer; this._flushPointer < previousPointer + this._flushBatchSize &&\n this._flushPointer < this._timingEvents.length;\n this._flushPointer++) {\n this.logOneEvent(this._timingEvents[this._flushPointer]);\n }\n\n this._flushTime = now();\n }\n }\n\n get started() {\n return this._started;\n }\n private _started = false;\n private _timingEvents: EventRecord[];\n\n private readonly _maxNumberEvents: number;\n\n private readonly _flushBatchSize: number;\n private readonly _flushIntervalInMilliseconds: number;\n\n private _flushTime: number;\n private _flushPointer = 0;\n}\n\n/**\n * returns a number to represent the current timestamp in a resolution as high as possible.\n */\nexport const now = (typeof performance !== 'undefined' && performance.now) ? () => performance.now() : Date.now;\n", "// Copyright (c) Microsoft Corporation. 
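// Editor's note: a sketch of the Profiler above; the event category/name and the timed work are
// illustrative, and the './instrument' import path is an assumption. event() accepts both
// synchronous and Promise-returning functions.
import { Profiler } from './instrument';

const profiler = Profiler.create({ maxNumberEvents: 1000, flushBatchSize: 20 });
profiler.start();
const result = profiler.event('op', 'Conv', () => {
  // ...synchronous work to be timed...
  return 42;
});
profiler.stop();  // logs any remaining timing events through Logger.verbose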
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from './graph';\nimport {OperatorImplementation, OperatorInitialization} from './operators';\n\nexport interface OpSet {\n domain: string;\n version: number;\n}\nexport declare namespace OpSet {\n /**\n * Domain of an opset, it can be an empty string(default value, represent for ai.onnx), or 'ai.onnx.ml'\n */\n type Domain = ''|'ai.onnx.ml'|'com.microsoft';\n /**\n * A resolve rule consists of 4 or 5 items: opType, opSetDomain, versionSelector, operatorImplementation and\n * operatorInitialization (optional)\n */\n type ResolveRule = [\n string, Domain, string, OperatorImplementation\n ]|[string, Domain, string, OperatorImplementation, OperatorInitialization];\n}\n\nexport function resolveOperator(node: Graph.Node, opsets: readonly OpSet[], rules: readonly OpSet.ResolveRule[]) {\n for (const rule of rules) {\n const opType = rule[0];\n const domain = rule[1];\n const versionSelector = rule[2];\n const opImpl = rule[3];\n const opInit = rule[4];\n\n if (node.opType === opType) { // operator type matches\n for (const opset of opsets) {\n // opset '' and 'ai.onnx' are considered the same.\n if (opset.domain === domain || (opset.domain === 'ai.onnx' && domain === '')) { // opset domain found\n if (matchSelector(opset.version, versionSelector)) {\n return {opImpl, opInit};\n }\n }\n }\n }\n }\n\n throw new TypeError(`cannot resolve operator '${node.opType}' with opsets: ${\n opsets.map(set => `${set.domain || 'ai.onnx'} v${set.version}`).join(', ')}`);\n}\n\nfunction matchSelector(version: number, selector: string): boolean {\n if (selector.endsWith('+')) {\n // minimum version match ('7+' expects version>=7)\n const rangeStart = Number.parseInt(selector.substring(0, selector.length - 1), 10);\n return !isNaN(rangeStart) && rangeStart <= version;\n } else if (selector.split('-').length === 2) {\n // range match ('6-8' expects 6<=version<=8)\n const pair = selector.split('-');\n const rangeStart = Number.parseInt(pair[0], 10);\n const rangeEnd = Number.parseInt(pair[1], 10);\n return !isNaN(rangeStart) && !isNaN(rangeEnd) && rangeStart <= version && version <= rangeEnd;\n } else {\n // exact match ('7' expects version===7)\n return Number.parseInt(selector, 10) === version;\n }\n}\n", "\"use strict\";\r\nexports.__esModule = true;\r\nvar Guid = /** @class */ (function () {\r\n function Guid(guid) {\r\n if (!guid) {\r\n throw new TypeError(\"Invalid argument; `value` has no value.\");\r\n }\r\n this.value = Guid.EMPTY;\r\n if (guid && Guid.isGuid(guid)) {\r\n this.value = guid;\r\n }\r\n }\r\n Guid.isGuid = function (guid) {\r\n var value = guid.toString();\r\n return guid && (guid instanceof Guid || Guid.validator.test(value));\r\n };\r\n Guid.create = function () {\r\n return new Guid([Guid.gen(2), Guid.gen(1), Guid.gen(1), Guid.gen(1), Guid.gen(3)].join(\"-\"));\r\n };\r\n Guid.createEmpty = function () {\r\n return new Guid(\"emptyguid\");\r\n };\r\n Guid.parse = function (guid) {\r\n return new Guid(guid);\r\n };\r\n Guid.raw = function () {\r\n return [Guid.gen(2), Guid.gen(1), Guid.gen(1), Guid.gen(1), Guid.gen(3)].join(\"-\");\r\n };\r\n Guid.gen = function (count) {\r\n var out = \"\";\r\n for (var i = 0; i < count; i++) {\r\n // tslint:disable-next-line:no-bitwise\r\n out += (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);\r\n }\r\n return out;\r\n };\r\n Guid.prototype.equals = function (other) {\r\n // Comparing string `value` against provided `guid` will auto-call\r\n // toString on `guid` for 
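// Editor's note: the version-selector grammar handled by matchSelector() above, shown as data that
// resolveOperator() rules can use; the expected booleans follow directly from the parsing rules in
// the code, and the operator name in the closing comment is hypothetical.
const selectorExamples: Array<[selector: string, opsetVersion: number, matches: boolean]> = [
  ['7+', 7, true],    // minimum-version match: version >= 7
  ['7+', 6, false],
  ['6-8', 7, true],   // range match: 6 <= version <= 8
  ['6-8', 9, false],
  ['11', 11, true],   // exact match
  ['11', 12, false],
];
// A ResolveRule such as ['Conv', '', '11+', convImpl] therefore applies when the model's
// default-domain opset is version 11 or later.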
comparison\r\n return Guid.isGuid(other) && this.value === other.toString();\r\n };\r\n Guid.prototype.isEmpty = function () {\r\n return this.value === Guid.EMPTY;\r\n };\r\n Guid.prototype.toString = function () {\r\n return this.value;\r\n };\r\n Guid.prototype.toJSON = function () {\r\n return {\r\n value: this.value\r\n };\r\n };\r\n Guid.validator = new RegExp(\"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$\", \"i\");\r\n Guid.EMPTY = \"00000000-0000-0000-0000-000000000000\";\r\n return Guid;\r\n}());\r\nexports.Guid = Guid;\r\n", "/**\n * @license\n * Copyright 2009 The Closure Library Authors\n * Copyright 2020 Daniel Wirtz / The long.js Authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n */\n\n// WebAssembly optimizations to do native i64 multiplication and divide\nvar wasm = null;\ntry {\n wasm = new WebAssembly.Instance(new WebAssembly.Module(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 13, 2, 96, 0, 1, 127, 96, 4, 127, 127, 127, 127, 1, 127, 3, 7, 6, 0, 1, 1, 1, 1, 1, 6, 6, 1, 127, 1, 65, 0, 11, 7, 50, 6, 3, 109, 117, 108, 0, 1, 5, 100, 105, 118, 95, 115, 0, 2, 5, 100, 105, 118, 95, 117, 0, 3, 5, 114, 101, 109, 95, 115, 0, 4, 5, 114, 101, 109, 95, 117, 0, 5, 8, 103, 101, 116, 95, 104, 105, 103, 104, 0, 0, 10, 191, 1, 6, 4, 0, 35, 0, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 126, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 127, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 128, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 129, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 130, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11\n ])), {}).exports;\n} catch (e) {\n // no wasm support :(\n}\n\n/**\n * Constructs a 64 bit two's-complement integer, given its low and high 32 bit values as *signed* integers.\n * See the from* functions below for more convenient ways of constructing Longs.\n * @exports Long\n * @class A Long class for representing a 64 bit two's-complement integer value.\n * @param {number} low The low (signed) 32 bits of the long\n * @param {number} high The high (signed) 32 bits of the long\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @constructor\n */\nfunction Long(low, high, unsigned) {\n\n /**\n * The low 32 bits as a signed value.\n * @type {number}\n */\n this.low = low | 0;\n\n /**\n * The high 32 bits as a signed value.\n * @type {number}\n */\n this.high = high | 0;\n\n /**\n * Whether unsigned or not.\n * @type {boolean}\n */\n this.unsigned = !!unsigned;\n}\n\n// The internal 
representation of a long is the two given signed, 32-bit values.\n// We use 32-bit pieces because these are the size of integers on which\n// Javascript performs bit-operations. For operations like addition and\n// multiplication, we split each number into 16 bit pieces, which can easily be\n// multiplied within Javascript's floating-point representation without overflow\n// or change in sign.\n//\n// In the algorithms below, we frequently reduce the negative case to the\n// positive case by negating the input(s) and then post-processing the result.\n// Note that we must ALWAYS check specially whether those values are MIN_VALUE\n// (-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as\n// a positive number, it overflows back into a negative). Not handling this\n// case would often result in infinite recursion.\n//\n// Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the from*\n// methods on which they depend.\n\n/**\n * An indicator used to reliably determine if an object is a Long or not.\n * @type {boolean}\n * @const\n * @private\n */\nLong.prototype.__isLong__;\n\nObject.defineProperty(Long.prototype, \"__isLong__\", { value: true });\n\n/**\n * @function\n * @param {*} obj Object\n * @returns {boolean}\n * @inner\n */\nfunction isLong(obj) {\n return (obj && obj[\"__isLong__\"]) === true;\n}\n\n/**\n * @function\n * @param {*} value number\n * @returns {number}\n * @inner\n */\nfunction ctz32(value) {\n var c = Math.clz32(value & -value);\n return value ? 31 - c : c;\n}\n\n/**\n * Tests if the specified object is a Long.\n * @function\n * @param {*} obj Object\n * @returns {boolean}\n */\nLong.isLong = isLong;\n\n/**\n * A cache of the Long representations of small integer values.\n * @type {!Object}\n * @inner\n */\nvar INT_CACHE = {};\n\n/**\n * A cache of the Long representations of small unsigned integer values.\n * @type {!Object}\n * @inner\n */\nvar UINT_CACHE = {};\n\n/**\n * @param {number} value\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromInt(value, unsigned) {\n var obj, cachedObj, cache;\n if (unsigned) {\n value >>>= 0;\n if (cache = (0 <= value && value < 256)) {\n cachedObj = UINT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, 0, true);\n if (cache)\n UINT_CACHE[value] = obj;\n return obj;\n } else {\n value |= 0;\n if (cache = (-128 <= value && value < 128)) {\n cachedObj = INT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, value < 0 ? -1 : 0, false);\n if (cache)\n INT_CACHE[value] = obj;\n return obj;\n }\n}\n\n/**\n * Returns a Long representing the given 32 bit integer value.\n * @function\n * @param {number} value The 32 bit integer in question\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long} The corresponding Long value\n */\nLong.fromInt = fromInt;\n\n/**\n * @param {number} value\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromNumber(value, unsigned) {\n if (isNaN(value))\n return unsigned ? 
UZERO : ZERO;\n if (unsigned) {\n if (value < 0)\n return UZERO;\n if (value >= TWO_PWR_64_DBL)\n return MAX_UNSIGNED_VALUE;\n } else {\n if (value <= -TWO_PWR_63_DBL)\n return MIN_VALUE;\n if (value + 1 >= TWO_PWR_63_DBL)\n return MAX_VALUE;\n }\n if (value < 0)\n return fromNumber(-value, unsigned).neg();\n return fromBits((value % TWO_PWR_32_DBL) | 0, (value / TWO_PWR_32_DBL) | 0, unsigned);\n}\n\n/**\n * Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned.\n * @function\n * @param {number} value The number in question\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long} The corresponding Long value\n */\nLong.fromNumber = fromNumber;\n\n/**\n * @param {number} lowBits\n * @param {number} highBits\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromBits(lowBits, highBits, unsigned) {\n return new Long(lowBits, highBits, unsigned);\n}\n\n/**\n * Returns a Long representing the 64 bit integer that comes by concatenating the given low and high bits. Each is\n * assumed to use 32 bits.\n * @function\n * @param {number} lowBits The low 32 bits\n * @param {number} highBits The high 32 bits\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long} The corresponding Long value\n */\nLong.fromBits = fromBits;\n\n/**\n * @function\n * @param {number} base\n * @param {number} exponent\n * @returns {number}\n * @inner\n */\nvar pow_dbl = Math.pow; // Used 4 times (4*8 to 15+4)\n\n/**\n * @param {string} str\n * @param {(boolean|number)=} unsigned\n * @param {number=} radix\n * @returns {!Long}\n * @inner\n */\nfunction fromString(str, unsigned, radix) {\n if (str.length === 0)\n throw Error('empty string');\n if (typeof unsigned === 'number') {\n // For goog.math.long compatibility\n radix = unsigned;\n unsigned = false;\n } else {\n unsigned = !!unsigned;\n }\n if (str === \"NaN\" || str === \"Infinity\" || str === \"+Infinity\" || str === \"-Infinity\")\n return unsigned ? 
UZERO : ZERO;\n radix = radix || 10;\n if (radix < 2 || 36 < radix)\n throw RangeError('radix');\n\n var p;\n if ((p = str.indexOf('-')) > 0)\n throw Error('interior hyphen');\n else if (p === 0) {\n return fromString(str.substring(1), unsigned, radix).neg();\n }\n\n // Do several (8) digits each time through the loop, so as to\n // minimize the calls to the very expensive emulated div.\n var radixToPower = fromNumber(pow_dbl(radix, 8));\n\n var result = ZERO;\n for (var i = 0; i < str.length; i += 8) {\n var size = Math.min(8, str.length - i),\n value = parseInt(str.substring(i, i + size), radix);\n if (size < 8) {\n var power = fromNumber(pow_dbl(radix, size));\n result = result.mul(power).add(fromNumber(value));\n } else {\n result = result.mul(radixToPower);\n result = result.add(fromNumber(value));\n }\n }\n result.unsigned = unsigned;\n return result;\n}\n\n/**\n * Returns a Long representation of the given string, written using the specified radix.\n * @function\n * @param {string} str The textual representation of the Long\n * @param {(boolean|number)=} unsigned Whether unsigned or not, defaults to signed\n * @param {number=} radix The radix in which the text is written (2-36), defaults to 10\n * @returns {!Long} The corresponding Long value\n */\nLong.fromString = fromString;\n\n/**\n * @function\n * @param {!Long|number|string|!{low: number, high: number, unsigned: boolean}} val\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */\nfunction fromValue(val, unsigned) {\n if (typeof val === 'number')\n return fromNumber(val, unsigned);\n if (typeof val === 'string')\n return fromString(val, unsigned);\n // Throws for non-objects, converts non-instanceof Long:\n return fromBits(val.low, val.high, typeof unsigned === 'boolean' ? 
unsigned : val.unsigned);\n}\n\n/**\n * Converts the specified value to a Long using the appropriate from* function for its type.\n * @function\n * @param {!Long|number|string|!{low: number, high: number, unsigned: boolean}} val Value\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {!Long}\n */\nLong.fromValue = fromValue;\n\n// NOTE: the compiler should inline these constant values below and then remove these variables, so there should be\n// no runtime penalty for these.\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_16_DBL = 1 << 16;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_24_DBL = 1 << 24;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL;\n\n/**\n * @type {number}\n * @const\n * @inner\n */\nvar TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2;\n\n/**\n * @type {!Long}\n * @const\n * @inner\n */\nvar TWO_PWR_24 = fromInt(TWO_PWR_24_DBL);\n\n/**\n * @type {!Long}\n * @inner\n */\nvar ZERO = fromInt(0);\n\n/**\n * Signed zero.\n * @type {!Long}\n */\nLong.ZERO = ZERO;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar UZERO = fromInt(0, true);\n\n/**\n * Unsigned zero.\n * @type {!Long}\n */\nLong.UZERO = UZERO;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar ONE = fromInt(1);\n\n/**\n * Signed one.\n * @type {!Long}\n */\nLong.ONE = ONE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar UONE = fromInt(1, true);\n\n/**\n * Unsigned one.\n * @type {!Long}\n */\nLong.UONE = UONE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar NEG_ONE = fromInt(-1);\n\n/**\n * Signed negative one.\n * @type {!Long}\n */\nLong.NEG_ONE = NEG_ONE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar MAX_VALUE = fromBits(0xFFFFFFFF | 0, 0x7FFFFFFF | 0, false);\n\n/**\n * Maximum signed value.\n * @type {!Long}\n */\nLong.MAX_VALUE = MAX_VALUE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar MAX_UNSIGNED_VALUE = fromBits(0xFFFFFFFF | 0, 0xFFFFFFFF | 0, true);\n\n/**\n * Maximum unsigned value.\n * @type {!Long}\n */\nLong.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE;\n\n/**\n * @type {!Long}\n * @inner\n */\nvar MIN_VALUE = fromBits(0, 0x80000000 | 0, false);\n\n/**\n * Minimum signed value.\n * @type {!Long}\n */\nLong.MIN_VALUE = MIN_VALUE;\n\n/**\n * @alias Long.prototype\n * @inner\n */\nvar LongPrototype = Long.prototype;\n\n/**\n * Converts the Long to a 32 bit integer, assuming it is a 32 bit integer.\n * @this {!Long}\n * @returns {number}\n */\nLongPrototype.toInt = function toInt() {\n return this.unsigned ? 
this.low >>> 0 : this.low;\n};\n\n/**\n * Converts the Long to a the nearest floating-point representation of this value (double, 53 bit mantissa).\n * @this {!Long}\n * @returns {number}\n */\nLongPrototype.toNumber = function toNumber() {\n if (this.unsigned)\n return ((this.high >>> 0) * TWO_PWR_32_DBL) + (this.low >>> 0);\n return this.high * TWO_PWR_32_DBL + (this.low >>> 0);\n};\n\n/**\n * Converts the Long to a string written in the specified radix.\n * @this {!Long}\n * @param {number=} radix Radix (2-36), defaults to 10\n * @returns {string}\n * @override\n * @throws {RangeError} If `radix` is out of range\n */\nLongPrototype.toString = function toString(radix) {\n radix = radix || 10;\n if (radix < 2 || 36 < radix)\n throw RangeError('radix');\n if (this.isZero())\n return '0';\n if (this.isNegative()) { // Unsigned Longs are never negative\n if (this.eq(MIN_VALUE)) {\n // We need to change the Long value before it can be negated, so we remove\n // the bottom-most digit in this base and then recurse to do the rest.\n var radixLong = fromNumber(radix),\n div = this.div(radixLong),\n rem1 = div.mul(radixLong).sub(this);\n return div.toString(radix) + rem1.toInt().toString(radix);\n } else\n return '-' + this.neg().toString(radix);\n }\n\n // Do several (6) digits each time through the loop, so as to\n // minimize the calls to the very expensive emulated div.\n var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned),\n rem = this;\n var result = '';\n while (true) {\n var remDiv = rem.div(radixToPower),\n intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0,\n digits = intval.toString(radix);\n rem = remDiv;\n if (rem.isZero())\n return digits + result;\n else {\n while (digits.length < 6)\n digits = '0' + digits;\n result = '' + digits + result;\n }\n }\n};\n\n/**\n * Gets the high 32 bits as a signed integer.\n * @this {!Long}\n * @returns {number} Signed high bits\n */\nLongPrototype.getHighBits = function getHighBits() {\n return this.high;\n};\n\n/**\n * Gets the high 32 bits as an unsigned integer.\n * @this {!Long}\n * @returns {number} Unsigned high bits\n */\nLongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() {\n return this.high >>> 0;\n};\n\n/**\n * Gets the low 32 bits as a signed integer.\n * @this {!Long}\n * @returns {number} Signed low bits\n */\nLongPrototype.getLowBits = function getLowBits() {\n return this.low;\n};\n\n/**\n * Gets the low 32 bits as an unsigned integer.\n * @this {!Long}\n * @returns {number} Unsigned low bits\n */\nLongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() {\n return this.low >>> 0;\n};\n\n/**\n * Gets the number of bits needed to represent the absolute value of this Long.\n * @this {!Long}\n * @returns {number}\n */\nLongPrototype.getNumBitsAbs = function getNumBitsAbs() {\n if (this.isNegative()) // Unsigned Longs are never negative\n return this.eq(MIN_VALUE) ? 64 : this.neg().getNumBitsAbs();\n var val = this.high != 0 ? this.high : this.low;\n for (var bit = 31; bit > 0; bit--)\n if ((val & (1 << bit)) != 0)\n break;\n return this.high != 0 ? bit + 33 : bit + 1;\n};\n\n/**\n * Tests if this Long's value equals zero.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isZero = function isZero() {\n return this.high === 0 && this.low === 0;\n};\n\n/**\n * Tests if this Long's value equals zero. 
This is an alias of {@link Long#isZero}.\n * @returns {boolean}\n */\nLongPrototype.eqz = LongPrototype.isZero;\n\n/**\n * Tests if this Long's value is negative.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isNegative = function isNegative() {\n return !this.unsigned && this.high < 0;\n};\n\n/**\n * Tests if this Long's value is positive or zero.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isPositive = function isPositive() {\n return this.unsigned || this.high >= 0;\n};\n\n/**\n * Tests if this Long's value is odd.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isOdd = function isOdd() {\n return (this.low & 1) === 1;\n};\n\n/**\n * Tests if this Long's value is even.\n * @this {!Long}\n * @returns {boolean}\n */\nLongPrototype.isEven = function isEven() {\n return (this.low & 1) === 0;\n};\n\n/**\n * Tests if this Long's value equals the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.equals = function equals(other) {\n if (!isLong(other))\n other = fromValue(other);\n if (this.unsigned !== other.unsigned && (this.high >>> 31) === 1 && (other.high >>> 31) === 1)\n return false;\n return this.high === other.high && this.low === other.low;\n};\n\n/**\n * Tests if this Long's value equals the specified's. This is an alias of {@link Long#equals}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.eq = LongPrototype.equals;\n\n/**\n * Tests if this Long's value differs from the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.notEquals = function notEquals(other) {\n return !this.eq(/* validates */ other);\n};\n\n/**\n * Tests if this Long's value differs from the specified's. This is an alias of {@link Long#notEquals}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.neq = LongPrototype.notEquals;\n\n/**\n * Tests if this Long's value differs from the specified's. This is an alias of {@link Long#notEquals}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.ne = LongPrototype.notEquals;\n\n/**\n * Tests if this Long's value is less than the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lessThan = function lessThan(other) {\n return this.comp(/* validates */ other) < 0;\n};\n\n/**\n * Tests if this Long's value is less than the specified's. This is an alias of {@link Long#lessThan}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lt = LongPrototype.lessThan;\n\n/**\n * Tests if this Long's value is less than or equal the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lessThanOrEqual = function lessThanOrEqual(other) {\n return this.comp(/* validates */ other) <= 0;\n};\n\n/**\n * Tests if this Long's value is less than or equal the specified's. This is an alias of {@link Long#lessThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.lte = LongPrototype.lessThanOrEqual;\n\n/**\n * Tests if this Long's value is less than or equal the specified's. 
This is an alias of {@link Long#lessThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.le = LongPrototype.lessThanOrEqual;\n\n/**\n * Tests if this Long's value is greater than the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.greaterThan = function greaterThan(other) {\n return this.comp(/* validates */ other) > 0;\n};\n\n/**\n * Tests if this Long's value is greater than the specified's. This is an alias of {@link Long#greaterThan}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.gt = LongPrototype.greaterThan;\n\n/**\n * Tests if this Long's value is greater than or equal the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) {\n return this.comp(/* validates */ other) >= 0;\n};\n\n/**\n * Tests if this Long's value is greater than or equal the specified's. This is an alias of {@link Long#greaterThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.gte = LongPrototype.greaterThanOrEqual;\n\n/**\n * Tests if this Long's value is greater than or equal the specified's. This is an alias of {@link Long#greaterThanOrEqual}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {boolean}\n */\nLongPrototype.ge = LongPrototype.greaterThanOrEqual;\n\n/**\n * Compares this Long's value with the specified's.\n * @this {!Long}\n * @param {!Long|number|string} other Other value\n * @returns {number} 0 if they are the same, 1 if the this is greater and -1\n * if the given one is greater\n */\nLongPrototype.compare = function compare(other) {\n if (!isLong(other))\n other = fromValue(other);\n if (this.eq(other))\n return 0;\n var thisNeg = this.isNegative(),\n otherNeg = other.isNegative();\n if (thisNeg && !otherNeg)\n return -1;\n if (!thisNeg && otherNeg)\n return 1;\n // At this point the sign bits are the same\n if (!this.unsigned)\n return this.sub(other).isNegative() ? -1 : 1;\n // Both are positive if at least one is unsigned\n return (other.high >>> 0) > (this.high >>> 0) || (other.high === this.high && (other.low >>> 0) > (this.low >>> 0)) ? -1 : 1;\n};\n\n/**\n * Compares this Long's value with the specified's. This is an alias of {@link Long#compare}.\n * @function\n * @param {!Long|number|string} other Other value\n * @returns {number} 0 if they are the same, 1 if the this is greater and -1\n * if the given one is greater\n */\nLongPrototype.comp = LongPrototype.compare;\n\n/**\n * Negates this Long's value.\n * @this {!Long}\n * @returns {!Long} Negated Long\n */\nLongPrototype.negate = function negate() {\n if (!this.unsigned && this.eq(MIN_VALUE))\n return MIN_VALUE;\n return this.not().add(ONE);\n};\n\n/**\n * Negates this Long's value. 
This is an alias of {@link Long#negate}.\n * @function\n * @returns {!Long} Negated Long\n */\nLongPrototype.neg = LongPrototype.negate;\n\n/**\n * Returns the sum of this and the specified Long.\n * @this {!Long}\n * @param {!Long|number|string} addend Addend\n * @returns {!Long} Sum\n */\nLongPrototype.add = function add(addend) {\n if (!isLong(addend))\n addend = fromValue(addend);\n\n // Divide each number into 4 chunks of 16 bits, and then sum the chunks.\n\n var a48 = this.high >>> 16;\n var a32 = this.high & 0xFFFF;\n var a16 = this.low >>> 16;\n var a00 = this.low & 0xFFFF;\n\n var b48 = addend.high >>> 16;\n var b32 = addend.high & 0xFFFF;\n var b16 = addend.low >>> 16;\n var b00 = addend.low & 0xFFFF;\n\n var c48 = 0, c32 = 0, c16 = 0, c00 = 0;\n c00 += a00 + b00;\n c16 += c00 >>> 16;\n c00 &= 0xFFFF;\n c16 += a16 + b16;\n c32 += c16 >>> 16;\n c16 &= 0xFFFF;\n c32 += a32 + b32;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c48 += a48 + b48;\n c48 &= 0xFFFF;\n return fromBits((c16 << 16) | c00, (c48 << 16) | c32, this.unsigned);\n};\n\n/**\n * Returns the difference of this and the specified Long.\n * @this {!Long}\n * @param {!Long|number|string} subtrahend Subtrahend\n * @returns {!Long} Difference\n */\nLongPrototype.subtract = function subtract(subtrahend) {\n if (!isLong(subtrahend))\n subtrahend = fromValue(subtrahend);\n return this.add(subtrahend.neg());\n};\n\n/**\n * Returns the difference of this and the specified Long. This is an alias of {@link Long#subtract}.\n * @function\n * @param {!Long|number|string} subtrahend Subtrahend\n * @returns {!Long} Difference\n */\nLongPrototype.sub = LongPrototype.subtract;\n\n/**\n * Returns the product of this and the specified Long.\n * @this {!Long}\n * @param {!Long|number|string} multiplier Multiplier\n * @returns {!Long} Product\n */\nLongPrototype.multiply = function multiply(multiplier) {\n if (this.isZero())\n return this;\n if (!isLong(multiplier))\n multiplier = fromValue(multiplier);\n\n // use wasm support if present\n if (wasm) {\n var low = wasm[\"mul\"](this.low,\n this.high,\n multiplier.low,\n multiplier.high);\n return fromBits(low, wasm[\"get_high\"](), this.unsigned);\n }\n\n if (multiplier.isZero())\n return this.unsigned ? UZERO : ZERO;\n if (this.eq(MIN_VALUE))\n return multiplier.isOdd() ? MIN_VALUE : ZERO;\n if (multiplier.eq(MIN_VALUE))\n return this.isOdd() ? 
MIN_VALUE : ZERO;\n\n if (this.isNegative()) {\n if (multiplier.isNegative())\n return this.neg().mul(multiplier.neg());\n else\n return this.neg().mul(multiplier).neg();\n } else if (multiplier.isNegative())\n return this.mul(multiplier.neg()).neg();\n\n // If both longs are small, use float multiplication\n if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24))\n return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned);\n\n // Divide each long into 4 chunks of 16 bits, and then add up 4x4 products.\n // We can skip products that would overflow.\n\n var a48 = this.high >>> 16;\n var a32 = this.high & 0xFFFF;\n var a16 = this.low >>> 16;\n var a00 = this.low & 0xFFFF;\n\n var b48 = multiplier.high >>> 16;\n var b32 = multiplier.high & 0xFFFF;\n var b16 = multiplier.low >>> 16;\n var b00 = multiplier.low & 0xFFFF;\n\n var c48 = 0, c32 = 0, c16 = 0, c00 = 0;\n c00 += a00 * b00;\n c16 += c00 >>> 16;\n c00 &= 0xFFFF;\n c16 += a16 * b00;\n c32 += c16 >>> 16;\n c16 &= 0xFFFF;\n c16 += a00 * b16;\n c32 += c16 >>> 16;\n c16 &= 0xFFFF;\n c32 += a32 * b00;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c32 += a16 * b16;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c32 += a00 * b32;\n c48 += c32 >>> 16;\n c32 &= 0xFFFF;\n c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48;\n c48 &= 0xFFFF;\n return fromBits((c16 << 16) | c00, (c48 << 16) | c32, this.unsigned);\n};\n\n/**\n * Returns the product of this and the specified Long. This is an alias of {@link Long#multiply}.\n * @function\n * @param {!Long|number|string} multiplier Multiplier\n * @returns {!Long} Product\n */\nLongPrototype.mul = LongPrototype.multiply;\n\n/**\n * Returns this Long divided by the specified. The result is signed if this Long is signed or\n * unsigned if this Long is unsigned.\n * @this {!Long}\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Quotient\n */\nLongPrototype.divide = function divide(divisor) {\n if (!isLong(divisor))\n divisor = fromValue(divisor);\n if (divisor.isZero())\n throw Error('division by zero');\n\n // use wasm support if present\n if (wasm) {\n // guard against signed division overflow: the largest\n // negative number / -1 would be 1 larger than the largest\n // positive number, due to two's complement.\n if (!this.unsigned &&\n this.high === -0x80000000 &&\n divisor.low === -1 && divisor.high === -1) {\n // be consistent with non-wasm code path\n return this;\n }\n var low = (this.unsigned ? wasm[\"div_u\"] : wasm[\"div_s\"])(\n this.low,\n this.high,\n divisor.low,\n divisor.high\n );\n return fromBits(low, wasm[\"get_high\"](), this.unsigned);\n }\n\n if (this.isZero())\n return this.unsigned ? UZERO : ZERO;\n var approx, rem, res;\n if (!this.unsigned) {\n // This section is only relevant for signed longs and is derived from the\n // closure library as a whole.\n if (this.eq(MIN_VALUE)) {\n if (divisor.eq(ONE) || divisor.eq(NEG_ONE))\n return MIN_VALUE; // recall that -MIN_VALUE == MIN_VALUE\n else if (divisor.eq(MIN_VALUE))\n return ONE;\n else {\n // At this point, we have |other| >= 2, so |this/other| < |MIN_VALUE|.\n var halfThis = this.shr(1);\n approx = halfThis.div(divisor).shl(1);\n if (approx.eq(ZERO)) {\n return divisor.isNegative() ? ONE : NEG_ONE;\n } else {\n rem = this.sub(divisor.mul(approx));\n res = approx.add(rem.div(divisor));\n return res;\n }\n }\n } else if (divisor.eq(MIN_VALUE))\n return this.unsigned ? 
UZERO : ZERO;\n if (this.isNegative()) {\n if (divisor.isNegative())\n return this.neg().div(divisor.neg());\n return this.neg().div(divisor).neg();\n } else if (divisor.isNegative())\n return this.div(divisor.neg()).neg();\n res = ZERO;\n } else {\n // The algorithm below has not been made for unsigned longs. It's therefore\n // required to take special care of the MSB prior to running it.\n if (!divisor.unsigned)\n divisor = divisor.toUnsigned();\n if (divisor.gt(this))\n return UZERO;\n if (divisor.gt(this.shru(1))) // 15 >>> 1 = 7 ; with divisor = 8 ; true\n return UONE;\n res = UZERO;\n }\n\n // Repeat the following until the remainder is less than other: find a\n // floating-point that approximates remainder / other *from below*, add this\n // into the result, and subtract it from the remainder. It is critical that\n // the approximate value is less than or equal to the real value so that the\n // remainder never becomes negative.\n rem = this;\n while (rem.gte(divisor)) {\n // Approximate the result of division. This may be a little greater or\n // smaller than the actual value.\n approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber()));\n\n // We will tweak the approximate result by changing it in the 48-th digit or\n // the smallest non-fractional digit, whichever is larger.\n var log2 = Math.ceil(Math.log(approx) / Math.LN2),\n delta = (log2 <= 48) ? 1 : pow_dbl(2, log2 - 48),\n\n // Decrease the approximation until it is smaller than the remainder. Note\n // that if it is too large, the product overflows and is negative.\n approxRes = fromNumber(approx),\n approxRem = approxRes.mul(divisor);\n while (approxRem.isNegative() || approxRem.gt(rem)) {\n approx -= delta;\n approxRes = fromNumber(approx, this.unsigned);\n approxRem = approxRes.mul(divisor);\n }\n\n // We know the answer can't be zero... and actually, zero would cause\n // infinite recursion since we would make no progress.\n if (approxRes.isZero())\n approxRes = ONE;\n\n res = res.add(approxRes);\n rem = rem.sub(approxRem);\n }\n return res;\n};\n\n/**\n * Returns this Long divided by the specified. This is an alias of {@link Long#divide}.\n * @function\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Quotient\n */\nLongPrototype.div = LongPrototype.divide;\n\n/**\n * Returns this Long modulo the specified.\n * @this {!Long}\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Remainder\n */\nLongPrototype.modulo = function modulo(divisor) {\n if (!isLong(divisor))\n divisor = fromValue(divisor);\n\n // use wasm support if present\n if (wasm) {\n var low = (this.unsigned ? wasm[\"rem_u\"] : wasm[\"rem_s\"])(\n this.low,\n this.high,\n divisor.low,\n divisor.high\n );\n return fromBits(low, wasm[\"get_high\"](), this.unsigned);\n }\n\n return this.sub(this.div(divisor).mul(divisor));\n};\n\n/**\n * Returns this Long modulo the specified. This is an alias of {@link Long#modulo}.\n * @function\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Remainder\n */\nLongPrototype.mod = LongPrototype.modulo;\n\n/**\n * Returns this Long modulo the specified. 
This is an alias of {@link Long#modulo}.\n * @function\n * @param {!Long|number|string} divisor Divisor\n * @returns {!Long} Remainder\n */\nLongPrototype.rem = LongPrototype.modulo;\n\n/**\n * Returns the bitwise NOT of this Long.\n * @this {!Long}\n * @returns {!Long}\n */\nLongPrototype.not = function not() {\n return fromBits(~this.low, ~this.high, this.unsigned);\n};\n\n/**\n * Returns count leading zeros of this Long.\n * @this {!Long}\n * @returns {!number}\n */\nLongPrototype.countLeadingZeros = function countLeadingZeros() {\n return this.high ? Math.clz32(this.high) : Math.clz32(this.low) + 32;\n};\n\n/**\n * Returns count leading zeros. This is an alias of {@link Long#countLeadingZeros}.\n * @function\n * @param {!Long}\n * @returns {!number}\n */\nLongPrototype.clz = LongPrototype.countLeadingZeros;\n\n/**\n * Returns count trailing zeros of this Long.\n * @this {!Long}\n * @returns {!number}\n */\nLongPrototype.countTrailingZeros = function countTrailingZeros() {\n return this.low ? ctz32(this.low) : ctz32(this.high) + 32;\n};\n\n/**\n * Returns count trailing zeros. This is an alias of {@link Long#countTrailingZeros}.\n * @function\n * @param {!Long}\n * @returns {!number}\n */\nLongPrototype.ctz = LongPrototype.countTrailingZeros;\n\n/**\n * Returns the bitwise AND of this Long and the specified.\n * @this {!Long}\n * @param {!Long|number|string} other Other Long\n * @returns {!Long}\n */\nLongPrototype.and = function and(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low & other.low, this.high & other.high, this.unsigned);\n};\n\n/**\n * Returns the bitwise OR of this Long and the specified.\n * @this {!Long}\n * @param {!Long|number|string} other Other Long\n * @returns {!Long}\n */\nLongPrototype.or = function or(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low | other.low, this.high | other.high, this.unsigned);\n};\n\n/**\n * Returns the bitwise XOR of this Long and the given one.\n * @this {!Long}\n * @param {!Long|number|string} other Other Long\n * @returns {!Long}\n */\nLongPrototype.xor = function xor(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned);\n};\n\n/**\n * Returns this Long with bits shifted to the left by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shiftLeft = function shiftLeft(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n if ((numBits &= 63) === 0)\n return this;\n else if (numBits < 32)\n return fromBits(this.low << numBits, (this.high << numBits) | (this.low >>> (32 - numBits)), this.unsigned);\n else\n return fromBits(0, this.low << (numBits - 32), this.unsigned);\n};\n\n/**\n * Returns this Long with bits shifted to the left by the given amount. 
This is an alias of {@link Long#shiftLeft}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shl = LongPrototype.shiftLeft;\n\n/**\n * Returns this Long with bits arithmetically shifted to the right by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shiftRight = function shiftRight(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n if ((numBits &= 63) === 0)\n return this;\n else if (numBits < 32)\n return fromBits((this.low >>> numBits) | (this.high << (32 - numBits)), this.high >> numBits, this.unsigned);\n else\n return fromBits(this.high >> (numBits - 32), this.high >= 0 ? 0 : -1, this.unsigned);\n};\n\n/**\n * Returns this Long with bits arithmetically shifted to the right by the given amount. This is an alias of {@link Long#shiftRight}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shr = LongPrototype.shiftRight;\n\n/**\n * Returns this Long with bits logically shifted to the right by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) {\n if (isLong(numBits)) numBits = numBits.toInt();\n if ((numBits &= 63) === 0) return this;\n if (numBits < 32) return fromBits((this.low >>> numBits) | (this.high << (32 - numBits)), this.high >>> numBits, this.unsigned);\n if (numBits === 32) return fromBits(this.high, 0, this.unsigned);\n return fromBits(this.high >>> (numBits - 32), 0, this.unsigned);\n};\n\n/**\n * Returns this Long with bits logically shifted to the right by the given amount. This is an alias of {@link Long#shiftRightUnsigned}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shru = LongPrototype.shiftRightUnsigned;\n\n/**\n * Returns this Long with bits logically shifted to the right by the given amount. This is an alias of {@link Long#shiftRightUnsigned}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Shifted Long\n */\nLongPrototype.shr_u = LongPrototype.shiftRightUnsigned;\n\n/**\n * Returns this Long with bits rotated to the left by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotateLeft = function rotateLeft(numBits) {\n var b;\n if (isLong(numBits)) numBits = numBits.toInt();\n if ((numBits &= 63) === 0) return this;\n if (numBits === 32) return fromBits(this.high, this.low, this.unsigned);\n if (numBits < 32) {\n b = (32 - numBits);\n return fromBits(((this.low << numBits) | (this.high >>> b)), ((this.high << numBits) | (this.low >>> b)), this.unsigned);\n }\n numBits -= 32;\n b = (32 - numBits);\n return fromBits(((this.high << numBits) | (this.low >>> b)), ((this.low << numBits) | (this.high >>> b)), this.unsigned);\n}\n/**\n * Returns this Long with bits rotated to the left by the given amount. 
This is an alias of {@link Long#rotateLeft}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotl = LongPrototype.rotateLeft;\n\n/**\n * Returns this Long with bits rotated to the right by the given amount.\n * @this {!Long}\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotateRight = function rotateRight(numBits) {\n var b;\n if (isLong(numBits)) numBits = numBits.toInt();\n if ((numBits &= 63) === 0) return this;\n if (numBits === 32) return fromBits(this.high, this.low, this.unsigned);\n if (numBits < 32) {\n b = (32 - numBits);\n return fromBits(((this.high << b) | (this.low >>> numBits)), ((this.low << b) | (this.high >>> numBits)), this.unsigned);\n }\n numBits -= 32;\n b = (32 - numBits);\n return fromBits(((this.low << b) | (this.high >>> numBits)), ((this.high << b) | (this.low >>> numBits)), this.unsigned);\n}\n/**\n * Returns this Long with bits rotated to the right by the given amount. This is an alias of {@link Long#rotateRight}.\n * @function\n * @param {number|!Long} numBits Number of bits\n * @returns {!Long} Rotated Long\n */\nLongPrototype.rotr = LongPrototype.rotateRight;\n\n/**\n * Converts this Long to signed.\n * @this {!Long}\n * @returns {!Long} Signed long\n */\nLongPrototype.toSigned = function toSigned() {\n if (!this.unsigned)\n return this;\n return fromBits(this.low, this.high, false);\n};\n\n/**\n * Converts this Long to unsigned.\n * @this {!Long}\n * @returns {!Long} Unsigned long\n */\nLongPrototype.toUnsigned = function toUnsigned() {\n if (this.unsigned)\n return this;\n return fromBits(this.low, this.high, true);\n};\n\n/**\n * Converts this Long to its byte representation.\n * @param {boolean=} le Whether little or big endian, defaults to big endian\n * @this {!Long}\n * @returns {!Array.} Byte representation\n */\nLongPrototype.toBytes = function toBytes(le) {\n return le ? this.toBytesLE() : this.toBytesBE();\n};\n\n/**\n * Converts this Long to its little endian byte representation.\n * @this {!Long}\n * @returns {!Array.} Little endian byte representation\n */\nLongPrototype.toBytesLE = function toBytesLE() {\n var hi = this.high,\n lo = this.low;\n return [\n lo & 0xff,\n lo >>> 8 & 0xff,\n lo >>> 16 & 0xff,\n lo >>> 24,\n hi & 0xff,\n hi >>> 8 & 0xff,\n hi >>> 16 & 0xff,\n hi >>> 24\n ];\n};\n\n/**\n * Converts this Long to its big endian byte representation.\n * @this {!Long}\n * @returns {!Array.} Big endian byte representation\n */\nLongPrototype.toBytesBE = function toBytesBE() {\n var hi = this.high,\n lo = this.low;\n return [\n hi >>> 24,\n hi >>> 16 & 0xff,\n hi >>> 8 & 0xff,\n hi & 0xff,\n lo >>> 24,\n lo >>> 16 & 0xff,\n lo >>> 8 & 0xff,\n lo & 0xff\n ];\n};\n\n/**\n * Creates a Long from its byte representation.\n * @param {!Array.} bytes Byte representation\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @param {boolean=} le Whether little or big endian, defaults to big endian\n * @returns {Long} The corresponding Long value\n */\nLong.fromBytes = function fromBytes(bytes, unsigned, le) {\n return le ? 
Long.fromBytesLE(bytes, unsigned) : Long.fromBytesBE(bytes, unsigned);\n};\n\n/**\n * Creates a Long from its little endian byte representation.\n * @param {!Array.} bytes Little endian byte representation\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {Long} The corresponding Long value\n */\nLong.fromBytesLE = function fromBytesLE(bytes, unsigned) {\n return new Long(\n bytes[0] |\n bytes[1] << 8 |\n bytes[2] << 16 |\n bytes[3] << 24,\n bytes[4] |\n bytes[5] << 8 |\n bytes[6] << 16 |\n bytes[7] << 24,\n unsigned\n );\n};\n\n/**\n * Creates a Long from its big endian byte representation.\n * @param {!Array.} bytes Big endian byte representation\n * @param {boolean=} unsigned Whether unsigned or not, defaults to signed\n * @returns {Long} The corresponding Long value\n */\nLong.fromBytesBE = function fromBytesBE(bytes, unsigned) {\n return new Long(\n bytes[4] << 24 |\n bytes[5] << 16 |\n bytes[6] << 8 |\n bytes[7],\n bytes[0] << 24 |\n bytes[1] << 16 |\n bytes[2] << 8 |\n bytes[3],\n unsigned\n );\n};\n\nexport default Long;\n", "/// @file\n/// @addtogroup flatbuffers_javascript_api\n/// @{\n/// @cond FLATBUFFERS_INTERNAL\n\n/**\n * @fileoverview\n *\n * Need to suppress 'global this' error so the Node.js export line doesn't cause\n * closure compile to error out.\n * @suppress {globalThis}\n */\n\n/**\n * @const\n * @namespace\n */\nvar flatbuffers = {};\n\n/**\n * @typedef {number}\n */\nflatbuffers.Offset;\n\n/**\n * @typedef {{\n * bb: flatbuffers.ByteBuffer,\n * bb_pos: number\n * }}\n */\nflatbuffers.Table;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.SIZEOF_SHORT = 2;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.SIZEOF_INT = 4;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.FILE_IDENTIFIER_LENGTH = 4;\n\n/**\n * @type {number}\n * @const\n */\nflatbuffers.SIZE_PREFIX_LENGTH = 4;\n\n/**\n * @enum {number}\n */\nflatbuffers.Encoding = {\n UTF8_BYTES: 1,\n UTF16_STRING: 2\n};\n\n/**\n * @type {Int32Array}\n * @const\n */\nflatbuffers.int32 = new Int32Array(2);\n\n/**\n * @type {Float32Array}\n * @const\n */\nflatbuffers.float32 = new Float32Array(flatbuffers.int32.buffer);\n\n/**\n * @type {Float64Array}\n * @const\n */\nflatbuffers.float64 = new Float64Array(flatbuffers.int32.buffer);\n\n/**\n * @type {boolean}\n * @const\n */\nflatbuffers.isLittleEndian = new Uint16Array(new Uint8Array([1, 0]).buffer)[0] === 1;\n\n////////////////////////////////////////////////////////////////////////////////\n\n/**\n * @constructor\n * @param {number} low\n * @param {number} high\n */\nflatbuffers.Long = function(low, high) {\n /**\n * @type {number}\n * @const\n */\n this.low = low | 0;\n\n /**\n * @type {number}\n * @const\n */\n this.high = high | 0;\n};\n\n/**\n * @param {number} low\n * @param {number} high\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.Long.create = function(low, high) {\n // Special-case zero to avoid GC overhead for default values\n return low == 0 && high == 0 ? 
flatbuffers.Long.ZERO : new flatbuffers.Long(low, high);\n};\n\n/**\n * @returns {number}\n */\nflatbuffers.Long.prototype.toFloat64 = function() {\n return (this.low >>> 0) + this.high * 0x100000000;\n};\n\n/**\n * @param {flatbuffers.Long} other\n * @returns {boolean}\n */\nflatbuffers.Long.prototype.equals = function(other) {\n return this.low == other.low && this.high == other.high;\n};\n\n/**\n * @type {!flatbuffers.Long}\n * @const\n */\nflatbuffers.Long.ZERO = new flatbuffers.Long(0, 0);\n\n/// @endcond\n////////////////////////////////////////////////////////////////////////////////\n/**\n * Create a FlatBufferBuilder.\n *\n * @constructor\n * @param {number=} opt_initial_size\n */\nflatbuffers.Builder = function(opt_initial_size) {\n if (!opt_initial_size) {\n var initial_size = 1024;\n } else {\n var initial_size = opt_initial_size;\n }\n\n /**\n * @type {flatbuffers.ByteBuffer}\n * @private\n */\n this.bb = flatbuffers.ByteBuffer.allocate(initial_size);\n\n /**\n * Remaining space in the ByteBuffer.\n *\n * @type {number}\n * @private\n */\n this.space = initial_size;\n\n /**\n * Minimum alignment encountered so far.\n *\n * @type {number}\n * @private\n */\n this.minalign = 1;\n\n /**\n * The vtable for the current table.\n *\n * @type {Array.}\n * @private\n */\n this.vtable = null;\n\n /**\n * The amount of fields we're actually using.\n *\n * @type {number}\n * @private\n */\n this.vtable_in_use = 0;\n\n /**\n * Whether we are currently serializing a table.\n *\n * @type {boolean}\n * @private\n */\n this.isNested = false;\n\n /**\n * Starting offset of the current struct/table.\n *\n * @type {number}\n * @private\n */\n this.object_start = 0;\n\n /**\n * List of offsets of all vtables.\n *\n * @type {Array.}\n * @private\n */\n this.vtables = [];\n\n /**\n * For the current vector being built.\n *\n * @type {number}\n * @private\n */\n this.vector_num_elems = 0;\n\n /**\n * False omits default values from the serialized data\n *\n * @type {boolean}\n * @private\n */\n this.force_defaults = false;\n};\n\nflatbuffers.Builder.prototype.clear = function() {\n this.bb.clear();\n this.space = this.bb.capacity();\n this.minalign = 1;\n this.vtable = null;\n this.vtable_in_use = 0;\n this.isNested = false;\n this.object_start = 0;\n this.vtables = [];\n this.vector_num_elems = 0;\n this.force_defaults = false;\n};\n\n/**\n * In order to save space, fields that are set to their default value\n * don't get serialized into the buffer. Forcing defaults provides a\n * way to manually disable this optimization.\n *\n * @param {boolean} forceDefaults true always serializes default values\n */\nflatbuffers.Builder.prototype.forceDefaults = function(forceDefaults) {\n this.force_defaults = forceDefaults;\n};\n\n/**\n * Get the ByteBuffer representing the FlatBuffer. Only call this after you've\n * called finish(). The actual data starts at the ByteBuffer's current position,\n * not necessarily at 0.\n *\n * @returns {flatbuffers.ByteBuffer}\n */\nflatbuffers.Builder.prototype.dataBuffer = function() {\n return this.bb;\n};\n\n/**\n * Get the bytes representing the FlatBuffer. Only call this after you've\n * called finish().\n *\n * @returns {!Uint8Array}\n */\nflatbuffers.Builder.prototype.asUint8Array = function() {\n return this.bb.bytes().subarray(this.bb.position(), this.bb.position() + this.offset());\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * Prepare to write an element of `size` after `additional_bytes` have been\n * written, e.g. 
if you write a string, you need to align such the int length\n * field is aligned to 4 bytes, and the string data follows it directly. If all\n * you need to do is alignment, `additional_bytes` will be 0.\n *\n * @param {number} size This is the of the new element to write\n * @param {number} additional_bytes The padding size\n */\nflatbuffers.Builder.prototype.prep = function(size, additional_bytes) {\n // Track the biggest thing we've ever aligned to.\n if (size > this.minalign) {\n this.minalign = size;\n }\n\n // Find the amount of alignment needed such that `size` is properly\n // aligned after `additional_bytes`\n var align_size = ((~(this.bb.capacity() - this.space + additional_bytes)) + 1) & (size - 1);\n\n // Reallocate the buffer if needed.\n while (this.space < align_size + size + additional_bytes) {\n var old_buf_size = this.bb.capacity();\n this.bb = flatbuffers.Builder.growByteBuffer(this.bb);\n this.space += this.bb.capacity() - old_buf_size;\n }\n\n this.pad(align_size);\n};\n\n/**\n * @param {number} byte_size\n */\nflatbuffers.Builder.prototype.pad = function(byte_size) {\n for (var i = 0; i < byte_size; i++) {\n this.bb.writeInt8(--this.space, 0);\n }\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeInt8 = function(value) {\n this.bb.writeInt8(this.space -= 1, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeInt16 = function(value) {\n this.bb.writeInt16(this.space -= 2, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeInt32 = function(value) {\n this.bb.writeInt32(this.space -= 4, value);\n};\n\n/**\n * @param {flatbuffers.Long} value\n */\nflatbuffers.Builder.prototype.writeInt64 = function(value) {\n this.bb.writeInt64(this.space -= 8, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeFloat32 = function(value) {\n this.bb.writeFloat32(this.space -= 4, value);\n};\n\n/**\n * @param {number} value\n */\nflatbuffers.Builder.prototype.writeFloat64 = function(value) {\n this.bb.writeFloat64(this.space -= 8, value);\n};\n/// @endcond\n\n/**\n * Add an `int8` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `int8` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt8 = function(value) {\n this.prep(1, 0);\n this.writeInt8(value);\n};\n\n/**\n * Add an `int16` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `int16` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt16 = function(value) {\n this.prep(2, 0);\n this.writeInt16(value);\n};\n\n/**\n * Add an `int32` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `int32` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt32 = function(value) {\n this.prep(4, 0);\n this.writeInt32(value);\n};\n\n/**\n * Add an `int64` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {flatbuffers.Long} value The `int64` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addInt64 = function(value) {\n this.prep(8, 0);\n this.writeInt64(value);\n};\n\n/**\n * Add a `float32` to the buffer, properly aligned, and grows the buffer (if necessary).\n * @param {number} value The `float32` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addFloat32 = function(value) {\n this.prep(4, 0);\n this.writeFloat32(value);\n};\n\n/**\n * Add a `float64` to the buffer, properly aligned, and 
grows the buffer (if necessary).\n * @param {number} value The `float64` to add the the buffer.\n */\nflatbuffers.Builder.prototype.addFloat64 = function(value) {\n this.prep(8, 0);\n this.writeFloat64(value);\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt8 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addInt8(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt16 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addInt16(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt32 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addInt32(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {flatbuffers.Long} value\n * @param {flatbuffers.Long} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldInt64 = function(voffset, value, defaultValue) {\n if (this.force_defaults || !value.equals(defaultValue)) {\n this.addInt64(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldFloat32 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addFloat32(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {number} value\n * @param {number} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldFloat64 = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addFloat64(value);\n this.slot(voffset);\n }\n};\n\n/**\n * @param {number} voffset\n * @param {flatbuffers.Offset} value\n * @param {flatbuffers.Offset} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldOffset = function(voffset, value, defaultValue) {\n if (this.force_defaults || value != defaultValue) {\n this.addOffset(value);\n this.slot(voffset);\n }\n};\n\n/**\n * Structs are stored inline, so nothing additional is being added. `d` is always 0.\n *\n * @param {number} voffset\n * @param {flatbuffers.Offset} value\n * @param {flatbuffers.Offset} defaultValue\n */\nflatbuffers.Builder.prototype.addFieldStruct = function(voffset, value, defaultValue) {\n if (value != defaultValue) {\n this.nested(value);\n this.slot(voffset);\n }\n};\n\n/**\n * Structures are always stored inline, they need to be created right\n * where they're used. 
You'll get this assertion failure if you\n * created it elsewhere.\n *\n * @param {flatbuffers.Offset} obj The offset of the created object\n */\nflatbuffers.Builder.prototype.nested = function(obj) {\n if (obj != this.offset()) {\n throw new Error('FlatBuffers: struct must be serialized inline.');\n }\n};\n\n/**\n * Should not be creating any other object, string or vector\n * while an object is being constructed\n */\nflatbuffers.Builder.prototype.notNested = function() {\n if (this.isNested) {\n throw new Error('FlatBuffers: object serialization must not be nested.');\n }\n};\n\n/**\n * Set the current vtable at `voffset` to the current location in the buffer.\n *\n * @param {number} voffset\n */\nflatbuffers.Builder.prototype.slot = function(voffset) {\n this.vtable[voffset] = this.offset();\n};\n\n/**\n * @returns {flatbuffers.Offset} Offset relative to the end of the buffer.\n */\nflatbuffers.Builder.prototype.offset = function() {\n return this.bb.capacity() - this.space;\n};\n\n/**\n * Doubles the size of the backing ByteBuffer and copies the old data towards\n * the end of the new buffer (since we build the buffer backwards).\n *\n * @param {flatbuffers.ByteBuffer} bb The current buffer with the existing data\n * @returns {!flatbuffers.ByteBuffer} A new byte buffer with the old data copied\n * to it. The data is located at the end of the buffer.\n *\n * uint8Array.set() formally takes {Array|ArrayBufferView}, so to pass\n * it a uint8Array we need to suppress the type check:\n * @suppress {checkTypes}\n */\nflatbuffers.Builder.growByteBuffer = function(bb) {\n var old_buf_size = bb.capacity();\n\n // Ensure we don't grow beyond what fits in an int.\n if (old_buf_size & 0xC0000000) {\n throw new Error('FlatBuffers: cannot grow buffer beyond 2 gigabytes.');\n }\n\n var new_buf_size = old_buf_size << 1;\n var nbb = flatbuffers.ByteBuffer.allocate(new_buf_size);\n nbb.setPosition(new_buf_size - old_buf_size);\n nbb.bytes().set(bb.bytes(), new_buf_size - old_buf_size);\n return nbb;\n};\n/// @endcond\n\n/**\n * Adds on offset, relative to where it will be written.\n *\n * @param {flatbuffers.Offset} offset The offset to add.\n */\nflatbuffers.Builder.prototype.addOffset = function(offset) {\n this.prep(flatbuffers.SIZEOF_INT, 0); // Ensure alignment is already done.\n this.writeInt32(this.offset() - offset + flatbuffers.SIZEOF_INT);\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * Start encoding a new object in the buffer. Users will not usually need to\n * call this directly. 
The FlatBuffers compiler will generate helper methods\n * that call this method internally.\n *\n * @param {number} numfields\n */\nflatbuffers.Builder.prototype.startObject = function(numfields) {\n this.notNested();\n if (this.vtable == null) {\n this.vtable = [];\n }\n this.vtable_in_use = numfields;\n for (var i = 0; i < numfields; i++) {\n this.vtable[i] = 0; // This will push additional elements as needed\n }\n this.isNested = true;\n this.object_start = this.offset();\n};\n\n/**\n * Finish off writing the object that is under construction.\n *\n * @returns {flatbuffers.Offset} The offset to the object inside `dataBuffer`\n */\nflatbuffers.Builder.prototype.endObject = function() {\n if (this.vtable == null || !this.isNested) {\n throw new Error('FlatBuffers: endObject called without startObject');\n }\n\n this.addInt32(0);\n var vtableloc = this.offset();\n\n // Trim trailing zeroes.\n var i = this.vtable_in_use - 1;\n for (; i >= 0 && this.vtable[i] == 0; i--) {}\n var trimmed_size = i + 1;\n\n // Write out the current vtable.\n for (; i >= 0; i--) {\n // Offset relative to the start of the table.\n this.addInt16(this.vtable[i] != 0 ? vtableloc - this.vtable[i] : 0);\n }\n\n var standard_fields = 2; // The fields below:\n this.addInt16(vtableloc - this.object_start);\n var len = (trimmed_size + standard_fields) * flatbuffers.SIZEOF_SHORT;\n this.addInt16(len);\n\n // Search for an existing vtable that matches the current one.\n var existing_vtable = 0;\n var vt1 = this.space;\nouter_loop:\n for (i = 0; i < this.vtables.length; i++) {\n var vt2 = this.bb.capacity() - this.vtables[i];\n if (len == this.bb.readInt16(vt2)) {\n for (var j = flatbuffers.SIZEOF_SHORT; j < len; j += flatbuffers.SIZEOF_SHORT) {\n if (this.bb.readInt16(vt1 + j) != this.bb.readInt16(vt2 + j)) {\n continue outer_loop;\n }\n }\n existing_vtable = this.vtables[i];\n break;\n }\n }\n\n if (existing_vtable) {\n // Found a match:\n // Remove the current vtable.\n this.space = this.bb.capacity() - vtableloc;\n\n // Point table to existing vtable.\n this.bb.writeInt32(this.space, existing_vtable - vtableloc);\n } else {\n // No match:\n // Add the location of the current vtable to the list of vtables.\n this.vtables.push(this.offset());\n\n // Point table to current vtable.\n this.bb.writeInt32(this.bb.capacity() - vtableloc, this.offset() - vtableloc);\n }\n\n this.isNested = false;\n return vtableloc;\n};\n/// @endcond\n\n/**\n * Finalize a buffer, poiting to the given `root_table`.\n *\n * @param {flatbuffers.Offset} root_table\n * @param {string=} opt_file_identifier\n * @param {boolean=} opt_size_prefix\n */\nflatbuffers.Builder.prototype.finish = function(root_table, opt_file_identifier, opt_size_prefix) {\n var size_prefix = opt_size_prefix ? 
flatbuffers.SIZE_PREFIX_LENGTH : 0;\n if (opt_file_identifier) {\n var file_identifier = opt_file_identifier;\n this.prep(this.minalign, flatbuffers.SIZEOF_INT +\n flatbuffers.FILE_IDENTIFIER_LENGTH + size_prefix);\n if (file_identifier.length != flatbuffers.FILE_IDENTIFIER_LENGTH) {\n throw new Error('FlatBuffers: file identifier must be length ' +\n flatbuffers.FILE_IDENTIFIER_LENGTH);\n }\n for (var i = flatbuffers.FILE_IDENTIFIER_LENGTH - 1; i >= 0; i--) {\n this.writeInt8(file_identifier.charCodeAt(i));\n }\n }\n this.prep(this.minalign, flatbuffers.SIZEOF_INT + size_prefix);\n this.addOffset(root_table);\n if (size_prefix) {\n this.addInt32(this.bb.capacity() - this.space);\n }\n this.bb.setPosition(this.space);\n};\n\n/**\n * Finalize a size prefixed buffer, pointing to the given `root_table`.\n *\n * @param {flatbuffers.Offset} root_table\n * @param {string=} opt_file_identifier\n */\nflatbuffers.Builder.prototype.finishSizePrefixed = function (root_table, opt_file_identifier) {\n this.finish(root_table, opt_file_identifier, true);\n};\n\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * This checks a required field has been set in a given table that has\n * just been constructed.\n *\n * @param {flatbuffers.Offset} table\n * @param {number} field\n */\nflatbuffers.Builder.prototype.requiredField = function(table, field) {\n var table_start = this.bb.capacity() - table;\n var vtable_start = table_start - this.bb.readInt32(table_start);\n var ok = this.bb.readInt16(vtable_start + field) != 0;\n\n // If this fails, the caller will show what field needs to be set.\n if (!ok) {\n throw new Error('FlatBuffers: field ' + field + ' must be set');\n }\n};\n\n/**\n * Start a new array/vector of objects. Users usually will not call\n * this directly. The FlatBuffers compiler will create a start/end\n * method for vector types in generated code.\n *\n * @param {number} elem_size The size of each element in the array\n * @param {number} num_elems The number of elements in the array\n * @param {number} alignment The alignment of the array\n */\nflatbuffers.Builder.prototype.startVector = function(elem_size, num_elems, alignment) {\n this.notNested();\n this.vector_num_elems = num_elems;\n this.prep(flatbuffers.SIZEOF_INT, elem_size * num_elems);\n this.prep(alignment, elem_size * num_elems); // Just in case alignment > int.\n};\n\n/**\n * Finish off the creation of an array and all its elements. The array must be\n * created with `startVector`.\n *\n * @returns {flatbuffers.Offset} The offset at which the newly created array\n * starts.\n */\nflatbuffers.Builder.prototype.endVector = function() {\n this.writeInt32(this.vector_num_elems);\n return this.offset();\n};\n/// @endcond\n\n/**\n * Encode the string `s` in the buffer using UTF-8. 
If a Uint8Array is passed\n * instead of a string, it is assumed to contain valid UTF-8 encoded data.\n *\n * @param {string|Uint8Array} s The string to encode\n * @return {flatbuffers.Offset} The offset in the buffer where the encoded string starts\n */\nflatbuffers.Builder.prototype.createString = function(s) {\n if (s instanceof Uint8Array) {\n var utf8 = s;\n } else {\n var utf8 = [];\n var i = 0;\n\n while (i < s.length) {\n var codePoint;\n\n // Decode UTF-16\n var a = s.charCodeAt(i++);\n if (a < 0xD800 || a >= 0xDC00) {\n codePoint = a;\n } else {\n var b = s.charCodeAt(i++);\n codePoint = (a << 10) + b + (0x10000 - (0xD800 << 10) - 0xDC00);\n }\n\n // Encode UTF-8\n if (codePoint < 0x80) {\n utf8.push(codePoint);\n } else {\n if (codePoint < 0x800) {\n utf8.push(((codePoint >> 6) & 0x1F) | 0xC0);\n } else {\n if (codePoint < 0x10000) {\n utf8.push(((codePoint >> 12) & 0x0F) | 0xE0);\n } else {\n utf8.push(\n ((codePoint >> 18) & 0x07) | 0xF0,\n ((codePoint >> 12) & 0x3F) | 0x80);\n }\n utf8.push(((codePoint >> 6) & 0x3F) | 0x80);\n }\n utf8.push((codePoint & 0x3F) | 0x80);\n }\n }\n }\n\n this.addInt8(0);\n this.startVector(1, utf8.length, 1);\n this.bb.setPosition(this.space -= utf8.length);\n for (var i = 0, offset = this.space, bytes = this.bb.bytes(); i < utf8.length; i++) {\n bytes[offset++] = utf8[i];\n }\n return this.endVector();\n};\n\n/**\n * A helper function to avoid generated code depending on this file directly.\n *\n * @param {number} low\n * @param {number} high\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.Builder.prototype.createLong = function(low, high) {\n return flatbuffers.Long.create(low, high);\n};\n////////////////////////////////////////////////////////////////////////////////\n/// @cond FLATBUFFERS_INTERNAL\n/**\n * Create a new ByteBuffer with a given array of bytes (`Uint8Array`).\n *\n * @constructor\n * @param {Uint8Array} bytes\n */\nflatbuffers.ByteBuffer = function(bytes) {\n /**\n * @type {Uint8Array}\n * @private\n */\n this.bytes_ = bytes;\n\n /**\n * @type {number}\n * @private\n */\n this.position_ = 0;\n};\n\n/**\n * Create and allocate a new ByteBuffer with a given size.\n *\n * @param {number} byte_size\n * @returns {!flatbuffers.ByteBuffer}\n */\nflatbuffers.ByteBuffer.allocate = function(byte_size) {\n return new flatbuffers.ByteBuffer(new Uint8Array(byte_size));\n};\n\nflatbuffers.ByteBuffer.prototype.clear = function() {\n this.position_ = 0;\n};\n\n/**\n * Get the underlying `Uint8Array`.\n *\n * @returns {Uint8Array}\n */\nflatbuffers.ByteBuffer.prototype.bytes = function() {\n return this.bytes_;\n};\n\n/**\n * Get the buffer's position.\n *\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.position = function() {\n return this.position_;\n};\n\n/**\n * Set the buffer's position.\n *\n * @param {number} position\n */\nflatbuffers.ByteBuffer.prototype.setPosition = function(position) {\n this.position_ = position;\n};\n\n/**\n * Get the buffer's capacity.\n *\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.capacity = function() {\n return this.bytes_.length;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readInt8 = function(offset) {\n return this.readUint8(offset) << 24 >> 24;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readUint8 = function(offset) {\n return this.bytes_[offset];\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readInt16 = 
function(offset) {\n return this.readUint16(offset) << 16 >> 16;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readUint16 = function(offset) {\n return this.bytes_[offset] | this.bytes_[offset + 1] << 8;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readInt32 = function(offset) {\n return this.bytes_[offset] | this.bytes_[offset + 1] << 8 | this.bytes_[offset + 2] << 16 | this.bytes_[offset + 3] << 24;\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readUint32 = function(offset) {\n return this.readInt32(offset) >>> 0;\n};\n\n/**\n * @param {number} offset\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.ByteBuffer.prototype.readInt64 = function(offset) {\n return new flatbuffers.Long(this.readInt32(offset), this.readInt32(offset + 4));\n};\n\n/**\n * @param {number} offset\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.ByteBuffer.prototype.readUint64 = function(offset) {\n return new flatbuffers.Long(this.readUint32(offset), this.readUint32(offset + 4));\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readFloat32 = function(offset) {\n flatbuffers.int32[0] = this.readInt32(offset);\n return flatbuffers.float32[0];\n};\n\n/**\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.readFloat64 = function(offset) {\n flatbuffers.int32[flatbuffers.isLittleEndian ? 0 : 1] = this.readInt32(offset);\n flatbuffers.int32[flatbuffers.isLittleEndian ? 1 : 0] = this.readInt32(offset + 4);\n return flatbuffers.float64[0];\n};\n\n/**\n * @param {number} offset\n * @param {number|boolean} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt8 = function(offset, value) {\n this.bytes_[offset] = /** @type {number} */(value);\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint8 = function(offset, value) {\n this.bytes_[offset] = value;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt16 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint16 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt32 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n this.bytes_[offset + 2] = value >> 16;\n this.bytes_[offset + 3] = value >> 24;\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint32 = function(offset, value) {\n this.bytes_[offset] = value;\n this.bytes_[offset + 1] = value >> 8;\n this.bytes_[offset + 2] = value >> 16;\n this.bytes_[offset + 3] = value >> 24;\n};\n\n/**\n * @param {number} offset\n * @param {flatbuffers.Long} value\n */\nflatbuffers.ByteBuffer.prototype.writeInt64 = function(offset, value) {\n this.writeInt32(offset, value.low);\n this.writeInt32(offset + 4, value.high);\n};\n\n/**\n * @param {number} offset\n * @param {flatbuffers.Long} value\n */\nflatbuffers.ByteBuffer.prototype.writeUint64 = function(offset, value) {\n this.writeUint32(offset, value.low);\n this.writeUint32(offset + 4, 
value.high);\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeFloat32 = function(offset, value) {\n flatbuffers.float32[0] = value;\n this.writeInt32(offset, flatbuffers.int32[0]);\n};\n\n/**\n * @param {number} offset\n * @param {number} value\n */\nflatbuffers.ByteBuffer.prototype.writeFloat64 = function(offset, value) {\n flatbuffers.float64[0] = value;\n this.writeInt32(offset, flatbuffers.int32[flatbuffers.isLittleEndian ? 0 : 1]);\n this.writeInt32(offset + 4, flatbuffers.int32[flatbuffers.isLittleEndian ? 1 : 0]);\n};\n\n/**\n * Return the file identifier. Behavior is undefined for FlatBuffers whose\n * schema does not include a file_identifier (likely points at padding or the\n * start of a the root vtable).\n * @returns {string}\n */\nflatbuffers.ByteBuffer.prototype.getBufferIdentifier = function() {\n if (this.bytes_.length < this.position_ + flatbuffers.SIZEOF_INT +\n flatbuffers.FILE_IDENTIFIER_LENGTH) {\n throw new Error(\n 'FlatBuffers: ByteBuffer is too short to contain an identifier.');\n }\n var result = \"\";\n for (var i = 0; i < flatbuffers.FILE_IDENTIFIER_LENGTH; i++) {\n result += String.fromCharCode(\n this.readInt8(this.position_ + flatbuffers.SIZEOF_INT + i));\n }\n return result;\n};\n\n/**\n * Look up a field in the vtable, return an offset into the object, or 0 if the\n * field is not present.\n *\n * @param {number} bb_pos\n * @param {number} vtable_offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__offset = function(bb_pos, vtable_offset) {\n var vtable = bb_pos - this.readInt32(bb_pos);\n return vtable_offset < this.readInt16(vtable) ? this.readInt16(vtable + vtable_offset) : 0;\n};\n\n/**\n * Initialize any Table-derived type to point to the union at the given offset.\n *\n * @param {flatbuffers.Table} t\n * @param {number} offset\n * @returns {flatbuffers.Table}\n */\nflatbuffers.ByteBuffer.prototype.__union = function(t, offset) {\n t.bb_pos = offset + this.readInt32(offset);\n t.bb = this;\n return t;\n};\n\n/**\n * Create a JavaScript string from UTF-8 data stored inside the FlatBuffer.\n * This allocates a new string and converts to wide chars upon each access.\n *\n * To avoid the conversion to UTF-16, pass flatbuffers.Encoding.UTF8_BYTES as\n * the \"optionalEncoding\" argument. 
This is useful for avoiding conversion to\n * and from UTF-16 when the data will just be packaged back up in another\n * FlatBuffer later on.\n *\n * @param {number} offset\n * @param {flatbuffers.Encoding=} opt_encoding Defaults to UTF16_STRING\n * @returns {string|!Uint8Array}\n */\nflatbuffers.ByteBuffer.prototype.__string = function(offset, opt_encoding) {\n offset += this.readInt32(offset);\n\n var length = this.readInt32(offset);\n var result = '';\n var i = 0;\n\n offset += flatbuffers.SIZEOF_INT;\n\n if (opt_encoding === flatbuffers.Encoding.UTF8_BYTES) {\n return this.bytes_.subarray(offset, offset + length);\n }\n\n while (i < length) {\n var codePoint;\n\n // Decode UTF-8\n var a = this.readUint8(offset + i++);\n if (a < 0xC0) {\n codePoint = a;\n } else {\n var b = this.readUint8(offset + i++);\n if (a < 0xE0) {\n codePoint =\n ((a & 0x1F) << 6) |\n (b & 0x3F);\n } else {\n var c = this.readUint8(offset + i++);\n if (a < 0xF0) {\n codePoint =\n ((a & 0x0F) << 12) |\n ((b & 0x3F) << 6) |\n (c & 0x3F);\n } else {\n var d = this.readUint8(offset + i++);\n codePoint =\n ((a & 0x07) << 18) |\n ((b & 0x3F) << 12) |\n ((c & 0x3F) << 6) |\n (d & 0x3F);\n }\n }\n }\n\n // Encode UTF-16\n if (codePoint < 0x10000) {\n result += String.fromCharCode(codePoint);\n } else {\n codePoint -= 0x10000;\n result += String.fromCharCode(\n (codePoint >> 10) + 0xD800,\n (codePoint & ((1 << 10) - 1)) + 0xDC00);\n }\n }\n\n return result;\n};\n\n/**\n * Retrieve the relative offset stored at \"offset\"\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__indirect = function(offset) {\n return offset + this.readInt32(offset);\n};\n\n/**\n * Get the start of data of a vector whose offset is stored at \"offset\" in this object.\n *\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__vector = function(offset) {\n return offset + this.readInt32(offset) + flatbuffers.SIZEOF_INT; // data starts after the length\n};\n\n/**\n * Get the length of a vector whose offset is stored at \"offset\" in this object.\n *\n * @param {number} offset\n * @returns {number}\n */\nflatbuffers.ByteBuffer.prototype.__vector_len = function(offset) {\n return this.readInt32(offset + this.readInt32(offset));\n};\n\n/**\n * @param {string} ident\n * @returns {boolean}\n */\nflatbuffers.ByteBuffer.prototype.__has_identifier = function(ident) {\n if (ident.length != flatbuffers.FILE_IDENTIFIER_LENGTH) {\n throw new Error('FlatBuffers: file identifier must be length ' +\n flatbuffers.FILE_IDENTIFIER_LENGTH);\n }\n for (var i = 0; i < flatbuffers.FILE_IDENTIFIER_LENGTH; i++) {\n if (ident.charCodeAt(i) != this.readInt8(this.position_ + flatbuffers.SIZEOF_INT + i)) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * A helper function to avoid generated code depending on this file directly.\n *\n * @param {number} low\n * @param {number} high\n * @returns {!flatbuffers.Long}\n */\nflatbuffers.ByteBuffer.prototype.createLong = function(low, high) {\n return flatbuffers.Long.create(low, high);\n};\n\n// Exports for Node.js and RequireJS\nexport { flatbuffers };\n\n/// @endcond\n/// @}\n", "// automatically generated by the FlatBuffers compiler, do not modify\n/* eslint-disable */\n\nimport {flatbuffers} from 'flatbuffers';\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum AttributeType {\n UNDEFINED = 0,\n FLOAT = 1,\n INT = 2,\n STRING = 3,\n TENSOR = 4,\n GRAPH = 5,\n FLOATS = 6,\n INTS = 7,\n STRINGS = 8,\n TENSORS = 
9,\n GRAPHS = 10,\n SPARSE_TENSOR = 11,\n SPARSE_TENSORS = 12\n }\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum DimensionValueType {UNKNOWN = 0, VALUE = 1, PARAM = 2}\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum TensorDataType {\n UNDEFINED = 0,\n FLOAT = 1,\n UINT8 = 2,\n INT8 = 3,\n UINT16 = 4,\n INT16 = 5,\n INT32 = 6,\n INT64 = 7,\n STRING = 8,\n BOOL = 9,\n FLOAT16 = 10,\n DOUBLE = 11,\n UINT32 = 12,\n UINT64 = 13,\n COMPLEX64 = 14,\n COMPLEX128 = 15,\n BFLOAT16 = 16,\n FLOAT8E4M3FN = 17,\n FLOAT8E4M3FNUZ = 18,\n FLOAT8E5M2 = 19,\n FLOAT8E5M2FNUZ = 20,\n }\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum NodeType {Primitive = 0, Fused = 1}\n}\n\n/**\n * @enum {number}\n */\nexport namespace onnxruntime.experimental.fbs {\n export enum TypeInfoValue {NONE = 0, tensor_type = 1, sequence_type = 2, map_type = 3}\n}\n\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Shape {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Shape\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Shape {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Shape= obj\n * @returns Shape\n */\n static getRootAsShape(bb: flatbuffers.ByteBuffer, obj?: Shape): Shape {\n return (obj || new Shape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Shape= obj\n * @returns Shape\n */\n static getSizePrefixedRootAsShape(bb: flatbuffers.ByteBuffer, obj?: Shape): Shape {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Shape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Dimension= obj\n * @returns onnxruntime.experimental.fbs.Dimension\n */\n dim(index: number, obj?: onnxruntime.experimental.fbs.Dimension): onnxruntime.experimental.fbs.Dimension|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.Dimension())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n dimLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startShape(builder: flatbuffers.Builder) {\n builder.startObject(1);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimOffset\n */\n static addDim(builder: flatbuffers.Builder, dimOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, dimOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createDimVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startDimVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endShape(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createShape(builder: flatbuffers.Builder, dimOffset: flatbuffers.Offset): flatbuffers.Offset {\n Shape.startShape(builder);\n Shape.addDim(builder, dimOffset);\n return Shape.endShape(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Dimension {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Dimension\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Dimension {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Dimension= obj\n * @returns Dimension\n */\n static getRootAsDimension(bb: flatbuffers.ByteBuffer, obj?: Dimension): Dimension {\n return (obj || new Dimension()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Dimension= obj\n * @returns Dimension\n */\n static getSizePrefixedRootAsDimension(bb: flatbuffers.ByteBuffer, obj?: Dimension): Dimension {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Dimension()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.DimensionValue= obj\n * @returns onnxruntime.experimental.fbs.DimensionValue|null\n */\n value(obj?: onnxruntime.experimental.fbs.DimensionValue): onnxruntime.experimental.fbs.DimensionValue|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.DimensionValue())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n denotation(): string|null;\n denotation(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n denotation(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startDimension(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valueOffset\n */\n static addValue(builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, valueOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset denotationOffset\n */\n static addDenotation(builder: flatbuffers.Builder, denotationOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, denotationOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endDimension(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createDimension(\n builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset,\n denotationOffset: flatbuffers.Offset): flatbuffers.Offset {\n Dimension.startDimension(builder);\n Dimension.addValue(builder, valueOffset);\n Dimension.addDenotation(builder, denotationOffset);\n return Dimension.endDimension(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class DimensionValue {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns DimensionValue\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): DimensionValue {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param DimensionValue= obj\n * @returns DimensionValue\n */\n static getRootAsDimensionValue(bb: flatbuffers.ByteBuffer, obj?: DimensionValue): DimensionValue {\n return (obj || new DimensionValue()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param DimensionValue= obj\n * @returns DimensionValue\n */\n static getSizePrefixedRootAsDimensionValue(bb: flatbuffers.ByteBuffer, obj?: DimensionValue): DimensionValue {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new DimensionValue()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.DimensionValueType\n */\n dimType(): onnxruntime.experimental.fbs.DimensionValueType {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? /** */ (this.bb!.readInt8(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.DimensionValueType.UNKNOWN;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n dimValue(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n dimParam(): string|null;\n dimParam(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n dimParam(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startDimensionValue(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.DimensionValueType dimType\n */\n static addDimType(builder: flatbuffers.Builder, dimType: onnxruntime.experimental.fbs.DimensionValueType) {\n builder.addFieldInt8(0, dimType, onnxruntime.experimental.fbs.DimensionValueType.UNKNOWN);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long dimValue\n */\n static addDimValue(builder: flatbuffers.Builder, dimValue: flatbuffers.Long) {\n builder.addFieldInt64(1, dimValue, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimParamOffset\n */\n static addDimParam(builder: flatbuffers.Builder, dimParamOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, dimParamOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endDimensionValue(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createDimensionValue(\n builder: flatbuffers.Builder, dimType: onnxruntime.experimental.fbs.DimensionValueType,\n dimValue: flatbuffers.Long, dimParamOffset: flatbuffers.Offset): flatbuffers.Offset {\n DimensionValue.startDimensionValue(builder);\n DimensionValue.addDimType(builder, dimType);\n DimensionValue.addDimValue(builder, dimValue);\n DimensionValue.addDimParam(builder, dimParamOffset);\n return DimensionValue.endDimensionValue(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class TensorTypeAndShape {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns TensorTypeAndShape\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): TensorTypeAndShape {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TensorTypeAndShape= obj\n * @returns TensorTypeAndShape\n */\n static getRootAsTensorTypeAndShape(bb: flatbuffers.ByteBuffer, obj?: TensorTypeAndShape): TensorTypeAndShape {\n return (obj || new TensorTypeAndShape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TensorTypeAndShape= obj\n * @returns TensorTypeAndShape\n */\n static getSizePrefixedRootAsTensorTypeAndShape(bb: flatbuffers.ByteBuffer, obj?: TensorTypeAndShape):\n TensorTypeAndShape {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new TensorTypeAndShape()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TensorDataType\n */\n elemType(): onnxruntime.experimental.fbs.TensorDataType {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TensorDataType.UNDEFINED;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Shape= obj\n * @returns onnxruntime.experimental.fbs.Shape|null\n */\n shape(obj?: onnxruntime.experimental.fbs.Shape): onnxruntime.experimental.fbs.Shape|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.Shape())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startTensorTypeAndShape(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TensorDataType elemType\n */\n static addElemType(builder: flatbuffers.Builder, elemType: onnxruntime.experimental.fbs.TensorDataType) {\n builder.addFieldInt32(0, elemType, onnxruntime.experimental.fbs.TensorDataType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset shapeOffset\n */\n static addShape(builder: flatbuffers.Builder, shapeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, shapeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endTensorTypeAndShape(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createTensorTypeAndShape(\n builder: flatbuffers.Builder, elemType: onnxruntime.experimental.fbs.TensorDataType,\n shapeOffset: flatbuffers.Offset): flatbuffers.Offset {\n TensorTypeAndShape.startTensorTypeAndShape(builder);\n TensorTypeAndShape.addElemType(builder, elemType);\n TensorTypeAndShape.addShape(builder, shapeOffset);\n return TensorTypeAndShape.endTensorTypeAndShape(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class MapType {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns MapType\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): MapType {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param MapType= obj\n * @returns MapType\n */\n static getRootAsMapType(bb: flatbuffers.ByteBuffer, obj?: MapType): MapType {\n return (obj || new MapType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param MapType= obj\n * @returns MapType\n */\n static getSizePrefixedRootAsMapType(bb: flatbuffers.ByteBuffer, obj?: MapType): MapType {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new MapType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TensorDataType\n */\n keyType(): onnxruntime.experimental.fbs.TensorDataType {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TensorDataType.UNDEFINED;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.TypeInfo= obj\n * @returns onnxruntime.experimental.fbs.TypeInfo|null\n */\n valueType(obj?: onnxruntime.experimental.fbs.TypeInfo): onnxruntime.experimental.fbs.TypeInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.TypeInfo())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startMapType(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TensorDataType keyType\n */\n static addKeyType(builder: flatbuffers.Builder, keyType: onnxruntime.experimental.fbs.TensorDataType) {\n builder.addFieldInt32(0, keyType, onnxruntime.experimental.fbs.TensorDataType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valueTypeOffset\n */\n static addValueType(builder: flatbuffers.Builder, valueTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, valueTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endMapType(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createMapType(\n builder: flatbuffers.Builder, keyType: onnxruntime.experimental.fbs.TensorDataType,\n valueTypeOffset: flatbuffers.Offset): flatbuffers.Offset {\n MapType.startMapType(builder);\n MapType.addKeyType(builder, keyType);\n MapType.addValueType(builder, valueTypeOffset);\n return MapType.endMapType(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SequenceType {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SequenceType\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SequenceType {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SequenceType= obj\n * @returns SequenceType\n */\n static getRootAsSequenceType(bb: flatbuffers.ByteBuffer, obj?: SequenceType): SequenceType {\n return (obj || new SequenceType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SequenceType= obj\n * @returns SequenceType\n */\n static getSizePrefixedRootAsSequenceType(bb: flatbuffers.ByteBuffer, obj?: SequenceType): SequenceType {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SequenceType()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.TypeInfo= obj\n * @returns onnxruntime.experimental.fbs.TypeInfo|null\n */\n elemType(obj?: onnxruntime.experimental.fbs.TypeInfo): onnxruntime.experimental.fbs.TypeInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.TypeInfo())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSequenceType(builder: flatbuffers.Builder) {\n builder.startObject(1);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset elemTypeOffset\n */\n static addElemType(builder: flatbuffers.Builder, elemTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, elemTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSequenceType(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createSequenceType(builder: flatbuffers.Builder, elemTypeOffset: flatbuffers.Offset): flatbuffers.Offset {\n SequenceType.startSequenceType(builder);\n SequenceType.addElemType(builder, elemTypeOffset);\n return SequenceType.endSequenceType(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class EdgeEnd {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns EdgeEnd\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): EdgeEnd {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @returns number\n */\n nodeIndex(): number {\n return this.bb!.readUint32(this.bb_pos);\n }\n\n /**\n * @returns number\n */\n srcArgIndex(): number {\n return this.bb!.readInt32(this.bb_pos + 4);\n }\n\n /**\n * @returns number\n */\n dstArgIndex(): number {\n return this.bb!.readInt32(this.bb_pos + 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number node_index\n * @param number src_arg_index\n * @param number dst_arg_index\n * @returns flatbuffers.Offset\n */\n static createEdgeEnd(\n builder: flatbuffers.Builder, node_index: number, src_arg_index: number,\n dst_arg_index: number): flatbuffers.Offset {\n builder.prep(4, 12);\n builder.writeInt32(dst_arg_index);\n builder.writeInt32(src_arg_index);\n builder.writeInt32(node_index);\n return builder.offset();\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class NodeEdge {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns NodeEdge\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): NodeEdge {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param NodeEdge= obj\n * @returns NodeEdge\n */\n static getRootAsNodeEdge(bb: flatbuffers.ByteBuffer, obj?: NodeEdge): NodeEdge {\n return (obj || new NodeEdge()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param NodeEdge= obj\n * @returns NodeEdge\n */\n static getSizePrefixedRootAsNodeEdge(bb: flatbuffers.ByteBuffer, obj?: NodeEdge): NodeEdge {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new NodeEdge()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns number\n */\n nodeIndex(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.EdgeEnd= obj\n * @returns onnxruntime.experimental.fbs.EdgeEnd\n */\n inputEdges(index: number, obj?: onnxruntime.experimental.fbs.EdgeEnd): onnxruntime.experimental.fbs.EdgeEnd|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? 
(obj || new onnxruntime.experimental.fbs.EdgeEnd())\n .__init(this.bb!.__vector(this.bb_pos + offset) + index * 12, this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n inputEdgesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.EdgeEnd= obj\n * @returns onnxruntime.experimental.fbs.EdgeEnd\n */\n outputEdges(index: number, obj?: onnxruntime.experimental.fbs.EdgeEnd): onnxruntime.experimental.fbs.EdgeEnd|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.EdgeEnd())\n .__init(this.bb!.__vector(this.bb_pos + offset) + index * 12, this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n outputEdgesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startNodeEdge(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number nodeIndex\n */\n static addNodeIndex(builder: flatbuffers.Builder, nodeIndex: number) {\n builder.addFieldInt32(0, nodeIndex, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputEdgesOffset\n */\n static addInputEdges(builder: flatbuffers.Builder, inputEdgesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, inputEdgesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputEdgesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(12, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset outputEdgesOffset\n */\n static addOutputEdges(builder: flatbuffers.Builder, outputEdgesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, outputEdgesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOutputEdgesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(12, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endNodeEdge(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createNodeEdge(\n builder: flatbuffers.Builder, nodeIndex: number, inputEdgesOffset: flatbuffers.Offset,\n outputEdgesOffset: flatbuffers.Offset): flatbuffers.Offset {\n NodeEdge.startNodeEdge(builder);\n NodeEdge.addNodeIndex(builder, nodeIndex);\n NodeEdge.addInputEdges(builder, inputEdgesOffset);\n NodeEdge.addOutputEdges(builder, outputEdgesOffset);\n return NodeEdge.endNodeEdge(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Node {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Node\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Node {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Node= obj\n * @returns Node\n */\n static getRootAsNode(bb: flatbuffers.ByteBuffer, obj?: Node): Node {\n return (obj || new Node()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Node= obj\n * @returns Node\n */\n static 
getSizePrefixedRootAsNode(bb: flatbuffers.ByteBuffer, obj?: Node): Node {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Node()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n domain(): string|null;\n domain(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n domain(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n sinceVersion(): number {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.readInt32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns number\n */\n index(): number {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n opType(): string|null;\n opType(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n opType(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.NodeType\n */\n type(): onnxruntime.experimental.fbs.NodeType {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.NodeType.Primitive;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n executionProviderType(): string|null;\n executionProviderType(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n executionProviderType(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n inputs(index: number): string;\n inputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n inputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n inputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n outputs(index: number): string;\n outputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n outputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n outputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Attribute= obj\n * @returns onnxruntime.experimental.fbs.Attribute\n */\n attributes(index: number, obj?: onnxruntime.experimental.fbs.Attribute): onnxruntime.experimental.fbs.Attribute\n |null {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? (obj || new onnxruntime.experimental.fbs.Attribute())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n attributesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @returns number\n */\n inputArgCounts(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? this.bb!.readInt32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;\n }\n\n /**\n * @returns number\n */\n inputArgCountsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Int32Array\n */\n inputArgCountsArray(): Int32Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ?\n new Int32Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n implicitInputs(index: number): string;\n implicitInputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n implicitInputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n implicitInputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startNode(builder: flatbuffers.Builder) {\n builder.startObject(13);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset domainOffset\n */\n static addDomain(builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, domainOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number sinceVersion\n */\n static addSinceVersion(builder: flatbuffers.Builder, sinceVersion: number) {\n builder.addFieldInt32(3, sinceVersion, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number index\n */\n static addIndex(builder: flatbuffers.Builder, index: number) {\n builder.addFieldInt32(4, index, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset opTypeOffset\n */\n static addOpType(builder: flatbuffers.Builder, opTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, opTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.NodeType type\n */\n static addType(builder: flatbuffers.Builder, type: onnxruntime.experimental.fbs.NodeType) {\n builder.addFieldInt32(6, type, onnxruntime.experimental.fbs.NodeType.Primitive);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset executionProviderTypeOffset\n */\n static addExecutionProviderType(builder: flatbuffers.Builder, executionProviderTypeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, executionProviderTypeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputsOffset\n */\n static addInputs(builder: flatbuffers.Builder, inputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(8, inputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset outputsOffset\n */\n static addOutputs(builder: flatbuffers.Builder, outputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(9, outputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createOutputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOutputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset attributesOffset\n */\n static addAttributes(builder: flatbuffers.Builder, attributesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(10, attributesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createAttributesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startAttributesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputArgCountsOffset\n */\n static addInputArgCounts(builder: flatbuffers.Builder, inputArgCountsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(11, inputArgCountsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInputArgCountsVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt32(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputArgCountsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset implicitInputsOffset\n */\n static addImplicitInputs(builder: flatbuffers.Builder, implicitInputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(12, implicitInputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createImplicitInputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startImplicitInputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endNode(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createNode(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n domainOffset: flatbuffers.Offset, sinceVersion: number, index: number, opTypeOffset: flatbuffers.Offset,\n type: onnxruntime.experimental.fbs.NodeType, executionProviderTypeOffset: flatbuffers.Offset,\n inputsOffset: flatbuffers.Offset, outputsOffset: flatbuffers.Offset, attributesOffset: flatbuffers.Offset,\n inputArgCountsOffset: flatbuffers.Offset, implicitInputsOffset: flatbuffers.Offset): flatbuffers.Offset {\n Node.startNode(builder);\n Node.addName(builder, nameOffset);\n Node.addDocString(builder, docStringOffset);\n Node.addDomain(builder, domainOffset);\n Node.addSinceVersion(builder, sinceVersion);\n Node.addIndex(builder, index);\n Node.addOpType(builder, opTypeOffset);\n Node.addType(builder, type);\n Node.addExecutionProviderType(builder, executionProviderTypeOffset);\n Node.addInputs(builder, inputsOffset);\n Node.addOutputs(builder, outputsOffset);\n Node.addAttributes(builder, attributesOffset);\n Node.addInputArgCounts(builder, inputArgCountsOffset);\n Node.addImplicitInputs(builder, implicitInputsOffset);\n return Node.endNode(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class ValueInfo {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns ValueInfo\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): ValueInfo {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param ValueInfo= obj\n * @returns ValueInfo\n */\n static getRootAsValueInfo(bb: flatbuffers.ByteBuffer, obj?: ValueInfo): ValueInfo {\n return (obj || new ValueInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param ValueInfo= obj\n * @returns ValueInfo\n */\n static getSizePrefixedRootAsValueInfo(bb: flatbuffers.ByteBuffer, obj?: ValueInfo): ValueInfo {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new ValueInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? 
this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.TypeInfo= obj\n * @returns onnxruntime.experimental.fbs.TypeInfo|null\n */\n type(obj?: onnxruntime.experimental.fbs.TypeInfo): onnxruntime.experimental.fbs.TypeInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.TypeInfo())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startValueInfo(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset typeOffset\n */\n static addType(builder: flatbuffers.Builder, typeOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, typeOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endValueInfo(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createValueInfo(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n typeOffset: flatbuffers.Offset): flatbuffers.Offset {\n ValueInfo.startValueInfo(builder);\n ValueInfo.addName(builder, nameOffset);\n ValueInfo.addDocString(builder, docStringOffset);\n ValueInfo.addType(builder, typeOffset);\n return ValueInfo.endValueInfo(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class TypeInfo {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns TypeInfo\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): TypeInfo {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TypeInfo= obj\n * @returns TypeInfo\n */\n static getRootAsTypeInfo(bb: flatbuffers.ByteBuffer, obj?: TypeInfo): TypeInfo {\n return (obj || new TypeInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param TypeInfo= obj\n * @returns TypeInfo\n */\n static getSizePrefixedRootAsTypeInfo(bb: flatbuffers.ByteBuffer, obj?: TypeInfo): TypeInfo {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new TypeInfo()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n denotation(): string|null;\n denotation(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n 
denotation(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TypeInfoValue\n */\n valueType(): onnxruntime.experimental.fbs.TypeInfoValue {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? /** */ (this.bb!.readUint8(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TypeInfoValue.NONE;\n }\n\n /**\n * @param flatbuffers.Table obj\n * @returns ?flatbuffers.Table\n */\n value(obj: T): T|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__union(obj, this.bb_pos + offset) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startTypeInfo(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset denotationOffset\n */\n static addDenotation(builder: flatbuffers.Builder, denotationOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, denotationOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TypeInfoValue valueType\n */\n static addValueType(builder: flatbuffers.Builder, valueType: onnxruntime.experimental.fbs.TypeInfoValue) {\n builder.addFieldInt8(1, valueType, onnxruntime.experimental.fbs.TypeInfoValue.NONE);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valueOffset\n */\n static addValue(builder: flatbuffers.Builder, valueOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, valueOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endTypeInfo(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createTypeInfo(\n builder: flatbuffers.Builder, denotationOffset: flatbuffers.Offset,\n valueType: onnxruntime.experimental.fbs.TypeInfoValue, valueOffset: flatbuffers.Offset): flatbuffers.Offset {\n TypeInfo.startTypeInfo(builder);\n TypeInfo.addDenotation(builder, denotationOffset);\n TypeInfo.addValueType(builder, valueType);\n TypeInfo.addValue(builder, valueOffset);\n return TypeInfo.endTypeInfo(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class OperatorSetId {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns OperatorSetId\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): OperatorSetId {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param OperatorSetId= obj\n * @returns OperatorSetId\n */\n static getRootAsOperatorSetId(bb: flatbuffers.ByteBuffer, obj?: OperatorSetId): OperatorSetId {\n return (obj || new OperatorSetId()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param OperatorSetId= obj\n * @returns OperatorSetId\n */\n static getSizePrefixedRootAsOperatorSetId(bb: flatbuffers.ByteBuffer, obj?: OperatorSetId): OperatorSetId {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new OperatorSetId()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n domain(): string|null;\n domain(optionalEncoding: flatbuffers.Encoding): 
string|Uint8Array|null;\n domain(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n version(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startOperatorSetId(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset domainOffset\n */\n static addDomain(builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, domainOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long version\n */\n static addVersion(builder: flatbuffers.Builder, version: flatbuffers.Long) {\n builder.addFieldInt64(1, version, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endOperatorSetId(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createOperatorSetId(\n builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset, version: flatbuffers.Long): flatbuffers.Offset {\n OperatorSetId.startOperatorSetId(builder);\n OperatorSetId.addDomain(builder, domainOffset);\n OperatorSetId.addVersion(builder, version);\n return OperatorSetId.endOperatorSetId(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Tensor {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Tensor\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Tensor {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Tensor= obj\n * @returns Tensor\n */\n static getRootAsTensor(bb: flatbuffers.ByteBuffer, obj?: Tensor): Tensor {\n return (obj || new Tensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Tensor= obj\n * @returns Tensor\n */\n static getSizePrefixedRootAsTensor(bb: flatbuffers.ByteBuffer, obj?: Tensor): Tensor {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Tensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n dims(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? 
this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n dimsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.TensorDataType\n */\n dataType(): onnxruntime.experimental.fbs.TensorDataType {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.TensorDataType.UNDEFINED;\n }\n\n /**\n * @param number index\n * @returns number\n */\n rawData(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.readUint8(this.bb!.__vector(this.bb_pos + offset) + index) : 0;\n }\n\n /**\n * @returns number\n */\n rawDataLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Uint8Array\n */\n rawDataArray(): Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ?\n new Uint8Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n stringData(index: number): string;\n stringData(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n stringData(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n stringDataLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startTensor(builder: flatbuffers.Builder) {\n builder.startObject(6);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimsOffset\n */\n static addDims(builder: flatbuffers.Builder, dimsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, dimsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createDimsVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startDimsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.TensorDataType dataType\n */\n static addDataType(builder: flatbuffers.Builder, dataType: onnxruntime.experimental.fbs.TensorDataType) {\n builder.addFieldInt32(3, dataType, onnxruntime.experimental.fbs.TensorDataType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset rawDataOffset\n */\n static addRawData(builder: flatbuffers.Builder, rawDataOffset: flatbuffers.Offset) {\n builder.addFieldOffset(4, rawDataOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createRawDataVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(1, data.length, 1);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt8(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startRawDataVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(1, numElems, 1);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset stringDataOffset\n */\n static addStringData(builder: flatbuffers.Builder, stringDataOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, stringDataOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createStringDataVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startStringDataVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endTensor(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createTensor(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n dimsOffset: flatbuffers.Offset, dataType: onnxruntime.experimental.fbs.TensorDataType,\n rawDataOffset: flatbuffers.Offset, stringDataOffset: flatbuffers.Offset): flatbuffers.Offset {\n Tensor.startTensor(builder);\n Tensor.addName(builder, nameOffset);\n Tensor.addDocString(builder, docStringOffset);\n Tensor.addDims(builder, dimsOffset);\n Tensor.addDataType(builder, dataType);\n Tensor.addRawData(builder, rawDataOffset);\n Tensor.addStringData(builder, stringDataOffset);\n return Tensor.endTensor(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SparseTensor {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SparseTensor\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SparseTensor {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SparseTensor= obj\n * @returns SparseTensor\n */\n static getRootAsSparseTensor(bb: flatbuffers.ByteBuffer, obj?: SparseTensor): SparseTensor {\n return (obj || new SparseTensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SparseTensor= obj\n * @returns SparseTensor\n */\n static getSizePrefixedRootAsSparseTensor(bb: flatbuffers.ByteBuffer, obj?: SparseTensor): SparseTensor {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SparseTensor()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor|null\n */\n values(obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor|null\n */\n indices(obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n dims(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? 
this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n dimsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSparseTensor(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset valuesOffset\n */\n static addValues(builder: flatbuffers.Builder, valuesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, valuesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset indicesOffset\n */\n static addIndices(builder: flatbuffers.Builder, indicesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, indicesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset dimsOffset\n */\n static addDims(builder: flatbuffers.Builder, dimsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, dimsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createDimsVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startDimsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSparseTensor(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createSparseTensor(\n builder: flatbuffers.Builder, valuesOffset: flatbuffers.Offset, indicesOffset: flatbuffers.Offset,\n dimsOffset: flatbuffers.Offset): flatbuffers.Offset {\n SparseTensor.startSparseTensor(builder);\n SparseTensor.addValues(builder, valuesOffset);\n SparseTensor.addIndices(builder, indicesOffset);\n SparseTensor.addDims(builder, dimsOffset);\n return SparseTensor.endSparseTensor(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Attribute {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Attribute\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Attribute {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Attribute= obj\n * @returns Attribute\n */\n static getRootAsAttribute(bb: flatbuffers.ByteBuffer, obj?: Attribute): Attribute {\n return (obj || new Attribute()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Attribute= obj\n * @returns Attribute\n */\n static getSizePrefixedRootAsAttribute(bb: flatbuffers.ByteBuffer, obj?: Attribute): Attribute {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Attribute()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n name(): string|null;\n name(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n name(optionalEncoding?: 
any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns onnxruntime.experimental.fbs.AttributeType\n */\n type(): onnxruntime.experimental.fbs.AttributeType {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? /** */ (this.bb!.readInt32(this.bb_pos + offset)) :\n onnxruntime.experimental.fbs.AttributeType.UNDEFINED;\n }\n\n /**\n * @returns number\n */\n f(): number {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.readFloat32(this.bb_pos + offset) : 0.0;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n i(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n s(): string|null;\n s(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n s(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor|null\n */\n t(obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Graph= obj\n * @returns onnxruntime.experimental.fbs.Graph|null\n */\n g(obj?: onnxruntime.experimental.fbs.Graph): onnxruntime.experimental.fbs.Graph|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? (obj || new onnxruntime.experimental.fbs.Graph())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param number index\n * @returns number\n */\n floats(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.readFloat32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;\n }\n\n /**\n * @returns number\n */\n floatsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Float32Array\n */\n floatsArray(): Float32Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ?\n new Float32Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n ints(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? 
this.bb!.readInt64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n intsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 22);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n strings(index: number): string;\n strings(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n strings(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n stringsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 24);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor\n */\n tensors(index: number, obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n tensorsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 26);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Graph= obj\n * @returns onnxruntime.experimental.fbs.Graph\n */\n graphs(index: number, obj?: onnxruntime.experimental.fbs.Graph): onnxruntime.experimental.fbs.Graph|null {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? (obj || new onnxruntime.experimental.fbs.Graph())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n graphsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 28);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startAttribute(builder: flatbuffers.Builder) {\n builder.startObject(13);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nameOffset\n */\n static addName(builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param onnxruntime.experimental.fbs.AttributeType type\n */\n static addType(builder: flatbuffers.Builder, type: onnxruntime.experimental.fbs.AttributeType) {\n builder.addFieldInt32(2, type, onnxruntime.experimental.fbs.AttributeType.UNDEFINED);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number f\n */\n static addF(builder: flatbuffers.Builder, f: number) {\n builder.addFieldFloat32(3, f, 0.0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long i\n */\n static addI(builder: flatbuffers.Builder, i: flatbuffers.Long) {\n builder.addFieldInt64(4, i, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sOffset\n */\n static addS(builder: flatbuffers.Builder, sOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, sOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset tOffset\n */\n static addT(builder: flatbuffers.Builder, tOffset: flatbuffers.Offset) {\n builder.addFieldOffset(6, tOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset gOffset\n */\n static addG(builder: flatbuffers.Builder, gOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, gOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset floatsOffset\n */\n static addFloats(builder: flatbuffers.Builder, floatsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(8, floatsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createFloatsVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addFloat32(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startFloatsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset intsOffset\n */\n static addInts(builder: flatbuffers.Builder, intsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(9, intsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createIntsVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startIntsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset stringsOffset\n */\n static addStrings(builder: flatbuffers.Builder, stringsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(10, stringsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createStringsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startStringsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset tensorsOffset\n */\n static addTensors(builder: flatbuffers.Builder, tensorsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(11, tensorsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createTensorsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startTensorsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphsOffset\n */\n static addGraphs(builder: flatbuffers.Builder, graphsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(12, graphsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createGraphsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startGraphsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endAttribute(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createAttribute(\n builder: flatbuffers.Builder, nameOffset: flatbuffers.Offset, docStringOffset: flatbuffers.Offset,\n type: onnxruntime.experimental.fbs.AttributeType, f: number, i: flatbuffers.Long, sOffset: flatbuffers.Offset,\n tOffset: flatbuffers.Offset, gOffset: flatbuffers.Offset, floatsOffset: flatbuffers.Offset,\n intsOffset: flatbuffers.Offset, stringsOffset: flatbuffers.Offset, tensorsOffset: flatbuffers.Offset,\n graphsOffset: flatbuffers.Offset): flatbuffers.Offset {\n Attribute.startAttribute(builder);\n Attribute.addName(builder, nameOffset);\n Attribute.addDocString(builder, docStringOffset);\n Attribute.addType(builder, type);\n Attribute.addF(builder, f);\n Attribute.addI(builder, i);\n Attribute.addS(builder, sOffset);\n Attribute.addT(builder, tOffset);\n Attribute.addG(builder, gOffset);\n Attribute.addFloats(builder, floatsOffset);\n Attribute.addInts(builder, intsOffset);\n Attribute.addStrings(builder, stringsOffset);\n Attribute.addTensors(builder, tensorsOffset);\n Attribute.addGraphs(builder, graphsOffset);\n return Attribute.endAttribute(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Graph {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Graph\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Graph {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Graph= obj\n * @returns Graph\n */\n static getRootAsGraph(bb: flatbuffers.ByteBuffer, obj?: Graph): Graph {\n return (obj || new Graph()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Graph= obj\n * @returns Graph\n */\n static getSizePrefixedRootAsGraph(bb: flatbuffers.ByteBuffer, obj?: Graph): Graph {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Graph()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Tensor= obj\n * @returns onnxruntime.experimental.fbs.Tensor\n */\n initializers(index: number, obj?: onnxruntime.experimental.fbs.Tensor): onnxruntime.experimental.fbs.Tensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.Tensor())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n initializersLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.ValueInfo= obj\n * @returns onnxruntime.experimental.fbs.ValueInfo\n */\n nodeArgs(index: number, obj?: onnxruntime.experimental.fbs.ValueInfo): onnxruntime.experimental.fbs.ValueInfo|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.ValueInfo())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n nodeArgsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.Node= obj\n * @returns onnxruntime.experimental.fbs.Node\n */\n nodes(index: number, obj?: onnxruntime.experimental.fbs.Node): onnxruntime.experimental.fbs.Node|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.Node())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n nodesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns number\n */\n maxNodeIndex(): number {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.readUint32(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.NodeEdge= obj\n * @returns onnxruntime.experimental.fbs.NodeEdge\n */\n nodeEdges(index: number, obj?: onnxruntime.experimental.fbs.NodeEdge): onnxruntime.experimental.fbs.NodeEdge|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? (obj || new onnxruntime.experimental.fbs.NodeEdge())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n nodeEdgesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n inputs(index: number): string;\n inputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n inputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n inputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array\n */\n outputs(index: number): string;\n outputs(index: number, optionalEncoding: flatbuffers.Encoding): string|Uint8Array;\n outputs(index: number, optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? this.bb!.__string(this.bb!.__vector(this.bb_pos + offset) + index * 4, optionalEncoding) : null;\n }\n\n /**\n * @returns number\n */\n outputsLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.SparseTensor= obj\n * @returns onnxruntime.experimental.fbs.SparseTensor\n */\n sparseInitializers(index: number, obj?: onnxruntime.experimental.fbs.SparseTensor):\n onnxruntime.experimental.fbs.SparseTensor|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? (obj || new onnxruntime.experimental.fbs.SparseTensor())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n sparseInitializersLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startGraph(builder: flatbuffers.Builder) {\n builder.startObject(8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset initializersOffset\n */\n static addInitializers(builder: flatbuffers.Builder, initializersOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, initializersOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInitializersVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInitializersVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodeArgsOffset\n */\n static addNodeArgs(builder: flatbuffers.Builder, nodeArgsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, nodeArgsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createNodeArgsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodeArgsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodesOffset\n */\n static addNodes(builder: flatbuffers.Builder, nodesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, nodesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createNodesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number maxNodeIndex\n */\n static addMaxNodeIndex(builder: flatbuffers.Builder, maxNodeIndex: number) {\n builder.addFieldInt32(3, maxNodeIndex, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodeEdgesOffset\n */\n static addNodeEdges(builder: flatbuffers.Builder, nodeEdgesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(4, nodeEdgesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createNodeEdgesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodeEdgesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset inputsOffset\n */\n static addInputs(builder: flatbuffers.Builder, inputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(5, inputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createInputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startInputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset outputsOffset\n */\n static addOutputs(builder: flatbuffers.Builder, outputsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(6, outputsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createOutputsVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOutputsVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sparseInitializersOffset\n */\n static addSparseInitializers(builder: flatbuffers.Builder, sparseInitializersOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, sparseInitializersOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createSparseInitializersVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]):\n flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startSparseInitializersVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endGraph(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createGraph(\n builder: flatbuffers.Builder, initializersOffset: flatbuffers.Offset, nodeArgsOffset: flatbuffers.Offset,\n nodesOffset: flatbuffers.Offset, maxNodeIndex: number, nodeEdgesOffset: flatbuffers.Offset,\n inputsOffset: flatbuffers.Offset, outputsOffset: flatbuffers.Offset,\n sparseInitializersOffset: flatbuffers.Offset): flatbuffers.Offset {\n Graph.startGraph(builder);\n Graph.addInitializers(builder, initializersOffset);\n Graph.addNodeArgs(builder, nodeArgsOffset);\n Graph.addNodes(builder, nodesOffset);\n Graph.addMaxNodeIndex(builder, maxNodeIndex);\n Graph.addNodeEdges(builder, nodeEdgesOffset);\n Graph.addInputs(builder, inputsOffset);\n Graph.addOutputs(builder, outputsOffset);\n Graph.addSparseInitializers(builder, sparseInitializersOffset);\n return Graph.endGraph(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class Model {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns Model\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): Model {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Model= obj\n * @returns Model\n */\n static getRootAsModel(bb: flatbuffers.ByteBuffer, obj?: Model): Model {\n return (obj || new Model()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param Model= obj\n * @returns Model\n */\n static getSizePrefixedRootAsModel(bb: flatbuffers.ByteBuffer, obj?: Model): Model {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new Model()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @returns flatbuffers.Long\n */\n irVersion(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.OperatorSetId= obj\n * @returns onnxruntime.experimental.fbs.OperatorSetId\n */\n opsetImport(index: number, obj?: onnxruntime.experimental.fbs.OperatorSetId):\n onnxruntime.experimental.fbs.OperatorSetId|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.OperatorSetId())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) :\n null;\n }\n\n /**\n * @returns number\n */\n opsetImportLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? 
this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n producerName(): string|null;\n producerName(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n producerName(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n producerVersion(): string|null;\n producerVersion(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n producerVersion(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 10);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n domain(): string|null;\n domain(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n domain(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 12);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @returns flatbuffers.Long\n */\n modelVersion(): flatbuffers.Long {\n let offset = this.bb!.__offset(this.bb_pos, 14);\n return offset ? this.bb!.readInt64(this.bb_pos + offset) : this.bb!.createLong(0, 0);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n docString(): string|null;\n docString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n docString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 16);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Graph= obj\n * @returns onnxruntime.experimental.fbs.Graph|null\n */\n graph(obj?: onnxruntime.experimental.fbs.Graph): onnxruntime.experimental.fbs.Graph|null {\n let offset = this.bb!.__offset(this.bb_pos, 18);\n return offset ? (obj || new onnxruntime.experimental.fbs.Graph())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n graphDocString(): string|null;\n graphDocString(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n graphDocString(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 20);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startModel(builder: flatbuffers.Builder) {\n builder.startObject(9);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long irVersion\n */\n static addIrVersion(builder: flatbuffers.Builder, irVersion: flatbuffers.Long) {\n builder.addFieldInt64(0, irVersion, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset opsetImportOffset\n */\n static addOpsetImport(builder: flatbuffers.Builder, opsetImportOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, opsetImportOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createOpsetImportVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startOpsetImportVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset producerNameOffset\n */\n static addProducerName(builder: flatbuffers.Builder, producerNameOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, producerNameOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset producerVersionOffset\n */\n static addProducerVersion(builder: flatbuffers.Builder, producerVersionOffset: flatbuffers.Offset) {\n builder.addFieldOffset(3, producerVersionOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset domainOffset\n */\n static addDomain(builder: flatbuffers.Builder, domainOffset: flatbuffers.Offset) {\n builder.addFieldOffset(4, domainOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Long modelVersion\n */\n static addModelVersion(builder: flatbuffers.Builder, modelVersion: flatbuffers.Long) {\n builder.addFieldInt64(5, modelVersion, builder.createLong(0, 0));\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset docStringOffset\n */\n static addDocString(builder: flatbuffers.Builder, docStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(6, docStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphOffset\n */\n static addGraph(builder: flatbuffers.Builder, graphOffset: flatbuffers.Offset) {\n builder.addFieldOffset(7, graphOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphDocStringOffset\n */\n static addGraphDocString(builder: flatbuffers.Builder, graphDocStringOffset: flatbuffers.Offset) {\n builder.addFieldOffset(8, graphDocStringOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endModel(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createModel(\n builder: flatbuffers.Builder, irVersion: flatbuffers.Long, opsetImportOffset: flatbuffers.Offset,\n producerNameOffset: flatbuffers.Offset, producerVersionOffset: flatbuffers.Offset,\n domainOffset: flatbuffers.Offset, modelVersion: flatbuffers.Long, docStringOffset: flatbuffers.Offset,\n graphOffset: flatbuffers.Offset, graphDocStringOffset: flatbuffers.Offset): flatbuffers.Offset {\n Model.startModel(builder);\n Model.addIrVersion(builder, irVersion);\n Model.addOpsetImport(builder, opsetImportOffset);\n Model.addProducerName(builder, producerNameOffset);\n Model.addProducerVersion(builder, producerVersionOffset);\n Model.addDomain(builder, domainOffset);\n Model.addModelVersion(builder, modelVersion);\n Model.addDocString(builder, docStringOffset);\n Model.addGraph(builder, graphOffset);\n Model.addGraphDocString(builder, graphDocStringOffset);\n return Model.endModel(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class KernelCreateInfos {\n bb: flatbuffers.ByteBuffer|null = null;\n\n 
bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns KernelCreateInfos\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): KernelCreateInfos {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param KernelCreateInfos= obj\n * @returns KernelCreateInfos\n */\n static getRootAsKernelCreateInfos(bb: flatbuffers.ByteBuffer, obj?: KernelCreateInfos): KernelCreateInfos {\n return (obj || new KernelCreateInfos()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param KernelCreateInfos= obj\n * @returns KernelCreateInfos\n */\n static getSizePrefixedRootAsKernelCreateInfos(bb: flatbuffers.ByteBuffer, obj?: KernelCreateInfos):\n KernelCreateInfos {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new KernelCreateInfos()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param number index\n * @returns number\n */\n nodeIndices(index: number): number|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.readUint32(this.bb!.__vector(this.bb_pos + offset) + index * 4) : 0;\n }\n\n /**\n * @returns number\n */\n nodeIndicesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @returns Uint32Array\n */\n nodeIndicesArray(): Uint32Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ?\n new Uint32Array(\n this.bb!.bytes().buffer, this.bb!.bytes().byteOffset + this.bb!.__vector(this.bb_pos + offset),\n this.bb!.__vector_len(this.bb_pos + offset)) :\n null;\n }\n\n /**\n * @param number index\n * @returns flatbuffers.Long\n */\n kernelDefHashes(index: number): flatbuffers.Long|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.readUint64(this.bb!.__vector(this.bb_pos + offset) + index * 8) :\n this.bb!.createLong(0, 0);\n }\n\n /**\n * @returns number\n */\n kernelDefHashesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startKernelCreateInfos(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset nodeIndicesOffset\n */\n static addNodeIndices(builder: flatbuffers.Builder, nodeIndicesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, nodeIndicesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createNodeIndicesVector(builder: flatbuffers.Builder, data: number[]|Uint8Array): flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt32(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startNodeIndicesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset kernelDefHashesOffset\n */\n static addKernelDefHashes(builder: flatbuffers.Builder, kernelDefHashesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, kernelDefHashesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. 
data\n * @returns flatbuffers.Offset\n */\n static createKernelDefHashesVector(builder: flatbuffers.Builder, data: flatbuffers.Long[]): flatbuffers.Offset {\n builder.startVector(8, data.length, 8);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addInt64(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startKernelDefHashesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(8, numElems, 8);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endKernelCreateInfos(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createKernelCreateInfos(\n builder: flatbuffers.Builder, nodeIndicesOffset: flatbuffers.Offset,\n kernelDefHashesOffset: flatbuffers.Offset): flatbuffers.Offset {\n KernelCreateInfos.startKernelCreateInfos(builder);\n KernelCreateInfos.addNodeIndices(builder, nodeIndicesOffset);\n KernelCreateInfos.addKernelDefHashes(builder, kernelDefHashesOffset);\n return KernelCreateInfos.endKernelCreateInfos(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SubGraphSessionState {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SubGraphSessionState\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SubGraphSessionState {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SubGraphSessionState= obj\n * @returns SubGraphSessionState\n */\n static getRootAsSubGraphSessionState(bb: flatbuffers.ByteBuffer, obj?: SubGraphSessionState): SubGraphSessionState {\n return (obj || new SubGraphSessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SubGraphSessionState= obj\n * @returns SubGraphSessionState\n */\n static getSizePrefixedRootAsSubGraphSessionState(bb: flatbuffers.ByteBuffer, obj?: SubGraphSessionState):\n SubGraphSessionState {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SubGraphSessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n graphId(): string|null;\n graphId(optionalEncoding: flatbuffers.Encoding): string|Uint8Array|null;\n graphId(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.SessionState= obj\n * @returns onnxruntime.experimental.fbs.SessionState|null\n */\n sessionState(obj?: onnxruntime.experimental.fbs.SessionState): onnxruntime.experimental.fbs.SessionState|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.SessionState())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) 
:\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSubGraphSessionState(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset graphIdOffset\n */\n static addGraphId(builder: flatbuffers.Builder, graphIdOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, graphIdOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sessionStateOffset\n */\n static addSessionState(builder: flatbuffers.Builder, sessionStateOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, sessionStateOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSubGraphSessionState(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n builder.requiredField(offset, 4); // graph_id\n return offset;\n }\n\n static createSubGraphSessionState(\n builder: flatbuffers.Builder, graphIdOffset: flatbuffers.Offset,\n sessionStateOffset: flatbuffers.Offset): flatbuffers.Offset {\n SubGraphSessionState.startSubGraphSessionState(builder);\n SubGraphSessionState.addGraphId(builder, graphIdOffset);\n SubGraphSessionState.addSessionState(builder, sessionStateOffset);\n return SubGraphSessionState.endSubGraphSessionState(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class SessionState {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns SessionState\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): SessionState {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SessionState= obj\n * @returns SessionState\n */\n static getRootAsSessionState(bb: flatbuffers.ByteBuffer, obj?: SessionState): SessionState {\n return (obj || new SessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param SessionState= obj\n * @returns SessionState\n */\n static getSizePrefixedRootAsSessionState(bb: flatbuffers.ByteBuffer, obj?: SessionState): SessionState {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new SessionState()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param onnxruntime.experimental.fbs.KernelCreateInfos= obj\n * @returns onnxruntime.experimental.fbs.KernelCreateInfos|null\n */\n kernels(obj?: onnxruntime.experimental.fbs.KernelCreateInfos): onnxruntime.experimental.fbs.KernelCreateInfos|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? (obj || new onnxruntime.experimental.fbs.KernelCreateInfos())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param number index\n * @param onnxruntime.experimental.fbs.SubGraphSessionState= obj\n * @returns onnxruntime.experimental.fbs.SubGraphSessionState\n */\n subGraphSessionStates(index: number, obj?: onnxruntime.experimental.fbs.SubGraphSessionState):\n onnxruntime.experimental.fbs.SubGraphSessionState|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.SubGraphSessionState())\n .__init(this.bb!.__indirect(this.bb!.__vector(this.bb_pos + offset) + index * 4), this.bb!) 
:\n null;\n }\n\n /**\n * @returns number\n */\n subGraphSessionStatesLength(): number {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? this.bb!.__vector_len(this.bb_pos + offset) : 0;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startSessionState(builder: flatbuffers.Builder) {\n builder.startObject(2);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset kernelsOffset\n */\n static addKernels(builder: flatbuffers.Builder, kernelsOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, kernelsOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset subGraphSessionStatesOffset\n */\n static addSubGraphSessionStates(builder: flatbuffers.Builder, subGraphSessionStatesOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, subGraphSessionStatesOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param Array. data\n * @returns flatbuffers.Offset\n */\n static createSubGraphSessionStatesVector(builder: flatbuffers.Builder, data: flatbuffers.Offset[]):\n flatbuffers.Offset {\n builder.startVector(4, data.length, 4);\n for (let i = data.length - 1; i >= 0; i--) {\n builder.addOffset(data[i]);\n }\n return builder.endVector();\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param number numElems\n */\n static startSubGraphSessionStatesVector(builder: flatbuffers.Builder, numElems: number) {\n builder.startVector(4, numElems, 4);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endSessionState(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n static createSessionState(\n builder: flatbuffers.Builder, kernelsOffset: flatbuffers.Offset,\n subGraphSessionStatesOffset: flatbuffers.Offset): flatbuffers.Offset {\n SessionState.startSessionState(builder);\n SessionState.addKernels(builder, kernelsOffset);\n SessionState.addSubGraphSessionStates(builder, subGraphSessionStatesOffset);\n return SessionState.endSessionState(builder);\n }\n }\n}\n/**\n * @constructor\n */\nexport namespace onnxruntime.experimental.fbs {\n export class InferenceSession {\n bb: flatbuffers.ByteBuffer|null = null;\n\n bb_pos = 0;\n /**\n * @param number i\n * @param flatbuffers.ByteBuffer bb\n * @returns InferenceSession\n */\n __init(i: number, bb: flatbuffers.ByteBuffer): InferenceSession {\n this.bb_pos = i;\n this.bb = bb;\n return this;\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param InferenceSession= obj\n * @returns InferenceSession\n */\n static getRootAsInferenceSession(bb: flatbuffers.ByteBuffer, obj?: InferenceSession): InferenceSession {\n return (obj || new InferenceSession()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @param InferenceSession= obj\n * @returns InferenceSession\n */\n static getSizePrefixedRootAsInferenceSession(bb: flatbuffers.ByteBuffer, obj?: InferenceSession): InferenceSession {\n bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);\n return (obj || new InferenceSession()).__init(bb.readInt32(bb.position()) + bb.position(), bb);\n }\n\n /**\n * @param flatbuffers.ByteBuffer bb\n * @returns boolean\n */\n static bufferHasIdentifier(bb: flatbuffers.ByteBuffer): boolean {\n return bb.__has_identifier('ORTM');\n }\n\n /**\n * @param flatbuffers.Encoding= optionalEncoding\n * @returns string|Uint8Array|null\n */\n ortVersion(): string|null;\n ortVersion(optionalEncoding: 
flatbuffers.Encoding): string|Uint8Array|null;\n ortVersion(optionalEncoding?: any): string|Uint8Array|null {\n let offset = this.bb!.__offset(this.bb_pos, 4);\n return offset ? this.bb!.__string(this.bb_pos + offset, optionalEncoding) : null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.Model= obj\n * @returns onnxruntime.experimental.fbs.Model|null\n */\n model(obj?: onnxruntime.experimental.fbs.Model): onnxruntime.experimental.fbs.Model|null {\n let offset = this.bb!.__offset(this.bb_pos, 6);\n return offset ? (obj || new onnxruntime.experimental.fbs.Model())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param onnxruntime.experimental.fbs.SessionState= obj\n * @returns onnxruntime.experimental.fbs.SessionState|null\n */\n sessionState(obj?: onnxruntime.experimental.fbs.SessionState): onnxruntime.experimental.fbs.SessionState|null {\n let offset = this.bb!.__offset(this.bb_pos, 8);\n return offset ? (obj || new onnxruntime.experimental.fbs.SessionState())\n .__init(this.bb!.__indirect(this.bb_pos + offset), this.bb!) :\n null;\n }\n\n /**\n * @param flatbuffers.Builder builder\n */\n static startInferenceSession(builder: flatbuffers.Builder) {\n builder.startObject(3);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset ortVersionOffset\n */\n static addOrtVersion(builder: flatbuffers.Builder, ortVersionOffset: flatbuffers.Offset) {\n builder.addFieldOffset(0, ortVersionOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset modelOffset\n */\n static addModel(builder: flatbuffers.Builder, modelOffset: flatbuffers.Offset) {\n builder.addFieldOffset(1, modelOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset sessionStateOffset\n */\n static addSessionState(builder: flatbuffers.Builder, sessionStateOffset: flatbuffers.Offset) {\n builder.addFieldOffset(2, sessionStateOffset, 0);\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @returns flatbuffers.Offset\n */\n static endInferenceSession(builder: flatbuffers.Builder): flatbuffers.Offset {\n let offset = builder.endObject();\n return offset;\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset offset\n */\n static finishInferenceSessionBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {\n builder.finish(offset, 'ORTM');\n }\n\n /**\n * @param flatbuffers.Builder builder\n * @param flatbuffers.Offset offset\n */\n static finishSizePrefixedInferenceSessionBuffer(builder: flatbuffers.Builder, offset: flatbuffers.Offset) {\n builder.finish(offset, 'ORTM', true);\n }\n\n static createInferenceSession(\n builder: flatbuffers.Builder, ortVersionOffset: flatbuffers.Offset, modelOffset: flatbuffers.Offset,\n sessionStateOffset: flatbuffers.Offset): flatbuffers.Offset {\n InferenceSession.startInferenceSession(builder);\n InferenceSession.addOrtVersion(builder, ortVersionOffset);\n InferenceSession.addModel(builder, modelOffset);\n InferenceSession.addSessionState(builder, sessionStateOffset);\n return InferenceSession.endInferenceSession(builder);\n }\n }\n}\n", "\"use strict\";\r\nmodule.exports = asPromise;\r\n\r\n/**\r\n * Callback as used by {@link util.asPromise}.\r\n * @typedef asPromiseCallback\r\n * @type {function}\r\n * @param {Error|null} error Error, if any\r\n * @param {...*} params Additional arguments\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Returns a promise from a node-style callback function.\r\n * @memberof util\r\n * 
@param {asPromiseCallback} fn Function to call\r\n * @param {*} ctx Function context\r\n * @param {...*} params Function arguments\r\n * @returns {Promise<*>} Promisified function\r\n */\r\nfunction asPromise(fn, ctx/*, varargs */) {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0,\r\n index = 2,\r\n pending = true;\r\n while (index < arguments.length)\r\n params[offset++] = arguments[index++];\r\n return new Promise(function executor(resolve, reject) {\r\n params[offset] = function callback(err/*, varargs */) {\r\n if (pending) {\r\n pending = false;\r\n if (err)\r\n reject(err);\r\n else {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0;\r\n while (offset < params.length)\r\n params[offset++] = arguments[offset];\r\n resolve.apply(null, params);\r\n }\r\n }\r\n };\r\n try {\r\n fn.apply(ctx || null, params);\r\n } catch (err) {\r\n if (pending) {\r\n pending = false;\r\n reject(err);\r\n }\r\n }\r\n });\r\n}\r\n", "\"use strict\";\r\n\r\n/**\r\n * A minimal base64 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar base64 = exports;\r\n\r\n/**\r\n * Calculates the byte length of a base64 encoded string.\r\n * @param {string} string Base64 encoded string\r\n * @returns {number} Byte length\r\n */\r\nbase64.length = function length(string) {\r\n var p = string.length;\r\n if (!p)\r\n return 0;\r\n var n = 0;\r\n while (--p % 4 > 1 && string.charAt(p) === \"=\")\r\n ++n;\r\n return Math.ceil(string.length * 3) / 4 - n;\r\n};\r\n\r\n// Base64 encoding table\r\nvar b64 = new Array(64);\r\n\r\n// Base64 decoding table\r\nvar s64 = new Array(123);\r\n\r\n// 65..90, 97..122, 48..57, 43, 47\r\nfor (var i = 0; i < 64;)\r\n s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i - 59 | 43] = i++;\r\n\r\n/**\r\n * Encodes a buffer to a base64 encoded string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} Base64 encoded string\r\n */\r\nbase64.encode = function encode(buffer, start, end) {\r\n var parts = null,\r\n chunk = [];\r\n var i = 0, // output index\r\n j = 0, // goto index\r\n t; // temporary\r\n while (start < end) {\r\n var b = buffer[start++];\r\n switch (j) {\r\n case 0:\r\n chunk[i++] = b64[b >> 2];\r\n t = (b & 3) << 4;\r\n j = 1;\r\n break;\r\n case 1:\r\n chunk[i++] = b64[t | b >> 4];\r\n t = (b & 15) << 2;\r\n j = 2;\r\n break;\r\n case 2:\r\n chunk[i++] = b64[t | b >> 6];\r\n chunk[i++] = b64[b & 63];\r\n j = 0;\r\n break;\r\n }\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (j) {\r\n chunk[i++] = b64[t];\r\n chunk[i++] = 61;\r\n if (j === 1)\r\n chunk[i++] = 61;\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\nvar invalidEncoding = \"invalid encoding\";\r\n\r\n/**\r\n * Decodes a base64 encoded string to a buffer.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Number of bytes written\r\n * @throws {Error} If encoding is invalid\r\n */\r\nbase64.decode = function decode(string, buffer, offset) {\r\n var start = offset;\r\n var j = 0, // goto index\r\n t; // temporary\r\n for (var i = 0; i < string.length;) {\r\n var c = 
string.charCodeAt(i++);\r\n if (c === 61 && j > 1)\r\n break;\r\n if ((c = s64[c]) === undefined)\r\n throw Error(invalidEncoding);\r\n switch (j) {\r\n case 0:\r\n t = c;\r\n j = 1;\r\n break;\r\n case 1:\r\n buffer[offset++] = t << 2 | (c & 48) >> 4;\r\n t = c;\r\n j = 2;\r\n break;\r\n case 2:\r\n buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2;\r\n t = c;\r\n j = 3;\r\n break;\r\n case 3:\r\n buffer[offset++] = (t & 3) << 6 | c;\r\n j = 0;\r\n break;\r\n }\r\n }\r\n if (j === 1)\r\n throw Error(invalidEncoding);\r\n return offset - start;\r\n};\r\n\r\n/**\r\n * Tests if the specified string appears to be base64 encoded.\r\n * @param {string} string String to test\r\n * @returns {boolean} `true` if probably base64 encoded, otherwise false\r\n */\r\nbase64.test = function test(string) {\r\n return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string);\r\n};\r\n", "\"use strict\";\r\nmodule.exports = EventEmitter;\r\n\r\n/**\r\n * Constructs a new event emitter instance.\r\n * @classdesc A minimal event emitter.\r\n * @memberof util\r\n * @constructor\r\n */\r\nfunction EventEmitter() {\r\n\r\n /**\r\n * Registered listeners.\r\n * @type {Object.}\r\n * @private\r\n */\r\n this._listeners = {};\r\n}\r\n\r\n/**\r\n * Registers an event listener.\r\n * @param {string} evt Event name\r\n * @param {function} fn Listener\r\n * @param {*} [ctx] Listener context\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.on = function on(evt, fn, ctx) {\r\n (this._listeners[evt] || (this._listeners[evt] = [])).push({\r\n fn : fn,\r\n ctx : ctx || this\r\n });\r\n return this;\r\n};\r\n\r\n/**\r\n * Removes an event listener or any matching listeners if arguments are omitted.\r\n * @param {string} [evt] Event name. Removes all listeners if omitted.\r\n * @param {function} [fn] Listener to remove. 
Removes all listeners of `evt` if omitted.\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.off = function off(evt, fn) {\r\n if (evt === undefined)\r\n this._listeners = {};\r\n else {\r\n if (fn === undefined)\r\n this._listeners[evt] = [];\r\n else {\r\n var listeners = this._listeners[evt];\r\n for (var i = 0; i < listeners.length;)\r\n if (listeners[i].fn === fn)\r\n listeners.splice(i, 1);\r\n else\r\n ++i;\r\n }\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Emits an event by calling its listeners with the specified arguments.\r\n * @param {string} evt Event name\r\n * @param {...*} args Arguments\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.emit = function emit(evt) {\r\n var listeners = this._listeners[evt];\r\n if (listeners) {\r\n var args = [],\r\n i = 1;\r\n for (; i < arguments.length;)\r\n args.push(arguments[i++]);\r\n for (i = 0; i < listeners.length;)\r\n listeners[i].fn.apply(listeners[i++].ctx, args);\r\n }\r\n return this;\r\n};\r\n", "\"use strict\";\r\n\r\nmodule.exports = factory(factory);\r\n\r\n/**\r\n * Reads / writes floats / doubles from / to buffers.\r\n * @name util.float\r\n * @namespace\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using little endian byte order.\r\n * @name util.float.writeFloatLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using big endian byte order.\r\n * @name util.float.writeFloatBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using little endian byte order.\r\n * @name util.float.readFloatLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using big endian byte order.\r\n * @name util.float.readFloatBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using little endian byte order.\r\n * @name util.float.writeDoubleLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using big endian byte order.\r\n * @name util.float.writeDoubleBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using little endian byte order.\r\n * @name util.float.readDoubleLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using big endian byte order.\r\n * @name util.float.readDoubleBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n// Factory function for the purpose of node-based testing in modified global 
environments\r\nfunction factory(exports) {\r\n\r\n // float: typed array\r\n if (typeof Float32Array !== \"undefined\") (function() {\r\n\r\n var f32 = new Float32Array([ -0 ]),\r\n f8b = new Uint8Array(f32.buffer),\r\n le = f8b[3] === 128;\r\n\r\n function writeFloat_f32_cpy(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n }\r\n\r\n function writeFloat_f32_rev(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[3];\r\n buf[pos + 1] = f8b[2];\r\n buf[pos + 2] = f8b[1];\r\n buf[pos + 3] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy;\r\n\r\n function readFloat_f32_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n function readFloat_f32_rev(buf, pos) {\r\n f8b[3] = buf[pos ];\r\n f8b[2] = buf[pos + 1];\r\n f8b[1] = buf[pos + 2];\r\n f8b[0] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy;\r\n\r\n // float: ieee754\r\n })(); else (function() {\r\n\r\n function writeFloat_ieee754(writeUint, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0)\r\n writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos);\r\n else if (isNaN(val))\r\n writeUint(2143289344, buf, pos);\r\n else if (val > 3.4028234663852886e+38) // +-Infinity\r\n writeUint((sign << 31 | 2139095040) >>> 0, buf, pos);\r\n else if (val < 1.1754943508222875e-38) // denormal\r\n writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos);\r\n else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2),\r\n mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607;\r\n writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos);\r\n }\r\n }\r\n\r\n exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE);\r\n exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE);\r\n\r\n function readFloat_ieee754(readUint, buf, pos) {\r\n var uint = readUint(buf, pos),\r\n sign = (uint >> 31) * 2 + 1,\r\n exponent = uint >>> 23 & 255,\r\n mantissa = uint & 8388607;\r\n return exponent === 255\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? 
sign * 1.401298464324817e-45 * mantissa\r\n : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608);\r\n }\r\n\r\n exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE);\r\n exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE);\r\n\r\n })();\r\n\r\n // double: typed array\r\n if (typeof Float64Array !== \"undefined\") (function() {\r\n\r\n var f64 = new Float64Array([-0]),\r\n f8b = new Uint8Array(f64.buffer),\r\n le = f8b[7] === 128;\r\n\r\n function writeDouble_f64_cpy(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n buf[pos + 4] = f8b[4];\r\n buf[pos + 5] = f8b[5];\r\n buf[pos + 6] = f8b[6];\r\n buf[pos + 7] = f8b[7];\r\n }\r\n\r\n function writeDouble_f64_rev(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[7];\r\n buf[pos + 1] = f8b[6];\r\n buf[pos + 2] = f8b[5];\r\n buf[pos + 3] = f8b[4];\r\n buf[pos + 4] = f8b[3];\r\n buf[pos + 5] = f8b[2];\r\n buf[pos + 6] = f8b[1];\r\n buf[pos + 7] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy;\r\n\r\n function readDouble_f64_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n f8b[4] = buf[pos + 4];\r\n f8b[5] = buf[pos + 5];\r\n f8b[6] = buf[pos + 6];\r\n f8b[7] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n function readDouble_f64_rev(buf, pos) {\r\n f8b[7] = buf[pos ];\r\n f8b[6] = buf[pos + 1];\r\n f8b[5] = buf[pos + 2];\r\n f8b[4] = buf[pos + 3];\r\n f8b[3] = buf[pos + 4];\r\n f8b[2] = buf[pos + 5];\r\n f8b[1] = buf[pos + 6];\r\n f8b[0] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy;\r\n\r\n // double: ieee754\r\n })(); else (function() {\r\n\r\n function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1);\r\n } else if (isNaN(val)) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(2146959360, buf, pos + off1);\r\n } else if (val > 1.7976931348623157e+308) { // +-Infinity\r\n writeUint(0, buf, pos + off0);\r\n writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1);\r\n } else {\r\n var mantissa;\r\n if (val < 2.2250738585072014e-308) { // denormal\r\n mantissa = val / 5e-324;\r\n writeUint(mantissa >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1);\r\n } else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2);\r\n if (exponent === 1024)\r\n exponent = 1023;\r\n mantissa = val * Math.pow(2, -exponent);\r\n writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1);\r\n }\r\n }\r\n }\r\n\r\n exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4);\r\n exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0);\r\n\r\n function readDouble_ieee754(readUint, off0, off1, buf, pos) {\r\n var lo = readUint(buf, pos + off0),\r\n hi = readUint(buf, pos + off1);\r\n var sign = (hi >> 31) * 2 + 1,\r\n exponent = hi >>> 20 & 2047,\r\n mantissa = 4294967296 * (hi & 1048575) + lo;\r\n return exponent === 2047\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? sign * 5e-324 * mantissa\r\n : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496);\r\n }\r\n\r\n exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4);\r\n exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0);\r\n\r\n })();\r\n\r\n return exports;\r\n}\r\n\r\n// uint helpers\r\n\r\nfunction writeUintLE(val, buf, pos) {\r\n buf[pos ] = val & 255;\r\n buf[pos + 1] = val >>> 8 & 255;\r\n buf[pos + 2] = val >>> 16 & 255;\r\n buf[pos + 3] = val >>> 24;\r\n}\r\n\r\nfunction writeUintBE(val, buf, pos) {\r\n buf[pos ] = val >>> 24;\r\n buf[pos + 1] = val >>> 16 & 255;\r\n buf[pos + 2] = val >>> 8 & 255;\r\n buf[pos + 3] = val & 255;\r\n}\r\n\r\nfunction readUintLE(buf, pos) {\r\n return (buf[pos ]\r\n | buf[pos + 1] << 8\r\n | buf[pos + 2] << 16\r\n | buf[pos + 3] << 24) >>> 0;\r\n}\r\n\r\nfunction readUintBE(buf, pos) {\r\n return (buf[pos ] << 24\r\n | buf[pos + 1] << 16\r\n | buf[pos + 2] << 8\r\n | buf[pos + 3]) >>> 0;\r\n}\r\n", "\"use strict\";\r\nmodule.exports = inquire;\r\n\r\n/**\r\n * Requires a module only if available.\r\n * @memberof util\r\n * @param {string} moduleName Module to require\r\n * @returns {?Object} Required module if available and not empty, otherwise `null`\r\n */\r\nfunction inquire(moduleName) {\r\n try {\r\n var mod = eval(\"quire\".replace(/^/,\"re\"))(moduleName); // eslint-disable-line no-eval\r\n if (mod && (mod.length || Object.keys(mod).length))\r\n return mod;\r\n } catch (e) {} // eslint-disable-line no-empty\r\n return null;\r\n}\r\n", "\"use strict\";\r\n\r\n/**\r\n * A minimal UTF8 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar utf8 = exports;\r\n\r\n/**\r\n * Calculates the UTF8 byte length of a string.\r\n * @param {string} string String\r\n * @returns {number} Byte length\r\n */\r\nutf8.length = function utf8_length(string) {\r\n var len = 0,\r\n c = 0;\r\n for (var i = 0; i < string.length; ++i) {\r\n c = string.charCodeAt(i);\r\n if (c < 128)\r\n len += 1;\r\n else if (c < 2048)\r\n len += 2;\r\n else if ((c & 0xFC00) 
=== 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) {\r\n ++i;\r\n len += 4;\r\n } else\r\n len += 3;\r\n }\r\n return len;\r\n};\r\n\r\n/**\r\n * Reads UTF8 bytes as a string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} String read\r\n */\r\nutf8.read = function utf8_read(buffer, start, end) {\r\n var len = end - start;\r\n if (len < 1)\r\n return \"\";\r\n var parts = null,\r\n chunk = [],\r\n i = 0, // char offset\r\n t; // temporary\r\n while (start < end) {\r\n t = buffer[start++];\r\n if (t < 128)\r\n chunk[i++] = t;\r\n else if (t > 191 && t < 224)\r\n chunk[i++] = (t & 31) << 6 | buffer[start++] & 63;\r\n else if (t > 239 && t < 365) {\r\n t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000;\r\n chunk[i++] = 0xD800 + (t >> 10);\r\n chunk[i++] = 0xDC00 + (t & 1023);\r\n } else\r\n chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63;\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\n/**\r\n * Writes a string as UTF8 bytes.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Bytes written\r\n */\r\nutf8.write = function utf8_write(string, buffer, offset) {\r\n var start = offset,\r\n c1, // character 1\r\n c2; // character 2\r\n for (var i = 0; i < string.length; ++i) {\r\n c1 = string.charCodeAt(i);\r\n if (c1 < 128) {\r\n buffer[offset++] = c1;\r\n } else if (c1 < 2048) {\r\n buffer[offset++] = c1 >> 6 | 192;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) {\r\n c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF);\r\n ++i;\r\n buffer[offset++] = c1 >> 18 | 240;\r\n buffer[offset++] = c1 >> 12 & 63 | 128;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else {\r\n buffer[offset++] = c1 >> 12 | 224;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n }\r\n }\r\n return offset - start;\r\n};\r\n", "\"use strict\";\r\nmodule.exports = pool;\r\n\r\n/**\r\n * An allocator as used by {@link util.pool}.\r\n * @typedef PoolAllocator\r\n * @type {function}\r\n * @param {number} size Buffer size\r\n * @returns {Uint8Array} Buffer\r\n */\r\n\r\n/**\r\n * A slicer as used by {@link util.pool}.\r\n * @typedef PoolSlicer\r\n * @type {function}\r\n * @param {number} start Start offset\r\n * @param {number} end End offset\r\n * @returns {Uint8Array} Buffer slice\r\n * @this {Uint8Array}\r\n */\r\n\r\n/**\r\n * A general purpose buffer pool.\r\n * @memberof util\r\n * @function\r\n * @param {PoolAllocator} alloc Allocator\r\n * @param {PoolSlicer} slice Slicer\r\n * @param {number} [size=8192] Slab size\r\n * @returns {PoolAllocator} Pooled allocator\r\n */\r\nfunction pool(alloc, slice, size) {\r\n var SIZE = size || 8192;\r\n var MAX = SIZE >>> 1;\r\n var slab = null;\r\n var offset = SIZE;\r\n return function pool_alloc(size) {\r\n if (size < 1 || size > MAX)\r\n return alloc(size);\r\n if (offset + size > SIZE) {\r\n slab = alloc(SIZE);\r\n offset = 
0;\r\n }\r\n var buf = slice.call(slab, offset, offset += size);\r\n if (offset & 7) // align to 32 bit\r\n offset = (offset | 7) + 1;\r\n return buf;\r\n };\r\n}\r\n", "\"use strict\";\nmodule.exports = LongBits;\n\nvar util = require(\"../util/minimal\");\n\n/**\n * Constructs new long bits.\n * @classdesc Helper class for working with the low and high bits of a 64 bit value.\n * @memberof util\n * @constructor\n * @param {number} lo Low 32 bits, unsigned\n * @param {number} hi High 32 bits, unsigned\n */\nfunction LongBits(lo, hi) {\n\n // note that the casts below are theoretically unnecessary as of today, but older statically\n // generated converter code might still call the ctor with signed 32bits. kept for compat.\n\n /**\n * Low bits.\n * @type {number}\n */\n this.lo = lo >>> 0;\n\n /**\n * High bits.\n * @type {number}\n */\n this.hi = hi >>> 0;\n}\n\n/**\n * Zero bits.\n * @memberof util.LongBits\n * @type {util.LongBits}\n */\nvar zero = LongBits.zero = new LongBits(0, 0);\n\nzero.toNumber = function() { return 0; };\nzero.zzEncode = zero.zzDecode = function() { return this; };\nzero.length = function() { return 1; };\n\n/**\n * Zero hash.\n * @memberof util.LongBits\n * @type {string}\n */\nvar zeroHash = LongBits.zeroHash = \"\\0\\0\\0\\0\\0\\0\\0\\0\";\n\n/**\n * Constructs new long bits from the specified number.\n * @param {number} value Value\n * @returns {util.LongBits} Instance\n */\nLongBits.fromNumber = function fromNumber(value) {\n if (value === 0)\n return zero;\n var sign = value < 0;\n if (sign)\n value = -value;\n var lo = value >>> 0,\n hi = (value - lo) / 4294967296 >>> 0;\n if (sign) {\n hi = ~hi >>> 0;\n lo = ~lo >>> 0;\n if (++lo > 4294967295) {\n lo = 0;\n if (++hi > 4294967295)\n hi = 0;\n }\n }\n return new LongBits(lo, hi);\n};\n\n/**\n * Constructs new long bits from a number, long or string.\n * @param {Long|number|string} value Value\n * @returns {util.LongBits} Instance\n */\nLongBits.from = function from(value) {\n if (typeof value === \"number\")\n return LongBits.fromNumber(value);\n if (util.isString(value)) {\n /* istanbul ignore else */\n if (util.Long)\n value = util.Long.fromString(value);\n else\n return LongBits.fromNumber(parseInt(value, 10));\n }\n return value.low || value.high ? new LongBits(value.low >>> 0, value.high >>> 0) : zero;\n};\n\n/**\n * Converts this long bits to a possibly unsafe JavaScript number.\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {number} Possibly unsafe number\n */\nLongBits.prototype.toNumber = function toNumber(unsigned) {\n if (!unsigned && this.hi >>> 31) {\n var lo = ~this.lo + 1 >>> 0,\n hi = ~this.hi >>> 0;\n if (!lo)\n hi = hi + 1 >>> 0;\n return -(lo + hi * 4294967296);\n }\n return this.lo + this.hi * 4294967296;\n};\n\n/**\n * Converts this long bits to a long.\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {Long} Long\n */\nLongBits.prototype.toLong = function toLong(unsigned) {\n return util.Long\n ? 
new util.Long(this.lo | 0, this.hi | 0, Boolean(unsigned))\n /* istanbul ignore next */\n : { low: this.lo | 0, high: this.hi | 0, unsigned: Boolean(unsigned) };\n};\n\nvar charCodeAt = String.prototype.charCodeAt;\n\n/**\n * Constructs new long bits from the specified 8 characters long hash.\n * @param {string} hash Hash\n * @returns {util.LongBits} Bits\n */\nLongBits.fromHash = function fromHash(hash) {\n if (hash === zeroHash)\n return zero;\n return new LongBits(\n ( charCodeAt.call(hash, 0)\n | charCodeAt.call(hash, 1) << 8\n | charCodeAt.call(hash, 2) << 16\n | charCodeAt.call(hash, 3) << 24) >>> 0\n ,\n ( charCodeAt.call(hash, 4)\n | charCodeAt.call(hash, 5) << 8\n | charCodeAt.call(hash, 6) << 16\n | charCodeAt.call(hash, 7) << 24) >>> 0\n );\n};\n\n/**\n * Converts this long bits to a 8 characters long hash.\n * @returns {string} Hash\n */\nLongBits.prototype.toHash = function toHash() {\n return String.fromCharCode(\n this.lo & 255,\n this.lo >>> 8 & 255,\n this.lo >>> 16 & 255,\n this.lo >>> 24 ,\n this.hi & 255,\n this.hi >>> 8 & 255,\n this.hi >>> 16 & 255,\n this.hi >>> 24\n );\n};\n\n/**\n * Zig-zag encodes this long bits.\n * @returns {util.LongBits} `this`\n */\nLongBits.prototype.zzEncode = function zzEncode() {\n var mask = this.hi >> 31;\n this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;\n this.lo = ( this.lo << 1 ^ mask) >>> 0;\n return this;\n};\n\n/**\n * Zig-zag decodes this long bits.\n * @returns {util.LongBits} `this`\n */\nLongBits.prototype.zzDecode = function zzDecode() {\n var mask = -(this.lo & 1);\n this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;\n this.hi = ( this.hi >>> 1 ^ mask) >>> 0;\n return this;\n};\n\n/**\n * Calculates the length of this longbits when encoded as a varint.\n * @returns {number} Length\n */\nLongBits.prototype.length = function length() {\n var part0 = this.lo,\n part1 = (this.lo >>> 28 | this.hi << 4) >>> 0,\n part2 = this.hi >>> 24;\n return part2 === 0\n ? part1 === 0\n ? part0 < 16384\n ? part0 < 128 ? 1 : 2\n : part0 < 2097152 ? 3 : 4\n : part1 < 16384\n ? part1 < 128 ? 5 : 6\n : part1 < 2097152 ? 7 : 8\n : part2 < 128 ? 
9 : 10;\n};\n", "\"use strict\";\nvar util = exports;\n\n// used to return a Promise where callback is omitted\nutil.asPromise = require(\"@protobufjs/aspromise\");\n\n// converts to / from base64 encoded strings\nutil.base64 = require(\"@protobufjs/base64\");\n\n// base class of rpc.Service\nutil.EventEmitter = require(\"@protobufjs/eventemitter\");\n\n// float handling accross browsers\nutil.float = require(\"@protobufjs/float\");\n\n// requires modules optionally and hides the call from bundlers\nutil.inquire = require(\"@protobufjs/inquire\");\n\n// converts to / from utf8 encoded strings\nutil.utf8 = require(\"@protobufjs/utf8\");\n\n// provides a node-like buffer pool in the browser\nutil.pool = require(\"@protobufjs/pool\");\n\n// utility to work with the low and high bits of a 64 bit value\nutil.LongBits = require(\"./longbits\");\n\n/**\n * Whether running within node or not.\n * @memberof util\n * @type {boolean}\n */\nutil.isNode = Boolean(typeof global !== \"undefined\"\n && global\n && global.process\n && global.process.versions\n && global.process.versions.node);\n\n/**\n * Global object reference.\n * @memberof util\n * @type {Object}\n */\nutil.global = util.isNode && global\n || typeof window !== \"undefined\" && window\n || typeof self !== \"undefined\" && self\n || this; // eslint-disable-line no-invalid-this\n\n/**\n * An immuable empty array.\n * @memberof util\n * @type {Array.<*>}\n * @const\n */\nutil.emptyArray = Object.freeze ? Object.freeze([]) : /* istanbul ignore next */ []; // used on prototypes\n\n/**\n * An immutable empty object.\n * @type {Object}\n * @const\n */\nutil.emptyObject = Object.freeze ? Object.freeze({}) : /* istanbul ignore next */ {}; // used on prototypes\n\n/**\n * Tests if the specified value is an integer.\n * @function\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is an integer\n */\nutil.isInteger = Number.isInteger || /* istanbul ignore next */ function isInteger(value) {\n return typeof value === \"number\" && isFinite(value) && Math.floor(value) === value;\n};\n\n/**\n * Tests if the specified value is a string.\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is a string\n */\nutil.isString = function isString(value) {\n return typeof value === \"string\" || value instanceof String;\n};\n\n/**\n * Tests if the specified value is a non-null object.\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is a non-null object\n */\nutil.isObject = function isObject(value) {\n return value && typeof value === \"object\";\n};\n\n/**\n * Checks if a property on a message is considered to be present.\n * This is an alias of {@link util.isSet}.\n * @function\n * @param {Object} obj Plain object or message instance\n * @param {string} prop Property name\n * @returns {boolean} `true` if considered to be present, otherwise `false`\n */\nutil.isset =\n\n/**\n * Checks if a property on a message is considered to be present.\n * @param {Object} obj Plain object or message instance\n * @param {string} prop Property name\n * @returns {boolean} `true` if considered to be present, otherwise `false`\n */\nutil.isSet = function isSet(obj, prop) {\n var value = obj[prop];\n if (value != null && obj.hasOwnProperty(prop)) // eslint-disable-line eqeqeq, no-prototype-builtins\n return typeof value !== \"object\" || (Array.isArray(value) ? 
value.length : Object.keys(value).length) > 0;\n return false;\n};\n\n/**\n * Any compatible Buffer instance.\n * This is a minimal stand-alone definition of a Buffer instance. The actual type is that exported by node's typings.\n * @interface Buffer\n * @extends Uint8Array\n */\n\n/**\n * Node's Buffer class if available.\n * @type {Constructor}\n */\nutil.Buffer = (function() {\n try {\n var Buffer = util.inquire(\"buffer\").Buffer;\n // refuse to use non-node buffers if not explicitly assigned (perf reasons):\n return Buffer.prototype.utf8Write ? Buffer : /* istanbul ignore next */ null;\n } catch (e) {\n /* istanbul ignore next */\n return null;\n }\n})();\n\n// Internal alias of or polyfull for Buffer.from.\nutil._Buffer_from = null;\n\n// Internal alias of or polyfill for Buffer.allocUnsafe.\nutil._Buffer_allocUnsafe = null;\n\n/**\n * Creates a new buffer of whatever type supported by the environment.\n * @param {number|number[]} [sizeOrArray=0] Buffer size or number array\n * @returns {Uint8Array|Buffer} Buffer\n */\nutil.newBuffer = function newBuffer(sizeOrArray) {\n /* istanbul ignore next */\n return typeof sizeOrArray === \"number\"\n ? util.Buffer\n ? util._Buffer_allocUnsafe(sizeOrArray)\n : new util.Array(sizeOrArray)\n : util.Buffer\n ? util._Buffer_from(sizeOrArray)\n : typeof Uint8Array === \"undefined\"\n ? sizeOrArray\n : new Uint8Array(sizeOrArray);\n};\n\n/**\n * Array implementation used in the browser. `Uint8Array` if supported, otherwise `Array`.\n * @type {Constructor}\n */\nutil.Array = typeof Uint8Array !== \"undefined\" ? Uint8Array /* istanbul ignore next */ : Array;\n\n/**\n * Any compatible Long instance.\n * This is a minimal stand-alone definition of a Long instance. The actual type is that exported by long.js.\n * @interface Long\n * @property {number} low Low bits\n * @property {number} high High bits\n * @property {boolean} unsigned Whether unsigned or not\n */\n\n/**\n * Long.js's Long class if available.\n * @type {Constructor}\n */\nutil.Long = /* istanbul ignore next */ util.global.dcodeIO && /* istanbul ignore next */ util.global.dcodeIO.Long\n || /* istanbul ignore next */ util.global.Long\n || util.inquire(\"long\");\n\n/**\n * Regular expression used to verify 2 bit (`bool`) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key2Re = /^true|false|0|1$/;\n\n/**\n * Regular expression used to verify 32 bit (`int32` etc.) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key32Re = /^-?(?:0|[1-9][0-9]*)$/;\n\n/**\n * Regular expression used to verify 64 bit (`int64` etc.) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key64Re = /^(?:[\\\\x00-\\\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;\n\n/**\n * Converts a number or long to an 8 characters long hash string.\n * @param {Long|number} value Value to convert\n * @returns {string} Hash\n */\nutil.longToHash = function longToHash(value) {\n return value\n ? 
util.LongBits.from(value).toHash()\n : util.LongBits.zeroHash;\n};\n\n/**\n * Converts an 8 characters long hash string to a long or number.\n * @param {string} hash Hash\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {Long|number} Original value\n */\nutil.longFromHash = function longFromHash(hash, unsigned) {\n var bits = util.LongBits.fromHash(hash);\n if (util.Long)\n return util.Long.fromBits(bits.lo, bits.hi, unsigned);\n return bits.toNumber(Boolean(unsigned));\n};\n\n/**\n * Merges the properties of the source object into the destination object.\n * @memberof util\n * @param {Object.} dst Destination object\n * @param {Object.} src Source object\n * @param {boolean} [ifNotSet=false] Merges only if the key is not already set\n * @returns {Object.} Destination object\n */\nfunction merge(dst, src, ifNotSet) { // used by converters\n for (var keys = Object.keys(src), i = 0; i < keys.length; ++i)\n if (dst[keys[i]] === undefined || !ifNotSet)\n dst[keys[i]] = src[keys[i]];\n return dst;\n}\n\nutil.merge = merge;\n\n/**\n * Converts the first character of a string to lower case.\n * @param {string} str String to convert\n * @returns {string} Converted string\n */\nutil.lcFirst = function lcFirst(str) {\n return str.charAt(0).toLowerCase() + str.substring(1);\n};\n\n/**\n * Creates a custom error constructor.\n * @memberof util\n * @param {string} name Error name\n * @returns {Constructor} Custom error constructor\n */\nfunction newError(name) {\n\n function CustomError(message, properties) {\n\n if (!(this instanceof CustomError))\n return new CustomError(message, properties);\n\n // Error.call(this, message);\n // ^ just returns a new error instance because the ctor can be called as a function\n\n Object.defineProperty(this, \"message\", { get: function() { return message; } });\n\n /* istanbul ignore next */\n if (Error.captureStackTrace) // node\n Error.captureStackTrace(this, CustomError);\n else\n Object.defineProperty(this, \"stack\", { value: new Error().stack || \"\" });\n\n if (properties)\n merge(this, properties);\n }\n\n CustomError.prototype = Object.create(Error.prototype, {\n constructor: {\n value: CustomError,\n writable: true,\n enumerable: false,\n configurable: true,\n },\n name: {\n get: function get() { return name; },\n set: undefined,\n enumerable: false,\n // configurable: false would accurately preserve the behavior of\n // the original, but I'm guessing that was not intentional.\n // For an actual error subclass, this property would\n // be configurable.\n configurable: true,\n },\n toString: {\n value: function value() { return this.name + \": \" + this.message; },\n writable: true,\n enumerable: false,\n configurable: true,\n },\n });\n\n return CustomError;\n}\n\nutil.newError = newError;\n\n/**\n * Constructs a new protocol error.\n * @classdesc Error subclass indicating a protocol specifc error.\n * @memberof util\n * @extends Error\n * @template T extends Message\n * @constructor\n * @param {string} message Error message\n * @param {Object.} [properties] Additional properties\n * @example\n * try {\n * MyMessage.decode(someBuffer); // throws if required fields are missing\n * } catch (e) {\n * if (e instanceof ProtocolError && e.instance)\n * console.log(\"decoded so far: \" + JSON.stringify(e.instance));\n * }\n */\nutil.ProtocolError = newError(\"ProtocolError\");\n\n/**\n * So far decoded message instance.\n * @name util.ProtocolError#instance\n * @type {Message}\n */\n\n/**\n * A OneOf getter as returned by {@link 
util.oneOfGetter}.\n * @typedef OneOfGetter\n * @type {function}\n * @returns {string|undefined} Set field name, if any\n */\n\n/**\n * Builds a getter for a oneof's present field name.\n * @param {string[]} fieldNames Field names\n * @returns {OneOfGetter} Unbound getter\n */\nutil.oneOfGetter = function getOneOf(fieldNames) {\n var fieldMap = {};\n for (var i = 0; i < fieldNames.length; ++i)\n fieldMap[fieldNames[i]] = 1;\n\n /**\n * @returns {string|undefined} Set field name, if any\n * @this Object\n * @ignore\n */\n return function() { // eslint-disable-line consistent-return\n for (var keys = Object.keys(this), i = keys.length - 1; i > -1; --i)\n if (fieldMap[keys[i]] === 1 && this[keys[i]] !== undefined && this[keys[i]] !== null)\n return keys[i];\n };\n};\n\n/**\n * A OneOf setter as returned by {@link util.oneOfSetter}.\n * @typedef OneOfSetter\n * @type {function}\n * @param {string|undefined} value Field name\n * @returns {undefined}\n */\n\n/**\n * Builds a setter for a oneof's present field name.\n * @param {string[]} fieldNames Field names\n * @returns {OneOfSetter} Unbound setter\n */\nutil.oneOfSetter = function setOneOf(fieldNames) {\n\n /**\n * @param {string} name Field name\n * @returns {undefined}\n * @this Object\n * @ignore\n */\n return function(name) {\n for (var i = 0; i < fieldNames.length; ++i)\n if (fieldNames[i] !== name)\n delete this[fieldNames[i]];\n };\n};\n\n/**\n * Default conversion options used for {@link Message#toJSON} implementations.\n *\n * These options are close to proto3's JSON mapping with the exception that internal types like Any are handled just like messages. More precisely:\n *\n * - Longs become strings\n * - Enums become string keys\n * - Bytes become base64 encoded strings\n * - (Sub-)Messages become plain objects\n * - Maps become plain objects with all string keys\n * - Repeated fields become arrays\n * - NaN and Infinity for float and double fields become strings\n *\n * @type {IConversionOptions}\n * @see https://developers.google.com/protocol-buffers/docs/proto3?hl=en#json\n */\nutil.toJSONOptions = {\n longs: String,\n enums: String,\n bytes: String,\n json: true\n};\n\n// Sets up buffer utility according to the environment (called in index-minimal)\nutil._configure = function() {\n var Buffer = util.Buffer;\n /* istanbul ignore if */\n if (!Buffer) {\n util._Buffer_from = util._Buffer_allocUnsafe = null;\n return;\n }\n // because node 4.x buffers are incompatible & immutable\n // see: https://github.com/dcodeIO/protobuf.js/pull/665\n util._Buffer_from = Buffer.from !== Uint8Array.from && Buffer.from ||\n /* istanbul ignore next */\n function Buffer_from(value, encoding) {\n return new Buffer(value, encoding);\n };\n util._Buffer_allocUnsafe = Buffer.allocUnsafe ||\n /* istanbul ignore next */\n function Buffer_allocUnsafe(size) {\n return new Buffer(size);\n };\n};\n", "\"use strict\";\nmodule.exports = Writer;\n\nvar util = require(\"./util/minimal\");\n\nvar BufferWriter; // cyclic\n\nvar LongBits = util.LongBits,\n base64 = util.base64,\n utf8 = util.utf8;\n\n/**\n * Constructs a new writer operation instance.\n * @classdesc Scheduled writer operation.\n * @constructor\n * @param {function(*, Uint8Array, number)} fn Function to call\n * @param {number} len Value byte length\n * @param {*} val Value to write\n * @ignore\n */\nfunction Op(fn, len, val) {\n\n /**\n * Function to call.\n * @type {function(Uint8Array, number, *)}\n */\n this.fn = fn;\n\n /**\n * Value byte length.\n * @type {number}\n */\n this.len = len;\n\n 
/**\n * Next operation.\n * @type {Writer.Op|undefined}\n */\n this.next = undefined;\n\n /**\n * Value to write.\n * @type {*}\n */\n this.val = val; // type varies\n}\n\n/* istanbul ignore next */\nfunction noop() {} // eslint-disable-line no-empty-function\n\n/**\n * Constructs a new writer state instance.\n * @classdesc Copied writer state.\n * @memberof Writer\n * @constructor\n * @param {Writer} writer Writer to copy state from\n * @ignore\n */\nfunction State(writer) {\n\n /**\n * Current head.\n * @type {Writer.Op}\n */\n this.head = writer.head;\n\n /**\n * Current tail.\n * @type {Writer.Op}\n */\n this.tail = writer.tail;\n\n /**\n * Current buffer length.\n * @type {number}\n */\n this.len = writer.len;\n\n /**\n * Next state.\n * @type {State|null}\n */\n this.next = writer.states;\n}\n\n/**\n * Constructs a new writer instance.\n * @classdesc Wire format writer using `Uint8Array` if available, otherwise `Array`.\n * @constructor\n */\nfunction Writer() {\n\n /**\n * Current length.\n * @type {number}\n */\n this.len = 0;\n\n /**\n * Operations head.\n * @type {Object}\n */\n this.head = new Op(noop, 0, 0);\n\n /**\n * Operations tail\n * @type {Object}\n */\n this.tail = this.head;\n\n /**\n * Linked forked states.\n * @type {Object|null}\n */\n this.states = null;\n\n // When a value is written, the writer calculates its byte length and puts it into a linked\n // list of operations to perform when finish() is called. This both allows us to allocate\n // buffers of the exact required size and reduces the amount of work we have to do compared\n // to first calculating over objects and then encoding over objects. In our case, the encoding\n // part is just a linked list walk calling operations with already prepared values.\n}\n\nvar create = function create() {\n return util.Buffer\n ? 
function create_buffer_setup() {\n return (Writer.create = function create_buffer() {\n return new BufferWriter();\n })();\n }\n /* istanbul ignore next */\n : function create_array() {\n return new Writer();\n };\n};\n\n/**\n * Creates a new writer.\n * @function\n * @returns {BufferWriter|Writer} A {@link BufferWriter} when Buffers are supported, otherwise a {@link Writer}\n */\nWriter.create = create();\n\n/**\n * Allocates a buffer of the specified size.\n * @param {number} size Buffer size\n * @returns {Uint8Array} Buffer\n */\nWriter.alloc = function alloc(size) {\n return new util.Array(size);\n};\n\n// Use Uint8Array buffer pool in the browser, just like node does with buffers\n/* istanbul ignore else */\nif (util.Array !== Array)\n Writer.alloc = util.pool(Writer.alloc, util.Array.prototype.subarray);\n\n/**\n * Pushes a new operation to the queue.\n * @param {function(Uint8Array, number, *)} fn Function to call\n * @param {number} len Value byte length\n * @param {number} val Value to write\n * @returns {Writer} `this`\n * @private\n */\nWriter.prototype._push = function push(fn, len, val) {\n this.tail = this.tail.next = new Op(fn, len, val);\n this.len += len;\n return this;\n};\n\nfunction writeByte(val, buf, pos) {\n buf[pos] = val & 255;\n}\n\nfunction writeVarint32(val, buf, pos) {\n while (val > 127) {\n buf[pos++] = val & 127 | 128;\n val >>>= 7;\n }\n buf[pos] = val;\n}\n\n/**\n * Constructs a new varint writer operation instance.\n * @classdesc Scheduled varint writer operation.\n * @extends Op\n * @constructor\n * @param {number} len Value byte length\n * @param {number} val Value to write\n * @ignore\n */\nfunction VarintOp(len, val) {\n this.len = len;\n this.next = undefined;\n this.val = val;\n}\n\nVarintOp.prototype = Object.create(Op.prototype);\nVarintOp.prototype.fn = writeVarint32;\n\n/**\n * Writes an unsigned 32 bit value as a varint.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.uint32 = function write_uint32(value) {\n // here, the call to this.push has been inlined and a varint specific Op subclass is used.\n // uint32 is by far the most frequently used operation and benefits significantly from this.\n this.len += (this.tail = this.tail.next = new VarintOp(\n (value = value >>> 0)\n < 128 ? 1\n : value < 16384 ? 2\n : value < 2097152 ? 3\n : value < 268435456 ? 4\n : 5,\n value)).len;\n return this;\n};\n\n/**\n * Writes a signed 32 bit value as a varint.\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.int32 = function write_int32(value) {\n return value < 0\n ? 
this._push(writeVarint64, 10, LongBits.fromNumber(value)) // 10 bytes per spec\n : this.uint32(value);\n};\n\n/**\n * Writes a 32 bit value as a varint, zig-zag encoded.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.sint32 = function write_sint32(value) {\n return this.uint32((value << 1 ^ value >> 31) >>> 0);\n};\n\nfunction writeVarint64(val, buf, pos) {\n while (val.hi) {\n buf[pos++] = val.lo & 127 | 128;\n val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;\n val.hi >>>= 7;\n }\n while (val.lo > 127) {\n buf[pos++] = val.lo & 127 | 128;\n val.lo = val.lo >>> 7;\n }\n buf[pos++] = val.lo;\n}\n\n/**\n * Writes an unsigned 64 bit value as a varint.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.uint64 = function write_uint64(value) {\n var bits = LongBits.from(value);\n return this._push(writeVarint64, bits.length(), bits);\n};\n\n/**\n * Writes a signed 64 bit value as a varint.\n * @function\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.int64 = Writer.prototype.uint64;\n\n/**\n * Writes a signed 64 bit value as a varint, zig-zag encoded.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.sint64 = function write_sint64(value) {\n var bits = LongBits.from(value).zzEncode();\n return this._push(writeVarint64, bits.length(), bits);\n};\n\n/**\n * Writes a boolish value as a varint.\n * @param {boolean} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.bool = function write_bool(value) {\n return this._push(writeByte, 1, value ? 
1 : 0);\n};\n\nfunction writeFixed32(val, buf, pos) {\n buf[pos ] = val & 255;\n buf[pos + 1] = val >>> 8 & 255;\n buf[pos + 2] = val >>> 16 & 255;\n buf[pos + 3] = val >>> 24;\n}\n\n/**\n * Writes an unsigned 32 bit value as fixed 32 bits.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.fixed32 = function write_fixed32(value) {\n return this._push(writeFixed32, 4, value >>> 0);\n};\n\n/**\n * Writes a signed 32 bit value as fixed 32 bits.\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.sfixed32 = Writer.prototype.fixed32;\n\n/**\n * Writes an unsigned 64 bit value as fixed 64 bits.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.fixed64 = function write_fixed64(value) {\n var bits = LongBits.from(value);\n return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);\n};\n\n/**\n * Writes a signed 64 bit value as fixed 64 bits.\n * @function\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.sfixed64 = Writer.prototype.fixed64;\n\n/**\n * Writes a float (32 bit).\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.float = function write_float(value) {\n return this._push(util.float.writeFloatLE, 4, value);\n};\n\n/**\n * Writes a double (64 bit float).\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.double = function write_double(value) {\n return this._push(util.float.writeDoubleLE, 8, value);\n};\n\nvar writeBytes = util.Array.prototype.set\n ? function writeBytes_set(val, buf, pos) {\n buf.set(val, pos); // also works for plain array values\n }\n /* istanbul ignore next */\n : function writeBytes_for(val, buf, pos) {\n for (var i = 0; i < val.length; ++i)\n buf[pos + i] = val[i];\n };\n\n/**\n * Writes a sequence of bytes.\n * @param {Uint8Array|string} value Buffer or base64 encoded string to write\n * @returns {Writer} `this`\n */\nWriter.prototype.bytes = function write_bytes(value) {\n var len = value.length >>> 0;\n if (!len)\n return this._push(writeByte, 1, 0);\n if (util.isString(value)) {\n var buf = Writer.alloc(len = base64.length(value));\n base64.decode(value, buf, 0);\n value = buf;\n }\n return this.uint32(len)._push(writeBytes, len, value);\n};\n\n/**\n * Writes a string.\n * @param {string} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.string = function write_string(value) {\n var len = utf8.length(value);\n return len\n ? 
this.uint32(len)._push(utf8.write, len, value)\n : this._push(writeByte, 1, 0);\n};\n\n/**\n * Forks this writer's state by pushing it to a stack.\n * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.\n * @returns {Writer} `this`\n */\nWriter.prototype.fork = function fork() {\n this.states = new State(this);\n this.head = this.tail = new Op(noop, 0, 0);\n this.len = 0;\n return this;\n};\n\n/**\n * Resets this instance to the last state.\n * @returns {Writer} `this`\n */\nWriter.prototype.reset = function reset() {\n if (this.states) {\n this.head = this.states.head;\n this.tail = this.states.tail;\n this.len = this.states.len;\n this.states = this.states.next;\n } else {\n this.head = this.tail = new Op(noop, 0, 0);\n this.len = 0;\n }\n return this;\n};\n\n/**\n * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.\n * @returns {Writer} `this`\n */\nWriter.prototype.ldelim = function ldelim() {\n var head = this.head,\n tail = this.tail,\n len = this.len;\n this.reset().uint32(len);\n if (len) {\n this.tail.next = head.next; // skip noop\n this.tail = tail;\n this.len += len;\n }\n return this;\n};\n\n/**\n * Finishes the write operation.\n * @returns {Uint8Array} Finished buffer\n */\nWriter.prototype.finish = function finish() {\n var head = this.head.next, // skip noop\n buf = this.constructor.alloc(this.len),\n pos = 0;\n while (head) {\n head.fn(head.val, buf, pos);\n pos += head.len;\n head = head.next;\n }\n // this.head = this.tail = null;\n return buf;\n};\n\nWriter._configure = function(BufferWriter_) {\n BufferWriter = BufferWriter_;\n Writer.create = create();\n BufferWriter._configure();\n};\n", "\"use strict\";\nmodule.exports = BufferWriter;\n\n// extends Writer\nvar Writer = require(\"./writer\");\n(BufferWriter.prototype = Object.create(Writer.prototype)).constructor = BufferWriter;\n\nvar util = require(\"./util/minimal\");\n\n/**\n * Constructs a new buffer writer instance.\n * @classdesc Wire format writer using node buffers.\n * @extends Writer\n * @constructor\n */\nfunction BufferWriter() {\n Writer.call(this);\n}\n\nBufferWriter._configure = function () {\n /**\n * Allocates a buffer of the specified size.\n * @function\n * @param {number} size Buffer size\n * @returns {Buffer} Buffer\n */\n BufferWriter.alloc = util._Buffer_allocUnsafe;\n\n BufferWriter.writeBytesBuffer = util.Buffer && util.Buffer.prototype instanceof Uint8Array && util.Buffer.prototype.set.name === \"set\"\n ? 
function writeBytesBuffer_set(val, buf, pos) {\n buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited)\n // also works for plain array values\n }\n /* istanbul ignore next */\n : function writeBytesBuffer_copy(val, buf, pos) {\n if (val.copy) // Buffer values\n val.copy(buf, pos, 0, val.length);\n else for (var i = 0; i < val.length;) // plain array values\n buf[pos++] = val[i++];\n };\n};\n\n\n/**\n * @override\n */\nBufferWriter.prototype.bytes = function write_bytes_buffer(value) {\n if (util.isString(value))\n value = util._Buffer_from(value, \"base64\");\n var len = value.length >>> 0;\n this.uint32(len);\n if (len)\n this._push(BufferWriter.writeBytesBuffer, len, value);\n return this;\n};\n\nfunction writeStringBuffer(val, buf, pos) {\n if (val.length < 40) // plain js is faster for short strings (probably due to redundant assertions)\n util.utf8.write(val, buf, pos);\n else if (buf.utf8Write)\n buf.utf8Write(val, pos);\n else\n buf.write(val, pos);\n}\n\n/**\n * @override\n */\nBufferWriter.prototype.string = function write_string_buffer(value) {\n var len = util.Buffer.byteLength(value);\n this.uint32(len);\n if (len)\n this._push(writeStringBuffer, len, value);\n return this;\n};\n\n\n/**\n * Finishes the write operation.\n * @name BufferWriter#finish\n * @function\n * @returns {Buffer} Finished buffer\n */\n\nBufferWriter._configure();\n", "\"use strict\";\nmodule.exports = Reader;\n\nvar util = require(\"./util/minimal\");\n\nvar BufferReader; // cyclic\n\nvar LongBits = util.LongBits,\n utf8 = util.utf8;\n\n/* istanbul ignore next */\nfunction indexOutOfRange(reader, writeLength) {\n return RangeError(\"index out of range: \" + reader.pos + \" + \" + (writeLength || 1) + \" > \" + reader.len);\n}\n\n/**\n * Constructs a new reader instance using the specified buffer.\n * @classdesc Wire format reader using `Uint8Array` if available, otherwise `Array`.\n * @constructor\n * @param {Uint8Array} buffer Buffer to read from\n */\nfunction Reader(buffer) {\n\n /**\n * Read buffer.\n * @type {Uint8Array}\n */\n this.buf = buffer;\n\n /**\n * Read buffer position.\n * @type {number}\n */\n this.pos = 0;\n\n /**\n * Read buffer length.\n * @type {number}\n */\n this.len = buffer.length;\n}\n\nvar create_array = typeof Uint8Array !== \"undefined\"\n ? function create_typed_array(buffer) {\n if (buffer instanceof Uint8Array || Array.isArray(buffer))\n return new Reader(buffer);\n throw Error(\"illegal buffer\");\n }\n /* istanbul ignore next */\n : function create_array(buffer) {\n if (Array.isArray(buffer))\n return new Reader(buffer);\n throw Error(\"illegal buffer\");\n };\n\nvar create = function create() {\n return util.Buffer\n ? function create_buffer_setup(buffer) {\n return (Reader.create = function create_buffer(buffer) {\n return util.Buffer.isBuffer(buffer)\n ? 
new BufferReader(buffer)\n /* istanbul ignore next */\n : create_array(buffer);\n })(buffer);\n }\n /* istanbul ignore next */\n : create_array;\n};\n\n/**\n * Creates a new reader using the specified buffer.\n * @function\n * @param {Uint8Array|Buffer} buffer Buffer to read from\n * @returns {Reader|BufferReader} A {@link BufferReader} if `buffer` is a Buffer, otherwise a {@link Reader}\n * @throws {Error} If `buffer` is not a valid buffer\n */\nReader.create = create();\n\nReader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */ util.Array.prototype.slice;\n\n/**\n * Reads a varint as an unsigned 32 bit value.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.uint32 = (function read_uint32_setup() {\n var value = 4294967295; // optimizer type-hint, tends to deopt otherwise (?!)\n return function read_uint32() {\n value = ( this.buf[this.pos] & 127 ) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; if (this.buf[this.pos++] < 128) return value;\n\n /* istanbul ignore if */\n if ((this.pos += 5) > this.len) {\n this.pos = this.len;\n throw indexOutOfRange(this, 10);\n }\n return value;\n };\n})();\n\n/**\n * Reads a varint as a signed 32 bit value.\n * @returns {number} Value read\n */\nReader.prototype.int32 = function read_int32() {\n return this.uint32() | 0;\n};\n\n/**\n * Reads a zig-zag encoded varint as a signed 32 bit value.\n * @returns {number} Value read\n */\nReader.prototype.sint32 = function read_sint32() {\n var value = this.uint32();\n return value >>> 1 ^ -(value & 1) | 0;\n};\n\n/* eslint-disable no-invalid-this */\n\nfunction readLongVarint() {\n // tends to deopt with local vars for octet etc.\n var bits = new LongBits(0, 0);\n var i = 0;\n if (this.len - this.pos > 4) { // fast route (lo)\n for (; i < 4; ++i) {\n // 1st..4th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n // 5th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n i = 0;\n } else {\n for (; i < 3; ++i) {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n // 1st..3th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n // 4th\n bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;\n return bits;\n }\n if (this.len - this.pos > 4) { // fast route (hi)\n for (; i < 5; ++i) {\n // 6th..10th\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n } else {\n for (; i < 5; ++i) {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n // 6th..10th\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n }\n /* istanbul ignore next */\n throw Error(\"invalid varint encoding\");\n}\n\n/* eslint-enable no-invalid-this */\n\n/**\n * Reads a varint as a signed 64 bit value.\n * @name Reader#int64\n * @function\n * 
@returns {Long} Value read\n */\n\n/**\n * Reads a varint as an unsigned 64 bit value.\n * @name Reader#uint64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a zig-zag encoded varint as a signed 64 bit value.\n * @name Reader#sint64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a varint as a boolean.\n * @returns {boolean} Value read\n */\nReader.prototype.bool = function read_bool() {\n return this.uint32() !== 0;\n};\n\nfunction readFixed32_end(buf, end) { // note that this uses `end`, not `pos`\n return (buf[end - 4]\n | buf[end - 3] << 8\n | buf[end - 2] << 16\n | buf[end - 1] << 24) >>> 0;\n}\n\n/**\n * Reads fixed 32 bits as an unsigned 32 bit integer.\n * @returns {number} Value read\n */\nReader.prototype.fixed32 = function read_fixed32() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n return readFixed32_end(this.buf, this.pos += 4);\n};\n\n/**\n * Reads fixed 32 bits as a signed 32 bit integer.\n * @returns {number} Value read\n */\nReader.prototype.sfixed32 = function read_sfixed32() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n return readFixed32_end(this.buf, this.pos += 4) | 0;\n};\n\n/* eslint-disable no-invalid-this */\n\nfunction readFixed64(/* this: Reader */) {\n\n /* istanbul ignore if */\n if (this.pos + 8 > this.len)\n throw indexOutOfRange(this, 8);\n\n return new LongBits(readFixed32_end(this.buf, this.pos += 4), readFixed32_end(this.buf, this.pos += 4));\n}\n\n/* eslint-enable no-invalid-this */\n\n/**\n * Reads fixed 64 bits.\n * @name Reader#fixed64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads zig-zag encoded fixed 64 bits.\n * @name Reader#sfixed64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a float (32 bit) as a number.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.float = function read_float() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n var value = util.float.readFloatLE(this.buf, this.pos);\n this.pos += 4;\n return value;\n};\n\n/**\n * Reads a double (64 bit float) as a number.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.double = function read_double() {\n\n /* istanbul ignore if */\n if (this.pos + 8 > this.len)\n throw indexOutOfRange(this, 4);\n\n var value = util.float.readDoubleLE(this.buf, this.pos);\n this.pos += 8;\n return value;\n};\n\n/**\n * Reads a sequence of bytes preceeded by its length as a varint.\n * @returns {Uint8Array} Value read\n */\nReader.prototype.bytes = function read_bytes() {\n var length = this.uint32(),\n start = this.pos,\n end = this.pos + length;\n\n /* istanbul ignore if */\n if (end > this.len)\n throw indexOutOfRange(this, length);\n\n this.pos += length;\n if (Array.isArray(this.buf)) // plain array\n return this.buf.slice(start, end);\n\n if (start === end) { // fix for IE 10/Win8 and others' subarray returning array of size 1\n var nativeBuffer = util.Buffer;\n return nativeBuffer\n ? 
nativeBuffer.alloc(0)\n : new this.buf.constructor(0);\n }\n return this._slice.call(this.buf, start, end);\n};\n\n/**\n * Reads a string preceeded by its byte length as a varint.\n * @returns {string} Value read\n */\nReader.prototype.string = function read_string() {\n var bytes = this.bytes();\n return utf8.read(bytes, 0, bytes.length);\n};\n\n/**\n * Skips the specified number of bytes if specified, otherwise skips a varint.\n * @param {number} [length] Length if known, otherwise a varint is assumed\n * @returns {Reader} `this`\n */\nReader.prototype.skip = function skip(length) {\n if (typeof length === \"number\") {\n /* istanbul ignore if */\n if (this.pos + length > this.len)\n throw indexOutOfRange(this, length);\n this.pos += length;\n } else {\n do {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n } while (this.buf[this.pos++] & 128);\n }\n return this;\n};\n\n/**\n * Skips the next element of the specified wire type.\n * @param {number} wireType Wire type received\n * @returns {Reader} `this`\n */\nReader.prototype.skipType = function(wireType) {\n switch (wireType) {\n case 0:\n this.skip();\n break;\n case 1:\n this.skip(8);\n break;\n case 2:\n this.skip(this.uint32());\n break;\n case 3:\n while ((wireType = this.uint32() & 7) !== 4) {\n this.skipType(wireType);\n }\n break;\n case 5:\n this.skip(4);\n break;\n\n /* istanbul ignore next */\n default:\n throw Error(\"invalid wire type \" + wireType + \" at offset \" + this.pos);\n }\n return this;\n};\n\nReader._configure = function(BufferReader_) {\n BufferReader = BufferReader_;\n Reader.create = create();\n BufferReader._configure();\n\n var fn = util.Long ? \"toLong\" : /* istanbul ignore next */ \"toNumber\";\n util.merge(Reader.prototype, {\n\n int64: function read_int64() {\n return readLongVarint.call(this)[fn](false);\n },\n\n uint64: function read_uint64() {\n return readLongVarint.call(this)[fn](true);\n },\n\n sint64: function read_sint64() {\n return readLongVarint.call(this).zzDecode()[fn](false);\n },\n\n fixed64: function read_fixed64() {\n return readFixed64.call(this)[fn](true);\n },\n\n sfixed64: function read_sfixed64() {\n return readFixed64.call(this)[fn](false);\n }\n\n });\n};\n", "\"use strict\";\nmodule.exports = BufferReader;\n\n// extends Reader\nvar Reader = require(\"./reader\");\n(BufferReader.prototype = Object.create(Reader.prototype)).constructor = BufferReader;\n\nvar util = require(\"./util/minimal\");\n\n/**\n * Constructs a new buffer reader instance.\n * @classdesc Wire format reader using node buffers.\n * @extends Reader\n * @constructor\n * @param {Buffer} buffer Buffer to read from\n */\nfunction BufferReader(buffer) {\n Reader.call(this, buffer);\n\n /**\n * Read buffer.\n * @name BufferReader#buf\n * @type {Buffer}\n */\n}\n\nBufferReader._configure = function () {\n /* istanbul ignore else */\n if (util.Buffer)\n BufferReader.prototype._slice = util.Buffer.prototype.slice;\n};\n\n\n/**\n * @override\n */\nBufferReader.prototype.string = function read_string_buffer() {\n var len = this.uint32(); // modifies pos\n return this.buf.utf8Slice\n ? 
this.buf.utf8Slice(this.pos, this.pos = Math.min(this.pos + len, this.len))\n : this.buf.toString(\"utf-8\", this.pos, this.pos = Math.min(this.pos + len, this.len));\n};\n\n/**\n * Reads a sequence of bytes preceeded by its length as a varint.\n * @name BufferReader#bytes\n * @function\n * @returns {Buffer} Value read\n */\n\nBufferReader._configure();\n", "\"use strict\";\nmodule.exports = Service;\n\nvar util = require(\"../util/minimal\");\n\n// Extends EventEmitter\n(Service.prototype = Object.create(util.EventEmitter.prototype)).constructor = Service;\n\n/**\n * A service method callback as used by {@link rpc.ServiceMethod|ServiceMethod}.\n *\n * Differs from {@link RPCImplCallback} in that it is an actual callback of a service method which may not return `response = null`.\n * @typedef rpc.ServiceMethodCallback\n * @template TRes extends Message\n * @type {function}\n * @param {Error|null} error Error, if any\n * @param {TRes} [response] Response message\n * @returns {undefined}\n */\n\n/**\n * A service method part of a {@link rpc.Service} as created by {@link Service.create}.\n * @typedef rpc.ServiceMethod\n * @template TReq extends Message\n * @template TRes extends Message\n * @type {function}\n * @param {TReq|Properties} request Request message or plain object\n * @param {rpc.ServiceMethodCallback} [callback] Node-style callback called with the error, if any, and the response message\n * @returns {Promise>} Promise if `callback` has been omitted, otherwise `undefined`\n */\n\n/**\n * Constructs a new RPC service instance.\n * @classdesc An RPC service as returned by {@link Service#create}.\n * @exports rpc.Service\n * @extends util.EventEmitter\n * @constructor\n * @param {RPCImpl} rpcImpl RPC implementation\n * @param {boolean} [requestDelimited=false] Whether requests are length-delimited\n * @param {boolean} [responseDelimited=false] Whether responses are length-delimited\n */\nfunction Service(rpcImpl, requestDelimited, responseDelimited) {\n\n if (typeof rpcImpl !== \"function\")\n throw TypeError(\"rpcImpl must be a function\");\n\n util.EventEmitter.call(this);\n\n /**\n * RPC implementation. Becomes `null` once the service is ended.\n * @type {RPCImpl|null}\n */\n this.rpcImpl = rpcImpl;\n\n /**\n * Whether requests are length-delimited.\n * @type {boolean}\n */\n this.requestDelimited = Boolean(requestDelimited);\n\n /**\n * Whether responses are length-delimited.\n * @type {boolean}\n */\n this.responseDelimited = Boolean(responseDelimited);\n}\n\n/**\n * Calls a service method through {@link rpc.Service#rpcImpl|rpcImpl}.\n * @param {Method|rpc.ServiceMethod} method Reflected or static method\n * @param {Constructor} requestCtor Request constructor\n * @param {Constructor} responseCtor Response constructor\n * @param {TReq|Properties} request Request message or plain object\n * @param {rpc.ServiceMethodCallback} callback Service callback\n * @returns {undefined}\n * @template TReq extends Message\n * @template TRes extends Message\n */\nService.prototype.rpcCall = function rpcCall(method, requestCtor, responseCtor, request, callback) {\n\n if (!request)\n throw TypeError(\"request must be specified\");\n\n var self = this;\n if (!callback)\n return util.asPromise(rpcCall, self, method, requestCtor, responseCtor, request);\n\n if (!self.rpcImpl) {\n setTimeout(function() { callback(Error(\"already ended\")); }, 0);\n return undefined;\n }\n\n try {\n return self.rpcImpl(\n method,\n requestCtor[self.requestDelimited ? 
\"encodeDelimited\" : \"encode\"](request).finish(),\n function rpcCallback(err, response) {\n\n if (err) {\n self.emit(\"error\", err, method);\n return callback(err);\n }\n\n if (response === null) {\n self.end(/* endedByRPC */ true);\n return undefined;\n }\n\n if (!(response instanceof responseCtor)) {\n try {\n response = responseCtor[self.responseDelimited ? \"decodeDelimited\" : \"decode\"](response);\n } catch (err) {\n self.emit(\"error\", err, method);\n return callback(err);\n }\n }\n\n self.emit(\"data\", response, method);\n return callback(null, response);\n }\n );\n } catch (err) {\n self.emit(\"error\", err, method);\n setTimeout(function() { callback(err); }, 0);\n return undefined;\n }\n};\n\n/**\n * Ends this service and emits the `end` event.\n * @param {boolean} [endedByRPC=false] Whether the service has been ended by the RPC implementation.\n * @returns {rpc.Service} `this`\n */\nService.prototype.end = function end(endedByRPC) {\n if (this.rpcImpl) {\n if (!endedByRPC) // signal end to rpcImpl\n this.rpcImpl(null, null, null);\n this.rpcImpl = null;\n this.emit(\"end\").off();\n }\n return this;\n};\n", "\"use strict\";\n\n/**\n * Streaming RPC helpers.\n * @namespace\n */\nvar rpc = exports;\n\n/**\n * RPC implementation passed to {@link Service#create} performing a service request on network level, i.e. by utilizing http requests or websockets.\n * @typedef RPCImpl\n * @type {function}\n * @param {Method|rpc.ServiceMethod,Message<{}>>} method Reflected or static method being called\n * @param {Uint8Array} requestData Request data\n * @param {RPCImplCallback} callback Callback function\n * @returns {undefined}\n * @example\n * function rpcImpl(method, requestData, callback) {\n * if (protobuf.util.lcFirst(method.name) !== \"myMethod\") // compatible with static code\n * throw Error(\"no such method\");\n * asynchronouslyObtainAResponse(requestData, function(err, responseData) {\n * callback(err, responseData);\n * });\n * }\n */\n\n/**\n * Node-style callback as used by {@link RPCImpl}.\n * @typedef RPCImplCallback\n * @type {function}\n * @param {Error|null} error Error, if any, otherwise `null`\n * @param {Uint8Array|null} [response] Response data or `null` to signal end of stream, if there hasn't been an error\n * @returns {undefined}\n */\n\nrpc.Service = require(\"./rpc/service\");\n", "\"use strict\";\nmodule.exports = {};\n\n/**\n * Named roots.\n * This is where pbjs stores generated structures (the option `-r, --root` specifies a name).\n * Can also be used manually to make roots available across modules.\n * @name roots\n * @type {Object.}\n * @example\n * // pbjs -r myroot -o compiled.js ...\n *\n * // in another module:\n * require(\"./compiled.js\");\n *\n * // in any subsequent module:\n * var root = protobuf.roots[\"myroot\"];\n */\n", "\"use strict\";\nvar protobuf = exports;\n\n/**\n * Build type, one of `\"full\"`, `\"light\"` or `\"minimal\"`.\n * @name build\n * @type {string}\n * @const\n */\nprotobuf.build = \"minimal\";\n\n// Serialization\nprotobuf.Writer = require(\"./writer\");\nprotobuf.BufferWriter = require(\"./writer_buffer\");\nprotobuf.Reader = require(\"./reader\");\nprotobuf.BufferReader = require(\"./reader_buffer\");\n\n// Utility\nprotobuf.util = require(\"./util/minimal\");\nprotobuf.rpc = require(\"./rpc\");\nprotobuf.roots = require(\"./roots\");\nprotobuf.configure = configure;\n\n/* istanbul ignore next */\n/**\n * Reconfigures the library according to the environment.\n * @returns {undefined}\n */\nfunction configure() {\n 
protobuf.util._configure();\n protobuf.Writer._configure(protobuf.BufferWriter);\n protobuf.Reader._configure(protobuf.BufferReader);\n}\n\n// Set up buffer utility according to the environment\nconfigure();\n", "// minimal library entry point.\n\n\"use strict\";\nmodule.exports = require(\"./src/index-minimal\");\n", "/*eslint-disable block-scoped-var, id-length, no-control-regex, no-magic-numbers, no-prototype-builtins, no-redeclare, no-shadow, no-var, sort-vars*/\n\"use strict\";\n\nvar $protobuf = require(\"protobufjs/minimal\");\n\n// Common aliases\nvar $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;\n\n// Exported root namespace\nvar $root = $protobuf.roots[\"default\"] || ($protobuf.roots[\"default\"] = {});\n\n$root.onnx = (function() {\n\n /**\n * Namespace onnx.\n * @exports onnx\n * @namespace\n */\n var onnx = {};\n\n /**\n * Version enum.\n * @name onnx.Version\n * @enum {number}\n * @property {number} _START_VERSION=0 _START_VERSION value\n * @property {number} IR_VERSION_2017_10_10=1 IR_VERSION_2017_10_10 value\n * @property {number} IR_VERSION_2017_10_30=2 IR_VERSION_2017_10_30 value\n * @property {number} IR_VERSION_2017_11_3=3 IR_VERSION_2017_11_3 value\n * @property {number} IR_VERSION_2019_1_22=4 IR_VERSION_2019_1_22 value\n * @property {number} IR_VERSION_2019_3_18=5 IR_VERSION_2019_3_18 value\n * @property {number} IR_VERSION_2019_9_19=6 IR_VERSION_2019_9_19 value\n * @property {number} IR_VERSION_2020_5_8=7 IR_VERSION_2020_5_8 value\n * @property {number} IR_VERSION_2021_7_30=8 IR_VERSION_2021_7_30 value\n * @property {number} IR_VERSION=9 IR_VERSION value\n */\n onnx.Version = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"_START_VERSION\"] = 0;\n values[valuesById[1] = \"IR_VERSION_2017_10_10\"] = 1;\n values[valuesById[2] = \"IR_VERSION_2017_10_30\"] = 2;\n values[valuesById[3] = \"IR_VERSION_2017_11_3\"] = 3;\n values[valuesById[4] = \"IR_VERSION_2019_1_22\"] = 4;\n values[valuesById[5] = \"IR_VERSION_2019_3_18\"] = 5;\n values[valuesById[6] = \"IR_VERSION_2019_9_19\"] = 6;\n values[valuesById[7] = \"IR_VERSION_2020_5_8\"] = 7;\n values[valuesById[8] = \"IR_VERSION_2021_7_30\"] = 8;\n values[valuesById[9] = \"IR_VERSION\"] = 9;\n return values;\n })();\n\n onnx.AttributeProto = (function() {\n\n /**\n * Properties of an AttributeProto.\n * @memberof onnx\n * @interface IAttributeProto\n * @property {string|null} [name] AttributeProto name\n * @property {string|null} [refAttrName] AttributeProto refAttrName\n * @property {string|null} [docString] AttributeProto docString\n * @property {onnx.AttributeProto.AttributeType|null} [type] AttributeProto type\n * @property {number|null} [f] AttributeProto f\n * @property {number|Long|null} [i] AttributeProto i\n * @property {Uint8Array|null} [s] AttributeProto s\n * @property {onnx.ITensorProto|null} [t] AttributeProto t\n * @property {onnx.IGraphProto|null} [g] AttributeProto g\n * @property {onnx.ISparseTensorProto|null} [sparseTensor] AttributeProto sparseTensor\n * @property {onnx.ITypeProto|null} [tp] AttributeProto tp\n * @property {Array.|null} [floats] AttributeProto floats\n * @property {Array.|null} [ints] AttributeProto ints\n * @property {Array.|null} [strings] AttributeProto strings\n * @property {Array.|null} [tensors] AttributeProto tensors\n * @property {Array.|null} [graphs] AttributeProto graphs\n * @property {Array.|null} [sparseTensors] AttributeProto sparseTensors\n * @property {Array.|null} [typeProtos] 
AttributeProto typeProtos\n */\n\n /**\n * Constructs a new AttributeProto.\n * @memberof onnx\n * @classdesc Represents an AttributeProto.\n * @implements IAttributeProto\n * @constructor\n * @param {onnx.IAttributeProto=} [properties] Properties to set\n */\n function AttributeProto(properties) {\n this.floats = [];\n this.ints = [];\n this.strings = [];\n this.tensors = [];\n this.graphs = [];\n this.sparseTensors = [];\n this.typeProtos = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * AttributeProto name.\n * @member {string} name\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.name = \"\";\n\n /**\n * AttributeProto refAttrName.\n * @member {string} refAttrName\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.refAttrName = \"\";\n\n /**\n * AttributeProto docString.\n * @member {string} docString\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.docString = \"\";\n\n /**\n * AttributeProto type.\n * @member {onnx.AttributeProto.AttributeType} type\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.type = 0;\n\n /**\n * AttributeProto f.\n * @member {number} f\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.f = 0;\n\n /**\n * AttributeProto i.\n * @member {number|Long} i\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.i = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * AttributeProto s.\n * @member {Uint8Array} s\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.s = $util.newBuffer([]);\n\n /**\n * AttributeProto t.\n * @member {onnx.ITensorProto|null|undefined} t\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.t = null;\n\n /**\n * AttributeProto g.\n * @member {onnx.IGraphProto|null|undefined} g\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.g = null;\n\n /**\n * AttributeProto sparseTensor.\n * @member {onnx.ISparseTensorProto|null|undefined} sparseTensor\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.sparseTensor = null;\n\n /**\n * AttributeProto tp.\n * @member {onnx.ITypeProto|null|undefined} tp\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.tp = null;\n\n /**\n * AttributeProto floats.\n * @member {Array.} floats\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.floats = $util.emptyArray;\n\n /**\n * AttributeProto ints.\n * @member {Array.} ints\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.ints = $util.emptyArray;\n\n /**\n * AttributeProto strings.\n * @member {Array.} strings\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.strings = $util.emptyArray;\n\n /**\n * AttributeProto tensors.\n * @member {Array.} tensors\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.tensors = $util.emptyArray;\n\n /**\n * AttributeProto graphs.\n * @member {Array.} graphs\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.graphs = $util.emptyArray;\n\n /**\n * AttributeProto sparseTensors.\n * @member {Array.} sparseTensors\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.sparseTensors = $util.emptyArray;\n\n /**\n * 
AttributeProto typeProtos.\n * @member {Array.} typeProtos\n * @memberof onnx.AttributeProto\n * @instance\n */\n AttributeProto.prototype.typeProtos = $util.emptyArray;\n\n /**\n * Creates a new AttributeProto instance using the specified properties.\n * @function create\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.IAttributeProto=} [properties] Properties to set\n * @returns {onnx.AttributeProto} AttributeProto instance\n */\n AttributeProto.create = function create(properties) {\n return new AttributeProto(properties);\n };\n\n /**\n * Encodes the specified AttributeProto message. Does not implicitly {@link onnx.AttributeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.IAttributeProto} message AttributeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n AttributeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);\n if (message.f != null && Object.hasOwnProperty.call(message, \"f\"))\n writer.uint32(/* id 2, wireType 5 =*/21).float(message.f);\n if (message.i != null && Object.hasOwnProperty.call(message, \"i\"))\n writer.uint32(/* id 3, wireType 0 =*/24).int64(message.i);\n if (message.s != null && Object.hasOwnProperty.call(message, \"s\"))\n writer.uint32(/* id 4, wireType 2 =*/34).bytes(message.s);\n if (message.t != null && Object.hasOwnProperty.call(message, \"t\"))\n $root.onnx.TensorProto.encode(message.t, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.g != null && Object.hasOwnProperty.call(message, \"g\"))\n $root.onnx.GraphProto.encode(message.g, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim();\n if (message.floats != null && message.floats.length) {\n writer.uint32(/* id 7, wireType 2 =*/58).fork();\n for (var i = 0; i < message.floats.length; ++i)\n writer.float(message.floats[i]);\n writer.ldelim();\n }\n if (message.ints != null && message.ints.length) {\n writer.uint32(/* id 8, wireType 2 =*/66).fork();\n for (var i = 0; i < message.ints.length; ++i)\n writer.int64(message.ints[i]);\n writer.ldelim();\n }\n if (message.strings != null && message.strings.length)\n for (var i = 0; i < message.strings.length; ++i)\n writer.uint32(/* id 9, wireType 2 =*/74).bytes(message.strings[i]);\n if (message.tensors != null && message.tensors.length)\n for (var i = 0; i < message.tensors.length; ++i)\n $root.onnx.TensorProto.encode(message.tensors[i], writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim();\n if (message.graphs != null && message.graphs.length)\n for (var i = 0; i < message.graphs.length; ++i)\n $root.onnx.GraphProto.encode(message.graphs[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 13, wireType 2 =*/106).string(message.docString);\n if (message.tp != null && Object.hasOwnProperty.call(message, \"tp\"))\n $root.onnx.TypeProto.encode(message.tp, writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim();\n if (message.typeProtos != null && message.typeProtos.length)\n for (var i = 0; i < message.typeProtos.length; ++i)\n $root.onnx.TypeProto.encode(message.typeProtos[i], writer.uint32(/* id 15, wireType 2 =*/122).fork()).ldelim();\n if (message.type != 
null && Object.hasOwnProperty.call(message, \"type\"))\n writer.uint32(/* id 20, wireType 0 =*/160).int32(message.type);\n if (message.refAttrName != null && Object.hasOwnProperty.call(message, \"refAttrName\"))\n writer.uint32(/* id 21, wireType 2 =*/170).string(message.refAttrName);\n if (message.sparseTensor != null && Object.hasOwnProperty.call(message, \"sparseTensor\"))\n $root.onnx.SparseTensorProto.encode(message.sparseTensor, writer.uint32(/* id 22, wireType 2 =*/178).fork()).ldelim();\n if (message.sparseTensors != null && message.sparseTensors.length)\n for (var i = 0; i < message.sparseTensors.length; ++i)\n $root.onnx.SparseTensorProto.encode(message.sparseTensors[i], writer.uint32(/* id 23, wireType 2 =*/186).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified AttributeProto message, length delimited. Does not implicitly {@link onnx.AttributeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.IAttributeProto} message AttributeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n AttributeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes an AttributeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.AttributeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.AttributeProto} AttributeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n AttributeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.AttributeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.name = reader.string();\n break;\n }\n case 21: {\n message.refAttrName = reader.string();\n break;\n }\n case 13: {\n message.docString = reader.string();\n break;\n }\n case 20: {\n message.type = reader.int32();\n break;\n }\n case 2: {\n message.f = reader.float();\n break;\n }\n case 3: {\n message.i = reader.int64();\n break;\n }\n case 4: {\n message.s = reader.bytes();\n break;\n }\n case 5: {\n message.t = $root.onnx.TensorProto.decode(reader, reader.uint32());\n break;\n }\n case 6: {\n message.g = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 22: {\n message.sparseTensor = $root.onnx.SparseTensorProto.decode(reader, reader.uint32());\n break;\n }\n case 14: {\n message.tp = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n case 7: {\n if (!(message.floats && message.floats.length))\n message.floats = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.floats.push(reader.float());\n } else\n message.floats.push(reader.float());\n break;\n }\n case 8: {\n if (!(message.ints && message.ints.length))\n message.ints = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.ints.push(reader.int64());\n } else\n message.ints.push(reader.int64());\n break;\n }\n case 9: {\n if (!(message.strings && message.strings.length))\n message.strings = [];\n message.strings.push(reader.bytes());\n break;\n }\n case 10: {\n if (!(message.tensors && message.tensors.length))\n message.tensors = [];\n message.tensors.push($root.onnx.TensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 11: {\n if (!(message.graphs && message.graphs.length))\n message.graphs = [];\n message.graphs.push($root.onnx.GraphProto.decode(reader, reader.uint32()));\n break;\n }\n case 23: {\n if (!(message.sparseTensors && message.sparseTensors.length))\n message.sparseTensors = [];\n message.sparseTensors.push($root.onnx.SparseTensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 15: {\n if (!(message.typeProtos && message.typeProtos.length))\n message.typeProtos = [];\n message.typeProtos.push($root.onnx.TypeProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes an AttributeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.AttributeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.AttributeProto} AttributeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n AttributeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies an AttributeProto message.\n * @function verify\n * @memberof onnx.AttributeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n AttributeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if 
(message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.refAttrName != null && message.hasOwnProperty(\"refAttrName\"))\n if (!$util.isString(message.refAttrName))\n return \"refAttrName: string expected\";\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.type != null && message.hasOwnProperty(\"type\"))\n switch (message.type) {\n default:\n return \"type: enum value expected\";\n case 0:\n case 1:\n case 2:\n case 3:\n case 4:\n case 5:\n case 11:\n case 13:\n case 6:\n case 7:\n case 8:\n case 9:\n case 10:\n case 12:\n case 14:\n break;\n }\n if (message.f != null && message.hasOwnProperty(\"f\"))\n if (typeof message.f !== \"number\")\n return \"f: number expected\";\n if (message.i != null && message.hasOwnProperty(\"i\"))\n if (!$util.isInteger(message.i) && !(message.i && $util.isInteger(message.i.low) && $util.isInteger(message.i.high)))\n return \"i: integer|Long expected\";\n if (message.s != null && message.hasOwnProperty(\"s\"))\n if (!(message.s && typeof message.s.length === \"number\" || $util.isString(message.s)))\n return \"s: buffer expected\";\n if (message.t != null && message.hasOwnProperty(\"t\")) {\n var error = $root.onnx.TensorProto.verify(message.t);\n if (error)\n return \"t.\" + error;\n }\n if (message.g != null && message.hasOwnProperty(\"g\")) {\n var error = $root.onnx.GraphProto.verify(message.g);\n if (error)\n return \"g.\" + error;\n }\n if (message.sparseTensor != null && message.hasOwnProperty(\"sparseTensor\")) {\n var error = $root.onnx.SparseTensorProto.verify(message.sparseTensor);\n if (error)\n return \"sparseTensor.\" + error;\n }\n if (message.tp != null && message.hasOwnProperty(\"tp\")) {\n var error = $root.onnx.TypeProto.verify(message.tp);\n if (error)\n return \"tp.\" + error;\n }\n if (message.floats != null && message.hasOwnProperty(\"floats\")) {\n if (!Array.isArray(message.floats))\n return \"floats: array expected\";\n for (var i = 0; i < message.floats.length; ++i)\n if (typeof message.floats[i] !== \"number\")\n return \"floats: number[] expected\";\n }\n if (message.ints != null && message.hasOwnProperty(\"ints\")) {\n if (!Array.isArray(message.ints))\n return \"ints: array expected\";\n for (var i = 0; i < message.ints.length; ++i)\n if (!$util.isInteger(message.ints[i]) && !(message.ints[i] && $util.isInteger(message.ints[i].low) && $util.isInteger(message.ints[i].high)))\n return \"ints: integer|Long[] expected\";\n }\n if (message.strings != null && message.hasOwnProperty(\"strings\")) {\n if (!Array.isArray(message.strings))\n return \"strings: array expected\";\n for (var i = 0; i < message.strings.length; ++i)\n if (!(message.strings[i] && typeof message.strings[i].length === \"number\" || $util.isString(message.strings[i])))\n return \"strings: buffer[] expected\";\n }\n if (message.tensors != null && message.hasOwnProperty(\"tensors\")) {\n if (!Array.isArray(message.tensors))\n return \"tensors: array expected\";\n for (var i = 0; i < message.tensors.length; ++i) {\n var error = $root.onnx.TensorProto.verify(message.tensors[i]);\n if (error)\n return \"tensors.\" + error;\n }\n }\n if (message.graphs != null && message.hasOwnProperty(\"graphs\")) {\n if (!Array.isArray(message.graphs))\n return \"graphs: array expected\";\n for (var i = 0; i < message.graphs.length; ++i) {\n var error = 
$root.onnx.GraphProto.verify(message.graphs[i]);\n if (error)\n return \"graphs.\" + error;\n }\n }\n if (message.sparseTensors != null && message.hasOwnProperty(\"sparseTensors\")) {\n if (!Array.isArray(message.sparseTensors))\n return \"sparseTensors: array expected\";\n for (var i = 0; i < message.sparseTensors.length; ++i) {\n var error = $root.onnx.SparseTensorProto.verify(message.sparseTensors[i]);\n if (error)\n return \"sparseTensors.\" + error;\n }\n }\n if (message.typeProtos != null && message.hasOwnProperty(\"typeProtos\")) {\n if (!Array.isArray(message.typeProtos))\n return \"typeProtos: array expected\";\n for (var i = 0; i < message.typeProtos.length; ++i) {\n var error = $root.onnx.TypeProto.verify(message.typeProtos[i]);\n if (error)\n return \"typeProtos.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates an AttributeProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.AttributeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.AttributeProto} AttributeProto\n */\n AttributeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.AttributeProto)\n return object;\n var message = new $root.onnx.AttributeProto();\n if (object.name != null)\n message.name = String(object.name);\n if (object.refAttrName != null)\n message.refAttrName = String(object.refAttrName);\n if (object.docString != null)\n message.docString = String(object.docString);\n switch (object.type) {\n default:\n if (typeof object.type === \"number\") {\n message.type = object.type;\n break;\n }\n break;\n case \"UNDEFINED\":\n case 0:\n message.type = 0;\n break;\n case \"FLOAT\":\n case 1:\n message.type = 1;\n break;\n case \"INT\":\n case 2:\n message.type = 2;\n break;\n case \"STRING\":\n case 3:\n message.type = 3;\n break;\n case \"TENSOR\":\n case 4:\n message.type = 4;\n break;\n case \"GRAPH\":\n case 5:\n message.type = 5;\n break;\n case \"SPARSE_TENSOR\":\n case 11:\n message.type = 11;\n break;\n case \"TYPE_PROTO\":\n case 13:\n message.type = 13;\n break;\n case \"FLOATS\":\n case 6:\n message.type = 6;\n break;\n case \"INTS\":\n case 7:\n message.type = 7;\n break;\n case \"STRINGS\":\n case 8:\n message.type = 8;\n break;\n case \"TENSORS\":\n case 9:\n message.type = 9;\n break;\n case \"GRAPHS\":\n case 10:\n message.type = 10;\n break;\n case \"SPARSE_TENSORS\":\n case 12:\n message.type = 12;\n break;\n case \"TYPE_PROTOS\":\n case 14:\n message.type = 14;\n break;\n }\n if (object.f != null)\n message.f = Number(object.f);\n if (object.i != null)\n if ($util.Long)\n (message.i = $util.Long.fromValue(object.i)).unsigned = false;\n else if (typeof object.i === \"string\")\n message.i = parseInt(object.i, 10);\n else if (typeof object.i === \"number\")\n message.i = object.i;\n else if (typeof object.i === \"object\")\n message.i = new $util.LongBits(object.i.low >>> 0, object.i.high >>> 0).toNumber();\n if (object.s != null)\n if (typeof object.s === \"string\")\n $util.base64.decode(object.s, message.s = $util.newBuffer($util.base64.length(object.s)), 0);\n else if (object.s.length >= 0)\n message.s = object.s;\n if (object.t != null) {\n if (typeof object.t !== \"object\")\n throw TypeError(\".onnx.AttributeProto.t: object expected\");\n message.t = $root.onnx.TensorProto.fromObject(object.t);\n }\n if (object.g != null) {\n if (typeof object.g !== \"object\")\n throw TypeError(\".onnx.AttributeProto.g: object expected\");\n message.g = 
$root.onnx.GraphProto.fromObject(object.g);\n }\n if (object.sparseTensor != null) {\n if (typeof object.sparseTensor !== \"object\")\n throw TypeError(\".onnx.AttributeProto.sparseTensor: object expected\");\n message.sparseTensor = $root.onnx.SparseTensorProto.fromObject(object.sparseTensor);\n }\n if (object.tp != null) {\n if (typeof object.tp !== \"object\")\n throw TypeError(\".onnx.AttributeProto.tp: object expected\");\n message.tp = $root.onnx.TypeProto.fromObject(object.tp);\n }\n if (object.floats) {\n if (!Array.isArray(object.floats))\n throw TypeError(\".onnx.AttributeProto.floats: array expected\");\n message.floats = [];\n for (var i = 0; i < object.floats.length; ++i)\n message.floats[i] = Number(object.floats[i]);\n }\n if (object.ints) {\n if (!Array.isArray(object.ints))\n throw TypeError(\".onnx.AttributeProto.ints: array expected\");\n message.ints = [];\n for (var i = 0; i < object.ints.length; ++i)\n if ($util.Long)\n (message.ints[i] = $util.Long.fromValue(object.ints[i])).unsigned = false;\n else if (typeof object.ints[i] === \"string\")\n message.ints[i] = parseInt(object.ints[i], 10);\n else if (typeof object.ints[i] === \"number\")\n message.ints[i] = object.ints[i];\n else if (typeof object.ints[i] === \"object\")\n message.ints[i] = new $util.LongBits(object.ints[i].low >>> 0, object.ints[i].high >>> 0).toNumber();\n }\n if (object.strings) {\n if (!Array.isArray(object.strings))\n throw TypeError(\".onnx.AttributeProto.strings: array expected\");\n message.strings = [];\n for (var i = 0; i < object.strings.length; ++i)\n if (typeof object.strings[i] === \"string\")\n $util.base64.decode(object.strings[i], message.strings[i] = $util.newBuffer($util.base64.length(object.strings[i])), 0);\n else if (object.strings[i].length >= 0)\n message.strings[i] = object.strings[i];\n }\n if (object.tensors) {\n if (!Array.isArray(object.tensors))\n throw TypeError(\".onnx.AttributeProto.tensors: array expected\");\n message.tensors = [];\n for (var i = 0; i < object.tensors.length; ++i) {\n if (typeof object.tensors[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.tensors: object expected\");\n message.tensors[i] = $root.onnx.TensorProto.fromObject(object.tensors[i]);\n }\n }\n if (object.graphs) {\n if (!Array.isArray(object.graphs))\n throw TypeError(\".onnx.AttributeProto.graphs: array expected\");\n message.graphs = [];\n for (var i = 0; i < object.graphs.length; ++i) {\n if (typeof object.graphs[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.graphs: object expected\");\n message.graphs[i] = $root.onnx.GraphProto.fromObject(object.graphs[i]);\n }\n }\n if (object.sparseTensors) {\n if (!Array.isArray(object.sparseTensors))\n throw TypeError(\".onnx.AttributeProto.sparseTensors: array expected\");\n message.sparseTensors = [];\n for (var i = 0; i < object.sparseTensors.length; ++i) {\n if (typeof object.sparseTensors[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.sparseTensors: object expected\");\n message.sparseTensors[i] = $root.onnx.SparseTensorProto.fromObject(object.sparseTensors[i]);\n }\n }\n if (object.typeProtos) {\n if (!Array.isArray(object.typeProtos))\n throw TypeError(\".onnx.AttributeProto.typeProtos: array expected\");\n message.typeProtos = [];\n for (var i = 0; i < object.typeProtos.length; ++i) {\n if (typeof object.typeProtos[i] !== \"object\")\n throw TypeError(\".onnx.AttributeProto.typeProtos: object expected\");\n message.typeProtos[i] = $root.onnx.TypeProto.fromObject(object.typeProtos[i]);\n }\n }\n return 
message;\n };\n\n /**\n * Creates a plain object from an AttributeProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.AttributeProto\n * @static\n * @param {onnx.AttributeProto} message AttributeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n AttributeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.floats = [];\n object.ints = [];\n object.strings = [];\n object.tensors = [];\n object.graphs = [];\n object.typeProtos = [];\n object.sparseTensors = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.f = 0;\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.i = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.i = options.longs === String ? \"0\" : 0;\n if (options.bytes === String)\n object.s = \"\";\n else {\n object.s = [];\n if (options.bytes !== Array)\n object.s = $util.newBuffer(object.s);\n }\n object.t = null;\n object.g = null;\n object.docString = \"\";\n object.tp = null;\n object.type = options.enums === String ? \"UNDEFINED\" : 0;\n object.refAttrName = \"\";\n object.sparseTensor = null;\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.f != null && message.hasOwnProperty(\"f\"))\n object.f = options.json && !isFinite(message.f) ? String(message.f) : message.f;\n if (message.i != null && message.hasOwnProperty(\"i\"))\n if (typeof message.i === \"number\")\n object.i = options.longs === String ? String(message.i) : message.i;\n else\n object.i = options.longs === String ? $util.Long.prototype.toString.call(message.i) : options.longs === Number ? new $util.LongBits(message.i.low >>> 0, message.i.high >>> 0).toNumber() : message.i;\n if (message.s != null && message.hasOwnProperty(\"s\"))\n object.s = options.bytes === String ? $util.base64.encode(message.s, 0, message.s.length) : options.bytes === Array ? Array.prototype.slice.call(message.s) : message.s;\n if (message.t != null && message.hasOwnProperty(\"t\"))\n object.t = $root.onnx.TensorProto.toObject(message.t, options);\n if (message.g != null && message.hasOwnProperty(\"g\"))\n object.g = $root.onnx.GraphProto.toObject(message.g, options);\n if (message.floats && message.floats.length) {\n object.floats = [];\n for (var j = 0; j < message.floats.length; ++j)\n object.floats[j] = options.json && !isFinite(message.floats[j]) ? String(message.floats[j]) : message.floats[j];\n }\n if (message.ints && message.ints.length) {\n object.ints = [];\n for (var j = 0; j < message.ints.length; ++j)\n if (typeof message.ints[j] === \"number\")\n object.ints[j] = options.longs === String ? String(message.ints[j]) : message.ints[j];\n else\n object.ints[j] = options.longs === String ? $util.Long.prototype.toString.call(message.ints[j]) : options.longs === Number ? new $util.LongBits(message.ints[j].low >>> 0, message.ints[j].high >>> 0).toNumber() : message.ints[j];\n }\n if (message.strings && message.strings.length) {\n object.strings = [];\n for (var j = 0; j < message.strings.length; ++j)\n object.strings[j] = options.bytes === String ? $util.base64.encode(message.strings[j], 0, message.strings[j].length) : options.bytes === Array ? 
Array.prototype.slice.call(message.strings[j]) : message.strings[j];\n }\n if (message.tensors && message.tensors.length) {\n object.tensors = [];\n for (var j = 0; j < message.tensors.length; ++j)\n object.tensors[j] = $root.onnx.TensorProto.toObject(message.tensors[j], options);\n }\n if (message.graphs && message.graphs.length) {\n object.graphs = [];\n for (var j = 0; j < message.graphs.length; ++j)\n object.graphs[j] = $root.onnx.GraphProto.toObject(message.graphs[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.tp != null && message.hasOwnProperty(\"tp\"))\n object.tp = $root.onnx.TypeProto.toObject(message.tp, options);\n if (message.typeProtos && message.typeProtos.length) {\n object.typeProtos = [];\n for (var j = 0; j < message.typeProtos.length; ++j)\n object.typeProtos[j] = $root.onnx.TypeProto.toObject(message.typeProtos[j], options);\n }\n if (message.type != null && message.hasOwnProperty(\"type\"))\n object.type = options.enums === String ? $root.onnx.AttributeProto.AttributeType[message.type] === undefined ? message.type : $root.onnx.AttributeProto.AttributeType[message.type] : message.type;\n if (message.refAttrName != null && message.hasOwnProperty(\"refAttrName\"))\n object.refAttrName = message.refAttrName;\n if (message.sparseTensor != null && message.hasOwnProperty(\"sparseTensor\"))\n object.sparseTensor = $root.onnx.SparseTensorProto.toObject(message.sparseTensor, options);\n if (message.sparseTensors && message.sparseTensors.length) {\n object.sparseTensors = [];\n for (var j = 0; j < message.sparseTensors.length; ++j)\n object.sparseTensors[j] = $root.onnx.SparseTensorProto.toObject(message.sparseTensors[j], options);\n }\n return object;\n };\n\n /**\n * Converts this AttributeProto to JSON.\n * @function toJSON\n * @memberof onnx.AttributeProto\n * @instance\n * @returns {Object.} JSON object\n */\n AttributeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for AttributeProto\n * @function getTypeUrl\n * @memberof onnx.AttributeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n AttributeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.AttributeProto\";\n };\n\n /**\n * AttributeType enum.\n * @name onnx.AttributeProto.AttributeType\n * @enum {number}\n * @property {number} UNDEFINED=0 UNDEFINED value\n * @property {number} FLOAT=1 FLOAT value\n * @property {number} INT=2 INT value\n * @property {number} STRING=3 STRING value\n * @property {number} TENSOR=4 TENSOR value\n * @property {number} GRAPH=5 GRAPH value\n * @property {number} SPARSE_TENSOR=11 SPARSE_TENSOR value\n * @property {number} TYPE_PROTO=13 TYPE_PROTO value\n * @property {number} FLOATS=6 FLOATS value\n * @property {number} INTS=7 INTS value\n * @property {number} STRINGS=8 STRINGS value\n * @property {number} TENSORS=9 TENSORS value\n * @property {number} GRAPHS=10 GRAPHS value\n * @property {number} SPARSE_TENSORS=12 SPARSE_TENSORS value\n * @property {number} TYPE_PROTOS=14 TYPE_PROTOS value\n */\n AttributeProto.AttributeType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"UNDEFINED\"] = 0;\n 
values[valuesById[1] = \"FLOAT\"] = 1;\n values[valuesById[2] = \"INT\"] = 2;\n values[valuesById[3] = \"STRING\"] = 3;\n values[valuesById[4] = \"TENSOR\"] = 4;\n values[valuesById[5] = \"GRAPH\"] = 5;\n values[valuesById[11] = \"SPARSE_TENSOR\"] = 11;\n values[valuesById[13] = \"TYPE_PROTO\"] = 13;\n values[valuesById[6] = \"FLOATS\"] = 6;\n values[valuesById[7] = \"INTS\"] = 7;\n values[valuesById[8] = \"STRINGS\"] = 8;\n values[valuesById[9] = \"TENSORS\"] = 9;\n values[valuesById[10] = \"GRAPHS\"] = 10;\n values[valuesById[12] = \"SPARSE_TENSORS\"] = 12;\n values[valuesById[14] = \"TYPE_PROTOS\"] = 14;\n return values;\n })();\n\n return AttributeProto;\n })();\n\n onnx.ValueInfoProto = (function() {\n\n /**\n * Properties of a ValueInfoProto.\n * @memberof onnx\n * @interface IValueInfoProto\n * @property {string|null} [name] ValueInfoProto name\n * @property {onnx.ITypeProto|null} [type] ValueInfoProto type\n * @property {string|null} [docString] ValueInfoProto docString\n */\n\n /**\n * Constructs a new ValueInfoProto.\n * @memberof onnx\n * @classdesc Represents a ValueInfoProto.\n * @implements IValueInfoProto\n * @constructor\n * @param {onnx.IValueInfoProto=} [properties] Properties to set\n */\n function ValueInfoProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * ValueInfoProto name.\n * @member {string} name\n * @memberof onnx.ValueInfoProto\n * @instance\n */\n ValueInfoProto.prototype.name = \"\";\n\n /**\n * ValueInfoProto type.\n * @member {onnx.ITypeProto|null|undefined} type\n * @memberof onnx.ValueInfoProto\n * @instance\n */\n ValueInfoProto.prototype.type = null;\n\n /**\n * ValueInfoProto docString.\n * @member {string} docString\n * @memberof onnx.ValueInfoProto\n * @instance\n */\n ValueInfoProto.prototype.docString = \"\";\n\n /**\n * Creates a new ValueInfoProto instance using the specified properties.\n * @function create\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.IValueInfoProto=} [properties] Properties to set\n * @returns {onnx.ValueInfoProto} ValueInfoProto instance\n */\n ValueInfoProto.create = function create(properties) {\n return new ValueInfoProto(properties);\n };\n\n /**\n * Encodes the specified ValueInfoProto message. Does not implicitly {@link onnx.ValueInfoProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.IValueInfoProto} message ValueInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ValueInfoProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);\n if (message.type != null && Object.hasOwnProperty.call(message, \"type\"))\n $root.onnx.TypeProto.encode(message.type, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.docString);\n return writer;\n };\n\n /**\n * Encodes the specified ValueInfoProto message, length delimited. 
Does not implicitly {@link onnx.ValueInfoProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.IValueInfoProto} message ValueInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ValueInfoProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a ValueInfoProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.ValueInfoProto} ValueInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ValueInfoProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.ValueInfoProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.name = reader.string();\n break;\n }\n case 2: {\n message.type = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n case 3: {\n message.docString = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a ValueInfoProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.ValueInfoProto} ValueInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ValueInfoProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a ValueInfoProto message.\n * @function verify\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n ValueInfoProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.type != null && message.hasOwnProperty(\"type\")) {\n var error = $root.onnx.TypeProto.verify(message.type);\n if (error)\n return \"type.\" + error;\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n return null;\n };\n\n /**\n * Creates a ValueInfoProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.ValueInfoProto} ValueInfoProto\n */\n ValueInfoProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.ValueInfoProto)\n return object;\n var message = new $root.onnx.ValueInfoProto();\n if (object.name != null)\n message.name = String(object.name);\n if (object.type != null) {\n if (typeof object.type !== \"object\")\n throw TypeError(\".onnx.ValueInfoProto.type: object expected\");\n message.type = $root.onnx.TypeProto.fromObject(object.type);\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n return message;\n };\n\n /**\n * Creates a plain object from a ValueInfoProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {onnx.ValueInfoProto} message ValueInfoProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n ValueInfoProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.name = \"\";\n object.type = null;\n object.docString = \"\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.type != null && message.hasOwnProperty(\"type\"))\n object.type = $root.onnx.TypeProto.toObject(message.type, options);\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n return object;\n };\n\n /**\n * Converts this ValueInfoProto to JSON.\n * @function toJSON\n * @memberof onnx.ValueInfoProto\n * @instance\n * @returns {Object.} JSON object\n */\n ValueInfoProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for ValueInfoProto\n * @function getTypeUrl\n * @memberof onnx.ValueInfoProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n ValueInfoProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.ValueInfoProto\";\n };\n\n return ValueInfoProto;\n })();\n\n onnx.NodeProto = (function() {\n\n /**\n * Properties of a NodeProto.\n * @memberof onnx\n * @interface INodeProto\n * @property {Array.|null} [input] NodeProto input\n * @property {Array.|null} [output] NodeProto output\n * @property {string|null} [name] NodeProto name\n * @property {string|null} [opType] NodeProto opType\n * @property {string|null} [domain] NodeProto domain\n * @property {Array.|null} [attribute] NodeProto attribute\n * @property {string|null} [docString] NodeProto docString\n */\n\n /**\n * Constructs a new NodeProto.\n * @memberof onnx\n * @classdesc Represents a NodeProto.\n * @implements INodeProto\n * @constructor\n * @param {onnx.INodeProto=} [properties] Properties to set\n */\n function NodeProto(properties) {\n this.input = [];\n this.output = [];\n this.attribute = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * NodeProto input.\n * @member {Array.} input\n * 
@memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.input = $util.emptyArray;\n\n /**\n * NodeProto output.\n * @member {Array.} output\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.output = $util.emptyArray;\n\n /**\n * NodeProto name.\n * @member {string} name\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.name = \"\";\n\n /**\n * NodeProto opType.\n * @member {string} opType\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.opType = \"\";\n\n /**\n * NodeProto domain.\n * @member {string} domain\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.domain = \"\";\n\n /**\n * NodeProto attribute.\n * @member {Array.} attribute\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.attribute = $util.emptyArray;\n\n /**\n * NodeProto docString.\n * @member {string} docString\n * @memberof onnx.NodeProto\n * @instance\n */\n NodeProto.prototype.docString = \"\";\n\n /**\n * Creates a new NodeProto instance using the specified properties.\n * @function create\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.INodeProto=} [properties] Properties to set\n * @returns {onnx.NodeProto} NodeProto instance\n */\n NodeProto.create = function create(properties) {\n return new NodeProto(properties);\n };\n\n /**\n * Encodes the specified NodeProto message. Does not implicitly {@link onnx.NodeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.INodeProto} message NodeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n NodeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.input != null && message.input.length)\n for (var i = 0; i < message.input.length; ++i)\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.input[i]);\n if (message.output != null && message.output.length)\n for (var i = 0; i < message.output.length; ++i)\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.output[i]);\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.name);\n if (message.opType != null && Object.hasOwnProperty.call(message, \"opType\"))\n writer.uint32(/* id 4, wireType 2 =*/34).string(message.opType);\n if (message.attribute != null && message.attribute.length)\n for (var i = 0; i < message.attribute.length; ++i)\n $root.onnx.AttributeProto.encode(message.attribute[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.docString);\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 7, wireType 2 =*/58).string(message.domain);\n return writer;\n };\n\n /**\n * Encodes the specified NodeProto message, length delimited. 
Does not implicitly {@link onnx.NodeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.INodeProto} message NodeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n NodeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a NodeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.NodeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.NodeProto} NodeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n NodeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.NodeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.input && message.input.length))\n message.input = [];\n message.input.push(reader.string());\n break;\n }\n case 2: {\n if (!(message.output && message.output.length))\n message.output = [];\n message.output.push(reader.string());\n break;\n }\n case 3: {\n message.name = reader.string();\n break;\n }\n case 4: {\n message.opType = reader.string();\n break;\n }\n case 7: {\n message.domain = reader.string();\n break;\n }\n case 5: {\n if (!(message.attribute && message.attribute.length))\n message.attribute = [];\n message.attribute.push($root.onnx.AttributeProto.decode(reader, reader.uint32()));\n break;\n }\n case 6: {\n message.docString = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a NodeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.NodeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.NodeProto} NodeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n NodeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a NodeProto message.\n * @function verify\n * @memberof onnx.NodeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n NodeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.input != null && message.hasOwnProperty(\"input\")) {\n if (!Array.isArray(message.input))\n return \"input: array expected\";\n for (var i = 0; i < message.input.length; ++i)\n if (!$util.isString(message.input[i]))\n return \"input: string[] expected\";\n }\n if (message.output != null && message.hasOwnProperty(\"output\")) {\n if (!Array.isArray(message.output))\n return \"output: array expected\";\n for (var i = 0; i < message.output.length; ++i)\n if (!$util.isString(message.output[i]))\n return \"output: string[] 
expected\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.opType != null && message.hasOwnProperty(\"opType\"))\n if (!$util.isString(message.opType))\n return \"opType: string expected\";\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n if (message.attribute != null && message.hasOwnProperty(\"attribute\")) {\n if (!Array.isArray(message.attribute))\n return \"attribute: array expected\";\n for (var i = 0; i < message.attribute.length; ++i) {\n var error = $root.onnx.AttributeProto.verify(message.attribute[i]);\n if (error)\n return \"attribute.\" + error;\n }\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n return null;\n };\n\n /**\n * Creates a NodeProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.NodeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.NodeProto} NodeProto\n */\n NodeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.NodeProto)\n return object;\n var message = new $root.onnx.NodeProto();\n if (object.input) {\n if (!Array.isArray(object.input))\n throw TypeError(\".onnx.NodeProto.input: array expected\");\n message.input = [];\n for (var i = 0; i < object.input.length; ++i)\n message.input[i] = String(object.input[i]);\n }\n if (object.output) {\n if (!Array.isArray(object.output))\n throw TypeError(\".onnx.NodeProto.output: array expected\");\n message.output = [];\n for (var i = 0; i < object.output.length; ++i)\n message.output[i] = String(object.output[i]);\n }\n if (object.name != null)\n message.name = String(object.name);\n if (object.opType != null)\n message.opType = String(object.opType);\n if (object.domain != null)\n message.domain = String(object.domain);\n if (object.attribute) {\n if (!Array.isArray(object.attribute))\n throw TypeError(\".onnx.NodeProto.attribute: array expected\");\n message.attribute = [];\n for (var i = 0; i < object.attribute.length; ++i) {\n if (typeof object.attribute[i] !== \"object\")\n throw TypeError(\".onnx.NodeProto.attribute: object expected\");\n message.attribute[i] = $root.onnx.AttributeProto.fromObject(object.attribute[i]);\n }\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n return message;\n };\n\n /**\n * Creates a plain object from a NodeProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.NodeProto\n * @static\n * @param {onnx.NodeProto} message NodeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n NodeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.input = [];\n object.output = [];\n object.attribute = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.opType = \"\";\n object.docString = \"\";\n object.domain = \"\";\n }\n if (message.input && message.input.length) {\n object.input = [];\n for (var j = 0; j < message.input.length; ++j)\n object.input[j] = message.input[j];\n }\n if (message.output && message.output.length) {\n object.output = [];\n for (var j = 0; j < message.output.length; ++j)\n object.output[j] = message.output[j];\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.opType != null && message.hasOwnProperty(\"opType\"))\n object.opType = message.opType;\n if (message.attribute && message.attribute.length) {\n object.attribute = [];\n for (var j = 0; j < message.attribute.length; ++j)\n object.attribute[j] = $root.onnx.AttributeProto.toObject(message.attribute[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n return object;\n };\n\n /**\n * Converts this NodeProto to JSON.\n * @function toJSON\n * @memberof onnx.NodeProto\n * @instance\n * @returns {Object.} JSON object\n */\n NodeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for NodeProto\n * @function getTypeUrl\n * @memberof onnx.NodeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n NodeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.NodeProto\";\n };\n\n return NodeProto;\n })();\n\n onnx.TrainingInfoProto = (function() {\n\n /**\n * Properties of a TrainingInfoProto.\n * @memberof onnx\n * @interface ITrainingInfoProto\n * @property {onnx.IGraphProto|null} [initialization] TrainingInfoProto initialization\n * @property {onnx.IGraphProto|null} [algorithm] TrainingInfoProto algorithm\n * @property {Array.|null} [initializationBinding] TrainingInfoProto initializationBinding\n * @property {Array.|null} [updateBinding] TrainingInfoProto updateBinding\n */\n\n /**\n * Constructs a new TrainingInfoProto.\n * @memberof onnx\n * @classdesc Represents a TrainingInfoProto.\n * @implements ITrainingInfoProto\n * @constructor\n * @param {onnx.ITrainingInfoProto=} [properties] Properties to set\n */\n function TrainingInfoProto(properties) {\n this.initializationBinding = [];\n this.updateBinding = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TrainingInfoProto initialization.\n * @member {onnx.IGraphProto|null|undefined} initialization\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n 
TrainingInfoProto.prototype.initialization = null;\n\n /**\n * TrainingInfoProto algorithm.\n * @member {onnx.IGraphProto|null|undefined} algorithm\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n TrainingInfoProto.prototype.algorithm = null;\n\n /**\n * TrainingInfoProto initializationBinding.\n * @member {Array.} initializationBinding\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n TrainingInfoProto.prototype.initializationBinding = $util.emptyArray;\n\n /**\n * TrainingInfoProto updateBinding.\n * @member {Array.} updateBinding\n * @memberof onnx.TrainingInfoProto\n * @instance\n */\n TrainingInfoProto.prototype.updateBinding = $util.emptyArray;\n\n /**\n * Creates a new TrainingInfoProto instance using the specified properties.\n * @function create\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.ITrainingInfoProto=} [properties] Properties to set\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto instance\n */\n TrainingInfoProto.create = function create(properties) {\n return new TrainingInfoProto(properties);\n };\n\n /**\n * Encodes the specified TrainingInfoProto message. Does not implicitly {@link onnx.TrainingInfoProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.ITrainingInfoProto} message TrainingInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TrainingInfoProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.initialization != null && Object.hasOwnProperty.call(message, \"initialization\"))\n $root.onnx.GraphProto.encode(message.initialization, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.algorithm != null && Object.hasOwnProperty.call(message, \"algorithm\"))\n $root.onnx.GraphProto.encode(message.algorithm, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n if (message.initializationBinding != null && message.initializationBinding.length)\n for (var i = 0; i < message.initializationBinding.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.initializationBinding[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();\n if (message.updateBinding != null && message.updateBinding.length)\n for (var i = 0; i < message.updateBinding.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.updateBinding[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TrainingInfoProto message, length delimited. 
Does not implicitly {@link onnx.TrainingInfoProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.ITrainingInfoProto} message TrainingInfoProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TrainingInfoProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TrainingInfoProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TrainingInfoProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TrainingInfoProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.initialization = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 2: {\n message.algorithm = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 3: {\n if (!(message.initializationBinding && message.initializationBinding.length))\n message.initializationBinding = [];\n message.initializationBinding.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n case 4: {\n if (!(message.updateBinding && message.updateBinding.length))\n message.updateBinding = [];\n message.updateBinding.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TrainingInfoProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TrainingInfoProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TrainingInfoProto message.\n * @function verify\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TrainingInfoProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.initialization != null && message.hasOwnProperty(\"initialization\")) {\n var error = $root.onnx.GraphProto.verify(message.initialization);\n if (error)\n return \"initialization.\" + error;\n }\n if (message.algorithm != null && message.hasOwnProperty(\"algorithm\")) {\n var error = $root.onnx.GraphProto.verify(message.algorithm);\n if (error)\n return \"algorithm.\" + error;\n }\n if 
(message.initializationBinding != null && message.hasOwnProperty(\"initializationBinding\")) {\n if (!Array.isArray(message.initializationBinding))\n return \"initializationBinding: array expected\";\n for (var i = 0; i < message.initializationBinding.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.initializationBinding[i]);\n if (error)\n return \"initializationBinding.\" + error;\n }\n }\n if (message.updateBinding != null && message.hasOwnProperty(\"updateBinding\")) {\n if (!Array.isArray(message.updateBinding))\n return \"updateBinding: array expected\";\n for (var i = 0; i < message.updateBinding.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.updateBinding[i]);\n if (error)\n return \"updateBinding.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a TrainingInfoProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TrainingInfoProto} TrainingInfoProto\n */\n TrainingInfoProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TrainingInfoProto)\n return object;\n var message = new $root.onnx.TrainingInfoProto();\n if (object.initialization != null) {\n if (typeof object.initialization !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.initialization: object expected\");\n message.initialization = $root.onnx.GraphProto.fromObject(object.initialization);\n }\n if (object.algorithm != null) {\n if (typeof object.algorithm !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.algorithm: object expected\");\n message.algorithm = $root.onnx.GraphProto.fromObject(object.algorithm);\n }\n if (object.initializationBinding) {\n if (!Array.isArray(object.initializationBinding))\n throw TypeError(\".onnx.TrainingInfoProto.initializationBinding: array expected\");\n message.initializationBinding = [];\n for (var i = 0; i < object.initializationBinding.length; ++i) {\n if (typeof object.initializationBinding[i] !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.initializationBinding: object expected\");\n message.initializationBinding[i] = $root.onnx.StringStringEntryProto.fromObject(object.initializationBinding[i]);\n }\n }\n if (object.updateBinding) {\n if (!Array.isArray(object.updateBinding))\n throw TypeError(\".onnx.TrainingInfoProto.updateBinding: array expected\");\n message.updateBinding = [];\n for (var i = 0; i < object.updateBinding.length; ++i) {\n if (typeof object.updateBinding[i] !== \"object\")\n throw TypeError(\".onnx.TrainingInfoProto.updateBinding: object expected\");\n message.updateBinding[i] = $root.onnx.StringStringEntryProto.fromObject(object.updateBinding[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TrainingInfoProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {onnx.TrainingInfoProto} message TrainingInfoProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TrainingInfoProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.initializationBinding = [];\n object.updateBinding = [];\n }\n if (options.defaults) {\n object.initialization = null;\n object.algorithm = null;\n }\n if (message.initialization != null && message.hasOwnProperty(\"initialization\"))\n object.initialization = $root.onnx.GraphProto.toObject(message.initialization, options);\n if (message.algorithm != null && message.hasOwnProperty(\"algorithm\"))\n object.algorithm = $root.onnx.GraphProto.toObject(message.algorithm, options);\n if (message.initializationBinding && message.initializationBinding.length) {\n object.initializationBinding = [];\n for (var j = 0; j < message.initializationBinding.length; ++j)\n object.initializationBinding[j] = $root.onnx.StringStringEntryProto.toObject(message.initializationBinding[j], options);\n }\n if (message.updateBinding && message.updateBinding.length) {\n object.updateBinding = [];\n for (var j = 0; j < message.updateBinding.length; ++j)\n object.updateBinding[j] = $root.onnx.StringStringEntryProto.toObject(message.updateBinding[j], options);\n }\n return object;\n };\n\n /**\n * Converts this TrainingInfoProto to JSON.\n * @function toJSON\n * @memberof onnx.TrainingInfoProto\n * @instance\n * @returns {Object.} JSON object\n */\n TrainingInfoProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TrainingInfoProto\n * @function getTypeUrl\n * @memberof onnx.TrainingInfoProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TrainingInfoProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TrainingInfoProto\";\n };\n\n return TrainingInfoProto;\n })();\n\n onnx.ModelProto = (function() {\n\n /**\n * Properties of a ModelProto.\n * @memberof onnx\n * @interface IModelProto\n * @property {number|Long|null} [irVersion] ModelProto irVersion\n * @property {Array.|null} [opsetImport] ModelProto opsetImport\n * @property {string|null} [producerName] ModelProto producerName\n * @property {string|null} [producerVersion] ModelProto producerVersion\n * @property {string|null} [domain] ModelProto domain\n * @property {number|Long|null} [modelVersion] ModelProto modelVersion\n * @property {string|null} [docString] ModelProto docString\n * @property {onnx.IGraphProto|null} [graph] ModelProto graph\n * @property {Array.|null} [metadataProps] ModelProto metadataProps\n * @property {Array.|null} [trainingInfo] ModelProto trainingInfo\n * @property {Array.|null} [functions] ModelProto functions\n */\n\n /**\n * Constructs a new ModelProto.\n * @memberof onnx\n * @classdesc Represents a ModelProto.\n * @implements IModelProto\n * @constructor\n * @param {onnx.IModelProto=} [properties] Properties to set\n */\n function ModelProto(properties) {\n this.opsetImport = [];\n this.metadataProps = [];\n this.trainingInfo = [];\n 
this.functions = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * ModelProto irVersion.\n * @member {number|Long} irVersion\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.irVersion = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * ModelProto opsetImport.\n * @member {Array.} opsetImport\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.opsetImport = $util.emptyArray;\n\n /**\n * ModelProto producerName.\n * @member {string} producerName\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.producerName = \"\";\n\n /**\n * ModelProto producerVersion.\n * @member {string} producerVersion\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.producerVersion = \"\";\n\n /**\n * ModelProto domain.\n * @member {string} domain\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.domain = \"\";\n\n /**\n * ModelProto modelVersion.\n * @member {number|Long} modelVersion\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.modelVersion = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * ModelProto docString.\n * @member {string} docString\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.docString = \"\";\n\n /**\n * ModelProto graph.\n * @member {onnx.IGraphProto|null|undefined} graph\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.graph = null;\n\n /**\n * ModelProto metadataProps.\n * @member {Array.} metadataProps\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.metadataProps = $util.emptyArray;\n\n /**\n * ModelProto trainingInfo.\n * @member {Array.} trainingInfo\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.trainingInfo = $util.emptyArray;\n\n /**\n * ModelProto functions.\n * @member {Array.} functions\n * @memberof onnx.ModelProto\n * @instance\n */\n ModelProto.prototype.functions = $util.emptyArray;\n\n /**\n * Creates a new ModelProto instance using the specified properties.\n * @function create\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.IModelProto=} [properties] Properties to set\n * @returns {onnx.ModelProto} ModelProto instance\n */\n ModelProto.create = function create(properties) {\n return new ModelProto(properties);\n };\n\n /**\n * Encodes the specified ModelProto message. 
Does not implicitly {@link onnx.ModelProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.IModelProto} message ModelProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ModelProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.irVersion != null && Object.hasOwnProperty.call(message, \"irVersion\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int64(message.irVersion);\n if (message.producerName != null && Object.hasOwnProperty.call(message, \"producerName\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.producerName);\n if (message.producerVersion != null && Object.hasOwnProperty.call(message, \"producerVersion\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.producerVersion);\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 4, wireType 2 =*/34).string(message.domain);\n if (message.modelVersion != null && Object.hasOwnProperty.call(message, \"modelVersion\"))\n writer.uint32(/* id 5, wireType 0 =*/40).int64(message.modelVersion);\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.docString);\n if (message.graph != null && Object.hasOwnProperty.call(message, \"graph\"))\n $root.onnx.GraphProto.encode(message.graph, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim();\n if (message.opsetImport != null && message.opsetImport.length)\n for (var i = 0; i < message.opsetImport.length; ++i)\n $root.onnx.OperatorSetIdProto.encode(message.opsetImport[i], writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim();\n if (message.metadataProps != null && message.metadataProps.length)\n for (var i = 0; i < message.metadataProps.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.metadataProps[i], writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim();\n if (message.trainingInfo != null && message.trainingInfo.length)\n for (var i = 0; i < message.trainingInfo.length; ++i)\n $root.onnx.TrainingInfoProto.encode(message.trainingInfo[i], writer.uint32(/* id 20, wireType 2 =*/162).fork()).ldelim();\n if (message.functions != null && message.functions.length)\n for (var i = 0; i < message.functions.length; ++i)\n $root.onnx.FunctionProto.encode(message.functions[i], writer.uint32(/* id 25, wireType 2 =*/202).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified ModelProto message, length delimited. 
Does not implicitly {@link onnx.ModelProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.IModelProto} message ModelProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n ModelProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a ModelProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.ModelProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.ModelProto} ModelProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ModelProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.ModelProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.irVersion = reader.int64();\n break;\n }\n case 8: {\n if (!(message.opsetImport && message.opsetImport.length))\n message.opsetImport = [];\n message.opsetImport.push($root.onnx.OperatorSetIdProto.decode(reader, reader.uint32()));\n break;\n }\n case 2: {\n message.producerName = reader.string();\n break;\n }\n case 3: {\n message.producerVersion = reader.string();\n break;\n }\n case 4: {\n message.domain = reader.string();\n break;\n }\n case 5: {\n message.modelVersion = reader.int64();\n break;\n }\n case 6: {\n message.docString = reader.string();\n break;\n }\n case 7: {\n message.graph = $root.onnx.GraphProto.decode(reader, reader.uint32());\n break;\n }\n case 14: {\n if (!(message.metadataProps && message.metadataProps.length))\n message.metadataProps = [];\n message.metadataProps.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n case 20: {\n if (!(message.trainingInfo && message.trainingInfo.length))\n message.trainingInfo = [];\n message.trainingInfo.push($root.onnx.TrainingInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 25: {\n if (!(message.functions && message.functions.length))\n message.functions = [];\n message.functions.push($root.onnx.FunctionProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a ModelProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.ModelProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.ModelProto} ModelProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n ModelProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a ModelProto message.\n * @function verify\n * @memberof onnx.ModelProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n ModelProto.verify = function verify(message) {\n if 
(typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.irVersion != null && message.hasOwnProperty(\"irVersion\"))\n if (!$util.isInteger(message.irVersion) && !(message.irVersion && $util.isInteger(message.irVersion.low) && $util.isInteger(message.irVersion.high)))\n return \"irVersion: integer|Long expected\";\n if (message.opsetImport != null && message.hasOwnProperty(\"opsetImport\")) {\n if (!Array.isArray(message.opsetImport))\n return \"opsetImport: array expected\";\n for (var i = 0; i < message.opsetImport.length; ++i) {\n var error = $root.onnx.OperatorSetIdProto.verify(message.opsetImport[i]);\n if (error)\n return \"opsetImport.\" + error;\n }\n }\n if (message.producerName != null && message.hasOwnProperty(\"producerName\"))\n if (!$util.isString(message.producerName))\n return \"producerName: string expected\";\n if (message.producerVersion != null && message.hasOwnProperty(\"producerVersion\"))\n if (!$util.isString(message.producerVersion))\n return \"producerVersion: string expected\";\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n if (message.modelVersion != null && message.hasOwnProperty(\"modelVersion\"))\n if (!$util.isInteger(message.modelVersion) && !(message.modelVersion && $util.isInteger(message.modelVersion.low) && $util.isInteger(message.modelVersion.high)))\n return \"modelVersion: integer|Long expected\";\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.graph != null && message.hasOwnProperty(\"graph\")) {\n var error = $root.onnx.GraphProto.verify(message.graph);\n if (error)\n return \"graph.\" + error;\n }\n if (message.metadataProps != null && message.hasOwnProperty(\"metadataProps\")) {\n if (!Array.isArray(message.metadataProps))\n return \"metadataProps: array expected\";\n for (var i = 0; i < message.metadataProps.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.metadataProps[i]);\n if (error)\n return \"metadataProps.\" + error;\n }\n }\n if (message.trainingInfo != null && message.hasOwnProperty(\"trainingInfo\")) {\n if (!Array.isArray(message.trainingInfo))\n return \"trainingInfo: array expected\";\n for (var i = 0; i < message.trainingInfo.length; ++i) {\n var error = $root.onnx.TrainingInfoProto.verify(message.trainingInfo[i]);\n if (error)\n return \"trainingInfo.\" + error;\n }\n }\n if (message.functions != null && message.hasOwnProperty(\"functions\")) {\n if (!Array.isArray(message.functions))\n return \"functions: array expected\";\n for (var i = 0; i < message.functions.length; ++i) {\n var error = $root.onnx.FunctionProto.verify(message.functions[i]);\n if (error)\n return \"functions.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a ModelProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.ModelProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.ModelProto} ModelProto\n */\n ModelProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.ModelProto)\n return object;\n var message = new $root.onnx.ModelProto();\n if (object.irVersion != null)\n if ($util.Long)\n (message.irVersion = $util.Long.fromValue(object.irVersion)).unsigned = false;\n else if (typeof object.irVersion === \"string\")\n message.irVersion = parseInt(object.irVersion, 10);\n else if (typeof object.irVersion === \"number\")\n message.irVersion = object.irVersion;\n else if (typeof object.irVersion === \"object\")\n message.irVersion = new $util.LongBits(object.irVersion.low >>> 0, object.irVersion.high >>> 0).toNumber();\n if (object.opsetImport) {\n if (!Array.isArray(object.opsetImport))\n throw TypeError(\".onnx.ModelProto.opsetImport: array expected\");\n message.opsetImport = [];\n for (var i = 0; i < object.opsetImport.length; ++i) {\n if (typeof object.opsetImport[i] !== \"object\")\n throw TypeError(\".onnx.ModelProto.opsetImport: object expected\");\n message.opsetImport[i] = $root.onnx.OperatorSetIdProto.fromObject(object.opsetImport[i]);\n }\n }\n if (object.producerName != null)\n message.producerName = String(object.producerName);\n if (object.producerVersion != null)\n message.producerVersion = String(object.producerVersion);\n if (object.domain != null)\n message.domain = String(object.domain);\n if (object.modelVersion != null)\n if ($util.Long)\n (message.modelVersion = $util.Long.fromValue(object.modelVersion)).unsigned = false;\n else if (typeof object.modelVersion === \"string\")\n message.modelVersion = parseInt(object.modelVersion, 10);\n else if (typeof object.modelVersion === \"number\")\n message.modelVersion = object.modelVersion;\n else if (typeof object.modelVersion === \"object\")\n message.modelVersion = new $util.LongBits(object.modelVersion.low >>> 0, object.modelVersion.high >>> 0).toNumber();\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.graph != null) {\n if (typeof object.graph !== \"object\")\n throw TypeError(\".onnx.ModelProto.graph: object expected\");\n message.graph = $root.onnx.GraphProto.fromObject(object.graph);\n }\n if (object.metadataProps) {\n if (!Array.isArray(object.metadataProps))\n throw TypeError(\".onnx.ModelProto.metadataProps: array expected\");\n message.metadataProps = [];\n for (var i = 0; i < object.metadataProps.length; ++i) {\n if (typeof object.metadataProps[i] !== \"object\")\n throw TypeError(\".onnx.ModelProto.metadataProps: object expected\");\n message.metadataProps[i] = $root.onnx.StringStringEntryProto.fromObject(object.metadataProps[i]);\n }\n }\n if (object.trainingInfo) {\n if (!Array.isArray(object.trainingInfo))\n throw TypeError(\".onnx.ModelProto.trainingInfo: array expected\");\n message.trainingInfo = [];\n for (var i = 0; i < object.trainingInfo.length; ++i) {\n if (typeof object.trainingInfo[i] !== \"object\")\n throw TypeError(\".onnx.ModelProto.trainingInfo: object expected\");\n message.trainingInfo[i] = $root.onnx.TrainingInfoProto.fromObject(object.trainingInfo[i]);\n }\n }\n if (object.functions) {\n if (!Array.isArray(object.functions))\n throw TypeError(\".onnx.ModelProto.functions: array expected\");\n message.functions = [];\n for (var i = 0; i < object.functions.length; ++i) {\n if (typeof object.functions[i] !== 
\"object\")\n throw TypeError(\".onnx.ModelProto.functions: object expected\");\n message.functions[i] = $root.onnx.FunctionProto.fromObject(object.functions[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a ModelProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.ModelProto\n * @static\n * @param {onnx.ModelProto} message ModelProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n ModelProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.opsetImport = [];\n object.metadataProps = [];\n object.trainingInfo = [];\n object.functions = [];\n }\n if (options.defaults) {\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.irVersion = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.irVersion = options.longs === String ? \"0\" : 0;\n object.producerName = \"\";\n object.producerVersion = \"\";\n object.domain = \"\";\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.modelVersion = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.modelVersion = options.longs === String ? \"0\" : 0;\n object.docString = \"\";\n object.graph = null;\n }\n if (message.irVersion != null && message.hasOwnProperty(\"irVersion\"))\n if (typeof message.irVersion === \"number\")\n object.irVersion = options.longs === String ? String(message.irVersion) : message.irVersion;\n else\n object.irVersion = options.longs === String ? $util.Long.prototype.toString.call(message.irVersion) : options.longs === Number ? new $util.LongBits(message.irVersion.low >>> 0, message.irVersion.high >>> 0).toNumber() : message.irVersion;\n if (message.producerName != null && message.hasOwnProperty(\"producerName\"))\n object.producerName = message.producerName;\n if (message.producerVersion != null && message.hasOwnProperty(\"producerVersion\"))\n object.producerVersion = message.producerVersion;\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n if (message.modelVersion != null && message.hasOwnProperty(\"modelVersion\"))\n if (typeof message.modelVersion === \"number\")\n object.modelVersion = options.longs === String ? String(message.modelVersion) : message.modelVersion;\n else\n object.modelVersion = options.longs === String ? $util.Long.prototype.toString.call(message.modelVersion) : options.longs === Number ? 
new $util.LongBits(message.modelVersion.low >>> 0, message.modelVersion.high >>> 0).toNumber() : message.modelVersion;\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.graph != null && message.hasOwnProperty(\"graph\"))\n object.graph = $root.onnx.GraphProto.toObject(message.graph, options);\n if (message.opsetImport && message.opsetImport.length) {\n object.opsetImport = [];\n for (var j = 0; j < message.opsetImport.length; ++j)\n object.opsetImport[j] = $root.onnx.OperatorSetIdProto.toObject(message.opsetImport[j], options);\n }\n if (message.metadataProps && message.metadataProps.length) {\n object.metadataProps = [];\n for (var j = 0; j < message.metadataProps.length; ++j)\n object.metadataProps[j] = $root.onnx.StringStringEntryProto.toObject(message.metadataProps[j], options);\n }\n if (message.trainingInfo && message.trainingInfo.length) {\n object.trainingInfo = [];\n for (var j = 0; j < message.trainingInfo.length; ++j)\n object.trainingInfo[j] = $root.onnx.TrainingInfoProto.toObject(message.trainingInfo[j], options);\n }\n if (message.functions && message.functions.length) {\n object.functions = [];\n for (var j = 0; j < message.functions.length; ++j)\n object.functions[j] = $root.onnx.FunctionProto.toObject(message.functions[j], options);\n }\n return object;\n };\n\n /**\n * Converts this ModelProto to JSON.\n * @function toJSON\n * @memberof onnx.ModelProto\n * @instance\n * @returns {Object.} JSON object\n */\n ModelProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for ModelProto\n * @function getTypeUrl\n * @memberof onnx.ModelProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n ModelProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.ModelProto\";\n };\n\n return ModelProto;\n })();\n\n onnx.StringStringEntryProto = (function() {\n\n /**\n * Properties of a StringStringEntryProto.\n * @memberof onnx\n * @interface IStringStringEntryProto\n * @property {string|null} [key] StringStringEntryProto key\n * @property {string|null} [value] StringStringEntryProto value\n */\n\n /**\n * Constructs a new StringStringEntryProto.\n * @memberof onnx\n * @classdesc Represents a StringStringEntryProto.\n * @implements IStringStringEntryProto\n * @constructor\n * @param {onnx.IStringStringEntryProto=} [properties] Properties to set\n */\n function StringStringEntryProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * StringStringEntryProto key.\n * @member {string} key\n * @memberof onnx.StringStringEntryProto\n * @instance\n */\n StringStringEntryProto.prototype.key = \"\";\n\n /**\n * StringStringEntryProto value.\n * @member {string} value\n * @memberof onnx.StringStringEntryProto\n * @instance\n */\n StringStringEntryProto.prototype.value = \"\";\n\n /**\n * Creates a new StringStringEntryProto instance using the specified properties.\n * @function create\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.IStringStringEntryProto=} [properties] Properties to set\n * @returns {onnx.StringStringEntryProto} 
StringStringEntryProto instance\n */\n StringStringEntryProto.create = function create(properties) {\n return new StringStringEntryProto(properties);\n };\n\n /**\n * Encodes the specified StringStringEntryProto message. Does not implicitly {@link onnx.StringStringEntryProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.IStringStringEntryProto} message StringStringEntryProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n StringStringEntryProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.key != null && Object.hasOwnProperty.call(message, \"key\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.key);\n if (message.value != null && Object.hasOwnProperty.call(message, \"value\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.value);\n return writer;\n };\n\n /**\n * Encodes the specified StringStringEntryProto message, length delimited. Does not implicitly {@link onnx.StringStringEntryProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.IStringStringEntryProto} message StringStringEntryProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n StringStringEntryProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a StringStringEntryProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.StringStringEntryProto} StringStringEntryProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n StringStringEntryProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.StringStringEntryProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.key = reader.string();\n break;\n }\n case 2: {\n message.value = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a StringStringEntryProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.StringStringEntryProto} StringStringEntryProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n StringStringEntryProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a StringStringEntryProto message.\n * @function verify\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n StringStringEntryProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.key != null && message.hasOwnProperty(\"key\"))\n if (!$util.isString(message.key))\n return \"key: string expected\";\n if (message.value != null && message.hasOwnProperty(\"value\"))\n if (!$util.isString(message.value))\n return \"value: string expected\";\n return null;\n };\n\n /**\n * Creates a StringStringEntryProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.StringStringEntryProto} StringStringEntryProto\n */\n StringStringEntryProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.StringStringEntryProto)\n return object;\n var message = new $root.onnx.StringStringEntryProto();\n if (object.key != null)\n message.key = String(object.key);\n if (object.value != null)\n message.value = String(object.value);\n return message;\n };\n\n /**\n * Creates a plain object from a StringStringEntryProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {onnx.StringStringEntryProto} message StringStringEntryProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n StringStringEntryProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.key = \"\";\n object.value = \"\";\n }\n if (message.key != null && message.hasOwnProperty(\"key\"))\n object.key = message.key;\n if (message.value != null && message.hasOwnProperty(\"value\"))\n object.value = message.value;\n return object;\n };\n\n /**\n * Converts this StringStringEntryProto to JSON.\n * @function toJSON\n * @memberof onnx.StringStringEntryProto\n * @instance\n * @returns {Object.} JSON object\n */\n StringStringEntryProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for StringStringEntryProto\n * @function getTypeUrl\n * @memberof onnx.StringStringEntryProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n StringStringEntryProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.StringStringEntryProto\";\n };\n\n return StringStringEntryProto;\n })();\n\n onnx.TensorAnnotation = (function() {\n\n /**\n * Properties of a TensorAnnotation.\n * @memberof onnx\n * @interface ITensorAnnotation\n * @property {string|null} [tensorName] TensorAnnotation tensorName\n * @property {Array.|null} [quantParameterTensorNames] TensorAnnotation quantParameterTensorNames\n */\n\n /**\n * Constructs a new TensorAnnotation.\n * @memberof onnx\n * @classdesc Represents a TensorAnnotation.\n * @implements ITensorAnnotation\n * @constructor\n * @param {onnx.ITensorAnnotation=} [properties] Properties to set\n */\n function TensorAnnotation(properties) {\n this.quantParameterTensorNames = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TensorAnnotation tensorName.\n * @member {string} tensorName\n * @memberof onnx.TensorAnnotation\n * @instance\n */\n TensorAnnotation.prototype.tensorName = \"\";\n\n /**\n * TensorAnnotation quantParameterTensorNames.\n * @member {Array.} quantParameterTensorNames\n * @memberof onnx.TensorAnnotation\n * @instance\n */\n TensorAnnotation.prototype.quantParameterTensorNames = $util.emptyArray;\n\n /**\n * Creates a new TensorAnnotation instance using the specified properties.\n * @function create\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.ITensorAnnotation=} [properties] Properties to set\n * @returns {onnx.TensorAnnotation} TensorAnnotation instance\n */\n TensorAnnotation.create = function create(properties) {\n return new TensorAnnotation(properties);\n };\n\n /**\n * Encodes the specified TensorAnnotation message. 
Does not implicitly {@link onnx.TensorAnnotation.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.ITensorAnnotation} message TensorAnnotation message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorAnnotation.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.tensorName != null && Object.hasOwnProperty.call(message, \"tensorName\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.tensorName);\n if (message.quantParameterTensorNames != null && message.quantParameterTensorNames.length)\n for (var i = 0; i < message.quantParameterTensorNames.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.quantParameterTensorNames[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TensorAnnotation message, length delimited. Does not implicitly {@link onnx.TensorAnnotation.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.ITensorAnnotation} message TensorAnnotation message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorAnnotation.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TensorAnnotation message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorAnnotation} TensorAnnotation\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorAnnotation.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TensorAnnotation();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.tensorName = reader.string();\n break;\n }\n case 2: {\n if (!(message.quantParameterTensorNames && message.quantParameterTensorNames.length))\n message.quantParameterTensorNames = [];\n message.quantParameterTensorNames.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TensorAnnotation message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorAnnotation} TensorAnnotation\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorAnnotation.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TensorAnnotation message.\n * @function verify\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TensorAnnotation.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.tensorName != null && message.hasOwnProperty(\"tensorName\"))\n if (!$util.isString(message.tensorName))\n return \"tensorName: string expected\";\n if (message.quantParameterTensorNames != null && message.hasOwnProperty(\"quantParameterTensorNames\")) {\n if (!Array.isArray(message.quantParameterTensorNames))\n return \"quantParameterTensorNames: array expected\";\n for (var i = 0; i < message.quantParameterTensorNames.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.quantParameterTensorNames[i]);\n if (error)\n return \"quantParameterTensorNames.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a TensorAnnotation message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorAnnotation} TensorAnnotation\n */\n TensorAnnotation.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorAnnotation)\n return object;\n var message = new $root.onnx.TensorAnnotation();\n if (object.tensorName != null)\n message.tensorName = String(object.tensorName);\n if (object.quantParameterTensorNames) {\n if (!Array.isArray(object.quantParameterTensorNames))\n throw TypeError(\".onnx.TensorAnnotation.quantParameterTensorNames: array expected\");\n message.quantParameterTensorNames = [];\n for (var i = 0; i < object.quantParameterTensorNames.length; ++i) {\n if (typeof object.quantParameterTensorNames[i] !== \"object\")\n throw TypeError(\".onnx.TensorAnnotation.quantParameterTensorNames: object expected\");\n message.quantParameterTensorNames[i] = $root.onnx.StringStringEntryProto.fromObject(object.quantParameterTensorNames[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TensorAnnotation message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {onnx.TensorAnnotation} message TensorAnnotation\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TensorAnnotation.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults)\n object.quantParameterTensorNames = [];\n if (options.defaults)\n object.tensorName = \"\";\n if (message.tensorName != null && message.hasOwnProperty(\"tensorName\"))\n object.tensorName = message.tensorName;\n if (message.quantParameterTensorNames && message.quantParameterTensorNames.length) {\n object.quantParameterTensorNames = [];\n for (var j = 0; j < message.quantParameterTensorNames.length; ++j)\n object.quantParameterTensorNames[j] = $root.onnx.StringStringEntryProto.toObject(message.quantParameterTensorNames[j], options);\n }\n return object;\n };\n\n /**\n * Converts this TensorAnnotation to JSON.\n * @function toJSON\n * @memberof onnx.TensorAnnotation\n * @instance\n * @returns {Object.} JSON object\n */\n TensorAnnotation.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TensorAnnotation\n * @function getTypeUrl\n * @memberof onnx.TensorAnnotation\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TensorAnnotation.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorAnnotation\";\n };\n\n return TensorAnnotation;\n })();\n\n onnx.GraphProto = (function() {\n\n /**\n * Properties of a GraphProto.\n * @memberof onnx\n * @interface IGraphProto\n * @property {Array.|null} [node] GraphProto node\n * @property {string|null} [name] GraphProto name\n * @property {Array.|null} [initializer] GraphProto initializer\n * @property {Array.|null} [sparseInitializer] GraphProto sparseInitializer\n * @property {string|null} [docString] GraphProto docString\n * @property {Array.|null} [input] GraphProto input\n * @property {Array.|null} [output] GraphProto output\n * @property {Array.|null} [valueInfo] GraphProto valueInfo\n * @property {Array.|null} [quantizationAnnotation] GraphProto quantizationAnnotation\n */\n\n /**\n * Constructs a new GraphProto.\n * @memberof onnx\n * @classdesc Represents a GraphProto.\n * @implements IGraphProto\n * @constructor\n * @param {onnx.IGraphProto=} [properties] Properties to set\n */\n function GraphProto(properties) {\n this.node = [];\n this.initializer = [];\n this.sparseInitializer = [];\n this.input = [];\n this.output = [];\n this.valueInfo = [];\n this.quantizationAnnotation = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * GraphProto node.\n * @member {Array.} node\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.node = $util.emptyArray;\n\n /**\n * GraphProto name.\n * @member {string} name\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.name = \"\";\n\n /**\n * GraphProto initializer.\n * @member {Array.} initializer\n * @memberof onnx.GraphProto\n * @instance\n */\n 
GraphProto.prototype.initializer = $util.emptyArray;\n\n /**\n * GraphProto sparseInitializer.\n * @member {Array.} sparseInitializer\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.sparseInitializer = $util.emptyArray;\n\n /**\n * GraphProto docString.\n * @member {string} docString\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.docString = \"\";\n\n /**\n * GraphProto input.\n * @member {Array.} input\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.input = $util.emptyArray;\n\n /**\n * GraphProto output.\n * @member {Array.} output\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.output = $util.emptyArray;\n\n /**\n * GraphProto valueInfo.\n * @member {Array.} valueInfo\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.valueInfo = $util.emptyArray;\n\n /**\n * GraphProto quantizationAnnotation.\n * @member {Array.} quantizationAnnotation\n * @memberof onnx.GraphProto\n * @instance\n */\n GraphProto.prototype.quantizationAnnotation = $util.emptyArray;\n\n /**\n * Creates a new GraphProto instance using the specified properties.\n * @function create\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.IGraphProto=} [properties] Properties to set\n * @returns {onnx.GraphProto} GraphProto instance\n */\n GraphProto.create = function create(properties) {\n return new GraphProto(properties);\n };\n\n /**\n * Encodes the specified GraphProto message. Does not implicitly {@link onnx.GraphProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.IGraphProto} message GraphProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n GraphProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.node != null && message.node.length)\n for (var i = 0; i < message.node.length; ++i)\n $root.onnx.NodeProto.encode(message.node[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.name);\n if (message.initializer != null && message.initializer.length)\n for (var i = 0; i < message.initializer.length; ++i)\n $root.onnx.TensorProto.encode(message.initializer[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 10, wireType 2 =*/82).string(message.docString);\n if (message.input != null && message.input.length)\n for (var i = 0; i < message.input.length; ++i)\n $root.onnx.ValueInfoProto.encode(message.input[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();\n if (message.output != null && message.output.length)\n for (var i = 0; i < message.output.length; ++i)\n $root.onnx.ValueInfoProto.encode(message.output[i], writer.uint32(/* id 12, wireType 2 =*/98).fork()).ldelim();\n if (message.valueInfo != null && message.valueInfo.length)\n for (var i = 0; i < message.valueInfo.length; ++i)\n $root.onnx.ValueInfoProto.encode(message.valueInfo[i], writer.uint32(/* id 13, wireType 2 =*/106).fork()).ldelim();\n if (message.quantizationAnnotation != null && message.quantizationAnnotation.length)\n for (var i = 0; i < message.quantizationAnnotation.length; ++i)\n $root.onnx.TensorAnnotation.encode(message.quantizationAnnotation[i], 
writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim();\n if (message.sparseInitializer != null && message.sparseInitializer.length)\n for (var i = 0; i < message.sparseInitializer.length; ++i)\n $root.onnx.SparseTensorProto.encode(message.sparseInitializer[i], writer.uint32(/* id 15, wireType 2 =*/122).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified GraphProto message, length delimited. Does not implicitly {@link onnx.GraphProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.IGraphProto} message GraphProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n GraphProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a GraphProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.GraphProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.GraphProto} GraphProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n GraphProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.GraphProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.node && message.node.length))\n message.node = [];\n message.node.push($root.onnx.NodeProto.decode(reader, reader.uint32()));\n break;\n }\n case 2: {\n message.name = reader.string();\n break;\n }\n case 5: {\n if (!(message.initializer && message.initializer.length))\n message.initializer = [];\n message.initializer.push($root.onnx.TensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 15: {\n if (!(message.sparseInitializer && message.sparseInitializer.length))\n message.sparseInitializer = [];\n message.sparseInitializer.push($root.onnx.SparseTensorProto.decode(reader, reader.uint32()));\n break;\n }\n case 10: {\n message.docString = reader.string();\n break;\n }\n case 11: {\n if (!(message.input && message.input.length))\n message.input = [];\n message.input.push($root.onnx.ValueInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 12: {\n if (!(message.output && message.output.length))\n message.output = [];\n message.output.push($root.onnx.ValueInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 13: {\n if (!(message.valueInfo && message.valueInfo.length))\n message.valueInfo = [];\n message.valueInfo.push($root.onnx.ValueInfoProto.decode(reader, reader.uint32()));\n break;\n }\n case 14: {\n if (!(message.quantizationAnnotation && message.quantizationAnnotation.length))\n message.quantizationAnnotation = [];\n message.quantizationAnnotation.push($root.onnx.TensorAnnotation.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a GraphProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.GraphProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.GraphProto} GraphProto\n * 
@throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n GraphProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a GraphProto message.\n * @function verify\n * @memberof onnx.GraphProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n GraphProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.node != null && message.hasOwnProperty(\"node\")) {\n if (!Array.isArray(message.node))\n return \"node: array expected\";\n for (var i = 0; i < message.node.length; ++i) {\n var error = $root.onnx.NodeProto.verify(message.node[i]);\n if (error)\n return \"node.\" + error;\n }\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.initializer != null && message.hasOwnProperty(\"initializer\")) {\n if (!Array.isArray(message.initializer))\n return \"initializer: array expected\";\n for (var i = 0; i < message.initializer.length; ++i) {\n var error = $root.onnx.TensorProto.verify(message.initializer[i]);\n if (error)\n return \"initializer.\" + error;\n }\n }\n if (message.sparseInitializer != null && message.hasOwnProperty(\"sparseInitializer\")) {\n if (!Array.isArray(message.sparseInitializer))\n return \"sparseInitializer: array expected\";\n for (var i = 0; i < message.sparseInitializer.length; ++i) {\n var error = $root.onnx.SparseTensorProto.verify(message.sparseInitializer[i]);\n if (error)\n return \"sparseInitializer.\" + error;\n }\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.input != null && message.hasOwnProperty(\"input\")) {\n if (!Array.isArray(message.input))\n return \"input: array expected\";\n for (var i = 0; i < message.input.length; ++i) {\n var error = $root.onnx.ValueInfoProto.verify(message.input[i]);\n if (error)\n return \"input.\" + error;\n }\n }\n if (message.output != null && message.hasOwnProperty(\"output\")) {\n if (!Array.isArray(message.output))\n return \"output: array expected\";\n for (var i = 0; i < message.output.length; ++i) {\n var error = $root.onnx.ValueInfoProto.verify(message.output[i]);\n if (error)\n return \"output.\" + error;\n }\n }\n if (message.valueInfo != null && message.hasOwnProperty(\"valueInfo\")) {\n if (!Array.isArray(message.valueInfo))\n return \"valueInfo: array expected\";\n for (var i = 0; i < message.valueInfo.length; ++i) {\n var error = $root.onnx.ValueInfoProto.verify(message.valueInfo[i]);\n if (error)\n return \"valueInfo.\" + error;\n }\n }\n if (message.quantizationAnnotation != null && message.hasOwnProperty(\"quantizationAnnotation\")) {\n if (!Array.isArray(message.quantizationAnnotation))\n return \"quantizationAnnotation: array expected\";\n for (var i = 0; i < message.quantizationAnnotation.length; ++i) {\n var error = $root.onnx.TensorAnnotation.verify(message.quantizationAnnotation[i]);\n if (error)\n return \"quantizationAnnotation.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a GraphProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.GraphProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.GraphProto} GraphProto\n */\n GraphProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.GraphProto)\n return object;\n var message = new $root.onnx.GraphProto();\n if (object.node) {\n if (!Array.isArray(object.node))\n throw TypeError(\".onnx.GraphProto.node: array expected\");\n message.node = [];\n for (var i = 0; i < object.node.length; ++i) {\n if (typeof object.node[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.node: object expected\");\n message.node[i] = $root.onnx.NodeProto.fromObject(object.node[i]);\n }\n }\n if (object.name != null)\n message.name = String(object.name);\n if (object.initializer) {\n if (!Array.isArray(object.initializer))\n throw TypeError(\".onnx.GraphProto.initializer: array expected\");\n message.initializer = [];\n for (var i = 0; i < object.initializer.length; ++i) {\n if (typeof object.initializer[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.initializer: object expected\");\n message.initializer[i] = $root.onnx.TensorProto.fromObject(object.initializer[i]);\n }\n }\n if (object.sparseInitializer) {\n if (!Array.isArray(object.sparseInitializer))\n throw TypeError(\".onnx.GraphProto.sparseInitializer: array expected\");\n message.sparseInitializer = [];\n for (var i = 0; i < object.sparseInitializer.length; ++i) {\n if (typeof object.sparseInitializer[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.sparseInitializer: object expected\");\n message.sparseInitializer[i] = $root.onnx.SparseTensorProto.fromObject(object.sparseInitializer[i]);\n }\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.input) {\n if (!Array.isArray(object.input))\n throw TypeError(\".onnx.GraphProto.input: array expected\");\n message.input = [];\n for (var i = 0; i < object.input.length; ++i) {\n if (typeof object.input[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.input: object expected\");\n message.input[i] = $root.onnx.ValueInfoProto.fromObject(object.input[i]);\n }\n }\n if (object.output) {\n if (!Array.isArray(object.output))\n throw TypeError(\".onnx.GraphProto.output: array expected\");\n message.output = [];\n for (var i = 0; i < object.output.length; ++i) {\n if (typeof object.output[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.output: object expected\");\n message.output[i] = $root.onnx.ValueInfoProto.fromObject(object.output[i]);\n }\n }\n if (object.valueInfo) {\n if (!Array.isArray(object.valueInfo))\n throw TypeError(\".onnx.GraphProto.valueInfo: array expected\");\n message.valueInfo = [];\n for (var i = 0; i < object.valueInfo.length; ++i) {\n if (typeof object.valueInfo[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.valueInfo: object expected\");\n message.valueInfo[i] = $root.onnx.ValueInfoProto.fromObject(object.valueInfo[i]);\n }\n }\n if (object.quantizationAnnotation) {\n if (!Array.isArray(object.quantizationAnnotation))\n throw TypeError(\".onnx.GraphProto.quantizationAnnotation: array expected\");\n message.quantizationAnnotation = [];\n for (var i = 0; i < object.quantizationAnnotation.length; ++i) {\n if (typeof object.quantizationAnnotation[i] !== \"object\")\n throw TypeError(\".onnx.GraphProto.quantizationAnnotation: object expected\");\n message.quantizationAnnotation[i] = 
$root.onnx.TensorAnnotation.fromObject(object.quantizationAnnotation[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a GraphProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.GraphProto\n * @static\n * @param {onnx.GraphProto} message GraphProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n GraphProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.node = [];\n object.initializer = [];\n object.input = [];\n object.output = [];\n object.valueInfo = [];\n object.quantizationAnnotation = [];\n object.sparseInitializer = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.docString = \"\";\n }\n if (message.node && message.node.length) {\n object.node = [];\n for (var j = 0; j < message.node.length; ++j)\n object.node[j] = $root.onnx.NodeProto.toObject(message.node[j], options);\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.initializer && message.initializer.length) {\n object.initializer = [];\n for (var j = 0; j < message.initializer.length; ++j)\n object.initializer[j] = $root.onnx.TensorProto.toObject(message.initializer[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.input && message.input.length) {\n object.input = [];\n for (var j = 0; j < message.input.length; ++j)\n object.input[j] = $root.onnx.ValueInfoProto.toObject(message.input[j], options);\n }\n if (message.output && message.output.length) {\n object.output = [];\n for (var j = 0; j < message.output.length; ++j)\n object.output[j] = $root.onnx.ValueInfoProto.toObject(message.output[j], options);\n }\n if (message.valueInfo && message.valueInfo.length) {\n object.valueInfo = [];\n for (var j = 0; j < message.valueInfo.length; ++j)\n object.valueInfo[j] = $root.onnx.ValueInfoProto.toObject(message.valueInfo[j], options);\n }\n if (message.quantizationAnnotation && message.quantizationAnnotation.length) {\n object.quantizationAnnotation = [];\n for (var j = 0; j < message.quantizationAnnotation.length; ++j)\n object.quantizationAnnotation[j] = $root.onnx.TensorAnnotation.toObject(message.quantizationAnnotation[j], options);\n }\n if (message.sparseInitializer && message.sparseInitializer.length) {\n object.sparseInitializer = [];\n for (var j = 0; j < message.sparseInitializer.length; ++j)\n object.sparseInitializer[j] = $root.onnx.SparseTensorProto.toObject(message.sparseInitializer[j], options);\n }\n return object;\n };\n\n /**\n * Converts this GraphProto to JSON.\n * @function toJSON\n * @memberof onnx.GraphProto\n * @instance\n * @returns {Object.} JSON object\n */\n GraphProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for GraphProto\n * @function getTypeUrl\n * @memberof onnx.GraphProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n GraphProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.GraphProto\";\n };\n\n return GraphProto;\n })();\n\n onnx.TensorProto = 
(function() {\n\n /**\n * Properties of a TensorProto.\n * @memberof onnx\n * @interface ITensorProto\n * @property {Array.|null} [dims] TensorProto dims\n * @property {number|null} [dataType] TensorProto dataType\n * @property {onnx.TensorProto.ISegment|null} [segment] TensorProto segment\n * @property {Array.|null} [floatData] TensorProto floatData\n * @property {Array.|null} [int32Data] TensorProto int32Data\n * @property {Array.|null} [stringData] TensorProto stringData\n * @property {Array.|null} [int64Data] TensorProto int64Data\n * @property {string|null} [name] TensorProto name\n * @property {string|null} [docString] TensorProto docString\n * @property {Uint8Array|null} [rawData] TensorProto rawData\n * @property {Array.|null} [externalData] TensorProto externalData\n * @property {onnx.TensorProto.DataLocation|null} [dataLocation] TensorProto dataLocation\n * @property {Array.|null} [doubleData] TensorProto doubleData\n * @property {Array.|null} [uint64Data] TensorProto uint64Data\n */\n\n /**\n * Constructs a new TensorProto.\n * @memberof onnx\n * @classdesc Represents a TensorProto.\n * @implements ITensorProto\n * @constructor\n * @param {onnx.ITensorProto=} [properties] Properties to set\n */\n function TensorProto(properties) {\n this.dims = [];\n this.floatData = [];\n this.int32Data = [];\n this.stringData = [];\n this.int64Data = [];\n this.externalData = [];\n this.doubleData = [];\n this.uint64Data = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TensorProto dims.\n * @member {Array.} dims\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.dims = $util.emptyArray;\n\n /**\n * TensorProto dataType.\n * @member {number} dataType\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.dataType = 0;\n\n /**\n * TensorProto segment.\n * @member {onnx.TensorProto.ISegment|null|undefined} segment\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.segment = null;\n\n /**\n * TensorProto floatData.\n * @member {Array.} floatData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.floatData = $util.emptyArray;\n\n /**\n * TensorProto int32Data.\n * @member {Array.} int32Data\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.int32Data = $util.emptyArray;\n\n /**\n * TensorProto stringData.\n * @member {Array.} stringData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.stringData = $util.emptyArray;\n\n /**\n * TensorProto int64Data.\n * @member {Array.} int64Data\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.int64Data = $util.emptyArray;\n\n /**\n * TensorProto name.\n * @member {string} name\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.name = \"\";\n\n /**\n * TensorProto docString.\n * @member {string} docString\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.docString = \"\";\n\n /**\n * TensorProto rawData.\n * @member {Uint8Array} rawData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.rawData = $util.newBuffer([]);\n\n /**\n * TensorProto externalData.\n * @member {Array.} externalData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.externalData = $util.emptyArray;\n\n /**\n * TensorProto dataLocation.\n * @member {onnx.TensorProto.DataLocation} dataLocation\n * @memberof 
onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.dataLocation = 0;\n\n /**\n * TensorProto doubleData.\n * @member {Array.} doubleData\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.doubleData = $util.emptyArray;\n\n /**\n * TensorProto uint64Data.\n * @member {Array.} uint64Data\n * @memberof onnx.TensorProto\n * @instance\n */\n TensorProto.prototype.uint64Data = $util.emptyArray;\n\n /**\n * Creates a new TensorProto instance using the specified properties.\n * @function create\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.ITensorProto=} [properties] Properties to set\n * @returns {onnx.TensorProto} TensorProto instance\n */\n TensorProto.create = function create(properties) {\n return new TensorProto(properties);\n };\n\n /**\n * Encodes the specified TensorProto message. Does not implicitly {@link onnx.TensorProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.ITensorProto} message TensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.dims != null && message.dims.length) {\n writer.uint32(/* id 1, wireType 2 =*/10).fork();\n for (var i = 0; i < message.dims.length; ++i)\n writer.int64(message.dims[i]);\n writer.ldelim();\n }\n if (message.dataType != null && Object.hasOwnProperty.call(message, \"dataType\"))\n writer.uint32(/* id 2, wireType 0 =*/16).int32(message.dataType);\n if (message.segment != null && Object.hasOwnProperty.call(message, \"segment\"))\n $root.onnx.TensorProto.Segment.encode(message.segment, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim();\n if (message.floatData != null && message.floatData.length) {\n writer.uint32(/* id 4, wireType 2 =*/34).fork();\n for (var i = 0; i < message.floatData.length; ++i)\n writer.float(message.floatData[i]);\n writer.ldelim();\n }\n if (message.int32Data != null && message.int32Data.length) {\n writer.uint32(/* id 5, wireType 2 =*/42).fork();\n for (var i = 0; i < message.int32Data.length; ++i)\n writer.int32(message.int32Data[i]);\n writer.ldelim();\n }\n if (message.stringData != null && message.stringData.length)\n for (var i = 0; i < message.stringData.length; ++i)\n writer.uint32(/* id 6, wireType 2 =*/50).bytes(message.stringData[i]);\n if (message.int64Data != null && message.int64Data.length) {\n writer.uint32(/* id 7, wireType 2 =*/58).fork();\n for (var i = 0; i < message.int64Data.length; ++i)\n writer.int64(message.int64Data[i]);\n writer.ldelim();\n }\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 8, wireType 2 =*/66).string(message.name);\n if (message.rawData != null && Object.hasOwnProperty.call(message, \"rawData\"))\n writer.uint32(/* id 9, wireType 2 =*/74).bytes(message.rawData);\n if (message.doubleData != null && message.doubleData.length) {\n writer.uint32(/* id 10, wireType 2 =*/82).fork();\n for (var i = 0; i < message.doubleData.length; ++i)\n writer.double(message.doubleData[i]);\n writer.ldelim();\n }\n if (message.uint64Data != null && message.uint64Data.length) {\n writer.uint32(/* id 11, wireType 2 =*/90).fork();\n for (var i = 0; i < message.uint64Data.length; ++i)\n writer.uint64(message.uint64Data[i]);\n writer.ldelim();\n }\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n 
writer.uint32(/* id 12, wireType 2 =*/98).string(message.docString);\n if (message.externalData != null && message.externalData.length)\n for (var i = 0; i < message.externalData.length; ++i)\n $root.onnx.StringStringEntryProto.encode(message.externalData[i], writer.uint32(/* id 13, wireType 2 =*/106).fork()).ldelim();\n if (message.dataLocation != null && Object.hasOwnProperty.call(message, \"dataLocation\"))\n writer.uint32(/* id 14, wireType 0 =*/112).int32(message.dataLocation);\n return writer;\n };\n\n /**\n * Encodes the specified TensorProto message, length delimited. Does not implicitly {@link onnx.TensorProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.ITensorProto} message TensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TensorProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorProto} TensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TensorProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.dims && message.dims.length))\n message.dims = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.dims.push(reader.int64());\n } else\n message.dims.push(reader.int64());\n break;\n }\n case 2: {\n message.dataType = reader.int32();\n break;\n }\n case 3: {\n message.segment = $root.onnx.TensorProto.Segment.decode(reader, reader.uint32());\n break;\n }\n case 4: {\n if (!(message.floatData && message.floatData.length))\n message.floatData = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.floatData.push(reader.float());\n } else\n message.floatData.push(reader.float());\n break;\n }\n case 5: {\n if (!(message.int32Data && message.int32Data.length))\n message.int32Data = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.int32Data.push(reader.int32());\n } else\n message.int32Data.push(reader.int32());\n break;\n }\n case 6: {\n if (!(message.stringData && message.stringData.length))\n message.stringData = [];\n message.stringData.push(reader.bytes());\n break;\n }\n case 7: {\n if (!(message.int64Data && message.int64Data.length))\n message.int64Data = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.int64Data.push(reader.int64());\n } else\n message.int64Data.push(reader.int64());\n break;\n }\n case 8: {\n message.name = reader.string();\n break;\n }\n case 12: {\n message.docString = reader.string();\n break;\n }\n case 9: {\n message.rawData = reader.bytes();\n break;\n }\n case 13: {\n if (!(message.externalData && 
message.externalData.length))\n message.externalData = [];\n message.externalData.push($root.onnx.StringStringEntryProto.decode(reader, reader.uint32()));\n break;\n }\n case 14: {\n message.dataLocation = reader.int32();\n break;\n }\n case 10: {\n if (!(message.doubleData && message.doubleData.length))\n message.doubleData = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.doubleData.push(reader.double());\n } else\n message.doubleData.push(reader.double());\n break;\n }\n case 11: {\n if (!(message.uint64Data && message.uint64Data.length))\n message.uint64Data = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.uint64Data.push(reader.uint64());\n } else\n message.uint64Data.push(reader.uint64());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TensorProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorProto} TensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TensorProto message.\n * @function verify\n * @memberof onnx.TensorProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TensorProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.dims != null && message.hasOwnProperty(\"dims\")) {\n if (!Array.isArray(message.dims))\n return \"dims: array expected\";\n for (var i = 0; i < message.dims.length; ++i)\n if (!$util.isInteger(message.dims[i]) && !(message.dims[i] && $util.isInteger(message.dims[i].low) && $util.isInteger(message.dims[i].high)))\n return \"dims: integer|Long[] expected\";\n }\n if (message.dataType != null && message.hasOwnProperty(\"dataType\"))\n if (!$util.isInteger(message.dataType))\n return \"dataType: integer expected\";\n if (message.segment != null && message.hasOwnProperty(\"segment\")) {\n var error = $root.onnx.TensorProto.Segment.verify(message.segment);\n if (error)\n return \"segment.\" + error;\n }\n if (message.floatData != null && message.hasOwnProperty(\"floatData\")) {\n if (!Array.isArray(message.floatData))\n return \"floatData: array expected\";\n for (var i = 0; i < message.floatData.length; ++i)\n if (typeof message.floatData[i] !== \"number\")\n return \"floatData: number[] expected\";\n }\n if (message.int32Data != null && message.hasOwnProperty(\"int32Data\")) {\n if (!Array.isArray(message.int32Data))\n return \"int32Data: array expected\";\n for (var i = 0; i < message.int32Data.length; ++i)\n if (!$util.isInteger(message.int32Data[i]))\n return \"int32Data: integer[] expected\";\n }\n if (message.stringData != null && message.hasOwnProperty(\"stringData\")) {\n if (!Array.isArray(message.stringData))\n return \"stringData: array expected\";\n for (var i = 0; i < message.stringData.length; ++i)\n if (!(message.stringData[i] && typeof message.stringData[i].length === 
\"number\" || $util.isString(message.stringData[i])))\n return \"stringData: buffer[] expected\";\n }\n if (message.int64Data != null && message.hasOwnProperty(\"int64Data\")) {\n if (!Array.isArray(message.int64Data))\n return \"int64Data: array expected\";\n for (var i = 0; i < message.int64Data.length; ++i)\n if (!$util.isInteger(message.int64Data[i]) && !(message.int64Data[i] && $util.isInteger(message.int64Data[i].low) && $util.isInteger(message.int64Data[i].high)))\n return \"int64Data: integer|Long[] expected\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.rawData != null && message.hasOwnProperty(\"rawData\"))\n if (!(message.rawData && typeof message.rawData.length === \"number\" || $util.isString(message.rawData)))\n return \"rawData: buffer expected\";\n if (message.externalData != null && message.hasOwnProperty(\"externalData\")) {\n if (!Array.isArray(message.externalData))\n return \"externalData: array expected\";\n for (var i = 0; i < message.externalData.length; ++i) {\n var error = $root.onnx.StringStringEntryProto.verify(message.externalData[i]);\n if (error)\n return \"externalData.\" + error;\n }\n }\n if (message.dataLocation != null && message.hasOwnProperty(\"dataLocation\"))\n switch (message.dataLocation) {\n default:\n return \"dataLocation: enum value expected\";\n case 0:\n case 1:\n break;\n }\n if (message.doubleData != null && message.hasOwnProperty(\"doubleData\")) {\n if (!Array.isArray(message.doubleData))\n return \"doubleData: array expected\";\n for (var i = 0; i < message.doubleData.length; ++i)\n if (typeof message.doubleData[i] !== \"number\")\n return \"doubleData: number[] expected\";\n }\n if (message.uint64Data != null && message.hasOwnProperty(\"uint64Data\")) {\n if (!Array.isArray(message.uint64Data))\n return \"uint64Data: array expected\";\n for (var i = 0; i < message.uint64Data.length; ++i)\n if (!$util.isInteger(message.uint64Data[i]) && !(message.uint64Data[i] && $util.isInteger(message.uint64Data[i].low) && $util.isInteger(message.uint64Data[i].high)))\n return \"uint64Data: integer|Long[] expected\";\n }\n return null;\n };\n\n /**\n * Creates a TensorProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorProto} TensorProto\n */\n TensorProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorProto)\n return object;\n var message = new $root.onnx.TensorProto();\n if (object.dims) {\n if (!Array.isArray(object.dims))\n throw TypeError(\".onnx.TensorProto.dims: array expected\");\n message.dims = [];\n for (var i = 0; i < object.dims.length; ++i)\n if ($util.Long)\n (message.dims[i] = $util.Long.fromValue(object.dims[i])).unsigned = false;\n else if (typeof object.dims[i] === \"string\")\n message.dims[i] = parseInt(object.dims[i], 10);\n else if (typeof object.dims[i] === \"number\")\n message.dims[i] = object.dims[i];\n else if (typeof object.dims[i] === \"object\")\n message.dims[i] = new $util.LongBits(object.dims[i].low >>> 0, object.dims[i].high >>> 0).toNumber();\n }\n if (object.dataType != null)\n message.dataType = object.dataType | 0;\n if (object.segment != null) {\n if (typeof object.segment !== \"object\")\n throw TypeError(\".onnx.TensorProto.segment: object expected\");\n message.segment = $root.onnx.TensorProto.Segment.fromObject(object.segment);\n }\n if (object.floatData) {\n if (!Array.isArray(object.floatData))\n throw TypeError(\".onnx.TensorProto.floatData: array expected\");\n message.floatData = [];\n for (var i = 0; i < object.floatData.length; ++i)\n message.floatData[i] = Number(object.floatData[i]);\n }\n if (object.int32Data) {\n if (!Array.isArray(object.int32Data))\n throw TypeError(\".onnx.TensorProto.int32Data: array expected\");\n message.int32Data = [];\n for (var i = 0; i < object.int32Data.length; ++i)\n message.int32Data[i] = object.int32Data[i] | 0;\n }\n if (object.stringData) {\n if (!Array.isArray(object.stringData))\n throw TypeError(\".onnx.TensorProto.stringData: array expected\");\n message.stringData = [];\n for (var i = 0; i < object.stringData.length; ++i)\n if (typeof object.stringData[i] === \"string\")\n $util.base64.decode(object.stringData[i], message.stringData[i] = $util.newBuffer($util.base64.length(object.stringData[i])), 0);\n else if (object.stringData[i].length >= 0)\n message.stringData[i] = object.stringData[i];\n }\n if (object.int64Data) {\n if (!Array.isArray(object.int64Data))\n throw TypeError(\".onnx.TensorProto.int64Data: array expected\");\n message.int64Data = [];\n for (var i = 0; i < object.int64Data.length; ++i)\n if ($util.Long)\n (message.int64Data[i] = $util.Long.fromValue(object.int64Data[i])).unsigned = false;\n else if (typeof object.int64Data[i] === \"string\")\n message.int64Data[i] = parseInt(object.int64Data[i], 10);\n else if (typeof object.int64Data[i] === \"number\")\n message.int64Data[i] = object.int64Data[i];\n else if (typeof object.int64Data[i] === \"object\")\n message.int64Data[i] = new $util.LongBits(object.int64Data[i].low >>> 0, object.int64Data[i].high >>> 0).toNumber();\n }\n if (object.name != null)\n message.name = String(object.name);\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.rawData != null)\n if (typeof object.rawData === \"string\")\n $util.base64.decode(object.rawData, message.rawData = $util.newBuffer($util.base64.length(object.rawData)), 0);\n else if (object.rawData.length >= 0)\n message.rawData = object.rawData;\n if (object.externalData) {\n if (!Array.isArray(object.externalData))\n throw 
TypeError(\".onnx.TensorProto.externalData: array expected\");\n message.externalData = [];\n for (var i = 0; i < object.externalData.length; ++i) {\n if (typeof object.externalData[i] !== \"object\")\n throw TypeError(\".onnx.TensorProto.externalData: object expected\");\n message.externalData[i] = $root.onnx.StringStringEntryProto.fromObject(object.externalData[i]);\n }\n }\n switch (object.dataLocation) {\n default:\n if (typeof object.dataLocation === \"number\") {\n message.dataLocation = object.dataLocation;\n break;\n }\n break;\n case \"DEFAULT\":\n case 0:\n message.dataLocation = 0;\n break;\n case \"EXTERNAL\":\n case 1:\n message.dataLocation = 1;\n break;\n }\n if (object.doubleData) {\n if (!Array.isArray(object.doubleData))\n throw TypeError(\".onnx.TensorProto.doubleData: array expected\");\n message.doubleData = [];\n for (var i = 0; i < object.doubleData.length; ++i)\n message.doubleData[i] = Number(object.doubleData[i]);\n }\n if (object.uint64Data) {\n if (!Array.isArray(object.uint64Data))\n throw TypeError(\".onnx.TensorProto.uint64Data: array expected\");\n message.uint64Data = [];\n for (var i = 0; i < object.uint64Data.length; ++i)\n if ($util.Long)\n (message.uint64Data[i] = $util.Long.fromValue(object.uint64Data[i])).unsigned = true;\n else if (typeof object.uint64Data[i] === \"string\")\n message.uint64Data[i] = parseInt(object.uint64Data[i], 10);\n else if (typeof object.uint64Data[i] === \"number\")\n message.uint64Data[i] = object.uint64Data[i];\n else if (typeof object.uint64Data[i] === \"object\")\n message.uint64Data[i] = new $util.LongBits(object.uint64Data[i].low >>> 0, object.uint64Data[i].high >>> 0).toNumber(true);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TensorProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorProto\n * @static\n * @param {onnx.TensorProto} message TensorProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TensorProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.dims = [];\n object.floatData = [];\n object.int32Data = [];\n object.stringData = [];\n object.int64Data = [];\n object.doubleData = [];\n object.uint64Data = [];\n object.externalData = [];\n }\n if (options.defaults) {\n object.dataType = 0;\n object.segment = null;\n object.name = \"\";\n if (options.bytes === String)\n object.rawData = \"\";\n else {\n object.rawData = [];\n if (options.bytes !== Array)\n object.rawData = $util.newBuffer(object.rawData);\n }\n object.docString = \"\";\n object.dataLocation = options.enums === String ? \"DEFAULT\" : 0;\n }\n if (message.dims && message.dims.length) {\n object.dims = [];\n for (var j = 0; j < message.dims.length; ++j)\n if (typeof message.dims[j] === \"number\")\n object.dims[j] = options.longs === String ? String(message.dims[j]) : message.dims[j];\n else\n object.dims[j] = options.longs === String ? $util.Long.prototype.toString.call(message.dims[j]) : options.longs === Number ? 
new $util.LongBits(message.dims[j].low >>> 0, message.dims[j].high >>> 0).toNumber() : message.dims[j];\n }\n if (message.dataType != null && message.hasOwnProperty(\"dataType\"))\n object.dataType = message.dataType;\n if (message.segment != null && message.hasOwnProperty(\"segment\"))\n object.segment = $root.onnx.TensorProto.Segment.toObject(message.segment, options);\n if (message.floatData && message.floatData.length) {\n object.floatData = [];\n for (var j = 0; j < message.floatData.length; ++j)\n object.floatData[j] = options.json && !isFinite(message.floatData[j]) ? String(message.floatData[j]) : message.floatData[j];\n }\n if (message.int32Data && message.int32Data.length) {\n object.int32Data = [];\n for (var j = 0; j < message.int32Data.length; ++j)\n object.int32Data[j] = message.int32Data[j];\n }\n if (message.stringData && message.stringData.length) {\n object.stringData = [];\n for (var j = 0; j < message.stringData.length; ++j)\n object.stringData[j] = options.bytes === String ? $util.base64.encode(message.stringData[j], 0, message.stringData[j].length) : options.bytes === Array ? Array.prototype.slice.call(message.stringData[j]) : message.stringData[j];\n }\n if (message.int64Data && message.int64Data.length) {\n object.int64Data = [];\n for (var j = 0; j < message.int64Data.length; ++j)\n if (typeof message.int64Data[j] === \"number\")\n object.int64Data[j] = options.longs === String ? String(message.int64Data[j]) : message.int64Data[j];\n else\n object.int64Data[j] = options.longs === String ? $util.Long.prototype.toString.call(message.int64Data[j]) : options.longs === Number ? new $util.LongBits(message.int64Data[j].low >>> 0, message.int64Data[j].high >>> 0).toNumber() : message.int64Data[j];\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.rawData != null && message.hasOwnProperty(\"rawData\"))\n object.rawData = options.bytes === String ? $util.base64.encode(message.rawData, 0, message.rawData.length) : options.bytes === Array ? Array.prototype.slice.call(message.rawData) : message.rawData;\n if (message.doubleData && message.doubleData.length) {\n object.doubleData = [];\n for (var j = 0; j < message.doubleData.length; ++j)\n object.doubleData[j] = options.json && !isFinite(message.doubleData[j]) ? String(message.doubleData[j]) : message.doubleData[j];\n }\n if (message.uint64Data && message.uint64Data.length) {\n object.uint64Data = [];\n for (var j = 0; j < message.uint64Data.length; ++j)\n if (typeof message.uint64Data[j] === \"number\")\n object.uint64Data[j] = options.longs === String ? String(message.uint64Data[j]) : message.uint64Data[j];\n else\n object.uint64Data[j] = options.longs === String ? $util.Long.prototype.toString.call(message.uint64Data[j]) : options.longs === Number ? new $util.LongBits(message.uint64Data[j].low >>> 0, message.uint64Data[j].high >>> 0).toNumber(true) : message.uint64Data[j];\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.externalData && message.externalData.length) {\n object.externalData = [];\n for (var j = 0; j < message.externalData.length; ++j)\n object.externalData[j] = $root.onnx.StringStringEntryProto.toObject(message.externalData[j], options);\n }\n if (message.dataLocation != null && message.hasOwnProperty(\"dataLocation\"))\n object.dataLocation = options.enums === String ? $root.onnx.TensorProto.DataLocation[message.dataLocation] === undefined ? 
message.dataLocation : $root.onnx.TensorProto.DataLocation[message.dataLocation] : message.dataLocation;\n return object;\n };\n\n /**\n * Converts this TensorProto to JSON.\n * @function toJSON\n * @memberof onnx.TensorProto\n * @instance\n * @returns {Object.} JSON object\n */\n TensorProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TensorProto\n * @function getTypeUrl\n * @memberof onnx.TensorProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TensorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorProto\";\n };\n\n /**\n * DataType enum.\n * @name onnx.TensorProto.DataType\n * @enum {number}\n * @property {number} UNDEFINED=0 UNDEFINED value\n * @property {number} FLOAT=1 FLOAT value\n * @property {number} UINT8=2 UINT8 value\n * @property {number} INT8=3 INT8 value\n * @property {number} UINT16=4 UINT16 value\n * @property {number} INT16=5 INT16 value\n * @property {number} INT32=6 INT32 value\n * @property {number} INT64=7 INT64 value\n * @property {number} STRING=8 STRING value\n * @property {number} BOOL=9 BOOL value\n * @property {number} FLOAT16=10 FLOAT16 value\n * @property {number} DOUBLE=11 DOUBLE value\n * @property {number} UINT32=12 UINT32 value\n * @property {number} UINT64=13 UINT64 value\n * @property {number} COMPLEX64=14 COMPLEX64 value\n * @property {number} COMPLEX128=15 COMPLEX128 value\n * @property {number} BFLOAT16=16 BFLOAT16 value\n * @property {number} FLOAT8E4M3FN=17 FLOAT8E4M3FN value\n * @property {number} FLOAT8E4M3FNUZ=18 FLOAT8E4M3FNUZ value\n * @property {number} FLOAT8E5M2=19 FLOAT8E5M2 value\n * @property {number} FLOAT8E5M2FNUZ=20 FLOAT8E5M2FNUZ value\n */\n TensorProto.DataType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"UNDEFINED\"] = 0;\n values[valuesById[1] = \"FLOAT\"] = 1;\n values[valuesById[2] = \"UINT8\"] = 2;\n values[valuesById[3] = \"INT8\"] = 3;\n values[valuesById[4] = \"UINT16\"] = 4;\n values[valuesById[5] = \"INT16\"] = 5;\n values[valuesById[6] = \"INT32\"] = 6;\n values[valuesById[7] = \"INT64\"] = 7;\n values[valuesById[8] = \"STRING\"] = 8;\n values[valuesById[9] = \"BOOL\"] = 9;\n values[valuesById[10] = \"FLOAT16\"] = 10;\n values[valuesById[11] = \"DOUBLE\"] = 11;\n values[valuesById[12] = \"UINT32\"] = 12;\n values[valuesById[13] = \"UINT64\"] = 13;\n values[valuesById[14] = \"COMPLEX64\"] = 14;\n values[valuesById[15] = \"COMPLEX128\"] = 15;\n values[valuesById[16] = \"BFLOAT16\"] = 16;\n values[valuesById[17] = \"FLOAT8E4M3FN\"] = 17;\n values[valuesById[18] = \"FLOAT8E4M3FNUZ\"] = 18;\n values[valuesById[19] = \"FLOAT8E5M2\"] = 19;\n values[valuesById[20] = \"FLOAT8E5M2FNUZ\"] = 20;\n return values;\n })();\n\n TensorProto.Segment = (function() {\n\n /**\n * Properties of a Segment.\n * @memberof onnx.TensorProto\n * @interface ISegment\n * @property {number|Long|null} [begin] Segment begin\n * @property {number|Long|null} [end] Segment end\n */\n\n /**\n * Constructs a new Segment.\n * @memberof onnx.TensorProto\n * @classdesc Represents a Segment.\n * @implements ISegment\n * @constructor\n * @param {onnx.TensorProto.ISegment=} [properties] Properties to set\n */\n function Segment(properties) {\n if 
(properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Segment begin.\n * @member {number|Long} begin\n * @memberof onnx.TensorProto.Segment\n * @instance\n */\n Segment.prototype.begin = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * Segment end.\n * @member {number|Long} end\n * @memberof onnx.TensorProto.Segment\n * @instance\n */\n Segment.prototype.end = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * Creates a new Segment instance using the specified properties.\n * @function create\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.ISegment=} [properties] Properties to set\n * @returns {onnx.TensorProto.Segment} Segment instance\n */\n Segment.create = function create(properties) {\n return new Segment(properties);\n };\n\n /**\n * Encodes the specified Segment message. Does not implicitly {@link onnx.TensorProto.Segment.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.ISegment} message Segment message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Segment.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.begin != null && Object.hasOwnProperty.call(message, \"begin\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int64(message.begin);\n if (message.end != null && Object.hasOwnProperty.call(message, \"end\"))\n writer.uint32(/* id 2, wireType 0 =*/16).int64(message.end);\n return writer;\n };\n\n /**\n * Encodes the specified Segment message, length delimited. Does not implicitly {@link onnx.TensorProto.Segment.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.ISegment} message Segment message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Segment.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Segment message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorProto.Segment} Segment\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Segment.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TensorProto.Segment();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.begin = reader.int64();\n break;\n }\n case 2: {\n message.end = reader.int64();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Segment message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorProto.Segment} Segment\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Segment.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Segment message.\n * @function verify\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Segment.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.begin != null && message.hasOwnProperty(\"begin\"))\n if (!$util.isInteger(message.begin) && !(message.begin && $util.isInteger(message.begin.low) && $util.isInteger(message.begin.high)))\n return \"begin: integer|Long expected\";\n if (message.end != null && message.hasOwnProperty(\"end\"))\n if (!$util.isInteger(message.end) && !(message.end && $util.isInteger(message.end.low) && $util.isInteger(message.end.high)))\n return \"end: integer|Long expected\";\n return null;\n };\n\n /**\n * Creates a Segment message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorProto.Segment} Segment\n */\n Segment.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorProto.Segment)\n return object;\n var message = new $root.onnx.TensorProto.Segment();\n if (object.begin != null)\n if ($util.Long)\n (message.begin = $util.Long.fromValue(object.begin)).unsigned = false;\n else if (typeof object.begin === \"string\")\n message.begin = parseInt(object.begin, 10);\n else if (typeof object.begin === \"number\")\n message.begin = object.begin;\n else if (typeof object.begin === \"object\")\n message.begin = new $util.LongBits(object.begin.low >>> 0, object.begin.high >>> 0).toNumber();\n if (object.end != null)\n if ($util.Long)\n (message.end = $util.Long.fromValue(object.end)).unsigned = false;\n else if (typeof object.end === \"string\")\n message.end = parseInt(object.end, 10);\n else if (typeof object.end === \"number\")\n message.end = object.end;\n else if (typeof object.end === \"object\")\n message.end = new $util.LongBits(object.end.low >>> 0, object.end.high >>> 0).toNumber();\n return message;\n };\n\n /**\n * Creates a plain object from a Segment message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {onnx.TensorProto.Segment} message Segment\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Segment.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.begin = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.begin = options.longs === String ? \"0\" : 0;\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.end = options.longs === String ? long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.end = options.longs === String ? \"0\" : 0;\n }\n if (message.begin != null && message.hasOwnProperty(\"begin\"))\n if (typeof message.begin === \"number\")\n object.begin = options.longs === String ? String(message.begin) : message.begin;\n else\n object.begin = options.longs === String ? $util.Long.prototype.toString.call(message.begin) : options.longs === Number ? new $util.LongBits(message.begin.low >>> 0, message.begin.high >>> 0).toNumber() : message.begin;\n if (message.end != null && message.hasOwnProperty(\"end\"))\n if (typeof message.end === \"number\")\n object.end = options.longs === String ? String(message.end) : message.end;\n else\n object.end = options.longs === String ? $util.Long.prototype.toString.call(message.end) : options.longs === Number ? new $util.LongBits(message.end.low >>> 0, message.end.high >>> 0).toNumber() : message.end;\n return object;\n };\n\n /**\n * Converts this Segment to JSON.\n * @function toJSON\n * @memberof onnx.TensorProto.Segment\n * @instance\n * @returns {Object.} JSON object\n */\n Segment.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Segment\n * @function getTypeUrl\n * @memberof onnx.TensorProto.Segment\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Segment.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorProto.Segment\";\n };\n\n return Segment;\n })();\n\n /**\n * DataLocation enum.\n * @name onnx.TensorProto.DataLocation\n * @enum {number}\n * @property {number} DEFAULT=0 DEFAULT value\n * @property {number} EXTERNAL=1 EXTERNAL value\n */\n TensorProto.DataLocation = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"DEFAULT\"] = 0;\n values[valuesById[1] = \"EXTERNAL\"] = 1;\n return values;\n })();\n\n return TensorProto;\n })();\n\n onnx.SparseTensorProto = (function() {\n\n /**\n * Properties of a SparseTensorProto.\n * @memberof onnx\n * @interface ISparseTensorProto\n * @property {onnx.ITensorProto|null} [values] SparseTensorProto values\n * @property {onnx.ITensorProto|null} [indices] SparseTensorProto indices\n * @property {Array.|null} [dims] SparseTensorProto dims\n */\n\n /**\n * Constructs a new SparseTensorProto.\n * @memberof onnx\n * @classdesc Represents a SparseTensorProto.\n * @implements ISparseTensorProto\n * @constructor\n * @param {onnx.ISparseTensorProto=} [properties] Properties to 
set\n */\n function SparseTensorProto(properties) {\n this.dims = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * SparseTensorProto values.\n * @member {onnx.ITensorProto|null|undefined} values\n * @memberof onnx.SparseTensorProto\n * @instance\n */\n SparseTensorProto.prototype.values = null;\n\n /**\n * SparseTensorProto indices.\n * @member {onnx.ITensorProto|null|undefined} indices\n * @memberof onnx.SparseTensorProto\n * @instance\n */\n SparseTensorProto.prototype.indices = null;\n\n /**\n * SparseTensorProto dims.\n * @member {Array.} dims\n * @memberof onnx.SparseTensorProto\n * @instance\n */\n SparseTensorProto.prototype.dims = $util.emptyArray;\n\n /**\n * Creates a new SparseTensorProto instance using the specified properties.\n * @function create\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.ISparseTensorProto=} [properties] Properties to set\n * @returns {onnx.SparseTensorProto} SparseTensorProto instance\n */\n SparseTensorProto.create = function create(properties) {\n return new SparseTensorProto(properties);\n };\n\n /**\n * Encodes the specified SparseTensorProto message. Does not implicitly {@link onnx.SparseTensorProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.ISparseTensorProto} message SparseTensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensorProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.values != null && Object.hasOwnProperty.call(message, \"values\"))\n $root.onnx.TensorProto.encode(message.values, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.indices != null && Object.hasOwnProperty.call(message, \"indices\"))\n $root.onnx.TensorProto.encode(message.indices, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n if (message.dims != null && message.dims.length) {\n writer.uint32(/* id 3, wireType 2 =*/26).fork();\n for (var i = 0; i < message.dims.length; ++i)\n writer.int64(message.dims[i]);\n writer.ldelim();\n }\n return writer;\n };\n\n /**\n * Encodes the specified SparseTensorProto message, length delimited. 
Does not implicitly {@link onnx.SparseTensorProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.ISparseTensorProto} message SparseTensorProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensorProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a SparseTensorProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.SparseTensorProto} SparseTensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensorProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.SparseTensorProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.values = $root.onnx.TensorProto.decode(reader, reader.uint32());\n break;\n }\n case 2: {\n message.indices = $root.onnx.TensorProto.decode(reader, reader.uint32());\n break;\n }\n case 3: {\n if (!(message.dims && message.dims.length))\n message.dims = [];\n if ((tag & 7) === 2) {\n var end2 = reader.uint32() + reader.pos;\n while (reader.pos < end2)\n message.dims.push(reader.int64());\n } else\n message.dims.push(reader.int64());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a SparseTensorProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.SparseTensorProto} SparseTensorProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensorProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a SparseTensorProto message.\n * @function verify\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n SparseTensorProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.values != null && message.hasOwnProperty(\"values\")) {\n var error = $root.onnx.TensorProto.verify(message.values);\n if (error)\n return \"values.\" + error;\n }\n if (message.indices != null && message.hasOwnProperty(\"indices\")) {\n var error = $root.onnx.TensorProto.verify(message.indices);\n if (error)\n return \"indices.\" + error;\n }\n if (message.dims != null && message.hasOwnProperty(\"dims\")) {\n if (!Array.isArray(message.dims))\n return \"dims: array expected\";\n for (var i = 0; i < message.dims.length; ++i)\n if (!$util.isInteger(message.dims[i]) && !(message.dims[i] && 
$util.isInteger(message.dims[i].low) && $util.isInteger(message.dims[i].high)))\n return \"dims: integer|Long[] expected\";\n }\n return null;\n };\n\n /**\n * Creates a SparseTensorProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.SparseTensorProto} SparseTensorProto\n */\n SparseTensorProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.SparseTensorProto)\n return object;\n var message = new $root.onnx.SparseTensorProto();\n if (object.values != null) {\n if (typeof object.values !== \"object\")\n throw TypeError(\".onnx.SparseTensorProto.values: object expected\");\n message.values = $root.onnx.TensorProto.fromObject(object.values);\n }\n if (object.indices != null) {\n if (typeof object.indices !== \"object\")\n throw TypeError(\".onnx.SparseTensorProto.indices: object expected\");\n message.indices = $root.onnx.TensorProto.fromObject(object.indices);\n }\n if (object.dims) {\n if (!Array.isArray(object.dims))\n throw TypeError(\".onnx.SparseTensorProto.dims: array expected\");\n message.dims = [];\n for (var i = 0; i < object.dims.length; ++i)\n if ($util.Long)\n (message.dims[i] = $util.Long.fromValue(object.dims[i])).unsigned = false;\n else if (typeof object.dims[i] === \"string\")\n message.dims[i] = parseInt(object.dims[i], 10);\n else if (typeof object.dims[i] === \"number\")\n message.dims[i] = object.dims[i];\n else if (typeof object.dims[i] === \"object\")\n message.dims[i] = new $util.LongBits(object.dims[i].low >>> 0, object.dims[i].high >>> 0).toNumber();\n }\n return message;\n };\n\n /**\n * Creates a plain object from a SparseTensorProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {onnx.SparseTensorProto} message SparseTensorProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n SparseTensorProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults)\n object.dims = [];\n if (options.defaults) {\n object.values = null;\n object.indices = null;\n }\n if (message.values != null && message.hasOwnProperty(\"values\"))\n object.values = $root.onnx.TensorProto.toObject(message.values, options);\n if (message.indices != null && message.hasOwnProperty(\"indices\"))\n object.indices = $root.onnx.TensorProto.toObject(message.indices, options);\n if (message.dims && message.dims.length) {\n object.dims = [];\n for (var j = 0; j < message.dims.length; ++j)\n if (typeof message.dims[j] === \"number\")\n object.dims[j] = options.longs === String ? String(message.dims[j]) : message.dims[j];\n else\n object.dims[j] = options.longs === String ? $util.Long.prototype.toString.call(message.dims[j]) : options.longs === Number ? 
new $util.LongBits(message.dims[j].low >>> 0, message.dims[j].high >>> 0).toNumber() : message.dims[j];\n }\n return object;\n };\n\n /**\n * Converts this SparseTensorProto to JSON.\n * @function toJSON\n * @memberof onnx.SparseTensorProto\n * @instance\n * @returns {Object.} JSON object\n */\n SparseTensorProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for SparseTensorProto\n * @function getTypeUrl\n * @memberof onnx.SparseTensorProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n SparseTensorProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.SparseTensorProto\";\n };\n\n return SparseTensorProto;\n })();\n\n onnx.TensorShapeProto = (function() {\n\n /**\n * Properties of a TensorShapeProto.\n * @memberof onnx\n * @interface ITensorShapeProto\n * @property {Array.|null} [dim] TensorShapeProto dim\n */\n\n /**\n * Constructs a new TensorShapeProto.\n * @memberof onnx\n * @classdesc Represents a TensorShapeProto.\n * @implements ITensorShapeProto\n * @constructor\n * @param {onnx.ITensorShapeProto=} [properties] Properties to set\n */\n function TensorShapeProto(properties) {\n this.dim = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TensorShapeProto dim.\n * @member {Array.} dim\n * @memberof onnx.TensorShapeProto\n * @instance\n */\n TensorShapeProto.prototype.dim = $util.emptyArray;\n\n /**\n * Creates a new TensorShapeProto instance using the specified properties.\n * @function create\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.ITensorShapeProto=} [properties] Properties to set\n * @returns {onnx.TensorShapeProto} TensorShapeProto instance\n */\n TensorShapeProto.create = function create(properties) {\n return new TensorShapeProto(properties);\n };\n\n /**\n * Encodes the specified TensorShapeProto message. Does not implicitly {@link onnx.TensorShapeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.ITensorShapeProto} message TensorShapeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorShapeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.dim != null && message.dim.length)\n for (var i = 0; i < message.dim.length; ++i)\n $root.onnx.TensorShapeProto.Dimension.encode(message.dim[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TensorShapeProto message, length delimited. 
Does not implicitly {@link onnx.TensorShapeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.ITensorShapeProto} message TensorShapeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TensorShapeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TensorShapeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorShapeProto} TensorShapeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorShapeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TensorShapeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n if (!(message.dim && message.dim.length))\n message.dim = [];\n message.dim.push($root.onnx.TensorShapeProto.Dimension.decode(reader, reader.uint32()));\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TensorShapeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorShapeProto} TensorShapeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TensorShapeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TensorShapeProto message.\n * @function verify\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TensorShapeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.dim != null && message.hasOwnProperty(\"dim\")) {\n if (!Array.isArray(message.dim))\n return \"dim: array expected\";\n for (var i = 0; i < message.dim.length; ++i) {\n var error = $root.onnx.TensorShapeProto.Dimension.verify(message.dim[i]);\n if (error)\n return \"dim.\" + error;\n }\n }\n return null;\n };\n\n /**\n * Creates a TensorShapeProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorShapeProto} TensorShapeProto\n */\n TensorShapeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorShapeProto)\n return object;\n var message = new $root.onnx.TensorShapeProto();\n if (object.dim) {\n if (!Array.isArray(object.dim))\n throw TypeError(\".onnx.TensorShapeProto.dim: array expected\");\n message.dim = [];\n for (var i = 0; i < object.dim.length; ++i) {\n if (typeof object.dim[i] !== \"object\")\n throw TypeError(\".onnx.TensorShapeProto.dim: object expected\");\n message.dim[i] = $root.onnx.TensorShapeProto.Dimension.fromObject(object.dim[i]);\n }\n }\n return message;\n };\n\n /**\n * Creates a plain object from a TensorShapeProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {onnx.TensorShapeProto} message TensorShapeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TensorShapeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults)\n object.dim = [];\n if (message.dim && message.dim.length) {\n object.dim = [];\n for (var j = 0; j < message.dim.length; ++j)\n object.dim[j] = $root.onnx.TensorShapeProto.Dimension.toObject(message.dim[j], options);\n }\n return object;\n };\n\n /**\n * Converts this TensorShapeProto to JSON.\n * @function toJSON\n * @memberof onnx.TensorShapeProto\n * @instance\n * @returns {Object.} JSON object\n */\n TensorShapeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TensorShapeProto\n * @function getTypeUrl\n * @memberof onnx.TensorShapeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TensorShapeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorShapeProto\";\n };\n\n TensorShapeProto.Dimension = (function() {\n\n /**\n * Properties of a Dimension.\n * @memberof onnx.TensorShapeProto\n * @interface IDimension\n * @property {number|Long|null} [dimValue] Dimension dimValue\n * @property {string|null} [dimParam] Dimension dimParam\n * @property {string|null} [denotation] Dimension denotation\n */\n\n /**\n * Constructs a new Dimension.\n * @memberof onnx.TensorShapeProto\n * @classdesc Represents a Dimension.\n * @implements IDimension\n * @constructor\n * @param {onnx.TensorShapeProto.IDimension=} [properties] Properties to set\n */\n function Dimension(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Dimension dimValue.\n * @member {number|Long|null|undefined} dimValue\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Dimension.prototype.dimValue = null;\n\n /**\n * Dimension dimParam.\n * @member {string|null|undefined} dimParam\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Dimension.prototype.dimParam = null;\n\n /**\n * Dimension 
denotation.\n * @member {string} denotation\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Dimension.prototype.denotation = \"\";\n\n // OneOf field names bound to virtual getters and setters\n var $oneOfFields;\n\n /**\n * Dimension value.\n * @member {\"dimValue\"|\"dimParam\"|undefined} value\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n */\n Object.defineProperty(Dimension.prototype, \"value\", {\n get: $util.oneOfGetter($oneOfFields = [\"dimValue\", \"dimParam\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n /**\n * Creates a new Dimension instance using the specified properties.\n * @function create\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.IDimension=} [properties] Properties to set\n * @returns {onnx.TensorShapeProto.Dimension} Dimension instance\n */\n Dimension.create = function create(properties) {\n return new Dimension(properties);\n };\n\n /**\n * Encodes the specified Dimension message. Does not implicitly {@link onnx.TensorShapeProto.Dimension.verify|verify} messages.\n * @function encode\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.IDimension} message Dimension message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Dimension.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.dimValue != null && Object.hasOwnProperty.call(message, \"dimValue\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int64(message.dimValue);\n if (message.dimParam != null && Object.hasOwnProperty.call(message, \"dimParam\"))\n writer.uint32(/* id 2, wireType 2 =*/18).string(message.dimParam);\n if (message.denotation != null && Object.hasOwnProperty.call(message, \"denotation\"))\n writer.uint32(/* id 3, wireType 2 =*/26).string(message.denotation);\n return writer;\n };\n\n /**\n * Encodes the specified Dimension message, length delimited. Does not implicitly {@link onnx.TensorShapeProto.Dimension.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.IDimension} message Dimension message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Dimension.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Dimension message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TensorShapeProto.Dimension} Dimension\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Dimension.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TensorShapeProto.Dimension();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.dimValue = reader.int64();\n break;\n }\n case 2: {\n message.dimParam = reader.string();\n break;\n }\n case 3: {\n message.denotation = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Dimension message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TensorShapeProto.Dimension} Dimension\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Dimension.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Dimension message.\n * @function verify\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Dimension.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n var properties = {};\n if (message.dimValue != null && message.hasOwnProperty(\"dimValue\")) {\n properties.value = 1;\n if (!$util.isInteger(message.dimValue) && !(message.dimValue && $util.isInteger(message.dimValue.low) && $util.isInteger(message.dimValue.high)))\n return \"dimValue: integer|Long expected\";\n }\n if (message.dimParam != null && message.hasOwnProperty(\"dimParam\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n if (!$util.isString(message.dimParam))\n return \"dimParam: string expected\";\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n if (!$util.isString(message.denotation))\n return \"denotation: string expected\";\n return null;\n };\n\n /**\n * Creates a Dimension message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TensorShapeProto.Dimension} Dimension\n */\n Dimension.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TensorShapeProto.Dimension)\n return object;\n var message = new $root.onnx.TensorShapeProto.Dimension();\n if (object.dimValue != null)\n if ($util.Long)\n (message.dimValue = $util.Long.fromValue(object.dimValue)).unsigned = false;\n else if (typeof object.dimValue === \"string\")\n message.dimValue = parseInt(object.dimValue, 10);\n else if (typeof object.dimValue === \"number\")\n message.dimValue = object.dimValue;\n else if (typeof object.dimValue === \"object\")\n message.dimValue = new $util.LongBits(object.dimValue.low >>> 0, object.dimValue.high >>> 0).toNumber();\n if (object.dimParam != null)\n message.dimParam = String(object.dimParam);\n if (object.denotation != null)\n message.denotation = String(object.denotation);\n return message;\n };\n\n /**\n * Creates a plain object from a Dimension message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {onnx.TensorShapeProto.Dimension} message Dimension\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Dimension.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.denotation = \"\";\n if (message.dimValue != null && message.hasOwnProperty(\"dimValue\")) {\n if (typeof message.dimValue === \"number\")\n object.dimValue = options.longs === String ? String(message.dimValue) : message.dimValue;\n else\n object.dimValue = options.longs === String ? $util.Long.prototype.toString.call(message.dimValue) : options.longs === Number ? new $util.LongBits(message.dimValue.low >>> 0, message.dimValue.high >>> 0).toNumber() : message.dimValue;\n if (options.oneofs)\n object.value = \"dimValue\";\n }\n if (message.dimParam != null && message.hasOwnProperty(\"dimParam\")) {\n object.dimParam = message.dimParam;\n if (options.oneofs)\n object.value = \"dimParam\";\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n object.denotation = message.denotation;\n return object;\n };\n\n /**\n * Converts this Dimension to JSON.\n * @function toJSON\n * @memberof onnx.TensorShapeProto.Dimension\n * @instance\n * @returns {Object.} JSON object\n */\n Dimension.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Dimension\n * @function getTypeUrl\n * @memberof onnx.TensorShapeProto.Dimension\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Dimension.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TensorShapeProto.Dimension\";\n };\n\n return Dimension;\n })();\n\n return TensorShapeProto;\n })();\n\n onnx.TypeProto = (function() {\n\n /**\n * Properties of a TypeProto.\n * @memberof onnx\n * @interface ITypeProto\n * @property {onnx.TypeProto.ITensor|null} [tensorType] TypeProto tensorType\n * @property {onnx.TypeProto.ISequence|null} [sequenceType] TypeProto sequenceType\n * @property {onnx.TypeProto.IMap|null} [mapType] TypeProto mapType\n * @property {onnx.TypeProto.IOptional|null} [optionalType] TypeProto optionalType\n * @property {onnx.TypeProto.ISparseTensor|null} [sparseTensorType] TypeProto sparseTensorType\n * @property {string|null} [denotation] TypeProto denotation\n */\n\n /**\n * Constructs a new TypeProto.\n * @memberof onnx\n * @classdesc Represents a TypeProto.\n * @implements ITypeProto\n * @constructor\n * @param {onnx.ITypeProto=} [properties] Properties to set\n */\n function TypeProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * TypeProto tensorType.\n * @member {onnx.TypeProto.ITensor|null|undefined} tensorType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.tensorType = null;\n\n /**\n * TypeProto sequenceType.\n * @member {onnx.TypeProto.ISequence|null|undefined} sequenceType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.sequenceType = null;\n\n /**\n * TypeProto 
mapType.\n * @member {onnx.TypeProto.IMap|null|undefined} mapType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.mapType = null;\n\n /**\n * TypeProto optionalType.\n * @member {onnx.TypeProto.IOptional|null|undefined} optionalType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.optionalType = null;\n\n /**\n * TypeProto sparseTensorType.\n * @member {onnx.TypeProto.ISparseTensor|null|undefined} sparseTensorType\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.sparseTensorType = null;\n\n /**\n * TypeProto denotation.\n * @member {string} denotation\n * @memberof onnx.TypeProto\n * @instance\n */\n TypeProto.prototype.denotation = \"\";\n\n // OneOf field names bound to virtual getters and setters\n var $oneOfFields;\n\n /**\n * TypeProto value.\n * @member {\"tensorType\"|\"sequenceType\"|\"mapType\"|\"optionalType\"|\"sparseTensorType\"|undefined} value\n * @memberof onnx.TypeProto\n * @instance\n */\n Object.defineProperty(TypeProto.prototype, \"value\", {\n get: $util.oneOfGetter($oneOfFields = [\"tensorType\", \"sequenceType\", \"mapType\", \"optionalType\", \"sparseTensorType\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n /**\n * Creates a new TypeProto instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.ITypeProto=} [properties] Properties to set\n * @returns {onnx.TypeProto} TypeProto instance\n */\n TypeProto.create = function create(properties) {\n return new TypeProto(properties);\n };\n\n /**\n * Encodes the specified TypeProto message. Does not implicitly {@link onnx.TypeProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.ITypeProto} message TypeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TypeProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.tensorType != null && Object.hasOwnProperty.call(message, \"tensorType\"))\n $root.onnx.TypeProto.Tensor.encode(message.tensorType, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n if (message.sequenceType != null && Object.hasOwnProperty.call(message, \"sequenceType\"))\n $root.onnx.TypeProto.Sequence.encode(message.sequenceType, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim();\n if (message.mapType != null && Object.hasOwnProperty.call(message, \"mapType\"))\n $root.onnx.TypeProto.Map.encode(message.mapType, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim();\n if (message.denotation != null && Object.hasOwnProperty.call(message, \"denotation\"))\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.denotation);\n if (message.sparseTensorType != null && Object.hasOwnProperty.call(message, \"sparseTensorType\"))\n $root.onnx.TypeProto.SparseTensor.encode(message.sparseTensorType, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim();\n if (message.optionalType != null && Object.hasOwnProperty.call(message, \"optionalType\"))\n $root.onnx.TypeProto.Optional.encode(message.optionalType, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified TypeProto message, length delimited. 
Does not implicitly {@link onnx.TypeProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.ITypeProto} message TypeProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n TypeProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a TypeProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto} TypeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TypeProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TypeProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.tensorType = $root.onnx.TypeProto.Tensor.decode(reader, reader.uint32());\n break;\n }\n case 4: {\n message.sequenceType = $root.onnx.TypeProto.Sequence.decode(reader, reader.uint32());\n break;\n }\n case 5: {\n message.mapType = $root.onnx.TypeProto.Map.decode(reader, reader.uint32());\n break;\n }\n case 9: {\n message.optionalType = $root.onnx.TypeProto.Optional.decode(reader, reader.uint32());\n break;\n }\n case 8: {\n message.sparseTensorType = $root.onnx.TypeProto.SparseTensor.decode(reader, reader.uint32());\n break;\n }\n case 6: {\n message.denotation = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a TypeProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto} TypeProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n TypeProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a TypeProto message.\n * @function verify\n * @memberof onnx.TypeProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n TypeProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n var properties = {};\n if (message.tensorType != null && message.hasOwnProperty(\"tensorType\")) {\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Tensor.verify(message.tensorType);\n if (error)\n return \"tensorType.\" + error;\n }\n }\n if (message.sequenceType != null && message.hasOwnProperty(\"sequenceType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Sequence.verify(message.sequenceType);\n if (error)\n return \"sequenceType.\" + error;\n }\n }\n if (message.mapType != null && 
message.hasOwnProperty(\"mapType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Map.verify(message.mapType);\n if (error)\n return \"mapType.\" + error;\n }\n }\n if (message.optionalType != null && message.hasOwnProperty(\"optionalType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.Optional.verify(message.optionalType);\n if (error)\n return \"optionalType.\" + error;\n }\n }\n if (message.sparseTensorType != null && message.hasOwnProperty(\"sparseTensorType\")) {\n if (properties.value === 1)\n return \"value: multiple values\";\n properties.value = 1;\n {\n var error = $root.onnx.TypeProto.SparseTensor.verify(message.sparseTensorType);\n if (error)\n return \"sparseTensorType.\" + error;\n }\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n if (!$util.isString(message.denotation))\n return \"denotation: string expected\";\n return null;\n };\n\n /**\n * Creates a TypeProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto} TypeProto\n */\n TypeProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto)\n return object;\n var message = new $root.onnx.TypeProto();\n if (object.tensorType != null) {\n if (typeof object.tensorType !== \"object\")\n throw TypeError(\".onnx.TypeProto.tensorType: object expected\");\n message.tensorType = $root.onnx.TypeProto.Tensor.fromObject(object.tensorType);\n }\n if (object.sequenceType != null) {\n if (typeof object.sequenceType !== \"object\")\n throw TypeError(\".onnx.TypeProto.sequenceType: object expected\");\n message.sequenceType = $root.onnx.TypeProto.Sequence.fromObject(object.sequenceType);\n }\n if (object.mapType != null) {\n if (typeof object.mapType !== \"object\")\n throw TypeError(\".onnx.TypeProto.mapType: object expected\");\n message.mapType = $root.onnx.TypeProto.Map.fromObject(object.mapType);\n }\n if (object.optionalType != null) {\n if (typeof object.optionalType !== \"object\")\n throw TypeError(\".onnx.TypeProto.optionalType: object expected\");\n message.optionalType = $root.onnx.TypeProto.Optional.fromObject(object.optionalType);\n }\n if (object.sparseTensorType != null) {\n if (typeof object.sparseTensorType !== \"object\")\n throw TypeError(\".onnx.TypeProto.sparseTensorType: object expected\");\n message.sparseTensorType = $root.onnx.TypeProto.SparseTensor.fromObject(object.sparseTensorType);\n }\n if (object.denotation != null)\n message.denotation = String(object.denotation);\n return message;\n };\n\n /**\n * Creates a plain object from a TypeProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto\n * @static\n * @param {onnx.TypeProto} message TypeProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n TypeProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.denotation = \"\";\n if (message.tensorType != null && message.hasOwnProperty(\"tensorType\")) {\n object.tensorType = $root.onnx.TypeProto.Tensor.toObject(message.tensorType, options);\n if (options.oneofs)\n object.value = \"tensorType\";\n }\n if (message.sequenceType != null && message.hasOwnProperty(\"sequenceType\")) {\n object.sequenceType = $root.onnx.TypeProto.Sequence.toObject(message.sequenceType, options);\n if (options.oneofs)\n object.value = \"sequenceType\";\n }\n if (message.mapType != null && message.hasOwnProperty(\"mapType\")) {\n object.mapType = $root.onnx.TypeProto.Map.toObject(message.mapType, options);\n if (options.oneofs)\n object.value = \"mapType\";\n }\n if (message.denotation != null && message.hasOwnProperty(\"denotation\"))\n object.denotation = message.denotation;\n if (message.sparseTensorType != null && message.hasOwnProperty(\"sparseTensorType\")) {\n object.sparseTensorType = $root.onnx.TypeProto.SparseTensor.toObject(message.sparseTensorType, options);\n if (options.oneofs)\n object.value = \"sparseTensorType\";\n }\n if (message.optionalType != null && message.hasOwnProperty(\"optionalType\")) {\n object.optionalType = $root.onnx.TypeProto.Optional.toObject(message.optionalType, options);\n if (options.oneofs)\n object.value = \"optionalType\";\n }\n return object;\n };\n\n /**\n * Converts this TypeProto to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto\n * @instance\n * @returns {Object.} JSON object\n */\n TypeProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for TypeProto\n * @function getTypeUrl\n * @memberof onnx.TypeProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n TypeProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto\";\n };\n\n TypeProto.Tensor = (function() {\n\n /**\n * Properties of a Tensor.\n * @memberof onnx.TypeProto\n * @interface ITensor\n * @property {number|null} [elemType] Tensor elemType\n * @property {onnx.ITensorShapeProto|null} [shape] Tensor shape\n */\n\n /**\n * Constructs a new Tensor.\n * @memberof onnx.TypeProto\n * @classdesc Represents a Tensor.\n * @implements ITensor\n * @constructor\n * @param {onnx.TypeProto.ITensor=} [properties] Properties to set\n */\n function Tensor(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Tensor elemType.\n * @member {number} elemType\n * @memberof onnx.TypeProto.Tensor\n * @instance\n */\n Tensor.prototype.elemType = 0;\n\n /**\n * Tensor shape.\n * @member {onnx.ITensorShapeProto|null|undefined} shape\n * @memberof onnx.TypeProto.Tensor\n * @instance\n */\n Tensor.prototype.shape = null;\n\n /**\n * Creates a new Tensor instance using the specified properties.\n * 
@function create\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.ITensor=} [properties] Properties to set\n * @returns {onnx.TypeProto.Tensor} Tensor instance\n */\n Tensor.create = function create(properties) {\n return new Tensor(properties);\n };\n\n /**\n * Encodes the specified Tensor message. Does not implicitly {@link onnx.TypeProto.Tensor.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.ITensor} message Tensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Tensor.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int32(message.elemType);\n if (message.shape != null && Object.hasOwnProperty.call(message, \"shape\"))\n $root.onnx.TensorShapeProto.encode(message.shape, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Tensor message, length delimited. Does not implicitly {@link onnx.TypeProto.Tensor.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.ITensor} message Tensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Tensor.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Tensor message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Tensor} Tensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Tensor.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Tensor();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = reader.int32();\n break;\n }\n case 2: {\n message.shape = $root.onnx.TensorShapeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Tensor message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Tensor} Tensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Tensor.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Tensor message.\n * @function verify\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Tensor.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n if (!$util.isInteger(message.elemType))\n return \"elemType: integer expected\";\n if (message.shape != null && message.hasOwnProperty(\"shape\")) {\n var error = $root.onnx.TensorShapeProto.verify(message.shape);\n if (error)\n return \"shape.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a Tensor message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Tensor} Tensor\n */\n Tensor.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Tensor)\n return object;\n var message = new $root.onnx.TypeProto.Tensor();\n if (object.elemType != null)\n message.elemType = object.elemType | 0;\n if (object.shape != null) {\n if (typeof object.shape !== \"object\")\n throw TypeError(\".onnx.TypeProto.Tensor.shape: object expected\");\n message.shape = $root.onnx.TensorShapeProto.fromObject(object.shape);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a Tensor message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {onnx.TypeProto.Tensor} message Tensor\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Tensor.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.elemType = 0;\n object.shape = null;\n }\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = message.elemType;\n if (message.shape != null && message.hasOwnProperty(\"shape\"))\n object.shape = $root.onnx.TensorShapeProto.toObject(message.shape, options);\n return object;\n };\n\n /**\n * Converts this Tensor to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Tensor\n * @instance\n * @returns {Object.} JSON object\n */\n Tensor.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Tensor\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Tensor\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Tensor.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Tensor\";\n };\n\n return Tensor;\n })();\n\n TypeProto.Sequence = (function() {\n\n /**\n * Properties of a Sequence.\n * @memberof onnx.TypeProto\n * @interface ISequence\n * @property {onnx.ITypeProto|null} [elemType] Sequence elemType\n */\n\n /**\n * Constructs a new Sequence.\n * @memberof onnx.TypeProto\n * @classdesc Represents a Sequence.\n * @implements ISequence\n * @constructor\n * @param {onnx.TypeProto.ISequence=} [properties] Properties to set\n */\n function Sequence(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Sequence elemType.\n * @member {onnx.ITypeProto|null|undefined} elemType\n * @memberof onnx.TypeProto.Sequence\n * @instance\n */\n Sequence.prototype.elemType = null;\n\n /**\n * Creates a new Sequence instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.ISequence=} [properties] Properties to set\n * @returns {onnx.TypeProto.Sequence} Sequence instance\n */\n Sequence.create = function create(properties) {\n return new Sequence(properties);\n };\n\n /**\n * Encodes the specified Sequence message. Does not implicitly {@link onnx.TypeProto.Sequence.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.ISequence} message Sequence message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Sequence.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n $root.onnx.TypeProto.encode(message.elemType, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Sequence message, length delimited. 
Does not implicitly {@link onnx.TypeProto.Sequence.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.ISequence} message Sequence message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Sequence.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Sequence message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Sequence} Sequence\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Sequence.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Sequence();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Sequence message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Sequence} Sequence\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Sequence.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Sequence message.\n * @function verify\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Sequence.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\")) {\n var error = $root.onnx.TypeProto.verify(message.elemType);\n if (error)\n return \"elemType.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a Sequence message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Sequence} Sequence\n */\n Sequence.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Sequence)\n return object;\n var message = new $root.onnx.TypeProto.Sequence();\n if (object.elemType != null) {\n if (typeof object.elemType !== \"object\")\n throw TypeError(\".onnx.TypeProto.Sequence.elemType: object expected\");\n message.elemType = $root.onnx.TypeProto.fromObject(object.elemType);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a Sequence message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {onnx.TypeProto.Sequence} message Sequence\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Sequence.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.elemType = null;\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = $root.onnx.TypeProto.toObject(message.elemType, options);\n return object;\n };\n\n /**\n * Converts this Sequence to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Sequence\n * @instance\n * @returns {Object.} JSON object\n */\n Sequence.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Sequence\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Sequence\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Sequence.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Sequence\";\n };\n\n return Sequence;\n })();\n\n TypeProto.Map = (function() {\n\n /**\n * Properties of a Map.\n * @memberof onnx.TypeProto\n * @interface IMap\n * @property {number|null} [keyType] Map keyType\n * @property {onnx.ITypeProto|null} [valueType] Map valueType\n */\n\n /**\n * Constructs a new Map.\n * @memberof onnx.TypeProto\n * @classdesc Represents a Map.\n * @implements IMap\n * @constructor\n * @param {onnx.TypeProto.IMap=} [properties] Properties to set\n */\n function Map(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Map keyType.\n * @member {number} keyType\n * @memberof onnx.TypeProto.Map\n * @instance\n */\n Map.prototype.keyType = 0;\n\n /**\n * Map valueType.\n * @member {onnx.ITypeProto|null|undefined} valueType\n * @memberof onnx.TypeProto.Map\n * @instance\n */\n Map.prototype.valueType = null;\n\n /**\n * Creates a new Map instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.IMap=} [properties] Properties to set\n * @returns {onnx.TypeProto.Map} Map instance\n */\n Map.create = function create(properties) {\n return new Map(properties);\n };\n\n /**\n * Encodes the specified Map message. 
Does not implicitly {@link onnx.TypeProto.Map.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.IMap} message Map message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Map.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.keyType != null && Object.hasOwnProperty.call(message, \"keyType\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int32(message.keyType);\n if (message.valueType != null && Object.hasOwnProperty.call(message, \"valueType\"))\n $root.onnx.TypeProto.encode(message.valueType, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Map message, length delimited. Does not implicitly {@link onnx.TypeProto.Map.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.IMap} message Map message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Map.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a Map message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Map} Map\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Map.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Map();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.keyType = reader.int32();\n break;\n }\n case 2: {\n message.valueType = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a Map message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Map} Map\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Map.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a Map message.\n * @function verify\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Map.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.keyType != null && message.hasOwnProperty(\"keyType\"))\n if (!$util.isInteger(message.keyType))\n return \"keyType: integer expected\";\n if (message.valueType != null && message.hasOwnProperty(\"valueType\")) {\n var error = $root.onnx.TypeProto.verify(message.valueType);\n if (error)\n return \"valueType.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a Map message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Map} Map\n */\n Map.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Map)\n return object;\n var message = new $root.onnx.TypeProto.Map();\n if (object.keyType != null)\n message.keyType = object.keyType | 0;\n if (object.valueType != null) {\n if (typeof object.valueType !== \"object\")\n throw TypeError(\".onnx.TypeProto.Map.valueType: object expected\");\n message.valueType = $root.onnx.TypeProto.fromObject(object.valueType);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a Map message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {onnx.TypeProto.Map} message Map\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Map.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.keyType = 0;\n object.valueType = null;\n }\n if (message.keyType != null && message.hasOwnProperty(\"keyType\"))\n object.keyType = message.keyType;\n if (message.valueType != null && message.hasOwnProperty(\"valueType\"))\n object.valueType = $root.onnx.TypeProto.toObject(message.valueType, options);\n return object;\n };\n\n /**\n * Converts this Map to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Map\n * @instance\n * @returns {Object.} JSON object\n */\n Map.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Map\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Map\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Map.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Map\";\n };\n\n return Map;\n })();\n\n TypeProto.Optional = (function() {\n\n /**\n * Properties of an Optional.\n * @memberof onnx.TypeProto\n * @interface IOptional\n * @property {onnx.ITypeProto|null} [elemType] Optional elemType\n */\n\n /**\n * Constructs a new Optional.\n * @memberof onnx.TypeProto\n * @classdesc Represents an Optional.\n * @implements IOptional\n * @constructor\n * @param {onnx.TypeProto.IOptional=} [properties] Properties to set\n */\n function Optional(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * Optional elemType.\n * @member {onnx.ITypeProto|null|undefined} elemType\n * @memberof onnx.TypeProto.Optional\n * @instance\n */\n Optional.prototype.elemType = null;\n\n /**\n * Creates a new Optional instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.IOptional=} [properties] Properties to set\n * @returns {onnx.TypeProto.Optional} Optional instance\n */\n Optional.create = function create(properties) {\n return new Optional(properties);\n };\n\n /**\n * Encodes the specified Optional message. Does not implicitly {@link onnx.TypeProto.Optional.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.IOptional} message Optional message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Optional.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n $root.onnx.TypeProto.encode(message.elemType, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified Optional message, length delimited. 
Does not implicitly {@link onnx.TypeProto.Optional.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.IOptional} message Optional message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n Optional.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes an Optional message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.Optional} Optional\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Optional.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.TypeProto.Optional();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = $root.onnx.TypeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes an Optional message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.Optional} Optional\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n Optional.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies an Optional message.\n * @function verify\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n Optional.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\")) {\n var error = $root.onnx.TypeProto.verify(message.elemType);\n if (error)\n return \"elemType.\" + error;\n }\n return null;\n };\n\n /**\n * Creates an Optional message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.Optional} Optional\n */\n Optional.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.Optional)\n return object;\n var message = new $root.onnx.TypeProto.Optional();\n if (object.elemType != null) {\n if (typeof object.elemType !== \"object\")\n throw TypeError(\".onnx.TypeProto.Optional.elemType: object expected\");\n message.elemType = $root.onnx.TypeProto.fromObject(object.elemType);\n }\n return message;\n };\n\n /**\n * Creates a plain object from an Optional message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {onnx.TypeProto.Optional} message Optional\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n Optional.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults)\n object.elemType = null;\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = $root.onnx.TypeProto.toObject(message.elemType, options);\n return object;\n };\n\n /**\n * Converts this Optional to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.Optional\n * @instance\n * @returns {Object.} JSON object\n */\n Optional.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for Optional\n * @function getTypeUrl\n * @memberof onnx.TypeProto.Optional\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n Optional.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.Optional\";\n };\n\n return Optional;\n })();\n\n TypeProto.SparseTensor = (function() {\n\n /**\n * Properties of a SparseTensor.\n * @memberof onnx.TypeProto\n * @interface ISparseTensor\n * @property {number|null} [elemType] SparseTensor elemType\n * @property {onnx.ITensorShapeProto|null} [shape] SparseTensor shape\n */\n\n /**\n * Constructs a new SparseTensor.\n * @memberof onnx.TypeProto\n * @classdesc Represents a SparseTensor.\n * @implements ISparseTensor\n * @constructor\n * @param {onnx.TypeProto.ISparseTensor=} [properties] Properties to set\n */\n function SparseTensor(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * SparseTensor elemType.\n * @member {number} elemType\n * @memberof onnx.TypeProto.SparseTensor\n * @instance\n */\n SparseTensor.prototype.elemType = 0;\n\n /**\n * SparseTensor shape.\n * @member {onnx.ITensorShapeProto|null|undefined} shape\n * @memberof onnx.TypeProto.SparseTensor\n * @instance\n */\n SparseTensor.prototype.shape = null;\n\n /**\n * Creates a new SparseTensor instance using the specified properties.\n * @function create\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.ISparseTensor=} [properties] Properties to set\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor instance\n */\n SparseTensor.create = function create(properties) {\n return new SparseTensor(properties);\n };\n\n /**\n * Encodes the specified SparseTensor message. 
Does not implicitly {@link onnx.TypeProto.SparseTensor.verify|verify} messages.\n * @function encode\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.ISparseTensor} message SparseTensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensor.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.elemType != null && Object.hasOwnProperty.call(message, \"elemType\"))\n writer.uint32(/* id 1, wireType 0 =*/8).int32(message.elemType);\n if (message.shape != null && Object.hasOwnProperty.call(message, \"shape\"))\n $root.onnx.TensorShapeProto.encode(message.shape, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified SparseTensor message, length delimited. Does not implicitly {@link onnx.TypeProto.SparseTensor.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.ISparseTensor} message SparseTensor message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n SparseTensor.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a SparseTensor message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensor.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.TypeProto.SparseTensor();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.elemType = reader.int32();\n break;\n }\n case 2: {\n message.shape = $root.onnx.TensorShapeProto.decode(reader, reader.uint32());\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a SparseTensor message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n SparseTensor.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a SparseTensor message.\n * @function verify\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n SparseTensor.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n if (!$util.isInteger(message.elemType))\n return \"elemType: integer expected\";\n if (message.shape != null && message.hasOwnProperty(\"shape\")) {\n var error = $root.onnx.TensorShapeProto.verify(message.shape);\n if (error)\n return \"shape.\" + error;\n }\n return null;\n };\n\n /**\n * Creates a SparseTensor message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.TypeProto.SparseTensor} SparseTensor\n */\n SparseTensor.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.TypeProto.SparseTensor)\n return object;\n var message = new $root.onnx.TypeProto.SparseTensor();\n if (object.elemType != null)\n message.elemType = object.elemType | 0;\n if (object.shape != null) {\n if (typeof object.shape !== \"object\")\n throw TypeError(\".onnx.TypeProto.SparseTensor.shape: object expected\");\n message.shape = $root.onnx.TensorShapeProto.fromObject(object.shape);\n }\n return message;\n };\n\n /**\n * Creates a plain object from a SparseTensor message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {onnx.TypeProto.SparseTensor} message SparseTensor\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n SparseTensor.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.elemType = 0;\n object.shape = null;\n }\n if (message.elemType != null && message.hasOwnProperty(\"elemType\"))\n object.elemType = message.elemType;\n if (message.shape != null && message.hasOwnProperty(\"shape\"))\n object.shape = $root.onnx.TensorShapeProto.toObject(message.shape, options);\n return object;\n };\n\n /**\n * Converts this SparseTensor to JSON.\n * @function toJSON\n * @memberof onnx.TypeProto.SparseTensor\n * @instance\n * @returns {Object.} JSON object\n */\n SparseTensor.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for SparseTensor\n * @function getTypeUrl\n * @memberof onnx.TypeProto.SparseTensor\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n SparseTensor.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.TypeProto.SparseTensor\";\n };\n\n return SparseTensor;\n })();\n\n return TypeProto;\n })();\n\n onnx.OperatorSetIdProto = (function() {\n\n /**\n * Properties of an OperatorSetIdProto.\n * @memberof onnx\n * @interface IOperatorSetIdProto\n * @property {string|null} [domain] OperatorSetIdProto domain\n * @property {number|Long|null} [version] OperatorSetIdProto version\n */\n\n /**\n * Constructs a new OperatorSetIdProto.\n * @memberof onnx\n * @classdesc Represents an OperatorSetIdProto.\n * @implements IOperatorSetIdProto\n * @constructor\n * @param {onnx.IOperatorSetIdProto=} [properties] Properties to set\n */\n function OperatorSetIdProto(properties) {\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * OperatorSetIdProto domain.\n * @member {string} domain\n * @memberof onnx.OperatorSetIdProto\n * @instance\n */\n OperatorSetIdProto.prototype.domain = \"\";\n\n /**\n * OperatorSetIdProto version.\n * @member {number|Long} version\n * @memberof onnx.OperatorSetIdProto\n * @instance\n */\n OperatorSetIdProto.prototype.version = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n\n /**\n * Creates a new OperatorSetIdProto instance using the specified properties.\n * @function create\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.IOperatorSetIdProto=} [properties] Properties to set\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto instance\n */\n OperatorSetIdProto.create = function create(properties) {\n return new OperatorSetIdProto(properties);\n };\n\n /**\n * Encodes the specified OperatorSetIdProto message. 
Does not implicitly {@link onnx.OperatorSetIdProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.IOperatorSetIdProto} message OperatorSetIdProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n OperatorSetIdProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.domain);\n if (message.version != null && Object.hasOwnProperty.call(message, \"version\"))\n writer.uint32(/* id 2, wireType 0 =*/16).int64(message.version);\n return writer;\n };\n\n /**\n * Encodes the specified OperatorSetIdProto message, length delimited. Does not implicitly {@link onnx.OperatorSetIdProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.IOperatorSetIdProto} message OperatorSetIdProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n OperatorSetIdProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes an OperatorSetIdProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n OperatorSetIdProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.onnx.OperatorSetIdProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.domain = reader.string();\n break;\n }\n case 2: {\n message.version = reader.int64();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes an OperatorSetIdProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n OperatorSetIdProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies an OperatorSetIdProto message.\n * @function verify\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n OperatorSetIdProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n if (message.version != null && message.hasOwnProperty(\"version\"))\n if (!$util.isInteger(message.version) && !(message.version && $util.isInteger(message.version.low) && $util.isInteger(message.version.high)))\n return \"version: integer|Long expected\";\n return null;\n };\n\n /**\n * Creates an OperatorSetIdProto message from a plain object. Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.OperatorSetIdProto} OperatorSetIdProto\n */\n OperatorSetIdProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.OperatorSetIdProto)\n return object;\n var message = new $root.onnx.OperatorSetIdProto();\n if (object.domain != null)\n message.domain = String(object.domain);\n if (object.version != null)\n if ($util.Long)\n (message.version = $util.Long.fromValue(object.version)).unsigned = false;\n else if (typeof object.version === \"string\")\n message.version = parseInt(object.version, 10);\n else if (typeof object.version === \"number\")\n message.version = object.version;\n else if (typeof object.version === \"object\")\n message.version = new $util.LongBits(object.version.low >>> 0, object.version.high >>> 0).toNumber();\n return message;\n };\n\n /**\n * Creates a plain object from an OperatorSetIdProto message. Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {onnx.OperatorSetIdProto} message OperatorSetIdProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n OperatorSetIdProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.defaults) {\n object.domain = \"\";\n if ($util.Long) {\n var long = new $util.Long(0, 0, false);\n object.version = options.longs === String ? 
long.toString() : options.longs === Number ? long.toNumber() : long;\n } else\n object.version = options.longs === String ? \"0\" : 0;\n }\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n if (message.version != null && message.hasOwnProperty(\"version\"))\n if (typeof message.version === \"number\")\n object.version = options.longs === String ? String(message.version) : message.version;\n else\n object.version = options.longs === String ? $util.Long.prototype.toString.call(message.version) : options.longs === Number ? new $util.LongBits(message.version.low >>> 0, message.version.high >>> 0).toNumber() : message.version;\n return object;\n };\n\n /**\n * Converts this OperatorSetIdProto to JSON.\n * @function toJSON\n * @memberof onnx.OperatorSetIdProto\n * @instance\n * @returns {Object.} JSON object\n */\n OperatorSetIdProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for OperatorSetIdProto\n * @function getTypeUrl\n * @memberof onnx.OperatorSetIdProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n OperatorSetIdProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.OperatorSetIdProto\";\n };\n\n return OperatorSetIdProto;\n })();\n\n /**\n * OperatorStatus enum.\n * @name onnx.OperatorStatus\n * @enum {number}\n * @property {number} EXPERIMENTAL=0 EXPERIMENTAL value\n * @property {number} STABLE=1 STABLE value\n */\n onnx.OperatorStatus = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"EXPERIMENTAL\"] = 0;\n values[valuesById[1] = \"STABLE\"] = 1;\n return values;\n })();\n\n onnx.FunctionProto = (function() {\n\n /**\n * Properties of a FunctionProto.\n * @memberof onnx\n * @interface IFunctionProto\n * @property {string|null} [name] FunctionProto name\n * @property {Array.|null} [input] FunctionProto input\n * @property {Array.|null} [output] FunctionProto output\n * @property {Array.|null} [attribute] FunctionProto attribute\n * @property {Array.|null} [attributeProto] FunctionProto attributeProto\n * @property {Array.|null} [node] FunctionProto node\n * @property {string|null} [docString] FunctionProto docString\n * @property {Array.|null} [opsetImport] FunctionProto opsetImport\n * @property {string|null} [domain] FunctionProto domain\n */\n\n /**\n * Constructs a new FunctionProto.\n * @memberof onnx\n * @classdesc Represents a FunctionProto.\n * @implements IFunctionProto\n * @constructor\n * @param {onnx.IFunctionProto=} [properties] Properties to set\n */\n function FunctionProto(properties) {\n this.input = [];\n this.output = [];\n this.attribute = [];\n this.attributeProto = [];\n this.node = [];\n this.opsetImport = [];\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n if (properties[keys[i]] != null)\n this[keys[i]] = properties[keys[i]];\n }\n\n /**\n * FunctionProto name.\n * @member {string} name\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.name = \"\";\n\n /**\n * FunctionProto input.\n * @member {Array.} input\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.input = $util.emptyArray;\n\n /**\n * FunctionProto output.\n * @member 
{Array.} output\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.output = $util.emptyArray;\n\n /**\n * FunctionProto attribute.\n * @member {Array.} attribute\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.attribute = $util.emptyArray;\n\n /**\n * FunctionProto attributeProto.\n * @member {Array.} attributeProto\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.attributeProto = $util.emptyArray;\n\n /**\n * FunctionProto node.\n * @member {Array.} node\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.node = $util.emptyArray;\n\n /**\n * FunctionProto docString.\n * @member {string} docString\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.docString = \"\";\n\n /**\n * FunctionProto opsetImport.\n * @member {Array.} opsetImport\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.opsetImport = $util.emptyArray;\n\n /**\n * FunctionProto domain.\n * @member {string} domain\n * @memberof onnx.FunctionProto\n * @instance\n */\n FunctionProto.prototype.domain = \"\";\n\n /**\n * Creates a new FunctionProto instance using the specified properties.\n * @function create\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.IFunctionProto=} [properties] Properties to set\n * @returns {onnx.FunctionProto} FunctionProto instance\n */\n FunctionProto.create = function create(properties) {\n return new FunctionProto(properties);\n };\n\n /**\n * Encodes the specified FunctionProto message. Does not implicitly {@link onnx.FunctionProto.verify|verify} messages.\n * @function encode\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.IFunctionProto} message FunctionProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n FunctionProto.encode = function encode(message, writer) {\n if (!writer)\n writer = $Writer.create();\n if (message.name != null && Object.hasOwnProperty.call(message, \"name\"))\n writer.uint32(/* id 1, wireType 2 =*/10).string(message.name);\n if (message.input != null && message.input.length)\n for (var i = 0; i < message.input.length; ++i)\n writer.uint32(/* id 4, wireType 2 =*/34).string(message.input[i]);\n if (message.output != null && message.output.length)\n for (var i = 0; i < message.output.length; ++i)\n writer.uint32(/* id 5, wireType 2 =*/42).string(message.output[i]);\n if (message.attribute != null && message.attribute.length)\n for (var i = 0; i < message.attribute.length; ++i)\n writer.uint32(/* id 6, wireType 2 =*/50).string(message.attribute[i]);\n if (message.node != null && message.node.length)\n for (var i = 0; i < message.node.length; ++i)\n $root.onnx.NodeProto.encode(message.node[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim();\n if (message.docString != null && Object.hasOwnProperty.call(message, \"docString\"))\n writer.uint32(/* id 8, wireType 2 =*/66).string(message.docString);\n if (message.opsetImport != null && message.opsetImport.length)\n for (var i = 0; i < message.opsetImport.length; ++i)\n $root.onnx.OperatorSetIdProto.encode(message.opsetImport[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim();\n if (message.domain != null && Object.hasOwnProperty.call(message, \"domain\"))\n writer.uint32(/* id 10, wireType 2 =*/82).string(message.domain);\n if (message.attributeProto != null && message.attributeProto.length)\n for (var i = 0; i < 
message.attributeProto.length; ++i)\n $root.onnx.AttributeProto.encode(message.attributeProto[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim();\n return writer;\n };\n\n /**\n * Encodes the specified FunctionProto message, length delimited. Does not implicitly {@link onnx.FunctionProto.verify|verify} messages.\n * @function encodeDelimited\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.IFunctionProto} message FunctionProto message or plain object to encode\n * @param {$protobuf.Writer} [writer] Writer to encode to\n * @returns {$protobuf.Writer} Writer\n */\n FunctionProto.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer).ldelim();\n };\n\n /**\n * Decodes a FunctionProto message from the specified reader or buffer.\n * @function decode\n * @memberof onnx.FunctionProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Message length if known beforehand\n * @returns {onnx.FunctionProto} FunctionProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n FunctionProto.decode = function decode(reader, length) {\n if (!(reader instanceof $Reader))\n reader = $Reader.create(reader);\n var end = length === undefined ? reader.len : reader.pos + length, message = new $root.onnx.FunctionProto();\n while (reader.pos < end) {\n var tag = reader.uint32();\n switch (tag >>> 3) {\n case 1: {\n message.name = reader.string();\n break;\n }\n case 4: {\n if (!(message.input && message.input.length))\n message.input = [];\n message.input.push(reader.string());\n break;\n }\n case 5: {\n if (!(message.output && message.output.length))\n message.output = [];\n message.output.push(reader.string());\n break;\n }\n case 6: {\n if (!(message.attribute && message.attribute.length))\n message.attribute = [];\n message.attribute.push(reader.string());\n break;\n }\n case 11: {\n if (!(message.attributeProto && message.attributeProto.length))\n message.attributeProto = [];\n message.attributeProto.push($root.onnx.AttributeProto.decode(reader, reader.uint32()));\n break;\n }\n case 7: {\n if (!(message.node && message.node.length))\n message.node = [];\n message.node.push($root.onnx.NodeProto.decode(reader, reader.uint32()));\n break;\n }\n case 8: {\n message.docString = reader.string();\n break;\n }\n case 9: {\n if (!(message.opsetImport && message.opsetImport.length))\n message.opsetImport = [];\n message.opsetImport.push($root.onnx.OperatorSetIdProto.decode(reader, reader.uint32()));\n break;\n }\n case 10: {\n message.domain = reader.string();\n break;\n }\n default:\n reader.skipType(tag & 7);\n break;\n }\n }\n return message;\n };\n\n /**\n * Decodes a FunctionProto message from the specified reader or buffer, length delimited.\n * @function decodeDelimited\n * @memberof onnx.FunctionProto\n * @static\n * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {onnx.FunctionProto} FunctionProto\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {$protobuf.util.ProtocolError} If required fields are missing\n */\n FunctionProto.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof $Reader))\n reader = new $Reader(reader);\n return this.decode(reader, reader.uint32());\n };\n\n /**\n * Verifies a FunctionProto message.\n * @function verify\n * @memberof onnx.FunctionProto\n * @static\n * 
@param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\n FunctionProto.verify = function verify(message) {\n if (typeof message !== \"object\" || message === null)\n return \"object expected\";\n if (message.name != null && message.hasOwnProperty(\"name\"))\n if (!$util.isString(message.name))\n return \"name: string expected\";\n if (message.input != null && message.hasOwnProperty(\"input\")) {\n if (!Array.isArray(message.input))\n return \"input: array expected\";\n for (var i = 0; i < message.input.length; ++i)\n if (!$util.isString(message.input[i]))\n return \"input: string[] expected\";\n }\n if (message.output != null && message.hasOwnProperty(\"output\")) {\n if (!Array.isArray(message.output))\n return \"output: array expected\";\n for (var i = 0; i < message.output.length; ++i)\n if (!$util.isString(message.output[i]))\n return \"output: string[] expected\";\n }\n if (message.attribute != null && message.hasOwnProperty(\"attribute\")) {\n if (!Array.isArray(message.attribute))\n return \"attribute: array expected\";\n for (var i = 0; i < message.attribute.length; ++i)\n if (!$util.isString(message.attribute[i]))\n return \"attribute: string[] expected\";\n }\n if (message.attributeProto != null && message.hasOwnProperty(\"attributeProto\")) {\n if (!Array.isArray(message.attributeProto))\n return \"attributeProto: array expected\";\n for (var i = 0; i < message.attributeProto.length; ++i) {\n var error = $root.onnx.AttributeProto.verify(message.attributeProto[i]);\n if (error)\n return \"attributeProto.\" + error;\n }\n }\n if (message.node != null && message.hasOwnProperty(\"node\")) {\n if (!Array.isArray(message.node))\n return \"node: array expected\";\n for (var i = 0; i < message.node.length; ++i) {\n var error = $root.onnx.NodeProto.verify(message.node[i]);\n if (error)\n return \"node.\" + error;\n }\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n if (!$util.isString(message.docString))\n return \"docString: string expected\";\n if (message.opsetImport != null && message.hasOwnProperty(\"opsetImport\")) {\n if (!Array.isArray(message.opsetImport))\n return \"opsetImport: array expected\";\n for (var i = 0; i < message.opsetImport.length; ++i) {\n var error = $root.onnx.OperatorSetIdProto.verify(message.opsetImport[i]);\n if (error)\n return \"opsetImport.\" + error;\n }\n }\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n if (!$util.isString(message.domain))\n return \"domain: string expected\";\n return null;\n };\n\n /**\n * Creates a FunctionProto message from a plain object. 
Also converts values to their respective internal types.\n * @function fromObject\n * @memberof onnx.FunctionProto\n * @static\n * @param {Object.} object Plain object\n * @returns {onnx.FunctionProto} FunctionProto\n */\n FunctionProto.fromObject = function fromObject(object) {\n if (object instanceof $root.onnx.FunctionProto)\n return object;\n var message = new $root.onnx.FunctionProto();\n if (object.name != null)\n message.name = String(object.name);\n if (object.input) {\n if (!Array.isArray(object.input))\n throw TypeError(\".onnx.FunctionProto.input: array expected\");\n message.input = [];\n for (var i = 0; i < object.input.length; ++i)\n message.input[i] = String(object.input[i]);\n }\n if (object.output) {\n if (!Array.isArray(object.output))\n throw TypeError(\".onnx.FunctionProto.output: array expected\");\n message.output = [];\n for (var i = 0; i < object.output.length; ++i)\n message.output[i] = String(object.output[i]);\n }\n if (object.attribute) {\n if (!Array.isArray(object.attribute))\n throw TypeError(\".onnx.FunctionProto.attribute: array expected\");\n message.attribute = [];\n for (var i = 0; i < object.attribute.length; ++i)\n message.attribute[i] = String(object.attribute[i]);\n }\n if (object.attributeProto) {\n if (!Array.isArray(object.attributeProto))\n throw TypeError(\".onnx.FunctionProto.attributeProto: array expected\");\n message.attributeProto = [];\n for (var i = 0; i < object.attributeProto.length; ++i) {\n if (typeof object.attributeProto[i] !== \"object\")\n throw TypeError(\".onnx.FunctionProto.attributeProto: object expected\");\n message.attributeProto[i] = $root.onnx.AttributeProto.fromObject(object.attributeProto[i]);\n }\n }\n if (object.node) {\n if (!Array.isArray(object.node))\n throw TypeError(\".onnx.FunctionProto.node: array expected\");\n message.node = [];\n for (var i = 0; i < object.node.length; ++i) {\n if (typeof object.node[i] !== \"object\")\n throw TypeError(\".onnx.FunctionProto.node: object expected\");\n message.node[i] = $root.onnx.NodeProto.fromObject(object.node[i]);\n }\n }\n if (object.docString != null)\n message.docString = String(object.docString);\n if (object.opsetImport) {\n if (!Array.isArray(object.opsetImport))\n throw TypeError(\".onnx.FunctionProto.opsetImport: array expected\");\n message.opsetImport = [];\n for (var i = 0; i < object.opsetImport.length; ++i) {\n if (typeof object.opsetImport[i] !== \"object\")\n throw TypeError(\".onnx.FunctionProto.opsetImport: object expected\");\n message.opsetImport[i] = $root.onnx.OperatorSetIdProto.fromObject(object.opsetImport[i]);\n }\n }\n if (object.domain != null)\n message.domain = String(object.domain);\n return message;\n };\n\n /**\n * Creates a plain object from a FunctionProto message. 
Also converts values to other types if specified.\n * @function toObject\n * @memberof onnx.FunctionProto\n * @static\n * @param {onnx.FunctionProto} message FunctionProto\n * @param {$protobuf.IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\n FunctionProto.toObject = function toObject(message, options) {\n if (!options)\n options = {};\n var object = {};\n if (options.arrays || options.defaults) {\n object.input = [];\n object.output = [];\n object.attribute = [];\n object.node = [];\n object.opsetImport = [];\n object.attributeProto = [];\n }\n if (options.defaults) {\n object.name = \"\";\n object.docString = \"\";\n object.domain = \"\";\n }\n if (message.name != null && message.hasOwnProperty(\"name\"))\n object.name = message.name;\n if (message.input && message.input.length) {\n object.input = [];\n for (var j = 0; j < message.input.length; ++j)\n object.input[j] = message.input[j];\n }\n if (message.output && message.output.length) {\n object.output = [];\n for (var j = 0; j < message.output.length; ++j)\n object.output[j] = message.output[j];\n }\n if (message.attribute && message.attribute.length) {\n object.attribute = [];\n for (var j = 0; j < message.attribute.length; ++j)\n object.attribute[j] = message.attribute[j];\n }\n if (message.node && message.node.length) {\n object.node = [];\n for (var j = 0; j < message.node.length; ++j)\n object.node[j] = $root.onnx.NodeProto.toObject(message.node[j], options);\n }\n if (message.docString != null && message.hasOwnProperty(\"docString\"))\n object.docString = message.docString;\n if (message.opsetImport && message.opsetImport.length) {\n object.opsetImport = [];\n for (var j = 0; j < message.opsetImport.length; ++j)\n object.opsetImport[j] = $root.onnx.OperatorSetIdProto.toObject(message.opsetImport[j], options);\n }\n if (message.domain != null && message.hasOwnProperty(\"domain\"))\n object.domain = message.domain;\n if (message.attributeProto && message.attributeProto.length) {\n object.attributeProto = [];\n for (var j = 0; j < message.attributeProto.length; ++j)\n object.attributeProto[j] = $root.onnx.AttributeProto.toObject(message.attributeProto[j], options);\n }\n return object;\n };\n\n /**\n * Converts this FunctionProto to JSON.\n * @function toJSON\n * @memberof onnx.FunctionProto\n * @instance\n * @returns {Object.} JSON object\n */\n FunctionProto.prototype.toJSON = function toJSON() {\n return this.constructor.toObject(this, $protobuf.util.toJSONOptions);\n };\n\n /**\n * Gets the default type url for FunctionProto\n * @function getTypeUrl\n * @memberof onnx.FunctionProto\n * @static\n * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default \"type.googleapis.com\")\n * @returns {string} The default type url\n */\n FunctionProto.getTypeUrl = function getTypeUrl(typeUrlPrefix) {\n if (typeUrlPrefix === undefined) {\n typeUrlPrefix = \"type.googleapis.com\";\n }\n return typeUrlPrefix + \"/onnx.FunctionProto\";\n };\n\n return FunctionProto;\n })();\n\n return onnx;\n})();\n\nmodule.exports = $root;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {flatbuffers} from 'flatbuffers';\nimport Long from 'long';\n\nimport {Graph} from './graph';\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {Tensor} from './tensor';\n\n// check the inputs shape before running an OP.\n// return true when the inputs pass the check\n// return false when the inputs do not fit the requirement\n// throw exception when fatal error or not implemented\nexport function checkInputsShape(inputs: Tensor[], ...expectedDimensions: number[]): boolean {\n if (!inputs || inputs.length !== expectedDimensions.length) {\n return false;\n }\n for (let i = 0; i < inputs.length; i++) {\n if (!inputs[i].dims || inputs[i].dims.length !== expectedDimensions[i]) {\n return false;\n }\n }\n return true;\n}\n\n// Evaluates the given expression and asserts error message if condition is unmet.\nexport function assert(expr: boolean, msg: () => string) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? msg : msg());\n }\n}\n\nexport class ArrayUtil {\n /**\n * Verifies if 2 input arrays contain the same elements.\n * @param n1 Array 1\n * @param n2 Array 2\n * @returns Whether these 2 are equal\n */\n static arraysEqual(\n n1: readonly number[]|Int8Array|Uint8Array|Int16Array|Uint16Array|Int32Array|Uint32Array|Uint8ClampedArray|\n Float32Array|Float64Array,\n n2: readonly number[]|Int8Array|Uint8Array|Int16Array|Uint16Array|Int32Array|Uint32Array|Uint8ClampedArray|\n Float32Array|Float64Array) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n }\n}\n\nexport class MatMulUtil {\n /**\n * Fix the input shapes for MatMul operation if they need fixing\n * @param dimsA The shape of tensor A. Should be an array of positive integers\n * @param dimsB The shape of tensor B. Should be an array of positive integers\n * @returns A tuple containing the preprocessed input shapes as required by ONNX specifications\n */\n static preprocessInputShapes(dimsA: readonly number[], dimsB: readonly number[]):\n [readonly number[], readonly number[]] {\n // If the first argument is 1-D, it is promoted to a matrix by prepending\n // a 1 to its dimensions. After matrix multiplication the prepended 1 is\n // removed.\n const a = (dimsA.length === 1) ? [1, dimsA[0]] : dimsA;\n\n // If the second argument is 1-D, it is promoted to a matrix by appending\n // a 1 to its dimensions. After matrix multiplication the appended 1 is\n // removed.\n const b = (dimsB.length === 1) ? [dimsB[0], 1] : dimsB;\n\n return [a, b];\n }\n\n /**\n * Fix the output shape computed for MatMul operation if it needs fixing\n * @param outputShape The computed outputShape. Should be an array (atleast of length 2) of positive integers.\n * This will be mutated.\n * @param aRank The rank of tensor A.\n * @param bRank The rank of tensor B.\n */\n static postprocessOutputShape(outputShape: number[], aRank: number, bRank: number) {\n // Remove prepended dimension if first input is 1d\n if (aRank === 1) {\n // outputShape = outputShape.slice(0, outputShape.length - 2).concat(outputShape.slice(outputShape.length - 1));\n outputShape.splice(outputShape.length - 2, 1);\n }\n // Remove appended dimension if second input is 1d\n if (bRank === 1) {\n outputShape.pop();\n }\n }\n\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. 
Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n cdims[crank - i] = Math.max(aLen, bLen);\n }\n\n return cdims;\n }\n\n /**\n * Given the indices of a broadcasted tensor, calculate the original indices\n * @param broadcastedIndices The given indices of the broadcasted tensor.\n * @param originalShape The original shape of the tensor before broadcas\n * @returns The calculated indices that maps to the original tensor.\n */\n static index(broadcastedIndices: readonly number[], originalShape: readonly number[]): number[] {\n // NOTE 1: we assume the parameter broadcastedIndices is valid. ie. it should have the same\n // length as the broadcasted shape, and for each dimension the index should\n // not be out of range.\n const originalIndices = new Array(originalShape.length);\n BroadcastUtil.fillIndex(broadcastedIndices, originalShape, originalIndices);\n return originalIndices;\n }\n\n /**\n * Given the indices of a broadcasted tensor, calculate the original indices\n * @param broadcastedIndices The given indices of the broadcasted tensor.\n * @param originalShape The original shape of the tensor before broadcast\n * @param originalIndices The mapping of broadcastedIndices to the originalIndices (output parameter - will be\n * mutated).\n */\n static fillIndex(broadcastedIndices: readonly number[], originalShape: readonly number[], originalIndices: number[]) {\n // NOTE 1: we assume the parameter broadcastedIndices is valid. ie. 
it should have the same length as the\n // broadcasted shape, and for each dimension the index should not be out of range.\n // NOTE 2: we assume the parameter originalIndices has the same length as the originalShape\n const dimOffset = broadcastedIndices.length - originalShape.length;\n for (let i = 0; i < originalShape.length; i++) {\n originalIndices[i] = broadcastedIndices[dimOffset + i] % originalShape[i];\n }\n }\n\n /**\n * Perform the broadcasting operation on the specific operator\n * @param a The input tensor A\n * @param b The input tensor B\n * @param op The operator lambda function\n * @param inplace Whether to write the result back to A.\n * @returns The result tensor, or undefined if input not broadcastable.\n */\n static calc(\n a: Tensor, b: Tensor, op: (a: string|number, b: string|number) => (string | number), inplace: boolean,\n resultType?: Tensor.DataType): Tensor|undefined {\n const outputShape = BroadcastUtil.calcShape(a.dims, b.dims);\n\n if (outputShape) {\n if (inplace && !ShapeUtil.areEqual(outputShape, a.dims)) {\n // B is not broadcastable to A, failed to calculate inplace.\n return undefined;\n }\n\n const size = ShapeUtil.size(outputShape);\n const c = inplace ? a : new Tensor(outputShape, resultType || a.type);\n\n // both inputs are scalars\n if (outputShape.length === 0) {\n c.set([], op(a.get([]) as number, b.get([]) as number));\n }\n\n // atleast one input is a non-scalar\n else {\n const outputIndices = new Array(outputShape.length);\n const originalIndicesA = new Array(a.dims.length);\n const originalIndicesB = new Array(b.dims.length);\n let valA: string|number = 0;\n let valB: string|number = 0;\n let isAScalar = false;\n let isBScalar = false;\n if (a.dims.length === 0) {\n valA = a.get([]) as number;\n isAScalar = true;\n }\n if (b.dims.length === 0) {\n valB = b.get([]) as number;\n isBScalar = true;\n }\n let rest: number;\n for (let i = 0; i < size; i++) {\n // traversal indices\n rest = i;\n for (let j = outputShape.length - 1; j >= 0; j--) {\n outputIndices[j] = rest % outputShape[j];\n rest = Math.floor(rest / outputShape[j]);\n }\n\n if (!isAScalar) {\n // map outputIndices (which is actually broadcasted) to the originalIndices\n BroadcastUtil.fillIndex(outputIndices, a.dims, originalIndicesA);\n valA = a.get(originalIndicesA) as number;\n }\n if (!isBScalar) {\n BroadcastUtil.fillIndex(outputIndices, b.dims, originalIndicesB);\n valB = b.get(originalIndicesB) as number;\n }\n\n c.set(outputIndices, op(valA, valB));\n }\n }\n\n return c;\n }\n\n return undefined;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n\n /**\n * Determine the broadcasted dims in input shape based on the given output shape.\n * Note that this function only returns the broadcasted dims.\n * @param inputShape The input shape\n * @param outputShape The output shape\n * @returns The broadcasted dims in input shape.\n */\n static getBroadcastDims(inputShape: readonly number[], outputShape: readonly number[]): number[] {\n 
const inRank = inputShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inputShape[dim] || 1;\n const b = outputShape[outputShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n }\n}\n\n// copy array helper\n// mimics memcpy as much as possible\nexport function arrayCopyHelper(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] = source[sourceIndex + offset];\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\nexport class ProtoUtil {\n static tensorDataTypeFromProto(typeProto: onnx.TensorProto.DataType|\n onnxruntime.experimental.fbs.TensorDataType): Tensor.DataType {\n switch (typeProto) {\n case onnx.TensorProto.DataType.INT8:\n return 'int8';\n case onnx.TensorProto.DataType.UINT8:\n return 'uint8';\n case onnx.TensorProto.DataType.BOOL:\n return 'bool';\n case onnx.TensorProto.DataType.INT16:\n return 'int16';\n case onnx.TensorProto.DataType.UINT16:\n return 'uint16';\n case onnx.TensorProto.DataType.INT32:\n return 'int32';\n case onnx.TensorProto.DataType.UINT32:\n return 'uint32';\n case onnx.TensorProto.DataType.FLOAT:\n return 'float32';\n case onnx.TensorProto.DataType.DOUBLE:\n return 'float64';\n case onnx.TensorProto.DataType.STRING:\n return 'string';\n\n // For INT64/UINT64, reduce their value to 32-bits.\n // Should throw exception when overflow\n case onnx.TensorProto.DataType.INT64:\n return 'int32';\n case onnx.TensorProto.DataType.UINT64:\n return 'uint32';\n\n default:\n throw new Error(`unsupported data type: ${onnx.TensorProto.DataType[typeProto]}`);\n }\n }\n\n static tensorDataTypeStringToEnum(type: string): onnx.TensorProto.DataType {\n switch (type) {\n case 'int8':\n return onnx.TensorProto.DataType.INT8;\n case 'uint8':\n return 
onnx.TensorProto.DataType.UINT8;\n case 'bool':\n return onnx.TensorProto.DataType.BOOL;\n case 'int16':\n return onnx.TensorProto.DataType.INT16;\n case 'uint16':\n return onnx.TensorProto.DataType.UINT16;\n case 'int32':\n return onnx.TensorProto.DataType.INT32;\n case 'uint32':\n return onnx.TensorProto.DataType.UINT32;\n case 'float32':\n return onnx.TensorProto.DataType.FLOAT;\n case 'float64':\n return onnx.TensorProto.DataType.DOUBLE;\n case 'string':\n return onnx.TensorProto.DataType.STRING;\n case 'int64':\n return onnx.TensorProto.DataType.INT64;\n case 'uint64':\n return onnx.TensorProto.DataType.UINT64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n }\n\n static tensorDimsFromProto(dims: Array): number[] {\n // get rid of Long type for dims\n return dims.map(d => Long.isLong(d) ? d.toNumber() : d);\n }\n\n static tensorValueTypeFromProto(valueType: onnx.TypeProto.ITensor): Graph.ValueType {\n return {\n tensorType: ProtoUtil.tensorDataTypeFromProto(valueType.elemType!),\n shape: {dims: ProtoUtil.tensorDimsFromProto(valueType.shape!.dim!.map(d => d.dimValue!))}\n };\n }\n\n static tensorDimsFromORTFormat(tensor: onnxruntime.experimental.fbs.Tensor) {\n const dims = [];\n for (let i = 0; i < tensor.dimsLength(); i++) {\n dims.push(LongUtil.longToNumber(tensor.dims(i)!));\n }\n return dims;\n }\n\n static tensorAttributesFromORTFormat(node: onnxruntime.experimental.fbs.Node) {\n const attributes = [];\n for (let i = 0; i < node.attributesLength(); i++) {\n attributes.push(node.attributes(i)!);\n }\n return attributes;\n }\n}\n\nexport class LongUtil {\n // This function is called to get a number from long type of data for attribute, dim, and ir version,\n // which values are signed integers.\n // To make it more generic, add an optional parameter to convert to a unsigned number.\n static longToNumber(n: Long|flatbuffers.Long|number, unsigned?: boolean) {\n if (Long.isLong(n)) {\n return n.toNumber();\n } else if (n instanceof flatbuffers.Long) {\n return Long.fromValue({low: n.low, high: n.high, unsigned: unsigned ?? false}).toNumber();\n }\n return n;\n }\n static isLong(n: unknown) {\n return Long.isLong(n) || n instanceof flatbuffers.Long;\n }\n}\n\nexport class ShapeUtil {\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n // `axis` inclusive\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n // `axis` exclusive\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be 0 or negative.\n if (dims[i] <= 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. 
Most likely the range contains 0 or negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n static transpose(dims: readonly number[]): readonly number[] {\n const copy = dims.slice();\n return copy.reverse();\n }\n\n static indicesToOffset(indices: readonly number[], strides: readonly number[], axis?: number): number {\n if (axis === undefined) {\n axis = indices.length;\n }\n let offset = 0;\n for (let i = 0; i < axis; ++i) {\n offset += strides[i] * indices[i];\n }\n return offset;\n }\n\n static offsetToIndices(offset: number, strides: readonly number[]): readonly number[] {\n const rank = strides.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [offset * strides[0]];\n }\n const indices: number[] = new Array(strides.length);\n for (let i = 0; i < indices.length - 1; ++i) {\n indices[i] = Math.floor(offset / strides[i]);\n offset -= indices[i] * strides[i];\n }\n indices[indices.length - 1] = offset;\n return indices;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank));\n }\n\n // Increment an index into a tensor (in lexicographic\n // ordering), wrapping around the specified upper_bound.\n /**\n * Increment an index into a tensor (in lexicographic ordering), wrapping around the specified upper_bound.\n * @param index Given index to increment (Will be mutated)\n * @param dims The dimensions of the tensor for which the given index corresponds to\n * @param axisToIncrementOn The 1-indexed axis to increment on. 
If undefined, axisToIncrementOn == rank\n */\n static incrementIndex(index: number[], dims: readonly number[], axisToIncrementOn?: number) {\n if (dims.length === 0 || index.length === 0) {\n throw new Error('Index incrementing unsupported for scalar Tensor');\n }\n if (axisToIncrementOn === undefined) {\n axisToIncrementOn = dims.length;\n } else {\n if (axisToIncrementOn <= 0 || axisToIncrementOn > dims.length) {\n throw new Error('Incorrect axis to increment on');\n }\n }\n\n for (let k = axisToIncrementOn - 1; k >= 0; --k) {\n index[k]++;\n if (index[k] < dims[k]) {\n break;\n }\n index[k] = 0;\n }\n }\n\n /**\n * Produces a new dimensions array based on the values in the 'originalDimensions' and 'shape' array\n * Used in Reshape\n * @param originalDims Original Shape array\n * @param shapeHints array containing values to compute the new dimensions\n * For example:\n * originalDims = [2,2] and shapeHints = [0,-1] will return [2,2]\n * originalDims = [2,2] and shapeHints = [4] will return [4]\n * originalDims = [2,2] and shapeHints = [5] will throw an exception\n * https://github.com/onnx/onnx/blob/main/docs/Operators.md#Reshape\n */\n\n static calculateReshapedDims(originalDims: readonly number[], shapeHints: ArrayLike): number[] {\n // reshape to a Scalar Tensor\n if (shapeHints.length === 0) {\n if (originalDims.length === 0 || ShapeUtil.size(originalDims) === 1) {\n return [];\n } else {\n throw new Error('cannot reshape to a scalar Tensor');\n }\n }\n\n const nDims = shapeHints.length;\n const reshapedDims = new Array(nDims);\n let unknownDimension = -1;\n let newTensorSize = 1;\n for (let i = 0; i < nDims; i++) {\n if (shapeHints[i] < -1) {\n throw new Error('a dimension in shape hints cannot be less than -1');\n }\n if (shapeHints[i] === -1) {\n if (unknownDimension !== -1) {\n throw new Error('at most one dimension in shape hints can be -1');\n }\n unknownDimension = i;\n } else {\n if (shapeHints[i] === 0) {\n if (i >= originalDims.length) {\n throw new Error('the dimension with value zero exceeds the dimension size of the input tensor');\n }\n reshapedDims[i] = originalDims[i];\n } else {\n reshapedDims[i] = shapeHints[i];\n }\n newTensorSize *= reshapedDims[i];\n }\n }\n\n const oldTensorSize = ShapeUtil.size(originalDims);\n if (unknownDimension !== -1) {\n if (oldTensorSize % newTensorSize !== 0) {\n throw new Error(`the input tensor cannot be reshaped to the requested shape. 
Input shape: [${\n originalDims}] Output shape: [${shapeHints}]`);\n }\n reshapedDims[unknownDimension] = oldTensorSize / newTensorSize;\n }\n // validate sizes from originalDims and reshapedDims match\n else {\n if (newTensorSize !== oldTensorSize) {\n throw new Error('reshapedDims and originalDims don\\'t have matching sizes');\n }\n }\n return reshapedDims;\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n\n /**\n * Validates if the given `dims` or `shape` is valid in ONNX.js context and returns data size\n * @param dims - input `dims` that needs to be checked\n */\n static validateDimsAndCalcSize(dims: readonly number[]): number {\n if (dims.length > 6) {\n throw new TypeError('Only rank 0 to 6 is supported for tensor shape.');\n }\n let size = 1;\n for (const n of dims) {\n if (!Number.isInteger(n)) {\n throw new TypeError(`Invalid shape: ${n} is not an integer`);\n }\n if (n < 0 || n > 2147483647) {\n throw new TypeError(`Invalid shape: length ${n} is not allowed`);\n }\n size *= n;\n }\n return size;\n }\n\n /**\n * Determines the shape of output tensor y = flatten(x, axis)\n * @param dims - shape of input tensor\n * @param axis - flatten axis, in the range [-r, r]\n */\n static flattenShape(dims: readonly number[], axis: number): readonly number[] {\n if (axis < 0) {\n axis += dims.length;\n }\n const total = dims.reduce((x, y) => x * y, 1);\n const right = dims.slice(axis).reduce((x, y) => x * y, 1);\n const outputDims = [total / right, right];\n\n return outputDims;\n }\n\n /**\n * Determines the shape of output tensor y = squeeze(x, axes)\n * @param dims - shape of input tensor\n * @param axes - squeeze axes\n */\n static squeezeShape(dims: readonly number[], axes: readonly number[]): readonly number[] {\n const outputDims = new Array();\n\n // sanity check\n axes = ShapeUtil.normalizeAxes(axes, dims.length);\n\n for (let i = 0; i < dims.length; i++) {\n const inSqueezeList = axes.indexOf(i) >= 0;\n if (inSqueezeList && dims[i] !== 1) {\n throw new Error('squeeze an axis of size different than 1');\n }\n\n if ((axes.length === 0 && dims[i] > 1) || (axes.length > 0 && !inSqueezeList)) {\n outputDims.push(dims[i]);\n }\n }\n\n return outputDims;\n }\n\n /**\n * Determines the shape of output tensor y = unsqueeze(x, axes)\n * @param dims - shape of input tensor\n * @param axes - unsqueeze axes\n */\n static unsqueezeShape(dims: readonly number[], axes: readonly number[]): readonly number[] {\n const outputDims = new Array(dims.length + axes.length);\n\n // initialize the array elements to 0\n outputDims.fill(0);\n\n // set all axes 
indices to 1 in outputDims and check for duplicates\n for (let i = 0; i < axes.length; i++) {\n const axis = ShapeUtil.normalizeAxis(axes[i], outputDims.length);\n if (axis >= outputDims.length) {\n throw new Error('\\'axes\\' has an out of range axis');\n }\n if (outputDims[axis] !== 0) {\n throw new Error('\\'axes\\' has a duplicate axis');\n }\n\n outputDims[axis] = 1;\n }\n\n // fill in the zero entries of outputDims with the input tensor's shape\n let inputDimsIterator = 0;\n for (let i = 0; i < outputDims.length; i++) {\n if (outputDims[i] === 0) {\n outputDims[i] = dims[inputDimsIterator++];\n }\n }\n\n // sanity check assertion. 'inputDimsIterator'\n // should be equal to the length of 'dims'\n if (inputDimsIterator !== dims.length) {\n throw new Error('the unsqueezed dimension could not be established');\n }\n\n return outputDims;\n }\n}\n\n// bunch of helper methods that do a variety of math operations\nexport class MathUtil {\n // y = (x*x) + y\n static sqr(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] += Math.pow(source[sourceIndex + offset], 2);\n }\n }\n\n // y = ax + y\n static axpy(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number, alpha: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] += (alpha * source[sourceIndex + offset]);\n }\n }\n\n // y = pow(x, b)\n static powx(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number, b: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] = Math.pow(source[sourceIndex + offset], b);\n }\n }\n\n // y = x * y\n static mul(\n target: number[]|Tensor.NumberType, source: number[]|Tensor.NumberType, targetIndex: number, sourceIndex: number,\n blockSize: number) {\n if (sourceIndex < 0 || sourceIndex >= source.length) {\n throw new Error('sourceIndex out of 
bounds');\n }\n if (targetIndex < 0 || targetIndex >= target.length) {\n throw new Error('targetIndex out of bounds');\n }\n if (sourceIndex + blockSize > source.length) {\n throw new Error('source indices to be copied are outside bounds');\n }\n if (targetIndex + blockSize > target.length) {\n throw new Error('target array is too small to hold result');\n }\n\n for (let offset = 0; offset < blockSize; offset++) {\n target[targetIndex + offset] = (source[sourceIndex + offset] * target[targetIndex + offset]);\n }\n }\n}\n\nexport class SplitUtil {\n /**\n * Calculates new Shapes from existing one and the splits given along the axis provides\n * @param dims Shape of the Tensor to be splitted into two or more Shapes\n * @param axis The dimension along which the Tensor will be split\n * @param splits Offsets for the start of each split\n */\n static splitShape(dims: readonly number[], axis: number, split: number[], numOutputs?: number):\n [number[][], number[]] {\n if (split.length === 0) {\n if (!numOutputs) {\n throw new Error('need to know number of outputs when the \\'split\\' attribute is not specified');\n }\n SplitUtil.determineSplit(dims[axis], numOutputs, split);\n }\n\n const shapes: number[][] = [];\n const offsets = [0];\n for (let i = 0; i < split.length; ++i) {\n if (i !== 0) {\n offsets.push(offsets[i - 1] + split[i - 1]);\n }\n const shape = dims.slice();\n shape[axis] = split[i];\n shapes.push(shape);\n }\n return [shapes, offsets];\n }\n\n static determineSplit(numElementsAlongAxis: number, numOutputs: number, split: number[]) {\n // If 'split' is not specified by the user, we need to partition the number of elements equally among the outputs\n if (numElementsAlongAxis % numOutputs !== 0) {\n throw new Error('cannot split tensor to equal sized parts');\n }\n for (let i = 0; i < numOutputs; ++i) {\n split.push(numElementsAlongAxis / numOutputs);\n }\n }\n}\n\nexport class ReduceUtil {\n /**\n * Perform reduce operations on the specific operator\n * @param a Input tensor data\n * @param axes The dimensions along which the Tensor will be reduced\n * @param keepdims If set to true, the axes which are reduced are left in the\n * result as dimensions with size one.\n * @param op1 The operation to be performed on each element in the tensor\n * @param op2 The operation to be performed between elements in the tensor\n */\n static calcReduce(\n a: Tensor, axes: number[], keepdims: boolean, op1: (b: number) => number,\n op2: (a: number, b: number) => number): Tensor {\n const dims = a.dims.slice(0);\n // if axes is not set, perform reduce on all axes\n if (axes.length === 0) {\n dims.forEach((_d, ind) => axes.push(ind));\n }\n // get a temporary broadcastable output shape\n const outputDims = ReduceUtil.calcReduceShape(dims, axes, true);\n\n // loop through the output and calculate result one by one\n const size = ShapeUtil.size(outputDims);\n const y = new Tensor(outputDims, a.type);\n const strides = ShapeUtil.computeStrides(outputDims);\n const inputStrides = ShapeUtil.computeStrides(dims);\n const indicesY = new Array(dims.length);\n for (let i = 0; i < size; i++) {\n const indices = ShapeUtil.offsetToIndices(i, strides);\n // map index\n BroadcastUtil.fillIndex(indices, dims, indicesY);\n y.set(\n indices,\n ReduceUtil.calcReduceByAxis(\n a.numberData, axes, dims, 0, ShapeUtil.indicesToOffset(indicesY, inputStrides), op1, op2));\n }\n\n if (keepdims) {\n return y;\n } else {\n // keepdims == 0, calculate the expected shape\n return new Tensor(\n ReduceUtil.calcReduceShape(dims, 
axes, keepdims), y.type, undefined, undefined, y.data, y.dataId);\n }\n }\n\n /**\n * Perform reduce operations on the specific operator on specific axes\n * @param a Input tensor data\n * @param axes The dimensions along which the Tensor will be reduced\n * @param dims The input dimension.\n * @param curAxisInd Index in axes specifying the current dimension along\n * which the tensor will be reduced\n * @param pos The current index of element to perform operation\n * @param op1 The operation to be performed on each element in the tensor\n * @param op2 The operation to be performed between elements in the tensor\n */\n static calcReduceByAxis(\n input: Tensor.NumberType, axes: number[], dims: number[], curAxisInd: number, pos: number,\n op1: (b: number) => number, op2: (a: number, b: number) => number): number {\n let res = 0;\n if (curAxisInd >= axes.length) {\n return op1(input[pos]);\n }\n const axis = axes[curAxisInd];\n const step = axis >= dims.length ? 1 : ShapeUtil.size(dims.slice(axis + 1));\n for (let i = 0; i < dims[axis]; i++) {\n res = i === 0 ? ReduceUtil.calcReduceByAxis(input, axes, dims, curAxisInd + 1, pos, op1, op2) :\n op2(res, ReduceUtil.calcReduceByAxis(input, axes, dims, curAxisInd + 1, pos, op1, op2));\n pos += step;\n }\n return res;\n }\n\n /**\n * Calculate the expected shape of a reduce operation\n * @param dims The input tensor dimension\n * @param axes The dimensions along which the Tensor will be reduced\n * @param keepdims If set to true, the axes which are reduced are left in the\n * result as dimensions with size one.\n */\n static calcReduceShape(dims: readonly number[], axes: readonly number[], keepDims: boolean): number[] {\n const outputDims = dims.slice();\n for (let i = 0; i < axes.length; i++) {\n if (keepDims) {\n outputDims[axes[i]] = 1;\n } else {\n outputDims[axes[i]] = 0;\n }\n }\n return outputDims.filter(dim => dim !== 0);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. 
Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]) {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. 
(inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n\nexport function decodeUtf8String(buffer: Uint8Array): string {\n return new TextDecoder().decode(buffer);\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Guid} from 'guid-typescript';\nimport Long from 'long';\n\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {decodeUtf8String, ProtoUtil, ShapeUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport declare namespace Tensor {\n export interface DataTypeMap {\n bool: Uint8Array;\n float32: Float32Array;\n float64: Float64Array;\n string: string[];\n int8: Int8Array;\n uint8: Uint8Array;\n int16: Int16Array;\n uint16: Uint16Array;\n int32: Int32Array;\n uint32: Uint32Array;\n int64: BigInt64Array;\n }\n\n export type DataType = keyof DataTypeMap;\n\n export type StringType = Tensor.DataTypeMap['string'];\n export type BooleanType = Tensor.DataTypeMap['bool'];\n export type IntegerType = Tensor.DataTypeMap['int8']|Tensor.DataTypeMap['uint8']|Tensor.DataTypeMap['int16']|\n Tensor.DataTypeMap['uint16']|Tensor.DataTypeMap['int32']|Tensor.DataTypeMap['uint32'];\n export type FloatType = Tensor.DataTypeMap['float32']|Tensor.DataTypeMap['float64'];\n export type NumberType = BooleanType|IntegerType|FloatType;\n\n export type Id = Guid;\n}\n\ntype TensorData = Tensor.DataTypeMap[Tensor.DataType];\n\ntype DataProvider = (id: Tensor.Id) => TensorData;\ntype AsyncDataProvider = (id: Tensor.Id) => Promise;\n\nexport class Tensor {\n /**\n * get the underlying tensor data\n */\n get data(): TensorData {\n if (this.cache === undefined) {\n const data = this.dataProvider!(this.dataId);\n if (data.length !== this.size) {\n throw new Error('Length of data provided by the Data Provider is inconsistent with the dims of this Tensor.');\n }\n this.cache = data;\n }\n return this.cache;\n }\n\n /**\n * get the underlying string tensor data. Should only use when type is STRING\n */\n get stringData() {\n if (this.type !== 'string') {\n throw new TypeError('data type is not string');\n }\n\n return this.data as Tensor.StringType;\n }\n\n /**\n * get the underlying integer tensor data. Should only use when type is one of the following: (UINT8, INT8, UINT16,\n * INT16, INT32, UINT32, BOOL)\n */\n get integerData() {\n switch (this.type) {\n case 'uint8':\n case 'int8':\n case 'uint16':\n case 'int16':\n case 'int32':\n case 'uint32':\n case 'bool':\n return this.data as Tensor.IntegerType;\n\n default:\n throw new TypeError('data type is not integer (uint8, int8, uint16, int16, int32, uint32, bool)');\n }\n }\n\n /**\n * get the underlying float tensor data. Should only use when type is one of the following: (FLOAT, DOUBLE)\n */\n get floatData() {\n switch (this.type) {\n case 'float32':\n case 'float64':\n return this.data as Tensor.FloatType;\n\n default:\n throw new TypeError('data type is not float (float32, float64)');\n }\n }\n\n /**\n * get the underlying number tensor data. 
Should only use when type is one of the following: (UINT8, INT8, UINT16,\n * INT16, INT32, UINT32, BOOL, FLOAT, DOUBLE)\n */\n get numberData() {\n if (this.type !== 'string') {\n return this.data as Tensor.NumberType;\n }\n throw new TypeError('type cannot be non-number (string)');\n }\n\n /**\n * get value of an element at the given indices\n */\n get(indices: readonly number[]): Tensor.DataTypeMap[Tensor.DataType][number] {\n return this.data[ShapeUtil.indicesToOffset(indices, this.strides)];\n }\n\n /**\n * set value of an element at the given indices\n */\n set(indices: readonly number[], value: Tensor.DataTypeMap[Tensor.DataType][number]) {\n this.data[ShapeUtil.indicesToOffset(indices, this.strides)] = value;\n }\n\n /**\n * get the underlying tensor data asynchronously\n */\n async getData(): Promise {\n if (this.cache === undefined) {\n this.cache = await this.asyncDataProvider!(this.dataId);\n }\n return this.cache;\n }\n\n /**\n * get the number of elements in the tensor\n */\n public readonly size: number;\n\n private _strides: readonly number[];\n /**\n * get the strides for each dimension\n */\n get strides(): readonly number[] {\n if (!this._strides) {\n this._strides = ShapeUtil.computeStrides(this.dims);\n }\n return this._strides;\n }\n\n constructor(\n /**\n * get the dimensions of the tensor\n */\n public readonly dims: readonly number[],\n /**\n * get the type of the tensor\n */\n public readonly type: Tensor.DataType, private dataProvider?: DataProvider,\n private asyncDataProvider?: AsyncDataProvider, private cache?: TensorData,\n /**\n * get the data ID that used to map to a tensor data\n */\n public readonly dataId: Guid = Guid.create()) {\n this.size = ShapeUtil.validateDimsAndCalcSize(dims);\n const size = this.size;\n const empty = (dataProvider === undefined && asyncDataProvider === undefined && cache === undefined);\n\n if (cache !== undefined) {\n if (cache.length !== size) {\n throw new RangeError('Input dims doesn\\'t match data length.');\n }\n }\n\n if (type === 'string') {\n if (cache !== undefined && (!Array.isArray(cache) || !cache.every(i => typeof i === 'string'))) {\n throw new TypeError('cache should be a string array');\n }\n\n if (empty) {\n this.cache = new Array(size);\n }\n } else {\n if (cache !== undefined) {\n const constructor = dataviewConstructor(type);\n if (!(cache instanceof constructor)) {\n throw new TypeError(`cache should be type ${constructor.name}`);\n }\n }\n\n if (empty) {\n const buf = new ArrayBuffer(size * sizeof(type));\n this.cache = createView(buf, type);\n }\n }\n }\n\n /**\n * Construct new Tensor from a ONNX Tensor object\n * @param tensorProto the ONNX Tensor\n */\n static fromProto(tensorProto: onnx.ITensorProto): Tensor {\n if (!tensorProto) {\n throw new Error('cannot construct Value from an empty tensor');\n }\n const type = ProtoUtil.tensorDataTypeFromProto(tensorProto.dataType!);\n const dims = ProtoUtil.tensorDimsFromProto(tensorProto.dims!);\n\n const value = new Tensor(dims, type);\n\n if (type === 'string') {\n // When it's STRING type, the value should always be stored in field\n // 'stringData'\n tensorProto.stringData!.forEach((str, i) => {\n value.data[i] = decodeUtf8String(str);\n });\n\n } else if (\n tensorProto.rawData && typeof tensorProto.rawData.byteLength === 'number' &&\n tensorProto.rawData.byteLength > 0) {\n // NOT considering segment for now (IMPORTANT)\n\n // populate value from rawData\n const dataDest = value.data;\n const dataSource =\n new DataView(tensorProto.rawData.buffer, 
tensorProto.rawData.byteOffset, tensorProto.rawData.byteLength);\n const elementSize = sizeofProto(tensorProto.dataType!);\n const length = tensorProto.rawData.byteLength / elementSize;\n\n if (tensorProto.rawData.byteLength % elementSize !== 0) {\n throw new Error('invalid buffer length');\n }\n if (dataDest.length !== length) {\n throw new Error('buffer length mismatch');\n }\n\n for (let i = 0; i < length; i++) {\n const n = readProto(dataSource, tensorProto.dataType!, i * elementSize);\n dataDest[i] = n;\n }\n } else {\n // populate value from array\n let array: Array;\n switch (tensorProto.dataType) {\n case onnx.TensorProto.DataType.FLOAT:\n array = tensorProto.floatData!;\n break;\n case onnx.TensorProto.DataType.INT32:\n case onnx.TensorProto.DataType.INT16:\n case onnx.TensorProto.DataType.UINT16:\n case onnx.TensorProto.DataType.INT8:\n case onnx.TensorProto.DataType.UINT8:\n case onnx.TensorProto.DataType.BOOL:\n array = tensorProto.int32Data!;\n break;\n case onnx.TensorProto.DataType.INT64:\n array = tensorProto.int64Data!;\n break;\n case onnx.TensorProto.DataType.DOUBLE:\n array = tensorProto.doubleData!;\n break;\n case onnx.TensorProto.DataType.UINT32:\n case onnx.TensorProto.DataType.UINT64:\n array = tensorProto.uint64Data!;\n break;\n default:\n // should never run here\n throw new Error('unspecific error');\n }\n\n if (array === null || array === undefined) {\n throw new Error('failed to populate data from a tensorproto value');\n }\n\n const data = value.data;\n if (data.length !== array.length) {\n throw new Error('array length mismatch');\n }\n\n for (let i = 0; i < array.length; i++) {\n const element = array[i];\n if (Long.isLong(element)) {\n data[i] = longToNumber(element, tensorProto.dataType);\n } else {\n data[i] = element;\n }\n }\n }\n\n return value;\n }\n\n /**\n * Construct new Tensor from raw data\n * @param data the raw data object. 
Should be a string array for 'string' tensor, and the corresponding typed array\n * for other types of tensor.\n * @param dims the dimensions of the tensor\n * @param type the type of the tensor\n */\n static fromData(data: Tensor.DataTypeMap[Tensor.DataType], dims: readonly number[], type: Tensor.DataType) {\n return new Tensor(dims, type, undefined, undefined, data);\n }\n\n static fromOrtTensor(ortTensor: ortFbs.Tensor) {\n if (!ortTensor) {\n throw new Error('cannot construct Value from an empty tensor');\n }\n const dims = ProtoUtil.tensorDimsFromORTFormat(ortTensor);\n const type = ProtoUtil.tensorDataTypeFromProto(ortTensor.dataType());\n\n const value = new Tensor(dims, type);\n\n if (type === 'string') {\n // When it's STRING type, the value should always be stored in field\n // 'stringData'\n for (let i = 0; i < ortTensor.stringDataLength(); i++) {\n value.data[i] = ortTensor.stringData(i);\n }\n\n } else if (\n ortTensor.rawDataArray() && typeof ortTensor.rawDataLength() === 'number' && ortTensor.rawDataLength() > 0) {\n // NOT considering segment for now (IMPORTANT)\n\n // populate value from rawData\n const dataDest = value.data;\n const dataSource = new DataView(\n ortTensor.rawDataArray()!.buffer, ortTensor.rawDataArray()!.byteOffset, ortTensor.rawDataLength());\n const elementSize = sizeofProto(ortTensor.dataType());\n const length = ortTensor.rawDataLength() / elementSize;\n\n if (ortTensor.rawDataLength() % elementSize !== 0) {\n throw new Error('invalid buffer length');\n }\n if (dataDest.length !== length) {\n throw new Error('buffer length mismatch');\n }\n\n for (let i = 0; i < length; i++) {\n const n = readProto(dataSource, ortTensor.dataType(), i * elementSize);\n dataDest[i] = n;\n }\n }\n return value;\n }\n}\n\nfunction sizeof(type: Tensor.DataType): number {\n switch (type) {\n case 'bool':\n case 'int8':\n case 'uint8':\n return 1;\n case 'int16':\n case 'uint16':\n return 2;\n case 'int32':\n case 'uint32':\n case 'float32':\n return 4;\n case 'float64':\n return 8;\n default:\n throw new Error(`cannot calculate sizeof() on type ${type}`);\n }\n}\n\nfunction sizeofProto(type: onnx.TensorProto.DataType|ortFbs.TensorDataType): number {\n switch (type) {\n case onnx.TensorProto.DataType.UINT8:\n case onnx.TensorProto.DataType.INT8:\n case onnx.TensorProto.DataType.BOOL:\n return 1;\n case onnx.TensorProto.DataType.UINT16:\n case onnx.TensorProto.DataType.INT16:\n return 2;\n case onnx.TensorProto.DataType.FLOAT:\n case onnx.TensorProto.DataType.INT32:\n case onnx.TensorProto.DataType.UINT32:\n return 4;\n case onnx.TensorProto.DataType.INT64:\n case onnx.TensorProto.DataType.DOUBLE:\n case onnx.TensorProto.DataType.UINT64:\n return 8;\n default:\n throw new Error(`cannot calculate sizeof() on type ${onnx.TensorProto.DataType[type]}`);\n }\n}\n\nfunction createView(dataBuffer: ArrayBuffer, type: Tensor.DataType) {\n return new (dataviewConstructor(type))(dataBuffer);\n}\n\nfunction dataviewConstructor(type: Tensor.DataType) {\n switch (type) {\n case 'bool':\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'int16':\n return Int16Array;\n case 'uint16':\n return Uint16Array;\n case 'int32':\n return Int32Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'float32':\n return Float32Array;\n case 'float64':\n return Float64Array;\n default:\n // should never run to here\n throw new Error('unspecified error');\n }\n}\n\n// convert a long number to a 32-bit integer (cast-down)\nfunction 
longToNumber(i: Long, type: onnx.TensorProto.DataType|ortFbs.TensorDataType): number {\n // INT64, UINT32, UINT64\n if (type === onnx.TensorProto.DataType.INT64 || type === ortFbs.TensorDataType.INT64) {\n if (i.greaterThanOrEqual(2147483648) || i.lessThan(-2147483648)) {\n throw new TypeError('int64 is not supported');\n }\n } else if (\n type === onnx.TensorProto.DataType.UINT32 || type === ortFbs.TensorDataType.UINT32 ||\n type === onnx.TensorProto.DataType.UINT64 || type === ortFbs.TensorDataType.UINT64) {\n if (i.greaterThanOrEqual(4294967296) || i.lessThan(0)) {\n throw new TypeError('uint64 is not supported');\n }\n } else {\n throw new TypeError(`not a LONG type: ${onnx.TensorProto.DataType[type]}`);\n }\n\n return i.toNumber();\n}\n\n// read one value from TensorProto\nfunction readProto(view: DataView, type: onnx.TensorProto.DataType|ortFbs.TensorDataType, byteOffset: number): number {\n switch (type) {\n case onnx.TensorProto.DataType.BOOL:\n case onnx.TensorProto.DataType.UINT8:\n return view.getUint8(byteOffset);\n case onnx.TensorProto.DataType.INT8:\n return view.getInt8(byteOffset);\n case onnx.TensorProto.DataType.UINT16:\n return view.getUint16(byteOffset, true);\n case onnx.TensorProto.DataType.INT16:\n return view.getInt16(byteOffset, true);\n case onnx.TensorProto.DataType.FLOAT:\n return view.getFloat32(byteOffset, true);\n case onnx.TensorProto.DataType.INT32:\n return view.getInt32(byteOffset, true);\n case onnx.TensorProto.DataType.UINT32:\n return view.getUint32(byteOffset, true);\n case onnx.TensorProto.DataType.INT64:\n return longToNumber(\n Long.fromBits(view.getUint32(byteOffset, true), view.getUint32(byteOffset + 4, true), false), type);\n case onnx.TensorProto.DataType.DOUBLE:\n return view.getFloat64(byteOffset, true);\n case onnx.TensorProto.DataType.UINT64:\n return longToNumber(\n Long.fromBits(view.getUint32(byteOffset, true), view.getUint32(byteOffset + 4, true), true), type);\n default:\n throw new Error(`cannot read from DataView for type ${onnx.TensorProto.DataType[type]}`);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * represent a version irrelevant abstraction of for GLSL source code\n */\nexport interface Glsl {\n readonly version: string;\n readonly attribute: string;\n readonly varyingVertex: string;\n readonly varyingFrag: string;\n readonly texture2D: string;\n readonly output: string;\n readonly outputDeclaration: string;\n}\n\nconst GLSL_ES_2_0: Glsl = {\n version: '',\n attribute: 'attribute',\n varyingVertex: 'varying',\n varyingFrag: 'varying',\n texture2D: 'texture2D',\n output: 'gl_FragColor',\n outputDeclaration: '',\n};\nconst GLSL_ES_3_0: Glsl = {\n version: '#version 300 es',\n attribute: 'in',\n varyingVertex: 'out',\n varyingFrag: 'in',\n texture2D: 'texture',\n output: 'outputColor',\n outputDeclaration: 'out vec4 outputColor;',\n};\n\nexport function getGlsl(version: 1|2) {\n return version === 1 ? 
GLSL_ES_2_0 : GLSL_ES_3_0;\n}\n\nexport function getVertexShaderSource(version: 1|2): string {\n const glsl = getGlsl(version);\n return `${glsl.version}\n precision highp float;\n ${glsl.attribute} vec3 position;\n ${glsl.attribute} vec2 textureCoord;\n\n ${glsl.varyingVertex} vec2 TexCoords;\n\n void main()\n {\n gl_Position = vec4(position, 1.0);\n TexCoords = textureCoord;\n }`;\n}\n\nexport function getFragShaderPreamble(version: 1|2): string {\n const glsl = getGlsl(version);\n return `${glsl.version}\n precision highp float;\n precision highp int;\n precision highp sampler2D;\n ${glsl.varyingFrag} vec2 TexCoords;\n ${glsl.outputDeclaration}\n const vec2 halfCR = vec2(0.5, 0.5);\n\n // Custom vector types to handle higher dimenalities.\n struct ivec5\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n };\n\n struct ivec6\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n };\n\n int imod(int x, int y) {\n return x - y * (x / y);\n }\n\n `;\n}\n\nexport function getDefaultFragShaderMain(version: 1|2, outputShapeLength: number): string {\n const glsl = getGlsl(version);\n return `\n void main() {\n int indices[${outputShapeLength}];\n toVec(TexCoords, indices);\n vec4 result = vec4(process(indices));\n ${glsl.output} = result;\n }\n `;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../tensor';\n\n/**\n * Layout info is used for mapping n-dimensional array to 2D textures\n * The layout is created by the TextureLayoutStrategy based on\n * the Tensor's dimensions and strides\n */\nexport interface TextureLayout {\n width: number;\n height: number;\n /**\n * specify the number of value that encoded in a single pixel\n */\n channels: 1|2|3|4;\n /**\n * whether in packed mode or not\n */\n isPacked?: boolean;\n /**\n * the normalized shape\n */\n shape: readonly number[];\n /**\n * the stride of each dimensions, calculated according to shape\n */\n strides: readonly number[];\n /**\n * the original shape(dims) of the corresponding tensor\n */\n unpackedShape: readonly number[];\n\n reversedWH?: boolean;\n}\nexport interface TextureData extends TextureLayout {\n tensor: Tensor;\n texture: WebGLTexture;\n}\n\nexport enum TextureType {\n unpacked, // <-- normal unpacked texture\n unpackedReversed, // <-- unpacked texture used in old ONNX.js implementation (deprecated)\n packed, // <-- normal packed texture\n downloadUint8AsFloat, // <-- ONLY used in texture downloading for iOS devices\n packedLastDimension // <-- ONLY used in old ONNX.js Conv implementation for input W (deprecated)\n}\n\nexport interface TensorInfo {\n id?: Tensor.Id;\n dims: readonly number[];\n type: Tensor.DataType;\n textureType: TextureType;\n}\n\nexport interface ProgramVariable {\n type: 'float'|'int';\n name: string;\n arrayLength?: number;\n data: number|number[];\n}\n\n/**\n * A set of metadata of a shader program.\n */\nexport interface ProgramMetadata {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n /**\n * texture types for each input\n */\n inputTypes: TextureType[];\n /**\n * names of each input\n */\n inputNames: string[];\n /**\n * an optional string as a cache hint in the artifact cache\n */\n cacheHint?: string;\n}\n\n/**\n * A ProgramInfoLoader allows\n */\nexport interface ProgramInfoLoader extends ProgramMetadata {\n /**\n * a function to get the program info\n */\n get(): ProgramInfo;\n}\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo extends ProgramMetadata {\n /**\n * information of uniform variables\n */\n variables?: ProgramVariable[];\n /**\n * tensor info for output\n */\n output: TensorInfo;\n /**\n * the shader's processing source code\n */\n shaderSource: string;\n /**\n * whether the shader source contains a customized main function implementation\n */\n hasMain?: boolean;\n}\n\nexport interface VariableInfo {\n type: 'float'|'int';\n name: string;\n arrayLength?: number;\n}\n\nexport interface ProgramVariable {\n type: 'float'|'int';\n name: string;\n arrayLength?: number;\n data: number|number[];\n}\n\n/**\n * Information of uniforms that shader uses\n */\nexport interface UniformInfo {\n type: 'sampler2D'|VariableInfo['type'];\n name: string;\n arrayLength?: number;\n}\n\nexport interface UniformLocation extends UniformInfo {\n location: WebGLUniformLocation;\n}\n\n/**\n * Artifact is the result of compilation\n * It does not contain input of output data\n * However anything that could be run as a \"program\"\n */\nexport interface Artifact {\n programInfo: ProgramInfo;\n program: WebGLProgram;\n uniformLocations: UniformLocation[];\n attribLocations: {position: number; textureCoord: number};\n}\nexport declare namespace Artifact {\n type UniformLocations = Artifact['uniformLocations'];\n type AttribLocations = Artifact['attribLocations'];\n}\n\nexport interface UniformData {\n [name: string]: number|number[];\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {assert} from '../../util';\n/**\n * Given a non RGBA shape calculate the R version\n * It is assumed that the dimensions are multiples of given channels\n * NOTE: it is always the last dim that gets packed.\n * @param unpackedShape original shape to create a packed version from\n */\nexport function getPackedShape(unpackedShape: readonly number[]): readonly number[] {\n const len = unpackedShape.length;\n return unpackedShape.slice(0, len - 1).concat(unpackedShape[len - 1] / 4);\n}\n\nexport async function repeatedTry(\n checkFn: () => boolean, delayFn = (_counter: number) => 0, maxCounter?: number): Promise {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n\n tryCount++;\n\n const nextBackoff = delayFn(tryCount);\n\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n\n tryFn();\n });\n}\n\n/**\n * Generates the function name from an input sampler name.\n * @param samplerName Name of the sampler.\n */\nexport function generateShaderFuncNameFromInputSamplerName(samplerName: string): string {\n assert(typeof samplerName !== 'undefined' && samplerName.length !== 0, () => 'empty string found for sampler name');\n return 'get' + samplerName.charAt(0).toUpperCase() + samplerName.slice(1);\n}\n\n/**\n * Generates the function name from an input sampler name at output coordinates.\n * @param samplerName Name of the sampler.\n */\nexport function generateShaderFuncNameFromInputSamplerNameAtOutCoords(samplerName: string): string {\n assert(typeof samplerName !== 'undefined' && samplerName.length !== 0, () => 'empty string found for sampler name');\n return 'get' + samplerName.charAt(0).toUpperCase() + samplerName.slice(1) + 'AtOutCoords';\n}\n\n/** Returns a new input shape (a copy) that has a squeezed logical shape. */\nexport function squeezeInputShape(inputShape: readonly number[], squeezedShape: number[]): number[] {\n // Deep copy.\n let newInputShape: number[] = JSON.parse(JSON.stringify(inputShape));\n newInputShape = squeezedShape;\n return newInputShape;\n}\n\n/** Returns a list of squeezed parameters for shader functions */\nexport function getSqueezedParams(params: string[], keptDims: number[]): string {\n return keptDims.map(d => params[d]).join(', ');\n}\n\n/** Returns the data type for different ranks. */\nexport function getCoordsDataType(rank: number): string {\n if (rank <= 1) {\n return 'int';\n } else if (rank === 2) {\n return 'ivec2';\n } else if (rank === 3) {\n return 'ivec3';\n } else if (rank === 4) {\n return 'ivec4';\n } else if (rank === 5) {\n return 'ivec5';\n } else if (rank === 6) {\n return 'ivec6';\n } else {\n throw Error(`GPU for rank ${rank} is not yet supported`);\n }\n}\n\nexport function getGlChannels(rank = 6): string[] {\n return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank);\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {getGlChannels} from '../utils';\n\nexport function getVecChannels(name: string, rank: number): string[] {\n return getGlChannels(rank).map(d => `${name}.${d}`);\n}\n\nexport function getChannels(name: string, rank: number): string[] {\n if (rank === 1) {\n return [name];\n }\n return getVecChannels(name, rank);\n}\n\nexport function unpackFromChannel(): string {\n return `\n float getChannel(vec4 frag, int dim) {\n int modCoord = imod(dim, 2);\n return modCoord == 0 ? frag.r : frag.g;\n }\n\n float getChannel(vec4 frag, vec2 innerDims) {\n vec2 modCoord = mod(innerDims, 2.);\n return modCoord.x == 0. ?\n (modCoord.y == 0. ? frag.r : frag.g) :\n (modCoord.y == 0. ? frag.b : frag.a);\n }\n `;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\nimport {getCoordsDataType} from '../utils';\n\nimport {getChannels} from './packing-utils';\n\nconst packProgramMetadata = {\n name: 'pack',\n inputNames: ['A'],\n inputTypes: [TextureType.unpackedReversed]\n};\n\nconst createPackProgramInfo = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfo => {\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const inputShape = input.dims;\n\n const inputRank = inputShape.length;\n // createTextureLayoutFromShape won't change output rank. Need to verify by running tests\n const outputRank = input.dims.length;\n\n const coordsDataType = getCoordsDataType(outputRank);\n const channels = getChannels('rc', outputRank);\n const setup = getSetup(outputRank, channels, inputShape[inputShape.length - 2], inputShape[inputShape.length - 1]);\n\n let reversedInputWH;\n if (inputRank === 0) {\n reversedInputWH = [1, 1];\n } else if (inputRank === 1) {\n reversedInputWH = [inputShape[0], 1];\n } else {\n reversedInputWH = [inputShape[outputRank - 1], inputShape[outputRank - 2]];\n }\n const outOfBoundsCondition = getOutOfBoundsCondition(outputRank, reversedInputWH, channels);\n const output = getOutput(inputShape, channels);\n\n const shaderSource = `\n void main() {\n ${coordsDataType} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n ${glsl.output} = vec4(0);\n } else {\n ${setup}\n\n ${glsl.output} = vec4(${output});\n }\n }\n `;\n return {\n ...packProgramMetadata,\n hasMain: true,\n output: {dims: input.dims, type: input.type, textureType: TextureType.packed},\n shaderSource\n };\n};\n\nexport const createPackProgramInfoLoader = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfoLoader =>\n ({...packProgramMetadata, get: () => createPackProgramInfo(handler, input)});\n\n/**\n * check output coordinate location and return false if it is outside input's width/height boundary\n */\nfunction getOutOfBoundsCondition(rank: number, shape: readonly number[], dims: string[]): string {\n if (rank === 0) {\n return 'false';\n }\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i - rank + 2]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n\n return cond;\n}\n\n/**\n * code snippet to sample input texture with output coordinates\n */\nfunction getOutput(shape: readonly number[], dims: string[]): string {\n const rank = shape.length;\n\n if (rank === 0) {\n 
return 'getA(), 0, 0, 0';\n }\n\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n\n const coord00 = 'r, c';\n const coord01 = 'r, cp1';\n const coord10 = 'rp1, c';\n const coord11 = 'rp1, cp1';\n let D = '';\n if (rank > 2) {\n for (let i = 0; i < rank - 2; ++i) {\n D = D + `${dims[i]},`;\n }\n }\n return `getA(${D}${coord00}),\n rEdge ? 0. : getA(${D}${coord10}),\n cEdge ? 0. : getA(${D}${coord01}),\n rEdge || cEdge ? 0. : getA(${D}${coord11})`;\n}\n\n/**\n * code snippet to setup 4 coordinates and edge conditions\n */\nfunction getSetup(rank: number, dims: string[], rows: number, cols: number): string {\n if (rank === 0 || rank === 1) {\n return '';\n }\n // rank >= 2 for width+height pack.\n else {\n const setup = `\n int r = ${dims[rank - 2]};\n int c = ${dims[rank - 1]};\n int rp1 = ${dims[rank - 2]} + 1;\n int cp1 = ${dims[rank - 1]} + 1;\n bool rEdge = rp1 >= ${cols};\n bool cEdge = cp1 >= ${rows};\n `;\n return setup;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {unpackFromChannel} from './packing-utils';\n\nconst createPackedReshape3DProgramMetadata = (outputShape3D: readonly number[]) =>\n ({name: 'Reshape (packed)', inputTypes: [TextureType.packed], inputNames: ['A'], cacheHint: `${outputShape3D}`});\n\nconst createPackedReshape3DProgramInfo =\n (handler: WebGLInferenceHandler, input3D: Tensor, metadata: ProgramMetadata, outputShape3D: readonly number[]):\n ProgramInfo => {\n const inputShape3D = input3D.dims as [number, number, number];\n const squeezedOutputShape = outputShape3D as [number, number, number];\n\n let mainLoop = '';\n for (let i = 0; i < 4; i++) {\n let outputCoords = '';\n switch (i) {\n case 0:\n outputCoords = 'outputCoords = rc;';\n break;\n case 1:\n outputCoords = 'outputCoords = ivec3(rc.x, rc.y+1, rc.z);';\n break;\n case 2:\n outputCoords = 'outputCoords = ivec3(rc.x, rc.y, rc.z+1);';\n break;\n case 3:\n outputCoords = 'outputCoords = ivec3(rc.x, rc.y+1, rc.z+1);';\n break;\n default:\n throw new Error();\n }\n\n mainLoop += `\n ${outputCoords}\n ${i > 0 ? 'if(outputCoords.y < rows && outputCoords.z < cols){' : ''}\n int flattenedIndex = getFlattenedIndex(outputCoords);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flattenedIndex);\n vec2 innerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] = getChannel(getA(inputRC.x, inputRC.y, inputRC.z), innerDims);\n\n ${i > 0 ? 
'}' : ''}\n `;\n }\n const glsl = getGlsl(handler.session.backend.glContext.version);\n\n const shaderSource = `\n ${getReshapedInputCoords(inputShape3D)}\n ${getFlattenedIndexFrom3D(squeezedOutputShape)}\n ${unpackFromChannel()}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.0);\n\n ivec3 outputCoords;\n int rows = ${squeezedOutputShape[2]};\n int cols = ${squeezedOutputShape[1]};\n\n ${mainLoop}\n ${glsl.output} = result;\n }\n `;\n\n return {\n ...metadata,\n output: {dims: squeezedOutputShape, type: input3D.type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const createPackedReshape3DProgramInfoLoader =\n (handler: WebGLInferenceHandler, input3D: Tensor, outputShape3D: readonly number[]): ProgramInfoLoader => {\n const metadata = createPackedReshape3DProgramMetadata(outputShape3D);\n return {...metadata, get: () => createPackedReshape3DProgramInfo(handler, input3D, metadata, outputShape3D)};\n };\n\nexport function processDims3D(shape: ArrayLike): [number, number, number] {\n if (shape.length === 0) {\n return [1, 1, 1];\n }\n // TODO: squeeze other shapes to 2D case\n let batch = 1;\n for (let i = 0; i < shape.length - 2; ++i) {\n batch *= shape[i];\n }\n return [batch, shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]];\n}\n\n// For packed reshape, we need to re-arrange texel data for output shape.\n// Our pack is designed to pack a 2x2 tile in last h and w dimension, so\n// for the reshaped new tensor, we just need to re-arrange the last h and\n// w dimension. For any shape that is not in 3D, i.e. [batch, W, H], we\n// first convert it to 3D by collapsing other dimension to batch dim, then\n// process with the last two dimensions.\n// Note: we only need the shape tensor to calculate output shape, so the\n// content in shape tensor is never uploaded to GPU. 
It is always kept in CPU.\n// TODO: optimize the algorithm -- in some cases, if the last two dims are\n// the same between input shape and output shape, the packed reshape can be\n// treated as no-op.\nexport function isReshapeCheap(dims: readonly number[], reshapedDims: readonly number[]) {\n let isCheapReshape = false;\n if (dims.length === 0 || reshapedDims.length === 0) { // scalar\n isCheapReshape = true;\n } else if (dims.length < 2 || reshapedDims.length < 2) { // 1D\n isCheapReshape = dims[dims.length - 1] === reshapedDims[reshapedDims.length - 1];\n } else { // 2D +\n isCheapReshape = dims[dims.length - 1] === reshapedDims[reshapedDims.length - 1] &&\n dims[dims.length - 2] === reshapedDims[reshapedDims.length - 2];\n }\n\n return isCheapReshape;\n}\n\nfunction getReshapedInputCoords(shape: [number, number, number]): string {\n const strides = ShapeUtil.computeStrides(shape);\n const coords = ['b', 'r', 'c'];\n const index = 'index';\n const coordsFromIndexSnippet = strides\n .map((stride, i) => {\n const line1 = `int ${coords[i]} = ${index} / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coords[i + 1]} = ${index} - ${coords[i]} * ${stride}` :\n `index -= ${coords[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(b, r, c);\n }\n `;\n}\n\nfunction getFlattenedIndexFrom3D(shape: [number, number, number]): string {\n const strides = ShapeUtil.computeStrides(shape);\n\n return `\n int getFlattenedIndex(ivec3 coords) {\n // reverse y, z order\n return coords.x * ${strides[0]} + coords.z * ${strides[1]} + coords.y;\n }\n`;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {TextureData, TextureType} from '../types';\n\nexport const encodeAsUint8 = (inferenceHandler: WebGLInferenceHandler, input: TextureData): TextureData => {\n const outputShape = input.shape;\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n /**\n * https://github.com/tensorflow/tfjs-core/blob/master/src/kernels/webgl/encode_float_gpu.ts\n */\n const shaderSource = `\n const float FLOAT_MAX = 1.70141184e38;\n const float FLOAT_MIN = 1.17549435e-38;\n\n bool isNaN(float val) {\n return (val < 1.0 || 0.0 < val || val == 0.0) ? 
false : true;\n }\n\n highp vec4 encodeAsUint8(highp float v) {\n if (isNaN(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n }\n\n void main() {\n float value = ${glsl.texture2D}(X,TexCoords).r;\n ${glsl.output} = encodeAsUint8(value);\n }`;\n const programInfo = {\n name: 'Uint8Encode',\n inputTypes: [TextureType.unpacked],\n inputNames: ['X'],\n output: {dims: outputShape, type: input.tensor.type, textureType: TextureType.downloadUint8AsFloat},\n shaderSource,\n hasMain: true\n };\n return inferenceHandler.executeProgram(programInfo, [input.tensor]);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\nimport {getCoordsDataType} from '../utils';\n\nimport {getChannels, unpackFromChannel} from './packing-utils';\n\nconst unpackProgramMetadata = {\n name: 'unpack',\n inputNames: ['A'],\n inputTypes: [TextureType.packed]\n};\n\nexport const createUnpackProgramInfo = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfo => {\n const rank = input.dims.length;\n\n const channels = getChannels('rc', rank);\n const innerDims = channels.slice(-2);\n const coordsDataType = getCoordsDataType(rank);\n const unpackChannel = unpackFromChannel();\n const isScalar = (input.dims.length === 0);\n const sourceCoords = isScalar ? '' : getSourceCoords(rank, channels);\n const coords = rank <= 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const shaderSource = `\n ${unpackChannel}\n void main() {\n ${coordsDataType} rc = getOutputCoords();\n\n // Sample the texture with the coords to get the rgba channel value.\n vec4 packedInput = getA(${sourceCoords});\n\n ${glsl.output} = vec4(getChannel(packedInput, ${coords}), 0, 0, 0);\n }\n `;\n\n return {\n ...unpackProgramMetadata,\n hasMain: true,\n output: {dims: input.dims, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n};\n\nexport const createUnpackProgramInfoLoader = (handler: WebGLInferenceHandler, input: Tensor): ProgramInfoLoader =>\n ({...unpackProgramMetadata, get: () => createUnpackProgramInfo(handler, input)});\n\nfunction getSourceCoords(rank: number, dims: string[]): string {\n if (rank === 1) {\n return 'rc';\n }\n\n let coords = '';\n for (let i = 0; i < rank; i++) {\n coords += dims[i];\n if (i < rank - 1) {\n coords += ',';\n }\n }\n return coords;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../instrument';\n\nexport declare namespace Encoder {\n export interface DataTypeMap {\n float: Float32Array;\n byte: Uint8Array;\n int: Uint32Array;\n }\n export type DataType = keyof DataTypeMap;\n type DataArrayType = DataTypeMap[DataType];\n}\n\n/* eslint-disable @typescript-eslint/naming-convention */\nexport const enum EncoderUsage {\n Default = 0,\n UploadOnly,\n Download4BytesAsFloat32,\n}\n/* eslint-enable @typescript-eslint/naming-convention */\n\n/**\n * Abstraction for mapping data types to texture texlets\n * Encoding means how a Float32 is mapped to 1 or 4 channels for each texlet\n * Decoding means how a texlet's channels are mapped to a resulting Float32\n */\nexport interface DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize: number;\n encode(src: Encoder.DataArrayType, textureSize: number): Encoder.DataArrayType;\n allocate(size: number): Encoder.DataArrayType;\n decode(buffer: Encoder.DataArrayType, dataSize: number): Encoder.DataArrayType;\n}\n/**\n * WebGL2 data encoder\n * Uses R32F as the format for texlet\n */\nexport class RedFloat32DataEncoder implements DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize: number;\n constructor(gl: WebGL2RenderingContext, channels = 1) {\n if (channels === 1) {\n this.internalFormat = gl.R32F;\n this.format = gl.RED;\n this.textureType = gl.FLOAT;\n this.channelSize = channels;\n } else if (channels === 4) {\n this.internalFormat = gl.RGBA32F;\n this.format = gl.RGBA;\n this.textureType = gl.FLOAT;\n this.channelSize = channels;\n } else {\n throw new Error(`Invalid number of channels: ${channels}`);\n }\n }\n encode(src: Encoder.DataArrayType, textureSize: number): Encoder.DataArrayType {\n let result: Float32Array;\n let source: Float32Array;\n if (src.constructor !== Float32Array) {\n Logger.warning('Encoder', 'data was not of type Float32; creating new Float32Array');\n source = new Float32Array(src);\n }\n if (textureSize * this.channelSize > src.length) {\n Logger.warning('Encoder', 'Source data too small. 
Allocating larger array');\n source = src as Float32Array;\n result = this.allocate(textureSize * this.channelSize) as Float32Array;\n source.forEach((v, i) => result[i] = v);\n } else {\n source = src as Float32Array;\n result = source;\n }\n return result;\n }\n allocate(size: number): Encoder.DataArrayType {\n return new Float32Array(size * 4);\n }\n decode(buffer: Encoder.DataArrayType, dataSize: number): Float32Array {\n if (this.channelSize === 1) {\n const filteredData = (buffer as Float32Array).filter((_value, index) => index % 4 === 0).subarray(0, dataSize);\n return filteredData;\n }\n return buffer.subarray(0, dataSize) as Float32Array;\n }\n}\n/**\n * Data encoder for WebGL 1 with support for floating point texture\n */\nexport class RGBAFloatDataEncoder implements DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize: number;\n constructor(gl: WebGLRenderingContext, channels = 1, textureType?: number) {\n if (channels !== 1 && channels !== 4) {\n throw new Error(`Invalid number of channels: ${channels}`);\n }\n this.internalFormat = gl.RGBA;\n this.format = gl.RGBA;\n this.channelSize = channels;\n this.textureType = textureType || gl.FLOAT;\n }\n encode(src: Float32Array, textureSize: number): Encoder.DataArrayType {\n let dest = src;\n if (this.channelSize === 1) {\n Logger.verbose('Encoder', 'Exploding into a larger array');\n dest = this.allocate(textureSize) as Float32Array;\n src.forEach((v, i) => dest[i * 4] = v);\n }\n return dest;\n }\n allocate(size: number): Encoder.DataArrayType {\n return new Float32Array(size * 4);\n }\n decode(buffer: Encoder.DataArrayType, dataSize: number): Float32Array {\n if (this.channelSize === 1) {\n const filteredData = (buffer as Float32Array).filter((_value, index) => index % 4 === 0).subarray(0, dataSize);\n return filteredData;\n }\n return buffer.subarray(0, dataSize) as Float32Array;\n }\n}\n\nexport class Uint8DataEncoder implements DataEncoder {\n internalFormat: number;\n format: number;\n textureType: number;\n channelSize = 4;\n constructor(gl: WebGLRenderingContext, channels = 1) {\n if (channels === 1) {\n this.internalFormat = gl.ALPHA;\n this.format = gl.ALPHA; // not tested\n this.textureType = gl.UNSIGNED_BYTE;\n this.channelSize = channels;\n } else if (channels === 4) {\n this.internalFormat = gl.RGBA;\n this.format = gl.RGBA;\n this.textureType = gl.UNSIGNED_BYTE;\n this.channelSize = channels;\n } else {\n throw new Error(`Invalid number of channels: ${channels}`);\n }\n }\n encode(src: Uint8Array, _textureSize: number): Encoder.DataArrayType {\n return new Uint8Array(src.buffer, src.byteOffset, src.byteLength);\n }\n allocate(size: number): Encoder.DataArrayType {\n return new Uint8Array(size * this.channelSize);\n }\n decode(buffer: Encoder.DataArrayType, dataSize: number): Uint8Array {\n if (buffer instanceof Uint8Array) {\n return buffer.subarray(0, dataSize);\n }\n throw new Error(`Invalid array type: ${buffer.constructor}`);\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {ShapeUtil} from '../../util';\n\nimport {TextureLayoutStrategy, WidthHeightPrefs} from './texture-layout-strategy';\nimport {TextureLayout, TextureType} from './types';\n\nexport const createTextureLayoutFromTextureType =\n (textureLayoutStrategy: TextureLayoutStrategy, shape: readonly number[],\n textureType: TextureType): TextureLayout => {\n const channel = (textureType === TextureType.unpacked || textureType === TextureType.unpackedReversed) ? 1 : 4;\n const isPacked = textureType === TextureType.packed;\n const reverseWH = (textureType === TextureType.unpackedReversed || textureType === TextureType.packed);\n const breakAxis = textureType === TextureType.packedLastDimension ? shape.length - 1 : undefined;\n const unpackedShape = textureType === TextureType.packedLastDimension ?\n shape.map((d, i) => i === shape.length - 1 ? d * 4 : d) :\n undefined;\n return createTextureLayoutFromShape(\n textureLayoutStrategy, shape, channel, unpackedShape, {isPacked, reverseWH, breakAxis});\n };\n\nexport const calculateTextureWidthAndHeight =\n (textureLayoutStrategy: TextureLayoutStrategy, shape: readonly number[], textureType: TextureType):\n [number, number] => {\n const layout = createTextureLayoutFromTextureType(textureLayoutStrategy, shape, textureType);\n return [layout.width, layout.height];\n };\n\n/**\n * Create a TextureLayout object from shape.\n */\nexport const createTextureLayoutFromShape =\n (textureLayoutStrategy: TextureLayoutStrategy, shape: readonly number[], channels: 1|4 = 1,\n unpackedShape?: readonly number[], prefs?: WidthHeightPrefs): TextureLayout => {\n const isPacked = !!(prefs && prefs.isPacked);\n const [width, height] = textureLayoutStrategy.computeTextureWH(isPacked ? unpackedShape || shape : shape, prefs);\n const rank = shape.length;\n let inferredDims = shape.slice(0);\n if (rank === 0) {\n inferredDims = [1];\n }\n if (channels === 1) {\n // unpackedShape will take `shape` and not `inferredDims` so as to create a scalar Tensor if need be\n unpackedShape = shape;\n } else if (isPacked) {\n if (channels !== 4) {\n throw new Error('a packed texture must be 4-channel');\n }\n unpackedShape = shape;\n if (rank > 0) {\n inferredDims[rank - 1] = Math.ceil(inferredDims[rank - 1] / 2);\n }\n if (rank > 1) {\n inferredDims[rank - 2] = Math.ceil(inferredDims[rank - 2] / 2);\n }\n } else if (!unpackedShape) {\n throw new Error('Unpacked shape is needed when using channels > 1');\n }\n return {\n width,\n height,\n channels,\n isPacked,\n shape: inferredDims,\n strides: ShapeUtil.computeStrides(inferredDims),\n unpackedShape,\n reversedWH: (prefs && prefs.reverseWH)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceHandler} from '../../backend';\nimport {Logger} from '../../instrument';\nimport {Tensor} from '../../tensor';\nimport {ShapeUtil} from '../../util';\n\nimport {createPackProgramInfoLoader} from './ops/pack';\nimport {createPackedReshape3DProgramInfoLoader, isReshapeCheap, processDims3D} from './ops/reshape-packed';\nimport {encodeAsUint8} from './ops/uint8-encode';\nimport {createUnpackProgramInfoLoader} from './ops/unpack';\nimport {WebGLSessionHandler} from './session-handler';\nimport {EncoderUsage} from './texture-data-encoder';\nimport {calculateTextureWidthAndHeight, createTextureLayoutFromShape, createTextureLayoutFromTextureType} from './texture-layout';\nimport {Artifact, ProgramInfo, ProgramInfoLoader, TextureData, TextureLayout, TextureType} from './types';\n\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo|ProgramInfoLoader, inputTextureDatas: TextureData[]): string => {\n const inputs =\n inputTextureDatas.map(texture => `${texture.unpackedShape.join(',')};${texture.width}x${texture.height}`)\n .join('_');\n let key = programInfo.name;\n if (programInfo.cacheHint) {\n key += '[' + programInfo.cacheHint + ']';\n }\n key += ':' + inputs;\n return key;\n };\n\nexport class WebGLInferenceHandler implements InferenceHandler {\n private packedTextureDataCache: Map;\n private unpackedTextureDataCache: Map;\n constructor(public session: WebGLSessionHandler) {\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache = new Map();\n }\n\n /**\n * @returns [width, height]\n */\n calculateTextureWidthAndHeight(shape: readonly number[], textureType: TextureType): [number, number] {\n return calculateTextureWidthAndHeight(this.session.layoutStrategy, shape, textureType);\n }\n\n executeProgram(program: ProgramInfo|ProgramInfoLoader, inputs: readonly Tensor[]): TextureData {\n if (inputs.length < program.inputNames.length) {\n throw new Error(`Input size mustn't be less than ${program.inputNames.length}.`);\n }\n if (program.inputNames.length !== program.inputTypes.length) {\n throw new Error('input names size does not match input types');\n }\n\n // create texture info for input\n const inputTextureDatas: TextureData[] = [];\n for (let i = 0; i < program.inputNames.length; ++i) {\n inputTextureDatas[i] = this.getOrCreateTextureData(inputs[i], program.inputTypes[i]);\n }\n\n const key = getProgramInfoUniqueKey(program, inputTextureDatas);\n let artifact = this.session.programManager.getArtifact(key);\n const programInfo = artifact ?\n artifact.programInfo :\n (typeof (program as ProgramInfoLoader).get === 'function' ? 
(program as ProgramInfoLoader).get() :\n (program as ProgramInfo));\n\n // create texture info for output\n const outputTextureLayout = createTextureLayoutFromTextureType(\n this.session.layoutStrategy, programInfo.output.dims, programInfo.output.textureType);\n const outputTextureData = this.createTextureData(outputTextureLayout, programInfo.output.type);\n\n if (!artifact) {\n artifact = this.session.programManager.build(programInfo, inputTextureDatas, outputTextureData);\n this.session.programManager.setArtifact(key, artifact);\n }\n\n this.runProgram(artifact, inputTextureDatas, outputTextureData);\n return outputTextureData;\n }\n\n run(program: ProgramInfoLoader, inputs: readonly Tensor[]): Tensor {\n const outputTextureData = this.executeProgram(program, inputs);\n return outputTextureData.tensor;\n }\n\n private runProgram(artifact: Artifact, inputs: TextureData[], output: TextureData): void {\n // input should match\n for (let i = 0; i < inputs.length; ++i) {\n if (!!inputs[i].isPacked !== (artifact.programInfo.inputTypes[i] === TextureType.packed)) {\n throw new Error(`input[${i}] property packed inconsistent`);\n }\n }\n\n // output should match\n if (!!output.isPacked !== (artifact.programInfo.output.textureType === TextureType.packed)) {\n throw new Error('output property packed inconsistent');\n }\n\n this.session.programManager.run(artifact, inputs, output);\n }\n\n /**\n * Create a TextureData object from a tensor.\n * Usage = EncoderUsage.UploadOnly.\n * If a related texture data is found in cache, returns it;\n * Otherwise:\n * Creates a new texture layout if not provided;\n * Creates WebGLTexture with the layout;\n * Upload tensor data to the texture;\n * Creates a texture data object associated with the given tensor.\n * @param tensor the tensor with data to upload\n */\n private getOrCreateTextureData(tensor: Tensor, textureType: TextureType) {\n let td = this.getTextureData(tensor.dataId, textureType === TextureType.packed);\n\n if (!td) {\n // check if we have texture data in different type\n td = this.getTextureData(tensor.dataId, textureType !== TextureType.packed);\n if (td) {\n if (textureType === TextureType.packed) {\n return this.pack(td);\n } else {\n return this.unpack(td);\n }\n }\n }\n\n if (!td) {\n const layout = createTextureLayoutFromTextureType(this.session.layoutStrategy, tensor.dims, textureType);\n\n if (textureType === TextureType.packedLastDimension) {\n const group = 1;\n const channels = 4;\n const shape = tensor.dims;\n if (shape.length === 4) {\n // pre-processing for kernel data of Conv.\n //\n // TODO: currently this is a hacking to overwrite Conv's weight. The correct way to do this should be:\n // 1. implement texture based const-folding\n // 2. create a WebGL program \"preprocessConvWeight\" to do the same work as below\n // 3. 
run the program before dotProduct.\n //\n const adjustedKernelShape = [shape[0], Math.ceil((shape[1] * shape[2] * shape[3]) / channels)];\n const adjustedLayout =\n createTextureLayoutFromTextureType(this.session.layoutStrategy, adjustedKernelShape, textureType);\n let buffer = tensor.numberData;\n if (shape[1] * shape[2] * shape[3] % channels !== 0) {\n const numFeatureMaps = shape[0];\n const oldRowSize = shape[1] * shape[2] * shape[3];\n const newRowSize = Math.ceil(oldRowSize * group / channels) * channels;\n const newSize = numFeatureMaps * newRowSize;\n buffer = new Float32Array(newSize);\n for (let f = 0; f < numFeatureMaps; ++f) {\n const oldOffset = f * oldRowSize;\n const newOffset = f * newRowSize + f % group * oldRowSize;\n buffer.set(tensor.numberData.subarray(oldOffset, oldOffset + oldRowSize), newOffset);\n }\n }\n return this.createTextureData(adjustedLayout, tensor.type, buffer, tensor, EncoderUsage.UploadOnly);\n }\n }\n\n if (textureType === TextureType.packed) {\n const unpackedTextureLayout =\n createTextureLayoutFromShape(this.session.layoutStrategy, tensor.dims, 1, [], {reverseWH: true});\n const unpackedTextureData = this.createTextureData(\n unpackedTextureLayout, tensor.type, tensor.numberData, tensor, EncoderUsage.UploadOnly);\n td = this.pack(unpackedTextureData);\n } else {\n td = this.createTextureData(layout, tensor.type, tensor.numberData, tensor, EncoderUsage.UploadOnly);\n }\n }\n return td;\n }\n\n /**\n * Create a TextureData object using the given data and bind to the given tensor.\n * Usage = EncoderUsage.UploadOnly.\n * NOTE: this function is a hack for Conv implementation. should remove this function, after rewriting Conv\n * implementation by Graph.Transformer\n * @param dataType the tensor data type\n * @param data the actual data to upload\n * @param tensor the tensor to bind. tensor's data is ignored.\n */\n createTextureDataFromLayoutBindTensor(\n layout: TextureLayout, dataType: Tensor.DataType, data: Tensor.NumberType, tensor: Tensor): TextureData {\n return this.createTextureData(layout, dataType, data, tensor, EncoderUsage.UploadOnly);\n }\n\n private createTextureData(\n layout: TextureLayout, dataType: Tensor.DataType, data?: Tensor.NumberType, tensor?: Tensor,\n usage?: EncoderUsage): TextureData {\n Logger.verbose('InferenceHandler', `Creating TextureData: layout:[${JSON.stringify(layout)}]`);\n const texture = this.session.textureManager.createTextureFromLayout(dataType, layout, data, usage);\n return this.createTextureDataFromTexture(layout, dataType, texture, tensor);\n }\n\n reshapeUnpacked(input: Tensor, reshapedDims: readonly number[]): Tensor {\n const inputTD = this.getOrCreateTextureData(input, TextureType.unpacked);\n const newTextureLayout: TextureLayout = {\n channels: inputTD.channels,\n height: inputTD.height,\n width: inputTD.width,\n // handle reshaping into scalar Tensors\n shape: reshapedDims.length !== 0 ? 
reshapedDims : [1],\n strides: ShapeUtil.computeStrides(reshapedDims),\n unpackedShape: reshapedDims,\n };\n const newTextureData = this.createTextureDataFromTexture(newTextureLayout, input.type, inputTD.texture);\n return newTextureData.tensor;\n }\n\n reshapePacked(input: Tensor, reshapedDims: readonly number[]): Tensor {\n const inputTD = this.getOrCreateTextureData(input, TextureType.packed);\n\n // check if the reshape is 'cheap'\n if (isReshapeCheap(input.dims, reshapedDims)) {\n const newTextureLayout: TextureLayout = {\n channels: inputTD.channels,\n height: inputTD.height,\n width: inputTD.width,\n // handle reshaping into scalar Tensors\n shape: reshapedDims.length !== 0 ? reshapedDims : [1],\n strides: ShapeUtil.computeStrides(reshapedDims),\n unpackedShape: reshapedDims,\n isPacked: true\n };\n const newTextureData = this.createTextureDataFromTexture(newTextureLayout, input.type, inputTD.texture);\n return newTextureData.tensor;\n }\n\n const squeezedInputShape = processDims3D(input.dims);\n const squeezedOutputShape = processDims3D(reshapedDims);\n\n const squeezedInputTensor = this.reshapePacked(input, squeezedInputShape);\n const squeezedOutputTensor = this.run(\n createPackedReshape3DProgramInfoLoader(this, squeezedInputTensor, squeezedOutputShape), [squeezedInputTensor]);\n const outputTensor = this.reshapePacked(squeezedOutputTensor, reshapedDims);\n return outputTensor;\n }\n\n cast(input: Tensor, type: Tensor.DataType): Tensor {\n const inputTD = this.getOrCreateTextureData(input, TextureType.unpacked);\n const newTextureData = this.createTextureDataFromTexture(inputTD as TextureLayout, type, inputTD.texture);\n return newTextureData.tensor;\n }\n\n private createTextureDataFromTexture(\n layout: TextureLayout, dataType: Tensor.DataType, texture: WebGLTexture, tensor?: Tensor, tensorId?: Tensor.Id) {\n const textureData: TextureData = {\n ...layout,\n tensor: tensor ||\n new Tensor(\n layout.unpackedShape, dataType, (_id: Tensor.Id) => this.readTexture(textureData),\n async (_id: Tensor.Id) => this.readTextureAsync(textureData), undefined, tensorId),\n texture\n };\n this.setTextureData(textureData.tensor.dataId, textureData, layout.isPacked);\n return textureData;\n }\n\n private getTextureData(tensorId: Tensor.Id, isPacked = false): TextureData|undefined {\n return this.session.isInitializer(tensorId) ? this.session.getTextureData(tensorId, isPacked) :\n isPacked ? this.packedTextureDataCache.get(tensorId) :\n this.unpackedTextureDataCache.get(tensorId);\n }\n setTextureData(tensorId: Tensor.Id, td: TextureData, isPacked = false): void {\n if (this.session.isInitializer(tensorId)) {\n this.session.setTextureData(tensorId, td, isPacked);\n } else {\n (isPacked ? 
this.packedTextureDataCache : this.unpackedTextureDataCache).set(tensorId, td);\n }\n }\n isTextureLayoutCached(tensor: Tensor, isPacked = false): boolean {\n return !!this.getTextureData(tensor.dataId, isPacked);\n }\n\n dispose(): void {\n this.session.textureManager.clearActiveTextures();\n this.packedTextureDataCache.forEach(td => this.session.textureManager.releaseTexture(td));\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache.forEach(td => this.session.textureManager.releaseTexture(td));\n this.unpackedTextureDataCache = new Map();\n }\n\n readTexture(textureData: TextureData): Tensor.NumberType {\n if (textureData.isPacked) {\n return this.readTexture(this.unpack(textureData));\n }\n if (!this.session.backend.glContext.isFloat32DownloadSupported) {\n return this.session.textureManager.readUint8TextureAsFloat(encodeAsUint8(this, textureData));\n }\n return this.session.textureManager.readTexture(textureData, textureData.tensor.type, textureData.channels);\n }\n\n async readTextureAsync(textureData: TextureData): Promise {\n if (textureData.isPacked) {\n return this.readTextureAsync(this.unpack(textureData));\n }\n if (!this.session.backend.glContext.isFloat32DownloadSupported) {\n return this.session.textureManager.readUint8TextureAsFloat(encodeAsUint8(this, textureData));\n }\n return this.session.textureManager.readTextureAsync(textureData, textureData.tensor.type, textureData.channels);\n }\n\n pack(input: TextureData): TextureData {\n const outputTextureData = this.executeProgram(createPackProgramInfoLoader(this, input.tensor), [input.tensor]);\n return outputTextureData;\n }\n\n unpack(input: TextureData): TextureData {\n const outputTextureData = this.executeProgram(createUnpackProgramInfoLoader(this, input.tensor), [input.tensor]);\n return outputTextureData;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface BatchNormalizationAttributes extends AttributeWithCacheKey {\n epsilon: number;\n momentum: number;\n spatial: number;\n}\n\nconst batchNormalizationProgramMetadata = {\n name: 'BatchNormalization',\n inputNames: ['A', 'Scale', 'B', 'Mean', 'Variance'],\n inputTypes:\n [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked, TextureType.unpacked, TextureType.unpacked]\n};\n\nexport const batchNormalization: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: BatchNormalizationAttributes): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...batchNormalizationProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createBatchNormalizationProgramInfo(inferenceHandler, inputs, attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseBatchNormalizationAttributes: OperatorInitialization =\n (node: Graph.Node): BatchNormalizationAttributes => {\n const epsilon = node.attributes.getFloat('epsilon', 1e-5);\n const momentum = node.attributes.getFloat('momentum', 0.9);\n const spatial = node.attributes.getInt('spatial', 1);\n return createAttributeWithCacheKey({epsilon, momentum, spatial});\n };\n\nconst createBatchNormalizationProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: BatchNormalizationAttributes):\n ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const rank = inputs[0].dims.length;\n const [scaleWidth, scaleHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(inputs[1].dims, TextureType.unpacked);\n const shaderSource = `\n float process(int[${rank}] indices) {\n vec2 position = offsetToCoords(indices[1], ${scaleWidth}, ${scaleHeight});\n float scale = getColorAsFloat(${glsl.texture2D}(Scale, position));\n float mean = getColorAsFloat(${glsl.texture2D}(Mean, position));\n float variance = getColorAsFloat(${glsl.texture2D}(Variance, position));\n float b = getColorAsFloat(${glsl.texture2D}(B, position));\n\n return scale * ( (_A(indices) - mean) / sqrt(variance + float(${attributes.epsilon})) ) + b;\n }`;\n return {\n ...batchNormalizationProgramMetadata,\n output: {dims: inputs[0].dims, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs.');\n }\n\n const X = inputs[0];\n const scale = inputs[1];\n const B = inputs[2];\n const mean = inputs[3];\n const var_ = inputs[4];\n\n // input should atleast have three dimensions - N,C,dim1,...,dimn\n // other inputs can have only one dimensions\n if (X.dims.length < 3 || scale.dims.length !== 1 || B.dims.length !== 1 || mean.dims.length !== 1 ||\n var_.dims.length !== 1) {\n throw new Error('invalid input shape.');\n }\n if (scale.dims[0] !== X.dims[1] || B.dims[0] !== X.dims[1] || mean.dims[0] !== X.dims[1] ||\n var_.dims[0] !== X.dims[1]) {\n throw new Error('invalid input 
shape.');\n }\n if ((X.type !== 'float32' && X.type !== 'float64') || (scale.type !== 'float32' && scale.type !== 'float64') ||\n (B.type !== 'float32' && B.type !== 'float64') || (mean.type !== 'float32' && mean.type !== 'float64') ||\n (var_.type !== 'float32' && var_.type !== 'float64')) {\n throw new Error('invalid input tensor types.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {ProgramInfo, TextureLayout} from './types';\nimport {WebGLContext} from './webgl-context';\n\n/* eslint-disable @typescript-eslint/naming-convention */\nexport enum FunctionType {\n ValueBased,\n Positional\n}\nexport interface GlslFunction {\n body: string;\n name: string;\n type: T;\n}\nexport type GlslValueFunction = GlslFunction;\nexport interface GlslPositionalFunction extends GlslFunction {\n inputShape: readonly number[];\n outputShape: readonly number[];\n}\n\nexport class GlslContext {\n constructor(\n public glContext: WebGLContext, public programInfo: ProgramInfo, public inputTextureLayouts: TextureLayout[],\n public outputTextureLayout: TextureLayout) {}\n}\nexport abstract class GlslLib {\n constructor(public context: GlslContext) {}\n abstract getFunctions(): {[name: string]: GlslLibRoutine};\n abstract getCustomTypes(): {[name: string]: string};\n}\n\n// abstraction to represent a GLSL library routine and it's dependencies\nexport class GlslLibRoutine {\n constructor(public routineBody: string, public dependencies?: string[]) {}\n}\n\n// abstraction to represent a GLSL library routine and it's dependencies AS GRAPH Nodes\n// this level of abstraction is used to topologically sort routines before fragment shade inclusion\nexport class GlslLibRoutineNode {\n dependencies: GlslLibRoutineNode[];\n routineBody: string;\n constructor(public name: string, routineBody?: string, dependencies?: GlslLibRoutineNode[]) {\n if (dependencies) {\n this.dependencies = dependencies;\n } else {\n this.dependencies = [];\n }\n\n if (routineBody) {\n this.routineBody = routineBody;\n }\n }\n addDependency(node: GlslLibRoutineNode) {\n if (node) {\n this.dependencies.push(node);\n }\n }\n}\n\n// topologically sort GLSL library routines (graph nodes abstraction) before shader script inclusion\nexport class TopologicalSortGlslRoutines {\n static returnOrderedNodes(nodes: GlslLibRoutineNode[]): GlslLibRoutineNode[] {\n if (!nodes || nodes.length === 0) {\n return [];\n }\n\n if (nodes.length === 1) {\n return nodes;\n }\n\n const cycleCheck = new Set();\n const alreadyTraversed = new Set();\n const result = new Array();\n\n this.createOrderedNodes(nodes, cycleCheck, alreadyTraversed, result);\n return result;\n }\n\n private static createOrderedNodes(\n graphNodes: GlslLibRoutineNode[], cycleCheck: Set, alreadyTraversed: Set,\n result: GlslLibRoutineNode[]) {\n for (let i = 0; i < graphNodes.length; ++i) {\n this.dfsTraverse(graphNodes[i], cycleCheck, alreadyTraversed, result);\n }\n }\n\n private static dfsTraverse(\n root: GlslLibRoutineNode, cycleCheck: Set, alreadyTraversed: Set, result: GlslLibRoutineNode[]) {\n // if this root has already been traversed return\n if (!root || alreadyTraversed.has(root.name)) {\n return;\n }\n\n // cyclic dependency has been detected\n if (cycleCheck.has(root.name)) {\n throw new Error('Cyclic dependency detected. 
Can\\'t topologically sort routines needed for shader.');\n }\n\n // hold this node to detect cycles if any\n cycleCheck.add(root.name);\n\n // traverse children in a dfs fashion\n const dependencies = root.dependencies;\n if (dependencies && dependencies.length > 0) {\n for (let i = 0; i < dependencies.length; ++i) {\n this.dfsTraverse(dependencies[i], cycleCheck, alreadyTraversed, result);\n }\n }\n\n // add to result holder\n result.push(root);\n\n // mark this node as traversed so that we don't traverse from this again\n alreadyTraversed.add(root.name);\n\n // release the hold\n cycleCheck.delete(root.name);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {BroadcastUtil, ShapeUtil} from '../../../util';\nimport {FunctionType, GlslValueFunction} from '../glsl-definitions';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\n\nexport function glslAdd(): GlslValueFunction {\n const name = 'add_';\n const body = `\n float ${name}(float a, float b) {\n return a + b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 + v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslDiv(): GlslValueFunction {\n const name = 'div_';\n const body = `\n float ${name}(float a, float b) {\n return a / b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 / v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslMul(): GlslValueFunction {\n const name = 'mul_';\n const body = `\n float ${name}(float a, float b) {\n return a * b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 * v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSub(): GlslValueFunction {\n const name = 'sub_';\n const body = `\n float ${name}(float a, float b) {\n return a - b;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return v1 - v2;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslEqual(): GlslValueFunction {\n const name = 'equal_';\n const body = `\n float ${name}(float a, float b) {\n return float(a == b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4(equal(v1, v2));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslGreater(): GlslValueFunction {\n const name = 'greater_';\n const body = `\n float ${name}(float a, float b) {\n return float(a > b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4( v1.r > v2.r ,\n v1.g > v2.g,\n v1.b > v2.b,\n v1.a > v2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslLess(): GlslValueFunction {\n const name = 'less_';\n const body = `\n float ${name}(float a, float b) {\n return float(a < b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4( v1.r < v2.r ,\n v1.g < v2.g,\n v1.b < v2.b,\n v1.a < v2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslAnd(): GlslValueFunction {\n const name = 'and_';\n const body = `\n float ${name}(float a, float b) {\n return float( bool(a) && bool(b) );\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n bvec4 b1 = bvec4(v1);\n bvec4 b2 = bvec4(v2);\n return vec4( b1.r && b2.r ,\n b1.g && b2.g,\n b1.b && b2.b,\n b1.a && b2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslOr(): GlslValueFunction {\n const name = 'or_';\n const 
body = `\n float ${name}(float a, float b) {\n return float( bool(a) || bool(b) );\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n bvec4 b1 = bvec4(v1);\n bvec4 b2 = bvec4(v2);\n return vec4( b1.r || b2.r ,\n b1.g || b2.g,\n b1.b || b2.b,\n b1.a || b2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslXor(): GlslValueFunction {\n const name = 'xor_';\n const body = `\n float ${name}(float a, float b) {\n return float( bool(a) ^^ bool(b) );\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n bvec4 b1 = bvec4(v1);\n bvec4 b2 = bvec4(v2);\n return vec4( b1.r ^^ b2.r ,\n b1.g ^^ b2.g,\n b1.b ^^ b2.b,\n b1.a ^^ b2.a );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslPow(): GlslValueFunction {\n return glslBuiltinBinary('pow');\n}\nexport function glslPRelu(): GlslValueFunction {\n const name = 'prelu_';\n const body = `\n float ${name}(float a, float b) {\n return a < 0.0 ? a * b: a;\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return vec4(\n v1.r < 0.0 ? v1.r * v2.r: v1.r,\n v1.g < 0.0 ? v1.g * v2.g: v1.g,\n v1.b < 0.0 ? v1.b * v2.b: v1.b,\n v1.a < 0.0 ? v1.a * v2.a: v1.a\n );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\n\nfunction glslBuiltinBinary(fname: string): GlslValueFunction {\n const name = `${fname}_`;\n const body = `\n float ${name}(float a, float b) {\n return ${fname}(a, b);\n }\n vec4 ${name}(vec4 v1, vec4 v2) {\n return ${fname}(v1, v2);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\n\nconst createBinaryProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], glslFunc: GlslValueFunction,\n outputTensorType: Tensor.DataType = inputs[0].type, cacheKey?: string): ProgramInfoLoader => {\n const textureType = handler.session.pack ? TextureType.packed : TextureType.unpacked;\n return {\n name: glslFunc.name,\n inputNames: ['A', 'B'],\n inputTypes: [textureType, textureType],\n cacheHint: cacheKey,\n get: () => createBinaryProgramInfo(handler, inputs, glslFunc, outputTensorType)\n };\n };\n\nconst createBinaryProgramInfo =\n (handler: WebGLInferenceHandler, inputs: Tensor[], glslFunc: GlslValueFunction,\n outputTensorType: Tensor.DataType = inputs[0].type): ProgramInfo => {\n const textureType = handler.session.pack ? TextureType.packed : TextureType.unpacked;\n const isBroadcast = !ShapeUtil.areEqual(inputs[0].dims, inputs[1].dims);\n let outputShape = inputs[0].dims;\n\n const usePackedTexture = handler.session.pack;\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(inputs[0].dims, inputs[1].dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n const outputRank = outputShape.length;\n const aRank = inputs[0].dims.length !== 0 ? inputs[0].dims.length : 1;\n const bRank = inputs[1].dims.length !== 0 ? inputs[1].dims.length : 1;\n const aBcast = inputs[0].dims.length !== 0 ? 'bcastIndices_A(indices, aindices);' : 'aindices[0] = 0;';\n const bBcast = inputs[1].dims.length !== 0 ? 'bcastIndices_B(indices, bindices);' : 'bindices[0] = 0;';\n\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const shaderSource = usePackedTexture ? 
`\n ${glslFunc.body}\n void main() {\n vec4 a = getAAtOutCoords();\n vec4 b = getBAtOutCoords();\n vec4 result = ${glslFunc.name}(a, b);\n ${glsl.output} = result;\n }` :\n `\n ${glslFunc.body}\n float process(int indices[${outputRank}]) {\n int aindices[${aRank}];\n int bindices[${bRank}];\n ${aBcast}\n ${bBcast}\n return ${glslFunc.name}(_A(aindices), _B(bindices));\n }`;\n\n return {\n name: glslFunc.name,\n inputNames: ['A', 'B'],\n inputTypes: [textureType, textureType],\n output: {dims: outputShape, type: outputTensorType, textureType},\n shaderSource,\n hasMain: usePackedTexture\n };\n }\n const glsl = getGlsl(handler.session.backend.glContext.version);\n const shaderSource = `\n ${glslFunc.body}\n void main() {\n vec4 v1 = ${glsl.texture2D}(A, TexCoords);\n vec4 v2 = ${glsl.texture2D}(B, TexCoords);\n vec4 result = ${glslFunc.name}(v1, v2);\n ${glsl.output} = result;\n }\n `;\n\n return {\n name: glslFunc.name,\n inputNames: ['A', 'B'],\n inputTypes: [textureType, textureType],\n output: {dims: inputs[0].dims, type: outputTensorType, textureType},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const add = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslAdd()), inputs)];\n\nexport const and = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslAnd(), 'bool'), inputs)];\n\nexport const div = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslDiv()), inputs)];\n\nexport const equal = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslEqual(), 'bool'), inputs)];\n\nexport const greater = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslGreater(), 'bool'), inputs)];\n\nexport const less = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslLess(), 'bool'), inputs)];\n\nexport const mul = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslMul()), inputs)];\n\nexport const or = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslOr(), 'bool'), inputs)];\n\nexport const pow = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslPow()), inputs)];\n\nexport const pRelu = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslPRelu()), inputs)];\n\nexport const sub = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslSub()), inputs)];\n\nexport const xor = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createBinaryProgramInfoLoader(handler, inputs, glslXor(), 'bool'), inputs)];\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ProtoUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const cast: OperatorImplementation =\n (handler: WebGLInferenceHandler, inputs: Tensor[], to: Tensor.DataType): Tensor[] => {\n validateInputs(inputs);\n return [handler.cast(inputs[0], to)];\n };\n\nexport const parseCastAttributes: OperatorInitialization = (node: Graph.Node): Tensor.DataType =>\n ProtoUtil.tensorDataTypeFromProto(node.attributes.getInt('to'));\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Cast requires 1 input.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('Invalid input type.');\n }\n};", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\nimport {getCoordsDataType, getGlChannels} from '../utils';\n\nimport {ConcatAttributes} from './concat';\nimport {getChannels, unpackFromChannel} from './packing-utils';\n\nconst createPackedConcatProgramMetadata = (inputCount: number, cacheHint: string) => ({\n name: 'Concat (packed)',\n inputNames: Array.from({length: inputCount}, (_v, i) => `X${i}`),\n inputTypes: Array(inputCount).fill(TextureType.packed),\n cacheHint\n});\n\nconst createPackedConcatProgramInfo =\n (handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n if (axis >= inputShape.length || axis < (-1 * inputShape.length)) {\n throw new Error('axis specified for concat doesn\\'t match input dimensionality');\n }\n if (axis < 0) {\n axis = inputShape.length + axis;\n }\n // ensure all of the non-concatenated axes match each other\n // calculate the shape of the output tensor while we do that\n const outputShape = inputShape.slice(0);\n for (let i = 1; i < inputs.length; i++) {\n const dataNShape = inputs[i].dims.slice();\n for (let axisIndex = 0; axisIndex < inputShape.length; axisIndex++) {\n // add to the placeholder for computing output shape\n if (axisIndex === axis) {\n outputShape[axis] += dataNShape[axisIndex];\n }\n // ensure all non-cancatenated axes match each other\n else if (inputShape[axisIndex] !== dataNShape[axisIndex]) {\n throw new Error('non concat dimensions must match');\n }\n }\n }\n\n const rank = outputShape.length;\n const coords = getChannels('coords', rank);\n const dtype = getCoordsDataType(rank);\n const unpackChannel = unpackFromChannel();\n\n const shapes = inputs.map(i => i.dims);\n const channels = getGlChannels(rank);\n const offsets: number[] = new Array(shapes.length - 1);\n\n offsets[0] = shapes[0][axis];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][axis];\n }\n\n const channel = channels[axis];\n const lastChannels = channels.slice(-2);\n const allChannels = channels.join();\n\n let getValueSnippet = `if (${channel} < ${offsets[0]}) {\n return getChannel(\n getX0(${allChannels}), vec2(${lastChannels.join()}));\n }`;\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n 
getValueSnippet += `\n if (${channel} < ${offsets[i]} && ${channel} >= ${offsets[i - 1]}) {\n return getChannel(\n getX${i}(${getShiftedChannelsSnippet(channels, channel, shift)}),\n vec2(${getShiftedChannelsSnippet(lastChannels, channel, shift)}));\n }`;\n }\n const lastIndex = offsets.length;\n const shift = offsets[offsets.length - 1];\n getValueSnippet += `\n return getChannel(\n getX${lastIndex}(${getShiftedChannelsSnippet(channels, channel, shift)}),\n vec2(${getShiftedChannelsSnippet(lastChannels, channel, shift)}));`;\n\n const glsl = getGlsl(handler.session.backend.glContext.version);\n\n const shaderSource = `\n ${unpackChannel}\n float getValue(${channels.map(x => 'int ' + x)}) {\n ${getValueSnippet}\n }\n\n void main() {\n ${dtype} coords = getOutputCoords();\n int lastDim = coords.${channels[rank - 1]};\n coords.${channels[rank - 1]} = coords.${channels[rank - 2]};\n coords.${channels[rank - 2]} = lastDim;\n\n vec4 result = vec4(getValue(${coords}), 0., 0., 0.);\n\n ${coords[rank - 1]} = ${coords[rank - 1]} + 1;\n if (${coords[rank - 1]} < ${outputShape[rank - 1]}) {\n result.g = getValue(${coords});\n }\n\n ${coords[rank - 2]} = ${coords[rank - 2]} + 1;\n if (${coords[rank - 2]} < ${outputShape[rank - 2]}) {\n result.a = getValue(${coords});\n }\n\n ${coords[rank - 1]} = ${coords[rank - 1]} - 1;\n if (${coords[rank - 2]} < ${outputShape[rank - 2]} &&\n ${coords[rank - 1]} < ${outputShape[rank - 1]}) {\n result.b = getValue(${coords});\n }\n ${glsl.output} = result;\n }\n `;\n\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true,\n };\n };\n\nexport const createPackedConcatProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConcatAttributes): ProgramInfoLoader => {\n const metadata = createPackedConcatProgramMetadata(inputs.length, attributes.cacheKey);\n return {...metadata, get: () => createPackedConcatProgramInfo(handler, metadata, inputs, attributes.axis)};\n };\n\nconst getShiftedChannelsSnippet = (channels: string[], channel: string, shift: number): string => {\n const channelIdx = channels.indexOf(channel);\n const res = channels.map((c, idx) => {\n if (idx === channelIdx) {\n return `${c} - ${shift}`;\n } else {\n return c;\n }\n });\n return res.join();\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {createPackedConcatProgramInfoLoader} from './concat-packed';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nexport const concat: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConcatAttributes): Tensor[] => {\n validateInputs(inputs);\n if (inferenceHandler.session.pack && inputs[0].dims.length > 1) {\n const output =\n inferenceHandler.run(createPackedConcatProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n } else {\n const output =\n inferenceHandler.run(createUnpackedConcatProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n }\n };\n\nconst createUnpackedConcatProgramMetadata = (inputCount: number, cacheHint: string) => ({\n name: 'Concat',\n inputNames: Array.from({length: inputCount}, (_v, i) => `X${i}`),\n inputTypes: Array(inputCount).fill(TextureType.unpacked),\n cacheHint\n});\n\nconst createUnpackedConcatProgramInfo =\n (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n if (axis >= inputShape.length || axis < (-1 * inputShape.length)) {\n throw new Error('axis specified for concat doesn\\'t match input dimensionality');\n }\n if (axis < 0) {\n axis = inputShape.length + axis;\n }\n // ensure all of the non-concatenated axes match each other\n // calculate the shape of the output tensor while we do that\n const outputShape = inputShape.slice(0);\n for (let i = 1; i < inputs.length; i++) {\n const dataNShape = inputs[i].dims.slice();\n for (let axisIndex = 0; axisIndex < inputShape.length; axisIndex++) {\n // add to the placeholder for computing output shape\n if (axisIndex === axis) {\n outputShape[axis] += dataNShape[axisIndex];\n }\n // ensure all non-cancatenated axes match each other\n else if (inputShape[axisIndex] !== dataNShape[axisIndex]) {\n throw new Error('non concat dimensions must match');\n }\n }\n }\n\n const rank = outputShape.length;\n\n const sizeInConcatAxis = new Array(inputs.length);\n let previousSum = 0;\n for (let i = 0; i < sizeInConcatAxis.length; ++i) {\n previousSum += inputs[i].dims[axis];\n sizeInConcatAxis[i] = previousSum;\n }\n\n let getTextureIndexWhereDataResidesMethod = '';\n // in most cases linear search is sufficient, as in most scenarios, only 2 tensors are concatenated\n if (inputs.length < 5) {\n getTextureIndexWhereDataResidesMethod = getTextureIndexWhereDataResidesLinearSearch(sizeInConcatAxis);\n } else {\n getTextureIndexWhereDataResidesMethod = getTextureIndexWhereDataResidesBinarySearch(sizeInConcatAxis);\n }\n\n const fetchDataFromCorrectTextureMethod = getFetchDataFromCorrectTextureMethod(inputs.length, rank);\n const getSizeInConcatAxisValueFromIndexMethod = getGetSizeInConcatAxisValueFromIndexMethod(sizeInConcatAxis);\n const shaderSource = `\n ${fetchDataFromCorrectTextureMethod}\n ${getSizeInConcatAxisValueFromIndexMethod}\n ${getTextureIndexWhereDataResidesMethod}\n float 
process(int indices[${rank}]) {\n int textureIndex = getTextureWhereDataResides (indices[${axis}]);\n\n if(textureIndex != 0) {\n indices[${axis}] = indices[${axis}] - int(getSizeInConcatAxisValueFromIndex(textureIndex-int(1)));\n }\n\n return fetchDataFromCorrectTexture(textureIndex, indices);\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n };\n };\n\nconst createUnpackedConcatProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConcatAttributes): ProgramInfoLoader => {\n const metadata = createUnpackedConcatProgramMetadata(inputs.length, attributes.cacheKey);\n return {...metadata, get: () => createUnpackedConcatProgramInfo(handler, metadata, inputs, attributes.axis)};\n };\n\nconst getTextureIndexWhereDataResidesLinearSearch = (sizeInConcatAxis: number[]): string => {\n const searchAxis = sizeInConcatAxis.map((size, i) => `if(index<${size}) {return ${i};}\n`);\n return `int getTextureWhereDataResides(int index) {\n ${searchAxis.join('')}\n }`;\n};\n\n// TODO: Implement BinarySearch in GLSL\nconst getTextureIndexWhereDataResidesBinarySearch = (sizeInConcatAxis: number[]): string =>\n getTextureIndexWhereDataResidesLinearSearch(sizeInConcatAxis);\n\nconst getFetchDataFromCorrectTextureMethod = (numberOfTensors: number, tensorRank: number) => {\n const codeLines: string[] = [`float fetchDataFromCorrectTexture(int textureIndex, int indices[${tensorRank}]) {`];\n for (let i = 0; i < numberOfTensors; ++i) {\n if (i === 0) {\n codeLines.push(\n '\\t' +\n `if (textureIndex == ${i}) { return _X${i}(indices); }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(\n '\\t' +\n `else { return _X${i}(indices); }`);\n } else {\n codeLines.push(\n '\\t' +\n `else if (textureIndex == ${i}) { return _X${i}(indices); }`);\n }\n }\n codeLines.push(\n '\\t' +\n '}');\n return codeLines.join('\\n');\n};\n\nconst getGetSizeInConcatAxisValueFromIndexMethod = (sizeInConcatAxis: number[]): string => {\n const codeLines: string[] = ['int getSizeInConcatAxisValueFromIndex(int index) {'];\n for (let i = 0; i < sizeInConcatAxis.length; ++i) {\n if (i === 0) {\n codeLines.push(\n '\\t' +\n `if (index == ${i}) { return ${sizeInConcatAxis[i]}; }`);\n } else if (i === sizeInConcatAxis.length - 1) {\n codeLines.push(\n '\\t' +\n `else { return ${sizeInConcatAxis[i]}; }`);\n } else {\n codeLines.push(\n '\\t' +\n `else if (index == ${i}) { return ${sizeInConcatAxis[i]}; }`);\n }\n }\n codeLines.push(\n '\\t' +\n '}');\n\n return codeLines.join('\\n');\n};\n\nexport const parseConcatAttributes: OperatorInitialization = (node: Graph.Node): ConcatAttributes =>\n createAttributeWithCacheKey({axis: node.attributes.getInt('axis')});\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n\n const inputType = inputs[0].type;\n const inputDimensionality = inputs[0].dims.length;\n\n // TODO: Support string concat\n if (inputType === 'string') {\n throw new Error('string tensor is not supported yet');\n }\n\n for (const input of inputs) {\n // make sure types of all inputs match\n if (input.type !== inputType) {\n throw new Error('input tensors should be one type');\n }\n\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputDimensionality) {\n throw new Error('input tensors should have the same shape');\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {Tensor} from '../../../tensor';\nimport {MAX_CLIP, MIN_CLIP} from '../../../util';\nimport {FunctionType, GlslValueFunction} from '../glsl-definitions';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport function glslAbs(): GlslValueFunction {\n return glslBuiltinUnary('abs');\n}\nexport function glslAcos(): GlslValueFunction {\n return glslBuiltinUnary('acos');\n}\nexport function glslAsin(): GlslValueFunction {\n return glslBuiltinUnary('asin');\n}\nexport function glslAtan(): GlslValueFunction {\n return glslBuiltinUnary('atan');\n}\nexport function glslCeil(): GlslValueFunction {\n return glslBuiltinUnary('ceil');\n}\nexport function glslCos(): GlslValueFunction {\n return glslBuiltinUnary('cos');\n}\nexport function glslElu(alpha: number): GlslValueFunction {\n const name = 'elu';\n const body = `\n const float alpha = float(${alpha});\n\n float ${name}_(float a) {\n return a >= 0.0 ? a: (exp(a) - 1.0) * alpha;\n }\n vec4 ${name}_(vec4 v) {\n return vec4(${name}_(v.x), ${name}_(v.y), ${name}_(v.z), ${name}_(v.w));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslExp(): GlslValueFunction {\n return glslBuiltinUnary('exp');\n}\nexport function glslFloor(): GlslValueFunction {\n return glslBuiltinUnary('floor');\n}\nexport function glslClip(min: number, max: number): GlslValueFunction {\n const name = 'clip';\n const body = `\n const float min = float(${min});\n const float max = float(${max});\n\n float ${name}_(float a) {\n return clamp(a, min, max);\n }\n vec4 ${name}_(vec4 v) {\n return clamp(v, min, max);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslIdentity(): GlslValueFunction {\n const name = 'indentity';\n const body = `\n float ${name}_(float a) {\n return a;\n }\n vec4 ${name}_(vec4 v) {\n return v;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslLeakyRelu(alpha: number): GlslValueFunction {\n const name = 'leakyRelu';\n const body = `\n const float alpha = float(${alpha});\n\n float ${name}_(float a) {\n return a < 0.0 ? a * alpha : a;\n }\n vec4 ${name}_(vec4 v) {\n return vec4(${name}_(v.x), ${name}_(v.y), ${name}_(v.z), ${name}_(v.w));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslLog(): GlslValueFunction {\n return glslBuiltinUnary('log');\n}\nexport function glslNeg(): GlslValueFunction {\n const name = 'neg';\n const body = `\n float ${name}_(float a) {\n return -a;\n }\n vec4 ${name}_(vec4 v) {\n return -v;\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslNot(): GlslValueFunction {\n const name = 'not';\n const body = `\n float ${name}_(float a) {\n return float( ! 
bool(a) );\n }\n bool ${name}_(bool a) {\n return !a;\n }\n vec4 ${name}_(vec4 v) {\n return vec4(!bool(v.x), !bool(v.y), !bool(v.z), !bool(v.w));\n }\n bvec4 ${name}_(bvec4 v) {\n return bvec4(!v.x, !v.y, !v.z, !v.w);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSin(): GlslValueFunction {\n return glslBuiltinUnary('sin');\n}\nexport function glslRelu(): GlslValueFunction {\n const name = 'relu';\n const body = `\n float ${name}_(float a) {\n return max( a, 0.0 );\n }\n vec4 ${name}_(vec4 v) {\n return max( v, 0.0 );\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSigmoid(): GlslValueFunction {\n const name = 'sigmoid';\n const body = `\n float ${name}_(float a) {\n return 1.0 / (1.0 + exp(-a));\n }\n vec4 ${name}_(vec4 v) {\n return 1.0 / (1.0 + exp(-v));\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nexport function glslSqrt(): GlslValueFunction {\n return glslBuiltinUnary('sqrt');\n}\nexport function glslTan(): GlslValueFunction {\n return glslBuiltinUnary('tan');\n}\nexport function glslTanh(): GlslValueFunction {\n const name = 'tanh';\n const body = `\n float ${name}_(float a) {\n a = clamp(a, -10., 10.);\n a = exp(2.*a);\n return (a - 1.) / (a + 1.);\n }\n vec4 ${name}_(vec4 v) {\n v = clamp(v, -10., 10.);\n v = exp(2.*v);\n return (v - 1.) / (v + 1.);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\nfunction glslBuiltinUnary(name: string): GlslValueFunction {\n const body = `\n float ${name}_(float a) {\n return ${name}(a);\n }\n vec4 ${name}_(vec4 v) {\n return ${name}(v);\n }\n `;\n return {body, name, type: FunctionType.ValueBased};\n}\n\n/////\n/////\n/////\n\nconst createElementwiseProgramInfo =\n (handler: WebGLInferenceHandler, metadata: ProgramMetadata, input: Tensor, glslFunc: GlslValueFunction):\n ProgramInfo => {\n const textureType = handler.session.pack ? TextureType.packed : TextureType.unpacked;\n const glsl = getGlsl(handler.session.backend.glContext.version);\n return {\n ...metadata,\n output: {dims: input.dims, type: input.type, textureType},\n shaderSource: `\n ${glslFunc.body}\n void main() {\n vec4 v = ${glsl.texture2D}(A, TexCoords);\n v = ${glslFunc.name}_(v);\n ${glsl.output} = v;\n }\n `,\n hasMain: true\n };\n };\n\nconst createElementwiseProgramInfoLoader =\n (handler: WebGLInferenceHandler, input: Tensor, glslFunc: GlslValueFunction, cacheKey?: string):\n ProgramInfoLoader => {\n const textureType = handler.session.pack ? 
TextureType.packed : TextureType.unpacked;\n const metadata = {name: glslFunc.name, inputTypes: [textureType], inputNames: ['A'], cacheHint: cacheKey};\n return {...metadata, get: () => createElementwiseProgramInfo(handler, metadata, input, glslFunc)};\n };\n\nexport const abs = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAbs()), inputs)];\n\nexport const acos = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAcos()), inputs)];\n\nexport const asin = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAsin()), inputs)];\n\nexport const atan = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslAtan()), inputs)];\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nexport const clip =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ClipAttributes): Tensor[] => [handler.run(\n createElementwiseProgramInfoLoader(\n handler, inputs[0], glslClip(attributes.min, attributes.max), attributes.cacheKey),\n inputs)];\n\nexport const parseClipAttributes = (node: Graph.Node): ClipAttributes => createAttributeWithCacheKey(\n {min: node.attributes.getFloat('min', MIN_CLIP), max: node.attributes.getFloat('max', MAX_CLIP)});\n\nexport const clipV11 = (handler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n const attributes = generateClipAttributesFromInputs(handler, inputs);\n return clip(handler, [inputs[0]], attributes);\n};\n\nconst generateClipAttributesFromInputs = (handler: WebGLInferenceHandler, inputs: Tensor[]): ClipAttributes => {\n if (inputs.length >= 3 &&\n (!handler.session.isInitializer(inputs[1].dataId) || !handler.session.isInitializer(inputs[2].dataId))) {\n throw new Error('dynamic clip attributes are not allowed');\n }\n\n const min = (inputs.length >= 3) ? inputs[1].numberData[0] : MIN_CLIP;\n const max = (inputs.length >= 3) ? 
inputs[2].numberData[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const ceil = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslCeil()), inputs)];\n\nexport const cos = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslCos()), inputs)];\n\nexport interface EluAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const elu =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: EluAttributes): Tensor[] => [handler.run(\n createElementwiseProgramInfoLoader(handler, inputs[0], glslElu(attributes.alpha), attributes.cacheKey),\n inputs)];\n\nexport const parseEluAttributes = (node: Graph.Node): EluAttributes =>\n createAttributeWithCacheKey({alpha: node.attributes.getFloat('alpha', 1.0)});\n\nexport const exp = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslExp()), inputs)];\n\nexport const floor = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslFloor()), inputs)];\n\nexport const identity = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslIdentity()), inputs)];\n\nexport interface LeakyReluAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const leakyRelu =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: LeakyReluAttributes): Tensor[] => [handler.run(\n createElementwiseProgramInfoLoader(handler, inputs[0], glslLeakyRelu(attributes.alpha), attributes.cacheKey),\n inputs)];\n\nexport const parseLeakyReluAttributes = (node: Graph.Node): LeakyReluAttributes =>\n createAttributeWithCacheKey({alpha: node.attributes.getFloat('alpha', 0.01)});\n\nexport const log = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslLog()), inputs)];\n\nexport const neg = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslNeg()), inputs)];\n\nexport const not = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslNot()), inputs)];\n\nexport const relu = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslRelu()), inputs)];\n\nexport const sigmoid = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslSigmoid()), inputs)];\n\nexport const sin = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslSin()), inputs)];\n\nexport const sqrt = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslSqrt()), inputs)];\n\nexport const tan = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => [handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslTan()), inputs)];\n\nexport const tanh = (handler: WebGLInferenceHandler, inputs: Tensor[]):\n Tensor[] => 
[handler.run(createElementwiseProgramInfoLoader(handler, inputs[0], glslTanh()), inputs)];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Attribute} from '../../../attribute';\nimport {MAX_CLIP, MIN_CLIP} from '../../../util';\nimport {GlslValueFunction} from '../glsl-definitions';\n\nimport {glslClip, glslRelu, glslSigmoid} from './unary-op';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly activationCacheKey: string;\n}\n\nexport function getActivationSnippet(attributes: InternalActivationAttributes) {\n let func: GlslValueFunction;\n switch (attributes.activation) {\n case 'Relu':\n func = glslRelu();\n break;\n case 'Sigmoid':\n func = glslSigmoid();\n break;\n case 'Clip':\n func = glslClip(attributes.clipMin!, attributes.clipMax!);\n break;\n // TODO: adding other activations that can be fused.\n default:\n return {activationFunction: '', applyActivation: ''};\n }\n\n const activationName = func.name;\n const activationFunction = func.body;\n const applyActivation = `value = ${activationName}_(value);`;\n return {activationFunction, applyActivation};\n}\n\nexport const parseInternalActivationAttributes = (attributes: Attribute): InternalActivationAttributes => {\n const activation = attributes.getString('activation', '');\n\n if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes.getFloats('activation_params', [MIN_CLIP, MAX_CLIP]);\n return {activation, clipMax, clipMin, activationCacheKey: `${activation}:${clipMin},${clipMax}`};\n }\n return {activation, activationCacheKey: activation};\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../../instrument';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {getActivationSnippet} from './fuse-utils';\n\nconst createUnpackedGroupedConvProgramMetadata = (hasBias: boolean, cacheHint: string): ProgramMetadata => ({\n name: 'GroupedConv',\n inputNames: hasBias ? ['X', 'W', 'Bias'] : ['X', 'W'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n cacheHint\n});\n\nconst createUnpackedGroupedConvProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], metadata: ProgramMetadata,\n attributes: ConvAttributes): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 
'value += getBias(output_channel);' : '';\n const xShape = inputs[0].dims.slice();\n const wShape = inputs[1].dims.slice();\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n Logger.verbose(\n 'GroupedConv',\n `autpPad:${attributes.autoPad}, dilations:${attributes.dilations}, group:${attributes.group}, kernelShape:${\n attributes.kernelShape}, pads:${attributes.pads}, strides:${attributes.strides}`);\n const outputShape =\n calculateOutputShape(xShape, wShape, attributes.dilations, attributes.pads, attributes.strides);\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const {activationFunction, applyActivation} = getActivationSnippet(attributes);\n\n const shaderSource = `\n const ivec2 strides = ivec2(${attributes.strides[0]}, ${attributes.strides[1]});\n const ivec2 pads = ivec2(${attributes.pads[0]}, ${attributes.pads[1]});\n ${activationFunction}\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n int output_channel = coords.y;\n ivec2 xRCCorner = coords.zw * strides - pads;\n int group_id = output_channel / ${outputChannelsPerGroup};\n\n float value = 0.0;\n for (int wInChannel = 0; wInChannel < ${wShape[1]}; wInChannel++) {\n int input_channel = group_id * ${wShape[1]} + wInChannel;\n for (int wHeight = 0; wHeight < ${wShape[2]}; wHeight++) {\n int xHeight = xRCCorner.x + wHeight * ${attributes.dilations[0]};\n\n if (xHeight < 0 || xHeight >= ${xShape[2]}) {\n continue;\n }\n\n for (int wWidth = 0; wWidth < ${wShape[3]}; wWidth++) {\n int xWidth = xRCCorner.y + wWidth * ${attributes.dilations[1]};\n if (xWidth < 0 || xWidth >= ${xShape[3]}) {\n continue;\n }\n\n float xVal = getX(batch, input_channel, xWidth, xHeight);\n float wVal = getW(output_channel, wInChannel, wWidth, wHeight);\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${glsl.output} = vec4(value, .0, .0, .0);\n }\n`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n hasMain: true,\n };\n };\n\nexport const createUnpackedGroupedConvProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes):\n ProgramInfoLoader => {\n const metadata = createUnpackedGroupedConvProgramMetadata(inputs.length > 2, attributes.cacheKey);\n return {\n ...metadata,\n get: () => createUnpackedGroupedConvProgramInfo(inferenceHandler, inputs, metadata, attributes)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {ConvAttributes} from './conv';\nimport {unpackFromChannel} from './packing-utils';\n\nconst createPackedIm2ColProgramMetadata = (cacheHint: string) => ({\n name: 'Im2Col (packed)',\n inputNames: ['A'],\n inputTypes: [TextureType.packed],\n cacheHint,\n});\n\nconst createPackedIm2ColProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, x: Tensor, w: Tensor,\n outputShape: readonly number[], attributes: ConvAttributes): ProgramInfo => {\n const xshape = x.dims;\n const wshape = w.dims;\n const rowDim = 2;\n const colDim = 3;\n const rank = outputShape.length;\n const im2colShape = [wshape[1] * wshape[2] * wshape[3], outputShape[2] * outputShape[3]];\n const kernelSize = wshape[2] * wshape[3];\n const unpackChannel = unpackFromChannel();\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n let unrolled = '';\n\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n unrolled += `\n blockIndex = rc.x + ${col};\n pos = rc.y + ${row};\n\n if(blockIndex < ${im2colShape[1]} && pos < ${im2colShape[0]}) {\n offsetY = int(blockIndex / (${outputShape[rank - 1]})) * ${attributes.strides[0]} -\n ${attributes.pads[0]};\n d0 = offsetY + ${attributes.dilations[0]} * (imod(pos, ${kernelSize}) / ${wshape[2]});\n\n if(d0 < ${xshape[rowDim]} && d0 >= 0) {\n offsetX = imod(blockIndex, ${outputShape[rank - 1]}) * ${attributes.strides[1]} -\n ${attributes.pads[1]};\n d1 = offsetX + ${attributes.dilations[1]} * imod(imod(pos, ${kernelSize}), ${wshape[2]});\n\n if(d1 < ${xshape[colDim]} && d1 >= 0) {\n\n ch = int(float(pos)/ ${kernelSize}.);\n innerDims = vec2(d0, d1);\n result[${row * 2 + col}] = getChannel(\n getA(0, ch, int(innerDims.x),\n int(innerDims.y)), innerDims);\n }\n }\n }\n\n `;\n }\n }\n\n const shaderSource = `\n ${unpackChannel}\n\n void main() {\n ivec2 rc = getOutputCoords();\n vec4 result = vec4(0.0);\n int blockIndex, pos, offsetY, d0, offsetX, d1, ch;\n vec2 innerDims;\n ${unrolled}\n ${glsl.output} = result;\n }\n `;\n return {\n ...metadata,\n output: {dims: im2colShape, type: x.type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const createPackedIm2ColProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, x: Tensor, w: Tensor, outputShape: readonly number[],\n attributes: ConvAttributes): ProgramInfoLoader => {\n const metadata = createPackedIm2ColProgramMetadata(attributes.cacheKey);\n return {\n ...metadata,\n get: () => createPackedIm2ColProgramInfo(inferenceHandler, metadata, x, w, outputShape, attributes)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {BroadcastUtil, ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\nimport {getCoordsDataType, getGlChannels} from '../utils';\n\nimport {getActivationSnippet, InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createPackedMatmulProgramInfoLoader} from './matmul-pack';\n\nexport const matMul: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: InternalActivationAttributes): Tensor[] => {\n validateInputs(inputs);\n\n if (inferenceHandler.session.pack) {\n return [inferenceHandler.run(\n createPackedMatmulProgramInfoLoader(inferenceHandler, inputs, attributes), inputs)];\n } else {\n return [inferenceHandler.run(createMatmulProgramInfoLoader(inputs, attributes), inputs)];\n }\n };\n\nexport const parseMatMulAttributes: OperatorInitialization =\n (node: Graph.Node): InternalActivationAttributes => parseInternalActivationAttributes(node.attributes);\n\nconst createMatmulProgramMetadata = (hasBias: boolean, cacheHint: string) => ({\n name: 'MatMul',\n inputNames: hasBias ? ['A', 'B', 'Bias'] : ['A', 'B'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n cacheHint\n});\n\nfunction createMatmulProgramInfo(\n metadata: ProgramMetadata, inputs: Tensor[], activationAttributes: InternalActivationAttributes): ProgramInfo {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outputShape = BroadcastUtil.calcShape(aShape, bShape, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const coordsDataType = getCoordsDataType(outputShape.length);\n const allGlChannels = getGlChannels();\n const {activationFunction, applyActivation} = getActivationSnippet(activationAttributes);\n\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += getBiasForMatmul();' : '';\n const getBiasForMatmulSnippet =\n hasBias ? 
`${getBiasForMatmul(coordsDataType, allGlChannels, inputs[2].dims, outputShape, false)}` : '';\n\n const rank = outputShape.length;\n const arank = aShape.length;\n const brank = bShape.length;\n const sharedDim = aShape[aShape.length - 1];\n const shaderSource = `\n ${activationFunction}\n ${getBiasForMatmulSnippet}\n float process(int indices[${rank}]) {\n int a[${arank}];\n int b[${brank}];\n bcastMatmulIndices_A(indices, a);\n bcastMatmulIndices_B(indices, b);\n\n float value;\n for (int k=0; k<${sharedDim}; ++k) {\n a[${arank - 1}] = k;\n b[${brank - 2}] = k;\n value += _A(a) * _B(b);\n }\n ${processBias}\n ${applyActivation}\n return value;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n };\n}\n\nexport function createMatmulProgramInfoLoader(\n inputs: Tensor[], activationAttributes: InternalActivationAttributes): ProgramInfoLoader {\n const metadata = createMatmulProgramMetadata(inputs.length > 2, activationAttributes.activationCacheKey);\n return {...metadata, get: () => createMatmulProgramInfo(metadata, inputs, activationAttributes)};\n}\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n\n if ((inputs[0].type !== 'float32' && inputs[0].type !== 'float64') ||\n (inputs[1].type !== 'float32' && inputs[1].type !== 'float64')) {\n throw new Error('inputs should be float type');\n }\n\n if (inputs[0].type !== inputs[1].type) {\n throw new Error('inputs types should match');\n }\n};\n\nexport function getBiasForMatmul(\n coordsDataType: string, allGlChannels: readonly string[], inShape: readonly number[], outShape: readonly number[],\n isPacked: boolean): string {\n let unpackedCoordsSnippet = '';\n const inRank = inShape.length;\n const outRank = outShape.length;\n const rankDiff = outRank - inRank;\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inShape.map((_s, i) => `coords.${allGlChannels[i + rankDiff]}`).join(', ');\n }\n const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);\n const coordsSnippet = broadcastDims.map(d => `coords.${allGlChannels[d + rankDiff]} = 0;`).join('\\n');\n const inSize = ShapeUtil.size(inShape);\n const isInputScalar = inSize === 1;\n let output = 'vec4(outputValue.xx, outputValue.yy)';\n if (isInputScalar) {\n output = 'vec4(outputValue.x)';\n }\n const getBiasForMatmulSource = isPacked ? `\nvec4 getBiasForMatmul() {\n ${coordsDataType} coords = getOutputCoords();\n ${coordsSnippet}\n vec4 outputValue = getBias(${unpackedCoordsSnippet});\n return ${output};\n}` :\n `\nfloat getBiasForMatmul() {\n ${coordsDataType} coords = getOutputCoords();\n ${coordsSnippet}\n return getBias(coords.x);\n}`;\n\n return getBiasForMatmulSource;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {BroadcastUtil, ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\nimport {getCoordsDataType, getGlChannels} from '../utils';\n\nimport {getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\nimport {getBiasForMatmul} from './matmul';\n\nconst createPackedMatmulProgramMetadata = (hasBias: boolean, cacheHint: string) => ({\n name: 'MatMul (packed)',\n inputNames: hasBias ? ['A', 'B', 'Bias'] : ['A', 'B'],\n inputTypes: hasBias ? [TextureType.packed, TextureType.packed, TextureType.packed] :\n [TextureType.packed, TextureType.packed],\n cacheHint\n});\n\nconst createPackedMatmulProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[],\n activationAttributes: InternalActivationAttributes): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += getBiasForMatmul();' : '';\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outputShape = BroadcastUtil.calcShape(aShape, bShape, true);\n const isBroadcast = !ShapeUtil.areEqual(inputs[0].dims, inputs[1].dims);\n\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const sharedDim = aShape[aShape.length - 1];\n const sharedDimIndex = Math.ceil(sharedDim / 2);\n const aRank = aShape.length;\n const bRank = bShape.length;\n\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const coordsDataType = getCoordsDataType(outputShape.length);\n const outRank = outputShape.length;\n const allGlChannels = getGlChannels();\n const {activationFunction, applyActivation} = getActivationSnippet(activationAttributes);\n\n const getBiasForMatmulSnippet =\n hasBias ? `${getBiasForMatmul(coordsDataType, allGlChannels, inputs[2].dims, outputShape, true)}` : '';\n\n const getBcastedSamplerForMatmulSnippet =\n isBroadcast ? `${getBcastSamplerForMatmul(coordsDataType, allGlChannels, inputs, outputShape)}` : '';\n\n const getSamplerAInLoopSnippet = isBroadcast ? 'getAAtOutCoordsMatmul(i)' : `getA(${getA(allGlChannels, aRank)})`;\n const getSamplerBInLoopSnippet = isBroadcast ? 'getBAtOutCoordsMatmul(i)' : `getB(${getB(allGlChannels, bRank)})`;\n const getOutputCoordsSnippet = isBroadcast ? 
'' : `${coordsDataType} rc =\n getOutputCoords(); int lastDim = rc.${allGlChannels[outRank - 1]}; rc.${allGlChannels[outRank - 1]} =\n rc.${allGlChannels[outRank - 2]}; rc.${allGlChannels[outRank - 2]} = lastDim;\n `;\n const shaderSource = `\n ${getBcastedSamplerForMatmulSnippet}\n ${getBiasForMatmulSnippet}\n ${activationFunction}\n void main() {\n ${getOutputCoordsSnippet}\n\n vec4 value = vec4(0);\n for (int i = 0; i < ${sharedDimIndex}; i++) {\n vec4 a = ${getSamplerAInLoopSnippet};\n vec4 b = ${getSamplerBInLoopSnippet};\n\n value += (a.rrbb * b.rgrg);\n value += (a.ggaa * b.baba);\n }\n ${processBias}\n ${applyActivation}\n ${glsl.output} = value;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n shaderSource,\n hasMain: true\n };\n };\n\nexport const createPackedMatmulProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[],\n activationAttributes: InternalActivationAttributes): ProgramInfoLoader => {\n const metadata = createPackedMatmulProgramMetadata(inputs.length > 2, activationAttributes.activationCacheKey);\n return {\n ...metadata,\n get: () => createPackedMatmulProgramInfo(inferenceHandler, metadata, inputs, activationAttributes)\n };\n };\n\nfunction getBcastSamplerForMatmul(\n coordsDataType: string, allGlChannels: readonly string[], inputs: Tensor[], outShape: readonly number[]): string {\n let unpackedACoordsSnippet = [];\n let unpackedBCoordsSnippet = [];\n\n const inAShape = inputs[0].dims;\n const inBShape = inputs[1].dims;\n\n const inARank = inAShape.length;\n const inBRank = inBShape.length;\n\n const outRank = outShape.length;\n const rankADiff = outRank - inARank;\n const rankBDiff = outRank - inBRank;\n\n unpackedACoordsSnippet = inAShape.map((_s, i) => `coords.${allGlChannels[i + rankADiff]}`);\n unpackedACoordsSnippet[inARank - 1] = 'i*2';\n unpackedACoordsSnippet.join(', ');\n unpackedBCoordsSnippet = inBShape.map((_s, i) => `coords.${allGlChannels[i + rankBDiff]}`);\n unpackedBCoordsSnippet[inBRank - 2] = 'i*2';\n unpackedBCoordsSnippet.join(', ');\n\n const broadcastADims = BroadcastUtil.getBroadcastDims(inAShape, outShape);\n const broadcastBDims = BroadcastUtil.getBroadcastDims(inBShape, outShape);\n\n const coordsASnippet = broadcastADims.map(d => `coords.${allGlChannels[d + rankADiff]} = 0;`).join('\\n');\n const coordsBSnippet = broadcastBDims.map(d => `coords.${allGlChannels[d + rankBDiff]} = 0;`).join('\\n');\n const swapDimSnippet = `int lastDim = coords.${allGlChannels[outRank - 1]};\n coords.${allGlChannels[outRank - 1]} = coords.${allGlChannels[outRank - 2]};\n coords.${allGlChannels[outRank - 2]} = lastDim;`;\n\n const getBcastSamplerMatmulSource = `\nvec4 getAAtOutCoordsMatmul(int i) {\n ${coordsDataType} coords = getOutputCoords();\n ${swapDimSnippet}\n ${coordsASnippet}\n vec4 outputValue = getA(${unpackedACoordsSnippet});\n return outputValue;\n}\n\nvec4 getBAtOutCoordsMatmul(int i) {\n ${coordsDataType} coords = getOutputCoords();\n ${swapDimSnippet}\n ${coordsBSnippet}\n vec4 outputValue = getB(${unpackedBCoordsSnippet});\n return outputValue;\n}`;\n\n return getBcastSamplerMatmulSource;\n}\n\nfunction getA(allGlChannels: string[], rank: number): string {\n let res = '';\n for (let i = 0; i < rank - 2; i++) {\n res += `rc.${allGlChannels[i]}, `;\n }\n res += `rc.${allGlChannels[rank - 2]}, ` +\n 'i*2';\n return res;\n}\n\nfunction getB(allGlChannels: string[], rank: number): string {\n let res = '';\n for (let i = 0; i < rank - 2; i++) {\n res += 
`rc.${allGlChannels[i]}, `;\n }\n res += 'i*2, ' +\n `rc.${allGlChannels[rank - 1]}`;\n return res;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {createPackedIm2ColProgramInfoLoader} from './im2col-pack';\nimport {createPackedMatmulProgramInfoLoader} from './matmul-pack';\n\nexport const conv2DPackedPointwise =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n const reshapedX = inferenceHandler.reshapePacked(inputs[0], [xshape[1], xshape[2] * xshape[3]]);\n const reshapedK = inferenceHandler.reshapePacked(inputs[1], [kshape[0], kshape[1]]);\n\n const matmulInputs = inputs.length > 2 ? [reshapedK, reshapedX, inputs[2]] : [reshapedK, reshapedX];\n const matmulOutput = inferenceHandler.run(\n createPackedMatmulProgramInfoLoader(inferenceHandler, matmulInputs, attributes), matmulInputs);\n return inferenceHandler.reshapePacked(matmulOutput, outputShape);\n };\n\nexport const conv2DPacked =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n\n // run im2col\n const im2colOutput = inferenceHandler.run(\n createPackedIm2ColProgramInfoLoader(inferenceHandler, inputs[0], inputs[1], outputShape, attributes),\n [inputs[0]]);\n\n // reshape kernel\n const kernelReshaped = inferenceHandler.reshapePacked(inputs[1], [kshape[0], kshape[1] * kshape[2] * kshape[3]]);\n\n // run matmul\n const matmulInputs =\n (inputs.length === 3) ? [kernelReshaped, im2colOutput, inputs[2]] : [kernelReshaped, im2colOutput];\n const matmulOutput = inferenceHandler.run(\n createPackedMatmulProgramInfoLoader(inferenceHandler, matmulInputs, attributes), matmulInputs);\n\n // reshape output\n const outputReshaped = inferenceHandler.reshapePacked(matmulOutput, outputShape);\n return outputReshaped;\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {ConvAttributes} from './conv';\n\nconst createIm2ColProgramMetadata = (cacheHint: string) => ({\n name: 'Im2Col',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n cacheHint,\n});\n\nconst createIm2ColProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, x: Tensor, w: Tensor,\n outputShape: readonly number[], attributes: ConvAttributes): ProgramInfo => {\n const xshape = x.dims;\n const wshape = w.dims;\n\n const rank = outputShape.length;\n const im2colDims = calculateIm2ColDims(xshape, wshape, outputShape, 4);\n\n const shaderSource = `\n const int XC = ${xshape[1]};\n const int XH = ${xshape[2]};\n const int XW = ${xshape[3]};\n const int KH = ${attributes.kernelShape[0]};\n const int KW = ${attributes.kernelShape[1]};\n const int dilationH = ${attributes.dilations[0]};\n const int dilationW = ${attributes.dilations[1]};\n const int strideH = ${attributes.strides[0]};\n const int strideW = ${attributes.strides[1]};\n const int padH = ${attributes.pads[0]};\n const int padW = ${attributes.pads[1]};\n const int KHKW = KH*KW;\n const int XCKHKW = XC * KHKW;\n const int outputChannels = 4;\n vec4 process(int indices[${rank}]) {\n int b = indices[0]; // batch size\n int oh = indices[1] * strideH - padH; //output height\n int ow = indices[2] * strideW - padW; //output width\n int p = indices[3] * outputChannels; //patch\n vec4 value = vec4(0.0);\n for(int i=0; i < outputChannels; ++i) {\n if(p < XCKHKW) {\n int patchC = p / KHKW;\n int patchH = (p - patchC*KHKW) / KW;\n int patchW = (p - patchC*KHKW) - patchH * KW;\n int xh2 = oh + patchH * dilationH;\n int xw2 = ow + patchW * dilationW;\n int x[${xshape.length}];\n x[0] = b;\n x[1] = patchC;\n x[2] = xh2;\n x[3] = xw2;\n if(xh2 >= 0 &&\n xh2 < XH &&\n xw2 >= 0 &&\n xw2 < XW) {\n value[i] = _X(x);\n }\n }\n ++p;\n }\n return value;\n }\n `;\n return {\n ...metadata,\n output: {dims: im2colDims, type: x.type, textureType: TextureType.packedLastDimension},\n shaderSource\n };\n };\n\nexport const createIm2ColProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, x: Tensor, w: Tensor, outputShape: readonly number[],\n attributes: ConvAttributes): ProgramInfoLoader => {\n const metadata = createIm2ColProgramMetadata(attributes.cacheKey);\n return {\n ...metadata,\n get: () => createIm2ColProgramInfo(inferenceHandler, metadata, x, w, outputShape, attributes)\n };\n };\n\n\nexport const calculateIm2ColDims =\n (inputShape: readonly number[], kernelShape: readonly number[], outputShape: readonly number[], channels = 4):\n number[] =>\n [outputShape[0], outputShape[2], outputShape[3],\n Math.ceil(inputShape[1] * kernelShape[2] * kernelShape[3] / channels)];\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\nimport {calculateIm2ColDims} from './im2col';\n\nconst createDotProductProgramMetadata = (hasBias: boolean, attributes: InternalActivationAttributes) => ({\n name: 'ConvDotProduct',\n inputNames: hasBias ? ['Im2Col', 'K', 'B'] : ['Im2Col', 'K'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.packedLastDimension, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.packedLastDimension],\n cacheKey: attributes.activationCacheKey\n});\n\nconst createDotProductProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: readonly Tensor[],\n outputShape: number[], attributes: InternalActivationAttributes): ProgramInfo => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const adjustedKernelShape = [kshape[0], Math.ceil((xshape[1] * kshape[2] * kshape[3]) / 4)];\n const im2colShape = calculateIm2ColDims(xshape, kshape, outputShape);\n const [kWidth, kHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(adjustedKernelShape, TextureType.packedLastDimension);\n\n const im2colStrides = ShapeUtil.computeStrides(im2colShape);\n const [im2colWidth, im2colHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(im2colShape, TextureType.packedLastDimension);\n const rank = outputShape.length;\n\n const initValue = (inputs.length < 3) ? '0.0' : '_B(b)';\n const sharedDim = Math.ceil(xshape[1] * kshape[2] * kshape[3] / 4);\n const {activationFunction, applyActivation} = getActivationSnippet(attributes);\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const shaderSource = `\n${activationFunction}\nfloat process(int indices[${rank}]) {\n int b[1];\n b[0] = indices[1];\n int im2col[4];\n im2col[0] = indices[0];\n im2col[1] = indices[2];\n im2col[2] = indices[3];\n int im2colOffset = im2col[0] * ${im2colStrides[0]} + im2col[1] * ${im2colStrides[1]} + im2col[2] * ${\n im2colStrides[2]};\n int kernelOffset = indices[1] * ${adjustedKernelShape[1]};\n float value = ${initValue};\n for (int i = 0; i < ${sharedDim}; ++i) {\n vec2 im2colCoords = offsetToCoords(im2colOffset, ${im2colWidth}, ${im2colHeight});\n vec2 kernelCoords = offsetToCoords(kernelOffset, ${kWidth}, ${kHeight});\n value += dot(${glsl.texture2D}(Im2Col, im2colCoords), ${glsl.texture2D}(K, kernelCoords));\n ++im2colOffset;\n ++kernelOffset;\n }\n ${applyActivation}\n return value;\n}`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nexport const createDotProductProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], outputShape: number[],\n attributes: InternalActivationAttributes): ProgramInfoLoader => {\n const metadata = createDotProductProgramMetadata(inputs.length > 2, attributes);\n return {\n ...metadata,\n get: () => createDotProductProgramInfo(inferenceHandler, metadata, inputs, outputShape, attributes)\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {InferenceHandler} from '../../../backend';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {PoolConvUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nimport {createUnpackedGroupedConvProgramInfoLoader} from './conv-grouped';\nimport {conv2DPacked} from './conv-pack';\nimport {createDotProductProgramInfoLoader} from './dot-product';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createIm2ColProgramInfoLoader} from './im2col';\nimport {createMatmulProgramInfoLoader} from './matmul';\n\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[]): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(2);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputSpatialShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n const outputShape = [batchSize, outChannels].concat(...outputSpatialShape);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n}\n\nexport const conv: OperatorImplementation =\n (inferenceHandler: InferenceHandler, inputs: Tensor[], attributes: ConvAttributes): Tensor[] => {\n validateInputs(inputs, attributes); // currently will fail if not conv2D\n return conv2d(inferenceHandler, inputs, attributes);\n };\n\nconst conv2d: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConvAttributes): Tensor[] => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n const packMode = inferenceHandler.session.pack;\n const isPointwise = adjustedAttributes.kernelShape[0] === 1 && adjustedAttributes.kernelShape[1] === 1;\n if (adjustedAttributes.group > 1) {\n const result = inferenceHandler.run(\n createUnpackedGroupedConvProgramInfoLoader(inferenceHandler, inputs, adjustedAttributes), inputs);\n return [result];\n } else if (isPointwise && packMode) {\n return [conv2DUnpackedPointwise(inferenceHandler, inputs, adjustedAttributes)];\n } else if (packMode && inputs[0].dims.length === 4 && inputs[0].dims[0] === 1 && !isPointwise) {\n return [conv2DPacked(inferenceHandler, inputs, adjustedAttributes)];\n } else {\n return [conv2DUnpacked(inferenceHandler, inputs, adjustedAttributes)];\n }\n };\n\nconst conv2DUnpackedPointwise =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const 
outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n const reshapedX = inferenceHandler.reshapeUnpacked(inputs[0], [xshape[1], xshape[2] * xshape[3]]);\n const reshapedK = inferenceHandler.reshapeUnpacked(inputs[1], [kshape[0], kshape[1]]);\n\n const matmulInputs = inputs.length > 2 ? [reshapedK, reshapedX, inputs[2]] : [reshapedK, reshapedX];\n const matmulOutput = inferenceHandler.run(createMatmulProgramInfoLoader(matmulInputs, attributes), matmulInputs);\n return inferenceHandler.reshapeUnpacked(matmulOutput, outputShape);\n };\n\nconst conv2DUnpacked =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvAttributes): Tensor => {\n const xshape = inputs[0].dims;\n const kshape = inputs[1].dims;\n const outputShape =\n calculateOutputShape(xshape, kshape, attributes.dilations, attributes.pads, attributes.strides);\n const xIm2Col = inferenceHandler.run(\n createIm2ColProgramInfoLoader(inferenceHandler, inputs[0], inputs[1], outputShape, attributes), [inputs[0]]);\n\n const dotProductInputs = inputs.length === 3 ? [xIm2Col, inputs[1], inputs[2]] : [xIm2Col, inputs[1]];\n const output = inferenceHandler.run(\n createDotProductProgramInfoLoader(inferenceHandler, inputs, outputShape, attributes), dotProductInputs);\n return output;\n };\n\nconst getAdjustedConvAttributes = (attributes: T, inputs: Tensor[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0) {\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, cacheKey: attributes.cacheKey});\n return newAttributes;\n};\n\nexport const parseConvAttributes: OperatorInitialization = (node: Graph.Node): ConvAttributes => {\n const attributes = node.attributes;\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const autoPad = attributes.getString('auto_pad', 'NOTSET');\n const dilations = attributes.getInts('dilations', [1, 1]);\n const group = attributes.getInt('group', 1);\n const kernelShape = attributes.getInts('kernel_shape', []);\n const pads = attributes.getInts('pads', [0, 0, 0, 0]);\n const strides = attributes.getInts('strides', [1, 1]);\n\n return createAttributeWithCacheKey({autoPad, dilations, group, kernelShape, pads, strides, ...activationAttributes});\n};\n\nconst validateInputs = (inputs: Tensor[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 || inputs[1].dims.length !== 4) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = 
inputs[0].dims[1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // TODO : Need to add support for float64\n if (inputs[0].type !== 'float32' || inputs[1].type !== 'float32') {\n throw new Error('Conv input(X,W) should be float tensor');\n }\n\n if (inputs.length === 3 && inputs[2].type !== 'float32') {\n throw new Error('Conv input(bias) should be float tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {InferenceHandler} from '../../../backend';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nimport {ConvAttributes} from './conv';\nimport {getActivationSnippet, parseInternalActivationAttributes} from './fuse-utils';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n pads: number[], strides: readonly number[], outputPadding: readonly number[], outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateShape = outputShape.length === 0;\n for (let i = 0; i < spatialRank; ++i) {\n const outSize = updateShape ? 
inputShape[i + 2] * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inputShape[i + 2], strides[i], pads[i], kernelShape[i], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateShape) {\n outputShape.push(\n strides[i] * (inputShape[i + 2] - 1) + outputPadding[i] + (kernelShape[i] - 1) * dilations[i] + 1 -\n pads[i] - pads[i + spatialRank]);\n }\n }\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nexport const convTranspose: OperatorImplementation =\n (inferenceHandler: InferenceHandler, inputs: Tensor[], attributes: ConvTransposeAttributes): Tensor[] => {\n validateInputs(inputs, attributes); // currently will fail if not convTranspose2D\n return convTranspose2d(inferenceHandler, inputs, attributes);\n };\n\nconst convTranspose2d: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ConvTransposeAttributes): Tensor[] => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n return [convTranspose2DUnpacked(inferenceHandler, inputs, adjustedAttributes)];\n };\n\nconst createConvTransposeProgramMetadata = (hasBias: boolean, cacheHint: string) => ({\n name: 'ConvTranspose',\n inputNames: hasBias ? ['X', 'W', 'B'] : ['X', 'W'],\n inputTypes: hasBias ? [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n cacheHint\n});\n\nconst createUnpackedConvTransposeProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], metadata: ProgramMetadata,\n attributes: ConvTransposeAttributes): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const valueInit = hasBias ? 
'getB(output_channel)' : '0.0';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[1];\n const inputChannelsPerGroup = wShape[0] / attributes.group;\n const outputShape = [inputs[0].dims[0], inputs[1].dims[1] * attributes.group, ...attributes.outputShape];\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const {activationFunction, applyActivation} = getActivationSnippet(attributes);\n\n const shaderSource = `\n const ivec2 strides = ivec2(${attributes.strides[0]}, ${attributes.strides[1]});\n const ivec2 pads = ivec2(${attributes.pads[0]}, ${attributes.pads[1]});\n ${activationFunction}\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n int output_channel = coords.y;\n\n ivec2 loc = coords.zw + pads;\n\n int group_id = output_channel / ${outputChannelsPerGroup};\n int wOutChannel = output_channel - group_id * ${outputChannelsPerGroup};\n\n float value = ${valueInit};\n for (int inChannelOffset = 0; inChannelOffset < ${inputChannelsPerGroup}; inChannelOffset++) {\n int input_channel = group_id * ${inputChannelsPerGroup} + inChannelOffset;\n for (int wWOff = 0; wWOff < ${wShape[2]}; wWOff++) {\n for (int wHOff = 0; wHOff < ${wShape[3]}; wHOff++) {\n ivec2 wOff = ivec2(wWOff * ${attributes.dilations[0]}, wHOff * ${attributes.dilations[1]});\n ivec2 wLoc = loc - wOff;\n ivec2 wLocIn = wLoc / strides;\n if (\n wLocIn * strides == wLoc &&\n wLocIn.x >= 0 && wLocIn.x < ${xShape[2]} &&\n wLocIn.y >= 0 && wLocIn.y < ${xShape[3]}\n ) {\n float xVal = getX(batch, input_channel, wLocIn.y, wLocIn.x);\n float wVal = getW(input_channel, wOutChannel, wHOff, wWOff);\n value += xVal * wVal;\n }\n }\n }\n }\n ${applyActivation}\n ${glsl.output} = vec4(value, .0, .0, .0);\n }\n`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n hasMain: true,\n };\n };\n\nconst createUnpackedConvTransposeProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvTransposeAttributes):\n ProgramInfoLoader => {\n const metadata = createConvTransposeProgramMetadata(inputs.length > 2, attributes.cacheKey);\n return {\n ...metadata,\n get: () => createUnpackedConvTransposeProgramInfo(inferenceHandler, inputs, metadata, attributes)\n };\n };\n\n\nconst convTranspose2DUnpacked =\n (inferenceHandler: WebGLInferenceHandler, inputs: readonly Tensor[], attributes: ConvTransposeAttributes):\n Tensor => {\n const result = inferenceHandler.run(\n createUnpackedConvTransposeProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return result;\n };\n\nconst getAdjustedConvTransposeAttributes = (attributes: T, inputs: Tensor[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0) {\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const inputShape = inputs[0].dims;\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, attributes.dilations, attributes.autoPad, pads, attributes.strides,\n attributes.outputPadding, outputShape);\n\n // always 
return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputShape, cacheKey: attributes.cacheKey});\n return newAttributes;\n};\n\nexport const parseConvTransposeAttributes: OperatorInitialization =\n (node: Graph.Node): ConvTransposeAttributes => {\n const attributes = node.attributes;\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const autoPad = attributes.getString('auto_pad', 'NOTSET');\n const dilations = attributes.getInts('dilations', [1, 1]);\n const group = attributes.getInt('group', 1);\n const kernelShape = attributes.getInts('kernel_shape', []);\n const outputPadding = attributes.getInts('output_padding', [0, 0]);\n const outputShape = attributes.getInts('output_shape', []);\n const pads = attributes.getInts('pads', [0, 0, 0, 0]);\n const strides = attributes.getInts('strides', [1, 1]);\n\n return createAttributeWithCacheKey(\n {autoPad, dilations, group, kernelShape, outputPadding, outputShape, pads, strides, ...activationAttributes});\n };\n\nconst validateInputs = (inputs: Tensor[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 || inputs[1].dims.length !== 4) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n\n // TODO : Need to add support for float64\n if 
(inputs[0].type !== 'float32' || inputs[1].type !== 'float32') {\n throw new Error('ConvTranspose input(X,W) should be float tensor');\n }\n\n if (inputs.length === 3 && inputs[2].type !== 'float32') {\n throw new Error('ConvTranspose input(bias) should be float tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst transposeProgramMetadata = {\n name: 'Transpose',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked]\n};\n\nexport const transpose: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: TransposeAttributes): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...transposeProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createTransposeProgramInfo(inferenceHandler, inputs[0], attributes.perm)\n },\n inputs);\n return [output];\n };\n\nexport const parseTransposeAttributes: OperatorInitialization =\n (node: Graph.Node): TransposeAttributes => createAttributeWithCacheKey({perm: node.attributes.getInts('perm', [])});\n\nconst createTransposeProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, input: Tensor, perm: number[]): ProgramInfo => {\n const inputShape = input.dims;\n perm = getAdjustedPerm(inputShape, perm);\n const unpackedOutputShape = getOutputShape(inputShape, perm);\n const rank = inputShape.length;\n // A dims=[${inputs[0].dims.toString()}]\n // out Dims=[${unpackedOutputShape.toString()}]\n // based on perm=[${perm.toString()}]\n const shaderSource = `\n ${getPermFunctionBody('perm', perm, rank)}\n float process(int indices[${rank}]) {\n int a[${rank}];\n perm(a, indices);\n return _A(a);\n }`;\n return {\n ...transposeProgramMetadata,\n output: {dims: unpackedOutputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst getAdjustedPerm = (inputShape: readonly number[], perm: number[]): number[] => {\n if (perm && perm.length !== inputShape.length) {\n perm = [...(inputShape.keys())].reverse();\n }\n return perm;\n};\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] => {\n perm = getAdjustedPerm(inputShape, perm);\n return ShapeUtil.sortBasedOnPerm(inputShape, perm);\n};\n\nconst getPermFunctionBody = (name: string, perm: number[], rank: number): string => {\n const reverseFunc = [];\n reverseFunc.push(`void ${name}(out int a[${rank}], int src[${rank}]) {`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(`\\ta[${perm[i]}]=src[${i}];`);\n }\n reverseFunc.push('\\t}');\n return reverseFunc.join('\\n');\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('input should be float tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nimport {transpose, TransposeAttributes} from './transpose';\n\nexport interface DepthToSpaceAttributes {\n mode: 'DCR'|'CRD';\n blocksize: number;\n}\n\nexport const depthToSpace: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: DepthToSpaceAttributes): Tensor[] => {\n validateInputs(inputs);\n const blocksize = attributes.blocksize;\n const blocksizeSqr = blocksize * blocksize;\n const transposePerm = attributes.mode === 'DCR' ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n const firstReshapeShape = attributes.mode === 'DCR' ?\n [\n inputs[0].dims[0], blocksize, blocksize, inputs[0].dims[1] / blocksizeSqr, inputs[0].dims[2],\n inputs[0].dims[3]\n ] :\n [\n inputs[0].dims[0], inputs[0].dims[1] / blocksizeSqr, blocksize, blocksize, inputs[0].dims[2],\n inputs[0].dims[3]\n ];\n\n // const transpose = new WebGLTranspose();\n // const attributes = new Attribute(undefined);\n // attributes.set('perm', 'ints', transposePerm);\n // transpose.initialize(attributes);\n\n // First reshape\n const firstReshapedTensor = inferenceHandler.reshapeUnpacked(inputs[0], firstReshapeShape);\n\n // transpose\n const transposeAttributes: TransposeAttributes = {perm: transposePerm, cacheKey: `${transposePerm}`};\n const [transposeOutput] = transpose(inferenceHandler, [firstReshapedTensor], transposeAttributes);\n\n // Second reshape\n const secondReshapeShape = [\n inputs[0].dims[0], inputs[0].dims[1] / blocksizeSqr, inputs[0].dims[2] * blocksize,\n inputs[0].dims[3] * blocksize\n ];\n const result = inferenceHandler.reshapeUnpacked(transposeOutput, secondReshapeShape);\n return [result];\n };\n\nexport const parseDepthToSpaceAttributes: OperatorInitialization =\n (node: Graph.Node): DepthToSpaceAttributes => {\n // processing node attributes\n const blocksize = node.attributes.getInt('blocksize');\n if (blocksize < 1) {\n throw new Error(`blocksize must be >= 1, but got : ${blocksize} for DepthToSpace`);\n }\n const mode = node.attributes.getString('mode', 'DCR');\n if (mode !== 'DCR' && mode !== 'CRD') {\n throw new Error(`unrecognized mode: ${mode} for DepthToSpace`);\n }\n return {mode, blocksize};\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (inputs.length !== 1) {\n throw new Error(`DepthToSpace expect 1 inputs, but got ${inputs.length}`);\n }\n\n // Input has to be a 4-D tensor\n // TODO: Support string depth-to-space.\n if (inputs[0].type === 'string' || inputs[0].dims.length !== 4) {\n throw new TypeError('DepthToSpace input should be a 4-D numeric tensor');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const flatten: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axis: number): Tensor[] => {\n validateInputs(inputs, axis);\n\n const outputDims = ShapeUtil.flattenShape(inputs[0].dims, axis);\n return [inferenceHandler.reshapeUnpacked(inputs[0], outputDims)];\n };\n\nexport const parseFlattenAttributes: OperatorInitialization = (node: Graph.Node): number =>\n node.attributes.getInt('axis', 1); // default axis is 1\n\nconst validateInputs = (inputs: Tensor[], axis: number): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Flatten requires 1 input.');\n }\n\n const r = inputs[0].dims.length;\n if (r === 0) {\n throw new Error('scalar tensor is not supported.');\n }\n\n if (axis < -r || axis > r) {\n throw new Error('Invalid axis');\n }\n\n // TODO: Support string type\n if (inputs[0].type === 'string') {\n throw new Error('string tensor is not supported.');\n }\n};", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceHandler} from './backend';\nimport {Graph} from './graph';\nimport {Tensor} from './tensor';\n\nexport type OperatorImplementation = (inferenceHandler: InferenceHandler, inputs: Tensor[], context: T) => Tensor[];\nexport type OperatorInitialization = (node: Graph.Node, graph: Graph) => T;\n\nexport interface Operator {\n readonly impl: OperatorImplementation;\n readonly context: Graph.Node|unknown;\n}\n\nexport const NUMBER_TYPES: readonly Tensor.DataType[] =\n ['float32', 'float64', 'int32', 'int16', 'int8', 'uint16', 'uint32', 'uint8'];\nexport const INT_TYPES: readonly Tensor.DataType[] = ['int32', 'int16', 'int8', 'uint16', 'uint32', 'uint8'];\nexport const FLOAT_TYPES: readonly Tensor.DataType[] = ['float32', 'float64'];\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {NUMBER_TYPES, OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\ninterface GatherAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nexport const gather: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: GatherAttributes): Tensor[] => {\n validateInputs(inputs, attributes.axis);\n const output = inferenceHandler.run(createGatherProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n };\n\nexport const parseGatherAttributes: OperatorInitialization = (node: Graph.Node): GatherAttributes =>\n createAttributeWithCacheKey({axis: node.attributes.getInt('axis', 0)});\n\nconst gatherProgramMetadata = {\n name: 'Gather',\n inputNames: ['A', 'B'],\n inputTypes: [TextureType.unpacked, TextureType.unpacked],\n};\n\nconst createGatherProgramInfo =\n (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], axis: number): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n const indexDataShape = inputs[1].dims.slice();\n const outputShape = new Array(inputShape.length + indexDataShape.length - 1);\n\n axis = ShapeUtil.normalizeAxis(axis, inputShape.length);\n const indexCopyOps: string[] = [];\n for (let i = 0; i < outputShape.length; i++) {\n // outputShape is divided into three parts: A, B, C\n // |0 axis| axis + indexDataShape.length | end|\n // | A | B | C |\n //\n // inputIdx: [A, inputs[1][B], C]\n if (i < axis) { // A\n outputShape[i] = inputShape[i];\n indexCopyOps.push(`inputIdx[${i}] = outputIdx[${i}];`);\n } else {\n if (i < axis + indexDataShape.length) { // B\n outputShape[i] = indexDataShape[i - axis];\n indexCopyOps.push(`indexDataIdx[${i - axis}] = outputIdx[${i}];`);\n } else { // C\n outputShape[i] = inputShape[i - indexDataShape.length + 1]; // skip 1 for axis\n indexCopyOps.push(`inputIdx[${i - indexDataShape.length + 1}] = outputIdx[${i}];`);\n }\n }\n }\n\n const orank = outputShape.length || 1;\n const irank = inputShape.length;\n const iDrank = indexDataShape.length || 1;\n const shaderSource = `\n float process(int outputIdx[${orank}]) {\n int inputIdx[${irank}];\n int indexDataIdx[${iDrank}];\n indexDataIdx[0] = 0;\n ${indexCopyOps.join('\\n ')}\n int idx = int(_B(indexDataIdx));\n inputIdx[${axis}] = idx < 0 ? 
idx + ${inputShape[axis]} : idx;\n return _A(inputIdx);\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst createGatherProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: GatherAttributes): ProgramInfoLoader => {\n const metadata = {...gatherProgramMetadata, cacheHint: attributes.cacheKey};\n return {...metadata, get: () => createGatherProgramInfo(handler, metadata, inputs, attributes.axis)};\n };\n\nconst validateInputs = (inputs: Tensor[], axis: number): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n const tensorRank = inputs[0].dims.length;\n if (tensorRank < 1) {\n throw new Error('Invalid input shape.');\n }\n if (axis < -tensorRank || axis > tensorRank - 1) {\n throw new Error('Invalid axis.');\n }\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invaid input type.');\n }\n if (inputs[1].type !== 'int32' && inputs[1].type !== 'int16') {\n throw new Error('Invaid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {GemmUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n isOptionalC: boolean; // in opset 11, C becomes optional\n}\n\nexport const gemm: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: GemmAttributes): Tensor[] => {\n validateInputs(inputs, attributes);\n const output = inferenceHandler.run(createGemmProgramInfoLoader(inputs, attributes), inputs);\n return [output];\n };\n\nconst parseGemmAttributes = (node: Graph.Node, isOptionalC: boolean): GemmAttributes => {\n const transA = node.attributes.getInt('transA', 0) !== 0;\n const transB = node.attributes.getInt('transB', 0) !== 0;\n const alpha = node.attributes.getFloat('alpha', 1.0);\n const beta = node.attributes.getFloat('beta', 1.0);\n return createAttributeWithCacheKey({transA, transB, alpha, beta, isOptionalC});\n};\n\nexport const parseGemmAttributesV7: OperatorInitialization = (node: Graph.Node): GemmAttributes =>\n parseGemmAttributes(node, false);\n\nexport const parseGemmAttributesV11: OperatorInitialization = (node: Graph.Node): GemmAttributes =>\n parseGemmAttributes(node, true);\n\nconst createGemmProgramInfoLoader = (inputs: Tensor[], attributes: GemmAttributes): ProgramInfoLoader => {\n const metadata = {\n name: 'Gemm',\n inputNames: inputs.length === 3 ? ['A', 'B', 'C'] : ['A', 'B'],\n inputTypes: inputs.length === 3 ? 
[TextureType.unpacked, TextureType.unpacked, TextureType.unpacked] :\n [TextureType.unpacked, TextureType.unpacked],\n key: attributes.cacheKey\n };\n\n return {...metadata, get: () => createGemmProgramInfo(metadata, inputs, attributes)};\n};\n\nconst createGemmProgramInfo =\n (metadata: ProgramMetadata, inputs: Tensor[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n let sharedDim = aShape[aShape.length - 1];\n let line = '';\n if (attributes.transA) {\n sharedDim = aShape[0];\n }\n if (attributes.transA && attributes.transB) {\n line = 'value += _A_T(a) * _B_T(b);';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += _A_T(a) * _B(b);';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += _A(a) * _B_T(b);';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += _A(a) * _B(b);';\n }\n const rank = outputShape.length;\n const declareC = inputs.length === 3 ? `int c[${inputs[2].dims.length}];` : '';\n const broadcastC = inputs.length === 3 ? 'bcastIndices_C(indices, c);' : '';\n const calculateC = inputs.length === 3 ? 'value += beta * _C(c);' : '';\n const shaderSource = `\n float process(int indices[${rank}]) {\n int a[${rank}];\n int b[${rank}];\n ${declareC}\n\n copyVec(indices, a);\n copyVec(indices, b);\n ${broadcastC}\n\n float value = 0.0;\n for (int k=0; k<${sharedDim}; ++k) {\n a[${rank - 1}] = k;\n b[${rank - 2}] = k;\n ${line}\n }\n\n value = value * alpha;\n ${calculateC}\n return value;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n variables: [\n {name: 'alpha', type: 'float', data: attributes.alpha}, {name: 'beta', type: 'float', data: attributes.beta}\n ],\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[], attributes: GemmAttributes): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (attributes.isOptionalC && (inputs.length < 2 || inputs.length > 3)) {\n throw new Error('Invaid input shape.');\n }\n if (!attributes.isOptionalC && inputs.length !== 3) {\n throw new Error('Gemm requires 3 inputs');\n }\n\n // 'C' can be of dimensionality 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length !== 1 && inputs[2].dims.length !== 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].type !== 'float32' && inputs[0].type !== 'float64') ||\n (inputs[1].type !== 'float32' && inputs[1].type !== 'float64') ||\n (inputs.length === 3 && inputs[2].type !== 'float32' && inputs[2].type !== 'float64')) {\n throw new Error('Invalid input type.');\n }\n\n if ((inputs[0].type !== inputs[1].type) || (inputs.length === 3 && inputs[0].type !== inputs[2].type)) {\n throw new Error('Input types are mismatched');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport interface ImageScalerAttributes extends AttributeWithCacheKey {\n scale: number;\n bias: number[];\n}\n\nexport const imageScaler: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ImageScalerAttributes): Tensor[] => {\n validateInputs(inputs);\n const output =\n inferenceHandler.run(createImageScalerProgramInfoLoader(inferenceHandler, inputs, attributes), inputs);\n return [output];\n };\n\nexport const parseImageScalerAttributes: OperatorInitialization =\n (node: Graph.Node): ImageScalerAttributes => {\n const scale = node.attributes.getFloat('scale');\n const bias = node.attributes.getFloats('bias');\n return createAttributeWithCacheKey({scale, bias});\n };\n\nconst imageScalerProgramMetadata = {\n name: 'ImageScaler',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n};\n\nconst createImageScalerProgramInfo =\n (_handler: WebGLInferenceHandler, metadata: ProgramMetadata, inputs: Tensor[], attributes: ImageScalerAttributes):\n ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n const rank = outputShape.length;\n const getBiasMethod = createGetBiasMethod(attributes.bias.length);\n const shaderSource = `\n ${getBiasMethod}\n float process(int indices[${rank}]) {\n return _X(indices) * scale + getBias(bias, indices[1]);\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n variables: [\n {name: 'bias', type: 'float', arrayLength: attributes.bias.length, data: attributes.bias},\n {name: 'scale', type: 'float', data: attributes.scale}\n ],\n shaderSource\n };\n };\n\nconst createImageScalerProgramInfoLoader =\n (handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ImageScalerAttributes): ProgramInfoLoader => {\n const metadata = {...imageScalerProgramMetadata, cacheHint: attributes.cacheKey};\n return {...metadata, get: () => createImageScalerProgramInfo(handler, metadata, inputs, attributes)};\n };\n\nconst createGetBiasMethod = (numChannels: number): string => {\n const codeLines: string[] = [`float getBias(float bias[${numChannels}], int channel) {`];\n for (let i = 0; i < numChannels; ++i) {\n if (i === 0) {\n codeLines.push(\n '\\t' +\n `if (channel == ${i}) { return bias[${i}]; }`);\n } else if (i === numChannels - 1) {\n codeLines.push(\n '\\t' +\n `else { return bias[${i}]; }`);\n } else {\n codeLines.push(\n '\\t' +\n `else if (channel == ${i}) { return bias[${i}]; }`);\n }\n }\n codeLines.push(\n '\\t' +\n '}');\n return codeLines.join('\\n');\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('ImageScaler requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('Invalid input shape.');\n }\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, ProgramMetadata, TextureType} from '../types';\n\nexport const instanceNormalization: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], epsilon: number): Tensor[] => {\n validateInputs(inputs);\n\n const meanAndVariance = inferenceHandler.run(createMeanAndVarianceProgramInfoLoader(inputs[0]), inputs);\n const output = inferenceHandler.run(\n createComputeOutputProgramInfoLoader(inferenceHandler, inputs[0], epsilon, meanAndVariance.dims),\n [inputs[0], meanAndVariance, inputs[1], inputs[2]]);\n return [output];\n };\n\nexport const parseInstanceNormalizationAttributes: OperatorInitialization = (node: Graph.Node): number =>\n node.attributes.getFloat('epsilon', 1e-5);\n\nconst meanAndVarianceProgramMetadata = {\n name: 'InstanceNormalization_MeanAndVariance',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n};\n\nconst createMeanAndVarianceProgramInfo = (metadata: ProgramMetadata, input: Tensor): ProgramInfo => {\n const xDims = input.dims.slice();\n const channel = xDims[1];\n const channelSize = xDims[2] * xDims[3];\n const outputShape = [xDims[0], channel];\n\n const shaderSource = `\n vec4 process(int[2] indices) {\n vec4 v = vec4(0.0);\n int a[4];\n a[0] = indices[0];\n a[1] = indices[1];\n float temp = 0.0;\n for(int a2=0; a2<${xDims[2]}; a2++) {\n a[2] = a2;\n for(int a3=0; a3<${xDims[3]}; a3++) {\n a[3] = a3;\n float x = _X(a);\n temp += x;\n }\n }\n float mean = temp / float(${channelSize});\n temp = 0.0;\n for(int a2=0; a2<${xDims[2]}; a2++) {\n a[2] = a2;\n for(int a3=0; a3<${xDims[3]}; a3++) {\n a[3] = a3;\n float x = _X(a);\n temp += (x - mean) * (x - mean);\n }\n }\n v.r = mean;\n v.g = temp / float(${channelSize});\n\n return v;\n }`;\n return {\n ...metadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.packedLastDimension},\n shaderSource\n };\n};\n\nconst createMeanAndVarianceProgramInfoLoader = (input: Tensor): ProgramInfoLoader => ({\n ...meanAndVarianceProgramMetadata,\n get: () => createMeanAndVarianceProgramInfo(meanAndVarianceProgramMetadata, input)\n});\n\nconst computeOutputProgramMetadata = {\n name: 'InstanceNormalization_ComputeOutput',\n inputNames: ['X', 'MeanAndVariance', 'Scale', 'B'],\n inputTypes: [TextureType.unpacked, TextureType.packedLastDimension, TextureType.unpacked, TextureType.unpacked],\n};\n\nconst createComputeOutputProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, metadata: ProgramMetadata, input: Tensor, epsilon: number,\n meanAndVarianceShape: readonly number[]): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(meanAndVarianceShape, TextureType.packedLastDimension);\n const [meanAndVarianceWidth, meanAndVarianceHeight] = [textureWidth / 4, textureHeight];\n const shaderSource = `\n vec4 get_MeanAndVariance(int[2] mv) {\n int offset = indicesToOffset_MeanAndVariance(mv);\n vec2 coords = offsetToCoords(offset, ${meanAndVarianceWidth}, ${meanAndVarianceHeight});\n return ${glsl.texture2D}(MeanAndVariance, coords);\n }\n\n float process(int[4] indices) {\n int mv[2];\n 
mv[0] = indices[0];\n mv[1] = indices[1];\n vec4 mean_and_variance = get_MeanAndVariance(mv);\n float mean = mean_and_variance.r;\n float variance = mean_and_variance.g;\n\n int sb[1];\n sb[0] = indices[1];\n float scale = _Scale(sb);\n float b = _B(sb);\n\n return scale * (_X(indices) - mean) / sqrt(variance + epsilon) + b;\n }`;\n return {\n ...metadata,\n output: {dims: input.dims, type: input.type, textureType: TextureType.unpacked},\n variables: [{name: 'epsilon', type: 'float', data: epsilon}],\n shaderSource\n };\n };\n\nconst createComputeOutputProgramInfoLoader =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, epsilon: number, meanAndVarianceShape: readonly number[]):\n ProgramInfoLoader => {\n const metadata = {...computeOutputProgramMetadata, cacheHint: `${epsilon}`};\n return {\n ...metadata,\n get: () => createComputeOutputProgramInfo(inferenceHandler, metadata, input, epsilon, meanAndVarianceShape)\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 3) {\n throw new Error('InstanceNormalization requires 3 inputs.');\n }\n\n const X = inputs[0];\n const scale = inputs[1];\n const B = inputs[2];\n\n // input should at least have three dimensions - N,C,dim1,...,dimn\n // other inputs can have only one dimensions\n if (X.dims.length < 3 || scale.dims.length !== 1 || B.dims.length !== 1) {\n throw new Error('Invalid input shape.');\n }\n if (scale.dims[0] !== X.dims[1] || B.dims[0] !== X.dims[1]) {\n throw new Error('Input shapes are mismatched.');\n }\n if ((X.type !== 'float32' && X.type !== 'float64') || (scale.type !== 'float32' && scale.type !== 'float64') ||\n (B.type !== 'float32' && B.type !== 'float64')) {\n throw new Error('Invalid input type.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('Only support 4-D input shape.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramInfoLoader, TextureType} from '../types';\n\nexport interface LrnAttributes extends AttributeWithCacheKey {\n alpha: number;\n beta: number;\n bias: number;\n size: number;\n}\n\nexport const lrn: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: LrnAttributes): Tensor[] => {\n validateInputs(inputs);\n\n // if (inferenceHandler.session.pack) {\n // return [inferenceHandler.run(createPackedLrnProgramInfoLoader(inferenceHandler, inputs, attributes),\n // inputs)];\n // } else {\n return [inferenceHandler.run(createLrnProgramInfoLoader(inputs, attributes), inputs)];\n //}\n };\n\nexport const parseLrnAttributes: OperatorInitialization = (node: Graph.Node): LrnAttributes => {\n const alpha = node.attributes.getFloat('alpha', 0.0001);\n const beta = node.attributes.getFloat('beta', 0.75);\n const bias = node.attributes.getFloat('bias', 1.0);\n const size = node.attributes.getInt('size');\n\n return createAttributeWithCacheKey({alpha, beta, bias, size});\n};\n\nconst lrnProgramMetadata = {\n name: 'LRN',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked]\n};\n\nfunction createLrnProgramInfo(inputs: Tensor[], attributes: LrnAttributes): ProgramInfo {\n const C = inputs[0].dims[1];\n const rank = inputs[0].dims.length;\n const from = -Math.floor((attributes.size - 1) / 2);\n const to = Math.ceil((attributes.size - 1) / 2);\n const alpha = `float(${attributes.alpha}) / float(${attributes.size})`;\n const bias = `float(${attributes.bias})`;\n const beta = `float(${attributes.beta})`;\n\n const shaderSource = `\n float process(int indices[${rank}]) {\n int c = indices[1];\n float x = _X(indices);\n float square_sum = 0.0;\n\n for (int i = ${from}; i <= ${to}; i++) {\n int idx = c + i;\n if (c >= 0 && c < ${C}) {\n indices[1] = idx;\n float j = _X(indices);\n square_sum += j * j;\n }\n }\n return x / pow(${bias} + ${alpha} * square_sum, ${beta});\n }`;\n return {\n ...lrnProgramMetadata,\n cacheHint: attributes.cacheKey,\n output: {dims: inputs[0].dims, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n };\n}\n\nexport function createLrnProgramInfoLoader(inputs: Tensor[], attributes: LrnAttributes): ProgramInfoLoader {\n return {...lrnProgramMetadata, cacheHint: attributes.cacheKey, get: () => createLrnProgramInfo(inputs, attributes)};\n}\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('LRN requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('currently only support LRN for input with \"NCHW\" format');\n }\n if (inputs[0].type !== 'float32') {\n throw new Error('input should be float type');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl, Glsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface PadAttributes extends AttributeWithCacheKey {\n readonly mode: string;\n readonly pads: number[];\n readonly value: number;\n}\n\nconst padProgramMetadata = {\n name: 'Pad',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n};\n\nexport const padV2: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: PadAttributes): Tensor[] => {\n validateInputsV2(inputs);\n const output = inferenceHandler.run(\n {\n ...padProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createPadProgramInfo(inferenceHandler, inputs[0], attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parsePadAttributesV2: OperatorInitialization = (node: Graph.Node): PadAttributes => {\n const mode = node.attributes.getString('mode', 'constant');\n const value = node.attributes.getFloat('value', 0.0);\n const pads = node.attributes.getInts('pads');\n return createAttributeWithCacheKey({mode, value, pads});\n};\n\nexport const padV11: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], mode: string): Tensor[] => {\n validateInputsV11(inputs);\n const attrubutes = generatePadAttributesFromInputs(inferenceHandler, inputs, mode);\n return padV2(inferenceHandler, [inputs[0]], attrubutes);\n };\n\nexport const parsePadAttributesV11: OperatorInitialization = (node: Graph.Node): string =>\n node.attributes.getString('mode', 'constant');\n\nconst generatePadAttributesFromInputs =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], mode: string): PadAttributes => {\n if (!inferenceHandler.session.isInitializer(inputs[1].dataId) ||\n (inputs.length >= 3 && !inferenceHandler.session.isInitializer(inputs[2].dataId))) {\n throw new Error('dynamic pad attributes are not allowed');\n }\n\n const pads = Array.from(inputs[1].integerData);\n const value = (inputs.length >= 3) ? 
inputs[2].floatData[0] : 0.0;\n\n return createAttributeWithCacheKey({mode, pads, value});\n };\n\nconst createPadProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(input.dims.slice(), attributes.pads);\n const rank = outputShape.length;\n const padFunction = getPadFunction(inferenceHandler, input, attributes);\n const shaderSource = `\n ${padFunction}\n float process(int[${rank}] indices) {\n return padA(indices);\n }`;\n return {\n name: 'Pad',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputsV2 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Pad requires 1 input');\n }\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n};\n\nconst validateInputsV11 = (inputs: Tensor[]): void => {\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Pad requires 2 or 3 inputs');\n }\n if (inputs[1].type !== 'int32') {\n throw new Error('Invalid input type.');\n }\n if (inputs.length >= 3 && inputs[2].type === 'string') {\n throw new Error('Invalid input type.');\n }\n};\n\nconst getPadFunction = (inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: PadAttributes): string => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [width, height] = inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const strides = ShapeUtil.computeStrides(input.dims);\n\n switch (attributes.mode) {\n case 'constant':\n return getPadConstant(glsl, input.dims, strides, width, height, attributes.pads, attributes.value);\n case 'reflect':\n return getPadReflect(glsl, input.dims, strides, width, height, attributes.pads);\n case 'edge':\n return getPadEdge(glsl, input.dims, strides, width, height, attributes.pads);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst getPadConstant =\n (glsl: Glsl, shape: readonly number[], strides: readonly number[], width: number, height: number, pads: number[],\n value: number): string => {\n const rank = shape.length;\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n k = m[${i}] - ${pads[i]};\n if (k < 0) return constant;\n if (k >= ${shape[i]}) return constant;\n offset += k * ${strides[i]};\n `;\n }\n return `\n float padA(int m[${rank}]) {\n const float constant = float(${value});\n int offset = 0;\n int k = 0;\n ${block}\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(A, coords));\n return value;\n }\n `;\n };\n\nconst getPadReflect =\n (glsl: Glsl, shape: readonly number[], strides: readonly number[], width: number, height: number, pads: number[]):\n string => {\n const rank = shape.length;\n\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n k = m[${i}] - ${pads[i]};\n if (k < 0) { k = -k; }\n {\n const int _2n_1 = ${2 * (shape[i] - 1)};\n k = int( mod( float(k), float(_2n_1) ) ) ;\n if(k >= ${shape[i]}) { k = _2n_1 - k; }\n }\n offset += k * ${strides[i]};\n `;\n }\n return `\n float padA(int m[${rank}]) {\n int offset = 0;\n int k = 0;\n ${block}\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(A, coords));\n return value;\n }\n `;\n };\n\nconst 
getPadEdge =\n (glsl: Glsl, shape: readonly number[], strides: readonly number[], width: number, height: number, pads: number[]):\n string => {\n const rank = shape.length;\n\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n k = m[${i}] - ${pads[i]};\n if (k < 0) k = 0;\n if (k >= ${shape[i]}) k = ${shape[i] - 1};\n offset += k * ${strides[i]};\n `;\n }\n return `\n float padA(int m[${rank}]) {\n int offset = 0;\n int k = 0;\n ${block}\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(A, coords));\n return value;\n }\n `;\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {PoolConvUtil, ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport interface AveragePoolAttributes extends AttributeWithCacheKey {\n readonly autoPad: string;\n readonly ceilMode: number;\n readonly countIncludePad: boolean;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nexport const averagePool: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: AveragePoolAttributes): Tensor[] => {\n validateInputs(inputs);\n const metadata =\n {name: 'AveragePool', inputNames: ['X'], inputTypes: [TextureType.unpacked], cacheHint: attributes.cacheKey};\n const output = inferenceHandler.run(\n {...metadata, get: () => createAveragePoolProgramInfo(inputs, metadata, false, attributes)}, inputs);\n return [output];\n };\n\nexport const parseAveragePoolAttributes: OperatorInitialization =\n (node: Graph.Node): AveragePoolAttributes => {\n const autoPad = node.attributes.getString('auto_pad', 'NOTSET');\n const ceilMode = node.attributes.getInt('ceil_mode', 0);\n const countIncludePad = (node.attributes.getInt('count_include_pad', 0) === 0 ? 
false : true);\n const kernelShape = node.attributes.getInts('kernel_shape');\n const strides = node.attributes.getInts('strides', []);\n const pads = node.attributes.getInts('pads', []);\n\n // TODO: support attribute 'ceil_mode'\n if (ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n\n return createAttributeWithCacheKey({autoPad, ceilMode, countIncludePad, kernelShape, strides, pads});\n };\n\nconst createAveragePoolProgramInfo =\n (inputs: Tensor[], metadata: ProgramMetadata, isGlobalOperator: boolean, attributes: AveragePoolAttributes):\n ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(inputs, attributes, isGlobalOperator);\n const kernelSize = ShapeUtil.size(adjustedAttributes.kernelShape);\n const op1 = 'value += _X(x);';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= float(${kernelSize});`;\n } else {\n op2 += `value /= float(${kernelSize} - pad);`;\n }\n const poolingCode = generatePoolingCode(inputs[0].dims, adjustedAttributes, op1, op2, '0.0');\n const shaderSource = `\n ${poolingCode}\n `;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nexport const globalAveragePool: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: AveragePoolAttributes): Tensor[] => {\n validateInputs(inputs);\n const metadata = {\n name: 'GlobalAveragePool',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n cacheHint: `${attributes.countIncludePad}`\n };\n const output = inferenceHandler.run(\n {...metadata, get: () => createAveragePoolProgramInfo(inputs, metadata, true, attributes)}, inputs);\n return [output];\n };\n\nexport const parseGlobalAveragePoolAttributes: OperatorInitialization =\n (node: Graph.Node): AveragePoolAttributes => {\n const countIncludePad = (node.attributes.getInt('count_include_pad', 0) === 0 ? 
false : true);\n return createAttributeWithCacheKey(\n {autoPad: '', ceilMode: 0, countIncludePad, kernelShape: [], strides: [], pads: []});\n };\n\nexport interface MaxPoolAttributes extends AveragePoolAttributes {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nexport const maxPool: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: MaxPoolAttributes): Tensor[] => {\n validateInputs(inputs);\n const metadata =\n {name: 'MaxPool', inputNames: ['X'], inputTypes: [TextureType.unpacked], cacheHint: attributes.cacheKey};\n const output = inferenceHandler.run(\n {...metadata, get: () => createMaxPoolProgramInfo(inputs, metadata, false, attributes)}, inputs);\n return [output];\n };\n\nexport const parseMaxPoolAttributes: OperatorInitialization =\n (node: Graph.Node): MaxPoolAttributes => {\n const autoPad = node.attributes.getString('auto_pad', 'NOTSET');\n const ceilMode = node.attributes.getInt('ceil_mode', 0);\n const kernelShape = node.attributes.getInts('kernel_shape');\n const strides = node.attributes.getInts('strides', []);\n const pads = node.attributes.getInts('pads', []);\n const storageOrder = node.attributes.getInt('storage_order', 0);\n const dilations = node.attributes.getInts('dilations', []);\n\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n\n return createAttributeWithCacheKey(\n {autoPad, ceilMode, countIncludePad: false, kernelShape, strides, pads, storageOrder, dilations});\n };\n\nconst createMaxPoolProgramInfo =\n (inputs: Tensor[], metadata: ProgramMetadata, isGlobalOperator: boolean, attributes: MaxPoolAttributes):\n ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(inputs, attributes, isGlobalOperator);\n const op1 = `\n value = max(_X(x), value);\n `;\n const op2 = '';\n const poolingCode = generatePoolingCode(inputs[0].dims, adjustedAttributes, op1, op2, '-1e5');\n const shaderSource = `\n ${poolingCode}\n `;\n return {\n ...metadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst getAdjustedPoolAttributesAndOutputShape =\n (inputs: Tensor[], attributes: AveragePoolAttributes|MaxPoolAttributes, isGlobalOperator: boolean):\n [AveragePoolAttributes|MaxPoolAttributes, number[]] => {\n const inputShape = inputs[0].dims.slice();\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? 
(attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShape, kernelShape, strides, dilations, pads);\n\n const outputShape = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShape, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n return [newAttributes, outputShape];\n };\n\nconst globalMaxPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: [],\n cacheKey: ''\n};\n\nconst globalMaxPoolMetadata = {\n name: 'GlobalMaxPool',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked]\n};\n\nexport const globalMaxPool = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...globalMaxPoolMetadata,\n get: () => createMaxPoolProgramInfo(inputs, globalMaxPoolMetadata, true, globalMaxPoolAttributes)\n },\n inputs);\n return [output];\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Pool ops requires 1 input.');\n }\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n};\n\nconst generatePoolingCode =\n (inputDims: readonly number[], attributes: AveragePoolAttributes, op1: string, op2: string, start: string):\n string => {\n const rank = inputDims.length;\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const dimW = inputDims[rank - 1];\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n if (pwStart + pwEnd !== 0) {\n codeW = `\n for (int i = 0; i < ${kw}; i++) {\n x[${rank} - 1] = indices[${rank} - 1] * ${sw} - ${pwStart} + i;\n if (x[${rank} - 1] < 0 || x[${rank} - 1] >= ${dimW}) {\n pad++;\n continue;\n }\n ${op1}\n }`;\n } else {\n codeW = `\n for (int i = 0; i < ${kw}; i++) {\n x[${rank} - 1] = indices[${rank} - 1] * ${sw} - ${pwStart} + i;\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n const dimH = inputDims[rank - 2];\n if (phStart + phEnd !== 0) {\n codeH = `\n for (int j = 0; j < ${kh}; j++) {\n x[${rank} - 2] = indices[${rank} - 2] * ${sh} - ${phStart} + j;\n if (x[${rank} - 2] < 0 || x[${rank} - 2] >= ${dimH}) {\n pad+= ${kw};\n continue;\n }\n `;\n } else {\n codeH = `\n for (int j = 0; j < ${kh}; j++) {\n x[${rank} - 2] = indices[${rank} - 2] * ${sh} - ${phStart} + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n float process(int indices[${rank}]) {\n int x[${rank}];\n copyVec(indices, x);\n\n float value = ${start};\n int pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n return value;\n 
}\n `;\n return poolingCode;\n } else {\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n const stridesRank = kernelStrides.length;\n const padsRank = attributes.pads.length;\n const offsetToIndicesFunction = offsetToIndices(stridesRank);\n const copyInputDims = copyArray(inputDims, 'inputDims');\n const copyPads = copyArray(attributes.pads, 'pads');\n const copyKernelStrides = copyArray(kernelStrides, 'kernelStrides');\n const copyStrides = copyArray(attributes.strides, 'strides');\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (x[j] >= inputDims[j] || x[j] < 0) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n ${op1}\n }`;\n } else {\n padCode = `\n }\n ${op1}\n `;\n }\n const poolingCode = `\n ${offsetToIndicesFunction}\n float process(int indices[${rank}]) {\n int x[${rank}];\n copyVec(indices, x);\n int offset[${stridesRank}];\n int pads[${padsRank}];\n int inputDims[${rank}];\n int kernelStrides[${stridesRank}];\n int strides[${stridesRank}];\n ${copyPads}\n ${copyInputDims}\n ${copyStrides}\n ${copyKernelStrides}\n\n float value = ${start};\n int pad = 0;\n bool isPad = false;\n for (int i = 0; i < ${kernelSize}; i++) {\n offsetToIndices(i, kernelStrides, offset);\n isPad = false;\n for (int j = ${rank} - ${stridesRank}; j < ${rank}; j++) {\n x[j] = indices[j] * strides[j - ${rank} + ${stridesRank}]\n + offset[j - ${rank} + ${stridesRank}] - pads[j - 2];\n ${padCode}\n }\n ${op2}\n\n return value;\n }\n `;\n return poolingCode;\n }\n };\n\nconst copyArray = (array: readonly number[], arrayName: string): string => {\n let block = '';\n for (let i = 0; i < array.length; i++) {\n block += `\n ${arrayName}[${i}] = ${array[i]};\n `;\n }\n return block;\n};\n\nconst offsetToIndices = (rank: number): string => `\n void offsetToIndices(int offset, int[${rank}] strides, out int[${rank}] indices) {\n if (${rank} == 0) {\n return;\n }\n for (int i = 0; i < ${rank} - 1; ++i) {\n indices[i] = offset / strides[i];\n offset -= indices[i] * strides[i];\n }\n indices[${rank} - 1] = offset;\n }`;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {NUMBER_TYPES, OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n readonly axes: number[];\n readonly keepDims: boolean;\n}\n\n// return [init ops, reduce ops, final ops]\ntype ReduceOp = (inputs: Tensor[], axes: number[]) => string[];\n\nconst reduce =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes, name: string,\n reduceOp: ReduceOp): Tensor[] => {\n validateInputs(inputs);\n\n const reduceProgramMetadata = {\n name,\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n };\n\n const output = inferenceHandler.run(\n {\n ...reduceProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () =>\n createReduceProgramInfo(inferenceHandler, inputs, attributes, name, reduceOp, reduceProgramMetadata)\n },\n inputs);\n return [output];\n };\n\nexport const parseReduceAttributes: OperatorInitialization = (node: Graph.Node): ReduceAttributes => {\n const axes = node.attributes.getInts('axes', []);\n const keepDims = node.attributes.getInt('keepdims', 1) === 1;\n return createAttributeWithCacheKey({axes, keepDims});\n};\n\nconst createReduceProgramInfo =\n (_handler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes, _name: string, reduceOp: ReduceOp,\n reduceProgramMetadata: ProgramMetadata): ProgramInfo => {\n const outputShape: number[] = [];\n const iRank = inputs[0].dims.length || 1;\n\n const idxCopy = []; // copy output indexes to input indexes\n\n const axes = ShapeUtil.normalizeAxes(attributes.axes, inputs[0].dims.length);\n const ops = reduceOp(inputs, axes);\n let reduceOps = ops[1];\n\n for (let k = 0; k < inputs[0].dims.length; k++) {\n // if this axis is reduced\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n if (attributes.keepDims) {\n outputShape.push(1);\n } // else { remove the axis from outputShape; }\n\n // loop over the d-th axis\n reduceOps = `\n for(int j${k} = 0; j${k} < ${inputs[0].dims[k]}; j${k}++) {\n inputIdx[${k}] = j${k};\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`inputIdx[${k}] = outputIdx[${outputShape.length}];`);\n\n outputShape.push(inputs[0].dims[k]);\n }\n }\n\n const oRank = outputShape.length || 1;\n\n const shaderSource = `\n float process(int outputIdx[${oRank}]) {\n float value; // final result\n int inputIdx[${iRank}]; // addressing input data\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${reduceOps}\n ${ops[2]} // final computation for reduce mean\n return value;\n }`;\n\n return {\n ...reduceProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n // TODO: support Reduce* operators with 2 inputs.\n if (!inputs || inputs.length !== 1) {\n throw new Error('Reduce op requires 1 input.');\n }\n\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport const reduceSum: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: 
ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['value = 0.0;', 'value += _A(inputIdx);', ''];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceSum', reduceOp);\n };\n\nexport const reduceMean: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (inputs: Tensor[], axes: number[]): string[] => {\n let size = 1.0;\n for (let k = 0; k < inputs[0].dims.length; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n size *= inputs[0].dims[k];\n }\n }\n\n return ['value = 0.0;', 'value += _A(inputIdx);', `value /= ${size}.;`]; // ensure real number with `.`\n };\n return reduce(inferenceHandler, inputs, attributes, 'ReduceMean', reduceOp);\n };\n\nexport const reduceMax: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (inputs: Tensor[], axes: number[]): string[] => {\n const idxZero = [];\n for (let k = 0; k < inputs[0].dims.length; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`inputIdx[${k}] = 0;`); // first element\n }\n }\n\n return [`${idxZero.join('\\n')}\\nvalue = _A(inputIdx);`, 'value = max(value, _A(inputIdx));', ''];\n };\n return reduce(inferenceHandler, inputs, attributes, 'ReduceMax', reduceOp);\n };\n\nexport const reduceMin: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (inputs: Tensor[], axes: number[]): string[] => {\n const idxZero = [];\n for (let k = 0; k < inputs[0].dims.length; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`inputIdx[${k}] = 0;`); // first element\n }\n }\n\n return [`${idxZero.join('\\n')}\\nvalue = _A(inputIdx);`, 'value = min(value, _A(inputIdx));', ''];\n };\n return reduce(inferenceHandler, inputs, attributes, 'ReduceMin', reduceOp);\n };\n\nexport const reduceProd: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['value = 1.0;', 'value *= _A(inputIdx);', ''];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceProd', reduceOp);\n };\n\nexport const reduceLogSum: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['value = 0.0;', 'value += _A(inputIdx);', 'value = log(value);'];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceLogSum', reduceOp);\n };\n\nexport const reduceLogSumSquare: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: ReduceAttributes): Tensor[] => {\n const reduceOp: ReduceOp = (): string[] => ['float t; value = 0.0;', 't = _A(inputIdx); value += t * t;', ''];\n return reduce(inferenceHandler, inputs, attributes, 'ReduceLogSumSquare', reduceOp);\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const reshape = (handler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n const reshapedDims = ShapeUtil.calculateReshapedDims(inputs[0].dims, inputs[1].integerData);\n if (handler.session.pack) {\n return [handler.reshapePacked(inputs[0], reshapedDims)];\n } else {\n return [handler.reshapeUnpacked(inputs[0], reshapedDims)];\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface UpsampleAttributes extends AttributeWithCacheKey {\n readonly opset: number;\n readonly isResize: boolean;\n readonly mode: string;\n readonly scales: number[];\n readonly extrapolationValue: number;\n readonly coordinateTransformMode: string;\n readonly useExtrapolation: boolean;\n readonly needRoiInput: boolean;\n readonly nearestMode: string;\n readonly cubicCoefficientA: number;\n readonly excludeOutside: boolean;\n readonly useNearest2xOptimization: boolean;\n readonly roiInputIdx: number;\n readonly scalesInputIdx: number;\n readonly sizesInputIdx: number;\n}\n\nconst upsampleProgramMetadata = {\n name: 'Upsample',\n inputNames: ['X'],\n inputTypes: [TextureType.unpacked],\n};\n\nexport const upsample: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): Tensor[] => {\n validateInputs(inputs, attributes);\n const output = inferenceHandler.run(\n {\n ...upsampleProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createUpsampleProgramInfo(inferenceHandler, inputs, attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseUpsampleAttributesV7: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 7);\n\nexport const parseUpsampleAttributesV9: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 9);\n\nexport const parseUpsampleAttributes = (node: Graph.Node, opset: number): UpsampleAttributes => {\n const isResize = (opset >= 10);\n\n // processing node attributes\n const mode = node.attributes.getString('mode', 'nearest');\n if (mode !== 'nearest' && mode !== 'linear' && (opset < 11 || mode !== 'cubic')) {\n throw new Error(`unrecognized mode: ${mode}`);\n }\n\n let scales: number[] = [];\n if (opset < 9) {\n scales = node.attributes.getFloats('scales');\n scalesValidation(scales, mode, isResize);\n }\n\n const extrapolationValue = node.attributes.getFloat('extrapolation_value', 0.0);\n\n const coordinateTransformMode =\n opset > 10 ? 
node.attributes.getString('coordinate_transformation_mode', 'half_pixel') : 'asymmetric';\n if ([\n 'asymmetric', 'pytorch_half_pixel', 'tf_half_pixel_for_nn', 'align_corners', 'tf_crop_and_resize', 'half_pixel'\n ].indexOf(coordinateTransformMode) === -1) {\n throw new Error(`coordinate_transform_mode '${coordinateTransformMode}' is not supported`);\n }\n const needRoiInput = (coordinateTransformMode === 'tf_crop_and_resize');\n const useExtrapolation = needRoiInput;\n\n const nearestMode =\n (mode === 'nearest' && opset >= 11) ? node.attributes.getString('nearest_mode', 'round_prefer_floor') : '';\n if (['round_prefer_floor', 'round_prefer_ceil', 'floor', 'ceil', ''].indexOf(nearestMode) === -1) {\n throw new Error(`nearest_mode '${nearestMode}' is not supported`);\n }\n\n const cubicCoefficientA = node.attributes.getFloat('cubic_coeff_a', -0.75);\n const excludeOutside = node.attributes.getInt('exclude_outside', 0) !== 0;\n if (excludeOutside && mode !== 'cubic') {\n throw new Error('exclude_outside can be set to 1 only when mode is CUBIC.');\n }\n\n const useNearest2xOptimization =\n (opset < 11) ? true : (mode === 'nearest' && coordinateTransformMode === 'asymmetric' && nearestMode === 'floor');\n\n let roiInputIdx = 0;\n let scalesInputIdx = 0;\n let sizesInputIdx = 0;\n\n if (opset > 10) {\n // handle when roiInput is not given\n if (node.inputs.length > 2) {\n roiInputIdx = 1;\n scalesInputIdx = 2;\n sizesInputIdx = 3;\n } else {\n scalesInputIdx = 1;\n sizesInputIdx = 2;\n }\n } else if (opset === 9) {\n scalesInputIdx = 1;\n }\n\n return createAttributeWithCacheKey({\n opset,\n isResize,\n mode,\n scales,\n extrapolationValue,\n coordinateTransformMode,\n useExtrapolation,\n needRoiInput,\n nearestMode,\n cubicCoefficientA,\n excludeOutside,\n useNearest2xOptimization,\n roiInputIdx,\n scalesInputIdx,\n sizesInputIdx\n });\n};\n\nconst createUpsampleProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [inputWidth, inputHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(inputs[0].dims, TextureType.unpacked);\n\n const outputShape = inputs[0].dims.map((dim, i) => Math.floor(dim * attributes.scales[i]));\n const [outputWidth, outputHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(outputShape, TextureType.unpacked);\n const dim = outputShape.length;\n\n const outputPitches = new Array(dim);\n const inputPitches = new Array(dim);\n let precalculatedPitches = `\n int output_pitches[${dim}];\n int input_pitches[${dim}];\n `;\n for (let d = dim - 1; d >= 0; d--) {\n outputPitches[d] = (d === dim - 1) ? 1 : outputPitches[d + 1] * outputShape[d + 1];\n inputPitches[d] = (d === dim - 1) ? 
1 : inputPitches[d + 1] * inputs[0].dims[d + 1];\n\n precalculatedPitches += `\n output_pitches[${d}] = ${outputPitches[d]};\n input_pitches[${d}] = ${inputPitches[d]};\n `;\n }\n const getInputFloatFunction = `\n float getInputFloat(int index) {\n vec2 coords = offsetToCoords(index, ${inputWidth}, ${inputHeight});\n float value = getColorAsFloat(${glsl.texture2D}(X, coords));\n return value;\n }\n `;\n\n const shaderSource = attributes.mode === 'nearest' ?\n // nearest\n `\n ${getInputFloatFunction}\n float process(int indices[${dim}]) {\n int input_index = 0;\n int output_index = coordsToOffset(TexCoords, ${outputWidth}, ${outputHeight});\n\n ${precalculatedPitches}\n\n int d, m;\n for (int dim = 0; dim < ${dim}; ++dim) {\n d = output_index / output_pitches[dim];\n m = output_index - d * output_pitches[dim];\n output_index = m;\n\n if (scales[dim] != 1 && d > 0) {\n int d2 = d / scales[dim];\n m = d - d2 * scales[dim];\n d = d2;\n }\n input_index += input_pitches[dim] * d;\n }\n\n return getInputFloat(input_index);\n }` :\n dim === 4 ?\n // bilinear 4D\n `\n ${getInputFloatFunction}\n float process(int indices[4]) {\n int input_index = 0;\n int output_index = coordsToOffset(TexCoords, ${outputWidth}, ${outputHeight});\n\n ${precalculatedPitches}\n\n int m;\n int index_of_dim0, index_of_dim1, index_of_dim2, index_of_dim3;\n index_of_dim0 = output_index / output_pitches[0];\n m = output_index - index_of_dim0 * output_pitches[0];\n index_of_dim1 = m / output_pitches[1];\n m = m - index_of_dim1 * output_pitches[1];\n index_of_dim2 = m / output_pitches[2];\n m = m - index_of_dim2 * output_pitches[2];\n index_of_dim3 = m;\n\n int index_of_input_dim2, index_of_input_dim3, x_offset, y_offset;\n index_of_input_dim2 = index_of_dim2 / scales[2];\n y_offset = index_of_dim2 - index_of_input_dim2 * scales[2];\n index_of_input_dim3 = index_of_dim3 / scales[3];\n x_offset = index_of_dim3 - index_of_input_dim3 * scales[3];\n\n input_index = index_of_dim0 * input_pitches[0] +\n index_of_dim1 * input_pitches[1] +\n index_of_input_dim2 * input_pitches[2] +\n index_of_input_dim3;\n\n float x00 = getInputFloat(input_index);\n float x10, x01, x11;\n\n bool end_of_dim2 = false;\n if (index_of_input_dim2 == (${inputs[0].dims[2]} - 1)) {\n // It's the end in dimension 2\n x01 = x00;\n end_of_dim2 = true;\n } else {\n x01 = getInputFloat(input_index + input_pitches[2]);\n }\n\n if (index_of_input_dim3 == (input_pitches[2] - 1)) {\n // It's the end in dimension 3\n x10 = x00;\n x11 = x01;\n }\n else {\n x10 = getInputFloat(input_index + 1);\n x11 = end_of_dim2 ? 
x10 : getInputFloat(input_index + input_pitches[2] + 1);\n }\n\n float y0 = x00 + float(y_offset) * (x01 - x00) / float(scales[2]);\n float y1 = x10 + float(y_offset) * (x11 - x10) / float(scales[2]);\n return y0 + float(x_offset) * (y1 - y0) / float(scales[3]);\n }` :\n // bilinear 2D\n `\n ${getInputFloatFunction}\n float process(int indices[2]) {\n int input_index = 0;\n int output_index = coordsToOffset(TexCoords, ${outputWidth}, ${outputHeight});\n\n ${precalculatedPitches}\n\n int m;\n int index_of_dim0, index_of_dim1;\n index_of_dim0 = output_index / output_pitches[0];\n m = output_index - index_of_dim0 * output_pitches[0];\n index_of_dim1 = m;\n\n int index_of_input_dim0, index_of_input_dim1, x_offset, y_offset;\n index_of_input_dim0 = index_of_dim0 / scales[0];\n y_offset = index_of_dim0 - index_of_input_dim0 * scales[0];\n index_of_input_dim1 = index_of_dim1 / scales[1];\n x_offset = index_of_dim1 - index_of_input_dim1 * scales[1];\n\n input_index = index_of_input_dim0 * input_pitches[0] + index_of_input_dim1;\n\n float x00 = getInputFloat(input_index);\n float x10, x01, x11;\n\n bool end_of_dim0 = false;\n if (index_of_input_dim0 == (${inputs[0].dims[0]} - 1)) {\n // It's the end in dimension 0\n x01 = x00;\n end_of_dim0 = true;\n } else {\n x01 = getInputFloat(input_index + input_pitches[0]);\n }\n\n if (index_of_input_dim1 == (input_pitches[0] - 1)) {\n // It's the end in dimension 1\n x10 = x00;\n x11 = x01;\n }\n else {\n x10 = getInputFloat(input_index + 1);\n x11 = end_of_dim0 ? x10 : getInputFloat(input_index + input_pitches[0] + 1);\n }\n\n float y0 = x00 + float(y_offset) * (x01 - x00) / float(scales[0]);\n float y1 = x10 + float(y_offset) * (x11 - x10) / float(scales[0]);\n return y0 + float(x_offset) * (y1 - y0) / float(scales[1]);\n }`;\n return {\n ...upsampleProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource,\n variables: [{\n name: 'scales',\n type: 'int',\n arrayLength: attributes.scales.length,\n data: attributes.scales.map(x => Math.ceil(x))\n }]\n };\n };\n\nexport const validateInputs = (inputs: Tensor[], attribute: UpsampleAttributes): void => {\n if (!inputs || (attribute.opset < 9 && inputs.length !== 1) ||\n (attribute.opset >= 9 && attribute.opset < 11 && inputs.length !== 2) ||\n (attribute.opset >= 11 && inputs.length < 2)) {\n throw new Error('invalid inputs.');\n }\n\n if (attribute.scales.length > 0 && inputs[0].dims.length !== attribute.scales.length) {\n throw new Error('Invalid input shape.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('Invalid input tensor types.');\n }\n};\n\nexport const scalesValidation = (scales: number[], mode: string, isResize: boolean): void => {\n if (!isResize) {\n for (const scale of scales) {\n if (scale < 1) {\n throw new Error('Scale value should be greater than or equal to 1.');\n }\n }\n } else {\n for (const scale of scales) {\n if (scale <= 0) {\n throw new Error('Scale value should be greater than 0.');\n }\n }\n }\n if (mode === 'linear' || mode === 'cubic') {\n if (scales.length !== 2 && (scales.length !== 4 || scales[0] !== 1 || scales[1] !== 1)) {\n throw new Error(`'Linear' mode and 'Cubic' mode only support 2-D inputs ('Bilinear', 'Bicubic') \\\n or 4-D inputs with the corresponding outermost 2 scale values being 1 \\\n in the ${isResize ? 'Resize' : 'Upsample'} opeartor.`);\n }\n }\n};", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\nimport {getCoordsDataType} from '../utils';\n\nimport {unpackFromChannel} from './packing-utils';\nimport {parseUpsampleAttributes, scalesValidation, UpsampleAttributes, validateInputs} from './upsample';\n\nconst resizeProgramMetadata = {\n name: 'Resize',\n inputNames: ['A'],\n inputTypes: [TextureType.packed]\n};\n\nexport const resize: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): Tensor[] => {\n validateInputs(inputs, attributes);\n const output = inferenceHandler.run(\n {\n ...resizeProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createPackedResizeProgramInfo(inferenceHandler, inputs, attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseResizeAttributesV10: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 10);\n\nexport const parseResizeAttributesV11: OperatorInitialization =\n (node: Graph.Node): UpsampleAttributes => parseUpsampleAttributes(node, 11);\n\nconst createPackedResizeProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: UpsampleAttributes): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const [scales, outputShape] = prepareInputs(inputs, attributes);\n\n const isSame =\n scales.every((s: number) => s === 1) && attributes.coordinateTransformMode !== 'tf_crop_and_resize';\n if (isSame) {\n return {\n ...resizeProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n hasMain: true,\n shaderSource: `void main() {\n vec4 v = ${glsl.texture2D}(X, TexCoords);\n ${glsl.output} = v;\n }`\n };\n }\n\n const dim = outputShape.length;\n if (dim < 2) {\n throw new Error(`output dimension should be at least 2, but got ${dim}`);\n }\n\n const outputHeight = outputShape[dim - 2];\n const outputWidth = outputShape[dim - 1];\n\n const inputShape = inputs[0].dims;\n if (dim !== inputShape.length) {\n throw new Error(`output dimension should match input ${inputShape.length}, but got ${dim}`);\n }\n const inputHeight = inputShape[dim - 2];\n const inputWidth = inputShape[dim - 1];\n\n const scalesHeight = scales[dim - 2];\n const scalesWidth = scales[dim - 1];\n\n let getSourceFracIndex = '';\n\n if (attributes.mode !== 'linear') {\n // TODO: support other modes\n throw new Error(`resize (packed) does not support mode: '${attributes.mode}'`);\n }\n switch (attributes.coordinateTransformMode) {\n case 'asymmetric':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n return vec4(coords) / scaleWHWH;\n }\n `;\n break;\n case 'half_pixel':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n return (vec4(coords) + 0.5) / scaleWHWH - 0.5;\n }\n `;\n break;\n case 'pytorch_half_pixel':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n vec4 fcoords = vec4(coords);\n return vec4(\n ${outputWidth}.0 > 1.0 ? (fcoords.x + 0.5) / scaleWHWH.x - 0.5 : 0.0,\n ${outputHeight}.0 > 1.0 ? (fcoords.y + 0.5) / scaleWHWH.y - 0.5 : 0.0,\n ${outputWidth}.0 > 1.0 ? 
(fcoords.z + 0.5) / scaleWHWH.z - 0.5 : 0.0,\n ${outputHeight}.0 > 1.0 ? (fcoords.w + 0.5) / scaleWHWH.w - 0.5 : 0.0\n );\n }\n `;\n break;\n case 'align_corners':\n getSourceFracIndex = `\n vec4 getSourceFracIndex(ivec4 coords) {\n vec4 resized = vec4(${outputWidth}.0 - 1.0, ${outputHeight}.0 - 1.0, ${outputWidth}.0 - 1.0,\n ${outputHeight}.0 - 1.0);\n vec4 original = vec4(${inputWidth}.0 - 1.0, ${inputHeight}.0 - 1.0, ${inputWidth}.0 - 1.0,\n ${inputHeight}.0 - 1.0);\n vec4 new_scale = original / resized;\n return vec4(coords) * new_scale;\n }\n `;\n break;\n default:\n // TODO:supporting other coordinateTransformModes\n throw new Error(`resize (packed) does not support coordinateTransformMode: \\\n '${attributes.coordinateTransformMode}'`);\n }\n\n const coordsDataType = getCoordsDataType(dim);\n const unpackChannel = unpackFromChannel();\n const shaderSource = `\n const vec2 inputWH = vec2(${inputHeight}.0, ${inputWidth}.0);\n const vec4 scaleWHWH = vec4(float(${scalesHeight}), float(${scalesWidth}), float(${scalesHeight}), float(${\n scalesWidth}));\n ${unpackChannel}\n ${getSourceFracIndex}\n float getAValue(int x10, int r, int c, int d) {\n return getChannel(getA(x10, r, c, d), vec2(c, d));\n }\n void main() {\n ${coordsDataType} rc = getOutputCoords();\n\n int batch = rc[0];\n int depth = rc[1];\n\n // retrieve the 4 coordinates that is used in the 4 packed output values.\n ivec4 coords = ivec4(rc.wz, rc.w + 1, rc.z + 1);\n\n // calculate the source index in fraction\n vec4 sourceFrac = getSourceFracIndex(coords);\n\n // get the lower and upper bound of the 4 values that will be packed into one texel.\n ivec4 x00 = ivec4(max(sourceFrac.xy, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.xy)));\n ivec4 x01 = ivec4(max(sourceFrac.xw, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.xw)));\n ivec4 x10 = ivec4(max(sourceFrac.zy, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.zy)));\n ivec4 x11 = ivec4(max(sourceFrac.zw, vec2(0.0)), min(inputWH - 1.0, ceil(sourceFrac.zw)));\n\n bool hasNextRow = rc.w < ${outputHeight - 1};\n bool hasNextCol = rc.z < ${outputWidth - 1};\n\n // pack x00, x01, x10, x11's top-left corner into one vec4 structure\n vec4 topLeft = vec4(\n getAValue(batch, depth, x00.x, x00.y),\n hasNextCol ? getAValue(batch, depth, x01.x, x01.y) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.x, x10.y) : 0.0,\n (hasNextRow && hasNextCol) ? getAValue(batch, depth, x11.x, x11.y) : 0.0);\n\n // pack x00, x01, x10, x11's top-right corner into one vec4 structure\n vec4 topRight = vec4(\n getAValue(batch, depth, x00.x, x00.w),\n hasNextCol ? getAValue(batch, depth, x01.x, x01.w) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.x, x10.w) : 0.0,\n (hasNextRow && hasNextCol) ? getAValue(batch, depth, x11.x, x11.w) : 0.0);\n\n // pack x00, x01, x10, x11's bottom-left corner into one vec4 structure\n vec4 bottomLeft = vec4(\n getAValue(batch, depth, x00.z, x00.y),\n hasNextCol ? getAValue(batch, depth, x01.z, x01.y) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.z, x10.y) : 0.0,\n (hasNextRow && hasNextCol) ? getAValue(batch, depth, x11.z, x11.y) : 0.0);\n\n // pack x00, x01, x10, x11's bottom-right corner into one vec4 structure\n vec4 bottomRight = vec4(\n getAValue(batch, depth, x00.z, x00.w),\n hasNextCol ? getAValue(batch, depth, x01.z, x01.w) : 0.0,\n hasNextRow ? getAValue(batch, depth, x10.z, x10.w) : 0.0,\n (hasNextRow && hasNextCol) ? 
getAValue(batch, depth, x11.z, x11.w) : 0.0);\n\n // calculate the interpolation fraction on u and v direction\n vec4 frac = vec4(sourceFrac) - floor(sourceFrac);\n vec4 clampFrac = clamp(frac, vec4(0.0), vec4(1.0));\n\n vec4 top = mix(topLeft, topRight, clampFrac.ywyw);\n vec4 bottom = mix(bottomLeft, bottomRight, clampFrac.ywyw);\n vec4 newValue = mix(top, bottom, clampFrac.xxzz);\n\n ${glsl.output} = vec4(newValue);\n }\n `;\n return {\n ...resizeProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.packed},\n hasMain: true,\n shaderSource\n };\n };\n\n\nconst prepareInputs = (inputs: Tensor[], attributes: UpsampleAttributes): [readonly number[], readonly number[]] => {\n const x = inputs[0];\n const xDims = x.dims;\n\n let scales = attributes.scales;\n let outputSizes: number[]|undefined;\n if (scales.length === 0) {\n const scalesTensor = inputs[attributes.scalesInputIdx];\n if (scalesTensor && scalesTensor.size !== 0) {\n if (inputs[attributes.sizesInputIdx]) {\n throw new Error('Only one of scales or sizes must be provided as input.');\n }\n scales = parseScalesData(scalesTensor, attributes.mode, attributes.isResize);\n } else {\n const sizesTensor = inputs[attributes.sizesInputIdx];\n if (!sizesTensor || sizesTensor.size === 0) {\n throw new Error('Either scales or sizes MUST be provided as input.');\n }\n\n outputSizes = Array.from(sizesTensor.integerData);\n scales = parseScalesDataFromOutputSize(outputSizes, xDims, attributes.mode, attributes.isResize);\n }\n } else {\n if (inputs[attributes.sizesInputIdx]) {\n throw new Error('Only one of scales or sizes must be provided as input.');\n }\n }\n\n const yDims = outputSizes || (xDims.map((dim, i) => Math.floor(dim * scales[i])));\n\n return [scales, yDims];\n};\n\nconst parseScalesData = (scale: Tensor, mode: string, isResize: boolean): number[] => {\n const scales = Array.from(scale.floatData);\n scalesValidation(scales, mode, isResize);\n return scales;\n};\n\nconst parseScalesDataFromOutputSize =\n (yDims: readonly number[], xDims: readonly number[], mode: string, isResize: boolean): number[] => {\n const length = xDims.length;\n const scales = new Array(length);\n\n for (let i = 0, end = length; i < end; i++) {\n if (xDims[i] === 0) {\n if (yDims[i] !== 0) {\n throw new Error('Input dim is zero but required output dim is non-zero.');\n }\n scales[i] = 1;\n } else {\n scales[i] = yDims[i] / xDims[i];\n }\n }\n scalesValidation(scales, mode, isResize);\n return scales;\n };\n\n// roi data is not used yet. but leave here for future usage.\n// const getRoi = (inputs: Tensor[], attributes: UpsampleAttributes) : number[] => {\n// let roi: number[] = [];\n// if (attributes.needRoiInput) {\n// if (attributes.roiInputIdx <= 0) {\n// throw new Error('Invalid roi input index.');\n// }\n// const roiTensor = inputs[attributes.roiInputIdx];\n// roi = roiTensor.size > 0 ? Array.from(roiTensor.floatData) : [];\n// } else {\n// roi = new Array(inputs[0].dims.length * 2).fill(0);\n// }\n// return roi;\n// };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const shape = (_inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n return [new Tensor([inputs[0].dims.length], 'int32', undefined, undefined, new Int32Array(inputs[0].dims))];\n};\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Shape requires 1 input.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {NUMBER_TYPES, OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly axes: number[];\n readonly ends: number[];\n readonly starts: number[];\n}\n\nconst sliceProgramMetadata = {\n name: 'Slice',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked]\n};\n\nexport const slice: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SliceAttributes): Tensor[] => {\n validateInputs(inputs);\n const output = inferenceHandler.run(\n {\n ...sliceProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createSliceProgramInfo(inferenceHandler, inputs[0], attributes)\n },\n inputs);\n return [output];\n };\n\nexport const parseSliceAttributes: OperatorInitialization = (node: Graph.Node): SliceAttributes => {\n const starts = node.attributes.getInts('starts');\n const ends = node.attributes.getInts('ends');\n const axes = node.attributes.getInts('axes', []);\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n\nconst createSliceProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SliceAttributes): ProgramInfo => {\n const axes = (attributes.axes.length === 0) ? 
input.dims.slice(0).map((_val, i) => i) : attributes.axes;\n const normalizedAxes = ShapeUtil.normalizeAxes(axes, input.dims.length);\n const starts = attributes.starts.map((start, i) => {\n if (start > input.dims[normalizedAxes[i]] - 1) {\n return input.dims[normalizedAxes[i]];\n }\n return ShapeUtil.normalizeAxis(start, input.dims[normalizedAxes[i]]);\n });\n const ends = attributes.ends.map((end, i) => {\n if (end > input.dims[normalizedAxes[i]] - 1) {\n return input.dims[normalizedAxes[i]];\n }\n return ShapeUtil.normalizeAxis(end, input.dims[normalizedAxes[i]]);\n });\n\n const outputShape = input.dims.slice();\n\n const sliceOps: string[] = [];\n for (let i = 0; i < normalizedAxes.length; i++) {\n outputShape[normalizedAxes[i]] = ends[i] - starts[i];\n if (starts[i] > 0) {\n sliceOps.push(`outputIdx[${normalizedAxes[i]}] += ${starts[i]};`);\n } // else { sliceOps.push(`outputIdx[${normalizedAxes[i]}] += 0;`); }\n }\n\n const rank = outputShape.length;\n const shaderSource = `\n float process(int outputIdx[${rank}]) {\n ${sliceOps.join('\\n ')}\n return _A(outputIdx);\n }`;\n return {\n ...sliceProgramMetadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Slice requires 1 input.');\n }\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport const sliceV10 = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputsV10(inputs);\n const attributes = generateSliceAttributesFromInputs(inferenceHandler, inputs);\n const output = inferenceHandler.run(\n {\n ...sliceProgramMetadata,\n cacheHint: attributes.cacheKey,\n get: () => createSliceProgramInfo(inferenceHandler, inputs[0], attributes)\n },\n [inputs[0]]);\n return [output];\n};\n\nconst generateSliceAttributesFromInputs =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): SliceAttributes => {\n if (!inferenceHandler.session.isInitializer(inputs[1].dataId) ||\n !inferenceHandler.session.isInitializer(inputs[2].dataId) ||\n (inputs.length >= 4 && !inferenceHandler.session.isInitializer(inputs[3].dataId)) ||\n (inputs.length >= 5 && !inferenceHandler.session.isInitializer(inputs[4].dataId))) {\n throw new Error('dynamic slice attributes are not allowed');\n }\n\n if (inputs.length >= 5 && inputs[4].integerData.some((i: number) => i !== 1)) {\n throw new Error('currently non-1 steps is not supported for Slice');\n }\n\n const starts = Array.from(inputs[1].integerData);\n const ends = Array.from(inputs[2].integerData);\n const axes = inputs.length >= 4 ? 
Array.from(inputs[3].integerData) : [];\n const cacheKey = `${axes};${starts};${ends}`;\n return {starts, ends, axes, cacheKey};\n };\n\nconst validateInputsV10 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length < 3 || inputs.length > 5) {\n throw new Error('Invalid input number.');\n }\n if (inputs[1].type !== 'int32' || inputs[1].dims.length !== 1) {\n throw new Error('Invalid input type.');\n }\n if (inputs[2].type !== 'int32' || inputs[2].dims.length !== 1) {\n throw new Error('Invalid input type.');\n }\n if (inputs.length >= 4 && (inputs[3].type !== 'int32' || inputs[3].dims.length !== 1)) {\n throw new Error('Invalid input type.');\n }\n if (inputs.length >= 5 && (inputs[4].type !== 'int32' || inputs[4].dims.length !== 1)) {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nimport {transpose, TransposeAttributes} from './transpose';\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst softmaxComputeMaxProgramMetadata = {\n name: 'SoftmaxComputeMax',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n};\n\nconst softmaxComputeScaleProgramMetadata = {\n name: 'SoftmaxComputeScale',\n inputNames: ['A', 'Max'],\n inputTypes: [TextureType.unpacked, TextureType.unpacked],\n};\n\nconst softmaxProgramMetadata = {\n name: 'SoftMax',\n inputNames: ['A', 'Max', 'Norm'],\n inputTypes: [TextureType.unpacked, TextureType.unpacked, TextureType.unpacked],\n};\n\nexport const softmax: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SoftmaxAttributes): Tensor[] => {\n validateInputs(inputs);\n\n const inputShape = inputs[0].dims.slice();\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const logicalRowCount = ShapeUtil.sizeToDimension(inputShape, axis);\n const featureCount = ShapeUtil.sizeFromDimension(inputShape, axis);\n\n const output = computeSoftmax(inferenceHandler, inputs, attributes, logicalRowCount, featureCount);\n return output;\n };\n\nexport const parseSoftmaxAttributes: OperatorInitialization =\n (node: Graph.Node): SoftmaxAttributes => createAttributeWithCacheKey({axis: node.attributes.getInt('axis', 1)});\n\nexport const parseSoftmaxAttributesV13: OperatorInitialization =\n (node: Graph.Node): SoftmaxAttributes => createAttributeWithCacheKey({axis: node.attributes.getInt('axis', -1)});\n\n// The \"semantic\" meaning of axis has changed in opset-13.\n// Please compare: https://github.com/onnx/onnx/blob/main/docs/Operators.md#Softmax\n// with https://github.com/onnx/onnx/blob/main/docs/Changelog.md#Softmax-11 for detailed explanations\n// To account for the opset-13 behavior, our plan will be to transpose the \"axis\" dim to the innermost dim\n// and perform softmax and then reverse the transpose. 
We can skip the transposing aspect if the axis is already\n// the innermost dim\nexport const softmaxV13: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SoftmaxAttributes): Tensor[] => {\n validateInputs(inputs);\n\n const inputShape = inputs[0].dims.slice();\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const rank = inputShape.length;\n\n const isTransposeRequired = (axis !== rank - 1) ? true : false;\n const transposedInputShape: number[] = [];\n let perm: number[] = [];\n let transposedInputs: Tensor[] = [];\n let transposeAttribute: TransposeAttributes;\n\n if (isTransposeRequired) {\n perm = Array.from({length: rank}).map((_, i) => i);\n\n // swap the innermost dim with the dim corresponding to axis\n perm[axis] = rank - 1;\n perm[rank - 1] = axis;\n\n perm.map(p => transposedInputShape.push(inputShape[p]));\n\n transposeAttribute = createAttributeWithCacheKey({perm});\n transposedInputs = transpose(inferenceHandler, inputs, transposeAttribute);\n }\n\n const logicalRowCount = isTransposeRequired ? ShapeUtil.sizeToDimension(transposedInputShape, rank - 1) :\n ShapeUtil.sizeToDimension(inputShape, rank - 1);\n const featureCount = isTransposeRequired ? ShapeUtil.sizeFromDimension(transposedInputShape, rank - 1) :\n ShapeUtil.sizeFromDimension(inputShape, rank - 1);\n\n const output = computeSoftmax(\n inferenceHandler, isTransposeRequired ? transposedInputs : inputs, attributes, logicalRowCount, featureCount);\n\n if (isTransposeRequired) {\n const reversedOutput = transpose(inferenceHandler, output, transposeAttribute!);\n return reversedOutput;\n } else {\n return output;\n }\n };\n\nconst computeSoftmax =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SoftmaxAttributes, logicalRowCount: number,\n featureCount: number): Tensor[] => {\n const computeMaxProgramInfo =\n createComputeMaxProgramInfo(inferenceHandler, inputs[0], logicalRowCount, featureCount, [logicalRowCount]);\n const max = inferenceHandler.run(\n {...softmaxComputeMaxProgramMetadata, cacheHint: attributes.cacheKey, get: () => computeMaxProgramInfo},\n inputs);\n\n const computeScaleProgramInfo = createComputScaleProgramInfo(\n inferenceHandler, inputs[0], logicalRowCount, featureCount, computeMaxProgramInfo.output.dims,\n [logicalRowCount]);\n const scale = inferenceHandler.run(\n {...softmaxComputeScaleProgramMetadata, cacheHint: attributes.cacheKey, get: () => computeScaleProgramInfo},\n [inputs[0], max]);\n\n const softMaxProgramInfo = createSoftMaxProgramInfo(\n inferenceHandler, inputs[0], logicalRowCount, featureCount, computeMaxProgramInfo.output.dims,\n computeScaleProgramInfo.output.dims);\n const output = inferenceHandler.run(\n {...softmaxProgramMetadata, cacheHint: attributes.cacheKey, get: () => softMaxProgramInfo},\n [inputs[0], max, scale]);\n return [output];\n };\n\n/**\n * Create a texture that contains the maximum value of each of the 'N' rows\n */\nconst createComputeMaxProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, logicalRowCount: number, featureCount: number,\n outputShape: number[]): ProgramInfo => {\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const rank = outputShape.length;\n\n if (logicalRowCount < 1 || featureCount < 1) {\n throw new Error('Logical row count N and feature count D must be greater than or equal to 1');\n }\n\n if (outputShape.length !== 1) {\n throw new 
Error('Dimensionality of the output should be 1');\n }\n\n if (outputShape[0] !== logicalRowCount) {\n throw new Error('Shape of the output should be equal to logical row count');\n }\n\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const shaderSource = `\n float process(int[${rank}] indices) {\n int logical_row_start_offset = indices[0] * ${featureCount};\n\n float max = getColorAsFloat(${glsl.texture2D}(A, offsetToCoords(logical_row_start_offset, ${textureWidth},\n ${textureHeight} )));\n for(int i=1; i<${featureCount}; ++i)\n {\n float current = getColorAsFloat(${glsl.texture2D}(A, offsetToCoords(logical_row_start_offset + i,\n ${textureWidth}, ${textureHeight})));\n if(current > max)\n max = current;\n }\n\n return max;\n }`;\n return {\n ...softmaxComputeMaxProgramMetadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\n/**\n * Create a texture that contains the normalization factor for each of the 'N' rows\n */\nconst createComputScaleProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, logicalRowCount: number, featureCount: number,\n maxElementPerLogicalRow: readonly number[], outputShape: number[]): ProgramInfo => {\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const rank = outputShape.length;\n\n if (logicalRowCount < 1 || featureCount < 1) {\n throw new Error('Logical row count N and feature count D must be greater than or equal to 1');\n }\n\n if (outputShape.length !== 1) {\n throw new Error('Dimensionality of the output should be 1');\n }\n\n if (outputShape[0] !== logicalRowCount) {\n throw new Error('Shape of the output should be equal to logical row count');\n }\n\n if (maxElementPerLogicalRow.length !== 1) {\n throw new Error('Dimensionality of the intermediate results should be 1');\n }\n\n if (maxElementPerLogicalRow[0] !== logicalRowCount) {\n throw new Error('Shape of the intermediate results should be equal to logical row count');\n }\n\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const shaderSource = `\n float process(int[${rank}] indices) {\n int logical_row_start_offset = indices[0] * ${featureCount};\n\n float norm_factor = 0.0;\n float max = _Max(indices);\n for(int i=0; i<${featureCount}; ++i)\n {\n norm_factor += exp(getColorAsFloat(${glsl.texture2D}(A, offsetToCoords(logical_row_start_offset + i,\n ${textureWidth}, ${textureHeight}))) - max);\n }\n\n return norm_factor;\n }`;\n return {\n ...softmaxComputeScaleProgramMetadata,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst createSoftMaxProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, input: Tensor, logicalRowCount: number, featureCount: number,\n maxElementPerLogicalRow: readonly number[], normalizationPerLogicalRow: readonly number[]): ProgramInfo => {\n const [textureWidth, textureHeight] =\n inferenceHandler.calculateTextureWidthAndHeight(input.dims, TextureType.unpacked);\n const rank = input.dims.length;\n\n if (logicalRowCount < 1 || featureCount < 1) {\n throw new Error('Logical row count N and feature count D must be greater than or equal to 1');\n }\n\n if (maxElementPerLogicalRow.length !== 1 || normalizationPerLogicalRow.length !== 1) {\n throw new Error('Dimensionality of the intermediate results should be 1');\n }\n\n if (maxElementPerLogicalRow[0] !== logicalRowCount || 
normalizationPerLogicalRow[0] !== logicalRowCount) {\n throw new Error('Shape of the intermediate results should be equal to logical row count');\n }\n\n const shaderSource = `\n float process(int[${rank}] indices) {\n\n // get offset of current logical tensor index from the 2-D texture coordinates (TexCoords)\n int offset = coordsToOffset(TexCoords, ${textureWidth}, ${textureHeight});\n\n //determine the logical row for this index\n int logical_row_index[1];\n logical_row_index[0] = offset / ${featureCount};\n\n float norm_factor = _Norm(logical_row_index);\n\n // avoid possible division by 0\n // if norm_facor is 0, all elements are zero\n // if so, return 0\n if(norm_factor == 0.0)\n return 0.0;\n\n return exp(_A(indices) - _Max(logical_row_index)) / norm_factor;\n }`;\n return {\n ...softmaxProgramMetadata,\n output: {dims: input.dims, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax requires 1 input.');\n }\n\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../../../attribute-with-cache-key';\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil, SplitUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, TextureType} from '../types';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly split: number[];\n readonly numOutputs: number;\n}\n\nconst splitProgramMetadata = {\n name: 'Split',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n};\n\nexport const split: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], attributes: SplitAttributes): Tensor[] => {\n validateInputs(inputs);\n\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputs[0].dims.length);\n const count = getProgramCount(inferenceHandler, inputs, axis, attributes);\n const output: Tensor[] = [];\n for (let i = 0; i < count; ++i) {\n output.push(inferenceHandler.run(\n {\n ...splitProgramMetadata,\n cacheHint: `${attributes.cacheKey};${i}`,\n get: () => createSplitProgramInfo(inferenceHandler, inputs[0], attributes, axis, i)\n },\n inputs));\n }\n\n return output;\n };\n\nexport const parseSplitAttributes: OperatorInitialization = (node: Graph.Node): SplitAttributes => {\n const axis = node.attributes.getInt('axis', 0);\n const split = node.attributes.getInts('split', []);\n const numOutputs = node.outputs.length;\n return createAttributeWithCacheKey({axis, split, numOutputs});\n};\n\nconst getProgramCount =\n (_inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axis: number, attributes: SplitAttributes): number => {\n const [, offsets] = SplitUtil.splitShape(inputs[0].dims, axis, attributes.split, attributes.numOutputs);\n return offsets.length;\n };\n\nconst createSplitProgramInfo =\n (_inferenceHandler: WebGLInferenceHandler, input: Tensor, attributes: SplitAttributes, axis: number, index: number):\n ProgramInfo => {\n const [shapes, offsets] = SplitUtil.splitShape(input.dims, axis, attributes.split, attributes.numOutputs);\n const offset = 
offsets[index];\n const outputShape = shapes[index];\n const rank = outputShape.length;\n const shaderSource = `\n float process(int indices[${rank}]) {\n indices[${axis}] += ${offset};\n return _A(indices);\n }\n `;\n return {\n ...splitProgramMetadata,\n cacheHint: `${attributes.cacheKey}:${index}`,\n output: {dims: outputShape, type: input.type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Split requires one input.');\n }\n\n if (inputs[0].type !== 'int8' && inputs[0].type !== 'uint8' && inputs[0].type !== 'int16' &&\n inputs[0].type !== 'uint16' && inputs[0].type !== 'int32' && inputs[0].type !== 'uint32' &&\n inputs[0].type !== 'float32' && inputs[0].type !== 'float64' && inputs[0].type !== 'bool') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const squeeze: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axes: number[]): Tensor[] => {\n validateInputs(inputs);\n const outputShape = ShapeUtil.squeezeShape(inputs[0].dims, axes);\n const output = inferenceHandler.reshapeUnpacked(inputs[0], outputShape);\n return [output];\n };\n\nexport const squeezeV13 = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputsV13(inputs);\n return squeeze(inferenceHandler, [inputs[0]], Array.from(inputs[1].integerData));\n};\n\nexport const parseSqueezeAttributes: OperatorInitialization = (node: Graph.Node): number[] =>\n node.attributes.getInts('axes');\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Squeeze requires 1 input.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('invalid input tensor types.');\n }\n};\n\nconst validateInputsV13 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Squeeze requires 2 inputs.');\n }\n\n if (inputs[1].type !== 'int32') {\n throw new Error('Invalid input type.');\n }\n};", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from '../../../tensor';\nimport {getGlsl} from '../glsl-source';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport const sum = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n\n const sumProgramMetadata = {\n name: 'Sum',\n inputNames: inputs.map((_v, i) => `X${i}`),\n inputTypes: new Array(inputs.length).fill(TextureType.unpacked)\n };\n\n const output = inferenceHandler.run(\n {...sumProgramMetadata, get: () => createSumProgramInfo(inferenceHandler, inputs, sumProgramMetadata)}, inputs);\n return [output];\n};\n\nconst createSumProgramInfo =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], sumProgramMetadata: ProgramMetadata): ProgramInfo => {\n const glsl = getGlsl(inferenceHandler.session.backend.glContext.version);\n const outputShape = inputs[0].dims.slice();\n const sumLine = inputs.map((_v, i) => `${glsl.texture2D}(X${i},TexCoords)`).join(' + ');\n const shaderSource = `\n void main() {\n vec4 result = ${sumLine};\n ${glsl.output} = result;\n }\n `;\n return {\n ...sumProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n hasMain: true,\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length === 0) {\n throw new Error('Sum requires inputs.');\n }\n\n const length = inputs[0].dims.length;\n for (let i = 1; i < inputs.length; i++) {\n if (length !== inputs[i].dims.length) {\n throw new Error('Input shapes are mismatched.');\n }\n\n for (let j = 0; j < length; j++) {\n if (inputs[0].dims[j] !== inputs[i].dims[j]) {\n throw new Error('Input shapes are not matched.');\n }\n }\n }\n\n if (inputs[0].type !== 'float32' && inputs[0].type !== 'float64') {\n throw new Error('Invalid input type.');\n }\n for (let i = 1; i < inputs.length; i++) {\n if (inputs[0].type !== inputs[i].type) {\n throw new Error('Input types are not matched.');\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {NUMBER_TYPES} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {WebGLInferenceHandler} from '../inference-handler';\nimport {ProgramInfo, ProgramMetadata, TextureType} from '../types';\n\nexport const tile = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputs(inputs);\n\n const tileProgramMetadata = {\n name: 'Tile',\n inputNames: ['A'],\n inputTypes: [TextureType.unpacked],\n };\n\n const output = inferenceHandler.run(\n {...tileProgramMetadata, get: () => createTileProgramInfo(inferenceHandler, inputs, tileProgramMetadata)},\n inputs);\n return [output];\n};\n\nconst createTileProgramInfo =\n (_handler: WebGLInferenceHandler, inputs: Tensor[], tileProgramMetadata: ProgramMetadata): ProgramInfo => {\n const inputShape = inputs[0].dims.slice();\n const outputShape = new Array(inputShape.length);\n\n const tileOps: string[] = [];\n for (let i = 0; i < inputShape.length; i++) {\n outputShape[i] = inputShape[i] * inputs[1].numberData[i];\n tileOps.push(`inputIdx[${i}] = int(mod(float(outputIdx[${i}]), ${inputShape[i]}.));`);\n }\n\n const rank = outputShape.length;\n const shaderSource = `\n float process(int outputIdx[${rank}]) {\n int inputIdx[${rank}];\n ${tileOps.join('\\n')}\n return _A(inputIdx);\n }\n `;\n return {\n ...tileProgramMetadata,\n output: {dims: outputShape, type: inputs[0].type, textureType: TextureType.unpacked},\n shaderSource\n };\n };\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 input.');\n }\n if (inputs[1].dims.length !== 1) {\n throw new Error('The second input shape must 1 dimension.');\n }\n if (inputs[1].dims[0] !== inputs[0].dims.length) {\n throw new Error('Invalid input shape.');\n }\n if (NUMBER_TYPES.indexOf(inputs[0].type) === -1) {\n throw new Error('Invalid input type.');\n }\n if (inputs[1].type !== 'int32' && inputs[1].type !== 'int16') {\n throw new Error('Invalid repeat type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Graph} from '../../../graph';\nimport {OperatorImplementation, OperatorInitialization} from '../../../operators';\nimport {Tensor} from '../../../tensor';\nimport {ShapeUtil} from '../../../util';\nimport {WebGLInferenceHandler} from '../inference-handler';\n\nexport const unsqueeze: OperatorImplementation =\n (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[], axes: number[]): Tensor[] => {\n validateInputs(inputs);\n const outputShape = ShapeUtil.unsqueezeShape(inputs[0].dims, axes);\n const output = inferenceHandler.reshapeUnpacked(inputs[0], outputShape);\n return [output];\n };\n\nexport const unsqueezeV13 = (inferenceHandler: WebGLInferenceHandler, inputs: Tensor[]): Tensor[] => {\n validateInputsV13(inputs);\n return unsqueeze(inferenceHandler, [inputs[0]], Array.from(inputs[1].integerData));\n};\n\nexport const parseUnsqueezeAttributes: OperatorInitialization = (node: Graph.Node): number[] =>\n node.attributes.getInts('axes');\n\nconst validateInputs = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Unsqueeze requires 1 input.');\n }\n\n if (inputs[0].type === 'string') {\n throw new Error('invalid input tensor types.');\n }\n};\n\nconst validateInputsV13 = (inputs: Tensor[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Unsqueeze requires 2 inputs.');\n }\n\n if (inputs[1].type !== 'int32') {\n throw new Error('Invalid input type.');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OpSet} from '../../opset';\n\nimport {batchNormalization, parseBatchNormalizationAttributes} from './ops/batch-normalization';\nimport * as binaryOps from './ops/binary-op';\nimport {cast, parseCastAttributes} from './ops/cast';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {flatten, parseFlattenAttributes} from './ops/flatten';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gemm, parseGemmAttributesV11, parseGemmAttributesV7} from './ops/gemm';\nimport {imageScaler, parseImageScalerAttributes} from './ops/image-scaler';\nimport {instanceNormalization, parseInstanceNormalizationAttributes} from './ops/instance-normalization';\nimport {lrn, parseLrnAttributes} from './ops/lrn';\nimport {matMul, parseMatMulAttributes} from './ops/matmul';\nimport {padV11, padV2, parsePadAttributesV11, parsePadAttributesV2} from './ops/pad';\nimport {averagePool, globalAveragePool, globalMaxPool, maxPool, parseAveragePoolAttributes, parseGlobalAveragePoolAttributes, parseMaxPoolAttributes} from './ops/pool';\nimport {parseReduceAttributes, reduceLogSum, reduceLogSumSquare, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum} from './ops/reduce';\nimport {reshape} from './ops/reshape';\nimport {parseResizeAttributesV10, parseResizeAttributesV11, resize} from './ops/resize-packed';\nimport {shape} from './ops/shape';\nimport {parseSliceAttributes, slice, sliceV10} from './ops/slice';\nimport {parseSoftmaxAttributes, parseSoftmaxAttributesV13, softmax, softmaxV13} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {parseSqueezeAttributes, squeeze, squeezeV13} from './ops/squeeze';\nimport {sum} from 
'./ops/sum';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {parseUnsqueezeAttributes, unsqueeze, unsqueezeV13} from './ops/unsqueeze';\nimport {parseUpsampleAttributesV7, parseUpsampleAttributesV9, upsample} from './ops/upsample';\n\nexport const WEBGL_OP_RESOLVE_RULES: readonly OpSet.ResolveRule[] = [\n ['Abs', '', '6+', unaryOps.abs],\n ['Acos', '', '7+', unaryOps.acos],\n ['Add', '', '7+', binaryOps.add],\n ['And', '', '7+', binaryOps.and],\n ['Asin', '', '7+', unaryOps.asin],\n ['Atan', '', '7+', unaryOps.atan],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', '', '7+', averagePool, parseAveragePoolAttributes],\n ['BatchNormalization', '', '7+', batchNormalization, parseBatchNormalizationAttributes],\n ['Cast', '', '6+', cast, parseCastAttributes],\n ['Ceil', '', '6+', unaryOps.ceil],\n ['Clip', '', '6-10', unaryOps.clip, unaryOps.parseClipAttributes],\n ['Clip', '', '11+', unaryOps.clipV11],\n ['Concat', '', '4+', concat, parseConcatAttributes],\n ['Conv', '', '1+', conv, parseConvAttributes],\n ['ConvTranspose', '', '1+', convTranspose, parseConvTransposeAttributes],\n ['Cos', '', '7+', unaryOps.cos],\n ['Div', '', '7+', binaryOps.div],\n ['Dropout', '', '7+', unaryOps.identity],\n ['DepthToSpace', '', '1+', depthToSpace, parseDepthToSpaceAttributes],\n ['Equal', '', '7+', binaryOps.equal],\n ['Elu', '', '6+', unaryOps.elu, unaryOps.parseEluAttributes],\n ['Exp', '', '6+', unaryOps.exp],\n ['Flatten', '', '1+', flatten, parseFlattenAttributes],\n ['Floor', '', '6+', unaryOps.floor],\n ['FusedConv', 'com.microsoft', '1+', conv, parseConvAttributes],\n ['Gather', '', '1+', gather, parseGatherAttributes],\n ['Gemm', '', '7-10', gemm, parseGemmAttributesV7],\n ['Gemm', '', '11+', gemm, parseGemmAttributesV11],\n ['GlobalAveragePool', '', '1+', globalAveragePool, parseGlobalAveragePoolAttributes],\n ['GlobalMaxPool', '', '1+', globalMaxPool],\n ['Greater', '', '7+', binaryOps.greater],\n ['Identity', '', '1+', unaryOps.identity],\n ['ImageScaler', '', '1+', imageScaler, parseImageScalerAttributes],\n ['InstanceNormalization', '', '6+', instanceNormalization, parseInstanceNormalizationAttributes],\n ['LeakyRelu', '', '6+', unaryOps.leakyRelu, unaryOps.parseLeakyReluAttributes],\n ['Less', '', '7+', binaryOps.less],\n ['LRN', '', '1+', lrn, parseLrnAttributes],\n ['Log', '', '6+', unaryOps.log],\n ['MatMul', '', '1+', matMul, parseMatMulAttributes],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', '', '1+', maxPool, parseMaxPoolAttributes],\n ['Mul', '', '7+', binaryOps.mul],\n ['Neg', '', '6+', unaryOps.neg],\n ['Not', '', '1+', unaryOps.not],\n ['Or', '', '7+', binaryOps.or],\n ['Pad', '', '2-10', padV2, parsePadAttributesV2],\n ['Pad', '', '11+', padV11, parsePadAttributesV11],\n ['Pow', '', '7+', binaryOps.pow],\n ['PRelu', '', '7+', binaryOps.pRelu],\n ['ReduceLogSum', '', '1+', reduceLogSum, parseReduceAttributes],\n ['ReduceMax', '', '1+', reduceMax, parseReduceAttributes],\n ['ReduceMean', '', '1+', reduceMean, parseReduceAttributes],\n ['ReduceMin', '', '1+', reduceMin, parseReduceAttributes],\n ['ReduceProd', '', '1+', reduceProd, parseReduceAttributes],\n ['ReduceSum', '', '1-12', reduceSum, parseReduceAttributes],\n ['ReduceSumSquare', '', '1+', reduceLogSumSquare, parseReduceAttributes],\n ['Relu', '', '6+', unaryOps.relu],\n ['Reshape', '', '5+', reshape],\n ['Resize', '', '10', resize, parseResizeAttributesV10],\n 
['Resize', '', '11+', resize, parseResizeAttributesV11],\n ['Shape', '', '1+', shape],\n ['Sigmoid', '', '6+', unaryOps.sigmoid],\n ['Sin', '', '7+', unaryOps.sin],\n ['Slice', '', '10+', sliceV10], // TODO: support 'steps' for Slice-10\n ['Slice', '', '1-9', slice, parseSliceAttributes],\n // The \"semantic\" meaning of axis has changed in opset-13.\n ['Softmax', '', '1-12', softmax, parseSoftmaxAttributes],\n ['Softmax', '', '13+', softmaxV13, parseSoftmaxAttributesV13],\n // 'Split' operator has an optional attribute 'split'\n // this attribute determines how the specified axis of input data is split.\n // When the attribute is missing, we need the count of number of outputs\n // so that we can determine the 'split' attribute from the runtime input to the Operator\n ['Split', '', '2-12', split, parseSplitAttributes],\n ['Sqrt', '', '6+', unaryOps.sqrt],\n ['Squeeze', '', '1-12', squeeze, parseSqueezeAttributes],\n ['Squeeze', '', '13+', squeezeV13],\n ['Sub', '', '7+', binaryOps.sub],\n ['Sum', '', '6+', sum],\n ['Tan', '', '7+', unaryOps.tan],\n ['Tanh', '', '6+', unaryOps.tanh],\n ['Tile', '', '6+', tile],\n ['Transpose', '', '1+', transpose, parseTransposeAttributes],\n ['Upsample', '', '7-8', upsample, parseUpsampleAttributesV7],\n ['Upsample', '', '9', upsample, parseUpsampleAttributesV9],\n ['Unsqueeze', '', '1-12', unsqueeze, parseUnsqueezeAttributes],\n ['Unsqueeze', '', '13+', unsqueezeV13],\n ['Xor', '', '7+', binaryOps.xor],\n];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nconst INLINE_FUNC_DEF_REGEX = /@inline[\\s\\n\\r]+(\\w+)[\\s\\n\\r]+([0-9a-zA-Z_]+)\\s*\\(([^)]*)\\)\\s*{(([^}]|[\\n\\r])*)}/gm;\nconst FUNC_CALL_REGEX = '(\\\\w+)?\\\\s+([_0-9a-zA-Z]+)\\\\s+=\\\\s+__FUNC__\\\\((.*)\\\\)\\\\s*;';\n/**\n * GLSL preprocessor responsible for resolving @inline directives\n */\nexport function replaceInlines(script: string): string {\n const inlineDefs: {[name: string]: {params: Array<{type: string; name: string}|null>; body: string}} = {};\n let match;\n while ((match = INLINE_FUNC_DEF_REGEX.exec(script)) !== null) {\n const params = match[3]\n .split(',')\n .map(s => {\n const tokens = s.trim().split(' ');\n if (tokens && tokens.length === 2) {\n return {type: tokens[0], name: tokens[1]};\n }\n return null;\n })\n .filter(v => v !== null);\n inlineDefs[match[2]] = {params, body: match[4]};\n }\n for (const name in inlineDefs) {\n const regexString = FUNC_CALL_REGEX.replace('__FUNC__', name);\n const regex = new RegExp(regexString, 'gm');\n while ((match = regex.exec(script)) !== null) {\n const type = match[1];\n const variable = match[2];\n const params = match[3].split(',');\n const declLine = (type) ? `${type} ${variable};` : '';\n let newBody: string = inlineDefs[name].body;\n let paramRedecLine = '';\n inlineDefs[name].params.forEach((v, i) => {\n if (v) {\n paramRedecLine += `${v.type} ${v.name} = ${params[i]};\\n`;\n }\n });\n newBody = `${paramRedecLine}\\n ${newBody}`;\n newBody = newBody.replace('return', `${variable} = `);\n const replacement = `\n ${declLine}\n {\n ${newBody}\n }\n `;\n script = script.replace(match[0], replacement);\n }\n }\n script = script.replace(INLINE_FUNC_DEF_REGEX, '');\n return script;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../instrument';\nimport {assert} from '../../util';\n\n/** Layout preferences */\nexport interface WidthHeightPrefs {\n breakAxis?: number;\n isPacked?: boolean;\n reverseWH?: boolean;\n}\n/**\n * TextureLayoutStrategy is an abstraction for different plans\n * for mapping n-dimensional arrays to 2D textures (and back)\n */\nexport interface TextureLayoutStrategy {\n computeTextureWH(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number];\n}\n\n/**\n * This strategy try to find the minimal max(W,H) that fulfills (W * H == totalSize)\n */\nexport class AlwaysKeepOriginalSizeStrategy implements TextureLayoutStrategy {\n constructor(public maxTextureSize: number) {}\n computeTextureWH(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number] {\n // scalar tensor\n if (shape.length === 0) {\n return [1, 1];\n }\n const maxTextureSize = this.maxTextureSize;\n if (prefs && prefs.breakAxis !== undefined) {\n // check to see if dims fit\n const wsize = prefs.breakAxis >= shape.length ? 1 : shape.slice(prefs.breakAxis).reduce((a, b) => a * b);\n const hsize = prefs.breakAxis <= 0 ? 1 : shape.slice(0, prefs.breakAxis).reduce((a, b) => a * b);\n if (wsize > maxTextureSize || hsize > maxTextureSize) {\n // ignore preferences\n // continue with default layout\n Logger.verbose(\n 'TextureLayout',\n `Given width/height preferences were unattainable: shape:${shape}, breakAxis:${prefs.breakAxis}`);\n } else {\n return [wsize, hsize];\n }\n }\n const totalSize = shape.reduce((a, b) => a * b);\n\n let width = Math.floor(Math.sqrt(totalSize));\n\n for (; width < maxTextureSize && width < totalSize; width++) {\n if (totalSize % width === 0) {\n break;\n }\n }\n\n if (width >= maxTextureSize || totalSize % width !== 0) {\n throw new Error(`The given dimensions are outside this GPU's boundaries: ${shape}`);\n }\n return [width, totalSize / width];\n }\n}\n\nexport class PreferLogicalStrategy implements TextureLayoutStrategy {\n constructor(public maxTextureSize: number) {}\n computeTextureWH(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number] {\n const wh = this.computeTexture(shape, prefs);\n if (prefs && prefs.isPacked) {\n wh[0] /= 2;\n wh[1] /= 2;\n }\n if (prefs && prefs.reverseWH) {\n return [wh[1], wh[0]];\n }\n return wh;\n }\n\n computeTexture(shape: readonly number[], prefs?: WidthHeightPrefs): [number, number] {\n const isPacked = prefs && prefs.isPacked;\n // scalar tensor\n if (shape.length === 0) {\n return isPacked ? [2, 2] : [1, 1];\n }\n let maxTextureSize = this.maxTextureSize;\n if (prefs && prefs.breakAxis !== undefined) {\n // check to see if dims fit\n const wsize = prefs.breakAxis >= shape.length ? 1 : shape.slice(prefs.breakAxis).reduce((a, b) => a * b);\n const hsize = prefs.breakAxis <= 0 ? 1 : shape.slice(0, prefs.breakAxis).reduce((a, b) => a * b);\n if (wsize > maxTextureSize || hsize > maxTextureSize) {\n // ignore preferences\n // continue with default layout\n Logger.verbose(\n 'TextureLayout',\n `Given width/height preferences were unattainable: shape:${shape}, breakAxis:${prefs.breakAxis}`);\n } else {\n return [wsize, hsize];\n }\n }\n let logShape = shape.slice(0);\n if (isPacked) {\n maxTextureSize = maxTextureSize * 2;\n\n // This logic ensures we accurately count the number of packed texels needed\n // to accommodate the tensor. We can only pack values in the same texel if\n // they are from adjacent pairs of rows/cols within the same batch. 
So if a\n // tensor has 3 rows, we pretend it has 4 rows in order to account for the\n // fact that the texels containing the third row are half empty.\n logShape = logShape.map(\n (_d, i) => i >= logShape.length - 2 ? (logShape[i] % 2 === 0 ? logShape[i] : logShape[i] + 1) : logShape[i]);\n\n // Packed texture height is at least 2 (the channel height of a single\n // texel).\n if (logShape.length === 1) {\n logShape = [2, logShape[0]];\n }\n }\n\n // If logical shape is 2, we don't squeeze, since we want to match physical.\n if (logShape.length !== 2) {\n const squeezeResult = squeezeShape(logShape);\n logShape = squeezeResult.newShape;\n }\n\n const size = sizeFromShape(logShape);\n if (logShape.length <= 1 && size <= maxTextureSize) {\n return [1, size];\n } else if (logShape.length === 2 && logShape[0] <= maxTextureSize && logShape[1] <= maxTextureSize) {\n return logShape as [number, number];\n } else if (logShape.length === 3 && logShape[0] * logShape[1] <= maxTextureSize && logShape[2] <= maxTextureSize) {\n return [logShape[0] * logShape[1], logShape[2]];\n } else if (logShape.length === 3 && logShape[0] <= maxTextureSize && logShape[1] * logShape[2] <= maxTextureSize) {\n return [logShape[0], logShape[1] * logShape[2]];\n } else if (\n logShape.length === 4 && logShape[0] * logShape[1] * logShape[2] <= maxTextureSize &&\n logShape[3] <= maxTextureSize) {\n return [logShape[0] * logShape[1] * logShape[2], logShape[3]];\n } else if (\n logShape.length === 4 && logShape[0] <= maxTextureSize &&\n logShape[1] * logShape[2] * logShape[3] <= maxTextureSize) {\n return [logShape[0], logShape[1] * logShape[2] * logShape[3]];\n } else {\n if (isPacked) {\n // For packed textures size equals the number of channels required to\n // accommodate the texture data. However in order to squarify such that\n // inner dimensions stay even, we rewrite size to equal the number of\n // texels. Then in the return statement we rehydrate the squarified\n // dimensions to channel units.\n return sizeToSquarishShape(size / 4).map(d => d * 2) as [number, number];\n }\n return sizeToSquarishShape(size);\n }\n }\n}\n\nexport function squeezeShape(shape: number[], axis?: number[]): {newShape: number[]; keptDims: number[]} {\n const newShape: number[] = [];\n const keptDims: number[] = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = (axis == null || isEmptyArray) ? null : parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axes != null) {\n if (axes[j] === i && shape[i] !== 1) {\n throw new Error(`Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axes[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return {newShape, keptDims};\n}\n\nexport function parseAxisParam(axis: number|number[], shape: number[]): number[] {\n const rank = shape.length;\n\n // Normalize input\n axis = axis == null ? 
shape.map((_s, i) => i) : ([] as number[]).concat(axis);\n\n // Check for valid range\n assert(\n axis.every(ax => ax >= -rank && ax < rank),\n () => `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n\n // Check for only integers\n assert(\n axis.every(isInt),\n () => 'All values in axis param must be integers but ' +\n `got axis ${axis}`);\n\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\nexport function isInt(a: number): boolean {\n return a % 1 === 0;\n}\nexport function sizeFromShape(shape: number[]): number {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\nexport function getRowsCols(shape: number[]): [number, number] {\n if (shape.length === 0) {\n throw Error('Cannot get rows and columns of an empty shape array.');\n }\n\n return [shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]];\n}\nexport function sizeToSquarishShape(size: number): [number, number] {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\nexport function getBatchDim(shape: number[], dimsToSkip = 2): number {\n return sizeFromShape(shape.slice(0, shape.length - dimsToSkip));\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {ArrayUtil, BroadcastUtil, ShapeUtil} from '../../util';\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\nimport {getGlsl} from './glsl-source';\nimport {squeezeShape} from './texture-layout-strategy';\nimport {TextureLayout} from './types';\nimport {generateShaderFuncNameFromInputSamplerName, generateShaderFuncNameFromInputSamplerNameAtOutCoords, getCoordsDataType, getGlChannels, getSqueezedParams, squeezeInputShape} from './utils';\n\n/**\n * GLSL Library responsible for data types and routines for manipulating\n * coordinates and mapping to/from tensor indices\n */\nexport class CoordsGlslLib extends GlslLib {\n returnType: string;\n\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {\n ...this.offsetToCoords(),\n ...this.coordsToOffset(),\n ...this.toVec(),\n ...this.valueFrom(),\n // TODO return these only when packing is enabled.\n ...this.getCommonUtilFuncs(),\n ...this.getInputsSamplingSnippets(),\n ...this.getOutputSamplingSnippet()\n };\n }\n getCustomTypes() {\n return {};\n }\n /**\n * Produces a function that can map from\n * 2D normalzied coordinates (s,t) to a flat offset\n */\n protected offsetToCoords(): {[name: string]: GlslLibRoutine} {\n const funcName = 'offsetToCoords';\n return {\n offsetToCoords: new GlslLibRoutine(`\n vec2 ${funcName}(int offset, int width, int height) {\n int t = offset / width;\n int s = offset - t*width;\n vec2 coords = (vec2(s,t) + vec2(0.5,0.5)) / vec2(width, height);\n return coords;\n }\n `)\n };\n }\n\n /**\n * Produces a function that can map from\n * 2D normalzied coordinates (s,t) to a flat offset\n */\n protected coordsToOffset(): {[name: string]: GlslLibRoutine} {\n const funcName = 'coordsToOffset';\n return {\n coordsToOffset: new GlslLibRoutine(`\n int ${funcName}(vec2 coords, int width, int height) {\n float s = coords.s * float(width);\n float t = coords.t * float(height);\n int offset = int(t) * width + int(s);\n return offset;\n }\n `)\n };\n }\n\n /**\n * Generates code for output sampler.\n */\n\n protected 
getOutputSamplingSnippet(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n if (outputLayout.isPacked) {\n return this.getPackedOutputSamplingSnippet(outputLayout);\n } else {\n return this.getUnpackedOutputSamplingSnippet(outputLayout);\n }\n }\n\n /**\n * Generates code for packed output sampler.\n */\n protected getPackedOutputSamplingSnippet(outputLayout: TextureLayout): {[name: string]: GlslLibRoutine} {\n const outShape = outputLayout.unpackedShape;\n const outTexShape = [outputLayout.width, outputLayout.height];\n const result: {[name: string]: GlslLibRoutine} = {};\n const funcName = 'getOutputCoords';\n switch (outShape.length) {\n case 0:\n result[funcName] = this.getOutputScalarCoords();\n break;\n case 1:\n result[funcName] = this.getOutputPacked1DCoords(outShape as [number], outTexShape as [number, number]);\n break;\n case 2:\n result[funcName] = this.getOutputPacked2DCoords(outShape as [number, number], outTexShape as [number, number]);\n break;\n case 3:\n result[funcName] =\n this.getOutputPacked3DCoords(outShape as [number, number, number], outTexShape as [number, number]);\n break;\n default:\n result[funcName] = this.getOutputPackedNDCoords(outShape, outTexShape as [number, number]);\n }\n const glsl = getGlsl(this.context.glContext.version);\n // TODO we need this to properly return a packed vec4 from kernels.\n // Replace all '{glsl.output} = result' with 'setOutput(result)' in all kernels.\n const floatTextureSetRGBASource = `\n void setOutput(vec4 val) {\n ${glsl.output} = val;\n }\n `;\n const floatTextureSetRGBAFuncName = 'floatTextureSetRGBA';\n result[floatTextureSetRGBAFuncName] = new GlslLibRoutine(floatTextureSetRGBASource);\n return result;\n }\n\n /**\n * Generates code for unpacked output sampler.\n */\n protected getUnpackedOutputSamplingSnippet(outputLayout: TextureLayout): {[name: string]: GlslLibRoutine} {\n const outShape = outputLayout.unpackedShape;\n const outTexShape = [outputLayout.width, outputLayout.height];\n const result: {[name: string]: GlslLibRoutine} = {};\n const funcName = 'getOutputCoords';\n switch (outShape.length) {\n case 0:\n result[funcName] = this.getOutputScalarCoords();\n break;\n case 1:\n result[funcName] = this.getOutputUnpacked1DCoords(outShape as [number], outTexShape as [number, number]);\n break;\n case 2:\n result[funcName] =\n this.getOutputUnpacked2DCoords(outShape as [number, number], outTexShape as [number, number]);\n break;\n case 3:\n result[funcName] =\n this.getOutputUnpacked3DCoords(outShape as [number, number, number], outTexShape as [number, number]);\n break;\n case 4:\n result[funcName] = this.getOutputUnpacked4DCoords(\n outShape as [number, number, number, number], outTexShape as [number, number]);\n break;\n case 5:\n result[funcName] = this.getOutputUnpacked5DCoords(\n outShape as [number, number, number, number, number], outTexShape as [number, number]);\n break;\n case 6:\n result[funcName] = this.getOutputUnpacked6DCoords(\n outShape as [number, number, number, number, number, number], outTexShape as [number, number]);\n break;\n default:\n throw new Error(`Unsupported output dimensionality: ${outShape.length}`);\n }\n const glsl = getGlsl(this.context.glContext.version);\n // TODO we need this to properly return a packed vec4 from kernels.\n // Replace all '{glsl.output} = result' with 'setOutput(result)' in all kernels.\n const floatTextureSetRSource = `\n void setOutput(float val) {\n ${glsl.output} = vec4(val, 0, 0, 0);\n }\n `;\n const 
floatTextureSetRFuncName = 'floatTextureSetR';\n result[floatTextureSetRFuncName] = new GlslLibRoutine(floatTextureSetRSource);\n return result;\n }\n\n /**\n * Scalar output coordinates.\n */\n protected getOutputScalarCoords(): GlslLibRoutine {\n return new GlslLibRoutine(`\n int getOutputCoords() {\n return 0;\n }\n `);\n }\n\n /**\n * 1D packed output coordinates.\n */\n protected getOutputPacked1DCoords(_shape: [number], texShape: [number, number]): GlslLibRoutine {\n const packedTexShape = texShape;\n let source = '';\n if (packedTexShape[0] === 1) {\n source = `\n int getOutputCoords() {\n return 2 * int(TexCoords.y * ${packedTexShape[1]}.0);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n if (packedTexShape[1] === 1) {\n source = `\n int getOutputCoords() {\n return 2 * int(TexCoords.x * ${packedTexShape[0]}.0);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n source = `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n return 2 * (resTexRC.y * ${packedTexShape[0]} + resTexRC.x);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * 2D packed output coordinates.\n */\n protected getOutputPacked2DCoords(shape: [number, number], texShape: [number, number]): GlslLibRoutine {\n let source = '';\n if (ArrayUtil.arraysEqual(shape, texShape)) {\n source = `\n ivec2 getOutputCoords() {\n return 2 * ivec2(TexCoords.xy * vec2(${texShape[0]}, ${texShape[1]}));\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n const packedTexShape = texShape;\n // texels needed to accommodate a logical row\n const texelsInLogicalRow = Math.ceil(shape[1] / 2);\n\n /**\n * getOutputCoords\n *\n * resTexRC: The rows and columns of the texels. If you move over one\n * texel to the right in the packed texture, you are moving over one column\n * (not two).\n *\n * index: The texel index\n */\n source = `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n\n int index = resTexRC.y * ${packedTexShape[0]} + resTexRC.x;\n\n // reverse r and c order for packed texture\n int r = imod(index, ${texelsInLogicalRow}) * 2;\n int c = 2 * (index / ${texelsInLogicalRow});\n\n return ivec2(r, c);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * 3D packed output coordinates.\n */\n protected getOutputPacked3DCoords(shape: [number, number, number], texShape: [number, number]): GlslLibRoutine {\n const packedTexShape = [texShape[0], texShape[1]];\n const texelsInLogicalRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2);\n const source = `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.y * ${packedTexShape[0]} + resTexRC.x;\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n // reverse r and c order for packed texture\n int r = imod(index, ${texelsInLogicalRow}) * 2;\n int c = 2 * (index / ${texelsInLogicalRow});\n\n return ivec3(b, r, c);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * ND packed output coordinates.\n */\n protected getOutputPackedNDCoords(shape: readonly number[], texShape: [number, number]): GlslLibRoutine {\n const packedTexShape = [texShape[0], texShape[1]];\n\n const texelsInLogicalRow = Math.ceil(shape[shape.length - 1] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[shape.length - 2] / 2);\n let texelsInBatchN = texelsInBatch;\n let 
batches = '';\n let coords = 'b, r, c';\n\n for (let b = 2; b < shape.length - 1; b++) {\n texelsInBatchN *= shape[shape.length - b - 1];\n batches = `\n int b${b} = index / ${texelsInBatchN};\n index -= b${b} * ${texelsInBatchN};\n ` + batches;\n coords = `b${b}, ` + coords;\n }\n const source = `\n ivec${shape.length} getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.y * ${packedTexShape[0]} + resTexRC.x;\n\n ${batches}\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n // reverse r and c order for packed texture\n int r = imod(index, ${texelsInLogicalRow}) * 2;\n int c = 2 * (index / ${texelsInLogicalRow});\n\n return ivec${shape.length}(${coords});\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 1D output coordinates.\n */\n protected getOutputUnpacked1DCoords(_shape: [number], texShape: [number, number]): GlslLibRoutine {\n const source = `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n return resTexRC.y * ${texShape[0]} + resTexRC.x;\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 2D output coordinates.\n */\n protected getOutputUnpacked2DCoords(shape: [number, number], texShape: [number, number]): GlslLibRoutine {\n const source = `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n int r = index / ${shape[1]};\n int c = index - r * ${shape[1]};\n return ivec2(r, c);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 3D output coordinates.\n */\n protected getOutputUnpacked3DCoords(shape: [number, number, number], texShape: [number, number]): GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n `index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 4D output coordinates.\n */\n protected getOutputUnpacked4DCoords(shape: [number, number, number, number], texShape: [number, number]):\n GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd', 'd2'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n 
`index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec4(r, c, d, d2);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 5D output coordinates.\n */\n protected getOutputUnpacked5DCoords(shape: [number, number, number, number, number], texShape: [number, number]):\n GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd', 'd2', 'd3'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n `index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec5 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec5(r, c, d, d2, d3);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked 6D output coordinates.\n */\n protected getOutputUnpacked6DCoords(shape: [number, number, number, number, number, number], texShape: [\n number, number\n ]): GlslLibRoutine {\n let source = '';\n const rank = shape.length;\n\n let strides = null;\n if (rank < 2) {\n strides = [];\n }\n\n strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n const coordsToCompute = ['r', 'c', 'd', 'd2', 'd3', 'd4'];\n const coordsFromIndexSnippet =\n strides\n .map((stride, i) => {\n const line1 = `int ${coordsToCompute[i]} = index / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coordsToCompute[i + 1]} = index - ${coordsToCompute[i]} * ${stride}` :\n `index -= ${coordsToCompute[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n\n source = `\n ivec6 getOutputCoords() {\n ivec2 resTexRC = ivec2(TexCoords.xy *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.y * ${texShape[0]} + resTexRC.x;\n ${coordsFromIndexSnippet}\n return ivec6(r, c, d, d2, d3, d4);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Generates code for common UV coords computation utility functions.\n */\n protected getCommonUtilFuncs(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n let funcName = 'uvFromFlat';\n result[funcName] = new GlslLibRoutine(`\n vec2 uvFromFlat(int texNumR, int texNumC, int index) {\n int texC = index / texNumR;\n int texR = index - texC * texNumR;\n // TODO: swap texR, texC order in following function so row is corresponding to u and column is corresponding to\n // v.\n return (vec2(texR, texC) + halfCR) / vec2(texNumR, texNumC);\n }\n `);\n funcName = 'packedUVfrom1D';\n result[funcName] = new GlslLibRoutine(`\n vec2 packedUVfrom1D(int texNumR, int texNumC, int index) {\n int texelIndex = index / 2;\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * 
texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n }\n `);\n funcName = 'packedUVfrom2D';\n result[funcName] = new GlslLibRoutine(`\n vec2 packedUVfrom2D(int texNumR, int texNumC, int texelsInLogicalRow, int row, int col) {\n int texelIndex = (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n }\n `);\n funcName = 'packedUVfrom3D';\n result[funcName] = new GlslLibRoutine(`\n vec2 packedUVfrom3D(int texNumR, int texNumC,\n int texelsInBatch, int texelsInLogicalRow, int b,\n int row, int col) {\n int index = b * texelsInBatch + (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n }\n `);\n funcName = 'sampleTexture';\n const glsl = getGlsl(this.context.glContext.version);\n result[funcName] = new GlslLibRoutine(`\n float sampleTexture(sampler2D textureSampler, vec2 uv) {\n return ${glsl.texture2D}(textureSampler, uv).r;\n }`);\n return result;\n }\n\n /**\n * Constructing snippets for inputs\n */\n protected getInputsSamplingSnippets(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n const outputLayout = this.context.outputTextureLayout;\n this.context.programInfo.inputNames.forEach((samplerName, i) => {\n const inputLayout = this.context.inputTextureLayouts[i];\n const funcName = generateShaderFuncNameFromInputSamplerName(samplerName);\n if (inputLayout.isPacked) {\n result[funcName] = this.getPackedSamplerFromInput(funcName, samplerName, inputLayout);\n } else {\n result[funcName] = this.getUnpackedSamplerFromInput(funcName, samplerName, inputLayout);\n }\n\n const outCoordFuncName = generateShaderFuncNameFromInputSamplerNameAtOutCoords(samplerName);\n if (inputLayout.unpackedShape.length <= outputLayout.unpackedShape.length) {\n if (inputLayout.isPacked) {\n result[outCoordFuncName] =\n this.getPackedSamplerAtOutputCoords(outCoordFuncName, inputLayout, outputLayout, samplerName);\n } else {\n result[outCoordFuncName] =\n this.getUnpackedSamplerAtOutputCoords(outCoordFuncName, inputLayout, outputLayout, samplerName);\n }\n }\n });\n\n return result;\n }\n\n /**\n * Constructing snippets for output coordinates of samplers\n */\n protected getPackedSamplerAtOutputCoords(\n funcName: string, inputLayout: TextureLayout, outputLayout: TextureLayout, name: string): GlslLibRoutine {\n const inShape = inputLayout.unpackedShape;\n const outShape = outputLayout.unpackedShape;\n const texName = name;\n const texFuncSnippet = generateShaderFuncNameFromInputSamplerName(texName);\n\n const inRank = inShape.length;\n const outRank = outShape.length;\n\n const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);\n\n const type = getCoordsDataType(outRank);\n const rankDiff = outRank - inRank;\n let coordsSnippet: string;\n const fields = getGlChannels();\n\n if (inRank === 0) {\n coordsSnippet = '';\n } else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n } else {\n coordsSnippet = broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`).join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inShape.map((_s, i) => `coords.${fields[i + rankDiff]}`).join(', ');\n }\n\n let output = 'return outputValue;';\n const inSize = 
ShapeUtil.size(inShape);\n const isInputScalar = inSize === 1;\n const outSize = ShapeUtil.size(outShape);\n const isOutputScalar = outSize === 1;\n\n if (inRank === 1 && !isInputScalar && !isOutputScalar) {\n output = `\n return vec4(outputValue.xy, outputValue.xy);\n `;\n } else if (isInputScalar && !isOutputScalar) {\n if (outRank === 1) {\n output = `\n return vec4(outputValue.x, outputValue.x, 0., 0.);\n `;\n } else {\n output = `\n return vec4(outputValue.x);\n `;\n }\n } else if (broadcastDims.length) {\n const rows = inRank - 2;\n const cols = inRank - 1;\n\n if (broadcastDims.indexOf(rows) > -1 && broadcastDims.indexOf(cols) > -1) {\n output = 'return vec4(outputValue.x);';\n } else if (broadcastDims.indexOf(rows) > -1) {\n output = 'return vec4(outputValue.x, outputValue.y, ' +\n 'outputValue.x, outputValue.y);';\n } else if (broadcastDims.indexOf(cols) > -1) {\n output = 'return vec4(outputValue.xx, outputValue.zz);';\n }\n }\n\n const swapLastDimsSnippet = `\n int lastDim = coords.${fields[outRank - 1]};\n coords.${fields[outRank - 1]} = coords.${fields[outRank - 2]};\n coords.${fields[outRank - 2]} = lastDim;\n `;\n const source = `\n vec4 ${funcName}() {\n ${type} coords = getOutputCoords();\n ${swapLastDimsSnippet}\n ${coordsSnippet}\n vec4 outputValue = ${texFuncSnippet}(${unpackedCoordsSnippet});\n ${output}\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.getOutputCoords']);\n }\n\n /**\n * Constructing snippets for unpacked output coordinates of samplers\n */\n protected getUnpackedSamplerAtOutputCoords(\n funcName: string, inputLayout: TextureLayout, outputLayout: TextureLayout, name: string): GlslLibRoutine {\n const outTexShape = [outputLayout.width, outputLayout.height];\n const inTexShape = [inputLayout.width, inputLayout.height];\n const inRank = inputLayout.unpackedShape.length;\n const outRank = outputLayout.unpackedShape.length;\n const inShape = inputLayout.unpackedShape;\n const outShape = outputLayout.unpackedShape;\n const texFuncSnippet = generateShaderFuncNameFromInputSamplerName(name);\n\n if (inRank === outRank && ArrayUtil.arraysEqual(inTexShape, outTexShape)) {\n const source = `\n float ${funcName}() {\n return sampleTexture(${name}, TexCoords);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const type = getCoordsDataType(outRank);\n const broadcastDims = BroadcastUtil.getBroadcastDims(inShape, outShape);\n const rankDiff = outRank - inRank;\n let coordsSnippet: string;\n const fields = getGlChannels();\n\n if (inRank === 0) {\n coordsSnippet = '';\n } else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n } else {\n coordsSnippet = broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`).join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inputLayout.unpackedShape.map((_s, i) => `coords.${fields[i + rankDiff]}`).join(', ');\n }\n const source = `\n float ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n return ${texFuncSnippet}(${unpackedCoordsSnippet});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.getOutputCoords']);\n }\n\n /**\n * Constructing snippets for packed operations.\n */\n protected getPackedSamplerFromInput(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n switch (inputLayout.unpackedShape.length) {\n case 0:\n return this.getPackedSamplerScalar(funcName, name);\n case 1:\n return 
this.getPackedSampler1D(funcName, name, inputLayout);\n case 2:\n return this.getPackedSampler2D(funcName, name, inputLayout);\n case 3:\n return this.getPackedSampler3D(funcName, name, inputLayout);\n default:\n return this.getPackedSamplerND(funcName, name, inputLayout);\n }\n }\n\n /**\n * Constructing snippets for unpacked operations.\n */\n protected getUnpackedSamplerFromInput(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n switch (shape.length) {\n case 0:\n return this.getUnpackedSamplerScalar(funcName, name, inputLayout);\n case 1:\n return this.getUnpackedSampler1D(funcName, name, inputLayout);\n case 2:\n return this.getUnpackedSampler2D(funcName, name, inputLayout);\n case 3:\n return this.getUnpackedSampler3D(funcName, name, inputLayout);\n case 4:\n return this.getUnpackedSampler4D(funcName, name, inputLayout);\n case 5:\n return this.getUnpackedSampler5D(funcName, name, inputLayout);\n case 6:\n return this.getUnpackedSampler6D(funcName, name, inputLayout);\n default:\n // TODO support more dimensionalities\n throw new Error(`Unsupported dimension ${shape.length}-D`);\n }\n }\n\n /**\n * Packed scalar snippet.\n */\n protected getPackedSamplerScalar(funcName: string, name: string): GlslLibRoutine {\n const glsl = getGlsl(this.context.glContext.version);\n const source = `\n vec4 ${funcName}() {\n return ${glsl.texture2D}(${name}, halfCR);\n }\n `;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Packed 1D snippet.\n */\n protected getPackedSampler1D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const texShape = [inputLayout.width, inputLayout.height];\n const packedTexShape = [texShape[1], texShape[0]];\n const glsl = getGlsl(this.context.glContext.version);\n\n const packedSampler = `vec4 ${funcName}(int index) {\n vec2 uv = packedUVfrom1D(\n ${packedTexShape[0]}, ${packedTexShape[1]}, index);\n return ${glsl.texture2D}(${name}, uv);\n }`;\n const source = packedSampler;\n return new GlslLibRoutine(source, ['coordinates.packedUVfrom1D']);\n }\n\n /**\n * Packed 2D snippet.\n */\n protected getPackedSampler2D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const texShape = [inputLayout.width, inputLayout.height];\n const glsl = getGlsl(this.context.glContext.version);\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n\n if (texShape != null && ArrayUtil.arraysEqual(shape, texShape)) {\n const packedSampler = `vec4 ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return ${glsl.texture2D}(${name}, uv);\n }`;\n\n return new GlslLibRoutine(packedSampler);\n }\n const packedTexShape = texShape;\n const valuesPerRow = Math.ceil(shape[1] / 2);\n const packedSampler = `vec4 ${funcName}(int row, int col) {\n vec2 uv = packedUVfrom2D(${packedTexShape[1]}, ${packedTexShape[0]}, ${valuesPerRow}, row, col);\n return ${glsl.texture2D}(${name}, uv);\n }`;\n const source = packedSampler;\n return new GlslLibRoutine(source, ['coordinates.packedUVfrom2D']);\n }\n\n /**\n * Packed 3D snippet.\n */\n protected getPackedSampler3D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const texShape = [inputLayout.width, inputLayout.height];\n const packedTexShape = [texShape[0], texShape[1]];\n const glsl = getGlsl(this.context.glContext.version);\n\n if (shape[0] === 1) 
{\n const squeezedShape = shape.slice(1);\n const keptDims = [1, 2];\n const newInputShape = squeezeInputShape(shape, squeezedShape);\n const params = ['b', 'row', 'col'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n const samplerRoutine = this.getPackedSamplerFromInput(funcName, name, newInputLayout);\n const packedSampler = `${samplerRoutine.routineBody}\n vec4 ${funcName}(int b, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n } `;\n const source = packedSampler;\n return new GlslLibRoutine(source, samplerRoutine.dependencies);\n }\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n\n const valuesPerRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[1] / 2);\n\n const packedSampler = `vec4 ${funcName}(int b, int row, int col) {\n vec2 uv = packedUVfrom3D(\n ${texNumC}, ${texNumR}, ${texelsInBatch}, ${valuesPerRow}, b, row, col);\n return ${glsl.texture2D}(${name}, uv);}`;\n const source = packedSampler;\n return new GlslLibRoutine(source, ['coordinates.packedUVfrom3D']);\n }\n /*\n * Packed ND snippet.\n */\n protected getPackedSamplerND(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const rank = shape.length;\n const texShape = [inputLayout.width, inputLayout.height];\n const glsl = getGlsl(this.context.glContext.version);\n\n const packedTexShape = [texShape[0], texShape[1]];\n const texNumR = packedTexShape[1];\n const texNumC = packedTexShape[0];\n const valuesPerRow = Math.ceil(shape[rank - 1] / 2);\n let texelsInBatch = valuesPerRow * Math.ceil(shape[rank - 2] / 2);\n let params = 'int b, int row, int col';\n let index = `b * ${texelsInBatch} + (row / 2) * ${valuesPerRow} + (col / 2)`;\n for (let b = 2; b < rank - 1; b++) {\n params = `int b${b}, ` + params;\n texelsInBatch *= shape[rank - b - 1];\n index = `b${b} * ${texelsInBatch} + ` + index;\n }\n const packedSampler = `vec4 ${funcName}(${params}) {\n int index = ${index};\n int texR = index / ${texNumC};\n int texC = index - texR * ${texNumC};\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}, ${texNumR});\n return ${glsl.texture2D}(${name}, uv);\n }`;\n const source = packedSampler;\n return new GlslLibRoutine(source);\n }\n\n /**\n * Unpacked scalar snippet.\n */\n protected getUnpackedSamplerScalar(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const [texNumR, texNumC] = [inputLayout.width, inputLayout.height];\n if (texNumR === 1 && texNumC === 1) {\n const source = `\n float ${funcName}() {\n return sampleTexture(${name}, halfCR);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const source = `\n float ${funcName}() {\n int offset_${name} = coordsToOffset(TexCoords, ${texNumR}, ${texNumC});\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, offset_${name});\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * Unpacked 1D snippet.\n */\n protected getUnpackedSampler1D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const tNumR = inputLayout.width;\n const tNumC = inputLayout.height;\n\n if (tNumC === 1 && tNumR === 1) {\n const source = `\n float ${funcName}(int index) {\n return 
sampleTexture(${name}, halfCR);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n if (tNumC === 1) {\n const source = `\n float ${funcName}(int index) {\n vec2 uv = vec2((float(index) + 0.5) / ${tNumR}.0, 0.5);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n if (tNumR === 1) {\n const source = `\n float ${funcName}(int index) {\n vec2 uv = vec2(0.5, (float(index) + 0.5) / ${tNumC}.0);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n const source = `\n float ${funcName}(int index) {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture']);\n }\n\n /**\n * Unpacked 2D snippet.\n */\n\n protected getUnpackedSampler2D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n\n // TODO: modify row/col order for other dimensions.\n const texShape = [inputLayout.height, inputLayout.width];\n\n if (texShape != null && ArrayUtil.arraysEqual(shape, texShape)) {\n const texNumR = texShape[1];\n const texNumC = texShape[0];\n const source = `\n float ${funcName}(int row, int col) {\n vec2 uv = (vec2(row, col) + halfCR) / vec2(${texNumR}.0, ${texNumC}.0);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const {newShape, keptDims} = squeezeShape(shape as number[]);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, squeezedShape);\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n\n const params = ['col', 'row'];\n const source = `\n ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n float ${funcName}(int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture']);\n }\n\n const texNumR = texShape[1];\n const texNumC = texShape[0];\n if (texNumC === 1) {\n const source = `\n float ${funcName}(int row, int col) {\n int offset_${name} = coordsToOffset(TexCoords, ${texNumR}, ${texNumC});\n float index = dot(vec3(row, col, offset_${name}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2(0.5, (index + 0.5) / ${texNumR}.0);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n if (texNumR === 1) {\n const source = `\n float ${funcName}(int row, int col) {\n int offset_${name} = coordsToOffset(TexCoords, ${texNumR}, ${texNumC});\n float index = dot(vec3(row, col, offset_${name}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2((index + 0.5) / ${texNumC}.0, 0.5);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n const source = `\n float ${funcName}(int row, int col) {\n int index = col * ${shape[1]} + row;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * Unpacked 3D snippet.\n 
*/\n\n protected getUnpackedSampler3D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride0 = shape[1] * shape[2];\n const stride1 = shape[2];\n\n const {newShape, keptDims} = squeezeShape(shape as number[]);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, squeezedShape);\n const params = ['batch', 'col', 'row'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n const routine = this.getUnpackedSamplerFromInput(funcName, name, newInputLayout);\n // TODO: revisit the logic here to make it simpler\n const revDims = keptDims.reverse();\n const source = `\n ${routine.routineBody}\n float ${funcName}(int batch, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, revDims)});\n }\n `;\n return new GlslLibRoutine(source, routine.dependencies);\n }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int depth, int row, int col) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = depth * ${stride0} + col * ${stride1} + row;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * Unpacked 4D snippet.\n */\n\n protected getUnpackedSampler4D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n //\n // TODO: re-enable this shortcut once the index calculation bug is fixed.\n //\n // const {newShape, keptDims} = squeezeShape(shape as number[]);\n // if (newShape.length < shape.length) {\n // const newInputShape = squeezeInputShape(shape, newShape);\n // const params = ['row', 'col', 'depth', 'depth2'];\n // // Deep copy of input texture layout.\n // const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n // newInputLayout.unpackedShape = newInputShape;\n // const source = `\n // ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n // float ${funcName}(int row, int col, int depth, int depth2) {\n // return ${funcName}(${getSqueezedParams(params, keptDims)});\n // }\n // `;\n // return new GlslLibRoutine(\n // source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n // }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int row, int col, int depth, int depth2) {\n int index = row * ${stride0} + col * ${stride1} +\n depth2 * ${stride2} + depth;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture']);\n }\n\n /**\n * Unpacked 5D snippet.\n */\n protected getUnpackedSampler5D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride3 = shape[4];\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n const {newShape, keptDims} = 
squeezeShape(shape as number[]);\n if (newShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n\n const source = `\n ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.uvFromFlat']);\n }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth3 * ${stride3} + depth2;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.uvFromFlat']);\n }\n\n /**\n * Unpacked 6D snippet.\n */\n protected getUnpackedSampler6D(funcName: string, name: string, inputLayout: TextureLayout): GlslLibRoutine {\n const shape = inputLayout.unpackedShape;\n const stride4 = shape[5];\n const stride3 = shape[4] * stride4;\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n const {newShape, keptDims} = squeezeShape(shape as number[]);\n if (newShape.length < shape.length) {\n const newInputShape = squeezeInputShape(shape, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3', 'depth4'];\n // Deep copy of input texture layout.\n const newInputLayout: TextureLayout = JSON.parse(JSON.stringify(inputLayout));\n newInputLayout.unpackedShape = newInputShape;\n\n const source = `\n ${this.getUnpackedSamplerFromInput(funcName, name, newInputLayout).routineBody}\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n return new GlslLibRoutine(source, ['coordinates.sampleTexture', 'coordinates.uvFromFlat']);\n }\n\n const texNumR = inputLayout.width;\n const texNumC = inputLayout.height;\n const source = `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 * ${stride4} + depth4;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${name}, uv);\n }\n `;\n return new GlslLibRoutine(\n source, ['coordinates.uvFromFlat', 'coordinates.sampleTexture', 'coordinates.coordsToOffset']);\n }\n\n /**\n * This is the main function to map from the given texture coordinates (s,t)\n * to logical indices for the output\n * There will only be one single variation of this\n * Also see coordsToOffset and offsetToIndices for input-specific versions\n */\n protected toVec(): {[name: string]: GlslLibRoutine} {\n const output = this.context.outputTextureLayout;\n const rank = output.shape.length;\n const strides = output.strides;\n const xScale = output.width;\n const yScale = output.height;\n\n const stridesBlock = [];\n for (let i = 0; i < rank - 1; ++i) {\n stridesBlock.push(`\n c[${i}] = offset / ${strides[i]};`);\n stridesBlock.push(`\n offset -= c[${i}] * 
${strides[i]};`);\n }\n stridesBlock.push(`\n c[${rank - 1}] = offset;`);\n const body = `\n void toVec(vec2 texCoords, out int c[${rank}]) {\n int offset = coordsToOffset(texCoords, ${xScale}, ${yScale});\n ${stridesBlock.join('')}\n }\n void toVec(int offset, out int c[${rank}]) {\n ${stridesBlock.join('')}\n }\n `;\n return {toVec: new GlslLibRoutine(body, ['coordinates.coordsToOffset'])};\n }\n /**\n * These are value getter functions generated for each input\n * Each function is hardwired to the name and dimensions of the input\n * An '_T' variation is also produced which accesses values as if the\n * input was transposed\n */\n protected valueFrom(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const layout = this.context.inputTextureLayouts[i];\n const shape = layout.unpackedShape.length > 0 ? layout.unpackedShape : layout.shape;\n const rank = shape.length;\n let funcName = `_${name}`;\n result[funcName] = new GlslLibRoutine(\n this.getValueFromSingle(name, rank, layout.width, layout.height, false),\n [`shapeUtils.indicesToOffset${funcName}`, 'coordinates.offsetToCoords', 'fragcolor.getColorAsFloat']);\n funcName = funcName + '_T';\n result[funcName] = new GlslLibRoutine(\n this.getValueFromSingle(name, rank, layout.width, layout.height, true),\n [`shapeUtils.indicesToOffset${funcName}`, 'coordinates.offsetToCoords', 'fragcolor.getColorAsFloat']);\n });\n return result;\n }\n /**\n * Produces one value getter function for the name and rank given\n * If a transpose is set proper offsetToCoords mapping will be used\n * @param name name of the function\n * @param rank rank of the input\n * @param transpose whether or not should generate a transpose variation\n */\n protected getValueFromSingle(varName: string, rank: number, width: number, height: number, transpose: boolean):\n string {\n let name = `_${varName}`;\n if (transpose) {\n name = name + '_T';\n }\n const glsl = getGlsl(this.context.glContext.version);\n return `\n float ${name}(int m[${rank}]) {\n int offset = indicesToOffset${name}(m);\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n float value = getColorAsFloat(${glsl.texture2D}(${varName}, coords));\n return value;\n }\n `;\n }\n\n /**\n * Produces a packed value getter function for the name and rank given\n * If a transpose is set proper offsetToCoords mapping will be used\n * @param name name of the function\n * @param rank rank of the input\n * @param transpose whether or not should generate a transpose variation\n */\n protected getPackedValueFrom(varName: string, rank: number, width: number, height: number, transpose: boolean):\n string {\n let name = `_${varName}_Pack`;\n if (transpose) {\n name = name + '_T';\n }\n const glsl = getGlsl(this.context.glContext.version);\n return `\n vec4 ${name}(int m[${rank}]) {\n int offset = indicesToOffset_${varName}(m);\n vec2 coords = offsetToCoords(offset, ${width}, ${height});\n return ${glsl.texture2D}(${varName}, coords);\n }\n `;\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\n\n/**\n * This GLSL library handles routines converting\n * float32 to/from Unsigned byte or float 16\n */\nexport class EncodingGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {...this.encodeFloat32(), ...this.decodeFloat32()};\n }\n getCustomTypes(): {[name: string]: string} {\n return {};\n }\n protected encodeFloat32(): {[name: string]: GlslLibRoutine} {\n return {\n encode: new GlslLibRoutine(`highp vec4 encode(highp float f) {\n return vec4(f, 0.0, 0.0, 0.0);\n }\n `)\n };\n }\n protected decodeFloat32(): {[name: string]: GlslLibRoutine} {\n return {\n decode: new GlslLibRoutine(`highp float decode(highp vec4 rgba) {\n return rgba.r;\n }\n `)\n };\n }\n /**\n * returns the routine to encode encode a 32bit float to a vec4 (of unsigned bytes)\n * @credit: https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float\n */\n protected encodeUint8(): {[name: string]: GlslLibRoutine} {\n const endianness = EncodingGlslLib.isLittleEndian() ? 'rgba.rgba=rgba.abgr;' : '';\n return {\n encode: new GlslLibRoutine(`\n highp vec4 encode(highp float f) {\n highp float F = abs(f);\n highp float Sign = step(0.0,-f);\n highp float Exponent = floor(log2(F));\n highp float Mantissa = (exp2(- Exponent) * F);\n Exponent = floor(log2(F) + 127.0) + floor(log2(Mantissa));\n highp vec4 rgba;\n rgba[0] = 128.0 * Sign + floor(Exponent*exp2(-1.0));\n rgba[1] = 128.0 * mod(Exponent,2.0) + mod(floor(Mantissa*128.0),128.0);\n rgba[2] = floor(mod(floor(Mantissa*exp2(23.0 -8.0)),exp2(8.0)));\n rgba[3] = floor(exp2(23.0)*mod(Mantissa,exp2(-15.0)));\n ${endianness}\n rgba = rgba / 255.0; // values need to be normalized to [0,1]\n return rgba;\n }\n `)\n };\n }\n /**\n * returns the routine to encode a vec4 of unsigned bytes to float32\n * @credit: https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float\n */\n protected decodeUint8(): {[name: string]: GlslLibRoutine} {\n const endianness = EncodingGlslLib.isLittleEndian() ? 'rgba.rgba=rgba.abgr;' : '';\n return {\n decode: new GlslLibRoutine(`\n highp float decode(highp vec4 rgba) {\n rgba = rgba * 255.0; // values need to be de-normalized from [0,1] to [0,255]\n ${endianness}\n highp float Sign = 1.0 - step(128.0,rgba[0])*2.0;\n highp float Exponent = 2.0 * mod(rgba[0],128.0) + step(128.0,rgba[1]) - 127.0;\n highp float Mantissa = mod(rgba[1],128.0)*65536.0 + rgba[2]*256.0 +rgba[3] + float(0x800000);\n highp float Result = Sign * exp2(Exponent) * (Mantissa * exp2(-23.0 ));\n return Result;\n }\n `)\n };\n }\n /**\n * Determines if the machine is little endian or not\n * @credit: https://gist.github.com/TooTallNate/4750953\n */\n static isLittleEndian(): boolean {\n const b = new ArrayBuffer(4);\n const a = new Uint32Array(b);\n const c = new Uint8Array(b);\n a[0] = 0xdeadbeef;\n if (c[0] === 0xef) {\n return true;\n }\n if (c[0] === 0xde) {\n return false;\n }\n throw new Error('unknown endianness');\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\nimport {getGlsl} from './glsl-source';\n\n/**\n * This GLSL library handles routines around reading a texlet and writing to it\n * Reading and writing could be more than just dealing with one channel\n * It may require encoding/decoding to/from 4 channels into one\n */\nexport class FragColorGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {...this.setFragColor(), ...this.getColorAsFloat()};\n }\n getCustomTypes(): {[name: string]: string} {\n return {};\n }\n protected setFragColor(): {[name: string]: GlslLibRoutine} {\n const glsl = getGlsl(this.context.glContext.version);\n return {\n setFragColor: new GlslLibRoutine(\n `\n void setFragColor(float value) {\n ${glsl.output} = encode(value);\n }\n `,\n ['encoding.encode'])\n };\n }\n protected getColorAsFloat(): {[name: string]: GlslLibRoutine} {\n return {\n getColorAsFloat: new GlslLibRoutine(\n `\n float getColorAsFloat(vec4 color) {\n return decode(color);\n }\n `,\n ['encoding.decode'])\n };\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\n\n/**\n * GLSL Library responsible for data types and routines for manipulating\n * coordinates and mapping to/from tensor indices\n */\nexport class ShapeUtilsGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {\n ...this.bcastIndex(),\n ...this.bcastMatmulIndex(),\n ...this.offsetToIndices(),\n ...this.indicesToOffset(),\n ...this.incrementIndices()\n };\n }\n getCustomTypes() {\n return {};\n }\n protected bcastIndex(): {[name: string]: GlslLibRoutine} {\n const outputRank = this.context.outputTextureLayout.shape.length;\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].unpackedShape;\n if (shape.length <= outputRank) {\n const rank = shape.length;\n const dimOffset = outputRank - rank;\n const funcName = `bcastIndices_${name}`;\n let block = '';\n for (let i = 0; i < rank; ++i) {\n block += `\n realIndices[${i}] = int( mod(float(bcastedIndices[${dimOffset + i}]), ${shape[i]}.0) );\n `;\n }\n const body = `\n void ${funcName} (int bcastedIndices[${outputRank}], out int realIndices[${rank}]) {\n ${block}\n }\n `;\n result[funcName] = new GlslLibRoutine(body);\n }\n });\n return result;\n }\n protected bcastMatmulIndex(): {[name: string]: GlslLibRoutine} {\n const outputRank = this.context.outputTextureLayout.shape.length;\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n if (!(shape.length < 2 || shape.length > outputRank)) {\n const rank = shape.length;\n const dimOffset = outputRank - rank;\n const funcName = `bcastMatmulIndices_${name}`;\n let block = '';\n for (let i = 0; i < rank - 2; ++i) {\n block += `\n realIndices[${i}] = int( mod(float(bcastedIndices[${dimOffset + i}]), ${shape[i]}.0) );\n `;\n }\n const body = `\n void ${funcName}(int bcastedIndices[${outputRank}], out int realIndices[${rank}]) {\n ${block}\n realIndices[${rank - 1}] = bcastedIndices[${outputRank - 1}];\n 
realIndices[${rank - 2}] = bcastedIndices[${outputRank - 2}];\n }\n `;\n result[funcName] = new GlslLibRoutine(body);\n }\n });\n return result;\n }\n protected indicesToOffset(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n const strides = this.context.inputTextureLayouts[i].strides;\n const rank = shape.length;\n let funcName = `indicesToOffset_${name}`;\n result[funcName] = new GlslLibRoutine(ShapeUtilsGlslLib.indexToOffsetSingle(funcName, rank, strides));\n funcName = `indicesToOffset_${name}_T`;\n result[funcName] =\n new GlslLibRoutine(ShapeUtilsGlslLib.indexToOffsetSingle(funcName, rank, strides.slice().reverse()));\n });\n return result;\n }\n static indexToOffsetSingle(name: string, rank: number, strides: readonly number[]): string {\n let block = '';\n for (let i = rank - 1; i >= 0; --i) {\n block += `\n offset += indices[${i}] * ${strides[i]};\n `;\n }\n return `\n int ${name}(int indices[${rank}]) {\n int offset = 0;\n ${block}\n return offset;\n }\n `;\n }\n protected offsetToIndices(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n const strides = this.context.inputTextureLayouts[i].strides;\n const rank = shape.length;\n let funcName = `offsetToIndices_${name}`;\n result[funcName] = new GlslLibRoutine(ShapeUtilsGlslLib.offsetToIndicesSingle(funcName, rank, strides));\n funcName = `offsetToIndices_${name}_T`;\n result[funcName] =\n new GlslLibRoutine(ShapeUtilsGlslLib.offsetToIndicesSingle(funcName, rank, strides.slice().reverse()));\n });\n return result;\n }\n static offsetToIndicesSingle(name: string, rank: number, strides: readonly number[]): string {\n const stridesBlock = [];\n for (let i = 0; i < rank - 1; ++i) {\n stridesBlock.push(`\n indices[${i}] = offset / ${strides[i]};`);\n stridesBlock.push(`\n offset -= indices[${i}] * ${strides[i]};`);\n }\n stridesBlock.push(`\n indices[${rank - 1}] = offset;`);\n return `\n void ${name}(int offset, out int indices[${rank}]) {\n ${stridesBlock.join('')}\n }\n `;\n }\n protected incrementIndices(): {[name: string]: GlslLibRoutine} {\n const result: {[name: string]: GlslLibRoutine} = {};\n this.context.programInfo.inputNames.forEach((name, i) => {\n const shape = this.context.inputTextureLayouts[i].shape;\n const rank = shape.length;\n const funcName = `incrementIndices_${name}`;\n let shapeInit = '';\n for (let i = 0; i < rank; ++i) {\n shapeInit += `\n shape[${i}] = ${shape[i]};`;\n }\n const body = `\n void ${funcName}(int axis, out int indices[${rank}]) {\n int shape[${rank}];\n ${shapeInit};\n for(int i = ${rank} -1 ; i >= 0; --i) {\n if(i > axis) continue;\n indices[i] += 1;\n if(indices[i] < shape[i]) {\n break;\n }\n indices[i] = 0;\n }\n }\n `;\n result[funcName] = new GlslLibRoutine(body);\n });\n return result;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutine} from './glsl-definitions';\n\n/**\n * GLSL Library responsible for vec routines\n * Vec is an varible length int array. 
The length is fixed at the time of\n * generating the library functions from the dimensions of the output.\n */\nexport class VecGlslLib extends GlslLib {\n constructor(context: GlslContext) {\n super(context);\n }\n getCustomTypes(): {[name: string]: string} {\n return {};\n }\n getFunctions(): {[name: string]: GlslLibRoutine} {\n return {...this.binaryVecFunctions(), ...this.copyVec(), ...this.setVecItem(), ...this.getVecItem()};\n }\n protected binaryVecFunctions(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n const nameOp: {[name: string]: string} = {add: '+=', sub: '-=', mul: '*=', div: '/='};\n const result: {[name: string]: GlslLibRoutine} = {};\n for (const name in nameOp) {\n const fname = `${name}Vec`;\n let assignmentBlock = '';\n for (let i = 0; i < rank; ++i) {\n assignmentBlock += `\n dest[${i}] ${nameOp[name]} src[${i}];\n `;\n }\n const body = `\n void ${fname}(int src[${rank}], out int dest[${rank}]) {\n ${assignmentBlock}\n }\n `;\n result[fname] = new GlslLibRoutine(body);\n }\n\n return result;\n }\n protected copyVec(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n let assignmentBlock = '';\n for (let i = 0; i < rank; ++i) {\n assignmentBlock += `\n dest[${i}] = src[${i}];\n `;\n }\n const body = `\n void copyVec(int src[${rank}], out int dest[${rank}]) {\n ${assignmentBlock}\n }\n `;\n return {copyVec: new GlslLibRoutine(body)};\n }\n\n protected setVecItem(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n let block = `\n if(index < 0)\n index =${rank} + index;\n if (index == 0)\n m[0] = value;\n `;\n for (let i = 1; i < rank - 1; ++i) {\n block += `\n else if (index == ${i})\n m[${i}] = value;\n `;\n }\n block += `\n else\n m[${rank - 1}] = value;\n `;\n const body = `\n void setVecItem(out int m[${rank}], int index, int value) {\n ${block}\n }\n `;\n return {setVecItem: new GlslLibRoutine(body)};\n }\n protected getVecItem(): {[name: string]: GlslLibRoutine} {\n const outputLayout = this.context.outputTextureLayout;\n const rank = outputLayout.shape.length;\n let block = `\n if(index < 0)\n index = ${rank} + index;\n if (index == 0)\n return m[0];\n `;\n for (let i = 1; i < rank - 1; ++i) {\n block += `\n else if (index == ${i})\n return m[${i}];\n `;\n }\n block += `\n else\n return m[${rank - 1}];\n `;\n const body = `\n int getVecItem(int m[${rank}], int index) {\n ${block}\n }\n `;\n return {getVecItem: new GlslLibRoutine(body)};\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CoordsGlslLib} from './glsl-coordinate-lib';\nimport {GlslContext, GlslLib} from './glsl-definitions';\nimport {EncodingGlslLib} from './glsl-encoding-lib';\nimport {FragColorGlslLib} from './glsl-fragcolor-lib';\nimport {ShapeUtilsGlslLib} from './glsl-shape-utils-lib';\nimport {VecGlslLib} from './glsl-vec-lib';\n\nexport const glslRegistry: {[name: string]: new (context: GlslContext) => GlslLib} = {\n 'encoding': EncodingGlslLib,\n 'fragcolor': FragColorGlslLib,\n 'vec': VecGlslLib,\n 'shapeUtils': ShapeUtilsGlslLib,\n 'coordinates': CoordsGlslLib,\n // 'arrays': ArrayGlslSLib\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {GlslContext, GlslLib, GlslLibRoutineNode, TopologicalSortGlslRoutines} from './glsl-definitions';\nimport {replaceInlines} from './glsl-function-inliner';\nimport {glslRegistry} from './glsl-registered-libs';\nimport {getDefaultFragShaderMain, getFragShaderPreamble} from './glsl-source';\nimport {ProgramInfo, TextureLayout, VariableInfo} from './types';\nimport {WebGLContext} from './webgl-context';\n\n/**\n * Preprocessor for the additions to the GLSL language\n * It deals with:\n * @include directives\n * @inline\n * Loop unrolling (not implemented)\n * Macro resolution (not implemented)\n */\nexport class GlslPreprocessor {\n readonly context: GlslContext;\n readonly libs: {[name: string]: GlslLib} = {};\n readonly glslLibRoutineDependencyGraph: {[routineName: string]: GlslLibRoutineNode} = {};\n\n constructor(\n glContext: WebGLContext, programInfo: ProgramInfo, inputTextureLayouts: TextureLayout[],\n outputTextureLayout: TextureLayout) {\n this.context = new GlslContext(glContext, programInfo, inputTextureLayouts, outputTextureLayout);\n\n // construct GlslLibs\n Object.keys(glslRegistry).forEach((name: string) => {\n const lib = new glslRegistry[name](this.context);\n this.libs[name] = lib;\n });\n\n // construct GlslRoutineDependencyGraph\n const map = this.glslLibRoutineDependencyGraph;\n for (const libName in this.libs) {\n const lib = this.libs[libName];\n const routinesInLib = lib.getFunctions();\n for (const routine in routinesInLib) {\n const key = libName + '.' + routine;\n let currentNode: GlslLibRoutineNode;\n if (map[key]) {\n currentNode = map[key];\n currentNode.routineBody = routinesInLib[routine].routineBody;\n } else {\n currentNode = new GlslLibRoutineNode(key, routinesInLib[routine].routineBody);\n map[key] = currentNode;\n }\n const dependencies = routinesInLib[routine].dependencies;\n if (dependencies) {\n for (let i = 0; i < dependencies.length; ++i) {\n if (!map[dependencies[i]]) {\n const node = new GlslLibRoutineNode(dependencies[i]);\n map[dependencies[i]] = node;\n currentNode.addDependency(node);\n } else {\n currentNode.addDependency(map[dependencies[i]]);\n }\n }\n }\n }\n }\n }\n\n preprocess(): string {\n const programInfo = this.context.programInfo;\n let source = programInfo.shaderSource;\n\n // append main() function\n if (!this.context.programInfo.hasMain) {\n source = `${source}\n ${getDefaultFragShaderMain(this.context.glContext.version, this.context.outputTextureLayout.shape.length)}`;\n }\n // replace inlines\n source = replaceInlines(source);\n\n // concat final source string\n return `${getFragShaderPreamble(this.context.glContext.version)}\n ${this.getUniforms(programInfo.inputNames, programInfo.variables)}\n ${this.getImports(source)}\n ${source}`;\n }\n\n protected getImports(script: string): string {\n const routinesIncluded = this.selectGlslLibRoutinesToBeIncluded(script);\n\n if (routinesIncluded.length === 0) {\n return '';\n }\n\n let routines = '';\n for (let i = 0; i < routinesIncluded.length; ++i) {\n if (routinesIncluded[i].routineBody) {\n routines += routinesIncluded[i].routineBody + '\\n';\n } else {\n throw new Error(`Missing body for the Glsl Library routine: ${routinesIncluded[i].name}`);\n }\n }\n\n return routines;\n }\n private selectGlslLibRoutinesToBeIncluded(script: string): GlslLibRoutineNode[] {\n const nodes: GlslLibRoutineNode[] = [];\n\n Object.keys(this.glslLibRoutineDependencyGraph).forEach(classAndRoutine => {\n const routine = 
classAndRoutine.split('.')[1];\n if (script.indexOf(routine) !== -1) {\n nodes.push(this.glslLibRoutineDependencyGraph[classAndRoutine]);\n }\n });\n\n return TopologicalSortGlslRoutines.returnOrderedNodes(nodes);\n }\n\n protected getUniforms(samplers?: string[], variables?: VariableInfo[]): string {\n const uniformLines: string[] = [];\n if (samplers) {\n for (const sampler of samplers) {\n uniformLines.push(`uniform sampler2D ${sampler};`);\n }\n }\n if (variables) {\n for (const variable of variables) {\n uniformLines.push(\n `uniform ${variable.type} ${variable.name}${variable.arrayLength ? `[${variable.arrayLength}]` : ''};`);\n }\n }\n return uniformLines.join('\\n');\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {Logger, Profiler} from '../../instrument';\n\nimport {GlslPreprocessor} from './glsl-preprocessor';\nimport {getVertexShaderSource} from './glsl-source';\nimport {TextureLayoutStrategy} from './texture-layout-strategy';\nimport {Artifact, ProgramInfo, ProgramVariable, TextureData, TextureLayout, VariableInfo} from './types';\nimport {WebGLContext} from './webgl-context';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n vertexShader: WebGLShader;\n attributesBound: boolean;\n\n constructor(\n public profiler: Readonly, public glContext: WebGLContext,\n public textureLayoutStrategy: TextureLayoutStrategy) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: TextureData[], output: TextureData): void {\n this.profiler.event('op', `ProgramManager.run ${buildArtifact.programInfo.name ?? 'unknown kernel'}`, () => {\n const gl = this.glContext.gl;\n const program = buildArtifact.program;\n gl.useProgram(program);\n try {\n this.bindOutput(output);\n if (!this.attributesBound) {\n this.bindAttributes(buildArtifact.attribLocations);\n }\n this.bindUniforms(buildArtifact.uniformLocations, buildArtifact.programInfo.variables ?? 
[], inputs);\n } catch (err) {\n Logger.error('ProgramManager', buildArtifact.programInfo.shaderSource);\n throw err;\n }\n this.profiler.event('backend', 'GlContext.draw()', () => {\n this.glContext.draw();\n });\n }, this.glContext);\n }\n dispose(): void {\n if (this.vertexShader) {\n this.glContext.deleteShader(this.vertexShader);\n }\n this.repo.forEach(a => this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, inputTextureLayouts: TextureLayout[], outputTextureLayout: TextureLayout): Artifact {\n return this.profiler.event('backend', 'ProgramManager.build', () => {\n const preprocessor = new GlslPreprocessor(this.glContext, programInfo, inputTextureLayouts, outputTextureLayout);\n const fragScript = preprocessor.preprocess();\n const program = this.compile(fragScript);\n const artifact = {\n programInfo,\n program,\n uniformLocations: this.getUniformLocations(\n program, preprocessor.context.programInfo.inputNames, preprocessor.context.programInfo.variables),\n attribLocations: this.getAttribLocations(program)\n };\n return artifact;\n });\n }\n protected compile(fragShaderScript: string): WebGLProgram {\n if (!this.vertexShader) {\n Logger.verbose('ProrgramManager', 'Compiling and caching Vertex shader for the first time');\n const vertexShaderScript = getVertexShaderSource(this.glContext.version);\n this.vertexShader = this.glContext.compileShader(vertexShaderScript, this.glContext.gl.VERTEX_SHADER);\n }\n if (env.debug) {\n Logger.verbose('ProrgramManager', `FragShader:\n${fragShaderScript}\n`);\n }\n const fragShader = this.glContext.compileShader(fragShaderScript, this.glContext.gl.FRAGMENT_SHADER);\n const program = this.glContext.createProgram(this.vertexShader, fragShader);\n this.glContext.deleteShader(fragShader);\n return program;\n }\n bindOutput(td: TextureData): void {\n const width = td.width;\n const height = td.height;\n Logger.verbose(\n 'ProrgramManager',\n `Binding output texture to Framebuffer: w/h=${width}/${height}, shape=${td.shape}, type=${td.tensor.type}`);\n this.glContext.attachFramebuffer(td.texture, width, height);\n }\n bindAttributes(attribLocations: Artifact.AttribLocations): void {\n const positionHandle = attribLocations.position;\n const textureCoordHandle = attribLocations.textureCoord;\n this.glContext.setVertexAttributes(positionHandle, textureCoordHandle);\n this.attributesBound = true;\n }\n bindUniforms(uniformLocations: Artifact.UniformLocations, variables: ProgramVariable[], textures: TextureData[]):\n void {\n const gl = this.glContext.gl;\n let texturePosition = 0;\n for (const {name, type, location, arrayLength} of uniformLocations) {\n const value = variables.find(v => v.name === name)?.data;\n if (type !== 'sampler2D' && !value) {\n throw new Error(`variable '${name}' does not have data defined in program info`);\n }\n switch (type) {\n case 'sampler2D':\n this.bindTexture(textures[texturePosition], location, texturePosition);\n texturePosition++;\n break;\n case 'float':\n if (arrayLength) {\n gl.uniform1fv(location, value as number[]);\n } else {\n gl.uniform1f(location, value as number);\n }\n break;\n case 'int':\n if (arrayLength) {\n gl.uniform1iv(location, value as number[]);\n } else {\n gl.uniform1i(location, value as number);\n }\n break;\n default:\n throw new Error(`Uniform not implemented: ${type}`);\n }\n }\n }\n bindTexture(td: TextureData, uniformHandle: WebGLUniformLocation, position: number): void {\n this.glContext.bindTextureToUniform(td.texture, position, uniformHandle);\n }\n 
getAttribLocations(program: WebGLProgram): Artifact.AttribLocations {\n return {\n position: this.getAttribLocation(program, 'position'),\n textureCoord: this.getAttribLocation(program, 'textureCoord')\n };\n }\n getUniformLocations(program: WebGLProgram, samplers?: string[], variables?: VariableInfo[]):\n Artifact.UniformLocations {\n const uniformLocations: Artifact.UniformLocations = [];\n if (samplers) {\n for (const sampler of samplers) {\n uniformLocations.push({name: sampler, type: 'sampler2D', location: this.getUniformLocation(program, sampler)});\n }\n }\n if (variables) {\n for (const variable of variables) {\n uniformLocations.push({...variable, location: this.getUniformLocation(program, variable.name)});\n }\n }\n return uniformLocations;\n }\n getUniformLocation(program: WebGLProgram, name: string): WebGLUniformLocation {\n const gl = this.glContext.gl;\n const reference = gl.getUniformLocation(program, name);\n if (reference === null) {\n throw new Error(`Uniform ${name} not found.`);\n }\n return reference;\n }\n getAttribLocation(program: WebGLProgram, name: string): number {\n const gl = this.glContext.gl;\n const attributeLocation: number = gl.getAttribLocation(program, name);\n return attributeLocation;\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger, Profiler} from '../../instrument';\nimport {Tensor} from '../../tensor';\n\nimport {Encoder, EncoderUsage} from './texture-data-encoder';\nimport {TextureLayoutStrategy} from './texture-layout-strategy';\nimport {TextureData, TextureLayout} from './types';\nimport {WebGLContext} from './webgl-context';\n\nexport interface TextureManagerConfig {\n reuseTextures?: boolean;\n}\n\n/**\n * TextureManager is the mainly responsible for caching Textures\n * Textures are cached in 2 levels:\n * 1. the texures which are associated with a dataId (from Tensor)\n * Caching these is crucial to performance. These are In-use Textures\n * 2. textures which are not in use by any current ProgramInfo/Tensor\n * These are called Free Textures\n * TextureManager is also used to help creating textures. 
For this it\n * uses WebGLContext and TextureLayoutStrategy\n */\nexport class TextureManager {\n private readonly inUseTextures: Map;\n private readonly idleTextures: Map;\n private readonly textureLookup: Map;\n private readonly pendingRead: Map void>> = new Map();\n\n constructor(\n public glContext: WebGLContext, public layoutStrategy: TextureLayoutStrategy, public profiler: Readonly,\n private config: TextureManagerConfig) {\n if (config.reuseTextures) {\n this.inUseTextures = new Map();\n this.idleTextures = new Map();\n this.textureLookup = new Map();\n }\n }\n createTextureFromLayout(\n dataType: Tensor.DataType, layout: TextureLayout, data?: Tensor.NumberType, usage?: EncoderUsage) {\n const textureDataType = this.toEncoderType(dataType);\n\n const encoder = this.glContext.getEncoder(textureDataType, layout.channels || 1, usage);\n if (layout.isPacked && usage === EncoderUsage.UploadOnly) {\n throw new Error('not implemented');\n }\n const width = layout.width;\n const height = layout.height;\n\n let key: string|undefined;\n let inUseTextures: WebGLTexture[]|undefined;\n if (this.config.reuseTextures) {\n key = `${width}x${height}_${encoder.format}_${encoder.internalFormat}_${encoder.textureType}`;\n inUseTextures = this.inUseTextures.get(key);\n if (!inUseTextures) {\n inUseTextures = [];\n this.inUseTextures.set(key, inUseTextures);\n }\n\n const idleTextures = this.idleTextures.get(key);\n if (idleTextures && idleTextures.length > 0) {\n const texture = idleTextures.pop()!;\n inUseTextures.push(texture);\n if (usage === EncoderUsage.UploadOnly) {\n this.glContext.updateTexture(texture, width, height, encoder, this.toTextureData(dataType, data)!);\n }\n return texture;\n }\n }\n\n Logger.verbose('TextureManager', `Creating new texture of size ${layout.width}x${layout.height}`);\n const texture = this.glContext.allocateTexture(width, height, encoder, this.toTextureData(dataType, data));\n\n if (this.config.reuseTextures) {\n inUseTextures!.push(texture);\n this.textureLookup.set(texture, key!);\n }\n return texture;\n }\n readTexture(td: TextureData, dataType: Tensor.DataType, channels?: number): Tensor.NumberType {\n if (!channels) {\n channels = 1;\n }\n return this.profiler.event('backend', 'TextureManager.readTexture', () => {\n const dataSize = td.shape.reduce((a, b) => a * b) * channels!;\n const data = this.glContext.readTexture(\n td.texture, td.width, td.height, dataSize, this.toEncoderType(dataType), channels!);\n return this.toTensorData(dataType, data);\n });\n }\n async readTextureAsync(td: TextureData, dataType: Tensor.DataType, channels?: number): Promise {\n const dataId = td.tensor.dataId;\n if (!channels) {\n channels = 1;\n }\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise(resolve => subscribers?.push(resolve));\n }\n return this.profiler.event('backend', 'TextureManager.readTextureAsync', async () => {\n this.pendingRead.set(dataId, []);\n const dataSize = td.shape.reduce((a, b) => a * b) * channels!;\n // add a fence waiting for the data to be ready\n await this.glContext.createAndWaitForFence();\n const data = this.glContext.readTexture(\n td.texture, td.width, td.height, dataSize, this.toEncoderType(dataType), channels!);\n const tensorData = this.toTensorData(dataType, data);\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n subscribers?.forEach(resolve => resolve(tensorData));\n return tensorData;\n });\n }\n readUint8TextureAsFloat(td: TextureData): 
Float32Array {\n return this.profiler.event('backend', 'TextureManager.readUint8TextureAsFloat', () => {\n const dataSize = td.shape.reduce((a, b) => a * b);\n const data = this.glContext.readTexture(td.texture, td.width, td.height, dataSize * 4, 'byte', 4);\n return new Float32Array(data.buffer, data.byteOffset, dataSize);\n });\n }\n releaseTexture(textureData: TextureData, deleteTexture?: boolean): void {\n let key: string|undefined;\n if (this.config.reuseTextures) {\n key = this.textureLookup.get(textureData.texture);\n if (key) {\n if (deleteTexture) {\n this.textureLookup.delete(key);\n }\n const inUseTextures = this.inUseTextures.get(key);\n if (inUseTextures) {\n const index = inUseTextures.indexOf(textureData.texture);\n if (index !== -1) {\n inUseTextures.splice(index, 1);\n let idleTextures = this.idleTextures.get(key);\n if (!idleTextures) {\n idleTextures = [];\n this.idleTextures.set(key, idleTextures);\n }\n idleTextures.push(textureData.texture);\n }\n }\n }\n }\n\n if (!key || deleteTexture) {\n Logger.verbose('TextureManager', `Deleting texture of size ${textureData.width}x${textureData.height}`);\n this.glContext.deleteTexture(textureData.texture);\n }\n }\n toTensorData(dataType: Tensor.DataType, data: Encoder.DataArrayType): Tensor.NumberType {\n switch (dataType) {\n case 'int16':\n return data instanceof Int16Array ? data : Int16Array.from(data);\n case 'int32':\n return data instanceof Int32Array ? data : Int32Array.from(data);\n case 'int8':\n return data instanceof Int8Array ? data : Int8Array.from(data);\n case 'uint16':\n return data instanceof Uint16Array ? data : Uint16Array.from(data);\n case 'uint32':\n return data instanceof Uint32Array ? data : Uint32Array.from(data);\n case 'uint8':\n case 'bool':\n return data instanceof Uint8Array ? data : Uint8Array.from(data);\n case 'float32':\n return data instanceof Float32Array ? data : Float32Array.from(data);\n case 'float64':\n return data instanceof Float64Array ? data : Float64Array.from(data);\n default:\n throw new Error(`TensorData type ${dataType} is not supported`);\n }\n }\n toTextureData(_dataType: Tensor.DataType, data: Tensor.NumberType|undefined): Encoder.DataArrayType|undefined {\n if (!data) {\n return undefined;\n }\n return (data instanceof Float32Array) ? data : new Float32Array(data);\n /*\n switch (dataType) {\n case 'int16':\n case 'int32':\n case 'uint16':\n case 'uint32':\n return (data.constructor === Uint32Array) ? data as Uint32Array : new Uint32Array(data);\n case 'int8':\n case 'uint8':\n case 'bool':\n return (data.constructor === Uint8Array) ? data as Uint8Array : new Uint8Array(data);\n case 'float32':\n case 'float64':\n return (data.constructor === Float32Array) ? data as Float32Array : new Float32Array(data);\n default:\n throw new Error(`TensorData type ${dataType} is not supported`);\n }\n */\n }\n toEncoderType(_dataType: Tensor.DataType): Encoder.DataType {\n return 'float';\n // switch (dataType) {\n // case 'int16':\n // case 'int32':\n // case 'uint16':\n // case 'uint32':\n // return 'int';\n // case 'uint8':\n // case 'bool':\n // return 'byte';\n // case 'float32':\n // case 'float64':\n // return 'float';\n // default:\n // throw new Error(`TensorData type ${dataType} is not supported`);\n // }\n }\n clearActiveTextures(): void {\n this.glContext.clearActiveTextures();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {SessionHandler} from '../../backend';\nimport {Graph} from '../../graph';\nimport {Logger} from '../../instrument';\nimport {Operator} from '../../operators';\nimport {OpSet, resolveOperator} from '../../opset';\nimport {Session} from '../../session';\nimport {Tensor} from '../../tensor';\nimport {WebGLBackend} from '../backend-webgl';\n\nimport {WebGLInferenceHandler} from './inference-handler';\nimport {WEBGL_OP_RESOLVE_RULES} from './op-resolve-rules';\nimport {ProgramManager} from './program-manager';\nimport {PreferLogicalStrategy, TextureLayoutStrategy} from './texture-layout-strategy';\nimport {TextureManager} from './texture-manager';\nimport {TextureData} from './types';\n\nexport class WebGLSessionHandler implements SessionHandler {\n programManager: ProgramManager;\n textureManager: TextureManager;\n layoutStrategy: TextureLayoutStrategy;\n packedTextureDataCache: Map;\n unpackedTextureDataCache: Map;\n pack2unpackMap: Map;\n unpack2packMap: Map;\n initializers: Set;\n pack?: boolean;\n\n constructor(public readonly backend: WebGLBackend, public readonly context: Session.Context) {\n this.layoutStrategy = new PreferLogicalStrategy(backend.glContext.maxTextureSize);\n this.programManager = new ProgramManager(this.context.profiler, backend.glContext, this.layoutStrategy);\n this.textureManager = new TextureManager(\n backend.glContext, this.layoutStrategy, this.context.profiler,\n {reuseTextures: backend.textureCacheMode === 'full'});\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache = new Map();\n this.pack = backend.pack;\n this.pack2unpackMap = new Map();\n this.unpack2packMap = new Map();\n }\n\n createInferenceHandler() {\n return new WebGLInferenceHandler(this);\n }\n onGraphInitialized(graph: Graph): void {\n const initializers = graph.getValues().filter(v => v.from === -1 && v.tensor).map(v => v.tensor!.dataId);\n this.initializers = new Set(initializers);\n }\n isInitializer(tensorId: Tensor.Id): boolean {\n return this.initializers ? this.initializers.has(tensorId) : false;\n }\n addInitializer(tensorId: Tensor.Id): void {\n this.initializers.add(tensorId);\n }\n getTextureData(tensorId: Tensor.Id, isPacked: boolean): TextureData|undefined {\n if (isPacked) {\n return this.packedTextureDataCache.get(tensorId);\n } else {\n return this.unpackedTextureDataCache.get(tensorId);\n }\n }\n setTextureData(tensorId: Tensor.Id, textureData: TextureData, isPacked = false): void {\n Logger.verbose('WebGLSessionHandler', 'Storing Texture data in cache');\n if (isPacked) {\n this.packedTextureDataCache.set(tensorId, textureData);\n } else {\n this.unpackedTextureDataCache.set(tensorId, textureData);\n }\n }\n dispose(): void {\n this.programManager.dispose();\n this.textureManager.clearActiveTextures();\n this.packedTextureDataCache.forEach(td => this.textureManager.releaseTexture(td, true));\n this.packedTextureDataCache = new Map();\n this.unpackedTextureDataCache.forEach(td => this.textureManager.releaseTexture(td, true));\n this.unpackedTextureDataCache = new Map();\n }\n resolve(node: Graph.Node, opsets: readonly OpSet[], graph: Graph): Operator {\n const op = resolveOperator(node, opsets, WEBGL_OP_RESOLVE_RULES);\n return {impl: op.opImpl, context: op.opInit ? op.opInit(node, graph) : node};\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport * as DataEncoders from './texture-data-encoder';\nimport {DataEncoder, Encoder, EncoderUsage} from './texture-data-encoder';\nimport {repeatedTry} from './utils';\n\nexport interface FenceContext {\n query: WebGLSync|null;\n isFencePassed(): boolean;\n}\n\ntype PollItem = {\n isDoneFn: () => boolean; resolveFn: () => void;\n};\n\nexport function linearSearchLastTrue(arr: Array<() => boolean>): number {\n let i = 0;\n for (; i < arr.length; ++i) {\n const isDone = arr[i]();\n if (!isDone) {\n break;\n }\n }\n return i - 1;\n}\n\n/**\n * Abstraction and wrapper around WebGLRenderingContext and its operations\n */\nexport class WebGLContext {\n gl: WebGLRenderingContext;\n version: 1|2;\n\n private vertexbuffer: WebGLBuffer;\n private framebuffer: WebGLFramebuffer;\n\n // WebGL flags and vital parameters\n private isFloatTextureAttachableToFrameBuffer: boolean;\n isFloat32DownloadSupported: boolean;\n isRenderFloat32Supported: boolean;\n isBlendSupported: boolean;\n maxTextureSize: number;\n // private maxCombinedTextureImageUnits: number;\n private maxTextureImageUnits: number;\n // private maxCubeMapTextureSize: number;\n // private shadingLanguageVersion: string;\n // private webglVendor: string;\n // private webglVersion: string;\n\n // WebGL2 flags and vital parameters\n // private max3DTextureSize: number;\n // private maxArrayTextureLayers: number;\n // private maxColorAttachments: number;\n // private maxDrawBuffers: number;\n\n // WebGL extensions\n // eslint-disable-next-line camelcase\n textureFloatExtension: OES_texture_float|null;\n // eslint-disable-next-line camelcase\n textureHalfFloatExtension: OES_texture_half_float|null;\n\n // WebGL2 extensions\n colorBufferFloatExtension: unknown|null;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n disjointTimerQueryWebgl2Extension: {TIME_ELAPSED_EXT: GLenum; GPU_DISJOINT_EXT: GLenum}|null;\n\n private disposed: boolean;\n private frameBufferBound = false;\n\n constructor(gl: WebGLRenderingContext, version: 1|2) {\n this.gl = gl;\n this.version = version;\n\n this.getExtensions();\n this.vertexbuffer = this.createVertexbuffer();\n this.framebuffer = this.createFramebuffer();\n this.queryVitalParameters();\n }\n\n allocateTexture(width: number, height: number, encoder: DataEncoder, data?: Encoder.DataArrayType): WebGLTexture {\n const gl = this.gl;\n // create the texture\n const texture = gl.createTexture();\n // bind the texture so the following methods effect this texture.\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n const buffer = data ? 
encoder.encode(data, width * height) : null;\n gl.texImage2D(\n gl.TEXTURE_2D,\n 0, // Level of detail.\n encoder.internalFormat, width, height,\n 0, // Always 0 in OpenGL ES.\n encoder.format, encoder.textureType, buffer);\n this.checkError();\n return texture as WebGLTexture;\n }\n updateTexture(\n texture: WebGLTexture, width: number, height: number, encoder: DataEncoder, data: Encoder.DataArrayType): void {\n const gl = this.gl;\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const buffer = encoder.encode(data, width * height);\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0, // level\n 0, // xoffset\n 0, // yoffset\n width, height, encoder.format, encoder.textureType, buffer);\n this.checkError();\n }\n attachFramebuffer(texture: WebGLTexture, width: number, height: number): void {\n const gl = this.gl;\n // Make it the target for framebuffer operations - including rendering.\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture,\n 0); // 0, we aren't using MIPMAPs\n this.checkError();\n gl.viewport(0, 0, width, height);\n gl.scissor(0, 0, width, height);\n }\n readTexture(\n texture: WebGLTexture, width: number, height: number, dataSize: number, dataType: Encoder.DataType,\n channels: number): Encoder.DataArrayType {\n const gl = this.gl;\n if (!channels) {\n channels = 1;\n }\n if (!this.frameBufferBound) {\n this.attachFramebuffer(texture, width, height);\n }\n const encoder = this.getEncoder(dataType, channels);\n const buffer = encoder.allocate(width * height);\n // bind texture to framebuffer\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture,\n 0); // 0, we aren't using MIPMAPs\n // TODO: Check if framebuffer is ready\n gl.readPixels(0, 0, width, height, gl.RGBA, encoder.textureType, buffer);\n this.checkError();\n // unbind FB\n return encoder.decode(buffer, dataSize);\n }\n\n isFramebufferReady(): boolean {\n // TODO: Implement logic to check if the framebuffer is ready\n return true;\n }\n getActiveTexture(): string {\n const gl = this.gl;\n const n = gl.getParameter(this.gl.ACTIVE_TEXTURE);\n return `TEXTURE${(n - gl.TEXTURE0)}`;\n }\n getTextureBinding(): WebGLTexture {\n return this.gl.getParameter(this.gl.TEXTURE_BINDING_2D);\n }\n getFramebufferBinding(): WebGLFramebuffer {\n return this.gl.getParameter(this.gl.FRAMEBUFFER_BINDING);\n }\n setVertexAttributes(positionHandle: number, textureCoordHandle: number): void {\n const gl = this.gl;\n gl.vertexAttribPointer(positionHandle, 3, gl.FLOAT, false, 20, 0);\n gl.enableVertexAttribArray(positionHandle);\n if (textureCoordHandle !== -1) {\n gl.vertexAttribPointer(textureCoordHandle, 2, gl.FLOAT, false, 20, 12);\n gl.enableVertexAttribArray(textureCoordHandle);\n }\n this.checkError();\n }\n createProgram(\n vertexShader: WebGLShader,\n fragShader: WebGLShader,\n ): WebGLProgram {\n const gl = this.gl;\n const program = gl.createProgram()!;\n\n // the program consists of our shaders\n gl.attachShader(program, vertexShader);\n gl.attachShader(program, fragShader);\n gl.linkProgram(program);\n return program;\n }\n compileShader(shaderSource: string, shaderType: number): WebGLShader {\n const gl = this.gl;\n const shader = gl.createShader(shaderType);\n if (!shader) {\n throw new Error(`createShader() returned null with type ${shaderType}`);\n }\n\n gl.shaderSource(shader, shaderSource);\n gl.compileShader(shader);\n if 
(gl.getShaderParameter(shader, gl.COMPILE_STATUS) === false) {\n throw new Error(`Failed to compile shader: ${gl.getShaderInfoLog(shader)}\nShader source:\n${shaderSource}`);\n }\n return shader;\n }\n deleteShader(shader: WebGLShader): void {\n this.gl.deleteShader(shader);\n }\n bindTextureToUniform(texture: WebGLTexture, position: number, uniformHandle: WebGLUniformLocation): void {\n const gl = this.gl;\n gl.activeTexture(gl.TEXTURE0 + position);\n this.checkError();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n this.checkError();\n gl.uniform1i(uniformHandle, position);\n this.checkError();\n }\n draw(): void {\n this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);\n this.checkError();\n }\n checkError(): void {\n if (env.debug) {\n const gl = this.gl;\n const error = gl.getError();\n let label = '';\n switch (error) {\n case (gl.NO_ERROR):\n return;\n case (gl.INVALID_ENUM):\n label = 'INVALID_ENUM';\n break;\n case (gl.INVALID_VALUE):\n label = 'INVALID_VALUE';\n break;\n case (gl.INVALID_OPERATION):\n label = 'INVALID_OPERATION';\n break;\n case (gl.INVALID_FRAMEBUFFER_OPERATION):\n label = 'INVALID_FRAMEBUFFER_OPERATION';\n break;\n case (gl.OUT_OF_MEMORY):\n label = 'OUT_OF_MEMORY';\n break;\n case (gl.CONTEXT_LOST_WEBGL):\n label = 'CONTEXT_LOST_WEBGL';\n break;\n default:\n label = `Unknown WebGL Error: ${error.toString(16)}`;\n }\n throw new Error(label);\n }\n }\n deleteTexture(texture: WebGLTexture): void {\n this.gl.deleteTexture(texture);\n }\n deleteProgram(program: WebGLProgram): void {\n this.gl.deleteProgram(program);\n }\n getEncoder(dataType: Encoder.DataType, channels: number, usage: EncoderUsage = EncoderUsage.Default): DataEncoder {\n if (this.version === 2) {\n return new DataEncoders.RedFloat32DataEncoder(this.gl as WebGL2RenderingContext, channels);\n }\n\n switch (dataType) {\n case 'float':\n if (usage === EncoderUsage.UploadOnly || this.isRenderFloat32Supported) {\n return new DataEncoders.RGBAFloatDataEncoder(this.gl, channels);\n } else {\n return new DataEncoders.RGBAFloatDataEncoder(\n this.gl, channels, this.textureHalfFloatExtension!.HALF_FLOAT_OES);\n }\n case 'int':\n throw new Error('not implemented');\n case 'byte':\n return new DataEncoders.Uint8DataEncoder(this.gl, channels);\n default:\n throw new Error(`Invalid dataType: ${dataType}`);\n }\n }\n clearActiveTextures(): void {\n const gl = this.gl;\n for (let unit = 0; unit < this.maxTextureImageUnits; ++unit) {\n gl.activeTexture(gl.TEXTURE0 + unit);\n gl.bindTexture(gl.TEXTURE_2D, null);\n }\n }\n dispose(): void {\n if (this.disposed) {\n return;\n }\n const gl = this.gl;\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteFramebuffer(this.framebuffer);\n gl.bindBuffer(gl.ARRAY_BUFFER, null);\n gl.deleteBuffer(this.vertexbuffer);\n gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);\n gl.finish();\n this.disposed = true;\n }\n\n private createDefaultGeometry(): Float32Array {\n // Sets of x,y,z(=0),s,t coordinates.\n return new Float32Array([\n -1.0, 1.0, 0.0, 0.0, 1.0, // upper left\n -1.0, -1.0, 0.0, 0.0, 0.0, // lower left\n 1.0, 1.0, 0.0, 1.0, 1.0, // upper right\n 1.0, -1.0, 0.0, 1.0, 0.0 // lower right\n ]);\n }\n private createVertexbuffer(): WebGLBuffer {\n const gl = this.gl;\n const buffer = gl.createBuffer();\n if (!buffer) {\n throw new Error('createBuffer() returned null');\n }\n const geometry = this.createDefaultGeometry();\n gl.bindBuffer(gl.ARRAY_BUFFER, buffer);\n gl.bufferData(gl.ARRAY_BUFFER, geometry, gl.STATIC_DRAW);\n this.checkError();\n return buffer;\n }\n private 
createFramebuffer(): WebGLFramebuffer {\n const fb = this.gl.createFramebuffer();\n if (!fb) {\n throw new Error('createFramebuffer returned null');\n }\n return fb;\n }\n\n private queryVitalParameters(): void {\n const gl = this.gl;\n\n this.isFloatTextureAttachableToFrameBuffer = this.checkFloatTextureAttachableToFrameBuffer();\n this.isRenderFloat32Supported = this.checkRenderFloat32();\n this.isFloat32DownloadSupported = this.checkFloat32Download();\n\n if (this.version === 1 && !this.textureHalfFloatExtension && !this.isRenderFloat32Supported) {\n throw new Error('both float32 and float16 TextureType are not supported');\n }\n\n this.isBlendSupported = !this.isRenderFloat32Supported || this.checkFloat32Blend();\n\n // this.maxCombinedTextureImageUnits = gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS);\n this.maxTextureSize = gl.getParameter(gl.MAX_TEXTURE_SIZE);\n this.maxTextureImageUnits = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);\n // this.maxCubeMapTextureSize = gl.getParameter(gl.MAX_CUBE_MAP_TEXTURE_SIZE);\n // this.shadingLanguageVersion = gl.getParameter(gl.SHADING_LANGUAGE_VERSION);\n // this.webglVendor = gl.getParameter(gl.VENDOR);\n // this.webglVersion = gl.getParameter(gl.VERSION);\n\n if (this.version === 2) {\n // this.max3DTextureSize = gl.getParameter(WebGL2RenderingContext.MAX_3D_TEXTURE_SIZE);\n // this.maxArrayTextureLayers = gl.getParameter(WebGL2RenderingContext.MAX_ARRAY_TEXTURE_LAYERS);\n // this.maxColorAttachments = gl.getParameter(WebGL2RenderingContext.MAX_COLOR_ATTACHMENTS);\n // this.maxDrawBuffers = gl.getParameter(WebGL2RenderingContext.MAX_DRAW_BUFFERS);\n }\n }\n private getExtensions(): void {\n if (this.version === 2) {\n this.colorBufferFloatExtension = this.gl.getExtension('EXT_color_buffer_float');\n this.disjointTimerQueryWebgl2Extension = this.gl.getExtension('EXT_disjoint_timer_query_webgl2');\n } else {\n this.textureFloatExtension = this.gl.getExtension('OES_texture_float');\n this.textureHalfFloatExtension = this.gl.getExtension('OES_texture_half_float');\n }\n }\n\n private checkFloatTextureAttachableToFrameBuffer(): boolean {\n // test whether Float32 texture is supported:\n // STEP.1 create a float texture\n const gl = this.gl;\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n // eslint-disable-next-line @typescript-eslint/naming-convention\n const internalFormat = this.version === 2 ? 
(gl as unknown as {RGBA32F: number}).RGBA32F : gl.RGBA;\n gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, 1, 1, 0, gl.RGBA, gl.FLOAT, null);\n // STEP.2 bind a frame buffer\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n // STEP.3 attach texture to framebuffer\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n // STEP.4 test whether framebuffer is complete\n const isComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isComplete;\n }\n\n private checkRenderFloat32(): boolean {\n if (this.version === 2) {\n if (!this.colorBufferFloatExtension) {\n return false;\n }\n } else {\n if (!this.textureFloatExtension) {\n return false;\n }\n }\n return this.isFloatTextureAttachableToFrameBuffer;\n }\n\n private checkFloat32Download(): boolean {\n if (this.version === 2) {\n if (!this.colorBufferFloatExtension) {\n return false;\n }\n } else {\n if (!this.textureFloatExtension) {\n return false;\n }\n if (!this.gl.getExtension('WEBGL_color_buffer_float')) {\n return false;\n }\n }\n return this.isFloatTextureAttachableToFrameBuffer;\n }\n\n /**\n * Check whether GL_BLEND is supported\n */\n private checkFloat32Blend(): boolean {\n // it looks like currently (2019-05-08) there is no easy way to detect whether BLEND is supported\n // https://github.com/microsoft/onnxjs/issues/145\n\n const gl = this.gl;\n\n let texture: WebGLTexture|null|undefined;\n let frameBuffer: WebGLFramebuffer|null|undefined;\n let vertexShader: WebGLShader|null|undefined;\n let fragmentShader: WebGLShader|null|undefined;\n let program: WebGLProgram|null|undefined;\n\n try {\n texture = gl.createTexture();\n frameBuffer = gl.createFramebuffer();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n\n // eslint-disable-next-line @typescript-eslint/naming-convention\n const internalFormat = this.version === 2 ? 
(gl as unknown as {RGBA32F: number}).RGBA32F : gl.RGBA;\n gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, 1, 1, 0, gl.RGBA, gl.FLOAT, null);\n\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n gl.enable(gl.BLEND);\n\n vertexShader = gl.createShader(gl.VERTEX_SHADER);\n if (!vertexShader) {\n return false;\n }\n gl.shaderSource(vertexShader, 'void main(){}');\n gl.compileShader(vertexShader);\n\n fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);\n if (!fragmentShader) {\n return false;\n }\n gl.shaderSource(fragmentShader, 'precision highp float;void main(){gl_FragColor=vec4(0.5);}');\n gl.compileShader(fragmentShader);\n\n program = gl.createProgram();\n if (!program) {\n return false;\n }\n gl.attachShader(program, vertexShader);\n gl.attachShader(program, fragmentShader);\n gl.linkProgram(program);\n gl.useProgram(program);\n\n gl.drawArrays(gl.POINTS, 0, 1);\n return gl.getError() === gl.NO_ERROR;\n\n } finally {\n gl.disable(gl.BLEND);\n\n if (program) {\n gl.deleteProgram(program);\n }\n if (vertexShader) {\n gl.deleteShader(vertexShader);\n }\n if (fragmentShader) {\n gl.deleteShader(fragmentShader);\n }\n if (frameBuffer) {\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteFramebuffer(frameBuffer);\n }\n if (texture) {\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.deleteTexture(texture);\n }\n }\n }\n\n beginTimer(): WebGLQuery {\n if (this.version === 2 && this.disjointTimerQueryWebgl2Extension) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.disjointTimerQueryWebgl2Extension;\n\n const query = gl2.createQuery() as WebGLQuery;\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported.');\n }\n }\n\n endTimer() {\n if (this.version === 2 && this.disjointTimerQueryWebgl2Extension) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.disjointTimerQueryWebgl2Extension;\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported');\n }\n }\n\n isTimerResultAvailable(query: WebGLQuery): boolean {\n let available = false, disjoint = false;\n if (this.version === 2 && this.disjointTimerQueryWebgl2Extension) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.disjointTimerQueryWebgl2Extension;\n\n available = gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n disjoint = gl2.getParameter(ext.GPU_DISJOINT_EXT);\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported');\n }\n\n return available && !disjoint;\n }\n\n getTimerResult(query: WebGLQuery): number {\n let timeElapsed = 0;\n if (this.version === 2) {\n const gl2 = this.gl as WebGL2RenderingContext;\n timeElapsed = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n gl2.deleteQuery(query);\n } else {\n // TODO: add webgl 1 handling.\n throw new Error('WebGL1 profiling currently not supported');\n }\n // return miliseconds\n return timeElapsed / 1000000;\n }\n\n async waitForQueryAndGetTime(query: WebGLQuery): Promise {\n await repeatedTry(() => this.isTimerResultAvailable(query));\n return this.getTimerResult(query);\n }\n\n public async createAndWaitForFence(): Promise {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n\n private createFence(gl: WebGLRenderingContext): FenceContext 
{\n let isFencePassed: () => boolean;\n const gl2 = gl as WebGL2RenderingContext;\n const query = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n if (query === null) {\n isFencePassed = () => true;\n } else {\n isFencePassed = () => {\n const status = gl2.clientWaitSync(query, 0, 0);\n return status === gl2.ALREADY_SIGNALED || status === gl2.CONDITION_SATISFIED;\n };\n }\n return {query, isFencePassed};\n }\n\n async pollFence(fenceContext: FenceContext) {\n return new Promise(resolve => {\n void this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n\n private itemsToPoll: PollItem[] = [];\n\n pollItems(): void {\n // Find the last query that has finished.\n const index = linearSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const {resolveFn} = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n\n private async addItemToPoll(isDoneFn: () => boolean, resolveFn: () => void) {\n this.itemsToPoll.push({isDoneFn, resolveFn});\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n await repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Logger} from '../../instrument';\n\nimport {WebGLContext} from './webgl-context';\n\nconst cache: {[contextId: string]: WebGLContext} = {};\n\n/**\n * This factory function creates proper WebGLRenderingContext based on\n * the current browsers capabilities\n * The order is from higher/most recent versions to most basic\n */\nexport function createWebGLContext(contextId?: 'webgl'|'webgl2'): WebGLContext {\n let context: WebGLContext|undefined;\n if ((!contextId || contextId === 'webgl2') && 'webgl2' in cache) {\n context = cache.webgl2;\n } else if ((!contextId || contextId === 'webgl') && 'webgl' in cache) {\n context = cache.webgl;\n }\n\n if (!context) {\n try {\n // try to create webgl context from an offscreen canvas\n const offscreenCanvas = createOffscreenCanvas();\n context = createNewWebGLContext(offscreenCanvas, contextId);\n } catch (e) {\n // if failed, fallback to try to use a normal canvas element\n const canvas = createCanvas();\n context = createNewWebGLContext(canvas, contextId);\n }\n }\n\n contextId = contextId || context.version === 1 ? 
'webgl' : 'webgl2';\n const gl = context.gl;\n\n cache[contextId] = context;\n\n if (gl.isContextLost()) {\n delete cache[contextId];\n return createWebGLContext(contextId);\n }\n\n gl.disable(gl.DEPTH_TEST);\n gl.disable(gl.STENCIL_TEST);\n gl.disable(gl.BLEND);\n gl.disable(gl.DITHER);\n gl.disable(gl.POLYGON_OFFSET_FILL);\n gl.disable(gl.SAMPLE_COVERAGE);\n gl.enable(gl.SCISSOR_TEST);\n gl.enable(gl.CULL_FACE);\n gl.cullFace(gl.BACK);\n\n return context;\n}\n\nexport function createNewWebGLContext(canvas: HTMLCanvasElement, contextId?: 'webgl'|'webgl2'): WebGLContext {\n const contextAttributes: WebGLContextAttributes = {\n alpha: false,\n depth: false,\n antialias: false,\n stencil: false,\n preserveDrawingBuffer: false,\n premultipliedAlpha: false,\n failIfMajorPerformanceCaveat: false\n };\n let gl: WebGLRenderingContext|null;\n const ca = contextAttributes;\n if (!contextId || contextId === 'webgl2') {\n gl = canvas.getContext('webgl2', ca);\n if (gl) {\n try {\n return new WebGLContext(gl, 2);\n } catch (err) {\n Logger.warning('GlContextFactory', `failed to create WebGLContext using contextId 'webgl2'. Error: ${err}`);\n }\n }\n }\n if (!contextId || contextId === 'webgl') {\n gl = canvas.getContext('webgl', ca) || canvas.getContext('experimental-webgl', ca) as WebGLRenderingContext;\n if (gl) {\n try {\n return new WebGLContext(gl, 1);\n } catch (err) {\n Logger.warning(\n 'GlContextFactory',\n `failed to create WebGLContext using contextId 'webgl' or 'experimental-webgl'. Error: ${err}`);\n }\n }\n }\n\n throw new Error('WebGL is not supported');\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\ndeclare let OffscreenCanvas: {new (width: number, height: number): HTMLCanvasElement};\n\nfunction createCanvas(): HTMLCanvasElement {\n if (typeof document === 'undefined') {\n throw new TypeError('failed to create canvas: document is not supported');\n }\n const canvas: HTMLCanvasElement = document.createElement('canvas');\n canvas.width = 1;\n canvas.height = 1;\n return canvas;\n}\n\nfunction createOffscreenCanvas(): HTMLCanvasElement {\n if (typeof OffscreenCanvas === 'undefined') {\n throw new TypeError('failed to create offscreen canvas: OffscreenCanvas is not supported');\n }\n return new OffscreenCanvas(1, 1);\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {Backend, SessionHandler} from '../backend';\nimport {Logger} from '../instrument';\nimport {Session} from '../session';\n\nimport {WebGLSessionHandler} from './webgl/session-handler';\nimport {WebGLContext} from './webgl/webgl-context';\nimport {createWebGLContext} from './webgl/webgl-context-factory';\n\n/**\n * WebGLBackend is the entry point for all WebGL opeartions\n * When it starts it created the WebGLRenderingContext\n * and other main framework components such as Program and Texture Managers\n */\nexport class WebGLBackend implements Backend {\n glContext: WebGLContext;\n\n get contextId(): 'webgl'|'webgl2'|undefined {\n return env.webgl.contextId;\n }\n set contextId(value: 'webgl'|'webgl2'|undefined) {\n env.webgl.contextId = value;\n }\n\n get matmulMaxBatchSize(): number|undefined {\n return env.webgl.matmulMaxBatchSize;\n }\n set matmulMaxBatchSize(value: number|undefined) {\n env.webgl.matmulMaxBatchSize = value;\n }\n\n get textureCacheMode(): 'initializerOnly'|'full'|undefined {\n return env.webgl.textureCacheMode;\n }\n set textureCacheMode(value: 'initializerOnly'|'full'|undefined) {\n env.webgl.textureCacheMode = value;\n }\n\n get pack(): boolean|undefined {\n return env.webgl.pack;\n }\n set pack(value: boolean|undefined) {\n env.webgl.pack = value;\n }\n\n get async(): boolean|undefined {\n return env.webgl.async;\n }\n set async(value: boolean|undefined) {\n env.webgl.async = value;\n }\n\n initialize(): boolean {\n try {\n this.glContext = createWebGLContext(this.contextId);\n if (typeof this.matmulMaxBatchSize !== 'number') {\n this.matmulMaxBatchSize = 16;\n }\n if (typeof this.textureCacheMode !== 'string') {\n this.textureCacheMode = 'full';\n }\n if (typeof this.pack !== 'boolean') {\n this.pack = false;\n }\n if (typeof this.async !== 'boolean') {\n this.async = false;\n }\n\n Logger.setWithEnv(env);\n\n if (!env.webgl.context) {\n Object.defineProperty(env.webgl, 'context', {value: this.glContext.gl});\n }\n\n Logger.verbose(\n 'WebGLBackend',\n `Created WebGLContext: ${typeof this.glContext} with matmulMaxBatchSize: ${\n this.matmulMaxBatchSize}; textureCacheMode: ${this.textureCacheMode}; pack: ${this.pack}; async: ${\n this.async}.`);\n return true;\n } catch (e) {\n Logger.warning('WebGLBackend', `Unable to initialize WebGLBackend. ${e}`);\n return false;\n }\n }\n createSessionHandler(context: Session.Context): SessionHandler {\n return new WebGLSessionHandler(this, context);\n }\n dispose(): void {\n this.glContext.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGLBackend} from './backends/backend-webgl';\nimport {Graph} from './graph';\nimport {Operator} from './operators';\nimport {OpSet} from './opset';\nimport {Session} from './session';\n\nexport interface InferenceHandler {\n /**\n * dispose the inference handler. it will be called as the last step in Session.run()\n */\n dispose(): void;\n}\n\nexport interface SessionHandler {\n /**\n * transform the graph at initialization time\n * @param graphTransformer the graph transformer to manipulate the model graph\n */\n transformGraph?(graphTransformer: Graph.Transformer): void;\n\n /**\n * create an instance of InferenceHandler to use in a Session.run() call\n */\n createInferenceHandler(): InferenceHandler;\n\n /**\n * dispose the session handler. 
it will be called when a session is being disposed explicitly\n */\n dispose(): void;\n\n /**\n * Resolves the operator from the name and opset version; backend specific\n * @param node the node to resolve\n * @param opsets a list of opsets that exported from the model\n * @param graph the completely initialized graph\n */\n resolve(node: Graph.Node, opsets: readonly OpSet[], graph: Graph): Operator;\n\n /**\n * This method let's the sessionHandler know that the graph initialization is complete\n * @param graph the completely initialized graph\n */\n onGraphInitialized?(graph: Graph): void;\n\n /**\n * a reference to the corresponding backend\n */\n readonly backend: Backend;\n\n /**\n * a reference to the session context\n */\n readonly context: Session.Context;\n}\n\nexport interface Backend {\n /**\n * initialize the backend. will be called only once, when the first time the\n * backend it to be used\n */\n initialize(): boolean|Promise;\n\n /**\n * create an instance of SessionHandler to use in a Session object's lifecycle\n */\n createSessionHandler(context: Session.Context): SessionHandler;\n\n /**\n * dispose the backend. currently this will not be called\n */\n dispose(): void;\n}\n\n// caches all initialized backend instances\nconst backendsCache: Map = new Map();\n\nexport const backend: {[name: string]: Backend} = {\n webgl: new WebGLBackend()\n};\n\n/**\n * Resolve a reference to the backend. If a hint is specified, the corresponding\n * backend will be used.\n */\nexport async function resolveBackend(hint?: string|readonly string[]): Promise {\n if (!hint) {\n return resolveBackend(['webgl']);\n } else {\n const hints = typeof hint === 'string' ? [hint] : hint;\n\n for (const backendHint of hints) {\n const cache = backendsCache.get(backendHint);\n if (cache) {\n return cache;\n }\n\n const backend = await tryLoadBackend(backendHint);\n if (backend) {\n return backend;\n }\n }\n }\n\n throw new Error('no available backend to use');\n}\n\nasync function tryLoadBackend(backendHint: string): Promise {\n const backendObj = backend;\n\n if (typeof backendObj[backendHint] !== 'undefined' && isBackend(backendObj[backendHint])) {\n const backend = backendObj[backendHint];\n let init = backend.initialize();\n if (typeof init === 'object' && 'then' in init) {\n init = await init;\n }\n if (init) {\n backendsCache.set(backendHint, backend);\n return backend;\n }\n }\n\n return undefined;\n}\n\nfunction isBackend(obj: unknown) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const o = obj as any;\n\n // check if an object is a Backend instance\n if (\n 'initialize' in o && typeof o.initialize === 'function' && // initialize()\n 'createSessionHandler' in o && typeof o.createSessionHandler === 'function' && // createSessionHandler()\n 'dispose' in o && typeof o.dispose === 'function' // dispose()\n ) {\n return true;\n }\n\n return false;\n}\n\nexport type BackendType = Backend;\nexport type SessionHandlerType = ReturnType;\nexport type InferenceHandlerType = ReturnType;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {SessionHandler} from './backend';\nimport {Graph} from './graph';\nimport {Logger, Profiler} from './instrument';\nimport {Operator} from './operators';\nimport {Tensor} from './tensor';\n\nclass KernelOp {\n constructor(public op: Operator, public node: Graph.Node) {}\n}\n\nexport class ExecutionPlan {\n constructor(private graph: Graph, ops: Operator[], private profiler: Readonly) {\n this.initialize(ops);\n }\n\n initialize(ops: Operator[]) {\n this.profiler.event('session', 'ExecutionPlan.initialize', () => {\n const graphNodes = this.graph.getNodes();\n if (graphNodes.length !== ops.length) {\n throw new Error('The size of nodes and OPs do not match.');\n }\n\n this._ops = ops.map((op, i) => new KernelOp(op, graphNodes[i]));\n this.reset();\n\n // look for starter node(s)\n this._starter = [];\n this._ops.forEach((op, i) => {\n let resolved = true;\n for (const input of op.node.inputs) {\n if (\n !this._values[input] // not an initialized input\n && this.graph.getInputIndices().indexOf(input) === -1 // not model input\n ) {\n resolved = false;\n break;\n }\n }\n if (resolved) {\n this._starter.push(i);\n }\n });\n });\n }\n\n reset() {\n this._values = this.graph.getValues().map(i => i.tensor);\n }\n\n async execute(sessionHandler: SessionHandler, modelInputs: Tensor[]): Promise {\n return this.profiler.event('session', 'ExecutionPlan.execute', async () => {\n // reset mediem result\n this.reset();\n\n // create inference handler\n const inferenceHandler = sessionHandler.createInferenceHandler();\n\n // populate inputs value\n const graphInputs = this.graph.getInputIndices();\n if (modelInputs.length !== graphInputs.length) {\n throw new Error(`number of input tensors don't match the number of inputs to the model: actual: ${\n modelInputs.length} expected: ${graphInputs.length}`);\n }\n\n modelInputs.forEach((input, i) => {\n const index = graphInputs[i];\n this._values[index] = input;\n });\n\n // prepare running sequence\n const sequence: number[] = this._starter.slice(0);\n\n // execution iterations\n const graphValues = this.graph.getValues();\n const graphNodes = this.graph.getNodes();\n\n let rear = 0;\n while (rear < sequence.length) {\n const thisOpIndex = sequence[rear++];\n const thisOp = this._ops[thisOpIndex];\n\n // check input\n const inputList = thisOp.node.inputs.map(i => this._values[i]);\n if (inputList.indexOf(undefined) !== -1) {\n throw new Error(`unresolved input detected: op: ${thisOp.node}`);\n }\n\n // run\n const inputTensors = inputList as Tensor[];\n Logger.verbose(\n 'ExecPlan',\n `Running op:${thisOp.node.name} (${\n inputTensors.map((t, i) => `'${thisOp.node.inputs[i]}': ${t.type}[${t.dims.join(',')}]`).join(', ')})`);\n\n const outputList = await this.profiler.event(\n 'node', thisOp.node.name, async () => thisOp.op.impl(inferenceHandler, inputTensors, thisOp.op.context));\n\n // check output\n if (outputList.length !== thisOp.node.outputs.length) {\n throw new Error('the size of output does not match model definition.');\n }\n\n // fill value\n outputList.forEach((output, i) => {\n const j = thisOp.node.outputs[i];\n if (this._values[j]) {\n throw new Error(`output [${j}] already has value: op:${thisOp.node.name}`);\n }\n this._values[j] = output;\n });\n\n // resolve downstream nodes\n const downstreamNodes = new Set();\n outputList.forEach((_output, i) => {\n const j = thisOp.node.outputs[i];\n for (const currentDownstreamNodeIndex of graphValues[j].to) {\n const 
currentDownstreamNode = graphNodes[currentDownstreamNodeIndex];\n let resolved = true;\n for (const k of currentDownstreamNode.inputs) {\n if (!this._values[k]) {\n resolved = false;\n break;\n }\n }\n if (resolved) {\n downstreamNodes.add(currentDownstreamNodeIndex);\n }\n }\n });\n sequence.push(...downstreamNodes);\n }\n\n const output: Tensor[] = [];\n for (let i = 0; i < this.graph.getOutputIndices().length; i++) {\n const outputIndex = this.graph.getOutputIndices()[i];\n const outputTensor = this._values[outputIndex];\n if (outputTensor === undefined) {\n throw new Error(`required output [${outputIndex}] does not have value`);\n }\n if (outputIndex === 0) {\n await outputTensor.getData();\n } else {\n // eslint-disable-next-line no-unused-expressions\n outputTensor.data;\n }\n output.push(outputTensor);\n }\n Logger.verbose('ExecPlan', 'disposing of inferenceHandler');\n inferenceHandler.dispose();\n return output;\n });\n }\n\n _values: Array;\n _ops: KernelOp[];\n _starter: number[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport Long from 'long';\n\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {Tensor} from './tensor';\nimport {decodeUtf8String, LongUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport declare namespace Attribute {\n export interface DataTypeMap {\n float: number;\n int: number;\n string: string;\n tensor: Tensor;\n floats: number[];\n ints: number[];\n strings: string[];\n tensors: Tensor[];\n }\n\n export type DataType = keyof DataTypeMap;\n}\n\ntype ValueTypes = Attribute.DataTypeMap[Attribute.DataType];\n\ntype Value = [ValueTypes, Attribute.DataType];\n\nexport class Attribute {\n constructor(attributes: onnx.IAttributeProto[]|ortFbs.Attribute[]|null|undefined) {\n this._attributes = new Map();\n if (attributes !== null && attributes !== undefined) {\n for (const attr of attributes) {\n if (attr instanceof onnx.AttributeProto) {\n this._attributes.set(attr.name, [Attribute.getValue(attr), Attribute.getType(attr)]);\n } else if (attr instanceof ortFbs.Attribute) {\n this._attributes.set(attr.name()!, [Attribute.getValue(attr), Attribute.getType(attr)]);\n }\n }\n if (this._attributes.size < attributes.length) {\n throw new Error('duplicated attribute names');\n }\n }\n }\n\n set(key: string, type: Attribute.DataType, value: ValueTypes): void {\n this._attributes.set(key, [value, type]);\n }\n delete(key: string): void {\n this._attributes.delete(key);\n }\n getFloat(key: string, defaultValue?: Attribute.DataTypeMap['float']) {\n return this.get(key, 'float', defaultValue);\n }\n\n getInt(key: string, defaultValue?: Attribute.DataTypeMap['int']) {\n return this.get(key, 'int', defaultValue);\n }\n\n getString(key: string, defaultValue?: Attribute.DataTypeMap['string']) {\n return this.get(key, 'string', defaultValue);\n }\n\n getTensor(key: string, defaultValue?: Attribute.DataTypeMap['tensor']) {\n return this.get(key, 'tensor', defaultValue);\n }\n\n getFloats(key: string, defaultValue?: Attribute.DataTypeMap['floats']) {\n return this.get(key, 'floats', defaultValue);\n }\n\n getInts(key: string, defaultValue?: Attribute.DataTypeMap['ints']) {\n return this.get(key, 'ints', defaultValue);\n }\n\n getStrings(key: string, defaultValue?: Attribute.DataTypeMap['strings']) {\n return this.get(key, 'strings', defaultValue);\n }\n\n getTensors(key: string, defaultValue?: 
Attribute.DataTypeMap['tensors']) {\n return this.get(key, 'tensors', defaultValue);\n }\n\n private get(\n key: string, type: Attribute.DataType, defaultValue?: V): V {\n const valueAndType = this._attributes.get(key);\n if (valueAndType === undefined) {\n if (defaultValue !== undefined) {\n return defaultValue;\n }\n throw new Error(`required attribute not found: ${key}`);\n }\n if (valueAndType[1] !== type) {\n throw new Error(`type mismatch: expected ${type} but got ${valueAndType[1]}`);\n }\n return valueAndType[0] as V;\n }\n\n private static getType(attr: onnx.IAttributeProto|ortFbs.Attribute): Attribute.DataType {\n const type = attr instanceof onnx.AttributeProto ? (attr).type : (attr as ortFbs.Attribute).type();\n switch (type) {\n case onnx.AttributeProto.AttributeType.FLOAT:\n return 'float';\n case onnx.AttributeProto.AttributeType.INT:\n return 'int';\n case onnx.AttributeProto.AttributeType.STRING:\n return 'string';\n case onnx.AttributeProto.AttributeType.TENSOR:\n return 'tensor';\n case onnx.AttributeProto.AttributeType.FLOATS:\n return 'floats';\n case onnx.AttributeProto.AttributeType.INTS:\n return 'ints';\n case onnx.AttributeProto.AttributeType.STRINGS:\n return 'strings';\n case onnx.AttributeProto.AttributeType.TENSORS:\n return 'tensors';\n default:\n throw new Error(`attribute type is not supported yet: ${onnx.AttributeProto.AttributeType[type]}`);\n }\n }\n\n private static getValue(attr: onnx.IAttributeProto|ortFbs.Attribute) {\n const attrType = attr instanceof onnx.AttributeProto ? attr.type : (attr as ortFbs.Attribute).type();\n if (attrType === onnx.AttributeProto.AttributeType.GRAPH || attrType === onnx.AttributeProto.AttributeType.GRAPHS) {\n throw new Error('graph attribute is not supported yet');\n }\n\n const value = this.getValueNoCheck(attr);\n\n // cast LONG to number\n if (attrType === onnx.AttributeProto.AttributeType.INT && LongUtil.isLong(value)) {\n return LongUtil.longToNumber(value as Long | flatbuffers.Long);\n }\n\n // cast LONG[] to number[]\n if (attrType === onnx.AttributeProto.AttributeType.INTS) {\n const arr = (value as Array);\n const numberValue: number[] = new Array(arr.length);\n\n for (let i = 0; i < arr.length; i++) {\n const maybeLong = arr[i];\n numberValue[i] = LongUtil.longToNumber(maybeLong);\n }\n\n return numberValue;\n }\n\n // cast onnx.TensorProto to onnxjs.Tensor\n if (attrType === onnx.AttributeProto.AttributeType.TENSOR) {\n return attr instanceof onnx.AttributeProto ? Tensor.fromProto(value as onnx.ITensorProto) :\n Tensor.fromOrtTensor(value as ortFbs.Tensor);\n }\n\n // cast onnx.TensorProto[] to onnxjs.Tensor[]\n if (attrType === onnx.AttributeProto.AttributeType.TENSORS) {\n if (attr instanceof onnx.AttributeProto) {\n const tensorProtos = value as onnx.ITensorProto[];\n return tensorProtos.map(value => Tensor.fromProto(value));\n } else if (attr instanceof ortFbs.Attribute) {\n const tensorProtos = value as ortFbs.Tensor[];\n return tensorProtos.map(value => Tensor.fromOrtTensor(value));\n }\n }\n\n // cast Uint8Array to string\n if (attrType === onnx.AttributeProto.AttributeType.STRING) {\n // string in onnx attribute is of uint8array type, so we need to convert it to string below. 
While in ort format,\n // string attributes are returned as string, so no conversion is needed.\n if (attr instanceof onnx.AttributeProto) {\n const utf8String = value as Uint8Array;\n return decodeUtf8String(utf8String);\n }\n }\n\n // cast Uint8Array[] to string[]\n if (attrType === onnx.AttributeProto.AttributeType.STRINGS) {\n // strings in onnx attribute is returned as uint8array[], so we need to convert it to string[] below. While in ort\n // format strings attributes are returned as string[], so no conversion is needed.\n if (attr instanceof onnx.AttributeProto) {\n const utf8Strings = value as Uint8Array[];\n return utf8Strings.map(decodeUtf8String);\n }\n }\n\n return value as ValueTypes;\n }\n\n private static getValueNoCheck(attr: onnx.IAttributeProto|ortFbs.Attribute) {\n return attr instanceof (onnx.AttributeProto) ? this.getValueNoCheckFromOnnxFormat(attr) :\n this.getValueNoCheckFromOrtFormat(attr as ortFbs.Attribute);\n }\n\n private static getValueNoCheckFromOnnxFormat(attr: onnx.IAttributeProto) {\n switch (attr.type!) {\n case onnx.AttributeProto.AttributeType.FLOAT:\n return attr.f;\n case onnx.AttributeProto.AttributeType.INT:\n return attr.i;\n case onnx.AttributeProto.AttributeType.STRING:\n return attr.s;\n case onnx.AttributeProto.AttributeType.TENSOR:\n return attr.t;\n case onnx.AttributeProto.AttributeType.GRAPH:\n return attr.g;\n case onnx.AttributeProto.AttributeType.FLOATS:\n return attr.floats;\n case onnx.AttributeProto.AttributeType.INTS:\n return attr.ints;\n case onnx.AttributeProto.AttributeType.STRINGS:\n return attr.strings;\n case onnx.AttributeProto.AttributeType.TENSORS:\n return attr.tensors;\n case onnx.AttributeProto.AttributeType.GRAPHS:\n return attr.graphs;\n default:\n throw new Error(`unsupported attribute type: ${onnx.AttributeProto.AttributeType[attr.type!]}`);\n }\n }\n\n private static getValueNoCheckFromOrtFormat(attr: ortFbs.Attribute) {\n switch (attr.type()) {\n case ortFbs.AttributeType.FLOAT:\n return attr.f();\n case ortFbs.AttributeType.INT:\n return attr.i();\n case ortFbs.AttributeType.STRING:\n return attr.s();\n case ortFbs.AttributeType.TENSOR:\n return attr.t();\n case ortFbs.AttributeType.GRAPH:\n return attr.g();\n case ortFbs.AttributeType.FLOATS:\n return attr.floatsArray();\n case ortFbs.AttributeType.INTS: {\n const ints = [];\n for (let i = 0; i < attr.intsLength(); i++) {\n ints.push(attr.ints(i)!);\n }\n return ints;\n }\n case ortFbs.AttributeType.STRINGS: {\n const strings = [];\n for (let i = 0; i < attr.stringsLength(); i++) {\n strings.push(attr.strings(i));\n }\n return strings;\n }\n case ortFbs.AttributeType.TENSORS: {\n const tensors = [];\n for (let i = 0; i < attr.tensorsLength(); i++) {\n tensors.push(attr.tensors(i)!);\n }\n return tensors;\n }\n // case ortFbs.AttributeType.GRAPHS:\n // TODO: Subgraph not supported yet.\n // const graphs = [];\n // for (let i = 0; i < attr.graphsLength(); i++) {\n // graphs.push(attr.graphs(i)!);\n // }\n // return graphs;\n default:\n throw new Error(`unsupported attribute type: ${ortFbs.AttributeType[attr.type()]}`);\n }\n }\n\n protected _attributes: Map;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Attribute} from './attribute';\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {Tensor} from './tensor';\nimport {LongUtil, MAX_CLIP, MIN_CLIP, ProtoUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport declare namespace Graph {\n export interface Shape {\n readonly dims: readonly number[];\n }\n export interface ValueType {\n readonly tensorType: Tensor.DataType;\n readonly shape: Shape;\n }\n export interface Value {\n // the tensor data. empty for non-initialized inputs\n readonly tensor?: Tensor;\n\n // index to the Node where the value comes from. -1 for initializer.\n readonly from: number;\n\n // indices to the Nodes where the values go to.\n readonly to: readonly number[];\n\n // value type specification. empty for non-input values.\n readonly type?: ValueType;\n }\n export interface Node {\n // name of the node\n readonly name: string;\n\n // the operator type\n readonly opType: string;\n\n // indices to the Values where the inputs come from.\n readonly inputs: readonly number[];\n\n // indices to the Values where the outpus go to.\n readonly outputs: readonly number[];\n\n // the attributes that used by the operator\n readonly attributes: Attribute;\n }\n\n /**\n * a Transformer is an instance that allows all possible transformation operations that applied to a graph\n */\n export interface Transformer {\n removeAllIdentityNodes(): void;\n removeAllDropoutNodes(): void;\n fuseConvActivationNodes(): void;\n // TODO: add generic functions to manipulate the graph\n }\n\n // an initializer can use transformer to transform the graph\n export interface Initializer {\n transformGraph(transformer: Transformer): void;\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface Graph {\n getInputIndices(): readonly number[];\n getInputNames(): readonly string[];\n getOutputIndices(): readonly number[];\n getOutputNames(): readonly string[];\n getValues(): readonly Graph.Value[];\n getNodes(): readonly Graph.Node[];\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-redeclare\nexport const Graph = {\n /**\n * construct a graph from a graph protobuf type\n */\n from: (graphProto: onnx.IGraphProto|ortFbs.Graph, initializer?: Graph.Initializer) =>\n new GraphImpl(graphProto, initializer),\n};\n\nclass Value implements Graph.Value {\n constructor(valueInfo?: onnx.IValueInfoProto) {\n this._from = undefined;\n this._to = [];\n this.tensor = undefined;\n this.type = undefined;\n\n if (valueInfo) {\n this.type = ProtoUtil.tensorValueTypeFromProto(valueInfo.type!.tensorType!);\n }\n }\n\n _from?: number; // -1 represent from initializer\n get from() {\n return this._from!;\n }\n _to: number[];\n get to() {\n return this._to;\n }\n type?: Graph.ValueType;\n tensor?: Tensor;\n}\n\nclass Node implements Graph.Node {\n constructor(_nodeProto: onnx.INodeProto|ortFbs.Node, name?: string) {\n if (_nodeProto instanceof onnx.NodeProto) {\n this.name = _nodeProto.name;\n this.opType = _nodeProto.opType;\n this.attributes = new Attribute(_nodeProto.attribute);\n } else if (_nodeProto instanceof ortFbs.Node) {\n this.name = name ?? 
_nodeProto.name()!;\n this.opType = _nodeProto.opType()!;\n this.attributes = new Attribute(ProtoUtil.tensorAttributesFromORTFormat(_nodeProto));\n }\n\n this.inputs = [];\n this.outputs = [];\n this.executeNode = true;\n }\n\n name: string;\n opType: string;\n inputs: number[];\n outputs: number[];\n attributes: Attribute;\n executeNode: boolean;\n}\n\nclass GraphImpl implements Graph, Graph.Transformer {\n private _allData: Value[];\n\n private _allInputIndices: number[];\n private _allInputNames: string[];\n\n private _allOutputIndices: number[];\n private _allOutputNames: string[];\n\n private _nodes: Node[];\n\n constructor(graph: onnx.IGraphProto|ortFbs.Graph, graphInitializer?: Graph.Initializer) {\n if (!graph) {\n throw new TypeError('graph is empty');\n }\n\n // build the graph - will throw exceptions if something fatal is detected\n this.buildGraph(graph);\n\n // execute any transformation logic for the graph (if applicable)\n this.transformGraph(graphInitializer);\n\n // check for cycles and other inconsistencies - will throw exceptions if something fatal is detected\n this.checkIsAcyclic();\n }\n\n getInputIndices(): readonly number[] {\n return this._allInputIndices;\n }\n\n getInputNames(): readonly string[] {\n return this._allInputNames;\n }\n\n getOutputIndices(): readonly number[] {\n return this._allOutputIndices;\n }\n\n getOutputNames(): readonly string[] {\n return this._allOutputNames;\n }\n\n getValues(): readonly Graph.Value[] {\n return this._allData;\n }\n\n getNodes(): readonly Graph.Node[] {\n return this._nodes;\n }\n\n private buildGraph(graph: onnx.IGraphProto|ortFbs.Graph) {\n // build the graph - will throw exceptions if something fatal is detected\n if (graph instanceof onnx.GraphProto) {\n this.buildGraphFromOnnxFormat(graph);\n } else if (graph instanceof ortFbs.Graph) {\n this.buildGraphFromOrtFormat(graph);\n } else {\n throw new TypeError('Graph type is not supported.');\n }\n }\n private buildGraphFromOnnxFormat(graph: onnx.IGraphProto) {\n const dataIndices = new Map();\n this._allData = [];\n\n this._allInputIndices = [];\n this._allInputNames = [];\n\n this._allOutputIndices = [];\n this._allOutputNames = [];\n\n this._nodes = [];\n\n const nodesIndices = new Map();\n\n // scan all inputs\n if (!graph.input) {\n throw new Error('missing information in graph: input');\n }\n const inputValueNames = [];\n for (const i of graph.input) {\n if (dataIndices.has(i.name!)) {\n throw new Error(`duplicated input name: ${i.name}`);\n }\n const currentIndex = this._allData.push(new Value(i)) - 1;\n dataIndices.set(i.name!, currentIndex);\n inputValueNames.push(i.name!);\n }\n\n // scan all initializers\n if (!graph.initializer) {\n throw new Error('missing information in graph: initializer');\n }\n for (const i of graph.initializer) {\n let index = dataIndices.get(i.name!);\n if (index === undefined) {\n const value = new Value();\n value.type = {\n shape: {dims: ProtoUtil.tensorDimsFromProto(i.dims!)},\n tensorType: ProtoUtil.tensorDataTypeFromProto(i.dataType!)\n };\n index = this._allData.push(value) - 1;\n dataIndices.set(i.name!, index);\n }\n this._allData[index]._from = -1;\n this._allData[index].tensor = Tensor.fromProto(i);\n }\n\n // filter out input indices\n for (let i = 0; i < this._allData.length; i++) {\n if (!this._allData[i].tensor) {\n this._allInputIndices.push(i);\n this._allInputNames.push(inputValueNames[i]);\n }\n }\n\n // scan all outputs\n if (!graph.output) {\n throw new Error('missing information in graph: output');\n }\n for 
(const i of graph.output) {\n if (dataIndices.has(i.name!)) {\n throw new Error(`duplicated output name: ${i.name}`);\n }\n const currentIndex = this._allData.push(new Value(i)) - 1;\n dataIndices.set(i.name!, currentIndex);\n this._allOutputIndices.push(currentIndex);\n this._allOutputNames.push(i.name!);\n }\n\n // scan all nodes\n if (!graph.node) {\n throw new Error('missing information in graph: node');\n }\n for (const nodeProto of graph.node) {\n if (!nodeProto.name) {\n // assign a name to the node if it doesn't have one\n for (let pick = 0;; pick++) {\n const name = `unnamed_${nodeProto.opType}_${pick}`;\n if (!nodesIndices.has(name)) {\n nodeProto.name = name;\n break;\n }\n }\n }\n\n if (nodesIndices.has(nodeProto.name)) {\n throw new Error(`duplicated node name: ${nodeProto.name}`);\n }\n const currentIndex = this._nodes.push(new Node(nodeProto)) - 1;\n nodesIndices.set(nodeProto.name, currentIndex);\n }\n\n // scan node's outputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.node[i];\n if (!nodeProto.output) {\n throw new Error(`missing output for node: ${nodeProto.name}`);\n }\n for (const output of nodeProto.output) {\n let dataIndex = dataIndices.get(output);\n if (typeof dataIndex === 'undefined') {\n dataIndex = this._allData.push(new Value()) - 1;\n dataIndices.set(output, dataIndex);\n }\n node.outputs.push(dataIndex);\n\n if (this._allData[dataIndex]._from !== undefined) {\n throw new Error(`multiple nodes output to one data value: ${dataIndex}`);\n }\n this._allData[dataIndex]._from = i;\n\n // for the 'Constant' operator, just create a new edge in the graph corresponding to the 'output' of the\n // operator and ignore the node from the graph\n if (nodeProto.opType === 'Constant') {\n if (!nodeProto.attribute || nodeProto.attribute.length !== 1 || !nodeProto.attribute[0].t) {\n throw new Error('missing attributes or missing tensor value in attributes for this Constant operator');\n }\n if (!nodeProto.output || nodeProto.output.length !== 1) {\n throw new Error('missing output or incorrect number of outputs for this Constant operator');\n }\n node.outputs.pop();\n node.executeNode = false;\n\n this._allData[dataIndex]._from = -1;\n this._allData[dataIndex].tensor = Tensor.fromProto(nodeProto.attribute[0].t);\n }\n }\n }\n\n // scan node's inputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.node[i];\n\n if (!nodeProto.input) {\n throw new Error(`missing input for node: ${nodeProto.name}`);\n }\n for (const input of nodeProto.input) {\n const dataIndex = dataIndices.get(input);\n if (typeof dataIndex === 'undefined') {\n // handle exception when opset > 9 and roi / scales not given\n if (input === '' && (nodeProto.input.length === 3 || nodeProto.input.length === 4) &&\n nodeProto.opType === 'Resize') {\n continue;\n }\n throw new Error(`unrecognized input '${input}' for node: ${nodeProto.name}`);\n }\n node.inputs.push(dataIndex);\n\n this._allData[dataIndex]._to.push(i);\n }\n }\n\n return true;\n }\n\n private buildGraphFromOrtFormat(graph: ortFbs.Graph) {\n const dataIndices = new Map();\n this._allData = [];\n\n this._allInputIndices = [];\n this._allInputNames = [];\n\n this._allOutputIndices = [];\n this._allOutputNames = [];\n\n this._nodes = [];\n\n const nodesIndices = new Map();\n\n // scan all inputs\n const inputValueNames = [];\n for (let i = 0; i < graph.inputsLength(); i++) {\n const inputName = graph.inputs(i);\n if 
(dataIndices.has(inputName)) {\n throw new Error(`duplicated input name: ${inputName}`);\n }\n // Find the input typeInfo from nodeargs\n for (let j = 0; j < graph.nodeArgsLength(); j++) {\n if (graph.nodeArgs(j)?.name() === inputName) {\n const value = new Value();\n const valueType = graph.nodeArgs(j)?.type()?.valueType();\n if (valueType !== ortFbs.TypeInfoValue.tensor_type) {\n throw new Error('Unexpected value type for the nodeArg.');\n }\n const valueInfo = graph.nodeArgs(j)!.type()!.value(new ortFbs.TensorTypeAndShape())!;\n const type = ProtoUtil.tensorDataTypeFromProto(valueInfo.elemType());\n const shape = valueInfo.shape()!;\n const dims = [];\n for (let k = 0; k < shape.dimLength()!; k++) {\n dims.push(LongUtil.longToNumber(shape.dim(k)!.value()!.dimValue()!));\n }\n value.type = {shape: {dims}, tensorType: type};\n const currentIndex = this._allData.push(value) - 1;\n dataIndices.set(inputName, currentIndex);\n inputValueNames.push(inputName);\n }\n }\n }\n // check initializers\n for (let i = 0; i < graph.initializersLength(); i++) {\n const initializer = graph.initializers(i)!;\n let index = dataIndices.get(initializer.name()!);\n if (index === undefined) {\n const value = new Value();\n const dims = ProtoUtil.tensorDimsFromORTFormat(initializer);\n const type = ProtoUtil.tensorDataTypeFromProto(initializer.dataType());\n value.type = {shape: {dims}, tensorType: type};\n index = this._allData.push(value) - 1;\n dataIndices.set(initializer.name()!, index);\n }\n this._allData[index]._from = -1;\n this._allData[index].tensor = Tensor.fromOrtTensor(initializer);\n }\n\n // filter out input indices\n for (let i = 0; i < this._allData.length; i++) {\n if (!this._allData[i].tensor) {\n this._allInputIndices.push(i);\n this._allInputNames.push(inputValueNames[i]);\n }\n }\n\n // scan all outputs\n for (let i = 0; i < graph.outputsLength(); i++) {\n const outputName = graph.outputs(i);\n if (dataIndices.has(outputName)) {\n throw new Error(`duplicated output name: ${outputName}`);\n }\n const currentIndex = this._allData.push(new Value()) - 1;\n dataIndices.set(outputName, currentIndex);\n this._allOutputIndices.push(currentIndex);\n this._allOutputNames.push(outputName);\n }\n\n // scan all nodes\n if (!graph.nodes) {\n throw new Error('missing information in graph: node');\n }\n for (let i = 0; i < graph.nodesLength(); i++) {\n const nodeProto = graph.nodes(i);\n let name = nodeProto!.name();\n if (!name) {\n // assign a name to the node if it doesn't have one\n for (let pick = 0;; pick++) {\n name = `unnamed_${nodeProto!.opType()}_${pick}`;\n if (!nodesIndices.has(name)) {\n // an unique name is found. 
break.\n break;\n }\n }\n }\n\n if (nodesIndices.has(name)) {\n throw new Error(`duplicated node name: ${name}`);\n }\n const currentIndex = this._nodes.push(new Node(nodeProto!, name)) - 1;\n nodesIndices.set(name, currentIndex);\n }\n\n // scan node's outputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.nodes(i);\n if (nodeProto == null) {\n throw new Error(`No node exists at index ${i}`);\n }\n if (nodeProto?.outputsLength() === 0) {\n throw new Error(`missing output for node: ${nodeProto.name}`);\n }\n for (let j = 0; j < nodeProto?.outputsLength(); j++) {\n const output = nodeProto?.outputs(j);\n let dataIndex = dataIndices.get(output);\n if (typeof dataIndex === 'undefined') {\n dataIndex = this._allData.push(new Value()) - 1;\n dataIndices.set(output, dataIndex);\n }\n node.outputs.push(dataIndex);\n\n if (this._allData[dataIndex]._from !== undefined) {\n throw new Error(`multiple nodes output to one data value: ${dataIndex}`);\n }\n this._allData[dataIndex]._from = i;\n\n // for the 'Constant' operator, just create a new edge in the graph corresponding to the 'output' of the\n // operator and ignore the node from the graph\n if (nodeProto.opType() === 'Constant') {\n if (nodeProto.attributesLength() !== 1 || !nodeProto.attributes(0)!.t()) {\n throw new Error('missing attributes or missing tensor value in attributes for this Constant operator');\n }\n if (nodeProto.outputsLength() !== 1) {\n throw new Error('missing output or incorrect number of outputs for this Constant operator');\n }\n node.outputs.pop();\n node.executeNode = false;\n\n this._allData[dataIndex]._from = -1;\n this._allData[dataIndex].tensor = Tensor.fromOrtTensor(nodeProto.attributes(0)!.t()!);\n }\n }\n }\n\n // scan node's inputs\n for (let i = 0; i < this._nodes.length; i++) {\n const node = this._nodes[i];\n const nodeProto = graph.nodes(i)!;\n\n if (nodeProto.inputsLength() === 0) {\n throw new Error(`missing input for node: ${nodeProto.name}`);\n }\n for (let j = 0; j < nodeProto.inputsLength()!; j++) {\n const input = nodeProto.inputs(j)!;\n const dataIndex = dataIndices.get(input);\n if (typeof dataIndex === 'undefined') {\n throw new Error(`unrecognized input '${input}' for node: ${nodeProto!.name()}`);\n }\n node.inputs.push(dataIndex);\n\n this._allData[dataIndex]._to.push(i);\n }\n }\n }\n\n private checkIsAcyclic() {\n // go through the graph and check for cycles or other fatal inconsistencies\n const starters: Set = new Set();\n this._allInputIndices.forEach(i => {\n const data = this._allData[i];\n data._to.forEach(j => {\n starters.add(j);\n });\n });\n\n // Iterative DFS to check for cycles\n const nodesStack = Array.from(starters);\n const nodesState = new Array(this._nodes.length).fill('white');\n\n while (nodesStack.length > 0) {\n const nodeIndex = nodesStack.pop()!;\n // this node has now been processed completely. Mark this node 'black' to denote this.\n if (nodesState[nodeIndex] === 'gray') {\n nodesState[nodeIndex] = 'black';\n } else {\n // this node is under processing stage. 
mark this node 'gray' to denote this.\n nodesStack.push(nodeIndex);\n nodesState[nodeIndex] = 'gray';\n\n this._nodes[nodeIndex].outputs.forEach((outgoingEdgeIndex) => {\n const data = this._allData[outgoingEdgeIndex];\n if (typeof data.tensor !== 'undefined') {\n throw new Error('node outputs should not be initialized');\n }\n if (data._from !== nodeIndex) {\n throw new Error('from property of the Value object doesn\\'t match index of Node being processed');\n }\n data._to.forEach((downstreamNodeIndex) => {\n // back edge found - cyclic\n if (nodesState[downstreamNodeIndex] === 'gray') {\n throw new Error('model graph is cyclic');\n }\n // tree edge found - continue processing by adding it to stack\n else if (nodesState[downstreamNodeIndex] === 'white') {\n nodesStack.push(downstreamNodeIndex);\n }\n });\n });\n }\n }\n }\n\n private transformGraph(graphInitializer?: Graph.Initializer): void {\n // apply common transform\n this.removeAllIdentityNodes();\n this.removeAllDropoutNodes();\n this.fuseConvActivationNodes();\n // apply initializer specific transform\n if (graphInitializer) {\n graphInitializer.transformGraph(this);\n }\n\n // finalize graph\n this.finalizeGraph();\n }\n\n /**\n * finalize the graph.\n *\n * this function should be called after all the transformation completed.\n * this function removes all unnecessary nodes and values from the graph\n */\n finalizeGraph() {\n let offset = 0;\n // delete all nodes that are not being executed\n // The graph is represented using these two arrays\n // this._nodes - Array holding the kernels to execute - each entry is a kernel pointing to this._allData\n // this._allData - hold 2 fields - to [] & from - these feileds hold the graph map for inputs and outputs per node\n // newIndices - remapping the graph after reading the flag 'executeNode'\n const newIndices = new Array(this._nodes.length, 0);\n let nodePossition = 0;\n\n for (let i = 0; i < this._nodes.length; i++) {\n // giving new indexes to the nodes based on execution flag\n newIndices[i] = nodePossition;\n if (this._nodes[i].executeNode) {\n if (nodePossition !== i) {\n this._nodes[nodePossition] = this._nodes[i];\n }\n nodePossition++;\n\n } else {\n // delete all output values\n this._nodes[i].outputs.forEach(ind => {\n this._allData[ind]._from = -2;\n });\n }\n }\n\n // removing the unused nodes\n this._nodes.splice(nodePossition, this._nodes.length - nodePossition);\n\n // Updating this._allData according to the new this._nodes\n for (let i = 0; i < this._allData.length; i++) {\n const currentData = this._allData[i];\n if (currentData._from !== undefined && currentData._from !== -1 && currentData._from !== -2) {\n currentData._from = newIndices[currentData._from];\n }\n\n for (let j = 0; j < currentData._to.length; j++) {\n if (currentData._to[j] >= 0) {\n currentData._to[j] = newIndices[currentData._to[j]];\n } else {\n throw new Error('Trying to update a removed node');\n }\n }\n }\n\n offset = 0;\n // delete all values that are not being referenced\n for (let i = 0; i < this._allData.length; i++) {\n // if current value is neither linked to next node, nor an output value, remove it.\n if (this._allData[i].from === -2 && this._allOutputIndices.indexOf(i + offset) === -1) {\n offset++;\n this._allData.splice(i, 1);\n i--;\n continue;\n }\n if (offset > 0) {\n let ind = -1;\n // if current value is neither an input value nor an initializer, find the node it's\n // coming from and update the corresponding node output\n if (this._allData[i].from !== undefined && 
this._allData[i].from !== -1) {\n ind = this._nodes[this._allData[i].from].outputs.indexOf(i + offset);\n if (ind !== -1) {\n this._nodes[this._allData[i].from].outputs[ind] = i;\n }\n } else {\n // if current value is an input value, update its reference in inputIndices\n ind = this._allInputIndices.indexOf(i + offset);\n if (ind !== -1) {\n this._allInputIndices[ind] = i;\n }\n }\n\n // find the node that the current value is linking to and update its input reference\n this._allData[i].to.forEach(node => {\n ind = this._nodes[node].inputs.indexOf(i + offset);\n if (ind !== -1) {\n this._nodes[node].inputs[ind] = i;\n }\n });\n if (this._allData[i].to.length === 0) {\n // if current value is a graph output, update its reference in outputIndices\n ind = this._allOutputIndices.indexOf(i + offset);\n if (ind !== -1) {\n this._allOutputIndices[ind] = i;\n }\n }\n }\n }\n }\n\n /**\n * Delete the specified node. Assume the node has one incoming input and the first output connected to other nodes.\n * An input validation must be done before calling this function.\n * @param nodeIndex The index of node to be deleted\n */\n private deleteNode(nodeIndex: number) {\n const node = this._nodes[nodeIndex];\n if (node.outputs.length > 1) {\n for (let i = 1; i < node.outputs.length; i++) {\n if (this._allData[node.outputs[i]].to.length > 0) {\n throw new Error('Node deletion with more than one output connected to other nodes is not supported. ');\n }\n }\n }\n\n // this node wil not be executed\n node.executeNode = false;\n const inputValueIndex = node.inputs[0];\n const outputValueIndex = node.outputs[0];\n const nodesConsumingOutput = this._allData[outputValueIndex].to;\n\n // remove this node from the to property of the input Value\n for (let i = 0; i < node.inputs.length; i++) {\n const delIndex = this._allData[node.inputs[i]].to.indexOf(nodeIndex);\n // should not happen\n if (delIndex === -1) {\n throw new Error('The Value object doesn\\'t have the current Node in it\\'s \\'to\\' property ');\n }\n this._allData[node.inputs[i]].to.splice(delIndex, 1);\n }\n\n // clear node indices consuming this output Value\n this._allData[outputValueIndex]._to = [];\n\n // if the output of this node is a graph output, adjust the index appropriately\n const index = this._allOutputIndices.indexOf(outputValueIndex);\n if (index !== -1) {\n this._allOutputIndices[index] = inputValueIndex;\n }\n\n // override the inputs for nodes consuming this node's output with the input to this node\n if (nodesConsumingOutput && nodesConsumingOutput.length > 0) {\n for (const nodeIndex of nodesConsumingOutput) {\n const replaceIndex = this._nodes[nodeIndex].inputs.indexOf(outputValueIndex);\n // should not happen\n if (replaceIndex === -1) {\n throw new Error('The Node object doesn\\'t have the output Value in it\\'s \\'inputs\\' property ');\n }\n this._nodes[nodeIndex].inputs[replaceIndex] = inputValueIndex;\n this._allData[inputValueIndex].to.push(nodeIndex);\n }\n }\n }\n\n removeAllDropoutNodes() {\n let nodeIndex = 0;\n for (const node of this._nodes) {\n // weed out 'Dropout' nodes so that no time is wasted in execution\n if (node.opType === 'Dropout') {\n // the node should have exactly 1 input and 1 or 2 outputs\n if (node.inputs.length !== 1) {\n throw new Error('Dropout nodes should only contain one input. 
');\n }\n if (node.outputs.length !== 1 && node.outputs.length !== 2) {\n throw new Error('Dropout nodes should contain either 1 or 2 output(s)');\n }\n // the second output should not be referenced by any other node\n if (node.outputs.length === 2 && this._allData[node.outputs[1]]._to.length !== 0) {\n throw new Error('Dropout nodes\\'s second output should not be referenced by other nodes');\n }\n this.deleteNode(nodeIndex);\n }\n nodeIndex++;\n }\n }\n\n removeAllIdentityNodes() {\n let nodeIndex = 0;\n for (const node of this._nodes) {\n // weed out 'Identity' nodes so that no time is wasted in execution\n if (node.opType === 'Identity') {\n this.deleteNode(nodeIndex);\n }\n nodeIndex++;\n }\n }\n\n isActivation(n: Node): boolean {\n switch (n.opType) {\n // TODO: add other activation methods\n case 'Relu':\n case 'Sigmoid':\n case 'Clip':\n return true;\n default:\n return false;\n }\n }\n\n fuseConvActivationNodes() {\n for (const node of this._nodes) {\n if (node.opType === 'Conv') {\n const next = this._allData[node.outputs[0]]._to;\n if (next.length === 1 && this.isActivation(this._nodes[next[0]])) {\n const child = this._nodes[next[0]];\n if (child.opType === 'Clip') {\n if (child.inputs.length === 1) {\n try {\n node.attributes.set(\n 'activation_params', 'floats',\n [child.attributes.getFloat('min'), child.attributes.getFloat('max')]);\n } catch (e) {\n node.attributes.set('activation_params', 'floats', [MIN_CLIP, MAX_CLIP]);\n }\n } else if (\n child.inputs.length >= 3 && this._allData[child.inputs[1]].tensor !== undefined &&\n this._allData[child.inputs[2]].tensor !== undefined) {\n node.attributes.set('activation_params', 'floats', [\n this._allData[child.inputs[1]].tensor!.floatData[0], this._allData[child.inputs[2]].tensor!.floatData[0]\n ]);\n } else {\n // Skip fusion with clip node since clip min and clip max are not coming from initializer\n continue;\n }\n }\n node.attributes.set('activation', 'string', (child.opType));\n this.deleteNode(next[0]);\n }\n }\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {flatbuffers} from 'flatbuffers';\n\nimport {Graph} from './graph';\nimport {OpSet} from './opset';\nimport {onnxruntime} from './ort-schema/flatbuffers/ort-generated';\nimport {onnx} from './ort-schema/protobuf/onnx';\nimport {LongUtil} from './util';\n\nimport ortFbs = onnxruntime.experimental.fbs;\n\nexport class Model {\n // empty model\n constructor() {}\n\n load(buf: Uint8Array, graphInitializer?: Graph.Initializer, isOrtFormat?: boolean): void {\n let onnxError: Error|undefined;\n if (!isOrtFormat) {\n // isOrtFormat === false || isOrtFormat === undefined\n try {\n this.loadFromOnnxFormat(buf, graphInitializer);\n return;\n } catch (e) {\n if (isOrtFormat !== undefined) {\n throw e;\n }\n onnxError = e;\n }\n }\n\n try {\n this.loadFromOrtFormat(buf, graphInitializer);\n } catch (e) {\n if (isOrtFormat !== undefined) {\n throw e;\n }\n // Tried both formats and failed (when isOrtFormat === undefined)\n throw new Error(`Failed to load model as ONNX format: ${onnxError}\\nas ORT format: ${e}`);\n }\n }\n\n private loadFromOnnxFormat(buf: Uint8Array, graphInitializer?: Graph.Initializer): void {\n const modelProto = onnx.ModelProto.decode(buf);\n const irVersion = LongUtil.longToNumber(modelProto.irVersion);\n if (irVersion < 3) {\n throw new Error('only support ONNX model with IR_VERSION>=3');\n }\n\n this._opsets =\n modelProto.opsetImport.map(i => ({domain: i.domain as string, version: LongUtil.longToNumber(i.version!)}));\n\n this._graph = Graph.from(modelProto.graph!, graphInitializer);\n }\n\n private loadFromOrtFormat(buf: Uint8Array, graphInitializer?: Graph.Initializer): void {\n const fb = new flatbuffers.ByteBuffer(buf);\n const ortModel = ortFbs.InferenceSession.getRootAsInferenceSession(fb).model()!;\n const irVersion = LongUtil.longToNumber(ortModel.irVersion());\n if (irVersion < 3) {\n throw new Error('only support ONNX model with IR_VERSION>=3');\n }\n this._opsets = [];\n for (let i = 0; i < ortModel.opsetImportLength(); i++) {\n const opsetId = ortModel.opsetImport(i)!;\n this._opsets.push({domain: opsetId?.domain() as string, version: LongUtil.longToNumber(opsetId.version()!)});\n }\n\n this._graph = Graph.from(ortModel.graph()!, graphInitializer);\n }\n\n private _graph: Graph;\n get graph(): Graph {\n return this._graph;\n }\n\n private _opsets: OpSet[];\n get opsets(): readonly OpSet[] {\n return this._opsets;\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackend, SessionHandlerType} from './backend';\nimport {ExecutionPlan} from './execution-plan';\nimport {Graph} from './graph';\nimport {Profiler} from './instrument';\nimport {Model} from './model';\nimport {Operator} from './operators';\nimport {Tensor} from './tensor';\n\nexport declare namespace Session {\n export interface Config {\n backendHint?: string;\n profiler?: Profiler.Config;\n }\n\n export interface Context {\n profiler: Readonly;\n graphInputTypes?: Tensor.DataType[];\n graphInputDims?: Array;\n }\n}\n\nexport class Session {\n constructor(config: Session.Config = {}) {\n this._initialized = false;\n this.backendHint = config.backendHint;\n this.profiler = Profiler.create(config.profiler);\n this.context = {profiler: this.profiler, graphInputTypes: [], graphInputDims: []};\n }\n\n get inputNames(): readonly string[] {\n return this._model.graph.getInputNames();\n }\n get outputNames(): readonly string[] {\n return this._model.graph.getOutputNames();\n }\n\n startProfiling() {\n this.profiler.start();\n }\n\n endProfiling() {\n this.profiler.stop();\n }\n\n async loadModel(uri: string): Promise;\n async loadModel(buffer: ArrayBuffer, byteOffset?: number, length?: number): Promise;\n async loadModel(buffer: Uint8Array): Promise;\n async loadModel(arg: string|ArrayBuffer|Uint8Array, byteOffset?: number, length?: number): Promise {\n await this.profiler.event('session', 'Session.loadModel', async () => {\n // resolve backend and session handler\n const backend = await resolveBackend(this.backendHint);\n this.sessionHandler = backend.createSessionHandler(this.context);\n\n this._model = new Model();\n if (typeof arg === 'string') {\n const isOrtFormat = arg.endsWith('.ort');\n if (typeof process !== 'undefined' && process.versions && process.versions.node) {\n // node\n const {readFile} = require('node:fs/promises');\n const buf = await readFile(arg);\n this.initialize(buf, isOrtFormat);\n } else {\n // browser\n const response = await fetch(arg);\n const buf = await response.arrayBuffer();\n this.initialize(new Uint8Array(buf), isOrtFormat);\n }\n } else if (!ArrayBuffer.isView(arg)) {\n // load model from ArrayBuffer\n const arr = new Uint8Array(arg, byteOffset || 0, length || arg.byteLength);\n this.initialize(arr);\n } else {\n // load model from Uint8array\n this.initialize(arg);\n }\n });\n }\n\n private initialize(modelProtoBlob: Uint8Array, isOrtFormat?: boolean): void {\n if (this._initialized) {\n throw new Error('already initialized');\n }\n\n this.profiler.event('session', 'Session.initialize', () => {\n // load graph\n const graphInitializer =\n this.sessionHandler.transformGraph ? 
this.sessionHandler as Graph.Initializer : undefined;\n this._model.load(modelProtoBlob, graphInitializer, isOrtFormat);\n\n // graph is completely initialzied at this stage , let the interested handlers know\n if (this.sessionHandler.onGraphInitialized) {\n this.sessionHandler.onGraphInitialized(this._model.graph);\n }\n // initialize each operator in the graph\n this.initializeOps(this._model.graph);\n\n // instantiate an ExecutionPlan object to be used by the Session object\n this._executionPlan = new ExecutionPlan(this._model.graph, this._ops, this.profiler);\n });\n\n this._initialized = true;\n }\n\n async run(inputs: Map|Tensor[]): Promise> {\n if (!this._initialized) {\n throw new Error('session not initialized yet');\n }\n\n return this.profiler.event('session', 'Session.run', async () => {\n const inputTensors = this.normalizeAndValidateInputs(inputs);\n\n const outputTensors = await this._executionPlan.execute(this.sessionHandler, inputTensors);\n\n return this.createOutput(outputTensors);\n });\n }\n\n private normalizeAndValidateInputs(inputs: Map|Tensor[]): Tensor[] {\n const modelInputNames = this._model.graph.getInputNames();\n\n // normalize inputs\n // inputs: Tensor[]\n if (Array.isArray(inputs)) {\n if (inputs.length !== modelInputNames.length) {\n throw new Error(`incorrect input array length: expected ${modelInputNames.length} but got ${inputs.length}`);\n }\n }\n // convert map to array\n // inputs: Map\n else {\n if (inputs.size !== modelInputNames.length) {\n throw new Error(`incorrect input map size: expected ${modelInputNames.length} but got ${inputs.size}`);\n }\n\n const sortedInputs = new Array(inputs.size);\n let sortedInputsIndex = 0;\n for (let i = 0; i < modelInputNames.length; ++i) {\n const tensor = inputs.get(modelInputNames[i]);\n if (!tensor) {\n throw new Error(`missing input tensor for: '${name}'`);\n }\n sortedInputs[sortedInputsIndex++] = tensor;\n }\n\n inputs = sortedInputs;\n }\n\n // validate dims requirements\n // First session run - graph input data is not cached for the session\n if (!this.context.graphInputTypes || this.context.graphInputTypes.length === 0 || !this.context.graphInputDims ||\n this.context.graphInputDims.length === 0) {\n const modelInputIndices = this._model.graph.getInputIndices();\n const modelValues = this._model.graph.getValues();\n\n const graphInputDims = new Array(modelInputIndices.length);\n\n for (let i = 0; i < modelInputIndices.length; ++i) {\n const graphInput = modelValues[modelInputIndices[i]];\n graphInputDims[i] = graphInput.type!.shape.dims;\n\n // cached for second and subsequent runs.\n // Some parts of the framework works on the assumption that the graph and types and shapes are static\n this.context.graphInputTypes!.push(graphInput.type!.tensorType);\n this.context.graphInputDims!.push(inputs[i].dims);\n }\n\n this.validateInputTensorDims(graphInputDims, inputs, true);\n }\n\n // Second and subsequent session runs - graph input data is cached for the session\n else {\n this.validateInputTensorDims(this.context.graphInputDims, inputs, false);\n }\n\n // validate types requirement\n this.validateInputTensorTypes(this.context.graphInputTypes!, inputs);\n\n return inputs;\n }\n\n private validateInputTensorTypes(graphInputTypes: Tensor.DataType[], givenInputs: Tensor[]) {\n for (let i = 0; i < givenInputs.length; i++) {\n const expectedType = graphInputTypes[i];\n const actualType = givenInputs[i].type;\n if (expectedType !== actualType) {\n throw new Error(`input tensor[${i}] check failed: expected type 
'${expectedType}' but got ${actualType}`);\n }\n }\n }\n\n private validateInputTensorDims(\n graphInputDims: Array, givenInputs: Tensor[], noneDimSupported: boolean) {\n for (let i = 0; i < givenInputs.length; i++) {\n const expectedDims = graphInputDims[i];\n const actualDims = givenInputs[i].dims;\n if (!this.compareTensorDims(expectedDims, actualDims, noneDimSupported)) {\n throw new Error(`input tensor[${i}] check failed: expected shape '[${expectedDims.join(',')}]' but got [${\n actualDims.join(',')}]`);\n }\n }\n }\n\n private compareTensorDims(expectedDims: readonly number[], actualDims: readonly number[], noneDimSupported: boolean):\n boolean {\n if (expectedDims.length !== actualDims.length) {\n return false;\n }\n\n for (let i = 0; i < expectedDims.length; ++i) {\n if (expectedDims[i] !== actualDims[i] && (!noneDimSupported || expectedDims[i] !== 0)) {\n // data shape mis-match AND not a 'None' dimension.\n return false;\n }\n }\n\n return true;\n }\n\n private createOutput(outputTensors: Tensor[]): Map {\n const modelOutputNames = this._model.graph.getOutputNames();\n if (outputTensors.length !== modelOutputNames.length) {\n throw new Error('expected number of outputs do not match number of generated outputs');\n }\n\n const output = new Map();\n for (let i = 0; i < modelOutputNames.length; ++i) {\n output.set(modelOutputNames[i], outputTensors[i]);\n }\n\n return output;\n }\n\n private initializeOps(graph: Graph): void {\n const nodes = graph.getNodes();\n this._ops = new Array(nodes.length);\n\n for (let i = 0; i < nodes.length; i++) {\n this._ops[i] = this.sessionHandler.resolve(nodes[i], this._model.opsets, graph);\n }\n }\n\n private _model: Model;\n private _initialized: boolean;\n\n private _ops: Operator[];\n private _executionPlan: ExecutionPlan;\n\n private backendHint?: string;\n\n private sessionHandler: SessionHandlerType;\n private context: Session.Context;\n private profiler: Readonly;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor} from 'onnxruntime-common';\n\nimport {Session} from './session';\nimport {Tensor as OnnxjsTensor} from './tensor';\n\nexport class OnnxjsSessionHandler implements InferenceSessionHandler {\n constructor(private session: Session) {\n this.inputNames = this.session.inputNames;\n this.outputNames = this.session.outputNames;\n }\n\n async dispose(): Promise {}\n inputNames: readonly string[];\n outputNames: readonly string[];\n async run(\n feeds: SessionHandler.FeedsType, _fetches: SessionHandler.FetchesType,\n _options: InferenceSession.RunOptions): Promise {\n const inputMap = new Map();\n for (const name in feeds) {\n if (Object.hasOwnProperty.call(feeds, name)) {\n const feed = feeds[name];\n inputMap.set(\n name,\n new OnnxjsTensor(\n feed.dims, feed.type as OnnxjsTensor.DataType, undefined, undefined,\n feed.data as OnnxjsTensor.NumberType));\n }\n }\n const outputMap = await this.session.run(inputMap);\n const output: SessionHandler.ReturnType = {};\n outputMap.forEach((tensor, name) => {\n output[name] = new Tensor(tensor.type, tensor.data, tensor.dims);\n });\n return output;\n }\n startProfiling(): void {\n this.session.startProfiling();\n }\n endProfiling(): void {\n this.session.endProfiling();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
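// Illustrative sketch: compareTensorDims above tolerates a shape mismatch only on the first
// run, and only where the graph declares that dimension as 0 (a 'None'/symbolic dimension);
// the shapes cached during the first run are enforced strictly afterwards. A standalone
// restatement of that rule:
function dimsMatch(expected: readonly number[], actual: readonly number[], noneDimSupported: boolean): boolean {
  if (expected.length !== actual.length) {
    return false;
  }
  for (let i = 0; i < expected.length; ++i) {
    // a mismatch is tolerated only when 'None' dims are allowed and the graph dim is 0
    if (expected[i] !== actual[i] && (!noneDimSupported || expected[i] !== 0)) {
      return false;
    }
  }
  return true;
}

// e.g. a graph input declared as [0, 3, 224, 224] (dynamic batch) accepts [8, 3, 224, 224]
// while 'None' dims are allowed, but not once strict checking applies:
console.assert(dimsMatch([0, 3, 224, 224], [8, 3, 224, 224], true));
console.assert(!dimsMatch([0, 3, 224, 224], [8, 3, 224, 224], false));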
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable import/no-internal-modules */\nimport {Backend, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {Session} from './onnxjs/session';\nimport {OnnxjsSessionHandler} from './onnxjs/session-handler-inference';\n\nclass OnnxjsBackend implements Backend {\n // eslint-disable-next-line @typescript-eslint/no-empty-function\n async init(): Promise {}\n\n async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise {\n // NOTE: Session.Config(from onnx.js) is not compatible with InferenceSession.SessionOptions(from\n // onnxruntime-common).\n // In future we should remove Session.Config and use InferenceSession.SessionOptions.\n // Currently we allow this to happen to make test runner work.\n const session = new Session(options as unknown as Session.Config);\n\n // typescript cannot merge method override correctly (so far in 4.2.3). need if-else to call the method.\n if (typeof pathOrBuffer === 'string') {\n await session.loadModel(pathOrBuffer);\n } else {\n await session.loadModel(pathOrBuffer);\n }\n\n return new OnnxjsSessionHandler(session);\n }\n}\n\nexport const onnxjsBackend = new OnnxjsBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nexport const isNode = !!(typeof process !== 'undefined' && process.versions && process.versions.node);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/// \n\n//\n// * type hack for \"HTMLImageElement\"\n//\n// in typescript, the type of \"HTMLImageElement\" is defined in lib.dom.d.ts, which is conflict with lib.webworker.d.ts.\n// when we use webworker, the lib.webworker.d.ts will be used, which does not have HTMLImageElement defined.\n//\n// we will get the following errors complaining that HTMLImageElement is not defined:\n//\n// ====================================================================================================================\n//\n// ../common/dist/cjs/tensor-factory.d.ts:187:29 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 187 fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n// Promise | TypedTensor<'uint8'>>;\n// ~~~~~~~~~~~~~~~~\n//\n// node_modules/@webgpu/types/dist/index.d.ts:83:7 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 83 | HTMLImageElement\n// ~~~~~~~~~~~~~~~~\n//\n// ====================================================================================================================\n//\n// `HTMLImageElement` is only used in type declaration and not in real code. So we define it as `unknown` here to\n// bypass the type check.\n\n//\n// * type hack for \"document\"\n//\n// in typescript, the type of \"document\" is defined in lib.dom.d.ts, so it's not available in webworker.\n//\n// we will get the following errors complaining that document is not defined:\n//\n// ====================================================================================================================\n//\n// lib/wasm/wasm-utils-import.ts:7:33 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? 
(document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:61 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:88 - error TS2552: Cannot find name 'HTMLScriptElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~~~~~~~~~~\n// ====================================================================================================================\n//\n// `document` is used to get the current script URL, which is not available in webworker. This file is served as a\n// \"dual\" file for entries of both webworker and the esm module.\n//\ndeclare global {\n type HTMLImageElement = unknown;\n type HTMLScriptElement = {src?: string};\n const document: undefined|{currentScript?: HTMLScriptElement};\n}\n\n/**\n * @summary\n *\n * This file is served as a \"dual\" file for both entries of the following:\n * - The proxy worker itself.\n * - When used as a worker, it listens to the messages from the main thread and performs the corresponding operations.\n * - Should be imported directly using `new Worker()` in the main thread.\n *\n * - The ESM module that creates the proxy worker (as a worker launcher).\n * - When used as a worker launcher, it creates the proxy worker and returns it.\n * - Should be imported using `import()` in the main thread, with the query parameter `import=1`.\n *\n * This file will be always compiling into ESM format.\n */\n\nimport type {OrtWasmMessage, SerializableTensorMetadata} from '../proxy-messages.js';\nimport {createSession, copyFromExternalBuffer, endProfiling, extractTransferableBuffers, initEp, initRuntime, releaseSession, run} from '../wasm-core-impl.js';\nimport {initializeWebAssembly} from '../wasm-factory.js';\nimport {scriptSrc} from '../wasm-utils-import.js';\n\nconst WORKER_NAME = 'ort-wasm-proxy-worker';\nconst isProxyWorker = globalThis.self?.name === WORKER_NAME;\n\nif (isProxyWorker) {\n // Worker thread\n self.onmessage = (ev: MessageEvent): void => {\n const {type, in : message} = ev.data;\n try {\n switch (type) {\n case 'init-wasm':\n initializeWebAssembly(message!.wasm)\n .then(\n () => {\n initRuntime(message!).then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n },\n err => {\n postMessage({type, err});\n });\n break;\n case 'init-ep': {\n const {epName, env} = message!;\n initEp(env, epName)\n .then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'copy-from': {\n const {buffer} = message!;\n const bufferData = copyFromExternalBuffer(buffer);\n postMessage({type, out: bufferData} as OrtWasmMessage);\n break;\n }\n case 'create': {\n const {model, options} = message!;\n createSession(model, options)\n .then(\n sessionMetadata => {\n postMessage({type, out: sessionMetadata} as OrtWasmMessage);\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'release':\n releaseSession(message!);\n postMessage({type});\n break;\n case 'run': {\n const {sessionId, inputIndices, inputs, outputIndices, options} = message!;\n run(sessionId, inputIndices, inputs, outputIndices, new Array(outputIndices.length).fill(null), 
options)\n .then(\n outputs => {\n if (outputs.some(o => o[3] !== 'cpu')) {\n postMessage({type, err: 'Proxy does not support non-cpu tensor location.'});\n } else {\n postMessage(\n {type, out: outputs} as OrtWasmMessage,\n extractTransferableBuffers([...inputs, ...outputs] as SerializableTensorMetadata[]));\n }\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'end-profiling':\n endProfiling(message!);\n postMessage({type});\n break;\n default:\n }\n } catch (err) {\n postMessage({type, err} as OrtWasmMessage);\n }\n };\n}\n\nexport default isProxyWorker ?\n null :\n (urlOverride?: string) =>\n new Worker(urlOverride ?? scriptSrc!, {type: BUILD_DEFS.IS_ESM ? 'module' : 'classic', name: WORKER_NAME});\n", "var e,r=(e=import.meta.url,async function(r={}){function t(){return x.buffer!=R.buffer&&L(),R}function n(){return x.buffer!=R.buffer&&L(),H}function a(){return x.buffer!=R.buffer&&L(),D}function o(){return x.buffer!=R.buffer&&L(),F}function i(){return x.buffer!=R.buffer&&L(),P}function s(){return x.buffer!=R.buffer&&L(),B}function u(){return x.buffer!=R.buffer&&L(),I}function f(){return x.buffer!=R.buffer&&L(),$}var l,c,d=Object.assign({},r),b=new Promise(((e,r)=>{l=e,c=r})),m=\"object\"==typeof window,p=\"function\"==typeof importScripts,h=p&&\"em-pthread\"==self.name;d.mountExternalData=(e,r)=>{(d.Fb||(d.Fb=new Map)).set(e,r)},d.unmountExternalData=()=>{delete d.Fb};var g=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let v=()=>{const e=(e,r,t)=>(...n)=>{const a=zr,o=r?.();n=e(...n);const i=r?.();return o!==i&&(e=i,t(o),r=t=null),zr!=a?new Promise(((e,r)=>{Qr={resolve:e,reject:r}})):n},r=e=>async(...r)=>{try{if(d.Eb)throw Error(\"Session already started\");const t=d.Eb={bc:r[0],errors:[]},n=await e(...r);if(d.Eb!==t)throw Error(\"Session mismatch\");d.Mb?.flush();const a=t.errors;if(0e)),0d._OrtCreateSession),(e=>d._OrtCreateSession=e)),d._OrtRun=r(e(d._OrtRun,(()=>d._OrtRun),(e=>d._OrtRun=e))),d._OrtRunWithBinding=r(e(d._OrtRunWithBinding,(()=>d._OrtRunWithBinding),(e=>d._OrtRunWithBinding=e))),d._OrtBindInput=e(d._OrtBindInput,(()=>d._OrtBindInput),(e=>d._OrtBindInput=e)),v=void 0};d.jsepInit=(e,r)=>{if(v?.(),\"webgpu\"===e){[d.Mb,d.Tb,d.Xb,d.Nb,d.Wb,d.jb,d.Yb,d.$b,d.Ub,d.Vb,d.Zb]=r;const e=d.Mb;d.jsepRegisterBuffer=(r,t,n,a)=>e.registerBuffer(r,t,n,a),d.jsepGetBuffer=r=>e.getBuffer(r),d.jsepCreateDownloader=(r,t,n)=>e.createDownloader(r,t,n),d.jsepOnReleaseSession=r=>{e.onReleaseSession(r)},d.jsepOnRunStart=r=>e.onRunStart(r)}};var y,w,A=Object.assign({},d),_=\"./this.program\",C=(e,r)=>{throw r},O=\"\";(m||p)&&(p?O=self.location.href:\"undefined\"!=typeof document&&document.currentScript&&(O=document.currentScript.src),e&&(O=e),O=O.startsWith(\"blob:\")?\"\":O.substr(0,O.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1),e=>{var r=new XMLHttpRequest;return r.open(\"GET\",e,!1),r.send(null),r.responseText},p&&(w=e=>{var r=new XMLHttpRequest;return r.open(\"GET\",e,!1),r.responseType=\"arraybuffer\",r.send(null),new Uint8Array(r.response)}),y=(e,r,t)=>{var n=new XMLHttpRequest;n.open(\"GET\",e,!0),n.responseType=\"arraybuffer\",n.onload=()=>{200==n.status||0==n.status&&n.response?r(n.response):t()},n.onerror=t,n.send(null)});var j=console.log.bind(console),T=console.error.bind(console),S=j,W=T;if(Object.assign(d,A),A=null,h){var E,M=!1;function Pn(e){try{var r=e.data,t=r.cmd;if(\"load\"===t){let e=[];self.onmessage=r=>e.push(r),self.startWorker=()=>{postMessage({cmd:\"loaded\"});for(let r of 
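// Illustrative sketch: the proxy worker above implements a request/response protocol where
// the main thread posts {type, in} messages and the worker replies with {type, out?, err?}.
// The helper below shows one possible main-thread round trip; the ProxyMessage shape and
// the reply-matching by 'type' are simplifying assumptions, not the exact wiring used by
// the wasm proxy implementation.
type ProxyMessage = {type: string; in?: unknown; out?: unknown; err?: unknown};

function postAndWait(worker: Worker, message: ProxyMessage): Promise<unknown> {
  return new Promise((resolve, reject) => {
    const onMessage = (ev: MessageEvent<ProxyMessage>) => {
      if (ev.data.type !== message.type) {
        return;  // not the reply for this request
      }
      worker.removeEventListener('message', onMessage);
      if (ev.data.err) {
        reject(ev.data.err);
      } else {
        resolve(ev.data.out);
      }
    };
    worker.addEventListener('message', onMessage);
    worker.postMessage(message);
  });
}

// usage, assuming `proxyWorker` was created by the factory exported above:
//   await postAndWait(proxyWorker, {type: 'init-ep', in: {epName: 'wasm', env: {/* ort env */}}});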
e)Pn(r);self.onmessage=Pn};for(const e of r.handlers)d[e]&&!d[e].proxy||(d[e]=(...r)=>{postMessage({Lb:\"callHandler\",kc:e,args:r})},\"print\"==e&&(S=d[e]),\"printErr\"==e&&(W=d[e]));x=r.wasmMemory,L(),E(r.wasmModule)}else if(\"run\"===t){_n(r.pthread_ptr,0,0,1,0,0),Rr(r.pthread_ptr),Te(),_e(),M||(gn(),M=!0);try{Se(r.start_routine,r.arg)}catch(e){if(\"unwind\"!=e)throw e}}else\"cancel\"===t?yn()&&Tn(-1):\"setimmediate\"!==r.target&&(\"checkMailbox\"===t?M&&Hr():t&&(W(`worker: received unknown command ${t}`),W(r)))}catch(e){throw Cn(),e}}W=function(...e){e=e.join(\" \"),console.error(e)},self.alert=function(...e){postMessage({Lb:\"alert\",text:e.join(\" \"),mc:yn()})},d.instantiateWasm=(e,r)=>new Promise((e=>{E=t=>{t=new WebAssembly.Instance(t,oe()),r(t),e()}})),self.onunhandledrejection=e=>{throw e.reason||e},self.onmessage=Pn}var x,N,k,R,H,D,F,P,B,I,U,G,$,Y=!1;function L(){var e=x.buffer;d.HEAP8=R=new Int8Array(e),d.HEAP16=D=new Int16Array(e),d.HEAPU8=H=new Uint8Array(e),d.HEAPU16=F=new Uint16Array(e),d.HEAP32=P=new Int32Array(e),d.HEAPU32=B=new Uint32Array(e),d.HEAPF32=I=new Float32Array(e),d.HEAPF64=$=new Float64Array(e),d.HEAP64=U=new BigInt64Array(e),d.HEAPU64=G=new BigUint64Array(e)}if(!h){if(d.wasmMemory)x=d.wasmMemory;else if(!((x=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof g))throw W(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\"),Error(\"bad memory\");L()}var z=[],V=[],q=[],J=0,X=null,K=null;function Q(){if(0==--J&&(null!==X&&(clearInterval(X),X=null),K)){var e=K;K=null,e()}}function Z(e){throw W(e=\"Aborted(\"+e+\")\"),Y=!0,k=1,e=new WebAssembly.RuntimeError(e+\". 
Build with -sASSERTIONS for more info.\"),c(e),e}var ee,re=e=>e.startsWith(\"data:application/octet-stream;base64,\"),te=e=>e.startsWith(\"file://\");function ne(e){if(w)return w(e);throw\"both async and sync fetching of the wasm failed\"}function ae(e,r,t){return function(e){if(m||p){if(\"function\"==typeof fetch&&!te(e))return fetch(e,{credentials:\"same-origin\"}).then((r=>{if(!r.ok)throw`failed to load wasm binary file at '${e}'`;return r.arrayBuffer()})).catch((()=>ne(e)));if(y)return new Promise(((r,t)=>{y(e,(e=>r(new Uint8Array(e))),t)}))}return Promise.resolve().then((()=>ne(e)))}(e).then((e=>WebAssembly.instantiate(e,r))).then(t,(e=>{W(`failed to asynchronously prepare wasm: ${e}`),Z(e)}))}function oe(){return{a:{M:ue,za:se,b:Ee,$:xe,z:He,pa:De,X:Ie,Z:Ue,qa:Ge,na:$e,ga:Ye,ma:Le,J:ze,Y:Ve,V:qe,oa:Je,W:Xe,va:Ze,D:ir,P:ur,O:hr,C:vr,s:yr,p:wr,E:Ar,y:Er,Q:Mr,ta:xr,ja:Nr,T:Dr,aa:Pr,F:Br,ia:Rr,sa:Ir,u:$r,B:rt,o:nt,k:it,c:cr,n:ut,j:dt,Aa:bt,r:mt,d:pt,v:ht,m:gt,g:vt,l:yt,i:wt,h:At,e:_t,da:Ct,ea:St,fa:Wt,ba:Et,ca:Mt,S:xt,f:Rt,N:Ht,G:Dt,K:Ft,w:Pt,ra:It,U:Ut,t:Bt,x:Gt,L:$t,R:Yt,ya:qt,xa:Jt,ka:Zt,la:en,_:he,A:rn,I:tn,ha:nn,H:on,a:x,wa:me,ua:ln,q:cn}}}var ie={1337716:(e,r,t,a)=>{if(void 0===d||!d.Fb)return 1;if((e=Re(e>>>0)).startsWith(\"./\")&&(e=e.substring(2)),!(e=d.Fb.get(e)))return 2;if(a>>>=0,(r>>>=0)+(t>>>=0)>e.byteLength)return 3;try{return n().set(e.subarray(r,r+t),a>>>0),0}catch{return 4}},1338217:()=>{d.Ub()},1338248:()=>{d.Vb()},1338277:()=>{d.Zb()},1338302:e=>d.Tb(e),1338335:e=>d.Xb(e),1338367:(e,r,t)=>{d.Nb(e,r,t,!0)},1338406:(e,r,t)=>{d.Nb(e,r,t)},1338439:()=>\"undefined\"!=typeof wasmOffsetConverter,1338496:e=>{d.jb(\"Abs\",e,void 0)},1338547:e=>{d.jb(\"Neg\",e,void 0)},1338598:e=>{d.jb(\"Floor\",e,void 0)},1338651:e=>{d.jb(\"Ceil\",e,void 0)},1338703:e=>{d.jb(\"Reciprocal\",e,void 0)},1338761:e=>{d.jb(\"Sqrt\",e,void 0)},1338813:e=>{d.jb(\"Exp\",e,void 0)},1338864:e=>{d.jb(\"Erf\",e,void 0)},1338915:e=>{d.jb(\"Sigmoid\",e,void 0)},1338970:(e,r,t)=>{d.jb(\"HardSigmoid\",e,{alpha:r,beta:t})},1339049:e=>{d.jb(\"Log\",e,void 0)},1339100:e=>{d.jb(\"Sin\",e,void 0)},1339151:e=>{d.jb(\"Cos\",e,void 0)},1339202:e=>{d.jb(\"Tan\",e,void 0)},1339253:e=>{d.jb(\"Asin\",e,void 0)},1339305:e=>{d.jb(\"Acos\",e,void 0)},1339357:e=>{d.jb(\"Atan\",e,void 0)},1339409:e=>{d.jb(\"Sinh\",e,void 0)},1339461:e=>{d.jb(\"Cosh\",e,void 0)},1339513:e=>{d.jb(\"Asinh\",e,void 0)},1339566:e=>{d.jb(\"Acosh\",e,void 0)},1339619:e=>{d.jb(\"Atanh\",e,void 0)},1339672:e=>{d.jb(\"Tanh\",e,void 0)},1339724:e=>{d.jb(\"Not\",e,void 0)},1339775:(e,r,t)=>{d.jb(\"Clip\",e,{min:r,max:t})},1339844:e=>{d.jb(\"Clip\",e,void 0)},1339896:(e,r)=>{d.jb(\"Elu\",e,{alpha:r})},1339954:e=>{d.jb(\"Relu\",e,void 0)},1340006:(e,r)=>{d.jb(\"LeakyRelu\",e,{alpha:r})},1340070:(e,r)=>{d.jb(\"ThresholdedRelu\",e,{alpha:r})},1340140:(e,r)=>{d.jb(\"Cast\",e,{to:r})},1340198:e=>{d.jb(\"Add\",e,void 0)},1340249:e=>{d.jb(\"Sub\",e,void 0)},1340300:e=>{d.jb(\"Mul\",e,void 0)},1340351:e=>{d.jb(\"Div\",e,void 0)},1340402:e=>{d.jb(\"Pow\",e,void 0)},1340453:e=>{d.jb(\"Equal\",e,void 0)},1340506:e=>{d.jb(\"Greater\",e,void 0)},1340561:e=>{d.jb(\"GreaterOrEqual\",e,void 0)},1340623:e=>{d.jb(\"Less\",e,void 0)},1340675:e=>{d.jb(\"LessOrEqual\",e,void 
0)},1340734:(e,r,t,n,a)=>{d.jb(\"ReduceMean\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340893:(e,r,t,n,a)=>{d.jb(\"ReduceMax\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341051:(e,r,t,n,a)=>{d.jb(\"ReduceMin\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341209:(e,r,t,n,a)=>{d.jb(\"ReduceProd\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341368:(e,r,t,n,a)=>{d.jb(\"ReduceSum\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341526:(e,r,t,n,a)=>{d.jb(\"ReduceL1\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341683:(e,r,t,n,a)=>{d.jb(\"ReduceL2\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341840:(e,r,t,n,a)=>{d.jb(\"ReduceLogSum\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1342001:(e,r,t,n,a)=>{d.jb(\"ReduceSumSquare\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1342165:(e,r,t,n,a)=>{d.jb(\"ReduceLogSumExp\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1342329:e=>{d.jb(\"Where\",e,void 0)},1342382:(e,r,t)=>{d.jb(\"Transpose\",e,{perm:r?Array.from(i().subarray(r>>>0,t>>>0)):[]})},1342490:(e,r,t,n)=>{d.jb(\"DepthToSpace\",e,{blocksize:r,mode:Re(t),format:n?\"NHWC\":\"NCHW\"})},1342623:(e,r,t,n)=>{d.jb(\"DepthToSpace\",e,{blocksize:r,mode:Re(t),format:n?\"NHWC\":\"NCHW\"})},1342756:(e,r,n,a,o,s,u,f,l,c,b,m,p,h,g)=>{d.jb(\"ConvTranspose\",e,{format:l?\"NHWC\":\"NCHW\",autoPad:r,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,m>>>0)):[],outputShape:p?Array.from(i().subarray(p>>>0,h>>>0)):[],activation:Re(g)})},1343157:(e,r,n,a,o,s,u,f,l,c,b,m,p,h)=>{d.jb(\"ConvTranspose\",e,{format:f?\"NHWC\":\"NCHW\",autoPad:r,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Re(h)})},1343722:(e,r,n,a,o,s,u,f,l,c,b,m,p,h,g)=>{d.jb(\"ConvTranspose\",e,{format:l?\"NHWC\":\"NCHW\",autoPad:r,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,m>>>0)):[],outputShape:p?Array.from(i().subarray(p>>>0,h>>>0)):[],activation:Re(g)})},1344123:(e,r,n,a,o,s,u,f,l,c,b,m,p,h)=>{d.jb(\"ConvTranspose\",e,{format:f?\"NHWC\":\"NCHW\",autoPad:r,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Re(h)})},1344688:(e,r)=>{d.jb(\"GlobalAveragePool\",e,{format:r?\"NHWC\":\"NCHW\"})},1344779:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"AveragePool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides
:[m,p]})},1345063:(e,r)=>{d.jb(\"GlobalAveragePool\",e,{format:r?\"NHWC\":\"NCHW\"})},1345154:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"AveragePool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1345438:(e,r)=>{d.jb(\"GlobalMaxPool\",e,{format:r?\"NHWC\":\"NCHW\"})},1345525:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"MaxPool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1345805:(e,r)=>{d.jb(\"GlobalMaxPool\",e,{format:r?\"NHWC\":\"NCHW\"})},1345892:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"MaxPool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1346172:(e,r,t,n,a)=>{d.jb(\"Gemm\",e,{alpha:r,beta:t,transA:n,transB:a})},1346276:e=>{d.jb(\"MatMul\",e,void 0)},1346330:(e,r,t,n)=>{d.jb(\"ArgMax\",e,{keepDims:!!r,selectLastIndex:!!t,axis:n})},1346438:(e,r,t,n)=>{d.jb(\"ArgMin\",e,{keepDims:!!r,selectLastIndex:!!t,axis:n})},1346546:(e,r)=>{d.jb(\"Softmax\",e,{axis:r})},1346609:(e,r)=>{d.jb(\"Concat\",e,{axis:r})},1346669:(e,r,t,n,a)=>{d.jb(\"Split\",e,{axis:r,numOutputs:t,splitSizes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1346809:e=>{d.jb(\"Expand\",e,void 0)},1346863:(e,r)=>{d.jb(\"Gather\",e,{axis:Number(r)})},1346934:(e,r)=>{d.jb(\"GatherElements\",e,{axis:Number(r)})},1347013:(e,r,t,n,a,o,s,u,f,l,c)=>{d.jb(\"Resize\",e,{antialias:r,axes:t?Array.from(i().subarray(t>>>0,n>>>0)):[],coordinateTransformMode:Re(a),cubicCoeffA:o,excludeOutside:s,extrapolationValue:u,keepAspectRatioPolicy:Re(f),mode:Re(l),nearestMode:Re(c)})},1347359:(e,r,t,n,a,o,s)=>{d.jb(\"Slice\",e,{starts:r?Array.from(i().subarray(r>>>0,t>>>0)):[],ends:n?Array.from(i().subarray(n>>>0,a>>>0)):[],axes:o?Array.from(i().subarray(o>>>0,s>>>0)):[]})},1347575:e=>{d.jb(\"Tile\",e,void 0)},1347627:(e,r,t)=>{d.jb(\"InstanceNormalization\",e,{epsilon:r,format:t?\"NHWC\":\"NCHW\"})},1347741:(e,r,t)=>{d.jb(\"InstanceNormalization\",e,{epsilon:r,format:t?\"NHWC\":\"NCHW\"})},1347855:e=>{d.jb(\"Range\",e,void 0)},1347908:(e,r)=>{d.jb(\"Einsum\",e,{equation:Re(r)})},1347989:(e,r,t,n,a)=>{d.jb(\"Pad\",e,{mode:r,value:t,pads:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1348116:(e,r,t,n,a,o)=>{d.jb(\"BatchNormalization\",e,{epsilon:r,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1348285:(e,r,t,n,a,o)=>{d.jb(\"BatchNormalization\",e,{epsilon:r,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1348454:(e,r,t)=>{d.jb(\"CumSum\",e,{exclusive:Number(r),reverse:Number(t)})},1348551:(e,r,t,n,a,o,s,u,f)=>{d.jb(\"Attention\",e,{numHeads:r,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o,qkvHiddenSizes:s?Array.from(i().subarray(Number(u)>>>0,Number(u)+s>>>0)):[],pastPresentShareBuffer:!!f})},1348823:e=>{d.jb(\"BiasAdd\",e,void 0)},1348878:e=>{d.jb(\"BiasSplitGelu\",e,void 0)},1348939:e=>{d.jb(\"FastGelu\",e,void 0)},1348995:(e,r,n,a,o,s,f,l,c,b,m,p,h,g,v,y)=>{d.jb(\"Conv\",e,{format:p?\"NHWC\":\"NCHW\",auto_pad:r,dilations:n?Array.from(i().subarray(n>>>0,a>>>0)):[],group:o,kernel_shape:s?Array.from(i().subarray(s>>>0,f>>>0)):[],pads:l?Array.from(i().subarray(l>>>0,c>>>0)):[],strides:b?Array.from(i().subarray(b>>>0,m>>>0)):[],w_is_const:()=>!!t()[h>>>0],activation:Re(g),activation_params:v?Array.from(u().subarray(v>>>0,y>>>0)):[]})},1349491:e=>{d.jb(\"Gelu\",e,void 
0)},1349543:(e,r,t,n)=>{d.jb(\"GroupQueryAttention\",e,{numHeads:r,kvNumHeads:t,scale:n})},1349656:(e,r,t,n)=>{d.jb(\"LayerNormalization\",e,{axis:r,epsilon:t,simplified:!!n})},1349767:(e,r,t,n)=>{d.jb(\"LayerNormalization\",e,{axis:r,epsilon:t,simplified:!!n})},1349878:(e,r,t,n,a,o)=>{d.jb(\"MatMulNBits\",e,{k:r,n:t,accuracyLevel:n,bits:a,blockSize:o})},1350005:(e,r,t,n,a,o)=>{d.jb(\"MultiHeadAttention\",e,{numHeads:r,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o})},1350164:(e,r)=>{d.jb(\"QuickGelu\",e,{alpha:r})},1350228:(e,r,t,n,a)=>{d.jb(\"RotaryEmbedding\",e,{interleaved:!!r,numHeads:t,rotaryEmbeddingDim:n,scale:a})},1350367:(e,r,t)=>{d.jb(\"SkipLayerNormalization\",e,{epsilon:r,simplified:!!t})},1350469:e=>{d.Yb(e)},1350503:(e,r)=>d.$b(e,r,d.Eb.bc,d.Eb.errors),1350615:(e,r,t)=>{d.jb(\"SkipLayerNormalization\",e,{epsilon:r,simplified:!!t})}};function se(e,r,t){return et((async()=>{await d.Wb(e,r,t)}))}function ue(){return\"undefined\"!=typeof wasmOffsetConverter}function fe(e){this.name=\"ExitStatus\",this.message=`Program terminated with exit(${e})`,this.status=e}var le=e=>{e.terminate(),e.onmessage=()=>{}},ce=e=>{0==ge.length&&(Oe(),Ce(ge[0]));var r=ge.pop();if(!r)return 6;ve.push(r),we[e.Ab]=r,r.Ab=e.Ab;var t={cmd:\"run\",start_routine:e.cc,arg:e.Pb,pthread_ptr:e.Ab};return r.postMessage(t,e.ic),0},de=0,be=(e,r,...t)=>{for(var n=2*t.length,a=xn(),o=Mn(8*n),i=o>>>3,s=0;s>>0]=u)}return e=On(e,0,n,o,r),En(a),e};function me(e){if(h)return be(0,1,e);if(k=e,!(0{if(k=e,h)throw pe(e),\"unwind\";me(e)},ge=[],ve=[],ye=[],we={},Ae=e=>{var r=e.Ab;delete we[r],ge.push(e),ve.splice(ve.indexOf(e),1),e.Ab=0,jn(r)};function _e(){ye.forEach((e=>e()))}var Ce=e=>new Promise((r=>{e.onmessage=t=>{var n=(t=t.data).cmd;if(t.targetThread&&t.targetThread!=yn()){var a=we[t.targetThread];a?a.postMessage(t,t.transferList):W(`Internal error! Worker sent a message \"${n}\" to target pthread ${t.targetThread}, but that thread no longer exists!`)}else\"checkMailbox\"===n?Hr():\"spawnThread\"===n?ce(t):\"cleanupThread\"===n?Ae(we[t.thread]):\"killThread\"===n?(t=t.thread,n=we[t],delete we[t],le(n),jn(t),ve.splice(ve.indexOf(n),1),n.Ab=0):\"cancelThread\"===n?we[t.thread].postMessage({cmd:\"cancel\"}):\"loaded\"===n?(e.loaded=!0,r(e)):\"alert\"===n?alert(`Thread ${t.threadId}: ${t.text}`):\"setimmediate\"===t.target?e.postMessage(t):\"callHandler\"===n?d[t.handler](...t.args):n&&W(`worker sent an unknown command ${n}`)},e.onerror=e=>{throw W(`worker sent an error! 
${e.filename}:${e.lineno}: ${e.message}`),e};var t,n=[];for(t of[\"onExit\"])d.hasOwnProperty(t)&&n.push(t);e.postMessage({cmd:\"load\",handlers:n,wasmMemory:x,wasmModule:N})}));function Oe(){var e=new Worker(new URL(import.meta.url),{type:\"module\",workerData:\"em-pthread\",name:\"em-pthread\"});ge.push(e)}var je=e=>{for(;0{var e=yn(),r=s()[e+52>>>2>>>0];e=s()[e+56>>>2>>>0],Wn(r,r-e),En(r)},Se=(e,r)=>{de=0,e=Nn(e,r),0>>=0);throw r>>>=0,t>>>=0,s()[n.Ib+16>>>2>>>0]=0,s()[n.Ib+4>>>2>>>0]=r,s()[n.Ib+8>>>2>>>0]=t,e}function Me(e,r,t,n){return h?be(2,1,e,r,t,n):xe(e,r,t,n)}function xe(e,r,t,n){if(e>>>=0,r>>>=0,t>>>=0,n>>>=0,void 0===g)return W(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\"),6;var a=[];return h&&0===a.length?Me(e,r,t,n):(e={cc:t,Ab:e,Pb:n,ic:a},h?(e.Lb=\"spawnThread\",postMessage(e,a),0):ce(e))}var Ne=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf8\"):void 0,ke=(e,r,t)=>{var n=(r>>>=0)+t;for(t=r;e[t]&&!(t>=n);)++t;if(16(a=224==(240&a)?(15&a)<<12|o<<6|i:(7&a)<<18|o<<12|i<<6|63&e[r++])?n+=String.fromCharCode(a):(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a))}}else n+=String.fromCharCode(a)}return n},Re=(e,r)=>(e>>>=0)?ke(n(),e,r):\"\";function He(e,r,t){return h?be(3,1,e,r,t):0}function De(e,r){if(h)return be(4,1,e,r)}var Fe=e=>{for(var r=0,t=0;t=n?r++:2047>=n?r+=2:55296<=n&&57343>=n?(r+=4,++t):r+=3}return r},Pe=(e,r,t,n)=>{if(!(0>>=0;n=t+n-1;for(var o=0;o=i&&(i=65536+((1023&i)<<10)|1023&e.charCodeAt(++o)),127>=i){if(t>=n)break;r[t++>>>0]=i}else{if(2047>=i){if(t+1>=n)break;r[t++>>>0]=192|i>>6}else{if(65535>=i){if(t+2>=n)break;r[t++>>>0]=224|i>>12}else{if(t+3>=n)break;r[t++>>>0]=240|i>>18,r[t++>>>0]=128|i>>12&63}r[t++>>>0]=128|i>>6&63}r[t++>>>0]=128|63&i}}return r[t>>>0]=0,t-a},Be=(e,r,t)=>Pe(e,n(),r,t);function Ie(e,r){if(h)return be(5,1,e,r)}function Ue(e,r,t){if(h)return be(6,1,e,r,t)}function Ge(e,r,t){return h?be(7,1,e,r,t):0}function $e(e,r){if(h)return be(8,1,e,r)}function Ye(e,r,t){if(h)return be(9,1,e,r,t)}function Le(e,r,t,n){if(h)return be(10,1,e,r,t,n)}function ze(e,r,t,n){if(h)return be(11,1,e,r,t,n)}function Ve(e,r,t,n){if(h)return be(12,1,e,r,t,n)}function qe(e){if(h)return be(13,1,e)}function Je(e,r){if(h)return be(14,1,e,r)}function Xe(e,r,t){if(h)return be(15,1,e,r,t)}var Ke,Qe,Ze=()=>{Z(\"\")},er=e=>{for(var r=\"\";n()[e>>>0];)r+=Ke[n()[e++>>>0]];return r},rr={},tr={},nr={};function ar(e,r,t={}){if(!(\"argPackAdvance\"in r))throw new TypeError(\"registerType registeredInstance requires argPackAdvance\");return function(e,r,t={}){var n=r.name;if(!e)throw new Qe(`type \"${n}\" must have a positive integer typeid pointer`);if(tr.hasOwnProperty(e)){if(t.Rb)return;throw new Qe(`Cannot register type '${n}' twice`)}tr[e]=r,delete nr[e],rr.hasOwnProperty(e)&&(r=rr[e],delete rr[e],r.forEach((e=>e())))}(e,r,t)}var or=(e,r,u)=>{switch(r){case 1:return u?e=>t()[e>>>0]:e=>n()[e>>>0];case 2:return u?e=>a()[e>>>1>>>0]:e=>o()[e>>>1>>>0];case 4:return u?e=>i()[e>>>2>>>0]:e=>s()[e>>>2>>>0];case 8:return u?e=>U[e>>>3]:e=>G[e>>>3];default:throw new TypeError(`invalid integer width (${r}): ${e}`)}};function ir(e,r,t){t>>>=0,ar(e>>>=0,{name:r=er(r>>>0),fromWireType:e=>e,toWireType:function(e,r){if(\"bigint\"!=typeof r&&\"number\"!=typeof r)throw r=null===r?\"null\":\"object\"==(e=typeof r)||\"array\"===e||\"function\"===e?r.toString():\"\"+r,new TypeError(`Cannot convert \"${r}\" to ${this.name}`);return\"number\"==typeof r&&(r=BigInt(r)),r},argPackAdvance:sr,readValueFromPointer:or(r,t,-1==r.indexOf(\"u\")),Db:null})}var 
sr=8;function ur(e,r,t,a){ar(e>>>=0,{name:r=er(r>>>0),fromWireType:function(e){return!!e},toWireType:function(e,r){return r?t:a},argPackAdvance:sr,readValueFromPointer:function(e){return this.fromWireType(n()[e>>>0])},Db:null})}var fr=[],lr=[];function cr(e){9<(e>>>=0)&&0==--lr[e+1]&&(lr[e]=void 0,fr.push(e))}var dr=e=>{if(!e)throw new Qe(\"Cannot use deleted val. handle = \"+e);return lr[e]},br=e=>{switch(e){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:const r=fr.pop()||lr.length;return lr[r]=e,lr[r+1]=1,r}};function mr(e){return this.fromWireType(s()[e>>>2>>>0])}var pr={name:\"emscripten::val\",fromWireType:e=>{var r=dr(e);return cr(e),r},toWireType:(e,r)=>br(r),argPackAdvance:sr,readValueFromPointer:mr,Db:null};function hr(e){return ar(e>>>0,pr)}var gr=(e,r)=>{switch(r){case 4:return function(e){return this.fromWireType(u()[e>>>2>>>0])};case 8:return function(e){return this.fromWireType(f()[e>>>3>>>0])};default:throw new TypeError(`invalid float width (${r}): ${e}`)}};function vr(e,r,t){t>>>=0,ar(e>>>=0,{name:r=er(r>>>0),fromWireType:e=>e,toWireType:(e,r)=>r,argPackAdvance:sr,readValueFromPointer:gr(r,t),Db:null})}function yr(e,r,t,n,a){if(e>>>=0,t>>>=0,r=er(r>>>0),-1===a&&(a=4294967295),a=e=>e,0===n){var o=32-8*t;a=e=>e<>>o}var i=r.includes(\"unsigned\")?function(e,r){return r>>>0}:function(e,r){return r};ar(e,{name:r,fromWireType:a,toWireType:i,argPackAdvance:sr,readValueFromPointer:or(r,t,0!==n),Db:null})}function wr(e,r,n){function a(e){var r=s()[e>>>2>>>0];return e=s()[e+4>>>2>>>0],new o(t().buffer,e,r)}var o=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][r];ar(e>>>=0,{name:n=er(n>>>0),fromWireType:a,argPackAdvance:sr,readValueFromPointer:a},{Rb:!0})}function Ar(e,r){e>>>=0;var t=\"std::string\"===(r=er(r>>>0));ar(e,{name:r,fromWireType:function(e){var r=s()[e>>>2>>>0],a=e+4;if(t)for(var o=a,i=0;i<=r;++i){var u=a+i;if(i==r||0==n()[u>>>0]){if(o=Re(o,u-o),void 0===f)var f=o;else f+=String.fromCharCode(0),f+=o;o=u+1}}else{for(f=Array(r),i=0;i>>0]);f=f.join(\"\")}return An(e),f},toWireType:function(e,r){r instanceof ArrayBuffer&&(r=new Uint8Array(r));var a=\"string\"==typeof r;if(!(a||r instanceof Uint8Array||r instanceof Uint8ClampedArray||r instanceof Int8Array))throw new Qe(\"Cannot pass non-string to std::string\");var o=t&&a?Fe(r):r.length,i=wn(4+o+1),u=i+4;if(s()[i>>>2>>>0]=o,t&&a)Be(r,u,o+1);else if(a)for(a=0;a>>0]=f}else for(a=0;a>>0]=r[a];return null!==e&&e.push(An,i),i},argPackAdvance:sr,readValueFromPointer:mr,Db(e){An(e)}})}var _r=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf-16le\"):void 0,Cr=(e,r)=>{for(var t=e>>1,i=t+r/2;!(t>=i)&&o()[t>>>0];)++t;if(32<(t<<=1)-e&&_r)return _r.decode(n().slice(e,t));for(t=\"\",i=0;!(i>=r/2);++i){var s=a()[e+2*i>>>1>>>0];if(0==s)break;t+=String.fromCharCode(s)}return t},Or=(e,r,t)=>{if(t??=2147483647,2>t)return 0;var n=r;t=(t-=2)<2*e.length?t/2:e.length;for(var o=0;o>>1>>>0]=i,r+=2}return a()[r>>>1>>>0]=0,r-n},jr=e=>2*e.length,Tr=(e,r)=>{for(var t=0,n=\"\";!(t>=r/4);){var a=i()[e+4*t>>>2>>>0];if(0==a)break;++t,65536<=a?(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a)):n+=String.fromCharCode(a)}return n},Sr=(e,r,t)=>{if(r>>>=0,t??=2147483647,4>t)return 0;var n=r;t=n+t-4;for(var a=0;a=o&&(o=65536+((1023&o)<<10)|1023&e.charCodeAt(++a)),i()[r>>>2>>>0]=o,(r+=4)+4>t)break}return i()[r>>>2>>>0]=0,r-n},Wr=e=>{for(var r=0,t=0;t=n&&++t,r+=4}return r};function Er(e,r,t){if(e>>>=0,r>>>=0,t=er(t>>>=0),2===r)var 
n=Cr,a=Or,i=jr,u=e=>o()[e>>>1>>>0];else 4===r&&(n=Tr,a=Sr,i=Wr,u=e=>s()[e>>>2>>>0]);ar(e,{name:t,fromWireType:e=>{for(var t,a=s()[e>>>2>>>0],o=e+4,i=0;i<=a;++i){var f=e+4+i*r;i!=a&&0!=u(f)||(o=n(o,f-o),void 0===t?t=o:(t+=String.fromCharCode(0),t+=o),o=f+r)}return An(e),t},toWireType:(e,n)=>{if(\"string\"!=typeof n)throw new Qe(`Cannot pass non-string to C++ string type ${t}`);var o=i(n),u=wn(4+o+r);return s()[u>>>2>>>0]=o/r,a(n,u+4,o+r),null!==e&&e.push(An,u),u},argPackAdvance:sr,readValueFromPointer:mr,Db(e){An(e)}})}function Mr(e,r){ar(e>>>=0,{Sb:!0,name:r=er(r>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var xr=()=>1;function Nr(e){_n(e>>>0,!p,1,!m,131072,!1),_e()}var kr=e=>{if(!Y)try{if(e(),!(0>>=0,\"function\"==typeof Atomics.jc&&(Atomics.jc(i(),e>>>2,e).value.then(Hr),e+=128,Atomics.store(i(),e>>>2,1))}var Hr=()=>{var e=yn();e&&(Rr(e),kr(Sn))};function Dr(e,r){(e>>>=0)==r>>>0?setTimeout(Hr):h?postMessage({targetThread:e,cmd:\"checkMailbox\"}):(e=we[e])&&e.postMessage({cmd:\"checkMailbox\"})}var Fr=[];function Pr(e,r,t,n,a){for(r>>>=0,n/=2,Fr.length=n,t=a>>>0>>>3,a=0;a>>0];return(r?ie[r]:mn[e])(...Fr)}function Br(e){e>>>=0,h?postMessage({cmd:\"cleanupThread\",thread:e}):Ae(we[e])}function Ir(e){}var Ur=(e,r)=>{var t=tr[e];if(void 0===t)throw e=hn(e),t=er(e),An(e),new Qe(`${r} has unknown type ${t}`);return t},Gr=(e,r,t)=>{var n=[];return e=e.toWireType(n,t),n.length&&(s()[r>>>2>>>0]=br(n)),e};function $r(e,r,t){return r>>>=0,t>>>=0,e=dr(e>>>0),r=Ur(r,\"emval::as\"),Gr(r,t,e)}var Yr=e=>{try{e()}catch(e){Z(e)}},Lr=0,zr=null,Vr=0,qr=[],Jr={},Xr={},Kr=0,Qr=null,Zr=[];function et(e){return function(e){if(!Y){if(0===Lr){var r=!1,t=!1;e(((e=0)=>{if(!Y&&(Vr=e,r=!0,t)){Lr=2,Yr((()=>Hn(zr))),\"undefined\"!=typeof Browser&&Browser.Jb.Qb&&Browser.Jb.resume(),e=!1;try{var n=function(){var e=i()[zr+8>>>2>>>0];return e=pn[Xr[e]],--de,e()}()}catch(r){n=r,e=!0}var a=!1;if(!zr){var o=Qr;o&&(Qr=null,(e?o.reject:o.resolve)(n),a=!0)}if(e&&!a)throw n}})),t=!0,r||(Lr=1,zr=function(){var e=wn(65548),r=e+12;s()[e>>>2>>>0]=r,s()[e+4>>>2>>>0]=r+65536,r=qr[0];var t=Jr[r];return void 0===t&&(t=Kr++,Jr[r]=t,Xr[t]=r),r=t,i()[e+8>>>2>>>0]=r,e}(),\"undefined\"!=typeof Browser&&Browser.Jb.Qb&&Browser.Jb.pause(),Yr((()=>kn(zr))))}else 2===Lr?(Lr=0,Yr(Dn),An(zr),zr=null,Zr.forEach(kr)):Z(`invalid state: ${Lr}`);return Vr}}((r=>{e().then(r)}))}function rt(e){return e>>>=0,et((()=>(e=dr(e)).then(br)))}var tt=[];function nt(e,r,t,n){return t>>>=0,n>>>=0,(e=tt[e>>>0])(null,r=dr(r>>>0),t,n)}var at={},ot=e=>{var r=at[e];return void 0===r?er(e):r};function it(e,r,t,n,a){return t>>>=0,n>>>=0,a>>>=0,(e=tt[e>>>0])(r=dr(r>>>0),r[t=ot(t)],n,a)}var st=()=>\"object\"==typeof globalThis?globalThis:Function(\"return this\")();function ut(e){return 0==(e>>>=0)?br(st()):(e=ot(e),br(st()[e]))}var ft=e=>{var r=tt.length;return tt.push(e),r},lt=(e,r)=>{for(var t=Array(e),n=0;n>>2>>>0],\"parameter \"+n);return t},ct=(e,r)=>Object.defineProperty(r,\"name\",{value:e});function dt(e,r,t){var n=(r=lt(e,r>>>0)).shift();e--;var a=\"return function (obj, func, destructorsRef, args) {\\n\",o=0,i=[];0===t&&i.push(\"obj\");for(var s=[\"retType\"],u=[n],f=0;fe.name)).join(\", \")}) => ${n.name}>`,ft(ct(t,e))}function bt(e){return e=ot(e>>>0),br(d[e])}function mt(e,r){return r>>>=0,e=dr(e>>>0),r=dr(r),br(e[r])}function pt(e){9<(e>>>=0)&&(lr[e+1]+=1)}function ht(){return br([])}function gt(e){e=dr(e>>>0);for(var r=Array(e.length),t=0;t>>0))}function yt(){return br({})}function wt(e){for(var r=dr(e>>>=0);r.length;){var 
t=r.pop();r.pop()(t)}cr(e)}function At(e,r,t){r>>>=0,t>>>=0,e=dr(e>>>0),r=dr(r),t=dr(t),e[r]=t}function _t(e,r){return r>>>=0,e=(e=Ur(e>>>0,\"_emval_take_value\")).readValueFromPointer(r),br(e)}function Ct(e,r){e=-9007199254740992>e||9007199254740992>>=0,e=new Date(1e3*e),i()[r>>>2>>>0]=e.getUTCSeconds(),i()[r+4>>>2>>>0]=e.getUTCMinutes(),i()[r+8>>>2>>>0]=e.getUTCHours(),i()[r+12>>>2>>>0]=e.getUTCDate(),i()[r+16>>>2>>>0]=e.getUTCMonth(),i()[r+20>>>2>>>0]=e.getUTCFullYear()-1900,i()[r+24>>>2>>>0]=e.getUTCDay(),e=(e.getTime()-Date.UTC(e.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[r+28>>>2>>>0]=e}var Ot=e=>0==e%4&&(0!=e%100||0==e%400),jt=[0,31,60,91,121,152,182,213,244,274,305,335],Tt=[0,31,59,90,120,151,181,212,243,273,304,334];function St(e,r){e=-9007199254740992>e||9007199254740992>>=0,e=new Date(1e3*e),i()[r>>>2>>>0]=e.getSeconds(),i()[r+4>>>2>>>0]=e.getMinutes(),i()[r+8>>>2>>>0]=e.getHours(),i()[r+12>>>2>>>0]=e.getDate(),i()[r+16>>>2>>>0]=e.getMonth(),i()[r+20>>>2>>>0]=e.getFullYear()-1900,i()[r+24>>>2>>>0]=e.getDay();var t=(Ot(e.getFullYear())?jt:Tt)[e.getMonth()]+e.getDate()-1|0;i()[r+28>>>2>>>0]=t,i()[r+36>>>2>>>0]=-60*e.getTimezoneOffset(),t=new Date(e.getFullYear(),6,1).getTimezoneOffset();var n=new Date(e.getFullYear(),0,1).getTimezoneOffset();e=0|(t!=n&&e.getTimezoneOffset()==Math.min(n,t)),i()[r+32>>>2>>>0]=e}function Wt(e){e>>>=0;var r=new Date(i()[e+20>>>2>>>0]+1900,i()[e+16>>>2>>>0],i()[e+12>>>2>>>0],i()[e+8>>>2>>>0],i()[e+4>>>2>>>0],i()[e>>>2>>>0],0),t=i()[e+32>>>2>>>0],n=r.getTimezoneOffset(),a=new Date(r.getFullYear(),6,1).getTimezoneOffset(),o=new Date(r.getFullYear(),0,1).getTimezoneOffset(),s=Math.min(o,a);return 0>t?i()[e+32>>>2>>>0]=Number(a!=o&&s==n):0>>2>>>0]=r.getDay(),t=(Ot(r.getFullYear())?jt:Tt)[r.getMonth()]+r.getDate()-1|0,i()[e+28>>>2>>>0]=t,i()[e>>>2>>>0]=r.getSeconds(),i()[e+4>>>2>>>0]=r.getMinutes(),i()[e+8>>>2>>>0]=r.getHours(),i()[e+12>>>2>>>0]=r.getDate(),i()[e+16>>>2>>>0]=r.getMonth(),i()[e+20>>>2>>>0]=r.getYear(),e=r.getTime(),BigInt(isNaN(e)?-1:e/1e3)}function Et(e,r,t,n,a,o,i){return h?be(16,1,e,r,t,n,a,o,i):-52}function Mt(e,r,t,n,a,o){if(h)return be(17,1,e,r,t,n,a,o)}function xt(e,r,t,n){e>>>=0,r>>>=0,t>>>=0,n>>>=0;var a=(new Date).getFullYear(),o=new Date(a,0,1),u=new Date(a,6,1);a=o.getTimezoneOffset();var f=u.getTimezoneOffset(),l=Math.max(a,f);s()[e>>>2>>>0]=60*l,i()[r>>>2>>>0]=Number(a!=f),o=(e=e=>e.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:\"short\"}).split(\" \")[1])(o),u=e(u),f{Nt.length=0;for(var t;t=n()[e++>>>0];){var a=105!=t;r+=(a&=112!=t)&&r%8?4:0,Nt.push(112==t?s()[r>>>2>>>0]:106==t?U[r>>>3]:105==t?i()[r>>>2>>>0]:f()[r>>>3>>>0]),r+=a?8:4}return Nt};function Rt(e,r,t){return e>>>=0,r=kt(r>>>0,t>>>0),ie[e](...r)}function Ht(e,r,t){return e>>>=0,r=kt(r>>>0,t>>>0),ie[e](...r)}var Dt=()=>{},Ft=()=>Date.now();function Pt(e,r){return W(Re(e>>>0,r>>>0))}var Bt,It=()=>{throw de+=1,\"unwind\"};function Ut(){return 4294901760}Bt=()=>performance.timeOrigin+performance.now();var Gt=()=>navigator.hardwareConcurrency;function $t(){return Z(\"Cannot use emscripten_pc_get_function without -sUSE_OFFSET_CONVERTER\"),0}function Yt(e){e>>>=0;var r=n().length;if(e<=r||4294901760=t;t*=2){var a=r*(1+.2/t);a=Math.min(a,e+100663296);var o=Math;a=Math.max(e,a);e:{o=(o.min.call(o,4294901760,a+(65536-a%65536)%65536)-x.buffer.byteLength+65535)/65536;try{x.grow(o),L();var i=1;break e}catch(e){}i=void 0}if(i)return!0}return!1}var Lt=()=>(Z(\"Cannot use convertFrameToPC (needed by __builtin_return_address) without 
-sUSE_OFFSET_CONVERTER\"),0),zt={},Vt=e=>{e.forEach((e=>{var r=Lt();r&&(zt[r]=e)}))};function qt(){var e=Error().stack.toString().split(\"\\n\");return\"Error\"==e[0]&&e.shift(),Vt(e),zt.Ob=Lt(),zt.ac=e,zt.Ob}function Jt(e,r,t){if(e>>>=0,r>>>=0,zt.Ob==e)var n=zt.ac;else\"Error\"==(n=Error().stack.toString().split(\"\\n\"))[0]&&n.shift(),Vt(n);for(var a=3;n[a]&&Lt()!=e;)++a;for(e=0;e>>2>>>0]=Lt();return e}var Xt,Kt={},Qt=()=>{if(!Xt){var e,r={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(\"object\"==typeof navigator&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:_||\"./this.program\"};for(e in Kt)void 0===Kt[e]?delete r[e]:r[e]=Kt[e];var t=[];for(e in r)t.push(`${e}=${r[e]}`);Xt=t}return Xt};function Zt(e,r){if(h)return be(18,1,e,r);e>>>=0,r>>>=0;var n=0;return Qt().forEach(((a,o)=>{var i=r+n;for(o=s()[e+4*o>>>2>>>0]=i,i=0;i>>0]=a.charCodeAt(i);t()[o>>>0]=0,n+=a.length+1})),0}function en(e,r){if(h)return be(19,1,e,r);e>>>=0,r>>>=0;var t=Qt();s()[e>>>2>>>0]=t.length;var n=0;return t.forEach((e=>n+=e.length+1)),s()[r>>>2>>>0]=n,0}function rn(e){return h?be(20,1,e):52}function tn(e,r,t,n){return h?be(21,1,e,r,t,n):52}function nn(e,r,t,n){return h?be(22,1,e,r,t,n):70}var an=[null,[],[]];function on(e,r,t,a){if(h)return be(23,1,e,r,t,a);r>>>=0,t>>>=0,a>>>=0;for(var o=0,i=0;i>>2>>>0],f=s()[r+4>>>2>>>0];r+=8;for(var l=0;l>>0],d=an[e];0===c||10===c?((1===e?S:W)(ke(d,0)),d.length=0):d.push(c)}o+=f}return s()[a>>>2>>>0]=o,0}var sn=[31,29,31,30,31,30,31,31,30,31,30,31],un=[31,28,31,30,31,30,31,31,30,31,30,31],fn=(e,r)=>{t().set(e,r>>>0)};function ln(e,r,t,n){function a(e,r,t){for(e=\"number\"==typeof e?e.toString():e||\"\";e.lengthe?-1:0n-e.getDate())){e.setDate(e.getDate()+r);break}r-=n-e.getDate()+1,e.setDate(1),11>t?e.setMonth(t+1):(e.setMonth(0),e.setFullYear(e.getFullYear()+1))}return t=new Date(e.getFullYear()+1,0,4),r=f(new Date(e.getFullYear(),0,4)),t=f(t),0>=u(r,e)?0>=u(t,e)?e.getFullYear()+1:e.getFullYear():e.getFullYear()-1}e>>>=0,r>>>=0,t>>>=0,n>>>=0;var c=s()[n+40>>>2>>>0];for(var d in n={fc:i()[n>>>2>>>0],ec:i()[n+4>>>2>>>0],Gb:i()[n+8>>>2>>>0],Kb:i()[n+12>>>2>>>0],Hb:i()[n+16>>>2>>>0],Cb:i()[n+20>>>2>>>0],ub:i()[n+24>>>2>>>0],Bb:i()[n+28>>>2>>>0],nc:i()[n+32>>>2>>>0],dc:i()[n+36>>>2>>>0],hc:c?Re(c):\"\"},t=Re(t),c={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"})t=t.replace(new RegExp(d,\"g\"),c[d]);var b=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),m=\"January February March April May June July August September October November December\".split(\" \");for(d in c={\"%a\":e=>b[e.ub].substring(0,3),\"%A\":e=>b[e.ub],\"%b\":e=>m[e.Hb].substring(0,3),\"%B\":e=>m[e.Hb],\"%C\":e=>o((e.Cb+1900)/100|0,2),\"%d\":e=>o(e.Kb,2),\"%e\":e=>a(e.Kb,2,\" \"),\"%g\":e=>l(e).toString().substring(2),\"%G\":l,\"%H\":e=>o(e.Gb,2),\"%I\":e=>(0==(e=e.Gb)?e=12:12{for(var r=0,t=0;t<=e.Hb-1;r+=(Ot(e.Cb+1900)?sn:un)[t++]);return 
o(e.Kb+r,3)},\"%m\":e=>o(e.Hb+1,2),\"%M\":e=>o(e.ec,2),\"%n\":()=>\"\\n\",\"%p\":e=>0<=e.Gb&&12>e.Gb?\"AM\":\"PM\",\"%S\":e=>o(e.fc,2),\"%t\":()=>\"\\t\",\"%u\":e=>e.ub||7,\"%U\":e=>o(Math.floor((e.Bb+7-e.ub)/7),2),\"%V\":e=>{var r=Math.floor((e.Bb+7-(e.ub+6)%7)/7);if(2>=(e.ub+371-e.Bb-2)%7&&r++,r)53==r&&(4==(t=(e.ub+371-e.Bb)%7)||3==t&&Ot(e.Cb)||(r=1));else{r=52;var t=(e.ub+7-e.Bb-1)%7;(4==t||5==t&&Ot(e.Cb%400-1))&&r++}return o(r,2)},\"%w\":e=>e.ub,\"%W\":e=>o(Math.floor((e.Bb+7-(e.ub+6)%7)/7),2),\"%y\":e=>(e.Cb+1900).toString().substring(2),\"%Y\":e=>e.Cb+1900,\"%z\":e=>{var r=0<=(e=e.dc);return e=Math.abs(e)/60,(r?\"+\":\"-\")+String(\"0000\"+(e/60*100+e%60)).slice(-4)},\"%Z\":e=>e.hc,\"%%\":()=>\"%\"},t=t.replace(/%%/g,\"\\0\\0\"),c)t.includes(d)&&(t=t.replace(new RegExp(d,\"g\"),c[d](n)));return d=function(e){var r=Array(Fe(e)+1);return Pe(e,r,0,r.length),r}(t=t.replace(/\\0\\0/g,\"%\")),d.length>r?0:(fn(d,e),d.length-1)}function cn(e,r,t,n){return ln(e>>>0,r>>>0,t>>>0,n>>>0)}h||function(){for(var e=d.numThreads-1;e--;)Oe();z.unshift((()=>{J++,function(e){h?e():Promise.all(ge.map(Ce)).then(e)}((()=>Q()))}))}();for(var dn=Array(256),bn=0;256>bn;++bn)dn[bn]=String.fromCharCode(bn);Ke=dn,Qe=d.BindingError=class extends Error{constructor(e){super(e),this.name=\"BindingError\"}},d.InternalError=class extends Error{constructor(e){super(e),this.name=\"InternalError\"}},lr.push(0,1,void 0,1,null,1,!0,1,!1,1),d.count_emval_handles=()=>lr.length/2-5-fr.length;var mn=[me,pe,Me,He,De,Ie,Ue,Ge,$e,Ye,Le,ze,Ve,qe,Je,Xe,Et,Mt,Zt,en,rn,tn,nn,on],pn=function(){function e(e,r){return pn=e.exports,pn=function(){var e=pn,r={};for(let[t,n]of Object.entries(e))r[t]=\"function\"==typeof n?(...e)=>{qr.push(t);try{return n(...e)}finally{Y||(qr.pop(),zr&&1===Lr&&0===qr.length&&(Lr=0,de+=1,Yr(Rn),\"undefined\"!=typeof Fibers&&Fibers.oc()))}}:n;return r}(),pn=function(){var e=pn,r=e=>r=>e(r)>>>0,t=e=>()=>e()>>>0;return(e=Object.assign({},e)).Ca=r(e.Ca),e.fb=t(e.fb),e.gb=r(e.gb),e.emscripten_main_runtime_thread_id=t(e.emscripten_main_runtime_thread_id),e.sb=r(e.sb),e.tb=t(e.tb),e}(),ye.push(pn.ib),V.unshift(pn.Ba),N=r,Q(),pn}var r=oe();if(J++,d.instantiateWasm)try{return d.instantiateWasm(r,e)}catch(e){W(`Module.instantiateWasm callback failed with error: ${e}`),c(e)}return ee||=d.locateFile?re(\"ort-wasm-simd-threaded.jsep.wasm\")?\"ort-wasm-simd-threaded.jsep.wasm\":d.locateFile?d.locateFile(\"ort-wasm-simd-threaded.jsep.wasm\",O):O+\"ort-wasm-simd-threaded.jsep.wasm\":new URL(\"ort-wasm-simd-threaded.jsep.wasm\",import.meta.url).href,function(e,r){var t=ee;return\"function\"!=typeof WebAssembly.instantiateStreaming||re(t)||te(t)||\"function\"!=typeof fetch?ae(t,e,r):fetch(t,{credentials:\"same-origin\"}).then((n=>WebAssembly.instantiateStreaming(n,e).then(r,(function(n){return W(`wasm streaming compile failed: ${n}`),W(\"falling back to ArrayBuffer 
instantiation\"),ae(t,e,r)}))))}(r,(function(r){e(r.instance,r.module)})).catch(c),{}}(),hn=e=>(hn=pn.Ca)(e),gn=()=>(gn=pn.Da)();d._OrtInit=(e,r)=>(d._OrtInit=pn.Ea)(e,r),d._OrtGetLastError=(e,r)=>(d._OrtGetLastError=pn.Fa)(e,r),d._OrtCreateSessionOptions=(e,r,t,n,a,o,i,s,u,f)=>(d._OrtCreateSessionOptions=pn.Ga)(e,r,t,n,a,o,i,s,u,f),d._OrtAppendExecutionProvider=(e,r)=>(d._OrtAppendExecutionProvider=pn.Ha)(e,r),d._OrtAddFreeDimensionOverride=(e,r,t)=>(d._OrtAddFreeDimensionOverride=pn.Ia)(e,r,t),d._OrtAddSessionConfigEntry=(e,r,t)=>(d._OrtAddSessionConfigEntry=pn.Ja)(e,r,t),d._OrtReleaseSessionOptions=e=>(d._OrtReleaseSessionOptions=pn.Ka)(e),d._OrtCreateSession=(e,r,t)=>(d._OrtCreateSession=pn.La)(e,r,t),d._OrtReleaseSession=e=>(d._OrtReleaseSession=pn.Ma)(e),d._OrtGetInputOutputCount=(e,r,t)=>(d._OrtGetInputOutputCount=pn.Na)(e,r,t),d._OrtGetInputName=(e,r)=>(d._OrtGetInputName=pn.Oa)(e,r),d._OrtGetOutputName=(e,r)=>(d._OrtGetOutputName=pn.Pa)(e,r),d._OrtFree=e=>(d._OrtFree=pn.Qa)(e),d._OrtCreateTensor=(e,r,t,n,a,o)=>(d._OrtCreateTensor=pn.Ra)(e,r,t,n,a,o),d._OrtGetTensorData=(e,r,t,n,a)=>(d._OrtGetTensorData=pn.Sa)(e,r,t,n,a),d._OrtReleaseTensor=e=>(d._OrtReleaseTensor=pn.Ta)(e),d._OrtCreateRunOptions=(e,r,t,n)=>(d._OrtCreateRunOptions=pn.Ua)(e,r,t,n),d._OrtAddRunConfigEntry=(e,r,t)=>(d._OrtAddRunConfigEntry=pn.Va)(e,r,t),d._OrtReleaseRunOptions=e=>(d._OrtReleaseRunOptions=pn.Wa)(e),d._OrtCreateBinding=e=>(d._OrtCreateBinding=pn.Xa)(e),d._OrtBindInput=(e,r,t)=>(d._OrtBindInput=pn.Ya)(e,r,t),d._OrtBindOutput=(e,r,t,n)=>(d._OrtBindOutput=pn.Za)(e,r,t,n),d._OrtClearBoundOutputs=e=>(d._OrtClearBoundOutputs=pn._a)(e),d._OrtReleaseBinding=e=>(d._OrtReleaseBinding=pn.$a)(e),d._OrtRunWithBinding=(e,r,t,n,a)=>(d._OrtRunWithBinding=pn.ab)(e,r,t,n,a),d._OrtRun=(e,r,t,n,a,o,i,s)=>(d._OrtRun=pn.bb)(e,r,t,n,a,o,i,s),d._OrtEndProfiling=e=>(d._OrtEndProfiling=pn.cb)(e),d._JsepOutput=(e,r,t)=>(d._JsepOutput=pn.db)(e,r,t),d._JsepGetNodeName=e=>(d._JsepGetNodeName=pn.eb)(e);var vn,yn=()=>(yn=pn.fb)(),wn=d._malloc=e=>(wn=d._malloc=pn.gb)(e),An=d._free=e=>(An=d._free=pn.hb)(e),_n=(e,r,t,n,a,o)=>(_n=pn.kb)(e,r,t,n,a,o),Cn=()=>(Cn=pn.lb)(),On=(e,r,t,n,a)=>(On=pn.mb)(e,r,t,n,a),jn=e=>(jn=pn.nb)(e),Tn=e=>(Tn=pn.ob)(e),Sn=()=>(Sn=pn.pb)(),Wn=(e,r)=>(Wn=pn.qb)(e,r),En=e=>(En=pn.rb)(e),Mn=e=>(Mn=pn.sb)(e),xn=()=>(xn=pn.tb)(),Nn=d.dynCall_ii=(e,r)=>(Nn=d.dynCall_ii=pn.vb)(e,r),kn=e=>(kn=pn.wb)(e),Rn=()=>(Rn=pn.xb)(),Hn=e=>(Hn=pn.yb)(e),Dn=()=>(Dn=pn.zb)();function Fn(){if(!(0xn(),d.stackRestore=e=>En(e),d.stackAlloc=e=>Mn(e),d.UTF8ToString=Re,d.stringToUTF8=Be,d.lengthBytesUTF8=Fe,K=function e(){vn||Fn(),vn||(K=e)},Fn(),b});export default r;\"em-pthread\"===globalThis.self?.name&&r();", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {isNode} from './wasm-utils-env';\n\n/**\n * The classic script source URL. This is not always available in non ESModule environments.\n *\n * In Node.js, this is undefined.\n */\nexport const scriptSrc =\n // if Nodejs, return undefined\n isNode ? undefined :\n // if It's ESM, use import.meta.url\n BUILD_DEFS.ESM_IMPORT_META_URL ??\n // use `document.currentScript.src` if available\n (typeof document !== 'undefined' ? (document.currentScript as HTMLScriptElement)?.src :\n // use `self.location.href` if available\n (typeof self !== 'undefined' ? 
self.location?.href : undefined));\n\n/**\n * The origin of the current location.\n *\n * In Node.js, this is undefined.\n */\nconst origin = isNode || typeof location === 'undefined' ? undefined : location.origin;\n\n/**\n * Check if the given filename with prefix is from the same origin.\n */\nconst isSameOrigin = (filename: string, prefixOverride?: string) => {\n try {\n const baseUrl = prefixOverride ?? scriptSrc;\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.origin === origin;\n } catch {\n return false;\n }\n};\n\n/**\n * Normalize the inputs to an absolute URL with the given prefix override. If failed, return undefined.\n */\nconst normalizeUrl = (filename: string, prefixOverride?: string) => {\n const baseUrl = prefixOverride ?? scriptSrc;\n try {\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.href;\n } catch {\n return undefined;\n }\n};\n\n/**\n * Create a fallback URL if an absolute URL cannot be created by the normalizeUrl function.\n */\nconst fallbackUrl = (filename: string, prefixOverride?: string) => `${prefixOverride ?? './'}${filename}`;\n\n/**\n * This helper function is used to preload a module from a URL.\n *\n * If the origin of the worker URL is different from the current origin, the worker cannot be loaded directly.\n * See discussions in https://github.com/webpack-contrib/worker-loader/issues/154\n *\n * In this case, we will fetch the worker URL and create a new Blob URL with the same origin as a workaround.\n *\n * @param absoluteUrl - The absolute URL to preload.\n *\n * @returns - A promise that resolves to a new Blob URL\n */\nconst preload = async(absoluteUrl: string): Promise => {\n const response = await fetch(absoluteUrl, {credentials: 'same-origin'});\n const blob = await response.blob();\n return URL.createObjectURL(blob);\n};\n\n/**\n * This helper function is used to dynamically import a module from a URL.\n *\n * The build script has special handling for this function to ensure that the URL is not bundled into the final output.\n *\n * @param url - The URL to import.\n *\n * @returns - A promise that resolves to the default export of the module.\n */\nconst dynamicImportDefault = async(url: string): Promise => (await import(/* webpackIgnore: true */ url)).default;\n\n/**\n * The proxy worker factory imported from the proxy worker module.\n *\n * This is only available when the WebAssembly proxy is not disabled.\n */\nconst createProxyWorker: ((urlOverride?: string) => Worker)|undefined =\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n BUILD_DEFS.DISABLE_WASM_PROXY ? undefined : require('./proxy-worker/main').default;\n\n/**\n * Import the proxy worker.\n *\n * This function will perform the following steps:\n * 1. If a preload is needed, it will preload the module and return the object URL.\n * 2. 
Use the proxy worker factory to create the proxy worker.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The proxy worker.\n */\nexport const importProxyWorker = async(): Promise<[undefined | string, Worker]> => {\n if (!scriptSrc) {\n throw new Error('Failed to load proxy worker: cannot determine the script source URL.');\n }\n\n // If the script source is from the same origin, we can use the embedded proxy module directly.\n if (isSameOrigin(scriptSrc)) {\n return [undefined, createProxyWorker!()];\n }\n\n // Otherwise, need to preload\n const url = await preload(scriptSrc);\n return [url, createProxyWorker!(url)];\n};\n\n/**\n * The embedded WebAssembly module.\n *\n * This is only available in ESM and when embedding is not disabled.\n */\nconst embeddedWasmModule: EmscriptenModuleFactory|undefined =\n BUILD_DEFS.IS_ESM && BUILD_DEFS.DISABLE_DYNAMIC_IMPORT ?\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n require(\n !BUILD_DEFS.DISABLE_TRAINING ? '../../dist/ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? '../../dist/ort-wasm-simd-threaded.jsep.mjs' :\n '../../dist/ort-wasm-simd-threaded.mjs')\n .default :\n undefined;\n\n/**\n * Import the WebAssembly module.\n *\n * This function will perform the following steps:\n * 1. If BUILD_DEFS.DISABLE_DYNAMIC_IMPORT is true, use the embedded module.\n * 2. If a preload is needed, it will preload the module and return the object URL.\n * 3. Otherwise, it will perform a dynamic import of the module.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The default export of the module, which is a factory function to create the WebAssembly module.\n */\nexport const importWasmModule = async(\n urlOverride: string|undefined, prefixOverride: string|undefined,\n isMultiThreaded: boolean): Promise<[undefined | string, EmscriptenModuleFactory]> => {\n if (BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n return [undefined, embeddedWasmModule!];\n } else {\n const wasmModuleFilename = !BUILD_DEFS.DISABLE_TRAINING ? 'ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? 'ort-wasm-simd-threaded.jsep.mjs' :\n 'ort-wasm-simd-threaded.mjs';\n const wasmModuleUrl = urlOverride ?? normalizeUrl(wasmModuleFilename, prefixOverride);\n // need to preload if all of the following conditions are met:\n // 1. not in Node.js.\n // - Node.js does not have the same origin policy for creating workers.\n // 2. multi-threaded is enabled.\n // - If multi-threaded is disabled, no worker will be created. So we don't need to preload the module.\n // 3. the absolute URL is available.\n // - If the absolute URL is failed to be created, the origin cannot be determined. In this case, we will not\n // preload the module.\n // 4. the worker URL is not from the same origin.\n // - If the worker URL is from the same origin, we can create the worker directly.\n const needPreload = !isNode && isMultiThreaded && wasmModuleUrl && !isSameOrigin(wasmModuleUrl, prefixOverride);\n const url = needPreload ? (await preload(wasmModuleUrl)) :\n (wasmModuleUrl ?? fallbackUrl(wasmModuleFilename, prefixOverride));\n return [needPreload ? url : undefined, await dynamicImportDefault>(url)];\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {importWasmModule} from './wasm-utils-import';\n\nlet wasm: OrtWasmModule|undefined;\nlet initialized = false;\nlet initializing = false;\nlet aborted = false;\n\nconst isMultiThreadSupported = (): boolean => {\n // If 'SharedArrayBuffer' is not available, WebAssembly threads will not work.\n if (typeof SharedArrayBuffer === 'undefined') {\n return false;\n }\n\n try {\n // Test for transferability of SABs (for browsers. needed for Firefox)\n // https://groups.google.com/forum/#!msg/mozilla.dev.platform/IHkBZlHETpA/dwsMNchWEQAJ\n if (typeof MessageChannel !== 'undefined') {\n new MessageChannel().port1.postMessage(new SharedArrayBuffer(1));\n }\n\n // Test for WebAssembly threads capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing threaded instructions.\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 5,\n 4, 1, 3, 1, 1, 10, 11, 1, 9, 0, 65, 0, 254, 16, 2, 0, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nconst isSimdSupported = (): boolean => {\n try {\n // Test for WebAssembly SIMD capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing SIMD instructions.\n\n // The binary data is generated from the following code by wat2wasm:\n //\n // (module\n // (type $t0 (func))\n // (func $f0 (type $t0)\n // (drop\n // (i32x4.dot_i16x8_s\n // (i8x16.splat\n // (i32.const 0))\n // (v128.const i32x4 0x00000000 0x00000000 0x00000000 0x00000000)))))\n\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 10, 30, 1, 28, 0, 65, 0,\n 253, 15, 253, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 253, 186, 1, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nexport const initializeWebAssembly = async(flags: Env.WebAssemblyFlags): Promise => {\n if (initialized) {\n return Promise.resolve();\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initializeWebAssembly()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initializeWebAssembly()\\' failed.');\n }\n\n initializing = true;\n\n // wasm flags are already initialized\n const timeout = flags.initTimeout!;\n let numThreads = flags.numThreads!;\n\n // ensure SIMD is supported\n if (!isSimdSupported()) {\n throw new Error('WebAssembly SIMD is not supported in the current environment.');\n }\n\n // check if multi-threading is supported\n const multiThreadSupported = isMultiThreadSupported();\n if (numThreads > 1 && !multiThreadSupported) {\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n // eslint-disable-next-line no-console\n console.warn(\n 'env.wasm.numThreads is set to ' + numThreads +\n ', but this will not work unless you enable crossOriginIsolated mode. ' +\n 'See https://web.dev/cross-origin-isolation-guide/ for more info.');\n }\n\n // eslint-disable-next-line no-console\n console.warn(\n 'WebAssembly multi-threading is not supported in the current environment. ' +\n 'Falling back to single-threading.');\n\n // set flags.numThreads to 1 so that OrtInit() will not create a global thread pool.\n flags.numThreads = numThreads = 1;\n }\n\n const wasmPaths = flags.wasmPaths;\n const wasmPrefixOverride = typeof wasmPaths === 'string' ? 
wasmPaths : undefined;\n const mjsPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.mjs;\n const mjsPathOverride = (mjsPathOverrideFlag as URL)?.href ?? mjsPathOverrideFlag;\n const wasmPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.wasm;\n const wasmPathOverride = (wasmPathOverrideFlag as URL)?.href ?? wasmPathOverrideFlag;\n\n const [objectUrl, ortWasmFactory] = (await importWasmModule(mjsPathOverride, wasmPrefixOverride, numThreads > 1));\n\n let isTimeout = false;\n\n const tasks: Array> = [];\n\n // promise for timeout\n if (timeout > 0) {\n tasks.push(new Promise((resolve) => {\n setTimeout(() => {\n isTimeout = true;\n resolve();\n }, timeout);\n }));\n }\n\n // promise for module initialization\n tasks.push(new Promise((resolve, reject) => {\n const config: Partial = {\n /**\n * The number of threads. WebAssembly will create (Module.numThreads - 1) workers. If it is 1, no worker will be\n * created.\n */\n numThreads,\n };\n\n if (wasmPathOverride || wasmPrefixOverride) {\n /**\n * A callback function to locate the WebAssembly file. The function should return the full path of the file.\n *\n * Since Emscripten 3.1.58, this function is only called for the .wasm file.\n */\n config.locateFile = (fileName, scriptDirectory) =>\n wasmPathOverride ?? (wasmPrefixOverride ?? scriptDirectory) + fileName;\n }\n\n ortWasmFactory(config).then(\n // wasm module initialized successfully\n module => {\n initializing = false;\n initialized = true;\n wasm = module;\n resolve();\n if (objectUrl) {\n URL.revokeObjectURL(objectUrl);\n }\n },\n // wasm module failed to initialize\n (what) => {\n initializing = false;\n aborted = true;\n reject(what);\n });\n }));\n\n await Promise.race(tasks);\n\n if (isTimeout) {\n throw new Error(`WebAssembly backend initializing failed due to timeout: ${timeout}ms`);\n }\n};\n\nexport const getInstance = (): OrtWasmModule => {\n if (initialized && wasm) {\n return wasm;\n }\n\n throw new Error('WebAssembly is not initialized yet.');\n};\n\nexport const dispose = (): void => {\n if (initialized && !initializing && !aborted) {\n // TODO: currently \"PThread.terminateAllThreads()\" is not exposed in the wasm module.\n // And this function is not yet called by any code.\n // If it is needed in the future, we should expose it in the wasm module and uncomment the following line.\n\n // wasm?.PThread?.terminateAllThreads();\n wasm = undefined;\n\n initializing = false;\n initialized = false;\n aborted = true;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getInstance} from './wasm-factory';\n\nexport const allocWasmString = (data: string, allocs: number[]): number => {\n const wasm = getInstance();\n\n const dataLength = wasm.lengthBytesUTF8(data) + 1;\n const dataOffset = wasm._malloc(dataLength);\n wasm.stringToUTF8(data, dataOffset, dataLength);\n allocs.push(dataOffset);\n\n return dataOffset;\n};\n\ninterface ExtraOptionsHandler {\n (name: string, value: string): void;\n}\n\nexport const iterateExtraOptions =\n (options: Record, prefix: string, seen: WeakSet>,\n handler: ExtraOptionsHandler): void => {\n if (typeof options == 'object' && options !== null) {\n if (seen.has(options)) {\n throw new Error('Circular reference in options');\n } else {\n seen.add(options);\n }\n }\n\n Object.entries(options).forEach(([key, value]) => {\n const name = (prefix) ? 
prefix + key : key;\n if (typeof value === 'object') {\n iterateExtraOptions(value as Record, name + '.', seen, handler);\n } else if (typeof value === 'string' || typeof value === 'number') {\n handler(name, value.toString());\n } else if (typeof value === 'boolean') {\n handler(name, (value) ? '1' : '0');\n } else {\n throw new Error(`Can't handle extra config type: ${typeof value}`);\n }\n });\n };\n\n/**\n * check web assembly API's last error and throw error if any error occurred.\n * @param message a message used when an error occurred.\n */\nexport const checkLastError = (message: string): void => {\n const wasm = getInstance();\n\n const stack = wasm.stackSave();\n try {\n const paramsOffset = wasm.stackAlloc(8);\n wasm._OrtGetLastError(paramsOffset, paramsOffset + 4);\n const errorCode = wasm.HEAP32[paramsOffset / 4];\n const errorMessagePointer = wasm.HEAPU32[paramsOffset / 4 + 1];\n const errorMessage = errorMessagePointer ? wasm.UTF8ToString(errorMessagePointer) : '';\n throw new Error(`${message} ERROR_CODE: ${errorCode}, ERROR_MESSAGE: ${errorMessage}`);\n } finally {\n wasm.stackRestore(stack);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nexport const setRunOptions = (options: InferenceSession.RunOptions): [number, number[]] => {\n const wasm = getInstance();\n let runOptionsHandle = 0;\n const allocs: number[] = [];\n\n const runOptions: InferenceSession.RunOptions = options || {};\n\n try {\n if (options?.logSeverityLevel === undefined) {\n runOptions.logSeverityLevel = 2; // Default to warning\n } else if (\n typeof options.logSeverityLevel !== 'number' || !Number.isInteger(options.logSeverityLevel) ||\n options.logSeverityLevel < 0 || options.logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${options.logSeverityLevel}`);\n }\n\n if (options?.logVerbosityLevel === undefined) {\n runOptions.logVerbosityLevel = 0; // Default to 0\n } else if (typeof options.logVerbosityLevel !== 'number' || !Number.isInteger(options.logVerbosityLevel)) {\n throw new Error(`log verbosity level is not valid: ${options.logVerbosityLevel}`);\n }\n\n if (options?.terminate === undefined) {\n runOptions.terminate = false;\n }\n\n let tagDataOffset = 0;\n if (options?.tag !== undefined) {\n tagDataOffset = allocWasmString(options.tag, allocs);\n }\n\n runOptionsHandle = wasm._OrtCreateRunOptions(\n runOptions.logSeverityLevel!, runOptions.logVerbosityLevel!, !!runOptions.terminate!, tagDataOffset);\n if (runOptionsHandle === 0) {\n checkLastError('Can\\'t create run options.');\n }\n\n if (options?.extra !== undefined) {\n iterateExtraOptions(options.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddRunConfigEntry(runOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(`Can't set a run config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [runOptionsHandle, allocs];\n } catch (e) {\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nconst getGraphOptimzationLevel = (graphOptimizationLevel: string|unknown): number => {\n switch (graphOptimizationLevel) {\n case 'disabled':\n return 0;\n case 'basic':\n return 1;\n case 'extended':\n return 2;\n case 'all':\n return 99;\n default:\n throw new Error(`unsupported graph optimization level: ${graphOptimizationLevel}`);\n }\n};\n\nconst getExecutionMode = (executionMode: 'sequential'|'parallel'): number => {\n switch (executionMode) {\n case 'sequential':\n return 0;\n case 'parallel':\n return 1;\n default:\n throw new Error(`unsupported execution mode: ${executionMode}`);\n }\n};\n\nconst appendDefaultOptions = (options: InferenceSession.SessionOptions): void => {\n if (!options.extra) {\n options.extra = {};\n }\n if (!options.extra.session) {\n options.extra.session = {};\n }\n const session = options.extra.session as Record;\n if (!session.use_ort_model_bytes_directly) {\n // eslint-disable-next-line camelcase\n session.use_ort_model_bytes_directly = '1';\n }\n\n // if using JSEP with WebGPU, always disable memory pattern\n if (options.executionProviders &&\n options.executionProviders.some(ep => (typeof ep === 'string' ? ep : ep.name) === 'webgpu')) {\n options.enableMemPattern = false;\n }\n};\n\nconst setExecutionProviders =\n (sessionOptionsHandle: number, executionProviders: readonly InferenceSession.ExecutionProviderConfig[],\n allocs: number[]): void => {\n for (const ep of executionProviders) {\n let epName = typeof ep === 'string' ? ep : ep.name;\n\n // check EP name\n switch (epName) {\n case 'webnn':\n epName = 'WEBNN';\n if (typeof ep !== 'string') {\n const webnnOptions = ep as InferenceSession.WebNNExecutionProviderOption;\n // const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n if (deviceType) {\n const keyDataOffset = allocWasmString('deviceType', allocs);\n const valueDataOffset = allocWasmString(deviceType, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(`Can't set a session config entry: 'deviceType' - ${deviceType}.`);\n }\n }\n }\n break;\n case 'webgpu':\n epName = 'JS';\n if (typeof ep !== 'string') {\n const webgpuOptions = ep as InferenceSession.WebGpuExecutionProviderOption;\n if (webgpuOptions?.preferredLayout) {\n if (webgpuOptions.preferredLayout !== 'NCHW' && webgpuOptions.preferredLayout !== 'NHWC') {\n throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${webgpuOptions.preferredLayout}`);\n }\n const keyDataOffset = allocWasmString('preferredLayout', allocs);\n const valueDataOffset = allocWasmString(webgpuOptions.preferredLayout, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(\n `Can't set a session config entry: 'preferredLayout' - ${webgpuOptions.preferredLayout}.`);\n }\n }\n }\n break;\n case 'wasm':\n case 'cpu':\n continue;\n default:\n throw new Error(`not supported execution provider: ${epName}`);\n }\n\n const epNameDataOffset = allocWasmString(epName, allocs);\n if (getInstance()._OrtAppendExecutionProvider(sessionOptionsHandle, epNameDataOffset) !== 0) {\n 
checkLastError(`Can't append execution provider: ${epName}.`);\n }\n }\n };\n\nexport const setSessionOptions = (options?: InferenceSession.SessionOptions): [number, number[]] => {\n const wasm = getInstance();\n let sessionOptionsHandle = 0;\n const allocs: number[] = [];\n\n const sessionOptions: InferenceSession.SessionOptions = options || {};\n appendDefaultOptions(sessionOptions);\n\n try {\n const graphOptimizationLevel = getGraphOptimzationLevel(sessionOptions.graphOptimizationLevel ?? 'all');\n const executionMode = getExecutionMode(sessionOptions.executionMode ?? 'sequential');\n const logIdDataOffset =\n typeof sessionOptions.logId === 'string' ? allocWasmString(sessionOptions.logId, allocs) : 0;\n\n const logSeverityLevel = sessionOptions.logSeverityLevel ?? 2; // Default to 2 - warning\n if (!Number.isInteger(logSeverityLevel) || logSeverityLevel < 0 || logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${logSeverityLevel}`);\n }\n\n const logVerbosityLevel = sessionOptions.logVerbosityLevel ?? 0; // Default to 0 - verbose\n if (!Number.isInteger(logVerbosityLevel) || logVerbosityLevel < 0 || logVerbosityLevel > 4) {\n throw new Error(`log verbosity level is not valid: ${logVerbosityLevel}`);\n }\n\n const optimizedModelFilePathOffset = typeof sessionOptions.optimizedModelFilePath === 'string' ?\n allocWasmString(sessionOptions.optimizedModelFilePath, allocs) :\n 0;\n\n sessionOptionsHandle = wasm._OrtCreateSessionOptions(\n graphOptimizationLevel, !!sessionOptions.enableCpuMemArena, !!sessionOptions.enableMemPattern, executionMode,\n !!sessionOptions.enableProfiling, 0, logIdDataOffset, logSeverityLevel, logVerbosityLevel,\n optimizedModelFilePathOffset);\n if (sessionOptionsHandle === 0) {\n checkLastError('Can\\'t create session options.');\n }\n\n if (sessionOptions.executionProviders) {\n setExecutionProviders(sessionOptionsHandle, sessionOptions.executionProviders, allocs);\n }\n\n if (sessionOptions.enableGraphCapture !== undefined) {\n if (typeof sessionOptions.enableGraphCapture !== 'boolean') {\n throw new Error(`enableGraphCapture must be a boolean value: ${sessionOptions.enableGraphCapture}`);\n }\n const keyDataOffset = allocWasmString('enableGraphCapture', allocs);\n const valueDataOffset = allocWasmString(sessionOptions.enableGraphCapture.toString(), allocs);\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(\n `Can't set a session config entry: 'enableGraphCapture' - ${sessionOptions.enableGraphCapture}.`);\n }\n }\n\n if (sessionOptions.freeDimensionOverrides) {\n for (const [name, value] of Object.entries(sessionOptions.freeDimensionOverrides)) {\n if (typeof name !== 'string') {\n throw new Error(`free dimension override name must be a string: ${name}`);\n }\n if (typeof value !== 'number' || !Number.isInteger(value) || value < 0) {\n throw new Error(`free dimension override value must be a non-negative integer: ${value}`);\n }\n const nameOffset = allocWasmString(name, allocs);\n if (wasm._OrtAddFreeDimensionOverride(sessionOptionsHandle, nameOffset, value) !== 0) {\n checkLastError(`Can't set a free dimension override: ${name} - ${value}.`);\n }\n }\n }\n\n if (sessionOptions.extra !== undefined) {\n iterateExtraOptions(sessionOptions.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, 
valueDataOffset) !== 0) {\n checkLastError(`Can't set a session config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [sessionOptionsHandle, allocs];\n } catch (e) {\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// This file includes common definitions. They do NOT have dependency on the WebAssembly instance.\n\n/**\n * Copied from ONNX definition. Use this to drop dependency 'onnx_proto' to decrease compiled .js file size.\n */\nexport const enum DataType {\n undefined = 0,\n float = 1,\n uint8 = 2,\n int8 = 3,\n uint16 = 4,\n int16 = 5,\n int32 = 6,\n int64 = 7,\n string = 8,\n bool = 9,\n float16 = 10,\n double = 11,\n uint32 = 12,\n uint64 = 13,\n complex64 = 14,\n complex128 = 15,\n bfloat16 = 16\n}\n\n/**\n * Map string tensor data to enum value\n */\nexport const tensorDataTypeStringToEnum = (type: string): DataType => {\n switch (type) {\n case 'int8':\n return DataType.int8;\n case 'uint8':\n return DataType.uint8;\n case 'bool':\n return DataType.bool;\n case 'int16':\n return DataType.int16;\n case 'uint16':\n return DataType.uint16;\n case 'int32':\n return DataType.int32;\n case 'uint32':\n return DataType.uint32;\n case 'float16':\n return DataType.float16;\n case 'float32':\n return DataType.float;\n case 'float64':\n return DataType.double;\n case 'string':\n return DataType.string;\n case 'int64':\n return DataType.int64;\n case 'uint64':\n return DataType.uint64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n};\n\n/**\n * Map enum value to string tensor data\n */\nexport const tensorDataTypeEnumToString = (typeProto: DataType): Tensor.Type => {\n switch (typeProto) {\n case DataType.int8:\n return 'int8';\n case DataType.uint8:\n return 'uint8';\n case DataType.bool:\n return 'bool';\n case DataType.int16:\n return 'int16';\n case DataType.uint16:\n return 'uint16';\n case DataType.int32:\n return 'int32';\n case DataType.uint32:\n return 'uint32';\n case DataType.float16:\n return 'float16';\n case DataType.float:\n return 'float32';\n case DataType.double:\n return 'float64';\n case DataType.string:\n return 'string';\n case DataType.int64:\n return 'int64';\n case DataType.uint64:\n return 'uint64';\n\n default:\n throw new Error(`unsupported data type: ${typeProto}`);\n }\n};\n\n/**\n * get tensor element size in bytes by the given data type\n * @returns size in integer or undefined if the data type is not supported\n */\nexport const getTensorElementSize = (dateType: number): number|\n undefined => [undefined, 4, 1, 1, 2, 2, 4, 8, undefined, 1, 2, 8, 4, 8, undefined, undefined, undefined][dateType];\n\n/**\n * get typed array constructor by the given tensor type\n */\nexport const tensorTypeToTypedArrayConstructor = (type: Tensor.Type): Float32ArrayConstructor|Uint8ArrayConstructor|\n Int8ArrayConstructor|Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|\n Uint8ArrayConstructor|Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor => {\n switch (type) {\n case 'float16':\n // allow 
Float16Array polyfill.\n return typeof Float16Array !== 'undefined' && Float16Array.from ? Float16Array : Uint16Array;\n case 'float32':\n return Float32Array;\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'uint16':\n return Uint16Array;\n case 'int16':\n return Int16Array;\n case 'int32':\n return Int32Array;\n case 'bool':\n return Uint8Array;\n case 'float64':\n return Float64Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'uint64':\n return BigUint64Array;\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n };\n\n/**\n * Map string log level to integer value\n */\nexport const logLevelStringToEnum = (logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal'): number => {\n switch (logLevel) {\n case 'verbose':\n return 0;\n case 'info':\n return 1;\n case 'warning':\n return 2;\n case 'error':\n return 3;\n case 'fatal':\n return 4;\n default:\n throw new Error(`unsupported logging level: ${logLevel}`);\n }\n};\n\n/**\n * Check whether the given tensor type is supported by GPU buffer\n */\nexport const isGpuBufferSupportedType = (type: Tensor.Type): type is Tensor.GpuBufferDataTypes => type === 'float32' ||\n type === 'float16' || type === 'int32' || type === 'int64' || type === 'uint32' || type === 'uint8' ||\n type === 'bool';\n\n/**\n * Map string data location to integer value\n */\nexport const dataLocationStringToEnum = (location: Tensor.DataLocation): number => {\n switch (location) {\n case 'none':\n return 0;\n case 'cpu':\n return 1;\n case 'cpu-pinned':\n return 2;\n case 'texture':\n return 3;\n case 'gpu-buffer':\n return 4;\n default:\n throw new Error(`unsupported data location: ${location}`);\n }\n};\n\n/**\n * Map integer data location to string value\n */\nexport const dataLocationEnumToString = (location: number): Tensor.DataLocation|undefined =>\n (['none', 'cpu', 'cpu-pinned', 'texture', 'gpu-buffer'] as const)[location];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {isNode} from './wasm-utils-env';\n\n/**\n * Load a file into a Uint8Array.\n *\n * @param file - the file to load. Can be a URL/path, a Blob, an ArrayBuffer, or a Uint8Array.\n * @returns a Uint8Array containing the file data.\n */\nexport const loadFile = async(file: string|Blob|ArrayBufferLike|Uint8Array): Promise => {\n if (typeof file === 'string') {\n if (isNode) {\n // load file into ArrayBuffer in Node.js\n try {\n const {readFile} = require('node:fs/promises');\n return new Uint8Array(await readFile(file));\n } catch (e) {\n if (e.code === 'ERR_FS_FILE_TOO_LARGE') {\n // file is too large, use fs.createReadStream instead\n const {createReadStream} = require('node:fs');\n const stream = createReadStream(file);\n const chunks: Uint8Array[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n return new Uint8Array(Buffer.concat(chunks));\n }\n throw e;\n }\n } else {\n // load file into ArrayBuffer in browsers\n const response = await fetch(file);\n if (!response.ok) {\n throw new Error(`failed to load external data file: ${file}`);\n }\n const contentLengthHeader = response.headers.get('Content-Length');\n const fileSize = contentLengthHeader ? parseInt(contentLengthHeader, 10) : 0;\n if (fileSize < 1073741824 /* 1GB */) {\n // when Content-Length header is not set, we cannot determine the file size. 
We assume it is small enough to\n // load into memory.\n return new Uint8Array(await response.arrayBuffer());\n } else {\n // file is too large, use stream instead\n if (!response.body) {\n throw new Error(`failed to load external data file: ${file}, no response body.`);\n }\n const reader = response.body.getReader();\n\n let buffer;\n try {\n // try to create ArrayBuffer directly\n buffer = new ArrayBuffer(fileSize);\n } catch (e) {\n if (e instanceof RangeError) {\n // use WebAssembly Memory to allocate larger ArrayBuffer\n const pages = Math.ceil(fileSize / 65536);\n buffer = new WebAssembly.Memory({initial: pages, maximum: pages}).buffer;\n } else {\n throw e;\n }\n }\n\n let offset = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const {done, value} = await reader.read();\n if (done) {\n break;\n }\n const chunkSize = value.byteLength;\n const chunk = new Uint8Array(buffer, offset, chunkSize);\n chunk.set(value);\n offset += chunkSize;\n }\n return new Uint8Array(buffer, 0, fileSize);\n }\n }\n\n } else if (file instanceof Blob) {\n return new Uint8Array(await file.arrayBuffer());\n } else if (file instanceof Uint8Array) {\n return file;\n } else {\n return new Uint8Array(file);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {logLevelStringToEnum} from '../wasm-common';\n\ntype LogLevel = NonNullable;\ntype MessageString = string;\ntype MessageFunction = () => string;\ntype Message = MessageString|MessageFunction;\n\nconst logLevelPrefix = ['V', 'I', 'W', 'E', 'F'];\n\nconst doLog = (level: number, message: string): void => {\n // eslint-disable-next-line no-console\n console.log(`[${logLevelPrefix[level]},${new Date().toISOString()}]${message}`);\n};\n\nlet configLogLevel: LogLevel|undefined;\nlet debug: boolean|undefined;\n\nexport const configureLogger = ($configLogLevel: LogLevel, $debug: boolean): void => {\n configLogLevel = $configLogLevel;\n debug = $debug;\n};\n\n/**\n * A simple logging utility to log messages to the console.\n */\nexport const LOG = (logLevel: LogLevel, msg: Message): void => {\n const messageLevel = logLevelStringToEnum(logLevel);\n const configLevel = logLevelStringToEnum(configLogLevel);\n if (messageLevel >= configLevel) {\n doLog(messageLevel, typeof msg === 'function' ? msg() : msg);\n }\n};\n\n/**\n * A simple logging utility to log messages to the console. Only logs when debug is enabled.\n */\nexport const LOG_DEBUG: typeof LOG = (...args: Parameters) => {\n if (debug) {\n LOG(...args);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\nimport {tensorTypeToTypedArrayConstructor} from '../wasm-common';\n\nexport const createView = (dataBuffer: ArrayBuffer, type: Tensor.Type): Int32Array|Uint32Array|BigInt64Array|\n BigUint64Array|Uint8Array|Float32Array|Float64Array|Int8Array|Int16Array|Uint16Array =>\n new (tensorTypeToTypedArrayConstructor(type))(dataBuffer);\n\n/**\n * a TensorView does not own the data.\n */\nexport interface TensorView {\n readonly data: number;\n readonly dataType: number;\n readonly dims: readonly number[];\n\n /**\n * get a Float32Array data view of the tensor data. tensor data must be on CPU.\n */\n getFloat32Array(): Float32Array;\n\n /**\n * get a BigInt64Array data view of the tensor data. 
tensor data must be on CPU.\n */\n getBigInt64Array(): BigInt64Array;\n\n /**\n * get a Int32Array data view of the tensor data. tensor data must be on CPU.\n */\n getInt32Array(): Int32Array;\n\n /**\n * create a new tensor view with the same data but different dimensions.\n */\n reshape(newDims: readonly number[]): TensorView;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../wasm-common';\nimport {TensorView} from '../tensor-view';\n\nimport {ShaderHelper} from './ops/common';\n\nexport type SessionState = 'default'|'capturing'|'replaying';\n\nexport enum GpuDataType {\n default = 0,\n upload = 1,\n profile = 2\n}\nexport type GpuDataId = number;\n\nexport type GpuArchitecture = 'ampere';\nexport type GpuVendor = 'amd'|'intel'|'nvidia';\nexport interface AdapterInfo {\n isArchitecture: (architecture: GpuArchitecture) => boolean;\n isVendor: (vendor: GpuVendor) => boolean;\n}\n\nexport interface GpuData {\n type: GpuDataType;\n id: GpuDataId;\n buffer: GPUBuffer;\n}\n\nexport interface TensorInfo {\n dims: readonly number[];\n dataType: number;\n}\n\nexport interface ProgramUniform {\n type: DataType;\n data: number|readonly number[];\n}\n\nexport type ProgramUniformVariableInfo = [type: DataType, length: number];\n\n/**\n * Represent the dependency of a program on a specific input tensor.\n *\n * - 'none': the shader/uniform does not depend on this input's info\n * - 'type': the shader/uniform depends on data type of this input\n * - 'rank': the shader/uniform depends on data type and the rank of this input\n * - 'dims': the shader/uniform depends on data type and the dims of this input\n * - 'data': the shader/uniform depends on data type, the dims and the data of this input\n */\nexport type ProgramInputTensorInfoDependency = 'none'|'type'|'rank'|'dims'|'data';\n\n/**\n * Represent information about a program's cache for shader.\n */\nexport interface ProgramShaderCacheInfo {\n /**\n * an optional string as a cache hint in the artifact cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains initializing-time information, such as the attributes or any information of\n * initializers. It should NOT contain any runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'dims' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n/**\n * Represent information about a program's cache for uniform.\n */\nexport interface ProgramUniformCacheInfo {\n /**\n * an optional string as a cache hint in the uniform cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'none' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n\n /**\n * an optional object describing the cache information of the program shader.\n *\n * If this is not specified, assume hint is empty and inputDependencies are ['dims'] for all inputs.\n */\n shaderCache?: ProgramShaderCacheInfo;\n\n /**\n * the shader's processing source code.\n *\n * This function will be called when shader cache missed.\n */\n getShaderSource: (shaderHelper: ShaderHelper) => string;\n\n /**\n * A function to get run data required to run the program.\n *\n * This function will be called every time the program is executed. Should keep this function as simple as possible.\n */\n getRunData: (inputs: readonly TensorView[]) => {\n outputs: readonly TensorInfo[];\n dispatchGroup: {x: number; y?: number; z?: number};\n programUniforms?: readonly ProgramUniform[];\n };\n}\n\nexport interface Artifact {\n programInfo: ProgramInfo;\n computePipeline: GPUComputePipeline;\n uniformVariablesInfo: readonly ProgramUniformVariableInfo[]|undefined;\n}\n\nexport interface ComputeContextInputsOutputsMapping {\n /**\n * specify the mapping to the program's inputs. the value can be a number or a tensor view.\n * - if it's a number, it's the index of the kernel's input\n * - if it's a tensor view, it's an existing tensor view that will be used as the input\n *\n * if inputs is not specified, the mapping will be the kernel's inputs in order.\n */\n readonly inputs?: ReadonlyArray;\n /**\n * specify the mapping to the program's outputs. the value must be a number.\n * - if it's a non-negative number, it's the index of the kernel's output\n * - if it's -1, it's an output that will be created as a temporary value. this value will be released after\n * the kernel is executed.\n * - if it's -2, it's an output that will be created as a persistent value. this value will be released when the\n * kernel is released.\n *\n * if outputs is not specified, the mapping will be the kernel's outputs in order.\n */\n readonly outputs?: readonly number[];\n}\n\n/**\n * A ComputeContext instance carries the states that representing the current running of a kernel.\n */\nexport interface ComputeContext {\n /**\n * gpu adapter info\n */\n readonly adapterInfo: AdapterInfo;\n\n /**\n * stores the pointer to OpKernelContext\n */\n readonly opKernelContext: number;\n\n /**\n * a list of inputs, each input is an instance of TensorView\n */\n readonly inputs: readonly TensorView[];\n\n /**\n * a custom data object that can be used to store any data that is needed by the kernel\n */\n readonly kernelCustomData: {[key: string]: unknown};\n\n /**\n * a buffer that can be used to access custom data created each time the kernel is executed\n */\n readonly customDataBuffer: Uint8Array;\n\n /**\n * a number of outputs for the node\n */\n readonly outputCount: number;\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[];\n output(index: number, dims: readonly number[]): number;\n getMaxComputeWorkgroupSizes(): [number, number, number];\n getMaxComputeWorkgroupStoragesize(): number;\n}\n\nexport type TimestampQuery = 'none'|'inside-passes'|'at-passes';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {GpuData, GpuDataId, GpuDataType} from './types';\n\n/**\n * manages GpuDataId -> GpuBuffer\n */\nexport interface GpuDataManager {\n /**\n * copy data from CPU to GPU.\n */\n upload(id: GpuDataId, data: Uint8Array): void;\n /**\n * copy data from GPU to GPU.\n */\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void;\n /**\n * create new data on GPU.\n */\n create(size: number, usage?: number): GpuData;\n /**\n * get GPU data by ID.\n */\n get(id: GpuDataId): GpuData|undefined;\n /**\n * release the data on GPU by ID.\n *\n * @return size of the data released\n */\n release(id: GpuDataId): number;\n /**\n * copy data from GPU to CPU.\n */\n download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise;\n\n /**\n * refresh the buffers that marked for release.\n *\n * when release() is called, the buffer is not released immediately. this is because we need to wait for the commands\n * to be submitted to the GPU. this function is called after the commands are submitted so that the buffers can be\n * actually released.\n */\n refreshPendingBuffers(): void;\n\n /**\n * register an external buffer for IO Binding. If the buffer is already registered, return the existing GPU data ID.\n *\n * GPU data manager only manages a mapping between the buffer and the GPU data ID. It will not manage the lifecycle of\n * the external buffer.\n */\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number;\n\n /**\n * unregister an external buffer for IO Binding.\n */\n unregisterExternalBuffer(buffer: GPUBuffer): void;\n\n /**\n * destroy all gpu buffers.\n */\n dispose(): void;\n\n /**\n * release session related data.\n * @param sessionId - specify the session ID.\n */\n onReleaseSession(sessionId: number): void;\n}\n\ninterface StorageCacheValue {\n gpuData: GpuData;\n originalSize: number;\n}\n\nconst bucketFreelist: Map = new Map([\n [64, 250],\n [128, 200],\n [256, 200],\n [512, 200],\n [2048, 230],\n [4096, 200],\n [8192, 50],\n [16384, 50],\n [32768, 50],\n [65536, 50],\n [131072, 50],\n [262144, 50],\n [524288, 50],\n [1048576, 50],\n [2097152, 30],\n [4194304, 20],\n [8388608, 10],\n [12582912, 10],\n [16777216, 10],\n [26214400, 15],\n [33554432, 22],\n [44236800, 2],\n [58982400, 6],\n // we don't want to cache the bucket sizes below but not caching them\n // results in some major performance hits for models like sd-turbo.\n [67108864, 6],\n [134217728, 6],\n [167772160, 6],\n]);\n\nconst bucketArr: number[] = [];\n\n/**\n * normalize the buffer size so that it fits the 128-bits (16 bytes) alignment.\n */\nconst calcNormalizedBufferSize = (size: number) => Math.ceil(size / 16) * 16;\n\n/**\n * calculate the buffer size so that it fits into buckets.\n */\nconst calcBucketBufferSize = (size: number) => {\n for (let idx = 0; idx < bucketArr.length; idx++) {\n const sizeForBucket = bucketArr[idx];\n if (size <= sizeForBucket) {\n return sizeForBucket;\n }\n }\n // not in bucket list -> caller will not cache, round up to 16.\n return Math.ceil(size / 16) * 16;\n};\n\nlet guid = 1;\nconst createNewGpuDataId = () => guid++;\n\n/**\n * exported standard download function. 
This function is used by the session to download the data from GPU, and also by\n * factory to create GPU tensors with the capacity of downloading data from GPU.\n *\n * @param backend - the WebGPU backend\n * @param gpuBuffer - the GPU buffer to download\n * @param originalSize - the original size of the data\n * @param getTargetBuffer - optional. If provided, the data will be copied to the target buffer. Otherwise, a new buffer\n * will be created and returned.\n */\nexport const downloadGpuData =\n async(backend: WebGpuBackend, gpuBuffer: GPUBuffer, originalSize: number, getTargetBuffer?: () => Uint8Array):\n Promise => {\n const bufferSize = calcNormalizedBufferSize(originalSize);\n const gpuReadBuffer = backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: bufferSize, usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ});\n try {\n const commandEncoder = backend.getCommandEncoder();\n backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n gpuBuffer /* source buffer */, 0 /* source offset */, gpuReadBuffer /* destination buffer */,\n 0 /* destination offset */, bufferSize /* size */\n );\n backend.flush();\n\n await gpuReadBuffer.mapAsync(GPUMapMode.READ);\n\n const arrayBuffer = gpuReadBuffer.getMappedRange();\n if (getTargetBuffer) {\n // if we already have a CPU buffer to accept the data, no need to clone the ArrayBuffer.\n const targetBuffer = getTargetBuffer();\n targetBuffer.set(new Uint8Array(arrayBuffer, 0, originalSize));\n return targetBuffer;\n } else {\n // the mapped ArrayBuffer will be released when the GPU buffer is destroyed. Need to clone the\n // ArrayBuffer.\n return new Uint8Array(arrayBuffer.slice(0, originalSize));\n }\n } finally {\n gpuReadBuffer.destroy();\n }\n };\n\nclass GpuDataManagerImpl implements GpuDataManager {\n // GPU Data ID => GPU Data ( storage buffer )\n private storageCache: Map;\n\n // pending buffers for uploading ( data is unmapped )\n private buffersForUploadingPending: GPUBuffer[];\n // pending buffers for computing\n private buffersPending: GPUBuffer[];\n\n // The reusable storage buffers for computing.\n private freeBuffers: Map;\n // The reusable uniform buffers\n private freeUniformBuffers: Map;\n\n // The external buffers registered users for IO Binding.\n private externalBuffers: Map;\n\n // The pendingBuffers for capture graph.\n // a SessionID -> GPUBuffer[] mapping.\n private capturedPendingBuffers: Map;\n\n constructor(private backend: WebGpuBackend) {\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.buffersForUploadingPending = [];\n this.buffersPending = [];\n this.externalBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n\n for (const [key, ] of bucketFreelist) {\n bucketArr.push(key);\n this.freeBuffers.set(key, []);\n this.freeUniformBuffers.set(key, []);\n }\n }\n\n upload(id: GpuDataId, data: Uint8Array): void {\n const srcArrayBuffer = data.buffer;\n const srcOffset = data.byteOffset;\n const srcLength = data.byteLength;\n const size = calcNormalizedBufferSize(srcLength);\n\n // get destination gpu buffer\n const gpuDataCache = this.storageCache.get(id);\n if (!gpuDataCache) {\n throw new Error('gpu data for uploading does not exist');\n }\n if (gpuDataCache.originalSize !== srcLength) {\n throw new Error(`inconsistent data size. 
gpu data size=${gpuDataCache.originalSize}, data size=${srcLength}`);\n }\n\n // create gpu buffer\n const gpuBufferForUploading = this.backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {mappedAtCreation: true, size, usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC});\n\n // copy (upload) data\n const arrayBuffer = gpuBufferForUploading.getMappedRange();\n new Uint8Array(arrayBuffer).set(new Uint8Array(srcArrayBuffer, srcOffset, srcLength));\n gpuBufferForUploading.unmap();\n\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(gpuBufferForUploading, 0, gpuDataCache.gpuData.buffer, 0, size);\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.upload(id=${id})`);\n\n this.buffersForUploadingPending.push(gpuBufferForUploading);\n }\n\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void {\n // get source gpu buffer\n const sourceGpuDataCache = this.storageCache.get(sourceId);\n if (!sourceGpuDataCache) {\n throw new Error('source gpu data for memcpy does not exist');\n }\n // get destination gpu buffer\n const destinationGpuDataCache = this.storageCache.get(destinationId);\n if (!destinationGpuDataCache) {\n throw new Error('destination gpu data for memcpy does not exist');\n }\n if (sourceGpuDataCache.originalSize !== destinationGpuDataCache.originalSize) {\n throw new Error('inconsistent source and destination gpu data size');\n }\n\n const size = calcNormalizedBufferSize(sourceGpuDataCache.originalSize);\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n sourceGpuDataCache.gpuData.buffer, 0, destinationGpuDataCache.gpuData.buffer, 0, size);\n }\n\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number {\n let id: number|undefined;\n if (previousBuffer) {\n id = this.externalBuffers.get(previousBuffer);\n if (id === undefined) {\n throw new Error('previous buffer is not registered');\n }\n if (buffer === previousBuffer) {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${\n id}, buffer is the same, skip.`);\n return id;\n } else if (this.backend.capturedCommandList.has(this.backend.currentSessionId!)) {\n throw new Error(`Registering a different external buffer under graph capture mode is not supported yet.\n Please use the previous external buffer!`);\n }\n this.externalBuffers.delete(previousBuffer);\n } else {\n id = createNewGpuDataId();\n }\n\n this.storageCache.set(id, {gpuData: {id, type: GpuDataType.default, buffer}, originalSize});\n this.externalBuffers.set(buffer, id);\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${id}, registered.`);\n return id;\n }\n\n unregisterExternalBuffer(buffer: GPUBuffer): void {\n const id = this.externalBuffers.get(buffer);\n if (id !== undefined) {\n this.storageCache.delete(id);\n this.externalBuffers.delete(buffer);\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${id}`);\n }\n }\n\n // eslint-disable-next-line no-bitwise\n create(size: number, usage = GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST): GpuData {\n const bufferSize = calcBucketBufferSize(size);\n\n let gpuBuffer;\n // Currently, only storage buffers are reused.\n // eslint-disable-next-line no-bitwise\n const isStorage = 
(usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE;\n // eslint-disable-next-line no-bitwise\n const isUniform = (usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM;\n if (isStorage || isUniform) {\n const freeBuffers = isStorage ? this.freeBuffers : this.freeUniformBuffers;\n const buffers = freeBuffers.get(bufferSize);\n if (!buffers) {\n // no such bucket/freelist - create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n } else {\n if (buffers.length > 0) {\n // in freelist, use it\n gpuBuffer = buffers.pop() as GPUBuffer;\n } else {\n // bucket empty, create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n }\n } else {\n // create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n\n const gpuData = {id: createNewGpuDataId(), type: GpuDataType.default, buffer: gpuBuffer};\n this.storageCache.set(gpuData.id, {gpuData, originalSize: size});\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.create(size=${size}) => id=${gpuData.id}`);\n return gpuData;\n }\n\n get(id: GpuDataId): GpuData|undefined {\n return this.storageCache.get(id)?.gpuData;\n }\n\n release(id: GpuDataId): number {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('releasing data does not exist');\n }\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.release(id=${id}), gpuDataId=${cachedData.gpuData.id}`);\n\n this.storageCache.delete(id);\n this.buffersPending.push(cachedData.gpuData.buffer);\n // cachedData.gpuData.buffer.destroy();\n\n return cachedData.originalSize;\n }\n\n async download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('data does not exist');\n }\n await downloadGpuData(this.backend, cachedData.gpuData.buffer, cachedData.originalSize, getTargetBuffer);\n }\n\n refreshPendingBuffers(): void {\n for (const buffer of this.buffersForUploadingPending) {\n // upload buffer is only useful in the session creation time. 
So we don't need to reuse them in session running.\n buffer.destroy();\n }\n this.buffersForUploadingPending = [];\n\n if (this.buffersPending.length === 0) {\n return;\n }\n\n if (this.backend.sessionStatus === 'default') {\n for (const buffer of this.buffersPending) {\n const maxInFreeList = bucketFreelist.get(buffer.size);\n\n // eslint-disable-next-line no-bitwise\n if ((buffer.usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE) {\n // Put the pending buffer to freeBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n // eslint-disable-next-line no-bitwise\n } else if ((buffer.usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM) {\n // Put the pending buffer to freeUniformBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeUniformBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n } else {\n buffer.destroy();\n }\n }\n this.buffersPending = [];\n } else {\n // Don't release intermediate tensors in non-default mode.\n // TODO: reuse the storage buffers in non-default mode.\n let capturedBuffers = this.capturedPendingBuffers.get(this.backend.currentSessionId!);\n if (!capturedBuffers) {\n capturedBuffers = [];\n this.capturedPendingBuffers.set(this.backend.currentSessionId!, capturedBuffers);\n }\n for (const buffer of this.buffersPending) {\n capturedBuffers.push(buffer);\n }\n this.buffersPending = [];\n }\n }\n\n dispose() {\n this.freeBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.freeUniformBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n\n this.storageCache.forEach((storage) => {\n storage.gpuData.buffer.destroy();\n });\n\n this.capturedPendingBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n }\n\n onReleaseSession(sessionId: number) {\n // release the captured pending buffers.\n const pendingBuffers = this.capturedPendingBuffers.get(sessionId);\n if (pendingBuffers) {\n pendingBuffers.forEach(buffer => {\n buffer.destroy();\n });\n this.capturedPendingBuffers.delete(sessionId);\n }\n }\n}\n\nexport const createGpuDataManager = (...args: ConstructorParameters): GpuDataManager =>\n new GpuDataManagerImpl(...args);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\n/**\n * create a new object from the given attribute, and add a cacheKey property to it\n */\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable no-param-reassign */\n\nexport class MatMulUtil {\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n const max = Math.max(aLen, bLen);\n if (aLen && bLen) {\n cdims[crank - i] = Math.max(aLen, bLen);\n } else {\n // when either aLen or bLen is 0, the other should be either 0 or 1, otherwise it is not broadcastable.\n if (max > 1) {\n return undefined;\n }\n cdims[crank - i] = 0;\n }\n }\n\n return cdims;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n}\n\n\nexport class ShapeUtil {\n /**\n * calculate the size (number of elements)\n */\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n /**\n * convert dims corresponding to type change to pack. ex. 
uint8 data to uint32\n */\n static convertShape(dims: readonly number[], size = 4): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n }\n const newDims = new Array(rank);\n let i = rank - 1;\n while (i >= 0) {\n if (dims[i] % size === 0) {\n newDims[i] = dims[i] / size;\n break;\n }\n if (size % dims[i] !== 0) {\n throw new Error('cannot convert shape');\n }\n newDims[i] = 1;\n size /= dims[i];\n i--;\n }\n for (i--; i >= 0; i--) {\n newDims[i] = dims[i];\n }\n return newDims;\n }\n\n /**\n * calculate the size (number of elements) from the given axis (inclusive)\n */\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n /**\n * calculate the size (number of elements) to the given axis (exclusive)\n */\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n /**\n * calculate the size (number of elements) from and to the given axis [start, end)\n */\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be negative.\n if (dims[i] < 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. Most likely the range contains negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank?: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank ?? 
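// --- Editor's note: illustrative sketch, not part of the bundled sources. ---
// computeStrides above produces row-major strides; the flat offset of an element
// is then the dot product of its indices with those strides.
// Worked example for shape [2, 3, 4]:
//   strides = [3 * 4, 4, 1] = [12, 4, 1]
//   offset of indices [1, 2, 3] = 1*12 + 2*4 + 3*1 = 23
const offsetOf = (indices: readonly number[], strides: readonly number[]): number =>
    indices.reduce((acc, idx, i) => acc + idx * strides[i], 0);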
axes.length));\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]): void {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], 
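// --- Editor's note: illustrative worked examples, not part of the bundled sources. ---
// ShapeUtil.normalizeAxis maps a negative axis into [0, rank):
//   normalizeAxis(-1, 4) -> 3
// ShapeUtil.padShape adds head and tail pads per dimension (pad = [heads..., tails...]):
//   padShape([3, 4], [1, 1, 2, 2]) -> [3 + 1 + 2, 4 + 1 + 2] = [6, 7]
// ShapeUtil.sortBasedOnPerm without a perm simply reverses the dims:
//   sortBasedOnPerm([2, 3, 4]) -> [4, 3, 2]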
isChannelLast: boolean, autoPad?: string): void {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + (isChannelLast ? 1 : 2)], strides[dim], dilations[dim], kernelShape[dim], pads, dim,\n dim + inputDims.length - 2, autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
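// --- Editor's note: illustrative sketch, not part of the bundled sources; it mirrors
// the SAME_UPPER branch of the pad-adjustment helper shown next (dilation must be 1).
// Worked example: inSize = 5, stride = 2, kernel = 3
//   targetSize = ceil(inSize / stride)                        = 3
//   padNeeded  = (targetSize - 1) * stride + kernel - inSize  = 2
//   padHead    = floor(padNeeded / 2) = 1, padTail = padNeeded - padHead = 1
//   outSize    = floor((inSize + padNeeded - kernel) / stride) + 1 = 3
const samePads = (inSize: number, stride: number, kernel: number): [number, number] => {
  const targetSize = Math.ceil(inSize / stride);
  const padNeeded = (targetSize - 1) * stride + kernel - inSize;
  const padHead = Math.floor(padNeeded / 2);  // SAME_LOWER would use floor((padNeeded + 1) / 2)
  return [padHead, padNeeded - padHead];
};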
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n", "// Copyright (c) Microsoft Corporation. 
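// --- Editor's note: illustrative worked example, not part of the bundled sources. ---
// GemmUtil.getShapeOfGemmResult above resolves (M, N, K) from the two 2-D operand
// shapes and the transpose flags:
//   leftShape  = [3, 4], transLeft  = false -> M = 3, K = 4
//   rightShape = [5, 4], transRight = true  -> N = 5, shared dim rightShape[1] = 4 = K (ok)
//   result: [M, N, K] = [3, 5, 4]; a bias of shape [1, 5] broadcasts to [M, N] = [3, 5].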
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {ShapeUtil} from '../../util';\nimport {ProgramUniform, ProgramUniformVariableInfo} from '../types';\n\n/**\n * constant value for a workgroup size.\n *\n * We definitely can do further optimization in future, but for now we use 64.\n *\n * rule of thumb: Use [a workgroup size of] 64 unless you know what GPU you are targeting or that your workload\n * needs something different.\n *\n * from: https://surma.dev/things/webgpu/\n **/\nexport const WORKGROUP_SIZE = 64;\n\ninterface IndicesHelperTypes {\n /**\n * WGSL type of indices expression\n */\n readonly indices: string;\n\n /**\n * WGSL type of a value\n */\n readonly value: string;\n\n /**\n * WGSL type of storage type representing a value\n *\n * This is usually the same to `value`, but for some type (eg. bool), we need to use `u32` as storage type for\n * value type `vec4`\n */\n readonly storage: string;\n\n /**\n * tensor type as represented in TensorView\n */\n readonly tensor: number;\n}\n\n/**\n * A helper class for generating WGSL code for manipulating indices and data for a shader's input or output.\n *\n * This class is designed to offer a unified way to generate WGSL code for manipulating indices and data for a shader's\n * input or output.\n *\n * The following is a list of terminologies used in this class:\n * - `offset`: a uint32 value representing the offset of an element in the data buffer.\n * - `indices`: an abstraction of a multi-dimensional array's indices representing the data's index on each dimension.\n * - `value`: a value of a data element.\n *\n * Users are expected to create an instance of this class for each shader's input or output, and use the instance to\n * generate WGSL code for manipulating indices and data. The following 2 exported functions are for users to call to\n * create an instance of an indices helper:\n * - `inputVariable()`: create an indices helper instance for an input.\n * - `outputVariable()`: create an indices helper instance for an output.\n * - `internalVariable()`: create an indices helper instance for an internal variable.\n *\n * An indices helper instance contains helper functions for the following operations:\n * - access readonly basic information, including: `name`(the name of the input or output), `usage`(whether it's an\n * input, an output or an internal variable) and `shape`(the passed in shape).\n * - `type`: access readonly type information, including: `indices`(the type of indices), `value`(the type of value at\n * runtime), `storage`(the type of value at storage) and `tensor`(the tensor type as represented in TensorView).\n * - generate WGSL code for getting indices from offset. 
Use `offsetToIndices()` for WGSL code snippet to calculate\n * indices from offset, and use `indicesToOffset()` for WGSL code snippet to calculate offset from indices.\n * - to manipulate an instance of indices, use `setIndices()` and `getIndices()` to set and get the indices on an\n * indices variable.\n * - to manipulate data, use `set()`/`get()` to access data at the given indices from parameter list, use\n * `setByIndices()`/`getByIndices()` to access data at the given indices from an indices variable, and use\n * `setByOffset()`/`getByOffset()` to access data at the given offset.\n * - `impl`: get WGSL code of function implementation for the util functions mentioned above.\n */\nexport interface IndicesHelper {\n /**\n * get WGSL code of function implementation for the util functions.\n *\n */\n readonly impl: () => string;\n\n /**\n * get type info\n */\n readonly type: IndicesHelperTypes;\n\n /**\n * WGSL code of a expression for getting indices from offset.\n *\n * @param varOffset - a u32 expression representing the offset.\n *\n * @returns an `type.indices` expression\n */\n readonly offsetToIndices: (varOffset: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting offset from indices.\n *\n * @param varIndices - a `type.indices` expression representing the indices.\n *\n * @returns an `u32` expression\n */\n readonly indicesToOffset: (varIndices: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting original offset from broadcasted indices.\n *\n * @param varIndices - a `type.indices` expression representing the output indices.\n * @param output - output IndicesHelper.\n *\n * @returns an `u32` expression\n */\n readonly broadcastedIndicesToOffset: (varIndices: string, output: IndicesHelper) => string;\n\n /**\n * WGSL code of generating an indices literal\n *\n * @param init - initial value.\n */\n readonly indices: (...init: ReadonlyArray) => string;\n\n /**\n * WGSL code of a statement for setting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to set. can be a number or a string (WGSL `u32` expression).\n * @param value - the value to set. can be a number or a string (WGSL `u32` expression).\n *\n * @returns a WGSL statement\n */\n readonly indicesSet: (varIndices: string, idx: number|string, value: number|string) => void;\n\n /**\n * WGSL code of an `u32` expression for getting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to get. can be a number or a string (WGSL `u32` expression).\n *\n * @returns an `u32` expression\n */\n readonly indicesGet: (varIndices: string, idx: number|string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices.\n *\n * @param indicesAndValue - an array of numbers or strings (WGSL `u32` expression) representing the indices, followed\n * by the value to set. This array should have exactly `shape.length + 1` elements.\n */\n readonly set: (...indicesAndValue: ReadonlyArray) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n * @param value - the value to set. 
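// --- Editor's note: illustrative sketch, not part of the bundled sources. ---
// CPU equivalent of the o2i_/i2o_ WGSL helpers generated by the IndicesHelper:
// split a flat offset into per-dimension indices by repeated division with the
// row-major strides; the reverse direction is the dot product with the strides.
const offsetToIndicesCpu = (offset: number, strides: readonly number[]): number[] => {
  const indices: number[] = [];
  let current = offset;
  for (let i = 0; i < strides.length - 1; i++) {
    indices.push(Math.floor(current / strides[i]));  // dim_i
    current = current % strides[i];                  // rest_i
  }
  indices.push(current);
  return indices;
};
// For shape [2, 3, 4] (strides [12, 4, 1]): offsetToIndicesCpu(23, [12, 4, 1]) -> [1, 2, 3]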
should be a WGSL expression.\n */\n readonly setByIndices: (varIndices: string, value: string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n * @param value - the value to set. should be a WGSL expression.\n */\n readonly setByOffset: (offset: number|string, value: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices.\n *\n * @param indices - an array of numbers or strings (WGSL `u32` expression) representing the indices.\n */\n readonly get: (...indices: ReadonlyArray) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n */\n readonly getByIndices: (varIndices: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n */\n readonly getByOffset: (offset: number|string) => string;\n\n /**\n * name of the data variable\n */\n readonly name: string;\n\n /**\n * whether the helper is for an input, an output or an internal variable.\n */\n readonly usage: 'input'|'output'|'internal';\n\n /**\n * the rank of the input or output.\n */\n readonly rank: number;\n\n /**\n * a string representing the variable name for the shape of the input or output.\n */\n readonly shape: string;\n\n /**\n * a string representing the variable name for the strides of the input or output.\n */\n readonly strides: string;\n}\n\nconst getWgslMappedType = (type: number, components: 1|2|3|4): string|[string, string] => {\n if (components === 3) {\n throw new Error('vec3 has same alignment as vec4, use vec4 instead');\n }\n\n // return type is [ storage type, runtime type ] or a single string for both\n switch (type) {\n case DataType.float16:\n return components > 1 ? `vec${components}` : 'f16';\n case DataType.float:\n return components > 1 ? `vec${components}` : 'f32';\n case DataType.int32:\n return components > 1 ? `vec${components}` : 'i32';\n case DataType.uint32:\n return components > 1 ? `vec${components}` : 'u32';\n case DataType.int64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'i32'];\n case DataType.uint64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'u32'];\n case DataType.bool:\n if (components !== 4) {\n throw new Error('bool must be vec4');\n }\n return ['u32', 'vec4'];\n\n default:\n throw new Error(`Unknown data type: ${type}`);\n }\n};\n\nexport const tensorTypeToWsglStorageType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? mappedType : mappedType[0];\n};\n\nexport const tensorTypeToWsglValueType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? 
mappedType : mappedType[1];\n};\n\nexport const createTensorShapeVariables = (...dims: ReadonlyArray): ProgramUniform[] => {\n const programUniforms: ProgramUniform[] = [];\n dims.forEach(dim => {\n if (dim.length !== 0) {\n programUniforms.push(\n {type: DataType.uint32, data: dim}, {type: DataType.uint32, data: ShapeUtil.computeStrides(dim)});\n }\n });\n return programUniforms;\n};\n\n/**\n * A helper function to get maximum vector size for specified data length\n * @param size\n */\nexport const getMaxComponents = (size: number) => {\n // we cannot use vec3 type since it has alignment of 16 bytes\n if (size % 4 === 0) {\n return 4;\n } else if (size % 2 === 0) {\n return 2;\n }\n\n return 1;\n};\n\n/**\n * A helper function that initializes variable as a scalar or vector. e.g. f32(0) or vec4f(0,0,0,0)\n * @param dataType\n * @param components\n * @param value\n */\nexport const fillVector = (dataType = 'f32', components?: number, value = '0') => {\n if (!components || components === 1) {\n return `${dataType}(${value})`;\n }\n\n return `vec${components}<${dataType}>(${value})`;\n};\n\n/**\n * A helper function that casts value or vector to f32\n * @param dataType\n * @param components\n * @param value\n */\nexport const castToF32 = (dataType: string, components: number, value: string) => {\n if (dataType === 'f32') {\n return value;\n }\n if (components === 1) {\n return `f32(${value})`;\n }\n\n return `vec${components}(${value})`;\n};\n\n/**\n * A helper function that returns scalar or sums all components of a vector\n * @param name\n * @param components\n */\nexport const sumVector = (name: string, components: number) => {\n if (components === 4) {\n return `(${name}.x + ${name}.y + ${name}.z + ${name}.w)`;\n } else if (components === 2) {\n return `(${name}.x + ${name}.y)`;\n } else if (components === 3) {\n return `(${name}.x + ${name}.y + ${name}.z)`;\n }\n\n return name;\n};\n\n/**\n * A helper function that returns variable element at index.\n * @param name - the name of variable.\n * @param index - the index of variable element.\n * @param length - the length of variable.\n * @param type - the type of variable, optional.\n */\nexport const getElementAt =\n (name: string, index: number|string, length: number, type?: UniformDataElementType): string => {\n if (name.startsWith('uniforms.') && length > 4) {\n if (typeof (index) === 'string') {\n if (type === 'f16') {\n return `${name}[(${index}) / 8][(${index}) % 8 / 4][(${index}) % 8 % 4]`;\n } else {\n return `${name}[(${index}) / 4][(${index}) % 4]`;\n }\n } else {\n if (type === 'f16') {\n return `${name}[${Math.floor(index / 8)}][${Math.floor(index % 8 / 4)}][${index % 8 % 4}]`;\n } else {\n return `${name}[${Math.floor(index / 4)}][${index % 4}]`;\n }\n }\n } else {\n return length > 1 ? `${name}[${index}]` : name;\n }\n };\n\n/**\n * A helper function to get a IndicesHelper for a given input or output.\n *\n * @param name - the name of the input or output.\n * @param tensorType - the tensor type of the input or output.\n * @param shapeOrRank - the tensor shape or the rank of the input or output.\n * @param usage - the usage of the indices helper.\n * @param components - indicates the number of components of each element. 
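// --- Editor's note: illustrative sketch, not part of the bundled sources. ---
// getMaxComponents above picks the widest vector width that evenly divides the
// element count (vec3 is skipped because of its 16-byte alignment):
//   size = 12 -> 4 (vec4), size = 6 -> 2 (vec2), size = 7 -> 1 (scalar)
const maxComponents = (size: number): 1|2|4 => (size % 4 === 0 ? 4 : size % 2 === 0 ? 2 : 1);
// getElementAt addresses uniform arrays longer than 4 elements, which are packed
// into vec4s, so a flat index i becomes [i / 4][i % 4]:
//   getElementAt('uniforms.x_shape', 6, 6) -> 'uniforms.x_shape[1][2]'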
1 for scalar, 2 for vec2, 3 for vec3, 4 for\n * vec4.\n */\nconst createIndicesHelper =\n (name: string, tensorType: number, shapeOrRank: number|readonly number[], usage: IndicesHelper['usage'],\n components: 1|2|3|4): IndicesHelper => {\n const useUniform = typeof shapeOrRank === 'number';\n const rank = useUniform ? shapeOrRank : shapeOrRank.length;\n const rankIdentity = [...new Array(rank).keys()];\n const indicesType = rank < 2 ? 'u32' : rank <= 4 ? `vec${rank}` : `array`;\n const mappedType = getWgslMappedType(tensorType, components);\n const valueType = typeof mappedType === 'string' ? mappedType : mappedType[1];\n const storageType = typeof mappedType === 'string' ? mappedType : mappedType[0];\n const type = {indices: indicesType, value: valueType, storage: storageType, tensor: tensorType};\n\n const normalizeDim = (dim: number|string): string => typeof dim === 'string' ? dim : `${dim}u`;\n\n const implementationUsed = {\n offsetToIndices: false,\n indicesToOffset: false,\n broadcastedIndicesToOffset: false,\n set: false,\n setByIndices: false,\n get: false,\n getByIndices: false,\n };\n\n const uniformPrefix = useUniform ? 'uniforms.' : '';\n const shape = `${uniformPrefix}${name}_shape`;\n const strides = `${uniformPrefix}${name}_strides`;\n\n let o2iSnippet = '';\n for (let i = 0; i < rank - 1; i++) {\n o2iSnippet += `\n let dim${i} = current / ${getElementAt(strides, i, rank)};\n let rest${i} = current % ${getElementAt(strides, i, rank)};\n indices[${i}] = dim${i};\n current = rest${i};\n `;\n }\n o2iSnippet += `indices[${rank - 1}] = current;`;\n\n const offsetToIndicesImplementation = rank < 2 ? '' : `\n fn o2i_${name}(offset: u32) -> ${type.indices} {\n var indices: ${type.indices};\n var current = offset;\n ${o2iSnippet}\n return indices;\n }`;\n\n const offsetToIndices = (varOffset: string) => {\n implementationUsed.offsetToIndices = true;\n return rank < 2 ? varOffset : `o2i_${name}(${varOffset})`;\n };\n\n const offsets: string[] = [];\n if (rank >= 2) {\n for (let i = rank - 1; i >= 0; i--) {\n offsets.push(`${getElementAt(strides, i, rank)} * (indices[${i}])`);\n }\n }\n\n const indicesToOffsetImplementation = rank < 2 ? '' : `\n fn i2o_${name}(indices: ${type.indices}) -> u32 {\n return ${offsets.join('+')};\n }`;\n\n const indicesToOffset = (varIndices: string) => {\n implementationUsed.indicesToOffset = true;\n return rank < 2 ? varIndices : `i2o_${name}(${varIndices})`;\n };\n\n const indices = (...init: ReadonlyArray) =>\n rank === 0 ? 
'0u' : `${type.indices}(${init.map(normalizeDim).join(',')})`;\n\n const indicesGet = (varIndices: string, idx: number|string) => {\n if (rank < 2) {\n return `${varIndices}`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}`;\n }\n };\n\n const indicesSet = (varIndices: string, idx: number|string, value: string) => {\n if (rank < 2) {\n return `${varIndices}=${value};`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}=${value};`;\n }\n };\n\n const broadcastedIndicesToOffsetImplementation: {[key: string]: string} = {};\n const broadcastedIndicesToOffset = (varIndices: string, output: IndicesHelper) => {\n implementationUsed.broadcastedIndicesToOffset = true;\n const implKey = `${output.name}broadcastedIndicesTo${name}Offset`;\n if (implKey in broadcastedIndicesToOffsetImplementation) {\n return `${implKey}(${varIndices})`;\n }\n const offsets = [];\n for (let i = rank - 1; i >= 0; i--) {\n const idx = output.indicesGet('outputIndices', i + output.rank - rank);\n offsets.push(`${indicesGet(strides, i)} * (${idx} % ${indicesGet(shape, i)})`);\n }\n broadcastedIndicesToOffsetImplementation[implKey] =\n `fn ${implKey}(outputIndices: ${output.type.indices}) -> u32 {\n return ${offsets.length > 0 ? offsets.join('+') : '0u'};\n }`;\n\n return `${implKey}(${varIndices})`;\n };\n\n const setByOffset = (offset: number|string, value: string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]=${value};`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), select(0u, 0xFFFFFFFFu, ${value} < 0));`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), 0u);`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `${name}[${offset}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${value}));`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByOffset = (offset: number|string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `i32(${name}[${offset}].x)`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `u32(${name}[${offset}].x)`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `vec4(bool(${name}[${offset}] & 0xFFu), bool(${name}[${offset}] & 0xFF00u), bool(${name}[${\n offset}] & 0xFF0000u), bool(${name}[${offset}] & 0xFF000000u))`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByIndicesImplementation = rank < 2 ? '' : `\n fn get_${name}ByIndices(indices: ${type.indices}) -> ${valueType} {\n return ${getByOffset(`i2o_${name}(indices)`)};\n }`;\n\n const getImplementation = rank < 2 ? 
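// --- Editor's note: illustrative sketch, not part of the bundled sources. ---
// The storage/value conversions above pack four bools into one u32 (one byte per
// lane) for storage and unpack them with byte masks on read. CPU round-trip of
// that packing, for clarity:
const packBool4 = (b: [boolean, boolean, boolean, boolean]): number =>
    (b[0] ? 0x1 : 0) | (b[1] ? 0x100 : 0) | (b[2] ? 0x10000 : 0) | (b[3] ? 0x1000000 : 0);
const unpackBool4 = (x: number): [boolean, boolean, boolean, boolean] =>
    [!!(x & 0xFF), !!(x & 0xFF00), !!(x & 0xFF0000), !!(x & 0xFF000000)];
// packBool4([true, false, true, true]) === 0x1010001
// unpackBool4(0x1010001) -> [true, false, true, true]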
'' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn get_${name}(${functionParams}) -> ${valueType} {\n return get_${name}ByIndices(${indices(dimsParams)});\n }`;\n })();\n\n const get = (...indices: ReadonlyArray) => {\n if (indices.length !== rank) {\n throw new Error(`indices length must be ${rank}`);\n }\n\n const normalizedIndices = indices.map(normalizeDim).join(',');\n\n if (rank === 0) {\n return getByOffset('0u');\n } else if (rank === 1) {\n return getByOffset(normalizedIndices[0]);\n } else {\n implementationUsed.get = true;\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}(${normalizedIndices})`;\n }\n };\n\n const getByIndices = (varIndices: string) => {\n if (rank < 2) {\n return getByOffset(varIndices);\n } else {\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}ByIndices(${varIndices})`;\n }\n };\n\n const setByIndicesImplementation = rank < 2 ? '' : `\n fn set_${name}ByIndices(indices: ${type.indices}, value: ${valueType}) {\n ${setByOffset(`i2o_${name}(indices)`, 'value')}\n }`;\n\n const setImplementation = rank < 2 ? '' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn set_${name}(${functionParams}, value: ${valueType}) {\n set_${name}ByIndices(${indices(dimsParams)}, value);\n }`;\n })();\n\n const set = (...indicesAndValue: ReadonlyArray) => {\n if (indicesAndValue.length !== rank + 1) {\n throw new Error(`indices length must be ${rank}`);\n }\n const value = indicesAndValue[rank];\n if (typeof value !== 'string') {\n throw new Error('value must be string');\n }\n\n const normalizedIndices = indicesAndValue.slice(0, rank).map(normalizeDim).join(',');\n\n if (rank === 0) {\n return setByOffset('0u', value);\n } else if (rank === 1) {\n return setByOffset(normalizedIndices[0], value);\n } else {\n implementationUsed.set = true;\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}(${normalizedIndices}, ${value})`;\n }\n };\n\n const setByIndices = (varIndices: string, value: string) => {\n if (rank < 2) {\n return setByOffset(varIndices, value);\n } else {\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}ByIndices(${varIndices}, ${value});`;\n }\n };\n\n const impl = () => {\n const impls = [];\n let needShapeStrides = false;\n if (implementationUsed.offsetToIndices) {\n impls.push(offsetToIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.indicesToOffset) {\n impls.push(indicesToOffsetImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.broadcastedIndicesToOffset) {\n Object.values(broadcastedIndicesToOffsetImplementation).forEach(impl => impls.push(impl));\n needShapeStrides = true;\n }\n if (implementationUsed.set) {\n impls.push(setImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.setByIndices) {\n impls.push(setByIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.get) {\n impls.push(getImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.getByIndices) {\n impls.push(getByIndicesImplementation);\n needShapeStrides = true;\n }\n if (!useUniform && needShapeStrides) {\n impls.unshift(\n `const 
${shape} = ${type.indices}(${shapeOrRank.join(',')});`,\n `const ${strides} = ${type.indices}(${ShapeUtil.computeStrides(shapeOrRank).join(',')});`);\n }\n return impls.join('\\n');\n };\n\n return {\n impl,\n type,\n offsetToIndices,\n indicesToOffset,\n broadcastedIndicesToOffset,\n indices,\n indicesGet,\n indicesSet,\n set,\n setByOffset,\n setByIndices,\n get,\n getByOffset,\n getByIndices,\n // isVec4,\n usage,\n name,\n strides,\n shape,\n rank\n };\n };\n\n/**\n * Create a IndicesHelper for an input.\n *\n * @param name - the name of the input.\n * @param type - the tensor type of the input.\n * @param shapeOrRank - the tensor shape or the rank of the input.\n * @param components - the number of components of the input. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the input.\n */\nexport const inputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'input', components);\n\n/**\n * Create a IndicesHelper for an output.\n *\n * @param name - the name of the output.\n * @param type - the tensor type of the output.\n * @param shapeOrRank - the tensor shape or the rank of the output.\n * @param components - the number of components of the output. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the output.\n */\nexport const outputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'output', components);\n\n/**\n * Create a IndicesHelper for an internal variable.\n *\n * @param name - the name of the variable.\n * @param type - the tensor type of the variable.\n * @param shapeOrRank - the tensor shape or the rank of the variable.\n * @param components - the number of components of the variable. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the variable.\n */\nexport const internalVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'internal', components);\n\nexport type UniformDataElementType = 'u32'|'f16'|'f32'|'i32';\nexport type UniformsArrayType = Array<{name: string; type: UniformDataElementType; length?: number}>;\n\n/**\n * A ShaderHelper is a helper class for generating WGSL code.\n */\nexport interface ShaderHelper {\n /**\n * A helper function to generate the start of main function in WGSL source code.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param workgroupSize - an optional workgroup size. default is WORKGROUP_SIZE.\n */\n mainStart(workgroupSize?: number|[number, number, number]): string;\n\n /**\n * A helper function to generate the code snippet for guarding against out-of-bounds size.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n *\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param size - the size of the data to guard against. 
can be a number or a string (WGSL `u32` expression).\n */\n guardAgainstOutOfBoundsWorkgroupSizes(size: unknown): string;\n\n /**\n * A helper function to generate the code snippet for declaring multiple inputs or outputs.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n declareVariables(...variables: IndicesHelper[]): string;\n\n /**\n * A helper function to register one uniform. Can be called multiple times to register multiple uniforms.\n *\n * @param name - the name of the uniform.\n * @param type - the type of the uniform.\n * @param length - the length of the uniform, default to 1 when it is not provided.\n */\n registerUniform(name: string, type: string, length?: number): ShaderHelper;\n\n /**\n * A helper function to register multiple uniforms. Can be called multiple times to register multiple uniforms.\n *\n * @param uniforms - an array of uniforms. Each element of the array is an object with 2 properties: `name` and\n * `type`.\n */\n registerUniforms(uniforms: UniformsArrayType): ShaderHelper;\n\n /**\n * A helper function to register multiple internal variables. Can be called multiple times to register multiple\n * internal variables.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper;\n}\n\nclass ShaderHelperImpl implements ShaderHelper {\n constructor(private normalizedDispatchGroup: [number, number, number], private limits: GPUSupportedLimits) {}\n\n guardAgainstOutOfBoundsWorkgroupSizes(size: number|string): string {\n // Guard against out-of-bounds work group sizes\n const sizeInCode = typeof size === 'number' ? `${size}u` : size;\n return `if (global_idx >= ${sizeInCode}) { return; }`;\n }\n\n mainStart(workgroupSize: number|[number, number, number] = WORKGROUP_SIZE) {\n const workgroupSizeX = typeof workgroupSize === 'number' ? workgroupSize : workgroupSize[0];\n const workgroupSizeY = typeof workgroupSize === 'number' ? 1 : workgroupSize[1];\n const workgroupSizeZ = typeof workgroupSize === 'number' ? 1 : workgroupSize[2];\n\n if (workgroupSizeX > this.limits.maxComputeWorkgroupSizeX ||\n workgroupSizeY > this.limits.maxComputeWorkgroupSizeY ||\n workgroupSizeZ > this.limits.maxComputeWorkgroupSizeZ) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${\n this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);\n }\n\n if (workgroupSizeX * workgroupSizeY * workgroupSizeZ > this.limits.maxComputeInvocationsPerWorkgroup) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup invocations ${\n this.limits.maxComputeInvocationsPerWorkgroup}.`);\n }\n\n const is1DimensionDispatch = this.normalizedDispatchGroup[1] === 1 && this.normalizedDispatchGroup[2] === 1;\n const paramList = is1DimensionDispatch ? 
`@builtin(global_invocation_id) global_id : vec3,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(local_invocation_id) local_id : vec3` :\n `@builtin(global_invocation_id) global_id : vec3,\n @builtin(local_invocation_id) local_id : vec3,\n @builtin(local_invocation_index) local_idx : u32,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(num_workgroups) num_workgroups : vec3`;\n const globalIdxDefinition = is1DimensionDispatch ?\n 'let global_idx = global_id.x; let local_idx = local_id.x;' :\n `let global_idx = (workgroup_id.z * num_workgroups[0] * num_workgroups[1] +\n workgroup_id.y * num_workgroups[0] + workgroup_id.x) * ${\n workgroupSizeX * workgroupSizeY * workgroupSizeZ}u + local_idx;`;\n\n return `@compute @workgroup_size(${workgroupSizeX}, ${workgroupSizeY}, ${workgroupSizeZ})\n fn main(${paramList}) {\n ${globalIdxDefinition}\n `;\n }\n\n private appendVariableUniforms(variable: IndicesHelper): void {\n if (variable.rank !== 0) {\n if (variable.shape.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.shape.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n if (variable.strides.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.strides.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n }\n }\n\n private declareVariable(variable: IndicesHelper, bindingIndex: number): string {\n if (variable.usage === 'internal') {\n throw new Error('cannot use internal variable with declareVariable(). use registerInternalVariables() instead.');\n }\n this.variables.push(variable);\n this.appendVariableUniforms(variable);\n\n const access = variable.usage === 'input' ? 'read' : 'read_write';\n const storageType = variable.type.storage;\n return `@group(0) @binding(${bindingIndex}) var ${variable.name}: array<${storageType}>;`;\n }\n\n declareVariables(...variables: IndicesHelper[]): string {\n return variables.map(v => this.declareVariable(v, this.variableIndex++)).join('\\n');\n }\n\n private registerInternalVariable(variable: IndicesHelper): void {\n if (variable.usage !== 'internal') {\n throw new Error(\n 'cannot use input or output variable with registerInternalVariable(). use declareVariables() instead.');\n }\n\n this.internalVariables.push(variable);\n this.appendVariableUniforms(variable);\n }\n\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper {\n variables.forEach(v => this.registerInternalVariable(v));\n return this;\n }\n\n registerUniform(name: string, type: UniformDataElementType, length = 1): ShaderHelper {\n this.uniforms.push({name, type, length});\n return this;\n }\n\n registerUniforms(additionalUniforms: UniformsArrayType): ShaderHelper {\n this.uniforms = this.uniforms.concat(additionalUniforms);\n return this;\n }\n\n private internalVariables: IndicesHelper[] = [];\n private variables: IndicesHelper[] = [];\n private uniforms: UniformsArrayType = [];\n private uniformDeclaration(): string {\n if (this.uniforms.length === 0) {\n return '';\n }\n\n const uniformSnippets: string[] = [];\n for (const {name, type, length} of this.uniforms) {\n if (length && length > 4) {\n if (type === 'f16') {\n uniformSnippets.push(`@align(16) ${name}:array, ${Math.ceil(length / 8)}>`);\n } else {\n uniformSnippets.push(`${name}:array, ${Math.ceil(length / 4)}>`);\n }\n } else {\n const typeTemp = length == null || length === 1 ? 
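// --- Editor's note: illustrative sketch, not part of the bundled sources. ---
// For multi-dimensional dispatch, mainStart above flattens the invocation id as
//   global_idx = (wg.z * nx * ny + wg.y * nx + wg.x) * invocationsPerWorkgroup + local_idx
// CPU equivalent, with num_workgroups = [nx, ny, nz]:
const flatGlobalIdx =
    (wg: [number, number, number], numWg: [number, number, number], invocationsPerWg: number,
     localIdx: number): number =>
        (wg[2] * numWg[0] * numWg[1] + wg[1] * numWg[0] + wg[0]) * invocationsPerWg + localIdx;
// flatGlobalIdx([1, 0, 0], [4, 2, 1], 64, 5) -> 69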
type : `vec${length}<${type}>`;\n uniformSnippets.push(`${name}:${typeTemp}`);\n }\n }\n\n return `\n struct Uniforms { ${uniformSnippets.join(', ')} };\n @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`;\n }\n private variableIndex = 0;\n\n /**\n * Get additional implementation that needs to be added to the shader source.\n */\n get additionalImplementations(): string {\n return this.uniformDeclaration() + this.variables.map(i => i.impl()).join('\\n') +\n this.internalVariables.map(i => i.impl()).join('\\n');\n }\n\n /**\n * Get the variable info of the shader program.\n */\n get variablesInfo(): ProgramUniformVariableInfo[]|undefined {\n if (this.uniforms.length === 0) {\n return undefined;\n }\n\n const uniformWgslTypeToDataType = (type: UniformDataElementType) =>\n ([DataType.uint32, DataType.float16, DataType.float,\n DataType.int32][['u32', 'f16', 'f32', 'i32'].indexOf(type)]);\n return this.uniforms.map(u => ([uniformWgslTypeToDataType(u.type), u.length ?? 1]));\n }\n}\n\nexport const createShaderHelper = (dispatchGroup: [number, number, number], limits: GPUSupportedLimits) =>\n new ShaderHelperImpl(dispatchGroup, limits);\n\n/**\n * This function comes from https://github.com/tensorflow/tfjs/blob/master/tfjs-core/src/ops/broadcast_util.ts#L18-L40\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport const getBroadcastDims = (inShape: readonly number[], outShape: readonly number[]): number[] => {\n const inRank = inShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n};\n\nconst getAdjustedPerm = (inputRank: number, perm: number[]): number[] =>\n (perm && perm.length !== inputRank) ? 
[...(new Array(inputRank).keys())].reverse() : perm;\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] =>\n ShapeUtil.sortBasedOnPerm(inputShape, getAdjustedPerm(inputShape.length, perm));\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nexport const createTransposeProgramInfo = (inputTensor: TensorView, permAttr: number[]): ProgramInfo => {\n const inputDataType = inputTensor.dataType;\n const inputRank = inputTensor.dims.length;\n const perm = getAdjustedPerm(inputRank, permAttr);\n const outputShape = getOutputShape(inputTensor.dims, perm);\n const output = outputVariable('output', inputDataType, outputShape.length);\n const input = inputVariable('a', inputDataType, inputRank);\n let getShaderSource;\n if (perm.length === 2 && perm[0] === 1 && perm[1] === 0) {\n const wgslType = output.type.value;\n const workgroupSize: [number, number, number] = [16, 16, 1];\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n var tile : array, ${workgroupSize[0]}>;\n ${shaderHelper.mainStart(workgroupSize)}\n var x = workgroup_id.x * ${workgroupSize[0]}u + local_id.x;\n var y = workgroup_id.y * ${workgroupSize[0]}u + local_id.y;\n let width = uniforms.output_shape[0];\n let height = uniforms.output_shape[1];\n if (x < width && y < height) {\n tile[local_id.y][local_id.x] = ${input.getByOffset('y * width + x')};\n }\n workgroupBarrier();\n x = workgroup_id.y * ${workgroupSize[0]}u + local_id.x;\n y = workgroup_id.x * ${workgroupSize[0]}u + local_id.y;\n if (x < height && y < width) {\n ${output.setByOffset('y * height + x', 'tile[local_id.x][local_id.y]')}\n }\n }`;\n } else {\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${permFunctionBody(perm, inputRank, input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${output.setByOffset('global_idx', input.getByIndices('aIndices'))}\n }`;\n }\n return {\n name: 'Transpose',\n shaderCache: {hint: `${permAttr}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputSize = ShapeUtil.size(outputShape);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const transpose = (context: ComputeContext, attributes: TransposeAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createTransposeProgramInfo(context.inputs[0], attributes.perm));\n};\n\nexport const parseTransposeAttributes = (attributes: Record): TransposeAttributes =>\n createAttributeWithCacheKey({perm: attributes.perm as number[]});\n", "// Copyright (c) Microsoft Corporation. 
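// --- Editor's note: illustrative sketch, not part of the bundled sources. ---
// The general transpose path above maps each output index back to an input index
// through the permutation: inputIndices[perm[i]] = outputIndices[i]. CPU equivalent:
const permuteDims = (dims: readonly number[], perm: readonly number[]): number[] => perm.map(p => dims[p]);
const outputToInputIndices = (outputIndices: readonly number[], perm: readonly number[]): number[] => {
  const inputIndices = new Array<number>(outputIndices.length);
  perm.forEach((p, i) => inputIndices[p] = outputIndices[i]);
  return inputIndices;
};
// perm = [2, 0, 1] on input shape [2, 3, 4]: output shape = permuteDims([2, 3, 4], [2, 0, 1]) = [4, 2, 3];
// output indices [3, 1, 2] map back to input indices [1, 2, 3].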
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\nimport {createReduceAttributesFromInputs, ReduceAttributes} from './reduce';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst reduceOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate * candidate',\n logSumExp: 'bestValue + exp(candidate)',\n l1: 'bestValue + abs(candidate)',\n l2: 'bestValue + candidate * candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceSharedOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate',\n logSumExp: 'bestValue + candidate',\n l1: 'bestValue + candidate',\n l2: 'bestValue + candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceInitValues: {[key: string]: string} = {\n max: '_A[offset]',\n min: '_A[offset]',\n mean: '0',\n sum: '0',\n prod: '1',\n sumSquare: '0',\n logSumExp: '0',\n l1: '0',\n l2: '0',\n logSum: '0'\n};\n\nconst reduceOutputValues: {[key: string]: string} = {\n max: 'bestValue',\n min: 'bestValue',\n sum: 'bestValue',\n prod: 'bestValue',\n sumSquare: 'bestValue',\n logSumExp: 'log(bestValue)',\n l1: 'bestValue',\n l2: 'sqrt(bestValue)',\n logSum: 'log(bestValue)'\n};\n\nconst getInnerMostAxes = (numInnerAxes: number, rank: number): number[] => {\n const res = [];\n for (let i = rank - numInnerAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n};\n\nconst computeOutAndReduceShapes = (shape: readonly number[], axes: readonly number[]): [number[], number[]] => {\n const outputShape = [];\n const rank = shape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputShape.push(shape[dim]);\n }\n }\n const reduceShape = axes.map(dim => shape[dim]);\n return [outputShape, reduceShape];\n};\n\nconst expandShapeToKeepDim = (shape: number[], axes: number[]): number[] => {\n const rank = shape.length + axes.length;\n const expandShape = [];\n let shapeIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n expandShape.push(shape[shapeIdx++]);\n } else {\n expandShape.push(1);\n }\n }\n return expandShape;\n};\n\nconst areAxesInnerMostDims = (axes: number[], rank: number): boolean => {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n};\n\nconst getAxesPermutation = (axes: number[], rank: number): number[] => {\n const res = [];\n if (!areAxesInnerMostDims(axes, rank)) {\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n res.push(i);\n }\n }\n axes.forEach(axis => res.push(axis));\n }\n return res;\n};\n\nexport const createReduceSharedProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceType: string,\n outputDataType: DataType, outputShape: number[], reduceShape: number[]): ProgramInfo 
=> {\n const inputShape = inputs[0].dims;\n\n const outputSize = ShapeUtil.size(outputShape);\n const reduceSize = ShapeUtil.size(reduceShape);\n\n const input = inputVariable('_A', inputs[0].dataType, inputShape);\n const output = outputVariable('output', outputDataType, outputShape);\n\n const workgroupSize = 32;\n\n const sharedMemorySnippet = `\n var aBestValues : array;\n `;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('reduceSize', 'u32').declareVariables(input, output)}\n ${sharedMemorySnippet}\n fn DIV_CEIL(a : u32, b : u32) -> u32 {\n return ((a - 1u) / b + 1u);\n }\n ${shaderHelper.mainStart(workgroupSize)}\n\n let outputIndex = global_idx / ${workgroupSize};\n let offset = outputIndex * uniforms.reduceSize;\n\n var bestValue = f32(${reduceInitValues[reduceType]});\n let Length = uniforms.reduceSize;\n for (var k = local_idx; k < Length; k = k + ${workgroupSize}) {\n let candidate = f32(${input.getByOffset('offset + k')});\n bestValue = ${reduceOps[reduceType]};\n }\n aBestValues[local_idx] = bestValue;\n workgroupBarrier();\n\n var reduceSize = min(Length, ${workgroupSize}u);\n for (var currentSize = reduceSize / 2u; reduceSize > 1u;\n currentSize = reduceSize / 2u) {\n let interval = DIV_CEIL(reduceSize, 2u);\n if (local_idx < currentSize) {\n let candidate = aBestValues[local_idx + interval];\n bestValue = ${reduceSharedOps[reduceType]};\n aBestValues[local_idx] = bestValue;\n }\n reduceSize = interval;\n workgroupBarrier();\n }\n\n if (local_idx == 0u) {\n ${\n output.setByOffset(\n 'outputIndex',\n `${\n reduceType === 'mean' ? `${output.type.storage}(bestValue / f32(uniforms.reduceSize))` :\n `${output.type.storage}(${reduceOutputValues[reduceType]})`}`)};\n }\n }`;\n\n // One work group is responsible for only one element of output.\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: outputSize},\n programUniforms: [{type: DataType.uint32, data: reduceSize}]\n }),\n };\n };\n\nconst reduceCommon =\n (context: ComputeContext, name: string, attributes: ReduceAttributes,\n reduceType: 'sum'|'sumSquare'|'prod'|'min'|'max'|'mean'|'logSumExp'|'l1'|'l2'|'logSum'): void => {\n const updatedAttributes: ReduceAttributes =\n context.inputs.length === 1 ? 
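// --- Editor's note: illustrative sketch, not part of the bundled sources; it mirrors
// the shared-memory reduction above for the 'sum' case, assuming data.length > 0. Each
// of the workgroup's invocations accumulates a strided slice of the reduce axis, then
// the partial results are combined by repeated halving (like aBestValues[]).
const workgroupReduceSum = (data: readonly number[], workgroupSize = 32): number => {
  const partials = new Array<number>(Math.min(workgroupSize, data.length)).fill(0);
  for (let k = 0; k < data.length; k++) {
    partials[k % workgroupSize] += data[k];      // strided accumulation per invocation
  }
  for (let size = partials.length; size > 1; size = Math.ceil(size / 2)) {
    const interval = Math.ceil(size / 2);
    for (let i = 0; i < Math.floor(size / 2); i++) {
      partials[i] += partials[i + interval];     // halving combine step
    }
  }
  return partials[0];
};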
attributes : createReduceAttributesFromInputs(context.inputs, attributes);\n\n let updatedAxes = updatedAttributes.axes;\n if (updatedAxes.length === 0 && !updatedAttributes.noopWithEmptyAxes) {\n updatedAxes = context.inputs[0].dims.map((_dim, i) => i);\n }\n const normalizeAxes = ShapeUtil.normalizeAxes(updatedAxes, context.inputs[0].dims.length);\n\n let axes = normalizeAxes;\n let input = context.inputs[0];\n const permutedAxes = getAxesPermutation(axes, context.inputs[0].dims.length);\n if (permutedAxes.length > 0) {\n input = context.compute(\n createTransposeProgramInfo(context.inputs[0], permutedAxes), {inputs: [0], outputs: [-1]})[0];\n axes = getInnerMostAxes(axes.length, input.dims.length);\n }\n\n const [outputShape, reduceShape] = computeOutAndReduceShapes(input.dims, axes);\n let finalOutputShape = outputShape;\n if (updatedAttributes.keepDims) {\n finalOutputShape = expandShapeToKeepDim(outputShape, normalizeAxes);\n }\n\n context.compute(\n createReduceSharedProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['type']}, [input], reduceType,\n context.inputs[0].dataType, finalOutputShape, reduceShape),\n {inputs: [input]});\n };\n\nexport const reduceMeanShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMeanShared', attributes, 'mean');\n};\n\nexport const reduceL1Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL1Shared', attributes, 'l1');\n};\n\nexport const reduceL2Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL2Shared', attributes, 'l2');\n};\n\nexport const reduceLogSumExpShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumExpShared', attributes, 'logSumExp');\n};\n\nexport const reduceMaxShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMaxShared', attributes, 'max');\n};\n\nexport const reduceMinShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMinShared', attributes, 'min');\n};\n\nexport const reduceProdShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceProdShared', attributes, 'prod');\n};\n\nexport const reduceSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumShared', attributes, 'sum');\n};\n\nexport const reduceSumSquareShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumSquareShared', attributes, 'sumSquare');\n};\n\nexport const reduceLogSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumShared', attributes, 'logSum');\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\nimport {reduceL1Shared, reduceL2Shared, reduceLogSumExpShared, reduceLogSumShared, reduceMaxShared, reduceMeanShared, reduceMinShared, reduceProdShared, reduceSumShared, reduceSumSquareShared} from './reduce-shared';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('Reduce op requires 1 or 2 inputs.');\n }\n\n if (inputs.length === 2 && inputs[1].dims.length !== 1) {\n throw new Error('Invalid axes input dims.');\n }\n};\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n noopWithEmptyAxes: boolean;\n axes: number[];\n}\n\nexport type ReduceOp =\n (input: IndicesHelper, output: IndicesHelper,\n axes: readonly number[]) => [string, string, string, string, ...string[]];\n\nconst noOp: ReduceOp = (input) => ['', '', `var value = ${input.getByIndices('input_indices')};`, ''];\nexport const createReduceProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceOp: ReduceOp,\n axesInput: number[], outputDataType: DataType, keepDims = false, noopWithEmptyAxes = false): ProgramInfo => {\n const outputShape: number[] = [];\n const inputShape = inputs[0].dims;\n const inputRank = inputShape.length;\n const axes = ShapeUtil.normalizeAxes(axesInput, inputRank);\n const reduceOnAllAxes = !noopWithEmptyAxes && axes.length === 0;\n inputShape.forEach((d, i) => {\n if (reduceOnAllAxes || axes.indexOf(i) >= 0) {\n if (keepDims) {\n outputShape.push(1);\n } // else { // skip this axis}\n } else {\n outputShape.push(d);\n }\n });\n const outputRank = outputShape.length;\n const outputSize = ShapeUtil.size(outputShape);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = []; // copy output indexes to input indexes\n\n const input = inputVariable('_A', inputs[0].dataType, inputRank);\n const output = outputVariable('output', outputDataType, outputRank);\n const ops = reduceOp(input, output, axes);\n let reduceOps = ops[2];\n\n for (let k = 0, l = 0; k < inputRank; k++) {\n // if this axis is reduced\n if (reduceOnAllAxes || axes.indexOf(k) >= 0) {\n if (keepDims) {\n l++;\n }\n // loop over the d-th axis\n reduceOps = `for(var j${k}: u32 = 0; j${k} < ${inputShape[k]}; j${k}++) {\n ${ops[2].includes('last_index') ? `let last_index = j${k};` : ''}\n ${input.indicesSet('input_indices', k, `j${k}`)}\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`${input.indicesSet('input_indices', k, output.indicesGet('output_indices', l))};`);\n l++;\n }\n }\n return `\n\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var input_indices: ${input.type.indices};\n let output_indices = ${output.offsetToIndices('global_idx')};\n\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${ops[1]}\n ${reduceOps}\n ${ops[3]}\n ${ops.length === 4 ? 
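// Illustrative TypeScript sketch (not part of the generated bundle): createReduceProgramInfo
// derives the output shape by walking the input dims -- a reduced dim contributes 1 only
// when keepDims is set, every other dim is copied through. A standalone restatement of
// that rule, assuming axes are already normalized to non-negative indices:
function reduceOutputShape(
    inputShape: readonly number[], axes: readonly number[], keepDims: boolean,
    noopWithEmptyAxes: boolean): number[] {
  const reduceOnAllAxes = !noopWithEmptyAxes && axes.length === 0;
  const outputShape: number[] = [];
  inputShape.forEach((d, i) => {
    if (reduceOnAllAxes || axes.indexOf(i) >= 0) {
      if (keepDims) {
        outputShape.push(1);
      }  // a reduced axis is dropped entirely when keepDims is false
    } else {
      outputShape.push(d);
    }
  });
  return outputShape;
}
// e.g. reduceOutputShape([2, 3, 4], [1], /*keepDims*/ true, false) -> [2, 1, 4]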
output.setByOffset('global_idx', 'value') : ops.slice(4).join('\\n')}\n }`;\n };\n\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)]\n }),\n };\n };\n\nexport const createReduceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: ReduceAttributes): ReduceAttributes => {\n const axes: number[] = [];\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => axes.push(Number(v)));\n }\n return createAttributeWithCacheKey(\n {axes, keepDims: attributes.keepDims, noopWithEmptyAxes: attributes.noopWithEmptyAxes});\n };\n\nconst runReduceProgram =\n (context: ComputeContext, name: string, attributes: ReduceAttributes, reduceOp: ReduceOp): void => {\n const inputs = context.inputs;\n const updatedAttributes: ReduceAttributes =\n inputs.length === 1 ? attributes : createReduceAttributesFromInputs(inputs, attributes);\n\n context.compute(\n createReduceProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['rank']}, [inputs[0]],\n updatedAttributes.noopWithEmptyAxes && updatedAttributes.axes.length === 0 ? noOp : reduceOp,\n updatedAttributes.axes, inputs[0].dataType, updatedAttributes.keepDims,\n updatedAttributes.noopWithEmptyAxes),\n {inputs: [0]});\n };\n\nconst reduceLogSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSum', attributes, reduceOp);\n};\n\nconst reduceL1Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += abs(${input.getByIndices('input_indices')});`,\n '',\n ];\n runReduceProgram(context, 'ReduceL1', attributes, reduceOp);\n};\n\nconst reduceL2Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += (t * t);`,\n 'value = sqrt(value);',\n ];\n runReduceProgram(context, 'ReduceL2', attributes, reduceOp);\n};\n\nconst reduceLogSumExpNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += exp(${input.getByIndices('input_indices')});`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSumExp', attributes, reduceOp);\n};\n\nconst reduceMaxNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(input.indicesSet('input_indices', k, 0));\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = max(value, 
${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMax', attributes, reduceOp);\n};\n\nconst reduceMeanNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output, axes) => {\n let size = 1.0;\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n // TODO: this depends on the input dims. If we want to use uniform, this need to be updated.\n size *= context.inputs[0].dims[k];\n }\n }\n\n return [\n 'var sum = f32(0);',\n '',\n `sum += f32(${input.getByIndices('input_indices')});`,\n `let value = ${output.type.value}(sum / ${size});`,\n ];\n };\n runReduceProgram(context, 'ReduceMean', attributes, reduceOp);\n};\n\nconst reduceMinNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = min(value, ${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMin', attributes, reduceOp);\n};\n\nconst reduceProdNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(1);`,\n '',\n `value *= ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceProd', attributes, reduceOp);\n};\n\nconst reduceSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceSum', attributes, reduceOp);\n};\n\nconst reduceSumSquareNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += t * t;`,\n '',\n ];\n runReduceProgram(context, 'ReduceSumSquare', attributes, reduceOp);\n};\n\nconst useNaiveReduceMethod =\n (shape: readonly number[], axes: readonly number[], noopWithEmptyAxes: boolean): boolean => {\n if (axes.length === 0) {\n return noopWithEmptyAxes;\n }\n\n let outputSize = 1;\n let reduceSize = 1;\n for (let dim = 0; dim < axes.length; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputSize *= shape[dim];\n } else {\n reduceSize *= shape[dim];\n }\n }\n\n // The condition data is very rough, although considering the count of Execution Unit (EU), the potential\n // work groups in a EU and the counts of loops in the naive and shared methods, also doing experiments\n // on some machines.\n return reduceSize < 32 && outputSize > 1024;\n };\n\nexport const reduceMean = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMeanNaive(context, attributes);\n } else {\n reduceMeanShared(context, attributes);\n }\n};\n\nexport const reduceL1 = (context: 
ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL1Naive(context, attributes);\n } else {\n reduceL1Shared(context, attributes);\n }\n};\n\nexport const reduceL2 = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL2Naive(context, attributes);\n } else {\n reduceL2Shared(context, attributes);\n }\n};\n\nexport const reduceLogSumExp = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumExpNaive(context, attributes);\n } else {\n reduceLogSumExpShared(context, attributes);\n }\n};\n\nexport const reduceMax = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMaxNaive(context, attributes);\n } else {\n reduceMaxShared(context, attributes);\n }\n};\n\nexport const reduceMin = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMinNaive(context, attributes);\n } else {\n reduceMinShared(context, attributes);\n }\n};\n\nexport const reduceProd = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceProdNaive(context, attributes);\n } else {\n reduceProdShared(context, attributes);\n }\n};\n\nexport const reduceSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumNaive(context, attributes);\n } else {\n reduceSumShared(context, attributes);\n }\n};\n\nexport const reduceSumSquare = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumSquareNaive(context, attributes);\n } else {\n reduceSumSquareShared(context, attributes);\n }\n};\n\nexport const reduceLogSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumNaive(context, attributes);\n } else {\n reduceLogSumShared(context, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. 
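// Illustrative TypeScript sketch (not part of the generated bundle): reduceMean/reduceSum/...
// pick between the naive per-output-element program and the shared-memory program with a
// rough heuristic -- use the naive path only when the reduced extent is small (< 32) and
// there are many outputs (> 1024). The sketch below computes both sizes over every
// dimension of the shape; note that the embedded source bounds the same loop by
// `axes.length` rather than `shape.length`, so it only scans the leading dimensions.
function preferNaiveReduce(
    shape: readonly number[], axes: readonly number[], noopWithEmptyAxes: boolean): boolean {
  if (axes.length === 0) {
    return noopWithEmptyAxes;  // empty axes: naive path only when the op is a no-op
  }
  let outputSize = 1;
  let reduceSize = 1;
  for (let dim = 0; dim < shape.length; dim++) {
    if (axes.indexOf(dim) === -1) {
      outputSize *= shape[dim];
    } else {
      reduceSize *= shape[dim];
    }
  }
  return reduceSize < 32 && outputSize > 1024;
}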
Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createReduceProgramInfo, ReduceOp} from './reduce';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('ArgMinMaxOp op requires 1 or 2 inputs.');\n }\n if (inputs[0].dataType !== DataType.float) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport interface ArgMinMaxAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n axis: number;\n selectLastIndex: number;\n}\n\nexport const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '<=' : '<'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'ArgMin', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const argMax = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '>=' : '>'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'argMax', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const parseArgMinMaxAttributes = (attributes: Record): ArgMinMaxAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
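// Illustrative TypeScript sketch (not part of the generated bundle): argMin/argMax reuse
// the reduce scaffolding and differ only in the loop body. The comparison is '<' (or '>')
// by default and becomes '<=' (or '>=') when selectLastIndex is set, so ties resolve to
// the last occurrence instead of the first. The ArgMin rule along a flat array:
function argMinReference(values: readonly number[], selectLastIndex: boolean): number {
  let best = values[0];
  let bestIndex = 0;
  for (let i = 1; i < values.length; i++) {
    const better = selectLastIndex ? values[i] <= best : values[i] < best;
    if (better) {
      best = values[i];
      bestIndex = i;
    }
  }
  return bestIndex;
}
// argMinReference([3, 1, 1, 2], false) -> 1; argMinReference([3, 1, 1, 2], true) -> 2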
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext, GpuDataType, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, tensorTypeToWsglValueType, UniformDataElementType, UniformsArrayType} from './common';\n\nexport const enum AttentionQkvFormat {\n unknown, // enum value not set, or depends on qkv projection implementation details\n qkvBNSH, // for non-packed qkv, permuted\n qkvBSNH, // for non-packed qkv, not permuted, used by memory efficient attention or MultiHeadAttention\n qkvBSN3H, // for TRT fused attention, qkv are packed\n qkvBNSHqkvBS3NH, // for TRT fused causal attention, data has two formats (qkv is 3BNSH, gemm_buffer is BS3NH)\n qKvBSNHxBSN2H, // for TRT fused cross attention, kv are packed\n qkvTNH, // for memory efficient attention, qkv are not packed, and paddings are removed.\n qkvTN3H, // for TRT fused attention, qkv are packed and paddings are removed\n}\n\nexport const enum AttentionMaskType {\n none, // No mask\n mask1dKeySeqLen, // [batch_size], key sequence length\n mask1dEndStart, // [2 * batch_size] with end positions and start positions\n mask1DKeySeqLenStart, // [3 * batch_size + 2] with [key_len[0], ..., key_len[batch_size - 1], query_start[0],\n // ..., query_start[batch_size - 1], query_end[batch_size - 1], key_start[0], ...,\n // key_start[batch_size - 1], key_end[batch_size - 1]]\n mask2dDummy, // dummy mask with shape [1, 1] or [batch_size, 1]. It has same effect as no mask.\n mask2dKeyPadding, // [batch_size, total_sequence_length]\n mask3dAttention, // [batch_size, sequence_length, total_sequence_length]\n mask4dMegatron, // Megatron causal mask with shape [batch_size, 1, max_sequence_length, max_sequence_length]\n maskUnknown\n}\n\nexport interface AttentionParameters {\n batchSize: number;\n sequenceLength: number;\n pastSequenceLength: number;\n kvSequenceLength: number;\n totalSequenceLength: number;\n maxSequenceLength: number;\n inputHiddenSize: number;\n hiddenSize: number;\n vHiddenSize: number;\n headSize: number;\n vHeadSize: number;\n numHeads: number;\n kvNumHeads?: number;\n nReps?: number;\n isUnidirectional?: boolean;\n pastPresentShareBuffer: boolean;\n maskFilterValue?: number;\n maskType: AttentionMaskType;\n scale: number;\n broadcastResPosBias: boolean;\n passPastInKv: boolean;\n qkvFormat: AttentionQkvFormat;\n isPastkvBSNH?: boolean;\n}\n\nexport interface AttentionAttrs {\n numHeads: number;\n kvNumHeads?: number;\n isUnidirectional?: number;\n maskFilterValue?: number;\n scale: number;\n doRotary: number;\n qkvHiddenSizes: number[];\n pastPresentShareBuffer: boolean;\n}\n\nconst validateAttentionInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = 
num_heads * v_head_size\n\n // When past state is used, Q, K and V should have same hidden size (unless we split it into past_key and past_value).\n\n // Input shapes:\n // input (Q/K/V) : (B, S, D_i)\n // weights (Q/K/V) : (D_i, D + D + D_v)\n // bias (Q/K/V) : (D + D + D_v)\n // mask_index : see below\n // past (K/V) : (2, B, N, P, H) or NULL\n // relative_position_bias : (B, N, S, T) or NULL\n\n // For mask_index, the following shapes are supported:\n // NULL, (B, 1), (1, 1)\n // (B), (2 * B), (3 * B + 2)\n // (B, T)\n // (B, S, T)\n // (B, 1, M, M)\n //\n // When a model is pruned (like some attention heads are removed in Q/K/V), input_hidden_size could be larger\n // than hidden dimension of Q, K and V.\n\n const input = inputs[0];\n const weights = inputs[1];\n const bias = inputs[2];\n const maskIndex = inputs[3];\n const past = inputs[4];\n const relativePositionBias = inputs[5];\n\n if (past && relativePositionBias) {\n throw new Error('Attention cannot have both past and relative_position_bias');\n }\n\n if (input.dims.length !== 3) {\n throw new Error('Input \"input\" must have 3 dimensions');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[1];\n const inputHiddenSize = input.dims[2];\n\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimensions');\n }\n\n if (weights.dims.length !== 2) {\n throw new Error('Input \"weights\" is expected to have 2 dimensions');\n }\n\n if (weights.dims[0] !== inputHiddenSize) {\n throw new Error('Input 1 dimension 0 should have same length as dimension 2 of input 0');\n }\n\n if (bias.dims[0] !== weights.dims[1]) {\n throw new Error('Input \"bias\" dimension 0 should have same length as dimension 1 of input \"weights\"');\n }\n\n let qHiddenSize = bias.dims[0] / 3;\n let kHiddenSize = qHiddenSize;\n let vHiddenSize = kHiddenSize;\n if (attributes.qkvHiddenSizes.length > 0) {\n if (attributes.qkvHiddenSizes.length !== 3) {\n throw new Error('qkv_hidden_sizes attribute should have 3 elements');\n }\n for (const sz of attributes.qkvHiddenSizes) {\n if (sz % attributes.numHeads !== 0) {\n throw new Error('qkv_hidden_sizes should be divisible by num_heads');\n }\n }\n\n qHiddenSize = attributes.qkvHiddenSizes[0];\n kHiddenSize = attributes.qkvHiddenSizes[1];\n vHiddenSize = attributes.qkvHiddenSizes[2];\n }\n\n const kvSequenceLength = sequenceLength;\n\n if (qHiddenSize !== kHiddenSize) {\n throw new Error('qkv_hidden_sizes first element should be same as the second');\n }\n\n if (bias.dims[0] !== qHiddenSize + kHiddenSize + vHiddenSize) {\n throw new Error('Input \"bias\" dimension 0 should have same length as sum of Q/K/V hidden sizes');\n }\n\n let pastSequenceLength = 0;\n if (past) {\n if (kHiddenSize !== vHiddenSize) {\n throw new Error('Input \"past\" expect k_hidden_size == v_hidden_size');\n }\n if (past.dims.length !== 5) {\n throw new Error('Input \"past\" must have 5 dimensions');\n }\n if (past.dims[0] !== 2) {\n throw new Error('Input \"past\" first dimension must be 2');\n }\n if (past.dims[1] !== batchSize) {\n throw new Error('Input \"past\" second dimension must be batch_size');\n }\n if (past.dims[2] !== attributes.numHeads) {\n throw new Error('Input \"past\" third dimension must be num_heads');\n }\n if (past.dims[4] !== kHiddenSize / attributes.numHeads) {\n throw new Error('Input \"past\" fifth dimension must be k_hidden_size / num_heads');\n }\n\n if (!attributes.pastPresentShareBuffer) {\n pastSequenceLength = past.dims[3];\n }\n // TODO: handle 
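// Illustrative TypeScript sketch (not part of the generated bundle): validateAttentionInputs
// splits the fused bias length into the Q/K/V hidden sizes (bias.dims[0] / 3 unless the
// qkv_hidden_sizes attribute is given) and derives the per-head sizes from num_heads.
// The arithmetic in isolation, with hypothetical parameter names and example numbers:
function deriveAttentionSizes(biasLength: number, numHeads: number, qkvHiddenSizes: number[] = []) {
  const [qHiddenSize, kHiddenSize, vHiddenSize] = qkvHiddenSizes.length === 3 ?
      qkvHiddenSizes :
      [biasLength / 3, biasLength / 3, biasLength / 3];
  if (qHiddenSize !== kHiddenSize) {
    throw new Error('q_hidden_size and k_hidden_size must match');
  }
  return {
    hiddenSize: qHiddenSize,                        // D
    vHiddenSize,                                    // D_v
    headSize: Math.floor(qHiddenSize / numHeads),   // H
    vHeadSize: Math.floor(vHiddenSize / numHeads),  // H_v
  };
}
// e.g. a fused bias of length 2304 with 12 heads -> hiddenSize 768, headSize 64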
past_seq_len\n }\n\n const totalSequenceLength = kvSequenceLength + pastSequenceLength;\n const maxSequenceLength = -1;\n\n const maskType = AttentionMaskType.none;\n if (maskIndex) {\n // maskType = AttentionMaskType.MASK_UNKNOWN;\n // TODO: handle mask\n throw new Error('Mask not supported');\n }\n\n if (past) {\n throw new Error('past is not supported');\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize,\n hiddenSize: qHiddenSize,\n vHiddenSize,\n headSize: Math.floor(qHiddenSize / attributes.numHeads),\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias: false,\n passPastInKv: false,\n qkvFormat: AttentionQkvFormat.qkvBNSH,\n };\n};\n\nconst createInPlaceSoftmaxProgramInfo = (_context: ComputeContext, input: TensorView, n: number, d: number) => {\n const components = getMaxComponents(d);\n let WG = 64;\n const dComp = d / components;\n if (dComp < WG) {\n WG = 1;\n } else if (dComp / 8 < 64) {\n WG = Math.ceil(dComp / 8);\n }\n const elementsPerThread = Math.ceil(d / components / WG);\n const programUniforms: ProgramUniform[] = [\n {type: input.dataType, data: 1 / d}, {type: DataType.uint32, data: dComp},\n {type: DataType.uint32, data: elementsPerThread}\n ];\n const dataType = tensorTypeToWsglStorageType(input.dataType, components);\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = outputVariable('x', input.dataType, input.dims, components);\n const elemValueType = tensorTypeToWsglValueType(input.dataType);\n const uniforms: UniformsArrayType = [\n {name: 'd_inv', type: elemValueType as UniformDataElementType}, {name: 'd_comp', type: 'u32'},\n {name: 'elements_per_thread', type: 'u32'}\n ];\n\n return `\n var thread_max: array;\n var thread_sum: array;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(inputHelper)}\n ${shaderHelper.mainStart([\n WG, 1, 1\n ])}\n let local_offset = local_idx * uniforms.elements_per_thread;\n let offset = workgroup_id.x * uniforms.d_comp + local_offset;\n\n var thread_max_vector = ${f32Type}(-3.402823e+38f);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n thread_max_vector = max(${f32Type}(x[offset + i]), thread_max_vector);\n }\n thread_max[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'thread_max_vector';\n case 2:\n return 'max(thread_max_vector.x, thread_max_vector.y)';\n case 4:\n return 'max(max(thread_max_vector.x, thread_max_vector.y), max(thread_max_vector.z, thread_max_vector.w))';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var max_value = f32(-3.402823e+38f);\n for (var i = 0u; i < ${WG}; i++) {\n max_value = max(thread_max[i], max_value);\n }\n\n var sum_vector = ${f32Type}(0);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n sum_vector += exp(${f32Type}(x[offset + i]) - max_value);\n }\n thread_sum[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'sum_vector';\n case 2:\n return 'sum_vector.x + sum_vector.y';\n case 4:\n return 'sum_vector.x + sum_vector.y + sum_vector.z + sum_vector.w';\n default:\n throw 
new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var sum: f32 = 0;\n for (var i = 0u; i < ${WG}; i++) {\n sum += thread_sum[i];\n }\n\n if (sum == 0) {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n x[offset + i] = ${inputHelper.type.value}(uniforms.d_inv);\n }\n } else {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n var f32input = ${f32Type}(x[offset + i]);\n x[offset + i] = ${inputHelper.type.value}(exp(f32input - max_value) / sum);\n }\n }\n }`;\n };\n\n return {\n name: 'AttentionProbsSoftmax',\n shaderCache: {hint: `${WG};${dataType};${components}`},\n getShaderSource,\n getRunData: () => ({outputs: [], dispatchGroup: {x: n}, programUniforms}),\n };\n};\n\nconst createAttentionProbsProgramInfo =\n (context: ComputeContext, q: TensorView, key: TensorView, pastKey: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs,\n pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n const probsShape = [parameters.batchSize, parameters.numHeads, parameters.sequenceLength, totalSequenceLength];\n const presentKey = parameters.kvNumHeads === undefined && context.outputCount > 1;\n const presentKeyShape = presentKey ?\n [parameters.batchSize, parameters.numHeads, totalSequenceLength, parameters.headSize] :\n undefined;\n\n // TODO: handle mask\n\n const alpha = attributes.scale === 0 ? 1.0 / Math.sqrt(parameters.headSize) : attributes.scale;\n const components = getMaxComponents(parameters.headSize);\n const vectorizedHeadSize = parameters.headSize / components;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(totalSequenceLength / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: parameters.sequenceLength}, {type: DataType.uint32, data: vectorizedHeadSize},\n {type: DataType.uint32, data: totalSequenceLength}, {type: DataType.uint32, data: parameters.numHeads},\n {type: DataType.float, data: alpha}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: parameters.kvSequenceLength}\n ];\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (pastKey) {\n inputDependencies.push('type');\n }\n if (relativePositionBias) {\n inputDependencies.push('type');\n }\n const outputs = [{dims: probsShape, dataType: q.dataType, gpuDataType: GpuDataType.default}];\n if (presentKey) {\n outputs.push({dims: presentKeyShape!, dataType: q.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const qInput = inputVariable('q', q.dataType, q.dims, components);\n const kInput = inputVariable('key', key.dataType, key.dims, components);\n const inputVars = [qInput, kInput];\n if (pastKey) {\n const pastKeyInput = inputVariable('past_key', pastKey.dataType, pastKey.dims, components);\n inputVars.push(pastKeyInput);\n }\n if (relativePositionBias) {\n inputVars.push(\n inputVariable('relative_position_bias', relativePositionBias.dataType, relativePositionBias.dims));\n }\n const output = outputVariable('output', q.dataType, probsShape);\n const outputVars = [output];\n if (presentKey) {\n outputVars.push(outputVariable('present_key', q.dataType, presentKeyShape!, 
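// Illustrative TypeScript sketch (not part of the generated bundle): the
// AttentionProbsSoftmax program normalizes each row of the probs tensor in place with the
// usual max-subtraction trick, and falls back to a uniform 1/d row when the exponent sum
// underflows to zero (the `sum == 0` branch writing uniforms.d_inv). A row-wise CPU model:
function softmaxRowInPlace(row: number[]): void {
  const d = row.length;
  const max = row.reduce((m, v) => Math.max(m, v), -3.402823e+38);
  let sum = 0;
  for (const v of row) {
    sum += Math.exp(v - max);
  }
  if (sum === 0) {
    row.fill(1 / d);  // degenerate row: distribute probability uniformly
  } else {
    for (let i = 0; i < d; i++) {
      row[i] = Math.exp(row[i] - max) / sum;
    }
  }
}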
components));\n }\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'alpha', type: 'f32' as UniformDataElementType},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n\n var tileQ: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n // x holds the N and y holds the M\n let headIdx = workgroup_id.z;\n let m = workgroup_id.y * TILE_SIZE;\n let n = workgroup_id.x * TILE_SIZE;\n let qOffset = uniforms.M * uniforms.K * headIdx + m * uniforms.K;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n let kOffset = uniforms.kv_sequence_length * uniforms.K * headIdx;\n let pastKeyOffset = uniforms.past_sequence_length * uniforms.K * headIdx;`;\n } else {\n return `\n let kOffset = uniforms.N * uniforms.K * headIdx + n * uniforms.K;`;\n }\n })()}\n ${presentKey ? 'let presentKeyOffset = headIdx * uniforms.N * uniforms.K;' : ''}\n var value = ${f32Type}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (global_id.y < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = q[qOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n if (n + local_id.y < uniforms.N && w + local_id.x < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n if (n + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_key[pastKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x];\n } else {\n tileK[idx] =\n key[kOffset + (n + local_id.y - uniforms.past_sequence_length) * uniforms.K + w + local_id.x];\n }`;\n } else {\n return 'tileK[idx] = key[kOffset + local_id.y * uniforms.K + w + local_id.x];';\n }\n })()}\n ${\n presentKey ?\n 'present_key[presentKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x] = tileK[idx];' :\n ''}\n }\n workgroupBarrier();\n\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += ${f32Type}(tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * local_id.x + k]);\n }\n\n workgroupBarrier();\n }\n\n let headOffset = headIdx * uniforms.M * uniforms.N;\n if (global_id.y < uniforms.M && global_id.x < uniforms.N) {\n let outputIdx = headOffset + global_id.y * uniforms.N + global_id.x;\n var sum: f32 = ${(() => {\n switch (components) {\n case 1:\n return 'value';\n case 2:\n return 'value.x + value.y';\n case 4:\n return 'value.x + value.y + value.z + value.w';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n output[outputIdx] = ${output.type.value} (sum * uniforms.alpha) + ${\n relativePositionBias ? 
'relative_position_bias[outputIdx]' : '0.0'};\n }\n }`;\n };\n return {\n name: 'AttentionProbs',\n shaderCache: {\n hint: `${components};${relativePositionBias !== undefined};${pastKey !== undefined};${context.outputCount}`,\n inputDependencies\n },\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\n\nconst createVxAttentionScoreProgramInfo =\n (context: ComputeContext, probs: TensorView, v: TensorView, pastValue: TensorView|undefined,\n params: AttentionParameters, pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + params.kvSequenceLength;\n const nReps = params.nReps ? params.nReps : 1;\n const repeatedVHiddenSize = params.vHiddenSize * nReps;\n const presentValue = params.kvNumHeads == null && context.outputCount > 1;\n const presentValueShape =\n presentValue ? [params.batchSize, params.numHeads, totalSequenceLength, params.headSize] : undefined;\n const outputShape = [params.batchSize, params.sequenceLength, repeatedVHiddenSize];\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(params.vHeadSize / TILE_SIZE),\n y: Math.ceil(params.sequenceLength / TILE_SIZE),\n z: params.batchSize * params.numHeads\n };\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: params.sequenceLength}, {type: DataType.uint32, data: totalSequenceLength},\n {type: DataType.uint32, data: params.vHeadSize}, {type: DataType.uint32, data: params.numHeads},\n {type: DataType.uint32, data: repeatedVHiddenSize}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] =\n pastValue ? ['type', 'type', 'type'] : ['type', 'type'];\n const outputs = [{dims: outputShape, dataType: probs.dataType, gpuDataType: GpuDataType.default}];\n if (presentValue) {\n outputs.push({dims: presentValueShape!, dataType: probs.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const probsHelper = inputVariable('probs', probs.dataType, probs.dims);\n const vHelper = inputVariable('v', v.dataType, v.dims);\n const inputVars = [probsHelper, vHelper];\n if (pastValue) {\n inputVars.push(inputVariable('past_value', pastValue.dataType, pastValue.dims));\n }\n const output = outputVariable('output', probs.dataType, outputShape);\n const outputVars = [output];\n if (presentValue) {\n outputVars.push(outputVariable('present_value', probs.dataType, presentValueShape!));\n }\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'v_hidden_size', type: 'u32'},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileQ: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let headIdx = workgroup_id.z;\n let m = global_id.y;\n let n = global_id.x;\n\n let offsetA = headIdx * (uniforms.M * uniforms.K) + m * uniforms.K;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n let pastValueOffset = headIdx * uniforms.N * uniforms.past_sequence_length + n;\n let vOffset = headIdx * uniforms.N * uniforms.kv_sequence_length + 
n;\n `;\n } else {\n return `\n let offsetB = headIdx * uniforms.N * uniforms.K + n;\n `;\n }\n })()}\n ${presentValue ? 'let presentValueOffset = headIdx * uniforms.N * uniforms.K + n;' : ''}\n var value = ${probsHelper.type.storage}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = probs[offsetA + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n if (w + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_value[pastValueOffset + (w + local_id.y) * uniforms.N];\n } else {\n tileK[idx] = v[vOffset + (w + local_id.y - uniforms.past_sequence_length) * uniforms.N];\n }\n `;\n } else {\n return `\n tileK[idx] = v[offsetB + (w + local_id.y) * uniforms.N];\n `;\n }\n })()}\n ${presentValue ? 'present_value[presentValueOffset + (w + local_id.y) * uniforms.N] = tileK[idx];' : ''}\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * k + local_id.x];\n }\n workgroupBarrier();\n }\n\n // we need to transpose output from BNSH_v to BSND_v\n let batchIdx = workgroup_id.z / uniforms.num_heads;\n let currentBatchHeadNumber = workgroup_id.z % uniforms.num_heads;\n if (m < uniforms.M && n < uniforms.N) {\n let outputIdx = batchIdx * uniforms.M * uniforms.v_hidden_size + m * uniforms.v_hidden_size\n + currentBatchHeadNumber * uniforms.N + n;\n output[outputIdx] = value;\n }\n }`;\n };\n\n return {\n name: 'AttentionScore',\n shaderCache: {hint: `${pastValue !== undefined};${context.outputCount}`, inputDependencies},\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const applyAttention =\n (context: ComputeContext, q: TensorView, k: TensorView, v: TensorView, _maskIndex: TensorView|undefined,\n _past: TensorView|undefined, pastKey: TensorView|undefined, pastValue: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs) => {\n const outputCount = context.outputCount;\n const pastSequenceLength =\n parameters.kvNumHeads !== undefined || outputCount > 1 ? parameters.pastSequenceLength : 0;\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n\n const inputsK = (parameters.kvNumHeads === undefined && outputCount > 1 && pastKey) ? [q, k, pastKey] : [q, k];\n if (relativePositionBias) {\n inputsK.push(relativePositionBias);\n }\n\n // Run AttentionProbs\n const probs = context.compute(\n createAttentionProbsProgramInfo(\n context, q, k, outputCount > 1 ? pastKey : undefined, relativePositionBias, parameters, attributes,\n pastSequenceLength),\n {inputs: inputsK, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [-1, 1] : [-1]})[0];\n\n // Run Softmax\n context.compute(\n createInPlaceSoftmaxProgramInfo(\n context, probs, parameters.batchSize * parameters.numHeads * parameters.sequenceLength,\n totalSequenceLength),\n {inputs: [probs], outputs: []});\n\n // Run AttrionScore\n const inputsV =\n (parameters.kvNumHeads === undefined && outputCount > 1 && pastValue) ? [probs, v, pastValue] : [probs, v];\n context.compute(\n createVxAttentionScoreProgramInfo(\n context, probs, v, outputCount > 1 && pastValue ? 
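// Illustrative TypeScript sketch (not part of the generated bundle): applyAttention chains
// three programs -- AttentionProbs computes alpha * Q * K^T (alpha = scale, or
// 1/sqrt(headSize) when scale is 0), AttentionProbsSoftmax normalizes that result in place,
// and AttentionScore multiplies by V and transposes back to BSND. A single-head CPU
// reference of the same data flow, ignoring past key/value and the relative position bias:
function singleHeadAttention(q: number[][], k: number[][], v: number[][], scale = 0): number[][] {
  const headSize = q[0].length;
  const alpha = scale === 0 ? 1 / Math.sqrt(headSize) : scale;
  // probs[m][n] = alpha * dot(q[m], k[n])
  const probs = q.map(qRow => k.map(kRow => alpha * qRow.reduce((s, x, i) => s + x * kRow[i], 0)));
  for (const row of probs) {  // softmax in place, row by row
    const max = Math.max(...row);
    let sum = 0;
    for (let i = 0; i < row.length; i++) {
      row[i] = Math.exp(row[i] - max);
      sum += row[i];
    }
    for (let i = 0; i < row.length; i++) {
      row[i] /= sum;
    }
  }
  // output[m][h] = sum_n probs[m][n] * v[n][h]
  return probs.map(pRow => v[0].map((_, h) => pRow.reduce((s, p, n) => s + p * v[n][h], 0)));
}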
pastValue : undefined, parameters, pastSequenceLength),\n {inputs: inputsV, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [0, 2] : [0]});\n };\n\nconst prepare = (context: ComputeContext, parameters: AttentionParameters) => {\n const outputShape = [\n parameters.batchSize,\n parameters.numHeads,\n parameters.sequenceLength,\n parameters.headSize,\n ];\n const M = parameters.sequenceLength;\n const K = parameters.inputHiddenSize;\n const N = parameters.headSize;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(parameters.headSize / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const inputs = [context.inputs[0], context.inputs[1], context.inputs[2]];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: M}, {type: DataType.uint32, data: K}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: parameters.numHeads}, {type: DataType.uint32, data: parameters.headSize},\n {type: DataType.uint32, data: parameters.hiddenSize},\n {type: DataType.uint32, data: parameters.hiddenSize + parameters.hiddenSize + parameters.vHiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const outputQ = outputVariable('output_q', inputs[0].dataType, outputShape);\n const outputK = outputVariable('output_k', inputs[0].dataType, outputShape);\n const outputV = outputVariable('output_v', inputs[0].dataType, outputShape);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims);\n const weight = inputVariable('weight', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const dataType = input.type.storage;\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'num_heads', type: 'u32'},\n {name: 'head_size', type: 'u32'}, {name: 'hidden_size', type: 'u32'}, {name: 'ldb', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileInput: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightQ: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightK: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightV: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, weight, bias, outputQ, outputK, outputV)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let batchIndex = workgroup_id.z / uniforms.num_heads;\n let headNumber = workgroup_id.z % uniforms.num_heads;\n let m = global_id.y;\n let n = global_id.x;\n\n let inputOffset = batchIndex * (uniforms.M * uniforms.K) + m * uniforms.K;\n let biasOffsetQ = headNumber * uniforms.head_size;\n let biasOffsetK = uniforms.hidden_size + biasOffsetQ;\n let biasOffsetV = uniforms.hidden_size + biasOffsetK;\n\n var valueQ = ${dataType}(0);\n var valueK = ${dataType}(0);\n var valueV = ${dataType}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileInput[TILE_SIZE * local_id.y + local_id.x] = input[inputOffset + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n let offset = n + (w + local_id.y) * uniforms.ldb;\n tileWeightQ[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetQ + offset];\n tileWeightK[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetK + offset];\n tileWeightV[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetV + offset];\n }\n 
workgroupBarrier();\n for (var k: u32 = 0u; k ({\n outputs: [\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n ],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n },\n {inputs, outputs: [-1, -1, -1]});\n};\n\nexport const attention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateAttentionInputs(context.inputs, attributes);\n\n const [q, k, v] = prepare(context, params);\n\n return applyAttention(\n context, q, k, v, context.inputs[4], undefined, undefined, undefined, context.inputs[5], params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface BatchNormAttributes extends AttributeWithCacheKey {\n readonly epsilon: number;\n readonly momentum: number;\n readonly spatial: boolean;\n readonly trainingMode: boolean;\n readonly format: 'NHWC'|'NCHW';\n readonly outputCount: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: BatchNormAttributes): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs');\n }\n\n const checkShapeEqual = (actual: readonly number[], expected: readonly number[], message: string) => {\n const r = expected.length;\n if (r !== actual.length) {\n throw new Error(`${message}: num dimensions != ${r}`);\n }\n expected.forEach((v, i) => {\n if (v !== actual[i]) {\n throw new Error(`${message}: dim[${i}] do not match`);\n }\n });\n };\n\n if (inputs[0].dims.length > 1) {\n const shape = attributes.format === 'NHWC' ?\n (attributes.spatial ? inputs[0].dims.slice(-1) :\n inputs[0].dims.slice(-1).concat(inputs[0].dims.slice(1, inputs[0].dims.length - 1))) :\n inputs[0].dims.slice(1, attributes.spatial ? 2 : undefined);\n checkShapeEqual(inputs[1].dims, shape, 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, shape, 'Invalid input B');\n checkShapeEqual(inputs[3].dims, shape, 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, shape, 'Invalid input var');\n } else {\n checkShapeEqual(inputs[1].dims, [1], 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, [1], 'Invalid input B');\n checkShapeEqual(inputs[3].dims, [1], 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, [1], 'Invalid input var');\n }\n};\n\nconst createBatchNormInferenceProgramInfo =\n (inputs: readonly TensorView[], attributes: BatchNormAttributes): ProgramInfo => {\n const {epsilon, spatial, format} = attributes;\n const yShape = inputs[0].dims;\n const components = spatial ? getMaxComponents(yShape[yShape.length - 1]) : 1;\n const cComponents = format === 'NHWC' && yShape.length > 1 ? 
components : 1;\n const outputSize = ShapeUtil.size(yShape) / components;\n // Only support uniforms for opset version >= 9 (spatial = true).\n const useShapesUniforms = spatial;\n const shapeOrRank = useShapesUniforms ? yShape.length : yShape;\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims, cComponents);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims, cComponents);\n const inputMean = inputVariable('inputMean', inputs[3].dataType, inputs[3].dims, cComponents);\n const inputVar = inputVariable('inputVar', inputs[4].dataType, inputs[4].dims, cComponents);\n const y = outputVariable('y', inputs[0].dataType, shapeOrRank, components);\n // TODO: support inputs with different data type. Current we need to make sure all inputs have the same data type.\n // Otherwise, the shader compilation will fail.\n const calcCOffset = (): string => {\n let cOffset = '';\n if (spatial) {\n cOffset = `let cOffset = ${\n yShape.length === 1 ? '0u' :\n format === 'NHWC' ? `outputIndices[${yShape.length - 1}] / ${components}` :\n 'outputIndices[1]'};`;\n } else {\n if (format === 'NCHW') {\n cOffset = `\n ${y.indicesSet('outputIndices', '0', '0')}\n let cOffset = ${y.indicesToOffset('outputIndices')};`;\n } else {\n // update C channel.\n cOffset = `var cIndices = ${scale.type.indices}(0);\n cIndices[0] = outputIndices[${yShape.length - 1}];`;\n // update D1 x ... x Dn channels.\n for (let i = 1; i < scale.rank; i++) {\n cOffset += `cIndices[${i}] = outputIndices[${i}];`;\n }\n cOffset += `let cOffset = ${scale.indicesToOffset('cIndices')};`;\n }\n }\n return cOffset;\n };\n const getInferenceModeShaderSource = (helper: ShaderHelper) => `\n const epsilon = ${epsilon};\n ${helper.registerUniform('outputSize', 'u32').declareVariables(x, scale, bias, inputMean, inputVar, y)}\n ${helper.mainStart()}\n ${helper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${y.offsetToIndices(`global_idx * ${components}`)};\n ${calcCOffset()}\n let scale = ${scale.getByOffset('cOffset')};\n let bias = ${bias.getByOffset('cOffset')};\n let inputMean = ${inputMean.getByOffset('cOffset')};\n let inputVar = ${inputVar.getByOffset('cOffset')};\n let x = ${x.getByOffset('global_idx')};\n let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias;\n ${y.setByOffset('global_idx', 'value')}\n }`;\n return {\n name: 'BatchNormalization',\n shaderCache: {\n hint: `${attributes.epsilon}_${attributes.format}_${spatial}_${components}`,\n inputDependencies: useShapesUniforms ? 
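// Illustrative TypeScript sketch (not part of the generated bundle): the BatchNormalization
// inference shader applies the per-channel affine normalization
// y = (x - mean) / sqrt(var + epsilon) * scale + bias, with the channel offset chosen by
// spatial mode and NHWC/NCHW layout. The per-element arithmetic in isolation:
function batchNormElement(
    x: number, mean: number, variance: number, scale: number, bias: number, epsilon: number): number {
  return (x - mean) * (1 / Math.sqrt(variance + epsilon)) * scale + bias;
}
// e.g. batchNormElement(2, 1, 4, 1, 0, 0) -> 0.5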
['rank', 'type', 'type', 'type', 'type'] : undefined,\n },\n getShaderSource: getInferenceModeShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: useShapesUniforms ?\n [\n {type: DataType.uint32, data: outputSize},\n ...createTensorShapeVariables(yShape),\n ] :\n [\n {type: DataType.uint32, data: outputSize},\n ],\n }),\n };\n };\n\nexport const parseBatchNormAttributes = (attributes: Record): BatchNormAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n\nexport const batchNorm = (context: ComputeContext, attributes: Record): void => {\n const {inputs, outputCount} = context;\n const updatedAttributes = parseBatchNormAttributes({...attributes, outputCount});\n if (env.webgpu.validateInputContent) {\n validateInputs(inputs, updatedAttributes);\n }\n if (attributes.trainingMode) {\n throw new Error('BatchNormalization trainingMode is not supported yet.');\n } else {\n context.compute(createBatchNormInferenceProgramInfo(inputs, updatedAttributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![320, 640, 1280].includes(inputs[0].dims[2])) {\n throw new Error('number of channels should be 320, 640 or 1280');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasAddProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims;\n\n const channels = inputs[0].dims[2];\n // since channel number can be only 320/640/1280, it's always divisable by 4\n const outputSize = ShapeUtil.size(outputShape) / 4;\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, outputShape, 4);\n const bias = inputVariable('bias', dataType, [channels], 4);\n const residual = inputVariable('residual', dataType, outputShape, 4);\n const output = outputVariable('output', dataType, outputShape, 4);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const channels = ${channels}u / 4;\n ${shaderHelper.declareVariables(input, bias, residual, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let value = ${input.getByOffset('global_idx')}\n + ${bias.getByOffset('global_idx % channels')} + ${residual.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', 'value')}\n }`;\n\n return {\n name: 'BiasAdd',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasAdd = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasAddProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
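// Illustrative TypeScript sketch (not part of the generated bundle): BiasAdd relies on the
// channel count being 320/640/1280 (always divisible by 4) so the whole op runs on vec4
// lanes, with the bias broadcast along the batch and sequence dimensions. A scalar CPU
// reference over the row-major flattened (B, S, C) input:
function biasAddReference(input: number[], bias: number[], residual: number[]): number[] {
  const channels = bias.length;  // last dimension of the (B, S, C) input
  return input.map((x, i) => x + bias[i % channels] + residual[i]);
}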
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {MAX_CLIP, MIN_CLIP, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType} from './common';\n\ntype BuiltinFunctionName = string;\ntype ElementwiseCustomExpression = (expression: string) => string;\ntype ElementwiseFunctionCall = BuiltinFunctionName|ElementwiseCustomExpression;\n\nconst createElementwiseProgramShader =\n (shaderHelper: ShaderHelper, datasize: number, inputDataType: number, outputDataType: number,\n funcCall: ElementwiseFunctionCall, additionalImplementation?: string): string => {\n const vecSize = Math.ceil(datasize / 4);\n\n let expression = '';\n if (typeof funcCall === 'string') {\n expression = `${funcCall}(a)`;\n } else {\n expression = funcCall('a');\n }\n\n const input = inputVariable('inputData', inputDataType, [vecSize], 4);\n const output = outputVariable('outputData', outputDataType, [vecSize], 4);\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n\n let a = ${input.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', expression)}\n }`;\n };\n\nconst createElementwiseProgramInfo =\n (input: TensorView, name: string, funcCall: ElementwiseFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType: number = input.dataType): ProgramInfo => ({\n name,\n shaderCache: {hint: cacheKey, inputDependencies: ['type']},\n getShaderSource: shaderHelper => createElementwiseProgramShader(\n shaderHelper, ShapeUtil.size(input.dims), input.dataType, outputDataType, funcCall, additionalImplementation),\n getRunData: (inputTensors) => ({\n outputs: [{dims: input.dims, dataType: outputDataType}],\n dispatchGroup:\n {x: Math.ceil(ShapeUtil.size(inputTensors[0].dims) / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(input.dims) / 4)},\n ],\n })\n });\n\nexport const abs = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Abs', 'abs'));\n};\n\nexport const acos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acos', 'acos'));\n};\n\nexport const acosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acosh', 'acosh'));\n};\n\nexport const asin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asin', 'asin'));\n};\n\nexport const asinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asinh', 'asinh'));\n};\n\nexport const atan = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atan', 'atan'));\n};\nexport const atanh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atanh', 'atanh'));\n};\n\nexport interface CastAttributes extends AttributeWithCacheKey {\n readonly to: number;\n readonly saturate?: boolean;\n}\n\nexport const 
parseCastAttributes = (attributes: Record): CastAttributes =>\n createAttributeWithCacheKey(attributes as {to: number});\n\n\nexport const cast = (context: ComputeContext, attributes: CastAttributes): void => {\n let func: ElementwiseFunctionCall;\n switch (attributes.to) {\n case DataType.float16:\n func = 'vec4';\n break;\n case DataType.float:\n func = 'vec4';\n break;\n case DataType.uint32:\n func = 'vec4';\n break;\n case DataType.int32:\n func = 'vec4';\n break;\n case DataType.bool:\n func = 'vec4';\n break;\n default:\n throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${attributes.to}`);\n }\n context.compute(\n createElementwiseProgramInfo(context.inputs[0], 'Cast', func, undefined, attributes.cacheKey, attributes.to));\n};\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nconst generateClipAttributesFromInputs = (inputs: readonly TensorView[]): ClipAttributes => {\n const min = (inputs.length >= 2 && inputs[1].data !== 0) ? inputs[1].getFloat32Array()[0] : MIN_CLIP;\n const max = (inputs.length >= 3 && inputs[2].data !== 0) ? inputs[2].getFloat32Array()[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const clip = (context: ComputeContext, clipAttributes: ClipAttributes): void => {\n const attributes = context.inputs.length === 1 ? clipAttributes : generateClipAttributesFromInputs(context.inputs);\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(\n createElementwiseProgramInfo(\n context.inputs[0], 'Clip', a => `clamp(${a}, clip_min_, clip_max_)`, `\n const clip_min_: vec4<${dataType}> = vec4(${dataType}(${attributes.min}));\n const clip_max_: vec4<${dataType}> = vec4(${dataType}(${attributes.max}));\n`,\n attributes.cacheKey),\n {inputs: [0]});\n};\n\nexport const ceil = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Ceil', 'ceil'));\n};\n\nexport const cos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cos', 'cos'));\n};\n\nexport const cosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cosh', 'cosh'));\n};\n\nexport interface AlphaAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const parseAlphaAttributes = (attributes: Record): AlphaAttributes =>\n createAttributeWithCacheKey(attributes as {alpha: number});\n\nexport const elu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Elu', a => `elu_vf32(${a})`, `\n const elu_alpha_ = ${dataType}(${attributes.alpha});\n\n fn elu_f32(a: ${dataType}) -> ${dataType} {\n return select((exp(a) - 1.0) * elu_alpha_, a, a >= 0.0);\n }\n\n fn elu_vf32(v: vec4<${dataType}>) -> vec4<${dataType}> {\n return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w));\n }`,\n attributes.cacheKey));\n};\n\nexport const erfImpl = (varType = 'f32') => `\nconst r0: ${varType} = 0.3275911;\nconst r1: ${varType} = 0.254829592;\nconst r2: ${varType} = -0.284496736;\nconst r3: ${varType} = 1.421413741;\nconst r4: ${varType} = -1.453152027;\nconst r5: ${varType} = 1.061405429;\n\nfn erf_vf32(v: vec4<${varType}>) -> vec4<${varType}> {\n let absv = abs(v);\n let x = 1.0 / (1.0 + r0 * absv);\n return sign(v) 
* (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv));\n}`;\n\nexport const erf = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Erf', a => `erf_vf32(${a})`, erfImpl(dataType)));\n};\n\nexport const exp = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Exp', 'exp'));\n};\n\nexport const floor = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Floor', 'floor'));\n};\n\nexport const gelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Gelu', a => `0.5 * ${a} * (1.0 + erf_vf32(${a} * 0.7071067811865475))`, erfImpl(dataType)));\n};\n\nexport const leakyRelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'LeakyRelu', a => `select(leaky_relu_alpha_ * ${a}, ${a}, ${a} >= vec4<${dataType}>(0.0))`,\n `const leaky_relu_alpha_ = ${dataType}(${attributes.alpha});`, attributes.cacheKey));\n};\n\nexport const not = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Not', a => `!${a}`));\n};\n\nexport const neg = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Neg', a => `-${a}`));\n};\n\nexport const reciprocal = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Reciprocal', a => `1.0/${a}`));\n};\n\nexport const relu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Relu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > vec4<${dataType}>(0.0))`));\n};\n\nexport const sigmoid = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sigmoid', a => `(1.0 / (1.0 + exp(-${a})))`));\n};\n\nexport interface HardSigmoidAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n readonly beta: number;\n}\n\nexport const parseHardSigmoidAttributes = (attributes: Record): HardSigmoidAttributes =>\n createAttributeWithCacheKey(attributes as {\n alpha: number;\n beta: number;\n });\n\nexport const hardSigmoid = (context: ComputeContext, attributes: HardSigmoidAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'HardSigmoid',\n a => `max(vec4<${dataType}>(0.0), min(vec4<${dataType}>(1.0), ${attributes.alpha} * ${a} + vec4<${dataType}>(${\n attributes.beta})))`,\n undefined, attributes.cacheKey));\n};\n\nexport const sin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sin', 'sin'));\n};\n\nexport const sinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sinh', 'sinh'));\n};\n\nexport const sqrt = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sqrt', 'sqrt'));\n};\n\nexport const tan = (context: ComputeContext): void => {\n 
context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tan', 'tan'));\n};\n\nexport const tanhExpression = (a: string) => `sign(${a}) * (1 - exp(-2 * abs(${a}))) / (1 + exp(-2 * abs(${a})))`;\n\nexport const tanh = (context: ComputeContext): void => {\n // TODO: revisit after https://github.com/gpuweb/gpuweb/issues/4458 is resolved\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tanh', tanhExpression));\n};\n\nexport const fastGeluImpl = (varType = 'f32') => `\nconst fast_gelu_a: ${varType} = 0.5;\nconst fast_gelu_b: ${varType} = 0.7978845608028654;\nconst fast_gelu_c: ${varType} = 0.035677408136300125;\n\nfn tanh_v(v: vec4<${varType}>) -> vec4<${varType}> {\n return ${tanhExpression('v')};\n}\n`;\n\nexport const fastGeluExpression = (x: string) =>\n `(fast_gelu_a + fast_gelu_a * tanh_v(${x} * (fast_gelu_c * ${x} * ${x} + fast_gelu_b))) * ${x}`;\n\nexport const fastGelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'FastGelu', fastGeluExpression, fastGeluImpl(dataType), undefined,\n context.inputs[0].dataType));\n};\n\nexport const thresholdedRelu = (context: ComputeContext, attributes: AlphaAttributes): number => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'ThresholdedRelu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > thresholded_relu_alpha_)`,\n `const thresholded_relu_alpha_ = vec4<${dataType}>(${attributes.alpha});`, attributes.cacheKey));\n return 0;\n};\n\nexport const log = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Log', 'log'));\n};\n\nexport const quickGeluImpl = (varType: string, alpha: number) => `\nconst alpha = vec4<${varType}>(${alpha});\nconst one = ${varType}(1.0);\nconst zero = ${varType}(0.0);\n\nfn quick_gelu_impl(x: vec4<${varType}>) -> vec4<${varType}> {\n let v = x *alpha;\n var x1 : vec4<${varType}>;\n for (var i = 0; i < 4; i = i + 1) {\n if (v[i] >= zero) {\n x1[i] = one / (one + exp(-v[i]));\n } else {\n x1[i] = one - one / (one + exp(v[i]));\n }\n }\n return x * x1;\n}\n`;\n\nexport const quickGeluExpression = (x: string) => `quick_gelu_impl(${x})`;\n\nexport const quickgelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'QuickGelu', quickGeluExpression, quickGeluImpl(dType, attributes.alpha), attributes.cacheKey,\n context.inputs[0].dataType));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType} from './common';\nimport {erfImpl} from './unary-op';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![2560, 5120, 10240].includes(inputs[0].dims[2])) {\n throw new Error('hidden state should be 2560, 5120 or 10240');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasSplitGeluProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n outputShape[2] = outputShape[2] / 2;\n\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims, 4);\n const bias = inputVariable('bias', inputs[0].dataType, [inputs[0].dims[2]], 4);\n const output = outputVariable('output', inputs[0].dataType, outputShape, 4);\n\n const outputSize = ShapeUtil.size(outputShape) / 4;\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const M_SQRT2 = sqrt(2.0);\n const halfChannels = ${inputs[0].dims[2] / 4 / 2}u;\n\n ${shaderHelper.declareVariables(input, bias, output)}\n\n ${erfImpl(dataType)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let biasIdx = global_idx % halfChannels;\n let batchIndex = global_idx / halfChannels;\n let inputOffset = biasIdx + batchIndex * halfChannels * 2;\n let valueLeft = input[inputOffset] + bias[biasIdx];\n let valueRight = input[inputOffset + halfChannels] + bias[biasIdx + halfChannels];\n let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1);\n\n ${output.setByOffset('global_idx', 'valueLeft * geluRight')}\n }`;\n\n return {\n name: 'BiasSplitGelu',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasSplitGelu = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasSplitGeluProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype BuiltinFunctionName = string;\ntype BinaryCustomExpression = (expressionA: string, expressionB: string) => string;\ntype BinaryFunctionCall = BuiltinFunctionName|BinaryCustomExpression|{\n scalar: BinaryCustomExpression;\n vector: BinaryCustomExpression;\n};\n\nconst createBinaryOpProgramShader =\n (shaderHelper: ShaderHelper, dimsA: readonly number[], dimsB: readonly number[], dimsOutput: readonly number[],\n vectorize: boolean, doBroadcast: boolean, sharedDimensionDivisibleBy4: boolean, funcCall: BinaryFunctionCall,\n typeA: number, typeB: number, typeOutput: number, additionalImplementation?: string) => {\n let expressionScalar: BinaryCustomExpression;\n let expressionVector: BinaryCustomExpression;\n if (typeof funcCall === 'string') {\n expressionScalar = expressionVector = (a, b) => `${funcCall}((${a}),(${b}))`;\n } else if (typeof funcCall === 'function') {\n expressionScalar = expressionVector = funcCall;\n } else {\n expressionScalar = funcCall.scalar;\n expressionVector = funcCall.vector;\n }\n\n const output = outputVariable('outputData', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('aData', typeA, dimsA.length, 4);\n const b = inputVariable('bData', typeB, dimsB.length, 4);\n\n let assignment: string;\n if (vectorize) {\n if (doBroadcast) {\n const isAOneElement = ShapeUtil.size(dimsA) === 1;\n const isBOneElement = ShapeUtil.size(dimsB) === 1;\n const aLastDimDivisibleBy4 = dimsA.length > 0 && dimsA[dimsA.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = dimsB.length > 0 && dimsB[dimsB.length - 1] % 4 === 0;\n if (isAOneElement || isBOneElement) {\n assignment = output.setByOffset(\n 'global_idx',\n expressionVector(\n isAOneElement ? `${a.type.value}(${a.getByOffset('0')}.x)` : a.getByOffset('global_idx'),\n isBOneElement ? 
`${b.type.value}(${b.getByOffset('0')}.x)` : b.getByOffset('global_idx')));\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx * 4u')};\n let offsetA = ${a.broadcastedIndicesToOffset('outputIndices', output)};\n let offsetB = ${b.broadcastedIndicesToOffset('outputIndices', output)};\n ${\n output.setByOffset(\n 'global_idx',\n expressionVector(\n sharedDimensionDivisibleBy4 || aLastDimDivisibleBy4 ?\n a.getByOffset('offsetA / 4u') :\n `${a.type.value}(${a.getByOffset('offsetA / 4u')}[offsetA % 4u])`,\n sharedDimensionDivisibleBy4 || bLastDimDivisibleBy4 ?\n b.getByOffset('offsetB / 4u') :\n `${b.type.value}(${b.getByOffset('offsetB / 4u')}[offsetB % 4u])`))}\n `;\n }\n } else {\n assignment = output.setByOffset(\n 'global_idx', expressionVector(a.getByOffset('global_idx'), b.getByOffset('global_idx')));\n }\n } else {\n if (!doBroadcast) {\n throw new Error('no necessary to use scalar implementation for element-wise binary op implementation.');\n }\n\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `aData[indexA${x}][componentA${x}]`;\n const expressionB = `bData[indexB${x}][componentB${x}]`;\n return `\n let outputIndices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offsetA${x} = ${a.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let offsetB${x} = ${b.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let indexA${x} = offsetA${x} / 4u;\n let indexB${x} = offsetB${x} / 4u;\n let componentA${x} = offsetA${x} % 4u;\n let componentB${x} = offsetB${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expressionScalar(expressionA, expressionB)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n outputData[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('outputData[global_idx]', 0)}\n ${singleAssignment('outputData[global_idx]', 1)}\n ${singleAssignment('outputData[global_idx]', 2)}\n ${singleAssignment('outputData[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(a, b, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createBinaryOpProgramInfo =\n (name: string, cacheKey: string, a: TensorView, b: TensorView, funcCall: BinaryFunctionCall,\n additionalImplementation?: string, outputDataType: number = a.dataType): ProgramInfo => {\n const isBroadcast = !ShapeUtil.areEqual(a.dims, b.dims);\n let outputShape = a.dims;\n let outputSize = ShapeUtil.size(a.dims);\n\n let vectorize = false;\n let sharedDimensionDivisibleBy4 = false;\n\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n const cacheKeyAux = [isBroadcast];\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(a.dims, b.dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n const isAOneElement = ShapeUtil.size(a.dims) === 1;\n const isBOneElement = ShapeUtil.size(b.dims) === 1;\n const aLastDimDivisibleBy4 = a.dims.length > 0 && a.dims[a.dims.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = b.dims.length > 0 && b.dims[b.dims.length - 1] % 4 === 0;\n cacheKeyAux.push(isAOneElement);\n cacheKeyAux.push(isBOneElement);\n cacheKeyAux.push(aLastDimDivisibleBy4);\n cacheKeyAux.push(bLastDimDivisibleBy4);\n // check whether vectorize can be enabled\n let sharedDimension = 1;\n for (let i = 1; i < outputShape.length; i++) {\n const dimA = a.dims[a.dims.length - i] ?? 1;\n const dimB = b.dims[b.dims.length - i] ?? 1;\n if (dimA === dimB) {\n sharedDimension *= dimA;\n } else {\n break;\n }\n }\n if (sharedDimension % 4 === 0) {\n sharedDimensionDivisibleBy4 = true;\n vectorize = true;\n } else if (isAOneElement || isBOneElement || aLastDimDivisibleBy4 || bLastDimDivisibleBy4) {\n vectorize = true;\n }\n } else {\n // element-wise\n vectorize = true;\n }\n cacheKeyAux.push(vectorize);\n\n return {\n name,\n shaderCache: {\n hint: cacheKey + cacheKeyAux.map((x) => x.toString()).join('_'),\n inputDependencies: ['rank', 'rank'],\n },\n getShaderSource: (shaderHelper) => createBinaryOpProgramShader(\n shaderHelper, a.dims, b.dims, outputShape, vectorize, isBroadcast, sharedDimensionDivisibleBy4, funcCall,\n a.dataType, b.dataType, outputDataType, additionalImplementation),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* component size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(outputShape) / 4)},\n ...createTensorShapeVariables(a.dims, b.dims, outputShape)\n ],\n }),\n };\n };\n\nconst runBinaryOp =\n (context: ComputeContext, name: string, funcCall: BinaryFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType?: number): void => {\n context.compute(createBinaryOpProgramInfo(\n name, cacheKey ?? '', context.inputs[0], context.inputs[1], funcCall, additionalImplementation,\n outputDataType));\n };\n\nexport const add = (context: ComputeContext): void => {\n runBinaryOp(context, 'Add', (a, b) => `${a}+${b}`);\n};\n\nexport const div = (context: ComputeContext): void => {\n runBinaryOp(context, 'Div', (a, b) => `${a}/${b}`);\n};\n\nexport const equal = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Equal', ({scalar: (a, b) => `u32(${a}==${b})`, vector: (a, b) => `vec4(${a}==${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const mul = (context: ComputeContext): void => {\n runBinaryOp(context, 'Mul', (a, b) => `${a}*${b}`);\n};\n\nexport const pow = (context: ComputeContext): void => {\n const type = inputVariable('input', context.inputs[0].dataType, context.inputs[0].dims).type.value;\n const roundStr = type === 'i32' ? 
'round' : '';\n runBinaryOp(\n context, 'Pow', ({scalar: (a, b) => `pow_custom(${a},${b})`, vector: (a, b) => `pow_vector_custom(${a},${b})`}),\n `\n fn pow_custom(a : ${type}, b : ${type}) -> ${type} {\n if (b == ${type}(0.0)) {\n return ${type}(1.0);\n } else if (a < ${type}(0.0) && f32(b) != floor(f32(b))) {\n return ${type}(pow(f32(a), f32(b))); // NaN\n }\n return select(sign(a), ${type}(1.0), round(f32(abs(b) % ${type}(2.0))) != 1.0) * ${type}(${\n roundStr}(pow(f32(abs(a)), f32(b))));\n }\n fn pow_vector_custom(a : vec4<${type}>, b : vec4<${type}>) -> vec4<${type}> {\n // TODO: implement vectorized pow\n return vec4<${type}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w));\n }\n `);\n};\n\nexport const sub = (context: ComputeContext): void => {\n runBinaryOp(context, 'Sub', (a, b) => `${a}-${b}`);\n};\n\nexport const greater = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Greater', ({scalar: (a, b) => `u32(${a}>${b})`, vector: (a, b) => `vec4(${a}>${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const less = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Less', ({scalar: (a, b) => `u32(${a}<${b})`, vector: (a, b) => `vec4(${a}<${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const greaterOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'GreaterOrEqual', ({scalar: (a, b) => `u32(${a}>=${b})`, vector: (a, b) => `vec4(${a}>=${b})`}),\n undefined, undefined, DataType.bool);\n};\n\nexport const lessOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'LessOrEqual', ({scalar: (a, b) => `u32(${a}<=${b})`, vector: (a, b) => `vec4(${a}<=${b})`}),\n undefined, undefined, DataType.bool);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], axis: number): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n const referenceIndex = 0;\n const referenceInput = inputs[referenceIndex];\n const inputType = referenceInput.dataType;\n const inputRank = referenceInput.dims.length;\n inputs.forEach((input, i) => {\n if (i === referenceIndex) {\n return;\n }\n // make sure types of all inputs match\n if (input.dataType !== inputType) {\n throw new Error('input tensors should be one type');\n }\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputRank) {\n throw new Error('input tensors should have the same shape');\n }\n input.dims.forEach((dim, i) => {\n if (i !== axis && dim !== referenceInput.dims[i]) {\n throw new Error('non concat dimensions must match');\n }\n });\n });\n};\n\nconst calculateInputIndexImpl = (numberOfTensors: number, sizeInConcatAxisStr: string): string => `\n fn calculateInputIndex(index: u32) -> u32 {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n for (var i: u32 = 0u; i < ${numberOfTensors}; i += 1u ) {\n if (index < sizeInConcatAxis[i]) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n }`;\n\nconst assignOutputData = (inputs: readonly IndicesHelper[], output: IndicesHelper) => {\n const numberOfTensors = inputs.length;\n\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = output.setByOffset('global_idx', inputs[i].getByIndices('indices'));\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (inputIndex == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (inputIndex == ${i}) { ${returnSnippet} }`);\n }\n }\n return codeLines.join('\\n');\n};\n\nconst createConcatProgramInfo =\n (inputs: readonly TensorView[], adjustedAxis: number, outputShape: number[], dataType: DataType): ProgramInfo => {\n const outputSize = ShapeUtil.size(outputShape);\n\n const sizeInConcatAxis = new Array(inputs.length);\n const inputVars = new Array(inputs.length);\n\n let previousSum = 0;\n const inputDependencies: ProgramInputTensorInfoDependency[] = [];\n const inputRanks = [];\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: outputSize}];\n for (let i = 0; i < inputs.length; ++i) {\n previousSum += inputs[i].dims[adjustedAxis];\n sizeInConcatAxis[i] = previousSum;\n inputRanks.push(inputs[i].dims.length);\n inputVars[i] = inputVariable(`input${i}`, dataType, inputRanks[i]);\n inputDependencies.push('rank');\n programUniforms.push({type: DataType.uint32, data: sizeInConcatAxis[i]});\n }\n for (let i = 0; i < inputs.length; ++i) {\n programUniforms.push(...createTensorShapeVariables(inputs[i].dims));\n }\n 
programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const output = outputVariable('output', dataType, outputShape.length);\n const indicesAxis = output.indicesGet('indices', adjustedAxis);\n const sizeInConcatAxisStr =\n Array.from(Array(sizeInConcatAxis.length).keys()).map(i => `uniforms.sizeInConcatAxis${i}`).join(',');\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${(() => {\n shaderHelper.registerUniform('outputSize', 'u32');\n for (let i = 0; i < inputs.length; i++) {\n shaderHelper.registerUniform(`sizeInConcatAxis${i}`, 'u32');\n }\n return shaderHelper.declareVariables(...inputVars, output);\n })()}\n\n ${calculateInputIndexImpl(sizeInConcatAxis.length, sizeInConcatAxisStr)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n var indices = ${output.offsetToIndices('global_idx')};\n\n let inputIndex = calculateInputIndex(${indicesAxis});\n if (inputIndex != 0u) {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n ${indicesAxis} -= sizeInConcatAxis[inputIndex - 1u];\n }\n\n ${assignOutputData(inputVars, output)}\n }`;\n\n return {\n name: 'Concat',\n shaderCache: {hint: `${adjustedAxis}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const concat = (context: ComputeContext, attributes: ConcatAttributes): void => {\n const inputs = context.inputs;\n const inputShape = inputs[0].dims;\n const adjustedAxis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n validateInputs(inputs, adjustedAxis);\n const outputShape = inputShape.slice();\n outputShape[adjustedAxis] =\n inputs.reduce((sum, input) => sum + (input.dims.length > adjustedAxis ? input.dims[adjustedAxis] : 0), 0);\n // 0 length tensors are valid for concat, remove them\n const nonEmptyInputs = inputs.filter(input => ShapeUtil.size(input.dims) > 0);\n context.compute(\n createConcatProgramInfo(nonEmptyInputs, adjustedAxis, outputShape, inputs[0].dataType), {inputs: nonEmptyInputs});\n};\n\nexport const parseConcatAttributes = (attributes: Record): ConcatAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {MAX_CLIP, MIN_CLIP} from '../../util';\nimport {ProgramUniform} from '../types';\n\nimport {UniformsArrayType} from './common';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly alpha?: number;\n readonly beta?: number;\n}\n\nexport const getActivationSnippet =\n (attributes: InternalActivationAttributes, valueType: string, baseType = 'f32'): string => {\n switch (attributes.activation) {\n case 'Relu':\n return `value = max(value, ${valueType}(0.0));`;\n case 'Sigmoid':\n return `value = (${valueType}(1.0) / (${valueType}(1.0) + exp(-value)));`;\n case 'Clip':\n return `value = clamp(value, ${valueType}(${baseType}(uniforms.clip_min)), ${valueType}(${\n baseType}(uniforms.clip_max)));`;\n case 'HardSigmoid':\n return `value = max(${valueType}(0.0), min(${valueType}(1.0), ${baseType}(uniforms.alpha) * value + ${\n baseType}(uniforms.beta)));`;\n case 'LeakyRelu':\n return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;\n case '':\n return '';\n // TODO: adding other activations that can be fused.\n default:\n throw new Error(`Unsupported activation ${attributes.activation}`);\n }\n };\n\nexport const appendActivationUniformsData =\n (attributes: InternalActivationAttributes, programUniform: ProgramUniform[]) => {\n if (attributes.activation === 'Clip') {\n programUniform.push(\n {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});\n } else if (attributes.activation === 'HardSigmoid') {\n programUniform.push(\n {type: DataType.float, data: attributes.alpha!}, {type: DataType.float, data: attributes.beta!});\n } else if (attributes.activation === 'LeakyRelu') {\n programUniform.push({type: DataType.float, data: attributes.alpha!});\n }\n };\n\nexport const appendActivationUniforms = (attributes: InternalActivationAttributes, uniforms: UniformsArrayType) => {\n if (attributes.activation === 'Clip') {\n uniforms.push({name: 'clip_max', type: 'f32'}, {name: 'clip_min', type: 'f32'});\n } else if (attributes.activation === 'HardSigmoid') {\n uniforms.push({name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'});\n } else if (attributes.activation === 'LeakyRelu') {\n uniforms.push({name: 'alpha', type: 'f32'});\n }\n};\n\nexport const parseInternalActivationAttributes =\n (attributes: Record|undefined): InternalActivationAttributes => {\n const activation = attributes?.activation as string || '';\n if (activation === 'HardSigmoid') {\n const [alpha, beta] = attributes?.activation_params as [number, number] || [0.2, 0.5];\n return {activation, alpha, beta};\n } else if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes?.activation_params as [number, number] || [MIN_CLIP, MAX_CLIP];\n return {activation, clipMax, clipMin};\n } else if (activation === 'LeakyRelu') {\n const [alpha] = attributes?.activation_params as [number] || [0.01];\n return {activation, alpha};\n }\n return {activation};\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/activation_util.ts\n//\n// modified to fit the needs of the project\n\nexport const typeSnippet = (component: number, dataType: string) => {\n switch (component) {\n case 1:\n return dataType;\n case 2:\n return `vec2<${dataType}>`;\n case 3:\n return `vec3<${dataType}>`;\n case 4:\n return `vec4<${dataType}>`;\n default:\n throw new Error(`${component}-component is not supported.`);\n }\n};\n\nexport const biasSnippet = (hasBias: boolean): string => `\n ${hasBias ? 'value = value + getBiasByOutputCoords(coords);' : ''}\n `;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-core/src/ops/conv_util.ts\n//\n// modified to fit the needs of the project\n\nexport const utilFunctions = (strideStr: string) => (`\nfn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 {\n return dot(coords, vec4(\n shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1));\n}\nfn getOutputIndexFromCoords(coords : vec4) -> i32 {\n return dot(coords, vec4(\n i32(${strideStr}.x), i32(${strideStr}.y), i32(${strideStr}.z), 1));\n}\n`);\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/matmul_packed_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getBroadcastDims, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from '../fuse-utils';\n\nimport {typeSnippet} from './activation_util';\n\nconst writeDataToSubAVec4Snippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRow + innerRow,\n kStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst calculateResultSnippet = (transposeA: boolean, innerElementSize: number) => {\n if (transposeA) {\n return `\n let ACached0 = mm_Asub[k * innerElementSize][localRow];\n let ACached1 = mm_Asub[k * innerElementSize + 1][localRow];\n let ACached2 = mm_Asub[k * innerElementSize + 2][localRow];\n ${innerElementSize === 3 ? '' : 'let ACached3 = mm_Asub[k * innerElementSize + 3][localRow];'}\n for (var i = 0; i < rowPerThread; i = i + 1) {\n acc[i] = BCached0 * ACached0[i] + acc[i];\n acc[i] = BCached1 * ACached1[i] + acc[i];\n acc[i] = BCached2 * ACached2[i] + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached3[i] + acc[i];'}\n }`;\n } else {\n return `\n for (var i = 0; i < rowPerThread; i = i + 1) {\n let ACached = mm_Asub[tileRow + i][k];\n acc[i] = BCached0 * ACached.x + acc[i];\n acc[i] = BCached1 * ACached.y + acc[i];\n acc[i] = BCached2 * ACached.z + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached.w + acc[i];'}\n }`;\n }\n};\n\nexport const makeMatMulPackedVec4Source =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32): string => {\n const tileAOuter = workgroupSize[1] * workPerThread[1];\n const tileBOuter = workgroupSize[0] * workPerThread[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? 
tileInner : tileAOuter;\n const innerElementSize = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n\n if (!(((transposeA && innerElementSize === 4 && workPerThread[1] === 4) ||\n (!transposeA && (innerElementSize === 3 || innerElementSize === 4))) &&\n tileAWidth % workgroupSize[0] === 0 && tileInner % workgroupSize[1] === 0 && workPerThread[0] === 4)) {\n throw new Error(`If transposeA ${transposeA} is true, innerElementSize ${\n innerElementSize} and workPerThread[1] ${workPerThread[1]} must be 4.\n Otherwise, innerElementSize ${innerElementSize} must be 3 or 4.\n tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${workgroupSize[0]}. tileInner ${\n tileInner} must be divisible by workgroupSize[1] ${workgroupSize[1]}. colPerThread ${\n workPerThread[0]} must be 4.`);\n }\n return `\nvar mm_Asub: array, ${tileAWidth / innerElementSize}>, ${tileAHight}>;\nvar mm_Bsub: array, ${tileBOuter / workPerThread[0]}>, ${tileInner}>;\n\nconst rowPerThread = ${workPerThread[1]};\nconst colPerThread = ${workPerThread[0]};\nconst innerElementSize = ${innerElementSize};\nconst tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let localRow = i32(localId.y);\n let tileRow = localRow * rowPerThread;\n let tileCol = i32(localId.x);\n\n let globalRow =i32(globalId.y) * rowPerThread;\n let globalCol = i32(globalId.x);\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\n let num_tiles = ${splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc: array, rowPerThread>;\n\n // Loop over shared dimension.\n let tileRowB = localRow * ${rowPerThreadB};\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let inputRow = tileRow + innerRow;\n let inputCol = tileCol;\n ${writeDataToSubAVec4Snippet(transposeA, batchDims)}\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch, kStart + inputRow, globalCol${\n batchDims ? ', batchIndices' : ''});\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n for (var k = 0; k < tileInner / innerElementSize; k = k + 1) {\n let BCached0 = mm_Bsub[k * innerElementSize][tileCol];\n let BCached1 = mm_Bsub[k * innerElementSize + 1][tileCol];\n let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol];\n ${innerElementSize === 3 ? 
'' : 'let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];'}\n\n ${calculateResultSnippet(transposeA, innerElementSize)}\n }\n\n workgroupBarrier();\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n mm_write(batch, globalRow + innerRow, globalCol, acc[innerRow]);\n }\n}`;\n };\n\nconst writeDataToSubASnippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRowStart + inputRow,\n kStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst readDataFromSubASnippet = (transposeA: boolean) =>\n transposeA ? 'let ACached = mm_Asub[k][tileRow + innerRow];' : 'let ACached = mm_Asub[tileRow + innerRow][k];';\n\n// sequentialAccessByThreads means sequential data in memory is accessed by\n// threads, instead of a single thread (default behavior).\nexport const makeMatMulPackedSource =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32,\n sequentialAccessByThreads = false): string => {\n const tileAOuter = workPerThread[1] * workgroupSize[1];\n const tileBOuter = workPerThread[0] * workgroupSize[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? tileInner : tileAOuter;\n\n if (!(tileAHight % workgroupSize[1] === 0 && tileAWidth % workgroupSize[0] === 0 &&\n tileInner % workgroupSize[1] === 0)) {\n throw new Error(`tileAHight ${tileAHight} must be divisible by workgroupSize[1]${\n workgroupSize[1]}, tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${\n workgroupSize[0]}, tileInner ${tileInner} must be divisible by workgroupSize[1]${workgroupSize[1]}`);\n }\n const rowPerThreadA = tileAHight / workgroupSize[1];\n const colPerThreadA = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n const matmulSnippet = sequentialAccessByThreads ?\n `\n let localRow = i32(localId.y);\n let localCol = i32(localId.x);\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n let globalColStart = i32(workgroupId.x) * ${tileBOuter};\n\n // Loop over shared dimension.\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var inputRow = localRow; inputRow < ${tileAHight}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileAWidth}; inputCol = inputCol + ${workgroupSize[0]}) {\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n // Load one tile of B into local memory.\n for (var inputRow = localRow; inputRow < ${tileInner}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileBOuter}; inputCol = inputCol + ${workgroupSize[0]}) {\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalColStart + inputCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][localCol + inner * ${workgroupSize[0]}];\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let ACached = ${\n transposeA ? `mm_Asub[k][localRow + innerRow * ${workgroupSize[1]}];` :\n `mm_Asub[localRow + innerRow * ${workgroupSize[1]}][k];`}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] +\n ACached * BCached[innerCol];\n }\n }\n }\n workgroupBarrier();\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let gRow = globalRowStart + localRow + innerRow * ${workgroupSize[1]};\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let gCol = globalColStart + localCol + innerCol * ${workgroupSize[0]};\n mm_write(batch, gRow, gCol, acc[innerRow][innerCol]);\n }\n }\n ` :\n `\nlet tileRow = i32(localId.y) * rowPerThread;\nlet tileCol = i32(localId.x) * colPerThread;\n\nlet globalRow = i32(globalId.y) * rowPerThread;\nlet globalCol = i32(globalId.x) * colPerThread;\nlet globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\nlet tileRowA = i32(localId.y) * ${rowPerThreadA};\nlet tileColA = i32(localId.x) * ${colPerThreadA};\nlet tileRowB = i32(localId.y) * ${rowPerThreadB};\n// Loop over shared dimension.\nfor (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadA}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < ${colPerThreadA}; innerCol = innerCol + 1) {\n let inputRow = tileRowA + innerRow;\n let inputCol = tileColA + innerCol;\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol + innerCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalCol + innerCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][tileCol + inner];\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n ${readDataFromSubASnippet(transposeA)}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol];\n }\n }\n }\n\n workgroupBarrier();\n}\n\nfor (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n mm_write(batch, globalRow + innerRow, globalCol + innerCol,\n acc[innerRow][innerCol]);\n }\n}\n`;\n\n return `\n var mm_Asub : array, ${tileAHight}>;\n var mm_Bsub : array, ${tileInner}>;\n const rowPerThread = ${workPerThread[1]};\n const colPerThread = ${workPerThread[0]};\n const tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let num_tiles = ${\n splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc : array, rowPerThread>;\n ${matmulSnippet}\n }\n`;\n };\n\nconst matMulReadWriteFnSource =\n (component: number, hasBias: boolean, applyActivation: string, variables: IndicesHelper[],\n batchShapes: Array, isChannelsLast = false): string => {\n const [batchAShape, batchBShape, batchShape] = batchShapes;\n const [batchVariable, aVariable, bVariable, outputVariable] = variables;\n const broadCastADims = getBroadcastDims(batchAShape, batchShape);\n const broadCastBDims = getBroadcastDims(batchBShape, batchShape);\n const dataType = tensorTypeToWsglStorageType(variables[0].type.tensor);\n const getAIndices = () => {\n const aRank = aVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var aIndices: ${aVariable.type.indices};`;\n for (let i = aRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\naIndices[${i}] = ${batchRank > 1 ? `batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastADims.forEach(i => {\n resStr += `\\naIndices[${i}] = 0;`;\n });\n resStr += `\\naIndices[${aRank - 2}] = u32(row);\n aIndices[${aRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const getBIndices = () => {\n const bRank = bVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var bIndices: ${bVariable.type.indices};`;\n for (let i = bRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\nbIndices[${i}] = ${batchRank > 1 ? 
`batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastBDims.forEach(i => {\n resStr += `\\nbIndices[${i}] = 0;`;\n });\n resStr += `\\nbIndices[${bRank - 2}] = u32(row);\n bIndices[${bRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const source = `\n fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_a_outer && col < uniforms.dim_inner)\n {\n ${getAIndices()}\n value = ${aVariable.getByIndices('aIndices')};\n }\n return value;\n }\n\n fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_inner && col < uniforms.dim_b_outer)\n {\n ${getBIndices()}\n value = ${bVariable.getByIndices('bIndices')};\n }\n return value;\n }\n\n fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${typeSnippet(component, dataType)}) {\n let col = colIn * ${component};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueIn;\n let coords = vec3(batch, row, colIn);\n ${\n hasBias ?\n `value = value + ${isChannelsLast ? 'bias[colIn]' : `${typeSnippet(component, dataType)}(bias[row])`};` :\n '' }\n ${applyActivation}\n ${outputVariable.setByIndices('vec3(coords)', 'value')}\n }\n }\n `;\n return source;\n };\n\nexport const createMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const dimAOuter = aShape[aShape.length - 2];\n const dimInner = aShape[aShape.length - 1];\n const dimBOuter = bShape[bShape.length - 1];\n const isVec4 = dimInner % 4 === 0 && dimBOuter % 4 === 0;\n\n // TODO: fine tune size\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const workgroupSize: [number, number, number] = [8, 8, 1];\n const dispatch = [\n Math.ceil(dimBOuter / workgroupSize[0] / elementsPerThread[0]),\n Math.ceil(dimAOuter / workgroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workgroupSize[2] / elementsPerThread[2])\n ];\n\n const components = isVec4 ? 
4 : 1;\n const aShapeTemp = [...outerDimsA, dimAOuter, dimInner / components];\n const aRank = aShapeTemp.length;\n const bShapeTemp = [...outerDimsB, dimInner, dimBOuter / components];\n const bRank = bShapeTemp.length;\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShapeTemp, bShapeTemp));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n const hasBias = inputs.length > 2;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchRank = outerDims.length;\n const batchDims = internalVariable('batchDims', inputs[0].dataType, batchRank, 1);\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const A = inputVariable('a', inputs[0].dataType, aRank, components);\n const B = inputVariable('b', inputs[1].dataType, bRank, components);\n const output = outputVariable('result', inputs[0].dataType, outputShapeTemp.length, components);\n const inputVariables = [A, B];\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n }\n const uniforms: UniformsArrayType =\n [{name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'}];\n appendActivationUniforms(activationAttributes, uniforms);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const declareFunctions = matMulReadWriteFnSource(\n components, hasBias, applyActivation, [batchDims, A, B, output], [outerDimsA, outerDimsB, outerDims],\n isChannelsLast);\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${declareFunctions}\n ${\n isVec4 ? makeMatMulPackedVec4Source(elementsPerThread, workgroupSize, dataType, batchDims) :\n makeMatMulPackedSource(elementsPerThread, workgroupSize, dataType, batchDims)}\n `;\n };\n return {\n name: 'MatMul',\n shaderCache: {\n hint: `${elementsPerThread};${activationAttributes.activation};${isVec4};${isChannelsLast}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv2d_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet, typeSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dCommonSnippet =\n (isChannelsLast: boolean, fitAOuter: boolean, fitBOuter: boolean, fitInner: boolean, addBias = false,\n attributes: ConvAttributes, innerElementSizeX = 4, innerElementSizeW = 4, innerElementSize = 4,\n dataType = 'f32'): string => {\n const getXSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'resData = x[xIndex];';\n case 3:\n return `resData = vec3<${dataType}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;\n case 4:\n return 'resData = x[xIndex / 4];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[row * i32(uniforms.w_shape[3]) + colIn];';\n case 4:\n return 'return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, xRow, xCol, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, xRow, xCol);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n const readXSnippet = `\n let inChannels = i32(uniforms.w_shape[2]);\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (i32(uniforms.w_shape[1]) * inChannels);\n let WCol = ${col} / inChannels % i32(uniforms.w_shape[1]);\n let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0];\n let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1];\n let xCh = ${col} % inChannels;\n var resData = ${typeSnippet(innerElementSizeX, dataType)}(0.0);\n // The bounds checking is always needed since we use it to pad zero for\n // the 'same' padding type.\n if (xRow >= 0 && xRow < ${xHeight} && xCol >= 0 && xCol < ${xWidth}) {\n ${coordASnippet}\n let xIndex = getIndexFromCoords4D(coord, vec4(uniforms.x_shape));\n ${getXSnippet(innerElementSizeX)}\n }\n return resData;`;\n\n const sampleX = isChannelsLast ? (fitAOuter && fitInner ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`) :\n (fitInner && fitBOuter ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`);\n\n const sampleW = `${getWSnippet(innerElementSizeW)}`;\n\n const resType = typeSnippet(innerElementSize, dataType);\n const aType =\n isChannelsLast ? typeSnippet(innerElementSizeX, dataType) : typeSnippet(innerElementSizeW, dataType);\n const bType =\n isChannelsLast ? typeSnippet(innerElementSizeW, dataType) : typeSnippet(innerElementSizeX, dataType);\n const applyActivation = getActivationSnippet(attributes, resType, dataType);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${aType} {\n ${isChannelsLast ? sampleX : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${bType} {\n ${isChannelsLast ? sampleW : sampleX}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueIn : ${resType}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer)\n {\n var value = valueIn;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value);\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[], dimAOuter: number,\n dimBOuter: number, dimInner: number, hasBias: boolean, sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 || inChannels % 3 === 0) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv2d_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const tileAOuter = workGroupSize[1] * elementsPerThread[1];\n const tileBOuter = workGroupSize[0] * elementsPerThread[0];\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const fitAOuter = dimAOuter % tileAOuter === 0;\n const fitBOuter = dimBOuter % tileBOuter === 0;\n const fitInner = dimInner % tileInner === 0;\n const elementsSize = isVec4 ? [innerElementSize, 4, 4] : [1, 1, 1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.int32, data: attributes.strides}, {type: DataType.int32, data: attributes.dilations}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'pad', type: 'i32', length: 2}, {name: 'stride', type: 'i32', length: 2},\n {name: 'dilation', type: 'i32', length: 2}\n ];\n appendActivationUniforms(attributes, uniforms);\n\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n result[flatIndex] = ${isVec4 ? `vec4<${t}>` : t}(value);\n }\n fn setOutputAtCoords(d0 : i32, d1 : i32, d2 : i32, d3 : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n let flatIndex = getOutputIndexFromCoords(vec4(d0, d1, d2, d3));\n setOutputAtIndex(flatIndex ${isVec4 ? '/ 4' : ''}, value);\n }`;\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n return `\n ${utilFunctions('uniforms.result_strides')}\n //struct Uniforms { xShape : vec4, wShape : vec4, outShape : vec4,\n // outShapeStrides: vec3, filterDims : vec2, pad : vec2, stride : vec2,\n // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 };\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n ${\n conv2dCommonSnippet(\n isChannelsLast, fitAOuter, fitBOuter, fitInner, hasBias, attributes, elementsSize[0], elementsSize[1],\n elementsSize[2], t)}\n ${\n isVec4 ?\n makeMatMulPackedVec4Source(elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner, false, undefined,\n sequentialAccessByThreads)}`;\n };\n return {\n name: 'Conv2DMatMul',\n shaderCache: {\n hint: `${attributes.cacheKey};${innerElementSize};${isVec4};${fitAOuter};${fitBOuter};${fitInner};${\n tileAOuter};${tileBOuter};${tileInner}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv3d_naive_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\n\nconst arrayProduct = (arr: number[]) => {\n let product = 1;\n for (let i = 0; i < arr.length; i++) {\n product *= arr[i];\n }\n return product;\n};\n\nconst parse3TupleParam = (param: number|[number, number, number]): [number, number, number] =>\n typeof param === 'number' ? 
[param, param, param] : param;\n\nconst getEffectiveFilterSize = (filterSize: number, dilation: number): number => {\n if (dilation <= 1) {\n return filterSize;\n }\n\n return filterSize + (filterSize - 1) * (dilation - 1);\n};\n\nconst computeDefaultPad =\n (inputShape: [number, number]|[number, number, number, number], fieldSize: number, stride: number, dilation = 1):\n number => {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n };\n\nconst computeOutputShape4D =\n (inShape: [number, number, number, number], filterShape: [number, number, number], outChannels: number,\n strides: [number, number, number], zeroPad?: number): [number, number, number, number] => {\n if (zeroPad == null) {\n // eslint-disable-next-line no-param-reassign\n zeroPad = computeDefaultPad(inShape, filterShape[0], strides[0]);\n }\n const outShape: [number, number, number, number] = [0, 0, 0, outChannels];\n for (let index = 0; index < 3; index++) {\n if (inShape[index] + 2 * zeroPad >= filterShape[index]) {\n outShape[index] = Math.trunc((inShape[index] - filterShape[index] + 2 * zeroPad) / strides[index] + 1);\n }\n }\n return outShape;\n };\n\nconst get3DPadAndOutInfo =\n (pad: number|string|number[], inDepth: number, inHeight: number, inWidth: number, strideDepth: number,\n strideHeight: number, strideWidth: number, filterDepth: number, filterHeight: number,\n filterWidth: number): {padInfo: PadInfo3D; outDepth: number; outHeight: number; outWidth: number} => {\n let padInfo: PadInfo3D;\n let outDepth: number;\n let outHeight: number;\n let outWidth: number;\n\n if (pad === 'VALID') {\n // eslint-disable-next-line no-param-reassign\n pad = 0;\n }\n\n if (typeof pad === 'number') {\n padInfo = {top: pad, bottom: pad, left: pad, right: pad, front: pad, back: pad};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (Array.isArray(pad)) {\n if (!pad.every((val, _, arr) => val === arr[0])) {\n throw Error(`Unsupported padding parameter: ${pad}`);\n }\n padInfo = {top: pad[0], bottom: pad[1], left: pad[2], right: pad[3], front: pad[4], back: pad[5]};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad[0]);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (pad === 'SAME_UPPER') {\n // TODO: support 'SAME_LOWER'.\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n\n padInfo = {top, bottom, left, right, front, back};\n } else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return {padInfo, outDepth, outHeight, outWidth};\n };\n\ntype PadInfo3D = {\n top: 
number; left: number; right: number; bottom: number; front: number; back: number;\n};\n\nexport type Conv3DInfo = {\n batchSize: number; inDepth: number; inHeight: number; inWidth: number; inChannels: number; outDepth: number;\n outHeight: number;\n outWidth: number;\n outChannels: number;\n dataFormat: 'channelsFirst' | 'channelsLast';\n strideDepth: number;\n strideHeight: number;\n strideWidth: number;\n dilationDepth: number;\n dilationHeight: number;\n dilationWidth: number;\n filterDepth: number;\n filterHeight: number;\n filterWidth: number;\n effectiveFilterDepth: number;\n effectiveFilterHeight: number;\n effectiveFilterWidth: number;\n padInfo: PadInfo3D;\n inShape: [number, number, number, number, number];\n outShape: [number, number, number, number, number];\n filterShape: [number, number, number, number, number];\n};\n\nexport const computeConv3DInfo =\n (inShape: [number, number, number, number, number], filterShape: [number, number, number, number, number],\n strides: number|[number, number, number], dilations: number|[number, number, number], pad: number|string|number[],\n depthwise = false, dataFormat: 'channelsFirst'|'channelsLast' = 'channelsLast'): Conv3DInfo => {\n let batchSize, inDepth, inHeight, inWidth, inChannels;\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n } else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n } else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterChannels, , filterDepth, filterHeight, filterWidth] = filterShape;\n\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const {padInfo, outDepth, outHeight, outWidth} = get3DPadAndOutInfo(\n pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth,\n effectiveFilterHeight, effectiveFilterWidth);\n\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n\n let outShape: [number, number, number, number, number] = [0, 0, 0, 0, 0];\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n } else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n };\n\nexport const createConv3DNaiveProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[],\n filterDims: readonly number[], pads: readonly number[], dataFormat: string): ProgramInfo => {\n const isChannelsLast = dataFormat === 'channelsLast';\n const inChannels = isChannelsLast ? 
inputs[0].dims[3] : inputs[0].dims[1];\n // TODO: enable vec4.\n const isVec4 = false;\n const workGroupSize: [number, number, number] = [64, 1, 1];\n const dispatchLayout = {x: outputShape.map((_, i) => i)};\n const dispatch = [Math.ceil(arrayProduct(dispatchLayout.x.map(d => outputShape[d])) / (workGroupSize[0])), 1, 1];\n\n LOG_DEBUG('verbose', () => `[conv3d_naive_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: filterDims},\n {type: DataType.uint32, data: pads}, {type: DataType.uint32, data: attributes.strides},\n {type: DataType.uint32, data: attributes.dilations}\n ];\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'pads', type: 'u32', length: pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length},\n {name: 'dilations', type: 'u32', length: attributes.dilations.length}\n ];\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : array) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[${isChannelsLast ? getElementAt('coords', 4, 5) : getElementAt('coords', 1, 5)}${\n isVec4 ? '/ 4' : ''}];\n }`;\n }\n\n return `\n ${declareFunctions}\n fn getX(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${x.getByIndices('aIndices')};\n }\n fn getW(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${w.getByIndices('aIndices')};\n }\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let coords = ${output.offsetToIndices('global_idx')};\n let batch = ${getElementAt('coords', 0, x.rank)};\n let d2 = ${\n isChannelsLast ? getElementAt('coords', x.rank - 1, x.rank) : getElementAt('coords', 1, x.rank)};\n let xFRCCorner = vec3(${\n isChannelsLast ? getElementAt('coords', 1, x.rank) : getElementAt('coords', 2, x.rank)},\n ${isChannelsLast ? getElementAt('coords', 2, x.rank) : getElementAt('coords', 3, x.rank)},\n ${\n isChannelsLast ? 
getElementAt('coords', 3, x.rank) :\n getElementAt('coords', 4, x.rank)}) * uniforms.strides - uniforms.pads;\n let xFCorner = xFRCCorner.x;\n let xRCorner = xFRCCorner.y;\n let xCCorner = xFRCCorner.z;\n let xShapeY = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 1, x.rank) : getElementAt('uniforms.x_shape', 2, x.rank)};\n let xShapeZ = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 2, x.rank) : getElementAt('uniforms.x_shape', 3, x.rank)};\n let xShapeW = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 3, x.rank) : getElementAt('uniforms.x_shape', 4, x.rank)};\n let xShapeU = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 4, x.rank) : getElementAt('uniforms.x_shape', 1, x.rank)};\n let inputDepthNearestVec4 = (xShapeU / 4) * 4;\n let inputDepthVec4Remainder = xShapeU % 4;\n\n var dotProd = 0.0;\n for (var wF = 0u; wF < uniforms.filter_dims[0]; wF++) {\n let xF = xFCorner + wF * uniforms.dilations[0];\n if (xF < 0 || xF >= xShapeY) {\n continue;\n }\n\n for (var wR = 0u; wR < uniforms.filter_dims[1]; wR++) {\n let xR = xRCorner + wR * uniforms.dilations[1];\n if (xR < 0 || xR >= xShapeZ) {\n continue;\n }\n\n for (var wC = 0u; wC < uniforms.filter_dims[2]; wC++) {\n let xC = xCCorner + wC * uniforms.dilations[2];\n if (xC < 0 || xC >= xShapeW) {\n continue;\n }\n\n for (var d1 = 0u; d1 < inputDepthNearestVec4; d1 += 4) {\n ${\n isChannelsLast ? `let xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3));\n ` :\n `let xValues = vec4(\n getX(batch, d1, xF, xR, xC),\n getX(batch, d1 + 1, xF, xR, xC),\n getX(batch, d1 + 2, xF, xR, xC),\n getX(batch, d1 + 3, xF, xR, xC));\n `}\n let wValues = vec4(\n getW(d2, d1, wF, wR, wC),\n getW(d2, d1 + 1, wF, wR, wC),\n getW(d2, d1 + 2, wF, wR, wC),\n getW(d2, d1 + 3, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n if (inputDepthVec4Remainder == 1) {\n ${\n isChannelsLast ? `dotProd += getX(batch, xF, xR, xC, inputDepthNearestVec4)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);` :\n `dotProd += getX(batch, inputDepthNearestVec4, xF, xR, xC)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);`}\n } else if (inputDepthVec4Remainder == 2) {\n ${\n isChannelsLast ? `let xValues = vec2(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1));\n ` :\n `let xValues = vec2(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC));\n `}\n let wValues = vec2(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n } else if (inputDepthVec4Remainder == 3) {\n ${\n isChannelsLast ? `let xValues = vec3(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 2));\n ` :\n `let xValues = vec3(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 2, xF, xR, xC));\n `}\n let wValues = vec3(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 2, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n ${hasBias ? 
'dotProd = dotProd + getBiasByOutputCoords(coords)' : ''};\n result[global_idx] = f32(dotProd);\n }`;\n };\n return {\n name: 'Conv3DNaive',\n shaderCache:\n {hint: `${attributes.cacheKey};${isChannelsLast};${innerElementSize};${hasBias}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from './fuse-utils';\n\n/**\n * naive grouped conv implementation, supports 1d/2d conv\n * @param squeezeOutputShapeFunction - an optional function to squeeze the output shape, only used in conv1d\n */\nexport const createGroupedConvProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += b[output_channel];' : '';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n\n const isChannelLast = attributes.format === 'NHWC';\n const outputShape = calculateOutputShape(\n xShape, wShape, attributes.dilations, attributes.pads, attributes.strides, isChannelLast);\n const outputSize = ShapeUtil.size(outputShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.dilations},\n {type: DataType.uint32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.uint32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.uint32, data: outputChannelsPerGroup}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length);\n const w = inputVariable('w', inputs[1].dataType, wShape.length);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims.length));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'dilations', type: 'u32', length: attributes.dilations.length},\n {name: 'strides', type: 'u32', length: 2}, {name: 'pads', type: 'u32', length: 
2},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch: u32 = outputIndices[0];\n let output_channel: u32 = outputIndices[${isChannelLast ? 3 : 1}];\n let xRCCorner: vec2 = vec2(outputIndices[${isChannelLast ? 1 : 2}], outputIndices[${\n isChannelLast ? 2 : 3}]) * uniforms.strides - uniforms.pads;\n let group_id: u32 = output_channel / uniforms.output_channels_per_group;\n\n var value: ${output.type.value} = ${output.type.value}(0);\n for (var wInChannel: u32 = 0u; wInChannel < uniforms.w_shape[1]; wInChannel++) {\n let input_channel = group_id * uniforms.w_shape[1] + wInChannel;\n for (var wHeight: u32 = 0u; wHeight < uniforms.w_shape[2]; wHeight++) {\n let xHeight = xRCCorner.x + wHeight * uniforms.dilations[0];\n\n if (xHeight < 0u || xHeight >= uniforms.x_shape[${isChannelLast ? 1 : 2}]) {\n continue;\n }\n\n for (var wWidth: u32 = 0u; wWidth < uniforms.w_shape[3]; wWidth++) {\n let xWidth = xRCCorner.y + wWidth * uniforms.dilations[1];\n if (xWidth < 0u || xWidth >= uniforms.x_shape[${isChannelLast ? 2 : 3}]) {\n continue;\n }\n\n let xVal = ${\n isChannelLast ? x.get('batch', 'xHeight', 'xWidth', 'input_channel') :\n x.get('batch', 'input_channel', 'xHeight', 'xWidth')};\n let wVal = ${w.get('output_channel', 'wInChannel', 'wHeight', 'wWidth')};\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${output.setByOffset('global_idx', 'value')}\n }`;\n };\n return {\n name: 'GroupedConv',\n shaderCache: {hint: attributes.cacheKey, inputDependencies},\n getRunData: () => ({\n outputs: [{\n dims: squeezeOutputShapeFunction ? 
squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const createGroupedConvVectorizeProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const components = getMaxComponents(outputShape[3]);\n const outputNumber = getMaxComponents(outputShape[2]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const xShape = [inputs[0].dims[0], inputs[0].dims[1], inputs[0].dims[2], inputs[0].dims[3] / components];\n const wShape = [inputs[1].dims[0], inputs[1].dims[1], inputs[1].dims[2], inputs[1].dims[3] / components];\n const outputShapeInShader = [outputShape[0], outputShape[1], outputShape[2], outputShape[3] / components];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.int32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape, outputShapeInShader));\n const xNumber = (outputNumber - 1) * attributes.strides[1] + wShape[1];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length, components);\n const w = inputVariable('w', inputs[1].dataType, wShape.length, components);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims, components));\n }\n const processBias = hasBias ? 
'value += b[output_channel];' : '';\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'strides', type: 'i32', length: 2},\n {name: 'pads', type: 'i32', length: 2},\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let width0 = uniforms.output_shape[3];\n let output_channel = global_idx % width0;\n var index1 = global_idx / width0;\n let width1 = uniforms.output_shape[2] / ${outputNumber}u;\n let col = (index1 % width1) * ${outputNumber}u;\n index1 = index1 / width1;\n let row = index1 % uniforms.output_shape[1];\n let batch = index1 / uniforms.output_shape[1];\n\n let x_corner = vec2(i32(row), i32(col)) * uniforms.strides - uniforms.pads;\n\n var x_vals: array<${x.type.value}, ${xNumber}>;\n var values: array<${output.type.value}, ${outputNumber}>;\n let input_channel = output_channel;\n // Use constant instead of uniform can give better performance for w's height/width.\n for (var w_height: u32 = 0u; w_height < ${wShape[0]}; w_height++) {\n let x_height = x_corner.x + i32(w_height);\n if (x_height >= 0 && u32(x_height) < uniforms.x_shape[1]) {\n for (var i = 0; i < ${xNumber}; i++) {\n let x_width = x_corner.y + i;\n if (x_width >= 0 && u32(x_width) < uniforms.x_shape[2]) {\n x_vals[i] = ${x.get('batch', 'u32(x_height)', 'u32(x_width)', 'input_channel')};\n } else {\n x_vals[i] = ${x.type.value}(0);\n }\n }\n for (var w_width: u32 = 0u; w_width < ${wShape[1]}; w_width++) {\n let w_val = ${w.get('w_height', 'w_width', '0', 'output_channel')};\n for (var i = 0u; i < ${outputNumber}u; i++) {\n values[i] = fma(x_vals[i * u32(uniforms.strides[1]) + w_width], w_val, values[i]);\n }\n }\n }\n }\n\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n ${output.set('batch', 'row', 'col + i', 'output_channel', 'value')};\n }\n }`;\n };\n\n return {\n name: 'GroupedConv-Vectorize',\n shaderCache: {\n hint: `${attributes.cacheKey};${components};${outputNumber};${xNumber};${wShape[0]};${wShape[1]}`,\n inputDependencies: hasBias ? ['rank', 'rank', 'type'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createTensorShapeVariables, getBroadcastDims, getMaxComponents, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\n\nexport const createNaiveMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n\n const M = aShape[aShape.length - 2];\n const N = bShape[bShape.length - 1];\n const K = aShape[aShape.length - 1];\n const components = getMaxComponents(N);\n const aComponents = getMaxComponents(K);\n const outputNumber = getMaxComponents(M);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const hasBias = inputs.length > 2;\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const outputShapeInShader = [batchSize, M, N];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShape, bShape));\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeInShader));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchDims = internalVariable('batch_dims', inputs[0].dataType, outerDims.length);\n const a = inputVariable('a', inputs[0].dataType, aShape.length, aComponents);\n const b = inputVariable('b', inputs[1].dataType, bShape.length, components);\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const inputVariables = [a, b];\n let processBias = '';\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n processBias = `${\n isChannelsLast ? 
`value += bias[col / ${biasComponents}];` :\n `value += ${output.type.value}(bias[row + i]);`}`;\n }\n\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const broadCastADims = getBroadcastDims(outerDimsA, outerDims);\n const broadCastBDims = getBroadcastDims(outerDimsB, outerDims);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'K', type: 'u32'}\n ];\n appendActivationUniforms(activationAttributes, uniforms);\n\n const getIndices = (variable: IndicesHelper, broadCastDims: number[]) => {\n const rank = variable.rank;\n const name = variable.name;\n if (rank === 2) {\n return `var ${name}_indices = ${variable.type.indices}(0u, 0u);`;\n }\n const batchRank = batchDims.rank;\n let resStr = `var ${name}_indices: ${variable.type.indices};`;\n for (let i = rank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\n${name}_indices[${i}] = ${batchRank > 1 ? `batch_indices[${j}]` : 'batch_indices'};`;\n }\n broadCastDims.forEach(i => {\n resStr += `\\n${name}_indices[${i}] = 0;`;\n });\n resStr += `${name}_indices[${rank - 2}] = 0u;\n ${name}_indices[${rank - 1}] = 0u;`;\n return resStr;\n };\n\n const calcResult = (): string => {\n let calcStr = `var a_data: ${a.type.value};`;\n for (let i = 0; i < aComponents; i++) {\n calcStr += `\n let b_data${i} = b[(b_offset + (k + ${i}) * uniforms.N + col) / ${components}];`;\n }\n for (let i = 0; i < outputNumber; i++) {\n calcStr += `a_data = a[(a_offset + (row + ${i}) * uniforms.K + k) / ${aComponents}];`;\n\n for (let j = 0; j < aComponents; j++) {\n calcStr += `\n values[${i}] = fma(${b.type.value}(a_data${aComponents === 1 ? '' : `[${j}]`}), b_data${j}, values[${\n i}]);\\n`;\n }\n }\n return calcStr;\n };\n\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let col = (global_idx % (uniforms.N / ${components})) * ${components};\n var index1 = global_idx / (uniforms.N / ${components});\n let stride1 = uniforms.M / ${outputNumber};\n let row = (index1 % stride1) * ${outputNumber};\n let batch = index1 / stride1;\n\n ${outputShape.length === 2 ? '' : `let batch_indices = ${batchDims.offsetToIndices('batch')};`}\n ${getIndices(a, broadCastADims)}\n let a_offset = ${a.indicesToOffset('a_indices')};\n ${getIndices(b, broadCastBDims)}\n let b_offset = ${b.indicesToOffset('b_indices')};\n var values: array<${output.type.value}, ${outputNumber}>;\n for (var k: u32 = 0u; k < uniforms.K; k = k + ${aComponents}) {\n ${calcResult()}\n }\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n let cur_indices = ${output.type.indices}(batch, row + i, col);\n let offset = ${output.indicesToOffset('cur_indices')};\n ${output.setByOffset(`offset / ${components}`, 'value')};\n }\n }\n `;\n };\n return {\n name: 'MatMulNaive',\n shaderCache: {\n hint: `${activationAttributes.activation};${components};${aComponents};${outputNumber};${isChannelsLast}`,\n inputDependencies: hasBias ? 
['rank', 'rank', 'rank'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n};\n\nexport const matMul = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n const outputShape = BroadcastUtil.calcShape(context.inputs[0].dims, context.inputs[1].dims, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const N = outputShape[outputShape.length - 1];\n const K = context.inputs[0].dims[context.inputs[0].dims.length - 1];\n if (N < 8 && K < 8) {\n context.compute(createNaiveMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n } else {\n context.compute(createMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DMatMulProgramInfo} from './3rd-party/conv2d_mm_webgpu';\nimport {computeConv3DInfo, createConv3DNaiveProgramInfo} from './3rd-party/conv3d_naive_webgpu';\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createGroupedConvProgramInfo, createGroupedConvVectorizeProgramInfo} from './conv-grouped';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createNaiveMatmulProgramInfo} from './matmul';\nimport {createTransposeProgramInfo} from './transpose';\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[], isChannelLast: boolean): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(isChannelLast ? 1 : 2, isChannelLast ? 3 : 4);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 
3 : 1, 0, outChannels);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly format: 'NHWC'|'NCHW';\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n readonly wIsConst: boolean;\n}\n\n// for transposing weight tensor from [M, C/group, KH, KW] to [KH, KW, C/group, M]\nconst weightTransposeAttribute = [2, 3, 1, 0];\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/master/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n if (inputs[0].dims.length > 5) {\n throw new Error('greater than 5D is not supported');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n};\n\nconst getAdjustedConvAttributes = (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n if (kernelShape[i - 2] === 0) {\n kernelShape[i - 2] = inputs[1].dims[i];\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.format === 'NHWC',\n attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads});\n return newAttributes;\n};\n\nexport const parseConvAttributes = (attributes: Record): ConvAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default 
attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad = ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number];\n const dilations = attributes.dilations as number[];\n const group = attributes.group as number;\n const kernelShape = attributes.kernel_shape as number[];\n const pads = attributes.pads as number[];\n const strides = attributes.strides as number[];\n const wIsConst = (attributes.w_is_const as () => boolean)();\n\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst conv2d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n\n // check attributes\n\n // const hasPreluActivationWeights = false; /* TODO: add support for prelu activation weights */\n const isChannelsLast = attributes.format === 'NHWC';\n if (attributes.group !== 1) {\n // NVIDIA GPU with ampere architecture fails with below 2 cases, but we couldn't repro them with any other\n // GPUs. So just disable vectorize on NVIDIA ampere to ensure always correct outputs.\n // [webgpu]Conv - conv - vectorize group - B\n // [webgpu]Conv - conv - vectorize group - D\n const enableGroupedConvVectorize = !context.adapterInfo.isArchitecture('ampere');\n if (enableGroupedConvVectorize && isChannelsLast && inputs[1].dims[0] === attributes.group &&\n inputs[1].dims[1] === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1) {\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n const convInputs = [inputs[0], transposedWeight];\n if (inputs.length === 3) {\n convInputs.push(inputs[2]);\n }\n context.compute(\n createGroupedConvVectorizeProgramInfo(convInputs, adjustedAttributes, outputShape), {inputs: convInputs});\n } else {\n context.compute(createGroupedConvProgramInfo(inputs, adjustedAttributes));\n }\n return;\n }\n\n const hasBias = inputs.length === 3;\n const inputHeight = inputs[0].dims[isChannelsLast ? 1 : 2];\n const inputWidth = inputs[0].dims[isChannelsLast ? 2 : 3];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const outChannels = outputShape[isChannelsLast ? 
3 : 1];\n\n const sameSize = isChannelsLast && weightHeight === inputHeight && weightWidth === inputWidth &&\n attributes.pads[0] === 0 && attributes.pads[1] === 0;\n if (sameSize ||\n (weightHeight === 1 && weightWidth === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1 &&\n attributes.strides[0] === 1 && attributes.strides[1] === 1 && attributes.pads[0] === 0 &&\n attributes.pads[1] === 0)) {\n // conv2dByMatMul\n const batch = outputShape[0];\n let xReshaped, wReshaped, matmulOutputShape;\n const matmulInputs = [];\n if (isChannelsLast) {\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n if (sameSize) {\n const sharedDim = inputHeight * inputWidth * inputChannels;\n xReshaped = inputs[0].reshape([1, batch, sharedDim]);\n wReshaped = transposedWeight.reshape([1, sharedDim, outChannels]);\n matmulOutputShape = [1, batch, outChannels];\n } else {\n xReshaped = inputs[0].reshape([batch, inputHeight * inputWidth, inputChannels]);\n wReshaped = transposedWeight.reshape([1, inputChannels, outChannels]);\n matmulOutputShape = [batch, outHeight * outWidth, outChannels];\n }\n matmulInputs.push(xReshaped);\n matmulInputs.push(wReshaped);\n } else {\n xReshaped = inputs[0].reshape([batch, inputChannels, inputHeight * inputWidth]);\n wReshaped = inputs[1].reshape([1, outChannels, inputChannels]);\n matmulOutputShape = [batch, outChannels, outHeight * outWidth];\n matmulInputs.push(wReshaped);\n matmulInputs.push(xReshaped);\n }\n if (hasBias) {\n matmulInputs.push(inputs[2]);\n }\n const N = matmulOutputShape[2];\n const K = matmulInputs[0].dims[matmulInputs[0].dims.length - 1];\n // Tune the threshold.\n if (N < 8 && K < 8) {\n context.compute(\n createNaiveMatmulProgramInfo(\n matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n } else {\n context.compute(\n createMatmulProgramInfo(matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n }\n return;\n }\n\n // TODO: implement conv2dWithIm2Col()\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convInputs = [inputs[0], transposedWeight];\n if (hasBias) {\n convInputs.push(inputs[2]);\n }\n\n // STEP.3: compute matmul\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n context.compute(\n createConv2DMatMulProgramInfo(\n convInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convInputs});\n};\n\nconst conv1d = (context: ComputeContext, attributes: ConvAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n const pads = [0, attributes.pads[0], 0, attributes.pads[1]];\n const strides = [1].concat(attributes.strides);\n const dilations = [1].concat(attributes.dilations);\n const kernelShape = [1].concat(attributes.kernelShape);\n const adjustedAttributes = getAdjustedConvAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createGroupedConvProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] : []));\n};\n\nconst conv3d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const format = attributes.format === 'NHWC' ? 'channelsLast' : 'channelsFirst';\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n const pads = attributes.autoPad === 'NOTSET' ? attributes.pads : attributes.autoPad;\n const convInfo = computeConv3DInfo(\n inputs[0].dims as [number, number, number, number, number],\n inputs[1].dims as [number, number, number, number, number],\n attributes.strides as number | [number, number, number],\n attributes.dilations as number | [number, number, number], pads as string | number[], false, format);\n context.compute(createConv3DNaiveProgramInfo(\n inputs, adjustedAttributes, convInfo.outShape,\n [convInfo.filterDepth, convInfo.filterHeight, convInfo.filterWidth],\n [convInfo.padInfo.front, convInfo.padInfo.top, convInfo.padInfo.left], format));\n};\n\nexport const conv = (context: ComputeContext, attributes: ConvAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n conv1d(context, attributes);\n } else if (context.inputs[0].dims.length === 5) {\n conv3d(context, context.inputs, attributes);\n } else {\n conv2d(context, context.inputs, attributes);\n }\n};\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dTransposeCommonSnippet =\n (isChannelsLast: boolean, addBias = false, attributes: ConvTransposeAttributes, type: string,\n innerElementSize = 4): string => {\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];';\n case 4:\n return `\n let coord1 = vec4(coordX, coordY, col + 1, rowInner);\n let coord2 = vec4(coordX, coordY, col + 2, rowInner);\n let coord3 = vec4(coordX, coordY, col + 3, rowInner);\n let v0 = w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\n let v1 = w[getIndexFromCoords4D(coord1, vec4(uniforms.w_shape))];\n let v2 = w[getIndexFromCoords4D(coord2, vec4(uniforms.w_shape))];\n let v3 = w[getIndexFromCoords4D(coord3, vec4(uniforms.w_shape))];\n return ${type}(v0, v1, v2, v3);\n `;\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, iXR, iXC, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, iXR, iXC);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n\n const readASnippet = `\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (uniforms.filter_dims[1] * inChannels);\n let WCol = ${col} / inChannels % uniforms.filter_dims[1];\n let xR = f32(outRow - uniforms.pads[0] + uniforms.dilations[0] * WRow) / f32(uniforms.strides[0]);\n let xC = f32(outCol - uniforms.pads[1] + uniforms.dilations[1] * WCol) / f32(uniforms.strides[1]);\n if (xR < 0.0 || xR >= f32(${xHeight}) || fract(xR) > 0.0) {\n return ${type}(0.0);\n }\n if (xC < 0.0 || xC >= f32(${xWidth}) || fract(xC) > 0.0) {\n return ${type}(0.0);\n }\n let iXR = i32(xR);\n let iXC = i32(xC);\n let xCh = ${col} % inChannels;\n ${coordASnippet}\n return x[getIndexFromCoords4D(coord, vec4(uniforms.x_shape))/${innerElementSize}];`;\n\n const sampleA = isChannelsLast ? `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readASnippet}\n }\n return ${type}(0.0);` :\n `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readASnippet}\n }\n return ${type}(0.0);`;\n\n const sampleW = `\n let col = colIn * ${innerElementSize};\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let coordX = uniforms.filter_dims[0] - 1 - row / (uniforms.filter_dims[1] * inChannels);\n let coordY = uniforms.filter_dims[1] - 1 - (row / inChannels) % uniforms.filter_dims[1];\n if (${\n isChannelsLast ? 'row < uniforms.dim_inner && col < uniforms.dim_b_outer' :\n 'row < uniforms.dim_inner && col < uniforms.dim_a_outer'} && coordX >= 0 && coordY >= 0) {\n let rowInner = row % inChannels;\n let coord = vec4(coordX, coordY, col, rowInner);\n ${getWSnippet(innerElementSize)}\n }\n return ${type}(0.0);\n `;\n\n const applyActivation = getActivationSnippet(attributes, type);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleA : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleW : sampleA}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueInput : ${type}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueInput;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${innerElementSize}] = value;\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DTransposeMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes, outputShape: readonly number[],\n dimAOuter: number, dimBOuter: number, dimInner: number, hasBias: boolean,\n sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 && inChannels % 3) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv_backprop_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? 4 : 1;\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const components = isVec4 ? 4 : 1;\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const effectiveFilterDims = [\n filterDims[0] + (attributes.dilations[0] <= 1 ? 0 : (filterDims[0] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] + (attributes.dilations[1] <= 1 ? 0 : (filterDims[1] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor((attributes.pads[1] + attributes.pads[3]) / 2)\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: attributes.strides},\n {type: DataType.int32, data: attributes.dilations}, {type: DataType.int32, data: filterDims},\n {type: DataType.int32, data: pads}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims.length, components);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, 1);\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n const inputVariables = [x, w];\n\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${bias.type.value} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'strides', type: 'i32', length: 2}, {name: 'dilations', type: 'i32', length: 2},\n {name: 'filter_dims', type: 'i32', length: filterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}\n ];\n appendActivationUniforms(attributes, uniforms);\n const elemType = tensorTypeToWsglStorageType(inputs[0].dataType, 1);\n if (elemType !== 'f16' && elemType !== 'f32') {\n throw new Error(`elemType ${elemType} is not supported.`);\n }\n return `\n ${utilFunctions('uniforms.result_strides')}\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)};\n ${declareFunctions}\n ${conv2dTransposeCommonSnippet(isChannelsLast, hasBias, attributes, x.type.value, innerElementSize)}\n ${\n isVec4 ? makeMatMulPackedVec4Source(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner, false,\n undefined, sequentialAccessByThreads)}`;\n };\n\n return {\n name: 'Conv2DTransposeMatMul',\n shaderCache:\n {hint: `${attributes.cacheKey};${elementsPerThread};${workGroupSize};${isVec4}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_webgpu.ts\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\n\nconst createConvTranspose2DOpProgramShaderSource =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], outputShape: readonly number[], hasBias: boolean,\n is1DimensionDispatch: boolean, isVec4 = false, dataType: string, uniforms: UniformsArrayType,\n isChannelsLast = false): string => {\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n const workPerThread = isVec4 ? 2 : 1;\n\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : u32, value : ${isVec4 ? `vec4<${dataType}>` : dataType}) {\n result[flatIndex] = ${isVec4 ? 
`vec4<${dataType}>` : dataType}(value);\n }`;\n if (hasBias) {\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${dataType}>` : dataType} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? '/ 4' : ''}];\n }`;\n }\n const components = isVec4 ? 4 : 1;\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const dy = inputVariable('Dy', inputs[0].dataType, inputs[0].dims.length, components);\n const inputVariables = [dy, w];\n if (hasBias) {\n inputVariables.push(inputVariable('bias', inputs[2].dataType, [outputShape[channelDim]].length, components));\n }\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n\n const codeSnippet4 = `{\n let batch: u32 = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} / uniforms.result_shape[1];\n let r = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} % uniforms.result_shape[1];\n let c = ${is1DimensionDispatch ? 'global_id.y' : 'workgroup_id.y'} * ${workPerThread};\n let d1: u32 = ${is1DimensionDispatch ? 'global_id.x' : 'workgroup_id.x'} * 4;\n\n let dyCorner = vec2(i32(r), i32(c)) - vec2(uniforms.pads);\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd: array, ${workPerThread}>;\n for (var i = 0; i < ${workPerThread}; i++) {\n dotProd[i] = vec4<${dataType}>(0.0);\n }\n for (var wR: u32 = 0; wR < uniforms.filter_dims[0]; wR = wR + 1) {\n var dyR = (${dataType}(dyCorner.x) + ${dataType}(wR)) / ${dataType}(uniforms.strides.x);\n let wRPerm = uniforms.filter_dims[0] - 1 - wR;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[1]) ||\n fract(dyR) > 0.0 || wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.filter_dims[1]; wC = wC + 1) {\n let dyC = (${dataType}(dyCorner.y) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let dyC2 = (${dataType}(dyCorner.y) + 1.0 + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims[1] - 1 - wC;\n if (wCPerm < 0) {\n continue;\n }\n var bDyCVal = true;\n var bDyCVal2 = true;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC) > 0.0) {\n bDyCVal = false;\n }\n if (dyC2 < 0.0 || dyC2 >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC2) > 0.0) {\n bDyCVal2 = false;\n }\n\n let idyC: u32 = u32(dyC);\n let idyC2: u32 = u32(dyC2);\n if (bDyCVal && bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2 :u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n\n xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n\n dotProd[1] = dotProd[1] + vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n }\n } else if (bDyCVal) {\n let d2Length = uniforms.Dy_shape[${channelDim}];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = 
${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n }\n } else if (bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[1] = dotProd[1] + tmpval;\n }\n }\n }\n }\n\n for (var i: u32 = 0; i < ${workPerThread}; i = i + 1) {\n let value = dotProd[i] + ${hasBias ? 'bias[c+i]' : `vec4<${dataType}>(0.0)`};\n ${output.set('batch', 'r', 'c + i', 'd1', 'value')};\n }\n }`;\n const codeSnippet = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch = ${output.indicesGet('outputIndices', 0)};\n let d1 = ${output.indicesGet('outputIndices', channelDim)};\n let r = ${output.indicesGet('outputIndices', rowDim)};\n let c = ${output.indicesGet('outputIndices', colDim)};\n let dyCorner = vec2(i32(r), i32(c)) - uniforms.pads;\n let dyRCorner = dyCorner.x;\n let dyCCorner = dyCorner.y;\n let groupId = d1 / uniforms.output_channels_per_group;\n let wOutChannel = d1 - groupId * uniforms.output_channels_per_group;\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd = ${dataType}(0.0);\n for (var wR: u32 = 0; wR < uniforms.effective_filter_dims.x; wR = wR + 1) {\n if (wR % uniforms.dilations.x != 0) {\n continue;\n }\n let dyR = (${dataType}(dyRCorner) + ${dataType}(wR)) / ${dataType}(uniforms.strides[0]);\n let wRPerm = uniforms.filter_dims.x - 1 - wR / uniforms.dilations.x;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[${rowDim}]) || fract(dyR) > 0.0 ||\n wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.effective_filter_dims.y; wC = wC + 1) {\n if (wC % uniforms.dilations.y != 0) {\n continue;\n }\n let dyC = (${dataType}(dyCCorner) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims.y - 1 - wC / uniforms.dilations.y;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[${colDim}]) ||\n fract(dyC) > 0.0 || wCPerm < 0) {\n continue;\n }\n let idyC: u32 = u32(dyC);\n var inputChannel = groupId * uniforms.input_channels_per_group;\n for (var d2: u32 = 0; d2 < uniforms.input_channels_per_group; d2 = d2 + 1) {\n let xValue = ${\n isChannelsLast ? dy.get('batch', 'idyR', 'idyC', 'inputChannel') :\n dy.get('batch', 'inputChannel', 'idyR', 'idyC')};\n let wValue = ${w.get('inputChannel', 'wOutChannel', 'u32(wRPerm)', 'u32(wCPerm)')};\n dotProd = dotProd + xValue * wValue;\n inputChannel = inputChannel + 1;\n }\n }\n }\n let value = dotProd + ${hasBias ? 
'bias[d1]' : `${dataType}(0.0)`};\n ${output.setByOffset('global_idx', 'value')};\n `;\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')};\n ${isVec4 ? codeSnippet4 : codeSnippet}}`;\n };\n\nexport const createConvTranspose2DProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n // const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = attributes.outputShape;\n const outputSize = ShapeUtil.size(outputShape);\n\n // const inChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // TODO Enable isVec4 for performance\n // Disabled due to weight matrix layout issue\n // const isVec4 = attributes.group === 1 && isChannelsLast && inChannels % 4 === 0 && outChannels % 4 === 0;\n const dispatch = [\n Math.ceil(outputSize / 64),\n 1,\n 1,\n ];\n LOG_DEBUG('verbose', () => `[conv2d_backprop_webgpu] dispatch = ${dispatch}`);\n\n const isChannelsLast = attributes.format === 'NHWC';\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const strides = [attributes.strides[0], attributes.strides[1]];\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const dilations = [attributes.dilations[0], attributes.dilations[1]];\n const effectiveFilterDims = [\n filterDims[0] +\n (attributes.dilations[0] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 1 : 2] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] +\n (attributes.dilations[1] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 
2 : 3] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor(attributes.pads[1] + attributes.pads[3]) / 2\n ];\n\n const isVec4 = false;\n const group = attributes.group;\n const wShape = inputs[1].dims;\n const inputChannelsPerGroup = wShape[0] / group;\n const outputChannelsPerGroup = wShape[1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: strides},\n {type: DataType.uint32, data: filterDims}, {type: DataType.uint32, data: dilations},\n {type: DataType.uint32, data: effectiveFilterDims}, {type: DataType.int32, data: pads},\n {type: DataType.uint32, data: inputChannelsPerGroup}, {type: DataType.uint32, data: outputChannelsPerGroup},\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims)\n ];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const is1DimensionDispatch = dispatch[1] === 1 && dispatch[2] === 1;\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'strides', type: 'u32', length: strides.length},\n {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'dilations', type: 'u32', length: filterDims.length},\n {name: 'effective_filter_dims', type: 'u32', length: effectiveFilterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}, {name: 'input_channels_per_group', type: 'u32'},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n return `${\n createConvTranspose2DOpProgramShaderSource(\n shaderHelper, inputs, outputShape, hasBias, is1DimensionDispatch, isVec4, dataType, uniforms,\n isChannelsLast)}`;\n };\n return {\n name: 'ConvTranspose2D',\n shaderCache: {hint: `${attributes.cacheKey};`, inputDependencies},\n getRunData: () => ({\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n outputs: [{\n dims: squeezeOutputShapeFunction ? squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n programUniforms\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DTransposeMatMulProgramInfo} from './3rd-party/conv_backprop_mm_webgpu';\nimport {createConvTranspose2DProgramInfo} from './3rd-party/conv_backprop_webgpu';\nimport {ConvAttributes} from './conv';\nimport {parseInternalActivationAttributes} from './fuse-utils';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n group: number, pads: number[], strides: readonly number[], isChannelLast: boolean, outputPadding: number[],\n outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateOutputShape = outputShape.length === 0;\n if (outputPadding.length === 0) {\n for (let i = 0; i < spatialRank; ++i) {\n outputPadding.push(0);\n }\n }\n const batchSize = inputShape[0];\n const outChannels = kernelShape[isChannelLast ? 3 : 1] * group;\n for (let i = 0, j = inputShape.length - spatialRank - (isChannelLast ? 1 : 0); i < spatialRank; ++i, ++j) {\n const inSize = inputShape[j];\n const outSize = updateOutputShape ? inSize * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inSize, strides[i], pads[i], kernelShape[j], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateOutputShape) {\n outputShape.push(\n strides[i] * (inSize - 1) + outputPadding[i] + (kernelShape[j] - 1) * dilations[i] + 1 - pads[i] -\n pads[i + spatialRank]);\n }\n }\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 3 : 1, 0, outChannels);\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nconst getAdjustedConvTransposeAttributes =\n (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0 || attributes.kernelShape.reduce((a, b) => a * b, 1) === 0) {\n kernelShape.length = 0;\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const isChannelsLast = attributes.format === 'NHWC';\n kernelShape.splice(0, 0, inputs[1].dims[0]);\n kernelShape.splice(isChannelsLast ? 
3 : 1, 0, inputs[1].dims[1]);\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const outputPadding = attributes.outputPadding.slice();\n const inputShape = inputs[0].dims;\n let dilations = attributes.dilations.slice();\n if (dilations.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n dilations = new Array(spatialRank).fill(1);\n }\n let strides = attributes.strides.slice();\n if (strides.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n strides = new Array(spatialRank).fill(1);\n }\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, dilations, attributes.autoPad, attributes.group, pads, strides, isChannelsLast,\n outputPadding, outputShape);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputPadding, outputShape, dilations, strides});\n return newAttributes;\n };\n\nexport const parseConvTransposeAttributes = (attributes: Record): ConvTransposeAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad =\n ['NOTSET', 'VALID', 'SAME_UPPER',\n 'SAME_LOWER'][typeof attributes.autoPad == 'undefined' ? 0 : attributes.autoPad as number];\n const dilations = attributes.dilations as [number, number];\n const group = attributes.group as number;\n const kernelShape = attributes.kernelShape as [number, number];\n const pads = attributes.pads as [number, number, number, number];\n const strides = attributes.strides as [number, number];\n const wIsConst = (attributes.wIsConst as () => boolean)();\n const outputPadding = attributes.outputPadding as [number, number, number, number];\n const outputShape = attributes.outputShape as [number, number];\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n outputPadding,\n outputShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#ConvTranspose\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 && inputs[0].dims.length !== 3) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? 
inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n const dilationsSet = attributes.dilations.reduce((a, b) => a + b, 0) > 0;\n // wrong dilations dimension\n if (dilationsSet && attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n const stridesSet = attributes.strides.reduce((a, b) => a + b, 0) > 0;\n // Wrong strides dimension\n if (stridesSet && attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n const padsSet = attributes.pads.reduce((a, b) => a + b, 0) > 0;\n if (padsSet && attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank && attributes.outputPadding.length !== 0) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n const kernelShapeSet = attributes.kernelShape.reduce((a, b) => a + b, 0) > 0;\n if (kernelShapeSet && attributes.kernelShape.length !== 0 &&\n attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n};\n\n// for transposing weight tensor from [C, M/group, KH, KW] to [KH, KW, M/group, C]\nconst weightTransposePerm = [2, 3, 1, 0];\n\nconst convTranspose2d =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = adjustedAttributes.outputShape;\n const outChannels = outputShape[isChannelsLast ? 3 : 1];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // Switch to naive method when outChannels and inputChannels are very small. It's because that in this case it's\n // not suitable for matmul version since matmul uses tile size 32x32 resulting the underlying execution unit\n // utilization rate is very low.\n if (adjustedAttributes.group !== 1 || (outChannels === 1 && inputChannels === 1)) {\n context.compute(createConvTranspose2DProgramInfo(inputs, adjustedAttributes));\n return;\n }\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposePerm),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convTransposeInputs = [inputs[0], transposedWeight];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n if (!isChannelsLast && inputs[2].dims.length === 1) {\n convTransposeInputs.push(inputs[2].reshape([inputs[2].dims[0], 1, 1]));\n } else {\n convTransposeInputs.push(inputs[2]);\n }\n }\n\n // STEP.3: compute matmul\n context.compute(\n createConv2DTransposeMatMulProgramInfo(\n convTransposeInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convTransposeInputs});\n };\n\nconst convTranspose1d = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n let kernelShape = attributes.kernelShape;\n if (kernelShape.length === 0 || kernelShape[0] === 0) {\n kernelShape = [context.inputs[1].dims[2]];\n }\n let dilations = attributes.dilations;\n if (dilations.length === 0 || dilations[0] === 0) {\n dilations = [1];\n }\n let strides = attributes.strides;\n if (strides.length === 0 || strides[0] === 0) {\n strides = [1];\n }\n let pads = attributes.pads;\n if (pads.length === 0) {\n pads = [0, 0];\n }\n pads = [0, pads[0], 0, pads[1]];\n strides = [1].concat(strides);\n dilations = [1].concat(dilations);\n kernelShape = [1].concat(kernelShape);\n const adjustedAttributes =\n getAdjustedConvTransposeAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createConvTranspose2DProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] :\n [outputShape[0], outputShape[1], outputShape[3]]));\n};\n\nexport const convTranspose = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n convTranspose1d(context, attributes);\n } else {\n convTranspose2d(context, context.inputs, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper} from './common';\n\n\nexport interface CumSumAttributes extends AttributeWithCacheKey {\n readonly exclusive: boolean;\n readonly reverse: boolean;\n}\nconst createCumsumProgramInfo =\n (inputType: number, inputShape: readonly number[], axisInput: TensorView, attributes: CumSumAttributes):\n ProgramInfo => {\n const outputSize = ShapeUtil.size(inputShape); // outputShape is same as inputShape.\n const rank = inputShape.length; // input/output rank\n const input = inputVariable('input', inputType, rank);\n const output = outputVariable('output', inputType, rank);\n const axisValue = axisInput.dataType === DataType.int32 ? axisInput.getInt32Array()[0] :\n Number(axisInput.getBigInt64Array()[0]);\n const axis = ShapeUtil.normalizeAxis(axisValue, rank);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const index = ` i32(${input.indicesGet('inputIndices', 'uniforms.axis')}) `;\n const max = getElementAt('uniforms.input_shape', 'uniforms.axis', rank);\n const lowerLimit = attributes.reverse ? index + (attributes.exclusive ? ' + 1' : '') : '0';\n const upperLimit = attributes.reverse ? max : index + (attributes.exclusive ? '' : ' + 1');\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var inputIndices = ${output.offsetToIndices('global_idx')};\n var sum = ${output.type.value}(0);\n let first : i32 = ${lowerLimit};\n let last : i32 = ${upperLimit};\n for (var i : i32 = first; i < last; i++) {\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(i)')};\n sum = sum + ${input.getByIndices('inputIndices')};\n }\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'CumSum',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: inputShape, dataType: inputType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: axis},\n ...createTensorShapeVariables(inputShape, inputShape)\n ]\n\n }),\n getShaderSource\n };\n };\n\n\nexport const cumsum = (context: ComputeContext, attributes: CumSumAttributes): void => {\n const inputShape = context.inputs[0].dims;\n const inputType = context.inputs[0].dataType;\n const axis = context.inputs[1];\n context.compute(createCumsumProgramInfo(inputType, inputShape, axis, attributes), {inputs: [0]});\n};\n\nexport const parseCumSumAttributes = (attributes: Record): CumSumAttributes => {\n const exclusive = attributes.exclusive as number === 1;\n const reverse = attributes.reverse as number === 1;\n return createAttributeWithCacheKey({exclusive, reverse});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface DepthToSpaceAttributes extends FormatAttributes, AttributeWithCacheKey {\n readonly blocksize: number;\n readonly mode: string;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('DepthToSpace requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('DepthToSpace requires 4D input.');\n }\n};\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nconst createDepthToSpaceProgramInfo = (inputTensor: TensorView, attributes: DepthToSpaceAttributes): ProgramInfo => {\n let n: number, h: number, w: number, c: number;\n let shape: number[];\n let perm: number[];\n const isChannelLast = attributes.format === 'NHWC';\n const blocksize = attributes.blocksize;\n const isDCRmode = attributes.mode === 'DCR';\n if (isChannelLast) {\n [n, h, w, c] = inputTensor.dims;\n shape = isDCRmode ? [n, h, w, blocksize, blocksize, c / (blocksize ** 2)] :\n [n, h, w, c / (blocksize ** 2), blocksize, blocksize];\n perm = isDCRmode ? [0, 1, 3, 2, 4, 5] : [0, 1, 4, 2, 5, 3];\n } else {\n [n, h, w, c] = [inputTensor.dims[0], inputTensor.dims[2], inputTensor.dims[3], inputTensor.dims[1]];\n shape = isDCRmode ? [n, blocksize, blocksize, c / (blocksize ** 2), h, w] :\n [n, c / (blocksize ** 2), blocksize, blocksize, h, w];\n perm = isDCRmode ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n }\n const reshapedInputTensor = inputTensor.reshape(shape);\n const reshapedInputRank = reshapedInputTensor.dims.length;\n const inputDataType = inputTensor.dataType;\n\n const reshapedInput = inputVariable('a', inputDataType, reshapedInputRank);\n const permedOutput = outputVariable('output', inputDataType, reshapedInputRank);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(reshapedInput, permedOutput)}\n\n ${permFunctionBody(perm, reshapedInputRank, reshapedInput, permedOutput)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${permedOutput.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${permedOutput.setByOffset('global_idx', reshapedInput.getByIndices('aIndices'))}\n }`;\n\n return {\n name: 'DepthToSpace',\n shaderCache: {hint: `${inputTensor.dims};${attributes.blocksize};${attributes.mode}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputShape = isChannelLast ? 
[n, h * blocksize, w * blocksize, c / (blocksize ** 2)] :\n [n, c / (blocksize ** 2), h * blocksize, w * blocksize];\n const outputSize = ShapeUtil.size(outputShape);\n const shapeBeforePerm = reshapedInputTensor.dims;\n const shapeAfterPerm = ShapeUtil.sortBasedOnPerm(shapeBeforePerm, perm);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(shapeBeforePerm, shapeAfterPerm)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const depthToSpace = (context: ComputeContext, attributes: DepthToSpaceAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createDepthToSpaceProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseDepthToSpaceAttributes = (attributes: Record): DepthToSpaceAttributes =>\n createAttributeWithCacheKey({\n blocksize: attributes.blocksize as number,\n mode: attributes.mode as string,\n format: attributes.format as 'NHWC' | 'NCHW'\n });\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface EinsumAttributes extends AttributeWithCacheKey {\n readonly equation: string;\n}\n// The equation attribute value is a string which consists of left hand side (LHS) and optionally right hand side (RHS)\n// separated by '->'. Ex. \"ij,jk -> ik\" expresses matrix multiplication\n// \"ij->ji\" expresses matrix transpose\n// \"ii->i\" diagonal elements of a square matrix\n// LHS consists of a sequence of terms separated by commas. Each term corresponds to an input variable.\n// Each symbol corresponds to a dimension in the input variable. The symbol can be either a letter, 'a' to 'z' or 'A' to\n// 'Z' or '...' 
to represent arbitrary dimensions.\n\nconst symbolPattern =\n '[a-zA-Z]|\\\\.\\\\.\\\\.'; // The pattern each symbol in each term in the symbolic equation should match\nconst termPattern = '(' + symbolPattern + ')+'; // The pattern each term in the symbolic equation should match\nconst termPatternOnly = '^' + termPattern + '$'; // The patterns only matchs a term begin to end.\nconst lhsPattern = '(' + termPattern + ',)*' + termPattern; // The pattern the LHS should match\nconst lhsPatternOnly = '^' + lhsPattern + '$'; // The patterns only matchs a LHS begin to end.\n\ninterface SymbolInfo {\n count: number; // Symbol corresponding to a dimmension of an input\n inputIndices: number[]; // Number of input variables the symbol corresponds to\n dimValue: number; // Number of dimensions the symbol corresponds to\n}\n\nclass EinsumTerm {\n constructor(inputIndex = -1) {\n this.symbolToIndices = new Map();\n this.inputIndex = inputIndex;\n }\n\n // Add a symbol to the term\n addSymbol(symbol: string, index: number) {\n let value = this.symbolToIndices.get(symbol);\n if (value === undefined) {\n value = [index];\n } else {\n value.push(index);\n }\n this.symbolToIndices.set(symbol, value);\n }\n\n symbolToIndices: Map; // Map from symbol to dimensions of the input corresponding to the term\n inputIndex: number; // -1 for output and 0, 1, 2, ... for inputs\n}\n\nclass EinsumEquation {\n constructor(inputs: readonly TensorView[], public readonly equation: string) {\n this.hasEllipsis = false;\n this.symbolToInfo = new Map();\n this.lhs = new Array();\n this.outputDims = [];\n // As rhs needs to be updated allow using let instead of const for both lhs and rhs.\n // eslint-disable-next-line prefer-const\n let [lhs, rhs] = equation.includes('->') ? equation.split('->', 2) : [equation, ''];\n if (!lhs.match(RegExp(lhsPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const inputTerms = lhs.split(',');\n inputTerms.forEach((inputTerm, index) => {\n const dims = inputs[index].dims.slice();\n if (!inputTerm.match(RegExp(termPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const einsumTerm = this.processTerm(inputTerm, true, dims, index);\n this.lhs.push(einsumTerm);\n });\n\n // Initialize the RHS if not specified\n if (rhs === '') {\n // Construct RHS from LHS terms/symbols\n rhs += [...this.symbolToInfo.entries()]\n .filter(([sym, info]) => (info.count === 1 || sym === '...'))\n .map(([sym]) => sym)\n .join('');\n } else {\n if (!rhs.match(RegExp(termPattern))) {\n throw new Error('Invalid RHS');\n }\n }\n\n // Compute output dims\n const rhsSymbols = rhs.match(RegExp(symbolPattern, 'g'));\n rhsSymbols?.forEach((symbol) => {\n if (symbol === '...') {\n this.outputDims = this.outputDims.concat(this.ellipsisDims);\n } else {\n const info = this.symbolToInfo.get(symbol);\n if (info === undefined) {\n throw new Error('Invalid RHS symbol');\n }\n this.outputDims.push(info.dimValue);\n }\n });\n this.rhs = this.processTerm(rhs, false, this.outputDims);\n } // End of EinsumEqation constructor\n\n // Add a symbol to the equation\n addSymbol(symbol: string, dimValue: number, inputIndex: number) {\n let info = this.symbolToInfo.get(symbol);\n if (info !== undefined) {\n if (info.dimValue !== dimValue && info.count !== 1) {\n throw new Error('Dimension mismatch');\n } else {\n info.count++;\n info.inputIndices.push(inputIndex);\n }\n } else {\n info = {count: 1, dimValue, inputIndices: [inputIndex]};\n }\n this.symbolToInfo.set(symbol, info);\n }\n\n // Process one input/output term\n 
processTerm(term: string, isInput: boolean, dims: readonly number[], index = -1): EinsumTerm {\n const rank = dims.length;\n let ellipsis = false;\n let ellipsisDims = [];\n let nextDim = 0;\n // For output empty string is allowed because the output may be reduced to a scalar value\n if (!term.match(RegExp(termPatternOnly)) && (!isInput && term !== '')) {\n throw new Error('Invalid LHS term');\n }\n const indexSymbols = term.match(RegExp(symbolPattern, 'g'));\n const einsumTerm = new EinsumTerm(index);\n // symbol can be either a lettre, 'a' to 'z' or 'A' to 'Z', or '...'\n indexSymbols?.forEach((symbol: string, i: number) => {\n if (symbol === '...') {\n if (ellipsis) {\n throw new Error('Only one ellipsis is allowed per input term');\n }\n ellipsis = true;\n const ellipsisDimLength = rank - indexSymbols.length + 1;\n if (ellipsisDimLength < 0) {\n throw new Error('Ellipsis out of bounds');\n }\n ellipsisDims = dims.slice(nextDim, nextDim + ellipsisDimLength);\n if (this.hasEllipsis) {\n if (this.ellipsisDims.length !== ellipsisDims.length ||\n this.ellipsisDims.toString() !== ellipsisDims.toString()) {\n throw new Error('Ellipsis dimensions mismatch');\n }\n } else if (isInput) {\n this.hasEllipsis = true;\n this.ellipsisDims = ellipsisDims;\n } else {\n throw new Error('Ellipsis must be specified in the LHS');\n }\n // Add '0', '1', '2', '3', '4', etc to represent ellipsis dimensions to avoid special handling\n for (let j = 0; j < ellipsisDims.length; j++) {\n const symbol = String.fromCharCode('0'.charCodeAt(0) + j);\n einsumTerm.addSymbol(symbol, i + j);\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n } else {\n einsumTerm.addSymbol(symbol, i + (this.hasEllipsis ? this.ellipsisDims.length - 1 : 0));\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n });\n return einsumTerm;\n }\n\n symbolToInfo: Map; // All symbols in the equation\n hasEllipsis: boolean; // The equation has ellipsis or not\n ellipsisDims: number[]; // The dimensions of the equation ellipsis corresponds to.\n lhs: EinsumTerm[]; // Terms on the left-hand side of the equation\n rhs: EinsumTerm; // Term on the right-hand side of the equation\n outputDims: number[]; // Output dimensions of the equation\n} // End of class EinsumEquation\n\nconst appendMax = (name: string): string => name + '_max';\n\nconst createEinsumProgramInfo =\n (inputShapes: Array, dataType: number, einsumEquation: EinsumEquation,\n outputShape: readonly number[]): ProgramInfo => {\n const ranks = inputShapes.map((dims) => dims.length);\n const inputVars = ranks.map((rank, index) => inputVariable(`input${index}`, dataType, rank));\n const outputSize = ShapeUtil.size(outputShape);\n const output = outputVariable('output', dataType, outputShape.length);\n const uniformsSymbols =\n [...einsumEquation.symbolToInfo.keys()].filter((symbol) => !einsumEquation.rhs.symbolToIndices.has(symbol));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = [];\n const initProd = 'var prod = 1.0;';\n const initSum = 'var sum = 0.0;';\n const updateSum = 'sum += prod;';\n const reduceOpsSetIndices: string[] = [];\n const reduceOpsLoopHeaders: string[] = [];\n const reduceOpsLoopFooters: string[] = [];\n const reduceOpCompute: string[] = [];\n const isReduceOpsWithoutLoop = einsumEquation.symbolToInfo.size === einsumEquation.rhs.symbolToIndices.size;\n einsumEquation.symbolToInfo.forEach((info, symbol) => {\n if (einsumEquation.rhs.symbolToIndices.has(symbol)) {\n const outputIndex = 
einsumEquation.rhs.symbolToIndices.get(symbol)?.[0];\n if (outputIndex !== undefined) {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n idxCopy.push(`${\n inputVars[i].indicesSet(\n `input${i}Indices`, index, output.indicesGet('outputIndices', outputIndex))}`);\n });\n }\n });\n }\n } else {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n reduceOpsSetIndices.push(`${inputVars[i].indicesSet(`input${i}Indices`, index, `${symbol}`)}`);\n });\n reduceOpCompute.push(`prod *= ${inputVars[i].getByIndices(`input${i}Indices`)};`);\n }\n });\n reduceOpsLoopHeaders.push(\n `for(var ${symbol}: u32 = 0; ${symbol} < uniforms.${appendMax(symbol)}; ${symbol}++) {`);\n reduceOpsLoopFooters.push('}');\n }\n });\n const reduceOps = isReduceOpsWithoutLoop ?\n [\n ...idxCopy,\n `let sum = ${inputVars.map((inputVar, i) => inputVar.getByIndices(`input${i}Indices`)).join(' * ')};`\n ] :\n [\n ...idxCopy,\n initSum,\n ...reduceOpsLoopHeaders,\n ...reduceOpsSetIndices,\n initProd,\n ...reduceOpCompute,\n updateSum,\n ...reduceOpsLoopFooters,\n ];\n return `\n ${\n shaderHelper\n .registerUniforms(uniformsSymbols.map((symbol) => ({name: `${appendMax(symbol)}`, type: 'u32'})))\n .registerUniform('outputSize', 'u32')\n .declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${output.offsetToIndices('global_idx')};\n ${inputVars.map((_var, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\\n')}\n ${reduceOps.join('\\n')};\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'Einsum',\n shaderCache: {hint: einsumEquation.equation, inputDependencies: inputShapes.map(() => 'rank')},\n getRunData: () => {\n // The symbols from uniformSymbols array are guaranteed to exist in einsumEquations.symbolToInfo map. 
The\n // filter is added to make sure that dimValue is never 0.\n const programUniformsInit: ProgramUniform[] =\n uniformsSymbols.filter((symbol) => einsumEquation.symbolToInfo.has(symbol))\n .map(\n (symbol) =>\n ({type: DataType.uint32, data: einsumEquation.symbolToInfo.get(symbol)?.dimValue || 0}));\n programUniformsInit.push({type: DataType.uint32, data: outputSize});\n const programUniforms: ProgramUniform[] =\n inputShapes.map((dims, _) => [...createTensorShapeVariables(dims)])\n .reduce((acc, inputProgramUniforms) => acc.concat(inputProgramUniforms), programUniformsInit);\n programUniforms.push(...createTensorShapeVariables(outputShape));\n return ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n });\n },\n getShaderSource,\n };\n };\n\nexport const einsum = (context: ComputeContext, attributes: EinsumAttributes): void => {\n const einsumEquation = new EinsumEquation(context.inputs, attributes.equation);\n const outputShape = einsumEquation.outputDims;\n const inputShapes = context.inputs.map((input, _) => input.dims);\n context.compute(createEinsumProgramInfo(inputShapes, context.inputs[0].dataType, einsumEquation, outputShape));\n};\n\nexport const parseEinsumAttributes = (attributes: Record): EinsumAttributes => {\n const equation = (attributes.equation as string).replace(/\\s+/g, '');\n return createAttributeWithCacheKey({equation});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Expand requires 2 input.');\n }\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n\n let shapeIndex = shape.length < inputShape.length ? 0 : shape.length - inputShape.length;\n let inputShapeIndex = inputShape.length < shape.length ? 0 : inputShape.length - shape.length;\n for (; shapeIndex < shape.length && inputShapeIndex < inputShape.length; ++shapeIndex, ++inputShapeIndex) {\n if (shape[shapeIndex] !== inputShape[inputShapeIndex] && shape[shapeIndex] !== 1 &&\n inputShape[inputShapeIndex] !== 1) {\n throw new Error('Expand requires shape to be broadcastable to input');\n }\n }\n};\n\nconst getAdjustedShape = (shape1: readonly number[], shape2: readonly number[]): number[] => {\n const diff = shape1.length - shape2.length;\n const shape: number[] = [];\n for (let i = 0; i < diff; ++i) {\n shape.push(shape1[i]);\n }\n for (let i = 0; i < shape2.length; ++i) {\n shape.push(shape2[i] === 1 ? shape1[i + diff] : shape2[i]);\n }\n return shape;\n};\n\nconst calculateOutputShape = (inputShape: readonly number[], shape: readonly number[]): number[] =>\n (inputShape.length > shape.length) ? 
getAdjustedShape(inputShape, shape) : getAdjustedShape(shape, inputShape);\n\n\nconst createExpandProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n const outputShape: number[] = calculateOutputShape(inputShape, shape);\n const dataType = inputs[0].dataType;\n const components = dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', dataType, inputShape.length, components);\n const output = outputVariable('output', dataType, outputShape.length, components);\n let assignment: string;\n if (dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n let offset${x} = ${input.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${input.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n ${output.setByOffset('global_idx', 'data')}\n }`;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let inputOffset = ${input.broadcastedIndicesToOffset('outputIndices', output)};\n ${output.setByOffset('global_idx', input.getByOffset('inputOffset'))}\n }`;\n }\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}`;\n };\n\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)];\n return {\n name: 'Expand',\n shaderCache: {hint: `${outputShape.length}`, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const expand = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createExpandProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType, UniformsArrayType, WORKGROUP_SIZE} from './common';\nimport * as unary from './unary-op';\n\n// GELU is defined as Y=0.5*X*(1+tanh(0.797885*X+0.035677*X*X*X)), where X may pre-add a bias.\n\nconst createFastGeluProgramInfo = (inputTensors: readonly TensorView[]): ProgramInfo => {\n const dataType = inputTensors[0].dataType;\n const outputSize = ShapeUtil.size(inputTensors[0].dims);\n const biasLength = ShapeUtil.size(inputTensors[1].dims);\n // can only use vec4 when bias length is multiple of 4\n const useVec4 = biasLength % 4 === 0;\n const getShaderSource = (shaderHelper: ShaderHelper): string => {\n const x = inputVariable('x', dataType, [1], 4);\n const bias = inputVariable('bias', dataType, [1], 4);\n const y = outputVariable('y', dataType, [1], 4);\n\n const uniforms: UniformsArrayType = [{name: 'output_vec_size', type: 'u32'}, {name: 'bias_size', type: 'u32'}];\n\n const singleElementBias = (i: 0|1|2|3) => `\n let bias${i}_offset: u32 = (global_idx * 4 + ${i}) % uniforms.bias_size;\n let bias${i} = ${bias.getByOffset(`bias${i}_offset / 4`)}[bias${i}_offset % 4];`;\n const biasGetExpression = useVec4 ?\n `\n let bias = ${bias.getByOffset('global_idx % (uniforms.bias_size / 4)')};` :\n `${singleElementBias(0)}${singleElementBias(1)}${singleElementBias(2)}${singleElementBias(3)}\n let bias = ${x.type.value}(bias0, bias1, bias2, bias3);`;\n\n return `${shaderHelper.registerUniforms(uniforms).declareVariables(x, bias, y)}\n\n ${unary.fastGeluImpl(tensorTypeToWsglValueType(dataType))}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_vec_size')}\n\n let x = ${x.getByOffset('global_idx')};\n ${biasGetExpression}\n let x_in = x + bias;\n ${y.setByOffset('global_idx', unary.fastGeluExpression('x_in'))}\n }`;\n };\n\n return {\n name: 'FastGeluWithBias',\n shaderCache: {hint: `${useVec4}`, inputDependencies: ['type', 'type']},\n getShaderSource,\n getRunData: (inputs) => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n programUniforms:\n [{type: DataType.uint32, data: Math.ceil(outputSize / 4)}, {type: DataType.uint32, data: biasLength}],\n dispatchGroup: {x: Math.ceil(outputSize / WORKGROUP_SIZE / 4)}\n })\n };\n};\n\nexport const fastGelu = (context: ComputeContext): void => {\n if (context.inputs.length < 2 || ShapeUtil.size(context.inputs[1].dims) === 0) {\n unary.fastGelu(context);\n } else {\n context.compute(createFastGeluProgramInfo(context.inputs));\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
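// Illustrative scalar reference (not part of the bundled module) for the tanh approximation
// quoted in the FastGelu comment above: y = 0.5 * x * (1 + tanh(0.797885 * x + 0.035677 * x^3)),
// with the optional bias added to x before the activation. `fastGeluRef` is a hypothetical name.
const fastGeluRef = (x: number, bias = 0): number => {
  const v = x + bias;
  return 0.5 * v * (1 + Math.tanh(0.797885 * v + 0.035677 * v * v * v));
};
// e.g. fastGeluRef(1) is roughly 0.841, close to the exact GELU(1) of about 0.8413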
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n};\n\nconst createGatherProgramInfo = (inputs: readonly TensorView[], attributes: GatherAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const indicesShape = inputs[1].dims;\n\n const inputRank = inputShape.length;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n\n const outputShape = inputShape.slice(0);\n outputShape.splice(axis, 1, ...indicesShape);\n\n const axisDimLimit = inputShape[axis];\n const components = inputs[0].dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}, ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const data = inputVariable('data', inputs[0].dataType, inputs[0].dims.length, components);\n const indices = inputVariable('inputIndices', inputs[1].dataType, inputs[1].dims.length);\n const output = outputVariable('output', inputs[0].dataType, outputShape.length, components);\n\n const calcDataIndices = (x: number|string): string => {\n const indicesRank = indicesShape.length;\n let calcStr = `var indicesIndices${x} = ${indices.type.indices}(0);`;\n for (let i = 0; i < indicesRank; i++) {\n calcStr += `${indicesRank > 1 ? `indicesIndices${x}[${i}]` : `indicesIndices${x}`} = ${\n outputShape.length > 1 ? `outputIndices${x}[uniforms.axis + ${i}]` : `outputIndices${x}`};`;\n }\n calcStr += `\n var idx${x} = ${indices.getByIndices(`indicesIndices${x}`)};\n if (idx${x} < 0) {\n idx${x} = idx${x} + uniforms.axisDimLimit;\n }\n var dataIndices${x} : ${data.type.indices};\n `;\n for (let i = 0, j = 0; i < inputRank; i++) {\n if (i === axis) {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = u32(idx${x});`;\n j += indicesRank;\n } else {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = ${\n outputShape.length > 1 ? 
`outputIndices${x}[${j}]` : `outputIndices${x}`};`;\n j++;\n }\n }\n return calcStr;\n };\n let assignment: string;\n if (inputs[0].dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n ${calcDataIndices(x)};\n let offset${x} = ${data.indicesToOffset(`dataIndices${x}`)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${data.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var value = vec4(0);\n ${singleAssignment('value', 0, 'u32')}\n ${singleAssignment('value', 1, 'u32')}\n ${singleAssignment('value', 2, 'u32')}\n ${singleAssignment('value', 3, 'u32')}\n ${output.setByOffset('global_idx', 'value')}\n `;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n ${calcDataIndices('')};\n let value = ${data.getByIndices('dataIndices')};\n ${output.setByOffset('global_idx', 'value')};\n `;\n }\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(data, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n ${assignment}\n }`;\n };\n return {\n name: 'Gather',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank', 'rank']},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGatherAttributes = (attributes: Record): GatherAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gather = (context: ComputeContext, attributes: GatherAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
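// Illustrative sketch (not part of the bundled module) of the Gather output-shape rule used
// above: the `axis` dimension of the data shape is replaced by the entire indices shape, and
// negative indices wrap by adding the axis length. Helper names are hypothetical.
const gatherOutputShape = (dataShape: readonly number[], indicesShape: readonly number[], axis: number): number[] => {
  const out = dataShape.slice();
  out.splice(axis, 1, ...indicesShape);
  return out;
};
const wrapGatherIndex = (idx: number, axisDimLimit: number): number => (idx < 0 ? idx + axisDimLimit : idx);
// e.g. gatherOutputShape([4, 5, 6], [2, 3], 1) yields [4, 2, 3, 6]; wrapGatherIndex(-1, 5) yields 4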
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherElementsAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('GatherElements requires 2 inputs.');\n }\n\n if (inputs[0].dims.length < 1) {\n throw new Error('GatherElements requires that the data input be rank >= 1.');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error(`GatherElements requires that the data input and\n indices input tensors be of same rank.`);\n }\n};\n\nconst createGatherElementsProgramInfo =\n (inputs: readonly TensorView[], attributes: GatherElementsAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputOutputDataType = inputs[0].dataType;\n const inputRank = inputShape.length;\n\n const indicesShape = inputs[1].dims;\n const indicesDataType = inputs[1].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n const axisDimLimit = inputShape[axis];\n\n const outputShape = indicesShape.slice(0);\n const outputSize = ShapeUtil.size(outputShape);\n\n const input = inputVariable('input', inputOutputDataType, inputRank);\n const indices = inputVariable('indicesInput', indicesDataType, indicesShape.length);\n const output = outputVariable('output', inputOutputDataType, outputShape.length);\n\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}\n ];\n programUniforms.push(...createTensorShapeVariables(inputShape, indicesShape, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n // int64 indices would be treated as little endian i32 with assumption they fall in i32 limits\n // That assumption is safe as it's not possible to allocate >2gb buffer for input tensor\n // Input data will be treated as u32 or two u32 for 8-byte tensors\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n\n var idx = ${indices.getByOffset('global_idx')};\n if (idx < 0) {\n idx = idx + uniforms.axisDimLimit;\n }\n var inputIndices = ${input.type.indices}(outputIndices);\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(idx)')};\n let value = ${input.getByIndices('inputIndices')};\n\n ${output.setByOffset('global_idx', 'value')};\n }`;\n\n return {\n name: 'GatherElements',\n shaderCache: {inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const 
parseGatherElementsAttributes = (attributes: Record): GatherElementsAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gatherElements = (context: ComputeContext, attributes: GatherElementsAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherElementsProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {GemmUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (inputs.length < 2 || inputs.length > 3) {\n throw new Error('Invaid input number.');\n }\n\n // 'C' can be of dimensionality 0, 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length > 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].dataType !== inputs[1].dataType) ||\n (inputs.length === 3 && inputs[0].dataType !== inputs[2].dataType)) {\n throw new Error('Input types are mismatched');\n }\n};\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n}\n\nconst createGemmProgramInfo = (inputs: readonly TensorView[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N, K] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}, {type: DataType.float, data: attributes.alpha},\n {type: DataType.float, data: attributes.beta}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (inputs.length === 3) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n let line = '';\n if (attributes.transA && attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[n * uniforms.K + k];';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[k * uniforms.N + n];';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[n * uniforms.K + k];';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[k * uniforms.N + n];';\n }\n\n const calculateAlpha = attributes.alpha === 1 ? 
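// Illustrative 2-D sketch (not part of the bundled module) of the GatherElements rule from the
// program further above: the output takes the shape of `indices`, and at each position the
// coordinate along `axis` is replaced by the (wrapped, possibly negative) value read from
// `indices` at that same position. `gatherElements2D` is a hypothetical helper.
const gatherElements2D = (data: number[][], indices: number[][], axis: 0|1): number[][] =>
    indices.map((row, i) => row.map((idx, j) => {
      const limit = axis === 0 ? data.length : data[0].length;
      const wrapped = idx < 0 ? idx + limit : idx;
      return axis === 0 ? data[wrapped][j] : data[i][wrapped];
    }));
// e.g. gatherElements2D([[1, 2], [3, 4]], [[0, 1], [1, 0]], 0) yields [[1, 4], [3, 2]]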
'' : 'value *= uniforms.alpha;';\n const a = inputVariable('a', inputs[0].dataType, inputs[0].dims);\n const b = inputVariable('b', inputs[1].dataType, inputs[1].dims);\n const dataType = a.type.value;\n let c: IndicesHelper|null = null;\n const variables = [a, b];\n if (inputs.length === 3) {\n c = inputVariable('c', inputs[2].dataType, inputs[2].dims.length);\n variables.push(c);\n }\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n variables.push(output);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'K', type: 'u32'},\n {name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let m = global_idx / uniforms.N;\n let n = global_idx % uniforms.N;\n\n var value = ${dataType}(0);\n for (var k: u32 = 0u; k < uniforms.K; k++) {\n ${line}\n }\n\n ${calculateAlpha}\n ${(() => {\n if (c != null) {\n return `let cOffset = ${c.broadcastedIndicesToOffset('vec2(m, n)', output)}; value += ${\n dataType}(uniforms.beta) * ${c.getByOffset('cOffset')};`;\n }\n return '';\n })()}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Gemm',\n shaderCache: {hint: `${attributes.cacheKey}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGemmAttributes = (attributes: Record): GemmAttributes => {\n const transA = attributes.transA as boolean;\n const transB = attributes.transB as boolean;\n const alpha = attributes.alpha as number;\n const beta = attributes.beta as number;\n return {transA, transB, alpha, beta, cacheKey: `${attributes.transA};${attributes.transB};${attributes.alpha === 1}`};\n};\n\nexport const gemm = (context: ComputeContext, attributes: GemmAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createGemmProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, GpuDataType, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nconst getInput = (inputs: readonly TensorView[], i: number) =>\n (inputs.length > i) && (inputs[i].dims.length > 0) && (ShapeUtil.size(inputs[i].dims)) > 0 ? 
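// Naive CPU reference (not part of the bundled module) for the Gemm shader above:
// Y[m, n] = alpha * sum_k op(A)[m, k] * op(B)[k, n] + beta * C[m, n], where op() optionally
// transposes A or B. For simplicity this sketch assumes C, when given, is already broadcast to
// an M*N buffer. `gemmRef` is a hypothetical name used only for illustration.
const gemmRef =
    (a: Float32Array, b: Float32Array, M: number, N: number, K: number, transA: boolean, transB: boolean,
     alpha = 1, beta = 0, c?: Float32Array): Float32Array => {
      const y = new Float32Array(M * N);
      for (let m = 0; m < M; m++) {
        for (let n = 0; n < N; n++) {
          let value = 0;
          for (let k = 0; k < K; k++) {
            const aVal = transA ? a[k * M + m] : a[m * K + k];
            const bVal = transB ? b[n * K + k] : b[k * N + n];
            value += aVal * bVal;
          }
          y[m * N + n] = alpha * value + (c ? beta * c[m * N + n] : 0);
        }
      }
      return y;
    };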
inputs[i] : undefined;\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = getInput(inputs, 1);\n const value = getInput(inputs, 2);\n const bias = getInput(inputs, 3);\n const keyPaddingMask = getInput(inputs, 4);\n const relativePositionBias = getInput(inputs, 5);\n const pastKey = getInput(inputs, 6);\n const pastValue = getInput(inputs, 7);\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // key_padding_mask (K/V) : (B) or (2*B + 1) or (B, L) or None\n // relative_position_bias : (B, 1, S, L)\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // bias (Q/K/V) : (D + D + D_v)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // bias (Q/K/V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n // bias (Q/K/V) : None or (D + D + D_v)\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? 
query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n if (pastKey && pastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastKey.dims[0] !== batchSize || pastKey.dims[1] !== attributes.numHeads || pastKey.dims[3] !== headSize) {\n throw new Error('Input \"past_key\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastValue.dims[0] !== batchSize || pastValue.dims[1] !== attributes.numHeads ||\n pastValue.dims[3] !== headSize) {\n throw new Error('Input \"past_value\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastKey.dims[2] !== pastValue.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have same dim 2 (past_sequence_length)');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n } else if (pastKey || pastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (key.dims[2] !== query.dims[2]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 2 (hidden_size)');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n }\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n if (bias) {\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimension');\n }\n\n if (value) {\n if (query.dims.length === 5 && query.dims[3] === 2) {\n throw new Error('bias is not allowed for packed kv.');\n }\n }\n }\n\n let maskType: AttentionMaskType = 
AttentionMaskType.none;\n if (keyPaddingMask) {\n maskType = AttentionMaskType.maskUnknown;\n const maskDims = keyPaddingMask.dims;\n if (maskDims.length === 1) {\n if (maskDims[0] === batchSize) {\n maskType = AttentionMaskType.mask1dKeySeqLen;\n } else if (maskDims[0] === 3 * batchSize + 2) {\n maskType = AttentionMaskType.mask1DKeySeqLenStart;\n }\n } else if (maskDims.length === 2 && maskDims[0] === batchSize && maskDims[1] === kvSequenceLength) {\n maskType = AttentionMaskType.mask2dKeyPadding;\n }\n if (maskType === AttentionMaskType.maskUnknown) {\n throw new Error('Input \"key_padding_mask\" shape shall be (batch_size) or (batch_size, kv_sequence_length)');\n }\n throw new Error('Mask not supported');\n }\n\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n if (keyPaddingMask) {\n throw new Error('Key padding mask is not supported');\n }\n\n if (relativePositionBias) {\n if (relativePositionBias.dims.length !== 4) {\n throw new Error('Input \"relative_position_bias\" is expected to have 4 dimensions');\n }\n if ((relativePositionBias.dims[0] !== batchSize && relativePositionBias.dims[0] !== 1) ||\n relativePositionBias.dims[1] !== attributes.numHeads || relativePositionBias.dims[2] !== sequenceLength ||\n relativePositionBias.dims[3] !== totalSequenceLength) {\n throw new Error('Input \"relative_position_bias\" shape (batch_size, 1, sequence_length, kv_sequence_length)');\n }\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n };\n};\n\nexport const parseMultiHeadAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst addBiasTranspose =\n (context: ComputeContext, qkv: TensorView, bias: TensorView, batchSize: number, sequenceLength: number,\n hiddenSize: number, biasOffset: number) => {\n const outputShape = [batchSize, sequenceLength, hiddenSize];\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: biasOffset},\n {type: DataType.uint32, data: hiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: 
ShaderHelper) => {\n const output = outputVariable('qkv_with_bias', qkv.dataType, outputShape);\n const qkvInput = inputVariable('qkv', qkv.dataType, outputShape);\n const biasInput = inputVariable('bias', bias.dataType, outputShape);\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'bias_offset', type: 'u32'}, {name: 'hidden_size', type: 'u32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(qkvInput, biasInput, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset;\n\n qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx];\n }`;\n };\n\n return context.compute(\n {\n name: 'MultiHeadAttentionAddBias',\n shaderCache: {inputDependencies: ['type', 'type']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: qkv.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [qkv, bias], outputs: [-1]})[0];\n };\n\nexport const maybeTransposeToBNSHAndAddBias =\n (context: ComputeContext, batchSize: number, numHeads: number, sequenceLength: number, headSize: number,\n input: TensorView, bias?: TensorView, biasOffset?: number) => {\n // const newDims = [];\n\n let reshapedInput = input;\n if (!bias) {\n if (input.dims.length === 3) {\n reshapedInput = input.reshape([batchSize, sequenceLength, numHeads, headSize]);\n }\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n } else {\n if (sequenceLength === 1) {\n throw new Error('AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV');\n } else {\n reshapedInput =\n addBiasTranspose(context, input, bias, batchSize, sequenceLength, numHeads * headSize, biasOffset!);\n reshapedInput = reshapedInput.reshape([batchSize, sequenceLength, numHeads, headSize]);\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n }\n }\n };\n\nexport const multiHeadAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n const query = context.inputs[0];\n const key = getInput(context.inputs, 1);\n const value = getInput(context.inputs, 2);\n const bias = getInput(context.inputs, 3);\n const keyPaddingMask = getInput(context.inputs, 4);\n const relativePositionBias = getInput(context.inputs, 5);\n const pastKey = getInput(context.inputs, 6);\n const pastValue = getInput(context.inputs, 7);\n if (query.dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (key?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n // applyAttention expects BNSH inputs\n const kvBNSH = key && value && key.dims.length === 4 && value.dims.length === 4;\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, query, bias, 0);\n\n if (kvBNSH) {\n return applyAttention(\n context, Q, key, value, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params,\n attributes);\n }\n if (!key || !value) {\n throw new Error('key and value must be provided');\n }\n const K = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.headSize, key, bias,\n params.hiddenSize);\n\n const V = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.vHeadSize, value, bias,\n 2 * params.hiddenSize);\n\n applyAttention(\n context, Q, K, V, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
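// Shape bookkeeping sketch (not part of the bundled module) for the BSD -> BNSH path used by
// maybeTransposeToBNSHAndAddBias above: (batch_size, sequence_length, num_heads * head_size) is
// reshaped to (B, S, N, H) and then transposed with perm [0, 2, 1, 3] into (B, N, S, H), the
// layout applyAttention expects. Helper names are hypothetical.
const permuteShape = (shape: readonly number[], perm: readonly number[]): number[] => perm.map((p) => shape[p]);
const bsdToBnsh = (batchSize: number, sequenceLength: number, numHeads: number, headSize: number): number[] =>
    permuteShape([batchSize, sequenceLength, numHeads, headSize], [0, 2, 1, 3]);
// e.g. bsdToBnsh(2, 128, 8, 64) yields [2, 8, 128, 64]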
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst getRepeats = (repeatsTensorView: TensorView): readonly number[] =>\n Array.from(repeatsTensorView.getBigInt64Array(), Number);\n\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 inputs.');\n }\n\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16 &&\n inputs[0].dataType !== DataType.int32 && inputs[0].dataType !== DataType.uint32) {\n throw new Error('Tile only support float, float16, int32, and uint32 data types');\n }\n\n if (inputs[1].dataType !== DataType.int64) {\n throw new Error('Tile `repeats` input should be of int64 data type');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('Tile `repeats` input should be 1-D');\n }\n\n const repeats: readonly number[] = getRepeats(inputs[1]);\n\n if (repeats.length !== inputs[0].dims.length) {\n throw new Error('Tile `repeats` input should have same number of elements as rank of input data tensor');\n }\n};\n\nconst getOutputShape = (inputShape: readonly number[], repeats: readonly number[]): readonly number[] => {\n const outputShape: number[] = [];\n\n for (let i = 0; i < inputShape.length; ++i) {\n outputShape.push(inputShape[i] * repeats[i]);\n }\n\n return outputShape;\n};\n\nexport const createTileProgramInfo = (inputs: readonly TensorView[], shape?: number[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const repeats: readonly number[] = shape == null ? 
getRepeats(inputs[1]) : shape;\n const outputShape = getOutputShape(inputShape, repeats);\n const outputSize = ShapeUtil.size(outputShape);\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, inputShape.length);\n const output = outputVariable('output', dataType, outputShape.length);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const inputShape = ${input.indices(...inputShape)};\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n for (var i = 0; i < ${inputShape.length}; i++) {\n let input_dim_i = ${input.indicesGet('uniforms.input_shape', 'i')};\n let input_dim_value = ${output.indicesGet('output_indices', 'i')} % input_dim_i;\n\n ${input.indicesSet('input_indices', 'i', 'input_dim_value')}\n }\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n\n return {\n name: 'Tile',\n shaderCache: {hint: `${repeats}`, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n }),\n getShaderSource,\n };\n};\n\nexport const tile = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createTileProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {maybeTransposeToBNSHAndAddBias} from './multihead-attention';\nimport {createTileProgramInfo} from './tile';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nexport const validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = inputs[1];\n const value = inputs[2];\n const pastKey = inputs[3];\n const pastValue = inputs[4];\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, 
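// CPU illustration (not part of the bundled module) of the Tile mapping used in the shader
// above: the output shape is the elementwise product of the input shape and `repeats`, and each
// output coordinate maps back to the input via modulo. Helper names are hypothetical.
const tileOutputShape = (inputShape: readonly number[], repeats: readonly number[]): number[] =>
    inputShape.map((d, i) => d * repeats[i]);
const tileInputIndices = (outputIndices: readonly number[], inputShape: readonly number[]): number[] =>
    outputIndices.map((idx, i) => idx % inputShape[i]);
// e.g. tileOutputShape([2, 3], [2, 1]) yields [4, 3]; tileInputIndices([3, 2], [2, 3]) yields [1, 2]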
S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n const hasPastKey = pastKey && pastKey.dims.length !== 0;\n const hasPastValue = pastValue && pastValue.dims.length !== 0;\n // TODO : this should be from attributes.\n const isPastkvBSNH = true;\n if (hasPastKey && hasPastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n if (isPastkvBSNH) {\n // For BSNH\n pastSequenceLength = pastKey.dims[1];\n maxSequenceLength = pastKey.dims[1];\n } else {\n // For BNSH\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n }\n } else if (hasPastKey || hasPastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (query.dims[2] % key.dims[2] !== 0) {\n throw new Error('Dimension 2 of \"query\" should be a multiple of \"key\"');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n 
}\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n const maskType: AttentionMaskType = AttentionMaskType.none;\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.kvNumHeads!),\n numHeads: attributes.numHeads,\n kvNumHeads: attributes.kvNumHeads,\n nReps: attributes.numHeads / attributes.kvNumHeads!,\n pastPresentShareBuffer: false,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n isPastkvBSNH,\n };\n};\n\nconst createConcatProgramInfo =\n (a: TensorView, b: TensorView|undefined, dataType: DataType, params: AttentionParameters): ProgramInfo => {\n const outputShape = [params.batchSize, params.totalSequenceLength, params.kvNumHeads!, params.headSize];\n const component = 4;\n const outputSize = ShapeUtil.size(outputShape) / component;\n const presentSequenceLength = params.totalSequenceLength;\n const output = outputVariable('present_kv', dataType, outputShape.length, component);\n const inputA = inputVariable('new_kv', a.dataType, a.dims.length, component);\n const inputB = b ? inputVariable('past_kv', b.dataType, b.dims.length, component) : undefined;\n\n const H = Math.ceil(params.headSize / component);\n const dispatch = {x: presentSequenceLength, y: a.dims[0], z: 1};\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = b ? 
['rank', 'rank'] : ['rank'];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: params.pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength},\n {type: DataType.uint32, data: params.totalSequenceLength}\n ];\n\n const inputs = [inputA];\n if (inputB) {\n programUniforms.push(\n ...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(b!.dims),\n ...createTensorShapeVariables(outputShape));\n inputs.push(inputB);\n } else {\n programUniforms.push(...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(outputShape));\n }\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'past_seqlen', type: 'u32'}, {name: 'new_seqlen', type: 'u32'},\n {name: 'present_seqlen', type: 'u32'}\n ];\n\n const pastStr = ` let past_batch_stride = uniforms.past_seqlen * num_heads * H;\n var past_head_stride = uniforms.past_seqlen * H;\n if (is_bsnh) {\n past_head_stride = H;\n }\n let in_offset = b * past_batch_stride + s * row_stride + n * past_head_stride + h;\n present_kv[out_offset] = past_kv[in_offset];`;\n const newStr = ` let new_batch_stride = uniforms.new_seqlen * num_heads * H;\n let new_row_stride = num_heads * H;\n let new_head_stride = H;\n let in_offset = b * new_batch_stride + (s - past_seqlen) * new_row_stride + n * new_head_stride + h;\n present_kv[out_offset] = new_kv[in_offset];`;\n const concatStr = b ? `if (s < past_seqlen) {\n ${pastStr}\n } else if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }` :\n `if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }`;\n\n // TODO: handle H * params.kvNumHeads greater than maxComputeInvocationsPerWorkgroup limit.\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputs, output)}\n ${shaderHelper.mainStart([\n H, params.kvNumHeads!, 1\n ])}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var indices = ${output.offsetToIndices('global_idx')};\n let h = local_id.x;\n let n = local_id.y;\n let s = workgroup_id.x;\n let b = workgroup_id.y;\n let num_heads = ${params.kvNumHeads!}u;\n let H = ${H}u;\n\n let present_seqlen = uniforms.present_seqlen;\n let present_batch_stride = present_seqlen * num_heads * H;\n var row_stride = H;\n let is_bsnh = ${params.isPastkvBSNH};\n\n if (is_bsnh) {\n row_stride = num_heads * H;\n }\n var present_head_stride = present_seqlen * H;\n if (is_bsnh) {\n present_head_stride = H;\n }\n\n let past_seqlen = uniforms.past_seqlen;\n\n let out_offset = b * present_batch_stride + s * row_stride + n * present_head_stride + h;\n ${concatStr}\n }`;\n\n return {\n name: 'ConcatPastNew',\n shaderCache: {hint: `${params.kvNumHeads!}${H}${!!b}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: dispatch,\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const parseGroupQueryAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst maybeExpandAndTransposeToBNSH =\n (context: ComputeContext, input: TensorView, pastKV: TensorView|undefined, params: AttentionParameters,\n outputIndex: number) => {\n let reshapedInput = input;\n const numHeads = params.kvNumHeads!;\n const nReps = params.nReps!;\n if (input.dims.length === 3 && 
params.kvSequenceLength !== 0) {\n reshapedInput = input.reshape([params.batchSize, params.kvSequenceLength, numHeads, params.headSize]);\n }\n\n if (pastKV) {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, pastKV, reshapedInput.dataType, params),\n {inputs: [reshapedInput, pastKV], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n } else {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, undefined, reshapedInput.dataType, params),\n {inputs: [reshapedInput], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n }\n if (nReps !== 1) {\n reshapedInput = context.compute(\n createTileProgramInfo([reshapedInput], [1, 1, 1, nReps]), {inputs: [reshapedInput], outputs: [-1]})[0];\n reshapedInput =\n reshapedInput.reshape([params.batchSize, params.totalSequenceLength, numHeads * nReps, params.headSize]);\n }\n\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n };\n\nexport const groupQueryAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (context.inputs[1]?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, context.inputs[0], undefined,\n 0);\n const pastKey = context.inputs[3] && context.inputs[3].dims.length !== 0 ? context.inputs[3] : undefined;\n const pastValue = context.inputs[4] && context.inputs[4].dims.length !== 0 ? context.inputs[4] : undefined;\n const K = maybeExpandAndTransposeToBNSH(context, context.inputs[1], pastKey, params, 1);\n const V = maybeExpandAndTransposeToBNSH(context, context.inputs[2], pastValue, params, 2);\n applyAttention(context, Q, K, V, undefined, undefined, undefined, undefined, undefined, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
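// Shape sketch (not part of the bundled module) for the KV-head expansion performed by
// maybeExpandAndTransposeToBNSH above: with grouped-query attention, each of the kvNumHeads
// key/value heads is tiled nReps = numHeads / kvNumHeads times so K and V line up with the
// query heads before applyAttention. `expandedKvShape` is a hypothetical helper.
const expandedKvShape =
    (batchSize: number, totalSequenceLength: number, kvNumHeads: number, headSize: number, numHeads: number):
        number[] => {
      const nReps = numHeads / kvNumHeads;
      // after Tile with repeats [1, 1, 1, nReps] and a reshape, the head axis becomes kvNumHeads * nReps
      return [batchSize, totalSequenceLength, kvNumHeads * nReps, headSize];
    };
// e.g. expandedKvShape(1, 256, 4, 64, 32) yields [1, 256, 32, 64]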
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface InstanceNormAttributes {\n epsilon: number;\n format: 'NHWC'|'NCHW';\n}\n\nconst createInstanceNormProgramInfo =\n (inputs: readonly TensorView[], attributes: InstanceNormAttributes): ProgramInfo => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const axis = 2;\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n const components = getMaxComponents(normSize);\n const normPackedSize = normSize / components;\n const inputShape = [xShape[0], xShape[1], normPackedSize];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'type', 'type'];\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: normSize}, {type: DataType.uint32, data: normPackedSize}];\n programUniforms.push(...createTensorShapeVariables(inputShape, inputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputShape.length, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const output = outputVariable('output', inputs[0].dataType, inputShape.length, components);\n const variables = [x, scale, bias, output];\n const dataType = x.type.value;\n const f32Type = components === 1 ? 
'f32' : `vec${components}`;\n const workgroupSize = 64;\n\n const uniforms: UniformsArrayType = [{name: 'normSize', type: 'u32'}, {name: 'normPackedSize', type: 'u32'}];\n return `\n var meanShared : f32;\n var squaredNormShared : f32;\n var workgroupShared : array<${f32Type}, ${workgroupSize}>;\n const workgroupSize = ${workgroupSize}u;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart(workgroupSize)}\n let norm = global_idx / workgroupSize;\n let batch = norm / uniforms.x_shape[1];\n let channel = norm % uniforms.x_shape[1];\n let localIndex = local_id.x;\n\n // initialize workgroup memory\n var initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n initial = initial + ${f32Type}(${x.get('batch', 'channel', 'h')});\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the mean of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n meanShared = ${sumVector('workgroupShared[0]', components)} / f32(uniforms.normSize);\n }\n workgroupBarrier();\n\n // reinitialize workgroup memory.\n initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let deviation = ${f32Type}(${x.get('batch', 'channel', 'h')}) - ${f32Type}(meanShared);\n initial = initial + deviation * deviation;\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the sum of square of deviation of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n squaredNormShared = ${sumVector('workgroupShared[0]', components)};\n }\n workgroupBarrier();\n\n let invStdDev = inverseSqrt(squaredNormShared / f32(uniforms.normSize) + f32(${attributes.epsilon}));\n let channelScale = invStdDev * f32(${scale.getByOffset('channel')});\n let channelShift = f32(${bias.getByOffset('channel')}) - meanShared * channelScale;\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let value = ${x.get('batch', 'channel', 'h')} * ${dataType}(${f32Type}(channelScale)) + ${dataType}(${\n f32Type}(channelShift));\n ${output.set('batch', 'channel', 'h', 'value')};\n }\n }`;\n };\n return {\n ...{name: 'InstanceNormalization'},\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${attributes.epsilon};${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: normCount},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nconst computeMean =\n (context: ComputeContext, input: TensorView, scale: TensorView, bias: TensorView, n: number, h: number, c: number,\n epsilon: number) => {\n const components = getMaxComponents(c);\n const WG = 64;\n // we will store channel scale and channel shift in [2, components] matrix\n // or in vec2 when components == 1\n const outputType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const sumCastType = components === 1 ? 
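// Scalar reference (not part of the bundled module) for the per-channel math in the
// InstanceNormalization shader above: mean and variance are reduced over one (batch, channel)
// slice, then y = x * channelScale + channelShift with channelScale = invStdDev * scale and
// channelShift = bias - mean * channelScale. `instanceNormChannelRef` is a hypothetical name.
const instanceNormChannelRef = (x: Float32Array, scale: number, bias: number, epsilon: number): Float32Array => {
  const mean = x.reduce((s, v) => s + v, 0) / x.length;
  const variance = x.reduce((s, v) => s + (v - mean) * (v - mean), 0) / x.length;
  const invStdDev = 1 / Math.sqrt(variance + epsilon);
  const channelScale = invStdDev * scale;
  const channelShift = bias - mean * channelScale;
  return x.map((v) => v * channelScale + channelShift);
};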
'f32' : `vec${components}f`;\n const setOutputValue = (var1: string, var2: string) => `${outputType}(${var1}, ${var2})`;\n const unitsOfWork = n * c / components;\n const wgSize = Math.ceil(h / WG);\n\n const meanInputDependencies: ProgramInputTensorInfoDependency[] = ['type'];\n const meanProgramUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: wgSize}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(h * c / components)}\n ];\n\n const getMeanShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = inputVariable('input', input.dataType, input.dims, components);\n return `\n ${shaderHelper.declareVariables(inputHelper)}\n @group(0) @binding(1) var output : array<${outputType}>;\n struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32};\n @group(0) @binding(2) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart(WG)}\n let currentImageNumber = global_idx / ${WG} / uniforms.C;\n let currentChannelNumber = (global_idx / ${WG}) % uniforms.C;\n let wgOffset = local_id.x * uniforms.wg_size;\n if (wgOffset >= uniforms.H) {\n return;\n }\n let wgMax = min(wgOffset + uniforms.wg_size, uniforms.H);\n\n let offset = currentImageNumber * uniforms.image_size + currentChannelNumber;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = wgOffset; i < wgMax; i++) {\n let value = ${sumCastType}(input[offset + i * uniforms.C]);\n sum += value;\n squaredSum += value * value;\n }\n output[global_idx] = ${setOutputValue('sum', 'squaredSum')};\n }`;\n };\n\n const meanValues = context.compute(\n {\n name: 'InstanceNormComputeMean',\n shaderCache: {hint: `${components}`, inputDependencies: meanInputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, WG, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: n * c / components},\n programUniforms: meanProgramUniforms\n }),\n getShaderSource: getMeanShaderSource,\n },\n {inputs: [input], outputs: [-1]})[0];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: unitsOfWork}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(WG * c / components)}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type', 'type'];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const scaleHelper = inputVariable('scale', scale.dataType, scale.dims, components);\n const biasHelper = inputVariable('bias', bias.dataType, bias.dims, components);\n return `\n @group(0) @binding(0) var input : array<${outputType}>;\n @group(0) @binding(1) var scale : array<${scaleHelper.type.storage}>;\n @group(0) @binding(2) var bias : array<${biasHelper.type.storage}>;\n @group(0) @binding(3) var output : array<${outputType}>;\n struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32};\n @group(0) @binding(4) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.units_of_work')}\n let currentImageNumber = global_idx / uniforms.C;\n let currentChannelNumber = global_idx % uniforms.C;\n\n let offset = currentImageNumber * uniforms.image_size;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = 0; i < min(${WG}, uniforms.H); i++) {\n let value = input[offset + i + currentChannelNumber * ${WG}];\n sum += 
value[0];\n squaredSum += value[1];\n }\n sum = sum / f32(uniforms.H);\n squaredSum = squaredSum / f32(uniforms.H);\n let invStdDev = inverseSqrt(squaredSum - sum * sum + f32(${epsilon}));\n let channelScale = invStdDev * ${sumCastType}(scale[currentChannelNumber]);\n let channelShift = ${sumCastType}(bias[currentChannelNumber]) - sum * channelScale;\n\n output[global_idx] = ${setOutputValue('channelScale', 'channelShift')};\n }`;\n };\n return context.compute(\n {\n name: 'InstanceNormComputeChannelScaleShift',\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${components};${epsilon}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: Math.ceil(unitsOfWork / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [meanValues, scale, bias], outputs: [-1]})[0];\n };\n\nconst createInstanceNormNHWCProgramInfo =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: InstanceNormAttributes) => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const N = xShape[0];\n const C = xShape[xShape.length - 1];\n const H = ShapeUtil.sizeFromDimension(xShape, 1) / C;\n const components = getMaxComponents(C);\n const outputSize = ShapeUtil.size(outputShape) / components;\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: H}, {type: DataType.uint32, data: Math.floor(C / components)}];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n // first compute mean\n const channelScaleShift = computeMean(context, inputs[0], inputs[1], inputs[2], N, H, C, attributes.epsilon);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const scaleType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const scaleCastType = components === 1 ? 
dataType : `vec${components}<${dataType}>`;\n\n const inputHelper = inputVariable('input', inputs[0].dataType, inputs[0].dims, components);\n const outputHelper = outputVariable('output', inputs[0].dataType, outputShape, components);\n\n return `\n @group(0) @binding(0) var input : array<${inputHelper.type.storage}>;\n @group(0) @binding(1) var scaleInput : array<${scaleType}>;\n @group(0) @binding(2) var output : array<${outputHelper.type.storage}>;\n struct Uniforms {H: u32, C : u32};\n @group(0) @binding(3) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n let currentImageNumber = global_idx / (uniforms.C * uniforms.H);\n let currentChannelNumber = global_idx % uniforms.C;\n\n let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber;\n let scale = scaleInput[scaleOffset];\n output[global_idx] = fma(input[global_idx], ${scaleCastType}(scale[0]), ${scaleCastType}(scale[1]));\n }`;\n };\n context.compute(\n {\n name: 'InstanceNormalizationNHWC',\n shaderCache: {hint: `${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [inputs[0], channelScaleShift]});\n };\n\nexport const instanceNorm = (context: ComputeContext, attributes: InstanceNormAttributes): void => {\n if (attributes.format === 'NHWC') {\n createInstanceNormNHWCProgramInfo(context, context.inputs, attributes);\n } else {\n context.compute(createInstanceNormProgramInfo(context.inputs, attributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {castToF32, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType,} from './common';\n\ninterface LayerNormAttributes {\n simplified: boolean;\n axis: number;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 2) {\n throw new Error('layerNorm requires at least 2 inputs.');\n }\n};\n\nconst createLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: LayerNormAttributes, outputCount: number): ProgramInfo => {\n const simplified = attributes.simplified;\n\n const xShape = inputs[0].dims;\n const scale = inputs[1];\n const bias = !simplified && inputs[2];\n\n const outputShape = xShape;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, xShape.length);\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n\n const scaleSize = ShapeUtil.size(scale.dims);\n const biasSize = bias ? 
ShapeUtil.size(bias.dims) : 0;\n if (scaleSize !== normSize || (bias && biasSize !== normSize)) {\n throw new Error(`Size of X.shape()[axis:] == ${normSize}.\n Size of scale and bias (if provided) must match this.\n Got scale size of ${scaleSize} and bias size of ${biasSize}`);\n }\n\n const meanInvStdDevDim: number[] = [];\n for (let i = 0; i < xShape.length; ++i) {\n if (i < axis) {\n meanInvStdDevDim.push(xShape[i]);\n } else {\n meanInvStdDevDim.push(1);\n }\n }\n const components = getMaxComponents(normSize);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: normCount}, {type: DataType.float, data: normSize},\n {type: DataType.uint32, data: Math.floor(normSize / components)},\n {type: DataType.float, data: attributes.epsilon}\n ];\n if (bias) {\n inputDependencies.push('type');\n }\n const hasMeanDataOutput = outputCount > 1;\n const hasInvStdOutput = outputCount > 2;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('scale', scale.dataType, scale.dims, components),\n ];\n if (bias) {\n variables.push(inputVariable('bias', bias.dataType, bias.dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanDataOutput) {\n variables.push(outputVariable('mean_data_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'norm_count', type: 'u32'}, {name: 'norm_size', type: 'f32'},\n {name: 'norm_size_vectorized', type: 'u32'}, {name: 'epsilon', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.norm_count')}\n let offset = global_idx * uniforms.norm_size_vectorized;\n var mean_vector = ${fillVector('f32', components)};\n var mean_square_vector = ${fillVector('f32', components)};\n\n for (var h: u32 = 0u; h < uniforms.norm_size_vectorized; h++) {\n let value = ${castToF32(dataType, components, 'x[h + offset]')};\n mean_vector += value;\n mean_square_vector += value * value;\n }\n let mean = ${sumVector('mean_vector', components)} / uniforms.norm_size;\n let inv_std_dev = inverseSqrt(${sumVector('mean_square_vector', components)} / uniforms.norm_size ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n\n for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) {\n let f32input = ${castToF32(dataType, components, 'x[j + offset]')};\n let f32scale = ${castToF32(dataType, components, 'scale[j]')};\n output[j + offset] = ${variables[0].type.value}((f32input ${simplified ? '' : '- mean'}) * inv_std_dev * f32scale\n ${bias ? `+ ${castToF32(dataType, components, 'bias[j]')}` : ''}\n );\n }\n\n ${hasMeanDataOutput ? 'mean_data_output[global_idx] = mean' : ''};\n ${hasInvStdOutput ? 
'inv_std_output[global_idx] = inv_std_dev' : ''};\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (hasMeanDataOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (hasInvStdOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n\n return {\n name: 'LayerNormalization',\n shaderCache: {hint: `${components};${outputCount};${simplified}`, inputDependencies},\n getRunData: () =>\n ({outputs, dispatchGroup: {x: Math.ceil(normCount / 64 /* workgroup size */)}, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const layerNorm = (context: ComputeContext, attributes: LayerNormAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createLayerNormProgramInfo(context.inputs, attributes, context.outputCount));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType, getTensorElementSize} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\n// TODO support quantization bits not equal to 4\nexport interface MatMulNBitsAttributes extends AttributeWithCacheKey {\n k: number;\n n: number;\n accuracyLevel: number;\n bits: number;\n blockSize: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes): void => {\n if (inputs.length < 3 || inputs.length > 4) {\n throw new Error('MatMulNBits requires 3 or 4 inputs');\n }\n const a = inputs[0];\n const aRank = a.dims.length;\n if (a.dims[aRank - 1] !== attributes.k) {\n throw new Error('The last dim of input shape does not match the k value');\n }\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const b = inputs[1];\n if (!ShapeUtil.areEqual(b.dims, [attributes.n, nBlocksPerCol, blobSize])) {\n throw new Error('The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize');\n }\n const scales = inputs[2];\n const scalesShape = scales.dims;\n if (ShapeUtil.size(scalesShape) !== attributes.n * nBlocksPerCol) {\n throw new Error('scales input size error.');\n }\n if (inputs.length === 4) {\n const zeroPoints = inputs[3];\n const zeroPointsShape = zeroPoints.dims;\n const expectedZeroPointsSize =\n attributes.bits > 4 ? 
(attributes.n * nBlocksPerCol) : attributes.n * Math.floor((nBlocksPerCol + 1) / 2);\n if (ShapeUtil.size(zeroPointsShape) !== expectedZeroPointsSize) {\n throw new Error('zeroPoints input size error.');\n }\n }\n};\n\nexport const createMatMulNBitsProgramInfo =\n (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes,\n maxComputeWorkgroupSizes: [number, number, number], maxComputeWorkgroupStorageSize: number): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const aRank = inputShape.length;\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const dimAOuter = inputShape[aRank - 2];\n const dimInner = attributes.k;\n const dimBOuter = attributes.n;\n const batchDims = inputShape.slice(0, aRank - 2);\n const batchSize = ShapeUtil.size(batchDims);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const blobSizeInWords = blobSize / 4;\n const dataType = inputs[0].dataType;\n const outputNumber = getMaxComponents(dimAOuter);\n const aComponents = getMaxComponents(attributes.k);\n const bComponents = getMaxComponents(blobSizeInWords);\n const elementSize = getTensorElementSize(dataType)!;\n const workgroupOutputSize = dimAOuter * nBlocksPerCol * elementSize;\n const maxNumberOfComponents = Math.floor(maxComputeWorkgroupStorageSize / workgroupOutputSize);\n const useBlockwiseMatMulNBits = nBlocksPerCol <= maxComputeWorkgroupSizes[0] && maxNumberOfComponents > 0;\n const components = (!useBlockwiseMatMulNBits || maxNumberOfComponents >= 4) ? getMaxComponents(dimBOuter) :\n ((maxNumberOfComponents >= 2) && getMaxComponents(dimBOuter) >= 2) ? 2 :\n 1;\n const outputShape = batchDims.concat([dimAOuter, dimBOuter]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n\n const programUniforms: ProgramUniform[] = useBlockwiseMatMulNBits ?\n [] :\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.blockSize}];\n const inputShapeTemp = [batchSize, dimAOuter, dimInner / aComponents];\n const bShape = ShapeUtil.convertShape(inputs[1].dims).slice();\n bShape.splice(-1, 1, blobSizeInWords / bComponents);\n programUniforms.push(...createTensorShapeVariables(inputShapeTemp));\n programUniforms.push(...createTensorShapeVariables(bShape));\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n if (inputs.length === 4) {\n programUniforms.push(...createTensorShapeVariables(ShapeUtil.convertShape(inputs[3].dims)));\n }\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputRank = inputShapeTemp.length;\n const a = inputVariable('a', inputs[0].dataType, inputRank, aComponents);\n const b = inputVariable('b', DataType.uint32, bShape.length, bComponents);\n const scales = inputVariable('scales', inputs[2].dataType, inputs[2].dims.length);\n const inputVariables = [a, b, scales];\n const zeroPoints =\n inputs.length === 4 ? 
inputVariable('zero_points', DataType.uint32, inputs[3].dims.length) : undefined;\n if (zeroPoints) {\n inputVariables.push(zeroPoints);\n }\n const outputRank = outputShapeTemp.length;\n const output = outputVariable('output', inputs[0].dataType, outputRank, components);\n const uniforms: UniformsArrayType = [{name: 'output_size', type: 'u32'}, {name: 'block_size', type: 'u32'}];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const qDqDataType = (() => {\n switch (aComponents) {\n case 1:\n return `array<${dataType}, 8>`;\n case 2:\n return `mat4x2<${dataType}>`;\n case 4:\n return `mat2x4<${dataType}>`;\n default:\n throw new Error(`${aComponents}-component is not supported.`);\n }\n })();\n\n const processOneBlock = `\n for (var word: u32 = 0; word < ${blobSizeInWords}; word += ${bComponents}) {\n ${b.indicesSet('b_indices', '2', 'word')};\n let b_data = ${b.getByIndices('b_indices')};\n for (var i: u32 = 0; i < ${bComponents}; i++) {\n let b_value: u32 = ${bComponents === 1 ? 'b_data' : 'b_data[word + i]'};\n let b_mask: u32 = 0x0F0F0F0Fu;\n let b_value_lower: vec4 = unpack4xU8(b_value & b_mask);\n let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask);\n let b_quantized_values = ${qDqDataType}(${\n Array.from({length: 4}, (_, i) => `${dataType}(b_value_lower[${i}]), ${dataType}(b_value_upper[${i}])`)\n .join(', ')});\n let b_dequantized_values = ${(() => {\n if (aComponents === 1) {\n return `${qDqDataType}(${\n Array.from({length: 8}, (_, i) => `(b_quantized_values[${i}] - zero_point) * scale`).join(', ')});`;\n } else {\n return `(b_quantized_values - ${qDqDataType}(${Array(8).fill('zero_point').join(',')})) * scale;`;\n }\n })()};\n // Number of B elements per 32-bit word is 32/bits = 32/4 = 8\n for (var m: u32 = 0; m < ${useBlockwiseMatMulNBits ? dimAOuter : outputNumber}u; m++) {\n ${a.indicesSet('a_indices', inputRank - 2, useBlockwiseMatMulNBits ? 'm' : `row * ${outputNumber} + m`)};\n ${a.indicesSet('a_indices', inputRank - 1, 'word_offset')};\n var input_offset = ${a.indicesToOffset('a_indices')};\n var a_data: ${qDqDataType};\n for (var j: u32 = 0; j < ${8 / aComponents}; j++) {\n a_data[j] = ${a.getByOffset('input_offset')};\n input_offset++;\n }\n ${useBlockwiseMatMulNBits ? 'workgroup_shared[workgroup_shared_offset + m]' : 'output_values[m]'}${\n components > 1 ? '[c]' : ''} += ${\n Array\n .from(\n {length: 8 / aComponents},\n (_, i) => `${\n aComponents === 1 ? `a_data[${i}] * b_dequantized_values[${i}]` :\n `dot(a_data[${i}], b_dequantized_values[${i}])`}`)\n .join(' + ')};\n }\n word_offset += ${8 / aComponents};\n }\n }`;\n const updateZeroPointIndex = zeroPoints ? `\n zero_point_offset += 4;\n if (zero_point_offset == 32) {\n zero_point_offset = 0;\n zero_point_index++;\n zero_point_word = ${zeroPoints.getByOffset('zero_point_index')};\n }` :\n '';\n\n return useBlockwiseMatMulNBits ? `\n var workgroup_shared: array<${output.type.value}, ${dimAOuter * nBlocksPerCol}>;\n ${shaderHelper.declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart([\n nBlocksPerCol, 1, 1\n ])}\n var a_indices: ${a.type.indices};\n var block = local_id.x;\n var col = workgroup_id.y;\n var batch = workgroup_id.z;\n ${a.indicesSet('a_indices', '0', 'batch')};\n // Two zero points are packed into one byte when uniforms.bits is 4.\n for (var c: u32 = 0; c < ${components}; c++) {\n let col_times_components_plus_c = col * ${components} + c;\n ${\n zeroPoints ? 
`\n var zero_point_bytes_per_col: u32 = (${nBlocksPerCol} + 1) / 2;\n var zero_point_byte_count: u32 = col_times_components_plus_c * zero_point_bytes_per_col + (block >> 0x1u);\n var zero_point_word_index: u32 = zero_point_byte_count >> 0x2u;\n var zero_point_byte_offset: u32 = zero_point_byte_count & 0x3u;\n var zero_point_nibble_offset: u32 = block & 0x1u;\n var zero_point_bits_offset: u32 = (zero_point_byte_offset << 3) + (zero_point_nibble_offset << 2);\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_word_index')} >> zero_point_bits_offset;` :\n ''}\n var b_indices: ${b.type.indices};\n ${b.indicesSet('b_indices', '0', 'col_times_components_plus_c')};\n // The scale and zero points are computed per block.\n var scales_index = col_times_components_plus_c * ${nBlocksPerCol} + block;\n let scale = ${scales.getByOffset('scales_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? '(zero_point_word) & 0xFu' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block * ${attributes.blockSize / aComponents};\n var workgroup_shared_offset: u32 = block * ${dimAOuter};\n ${processOneBlock}\n }\n workgroupBarrier();\n var output_indices: ${output.type.indices};\n var elements_per_thread: u32 = ${Math.ceil(dimAOuter / nBlocksPerCol)};\n ${output.indicesSet('output_indices', '0', 'batch')};\n ${output.indicesSet('output_indices', outputRank - 1, 'col')};\n ${output.indicesSet('output_indices', outputRank - 2, 'local_id.x * elements_per_thread')};\n var output_offset = ${output.indicesToOffset('output_indices')};\n for (var m: u32 = 0u; m < elements_per_thread; m++) {\n var row = m + local_id.x * elements_per_thread;\n if (row < ${dimAOuter}) {\n var output_value: ${output.type.value} = ${output.type.value}(0);\n var workgroup_shared_offset: u32 = row;\n for (var b: u32 = 0u; b < ${nBlocksPerCol}u; b++) {\n output_value += workgroup_shared[workgroup_shared_offset];\n workgroup_shared_offset += ${dimAOuter};\n }\n ${output.setByOffset('output_offset', 'output_value')};\n output_offset += ${dimBOuter / components};\n }\n }\n }` :\n `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var output_values: array<${output.type.value}, ${outputNumber}>;\n var output_indices = ${output.offsetToIndices('global_idx')};\n var col = ${output.indicesGet('output_indices', outputRank - 1)};\n var row = ${output.indicesGet('output_indices', outputRank - 2)};\n var a_indices: ${a.type.indices} = output_indices;\n // Two zero points are packed into one byte because uniforms.bits <= 4.\n // zero_point_offset is either 0 or 4. It is bit offset within one byte.\n // TODO support zero_point_offset for bits > 4\n ${\n zeroPoints ? 
`\n var zero_point_abs_offset = col * ${components} * ((${nBlocksPerCol} + 1) / 2);\n var zero_point_index: u32 = zero_point_abs_offset / 4;\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_index')};\n var zero_point_offset: u32 = (zero_point_abs_offset % 4) * 8;` :\n ''}\n var scale_index = col * ${nBlocksPerCol * components};\n var b_indices: ${b.type.indices};\n for (var c: u32 = 0; c < ${components}; c++) {\n ${b.indicesSet('b_indices', '0', `col * ${components} + c`)};\n var block_offset: u32 = 0;\n for (var block: u32 = 0; block < ${nBlocksPerCol}; block++) {\n // The scale and zero points are computed per block.\n let scale = ${scales.getByOffset('scale_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? 'extractBits(zero_point_word, zero_point_offset, 4)' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block_offset;\n ${processOneBlock}\n scale_index++;\n ${updateZeroPointIndex}\n block_offset += uniforms.block_size / ${aComponents};\n }\n // Drop the trailing 4 bits if the zero_poit_offset is not a byte boundary to align with the next byte.\n ${\n zeroPoints ? `if (zero_point_offset % 8 > 0) {\n ${updateZeroPointIndex}\n }` :\n ''}\n }\n for (var k: u32 = 0u; k < ${outputNumber}u; k++) {\n ${output.indicesSet('output_indices', outputRank - 2, `${outputNumber} * row + k`)};\n ${output.setByIndices('output_indices', 'output_values[k]')}\n }\n }`;\n };\n return {\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n shaderCache: {\n hint: `${attributes.cacheKey};${dimAOuter};${dataType};${inputs.length}`,\n inputDependencies: Array(inputs.length).fill('rank')\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n dispatchGroup: useBlockwiseMatMulNBits ? {x: 1, y: Math.ceil(dimBOuter / components), z: batchSize} :\n {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nexport const matMulNBits = (context: ComputeContext, attributes: MatMulNBitsAttributes): void => {\n validateInputs(context.inputs, attributes);\n const maxComputeWorkgroupSizes: [number, number, number] = context.getMaxComputeWorkgroupSizes();\n const maxComputeWorkgroupStorageSize = context.getMaxComputeWorkgroupStoragesize();\n context.compute(createMatMulNBitsProgramInfo(\n context.inputs, attributes, maxComputeWorkgroupSizes, maxComputeWorkgroupStorageSize));\n};\n\nexport const parseMatMulNBitsAttributes = (attributes: Record): MatMulNBitsAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\ninterface PadAttributes {\n // 0-constant, 1-reflect, 2-edge, 3-wrap\n readonly mode: number;\n readonly value: number;\n readonly pads: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('Too few inputs');\n }\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16) {\n throw new Error('Input type must be float or float16.');\n }\n\n if (inputs.length >= 2) {\n let validPads = inputs[0].dims.length * 2 === inputs[1].dims[0];\n if (inputs.length === 4) {\n validPads = inputs[3].dims[0] * 2 === inputs[1].dims[0];\n }\n if (!validPads) {\n throw new Error('The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].');\n }\n }\n};\n\nconst getPadConstant = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n break;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n break;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n value = ${output.type.value}(uniforms.constant_value);\n for (var i = 0; i < 1; i++) {\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n }\n `;\n};\n\nconst getPadReflect = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = -k;\n }\n {\n let _2n_1 = 2 * (i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1);\n k = k % _2n_1;\n if(k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = _2n_1 - k;\n }\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadEdge = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = 0;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadWrap = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k += i32(${getElementAt('uniforms.x_shape', i, inputRank)}]);\n }\n if (k >= 
i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k -= i32(${getElementAt('uniforms.x_shape', i, inputRank)});\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadSnippet = (output: IndicesHelper, inputRank: number, attributes: PadAttributes): string => {\n switch (attributes.mode) {\n case 0:\n return getPadConstant(output, inputRank, attributes.pads.length);\n case 1:\n return getPadReflect(output, inputRank, attributes.pads.length);\n case 2:\n return getPadEdge(output, inputRank, attributes.pads.length);\n case 3:\n return getPadWrap(output, inputRank, attributes.pads.length);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst createPadProgramInfo = (inputs: readonly TensorView[], attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(inputs[0].dims.slice(), attributes.pads);\n const inputDims = inputs[0].dims;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: attributes.pads}];\n if (attributes.mode === 0) {\n programUniforms.push({type: inputs[0].dataType, data: attributes.value});\n }\n\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('x', inputs[0].dataType, inputDims.length);\n const dataType = input.type.value;\n const padSnippet = getPadSnippet(output, inputDims.length, attributes);\n const uniforms: UniformsArrayType =\n [{name: 'output_size', type: 'u32'}, {name: 'pads', type: 'i32', length: attributes.pads.length}];\n if (attributes.mode === 0) {\n uniforms.push({name: 'constant_value', type: dataType as UniformDataElementType});\n }\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(0);\n ${padSnippet}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Pad',\n shaderCache: {hint: `${attributes.mode}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nconst createPadAttributesFromInputs = (inputs: readonly TensorView[], attributes: PadAttributes): PadAttributes => {\n if (inputs.length > 1) {\n const bigInt64Pads = inputs[1].getBigInt64Array();\n const value = (inputs.length >= 3 && inputs[2].data) ? 
inputs[2].getFloat32Array()[0] : 0.0;\n\n const inputRank = inputs[0].dims.length;\n const updatePads = new Int32Array(2 * inputRank).fill(0);\n if (inputs.length >= 4) {\n const axes = inputs[3].getBigInt64Array();\n for (let i = 0; i < axes.length; i++) {\n updatePads[Number(axes[i])] = Number(bigInt64Pads[i]);\n updatePads[Number(axes[i]) + inputRank] = Number(bigInt64Pads[i + axes.length]);\n }\n } else {\n bigInt64Pads.forEach((v, i) => updatePads[Number(i)] = (Number(v)));\n }\n\n const pads: number[] = [];\n updatePads.forEach(v => pads.push(v));\n\n return {mode: attributes.mode, value, pads};\n } else {\n return attributes;\n }\n};\n\nexport const pad = (context: ComputeContext, attributes: PadAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes = createPadAttributesFromInputs(context.inputs, attributes);\n context.compute(createPadProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\n// TODO: support:\n// - ceil_mode \"test_maxpool_2d_ceil\"\n// - storage_order \"test_maxpool_with_argmax_2d_precomputed_strides\"\n// - [MaxPool] dilations \"test_maxpool_2d_dilations\"\n// - [MaxPool] output[1] \"test_maxpool_with_argmax_2d_precomputed_pads\"\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (env.webgpu.validateInputContent && (!inputs || inputs.length !== 1)) {\n throw new Error('Pool ops requires 1 input.');\n }\n};\n\nconst getAdjustedPoolAttributesAndOutputShape = (\n input: TensorView, attributes: AttributeType, isGlobalOperator: boolean): [AttributeType, number[]] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inputShapeAsChannelFirst = input.dims.slice();\n if (isChannelsLast) {\n inputShapeAsChannelFirst.splice(1, 0, inputShapeAsChannelFirst.pop()!); // Move channel to the second position.\n }\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? (attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShapeAsChannelFirst, kernelShape, strides, dilations, pads);\n\n const outputShapeAsChannelFirst = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShapeAsChannelFirst, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n const outputShapeAsChannelLast = outputShapeAsChannelFirst.slice();\n outputShapeAsChannelLast.push(outputShapeAsChannelLast.splice(1, 1)[0]);\n return [newAttributes, isChannelsLast ? 
outputShapeAsChannelLast : outputShapeAsChannelFirst];\n};\n\nconst getUniformAndPadInfo = (\n outputShape: readonly number[],\n attributes: AttributeType): [ProgramUniform[], UniformsArrayType, boolean, boolean, boolean] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const outputSize = ShapeUtil.size(outputShape);\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: kernelSize}];\n const uniforms: UniformsArrayType = [{name: 'outputSize', type: 'u32'}, {name: 'kernelSize', type: 'u32'}];\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const pwStartEndNotZero = !!(pwStart + pwEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kw},\n {type: DataType.uint32, data: sw},\n {type: DataType.uint32, data: pwStart},\n {type: DataType.uint32, data: pwEnd},\n );\n uniforms.push(\n {name: 'kw', type: 'u32'}, {name: 'sw', type: 'u32'}, {name: 'pwStart', type: 'u32'},\n {name: 'pwEnd', type: 'u32'});\n\n let phStartEndNotZero = false;\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n phStartEndNotZero = !!(phStart + phEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kh}, {type: DataType.uint32, data: sh}, {type: DataType.uint32, data: phStart},\n {type: DataType.uint32, data: phEnd});\n\n uniforms.push(\n {name: 'kh', type: 'u32'}, {name: 'sh', type: 'u32'}, {name: 'phStart', type: 'u32'},\n {name: 'phEnd', type: 'u32'});\n }\n return [programUniforms, uniforms, true, pwStartEndNotZero, phStartEndNotZero];\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n programUniforms.push(\n {type: DataType.uint32, data: kernelStrides}, {type: DataType.uint32, data: attributes.pads},\n {type: DataType.uint32, data: attributes.strides});\n uniforms.push(\n {name: 'kernelStrides', type: 'u32', length: kernelStrides.length},\n {name: 'pads', type: 'u32', length: attributes.pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length});\n\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n return [programUniforms, uniforms, !!hasPads, false, false];\n }\n};\n\nconst generatePoolingCode = (\n shaderHelper: ShaderHelper, x: IndicesHelper, rank: number, outputShapeRank: number, attributes: AttributeType,\n op1: string, op2: string, start: number, uniforms: UniformsArrayType, hasPads: boolean, pwStartEndNotZero: boolean,\n phStartEndNotZero: boolean): string => {\n const isChannelsLast = attributes.format === 'NHWC';\n const dataType = x.type.value;\n const output = outputVariable('output', x.type.tensor, outputShapeRank);\n\n if (attributes.kernelShape.length <= 2) {\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n const dimIdxW = rank - (isChannelsLast ? 
2 : 1);\n if (pwStartEndNotZero) {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n if (xIndices[${dimIdxW}] < 0 || xIndices[${dimIdxW}]\n >= uniforms.x_shape[${dimIdxW}]) {\n pad++;\n continue;\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const dimIdxH = rank - (isChannelsLast ? 3 : 2);\n if (phStartEndNotZero) {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n if (xIndices[${dimIdxH}] < 0 || xIndices[${dimIdxH}] >= uniforms.x_shape[${dimIdxH}]) {\n pad += i32(uniforms.kw);\n continue;\n }\n `;\n } else {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(${start});\n var pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const stridesRank = attributes.kernelShape.length;\n const padsRank = attributes.pads.length;\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (xIndices[j] >= uniforms.x_shape[j]) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n padCode = `\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n `;\n }\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var offsets: array;\n\n var value = ${dataType}(${start});\n var pad = 0;\n var isPad = false;\n\n for (var i: u32 = 0u; i < uniforms.kernelSize; i++) {\n var offset = i;\n for (var j = 0u; j < ${stridesRank - 1}u; j++) {\n offsets[j] = offset / ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n offset -= offsets[j] * ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n }\n offsets[${stridesRank - 1}] = offset;\n\n isPad = false;\n for (var j = ${rank - stridesRank}u; j < ${rank}u; j++) {\n xIndices[j] = indices[j] * ${\n getElementAt('uniforms.strides', `j - ${rank - stridesRank}u`, stridesRank)}\n + offsets[j - ${rank - stridesRank}u] - ${getElementAt('uniforms.pads', 'j - 2u', padsRank)};\n ${padCode}\n }\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n }\n};\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface PoolCommonAttributes extends FormatAttributes {\n readonly autoPad: string;\n readonly ceilMode: 
number;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nconst createShaderKeyFromAttributes = (attributes: PoolCommonAttributes): string =>\n (`${attributes.format};${attributes.ceilMode};${attributes.autoPad};${attributes.kernelShape.length}`);\n\nconst createAveragePoolShaderKeyFromAttributes = (attributes: AveragePoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.countIncludePad}`);\n\nconst createMaxPoolShaderKeyFromAttributes = (attributes: MaxPoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.storageOrder};${attributes.dilations}`);\n\nconst parsePoolCommonAttributes = (attributes: Record): PoolCommonAttributes => ({\n format: attributes.format as FormatAttributes['format'],\n autoPad: ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number],\n ceilMode: attributes.ceil_mode as number,\n kernelShape: attributes.kernel_shape as [number, number],\n strides: attributes.strides as [number, number],\n pads: attributes.pads as [number, number, number, number]\n});\n\nexport interface AveragePoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly countIncludePad: boolean;\n}\n\nconst createAveragePoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: AveragePoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const x = inputVariable('x', input.dataType, input.dims.length);\n const dataType = x.type.value;\n\n const op1 = 'value += x_val;';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= ${dataType}(uniforms.kernelSize);`;\n } else {\n op2 += `value /= ${dataType}(i32(uniforms.kernelSize) - pad);`;\n }\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2, 0.0, uniforms,\n hasPads, pwStartEndNotZero, phStartEndNotZero),\n };\n };\n\nexport const parseAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const countIncludePad = (attributes.count_include_pad as number) === 0 ? 
false : true;\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode'\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n const averagePoolAttributes = {countIncludePad, ...attr, cacheKey: ''};\n return {...averagePoolAttributes, cacheKey: createAveragePoolShaderKeyFromAttributes(averagePoolAttributes)};\n};\n\nexport const averagePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('AveragePool', context.inputs[0], false, attributes));\n};\n\nconst globalPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: []\n};\n\nexport const parseGlobalAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalAveragePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('GlobalAveragePool', context.inputs[0], true, attributes));\n};\n\nexport interface MaxPoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nconst createMaxPoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: MaxPoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const op1 = `\n value = max(x_val, value);\n `;\n const op2 = '';\n const x = inputVariable('x', input.dataType, input.dims.length);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2,\n (input.dataType === DataType.float16) ? 
-65504 : -1e5, uniforms, hasPads, pwStartEndNotZero,\n phStartEndNotZero),\n };\n };\n\nexport const maxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('MaxPool', context.inputs[0], false, attributes));\n};\n\nexport const parseMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const storageOrder = attributes.storage_order as number;\n const dilations = attributes.dilations as [number, number];\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n const maxPoolAttributes = {storageOrder, dilations, ...attr, cacheKey: ''};\n return {...maxPoolAttributes, cacheKey: createMaxPoolShaderKeyFromAttributes(maxPoolAttributes)};\n};\n\nexport const parseGlobalMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalMaxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('GlobalMaxPool', context.inputs[0], true, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\nconst validateInputsContent = (start: number, limit: number, delta: number): void => {\n const sameStartLimit = start === limit;\n const increasingRangeNegativeStep = start < limit && delta < 0;\n const decreasingRangePositiveStep = start > limit && delta > 0;\n\n if (sameStartLimit || increasingRangeNegativeStep || decreasingRangePositiveStep) {\n throw new Error('Range these inputs\\' contents are invalid.');\n }\n};\n\nconst createRangeProgramInfo = (start: number, limit: number, delta: number, dataType: DataType): ProgramInfo => {\n const numElements = Math.abs(Math.ceil((limit - start) / delta));\n const outputShape: number[] = [numElements];\n const outputSize = numElements;\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: dataType, data: start}, {type: dataType, data: delta},\n ...createTensorShapeVariables(outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', dataType, outputShape.length);\n const wgslType = output.type.value;\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'start', type: wgslType as UniformDataElementType},\n {name: 'delta', type: wgslType as UniformDataElementType}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n output[global_idx] = uniforms.start + ${wgslType}(global_idx) * uniforms.delta;\n }`;\n };\n\n return {\n name: 'Range',\n shaderCache: {hint: `${dataType}`},\n getShaderSource,\n 
getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const range = (context: ComputeContext): void => {\n let start = 0;\n let limit = 0;\n let delta = 0;\n if (context.inputs[0].dataType === DataType.int32) {\n start = context.inputs[0].getInt32Array()[0];\n limit = context.inputs[1].getInt32Array()[0];\n delta = context.inputs[2].getInt32Array()[0];\n } else if (context.inputs[0].dataType === DataType.float) {\n start = context.inputs[0].getFloat32Array()[0];\n limit = context.inputs[1].getFloat32Array()[0];\n delta = context.inputs[2].getFloat32Array()[0];\n }\n if (env.webgpu.validateInputContent) {\n validateInputsContent(start, limit, delta);\n }\n\n context.compute(createRangeProgramInfo(start, limit, delta, context.inputs[0].dataType), {inputs: []});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype CoordinateTransformMode = 'half_pixel'|'asymmetric'|'pytorch_half_pixel'|'tf_half_pixel_for_nn'|'align_corners'|\n 'tf_crop_and_resize'|'half_pixel_symmetric';\n\ntype KeepAspectRatioPolicy = 'stretch'|'not_smaller'|'not_larger';\n\ntype Mode = 'nearest'|'linear'|'cubic';\n\ntype NearestMode = 'round_prefer_floor'|'round_prefer_ceil'|'floor'|'ceil'|'simple';\n\nexport interface ResizeAttributes extends AttributeWithCacheKey {\n antialias: number;\n axes: number[];\n coordinateTransformMode: CoordinateTransformMode;\n cubicCoeffA: number;\n excludeOutside: boolean;\n extrapolationValue: number;\n keepAspectRatioPolicy: KeepAspectRatioPolicy;\n mode: Mode;\n nearestMode: NearestMode;\n}\n\nconst validateScales = (scales: number[], attributes: ResizeAttributes): void => {\n scales.every((value) => value > 0 || (() => {\n throw new Error('Resize requires scales input values to be positive');\n }));\n // Check scales dims based on mode: LINEAR, CUBIC\n if (scales.length > 0) {\n if (attributes.mode === 'linear') {\n if (!(scales.length === 2 || scales.length === 3 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1) ||\n (scales.length === 5 && scales[0] === 1 && scales[1] === 1))) {\n throw new Error(\n `For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and\n one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`);\n }\n } else if (attributes.mode === 'cubic') {\n if (!(scales.length === 2 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1))) {\n throw new Error('Resize requires scales input size to be 2 or 4 for cubic mode');\n }\n }\n }\n};\n\nconst updateScales = (scales: readonly number[], axes: readonly number[], rank: number): number[] => {\n axes.every((value) => value >= 0 && value < rank || (() => {\n throw new Error('Resize requires axes input values to be positive and less than rank');\n }));\n const newScales = new Array(rank).fill(1.0);\n 
axes.forEach((value, index) => newScales[value] = scales[index]);\n return newScales;\n};\n\nconst validateInputs =\n (inputs: readonly TensorView[], attributes: ResizeAttributes, opsetVersion: number, scales: number[],\n sizes: number[], roi: number[]): void => {\n const [roiInputIndex, scalesInputIndex, sizesInputIndex] =\n (opsetVersion > 10) ? [1, 2, 3] : [-1, (inputs.length > 1) ? 1 : -1, -1];\n const rank = inputs[0].dims.length;\n if (roiInputIndex > 0 && inputs.length > roiInputIndex && inputs[roiInputIndex].dims.length > 0) {\n inputs[roiInputIndex].getFloat32Array().forEach((value) => roi.push(value));\n } else if (attributes.coordinateTransformMode === 'tf_crop_and_resize') {\n throw new Error('Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize');\n }\n\n if (scalesInputIndex > 0 && inputs.length > scalesInputIndex && inputs[scalesInputIndex].dims.length > 0) {\n inputs[scalesInputIndex].getFloat32Array().forEach((value) => scales.push(value));\n if (scales.length !== 0 &&\n (scales.length !== rank && (opsetVersion >= 18 && scales.length !== attributes.axes.length))) {\n throw new Error(\n 'Resize requires scales input size to be same as input rank or axes size for opset 18 and up');\n }\n validateScales(scales, attributes);\n if (attributes.axes.length > 0) {\n updateScales(scales, attributes.axes, rank).forEach((value, index) => scales[index] = value);\n }\n }\n if (sizesInputIndex > 0 && inputs.length > sizesInputIndex) {\n inputs[sizesInputIndex].getBigInt64Array().forEach((value) => sizes.push(Number(value)));\n if (sizes.length !== rank || (opsetVersion >= 18 && sizes.length === attributes.axes.length)) {\n throw new Error('Resize requires sizes input size to be same as input rank or axes size for opset 18 and up');\n }\n }\n\n if (attributes.axes.length > 0) {\n if (scales.length !== attributes.axes.length) {\n throw new Error('Resize requires \"scales\" input size to be of axes rank when axes attributes is specified');\n }\n if (sizes.length !== attributes.axes.length) {\n throw new Error(\n 'Resize requires \"sizes\" input size to be of rank axes rank when axes attributes is specified');\n }\n }\n if (typeof scales !== 'undefined' && typeof sizes !== 'undefined' && scales.length > 0 && sizes.length > rank) {\n throw new Error('Resize requires only of scales or sizes to be specified');\n }\n };\n\nconst getOriginalCoordinateFromResizedCoordinate =\n (coordinateTransferMode: CoordinateTransformMode, dType: string): string =>\n `fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32,\n lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${dType} { ` +\n (() => {\n switch (coordinateTransferMode) {\n case 'asymmetric':\n return `return ${dType}(xResized) / ${dType}(xScale);`;\n case 'pytorch_half_pixel':\n return `if (lengthResized > 1) {\n return (${dType}(xResized) + 0.5) / ${dType}(xScale) - 0.5;\n } else {\n return 0.0;\n }`;\n case 'tf_half_pixel_for_nn':\n return `return (${dType}(xResized) + 0.5) / ${dType}(xScale);`;\n case 'align_corners':\n return `if (lengthResized == 1) {\n return 0.0;\n } else {\n // The whole part and the fractional part are calculated separately due to inaccuracy of floating\n // point division. As an example, f32(21) / f32(7) may evaluate to 2.99... 
instead of 3, causing an\n // offset-by-one error later in floor().\n let whole = ${dType}(xResized * (lengthOriginal - 1) / (lengthResized - 1));\n let fract =\n ${dType}(xResized * (lengthOriginal - 1) % (lengthResized - 1)) / ${dType}(lengthResized - 1);\n return whole + fract;\n }`;\n case 'tf_crop_and_resize':\n return `if (lengthResized > 1) {\n return ${dType}(roiStart) * ${dType}(lengthOriginal - 1) +\n (${dType}(xResized) * ${dType}(roiEnd - roiStart) * ${dType}(lengthOriginal - 1)) /\n ${dType}(lengthResized - 1);\n } else {\n return 0.5 * ${dType}(roiStart + roiEnd) * ${dType}(lengthOriginal - 1);\n }`;\n case 'half_pixel_symmetric':\n return `const outputWidth = ${dType}xScale * ${dType}(lengthResized);\n const adjustment = ${dType}(lengthResized) / outputWidth;\n const center = ${dType}(lengthOriginal) / 2;\n const offset = center * (1 - adjustment);\n return offset + ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n case 'half_pixel':\n return `return ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n default:\n throw new Error(`Coordinate transform mode ${coordinateTransferMode} is not supported`);\n }\n })() +\n '}';\n\nconst getNearestPixelFromOriginal = (nearestMode: NearestMode, opsetVersion: number, dType: string): string =>\n `fn getNearestPixelFromOriginal(xOriginal: ${dType}, isDownSample: bool) -> ${dType} {` + (() => {\n switch (nearestMode) {\n case 'round_prefer_ceil':\n return 'if (fract(xOriginal) == 0.5) { \\\n return ceil(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'floor':\n return 'return floor(xOriginal);';\n case 'ceil':\n return 'return ceil(xOriginal);';\n case 'round_prefer_floor':\n return 'if (fract(xOriginal) == 0.5) { \\\n return floor(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'simple':\n default:\n if (opsetVersion < 11) {\n return 'if (isDownSample) \\\n { \\\n return ceil(xOriginal); \\\n } else { \\\n return xOriginal; \\\n }';\n }\n throw new Error(`Nearest mode ${nearestMode} is not supported`);\n }\n })() +\n '}';\n\nconst updateRoI = (roi: readonly number[], axes: readonly number[], rank: number): number[] => {\n const roiTmp = new Array(rank).fill(0).concat(new Array(rank).fill(1));\n const roiLocal = roi.length === 0 ? roiTmp : roi.slice();\n if (axes.length > 0) {\n axes.forEach((v, i) => {\n roiTmp[v] = roiLocal[i];\n roiTmp[i + rank] = roiLocal[axes.length + i];\n });\n return roiTmp;\n }\n return roiLocal;\n};\n\nconst initOutputShape =\n (inputShape: readonly number[], scales: readonly number[], sizes: readonly number[], axes: readonly number[]):\n number[] => {\n let outputShape: number[] = [];\n if (sizes.length > 0) {\n if (axes.length > 0) {\n inputShape.forEach((v) => outputShape.push(v));\n if (Math.max(...axes) > inputShape.length) {\n throw new Error('axes is out of bound');\n }\n axes.forEach((v, i) => outputShape[v] = sizes[i]);\n } else {\n sizes.forEach((v) => outputShape.push(v));\n }\n } else {\n if (scales.length === 0) {\n throw new Error('Resize requires either scales or sizes.');\n } else {\n outputShape = inputShape.map((value, index) => Math.round(value * scales[index]));\n }\n }\n return outputShape;\n };\n\nconst adjustOutputShape = (inputShape: readonly number[], scales: number[], attributes: ResizeAttributes) => {\n const scaleInPolicy = (() => {\n switch (attributes.keepAspectRatioPolicy) {\n case 'not_larger':\n return attributes.axes.length > 0 ? 
Math.min(...attributes.axes.map(i => scales[i]), Number.MAX_VALUE) :\n Math.min(...scales, Number.MAX_VALUE);\n case 'not_smaller':\n return attributes.axes.length > 0 ? Math.max(...attributes.axes.map(i => scales[i]), Number.MIN_VALUE) :\n Math.max(...scales, Number.MIN_VALUE);\n default:\n throw new Error(`Keep aspect ratio policy ${attributes.keepAspectRatioPolicy} is not supported`);\n }\n })();\n scales.fill(1.0, 0, scales.length);\n const adjustedOutputShape = inputShape.slice();\n if (attributes.axes.length > 0) {\n attributes.axes.forEach((v) => scales[v] = scaleInPolicy);\n attributes.axes.forEach((v) => adjustedOutputShape[v] = Math.round(inputShape[v] * scales[v]));\n } else {\n scales.fill(scaleInPolicy, 0, scales.length);\n adjustedOutputShape.forEach((v, i) => adjustedOutputShape[i] = Math.round(v * scales[i]));\n }\n return adjustedOutputShape;\n};\n\nconst calculateOriginalIndicesFromOutputIndices =\n (output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[], scalesLength: number,\n roiLength: number): string => `\n fn calculateOriginalIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> array<${\n output.type.value}, ${outputShape.length}> {\n var original_indices: array<${output.type.value}, ${outputShape.length}>;\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n if (scale == 1.0) {\n original_indices[i] = ${output.type.value}(output_index);\n } else {\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n original_indices[i] = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n }\n }\n return original_indices;\n }`;\n\nconst calculateInputIndicesFromOutputIndices =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scalesLength: number, roiLength: number, useExtrapolation: boolean): string => `\n fn calculateInputIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var input_index: u32;\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n if (scale == 1.0) {\n input_index = output_index;\n } else {\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n var original_idx = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n if (!${useExtrapolation} || (original_idx >= 0 && original_idx < ${output.type.value}(input_shape_i))) {\n if (original_idx < 0) {\n input_index = 0;\n } else if (original_idx > ${output.type.value}(input_shape_i - 1)) {\n input_index = input_shape_i - 1;\n } else {\n input_index = 
u32(getNearestPixelFromOriginal(original_idx, scale < 1));\n }\n } else {\n input_index = u32(original_idx);\n }\n }\n ${input.indicesSet('input_indices', 'i', ' input_index')}\n }\n return input_indices;\n }`;\nconst checkInputIndices = (input: IndicesHelper, inputShape: readonly number[]): string => `\n fn checkInputIndices(input_indices: ${input.type.indices}) -> bool {\n for (var i:u32 = 0; i < ${inputShape.length}; i++) {\n var input_index = ${input.indicesGet('input_indices', 'i')};\n if (input_index < 0 || input_index >= ${getElementAt('uniforms.input_shape', 'i', inputShape.length)}) {\n return false;\n }\n }\n return true;\n }`;\n\nconst setChannelAndBatchIndices =\n (input: IndicesHelper, channelIdx: number, batchIdx: number, spacialDims: number): string =>\n input.rank > spacialDims ? `\n ${input.indicesSet('input_indices', channelIdx, 'channel')};\n ${input.indicesSet('input_indices', batchIdx, 'batch')};\n` :\n '';\n\nconst bilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 2 ? [-1, 0, 1, -1] : (isNchw ? [0, 2, 3, 1] : [0, 1, 2, 3]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(row, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(col, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 2)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn bilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var row:${dType} = originalIndices[${heightIdx}];\n var col:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ?\n `if (row < 0 || row > (${inputShape[heightIdx]} - 1) || col < 0 || col > (${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n row = max(0, min(row, ${inputShape[heightIdx]} - 1));\n col = max(0, min(col, ${inputShape[widthIdx]} - 1));\n var row1: u32 = u32(row);\n var col1: u32 = u32(col);\n var row2: u32 = u32(row + 1);\n var col2: u32 = u32(col + 1);\n var channel: u32 = ${inputShape.length > 2 ? `u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 2 ? 
`u32(originalIndices[${batchIdx}])` : '0'};\n var x11: ${dType} = getInputValue(batch, channel, row1, col1);\n var x12: ${dType} = getInputValue(batch, channel, row1, col2);\n var x21: ${dType} = getInputValue(batch, channel, row2, col1);\n var x22: ${dType} = getInputValue(batch, channel, row2, col2);\n var dx1: ${dType} = abs(row - ${dType}(row1));\n var dx2: ${dType} = abs(${dType}(row2) - row);\n var dy1: ${dType} = abs(col - ${dType}(col1));\n var dy2: ${dType} = abs(${dType}(col2) - col);\n if (row1 == row2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (col1 == col2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1);\n }`;\n };\n\nconst bicubicInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scales: readonly number[], roi: readonly number[], cubicCoeffA: number, useExtrapolation: boolean,\n extrapolationValue: number, excludeOutside: boolean): string => {\n const is2D = inputShape.length === 2;\n const isNchw = true;\n const [heightIdx, widthIdx] = is2D ? [0, 1] : isNchw ? [2, 3] : [1, 2];\n const dType = input.type.value;\n const createCubicInterpolationFunction = (idx: number): string => {\n const direction = idx === heightIdx ? 'row' : 'col';\n return `\n fn ${direction}CubicInterpolation(input_indices: ${input.type.indices}, output_indices: ${\n output.type.indices}) -> ${dType} {\n var output_index = ${output.indicesGet('output_indices', idx)};\n var originalIdx: ${dType} = getOriginalCoordinateFromResizedCoordinate(output_index, ${scales[idx]},\n ${outputShape[idx]}, ${inputShape[idx]}, ${roi[idx]}, ${roi[idx]} + ${inputShape.length});\n var fractOriginalIdx: ${dType} = originalIdx - floor(originalIdx);\n var coefs = getCubicInterpolationCoefs(fractOriginalIdx);\n\n if (${useExtrapolation} && (originalIdx < 0 || originalIdx > (${inputShape[idx]} - 1))) {\n return ${extrapolationValue};\n }\n var data: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n for (var i: i32 = -1; i < 3; i++) {\n var ${direction}: ${dType} = originalIdx + ${dType}(i);\n if (${direction} < 0 || ${direction} >= ${inputShape[idx]}) {\n ${(() => {\n if (excludeOutside) {\n return `coefs[i + 1] = 0.0;\n continue;`;\n } else if (useExtrapolation) {\n return `return ${extrapolationValue};`;\n } else {\n return `${direction} = max(0, min(${direction}, ${inputShape[idx]} - 1));`;\n }\n })()};\n }\n var input_indices_copy: ${input.type.indices} = input_indices;\n ${input.indicesSet('input_indices_copy', idx, `u32(${direction})`)};\n data[i + 1] = ${\n idx === heightIdx ? 
input.getByIndices('input_indices_copy') :\n 'rowCubicInterpolation(input_indices_copy, output_indices)'};\n }\n return cubicInterpolation1D(data, coefs);\n }`;\n };\n\n return `\n ${createCubicInterpolationFunction(heightIdx)};\n ${createCubicInterpolationFunction(widthIdx)};\n fn getCubicInterpolationCoefs(s: ${dType}) -> array<${dType}, 4> {\n var absS = abs(s);\n var coeffs: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n var oneMinusAbsS: ${dType} = 1.0 - absS;\n var twoMinusAbsS: ${dType} = 2.0 - absS;\n var onePlusAbsS: ${dType} = 1.0 + absS;\n coeffs[0] = ((${cubicCoeffA} * onePlusAbsS - 5 * ${cubicCoeffA}) * onePlusAbsS + 8 * ${\n cubicCoeffA}) * onePlusAbsS - 4 * ${cubicCoeffA};\n coeffs[1] = ((${cubicCoeffA} + 2) * absS - (${cubicCoeffA} + 3)) * absS * absS + 1;\n coeffs[2] = ((${cubicCoeffA} + 2) * oneMinusAbsS - (${cubicCoeffA} + 3)) * oneMinusAbsS * oneMinusAbsS + 1;\n coeffs[3] = ((${cubicCoeffA} * twoMinusAbsS - 5 * ${cubicCoeffA}) * twoMinusAbsS + 8 * ${\n cubicCoeffA}) * twoMinusAbsS - 4 * ${cubicCoeffA};\n return coeffs;\n }\n\n fn cubicInterpolation1D(x: array<${dType}, 4>, coefs: array<${dType}, 4>) -> ${dType} {\n var coefsSum: ${dType} = coefs[0] + coefs[1] + coefs[2] + coefs[3];\n return (x[0] * coefs[0] + x[1] * coefs[1]+ x[2] * coefs[2]+ x[3] * coefs[3]) / coefsSum;\n }\n\n fn bicubicInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var input_indices: ${input.type.indices} = output_indices;\n return colCubicInterpolation(input_indices, output_indices);\n }\n `;\n };\n\nconst trilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, depthIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 3 ? [-1, 0, 1, 2, -1] : (isNchw ? [0, 2, 3, 4, 1] : [0, 1, 2, 3, 4]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', depthIdx, `max(0, min(depth, ${inputShape[depthIdx]} - 1))`)};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(height, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(width, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 3)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn trilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var depth:${dType} = originalIndices[${depthIdx}];\n var height:${dType} = originalIndices[${heightIdx}];\n var width:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ? `if (depth < 0 || depth > (${inputShape[depthIdx]} - 1) || height < 0 || height > (${\n inputShape[heightIdx]} - 1) || width < 0 || (width > ${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n\n depth = max(0, min(depth, ${inputShape[depthIdx]} - 1));\n height = max(0, min(height, ${inputShape[heightIdx]} - 1));\n width = max(0, min(width, ${inputShape[widthIdx]} - 1));\n var depth1: u32 = u32(depth);\n var height1: u32 = u32(height);\n var width1: u32 = u32(width);\n var depth2: u32 = u32(depth + 1);\n var height2: u32 = u32(height + 1);\n var width2: u32 = u32(width + 1);\n var channel: u32 = ${inputShape.length > 3 ? 
`u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 3 ? `u32(originalIndices[${batchIdx}])` : '0'};\n\n var x111: ${dType} = getInputValue(batch, channel, depth1, height1, width1);\n var x112: ${dType} = getInputValue(batch, channel, depth1, height1, width2);\n var x121: ${dType} = getInputValue(batch, channel, depth1, height2, width1);\n var x122: ${dType} = getInputValue(batch, channel, depth1, height2, width2);\n var x211: ${dType} = getInputValue(batch, channel, depth2, height1, width1);\n var x212: ${dType} = getInputValue(batch, channel, depth2, height1, width2);\n var x221: ${dType} = getInputValue(batch, channel, depth2, height2, width1);\n var x222: ${dType} = getInputValue(batch, channel, depth2, height2, width2);\n var dx1: ${dType} = abs(depth - ${dType}(depth1));\n var dx2: ${dType} = abs(${dType}(depth2) - depth);\n var dy1: ${dType} = abs(height - ${dType}(height1));\n var dy2: ${dType} = abs(${dType}(height2) - height);\n var dz1: ${dType} = abs(width - ${dType}(width1));\n var dz2: ${dType} = abs(${dType}(width2) - width);\n if (depth1 == depth2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (height1 == height2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n if (width1 == width2) {\n dz1 = 0.5;\n dz2 = 0.5;\n }\n return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 +\n x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1);\n }`;\n };\n\nconst createResizeProgramInfo =\n (inputTensor: TensorView, attributes: ResizeAttributes, opsetVersion: number, scalesInput: readonly number[],\n sizes: readonly number[], roiInput: readonly number[]): ProgramInfo => {\n const inputShape = inputTensor.dims;\n const roi = updateRoI(roiInput, attributes.axes, inputShape.length);\n\n let outputShape = initOutputShape(inputShape, scalesInput, sizes, attributes.axes);\n let scales = scalesInput.slice();\n if (scalesInput.length === 0) {\n scales = inputShape.map((value, index) => value === 0 ? 1.0 : outputShape[index] / value);\n if (attributes.keepAspectRatioPolicy !== 'stretch') {\n outputShape = adjustOutputShape(inputShape, scales, attributes);\n }\n }\n const output = outputVariable('output', inputTensor.dataType, outputShape.length);\n const input = inputVariable('input', inputTensor.dataType, inputShape.length);\n const outputSize = ShapeUtil.size(outputShape);\n const noScale = inputShape.length === outputShape.length && inputShape.every((d, i) => d === outputShape[i]);\n const useExtrapolation = attributes.coordinateTransformMode === 'tf_crop_and_resize';\n const extrapolationValue = attributes.extrapolationValue;\n const dataType = input.type.value;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${noScale ? 
'' : `\n ${getOriginalCoordinateFromResizedCoordinate(attributes.coordinateTransformMode, dataType)};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `\n ${checkInputIndices(input, inputShape)};\n ${getNearestPixelFromOriginal(attributes.nearestMode, opsetVersion, dataType)};\n ${\n calculateInputIndicesFromOutputIndices(\n input, output, inputShape, outputShape, scales.length, roi.length, useExtrapolation)};\n `;\n case 'linear':\n return `\n ${calculateOriginalIndicesFromOutputIndices(output, inputShape, outputShape, scales.length, roi.length)};\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${bilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else if (inputShape.length === 3 || inputShape.length === 5) {\n return `${trilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else {\n throw Error('Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.');\n }\n })()};\n `;\n case 'cubic':\n return `\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${\n bicubicInterpolation(\n input, output, inputShape, outputShape, scales, roi, attributes.cubicCoeffA, useExtrapolation,\n attributes.extrapolationValue, attributes.excludeOutside)}`;\n } else {\n throw Error('Cubic mode only supports input dims 2 and 4 are supported in linear mode.');\n }\n })()};\n `;\n default:\n throw Error('Invalid resize mode');\n }\n })()};\n `}\n ${\n shaderHelper.registerUniform('output_size', 'u32')\n .registerUniform('scales', 'f32', scales.length)\n .registerUniform('roi', 'f32', roi.length)\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n ${noScale ? 'output[global_idx] = input[global_idx];' : `\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `input_indices = calculateInputIndicesFromOutputIndices(output_indices);\n if (checkInputIndices(input_indices)) {\n output[global_idx] = ${input.getByIndices('input_indices')};\n } else {\n output[global_idx] = ${attributes.extrapolationValue};\n }`;\n case 'linear':\n return `output[global_idx] = ${\n (inputShape.length === 2 || inputShape.length === 4) ? 'bilinearInterpolation' :\n 'trilinearInterpolation'}(output_indices);`;\n case 'cubic':\n return 'output[global_idx] = bicubicInterpolation(output_indices);';\n default:\n throw Error(`Unsupported resize mode: ${attributes.mode}`);\n }\n })()};\n`}\n }`;\n\n return {\n name: 'Resize',\n shaderCache: {\n hint: `${attributes.cacheKey}|${opsetVersion}|${scales.length > 0 ? scales : ''}|${\n sizes.length > 0 ? sizes : ''}|${roi.length > 0 ? 
roi : ''}|${noScale}|${inputShape}`,\n inputDependencies: ['rank']\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputTensor.dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.float, data: scales},\n {type: DataType.float, data: roi}, ...createTensorShapeVariables(inputShape, outputShape)\n ]\n })\n };\n };\n\nconst getOpsetVersionFromCustomDataBuffer = (context: ComputeContext): number => {\n const customDataBuffer = context.customDataBuffer;\n const customDataBuffer32 = new Uint32Array(customDataBuffer, customDataBuffer.byteOffset, 1);\n const opsetVersion = customDataBuffer32[0];\n return opsetVersion;\n};\n\nexport const resize = (context: ComputeContext, attributes: ResizeAttributes): void => {\n const scales: number[] = [];\n const sizes: number[] = [];\n const roi: number[] = [];\n\n // Note that scales in resize are always f32. roi can be f32 or f16.\n // TODO: Currently this code does not support f16 for roi when passed as optional input.\n\n const opsetVersion = getOpsetVersionFromCustomDataBuffer(context);\n if (attributes.antialias !== 0) {\n throw Error('Only default value (0) for Antialias attribute is supported');\n }\n validateInputs(context.inputs, attributes, opsetVersion, scales, sizes, roi);\n context.compute(\n createResizeProgramInfo(context.inputs[0], attributes, opsetVersion, scales, sizes, roi), {inputs: [0]});\n};\n\nexport const parseResizeAttributes = (attributes: Record): ResizeAttributes => {\n const antialias = attributes.antialias as number;\n const axes = attributes.axes as number[];\n const coordinateTransformMode: CoordinateTransformMode =\n attributes.coordinateTransformMode as CoordinateTransformMode;\n const cubicCoeffA = attributes.cubicCoeffA as number;\n const excludeOutside = attributes.excludeOutside as number !== 0;\n const extrapolationValue = attributes.extrapolationValue as number;\n const keepAspectRatioPolicy: KeepAspectRatioPolicy = attributes.keepAspectRatioPolicy as KeepAspectRatioPolicy;\n const mode: Mode = attributes.mode as Mode;\n // If nearestMode is not specified, use simple mode.\n const nearestMode: NearestMode = (attributes.nearestMode === '' ? 'simple' : attributes.nearestMode) as NearestMode;\n return createAttributeWithCacheKey({\n antialias,\n axes,\n coordinateTransformMode,\n cubicCoeffA,\n excludeOutside,\n extrapolationValue,\n keepAspectRatioPolicy,\n mode,\n nearestMode\n });\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, WORKGROUP_SIZE} from './common';\n\nexport interface RotaryEmbeddingAttributes {\n readonly interleaved: boolean;\n readonly numHeads: number;\n readonly rotaryEmbeddingDim: number;\n readonly scale: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): void => {\n const [input, positionIds, cosCache, sinCache] = inputs;\n const {numHeads, rotaryEmbeddingDim} = attributes;\n\n if (input.dims.length !== 3 && input.dims.length !== 4) {\n throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${input.dims.length}`);\n }\n if (!ShapeUtil.areEqual(positionIds.dims, []) && !ShapeUtil.areEqual(positionIds.dims, [1]) &&\n positionIds.dims.length !== 2) {\n throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${positionIds.dims.length}`);\n }\n if (cosCache.dims.length !== 2) {\n throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${cosCache.dims.length}`);\n }\n if (sinCache.dims.length !== 2) {\n throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${sinCache.dims.length}`);\n }\n if (!ShapeUtil.areEqual(cosCache.dims, sinCache.dims)) {\n throw new Error('Inputs \\'cos_cache\\' and \\'sin_cache\\' are expected to have the same shape');\n }\n\n if (rotaryEmbeddingDim > 0 && numHeads === 0) {\n throw new Error('num_heads must be provided if rotary_embedding_dim is specified');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[input.dims.length - 2];\n const maxSequenceLength = cosCache.dims[0];\n const hiddenSize = ShapeUtil.sizeFromDimension(input.dims, 1) / sequenceLength;\n const headSize = rotaryEmbeddingDim === 0 ? 
cosCache.dims[1] * 2 : hiddenSize / numHeads;\n if (rotaryEmbeddingDim > headSize) {\n throw new Error('rotary_embedding_dim must be less than or equal to head_size');\n }\n\n if (positionIds.dims.length === 2) {\n if (batchSize !== positionIds.dims[0]) {\n throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${positionIds.dims[0]}`);\n }\n if (sequenceLength !== positionIds.dims[1]) {\n throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${positionIds.dims[1]}`);\n }\n }\n\n if (headSize / 2 !== cosCache.dims[1] && rotaryEmbeddingDim / 2 !== cosCache.dims[1]) {\n throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${\n cosCache.dims[1]}`);\n }\n\n if (sequenceLength > maxSequenceLength) {\n throw new Error('Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported');\n }\n};\n\nconst createRotaryEmbeddingProgramInfo =\n (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): ProgramInfo => {\n const {interleaved, numHeads, rotaryEmbeddingDim, scale} = attributes;\n const batchSize = inputs[0].dims[0];\n const batchStride = ShapeUtil.sizeFromDimension(inputs[0].dims, 1);\n const sequenceLength = inputs[0].dims[inputs[0].dims.length - 2];\n const hiddenSize = batchStride / sequenceLength;\n const halfRotaryEmbeddingDim = inputs[2].dims[1];\n const headSize = rotaryEmbeddingDim === 0 ? halfRotaryEmbeddingDim * 2 : hiddenSize / numHeads;\n\n // Rotary embeddings will be calculated in a pair-wise fashion. In accordance, use the shape\n // [batch size, sequence length, num of heads, num of pairs to rotate + num of dims to copy]\n // to unfold the global index in shader.\n const globalShape =\n new Array(batchSize, sequenceLength, hiddenSize / headSize, headSize - halfRotaryEmbeddingDim);\n const globalStrides = ShapeUtil.computeStrides(globalShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.float, data: scale},\n {type: DataType.uint32, data: globalShape},\n {type: DataType.uint32, data: globalStrides},\n\n // strides for addressing the input/output tensor, in permutated order to align with the unfolded global index,\n // i.e. 
BSNH\n ...(inputs[0].dims.length === 3 ?\n new Array({type: DataType.uint32, data: [batchStride, hiddenSize, headSize, 1]}) :\n []),\n ...(inputs[0].dims.length === 4 ?\n new Array(\n {type: DataType.uint32, data: [batchStride, headSize, sequenceLength * headSize, 1]}) :\n []),\n\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, inputs[2].dims, inputs[3].dims, inputs[0].dims),\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const positionIds = inputVariable('position_ids', inputs[1].dataType, inputs[1].dims.length);\n const cosCache = inputVariable('cos_cache', inputs[2].dataType, inputs[2].dims.length);\n const sinCache = inputVariable('sin_cache', inputs[3].dataType, inputs[3].dims.length);\n const output = outputVariable('output', inputs[0].dataType, inputs[0].dims.length);\n\n shaderHelper.registerUniforms([\n {name: 'scale', type: 'f32'},\n {name: 'global_shape', type: 'u32', length: globalShape.length},\n {name: 'global_strides', type: 'u32', length: globalStrides.length},\n {name: 'input_output_strides', type: 'u32', length: globalStrides.length},\n ]);\n\n return `\n ${shaderHelper.declareVariables(input, positionIds, cosCache, sinCache, output)}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n let half_rotary_emb_dim = uniforms.${cosCache.name}_shape[1];\n let bsnh = global_idx / uniforms.global_strides % uniforms.global_shape;\n let size = uniforms.global_shape[0] * uniforms.global_strides[0];\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('size')}\n\n if (bsnh[3] < half_rotary_emb_dim) {\n let position_ids_idx =\n ${positionIds.broadcastedIndicesToOffset('bsnh.xy', outputVariable('', positionIds.type.tensor, 2))};\n let position_id =\n u32(${positionIds.getByOffset('position_ids_idx')}) + select(0, bsnh[1], position_ids_idx == 0);\n let i = dot(bsnh, uniforms.input_output_strides) + select(0, bsnh[3], ${interleaved});\n let j = i + select(half_rotary_emb_dim, 1, ${interleaved});\n let re = ${input.getByOffset('i')} * ${cosCache.get('position_id', 'bsnh[3]')} -\n ${input.getByOffset('j')} * ${sinCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('i', 're')}\n let im = ${input.getByOffset('i')} * ${sinCache.get('position_id', 'bsnh[3]')} +\n ${input.getByOffset('j')} * ${cosCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('j', 'im')}\n } else {\n let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim;\n ${output.setByOffset('k', input.getByOffset('k'))}\n }\n }`;\n };\n\n return {\n name: 'RotaryEmbedding',\n shaderCache: {\n hint: createAttributeWithCacheKey({\n interleaved,\n }).cacheKey,\n inputDependencies: ['rank', 'rank', 'rank', 'rank'],\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(globalShape) / WORKGROUP_SIZE)},\n programUniforms,\n }),\n };\n };\n\nexport const rotaryEmbedding = (context: ComputeContext, attributes: RotaryEmbeddingAttributes): void => {\n validateInputs(context.inputs, attributes);\n context.compute(createRotaryEmbeddingProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {castToF32, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface SkipLayerNormAttributes {\n simplified: boolean;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 3) {\n throw new Error('layerNorm requires at least 3 inputs.');\n }\n\n const input: TensorView = inputs[0];\n const skip: TensorView = inputs[1];\n const gamma: TensorView = inputs[2];\n\n if (input.dataType !== skip.dataType || input.dataType !== gamma.dataType) {\n throw new Error('All inputs must have the same data type');\n }\n\n if (input.dims.length !== 3 && input.dims.length !== 2) {\n throw new Error('Input must be 2D or 3D');\n }\n\n if (skip.dims.length !== 3 && skip.dims.length !== 2) {\n throw new Error('Skip must be 2D or 3D');\n }\n\n const hiddenSize = input.dims[input.dims.length - 1];\n const sequenceLength = input.dims[input.dims.length - 2];\n if (skip.dims[skip.dims.length - 1] !== hiddenSize) {\n throw new Error('Skip must have the same hidden size as input');\n }\n if (skip.dims[skip.dims.length - 2] !== sequenceLength) {\n throw new Error('Skip must have the same sequence length as input');\n }\n\n if (gamma.dims.length !== 1) {\n throw new Error('Gamma must be 1D');\n }\n if (gamma.dims[gamma.dims.length - 1] !== hiddenSize) {\n throw new Error('Gamma must have the same hidden size as input');\n }\n if (inputs.length > 3) {\n const beta: TensorView = inputs[3];\n if (beta.dims.length !== 1) {\n throw new Error('Beta must be 1D');\n }\n if (beta.dims[beta.dims.length - 1] !== hiddenSize) {\n throw new Error('Beta must have the same hidden size as input');\n }\n }\n if (inputs.length > 4) {\n const bias: TensorView = inputs[4];\n if (bias.dims.length !== 1) {\n throw new Error('Bias must be 1D');\n }\n if (bias.dims[bias.dims.length - 1] !== hiddenSize) {\n throw new Error('Bias must have the same hidden size as input');\n }\n }\n};\n\nconst createSkipLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: SkipLayerNormAttributes, outputCount: number, isTraining: boolean):\n ProgramInfo => {\n const simplified = attributes.simplified;\n\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const outputShape = inputShape;\n const outputSize = inputSize;\n const hiddenSize = inputShape.slice(-1)[0];\n const meanInvStdDevDim = isTraining ? 
inputShape.slice(0, -1).concat(1) : [];\n const hasBetaInput = !simplified && inputs.length > 3;\n const hasBiasInput = inputs.length > 4;\n const hasMeanOutput = isTraining && outputCount > 1;\n const hasInvStdDevOutput = isTraining && outputCount > 2;\n const hasInputSkipBiasSumOutput = outputCount > 3;\n const workgroupSize = 64;\n\n const components = getMaxComponents(hiddenSize);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.uint32, data: components},\n {type: DataType.uint32, data: hiddenSize},\n {type: DataType.float, data: attributes.epsilon},\n ];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniformsArray: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'components', type: 'u32'},\n {name: 'hidden_size', type: 'u32'},\n {name: 'epsilon', type: 'f32'},\n ];\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('skip', inputs[1].dataType, inputs[1].dims, components),\n inputVariable('gamma', inputs[2].dataType, inputs[2].dims, components),\n ];\n if (hasBetaInput) {\n variables.push(inputVariable('beta', inputs[3].dataType, inputs[3].dims, components));\n }\n if (hasBiasInput) {\n variables.push(inputVariable('bias', inputs[4].dataType, inputs[4].dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanOutput) {\n variables.push(outputVariable('mean_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdDevOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInputSkipBiasSumOutput) {\n variables.push(outputVariable('input_skip_bias_sum', inputs[0].dataType, outputShape, components));\n }\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const vecDataType = tensorTypeToWsglStorageType(DataType.float, components);\n return `\n\n ${shaderHelper.registerUniforms(uniformsArray).declareVariables(...variables)}\n var sum_shared : array<${vecDataType}, ${workgroupSize}>;\n var sum_squared_shared : array<${vecDataType}, ${workgroupSize}>;\n\n ${shaderHelper.mainStart([\n workgroupSize, 1, 1\n ])}\n let ix = local_id.x;\n let iy = global_id.x / ${workgroupSize};\n\n let hidden_size_vectorized: u32 = uniforms.hidden_size / uniforms.components;\n var stride = hidden_size_vectorized / ${workgroupSize};\n let offset = ix * stride + iy * hidden_size_vectorized;\n let offset1d = stride * ix;\n if (ix == ${workgroupSize - 1}) {\n stride = hidden_size_vectorized - stride * ix;\n }\n for (var i: u32 = 0; i < stride; i++) {\n let skip_value = skip[offset + i];\n let bias_value = ${hasBiasInput ? 'bias[offset1d + i]' : dataType + '(0.0)'};\n let input_value = x[offset + i];\n let value = input_value + skip_value + bias_value;\n ${hasInputSkipBiasSumOutput ? 
'input_skip_bias_sum[offset + i] = value;' : ''}\n output[offset + i] = value;\n let f32_value = ${castToF32(dataType, components, 'value')};\n sum_shared[ix] += f32_value;\n sum_squared_shared[ix] += f32_value * f32_value;\n }\n workgroupBarrier();\n\n var reduce_size : u32 = ${workgroupSize};\n for (var curr_size = reduce_size >> 1; curr_size > 0; curr_size = reduce_size >> 1) {\n reduce_size = curr_size + (reduce_size & 1);\n if (ix < curr_size) {\n sum_shared[ix] += sum_shared[ix + reduce_size];\n sum_squared_shared[ix] += sum_squared_shared[ix + reduce_size];\n }\n workgroupBarrier();\n }\n\n let sum = sum_shared[0];\n let square_sum = sum_squared_shared[0];\n let mean = ${sumVector('sum', components)} / f32(uniforms.hidden_size);\n let inv_std_dev = inverseSqrt(${sumVector('square_sum', components)} / f32(uniforms.hidden_size) ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n ${hasMeanOutput ? 'mean_output[global_idx] = mean;' : ''}\n ${hasInvStdDevOutput ? 'inv_std_output[global_idx] = inv_std_dev;' : ''}\n\n for (var i: u32 = 0; i < stride; i++) {\n output[offset + i] = (output[offset + i] ${simplified ? '' : `- ${dataType}(mean)`}) *\n ${dataType}(inv_std_dev) * gamma[offset1d + i]\n ${hasBetaInput ? '+ beta[offset1d + i]' : ''};\n }\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (outputCount > 1) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 2) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 3) {\n outputs.push({dims: inputShape, dataType: inputs[0].dataType});\n }\n return {\n name: 'SkipLayerNormalization',\n shaderCache: {\n hint: `${components};${hasMeanOutput};${hasInvStdDevOutput};${hasInputSkipBiasSumOutput}`,\n inputDependencies: inputs.map((_input, _index) => 'type')\n },\n getShaderSource,\n getRunData: () => ({\n outputs,\n dispatchGroup: {\n x: Math.ceil(outputSize / hiddenSize),\n },\n programUniforms\n }),\n };\n };\n\nexport const skipLayerNorm = (context: ComputeContext, attributes: SkipLayerNormAttributes): void => {\n // TODO: initialize isTraining from ComputeContext\n const isTraining = false;\n validateInputs(context.inputs);\n // Mean and InvStdDev are only used in training mode and are not required for inference.\n // They are added here for completeness only.\n const outputs = [0];\n if (context.outputCount > 1) {\n outputs.push(isTraining ? 1 : -3);\n }\n if (context.outputCount > 2) {\n outputs.push(isTraining ? 2 : -3);\n }\n if (context.outputCount > 3) {\n outputs.push(3);\n }\n context.compute(\n createSkipLayerNormProgramInfo(context.inputs, attributes, context.outputCount, isTraining), {outputs});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly starts: number[];\n readonly ends: number[];\n readonly axes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: SliceAttributes): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n if (attributes.axes.length !== 0) {\n if (attributes.axes.length !== attributes.starts.length || attributes.axes.length !== attributes.ends.length) {\n throw new Error('axes, starts and ends must have the same length');\n }\n } else if (attributes.starts.length !== attributes.ends.length) {\n throw new Error('starts and ends must have the same length');\n }\n inputs.slice(1).forEach((_, idx) => {\n if (inputs[idx + 1].dataType !== DataType.int32 && inputs[idx + 1].dataType !== DataType.int64) {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n });\n};\n\nconst readInput = (inputs: readonly TensorView[], idx: number): number[] => {\n const input: number[] = [];\n if (inputs.length > idx) {\n if (inputs[idx].dataType === DataType.int64) {\n inputs[idx].getBigInt64Array().forEach(v => input.push(Number(v)));\n } else if (inputs[idx].dataType === DataType.int32) {\n inputs[idx].getInt32Array().forEach(v => input.push(Number(v)));\n } else {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n }\n return input;\n};\n\nconst createSliceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SliceAttributes): SliceAttributes => {\n if (inputs.length > 1) {\n const starts: number[] = readInput(inputs, 1);\n const ends: number[] = readInput(inputs, 2);\n let axes: number[] = readInput(inputs, 3);\n if (axes.length === 0) {\n axes = [...Array(inputs[0].dims.length).keys()];\n }\n return createAttributeWithCacheKey({starts, ends, axes});\n } else {\n return attributes;\n }\n };\n\nconst fixStartEndValues =\n (value: number, index: number, inputShape: readonly number[], axes: readonly number[], steps: readonly number[]):\n number => {\n let newValue = value;\n if (value < 0) {\n newValue += inputShape[axes[index]];\n }\n if (steps[index] < 0) {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]] - 1));\n } else {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]]));\n }\n };\n\nconst calculateInputIndicesImpl =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[]): string =>\n `fn calculateInputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n var carry = 0u;\n for (var i = ${inputShape.length}; i >= 0; i--) {\n let input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n let steps_i = ${getElementAt('uniforms.steps', 'i', inputShape.length)};\n let signs_i = ${getElementAt('uniforms.signs', 'i', inputShape.length)};\n let starts_i = ${getElementAt('uniforms.starts', 'i', inputShape.length)};\n var output_index = ${output.indicesGet('output_indices', 
'i')};\n var input_index = output_index * steps_i + starts_i + carry;\n carry = input_index / input_shape_i;\n input_index = input_index % input_shape_i;\n if (signs_i < 0) {\n input_index = input_shape_i - input_index - 1u + starts_i;\n }\n ${input.indicesSet('input_indices', 'i', 'input_index')};\n }\n return input_indices;\n }`;\n\nconst createSliceProgramInfo = (inputs: readonly TensorView[], attributes: SliceAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const axes = (attributes.axes.length > 0) ? ShapeUtil.normalizeAxes(attributes.axes, inputShape.length) :\n [...Array(inputShape.length).keys()];\n let steps = readInput(inputs, 4);\n steps.forEach((step) => step !== 0 || (() => {\n throw new Error('step cannot be 0');\n }));\n if (steps.length === 0) {\n steps = Array(axes.length).fill(1);\n }\n const starts = attributes.starts.map((start, i) => fixStartEndValues(start, i, inputShape, axes, steps));\n\n const ends = attributes.ends.map((end, i) => fixStartEndValues(end, i, inputShape, axes, steps));\n\n if (axes.length !== starts.length || axes.length !== ends.length) {\n throw new Error('start, ends and axes should have the same number of elements');\n }\n\n if (axes.length !== inputShape.length) {\n for (let i = 0; i < inputShape.length; ++i) {\n if (!axes.includes(i)) {\n starts.splice(i, 0, 0);\n ends.splice(i, 0, inputShape[i]);\n steps.splice(i, 0, 1);\n }\n }\n }\n const signs = steps.map(step => Math.sign(step));\n // Convert negative steps to positive steps and reverse starts and ends\n steps.forEach((step, i, array) => {\n if (step < 0) {\n const numSteps = (ends[i] - starts[i]) / step;\n const newEnd = starts[i];\n const newStart = newEnd + numSteps * steps[i];\n starts[i] = newStart;\n ends[i] = newEnd;\n array[i] = -step;\n }\n });\n // Output rank is expected to be less than or equal to the input rank.\n const outputShape = inputShape.slice(0);\n axes.forEach((axis, _) => {\n outputShape[axis] = Math.ceil((ends[axis] - starts[axis]) / steps[axis]);\n });\n const outputTensorInfo: TensorInfo = {dims: outputShape, dataType: inputs[0].dataType};\n\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const outputSize = ShapeUtil.size(outputShape);\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'starts', type: 'u32', length: starts.length},\n {name: 'signs', type: 'i32', length: signs.length}, {name: 'steps', type: 'u32', length: steps.length}\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: starts},\n {type: DataType.int32, data: signs}, {type: DataType.uint32, data: steps},\n ...createTensorShapeVariables(inputs[0].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${calculateInputIndicesImpl(input, output, inputShape)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n let input_indices = calculateInputIndices(output_indices);\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n return {\n name: 'Slice',\n shaderCache: {hint: `${signs.length}_${starts.length}_${steps.length}`, inputDependencies: ['rank']},\n 
getShaderSource,\n getRunData: () => ({\n outputs: [outputTensorInfo],\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const slice = (context: ComputeContext, attributes: SliceAttributes): void => {\n validateInputs(context.inputs, attributes);\n const updatedAttributes = createSliceAttributesFromInputs(context.inputs, attributes);\n context.compute(createSliceProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n // if (ShapeUtil.size(program.outputs[0].dims) > 0) {\n // context.compute(programInfoLoader, {inputs: [0]});\n // } else {\n // // TODO: support empty output\n // throw new Error('slice: output size is 0');\n // }\n};\n\nexport const parseSliceAttributes = (attributes: Record): SliceAttributes => {\n const starts = attributes.starts as number[];\n const ends = attributes.ends as number[];\n const axes = attributes.axes as number[];\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax op requires 1 input.');\n }\n};\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst createSoftmaxProgramInfo = (input: TensorView, attributes: SoftmaxAttributes): ProgramInfo => {\n const shape = input.dims;\n const outputSize = ShapeUtil.size(shape);\n const WG = 64;\n let axis = attributes.axis;\n if (axis < 0) {\n axis = shape.length + axis;\n }\n if (axis < shape.length - 1) {\n throw new Error('softmax only supports last axis for now.');\n }\n\n const cols = shape[axis];\n const rows = outputSize / cols;\n const components = getMaxComponents(cols);\n const packedCols = cols / components;\n\n const maxVector = (name: string, components: number) => {\n if (components === 4) {\n return `max(max(${name}.x, ${name}.y), max(${name}.z, ${name}.w))`;\n } else if (components === 2) {\n return `max(${name}.x, ${name}.y)`;\n } else if (components === 3) {\n return `max(max(${name}.x, ${name}.y), ${name}.z)`;\n }\n\n return name;\n };\n const x = inputVariable('x', input.dataType, input.dims, components);\n const output = outputVariable('result', input.dataType, input.dims, components);\n const valueType = x.type.value;\n // 6.2.4 in wgsl spec\n const threadMaxDecl = tensorTypeToWsglStorageType(input.dataType) === 'f32' ?\n `var threadMax = ${valueType}(-3.402823e+38f);` :\n `var threadMax = ${valueType}(-65504.0h);`;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n var rowMaxShared : ${valueType};\n var rowSumShared : ${valueType};\n var threadShared : array<${valueType}, ${WG}>;\n\n fn getValue(row: i32, col: i32, row_stride: i32) -> ${valueType} {\n let index = row * row_stride + col;\n return x[index];\n }\n\n fn setValue(row: 
i32, col: i32, row_stride: i32, value: ${valueType}) {\n let index = row * row_stride + col;\n result[index] = value;\n }\n ${shaderHelper.registerUniform('packedCols', 'i32').declareVariables(x, output)}\n ${shaderHelper.mainStart()}\n let gindex = i32(global_idx);\n let lindex = i32(local_idx);\n const wg = ${WG};\n let row = gindex / wg;\n let cols = uniforms.packedCols;\n let row_stride : i32 = uniforms.packedCols;\n\n // find the rows max\n ${threadMaxDecl}\n for (var col = lindex; col < cols; col += wg) {\n let value = getValue(row, col, row_stride);\n threadMax = max(threadMax, value);\n }\n if (lindex < cols) {\n threadShared[lindex] = threadMax;\n }\n workgroupBarrier();\n\n var reduceSize = min(cols, wg);\n for (var currSize = reduceSize >> 1; currSize > 0; currSize = reduceSize >> 1) {\n reduceSize = currSize + (reduceSize & 1);\n if (lindex < currSize) {\n threadShared[lindex] = max(threadShared[lindex], threadShared[lindex + reduceSize]);\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowMaxShared = ${valueType}(${maxVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // find the rows sum\n var threadSum = ${valueType}(0.0);\n for (var col = lindex; col < cols; col += wg) {\n let subExp = exp(getValue(row, col, row_stride) - rowMaxShared);\n threadSum += subExp;\n }\n threadShared[lindex] = threadSum;\n workgroupBarrier();\n\n for (var currSize = wg >> 1; currSize > 0; currSize = currSize >> 1) {\n if (lindex < currSize) {\n threadShared[lindex] = threadShared[lindex] + threadShared[lindex + currSize];\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowSumShared = ${valueType}(${sumVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // calculate final value for each element in the row\n for (var col = lindex; col < cols; col += wg) {\n let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared;\n setValue(row, col, row_stride, value);\n }\n }`;\n return {\n name: 'Softmax',\n shaderCache: {hint: `${components}`, inputDependencies: ['type']},\n getRunData: () => ({\n outputs: [{dims: shape, dataType: input.dataType}],\n dispatchGroup: {x: rows},\n programUniforms: [{type: DataType.int32, data: packedCols}]\n }),\n getShaderSource,\n };\n};\n\nexport const softmax = (context: ComputeContext, attributes: SoftmaxAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createSoftmaxProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseSoftmaxAttributes = (attributes: Record): SoftmaxAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly numOutputs: number;\n readonly splitSizes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n};\n\nconst createSplitAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SplitAttributes): SplitAttributes => {\n const splitSizes: number[] = [];\n let numOutputs: number = attributes.numOutputs;\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => splitSizes.push(Number(v)));\n numOutputs = splitSizes.length;\n }\n return createAttributeWithCacheKey({numOutputs, axis: attributes.axis, splitSizes});\n };\n\nconst calculateOutputIndexImpl = (numberOfTensors: number): string => `\nfn calculateOutputIndex(index: u32) -> u32 {\n for (var i: u32 = 0u; i < ${numberOfTensors}u; i += 1u ) {\n if (index < ${getElementAt('uniforms.size_in_split_axis', 'i', numberOfTensors)}) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n}`;\nconst writeBufferDataImpl = (outputs: readonly IndicesHelper[]) => {\n const numberOfTensors = outputs.length;\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = outputs[i].setByIndices('indices', 'input[global_idx]');\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (output_number == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (output_number == ${i}) { ${returnSnippet} }`);\n }\n }\n return `\n fn writeBufferData(output_number: u32, indices: ${outputs[0].type.indices}, global_idx: u32) {\n ${codeLines.join('\\n')}\n }`;\n};\n\nconst createSplitProgramInfo = (inputs: readonly TensorView[], attributes: SplitAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const dataType = inputs[0].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const outputs = new Array(attributes.numOutputs);\n const input = inputVariable('input', dataType, inputShape.length);\n const sizeInSplitAxis = new Array(attributes.numOutputs);\n const outputsTensorInfo: TensorInfo[] = [];\n const outputShapes: number[][] = [];\n let previousSum = 0;\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: inputSize}];\n for (let i = 0; i < attributes.numOutputs; i++) {\n previousSum += attributes.splitSizes[i];\n sizeInSplitAxis[i] = previousSum;\n const outputShape = inputShape.slice();\n outputShape[attributes.axis] = attributes.splitSizes[i];\n outputShapes.push(outputShape);\n outputs[i] = outputVariable(`output${i}`, dataType, outputShape.length);\n outputsTensorInfo.push({dims: outputShapes[i], dataType: inputs[0].dataType});\n }\n programUniforms.push(\n {type: DataType.uint32, data: sizeInSplitAxis}, 
...createTensorShapeVariables(inputShape, ...outputShapes));\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('input_size', 'u32')\n .registerUniform('size_in_split_axis', 'u32', sizeInSplitAxis.length)\n .declareVariables(input, ...outputs)}\n ${calculateOutputIndexImpl(sizeInSplitAxis.length)}\n ${writeBufferDataImpl(outputs)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.input_size')}\n\n var indices = ${input.offsetToIndices('global_idx')};\n var index = ${input.indicesGet('indices', axis)};\n let output_number = calculateOutputIndex(index);\n if (output_number != 0) {\n index -= ${getElementAt('uniforms.size_in_split_axis', 'output_number - 1u', sizeInSplitAxis.length)};\n ${input.indicesSet('indices', axis, 'index')};\n }\n writeBufferData(output_number, indices, global_idx);\n }`;\n return {\n name: 'Split',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: outputsTensorInfo,\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const split = (context: ComputeContext, attributes: SplitAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes =\n context.inputs.length === 1 ? attributes : createSplitAttributesFromInputs(context.inputs, attributes);\n context.compute(createSplitProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n\nexport const parseSplitAttributes = (attributes: Record): SplitAttributes => {\n const axis = attributes.axis as number;\n const splitSizes: number[] = attributes.splitSizes as number[];\n const numOutputs = attributes.numOutputs as number < 0 ? splitSizes.length : attributes.numOutputs as number;\n if (numOutputs !== splitSizes.length) {\n throw new Error('numOutputs and splitSizes lengh must be equal');\n }\n return createAttributeWithCacheKey({axis, numOutputs, splitSizes});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst createWhereOpProgramShader =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], dimsOutput: readonly number[], isBroadcast: boolean,\n typeOutput: number) => {\n const output = outputVariable('output_data', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('a_data', inputs[1].dataType, inputs[1].dims.length, 4);\n const b = inputVariable('b_data', inputs[2].dataType, inputs[2].dims.length, 4);\n const c = inputVariable('c_data', inputs[0].dataType, inputs[0].dims.length, 4);\n\n let assignment: string;\n const expression = (a: string, b: string, c: string) => `select(${b}, ${a}, ${c})`;\n if (!isBroadcast) {\n assignment = output.setByOffset(\n 'global_idx',\n expression(a.getByOffset('global_idx'), b.getByOffset('global_idx'), c.getByOffset('global_idx')));\n } else {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `a_data[index_a${x}][component_a${x}]`;\n const expressionB = `b_data[index_b${x}][component_b${x}]`;\n // eslint-disable-next-line no-bitwise\n const expressionC = `bool(c_data[index_c${x}] & (0xffu << (component_c${x} * 8)))`;\n return `\n let output_indices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offset_a${x} = ${a.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_b${x} = ${b.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_c${x} = ${c.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let index_a${x} = offset_a${x} / 4u;\n let index_b${x} = offset_b${x} / 4u;\n let index_c${x} = offset_c${x} / 4u;\n let component_a${x} = offset_a${x} % 4u;\n let component_b${x} = offset_b${x} % 4u;\n let component_c${x} = offset_c${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expression(expressionA, expressionB, expressionC)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n output_data[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('output_data[global_idx]', 0)}\n ${singleAssignment('output_data[global_idx]', 1)}\n ${singleAssignment('output_data[global_idx]', 2)}\n ${singleAssignment('output_data[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(c, a, b, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createWhereOpProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const dimsA = inputs[1].dims;\n const dimsB = inputs[2].dims;\n const dimsC = inputs[0].dims;\n const outputDataType = inputs[1].dataType;\n\n const isBroadcast = !(ShapeUtil.areEqual(dimsA, dimsB) && ShapeUtil.areEqual(dimsB, dimsC));\n let outputShape = dimsA;\n let outputSize = ShapeUtil.size(dimsA);\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(BroadcastUtil.calcShape(dimsA, dimsB, false)!, dimsC, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform where op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n }\n\n const vecSize = Math.ceil(outputSize / 4);\n\n return {\n name: 'Where',\n shaderCache: {inputDependencies: ['rank', 'rank', 'rank']},\n getShaderSource: (shaderHelper) =>\n createWhereOpProgramShader(shaderHelper, inputs, outputShape, isBroadcast, outputDataType),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms:\n [{type: DataType.uint32, data: vecSize}, ...createTensorShapeVariables(dimsC, dimsA, dimsB, outputShape)],\n }),\n };\n};\n\nexport const where = (context: ComputeContext): void => {\n context.compute(createWhereOpProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {argMax, argMin, parseArgMinMaxAttributes} from './ops/argminmax';\nimport {attention} from './ops/attention';\nimport {batchNorm} from './ops/batch-norm';\nimport {biasAdd} from './ops/bias-add';\nimport {biasSplitGelu} from './ops/bias-split-gelu';\nimport * as binaryOps from './ops/binary-op';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {cumsum, parseCumSumAttributes} from './ops/cumsum';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {einsum, parseEinsumAttributes} from './ops/einsum';\nimport {expand} from './ops/expand';\nimport {fastGelu} from './ops/fast-gelu';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gatherElements, parseGatherElementsAttributes} from './ops/gather-elements';\nimport {gemm, parseGemmAttributes} from './ops/gemm';\nimport {groupQueryAttention, parseGroupQueryAttentionAttributes} from './ops/group-query-attention';\nimport {instanceNorm} from './ops/instance-norm';\nimport {layerNorm} from './ops/layer-norm';\nimport {matMul} from './ops/matmul';\nimport {matMulNBits, parseMatMulNBitsAttributes} from './ops/matmulnbits';\nimport {multiHeadAttention, parseMultiHeadAttentionAttributes} from './ops/multihead-attention';\nimport {pad} from './ops/pad';\nimport * as pool from './ops/pool';\nimport {range} from './ops/range';\nimport {reduceL1, reduceL2, reduceLogSum, reduceLogSumExp, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum, reduceSumSquare} from './ops/reduce';\nimport {parseResizeAttributes, resize} from './ops/resize';\nimport {rotaryEmbedding} from './ops/rotary-embedding';\nimport {skipLayerNorm} from './ops/skip-layer-norm';\nimport {parseSliceAttributes, slice} from './ops/slice';\nimport {parseSoftmaxAttributes, softmax} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {where} from './ops/where';\nimport {ComputeContext} from './types';\n\nexport type RunFunction = (context: ComputeContext, attribute?: unknown) => void;\nexport type ParseAttributeFunction = (attributeRaw: unknown) => unknown;\nexport type 
OperatorImplementation = [RunFunction]|[RunFunction, ParseAttributeFunction];\n\nexport const WEBGPU_OP_RESOLVE_RULES: Map = new Map([\n ['Abs', [unaryOps.abs]],\n ['Acos', [unaryOps.acos]],\n ['Acosh', [unaryOps.acosh]],\n ['Add', [binaryOps.add]],\n ['ArgMax', [argMax, parseArgMinMaxAttributes]],\n ['ArgMin', [argMin, parseArgMinMaxAttributes]],\n ['Asin', [unaryOps.asin]],\n ['Asinh', [unaryOps.asinh]],\n ['Atan', [unaryOps.atan]],\n ['Atanh', [unaryOps.atanh]],\n ['Attention', [attention]],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', [pool.averagePool, pool.parseAveragePoolAttributes]],\n ['BatchNormalization', [batchNorm]],\n ['BiasAdd', [biasAdd]],\n ['BiasSplitGelu', [biasSplitGelu]],\n ['Cast', [unaryOps.cast, unaryOps.parseCastAttributes]],\n ['Ceil', [unaryOps.ceil]],\n ['Clip', [unaryOps.clip]],\n ['Concat', [concat, parseConcatAttributes]],\n ['Conv', [conv, parseConvAttributes]],\n ['ConvTranspose', [convTranspose, parseConvTransposeAttributes]],\n ['Cos', [unaryOps.cos]],\n ['Cosh', [unaryOps.cosh]],\n ['CumSum', [cumsum, parseCumSumAttributes]],\n ['DepthToSpace', [depthToSpace, parseDepthToSpaceAttributes]],\n ['Div', [binaryOps.div]],\n ['Einsum', [einsum, parseEinsumAttributes]],\n ['Elu', [unaryOps.elu, unaryOps.parseAlphaAttributes]],\n ['Equal', [binaryOps.equal]],\n ['Erf', [unaryOps.erf]],\n ['Exp', [unaryOps.exp]],\n ['Expand', [expand]],\n ['FastGelu', [fastGelu]],\n ['Floor', [unaryOps.floor]],\n ['FusedConv', [conv, parseConvAttributes]],\n ['Gather', [gather, parseGatherAttributes]],\n ['GatherElements', [gatherElements, parseGatherElementsAttributes]],\n ['Gelu', [unaryOps.gelu]],\n ['Gemm', [gemm, parseGemmAttributes]],\n ['GlobalAveragePool', [pool.globalAveragePool, pool.parseGlobalAveragePoolAttributes]],\n ['GlobalMaxPool', [pool.globalMaxPool, pool.parseGlobalMaxPoolAttributes]],\n ['Greater', [binaryOps.greater]],\n ['GreaterOrEqual', [binaryOps.greaterOrEqual]],\n ['GroupQueryAttention', [groupQueryAttention, parseGroupQueryAttentionAttributes]],\n ['HardSigmoid', [unaryOps.hardSigmoid, unaryOps.parseHardSigmoidAttributes]],\n ['InstanceNormalization', [instanceNorm]],\n ['LayerNormalization', [layerNorm]],\n ['LeakyRelu', [unaryOps.leakyRelu, unaryOps.parseAlphaAttributes]],\n ['Less', [binaryOps.less]],\n ['LessOrEqual', [binaryOps.lessOrEqual]],\n ['Log', [unaryOps.log]],\n ['MatMul', [matMul]],\n ['MatMulNBits', [matMulNBits, parseMatMulNBitsAttributes]],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', [pool.maxPool, pool.parseMaxPoolAttributes]],\n ['Mul', [binaryOps.mul]],\n ['MultiHeadAttention', [multiHeadAttention, parseMultiHeadAttentionAttributes]],\n ['Neg', [unaryOps.neg]],\n ['Not', [unaryOps.not]],\n ['Pad', [pad]],\n ['Pow', [binaryOps.pow]],\n ['QuickGelu', [unaryOps.quickgelu, unaryOps.parseAlphaAttributes]],\n ['Range', [range]],\n ['Reciprocal', [unaryOps.reciprocal]],\n ['ReduceMin', [reduceMin]],\n ['ReduceMean', [reduceMean]],\n ['ReduceMax', [reduceMax]],\n ['ReduceSum', [reduceSum]],\n ['ReduceProd', [reduceProd]],\n ['ReduceL1', [reduceL1]],\n ['ReduceL2', [reduceL2]],\n ['ReduceLogSum', [reduceLogSum]],\n ['ReduceLogSumExp', [reduceLogSumExp]],\n ['ReduceSumSquare', [reduceSumSquare]],\n ['Relu', [unaryOps.relu]],\n ['Resize', [resize, parseResizeAttributes]],\n ['RotaryEmbedding', [rotaryEmbedding]],\n ['Sigmoid', [unaryOps.sigmoid]],\n ['Sin', [unaryOps.sin]],\n ['Sinh', [unaryOps.sinh]],\n ['Slice', [slice, parseSliceAttributes]],\n ['SkipLayerNormalization', 
[skipLayerNorm]],\n ['Split', [split, parseSplitAttributes]],\n ['Sqrt', [unaryOps.sqrt]],\n ['Softmax', [softmax, parseSoftmaxAttributes]],\n ['Sub', [binaryOps.sub]],\n ['Tan', [unaryOps.tan]],\n ['Tanh', [unaryOps.tanh]],\n ['ThresholdedRelu', [unaryOps.thresholdedRelu, unaryOps.parseAlphaAttributes]],\n ['Tile', [tile]],\n ['Transpose', [transpose, parseTransposeAttributes]],\n ['Where', [where]],\n]);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {createShaderHelper} from './ops/common';\nimport {Artifact, GpuData, ProgramInfo} from './types';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n attributesBound: boolean;\n\n constructor(private backend: WebGpuBackend) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: GpuData[], outputs: GpuData[], dispatchGroup: [number, number, number],\n uniformBufferBinding: GPUBindingResource|undefined): void {\n TRACE_FUNC_BEGIN(buildArtifact.programInfo.name);\n const device = this.backend.device;\n const computePassEncoder = this.backend.getComputePassEncoder();\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2);\n const entries = [];\n for (const input of inputs) {\n entries.push({binding: entries.length, resource: {buffer: input.buffer}});\n }\n for (const output of outputs) {\n entries.push({binding: entries.length, resource: {buffer: output.buffer}});\n }\n if (uniformBufferBinding) {\n entries.push({binding: entries.length, resource: uniformBufferBinding});\n }\n const bindGroup = device.createBindGroup(\n {layout: buildArtifact.computePipeline.getBindGroupLayout(0), entries, label: buildArtifact.programInfo.name});\n\n if (this.backend.sessionStatus === 'capturing') {\n const commandInfo = {\n kernelId: this.backend.currentKernelId!,\n computePipeline: buildArtifact.computePipeline,\n bindGroup,\n dispatchGroup\n };\n const sessionCommandList = this.backend.capturedCommandList.get(this.backend.currentSessionId!);\n sessionCommandList!.push(commandInfo);\n }\n\n computePassEncoder.setPipeline(buildArtifact.computePipeline);\n computePassEncoder.setBindGroup(0, bindGroup);\n computePassEncoder.dispatchWorkgroups(...dispatchGroup);\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2 + 1);\n this.backend.pendingDispatchNumber++;\n\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber ||\n this.backend.queryType === 'at-passes') {\n this.backend.endComputePass();\n }\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber) {\n this.backend.flush();\n }\n TRACE_FUNC_END(buildArtifact.programInfo.name);\n }\n dispose(): void {\n // this.repo.forEach(a => 
this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, normalizedDispatchGroupSize: [number, number, number]): Artifact {\n TRACE_FUNC_BEGIN(programInfo.name);\n const device = this.backend.device;\n const extensions: string[] = [];\n if (device.features.has('shader-f16')) {\n extensions.push('enable f16;');\n }\n const shaderHelper = createShaderHelper(normalizedDispatchGroupSize, this.backend.device.limits);\n const userCode = programInfo.getShaderSource(shaderHelper);\n const code = `${extensions.join('\\n')}\\n${shaderHelper.additionalImplementations}\\n${userCode}`;\n const shaderModule = device.createShaderModule({code, label: programInfo.name});\n LOG_DEBUG('verbose', () => `[WebGPU] ${programInfo.name} shader code: ${code}`);\n\n const computePipeline = device.createComputePipeline(\n {compute: {module: shaderModule, entryPoint: 'main'}, layout: 'auto', label: programInfo.name});\n\n TRACE_FUNC_END(programInfo.name);\n return {programInfo, computePipeline, uniformVariablesInfo: shaderHelper.variablesInfo};\n }\n\n normalizeDispatchGroupSize(dispatchGroup: ReturnType['dispatchGroup']):\n [number, number, number] {\n const x = typeof dispatchGroup === 'number' ? dispatchGroup : dispatchGroup.x;\n const y = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.y || 1);\n const z = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.z || 1);\n const limitPerDimension = this.backend.device.limits.maxComputeWorkgroupsPerDimension;\n if (x <= limitPerDimension && y <= limitPerDimension && z <= limitPerDimension) {\n return [x, y, z];\n }\n const size = x * y * z;\n let dispatchAverage = Math.ceil(Math.sqrt(size));\n if (dispatchAverage > limitPerDimension) {\n dispatchAverage = Math.ceil(Math.cbrt(size));\n if (dispatchAverage > limitPerDimension) {\n throw new Error('Total dispatch size exceeds WebGPU maximum.');\n }\n return [dispatchAverage, dispatchAverage, dispatchAverage];\n } else {\n return [dispatchAverage, dispatchAverage, 1];\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env, Tensor, TRACE, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {DataType, tensorDataTypeEnumToString} from '../wasm-common';\n\nimport {configureLogger, LOG_DEBUG} from './log';\nimport {createView, TensorView} from './tensor-view';\nimport {createGpuDataManager, downloadGpuData, GpuDataManager} from './webgpu/gpu-data-manager';\nimport {RunFunction, WEBGPU_OP_RESOLVE_RULES} from './webgpu/op-resolve-rules';\nimport {ProgramManager} from './webgpu/program-manager';\nimport {AdapterInfo, ComputeContext, GpuArchitecture, GpuData, GpuVendor, ProgramInfo, ProgramInputTensorInfoDependency, SessionState, TimestampQuery} from './webgpu/types';\n\ninterface CommandInfo {\n readonly kernelId: number;\n readonly computePipeline: GPUComputePipeline;\n readonly bindGroup: GPUBindGroup;\n readonly dispatchGroup: [number, number, number];\n}\n\ninterface KernelInfo {\n readonly kernelType: string;\n readonly kernelName: string;\n readonly kernelEntry: RunFunction;\n readonly attributes: [((attribute: unknown) => unknown)|undefined, unknown];\n}\n\ninterface PendingKernelInfo {\n readonly kernelId: number;\n readonly programName: string;\n readonly inputTensorViews: readonly TensorView[];\n readonly outputTensorViews: readonly TensorView[];\n}\n\nconst getProgramInputTensorInfoDependencyKey =\n (inputTensors: readonly TensorView[], inputDependencies: readonly ProgramInputTensorInfoDependency[]): string => {\n if (inputDependencies.length !== inputTensors.length) {\n throw new Error(`inputDependencies length ${inputDependencies.length} is not equal to inputTensors length ${\n inputTensors.length}.`);\n }\n\n const inputInfos: string[] = [];\n for (let i = 0; i < inputTensors.length; ++i) {\n const type = inputTensors[i].dataType;\n switch (inputDependencies[i]) {\n case 'none': {\n inputInfos.push('');\n break;\n }\n case 'type': {\n inputInfos.push(`${type}`);\n break;\n }\n case 'rank': {\n const rank = inputTensors[i].dims.length;\n inputInfos.push(`${type};${rank}`);\n break;\n }\n case 'dims': {\n const dims = inputTensors[i].dims.join(',');\n inputInfos.push(`${type};${dims}`);\n break;\n }\n default:\n throw new Error(`unsupported input dependency: ${inputDependencies[i]}`);\n }\n }\n\n return inputInfos.join('|');\n };\n\n/**\n * get a unique key representing the program from the program info, input shapes and types.\n *\n * @returns a unique key is a shorter string than the shader source, which contains all the information to identify a\n * program. 
if the key is the same, the program shader source should be the same, so we can reuse the program.\n *\n */\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo, inputTensors: readonly TensorView[], is1DimensionDispatch: boolean): string => {\n // final key format:\n // []:is1DimensionDispatch:||...\n let key = programInfo.name;\n if (programInfo.shaderCache?.hint) {\n key += '[' + programInfo.shaderCache.hint + ']';\n }\n key += ':' + is1DimensionDispatch +\n `:${\n getProgramInputTensorInfoDependencyKey(\n inputTensors,\n programInfo.shaderCache?.inputDependencies ??\n new Array(inputTensors.length).fill('dims'))}`;\n return key;\n };\n\nclass AdapterInfoImpl implements AdapterInfo {\n readonly architecture?: string;\n readonly vendor?: string;\n\n constructor(adapterInfo: GPUAdapterInfo) {\n if (adapterInfo) {\n this.architecture = adapterInfo.architecture;\n this.vendor = adapterInfo.vendor;\n }\n }\n\n isArchitecture(architecture: GpuArchitecture): boolean {\n return this.architecture === architecture;\n }\n\n isVendor(vendor: GpuVendor): boolean {\n return this.vendor === vendor;\n }\n}\n\n/**\n * this class is designed to store status and being used as a singleton for JSEP. It will be passed to jsepInit() as\n * the first parameter so that it is stored for future use.\n */\nexport class WebGpuBackend {\n adapterInfo: AdapterInfoImpl;\n device: GPUDevice;\n /**\n * an instance of GpuDataManager to manage a GpuDataId -> GpuBuffer mapping\n */\n gpuDataManager: GpuDataManager;\n /**\n * an instance of ProgramManager to build and run WebGPU compute shader program, and manage a ProgramKey -> Program\n * artifacts mapping\n */\n programManager: ProgramManager;\n\n /**\n * representing the session ID of which is currently being run.\n * `null` means no session is being run.\n * only valid when session.run is executed.\n */\n currentSessionId: number|null = null;\n\n /**\n * representing the kernel ID of which is currently being computed (CPU code perspective).\n * `null` means no kernel is being computed.\n * only one kernel can be computed at a moment.\n */\n currentKernelId: number|null = null;\n /**\n * a list of temporary GPU data for the current kernel. should release when the kernel done computation.\n */\n private temporaryData: GpuData[];\n /**\n * a KernelID -> a GPU data list, which stores persistent GPU data owned by the specific kernel.\n */\n private kernelPersistentData: Map;\n /**\n * a KernelID -> a custom data, which stores custom data owned by the specific kernel.\n */\n private kernelCustomData: Map;\n /**\n * get the custom data of the current kernel\n */\n get currentKernelCustomData(): {[key: string]: unknown} {\n if (this.currentKernelId === null) {\n throw new Error('currentKernelCustomData(): currentKernelId is null. 
(should not happen)');\n }\n\n let data = this.kernelCustomData.get(this.currentKernelId);\n if (!data) {\n data = {};\n this.kernelCustomData.set(this.currentKernelId, data);\n }\n\n return data;\n }\n\n // KernelID -> kernelInfo mapping\n kernels: Map;\n private commandEncoder: GPUCommandEncoder|null = null;\n private computePassEncoder: GPUComputePassEncoder|null = null;\n maxDispatchNumber = 16;\n pendingDispatchNumber = 0;\n\n // info of kernels pending submission for a single batch\n private pendingKernels: PendingKernelInfo[] = [];\n // queryReadBuffer -> pendingKernels mapping for all the batches\n private pendingQueries: Map = new Map();\n private queryResolveBuffer?: GPUBuffer;\n private querySet?: GPUQuerySet;\n private queryTimeBase?: bigint;\n queryType: TimestampQuery;\n\n env: Env;\n sessionStatus: SessionState = 'default';\n /**\n * a SessionID -> CommandInfo[] mapping. It's used to record all GPU commands for corresponding session.\n */\n capturedCommandList: Map = new Map();\n\n /**\n * a SessionID -> PendingKernelInfo[] mapping for profiling.\n */\n private capturedPendingKernels: Map = new Map();\n\n /**\n * a SessionID -> a Map of (InputOutputIndex -> [ID, GPUBuffer]) mapping.\n */\n sessionExternalDataMapping: Map> = new Map();\n\n async initialize(env: Env, adapter: GPUAdapter): Promise {\n this.env = env;\n const requiredFeatures: GPUFeatureName[] = [];\n const deviceDescriptor: GPUDeviceDescriptor = {\n requiredLimits: {\n maxComputeWorkgroupStorageSize: adapter.limits.maxComputeWorkgroupStorageSize,\n maxComputeWorkgroupsPerDimension: adapter.limits.maxComputeWorkgroupsPerDimension,\n maxStorageBufferBindingSize: adapter.limits.maxStorageBufferBindingSize,\n maxBufferSize: adapter.limits.maxBufferSize,\n maxComputeInvocationsPerWorkgroup: adapter.limits.maxComputeInvocationsPerWorkgroup,\n maxComputeWorkgroupSizeX: adapter.limits.maxComputeWorkgroupSizeX,\n maxComputeWorkgroupSizeY: adapter.limits.maxComputeWorkgroupSizeY,\n maxComputeWorkgroupSizeZ: adapter.limits.maxComputeWorkgroupSizeZ,\n },\n requiredFeatures,\n };\n\n if (adapter.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n requiredFeatures.push('chromium-experimental-timestamp-query-inside-passes' as GPUFeatureName);\n } else if (adapter.features.has('timestamp-query')) {\n requiredFeatures.push('timestamp-query');\n }\n if (adapter.features.has('shader-f16')) {\n requiredFeatures.push('shader-f16');\n }\n\n this.device = await adapter.requestDevice(deviceDescriptor);\n this.adapterInfo = new AdapterInfoImpl(adapter.info || await adapter.requestAdapterInfo());\n this.gpuDataManager = createGpuDataManager(this);\n this.programManager = new ProgramManager(this);\n this.kernels = new Map();\n this.kernelPersistentData = new Map();\n this.kernelCustomData = new Map();\n\n // set up flags for logger\n configureLogger(env.logLevel!, !!env.debug);\n\n // TODO: set up flags\n\n this.device.onuncapturederror = ev => {\n if (ev.error instanceof GPUValidationError) {\n // eslint-disable-next-line no-console\n console.error(`An uncaught WebGPU validation error was raised: ${ev.error.message}`);\n }\n };\n\n Object.defineProperty(\n this.env.webgpu, 'device', {value: this.device, writable: false, enumerable: true, configurable: false});\n Object.defineProperty(\n this.env.webgpu, 'adapter', {value: adapter, writable: false, enumerable: true, configurable: false});\n\n // init queryType, which is necessary for InferenceSession.create\n this.setQueryType();\n }\n\n dispose(): void {\n if 
(typeof this.querySet !== 'undefined') {\n this.querySet.destroy();\n }\n this.gpuDataManager.dispose();\n }\n\n getCommandEncoder(): GPUCommandEncoder {\n if (!this.commandEncoder) {\n this.commandEncoder = this.device.createCommandEncoder();\n }\n return this.commandEncoder;\n }\n\n getComputePassEncoder(): GPUComputePassEncoder {\n if (!this.computePassEncoder) {\n const commandEncoder = this.getCommandEncoder();\n const computePassDescriptor: GPUComputePassDescriptor = {};\n\n if (this.queryType === 'at-passes') {\n computePassDescriptor.timestampWrites = {\n querySet: this.querySet!,\n beginningOfPassWriteIndex: this.pendingDispatchNumber * 2,\n endOfPassWriteIndex: this.pendingDispatchNumber * 2 + 1,\n };\n }\n\n this.computePassEncoder = commandEncoder.beginComputePass(computePassDescriptor);\n }\n return this.computePassEncoder;\n }\n\n endComputePass(): void {\n if (this.computePassEncoder) {\n this.computePassEncoder.end();\n this.computePassEncoder = null;\n }\n }\n\n flush(): void {\n if (!this.commandEncoder) {\n return;\n }\n\n TRACE_FUNC_BEGIN();\n\n this.endComputePass();\n let queryReadBuffer: GPUBuffer;\n if (this.queryType !== 'none') {\n this.commandEncoder.resolveQuerySet(\n this.querySet!, 0, this.pendingDispatchNumber * 2, this.queryResolveBuffer!, 0);\n\n queryReadBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.pendingDispatchNumber * 2 * 8, usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST});\n\n this.pendingQueries.set(queryReadBuffer, this.pendingKernels);\n this.pendingKernels = [];\n this.commandEncoder.copyBufferToBuffer(\n this.queryResolveBuffer!, 0, queryReadBuffer, 0, this.pendingDispatchNumber * 2 * 8);\n }\n\n this.device.queue.submit([this.commandEncoder.finish()]);\n this.gpuDataManager.refreshPendingBuffers();\n this.commandEncoder = null;\n this.pendingDispatchNumber = 0;\n\n if (this.queryType !== 'none') {\n void queryReadBuffer!.mapAsync(GPUMapMode.READ).then(() => {\n const mappedData = new BigUint64Array(queryReadBuffer.getMappedRange());\n const pendingKernels = this.pendingQueries.get(queryReadBuffer)!;\n for (let i = 0; i < mappedData.length / 2; i++) {\n const pendingKernelInfo = pendingKernels[i];\n const kernelId = pendingKernelInfo.kernelId;\n const kernelInfo = this.kernels.get(kernelId)!;\n const kernelType = kernelInfo.kernelType;\n const kernelName = kernelInfo.kernelName;\n const programName = pendingKernelInfo.programName;\n const inputTensorViews = pendingKernelInfo.inputTensorViews;\n const outputTensorViews = pendingKernelInfo.outputTensorViews;\n const startTimeU64 = mappedData[i * 2];\n const endTimeU64 = mappedData[i * 2 + 1];\n\n if (typeof this.queryTimeBase === 'undefined') {\n this.queryTimeBase = startTimeU64;\n }\n\n const startTime = Number(startTimeU64 - this.queryTimeBase);\n const endTime = Number(endTimeU64 - this.queryTimeBase);\n\n if (!Number.isSafeInteger(startTime) || !Number.isSafeInteger(endTime)) {\n throw new RangeError('incorrect timestamp range');\n }\n\n if (this.env.webgpu.profiling?.ondata) {\n this.env.webgpu.profiling.ondata({\n version: 1,\n inputsMetadata: inputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n outputsMetadata: outputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n kernelId,\n kernelType,\n kernelName,\n programName,\n startTime,\n endTime,\n });\n } else {\n // if no callback is provided, print the profiling message to 
console\n let inputShapes = '';\n inputTensorViews.forEach((value, i) => {\n inputShapes += `input[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n let outputShapes = '';\n outputTensorViews.forEach((value, i) => {\n outputShapes += `output[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n // eslint-disable-next-line no-console\n console.log(`[profiling] kernel \"${kernelId}|${kernelType}|${kernelName}|${programName}\" ${inputShapes}${\n outputShapes}execution time: ${endTime - startTime} ns`);\n }\n TRACE('GPU', `${programName}::${startTimeU64}::${endTimeU64}`);\n }\n queryReadBuffer.unmap();\n this.pendingQueries.delete(queryReadBuffer);\n });\n }\n TRACE_FUNC_END();\n }\n\n /**\n * run a WebGPU program.\n * @param program a ProgramInfo instance\n * @param inputTensorViews a TensorView array. each element represents a value already exists in GPU.\n * @param outputIndices an indices array. each element can be either -1 (temporary data), -2 (persistent data) or an\n * index to the kernel's output.\n * @param createKernelOutput a callback function that create a value to kernel's output with the given index\n * @param createIntermediateOutput a callback function that create a value as a intermediate value, either temporary\n * or persistent (owned by the current kernel)\n * @returns a TensorView array representing the result.\n */\n run(program: ProgramInfo, inputTensorViews: readonly TensorView[], outputIndices: readonly number[],\n createKernelOutput: (index: number, dataType: number, dims: readonly number[]) => TensorView,\n createIntermediateOutput: (dataType: number, dims: readonly number[]) => TensorView,\n outputCount: number): TensorView[] {\n TRACE_FUNC_BEGIN(program.name);\n // create info for inputs\n const inputDatas: GpuData[] = [];\n for (let i = 0; i < inputTensorViews.length; ++i) {\n const data = inputTensorViews[i].data;\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(data);\n if (!gpuData) {\n throw new Error(`no GPU data for input: ${data}`);\n }\n inputDatas.push(gpuData);\n }\n\n const {outputs, dispatchGroup, programUniforms} = program.getRunData(inputTensorViews);\n\n // check output indices\n const validatedOutputIndices = outputIndices.length === 0 ? outputs.map((_, i) => i) : outputIndices;\n if (validatedOutputIndices.length !== outputs.length) {\n throw new Error(`Output size ${validatedOutputIndices.length} must be equal to ${outputs.length}.`);\n }\n\n // create info for outputs\n const outputTensorViews: TensorView[] = [];\n const outputDatas: GpuData[] = [];\n for (let i = 0; i < outputs.length; ++i) {\n // value -1 and -2 are used for creating temporary and persistent outputs.\n // value -3 is used for placeholder output. So -3, -2, -1 and 0, 1, 2, ... are valid\n // output indices. 
see type definition of ComputeContextInputsOutputsMapping for more details.\n if (!Number.isInteger(validatedOutputIndices[i]) || validatedOutputIndices[i] < -3 ||\n validatedOutputIndices[i] >= outputCount) {\n throw new Error(`Invalid output index: ${validatedOutputIndices[i]}`);\n }\n if (validatedOutputIndices[i] === -3) {\n continue;\n }\n const isTemporary = validatedOutputIndices[i] === -1;\n const isPersistent = validatedOutputIndices[i] === -2;\n const tensorView = (isTemporary || isPersistent) ?\n createIntermediateOutput(outputs[i].dataType, outputs[i].dims) :\n createKernelOutput(validatedOutputIndices[i], outputs[i].dataType, outputs[i].dims);\n outputTensorViews.push(tensorView);\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (tensorView.data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(tensorView.data);\n if (!gpuData) {\n throw new Error(`no GPU data for output: ${tensorView.data}`);\n }\n if (isTemporary) {\n this.temporaryData.push(gpuData);\n }\n if (isPersistent) {\n let persistentData = this.kernelPersistentData.get(this.currentKernelId!);\n if (!persistentData) {\n persistentData = [];\n this.kernelPersistentData.set(this.currentKernelId!, persistentData);\n }\n persistentData.push(gpuData);\n }\n outputDatas.push(gpuData);\n }\n\n // when there are any zero-sized tensor in the inputs or outputs, we should report error unless all outputs are\n // zero-sized tensors.\n if (inputDatas.length !== inputTensorViews.length || outputDatas.length !== outputTensorViews.length) {\n // if all outputs are zero-sized tensors, there is no need to run the program.\n if (outputDatas.length === 0) {\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n // if some outputs are zero-sized tensors, report an error.\n //\n // TODO: so far we don't see any use case that outputs include both zero-sized tensors and non-zero-sized tensors.\n // If we see such use case, we need to make a change here to support it.\n throw new Error(\n `Program ${program.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`);\n }\n\n // load uniforms\n // TODO: add cache for uniform (is it necessary?)\n //\n let uniformBufferBinding: GPUBindingResource|undefined;\n if (programUniforms) {\n let currentOffset = 0;\n const offsets: number[] = [];\n\n programUniforms.forEach(v => {\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (data.length === 0) {\n return;\n }\n // https://www.w3.org/TR/WGSL/#alignof\n const sizeOfElement = v.type === DataType.float16 ? 2 : 4;\n let sizeOfVecOrMat;\n let baseAlignment;\n if (v.type === DataType.float16) {\n baseAlignment = data.length > 4 ? 16 : (data.length > 2 ? 8 : data.length * sizeOfElement);\n sizeOfVecOrMat = data.length > 4 ? 16 : sizeOfElement * data.length;\n } else {\n baseAlignment = data.length <= 2 ? data.length * sizeOfElement : 16;\n sizeOfVecOrMat = 16;\n }\n currentOffset = Math.ceil(currentOffset / baseAlignment) * baseAlignment;\n offsets.push(currentOffset);\n // For non-float16 type, when data.length > 4, the uniform variable is of type array,N>, where\n // N = Math.ceil(data.length / 4) and SizeOf(vec4) = 16. The total byte length is N *\n // SizeOf(vec4). For float16 type, when data.length > 4, the uniform variable is of type\n // array,N>, where N = Math.ceil(data.length / 8) and SizeOf(mat2x4) = 16. The total byte\n // length is N * SizeOf(mat2x4).\n const elementPerVecOrMat = v.type === DataType.float16 ? 
8 : 4;\n currentOffset += data.length > 4 ? Math.ceil(data.length / elementPerVecOrMat) * sizeOfVecOrMat :\n data.length * sizeOfElement;\n });\n\n // Meet alignment of struct here: https://www.w3.org/TR/WGSL/#alignment-and-size. For simplicity, set\n // maxAlignmentOfField to 16 since the underlying buffer has been rounded up to 16.\n const maxAlignmentOfField = 16;\n currentOffset = Math.ceil(currentOffset / maxAlignmentOfField) * maxAlignmentOfField;\n const arrayBuffer = new ArrayBuffer(currentOffset);\n programUniforms.forEach((v, i) => {\n const offset = offsets[i];\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (v.type === DataType.int32) {\n new Int32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.uint32) {\n new Uint32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float16) {\n // TODO: use Float16Array.\n new Uint16Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float) {\n new Float32Array(arrayBuffer, offset, data.length).set(data);\n } else {\n throw new Error(`Unsupported uniform type: ${tensorDataTypeEnumToString(v.type)}`);\n }\n });\n\n const uniformBufferData =\n // eslint-disable-next-line no-bitwise\n this.gpuDataManager.create(currentOffset, GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM);\n this.device.queue.writeBuffer(uniformBufferData.buffer, 0, arrayBuffer, 0, currentOffset);\n this.gpuDataManager.release(uniformBufferData.id);\n uniformBufferBinding = {offset: 0, size: currentOffset, buffer: uniformBufferData.buffer};\n }\n\n const normalizedDispatchGroup = this.programManager.normalizeDispatchGroupSize(dispatchGroup);\n const is1DimensionDispatch = normalizedDispatchGroup[1] === 1 && normalizedDispatchGroup[2] === 1;\n // get program info\n const key = getProgramInfoUniqueKey(program, inputTensorViews, is1DimensionDispatch);\n let artifact = this.programManager.getArtifact(key);\n if (!artifact) {\n artifact = this.programManager.build(program, normalizedDispatchGroup);\n this.programManager.setArtifact(key, artifact);\n LOG_DEBUG('info', () => `[artifact] key: ${key}, programName: ${program.name}`);\n }\n\n // validate uniform variables\n if (programUniforms && artifact.uniformVariablesInfo) {\n if (programUniforms.length !== artifact.uniformVariablesInfo.length) {\n throw new Error(`Uniform variables count mismatch: expect ${artifact.uniformVariablesInfo.length}, got ${\n programUniforms.length} in program \"${artifact.programInfo.name}\".`);\n }\n for (let i = 0; i < programUniforms.length; i++) {\n const uniform = programUniforms[i];\n const actualType = uniform.type;\n const actualLength = typeof uniform.data === 'number' ? 
1 : uniform.data.length;\n const [type, length] = artifact.uniformVariablesInfo[i];\n if (actualType !== type || actualLength !== length) {\n throw new Error(`Uniform variable ${i} mismatch: expect type ${type} with size ${length}, got type ${\n actualType} with size ${actualLength} in program \"${artifact.programInfo.name}\".`);\n }\n }\n }\n\n LOG_DEBUG(\n 'info',\n () => `[ProgramManager] run \"${program.name}\" (key=${key}) with ${normalizedDispatchGroup[0]}x${\n normalizedDispatchGroup[1]}x${normalizedDispatchGroup[2]}`);\n\n if (this.queryType !== 'none' || this.sessionStatus === 'capturing') {\n const pendingKernelInfo: PendingKernelInfo = {\n kernelId: this.currentKernelId!,\n programName: artifact.programInfo.name,\n inputTensorViews,\n outputTensorViews,\n };\n this.pendingKernels.push(pendingKernelInfo);\n\n if (this.sessionStatus === 'capturing') {\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n sessionPendingKernels!.push(pendingKernelInfo);\n }\n }\n\n this.programManager.run(artifact, inputDatas, outputDatas, normalizedDispatchGroup, uniformBufferBinding);\n\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n\n upload(gpuDataId: number, data: Uint8Array): void {\n this.gpuDataManager.upload(gpuDataId, data);\n }\n\n memcpy(src: number, dst: number): void {\n this.gpuDataManager.memcpy(src, dst);\n }\n\n async download(gpuDataId: number, getTargetBuffer: () => Uint8Array): Promise {\n // the underlying buffer may be changed after the async function is called. so we use a getter function to make sure\n // the buffer is up-to-date.\n await this.gpuDataManager.download(gpuDataId, getTargetBuffer);\n }\n\n alloc(size: number): number {\n return this.gpuDataManager.create(size).id;\n }\n\n free(ptr: number): number {\n return this.gpuDataManager.release(ptr);\n }\n\n createKernel(kernelType: string, kernelId: number, attribute: unknown, kernelName: string): void {\n const op = WEBGPU_OP_RESOLVE_RULES.get(kernelType);\n if (!op) {\n throw new Error(`kernel not implemented: ${kernelType}`);\n }\n\n const kernelInfo: KernelInfo = {\n kernelType,\n kernelName,\n kernelEntry: op[0],\n attributes: [op[1], attribute],\n };\n this.kernels.set(kernelId, kernelInfo);\n }\n\n releaseKernel(kernelId: number): void {\n const persistentData = this.kernelPersistentData.get(kernelId);\n if (persistentData) {\n for (const data of persistentData) {\n this.gpuDataManager.release(data.id);\n }\n this.kernelPersistentData.delete(kernelId);\n }\n\n this.kernelCustomData.delete(kernelId);\n this.kernels.delete(kernelId);\n }\n\n computeKernel(kernelId: number, context: ComputeContext, errors: Array>): number {\n const kernel = this.kernels.get(kernelId);\n if (!kernel) {\n throw new Error(`kernel not created: ${kernelId}`);\n }\n const kernelType = kernel.kernelType;\n const kernelName = kernel.kernelName;\n const kernelEntry = kernel.kernelEntry;\n const attributes = kernel.attributes;\n if (this.currentKernelId !== null) {\n throw new Error(`kernel \"[${kernelType}] ${kernelName}\" is not allowed to be called recursively`);\n }\n this.currentKernelId = kernelId;\n\n // parse attributes if necessary\n if (attributes[0]) {\n attributes[1] = attributes[0](attributes[1]);\n attributes[0] = undefined;\n }\n\n LOG_DEBUG('info', () => `[WebGPU] Start to run kernel \"[${kernelType}] ${kernelName}\"...`);\n\n const useErrorScope = this.env.debug;\n\n this.temporaryData = [];\n try {\n if (useErrorScope) {\n this.device.pushErrorScope('validation');\n 
}\n\n kernelEntry(context, attributes[1]);\n return 0; // ORT_OK\n } catch (e) {\n errors.push(Promise.resolve(`[WebGPU] Kernel \"[${kernelType}] ${kernelName}\" failed. ${e}`));\n return 1; // ORT_FAIL\n } finally {\n if (useErrorScope) {\n errors.push(this.device.popErrorScope().then(\n err => err ? `GPU validation error for kernel \"[${kernelType}] ${kernelName}\": ${err.message}` : null));\n }\n\n for (const data of this.temporaryData) {\n this.gpuDataManager.release(data.id);\n }\n this.temporaryData = [];\n this.currentKernelId = null;\n }\n }\n\n // #region external buffer\n registerBuffer(sessionId: number, index: number, buffer: GPUBuffer, size: number): number {\n let sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (!sessionInputOutputMapping) {\n sessionInputOutputMapping = new Map();\n this.sessionExternalDataMapping.set(sessionId, sessionInputOutputMapping);\n }\n\n const previousBuffer = sessionInputOutputMapping.get(index);\n const id = this.gpuDataManager.registerExternalBuffer(buffer, size, previousBuffer?.[1]);\n sessionInputOutputMapping.set(index, [id, buffer]);\n return id;\n }\n unregisterBuffers(sessionId: number): void {\n const sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (sessionInputOutputMapping) {\n sessionInputOutputMapping.forEach(bufferInfo => this.gpuDataManager.unregisterExternalBuffer(bufferInfo[1]));\n this.sessionExternalDataMapping.delete(sessionId);\n }\n }\n getBuffer(gpuDataId: number): GPUBuffer {\n const gpuData = this.gpuDataManager.get(gpuDataId);\n if (!gpuData) {\n throw new Error(`no GPU data for buffer: ${gpuDataId}`);\n }\n return gpuData.buffer;\n }\n createDownloader(gpuBuffer: GPUBuffer, size: number, type: Tensor.GpuBufferDataTypes):\n () => Promise {\n return async () => {\n const data = await downloadGpuData(this, gpuBuffer, size);\n return createView(data.buffer, type);\n };\n }\n // #endregion\n writeTimestamp(index: number): void {\n if (this.queryType !== 'inside-passes') {\n return;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (this.computePassEncoder as any).writeTimestamp(this.querySet, index);\n }\n setQueryType(): void {\n this.queryType = 'none';\n if (this.env.webgpu.profiling?.mode === 'default' ||\n (typeof this.env.trace === 'undefined' ? 
this.env.wasm.trace : this.env.trace)) {\n if (this.device.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n this.queryType = 'inside-passes';\n } else if (this.device.features.has('timestamp-query')) {\n this.queryType = 'at-passes';\n }\n\n if (this.queryType !== 'none' && typeof this.querySet === 'undefined') {\n this.querySet = this.device.createQuerySet({\n type: 'timestamp',\n count: this.maxDispatchNumber * 2,\n });\n this.queryResolveBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.maxDispatchNumber * 2 * 8, usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE});\n }\n }\n }\n\n captureBegin(): void {\n LOG_DEBUG('info', 'captureBegin');\n if (!this.capturedCommandList.get(this.currentSessionId!)) {\n this.capturedCommandList.set(this.currentSessionId!, []);\n }\n if (!this.capturedPendingKernels.get(this.currentSessionId!)) {\n this.capturedPendingKernels.set(this.currentSessionId!, []);\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'capturing';\n }\n captureEnd(): void {\n LOG_DEBUG('info', 'captureEnd');\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n replay(): void {\n LOG_DEBUG('info', 'replay');\n this.sessionStatus = 'replaying';\n const sessionCommandList = this.capturedCommandList.get(this.currentSessionId!);\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n const length = sessionCommandList!.length;\n this.pendingKernels = [];\n for (let i = 0; i < length; i++) {\n const computePassEncoder = this.getComputePassEncoder();\n const command = sessionCommandList![i];\n this.writeTimestamp(this.pendingDispatchNumber * 2);\n computePassEncoder.setPipeline(command.computePipeline);\n computePassEncoder.setBindGroup(0, command.bindGroup);\n computePassEncoder.dispatchWorkgroups(...command.dispatchGroup);\n this.writeTimestamp(this.pendingDispatchNumber * 2 + 1);\n this.pendingDispatchNumber++;\n if (this.queryType !== 'none') {\n this.pendingKernels.push(sessionPendingKernels![i]);\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber || this.queryType === 'at-passes') {\n this.endComputePass();\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber) {\n this.flush();\n }\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n\n onReleaseSession(sessionId: number): void {\n this.unregisterBuffers(sessionId);\n if (this.capturedCommandList.has(sessionId)) {\n this.capturedCommandList.delete(sessionId);\n }\n if (this.capturedPendingKernels.has(sessionId)) {\n this.capturedPendingKernels.delete(sessionId);\n }\n this.gpuDataManager.onReleaseSession(sessionId);\n }\n\n onRunStart(sessionId: number): void {\n this.currentSessionId = sessionId;\n this.setQueryType();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from '../wasm-types';\nimport {DataType, getTensorElementSize} from '../wasm-common';\n\nimport {WebGpuBackend} from './backend-webgpu';\nimport {LOG_DEBUG} from './log';\nimport {TensorView} from './tensor-view';\nimport {ShapeUtil} from './util';\nimport {AdapterInfo, ComputeContext, ComputeContextInputsOutputsMapping, ProgramInfo} from './webgpu/types';\n\n/* eslint-disable no-bitwise */\n\nclass TensorViewImpl implements TensorView {\n constructor(\n private module: OrtWasmModule, public readonly dataType: number, public readonly data: number,\n public readonly dims: readonly number[]) {}\n\n getFloat32Array(): Float32Array {\n if (this.dataType !== DataType.float) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Float32Array() :\n new Float32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getBigInt64Array(): BigInt64Array {\n if (this.dataType !== DataType.int64) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new BigInt64Array() :\n new BigInt64Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getInt32Array(): Int32Array {\n if (this.dataType !== DataType.int32) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Int32Array() : new Int32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n reshape(newDims: readonly number[]): TensorView {\n if (ShapeUtil.size(newDims) !== ShapeUtil.size(this.dims)) {\n throw new Error('Invalid new shape');\n }\n return new TensorViewImpl(this.module, this.dataType, this.data, newDims);\n }\n}\n\nclass ComputeContextImpl implements ComputeContext {\n readonly adapterInfo: AdapterInfo;\n readonly opKernelContext: number;\n readonly inputs: readonly TensorView[];\n readonly outputCount: number;\n get kernelCustomData(): {[key: string]: unknown} {\n return this.backend.currentKernelCustomData;\n }\n get customDataBuffer(): Uint8Array {\n return this.module.HEAPU8.subarray(this.customDataOffset, this.customDataOffset + this.customDataSize);\n }\n private customDataOffset = 0;\n private customDataSize = 0;\n constructor(private module: OrtWasmModule, private backend: WebGpuBackend, contextDataOffset: number) {\n this.adapterInfo = backend.adapterInfo;\n const heapU32 = module.HEAPU32;\n\n // extract context data\n let dataIndex = (contextDataOffset >>> 2);\n this.opKernelContext = heapU32[dataIndex++];\n const inputCount = heapU32[dataIndex++];\n this.outputCount = heapU32[dataIndex++];\n this.customDataOffset = heapU32[dataIndex++];\n this.customDataSize = heapU32[dataIndex++];\n\n const inputs: TensorView[] = [];\n for (let i = 0; i < inputCount; i++) {\n const dataType = heapU32[dataIndex++];\n const data = heapU32[dataIndex++];\n const dim = heapU32[dataIndex++];\n const dims: number[] = [];\n for (let d = 0; d < dim; d++) {\n dims.push(heapU32[dataIndex++]);\n }\n inputs.push(new TensorViewImpl(module, dataType, data, dims));\n }\n this.inputs = inputs;\n }\n\n getMaxComputeWorkgroupSizes(): [number, number, number] {\n return [\n this.backend.device.limits.maxComputeWorkgroupSizeX, this.backend.device.limits.maxComputeWorkgroupSizeY,\n this.backend.device.limits.maxComputeWorkgroupSizeZ\n ];\n }\n\n getMaxComputeWorkgroupStoragesize(): number 
{\n return this.backend.device.limits.maxComputeWorkgroupStorageSize;\n }\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[] {\n // prepare inputs. inputs should always be valid data.\n const mappedInputs =\n inputsOutputsMapping?.inputs?.map(i => typeof i === 'number' ? this.inputs[i] : i) ?? this.inputs;\n // prepare outputs.\n const outputIndices = inputsOutputsMapping?.outputs ?? [];\n const createKernelOutput = (index: number, dataType: number, dims: readonly number[]): TensorView =>\n new TensorViewImpl(this.module, dataType, this.output(index, dims), dims);\n const createTemporaryOutput = (dataType: number, dims: readonly number[]): TensorView => {\n const elementSize = getTensorElementSize(dataType);\n if (!elementSize) {\n throw new Error(`Unsupported data type: ${dataType}`);\n }\n const bufferSize = elementSize * ShapeUtil.size(dims);\n const gpuDataId = bufferSize > 0 ? this.backend.gpuDataManager.create(bufferSize).id : 0;\n return new TensorViewImpl(this.module, dataType, gpuDataId, dims);\n };\n return this.backend.run(\n program, mappedInputs, outputIndices, createKernelOutput, createTemporaryOutput, this.outputCount);\n }\n\n output(index: number, dims: readonly number[]): number {\n const stack = this.module.stackSave();\n try {\n const data = this.module.stackAlloc((1 + dims.length) * 4 /* sizeof(size_t) */);\n let offset = data >> 2;\n this.module.HEAPU32[offset++] = dims.length;\n for (let i = 0; i < dims.length; i++) {\n this.module.HEAPU32[offset++] = dims[i];\n }\n return this.module._JsepOutput!(this.opKernelContext, index, data);\n } catch (e) {\n throw new Error(\n `Failed to generate kernel's output[${index}] with dims [${dims}]. ` +\n 'If you are running with pre-allocated output, please make sure the output type/dims are correct. ' +\n `Error: ${e}`);\n } finally {\n this.module.stackRestore(stack);\n }\n }\n}\n\n/**\n * Initialize JSEP with WebGPU backend.\n *\n * This function will be called after the WebAssembly module is loaded and initialized (\"_OrtInit\" is called), once for\n * each of the following EPs if they are specified:\n * - \"webgpu\"\n * - \"webnn\"\n *\n * For WebGPU, this function expects:\n * - WebGPU is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebGPU is available in current environment. (a valid GPUAdapter is passed in)\n *\n * For WebNN, this function expects:\n * - WebNN is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebNN is available in current environment. (navigator.ml is not undefined)\n *\n * If the WebAssembly module is not built with JSEP support, this function will throw an error. This will invalidate\n * 'webgpu'/'webnn' backend.\n *\n * @param name - the name of the EP, either \"webgpu\" or \"webnn\"\n * @param module - the ORT WebAssembly module\n * @param env - the ORT environment variable (ort.env)\n * @param gpuAdapter - the pre-created GPU adapter\n */\nexport const init =\n async(name: 'webgpu'|'webnn', module: OrtWasmModule, env: Env, gpuAdapter?: GPUAdapter): Promise => {\n const jsepInit = module.jsepInit;\n if (!jsepInit) {\n throw new Error('Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.');\n }\n\n if (name === 'webgpu') {\n const backend = new WebGpuBackend();\n await backend.initialize(env, gpuAdapter!);\n\n jsepInit('webgpu', [\n // backend\n backend,\n\n // jsepAlloc()\n (size: number) => backend.alloc(size),\n\n // jsepFree()\n (ptr: number) => backend.free(ptr),\n\n // jsepCopy(src, dst, size, isSourceGpu)\n (src: number, dst: number, size: number, isSourceGpu = false) => {\n if (isSourceGpu) {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyGpuToGpu: src=${src}, dst=${dst}, size=${size}`);\n backend.memcpy(src, dst);\n } else {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyCpuToGpu: dataOffset=${src}, gpuDataId=${dst}, size=${size}`);\n const data = module.HEAPU8.subarray(src >>> 0, (src >>> 0) + size);\n backend.upload(dst, data);\n }\n },\n\n // jsepCopyAsync(src, dst, size)\n async(gpuDataId: number, dataOffset: number, size: number):\n Promise => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepCopyGpuToCpu: gpuDataId=${gpuDataId}, dataOffset=${dataOffset}, size=${size}`);\n\n await backend.download(\n gpuDataId, () => module.HEAPU8.subarray(dataOffset >>> 0, (dataOffset >>> 0) + size));\n },\n\n // jsepCreateKernel\n (kernelType: string, kernelId: number, attribute: unknown) => backend.createKernel(\n kernelType, kernelId, attribute, module.UTF8ToString(module._JsepGetNodeName!(kernelId))),\n\n // jsepReleaseKernel\n (kernel: number) => backend.releaseKernel(kernel),\n\n // jsepRun\n (kernel: number, contextDataOffset: number, sessionHandle: number, errors: Array>) => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepRun: sessionHandle=${sessionHandle}, kernel=${kernel}, contextDataOffset=${\n contextDataOffset}`);\n const context = new ComputeContextImpl(module, backend, contextDataOffset);\n return backend.computeKernel(kernel, context, errors);\n },\n // jsepCaptureBegin\n () => backend.captureBegin(),\n // jsepCaptureEnd\n () => backend.captureEnd(),\n // jsepReplay\n () => backend.replay()\n ]);\n } else {\n jsepInit('webnn');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// WebNN API currently does not have a TypeScript definition file. This file is a workaround with types generated from\n// WebNN API specification.\n// https://github.com/webmachinelearning/webnn/issues/677\n/// \n\nimport {Env, InferenceSession, Tensor} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport {setRunOptions} from './run-options';\nimport {setSessionOptions} from './session-options';\nimport {dataLocationStringToEnum, getTensorElementSize, isGpuBufferSupportedType, logLevelStringToEnum, tensorDataTypeEnumToString, tensorDataTypeStringToEnum, tensorTypeToTypedArrayConstructor} from './wasm-common';\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError} from './wasm-utils';\nimport {loadFile} from './wasm-utils-load-file';\n\n// #region Initializations\n\n/**\n * There are 4 different \"initialization\" steps for ORT. They happen in different places and different time.\n *\n * 1. JavaScript initialization for onnxruntime-common and onnxruntime-web.\n * This is the first initialization step. In this step, onnxruntime-web calls onnxruntime-common's registerBackend()\n * function multiple times to register all the available backends. The backend registration is very fast. 
It only\n * registers the backend name with the uninitialized backend object. No heavy initialization is done in this step.\n * Refer to web/lib/index.ts for the backend registration.\n *\n * 2. WebAssembly artifact initialization.\n * This happens when any registered wasm backend is used for the first time (ie. `ort.InferenceSession.create()` or\n * `ort.TrainingSession.create()` is called). In this step, onnxruntime-web does the followings:\n * - create a proxy worker and make sure the proxy worker is ready to receive messages, if proxy is enabled.\n * - perform feature detection, locate correct WebAssembly artifact path and call the Emscripten generated\n * JavaScript code to initialize the WebAssembly runtime.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-wasm'.\n * - downloading the 'ort-wasm{...}.wasm' file is done in this step.\n * - if multi-thread is enabled, one or more webworker will be created to initialize the PThread threadpool.\n *\n * 3. ORT environment initialization.\n * This happens after step 2. In this step, onnxruntime-web performs ONNX Runtime environment initialization.\n * Function `_OrtInit()` is called in this step.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-ort'.\n * - logging level (ort.env.logLevel) and thread number (ort.env.wasm.numThreads) are set in this step.\n *\n * 4. Session initialization.\n * This happens when `ort.InferenceSession.create()` or `ort.TrainingSession.create()` is called. Unlike the first 3\n * steps (they only called once), this step will be done for each session. In this step, onnxruntime-web does the\n * followings:\n * If the parameter is a URL:\n * - download the model data from the URL.\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - dereference the model buffer. This step allows the original ArrayBuffer to be garbage collected.\n * - call `_OrtCreateSession()` to create the session. (proxy: 'create')\n *\n * If the parameter is a Uint8Array object:\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - call `_OrtCreateSession()` to create the session. 
(proxy: 'create')\n *\n *\n */\n\n/**\n * initialize ORT environment.\n *\n * @param numThreads SetGlobalIntraOpNumThreads(numThreads)\n * @param loggingLevel CreateEnv(static_cast(logging_level))\n */\nconst initOrt = (numThreads: number, loggingLevel: number): void => {\n const errorCode = getInstance()._OrtInit(numThreads, loggingLevel);\n if (errorCode !== 0) {\n checkLastError('Can\\'t initialize onnxruntime.');\n }\n};\n\n/**\n * initialize runtime environment.\n * @param env passed in the environment config object.\n */\nexport const initRuntime = async(env: Env): Promise => {\n // init ORT\n initOrt(env.wasm.numThreads!, logLevelStringToEnum(env.logLevel));\n};\n\n/**\n * perform EP specific initialization.\n *\n * @param env\n * @param epName\n */\nexport const initEp = async(env: Env, epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_JSEP) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n const initJsep = require('./jsep/init').init;\n\n if (epName === 'webgpu') {\n // perform WebGPU availability check\n if (typeof navigator === 'undefined' || !navigator.gpu) {\n throw new Error('WebGPU is not supported in current environment');\n }\n\n let adapter = env.webgpu.adapter as GPUAdapter | null;\n if (!adapter) {\n // if adapter is not set, request a new adapter.\n const powerPreference = env.webgpu.powerPreference;\n if (powerPreference !== undefined && powerPreference !== 'low-power' &&\n powerPreference !== 'high-performance') {\n throw new Error(`Invalid powerPreference setting: \"${powerPreference}\"`);\n }\n const forceFallbackAdapter = env.webgpu.forceFallbackAdapter;\n if (forceFallbackAdapter !== undefined && typeof forceFallbackAdapter !== 'boolean') {\n throw new Error(`Invalid forceFallbackAdapter setting: \"${forceFallbackAdapter}\"`);\n }\n adapter = await navigator.gpu.requestAdapter({powerPreference, forceFallbackAdapter});\n if (!adapter) {\n throw new Error(\n 'Failed to get GPU adapter. ' +\n 'You may need to enable flag \"--enable-unsafe-webgpu\" if you are using Chrome.');\n }\n } else {\n // if adapter is set, validate it.\n if (typeof adapter.limits !== 'object' || typeof adapter.features !== 'object' ||\n typeof adapter.requestDevice !== 'function') {\n throw new Error('Invalid GPU adapter set in `env.webgpu.adapter`. 
It must be a GPUAdapter object.');\n }\n }\n\n await initJsep('webgpu', getInstance(), env, adapter);\n }\n if (epName === 'webnn') {\n // perform WebNN availability check\n if (typeof navigator === 'undefined' || !(navigator as unknown as {ml: unknown}).ml) {\n throw new Error('WebNN is not supported in current environment');\n }\n\n await initJsep('webnn', getInstance(), env);\n }\n }\n};\n\n// #endregion Initializations\n\n/**\n * valid data locations for input/output tensors.\n */\ntype SupportedTensorDataLocationForInputOutput = 'cpu'|'cpu-pinned'|'gpu-buffer';\n\ntype IOBindingState = {\n /**\n * the handle of IO binding.\n */\n readonly handle: number;\n\n /**\n * the preferred location for each output tensor.\n *\n * value is one of 'cpu', 'cpu-pinned', 'gpu-buffer'.\n */\n readonly outputPreferredLocations: readonly SupportedTensorDataLocationForInputOutput[];\n\n /**\n * enum value of the preferred location for each output tensor.\n */\n readonly outputPreferredLocationsEncoded: readonly number[];\n};\n\n/**\n * tuple elements are: InferenceSession ID; inputNamesUTF8Encoded; outputNamesUTF8Encoded; bindingState\n */\ntype SessionMetadata = [\n inferenceSessionId: number, inputNamesUTF8Encoded: number[], outputNamesUTF8Encoded: number[],\n bindingState: IOBindingState|null, enableGraphCapture: boolean, inputOutputBound: boolean\n];\n\nconst activeSessions = new Map();\n\n/**\n * get the input/output count of the session.\n * @param sessionHandle the handle representing the session. should be non-zero.\n * @returns a tuple including 2 numbers, representing the input count and output count.\n */\nconst getSessionInputOutputCount = (sessionHandle: number): [number, number] => {\n const wasm = getInstance();\n const stack = wasm.stackSave();\n try {\n const dataOffset = wasm.stackAlloc(8);\n const errorCode = wasm._OrtGetInputOutputCount(sessionHandle, dataOffset, dataOffset + 4);\n if (errorCode !== 0) {\n checkLastError('Can\\'t get session input/output count.');\n }\n return [wasm.HEAP32[dataOffset / 4], wasm.HEAP32[dataOffset / 4 + 1]];\n } finally {\n wasm.stackRestore(stack);\n }\n};\n\n/**\n * allocate the memory and memcpy the external buffer.\n *\n * @param model - the external buffer containing the model data. Must not be the same buffer as the WASM heap.\n * @returns a 2-elements tuple - the pointer and size of the allocated buffer\n */\nexport const copyFromExternalBuffer = (model: Uint8Array): [number, number] => {\n const wasm = getInstance();\n const modelDataOffset = wasm._malloc(model.byteLength);\n if (modelDataOffset === 0) {\n throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${model.byteLength}.`);\n }\n wasm.HEAPU8.set(model, modelDataOffset);\n return [modelDataOffset, model.byteLength];\n};\n\n/**\n * create an inference session from a model data buffer.\n *\n * @param modelData - either a Uint8Array object representing the model data, or a 2-elements tuple containing the\n * pointer and size of the model data buffer.\n * @param options an optional session options object.\n * @returns a 3-elements tuple containing [session handle, input names, output names]\n */\nexport const createSession = async(\n modelData: Uint8Array|SerializableInternalBuffer,\n options?: InferenceSession.SessionOptions): Promise => {\n let modelDataOffset: number, modelDataLength: number;\n const wasm = getInstance();\n\n if (Array.isArray(modelData)) {\n // if model data is an array, it must be a 2-elements tuple containing the pointer and size of the model data\n [modelDataOffset, modelDataLength] = modelData;\n } else if (modelData.buffer === wasm.HEAPU8.buffer) {\n // if model data uses the same buffer as the WASM heap, we don't need to copy it.\n [modelDataOffset, modelDataLength] = [modelData.byteOffset, modelData.byteLength];\n } else {\n // otherwise, copy the model data to the WASM heap.\n [modelDataOffset, modelDataLength] = copyFromExternalBuffer(modelData);\n }\n\n let sessionHandle = 0;\n let sessionOptionsHandle = 0;\n let ioBindingHandle = 0;\n let allocs: number[] = [];\n const inputNamesUTF8Encoded = [];\n const outputNamesUTF8Encoded = [];\n\n try {\n [sessionOptionsHandle, allocs] = setSessionOptions(options);\n\n if (options?.externalData && wasm.mountExternalData) {\n const loadingPromises = [];\n for (const file of options.externalData) {\n const path = typeof file === 'string' ? file : file.path;\n loadingPromises.push(loadFile(typeof file === 'string' ? file : file.data).then(data => {\n wasm.mountExternalData!(path, data);\n }));\n }\n\n // wait for all external data files to be loaded\n await Promise.all(loadingPromises);\n }\n\n for (const provider of options?.executionProviders ?? []) {\n const providerName = typeof provider === 'string' ? 
provider : provider.name;\n if (providerName === 'webnn') {\n if (wasm.currentContext) {\n throw new Error('WebNN execution provider is already set.');\n }\n if (typeof provider !== 'string') {\n const webnnOptions = provider as InferenceSession.WebNNExecutionProviderOption;\n const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const gpuDevice = (webnnOptions as InferenceSession.WebNNOptionsWebGpu)?.gpuDevice;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n const numThreads = (webnnOptions as InferenceSession.WebNNContextOptions)?.numThreads;\n const powerPreference = (webnnOptions as InferenceSession.WebNNContextOptions)?.powerPreference;\n if (context) {\n wasm.currentContext = context as MLContext;\n } else if (gpuDevice) {\n wasm.currentContext = await navigator.ml.createContext(gpuDevice);\n } else {\n wasm.currentContext = await navigator.ml.createContext({deviceType, numThreads, powerPreference});\n }\n } else {\n wasm.currentContext = await navigator.ml.createContext();\n }\n break;\n }\n }\n\n sessionHandle = await wasm._OrtCreateSession(modelDataOffset, modelDataLength, sessionOptionsHandle);\n if (sessionHandle === 0) {\n checkLastError('Can\\'t create a session.');\n }\n\n // clear current MLContext after session creation\n if (wasm.currentContext) {\n wasm.currentContext = undefined;\n }\n\n const [inputCount, outputCount] = getSessionInputOutputCount(sessionHandle);\n\n const enableGraphCapture = !!options?.enableGraphCapture;\n\n const inputNames = [];\n const outputNames = [];\n const outputPreferredLocations: SupportedTensorDataLocationForInputOutput[] = [];\n for (let i = 0; i < inputCount; i++) {\n const name = wasm._OrtGetInputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an input name.');\n }\n inputNamesUTF8Encoded.push(name);\n inputNames.push(wasm.UTF8ToString(name));\n }\n for (let i = 0; i < outputCount; i++) {\n const name = wasm._OrtGetOutputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an output name.');\n }\n outputNamesUTF8Encoded.push(name);\n const nameString = wasm.UTF8ToString(name);\n outputNames.push(nameString);\n\n if (!BUILD_DEFS.DISABLE_JSEP) {\n if (enableGraphCapture && options?.preferredOutputLocation === undefined) {\n outputPreferredLocations.push('gpu-buffer');\n continue;\n }\n const location = typeof options?.preferredOutputLocation === 'string' ?\n options.preferredOutputLocation :\n options?.preferredOutputLocation?.[nameString] ?? 'cpu';\n if (location !== 'cpu' && location !== 'cpu-pinned' && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${location}.`);\n }\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${\n location}. 
Only 'gpu-buffer' location is supported when enableGraphCapture is true.`);\n }\n outputPreferredLocations.push(location);\n }\n }\n\n // use IO binding only when at least one output is preffered to be on GPU.\n let bindingState: IOBindingState|null = null;\n if (!BUILD_DEFS.DISABLE_JSEP && outputPreferredLocations.some(l => l === 'gpu-buffer')) {\n ioBindingHandle = wasm._OrtCreateBinding(sessionHandle);\n if (ioBindingHandle === 0) {\n checkLastError('Can\\'t create IO binding.');\n }\n\n bindingState = {\n handle: ioBindingHandle,\n outputPreferredLocations,\n outputPreferredLocationsEncoded: outputPreferredLocations.map(l => dataLocationStringToEnum(l)),\n };\n }\n\n activeSessions.set(\n sessionHandle,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, bindingState, enableGraphCapture, false]);\n return [sessionHandle, inputNames, outputNames];\n } catch (e) {\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n\n if (ioBindingHandle !== 0) {\n wasm._OrtReleaseBinding(ioBindingHandle);\n }\n\n if (sessionHandle !== 0) {\n wasm._OrtReleaseSession(sessionHandle);\n }\n throw e;\n } finally {\n wasm._free(modelDataOffset);\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n\n // unmount external data if necessary\n wasm.unmountExternalData?.();\n }\n};\n\nexport const releaseSession = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot release session. invalid session id: ${sessionId}`);\n }\n const [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture] = session;\n\n if (ioBindingState) {\n if (enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n }\n wasm._OrtReleaseBinding(ioBindingState.handle);\n }\n\n wasm.jsepOnReleaseSession?.(sessionId);\n\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n wasm._OrtReleaseSession(sessionHandle);\n activeSessions.delete(sessionId);\n};\n\nexport const prepareInputOutputTensor =\n (tensor: TensorMetadata|null, tensorHandles: number[], allocs: number[], sessionId: number, index: number,\n enableGraphCapture = false): void => {\n if (!tensor) {\n tensorHandles.push(0);\n return;\n }\n\n const wasm = getInstance();\n\n const dataType = tensor[0];\n const dims = tensor[1];\n const location = tensor[3];\n\n let rawData: number;\n let dataByteLength: number;\n\n if (dataType === 'string' && location === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(\n `External buffer must be provided for input/output index ${index} when enableGraphCapture is true.`);\n }\n\n if (location === 'gpu-buffer') {\n const gpuBuffer = tensor[2].gpuBuffer as GPUBuffer;\n const elementSizeInBytes = getTensorElementSize(tensorDataTypeStringToEnum(dataType))!;\n dataByteLength = dims.reduce((a, b) => a * b, 1) * elementSizeInBytes;\n\n const registerBuffer = wasm.jsepRegisterBuffer;\n if (!registerBuffer) {\n throw new Error('Tensor location \"gpu-buffer\" is not supported without using WebGPU.');\n }\n rawData = registerBuffer(sessionId, index, gpuBuffer, dataByteLength);\n } else {\n const data = tensor[2];\n\n if (Array.isArray(data)) {\n // string tensor\n 
dataByteLength = 4 * data.length;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n let dataIndex = rawData / 4;\n for (let i = 0; i < data.length; i++) {\n if (typeof data[i] !== 'string') {\n throw new TypeError(`tensor data at index ${i} is not a string`);\n }\n wasm.HEAPU32[dataIndex++] = allocWasmString(data[i], allocs);\n }\n } else {\n dataByteLength = data.byteLength;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n wasm.HEAPU8.set(new Uint8Array(data.buffer, data.byteOffset, dataByteLength), rawData);\n }\n }\n\n const stack = wasm.stackSave();\n const dimsOffset = wasm.stackAlloc(4 * dims.length);\n try {\n let dimIndex = dimsOffset / 4;\n dims.forEach(d => wasm.HEAP32[dimIndex++] = d);\n const tensor = wasm._OrtCreateTensor(\n tensorDataTypeStringToEnum(dataType), rawData, dataByteLength, dimsOffset, dims.length,\n dataLocationStringToEnum(location));\n if (tensor === 0) {\n checkLastError(`Can't create tensor for input/output. session=${sessionId}, index=${index}.`);\n }\n tensorHandles.push(tensor);\n } finally {\n wasm.stackRestore(stack);\n }\n };\n\n/**\n * perform inference run\n */\nexport const run = async(\n sessionId: number, inputIndices: number[], inputTensors: TensorMetadata[], outputIndices: number[],\n outputTensors: Array, options: InferenceSession.RunOptions): Promise => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot run inference. invalid session id: ${sessionId}`);\n }\n const sessionHandle = session[0];\n const inputNamesUTF8Encoded = session[1];\n const outputNamesUTF8Encoded = session[2];\n const ioBindingState = session[3];\n const enableGraphCapture = session[4];\n const inputOutputBound = session[5];\n\n const inputCount = inputIndices.length;\n const outputCount = outputIndices.length;\n\n let runOptionsHandle = 0;\n let runOptionsAllocs: number[] = [];\n\n const inputTensorHandles: number[] = [];\n const outputTensorHandles: number[] = [];\n const inputOutputAllocs: number[] = [];\n\n const beforeRunStack = wasm.stackSave();\n const inputValuesOffset = wasm.stackAlloc(inputCount * 4);\n const inputNamesOffset = wasm.stackAlloc(inputCount * 4);\n const outputValuesOffset = wasm.stackAlloc(outputCount * 4);\n const outputNamesOffset = wasm.stackAlloc(outputCount * 4);\n\n try {\n [runOptionsHandle, runOptionsAllocs] = setRunOptions(options);\n\n // create input tensors\n for (let i = 0; i < inputCount; i++) {\n prepareInputOutputTensor(\n inputTensors[i], inputTensorHandles, inputOutputAllocs, sessionId, inputIndices[i], enableGraphCapture);\n }\n\n // create output tensors\n for (let i = 0; i < outputCount; i++) {\n prepareInputOutputTensor(\n outputTensors[i], outputTensorHandles, inputOutputAllocs, sessionId, inputCount + outputIndices[i],\n enableGraphCapture);\n }\n\n let inputValuesIndex = inputValuesOffset / 4;\n let inputNamesIndex = inputNamesOffset / 4;\n let outputValuesIndex = outputValuesOffset / 4;\n let outputNamesIndex = outputNamesOffset / 4;\n for (let i = 0; i < inputCount; i++) {\n wasm.HEAPU32[inputValuesIndex++] = inputTensorHandles[i];\n wasm.HEAPU32[inputNamesIndex++] = inputNamesUTF8Encoded[inputIndices[i]];\n }\n for (let i = 0; i < outputCount; i++) {\n wasm.HEAPU32[outputValuesIndex++] = outputTensorHandles[i];\n wasm.HEAPU32[outputNamesIndex++] = outputNamesUTF8Encoded[outputIndices[i]];\n }\n\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState && !inputOutputBound) {\n const {handle, outputPreferredLocations, 
outputPreferredLocationsEncoded} = ioBindingState;\n\n if (inputNamesUTF8Encoded.length !== inputCount) {\n throw new Error(`input count from feeds (${\n inputCount}) is expected to be always equal to model's input count (${inputNamesUTF8Encoded.length}).`);\n }\n\n // process inputs\n for (let i = 0; i < inputCount; i++) {\n const index = inputIndices[i];\n const errorCode = await wasm._OrtBindInput(handle, inputNamesUTF8Encoded[index], inputTensorHandles[i]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind input[${i}] for session=${sessionId}.`);\n }\n }\n\n // process pre-allocated outputs\n for (let i = 0; i < outputCount; i++) {\n const index = outputIndices[i];\n const location = outputTensors[i]?.[3]; // undefined means output is not pre-allocated.\n\n if (location) {\n // output is pre-allocated. bind the tensor.\n const errorCode = wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], outputTensorHandles[i], 0);\n if (errorCode !== 0) {\n checkLastError(`Can't bind pre-allocated output[${i}] for session=${sessionId}.`);\n }\n } else {\n // output is not pre-allocated. reset preferred location.\n const errorCode =\n wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], 0, outputPreferredLocationsEncoded[index]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind output[${i}] to ${outputPreferredLocations[i]} for session=${sessionId}.`);\n }\n }\n }\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, true]);\n }\n\n wasm.jsepOnRunStart?.(sessionHandle);\n let errorCode: number;\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState) {\n errorCode = await wasm._OrtRunWithBinding(\n sessionHandle, ioBindingState.handle, outputCount, outputValuesOffset, runOptionsHandle);\n } else {\n errorCode = await wasm._OrtRun(\n sessionHandle, inputNamesOffset, inputValuesOffset, inputCount, outputNamesOffset, outputCount,\n outputValuesOffset, runOptionsHandle);\n }\n\n if (errorCode !== 0) {\n checkLastError('failed to call OrtRun().');\n }\n\n const output: TensorMetadata[] = [];\n\n for (let i = 0; i < outputCount; i++) {\n const tensor = wasm.HEAPU32[outputValuesOffset / 4 + i];\n if (tensor === outputTensorHandles[i]) {\n // output tensor is pre-allocated. 
no need to copy data.\n output.push(outputTensors[i]!);\n continue;\n }\n\n const beforeGetTensorDataStack = wasm.stackSave();\n // stack allocate 4 pointer value\n const tensorDataOffset = wasm.stackAlloc(4 * 4);\n\n let keepOutputTensor = false;\n let type: Tensor.Type|undefined, dataOffset = 0;\n try {\n const errorCode = wasm._OrtGetTensorData(\n tensor, tensorDataOffset, tensorDataOffset + 4, tensorDataOffset + 8, tensorDataOffset + 12);\n if (errorCode !== 0) {\n checkLastError(`Can't access output tensor data on index ${i}.`);\n }\n let tensorDataIndex = tensorDataOffset / 4;\n const dataType = wasm.HEAPU32[tensorDataIndex++];\n dataOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsLength = wasm.HEAPU32[tensorDataIndex++];\n const dims = [];\n for (let i = 0; i < dimsLength; i++) {\n dims.push(wasm.HEAPU32[dimsOffset / 4 + i]);\n }\n wasm._OrtFree(dimsOffset);\n\n const size = dims.reduce((a, b) => a * b, 1);\n type = tensorDataTypeEnumToString(dataType);\n\n const preferredLocation = ioBindingState?.outputPreferredLocations[outputIndices[i]];\n\n if (type === 'string') {\n if (preferredLocation === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n const stringData: string[] = [];\n let dataIndex = dataOffset / 4;\n for (let i = 0; i < size; i++) {\n const offset = wasm.HEAPU32[dataIndex++];\n const maxBytesToRead = i === size - 1 ? undefined : wasm.HEAPU32[dataIndex] - offset;\n stringData.push(wasm.UTF8ToString(offset, maxBytesToRead));\n }\n output.push([type, dims, stringData, 'cpu']);\n } else {\n // If a certain output's preferred location is GPU but the tensor is empty, we still need to create a CPU\n // tensor for it. There is no mapping GPU buffer for an empty tensor.\n if (preferredLocation === 'gpu-buffer' && size > 0) {\n const getBuffer = wasm.jsepGetBuffer;\n if (!getBuffer) {\n throw new Error('preferredLocation \"gpu-buffer\" is not supported without using WebGPU.');\n }\n const gpuBuffer = getBuffer(dataOffset);\n const elementSize = getTensorElementSize(dataType);\n if (elementSize === undefined || !isGpuBufferSupportedType(type)) {\n throw new Error(`Unsupported data type: ${type}`);\n }\n\n // do not release the tensor right now. 
it will be released when user calls tensor.dispose().\n keepOutputTensor = true;\n\n output.push([\n type, dims, {\n gpuBuffer,\n download: wasm.jsepCreateDownloader!(gpuBuffer, size * elementSize, type),\n dispose: () => {\n wasm._OrtReleaseTensor(tensor);\n }\n },\n 'gpu-buffer'\n ]);\n } else {\n const typedArrayConstructor = tensorTypeToTypedArrayConstructor(type);\n const data = new typedArrayConstructor(size);\n new Uint8Array(data.buffer, data.byteOffset, data.byteLength)\n .set(wasm.HEAPU8.subarray(dataOffset, dataOffset + data.byteLength));\n output.push([type, dims, data, 'cpu']);\n }\n }\n } finally {\n wasm.stackRestore(beforeGetTensorDataStack);\n if (type === 'string' && dataOffset) {\n wasm._free(dataOffset);\n }\n if (!keepOutputTensor) {\n wasm._OrtReleaseTensor(tensor);\n }\n }\n }\n\n if (ioBindingState && !enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, false]);\n }\n return output;\n } finally {\n wasm.stackRestore(beforeRunStack);\n\n inputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n outputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n inputOutputAllocs.forEach(p => wasm._free(p));\n\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n runOptionsAllocs.forEach(p => wasm._free(p));\n }\n};\n\n/**\n * end profiling\n */\nexport const endProfiling = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error('invalid session id');\n }\n const sessionHandle = session[0];\n\n // profile file name is not used yet, but it must be freed.\n const profileFileName = wasm._OrtEndProfiling(sessionHandle);\n if (profileFileName === 0) {\n checkLastError('Can\\'t get an profile file name.');\n }\n wasm._OrtFree(profileFileName);\n};\n\nexport const extractTransferableBuffers = (tensors: readonly SerializableTensorMetadata[]): ArrayBufferLike[] => {\n const buffers: ArrayBufferLike[] = [];\n for (const tensor of tensors) {\n const data = tensor[2];\n if (!Array.isArray(data) && 'buffer' in data) {\n buffers.push(data.buffer);\n }\n }\n return buffers;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env, InferenceSession} from 'onnxruntime-common';\n\nimport {OrtWasmMessage, SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport * as core from './wasm-core-impl';\nimport {initializeWebAssembly} from './wasm-factory';\nimport {importProxyWorker} from './wasm-utils-import';\n\nconst isProxy = (): boolean => !!env.wasm.proxy && typeof document !== 'undefined';\nlet proxyWorker: Worker|undefined;\nlet initializing = false;\nlet initialized = false;\nlet aborted = false;\nlet temporaryObjectUrl: string|undefined;\n\ntype PromiseCallbacks = [resolve: (result: T) => void, reject: (reason: unknown) => void];\nlet initWasmCallbacks: PromiseCallbacks;\nconst queuedCallbacks: Map>> = new Map();\n\nconst enqueueCallbacks = (type: OrtWasmMessage['type'], callbacks: PromiseCallbacks): void => {\n const queue = queuedCallbacks.get(type);\n if (queue) {\n queue.push(callbacks);\n } else {\n queuedCallbacks.set(type, [callbacks]);\n }\n};\n\nconst ensureWorker = (): void => {\n if (initializing || !initialized || aborted || !proxyWorker) {\n throw new Error('worker not ready');\n }\n};\n\nconst onProxyWorkerMessage = (ev: MessageEvent): void => {\n switch (ev.data.type) {\n case 'init-wasm':\n initializing = false;\n if (ev.data.err) {\n aborted = true;\n initWasmCallbacks[1](ev.data.err);\n } else {\n initialized = true;\n initWasmCallbacks[0]();\n }\n if (temporaryObjectUrl) {\n URL.revokeObjectURL(temporaryObjectUrl);\n temporaryObjectUrl = undefined;\n }\n break;\n case 'init-ep':\n case 'copy-from':\n case 'create':\n case 'release':\n case 'run':\n case 'end-profiling': {\n const callbacks = queuedCallbacks.get(ev.data.type)!;\n if (ev.data.err) {\n callbacks.shift()![1](ev.data.err);\n } else {\n callbacks.shift()![0](ev.data.out!);\n }\n break;\n }\n default:\n }\n};\n\n\nexport const initializeWebAssemblyAndOrtRuntime = async(): Promise => {\n if (initialized) {\n return;\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initWasm()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initWasm()\\' failed.');\n }\n\n initializing = true;\n\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n return new Promise((resolve, reject) => {\n proxyWorker?.terminate();\n\n void importProxyWorker().then(([objectUrl, worker]) => {\n try {\n proxyWorker = worker;\n proxyWorker.onerror = (ev: ErrorEvent) => reject(ev);\n proxyWorker.onmessage = onProxyWorkerMessage;\n initWasmCallbacks = [resolve, reject];\n const message: OrtWasmMessage = {type: 'init-wasm', in : env};\n proxyWorker.postMessage(message);\n temporaryObjectUrl = objectUrl;\n } catch (e) {\n reject(e);\n }\n }, reject);\n });\n\n } else {\n try {\n await initializeWebAssembly(env.wasm);\n await core.initRuntime(env);\n initialized = true;\n } catch (e) {\n aborted = true;\n throw e;\n } finally {\n initializing = false;\n }\n }\n};\n\nexport const initializeOrtEp = async(epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('init-ep', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'init-ep', in : {epName, env}};\n proxyWorker!.postMessage(message);\n });\n } else {\n await core.initEp(env, epName);\n }\n};\n\nexport const copyFromExternalBuffer = async(buffer: Uint8Array): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) 
{\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('copy-from', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'copy-from', in : {buffer}};\n proxyWorker!.postMessage(message, [buffer.buffer]);\n });\n } else {\n return core.copyFromExternalBuffer(buffer);\n }\n};\n\nexport const createSession =\n async(model: SerializableInternalBuffer|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check unsupported options\n if (options?.preferredOutputLocation) {\n throw new Error('session option \"preferredOutputLocation\" is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('create', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'create', in : {model, options: {...options}}};\n const transferable: Transferable[] = [];\n if (model instanceof Uint8Array) {\n transferable.push(model.buffer);\n }\n proxyWorker!.postMessage(message, transferable);\n });\n } else {\n return core.createSession(model, options);\n }\n };\n\nexport const releaseSession = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('release', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'release', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.releaseSession(sessionId);\n }\n};\n\nexport const run = async(\n sessionId: number, inputIndices: number[], inputs: TensorMetadata[], outputIndices: number[],\n outputs: Array, options: InferenceSession.RunOptions): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check inputs location\n if (inputs.some(t => t[3] !== 'cpu')) {\n throw new Error('input tensor on GPU is not supported for proxy.');\n }\n // check outputs location\n if (outputs.some(t => t)) {\n throw new Error('pre-allocated output tensor is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('run', [resolve, reject]);\n const serializableInputs = inputs as SerializableTensorMetadata[]; // every input is on CPU.\n const message: OrtWasmMessage =\n {type: 'run', in : {sessionId, inputIndices, inputs: serializableInputs, outputIndices, options}};\n proxyWorker!.postMessage(message, core.extractTransferableBuffers(serializableInputs));\n });\n } else {\n return core.run(sessionId, inputIndices, inputs, outputIndices, outputs, options);\n }\n};\n\nexport const endProfiling = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('end-profiling', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'end-profiling', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.endProfiling(sessionId);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, TensorMetadata} from './proxy-messages';\nimport {copyFromExternalBuffer, createSession, endProfiling, releaseSession, run} from './proxy-wrapper';\nimport {isGpuBufferSupportedType} from './wasm-common';\nimport {isNode} from './wasm-utils-env';\nimport {loadFile} from './wasm-utils-load-file';\n\nexport const encodeTensorMetadata = (tensor: Tensor, getName: () => string): TensorMetadata => {\n switch (tensor.location) {\n case 'cpu':\n return [tensor.type, tensor.dims, tensor.data, 'cpu'];\n case 'gpu-buffer':\n return [tensor.type, tensor.dims, {gpuBuffer: tensor.gpuBuffer}, 'gpu-buffer'];\n default:\n throw new Error(`invalid data location: ${tensor.location} for ${getName()}`);\n }\n};\n\nexport const decodeTensorMetadata = (tensor: TensorMetadata): Tensor => {\n switch (tensor[3]) {\n case 'cpu':\n return new Tensor(tensor[0], tensor[2], tensor[1]);\n case 'gpu-buffer': {\n const dataType = tensor[0];\n if (!isGpuBufferSupportedType(dataType)) {\n throw new Error(`not supported data type: ${dataType} for deserializing GPU tensor`);\n }\n const {gpuBuffer, download, dispose} = tensor[2];\n return Tensor.fromGpuBuffer(gpuBuffer, {dataType, dims: tensor[1], download, dispose});\n }\n default:\n throw new Error(`invalid data location: ${tensor[3]}`);\n }\n};\n\nexport class OnnxruntimeWebAssemblySessionHandler implements InferenceSessionHandler {\n private sessionId: number;\n\n inputNames: string[];\n outputNames: string[];\n\n async fetchModelAndCopyToWasmMemory(path: string): Promise {\n // fetch model from url and move to wasm heap.\n return copyFromExternalBuffer(await loadFile(path));\n }\n\n async loadModel(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n let model: Parameters[0];\n\n if (typeof pathOrBuffer === 'string') {\n if (isNode) {\n // node\n model = await loadFile(pathOrBuffer);\n } else {\n // browser\n // fetch model and copy to wasm heap.\n model = await this.fetchModelAndCopyToWasmMemory(pathOrBuffer);\n }\n } else {\n model = pathOrBuffer;\n }\n\n [this.sessionId, this.inputNames, this.outputNames] = await createSession(model, options);\n TRACE_FUNC_END();\n }\n\n async dispose(): Promise {\n return releaseSession(this.sessionId);\n }\n\n async run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType, options: InferenceSession.RunOptions):\n Promise {\n TRACE_FUNC_BEGIN();\n const inputArray: Tensor[] = [];\n const inputIndices: number[] = [];\n Object.entries(feeds).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.inputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid input '${name}'`);\n }\n inputArray.push(tensor);\n inputIndices.push(index);\n });\n\n const outputArray: Array = [];\n const outputIndices: number[] = [];\n Object.entries(fetches).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.outputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid output '${name}'`);\n }\n outputArray.push(tensor);\n outputIndices.push(index);\n });\n\n const inputs =\n inputArray.map((t, i) => encodeTensorMetadata(t, () => `input \"${this.inputNames[inputIndices[i]]}\"`));\n const outputs = outputArray.map(\n (t, i) => t ? 
encodeTensorMetadata(t, () => `output \"${this.outputNames[outputIndices[i]]}\"`) : null);\n\n    const results = await run(this.sessionId, inputIndices, inputs, outputIndices, outputs, options);\n\n    const resultMap: SessionHandler.ReturnType = {};\n    for (let i = 0; i < results.length; i++) {\n      resultMap[this.outputNames[outputIndices[i]]] = outputArray[i] ?? decodeTensorMetadata(results[i]);\n    }\n    TRACE_FUNC_END();\n    return resultMap;\n  }\n\n  startProfiling(): void {\n    // TODO: implement profiling\n  }\n\n  endProfiling(): void {\n    void endProfiling(this.sessionId);\n  }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend, env, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {initializeOrtEp, initializeWebAssemblyAndOrtRuntime} from './wasm/proxy-wrapper';\nimport {OnnxruntimeWebAssemblySessionHandler} from './wasm/session-handler-inference';\nimport {scriptSrc} from './wasm/wasm-utils-import';\n\n/**\n * This function initializes all flags for WebAssembly.\n *\n * Those flags are accessible from `ort.env.wasm`. Users are allowed to set those flags before the first inference session\n * is created, to override the default values.\n */\nexport const initializeFlags = (): void => {\n  if (typeof env.wasm.initTimeout !== 'number' || env.wasm.initTimeout < 0) {\n    env.wasm.initTimeout = 0;\n  }\n\n  if (env.wasm.simd === false) {\n    // eslint-disable-next-line no-console\n    console.warn(\n        'Deprecated property \"env.wasm.simd\" is set to false. ' +\n        'non-SIMD build is no longer provided, and this setting will be ignored.');\n  }\n\n  if (typeof env.wasm.proxy !== 'boolean') {\n    env.wasm.proxy = false;\n  }\n\n  if (typeof env.wasm.trace !== 'boolean') {\n    env.wasm.trace = false;\n  }\n\n  if (typeof env.wasm.numThreads !== 'number' || !Number.isInteger(env.wasm.numThreads) || env.wasm.numThreads <= 0) {\n    // The following logic only applies when `ort.env.wasm.numThreads` is not set by the user. We will always honor the\n    // user's setting if it is provided.\n\n    // Browser: when crossOriginIsolated is false, SharedArrayBuffer is not available so WebAssembly threads will not\n    // work. In this case, we will set numThreads to 1.\n    //\n    // There is an exception: when the browser is configured to force-enable SharedArrayBuffer (e.g. Chromium with\n    // --enable-features=SharedArrayBuffer), it is possible that `self.crossOriginIsolated` is false and\n    // SharedArrayBuffer is available at the same time. This is usually for testing. In this case, we will still set\n    // numThreads to 1 here. If we want to enable multi-threading in tests, we should set `ort.env.wasm.numThreads` to a\n    // value greater than 1.\n    if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n      env.wasm.numThreads = 1;\n    } else {\n      const numCpuLogicalCores =\n          typeof navigator === 'undefined' ? require('node:os').cpus().length : navigator.hardwareConcurrency;\n      env.wasm.numThreads = Math.min(4, Math.ceil((numCpuLogicalCores || 1) / 2));\n    }\n  }\n\n  if (!BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n    // set the default wasm paths if not overridden by the user\n    if (env.wasm.wasmPaths === undefined && scriptSrc && scriptSrc.indexOf('blob:') !== 0) {\n      env.wasm.wasmPaths = scriptSrc.substring(0, scriptSrc.lastIndexOf('/') + 1);\n    }\n  }\n};\n\nexport class OnnxruntimeWebAssemblyBackend implements Backend {\n  /**\n   * This function initializes the WebAssembly backend.\n   *\n   * This function will be called only once for each backend name. 
It will be called the first time when\n   * `ort.InferenceSession.create()` is called with a registered backend name.\n   *\n   * @param backendName - the registered backend name.\n   */\n  async init(backendName: string): Promise<void> {\n    // populate wasm flags\n    initializeFlags();\n\n    // init wasm\n    await initializeWebAssemblyAndOrtRuntime();\n\n    // perform EP-specific initialization\n    await initializeOrtEp(backendName);\n  }\n  createInferenceSessionHandler(path: string, options?: InferenceSession.SessionOptions):\n      Promise<InferenceSessionHandler>;\n  createInferenceSessionHandler(buffer: Uint8Array, options?: InferenceSession.SessionOptions):\n      Promise<InferenceSessionHandler>;\n  async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n      Promise<InferenceSessionHandler> {\n    const handler = new OnnxruntimeWebAssemblySessionHandler();\n    await handler.loadModel(pathOrBuffer, options);\n    return Promise.resolve(handler);\n  }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OnnxruntimeWebAssemblyBackend} from './backend-wasm';\nexport const wasmBackend = new OnnxruntimeWebAssemblyBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports */\n\n// We use \"require\" instead of \"import\" here because import statements must be put at the top level. Our current code\n// does not allow the bundler to tree-shake code as expected because some code is treated as having side effects.\n// So we import code inside the if-clause to allow the bundler to remove the code safely.\n\nexport * from 'onnxruntime-common';\nimport * as ort from 'onnxruntime-common';\nexport default ort;\n\nimport {registerBackend, env} from 'onnxruntime-common';\nimport {version} from './version';\n\nif (!BUILD_DEFS.DISABLE_WEBGL) {\n  const onnxjsBackend = require('./backend-onnxjs').onnxjsBackend;\n  registerBackend('webgl', onnxjsBackend, -10);\n}\n\nif (!BUILD_DEFS.DISABLE_WASM) {\n  const wasmBackend = BUILD_DEFS.DISABLE_TRAINING ? require('./backend-wasm-inference').wasmBackend :\n                                                    require('./backend-wasm-training').wasmBackend;\n  if (!BUILD_DEFS.DISABLE_JSEP) {\n    registerBackend('webgpu', wasmBackend, 5);\n    registerBackend('webnn', wasmBackend, 5);\n  }\n  registerBackend('cpu', wasmBackend, 10);\n  registerBackend('wasm', wasmBackend, 10);\n}\n\nObject.defineProperty(env.versions, 'web', {value: version, enumerable: true});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n"], + "mappings": ";;;;;05BAAA,IAgBMA,GACAC,GAYOC,GAwCPC,GAwCOC,GA7GbC,GAAAC,EAAA,kBAgBMN,GAAqC,IAAI,IACzCC,GAAqC,CAAA,EAY9BC,GAAkB,CAACK,EAAcC,EAAkBC,IAA0B,CACxF,GAAID,GAAW,OAAOA,EAAQ,MAAS,YAAc,OAAOA,EAAQ,+BAAkC,WAAY,CAChH,IAAME,EAAiBV,GAAS,IAAIO,CAAI,EACxC,GAAIG,IAAmB,OACrBV,GAAS,IAAIO,EAAM,CAAC,QAAAC,EAAS,SAAAC,CAAQ,CAAC,MACjC,IAAIC,EAAe,SAAWD,EAEnC,OACK,GAAIC,EAAe,WAAaD,GACjCC,EAAe,UAAYF,EAC7B,MAAM,IAAI,MAAM,4BAA4BD,CAAI,oBAAoBE,CAAQ,EAAE,EAIlF,GAAIA,GAAY,EAAG,CACjB,IAAME,EAAIV,GAAyB,QAAQM,CAAI,EAC3CI,IAAM,IACRV,GAAyB,OAAOU,EAAG,CAAC,EAGtC,QAAS,EAAI,EAAG,EAAIV,GAAyB,OAAQ,IACnD,GAAID,GAAS,IAAIC,GAAyB,CAAC,CAAC,EAAG,UAAYQ,EAAU,CACnER,GAAyB,OAAO,EAAG,EAAGM,CAAI,EAC1C,OAGJN,GAAyB,KAAKM,CAAI,EAEpC,OAGF,MAAM,IAAI,UAAU,qBAAqB,CAC3C,EAQMJ,GAAiC,MAAMS,GAAgD,CAC3F,IAAMC,EAAcb,GAAS,IAAIY,CAAW,EAC5C,GAAI,CAACC,EACH,MAAO,qBAGT,GAAIA,EAAY,YACd,OAAOA,EAAY,QACd,GAAIA,EAAY,QACrB,OAAOA,EAAY,MACd,CACL,IAAMC,EAAiB,CAAC,CAACD,EAAY,YACrC,GAAI,CACF,OAAKC,IACHD,EAAY,YAAcA,EAAY,QAAQ,KAAKD,CAAW,GAEhE,MAAMC,EAAY,YAClBA,EAAY,YAAc,GACnBA,EAAY,cACZE,EAAG,CACV,OAAKD,IACHD,EAAY,MAAQ,GAAGE,CAAC,GACxBF,EAAY,QAAU,IAEjBA,EAAY,cAEnB,OAAOA,EAAY,aAGzB,EAWaT,GAAsC,MAAMY,GACmB,CAEtE,IAAMC,EAAMD,EAAQ,oBAAsB,CAAA,EACpCE,EAAeD,EAAI,IAAIN,GAAK,OAAOA,GAAM,SAAWA,EAAIA,EAAE,IAAI,EAC9DQ,EAAeD,EAAa,SAAW,EAAIjB,GAA2BiB,EAGxEV,EACEY,EAAS,CAAA,EACTC,EAAwB,IAAI,IAClC,QAAWT,KAAeO,EAAc,CACtC,IAAMG,EAAgB,MAAMnB,GAA+BS,CAAW,EAClE,OAAOU,GAAkB,SAC3BF,EAAO,KAAK,CAAC,KAAMR,EAAa,IAAKU,CAAa,CAAC,GAE9Cd,IACHA,EAAUc,GAERd,IAAYc,GACdD,EAAsB,IAAIT,CAAW,GAM3C,GAAI,CAACJ,EACH,MAAM,IAAI,MAAM,oCAAoCY,EAAO,IAAIL,GAAK,IAAIA,EAAE,IAAI,KAAKA,EAAE,GAAG,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,EAI1G,OAAW,CAAC,KAAAR,EAAM,IAAAgB,CAAG,IAAKH,EACpBF,EAAa,SAASX,CAAI,GAE5B,QAAQ,KAAK,0CACTA,CAAI,uDAAuDgB,CAAG,EAAE,EAIxE,IAAMC,EAAcP,EAAI,OAAON,GAAKU,EAAsB,IAAI,OAAOV,GAAM,SAAWA,EAAIA,EAAE,IAAI,CAAC,EAEjG,MAAO,CACLH,EAAS,IAAI,MAAMQ,EAAS,CAC1B,IAAK,CAACS,EAAQC,IACRA,IAAS,qBACJF,EAEF,QAAQ,IAAIC,EAAQC,CAAI,EAElC,EAEL,IChKJ,IAAAC,GAAAC,EAAA,kBAoFAC,OCpFA,IAMaC,GANbC,GAAAC,EAAA,kBAMaF,GAAU,WCNvB,IAQIG,GAESC,GAVbC,GAAAC,EAAA,kBAIAC,KAIIJ,GAAwC,UAE/BC,GAAW,CACtB,KAAM,CAAA,EACN,MAAO,CAAA,EACP,OAAQ,CAAA,EACR,SAAU,CAAC,OAAQI,EAAO,EAE1B,IAAI,SAASC,EAAmB,CAC9B,GAAIA,IAAU,OAGd,IAAI,OAAOA,GAAU,UAAY,CAAC,UAAW,OAAQ,UAAW,QAAS,OAAO,EAAE,QAAQA,CAAK,IAAM,GACnG,MAAM,IAAI,MAAM,8BAA8BA,CAAK,EAAE,EAEvDN,GAAgBM,EAClB,EACA,IAAI,UAAQ,CACV,OAAON,EACT,GAIF,OAAO,eAAeC,GAAK,WAAY,CAAC,WAAY,EAAI,CAAC,IC/BzD,IAmRaM,GAnRbC,GAAAC,EAAA,kBAGAC,KAgRaH,GAAWA,KCnRxB,IASaI,GA+FAC,GAxGbC,GAAAC,EAAA,kBASaH,GAAkB,CAACI,EAAgBC,IAA4C,CAC1F,IAAMC,EAAS,OAAO,SAAa,IAAc,SAAS,cAAc,QAAQ,EAAK,IAAI,gBAAgB,EAAG,CAAC,EAC7GA,EAAO,MAAQF,EAAO,KAAK,CAAC,EAC5BE,EAAO,OAASF,EAAO,KAAK,CAAC,EAC7B,IAAMG,EACFD,EAAO,WAAW,IAAI,EAE1B,GAAIC,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAJ,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,IAEtBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,GAGxB,IAAMM,EAAcL,GAAS,SAAW,OAAYA,EAAQ,OAAS,MAE/DM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,
SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EAEpBO,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5B,QAASK,EAAI,EAAGA,EAAIV,EAAQU,IAC1B,QAASC,EAAI,EAAGA,EAAIZ,EAAOY,IAAK,CAC9B,IAAMC,GAAMjB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EU,GAAMlB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EW,GAAMnB,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EY,EAAIN,IAAmB,GACzB,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,EAE1EL,EAAgB,UAAY,QAAUc,EAAI,IAAMC,EAAI,IAAMC,EAAI,IAAMC,EAAI,IACxEjB,EAAgB,SAASa,EAAGD,EAAG,EAAG,CAAC,EAGvC,GAAI,cAAeb,EACjB,OAAOA,EAAO,UAAS,EAEvB,MAAM,IAAI,MAAM,4BAA4B,MAG9C,OAAM,IAAI,MAAM,2BAA2B,CAE/C,EAKaL,GAAoB,CAACG,EAAgBC,IAAiD,CACjG,IAAME,EAAkB,OAAO,SAAa,IACxC,SAAS,cAAc,QAAQ,EAAE,WAAW,IAAI,EAChD,IAAI,gBAAgB,EAAG,CAAC,EAAE,WAAW,IAAI,EACzCkB,EACJ,GAAIlB,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAiB,EACArB,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,IAExBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,GAE1B,IAAMM,EAAcL,IAAY,QAAaA,EAAQ,SAAW,OAAYA,EAAQ,OAAkB,MAEhGM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,GAAG,EACrDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EACxB,GAAIH,IAAY,SACVA,EAAQ,SAAW,QAAcqB,IAAa,GAAKrB,EAAQ,SAAW,QACrEqB,IAAa,GAAMrB,EAAQ,SAAW,OAASA,EAAQ,SAAW,OACrE,MAAM,IAAI,MAAM,+CAAgD,EAKpE,IAAMsB,EAAO,EACTC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACzEhB,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5BW,EAAQlB,EAAgB,gBAAgBC,EAAOC,CAAM,EAErD,QAASU,EAAI,EAAGA,EAAIV,EAASD,EACxBoB,GAAiBD,EAAME,GAAiBF,EAAMG,GAAiBH,EAAMI,GAAiBJ,EAAMR,IAC/FM,EAAM,KAAKG,CAAa,GAAMxB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKI,CAAa,GAAMzB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKK,CAAa,GAAM1B,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKM,CAAa,EAAIb,IAAmB,GAC3C,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,MAI5E,OAAM,IAAI,MAAM,2BAA2B,EAE7C,OAAOa,CACT,ICtMA,IAiBaO,GAkFAC,GAgKAC,GAWAC,GASAC,GAvRbC,GAAAC,EAAA,kBAIAC,KAaaP,GAAiB,CAACQ,EAAqCC,IAA0C,CAC5G,GAAID,IAAW,OACb,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAIC,EAAQ,SAAW,QAAaA,EAAQ,QAAU,OACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAM,CAAC,OAAAC,EAAQ,MAAAC,CAAK,EAAIF,EAElBG,EAAOH,EAAQ,MAAQ,CAAC,KAAM,IAAK,KAAM,CAAC,EAC5CI,EACAC,EAEA,OAAQF,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAA
I,EAEtDC,EAAW,CAACD,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,GAAG,EAG3E,OAAQA,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,EAEtDE,EAAW,CAACF,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,CAAC,EAG7E,IAAMG,EAAcN,EAAQ,SAAW,OAAYA,EAAQ,OAAS,OAG9DO,EACFP,EAAQ,eAAiB,QAAaA,EAAQ,eAAiB,OAAYA,EAAQ,aAAwB,MACzGQ,EAASP,EAASC,EAClBO,EAAcF,IAAiB,OAAS,IAAI,aAAaC,EAAS,CAAC,EAAI,IAAI,aAAaA,EAAS,CAAC,EAGpGE,EAAO,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACnFC,EAAiB,EAAGC,EAAiBR,EAAQS,EAAiBT,EAAS,EAAGU,EAAiB,GAG3FZ,IAAgB,QAClBI,EAAO,EACPC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,IAIdP,IAAiB,OACnBW,EAAiBV,EAAS,EACjBD,IAAiB,OAC1BQ,EAAiB,EACjBE,EAAiBT,EACjBQ,EAAiBR,EAAS,GACjBD,IAAiB,QAC1BU,EAAiB,EACjBD,EAAiBR,EACjBO,EAAiBP,EAAS,GAG5B,QAASW,EAAI,EAAGA,EAAIX,EACfW,IAAKR,GAAiBD,EAAMG,GAAiBH,EAAME,GAAiBF,EAAMI,GAAiBJ,EAC9FD,EAAYM,GAAgB,GAAKhB,EAAOY,CAAa,EAAIN,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYO,GAAgB,GAAKjB,EAAOa,CAAa,EAAIP,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYQ,GAAgB,GAAKlB,EAAOc,CAAa,EAAIR,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC9Ec,IAAmB,IAAMJ,IAAkB,KAC7CL,EAAYS,GAAgB,GAAKnB,EAAOe,CAAa,EAAIT,EAAS,CAAC,GAAKD,EAAS,CAAC,GAOtF,OAFqBG,IAAiB,OAAS,IAAIa,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,EACxD,IAAIkB,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,CAEzG,EAKaV,GAAkB,MAC3B6B,EACArB,IACyC,CAE3C,IAAMsB,EAAiB,OAAQ,iBAAsB,KAAeD,aAAiB,iBAC/EE,EAAiB,OAAQ,UAAe,KAAeF,aAAiB,UACxEG,EAAgB,OAAQ,YAAiB,KAAeH,aAAiB,YACzEI,EAAW,OAAOJ,GAAU,SAE9BK,EACAC,EAA+C3B,GAAW,CAAA,EAExD4B,EAAe,IAAK,CACxB,GAAI,OAAO,SAAa,IACtB,OAAO,SAAS,cAAc,QAAQ,EACjC,GAAI,OAAO,gBAAoB,IACpC,OAAO,IAAI,gBAAgB,EAAG,CAAC,EAE/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,EACMC,EAAuBC,GACvBA,aAAkB,mBAEXA,aAAkB,gBADpBA,EAAO,WAAW,IAAI,EAItB,KAIX,GAAIR,EAAgB,CAElB,IAAMQ,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAI9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MAMlB,GALIrB,IAAY,QAAaA,EAAQ,gBAAkB,QAAaA,EAAQ,eAAiB,SAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,cAGdA,IAAY,OAAW,CAEzB,GADA2B,EAAwB3B,EACpBA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,6DAA6D,EAE7E2B,EAAsB,aAAe,OAEvCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,OAE9ByB,EAAsB,aAAe,OACrCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAGhC6B,EAAgB,UAAUV,EAAO,EAAG,CAAC,EACrCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,UAEpCsB,EAAgB,CACzB,IAAItB,EACAC,EAiBJ,GAfIF,IAAY,QAAaA,EAAQ,eAAiB,QAAaA,EAAQ,gBAAkB,QAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,eAEhBC,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,OAGZrB,IAAY,SACd2B,EAAwB3B,GAE1B2B,EAAsB,OAAS,OAC/BA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAE1BF,IAAY,OAAW,CACzB,IAAMgC,EAAaJ,EAAY,EAE/BI,EAAW,MAAQ9B,EACnB8B,EAAW,OAAS/B,EAEpB,IAAM8B,EAAkBF,EAAoBG,CAAU,EAEtD,GAAID,GAAmB,KACrBA,EAAgB,aAAaV,EAAO,EAAG,CAAC,EACxCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,OAG7CyB,EAAOL,EAAM,aAENG,EAAe,CAExB,GAAIxB,IAAY,OACd,MAAM,IAAI,MAAM,yDAAyD,EAG3E,IAAM8B,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAM9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MACpB,OAAAU,EAAgB,UAAUV,EAAO,EAAG,EAAGnB,EAAOD,CAAM,EACpDyB,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,KACzD0B,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EACvBX,GAAemC,EAAMC,CAAqB,MAEjD,OAAM,IAAI,MAAM,2BAA2B,MAExC,IAAIF,EACT,OAAO,IAAI,QAAQ,CAACQ,EAASC,IAAU,CACrC,IAAMJ,EAASF,EAAY,EACrBO,EAAUN,EAAoBC,CAAM,EAC1C,GAAI,CAACT,GAAS,CAACc,EACb,OAAOD,EAAM,EAEf,IAAME,EAAW,IAAI,MACrBA,EAAS,YAAc,YACvBA,EAAS,IAAMf,EACfe,EAAS,OAAS,IAAK,CACrBN,EAAO,MAAQM,EAAS,M
ACxBN,EAAO,OAASM,EAAS,OACzBD,EAAQ,UAAUC,EAAU,EAAG,EAAGN,EAAO,MAAOA,EAAO,MAAM,EAC7D,IAAMO,EAAMF,EAAQ,aAAa,EAAG,EAAGL,EAAO,MAAOA,EAAO,MAAM,EAElEH,EAAsB,OAASG,EAAO,OACtCH,EAAsB,MAAQG,EAAO,MACrCG,EAAQ1C,GAAe8C,EAAI,KAAMV,CAAqB,CAAC,CACzD,CACF,CAAC,EAED,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAID,IAAS,OACX,OAAOnC,GAAemC,EAAMC,CAAqB,EAEjD,MAAM,IAAI,MAAM,gEAAgE,CAEpF,EAKalC,GAAoB,CAC7B6C,EAAsCtC,IAAgD,CACxF,GAAM,CAAC,MAAAE,EAAO,OAAAD,EAAQ,SAAAsC,EAAU,QAAAC,CAAO,EAAIxC,EAErCyC,EAAO,CAAC,EAAGxC,EAAQC,EAAO,CAAC,EACjC,OAAO,IAAIkB,GAAO,CAAC,SAAU,UAAW,KAAM,UAAW,QAAAkB,EAAS,KAAAG,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC5F,EAKa9C,GAAsB,CAC/BgD,EAA0C1C,IAAkD,CAC9F,GAAM,CAAC,SAAA2C,EAAU,KAAAF,EAAM,SAAAF,EAAU,QAAAC,CAAO,EAAIxC,EAC5C,OAAO,IAAIoB,GAAO,CAAC,SAAU,aAAc,KAAMuB,GAAY,UAAW,UAAAD,EAAW,KAAAD,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC7G,EAKa7C,GAAyB,CAClCiD,EAAS7C,EAAwC0C,IACjD,IAAIrB,GAAO,CAAC,SAAU,aAAc,KAAAwB,EAAM,KAAM7C,EAAQ,KAAM0C,GAAQ,CAAC1C,EAAO,MAAM,CAAC,CAAC,ICzR1F,IAWa8C,GAaAC,GAoBTC,GACSC,GA7CbC,GAAAC,EAAA,kBAWaL,GAAwC,IAAI,IAA6C,CACpG,CAAC,UAAW,YAAY,EACxB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,SAAS,EAClB,CAAC,SAAU,WAAW,EACtB,CAAC,QAAS,UAAU,EACpB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,UAAU,EACnB,CAAC,UAAW,YAAY,EACxB,CAAC,SAAU,WAAW,EACvB,EAGYC,GAAwC,IAAI,IAAkD,CACzG,CAAC,aAAc,SAAS,EACxB,CAAC,WAAY,OAAO,EACpB,CAAC,UAAW,MAAM,EAClB,CAAC,YAAa,QAAQ,EACtB,CAAC,WAAY,OAAO,EACpB,CAAC,WAAY,OAAO,EACpB,CAAC,aAAc,SAAS,EACxB,CAAC,YAAa,QAAQ,EACvB,EAWGC,GAAsB,GACbC,GAAkB,IAAK,CAClC,GAAI,CAACD,GAAqB,CACxBA,GAAsB,GACtB,IAAMI,EAA2B,OAAO,cAAkB,KAAe,cAAc,KACjFC,EAA4B,OAAO,eAAmB,KAAe,eAAe,KACpFC,EAA0B,OAAO,aAAiB,KAAe,aAAa,KAEhFF,IACFN,GAAsC,IAAI,QAAS,aAAa,EAChEC,GAAsC,IAAI,cAAe,OAAO,GAE9DM,IACFP,GAAsC,IAAI,SAAU,cAAc,EAClEC,GAAsC,IAAI,eAAgB,QAAQ,GAEhEO,GACFR,GAAsC,IAAI,UAAW,YAAY,EACjEC,GAAsC,IAAI,aAAc,SAAS,GAGjED,GAAsC,IAAI,UAAW,WAAW,EAGtE,ICpEA,IAWaS,GAkBAC,GA7BbC,GAAAC,EAAA,kBAIAC,KAOaJ,GAAiBK,GAAoC,CAChE,IAAIC,EAAO,EACX,QAASC,EAAI,EAAGA,EAAIF,EAAK,OAAQE,IAAK,CACpC,IAAMC,EAAMH,EAAKE,CAAC,EAClB,GAAI,OAAOC,GAAQ,UAAY,CAAC,OAAO,cAAcA,CAAG,EACtD,MAAM,IAAI,UAAU,QAAQD,CAAC,8BAA8BC,CAAG,EAAE,EAElE,GAAIA,EAAM,EACR,MAAM,IAAI,WAAW,QAAQD,CAAC,0CAA0CC,CAAG,EAAE,EAE/EF,GAAQE,EAEV,OAAOF,CACT,EAKaL,GAAgB,CAACQ,EAAgBJ,IAAmC,CAC/E,OAAQI,EAAO,SAAU,CACvB,IAAK,MACH,OAAO,IAAIC,GAAOD,EAAO,KAAMA,EAAO,KAAMJ,CAAI,EAClD,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,KAAMD,EAAO,KACb,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,UACH,OAAO,IAAIK,GAAO,CAChB,SAAU,UACV,QAASD,EAAO,QAChB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,UAAWD,EAAO,UAClB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,QACE,MAAM,IAAI,MAAM,kCAAkCI,EAAO,QAAQ,mBAAmB,EAE1F,ICzDA,IAwBaE,GAxBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAgBaN,GAAP,KAAa,CAyCjB,YACIO,EAEAC,EAA8EC,EAAwB,CAExGC,GAAe,EAEf,IAAIC,EACAC,EAEJ,GAAI,OAAOL,GAAS,UAAY,aAAcA,EAO5C,OAHA,KAAK,aAAeA,EAAK,SACzBI,EAAOJ,EAAK,KACZK,EAAOL,EAAK,KACJA,EAAK,SAAU,CACrB,IAAK,aAAc,CACjB,IAAMM,EAAgCC,GAAsC,IAAIH,CAAI,EACpF,GAAI,CAACE,EACH,MAAM,IAAI,UAAU,qBAAqBF,CAAI,uCAAuC,EAEtF,GAAI,EAAEJ,EAAK,gBAAgBM,GACzB,MAAM,IAAI,UAAU,4BAA4BA,EAA8B,IAAI,EAAE,EAEtF,KAAK,QAAUN,EAAK,KACpB,MAEF,IAAK,UAAW,CACd,GAAII,IAAS,UACX,MAAM,IAAI,UAAU,qBAAqBA,CAAI,iCAAiC,EAEhF,KAAK,eAAiBJ,EAAK,QAC3B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,IAAK,aAAc,CACjB,GAAKI,IAAS,WAAaA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAC7FA,IAAS,SAAWA,IAAS,OAChC,MAAM,IAAI,UAAU,qBAAqBA,CAAI,oCAAoC,EAEnF,KAAK,cAAgBJ,EAAK,UAC1B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,QACE,MAAM,IAAI,MAAM,6CAA6C,KAAK,YAAY,GAAG,MAEhF,CAIL,IAAIQ,EACAC,EAEJ,GAAI,OAAOT,GAAS,SAMlB,GAFAI,EAAOJ,EACPS,EAAYP,EACRF,IAAS,SAAU,CAErB,GAAI,
CAAC,MAAM,QAAQC,CAAI,EACrB,MAAM,IAAI,UAAU,gDAAiD,EAIvEO,EAAOP,MACF,CAEL,IAAMS,EAAwBH,GAAsC,IAAIP,CAAI,EAC5E,GAAIU,IAA0B,OAC5B,MAAM,IAAI,UAAU,4BAA4BV,CAAI,GAAG,EAEzD,GAAI,MAAM,QAAQC,CAAI,EAAG,CACvB,GAAID,IAAS,WAAaU,IAA0B,YAMlD,MAAM,IAAI,UACN,+FAA+F,EAC1FV,IAAS,UAAYA,IAAS,QAYvCQ,EAAQE,EAA8B,KAAKT,EAAM,MAAM,EAIvDO,EAAQE,EAA8B,KAAKT,CAAI,UAExCA,aAAgBS,EACzBF,EAAOP,MAEP,OAAM,IAAI,UAAU,KAAKG,CAAI,kCAAkCM,CAAqB,EAAE,UAO1FD,EAAYR,EACR,MAAM,QAAQD,CAAI,EAAG,CAEvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qDAAqD,EAE3E,IAAMW,EAAmB,OAAOX,EAAK,CAAC,EACtC,GAAIW,IAAqB,SACvBP,EAAO,SACPI,EAAOR,UACEW,IAAqB,UAC9BP,EAAO,OAIPI,EAAO,WAAW,KAAKR,CAAa,MAEpC,OAAM,IAAI,UAAU,uCAAuCW,CAAgB,GAAG,MAE3E,CAEL,IAAMC,EACFC,GAAsC,IAAIb,EAAK,WAA8C,EACjG,GAAIY,IAAe,OACjB,MAAM,IAAI,UAAU,qCAAqCZ,EAAK,WAAW,GAAG,EAE9EI,EAAOQ,EACPJ,EAAOR,EAKX,GAAIS,IAAc,OAEhBA,EAAY,CAACD,EAAK,MAAM,UACf,CAAC,MAAM,QAAQC,CAAS,EACjC,MAAM,IAAI,UAAU,wCAAyC,EAE/DJ,EAAOI,EAEP,KAAK,QAAUD,EACf,KAAK,aAAe,MAItB,IAAMM,EAAOC,GAAcV,CAAI,EAE/B,GAAI,KAAK,SAAWS,IAAS,KAAK,QAAQ,OACxC,MAAM,IAAI,MAAM,iBAAiBA,CAAI,gCAAgC,KAAK,QAAQ,MAAM,IAAI,EAG9F,KAAK,KAAOV,EACZ,KAAK,KAAOC,EACZ,KAAK,KAAOS,CACd,CAIA,aAAa,UACTE,EACAC,EACoB,CACtB,OAAOC,GAAgBF,EAAOC,CAAO,CACvC,CAEA,OAAO,YACHE,EAA4BF,EAAoC,CAClE,OAAOG,GAAkBD,EAASF,CAAO,CAC3C,CAEA,OAAO,cACHI,EAAgCJ,EAAsC,CACxE,OAAOK,GAAoBD,EAAWJ,CAAO,CAC/C,CAEA,OAAO,iBACHb,EAASmB,EAAwClB,EAAwB,CAC3E,OAAOmB,GAAuBpB,EAAMmB,EAAQlB,CAAI,CAClD,CAKA,UAAUY,EAAgC,CACxC,OAAOQ,GAAgB,KAAMR,CAAO,CACtC,CAEA,YAAYA,EAAkC,CAC5C,OAAOS,GAAkB,KAAMT,CAAO,CACxC,CAgDA,IAAI,MAAI,CAEN,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,QACR,MAAM,IAAI,MACN,gJAC2E,EAEjF,OAAO,KAAK,OACd,CAEA,IAAI,UAAQ,CACV,OAAO,KAAK,YACd,CAEA,IAAI,SAAO,CAET,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,eACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,cACd,CAEA,IAAI,WAAS,CAEX,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,cACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,aACd,CAKA,MAAM,QAAQU,EAAqB,CAEjC,OADA,KAAK,YAAW,EACR,KAAK,aAAc,CACzB,IAAK,MACL,IAAK,aACH,OAAO,KAAK,KACd,IAAK,UACL,IAAK,aAAc,CACjB,GAAI,CAAC,KAAK,WACR,MAAM,IAAI,MAAM,qEAAqE,EAEvF,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAE3D,GAAI,CACF,KAAK,cAAgB,GACrB,IAAMnB,EAAO,MAAM,KAAK,WAAU,EAClC,YAAK,WAAa,OAClB,KAAK,aAAe,MACpB,KAAK,QAAUA,EAEXmB,GAAe,KAAK,WACtB,KAAK,SAAQ,EACb,KAAK,SAAW,QAGXnB,UAGP,KAAK,cAAgB,IAGzB,QACE,MAAM,IAAI,MAAM,kCAAkC,KAAK,YAAY,EAAE,EAE3E,CAEA,SAAO,CACL,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAGvD,KAAK,WACP,KAAK,SAAQ,EACb,KAAK,SAAW,QAElB,KAAK,QAAU,OACf,KAAK,eAAiB,OACtB,KAAK,cAAgB,OACrB,KAAK,WAAa,OAClB,KAAK,cAAgB,OAErB,KAAK,aAAe,MACtB,CAKQ,aAAW,CACjB,GAAI,KAAK,eAAiB,OACxB,MAAM,IAAI,MAAM,yBAAyB,CAE7C,CAEA,QAAQH,EAAuB,CAE7B,GADA,KAAK,YAAW,EACZ,KAAK,YAAc,KAAK,SAC1B,MAAM,IAAI,MAAM,iDAAiD,EAEnE,OAAOuB,GAAc,KAAMvB,CAAI,CACjC,KCpaF,IAwUawB,GAxUbC,GAAAC,EAAA,kBAIAC,KAoUaH,GAASA,KCxUtB,IAQaI,GAQPC,GAqBOC,GAUAC,GA/CbC,GAAAC,EAAA,kBAGAC,KAKaN,GAAQ,CAACO,EAAoBC,IAAiB,EACrD,OAAOC,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAI9D,QAAQ,UAAU,GAAGF,CAAU,UAAUC,CAAK,EAAE,CAClD,EAEMP,GAAa,CAACS,EAAaC,IAAqB,CACpD,IAAMC,EAAQ,IAAI,MAAK,EAAG,OAAO,MAAM,aAAa,GAAK,CAAA,EACrDC,EAAe,GACnB,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,GAAID,GAAgB,CAACD,EAAME,CAAC,EAAE,SAAS,YAAY,EAAG,CACpD,IAAIN,EAAQ,QAAQE,CAAG,KAAKE,EAAME,CAAC,EAAE,KAAI,EAAG,MAAM,GAAG,EAAE,CAAC,CAAC,GACrDH,IACFH,GAAS,KAAKG,CAAQ,IAExBX,GAAM,MAAOQ,CAAK,EAClB,OAEEI,EAAME,CAAC,EAAE,SAAS,YAAY,IAChCD,EAAe,IAGrB,EAKaX,GAAoBS,GAAqB,EAChD,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,QAASU,CAAQ,CAC9B,EAKaR,GAAkBQ,GAAqB,EAC9C,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,MAAOU,CAAQ,CAC5B,ICpDA,IAgBaI,GAhBbC,GAAAC,EAAA,kBAGAC,
KAIAC,KACAC,KAQaL,GAAP,MAAOM,CAAgB,CAC3B,YAAoBC,EAAgC,CAClD,KAAK,QAAUA,CACjB,CAGA,MAAM,IAAIC,EAAkBC,EAA+BC,EAAiB,CAC1EC,GAAgB,EAChB,IAAMC,EAA4C,CAAA,EAC9CC,EAAsB,CAAA,EAE1B,GAAI,OAAOL,GAAU,UAAYA,IAAU,MAAQA,aAAiBM,IAAU,MAAM,QAAQN,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIO,EAAiB,GAErB,GAAI,OAAON,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBK,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQL,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DM,EAAiB,GAEjB,QAAWC,KAAQP,EAAM,CACvB,GAAI,OAAOO,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAI,KAAK,YAAY,QAAQA,CAAI,IAAM,GACrC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEJ,EAAQI,CAAI,EAAI,KAGlB,GAAI,OAAON,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIO,EAAY,GACVC,EAAW,OAAO,oBAAoBT,CAAI,EAChD,QAAWO,KAAQ,KAAK,YACtB,GAAIE,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKV,EAA4DO,CAAI,GACvEG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBH,EAAQI,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOP,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDG,EAAUJ,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWO,KAAQ,KAAK,WACtB,GAAI,OAAOR,EAAMQ,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQ,KAAK,YACtBJ,EAAQI,CAAI,EAAI,KAMpB,IAAMI,EAAU,MAAM,KAAK,QAAQ,IAAIZ,EAAOI,EAASC,CAAO,EACxDQ,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAAC,GAAc,EACPH,CACT,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,CAOA,aAAa,OACTI,EAAyChB,EAA8BC,EACvEgB,EAAqB,CACvBf,GAAgB,EAEhB,IAAIgB,EACAd,EAA0B,CAAA,EAE9B,GAAI,OAAOY,GAAS,UAElB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7CgB,aAAgB,YAEzB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAGpDgB,aAAgB,aACf,OAAO,kBAAsB,KAAeA,aAAgB,kBAAoB,CACnF,IAAMG,EAASH,EACXI,EAAa,EACbC,EAAaL,EAAK,WACtB,GAAI,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,GAAS,SAAU,CAEnC,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,EAAa,GAAKA,GAAcD,EAAO,WACzC,MAAM,IAAI,WAAW,oCAAoCA,EAAO,UAAU,IAAI,EAGhF,GADAE,EAAaL,EAAK,WAAaI,EAC3B,OAAOnB,GAAS,SAAU,CAE5B,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,GAAc,GAAKD,EAAaC,EAAaF,EAAO,WACtD,MAAM,IAAI,WAAW,oCAAoCA,EAAO,WAAaC,CAAU,IAAI,EAE7F,GAAI,OAAOH,GAAS,UAAYA,IAAS,KACvCb,EAAUa,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7C,OAAOhB,EAAS,IACzB,MAAM,IAAI,UAAU,gCAAkC,UAE/C,OAAOD,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,EAEtDkB,EAAuB,IAAI,WAAWC,EAAQC,EAAYC,CAAU,MAEpE,OAAM,IAAI,UAAU,qDAAyD,EAI/E,GAAM,CAACC,EAASC,CAAuB,EAAI,MAAMC,GAAoCpB,CAAO,EACtFN,EAAU,MAAMwB,EAAQ,8BAA8BJ,EAAsBK,CAAuB,EACzG,OAAAR,GAAc,EACP,IAAIlB,EAAiBC,CAAO,CACrC,CAEA,gBAAc,CACZ,KAAK,QAAQ,eAAc,CAC7B,CACA,cAAY,CACV,KAAK,QAAQ,aAAY,CAC3B,CAEA,IAAI,YAAU,CACZ,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,aAAW,CACb,OAAO,KAAK,QAAQ,WACtB,KCxNF,IA8hBa2B,GA9hBbC,GAAAC,EAAA,kBAGAC,KA2hBaH,GAA4CA,KC9hBzD,IAAAI,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAgBMC,GAGOC,GAnBbC,GAAAC,EAAA,kBAGAC,KAIAC,KASML,GAA0B,gHAGnBC,GAAP,MAAOK,CAAe,CAC1B,YAAoBC,EAAiCC,EAA4BC,EAAqB,CACpG,KAAK,QAAUF,EACf,KAAK,kBAAoBC,EACzB,KAAK,aAAeC,CACtB,CAKA,IAAI,oBAAkB,CACpB,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,qBAAmB,CACrB,OAAO,KAAK,QAAQ,WACtB,CAEA,IAAI,gBAAc,CAChB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,eAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CACA,IAAI,iBAAe,C
ACjB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,gBAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CAEA,aAAa,OAAOC,EAA+CC,EAA+B,CAEhG,IAAMC,EAA+BF,EAAgB,WAAa,GAC5DG,EAAoCH,EAAgB,gBAAkB,GACtEI,EAA0BH,GAAkB,CAAA,EAG5C,CAACI,EAASC,CAAuB,EAAI,MAAMC,GAAoCH,CAAO,EAC5F,GAAIC,EAAQ,6BAA8B,CACxC,IAAMR,EAAU,MAAMQ,EAAQ,6BAC1BL,EAAgB,gBAAiBA,EAAgB,WAAYE,EAAWC,EACxEG,CAAuB,EAC3B,OAAO,IAAIV,EAAgBC,EAAS,CAAC,CAACG,EAAgB,eAAgB,CAAC,CAACA,EAAgB,SAAS,MAEjG,OAAM,IAAI,MAAMV,EAAe,CAEnC,CAeA,wBACIkB,EAA+BC,EAAgCC,EAAkBC,EACjFC,EAAiB,CACnB,IAAMC,EAA4C,CAAA,EAC9CT,EAAsB,CAAA,EAE1B,GAAI,OAAOM,GAAU,UAAYA,IAAU,MAAQA,aAAiBI,IAAU,MAAM,QAAQJ,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIK,EAAiB,GAErB,GAAI,OAAOJ,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBG,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQH,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DI,EAAiB,GAEjB,QAAWC,KAAQL,EAAM,CACvB,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAIP,EAAY,QAAQO,CAAI,IAAM,GAChC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEH,EAAQG,CAAI,EAAI,KAGlB,GAAI,OAAOJ,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIK,EAAY,GACVC,EAAW,OAAO,oBAAoBP,CAAI,EAChD,QAAWK,KAAQP,EACjB,GAAIS,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKR,EAAmDK,CAAI,GAC9DG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBF,EAAQG,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOL,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDR,EAAUO,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWK,KAAQR,EACjB,GAAI,OAAOE,EAAMM,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQP,EACjBI,EAAQG,CAAI,EAAI,KAIpB,MAAO,CAACH,EAAST,CAAO,CAC1B,CASA,uCAAuCgB,EAAkC,CACvE,IAAMC,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAOF,CACT,CAEA,MAAM,eAAa,CACjB,MAAM,KAAK,QAAQ,cAAa,CAClC,CAIA,MAAM,aAAaX,EAAkBC,EAA+BC,EAAiB,CACnF,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,mBAAoB,KAAK,oBAAqBM,EAAOC,EAAMC,CAAI,EAC/FQ,EAAU,MAAM,KAAK,QAAQ,aAAaV,EAAOG,EAAST,CAAO,EACvE,OAAO,KAAK,uCAAuCgB,CAAO,CAC5D,CAEA,MAAM,iBAAiBhB,EAA+C,CACpE,GAAI,KAAK,kBACP,MAAM,KAAK,QAAQ,iBAAiBA,GAAW,CAAA,CAAE,MAEjD,OAAM,IAAI,MAAM,oDAAoD,CAExE,CAIA,MAAM,YAAYM,EAAkBC,EAA+BC,EAAiB,CAClF,GAAI,KAAK,aAAc,CACrB,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,eAAgB,KAAK,gBAAiBM,EAAOC,EAAMC,CAAI,EACvFQ,EAAU,MAAM,KAAK,QAAQ,YAAYV,EAAOG,EAAST,CAAO,EACtE,OAAO,KAAK,uCAAuCgB,CAAO,MAE1D,OAAM,IAAI,MAAM,+CAA+C,CAEnE,CAEA,MAAM,kBAAkBI,EAAgB,GAAI,CAC1C,OAAO,KAAK,QAAQ,kBAAkBA,CAAa,CACrD,CAEA,MAAM,qBAAqBC,EAAmBD,EAAgB,GAAI,CAChE,IAAME,EAAa,MAAM,KAAK,kBAAkBF,CAAa,EAG7D,GAAIC,EAAM,SAAW,EAAIC,EACvB,MAAM,IAAI,MACN,qJAC0D,EAEhE,OAAO,KAAK,QAAQ,qBAAqBD,EAAOD,CAAa,CAC/D,CAEA,MAAM,wBAAwBA,EAAgB,GAAI,CAChD,OAAO,KAAK,QAAQ,wBAAwBA,CAAa,CAC3D,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,KCzPF,IAmMaG,GAnMbC,GAAAC,EAAA,kBAKAC,KA8LaH,GAA0CA,KCnMvD,IAAAI,GAAA,GAAAC,GAAAD,GAAA,sBAAAE,GAAA,UAAAC,GAAA,qBAAAC,GAAA,mBAAAC,GAAA,WAAAC,GAAA,oBAAAC,GAAA,QAAAC,GAAA,oBAAAC,KAAA,IAAAC,GAAAC,EAAA,kBAmBAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,OCmHA,SAASC,GACLC,EAA8BC,EAAeC,EAAsBC,EAA8C,CACnH,GAAIF,IAAS,OAEX,OAAOG,GAAwBJ,CAAI,EAC9B,GAAIE,IAAS,OAElBG,GAAYL,EAAyBC,EAAM,CAAC,UACnC,OAAOC,GAAS,UAAYC,IAAS,OAE9CE,GAAYL,EAAyBC,EAAMC,CAAI,UACtC,OAAOA,GAAS,UAAYC,IAAS,OAE9CE,GAAYL,EAAyBE,EAAM,EAAGD,CAAI,UACzC,OAAOC,GAAS,UAAY,OAAOC,GAAS,SAErDE,GAAYL,EAAyBE,EAAMC,EAAMF,CAAI,MAErD,OAAM,IAAI,UAAU,gBAAgB,CAExC,CAEA,SAASG,GAAwBE,EAA4C,CAC3E,MA
AO,CACL,QAASP,GAAI,QAAQ,KAAK,KAAMO,CAAQ,EACxC,KAAMP,GAAI,KAAK,KAAK,KAAMO,CAAQ,EAClC,QAASP,GAAI,QAAQ,KAAK,KAAMO,CAAQ,EACxC,MAAOP,GAAI,MAAM,KAAK,KAAMO,CAAQ,EACpC,MAAOP,GAAI,MAAM,KAAK,KAAMO,CAAQ,CACtC,CACF,CAKA,SAASD,GAAYE,EAA2BC,EAAiBC,EAAgBH,EAAmB,CAClG,IAAMI,EAASC,GAAkBL,GAAY,EAAE,GAAKK,GAAkB,EAAE,EACpEC,GAAeL,CAAQ,EAAIK,GAAeF,EAAO,eAAe,IAIhEA,EAAO,cACTF,EAAU,GAAG,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIA,CAAO,IAG9CE,EAAO,kBAIXG,GAAoBH,EAAO,QAAQ,EAAE,IAAIH,EAAUC,EAASF,CAAQ,EACtE,CAjMA,IAyFMQ,GAKAC,GAwBAH,GAQAC,GAIAG,GAMFL,GAsHSM,GAkBPC,GAmBAC,GAKOC,GAsJAC,GA9bbC,GAAAC,EAAA,kBAyFMT,GAAN,KAAmD,CACjD,IAAIU,EAA4BC,EAAkBC,EAAoB,CAEtE,CACF,EACMX,GAAN,KAAsD,CACpD,IAAIR,EAA2BC,EAAiBF,EAAmB,CAEjE,QAAQ,IAAI,GAAG,KAAK,MAAMC,CAAQ,CAAC,IAAID,EAAW,WAAaA,EAAW,WAAa,EAAE,GAAGE,CAAO,EAAE,CACvG,CAEQ,MAAMD,EAA2B,CACvC,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,sBACT,IAAK,OACH,MAAO,mBACT,IAAK,UACH,MAAO,sBACT,IAAK,QACH,MAAO,sBACT,IAAK,QACH,MAAO,oBACT,QACE,MAAM,IAAI,MAAM,yBAAyBA,CAAQ,EAAE,CACvD,CACF,CACF,EAEMK,GAAiB,CACrB,QAAS,IACT,KAAM,IACN,QAAS,IACT,MAAO,IACP,MAAO,GACT,EAEMC,GAA+E,CAClF,KAAS,IAAIC,GACb,QAAY,IAAIC,EACnB,EACMC,GAAwB,CAC5B,SAAU,UACV,gBAAiB,UACjB,YAAa,GACb,kBAAmB,EACrB,EACIL,GAC0D,CAAE,GAAKK,EAAgD,GA2D3GjB,GAAV,CAGS,SAAS4B,EAAQ3B,EAAcC,EAAe,CACnDF,EAAI,UAAWC,EAAMC,CAAI,CAC3B,CAFOF,EAAS,QAAA4B,EAKT,SAASC,EAAK5B,EAAcC,EAAe,CAChDF,EAAI,OAAQC,EAAMC,CAAI,CACxB,CAFOF,EAAS,KAAA6B,EAKT,SAASC,EAAQ7B,EAAcC,EAAe,CACnDF,EAAI,UAAWC,EAAMC,CAAI,CAC3B,CAFOF,EAAS,QAAA8B,EAKT,SAASC,EAAM9B,EAAcC,EAAe,CACjDF,EAAI,QAASC,EAAMC,CAAI,CACzB,CAFOF,EAAS,MAAA+B,EAKT,SAASC,EAAM/B,EAAcC,EAAe,CACjDF,EAAI,QAASC,EAAMC,CAAI,CACzB,CAFOF,EAAS,MAAAgC,EAIT,SAASC,EAAMtB,EAA8B,CAClDC,GAAoB,CAAC,EACrBsB,EAAI,GAAIvB,GAAU,CAAC,CAAC,CACtB,CAHOX,EAAS,MAAAiC,EAIT,SAASC,EAAI3B,EAAkBI,EAA6B,CACjE,GAAIJ,IAAa,IACf0B,EAAMtB,CAAM,MACP,CACL,IAAMwB,EAAiBvB,GAAkBL,CAAQ,GAAKU,GACtDL,GAAkBL,CAAQ,EAAI,CAC5B,SAAUI,EAAO,UAAYwB,EAAe,SAC5C,gBAAiBxB,EAAO,iBAAmBwB,EAAe,gBAC1D,YAAcxB,EAAO,cAAgB,OAAawB,EAAe,YAAcxB,EAAO,YACtF,kBAAoBA,EAAO,oBAAsB,OAAawB,EAAe,kBACfxB,EAAO,iBACvE,CACF,CAGF,CAfOX,EAAS,IAAAkC,EAiBT,SAASE,EAAWC,EAAgB,CACzC,IAAM1B,EAAwB,CAAC,EAC3B0B,EAAI,WACN1B,EAAO,gBAAkB0B,EAAI,UAE/BH,EAAI,GAAIvB,CAAM,CAChB,CANOX,EAAS,WAAAoC,IAhDRpC,KAAA,IA0DGkB,GAAiBlB,GAkBxBmB,GAAN,KAAsC,CACpC,YACWZ,EAAyC+B,EAAqBC,EAC7DC,EAAsDC,EAA2BC,EAAoB,CADtG,cAAAnC,EAAyC,UAAA+B,EAAqB,eAAAC,EAC7D,iBAAAC,EAAsD,WAAAC,EAA2B,SAAAC,CAAqB,CAElH,MAAM,KAAM,CACV,OAAO,KAAK,YAAY,IAAI,CAC9B,CAEA,MAAM,YAA8B,CAClC,GAAI,KAAK,MAAQ,QAAa,KAAK,QAAU,OAC3C,MAAM,IAAI,MAAM,sBAAsB,EAEtC,YAAK,IAAI,SAAS,EACX,KAAK,IAAI,uBAAuB,KAAK,KAAK,CAErD,CACF,EAEMtB,GAAN,KAAkB,CAChB,YACWb,EAAyC+B,EAAqBC,EAA0BI,EAAiB,CAAzG,cAAApC,EAAyC,UAAA+B,EAAqB,eAAAC,EAA0B,aAAAI,CAAkB,CACvH,EAEatB,GAAN,KAAe,CAQZ,YAAYuB,EAA0BC,EAAyBC,EAAsC,CA+H7G,KAAQ,SAAW,GASnB,KAAQ,cAAgB,EAvItB,KAAK,SAAW,GAChB,KAAK,iBAAmBF,IAAoB,OAAY,IAAQA,EAChE,KAAK,gBAAkBC,IAAmB,OAAY,GAAKA,EAC3D,KAAK,6BAA+BC,IAAgC,OAAY,IAAOA,CACzF,CAZA,OAAO,OAAOnC,EAAoC,CAChD,OAAIA,IAAW,OACN,IAAI,KAEN,IAAI,KAAKA,EAAO,gBAAiBA,EAAO,eAAgBA,EAAO,2BAA2B,CACnG,CAUA,OAAQ,CACN,KAAK,SAAW,GAChB,KAAK,cAAgB,CAAC,EACtB,KAAK,WAAaW,GAAI,EACtB,KAAK,cAAgB,CACvB,CAGA,MAAO,CAEL,IADA,KAAK,SAAW,GACT,KAAK,cAAgB,KAAK,cAAc,OAAQ,KAAK,gBAC1D,KAAK,YAAY,KAAK,cAAc,KAAK,aAAa,CAAC,CAE3D,CAMA,MAASf,EAAkC+B,EAAcS,EAA4BL,EACrE,CACd,IAAMM,EAAQ,KAAK,SAAW,KAAK,MAAMzC,EAAU+B,EAAMI,CAAG,EAAI,OAC5DO,EAAY,GAEVC,EAAMH,EAAK,EAGjB,GAAIG,GAAO,OAAQA,EAAmB,MAAS,WAC7C,OAAAD,EAAY,GACL,IAAI,QAAW,CAACE,EAASC,IAAW,CACxCF,EACI,KACG,MAAMG,GAAS,CACTL,GACF,MAAMA,EAAM,IAAI,EAElBG,EAAQE,CAAK,CACf,EACA,MAAMC,GAAU,CACVN,GACF,MAAMA,EAAM,IAAI,EAElBI,EAAOE,CAAM,C
ACf,CAAC,CACX,CAAC,EAEH,GAAI,CAACL,GAAaD,EAAO,CACvB,IAAMO,EAAWP,EAAM,IAAI,EAC3B,GAAIO,GAAY,OAAOA,EAAS,MAAS,WACvC,OAAO,IAAI,QAAW,CAACJ,EAASC,IAAW,CACxCG,EAAU,KACP,IAAM,CACJJ,EAAQD,CAAG,CACb,EACCI,GAAW,CACVF,EAAOE,CAAM,CACf,CAAC,CACP,CAAC,CAEL,CACA,OAAOJ,CACT,CAGA,MAAM3C,EAAkC+B,EAAcI,EAA2B,CAC/E,GAAI,CAAC,KAAK,SACR,MAAM,IAAI,MAAM,6BAA6B,EAE/C,GAAIA,IAAQ,OAAW,CACrB,IAAMH,EAAYjB,GAAI,EACtB,YAAK,MAAMiB,CAAS,EACb,IAAIpB,GAAMZ,EAAU+B,EAAMC,EAAWiB,GAAK,KAAK,QAAQA,CAAC,CAAC,CAClE,KAAO,CACL,IAAMf,EAAoBC,EAAI,WAAW,EACzC,OAAO,IAAIvB,GAAMZ,EAAU+B,EAAM,EAAG,MAAMkB,GAAK,KAAK,IAAIA,CAAC,EAAGf,EAAOC,CAAG,CACxE,CACF,CAGA,MAAc,IAAIM,EAA6B,CAC7C,IAAML,EAAkB,MAAMK,EAAM,WAAW,EAC3C,KAAK,cAAc,OAAS,KAAK,mBACnC,KAAK,cAAc,KAAK,IAAI5B,GAAY4B,EAAM,SAAUA,EAAM,KAAMA,EAAM,UAAWL,CAAO,CAAC,EAC7F,KAAK,MAAMA,CAAO,EAEtB,CAEQ,QAAQK,EAAoB,CAClC,IAAML,EAAkBrB,GAAI,EACxB,KAAK,cAAc,OAAS,KAAK,mBACnC,KAAK,cAAc,KAAK,IAAIF,GAAY4B,EAAM,SAAUA,EAAM,KAAMA,EAAM,UAAWL,CAAO,CAAC,EAC7F,KAAK,MAAMA,CAAO,EAEtB,CAEQ,YAAYK,EAAoB,CACtC9B,GAAO,QACH,YAAY8B,EAAM,QAAQ,GAC1B,IAAIA,EAAM,QAAUA,EAAM,WAAW,QAAQ,CAAC,CAAC,gBAAgBA,EAAM,IAAI,QAAQA,EAAM,QAAQ,QAAQ,CAAC,CAAC,EAAE,CACjH,CAEQ,MAAMS,EAAqB,CACjC,GAAI,KAAK,cAAc,OAAS,KAAK,eAAiB,KAAK,iBACvDA,EAAc,KAAK,YAAc,KAAK,6BAA8B,CAGtE,QAAWC,EAAkB,KAAK,cAAe,KAAK,cAAgBA,EAAkB,KAAK,iBACxF,KAAK,cAAgB,KAAK,cAAc,OACxC,KAAK,gBACR,KAAK,YAAY,KAAK,cAAc,KAAK,aAAa,CAAC,EAGzD,KAAK,WAAapC,GAAI,CACxB,CACF,CAEA,IAAI,SAAU,CACZ,OAAO,KAAK,QACd,CAWF,EAKaA,GAAO,OAAO,YAAgB,KAAe,YAAY,IAAO,IAAM,YAAY,IAAI,EAAI,KAAK,MCtarG,SAASqC,GAAgBC,EAAkBC,EAA0BC,EAAqC,CAC/G,QAAWC,KAAQD,EAAO,CACxB,IAAME,EAASD,EAAK,CAAC,EACfE,EAASF,EAAK,CAAC,EACfG,EAAkBH,EAAK,CAAC,EACxBI,EAASJ,EAAK,CAAC,EACfK,EAASL,EAAK,CAAC,EAErB,GAAIH,EAAK,SAAWI,GAClB,QAAWK,KAASR,EAElB,IAAIQ,EAAM,SAAWJ,GAAWI,EAAM,SAAW,WAAaJ,IAAW,KACnEK,GAAcD,EAAM,QAASH,CAAe,EAC9C,MAAO,CAAC,OAAAC,EAAQ,OAAAC,CAAM,EAKhC,CAEA,MAAM,IAAI,UAAU,4BAA4BR,EAAK,MAAM,kBACvDC,EAAO,IAAIU,GAAO,GAAGA,EAAI,QAAU,SAAS,KAAKA,EAAI,OAAO,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,CAClF,CAEA,SAASD,GAAcE,EAAiBC,EAA2B,CACjE,GAAIA,EAAS,SAAS,GAAG,EAAG,CAE1B,IAAMC,EAAa,OAAO,SAASD,EAAS,UAAU,EAAGA,EAAS,OAAS,CAAC,EAAG,EAAE,EACjF,MAAO,CAAC,MAAMC,CAAU,GAAKA,GAAcF,CAC7C,SAAWC,EAAS,MAAM,GAAG,EAAE,SAAW,EAAG,CAE3C,IAAME,EAAOF,EAAS,MAAM,GAAG,EACzBC,EAAa,OAAO,SAASC,EAAK,CAAC,EAAG,EAAE,EACxCC,EAAW,OAAO,SAASD,EAAK,CAAC,EAAG,EAAE,EAC5C,MAAO,CAAC,MAAMD,CAAU,GAAK,CAAC,MAAME,CAAQ,GAAKF,GAAcF,GAAWA,GAAWI,CACvF,KAEE,QAAO,OAAO,SAASH,EAAU,EAAE,IAAMD,CAE7C,CA/DA,IAAAK,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,GAAAC,IAAA,cACAA,GAAQ,WAAa,GACrB,IAAIC,GAAsB,UAAY,CAClC,SAASA,EAAKC,EAAM,CAChB,GAAI,CAACA,EACD,MAAM,IAAI,UAAU,yCAAyC,EAEjE,KAAK,MAAQD,EAAK,MACdC,GAAQD,EAAK,OAAOC,CAAI,IACxB,KAAK,MAAQA,EAErB,CACA,OAAAD,EAAK,OAAS,SAAUC,EAAM,CAC1B,IAAIC,EAAQD,EAAK,SAAS,EAC1B,OAAOA,IAASA,aAAgBD,GAAQA,EAAK,UAAU,KAAKE,CAAK,EACrE,EACAF,EAAK,OAAS,UAAY,CACtB,OAAO,IAAIA,EAAK,CAACA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC,CAC/F,EACAA,EAAK,YAAc,UAAY,CAC3B,OAAO,IAAIA,EAAK,WAAW,CAC/B,EACAA,EAAK,MAAQ,SAAUC,EAAM,CACzB,OAAO,IAAID,EAAKC,CAAI,CACxB,EACAD,EAAK,IAAM,UAAY,CACnB,MAAO,CAACA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,EAAGA,EAAK,IAAI,CAAC,CAAC,EAAE,KAAK,GAAG,CACrF,EACAA,EAAK,IAAM,SAAUG,EAAO,CAExB,QADIC,EAAM,GACDC,EAAI,EAAGA,EAAIF,EAAOE,IAEvBD,KAAU,EAAI,KAAK,OAAO,GAAK,MAAW,GAAG,SAAS,EAAE,EAAE,UAAU,CAAC,EAEzE,OAAOA,CACX,EACAJ,EAAK,UAAU,OAAS,SAAUM,EAAO,CAGrC,OAAON,EAAK,OAAOM,CAAK,GAAK,KAAK,QAAUA,EAAM,SAAS,CAC/D,EACAN,EAAK,UAAU,QAAU,UAAY,CACjC,OAAO,KAAK,QAAUA,EAAK,KAC/B,EACAA,EAAK,UAAU,S
AAW,UAAY,CAClC,OAAO,KAAK,KAChB,EACAA,EAAK,UAAU,OAAS,UAAY,CAChC,MAAO,CACH,MAAO,KAAK,KAChB,CACJ,EACAA,EAAK,UAAY,IAAI,OAAO,iEAAkE,GAAG,EACjGA,EAAK,MAAQ,uCACNA,CACX,EAAE,EACFD,GAAQ,KAAOC,KChBf,SAASO,GAAKC,EAAKC,EAAMC,EAAU,CAMjC,KAAK,IAAMF,EAAM,EAMjB,KAAK,KAAOC,EAAO,EAMnB,KAAK,SAAW,CAAC,CAACC,CACpB,CAmCA,SAASC,GAAOC,EAAK,CACnB,OAAQA,GAAOA,EAAI,cAAmB,EACxC,CAQA,SAASC,GAAMC,EAAO,CACpB,IAAIC,EAAI,KAAK,MAAMD,EAAQ,CAACA,CAAK,EACjC,OAAOA,EAAQ,GAAKC,EAAIA,CAC1B,CA8BA,SAASC,GAAQF,EAAOJ,EAAU,CAChC,IAAIE,EAAKK,EAAWC,EACpB,OAAIR,GACFI,KAAW,GACPI,EAAS,GAAKJ,GAASA,EAAQ,OACjCG,EAAYE,GAAWL,CAAK,EACxBG,GACKA,GAEXL,EAAMQ,GAASN,EAAO,EAAG,EAAI,EACzBI,IACFC,GAAWL,CAAK,EAAIF,GACfA,KAEPE,GAAS,GACLI,EAAS,MAAQJ,GAASA,EAAQ,OACpCG,EAAYI,GAAUP,CAAK,EACvBG,GACKA,GAEXL,EAAMQ,GAASN,EAAOA,EAAQ,EAAI,GAAK,EAAG,EAAK,EAC3CI,IACFG,GAAUP,CAAK,EAAIF,GACdA,GAEX,CAiBA,SAASU,GAAWR,EAAOJ,EAAU,CACnC,GAAI,MAAMI,CAAK,EACb,OAAOJ,EAAWa,GAAQC,GAC5B,GAAId,EAAU,CACZ,GAAII,EAAQ,EACV,OAAOS,GACT,GAAIT,GAASW,GACX,OAAOC,EACX,KAAO,CACL,GAAIZ,GAAS,CAACa,GACZ,OAAOC,GACT,GAAId,EAAQ,GAAKa,GACf,OAAOE,EACX,CACA,OAAIf,EAAQ,EACHQ,GAAW,CAACR,EAAOJ,CAAQ,EAAE,IAAI,EACnCU,GAAUN,EAAQgB,GAAkB,EAAIhB,EAAQgB,GAAkB,EAAGpB,CAAQ,CACtF,CAkBA,SAASU,GAASW,EAASC,EAAUtB,EAAU,CAC7C,OAAO,IAAIH,GAAKwB,EAASC,EAAUtB,CAAQ,CAC7C,CA6BA,SAASuB,GAAWC,EAAKxB,EAAUyB,EAAO,CACxC,GAAID,EAAI,SAAW,EACjB,MAAM,MAAM,cAAc,EAQ5B,GAPI,OAAOxB,GAAa,UAEtByB,EAAQzB,EACRA,EAAW,IAEXA,EAAW,CAAC,CAACA,EAEXwB,IAAQ,OAASA,IAAQ,YAAcA,IAAQ,aAAeA,IAAQ,YACxE,OAAOxB,EAAWa,GAAQC,GAE5B,GADAW,EAAQA,GAAS,GACbA,EAAQ,GAAK,GAAKA,EACpB,MAAM,WAAW,OAAO,EAE1B,IAAIC,EACJ,IAAKA,EAAIF,EAAI,QAAQ,GAAG,GAAK,EAC3B,MAAM,MAAM,iBAAiB,EAC1B,GAAIE,IAAM,EACb,OAAOH,GAAWC,EAAI,UAAU,CAAC,EAAGxB,EAAUyB,CAAK,EAAE,IAAI,EAQ3D,QAHIE,EAAef,GAAWgB,GAAQH,EAAO,CAAC,CAAC,EAE3CI,EAASf,GACJgB,EAAI,EAAGA,EAAIN,EAAI,OAAQM,GAAK,EAAG,CACtC,IAAIC,EAAO,KAAK,IAAI,EAAGP,EAAI,OAASM,CAAC,EACnC1B,EAAQ,SAASoB,EAAI,UAAUM,EAAGA,EAAIC,CAAI,EAAGN,CAAK,EACpD,GAAIM,EAAO,EAAG,CACZ,IAAIC,EAAQpB,GAAWgB,GAAQH,EAAOM,CAAI,CAAC,EAC3CF,EAASA,EAAO,IAAIG,CAAK,EAAE,IAAIpB,GAAWR,CAAK,CAAC,CAClD,MACEyB,EAASA,EAAO,IAAIF,CAAY,EAChCE,EAASA,EAAO,IAAIjB,GAAWR,CAAK,CAAC,CAEzC,CACA,OAAAyB,EAAO,SAAW7B,EACX6B,CACT,CAmBA,SAASI,GAAUC,EAAKlC,EAAU,CAChC,OAAI,OAAOkC,GAAQ,SACVtB,GAAWsB,EAAKlC,CAAQ,EAC7B,OAAOkC,GAAQ,SACVX,GAAWW,EAAKlC,CAAQ,EAE1BU,GAASwB,EAAI,IAAKA,EAAI,KAAM,OAAOlC,GAAa,UAAYA,EAAWkC,EAAI,QAAQ,CAC5F,CAxTA,IAqBIC,GAqGAxB,GAOAF,GA2GAmB,GA+FAQ,GAOAC,GAOAjB,GAOAL,GAOAE,GAOAqB,GAMAxB,GAYAD,GAYA0B,GAYAC,GAYAC,GAYAtB,GAYAH,GAYAE,GAYAwB,EAs+BGC,GA17CPC,GAAAC,EAAA,KAqBIV,GAAO,KACX,GAAI,CACFA,GAAO,IAAI,YAAY,SAAS,IAAI,YAAY,OAAO,IAAI,WAAW,CACpE,EAAG,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,IAAK,IAAK,IAAK,EAAG,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,IAAK,EAAG,GAAI,EAAG,GAAI,EAAG,GAAI,EAAG,EAAG,IAAK,IAAK,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,EAAG,EAAG,EAAG,IAAK,IAAK,IAAK,GAAI,IAAK,IAAK,IAAK,IAAK,EAAG,EAAG,GAAI,IAAK,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EA
AG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,GAAI,GAAI,EAAG,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,IAAK,GAAI,EAAG,IAAK,GAAI,GAAI,IAAK,IAAK,IAAK,GAAI,EAAG,GAAI,GAAI,IAAK,IAAK,GAAI,EAAG,GAAI,EAAG,IAAK,EAC5nC,CAAC,CAAC,EAAG,CAAC,CAAC,EAAE,OACX,MAAY,CAEZ,CAwDAtC,GAAK,UAAU,WAEf,OAAO,eAAeA,GAAK,UAAW,aAAc,CAAE,MAAO,EAAK,CAAC,EA6BnEA,GAAK,OAASI,GAOVU,GAAY,CAAC,EAObF,GAAa,CAAC,EA0ClBZ,GAAK,QAAUS,GAkCfT,GAAK,WAAae,GAsBlBf,GAAK,SAAWa,GASZkB,GAAU,KAAK,IA4DnB/B,GAAK,WAAa0B,GAyBlB1B,GAAK,UAAYoC,GAUbG,GAAiB,MAOjBC,GAAiB,GAAK,GAOtBjB,GAAiBgB,GAAiBA,GAOlCrB,GAAiBK,GAAiBA,GAOlCH,GAAiBF,GAAiB,EAOlCuB,GAAahC,GAAQ+B,EAAc,EAMnCvB,GAAOR,GAAQ,CAAC,EAMpBT,GAAK,KAAOiB,GAMRD,GAAQP,GAAQ,EAAG,EAAI,EAM3BT,GAAK,MAAQgB,GAMT0B,GAAMjC,GAAQ,CAAC,EAMnBT,GAAK,IAAM0C,GAMPC,GAAOlC,GAAQ,EAAG,EAAI,EAM1BT,GAAK,KAAO2C,GAMRC,GAAUnC,GAAQ,EAAE,EAMxBT,GAAK,QAAU4C,GAMXtB,GAAYT,GAAS,GAAgB,WAAgB,EAAK,EAM9Db,GAAK,UAAYsB,GAMbH,GAAqBN,GAAS,GAAgB,GAAgB,EAAI,EAMtEb,GAAK,mBAAqBmB,GAMtBE,GAAYR,GAAS,EAAG,YAAgB,EAAK,EAMjDb,GAAK,UAAYqB,GAMbwB,EAAgB7C,GAAK,UAOzB6C,EAAc,MAAQ,UAAiB,CACrC,OAAO,KAAK,SAAW,KAAK,MAAQ,EAAI,KAAK,GAC/C,EAOAA,EAAc,SAAW,UAAoB,CAC3C,OAAI,KAAK,UACE,KAAK,OAAS,GAAKtB,IAAmB,KAAK,MAAQ,GACvD,KAAK,KAAOA,IAAkB,KAAK,MAAQ,EACpD,EAUAsB,EAAc,SAAW,SAAkBjB,EAAO,CAEhD,GADAA,EAAQA,GAAS,GACbA,EAAQ,GAAK,GAAKA,EACpB,MAAM,WAAW,OAAO,EAC1B,GAAI,KAAK,OAAO,EACd,MAAO,IACT,GAAI,KAAK,WAAW,EAClB,GAAI,KAAK,GAAGP,EAAS,EAAG,CAGtB,IAAI4B,EAAYlC,GAAWa,CAAK,EAC9BsB,EAAM,KAAK,IAAID,CAAS,EACxBE,EAAOD,EAAI,IAAID,CAAS,EAAE,IAAI,IAAI,EACpC,OAAOC,EAAI,SAAStB,CAAK,EAAIuB,EAAK,MAAM,EAAE,SAASvB,CAAK,CAC1D,KACE,OAAO,IAAM,KAAK,IAAI,EAAE,SAASA,CAAK,EAQ1C,QAHIE,EAAef,GAAWgB,GAAQH,EAAO,CAAC,EAAG,KAAK,QAAQ,EAC5DwB,EAAM,KACJpB,EAAS,KACA,CACX,IAAIqB,EAASD,EAAI,IAAItB,CAAY,EAC/BwB,EAASF,EAAI,IAAIC,EAAO,IAAIvB,CAAY,CAAC,EAAE,MAAM,IAAM,EACvDyB,EAASD,EAAO,SAAS1B,CAAK,EAEhC,GADAwB,EAAMC,EACFD,EAAI,OAAO,EACb,OAAOG,EAASvB,EAEhB,KAAOuB,EAAO,OAAS,GACrBA,EAAS,IAAMA,EACjBvB,EAAS,GAAKuB,EAASvB,CAE3B,CACF,EAOAa,EAAc,YAAc,UAAuB,CACjD,OAAO,KAAK,IACd,EAOAA,EAAc,oBAAsB,UAA+B,CACjE,OAAO,KAAK,OAAS,CACvB,EAOAA,EAAc,WAAa,UAAsB,CAC/C,OAAO,KAAK,GACd,EAOAA,EAAc,mBAAqB,UAA8B,CAC/D,OAAO,KAAK,MAAQ,CACtB,EAOAA,EAAc,cAAgB,UAAyB,CACrD,GAAI,KAAK,WAAW,EAClB,OAAO,KAAK,GAAGxB,EAAS,EAAI,GAAK,KAAK,IAAI,EAAE,cAAc,EAE5D,QADIgB,EAAM,KAAK,MAAQ,EAAI,KAAK,KAAO,KAAK,IACnCmB,EAAM,GAAIA,EAAM,GAClB,EAAAnB,EAAO,GAAKmB,GADSA,IAC1B,CAEF,OAAO,KAAK,MAAQ,EAAIA,EAAM,GAAKA,EAAM,CAC3C,EAOAX,EAAc,OAAS,UAAkB,CACvC,OAAO,KAAK,OAAS,GAAK,KAAK,MAAQ,CACzC,EAMAA,EAAc,IAAMA,EAAc,OAOlCA,EAAc,WAAa,UAAsB,CAC/C,MAAO,CAAC,KAAK,UAAY,KAAK,KAAO,CACvC,EAOAA,EAAc,WAAa,UAAsB,CAC/C,OAAO,KAAK,UAAY,KAAK,MAAQ,CACvC,EAOAA,EAAc,MAAQ,UAAiB,CACrC,OAAQ,KAAK,IAAM,KAAO,CAC5B,EAOAA,EAAc,OAAS,UAAkB,CACvC,OAAQ,KAAK,IAAM,KAAO,CAC5B,EAQAA,EAAc,OAAS,SAAgBY,EAAO,CAG5C,OAFKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GACrB,KAAK,WAAaA,EAAM,UAAa,KAAK,OAAS,KAAQ,GAAMA,EAAM,OAAS,KAAQ,EACnF,GACF,KAAK,OAASA,EAAM,MAAQ,KAAK,MAAQA,EAAM,GACxD,EAQAZ,EAAc,GAAKA,EAAc,OAQjCA,EAAc,UAAY,SAAmBY,EAAO,CAClD,MAAO,CAAC,KAAK,GAAmBA,CAAK,CACvC,EAQAZ,EAAc,IAAMA,EAAc,UAQlCA,EAAc,GAAKA,EAAc,UAQjCA,EAAc,SAAW,SAAkBY,EAAO,CAChD,OAAO,KAAK,KAAqBA,CAAK,EAAI,CAC5C,EAQAZ,EAAc,GAAKA,EAAc,SAQjCA,EAAc,gBAAkB,SAAyBY,EAAO,CAC9D,OAAO,KAAK,KAAqBA,CAAK,GAAK,CAC7C,EAQAZ,EAAc,IAAMA,EAAc,gBAQlCA,EAAc,GAAKA,EAAc,gBAQjCA,EAAc,YAAc,SAAqBY,EAAO,CACtD,OAAO,KAAK,KAAqBA,CAAK,
EAAI,CAC5C,EAQAZ,EAAc,GAAKA,EAAc,YAQjCA,EAAc,mBAAqB,SAA4BY,EAAO,CACpE,OAAO,KAAK,KAAqBA,CAAK,GAAK,CAC7C,EAQAZ,EAAc,IAAMA,EAAc,mBAQlCA,EAAc,GAAKA,EAAc,mBASjCA,EAAc,QAAU,SAAiBY,EAAO,CAG9C,GAFKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GACrB,KAAK,GAAGA,CAAK,EACf,MAAO,GACT,IAAIC,EAAU,KAAK,WAAW,EAC5BC,EAAWF,EAAM,WAAW,EAC9B,OAAIC,GAAW,CAACC,EACP,GACL,CAACD,GAAWC,EACP,EAEJ,KAAK,SAGFF,EAAM,OAAS,EAAM,KAAK,OAAS,GAAOA,EAAM,OAAS,KAAK,MAASA,EAAM,MAAQ,EAAM,KAAK,MAAQ,EAAM,GAAK,EAFlH,KAAK,IAAIA,CAAK,EAAE,WAAW,EAAI,GAAK,CAG/C,EASAZ,EAAc,KAAOA,EAAc,QAOnCA,EAAc,OAAS,UAAkB,CACvC,MAAI,CAAC,KAAK,UAAY,KAAK,GAAGxB,EAAS,EAC9BA,GACF,KAAK,IAAI,EAAE,IAAIqB,EAAG,CAC3B,EAOAG,EAAc,IAAMA,EAAc,OAQlCA,EAAc,IAAM,SAAae,EAAQ,CAClCxD,GAAOwD,CAAM,IAChBA,EAASxB,GAAUwB,CAAM,GAI3B,IAAIC,EAAM,KAAK,OAAS,GACpBC,EAAM,KAAK,KAAO,MAClBC,EAAM,KAAK,MAAQ,GACnBC,EAAM,KAAK,IAAM,MAEjBC,EAAML,EAAO,OAAS,GACtBM,EAAMN,EAAO,KAAO,MACpBO,EAAMP,EAAO,MAAQ,GACrBQ,EAAMR,EAAO,IAAM,MAEnBS,EAAM,EAAGC,EAAM,EAAGC,EAAM,EAAGC,EAAM,EACrC,OAAAA,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMI,EACbI,GAAO,MACAxD,GAAU0D,GAAO,GAAMC,EAAMH,GAAO,GAAMC,EAAK,KAAK,QAAQ,CACrE,EAQAzB,EAAc,SAAW,SAAkB4B,EAAY,CACrD,OAAKrE,GAAOqE,CAAU,IACpBA,EAAarC,GAAUqC,CAAU,GAC5B,KAAK,IAAIA,EAAW,IAAI,CAAC,CAClC,EAQA5B,EAAc,IAAMA,EAAc,SAQlCA,EAAc,SAAW,SAAkB6B,EAAY,CACrD,GAAI,KAAK,OAAO,EACd,OAAO,KAKT,GAJKtE,GAAOsE,CAAU,IACpBA,EAAatC,GAAUsC,CAAU,GAG/BpC,GAAM,CACR,IAAIrC,EAAMqC,GAAK,IAAO,KAAK,IACzB,KAAK,KACLoC,EAAW,IACXA,EAAW,IAAI,EACjB,OAAO7D,GAASZ,EAAKqC,GAAK,SAAY,EAAG,KAAK,QAAQ,CACxD,CAEA,GAAIoC,EAAW,OAAO,EACpB,OAAO,KAAK,SAAW1D,GAAQC,GACjC,GAAI,KAAK,GAAGI,EAAS,EACnB,OAAOqD,EAAW,MAAM,EAAIrD,GAAYJ,GAC1C,GAAIyD,EAAW,GAAGrD,EAAS,EACzB,OAAO,KAAK,MAAM,EAAIA,GAAYJ,GAEpC,GAAI,KAAK,WAAW,EAClB,OAAIyD,EAAW,WAAW,EACjB,KAAK,IAAI,EAAE,IAAIA,EAAW,IAAI,CAAC,EAE/B,KAAK,IAAI,EAAE,IAAIA,CAAU,EAAE,IAAI,EACnC,GAAIA,EAAW,WAAW,EAC/B,OAAO,KAAK,IAAIA,EAAW,IAAI,CAAC,EAAE,IAAI,EAGxC,GAAI,KAAK,GAAGjC,EAAU,GAAKiC,EAAW,GAAGjC,EAAU,EACjD,OAAO1B,GAAW,KAAK,SAAS,EAAI2D,EAAW,SAAS,EAAG,KAAK,QAAQ,EAK1E,IAAIb,EAAM,KAAK,OAAS,GACpBC,EAAM,KAAK,KAAO,MAClBC,EAAM,KAAK,MAAQ,GACnBC,EAAM,KAAK,IAAM,MAEjBC,EAAMS,EAAW,OAAS,GAC1BR,EAAMQ,EAAW,KAAO,MACxBP,EAAMO,EAAW,MAAQ,GACzBN,EAAMM,EAAW,IAAM,MAEvBL,EAAM,EAAGC,EAAM,EAAGC,EAAM,EAAGC,EAAM,EACrC,OAAAA,GAAOR,EAAMI,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMK,EACbE,GAAOC,IAAQ,GACfA,GAAO,MACPA,GAAOP,EAAMG,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMM,EACbC,GAAOC,IAAQ,GACfA,GAAO,MACPA,GAAOP,EAAMI,EACbE,GAAOC,IAAQ,GACfA,GAAO,MACPA,GAAON,EAAME,EACbG,GAAOC,IAAQ,GACfA,GAAO,MACPD,GAAOR,EAAMO,EAAMN,EAAMK,EAAMJ,EAAMG,EAAMF,EAAMC,EACjDI,GAAO,MACAxD,GAAU0D,GAAO,GAAMC,EAAMH,GAAO,GAAMC,EAAK,KAAK,QAAQ,CACrE,EAQAzB,EAAc,IAAMA,EAAc,SASlCA,EAAc,OAAS,SAAgB8B,EAAS,CAG9C,GAFKvE,GAAOuE,CAAO,IACjBA,EAAUvC,GAAUuC,CAAO,GACzBA,EAAQ,OAAO,EACjB,MAAM,MAAM,kBAAkB,EAGhC,GAAIrC,GAAM,CAIR,GAAI,CAAC,KAAK,UACR,KAAK,OAAS,aACdqC,EAAQ,MAAQ,IAAMA,EAAQ,OAAS,GAEvC,OAAO,KAET,IAAI1E,GAAO,KAAK,SAAWqC,GAAK,MAAWA,GAAK,OAC9C,KAAK,IACL,KAAK,KACLqC,EAAQ,IACRA,EAAQ,IACV,EACA,OAAO9D,GAASZ,EAAKqC,GAAK,SAAY,EAAG,KAAK,QAAQ,CACxD,CAEA,GAAI,KAAK,OAAO,EACd,OAAO,KAAK,SAAWtB,GAAQC,GACjC,IAAI2D,EAAQxB,EAAKyB,EACjB,GAAK,KAAK,SA6BH,CAKL,GAFKF,EAAQ,WACXA,EAAUA,EAAQ,WAAW,GAC3BA,EAAQ,GAAG,IAAI,EACjB,OAAO3D,GACT,GAAI2D,EAAQ,GAAG,KAAK,KAAK,CAAC,CAAC,EACzB,OAAOhC,GACTkC,EAAM7D,EACR,KAvCoB,CAGlB,GAAI,KAAK,GAAGK,EAAS,EAAG,CACtB,GAAIsD,EAAQ,GAAGjC,EAAG,GAAKiC,EAAQ,GAAG/B,EAAO,EACvC,OAAOvB,GACJ,GAAIsD,EAAQ,GAAGtD,EAAS,EAC3B,OAAOqB,GAGP,IAAIoC,EAAW,KAAK,IAAI,CAAC,EAEzB,OADAF
,EAASE,EAAS,IAAIH,CAAO,EAAE,IAAI,CAAC,EAChCC,EAAO,GAAG3D,EAAI,EACT0D,EAAQ,WAAW,EAAIjC,GAAME,IAEpCQ,EAAM,KAAK,IAAIuB,EAAQ,IAAIC,CAAM,CAAC,EAClCC,EAAMD,EAAO,IAAIxB,EAAI,IAAIuB,CAAO,CAAC,EAC1BE,EAGb,SAAWF,EAAQ,GAAGtD,EAAS,EAC7B,OAAO,KAAK,SAAWL,GAAQC,GACjC,GAAI,KAAK,WAAW,EAClB,OAAI0D,EAAQ,WAAW,EACd,KAAK,IAAI,EAAE,IAAIA,EAAQ,IAAI,CAAC,EAC9B,KAAK,IAAI,EAAE,IAAIA,CAAO,EAAE,IAAI,EAC9B,GAAIA,EAAQ,WAAW,EAC5B,OAAO,KAAK,IAAIA,EAAQ,IAAI,CAAC,EAAE,IAAI,EACrCE,EAAM5D,EACR,CAkBA,IADAmC,EAAM,KACCA,EAAI,IAAIuB,CAAO,GAAG,CAGvBC,EAAS,KAAK,IAAI,EAAG,KAAK,MAAMxB,EAAI,SAAS,EAAIuB,EAAQ,SAAS,CAAC,CAAC,EAWpE,QAPII,EAAO,KAAK,KAAK,KAAK,IAAIH,CAAM,EAAI,KAAK,GAAG,EAC9CI,EAASD,GAAQ,GAAM,EAAIhD,GAAQ,EAAGgD,EAAO,EAAE,EAI/CE,EAAYlE,GAAW6D,CAAM,EAC7BM,EAAYD,EAAU,IAAIN,CAAO,EAC5BO,EAAU,WAAW,GAAKA,EAAU,GAAG9B,CAAG,GAC/CwB,GAAUI,EACVC,EAAYlE,GAAW6D,EAAQ,KAAK,QAAQ,EAC5CM,EAAYD,EAAU,IAAIN,CAAO,EAK/BM,EAAU,OAAO,IACnBA,EAAYvC,IAEdmC,EAAMA,EAAI,IAAII,CAAS,EACvB7B,EAAMA,EAAI,IAAI8B,CAAS,CACzB,CACA,OAAOL,CACT,EAQAhC,EAAc,IAAMA,EAAc,OAQlCA,EAAc,OAAS,SAAgB8B,EAAS,CAK9C,GAJKvE,GAAOuE,CAAO,IACjBA,EAAUvC,GAAUuC,CAAO,GAGzBrC,GAAM,CACR,IAAIrC,GAAO,KAAK,SAAWqC,GAAK,MAAWA,GAAK,OAC9C,KAAK,IACL,KAAK,KACLqC,EAAQ,IACRA,EAAQ,IACV,EACA,OAAO9D,GAASZ,EAAKqC,GAAK,SAAY,EAAG,KAAK,QAAQ,CACxD,CAEA,OAAO,KAAK,IAAI,KAAK,IAAIqC,CAAO,EAAE,IAAIA,CAAO,CAAC,CAChD,EAQA9B,EAAc,IAAMA,EAAc,OAQlCA,EAAc,IAAMA,EAAc,OAOlCA,EAAc,IAAM,UAAe,CACjC,OAAOhC,GAAS,CAAC,KAAK,IAAK,CAAC,KAAK,KAAM,KAAK,QAAQ,CACtD,EAOAgC,EAAc,kBAAoB,UAA6B,CAC7D,OAAO,KAAK,KAAO,KAAK,MAAM,KAAK,IAAI,EAAI,KAAK,MAAM,KAAK,GAAG,EAAI,EACpE,EAQAA,EAAc,IAAMA,EAAc,kBAOlCA,EAAc,mBAAqB,UAA8B,CAC/D,OAAO,KAAK,IAAMvC,GAAM,KAAK,GAAG,EAAIA,GAAM,KAAK,IAAI,EAAI,EACzD,EAQAuC,EAAc,IAAMA,EAAc,mBAQlCA,EAAc,IAAM,SAAaY,EAAO,CACtC,OAAKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GAClB5C,GAAS,KAAK,IAAM4C,EAAM,IAAK,KAAK,KAAOA,EAAM,KAAM,KAAK,QAAQ,CAC7E,EAQAZ,EAAc,GAAK,SAAYY,EAAO,CACpC,OAAKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GAClB5C,GAAS,KAAK,IAAM4C,EAAM,IAAK,KAAK,KAAOA,EAAM,KAAM,KAAK,QAAQ,CAC7E,EAQAZ,EAAc,IAAM,SAAaY,EAAO,CACtC,OAAKrD,GAAOqD,CAAK,IACfA,EAAQrB,GAAUqB,CAAK,GAClB5C,GAAS,KAAK,IAAM4C,EAAM,IAAK,KAAK,KAAOA,EAAM,KAAM,KAAK,QAAQ,CAC7E,EAQAZ,EAAc,UAAY,SAAmBsC,EAAS,CAGpD,OAFI/E,GAAO+E,CAAO,IAChBA,EAAUA,EAAQ,MAAM,IACrBA,GAAW,MAAQ,EACf,KACAA,EAAU,GACVtE,GAAS,KAAK,KAAOsE,EAAU,KAAK,MAAQA,EAAY,KAAK,MAAS,GAAKA,EAAW,KAAK,QAAQ,EAEnGtE,GAAS,EAAG,KAAK,KAAQsE,EAAU,GAAK,KAAK,QAAQ,CAChE,EAQAtC,EAAc,IAAMA,EAAc,UAQlCA,EAAc,WAAa,SAAoBsC,EAAS,CAGtD,OAFI/E,GAAO+E,CAAO,IAChBA,EAAUA,EAAQ,MAAM,IACrBA,GAAW,MAAQ,EACf,KACAA,EAAU,GACVtE,GAAU,KAAK,MAAQsE,EAAY,KAAK,MAAS,GAAKA,EAAW,KAAK,MAAQA,EAAS,KAAK,QAAQ,EAEpGtE,GAAS,KAAK,MAASsE,EAAU,GAAK,KAAK,MAAQ,EAAI,EAAI,GAAI,KAAK,QAAQ,CACvF,EAQAtC,EAAc,IAAMA,EAAc,WAQlCA,EAAc,mBAAqB,SAA4BsC,EAAS,CAEtE,OADI/E,GAAO+E,CAAO,IAAGA,EAAUA,EAAQ,MAAM,IACxCA,GAAW,MAAQ,EAAU,KAC9BA,EAAU,GAAWtE,GAAU,KAAK,MAAQsE,EAAY,KAAK,MAAS,GAAKA,EAAW,KAAK,OAASA,EAAS,KAAK,QAAQ,EAC1HA,IAAY,GAAWtE,GAAS,KAAK,KAAM,EAAG,KAAK,QAAQ,EACxDA,GAAS,KAAK,OAAUsE,EAAU,GAAK,EAAG,KAAK,QAAQ,CAChE,EAQAtC,EAAc,KAAOA,EAAc,mBAQnCA,EAAc,MAAQA,EAAc,mBAQpCA,EAAc,WAAa,SAAoBsC,EAAS,CACtD,IAAIC,EAEJ,OADIhF,GAAO+E,CAAO,IAAGA,EAAUA,EAAQ,MAAM,IACxCA,GAAW,MAAQ,EAAU,KAC9BA,IAAY,GAAWtE,GAAS,KAAK,KAAM,KAAK,IAAK,KAAK,QAAQ,EAClEsE,EAAU,IACZC,EAAK,GAAKD,EACHtE,GAAW,KAAK,KAAOsE,EAAY,KAAK,OAASC,EAAO,KAAK,MAAQD,EAAY,KAAK,MAAQC,EAAK,KAAK,QAAQ,IAEzHD,GAAW,GACXC,EAAK,GAAKD,EACHtE,GAAW,KAAK,MAAQsE,EAAY,KAAK,MAAQC,EAAO,KAAK,KAAOD,EAAY,KAAK,OAASC,EAAK,KAAK,QAAQ,EACzH,EAOAvC,EAAc,KAAOA,EAAc,WAQnCA,EAAc,YAAc,SAAqBsC,EAAS,CACxD,IAAIC,EAEJ,OADIhF,GAAO+E,CAAO,IAAGA,EAAUA,EAAQ,MAAM,IACxCA,GAAW,MA
AQ,EAAU,KAC9BA,IAAY,GAAWtE,GAAS,KAAK,KAAM,KAAK,IAAK,KAAK,QAAQ,EAClEsE,EAAU,IACZC,EAAK,GAAKD,EACHtE,GAAW,KAAK,MAAQuE,EAAM,KAAK,MAAQD,EAAa,KAAK,KAAOC,EAAM,KAAK,OAASD,EAAW,KAAK,QAAQ,IAEzHA,GAAW,GACXC,EAAK,GAAKD,EACHtE,GAAW,KAAK,KAAOuE,EAAM,KAAK,OAASD,EAAa,KAAK,MAAQC,EAAM,KAAK,MAAQD,EAAW,KAAK,QAAQ,EACzH,EAOAtC,EAAc,KAAOA,EAAc,YAOnCA,EAAc,SAAW,UAAoB,CAC3C,OAAK,KAAK,SAEHhC,GAAS,KAAK,IAAK,KAAK,KAAM,EAAK,EADjC,IAEX,EAOAgC,EAAc,WAAa,UAAsB,CAC/C,OAAI,KAAK,SACA,KACFhC,GAAS,KAAK,IAAK,KAAK,KAAM,EAAI,CAC3C,EAQAgC,EAAc,QAAU,SAAiBwC,EAAI,CAC3C,OAAOA,EAAK,KAAK,UAAU,EAAI,KAAK,UAAU,CAChD,EAOAxC,EAAc,UAAY,UAAqB,CAC7C,IAAIyC,EAAK,KAAK,KACZC,EAAK,KAAK,IACZ,MAAO,CACLA,EAAK,IACLA,IAAO,EAAI,IACXA,IAAO,GAAK,IACZA,IAAO,GACPD,EAAK,IACLA,IAAO,EAAI,IACXA,IAAO,GAAK,IACZA,IAAO,EACT,CACF,EAOAzC,EAAc,UAAY,UAAqB,CAC7C,IAAIyC,EAAK,KAAK,KACZC,EAAK,KAAK,IACZ,MAAO,CACLD,IAAO,GACPA,IAAO,GAAK,IACZA,IAAO,EAAI,IACXA,EAAK,IACLC,IAAO,GACPA,IAAO,GAAK,IACZA,IAAO,EAAI,IACXA,EAAK,GACP,CACF,EASAvF,GAAK,UAAY,SAAmBwF,EAAOrF,EAAUkF,EAAI,CACvD,OAAOA,EAAKrF,GAAK,YAAYwF,EAAOrF,CAAQ,EAAIH,GAAK,YAAYwF,EAAOrF,CAAQ,CAClF,EAQAH,GAAK,YAAc,SAAqBwF,EAAOrF,EAAU,CACvD,OAAO,IAAIH,GACTwF,EAAM,CAAC,EACPA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,EACPA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZrF,CACF,CACF,EAQAH,GAAK,YAAc,SAAqBwF,EAAOrF,EAAU,CACvD,OAAO,IAAIH,GACTwF,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,EACPA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,GACZA,EAAM,CAAC,GAAK,EACZA,EAAM,CAAC,EACPrF,CACF,CACF,EAEO2C,GAAQ9C,KC17Cf,IAiBIyF,EAjBJC,GAAAC,EAAA,KAiBIF,EAAc,CAAC,EAKnBA,EAAY,OAQZA,EAAY,MAMZA,EAAY,aAAe,EAM3BA,EAAY,WAAa,EAMzBA,EAAY,uBAAyB,EAMrCA,EAAY,mBAAqB,EAKjCA,EAAY,SAAW,CACrB,WAAY,EACZ,aAAc,CAChB,EAMAA,EAAY,MAAQ,IAAI,WAAW,CAAC,EAMpCA,EAAY,QAAU,IAAI,aAAaA,EAAY,MAAM,MAAM,EAM/DA,EAAY,QAAU,IAAI,aAAaA,EAAY,MAAM,MAAM,EAM/DA,EAAY,eAAiB,IAAI,YAAY,IAAI,WAAW,CAAC,EAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,IAAM,EASnFA,EAAY,KAAO,SAASG,EAAKC,EAAM,CAKrC,KAAK,IAAMD,EAAM,EAMjB,KAAK,KAAOC,EAAO,CACrB,EAOAJ,EAAY,KAAK,OAAS,SAASG,EAAKC,EAAM,CAE5C,OAAOD,GAAO,GAAKC,GAAQ,EAAIJ,EAAY,KAAK,KAAO,IAAIA,EAAY,KAAKG,EAAKC,CAAI,CACvF,EAKAJ,EAAY,KAAK,UAAU,UAAY,UAAW,CAChD,OAAQ,KAAK,MAAQ,GAAK,KAAK,KAAO,UACxC,EAMAA,EAAY,KAAK,UAAU,OAAS,SAASK,EAAO,CAClD,OAAO,KAAK,KAAOA,EAAM,KAAO,KAAK,MAAQA,EAAM,IACrD,EAMAL,EAAY,KAAK,KAAO,IAAIA,EAAY,KAAK,EAAG,CAAC,EAUjDA,EAAY,QAAU,SAASM,EAAkB,CAC/C,GAAKA,EAGH,IAAIC,EAAeD,MAFnB,KAAIC,EAAe,KASrB,KAAK,GAAKP,EAAY,WAAW,SAASO,CAAY,EAQtD,KAAK,MAAQA,EAQb,KAAK,SAAW,EAQhB,KAAK,OAAS,KAQd,KAAK,cAAgB,EAQrB,KAAK,SAAW,GAQhB,KAAK,aAAe,EAQpB,KAAK,QAAU,CAAC,EAQhB,KAAK,iBAAmB,EAQxB,KAAK,eAAiB,EACxB,EAEAP,EAAY,QAAQ,UAAU,MAAQ,UAAW,CAC/C,KAAK,GAAG,MAAM,EACd,KAAK,MAAQ,KAAK,GAAG,SAAS,EAC9B,KAAK,SAAW,EAChB,KAAK,OAAS,KACd,KAAK,cAAgB,EACrB,KAAK,SAAW,GAChB,KAAK,aAAe,EACpB,KAAK,QAAU,CAAC,EAChB,KAAK,iBAAmB,EACxB,KAAK,eAAiB,EACxB,EASAA,EAAY,QAAQ,UAAU,cAAgB,SAASQ,EAAe,CACpE,KAAK,eAAiBA,CACxB,EASAR,EAAY,QAAQ,UAAU,WAAa,UAAW,CACpD,OAAO,KAAK,EACd,EAQAA,EAAY,QAAQ,UAAU,aAAe,UAAW,CACtD,OAAO,KAAK,GAAG,MAAM,EAAE,SAAS,KAAK,GAAG,SAAS,EAAG,KAAK,GAAG,SAAS,EAAI,KAAK,OAAO,CAAC,CACxF,EAYAA,EAAY,QAAQ,UAAU,KAAO,SAASS,EAAMC,EAAkB,CAEhED,EAAO,KAAK,WACd,KAAK,SAAWA,GAQlB,QAHIE,EAAe,EAAE,KAAK,GAAG,SAAS,EAAI,KAAK,MAAQD,GAAqB,EAAMD,EAAO,EAGlF,KAAK,MAAQE,EAAaF,EAAOC,GAAkB,CACxD,IAAIE,EAAe,KAAK,GAAG,SAAS,EACpC,KAAK,GAAKZ,EAAY,QAAQ,eAAe,KAAK,EAAE,EACpD,KAAK,OAAS,KAAK,GAAG,SAAS,EAAIY,CACrC,CAEA,KAAK,IAAID,CAAU,CACrB,EAKAX,EAAY,QAAQ,UAAU,IAAM,SAASa,EAAW,CACtD,QAASC,EAAI,EAAGA,EAAID,EAAWC,IAC7B,KAAK,GAAG,UAAU,EAAE,KAAK,MAAO,CAAC,CAErC,EAKAd,EAAY,QAAQ,UA
AU,UAAY,SAASe,EAAO,CACxD,KAAK,GAAG,UAAU,KAAK,OAAS,EAAGA,CAAK,CAC1C,EAKAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,GAAG,WAAW,KAAK,OAAS,EAAGA,CAAK,CAC3C,EAKAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,GAAG,WAAW,KAAK,OAAS,EAAGA,CAAK,CAC3C,EAKAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,GAAG,WAAW,KAAK,OAAS,EAAGA,CAAK,CAC3C,EAKAf,EAAY,QAAQ,UAAU,aAAe,SAASe,EAAO,CAC3D,KAAK,GAAG,aAAa,KAAK,OAAS,EAAGA,CAAK,CAC7C,EAKAf,EAAY,QAAQ,UAAU,aAAe,SAASe,EAAO,CAC3D,KAAK,GAAG,aAAa,KAAK,OAAS,EAAGA,CAAK,CAC7C,EAOAf,EAAY,QAAQ,UAAU,QAAU,SAASe,EAAO,CACtD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,UAAUA,CAAK,CACtB,EAMAf,EAAY,QAAQ,UAAU,SAAW,SAASe,EAAO,CACvD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,WAAWA,CAAK,CACvB,EAMAf,EAAY,QAAQ,UAAU,SAAW,SAASe,EAAO,CACvD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,WAAWA,CAAK,CACvB,EAMAf,EAAY,QAAQ,UAAU,SAAW,SAASe,EAAO,CACvD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,WAAWA,CAAK,CACvB,EAMAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,aAAaA,CAAK,CACzB,EAMAf,EAAY,QAAQ,UAAU,WAAa,SAASe,EAAO,CACzD,KAAK,KAAK,EAAG,CAAC,EACd,KAAK,aAAaA,CAAK,CACzB,EAQAf,EAAY,QAAQ,UAAU,aAAe,SAASgB,EAASD,EAAOE,EAAc,EAC9E,KAAK,gBAAkBF,GAASE,KAClC,KAAK,QAAQF,CAAK,EAClB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,cAAgB,SAASgB,EAASD,EAAOE,EAAc,EAC/E,KAAK,gBAAkBF,GAASE,KAClC,KAAK,SAASF,CAAK,EACnB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,cAAgB,SAASgB,EAASD,EAAOE,EAAc,EAC/E,KAAK,gBAAkBF,GAASE,KAClC,KAAK,SAASF,CAAK,EACnB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,cAAgB,SAASgB,EAASD,EAAOE,EAAc,EAC/E,KAAK,gBAAkB,CAACF,EAAM,OAAOE,CAAY,KACnD,KAAK,SAASF,CAAK,EACnB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,gBAAkB,SAASgB,EAASD,EAAOE,EAAc,EACjF,KAAK,gBAAkBF,GAASE,KAClC,KAAK,WAAWF,CAAK,EACrB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,gBAAkB,SAASgB,EAASD,EAAOE,EAAc,EACjF,KAAK,gBAAkBF,GAASE,KAClC,KAAK,WAAWF,CAAK,EACrB,KAAK,KAAKC,CAAO,EAErB,EAOAhB,EAAY,QAAQ,UAAU,eAAiB,SAASgB,EAASD,EAAOE,EAAc,EAChF,KAAK,gBAAkBF,GAASE,KAClC,KAAK,UAAUF,CAAK,EACpB,KAAK,KAAKC,CAAO,EAErB,EASAhB,EAAY,QAAQ,UAAU,eAAiB,SAASgB,EAASD,EAAOE,EAAc,CAChFF,GAASE,IACX,KAAK,OAAOF,CAAK,EACjB,KAAK,KAAKC,CAAO,EAErB,EASAhB,EAAY,QAAQ,UAAU,OAAS,SAASkB,EAAK,CACnD,GAAIA,GAAO,KAAK,OAAO,EACrB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,EAMAlB,EAAY,QAAQ,UAAU,UAAY,UAAW,CACnD,GAAI,KAAK,SACP,MAAM,IAAI,MAAM,uDAAuD,CAE3E,EAOAA,EAAY,QAAQ,UAAU,KAAO,SAASgB,EAAS,CACrD,KAAK,OAAOA,CAAO,EAAI,KAAK,OAAO,CACrC,EAKAhB,EAAY,QAAQ,UAAU,OAAS,UAAW,CAChD,OAAO,KAAK,GAAG,SAAS,EAAI,KAAK,KACnC,EAcAA,EAAY,QAAQ,eAAiB,SAASmB,EAAI,CAChD,IAAIP,EAAeO,EAAG,SAAS,EAG/B,GAAIP,EAAe,WACjB,MAAM,IAAI,MAAM,qDAAqD,EAGvE,IAAIQ,EAAeR,GAAgB,EAC/BS,EAAMrB,EAAY,WAAW,SAASoB,CAAY,EACtD,OAAAC,EAAI,YAAYD,EAAeR,CAAY,EAC3CS,EAAI,MAAM,EAAE,IAAIF,EAAG,MAAM,EAAGC,EAAeR,CAAY,EAChDS,CACT,EAQArB,EAAY,QAAQ,UAAU,UAAY,SAASsB,EAAQ,CACzD,KAAK,KAAKtB,EAAY,WAAY,CAAC,EACnC,KAAK,WAAW,KAAK,OAAO,EAAIsB,EAAStB,EAAY,UAAU,CACjE,EAUAA,EAAY,QAAQ,UAAU,YAAc,SAASuB,EAAW,CAC9D,KAAK,UAAU,EACX,KAAK,QAAU,OACjB,KAAK,OAAS,CAAC,GAEjB,KAAK,cAAgBA,EACrB,QAAST,EAAI,EAAGA,EAAIS,EAAWT,IAC7B,KAAK,OAAOA,CAAC,EAAI,EAEnB,KAAK,SAAW,GAChB,KAAK,aAAe,KAAK,OAAO,CAClC,EAOAd,EAAY,QAAQ,UAAU,UAAY,UAAW,CACnD,GAAI,KAAK,QAAU,MAAQ,CAAC,KAAK,SAC/B,MAAM,IAAI,MAAM,mDAAmD,EAGrE,KAAK,SAAS,CAAC,EAKf,QAJIwB,EAAY,KAAK,OAAO,EAGxBV,EAAI,KAAK,cAAgB,EACtBA,GAAK,GAAK,KAAK,OAAOA,CAAC,GAAK,EAAGA,IAAK,CAI3C,QAHIW,EAAeX,EAAI,EAGhBA,GAAK,EAAGA,IAEb,KAAK,SAAS,KAAK,OAAOA,CAAC,GAAK,EAAIU,EAAY,KAAK,OAAOV,CAAC,EAAI,CAAC,EAGpE,IAAIY,EAAkB,EACtB,KAAK,SAASF,EAAY,KAAK,YAAY,EAC3C,IAAIG,GAAOF,EAAeC,GAAmB1B,EAAY,aACzD,KAAK,SAAS2B,CAAG,EAGjB,IAAIC,EAAkB,EAClBC,EAAM,KAAK,MACjBC,EACE,IAAKhB,EAAI,EAAGA,EAAI,KAAK,QAAQ,OAAQA,IAAK,CACxC,IAAIiB,EAAM,KAAK,GA
AG,SAAS,EAAI,KAAK,QAAQjB,CAAC,EAC7C,GAAIa,GAAO,KAAK,GAAG,UAAUI,CAAG,EAAG,CACjC,QAASC,EAAIhC,EAAY,aAAcgC,EAAIL,EAAKK,GAAKhC,EAAY,aAC/D,GAAI,KAAK,GAAG,UAAU6B,EAAMG,CAAC,GAAK,KAAK,GAAG,UAAUD,EAAMC,CAAC,EACzD,SAASF,EAGbF,EAAkB,KAAK,QAAQd,CAAC,EAChC,KACF,CACF,CAEA,OAAIc,GAGF,KAAK,MAAQ,KAAK,GAAG,SAAS,EAAIJ,EAGlC,KAAK,GAAG,WAAW,KAAK,MAAOI,EAAkBJ,CAAS,IAI1D,KAAK,QAAQ,KAAK,KAAK,OAAO,CAAC,EAG/B,KAAK,GAAG,WAAW,KAAK,GAAG,SAAS,EAAIA,EAAW,KAAK,OAAO,EAAIA,CAAS,GAG9E,KAAK,SAAW,GACTA,CACT,EAUAxB,EAAY,QAAQ,UAAU,OAAS,SAASiC,EAAYC,EAAqBC,EAAiB,CAChG,IAAIC,EAAcD,EAAkBnC,EAAY,mBAAqB,EACrE,GAAIkC,EAAqB,CACvB,IAAIG,EAAkBH,EAGtB,GAFA,KAAK,KAAK,KAAK,SAAUlC,EAAY,WACnCA,EAAY,uBAAyBoC,CAAW,EAC9CC,EAAgB,QAAUrC,EAAY,uBACxC,MAAM,IAAI,MAAM,+CACdA,EAAY,sBAAsB,EAEtC,QAAS,EAAIA,EAAY,uBAAyB,EAAG,GAAK,EAAG,IAC3D,KAAK,UAAUqC,EAAgB,WAAW,CAAC,CAAC,CAEhD,CACA,KAAK,KAAK,KAAK,SAAUrC,EAAY,WAAaoC,CAAW,EAC7D,KAAK,UAAUH,CAAU,EACrBG,GACF,KAAK,SAAS,KAAK,GAAG,SAAS,EAAI,KAAK,KAAK,EAE/C,KAAK,GAAG,YAAY,KAAK,KAAK,CAChC,EAQApC,EAAY,QAAQ,UAAU,mBAAqB,SAAUiC,EAAYC,EAAqB,CAC5F,KAAK,OAAOD,EAAYC,EAAqB,EAAI,CACnD,EAUAlC,EAAY,QAAQ,UAAU,cAAgB,SAASsC,EAAOC,EAAO,CACnE,IAAIC,EAAc,KAAK,GAAG,SAAS,EAAIF,EACnCG,EAAeD,EAAc,KAAK,GAAG,UAAUA,CAAW,EAC1DE,EAAK,KAAK,GAAG,UAAUD,EAAeF,CAAK,GAAK,EAGpD,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,sBAAwBH,EAAQ,cAAc,CAElE,EAWAvC,EAAY,QAAQ,UAAU,YAAc,SAAS2C,EAAWC,EAAWC,EAAW,CACpF,KAAK,UAAU,EACf,KAAK,iBAAmBD,EACxB,KAAK,KAAK5C,EAAY,WAAY2C,EAAYC,CAAS,EACvD,KAAK,KAAKC,EAAWF,EAAYC,CAAS,CAC5C,EASA5C,EAAY,QAAQ,UAAU,UAAY,UAAW,CACnD,YAAK,WAAW,KAAK,gBAAgB,EAC9B,KAAK,OAAO,CACrB,EAUAA,EAAY,QAAQ,UAAU,aAAe,SAAS8C,EAAG,CACvD,GAAIA,aAAa,WACf,IAAIC,EAAOD,MAKX,SAHIC,EAAO,CAAC,EACRjC,EAAI,EAEDA,EAAIgC,EAAE,QAAQ,CACnB,IAAIE,EAGAC,EAAIH,EAAE,WAAWhC,GAAG,EACxB,GAAImC,EAAI,OAAUA,GAAK,MACrBD,EAAYC,MACP,CACL,IAAIC,EAAIJ,EAAE,WAAWhC,GAAG,EACxBkC,GAAaC,GAAK,IAAMC,GAAK,MAAW,SAAgB,MAC1D,CAGIF,EAAY,IACdD,EAAK,KAAKC,CAAS,GAEfA,EAAY,KACdD,EAAK,KAAOC,GAAa,EAAK,GAAQ,GAAI,GAEtCA,EAAY,MACdD,EAAK,KAAOC,GAAa,GAAM,GAAQ,GAAI,EAE3CD,EAAK,KACDC,GAAa,GAAM,EAAQ,IAC3BA,GAAa,GAAM,GAAQ,GAAI,EAErCD,EAAK,KAAOC,GAAa,EAAK,GAAQ,GAAI,GAE5CD,EAAK,KAAMC,EAAY,GAAQ,GAAI,EAEvC,CAGF,KAAK,QAAQ,CAAC,EACd,KAAK,YAAY,EAAGD,EAAK,OAAQ,CAAC,EAClC,KAAK,GAAG,YAAY,KAAK,OAASA,EAAK,MAAM,EAC7C,QAASjC,EAAI,EAAGQ,EAAS,KAAK,MAAO6B,EAAQ,KAAK,GAAG,MAAM,EAAGrC,EAAIiC,EAAK,OAAQjC,IAC7EqC,EAAM7B,GAAQ,EAAIyB,EAAKjC,CAAC,EAE1B,OAAO,KAAK,UAAU,CACxB,EASAd,EAAY,QAAQ,UAAU,WAAa,SAASG,EAAKC,EAAM,CAC7D,OAAOJ,EAAY,KAAK,OAAOG,EAAKC,CAAI,CAC1C,EASAJ,EAAY,WAAa,SAASmD,EAAO,CAKvC,KAAK,OAASA,EAMd,KAAK,UAAY,CACnB,EAQAnD,EAAY,WAAW,SAAW,SAASa,EAAW,CACpD,OAAO,IAAIb,EAAY,WAAW,IAAI,WAAWa,CAAS,CAAC,CAC7D,EAEAb,EAAY,WAAW,UAAU,MAAQ,UAAW,CAClD,KAAK,UAAY,CACnB,EAOAA,EAAY,WAAW,UAAU,MAAQ,UAAW,CAClD,OAAO,KAAK,MACd,EAOAA,EAAY,WAAW,UAAU,SAAW,UAAW,CACrD,OAAO,KAAK,SACd,EAOAA,EAAY,WAAW,UAAU,YAAc,SAASoD,EAAU,CAChE,KAAK,UAAYA,CACnB,EAOApD,EAAY,WAAW,UAAU,SAAW,UAAW,CACrD,OAAO,KAAK,OAAO,MACrB,EAMAA,EAAY,WAAW,UAAU,SAAW,SAASsB,EAAQ,CAC3D,OAAO,KAAK,UAAUA,CAAM,GAAK,IAAM,EACzC,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,KAAK,OAAOA,CAAM,CAC3B,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,KAAK,WAAWA,CAAM,GAAK,IAAM,EAC1C,EAMAtB,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAO,KAAK,OAAOA,CAAM,EAAI,KAAK,OAAOA,EAAS,CAAC,GAAK,CAC1D,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,KAAK,OAAOA,CAAM,EAAI,KAAK,OAAOA,EAAS,CAAC,GAAK,EAAI,KAAK,OAAOA,EAAS,CAAC,GAAK,GAAK,KAAK,OAAOA,EAAS,CAAC,GAAK,EACzH,EAMAtB,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAO,KAAK,UAAUA,CAAM,IAAM,CACpC,EAMAtB,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQ,CAC5D,OAAO,IAAItB,EAAY,KAAK,KAAK,UAAUsB,CAAM,EAAG,KAAK,UAAU
A,EAAS,CAAC,CAAC,CAChF,EAMAtB,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAO,IAAItB,EAAY,KAAK,KAAK,WAAWsB,CAAM,EAAG,KAAK,WAAWA,EAAS,CAAC,CAAC,CAClF,EAMAtB,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQ,CAC9D,OAAAtB,EAAY,MAAM,CAAC,EAAI,KAAK,UAAUsB,CAAM,EACrCtB,EAAY,QAAQ,CAAC,CAC9B,EAMAA,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQ,CAC9D,OAAAtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,EAAI,KAAK,UAAUsB,CAAM,EAC7EtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,EAAI,KAAK,UAAUsB,EAAS,CAAC,EAC1EtB,EAAY,QAAQ,CAAC,CAC9B,EAMAA,EAAY,WAAW,UAAU,UAAY,SAASsB,EAAQP,EAAO,CACnE,KAAK,OAAOO,CAAM,EAA0BP,CAC9C,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,OAAOO,CAAM,EAAIP,CACxB,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,CACrC,EAMAf,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQP,EAAO,CACnE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,CACvC,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,GACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACrC,EAMAf,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQP,EAAO,CACnE,KAAK,OAAOO,CAAM,EAAIP,EACtB,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,GACnC,KAAK,OAAOO,EAAS,CAAC,EAAIP,GAAS,EACvC,EAMAf,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQP,EAAO,CACpE,KAAK,WAAWO,EAAQP,EAAM,GAAG,EACjC,KAAK,WAAWO,EAAS,EAAGP,EAAM,IAAI,CACxC,EAMAf,EAAY,WAAW,UAAU,YAAc,SAASsB,EAAQP,EAAO,CACnE,KAAK,YAAYO,EAAQP,EAAM,GAAG,EAClC,KAAK,YAAYO,EAAS,EAAGP,EAAM,IAAI,CAC3C,EAMAf,EAAY,WAAW,UAAU,aAAe,SAASsB,EAAQP,EAAO,CACtEf,EAAY,QAAQ,CAAC,EAAIe,EACzB,KAAK,WAAWO,EAAQtB,EAAY,MAAM,CAAC,CAAC,CAC9C,EAMAA,EAAY,WAAW,UAAU,aAAe,SAASsB,EAAQP,EAAO,CACtEf,EAAY,QAAQ,CAAC,EAAIe,EACzB,KAAK,WAAWO,EAAQtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,CAAC,EAC7E,KAAK,WAAWsB,EAAS,EAAGtB,EAAY,MAAMA,EAAY,eAAiB,EAAI,CAAC,CAAC,CACnF,EAQAA,EAAY,WAAW,UAAU,oBAAsB,UAAW,CAChE,GAAI,KAAK,OAAO,OAAS,KAAK,UAAYA,EAAY,WAClDA,EAAY,uBACd,MAAM,IAAI,MACN,gEAAgE,EAGtE,QADIqD,EAAS,GACJvC,EAAI,EAAGA,EAAId,EAAY,uBAAwBc,IACtDuC,GAAU,OAAO,aACb,KAAK,SAAS,KAAK,UAAYrD,EAAY,WAAac,CAAC,CAAC,EAEhE,OAAOuC,CACT,EAUArD,EAAY,WAAW,UAAU,SAAW,SAASsD,EAAQC,EAAe,CAC1E,IAAIC,EAASF,EAAS,KAAK,UAAUA,CAAM,EAC3C,OAAOC,EAAgB,KAAK,UAAUC,CAAM,EAAI,KAAK,UAAUA,EAASD,CAAa,EAAI,CAC3F,EASAvD,EAAY,WAAW,UAAU,QAAU,SAASyD,EAAGnC,EAAQ,CAC7D,OAAAmC,EAAE,OAASnC,EAAS,KAAK,UAAUA,CAAM,EACzCmC,EAAE,GAAK,KACAA,CACT,EAeAzD,EAAY,WAAW,UAAU,SAAW,SAASsB,EAAQoC,EAAc,CACzEpC,GAAU,KAAK,UAAUA,CAAM,EAE/B,IAAIqC,EAAS,KAAK,UAAUrC,CAAM,EAC9B+B,EAAS,GACTvC,EAAI,EAIR,GAFAQ,GAAUtB,EAAY,WAElB0D,IAAiB1D,EAAY,SAAS,WACxC,OAAO,KAAK,OAAO,SAASsB,EAAQA,EAASqC,CAAM,EAGrD,KAAO7C,EAAI6C,GAAQ,CACjB,IAAIX,EAGAC,EAAI,KAAK,UAAU3B,EAASR,GAAG,EACnC,GAAImC,EAAI,IACND,EAAYC,MACP,CACL,IAAIC,EAAI,KAAK,UAAU5B,EAASR,GAAG,EACnC,GAAImC,EAAI,IACND,GACIC,EAAI,KAAS,EACdC,EAAI,OACF,CACL,IAAIU,EAAI,KAAK,UAAUtC,EAASR,GAAG,EACnC,GAAImC,EAAI,IACND,GACIC,EAAI,KAAS,IACbC,EAAI,KAAS,EACdU,EAAI,OACF,CACL,IAAIC,EAAI,KAAK,UAAUvC,EAASR,GAAG,EACnCkC,GACIC,EAAI,IAAS,IACbC,EAAI,KAAS,IACbU,EAAI,KAAS,EACdC,EAAI,EACT,CACF,CACF,CAGIb,EAAY,MACdK,GAAU,OAAO,aAAaL,CAAS,GAEvCA,GAAa,MACbK,GAAU,OAAO,cACdL,GAAa,IAAM,OACnBA,EAAc,KAAW,GAAM,KAAM,EAE5C,CAEA,OAAOK,CACT,EAOArD,EAAY,WAAW,UAAU,WAAa,SAASsB,EAAQ,CAC7D,OAAOA,EAAS,KAAK,UAAUA,CAAM,CACvC,EAQAtB,EAAY,WAAW,UAAU,SAAW,SAASsB,EAAQ,CAC3D,OAAOA,EAAS,KAAK,UAAUA,CAAM,EAAItB,EAAY,UACvD,EAQAA,EAAY,WAAW,UAAU,aAAe,SAASsB,EAAQ,CAC/D,OAAO,KAAK,UAAUA,EAAS,KAAK,UAAUA,CAAM,CAAC,CACvD,EAMAtB,EAAY,WAAW,UAAU,iBAAmB,SAAS8D,EAAO,CAClE,GAAIA,EAAM,QAAU9D,EAAY,uBAC9B,MAAM,IAAI,MAAM,+CACAA,EAAY,sBAAsB,EAEpD,QAASc,EAAI,EAAGA,EAAId,EAAY,uBAAwBc,IACtD,GAAIg
D,EAAM,WAAWhD,CAAC,GAAK,KAAK,SAAS,KAAK,UAAYd,EAAY,WAAac,CAAC,EAClF,MAAO,GAGX,MAAO,EACT,EASAd,EAAY,WAAW,UAAU,WAAa,SAASG,EAAKC,EAAM,CAChE,OAAOJ,EAAY,KAAK,OAAOG,EAAKC,CAAI,CAC1C,ICpuCA,IAQiB2D,GARjBC,GAAAC,EAAA,kBAGAC,MAKiBH,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKC,OACVA,IAAA,UAAY,GAAZ,YACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,IAAM,GAAN,MACAA,IAAA,OAAS,GAAT,SACAA,IAAA,OAAS,GAAT,SACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,OAAS,GAAT,SACAA,IAAA,KAAO,GAAP,OACAA,IAAA,QAAU,GAAV,UACAA,IAAA,QAAU,GAAV,UACAA,IAAA,OAAS,IAAT,SACAA,IAAA,cAAgB,IAAhB,gBACAA,IAAA,eAAiB,IAAjB,mBAbUA,EAAAD,EAAA,sBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAqBAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKE,OAAoBA,IAAA,QAAU,GAAV,UAAaA,IAAA,MAAQ,GAAR,QAAWA,IAAA,MAAQ,GAAR,UAA5CA,EAAAF,EAAA,2BAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAOAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKG,OACVA,IAAA,UAAY,GAAZ,YACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,KAAO,GAAP,OACAA,IAAA,OAAS,GAAT,SACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,MAAQ,GAAR,QACAA,IAAA,OAAS,GAAT,SACAA,IAAA,KAAO,GAAP,OACAA,IAAA,QAAU,IAAV,UACAA,IAAA,OAAS,IAAT,SACAA,IAAA,OAAS,IAAT,SACAA,IAAA,OAAS,IAAT,SACAA,IAAA,UAAY,IAAZ,YACAA,IAAA,WAAa,IAAb,aACAA,IAAA,SAAW,IAAX,WACAA,IAAA,aAAe,IAAf,eACAA,IAAA,eAAiB,IAAjB,iBACAA,IAAA,WAAa,IAAb,aACAA,IAAA,eAAiB,IAAjB,mBArBUA,EAAAH,EAAA,uBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA6BAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKI,OAAUA,IAAA,UAAY,GAAZ,YAAeA,IAAA,MAAQ,GAAR,UAAzBA,EAAAJ,EAAA,iBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAOAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,IAAKK,OAAeA,IAAA,KAAO,GAAP,OAAUA,IAAA,YAAc,GAAd,cAAiBA,IAAA,cAAgB,GAAhB,gBAAmBA,IAAA,SAAW,GAAX,aAA7DA,EAAAL,EAAA,sBAD4BA,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAOAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMM,CAAM,CAAZ,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOC,EAAWC,EAAmC,CACnD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,eAAeA,EAA4BC,EAAoB,CACpE,OAAQA,GAAO,IAAIH,GAAS,OAAOE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,OAAO,2BAA2BA,EAA4BC,EAAoB,CAChF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIH,GAAS,OAAOE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,IAAIG,EAAeF,EAA2F,CAC5G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,WACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,WAAoB,CAClB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,WAAWC,EAA8B,CAC9CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,OAAOA,EAA8BC,EAA+B,CACzED,EAAQ,eAAe,EAAGC,EAAW,CAAC,CACxC,CAOA,OAAO,gBAAgBD,EAA8BE,EAAgD,CACnGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,eAAeA,EAA8BG,EAAkB,CACpEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,SAASH,EAAkD,CAEhE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,YAAYA,EAA8BC,EAAmD,CAClG,OAAAR,EAAM,WAAWO,CAAO,EACxBP,EAAM,OAAOO,EAASC,CAAS,EACxBR,EAAM,SAASO,CAAO,CAC/B,CACF,CAxGOb,EAAM,MAAAM,IAD2BN,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA8GAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMiB,CAAU,CAAhB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOV,EAAWC,EAAuC,CACvD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,mBAAmBA,EAA4BC,EAA4B,CAChF,OAAQA,GAAO,IAAIQ,GAAa,OAAOT,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAOA,OAAO,+BAA+BA,EAA4BC,EAA4B,CAC5F,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIQ,GAAa,OAAOT,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAA
S,EAAGA,CAAE,CACxF,CAMA,MAAMC,EAAqG,CACzG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,gBACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAQA,WAAWM,EAAgD,CACzD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,OAAO,eAAeL,EAA8B,CAClDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,SAASA,EAA8BM,EAAiC,CAC7EN,EAAQ,eAAe,EAAGM,EAAa,CAAC,CAC1C,CAMA,OAAO,cAAcN,EAA8BO,EAAsC,CACvFP,EAAQ,eAAe,EAAGO,EAAkB,CAAC,CAC/C,CAMA,OAAO,aAAaP,EAAkD,CAEpE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,gBACHA,EAA8BM,EAC9BC,EAA0D,CAC5D,OAAAH,EAAU,eAAeJ,CAAO,EAChCI,EAAU,SAASJ,EAASM,CAAW,EACvCF,EAAU,cAAcJ,EAASO,CAAgB,EAC1CH,EAAU,aAAaJ,CAAO,CACvC,CACF,CAhGOb,EAAM,UAAAiB,IAD2BjB,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMqB,CAAe,CAArB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOd,EAAWC,EAA4C,CAC5D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,wBAAwBA,EAA4BC,EAAsC,CAC/F,OAAQA,GAAO,IAAIY,GAAkB,OAAOb,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC7F,CAOA,OAAO,oCAAoCA,EAA4BC,EAAsC,CAC3G,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIY,GAAkB,OAAOb,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC7F,CAKA,SAA2D,CACzD,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC/C,CAClB,CAKA,UAA6B,CAC3B,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAQA,SAASM,EAAgD,CACvD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,OAAO,oBAAoBL,EAA8B,CACvDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BS,EAA0D,CACxGT,EAAQ,aAAa,EAAGS,EAAS,CAAuD,CAC1F,CAMA,OAAO,YAAYT,EAA8BU,EAA4B,CAC3EV,EAAQ,cAAc,EAAGU,EAAUV,EAAQ,WAAW,EAAG,CAAC,CAAC,CAC7D,CAMA,OAAO,YAAYA,EAA8BW,EAAoC,CACnFX,EAAQ,eAAe,EAAGW,EAAgB,CAAC,CAC7C,CAMA,OAAO,kBAAkBX,EAAkD,CAEzE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,qBACHA,EAA8BS,EAC9BC,EAA4BC,EAAwD,CACtF,OAAAH,EAAe,oBAAoBR,CAAO,EAC1CQ,EAAe,WAAWR,EAASS,CAAO,EAC1CD,EAAe,YAAYR,EAASU,CAAQ,EAC5CF,EAAe,YAAYR,EAASW,CAAc,EAC3CH,EAAe,kBAAkBR,CAAO,CACjD,CACF,CA/GOb,EAAM,eAAAqB,IAD2BrB,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAqHAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMyB,CAAmB,CAAzB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOlB,EAAWC,EAAgD,CAChE,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,4BAA4BA,EAA4BC,EAA8C,CAC3G,OAAQA,GAAO,IAAIgB,GAAsB,OAAOjB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACjG,CAOA,OAAO,wCAAwCA,EAA4BC,EACpD,CACrB,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIgB,GAAsB,OAAOjB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACjG,CAKA,UAAwD,CACtD,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,MAAMH,EAAmF,CACvF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,wBAAwBC,EAA8B,CAC3DA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,YAAYA,EAA8Ba,EAAuD,CACtGb,EAAQ,cAAc,EAAGa,EAAU,CAAqD,CAC1F,CAMA,OAAO,SAASb,EAA8Bc,EAAiC,CAC7Ed,EAAQ,eAAe,EAAGc,EAAa,CAAC,CAC1C,CAMA,OAAO,sBAAsBd,EAAkD,CAE7E,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,yBACHA,EAA8Ba,EAC9BC,EAAqD,CACvD,OAAAF,EAAmB,wBAAwBZ,CAAO,EAClDY,EAAmB,YAAYZ,EAASa,CAAQ,EAChDD,EAAmB,SAASZ,EAASc,CAAW,EACzCF,EAAmB,sBAAsBZ,CAAO,CACzD,CACF,CA/FOb,EAAM,mBAAAyB,IAD2BzB,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAqGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM4B,CAAQ,CAAd,cACL,QAAkC,KAElC,YAAS,EAMT,O
AAOrB,EAAWC,EAAqC,CACrD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,iBAAiBA,EAA4BC,EAAwB,CAC1E,OAAQA,GAAO,IAAImB,GAAW,OAAOpB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACtF,CAOA,OAAO,6BAA6BA,EAA4BC,EAAwB,CACtF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAImB,GAAW,OAAOpB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACtF,CAKA,SAAuD,CACrD,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,UAAUH,EAAyF,CACjG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,aAAaC,EAA8B,CAChDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BgB,EAAsD,CACpGhB,EAAQ,cAAc,EAAGgB,EAAS,CAAqD,CACzF,CAMA,OAAO,aAAahB,EAA8BiB,EAAqC,CACrFjB,EAAQ,eAAe,EAAGiB,EAAiB,CAAC,CAC9C,CAMA,OAAO,WAAWjB,EAAkD,CAElE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,cACHA,EAA8BgB,EAC9BC,EAAyD,CAC3D,OAAAF,EAAQ,aAAaf,CAAO,EAC5Be,EAAQ,WAAWf,EAASgB,CAAO,EACnCD,EAAQ,aAAaf,EAASiB,CAAe,EACtCF,EAAQ,WAAWf,CAAO,CACnC,CACF,CA9FOb,EAAM,QAAA4B,IAD2B5B,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAoGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM+B,CAAa,CAAnB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOxB,EAAWC,EAA0C,CAC1D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,sBAAsBA,EAA4BC,EAAkC,CACzF,OAAQA,GAAO,IAAIsB,GAAgB,OAAOvB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAOA,OAAO,kCAAkCA,EAA4BC,EAAkC,CACrG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIsB,GAAgB,OAAOvB,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAMA,SAASC,EAAyF,CAChG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,kBAAkBC,EAA8B,CACrDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,YAAYA,EAA8BmB,EAAoC,CACnFnB,EAAQ,eAAe,EAAGmB,EAAgB,CAAC,CAC7C,CAMA,OAAO,gBAAgBnB,EAAkD,CAEvE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,mBAAmBA,EAA8BmB,EAAwD,CAC9G,OAAAD,EAAa,kBAAkBlB,CAAO,EACtCkB,EAAa,YAAYlB,EAASmB,CAAc,EACzCD,EAAa,gBAAgBlB,CAAO,CAC7C,CACF,CA1EOb,EAAM,aAAA+B,IAD2B/B,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAgFAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMiC,CAAQ,CAAd,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO1B,EAAWC,EAAqC,CACrD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAKA,WAAoB,CAClB,OAAO,KAAK,GAAI,WAAW,KAAK,MAAM,CACxC,CAKA,aAAsB,CACpB,OAAO,KAAK,GAAI,UAAU,KAAK,OAAS,CAAC,CAC3C,CAKA,aAAsB,CACpB,OAAO,KAAK,GAAI,UAAU,KAAK,OAAS,CAAC,CAC3C,CASA,OAAO,cACHK,EAA8BqB,EAAoBC,EAClDC,EAA2C,CAC7C,OAAAvB,EAAQ,KAAK,EAAG,EAAE,EAClBA,EAAQ,WAAWuB,CAAa,EAChCvB,EAAQ,WAAWsB,CAAa,EAChCtB,EAAQ,WAAWqB,CAAU,EACtBrB,EAAQ,OAAO,CACxB,CACF,CApDOb,EAAM,QAAAiC,IAD2BjC,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0DAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMqC,CAAS,CAAf,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO9B,EAAWC,EAAsC,CACtD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,kBAAkBA,EAA4BC,EAA0B,CAC7E,OAAQA,GAAO,IAAI4B,GAAY,OAAO7B,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAOA,OAAO,8BAA8BA,EAA4BC,EAA0B,CACzF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI4B,GAAY,OAAO7B,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAKA,WAAoB,CAClB,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,OAASA,CAAM,EAAI,CAC9D,CAOA,WAAWD,EAAeF,EAAuF,CAC/G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,SACpC,OAAO,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,GAAI,KAAK,EAAG,EAC1E,IAClB,CAKA,kBAA2B,CACzB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS
,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,YAAYD,EAAeF,EAAuF,CAChH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,SACpC,OAAO,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,GAAI,KAAK,EAAG,EAC1E,IAClB,CAKA,mBAA4B,CAC1B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,cAAcC,EAA8B,CACjDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,aAAaA,EAA8ByB,EAAmB,CACnEzB,EAAQ,cAAc,EAAGyB,EAAW,CAAC,CACvC,CAMA,OAAO,cAAczB,EAA8B0B,EAAsC,CACvF1B,EAAQ,eAAe,EAAG0B,EAAkB,CAAC,CAC/C,CAMA,OAAO,sBAAsB1B,EAA8BG,EAAkB,CAC3EH,EAAQ,YAAY,GAAIG,EAAU,CAAC,CACrC,CAMA,OAAO,eAAeH,EAA8B2B,EAAuC,CACzF3B,EAAQ,eAAe,EAAG2B,EAAmB,CAAC,CAChD,CAMA,OAAO,uBAAuB3B,EAA8BG,EAAkB,CAC5EH,EAAQ,YAAY,GAAIG,EAAU,CAAC,CACrC,CAMA,OAAO,YAAYH,EAAkD,CAEnE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,eACHA,EAA8ByB,EAAmBC,EACjDC,EAA2D,CAC7D,OAAAH,EAAS,cAAcxB,CAAO,EAC9BwB,EAAS,aAAaxB,EAASyB,CAAS,EACxCD,EAAS,cAAcxB,EAAS0B,CAAgB,EAChDF,EAAS,eAAexB,EAAS2B,CAAiB,EAC3CH,EAAS,YAAYxB,CAAO,CACrC,CACF,CAnJOb,EAAM,SAAAqC,IAD2BrC,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAyJAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMyC,CAAK,CAAX,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOlC,EAAWC,EAAkC,CAClD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,cAAcA,EAA4BC,EAAkB,CACjE,OAAQA,GAAO,IAAIgC,GAAQ,OAAOjC,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnF,CAOA,OAAO,0BAA0BA,EAA4BC,EAAkB,CAC7E,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIgC,GAAQ,OAAOjC,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,OAAOA,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,cAAuB,CACrB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,CAC7D,CAKA,OAAgB,CACd,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,OAASA,CAAM,EAAI,CAC9D,CAQA,OAAOM,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,MAA8C,CAC5C,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAQA,sBAAsBM,EAAgD,CACpE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CASA,OAAOP,EAAeO,EAAgD,CACpE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,cAAuB,CACrB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,QAAQD,EAAeO,EAAgD,CACrE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,eAAwB,CACtB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,WAAWD,EAAeF,EAChB,CACR,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,WACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,kBAA2B,CACzB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAMA,eAAeD,EAA4B,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KA
AK,OAASA,CAAM,EAAID,EAAQ,CAAC,EAAI,CAC5F,CAKA,sBAA+B,CAC7B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,qBAAuC,CACrC,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EACH,IAAI,WACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CASA,eAAeD,EAAeO,EAAgD,CAC5E,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,sBAA+B,CAC7B,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,UAAUC,EAA8B,CAC7CA,EAAQ,YAAY,EAAE,CACxB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,UAAU9B,EAA8B+B,EAAkC,CAC/E/B,EAAQ,eAAe,EAAG+B,EAAc,CAAC,CAC3C,CAMA,OAAO,gBAAgB/B,EAA8BgC,EAAsB,CACzEhC,EAAQ,cAAc,EAAGgC,EAAc,CAAC,CAC1C,CAMA,OAAO,SAAShC,EAA8BF,EAAe,CAC3DE,EAAQ,cAAc,EAAGF,EAAO,CAAC,CACnC,CAMA,OAAO,UAAUE,EAA8BiC,EAAkC,CAC/EjC,EAAQ,eAAe,EAAGiC,EAAc,CAAC,CAC3C,CAMA,OAAO,QAAQjC,EAA8BkC,EAA6C,CACxFlC,EAAQ,cAAc,EAAGkC,EAAM,CAA+C,CAChF,CAMA,OAAO,yBAAyBlC,EAA8BmC,EAAiD,CAC7GnC,EAAQ,eAAe,EAAGmC,EAA6B,CAAC,CAC1D,CAMA,OAAO,UAAUnC,EAA8BoC,EAAkC,CAC/EpC,EAAQ,eAAe,EAAGoC,EAAc,CAAC,CAC3C,CAOA,OAAO,mBAAmBpC,EAA8BE,EAAgD,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8BqC,EAAmC,CACjFrC,EAAQ,eAAe,EAAGqC,EAAe,CAAC,CAC5C,CAOA,OAAO,oBAAoBrC,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,cAAcH,EAA8BsC,EAAsC,CACvFtC,EAAQ,eAAe,GAAIsC,EAAkB,CAAC,CAChD,CAOA,OAAO,uBAAuBtC,EAA8BE,EAAgD,CAC1GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,sBAAsBA,EAA8BG,EAAkB,CAC3EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,kBAAkBH,EAA8BuC,EAA0C,CAC/FvC,EAAQ,eAAe,GAAIuC,EAAsB,CAAC,CACpD,CAOA,OAAO,2BAA2BvC,EAA8BE,EAA+C,CAC7GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,0BAA0BA,EAA8BG,EAAkB,CAC/EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,kBAAkBH,EAA8BwC,EAA0C,CAC/FxC,EAAQ,eAAe,GAAIwC,EAAsB,CAAC,CACpD,CAOA,OAAO,2BAA2BxC,EAA8BE,EAAgD,CAC9GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,0BAA0BA,EAA8BG,EAAkB,CAC/EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,QAAQH,EAAkD,CAE/D,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,WACHA,EAA8B6B,EAAgCC,EAC9DC,EAAkCC,EAAsBlC,EAAemC,EACvEC,EAA6CC,EAC7CC,EAAkCC,EAAmCC,EACrEC,EAA0CC,EAA8D,CAC1G,OAAAZ,EAAK,UAAU5B,CAAO,EACtB4B,EAAK,QAAQ5B,EAAS6B,CAAU,EAChCD,EAAK,aAAa5B,EAAS8B,CAAe,EAC1CF,EAAK,UAAU5B,EAAS+B,CAAY,EACpCH,EAAK,gBAAgB5B,EAASgC,CAAY,EAC1CJ,EAAK,SAAS5B,EAASF,CAAK,EAC5B8B,EAAK,UAAU5B,EAASiC,CAAY,EACpCL,EAAK,QAAQ5B,EAASkC,CAAI,EAC1BN,EAAK,yBAAyB5B,EAASmC,CAA2B,EAClEP,EAAK,UAAU5B,EAASoC,CAAY,EACpCR,EAAK,WAAW5B,EAASqC,CAAa,EACtCT,EAAK,cAAc5B,EAASsC,CAAgB,EAC5CV,EAAK,kBAAkB5B,EAASuC,CAAoB,EACpDX,EAAK,kBAAkB5B,EAASwC,CAAoB,EAC7CZ,EAAK,QAAQ5B,CAAO,CAC7B,CACF,CAvdOb,EAAM,KAAAyC,IA
D2BzC,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA6dAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMsD,CAAU,CAAhB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO/C,EAAWC,EAAuC,CACvD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,mBAAmBA,EAA4BC,EAA4B,CAChF,OAAQA,GAAO,IAAI6C,GAAa,OAAO9C,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAOA,OAAO,+BAA+BA,EAA4BC,EAA4B,CAC5F,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI6C,GAAa,OAAO9C,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,KAAKT,EAAyF,CAC5F,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,eAAeC,EAA8B,CAClDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,QAAQ9B,EAA8B0C,EAAgC,CAC3E1C,EAAQ,eAAe,EAAG0C,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa1C,EAAkD,CAEpE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,gBACHA,EAA8B6B,EAAgCC,EAC9DY,EAAoD,CACtD,OAAAD,EAAU,eAAezC,CAAO,EAChCyC,EAAU,QAAQzC,EAAS6B,CAAU,EACrCY,EAAU,aAAazC,EAAS8B,CAAe,EAC/CW,EAAU,QAAQzC,EAAS0C,CAAU,EAC9BD,EAAU,aAAazC,CAAO,CACvC,CACF,CApHOb,EAAM,UAAAsD,IAD2BtD,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0HAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMwD,CAAS,CAAf,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOjD,EAAWC,EAAsC,CACtD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,kBAAkBA,EAA4BC,EAA0B,CAC7E,OAAQA,GAAO,IAAI+C,GAAY,OAAOhD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAOA,OAAO,8BAA8BA,EAA4BC,EAA0B,CACzF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI+C,GAAY,OAAOhD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACvF,CAQA,WAAWU,EAAgD,CACzD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,WAAwD,CACtD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,MAAmCH,EAAgB,CACjD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,QAAQH,EAAK,KAAK,OAASG,CAAM,EAAI,IAChE,CAKA,OAAO,cAAcC,EAA8B,CACjDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,cAAcA,EAA8BO,EAAsC,CACvFP,EAAQ,eAAe,EAAGO,EAAkB,CAAC,CAC/C,CAMA,OAAO,aAAaP,EAA8B4C,EAAuD,CACvG5C,EAAQ,aAAa,EAAG4C,EAAW,CAA+C,CACpF,CAMA,OAAO,SAAS5C,EAA8BM,EAAiC,CAC7EN,EAAQ,eAAe,EAAGM,EAAa,CAAC,CAC1C,CAMA,OAAO,YAAYN,EAAkD,CAEnE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,eACHA,EAA8BO,EAC9BqC,EAAuDtC,EAAqD,CAC9G,OAAAqC,EAAS,cAAc3C,CAAO,EAC9B2C,EAAS,cAAc3C,EAASO,CAAgB,EAChDoC,EAAS,aAAa3C,EAAS4C,CAAS,EACxCD,EAAS,SAAS3C,EAASM,CAAW,EAC/BqC,EAAS,YAAY3C,CAAO,CACrC,CACF,CAhHOb,EAAM,SAAAwD,IAD2BxD,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsHAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM0D,CAAc,CAApB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOnD,EAAWC,EAA2C,CAC3D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,uBAAuBA,EAA4BC,EAAoC,CAC5F,OAAQA,GAAO,IAAIiD,GAAiB,OAAOlD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC5F,CAOA,OAAO,mCAAmCA,EAA4BC,EAAoC,CACxG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIiD,GAAiB,OAAOlD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC5F,CAQA,OAAOU,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,SAA4B,CAC1B,IAAIN,E
AAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAKA,OAAO,mBAAmBC,EAA8B,CACtDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,UAAUA,EAA8B+B,EAAkC,CAC/E/B,EAAQ,eAAe,EAAG+B,EAAc,CAAC,CAC3C,CAMA,OAAO,WAAW/B,EAA8B8C,EAA2B,CACzE9C,EAAQ,cAAc,EAAG8C,EAAS9C,EAAQ,WAAW,EAAG,CAAC,CAAC,CAC5D,CAMA,OAAO,iBAAiBA,EAAkD,CAExE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,oBACHA,EAA8B+B,EAAkCe,EAA+C,CACjH,OAAAD,EAAc,mBAAmB7C,CAAO,EACxC6C,EAAc,UAAU7C,EAAS+B,CAAY,EAC7Cc,EAAc,WAAW7C,EAAS8C,CAAO,EAClCD,EAAc,iBAAiB7C,CAAO,CAC/C,CACF,CA5FOb,EAAM,cAAA0D,IAD2B1D,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAkGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM4D,CAAO,CAAb,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOrD,EAAWC,EAAoC,CACpD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,gBAAgBA,EAA4BC,EAAsB,CACvE,OAAQA,GAAO,IAAImD,GAAU,OAAOpD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACrF,CAOA,OAAO,4BAA4BA,EAA4BC,EAAsB,CACnF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAImD,GAAU,OAAOpD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACrF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,KAAKP,EAAsC,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACtE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,YAAqB,CACnB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,UAAwD,CACtD,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAMA,QAAQD,EAA4B,CAClC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,CAAK,EAAI,CACxF,CAKA,eAAwB,CACtB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,cAAgC,CAC9B,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EACH,IAAI,WACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CASA,WAAWD,EAAeO,EAAgD,CACxE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,kBAA2B,CACzB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,YAAYC,EAA8B,CAC/CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,QAAQ9B,EAA8BgD,EAAgC,CAC3EhD,EAAQ,eAAe,EAAGgD,EAAY,CAAC,CACzC,CAOA,OAAO,iBAAiBhD,EAA8BE,EAA8C,CAClGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,gBAAgBA,EAA8BG,EAAkB,CACrEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,YAAYH,EAA8BiD,EAAuD,CACtGjD,EAAQ,cAAc,EAAGiD,EAAU,CAAqD,CAC1F,CAMA,OAAO,WAAWjD,EAA8BkD,EAAmC,CACjFlD,EAAQ,eAAe,EAAGkD,EAAe,CAAC,CAC5C,CAOA,OAAO,oBAAoBlD,EAA8BE,EAA+C,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,QAAQE,EAAKR,CAAC,CAAC,EAEzB,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,cAAcH,EAA8BmD,EAAsC,CACvFnD,EAAQ,eAAe,EAAGmD,EAAkB,CAAC,CAC/C,CAOA,OAAO,uBAAuBnD,EAA8BE,EAAgD,CAC1GF,EAAQ,YAAY,EAAGE,
EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,sBAAsBA,EAA8BG,EAAkB,CAC3EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,UAAUH,EAAkD,CAEjE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,aACHA,EAA8B6B,EAAgCC,EAC9DkB,EAAgCC,EAChCC,EAAmCC,EAA0D,CAC/F,OAAAJ,EAAO,YAAY/C,CAAO,EAC1B+C,EAAO,QAAQ/C,EAAS6B,CAAU,EAClCkB,EAAO,aAAa/C,EAAS8B,CAAe,EAC5CiB,EAAO,QAAQ/C,EAASgD,CAAU,EAClCD,EAAO,YAAY/C,EAASiD,CAAQ,EACpCF,EAAO,WAAW/C,EAASkD,CAAa,EACxCH,EAAO,cAAc/C,EAASmD,CAAgB,EACvCJ,EAAO,UAAU/C,CAAO,CACjC,CACF,CAhROb,EAAM,OAAA4D,IAD2B5D,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsRAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMiE,CAAa,CAAnB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO1D,EAAWC,EAA0C,CAC1D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,sBAAsBA,EAA4BC,EAAkC,CACzF,OAAQA,GAAO,IAAIwD,GAAgB,OAAOzD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAOA,OAAO,kCAAkCA,EAA4BC,EAAkC,CACrG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIwD,GAAgB,OAAOzD,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAMA,OAAOC,EAAqF,CAC1F,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,QAAQH,EAAqF,CAC3F,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,KAAKD,EAAsC,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACtE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,YAAqB,CACnB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,kBAAkBC,EAA8B,CACrDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,UAAUA,EAA8BqD,EAAkC,CAC/ErD,EAAQ,eAAe,EAAGqD,EAAc,CAAC,CAC3C,CAMA,OAAO,WAAWrD,EAA8BsD,EAAmC,CACjFtD,EAAQ,eAAe,EAAGsD,EAAe,CAAC,CAC5C,CAMA,OAAO,QAAQtD,EAA8BgD,EAAgC,CAC3EhD,EAAQ,eAAe,EAAGgD,EAAY,CAAC,CACzC,CAOA,OAAO,iBAAiBhD,EAA8BE,EAA8C,CAClGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,gBAAgBA,EAA8BG,EAAkB,CACrEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAAkD,CAEvE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,mBACHA,EAA8BqD,EAAkCC,EAChEN,EAAoD,CACtD,OAAAI,EAAa,kBAAkBpD,CAAO,EACtCoD,EAAa,UAAUpD,EAASqD,CAAY,EAC5CD,EAAa,WAAWpD,EAASsD,CAAa,EAC9CF,EAAa,QAAQpD,EAASgD,CAAU,EACjCI,EAAa,gBAAgBpD,CAAO,CAC7C,CACF,CAhJOb,EAAM,aAAAiE,IAD2BjE,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsJAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMoE,CAAU,CAAhB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO7D,EAAWC,EAAuC,CACvD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,mBAAmBA,EAA4BC,EAA4B,CAChF,OAAQA,GAAO,IAAI2D,GAAa,OAAO5D,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAOA,OAAO,+BAA+BA,EAA4BC,EAA4B,CAC5F,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI2D,GAAa,OAAO5D,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACxF,CAQA,KAAKU,EAAgD,CACnD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,UAAUA,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,MAAmD,CACjD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAkB,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAChD,CAClB,CAKA,GAAY,CACV,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,YAAY,KAAK,OAASA,CAAM,EAAI,CAC/D,CAKA,GAAsB,CACpB,IAAIA,EA
AS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAQA,EAAEM,EAAgD,CAChD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,EAAET,EAAqF,CACrF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,EAAEH,EAAmF,CACnF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,OAAOD,EAA4B,CACjC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,YAAY,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EAAI,CAC9F,CAKA,cAAuB,CACrB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,aAAiC,CAC/B,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EACH,IAAI,aACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CAMA,KAAKD,EAAsC,CACzC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACtE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,YAAqB,CACnB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,QAAQD,EAAeO,EAAgD,CACrE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,eAAwB,CACtB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,QAAQD,EAAeF,EAAqF,CAC1G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,eAAwB,CACtB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,OAAOD,EAAeF,EAAmF,CACvG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,cAAuB,CACrB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,eAAeC,EAA8B,CAClDA,EAAQ,YAAY,EAAE,CACxB,CAMA,OAAO,QAAQA,EAA8B6B,EAAgC,CAC3E7B,EAAQ,eAAe,EAAG6B,EAAY,CAAC,CACzC,CAMA,OAAO,aAAa7B,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,QAAQ9B,EAA8BkC,EAAkD,CAC7FlC,EAAQ,cAAc,EAAGkC,EAAM,CAAoD,CACrF,CAMA,OAAO,KAAKlC,EAA8BwD,EAAW,CACnDxD,EAAQ,gBAAgB,EAAGwD,EAAG,CAAG,CACnC,CAMA,OAAO,KAAKxD,EAA8BN,EAAqB,CAC7DM,EAAQ,cAAc,EAAGN,EAAGM,EAAQ,WAAW,EAAG,CAAC,CAAC,CACtD,CAMA,OAAO,KAAKA,EAA8ByD,EAA6B,CACrEzD,EAAQ,eAAe,EAAGyD,EAAS,CAAC,CACtC,CAMA,OAAO,KAAKzD,EAA8B0D,EAA6B,CACrE1D,EAAQ,eAAe,EAAG0D,EAAS,CAAC,CACtC,CAMA,OAAO,KAAK1D,EAA8B2D,EAA6B,CACrE3D,EAAQ,eAAe,EAAG2D,EAAS,CAAC,CACtC,CAMA,OAAO,UAAU3D,EAA8B4D,EAAkC,CAC/E5D,EAAQ,eAAe,EAAG4D,EAAc,CAAC,CAC3C,CAOA,OAAO,mBAAmB5D,EAA8BE,EAA+C,CACrGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,WAAWE,EAAKR,CAAC,CAAC,EAE5B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,QAAQH,EAA8B6D,EAAgC,CAC3E7D,EAAQ,eAAe,EAAG6D,EAAY,CAAC,CACzC,CAOA,OAAO,iBAAiB7D,EAA8BE,EAA8C,CAClGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,
OAAO,gBAAgBA,EAA8BG,EAAkB,CACrEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8B8D,EAAmC,CACjF9D,EAAQ,eAAe,GAAI8D,EAAe,CAAC,CAC7C,CAOA,OAAO,oBAAoB9D,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8B+D,EAAmC,CACjF/D,EAAQ,eAAe,GAAI+D,EAAe,CAAC,CAC7C,CAOA,OAAO,oBAAoB/D,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,UAAUH,EAA8BgE,EAAkC,CAC/EhE,EAAQ,eAAe,GAAIgE,EAAc,CAAC,CAC5C,CAOA,OAAO,mBAAmBhE,EAA8BE,EAAgD,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,aAAaH,EAAkD,CAEpE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,gBACHA,EAA8B6B,EAAgCC,EAC9DI,EAAkDsB,EAAW9D,EAAqB+D,EAClFC,EAA6BC,EAA6BC,EAC1DC,EAAgCC,EAAmCC,EACnEC,EAAsD,CACxD,OAAAT,EAAU,eAAevD,CAAO,EAChCuD,EAAU,QAAQvD,EAAS6B,CAAU,EACrC0B,EAAU,aAAavD,EAAS8B,CAAe,EAC/CyB,EAAU,QAAQvD,EAASkC,CAAI,EAC/BqB,EAAU,KAAKvD,EAASwD,CAAC,EACzBD,EAAU,KAAKvD,EAASN,CAAC,EACzB6D,EAAU,KAAKvD,EAASyD,CAAO,EAC/BF,EAAU,KAAKvD,EAAS0D,CAAO,EAC/BH,EAAU,KAAKvD,EAAS2D,CAAO,EAC/BJ,EAAU,UAAUvD,EAAS4D,CAAY,EACzCL,EAAU,QAAQvD,EAAS6D,CAAU,EACrCN,EAAU,WAAWvD,EAAS8D,CAAa,EAC3CP,EAAU,WAAWvD,EAAS+D,CAAa,EAC3CR,EAAU,UAAUvD,EAASgE,CAAY,EAClCT,EAAU,aAAavD,CAAO,CACvC,CACF,CApdOb,EAAM,UAAAoE,IAD2BpE,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0dAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM8E,CAAM,CAAZ,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOvE,EAAWC,EAAmC,CACnD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,eAAeA,EAA4BC,EAAoB,CACpE,OAAQA,GAAO,IAAIqE,GAAS,OAAOtE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,OAAO,2BAA2BA,EAA4BC,EAAoB,CAChF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIqE,GAAS,OAAOtE,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,aAAaG,EAAeF,EAAqF,CAC/G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,QACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,oBAA6B,CAC3B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,SAASD,EAAeF,EAA2F,CACjH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,WACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,gBAAyB,CACvB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,MAAMD,EAAeF,EAAiF,CACpG,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,MACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,aAAsB,CACpB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,cAAuB,CACrB,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,OAASA,CAAM,EAAI,CAC9D,CAOA,UAAUD,EAAeF,EAAyF,CAChH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,UACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,iBAA0B,CACxB,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK
,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,OAAOD,EAAeO,EAAgD,CACpE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,cAAuB,CACrB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CASA,QAAQD,EAAeO,EAAgD,CACrE,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,EAAGO,CAAgB,EAAI,IAC7G,CAKA,eAAwB,CACtB,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAOA,mBAAmBD,EAAeF,EACiB,CACjD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,cACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,0BAAmC,CACjC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,WAAWC,EAA8B,CAC9CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,gBAAgBA,EAA8BkE,EAAwC,CAC3FlE,EAAQ,eAAe,EAAGkE,EAAoB,CAAC,CACjD,CAOA,OAAO,yBAAyBlE,EAA8BE,EAAgD,CAC5GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,wBAAwBA,EAA8BG,EAAkB,CAC7EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,YAAYH,EAA8BmE,EAAoC,CACnFnE,EAAQ,eAAe,EAAGmE,EAAgB,CAAC,CAC7C,CAOA,OAAO,qBAAqBnE,EAA8BE,EAAgD,CACxGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,oBAAoBA,EAA8BG,EAAkB,CACzEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,SAASH,EAA8BoE,EAAiC,CAC7EpE,EAAQ,eAAe,EAAGoE,EAAa,CAAC,CAC1C,CAOA,OAAO,kBAAkBpE,EAA8BE,EAAgD,CACrGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,iBAAiBA,EAA8BG,EAAkB,CACtEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAA8BqE,EAAsB,CACzErE,EAAQ,cAAc,EAAGqE,EAAc,CAAC,CAC1C,CAMA,OAAO,aAAarE,EAA8BsE,EAAqC,CACrFtE,EAAQ,eAAe,EAAGsE,EAAiB,CAAC,CAC9C,CAOA,OAAO,sBAAsBtE,EAA8BE,EAAgD,CACzGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,qBAAqBA,EAA8BG,EAAkB,CAC1EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,UAAUH,EAA8BoC,EAAkC,CAC/EpC,EAAQ,eAAe,EAAGoC,EAAc,CAAC,CAC3C,CAOA,OAAO,mBAAmBpC,EAA8BE,EAAgD,CACtGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,kBAAkBA,EAA8BG,EAAkB,CACvEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,WAAWH,EAA8BqC,EAAmC,CACjFrC,EAAQ,eAAe,EAAGqC,EAAe,CAAC,CAC5C,CAOA,OAAO,oBAAoBrC,EAA8BE,EAAgD,CACvGF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,mBAAmBA,EAA8BG,EAAkB,CACxEH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,sBAAsBH,EAA8BuE,EAA8C,CACvGvE,EAAQ,eAAe,EAAGuE,EAA0B,CAAC,CACvD,CAOA,OAAO,+BAA+BvE,EAA8BE,EAC7C,CACrBF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,8BAA8BA,EAA8BG,EAAkB,CACnFH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,SAASH,EAAkD,CAEhE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,YACHA,EAA8BkE,EAAwCC,EACtEC,EAAiCC,EAAsBC,EACvDlC,EAAkCC,EAClCkC,EAAkE,CACpE,OAAAN,EAAM,WAAWjE,CAAO,EACxBiE,EAAM,gBAAgBjE,EAASkE,CAAkB,EACjDD,EAAM,YAAYjE,EAASmE,CAA
c,EACzCF,EAAM,SAASjE,EAASoE,CAAW,EACnCH,EAAM,gBAAgBjE,EAASqE,CAAY,EAC3CJ,EAAM,aAAajE,EAASsE,CAAe,EAC3CL,EAAM,UAAUjE,EAASoC,CAAY,EACrC6B,EAAM,WAAWjE,EAASqC,CAAa,EACvC4B,EAAM,sBAAsBjE,EAASuE,CAAwB,EACtDN,EAAM,SAASjE,CAAO,CAC/B,CACF,CA3aOb,EAAM,MAAA8E,IAD2B9E,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAibAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMqF,CAAM,CAAZ,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO9E,EAAWC,EAAmC,CACnD,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,eAAeA,EAA4BC,EAAoB,CACpE,OAAQA,GAAO,IAAI4E,GAAS,OAAO7E,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAOA,OAAO,2BAA2BA,EAA4BC,EAAoB,CAChF,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI4E,GAAS,OAAO7E,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACpF,CAKA,WAA8B,CAC5B,IAAII,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAOA,YAAYD,EAAeF,EACyB,CAClD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,eACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,mBAA4B,CAC1B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAQA,aAAaM,EAAgD,CAC3D,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,gBAAgBA,EAAgD,CAC9D,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAQA,OAAOA,EAAgD,CACrD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,cAAiC,CAC/B,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,UAAU,KAAK,OAASA,CAAM,EAAI,KAAK,GAAI,WAAW,EAAG,CAAC,CACrF,CAQA,UAAUM,EAAgD,CACxD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,MAAMT,EAAmF,CACvF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAQA,eAAeM,EAAgD,CAC7D,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,EAAE,EAC9C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAKA,OAAO,WAAWL,EAA8B,CAC9CA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,aAAaA,EAA8ByE,EAA6B,CAC7EzE,EAAQ,cAAc,EAAGyE,EAAWzE,EAAQ,WAAW,EAAG,CAAC,CAAC,CAC9D,CAMA,OAAO,eAAeA,EAA8B0E,EAAuC,CACzF1E,EAAQ,eAAe,EAAG0E,EAAmB,CAAC,CAChD,CAOA,OAAO,wBAAwB1E,EAA8BE,EAAgD,CAC3GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,uBAAuBA,EAA8BG,EAAkB,CAC5EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAA8B2E,EAAwC,CAC3F3E,EAAQ,eAAe,EAAG2E,EAAoB,CAAC,CACjD,CAMA,OAAO,mBAAmB3E,EAA8B4E,EAA2C,CACjG5E,EAAQ,eAAe,EAAG4E,EAAuB,CAAC,CACpD,CAMA,OAAO,UAAU5E,EAA8B+B,EAAkC,CAC/E/B,EAAQ,eAAe,EAAG+B,EAAc,CAAC,CAC3C,CAMA,OAAO,gBAAgB/B,EAA8B6E,EAAgC,CACnF7E,EAAQ,cAAc,EAAG6E,EAAc7E,EAAQ,WAAW,EAAG,CAAC,CAAC,CACjE,CAMA,OAAO,aAAaA,EAA8B8B,EAAqC,CACrF9B,EAAQ,eAAe,EAAG8B,EAAiB,CAAC,CAC9C,CAMA,OAAO,SAAS9B,EAA8B8E,EAAiC,CAC7E9E,EAAQ,eAAe,EAAG8E,EAAa,CAAC,CAC1C,CAMA,OAAO,kBAAkB9E,EAA8B+E,EAA0C,CAC/F/E,EAAQ,eAAe,EAAG+E,EAAsB,CAAC,CACnD,CAMA,OAAO,SAAS/E,EAAkD,CAEhE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,YACHA,EAA8ByE,EAA6BC,EAC3DC,EAAwCC,EACxC7C,EAAkC8C,EAAgC/C,EAClEgD,EAAiCC,EAA8D,CACjG,OAAAP,EAAM,WAAWxE,CAAO,EACxBwE,EAAM,aAAaxE,EAASyE,CAAS,EACrCD,EAAM,eAAexE,EAAS0E,CAAiB,EAC/CF,EAAM,gBAAgBxE,EAAS2E,CAAkB,EACjDH,EAAM,mBAAmBxE,EAAS4E,CAAqB,EACvDJ,EAAM,UAAUxE,EAAS+B,CAAY,EACrCyC,EAAM,gBAAgBxE,EAAS6
E,CAAY,EAC3CL,EAAM,aAAaxE,EAAS8B,CAAe,EAC3C0C,EAAM,SAASxE,EAAS8E,CAAW,EACnCN,EAAM,kBAAkBxE,EAAS+E,CAAoB,EAC9CP,EAAM,SAASxE,CAAO,CAC/B,CACF,CAvQOb,EAAM,MAAAqF,IAD2BrF,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA6QAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAM6F,CAAkB,CAAxB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOtF,EAAWC,EAA+C,CAC/D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,2BAA2BA,EAA4BC,EAA4C,CACxG,OAAQA,GAAO,IAAIoF,GAAqB,OAAOrF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAChG,CAOA,OAAO,uCAAuCA,EAA4BC,EACpD,CACpB,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIoF,GAAqB,OAAOrF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAChG,CAMA,YAAYG,EAA4B,CACtC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EAAI,CAC7F,CAKA,mBAA4B,CAC1B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,kBAAqC,CACnC,IAAIA,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EACH,IAAI,YACA,KAAK,GAAI,MAAM,EAAE,OAAQ,KAAK,GAAI,MAAM,EAAE,WAAa,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAC7F,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,CAAC,EAC/C,IACN,CAMA,gBAAgBD,EAAsC,CACpD,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASA,CAAM,EAAID,EAAQ,CAAC,EACvE,KAAK,GAAI,WAAW,EAAG,CAAC,CAC1C,CAKA,uBAAgC,CAC9B,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,uBAAuBC,EAA8B,CAC1DA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,eAAeA,EAA8BiF,EAAuC,CACzFjF,EAAQ,eAAe,EAAGiF,EAAmB,CAAC,CAChD,CAOA,OAAO,wBAAwBjF,EAA8BE,EAA+C,CAC1GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,uBAAuBA,EAA8BG,EAAkB,CAC5EH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,mBAAmBH,EAA8BkF,EAA2C,CACjGlF,EAAQ,eAAe,EAAGkF,EAAuB,CAAC,CACpD,CAOA,OAAO,4BAA4BlF,EAA8BE,EAA8C,CAC7GF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,SAASE,EAAKR,CAAC,CAAC,EAE1B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,2BAA2BA,EAA8BG,EAAkB,CAChFH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,qBAAqBH,EAAkD,CAE5E,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,wBACHA,EAA8BiF,EAC9BC,EAA+D,CACjE,OAAAF,EAAkB,uBAAuBhF,CAAO,EAChDgF,EAAkB,eAAehF,EAASiF,CAAiB,EAC3DD,EAAkB,mBAAmBhF,EAASkF,CAAqB,EAC5DF,EAAkB,qBAAqBhF,CAAO,CACvD,CACF,CApKOb,EAAM,kBAAA6F,IAD2B7F,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KA0KAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMgG,CAAqB,CAA3B,cACL,QAAkC,KAElC,YAAS,EAMT,OAAOzF,EAAWC,EAAkD,CAClE,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,8BAA8BA,EAA4BC,EAAkD,CACjH,OAAQA,GAAO,IAAIuF,GAAwB,OAAOxF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnG,CAOA,OAAO,0CAA0CA,EAA4BC,EACpD,CACvB,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAIuF,GAAwB,OAAOxF,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CACnG,CAQA,QAAQU,EAAgD,CACtD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,aAAaT,EAAiG,CAC5G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,cACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,0BAA0BC,EAA8B,CAC7DA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BoF,EAAmC,CACjFpF,EAAQ,eAAe,EAAGoF,EAAe,CAAC,CAC5C,CAMA,OAAO,gBAAgBpF,EAA8BqF,EAAwC,CAC3FrF,EAAQ,eAAe,EAAGqF,EAAoB,CAAC,CACjD,CAMA,OAAO,wBAAwBrF,EAAkD,CAC/E,IAAID,EAASC,EAAQ,UAAU,EAC/B,OAAAA,EAAQ,cAAcD,EAAQ,CAAC,EACxBA,CACT,CAEA,OAAO,2BACHC,EAA8BoF,EAC9BC,EAA4D,CAC9D,OA
AAF,EAAqB,0BAA0BnF,CAAO,EACtDmF,EAAqB,WAAWnF,EAASoF,CAAa,EACtDD,EAAqB,gBAAgBnF,EAASqF,CAAkB,EACzDF,EAAqB,wBAAwBnF,CAAO,CAC7D,CACF,CAlGOb,EAAM,qBAAAgG,IAD2BhG,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAwGAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMmG,CAAa,CAAnB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO5F,EAAWC,EAA0C,CAC1D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,sBAAsBA,EAA4BC,EAAkC,CACzF,OAAQA,GAAO,IAAI0F,GAAgB,OAAO3F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAOA,OAAO,kCAAkCA,EAA4BC,EAAkC,CACrG,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI0F,GAAgB,OAAO3F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC3F,CAMA,QAAQC,EAA2G,CACjH,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,mBACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAOA,sBAAsBD,EAAeF,EACsB,CACzD,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,sBACpC,OAAO,KAAK,GAAI,WAAW,KAAK,GAAI,SAAS,KAAK,OAASiB,CAAM,EAAID,EAAQ,CAAC,EAAG,KAAK,EAAG,EAC9F,IAClB,CAKA,6BAAsC,CACpC,IAAIC,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,aAAa,KAAK,OAASA,CAAM,EAAI,CAChE,CAKA,OAAO,kBAAkBC,EAA8B,CACrDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,WAAWA,EAA8BuF,EAAmC,CACjFvF,EAAQ,eAAe,EAAGuF,EAAe,CAAC,CAC5C,CAMA,OAAO,yBAAyBvF,EAA8BwF,EAAiD,CAC7GxF,EAAQ,eAAe,EAAGwF,EAA6B,CAAC,CAC1D,CAOA,OAAO,kCAAkCxF,EAA8BE,EAChD,CACrBF,EAAQ,YAAY,EAAGE,EAAK,OAAQ,CAAC,EACrC,QAASR,EAAIQ,EAAK,OAAS,EAAGR,GAAK,EAAGA,IACpCM,EAAQ,UAAUE,EAAKR,CAAC,CAAC,EAE3B,OAAOM,EAAQ,UAAU,CAC3B,CAMA,OAAO,iCAAiCA,EAA8BG,EAAkB,CACtFH,EAAQ,YAAY,EAAGG,EAAU,CAAC,CACpC,CAMA,OAAO,gBAAgBH,EAAkD,CAEvE,OADaA,EAAQ,UAAU,CAEjC,CAEA,OAAO,mBACHA,EAA8BuF,EAC9BC,EAAqE,CACvE,OAAAF,EAAa,kBAAkBtF,CAAO,EACtCsF,EAAa,WAAWtF,EAASuF,CAAa,EAC9CD,EAAa,yBAAyBtF,EAASwF,CAA2B,EACnEF,EAAa,gBAAgBtF,CAAO,CAC7C,CACF,CAhIOb,EAAM,aAAAmG,IAD2BnG,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,KAsIAA,GAAV,CAAqB,IAACI,MAAD,CAAa,IAACC,MAAD,CAChC,MAAMsG,CAAiB,CAAvB,cACL,QAAkC,KAElC,YAAS,EAMT,OAAO/F,EAAWC,EAA8C,CAC9D,YAAK,OAASD,EACd,KAAK,GAAKC,EACH,IACT,CAOA,OAAO,0BAA0BA,EAA4BC,EAA0C,CACrG,OAAQA,GAAO,IAAI6F,GAAoB,OAAO9F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC/F,CAOA,OAAO,sCAAsCA,EAA4BC,EAA0C,CACjH,OAAAD,EAAG,YAAYA,EAAG,SAAS,EAAIE,EAAY,kBAAkB,GACrDD,GAAO,IAAI6F,GAAoB,OAAO9F,EAAG,UAAUA,EAAG,SAAS,CAAC,EAAIA,EAAG,SAAS,EAAGA,CAAE,CAC/F,CAMA,OAAO,oBAAoBA,EAAqC,CAC9D,OAAOA,EAAG,iBAAiB,MAAM,CACnC,CAQA,WAAWU,EAAgD,CACzD,IAAIN,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,EAAS,KAAK,GAAI,SAAS,KAAK,OAASA,EAAQM,CAAgB,EAAI,IAC9E,CAMA,MAAMT,EAAmF,CACvF,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,OACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAMA,aAAaH,EAAiG,CAC5G,IAAIG,EAAS,KAAK,GAAI,SAAS,KAAK,OAAQ,CAAC,EAC7C,OAAOA,GAAUH,GAAO,IAAId,EAAY,aAAa,IAAI,cACpC,OAAO,KAAK,GAAI,WAAW,KAAK,OAASiB,CAAM,EAAG,KAAK,EAAG,EAC/D,IAClB,CAKA,OAAO,sBAAsBC,EAA8B,CACzDA,EAAQ,YAAY,CAAC,CACvB,CAMA,OAAO,cAAcA,EAA8B0F,EAAsC,CACvF1F,EAAQ,eAAe,EAAG0F,EAAkB,CAAC,CAC/C,CAMA,OAAO,SAAS1F,EAA8B2F,EAAiC,CAC7E3F,EAAQ,eAAe,EAAG2F,EAAa,CAAC,CAC1C,CAMA,OAAO,gBAAgB3F,EAA8BqF,EAAwC,CAC3FrF,EAAQ,eAAe,EAAGqF,EAAoB,CAAC,CACjD,CAMA,OAAO,oBAAoBrF,EAAkD,CAE3E,OADaA,EAAQ,UAAU,CAEjC,CAMA,OAAO,6BAA6BA,EAA8BD,EAA4B,CAC5FC,EAAQ,OAAOD,EAAQ,MAAM,CAC/B,CAMA,OAAO,yCAAyCC,EAA8BD,EAA4B,CACxGC,EAAQ,OAAOD,EAAQ,OAAQ,EAAI,CACrC,CAEA,OAAO,uBACHC,EAA8B0F,EAAsCC,EACpEN,EAA4D,CAC9D,OAAAI,EAAiB,sBAAsBzF,CAAO,EAC9CyF,EAAiB,cAAczF,EAAS0F,CAAgB,EACxDD,EAAiB,SAASzF,EAAS2F,CAAW,EAC9CF,EAAiB,gBAAgBzF,EAASqF,CAAkB,EACrDI,
EAAiB,oBAAoBzF,CAAO,CACrD,CACF,CA5IOb,EAAM,iBAAAsG,IAD2BtG,EAAAD,EAAA,YAAbA,EAAAJ,EAAA,qBAAZA,KAAA,MC/oHjB,IAAA8G,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAmBjB,SAASA,GAAUC,EAAIC,EAAmB,CAKtC,QAJIC,EAAU,IAAI,MAAM,UAAU,OAAS,CAAC,EACxCC,EAAU,EACVC,EAAU,EACVC,EAAU,GACPD,EAAQ,UAAU,QACrBF,EAAOC,GAAQ,EAAI,UAAUC,GAAO,EACxC,OAAO,IAAI,QAAQ,SAAkBE,EAASC,EAAQ,CAClDL,EAAOC,CAAM,EAAI,SAAkBK,EAAmB,CAClD,GAAIH,EAEA,GADAA,EAAU,GACNG,EACAD,EAAOC,CAAG,MACT,CAGD,QAFIN,EAAS,IAAI,MAAM,UAAU,OAAS,CAAC,EACvCC,EAAS,EACNA,EAASD,EAAO,QACnBA,EAAOC,GAAQ,EAAI,UAAUA,CAAM,EACvCG,EAAQ,MAAM,KAAMJ,CAAM,CAC9B,CAER,EACA,GAAI,CACAF,EAAG,MAAMC,GAAO,KAAMC,CAAM,CAChC,OAASM,EAAK,CACNH,IACAA,EAAU,GACVE,EAAOC,CAAG,EAElB,CACJ,CAAC,CACL,ICnDA,IAAAC,GAAAC,GAAAC,IAAA,cAOA,IAAIC,GAASD,GAObC,GAAO,OAAS,SAAgBC,EAAQ,CACpC,IAAIC,EAAID,EAAO,OACf,GAAI,CAACC,EACD,MAAO,GAEX,QADIC,EAAI,EACD,EAAED,EAAI,EAAI,GAAKD,EAAO,OAAOC,CAAC,IAAM,KACvC,EAAEC,EACN,OAAO,KAAK,KAAKF,EAAO,OAAS,CAAC,EAAI,EAAIE,CAC9C,EAGA,IAAIC,GAAM,IAAI,MAAM,EAAE,EAGlBC,GAAM,IAAI,MAAM,GAAG,EAGvB,IAASC,GAAI,EAAGA,GAAI,IAChBD,GAAID,GAAIE,EAAC,EAAIA,GAAI,GAAKA,GAAI,GAAKA,GAAI,GAAKA,GAAI,GAAKA,GAAI,GAAKA,GAAI,EAAIA,GAAI,GAAK,EAAE,EAAIA,KAD5E,IAAAA,GAUTN,GAAO,OAAS,SAAgBO,EAAQC,EAAOC,EAAK,CAMhD,QALIC,EAAQ,KACRC,EAAQ,CAAC,EACTL,EAAI,EACJM,EAAI,EACJC,EACGL,EAAQC,GAAK,CAChB,IAAIK,EAAIP,EAAOC,GAAO,EACtB,OAAQI,EAAG,CACP,IAAK,GACDD,EAAML,GAAG,EAAIF,GAAIU,GAAK,CAAC,EACvBD,GAAKC,EAAI,IAAM,EACfF,EAAI,EACJ,MACJ,IAAK,GACDD,EAAML,GAAG,EAAIF,GAAIS,EAAIC,GAAK,CAAC,EAC3BD,GAAKC,EAAI,KAAO,EAChBF,EAAI,EACJ,MACJ,IAAK,GACDD,EAAML,GAAG,EAAIF,GAAIS,EAAIC,GAAK,CAAC,EAC3BH,EAAML,GAAG,EAAIF,GAAIU,EAAI,EAAE,EACvBF,EAAI,EACJ,KACR,CACIN,EAAI,QACHI,IAAUA,EAAQ,CAAC,IAAI,KAAK,OAAO,aAAa,MAAM,OAAQC,CAAK,CAAC,EACrEL,EAAI,EAEZ,CAOA,OANIM,IACAD,EAAML,GAAG,EAAIF,GAAIS,CAAC,EAClBF,EAAML,GAAG,EAAI,GACTM,IAAM,IACND,EAAML,GAAG,EAAI,KAEjBI,GACIJ,GACAI,EAAM,KAAK,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAAC,EAC5DI,EAAM,KAAK,EAAE,GAEjB,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAC9D,EAEA,IAAIS,GAAkB,mBAUtBf,GAAO,OAAS,SAAgBC,EAAQM,EAAQS,EAAQ,CAIpD,QAHIR,EAAQQ,EACRJ,EAAI,EACJC,EACKP,EAAI,EAAGA,EAAIL,EAAO,QAAS,CAChC,IAAIgB,EAAIhB,EAAO,WAAWK,GAAG,EAC7B,GAAIW,IAAM,IAAML,EAAI,EAChB,MACJ,IAAKK,EAAIZ,GAAIY,CAAC,KAAO,OACjB,MAAM,MAAMF,EAAe,EAC/B,OAAQH,EAAG,CACP,IAAK,GACDC,EAAII,EACJL,EAAI,EACJ,MACJ,IAAK,GACDL,EAAOS,GAAQ,EAAIH,GAAK,GAAKI,EAAI,KAAO,EACxCJ,EAAII,EACJL,EAAI,EACJ,MACJ,IAAK,GACDL,EAAOS,GAAQ,GAAKH,EAAI,KAAO,GAAKI,EAAI,KAAO,EAC/CJ,EAAII,EACJL,EAAI,EACJ,MACJ,IAAK,GACDL,EAAOS,GAAQ,GAAKH,EAAI,IAAM,EAAII,EAClCL,EAAI,EACJ,KACR,CACJ,CACA,GAAIA,IAAM,EACN,MAAM,MAAMG,EAAe,EAC/B,OAAOC,EAASR,CACpB,EAOAR,GAAO,KAAO,SAAcC,EAAQ,CAChC,MAAO,mEAAmE,KAAKA,CAAM,CACzF,IC1IA,IAAAiB,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAQjB,SAASA,IAAe,CAOpB,KAAK,WAAa,CAAC,CACvB,CASAA,GAAa,UAAU,GAAK,SAAYC,EAAKC,EAAIC,EAAK,CAClD,OAAC,KAAK,WAAWF,CAAG,IAAM,KAAK,WAAWA,CAAG,EAAI,CAAC,IAAI,KAAK,CACvD,GAAMC,EACN,IAAMC,GAAO,IACjB,CAAC,EACM,IACX,EAQAH,GAAa,UAAU,IAAM,SAAaC,EAAKC,EAAI,CAC/C,GAAID,IAAQ,OACR,KAAK,WAAa,CAAC,UAEfC,IAAO,OACP,KAAK,WAAWD,CAAG,EAAI,CAAC,MAGxB,SADIG,EAAY,KAAK,WAAWH,CAAG,EAC1BI,EAAI,EAAGA,EAAID,EAAU,QACtBA,EAAUC,CAAC,EAAE,KAAOH,EACpBE,EAAU,OAAOC,EAAG,CAAC,EAErB,EAAEA,EAGlB,OAAO,IACX,EAQAL,GAAa,UAAU,KAAO,SAAcC,EAAK,CAC7C,IAAIG,EAAY,KAAK,WAAWH,CAAG,EACnC,GAAIG,EAAW,CAGX,QAFIE,EAAO,CAAC,EACRD,EAAI,EACDA,EAAI,UAAU,QACjBC,EAAK,KAAK,UAAUD,GAAG,CAAC,EAC5B,IAAKA,EAAI,EAAGA,EAAID,EAAU,QACtBA,EAAUC,CAAC,EAAE,GAAG,MAAMD,EAAUC,GAAG,EAAE,IAAKC,CAAI,CACtD,CACA,OAAO,IACX,IC3EA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cAEAA,GAAO,QAAUC,GAAQA,EAAO,EAqFhC,SAASA,G
AAQF,EAAS,CAGtB,OAAI,OAAO,aAAiB,IAAc,UAAW,CAEjD,IAAIG,EAAM,IAAI,aAAa,CAAE,EAAG,CAAC,EAC7BC,EAAM,IAAI,WAAWD,EAAI,MAAM,EAC/BE,EAAMD,EAAI,CAAC,IAAM,IAErB,SAASE,EAAmBC,EAAKC,EAAKC,EAAK,CACvCN,EAAI,CAAC,EAAII,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAEA,SAASM,EAAmBH,EAAKC,EAAKC,EAAK,CACvCN,EAAI,CAAC,EAAII,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAGAJ,EAAQ,aAAeK,EAAKC,EAAqBI,EAEjDV,EAAQ,aAAeK,EAAKK,EAAqBJ,EAEjD,SAASK,EAAkBH,EAAKC,EAAK,CACjC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbN,EAAI,CAAC,CAChB,CAEA,SAASS,EAAkBJ,EAAKC,EAAK,CACjC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbN,EAAI,CAAC,CAChB,CAGAH,EAAQ,YAAcK,EAAKM,EAAoBC,EAE/CZ,EAAQ,YAAcK,EAAKO,EAAoBD,CAGnD,EAAG,EAAS,UAAW,CAEnB,SAASE,EAAmBC,EAAWP,EAAKC,EAAKC,EAAK,CAClD,IAAIM,EAAOR,EAAM,EAAI,EAAI,EAGzB,GAFIQ,IACAR,EAAM,CAACA,GACPA,IAAQ,EACRO,EAAU,EAAIP,EAAM,EAAmB,EAAqB,WAAYC,EAAKC,CAAG,UAC3E,MAAMF,CAAG,EACdO,EAAU,WAAYN,EAAKC,CAAG,UACzBF,EAAM,qBACXO,GAAWC,GAAQ,GAAK,cAAgB,EAAGP,EAAKC,CAAG,UAC9CF,EAAM,sBACXO,GAAWC,GAAQ,GAAK,KAAK,MAAMR,EAAM,oBAAqB,KAAO,EAAGC,EAAKC,CAAG,MAC/E,CACD,IAAIO,EAAW,KAAK,MAAM,KAAK,IAAIT,CAAG,EAAI,KAAK,GAAG,EAC9CU,EAAW,KAAK,MAAMV,EAAM,KAAK,IAAI,EAAG,CAACS,CAAQ,EAAI,OAAO,EAAI,QACpEF,GAAWC,GAAQ,GAAKC,EAAW,KAAO,GAAKC,KAAc,EAAGT,EAAKC,CAAG,CAC5E,CACJ,CAEAT,EAAQ,aAAea,EAAmB,KAAK,KAAMK,EAAW,EAChElB,EAAQ,aAAea,EAAmB,KAAK,KAAMM,EAAW,EAEhE,SAASC,EAAkBC,EAAUb,EAAKC,EAAK,CAC3C,IAAIa,EAAOD,EAASb,EAAKC,CAAG,EACxBM,GAAQO,GAAQ,IAAM,EAAI,EAC1BN,EAAWM,IAAS,GAAK,IACzBL,EAAWK,EAAO,QACtB,OAAON,IAAa,IACdC,EACA,IACAF,EAAO,MACPC,IAAa,EACbD,EAAO,qBAAwBE,EAC/BF,EAAO,KAAK,IAAI,EAAGC,EAAW,GAAG,GAAKC,EAAW,QAC3D,CAEAjB,EAAQ,YAAcoB,EAAkB,KAAK,KAAMG,EAAU,EAC7DvB,EAAQ,YAAcoB,EAAkB,KAAK,KAAMI,EAAU,CAEjE,EAAG,EAGC,OAAO,aAAiB,IAAc,UAAW,CAEjD,IAAIC,EAAM,IAAI,aAAa,CAAC,EAAE,CAAC,EAC3BrB,EAAM,IAAI,WAAWqB,EAAI,MAAM,EAC/BpB,EAAMD,EAAI,CAAC,IAAM,IAErB,SAASsB,EAAoBnB,EAAKC,EAAKC,EAAK,CACxCgB,EAAI,CAAC,EAAIlB,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAEA,SAASuB,EAAoBpB,EAAKC,EAAKC,EAAK,CACxCgB,EAAI,CAAC,EAAIlB,EACTC,EAAIC,CAAO,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,EACpBI,EAAIC,EAAM,CAAC,EAAIL,EAAI,CAAC,CACxB,CAGAJ,EAAQ,cAAgBK,EAAKqB,EAAsBC,EAEnD3B,EAAQ,cAAgBK,EAAKsB,EAAsBD,EAEnD,SAASE,EAAmBpB,EAAKC,EAAK,CAClC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbgB,EAAI,CAAC,CAChB,CAEA,SAASI,EAAmBrB,EAAKC,EAAK,CAClC,OAAAL,EAAI,CAAC,EAAII,EAAIC,CAAO,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAA
I,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACpBL,EAAI,CAAC,EAAII,EAAIC,EAAM,CAAC,EACbgB,EAAI,CAAC,CAChB,CAGAzB,EAAQ,aAAeK,EAAKuB,EAAqBC,EAEjD7B,EAAQ,aAAeK,EAAKwB,EAAqBD,CAGrD,EAAG,EAAS,UAAW,CAEnB,SAASE,EAAoBhB,EAAWiB,EAAMC,EAAMzB,EAAKC,EAAKC,EAAK,CAC/D,IAAIM,EAAOR,EAAM,EAAI,EAAI,EAGzB,GAFIQ,IACAR,EAAM,CAACA,GACPA,IAAQ,EACRO,EAAU,EAAGN,EAAKC,EAAMsB,CAAI,EAC5BjB,EAAU,EAAIP,EAAM,EAAmB,EAAqB,WAAYC,EAAKC,EAAMuB,CAAI,UAChF,MAAMzB,CAAG,EAChBO,EAAU,EAAGN,EAAKC,EAAMsB,CAAI,EAC5BjB,EAAU,WAAYN,EAAKC,EAAMuB,CAAI,UAC9BzB,EAAM,sBACbO,EAAU,EAAGN,EAAKC,EAAMsB,CAAI,EAC5BjB,GAAWC,GAAQ,GAAK,cAAgB,EAAGP,EAAKC,EAAMuB,CAAI,MACvD,CACH,IAAIf,EACJ,GAAIV,EAAM,uBACNU,EAAWV,EAAM,OACjBO,EAAUG,IAAa,EAAGT,EAAKC,EAAMsB,CAAI,EACzCjB,GAAWC,GAAQ,GAAKE,EAAW,cAAgB,EAAGT,EAAKC,EAAMuB,CAAI,MAClE,CACH,IAAIhB,EAAW,KAAK,MAAM,KAAK,IAAIT,CAAG,EAAI,KAAK,GAAG,EAC9CS,IAAa,OACbA,EAAW,MACfC,EAAWV,EAAM,KAAK,IAAI,EAAG,CAACS,CAAQ,EACtCF,EAAUG,EAAW,mBAAqB,EAAGT,EAAKC,EAAMsB,CAAI,EAC5DjB,GAAWC,GAAQ,GAAKC,EAAW,MAAQ,GAAKC,EAAW,QAAU,WAAa,EAAGT,EAAKC,EAAMuB,CAAI,CACxG,CACJ,CACJ,CAEAhC,EAAQ,cAAgB8B,EAAoB,KAAK,KAAMZ,GAAa,EAAG,CAAC,EACxElB,EAAQ,cAAgB8B,EAAoB,KAAK,KAAMX,GAAa,EAAG,CAAC,EAExE,SAASc,EAAmBZ,EAAUU,EAAMC,EAAMxB,EAAKC,EAAK,CACxD,IAAIyB,EAAKb,EAASb,EAAKC,EAAMsB,CAAI,EAC7BI,EAAKd,EAASb,EAAKC,EAAMuB,CAAI,EAC7BjB,GAAQoB,GAAM,IAAM,EAAI,EACxBnB,EAAWmB,IAAO,GAAK,KACvBlB,EAAW,YAAckB,EAAK,SAAWD,EAC7C,OAAOlB,IAAa,KACdC,EACA,IACAF,EAAO,MACPC,IAAa,EACbD,EAAO,OAASE,EAChBF,EAAO,KAAK,IAAI,EAAGC,EAAW,IAAI,GAAKC,EAAW,iBAC5D,CAEAjB,EAAQ,aAAeiC,EAAmB,KAAK,KAAMV,GAAY,EAAG,CAAC,EACrEvB,EAAQ,aAAeiC,EAAmB,KAAK,KAAMT,GAAY,EAAG,CAAC,CAEzE,EAAG,EAEIxB,CACX,CAIA,SAASkB,GAAYX,EAAKC,EAAKC,EAAK,CAChCD,EAAIC,CAAO,EAAKF,EAAa,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,EAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,GAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,EAC5B,CAEA,SAASY,GAAYZ,EAAKC,EAAKC,EAAK,CAChCD,EAAIC,CAAO,EAAKF,IAAQ,GACxBC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,GAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,IAAQ,EAAK,IAC7BC,EAAIC,EAAM,CAAC,EAAKF,EAAa,GACjC,CAEA,SAASgB,GAAWf,EAAKC,EAAK,CAC1B,OAAQD,EAAIC,CAAO,EACXD,EAAIC,EAAM,CAAC,GAAK,EAChBD,EAAIC,EAAM,CAAC,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,MAAQ,CACpC,CAEA,SAASe,GAAWhB,EAAKC,EAAK,CAC1B,OAAQD,EAAIC,CAAO,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,EAChBD,EAAIC,EAAM,CAAC,KAAO,CAC9B,IC9UA,IAAA2B,GAAAC,GAAA,gCACA,OAAO,QAAU,QAQjB,SAAS,QAAQ,WAAY,CACzB,GAAI,CACA,IAAI,IAAM,KAAK,QAAQ,QAAQ,IAAI,IAAI,CAAC,EAAE,UAAU,EACpD,GAAI,MAAQ,IAAI,QAAU,OAAO,KAAK,GAAG,EAAE,QACvC,OAAO,GACf,OAASC,EAAG,CAAC,CACb,OAAO,IACX,IChBA,IAAAC,GAAAC,GAAAC,IAAA,cAOA,IAAIC,GAAOD,GAOXC,GAAK,OAAS,SAAqBC,EAAQ,CAGvC,QAFIC,EAAM,EACNC,EAAI,EACCC,EAAI,EAAGA,EAAIH,EAAO,OAAQ,EAAEG,EACjCD,EAAIF,EAAO,WAAWG,CAAC,EACnBD,EAAI,IACJD,GAAO,EACFC,EAAI,KACTD,GAAO,GACDC,EAAI,SAAY,QAAWF,EAAO,WAAWG,EAAI,CAAC,EAAI,SAAY,OACxE,EAAEA,EACFF,GAAO,GAEPA,GAAO,EAEf,OAAOA,CACX,EASAF,GAAK,KAAO,SAAmBK,EAAQC,EAAOC,EAAK,CAC/C,IAAIL,EAAMK,EAAMD,EAChB,GAAIJ,EAAM,EACN,MAAO,GAKX,QAJIM,EAAQ,KACRC,EAAQ,CAAC,EACTL,EAAI,EACJM,EACGJ,EAAQC,GACXG,EAAIL,EAAOC,GAAO,EACdI,EAAI,IACJD,EAAML,GAAG,EAAIM,EACRA,EAAI,KAAOA,EAAI,IACpBD,EAAML,GAAG,GAAKM,EAAI,KAAO,EAAIL,EAAOC,GAAO,EAAI,GAC1CI,EAAI,KAAOA,EAAI,KACpBA,IAAMA,EAAI,IAAM,IAAML,EAAOC,GAAO,EAAI,KAAO,IAAMD,EAAOC,GAAO,EAAI,KAAO,EAAID,EAAOC,GAAO,EAAI,IAAM,MAC1GG,EAAML,GAAG,EAAI,OAAUM,GAAK,IAC5BD,EAAML,GAAG,EAAI,OAAUM,EAAI,OAE3BD,EAAML,GAAG,GAAKM,EAAI,KAAO,IAAML,EAAOC,GAAO,EAAI,KAAO,EAAID,EAAOC,GAAO,EAAI,GAC9EF,EAAI,QACHI,IAAUA,EAAQ,CAAC,IAAI,KAAK,OA
AO,aAAa,MAAM,OAAQC,CAAK,CAAC,EACrEL,EAAI,GAGZ,OAAII,GACIJ,GACAI,EAAM,KAAK,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAAC,EAC5DI,EAAM,KAAK,EAAE,GAEjB,OAAO,aAAa,MAAM,OAAQC,EAAM,MAAM,EAAGL,CAAC,CAAC,CAC9D,EASAJ,GAAK,MAAQ,SAAoBC,EAAQI,EAAQM,EAAQ,CAIrD,QAHIL,EAAQK,EACRC,EACAC,EACKT,EAAI,EAAGA,EAAIH,EAAO,OAAQ,EAAEG,EACjCQ,EAAKX,EAAO,WAAWG,CAAC,EACpBQ,EAAK,IACLP,EAAOM,GAAQ,EAAIC,EACZA,EAAK,MACZP,EAAOM,GAAQ,EAAIC,GAAM,EAAU,IACnCP,EAAOM,GAAQ,EAAIC,EAAW,GAAK,MAC3BA,EAAK,SAAY,SAAYC,EAAKZ,EAAO,WAAWG,EAAI,CAAC,GAAK,SAAY,OAClFQ,EAAK,QAAYA,EAAK,OAAW,KAAOC,EAAK,MAC7C,EAAET,EACFC,EAAOM,GAAQ,EAAIC,GAAM,GAAU,IACnCP,EAAOM,GAAQ,EAAIC,GAAM,GAAK,GAAK,IACnCP,EAAOM,GAAQ,EAAIC,GAAM,EAAK,GAAK,IACnCP,EAAOM,GAAQ,EAAIC,EAAW,GAAK,MAEnCP,EAAOM,GAAQ,EAAIC,GAAM,GAAU,IACnCP,EAAOM,GAAQ,EAAIC,GAAM,EAAK,GAAK,IACnCP,EAAOM,GAAQ,EAAIC,EAAW,GAAK,KAG3C,OAAOD,EAASL,CACpB,ICxGA,IAAAQ,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GA6BjB,SAASA,GAAKC,EAAOC,EAAOC,EAAM,CAC9B,IAAIC,EAASD,GAAQ,KACjBE,EAASD,IAAS,EAClBE,EAAS,KACTC,EAASH,EACb,OAAO,SAAoBD,EAAM,CAC7B,GAAIA,EAAO,GAAKA,EAAOE,EACnB,OAAOJ,EAAME,CAAI,EACjBI,EAASJ,EAAOC,IAChBE,EAAOL,EAAMG,CAAI,EACjBG,EAAS,GAEb,IAAIC,EAAMN,EAAM,KAAKI,EAAMC,EAAQA,GAAUJ,CAAI,EACjD,OAAII,EAAS,IACTA,GAAUA,EAAS,GAAK,GACrBC,CACX,CACJ,IC/CA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAO,KAUX,SAASD,GAASE,EAAIC,EAAI,CAStB,KAAK,GAAKD,IAAO,EAMjB,KAAK,GAAKC,IAAO,CACrB,CAOA,IAAIC,GAAOJ,GAAS,KAAO,IAAIA,GAAS,EAAG,CAAC,EAE5CI,GAAK,SAAW,UAAW,CAAE,MAAO,EAAG,EACvCA,GAAK,SAAWA,GAAK,SAAW,UAAW,CAAE,OAAO,IAAM,EAC1DA,GAAK,OAAS,UAAW,CAAE,MAAO,EAAG,EAOrC,IAAIC,GAAWL,GAAS,SAAW,mBAOnCA,GAAS,WAAa,SAAoBM,EAAO,CAC7C,GAAIA,IAAU,EACV,OAAOF,GACX,IAAIG,EAAOD,EAAQ,EACfC,IACAD,EAAQ,CAACA,GACb,IAAIJ,EAAKI,IAAU,EACfH,GAAMG,EAAQJ,GAAM,aAAe,EACvC,OAAIK,IACAJ,EAAK,CAACA,IAAO,EACbD,EAAK,CAACA,IAAO,EACT,EAAEA,EAAK,aACPA,EAAK,EACD,EAAEC,EAAK,aACPA,EAAK,KAGV,IAAIH,GAASE,EAAIC,CAAE,CAC9B,EAOAH,GAAS,KAAO,SAAcM,EAAO,CACjC,GAAI,OAAOA,GAAU,SACjB,OAAON,GAAS,WAAWM,CAAK,EACpC,GAAIL,GAAK,SAASK,CAAK,EAEnB,GAAIL,GAAK,KACLK,EAAQL,GAAK,KAAK,WAAWK,CAAK,MAElC,QAAON,GAAS,WAAW,SAASM,EAAO,EAAE,CAAC,EAEtD,OAAOA,EAAM,KAAOA,EAAM,KAAO,IAAIN,GAASM,EAAM,MAAQ,EAAGA,EAAM,OAAS,CAAC,EAAIF,EACvF,EAOAJ,GAAS,UAAU,SAAW,SAAkBQ,EAAU,CACtD,GAAI,CAACA,GAAY,KAAK,KAAO,GAAI,CAC7B,IAAIN,EAAK,CAAC,KAAK,GAAK,IAAM,EACtBC,EAAK,CAAC,KAAK,KAAW,EAC1B,OAAKD,IACDC,EAAKA,EAAK,IAAM,GACb,EAAED,EAAKC,EAAK,WACvB,CACA,OAAO,KAAK,GAAK,KAAK,GAAK,UAC/B,EAOAH,GAAS,UAAU,OAAS,SAAgBQ,EAAU,CAClD,OAAOP,GAAK,KACN,IAAIA,GAAK,KAAK,KAAK,GAAK,EAAG,KAAK,GAAK,EAAG,EAAQO,CAAS,EAEzD,CAAE,IAAK,KAAK,GAAK,EAAG,KAAM,KAAK,GAAK,EAAG,SAAU,EAAQA,CAAU,CAC7E,EAEA,IAAIC,GAAa,OAAO,UAAU,WAOlCT,GAAS,SAAW,SAAkBU,EAAM,CACxC,OAAIA,IAASL,GACFD,GACJ,IAAIJ,IACLS,GAAW,KAAKC,EAAM,CAAC,EACvBD,GAAW,KAAKC,EAAM,CAAC,GAAK,EAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,GAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,MAAQ,GAEpCD,GAAW,KAAKC,EAAM,CAAC,EACvBD,GAAW,KAAKC,EAAM,CAAC,GAAK,EAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,GAC5BD,GAAW,KAAKC,EAAM,CAAC,GAAK,MAAQ,CAC1C,CACJ,EAMAV,GAAS,UAAU,OAAS,UAAkB,CAC1C,OAAO,OAAO,aACV,KAAK,GAAY,IACjB,KAAK,KAAO,EAAK,IACjB,KAAK,KAAO,GAAK,IACjB,KAAK,KAAO,GACZ,KAAK,GAAY,IACjB,KAAK,KAAO,EAAK,IACjB,KAAK,KAAO,GAAK,IACjB,KAAK,KAAO,EAChB,CACJ,EAMAA,GAAS,UAAU,SAAW,UAAoB,CAC9C,IAAIW,EAAS,KAAK,IAAM,GACxB,YAAK,KAAQ,KAAK,IAAM,EAAI,KAAK,KAAO,IAAMA,KAAU,EACxD,KAAK,IAAQ,KAAK,IAAM,EAAsBA,KAAU,EACjD,IACX,EAMAX,GAAS,UAAU,SAAW,UAAoB,CAC9C,IAAIW,EAAO,EAAE,KAAK,GAAK,GACvB,YAAK,KAAQ,KAAK,KAAO,EAAI,KAAK,IAAM,IAAMA,KAAU,EACxD,KAAK,IAAQ,KAAK,KAAO,EAAqBA,KAAU,EACjD,IACX,EAMAX,GAAS,UAAU,OAAS,UAAkB,CAC1C,IAAIY,EAAS,KAAK,GACdC,GAAS,KAAK,KAAO,GAAK,KAAK,IAAM,KA
AO,EAC5CC,EAAS,KAAK,KAAO,GACzB,OAAOA,IAAU,EACVD,IAAU,EACRD,EAAQ,MACNA,EAAQ,IAAM,EAAI,EAClBA,EAAQ,QAAU,EAAI,EACxBC,EAAQ,MACNA,EAAQ,IAAM,EAAI,EAClBA,EAAQ,QAAU,EAAI,EAC1BC,EAAQ,IAAM,EAAI,EAC7B,ICvMA,IAAAC,GAAAC,GAAAC,IAAA,cACA,IAAIC,GAAOD,GAGXC,GAAK,UAAY,KAGjBA,GAAK,OAAS,KAGdA,GAAK,aAAe,KAGpBA,GAAK,MAAQ,KAGbA,GAAK,QAAU,KAGfA,GAAK,KAAO,KAGZA,GAAK,KAAO,KAGZA,GAAK,SAAW,KAOhBA,GAAK,OAAS,GAAQ,OAAO,OAAW,KAClB,QACA,OAAO,SACP,OAAO,QAAQ,UACf,OAAO,QAAQ,SAAS,MAO9CA,GAAK,OAASA,GAAK,QAAU,QACf,OAAO,OAAW,KAAe,QACjC,OAAO,KAAW,KAAe,MACjCD,GAQdC,GAAK,WAAa,OAAO,OAAS,OAAO,OAAO,CAAC,CAAC,EAA+B,CAAC,EAOlFA,GAAK,YAAc,OAAO,OAAS,OAAO,OAAO,CAAC,CAAC,EAA+B,CAAC,EAQnFA,GAAK,UAAY,OAAO,WAAwC,SAAmBC,EAAO,CACtF,OAAO,OAAOA,GAAU,UAAY,SAASA,CAAK,GAAK,KAAK,MAAMA,CAAK,IAAMA,CACjF,EAOAD,GAAK,SAAW,SAAkBC,EAAO,CACrC,OAAO,OAAOA,GAAU,UAAYA,aAAiB,MACzD,EAOAD,GAAK,SAAW,SAAkBC,EAAO,CACrC,OAAOA,GAAS,OAAOA,GAAU,QACrC,EAUAD,GAAK,MAQLA,GAAK,MAAQ,SAAeE,EAAKC,EAAM,CACnC,IAAIF,EAAQC,EAAIC,CAAI,EACpB,OAAIF,GAAS,MAAQC,EAAI,eAAeC,CAAI,EACjC,OAAOF,GAAU,WAAa,MAAM,QAAQA,CAAK,EAAIA,EAAM,OAAS,OAAO,KAAKA,CAAK,EAAE,QAAU,EACrG,EACX,EAaAD,GAAK,OAAU,UAAW,CACtB,GAAI,CACA,IAAII,EAASJ,GAAK,QAAQ,QAAQ,EAAE,OAEpC,OAAOI,EAAO,UAAU,UAAYA,EAAoC,IAC5E,MAAY,CAER,OAAO,IACX,CACJ,EAAG,EAGHJ,GAAK,aAAe,KAGpBA,GAAK,oBAAsB,KAO3BA,GAAK,UAAY,SAAmBK,EAAa,CAE7C,OAAO,OAAOA,GAAgB,SACxBL,GAAK,OACDA,GAAK,oBAAoBK,CAAW,EACpC,IAAIL,GAAK,MAAMK,CAAW,EAC9BL,GAAK,OACDA,GAAK,aAAaK,CAAW,EAC7B,OAAO,WAAe,IAClBA,EACA,IAAI,WAAWA,CAAW,CAC5C,EAMAL,GAAK,MAAQ,OAAO,WAAe,IAAc,WAAwC,MAezFA,GAAK,KAAkCA,GAAK,OAAO,SAAsCA,GAAK,OAAO,QAAQ,MACtEA,GAAK,OAAO,MACvCA,GAAK,QAAQ,MAAM,EAO/BA,GAAK,OAAS,mBAOdA,GAAK,QAAU,wBAOfA,GAAK,QAAU,6CAOfA,GAAK,WAAa,SAAoBC,EAAO,CACzC,OAAOA,EACDD,GAAK,SAAS,KAAKC,CAAK,EAAE,OAAO,EACjCD,GAAK,SAAS,QACxB,EAQAA,GAAK,aAAe,SAAsBM,EAAMC,EAAU,CACtD,IAAIC,EAAOR,GAAK,SAAS,SAASM,CAAI,EACtC,OAAIN,GAAK,KACEA,GAAK,KAAK,SAASQ,EAAK,GAAIA,EAAK,GAAID,CAAQ,EACjDC,EAAK,SAAS,EAAQD,CAAS,CAC1C,EAUA,SAASE,GAAMC,EAAKC,EAAKC,EAAU,CAC/B,QAASC,EAAO,OAAO,KAAKF,CAAG,EAAGG,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,GACpDJ,EAAIG,EAAKC,CAAC,CAAC,IAAM,QAAa,CAACF,KAC/BF,EAAIG,EAAKC,CAAC,CAAC,EAAIH,EAAIE,EAAKC,CAAC,CAAC,GAClC,OAAOJ,CACX,CAEAV,GAAK,MAAQS,GAObT,GAAK,QAAU,SAAiBe,EAAK,CACjC,OAAOA,EAAI,OAAO,CAAC,EAAE,YAAY,EAAIA,EAAI,UAAU,CAAC,CACxD,EAQA,SAASC,GAASC,EAAM,CAEpB,SAASC,EAAYC,EAASC,EAAY,CAEtC,GAAI,EAAE,gBAAgBF,GAClB,OAAO,IAAIA,EAAYC,EAASC,CAAU,EAK9C,OAAO,eAAe,KAAM,UAAW,CAAE,IAAK,UAAW,CAAE,OAAOD,CAAS,CAAE,CAAC,EAG1E,MAAM,kBACN,MAAM,kBAAkB,KAAMD,CAAW,EAEzC,OAAO,eAAe,KAAM,QAAS,CAAE,MAAO,IAAI,MAAM,EAAE,OAAS,EAAG,CAAC,EAEvEE,GACAX,GAAM,KAAMW,CAAU,CAC9B,CAEA,OAAAF,EAAY,UAAY,OAAO,OAAO,MAAM,UAAW,CACnD,YAAa,CACT,MAAOA,EACP,SAAU,GACV,WAAY,GACZ,aAAc,EAClB,EACA,KAAM,CACF,IAAK,UAAe,CAAE,OAAOD,CAAM,EACnC,IAAK,OACL,WAAY,GAKZ,aAAc,EAClB,EACA,SAAU,CACN,MAAO,UAAiB,CAAE,OAAO,KAAK,KAAO,KAAO,KAAK,OAAS,EAClE,SAAU,GACV,WAAY,GACZ,aAAc,EAClB,CACJ,CAAC,EAEMC,CACX,CAEAlB,GAAK,SAAWgB,GAmBhBhB,GAAK,cAAgBgB,GAAS,eAAe,EAoB7ChB,GAAK,YAAc,SAAkBqB,EAAY,CAE7C,QADIC,EAAW,CAAC,EACPR,EAAI,EAAGA,EAAIO,EAAW,OAAQ,EAAEP,EACrCQ,EAASD,EAAWP,CAAC,CAAC,EAAI,EAO9B,OAAO,UAAW,CACd,QAASD,EAAO,OAAO,KAAK,IAAI,EAAG,EAAIA,EAAK,OAAS,EAAG,EAAI,GAAI,EAAE,EAC9D,GAAIS,EAAST,EAAK,CAAC,CAAC,IAAM,GAAK,KAAKA,EAAK,CAAC,CAAC,IAAM,QAAa,KAAKA,EAAK,CAAC,CAAC,IAAM,KAC5E,OAAOA,EAAK,CAAC,CACzB,CACJ,EAeAb,GAAK,YAAc,SAAkBqB,EAAY,CAQ7C,OAAO,SAASJ,EAAM,CAClB,QAASH,EAAI,EAAGA,EAAIO,EAAW,OAAQ,EAAEP,EACjCO,EAAWP,CAAC,IAAMG,GAClB,OAAO,KAAKI,EAAWP,CAAC,CAAC,CACrC,CACJ,EAkBAd,GAAK,cAAgB,CACjB,MAAO,OACP,MAAO,OACP,MAAO,OACP,KAAM,EACV,EAGAA,GAAK,WAAa,UAAW,CACzB,IAAII,EAASJ,GAAK,OAElB,GAAI,CAACI,EAAQ,CACTJ
,GAAK,aAAeA,GAAK,oBAAsB,KAC/C,MACJ,CAGAA,GAAK,aAAeI,EAAO,OAAS,WAAW,MAAQA,EAAO,MAE1D,SAAqBH,EAAOsB,EAAU,CAClC,OAAO,IAAInB,EAAOH,EAAOsB,CAAQ,CACrC,EACJvB,GAAK,oBAAsBI,EAAO,aAE9B,SAA4BoB,EAAM,CAC9B,OAAO,IAAIpB,EAAOoB,CAAI,CAC1B,CACR,ICrbA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAY,KAEZC,GAEAC,GAAYF,GAAK,SACjBG,GAAYH,GAAK,OACjBI,GAAYJ,GAAK,KAWrB,SAASK,GAAGC,EAAIC,EAAKC,EAAK,CAMtB,KAAK,GAAKF,EAMV,KAAK,IAAMC,EAMX,KAAK,KAAO,OAMZ,KAAK,IAAMC,CACf,CAGA,SAASC,IAAO,CAAC,CAUjB,SAASC,GAAMC,EAAQ,CAMnB,KAAK,KAAOA,EAAO,KAMnB,KAAK,KAAOA,EAAO,KAMnB,KAAK,IAAMA,EAAO,IAMlB,KAAK,KAAOA,EAAO,MACvB,CAOA,SAASZ,IAAS,CAMd,KAAK,IAAM,EAMX,KAAK,KAAO,IAAIM,GAAGI,GAAM,EAAG,CAAC,EAM7B,KAAK,KAAO,KAAK,KAMjB,KAAK,OAAS,IAOlB,CAEA,IAAIG,GAAS,UAAkB,CAC3B,OAAOZ,GAAK,OACN,UAA+B,CAC7B,OAAQD,GAAO,OAAS,UAAyB,CAC7C,OAAO,IAAIE,EACf,GAAG,CACP,EAEE,UAAwB,CACtB,OAAO,IAAIF,EACf,CACR,EAOAA,GAAO,OAASa,GAAO,EAOvBb,GAAO,MAAQ,SAAec,EAAM,CAChC,OAAO,IAAIb,GAAK,MAAMa,CAAI,CAC9B,EAIIb,GAAK,QAAU,QACfD,GAAO,MAAQC,GAAK,KAAKD,GAAO,MAAOC,GAAK,MAAM,UAAU,QAAQ,GAUxED,GAAO,UAAU,MAAQ,SAAcO,EAAIC,EAAKC,EAAK,CACjD,YAAK,KAAO,KAAK,KAAK,KAAO,IAAIH,GAAGC,EAAIC,EAAKC,CAAG,EAChD,KAAK,KAAOD,EACL,IACX,EAEA,SAASO,GAAUN,EAAKO,EAAKC,EAAK,CAC9BD,EAAIC,CAAG,EAAIR,EAAM,GACrB,CAEA,SAASS,GAAcT,EAAKO,EAAKC,EAAK,CAClC,KAAOR,EAAM,KACTO,EAAIC,GAAK,EAAIR,EAAM,IAAM,IACzBA,KAAS,EAEbO,EAAIC,CAAG,EAAIR,CACf,CAWA,SAASU,GAASX,EAAKC,EAAK,CACxB,KAAK,IAAMD,EACX,KAAK,KAAO,OACZ,KAAK,IAAMC,CACf,CAEAU,GAAS,UAAY,OAAO,OAAOb,GAAG,SAAS,EAC/Ca,GAAS,UAAU,GAAKD,GAOxBlB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CAGnD,YAAK,MAAQ,KAAK,KAAO,KAAK,KAAK,KAAO,IAAID,IACzCC,EAAQA,IAAU,GACT,IAAY,EACpBA,EAAQ,MAAY,EACpBA,EAAQ,QAAY,EACpBA,EAAQ,UAAY,EACA,EAC1BA,CAAK,GAAG,IACD,IACX,EAQApB,GAAO,UAAU,MAAQ,SAAqBoB,EAAO,CACjD,OAAOA,EAAQ,EACT,KAAK,MAAMC,GAAe,GAAIlB,GAAS,WAAWiB,CAAK,CAAC,EACxD,KAAK,OAAOA,CAAK,CAC3B,EAOApB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,OAAO,KAAK,QAAQA,GAAS,EAAIA,GAAS,MAAQ,CAAC,CACvD,EAEA,SAASC,GAAcZ,EAAKO,EAAKC,EAAK,CAClC,KAAOR,EAAI,IACPO,EAAIC,GAAK,EAAIR,EAAI,GAAK,IAAM,IAC5BA,EAAI,IAAMA,EAAI,KAAO,EAAIA,EAAI,IAAM,MAAQ,EAC3CA,EAAI,MAAQ,EAEhB,KAAOA,EAAI,GAAK,KACZO,EAAIC,GAAK,EAAIR,EAAI,GAAK,IAAM,IAC5BA,EAAI,GAAKA,EAAI,KAAO,EAExBO,EAAIC,GAAK,EAAIR,EAAI,EACrB,CAQAT,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,IAAIE,EAAOnB,GAAS,KAAKiB,CAAK,EAC9B,OAAO,KAAK,MAAMC,GAAeC,EAAK,OAAO,EAAGA,CAAI,CACxD,EASAtB,GAAO,UAAU,MAAQA,GAAO,UAAU,OAQ1CA,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,IAAIE,EAAOnB,GAAS,KAAKiB,CAAK,EAAE,SAAS,EACzC,OAAO,KAAK,MAAMC,GAAeC,EAAK,OAAO,EAAGA,CAAI,CACxD,EAOAtB,GAAO,UAAU,KAAO,SAAoBoB,EAAO,CAC/C,OAAO,KAAK,MAAML,GAAW,EAAGK,EAAQ,EAAI,CAAC,CACjD,EAEA,SAASG,GAAad,EAAKO,EAAKC,EAAK,CACjCD,EAAIC,CAAO,EAAKR,EAAc,IAC9BO,EAAIC,EAAM,CAAC,EAAKR,IAAQ,EAAM,IAC9BO,EAAIC,EAAM,CAAC,EAAKR,IAAQ,GAAM,IAC9BO,EAAIC,EAAM,CAAC,EAAKR,IAAQ,EAC5B,CAOAT,GAAO,UAAU,QAAU,SAAuBoB,EAAO,CACrD,OAAO,KAAK,MAAMG,GAAc,EAAGH,IAAU,CAAC,CAClD,EAQApB,GAAO,UAAU,SAAWA,GAAO,UAAU,QAQ7CA,GAAO,UAAU,QAAU,SAAuBoB,EAAO,CACrD,IAAIE,EAAOnB,GAAS,KAAKiB,CAAK,EAC9B,OAAO,KAAK,MAAMG,GAAc,EAAGD,EAAK,EAAE,EAAE,MAAMC,GAAc,EAAGD,EAAK,EAAE,CAC9E,EASAtB,GAAO,UAAU,SAAWA,GAAO,UAAU,QAQ7CA,GAAO,UAAU,MAAQ,SAAqBoB,EAAO,CACjD,OAAO,KAAK,MAAMnB,GAAK,MAAM,aAAc,EAAGmB,CAAK,CACvD,EAQApB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,OAAO,KAAK,MAAMnB,GAAK,MAAM,cAAe,EAAGmB,CAAK,CACxD,EAEA,IAAII,GAAavB,GAAK,MAAM,UAAU,IAChC,SAAwBQ,EAAKO,EAAKC,EAAK,CACrCD,EAAI,IAAIP,EAAKQ,CAAG,CACpB,EAEE,SAAwBR,EAAKO,EAAKC,EAAK,CACrC,QAASQ,EAAI,EAAGA,EAAIhB,EAAI,OAAQ,EAAEgB,EAC9BT,EAAIC,EAAMQ,CAAC,EAAIhB,EAAIgB,CAAC,CAC5B,EAOJzB,GAAO,UAAU,MAAQ,SAAqBoB,EAAO,CACjD,IAAIZ,EAAMY,EAAM,SAAW,EAC3B,GAAI,CAACZ,EACD,OAAO,KAAK,
MAAMO,GAAW,EAAG,CAAC,EACrC,GAAId,GAAK,SAASmB,CAAK,EAAG,CACtB,IAAIJ,EAAMhB,GAAO,MAAMQ,EAAMJ,GAAO,OAAOgB,CAAK,CAAC,EACjDhB,GAAO,OAAOgB,EAAOJ,EAAK,CAAC,EAC3BI,EAAQJ,CACZ,CACA,OAAO,KAAK,OAAOR,CAAG,EAAE,MAAMgB,GAAYhB,EAAKY,CAAK,CACxD,EAOApB,GAAO,UAAU,OAAS,SAAsBoB,EAAO,CACnD,IAAIZ,EAAMH,GAAK,OAAOe,CAAK,EAC3B,OAAOZ,EACD,KAAK,OAAOA,CAAG,EAAE,MAAMH,GAAK,MAAOG,EAAKY,CAAK,EAC7C,KAAK,MAAML,GAAW,EAAG,CAAC,CACpC,EAOAf,GAAO,UAAU,KAAO,UAAgB,CACpC,YAAK,OAAS,IAAIW,GAAM,IAAI,EAC5B,KAAK,KAAO,KAAK,KAAO,IAAIL,GAAGI,GAAM,EAAG,CAAC,EACzC,KAAK,IAAM,EACJ,IACX,EAMAV,GAAO,UAAU,MAAQ,UAAiB,CACtC,OAAI,KAAK,QACL,KAAK,KAAS,KAAK,OAAO,KAC1B,KAAK,KAAS,KAAK,OAAO,KAC1B,KAAK,IAAS,KAAK,OAAO,IAC1B,KAAK,OAAS,KAAK,OAAO,OAE1B,KAAK,KAAO,KAAK,KAAO,IAAIM,GAAGI,GAAM,EAAG,CAAC,EACzC,KAAK,IAAO,GAET,IACX,EAMAV,GAAO,UAAU,OAAS,UAAkB,CACxC,IAAI0B,EAAO,KAAK,KACZC,EAAO,KAAK,KACZnB,EAAO,KAAK,IAChB,YAAK,MAAM,EAAE,OAAOA,CAAG,EACnBA,IACA,KAAK,KAAK,KAAOkB,EAAK,KACtB,KAAK,KAAOC,EACZ,KAAK,KAAOnB,GAET,IACX,EAMAR,GAAO,UAAU,OAAS,UAAkB,CAIxC,QAHI0B,EAAO,KAAK,KAAK,KACjBV,EAAO,KAAK,YAAY,MAAM,KAAK,GAAG,EACtCC,EAAO,EACJS,GACHA,EAAK,GAAGA,EAAK,IAAKV,EAAKC,CAAG,EAC1BA,GAAOS,EAAK,IACZA,EAAOA,EAAK,KAGhB,OAAOV,CACX,EAEAhB,GAAO,WAAa,SAAS4B,EAAe,CACxC1B,GAAe0B,EACf5B,GAAO,OAASa,GAAO,EACvBX,GAAa,WAAW,CAC5B,IChdA,IAAA2B,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAGjB,IAAIC,GAAS,MACZD,GAAa,UAAY,OAAO,OAAOC,GAAO,SAAS,GAAG,YAAcD,GAEzE,IAAIE,GAAO,KAQX,SAASF,IAAe,CACpBC,GAAO,KAAK,IAAI,CACpB,CAEAD,GAAa,WAAa,UAAY,CAOlCA,GAAa,MAAQE,GAAK,oBAE1BF,GAAa,iBAAmBE,GAAK,QAAUA,GAAK,OAAO,qBAAqB,YAAcA,GAAK,OAAO,UAAU,IAAI,OAAS,MAC3H,SAA8BC,EAAKC,EAAKC,EAAK,CAC7CD,EAAI,IAAID,EAAKE,CAAG,CAElB,EAEE,SAA+BF,EAAKC,EAAKC,EAAK,CAC9C,GAAIF,EAAI,KACNA,EAAI,KAAKC,EAAKC,EAAK,EAAGF,EAAI,MAAM,MAC7B,SAASG,EAAI,EAAGA,EAAIH,EAAI,QAC3BC,EAAIC,GAAK,EAAIF,EAAIG,GAAG,CACxB,CACR,EAMAN,GAAa,UAAU,MAAQ,SAA4BO,EAAO,CAC1DL,GAAK,SAASK,CAAK,IACnBA,EAAQL,GAAK,aAAaK,EAAO,QAAQ,GAC7C,IAAIC,EAAMD,EAAM,SAAW,EAC3B,YAAK,OAAOC,CAAG,EACXA,GACA,KAAK,MAAMR,GAAa,iBAAkBQ,EAAKD,CAAK,EACjD,IACX,EAEA,SAASE,GAAkBN,EAAKC,EAAKC,EAAK,CAClCF,EAAI,OAAS,GACbD,GAAK,KAAK,MAAMC,EAAKC,EAAKC,CAAG,EACxBD,EAAI,UACTA,EAAI,UAAUD,EAAKE,CAAG,EAEtBD,EAAI,MAAMD,EAAKE,CAAG,CAC1B,CAKAL,GAAa,UAAU,OAAS,SAA6BO,EAAO,CAChE,IAAIC,EAAMN,GAAK,OAAO,WAAWK,CAAK,EACtC,YAAK,OAAOC,CAAG,EACXA,GACA,KAAK,MAAMC,GAAmBD,EAAKD,CAAK,EACrC,IACX,EAUAP,GAAa,WAAW,ICpFxB,IAAAU,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAY,KAEZC,GAEAC,GAAYF,GAAK,SACjBG,GAAYH,GAAK,KAGrB,SAASI,GAAgBC,EAAQC,EAAa,CAC1C,OAAO,WAAW,uBAAyBD,EAAO,IAAM,OAASC,GAAe,GAAK,MAAQD,EAAO,GAAG,CAC3G,CAQA,SAASN,GAAOQ,EAAQ,CAMpB,KAAK,IAAMA,EAMX,KAAK,IAAM,EAMX,KAAK,IAAMA,EAAO,MACtB,CAEA,IAAIC,GAAe,OAAO,WAAe,IACnC,SAA4BD,EAAQ,CAClC,GAAIA,aAAkB,YAAc,MAAM,QAAQA,CAAM,EACpD,OAAO,IAAIR,GAAOQ,CAAM,EAC5B,MAAM,MAAM,gBAAgB,CAChC,EAEE,SAAsBA,EAAQ,CAC5B,GAAI,MAAM,QAAQA,CAAM,EACpB,OAAO,IAAIR,GAAOQ,CAAM,EAC5B,MAAM,MAAM,gBAAgB,CAChC,EAEAE,GAAS,UAAkB,CAC3B,OAAOT,GAAK,OACN,SAA6BO,EAAQ,CACnC,OAAQR,GAAO,OAAS,SAAuBQ,EAAQ,CACnD,OAAOP,GAAK,OAAO,SAASO,CAAM,EAC5B,IAAIN,GAAaM,CAAM,EAEvBC,GAAaD,CAAM,CAC7B,GAAGA,CAAM,CACb,EAEEC,EACV,EASAT,GAAO,OAASU,GAAO,EAEvBV,GAAO,UAAU,OAASC,GAAK,MAAM,UAAU,UAAuCA,GAAK,MAAM,UAAU,MAO3GD,GAAO,UAAU,OAAU,UAA6B,CACpD,IAAIW,EAAQ,WACZ,OAAO,UAAuB,CAKgC,GAJ1DA,GAAkB,KAAK,IAAI,KAAK,GAAG,EAAI,OAAgB,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,MACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GAAG,EAAI,MAAS,KAAO,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,OACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQ,MAAQ,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,OACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQ,MAAQ,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,OACrFA,GAASA,GAAS,KAAK,IAAI,KAAK,GA
AG,EAAK,KAAO,MAAQ,EAAO,KAAK,IAAI,KAAK,KAAK,EAAI,KAAK,OAAOA,EAGjG,IAAK,KAAK,KAAO,GAAK,KAAK,IACvB,WAAK,IAAM,KAAK,IACVN,GAAgB,KAAM,EAAE,EAElC,OAAOM,CACX,CACJ,EAAG,EAMHX,GAAO,UAAU,MAAQ,UAAsB,CAC3C,OAAO,KAAK,OAAO,EAAI,CAC3B,EAMAA,GAAO,UAAU,OAAS,UAAuB,CAC7C,IAAIW,EAAQ,KAAK,OAAO,EACxB,OAAOA,IAAU,EAAI,EAAEA,EAAQ,GAAK,CACxC,EAIA,SAASC,IAAiB,CAEtB,IAAIC,EAAO,IAAIV,GAAS,EAAG,CAAC,EACxBW,EAAI,EACR,GAAI,KAAK,IAAM,KAAK,IAAM,EAAG,CACzB,KAAOA,EAAI,EAAG,EAAEA,EAGZ,GADAD,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,KAAO,EAC1D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,EAKf,GAFAA,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQ,MAAQ,EAC3DA,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAS,KAAO,EACvD,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOA,EACXC,EAAI,CACR,KAAO,CACH,KAAOA,EAAI,EAAG,EAAEA,EAAG,CAEf,GAAI,KAAK,KAAO,KAAK,IACjB,MAAMT,GAAgB,IAAI,EAG9B,GADAQ,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,KAAO,EAC1D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,CACf,CAEA,OAAAA,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,KAAK,EAAI,MAAQC,EAAI,KAAO,EACzDD,CACX,CACA,GAAI,KAAK,IAAM,KAAK,IAAM,GACtB,KAAOC,EAAI,EAAG,EAAEA,EAGZ,GADAD,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,EAAI,KAAO,EAC9D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,MAGf,MAAOC,EAAI,EAAG,EAAEA,EAAG,CAEf,GAAI,KAAK,KAAO,KAAK,IACjB,MAAMT,GAAgB,IAAI,EAG9B,GADAQ,EAAK,IAAMA,EAAK,IAAM,KAAK,IAAI,KAAK,GAAG,EAAI,MAAQC,EAAI,EAAI,KAAO,EAC9D,KAAK,IAAI,KAAK,KAAK,EAAI,IACvB,OAAOD,CACf,CAGJ,MAAM,MAAM,yBAAyB,CACzC,CA6BAb,GAAO,UAAU,KAAO,UAAqB,CACzC,OAAO,KAAK,OAAO,IAAM,CAC7B,EAEA,SAASe,GAAgBC,EAAKC,EAAK,CAC/B,OAAQD,EAAIC,EAAM,CAAC,EACXD,EAAIC,EAAM,CAAC,GAAK,EAChBD,EAAIC,EAAM,CAAC,GAAK,GAChBD,EAAIC,EAAM,CAAC,GAAK,MAAQ,CACpC,CAMAjB,GAAO,UAAU,QAAU,UAAwB,CAG/C,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,OAAOU,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,CAClD,EAMAf,GAAO,UAAU,SAAW,UAAyB,CAGjD,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,OAAOU,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,EAAI,CACtD,EAIA,SAASG,IAAgC,CAGrC,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMb,GAAgB,KAAM,CAAC,EAEjC,OAAO,IAAIF,GAASY,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,EAAGA,GAAgB,KAAK,IAAK,KAAK,KAAO,CAAC,CAAC,CAC1G,CAuBAf,GAAO,UAAU,MAAQ,UAAsB,CAG3C,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,IAAIM,EAAQV,GAAK,MAAM,YAAY,KAAK,IAAK,KAAK,GAAG,EACrD,YAAK,KAAO,EACLU,CACX,EAOAX,GAAO,UAAU,OAAS,UAAuB,CAG7C,GAAI,KAAK,IAAM,EAAI,KAAK,IACpB,MAAMK,GAAgB,KAAM,CAAC,EAEjC,IAAIM,EAAQV,GAAK,MAAM,aAAa,KAAK,IAAK,KAAK,GAAG,EACtD,YAAK,KAAO,EACLU,CACX,EAMAX,GAAO,UAAU,MAAQ,UAAsB,CAC3C,IAAImB,EAAS,KAAK,OAAO,EACrBC,EAAS,KAAK,IACdH,EAAS,KAAK,IAAME,EAGxB,GAAIF,EAAM,KAAK,IACX,MAAMZ,GAAgB,KAAMc,CAAM,EAGtC,GADA,KAAK,KAAOA,EACR,MAAM,QAAQ,KAAK,GAAG,EACtB,OAAO,KAAK,IAAI,MAAMC,EAAOH,CAAG,EAEpC,GAAIG,IAAUH,EAAK,CACf,IAAII,EAAepB,GAAK,OACxB,OAAOoB,EACDA,EAAa,MAAM,CAAC,EACpB,IAAI,KAAK,IAAI,YAAY,CAAC,CACpC,CACA,OAAO,KAAK,OAAO,KAAK,KAAK,IAAKD,EAAOH,CAAG,CAChD,EAMAjB,GAAO,UAAU,OAAS,UAAuB,CAC7C,IAAIsB,EAAQ,KAAK,MAAM,EACvB,OAAOlB,GAAK,KAAKkB,EAAO,EAAGA,EAAM,MAAM,CAC3C,EAOAtB,GAAO,UAAU,KAAO,SAAcmB,EAAQ,CAC1C,GAAI,OAAOA,GAAW,SAAU,CAE5B,GAAI,KAAK,IAAMA,EAAS,KAAK,IACzB,MAAMd,GAAgB,KAAMc,CAAM,EACtC,KAAK,KAAOA,CAChB,KACI,GAEI,IAAI,KAAK,KAAO,KAAK,IACjB,MAAMd,GAAgB,IAAI,QACzB,KAAK,IAAI,KAAK,KAAK,EAAI,KAEpC,OAAO,IACX,EAOAL,GAAO,UAAU,SAAW,SAASuB,EAAU,CAC3C,OAAQA,EAAU,CACd,IAAK,GACD,KAAK,KAAK,EACV,MACJ,IAAK,GACD,KAAK,KAAK,CAAC,EACX,MACJ,IAAK,GACD,KAAK,KAAK,KAAK,OAAO,CAAC,EACvB,MACJ,IAAK,GACD,MAAQA,EAAW,KAAK,OAAO,EAAI,KAAO,GACtC,KAAK,SAASA,CAAQ,EAE1B,MACJ,IAAK,GACD,KAAK,KAAK,CAAC,EACX,MAGJ,QACI,MAAM,MAAM,qBAAuBA,EAAW,cAAgB,KAAK,GAAG,CAC9E,CACA,OAAO,IACX,EAEAvB,
GAAO,WAAa,SAASwB,EAAe,CACxCtB,GAAesB,EACfxB,GAAO,OAASU,GAAO,EACvBR,GAAa,WAAW,EAExB,IAAIuB,EAAKxB,GAAK,KAAO,SAAsC,WAC3DA,GAAK,MAAMD,GAAO,UAAW,CAEzB,MAAO,UAAsB,CACzB,OAAOY,GAAe,KAAK,IAAI,EAAEa,CAAE,EAAE,EAAK,CAC9C,EAEA,OAAQ,UAAuB,CAC3B,OAAOb,GAAe,KAAK,IAAI,EAAEa,CAAE,EAAE,EAAI,CAC7C,EAEA,OAAQ,UAAuB,CAC3B,OAAOb,GAAe,KAAK,IAAI,EAAE,SAAS,EAAEa,CAAE,EAAE,EAAK,CACzD,EAEA,QAAS,UAAwB,CAC7B,OAAOP,GAAY,KAAK,IAAI,EAAEO,CAAE,EAAE,EAAI,CAC1C,EAEA,SAAU,UAAyB,CAC/B,OAAOP,GAAY,KAAK,IAAI,EAAEO,CAAE,EAAE,EAAK,CAC3C,CAEJ,CAAC,CACL,IC/ZA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAGjB,IAAIC,GAAS,MACZD,GAAa,UAAY,OAAO,OAAOC,GAAO,SAAS,GAAG,YAAcD,GAEzE,IAAIE,GAAO,KASX,SAASF,GAAaG,EAAQ,CAC1BF,GAAO,KAAK,KAAME,CAAM,CAO5B,CAEAH,GAAa,WAAa,UAAY,CAE9BE,GAAK,SACLF,GAAa,UAAU,OAASE,GAAK,OAAO,UAAU,MAC9D,EAMAF,GAAa,UAAU,OAAS,UAA8B,CAC1D,IAAII,EAAM,KAAK,OAAO,EACtB,OAAO,KAAK,IAAI,UACV,KAAK,IAAI,UAAU,KAAK,IAAK,KAAK,IAAM,KAAK,IAAI,KAAK,IAAMA,EAAK,KAAK,GAAG,CAAC,EAC1E,KAAK,IAAI,SAAS,QAAS,KAAK,IAAK,KAAK,IAAM,KAAK,IAAI,KAAK,IAAMA,EAAK,KAAK,GAAG,CAAC,CAC5F,EASAJ,GAAa,WAAW,IClDxB,IAAAK,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAUC,GAEjB,IAAIC,GAAO,MAGVD,GAAQ,UAAY,OAAO,OAAOC,GAAK,aAAa,SAAS,GAAG,YAAcD,GAmC/E,SAASA,GAAQE,EAASC,EAAkBC,EAAmB,CAE3D,GAAI,OAAOF,GAAY,WACnB,MAAM,UAAU,4BAA4B,EAEhDD,GAAK,aAAa,KAAK,IAAI,EAM3B,KAAK,QAAUC,EAMf,KAAK,iBAAmB,EAAQC,EAMhC,KAAK,kBAAoB,EAAQC,CACrC,CAaAJ,GAAQ,UAAU,QAAU,SAASK,EAAQC,EAAQC,EAAaC,EAAcC,EAASC,EAAU,CAE/F,GAAI,CAACD,EACD,MAAM,UAAU,2BAA2B,EAE/C,IAAIE,EAAO,KACX,GAAI,CAACD,EACD,OAAOT,GAAK,UAAUI,EAASM,EAAML,EAAQC,EAAaC,EAAcC,CAAO,EAEnF,GAAI,CAACE,EAAK,QAAS,CACf,WAAW,UAAW,CAAED,EAAS,MAAM,eAAe,CAAC,CAAG,EAAG,CAAC,EAC9D,MACJ,CAEA,GAAI,CACA,OAAOC,EAAK,QACRL,EACAC,EAAYI,EAAK,iBAAmB,kBAAoB,QAAQ,EAAEF,CAAO,EAAE,OAAO,EAClF,SAAqBG,EAAKC,EAAU,CAEhC,GAAID,EACA,OAAAD,EAAK,KAAK,QAASC,EAAKN,CAAM,EACvBI,EAASE,CAAG,EAGvB,GAAIC,IAAa,KAAM,CACnBF,EAAK,IAAqB,EAAI,EAC9B,MACJ,CAEA,GAAI,EAAEE,aAAoBL,GACtB,GAAI,CACAK,EAAWL,EAAaG,EAAK,kBAAoB,kBAAoB,QAAQ,EAAEE,CAAQ,CAC3F,OAASD,EAAK,CACV,OAAAD,EAAK,KAAK,QAASC,EAAKN,CAAM,EACvBI,EAASE,CAAG,CACvB,CAGJ,OAAAD,EAAK,KAAK,OAAQE,EAAUP,CAAM,EAC3BI,EAAS,KAAMG,CAAQ,CAClC,CACJ,CACJ,OAASD,EAAK,CACVD,EAAK,KAAK,QAASC,EAAKN,CAAM,EAC9B,WAAW,UAAW,CAAEI,EAASE,CAAG,CAAG,EAAG,CAAC,EAC3C,MACJ,CACJ,EAOAZ,GAAQ,UAAU,IAAM,SAAac,EAAY,CAC7C,OAAI,KAAK,UACAA,GACD,KAAK,QAAQ,KAAM,KAAM,IAAI,EACjC,KAAK,QAAU,KACf,KAAK,KAAK,KAAK,EAAE,IAAI,GAElB,IACX,IC7IA,IAAAC,GAAAC,GAAAC,IAAA,cAMA,IAAIC,GAAMD,GA6BVC,GAAI,QAAU,OCnCd,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACAA,GAAO,QAAU,CAAC,ICDlB,IAAAC,GAAAC,GAAAC,IAAA,cACA,IAAIC,GAAWD,GAQfC,GAAS,MAAQ,UAGjBA,GAAS,OAAe,KACxBA,GAAS,aAAe,KACxBA,GAAS,OAAe,KACxBA,GAAS,aAAe,KAGxBA,GAAS,KAAe,KACxBA,GAAS,IAAe,KACxBA,GAAS,MAAe,KACxBA,GAAS,UAAeC,GAOxB,SAASA,IAAY,CACjBD,GAAS,KAAK,WAAW,EACzBA,GAAS,OAAO,WAAWA,GAAS,YAAY,EAChDA,GAAS,OAAO,WAAWA,GAAS,YAAY,CACpD,CAGAC,GAAU,ICnCV,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cAGAA,GAAO,QAAU,OCHjB,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cAGA,IAAIC,GAAY,KAGZC,EAAUD,GAAU,OAAQE,GAAUF,GAAU,OAAQG,EAAQH,GAAU,KAG1EI,EAAQJ,GAAU,MAAM,UAAeA,GAAU,MAAM,QAAa,CAAC,GAEzEI,EAAM,KAAQ,UAAW,CAOrB,IAAIC,EAAO,CAAC,EAiBZ,OAAAA,EAAK,QAAW,UAAW,CACvB,IAAIC,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,gBAAgB,EAAI,EAC3CC,EAAOD,EAAW,CAAC,EAAI,uBAAuB,EAAI,EAClDC,EAAOD,EAAW,CAAC,EAAI,uBAAuB,EAAI,EAClDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,qBAAqB,EAAI,EAChDC,EAAOD,EAAW,CAAC,EAAI,sBAAsB,EAAI,EACjDC,EAAOD,EAAW,CAAC,EAAI,YAAY,EAAI,
EAChCC,CACX,EAAG,EAEHF,EAAK,eAAkB,UAAW,CAkC9B,SAASG,EAAeC,EAAY,CAQhC,GAPA,KAAK,OAAS,CAAC,EACf,KAAK,KAAO,CAAC,EACb,KAAK,QAAU,CAAC,EAChB,KAAK,QAAU,CAAC,EAChB,KAAK,OAAS,CAAC,EACf,KAAK,cAAgB,CAAC,EACtB,KAAK,WAAa,CAAC,EACfA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAH,EAAe,UAAU,KAAO,GAQhCA,EAAe,UAAU,YAAc,GAQvCA,EAAe,UAAU,UAAY,GAQrCA,EAAe,UAAU,KAAO,EAQhCA,EAAe,UAAU,EAAI,EAQ7BA,EAAe,UAAU,EAAIL,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAQ3EK,EAAe,UAAU,EAAIL,EAAM,UAAU,CAAC,CAAC,EAQ/CK,EAAe,UAAU,EAAI,KAQ7BA,EAAe,UAAU,EAAI,KAQ7BA,EAAe,UAAU,aAAe,KAQxCA,EAAe,UAAU,GAAK,KAQ9BA,EAAe,UAAU,OAASL,EAAM,WAQxCK,EAAe,UAAU,KAAOL,EAAM,WAQtCK,EAAe,UAAU,QAAUL,EAAM,WAQzCK,EAAe,UAAU,QAAUL,EAAM,WAQzCK,EAAe,UAAU,OAASL,EAAM,WAQxCK,EAAe,UAAU,cAAgBL,EAAM,WAQ/CK,EAAe,UAAU,WAAaL,EAAM,WAU5CK,EAAe,OAAS,SAAgBC,EAAY,CAChD,OAAO,IAAID,EAAeC,CAAU,CACxC,EAWAD,EAAe,OAAS,SAAgBI,EAASC,EAAQ,CAerD,GAdKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,CAAC,EACxDA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,CAAC,EACxDA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,CAAC,EACxDA,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DR,EAAM,KAAK,YAAY,OAAOQ,EAAQ,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACjGD,EAAQ,GAAK,MAAQ,OAAO,eAAe,KAAKA,EAAS,GAAG,GAC5DR,EAAM,KAAK,WAAW,OAAOQ,EAAQ,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAChGD,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OAAQ,CACjDC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,OAAO,OAAQ,EAAE,EACzCC,EAAO,MAAMD,EAAQ,OAAO,CAAC,CAAC,EAClCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OAAQ,CAC7CC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,KAAK,OAAQ,EAAE,EACvCC,EAAO,MAAMD,EAAQ,KAAK,CAAC,CAAC,EAChCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,SAAW,MAAQA,EAAQ,QAAQ,OAC3C,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAC1CC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,QAAQ,CAAC,CAAC,EACzE,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,QAAQ,OAC3C,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAC1CR,EAAM,KAAK,YAAY,OAAOQ,EAAQ,QAAQ,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnH,GAAID,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OACzC,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzCR,EAAM,KAAK,WAAW,OAAOQ,EAAQ,OAAO,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAKjH,GAJID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA+B,GAAG,EAAE,OAAOD,EAAQ,SAAS,EACnEA,EAAQ,IAAM,MAAQ,OAAO,eAAe,KAAKA,EAAS,IAAI,GAC9DR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,GAAIC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAClGD,EAAQ,YAAc,MAAQA,EAAQ,WAAW,OACjD,QAAS,EAAI,EAAG,EAAIA,EAAQ,WAAW,OAAQ,EAAE,EAC7CR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,WAAW,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAOrH,GANID,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA+B,GAAG,EAAE,MAAMD,EAAQ,IAAI,EAC7DA,EAAQ,aAAe,MAAQ,OAAO,eAAe,KAAKA,EAAS,aAAa,GAChFC,EAAO,OAA+B,GAAG,EAAE,OAAOD,EAAQ,WAAW,EACrEA,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFR,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,aAAcC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACpHD,EAAQ,eAAiB,MAAQA,EAAQ,cAAc,OACvD,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAChDR,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,cAAc,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAChI,OAAOA,CACX,EAWAL,EAAe,gBAAkB,SAAyBI,EAASC,EAAQ,CACvE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAL,EAAe,OAAS,SAAgBM,EAAQC,EAAQ,CAC9CD,aAAkBb,IACpBa,EAASb,EA
AQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,eACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,YAAcE,EAAO,OAAO,EACpC,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,KAAOE,EAAO,MAAM,EAC5B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIE,EAAO,MAAM,EACzB,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIE,EAAO,MAAM,EACzB,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIE,EAAO,MAAM,EACzB,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACjE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,EAAIR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAChE,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,aAAeR,EAAM,KAAK,kBAAkB,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAClF,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,GAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAChE,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,IACjBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,OAAO,KAAKE,EAAO,MAAM,CAAC,OAEtCF,EAAQ,OAAO,KAAKE,EAAO,MAAM,CAAC,EACtC,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,IACfK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,OAEpCF,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,EACpC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,SAAWA,EAAQ,QAAQ,SACrCA,EAAQ,QAAU,CAAC,GACvBA,EAAQ,QAAQ,KAAKE,EAAO,MAAM,CAAC,EACnC,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,SAAWA,EAAQ,QAAQ,SACrCA,EAAQ,QAAU,CAAC,GACvBA,EAAQ,QAAQ,KAAKR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC3E,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,GACtBA,EAAQ,OAAO,KAAKR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACzE,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,eAAiBA,EAAQ,cAAc,SACjDA,EAAQ,cAAgB,CAAC,GAC7BA,EAAQ,cAAc,KAAKR,EAAM,KAAK,kBAAkB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACvF,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,YAAcA,EAAQ,WAAW,SAC3CA,EAAQ,WAAa,CAAC,GAC1BA,EAAQ,WAAW,KAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC5E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAJ,EAAe,gBAAkB,SAAyBM,EAAQ,CAC9D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAN,EAAe,OAAS,SAAgBI,EAAS,CAC7C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,GAC/D,CAACT,EAAM,SAASS,EAAQ,WAAW,EACnC,MAAO,+BACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EACrD,OAAQA,EAAQ,KAAM,CACtB,QACI,MAAO,4BACX,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,IACL,IAAK,IACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,GACL,IAAK,IACL,IAAK,IACL,IAAK,IACD,KACJ,CACJ,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,GAC3C,OAAOA,EAAQ,GAAM,SACrB,MAAO,qBACf,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,GAC3C,CAACT,EAAM,UAAUS,EAAQ,CAAC,GAAK,EAAEA,EAAQ,GAAKT,EAAM,UAAUS,EAAQ,EAAE,GAAG,GAAKT,EAAM,UAAUS,EAAQ,EAAE,IAAI,GAC9G,MAAO,2BACf,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,GAC3C,EAAEA,EAAQ,GAAK,OAAOA,EAAQ,EAAE,QAAW,UAAYT,EAAM,SAASS,EAAQ,CAAC,GAC/E,MAAO,qBACf,GAAIA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,EAAG,CAClD,IAAIO,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,CAAC,EACnD,GAAIO,EACA,MAAO,KAAOA,CACtB,CACA,GAAIP,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,EAAG,CAClD,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,CAAC,EAClD,GAAIO,EACA,MAAO,KAAOA,CACtB,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,IAAIO,EAAQf,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,YAAY,EACpE,GAAIO,EACA,MAAO,gBAAkBA,CACjC
,CACA,GAAIP,EAAQ,IAAM,MAAQA,EAAQ,eAAe,IAAI,EAAG,CACpD,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,EAAE,EAClD,GAAIO,EACA,MAAO,MAAQA,CACvB,CACA,GAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzC,GAAI,OAAOA,EAAQ,OAAO,CAAC,GAAM,SAC7B,MAAO,2BACnB,CACA,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,KAAK,OAAQ,EAAE,EACvC,GAAI,CAACT,EAAM,UAAUS,EAAQ,KAAK,CAAC,CAAC,GAAK,EAAEA,EAAQ,KAAK,CAAC,GAAKT,EAAM,UAAUS,EAAQ,KAAK,CAAC,EAAE,GAAG,GAAKT,EAAM,UAAUS,EAAQ,KAAK,CAAC,EAAE,IAAI,GACtI,MAAO,+BACnB,CACA,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,GAAI,CAAC,MAAM,QAAQA,EAAQ,OAAO,EAC9B,MAAO,0BACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAC1C,GAAI,EAAEA,EAAQ,QAAQ,CAAC,GAAK,OAAOA,EAAQ,QAAQ,CAAC,EAAE,QAAW,UAAYT,EAAM,SAASS,EAAQ,QAAQ,CAAC,CAAC,GAC1G,MAAO,4BACnB,CACA,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,GAAI,CAAC,MAAM,QAAQA,EAAQ,OAAO,EAC9B,MAAO,0BACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,QAAQ,OAAQ,EAAE,EAAG,CAC7C,IAAIO,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,QAAQ,CAAC,CAAC,EAC5D,GAAIO,EACA,MAAO,WAAaA,CAC5B,CACJ,CACA,GAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EAAG,CAC5C,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,OAAO,CAAC,CAAC,EAC1D,GAAIO,EACA,MAAO,UAAYA,CAC3B,CACJ,CACA,GAAIP,EAAQ,eAAiB,MAAQA,EAAQ,eAAe,eAAe,EAAG,CAC1E,GAAI,CAAC,MAAM,QAAQA,EAAQ,aAAa,EACpC,MAAO,gCACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAAG,CACnD,IAAIO,EAAQf,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,cAAc,CAAC,CAAC,EACxE,GAAIO,EACA,MAAO,iBAAmBA,CAClC,CACJ,CACA,GAAIP,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpE,GAAI,CAAC,MAAM,QAAQA,EAAQ,UAAU,EACjC,MAAO,6BACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,WAAW,OAAQ,EAAE,EAAG,CAChD,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,WAAW,CAAC,CAAC,EAC7D,GAAIO,EACA,MAAO,cAAgBA,CAC/B,CACJ,CACA,OAAO,IACX,EAUAX,EAAe,WAAa,SAAoBY,EAAQ,CACpD,GAAIA,aAAkBhB,EAAM,KAAK,eAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,eAO7B,OANIgB,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,aAAe,OACtBR,EAAQ,YAAc,OAAOQ,EAAO,WAAW,GAC/CA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GACvCA,EAAO,KAAM,CACrB,QACI,GAAI,OAAOA,EAAO,MAAS,SAAU,CACjCR,EAAQ,KAAOQ,EAAO,KACtB,KACJ,CACA,MACJ,IAAK,YACL,IAAK,GACDR,EAAQ,KAAO,EACf,MACJ,IAAK,QACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,MACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,SACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,SACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,QACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,gBACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,aACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,SACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,OACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,UACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,UACL,IAAK,GACDA,EAAQ,KAAO,EACf,MACJ,IAAK,SACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,iBACL,IAAK,IACDA,EAAQ,KAAO,GACf,MACJ,IAAK,cACL,IAAK,IACDA,EAAQ,KAAO,GACf,KACJ,CAiBA,GAhBIQ,EAAO,GAAK,OACZR,EAAQ,EAAI,OAAOQ,EAAO,CAAC,GAC3BA,EAAO,GAAK,OACRjB,EAAM,MACLS,EAAQ,EAAIT,EAAM,KAAK,UAAUiB,EAAO,CAAC,GAAG,SAAW,GACnD,OAAOA,EAAO,GAAM,SACzBR,EAAQ,EAAI,SAASQ,EAAO,EAAG,EAAE,EAC5B,OAAOA,EAAO,GAAM,SACzBR,EAAQ,EAAIQ,EAAO,EACd,OAAOA,EAAO,GAAM,WACzBR,EAAQ,EAAI,IAAIT,EAAM,SAASiB,EAAO,EAAE,MAAQ,EAAGA,EAAO,EAAE,OAAS,CAAC,EAAE,SAAS,IACrFA,EAAO,GAAK,OACR,OAAOA,EAAO,GAAM,SACpBjB,EAAM,OAAO,OAAOiB,EAAO,EAAGR,EAAQ,EAAIT,EAAM,UAAUA,EAAM,OAAO,OAAOiB,EAAO,CAAC,CAAC,EAAG,CAAC,EACtFA,EAAO,EAAE,QAAU,IACxBR,EAAQ,EAAIQ,EAAO,IACvBA,EAAO,GAAK,KAAM,CAClB,GAAI,OAAOA,EAAO,GAAM,SACpB,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,EAAIR,EAAM,KAAK,YAAY,WAAWgB,EAAO,CAAC,CAC1D,CACA,GAAI
A,EAAO,GAAK,KAAM,CAClB,GAAI,OAAOA,EAAO,GAAM,SACpB,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,EAAIR,EAAM,KAAK,WAAW,WAAWgB,EAAO,CAAC,CACzD,CACA,GAAIA,EAAO,cAAgB,KAAM,CAC7B,GAAI,OAAOA,EAAO,cAAiB,SAC/B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,aAAeR,EAAM,KAAK,kBAAkB,WAAWgB,EAAO,YAAY,CACtF,CACA,GAAIA,EAAO,IAAM,KAAM,CACnB,GAAI,OAAOA,EAAO,IAAO,SACrB,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,GAAKR,EAAM,KAAK,UAAU,WAAWgB,EAAO,EAAE,CAC1D,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EACxCR,EAAQ,OAAO,CAAC,EAAI,OAAOQ,EAAO,OAAO,CAAC,CAAC,CACnD,CACA,GAAIA,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAClCjB,EAAM,MACLS,EAAQ,KAAK,CAAC,EAAIT,EAAM,KAAK,UAAUiB,EAAO,KAAK,CAAC,CAAC,GAAG,SAAW,GAC/D,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAI,SAASQ,EAAO,KAAK,CAAC,EAAG,EAAE,EACxC,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAIQ,EAAO,KAAK,CAAC,EAC1B,OAAOA,EAAO,KAAK,CAAC,GAAM,WAC/BR,EAAQ,KAAK,CAAC,EAAI,IAAIT,EAAM,SAASiB,EAAO,KAAK,CAAC,EAAE,MAAQ,EAAGA,EAAO,KAAK,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAC/G,CACA,GAAIA,EAAO,QAAS,CAChB,GAAI,CAAC,MAAM,QAAQA,EAAO,OAAO,EAC7B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,QAAU,CAAC,EACnB,QAAS,EAAI,EAAG,EAAIQ,EAAO,QAAQ,OAAQ,EAAE,EACrC,OAAOA,EAAO,QAAQ,CAAC,GAAM,SAC7BjB,EAAM,OAAO,OAAOiB,EAAO,QAAQ,CAAC,EAAGR,EAAQ,QAAQ,CAAC,EAAIT,EAAM,UAAUA,EAAM,OAAO,OAAOiB,EAAO,QAAQ,CAAC,CAAC,CAAC,EAAG,CAAC,EACjHA,EAAO,QAAQ,CAAC,EAAE,QAAU,IACjCR,EAAQ,QAAQ,CAAC,EAAIQ,EAAO,QAAQ,CAAC,EACjD,CACA,GAAIA,EAAO,QAAS,CAChB,GAAI,CAAC,MAAM,QAAQA,EAAO,OAAO,EAC7B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,QAAU,CAAC,EACnB,QAAS,EAAI,EAAG,EAAIQ,EAAO,QAAQ,OAAQ,EAAE,EAAG,CAC5C,GAAI,OAAOA,EAAO,QAAQ,CAAC,GAAM,SAC7B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,QAAQ,CAAC,EAAIR,EAAM,KAAK,YAAY,WAAWgB,EAAO,QAAQ,CAAC,CAAC,CAC5E,CACJ,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EAAG,CAC3C,GAAI,OAAOA,EAAO,OAAO,CAAC,GAAM,SAC5B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,OAAO,CAAC,EAAIR,EAAM,KAAK,WAAW,WAAWgB,EAAO,OAAO,CAAC,CAAC,CACzE,CACJ,CACA,GAAIA,EAAO,cAAe,CACtB,GAAI,CAAC,MAAM,QAAQA,EAAO,aAAa,EACnC,MAAM,UAAU,oDAAoD,EACxER,EAAQ,cAAgB,CAAC,EACzB,QAAS,EAAI,EAAG,EAAIQ,EAAO,cAAc,OAAQ,EAAE,EAAG,CAClD,GAAI,OAAOA,EAAO,cAAc,CAAC,GAAM,SACnC,MAAM,UAAU,qDAAqD,EACzER,EAAQ,cAAc,CAAC,EAAIR,EAAM,KAAK,kBAAkB,WAAWgB,EAAO,cAAc,CAAC,CAAC,CAC9F,CACJ,CACA,GAAIA,EAAO,WAAY,CACnB,GAAI,CAAC,MAAM,QAAQA,EAAO,UAAU,EAChC,MAAM,UAAU,iDAAiD,EACrER,EAAQ,WAAa,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIQ,EAAO,WAAW,OAAQ,EAAE,EAAG,CAC/C,GAAI,OAAOA,EAAO,WAAW,CAAC,GAAM,SAChC,MAAM,UAAU,kDAAkD,EACtER,EAAQ,WAAW,CAAC,EAAIR,EAAM,KAAK,UAAU,WAAWgB,EAAO,WAAW,CAAC,CAAC,CAChF,CACJ,CACA,OAAOR,CACX,EAWAJ,EAAe,SAAW,SAAkBI,EAASS,EAAS,CACrDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAUd,IATIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,OAAS,CAAC,EACjBA,EAAO,KAAO,CAAC,EACfA,EAAO,QAAU,CAAC,EAClBA,EAAO,QAAU,CAAC,EAClBA,EAAO,OAAS,CAAC,EACjBA,EAAO,WAAa,CAAC,EACrBA,EAAO,cAAgB,CAAC,GAExBC,EAAQ,SAAU,CAGlB,GAFAD,EAAO,KAAO,GACdA,EAAO,EAAI,EACPjB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,EAAIC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CACzG,MACIF,EAAO,EAAIC,EAAQ,QAAU,OAAS,IAAM,EAC5CA,EAAQ,QAAU,OAClBD,EAAO,EAAI,IAEXA,EAAO,EAAI,CAAC,EACRC,EAAQ,QAAU,QAClBD,EAAO,EAAIjB,EAAM,UAAUiB,EAAO,CAAC,IAE3CA,EAAO,EAAI,KACXA,EAAO,EAAI,KACXA,EAAO,UAAY,GACnBA,EAAO,GAAK,KACZA,EAAO,KAAOC,EAAQ,QAAU,OAAS,YAAc,EACvDD,EAAO,YAAc,GACrBA,EAAO,aAAe,IAC1B,CAgBA,GAfIR,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IA
CrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIC,EAAQ,MAAQ,CAAC,SAAST,EAAQ,CAAC,EAAI,OAAOA,EAAQ,CAAC,EAAIA,EAAQ,GAC9EA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC3C,OAAOA,EAAQ,GAAM,SACrBQ,EAAO,EAAIC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,CAAC,EAAIA,EAAQ,EAElEQ,EAAO,EAAIC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,CAAC,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,EAAE,MAAQ,EAAGA,EAAQ,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,GACxMA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIC,EAAQ,QAAU,OAASlB,EAAM,OAAO,OAAOS,EAAQ,EAAG,EAAGA,EAAQ,EAAE,MAAM,EAAIS,EAAQ,QAAU,MAAQ,MAAM,UAAU,MAAM,KAAKT,EAAQ,CAAC,EAAIA,EAAQ,GACtKA,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIhB,EAAM,KAAK,YAAY,SAASQ,EAAQ,EAAGS,CAAO,GAC7DT,EAAQ,GAAK,MAAQA,EAAQ,eAAe,GAAG,IAC/CQ,EAAO,EAAIhB,EAAM,KAAK,WAAW,SAASQ,EAAQ,EAAGS,CAAO,GAC5DT,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAIF,EAAQ,MAAQ,CAAC,SAAST,EAAQ,OAAOW,CAAC,CAAC,EAAI,OAAOX,EAAQ,OAAOW,CAAC,CAAC,EAAIX,EAAQ,OAAOW,CAAC,CACtH,CACA,GAAIX,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACnC,OAAOX,EAAQ,KAAKW,CAAC,GAAM,SAC3BH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAAS,OAAOT,EAAQ,KAAKW,CAAC,CAAC,EAAIX,EAAQ,KAAKW,CAAC,EAEpFH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,KAAKW,CAAC,CAAC,EAAIF,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,KAAKW,CAAC,EAAE,MAAQ,EAAGX,EAAQ,KAAKW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIX,EAAQ,KAAKW,CAAC,CAC9O,CACA,GAAIX,EAAQ,SAAWA,EAAQ,QAAQ,OAAQ,CAC3CQ,EAAO,QAAU,CAAC,EAClB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,QAAQ,OAAQ,EAAEW,EAC1CH,EAAO,QAAQG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,OAAO,OAAOS,EAAQ,QAAQW,CAAC,EAAG,EAAGX,EAAQ,QAAQW,CAAC,EAAE,MAAM,EAAIF,EAAQ,QAAU,MAAQ,MAAM,UAAU,MAAM,KAAKT,EAAQ,QAAQW,CAAC,CAAC,EAAIX,EAAQ,QAAQW,CAAC,CAC3N,CACA,GAAIX,EAAQ,SAAWA,EAAQ,QAAQ,OAAQ,CAC3CQ,EAAO,QAAU,CAAC,EAClB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,QAAQ,OAAQ,EAAEW,EAC1CH,EAAO,QAAQG,CAAC,EAAInB,EAAM,KAAK,YAAY,SAASQ,EAAQ,QAAQW,CAAC,EAAGF,CAAO,CACvF,CACA,GAAIT,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAInB,EAAM,KAAK,WAAW,SAASQ,EAAQ,OAAOW,CAAC,EAAGF,CAAO,CACpF,CAKA,GAJIT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,IAAM,MAAQA,EAAQ,eAAe,IAAI,IACjDQ,EAAO,GAAKhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,GAAIS,CAAO,GAC7DT,EAAQ,YAAcA,EAAQ,WAAW,OAAQ,CACjDQ,EAAO,WAAa,CAAC,EACrB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,WAAW,OAAQ,EAAEW,EAC7CH,EAAO,WAAWG,CAAC,EAAInB,EAAM,KAAK,UAAU,SAASQ,EAAQ,WAAWW,CAAC,EAAGF,CAAO,CAC3F,CAOA,GANIT,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOC,EAAQ,QAAU,OAASjB,EAAM,KAAK,eAAe,cAAcQ,EAAQ,IAAI,IAAM,OAAYA,EAAQ,KAAOR,EAAM,KAAK,eAAe,cAAcQ,EAAQ,IAAI,EAAIA,EAAQ,MAC9LA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,IACnEQ,EAAO,YAAcR,EAAQ,aAC7BA,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAehB,EAAM,KAAK,kBAAkB,SAASQ,EAAQ,aAAcS,CAAO,GACzFT,EAAQ,eAAiBA,EAAQ,cAAc,OAAQ,CACvDQ,EAAO,cAAgB,CAAC,EACxB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,cAAc,OAAQ,EAAEW,EAChDH,EAAO,cAAcG,CAAC,EAAInB,EAAM,KAAK,kBAAkB,SAASQ,EAAQ,cAAcW,CAAC,EAAGF,CAAO,CACzG,CACA,OAAOD,CACX,EASAZ,EAAe,UAAU,OAAS,UAAkB,CAChD,OAAO,KAAK,YAAY,SAAS,KAAMR,GAAU,KAAK,aAAa,CACvE,EAUAQ,EAAe,WAAa,SAAoBgB,EAAe,CAC3D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,sBAC3B,EAsBAhB,EAAe,cAAiB,UAAW,CACvC,IAAIF,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,WAAW,EAAI,EACtCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,KAAK,EAAI,EAChCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,OAAO,
EAAI,EAClCC,EAAOD,EAAW,EAAE,EAAI,eAAe,EAAI,GAC3CC,EAAOD,EAAW,EAAE,EAAI,YAAY,EAAI,GACxCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,MAAM,EAAI,EACjCC,EAAOD,EAAW,CAAC,EAAI,SAAS,EAAI,EACpCC,EAAOD,EAAW,CAAC,EAAI,SAAS,EAAI,EACpCC,EAAOD,EAAW,EAAE,EAAI,QAAQ,EAAI,GACpCC,EAAOD,EAAW,EAAE,EAAI,gBAAgB,EAAI,GAC5CC,EAAOD,EAAW,EAAE,EAAI,aAAa,EAAI,GAClCC,CACX,EAAG,EAEIC,CACX,EAAG,EAEHH,EAAK,eAAkB,UAAW,CAmB9B,SAASoB,EAAehB,EAAY,CAChC,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAc,EAAe,UAAU,KAAO,GAQhCA,EAAe,UAAU,KAAO,KAQhCA,EAAe,UAAU,UAAY,GAUrCA,EAAe,OAAS,SAAgBhB,EAAY,CAChD,OAAO,IAAIgB,EAAehB,CAAU,CACxC,EAWAgB,EAAe,OAAS,SAAgBb,EAASC,EAAQ,CACrD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAMC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAClGD,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EAC9DC,CACX,EAWAY,EAAe,gBAAkB,SAAyBb,EAASC,EAAQ,CACvE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAY,EAAe,OAAS,SAAgBX,EAAQC,EAAQ,CAC9CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,eACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,KAAOR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAClE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAa,EAAe,gBAAkB,SAAyBX,EAAQ,CAC9D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAW,EAAe,OAAS,SAAgBb,EAAS,CAC7C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,IAAI,EACpD,GAAIO,EACA,MAAO,QAAUA,CACzB,CACA,OAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EAC1B,6BACR,IACX,EAUAa,EAAe,WAAa,SAAoBL,EAAQ,CACpD,GAAIA,aAAkBhB,EAAM,KAAK,eAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,eAG7B,GAFIgB,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,MAAQ,KAAM,CACrB,GAAI,OAAOA,EAAO,MAAS,SACvB,MAAM,UAAU,4CAA4C,EAChER,EAAQ,KAAOR,EAAM,KAAK,UAAU,WAAWgB,EAAO,IAAI,CAC9D,CACA,OAAIA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GACxCR,CACX,EAWAa,EAAe,SAAW,SAAkBb,EAASS,EAAS,CACrDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,KAAO,GACdA,EAAO,KAAO,KACdA,EAAO,UAAY,IAEnBR,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,KAAMS,CAAO,GACjET,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WACxBQ,CACX,EASAK,EAAe,UAAU,OAAS,UAAkB,CAChD,OAAO,KAAK,YAAY,SAAS,KAAMzB,GAAU,KAAK,aAAa,CACvE,EAUAyB,EAAe,WAAa,SAAoBD,EAAe,CAC3D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,sBAC3B,EAEOC,CACX,EAAG,EAEHpB,EAAK,UAAa,UAAW,CAuBzB,SAASqB,EAAUjB,EAAY,CAI3B,GAHA,KAAK,MAAQ,CAAC,EACd,KAAK,OAAS,CAAC,EACf,KAAK,UAAY,CAAC,EACdA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAe,EAAU,UAAU,MAAQvB,EAAM,WAQlCuB,EAAU,UAAU,OAASvB,EAAM,WAQnCuB,EAAU,UAAU,KAAO,GAQ3BA,EAAU,UAAU,OAAS,GAQ7BA,EAAU,UAAU,OAAS,GAQ7BA,EAAU,UAAU,UAAYvB,EAAM
,WAQtCuB,EAAU,UAAU,UAAY,GAUhCA,EAAU,OAAS,SAAgBjB,EAAY,CAC3C,OAAO,IAAIiB,EAAUjB,CAAU,CACnC,EAWAiB,EAAU,OAAS,SAAgBd,EAASC,EAAQ,CAGhD,GAFKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,OAAS,MAAQA,EAAQ,MAAM,OACvC,QAAS,EAAI,EAAG,EAAIA,EAAQ,MAAM,OAAQ,EAAE,EACxCC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,CAAC,CAAC,EACxE,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OACzC,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzCC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,OAAO,CAAC,CAAC,EAKzE,GAJIA,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,EAC9DA,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAC/C,QAAS,EAAI,EAAG,EAAIA,EAAQ,UAAU,OAAQ,EAAE,EAC5CR,EAAM,KAAK,eAAe,OAAOQ,EAAQ,UAAU,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACvH,OAAID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EACjEA,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,EAC3DC,CACX,EAWAa,EAAU,gBAAkB,SAAyBd,EAASC,EAAQ,CAClE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAa,EAAU,OAAS,SAAgBZ,EAAQC,EAAQ,CACzCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACML,EAAQ,OAASA,EAAQ,MAAM,SACjCA,EAAQ,MAAQ,CAAC,GACrBA,EAAQ,MAAM,KAAKE,EAAO,OAAO,CAAC,EAClC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,GACtBA,EAAQ,OAAO,KAAKE,EAAO,OAAO,CAAC,EACnC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,GACzBA,EAAQ,UAAU,KAAKR,EAAM,KAAK,eAAe,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAChF,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAc,EAAU,gBAAkB,SAAyBZ,EAAQ,CACzD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAY,EAAU,OAAS,SAAgBd,EAAS,CACxC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,GAAI,CAAC,MAAM,QAAQA,EAAQ,KAAK,EAC5B,MAAO,wBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,MAAM,OAAQ,EAAED,EACxC,GAAI,CAACR,EAAM,SAASS,EAAQ,MAAMD,CAAC,CAAC,EAChC,MAAO,0BACnB,CACA,GAAIC,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,OAAO,OAAQ,EAAED,EACzC,GAAI,CAACR,EAAM,SAASS,EAAQ,OAAOD,CAAC,CAAC,EACjC,MAAO,2BACnB,CACA,GAAIC,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EAC9B,MAAO,0BACf,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EAC9B,MAAO,0BACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAAG,CAC/C,IAAIQ,EAAQf,EAAM,KAAK,eAAe,OAAOQ,EAAQ,UAAUD,CAAC,CAAC,EACjE,GAAIQ,EACA,MAAO,aAAeA,CAC9B,CACJ,CACA,OAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EAC1B,6BACR,IACX,EAUAc,EAAU,WAAa,SAAoBN,EAAQ,CAC/C,GAAIA,aAAkBhB,EAAM,KAAK,UAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAC7B,GAAIgB,EAAO,MAAO,CACd,GAAI,CAAC,MAAM,QAAQA,EAAO,KAAK,EAC3B,MAAM,UAAU,uCAAuC,EAC3DR,EAAQ,MAAQ,CAAC,EACjB,QAAS,EAAI,EAAG,EAAIQ,EAAO,MAAM,OAAQ,EAAE,EACvCR,EAAQ,MAAM,CAAC,EAAI,OAAOQ,EAAO,MAAM,CAAC,CAAC,CACjD,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,
UAAU,wCAAwC,EAC5DR,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EACxCR,EAAQ,OAAO,CAAC,EAAI,OAAOQ,EAAO,OAAO,CAAC,CAAC,CACnD,CAOA,GANIA,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GACrCA,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GACrCA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAAG,CAC9C,GAAI,OAAOA,EAAO,UAAU,CAAC,GAAM,SAC/B,MAAM,UAAU,4CAA4C,EAChER,EAAQ,UAAU,CAAC,EAAIR,EAAM,KAAK,eAAe,WAAWgB,EAAO,UAAU,CAAC,CAAC,CACnF,CACJ,CACA,OAAIA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GACxCR,CACX,EAWAc,EAAU,SAAW,SAAkBd,EAASS,EAAS,CAChDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAYd,IAXIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,MAAQ,CAAC,EAChBA,EAAO,OAAS,CAAC,EACjBA,EAAO,UAAY,CAAC,GAEpBC,EAAQ,WACRD,EAAO,KAAO,GACdA,EAAO,OAAS,GAChBA,EAAO,UAAY,GACnBA,EAAO,OAAS,IAEhBR,EAAQ,OAASA,EAAQ,MAAM,OAAQ,CACvCQ,EAAO,MAAQ,CAAC,EAChB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,MAAM,OAAQ,EAAEW,EACxCH,EAAO,MAAMG,CAAC,EAAIX,EAAQ,MAAMW,CAAC,CACzC,CACA,GAAIX,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAIX,EAAQ,OAAOW,CAAC,CAC3C,CAKA,GAJIX,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACxBA,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAAU,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAInB,EAAM,KAAK,eAAe,SAASQ,EAAQ,UAAUW,CAAC,EAAGF,CAAO,CAC9F,CACA,OAAIT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACrBQ,CACX,EASAM,EAAU,UAAU,OAAS,UAAkB,CAC3C,OAAO,KAAK,YAAY,SAAS,KAAM1B,GAAU,KAAK,aAAa,CACvE,EAUA0B,EAAU,WAAa,SAAoBF,EAAe,CACtD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,iBAC3B,EAEOE,CACX,EAAG,EAEHrB,EAAK,kBAAqB,UAAW,CAoBjC,SAASsB,EAAkBlB,EAAY,CAGnC,GAFA,KAAK,sBAAwB,CAAC,EAC9B,KAAK,cAAgB,CAAC,EAClBA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAgB,EAAkB,UAAU,eAAiB,KAQ7CA,EAAkB,UAAU,UAAY,KAQxCA,EAAkB,UAAU,sBAAwBxB,EAAM,WAQ1DwB,EAAkB,UAAU,cAAgBxB,EAAM,WAUlDwB,EAAkB,OAAS,SAAgBlB,EAAY,CACnD,OAAO,IAAIkB,EAAkBlB,CAAU,CAC3C,EAWAkB,EAAkB,OAAS,SAAgBf,EAASC,EAAQ,CAOxD,GANKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,gBAAkB,MAAQ,OAAO,eAAe,KAAKA,EAAS,gBAAgB,GACtFR,EAAM,KAAK,WAAW,OAAOQ,EAAQ,eAAgBC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC7GD,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5ER,EAAM,KAAK,WAAW,OAAOQ,EAAQ,UAAWC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACxGD,EAAQ,uBAAyB,MAAQA,EAAQ,sBAAsB,OACvE,QAAS,EAAI,EAAG,EAAIA,EAAQ,sBAAsB,OAAQ,EAAE,EACxDR,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,sBAAsB,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC3I,GAAID,EAAQ,eAAiB,MAAQA,EAAQ,cAAc,OACvD,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAChDR,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,cAAc,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnI,OAAOA,CACX,EAWAc,EAAkB,gBAAkB,SAAyBf,EAASC,EAAQ,CAC1E,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAc,EAAkB,OAAS,SAAgBb,EAAQC,EAAQ,CACjDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,kBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,eAAiBR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC7E,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACxE,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,uBAAyBA,EAAQ,sBAAsB,SACjEA,EA
AQ,sBAAwB,CAAC,GACrCA,EAAQ,sBAAsB,KAAKR,EAAM,KAAK,uBAAuB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACpG,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,eAAiBA,EAAQ,cAAc,SACjDA,EAAQ,cAAgB,CAAC,GAC7BA,EAAQ,cAAc,KAAKR,EAAM,KAAK,uBAAuB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC5F,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAe,EAAkB,gBAAkB,SAAyBb,EAAQ,CACjE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAa,EAAkB,OAAS,SAAgBf,EAAS,CAChD,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,gBAAkB,MAAQA,EAAQ,eAAe,gBAAgB,EAAG,CAC5E,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,cAAc,EAC/D,GAAIO,EACA,MAAO,kBAAoBA,CACnC,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,SAAS,EAC1D,GAAIO,EACA,MAAO,aAAeA,CAC9B,CACA,GAAIP,EAAQ,uBAAyB,MAAQA,EAAQ,eAAe,uBAAuB,EAAG,CAC1F,GAAI,CAAC,MAAM,QAAQA,EAAQ,qBAAqB,EAC5C,MAAO,wCACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,sBAAsB,OAAQ,EAAE,EAAG,CAC3D,IAAIO,EAAQf,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,sBAAsB,CAAC,CAAC,EACrF,GAAIO,EACA,MAAO,yBAA2BA,CAC1C,CACJ,CACA,GAAIP,EAAQ,eAAiB,MAAQA,EAAQ,eAAe,eAAe,EAAG,CAC1E,GAAI,CAAC,MAAM,QAAQA,EAAQ,aAAa,EACpC,MAAO,gCACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAAG,CACnD,IAAIO,EAAQf,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,cAAc,CAAC,CAAC,EAC7E,GAAIO,EACA,MAAO,iBAAmBA,CAClC,CACJ,CACA,OAAO,IACX,EAUAQ,EAAkB,WAAa,SAAoBP,EAAQ,CACvD,GAAIA,aAAkBhB,EAAM,KAAK,kBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,kBAC7B,GAAIgB,EAAO,gBAAkB,KAAM,CAC/B,GAAI,OAAOA,EAAO,gBAAmB,SACjC,MAAM,UAAU,yDAAyD,EAC7ER,EAAQ,eAAiBR,EAAM,KAAK,WAAW,WAAWgB,EAAO,cAAc,CACnF,CACA,GAAIA,EAAO,WAAa,KAAM,CAC1B,GAAI,OAAOA,EAAO,WAAc,SAC5B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,UAAYR,EAAM,KAAK,WAAW,WAAWgB,EAAO,SAAS,CACzE,CACA,GAAIA,EAAO,sBAAuB,CAC9B,GAAI,CAAC,MAAM,QAAQA,EAAO,qBAAqB,EAC3C,MAAM,UAAU,+DAA+D,EACnFR,EAAQ,sBAAwB,CAAC,EACjC,QAAS,EAAI,EAAG,EAAIQ,EAAO,sBAAsB,OAAQ,EAAE,EAAG,CAC1D,GAAI,OAAOA,EAAO,sBAAsB,CAAC,GAAM,SAC3C,MAAM,UAAU,gEAAgE,EACpFR,EAAQ,sBAAsB,CAAC,EAAIR,EAAM,KAAK,uBAAuB,WAAWgB,EAAO,sBAAsB,CAAC,CAAC,CACnH,CACJ,CACA,GAAIA,EAAO,cAAe,CACtB,GAAI,CAAC,MAAM,QAAQA,EAAO,aAAa,EACnC,MAAM,UAAU,uDAAuD,EAC3ER,EAAQ,cAAgB,CAAC,EACzB,QAAS,EAAI,EAAG,EAAIQ,EAAO,cAAc,OAAQ,EAAE,EAAG,CAClD,GAAI,OAAOA,EAAO,cAAc,CAAC,GAAM,SACnC,MAAM,UAAU,wDAAwD,EAC5ER,EAAQ,cAAc,CAAC,EAAIR,EAAM,KAAK,uBAAuB,WAAWgB,EAAO,cAAc,CAAC,CAAC,CACnG,CACJ,CACA,OAAOR,CACX,EAWAe,EAAkB,SAAW,SAAkBf,EAASS,EAAS,CACxDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAad,IAZIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,sBAAwB,CAAC,EAChCA,EAAO,cAAgB,CAAC,GAExBC,EAAQ,WACRD,EAAO,eAAiB,KACxBA,EAAO,UAAY,MAEnBR,EAAQ,gBAAkB,MAAQA,EAAQ,eAAe,gBAAgB,IACzEQ,EAAO,eAAiBhB,EAAM,KAAK,WAAW,SAASQ,EAAQ,eAAgBS,CAAO,GACtFT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYhB,EAAM,KAAK,WAAW,SAASQ,EAAQ,UAAWS,CAAO,GAC5ET,EAAQ,uBAAyBA,EAAQ,sBAAsB,OAAQ,CACvEQ,EAAO,sBAAwB,CAAC,EAChC,QAASG,EAAI,EAAGA,EAAIX,EAAQ,sBAAsB,OAAQ,EAAEW,EACxDH,EAAO,sBAAsBG,CAAC,EAAInB,EAAM,KAAK,uBAAuB,SAASQ,EAAQ,sBAAsBW,CAAC,EAAGF,CAAO,CAC9H,CACA,GAAIT,EAAQ,eAAiBA,EAAQ,cAAc,OAAQ,CACvDQ,EAAO,cAAgB,CAAC,EACxB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,cAAc,OAAQ,EAAEW,EAChDH,EAAO,cAAcG,CAAC,EAAInB,EAAM,KAAK,uBAAuB,SAASQ,EAAQ,cAAcW,CAAC,EAAGF,CAAO,CAC9G,CACA,OAAOD,CACX,EASAO,EAAkB,UAAU,OAAS,UAAkB,CACnD,OAAO,KAAK,YAAY,SAAS,KAAM3B,GAAU,KAAK,aAAa,CACvE,EAUA2B,EAAkB,WAAa,SAAoBH,EAAe,CAC9D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,yBAC3B,EAEOG,CACX,EAAG,EAEHtB,EAAK,WAAc,UAAW,CA2B1B,SAASuB,EAAWnB,EAAY,CAK5B,GAJA,KAAK,YAAc,CAAC,EACpB,KAAK,cAAgB,CAAC,EACtB,KAAK,aAAe,CAAC,EACrB,KAAK,UAAY,CAAC,EACdA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EA
AKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAiB,EAAW,UAAU,UAAYzB,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAQ/EyB,EAAW,UAAU,YAAczB,EAAM,WAQzCyB,EAAW,UAAU,aAAe,GAQpCA,EAAW,UAAU,gBAAkB,GAQvCA,EAAW,UAAU,OAAS,GAQ9BA,EAAW,UAAU,aAAezB,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAQlFyB,EAAW,UAAU,UAAY,GAQjCA,EAAW,UAAU,MAAQ,KAQ7BA,EAAW,UAAU,cAAgBzB,EAAM,WAQ3CyB,EAAW,UAAU,aAAezB,EAAM,WAQ1CyB,EAAW,UAAU,UAAYzB,EAAM,WAUvCyB,EAAW,OAAS,SAAgBnB,EAAY,CAC5C,OAAO,IAAImB,EAAWnB,CAAU,CACpC,EAWAmB,EAAW,OAAS,SAAgBhB,EAASC,EAAQ,CAiBjD,GAhBKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,SAAS,EAC/DA,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,YAAY,EACpEA,EAAQ,iBAAmB,MAAQ,OAAO,eAAe,KAAKA,EAAS,iBAAiB,GACxFC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,eAAe,EACvEA,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,EAC9DA,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,YAAY,EACnEA,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EACjEA,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpER,EAAM,KAAK,WAAW,OAAOQ,EAAQ,MAAOC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACpGD,EAAQ,aAAe,MAAQA,EAAQ,YAAY,OACnD,QAAS,EAAI,EAAG,EAAIA,EAAQ,YAAY,OAAQ,EAAE,EAC9CR,EAAM,KAAK,mBAAmB,OAAOQ,EAAQ,YAAY,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC7H,GAAID,EAAQ,eAAiB,MAAQA,EAAQ,cAAc,OACvD,QAAS,EAAI,EAAG,EAAIA,EAAQ,cAAc,OAAQ,EAAE,EAChDR,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,cAAc,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACrI,GAAID,EAAQ,cAAgB,MAAQA,EAAQ,aAAa,OACrD,QAAS,EAAI,EAAG,EAAIA,EAAQ,aAAa,OAAQ,EAAE,EAC/CR,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,aAAa,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EAC/H,GAAID,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAC/C,QAAS,EAAI,EAAG,EAAIA,EAAQ,UAAU,OAAQ,EAAE,EAC5CR,EAAM,KAAK,cAAc,OAAOQ,EAAQ,UAAU,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACxH,OAAOA,CACX,EAWAe,EAAW,gBAAkB,SAAyBhB,EAASC,EAAQ,CACnE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAe,EAAW,OAAS,SAAgBd,EAAQC,EAAQ,CAC1CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,WACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,UAAYE,EAAO,MAAM,EACjC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,aAAeA,EAAQ,YAAY,SAC7CA,EAAQ,YAAc,CAAC,GAC3BA,EAAQ,YAAY,KAAKR,EAAM,KAAK,mBAAmB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtF,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,aAAeE,EAAO,OAAO,EACrC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,gBAAkBE,EAAO,OAAO,EACxC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,aAAeE,EAAO,MAAM,EACpC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,MAAQR,EAAM,KAAK,WAAW,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACpE,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,eAAiBA,EAAQ,cAAc,SACjDA,EAAQ,cAAgB,CAAC,GAC7BA,EAAQ,cAAc,KAAKR,EAAM,KAAK,uBAAuB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC5F,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,cAAgBA,EAAQ,aAAa,SAC/CA,EAAQ,aAAe,CAAC,GAC5BA,EAAQ,aAAa,KAAKR,EAAM,KAAK,kBAAkB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtF,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,GACzBA,EAAQ,UAAU,KAAKR,EAAM,KAAK,cAAc,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC/E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAgB,EAAW,gBAAkB,SAAyBd,EAAQ,CAC1D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAc,EAAW,OAAS,SAAgBhB,EAAS,CACzC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EA
AQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,UAAUS,EAAQ,SAAS,GAAK,EAAEA,EAAQ,WAAaT,EAAM,UAAUS,EAAQ,UAAU,GAAG,GAAKT,EAAM,UAAUS,EAAQ,UAAU,IAAI,GAC9I,MAAO,mCACf,GAAIA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,EAAG,CACtE,GAAI,CAAC,MAAM,QAAQA,EAAQ,WAAW,EAClC,MAAO,8BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,YAAY,OAAQ,EAAED,EAAG,CACjD,IAAIQ,EAAQf,EAAM,KAAK,mBAAmB,OAAOQ,EAAQ,YAAYD,CAAC,CAAC,EACvE,GAAIQ,EACA,MAAO,eAAiBA,CAChC,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,GACjE,CAACT,EAAM,SAASS,EAAQ,YAAY,EACpC,MAAO,gCACf,GAAIA,EAAQ,iBAAmB,MAAQA,EAAQ,eAAe,iBAAiB,GACvE,CAACT,EAAM,SAASS,EAAQ,eAAe,EACvC,MAAO,mCACf,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EAC9B,MAAO,0BACf,GAAIA,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,GACjE,CAACT,EAAM,UAAUS,EAAQ,YAAY,GAAK,EAAEA,EAAQ,cAAgBT,EAAM,UAAUS,EAAQ,aAAa,GAAG,GAAKT,EAAM,UAAUS,EAAQ,aAAa,IAAI,GAC1J,MAAO,sCACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,IAAIO,EAAQf,EAAM,KAAK,WAAW,OAAOQ,EAAQ,KAAK,EACtD,GAAIO,EACA,MAAO,SAAWA,CAC1B,CACA,GAAIP,EAAQ,eAAiB,MAAQA,EAAQ,eAAe,eAAe,EAAG,CAC1E,GAAI,CAAC,MAAM,QAAQA,EAAQ,aAAa,EACpC,MAAO,gCACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,cAAc,OAAQ,EAAED,EAAG,CACnD,IAAIQ,EAAQf,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,cAAcD,CAAC,CAAC,EAC7E,GAAIQ,EACA,MAAO,iBAAmBA,CAClC,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,GAAI,CAAC,MAAM,QAAQA,EAAQ,YAAY,EACnC,MAAO,+BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,aAAa,OAAQ,EAAED,EAAG,CAClD,IAAIQ,EAAQf,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,aAAaD,CAAC,CAAC,EACvE,GAAIQ,EACA,MAAO,gBAAkBA,CACjC,CACJ,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAAG,CAC/C,IAAIQ,EAAQf,EAAM,KAAK,cAAc,OAAOQ,EAAQ,UAAUD,CAAC,CAAC,EAChE,GAAIQ,EACA,MAAO,aAAeA,CAC9B,CACJ,CACA,OAAO,IACX,EAUAS,EAAW,WAAa,SAAoBR,EAAQ,CAChD,GAAIA,aAAkBhB,EAAM,KAAK,WAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,WAU7B,GATIgB,EAAO,WAAa,OAChBjB,EAAM,MACLS,EAAQ,UAAYT,EAAM,KAAK,UAAUiB,EAAO,SAAS,GAAG,SAAW,GACnE,OAAOA,EAAO,WAAc,SACjCR,EAAQ,UAAY,SAASQ,EAAO,UAAW,EAAE,EAC5C,OAAOA,EAAO,WAAc,SACjCR,EAAQ,UAAYQ,EAAO,UACtB,OAAOA,EAAO,WAAc,WACjCR,EAAQ,UAAY,IAAIT,EAAM,SAASiB,EAAO,UAAU,MAAQ,EAAGA,EAAO,UAAU,OAAS,CAAC,EAAE,SAAS,IAC7GA,EAAO,YAAa,CACpB,GAAI,CAAC,MAAM,QAAQA,EAAO,WAAW,EACjC,MAAM,UAAU,8CAA8C,EAClER,EAAQ,YAAc,CAAC,EACvB,QAAS,EAAI,EAAG,EAAIQ,EAAO,YAAY,OAAQ,EAAE,EAAG,CAChD,GAAI,OAAOA,EAAO,YAAY,CAAC,GAAM,SACjC,MAAM,UAAU,+CAA+C,EACnER,EAAQ,YAAY,CAAC,EAAIR,EAAM,KAAK,mBAAmB,WAAWgB,EAAO,YAAY,CAAC,CAAC,CAC3F,CACJ,CAkBA,GAjBIA,EAAO,cAAgB,OACvBR,EAAQ,aAAe,OAAOQ,EAAO,YAAY,GACjDA,EAAO,iBAAmB,OAC1BR,EAAQ,gBAAkB,OAAOQ,EAAO,eAAe,GACvDA,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GACrCA,EAAO,cAAgB,OACnBjB,EAAM,MACLS,EAAQ,aAAeT,EAAM,KAAK,UAAUiB,EAAO,YAAY,GAAG,SAAW,GACzE,OAAOA,EAAO,cAAiB,SACpCR,EAAQ,aAAe,SAASQ,EAAO,aAAc,EAAE,EAClD,OAAOA,EAAO,cAAiB,SACpCR,EAAQ,aAAeQ,EAAO,aACzB,OAAOA,EAAO,cAAiB,WACpCR,EAAQ,aAAe,IAAIT,EAAM,SAASiB,EAAO,aAAa,MAAQ,EAAGA,EAAO,aAAa,OAAS,CAAC,EAAE,SAAS,IACtHA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GAC3CA,EAAO,OAAS,KAAM,CACtB,GAAI,OAAOA,EAAO,OAAU,SACxB,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,MAAQR,EAAM,KAAK,WAAW,WAAWgB,EAAO,KAAK,CACjE,CACA,GAAIA,EAAO,cAAe,CACtB,GAAI,CAAC,MAAM,QAAQA,EAAO,aAAa,EACnC,MAAM,UAAU,gDAAgD,EACpER,EAAQ,cAAgB,CAAC,EACzB,QAAS,EAAI,EAAG,EAAIQ,EAAO,cAAc,OAAQ,EAAE,EAAG,CAClD,GAAI,OAAOA,EAAO,cAAc,CAAC,GAAM,SACnC,MAAM,UAAU,iDAAiD,EACrER,EAAQ,cAAc,CAAC,EAAIR,EAAM,KAAK,uBAAuB,WAAWgB,EAAO,cAAc,CAAC,CAAC,CACnG,CACJ,CACA,GAAIA,EAAO,aAAc,CACrB,GAAI,CAAC,MAAM,QAAQA,EAAO,YAAY,EAClC,MAAM,UAAU,+CAA+C,EACnE
R,EAAQ,aAAe,CAAC,EACxB,QAAS,EAAI,EAAG,EAAIQ,EAAO,aAAa,OAAQ,EAAE,EAAG,CACjD,GAAI,OAAOA,EAAO,aAAa,CAAC,GAAM,SAClC,MAAM,UAAU,gDAAgD,EACpER,EAAQ,aAAa,CAAC,EAAIR,EAAM,KAAK,kBAAkB,WAAWgB,EAAO,aAAa,CAAC,CAAC,CAC5F,CACJ,CACA,GAAIA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,4CAA4C,EAChER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAAG,CAC9C,GAAI,OAAOA,EAAO,UAAU,CAAC,GAAM,SAC/B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,UAAU,CAAC,EAAIR,EAAM,KAAK,cAAc,WAAWgB,EAAO,UAAU,CAAC,CAAC,CAClF,CACJ,CACA,OAAOR,CACX,EAWAgB,EAAW,SAAW,SAAkBhB,EAASS,EAAS,CACjDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAOd,IANIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,YAAc,CAAC,EACtBA,EAAO,cAAgB,CAAC,EACxBA,EAAO,aAAe,CAAC,EACvBA,EAAO,UAAY,CAAC,GAEpBC,EAAQ,SAAU,CAClB,GAAIlB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,UAAYC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CACjH,MACIF,EAAO,UAAYC,EAAQ,QAAU,OAAS,IAAM,EAIxD,GAHAD,EAAO,aAAe,GACtBA,EAAO,gBAAkB,GACzBA,EAAO,OAAS,GACZjB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,aAAeC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CACpH,MACIF,EAAO,aAAeC,EAAQ,QAAU,OAAS,IAAM,EAC3DD,EAAO,UAAY,GACnBA,EAAO,MAAQ,IACnB,CAqBA,GApBIR,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC3D,OAAOA,EAAQ,WAAc,SAC7BQ,EAAO,UAAYC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,SAAS,EAAIA,EAAQ,UAElFQ,EAAO,UAAYC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,SAAS,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,UAAU,MAAQ,EAAGA,EAAQ,UAAU,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,WACxOA,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAeR,EAAQ,cAC9BA,EAAQ,iBAAmB,MAAQA,EAAQ,eAAe,iBAAiB,IAC3EQ,EAAO,gBAAkBR,EAAQ,iBACjCA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACxBA,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACjE,OAAOA,EAAQ,cAAiB,SAChCQ,EAAO,aAAeC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,YAAY,EAAIA,EAAQ,aAExFQ,EAAO,aAAeC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,YAAY,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,aAAa,MAAQ,EAAGA,EAAQ,aAAa,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,cACpPA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACvDQ,EAAO,MAAQhB,EAAM,KAAK,WAAW,SAASQ,EAAQ,MAAOS,CAAO,GACpET,EAAQ,aAAeA,EAAQ,YAAY,OAAQ,CACnDQ,EAAO,YAAc,CAAC,EACtB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,YAAY,OAAQ,EAAEW,EAC9CH,EAAO,YAAYG,CAAC,EAAInB,EAAM,KAAK,mBAAmB,SAASQ,EAAQ,YAAYW,CAAC,EAAGF,CAAO,CACtG,CACA,GAAIT,EAAQ,eAAiBA,EAAQ,cAAc,OAAQ,CACvDQ,EAAO,cAAgB,CAAC,EACxB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,cAAc,OAAQ,EAAEW,EAChDH,EAAO,cAAcG,CAAC,EAAInB,EAAM,KAAK,uBAAuB,SAASQ,EAAQ,cAAcW,CAAC,EAAGF,CAAO,CAC9G,CACA,GAAIT,EAAQ,cAAgBA,EAAQ,aAAa,OAAQ,CACrDQ,EAAO,aAAe,CAAC,EACvB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,aAAa,OAAQ,EAAEW,EAC/CH,EAAO,aAAaG,CAAC,EAAInB,EAAM,KAAK,kBAAkB,SAASQ,EAAQ,aAAaW,CAAC,EAAGF,CAAO,CACvG,CACA,GAAIT,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAAU,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAInB,EAAM,KAAK,cAAc,SAASQ,EAAQ,UAAUW,CAAC,EAAGF,CAAO,CAC7F,CACA,OAAOD,CACX,EASAQ,EAAW,UAAU,OAAS,UAAkB,CAC5C,OAAO,KAAK,YAAY,SAAS,KAAM5B,GAAU,KAAK,aAAa,CACvE,EAUA4B,EAAW,WAAa,SAAoBJ,EAAe,CACvD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,kBAC3B,EAEOI,CACX,EAAG,EAEHvB,EAAK,uBAA0B,UAAW,CAkBtC,SAASwB,EAAuBpB,EAAY,CACxC,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAkB,EAAuB,UAAU,IAAM,GAQvCA,EAAuB,UAAU,MAAQ,GAUzCA,EAAuB,OAAS,SAAgBpB,EAAY,CACxD,OAAO,IAAIoB,EAAuBpB,CAAU,CAChD,EAWAoB,EAAuB,OAAS,SAAgBjB,EAASC,EAAQ,CAC7D,OAAKA,IACDA,EAASX,GAAQ,OAAO,GA
CxBU,EAAQ,KAAO,MAAQ,OAAO,eAAe,KAAKA,EAAS,KAAK,GAChEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,GAAG,EAC3DA,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,KAAK,EAC1DC,CACX,EAWAgB,EAAuB,gBAAkB,SAAyBjB,EAASC,EAAQ,CAC/E,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAgB,EAAuB,OAAS,SAAgBf,EAAQC,EAAQ,CACtDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,uBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,IAAME,EAAO,OAAO,EAC5B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,MAAQE,EAAO,OAAO,EAC9B,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAiB,EAAuB,gBAAkB,SAAyBf,EAAQ,CACtE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAe,EAAuB,OAAS,SAAgBjB,EAAS,CACrD,OAAI,OAAOA,GAAY,UAAYA,IAAY,KACpC,kBACPA,EAAQ,KAAO,MAAQA,EAAQ,eAAe,KAAK,GAC/C,CAACT,EAAM,SAASS,EAAQ,GAAG,EACpB,uBACXA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,GACnD,CAACT,EAAM,SAASS,EAAQ,KAAK,EACtB,yBACR,IACX,EAUAiB,EAAuB,WAAa,SAAoBT,EAAQ,CAC5D,GAAIA,aAAkBhB,EAAM,KAAK,uBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,uBAC7B,OAAIgB,EAAO,KAAO,OACdR,EAAQ,IAAM,OAAOQ,EAAO,GAAG,GAC/BA,EAAO,OAAS,OAChBR,EAAQ,MAAQ,OAAOQ,EAAO,KAAK,GAChCR,CACX,EAWAiB,EAAuB,SAAW,SAAkBjB,EAASS,EAAS,CAC7DA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,IAAM,GACbA,EAAO,MAAQ,IAEfR,EAAQ,KAAO,MAAQA,EAAQ,eAAe,KAAK,IACnDQ,EAAO,IAAMR,EAAQ,KACrBA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACvDQ,EAAO,MAAQR,EAAQ,OACpBQ,CACX,EASAS,EAAuB,UAAU,OAAS,UAAkB,CACxD,OAAO,KAAK,YAAY,SAAS,KAAM7B,GAAU,KAAK,aAAa,CACvE,EAUA6B,EAAuB,WAAa,SAAoBL,EAAe,CACnE,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,8BAC3B,EAEOK,CACX,EAAG,EAEHxB,EAAK,iBAAoB,UAAW,CAkBhC,SAASyB,EAAiBrB,EAAY,CAElC,GADA,KAAK,0BAA4B,CAAC,EAC9BA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAmB,EAAiB,UAAU,WAAa,GAQxCA,EAAiB,UAAU,0BAA4B3B,EAAM,WAU7D2B,EAAiB,OAAS,SAAgBrB,EAAY,CAClD,OAAO,IAAIqB,EAAiBrB,CAAU,CAC1C,EAWAqB,EAAiB,OAAS,SAAgBlB,EAASC,EAAQ,CAKvD,GAJKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,EAClEA,EAAQ,2BAA6B,MAAQA,EAAQ,0BAA0B,OAC/E,QAAS,EAAI,EAAG,EAAIA,EAAQ,0BAA0B,OAAQ,EAAE,EAC5DR,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,0BAA0B,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC/I,OAAOA,CACX,EAWAiB,EAAiB,gBAAkB,SAAyBlB,EAASC,EAAQ,CACzE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAiB,EAAiB,OAAS,SAAgBhB,EAAQC,EAAQ,CAChDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,iBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,WAAaE,EAAO,OAAO,EACnC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,2BAA6BA,EAAQ,0BAA0B,SACzEA,EAAQ,0BAA4B,CAAC,GACzCA,EAAQ,0BAA0B,KAAKR,EAAM,KAAK,uBAAuB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACxG,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAkB,EAAiB,gBAAkB,SAAyBhB,EAAQ,CAChE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAgB,EAAiB,OAAS,SAAgBlB,EAAS,CAC/C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,GAC7D,CAACT,EAAM,SAASS,EAAQ,UAAU,EAClC,MAAO,8BACf,GAAIA,EAAQ,2BAA6B,MAAQA,EAAQ,eAAe,2BAA2B,EAAG,CAClG,GAAI,CAAC,MAAM,QAAQA,EAAQ,yBAAyB,EAChD,MAAO,4CACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,0BAA0B,OAAQ,EAAED,EAAG,CAC/D,IAAIQ,EAAQf,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,0BAA0BD,CAAC,CAAC,EAC
zF,GAAIQ,EACA,MAAO,6BAA+BA,CAC9C,CACJ,CACA,OAAO,IACX,EAUAW,EAAiB,WAAa,SAAoBV,EAAQ,CACtD,GAAIA,aAAkBhB,EAAM,KAAK,iBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,iBAG7B,GAFIgB,EAAO,YAAc,OACrBR,EAAQ,WAAa,OAAOQ,EAAO,UAAU,GAC7CA,EAAO,0BAA2B,CAClC,GAAI,CAAC,MAAM,QAAQA,EAAO,yBAAyB,EAC/C,MAAM,UAAU,kEAAkE,EACtFR,EAAQ,0BAA4B,CAAC,EACrC,QAAS,EAAI,EAAG,EAAIQ,EAAO,0BAA0B,OAAQ,EAAE,EAAG,CAC9D,GAAI,OAAOA,EAAO,0BAA0B,CAAC,GAAM,SAC/C,MAAM,UAAU,mEAAmE,EACvFR,EAAQ,0BAA0B,CAAC,EAAIR,EAAM,KAAK,uBAAuB,WAAWgB,EAAO,0BAA0B,CAAC,CAAC,CAC3H,CACJ,CACA,OAAOR,CACX,EAWAkB,EAAiB,SAAW,SAAkBlB,EAASS,EAAS,CACvDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAOd,IANIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,0BAA4B,CAAC,GACpCC,EAAQ,WACRD,EAAO,WAAa,IACpBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAaR,EAAQ,YAC5BA,EAAQ,2BAA6BA,EAAQ,0BAA0B,OAAQ,CAC/EQ,EAAO,0BAA4B,CAAC,EACpC,QAASG,EAAI,EAAGA,EAAIX,EAAQ,0BAA0B,OAAQ,EAAEW,EAC5DH,EAAO,0BAA0BG,CAAC,EAAInB,EAAM,KAAK,uBAAuB,SAASQ,EAAQ,0BAA0BW,CAAC,EAAGF,CAAO,CACtI,CACA,OAAOD,CACX,EASAU,EAAiB,UAAU,OAAS,UAAkB,CAClD,OAAO,KAAK,YAAY,SAAS,KAAM9B,GAAU,KAAK,aAAa,CACvE,EAUA8B,EAAiB,WAAa,SAAoBN,EAAe,CAC7D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,wBAC3B,EAEOM,CACX,EAAG,EAEHzB,EAAK,WAAc,UAAW,CAyB1B,SAAS0B,EAAWtB,EAAY,CAQ5B,GAPA,KAAK,KAAO,CAAC,EACb,KAAK,YAAc,CAAC,EACpB,KAAK,kBAAoB,CAAC,EAC1B,KAAK,MAAQ,CAAC,EACd,KAAK,OAAS,CAAC,EACf,KAAK,UAAY,CAAC,EAClB,KAAK,uBAAyB,CAAC,EAC3BA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAoB,EAAW,UAAU,KAAO5B,EAAM,WAQlC4B,EAAW,UAAU,KAAO,GAQ5BA,EAAW,UAAU,YAAc5B,EAAM,WAQzC4B,EAAW,UAAU,kBAAoB5B,EAAM,WAQ/C4B,EAAW,UAAU,UAAY,GAQjCA,EAAW,UAAU,MAAQ5B,EAAM,WAQnC4B,EAAW,UAAU,OAAS5B,EAAM,WAQpC4B,EAAW,UAAU,UAAY5B,EAAM,WAQvC4B,EAAW,UAAU,uBAAyB5B,EAAM,WAUpD4B,EAAW,OAAS,SAAgBtB,EAAY,CAC5C,OAAO,IAAIsB,EAAWtB,CAAU,CACpC,EAWAsB,EAAW,OAAS,SAAgBnB,EAASC,EAAQ,CAGjD,GAFKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OACrC,QAAS,EAAI,EAAG,EAAIA,EAAQ,KAAK,OAAQ,EAAE,EACvCR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAK,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAG7G,GAFID,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,aAAe,MAAQA,EAAQ,YAAY,OACnD,QAAS,EAAI,EAAG,EAAIA,EAAQ,YAAY,OAAQ,EAAE,EAC9CR,EAAM,KAAK,YAAY,OAAOQ,EAAQ,YAAY,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAGtH,GAFID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA+B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EAClEA,EAAQ,OAAS,MAAQA,EAAQ,MAAM,OACvC,QAAS,EAAI,EAAG,EAAIA,EAAQ,MAAM,OAAQ,EAAE,EACxCR,EAAM,KAAK,eAAe,OAAOQ,EAAQ,MAAM,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACpH,GAAID,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OACzC,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzCR,EAAM,KAAK,eAAe,OAAOQ,EAAQ,OAAO,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACrH,GAAID,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAC/C,QAAS,EAAI,EAAG,EAAIA,EAAQ,UAAU,OAAQ,EAAE,EAC5CR,EAAM,KAAK,eAAe,OAAOQ,EAAQ,UAAU,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACzH,GAAID,EAAQ,wBAA0B,MAAQA,EAAQ,uBAAuB,OACzE,QAAS,EAAI,EAAG,EAAIA,EAAQ,uBAAuB,OAAQ,EAAE,EACzDR,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,uBAAuB,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACxI,GAAID,EAAQ,mBAAqB,MAAQA,EAAQ,kBAAkB,OAC/D,QAAS,EAAI,EAAG,EAAIA,EAAQ,kBAAkB,OAAQ,EAAE,EACpDR,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,kBAAkB,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACpI,OAAOA,CACX,EAWAkB,EAAW,gBAAkB,SAAyBnB,EAASC,EAAQ,CACnE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAkB,EAAW,OAAS,SAAgBjB,EAAQC,EAAQ,CAC1CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAM
D,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,WACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACML,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,GACpBA,EAAQ,KAAK,KAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,aAAeA,EAAQ,YAAY,SAC7CA,EAAQ,YAAc,CAAC,GAC3BA,EAAQ,YAAY,KAAKR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC/E,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,mBAAqBA,EAAQ,kBAAkB,SACzDA,EAAQ,kBAAoB,CAAC,GACjCA,EAAQ,kBAAkB,KAAKR,EAAM,KAAK,kBAAkB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC3F,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,OAASA,EAAQ,MAAM,SACjCA,EAAQ,MAAQ,CAAC,GACrBA,EAAQ,MAAM,KAAKR,EAAM,KAAK,eAAe,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC5E,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,GACtBA,EAAQ,OAAO,KAAKR,EAAM,KAAK,eAAe,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC7E,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,GACzBA,EAAQ,UAAU,KAAKR,EAAM,KAAK,eAAe,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAChF,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,wBAA0BA,EAAQ,uBAAuB,SACnEA,EAAQ,uBAAyB,CAAC,GACtCA,EAAQ,uBAAuB,KAAKR,EAAM,KAAK,iBAAiB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC/F,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAmB,EAAW,gBAAkB,SAAyBjB,EAAQ,CAC1D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAiB,EAAW,OAAS,SAAgBnB,EAAS,CACzC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,KAAK,OAAQ,EAAED,EAAG,CAC1C,IAAIQ,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAKD,CAAC,CAAC,EACvD,GAAIQ,EACA,MAAO,QAAUA,CACzB,CACJ,CACA,GAAIP,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,EAAG,CACtE,GAAI,CAAC,MAAM,QAAQA,EAAQ,WAAW,EAClC,MAAO,8BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,YAAY,OAAQ,EAAED,EAAG,CACjD,IAAIQ,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,YAAYD,CAAC,CAAC,EAChE,GAAIQ,EACA,MAAO,eAAiBA,CAChC,CACJ,CACA,GAAIP,EAAQ,mBAAqB,MAAQA,EAAQ,eAAe,mBAAmB,EAAG,CAClF,GAAI,CAAC,MAAM,QAAQA,EAAQ,iBAAiB,EACxC,MAAO,oCACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,kBAAkB,OAAQ,EAAED,EAAG,CACvD,IAAIQ,EAAQf,EAAM,KAAK,kBAAkB,OAAOQ,EAAQ,kBAAkBD,CAAC,CAAC,EAC5E,GAAIQ,EACA,MAAO,qBAAuBA,CACtC,CACJ,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,GAAI,CAAC,MAAM,QAAQA,EAAQ,KAAK,EAC5B,MAAO,wBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,MAAM,OAAQ,EAAED,EAAG,CAC3C,IAAIQ,EAAQf,EAAM,KAAK,eAAe,OAAOQ,EAAQ,MAAMD,CAAC,CAAC,EAC7D,GAAIQ,EACA,MAAO,SAAWA,CAC1B,CACJ,CACA,GAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,OAAO,OAAQ,EAAED,EAAG,CAC5C,IAAIQ,EAAQf,EAAM,KAAK,eAAe,OAAOQ,EAAQ,OAAOD,CAAC,CAAC,EAC9D,GAAIQ,EACA,MAAO,UAAYA,CAC3B,CACJ,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAAG,CAC/C,IAAIQ,EAAQf,EAAM,KAAK,eAAe,OAAOQ,EAAQ,UAAUD,CAAC,CAAC,EACjE,GAAIQ,EACA,MAAO,aAAeA,CAC9B,CACJ,CACA,GAAIP,EAAQ,wBAA0B,MAAQA,EAAQ,eAAe,wBAAwB,EAAG,CAC5F,GAAI,CAAC,MAAM,QAAQA,EAAQ,sBAAsB,EAC7C,MAAO,yCACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,uBAAuB,OAAQ,EAAED,EAAG,CAC5D,IAAIQ,EAAQf,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,uBAAuBD,CAAC,CAAC,EAChF,GAAIQ,EACA,MAAO,0BAA4BA,CAC3C,CACJ,CACA,OAAO,IACX,EAUAY,EAAW,WAAa,SAAoBX,EAAQ,CAChD,GAAIA,aAAkBhB,EAAM,K
AAK,WAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,WAC7B,GAAIgB,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,uCAAuC,EAC3DR,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAAG,CACzC,GAAI,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC1B,MAAM,UAAU,wCAAwC,EAC5DR,EAAQ,KAAK,CAAC,EAAIR,EAAM,KAAK,UAAU,WAAWgB,EAAO,KAAK,CAAC,CAAC,CACpE,CACJ,CAGA,GAFIA,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,YAAa,CACpB,GAAI,CAAC,MAAM,QAAQA,EAAO,WAAW,EACjC,MAAM,UAAU,8CAA8C,EAClER,EAAQ,YAAc,CAAC,EACvB,QAAS,EAAI,EAAG,EAAIQ,EAAO,YAAY,OAAQ,EAAE,EAAG,CAChD,GAAI,OAAOA,EAAO,YAAY,CAAC,GAAM,SACjC,MAAM,UAAU,+CAA+C,EACnER,EAAQ,YAAY,CAAC,EAAIR,EAAM,KAAK,YAAY,WAAWgB,EAAO,YAAY,CAAC,CAAC,CACpF,CACJ,CACA,GAAIA,EAAO,kBAAmB,CAC1B,GAAI,CAAC,MAAM,QAAQA,EAAO,iBAAiB,EACvC,MAAM,UAAU,oDAAoD,EACxER,EAAQ,kBAAoB,CAAC,EAC7B,QAAS,EAAI,EAAG,EAAIQ,EAAO,kBAAkB,OAAQ,EAAE,EAAG,CACtD,GAAI,OAAOA,EAAO,kBAAkB,CAAC,GAAM,SACvC,MAAM,UAAU,qDAAqD,EACzER,EAAQ,kBAAkB,CAAC,EAAIR,EAAM,KAAK,kBAAkB,WAAWgB,EAAO,kBAAkB,CAAC,CAAC,CACtG,CACJ,CAGA,GAFIA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GAC3CA,EAAO,MAAO,CACd,GAAI,CAAC,MAAM,QAAQA,EAAO,KAAK,EAC3B,MAAM,UAAU,wCAAwC,EAC5DR,EAAQ,MAAQ,CAAC,EACjB,QAAS,EAAI,EAAG,EAAIQ,EAAO,MAAM,OAAQ,EAAE,EAAG,CAC1C,GAAI,OAAOA,EAAO,MAAM,CAAC,GAAM,SAC3B,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,MAAM,CAAC,EAAIR,EAAM,KAAK,eAAe,WAAWgB,EAAO,MAAM,CAAC,CAAC,CAC3E,CACJ,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,yCAAyC,EAC7DR,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EAAG,CAC3C,GAAI,OAAOA,EAAO,OAAO,CAAC,GAAM,SAC5B,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,OAAO,CAAC,EAAIR,EAAM,KAAK,eAAe,WAAWgB,EAAO,OAAO,CAAC,CAAC,CAC7E,CACJ,CACA,GAAIA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,4CAA4C,EAChER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAAG,CAC9C,GAAI,OAAOA,EAAO,UAAU,CAAC,GAAM,SAC/B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,UAAU,CAAC,EAAIR,EAAM,KAAK,eAAe,WAAWgB,EAAO,UAAU,CAAC,CAAC,CACnF,CACJ,CACA,GAAIA,EAAO,uBAAwB,CAC/B,GAAI,CAAC,MAAM,QAAQA,EAAO,sBAAsB,EAC5C,MAAM,UAAU,yDAAyD,EAC7ER,EAAQ,uBAAyB,CAAC,EAClC,QAAS,EAAI,EAAG,EAAIQ,EAAO,uBAAuB,OAAQ,EAAE,EAAG,CAC3D,GAAI,OAAOA,EAAO,uBAAuB,CAAC,GAAM,SAC5C,MAAM,UAAU,0DAA0D,EAC9ER,EAAQ,uBAAuB,CAAC,EAAIR,EAAM,KAAK,iBAAiB,WAAWgB,EAAO,uBAAuB,CAAC,CAAC,CAC/G,CACJ,CACA,OAAOR,CACX,EAWAmB,EAAW,SAAW,SAAkBnB,EAASS,EAAS,CACjDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAcd,IAbIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,KAAO,CAAC,EACfA,EAAO,YAAc,CAAC,EACtBA,EAAO,MAAQ,CAAC,EAChBA,EAAO,OAAS,CAAC,EACjBA,EAAO,UAAY,CAAC,EACpBA,EAAO,uBAAyB,CAAC,EACjCA,EAAO,kBAAoB,CAAC,GAE5BC,EAAQ,WACRD,EAAO,KAAO,GACdA,EAAO,UAAY,IAEnBR,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACvCH,EAAO,KAAKG,CAAC,EAAInB,EAAM,KAAK,UAAU,SAASQ,EAAQ,KAAKW,CAAC,EAAGF,CAAO,CAC/E,CAGA,GAFIT,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,aAAeA,EAAQ,YAAY,OAAQ,CACnDQ,EAAO,YAAc,CAAC,EACtB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,YAAY,OAAQ,EAAEW,EAC9CH,EAAO,YAAYG,CAAC,EAAInB,EAAM,KAAK,YAAY,SAASQ,EAAQ,YAAYW,CAAC,EAAGF,CAAO,CAC/F,CAGA,GAFIT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,OAASA,EAAQ,MAAM,OAAQ,CACvCQ,EAAO,MAAQ,CAAC,EAChB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,MAAM,OAAQ,EAAEW,EACxCH,EAAO,MAAMG,CAAC,EAAInB,EAAM,KAAK,eAAe,SAASQ,EAAQ,MAAMW,CAAC,EAAGF,CAAO,CACtF,CACA,GAAIT,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAInB,EAAM,KAAK,eAAe,SAASQ,EAAQ,OAAOW,CAAC,EAAGF,CAAO,CACxF,CACA,GAAIT,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAA
Q,UAAU,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAInB,EAAM,KAAK,eAAe,SAASQ,EAAQ,UAAUW,CAAC,EAAGF,CAAO,CAC9F,CACA,GAAIT,EAAQ,wBAA0BA,EAAQ,uBAAuB,OAAQ,CACzEQ,EAAO,uBAAyB,CAAC,EACjC,QAASG,EAAI,EAAGA,EAAIX,EAAQ,uBAAuB,OAAQ,EAAEW,EACzDH,EAAO,uBAAuBG,CAAC,EAAInB,EAAM,KAAK,iBAAiB,SAASQ,EAAQ,uBAAuBW,CAAC,EAAGF,CAAO,CAC1H,CACA,GAAIT,EAAQ,mBAAqBA,EAAQ,kBAAkB,OAAQ,CAC/DQ,EAAO,kBAAoB,CAAC,EAC5B,QAASG,EAAI,EAAGA,EAAIX,EAAQ,kBAAkB,OAAQ,EAAEW,EACpDH,EAAO,kBAAkBG,CAAC,EAAInB,EAAM,KAAK,kBAAkB,SAASQ,EAAQ,kBAAkBW,CAAC,EAAGF,CAAO,CACjH,CACA,OAAOD,CACX,EASAW,EAAW,UAAU,OAAS,UAAkB,CAC5C,OAAO,KAAK,YAAY,SAAS,KAAM/B,GAAU,KAAK,aAAa,CACvE,EAUA+B,EAAW,WAAa,SAAoBP,EAAe,CACvD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,kBAC3B,EAEOO,CACX,EAAG,EAEH1B,EAAK,YAAe,UAAW,CA8B3B,SAAS2B,EAAYvB,EAAY,CAS7B,GARA,KAAK,KAAO,CAAC,EACb,KAAK,UAAY,CAAC,EAClB,KAAK,UAAY,CAAC,EAClB,KAAK,WAAa,CAAC,EACnB,KAAK,UAAY,CAAC,EAClB,KAAK,aAAe,CAAC,EACrB,KAAK,WAAa,CAAC,EACnB,KAAK,WAAa,CAAC,EACfA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAqB,EAAY,UAAU,KAAO7B,EAAM,WAQnC6B,EAAY,UAAU,SAAW,EAQjCA,EAAY,UAAU,QAAU,KAQhCA,EAAY,UAAU,UAAY7B,EAAM,WAQxC6B,EAAY,UAAU,UAAY7B,EAAM,WAQxC6B,EAAY,UAAU,WAAa7B,EAAM,WAQzC6B,EAAY,UAAU,UAAY7B,EAAM,WAQxC6B,EAAY,UAAU,KAAO,GAQ7BA,EAAY,UAAU,UAAY,GAQlCA,EAAY,UAAU,QAAU7B,EAAM,UAAU,CAAC,CAAC,EAQlD6B,EAAY,UAAU,aAAe7B,EAAM,WAQ3C6B,EAAY,UAAU,aAAe,EAQrCA,EAAY,UAAU,WAAa7B,EAAM,WAQzC6B,EAAY,UAAU,WAAa7B,EAAM,WAUzC6B,EAAY,OAAS,SAAgBvB,EAAY,CAC7C,OAAO,IAAIuB,EAAYvB,CAAU,CACrC,EAWAuB,EAAY,OAAS,SAAgBpB,EAASC,EAAQ,CAGlD,GAFKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OAAQ,CAC7CC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,KAAK,OAAQ,EAAE,EACvCC,EAAO,MAAMD,EAAQ,KAAK,CAAC,CAAC,EAChCC,EAAO,OAAO,CAClB,CAKA,GAJID,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,QAAQ,EAC/DA,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxER,EAAM,KAAK,YAAY,QAAQ,OAAOQ,EAAQ,QAASC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC/GD,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAAQ,CACvDC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,UAAU,OAAQ,EAAE,EAC5CC,EAAO,MAAMD,EAAQ,UAAU,CAAC,CAAC,EACrCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAAQ,CACvDC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,UAAU,OAAQ,EAAE,EAC5CC,EAAO,MAAMD,EAAQ,UAAU,CAAC,CAAC,EACrCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,YAAc,MAAQA,EAAQ,WAAW,OACjD,QAAS,EAAI,EAAG,EAAIA,EAAQ,WAAW,OAAQ,EAAE,EAC7CC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,WAAW,CAAC,CAAC,EAC5E,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAAQ,CACvDC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,UAAU,OAAQ,EAAE,EAC5CC,EAAO,MAAMD,EAAQ,UAAU,CAAC,CAAC,EACrCC,EAAO,OAAO,CAClB,CAKA,GAJID,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxEC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,OAAO,EAC9DA,EAAQ,YAAc,MAAQA,EAAQ,WAAW,OAAQ,CACzDC,EAAO,OAA+B,EAAE,EAAE,KAAK,EAC/C,QAAS,EAAI,EAAG,EAAID,EAAQ,WAAW,OAAQ,EAAE,EAC7CC,EAAO,OAAOD,EAAQ,WAAW,CAAC,CAAC,EACvCC,EAAO,OAAO,CAClB,CACA,GAAID,EAAQ,YAAc,MAAQA,EAAQ,WAAW,OAAQ,CACzDC,EAAO,OAA+B,EAAE,EAAE,KAAK,EAC/C,QAAS,EAAI,EAAG,EAAID,EAAQ,WAAW,OAAQ,EAAE,EAC7CC,EAAO,OAAOD,EAAQ,WAAW,CAAC,CAAC,EACvCC,EAAO,OAAO,CAClB,CAGA,GAFID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA+B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EAClEA,EAAQ,cAAgB,MAAQA,EAAQ,aAAa,OACrD,QAAS,EAAI,EAAG,EAAIA,EAAQ,aAAa,OAAQ,EAAE,EAC/CR,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,aAAa,CAAC,EAAGC,EAAO,OAA+B,GAAG,EAAE,KAAK,CAAC,EAAE,OAAO,EACpI,
OAAID,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFC,EAAO,OAA+B,GAAG,EAAE,MAAMD,EAAQ,YAAY,EAClEC,CACX,EAWAmB,EAAY,gBAAkB,SAAyBpB,EAASC,EAAQ,CACpE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAmB,EAAY,OAAS,SAAgBlB,EAAQC,EAAQ,CAC3CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,YACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CAGA,GAFML,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,IACfK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,OAEpCF,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,EACpC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUR,EAAM,KAAK,YAAY,QAAQ,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC/E,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,IACpBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,UAAU,KAAKE,EAAO,MAAM,CAAC,OAEzCF,EAAQ,UAAU,KAAKE,EAAO,MAAM,CAAC,EACzC,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,IACpBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,UAAU,KAAKE,EAAO,MAAM,CAAC,OAEzCF,EAAQ,UAAU,KAAKE,EAAO,MAAM,CAAC,EACzC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,YAAcA,EAAQ,WAAW,SAC3CA,EAAQ,WAAa,CAAC,GAC1BA,EAAQ,WAAW,KAAKE,EAAO,MAAM,CAAC,EACtC,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,IACpBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,UAAU,KAAKE,EAAO,MAAM,CAAC,OAEzCF,EAAQ,UAAU,KAAKE,EAAO,MAAM,CAAC,EACzC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUE,EAAO,MAAM,EAC/B,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,cAAgBA,EAAQ,aAAa,SAC/CA,EAAQ,aAAe,CAAC,GAC5BA,EAAQ,aAAa,KAAKR,EAAM,KAAK,uBAAuB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EAC3F,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,aAAeE,EAAO,MAAM,EACpC,KACJ,CACJ,IAAK,IAAI,CAGD,GAFMF,EAAQ,YAAcA,EAAQ,WAAW,SAC3CA,EAAQ,WAAa,CAAC,IACrBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,WAAW,KAAKE,EAAO,OAAO,CAAC,OAE3CF,EAAQ,WAAW,KAAKE,EAAO,OAAO,CAAC,EAC3C,KACJ,CACJ,IAAK,IAAI,CAGD,GAFMF,EAAQ,YAAcA,EAAQ,WAAW,SAC3CA,EAAQ,WAAa,CAAC,IACrBK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,WAAW,KAAKE,EAAO,OAAO,CAAC,OAE3CF,EAAQ,WAAW,KAAKE,EAAO,OAAO,CAAC,EAC3C,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAoB,EAAY,gBAAkB,SAAyBlB,EAAQ,CAC3D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAkB,EAAY,OAAS,SAAgBpB,EAAS,CAC1C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,KAAK,OAAQ,EAAED,EACvC,GAAI,CAACR,EAAM,UAAUS,EAAQ,KAAKD,CAAC,CAAC,GAAK,EAAEC,EAAQ,KAAKD,CAAC,GAAKR,EAAM,UAAUS,EAAQ,KAAKD,CAAC,EAAE,GAAG,GAAKR,EAAM,UAAUS,EAAQ,KAAKD,CAAC,EAAE,IAAI,GACtI,MAAO,+BACnB,CACA,GAAIC,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,GACzD,CAACT,EAAM,UAAUS,EAAQ,QAAQ,EACjC,MAAO,6BACf,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,IAAIO,EAAQf,EAAM,KAAK,YAAY,QAAQ,OAAOQ,EAAQ,OAAO,EACjE,GAAIO,EACA,MAAO,WAAaA,CAC5B,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAC5C,GAAI,OAAOC,EAAQ,UAAUD,CAAC,GAAM,SAChC,MAAO,8BACnB,CACA,GAAIC,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU
,OAAQ,EAAED,EAC5C,GAAI,CAACR,EAAM,UAAUS,EAAQ,UAAUD,CAAC,CAAC,EACrC,MAAO,+BACnB,CACA,GAAIC,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpE,GAAI,CAAC,MAAM,QAAQA,EAAQ,UAAU,EACjC,MAAO,6BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,WAAW,OAAQ,EAAED,EAC7C,GAAI,EAAEC,EAAQ,WAAWD,CAAC,GAAK,OAAOC,EAAQ,WAAWD,CAAC,EAAE,QAAW,UAAYR,EAAM,SAASS,EAAQ,WAAWD,CAAC,CAAC,GACnH,MAAO,+BACnB,CACA,GAAIC,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAC5C,GAAI,CAACR,EAAM,UAAUS,EAAQ,UAAUD,CAAC,CAAC,GAAK,EAAEC,EAAQ,UAAUD,CAAC,GAAKR,EAAM,UAAUS,EAAQ,UAAUD,CAAC,EAAE,GAAG,GAAKR,EAAM,UAAUS,EAAQ,UAAUD,CAAC,EAAE,IAAI,GAC1J,MAAO,oCACnB,CACA,GAAIC,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,GACvD,EAAEA,EAAQ,SAAW,OAAOA,EAAQ,QAAQ,QAAW,UAAYT,EAAM,SAASS,EAAQ,OAAO,GACjG,MAAO,2BACf,GAAIA,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,GAAI,CAAC,MAAM,QAAQA,EAAQ,YAAY,EACnC,MAAO,+BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,aAAa,OAAQ,EAAED,EAAG,CAClD,IAAIQ,EAAQf,EAAM,KAAK,uBAAuB,OAAOQ,EAAQ,aAAaD,CAAC,CAAC,EAC5E,GAAIQ,EACA,MAAO,gBAAkBA,CACjC,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EACrE,OAAQA,EAAQ,aAAc,CAC9B,QACI,MAAO,oCACX,IAAK,GACL,IAAK,GACD,KACJ,CACJ,GAAIA,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpE,GAAI,CAAC,MAAM,QAAQA,EAAQ,UAAU,EACjC,MAAO,6BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,WAAW,OAAQ,EAAED,EAC7C,GAAI,OAAOC,EAAQ,WAAWD,CAAC,GAAM,SACjC,MAAO,+BACnB,CACA,GAAIC,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpE,GAAI,CAAC,MAAM,QAAQA,EAAQ,UAAU,EACjC,MAAO,6BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,WAAW,OAAQ,EAAED,EAC7C,GAAI,CAACR,EAAM,UAAUS,EAAQ,WAAWD,CAAC,CAAC,GAAK,EAAEC,EAAQ,WAAWD,CAAC,GAAKR,EAAM,UAAUS,EAAQ,WAAWD,CAAC,EAAE,GAAG,GAAKR,EAAM,UAAUS,EAAQ,WAAWD,CAAC,EAAE,IAAI,GAC9J,MAAO,qCACnB,CACA,OAAO,IACX,EAUAqB,EAAY,WAAa,SAAoBZ,EAAQ,CACjD,GAAIA,aAAkBhB,EAAM,KAAK,YAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,YAC7B,GAAIgB,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,wCAAwC,EAC5DR,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAClCjB,EAAM,MACLS,EAAQ,KAAK,CAAC,EAAIT,EAAM,KAAK,UAAUiB,EAAO,KAAK,CAAC,CAAC,GAAG,SAAW,GAC/D,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAI,SAASQ,EAAO,KAAK,CAAC,EAAG,EAAE,EACxC,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAIQ,EAAO,KAAK,CAAC,EAC1B,OAAOA,EAAO,KAAK,CAAC,GAAM,WAC/BR,EAAQ,KAAK,CAAC,EAAI,IAAIT,EAAM,SAASiB,EAAO,KAAK,CAAC,EAAE,MAAQ,EAAGA,EAAO,KAAK,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAC/G,CAGA,GAFIA,EAAO,UAAY,OACnBR,EAAQ,SAAWQ,EAAO,SAAW,GACrCA,EAAO,SAAW,KAAM,CACxB,GAAI,OAAOA,EAAO,SAAY,SAC1B,MAAM,UAAU,4CAA4C,EAChER,EAAQ,QAAUR,EAAM,KAAK,YAAY,QAAQ,WAAWgB,EAAO,OAAO,CAC9E,CACA,GAAIA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAC3CR,EAAQ,UAAU,CAAC,EAAI,OAAOQ,EAAO,UAAU,CAAC,CAAC,CACzD,CACA,GAAIA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAC3CR,EAAQ,UAAU,CAAC,EAAIQ,EAAO,UAAU,CAAC,EAAI,CACrD,CACA,GAAIA,EAAO,WAAY,CACnB,GAAI,CAAC,MAAM,QAAQA,EAAO,UAAU,EAChC,MAAM,UAAU,8CAA8C,EAClER,EAAQ,WAAa,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIQ,EAAO,WAAW,OAAQ,EAAE,EACxC,OAAOA,EAAO,WAAW,CAAC,GAAM,SAChCjB,EAAM,OAAO,OAAOiB,EAAO,WAAW,CAAC,EAAGR,EAAQ,WAAW,CAAC,EAAIT,EAAM,UAAUA,EAAM,OAAO,OAAOiB,EAAO,WAAW,CAAC,CAAC,CAAC,EAAG,CAAC,EAC1HA,EAAO,WAAW,CAAC,EAAE,QAAU,IACpCR,EAAQ,WAAW,CAAC,EAAIQ,EAAO,WAAW,CAAC,EACvD,CACA,GAAIA,EAAO,UAAW,CAClB
,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EACvCjB,EAAM,MACLS,EAAQ,UAAU,CAAC,EAAIT,EAAM,KAAK,UAAUiB,EAAO,UAAU,CAAC,CAAC,GAAG,SAAW,GACzE,OAAOA,EAAO,UAAU,CAAC,GAAM,SACpCR,EAAQ,UAAU,CAAC,EAAI,SAASQ,EAAO,UAAU,CAAC,EAAG,EAAE,EAClD,OAAOA,EAAO,UAAU,CAAC,GAAM,SACpCR,EAAQ,UAAU,CAAC,EAAIQ,EAAO,UAAU,CAAC,EACpC,OAAOA,EAAO,UAAU,CAAC,GAAM,WACpCR,EAAQ,UAAU,CAAC,EAAI,IAAIT,EAAM,SAASiB,EAAO,UAAU,CAAC,EAAE,MAAQ,EAAGA,EAAO,UAAU,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAC9H,CAUA,GATIA,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GAC3CA,EAAO,SAAW,OACd,OAAOA,EAAO,SAAY,SAC1BjB,EAAM,OAAO,OAAOiB,EAAO,QAASR,EAAQ,QAAUT,EAAM,UAAUA,EAAM,OAAO,OAAOiB,EAAO,OAAO,CAAC,EAAG,CAAC,EACxGA,EAAO,QAAQ,QAAU,IAC9BR,EAAQ,QAAUQ,EAAO,UAC7BA,EAAO,aAAc,CACrB,GAAI,CAAC,MAAM,QAAQA,EAAO,YAAY,EAClC,MAAM,UAAU,gDAAgD,EACpER,EAAQ,aAAe,CAAC,EACxB,QAAS,EAAI,EAAG,EAAIQ,EAAO,aAAa,OAAQ,EAAE,EAAG,CACjD,GAAI,OAAOA,EAAO,aAAa,CAAC,GAAM,SAClC,MAAM,UAAU,iDAAiD,EACrER,EAAQ,aAAa,CAAC,EAAIR,EAAM,KAAK,uBAAuB,WAAWgB,EAAO,aAAa,CAAC,CAAC,CACjG,CACJ,CACA,OAAQA,EAAO,aAAc,CAC7B,QACI,GAAI,OAAOA,EAAO,cAAiB,SAAU,CACzCR,EAAQ,aAAeQ,EAAO,aAC9B,KACJ,CACA,MACJ,IAAK,UACL,IAAK,GACDR,EAAQ,aAAe,EACvB,MACJ,IAAK,WACL,IAAK,GACDA,EAAQ,aAAe,EACvB,KACJ,CACA,GAAIQ,EAAO,WAAY,CACnB,GAAI,CAAC,MAAM,QAAQA,EAAO,UAAU,EAChC,MAAM,UAAU,8CAA8C,EAClER,EAAQ,WAAa,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIQ,EAAO,WAAW,OAAQ,EAAE,EAC5CR,EAAQ,WAAW,CAAC,EAAI,OAAOQ,EAAO,WAAW,CAAC,CAAC,CAC3D,CACA,GAAIA,EAAO,WAAY,CACnB,GAAI,CAAC,MAAM,QAAQA,EAAO,UAAU,EAChC,MAAM,UAAU,8CAA8C,EAClER,EAAQ,WAAa,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIQ,EAAO,WAAW,OAAQ,EAAE,EACxCjB,EAAM,MACLS,EAAQ,WAAW,CAAC,EAAIT,EAAM,KAAK,UAAUiB,EAAO,WAAW,CAAC,CAAC,GAAG,SAAW,GAC3E,OAAOA,EAAO,WAAW,CAAC,GAAM,SACrCR,EAAQ,WAAW,CAAC,EAAI,SAASQ,EAAO,WAAW,CAAC,EAAG,EAAE,EACpD,OAAOA,EAAO,WAAW,CAAC,GAAM,SACrCR,EAAQ,WAAW,CAAC,EAAIQ,EAAO,WAAW,CAAC,EACtC,OAAOA,EAAO,WAAW,CAAC,GAAM,WACrCR,EAAQ,WAAW,CAAC,EAAI,IAAIT,EAAM,SAASiB,EAAO,WAAW,CAAC,EAAE,MAAQ,EAAGA,EAAO,WAAW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAI,EACrI,CACA,OAAOR,CACX,EAWAoB,EAAY,SAAW,SAAkBpB,EAASS,EAAS,CAClDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAyBd,IAxBIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,KAAO,CAAC,EACfA,EAAO,UAAY,CAAC,EACpBA,EAAO,UAAY,CAAC,EACpBA,EAAO,WAAa,CAAC,EACrBA,EAAO,UAAY,CAAC,EACpBA,EAAO,WAAa,CAAC,EACrBA,EAAO,WAAa,CAAC,EACrBA,EAAO,aAAe,CAAC,GAEvBC,EAAQ,WACRD,EAAO,SAAW,EAClBA,EAAO,QAAU,KACjBA,EAAO,KAAO,GACVC,EAAQ,QAAU,OAClBD,EAAO,QAAU,IAEjBA,EAAO,QAAU,CAAC,EACdC,EAAQ,QAAU,QAClBD,EAAO,QAAUjB,EAAM,UAAUiB,EAAO,OAAO,IAEvDA,EAAO,UAAY,GACnBA,EAAO,aAAeC,EAAQ,QAAU,OAAS,UAAY,GAE7DT,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACnC,OAAOX,EAAQ,KAAKW,CAAC,GAAM,SAC3BH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAAS,OAAOT,EAAQ,KAAKW,CAAC,CAAC,EAAIX,EAAQ,KAAKW,CAAC,EAEpFH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,KAAKW,CAAC,CAAC,EAAIF,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,KAAKW,CAAC,EAAE,MAAQ,EAAGX,EAAQ,KAAKW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIX,EAAQ,KAAKW,CAAC,CAC9O,CAKA,GAJIX,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,UAC1BA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUhB,EAAM,KAAK,YAAY,QAAQ,SAASQ,EAAQ,QAASS,CAAO,GACjFT,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAAU,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAIF,EAAQ,MAAQ,CAAC,SAAST,EAAQ,UAAUW,CAAC,CAAC,EAAI,OAAOX,EAAQ,UAAUW,CAAC,CAAC,EAAIX,EAAQ,UAAUW,CAAC,CAClI,CACA,GAAIX,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAA
U,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAIX,EAAQ,UAAUW,CAAC,CACjD,CACA,GAAIX,EAAQ,YAAcA,EAAQ,WAAW,OAAQ,CACjDQ,EAAO,WAAa,CAAC,EACrB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,WAAW,OAAQ,EAAEW,EAC7CH,EAAO,WAAWG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,OAAO,OAAOS,EAAQ,WAAWW,CAAC,EAAG,EAAGX,EAAQ,WAAWW,CAAC,EAAE,MAAM,EAAIF,EAAQ,QAAU,MAAQ,MAAM,UAAU,MAAM,KAAKT,EAAQ,WAAWW,CAAC,CAAC,EAAIX,EAAQ,WAAWW,CAAC,CAC1O,CACA,GAAIX,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAAU,OAAQ,EAAEW,EACxC,OAAOX,EAAQ,UAAUW,CAAC,GAAM,SAChCH,EAAO,UAAUG,CAAC,EAAIF,EAAQ,QAAU,OAAS,OAAOT,EAAQ,UAAUW,CAAC,CAAC,EAAIX,EAAQ,UAAUW,CAAC,EAEnGH,EAAO,UAAUG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,UAAUW,CAAC,CAAC,EAAIF,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,UAAUW,CAAC,EAAE,MAAQ,EAAGX,EAAQ,UAAUW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIX,EAAQ,UAAUW,CAAC,CACvQ,CAKA,GAJIX,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUC,EAAQ,QAAU,OAASlB,EAAM,OAAO,OAAOS,EAAQ,QAAS,EAAGA,EAAQ,QAAQ,MAAM,EAAIS,EAAQ,QAAU,MAAQ,MAAM,UAAU,MAAM,KAAKT,EAAQ,OAAO,EAAIA,EAAQ,SAC9LA,EAAQ,YAAcA,EAAQ,WAAW,OAAQ,CACjDQ,EAAO,WAAa,CAAC,EACrB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,WAAW,OAAQ,EAAEW,EAC7CH,EAAO,WAAWG,CAAC,EAAIF,EAAQ,MAAQ,CAAC,SAAST,EAAQ,WAAWW,CAAC,CAAC,EAAI,OAAOX,EAAQ,WAAWW,CAAC,CAAC,EAAIX,EAAQ,WAAWW,CAAC,CACtI,CACA,GAAIX,EAAQ,YAAcA,EAAQ,WAAW,OAAQ,CACjDQ,EAAO,WAAa,CAAC,EACrB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,WAAW,OAAQ,EAAEW,EACzC,OAAOX,EAAQ,WAAWW,CAAC,GAAM,SACjCH,EAAO,WAAWG,CAAC,EAAIF,EAAQ,QAAU,OAAS,OAAOT,EAAQ,WAAWW,CAAC,CAAC,EAAIX,EAAQ,WAAWW,CAAC,EAEtGH,EAAO,WAAWG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,WAAWW,CAAC,CAAC,EAAIF,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,WAAWW,CAAC,EAAE,MAAQ,EAAGX,EAAQ,WAAWW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAI,EAAIX,EAAQ,WAAWW,CAAC,CAChR,CAGA,GAFIX,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,cAAgBA,EAAQ,aAAa,OAAQ,CACrDQ,EAAO,aAAe,CAAC,EACvB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,aAAa,OAAQ,EAAEW,EAC/CH,EAAO,aAAaG,CAAC,EAAInB,EAAM,KAAK,uBAAuB,SAASQ,EAAQ,aAAaW,CAAC,EAAGF,CAAO,CAC5G,CACA,OAAIT,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAeC,EAAQ,QAAU,OAASjB,EAAM,KAAK,YAAY,aAAaQ,EAAQ,YAAY,IAAM,OAAYA,EAAQ,aAAeR,EAAM,KAAK,YAAY,aAAaQ,EAAQ,YAAY,EAAIA,EAAQ,cACnNQ,CACX,EASAY,EAAY,UAAU,OAAS,UAAkB,CAC7C,OAAO,KAAK,YAAY,SAAS,KAAMhC,GAAU,KAAK,aAAa,CACvE,EAUAgC,EAAY,WAAa,SAAoBR,EAAe,CACxD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,mBAC3B,EA4BAQ,EAAY,SAAY,UAAW,CAC/B,IAAI1B,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,WAAW,EAAI,EACtCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,MAAM,EAAI,EACjCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,OAAO,EAAI,EAClCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EACnCC,EAAOD,EAAW,CAAC,EAAI,MAAM,EAAI,EACjCC,EAAOD,EAAW,EAAE,EAAI,SAAS,EAAI,GACrCC,EAAOD,EAAW,EAAE,EAAI,QAAQ,EAAI,GACpCC,EAAOD,EAAW,EAAE,EAAI,QAAQ,EAAI,GACpCC,EAAOD,EAAW,EAAE,EAAI,QAAQ,EAAI,GACpCC,EAAOD,EAAW,EAAE,EAAI,WAAW,EAAI,GACvCC,EAAOD,EAAW,EAAE,EAAI,YAAY,EAAI,GACxCC,EAAOD,EAAW,EAAE,EAAI,UAAU,EAAI,GACtCC,EAAOD,EAAW,EAAE,EAAI,cAAc,EAAI,GAC1CC,EAAOD,EAAW,EAAE,EAAI,gBAAgB,EAAI,GAC5CC,EAAOD,EAAW,EAAE,EAAI,YAAY,EAAI,GACxCC,EAAOD,EAAW,EAAE,EAAI,gBAAgB,EAAI,GACrCC,CACX,EAAG,EAEHyB,EAAY,QAAW,UAAW,CAkB9B,SAASC,EAAQxB,EAAY,CACzB,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAG,EAAI,EAAG,EAAIC,EAAK,OAAQ,EAAE,EAC3DD,EAAWC,EAAK,CAAC,CAAC,GAAK,OACvB,KAAKA,EAAK,CAAC,CAAC,EAAID,EAAWC,EAAK,CAAC,CAAC,EAClD,CAQA,OAAAuB,EAAQ,UAAU,MAAQ9B,EAA
M,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAQxE8B,EAAQ,UAAU,IAAM9B,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAUtE8B,EAAQ,OAAS,SAAgBxB,EAAY,CACzC,OAAO,IAAIwB,EAAQxB,CAAU,CACjC,EAWAwB,EAAQ,OAAS,SAAgBrB,EAASC,EAAQ,CAC9C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpEC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,KAAK,EAC3DA,EAAQ,KAAO,MAAQ,OAAO,eAAe,KAAKA,EAAS,KAAK,GAChEC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,GAAG,EACvDC,CACX,EAWAoB,EAAQ,gBAAkB,SAAyBrB,EAASC,EAAQ,CAChE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAoB,EAAQ,OAAS,SAAgBnB,EAAQC,EAAQ,CACvCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,YAAY,QACjGU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,MAAQE,EAAO,MAAM,EAC7B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,IAAME,EAAO,MAAM,EAC3B,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAqB,EAAQ,gBAAkB,SAAyBnB,EAAQ,CACvD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAmB,EAAQ,OAAS,SAAgBrB,EAAS,CACtC,OAAI,OAAOA,GAAY,UAAYA,IAAY,KACpC,kBACPA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,GACnD,CAACT,EAAM,UAAUS,EAAQ,KAAK,GAAK,EAAEA,EAAQ,OAAST,EAAM,UAAUS,EAAQ,MAAM,GAAG,GAAKT,EAAM,UAAUS,EAAQ,MAAM,IAAI,GACvH,+BACXA,EAAQ,KAAO,MAAQA,EAAQ,eAAe,KAAK,GAC/C,CAACT,EAAM,UAAUS,EAAQ,GAAG,GAAK,EAAEA,EAAQ,KAAOT,EAAM,UAAUS,EAAQ,IAAI,GAAG,GAAKT,EAAM,UAAUS,EAAQ,IAAI,IAAI,GAC/G,6BACR,IACX,EAUAqB,EAAQ,WAAa,SAAoBb,EAAQ,CAC7C,GAAIA,aAAkBhB,EAAM,KAAK,YAAY,QACzC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,YAAY,QACzC,OAAIgB,EAAO,OAAS,OACZjB,EAAM,MACLS,EAAQ,MAAQT,EAAM,KAAK,UAAUiB,EAAO,KAAK,GAAG,SAAW,GAC3D,OAAOA,EAAO,OAAU,SAC7BR,EAAQ,MAAQ,SAASQ,EAAO,MAAO,EAAE,EACpC,OAAOA,EAAO,OAAU,SAC7BR,EAAQ,MAAQQ,EAAO,MAClB,OAAOA,EAAO,OAAU,WAC7BR,EAAQ,MAAQ,IAAIT,EAAM,SAASiB,EAAO,MAAM,MAAQ,EAAGA,EAAO,MAAM,OAAS,CAAC,EAAE,SAAS,IACjGA,EAAO,KAAO,OACVjB,EAAM,MACLS,EAAQ,IAAMT,EAAM,KAAK,UAAUiB,EAAO,GAAG,GAAG,SAAW,GACvD,OAAOA,EAAO,KAAQ,SAC3BR,EAAQ,IAAM,SAASQ,EAAO,IAAK,EAAE,EAChC,OAAOA,EAAO,KAAQ,SAC3BR,EAAQ,IAAMQ,EAAO,IAChB,OAAOA,EAAO,KAAQ,WAC3BR,EAAQ,IAAM,IAAIT,EAAM,SAASiB,EAAO,IAAI,MAAQ,EAAGA,EAAO,IAAI,OAAS,CAAC,EAAE,SAAS,IACxFR,CACX,EAWAqB,EAAQ,SAAW,SAAkBrB,EAASS,EAAS,CAC9CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,GAAIC,EAAQ,SAAU,CAClB,GAAIlB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,MAAQC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CAC7G,MACIF,EAAO,MAAQC,EAAQ,QAAU,OAAS,IAAM,EACpD,GAAIlB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,IAAMC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CAC3G,MACIF,EAAO,IAAMC,EAAQ,QAAU,OAAS,IAAM,CACtD,CACA,OAAIT,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACnD,OAAOA,EAAQ,OAAU,SACzBQ,EAAO,MAAQC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,KAAK,EAAIA,EAAQ,MAE1EQ,EAAO,MAAQC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,KAAK,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,MAAM,MAAQ,EAAGA,EAAQ,MAAM,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,OACxNA,EAAQ,KAAO,MAAQA,EAAQ,eAAe,KAAK,IAC/C,OAAOA,EAAQ,KAAQ,SACvBQ,EAAO,IAAMC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,GAAG,EAAIA,EAAQ,IAEtEQ,EAAO,IAAMC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,GAAG,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,IAAI,MAAQ,EAAGA,EAAQ,IAAI,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,KAC7MQ,CACX,EASAa,EAAQ,UAAU,OAAS,UAAkB,CACzC,OAAO,KAAK,YAAY,SAAS,KAAMjC,GAAU,KAAK,aAAa,CACvE,EAUAiC,EAAQ,WAAa,SAAoBT,EAAe,CACpD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,2BAC3B,EAEOS,CACX,EAAG,EASHD,EAAY,aAAgB,UAAW,CACnC,IAAI1B,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAA
W,CAAC,EAAI,SAAS,EAAI,EACpCC,EAAOD,EAAW,CAAC,EAAI,UAAU,EAAI,EAC9BC,CACX,EAAG,EAEIyB,CACX,EAAG,EAEH3B,EAAK,kBAAqB,UAAW,CAmBjC,SAAS6B,EAAkBzB,EAAY,CAEnC,GADA,KAAK,KAAO,CAAC,EACTA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAuB,EAAkB,UAAU,OAAS,KAQrCA,EAAkB,UAAU,QAAU,KAQtCA,EAAkB,UAAU,KAAO/B,EAAM,WAUzC+B,EAAkB,OAAS,SAAgBzB,EAAY,CACnD,OAAO,IAAIyB,EAAkBzB,CAAU,CAC3C,EAWAyB,EAAkB,OAAS,SAAgBtB,EAASC,EAAQ,CAOxD,GANKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtER,EAAM,KAAK,YAAY,OAAOQ,EAAQ,OAAQC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACtGD,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxER,EAAM,KAAK,YAAY,OAAOQ,EAAQ,QAASC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACvGD,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OAAQ,CAC7CC,EAAO,OAA8B,EAAE,EAAE,KAAK,EAC9C,QAAS,EAAI,EAAG,EAAID,EAAQ,KAAK,OAAQ,EAAE,EACvCC,EAAO,MAAMD,EAAQ,KAAK,CAAC,CAAC,EAChCC,EAAO,OAAO,CAClB,CACA,OAAOA,CACX,EAWAqB,EAAkB,gBAAkB,SAAyBtB,EAASC,EAAQ,CAC1E,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAqB,EAAkB,OAAS,SAAgBpB,EAAQC,EAAQ,CACjDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,kBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,OAASR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACtE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUR,EAAM,KAAK,YAAY,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACvE,KACJ,CACJ,IAAK,GAAG,CAGA,GAFMF,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,IACfK,EAAM,KAAO,EAEd,QADIC,EAAOJ,EAAO,OAAO,EAAIA,EAAO,IAC7BA,EAAO,IAAMI,GAChBN,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,OAEpCF,EAAQ,KAAK,KAAKE,EAAO,MAAM,CAAC,EACpC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAsB,EAAkB,gBAAkB,SAAyBpB,EAAQ,CACjE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAoB,EAAkB,OAAS,SAAgBtB,EAAS,CAChD,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,IAAIO,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,MAAM,EACxD,GAAIO,EACA,MAAO,UAAYA,CAC3B,CACA,GAAIP,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,IAAIO,EAAQf,EAAM,KAAK,YAAY,OAAOQ,EAAQ,OAAO,EACzD,GAAIO,EACA,MAAO,WAAaA,CAC5B,CACA,GAAIP,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAAS,EAAI,EAAG,EAAIA,EAAQ,KAAK,OAAQ,EAAE,EACvC,GAAI,CAACT,EAAM,UAAUS,EAAQ,KAAK,CAAC,CAAC,GAAK,EAAEA,EAAQ,KAAK,CAAC,GAAKT,EAAM,UAAUS,EAAQ,KAAK,CAAC,EAAE,GAAG,GAAKT,EAAM,UAAUS,EAAQ,KAAK,CAAC,EAAE,IAAI,GACtI,MAAO,+BACnB,CACA,OAAO,IACX,EAUAsB,EAAkB,WAAa,SAAoBd,EAAQ,CACvD,GAAIA,aAAkBhB,EAAM,KAAK,kBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,kBAC7B,GAAIgB,EAAO,QAAU,KAAM,CACvB,GAAI,OAAOA,EAAO,QAAW,SACzB,MAAM,UAAU,iDAAiD,EACrER,EAAQ,OAASR,EAAM,KAAK,YAAY,WAAWgB,EAAO,MAAM,CACpE,CACA,GAAIA,EAAO,SAAW,KAAM,CACxB,GAAI,OAAOA,EAAO,SAAY,SAC1B,MAAM,UAAU,kDAAkD,EACtER,EAAQ,QAAUR,EAAM,KAAK,YAAY,WAAWgB,EAAO,OAAO,CACtE,CACA,GAAIA,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,8CAA8C,EAClER,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAClCjB,EAAM,MACLS,EAAQ,KAAK,CAAC,EAAIT,EAAM,KAAK,UAAUiB,EAAO,KAAK,CAAC,CAAC,GAAG,SAAW,GAC/D,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAI,SAASQ,EAAO,KAAK,CAAC,EAAG,EAAE,EACxC,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC/BR,EAAQ,KAAK,CAAC,EAAIQ,EAAO,KAAK,CAAC,EAC1B,OAAOA,EAAO,KAAK,CAAC,GAAM,WAC/BR,EAAQ,KAAK,CAAC,EAAI,IAAIT,EAAM,SAASiB,EAAO,KAAK,CAAC,EAAE,MAAQ,EAAGA,EAAO,KAAK,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAC/G,CACA,OAAOR,CACX,EAWAsB,EAAkB,SAAW,
SAAkBtB,EAASS,EAAS,CACxDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAWd,IAVIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,KAAO,CAAC,GACfC,EAAQ,WACRD,EAAO,OAAS,KAChBA,EAAO,QAAU,MAEjBR,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAAShB,EAAM,KAAK,YAAY,SAASQ,EAAQ,OAAQS,CAAO,GACvET,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUhB,EAAM,KAAK,YAAY,SAASQ,EAAQ,QAASS,CAAO,GACzET,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACnC,OAAOX,EAAQ,KAAKW,CAAC,GAAM,SAC3BH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAAS,OAAOT,EAAQ,KAAKW,CAAC,CAAC,EAAIX,EAAQ,KAAKW,CAAC,EAEpFH,EAAO,KAAKG,CAAC,EAAIF,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,KAAKW,CAAC,CAAC,EAAIF,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,KAAKW,CAAC,EAAE,MAAQ,EAAGX,EAAQ,KAAKW,CAAC,EAAE,OAAS,CAAC,EAAE,SAAS,EAAIX,EAAQ,KAAKW,CAAC,CAC9O,CACA,OAAOH,CACX,EASAc,EAAkB,UAAU,OAAS,UAAkB,CACnD,OAAO,KAAK,YAAY,SAAS,KAAMlC,GAAU,KAAK,aAAa,CACvE,EAUAkC,EAAkB,WAAa,SAAoBV,EAAe,CAC9D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,yBAC3B,EAEOU,CACX,EAAG,EAEH7B,EAAK,iBAAoB,UAAW,CAiBhC,SAAS8B,EAAiB1B,EAAY,CAElC,GADA,KAAK,IAAM,CAAC,EACRA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAwB,EAAiB,UAAU,IAAMhC,EAAM,WAUvCgC,EAAiB,OAAS,SAAgB1B,EAAY,CAClD,OAAO,IAAI0B,EAAiB1B,CAAU,CAC1C,EAWA0B,EAAiB,OAAS,SAAgBvB,EAASC,EAAQ,CAGvD,GAFKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,KAAO,MAAQA,EAAQ,IAAI,OACnC,QAAS,EAAI,EAAG,EAAIA,EAAQ,IAAI,OAAQ,EAAE,EACtCR,EAAM,KAAK,iBAAiB,UAAU,OAAOQ,EAAQ,IAAI,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC7H,OAAOA,CACX,EAWAsB,EAAiB,gBAAkB,SAAyBvB,EAASC,EAAQ,CACzE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAsB,EAAiB,OAAS,SAAgBrB,EAAQC,EAAQ,CAChDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,iBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACML,EAAQ,KAAOA,EAAQ,IAAI,SAC7BA,EAAQ,IAAM,CAAC,GACnBA,EAAQ,IAAI,KAAKR,EAAM,KAAK,iBAAiB,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtF,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAuB,EAAiB,gBAAkB,SAAyBrB,EAAQ,CAChE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAqB,EAAiB,OAAS,SAAgBvB,EAAS,CAC/C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,KAAO,MAAQA,EAAQ,eAAe,KAAK,EAAG,CACtD,GAAI,CAAC,MAAM,QAAQA,EAAQ,GAAG,EAC1B,MAAO,sBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,IAAI,OAAQ,EAAED,EAAG,CACzC,IAAIQ,EAAQf,EAAM,KAAK,iBAAiB,UAAU,OAAOQ,EAAQ,IAAID,CAAC,CAAC,EACvE,GAAIQ,EACA,MAAO,OAASA,CACxB,CACJ,CACA,OAAO,IACX,EAUAgB,EAAiB,WAAa,SAAoBf,EAAQ,CACtD,GAAIA,aAAkBhB,EAAM,KAAK,iBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,iBAC7B,GAAIgB,EAAO,IAAK,CACZ,GAAI,CAAC,MAAM,QAAQA,EAAO,GAAG,EACzB,MAAM,UAAU,4CAA4C,EAChER,EAAQ,IAAM,CAAC,EACf,QAAS,EAAI,EAAG,EAAIQ,EAAO,IAAI,OAAQ,EAAE,EAAG,CACxC,GAAI,OAAOA,EAAO,IAAI,CAAC,GAAM,SACzB,MAAM,UAAU,6CAA6C,EACjER,EAAQ,IAAI,CAAC,EAAIR,EAAM,KAAK,iBAAiB,UAAU,WAAWgB,EAAO,IAAI,CAAC,CAAC,CACnF,CACJ,CACA,OAAOR,CACX,EAWAuB,EAAiB,SAAW,SAAkBvB,EAASS,EAAS,CACvDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAGd,IAFIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,IAAM,CAAC,GACdR,EAAQ,KAAOA,EAAQ,IAAI,OAAQ,CACnCQ,EAAO,IAAM,CAAC,EACd,QAASG,EAAI,EAAGA,EAAIX,EAAQ,IAAI,OAAQ,EAAEW,EACtCH,EAAO,IAAIG,CAAC,EAAInB,EAAM,KAAK,iBAAiB,UAAU,SAASQ,EAAQ,IAAIW,CAAC,EAAGF,CAAO,CAC9F,CACA,OAAOD,CACX,EASAe,EAAiB,UAAU,OAAS,UAAkB,CAClD,OAAO,KAAK,YAAY,SAAS,KAAMnC,GAAU,KAAK,aAAa,CACvE,EAUAmC,EAAiB,WAAa,SAAoBX,EAAe,CAC7D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,wBA
C3B,EAEAW,EAAiB,UAAa,UAAW,CAmBrC,SAASC,EAAU3B,EAAY,CAC3B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQAyB,EAAU,UAAU,SAAW,KAQ/BA,EAAU,UAAU,SAAW,KAQ/BA,EAAU,UAAU,WAAa,GAGjC,IAAIC,EAQJ,cAAO,eAAeD,EAAU,UAAW,QAAS,CAChD,IAAKjC,EAAM,YAAYkC,EAAe,CAAC,WAAY,UAAU,CAAC,EAC9D,IAAKlC,EAAM,YAAYkC,CAAY,CACvC,CAAC,EAUDD,EAAU,OAAS,SAAgB3B,EAAY,CAC3C,OAAO,IAAI2B,EAAU3B,CAAU,CACnC,EAWA2B,EAAU,OAAS,SAAgBxB,EAASC,EAAQ,CAChD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,QAAQ,EAC9DA,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,QAAQ,EAChEA,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,EAC/DC,CACX,EAWAuB,EAAU,gBAAkB,SAAyBxB,EAASC,EAAQ,CAClE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAuB,EAAU,OAAS,SAAgBtB,EAAQC,EAAQ,CACzCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,iBAAiB,UACtGU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,SAAWE,EAAO,OAAO,EACjC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,WAAaE,EAAO,OAAO,EACnC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAwB,EAAU,gBAAkB,SAAyBtB,EAAQ,CACzD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAsB,EAAU,OAAS,SAAgBxB,EAAS,CACxC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,IAAIH,EAAa,CAAC,EAClB,GAAIG,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DH,EAAW,MAAQ,EACf,CAACN,EAAM,UAAUS,EAAQ,QAAQ,GAAK,EAAEA,EAAQ,UAAYT,EAAM,UAAUS,EAAQ,SAAS,GAAG,GAAKT,EAAM,UAAUS,EAAQ,SAAS,IAAI,IAC1I,MAAO,kCAEf,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,EAAG,CAChE,GAAIH,EAAW,QAAU,EACrB,MAAO,yBAEX,GADAA,EAAW,MAAQ,EACf,CAACN,EAAM,SAASS,EAAQ,QAAQ,EAChC,MAAO,2BACf,CACA,OAAIA,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,GAC7D,CAACT,EAAM,SAASS,EAAQ,UAAU,EAC3B,8BACR,IACX,EAUAwB,EAAU,WAAa,SAAoBhB,EAAQ,CAC/C,GAAIA,aAAkBhB,EAAM,KAAK,iBAAiB,UAC9C,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,iBAAiB,UAC9C,OAAIgB,EAAO,UAAY,OACfjB,EAAM,MACLS,EAAQ,SAAWT,EAAM,KAAK,UAAUiB,EAAO,QAAQ,GAAG,SAAW,GACjE,OAAOA,EAAO,UAAa,SAChCR,EAAQ,SAAW,SAASQ,EAAO,SAAU,EAAE,EAC1C,OAAOA,EAAO,UAAa,SAChCR,EAAQ,SAAWQ,EAAO,SACrB,OAAOA,EAAO,UAAa,WAChCR,EAAQ,SAAW,IAAIT,EAAM,SAASiB,EAAO,SAAS,MAAQ,EAAGA,EAAO,SAAS,OAAS,CAAC,EAAE,SAAS,IAC1GA,EAAO,UAAY,OACnBR,EAAQ,SAAW,OAAOQ,EAAO,QAAQ,GACzCA,EAAO,YAAc,OACrBR,EAAQ,WAAa,OAAOQ,EAAO,UAAU,GAC1CR,CACX,EAWAwB,EAAU,SAAW,SAAkBxB,EAASS,EAAS,CAChDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,WAAa,IACpBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IACzD,OAAOA,EAAQ,UAAa,SAC5BQ,EAAO,SAAWC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,QAAQ,EAAIA,EAAQ,SAEhFQ,EAAO,SAAWC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,QAAQ,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,SAAS,MAAQ,EAAGA,EAAQ,SAAS,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,SAChOS,EAAQ,SACRD,EAAO,MAAQ,aAEnBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,SACtBS,EAAQ,SACRD,EAAO,MAAQ,aAEnBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAaR,EAAQ,YACzBQ,CACX,EASAgB,EAAU,UAAU,OAAS,UAAkB,CAC3C,OAAO,KAAK,YAAY,SAAS,KAAMpC,GAAU,KAAK,aAAa,CACvE,EAUAoC,EAAU,WAAa,SAAoBZ,EAAe,CACtD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,kCAC3B,EAEOY,CACX,EAAG,EAEID,CACX,EAAG,EAEH9B,EAAK,UAAa,UAAW,CAsBzB,SAASiC,EAAU7B,EAAY,CAC3B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAG,EAAI,EAAG,EAAIC,EAAK,OAAQ,EAAE,EAC3DD,EAAWC,EAAK,CAAC,CAAC,GAAK,OACvB,KAAKA,EAAK,CAAC,CAA
C,EAAID,EAAWC,EAAK,CAAC,CAAC,EAClD,CAQA4B,EAAU,UAAU,WAAa,KAQjCA,EAAU,UAAU,aAAe,KAQnCA,EAAU,UAAU,QAAU,KAQ9BA,EAAU,UAAU,aAAe,KAQnCA,EAAU,UAAU,iBAAmB,KAQvCA,EAAU,UAAU,WAAa,GAGjC,IAAID,EAQJ,cAAO,eAAeC,EAAU,UAAW,QAAS,CAChD,IAAKnC,EAAM,YAAYkC,EAAe,CAAC,aAAc,eAAgB,UAAW,eAAgB,kBAAkB,CAAC,EACnH,IAAKlC,EAAM,YAAYkC,CAAY,CACvC,CAAC,EAUDC,EAAU,OAAS,SAAgB7B,EAAY,CAC3C,OAAO,IAAI6B,EAAU7B,CAAU,CACnC,EAWA6B,EAAU,OAAS,SAAgB1B,EAASC,EAAQ,CAChD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9ER,EAAM,KAAK,UAAU,OAAO,OAAOQ,EAAQ,WAAYC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC/GD,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFR,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,aAAcC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnHD,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxER,EAAM,KAAK,UAAU,IAAI,OAAOQ,EAAQ,QAASC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACzGD,EAAQ,YAAc,MAAQ,OAAO,eAAe,KAAKA,EAAS,YAAY,GAC9EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,EAClEA,EAAQ,kBAAoB,MAAQ,OAAO,eAAe,KAAKA,EAAS,kBAAkB,GAC1FR,EAAM,KAAK,UAAU,aAAa,OAAOQ,EAAQ,iBAAkBC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC3HD,EAAQ,cAAgB,MAAQ,OAAO,eAAe,KAAKA,EAAS,cAAc,GAClFR,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,aAAcC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAChHA,CACX,EAWAyB,EAAU,gBAAkB,SAAyB1B,EAASC,EAAQ,CAClE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAyB,EAAU,OAAS,SAAgBxB,EAAQC,EAAQ,CACzCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,WAAaR,EAAM,KAAK,UAAU,OAAO,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC/E,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACnF,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUR,EAAM,KAAK,UAAU,IAAI,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACzE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACnF,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,iBAAmBR,EAAM,KAAK,UAAU,aAAa,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC3F,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,WAAaE,EAAO,OAAO,EACnC,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA0B,EAAU,gBAAkB,SAAyBxB,EAAQ,CACzD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAwB,EAAU,OAAS,SAAgB1B,EAAS,CACxC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,IAAIH,EAAa,CAAC,EAClB,GAAIG,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,EAAG,CACpEH,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,OAAO,OAAOQ,EAAQ,UAAU,EACjE,GAAIO,EACA,MAAO,cAAgBA,CAC/B,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,YAAY,EACrE,GAAIO,EACA,MAAO,gBAAkBA,CACjC,CACJ,CACA,GAAIP,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,EAAG,CAC9D,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,IAAI,OAAOQ,EAAQ,OAAO,EAC3D,GAAIO,EACA,MAAO,WAAaA,CAC5B,CACJ,CACA,GAAIP,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,EAAG,CACxE,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,SAAS,OAAOQ,EAAQ,YAAY,EACrE,GAAIO,EACA,MAAO,gBAAkBA,CACjC,CACJ,CACA,GAAIP,EAAQ,kBAAoB,MAAQA,EAAQ,eAAe,kBAAkB,EAAG,CAChF,GAAIH,EAAW,QAAU,EACrB,MAAO,yBACXA,EAAW,MAAQ,EACnB,CACI,IAAIU,EAAQf,EAAM,KAAK,UAAU,aAAa,OAAOQ,EAAQ,gBAAgB,EAC7E,GAAIO,EACA,MAAO,oBAAsBA,CACrC,CACJ,CACA,OAAIP,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,GAC7D,CAACT,EAAM,SAASS,EAAQ,UAAU,EAC3B,8BACR,IACX,EAUA0B,EAAU,WAAa,SAAoBlB,EAAQ,CAC/C,GAAIA,aAAkBhB,EAAM,KAAK,UAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAC7B,GAAIgB,EAAO,YAAc,KAAM,CAC3B,GAAI,OAAOA,EAAO,YAAe,SAC7
B,MAAM,UAAU,6CAA6C,EACjER,EAAQ,WAAaR,EAAM,KAAK,UAAU,OAAO,WAAWgB,EAAO,UAAU,CACjF,CACA,GAAIA,EAAO,cAAgB,KAAM,CAC7B,GAAI,OAAOA,EAAO,cAAiB,SAC/B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,WAAWgB,EAAO,YAAY,CACvF,CACA,GAAIA,EAAO,SAAW,KAAM,CACxB,GAAI,OAAOA,EAAO,SAAY,SAC1B,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,QAAUR,EAAM,KAAK,UAAU,IAAI,WAAWgB,EAAO,OAAO,CACxE,CACA,GAAIA,EAAO,cAAgB,KAAM,CAC7B,GAAI,OAAOA,EAAO,cAAiB,SAC/B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,aAAeR,EAAM,KAAK,UAAU,SAAS,WAAWgB,EAAO,YAAY,CACvF,CACA,GAAIA,EAAO,kBAAoB,KAAM,CACjC,GAAI,OAAOA,EAAO,kBAAqB,SACnC,MAAM,UAAU,mDAAmD,EACvER,EAAQ,iBAAmBR,EAAM,KAAK,UAAU,aAAa,WAAWgB,EAAO,gBAAgB,CACnG,CACA,OAAIA,EAAO,YAAc,OACrBR,EAAQ,WAAa,OAAOQ,EAAO,UAAU,GAC1CR,CACX,EAWA0B,EAAU,SAAW,SAAkB1B,EAASS,EAAS,CAChDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,WAAa,IACpBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAahB,EAAM,KAAK,UAAU,OAAO,SAASQ,EAAQ,WAAYS,CAAO,EAChFA,EAAQ,SACRD,EAAO,MAAQ,eAEnBR,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAehB,EAAM,KAAK,UAAU,SAAS,SAASQ,EAAQ,aAAcS,CAAO,EACtFA,EAAQ,SACRD,EAAO,MAAQ,iBAEnBR,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUhB,EAAM,KAAK,UAAU,IAAI,SAASQ,EAAQ,QAASS,CAAO,EACvEA,EAAQ,SACRD,EAAO,MAAQ,YAEnBR,EAAQ,YAAc,MAAQA,EAAQ,eAAe,YAAY,IACjEQ,EAAO,WAAaR,EAAQ,YAC5BA,EAAQ,kBAAoB,MAAQA,EAAQ,eAAe,kBAAkB,IAC7EQ,EAAO,iBAAmBhB,EAAM,KAAK,UAAU,aAAa,SAASQ,EAAQ,iBAAkBS,CAAO,EAClGA,EAAQ,SACRD,EAAO,MAAQ,qBAEnBR,EAAQ,cAAgB,MAAQA,EAAQ,eAAe,cAAc,IACrEQ,EAAO,aAAehB,EAAM,KAAK,UAAU,SAAS,SAASQ,EAAQ,aAAcS,CAAO,EACtFA,EAAQ,SACRD,EAAO,MAAQ,iBAEhBA,CACX,EASAkB,EAAU,UAAU,OAAS,UAAkB,CAC3C,OAAO,KAAK,YAAY,SAAS,KAAMtC,GAAU,KAAK,aAAa,CACvE,EAUAsC,EAAU,WAAa,SAAoBd,EAAe,CACtD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,iBAC3B,EAEAc,EAAU,OAAU,UAAW,CAkB3B,SAASC,EAAO9B,EAAY,CACxB,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA4B,EAAO,UAAU,SAAW,EAQ5BA,EAAO,UAAU,MAAQ,KAUzBA,EAAO,OAAS,SAAgB9B,EAAY,CACxC,OAAO,IAAI8B,EAAO9B,CAAU,CAChC,EAWA8B,EAAO,OAAS,SAAgB3B,EAASC,EAAQ,CAC7C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,QAAQ,EAC9DA,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpER,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,MAAOC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACvGA,CACX,EAWA0B,EAAO,gBAAkB,SAAyB3B,EAASC,EAAQ,CAC/D,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA0B,EAAO,OAAS,SAAgBzB,EAAQC,EAAQ,CACtCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,OAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,MAAQR,EAAM,KAAK,iBAAiB,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC1E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA2B,EAAO,gBAAkB,SAAyBzB,EAAQ,CACtD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUAyB,EAAO,OAAS,SAAgB3B,EAAS,CACrC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,GACzD,CAACT,EAAM,UAAUS,EAAQ,QAAQ,EACjC,MAAO,6BACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,IAAIO,EAAQf,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,KAAK,EAC5D,GAAIO,EACA,MAAO,SAAWA,CAC1B,CACA,OAAO,IACX,EAUAoB,EAAO,WAAa,SAAoBnB,EAAQ,CAC5C,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,OACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,OAGvC,GAFIgB,EAAO,UAAY,OACnBR,EAAQ,SAAWQ,EAAO,SAAW,GACrCA,EAAO,OAAS,KAAM,CACtB,GAAI,OAAOA,EAAO,OAAU,SACxB,MAAM,UAAU,+CAA+C,EACnER,EAAQ,
MAAQR,EAAM,KAAK,iBAAiB,WAAWgB,EAAO,KAAK,CACvE,CACA,OAAOR,CACX,EAWA2B,EAAO,SAAW,SAAkB3B,EAASS,EAAS,CAC7CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,EAClBA,EAAO,MAAQ,MAEfR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,UAC1BA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACvDQ,EAAO,MAAQhB,EAAM,KAAK,iBAAiB,SAASQ,EAAQ,MAAOS,CAAO,GACvED,CACX,EASAmB,EAAO,UAAU,OAAS,UAAkB,CACxC,OAAO,KAAK,YAAY,SAAS,KAAMvC,GAAU,KAAK,aAAa,CACvE,EAUAuC,EAAO,WAAa,SAAoBf,EAAe,CACnD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,wBAC3B,EAEOe,CACX,EAAG,EAEHD,EAAU,SAAY,UAAW,CAiB7B,SAASE,EAAS/B,EAAY,CAC1B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA6B,EAAS,UAAU,SAAW,KAU9BA,EAAS,OAAS,SAAgB/B,EAAY,CAC1C,OAAO,IAAI+B,EAAS/B,CAAU,CAClC,EAWA+B,EAAS,OAAS,SAAgB5B,EAASC,EAAQ,CAC/C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1ER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,SAAUC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnGA,CACX,EAWA2B,EAAS,gBAAkB,SAAyB5B,EAASC,EAAQ,CACjE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA2B,EAAS,OAAS,SAAgB1B,EAAQC,EAAQ,CACxCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,SAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACtE,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA4B,EAAS,gBAAkB,SAAyB1B,EAAQ,CACxD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA0B,EAAS,OAAS,SAAgB5B,EAAS,CACvC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,EAAG,CAChE,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,QAAQ,EACxD,GAAIO,EACA,MAAO,YAAcA,CAC7B,CACA,OAAO,IACX,EAUAqB,EAAS,WAAa,SAAoBpB,EAAQ,CAC9C,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,SACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,SACvC,GAAIgB,EAAO,UAAY,KAAM,CACzB,GAAI,OAAOA,EAAO,UAAa,SAC3B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,SAAWR,EAAM,KAAK,UAAU,WAAWgB,EAAO,QAAQ,CACtE,CACA,OAAOR,CACX,EAWA4B,EAAS,SAAW,SAAkB5B,EAASS,EAAS,CAC/CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,MAClBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,SAAUS,CAAO,GACtED,CACX,EASAoB,EAAS,UAAU,OAAS,UAAkB,CAC1C,OAAO,KAAK,YAAY,SAAS,KAAMxC,GAAU,KAAK,aAAa,CACvE,EAUAwC,EAAS,WAAa,SAAoBhB,EAAe,CACrD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,0BAC3B,EAEOgB,CACX,EAAG,EAEHF,EAAU,IAAO,UAAW,CAkBxB,SAASG,EAAIhC,EAAY,CACrB,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA8B,EAAI,UAAU,QAAU,EAQxBA,EAAI,UAAU,UAAY,KAU1BA,EAAI,OAAS,SAAgBhC,EAAY,CACrC,OAAO,IAAIgC,EAAIhC,CAAU,CAC7B,EAWAgC,EAAI,OAAS,SAAgB7B,EAASC,EAAQ,CAC1C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxEC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,OAAO,EAC7DA,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5ER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,UAAWC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACpGA,CACX,EAWA4B,EAAI,gBAAkB,SAAyB7B,EAASC,EAAQ,CAC5D,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA4B,EAAI,OAAS,SAAgB3B,EAAQC,EAAQ,CACnCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,IAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,QAAUE,EAAO,MAAM,EAC/
B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACvE,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA6B,EAAI,gBAAkB,SAAyB3B,EAAQ,CACnD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA2B,EAAI,OAAS,SAAgB7B,EAAS,CAClC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,GACvD,CAACT,EAAM,UAAUS,EAAQ,OAAO,EAChC,MAAO,4BACf,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,SAAS,EACzD,GAAIO,EACA,MAAO,aAAeA,CAC9B,CACA,OAAO,IACX,EAUAsB,EAAI,WAAa,SAAoBrB,EAAQ,CACzC,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,IACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,IAGvC,GAFIgB,EAAO,SAAW,OAClBR,EAAQ,QAAUQ,EAAO,QAAU,GACnCA,EAAO,WAAa,KAAM,CAC1B,GAAI,OAAOA,EAAO,WAAc,SAC5B,MAAM,UAAU,gDAAgD,EACpER,EAAQ,UAAYR,EAAM,KAAK,UAAU,WAAWgB,EAAO,SAAS,CACxE,CACA,OAAOR,CACX,EAWA6B,EAAI,SAAW,SAAkB7B,EAASS,EAAS,CAC1CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,QAAU,EACjBA,EAAO,UAAY,MAEnBR,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IAC3DQ,EAAO,QAAUR,EAAQ,SACzBA,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,UAAWS,CAAO,GACxED,CACX,EASAqB,EAAI,UAAU,OAAS,UAAkB,CACrC,OAAO,KAAK,YAAY,SAAS,KAAMzC,GAAU,KAAK,aAAa,CACvE,EAUAyC,EAAI,WAAa,SAAoBjB,EAAe,CAChD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,qBAC3B,EAEOiB,CACX,EAAG,EAEHH,EAAU,SAAY,UAAW,CAiB7B,SAASI,EAASjC,EAAY,CAC1B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAA+B,EAAS,UAAU,SAAW,KAU9BA,EAAS,OAAS,SAAgBjC,EAAY,CAC1C,OAAO,IAAIiC,EAASjC,CAAU,CAClC,EAWAiC,EAAS,OAAS,SAAgB9B,EAASC,EAAQ,CAC/C,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1ER,EAAM,KAAK,UAAU,OAAOQ,EAAQ,SAAUC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACnGA,CACX,EAWA6B,EAAS,gBAAkB,SAAyB9B,EAASC,EAAQ,CACjE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA6B,EAAS,OAAS,SAAgB5B,EAAQC,EAAQ,CACxCD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,SAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,EACtE,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA8B,EAAS,gBAAkB,SAAyB5B,EAAQ,CACxD,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA4B,EAAS,OAAS,SAAgB9B,EAAS,CACvC,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,EAAG,CAChE,IAAIO,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,QAAQ,EACxD,GAAIO,EACA,MAAO,YAAcA,CAC7B,CACA,OAAO,IACX,EAUAuB,EAAS,WAAa,SAAoBtB,EAAQ,CAC9C,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,SACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,SACvC,GAAIgB,EAAO,UAAY,KAAM,CACzB,GAAI,OAAOA,EAAO,UAAa,SAC3B,MAAM,UAAU,oDAAoD,EACxER,EAAQ,SAAWR,EAAM,KAAK,UAAU,WAAWgB,EAAO,QAAQ,CACtE,CACA,OAAOR,CACX,EAWA8B,EAAS,SAAW,SAAkB9B,EAASS,EAAS,CAC/CA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,MAClBR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWhB,EAAM,KAAK,UAAU,SAASQ,EAAQ,SAAUS,CAAO,GACtED,CACX,EASAsB,EAAS,UAAU,OAAS,UAAkB,CAC1C,OAAO,KAAK,YAAY,SAAS,KAAM1C,GAAU,KAAK,aAAa,CACvE,EAUA0C,EAAS,WAAa,SAAoBlB,EAAe,CACrD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,0BAC3B,EAEOkB,CACX,EAAG,EAEHJ,EAAU,aAAgB,UAAW,CAkBjC,SAASK,EAAalC,EAAY,CAC9B,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD
,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAgC,EAAa,UAAU,SAAW,EAQlCA,EAAa,UAAU,MAAQ,KAU/BA,EAAa,OAAS,SAAgBlC,EAAY,CAC9C,OAAO,IAAIkC,EAAalC,CAAU,CACtC,EAWAkC,EAAa,OAAS,SAAgB/B,EAASC,EAAQ,CACnD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,UAAY,MAAQ,OAAO,eAAe,KAAKA,EAAS,UAAU,GAC1EC,EAAO,OAA8B,CAAC,EAAE,MAAMD,EAAQ,QAAQ,EAC9DA,EAAQ,OAAS,MAAQ,OAAO,eAAe,KAAKA,EAAS,OAAO,GACpER,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,MAAOC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EACvGA,CACX,EAWA8B,EAAa,gBAAkB,SAAyB/B,EAASC,EAAQ,CACrE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA8B,EAAa,OAAS,SAAgB7B,EAAQC,EAAQ,CAC5CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,UAAU,aAC/FU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,SAAWE,EAAO,MAAM,EAChC,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,MAAQR,EAAM,KAAK,iBAAiB,OAAOU,EAAQA,EAAO,OAAO,CAAC,EAC1E,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYA+B,EAAa,gBAAkB,SAAyB7B,EAAQ,CAC5D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA6B,EAAa,OAAS,SAAgB/B,EAAS,CAC3C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,GACzD,CAACT,EAAM,UAAUS,EAAQ,QAAQ,EACjC,MAAO,6BACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,IAAIO,EAAQf,EAAM,KAAK,iBAAiB,OAAOQ,EAAQ,KAAK,EAC5D,GAAIO,EACA,MAAO,SAAWA,CAC1B,CACA,OAAO,IACX,EAUAwB,EAAa,WAAa,SAAoBvB,EAAQ,CAClD,GAAIA,aAAkBhB,EAAM,KAAK,UAAU,aACvC,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,UAAU,aAGvC,GAFIgB,EAAO,UAAY,OACnBR,EAAQ,SAAWQ,EAAO,SAAW,GACrCA,EAAO,OAAS,KAAM,CACtB,GAAI,OAAOA,EAAO,OAAU,SACxB,MAAM,UAAU,qDAAqD,EACzER,EAAQ,MAAQR,EAAM,KAAK,iBAAiB,WAAWgB,EAAO,KAAK,CACvE,CACA,OAAOR,CACX,EAWA+B,EAAa,SAAW,SAAkB/B,EAASS,EAAS,CACnDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,OAAIC,EAAQ,WACRD,EAAO,SAAW,EAClBA,EAAO,MAAQ,MAEfR,EAAQ,UAAY,MAAQA,EAAQ,eAAe,UAAU,IAC7DQ,EAAO,SAAWR,EAAQ,UAC1BA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,IACvDQ,EAAO,MAAQhB,EAAM,KAAK,iBAAiB,SAASQ,EAAQ,MAAOS,CAAO,GACvED,CACX,EASAuB,EAAa,UAAU,OAAS,UAAkB,CAC9C,OAAO,KAAK,YAAY,SAAS,KAAM3C,GAAU,KAAK,aAAa,CACvE,EAUA2C,EAAa,WAAa,SAAoBnB,EAAe,CACzD,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,8BAC3B,EAEOmB,CACX,EAAG,EAEIL,CACX,EAAG,EAEHjC,EAAK,mBAAsB,UAAW,CAkBlC,SAASuC,EAAmBnC,EAAY,CACpC,GAAIA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAiC,EAAmB,UAAU,OAAS,GAQtCA,EAAmB,UAAU,QAAUzC,EAAM,KAAOA,EAAM,KAAK,SAAS,EAAE,EAAE,EAAK,EAAI,EAUrFyC,EAAmB,OAAS,SAAgBnC,EAAY,CACpD,OAAO,IAAImC,EAAmBnC,CAAU,CAC5C,EAWAmC,EAAmB,OAAS,SAAgBhC,EAASC,EAAQ,CACzD,OAAKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,EAC9DA,EAAQ,SAAW,MAAQ,OAAO,eAAe,KAAKA,EAAS,SAAS,GACxEC,EAAO,OAA8B,EAAE,EAAE,MAAMD,EAAQ,OAAO,EAC3DC,CACX,EAWA+B,EAAmB,gBAAkB,SAAyBhC,EAASC,EAAQ,CAC3E,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaA+B,EAAmB,OAAS,SAAgB9B,EAAQC,EAAQ,CAClDD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,mBACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,QAAUE,EAAO,MAAM,EAC/B,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAgC,EAAmB,gBAAkB,SAAyB9B,EAAQ,CAClE,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA8B,EAAmB,OAAS,SAAgBhC,EAAS,CACjD,OA
AI,OAAOA,GAAY,UAAYA,IAAY,KACpC,kBACPA,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EACvB,0BACXA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,GACvD,CAACT,EAAM,UAAUS,EAAQ,OAAO,GAAK,EAAEA,EAAQ,SAAWT,EAAM,UAAUS,EAAQ,QAAQ,GAAG,GAAKT,EAAM,UAAUS,EAAQ,QAAQ,IAAI,GAC/H,iCACR,IACX,EAUAgC,EAAmB,WAAa,SAAoBxB,EAAQ,CACxD,GAAIA,aAAkBhB,EAAM,KAAK,mBAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,mBAC7B,OAAIgB,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GACrCA,EAAO,SAAW,OACdjB,EAAM,MACLS,EAAQ,QAAUT,EAAM,KAAK,UAAUiB,EAAO,OAAO,GAAG,SAAW,GAC/D,OAAOA,EAAO,SAAY,SAC/BR,EAAQ,QAAU,SAASQ,EAAO,QAAS,EAAE,EACxC,OAAOA,EAAO,SAAY,SAC/BR,EAAQ,QAAUQ,EAAO,QACpB,OAAOA,EAAO,SAAY,WAC/BR,EAAQ,QAAU,IAAIT,EAAM,SAASiB,EAAO,QAAQ,MAAQ,EAAGA,EAAO,QAAQ,OAAS,CAAC,EAAE,SAAS,IACpGR,CACX,EAWAgC,EAAmB,SAAW,SAAkBhC,EAASS,EAAS,CACzDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EACd,GAAIC,EAAQ,SAER,GADAD,EAAO,OAAS,GACZjB,EAAM,KAAM,CACZ,IAAImB,EAAO,IAAInB,EAAM,KAAK,EAAG,EAAG,EAAK,EACrCiB,EAAO,QAAUC,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAID,EAAQ,QAAU,OAASC,EAAK,SAAS,EAAIA,CAC/G,MACIF,EAAO,QAAUC,EAAQ,QAAU,OAAS,IAAM,EAE1D,OAAIT,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACxBA,EAAQ,SAAW,MAAQA,EAAQ,eAAe,SAAS,IACvD,OAAOA,EAAQ,SAAY,SAC3BQ,EAAO,QAAUC,EAAQ,QAAU,OAAS,OAAOT,EAAQ,OAAO,EAAIA,EAAQ,QAE9EQ,EAAO,QAAUC,EAAQ,QAAU,OAASlB,EAAM,KAAK,UAAU,SAAS,KAAKS,EAAQ,OAAO,EAAIS,EAAQ,QAAU,OAAS,IAAIlB,EAAM,SAASS,EAAQ,QAAQ,MAAQ,EAAGA,EAAQ,QAAQ,OAAS,CAAC,EAAE,SAAS,EAAIA,EAAQ,SAC7NQ,CACX,EASAwB,EAAmB,UAAU,OAAS,UAAkB,CACpD,OAAO,KAAK,YAAY,SAAS,KAAM5C,GAAU,KAAK,aAAa,CACvE,EAUA4C,EAAmB,WAAa,SAAoBpB,EAAe,CAC/D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,0BAC3B,EAEOoB,CACX,EAAG,EASHvC,EAAK,eAAkB,UAAW,CAC9B,IAAIC,EAAa,CAAC,EAAGC,EAAS,OAAO,OAAOD,CAAU,EACtD,OAAAC,EAAOD,EAAW,CAAC,EAAI,cAAc,EAAI,EACzCC,EAAOD,EAAW,CAAC,EAAI,QAAQ,EAAI,EAC5BC,CACX,EAAG,EAEHF,EAAK,cAAiB,UAAW,CAyB7B,SAASwC,EAAcpC,EAAY,CAO/B,GANA,KAAK,MAAQ,CAAC,EACd,KAAK,OAAS,CAAC,EACf,KAAK,UAAY,CAAC,EAClB,KAAK,eAAiB,CAAC,EACvB,KAAK,KAAO,CAAC,EACb,KAAK,YAAc,CAAC,EAChBA,EACA,QAASC,EAAO,OAAO,KAAKD,CAAU,EAAGE,EAAI,EAAGA,EAAID,EAAK,OAAQ,EAAEC,EAC3DF,EAAWC,EAAKC,CAAC,CAAC,GAAK,OACvB,KAAKD,EAAKC,CAAC,CAAC,EAAIF,EAAWC,EAAKC,CAAC,CAAC,EAClD,CAQA,OAAAkC,EAAc,UAAU,KAAO,GAQ/BA,EAAc,UAAU,MAAQ1C,EAAM,WAQtC0C,EAAc,UAAU,OAAS1C,EAAM,WAQvC0C,EAAc,UAAU,UAAY1C,EAAM,WAQ1C0C,EAAc,UAAU,eAAiB1C,EAAM,WAQ/C0C,EAAc,UAAU,KAAO1C,EAAM,WAQrC0C,EAAc,UAAU,UAAY,GAQpCA,EAAc,UAAU,YAAc1C,EAAM,WAQ5C0C,EAAc,UAAU,OAAS,GAUjCA,EAAc,OAAS,SAAgBpC,EAAY,CAC/C,OAAO,IAAIoC,EAAcpC,CAAU,CACvC,EAWAoC,EAAc,OAAS,SAAgBjC,EAASC,EAAQ,CAKpD,GAJKA,IACDA,EAASX,GAAQ,OAAO,GACxBU,EAAQ,MAAQ,MAAQ,OAAO,eAAe,KAAKA,EAAS,MAAM,GAClEC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,IAAI,EAC5DA,EAAQ,OAAS,MAAQA,EAAQ,MAAM,OACvC,QAAS,EAAI,EAAG,EAAIA,EAAQ,MAAM,OAAQ,EAAE,EACxCC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,MAAM,CAAC,CAAC,EACxE,GAAIA,EAAQ,QAAU,MAAQA,EAAQ,OAAO,OACzC,QAAS,EAAI,EAAG,EAAIA,EAAQ,OAAO,OAAQ,EAAE,EACzCC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,OAAO,CAAC,CAAC,EACzE,GAAIA,EAAQ,WAAa,MAAQA,EAAQ,UAAU,OAC/C,QAAS,EAAI,EAAG,EAAIA,EAAQ,UAAU,OAAQ,EAAE,EAC5CC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,UAAU,CAAC,CAAC,EAC5E,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,KAAK,OACrC,QAAS,EAAI,EAAG,EAAIA,EAAQ,KAAK,OAAQ,EAAE,EACvCR,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAK,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAG7G,GAFID,EAAQ,WAAa,MAAQ,OAAO,eAAe,KAAKA,EAAS,WAAW,GAC5EC,EAAO,OAA8B,EAAE,EAAE,OAAOD,EAAQ,SAAS,EACjEA,EAAQ,aAAe,MAAQA,EAAQ,YAAY,OACnD,QAAS,EAAI,EAAG,EAAIA,EAAQ,YAAY,OAAQ,EAAE,EAC9CR,EAAM,KAAK,mBAAmB,OAAOQ,EAAQ,YAAY,CAAC,EAAGC,EAAO,OAA8B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAG7H,GAFID,EAAQ,QAAU,MAAQ,OAAO,eAAe,KAAKA,EAAS,QAAQ,GACtEC,EAAO,OAA+B,EAAE,EAAE,OAAOD,E
AAQ,MAAM,EAC/DA,EAAQ,gBAAkB,MAAQA,EAAQ,eAAe,OACzD,QAAS,EAAI,EAAG,EAAIA,EAAQ,eAAe,OAAQ,EAAE,EACjDR,EAAM,KAAK,eAAe,OAAOQ,EAAQ,eAAe,CAAC,EAAGC,EAAO,OAA+B,EAAE,EAAE,KAAK,CAAC,EAAE,OAAO,EAC7H,OAAOA,CACX,EAWAgC,EAAc,gBAAkB,SAAyBjC,EAASC,EAAQ,CACtE,OAAO,KAAK,OAAOD,EAASC,CAAM,EAAE,OAAO,CAC/C,EAaAgC,EAAc,OAAS,SAAgB/B,EAAQC,EAAQ,CAC7CD,aAAkBb,IACpBa,EAASb,EAAQ,OAAOa,CAAM,GAElC,QADIE,EAAMD,IAAW,OAAYD,EAAO,IAAMA,EAAO,IAAMC,EAAQH,EAAU,IAAIR,EAAM,KAAK,cACrFU,EAAO,IAAME,GAAK,CACrB,IAAIC,EAAMH,EAAO,OAAO,EACxB,OAAQG,IAAQ,EAAG,CACnB,IAAK,GAAG,CACAL,EAAQ,KAAOE,EAAO,OAAO,EAC7B,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,OAASA,EAAQ,MAAM,SACjCA,EAAQ,MAAQ,CAAC,GACrBA,EAAQ,MAAM,KAAKE,EAAO,OAAO,CAAC,EAClC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,QAAUA,EAAQ,OAAO,SACnCA,EAAQ,OAAS,CAAC,GACtBA,EAAQ,OAAO,KAAKE,EAAO,OAAO,CAAC,EACnC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,WAAaA,EAAQ,UAAU,SACzCA,EAAQ,UAAY,CAAC,GACzBA,EAAQ,UAAU,KAAKE,EAAO,OAAO,CAAC,EACtC,KACJ,CACJ,IAAK,IAAI,CACKF,EAAQ,gBAAkBA,EAAQ,eAAe,SACnDA,EAAQ,eAAiB,CAAC,GAC9BA,EAAQ,eAAe,KAAKR,EAAM,KAAK,eAAe,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACrF,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,MAAQA,EAAQ,KAAK,SAC/BA,EAAQ,KAAO,CAAC,GACpBA,EAAQ,KAAK,KAAKR,EAAM,KAAK,UAAU,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtE,KACJ,CACJ,IAAK,GAAG,CACAF,EAAQ,UAAYE,EAAO,OAAO,EAClC,KACJ,CACJ,IAAK,GAAG,CACMF,EAAQ,aAAeA,EAAQ,YAAY,SAC7CA,EAAQ,YAAc,CAAC,GAC3BA,EAAQ,YAAY,KAAKR,EAAM,KAAK,mBAAmB,OAAOU,EAAQA,EAAO,OAAO,CAAC,CAAC,EACtF,KACJ,CACJ,IAAK,IAAI,CACDF,EAAQ,OAASE,EAAO,OAAO,EAC/B,KACJ,CACJ,QACIA,EAAO,SAASG,EAAM,CAAC,EACvB,KACJ,CACJ,CACA,OAAOL,CACX,EAYAiC,EAAc,gBAAkB,SAAyB/B,EAAQ,CAC7D,OAAMA,aAAkBb,IACpBa,EAAS,IAAIb,EAAQa,CAAM,GACxB,KAAK,OAAOA,EAAQA,EAAO,OAAO,CAAC,CAC9C,EAUA+B,EAAc,OAAS,SAAgBjC,EAAS,CAC5C,GAAI,OAAOA,GAAY,UAAYA,IAAY,KAC3C,MAAO,kBACX,GAAIA,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,GACjD,CAACT,EAAM,SAASS,EAAQ,IAAI,EAC5B,MAAO,wBACf,GAAIA,EAAQ,OAAS,MAAQA,EAAQ,eAAe,OAAO,EAAG,CAC1D,GAAI,CAAC,MAAM,QAAQA,EAAQ,KAAK,EAC5B,MAAO,wBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,MAAM,OAAQ,EAAED,EACxC,GAAI,CAACR,EAAM,SAASS,EAAQ,MAAMD,CAAC,CAAC,EAChC,MAAO,0BACnB,CACA,GAAIC,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,EAAG,CAC5D,GAAI,CAAC,MAAM,QAAQA,EAAQ,MAAM,EAC7B,MAAO,yBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,OAAO,OAAQ,EAAED,EACzC,GAAI,CAACR,EAAM,SAASS,EAAQ,OAAOD,CAAC,CAAC,EACjC,MAAO,2BACnB,CACA,GAAIC,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,EAAG,CAClE,GAAI,CAAC,MAAM,QAAQA,EAAQ,SAAS,EAChC,MAAO,4BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,UAAU,OAAQ,EAAED,EAC5C,GAAI,CAACR,EAAM,SAASS,EAAQ,UAAUD,CAAC,CAAC,EACpC,MAAO,8BACnB,CACA,GAAIC,EAAQ,gBAAkB,MAAQA,EAAQ,eAAe,gBAAgB,EAAG,CAC5E,GAAI,CAAC,MAAM,QAAQA,EAAQ,cAAc,EACrC,MAAO,iCACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,eAAe,OAAQ,EAAED,EAAG,CACpD,IAAIQ,EAAQf,EAAM,KAAK,eAAe,OAAOQ,EAAQ,eAAeD,CAAC,CAAC,EACtE,GAAIQ,EACA,MAAO,kBAAoBA,CACnC,CACJ,CACA,GAAIP,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,EAAG,CACxD,GAAI,CAAC,MAAM,QAAQA,EAAQ,IAAI,EAC3B,MAAO,uBACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,KAAK,OAAQ,EAAED,EAAG,CAC1C,IAAIQ,EAAQf,EAAM,KAAK,UAAU,OAAOQ,EAAQ,KAAKD,CAAC,CAAC,EACvD,GAAIQ,EACA,MAAO,QAAUA,CACzB,CACJ,CACA,GAAIP,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,GAC3D,CAACT,EAAM,SAASS,EAAQ,SAAS,EACjC,MAAO,6BACf,GAAIA,EAAQ,aAAe,MAAQA,EAAQ,eAAe,aAAa,EAAG,CACtE,GAAI,CAAC,MAAM,QAAQA,EAAQ,WAAW,EAClC,MAAO,8BACX,QAASD,EAAI,EAAGA,EAAIC,EAAQ,YAAY,OAAQ,EAAED,EAAG,CACjD,IAAIQ,EAAQf,EAAM,KAAK,mBAAmB,OAAOQ,EAAQ,YAAYD,CAAC,CAAC,EACvE,GAAIQ,EACA,MAAO,eAAiBA,CAChC,CACJ,CACA,OAAIP,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,GACrD,CAACT,EAAM,SAASS,EAAQ,MAAM,EACvB,0BACR,IACX,EAUAiC,EAAc,WAAa,SAAoBzB,EAAQ,CACnD,GAAIA,aAAkBhB,EAAM,KAAK,cAC7B,OAAOgB,EACX,IAAIR,EAAU,IAAIR,EAAM,KAAK,cAG7B,GAFIgB,EAAO,MAAQ,OACfR,EAAQ,KAAO,OAAOQ,EAAO,IAAI,GACjCA,EAAO,MAAO,CAC
d,GAAI,CAAC,MAAM,QAAQA,EAAO,KAAK,EAC3B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,MAAQ,CAAC,EACjB,QAAS,EAAI,EAAG,EAAIQ,EAAO,MAAM,OAAQ,EAAE,EACvCR,EAAQ,MAAM,CAAC,EAAI,OAAOQ,EAAO,MAAM,CAAC,CAAC,CACjD,CACA,GAAIA,EAAO,OAAQ,CACf,GAAI,CAAC,MAAM,QAAQA,EAAO,MAAM,EAC5B,MAAM,UAAU,4CAA4C,EAChER,EAAQ,OAAS,CAAC,EAClB,QAAS,EAAI,EAAG,EAAIQ,EAAO,OAAO,OAAQ,EAAE,EACxCR,EAAQ,OAAO,CAAC,EAAI,OAAOQ,EAAO,OAAO,CAAC,CAAC,CACnD,CACA,GAAIA,EAAO,UAAW,CAClB,GAAI,CAAC,MAAM,QAAQA,EAAO,SAAS,EAC/B,MAAM,UAAU,+CAA+C,EACnER,EAAQ,UAAY,CAAC,EACrB,QAAS,EAAI,EAAG,EAAIQ,EAAO,UAAU,OAAQ,EAAE,EAC3CR,EAAQ,UAAU,CAAC,EAAI,OAAOQ,EAAO,UAAU,CAAC,CAAC,CACzD,CACA,GAAIA,EAAO,eAAgB,CACvB,GAAI,CAAC,MAAM,QAAQA,EAAO,cAAc,EACpC,MAAM,UAAU,oDAAoD,EACxER,EAAQ,eAAiB,CAAC,EAC1B,QAAS,EAAI,EAAG,EAAIQ,EAAO,eAAe,OAAQ,EAAE,EAAG,CACnD,GAAI,OAAOA,EAAO,eAAe,CAAC,GAAM,SACpC,MAAM,UAAU,qDAAqD,EACzER,EAAQ,eAAe,CAAC,EAAIR,EAAM,KAAK,eAAe,WAAWgB,EAAO,eAAe,CAAC,CAAC,CAC7F,CACJ,CACA,GAAIA,EAAO,KAAM,CACb,GAAI,CAAC,MAAM,QAAQA,EAAO,IAAI,EAC1B,MAAM,UAAU,0CAA0C,EAC9DR,EAAQ,KAAO,CAAC,EAChB,QAAS,EAAI,EAAG,EAAIQ,EAAO,KAAK,OAAQ,EAAE,EAAG,CACzC,GAAI,OAAOA,EAAO,KAAK,CAAC,GAAM,SAC1B,MAAM,UAAU,2CAA2C,EAC/DR,EAAQ,KAAK,CAAC,EAAIR,EAAM,KAAK,UAAU,WAAWgB,EAAO,KAAK,CAAC,CAAC,CACpE,CACJ,CAGA,GAFIA,EAAO,WAAa,OACpBR,EAAQ,UAAY,OAAOQ,EAAO,SAAS,GAC3CA,EAAO,YAAa,CACpB,GAAI,CAAC,MAAM,QAAQA,EAAO,WAAW,EACjC,MAAM,UAAU,iDAAiD,EACrER,EAAQ,YAAc,CAAC,EACvB,QAAS,EAAI,EAAG,EAAIQ,EAAO,YAAY,OAAQ,EAAE,EAAG,CAChD,GAAI,OAAOA,EAAO,YAAY,CAAC,GAAM,SACjC,MAAM,UAAU,kDAAkD,EACtER,EAAQ,YAAY,CAAC,EAAIR,EAAM,KAAK,mBAAmB,WAAWgB,EAAO,YAAY,CAAC,CAAC,CAC3F,CACJ,CACA,OAAIA,EAAO,QAAU,OACjBR,EAAQ,OAAS,OAAOQ,EAAO,MAAM,GAClCR,CACX,EAWAiC,EAAc,SAAW,SAAkBjC,EAASS,EAAS,CACpDA,IACDA,EAAU,CAAC,GACf,IAAID,EAAS,CAAC,EAgBd,IAfIC,EAAQ,QAAUA,EAAQ,YAC1BD,EAAO,MAAQ,CAAC,EAChBA,EAAO,OAAS,CAAC,EACjBA,EAAO,UAAY,CAAC,EACpBA,EAAO,KAAO,CAAC,EACfA,EAAO,YAAc,CAAC,EACtBA,EAAO,eAAiB,CAAC,GAEzBC,EAAQ,WACRD,EAAO,KAAO,GACdA,EAAO,UAAY,GACnBA,EAAO,OAAS,IAEhBR,EAAQ,MAAQ,MAAQA,EAAQ,eAAe,MAAM,IACrDQ,EAAO,KAAOR,EAAQ,MACtBA,EAAQ,OAASA,EAAQ,MAAM,OAAQ,CACvCQ,EAAO,MAAQ,CAAC,EAChB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,MAAM,OAAQ,EAAEW,EACxCH,EAAO,MAAMG,CAAC,EAAIX,EAAQ,MAAMW,CAAC,CACzC,CACA,GAAIX,EAAQ,QAAUA,EAAQ,OAAO,OAAQ,CACzCQ,EAAO,OAAS,CAAC,EACjB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,OAAO,OAAQ,EAAEW,EACzCH,EAAO,OAAOG,CAAC,EAAIX,EAAQ,OAAOW,CAAC,CAC3C,CACA,GAAIX,EAAQ,WAAaA,EAAQ,UAAU,OAAQ,CAC/CQ,EAAO,UAAY,CAAC,EACpB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,UAAU,OAAQ,EAAEW,EAC5CH,EAAO,UAAUG,CAAC,EAAIX,EAAQ,UAAUW,CAAC,CACjD,CACA,GAAIX,EAAQ,MAAQA,EAAQ,KAAK,OAAQ,CACrCQ,EAAO,KAAO,CAAC,EACf,QAASG,EAAI,EAAGA,EAAIX,EAAQ,KAAK,OAAQ,EAAEW,EACvCH,EAAO,KAAKG,CAAC,EAAInB,EAAM,KAAK,UAAU,SAASQ,EAAQ,KAAKW,CAAC,EAAGF,CAAO,CAC/E,CAGA,GAFIT,EAAQ,WAAa,MAAQA,EAAQ,eAAe,WAAW,IAC/DQ,EAAO,UAAYR,EAAQ,WAC3BA,EAAQ,aAAeA,EAAQ,YAAY,OAAQ,CACnDQ,EAAO,YAAc,CAAC,EACtB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,YAAY,OAAQ,EAAEW,EAC9CH,EAAO,YAAYG,CAAC,EAAInB,EAAM,KAAK,mBAAmB,SAASQ,EAAQ,YAAYW,CAAC,EAAGF,CAAO,CACtG,CAGA,GAFIT,EAAQ,QAAU,MAAQA,EAAQ,eAAe,QAAQ,IACzDQ,EAAO,OAASR,EAAQ,QACxBA,EAAQ,gBAAkBA,EAAQ,eAAe,OAAQ,CACzDQ,EAAO,eAAiB,CAAC,EACzB,QAASG,EAAI,EAAGA,EAAIX,EAAQ,eAAe,OAAQ,EAAEW,EACjDH,EAAO,eAAeG,CAAC,EAAInB,EAAM,KAAK,eAAe,SAASQ,EAAQ,eAAeW,CAAC,EAAGF,CAAO,CACxG,CACA,OAAOD,CACX,EASAyB,EAAc,UAAU,OAAS,UAAkB,CAC/C,OAAO,KAAK,YAAY,SAAS,KAAM7C,GAAU,KAAK,aAAa,CACvE,EAUA6C,EAAc,WAAa,SAAoBrB,EAAe,CAC1D,OAAIA,IAAkB,SAClBA,EAAgB,uBAEbA,EAAgB,qBAC3B,EAEOqB,CACX,EAAG,EAEIxC,CACX,EAAG,EAEHN,GAAO,QAAUK,IC78OV,SAAS0C,GAAOC,EAAeC,EAAmB,CACvD,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,OAAOC,GAAQ,SAAWA,EAAMA,EAAI,CAAC,CAEzD,CAosCO,SAASC,GAAiBC,EAA4B,CAC3D,OAAO,IAAI,YAAY,EAAE,OAAOA,CAAM,CAC
xC,CAtuCA,IAQAC,GA0BaC,GAwBAC,GAoDAC,GAwNAC,GAiDAC,GAkGAC,GAiBAC,EAuaAC,GAsIAC,GA0MAC,GACAC,GAluCbC,GAAAC,EAAA,kBAGAC,KACAC,KAIAf,GAAmB,SACnBgB,KAyBaf,GAAN,KAAgB,CAOrB,OAAO,YACHgB,EAEAC,EAC2B,CAC7B,GAAID,EAAG,SAAWC,EAAG,OACnB,MAAO,GAET,QAASC,EAAI,EAAGA,EAAIF,EAAG,OAAQE,IAC7B,GAAIF,EAAGE,CAAC,IAAMD,EAAGC,CAAC,EAChB,MAAO,GAGX,MAAO,EACT,CACF,EAEajB,GAAN,KAAiB,CAOtB,OAAO,sBAAsBkB,EAA0BC,EACZ,CAIzC,IAAMC,EAAKF,EAAM,SAAW,EAAK,CAAC,EAAGA,EAAM,CAAC,CAAC,EAAIA,EAK3CG,EAAKF,EAAM,SAAW,EAAK,CAACA,EAAM,CAAC,EAAG,CAAC,EAAIA,EAEjD,MAAO,CAACC,EAAGC,CAAC,CACd,CASA,OAAO,uBAAuBC,EAAuBC,EAAeC,EAAe,CAE7ED,IAAU,GAEZD,EAAY,OAAOA,EAAY,OAAS,EAAG,CAAC,EAG1CE,IAAU,GACZF,EAAY,IAAI,CAEpB,CAQA,OAAO,gBAAgBF,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAEapB,GAAN,MAAMwB,CAAc,CAQzB,OAAO,UAAUC,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFjC,GAAW,gBAAgB,CAAC0B,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAAShB,EAAIW,EAAW,EAAI,EAAGX,GAAKc,EAAOd,IAAK,CAC9C,IAAMiB,EAAOL,EAAQZ,EAAI,EAAI,EAAIS,EAAMG,EAAQZ,CAAC,EAC1CkB,EAAOL,EAAQb,EAAI,EAAI,EAAIU,EAAMG,EAAQb,CAAC,EAEhD,GAAIiB,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEFH,EAAMD,EAAQd,CAAC,EAAI,KAAK,IAAIiB,EAAMC,CAAI,CACxC,CAEA,OAAOH,CACT,CAQA,OAAO,MAAMI,EAAuCC,EAA4C,CAI9F,IAAMC,EAAkB,IAAI,MAAMD,EAAc,MAAM,EACtD,OAAAZ,EAAc,UAAUW,EAAoBC,EAAeC,CAAe,EACnEA,CACT,CASA,OAAO,UAAUF,EAAuCC,EAAkCC,EAA2B,CAInH,IAAMC,EAAYH,EAAmB,OAASC,EAAc,OAC5D,QAAS,EAAI,EAAG,EAAIA,EAAc,OAAQ,IACxCC,EAAgB,CAAC,EAAIF,EAAmBG,EAAY,CAAC,EAAIF,EAAc,CAAC,CAE5E,CAUA,OAAO,KACHjB,EAAWC,EAAWmB,EAA+DC,EACrFC,EAAgD,CAClD,IAAMpB,EAAcG,EAAc,UAAUL,EAAE,KAAMC,EAAE,IAAI,EAE1D,GAAIC,EAAa,CACf,GAAImB,GAAW,CAACpC,EAAU,SAASiB,EAAaF,EAAE,IAAI,EAEpD,OAGF,IAAMuB,EAAOtC,EAAU,KAAKiB,CAAW,EACjCsB,EAAIH,EAAUrB,EAAI,IAAIyB,GAAOvB,EAAaoB,GAActB,EAAE,IAAI,EAGpE,GAAIE,EAAY,SAAW,EACzBsB,EAAE,IAAI,CAAC,EAAGJ,EAAGpB,EAAE,IAAI,CAAC,CAAC,EAAaC,EAAE,IAAI,CAAC,CAAC,CAAW,CAAC,MAInD,CACH,IAAMyB,EAAgB,IAAI,MAAcxB,EAAY,MAAM,EACpDyB,EAAmB,IAAI,MAAM3B,EAAE,KAAK,MAAM,EAC1C4B,EAAmB,IAAI,MAAM3B,EAAE,KAAK,MAAM,EAC5C4B,EAAsB,EACtBC,EAAsB,EACtBC,EAAY,GACZC,EAAY,GACZhC,EAAE,KAAK,SAAW,IACpB6B,EAAO7B,EAAE,IAAI,CAAC,CAAC,EACf+B,EAAY,IAEV9B,EAAE,KAAK,SAAW,IACpB6B,EAAO7B,EAAE,IAAI,CAAC,CAAC,EACf+B,EAAY,IAEd,IAAIC,EACJ,QAASpC,EAAI,EAAGA,EAAI0B,EAAM1B,IAAK,CAE7BoC,EAAOpC,EACP,QAASqC,EAAIhC,EAAY,OAAS,EAAGgC,GAAK,EAAGA,IAC3CR,EAAcQ,CAAC,EAAID,EAAO/B,EAAYgC,CAAC,EACvCD,EAAO,KAAK,MAAMA,EAAO/B,EAAYgC,CAAC,CAAC,EAGpCH,IAEH1B,EAAc,UAAUqB,EAAe1B,EAAE,KAAM2B,CAAgB,EAC/DE,EAAO7B,EAAE,IAAI2B,CAAgB,GAE1BK,IACH3B,EAAc,UAAUqB,EAAezB,EAAE,KAAM2B,CAAgB,EAC/DE,EAAO7B,EAAE,IAAI2B,CAAgB,GAG/BJ,EAAE,IAAIE,EAAeN,EAAGS,EAAMC,CAAI,CAAC,CACrC,CACF,CAEA,OAAON,CACT,CAGF,CAOA,OAAO,iBAAiBW,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAAS,EAAI,EAAG,GAAKD,EAAW,IAC9B,GAAIF,EAAME,EAAY,CAAC,IAAM,GAAKF,EAAME,EAAY,CAAC,IAAMD,EAAWE,EAAY,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CASA,OAAO,iBAAiBC,EAA+BrC,EAA0C,CAC/F,IAAMsC,EAASD,EAAW,OACpBE,EAAiB,CAAC,EACxB,QAAS,EAAI,EAAG,EAAID,EAAQ,IAAK,CAC/B,IAAME,EAAMF,EAAS,EAAI,EACnB,EAAID,EAAWG,CAAG,GAAK,GACnBxC,EAAYA,EAAY,OAAS,EAAI,CAAC,GAAK,GAC7C,GAAK,IAAM,GACjBuC,EAAK,QAAQC,CAAG,CAEpB,CACA,OAAOD,CACT,CACF,EAyBa3D,
GAAN,KAAe,CAIpB,OAAO,qBACH6D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAEalE,GAAN,MAAMqE,CAAU,CACrB,OAAO,wBAAwBC,EAC8D,CAC3F,OAAQA,EAAW,CACjB,KAAK,QAAK,YAAY,SAAS,KAC7B,MAAO,OACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,KAC7B,MAAO,OACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SACT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,UACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,UACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SAIT,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,QACT,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0B,QAAK,YAAY,SAASA,CAAS,CAAC,EAAE,CACpF,CACF,CAEA,OAAO,2BAA2BC,EAAyC,CACzE,OAAQA,EAAM,CACZ,IAAK,OACH,OAAO,QAAK,YAAY,SAAS,KACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,OACH,OAAO,QAAK,YAAY,SAAS,KACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,UACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,UACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OACnC,IAAK,QACH,OAAO,QAAK,YAAY,SAAS,MACnC,IAAK,SACH,OAAO,QAAK,YAAY,SAAS,OAEnC,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,CAEA,OAAO,oBAAoBb,EAAoC,CAE7D,OAAOA,EAAK,IAAIc,GAAKC,GAAK,OAAOD,CAAC,EAAIA,EAAE,SAAS,EAAIA,CAAC,CACxD,CAEA,OAAO,yBAAyBE,EAAoD,CAClF,MAAO,CACL,WAAYL,EAAU,wBAAwBK,EAAU,QAAS,EACjE,MAAO,CAAC,KAAML,EAAU,oBAAoBK,EAAU,MAAO,IAAK,IAAIF,GAAKA,EAAE,QAAS,CAAC,CAAC,CAC1F,CACF,CAEA,OAAO,wBAAwBG,EAA6C,CAC1E,IAAMjB,EAAO,CAAC,EACd,QAAS5C,EAAI,EAAGA,EAAI6D,EAAO,WAAW,EAAG7D,IACvC4C,EAAK,KAAKzD,GAAS,aAAa0E,EAAO,KAAK7D,CAAC,CAAE,CAAC,EAElD,OAAO4C,CACT,CAEA,OAAO,8BAA8BkB,EAAyC,CAC5E,IAAMC,EAAa,CAAC,EACpB,QAAS/D,EAAI,EAAGA,EAAI8D,EAAK,iBAAiB,EAAG9D,IAC3C+D,EAAW,KAAKD,EAAK,WAAW9D,CAAC,CAAE,EAErC,OAAO+D,CACT,CACF,EAEa5E,GAAN,KAAe,CAIpB,OAAO,aAAa6E,EAAiCC,EAAoB,CACvE,OAAIN,GAAK,OAAOK,CAAC,EACRA,EAAE,SAAS,EACTA,aAAaE,EAAY,KAC3BP,GAAK,UAAU,CAAC,IAAKK,EAAE,IAAK,KAAMA,EAAE,KAAM,SAAUC,GAAY,EAAK,CAAC,EAAE,SAAS,EAEnFD,CACT,CACA,OAAO,OAAOA,EAAY,CACxB,OAAOL,GAAK,OAAOK,CAAC,GAAKA,aAAaE,EAAY,IACpD,CACF,EAEa9E,EAAN,MAAM+E,CAAU,CACrB,OAAO,KAAKvB,EAAiC,CAC3C,OAAOuB,EAAU,0BAA0BvB,EAAM,EAAGA,EAAK,MAAM,CACjE,CAGA,OAAO,kBAAkBA,EAAyBwB,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOxB,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBwB,CAAI,wCAAwCxB,EAAK,MAAM,cAAc,EAE/G,OAAOuB,EAAU,0BAA0BvB,EAAMwB,EAAMxB,EAAK,MAAM,CACpE,CAGA,OAAO,gBAAgBA,EAAyBwB,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOxB,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBwB,CAAI,sCAAsCxB,EAAK,MAAM,cAAc,EAE7G,OAAOuB,EAAU,0BAA0BvB,EAAM,EAAGwB,CAAI,CAC1D,CAEA,OAAO,0BAA0BxB,EAAyByB,EAAeC,EAAqB,CAC5F,IAAI5C,EAAO,EACX,QAAS,EAAI2C,EAAO,EAAIC,EAAK,IAAK,CAGhC,GAAI1B,EAAK,CAAC,GAAK,EACb,MAAM,IAAI,MAEN,oHAAoH,EAE1HlB,GAAQkB,EAAK,CAAC,CAChB,CACA,OAAOlB,CACT,CAEA,OAAO,eAAekB,EAA4C,CAChE,IAAM2B,EAAO3B,EAAK,OAClB,GAAI2B,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC9BC,EAAQD,EAAO,CAAC,EAAI,EACpBC,EAAQD,EAAO,CAAC,EAAI3B,EAAK2B,EAAO,CAAC,EACjC,QAASvE,EAAIuE,EAAO,EAAGvE,GAAK,EAAG,EAAEA,EAC/BwE,EAAQxE,CAAC,EAAIwE,EAAQxE,EAAI,CAAC,E
AAI4C,EAAK5C,EAAI,CAAC,EAE1C,OAAOwE,CACT,CAEA,OAAO,UAAU5B,EAA4C,CAE3D,OADaA,EAAK,MAAM,EACZ,QAAQ,CACtB,CAEA,OAAO,gBAAgB6B,EAA4BD,EAA4BJ,EAAuB,CAChGA,IAAS,SACXA,EAAOK,EAAQ,QAEjB,IAAIC,EAAS,EACb,QAAS,EAAI,EAAG,EAAIN,EAAM,EAAE,EAC1BM,GAAUF,EAAQ,CAAC,EAAIC,EAAQ,CAAC,EAElC,OAAOC,CACT,CAEA,OAAO,gBAAgBA,EAAgBF,EAA+C,CACpF,IAAMD,EAAOC,EAAQ,OACrB,GAAID,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAACG,EAASF,EAAQ,CAAC,CAAC,EAE7B,IAAMC,EAAoB,IAAI,MAAMD,EAAQ,MAAM,EAClD,QAAS,EAAI,EAAG,EAAIC,EAAQ,OAAS,EAAG,EAAE,EACxCA,EAAQ,CAAC,EAAI,KAAK,MAAMC,EAASF,EAAQ,CAAC,CAAC,EAC3CE,GAAUD,EAAQ,CAAC,EAAID,EAAQ,CAAC,EAElC,OAAAC,EAAQA,EAAQ,OAAS,CAAC,EAAIC,EACvBD,CACT,CAKA,OAAO,cAAcL,EAAcO,EAA4B,CAC7D,GAAIP,EAAO,CAACO,GAAcP,GAAQO,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOP,EAAO,EAAIA,EAAOO,EAAaP,CACxC,CAEA,OAAO,cAAcQ,EAAyBD,EAA8B,CAC1E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,CAAU,CAAC,CACxD,CAUA,OAAO,eAAeG,EAAiBlC,EAAyBmC,EAA4B,CAC1F,GAAInC,EAAK,SAAW,GAAKkC,EAAM,SAAW,EACxC,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIC,IAAsB,OACxBA,EAAoBnC,EAAK,eAErBmC,GAAqB,GAAKA,EAAoBnC,EAAK,OACrD,MAAM,IAAI,MAAM,gCAAgC,EAIpD,QAASoC,EAAID,EAAoB,EAAGC,GAAK,IACvCF,EAAME,CAAC,IACH,EAAAF,EAAME,CAAC,EAAIpC,EAAKoC,CAAC,IAFqB,EAAEA,EAK5CF,EAAME,CAAC,EAAI,CAEf,CAcA,OAAO,sBAAsBC,EAAiCC,EAAyC,CAErG,GAAIA,EAAW,SAAW,EAAG,CAC3B,GAAID,EAAa,SAAW,GAAKd,EAAU,KAAKc,CAAY,IAAM,EAChE,MAAO,CAAC,EAER,MAAM,IAAI,MAAM,mCAAmC,CAEvD,CAEA,IAAME,EAAQD,EAAW,OACnBE,EAAe,IAAI,MAAcD,CAAK,EACxCE,EAAmB,GACnBC,EAAgB,EACpB,QAAStF,EAAI,EAAGA,EAAImF,EAAOnF,IAAK,CAC9B,GAAIkF,EAAWlF,CAAC,EAAI,GAClB,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIkF,EAAWlF,CAAC,IAAM,GAAI,CACxB,GAAIqF,IAAqB,GACvB,MAAM,IAAI,MAAM,gDAAgD,EAElEA,EAAmBrF,CACrB,KAAO,CACL,GAAIkF,EAAWlF,CAAC,IAAM,EAAG,CACvB,GAAIA,GAAKiF,EAAa,OACpB,MAAM,IAAI,MAAM,8EAA8E,EAEhGG,EAAapF,CAAC,EAAIiF,EAAajF,CAAC,CAClC,MACEoF,EAAapF,CAAC,EAAIkF,EAAWlF,CAAC,EAEhCsF,GAAiBF,EAAapF,CAAC,CACjC,CACF,CAEA,IAAMuF,EAAgBpB,EAAU,KAAKc,CAAY,EACjD,GAAII,IAAqB,GAAI,CAC3B,GAAIE,EAAgBD,IAAkB,EACpC,MAAM,IAAI,MAAM,6EACZL,CAAY,oBAAoBC,CAAU,GAAG,EAEnDE,EAAaC,CAAgB,EAAIE,EAAgBD,CACnD,SAGMA,IAAkBC,EACpB,MAAM,IAAI,MAAM,yDAA0D,EAG9E,OAAOH,CACT,CAQA,OAAO,gBAAgBjF,EAAsBqF,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAMtF,EAAEsF,CAAC,CAAC,EAEpBtF,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASyC,EAAyB8C,EAA2C,CAClF,IAAMnB,EAAO3B,EAAK,OAClB,OAAOA,EAAK,IAAI,CAAC6C,EAAG,IAAMA,EAAIC,EAAI,CAAC,EAAIA,EAAI,EAAInB,CAAI,CAAC,CACtD,CAOA,OAAO,SAASoB,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGzF,IAAMyF,IAAMG,EAAO5F,CAAC,CAAC,CAC/C,CAMA,OAAO,wBAAwB4C,EAAiC,CAC9D,GAAIA,EAAK,OAAS,EAChB,MAAM,IAAI,UAAU,iDAAiD,EAEvE,IAAIlB,EAAO,EACX,QAAWsC,KAAKpB,EAAM,CACpB,GAAI,CAAC,OAAO,UAAUoB,CAAC,EACrB,MAAM,IAAI,UAAU,kBAAkBA,CAAC,oBAAoB,EAE7D,GAAIA,EAAI,GAAKA,EAAI,WACf,MAAM,IAAI,UAAU,yBAAyBA,CAAC,iBAAiB,EAEjEtC,GAAQsC,CACV,CACA,OAAOtC,CACT,CAOA,OAAO,aAAakB,EAAyBwB,EAAiC,CACxEA,EAAO,IACTA,GAAQxB,EAAK,QAEf,IAAMiD,EAAQjD,EAAK,OAAO,CAACiC,EAAGiB,IAAMjB,EAAIiB,EAAG,CAAC,EACtCC,EAAQnD,EAAK,MAAMwB,CAAI,EAAE,OAAO,CAACS,EAAGiB,IAAMjB,EAAIiB,EAAG,CAAC,EAGxD,MAFmB,CAACD,EAAQE,EAAOA,CAAK,CAG1C,CAOA,OAAO,aAAanD,EAAyBgC,EAA4C,CACvF,IAAMoB,EAAa,IAAI,MAGvBpB,EAAOT,EAAU,cAAcS,EAAMhC,EAAK,MAAM,EAEhD,QAAS5C,EAAI,EAAGA,EAAI4C,EAAK,OAAQ5C,IAAK,CACpC,IAAMiG,EAAgBrB,EAAK,QAAQ5E,CAAC,GAAK,EACzC,GAAIiG,GAAiBrD,EAAK5C,CAAC,IAAM,EAC/B,MAAM,IAAI,MAAM,0CAA0C,GAGvD4E,EAAK,SAAW,GAAKhC,EAAK5C,CAAC,EAAI,GAAO4E,EAAK,OAAS,GAAK,CAACqB,IAC7DD,EAAW,KAAKpD,EAAK5C,CAAC,CAAC,CAE3B,CAEA,OAAOgG,CACT,CAOA,OAAO,eAAepD,EAAyBgC,EAA4C,CACzF,IAAMoB,EAAa,IAAI,MAAcpD,EAAK,OAASgC,EAAK,MAAM,EAG9DoB,EAAW,KAAK,CAAC,EAGjB,QAAS,EAAI,EAAG,EAAIpB,EAAK,OAAQ,I
AAK,CACpC,IAAMR,EAAOD,EAAU,cAAcS,EAAK,CAAC,EAAGoB,EAAW,MAAM,EAC/D,GAAI5B,GAAQ4B,EAAW,OACrB,MAAM,IAAI,MAAM,iCAAmC,EAErD,GAAIA,EAAW5B,CAAI,IAAM,EACvB,MAAM,IAAI,MAAM,6BAA+B,EAGjD4B,EAAW5B,CAAI,EAAI,CACrB,CAGA,IAAI8B,EAAoB,EACxB,QAAS,EAAI,EAAG,EAAIF,EAAW,OAAQ,IACjCA,EAAW,CAAC,IAAM,IACpBA,EAAW,CAAC,EAAIpD,EAAKsD,GAAmB,GAM5C,GAAIA,IAAsBtD,EAAK,OAC7B,MAAM,IAAI,MAAM,mDAAmD,EAGrE,OAAOoD,CACT,CACF,EA6Fa3G,GAAN,MAAM8G,CAAU,CAOrB,OAAO,WAAWvD,EAAyBwB,EAAcgC,EAAiBC,EAC/C,CACzB,GAAID,EAAM,SAAW,EAAG,CACtB,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,4EAA8E,EAEhGF,EAAU,eAAevD,EAAKwB,CAAI,EAAGiC,EAAYD,CAAK,CACxD,CAEA,IAAME,EAAqB,CAAC,EACtBC,EAAU,CAAC,CAAC,EAClB,QAASvG,EAAI,EAAGA,EAAIoG,EAAM,OAAQ,EAAEpG,EAAG,CACjCA,IAAM,GACRuG,EAAQ,KAAKA,EAAQvG,EAAI,CAAC,EAAIoG,EAAMpG,EAAI,CAAC,CAAC,EAE5C,IAAMsC,EAAQM,EAAK,MAAM,EACzBN,EAAM8B,CAAI,EAAIgC,EAAMpG,CAAC,EACrBsG,EAAO,KAAKhE,CAAK,CACnB,CACA,MAAO,CAACgE,EAAQC,CAAO,CACzB,CAEA,OAAO,eAAeC,EAA8BH,EAAoBD,EAAiB,CAEvF,GAAII,EAAuBH,IAAe,EACxC,MAAM,IAAI,MAAM,0CAA0C,EAE5D,QAASrG,EAAI,EAAGA,EAAIqG,EAAY,EAAErG,EAChCoG,EAAM,KAAKI,EAAuBH,CAAU,CAEhD,CACF,EAgGa/G,GAAN,MAAMmH,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBpC,EAChFqC,EAAqBC,EAAgB,CACvC,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,MAAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAAS7D,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IACxCA,GAAO+D,EAAY,OACrBA,EAAY,KAAKD,EAAU9D,EAAM,CAAC,CAAC,EAEnC+D,EAAY/D,CAAG,EAAI8D,EAAU9D,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAM+D,EAAY,OAAQ/D,IAC1C,GAAIA,EAAM2B,EAAQ,QAChB,GAAIA,EAAQ3B,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhE2B,EAAQ,KAAK,CAAC,EAKlB,QAAS3B,EAAM,EAAGA,EAAM+D,EAAY,OAAQ/D,IAC1C,GAAIA,EAAMgE,EAAU,QAClB,GAAIA,EAAUhE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEgE,EAAU,KAAK,CAAC,EAKpB,QAAShE,EAAM,EAAGA,EAAM+D,EAAY,OAAS,EAAG/D,IAC9C,GAAIA,EAAMiE,EAAK,QACb,GAAIA,EAAKjE,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DiE,EAAK,KAAK,CAAC,EAKf,QAASjE,EAAM,EAAGA,EAAM+D,EAAY,OAAQ/D,IAAO,CACjD,GAAI+D,EAAY/D,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIiE,EAAKjE,CAAG,GAAK+D,EAAY/D,CAAG,GAAKiE,EAAKjE,EAAM+D,EAAY,MAAM,GAAKA,EAAY/D,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACH8D,EAA8BnC,EAA4BqC,EAC1DD,EAAgCE,EAAgBC,EAAkB,CACpE,GAAKA,EAIL,IAAID,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAInC,EAAQ,SAAYmC,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAAS9D,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IAC5C4D,EAAa,wBACTE,EAAU9D,EAAM,CAAC,EAAG2B,EAAQ3B,CAAG,EAAGgE,EAAUhE,CAAG,EAAG+D,EAAY/D,CAAG,EAAGiE,EAAMjE,EAAKA,EAAM8D,EAAU,OAAS,EACxGI,CAAO,EAEf,CAaA,OAAO,uBACHL,EAA2BC,EAA8BnC,EAAmBqC,EAC5ED,EAAuBE,EAAgBC,EAA4B,CACrE,GAAIJ,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMX,EAAa,CAACW,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWX,EAAYxB,EAASqC,EAAWD,EAAaE,EAAMC,CAAO,EACpFf,CACT,CAYA,OAAO,uBACHW,EAA8BK,EAA+BxC,EAAmBqC,EAChFD,EAAuBE,EAAgBC,EAA4B,CACrE,GAAIJ,EAAU,QAAU,GAAKK,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMhB,EAAa,CAACW,EAAU,CAAC,EAAGK,EAAW,CAAC,CAAC,EAE/C,OAAAP,EAAa,mBAAmB,GAAOE,EAAWX,EAAYxB,EAASqC,EAAWD,EAAaE,EAAMC,CAAO,EACrGf,CACT,CAKA,OAAe,mBACXU,EAA2BC,EAA8BX,EAAsBxB,EAC/EqC,EAA8BD,EAAgCE,EAAgBC,EAAkB,CAClG,GAAIL,EACF,QAAS7D,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IAC5CmD,EAAW,KAAK,CAAC,MAGnB,SAASnD,EAAM,EAAGA,EAAM8D,EAAU,OAAS,EAAG9D,IAC5CmD,EAAW,KAAKS,EAAa,wBACzBE,EAAU9D,EAAM,CAAC,EAAG2B,EAAQ3B,CAAG,EAAGgE,EAAUhE,CAAG,EAAG+D,EAAY/D,CAAG,EAAGiE,EAAMjE,EAAKA,EAAM8D,EAAU,OAAS,EACxGI,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXE,EAAgBC,EAAgBC,EAAkBC,EAAgBN,EAAgBO,EAClFC,EAAsBP,EAA0B,CAClD,IAAMQ,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIL,GAAWA,IAAY,S
ACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAD,EAAKO,CAAY,EAAI,EACrBP,EAAKQ,CAAY,EAAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAH,EAAKO,CAAY,EACgB,KAAK,MAAjCN,IAAY,cAA4BS,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/DV,EAAKQ,CAAY,EAAIE,EAAYV,EAAKO,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASH,EAAKO,CAAY,EAAIP,EAAKQ,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEa3H,GAAW,sBACXC,GAAW,uBCn4BxB,SAASiI,GAAOC,EAA+B,CAC7C,OAAQA,EAAM,CACZ,IAAK,OACL,IAAK,OACL,IAAK,QACH,MAAO,GACT,IAAK,QACL,IAAK,SACH,MAAO,GACT,IAAK,QACL,IAAK,SACL,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,qCAAqCA,CAAI,EAAE,CAC/D,CACF,CAEA,SAASC,GAAYD,EAA+D,CAClF,OAAQA,EAAM,CACZ,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,KAC/B,KAAK,QAAK,YAAY,SAAS,KAC7B,MAAO,GACT,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,MAC7B,MAAO,GACT,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,GACT,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,OAC7B,MAAO,GACT,QACE,MAAM,IAAI,MAAM,qCAAqC,QAAK,YAAY,SAASA,CAAI,CAAC,EAAE,CAC1F,CACF,CAEA,SAASE,GAAWC,EAAyBH,EAAuB,CAClE,OAAO,IAAKI,GAAoBJ,CAAI,GAAGG,CAAU,CACnD,CAEA,SAASC,GAAoBJ,EAAuB,CAClD,OAAQA,EAAM,CACZ,IAAK,OACL,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,QACH,OAAO,WACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,UACH,OAAO,aACT,IAAK,UACH,OAAO,aACT,QAEE,MAAM,IAAI,MAAM,mBAAmB,CACvC,CACF,CAGA,SAASK,GAAaC,EAASN,EAA+D,CAE5F,GAAIA,IAAS,QAAK,YAAY,SAAS,OAASA,IAASO,GAAO,eAAe,OAC7E,GAAID,EAAE,mBAAmB,UAAU,GAAKA,EAAE,SAAS,WAAW,EAC5D,MAAM,IAAI,UAAU,wBAAwB,UAG5CN,IAAS,QAAK,YAAY,SAAS,QAAUA,IAASO,GAAO,eAAe,QAC5EP,IAAS,QAAK,YAAY,SAAS,QAAUA,IAASO,GAAO,eAAe,QAC9E,GAAID,EAAE,mBAAmB,UAAU,GAAKA,EAAE,SAAS,CAAC,EAClD,MAAM,IAAI,UAAU,yBAAyB,MAG/C,OAAM,IAAI,UAAU,oBAAoB,QAAK,YAAY,SAASN,CAAI,CAAC,EAAE,EAG3E,OAAOM,EAAE,SAAS,CACpB,CAGA,SAASE,GAAUC,EAAgBT,EAAuDU,EAA4B,CACpH,OAAQV,EAAM,CACZ,KAAK,QAAK,YAAY,SAAS,KAC/B,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOS,EAAK,SAASC,CAAU,EACjC,KAAK,QAAK,YAAY,SAAS,KAC7B,OAAOD,EAAK,QAAQC,CAAU,EAChC,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOD,EAAK,UAAUC,EAAY,EAAI,EACxC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOD,EAAK,SAASC,EAAY,EAAI,EACvC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOD,EAAK,WAAWC,EAAY,EAAI,EACzC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOD,EAAK,SAASC,EAAY,EAAI,EACvC,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOD,EAAK,UAAUC,EAAY,EAAI,EACxC,KAAK,QAAK,YAAY,SAAS,MAC7B,OAAOL,GACHM,GAAK,SAASF,EAAK,UAAUC,EAAY,EAAI,EAAGD,EAAK,UAAUC,EAAa,EAAG,EAAI,EAAG,EAAK,EAAGV,CAAI,EACxG,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOS,EAAK,WAAWC,EAAY,EAAI,EACzC,KAAK,QAAK,YAAY,SAAS,OAC7B,OAAOL,GACHM,GAAK,SAASF,EAAK,UAAUC,EAAY,EAAI,EAAGD,EAAK,UAAUC,EAAa,EAAG,EAAI,EAAG,EAAI,EAAGV,CAAI,EACvG,QACE,MAAM,IAAI,MAAM,sCAAsC,QAAK,YAAY,SAASA,CAAI,CAAC,EAAE,CAC3F,CACF,CAzdA,IAGAY,GAIAC,GAGON,GAkCMO,GA5CbC,GAAAC,EAAA,kBAGAJ,GAAmB,SACnBK,KAEAC,KACAL,GAAmB,SACnBM,KAEOZ,GAASa,GAAY,aAAa,IAkC5BN,GAAN,MAAMO,CAAO,CA+GlB,YAIoBC,EAIAtB,EAA+BuB,EACvCC,EAA+CC,EAIvCC,EAAe,QAAK,OAAO,EAAG,CAT9B,UAAAJ,EAIA,UAAAtB,EAA+B,kBAAAuB,EACvC,uBAAAC,EAA+C,WAAAC,EAIvC,YAAAC,EAClB,KAAK,KAAOC,EAAU,wBAAwBL,CAAI,EAClD,IAAMM,EAAO,KAAK,KACZC,EAASN,IAAiB,QAAaC,IAAsB,QAAaC,IAAU,OAE1F,GAAIA,IAAU,QACRA,EAAM,SAAWG,EACnB,MAAM,IAAI,WAAW,uCAAwC,EAIjE,GAAI5B,IAAS,SAAU,CACrB,GAAIyB,IAAU,SAAc,CAAC,MAAM,QAAQA,CAAK,GAAK,CAACA,EAAM,MAAMnB,GAAK,OAAOA,GAAM,QAAQ,GAC1F,MAAM,IAAI,UAAU,gCAAgC,EAGlDuB,IACF,KAAK,MAAQ,IAAI,MAAcD,CAAI,EAEvC,KAAO,CACL,GAAIH,IAAU,OAAW,CACvB,IAAMK,EAAc1B
,GAAoBJ,CAAI,EAC5C,GAAI,EAAEyB,aAAiBK,GACrB,MAAM,IAAI,UAAU,wBAAwBA,EAAY,IAAI,EAAE,CAElE,CAEA,GAAID,EAAO,CACT,IAAME,EAAM,IAAI,YAAYH,EAAO7B,GAAOC,CAAI,CAAC,EAC/C,KAAK,MAAQE,GAAW6B,EAAK/B,CAAI,CACnC,CACF,CACF,CAxJA,IAAI,MAAmB,CACrB,GAAI,KAAK,QAAU,OAAW,CAC5B,IAAMgC,EAAO,KAAK,aAAc,KAAK,MAAM,EAC3C,GAAIA,EAAK,SAAW,KAAK,KACvB,MAAM,IAAI,MAAM,4FAA4F,EAE9G,KAAK,MAAQA,CACf,CACA,OAAO,KAAK,KACd,CAKA,IAAI,YAAa,CACf,GAAI,KAAK,OAAS,SAChB,MAAM,IAAI,UAAU,yBAAyB,EAG/C,OAAO,KAAK,IACd,CAMA,IAAI,aAAc,CAChB,OAAQ,KAAK,KAAM,CACjB,IAAK,QACL,IAAK,OACL,IAAK,SACL,IAAK,QACL,IAAK,QACL,IAAK,SACL,IAAK,OACH,OAAO,KAAK,KAEd,QACE,MAAM,IAAI,UAAU,4EAA4E,CACpG,CACF,CAKA,IAAI,WAAY,CACd,OAAQ,KAAK,KAAM,CACjB,IAAK,UACL,IAAK,UACH,OAAO,KAAK,KAEd,QACE,MAAM,IAAI,UAAU,2CAA2C,CACnE,CACF,CAMA,IAAI,YAAa,CACf,GAAI,KAAK,OAAS,SAChB,OAAO,KAAK,KAEd,MAAM,IAAI,UAAU,oCAAoC,CAC1D,CAKA,IAAIC,EAAyE,CAC3E,OAAO,KAAK,KAAKN,EAAU,gBAAgBM,EAAS,KAAK,OAAO,CAAC,CACnE,CAKA,IAAIA,EAA4BC,EAAoD,CAClF,KAAK,KAAKP,EAAU,gBAAgBM,EAAS,KAAK,OAAO,CAAC,EAAIC,CAChE,CAKA,MAAM,SAA+B,CACnC,OAAI,KAAK,QAAU,SACjB,KAAK,MAAQ,MAAM,KAAK,kBAAmB,KAAK,MAAM,GAEjD,KAAK,KACd,CAWA,IAAI,SAA6B,CAC/B,OAAK,KAAK,WACR,KAAK,SAAWP,EAAU,eAAe,KAAK,IAAI,GAE7C,KAAK,QACd,CAqDA,OAAO,UAAUQ,EAAwC,CACvD,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,6CAA6C,EAE/D,IAAMnC,EAAOoC,GAAU,wBAAwBD,EAAY,QAAS,EAC9Db,EAAOc,GAAU,oBAAoBD,EAAY,IAAK,EAEtDD,EAAQ,IAAIb,EAAOC,EAAMtB,CAAI,EAEnC,GAAIA,IAAS,SAGXmC,EAAY,WAAY,QAAQ,CAACE,EAAK/B,IAAM,CAC1C4B,EAAM,KAAK5B,CAAC,EAAIgC,GAAiBD,CAAG,CACtC,CAAC,UAGCF,EAAY,SAAW,OAAOA,EAAY,QAAQ,YAAe,UACjEA,EAAY,QAAQ,WAAa,EAAG,CAItC,IAAMI,EAAWL,EAAM,KACjBM,EACF,IAAI,SAASL,EAAY,QAAQ,OAAQA,EAAY,QAAQ,WAAYA,EAAY,QAAQ,UAAU,EACrGM,EAAcxC,GAAYkC,EAAY,QAAS,EAC/CO,EAASP,EAAY,QAAQ,WAAaM,EAEhD,GAAIN,EAAY,QAAQ,WAAaM,IAAgB,EACnD,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIF,EAAS,SAAWG,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,QAASpC,EAAI,EAAGA,EAAIoC,EAAQpC,IAAK,CAC/B,IAAMqC,EAAInC,GAAUgC,EAAYL,EAAY,SAAW7B,EAAImC,CAAW,EACtEF,EAASjC,CAAC,EAAIqC,CAChB,CACF,KAAO,CAEL,IAAIC,EACJ,OAAQT,EAAY,SAAU,CAC5B,KAAK,QAAK,YAAY,SAAS,MAC7BS,EAAQT,EAAY,UACpB,MACF,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,KAC/B,KAAK,QAAK,YAAY,SAAS,MAC/B,KAAK,QAAK,YAAY,SAAS,KAC7BS,EAAQT,EAAY,UACpB,MACF,KAAK,QAAK,YAAY,SAAS,MAC7BS,EAAQT,EAAY,UACpB,MACF,KAAK,QAAK,YAAY,SAAS,OAC7BS,EAAQT,EAAY,WACpB,MACF,KAAK,QAAK,YAAY,SAAS,OAC/B,KAAK,QAAK,YAAY,SAAS,OAC7BS,EAAQT,EAAY,WACpB,MACF,QAEE,MAAM,IAAI,MAAM,kBAAkB,CACtC,CAEA,GAAIS,GAAU,KACZ,MAAM,IAAI,MAAM,kDAAkD,EAGpE,IAAMZ,EAAOE,EAAM,KACnB,GAAIF,EAAK,SAAWY,EAAM,OACxB,MAAM,IAAI,MAAM,uBAAuB,EAGzC,QAAStC,EAAI,EAAGA,EAAIsC,EAAM,OAAQtC,IAAK,CACrC,IAAMuC,EAAUD,EAAMtC,CAAC,EACnBK,GAAK,OAAOkC,CAAO,EACrBb,EAAK1B,CAAC,EAAID,GAAawC,EAASV,EAAY,QAAQ,EAEpDH,EAAK1B,CAAC,EAAIuC,CAEd,CACF,CAEA,OAAOX,CACT,CASA,OAAO,SAASF,EAA2CV,EAAyBtB,EAAuB,CACzG,OAAO,IAAIqB,EAAOC,EAAMtB,EAAM,OAAW,OAAWgC,CAAI,CAC1D,CAEA,OAAO,cAAcc,EAA0B,CAC7C,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,6CAA6C,EAE/D,IAAMxB,EAAOc,GAAU,wBAAwBU,CAAS,EAClD9C,EAAOoC,GAAU,wBAAwBU,EAAU,SAAS,CAAC,EAE7DZ,EAAQ,IAAIb,EAAOC,EAAMtB,CAAI,EAEnC,GAAIA,IAAS,SAGX,QAAS,EAAI,EAAG,EAAI8C,EAAU,iBAAiB,EAAG,IAChDZ,EAAM,KAAK,CAAC,EAAIY,EAAU,WAAW,CAAC,UAItCA,EAAU,aAAa,GAAK,OAAOA,EAAU,cAAc,GAAM,UAAYA,EAAU,cAAc,EAAI,EAAG,CAI9G,IAAMP,EAAWL,EAAM,KACjBM,EAAa,IAAI,SACnBM,EAAU,aAAa,EAAG,OAAQA,EAAU,aAAa,EAAG,WAAYA,EAAU,cAAc,CAAC,EAC/FL,EAAcxC,GAAY6C,EAAU,SAAS,CAAC,EAC9CJ,EAASI,EAAU,cAAc,EAAIL,EAE3C,GAAIK,EAAU,cAAc,EAAIL,IAAgB,EAC9C,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIF,EAAS,SAAWG,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,QAASpC,EAAI,EAAGA,EAAIoC,EAAQpC,IAAK,CAC/B,IAAMqC,EAAInC,GAAUgC,EAAYM,EAAU,SAA
S,EAAGxC,EAAImC,CAAW,EACrEF,EAASjC,CAAC,EAAIqC,CAChB,CACF,CACA,OAAOT,CACT,CACF,IC1TO,SAASa,GAAQC,EAAc,CACpC,OAAOA,IAAY,EAAIC,GAAcC,EACvC,CAEO,SAASC,GAAsBH,EAAsB,CAC1D,IAAMI,EAAOL,GAAQC,CAAO,EAC5B,MAAO,GAAGI,EAAK,OAAO;AAAA;AAAA,QAEhBA,EAAK,SAAS;AAAA,QACdA,EAAK,SAAS;AAAA;AAAA,QAEdA,EAAK,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAO1B,CAEO,SAASC,GAAsBL,EAAsB,CAC1D,IAAMI,EAAOL,GAAQC,CAAO,EAC5B,MAAO,GAAGI,EAAK,OAAO;AAAA;AAAA;AAAA;AAAA,MAIlBA,EAAK,WAAW;AAAA,MAChBA,EAAK,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KA4B5B,CAEO,SAASE,GAAyBN,EAAcO,EAAmC,CACxF,IAAMH,EAAOL,GAAQC,CAAO,EAC5B,MAAO;AAAA;AAAA,kBAESO,CAAiB;AAAA;AAAA;AAAA,MAG7BH,EAAK,MAAM;AAAA;AAAA,GAGjB,CAtGA,IAgBMH,GASAC,GAzBNM,GAAAC,EAAA,kBAgBMR,GAAoB,CACxB,QAAS,GACT,UAAW,YACX,cAAe,UACf,YAAa,UACb,UAAW,YACX,OAAQ,eACR,kBAAmB,EACrB,EACMC,GAAoB,CACxB,QAAS,kBACT,UAAW,KACX,cAAe,MACf,YAAa,KACb,UAAW,UACX,OAAQ,cACR,kBAAmB,uBACrB,ICjCA,IAAAQ,GAAAC,EAAA,oBCeA,eAAsBC,GAClBC,EAAwBC,EAAWC,GAAqB,EAAGC,EAAoC,CACjG,OAAO,IAAI,QAAc,CAACC,EAASC,IAAW,CAC5C,IAAIC,EAAW,EAETC,EAAQ,IAAM,CAClB,GAAIP,EAAQ,EAAG,CACbI,EAAQ,EACR,MACF,CAEAE,IAEA,IAAME,EAAcP,EAAQK,CAAQ,EAEpC,GAAIH,GAAc,MAAQG,GAAYH,EAAY,CAChDE,EAAO,EACP,MACF,CACA,WAAWE,EAAOC,CAAW,CAC/B,EAEAD,EAAM,CACR,CAAC,CACH,CAMO,SAASE,GAA2CC,EAA6B,CACtF,OAAAC,GAAO,OAAOD,EAAgB,KAAeA,EAAY,SAAW,EAAG,IAAM,qCAAqC,EAC3G,MAAQA,EAAY,OAAO,CAAC,EAAE,YAAY,EAAIA,EAAY,MAAM,CAAC,CAC1E,CAMO,SAASE,GAAsDF,EAA6B,CACjG,OAAAC,GAAO,OAAOD,EAAgB,KAAeA,EAAY,SAAW,EAAG,IAAM,qCAAqC,EAC3G,MAAQA,EAAY,OAAO,CAAC,EAAE,YAAY,EAAIA,EAAY,MAAM,CAAC,EAAI,aAC9E,CAGO,SAASG,GAAkBC,EAA+BC,EAAmC,CAElG,IAAIC,EAA0B,KAAK,MAAM,KAAK,UAAUF,CAAU,CAAC,EACnE,OAAAE,EAAgBD,EACTC,CACT,CAGO,SAASC,GAAkBC,EAAkBC,EAA4B,CAC9E,OAAOA,EAAS,IAAIC,GAAKF,EAAOE,CAAC,CAAC,EAAE,KAAK,IAAI,CAC/C,CAGO,SAASC,GAAkBC,EAAsB,CACtD,GAAIA,GAAQ,EACV,MAAO,MACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QACF,GAAIA,IAAS,EAClB,MAAO,QAEP,MAAM,MAAM,gBAAgBA,CAAI,uBAAuB,CAE3D,CAEO,SAASC,GAAcD,EAAO,EAAa,CAChD,MAAO,CAAC,IAAK,IAAK,IAAK,IAAK,IAAK,GAAG,EAAE,MAAM,EAAGA,CAAI,CACrD,CA7FA,IAAAE,GAAAC,EAAA,kBAGAC,OCEO,SAASC,GAAeC,EAAcC,EAAwB,CACnE,OAAOC,GAAcD,CAAI,EAAE,IAAIE,GAAK,GAAGH,CAAI,IAAIG,CAAC,EAAE,CACpD,CAEO,SAASC,GAAYJ,EAAcC,EAAwB,CAChE,OAAIA,IAAS,EACJ,CAACD,CAAI,EAEPD,GAAeC,EAAMC,CAAI,CAClC,CAEO,SAASI,IAA4B,CAC1C,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAaT,CA9BA,IAAAC,GAAAC,EAAA,kBAGAC,OCgEA,SAASC,GAAwBC,EAAcC,EAA0BC,EAAwB,CAC/F,GAAIF,IAAS,EACX,MAAO,QAET,GAAIA,IAAS,EACX,MAAO,QAAQC,EAAM,CAAC,CAAC,GAGzB,IAAIE,EAAO,GACX,QAASC,EAAIJ,EAAO,EAAGI,EAAIJ,EAAMI,IAC/BD,GAAQ,GAAGD,EAAKE,CAAC,CAAC,OAAOH,EAAMG,EAAIJ,EAAO,CAAC,CAAC,GACxCI,EAAIJ,EAAO,IACbG,GAAQ,MAIZ,OAAOA,CACT,CAKA,SAASE,GAAUJ,EAA0BC,EAAwB,CACnE,IAAMF,EAAOC,EAAM,OAEnB,GAAID,IAAS,EACX,MAAO,kBAGT,GAAIA,IAAS,EACX,MAAO;AAAA,wBACaC,EAAM,CAAC,CAAC;AAAA,kBAI9B,IAAMK,EAAU,OACVC,EAAU,SACVC,EAAU,SACVC,EAAU,WACZC,EAAI,GACR,GAAIV,EAAO,EACT,QAASI,EAAI,EAAGA,EAAIJ,EAAO,EAAG,EAAEI,EAC9BM,EAAIA,EAAI,GAAGR,EAAKE,CAAC,CAAC,IAGtB,MAAO,QAAQM,CAAC,GAAGJ,CAAO;AAAA,8BACEI,CAAC,GAAGF,CAAO;AAAA,8BACXE,CAAC,GAAGH,CAAO;AAAA,uCACFG,CAAC,GAAGD,CAAO,GAClD,CAKA,SAASE,GAASX,EAAcE,EAAgBU,EAAcC,EAAsB,CAClF,OAAIb,IAAS,GAAKA,IAAS,EAClB,GAIO;AAAA,cACJE,EAAKF,EAAO,CAAC,CAAC;AAAA,cACdE,EAAKF,EAAO,CAAC,CAAC;AAAA,gBACZE,EAAKF,EAAO,CAAC,CAAC;AAAA,gBACdE,EAAKF,EAAO,CAAC,CAAC;AAAA,0BACJa,CAAI;AAAA,0BACJD,CAAI;AAAA,KAI9B,CAzIA,IAWME,GAMAC,GA4COC,GA7DbC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAEAC,KAEMR,GAAsB,CAC1B,KAAM,OACN,WAAY,CAA
C,GAAG,EAChB,WAAY,EAA6B,CAC3C,EAEMC,GAAwB,CAACQ,EAAgCC,IAA+B,CAC5F,IAAMC,EAAOC,GAAQH,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDI,EAAaH,EAAM,KAEnBI,EAAYD,EAAW,OAEvBE,EAAaL,EAAM,KAAK,OAExBM,EAAiBC,GAAkBF,CAAU,EAC7CG,EAAWC,GAAY,KAAMJ,CAAU,EACvCK,EAAQvB,GAASkB,EAAYG,EAAUL,EAAWA,EAAW,OAAS,CAAC,EAAGA,EAAWA,EAAW,OAAS,CAAC,CAAC,EAE7GQ,EACAP,IAAc,EAChBO,EAAkB,CAAC,EAAG,CAAC,EACdP,IAAc,EACvBO,EAAkB,CAACR,EAAW,CAAC,EAAG,CAAC,EAEnCQ,EAAkB,CAACR,EAAWE,EAAa,CAAC,EAAGF,EAAWE,EAAa,CAAC,CAAC,EAE3E,IAAMO,EAAuBrC,GAAwB8B,EAAYM,EAAiBH,CAAQ,EACpFK,EAAShC,GAAUsB,EAAYK,CAAQ,EAEvCM,EAAe;AAAA;AAAA,YAEXR,CAAc;AAAA;AAAA,eAEXM,CAAoB;AAAA,cACrBX,EAAK,MAAM;AAAA;AAAA,cAEXS,CAAK;AAAA;AAAA,cAELT,EAAK,MAAM,WAAWY,CAAM;AAAA;AAAA;AAAA,QAIxC,MAAO,CACL,GAAGvB,GACH,QAAS,GACT,OAAQ,CAAC,KAAMU,EAAM,KAAM,KAAMA,EAAM,KAAM,aAA+B,EAC5E,aAAAc,CACF,CACF,EAEatB,GAA8B,CAACO,EAAgCC,KACvE,CAAC,GAAGV,GAAqB,IAAK,IAAMC,GAAsBQ,EAASC,CAAK,CAAC,KC0BvE,SAASe,GAAcC,EAAoD,CAChF,GAAIA,EAAM,SAAW,EACnB,MAAO,CAAC,EAAG,EAAG,CAAC,EAGjB,IAAIC,EAAQ,EACZ,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAS,EAAG,EAAEE,EACtCD,GAASD,EAAME,CAAC,EAElB,MAAO,CAACD,EAAOD,EAAM,OAAS,EAAIA,EAAMA,EAAM,OAAS,CAAC,EAAI,EAAGA,EAAMA,EAAM,OAAS,CAAC,CAAC,CACxF,CAaO,SAASG,GAAeC,EAAyBC,EAAiC,CACvF,IAAIC,EAAiB,GACrB,OAAIF,EAAK,SAAW,GAAKC,EAAa,SAAW,EAC/CC,EAAiB,GACRF,EAAK,OAAS,GAAKC,EAAa,OAAS,EAClDC,EAAiBF,EAAKA,EAAK,OAAS,CAAC,IAAMC,EAAaA,EAAa,OAAS,CAAC,EAE/EC,EAAiBF,EAAKA,EAAK,OAAS,CAAC,IAAMC,EAAaA,EAAa,OAAS,CAAC,GAC3ED,EAAKA,EAAK,OAAS,CAAC,IAAMC,EAAaA,EAAa,OAAS,CAAC,EAG7DC,CACT,CAEA,SAASC,GAAuBP,EAAyC,CACvE,IAAMQ,EAAUC,EAAU,eAAeT,CAAK,EACxCU,EAAS,CAAC,IAAK,IAAK,GAAG,EACvBC,EAAQ,QAWd,MAAO;AAAA;AAAA,QAVwBH,EACK,IAAI,CAACI,EAAQV,IAAM,CAClB,IAAMW,EAAQ,OAAOH,EAAOR,CAAC,CAAC,MAAMS,CAAK,MAAMC,CAAM,GAC/CE,EAAQZ,IAAMM,EAAQ,OAAS,EACjC,OAAOE,EAAOR,EAAI,CAAC,CAAC,MAAMS,CAAK,MAAMD,EAAOR,CAAC,CAAC,MAAMU,CAAM,GAC1D,YAAYF,EAAOR,CAAC,CAAC,MAAMU,CAAM,GACrC,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,CAIf;AAAA;AAAA;AAAA,GAI9B,CAEA,SAASC,GAAwBf,EAAyC,CACxE,IAAMQ,EAAUC,EAAU,eAAeT,CAAK,EAE9C,MAAO;AAAA;AAAA;AAAA,wBAGeQ,EAAQ,CAAC,CAAC,iBAAiBA,EAAQ,CAAC,CAAC;AAAA;AAAA,CAG7D,CA5JA,IAWMQ,GAGAC,GAoEOC,GAlFbC,GAAAC,EAAA,kBAIAC,KACAC,KAEAC,KAEAC,KAEMR,GAAwCS,IACzC,CAAC,KAAM,mBAAoB,WAAY,EAAmB,EAAG,WAAY,CAAC,GAAG,EAAG,UAAW,GAAGA,CAAa,EAAE,GAE5GR,GACF,CAACS,EAAgCC,EAAiBC,EAA2BH,IAC1D,CACb,IAAMI,EAAeF,EAAQ,KACvBG,EAAsBL,EAExBM,EAAW,GACf,QAAS7B,EAAI,EAAGA,EAAI,EAAGA,IAAK,CAC1B,IAAI8B,EAAe,GACnB,OAAQ9B,EAAG,CACT,IAAK,GACH8B,EAAe,qBACf,MACF,IAAK,GACHA,EAAe,4CACf,MACF,IAAK,GACHA,EAAe,4CACf,MACF,IAAK,GACHA,EAAe,8CACf,MACF,QACE,MAAM,IAAI,KACd,CAEAD,GAAY;AAAA,UACdC,CAAY;AAAA,UACZ9B,EAAI,EAAI,sDAAwD,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAMzDA,CAAC;AAAA;AAAA,UAEVA,EAAI,EAAI,IAAM,EAAE;AAAA,OAEhB,CACA,IAAM+B,EAAOC,GAAQR,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EAExDS,EAAe;AAAA,QACvB5B,GAAuBsB,CAAY,CAAC;AAAA,QACpCd,GAAwBe,CAAmB,CAAC;AAAA,QAC5CM,GAAkB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQNN,EAAoB,CAAC,CAAC;AAAA,qBACtBA,EAAoB,CAAC,CAAC;AAAA;AAAA,UAEjCC,CAAQ;AAAA,UACRE,EAAK,MAAM;AAAA;AAAA,MAIX,MAAO,CACL,GAAGL,EACH,OAAQ,CAAC,KAAME,EAAqB,KAAMH,EAAQ,KAAM,aAA+B,EACvF,aAAAQ,EACA,QAAS,EACX,CACF,EAEKjB,GACT,CAACQ,EAAgCC,EAAiBF,IAAwD,CACxG,IAAMG,EAAWZ,GAAqCS,CAAa,EACnE,MAAO,CAAC,GAAGG,EAAU,IAAK,IAAMX,GAAiCS,EAASC,EAASC,EAAUH,CAAa,CAAC,CAC7G,ICtFJ,IAOaY,GAPbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEaJ,GAAgB,CAACK,EAAyCC,IAAoC,CACzG,IAAMC,EAAcD,EAAM,MACpBE,EAAOC,GAAQJ,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EAIjEK,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBA6CDF,EAAK,SAAS;AAAA,QAC5BA,EAAK,MAAM;AAAA,OAEXG,EAAc,CAClB,KAAM,cACN,WAAY,EAAqB,EACjC,WAAY,CAAC,GAAG,EAChB,OAAQ,CAAC,KAAMJ,EAAa,KAAMD,EAAM,OAAO,KAAM,aAA6C,EAClG,aAAAI,EACA,QAAS,EACX,EACA,OAAOL,EAAiB,eAAeM,EAAa,CAACL,EAAM,MAAM,CAAC,CACpE,ICnBA,SAASM,GAAgBC,EAAcC,EAAwB,CAC7D,GAAID,IAAS,EACX,MAAO,KAGT,IAAIE,EAAS,GACb,QAASC,EAAI,EAAGA,EAAIH,EAAMG,IACxBD,GAAUD,EAAKE,CAAC,EACZA,EAAIH,EAAO,IACbE,GAAU,KAGd,OAAOA,CACT,CAhEA,IAWME,GAMOC,GA+BAC,GAhDbC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAEAC,KAEMR,GAAwB,CAC5B,KAAM,SACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAmB,CACjC,EAEaC,GAA0B,CAACQ,EAAgCC,IAA+B,CACrG,IAAMd,EAAOc,EAAM,KAAK,OAElBC,EAAWC,GAAY,KAAMhB,CAAI,EACjCiB,EAAYF,EAAS,MAAM,EAAE,EAC7BG,EAAiBC,GAAkBnB,CAAI,EACvCoB,EAAgBC,GAAkB,EAElCC,EADYR,EAAM,KAAK,SAAW,EACR,GAAKf,GAAgBC,EAAMe,CAAQ,EAC7Db,EAASF,GAAQ,EAAI,KAAO,QAAQiB,EAAU,KAAK,GAAG,CAAC,IACvDM,EAAOC,GAAQX,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDY,EAAe;AAAA,MACjBL,CAAa;AAAA;AAAA,QAEXF,CAAc;AAAA;AAAA;AAAA,iCAGWI,CAAY;AAAA;AAAA,SAEpCC,EAAK,MAAM,mCAAmCrB,CAAM;AAAA;AAAA,KAI3D,MAAO,CACL,GAAGE,GACH,QAAS,GACT,OAAQ,CAAC,KAAMU,EAAM,KAAM,KAAMA,EAAM,KAAM,aAAiC,EAC9E,aAAAW,CACF,CACF,EAEanB,GAAgC,CAACO,EAAgCC,KACzE,CAAC,GAAGV,GAAuB,IAAK,IAAMC,GAAwBQ,EAASC,CAAK,CAAC,KCjDlF,IAyCaY,GAoDAC,GAmCAC,GAhIbC,GAAAC,EAAA,kBAGAC,KAsCaL,GAAN,KAAmD,CAKxD,YAAYM,EAA4BC,EAAW,EAAG,CACpD,GAAIA,IAAa,EACf,KAAK,eAAiBD,EAAG,KACzB,KAAK,OAASA,EAAG,IACjB,KAAK,YAAcA,EAAG,MACtB,KAAK,YAAcC,UACVA,IAAa,EACtB,KAAK,eAAiBD,EAAG,QACzB,KAAK,OAASA,EAAG,KACjB,KAAK,YAAcA,EAAG,MACtB,KAAK,YAAcC,MAEnB,OAAM,IAAI,MAAM,+BAA+BA,CAAQ,EAAE,CAE7D,CACA,OAAOC,EAA4BC,EAA4C,CAC7E,IAAIC,EACAC,EACJ,OAAIH,EAAI,cAAgB,eACtBI,GAAO,QAAQ,UAAW,yDAAyD,EACnFD,EAAS,IAAI,aAAaH,CAAG,GAE3BC,EAAc,KAAK,YAAcD,EAAI,QACvCI,GAAO,QAAQ,UAAW,gDAAgD,EAC1ED,EAASH,EACTE,EAAS,KAAK,SAASD,EAAc,KAAK,WAAW,EACrDE,EAAO,QAAQ,CAACE,EAAGC,IAAMJ,EAAOI,CAAC,EAAID,CAAC,IAEtCF,EAASH,EACTE,EAASC,GAEJD,CACT,CACA,SAASK,EAAqC,CAC5C,OAAO,IAAI,aAAaA,EAAO,CAAC,CAClC,CACA,OAAOC,EAA+BC,EAAgC,CACpE,OAAI,KAAK,cAAgB,EACDD,EAAwB,OAAO,CAACE,EAAQC,IAAUA,EAAQ,IAAM,CAAC,EAAE,SAAS,EAAGF,CAAQ,EAGxGD,EAAO,SAAS,EAAGC,CAAQ,CACpC,CACF,EAIahB,GAAN,KAAkD,CAKvD,YAAYK,EAA2BC,EAAW,EAAGa,EAAsB,CACzE,GAAIb,IAAa,GAAKA,IAAa,EACjC,MAAM,IAAI,MAAM,+BAA+BA,CAAQ,EAAE,EAE3D,KAAK,eAAiBD,EAAG,KACzB,KAAK,OAASA,EAAG,KACjB,KAAK,YAAcC,EACnB,KAAK,YAAca,GAAed,EAAG,KACvC,CACA,OAAOE,EAAmBC,EAA4C,CACpE,IAAIY,EAAOb,EACX,OAAI,KAAK,cAAgB,IACvBI,GAAO,QAAQ,UAAW,+BAA+B,EACzDS,EAAO,KAAK,SAASZ,CAAW,EAChCD,EAAI,QAAQ,CAACK,EAAG,IAAMQ,EAAK,EAAI,CAAC,EAAIR,CAAC,GAEhCQ,CACT,CACA,SAASN,EAAqC,CAC5C,OAAO,IAAI,aAAaA,EAAO,CAAC,CAClC,CACA,OAAOC,EAA+BC,EAAgC,CACpE,OAAI,KAAK,cAAgB,EACDD,EAAwB,OAAO,CAACE,EAAQC,IAAUA,EAAQ,IAAM,CAAC,EAAE,SAAS,EAAGF,CAAQ,EAGxGD,EAAO,SAAS,EAAGC,CAAQ,CACpC,CACF,EAEaf,GAAN,KAA8C,CAKnD,YAAYI,EAA2BC,EAAW,EAAG,CADrD,iBAAc,EAEZ,GAAIA,IAAa,EACf,KAAK,eAAiBD,EAAG,MACzB,KAAK,OAASA,EAAG,MACjB,KAAK,YAAcA,EAAG,cACtB,KAAK,YAAcC,UACVA,IAAa,EACtB,KAAK,eAAiBD,EAAG,KACzB,KAAK,OAASA,EAAG,KACjB,KAAK,YAAcA,EAAG,cACtB,KAAK,YAAcC,MAEnB,OAAM,IAAI,MAAM,+BAA+BA,CAAQ,EAAE,CAE7D,CACA,OAAOC,EAAiBc,EAA6C,CACnE,OAAO,IAAI,WAAWd,EAAI,OAAQA,EAAI,WAAYA,EAAI,UAAU,CAClE,CACA,SAASO,EAAqC,CAC5C,OAAO,IAAI,WAAWA,EAAO,KAAK,WAAW,CAC/C,CACA,OAAOC,EAA+BC,EAA8B,CAClE,GAAID,aAAkB,WACpB,OAAOA,EAAO,SAAS,EAAGC,CAAQ,EAEpC,MAAM,IAAI,MAAM,uBAAuBD,EAAO,WAAW,EAAE,CAC7D,CACF,IChKA,IAQaO,GAcAC,GAUAC,GAhCbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEaN,GACT,CAACO,EAA8CC,EAC9CC,IAA4C,CAC3C,IAAMC,EAAWD,IAAgB,GAAwBA,IAAgB,EAAgC,EAAI,EACvGE,EAAWF,IAAgB,EAC3BG,EAAaH,IAAgB,GAAgCA,IAAgB,EAC7EI,EAAYJ,IAAgB,EAAkCD,EAAM,OAAS,EAAI,OACjFM,EAAgBL,IAAgB,EA
ClCD,EAAM,IAAI,CAACO,EAAGC,IAAMA,IAAMR,EAAM,OAAS,EAAIO,EAAI,EAAIA,CAAC,EACtD,OACJ,OAAOb,GACHK,EAAuBC,EAAOE,EAASI,EAAe,CAAC,SAAAH,EAAU,UAAAC,EAAW,UAAAC,CAAS,CAAC,CAC5F,EAESZ,GACT,CAACM,EAA8CC,EAA0BC,IACjD,CAClB,IAAMQ,EAASjB,GAAmCO,EAAuBC,EAAOC,CAAW,EAC3F,MAAO,CAACQ,EAAO,MAAOA,EAAO,MAAM,CACrC,EAKKf,GACT,CAACK,EAA8CC,EAA0BU,EAAgB,EACxFJ,EAAmCK,IAA4C,CAC9E,IAAMR,EAAW,CAAC,EAAEQ,GAASA,EAAM,UAC7B,CAACC,EAAOC,CAAM,EAAId,EAAsB,iBAAiBI,GAAWG,GAAiBN,EAAeW,CAAK,EACzGG,EAAOd,EAAM,OACfe,EAAef,EAAM,MAAM,CAAC,EAIhC,GAHIc,IAAS,IACXC,EAAe,CAAC,CAAC,GAEfL,IAAa,EAEfJ,EAAgBN,UACPG,EAAU,CACnB,GAAIO,IAAa,EACf,MAAM,IAAI,MAAM,oCAAoC,EAEtDJ,EAAgBN,EACZc,EAAO,IACTC,EAAaD,EAAO,CAAC,EAAI,KAAK,KAAKC,EAAaD,EAAO,CAAC,EAAI,CAAC,GAE3DA,EAAO,IACTC,EAAaD,EAAO,CAAC,EAAI,KAAK,KAAKC,EAAaD,EAAO,CAAC,EAAI,CAAC,EAEjE,SAAW,CAACR,EACV,MAAM,IAAI,MAAM,kDAAkD,EAEpE,MAAO,CACL,MAAAM,EACA,OAAAC,EACA,SAAAH,EACA,SAAAP,EACA,MAAOY,EACP,QAASC,EAAU,eAAeD,CAAY,EAC9C,cAAAT,EACA,WAAaK,GAASA,EAAM,SAC9B,CACF,ICrEJ,IAiBMM,GAaOC,GA9BbC,GAAAC,EAAA,kBAIAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMb,GACF,CAACc,EAA4CC,IAA6C,CACxF,IAAMC,EACFD,EAAkB,IAAIE,GAAW,GAAGA,EAAQ,cAAc,KAAK,GAAG,CAAC,IAAIA,EAAQ,KAAK,IAAIA,EAAQ,MAAM,EAAE,EACnG,KAAK,GAAG,EACbC,EAAMJ,EAAY,KACtB,OAAIA,EAAY,YACdI,GAAO,IAAMJ,EAAY,UAAY,KAEvCI,GAAO,IAAMF,EACNE,CACT,EAESjB,GAAN,KAAwD,CAG7D,YAAmBkB,EAA8B,CAA9B,aAAAA,EACjB,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAA2B,IAAI,GACtC,CAKA,+BAA+BC,EAA0BC,EAA4C,CACnG,OAAOC,GAA+B,KAAK,QAAQ,eAAgBF,EAAOC,CAAW,CACvF,CAEA,eAAeE,EAAwCP,EAAwC,CAC7F,GAAIA,EAAO,OAASO,EAAQ,WAAW,OACrC,MAAM,IAAI,MAAM,mCAAmCA,EAAQ,WAAW,MAAM,GAAG,EAEjF,GAAIA,EAAQ,WAAW,SAAWA,EAAQ,WAAW,OACnD,MAAM,IAAI,MAAM,6CAA6C,EAI/D,IAAMR,EAAmC,CAAC,EAC1C,QAASS,EAAI,EAAGA,EAAID,EAAQ,WAAW,OAAQ,EAAEC,EAC/CT,EAAkBS,CAAC,EAAI,KAAK,uBAAuBR,EAAOQ,CAAC,EAAGD,EAAQ,WAAWC,CAAC,CAAC,EAGrF,IAAMN,EAAMlB,GAAwBuB,EAASR,CAAiB,EAC1DU,EAAW,KAAK,QAAQ,eAAe,YAAYP,CAAG,EACpDJ,EAAcW,EAChBA,EAAS,YACR,OAAQF,EAA8B,KAAQ,WAAcA,EAA8B,IAAI,EAClCA,EAG3DG,EAAsBC,GACxB,KAAK,QAAQ,eAAgBb,EAAY,OAAO,KAAMA,EAAY,OAAO,WAAW,EAClFc,EAAoB,KAAK,kBAAkBF,EAAqBZ,EAAY,OAAO,IAAI,EAE7F,OAAKW,IACHA,EAAW,KAAK,QAAQ,eAAe,MAAMX,EAAaC,EAAmBa,CAAiB,EAC9F,KAAK,QAAQ,eAAe,YAAYV,EAAKO,CAAQ,GAGvD,KAAK,WAAWA,EAAUV,EAAmBa,CAAiB,EACvDA,CACT,CAEA,IAAIL,EAA4BP,EAAmC,CAEjE,OAD0B,KAAK,eAAeO,EAASP,CAAM,EACpC,MAC3B,CAEQ,WAAWS,EAAoBT,EAAuBa,EAA2B,CAEvF,QAASL,EAAI,EAAGA,EAAIR,EAAO,OAAQ,EAAEQ,EACnC,GAAI,CAAC,CAACR,EAAOQ,CAAC,EAAE,WAAcC,EAAS,YAAY,WAAWD,CAAC,IAAM,GACnE,MAAM,IAAI,MAAM,SAASA,CAAC,gCAAgC,EAK9D,GAAI,CAAC,CAACK,EAAO,WAAcJ,EAAS,YAAY,OAAO,cAAgB,GACrE,MAAM,IAAI,MAAM,qCAAqC,EAGvD,KAAK,QAAQ,eAAe,IAAIA,EAAUT,EAAQa,CAAM,CAC1D,CAaQ,uBAAuBC,EAAgBT,EAA0B,CACvE,IAAIU,EAAK,KAAK,eAAeD,EAAO,OAAQT,IAAgB,CAAkB,EAE9E,GAAI,CAACU,IAEHA,EAAK,KAAK,eAAeD,EAAO,OAAQT,IAAgB,CAAkB,EACtEU,GACF,OAAIV,IAAgB,EACX,KAAK,KAAKU,CAAE,EAEZ,KAAK,OAAOA,CAAE,EAK3B,GAAI,CAACA,EAAI,CACP,IAAMC,EAASL,GAAmC,KAAK,QAAQ,eAAgBG,EAAO,KAAMT,CAAW,EAEvG,GAAIA,IAAgB,EAAiC,CAGnD,IAAMD,EAAQU,EAAO,KACrB,GAAIV,EAAM,SAAW,EAAG,CAQtB,IAAMa,EAAsB,CAACb,EAAM,CAAC,EAAG,KAAK,KAAMA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAK,CAAQ,CAAC,EACvFc,EACFP,GAAmC,KAAK,QAAQ,eAAgBM,EAAqBZ,CAAW,EAChGc,EAASL,EAAO,WACpB,GAAIV,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAI,IAAa,EAAG,CACnD,IAAMgB,EAAiBhB,EAAM,CAAC,EACxBiB,EAAajB,EAAM,CAAC,EAAIA,EAAM,CAAC,EAAIA,EAAM,CAAC,EAC1CkB,EAAa,KAAK,KAAKD,EAAa,EAAQ,CAAQ,EAAI,EACxDE,EAAUH,EAAiBE,EACjCH,EAAS,IAAI,aAAaI,CAAO,EACjC,QAASC,EAAI,EAAGA,EAAIJ,EAAgB,EAAEI,EAAG,CACvC,IAAMC,EAAYD,EAAIH,EAChBK,EAAYF,EAAIF,EAAaE,EAAI,EAAQH,EAC/CF,EAAO,IAAIL,EAAO,WAAW,SAASW,EAAWA,EAAYJ,CAAU,EAAGK,CAAS,CACrF,C
ACF,CACA,OAAO,KAAK,kBAAkBR,EAAgBJ,EAAO,KAAMK,EAAQL,GAA+B,CACpG,CACF,CAEA,GAAIT,IAAgB,EAAoB,CACtC,IAAMsB,EACFC,GAA6B,KAAK,QAAQ,eAAgBd,EAAO,KAAM,EAAG,CAAC,EAAG,CAAC,UAAW,EAAI,CAAC,EAC7Fe,EAAsB,KAAK,kBAC7BF,EAAuBb,EAAO,KAAMA,EAAO,WAAYA,GAA+B,EAC1FC,EAAK,KAAK,KAAKc,CAAmB,CACpC,MACEd,EAAK,KAAK,kBAAkBC,EAAQF,EAAO,KAAMA,EAAO,WAAYA,GAA+B,CAEvG,CACA,OAAOC,CACT,CAWA,sCACIC,EAAuBc,EAA2BC,EAAyBjB,EAA6B,CAC1G,OAAO,KAAK,kBAAkBE,EAAQc,EAAUC,EAAMjB,GAA+B,CACvF,CAEQ,kBACJE,EAAuBc,EAA2BC,EAA0BjB,EAC5EkB,EAAmC,CACrCC,GAAO,QAAQ,mBAAoB,iCAAiC,KAAK,UAAUjB,CAAM,CAAC,GAAG,EAC7F,IAAMf,EAAU,KAAK,QAAQ,eAAe,wBAAwB6B,EAAUd,EAAQe,EAAMC,CAAK,EACjG,OAAO,KAAK,6BAA6BhB,EAAQc,EAAU7B,EAASa,CAAM,CAC5E,CAEA,gBAAgBoB,EAAeC,EAAyC,CACtE,IAAMC,EAAU,KAAK,uBAAuBF,GAA2B,EACjEG,EAAkC,CACtC,SAAUD,EAAQ,SAClB,OAAQA,EAAQ,OAChB,MAAOA,EAAQ,MAEf,MAAOD,EAAa,SAAW,EAAIA,EAAe,CAAC,CAAC,EACpD,QAASG,EAAU,eAAeH,CAAY,EAC9C,cAAeA,CACjB,EAEA,OADuB,KAAK,6BAA6BE,EAAkBH,EAAM,KAAME,EAAQ,OAAO,EAChF,MACxB,CAEA,cAAcF,EAAeC,EAAyC,CACpE,IAAMC,EAAU,KAAK,uBAAuBF,GAAyB,EAGrE,GAAIK,GAAeL,EAAM,KAAMC,CAAY,EAAG,CAC5C,IAAME,EAAkC,CACtC,SAAUD,EAAQ,SAClB,OAAQA,EAAQ,OAChB,MAAOA,EAAQ,MAEf,MAAOD,EAAa,SAAW,EAAIA,EAAe,CAAC,CAAC,EACpD,QAASG,EAAU,eAAeH,CAAY,EAC9C,cAAeA,EACf,SAAU,EACZ,EAEA,OADuB,KAAK,6BAA6BE,EAAkBH,EAAM,KAAME,EAAQ,OAAO,EAChF,MACxB,CAEA,IAAMI,EAAqBC,GAAcP,EAAM,IAAI,EAC7CQ,EAAsBD,GAAcN,CAAY,EAEhDQ,EAAsB,KAAK,cAAcT,EAAOM,CAAkB,EAClEI,EAAuB,KAAK,IAC9BC,GAAuC,KAAMF,EAAqBD,CAAmB,EAAG,CAACC,CAAmB,CAAC,EAEjH,OADqB,KAAK,cAAcC,EAAsBT,CAAY,CAE5E,CAEA,KAAKD,EAAeY,EAA+B,CACjD,IAAMV,EAAU,KAAK,uBAAuBF,GAA2B,EAEvE,OADuB,KAAK,6BAA6BE,EAA0BU,EAAMV,EAAQ,OAAO,EAClF,MACxB,CAEQ,6BACJpB,EAAuBc,EAA2B7B,EAAuBa,EAAiBiC,EAAsB,CAClH,IAAMC,EAA2B,CAC/B,GAAGhC,EACH,OAAQF,GACJ,IAAImC,GACIjC,EAAO,cAAec,EAAWoB,GAAmB,KAAK,YAAYF,CAAW,EAChF,MAAOE,GAAmB,KAAK,iBAAiBF,CAAW,EAAG,OAAWD,CAAQ,EAC7F,QAAA9C,CACF,EACA,YAAK,eAAe+C,EAAY,OAAO,OAAQA,EAAahC,EAAO,QAAQ,EACpEgC,CACT,CAEQ,eAAeD,EAAqBI,EAAW,GAA8B,CACnF,OAAO,KAAK,QAAQ,cAAcJ,CAAQ,EAAI,KAAK,QAAQ,eAAeA,EAAUI,CAAQ,EACxFA,EAA0C,KAAK,uBAAuB,IAAIJ,CAAQ,EACxC,KAAK,yBAAyB,IAAIA,CAAQ,CAC1F,CACA,eAAeA,EAAqBhC,EAAiBoC,EAAW,GAAa,CACvE,KAAK,QAAQ,cAAcJ,CAAQ,EACrC,KAAK,QAAQ,eAAeA,EAAUhC,EAAIoC,CAAQ,GAEjDA,EAAW,KAAK,uBAAyB,KAAK,0BAA0B,IAAIJ,EAAUhC,CAAE,CAE7F,CACA,sBAAsBD,EAAgBqC,EAAW,GAAgB,CAC/D,MAAO,CAAC,CAAC,KAAK,eAAerC,EAAO,OAAQqC,CAAQ,CACtD,CAEA,SAAgB,CACd,KAAK,QAAQ,eAAe,oBAAoB,EAChD,KAAK,uBAAuB,QAAQpC,GAAM,KAAK,QAAQ,eAAe,eAAeA,CAAE,CAAC,EACxF,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAAyB,QAAQA,GAAM,KAAK,QAAQ,eAAe,eAAeA,CAAE,CAAC,EAC1F,KAAK,yBAA2B,IAAI,GACtC,CAEA,YAAYiC,EAA6C,CACvD,OAAIA,EAAY,SACP,KAAK,YAAY,KAAK,OAAOA,CAAW,CAAC,EAE7C,KAAK,QAAQ,QAAQ,UAAU,2BAG7B,KAAK,QAAQ,eAAe,YAAYA,EAAaA,EAAY,OAAO,KAAMA,EAAY,QAAQ,EAFhG,KAAK,QAAQ,eAAe,wBAAwBI,GAAc,KAAMJ,CAAW,CAAC,CAG/F,CAEA,MAAM,iBAAiBA,EAAsD,CAC3E,OAAIA,EAAY,SACP,KAAK,iBAAiB,KAAK,OAAOA,CAAW,CAAC,EAElD,KAAK,QAAQ,QAAQ,UAAU,2BAG7B,KAAK,QAAQ,eAAe,iBAAiBA,EAAaA,EAAY,OAAO,KAAMA,EAAY,QAAQ,EAFrG,KAAK,QAAQ,eAAe,wBAAwBI,GAAc,KAAMJ,CAAW,CAAC,CAG/F,CAEA,KAAKd,EAAiC,CAEpC,OAD0B,KAAK,eAAemB,GAA4B,KAAMnB,EAAM,MAAM,EAAG,CAACA,EAAM,MAAM,CAAC,CAE/G,CAEA,OAAOA,EAAiC,CAEtC,OAD0B,KAAK,eAAeoB,GAA8B,KAAMpB,EAAM,MAAM,EAAG,CAACA,EAAM,MAAM,CAAC,CAEjH,CACF,IC1TA,IAGMqB,GAmBOC,GAtBbC,GAAAC,EAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EAMaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,ICvB3C,IAiBME,GAOOC,GAaAC,GAQPC,GAwBAC,GArENC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAQMT,GAAoC,CACxC,KAAM,qBACN,WA
AY,CAAC,IAAK,QAAS,IAAK,OAAQ,UAAU,EAClD,WACI,UAA6G,CACnH,EAEaC,GACT,CAACS,EAAyCC,EAAkBC,KAC1DR,GAAeO,CAAM,EAQd,CAPQD,EAAiB,IAC5B,CACE,GAAGV,GACH,UAAWY,EAAW,SACtB,IAAK,IAAMT,GAAoCO,EAAkBC,EAAQC,CAAU,CACrF,EACAD,CAAM,CACI,GAGPT,GACRW,GAAmD,CAClD,IAAMC,EAAUD,EAAK,WAAW,SAAS,UAAW,IAAI,EAClDE,EAAWF,EAAK,WAAW,SAAS,WAAY,EAAG,EACnDG,EAAUH,EAAK,WAAW,OAAO,UAAW,CAAC,EACnD,OAAOI,GAA4B,CAAC,QAAAH,EAAS,SAAAC,EAAU,QAAAC,CAAO,CAAC,CACjE,EAEEb,GACF,CAACO,EAAyCC,EAAkBC,IACzC,CACb,IAAMM,EAAOC,GAAQT,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEU,EAAOT,EAAO,CAAC,EAAE,KAAK,OACtB,CAACU,EAAYC,CAAW,EAC1BZ,EAAiB,+BAA+BC,EAAO,CAAC,EAAE,MAA0B,EAClFY,EAAe;AAAA,sBACTH,CAAI;AAAA,iDACuBC,CAAU,KAAKC,CAAW;AAAA,oCACvCJ,EAAK,SAAS;AAAA,mCACfA,EAAK,SAAS;AAAA,uCACVA,EAAK,SAAS;AAAA,gCACrBA,EAAK,SAAS;AAAA;AAAA,oEAEsBN,EAAW,OAAO;AAAA,KAE5E,MAAO,CACL,GAAGZ,GACH,OAAQ,CAAC,KAAMW,EAAO,CAAC,EAAE,KAAM,KAAMA,EAAO,CAAC,EAAE,KAAM,aAAiC,EACtF,aAAAY,CACF,CACF,EAEFnB,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMa,EAAIb,EAAO,CAAC,EACZc,EAAQd,EAAO,CAAC,EAChBe,EAAIf,EAAO,CAAC,EACZgB,EAAOhB,EAAO,CAAC,EACfiB,EAAOjB,EAAO,CAAC,EAIrB,GAAIa,EAAE,KAAK,OAAS,GAAKC,EAAM,KAAK,SAAW,GAAKC,EAAE,KAAK,SAAW,GAAKC,EAAK,KAAK,SAAW,GAC5FC,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIH,EAAM,KAAK,CAAC,IAAMD,EAAE,KAAK,CAAC,GAAKE,EAAE,KAAK,CAAC,IAAMF,EAAE,KAAK,CAAC,GAAKG,EAAK,KAAK,CAAC,IAAMH,EAAE,KAAK,CAAC,GACnFI,EAAK,KAAK,CAAC,IAAMJ,EAAE,KAAK,CAAC,EAC3B,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAKA,EAAE,OAAS,WAAaA,EAAE,OAAS,WAAeC,EAAM,OAAS,WAAaA,EAAM,OAAS,WAC7FC,EAAE,OAAS,WAAaA,EAAE,OAAS,WAAeC,EAAK,OAAS,WAAaA,EAAK,OAAS,WAC3FC,EAAK,OAAS,WAAaA,EAAK,OAAS,UAC5C,MAAM,IAAI,MAAM,6BAA6B,CAEjD,IC/FA,IAsBaC,GAKSC,GAOTC,EAMAC,GAsBAC,GA9DbC,GAAAC,EAAA,kBAsBaN,GAAN,KAAkB,CACvB,YACWO,EAAgCC,EAAiCC,EACjEC,EAAoC,CADpC,eAAAH,EAAgC,iBAAAC,EAAiC,yBAAAC,EACjE,yBAAAC,CAAqC,CAClD,EACsBT,GAAf,KAAuB,CAC5B,YAAmBU,EAAsB,CAAtB,aAAAA,CAAuB,CAG5C,EAGaT,EAAN,KAAqB,CAC1B,YAAmBU,EAA4BC,EAAyB,CAArD,iBAAAD,EAA4B,kBAAAC,CAA0B,CAC3E,EAIaV,GAAN,KAAyB,CAG9B,YAAmBW,EAAcF,EAAsBC,EAAqC,CAAzE,UAAAC,EACbD,EACF,KAAK,aAAeA,EAEpB,KAAK,aAAe,CAAC,EAGnBD,IACF,KAAK,YAAcA,EAEvB,CACA,cAAcG,EAA0B,CAClCA,GACF,KAAK,aAAa,KAAKA,CAAI,CAE/B,CACF,EAGaX,GAAN,KAAkC,CACvC,OAAO,mBAAmBY,EAAmD,CAC3E,GAAI,CAACA,GAASA,EAAM,SAAW,EAC7B,MAAO,CAAC,EAGV,GAAIA,EAAM,SAAW,EACnB,OAAOA,EAGT,IAAMC,EAAa,IAAI,IACjBC,EAAmB,IAAI,IACvBC,EAAS,IAAI,MAEnB,YAAK,mBAAmBH,EAAOC,EAAYC,EAAkBC,CAAM,EAC5DA,CACT,CAEA,OAAe,mBACXC,EAAkCH,EAAyBC,EAC3DC,EAA8B,CAChC,QAAS,EAAI,EAAG,EAAIC,EAAW,OAAQ,EAAE,EACvC,KAAK,YAAYA,EAAW,CAAC,EAAGH,EAAYC,EAAkBC,CAAM,CAExE,CAEA,OAAe,YACXE,EAA0BJ,EAAyBC,EAA+BC,EAA8B,CAElH,GAAI,CAACE,GAAQH,EAAiB,IAAIG,EAAK,IAAI,EACzC,OAIF,GAAIJ,EAAW,IAAII,EAAK,IAAI,EAC1B,MAAM,IAAI,MAAM,kFAAmF,EAIrGJ,EAAW,IAAII,EAAK,IAAI,EAGxB,IAAMR,EAAeQ,EAAK,aAC1B,GAAIR,GAAgBA,EAAa,OAAS,EACxC,QAASS,EAAI,EAAGA,EAAIT,EAAa,OAAQ,EAAES,EACzC,KAAK,YAAYT,EAAaS,CAAC,EAAGL,EAAYC,EAAkBC,CAAM,EAK1EA,EAAO,KAAKE,CAAI,EAGhBH,EAAiB,IAAIG,EAAK,IAAI,EAG9BJ,EAAW,OAAOI,EAAK,IAAI,CAC7B,CACF,IC9GO,SAASE,IAA6B,CAC3C,IAAMC,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASC,IAA6B,CAC3C,IAAMD,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASE,IAA6B,CAC3C,IAAMF,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASG,IAA6B,CAC3C,IAAMH,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASI,IAA+B,CAC7C,IAAMJ,EAAO
,SASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASK,IAAiC,CAC/C,IAAML,EAAO,WAYb,MAAO,CAAC,KAXK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASM,IAA8B,CAC5C,IAAMN,EAAO,QAYb,MAAO,CAAC,KAXK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASO,IAA6B,CAC3C,IAAMP,EAAO,OAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASQ,IAA4B,CAC1C,IAAMR,EAAO,MAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASS,IAA6B,CAC3C,IAAMT,EAAO,OAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASU,IAA6B,CAC3C,OAAOC,GAAkB,KAAK,CAChC,CACO,SAASC,IAA+B,CAC7C,IAAMZ,EAAO,SAcb,MAAO,CAAC,KAbK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASG,KAAAA,EAAM,MAA6B,CACnD,CAEA,SAASW,GAAkBE,EAAkC,CAC3D,IAAMb,EAAO,GAAGa,CAAK,IASrB,MAAO,CAAC,KARK;AAAA,UACLb,CAAI;AAAA,aACDa,CAAK;AAAA;AAAA,SAETb,CAAI;AAAA,aACAa,CAAK;AAAA;AAAA,IAGF,KAAAb,EAAM,MAA6B,CACnD,CAvLA,IAyLMc,GAaAC,GAsEOC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GA7SbC,GAAAC,EAAA,kBAIAC,KACAC,KACAC,KAEAC,KAiLMnB,GACF,CAACoB,EAAgCC,EAAkBC,EAClDC,EAAoCF,EAAO,CAAC,EAAE,KAAMG,IAAyC,CAC5F,IAAMC,EAAcL,EAAQ,QAAQ,SACpC,MAAO,CACL,KAAME,EAAS,KACf,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,CAACG,EAAaA,CAAW,EACrC,UAAWD,EACX,IAAK,IAAMvB,GAAwBmB,EAASC,EAAQC,EAAUC,CAAgB,CAChF,CACF,EAEEtB,GACF,CAACmB,EAAgCC,EAAkBC,EAClDC,EAAoCF,EAAO,CAAC,EAAE,OAAsB,CACnE,IAAMI,EAAcL,EAAQ,QAAQ,SAC9BM,EAAc,CAACC,EAAU,SAASN,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,EAClEO,EAAcP,EAAO,CAAC,EAAE,KAEtBQ,EAAmBT,EAAQ,QAAQ,KAEzC,GAAIM,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUV,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAM,EAAK,EACrF,GAAI,CAACS,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEF,EAAcE,EACd,IAAME,EAAaJ,EAAY,OACzBK,EAAQZ,EAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC9Da,EAAQb,EAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC9Dc,EAASd,EAAO,CAAC,EAAE,KAAK,SAAW,EAAI,qCAAuC,mBAC9Ee,EAASf,EAAO,CAAC,EAAE,KAAK,SAAW,EAAI,qCAAuC,mBAE9EgB,EAAOC,GAAQlB,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDmB,EAAeV,EAAmB;AAAA,QACxCP,EAAS,IAAI;AAAA;AAAA;AAAA;AAAA,wBAIGA,EAAS,IAAI;AAAA,UAC3Be,EAAK,MAAM;AAAA,SAE2B;AAAA,QACxCf,EAAS,IAAI;AAAA,kCACaU,CAAU;AAAA,uBACrBC,CAAK;AAAA,uBACLC,CAAK;AAAA,UAClBC,CAAM;AAAA,UACNC,CAAM;AAAA,iBACCd,EAAS,IAAI;AAAA,SAGtB,MAAO,CACL,KAAMA,EAAS,KACf,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,CAACG,EAAaA,CAAW,EACrC,OAAQ,CAAC,KAAMG,EAAa,KAAML,EAAkB,YAAAE,CAAW,EAC/D,aAAAc,EACA,QAASV,CACX,CACF,CACA,IAAMQ,EAAOC,GAAQlB,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EACxDmB,EAAe;AAAA,MACrBjB,EAAS,IAAI;AAAA;AAAA,kBAEDe,EAAK,SAAS;AAAA,kBACdA,EAAK,SAAS;AAAA,sBACVf,EAAS,IAAI;AAAA,QAC3Be,EAAK,MAAM;AAAA;AAAA,MAIb,MAAO,CACL,KAAMf,EAAS,KACf,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,CAACG,EAAaA,CAAW,EACrC,OAAQ,CAAC,KAAMJ,EAAO,CAAC,EAAE,KAAM,KAAME,EAAkB,YAAAE,CAAW,EAClE,aAAAc,EACA,QAAS,EACX,CACF,EAESrC,GAAM,CAACkB,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQpC,GAAQ,CAAC,EAAGoC,CAAM,CAAC,EAElFlB,GAAM,CAACiB,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ5B,GAAQ,EAAG,MAAM,EAAG4B,CAAM,CAAC,EAE1FjB,GAAM,CAACgB,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQlC,GAAQ,CAAC,EAAGkC,CAAM,CAAC,EAElFhB,GAAQ,CAACe,EAAgCC,IACtC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ/B,GAAU,EAAG,MAAM,EAAG+B,CAAM,CAAC,EAE5Ff,GAAU,CAACc
,EAAgCC,IACxC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ9B,GAAY,EAAG,MAAM,EAAG8B,CAAM,CAAC,EAE9Fd,GAAO,CAACa,EAAgCC,IACrC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ7B,GAAS,EAAG,MAAM,EAAG6B,CAAM,CAAC,EAE3Fb,GAAM,CAACY,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQjC,GAAQ,CAAC,EAAGiC,CAAM,CAAC,EAElFZ,GAAK,CAACW,EAAgCC,IACnC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ3B,GAAO,EAAG,MAAM,EAAG2B,CAAM,CAAC,EAEzFX,GAAM,CAACU,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQzB,GAAQ,CAAC,EAAGyB,CAAM,CAAC,EAElFV,GAAQ,CAACS,EAAgCC,IACtC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQvB,GAAU,CAAC,EAAGuB,CAAM,CAAC,EAEpFT,GAAM,CAACQ,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQhC,GAAQ,CAAC,EAAGgC,CAAM,CAAC,EAElFR,GAAM,CAACO,EAAgCC,IACpC,CAACD,EAAQ,IAAIpB,GAA8BoB,EAASC,EAAQ1B,GAAQ,EAAG,MAAM,EAAG0B,CAAM,CAAC,IC9SvG,IASamB,GAMAC,GAGPC,GAlBNC,GAAAC,EAAA,kBAMAC,KAGaL,GACT,CAACM,EAAgCC,EAAkBC,KACjDN,GAAeK,CAAM,EACd,CAACD,EAAQ,KAAKC,EAAO,CAAC,EAAGC,CAAE,CAAC,GAG5BP,GAAgEQ,GACzEC,GAAU,wBAAwBD,EAAK,WAAW,OAAO,IAAI,CAAC,EAE5DP,GAAkBK,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIA,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC1BA,IAYMI,GAOAC,GA2GOC,GAMPC,GApINC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAGAC,KAEMT,GAAoC,CAACU,EAAoBC,KAAuB,CACpF,KAAM,kBACN,WAAY,MAAM,KAAK,CAAC,OAAQD,CAAU,EAAG,CAACE,EAAIC,IAAM,IAAIA,CAAC,EAAE,EAC/D,WAAY,MAAMH,CAAU,EAAE,MAAuB,EACrD,UAAAC,CACF,GAEMV,GACF,CAACa,EAAgCC,EAA2BC,EAAkBC,IAA8B,CAC1G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KAAK,MAAM,EACxC,GAAIC,GAAQC,EAAW,QAAUD,EAAQ,GAAKC,EAAW,OACvD,MAAM,IAAI,MAAM,8DAA+D,EAE7ED,EAAO,IACTA,EAAOC,EAAW,OAASD,GAI7B,IAAME,EAAcD,EAAW,MAAM,CAAC,EACtC,QAASL,EAAI,EAAGA,EAAIG,EAAO,OAAQH,IAAK,CACtC,IAAMO,EAAaJ,EAAOH,CAAC,EAAE,KAAK,MAAM,EACxC,QAASQ,EAAY,EAAGA,EAAYH,EAAW,OAAQG,IAErD,GAAIA,IAAcJ,EAChBE,EAAYF,CAAI,GAAKG,EAAWC,CAAS,UAGlCH,EAAWG,CAAS,IAAMD,EAAWC,CAAS,EACrD,MAAM,IAAI,MAAM,kCAAkC,CAGxD,CAEA,IAAMC,EAAOH,EAAY,OACnBI,EAASC,GAAY,SAAUF,CAAI,EACnCG,EAAQC,GAAkBJ,CAAI,EAC9BK,EAAgBC,GAAkB,EAElCC,EAASb,EAAO,IAAIH,GAAKA,EAAE,IAAI,EAC/BiB,EAAWC,GAAcT,CAAI,EAC7BU,EAAoB,IAAI,MAAMH,EAAO,OAAS,CAAC,EAErDG,EAAQ,CAAC,EAAIH,EAAO,CAAC,EAAEZ,CAAI,EAC3B,QAASJ,EAAI,EAAGA,EAAImB,EAAQ,OAAQnB,IAClCmB,EAAQnB,CAAC,EAAImB,EAAQnB,EAAI,CAAC,EAAIgB,EAAOhB,CAAC,EAAEI,CAAI,EAG9C,IAAMgB,EAAUH,EAASb,CAAI,EACvBiB,EAAeJ,EAAS,MAAM,EAAE,EAChCK,EAAcL,EAAS,KAAK,EAE9BM,EAAkB,OAAOH,CAAO,MAAMD,EAAQ,CAAC,CAAC;AAAA;AAAA,oBAEtCG,CAAW,WAAWD,EAAa,KAAK,CAAC;AAAA,WAEvD,QAASrB,EAAI,EAAGA,EAAImB,EAAQ,OAAQnB,IAAK,CACvC,IAAMwB,EAAQL,EAAQnB,EAAI,CAAC,EAC3BuB,GAAmB;AAAA,kBACTH,CAAO,MAAMD,EAAQnB,CAAC,CAAC,QAAQoB,CAAO,OAAOD,EAAQnB,EAAI,CAAC,CAAC;AAAA;AAAA,sBAEvDA,CAAC,IAAIV,GAA0B2B,EAAUG,EAASI,CAAK,CAAC;AAAA,uBACvDlC,GAA0B+B,EAAcD,EAASI,CAAK,CAAC;AAAA,cAExE,CACA,IAAMC,EAAYN,EAAQ,OACpBK,EAAQL,EAAQA,EAAQ,OAAS,CAAC,EACxCI,GAAmB;AAAA;AAAA,oBAELE,CAAS,IAAInC,GAA0B2B,EAAUG,EAASI,CAAK,CAAC;AAAA,qBAC/DlC,GAA0B+B,EAAcD,EAASI,CAAK,CAAC,MAEtE,IAAME,EAAOC,GAAQ1B,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EAExD2B,EAAe;AAAA,YACfd,CAAa;AAAA,2BACEG,EAAS,IAAIY,GAAK,OAASA,CAAC,CAAC;AAAA,cAC1CN,CAAe;AAAA;AAAA;AAAA;AAAA,cAIfX,CAAK;AAAA,mCACgBK,EAASR,EAAO,CAAC,CAAC;AAAA,qBAChCQ,EAASR,EAAO,CAAC,CAAC,aAAaQ,EAASR,EAAO,CAAC,CAAC;AAAA,qBACjDQ,EAASR,EAAO,CAAC,CAAC;AAAA;AAAA,0CAEGC,CAAM;AAAA;AAAA,cAElCA,EAAOD,EAAO,CAAC,CAAC,MAAMC,EAAOD,EAAO,CAAC,CAAC;AAAA,kBAClCC,EAAOD,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,oCACzBC,CAAM;AAAA;AAAA;AAAA,cAG5BA,EAAOD,EAAO,CAAC,CAAC,MAAMC,EAAOD,EAAO,CAAC,CAAC;AAAA,kBAClCC,EAAOD,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,oCACzBC,CAAM;AAAA;AAAA;AAAA,cAG5BA,EAAOD,EAAO,CAAC,CAAC,MAAMC,EAAOD,EAAO,CAAC,CAAC;AAAA,kBAClCC,EAAO
D,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,kBAC3CC,EAAOD,EAAO,CAAC,CAAC,MAAMH,EAAYG,EAAO,CAAC,CAAC;AAAA,oCACzBC,CAAM;AAAA;AAAA,cAE5BgB,EAAK,MAAM;AAAA;AAAA,UAInB,MAAO,CACL,GAAGxB,EACH,OAAQ,CAAC,KAAMI,EAAa,KAAMH,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,aAAAyB,EACA,QAAS,EACX,CACF,EAESvC,GACT,CAACY,EAAgCE,EAAkB2B,IAAoD,CACrG,IAAM5B,EAAWf,GAAkCgB,EAAO,OAAQ2B,EAAW,QAAQ,EACrF,MAAO,CAAC,GAAG5B,EAAU,IAAK,IAAMd,GAA8Ba,EAASC,EAAUC,EAAQ2B,EAAW,IAAI,CAAC,CAC3G,EAEExC,GAA4B,CAAC2B,EAAoBG,EAAiBI,IAA0B,CAChG,IAAMO,EAAad,EAAS,QAAQG,CAAO,EAQ3C,OAPYH,EAAS,IAAI,CAACe,EAAGC,IACvBA,IAAQF,EACH,GAAGC,CAAC,MAAMR,CAAK,GAEfQ,CAEV,EACU,KAAK,CAClB,IC9IA,IAgBaE,GAcPC,GAOAC,GAiEAC,GAMAC,GASAC,GAGAC,GAuBAC,GAwBOC,GAGPC,GA1KNC,GAAAC,EAAA,kBAGAC,KAKAC,KAEAC,KAMad,GACT,CAACe,EAAyCC,EAAkBC,KAC1DR,GAAeO,CAAM,EACjBD,EAAiB,QAAQ,MAAQC,EAAO,CAAC,EAAE,KAAK,OAAS,EAGpD,CADHD,EAAiB,IAAIG,GAAoCH,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CAC5F,EAIP,CADHD,EAAiB,IAAIZ,GAAsCY,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CAC9F,GAIhBf,GAAsC,CAACkB,EAAoBC,KAAuB,CACtF,KAAM,SACN,WAAY,MAAM,KAAK,CAAC,OAAQD,CAAU,EAAG,CAACE,EAAIC,IAAM,IAAIA,CAAC,EAAE,EAC/D,WAAY,MAAMH,CAAU,EAAE,MAAyB,EACvD,UAAAC,CACF,GAEMlB,GACF,CAACqB,EAAiCC,EAA2BR,EAAkBS,IAA8B,CAC3G,IAAMC,EAAaV,EAAO,CAAC,EAAE,KAAK,MAAM,EACxC,GAAIS,GAAQC,EAAW,QAAUD,EAAQ,GAAKC,EAAW,OACvD,MAAM,IAAI,MAAM,8DAA+D,EAE7ED,EAAO,IACTA,EAAOC,EAAW,OAASD,GAI7B,IAAME,EAAcD,EAAW,MAAM,CAAC,EACtC,QAASJ,EAAI,EAAGA,EAAIN,EAAO,OAAQM,IAAK,CACtC,IAAMM,EAAaZ,EAAOM,CAAC,EAAE,KAAK,MAAM,EACxC,QAASO,EAAY,EAAGA,EAAYH,EAAW,OAAQG,IAErD,GAAIA,IAAcJ,EAChBE,EAAYF,CAAI,GAAKG,EAAWC,CAAS,UAGlCH,EAAWG,CAAS,IAAMD,EAAWC,CAAS,EACrD,MAAM,IAAI,MAAM,kCAAkC,CAGxD,CAEA,IAAMC,EAAOH,EAAY,OAEnBI,EAAmB,IAAI,MAAcf,EAAO,MAAM,EACpDgB,EAAc,EAClB,QAASV,EAAI,EAAGA,EAAIS,EAAiB,OAAQ,EAAET,EAC7CU,GAAehB,EAAOM,CAAC,EAAE,KAAKG,CAAI,EAClCM,EAAiBT,CAAC,EAAIU,EAGxB,IAAIC,EAAwC,GAExCjB,EAAO,OAAS,EAClBiB,EAAwC7B,GAA4C2B,CAAgB,EAEpGE,EAAwC5B,GAA4C0B,CAAgB,EAGtG,IAAMG,EAAoC5B,GAAqCU,EAAO,OAAQc,CAAI,EAC5FK,EAA0C5B,GAA2CwB,CAAgB,EACrGK,EAAe;AAAA,UACjBF,CAAiC;AAAA,UACjCC,CAAuC;AAAA,UACvCF,CAAqC;AAAA,oCACXH,CAAI;AAAA,mEAC2BL,CAAI;AAAA;AAAA;AAAA,sBAGjDA,CAAI,eAAeA,CAAI;AAAA;AAAA;AAAA;AAAA,WAKvC,MAAO,CACL,GAAGD,EACH,OAAQ,CAAC,KAAMG,EAAa,KAAMX,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAoB,CACF,CACF,EAEEjC,GACF,CAACkC,EAAgCrB,EAAkBC,IAAoD,CACrG,IAAMO,EAAWvB,GAAoCe,EAAO,OAAQC,EAAW,QAAQ,EACvF,MAAO,CAAC,GAAGO,EAAU,IAAK,IAAMtB,GAAgCmC,EAASb,EAAUR,EAAQC,EAAW,IAAI,CAAC,CAC7G,EAEEb,GAA+C2B,GAG5C;AAAA,QAFYA,EAAiB,IAAI,CAACO,EAAMhB,IAAM,YAAYgB,CAAI,aAAahB,CAAC;AAAA,CACpF,EAEkB,KAAK,EAAE,CAAC;AAAA,OAKrBjB,GAA+C0B,GACjD3B,GAA4C2B,CAAgB,EAE1DzB,GAAuC,CAACiC,EAAyBC,IAAuB,CAC5F,IAAMC,EAAsB,CAAC,mEAAmED,CAAU,MAAM,EAChH,QAASlB,EAAI,EAAGA,EAAIiB,EAAiB,EAAEjB,EACjCA,IAAM,EACRmB,EAAU,KACN,wBACuBnB,CAAC,gBAAgBA,CAAC,cAAc,EAClDA,IAAMiB,EAAkB,EACjCE,EAAU,KACN,oBACmBnB,CAAC,cAAc,EAEtCmB,EAAU,KACN,6BAC4BnB,CAAC,gBAAgBA,CAAC,cAAc,EAGpE,OAAAmB,EAAU,KACN,IACG,EACAA,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMlC,GAA8CwB,GAAuC,CACzF,IAAMU,EAAsB,CAAC,oDAAoD,EACjF,QAASnB,EAAI,EAAGA,EAAIS,EAAiB,OAAQ,EAAET,EACzCA,IAAM,EACRmB,EAAU,KACN,iBACgBnB,CAAC,cAAcS,EAAiBT,CAAC,CAAC,KAAK,EAClDA,IAAMS,EAAiB,OAAS,EACzCU,EAAU,KACN,kBACiBV,EAAiBT,CAAC,CAAC,KAAK,EAE7CmB,EAAU,KACN,sBACqBnB,CAAC,cAAcS,EAAiBT,CAAC,CAAC,KAAK,EAGpE,OAAAmB,EAAU,KACN,IACG,EAEAA,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEajC,GAAmEkC,GAC5EC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,MAAM,CAAC,CAAC,EAEhEjC,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAGlC,IAAM4B,EAAY5B,EAAO,CAAC,EAAE,KACtB6B,EAAsB7B,EAAO,CAAC,EAAE,KAAK,OAG3C,GAAI4B,IAAc,SAChB,MAAM,IAAI,MAAM,oCAAoC,EAGtD,QAAWE,KAAS9B,EAAQ,CAE1B
,GAAI8B,EAAM,OAASF,EACjB,MAAM,IAAI,MAAM,kCAAkC,EAIpD,GAAIE,EAAM,KAAK,SAAWD,EACxB,MAAM,IAAI,MAAM,0CAA0C,CAE9D,CACF,ICtLO,SAASE,IAA6B,CAC3C,OAAOC,GAAiB,KAAK,CAC/B,CACO,SAASC,IAA8B,CAC5C,OAAOD,GAAiB,MAAM,CAChC,CACO,SAASE,IAA8B,CAC5C,OAAOF,GAAiB,MAAM,CAChC,CACO,SAASG,IAA8B,CAC5C,OAAOH,GAAiB,MAAM,CAChC,CACO,SAASI,IAA8B,CAC5C,OAAOJ,GAAiB,MAAM,CAChC,CACO,SAASK,IAA6B,CAC3C,OAAOL,GAAiB,KAAK,CAC/B,CACO,SAASM,GAAQC,EAAkC,CACxD,IAAMC,EAAO,MAWb,MAAO,CAAC,KAVK;AAAA,8BACeD,CAAK;AAAA;AAAA,UAEzBC,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA,kBACKA,CAAI,WAAWA,CAAI,WAAWA,CAAI,WAAWA,CAAI;AAAA;AAAA,IAGnD,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASC,IAA6B,CAC3C,OAAOT,GAAiB,KAAK,CAC/B,CACO,SAASU,IAA+B,CAC7C,OAAOV,GAAiB,OAAO,CACjC,CACO,SAASW,GAASC,EAAaC,EAAgC,CACpE,IAAML,EAAO,OAYb,MAAO,CAAC,KAXK;AAAA,4BACaI,CAAG;AAAA,4BACHC,CAAG;AAAA;AAAA,UAErBL,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASM,IAAkC,CAChD,IAAMN,EAAO,YASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASO,GAAcR,EAAkC,CAC9D,IAAMC,EAAO,YAWb,MAAO,CAAC,KAVK;AAAA,8BACeD,CAAK;AAAA;AAAA,UAEzBC,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA,kBACKA,CAAI,WAAWA,CAAI,WAAWA,CAAI,WAAWA,CAAI;AAAA;AAAA,IAGnD,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASQ,IAA6B,CAC3C,OAAOhB,GAAiB,KAAK,CAC/B,CACO,SAASiB,IAA6B,CAC3C,IAAMT,EAAO,MASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASU,IAA6B,CAC3C,IAAMV,EAAO,MAeb,MAAO,CAAC,KAdK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,SAGJA,CAAI;AAAA;AAAA;AAAA,UAGHA,CAAI;AAAA;AAAA;AAAA,IAIE,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASW,IAA6B,CAC3C,OAAOnB,GAAiB,KAAK,CAC/B,CACO,SAASoB,IAA8B,CAC5C,IAAMZ,EAAO,OASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASa,IAAiC,CAC/C,IAAMb,EAAO,UASb,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA,SAGLA,CAAI;AAAA;AAAA;AAAA,IAIG,KAAAA,EAAM,MAA6B,CACnD,CACO,SAASc,IAA8B,CAC5C,OAAOtB,GAAiB,MAAM,CAChC,CACO,SAASuB,IAA6B,CAC3C,OAAOvB,GAAiB,KAAK,CAC/B,CACO,SAASwB,IAA8B,CAC5C,IAAMhB,EAAO,OAab,MAAO,CAAC,KAZK;AAAA,UACLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,SAKLA,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,IAMG,KAAAA,EAAM,MAA6B,CACnD,CACA,SAASR,GAAiBQ,EAAiC,CASzD,MAAO,CAAC,KARK;AAAA,UACLA,CAAI;AAAA,aACDA,CAAI;AAAA;AAAA,SAERA,CAAI;AAAA,aACAA,CAAI;AAAA;AAAA,IAGD,KAAAA,EAAM,MAA6B,CACnD,CAvLA,IA6LMiB,GAoBAC,GAQOC,GAGAC,GAGAC,GAGAC,GAQAC,GAMAC,GAGAC,GAKPC,GAWOC,GAGAC,GAOAC,GAKAC,GAGAC,GAGAC,GAGAC,GAOAC,GAKAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAGAC,GAlUbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEAC,KAmLMlC,GACF,CAACmC,EAAgCC,EAA2BC,EAAeC,IACxD,CACb,IAAMC,EAAcJ,EAAQ,QAAQ,SAC9BK,EAAOC,GAAQN,EAAQ,QAAQ,QAAQ,UAAU,OAAO,EAC9D,MAAO,CACL,GAAGC,EACH,OAAQ,CAAC,KAAMC,EAAM,KAAM,KAAMA,EAAM,KAAM,YAAAE,CAAW,EACxD,aAAc;AAAA,OACnBD,EAAS,IAAI;AAAA;AAAA,kBAEFE,EAAK,SAAS;AAAA,aACnBF,EAAS,IAAI;AAAA,SACjBE,EAAK,MAAM;AAAA;AAAA,OAGR,QAAS,EACX,CACF,EAEFvC,GACF,CAACkC,EAAgCE,EAAeC,EAA6BI,IACpD,CACnB,IAAMH,EAAcJ,EAAQ,QAAQ,SAC9BC,EAAW,CAAC,KAAME,EAAS,KAAM,WAAY,CAACC,CAAW,EAAG,WAAY,CAAC,GAAG,EAAG,UAAWG,CAAQ,EACxG,MAAO,CAAC,GAAGN,EAAU,IAAK,IAAMpC,GAA6BmC,EAASC,EAAUC,EAAOC,CAAQ,CAAC,CAClG,EAEKpC,GAAM,CAACiC,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGrE,GAAQ,CAAC,EAAGqE,CAAM,CAAC,EAE1FxC,GAAO,CAACgC,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGnE,GAAS,CAAC,EAAGmE,CAAM,CAAC,EAE3FvC,GAAO,CAAC+B,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGlE,GAAS,CAAC,EAAGkE,CAAM,CAAC,EAE3FtC,GAAO,CAAC8B,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGj
E,GAAS,CAAC,EAAGiE,CAAM,CAAC,EAO3FrC,GACT,CAAC6B,EAAgCQ,EAAkBC,IAAyC,CAACT,EAAQ,IACjGlC,GACIkC,EAASQ,EAAO,CAAC,EAAGzD,GAAS0D,EAAW,IAAKA,EAAW,GAAG,EAAGA,EAAW,QAAQ,EACrFD,CAAM,CAAC,EAEFpC,GAAuBsC,GAAqCC,GACrE,CAAC,IAAKD,EAAK,WAAW,SAAS,MAAOE,EAAQ,EAAG,IAAKF,EAAK,WAAW,SAAS,MAAOG,EAAQ,CAAC,CAAC,EAEvFxC,GAAU,CAAC2B,EAAgCQ,IAA+B,CACrF,IAAMC,EAAanC,GAAiC0B,EAASQ,CAAM,EACnE,OAAOrC,GAAK6B,EAAS,CAACQ,EAAO,CAAC,CAAC,EAAGC,CAAU,CAC9C,EAEMnC,GAAmC,CAAC0B,EAAgCQ,IAAqC,CAC7G,GAAIA,EAAO,QAAU,IAChB,CAACR,EAAQ,QAAQ,cAAcQ,EAAO,CAAC,EAAE,MAAM,GAAK,CAACR,EAAQ,QAAQ,cAAcQ,EAAO,CAAC,EAAE,MAAM,GACtG,MAAM,IAAI,MAAM,yCAAyC,EAG3D,IAAMxD,EAAOwD,EAAO,QAAU,EAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,EAAII,GACvD3D,EAAOuD,EAAO,QAAU,EAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,EAAIK,GAC7D,OAAOF,GAA4B,CAAC,IAAA3D,EAAK,IAAAC,CAAG,CAAC,CAC/C,EAEasB,GAAO,CAACyB,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGhE,GAAS,CAAC,EAAGgE,CAAM,CAAC,EAE3FhC,GAAM,CAACwB,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG/D,GAAQ,CAAC,EAAG+D,CAAM,CAAC,EAM1F/B,GACT,CAACuB,EAAgCQ,EAAkBC,IAAwC,CAACT,EAAQ,IAChGlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG9D,GAAQ+D,EAAW,KAAK,EAAGA,EAAW,QAAQ,EACrGD,CAAM,CAAC,EAEF9B,GAAsBgC,GAC/BC,GAA4B,CAAC,MAAOD,EAAK,WAAW,SAAS,QAAS,CAAG,CAAC,CAAC,EAElE/B,GAAM,CAACqB,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG3D,GAAQ,CAAC,EAAG2D,CAAM,CAAC,EAE1F5B,GAAQ,CAACoB,EAAgCQ,IACtC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG1D,GAAU,CAAC,EAAG0D,CAAM,CAAC,EAE5F3B,GAAW,CAACmB,EAAgCQ,IACzC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGtD,GAAa,CAAC,EAAGsD,CAAM,CAAC,EAM/F1B,GACT,CAACkB,EAAgCQ,EAAkBC,IAA8C,CAACT,EAAQ,IACtGlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGrD,GAAcsD,EAAW,KAAK,EAAGA,EAAW,QAAQ,EAC3GD,CAAM,CAAC,EAEFzB,GAA4B2B,GACrCC,GAA4B,CAAC,MAAOD,EAAK,WAAW,SAAS,QAAS,GAAI,CAAC,CAAC,EAEnE1B,GAAM,CAACgB,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGpD,GAAQ,CAAC,EAAGoD,CAAM,CAAC,EAE1FvB,GAAM,CAACe,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGnD,GAAQ,CAAC,EAAGmD,CAAM,CAAC,EAE1FtB,GAAM,CAACc,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGlD,GAAQ,CAAC,EAAGkD,CAAM,CAAC,EAE1FrB,GAAO,CAACa,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGhD,GAAS,CAAC,EAAGgD,CAAM,CAAC,EAE3FpB,GAAU,CAACY,EAAgCQ,IACxC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG/C,GAAY,CAAC,EAAG+C,CAAM,CAAC,EAE9FnB,GAAM,CAACW,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAGjD,GAAQ,CAAC,EAAGiD,CAAM,CAAC,EAE1FlB,GAAO,CAACU,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG9C,GAAS,CAAC,EAAG8C,CAAM,CAAC,EAE3FjB,GAAM,CAACS,EAAgCQ,IACpC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG7C,GAAQ,CAAC,EAAG6C,CAAM,CAAC,EAE1FhB,GAAO,CAACQ,EAAgCQ,IACrC,CAACR,EAAQ,IAAIlC,GAAmCkC,EAASQ,EAAO,CAAC,EAAG5C,GAAS,CAAC,EAAG4C,CAAM,CAAC,ICnTjG,SAASM,GAAqBC,EAA0C,CAC7E,IAAIC,EACJ,OAAQD,EAAW,WAAY,CAC7B,IAAK,OACHC,EAAOC,GAAS,EAChB,MACF,IAAK,UACHD,EAAOE,GAAY,EACnB,MACF,IAAK,OACHF,EAAOG,GAASJ,EAAW,QAAUA,EAAW,OAAQ,EACxD,MAEF,QACE,MAAO,CAAC,mBAAoB,GAAI,gBAAiB,EAAE,CACvD,CAEA,IAAMK,EAAiBJ,EAAK,KACtBK,EAAqBL,EAAK,KAC1BM,EAAkB,WAAWF,CAAc,YACjD,MAAO,CAAC,mBAAAC,EAAoB,gBAAAC,CAAe,CAC7C,CArCA,IAuCaC,GAvCbC,GAAAC,EAAA,kBAIAC,KAGAC,KAgCaJ,GAAqCR,GAAwD,CACxG,IAAMa,EAAab,EAAW,UAAU,aAAc,EAAE,EAExD,GAAIa,IAAe,OAAQ,CACzB,GAAM,CAACC,EAASC,CAAO,EAAIf,EAAW,UAAU,oBAAqB,CAACgB,GAAUC,EAAQ,CAAC,EACzF,MAAO,CAAC,WAAAJ,EAAY,QAAAE,EAAS,QAAAD,EAAS,mBAAoB,GAAGD,CAAU,IAAIC,CAAO,IAAIC,CAAO,EAAE,CACjG,CACA,MAAO,CAAC,WAAAF,EAAY,mBAAoBA,CAAU,CACpD,IC/CA,IAYMK,GAQAC,GA+DOC,GAnFbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KACAC,KAEMT,GAA2C,CAACU,EAAkBC,KAAwC,CAC1G,KAAM,cACN,WAAYD,EAAU,CA
AC,IAAK,IAAK,MAAM,EAAI,CAAC,IAAK,GAAG,EACpD,WAAYA,EAAU,MAAiE,EACjE,IAA2C,EACjE,UAAAC,CACF,GAEMV,GACF,CAACW,EAAyCC,EAA2BC,EACpEC,IAA4C,CAE3C,IAAMC,EADUH,EAAO,OAAS,EACF,oCAAsC,GAC9DI,EAASJ,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BK,EAASL,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BM,EAAyBD,EAAO,CAAC,EAAIH,EAAW,MACtDK,GAAO,QACH,cACA,WAAWL,EAAW,OAAO,eAAeA,EAAW,SAAS,WAAWA,EAAW,KAAK,iBACvFA,EAAW,WAAW,UAAUA,EAAW,IAAI,aAAaA,EAAW,OAAO,EAAE,EACxF,IAAMM,EACFC,GAAqBL,EAAQC,EAAQH,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAC5FQ,EAAOC,GAAQZ,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAAC,mBAAAa,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBZ,CAAU,EAEvEa,EAAe;AAAA,gCACKb,EAAW,QAAQ,CAAC,CAAC,KAAKA,EAAW,QAAQ,CAAC,CAAC;AAAA,6BAClDA,EAAW,KAAK,CAAC,CAAC,KAAKA,EAAW,KAAK,CAAC,CAAC;AAAA,IAClEU,CAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAMgBN,CAAsB;AAAA;AAAA;AAAA,4CAGhBD,EAAO,CAAC,CAAC;AAAA,uCACdA,EAAO,CAAC,CAAC;AAAA,wCACRA,EAAO,CAAC,CAAC;AAAA,gDACDH,EAAW,UAAU,CAAC,CAAC;AAAA;AAAA,wCAE/BE,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,wCAITC,EAAO,CAAC,CAAC;AAAA,gDACDH,EAAW,UAAU,CAAC,CAAC;AAAA,wCAC/BE,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAU3CD,CAAW;AAAA,MACXU,CAAe;AAAA,MACfH,EAAK,MAAM;AAAA;AAAA,EAGX,MAAO,CACL,GAAGT,EACH,OAAQ,CAAC,KAAMO,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAe,EACA,QAAS,EACX,CACF,EAES1B,GACT,CAACU,EAAyCC,EAA2BE,IAC5C,CACnB,IAAMD,EAAWd,GAAyCa,EAAO,OAAS,EAAGE,EAAW,QAAQ,EAChG,MAAO,CACL,GAAGD,EACH,IAAK,IAAMb,GAAqCW,EAAkBC,EAAQC,EAAUC,CAAU,CAChG,CACF,IC3FR,IAWMc,GAOAC,GAiEOC,GAnFbC,GAAAC,EAAA,kBAIAC,KAEAC,KAGAC,KAEMP,GAAqCQ,IAAuB,CAChE,KAAM,kBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAmB,EAC/B,UAAAA,CACF,GAEMP,GACF,CAACQ,EAAyCC,EAA2BC,EAAWC,EAC/EC,EAAgCC,IAA4C,CAC3E,IAAMC,EAASJ,EAAE,KACXK,EAASJ,EAAE,KACXK,EAAS,EACTC,EAAS,EACTC,EAAON,EAAY,OACnBO,EAAc,CAACJ,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAGH,EAAY,CAAC,EAAIA,EAAY,CAAC,CAAC,EACjFQ,EAAaL,EAAO,CAAC,EAAIA,EAAO,CAAC,EACjCM,EAAgBC,GAAkB,EAClCC,EAAOC,GAAQhB,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACnEiB,EAAW,GAEf,QAASC,EAAM,EAAGA,GAAO,EAAGA,IAC1B,QAASC,EAAM,EAAGA,GAAO,EAAGA,IAC1BF,GAAY;AAAA,kCACYE,CAAG;AAAA,2BACVD,CAAG;AAAA;AAAA,8BAEAP,EAAY,CAAC,CAAC,aAAaA,EAAY,CAAC,CAAC;AAAA,4CAC3BP,EAAYM,EAAO,CAAC,CAAC,QAAQL,EAAW,QAAQ,CAAC,CAAC;AAAA,kBAC5EA,EAAW,KAAK,CAAC,CAAC;AAAA,+BACLA,EAAW,UAAU,CAAC,CAAC,iBAAiBO,CAAU,OAAOL,EAAO,CAAC,CAAC;AAAA;AAAA,wBAEzED,EAAOE,CAAM,CAAC;AAAA,6CACOJ,EAAYM,EAAO,CAAC,CAAC,OAAOL,EAAW,QAAQ,CAAC,CAAC;AAAA,oBAC1EA,EAAW,KAAK,CAAC,CAAC;AAAA,iCACLA,EAAW,UAAU,CAAC,CAAC,qBAAqBO,CAAU,MAAML,EAAO,CAAC,CAAC;AAAA;AAAA,0BAE5ED,EAAOG,CAAM,CAAC;AAAA;AAAA,yCAECG,CAAU;AAAA;AAAA,6BAEtBM,EAAM,EAAIC,CAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAWpC,IAAMC,EAAe;AAAA,QACnBP,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAOTI,CAAQ;AAAA,YACRF,EAAK,MAAM;AAAA;AAAA,cAGjB,MAAO,CACL,GAAGd,EACH,OAAQ,CAAC,KAAMU,EAAa,KAAMT,EAAE,KAAM,aAA+B,EACzE,aAAAkB,EACA,QAAS,EACX,CACF,EAES3B,GACT,CAACO,EAAyCE,EAAWC,EAAWC,EAC/DC,IAAkD,CACjD,IAAMJ,EAAWV,GAAkCc,EAAW,QAAQ,EACtE,MAAO,CACL,GAAGJ,EACH,IAAK,IAAMT,GAA8BQ,EAAkBC,EAAUC,EAAGC,EAAGC,EAAaC,CAAU,CACpG,CACF,ICtDJ,SAASgB,GACLC,EAA2BC,EAAkBC,EAAiE,CAChH,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAASH,EAAO,CAAC,EAAE,KACnBI,EAAcC,GAAc,UAAUH,EAAQC,EAAQ,EAAI,EAChE,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAME,EAAiBC,GAAkBH,EAAY,MAAM,EACrDI,EAAgBC,GAAc,EAC9B,CAAC,mBAAAC,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBX,CAAoB,EAEjFY,EAAUb,EAAO,OAAS,EAC1Bc,EAAcD,EAAU,+BAAiC,GACzDE,EACFF,EAAU,GAAGG,GAAiBV,EAAgBE,EAAeR,EAAO,CAAC,EAAE,KAAMI,EAAa,EAAK,CAAC,GAAK,GAEnGa,EAAOb,EAAY,OACnBc,EAAQhB,EAAO,OACfiB,EAAQhB,EAAO,OACfiB,EAAYlB,EAAOA,EAAO,OAAS,CAAC,EACpCmB,EAAe;AAAA,MACjBX,CAAkB;AAAA,MAClBK,CAAuB;AAAA,gCACGE,CAAI;AAAA,gBACpBC,CAAK;AAAA,
gBACLC,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKKC,CAAS;AAAA,gBACnBF,EAAQ,CAAC;AAAA,gBACTC,EAAQ,CAAC;AAAA;AAAA;AAAA,UAGfL,CAAW;AAAA,UACXH,CAAe;AAAA;AAAA,OAGvB,MAAO,CACL,GAAGZ,EACH,OAAQ,CAAC,KAAMK,EAAa,KAAMJ,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAqB,CACF,CACF,CAEO,SAASC,GACZtB,EAAkBC,EAAuE,CAC3F,IAAMF,EAAWwB,GAA4BvB,EAAO,OAAS,EAAGC,EAAqB,kBAAkB,EACvG,MAAO,CAAC,GAAGF,EAAU,IAAK,IAAMD,GAAwBC,EAAUC,EAAQC,CAAoB,CAAC,CACjG,CAqBO,SAASe,GACZV,EAAwBE,EAAkCgB,EAA4BC,EACtFC,EAA2B,CAC7B,IAAIC,EAAwB,GACtBC,EAASJ,EAAQ,OACjBK,EAAUJ,EAAS,OACnBK,EAAWD,EAAUD,EACvBC,EAAU,GAAKD,EAAS,EAC1BD,EAAwB,SAExBA,EAAwBH,EAAQ,IAAI,CAACO,EAAIC,IAAM,UAAUxB,EAAcwB,EAAIF,CAAQ,CAAC,EAAE,EAAE,KAAK,IAAI,EAGnG,IAAMG,EADgB5B,GAAc,iBAAiBmB,EAASC,CAAQ,EAClC,IAAIS,GAAK,UAAU1B,EAAc0B,EAAIJ,CAAQ,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EAE9FK,EADSC,EAAU,KAAKZ,CAAO,IACJ,EAC7Ba,EAAS,uCACb,OAAIF,IACFE,EAAS,uBAEoBX,EAAW;AAAA;AAAA,IAExCpB,CAAc;AAAA,IACd2B,CAAa;AAAA,+BACcN,CAAqB;AAAA,WACzCU,CAAM;AAAA,GAE2B;AAAA;AAAA,IAExC/B,CAAc;AAAA,IACd2B,CAAa;AAAA;AAAA,EAKjB,CAhJA,IAcaK,GAYAC,GAGPhB,GA6DAiB,GA1FNC,GAAAC,EAAA,kBAMAC,KAEAC,KACAC,KAEAC,KACAC,KAEaT,GACT,CAACU,EAAyChD,EAAkBiD,KAC1DT,GAAexC,CAAM,EAEjBgD,EAAiB,QAAQ,KACpB,CAACA,EAAiB,IACrBE,GAAoCF,EAAkBhD,EAAQiD,CAAU,EAAGjD,CAAM,CAAC,EAE/E,CAACgD,EAAiB,IAAI1B,GAA8BtB,EAAQiD,CAAU,EAAGjD,CAAM,CAAC,GAIlFuC,GACRY,GAAmDC,GAAkCD,EAAK,UAAU,EAEnG5B,GAA8B,CAACV,EAAkBwC,KAAuB,CAC5E,KAAM,SACN,WAAYxC,EAAU,CAAC,IAAK,IAAK,MAAM,EAAI,CAAC,IAAK,GAAG,EACpD,WAAYA,EAAU,MAAiE,EACjE,IAA2C,EACjE,UAAAwC,CACF,GAuDMb,GAAkBxC,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAKA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WACnDA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACtD,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,OAASA,EAAO,CAAC,EAAE,KAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,ICbA,SAASsD,GACLC,EAAwBC,EAAkCC,EAAkBC,EAAqC,CACnH,IAAIC,EAAyB,CAAC,EAC1BC,EAAyB,CAAC,EAExBC,EAAWJ,EAAO,CAAC,EAAE,KACrBK,EAAWL,EAAO,CAAC,EAAE,KAErBM,EAAUF,EAAS,OACnBG,EAAUF,EAAS,OAEnBG,EAAUP,EAAS,OACnBQ,EAAYD,EAAUF,EACtBI,EAAYF,EAAUD,EAE5BL,EAAyBE,EAAS,IAAI,CAACO,EAAIC,IAAM,UAAUb,EAAca,EAAIH,CAAS,CAAC,EAAE,EACzFP,EAAuBI,EAAU,CAAC,EAAI,MACtCJ,EAAuB,KAAK,IAAI,EAChCC,EAAyBE,EAAS,IAAI,CAACM,EAAIC,IAAM,UAAUb,EAAca,EAAIF,CAAS,CAAC,EAAE,EACzFP,EAAuBI,EAAU,CAAC,EAAI,MACtCJ,EAAuB,KAAK,IAAI,EAEhC,IAAMU,EAAiBC,GAAc,iBAAiBV,EAAUH,CAAQ,EAClEc,EAAiBD,GAAc,iBAAiBT,EAAUJ,CAAQ,EAElEe,EAAiBH,EAAe,IAAII,GAAK,UAAUlB,EAAckB,EAAIR,CAAS,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EACjGS,EAAiBH,EAAe,IAAIE,GAAK,UAAUlB,EAAckB,EAAIP,CAAS,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EACjGS,EAAiB,wBAAwBpB,EAAcS,EAAU,CAAC,CAAC;AAAA,WAChET,EAAcS,EAAU,CAAC,CAAC,aAAaT,EAAcS,EAAU,CAAC,CAAC;AAAA,WACjET,EAAcS,EAAU,CAAC,CAAC,cAmBnC,MAjBoC;AAAA;AAAA,IAElCV,CAAc;AAAA,IACdqB,CAAc;AAAA,IACdH,CAAc;AAAA,4BACUd,CAAsB;AAAA;AAAA;AAAA;AAAA;AAAA,IAK9CJ,CAAc;AAAA,IACdqB,CAAc;AAAA,IACdD,CAAc;AAAA,4BACUf,CAAsB;AAAA;AAAA,EAKlD,CAEA,SAASiB,GAAKrB,EAAyBsB,EAAsB,CAC3D,IAAIC,EAAM,GACV,QAASV,EAAI,EAAGA,EAAIS,EAAO,EAAGT,IAC5BU,GAAO,MAAMvB,EAAca,CAAC,CAAC,KAE/B,OAAAU,GAAO,MAAMvB,EAAcsB,EAAO,CAAC,CAAC,QAE7BC,CACT,CAEA,SAASC,GAAKxB,EAAyBsB,EAAsB,CAC3D,IAAIC,EAAM,GACV,QAASV,EAAI,EAAGA,EAAIS,EAAO,EAAGT,IAC5BU,GAAO,MAAMvB,EAAca,CAAC,CAAC,KAE/B,OAAAU,GAAO,WACGvB,EAAcsB,EAAO,CAAC,CAAC,GAC1BC,CACT,CAnKA,IAaME,GAQAC,GA+DOC,GApFbC,GAAAC,EAAA,kBAIAC,KACAC,KAEAC,KACAC,KAEAC,KACAC,KAEMV,GAAoC,CAACW,EAAkBC,KAAuB,CAClF,KAAM,kBACN,WAAYD,EAAU,CAAC,IAAK,IAAK,MAAM,EAAI,CAAC,IAAK,GAAG,EACpD,WAAYA,EAAU
,MAA2D,EAC3D,IAAuC,EAC7D,UAAAC,CACF,GAEMX,GACF,CAACY,EAAyCC,EAA2BtC,EACpEuC,IAAoE,CACnE,IAAMJ,EAAUnC,EAAO,OAAS,EAC1BwC,EAAcL,EAAU,+BAAiC,GACzDM,EAASzC,EAAO,CAAC,EAAE,KACnB0C,EAAS1C,EAAO,CAAC,EAAE,KACnB2C,EAAc7B,GAAc,UAAU2B,EAAQC,EAAQ,EAAI,EAC1DE,EAAc,CAACC,EAAU,SAAS7C,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,EAEtE,GAAI,CAAC2C,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMG,EAAYL,EAAOA,EAAO,OAAS,CAAC,EACpCM,EAAiB,KAAK,KAAKD,EAAY,CAAC,EACxCE,EAAQP,EAAO,OACfQ,EAAQP,EAAO,OAEfQ,EAAOC,GAAQd,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEvC,EAAiBsD,GAAkBT,EAAY,MAAM,EACrDnC,EAAUmC,EAAY,OACtB5C,EAAgBsD,GAAc,EAC9B,CAAC,mBAAAC,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBjB,CAAoB,EAEjFkB,EACFtB,EAAU,GAAGuB,GAAiB5D,EAAgBC,EAAeC,EAAO,CAAC,EAAE,KAAM2C,EAAa,EAAI,CAAC,GAAK,GAElGgB,EACFf,EAAc,GAAG/C,GAAyBC,EAAgBC,EAAeC,EAAQ2C,CAAW,CAAC,GAAK,GAEhGiB,EAA2BhB,EAAc,2BAA6B,QAAQxB,GAAKrB,EAAeiD,CAAK,CAAC,IACxGa,EAA2BjB,EAAc,2BAA6B,QAAQrB,GAAKxB,EAAekD,CAAK,CAAC,IACxGa,EAAyBlB,EAAc,GAAK,GAAG9C,CAAc;AAAA,gDACzBC,EAAcS,EAAU,CAAC,CAAC,QAAQT,EAAcS,EAAU,CAAC,CAAC;AAAA,eAC7FT,EAAcS,EAAU,CAAC,CAAC,QAAQT,EAAcS,EAAU,CAAC,CAAC;AAAA,QAE/DuD,EAAe;AAAA,cACbJ,CAAiC;AAAA,cACjCF,CAAuB;AAAA,cACvBH,CAAkB;AAAA;AAAA,gBAEhBQ,CAAsB;AAAA;AAAA;AAAA,oCAGFf,CAAc;AAAA,2BACvBa,CAAwB;AAAA,2BACxBC,CAAwB;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKnCrB,CAAW;AAAA,gBACXe,CAAe;AAAA,gBACfL,EAAK,MAAM;AAAA,eAErB,MAAO,CACL,GAAGZ,EACH,OAAQ,CAAC,KAAMK,EAAa,KAAM3C,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,aAAA+D,EACA,QAAS,EACX,CACF,EAESrC,GACT,CAACW,EAAyCrC,EACzCuC,IAA0E,CACzE,IAAMD,EAAWd,GAAkCxB,EAAO,OAAS,EAAGuC,EAAqB,kBAAkB,EAC7G,MAAO,CACL,GAAGD,EACH,IAAK,IAAMb,GAA8BY,EAAkBC,EAAUtC,EAAQuC,CAAoB,CACnG,CACF,IC5FJ,IAyBayB,GAzBbC,GAAAC,EAAA,kBAMAC,KACAC,KACAC,KAiBaL,GACT,CAACM,EAAyCC,EAA2BC,IAAuC,CAC1G,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAASH,EAAO,CAAC,EAAE,KACnBI,EACFC,GAAqBH,EAAQC,EAAQF,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAG5FK,EAAeP,EAAiB,IAClCQ,GAAoCR,EAAkBC,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGI,EAAaH,CAAU,EACnG,CAACD,EAAO,CAAC,CAAC,CAAC,EAGTQ,EAAiBT,EAAiB,cAAcC,EAAO,CAAC,EAAG,CAACG,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAIA,EAAO,CAAC,CAAC,CAAC,EAGzGM,EACDT,EAAO,SAAW,EAAK,CAACQ,EAAgBF,EAAcN,EAAO,CAAC,CAAC,EAAI,CAACQ,EAAgBF,CAAY,EAC/FI,EAAeX,EAAiB,IAClCY,GAAoCZ,EAAkBU,EAAcR,CAAU,EAAGQ,CAAY,EAIjG,OADuBV,EAAiB,cAAcW,EAAcN,CAAW,CAEjF,ICjDJ,IASMQ,GAOAC,GA6DOC,GAWAC,GAxFbC,GAAAC,EAAA,kBAKAC,KAIMN,GAA+BO,IAAuB,CAC1D,KAAM,SACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,EACjC,UAAAA,CACF,GAEMN,GACF,CAACO,EAA0CC,EAA2BC,EAAWC,EAChFC,EAAgCC,IAA4C,CAC3E,IAAMC,EAASJ,EAAE,KACXK,EAASJ,EAAE,KAEXK,EAAOJ,EAAY,OACnBK,EAAad,GAAoBW,EAAQC,EAAQH,EAAa,CAAC,EAE/DM,EAAe;AAAA,yBACFJ,EAAO,CAAC,CAAC;AAAA,yBACTA,EAAO,CAAC,CAAC;AAAA,yBACTA,EAAO,CAAC,CAAC;AAAA,yBACTD,EAAW,YAAY,CAAC,CAAC;AAAA,yBACzBA,EAAW,YAAY,CAAC,CAAC;AAAA,gCAClBA,EAAW,UAAU,CAAC,CAAC;AAAA,gCACvBA,EAAW,UAAU,CAAC,CAAC;AAAA,8BACzBA,EAAW,QAAQ,CAAC,CAAC;AAAA,8BACrBA,EAAW,QAAQ,CAAC,CAAC;AAAA,2BACxBA,EAAW,KAAK,CAAC,CAAC;AAAA,2BAClBA,EAAW,KAAK,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,mCAIVG,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAajBF,EAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAiB7B,MAAO,CACL,GAAGL,EACH,OAAQ,CAAC,KAAMQ,EAAY,KAAMP,EAAE,KAAM,aAA4C,EACrF,aAAAQ,CACF,CACF,EAEShB,GACT,CAACiB,EAAyCT,EAAWC,EAAWC,EAC/DC,IAAkD,CACjD,IAAMJ,EAAWT,GAA4Ba,EAAW,QAAQ,EAChE,MAAO,CACL,GAAGJ,EACH,IAAK,IAAMR,GAAwBkB,EAAkBV,EAAUC,EAAGC,EAAGC,EAAaC,CAAU,CAC9F,CACF,EAGSV,GACT,CAACiB,EAA+BC,EAAgCT,EAAgCU,EAAW,IAEnG,CAACV,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAC7C,KAAK,KAAKQ,EAAW,CAAC,EAAIC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAIC,CAAQ,CAAC,IC5FlF,IAYMC,GAQAC,GAiDOC
,GArEbC,GAAAC,EAAA,kBAIAC,KACAC,KAEAC,KAEAC,KACAC,KAEMT,GAAkC,CAACU,EAAkBC,KAA8C,CACvG,KAAM,iBACN,WAAYD,EAAU,CAAC,SAAU,IAAK,GAAG,EAAI,CAAC,SAAU,GAAG,EAC3D,WAAYA,EAAU,MAA4E,EAC5E,IAAsD,EAC5E,SAAUC,EAAW,kBACvB,GAEMV,GACF,CAACW,EAAyCC,EAA2BC,EACpEC,EAAuBJ,IAA0D,CAChF,IAAMK,EAASF,EAAO,CAAC,EAAE,KACnBG,EAASH,EAAO,CAAC,EAAE,KACnBI,EAAsB,CAACD,EAAO,CAAC,EAAG,KAAK,KAAMD,EAAO,CAAC,EAAIC,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAK,CAAC,CAAC,EACpFE,EAAcC,GAAoBJ,EAAQC,EAAQF,CAAW,EAC7D,CAACM,EAAQC,CAAO,EAClBV,EAAiB,+BAA+BM,GAAoD,EAElGK,EAAgBC,EAAU,eAAeL,CAAW,EACpD,CAACM,EAAaC,CAAY,EAC5Bd,EAAiB,+BAA+BO,GAA4C,EAC1FQ,EAAOZ,EAAY,OAEnBa,EAAad,EAAO,OAAS,EAAK,MAAQ,QAC1Ce,EAAY,KAAK,KAAKb,EAAO,CAAC,EAAIC,EAAO,CAAC,EAAIA,EAAO,CAAC,EAAI,CAAC,EAC3D,CAAC,mBAAAa,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBrB,CAAU,EACvEsB,EAAOC,GAAQtB,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEuB,EAAe;AAAA,EACzBL,CAAkB;AAAA,4BACQH,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAOGJ,EAAc,CAAC,CAAC,kBAAkBA,EAAc,CAAC,CAAC,kBAC3EA,EAAc,CAAC,CAAC;AAAA,oCACUL,EAAoB,CAAC,CAAC;AAAA,kBACxCU,CAAS;AAAA,wBACHC,CAAS;AAAA,uDACsBJ,CAAW,KAAKC,CAAY;AAAA,uDAC5BL,CAAM,KAAKC,CAAO;AAAA,mBACtDW,EAAK,SAAS,2BAA2BA,EAAK,SAAS;AAAA;AAAA;AAAA;AAAA,IAItEF,CAAe;AAAA;AAAA,GAGb,MAAO,CACL,GAAGlB,EACH,OAAQ,CAAC,KAAME,EAAa,KAAMD,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAqB,CACF,CACF,EAESjC,GACT,CAACU,EAAyCE,EAA2BC,EACpEJ,IAAgE,CAC/D,IAAME,EAAWb,GAAgCc,EAAO,OAAS,EAAGH,CAAU,EAC9E,MAAO,CACL,GAAGE,EACH,IAAK,IAAMZ,GAA4BW,EAAkBC,EAAUC,EAAQC,EAAaJ,CAAU,CACpG,CACF,IC7EJ,IAmBayB,GAyBAC,GAMPC,GAkBAC,GAcAC,GAeAC,GAkBOC,GAcPC,GAjINC,GAAAC,EAAA,kBAGAC,KAKAC,KAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAGajB,GACT,CAACkB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,IAAyC,CACvE,IAAMC,EAAYL,EAAW,CAAC,EACxBM,EAAoBN,EAAW,MAAM,CAAC,EACtCO,EAAcD,EAAkB,OAChCE,EAAcP,EAAY,CAAC,EAE3BQ,EADqBR,EAAY,MAAM,CAAC,EACA,IAAI,CAACS,EAAGC,IAAMD,GAAKA,EAAI,IAAMR,EAAUS,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIP,EAAWQ,CAAC,EAAIR,EAAWQ,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIP,EAAQO,CAAC,GAAKP,EAAQO,CAAC,CAAC,CAAC,EAE5G,MADoB,CAACN,EAAWG,CAAW,EAAE,OAAO,GAAGI,CAAkB,CAE3E,EAWS7B,GACT,CAAC8B,EAAoCC,EAAkBC,KACrD1B,GAAeyB,EAAQC,CAAU,EAC1B/B,GAAO6B,EAAkBC,EAAQC,CAAU,GAGlD/B,GACF,CAAC6B,EAAyCC,EAAkBC,IAAyC,CACnG,IAAMC,EAAqB7B,GAA0B4B,EAAYD,CAAM,EACjEG,EAAWJ,EAAiB,QAAQ,KACpCK,EAAcF,EAAmB,YAAY,CAAC,IAAM,GAAKA,EAAmB,YAAY,CAAC,IAAM,EACrG,OAAIA,EAAmB,MAAQ,EAGtB,CAFQH,EAAiB,IAC5BM,GAA2CN,EAAkBC,EAAQE,CAAkB,EAAGF,CAAM,CACtF,EACLI,GAAeD,EACjB,CAAChC,GAAwB4B,EAAkBC,EAAQE,CAAkB,CAAC,EACpEC,GAAYH,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAK,CAACI,EACzE,CAACE,GAAaP,EAAkBC,EAAQE,CAAkB,CAAC,EAE3D,CAAC9B,GAAe2B,EAAkBC,EAAQE,CAAkB,CAAC,CAExE,EAEE/B,GACF,CAAC4B,EAAyCC,EAA2BC,IAAuC,CAC1G,IAAMM,EAASP,EAAO,CAAC,EAAE,KACnBQ,EAASR,EAAO,CAAC,EAAE,KACnBS,EACFzC,GAAqBuC,EAAQC,EAAQP,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAC5FS,EAAYX,EAAiB,gBAAgBC,EAAO,CAAC,EAAG,CAACO,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAIA,EAAO,CAAC,CAAC,CAAC,EAC1FI,EAAYZ,EAAiB,gBAAgBC,EAAO,CAAC,EAAG,CAACQ,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,CAAC,EAE9EI,EAAeZ,EAAO,OAAS,EAAI,CAACW,EAAWD,EAAWV,EAAO,CAAC,CAAC,EAAI,CAACW,EAAWD,CAAS,EAC5FG,EAAed,EAAiB,IAAIe,GAA8BF,EAAcX,CAAU,EAAGW,CAAY,EAC/G,OAAOb,EAAiB,gBAAgBc,EAAcJ,CAAW,CACnE,EAEErC,GACF,CAAC2B,EAAyCC,EAA2BC,IAAuC,CAC1G,IAAMM,EAASP,EAAO,CAAC,EAAE,KACnBQ,EAASR,EAAO,CAAC,EAAE,KACnBS,EACFzC,GAAqBuC,EAAQC,EAAQP,EAAW,UAAWA,EAAW,KAAMA,EAAW,OAAO,EAC5Fc,EAAUhB,EAAiB,IAC7BiB,GAA8BjB,EAAkBC,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGS,EAAaR,CAAU,EAAG,CAACD,EAAO,CAAC,CAAC,CAAC,EAEzGiB,EAAmBjB,EAAO,SAAW,EAAI,CAACe,EAASf,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EAAI,CAACe,EAASf
,EAAO,CAAC,CAAC,EAGpG,OAFeD,EAAiB,IAC5BmB,GAAkCnB,EAAkBC,EAAQS,EAAaR,CAAU,EAAGgB,CAAgB,CAE5G,EAEE5C,GAA4B,CAA2B4B,EAAeD,IAAwB,CAClG,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,EACpC,QAAS,EAAI,EAAG,EAAID,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAE,EAC3Cb,EAAY,KAAKa,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAGtC,IAAMmB,EAAOlB,EAAW,KAAK,MAAM,EACnCmB,GAAa,yBACTpB,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAagC,EAAMlB,EAAW,OAAO,EAGnG,IAAMoB,EAAmB,OAAO,OAAO,CAAC,EAAGpB,CAAU,EACrD,cAAO,OAAOoB,EAAe,CAAC,YAAAlC,EAAa,KAAAgC,EAAM,SAAUlB,EAAW,QAAQ,CAAC,EACxEoB,CACT,EAEa/C,GAA+DgD,GAAqC,CAC/G,IAAMrB,EAAaqB,EAAK,WAClBC,EAAuBC,GAAkCvB,CAAU,EAEnEwB,EAAUxB,EAAW,UAAU,WAAY,QAAQ,EACnDb,EAAYa,EAAW,QAAQ,YAAa,CAAC,EAAG,CAAC,CAAC,EAClDyB,EAAQzB,EAAW,OAAO,QAAS,CAAC,EACpCd,EAAcc,EAAW,QAAQ,eAAgB,CAAC,CAAC,EACnDkB,EAAOlB,EAAW,QAAQ,OAAQ,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,EAC9CX,EAAUW,EAAW,QAAQ,UAAW,CAAC,EAAG,CAAC,CAAC,EAEpD,OAAO0B,GAA4B,CAAC,QAAAF,EAAS,UAAArC,EAAW,MAAAsC,EAAO,YAAAvC,EAAa,KAAAgC,EAAM,QAAA7B,EAAS,GAAGiC,CAAoB,CAAC,CACrH,EAEMhD,GAAiB,CAACyB,EAAkBC,IAAqC,CAG7E,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAI7D,IAAM4B,EAAc5B,EAAO,CAAC,EAAE,KAAK,CAAC,EAC9B6B,EAAkB7B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAI2B,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAI7B,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMP,EAAcO,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWR,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIQ,EAAW,QAAQ,SAAWR,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIQ,EAAW,KAAK,SAAWR,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIQ,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAAS,UAC5C,MAAM,IAAI,MAAM,yCAAyC,CAE7D,ICvLA,IAeM8B,GAIAC,GAWAC,GAsBOC,GAMPC,GAMAC,GAQAC,GA2DAC,GAWAC,GAQAC,GAwBOC,GAkBPC,GAhMNC,GAAAC,EAAA,kBAGAC,KAKAC,KAEAC,KAGAC,KAEMjB,GACF,CAACkB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DtB,GAAoB,CAACuB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEM3B,GACF,CAAC4B,EAA+BC,EAAgCC,EAA8BP,EAC7FC,EAAgBO,EAA4BC,EAAkCC,IAA0B,CACvG,IAAMC,EAAcN,EAAW,OAAS,EAClCO,EAAcF,EAAY,SAAW,EAC3C,QAASG,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EAAG,CACpC,IAAMf,EAAUc,EAAcP,EAAWQ,EAAI,CAAC,EAAIL,EAAQK,CAAC,EAAIH,EAAYG,CAAC,EACtEd,EAAWxB,GAAgB8B,EAAWQ,EAAI,CAAC,EAAGL,EAAQK,CAAC,EAAGZ,EAAKY,CAAC,EAAGP,EAAYO,CAAC,EAAGN,EAAUM,CAAC,EAAGf,CAAO,EAC9GtB,GAAkBuB,EAAUC,EAASC,EAAMY,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRF,EAAQK,CAAC,GAAKR,EAAWQ,EAAI,CAAC,EAAI,GAAKJ,EAAcI,CAAC,GAAKP,EAAYO,CAAC,EAAI,GAAKN,EAAUM,CAAC,EAAI,EAChGZ,EAAKY,CAAC,EAAIZ,EAAKY,EAAIF,CAAW,CAAC,CAEvC,CACF,EAOSjC,GACT,CAACoC,EAAoCC,EAAkBC,KACrD9B,GAAe6B,EAAQC,CAAU,EAC1BrC,GAAgBmC,EAAkBC,EAAQC,CAAU,GAG3DrC,GACF,CAACmC,EAAyCC,EAAkBC,IAAkD,CAC5G,IAAMC,EAAqBjC,GAAmCgC,EAAYD,CAAM,EAChF,MAAO,CAAChC,GAAwB+B,EAAkBC,EAAQE,CAAkB,CAAC,CAC/E,EAEErC,GAAqC,CAACsC,EAAkBC,KAAuB,CACnF,KAAM,gBACN,WAAYD,EAAU,CAAC,IAAK,IAAK,GAAG,EAAI,CAAC,IAAK,GAAG,EACjD,WAAYA,EAAU,MAAiE,EACjE,IAA2C,EACjE,UAAAC,CACF,GAEMtC,GACF,CAACiC,EAAyCC,EAA2BK,EACpEJ,IAAqD,CAEpD,IAAMK,EADUN,EAAO,OAAS,EACJ,uBAAyB,MAC/CO,EAASP,EAAO,
CAAC,EAAE,KACnBQ,EAASR,EAAO,CAAC,EAAE,KACnBS,EAAyBD,EAAO,CAAC,EACjCE,EAAwBF,EAAO,CAAC,EAAIP,EAAW,MAC/CN,EAAc,CAACK,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MAAO,GAAGA,EAAW,WAAW,EACjGU,EAAOC,GAAQb,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAAC,mBAAAc,EAAoB,gBAAAC,CAAe,EAAIC,GAAqBd,CAAU,EAEvEe,EAAe;AAAA,gCACKf,EAAW,QAAQ,CAAC,CAAC,KAAKA,EAAW,QAAQ,CAAC,CAAC;AAAA,6BAClDA,EAAW,KAAK,CAAC,CAAC,KAAKA,EAAW,KAAK,CAAC,CAAC;AAAA,IAClEY,CAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAQgBJ,CAAsB;AAAA,oDACRA,CAAsB;AAAA;AAAA,oBAEtDH,CAAS;AAAA,sDACyBI,CAAqB;AAAA,uCACpCA,CAAqB;AAAA,oCACxBF,EAAO,CAAC,CAAC;AAAA,sCACPA,EAAO,CAAC,CAAC;AAAA,uCACRP,EAAW,UAAU,CAAC,CAAC,aAAaA,EAAW,UAAU,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0CAKxDM,EAAO,CAAC,CAAC;AAAA,0CACTA,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAS7CO,CAAe;AAAA,MACfH,EAAK,MAAM;AAAA;AAAA,EAGX,MAAO,CACL,GAAGN,EACH,OAAQ,CAAC,KAAMV,EAAa,KAAMK,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAgB,EACA,QAAS,EACX,CACF,EAEEjD,GACF,CAACgC,EAAyCC,EAA2BC,IAC5C,CACnB,IAAMI,EAAWxC,GAAmCmC,EAAO,OAAS,EAAGC,EAAW,QAAQ,EAC1F,MAAO,CACL,GAAGI,EACH,IAAK,IAAMvC,GAAuCiC,EAAkBC,EAAQK,EAAUJ,CAAU,CAClG,CACF,EAGFjC,GACF,CAAC+B,EAAyCC,EAA2BC,IAEhDF,EAAiB,IAC5BhC,GAA6CgC,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,EAIlG/B,GAAqC,CAAoCgC,EAAeD,IAAwB,CACpH,IAAMT,EAAcU,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,EACpC,QAASH,EAAI,EAAGA,EAAIE,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEF,EAC3CP,EAAY,KAAKS,EAAO,CAAC,EAAE,KAAKF,CAAC,CAAC,EAItC,IAAMZ,EAAOe,EAAW,KAAK,MAAM,EAC7BN,EAAcM,EAAW,YAAY,MAAM,EAC3CX,EAAaU,EAAO,CAAC,EAAE,KAG7BtC,GACI4B,EAAYC,EAAaU,EAAW,UAAWA,EAAW,QAASf,EAAMe,EAAW,QACpFA,EAAW,cAAeN,CAAW,EAGzC,IAAMsB,EAAmB,OAAO,OAAO,CAAC,EAAGhB,CAAU,EACrD,cAAO,OAAOgB,EAAe,CAAC,YAAA1B,EAAa,KAAAL,EAAM,YAAAS,EAAa,SAAUM,EAAW,QAAQ,CAAC,EACrFgB,CACT,EAEa/C,GACRgD,GAA8C,CAC7C,IAAMjB,EAAaiB,EAAK,WAClBC,EAAuBC,GAAkCnB,CAAU,EAEnEhB,EAAUgB,EAAW,UAAU,WAAY,QAAQ,EACnDT,EAAYS,EAAW,QAAQ,YAAa,CAAC,EAAG,CAAC,CAAC,EAClDoB,EAAQpB,EAAW,OAAO,QAAS,CAAC,EACpCV,EAAcU,EAAW,QAAQ,eAAgB,CAAC,CAAC,EACnDP,EAAgBO,EAAW,QAAQ,iBAAkB,CAAC,EAAG,CAAC,CAAC,EAC3DN,EAAcM,EAAW,QAAQ,eAAgB,CAAC,CAAC,EACnDf,EAAOe,EAAW,QAAQ,OAAQ,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,EAC9CR,EAAUQ,EAAW,QAAQ,UAAW,CAAC,EAAG,CAAC,CAAC,EAEpD,OAAOqB,GACH,CAAC,QAAArC,EAAS,UAAAO,EAAW,MAAA6B,EAAO,YAAA9B,EAAa,cAAAG,EAAe,YAAAC,EAAa,KAAAT,EAAM,QAAAO,EAAS,GAAG0B,CAAoB,CAAC,CAClH,EAEEhD,GAAiB,CAAC6B,EAAkBC,IAA8C,CAGtF,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAI7D,IAAMuB,EAAcvB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC9BwB,EAAkBxB,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIuB,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAczB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MAGnD,GAAID,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMyB,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAM7B,EAAcI,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWL,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIK,EAAW,QAAQ,SAAWL,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIK,EAAW,KAAK,SAAWL,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIK,EAAW,cAAc,SAAWL,EACtC,MAAM,IAAI,MAAM,4BAA4BA,CAAW,GAAG,EAK5D,GAAIK,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIC,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAAS,UAC5C,MAAM,IAAI,MAAM,kDAAkD,CAEtE,IClQA,IAeM0B,GAMOC,GAaAC,GAGPC,GAu
BAC,GAOAC,GAKAC,GAUAC,GAlFNC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAMMZ,GAA2B,CAC/B,KAAM,YACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACY,EAAyCC,EAAkBC,KAC1DR,GAAeO,CAAM,EAQd,CAPQD,EAAiB,IAC5B,CACE,GAAGb,GACH,UAAWe,EAAW,SACtB,IAAK,IAAMZ,GAA2BU,EAAkBC,EAAO,CAAC,EAAGC,EAAW,IAAI,CACpF,EACAD,CAAM,CACI,GAGPZ,GACRc,GAA0CC,GAA4B,CAAC,KAAMD,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,CAAC,CAAC,EAEhHb,GACF,CAACe,EAA0CC,EAAeC,IAAgC,CACxF,IAAMC,EAAaF,EAAM,KACzBC,EAAOhB,GAAgBiB,EAAYD,CAAI,EACvC,IAAME,EAAsBjB,GAAegB,EAAYD,CAAI,EACrDG,EAAOF,EAAW,OAIlBG,EAAe;AAAA,QACnBlB,GAAoB,OAAQc,EAAMG,CAAI,CAAC;AAAA,kCACbA,CAAI;AAAA,gBACtBA,CAAI;AAAA;AAAA;AAAA,SAId,MAAO,CACL,GAAGvB,GACH,OAAQ,CAAC,KAAMsB,EAAqB,KAAMH,EAAM,KAAM,aAAiC,EACvF,aAAAK,CACF,CACF,EAEEpB,GAAkB,CAACiB,EAA+BD,KAClDA,GAAQA,EAAK,SAAWC,EAAW,SACrCD,EAAO,CAAC,GAAIC,EAAW,KAAK,CAAE,EAAE,QAAQ,GAEnCD,GAGHf,GAAiB,CAACgB,EAA+BD,KACrDA,EAAOhB,GAAgBiB,EAAYD,CAAI,EAChCK,EAAU,gBAAgBJ,EAAYD,CAAI,GAG7Cd,GAAsB,CAACoB,EAAcN,EAAgBG,IAAyB,CAClF,IAAMI,EAAc,CAAC,EACrBA,EAAY,KAAK,QAAQD,CAAI,cAAcH,CAAI,cAAcA,CAAI,MAAM,EACvE,QAASK,EAAI,EAAGA,EAAIL,EAAM,EAAEK,EAC1BD,EAAY,KAAK,MAAOP,EAAKQ,CAAC,CAAC,SAASA,CAAC,IAAI,EAE/C,OAAAD,EAAY,KAAK,IAAK,EACfA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMpB,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,8BAA8B,CAElD,IC1FA,IAeae,GAqCAC,GAcPC,GAlENC,GAAAC,EAAA,kBAQAC,KAOaL,GACT,CAACM,EAAyCC,EAAkBC,IAAiD,CAC3GN,GAAeK,CAAM,EACrB,IAAME,EAAYD,EAAW,UACvBE,EAAeD,EAAYA,EAC3BE,EAAgBH,EAAW,OAAS,MAAQ,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAClFI,EAAoBJ,EAAW,OAAS,MAC1C,CACED,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGE,EAAWA,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIG,EAAcH,EAAO,CAAC,EAAE,KAAK,CAAC,EAC3FA,EAAO,CAAC,EAAE,KAAK,CAAC,CAClB,EACA,CACEA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIG,EAAcD,EAAWA,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAC3FA,EAAO,CAAC,EAAE,KAAK,CAAC,CAClB,EAQEM,EAAsBP,EAAiB,gBAAgBC,EAAO,CAAC,EAAGK,CAAiB,EAGnFE,EAA2C,CAAC,KAAMH,EAAe,SAAU,GAAGA,CAAa,EAAE,EAC7F,CAACI,CAAe,EAAIC,GAAUV,EAAkB,CAACO,CAAmB,EAAGC,CAAmB,EAG1FG,EAAqB,CACzBV,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIG,EAAcH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIE,EACzEF,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIE,CACtB,EAEA,MAAO,CADQH,EAAiB,gBAAgBS,EAAiBE,CAAkB,CACrE,CAChB,EAEShB,GACRiB,GAA6C,CAE5C,IAAMT,EAAYS,EAAK,WAAW,OAAO,WAAW,EACpD,GAAIT,EAAY,EACd,MAAM,IAAI,MAAM,qCAAqCA,CAAS,mBAAmB,EAEnF,IAAMU,EAAOD,EAAK,WAAW,UAAU,OAAQ,KAAK,EACpD,GAAIC,IAAS,OAASA,IAAS,MAC7B,MAAM,IAAI,MAAM,sBAAsBA,CAAI,mBAAmB,EAE/D,MAAO,CAAC,KAAAA,EAAM,UAAAV,CAAS,CACzB,EAEEP,GAAkBK,GAA2B,CACjD,GAAIA,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyCA,EAAO,MAAM,EAAE,EAK1E,GAAIA,EAAO,CAAC,EAAE,OAAS,UAAYA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,UAAU,mDAAmD,CAE3E,IC5EA,IASaa,GAQAC,GAGPC,GApBNC,GAAAC,EAAA,kBAMAC,KAGaL,GACT,CAACM,EAAyCC,EAAkBC,IAA2B,CACrFN,GAAeK,EAAQC,CAAI,EAE3B,IAAMC,EAAaC,EAAU,aAAaH,EAAO,CAAC,EAAE,KAAMC,CAAI,EAC9D,MAAO,CAACF,EAAiB,gBAAgBC,EAAO,CAAC,EAAGE,CAAU,CAAC,CACjE,EAESR,GAA0DU,GACnEA,EAAK,WAAW,OAAO,OAAQ,CAAC,EAE9BT,GAAiB,CAACK,EAAkBC,IAAuB,CAC/D,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,IAAMK,EAAIL,EAAO,CAAC,EAAE,KAAK,OACzB,GAAIK,IAAM,EACR,MAAM,IAAI,MAAM,iCAAiC,EAGnD,GAAIJ,EAAO,CAACI,GAAKJ,EAAOI,EACtB,MAAM,IAAI,MAAM,cAAc,EAIhC,GAAIL,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,iCAAiC,CAErD,ICtCA,IAeaM,GAfbC,GAAAC,EAAA,kBAeaF,GACT,CAAC,UAAW,UAAW,QAAS,QAAS,OAAQ,SAAU,SAAU,OAAO,IChBhF,IAeaG,GAOAC,GAGPC,GAMAC,GAgDAC,GAMAC,GArFNC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KAMaX,GACT,CAACY,EAAyCC,E
AAkBC,KAC1DT,GAAeQ,EAAQC,EAAW,IAAI,EAE/B,CADQF,EAAiB,IAAIR,GAA8BQ,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CACjG,GAGPZ,GAAmEc,GAC5EC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,OAAQ,CAAC,CAAC,CAAC,EAEnEb,GAAwB,CAC5B,KAAM,SACN,WAAY,CAAC,IAAK,GAAG,EACrB,WAAY,IAA2C,CACzD,EAEMC,GACF,CAACc,EAAiCC,EAA2BL,EAAkBM,IAA8B,CAC3G,IAAMC,EAAaP,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCQ,EAAiBR,EAAO,CAAC,EAAE,KAAK,MAAM,EACtCS,EAAc,IAAI,MAAMF,EAAW,OAASC,EAAe,OAAS,CAAC,EAE3EF,EAAOI,EAAU,cAAcJ,EAAMC,EAAW,MAAM,EACtD,IAAMI,EAAyB,CAAC,EAChC,QAASC,EAAI,EAAGA,EAAIH,EAAY,OAAQG,IAMlCA,EAAIN,GACNG,EAAYG,CAAC,EAAIL,EAAWK,CAAC,EAC7BD,EAAa,KAAK,YAAYC,CAAC,iBAAiBA,CAAC,IAAI,GAEjDA,EAAIN,EAAOE,EAAe,QAC5BC,EAAYG,CAAC,EAAIJ,EAAeI,EAAIN,CAAI,EACxCK,EAAa,KAAK,gBAAgBC,EAAIN,CAAI,iBAAiBM,CAAC,IAAI,IAEhEH,EAAYG,CAAC,EAAIL,EAAWK,EAAIJ,EAAe,OAAS,CAAC,EACzDG,EAAa,KAAK,YAAYC,EAAIJ,EAAe,OAAS,CAAC,iBAAiBI,CAAC,IAAI,GAKvF,IAAMC,EAAQJ,EAAY,QAAU,EAC9BK,EAAQP,EAAW,OACnBQ,EAASP,EAAe,QAAU,EAClCQ,EAAe;AAAA,oCACSH,CAAK;AAAA,uBAClBC,CAAK;AAAA,2BACDC,CAAM;AAAA;AAAA,UAEvBJ,EAAa,KAAK;AAAA,SAAY,CAAC;AAAA;AAAA,mBAEtBL,CAAI,uBAAuBC,EAAWD,CAAI,CAAC;AAAA;AAAA,SAGxD,MAAO,CACL,GAAGD,EACH,OAAQ,CAAC,KAAMI,EAAa,KAAMT,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAgB,CACF,CACF,EAEEzB,GACF,CAAC0B,EAAgCjB,EAAkBC,IAAoD,CACrG,IAAMI,EAAW,CAAC,GAAGhB,GAAuB,UAAWY,EAAW,QAAQ,EAC1E,MAAO,CAAC,GAAGI,EAAU,IAAK,IAAMf,GAAwB2B,EAASZ,EAAUL,EAAQC,EAAW,IAAI,CAAC,CACrG,EAEET,GAAiB,CAACQ,EAAkBM,IAAuB,CAC/D,GAAI,CAACN,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAE7C,IAAMkB,EAAalB,EAAO,CAAC,EAAE,KAAK,OAClC,GAAIkB,EAAa,EACf,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIZ,EAAO,CAACY,GAAcZ,EAAOY,EAAa,EAC5C,MAAM,IAAI,MAAM,eAAe,EAEjC,GAAIC,GAAa,QAAQnB,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,oBAAoB,EAEtC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,QACnD,MAAM,IAAI,MAAM,oBAAoB,CAExC,ICtGA,IAmBaoB,GAOPC,GAQOC,GAGAC,GAGPC,GAYAC,GA2DAC,GA/GNC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAUaX,GACT,CAACY,EAAyCC,EAAkBC,KAC1DR,GAAeO,EAAQC,CAAU,EAE1B,CADQF,EAAiB,IAAIR,GAA4BS,EAAQC,CAAU,EAAGD,CAAM,CAC7E,GAGdZ,GAAsB,CAACc,EAAkBC,IAAyC,CACtF,IAAMC,EAASF,EAAK,WAAW,OAAO,SAAU,CAAC,IAAM,EACjDG,EAASH,EAAK,WAAW,OAAO,SAAU,CAAC,IAAM,EACjDI,EAAQJ,EAAK,WAAW,SAAS,QAAS,CAAG,EAC7CK,EAAOL,EAAK,WAAW,SAAS,OAAQ,CAAG,EACjD,OAAOM,GAA4B,CAAC,OAAAJ,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,YAAAJ,CAAW,CAAC,CAC/E,EAEad,GAAiEa,GAC1Ed,GAAoBc,EAAM,EAAK,EAEtBZ,GAAkEY,GAC3Ed,GAAoBc,EAAM,EAAI,EAE5BX,GAA8B,CAACS,EAAkBC,IAAkD,CACvG,IAAMQ,EAAW,CACf,KAAM,OACN,WAAYT,EAAO,SAAW,EAAI,CAAC,IAAK,IAAK,GAAG,EAAI,CAAC,IAAK,GAAG,EAC7D,WAAYA,EAAO,SAAW,EAAI,MAAiE,EACjE,IAA2C,EAC7E,IAAKC,EAAW,QAClB,EAEA,MAAO,CAAC,GAAGQ,EAAU,IAAK,IAAMjB,GAAsBiB,EAAUT,EAAQC,CAAU,CAAC,CACrF,EAEMT,GACF,CAACiB,EAA2BT,EAAkBC,IAA4C,CACxF,IAAMS,EAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BW,EAASX,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACY,EAAGC,CAAC,EAAIC,GAAS,qBACpBJ,EAAQT,EAAW,OAAQU,EAAQV,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGe,EAAc,CAACH,EAAGC,CAAC,EACzB,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAIC,EAAYN,EAAOA,EAAO,OAAS,CAAC,EACpCO,EAAO,GACPhB,EAAW,SACbe,EAAYN,EAAO,CAAC,GAElBT,EAAW,QAAUA,EAAW,OAClCgB,EAAO,8BACEhB,EAAW,QAAU,CAACA,EAAW,OAC1CgB,EAAO,4BACE,CAAChB,EAAW,QAAUA,EAAW,OAC1CgB,EAAO,4BACE,CAAChB,EAAW,QAAU,CAACA,EAAW,SAC3CgB,EAAO,2BAET,IAAMC,EAAOH,EAAY,OACnBI,EAAWnB,EAAO,SAAW,EAAI,SAASA,EAAO,CAAC,EAAE,KAAK,MAAM,KAAO,GACtEoB,EAAapB,EAAO,SAAW,EAAI,8BAAgC,GACnEqB,EAAarB,EAAO,SAAW,EAAI,yBAA2B,GAC9DsB,EAAe;AAAA,kCACOJ,CAAI;AAAA,kBACpBA,CAAI;AAAA,kBACJA,CAAI;AAAA,YACVC,CAAQ;AAAA;AAAA;AAAA;AAAA,YAIRC,CAAU;AAAA;AAAA;AAAA,4BAGMJ,CAAS;AAAA,kBACnBE,EAAO,CAAC;AAAA,kBACRA,EAAO,CAAC;AAAA,gBACVD,CAAI;AAAA;AAAA;AAAA;AAAA,YAIRI,CAA
U;AAAA;AAAA,SAGhB,MAAO,CACL,GAAGZ,EACH,OAAQ,CAAC,KAAMM,EAAa,KAAMf,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,UAAW,CACT,CAAC,KAAM,QAAS,KAAM,QAAS,KAAMC,EAAW,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,QAAS,KAAMA,EAAW,IAAI,CAC7G,EACA,aAAAqB,CACF,CACF,EAEE7B,GAAiB,CAACO,EAAkBC,IAAqC,CAC7E,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIC,EAAW,cAAgBD,EAAO,OAAS,GAAKA,EAAO,OAAS,GAClE,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAI,CAACC,EAAW,aAAeD,EAAO,SAAW,EAC/C,MAAM,IAAI,MAAM,wBAAwB,EAI1C,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAClF,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WACnDA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WACnDA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UAC7E,MAAM,IAAI,MAAM,qBAAqB,EAGvC,GAAKA,EAAO,CAAC,EAAE,OAASA,EAAO,CAAC,EAAE,MAAUA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,OAASA,EAAO,CAAC,EAAE,KAC9F,MAAM,IAAI,MAAM,4BAA4B,CAEhD,ICxIA,IAeauB,GAQAC,GAOPC,GAMAC,GAsBAC,GAMAC,GAuBAC,GAvFNC,GAAAC,EAAA,kBAGAC,KAKAC,KAOaV,GACT,CAACW,EAAyCC,EAAkBC,KAC1DP,GAAeM,CAAM,EAGd,CADHD,EAAiB,IAAIP,GAAmCO,EAAkBC,EAAQC,CAAU,EAAGD,CAAM,CAC3F,GAGPX,GACRa,GAA4C,CAC3C,IAAMC,EAAQD,EAAK,WAAW,SAAS,OAAO,EACxCE,EAAOF,EAAK,WAAW,UAAU,MAAM,EAC7C,OAAOG,GAA4B,CAAC,MAAAF,EAAO,KAAAC,CAAI,CAAC,CAClD,EAEEd,GAA6B,CACjC,KAAM,cACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEMC,GACF,CAACe,EAAiCC,EAA2BP,EAAkBC,IAC5D,CACb,IAAMO,EAAcR,EAAO,CAAC,EAAE,KAAK,MAAM,EACnCS,EAAOD,EAAY,OAEnBE,EAAe;AAAA,QADCjB,GAAoBQ,EAAW,KAAK,MAAM,CAErD;AAAA,kCACaQ,CAAI;AAAA;AAAA,SAG5B,MAAO,CACL,GAAGF,EACH,OAAQ,CAAC,KAAMC,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,UAAW,CACT,CAAC,KAAM,OAAQ,KAAM,QAAS,YAAaC,EAAW,KAAK,OAAQ,KAAMA,EAAW,IAAI,EACxF,CAAC,KAAM,QAAS,KAAM,QAAS,KAAMA,EAAW,KAAK,CACvD,EACA,aAAAS,CACF,CACF,EAEFlB,GACF,CAACmB,EAAgCX,EAAkBC,IAAyD,CAC1G,IAAMM,EAAW,CAAC,GAAGjB,GAA4B,UAAWW,EAAW,QAAQ,EAC/E,MAAO,CAAC,GAAGM,EAAU,IAAK,IAAMhB,GAA6BoB,EAASJ,EAAUP,EAAQC,CAAU,CAAC,CACrG,EAEER,GAAuBmB,GAAgC,CAC3D,IAAMC,EAAsB,CAAC,4BAA4BD,CAAW,mBAAmB,EACvF,QAASE,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EAC7BA,IAAM,EACRD,EAAU,KACN,mBACkBC,CAAC,mBAAmBA,CAAC,MAAM,EACxCA,IAAMF,EAAc,EAC7BC,EAAU,KACN,uBACsBC,CAAC,MAAM,EAEjCD,EAAU,KACN,wBACuBC,CAAC,mBAAmBA,CAAC,MAAM,EAG1D,OAAAD,EAAU,KACN,IACG,EACAA,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMnB,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,+BAA+B,EAEjD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,CAEzC,ICjGA,IAUae,GAWAC,GAGPC,GAMAC,GA2CAC,GAKAC,GAMAC,GAqCAC,GAUAC,GAnINC,GAAAC,EAAA,kBAMAC,KAEAC,KAEaZ,GACT,CAACa,EAAyCC,EAAkBC,IAA8B,CACxFP,GAAeM,CAAM,EAErB,IAAME,EAAkBH,EAAiB,IAAIT,GAAuCU,EAAO,CAAC,CAAC,EAAGA,CAAM,EAItG,MAAO,CAHQD,EAAiB,IAC5BN,GAAqCM,EAAkBC,EAAO,CAAC,EAAGC,EAASC,EAAgB,IAAI,EAC/F,CAACF,EAAO,CAAC,EAAGE,EAAiBF,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,CAAC,CACxC,CAChB,EAESb,GAAwEgB,GACjFA,EAAK,WAAW,SAAS,UAAW,IAAI,EAEtCf,GAAiC,CACrC,KAAM,wCACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEMC,GAAmC,CAACe,EAA2BC,IAA+B,CAClG,IAAMC,EAAQD,EAAM,KAAK,MAAM,EACzBE,EAAUD,EAAM,CAAC,EACjBE,EAAcF,EAAM,CAAC,EAAIA,EAAM,CAAC,EAChCG,EAAc,CAACH,EAAM,CAAC,EAAGC,CAAO,EAEhCG,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOIJ,EAAM,CAAC,CAAC;AAAA;AAAA,6BAENA,EAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAMDE,CAAW;AAAA;AAAA,2BAEpBF,EAAM,CAAC,CAAC;AAAA;AAAA,6BAENA,EAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAORE,CAAW;AAAA;AAAA;AAAA,SAItC,MAAO,CACL,GAAGJ,EACH,OAAQ,CAAC,KAAMK,EAAa,KAAMJ,EAAM,KAAM,aAA4C,EAC1F,aAAAK,CACF,CACF,EAEMpB,GAA0Ce,IAAsC,CACpF,GAAGjB,GACH,IAAK,IAAMC,GAAiCD,GAAgCiB,CAAK,CACnF,GAEMd,GAA+B
,CACnC,KAAM,sCACN,WAAY,CAAC,IAAK,kBAAmB,QAAS,GAAG,EACjD,WAAY,QAAkG,CAChH,EAEMC,GACF,CAACO,EAAyCK,EAA2BC,EAAeJ,EACnFU,IAAyD,CACxD,IAAMC,EAAOC,GAAQd,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACe,EAAcC,CAAa,EAC9BhB,EAAiB,+BAA+BY,GAAqD,EACnG,CAACK,EAAsBC,CAAqB,EAAI,CAACH,EAAe,EAAGC,CAAa,EAChFL,EAAe;AAAA;AAAA;AAAA,+CAGoBM,CAAoB,KAAKC,CAAqB;AAAA,iBAC5EL,EAAK,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAkBzB,MAAO,CACL,GAAGR,EACH,OAAQ,CAAC,KAAMC,EAAM,KAAM,KAAMA,EAAM,KAAM,aAAiC,EAC9E,UAAW,CAAC,CAAC,KAAM,UAAW,KAAM,QAAS,KAAMJ,CAAO,CAAC,EAC3D,aAAAS,CACF,CACF,EAEEjB,GACF,CAACM,EAAyCM,EAAeJ,EAAiBU,IACjD,CACnB,IAAMP,EAAW,CAAC,GAAGb,GAA8B,UAAW,GAAGU,CAAO,EAAE,EAC1E,MAAO,CACL,GAAGG,EACH,IAAK,IAAMZ,GAA+BO,EAAkBK,EAAUC,EAAOJ,EAASU,CAAoB,CAC5G,CACF,EAEFjB,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMkB,EAAIlB,EAAO,CAAC,EACZmB,EAAQnB,EAAO,CAAC,EAChBoB,EAAIpB,EAAO,CAAC,EAIlB,GAAIkB,EAAE,KAAK,OAAS,GAAKC,EAAM,KAAK,SAAW,GAAKC,EAAE,KAAK,SAAW,EACpE,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAID,EAAM,KAAK,CAAC,IAAMD,EAAE,KAAK,CAAC,GAAKE,EAAE,KAAK,CAAC,IAAMF,EAAE,KAAK,CAAC,EACvD,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAKA,EAAE,OAAS,WAAaA,EAAE,OAAS,WAAeC,EAAM,OAAS,WAAaA,EAAM,OAAS,WAC7FC,EAAE,OAAS,WAAaA,EAAE,OAAS,UACtC,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIpB,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,+BAA+B,CAEnD,IC/GA,SAASqB,GAAqBC,EAAkBC,EAAwC,CACtF,IAAMC,EAAIF,EAAO,CAAC,EAAE,KAAK,CAAC,EACpBG,EAAOH,EAAO,CAAC,EAAE,KAAK,OACtBI,EAAO,CAAC,KAAK,OAAOH,EAAW,KAAO,GAAK,CAAC,EAC5CI,EAAK,KAAK,MAAMJ,EAAW,KAAO,GAAK,CAAC,EACxCK,EAAQ,SAASL,EAAW,KAAK,aAAaA,EAAW,IAAI,IAC7DM,EAAO,SAASN,EAAW,IAAI,IAC/BO,EAAO,SAASP,EAAW,IAAI,IAE/BQ,EAAe;AAAA,gCACSN,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA,uBAKbC,CAAI,UAAUC,CAAE;AAAA;AAAA,8BAETH,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAMNK,CAAI,MAAMD,CAAK,kBAAkBE,CAAI;AAAA,OAE5D,MAAO,CACL,GAAGE,GACH,UAAWT,EAAW,SACtB,OAAQ,CAAC,KAAMD,EAAO,CAAC,EAAE,KAAM,KAAMA,EAAO,CAAC,EAAE,KAAM,aAAiC,EACtF,aAAAS,CACF,CACF,CAEO,SAASE,GAA2BX,EAAkBC,EAA8C,CACzG,MAAO,CAAC,GAAGS,GAAoB,UAAWT,EAAW,SAAU,IAAK,IAAMF,GAAqBC,EAAQC,CAAU,CAAC,CACpH,CA/EA,IAiBaW,GAYAC,GASPH,GA2CAI,GAjFNC,GAAAC,EAAA,kBAGAC,KAKAC,KASaN,GACT,CAACO,EAAyCnB,EAAkBC,KAC1Da,GAAed,CAAM,EAMd,CAACmB,EAAiB,IAAIR,GAA2BX,EAAQC,CAAU,EAAGD,CAAM,CAAC,GAI7Ea,GAA6DO,GAAoC,CAC5G,IAAMd,EAAQc,EAAK,WAAW,SAAS,QAAS,IAAM,EAChDZ,EAAOY,EAAK,WAAW,SAAS,OAAQ,GAAI,EAC5Cb,EAAOa,EAAK,WAAW,SAAS,OAAQ,CAAG,EAC3CC,EAAOD,EAAK,WAAW,OAAO,MAAM,EAE1C,OAAOE,GAA4B,CAAC,MAAAhB,EAAO,KAAAE,EAAM,KAAAD,EAAM,KAAAc,CAAI,CAAC,CAC9D,EAEMX,GAAqB,CACzB,KAAM,MACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAuCMI,GAAkBd,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,yDAAyD,EAE3E,GAAIA,EAAO,CAAC,EAAE,OAAS,UACrB,MAAM,IAAI,MAAM,4BAA4B,CAEhD,IC3FA,IAkBMuB,GAMOC,GAaAC,GAOAC,GAOAC,GAGPC,GAaAC,GAmBAC,GASAC,GAYAC,GAiBAC,GA0BAC,GA8BAC,GApLNC,GAAAC,EAAA,kBAGAC,KAIAC,KACAC,KAEAC,KAQMlB,GAAqB,CACzB,KAAM,MACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACkB,EAAyCC,EAAkBC,KAC1Dd,GAAiBa,CAAM,EAQhB,CAPQD,EAAiB,IAC5B,CACE,GAAGnB,GACH,UAAWqB,EAAW,SACtB,IAAK,IAAMf,GAAqBa,EAAkBC,EAAO,CAAC,EAAGC,CAAU,CACzE,EACAD,CAAM,CACI,GAGPlB,GAA+DoB,GAAoC,CAC9G,IAAMC,EAAOD,EAAK,WAAW,UAAU,OAAQ,UAAU,EACnDE,EAAQF,EAAK,WAAW,SAAS,QAAS,CAAG,EAC7CG,EAAOH,EAAK,WAAW,QAAQ,MAAM,EAC3C,OAAOI,GAA4B,CAAC,KAAAH,EAAM,MAAAC,EAAO,KAAAC,CAAI,CAAC,CACxD,EAEatB,GACT,CAACgB,EAAyCC,EAAkBG,IAA2B,CACrFf,GAAkBY,CAAM,EACxB,IAAMO,EAAatB,GAAgCc,EAAkBC,EAAQG,CAAI,EACjF,OAAOtB,GAAMkB,EAAkB,CAACC,EAAO,CAAC,CAAC,EAAGO,CAAU,CACxD,EAESvB,GAAyDkB,GAClEA,EAAK,WAAW,U
AAU,OAAQ,UAAU,EAE1CjB,GACF,CAACc,EAAyCC,EAAkBG,IAAgC,CAC1F,GAAI,CAACJ,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GACvDA,EAAO,QAAU,GAAK,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,EACjF,MAAM,IAAI,MAAM,wCAAwC,EAG1D,IAAMK,EAAO,MAAM,KAAKL,EAAO,CAAC,EAAE,WAAW,EACvCI,EAASJ,EAAO,QAAU,EAAKA,EAAO,CAAC,EAAE,UAAU,CAAC,EAAI,EAE9D,OAAOM,GAA4B,CAAC,KAAAH,EAAM,KAAAE,EAAM,MAAAD,CAAK,CAAC,CACxD,EAEElB,GACF,CAACa,EAAyCS,EAAeP,IAA2C,CAClG,IAAMQ,EAAcC,EAAU,SAASF,EAAM,KAAK,MAAM,EAAGP,EAAW,IAAI,EACpEU,EAAOF,EAAY,OAEnBG,EAAe;AAAA,QADDvB,GAAeU,EAAkBS,EAAOP,CAAU,CAEzD;AAAA,0BACOU,CAAI;AAAA;AAAA,SAGxB,MAAO,CACL,KAAM,MACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,EACjC,OAAQ,CAAC,KAAMF,EAAa,KAAMD,EAAM,KAAM,aAAiC,EAC/E,aAAAI,CACF,CACF,EAEEzB,GAAoBa,GAA2B,CACnD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEMZ,GAAqBY,GAA2B,CACpD,GAAI,CAACA,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,4BAA4B,EAE9C,GAAIA,EAAO,CAAC,EAAE,OAAS,QACrB,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,SAC3C,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEMX,GAAiB,CAACU,EAAyCS,EAAeP,IAAsC,CACpH,IAAMY,EAAOC,GAAQf,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACgB,EAAOC,CAAM,EAAIjB,EAAiB,+BAA+BS,EAAM,MAA0B,EAClGS,EAAUP,EAAU,eAAeF,EAAM,IAAI,EAEnD,OAAQP,EAAW,KAAM,CACvB,IAAK,WACH,OAAOX,GAAeuB,EAAML,EAAM,KAAMS,EAASF,EAAOC,EAAQf,EAAW,KAAMA,EAAW,KAAK,EACnG,IAAK,UACH,OAAOV,GAAcsB,EAAML,EAAM,KAAMS,EAASF,EAAOC,EAAQf,EAAW,IAAI,EAChF,IAAK,OACH,OAAOT,GAAWqB,EAAML,EAAM,KAAMS,EAASF,EAAOC,EAAQf,EAAW,IAAI,EAC7E,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMX,GACF,CAACuB,EAAYK,EAA0BD,EAA4BF,EAAeC,EAAgBX,EACjGD,IAA0B,CACzB,IAAMO,EAAOO,EAAM,OACfC,EAAQ,GACZ,QAASC,EAAIT,EAAO,EAAGS,GAAK,EAAG,EAAEA,EAC/BD,GAAS;AAAA,gBACDC,CAAC,OAAOf,EAAKe,CAAC,CAAC;AAAA;AAAA,mBAEZF,EAAME,CAAC,CAAC;AAAA,wBACHH,EAAQG,CAAC,CAAC;AAAA,UAG5B,MAAO;AAAA,yBACYT,CAAI;AAAA,uCACUP,CAAK;AAAA;AAAA;AAAA,UAGlCe,CAAK;AAAA,+CACgCJ,CAAK,KAAKC,CAAM;AAAA,wCACvBH,EAAK,SAAS;AAAA;AAAA;AAAA,OAIlD,EAEEtB,GACF,CAACsB,EAAYK,EAA0BD,EAA4BF,EAAeC,EAAgBX,IACpF,CACR,IAAMM,EAAOO,EAAM,OAEfC,EAAQ,GACZ,QAASC,EAAIT,EAAO,EAAGS,GAAK,EAAG,EAAEA,EAC/BD,GAAS;AAAA,gBACLC,CAAC,OAAOf,EAAKe,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGD,GAAKF,EAAME,CAAC,EAAI,EAAE;AAAA;AAAA,oBAE5BF,EAAME,CAAC,CAAC;AAAA;AAAA,wBAEJH,EAAQG,CAAC,CAAC;AAAA,UAGxB,MAAO;AAAA,yBACQT,CAAI;AAAA;AAAA;AAAA,UAGnBQ,CAAK;AAAA,+CACgCJ,CAAK,KAAKC,CAAM;AAAA,wCACvBH,EAAK,SAAS;AAAA;AAAA;AAAA,OAI9C,EAEFrB,GACF,CAACqB,EAAYK,EAA0BD,EAA4BF,EAAeC,EAAgBX,IACpF,CACR,IAAMM,EAAOO,EAAM,OAEfC,EAAQ,GACZ,QAASC,EAAIT,EAAO,EAAGS,GAAK,EAAG,EAAEA,EAC/BD,GAAS;AAAA,gBACLC,CAAC,OAAOf,EAAKe,CAAC,CAAC;AAAA;AAAA,mBAEZF,EAAME,CAAC,CAAC,SAASF,EAAME,CAAC,EAAI,CAAC;AAAA,wBACxBH,EAAQG,CAAC,CAAC;AAAA,QAGxB,MAAO;AAAA,yBACQT,CAAI;AAAA;AAAA;AAAA,UAGnBQ,CAAK;AAAA,+CACgCJ,CAAK,KAAKC,CAAM;AAAA,wCACvBH,EAAK,SAAS;AAAA;AAAA;AAAA,OAI9C,IC5MR,IAoBaQ,GAUAC,GAiBPC,GAwBOC,GAcAC,GAYAC,GAUAC,GAsBPC,GAoBAC,GAuBAC,GAYAC,GAMOC,GAWPC,GASAC,GAwIAC,GAUAC,GApWNC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAWapB,GACT,CAACqB,EAAyCC,EAAkBC,IAAgD,CAC1GX,GAAeU,CAAM,EACrB,IAAME,EACF,CAAC,KAAM,cAAe,WAAY,CAAC,GAAG,EAAG,WAAY,EAAqB,EAAG,UAAWD,EAAW,QAAQ,EAG/G,MAAO,CAFQF,EAAiB,IAC5B,CAAC,GAAGG,EAAU,IAAK,IAAMtB,GAA6BoB,EAAQE,EAAU,GAAOD,CAAU,CAAC,EAAGD,CAAM,CACzF,CAChB,EAESrB,GACRwB,GAA4C,CAC3C,IAAMC,EAAUD,EAAK,WAAW,UAAU,WAAY,QAAQ,EACxDE,EAAWF,EAAK,WAAW,OAAO,YAAa,CAAC,EAChDG,EAAmBH,EAAK,WAAW,OAAO,oBAAqB,CAAC,IAAM,EACtEI,EAAcJ,EAAK,WAAW,QAAQ,cAAc,EACpDK,EAAUL,EAAK,WAAW,QAAQ,UAAW,CAAC,CAAC,EAC/CM,EAAON,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EAG/C,GAAIE,IAAa,EACf,MAAM,IAAI,MAAM,wEAAwE,EAG1F,OAAOK,G
AA4B,CAAC,QAAAN,EAAS,SAAAC,EAAU,gBAAAC,EAAiB,YAAAC,EAAa,QAAAC,EAAS,KAAAC,CAAI,CAAC,CACrG,EAEE7B,GACF,CAACoB,EAAkBE,EAA2BS,EAA2BV,IACtD,CACb,GAAM,CAACW,EAAoBC,CAAW,EAClC3B,GAAwCc,EAAQC,EAAYU,CAAgB,EAC1EG,EAAaC,EAAU,KAAKH,EAAmB,WAAW,EAC1DI,EAAM,kBACRC,EAAM,GACNL,EAAmB,gBACrBK,GAAO,kBAAkBH,CAAU,KAEnCG,GAAO,kBAAkBH,CAAU,WAGrC,IAAMI,EAAe;AAAA,UADD3B,GAAoBS,EAAO,CAAC,EAAE,KAAMY,EAAoBI,EAAKC,EAAK,KAAK,CAEhF;AAAA,QAEX,MAAO,CACL,GAAGf,EACH,OAAQ,CAAC,KAAMW,EAAa,KAAMb,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAkB,CACF,CACF,EAEKrC,GACT,CAACkB,EAAyCC,EAAkBC,IAAgD,CAC1GX,GAAeU,CAAM,EACrB,IAAME,EAAW,CACf,KAAM,oBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,EACjC,UAAW,GAAGD,EAAW,eAAe,EAC1C,EAGA,MAAO,CAFQF,EAAiB,IAC5B,CAAC,GAAGG,EAAU,IAAK,IAAMtB,GAA6BoB,EAAQE,EAAU,GAAMD,CAAU,CAAC,EAAGD,CAAM,CACxF,CAChB,EAESlB,GACRqB,GAA4C,CAC3C,IAAMG,EAAmBH,EAAK,WAAW,OAAO,oBAAqB,CAAC,IAAM,EAC5E,OAAOO,GACH,CAAC,QAAS,GAAI,SAAU,EAAG,gBAAAJ,EAAiB,YAAa,CAAC,EAAG,QAAS,CAAC,EAAG,KAAM,CAAC,CAAC,CAAC,CACzF,EAOSvB,GACT,CAACgB,EAAyCC,EAAkBC,IAA4C,CACtGX,GAAeU,CAAM,EACrB,IAAME,EACF,CAAC,KAAM,UAAW,WAAY,CAAC,GAAG,EAAG,WAAY,EAAqB,EAAG,UAAWD,EAAW,QAAQ,EAG3G,MAAO,CAFQF,EAAiB,IAC5B,CAAC,GAAGG,EAAU,IAAK,IAAMjB,GAAyBe,EAAQE,EAAU,GAAOD,CAAU,CAAC,EAAGD,CAAM,CACrF,CAChB,EAEShB,GACRmB,GAAwC,CACvC,IAAMC,EAAUD,EAAK,WAAW,UAAU,WAAY,QAAQ,EACxDE,EAAWF,EAAK,WAAW,OAAO,YAAa,CAAC,EAChDI,EAAcJ,EAAK,WAAW,QAAQ,cAAc,EACpDK,EAAUL,EAAK,WAAW,QAAQ,UAAW,CAAC,CAAC,EAC/CM,EAAON,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EACzCgB,EAAehB,EAAK,WAAW,OAAO,gBAAiB,CAAC,EACxDiB,EAAYjB,EAAK,WAAW,QAAQ,YAAa,CAAC,CAAC,EAGzD,GAAIgB,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAId,IAAa,EACf,MAAM,IAAI,MAAM,oEAAoE,EAGtF,OAAOK,GACH,CAAC,QAAAN,EAAS,SAAAC,EAAU,gBAAiB,GAAO,YAAAE,EAAa,QAAAC,EAAS,KAAAC,EAAM,aAAAU,EAAc,UAAAC,CAAS,CAAC,CACtG,EAEEnC,GACF,CAACe,EAAkBE,EAA2BS,EAA2BV,IACtD,CACb,GAAM,CAACW,EAAoBC,CAAW,EAClC3B,GAAwCc,EAAQC,EAAYU,CAAgB,EAC1EK,EAAM;AAAA;AAAA,MAGNC,EAAM,GAENC,EAAe;AAAA,QADD3B,GAAoBS,EAAO,CAAC,EAAE,KAAMY,EAAoBI,EAAKC,EAAK,MAAM,CAEnF;AAAA,MAET,MAAO,CACL,GAAGf,EACH,OAAQ,CAAC,KAAMW,EAAa,KAAMb,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAkB,CACF,CACF,EAEFhC,GACF,CAACc,EAAkBC,EAAqDU,IACb,CACrD,IAAMU,EAAarB,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCsB,EAAe,OAAO,eAAe,KAAKrB,EAAY,WAAW,EACjEM,EAAcN,EAAW,YAAY,MAAM,EAC3CO,EAAUP,EAAW,QAAQ,MAAM,EACnCmB,EAAsBE,EAAgBrB,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCsB,GAAa,qBAAqBZ,EAAkBU,EAAYd,EAAaC,EAASY,EAAWX,CAAI,EAErG,IAAMI,EAAcU,GAAa,uBAC7BZ,EAAkBU,EAAYb,EAASY,EAAWb,EAAaE,EAAMR,EAAW,OAAO,EAErFuB,EAAgB,OAAO,OAAO,CAAC,EAAGvB,CAAU,EAClD,OAAIqB,EACF,OAAO,OAAOE,EAAe,CAAC,YAAAjB,EAAa,QAAAC,EAAS,KAAAC,EAAM,UAAAW,EAAW,SAAUnB,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOuB,EAAe,CAAC,YAAAjB,EAAa,QAAAC,EAAS,KAAAC,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAEnF,CAACuB,EAAeX,CAAW,CACpC,EAEF1B,GAA0B,CAC9B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,EACZ,SAAU,EACZ,EAEMC,GAAwB,CAC5B,KAAM,gBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GAAgB,CAACU,EAAyCC,KACrEV,GAAeU,CAAM,EAOd,CANQD,EAAiB,IAC5B,CACE,GAAGX,GACH,IAAK,IAAMH,GAAyBe,EAAQZ,GAAuB,GAAMD,EAAuB,CAClG,EACAa,CAAM,CACI,GAGVV,GAAkBU,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,4BAA4B,EAE9C,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEMT,GACF,CAACkC,EAA8BxB,EAAmCe,EAAaC,EAAaS,IAC9E,CACR,IAAMC,EAAOF,EAAU,OACvB,GAAIxB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAM2B,EAAK3B,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D4B,EAAK5B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD6B,EAAU7B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD8B,EAAQ9B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClD+B,EAAOP,EAAUE,EAAO,C
AAC,EAC3BM,EAAQ,GACRC,EAAQ,GACRC,EAAW,GAmBf,GAlBIL,EAAUC,IAAU,EACtBE,EAAQ;AAAA,gCACUL,CAAE;AAAA,gBAClBD,CAAI,mBAAmBA,CAAI,WAAWE,CAAE,MAAMC,CAAO;AAAA,oBACjDH,CAAI,kBAAkBA,CAAI,YAAYK,CAAI;AAAA;AAAA;AAAA;AAAA,cAIhDhB,CAAG;AAAA,aAGHiB,EAAQ;AAAA,gCACUL,CAAE;AAAA,gBAClBD,CAAI,mBAAmBA,CAAI,WAAWE,CAAE,MAAMC,CAAO;AAAA,cACvDd,CAAG;AAAA,aAIDf,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMmC,EAAKnC,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoC,EAAKpC,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqC,EAAUrC,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsC,EAAQtC,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuC,EAAOf,EAAUE,EAAO,CAAC,EAC3BW,EAAUC,IAAU,EACtBL,EAAQ;AAAA,kCACUE,CAAE;AAAA,kBAClBT,CAAI,mBAAmBA,CAAI,WAAWU,CAAE,MAAMC,CAAO;AAAA,sBACjDX,CAAI,kBAAkBA,CAAI,YAAYa,CAAI;AAAA,wBACxCZ,CAAE;AAAA;AAAA;AAAA,YAKVM,EAAQ;AAAA,kCACUE,CAAE;AAAA,kBAClBT,CAAI,mBAAmBA,CAAI,WAAWU,CAAE,MAAMC,CAAO;AAAA,cAGzDH,EAAW;AAAA;AAAA,SAGb,CAgBA,MAdoB;AAAA,oCACIR,CAAI;AAAA,kBACtBA,CAAI;AAAA;AAAA;AAAA,0BAGID,CAAK;AAAA;AAAA,YAEnBQ,CAAK;AAAA,YACLD,CAAK;AAAA,YACLE,CAAQ;AAAA,YACRlB,CAAG;AAAA;AAAA;AAAA,OAKL,KAAO,CACL,IAAMH,EAAaC,EAAU,KAAKd,EAAW,WAAW,EAClDwC,EAAgB1B,EAAU,eAAed,EAAW,WAAW,EAC/DyC,EAAcD,EAAc,OAC5BE,EAAW1C,EAAW,KAAK,OAC3B2C,EAA0BnD,GAAgBiD,CAAW,EACrDG,EAAgBrD,GAAUiC,EAAW,WAAW,EAChDqB,EAAWtD,GAAUS,EAAW,KAAM,MAAM,EAC5C8C,EAAoBvD,GAAUiD,EAAe,eAAe,EAC5DO,EAAcxD,GAAUS,EAAW,QAAS,SAAS,EACrDgD,EAAUhD,EAAW,KAAK,OAAO,CAACiD,EAAKC,IAAQD,EAAMC,CAAG,EAC1DC,EAAU,GACd,OAAIH,EACFG,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAQVpC,CAAG;AAAA,aAGHoC,EAAU;AAAA;AAAA,YAEZpC,CAAG;AAAA,UAGiB;AAAA,UACtB4B,CAAuB;AAAA,oCACGjB,CAAI;AAAA,kBACtBA,CAAI;AAAA;AAAA,uBAECe,CAAW;AAAA,qBACbC,CAAQ;AAAA,0BACHhB,CAAI;AAAA,8BACAe,CAAW;AAAA,wBACjBA,CAAW;AAAA,YACvBI,CAAQ;AAAA,YACRD,CAAa;AAAA,YACbG,CAAW;AAAA,YACXD,CAAiB;AAAA;AAAA,0BAEHrB,CAAK;AAAA;AAAA;AAAA,gCAGCZ,CAAU;AAAA;AAAA;AAAA,2BAGfa,CAAI,MAAMe,CAAW,SAASf,CAAI;AAAA,gDACbA,CAAI,MAAMe,CAAW;AAAA,+BACtCf,CAAI,MAAMe,CAAW;AAAA,gBACpCU,CAAO;AAAA;AAAA,YAEXnC,CAAG;AAAA;AAAA;AAAA;AAAA,OAML,CACF,EAEFzB,GAAY,CAAC6D,EAA0BC,IAA8B,CACzE,IAAIC,EAAQ,GACZ,QAASC,EAAI,EAAGA,EAAIH,EAAM,OAAQG,IAChCD,GAAS;AAAA,QACLD,CAAS,IAAIE,CAAC,OAAOH,EAAMG,CAAC,CAAC;AAAA,MAGnC,OAAOD,CACT,EAEM9D,GAAmBkC,GAAyB;AAAA,yCACTA,CAAI,sBAAsBA,CAAI;AAAA,UAC7DA,CAAI;AAAA;AAAA;AAAA,0BAGYA,CAAI;AAAA;AAAA;AAAA;AAAA,cAIhBA,CAAI;OC7WlB,IAmBM8B,GAsBOC,GAMPC,GAoDAC,GAWOC,GAMAC,GAeAC,GAeAC,GAeAC,GAMAC,GAMAC,GA7KbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KAUMhB,GACF,CAACiB,EAAyCC,EAAkBC,EAA8BC,EACzFC,IAAiC,CAChClB,GAAee,CAAM,EAErB,IAAMI,EAAwB,CAC5B,KAAAF,EACA,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAUA,MAAO,CARQH,EAAiB,IAC5B,CACE,GAAGK,EACH,UAAWH,EAAW,SACtB,IAAK,IACDjB,GAAwBe,EAAkBC,EAAQC,EAAYC,EAAMC,EAAUC,CAAqB,CACzG,EACAJ,CAAM,CACI,CAChB,EAESjB,GAAmEsB,GAAuC,CACrH,IAAMC,EAAOD,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EACzCE,EAAWF,EAAK,WAAW,OAAO,WAAY,CAAC,IAAM,EAC3D,OAAOG,GAA4B,CAAC,KAAAF,EAAM,SAAAC,CAAQ,CAAC,CACrD,EAEMvB,GACF,CAACyB,EAAiCT,EAAkBC,EAA8BS,EAAeP,EAChGC,IAAwD,CACvD,IAAMO,EAAwB,CAAC,EACzBC,EAAQZ,EAAO,CAAC,EAAE,KAAK,QAAU,EAEjCa,EAAU,CAAC,EAEXP,EAAOQ,EAAU,cAAcb,EAAW,KAAMD,EAAO,CAAC,EAAE,KAAK,MAAM,EACrEe,EAAMZ,EAASH,EAAQM,CAAI,EAC7BU,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,IAErCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,GACtCL,EAAW,UACbU,EAAY,KAAK,CAAC,EAIpBK,EAAY;AAAA,qBACDC,CAAC,UAAUA,CAAC,MAAMjB,EAAO,CAAC,EAAE,KAAKiB,CAAC,CAAC,MAAMA,CAAC;AAAA,uBACxCA,CAAC,QAAQA,CAAC;AAAA,cACnBD,CAAS;AAAA,eAGbH,EAAQ,KAAK,YAAYI,CAAC,iBAAiBN,EAAY,MAAM,IAAI,EAEjEA,EAAY,KAAKX,EAAO,CAAC,EAAE,KAAKiB,CAAC,CAAC,GAMtC,IAAMC,EAAe;AAAA,oCAFPP,EAAY,QAAU,CAGD;AAAA;AAAA,uBAElBC,CAAK;AAAA,UAClBC,EAAQ,KAAK;AAAA,CAAI,CA
AC;AAAA,UAClBE,EAAI,CAAC,CAAC;AAAA,UACNC,CAAS;AAAA,UACTD,EAAI,CAAC,CAAC;AAAA;AAAA,SAIV,MAAO,CACL,GAAGX,EACH,OAAQ,CAAC,KAAMO,EAAa,KAAMX,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAkB,CACF,CACF,EAEEjC,GAAkBe,GAA2B,CAEjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAImB,GAAa,QAAQnB,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEad,GACT,CAACa,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,YADzB,IAAgB,CAAC,eAAgB,yBAA0B,EAAE,CACf,EAGlEd,GACT,CAACY,EAAyCC,EAAkBC,IAWnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,aAVzB,CAACD,EAAkBM,IAA6B,CACzE,IAAIc,EAAO,EACX,QAASH,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,KACrCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,KAC1Cc,GAAQpB,EAAO,CAAC,EAAE,KAAKiB,CAAC,GAI5B,MAAO,CAAC,eAAgB,yBAA0B,YAAYG,CAAI,IAAI,CACxE,CAC0E,EAGnEhC,GACT,CAACW,EAAyCC,EAAkBC,IAWnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,YAVzB,CAACD,EAAkBM,IAA6B,CACzE,IAAMe,EAAU,CAAC,EACjB,QAASJ,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,KACrCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,IAC1Ce,EAAQ,KAAK,YAAYJ,CAAC,QAAQ,EAItC,MAAO,CAAC,GAAGI,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,uBAA2B,oCAAqC,EAAE,CACjG,CACyE,EAGlEhC,GACT,CAACU,EAAyCC,EAAkBC,IAWnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,YAVzB,CAACD,EAAkBM,IAA6B,CACzE,IAAMe,EAAU,CAAC,EACjB,QAASJ,EAAI,EAAGA,EAAIjB,EAAO,CAAC,EAAE,KAAK,OAAQiB,KACrCX,EAAK,QAAQW,CAAC,GAAK,GAAKX,EAAK,SAAW,IAC1Ce,EAAQ,KAAK,YAAYJ,CAAC,QAAQ,EAItC,MAAO,CAAC,GAAGI,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,uBAA2B,oCAAqC,EAAE,CACjG,CACyE,EAGlE/B,GACT,CAACS,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,aADzB,IAAgB,CAAC,eAAgB,yBAA0B,EAAE,CACd,EAGnEV,GACT,CAACQ,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,eADzB,IAAgB,CAAC,eAAgB,yBAA0B,qBAAqB,CAC/B,EAGrET,GACT,CAACO,EAAyCC,EAAkBC,IAEnDnB,GAAOiB,EAAkBC,EAAQC,EAAY,qBADzB,IAAgB,CAAC,wBAAyB,oCAAqC,EAAE,CAC1B,IChLxF,IAOaqB,GAPbC,GAAAC,EAAA,kBAIAC,KAGaH,GAAU,CAACI,EAAgCC,IAA+B,CACrF,IAAMC,EAAeC,EAAU,sBAAsBF,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,WAAW,EAC1F,OAAID,EAAQ,QAAQ,KACX,CAACA,EAAQ,cAAcC,EAAO,CAAC,EAAGC,CAAY,CAAC,EAE/C,CAACF,EAAQ,gBAAgBC,EAAO,CAAC,EAAGC,CAAY,CAAC,CAE5D,ICdA,IA6BME,GAMOC,GAaAC,GAGAC,GAGAC,GA+EPC,GAmLOC,GAgBAC,GAxUbC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAoBMZ,GAA0B,CAC9B,KAAM,WACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACY,EAAyCC,EAAkBC,KAC1DT,GAAeQ,EAAQC,CAAU,EAQ1B,CAPQF,EAAiB,IAC5B,CACE,GAAGb,GACH,UAAWe,EAAW,SACtB,IAAK,IAAMV,GAA0BQ,EAAkBC,EAAQC,CAAU,CAC3E,EACAD,CAAM,CACI,GAGPZ,GACRc,GAAyCZ,GAAwBY,EAAM,CAAC,EAEhEb,GACRa,GAAyCZ,GAAwBY,EAAM,CAAC,EAEhEZ,GAA0B,CAACY,EAAkBC,IAAsC,CAC9F,IAAMC,EAAYD,GAAS,GAGrBE,EAAOH,EAAK,WAAW,UAAU,OAAQ,SAAS,EACxD,GAAIG,IAAS,WAAaA,IAAS,WAAaF,EAAQ,IAAME,IAAS,SACrE,MAAM,IAAI,MAAM,sBAAsBA,CAAI,EAAE,EAG9C,IAAIC,EAAmB,CAAC,EACpBH,EAAQ,IACVG,EAASJ,EAAK,WAAW,UAAU,QAAQ,EAC3CT,GAAiBa,EAAQD,EAAMD,CAAQ,GAGzC,IAAMG,EAAqBL,EAAK,WAAW,SAAS,sBAAuB,CAAG,EAExEM,EACFL,EAAQ,GAAKD,EAAK,WAAW,UAAU,iCAAkC,YAAY,EAAI,aAC7F,GAAI,CACE,aAAc,qBAAsB,uBAAwB,gBAAiB,qBAAsB,YACrG,EAAE,QAAQM,CAAuB,IAAM,GACzC,MAAM,IAAI,MAAM,8BAA8BA,CAAuB,oBAAoB,EAE3F,IAAMC,EAAgBD,IAA4B,qBAC5CE,EAAmBD,EAEnBE,EACDN,IAAS,WAAaF,GAAS,GAAMD,EAAK,WAAW,UAAU,eAAgB,oBAAoB,EAAI,GAC5G,GAAI,CAAC,qBAAsB,oBAAqB,QAAS,OAAQ,EAAE,EAAE,QAAQS,CAAW,IAAM,GAC5F,MAAM,IAAI,MAAM,iBAAiBA,CAAW,oBAAoB,EAGlE,IAAMC,EAAoBV,EAAK,WAAW,SAAS,gBAAiB,IAAK,EACnEW,EAAiBX,EAAK,WAAW,OAAO,kBAAmB,CAAC,IAAM,EACxE,GAAIW,GAAkBR,IAAS,QAC7B,MAAM,IAAI,MAAM,0DAA0D,EAG5E,IAAMS,EACDX,EAAQ,GAAM,GAAQE,IAAS,WAAaG,IAA4B,cAAgBG,IAAgB,QAEzGI,EAAc,EACdC,EAAiB,EACjBC,EAAgB,EAEpB,OAAId,EAAQ,GAEND,EAAK,OAAO,OAAS,GACvBa,EAAc,EACdC,EAAiB,EACjBC,EAAgB,IAEhBD,EAAiB,EACjBC,EAAgB,GAETd,IAAU,IACnBa,EAAiB,GAGZE,GAA4B,CACjC,MAAAf,EACA,SAAAC,EACA,KAAAC,EACA,OAAAC,E
ACA,mBAAAC,EACA,wBAAAC,EACA,iBAAAE,EACA,aAAAD,EACA,YAAAE,EACA,kBAAAC,EACA,eAAAC,EACA,yBAAAC,EACA,YAAAC,EACA,eAAAC,EACA,cAAAC,CACF,CAAC,CACH,EAEM1B,GACF,CAACQ,EAAyCC,EAAkBC,IAAgD,CAC1G,IAAMkB,EAAOC,GAAQrB,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACsB,EAAYC,CAAW,EAC1BvB,EAAiB,+BAA+BC,EAAO,CAAC,EAAE,MAA0B,EAElFuB,EAAcvB,EAAO,CAAC,EAAE,KAAK,IAAI,CAACwB,EAAKC,IAAM,KAAK,MAAMD,EAAMvB,EAAW,OAAOwB,CAAC,CAAC,CAAC,EACnF,CAACC,EAAaC,CAAY,EAC5B5B,EAAiB,+BAA+BwB,GAAiC,EAC/EC,EAAMD,EAAY,OAElBK,EAAgB,IAAI,MAAcJ,CAAG,EACrCK,EAAe,IAAI,MAAcL,CAAG,EACtCM,EAAuB;AAAA,2BACNN,CAAG;AAAA,0BACJA,CAAG;AAAA,QAEvB,QAASO,EAAIP,EAAM,EAAGO,GAAK,EAAGA,IAC5BH,EAAcG,CAAC,EAAKA,IAAMP,EAAM,EAAK,EAAII,EAAcG,EAAI,CAAC,EAAIR,EAAYQ,EAAI,CAAC,EACjFF,EAAaE,CAAC,EAAKA,IAAMP,EAAM,EAAK,EAAIK,EAAaE,EAAI,CAAC,EAAI/B,EAAO,CAAC,EAAE,KAAK+B,EAAI,CAAC,EAElFD,GAAwB;AAAA,yBACPC,CAAC,OAAOH,EAAcG,CAAC,CAAC;AAAA,wBACzBA,CAAC,OAAOF,EAAaE,CAAC,CAAC;AAAA,UAGzC,IAAMC,EAAwB;AAAA;AAAA,8CAEUX,CAAU,KAAKC,CAAW;AAAA,wCAChCH,EAAK,SAAS;AAAA;AAAA;AAAA,QAK1Cc,EAAehC,EAAW,OAAS,UAErC;AAAA,MACJ+B,CAAqB;AAAA,gCACKR,CAAG;AAAA;AAAA,qDAEkBE,CAAW,KAAKC,CAAY;AAAA;AAAA,QAEzEG,CAAoB;AAAA;AAAA;AAAA,gCAGIN,CAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAezBA,IAAQ,EAEJ;AAAA,MACRQ,CAAqB;AAAA;AAAA;AAAA,qDAG0BN,CAAW,KAAKC,CAAY;AAAA;AAAA,QAEzEG,CAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCA2BQ9B,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAuBvC;AAAA,MACRgC,CAAqB;AAAA;AAAA;AAAA,qDAG0BN,CAAW,KAAKC,CAAY;AAAA;AAAA,QAEzEG,CAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAoBQ9B,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAsB/C,MAAO,CACL,GAAGd,GACH,OAAQ,CAAC,KAAMqC,EAAa,KAAMvB,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAiC,EACA,UAAW,CAAC,CACV,KAAM,SACN,KAAM,MACN,YAAahC,EAAW,OAAO,OAC/B,KAAMA,EAAW,OAAO,IAAIiC,GAAK,KAAK,KAAKA,CAAC,CAAC,CAC/C,CAAC,CACH,CACF,EAES1C,GAAiB,CAACQ,EAAkBmC,IAAwC,CACvF,GAAI,CAACnC,GAAWmC,EAAU,MAAQ,GAAKnC,EAAO,SAAW,GACpDmC,EAAU,OAAS,GAAKA,EAAU,MAAQ,IAAMnC,EAAO,SAAW,GAClEmC,EAAU,OAAS,IAAMnC,EAAO,OAAS,EAC5C,MAAM,IAAI,MAAM,iBAAiB,EAGnC,GAAImC,EAAU,OAAO,OAAS,GAAKnC,EAAO,CAAC,EAAE,KAAK,SAAWmC,EAAU,OAAO,OAC5E,MAAM,IAAI,MAAM,sBAAsB,EAGxC,GAAInC,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEaP,GAAmB,CAACa,EAAkBD,EAAcD,IAA4B,CAC3F,GAAKA,GAOH,QAAWgC,KAAS9B,EAClB,GAAI8B,GAAS,EACX,MAAM,IAAI,MAAM,uCAAuC,MAR3D,SAAWA,KAAS9B,EAClB,GAAI8B,EAAQ,EACV,MAAM,IAAI,MAAM,mDAAmD,EAUzE,IAAI/B,IAAS,UAAYA,IAAS,UAC5BC,EAAO,SAAW,IAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAClF,MAAM,IAAI,MAAM,+KAELF,EAAW,SAAW,UAAU,YAAY,CAG7D,IC7VA,IAcMiC,GAMOC,GAaAC,GAGAC,GAGPC,GAyKAC,GAiCAC,GAMAC,GAvPNC,GAAAC,EAAA,kBAMAC,KAEAC,KACAC,KAEAC,KACAC,KAEMd,GAAwB,CAC5B,KAAM,SACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAmB,CACjC,EAEaC,GACT,CAACc,EAAyCC,EAAkBC,KAC1DC,GAAeF,EAAQC,CAAU,EAQ1B,CAPQF,EAAiB,IAC5B,CACE,GAAGf,GACH,UAAWiB,EAAW,SACtB,IAAK,IAAMb,GAA8BW,EAAkBC,EAAQC,CAAU,CAC/E,EACAD,CAAM,CACI,GAGPd,GACRiB,GAAyCC,GAAwBD,EAAM,EAAE,EAEjEhB,GACRgB,GAAyCC,GAAwBD,EAAM,EAAE,EAExEf,GACF,CAACW,EAAyCC,EAAkBC,IAAgD,CAC1G,IAAMI,EAAOC,GAAQP,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjE,CAACQ,EAAQC,CAAW,EAAInB,GAAcW,EAAQC,CAAU,EAI9D,GADIM,EAAO,MAAOE,GAAcA,IAAM,CAAC,GAAKR,EAAW,0BAA4B,qBAEjF,MAAO,CACL,GAAGjB,GACH,OAAQ,CAAC,KAAMwB,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,QAAS,GACT,
aAAc;AAAA,+BACOK,EAAK,SAAS;AAAA,sBACvBA,EAAK,MAAM;AAAA,kBAEzB,EAGF,IAAMK,EAAMF,EAAY,OACxB,GAAIE,EAAM,EACR,MAAM,IAAI,MAAM,kDAAkDA,CAAG,EAAE,EAGzE,IAAMC,EAAeH,EAAYE,EAAM,CAAC,EAClCE,EAAcJ,EAAYE,EAAM,CAAC,EAEjCG,EAAab,EAAO,CAAC,EAAE,KAC7B,GAAIU,IAAQG,EAAW,OACrB,MAAM,IAAI,MAAM,uCAAuCA,EAAW,MAAM,aAAaH,CAAG,EAAE,EAE5F,IAAMI,EAAcD,EAAWH,EAAM,CAAC,EAChCK,EAAaF,EAAWH,EAAM,CAAC,EAE/BM,EAAeT,EAAOG,EAAM,CAAC,EAC7BO,EAAcV,EAAOG,EAAM,CAAC,EAE9BQ,EAAqB,GAEzB,GAAIjB,EAAW,OAAS,SAEtB,MAAM,IAAI,MAAM,2CAA2CA,EAAW,IAAI,GAAG,EAE/E,OAAQA,EAAW,wBAAyB,CAC1C,IAAK,aACHiB,EAAqB;AAAA;AAAA;AAAA;AAAA,kBAKrB,MACF,IAAK,aACHA,EAAqB;AAAA;AAAA;AAAA;AAAA,kBAKrB,MACF,IAAK,qBACHA,EAAqB;AAAA;AAAA;AAAA;AAAA,8BAIDN,CAAW;AAAA,8BACXD,CAAY;AAAA,8BACZC,CAAW;AAAA,8BACXD,CAAY;AAAA;AAAA;AAAA,kBAIhC,MACF,IAAK,gBACHO,EAAqB;AAAA;AAAA,8CAEeN,CAAW,aAAaD,CAAY,aAAaC,CAAW;AAAA,8BAC5ED,CAAY;AAAA,+CACKI,CAAU,aAAaD,CAAW,aAAaC,CAAU;AAAA,8BAC1ED,CAAW;AAAA;AAAA;AAAA;AAAA,kBAK/B,MACF,QAEE,MAAM,IAAI,MAAM,8FACSb,EAAW,uBAAuB,GAAG,CAClE,CAEA,IAAMkB,EAAiBC,GAAkBV,CAAG,EACtCW,EAAgBC,GAAkB,EAClCC,EAAe;AAAA,wCACaT,CAAW,OAAOC,CAAU;AAAA,gDACpBC,CAAY,YAAYC,CAAW,YAAYD,CAAY,YACjGC,CAAW;AAAA,cACPI,CAAa;AAAA,cACbH,CAAkB;AAAA;AAAA;AAAA;AAAA;AAAA,kBAKdC,CAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2CAiBWR,EAAe,CAAC;AAAA,2CAChBC,EAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAsCxCP,EAAK,MAAM;AAAA;AAAA,UAGvB,MAAO,CACL,GAAGrB,GACH,OAAQ,CAAC,KAAMwB,EAAa,KAAMR,EAAO,CAAC,EAAE,KAAM,aAA+B,EACjF,QAAS,GACT,aAAAuB,CACF,CACF,EAGElC,GAAgB,CAACW,EAAkBC,IAA2E,CAElH,IAAMuB,EADIxB,EAAO,CAAC,EACF,KAEZO,EAASN,EAAW,OACpBwB,EACJ,GAAIlB,EAAO,SAAW,EAAG,CACvB,IAAMmB,EAAe1B,EAAOC,EAAW,cAAc,EACrD,GAAIyB,GAAgBA,EAAa,OAAS,EAAG,CAC3C,GAAI1B,EAAOC,EAAW,aAAa,EACjC,MAAM,IAAI,MAAM,wDAAwD,EAE1EM,EAASjB,GAAgBoC,EAAczB,EAAW,KAAMA,EAAW,QAAQ,CAC7E,KAAO,CACL,IAAM0B,EAAc3B,EAAOC,EAAW,aAAa,EACnD,GAAI,CAAC0B,GAAeA,EAAY,OAAS,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAGrEF,EAAc,MAAM,KAAKE,EAAY,WAAW,EAChDpB,EAAShB,GAA8BkC,EAAaD,EAAOvB,EAAW,KAAMA,EAAW,QAAQ,CACjG,CACF,SACMD,EAAOC,EAAW,aAAa,EACjC,MAAM,IAAI,MAAM,wDAAwD,EAI5E,IAAM2B,EAAQH,GAAgBD,EAAM,IAAI,CAACd,EAAKmB,IAAM,KAAK,MAAMnB,EAAMH,EAAOsB,CAAC,CAAC,CAAC,EAE/E,MAAO,CAACtB,EAAQqB,CAAK,CACvB,EAEMtC,GAAkB,CAACwC,EAAeC,EAAcC,IAAgC,CACpF,IAAMzB,EAAS,MAAM,KAAKuB,EAAM,SAAS,EACzC,OAAAG,GAAiB1B,EAAQwB,EAAMC,CAAQ,EAChCzB,CACT,EAEMhB,GACF,CAACqC,EAA0BJ,EAA0BO,EAAcC,IAAgC,CACjG,IAAME,EAASV,EAAM,OACfjB,EAAS,IAAI,MAAc2B,CAAM,EAEvC,QAASL,EAAI,EAAGM,EAAMD,EAAQL,EAAIM,EAAKN,IACrC,GAAIL,EAAMK,CAAC,IAAM,EAAG,CAClB,GAAID,EAAMC,CAAC,IAAM,EACf,MAAM,IAAI,MAAM,wDAAwD,EAE1EtB,EAAOsB,CAAC,EAAI,CACd,MACEtB,EAAOsB,CAAC,EAAID,EAAMC,CAAC,EAAIL,EAAMK,CAAC,EAGlC,OAAAI,GAAiB1B,EAAQwB,EAAMC,CAAQ,EAChCzB,CACT,ICxQJ,IAMa6B,GAKPC,GAXNC,GAAAC,EAAA,kBAGAC,KAGaJ,GAAQ,CAACK,EAA0CC,KAC9DL,GAAeK,CAAM,EACd,CAAC,IAAIC,GAAO,CAACD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAG,QAAS,OAAW,OAAW,IAAI,WAAWA,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,GAGtGL,GAAkBK,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,ICfA,IAiBME,GAMOC,GAaAC,GAOPC,GAwCAC,GASOC,GAaPC,GAoBAC,GA7HNC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAEAC,KAQMb,GAAuB,CAC3B,KAAM,QACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACa,EAAyCC,EAAkBC,KAC1DZ,GAAeW,CAAM,EAQd,CAPQD,EAAiB,IAC5B,CACE,GAAGd,GACH,UAAWgB,EAAW,SACtB,IAAK,IAAMb,GAAuBW,EAAkBC,EAAO,CAAC,EAAGC,CAAU,CAC3E,EACAD,CAAM,CACI,GAGPb,GAAiEe,GAAsC,CAClH,IAAMC,EAASD,EAAK,WAAW,QAAQ,QAAQ,EACzCE,EAAOF,EAAK,WAAW,QAAQ,MAAM,EA
CrCG,EAAOH,EAAK,WAAW,QAAQ,OAAQ,CAAC,CAAC,EAC/C,OAAOI,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,EAEMjB,GACF,CAACmB,EAA0CC,EAAeP,IAA6C,CACrG,IAAMI,EAAQJ,EAAW,KAAK,SAAW,EAAKO,EAAM,KAAK,MAAM,CAAC,EAAE,IAAI,CAACC,EAAMC,IAAMA,CAAC,EAAIT,EAAW,KAC7FU,EAAiBC,EAAU,cAAcP,EAAMG,EAAM,KAAK,MAAM,EAChEL,EAASF,EAAW,OAAO,IAAI,CAACY,EAAOH,IACvCG,EAAQL,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAAI,EACnCF,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAE9BE,EAAU,cAAcC,EAAOL,EAAM,KAAKG,EAAeD,CAAC,CAAC,CAAC,CACpE,EACKN,EAAOH,EAAW,KAAK,IAAI,CAACa,EAAKJ,IACjCI,EAAMN,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAAI,EACjCF,EAAM,KAAKG,EAAeD,CAAC,CAAC,EAE9BE,EAAU,cAAcE,EAAKN,EAAM,KAAKG,EAAeD,CAAC,CAAC,CAAC,CAClE,EAEKK,EAAcP,EAAM,KAAK,MAAM,EAE/BQ,EAAqB,CAAC,EAC5B,QAASN,EAAI,EAAGA,EAAIC,EAAe,OAAQD,IACzCK,EAAYJ,EAAeD,CAAC,CAAC,EAAIN,EAAKM,CAAC,EAAIP,EAAOO,CAAC,EAC/CP,EAAOO,CAAC,EAAI,GACdM,EAAS,KAAK,aAAaL,EAAeD,CAAC,CAAC,QAAQP,EAAOO,CAAC,CAAC,GAAG,EAKpE,IAAMO,EAAe;AAAA,oCADRF,EAAY,MAES;AAAA,UAC9BC,EAAS,KAAK;AAAA,OAAU,CAAC;AAAA;AAAA,SAG7B,MAAO,CACL,GAAG/B,GACH,OAAQ,CAAC,KAAM8B,EAAa,KAAMP,EAAM,KAAM,aAAiC,EAC/E,aAAAS,CACF,CACF,EAEE5B,GAAkBW,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAE3C,GAAIkB,GAAa,QAAQlB,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAEaV,GAAW,CAACS,EAAyCC,IAA+B,CAC/FR,GAAkBQ,CAAM,EACxB,IAAMC,EAAaV,GAAkCQ,EAAkBC,CAAM,EAQ7E,MAAO,CAPQD,EAAiB,IAC5B,CACE,GAAGd,GACH,UAAWgB,EAAW,SACtB,IAAK,IAAMb,GAAuBW,EAAkBC,EAAO,CAAC,EAAGC,CAAU,CAC3E,EACA,CAACD,EAAO,CAAC,CAAC,CAAC,CACD,CAChB,EAEMT,GACF,CAACQ,EAAyCC,IAAsC,CAC9E,GAAI,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GACxD,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GACvDA,EAAO,QAAU,GAAK,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,GAC9EA,EAAO,QAAU,GAAK,CAACD,EAAiB,QAAQ,cAAcC,EAAO,CAAC,EAAE,MAAM,EACjF,MAAM,IAAI,MAAM,0CAA0C,EAG5D,GAAIA,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,YAAY,KAAMU,GAAcA,IAAM,CAAC,EACzE,MAAM,IAAI,MAAM,kDAAkD,EAGpE,IAAMP,EAAS,MAAM,KAAKH,EAAO,CAAC,EAAE,WAAW,EACzCI,EAAO,MAAM,KAAKJ,EAAO,CAAC,EAAE,WAAW,EACvCK,EAAOL,EAAO,QAAU,EAAI,MAAM,KAAKA,EAAO,CAAC,EAAE,WAAW,EAAI,CAAC,EACjEmB,EAAW,GAAGd,CAAI,IAAIF,CAAM,IAAIC,CAAI,GAC1C,MAAO,CAAC,OAAAD,EAAQ,KAAAC,EAAM,KAAAC,EAAM,SAAAc,CAAQ,CACtC,EAEE3B,GAAqBQ,GAA2B,CACpD,GAAI,CAACA,GAAUA,EAAO,OAAS,GAAKA,EAAO,OAAS,EAClD,MAAM,IAAI,MAAM,uBAAuB,EAEzC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC1D,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC1D,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,QAAU,IAAMA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,GACjF,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,QAAU,IAAMA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,KAAK,SAAW,GACjF,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC7IA,IAkBMoB,GAMAC,GAMAC,GAMOC,GAaAC,GAGAC,GASAC,GA2CPC,GA4BAC,GA8CAC,GAiDAC,GA8CAC,GAjRNC,GAAAC,EAAA,kBAGAC,KAIAC,KACAC,KAEAC,KAEAC,KAMMlB,GAAmC,CACvC,KAAM,oBACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEMC,GAAqC,CACzC,KAAM,sBACN,WAAY,CAAC,IAAK,KAAK,EACvB,WAAY,IAA2C,CACzD,EAEMC,GAAyB,CAC7B,KAAM,UACN,WAAY,CAAC,IAAK,MAAO,MAAM,EAC/B,WAAY,MAAiE,CAC/E,EAEaC,GACT,CAACgB,EAAyCC,EAAkBC,IAA4C,CACtGV,GAAeS,CAAM,EAErB,IAAME,EAAaF,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCG,EAAOC,EAAU,cAAcH,EAAW,KAAMC,EAAW,MAAM,EACjEG,EAAkBD,EAAU,gBAAgBF,EAAYC,CAAI,EAC5DG,EAAeF,EAAU,kBAAkBF,EAAYC,CAAI,EAGjE,OADehB,GAAeY,EAAkBC,EAAQC,EAAYI,EAAiBC,CAAY,CAEnG,EAEStB,GACRuB,GAAwCC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,OAAQ,CAAC,CAAC,CAAC,EAErGtB,GACRsB,GAAwCC,GAA4B,CAAC,KAAMD,EAAK,WAAW,OAAO,OAAQ,EAAE,CAAC,CAAC,EAQtGrB,GACT,CAACa,EAAyCC,EAAkBC,IAA4C,CACtGV,GAAeS,CAAM,EAErB,IAAME,EAAaF,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCG,EAAOC,EAAU,cAAcH,EAAW,KAAMC,EAAW,MAA
M,EACjEO,EAAOP,EAAW,OAElBQ,EAAuBP,IAASM,EAAO,EACvCE,EAAiC,CAAC,EACpCC,EAAiB,CAAC,EAClBC,EAA6B,CAAC,EAC9BC,EAEAJ,IACFE,EAAO,MAAM,KAAK,CAAC,OAAQH,CAAI,CAAC,EAAE,IAAI,CAACM,EAAGC,IAAMA,CAAC,EAGjDJ,EAAKT,CAAI,EAAIM,EAAO,EACpBG,EAAKH,EAAO,CAAC,EAAIN,EAEjBS,EAAK,IAAIK,GAAKN,EAAqB,KAAKT,EAAWe,CAAC,CAAC,CAAC,EAEtDH,EAAqBN,GAA4B,CAAC,KAAAI,CAAI,CAAC,EACvDC,EAAmBK,GAAUnB,EAAkBC,EAAQc,CAAkB,GAG3E,IAAMT,EAAkBK,EAAsBN,EAAU,gBAAgBO,EAAsBF,EAAO,CAAC,EACxDL,EAAU,gBAAgBF,EAAYO,EAAO,CAAC,EACtFH,EAAeI,EAAsBN,EAAU,kBAAkBO,EAAsBF,EAAO,CAAC,EAC1DL,EAAU,kBAAkBF,EAAYO,EAAO,CAAC,EAErFU,EAAShC,GACXY,EAAkBW,EAAsBG,EAAmBb,EAAQC,EAAYI,EAAiBC,CAAY,EAEhH,OAAII,EACqBQ,GAAUnB,EAAkBoB,EAAQL,CAAmB,EAGvEK,CAEX,EAEEhC,GACF,CAACY,EAAyCC,EAAkBC,EAA+BI,EAC1FC,IAAmC,CAClC,IAAMc,EACFhC,GAA4BW,EAAkBC,EAAO,CAAC,EAAGK,EAAiBC,EAAc,CAACD,CAAe,CAAC,EACvGgB,EAAMtB,EAAiB,IACzB,CAAC,GAAGnB,GAAkC,UAAWqB,EAAW,SAAU,IAAK,IAAMmB,CAAqB,EACtGpB,CAAM,EAEJsB,EAA0BjC,GAC5BU,EAAkBC,EAAO,CAAC,EAAGK,EAAiBC,EAAcc,EAAsB,OAAO,KACzF,CAACf,CAAe,CAAC,EACfkB,EAAQxB,EAAiB,IAC3B,CAAC,GAAGlB,GAAoC,UAAWoB,EAAW,SAAU,IAAK,IAAMqB,CAAuB,EAC1G,CAACtB,EAAO,CAAC,EAAGqB,CAAG,CAAC,EAEdG,EAAqBlC,GACvBS,EAAkBC,EAAO,CAAC,EAAGK,EAAiBC,EAAcc,EAAsB,OAAO,KACzFE,EAAwB,OAAO,IAAI,EAIvC,MAAO,CAHQvB,EAAiB,IAC5B,CAAC,GAAGjB,GAAwB,UAAWmB,EAAW,SAAU,IAAK,IAAMuB,CAAkB,EACzF,CAACxB,EAAO,CAAC,EAAGqB,EAAKE,CAAK,CAAC,CACb,CAChB,EAKEnC,GACF,CAACW,EAAyC0B,EAAepB,EAAyBC,EACjFoB,IAAuC,CACtC,GAAM,CAACC,EAAcC,CAAa,EAC9B7B,EAAiB,+BAA+B0B,EAAM,MAA0B,EAC9EhB,EAAOiB,EAAY,OAEzB,GAAIrB,EAAkB,GAAKC,EAAe,EACxC,MAAM,IAAI,MAAM,4EAA4E,EAG9F,GAAIoB,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,GAAIA,EAAY,CAAC,IAAMrB,EACrB,MAAM,IAAI,MAAM,0DAA0D,EAG5E,IAAMwB,EAAOC,GAAQ/B,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEgC,EAAe;AAAA,0BACDtB,CAAI;AAAA,sDACwBH,CAAY;AAAA;AAAA,sCAE5BuB,EAAK,SAAS,gDAAgDF,CAAY;AAAA,UACtGC,CAAa;AAAA,yBACEtB,CAAY;AAAA;AAAA,4CAEOuB,EAAK,SAAS;AAAA,cAC5CF,CAAY,KAAKC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOtC,MAAO,CACL,GAAGhD,GACH,OAAQ,CAAC,KAAM8C,EAAa,KAAMD,EAAM,KAAM,aAAiC,EAC/E,aAAAM,CACF,CACF,EAKE1C,GACF,CAACU,EAAyC0B,EAAepB,EAAyBC,EACjF0B,EAA4CN,IAAuC,CAClF,GAAM,CAACC,EAAcC,CAAa,EAC9B7B,EAAiB,+BAA+B0B,EAAM,MAA0B,EAC9EhB,EAAOiB,EAAY,OAEzB,GAAIrB,EAAkB,GAAKC,EAAe,EACxC,MAAM,IAAI,MAAM,4EAA4E,EAG9F,GAAIoB,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,GAAIA,EAAY,CAAC,IAAMrB,EACrB,MAAM,IAAI,MAAM,0DAA0D,EAG5E,GAAI2B,EAAwB,SAAW,EACrC,MAAM,IAAI,MAAM,wDAAwD,EAG1E,GAAIA,EAAwB,CAAC,IAAM3B,EACjC,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAMwB,EAAOC,GAAQ/B,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEgC,EAAe;AAAA,0BACDtB,CAAI;AAAA,sDACwBH,CAAY;AAAA;AAAA;AAAA;AAAA,yBAIzCA,CAAY;AAAA;AAAA,+CAEUuB,EAAK,SAAS;AAAA,cAC/CF,CAAY,KAAKC,CAAa;AAAA;AAAA;AAAA;AAAA,SAKtC,MAAO,CACL,GAAG/C,GACH,OAAQ,CAAC,KAAM6C,EAAa,KAAMD,EAAM,KAAM,aAAiC,EAC/E,aAAAM,CACF,CACF,EAEEzC,GACF,CAACS,EAAyC0B,EAAepB,EAAyBC,EACjF0B,EAA4CC,IAA+D,CAC1G,GAAM,CAACN,EAAcC,CAAa,EAC9B7B,EAAiB,+BAA+B0B,EAAM,MAA0B,EAC9EhB,EAAOgB,EAAM,KAAK,OAExB,GAAIpB,EAAkB,GAAKC,EAAe,EACxC,MAAM,IAAI,MAAM,4EAA4E,EAG9F,GAAI0B,EAAwB,SAAW,GAAKC,EAA2B,SAAW,EAChF,MAAM,IAAI,MAAM,wDAAwD,EAG1E,GAAID,EAAwB,CAAC,IAAM3B,GAAmB4B,EAA2B,CAAC,IAAM5B,EACtF,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAM0B,EAAe;AAAA,0BACDtB,CAAI;AAAA;AAAA;AAAA,+CAGiBkB,CAAY,KAAKC,CAAa;AAAA;AAAA;AAAA;AAAA,wCAIrCtB,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAY9C,MAAO,CACL,GAAGxB,GACH,OAAQ,CAAC,KAAM2C,EAAM,KAAM,KAAMA,EAAM,KAAM,aAAiC,EAC9E,aAAAM,CACF,CACF,EAEExC,GAAkBS,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,oBAAoB,CAExC,ICzRA,IAiBMkC,GAMOC,GAoBAC,GA
OPC,GAMAC,GAqBAC,GA7ENC,GAAAC,EAAA,kBAGAC,KAIAC,KAEAC,KAQMV,GAAuB,CAC3B,KAAM,QACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAEaC,GACT,CAACU,EAAyCC,EAAkBC,IAA0C,CACpGR,GAAeO,CAAM,EAErB,IAAME,EAAOC,EAAU,cAAcF,EAAW,KAAMD,EAAO,CAAC,EAAE,KAAK,MAAM,EACrEI,EAAQb,GAAgBQ,EAAkBC,EAAQE,EAAMD,CAAU,EAClEI,EAAmB,CAAC,EAC1B,QAASC,EAAI,EAAGA,EAAIF,EAAO,EAAEE,EAC3BD,EAAO,KAAKN,EAAiB,IACzB,CACE,GAAGX,GACH,UAAW,GAAGa,EAAW,QAAQ,IAAIK,CAAC,GACtC,IAAK,IAAMd,GAAuBO,EAAkBC,EAAO,CAAC,EAAGC,EAAYC,EAAMI,CAAC,CACpF,EACAN,CAAM,CAAC,EAGb,OAAOK,CACT,EAESf,GAAiEiB,GAAsC,CAClH,IAAML,EAAOK,EAAK,WAAW,OAAO,OAAQ,CAAC,EACvClB,EAAQkB,EAAK,WAAW,QAAQ,QAAS,CAAC,CAAC,EAC3CC,EAAaD,EAAK,QAAQ,OAChC,OAAOE,GAA4B,CAAC,KAAAP,EAAM,MAAAb,EAAO,WAAAmB,CAAU,CAAC,CAC9D,EAEMjB,GACF,CAACmB,EAA0CV,EAAkBE,EAAcD,IAAwC,CACjH,GAAM,CAAC,CAAEU,CAAO,EAAIC,GAAU,WAAWZ,EAAO,CAAC,EAAE,KAAME,EAAMD,EAAW,MAAOA,EAAW,UAAU,EACtG,OAAOU,EAAQ,MACjB,EAEEnB,GACF,CAACkB,EAA0CG,EAAeZ,EAA6BC,EAAcY,IAClF,CACb,GAAM,CAACC,EAAQJ,CAAO,EAAIC,GAAU,WAAWC,EAAM,KAAMX,EAAMD,EAAW,MAAOA,EAAW,UAAU,EAClGe,EAASL,EAAQG,CAAK,EACtBG,EAAcF,EAAOD,CAAK,EAE1BI,EAAe;AAAA,kCADRD,EAAY,MAEG;AAAA,kBACpBf,CAAI,QAAQc,CAAM;AAAA;AAAA;AAAA,MAI1B,MAAO,CACL,GAAG5B,GACH,UAAW,GAAGa,EAAW,QAAQ,IAAIa,CAAK,GAC1C,OAAQ,CAAC,KAAMG,EAAa,KAAMJ,EAAM,KAAM,aAAiC,EAC/E,aAAAK,CACF,CACF,EAEFzB,GAAkBO,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,OAAS,QAAUA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,SAC9EA,EAAO,CAAC,EAAE,OAAS,UAAYA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,UAChFA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,OACrF,MAAM,IAAI,MAAM,qBAAqB,CAEzC,ICvFA,IASamB,GAQAC,GAKAC,GAGPC,GAUAC,GAnCNC,GAAAC,EAAA,kBAMAC,KAGaP,GACT,CAACQ,EAAyCC,EAAkBC,IAA6B,CACvFP,GAAeM,CAAM,EACrB,IAAME,EAAcC,EAAU,aAAaH,EAAO,CAAC,EAAE,KAAMC,CAAI,EAE/D,MAAO,CADQF,EAAiB,gBAAgBC,EAAO,CAAC,EAAGE,CAAW,CACxD,CAChB,EAESV,GAAa,CAACO,EAAyCC,KAClEL,GAAkBK,CAAM,EACjBT,GAAQQ,EAAkB,CAACC,EAAO,CAAC,CAAC,EAAG,MAAM,KAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,GAGpEP,GAA4DW,GACrEA,EAAK,WAAW,QAAQ,MAAM,EAE5BV,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEML,GAAqBK,GAA2B,CACpD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,4BAA4B,EAG9C,GAAIA,EAAO,CAAC,EAAE,OAAS,QACrB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC3CA,IAQaK,GAcPC,GAmBAC,GAzCNC,GAAAC,EAAA,kBAIAC,KAEAC,KAEaN,GAAM,CAACO,EAAyCC,IAA+B,CAC1FN,GAAeM,CAAM,EAErB,IAAMC,EAAqB,CACzB,KAAM,MACN,WAAYD,EAAO,IAAI,CAACE,EAAI,IAAM,IAAI,CAAC,EAAE,EACzC,WAAY,IAAI,MAAMF,EAAO,MAAM,EAAE,MAAyB,CAChE,EAIA,MAAO,CAFQD,EAAiB,IAC5B,CAAC,GAAGE,EAAoB,IAAK,IAAMR,GAAqBM,EAAkBC,EAAQC,CAAkB,CAAC,EAAGD,CAAM,CACpG,CAChB,EAEMP,GACF,CAACM,EAAyCC,EAAkBC,IAAqD,CAC/G,IAAME,EAAOC,GAAQL,EAAiB,QAAQ,QAAQ,UAAU,OAAO,EACjEM,EAAcL,EAAO,CAAC,EAAE,KAAK,MAAM,EAEnCM,EAAe;AAAA;AAAA,wBADLN,EAAO,IAAI,CAACE,EAAIK,IAAM,GAAGJ,EAAK,SAAS,KAAKI,CAAC,aAAa,EAAE,KAAK,KAAK,CAG7D;AAAA,UACrBJ,EAAK,MAAM;AAAA;AAAA,MAGf,MAAO,CACL,GAAGF,EACH,OAAQ,CAAC,KAAMI,EAAa,KAAML,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,QAAS,GACT,aAAAM,CACF,CACF,EAEEZ,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAMQ,EAASR,EAAO,CAAC,EAAE,KAAK,OAC9B,QAASO,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IAAK,CACtC,GAAIC,IAAWR,EAAOO,CAAC,EAAE,KAAK,OAC5B,MAAM,IAAI,MAAM,8BAA8B,EAGhD,QAASE,EAAI,EAAGA,EAAID,EAAQC,IAC1B,GAAIT,EAAO,CAAC,EAAE,KAAKS,CAAC,IAAMT,EAAOO,CAAC,EAAE,KAAKE,CAAC,EACxC,MAAM,IAAI,MAAM,+BAA+B,CAGrD,CAEA,GAAIT,EAAO,CAAC,EAAE,OAAS,WAAaA,EAAO,CAAC,EAAE,OAAS,UACrD,MAAM,IAAI,MAAM,qBAAqB,EAEvC,QAASO,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjC,GAAIP,EAAO,
CAAC,EAAE,OAASA,EAAOO,CAAC,EAAE,KAC/B,MAAM,IAAI,MAAM,8BAA8B,CAGpD,ICnEA,IAQaG,GAePC,GA0BAC,GAjDNC,GAAAC,EAAA,kBAGAC,KAGAC,KAEaN,GAAO,CAACO,EAAyCC,IAA+B,CAC3FN,GAAeM,CAAM,EAErB,IAAMC,EAAsB,CAC1B,KAAM,OACN,WAAY,CAAC,GAAG,EAChB,WAAY,EAAqB,CACnC,EAKA,MAAO,CAHQF,EAAiB,IAC5B,CAAC,GAAGE,EAAqB,IAAK,IAAMR,GAAsBM,EAAkBC,EAAQC,CAAmB,CAAC,EACxGD,CAAM,CACI,CAChB,EAEMP,GACF,CAACS,EAAiCF,EAAkBC,IAAsD,CACxG,IAAME,EAAaH,EAAO,CAAC,EAAE,KAAK,MAAM,EAClCI,EAAc,IAAI,MAAMD,EAAW,MAAM,EAEzCE,EAAoB,CAAC,EAC3B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQG,IACrCF,EAAYE,CAAC,EAAIH,EAAWG,CAAC,EAAIN,EAAO,CAAC,EAAE,WAAWM,CAAC,EACvDD,EAAQ,KAAK,YAAYC,CAAC,+BAA+BA,CAAC,OAAOH,EAAWG,CAAC,CAAC,MAAM,EAGtF,IAAMC,EAAOH,EAAY,OACnBI,EAAe;AAAA,oCACSD,CAAI;AAAA,uBACjBA,CAAI;AAAA,UACjBF,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA;AAAA;AAAA,MAItB,MAAO,CACL,GAAGJ,EACH,OAAQ,CAAC,KAAMG,EAAa,KAAMJ,EAAO,CAAC,EAAE,KAAM,aAAiC,EACnF,aAAAQ,CACF,CACF,EAEEd,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,wBAAwB,EAE1C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,0CAA0C,EAE5D,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,OACvC,MAAM,IAAI,MAAM,sBAAsB,EAExC,GAAIS,GAAa,QAAQT,EAAO,CAAC,EAAE,IAAI,IAAM,GAC3C,MAAM,IAAI,MAAM,qBAAqB,EAEvC,GAAIA,EAAO,CAAC,EAAE,OAAS,SAAWA,EAAO,CAAC,EAAE,OAAS,QACnD,MAAM,IAAI,MAAM,sBAAsB,CAE1C,ICjEA,IASaU,GAQAC,GAKAC,GAGPC,GAUAC,GAnCNC,GAAAC,EAAA,kBAMAC,KAGaP,GACT,CAACQ,EAAyCC,EAAkBC,IAA6B,CACvFP,GAAeM,CAAM,EACrB,IAAME,EAAcC,EAAU,eAAeH,EAAO,CAAC,EAAE,KAAMC,CAAI,EAEjE,MAAO,CADQF,EAAiB,gBAAgBC,EAAO,CAAC,EAAGE,CAAW,CACxD,CAChB,EAESV,GAAe,CAACO,EAAyCC,KACpEL,GAAkBK,CAAM,EACjBT,GAAUQ,EAAkB,CAACC,EAAO,CAAC,CAAC,EAAG,MAAM,KAAKA,EAAO,CAAC,EAAE,WAAW,CAAC,GAGtEP,GAA8DW,GACvEA,EAAK,WAAW,QAAQ,MAAM,EAE5BV,GAAkBM,GAA2B,CACjD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,OAAS,SACrB,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEML,GAAqBK,GAA2B,CACpD,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,EAGhD,GAAIA,EAAO,CAAC,EAAE,OAAS,QACrB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,IC3CA,IAoCaK,GApCbC,GAAAC,EAAA,kBAKAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEahC,GAAuD,CAClE,CAAC,MAAO,GAAI,KAAeiC,EAAG,EAC9B,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAEhC,CAAC,cAAe,GAAI,KAAMC,GAAaC,EAA0B,EACjE,CAAC,qBAAsB,GAAI,KAAMC,GAAoBC,EAAiC,EACtF,CAAC,OAAQ,GAAI,KAAMC,GAAMC,EAAmB,EAC5C,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,OAAQ,GAAI,OAAiBC,GAAeC,EAAmB,EAChE,CAAC,OAAQ,GAAI,MAAgBC,EAAO,EACpC,CAAC,SAAU,GAAI,KAAMC,GAAQC,EAAqB,EAClD,CAAC,OAAQ,GAAI,KAAMC,GAAMC,EAAmB,EAC5C,CAAC,gBAAiB,GAAI,KAAMC,GAAeC,EAA4B,EACvE,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,UAAW,GAAI,KAAeC,EAAQ,EACvC,CAAC,eAAgB,GAAI,KAAMC,GAAcC,EAA2B,EACpE,CAAC,QAAS,GAAI,KAAgBC,EAAK,EACnC,CAAC,MAAO,GAAI,KAAeC,GAAcC,EAAkB,EAC3D,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,UAAW,GAAI,KAAMC,GAASC,EAAsB,EACrD,CAAC,QAAS,GAAI,KAAeC,EAAK,EAClC,CAAC,YAAa,gBAAiB,KAAMf,GAAMC,EAAmB,EAC9D,CAAC,SAAU,GAAI,KAAMe,GAAQC,EAAqB,EAClD,CAAC,OAAQ,GAAI,OAAQC,GAAMC,EAAqB,EAChD,CAAC,OAAQ,GAAI,MAAOD,GAAME,EAAsB,EAChD,CAAC,oBAAqB,GAAI,KAAMC,GAAmBC,EAAgC,EACnF,CAAC,gBAAiB,GAAI,KAAMC,EAAa,EACzC,CAAC,UAAW,GAAI,KAAgBC,EAAO,EACvC,CAAC,WAAY,GAAI,KAAelB,EAAQ,EACxC,CAAC,cAAe,GAAI,KAAMmB,GAAaC,EAA0B,EACjE,CAAC,wBAAyB,GAAI,KAAMC,GAAuBC,EAAoC,EAC/F,CAAC,YAAa,GAAI,KAAeC,GAAoBC,EAAwB,EAC7E,CAAC,OAAQ,GAAI,KAAgBC,EAAI,EACjC,CAAC,MAAO,GAAI,KAAMC,GAAKC,EAAkB,EACzC,
CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,SAAU,GAAI,KAAMC,GAAQC,EAAqB,EAElD,CAAC,UAAW,GAAI,KAAMC,GAASC,EAAsB,EACrD,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,KAAM,GAAI,KAAgBC,EAAE,EAC7B,CAAC,MAAO,GAAI,OAAQC,GAAOC,EAAoB,EAC/C,CAAC,MAAO,GAAI,MAAOC,GAAQC,EAAqB,EAChD,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,QAAS,GAAI,KAAgBC,EAAK,EACnC,CAAC,eAAgB,GAAI,KAAMC,GAAcC,EAAqB,EAC9D,CAAC,YAAa,GAAI,KAAMC,GAAWD,EAAqB,EACxD,CAAC,aAAc,GAAI,KAAME,GAAYF,EAAqB,EAC1D,CAAC,YAAa,GAAI,KAAMG,GAAWH,EAAqB,EACxD,CAAC,aAAc,GAAI,KAAMI,GAAYJ,EAAqB,EAC1D,CAAC,YAAa,GAAI,OAAQK,GAAWL,EAAqB,EAC1D,CAAC,kBAAmB,GAAI,KAAMM,GAAoBN,EAAqB,EACvE,CAAC,OAAQ,GAAI,KAAeO,EAAI,EAChC,CAAC,UAAW,GAAI,KAAMC,EAAO,EAC7B,CAAC,SAAU,GAAI,KAAMC,GAAQC,EAAwB,EACrD,CAAC,SAAU,GAAI,MAAOD,GAAQE,EAAwB,EACtD,CAAC,QAAS,GAAI,KAAMC,EAAK,EACzB,CAAC,UAAW,GAAI,KAAeC,EAAO,EACtC,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,QAAS,GAAI,MAAOC,EAAQ,EAC7B,CAAC,QAAS,GAAI,MAAOC,GAAOC,EAAoB,EAEhD,CAAC,UAAW,GAAI,OAAQC,GAASC,EAAsB,EACvD,CAAC,UAAW,GAAI,MAAOC,GAAYC,EAAyB,EAK5D,CAAC,QAAS,GAAI,OAAQC,GAAOC,EAAoB,EACjD,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,UAAW,GAAI,OAAQC,GAASC,EAAsB,EACvD,CAAC,UAAW,GAAI,MAAOC,EAAU,EACjC,CAAC,MAAO,GAAI,KAAgBC,EAAG,EAC/B,CAAC,MAAO,GAAI,KAAMC,EAAG,EACrB,CAAC,MAAO,GAAI,KAAeC,EAAG,EAC9B,CAAC,OAAQ,GAAI,KAAeC,EAAI,EAChC,CAAC,OAAQ,GAAI,KAAMC,EAAI,EACvB,CAAC,YAAa,GAAI,KAAMC,GAAWC,EAAwB,EAC3D,CAAC,WAAY,GAAI,MAAOC,GAAUC,EAAyB,EAC3D,CAAC,WAAY,GAAI,IAAKD,GAAUE,EAAyB,EACzD,CAAC,YAAa,GAAI,OAAQC,GAAWC,EAAwB,EAC7D,CAAC,YAAa,GAAI,MAAOC,EAAY,EACrC,CAAC,MAAO,GAAI,KAAgBC,EAAG,CACjC,ICrHO,SAASC,GAAeC,EAAwB,CACrD,IAAMC,EAAiG,CAAC,EACpGC,EACJ,MAAQA,EAAQC,GAAsB,KAAKH,CAAM,KAAO,MAAM,CAC5D,IAAMI,EAASF,EAAM,CAAC,EACF,MAAM,GAAG,EACT,IAAIG,GAAK,CACR,IAAMC,EAASD,EAAE,KAAK,EAAE,MAAM,GAAG,EACjC,OAAIC,GAAUA,EAAO,SAAW,EACvB,CAAC,KAAMA,EAAO,CAAC,EAAG,KAAMA,EAAO,CAAC,CAAC,EAEnC,IACT,CAAC,EACA,OAAOC,GAAKA,IAAM,IAAI,EAC1CN,EAAWC,EAAM,CAAC,CAAC,EAAI,CAAC,OAAAE,EAAQ,KAAMF,EAAM,CAAC,CAAC,CAChD,CACA,QAAWM,KAAQP,EAAY,CAC7B,IAAMQ,EAAcC,GAAgB,QAAQ,WAAYF,CAAI,EACtDG,EAAQ,IAAI,OAAOF,EAAa,IAAI,EAC1C,MAAQP,EAAQS,EAAM,KAAKX,CAAM,KAAO,MAAM,CAC5C,IAAMY,EAAOV,EAAM,CAAC,EACdW,EAAWX,EAAM,CAAC,EAClBE,EAASF,EAAM,CAAC,EAAE,MAAM,GAAG,EAC3BY,EAAYF,EAAQ,GAAGA,CAAI,IAAIC,CAAQ,IAAM,GAC/CE,EAAkBd,EAAWO,CAAI,EAAE,KACnCQ,EAAiB,GACrBf,EAAWO,CAAI,EAAE,OAAO,QAAQ,CAACD,EAAGU,IAAM,CACpCV,IACFS,GAAkB,GAAGT,EAAE,IAAI,IAAIA,EAAE,IAAI,MAAMH,EAAOa,CAAC,CAAC;AAAA,EAExD,CAAC,EACDF,EAAU,GAAGC,CAAc;AAAA,GAAMD,CAAO,GACxCA,EAAUA,EAAQ,QAAQ,SAAU,GAAGF,CAAQ,KAAK,EACpD,IAAMK,EAAc;AAAA,QAClBJ,CAAQ;AAAA;AAAA,UAENC,CAAO;AAAA;AAAA,QAGXf,EAASA,EAAO,QAAQE,EAAM,CAAC,EAAGgB,CAAW,CAC/C,CACF,CACA,OAAAlB,EAASA,EAAO,QAAQG,GAAuB,EAAE,EAC1CH,CACT,CApDA,IAGMG,GACAO,GAJNS,GAAAC,EAAA,kBAGMjB,GAAwB,qFACxBO,GAAkB,+DCqJjB,SAASW,GAAaC,EAAiBC,EAA2D,CACvG,IAAMC,EAAqB,CAAC,EACtBC,EAAqB,CAAC,EACtBC,EAAeH,GAAQ,MAAQ,MAAM,QAAQA,CAAI,GAAKA,EAAK,SAAW,EACtEI,EAAQJ,GAAQ,MAAQG,EAAgB,KAAOE,GAAeL,EAAMD,CAAK,EAAE,KAAK,EAClFO,EAAI,EACR,QAASC,EAAI,EAAGA,EAAIR,EAAM,OAAQ,EAAEQ,EAAG,CACrC,GAAIH,GAAQ,KAAM,CAChB,GAAIA,EAAKE,CAAC,IAAMC,GAAKR,EAAMQ,CAAC,IAAM,EAChC,MAAM,IAAI,MAAM,sBAAsBA,CAAC,mBAAmBR,EAAMQ,CAAC,CAAC,YAAY,GAE3EH,EAAKE,CAAC,GAAK,MAAQF,EAAKE,CAAC,EAAIC,IAAMR,EAAMQ,CAAC,IAAM,IACnDN,EAAS,KAAKF,EAAMQ,CAAC,CAAC,EACtBL,EAAS,KAAKK,CAAC,GAEbH,EAAKE,CAAC,GAAKC,GACbD,GAEJ,CACIP,EAAMQ,CAAC,IAAM,IACfN,EAAS,KAAKF,EAAMQ,CAAC,CAAC,EACtBL,EAAS,KAAKK,CAAC,EAEnB,CACA,MAAO,CAAC,SAAAN,EAAU,SAAAC,CAAQ,CAC5B,CAEO,SAASG,GAAeL,EAAuBD,EAA2B,CAC/E,IAAMS,EAAOT,EAAM,OAGnB,OAAAC,EAAOA,GAAQ,KAAOD,EAAM,IAAI,CAACU,EAAIF,IAAMA,CAAC,EAAK,CAAC,EAAe,OAAOP,C
AAI,EAG5EU,GACIV,EAAK,MAAMW,GAAMA,GAAM,CAACH,GAAQG,EAAKH,CAAI,EACzC,IAAM,+CAA+CA,CAAI,KAAKA,CAAI,kBAClDR,CAAI,EAAE,EAG1BU,GACIV,EAAK,MAAMY,EAAK,EAChB,IAAM,0DACUZ,CAAI,EAAE,EAGnBA,EAAK,IAAIa,GAAKA,EAAI,EAAIL,EAAOK,EAAIA,CAAC,CAC3C,CACO,SAASD,GAAMC,EAAoB,CACxC,OAAOA,EAAI,IAAM,CACnB,CACO,SAASC,GAAcf,EAAyB,CACrD,GAAIA,EAAM,SAAW,EAEnB,MAAO,GAET,IAAIgB,EAAOhB,EAAM,CAAC,EAClB,QAASQ,EAAI,EAAGA,EAAIR,EAAM,OAAQQ,IAChCQ,GAAQhB,EAAMQ,CAAC,EAEjB,OAAOQ,CACT,CAQO,SAASC,GAAoBD,EAAgC,CAClE,IAAME,EAAQ,KAAK,KAAK,KAAK,KAAKF,CAAI,CAAC,EACvC,MAAO,CAACE,EAAO,KAAK,KAAKF,EAAOE,CAAK,CAAC,CACxC,CAjOA,IA8DaC,GA9DbC,GAAAC,EAAA,kBAGAC,KACAC,KA0DaJ,GAAN,KAA6D,CAClE,YAAmBK,EAAwB,CAAxB,oBAAAA,CAAyB,CAC5C,iBAAiBxB,EAA0ByB,EAA4C,CACrF,IAAMC,EAAK,KAAK,eAAe1B,EAAOyB,CAAK,EAK3C,OAJIA,GAASA,EAAM,WACjBC,EAAG,CAAC,GAAK,EACTA,EAAG,CAAC,GAAK,GAEPD,GAASA,EAAM,UACV,CAACC,EAAG,CAAC,EAAGA,EAAG,CAAC,CAAC,EAEfA,CACT,CAEA,eAAe1B,EAA0ByB,EAA4C,CACnF,IAAME,EAAWF,GAASA,EAAM,SAEhC,GAAIzB,EAAM,SAAW,EACnB,OAAO2B,EAAW,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAElC,IAAIH,EAAiB,KAAK,eAC1B,GAAIC,GAASA,EAAM,YAAc,OAAW,CAE1C,IAAMG,EAAQH,EAAM,WAAazB,EAAM,OAAS,EAAIA,EAAM,MAAMyB,EAAM,SAAS,EAAE,OAAO,CAACX,EAAGe,IAAMf,EAAIe,CAAC,EACjGC,EAAQL,EAAM,WAAa,EAAI,EAAIzB,EAAM,MAAM,EAAGyB,EAAM,SAAS,EAAE,OAAO,CAACX,EAAGe,IAAMf,EAAIe,CAAC,EAC/F,GAAID,EAAQJ,GAAkBM,EAAQN,EAGpCO,GAAO,QACH,gBACA,2DAA2D/B,CAAK,eAAeyB,EAAM,SAAS,EAAE,MAEpG,OAAO,CAACG,EAAOE,CAAK,CAExB,CACA,IAAIE,EAAWhC,EAAM,MAAM,CAAC,EACxB2B,IACFH,EAAiBA,EAAiB,EAOlCQ,EAAWA,EAAS,IAChB,CAACC,EAAIzB,IAAMA,GAAKwB,EAAS,OAAS,EAAKA,EAASxB,CAAC,EAAI,IAAM,EAAIwB,EAASxB,CAAC,EAAIwB,EAASxB,CAAC,EAAI,EAAKwB,EAASxB,CAAC,CAAC,EAI3GwB,EAAS,SAAW,IACtBA,EAAW,CAAC,EAAGA,EAAS,CAAC,CAAC,IAK1BA,EAAS,SAAW,IAEtBA,EADsBjC,GAAaiC,CAAQ,EAClB,UAG3B,IAAMhB,EAAOD,GAAciB,CAAQ,EACnC,OAAIA,EAAS,QAAU,GAAKhB,GAAQQ,EAC3B,CAAC,EAAGR,CAAI,EACNgB,EAAS,SAAW,GAAKA,EAAS,CAAC,GAAKR,GAAkBQ,EAAS,CAAC,GAAKR,EAC3EQ,EACEA,EAAS,SAAW,GAAKA,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,GAAkBQ,EAAS,CAAC,GAAKR,EACzF,CAACQ,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EACrCA,EAAS,SAAW,GAAKA,EAAS,CAAC,GAAKR,GAAkBQ,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,EACzF,CAACQ,EAAS,CAAC,EAAGA,EAAS,CAAC,EAAIA,EAAS,CAAC,CAAC,EAE5CA,EAAS,SAAW,GAAKA,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,GACpEQ,EAAS,CAAC,GAAKR,EACV,CAACQ,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAE1DA,EAAS,SAAW,GAAKA,EAAS,CAAC,GAAKR,GACxCQ,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,GAAKR,EACtC,CAACQ,EAAS,CAAC,EAAGA,EAAS,CAAC,EAAIA,EAAS,CAAC,EAAIA,EAAS,CAAC,CAAC,EAExDL,EAMKV,GAAoBD,EAAO,CAAC,EAAE,IAAIkB,GAAKA,EAAI,CAAC,EAE9CjB,GAAoBD,CAAI,CAEnC,CACF,ICvJA,IAeamB,GAfbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAEAC,KAMaP,GAAN,cAA4BQ,EAAQ,CAGzC,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CACL,GAAG,KAAK,eAAe,EACvB,GAAG,KAAK,eAAe,EACvB,GAAG,KAAK,MAAM,EACd,GAAG,KAAK,UAAU,EAElB,GAAG,KAAK,mBAAmB,EAC3B,GAAG,KAAK,0BAA0B,EAClC,GAAG,KAAK,yBAAyB,CACnC,CACF,CACA,gBAAiB,CACf,MAAO,CAAC,CACV,CAKU,gBAAmD,CAC3D,IAAMC,EAAW,iBACjB,MAAO,CACL,eAAgB,IAAIC,EAAe;AAAA,aAC5BD,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMd,CACH,CACF,CAMU,gBAAmD,CAC3D,IAAMA,EAAW,iBACjB,MAAO,CACL,eAAgB,IAAIC,EAAe;AAAA,YAC7BD,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMb,CACH,CACF,CAMU,0BAA6D,CACrE,IAAME,EAAe,KAAK,QAAQ,oBAClC,OAAIA,EAAa,SACR,KAAK,+BAA+BA,CAAY,EAEhD,KAAK,iCAAiCA,CAAY,CAE7D,CAKU,+BAA+BA,EAA+D,CACtG,IAAMC,EAAWD,EAAa,cACxBE,EAAc,CAACF,EAAa,MAAOA,EAAa,MAAM,EACtDG,EAA2C,CAAC,EAC5CL,EAAW,kBACjB,OAAQG,EAAS,OAAQ,CACvB,IAAK,GACHE,EAAOL,CAAQ,EAAI,KAAK,sBAAsB,EAC9C,MACF,IAAK,GACHK,EAAOL,CAAQ,EAAI,KAAK,wBAAwBG,EAAsBC,CAA+B,EACrG,MACF,IAAK,GACHC,EAAOL,CAAQ,EA
AI,KAAK,wBAAwBG,EAA8BC,CAA+B,EAC7G,MACF,IAAK,GACHC,EAAOL,CAAQ,EACX,KAAK,wBAAwBG,EAAsCC,CAA+B,EACtG,MACF,QACEC,EAAOL,CAAQ,EAAI,KAAK,wBAAwBG,EAAUC,CAA+B,CAC7F,CAIA,IAAME,EAA4B;AAAA;AAAA,UAHrBC,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAKxC,MAAM;AAAA;AAAA,MAGXC,EAA8B,sBACpC,OAAAH,EAAOG,CAA2B,EAAI,IAAIP,EAAeK,CAAyB,EAC3ED,CACT,CAKU,iCAAiCH,EAA+D,CACxG,IAAMC,EAAWD,EAAa,cACxBE,EAAc,CAACF,EAAa,MAAOA,EAAa,MAAM,EACtDG,EAA2C,CAAC,EAC5CL,EAAW,kBACjB,OAAQG,EAAS,OAAQ,CACvB,IAAK,GACHE,EAAOL,CAAQ,EAAI,KAAK,sBAAsB,EAC9C,MACF,IAAK,GACHK,EAAOL,CAAQ,EAAI,KAAK,0BAA0BG,EAAsBC,CAA+B,EACvG,MACF,IAAK,GACHC,EAAOL,CAAQ,EACX,KAAK,0BAA0BG,EAA8BC,CAA+B,EAChG,MACF,IAAK,GACHC,EAAOL,CAAQ,EACX,KAAK,0BAA0BG,EAAsCC,CAA+B,EACxG,MACF,IAAK,GACHC,EAAOL,CAAQ,EAAI,KAAK,0BACpBG,EAA8CC,CAA+B,EACjF,MACF,IAAK,GACHC,EAAOL,CAAQ,EAAI,KAAK,0BACpBG,EAAsDC,CAA+B,EACzF,MACF,IAAK,GACHC,EAAOL,CAAQ,EAAI,KAAK,0BACpBG,EAA8DC,CAA+B,EACjG,MACF,QACE,MAAM,IAAI,MAAM,sCAAsCD,EAAS,MAAM,EAAE,CAC3E,CAIA,IAAMM,EAAyB;AAAA;AAAA,YAHlBF,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAKtC,MAAM;AAAA;AAAA,MAGbG,EAA2B,mBACjC,OAAAL,EAAOK,CAAwB,EAAI,IAAIT,EAAeQ,CAAsB,EACrEJ,CACT,CAKU,uBAAwC,CAChD,OAAO,IAAIJ,EAAe;AAAA;AAAA;AAAA;AAAA,KAIzB,CACH,CAKU,wBAAwBU,EAAkBC,EAA4C,CAC9F,IAAMC,EAAiBD,EACnBE,EAAS,GACb,OAAID,EAAe,CAAC,IAAM,GACxBC,EAAS;AAAA;AAAA,2CAE4BD,EAAe,CAAC,CAAC;AAAA;AAAA,UAG/C,IAAIZ,EAAea,CAAM,GAG9BD,EAAe,CAAC,IAAM,GACxBC,EAAS;AAAA;AAAA,2CAE4BD,EAAe,CAAC,CAAC;AAAA;AAAA,UAG/C,IAAIZ,EAAea,CAAM,IAGlCA,EAAS;AAAA;AAAA;AAAA,wCAG2BD,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,qCAC1CA,EAAe,CAAC,CAAC;AAAA;AAAA,QAG3C,IAAIZ,EAAea,CAAM,EAClC,CAKU,wBAAwBC,EAAyBH,EAA4C,CACrG,IAAIE,EAAS,GACb,GAAIE,GAAU,YAAYD,EAAOH,CAAQ,EACvC,OAAAE,EAAS;AAAA;AAAA,iDAEkCF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA;AAAA,QAG/D,IAAIX,EAAea,CAAM,EAGlC,IAAMD,EAAiBD,EAEjBK,EAAqB,KAAK,KAAKF,EAAM,CAAC,EAAI,CAAC,EAWjD,OAAAD,EAAS;AAAA;AAAA;AAAA,uCAG0BD,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA;AAAA,qCAEzCA,EAAe,CAAC,CAAC;AAAA;AAAA;AAAA,gCAGtBI,CAAkB;AAAA,iCACjBA,CAAkB;AAAA;AAAA;AAAA;AAAA,QAKxC,IAAIhB,EAAea,CAAM,CAClC,CAKU,wBAAwBC,EAAiCH,EAA4C,CAC7G,IAAMC,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CK,EAAqB,KAAK,KAAKF,EAAM,CAAC,EAAI,CAAC,EAC3CG,EAAgBD,EAAqB,KAAK,KAAKF,EAAM,CAAC,EAAI,CAAC,EAC3DD,EAAS;AAAA;AAAA;AAAA,uCAGoBD,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,qCACzCA,EAAe,CAAC,CAAC;AAAA;AAAA,4BAE1BK,CAAa;AAAA,yBAChBA,CAAa;AAAA;AAAA;AAAA,gCAGND,CAAkB;AAAA,iCACjBA,CAAkB;AAAA;AAAA;AAAA;AAAA,QAK/C,OAAO,IAAIhB,EAAea,CAAM,CAClC,CAKU,wBAAwBC,EAA0BH,EAA4C,CACtG,IAAMC,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAE1CK,EAAqB,KAAK,KAAKF,EAAMA,EAAM,OAAS,CAAC,EAAI,CAAC,EAC1DG,EAAgBD,EAAqB,KAAK,KAAKF,EAAMA,EAAM,OAAS,CAAC,EAAI,CAAC,EAC5EI,EAAiBD,EACjBE,EAAU,GACVC,EAAS,UAEb,QAASC,EAAI,EAAGA,EAAIP,EAAM,OAAS,EAAGO,IACpCH,GAAkBJ,EAAMA,EAAM,OAASO,EAAI,CAAC,EAC5CF,EAAU;AAAA,aACHE,CAAC,cAAcH,CAAc;AAAA,kBACxBG,CAAC,MAAMH,CAAc;AAAA,MAC/BC,EACFC,EAAS,IAAIC,CAAC,KAAOD,EAEvB,IAAMP,EAAS;AAAA,YACPC,EAAM,MAAM;AAAA;AAAA,qCAEaF,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,mCACzCA,EAAe,CAAC,CAAC;AAAA;AAAA,UAE1CO,CAAO;AAAA;AAAA,0BAESF,CAAa;AAAA,uBAChBA,CAAa;AAAA;AAAA;AAAA,8BAGND,CAAkB;AAAA,+BACjBA,CAAkB;AAAA;AAAA,qBAE5BF,EAAM,MAAM,IAAIM,CAAM;AAAA;AAAA,MAGvC,OAAO,IAAIpB,EAAea,CAAM,CAClC,CAKU,0BAA0BH,EAAkBC,EAA4C,CAChG,IAAME,EAAS;AAAA;AAAA;AAAA,uCAGoBF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,gCAClCA,EAAS,CAAC,CAAC;AAAA;AAAA,QAGvC,OAAO,IAAIX,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAyBH,EAA4C,CACvG,IAAME,EAAS;AAAA;AAAA;AAAA,uCAGoBF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,4BACpBG,EAAM,CAAC,CAAC;AAAA,gCACJA,EAAM,CAAC,CAAC;AAAA;AAAA;AAAA,QAIpC,OAAO,IAAId,EAAea,CA
AM,CAClC,CAKU,0BAA0BC,EAAiCH,EAA4C,CAC/G,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,GAAG,EAChCC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,uCAG0BF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,YACpCe,CAAsB;AAAA;AAAA;AAAA,QAIvB,IAAI1B,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAyCH,EAC1D,CACjB,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,IAAK,IAAI,EACtCC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,uCAG0BF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,YACpCe,CAAsB;AAAA;AAAA;AAAA,QAIvB,IAAI1B,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAiDH,EAClE,CACjB,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,IAAK,KAAM,IAAI,EAC5CC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,uCAG0BF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,qCAC7BA,EAAS,CAAC,CAAC;AAAA,YACpCe,CAAsB;AAAA;AAAA;AAAA,QAIvB,IAAI1B,EAAea,CAAM,CAClC,CAKU,0BAA0BC,EAAyDH,EAE1E,CACjB,IAAIE,EAAS,GACPS,EAAOR,EAAM,OAEfS,EAAU,KACVD,EAAO,IACTC,EAAU,CAAC,GAGbA,EAAU,IAAI,MAAMD,EAAO,CAAC,EAC5BC,EAAQD,EAAO,CAAC,EAAIR,EAAMQ,EAAO,CAAC,EAClC,QAASE,EAAIF,EAAO,EAAGE,GAAK,EAAG,EAAEA,EAC/BD,EAAQC,CAAC,EAAID,EAAQC,EAAI,CAAC,EAAIV,EAAMU,EAAI,CAAC,EAE3C,IAAMC,EAAkB,CAAC,IAAK,IAAK,IAAK,KAAM,KAAM,IAAI,EAClDC,EACFH,EACK,IAAI,CAACI,EAAQH,IAAM,CAClB,IAAMI,EAAQ,OAAOH,EAAgBD,CAAC,CAAC,cAAcG,CAAM,GACrDE,EAAQL,IAAMD,EAAQ,OAAS,EACjC,OAAOE,EAAgBD,EAAI,CAAC,CAAC,cAAcC,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GACzE,YAAYF,EAAgBD,CAAC,CAAC,MAAMG,CAAM,GAC9C,MAAO,GAAGC,CAAK,KAAKC,CAAK,GAC3B,CAAC,EACA,KAAK,EAAE,EAEhB,OAAAhB,EAAS;AAAA;AAAA;AAAA,sCAGyBF,EAAS,CAAC,CAAC,KAAKA,EAAS,CAAC,CAAC;AAAA,oCAC7BA,EAAS,CAAC,CAAC;AAAA,WACpCe,CAAsB;AAAA;AAAA;AAAA,OAItB,IAAI1B,EAAea,CAAM,CAClC,CAKU,oBAAuD,CAC/D,IAAMT,EAA2C,CAAC,EAC9CL,EAAW,aACfK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAQrC,EACDD,EAAW,iBACXK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAOnC,EACHD,EAAW,iBACXK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAOnC,EACHD,EAAW,iBACXK,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OASnC,EACHD,EAAW,gBACX,IAAM+
B,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,OAAAF,EAAOL,CAAQ,EAAI,IAAIC,EAAe;AAAA;AAAA,qBAErB8B,EAAK,SAAS;AAAA,UACzB,EACC1B,CACT,CAKU,2BAA8D,CACtE,IAAMA,EAA2C,CAAC,EAC5CH,EAAe,KAAK,QAAQ,oBAClC,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAAC8B,EAAa,IAAM,CAC9D,IAAMC,EAAc,KAAK,QAAQ,oBAAoB,CAAC,EAChDjC,EAAWkC,GAA2CF,CAAW,EACnEC,EAAY,SACd5B,EAAOL,CAAQ,EAAI,KAAK,0BAA0BA,EAAUgC,EAAaC,CAAW,EAEpF5B,EAAOL,CAAQ,EAAI,KAAK,4BAA4BA,EAAUgC,EAAaC,CAAW,EAGxF,IAAME,EAAmBC,GAAsDJ,CAAW,EACtFC,EAAY,cAAc,QAAU/B,EAAa,cAAc,SAC7D+B,EAAY,SACd5B,EAAO8B,CAAgB,EACnB,KAAK,+BAA+BA,EAAkBF,EAAa/B,EAAc8B,CAAW,EAEhG3B,EAAO8B,CAAgB,EACnB,KAAK,iCAAiCA,EAAkBF,EAAa/B,EAAc8B,CAAW,EAGxG,CAAC,EAEM3B,CACT,CAKU,+BACNL,EAAkBiC,EAA4B/B,EAA6BmC,EAA8B,CAC3G,IAAMC,EAAUL,EAAY,cACtB9B,EAAWD,EAAa,cAExBqC,EAAiBL,GADPG,CACyD,EAEnEG,EAASF,EAAQ,OACjBG,EAAUtC,EAAS,OAEnBuC,EAAgBC,GAAc,iBAAiBL,EAASnC,CAAQ,EAEhEyC,EAAOC,GAAkBJ,CAAO,EAChCK,EAAWL,EAAUD,EACvBO,EACEC,EAASC,GAAc,EAEzBT,IAAW,EACbO,EAAgB,GACPN,EAAU,GAAKC,EAAc,QAAU,EAChDK,EAAgB,cAEhBA,EAAgBL,EAAc,IAAIQ,GAAK,UAAUF,EAAOE,EAAIJ,CAAQ,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EAEzF,IAAIK,EAAwB,GACxBV,EAAU,GAAKD,EAAS,EAC1BW,EAAwB,SAExBA,EAAwBb,EAAQ,IAAI,CAACc,EAAI3B,IAAM,UAAUuB,EAAOvB,EAAIqB,CAAQ,CAAC,EAAE,EAAE,KAAK,IAAI,EAG5F,IAAIO,EAAS,sBAEPC,EADSC,EAAU,KAAKjB,CAAO,IACJ,EAE3BkB,EADUD,EAAU,KAAKpD,CAAQ,IACJ,EAEnC,GAAIqC,IAAW,GAAK,CAACc,GAAiB,CAACE,EACrCH,EAAS;AAAA;AAAA,gBAGAC,GAAiB,CAACE,EACvBf,IAAY,EACdY,EAAS;AAAA;AAAA,UAITA,EAAS;AAAA;AAAA,kBAIFX,EAAc,OAAQ,CAC/B,IAAMe,EAAOjB,EAAS,EAChBkB,EAAOlB,EAAS,EAElBE,EAAc,QAAQe,CAAI,EAAI,IAAMf,EAAc,QAAQgB,CAAI,EAAI,GACpEL,EAAS,8BACAX,EAAc,QAAQe,CAAI,EAAI,GACvCJ,EAAS,2EAEAX,EAAc,QAAQgB,CAAI,EAAI,KACvCL,EAAS,+CAEb,CAEA,IAAMM,EAAsB;AAAA,+BACDX,EAAOP,EAAU,CAAC,CAAC;AAAA,iBACjCO,EAAOP,EAAU,CAAC,CAAC,aAAaO,EAAOP,EAAU,CAAC,CAAC;AAAA,iBACnDO,EAAOP,EAAU,CAAC,CAAC;AAAA,QAE1B3B,EAAS;AAAA,aACNd,CAAQ;AAAA,UACX4C,CAAI;AAAA,UACJe,CAAmB;AAAA,UACnBZ,CAAa;AAAA,6BACMR,CAAc,IAAIY,CAAqB;AAAA,UAC1DE,CAAM;AAAA;AAAA,MAGZ,OAAO,IAAIpD,EAAea,EAAQ,CAAC,6BAA6B,CAAC,CACnE,CAKU,iCACNd,EAAkBiC,EAA4B/B,EAA6BmC,EAA8B,CAC3G,IAAMjC,EAAc,CAACF,EAAa,MAAOA,EAAa,MAAM,EACtD0D,EAAa,CAAC3B,EAAY,MAAOA,EAAY,MAAM,EACnDO,EAASP,EAAY,cAAc,OACnCQ,EAAUvC,EAAa,cAAc,OACrCoC,EAAUL,EAAY,cACtB9B,EAAWD,EAAa,cACxBqC,EAAiBL,GAA2CG,CAAI,EAEtE,GAAIG,IAAWC,GAAWzB,GAAU,YAAY4C,EAAYxD,CAAW,EAAG,CACxE,IAAMU,EAAS;AAAA,kBACHd,CAAQ;AAAA,mCACSqC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,IAAM8B,EAAOC,GAAkBJ,CAAO,EAChCC,EAAgBC,GAAc,iBAAiBL,EAASnC,CAAQ,EAChE2C,EAAWL,EAAUD,EACvBO,EACEC,EAASC,GAAc,EAEzBT,IAAW,EACbO,EAAgB,GACPN,EAAU,GAAKC,EAAc,QAAU,EAChDK,EAAgB,cAEhBA,EAAgBL,EAAc,IAAIQ,GAAK,UAAUF,EAAOE,EAAIJ,CAAQ,CAAC,OAAO,EAAE,KAAK;AAAA,CAAI,EAEzF,IAAIK,EAAwB,GACxBV,EAAU,GAAKD,EAAS,EAC1BW,EAAwB,SAExBA,EAAwBlB,EAAY,cAAc,IAAI,CAACmB,EAAI3B,IAAM,UAAUuB,EAAOvB,EAAIqB,CAAQ,CAAC,EAAE,EAAE,KAAK,IAAI,EAE9G,IAAMhC,EAAS;AAAA,gBACHd,CAAQ;AAAA,YACZ4C,CAAI;AAAA,YACJG,CAAa;AAAA,mBACNR,CAAc,IAAIY,CAAqB;AAAA;AAAA,QAGtD,OAAO,IAAIlD,EAAea,EAAQ,CAAC,6BAA6B,CAAC,CACnE,CAKU,0BAA0Bd,EAAkBqC,EAAcJ,EAA4C,CAC9G,OAAQA,EAAY,cAAc,OAAQ,CACxC,IAAK,GACH,OAAO,KAAK,uBAAuBjC,EAAUqC,CAAI,EACnD,IAAK,GACH,OAAO,KAAK,mBAAmBrC,EAAUqC,EAAMJ,CAAW,EAC5D,IAAK,GACH,OAAO,KAAK,mBAAmBjC,EAAUqC,EAAMJ,CAAW,EAC5D,IAAK,GACH,OAAO,KAAK,mBAAmBjC,EAAUqC,EAAMJ,CAAW,EAC5D,QACE,OAAO,KAAK,mBAAmBjC,EAAUqC,EAAMJ,CAAW,CAC9D,CACF,CAKU,4BAA4BjC,EAAkBqC,EAAcJ,EAA4C,CAChH,IAAMlB,EAAQkB,EAAY,cAC1B,OAAQlB,EAAM,OAAQ,CACpB,IAAK,GACH,OAAO,KAAK,yBAAyBf,EAAUqC,EAAMJ,CAAW,EAClE,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAq
BjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,IAAK,GACH,OAAO,KAAK,qBAAqBjC,EAAUqC,EAAMJ,CAAW,EAC9D,QAEE,MAAM,IAAI,MAAM,yBAAyBlB,EAAM,MAAM,IAAI,CAC7D,CACF,CAKU,uBAAuBf,EAAkBqC,EAA8B,CAC/E,IAAMN,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAC7CO,EAAS;AAAA,iBACFd,CAAQ;AAAA,qBACJ+B,EAAK,SAAS,IAAIM,CAAI;AAAA;AAAA,UAGvC,OAAO,IAAIpC,EAAea,CAAM,CAClC,CAKU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMrB,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDpB,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CmB,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAO7CO,EALgB,QAAQd,CAAQ;AAAA;AAAA,QAElCa,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC;AAAA,eAChCkB,EAAK,SAAS,IAAIM,CAAI;AAAA,OAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA4B,CAAC,CAClE,CAKU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMlB,EAAQkB,EAAY,cACpBrB,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDF,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAC7CsD,EAAUjD,EAAS,CAAC,EACpBkD,EAAUlD,EAAS,CAAC,EAE1B,GAAIA,GAAY,MAAQI,GAAU,YAAYD,EAAOH,CAAQ,EAAG,CAC9D,IAAMmD,EAAgB,QAAQ/D,CAAQ;AAAA,qDACS8D,CAAO,OAAOD,CAAO;AAAA,iBACzD9B,EAAK,SAAS,IAAIM,CAAI;AAAA,SAGjC,OAAO,IAAIpC,EAAe8D,CAAa,CACzC,CACA,IAAMlD,EAAiBD,EACjBoD,EAAe,KAAK,KAAKjD,EAAM,CAAC,EAAI,CAAC,EAKrCD,EAJgB,QAAQd,CAAQ;AAAA,iCACTa,EAAe,CAAC,CAAC,KAAKA,EAAe,CAAC,CAAC,KAAKmD,CAAY;AAAA,eAC1EjC,EAAK,SAAS,IAAIM,CAAI;AAAA,OAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA4B,CAAC,CAClE,CAKU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMlB,EAAQkB,EAAY,cACpBrB,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDpB,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CmB,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAEnD,GAAIQ,EAAM,CAAC,IAAM,EAAG,CAClB,IAAMkD,EAAgBlD,EAAM,MAAM,CAAC,EAC7BmD,EAAW,CAAC,EAAG,CAAC,EAChBC,EAAgBC,GAAkBrD,EAAOkD,CAAa,EACtDI,EAAS,CAAC,IAAK,MAAO,KAAK,EAE3BC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAC/B,IAAMI,EAAiB,KAAK,0BAA0BvE,EAAUqC,EAAMiC,CAAc,EAK9ExD,EAJgB,GAAGyD,EAAe,WAAW;AAAA,aAC5CvE,CAAQ;AAAA,iBACJA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA,UAG1D,OAAO,IAAIjE,EAAea,EAAQyD,EAAe,YAAY,CAC/D,CACA,IAAMV,EAAUhD,EAAe,CAAC,EAC1BiD,EAAUjD,EAAe,CAAC,EAE1BmD,EAAe,KAAK,KAAKjD,EAAM,CAAC,EAAI,CAAC,EACrCG,EAAgB8C,EAAe,KAAK,KAAKjD,EAAM,CAAC,EAAI,CAAC,EAMrDD,EAJgB,QAAQd,CAAQ;AAAA;AAAA,UAEhC8D,CAAO,KAAKD,CAAO,KAAK3C,CAAa,KAAK8C,CAAY;AAAA,eACjDjC,EAAK,SAAS,IAAIM,CAAI,UAEjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA4B,CAAC,CAClE,CAIU,mBAAmBd,EAAkBqC,EAAcJ,EAA4C,CACvG,IAAMlB,EAAQkB,EAAY,cACpBV,EAAOR,EAAM,OACbH,EAAW,CAACqB,EAAY,MAAOA,EAAY,MAAM,EACjDF,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EAE7CM,EAAiB,CAACD,EAAS,CAAC,EAAGA,EAAS,CAAC,CAAC,EAC1CiD,EAAUhD,EAAe,CAAC,EAC1BiD,EAAUjD,EAAe,CAAC,EAC1BmD,EAAe,KAAK,KAAKjD,EAAMQ,EAAO,CAAC,EAAI,CAAC,EAC9CL,EAAgB8C,EAAe,KAAK,KAAKjD,EAAMQ,EAAO,CAAC,EAAI,CAAC,EAC5D8C,EAAS,0BACTI,EAAQ,OAAOvD,CAAa,kBAAkB8C,CAAY,eAC9D,QAAS1C,EAAI,EAAGA,EAAIC,EAAO,EAAGD,IAC5B+C,EAAS,QAAQ/C,CAAC,KAAO+C,EACzBnD,GAAiBH,EAAMQ,EAAOD,EAAI,CAAC,EACnCmD,EAAQ,IAAInD,CAAC,MAAMJ,CAAa,MAAQuD,EAS1C,IAAM3D,EAPgB,QAAQd,CAAQ,IAAIqE,CAAM;AAAA,oBAChCI,CAAK;AAAA,2BACEX,CAAO;AAAA,kCACAA,CAAO;AAAA,qDACYA,CAAO,KAAKD,CAAO;AAAA,eACzD9B,EAAK,SAAS,IAAIM,CAAI;AAAA,OAGjC,OAAO,IAAIpC,EAAea,CAAM,CAClC,CAKU,yBAAyBd,EAAkBqC,EAAcJ,EAA4C,CAC7G,GAAM,CAAC4B,EAASC,CAAO,EAAI,CAAC7B,EAAY,MAAOA,EAAY,MAAM,EACjE,GAAI4B,IAAY,GAAKC,IAAY,EAAG,CAClC,IAAMhD,EAAS;AAAA,kBACHd,CAAQ;AAAA,mCACSqC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,IAAMA,EAAS;AAAA,gBACHd,CAAQ;AAAA,uBACDqC,CAAI,gCAAgCwB,CAAO,KAAKC,CAAO;AAAA,iCAC7CD,CAAO,KAAKC,CAAO,YAAYzB,CAAI;AAAA,iCACnCA,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAKU,qBAAqBd,EAAkBqC,EAAcJ,EAA4
C,CACzG,IAAMyC,EAAQzC,EAAY,MACpB0C,EAAQ1C,EAAY,OAE1B,GAAI0C,IAAU,GAAKD,IAAU,EAAG,CAC9B,IAAM5D,EAAS;AAAA,gBACLd,CAAQ;AAAA,iCACSqC,CAAI;AAAA;AAAA,QAG/B,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,GAAI6D,IAAU,EAAG,CACf,IAAM7D,EAAS;AAAA,kBACHd,CAAQ;AAAA,oDAC0B0E,CAAK;AAAA,mCACtBrC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CACA,GAAI4D,IAAU,EAAG,CACf,IAAM5D,EAAS;AAAA,kBACHd,CAAQ;AAAA,yDAC+B2E,CAAK;AAAA,mCAC3BtC,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CACA,IAAMA,EAAS;AAAA,gBACHd,CAAQ;AAAA,iCACS0E,CAAK,KAAKC,CAAK;AAAA,iCACftC,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,yBAA0B,2BAA2B,CAAC,CAC3F,CAMU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cAGpBrB,EAAW,CAACqB,EAAY,OAAQA,EAAY,KAAK,EAEvD,GAAIrB,GAAY,MAAQI,GAAU,YAAYD,EAAOH,CAAQ,EAAG,CAC9D,IAAMiD,EAAUjD,EAAS,CAAC,EACpBkD,EAAUlD,EAAS,CAAC,EACpBE,EAAS;AAAA,kBACHd,CAAQ;AAAA,yDAC+B6D,CAAO,OAAOC,CAAO;AAAA,mCAC3CzB,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,GAAM,CAAC,SAAA8D,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EACrDkD,EAAgBW,EACtB,GAAIX,EAAc,OAASlD,EAAM,OAAQ,CACvC,IAAMoD,EAAgBC,GAAkBrD,EAAOkD,CAAa,EAEtDK,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAE/B,IAAME,EAAS,CAAC,MAAO,KAAK,EACtBvD,EAAS;AAAA,YACT,KAAK,4BAA4Bd,EAAUqC,EAAMiC,CAAc,EAAE,WAAW;AAAA,kBACtEtE,CAAQ;AAAA,qBACLA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA;AAAA,UAG9D,OAAO,IAAIjE,EAAea,EAAQ,CAAC,2BAA2B,CAAC,CACjE,CAEA,IAAM+C,EAAUjD,EAAS,CAAC,EACpBkD,EAAUlD,EAAS,CAAC,EAC1B,GAAIkD,IAAY,EAAG,CACjB,IAAMhD,EAAS;AAAA,kBACHd,CAAQ;AAAA,yBACDqC,CAAI,gCAAgCwB,CAAO,KAAKC,CAAO;AAAA,sDAC1BzB,CAAI,WAAWtB,EAAM,CAAC,CAAC;AAAA,kDAC3B8C,CAAO;AAAA,mCACtBxB,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA6B,4BAA4B,CAAC,CAC/F,CAEA,GAAI+C,IAAY,EAAG,CACjB,IAAM/C,EAAS;AAAA,kBACHd,CAAQ;AAAA,yBACDqC,CAAI,gCAAgCwB,CAAO,KAAKC,CAAO;AAAA,sDAC1BzB,CAAI,WAAWtB,EAAM,CAAC,CAAC;AAAA,6CAChC+C,CAAO;AAAA,mCACjBzB,CAAI;AAAA;AAAA,UAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA6B,4BAA4B,CAAC,CAC/F,CAEA,IAAMA,EAAS;AAAA,gBACHd,CAAQ;AAAA,8BACMe,EAAM,CAAC,CAAC;AAAA,iCACL8C,CAAO,KAAKC,CAAO;AAAA,iCACnBzB,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAMU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpB6C,EAAU/D,EAAM,CAAC,EAAIA,EAAM,CAAC,EAC5BgE,EAAUhE,EAAM,CAAC,EAEjB,CAAC,SAAA6D,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EACrDkD,EAAgBW,EACtB,GAAIX,EAAc,OAASlD,EAAM,OAAQ,CACvC,IAAMoD,EAAgBC,GAAkBrD,EAAOkD,CAAa,EACtDI,EAAS,CAAC,QAAS,MAAO,KAAK,EAE/BC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAC/B,IAAMa,EAAU,KAAK,4BAA4BhF,EAAUqC,EAAMiC,CAAc,EAEzEW,EAAUf,EAAS,QAAQ,EAC3BpD,EAAS;AAAA,YACTkE,EAAQ,WAAW;AAAA,kBACbhF,CAAQ;AAAA,qBACLA,CAAQ,IAAIwE,GAAkBH,EAAQY,CAAO,CAAC;AAAA;AAAA,UAG7D,OAAO,IAAIhF,EAAea,EAAQkE,EAAQ,YAAY,CACxD,CAEA,IAAMnB,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,kBACDd,CAAQ;AAAA;AAAA,kCAEQ8E,CAAO,YAAYC,CAAO;AAAA,mCACzBlB,CAAO,KAAKC,CAAO;AAAA,mCACnBzB,CAAI;AAAA;AAAA,QAGnC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAMU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpBiD,EAAUnE,EAAM,CAAC,EACjBgE,EAAUhE,EAAM,CAAC,EAAImE,EACrBJ,EAAU/D,EAAM,CAAC,EAAIgE,EAsBrBlB,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,gBACHd,CAAQ;AAAA,8BACM8E,CAAO,YAAYC,CAAO;AAAA,yBAC/BG,CAAO;AAAA,iCACCrB,CAAO,KAAKC,CAAO;AAAA,iCACnBzB,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,yBAA0B,2BAA2B,CAAC,CAC3F,CAKU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpBkD,EAAUpE,EAAM,CAAC,EACjBmE,EAAUnE,EAAM,CAAC,EAAIoE,EACrBJ,EAAUhE,EAAM,CAAC,EAAImE,EACrBJ,EAAU/D,EAAM,CAAC,EAAIg
E,EAErB,CAAC,SAAAH,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EAC3D,GAAI6D,EAAS,OAAS7D,EAAM,OAAQ,CAClC,IAAMoD,EAAgBC,GAAkBrD,EAAO6D,CAAQ,EACjDP,EAAS,CAAC,MAAO,MAAO,QAAS,SAAU,QAAQ,EAEnDC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAE/B,IAAMrD,EAAS;AAAA,YACT,KAAK,4BAA4Bd,EAAUqC,EAAMiC,CAAc,EAAE,WAAW;AAAA,kBACtEtE,CAAQ;AAAA,qBACLA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA;AAAA,UAG9D,OAAO,IAAIjE,EAAea,EAAQ,CAAC,4BAA6B,wBAAwB,CAAC,CAC3F,CAEA,IAAM+C,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,gBACHd,CAAQ;AAAA,8BACM8E,CAAO,YAAYC,CAAO,cAAcG,CAAO;AAAA,qBACxDC,CAAO;AAAA,iCACKtB,CAAO,KAAKC,CAAO;AAAA,iCACnBzB,CAAI;AAAA;AAAA,QAGjC,OAAO,IAAIpC,EAAea,EAAQ,CAAC,4BAA6B,wBAAwB,CAAC,CAC3F,CAKU,qBAAqBd,EAAkBqC,EAAcJ,EAA4C,CACzG,IAAMlB,EAAQkB,EAAY,cACpBmD,EAAUrE,EAAM,CAAC,EACjBoE,EAAUpE,EAAM,CAAC,EAAIqE,EACrBF,EAAUnE,EAAM,CAAC,EAAIoE,EACrBJ,EAAUhE,EAAM,CAAC,EAAImE,EACrBJ,EAAU/D,EAAM,CAAC,EAAIgE,EAErB,CAAC,SAAAH,EAAU,SAAAV,CAAQ,EAAIW,GAAa9D,CAAiB,EAC3D,GAAI6D,EAAS,OAAS7D,EAAM,OAAQ,CAClC,IAAMoD,EAAgBC,GAAkBrD,EAAO6D,CAAQ,EACjDP,EAAS,CAAC,MAAO,MAAO,QAAS,SAAU,SAAU,QAAQ,EAE7DC,EAAgC,KAAK,MAAM,KAAK,UAAUrC,CAAW,CAAC,EAC5EqC,EAAe,cAAgBH,EAE/B,IAAMrD,EAAS;AAAA,cACP,KAAK,4BAA4Bd,EAAUqC,EAAMiC,CAAc,EAAE,WAAW;AAAA,oBACtEtE,CAAQ;AAAA;AAAA,uBAELA,CAAQ,IAAIwE,GAAkBH,EAAQH,CAAQ,CAAC;AAAA;AAAA,YAGhE,OAAO,IAAIjE,EAAea,EAAQ,CAAC,4BAA6B,wBAAwB,CAAC,CAC3F,CAEA,IAAM+C,EAAU5B,EAAY,MACtB6B,EAAU7B,EAAY,OACtBnB,EAAS;AAAA,kBACDd,CAAQ;AAAA;AAAA,gCAEM8E,CAAO,YAAYC,CAAO,cAAcG,CAAO;AAAA,uBACxDC,CAAO,eAAeC,CAAO;AAAA,mCACjBvB,CAAO,KAAKC,CAAO;AAAA,mCACnBzB,CAAI;AAAA;AAAA,UAGnC,OAAO,IAAIpC,EACPa,EAAQ,CAAC,yBAA0B,4BAA6B,4BAA4B,CAAC,CACnG,CAQU,OAA0C,CAClD,IAAMuC,EAAS,KAAK,QAAQ,oBACtB9B,EAAO8B,EAAO,MAAM,OACpB7B,EAAU6B,EAAO,QACjBgC,EAAShC,EAAO,MAChBiC,EAASjC,EAAO,OAEhBkC,EAAe,CAAC,EACtB,QAAS9D,EAAI,EAAGA,EAAIF,EAAO,EAAG,EAAEE,EAC9B8D,EAAa,KAAK;AAAA,YACZ9D,CAAC,gBAAgBD,EAAQC,CAAC,CAAC,GAAG,EACpC8D,EAAa,KAAK;AAAA,sBACF9D,CAAC,OAAOD,EAAQC,CAAC,CAAC,GAAG,EAEvC8D,EAAa,KAAK;AAAA,YACVhE,EAAO,CAAC,aAAa,EAC7B,IAAMiE,EAAO;AAAA,6CAC4BjE,CAAI;AAAA,iDACA8D,CAAM,KAAKC,CAAM;AAAA,UACxDC,EAAa,KAAK,EAAE,CAAC;AAAA;AAAA,yCAEUhE,CAAI;AAAA,UACnCgE,EAAa,KAAK,EAAE,CAAC;AAAA;AAAA,MAG3B,MAAO,CAAC,MAAO,IAAItF,EAAeuF,EAAM,CAAC,4BAA4B,CAAC,CAAC,CACzE,CAOU,WAA8C,CACtD,IAAMnF,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACgC,EAAMZ,IAAM,CACvD,IAAMgE,EAAS,KAAK,QAAQ,oBAAoBhE,CAAC,EAE3CF,GADQkE,EAAO,cAAc,OAAS,EAAIA,EAAO,cAAgBA,EAAO,OAC3D,OACfzF,EAAW,IAAIqC,CAAI,GACvBhC,EAAOL,CAAQ,EAAI,IAAIC,EACnB,KAAK,mBAAmBoC,EAAMd,EAAMkE,EAAO,MAAOA,EAAO,OAAQ,EAAK,EACtE,CAAC,6BAA6BzF,CAAQ,GAAI,6BAA8B,2BAA2B,CAAC,EACxGA,EAAWA,EAAW,KACtBK,EAAOL,CAAQ,EAAI,IAAIC,EACnB,KAAK,mBAAmBoC,EAAMd,EAAMkE,EAAO,MAAOA,EAAO,OAAQ,EAAI,EACrE,CAAC,6BAA6BzF,CAAQ,GAAI,6BAA8B,2BAA2B,CAAC,CAC1G,CAAC,EACMK,CACT,CAQU,mBAAmBqF,EAAiBnE,EAAcoE,EAAeC,EAAgBC,EAChF,CACT,IAAIxD,EAAO,IAAIqD,CAAO,GAClBG,IACFxD,EAAOA,EAAO,MAEhB,IAAMN,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,MAAO;AAAA,gBACK8B,CAAI,UAAUd,CAAI;AAAA,wCACMc,CAAI;AAAA,iDACKsD,CAAK,KAAKC,CAAM;AAAA,0CACvB7D,EAAK,SAAS,IAAI2D,CAAO;AAAA;AAAA;AAAA,SAIjE,CASU,mBAAmBA,EAAiBnE,EAAcoE,EAAeC,EAAgBC,EAChF,CACT,IAAIxD,EAAO,IAAIqD,CAAO,QAClBG,IACFxD,EAAOA,EAAO,MAEhB,IAAMN,EAAOxB,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,MAAO;AAAA,eACI8B,CAAI,UAAUd,CAAI;AAAA,yCACQmE,CAAO;AAAA,iDACCC,CAAK,KAAKC,CAAM;AAAA,mBAC9C7D,EAAK,SAAS,IAAI2D,CAAO;AAAA;AAAA,SAG1C,CACF,ICzzCA,IASaI,GATbC,GAAAC,EAAA,kBAGAC,KAMaH,GAAN,MAAMI,UAAwBC,EAAQ,CAC3C,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CAAC,GAAG,KAAK,cAAc,EAAG,GAAG,KAAK,cAAc,CAAC,CAC1D,CACA,gBAA2C,CACzC,MAAO,CAAC,CACV,CACU,eAAkD,CAC1D,MAAO,CACL,OAAQ
,IAAIC,EAAe;AAAA;AAAA;AAAA,SAGxB,CACL,CACF,CACU,eAAkD,CAC1D,MAAO,CACL,OAAQ,IAAIA,EAAe;AAAA;AAAA;AAAA,SAGxB,CACL,CACF,CAKU,aAAgD,CACxD,IAAMC,EAAaJ,EAAgB,eAAe,EAAI,uBAAyB,GAC/E,MAAO,CACL,OAAQ,IAAIG,EAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAYvBC,CAAU;AAAA;AAAA;AAAA;AAAA,SAIX,CACL,CACF,CAKU,aAAgD,CACxD,IAAMA,EAAaJ,EAAgB,eAAe,EAAI,uBAAyB,GAC/E,MAAO,CACL,OAAQ,IAAIG,EAAe;AAAA;AAAA;AAAA,YAGrBC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOb,CACL,CACF,CAKA,OAAO,gBAA0B,CAC/B,IAAMC,EAAI,IAAI,YAAY,CAAC,EACrBC,EAAI,IAAI,YAAYD,CAAC,EACrBE,EAAI,IAAI,WAAWF,CAAC,EAE1B,GADAC,EAAE,CAAC,EAAI,WACHC,EAAE,CAAC,IAAM,IACX,MAAO,GAET,GAAIA,EAAE,CAAC,IAAM,IACX,MAAO,GAET,MAAM,IAAI,MAAM,oBAAoB,CACtC,CACF,IClGA,IAWaC,GAXbC,GAAAC,EAAA,kBAGAC,KACAC,KAOaJ,GAAN,cAA+BK,EAAQ,CAC5C,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CAAC,GAAG,KAAK,aAAa,EAAG,GAAG,KAAK,gBAAgB,CAAC,CAC3D,CACA,gBAA2C,CACzC,MAAO,CAAC,CACV,CACU,cAAiD,CACzD,IAAMC,EAAOC,GAAQ,KAAK,QAAQ,UAAU,OAAO,EACnD,MAAO,CACL,aAAc,IAAIC,EACd;AAAA;AAAA,cAEIF,EAAK,MAAM;AAAA;AAAA,UAGf,CAAC,iBAAiB,CAAC,CACzB,CACF,CACU,iBAAoD,CAC5D,MAAO,CACL,gBAAiB,IAAIE,EACjB;AAAA;AAAA;AAAA;AAAA,UAKA,CAAC,iBAAiB,CAAC,CACzB,CACF,CACF,IC5CA,IASaC,GATbC,GAAAC,EAAA,kBAGAC,KAMaH,GAAN,MAAMI,UAA0BC,EAAQ,CAC7C,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,cAAiD,CAC/C,MAAO,CACL,GAAG,KAAK,WAAW,EACnB,GAAG,KAAK,iBAAiB,EACzB,GAAG,KAAK,gBAAgB,EACxB,GAAG,KAAK,gBAAgB,EACxB,GAAG,KAAK,iBAAiB,CAC3B,CACF,CACA,gBAAiB,CACf,MAAO,CAAC,CACV,CACU,YAA+C,CACvD,IAAMC,EAAa,KAAK,QAAQ,oBAAoB,MAAM,OACpDC,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,cAClD,GAAIC,EAAM,QAAUJ,EAAY,CAC9B,IAAMK,EAAOD,EAAM,OACbE,EAAYN,EAAaK,EACzBE,EAAW,gBAAgBL,CAAI,GACjCM,EAAQ,GACZ,QAASL,EAAI,EAAGA,EAAIE,EAAM,EAAEF,EAC1BK,GAAS;AAAA,wBACKL,CAAC,qCAAqCG,EAAYH,CAAC,OAAOC,EAAMD,CAAC,CAAC;AAAA,YAGlF,IAAMM,EAAO;AAAA,eACNF,CAAQ,wBAAwBP,CAAU,0BAA0BK,CAAI;AAAA,YAC3EG,CAAK;AAAA;AAAA,UAGTP,EAAOM,CAAQ,EAAI,IAAIG,EAAeD,CAAI,CAC5C,CACF,CAAC,EACMR,CACT,CACU,kBAAqD,CAC7D,IAAMD,EAAa,KAAK,QAAQ,oBAAoB,MAAM,OACpDC,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAClD,GAAI,EAAEC,EAAM,OAAS,GAAKA,EAAM,OAASJ,GAAa,CACpD,IAAMK,EAAOD,EAAM,OACbE,EAAYN,EAAaK,EACzBE,EAAW,sBAAsBL,CAAI,GACvCM,EAAQ,GACZ,QAASL,EAAI,EAAGA,EAAIE,EAAO,EAAG,EAAEF,EAC9BK,GAAS;AAAA,wBACKL,CAAC,qCAAqCG,EAAYH,CAAC,OAAOC,EAAMD,CAAC,CAAC;AAAA,YAGlF,IAAMM,EAAO;AAAA,eACNF,CAAQ,uBAAuBP,CAAU,0BAA0BK,CAAI;AAAA,YAC1EG,CAAK;AAAA,wBACOH,EAAO,CAAC,sBAAsBL,EAAa,CAAC;AAAA,wBAC5CK,EAAO,CAAC,sBAAsBL,EAAa,CAAC;AAAA;AAAA,UAG5DC,EAAOM,CAAQ,EAAI,IAAIG,EAAeD,CAAI,CAC5C,CACF,CAAC,EACMR,CACT,CACU,iBAAoD,CAC5D,IAAMA,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAC5CQ,EAAU,KAAK,QAAQ,oBAAoBR,CAAC,EAAE,QAC9CE,EAAOD,EAAM,OACfG,EAAW,mBAAmBL,CAAI,GACtCD,EAAOM,CAAQ,EAAI,IAAIG,EAAeb,EAAkB,oBAAoBU,EAAUF,EAAMM,CAAO,CAAC,EACpGJ,EAAW,mBAAmBL,CAAI,KAClCD,EAAOM,CAAQ,EACX,IAAIG,EAAeb,EAAkB,oBAAoBU,EAAUF,EAAMM,EAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,CACzG,CAAC,EACMV,CACT,CACA,OAAO,oBAAoBC,EAAcG,EAAcM,EAAoC,CACzF,IAAIH,EAAQ,GACZ,QAAS,EAAIH,EAAO,EAAG,GAAK,EAAG,EAAE,EAC/BG,GAAS;AAAA,4BACa,CAAC,OAAOG,EAAQ,CAAC,CAAC;AAAA,UAG1C,MAAO;AAAA,YACCT,CAAI,gBAAgBG,CAAI;AAAA;AAAA,UAE1BG,CAAK;AAAA;AAAA;AAAA,OAIb,CACU,iBAAoD,CAC5D,IAAMP,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAC5CQ,EAAU,KAAK,QAAQ,oBAAoBR,CAAC,EAAE,QAC9CE,EAAOD,EAAM,OACfG,EAAW,mBAAmBL,CAAI,GACtCD,EAAOM,CAAQ,EAAI,IAAIG,EAAeb,EAAkB,sBAAsBU,EAAUF,EAAMM
,CAAO,CAAC,EACtGJ,EAAW,mBAAmBL,CAAI,KAClCD,EAAOM,CAAQ,EACX,IAAIG,EAAeb,EAAkB,sBAAsBU,EAAUF,EAAMM,EAAQ,MAAM,EAAE,QAAQ,CAAC,CAAC,CAC3G,CAAC,EACMV,CACT,CACA,OAAO,sBAAsBC,EAAcG,EAAcM,EAAoC,CAC3F,IAAMC,EAAe,CAAC,EACtB,QAAS,EAAI,EAAG,EAAIP,EAAO,EAAG,EAAE,EAC9BO,EAAa,KAAK;AAAA,gBACR,CAAC,gBAAgBD,EAAQ,CAAC,CAAC,GAAG,EACxCC,EAAa,KAAK;AAAA,4BACI,CAAC,OAAOD,EAAQ,CAAC,CAAC,GAAG,EAE7C,OAAAC,EAAa,KAAK;AAAA,gBACNP,EAAO,CAAC,aAAa,EAC1B;AAAA,aACEH,CAAI,gCAAgCG,CAAI;AAAA,UAC3CO,EAAa,KAAK,EAAE,CAAC;AAAA;AAAA,OAG7B,CACU,kBAAqD,CAC7D,IAAMX,EAA2C,CAAC,EAClD,YAAK,QAAQ,YAAY,WAAW,QAAQ,CAACC,EAAMC,IAAM,CACvD,IAAMC,EAAQ,KAAK,QAAQ,oBAAoBD,CAAC,EAAE,MAC5CE,EAAOD,EAAM,OACbG,EAAW,oBAAoBL,CAAI,GACrCW,EAAY,GAChB,QAASV,EAAI,EAAGA,EAAIE,EAAM,EAAEF,EAC1BU,GAAa;AAAA,gBACLV,CAAC,OAAOC,EAAMD,CAAC,CAAC,IAE1B,IAAMM,EAAO;AAAA,eACJF,CAAQ,8BAA8BF,CAAI;AAAA,sBACnCA,CAAI;AAAA,YACdQ,CAAS;AAAA,wBACGR,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAUtBJ,EAAOM,CAAQ,EAAI,IAAIG,EAAeD,CAAI,CAC5C,CAAC,EACMR,CACT,CACF,ICrKA,IAUaa,GAVbC,GAAAC,EAAA,kBAGAC,KAOaH,GAAN,cAAyBI,EAAQ,CACtC,YAAYC,EAAsB,CAChC,MAAMA,CAAO,CACf,CACA,gBAA2C,CACzC,MAAO,CAAC,CACV,CACA,cAAiD,CAC/C,MAAO,CAAC,GAAG,KAAK,mBAAmB,EAAG,GAAG,KAAK,QAAQ,EAAG,GAAG,KAAK,WAAW,EAAG,GAAG,KAAK,WAAW,CAAC,CACrG,CACU,oBAAuD,CAE/D,IAAMC,EADe,KAAK,QAAQ,oBACR,MAAM,OAC1BC,EAAmC,CAAC,IAAK,KAAM,IAAK,KAAM,IAAK,KAAM,IAAK,IAAI,EAC9EC,EAA2C,CAAC,EAClD,QAAWC,KAAQF,EAAQ,CACzB,IAAMG,EAAQ,GAAGD,CAAI,MACjBE,EAAkB,GACtB,QAASC,EAAI,EAAGA,EAAIN,EAAM,EAAEM,EAC1BD,GAAmB;AAAA,iBACVC,CAAC,KAAKL,EAAOE,CAAI,CAAC,QAAQG,CAAC;AAAA,YAGtC,IAAMC,EAAO;AAAA,eACJH,CAAK,YAAYJ,CAAI,mBAAmBA,CAAI;AAAA,YAC/CK,CAAe;AAAA;AAAA,UAGrBH,EAAOE,CAAK,EAAI,IAAII,EAAeD,CAAI,CACzC,CAEA,OAAOL,CACT,CACU,SAA4C,CAEpD,IAAMF,EADe,KAAK,QAAQ,oBACR,MAAM,OAC5BK,EAAkB,GACtB,QAAS,EAAI,EAAG,EAAIL,EAAM,EAAE,EAC1BK,GAAmB;AAAA,eACV,CAAC,WAAW,CAAC;AAAA,UAGxB,IAAME,EAAO;AAAA,6BACYP,CAAI,mBAAmBA,CAAI;AAAA,UAC9CK,CAAe;AAAA;AAAA,QAGrB,MAAO,CAAC,QAAS,IAAIG,EAAeD,CAAI,CAAC,CAC3C,CAEU,YAA+C,CAEvD,IAAMP,EADe,KAAK,QAAQ,oBACR,MAAM,OAC5BS,EAAQ;AAAA;AAAA,qBAEKT,CAAI;AAAA;AAAA;AAAA,UAIrB,QAAS,EAAI,EAAG,EAAIA,EAAO,EAAG,EAAE,EAC9BS,GAAS;AAAA,4BACa,CAAC;AAAA,gBACb,CAAC;AAAA,cAGbA,GAAS;AAAA;AAAA,gBAEGT,EAAO,CAAC;AAAA,UAEpB,IAAMO,EAAO;AAAA,kCACiBP,CAAI;AAAA,UAC5BS,CAAK;AAAA;AAAA,UAGX,MAAO,CAAC,WAAY,IAAID,EAAeD,CAAI,CAAC,CAC9C,CACU,YAA+C,CAEvD,IAAMP,EADe,KAAK,QAAQ,oBACR,MAAM,OAC5BS,EAAQ;AAAA;AAAA,sBAEMT,CAAI;AAAA;AAAA;AAAA,QAItB,QAAS,EAAI,EAAG,EAAIA,EAAO,EAAG,EAAE,EAC9BS,GAAS;AAAA,4BACa,CAAC;AAAA,uBACN,CAAC;AAAA,QAGpBA,GAAS;AAAA;AAAA,uBAEUT,EAAO,CAAC;AAAA,UAE3B,IAAMO,EAAO;AAAA,6BACYP,CAAI;AAAA,UACvBS,CAAK;AAAA;AAAA,MAGX,MAAO,CAAC,WAAY,IAAID,EAAeD,CAAI,CAAC,CAC9C,CACF,IChHA,IAUaG,GAVbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KACAC,KAEaP,GAAwE,CACnF,SAAYQ,GACZ,UAAaC,GACb,IAAOC,GACP,WAAcC,GACd,YAAeC,EAEjB,ICjBA,IAkBaC,GAlBbC,GAAAC,EAAA,kBAGAC,KACAC,KACAC,KACAC,KAYaN,GAAN,KAAuB,CAK5B,YACIO,EAAyBC,EAA0BC,EACnDC,EAAoC,CALxC,KAAS,KAAkC,CAAC,EAC5C,KAAS,8BAA6E,CAAC,EAKrF,KAAK,QAAU,IAAIC,GAAYJ,EAAWC,EAAaC,EAAqBC,CAAmB,EAG/F,OAAO,KAAKE,EAAY,EAAE,QAASC,GAAiB,CAClD,IAAMC,EAAM,IAAIF,GAAaC,CAAI,EAAE,KAAK,OAAO,EAC/C,KAAK,KAAKA,CAAI,EAAIC,CACpB,CAAC,EAGD,IAAMC,EAAM,KAAK,8BACjB,QAAWC,KAAW,KAAK,KAAM,CAE/B,IAAMC,EADM,KAAK,KAAKD,CAAO,EACH,aAAa,EACvC,QAAWE,KAAWD,EAAe,CACnC,IAAME,EAAMH,EAAU,IAAME,EACxBE,EACAL,EAAII,CAAG,GACTC,EAAcL,EAAII,CAAG,EACrBC,EAAY,YAAcH,EAAcC,CAAO,EAAE,cAEjDE,EAAc,IAAIC,GAAmBF,EAAKF,EAAcC,CAAO,EAAE,WAAW,EAC5EH,EAAII,CAAG,EAAIC,GAEb,IAAME,EAAeL,EAAcC,CAAO,EAAE,aAC5C,GAAII,EACF,QAASC,EAAI,EAAGA,EAAID,EAAa,OAAQ,EAAEC,EACzC,GAAKR,EAAIO,EAAaC,CAAC,CAAC,EAKtBH,EAAY,cAAcL,EAAIO,EAAaC,CA
AC,CAAC,CAAC,MALrB,CACzB,IAAMC,EAAO,IAAIH,GAAmBC,EAAaC,CAAC,CAAC,EACnDR,EAAIO,EAAaC,CAAC,CAAC,EAAIC,EACvBJ,EAAY,cAAcI,CAAI,CAChC,CAKN,CACF,CACF,CAEA,YAAqB,CACnB,IAAMhB,EAAc,KAAK,QAAQ,YAC7BiB,EAASjB,EAAY,aAGzB,OAAK,KAAK,QAAQ,YAAY,UAC5BiB,EAAS,GAAGA,CAAM;AAAA,QAChBC,GAAyB,KAAK,QAAQ,UAAU,QAAS,KAAK,QAAQ,oBAAoB,MAAM,MAAM,CAAC,IAG3GD,EAASE,GAAeF,CAAM,EAGvB,GAAGG,GAAsB,KAAK,QAAQ,UAAU,OAAO,CAAC;AAAA,MAC7D,KAAK,YAAYpB,EAAY,WAAYA,EAAY,SAAS,CAAC;AAAA,MAC/D,KAAK,WAAWiB,CAAM,CAAC;AAAA,MACvBA,CAAM,EACV,CAEU,WAAWI,EAAwB,CAC3C,IAAMC,EAAmB,KAAK,kCAAkCD,CAAM,EAEtE,GAAIC,EAAiB,SAAW,EAC9B,MAAO,GAGT,IAAIC,EAAW,GACf,QAASR,EAAI,EAAGA,EAAIO,EAAiB,OAAQ,EAAEP,EAC7C,GAAIO,EAAiBP,CAAC,EAAE,YACtBQ,GAAYD,EAAiBP,CAAC,EAAE,YAAc;AAAA,MAE9C,OAAM,IAAI,MAAM,8CAA8CO,EAAiBP,CAAC,EAAE,IAAI,EAAE,EAI5F,OAAOQ,CACT,CACQ,kCAAkCF,EAAsC,CAC9E,IAAMG,EAA8B,CAAC,EAErC,cAAO,KAAK,KAAK,6BAA6B,EAAE,QAAQC,GAAmB,CACzE,IAAMf,EAAUe,EAAgB,MAAM,GAAG,EAAE,CAAC,EACxCJ,EAAO,QAAQX,CAAO,IAAM,IAC9Bc,EAAM,KAAK,KAAK,8BAA8BC,CAAe,CAAC,CAElE,CAAC,EAEMC,GAA4B,mBAAmBF,CAAK,CAC7D,CAEU,YAAYG,EAAqBC,EAAoC,CAC7E,IAAMC,EAAyB,CAAC,EAChC,GAAIF,EACF,QAAWG,KAAWH,EACpBE,EAAa,KAAK,qBAAqBC,CAAO,GAAG,EAGrD,GAAIF,EACF,QAAWG,KAAYH,EACrBC,EAAa,KACT,WAAWE,EAAS,IAAI,IAAIA,EAAS,IAAI,GAAGA,EAAS,YAAc,IAAIA,EAAS,WAAW,IAAM,EAAE,GAAG,EAG9G,OAAOF,EAAa,KAAK;AAAA,CAAI,CAC/B,CACF,IClIA,IAsBaG,GAtBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAcaN,GAAN,KAAqB,CAK1B,YACWO,EAAqCC,EACrCC,EAA8C,CAD9C,cAAAF,EAAqC,eAAAC,EACrC,2BAAAC,EACT,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAuBC,EAA2B,CAC7E,KAAK,SAAS,MAAM,KAAM,sBAAsBF,EAAc,YAAY,MAAQ,gBAAgB,GAAI,IAAM,CAC1G,IAAMG,EAAK,KAAK,UAAU,GACpBC,EAAUJ,EAAc,QAC9BG,EAAG,WAAWC,CAAO,EACrB,GAAI,CACF,KAAK,WAAWF,CAAM,EACjB,KAAK,iBACR,KAAK,eAAeF,EAAc,eAAe,EAEnD,KAAK,aAAaA,EAAc,iBAAkBA,EAAc,YAAY,WAAa,CAAC,EAAGC,CAAM,CACrG,OAASI,EAAK,CACZ,MAAAC,GAAO,MAAM,iBAAkBN,EAAc,YAAY,YAAY,EAC/DK,CACR,CACA,KAAK,SAAS,MAAM,UAAW,mBAAoB,IAAM,CACvD,KAAK,UAAU,KAAK,CACtB,CAAC,CACH,EAAG,KAAK,SAAS,CACnB,CACA,SAAgB,CACV,KAAK,cACP,KAAK,UAAU,aAAa,KAAK,YAAY,EAE/C,KAAK,KAAK,QAAQE,GAAK,KAAK,UAAU,cAAcA,EAAE,OAAO,CAAC,CAChE,CACA,MAAMC,EAA0BC,EAAsCC,EAA8C,CAClH,OAAO,KAAK,SAAS,MAAM,UAAW,uBAAwB,IAAM,CAClE,IAAMC,EAAe,IAAIC,GAAiB,KAAK,UAAWJ,EAAaC,EAAqBC,CAAmB,EACzGG,EAAaF,EAAa,WAAW,EACrCP,EAAU,KAAK,QAAQS,CAAU,EAQvC,MAPiB,CACf,YAAAL,EACA,QAAAJ,EACA,iBAAkB,KAAK,oBACnBA,EAASO,EAAa,QAAQ,YAAY,WAAYA,EAAa,QAAQ,YAAY,SAAS,EACpG,gBAAiB,KAAK,mBAAmBP,CAAO,CAClD,CAEF,CAAC,CACH,CACU,QAAQU,EAAwC,CACxD,GAAI,CAAC,KAAK,aAAc,CACtBR,GAAO,QAAQ,kBAAmB,wDAAwD,EAC1F,IAAMS,EAAqBC,GAAsB,KAAK,UAAU,OAAO,EACvE,KAAK,aAAe,KAAK,UAAU,cAAcD,EAAoB,KAAK,UAAU,GAAG,aAAa,CACtG,CACIE,GAAI,OACNX,GAAO,QAAQ,kBAAmB;AAAA,EACtCQ,CAAgB;AAAA,CACjB,EAEG,IAAMI,EAAa,KAAK,UAAU,cAAcJ,EAAkB,KAAK,UAAU,GAAG,eAAe,EAC7FV,EAAU,KAAK,UAAU,cAAc,KAAK,aAAcc,CAAU,EAC1E,YAAK,UAAU,aAAaA,CAAU,EAC/Bd,CACT,CACA,WAAWe,EAAuB,CAChC,IAAMC,EAAQD,EAAG,MACXE,EAASF,EAAG,OAClBb,GAAO,QACH,kBACA,8CAA8Cc,CAAK,IAAIC,CAAM,WAAWF,EAAG,KAAK,UAAUA,EAAG,OAAO,IAAI,EAAE,EAC9G,KAAK,UAAU,kBAAkBA,EAAG,QAASC,EAAOC,CAAM,CAC5D,CACA,eAAeC,EAAiD,CAC9D,IAAMC,EAAiBD,EAAgB,SACjCE,EAAqBF,EAAgB,aAC3C,KAAK,UAAU,oBAAoBC,EAAgBC,CAAkB,EACrE,KAAK,gBAAkB,EACzB,CACA,aAAaC,EAA6CC,EAA8BC,EAC/E,CACP,IAAMxB,EAAK,KAAK,UAAU,GACtByB,EAAkB,EACtB,OAAW,CAAC,KAAAC,EAAM,KAAAC,EAAM,SAAAC,EAAU,YAAAC,CAAW,IAAKP,EAAkB,CAClE,IAAMQ,EAAQP,EAAU,KAAKQ,GAAKA,EAAE,OAASL,CAAI,GAAG,KACpD,GAAIC,IAAS,aAAe,CAACG,EAC3B,MAAM,IAAI,MAAM,aAAaJ,CAAI,8CAA8C,EAEjF,OAAQC,EAAM,CACZ,IAAK,YACH,KAAK,YAAYH,EAASC,CAAe,EAAG
G,EAAUH,CAAe,EACrEA,IACA,MACF,IAAK,QACCI,EACF7B,EAAG,WAAW4B,EAAUE,CAAiB,EAEzC9B,EAAG,UAAU4B,EAAUE,CAAe,EAExC,MACF,IAAK,MACCD,EACF7B,EAAG,WAAW4B,EAAUE,CAAiB,EAEzC9B,EAAG,UAAU4B,EAAUE,CAAe,EAExC,MACF,QACE,MAAM,IAAI,MAAM,4BAA4BH,CAAI,EAAE,CACtD,CACF,CACF,CACA,YAAYX,EAAiBgB,EAAqCC,EAAwB,CACxF,KAAK,UAAU,qBAAqBjB,EAAG,QAASiB,EAAUD,CAAa,CACzE,CACA,mBAAmB/B,EAAiD,CAClE,MAAO,CACL,SAAU,KAAK,kBAAkBA,EAAS,UAAU,EACpD,aAAc,KAAK,kBAAkBA,EAAS,cAAc,CAC9D,CACF,CACA,oBAAoBA,EAAuBiC,EAAqBX,EAClC,CAC5B,IAAMD,EAA8C,CAAC,EACrD,GAAIY,EACF,QAAWC,KAAWD,EACpBZ,EAAiB,KAAK,CAAC,KAAMa,EAAS,KAAM,YAAa,SAAU,KAAK,mBAAmBlC,EAASkC,CAAO,CAAC,CAAC,EAGjH,GAAIZ,EACF,QAAWa,KAAYb,EACrBD,EAAiB,KAAK,CAAC,GAAGc,EAAU,SAAU,KAAK,mBAAmBnC,EAASmC,EAAS,IAAI,CAAC,CAAC,EAGlG,OAAOd,CACT,CACA,mBAAmBrB,EAAuByB,EAAoC,CAE5E,IAAMW,EADK,KAAK,UAAU,GACL,mBAAmBpC,EAASyB,CAAI,EACrD,GAAIW,IAAc,KAChB,MAAM,IAAI,MAAM,WAAWX,CAAI,aAAa,EAE9C,OAAOW,CACT,CACA,kBAAkBpC,EAAuByB,EAAsB,CAG7D,OAFW,KAAK,UAAU,GACW,kBAAkBzB,EAASyB,CAAI,CAEtE,CACF,ICpLA,IAyBaY,GAzBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAmBaJ,GAAN,KAAqB,CAM1B,YACWK,EAAgCC,EAA8CC,EAC7EC,EAA8B,CAD/B,eAAAH,EAAgC,oBAAAC,EAA8C,cAAAC,EAC7E,YAAAC,EAJZ,KAAiB,YAAuE,IAAI,IAKtFA,EAAO,gBACT,KAAK,cAAgB,IAAI,IACzB,KAAK,aAAe,IAAI,IACxB,KAAK,cAAgB,IAAI,IAE7B,CACA,wBACIC,EAA2BC,EAAuBC,EAA0BC,EAAsB,CACpG,IAAMC,EAAkB,KAAK,cAAcJ,CAAQ,EAE7CK,EAAU,KAAK,UAAU,WAAWD,EAAiBH,EAAO,UAAY,EAAGE,CAAK,EACtF,GAAIF,EAAO,UAAYE,IAAU,EAC/B,MAAM,IAAI,MAAM,iBAAiB,EAEnC,IAAMG,EAAQL,EAAO,MACfM,EAASN,EAAO,OAElBO,EACAC,EACJ,GAAI,KAAK,OAAO,cAAe,CAC7BD,EAAM,GAAGF,CAAK,IAAIC,CAAM,IAAIF,EAAQ,MAAM,IAAIA,EAAQ,cAAc,IAAIA,EAAQ,WAAW,GAC3FI,EAAgB,KAAK,cAAc,IAAID,CAAG,EACrCC,IACHA,EAAgB,CAAC,EACjB,KAAK,cAAc,IAAID,EAAKC,CAAa,GAG3C,IAAMC,EAAe,KAAK,aAAa,IAAIF,CAAG,EAC9C,GAAIE,GAAgBA,EAAa,OAAS,EAAG,CAC3C,IAAMC,EAAUD,EAAa,IAAI,EACjC,OAAAD,EAAc,KAAKE,CAAO,EACtBR,IAAU,GACZ,KAAK,UAAU,cAAcQ,EAASL,EAAOC,EAAQF,EAAS,KAAK,cAAcL,EAAUE,CAAI,CAAE,EAE5FS,CACT,CACF,CAEAC,GAAO,QAAQ,iBAAkB,gCAAgCX,EAAO,KAAK,IAAIA,EAAO,MAAM,EAAE,EAChG,IAAMU,EAAU,KAAK,UAAU,gBAAgBL,EAAOC,EAAQF,EAAS,KAAK,cAAcL,EAAUE,CAAI,CAAC,EAEzG,OAAI,KAAK,OAAO,gBACdO,EAAe,KAAKE,CAAO,EAC3B,KAAK,cAAc,IAAIA,EAASH,CAAI,GAE/BG,CACT,CACA,YAAYE,EAAiBb,EAA2Bc,EAAsC,CAC5F,OAAKA,IACHA,EAAW,GAEN,KAAK,SAAS,MAAM,UAAW,6BAA8B,IAAM,CACxE,IAAMC,EAAWF,EAAG,MAAM,OAAO,CAACG,EAAGC,IAAMD,EAAIC,CAAC,EAAIH,EAC9CZ,EAAO,KAAK,UAAU,YACxBW,EAAG,QAASA,EAAG,MAAOA,EAAG,OAAQE,EAAU,KAAK,cAAcf,CAAQ,EAAGc,CAAS,EACtF,OAAO,KAAK,aAAad,EAAUE,CAAI,CACzC,CAAC,CACH,CACA,MAAM,iBAAiBW,EAAiBb,EAA2Bc,EAA+C,CAChH,IAAMI,EAASL,EAAG,OAAO,OAIzB,GAHKC,IACHA,EAAW,GAET,KAAK,YAAY,IAAII,CAAM,EAAG,CAChC,IAAMC,EAAc,KAAK,YAAY,IAAID,CAAM,EAC/C,OAAO,IAAI,QAA2BE,GAAWD,GAAa,KAAKC,CAAO,CAAC,CAC7E,CACA,OAAO,KAAK,SAAS,MAAM,UAAW,kCAAmC,SAAY,CACnF,KAAK,YAAY,IAAIF,EAAQ,CAAC,CAAC,EAC/B,IAAMH,EAAWF,EAAG,MAAM,OAAO,CAACG,EAAGC,IAAMD,EAAIC,CAAC,EAAIH,EAEpD,MAAM,KAAK,UAAU,sBAAsB,EAC3C,IAAMZ,EAAO,KAAK,UAAU,YACxBW,EAAG,QAASA,EAAG,MAAOA,EAAG,OAAQE,EAAU,KAAK,cAAcf,CAAQ,EAAGc,CAAS,EAChFO,EAAa,KAAK,aAAarB,EAAUE,CAAI,EAC7CiB,EAAc,KAAK,YAAY,IAAID,CAAM,EAC/C,YAAK,YAAY,OAAOA,CAAM,EAC9BC,GAAa,QAAQC,GAAWA,EAAQC,CAAU,CAAC,EAC5CA,CACT,CAAC,CACH,CACA,wBAAwBR,EAA+B,CACrD,OAAO,KAAK,SAAS,MAAM,UAAW,yCAA0C,IAAM,CACpF,IAAME,EAAWF,EAAG,MAAM,OAAO,CAACG,EAAGC,IAAMD,EAAIC,CAAC,EAC1Cf,EAAO,KAAK,UAAU,YAAYW,EAAG,QAASA,EAAG,MAAOA,EAAG,OAAQE,EAAW,EAAG,OAAQ,CAAC,EAChG,OAAO,IAAI,aAAab,EAAK,OAAQA,EAAK,WAAYa,CAAQ,CAChE,CAAC,CACH,CACA,eAAeO,EAA0BC,EAA+B,CACtE,IAAIf,EACJ,GAAI,KAAK,OAAO,gBACdA,EAAM,KAAK,cAAc,IAAIc,EAAY,OAAO,EAC5Cd,GAAK,CACHe,GACF,KAAK,cAAc,OAAOf,CAAG,EAE/B,IAAMC,EAAgB,KAAK,cAAc,IAAID,CAAG,EAChD,GAAIC,EAAe,CACjB,IAAMe,
EAAQf,EAAc,QAAQa,EAAY,OAAO,EACvD,GAAIE,IAAU,GAAI,CAChBf,EAAc,OAAOe,EAAO,CAAC,EAC7B,IAAId,EAAe,KAAK,aAAa,IAAIF,CAAG,EACvCE,IACHA,EAAe,CAAC,EAChB,KAAK,aAAa,IAAIF,EAAKE,CAAY,GAEzCA,EAAa,KAAKY,EAAY,OAAO,CACvC,CACF,CACF,EAGE,CAACd,GAAOe,KACVX,GAAO,QAAQ,iBAAkB,4BAA4BU,EAAY,KAAK,IAAIA,EAAY,MAAM,EAAE,EACtG,KAAK,UAAU,cAAcA,EAAY,OAAO,EAEpD,CACA,aAAatB,EAA2BE,EAAgD,CACtF,OAAQF,EAAU,CAChB,IAAK,QACH,OAAOE,aAAgB,WAAaA,EAAO,WAAW,KAAKA,CAAI,EACjE,IAAK,QACH,OAAOA,aAAgB,WAAaA,EAAO,WAAW,KAAKA,CAAI,EACjE,IAAK,OACH,OAAOA,aAAgB,UAAYA,EAAO,UAAU,KAAKA,CAAI,EAC/D,IAAK,SACH,OAAOA,aAAgB,YAAcA,EAAO,YAAY,KAAKA,CAAI,EACnE,IAAK,SACH,OAAOA,aAAgB,YAAcA,EAAO,YAAY,KAAKA,CAAI,EACnE,IAAK,QACL,IAAK,OACH,OAAOA,aAAgB,WAAaA,EAAO,WAAW,KAAKA,CAAI,EACjE,IAAK,UACH,OAAOA,aAAgB,aAAeA,EAAO,aAAa,KAAKA,CAAI,EACrE,IAAK,UACH,OAAOA,aAAgB,aAAeA,EAAO,aAAa,KAAKA,CAAI,EACrE,QACE,MAAM,IAAI,MAAM,mBAAmBF,CAAQ,mBAAmB,CAClE,CACF,CACA,cAAcyB,EAA4BvB,EAAoE,CAC5G,GAAKA,EAGL,OAAQA,aAAgB,aAAgBA,EAAO,IAAI,aAAaA,CAAI,CAmBtE,CACA,cAAcuB,EAA8C,CAC1D,MAAO,OAgBT,CACA,qBAA4B,CAC1B,KAAK,UAAU,oBAAoB,CACrC,CACF,IC3NA,IAmBaC,GAnBbC,GAAAC,EAAA,kBAKAC,KAEAC,KAKAC,KACAC,KACAC,KACAC,KACAC,KAGaT,GAAN,KAAoD,CAWzD,YAA4BU,EAAuCC,EAA0B,CAAjE,aAAAD,EAAuC,aAAAC,EACjE,KAAK,eAAiB,IAAIC,GAAsBF,EAAQ,UAAU,cAAc,EAChF,KAAK,eAAiB,IAAIG,GAAe,KAAK,QAAQ,SAAUH,EAAQ,UAAW,KAAK,cAAc,EACtG,KAAK,eAAiB,IAAII,GACtBJ,EAAQ,UAAW,KAAK,eAAgB,KAAK,QAAQ,SACrD,CAAC,cAAeA,EAAQ,mBAAqB,MAAM,CAAC,EACxD,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAA2B,IAAI,IACpC,KAAK,KAAOA,EAAQ,KACpB,KAAK,eAAiB,IAAI,IAC1B,KAAK,eAAiB,IAAI,GAC5B,CAEA,wBAAyB,CACvB,OAAO,IAAIK,GAAsB,IAAI,CACvC,CACA,mBAAmBC,EAAoB,CACrC,IAAMC,EAAeD,EAAM,UAAU,EAAE,OAAOE,GAAKA,EAAE,OAAS,IAAMA,EAAE,MAAM,EAAE,IAAIA,GAAKA,EAAE,OAAQ,MAAM,EACvG,KAAK,aAAe,IAAI,IAAID,CAAY,CAC1C,CACA,cAAcE,EAA8B,CAC1C,OAAO,KAAK,aAAe,KAAK,aAAa,IAAIA,CAAQ,EAAI,EAC/D,CACA,eAAeA,EAA2B,CACxC,KAAK,aAAa,IAAIA,CAAQ,CAChC,CACA,eAAeA,EAAqBC,EAA0C,CAC5E,OAAIA,EACK,KAAK,uBAAuB,IAAID,CAAQ,EAExC,KAAK,yBAAyB,IAAIA,CAAQ,CAErD,CACA,eAAeA,EAAqBE,EAA0BD,EAAW,GAAa,CACpFE,GAAO,QAAQ,sBAAuB,+BAA+B,EACjEF,EACF,KAAK,uBAAuB,IAAID,EAAUE,CAAW,EAErD,KAAK,yBAAyB,IAAIF,EAAUE,CAAW,CAE3D,CACA,SAAgB,CACd,KAAK,eAAe,QAAQ,EAC5B,KAAK,eAAe,oBAAoB,EACxC,KAAK,uBAAuB,QAAQE,GAAM,KAAK,eAAe,eAAeA,EAAI,EAAI,CAAC,EACtF,KAAK,uBAAyB,IAAI,IAClC,KAAK,yBAAyB,QAAQA,GAAM,KAAK,eAAe,eAAeA,EAAI,EAAI,CAAC,EACxF,KAAK,yBAA2B,IAAI,GACtC,CACA,QAAQC,EAAkBC,EAA0BT,EAAwB,CAC1E,IAAMU,EAAKC,GAAgBH,EAAMC,EAAQG,EAAsB,EAC/D,MAAO,CAAC,KAAMF,EAAG,OAAQ,QAASA,EAAG,OAASA,EAAG,OAAOF,EAAMR,CAAK,EAAIQ,CAAI,CAC7E,CACF,ICjEO,SAASK,GAAqBC,EAAmC,CACtE,IAAIC,EAAI,EACR,KAAOA,EAAID,EAAI,QACEA,EAAIC,CAAC,EAAE,EADD,EAAEA,EAEvB,CAIF,OAAOA,EAAI,CACb,CA3BA,IAgCaC,GAhCbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAA,KACAC,KAyBaL,GAAN,KAAmB,CAwCxB,YAAYM,EAA2BC,EAAc,CAFrD,KAAQ,iBAAmB,GAogB3B,KAAQ,YAA0B,CAAC,EAjgBjC,KAAK,GAAKD,EACV,KAAK,QAAUC,EAEf,KAAK,cAAc,EACnB,KAAK,aAAe,KAAK,mBAAmB,EAC5C,KAAK,YAAc,KAAK,kBAAkB,EAC1C,KAAK,qBAAqB,CAC5B,CAEA,gBAAgBC,EAAeC,EAAgBC,EAAsBC,EAA4C,CAC/G,IAAML,EAAK,KAAK,GAEVM,EAAUN,EAAG,cAAc,EAEjCA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrCN,EAAG,cAAcA,EAAG,WAAYA,EAAG,mBAAoBA,EAAG,OAAO,EACjEA,EAAG,cAAcA,EAAG,WAAYA,EAAG,mBAAoBA,EAAG,OAAO,EACjEA,EAAG,cAAcA,EAAG,WAAYA,EAAG,eAAgBA,EAAG,aAAa,EACnEA,EAAG,cAAcA,EAAG,WAAYA,EAAG,eAAgBA,EAAG,aAAa,EACnE,IAAMO,EAASF,EAAOD,EAAQ,OAAOC,EAAMH,EAAQC,CAAM,EAAI,KAC7D,OAAAH,EAAG,WACCA,EAAG,WACH,EACAI,EAAQ,eAAgBF,EAAOC,EAC/B,EACAC,EAAQ,OAAQA,EAAQ,YAAaG,CAAM,EAC/C,KAAK,WAAW,EACTD,CACT,CACA,cACIA,EAAuBJ,EAAeC,EAAgBC,EAAsBC,EAAmC,CACjH,IAAML,EAAK,KAAK,GAChBA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrC,IAAMC,EAASH,EAAQ,OAAOC,EAAMH,EAAQC,CAAM,EAClDH,EAAG,cACCA,EAAG,WA
CH,EACA,EACA,EACAE,EAAOC,EAAQC,EAAQ,OAAQA,EAAQ,YAAaG,CAAM,EAC9D,KAAK,WAAW,CAClB,CACA,kBAAkBD,EAAuBJ,EAAeC,EAAsB,CAC5E,IAAMH,EAAK,KAAK,GAEhBA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrCN,EAAG,gBAAgBA,EAAG,YAAa,KAAK,WAAW,EACnDA,EAAG,qBACCA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EACrD,CAAC,EACL,KAAK,WAAW,EAChBN,EAAG,SAAS,EAAG,EAAGE,EAAOC,CAAM,EAC/BH,EAAG,QAAQ,EAAG,EAAGE,EAAOC,CAAM,CAChC,CACA,YACIG,EAAuBJ,EAAeC,EAAgBK,EAAkBC,EACxEC,EAAyC,CAC3C,IAAMV,EAAK,KAAK,GACXU,IACHA,EAAW,GAER,KAAK,kBACR,KAAK,kBAAkBJ,EAASJ,EAAOC,CAAM,EAE/C,IAAMC,EAAU,KAAK,WAAWK,EAAUC,CAAQ,EAC5CH,EAASH,EAAQ,SAASF,EAAQC,CAAM,EAE9C,OAAAH,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrCN,EAAG,qBACCA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EACrD,CAAC,EAELN,EAAG,WAAW,EAAG,EAAGE,EAAOC,EAAQH,EAAG,KAAMI,EAAQ,YAAaG,CAAM,EACvE,KAAK,WAAW,EAETH,EAAQ,OAAOG,EAAQC,CAAQ,CACxC,CAEA,oBAA8B,CAE5B,MAAO,EACT,CACA,kBAA2B,CACzB,IAAMR,EAAK,KAAK,GAEhB,MAAO,UADGA,EAAG,aAAa,KAAK,GAAG,cAAc,EAC1BA,EAAG,QAAS,EACpC,CACA,mBAAkC,CAChC,OAAO,KAAK,GAAG,aAAa,KAAK,GAAG,kBAAkB,CACxD,CACA,uBAA0C,CACxC,OAAO,KAAK,GAAG,aAAa,KAAK,GAAG,mBAAmB,CACzD,CACA,oBAAoBW,EAAwBC,EAAkC,CAC5E,IAAMZ,EAAK,KAAK,GAChBA,EAAG,oBAAoBW,EAAgB,EAAGX,EAAG,MAAO,GAAO,GAAI,CAAC,EAChEA,EAAG,wBAAwBW,CAAc,EACrCC,IAAuB,KACzBZ,EAAG,oBAAoBY,EAAoB,EAAGZ,EAAG,MAAO,GAAO,GAAI,EAAE,EACrEA,EAAG,wBAAwBY,CAAkB,GAE/C,KAAK,WAAW,CAClB,CACA,cACIC,EACAC,EACgB,CAClB,IAAMd,EAAK,KAAK,GACVe,EAAUf,EAAG,cAAc,EAGjC,OAAAA,EAAG,aAAae,EAASF,CAAY,EACrCb,EAAG,aAAae,EAASD,CAAU,EACnCd,EAAG,YAAYe,CAAO,EACfA,CACT,CACA,cAAcC,EAAsBC,EAAiC,CACnE,IAAMjB,EAAK,KAAK,GACVkB,EAASlB,EAAG,aAAaiB,CAAU,EACzC,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,0CAA0CD,CAAU,EAAE,EAKxE,GAFAjB,EAAG,aAAakB,EAAQF,CAAY,EACpChB,EAAG,cAAckB,CAAM,EACnBlB,EAAG,mBAAmBkB,EAAQlB,EAAG,cAAc,IAAM,GACvD,MAAM,IAAI,MAAM,6BAA6BA,EAAG,iBAAiBkB,CAAM,CAAC;AAAA;AAAA,EAE5EF,CAAY,EAAE,EAEZ,OAAOE,CACT,CACA,aAAaA,EAA2B,CACtC,KAAK,GAAG,aAAaA,CAAM,CAC7B,CACA,qBAAqBZ,EAAuBa,EAAkBC,EAA2C,CACvG,IAAMpB,EAAK,KAAK,GAChBA,EAAG,cAAcA,EAAG,SAAWmB,CAAQ,EACvC,KAAK,WAAW,EAChBnB,EAAG,YAAYA,EAAG,WAAYM,CAAO,EACrC,KAAK,WAAW,EAChBN,EAAG,UAAUoB,EAAeD,CAAQ,EACpC,KAAK,WAAW,CAClB,CACA,MAAa,CACX,KAAK,GAAG,WAAW,KAAK,GAAG,eAAgB,EAAG,CAAC,EAC/C,KAAK,WAAW,CAClB,CACA,YAAmB,CACjB,GAAIE,GAAI,MAAO,CACb,IAAMrB,EAAK,KAAK,GACVsB,EAAQtB,EAAG,SAAS,EACtBuB,EAAQ,GACZ,OAAQD,EAAO,CACb,KAAMtB,EAAG,SACP,OACF,KAAMA,EAAG,aACPuB,EAAQ,eACR,MACF,KAAMvB,EAAG,cACPuB,EAAQ,gBACR,MACF,KAAMvB,EAAG,kBACPuB,EAAQ,oBACR,MACF,KAAMvB,EAAG,8BACPuB,EAAQ,gCACR,MACF,KAAMvB,EAAG,cACPuB,EAAQ,gBACR,MACF,KAAMvB,EAAG,mBACPuB,EAAQ,qBACR,MACF,QACEA,EAAQ,wBAAwBD,EAAM,SAAS,EAAE,CAAC,EACtD,CACA,MAAM,IAAI,MAAMC,CAAK,CACvB,CACF,CACA,cAAcjB,EAA6B,CACzC,KAAK,GAAG,cAAcA,CAAO,CAC/B,CACA,cAAcS,EAA6B,CACzC,KAAK,GAAG,cAAcA,CAAO,CAC/B,CACA,WAAWN,EAA4BC,EAAkBc,IAAyD,CAChH,GAAI,KAAK,UAAY,EACnB,OAAO,IAAiBC,GAAsB,KAAK,GAA8Bf,CAAQ,EAG3F,OAAQD,EAAU,CAChB,IAAK,QACH,OAAIe,IAAU,GAA2B,KAAK,yBACrC,IAAiBE,GAAqB,KAAK,GAAIhB,CAAQ,EAEvD,IAAiBgB,GACpB,KAAK,GAAIhB,EAAU,KAAK,0BAA2B,cAAc,EAEzE,IAAK,MACH,MAAM,IAAI,MAAM,iBAAiB,EACnC,IAAK,OACH,OAAO,IAAiBiB,GAAiB,KAAK,GAAIjB,CAAQ,EAC5D,QACE,MAAM,IAAI,MAAM,qBAAqBD,CAAQ,EAAE,CACnD,CACF,CACA,qBAA4B,CAC1B,IAAMT,EAAK,KAAK,GAChB,QAAS4B,EAAO,EAAGA,EAAO,KAAK,qBAAsB,EAAEA,EACrD5B,EAAG,cAAcA,EAAG,SAAW4B,CAAI,EACnC5B,EAAG,YAAYA,EAAG,WAAY,IAAI,CAEtC,CACA,SAAgB,CACd,GAAI,KAAK,SACP,OAEF,IAAMA,EAAK,KAAK,GAChBA,EAAG,gBAAgBA,EAAG,YAAa,IAAI,EACvCA,EAAG,kBAAkB,KAAK,WAAW,EACrCA,EAAG,WAAWA,EAAG,aAAc,IAAI,EACnCA,EAAG,aAAa,KAAK,YAAY,EACjCA,EAAG,WAAWA,EAAG,qBAAsB,IAAI,EAC3CA,EAAG,OAAO,EACV,KAAK,SAAW,EAClB,CAEQ,uBAAsC,CAE5C,OAAO,IAAI,aAAa,CACtB,GAAM,EAAM,EAAK,EAAK,EACtB,GAAM,GAAM,EAAK,EAAK,EACtB,EAAM,EAAM,
EAAK,EAAK,EACtB,EAAM,GAAM,EAAK,EAAK,CACxB,CAAC,CACH,CACQ,oBAAkC,CACxC,IAAMA,EAAK,KAAK,GACVO,EAASP,EAAG,aAAa,EAC/B,GAAI,CAACO,EACH,MAAM,IAAI,MAAM,8BAA8B,EAEhD,IAAMsB,EAAW,KAAK,sBAAsB,EAC5C,OAAA7B,EAAG,WAAWA,EAAG,aAAcO,CAAM,EACrCP,EAAG,WAAWA,EAAG,aAAc6B,EAAU7B,EAAG,WAAW,EACvD,KAAK,WAAW,EACTO,CACT,CACQ,mBAAsC,CAC5C,IAAMuB,EAAK,KAAK,GAAG,kBAAkB,EACrC,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,iCAAiC,EAEnD,OAAOA,CACT,CAEQ,sBAA6B,CACnC,IAAM9B,EAAK,KAAK,GAMhB,GAJA,KAAK,sCAAwC,KAAK,yCAAyC,EAC3F,KAAK,yBAA2B,KAAK,mBAAmB,EACxD,KAAK,2BAA6B,KAAK,qBAAqB,EAExD,KAAK,UAAY,GAAK,CAAC,KAAK,2BAA6B,CAAC,KAAK,yBACjE,MAAM,IAAI,MAAM,wDAAwD,EAG1E,KAAK,iBAAmB,CAAC,KAAK,0BAA4B,KAAK,kBAAkB,EAGjF,KAAK,eAAiBA,EAAG,aAAaA,EAAG,gBAAgB,EACzD,KAAK,qBAAuBA,EAAG,aAAaA,EAAG,uBAAuB,EAMlE,KAAK,OAMX,CACQ,eAAsB,CACxB,KAAK,UAAY,GACnB,KAAK,0BAA4B,KAAK,GAAG,aAAa,wBAAwB,EAC9E,KAAK,kCAAoC,KAAK,GAAG,aAAa,iCAAiC,IAE/F,KAAK,sBAAwB,KAAK,GAAG,aAAa,mBAAmB,EACrE,KAAK,0BAA4B,KAAK,GAAG,aAAa,wBAAwB,EAElF,CAEQ,0CAAoD,CAG1D,IAAMA,EAAK,KAAK,GACVM,EAAUN,EAAG,cAAc,EACjCA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EAErC,IAAMyB,EAAiB,KAAK,UAAY,EAAK/B,EAAoC,QAAUA,EAAG,KAC9FA,EAAG,WAAWA,EAAG,WAAY,EAAG+B,EAAgB,EAAG,EAAG,EAAG/B,EAAG,KAAMA,EAAG,MAAO,IAAI,EAEhF,IAAMgC,EAAchC,EAAG,kBAAkB,EACzCA,EAAG,gBAAgBA,EAAG,YAAagC,CAAW,EAE9ChC,EAAG,qBAAqBA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EAAS,CAAC,EAEvF,IAAM2B,EAAajC,EAAG,uBAAuBA,EAAG,WAAW,IAAMA,EAAG,qBACpE,OAAAA,EAAG,YAAYA,EAAG,WAAY,IAAI,EAClCA,EAAG,gBAAgBA,EAAG,YAAa,IAAI,EACvCA,EAAG,cAAcM,CAAO,EACxBN,EAAG,kBAAkBgC,CAAW,EACzBC,CACT,CAEQ,oBAA8B,CACpC,GAAI,KAAK,UAAY,GACnB,GAAI,CAAC,KAAK,0BACR,MAAO,WAGL,CAAC,KAAK,sBACR,MAAO,GAGX,OAAO,KAAK,qCACd,CAEQ,sBAAgC,CACtC,GAAI,KAAK,UAAY,GACnB,GAAI,CAAC,KAAK,0BACR,MAAO,WAGL,CAAC,KAAK,uBAGN,CAAC,KAAK,GAAG,aAAa,0BAA0B,EAClD,MAAO,GAGX,OAAO,KAAK,qCACd,CAKQ,mBAA6B,CAInC,IAAMjC,EAAK,KAAK,GAEZM,EACA0B,EACAnB,EACAqB,EACAnB,EAEJ,GAAI,CACFT,EAAUN,EAAG,cAAc,EAC3BgC,EAAchC,EAAG,kBAAkB,EACnCA,EAAG,YAAYA,EAAG,WAAYM,CAAO,EAGrC,IAAMyB,EAAiB,KAAK,UAAY,EAAK/B,EAAoC,QAAUA,EAAG,KAuB9F,OAtBAA,EAAG,WAAWA,EAAG,WAAY,EAAG+B,EAAgB,EAAG,EAAG,EAAG/B,EAAG,KAAMA,EAAG,MAAO,IAAI,EAEhFA,EAAG,gBAAgBA,EAAG,YAAagC,CAAW,EAC9ChC,EAAG,qBAAqBA,EAAG,YAAaA,EAAG,kBAAmBA,EAAG,WAAYM,EAAS,CAAC,EAEvFN,EAAG,OAAOA,EAAG,KAAK,EAElBa,EAAeb,EAAG,aAAaA,EAAG,aAAa,EAC3C,CAACa,IAGLb,EAAG,aAAaa,EAAc,eAAe,EAC7Cb,EAAG,cAAca,CAAY,EAE7BqB,EAAiBlC,EAAG,aAAaA,EAAG,eAAe,EAC/C,CAACkC,KAGLlC,EAAG,aAAakC,EAAgB,4DAA4D,EAC5FlC,EAAG,cAAckC,CAAc,EAE/BnB,EAAUf,EAAG,cAAc,EACvB,CAACe,GACI,IAETf,EAAG,aAAae,EAASF,CAAY,EACrCb,EAAG,aAAae,EAASmB,CAAc,EACvClC,EAAG,YAAYe,CAAO,EACtBf,EAAG,WAAWe,CAAO,EAErBf,EAAG,WAAWA,EAAG,OAAQ,EAAG,CAAC,EACtBA,EAAG,SAAS,IAAMA,EAAG,SAE9B,QAAE,CACAA,EAAG,QAAQA,EAAG,KAAK,EAEfe,GACFf,EAAG,cAAce,CAAO,EAEtBF,GACFb,EAAG,aAAaa,CAAY,EAE1BqB,GACFlC,EAAG,aAAakC,CAAc,EAE5BF,IACFhC,EAAG,gBAAgBA,EAAG,YAAa,IAAI,EACvCA,EAAG,kBAAkBgC,CAAW,GAE9B1B,IACFN,EAAG,YAAYA,EAAG,WAAY,IAAI,EAClCA,EAAG,cAAcM,CAAO,EAE5B,CACF,CAEA,YAAyB,CACvB,GAAI,KAAK,UAAY,GAAK,KAAK,kCAAmC,CAChE,IAAM6B,EAAM,KAAK,GACXC,EAAM,KAAK,kCAEXC,EAAQF,EAAI,YAAY,EAC9B,OAAAA,EAAI,WAAWC,EAAI,iBAAkBC,CAAK,EACnCA,CACT,KAEE,OAAM,IAAI,MAAM,2CAA2C,CAE/D,CAEA,UAAW,CACT,GAAI,KAAK,UAAY,GAAK,KAAK,kCAAmC,CAChE,IAAMF,EAAM,KAAK,GACXC,EAAM,KAAK,kCACjBD,EAAI,SAASC,EAAI,gBAAgB,EACjC,MACF,KAEE,OAAM,IAAI,MAAM,0CAA0C,CAE9D,CAEA,uBAAuBC,EAA4B,CACjD,IAAIC,EAAY,GAAOC,EAAW,GAClC,GAAI,KAAK,UAAY,GAAK,KAAK,kCAAmC,CAChE,IAAMJ,EAAM,KAAK,GACXC,EAAM,KAAK,kCAEjBE,EAAYH,EAAI,kBAAkBE,EAAOF,EAAI,sBAAsB,EACnEI,EAAWJ,EAAI,aAAaC,EAAI,gBAAgB,CAClD,KAEE,OAAM,IAAI,MAAM,0CAA0C,EAG5D,OAAOE,GAAa,CAACC,CACvB,CAEA,eAAeF,EAA2B,CACxC,IAAIG,EAAc,EAClB,GAAI,KAAK,UAA
Y,EAAG,CACtB,IAAML,EAAM,KAAK,GACjBK,EAAcL,EAAI,kBAAkBE,EAAOF,EAAI,YAAY,EAC3DA,EAAI,YAAYE,CAAK,CACvB,KAEE,OAAM,IAAI,MAAM,0CAA0C,EAG5D,OAAOG,EAAc,GACvB,CAEA,MAAM,uBAAuBH,EAAoC,CAC/D,aAAMI,GAAY,IAAM,KAAK,uBAAuBJ,CAAK,CAAC,EACnD,KAAK,eAAeA,CAAK,CAClC,CAEA,MAAa,uBAAuC,CAClD,IAAMK,EAAe,KAAK,YAAY,KAAK,EAAE,EAC7C,OAAO,KAAK,UAAUA,CAAY,CACpC,CAEQ,YAAY1C,EAAyC,CAC3D,IAAI2C,EACER,EAAMnC,EACNqC,EAAQF,EAAI,UAAUA,EAAI,2BAA4B,CAAC,EAC7D,OAAAnC,EAAG,MAAM,EACLqC,IAAU,KACZM,EAAgB,IAAM,GAEtBA,EAAgB,IAAM,CACpB,IAAMC,EAAST,EAAI,eAAeE,EAAO,EAAG,CAAC,EAC7C,OAAOO,IAAWT,EAAI,kBAAoBS,IAAWT,EAAI,mBAC3D,EAEK,CAAC,MAAAE,EAAO,cAAAM,CAAa,CAC9B,CAEA,MAAM,UAAUD,EAA4B,CAC1C,OAAO,IAAI,QAAcG,GAAW,CAC7B,KAAK,cAAc,IAAMH,EAAa,cAAc,EAAG,IAAMG,EAAQ,CAAC,CAC7E,CAAC,CACH,CAIA,WAAkB,CAEhB,IAAMC,EAAQvD,GAAqB,KAAK,YAAY,IAAIwD,GAAKA,EAAE,QAAQ,CAAC,EACxE,QAAStD,EAAI,EAAGA,GAAKqD,EAAO,EAAErD,EAAG,CAC/B,GAAM,CAAC,UAAAuD,CAAS,EAAI,KAAK,YAAYvD,CAAC,EACtCuD,EAAU,CACZ,CACA,KAAK,YAAc,KAAK,YAAY,MAAMF,EAAQ,CAAC,CACrD,CAEA,MAAc,cAAcG,EAAyBD,EAAuB,CAC1E,KAAK,YAAY,KAAK,CAAC,SAAAC,EAAU,UAAAD,CAAS,CAAC,EACvC,OAAK,YAAY,OAAS,IAK9B,MAAMP,GAAY,KAChB,KAAK,UAAU,EAER,KAAK,YAAY,SAAW,EACpC,CACH,CACF,ICrlBO,SAASS,GAAmBC,EAA4C,CAC7E,IAAIC,EAOJ,IANK,CAACD,GAAaA,IAAc,WAAa,WAAYE,GACxDD,EAAUC,GAAM,QACN,CAACF,GAAaA,IAAc,UAAY,UAAWE,KAC7DD,EAAUC,GAAM,OAGd,CAACD,EACH,GAAI,CAEF,IAAME,EAAkBC,GAAsB,EAC9CH,EAAUI,GAAsBF,EAAiBH,CAAS,CAC5D,MAAY,CAEV,IAAMM,EAASC,GAAa,EAC5BN,EAAUI,GAAsBC,EAAQN,CAAS,CACnD,CAGFA,EAAYA,GAAaC,EAAQ,UAAY,EAAI,QAAU,SAC3D,IAAMO,EAAKP,EAAQ,GAInB,OAFAC,GAAMF,CAAS,EAAIC,EAEfO,EAAG,cAAc,GACnB,OAAON,GAAMF,CAAS,EACfD,GAAmBC,CAAS,IAGrCQ,EAAG,QAAQA,EAAG,UAAU,EACxBA,EAAG,QAAQA,EAAG,YAAY,EAC1BA,EAAG,QAAQA,EAAG,KAAK,EACnBA,EAAG,QAAQA,EAAG,MAAM,EACpBA,EAAG,QAAQA,EAAG,mBAAmB,EACjCA,EAAG,QAAQA,EAAG,eAAe,EAC7BA,EAAG,OAAOA,EAAG,YAAY,EACzBA,EAAG,OAAOA,EAAG,SAAS,EACtBA,EAAG,SAASA,EAAG,IAAI,EAEZP,EACT,CAEO,SAASI,GAAsBC,EAA2BN,EAA4C,CAC3G,IAAMS,EAA4C,CAChD,MAAO,GACP,MAAO,GACP,UAAW,GACX,QAAS,GACT,sBAAuB,GACvB,mBAAoB,GACpB,6BAA8B,EAChC,EACID,EACEE,EAAKD,EACX,IAAI,CAACT,GAAaA,IAAc,YAC9BQ,EAAKF,EAAO,WAAW,SAAUI,CAAE,EAC/BF,GACF,GAAI,CACF,OAAO,IAAIG,GAAaH,EAAI,CAAC,CAC/B,OAASI,EAAK,CACZC,GAAO,QAAQ,mBAAoB,kEAAkED,CAAG,EAAE,CAC5G,CAGJ,IAAI,CAACZ,GAAaA,IAAc,WAC9BQ,EAAKF,EAAO,WAAW,QAASI,CAAE,GAAKJ,EAAO,WAAW,qBAAsBI,CAAE,EAC7EF,GACF,GAAI,CACF,OAAO,IAAIG,GAAaH,EAAI,CAAC,CAC/B,OAASI,EAAK,CACZC,GAAO,QACH,mBACA,yFAAyFD,CAAG,EAAE,CACpG,CAIJ,MAAM,IAAI,MAAM,wBAAwB,CAC1C,CAKA,SAASL,IAAkC,CACzC,GAAI,OAAO,SAAa,IACtB,MAAM,IAAI,UAAU,oDAAoD,EAE1E,IAAMD,EAA4B,SAAS,cAAc,QAAQ,EACjE,OAAAA,EAAO,MAAQ,EACfA,EAAO,OAAS,EACTA,CACT,CAEA,SAASF,IAA2C,CAClD,GAAI,OAAO,gBAAoB,IAC7B,MAAM,IAAI,UAAU,qEAAqE,EAE3F,OAAO,IAAI,gBAAgB,EAAG,CAAC,CACjC,CAjHA,IAOMF,GAPNY,GAAAC,EAAA,kBAGAC,KAEAC,KAEMf,GAA6C,CAAC,ICPpD,IAkBagB,GAlBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAGAC,KAEAC,KAOaN,GAAN,KAAsC,CAG3C,IAAI,WAAwC,CAC1C,OAAOO,GAAI,MAAM,SACnB,CACA,IAAI,UAAUC,EAAmC,CAC/CD,GAAI,MAAM,UAAYC,CACxB,CAEA,IAAI,oBAAuC,CACzC,OAAOD,GAAI,MAAM,kBACnB,CACA,IAAI,mBAAmBC,EAAyB,CAC9CD,GAAI,MAAM,mBAAqBC,CACjC,CAEA,IAAI,kBAAuD,CACzD,OAAOD,GAAI,MAAM,gBACnB,CACA,IAAI,iBAAiBC,EAA2C,CAC9DD,GAAI,MAAM,iBAAmBC,CAC/B,CAEA,IAAI,MAA0B,CAC5B,OAAOD,GAAI,MAAM,IACnB,CACA,IAAI,KAAKC,EAA0B,CACjCD,GAAI,MAAM,KAAOC,CACnB,CAEA,IAAI,OAA2B,CAC7B,OAAOD,GAAI,MAAM,KACnB,CACA,IAAI,MAAMC,EAA0B,CAClCD,GAAI,MAAM,MAAQC,CACpB,CAEA,YAAsB,CACpB,GAAI,CACF,YAAK,UAAYC,GAAmB,KAAK,SAAS,EAC9C,OAAO,KAAK,oBAAuB,WACrC,KAAK,mBAAqB,IAExB,OAAO,KAAK,kBAAqB,WACnC,KAAK,iBAAmB,QAEtB,OAAO,KAAK,MAAS,YACvB,KAAK,KAAO,IAEV,OAAO,KAAK,OAAU,YACxB,KAAK,MAAQ,IAGfC,GAAO,WAAWH,EAAG,EAEhBA,GAAI,MAAM,SACb,OAAO,eAAeA,GAA
I,MAAO,UAAW,CAAC,MAAO,KAAK,UAAU,EAAE,CAAC,EAGxEG,GAAO,QACH,eACA,yBAAyB,OAAO,KAAK,SAAS,6BAC1C,KAAK,kBAAkB,uBAAuB,KAAK,gBAAgB,WAAW,KAAK,IAAI,YACvF,KAAK,KAAK,GAAG,EACd,EACT,OAAS,EAAG,CACV,OAAAA,GAAO,QAAQ,eAAgB,sCAAsC,CAAC,EAAE,EACjE,EACT,CACF,CACA,qBAAqBC,EAA0C,CAC7D,OAAO,IAAIC,GAAoB,KAAMD,CAAO,CAC9C,CACA,SAAgB,CACd,KAAK,UAAU,QAAQ,CACzB,CACF,ICRA,eAAsBE,GAAeC,EAAmD,CACtF,GAAKA,EAEE,CACL,IAAMC,EAAQ,OAAOD,GAAS,SAAW,CAACA,CAAI,EAAIA,EAElD,QAAWE,KAAeD,EAAO,CAC/B,IAAME,EAAQC,GAAc,IAAIF,CAAW,EAC3C,GAAIC,EACF,OAAOA,EAGT,IAAME,EAAU,MAAMC,GAAeJ,CAAW,EAChD,GAAIG,EACF,OAAOA,CAEX,CACF,KAfE,QAAON,GAAe,CAAC,OAAO,CAAC,EAiBjC,MAAM,IAAI,MAAM,6BAA6B,CAC/C,CAEA,eAAeO,GAAeJ,EAAiD,CAC7E,IAAMK,EAAaF,GAEnB,GAAI,OAAOE,EAAWL,CAAW,EAAM,KAAeM,GAAUD,EAAWL,CAAW,CAAC,EAAG,CACxF,IAAMG,EAAUE,EAAWL,CAAW,EAClCO,EAAOJ,EAAQ,WAAW,EAI9B,GAHI,OAAOI,GAAS,UAAY,SAAUA,IACxCA,EAAO,MAAMA,GAEXA,EACF,OAAAL,GAAc,IAAIF,EAAaG,CAAO,EAC/BA,CAEX,CAGF,CAEA,SAASG,GAAUE,EAAc,CAE/B,IAAMC,EAAID,EAGV,MACI,eAAgBC,GAAK,OAAOA,EAAE,YAAe,YAC7C,yBAA0BA,GAAK,OAAOA,EAAE,sBAAyB,YACjE,YAAaA,GAAK,OAAOA,EAAE,SAAY,UAM7C,CA7IA,IA6EMP,GAEOC,GA/EbO,GAAAC,EAAA,kBAGAC,KA0EMV,GAAsC,IAAI,IAEnCC,GAAqC,CAChD,MAAO,IAAIU,EACb,ICjFA,IASMC,GAIOC,GAbbC,GAAAC,EAAA,kBAKAC,KAIMJ,GAAN,KAAe,CACb,YAAmBK,EAAqBC,EAAkB,CAAvC,QAAAD,EAAqB,UAAAC,CAAmB,CAC7D,EAEaL,GAAN,KAAoB,CACzB,YAAoBM,EAAcC,EAAyBC,EAA8B,CAArE,WAAAF,EAAuC,cAAAE,EACzD,KAAK,WAAWD,CAAG,CACrB,CAEA,WAAWA,EAAiB,CAC1B,KAAK,SAAS,MAAM,UAAW,2BAA4B,IAAM,CAC/D,IAAME,EAAa,KAAK,MAAM,SAAS,EACvC,GAAIA,EAAW,SAAWF,EAAI,OAC5B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,KAAK,KAAOA,EAAI,IAAI,CAACH,EAAIM,IAAM,IAAIX,GAASK,EAAIK,EAAWC,CAAC,CAAC,CAAC,EAC9D,KAAK,MAAM,EAGX,KAAK,SAAW,CAAC,EACjB,KAAK,KAAK,QAAQ,CAACN,EAAIM,IAAM,CAC3B,IAAIC,EAAW,GACf,QAAWC,KAASR,EAAG,KAAK,OAC1B,GACI,CAAC,KAAK,QAAQQ,CAAK,GAChB,KAAK,MAAM,gBAAgB,EAAE,QAAQA,CAAK,IAAM,GACrD,CACAD,EAAW,GACX,KACF,CAEEA,GACF,KAAK,SAAS,KAAKD,CAAC,CAExB,CAAC,CACH,CAAC,CACH,CAEA,OAAQ,CACN,KAAK,QAAU,KAAK,MAAM,UAAU,EAAE,IAAIA,GAAKA,EAAE,MAAM,CACzD,CAEA,MAAM,QAAQG,EAAgCC,EAA0C,CACtF,OAAO,KAAK,SAAS,MAAM,UAAW,wBAAyB,SAAY,CAEzE,KAAK,MAAM,EAGX,IAAMC,EAAmBF,EAAe,uBAAuB,EAGzDG,EAAc,KAAK,MAAM,gBAAgB,EAC/C,GAAIF,EAAY,SAAWE,EAAY,OACrC,MAAM,IAAI,MAAM,kFACZF,EAAY,MAAM,cAAcE,EAAY,MAAM,EAAE,EAG1DF,EAAY,QAAQ,CAACF,EAAOF,IAAM,CAChC,IAAMO,EAAQD,EAAYN,CAAC,EAC3B,KAAK,QAAQO,CAAK,EAAIL,CACxB,CAAC,EAGD,IAAMM,EAAqB,KAAK,SAAS,MAAM,CAAC,EAG1CC,EAAc,KAAK,MAAM,UAAU,EACnCV,EAAa,KAAK,MAAM,SAAS,EAEnCW,EAAO,EACX,KAAOA,EAAOF,EAAS,QAAQ,CAC7B,IAAMG,EAAcH,EAASE,GAAM,EAC7BE,EAAS,KAAK,KAAKD,CAAW,EAG9BE,EAAYD,EAAO,KAAK,OAAO,IAAIZ,GAAK,KAAK,QAAQA,CAAC,CAAC,EAC7D,GAAIa,EAAU,QAAQ,MAAS,IAAM,GACnC,MAAM,IAAI,MAAM,kCAAkCD,EAAO,IAAI,EAAE,EAIjE,IAAME,EAAeD,EACrBE,GAAO,QACH,WACA,cAAcH,EAAO,KAAK,IAAI,KAC1BE,EAAa,IAAI,CAACE,EAAGhB,IAAM,IAAIY,EAAO,KAAK,OAAOZ,CAAC,CAAC,MAAMgB,EAAE,IAAI,IAAIA,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,GAAG,EAE9G,IAAMC,EAAa,MAAM,KAAK,SAAS,MACnC,OAAQL,EAAO,KAAK,KAAM,SAAYA,EAAO,GAAG,KAAKP,EAAkBS,EAAcF,EAAO,GAAG,OAAO,CAAC,EAG3G,GAAIK,EAAW,SAAWL,EAAO,KAAK,QAAQ,OAC5C,MAAM,IAAI,MAAM,qDAAqD,EAIvEK,EAAW,QAAQ,CAACC,EAAQlB,IAAM,CAChC,IAAMmB,EAAIP,EAAO,KAAK,QAAQZ,CAAC,EAC/B,GAAI,KAAK,QAAQmB,CAAC,EAChB,MAAM,IAAI,MAAM,WAAWA,CAAC,2BAA2BP,EAAO,KAAK,IAAI,EAAE,EAE3E,KAAK,QAAQO,CAAC,EAAID,CACpB,CAAC,EAGD,IAAME,EAAkB,IAAI,IAC5BH,EAAW,QAAQ,CAACI,EAASrB,IAAM,CACjC,IAAMmB,EAAIP,EAAO,KAAK,QAAQZ,CAAC,EAC/B,QAAWsB,KAA8Bb,EAAYU,CAAC,EAAE,GAAI,CAC1D,IAAMI,EAAwBxB,EAAWuB,CAA0B,EAC/DrB,EAAW,GACf,QAAWuB,KAAKD,EAAsB,OACpC,GAAI,CAAC,KAAK,QAAQC,CAAC,EAAG,CACpBvB,EAAW,GACX,KACF,CAEEA,GACFmB,EAAgB,IAAIE,CAA0B,CAElD,CACF,CAAC,EACDd,EAAS,KAAK,GAAGY,CAAe,CAClC,CA
EA,IAAMF,EAAmB,CAAC,EAC1B,QAASlB,EAAI,EAAGA,EAAI,KAAK,MAAM,iBAAiB,EAAE,OAAQA,IAAK,CAC7D,IAAMyB,EAAc,KAAK,MAAM,iBAAiB,EAAEzB,CAAC,EAC7C0B,EAAe,KAAK,QAAQD,CAAW,EAC7C,GAAIC,IAAiB,OACnB,MAAM,IAAI,MAAM,oBAAoBD,CAAW,uBAAuB,EAEpEA,IAAgB,EAClB,MAAMC,EAAa,QAAQ,EAG3BA,EAAa,KAEfR,EAAO,KAAKQ,CAAY,CAC1B,CACA,OAAAX,GAAO,QAAQ,WAAY,+BAA+B,EAC1DV,EAAiB,QAAQ,EAClBa,CACT,CAAC,CACH,CAKF,IC/JA,IAMAS,GAIOC,GAqBMC,GA/BbC,GAAAC,EAAA,kBAKAC,KACAL,GAAmB,SACnBM,KACAC,KAEON,GAASO,GAAY,aAAa,IAqB5BN,GAAN,MAAMO,CAAU,CACrB,YAAYC,EAAsE,CAEhF,GADA,KAAK,YAAc,IAAI,IACnBA,GAAe,KAAkC,CACnD,QAAWC,KAAQD,EACbC,aAAgB,QAAK,eACvB,KAAK,YAAY,IAAIA,EAAK,KAAM,CAACF,EAAU,SAASE,CAAI,EAAGF,EAAU,QAAQE,CAAI,CAAC,CAAC,EAC1EA,aAAgBV,GAAO,WAChC,KAAK,YAAY,IAAIU,EAAK,KAAK,EAAI,CAACF,EAAU,SAASE,CAAI,EAAGF,EAAU,QAAQE,CAAI,CAAC,CAAC,EAG1F,GAAI,KAAK,YAAY,KAAOD,EAAW,OACrC,MAAM,IAAI,MAAM,4BAA4B,CAEhD,CACF,CAEA,IAAIE,EAAaC,EAA0BC,EAAyB,CAClE,KAAK,YAAY,IAAIF,EAAK,CAACE,EAAOD,CAAI,CAAC,CACzC,CACA,OAAOD,EAAmB,CACxB,KAAK,YAAY,OAAOA,CAAG,CAC7B,CACA,SAASA,EAAaG,EAA+C,CACnE,OAAO,KAAK,IAAIH,EAAK,QAASG,CAAY,CAC5C,CAEA,OAAOH,EAAaG,EAA6C,CAC/D,OAAO,KAAK,IAAIH,EAAK,MAAOG,CAAY,CAC1C,CAEA,UAAUH,EAAaG,EAAgD,CACrE,OAAO,KAAK,IAAIH,EAAK,SAAUG,CAAY,CAC7C,CAEA,UAAUH,EAAaG,EAAgD,CACrE,OAAO,KAAK,IAAIH,EAAK,SAAUG,CAAY,CAC7C,CAEA,UAAUH,EAAaG,EAAgD,CACrE,OAAO,KAAK,IAAIH,EAAK,SAAUG,CAAY,CAC7C,CAEA,QAAQH,EAAaG,EAA8C,CACjE,OAAO,KAAK,IAAIH,EAAK,OAAQG,CAAY,CAC3C,CAEA,WAAWH,EAAaG,EAAiD,CACvE,OAAO,KAAK,IAAIH,EAAK,UAAWG,CAAY,CAC9C,CAEA,WAAWH,EAAaG,EAAiD,CACvE,OAAO,KAAK,IAAIH,EAAK,UAAWG,CAAY,CAC9C,CAEQ,IACJH,EAAaC,EAA0BE,EAAqB,CAC9D,IAAMC,EAAe,KAAK,YAAY,IAAIJ,CAAG,EAC7C,GAAII,IAAiB,OAAW,CAC9B,GAAID,IAAiB,OACnB,OAAOA,EAET,MAAM,IAAI,MAAM,iCAAiCH,CAAG,EAAE,CACxD,CACA,GAAII,EAAa,CAAC,IAAMH,EACtB,MAAM,IAAI,MAAM,2BAA2BA,CAAI,YAAYG,EAAa,CAAC,CAAC,EAAE,EAE9E,OAAOA,EAAa,CAAC,CACvB,CAEA,OAAe,QAAQL,EAAiE,CACtF,IAAME,EAAOF,aAAgB,QAAK,eAAkBA,EAAM,KAAQA,EAA0B,KAAK,EACjG,OAAQE,EAAM,CACZ,KAAK,QAAK,eAAe,cAAc,MACrC,MAAO,QACT,KAAK,QAAK,eAAe,cAAc,IACrC,MAAO,MACT,KAAK,QAAK,eAAe,cAAc,OACrC,MAAO,SACT,KAAK,QAAK,eAAe,cAAc,OACrC,MAAO,SACT,KAAK,QAAK,eAAe,cAAc,OACrC,MAAO,SACT,KAAK,QAAK,eAAe,cAAc,KACrC,MAAO,OACT,KAAK,QAAK,eAAe,cAAc,QACrC,MAAO,UACT,KAAK,QAAK,eAAe,cAAc,QACrC,MAAO,UACT,QACE,MAAM,IAAI,MAAM,wCAAwC,QAAK,eAAe,cAAcA,CAAI,CAAC,EAAE,CACrG,CACF,CAEA,OAAe,SAASF,EAA6C,CACnE,IAAMM,EAAWN,aAAgB,QAAK,eAAiBA,EAAK,KAAQA,EAA0B,KAAK,EACnG,GAAIM,IAAa,QAAK,eAAe,cAAc,OAASA,IAAa,QAAK,eAAe,cAAc,OACzG,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMH,EAAQ,KAAK,gBAAgBH,CAAI,EAGvC,GAAIM,IAAa,QAAK,eAAe,cAAc,KAAOC,GAAS,OAAOJ,CAAK,EAC7E,OAAOI,GAAS,aAAaJ,CAAgC,EAI/D,GAAIG,IAAa,QAAK,eAAe,cAAc,KAAM,CACvD,IAAME,EAAOL,EACPM,EAAwB,IAAI,MAAcD,EAAI,MAAM,EAE1D,QAASE,EAAI,EAAGA,EAAIF,EAAI,OAAQE,IAAK,CACnC,IAAMC,EAAYH,EAAIE,CAAC,EACvBD,EAAYC,CAAC,EAAIH,GAAS,aAAaI,CAAS,CAClD,CAEA,OAAOF,CACT,CAGA,GAAIH,IAAa,QAAK,eAAe,cAAc,OACjD,OAAON,aAAgB,QAAK,eAAiBY,GAAO,UAAUT,CAA0B,EAC3CS,GAAO,cAAcT,CAAsB,EAI1F,GAAIG,IAAa,QAAK,eAAe,cAAc,QAAS,CAC1D,GAAIN,aAAgB,QAAK,eAEvB,OADqBG,EACD,IAAIA,GAASS,GAAO,UAAUT,CAAK,CAAC,EACnD,GAAIH,aAAgBV,GAAO,UAEhC,OADqBa,EACD,IAAIA,GAASS,GAAO,cAAcT,CAAK,CAAC,CAEhE,CAGA,OAAIG,IAAa,QAAK,eAAe,cAAc,QAG7CN,aAAgB,QAAK,eAEhBa,GADYV,CACe,EAKlCG,IAAa,QAAK,eAAe,cAAc,SAG7CN,aAAgB,QAAK,eACHG,EACD,IAAIU,EAAgB,EAIpCV,CACT,CAEA,OAAe,gBAAgBH,EAA6C,CAC1E,OAAOA,aAAiB,QAAK,eAAkB,KAAK,8BAA8BA,CAAI,EACvC,KAAK,6BAA6BA,CAAwB,CAC3G,CAEA,OAAe,8BAA8BA,EAA4B,CACvE,OAAQA,EAAK,KAAO,CAClB,KAAK,QAAK,eAAe,cAAc,MACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,IACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc
,MACrC,OAAOA,EAAK,EACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,OACd,KAAK,QAAK,eAAe,cAAc,KACrC,OAAOA,EAAK,KACd,KAAK,QAAK,eAAe,cAAc,QACrC,OAAOA,EAAK,QACd,KAAK,QAAK,eAAe,cAAc,QACrC,OAAOA,EAAK,QACd,KAAK,QAAK,eAAe,cAAc,OACrC,OAAOA,EAAK,OACd,QACE,MAAM,IAAI,MAAM,+BAA+B,QAAK,eAAe,cAAcA,EAAK,IAAK,CAAC,EAAE,CAClG,CACF,CAEA,OAAe,6BAA6BA,EAAwB,CAClE,OAAQA,EAAK,KAAK,EAAG,CACnB,KAAKV,GAAO,cAAc,MACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,IACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,OACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,OACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,MACxB,OAAOU,EAAK,EAAE,EAChB,KAAKV,GAAO,cAAc,OACxB,OAAOU,EAAK,YAAY,EAC1B,KAAKV,GAAO,cAAc,KAAM,CAC9B,IAAMwB,EAAO,CAAC,EACd,QAASJ,EAAI,EAAGA,EAAIV,EAAK,WAAW,EAAGU,IACrCI,EAAK,KAAKd,EAAK,KAAKU,CAAC,CAAE,EAEzB,OAAOI,CACT,CACA,KAAKxB,GAAO,cAAc,QAAS,CACjC,IAAMyB,EAAU,CAAC,EACjB,QAASL,EAAI,EAAGA,EAAIV,EAAK,cAAc,EAAGU,IACxCK,EAAQ,KAAKf,EAAK,QAAQU,CAAC,CAAC,EAE9B,OAAOK,CACT,CACA,KAAKzB,GAAO,cAAc,QAAS,CACjC,IAAM0B,EAAU,CAAC,EACjB,QAASN,EAAI,EAAGA,EAAIV,EAAK,cAAc,EAAGU,IACxCM,EAAQ,KAAKhB,EAAK,QAAQU,CAAC,CAAE,EAE/B,OAAOM,CACT,CAQA,QACE,MAAM,IAAI,MAAM,+BAA+B1B,GAAO,cAAcU,EAAK,KAAK,CAAC,CAAC,EAAE,CACtF,CACF,CAGF,IC/QA,IAKAiB,GAIOC,GAmEMC,GAQPC,GAwBAC,GAyBAC,GArINC,GAAAC,EAAA,kBAGAC,KACAC,KACAT,GAAmB,SACnBU,KACAC,KAEOV,GAASW,GAAY,aAAa,IAmE5BV,GAAQ,CAInB,KAAM,CAACW,EAA2CC,IAC9C,IAAIT,GAAUQ,EAAYC,CAAW,CAC3C,EAEMX,GAAN,KAAmC,CACjC,YAAYY,EAAkC,CAC5C,KAAK,MAAQ,OACb,KAAK,IAAM,CAAC,EACZ,KAAK,OAAS,OACd,KAAK,KAAO,OAERA,IACF,KAAK,KAAOC,GAAU,yBAAyBD,EAAU,KAAM,UAAW,EAE9E,CAGA,IAAI,MAAO,CACT,OAAO,KAAK,KACd,CAEA,IAAI,IAAK,CACP,OAAO,KAAK,GACd,CAGF,EAEMX,GAAN,KAAiC,CAC/B,YAAYa,EAAyCC,EAAe,CAC9DD,aAAsB,QAAK,WAC7B,KAAK,KAAOA,EAAW,KACvB,KAAK,OAASA,EAAW,OACzB,KAAK,WAAa,IAAIE,GAAUF,EAAW,SAAS,GAC3CA,aAAsBhB,GAAO,OACtC,KAAK,KAAOiB,GAAQD,EAAW,KAAK,EACpC,KAAK,OAASA,EAAW,OAAO,EAChC,KAAK,WAAa,IAAIE,GAAUH,GAAU,8BAA8BC,CAAU,CAAC,GAGrF,KAAK,OAAS,CAAC,EACf,KAAK,QAAU,CAAC,EAChB,KAAK,YAAc,EACrB,CAQF,EAEMZ,GAAN,KAAoD,CAWlD,YAAYe,EAAsCC,EAAsC,CACtF,GAAI,CAACD,EACH,MAAM,IAAI,UAAU,gBAAgB,EAItC,KAAK,WAAWA,CAAK,EAGrB,KAAK,eAAeC,CAAgB,EAGpC,KAAK,eAAe,CACtB,CAEA,iBAAqC,CACnC,OAAO,KAAK,gBACd,CAEA,eAAmC,CACjC,OAAO,KAAK,cACd,CAEA,kBAAsC,CACpC,OAAO,KAAK,iBACd,CAEA,gBAAoC,CAClC,OAAO,KAAK,eACd,CAEA,WAAoC,CAClC,OAAO,KAAK,QACd,CAEA,UAAkC,CAChC,OAAO,KAAK,MACd,CAEQ,WAAWD,EAAsC,CAEvD,GAAIA,aAAiB,QAAK,WACxB,KAAK,yBAAyBA,CAAK,UAC1BA,aAAiBnB,GAAO,MACjC,KAAK,wBAAwBmB,CAAK,MAElC,OAAM,IAAI,UAAU,8BAA8B,CAEtD,CACQ,yBAAyBA,EAAyB,CACxD,IAAME,EAAc,IAAI,IACxB,KAAK,SAAW,CAAC,EAEjB,KAAK,iBAAmB,CAAC,EACzB,KAAK,eAAiB,CAAC,EAEvB,KAAK,kBAAoB,CAAC,EAC1B,KAAK,gBAAkB,CAAC,EAExB,KAAK,OAAS,CAAC,EAEf,IAAMC,EAAe,IAAI,IAGzB,GAAI,CAACH,EAAM,MACT,MAAM,IAAI,MAAM,qCAAqC,EAEvD,IAAMI,EAAkB,CAAC,EACzB,QAAW,KAAKJ,EAAM,MAAO,CAC3B,GAAIE,EAAY,IAAI,EAAE,IAAK,EACzB,MAAM,IAAI,MAAM,0BAA0B,EAAE,IAAI,EAAE,EAEpD,IAAMG,EAAe,KAAK,SAAS,KAAK,IAAItB,GAAM,CAAC,CAAC,EAAI,EACxDmB,EAAY,IAAI,EAAE,KAAOG,CAAY,EACrCD,EAAgB,KAAK,EAAE,IAAK,CAC9B,CAGA,GAAI,CAACJ,EAAM,YACT,MAAM,IAAI,MAAM,2CAA2C,EAE7D,QAAW,KAAKA,EAAM,YAAa,CACjC,IAAIM,EAAQJ,EAAY,IAAI,EAAE,IAAK,EACnC,GAAII,IAAU,OAAW,CACvB,IAAMC,EAAQ,IAAIxB,GAClBwB,EAAM,KAAO,CACX,MAAO,CAAC,KAAMX,GAAU,oBAAoB,EAAE,IAAK,CAAC,EACpD,WAAYA,GAAU,wBAAwB,EAAE,QAAS,CAC3D,EACAU,EAAQ,KAAK,SAAS,KAAKC,CAAK,EAAI,EACpCL,EAAY,IAAI,EAAE,KAAOI,CAAK,CAChC,CACA,KAAK,SAASA,CAAK,EAAE,MAAQ,GAC7B,KAAK,SAASA,CAAK,EAAE,OAASE,GAAO,UAAU,CAAC,CAClD,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,SAAS,OAAQ,IACnC,KAAK,SAAS,CAAC,EAAE,SACpB,KAAK,iBAAiB,KAAK,CAAC,EAC5B,KAAK,eAAe,KAAKJ,EAAgB,CAAC,CAAC,GAK/C,GAAI,CAACJ,EAAM,OACT,MAAM,IAAI,MAAM,sCAAsC,EAExD,QAAW,KAAKA,EAAM,OAAQ,CA
C5B,GAAIE,EAAY,IAAI,EAAE,IAAK,EACzB,MAAM,IAAI,MAAM,2BAA2B,EAAE,IAAI,EAAE,EAErD,IAAMG,EAAe,KAAK,SAAS,KAAK,IAAItB,GAAM,CAAC,CAAC,EAAI,EACxDmB,EAAY,IAAI,EAAE,KAAOG,CAAY,EACrC,KAAK,kBAAkB,KAAKA,CAAY,EACxC,KAAK,gBAAgB,KAAK,EAAE,IAAK,CACnC,CAGA,GAAI,CAACL,EAAM,KACT,MAAM,IAAI,MAAM,oCAAoC,EAEtD,QAAWS,KAAaT,EAAM,KAAM,CAClC,GAAI,CAACS,EAAU,KAEb,QAASC,EAAO,GAAIA,IAAQ,CAC1B,IAAMZ,EAAO,WAAWW,EAAU,MAAM,IAAIC,CAAI,GAChD,GAAI,CAACP,EAAa,IAAIL,CAAI,EAAG,CAC3BW,EAAU,KAAOX,EACjB,KACF,CACF,CAGF,GAAIK,EAAa,IAAIM,EAAU,IAAI,EACjC,MAAM,IAAI,MAAM,yBAAyBA,EAAU,IAAI,EAAE,EAE3D,IAAMJ,EAAe,KAAK,OAAO,KAAK,IAAIrB,GAAKyB,CAAS,CAAC,EAAI,EAC7DN,EAAa,IAAIM,EAAU,KAAMJ,CAAY,CAC/C,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAMM,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,KAAK,CAAC,EAC9B,GAAI,CAACS,EAAU,OACb,MAAM,IAAI,MAAM,4BAA4BA,EAAU,IAAI,EAAE,EAE9D,QAAWG,KAAUH,EAAU,OAAQ,CACrC,IAAII,EAAYX,EAAY,IAAIU,CAAM,EAOtC,GANI,OAAOC,EAAc,MACvBA,EAAY,KAAK,SAAS,KAAK,IAAI9B,EAAO,EAAI,EAC9CmB,EAAY,IAAIU,EAAQC,CAAS,GAEnCF,EAAK,QAAQ,KAAKE,CAAS,EAEvB,KAAK,SAASA,CAAS,EAAE,QAAU,OACrC,MAAM,IAAI,MAAM,4CAA4CA,CAAS,EAAE,EAMzE,GAJA,KAAK,SAASA,CAAS,EAAE,MAAQ,EAI7BJ,EAAU,SAAW,WAAY,CACnC,GAAI,CAACA,EAAU,WAAaA,EAAU,UAAU,SAAW,GAAK,CAACA,EAAU,UAAU,CAAC,EAAE,EACtF,MAAM,IAAI,MAAM,qFAAqF,EAEvG,GAAI,CAACA,EAAU,QAAUA,EAAU,OAAO,SAAW,EACnD,MAAM,IAAI,MAAM,0EAA0E,EAE5FE,EAAK,QAAQ,IAAI,EACjBA,EAAK,YAAc,GAEnB,KAAK,SAASE,CAAS,EAAE,MAAQ,GACjC,KAAK,SAASA,CAAS,EAAE,OAASL,GAAO,UAAUC,EAAU,UAAU,CAAC,EAAE,CAAC,CAC7E,CACF,CACF,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAME,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,KAAK,CAAC,EAE9B,GAAI,CAACS,EAAU,MACb,MAAM,IAAI,MAAM,2BAA2BA,EAAU,IAAI,EAAE,EAE7D,QAAWK,KAASL,EAAU,MAAO,CACnC,IAAMI,EAAYX,EAAY,IAAIY,CAAK,EACvC,GAAI,OAAOD,EAAc,IAAa,CAEpC,GAAIC,IAAU,KAAOL,EAAU,MAAM,SAAW,GAAKA,EAAU,MAAM,SAAW,IAC5EA,EAAU,SAAW,SACvB,SAEF,MAAM,IAAI,MAAM,uBAAuBK,CAAK,eAAeL,EAAU,IAAI,EAAE,CAC7E,CACAE,EAAK,OAAO,KAAKE,CAAS,EAE1B,KAAK,SAASA,CAAS,EAAE,IAAI,KAAK,CAAC,CACrC,CACF,CAEA,MAAO,EACT,CAEQ,wBAAwBb,EAAqB,CACnD,IAAME,EAAc,IAAI,IACxB,KAAK,SAAW,CAAC,EAEjB,KAAK,iBAAmB,CAAC,EACzB,KAAK,eAAiB,CAAC,EAEvB,KAAK,kBAAoB,CAAC,EAC1B,KAAK,gBAAkB,CAAC,EAExB,KAAK,OAAS,CAAC,EAEf,IAAMC,EAAe,IAAI,IAGnBC,EAAkB,CAAC,EACzB,QAAS,EAAI,EAAG,EAAIJ,EAAM,aAAa,EAAG,IAAK,CAC7C,IAAMe,EAAYf,EAAM,OAAO,CAAC,EAChC,GAAIE,EAAY,IAAIa,CAAS,EAC3B,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,EAGvD,QAASC,EAAI,EAAGA,EAAIhB,EAAM,eAAe,EAAGgB,IAC1C,GAAIhB,EAAM,SAASgB,CAAC,GAAG,KAAK,IAAMD,EAAW,CAC3C,IAAMR,EAAQ,IAAIxB,GAElB,GADkBiB,EAAM,SAASgB,CAAC,GAAG,KAAK,GAAG,UAAU,IACrCnC,GAAO,cAAc,YACrC,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMc,EAAYK,EAAM,SAASgB,CAAC,EAAG,KAAK,EAAG,MAAM,IAAInC,GAAO,kBAAoB,EAC5EoC,EAAOrB,GAAU,wBAAwBD,EAAU,SAAS,CAAC,EAC7DuB,EAAQvB,EAAU,MAAM,EACxBwB,EAAO,CAAC,EACd,QAASC,EAAI,EAAGA,EAAIF,EAAM,UAAU,EAAIE,IACtCD,EAAK,KAAKE,GAAS,aAAaH,EAAM,IAAIE,CAAC,EAAG,MAAM,EAAG,SAAS,CAAE,CAAC,EAErEb,EAAM,KAAO,CAAC,MAAO,CAAC,KAAAY,CAAI,EAAG,WAAYF,CAAI,EAC7C,IAAMZ,EAAe,KAAK,SAAS,KAAKE,CAAK,EAAI,EACjDL,EAAY,IAAIa,EAAWV,CAAY,EACvCD,EAAgB,KAAKW,CAAS,CAChC,CAEJ,CAEA,QAAS,EAAI,EAAG,EAAIf,EAAM,mBAAmB,EAAG,IAAK,CACnD,IAAMN,EAAcM,EAAM,aAAa,CAAC,EACpCM,EAAQJ,EAAY,IAAIR,EAAY,KAAK,CAAE,EAC/C,GAAIY,IAAU,OAAW,CACvB,IAAMC,EAAQ,IAAIxB,GACZoC,EAAOvB,GAAU,wBAAwBF,CAAW,EACpDuB,EAAOrB,GAAU,wBAAwBF,EAAY,SAAS,CAAC,EACrEa,EAAM,KAAO,CAAC,MAAO,CAAC,KAAAY,CAAI,EAAG,WAAYF,CAAI,EAC7CX,EAAQ,KAAK,SAAS,KAAKC,CAAK,EAAI,EACpCL,EAAY,IAAIR,EAAY,KAAK,EAAIY,CAAK,CAC5C,CACA,KAAK,SAASA,CAAK,EAAE,MAAQ,GAC7B,KAAK,SAASA,CAAK,EAAE,OAASE,GAAO,cAAcd,CAAW,CAChE,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,SAAS,OAAQ,IACnC,KAAK,SAAS,CAAC,EAAE,SACpB,KAAK,iBAAiB,KAAK,CAAC,EAC5B
,KAAK,eAAe,KAAKU,EAAgB,CAAC,CAAC,GAK/C,QAAS,EAAI,EAAG,EAAIJ,EAAM,cAAc,EAAG,IAAK,CAC9C,IAAMsB,EAAatB,EAAM,QAAQ,CAAC,EAClC,GAAIE,EAAY,IAAIoB,CAAU,EAC5B,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,EAEzD,IAAMjB,EAAe,KAAK,SAAS,KAAK,IAAItB,EAAO,EAAI,EACvDmB,EAAY,IAAIoB,EAAYjB,CAAY,EACxC,KAAK,kBAAkB,KAAKA,CAAY,EACxC,KAAK,gBAAgB,KAAKiB,CAAU,CACtC,CAGA,GAAI,CAACtB,EAAM,MACT,MAAM,IAAI,MAAM,oCAAoC,EAEtD,QAAS,EAAI,EAAG,EAAIA,EAAM,YAAY,EAAG,IAAK,CAC5C,IAAMS,EAAYT,EAAM,MAAM,CAAC,EAC3BF,EAAOW,EAAW,KAAK,EAC3B,GAAI,CAACX,EAEH,QAASY,EAAO,EACdZ,EAAO,WAAWW,EAAW,OAAO,CAAC,IAAIC,CAAI,GACzC,EAACP,EAAa,IAAIL,CAAI,EAFRY,IAElB,CAOJ,GAAIP,EAAa,IAAIL,CAAI,EACvB,MAAM,IAAI,MAAM,yBAAyBA,CAAI,EAAE,EAEjD,IAAMO,EAAe,KAAK,OAAO,KAAK,IAAIrB,GAAKyB,EAAYX,CAAI,CAAC,EAAI,EACpEK,EAAa,IAAIL,EAAMO,CAAY,CACrC,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAMM,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,MAAM,CAAC,EAC/B,GAAIS,GAAa,KACf,MAAM,IAAI,MAAM,2BAA2B,CAAC,EAAE,EAEhD,GAAIA,GAAW,cAAc,IAAM,EACjC,MAAM,IAAI,MAAM,4BAA4BA,EAAU,IAAI,EAAE,EAE9D,QAASO,EAAI,EAAGA,EAAIP,GAAW,cAAc,EAAGO,IAAK,CACnD,IAAMJ,EAASH,GAAW,QAAQO,CAAC,EAC/BH,EAAYX,EAAY,IAAIU,CAAM,EAOtC,GANI,OAAOC,EAAc,MACvBA,EAAY,KAAK,SAAS,KAAK,IAAI9B,EAAO,EAAI,EAC9CmB,EAAY,IAAIU,EAAQC,CAAS,GAEnCF,EAAK,QAAQ,KAAKE,CAAS,EAEvB,KAAK,SAASA,CAAS,EAAE,QAAU,OACrC,MAAM,IAAI,MAAM,4CAA4CA,CAAS,EAAE,EAMzE,GAJA,KAAK,SAASA,CAAS,EAAE,MAAQ,EAI7BJ,EAAU,OAAO,IAAM,WAAY,CACrC,GAAIA,EAAU,iBAAiB,IAAM,GAAK,CAACA,EAAU,WAAW,CAAC,EAAG,EAAE,EACpE,MAAM,IAAI,MAAM,qFAAqF,EAEvG,GAAIA,EAAU,cAAc,IAAM,EAChC,MAAM,IAAI,MAAM,0EAA0E,EAE5FE,EAAK,QAAQ,IAAI,EACjBA,EAAK,YAAc,GAEnB,KAAK,SAASE,CAAS,EAAE,MAAQ,GACjC,KAAK,SAASA,CAAS,EAAE,OAASL,GAAO,cAAcC,EAAU,WAAW,CAAC,EAAG,EAAE,CAAE,CACtF,CACF,CACF,CAGA,QAAS,EAAI,EAAG,EAAI,KAAK,OAAO,OAAQ,IAAK,CAC3C,IAAME,EAAO,KAAK,OAAO,CAAC,EACpBF,EAAYT,EAAM,MAAM,CAAC,EAE/B,GAAIS,EAAU,aAAa,IAAM,EAC/B,MAAM,IAAI,MAAM,2BAA2BA,EAAU,IAAI,EAAE,EAE7D,QAASO,EAAI,EAAGA,EAAIP,EAAU,aAAa,EAAIO,IAAK,CAClD,IAAMF,EAAQL,EAAU,OAAOO,CAAC,EAC1BH,EAAYX,EAAY,IAAIY,CAAK,EACvC,GAAI,OAAOD,EAAc,IACvB,MAAM,IAAI,MAAM,uBAAuBC,CAAK,eAAeL,EAAW,KAAK,CAAC,EAAE,EAEhFE,EAAK,OAAO,KAAKE,CAAS,EAE1B,KAAK,SAASA,CAAS,EAAE,IAAI,KAAK,CAAC,CACrC,CACF,CACF,CAEQ,gBAAiB,CAEvB,IAAMU,EAAwB,IAAI,IAClC,KAAK,iBAAiB,QAAQC,GAAK,CACpB,KAAK,SAASA,CAAC,EACvB,IAAI,QAAQR,GAAK,CACpBO,EAAS,IAAIP,CAAC,CAChB,CAAC,CACH,CAAC,EAGD,IAAMS,EAAa,MAAM,KAAKF,CAAQ,EAChCG,EAAa,IAAI,MAAc,KAAK,OAAO,MAAM,EAAE,KAAK,OAAO,EAErE,KAAOD,EAAW,OAAS,GAAG,CAC5B,IAAME,EAAYF,EAAW,IAAI,EAE7BC,EAAWC,CAAS,IAAM,OAC5BD,EAAWC,CAAS,EAAI,SAGxBF,EAAW,KAAKE,CAAS,EACzBD,EAAWC,CAAS,EAAI,OAExB,KAAK,OAAOA,CAAS,EAAE,QAAQ,QAASC,GAAsB,CAC5D,IAAMC,EAAO,KAAK,SAASD,CAAiB,EAC5C,GAAI,OAAOC,EAAK,OAAW,IACzB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,QAAUF,EACjB,MAAM,IAAI,MAAM,+EAAgF,EAElGE,EAAK,IAAI,QAASC,GAAwB,CAExC,GAAIJ,EAAWI,CAAmB,IAAM,OACtC,MAAM,IAAI,MAAM,uBAAuB,EAGhCJ,EAAWI,CAAmB,IAAM,SAC3CL,EAAW,KAAKK,CAAmB,CAEvC,CAAC,CACH,CAAC,EAEL,CACF,CAEQ,eAAe7B,EAA4C,CAEjE,KAAK,uBAAuB,EAC5B,KAAK,sBAAsB,EAC3B,KAAK,wBAAwB,EAEzBA,GACFA,EAAiB,eAAe,IAAI,EAItC,KAAK,cAAc,CACrB,CAQA,eAAgB,CACd,IAAI8B,EAAS,EAMPC,EAAa,IAAI,MAAc,KAAK,OAAO,OAAQ,CAAC,EACtDC,EAAgB,EAEpB,QAAST,EAAI,EAAGA,EAAI,KAAK,OAAO,OAAQA,IAEtCQ,EAAWR,CAAC,EAAIS,EACZ,KAAK,OAAOT,CAAC,EAAE,aACbS,IAAkBT,IACpB,KAAK,OAAOS,CAAa,EAAI,KAAK,OAAOT,CAAC,GAE5CS,KAIA,KAAK,OAAOT,CAAC,EAAE,QAAQ,QAAQU,GAAO,CACpC,KAAK,SAASA,CAAG,EAAE,MAAQ,EAC7B,CAAC,EAKL,KAAK,OAAO,OAAOD,EAAe,KAAK,OAAO,OAASA,CAAa,EAGpE,QAAST,EAAI,EAAGA,EAAI,KAAK,SAAS,OAAQA,IAAK,CAC7C,IAAMW,EAAc,KAAK,SAASX,CAAC,EAC/BW,EAAY,QAAU,QAAaA,EAAY,QAAU,IAAMA,EAAY,QAAU,KACvFA,EAAY,MAAQH,EAAWG,EAAY,KAAK,GAGlD,QAASnB,EAAI,EAAGA,EAAImB
,EAAY,IAAI,OAAQnB,IAC1C,GAAImB,EAAY,IAAInB,CAAC,GAAK,EACxBmB,EAAY,IAAInB,CAAC,EAAIgB,EAAWG,EAAY,IAAInB,CAAC,CAAC,MAElD,OAAM,IAAI,MAAM,iCAAiC,CAGvD,CAEAe,EAAS,EAET,QAASP,EAAI,EAAGA,EAAI,KAAK,SAAS,OAAQA,IAAK,CAE7C,GAAI,KAAK,SAASA,CAAC,EAAE,OAAS,IAAM,KAAK,kBAAkB,QAAQA,EAAIO,CAAM,IAAM,GAAI,CACrFA,IACA,KAAK,SAAS,OAAOP,EAAG,CAAC,EACzBA,IACA,QACF,CACA,GAAIO,EAAS,EAAG,CACd,IAAIG,EAAM,GAGN,KAAK,SAASV,CAAC,EAAE,OAAS,QAAa,KAAK,SAASA,CAAC,EAAE,OAAS,IACnEU,EAAM,KAAK,OAAO,KAAK,SAASV,CAAC,EAAE,IAAI,EAAE,QAAQ,QAAQA,EAAIO,CAAM,EAC/DG,IAAQ,KACV,KAAK,OAAO,KAAK,SAASV,CAAC,EAAE,IAAI,EAAE,QAAQU,CAAG,EAAIV,KAIpDU,EAAM,KAAK,iBAAiB,QAAQV,EAAIO,CAAM,EAC1CG,IAAQ,KACV,KAAK,iBAAiBA,CAAG,EAAIV,IAKjC,KAAK,SAASA,CAAC,EAAE,GAAG,QAAQb,GAAQ,CAClCuB,EAAM,KAAK,OAAOvB,CAAI,EAAE,OAAO,QAAQa,EAAIO,CAAM,EAC7CG,IAAQ,KACV,KAAK,OAAOvB,CAAI,EAAE,OAAOuB,CAAG,EAAIV,EAEpC,CAAC,EACG,KAAK,SAASA,CAAC,EAAE,GAAG,SAAW,IAEjCU,EAAM,KAAK,kBAAkB,QAAQV,EAAIO,CAAM,EAC3CG,IAAQ,KACV,KAAK,kBAAkBA,CAAG,EAAIV,GAGpC,CACF,CACF,CAOQ,WAAWG,EAAmB,CACpC,IAAMhB,EAAO,KAAK,OAAOgB,CAAS,EAClC,GAAIhB,EAAK,QAAQ,OAAS,GACxB,QAASa,EAAI,EAAGA,EAAIb,EAAK,QAAQ,OAAQa,IACvC,GAAI,KAAK,SAASb,EAAK,QAAQa,CAAC,CAAC,EAAE,GAAG,OAAS,EAC7C,MAAM,IAAI,MAAM,qFAAqF,EAM3Gb,EAAK,YAAc,GACnB,IAAMyB,EAAkBzB,EAAK,OAAO,CAAC,EAC/B0B,EAAmB1B,EAAK,QAAQ,CAAC,EACjC2B,EAAuB,KAAK,SAASD,CAAgB,EAAE,GAG7D,QAASb,EAAI,EAAGA,EAAIb,EAAK,OAAO,OAAQa,IAAK,CAC3C,IAAMe,EAAW,KAAK,SAAS5B,EAAK,OAAOa,CAAC,CAAC,EAAE,GAAG,QAAQG,CAAS,EAEnE,GAAIY,IAAa,GACf,MAAM,IAAI,MAAM,uEAA2E,EAE7F,KAAK,SAAS5B,EAAK,OAAOa,CAAC,CAAC,EAAE,GAAG,OAAOe,EAAU,CAAC,CACrD,CAGA,KAAK,SAASF,CAAgB,EAAE,IAAM,CAAC,EAGvC,IAAM/B,EAAQ,KAAK,kBAAkB,QAAQ+B,CAAgB,EAM7D,GALI/B,IAAU,KACZ,KAAK,kBAAkBA,CAAK,EAAI8B,GAI9BE,GAAwBA,EAAqB,OAAS,EACxD,QAAWX,KAAaW,EAAsB,CAC5C,IAAME,EAAe,KAAK,OAAOb,CAAS,EAAE,OAAO,QAAQU,CAAgB,EAE3E,GAAIG,IAAiB,GACnB,MAAM,IAAI,MAAM,0EAA8E,EAEhG,KAAK,OAAOb,CAAS,EAAE,OAAOa,CAAY,EAAIJ,EAC9C,KAAK,SAASA,CAAe,EAAE,GAAG,KAAKT,CAAS,CAClD,CAEJ,CAEA,uBAAwB,CACtB,IAAIA,EAAY,EAChB,QAAWhB,KAAQ,KAAK,OAAQ,CAE9B,GAAIA,EAAK,SAAW,UAAW,CAE7B,GAAIA,EAAK,OAAO,SAAW,EACzB,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIA,EAAK,QAAQ,SAAW,GAAKA,EAAK,QAAQ,SAAW,EACvD,MAAM,IAAI,MAAM,sDAAsD,EAGxE,GAAIA,EAAK,QAAQ,SAAW,GAAK,KAAK,SAASA,EAAK,QAAQ,CAAC,CAAC,EAAE,IAAI,SAAW,EAC7E,MAAM,IAAI,MAAM,uEAAwE,EAE1F,KAAK,WAAWgB,CAAS,CAC3B,CACAA,GACF,CACF,CAEA,wBAAyB,CACvB,IAAIA,EAAY,EAChB,QAAWhB,KAAQ,KAAK,OAElBA,EAAK,SAAW,YAClB,KAAK,WAAWgB,CAAS,EAE3BA,GAEJ,CAEA,aAAac,EAAkB,CAC7B,OAAQA,EAAE,OAAQ,CAEhB,IAAK,OACL,IAAK,UACL,IAAK,OACH,MAAO,GACT,QACE,MAAO,EACX,CACF,CAEA,yBAA0B,CACxB,QAAW9B,KAAQ,KAAK,OACtB,GAAIA,EAAK,SAAW,OAAQ,CAC1B,IAAM+B,EAAO,KAAK,SAAS/B,EAAK,QAAQ,CAAC,CAAC,EAAE,IAC5C,GAAI+B,EAAK,SAAW,GAAK,KAAK,aAAa,KAAK,OAAOA,EAAK,CAAC,CAAC,CAAC,EAAG,CAChE,IAAMC,EAAQ,KAAK,OAAOD,EAAK,CAAC,CAAC,EACjC,GAAIC,EAAM,SAAW,OACnB,GAAIA,EAAM,OAAO,SAAW,EAC1B,GAAI,CACFhC,EAAK,WAAW,IACZ,oBAAqB,SACrB,CAACgC,EAAM,WAAW,SAAS,KAAK,EAAGA,EAAM,WAAW,SAAS,KAAK,CAAC,CAAC,CAC1E,MAAY,CACVhC,EAAK,WAAW,IAAI,oBAAqB,SAAU,CAACiC,GAAUC,EAAQ,CAAC,CACzE,SAEEF,EAAM,OAAO,QAAU,GAAK,KAAK,SAASA,EAAM,OAAO,CAAC,CAAC,EAAE,SAAW,QACtE,KAAK,SAASA,EAAM,OAAO,CAAC,CAAC,EAAE,SAAW,OAC5ChC,EAAK,WAAW,IAAI,oBAAqB,SAAU,CACjD,KAAK,SAASgC,EAAM,OAAO,CAAC,CAAC,EAAE,OAAQ,UAAU,CAAC,EAAG,KAAK,SAASA,EAAM,OAAO,CAAC,CAAC,EAAE,OAAQ,UAAU,CAAC,CACzG,CAAC,MAGD,UAGJhC,EAAK,WAAW,IAAI,aAAc,SAAWgC,EAAM,MAAO,EAC1D,KAAK,WAAWD,EAAK,CAAC,CAAC,CACzB,CACF,CAEJ,CACF,ICtyBA,IAQAI,GAGOC,GAEMC,GAbbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAP,GAAmB,SACnBQ,KAEOP,GAASQ,GAAY,aAAa,IAE5BP,GAAN,KAAY,CAEjB,aAAc,CAAC,CAEf,KAAKQ,EAAiBC,EAAsCC,EAA6B,CACvF,IAAIC,EACJ,GAAI,CAACD,EAEH,GAAI,CACF,KAAK,mBAAmB
F,EAAKC,CAAgB,EAC7C,MACF,OAASG,EAAG,CACV,GAAIF,IAAgB,OAClB,MAAME,EAERD,EAAYC,CACd,CAGF,GAAI,CACF,KAAK,kBAAkBJ,EAAKC,CAAgB,CAC9C,OAASG,EAAG,CACV,MAAIF,IAAgB,OACZE,EAGF,IAAI,MAAM,wCAAwCD,CAAS;AAAA,iBAAoBC,CAAC,EAAE,CAC1F,CACF,CAEQ,mBAAmBJ,EAAiBC,EAA4C,CACtF,IAAMI,EAAa,QAAK,WAAW,OAAOL,CAAG,EAE7C,GADkBM,GAAS,aAAaD,EAAW,SAAS,EAC5C,EACd,MAAM,IAAI,MAAM,4CAA4C,EAG9D,KAAK,QACDA,EAAW,YAAY,IAAI,IAAM,CAAC,OAAQ,EAAE,OAAkB,QAASC,GAAS,aAAa,EAAE,OAAQ,CAAC,EAAE,EAE9G,KAAK,OAASC,GAAM,KAAKF,EAAW,MAAQJ,CAAgB,CAC9D,CAEQ,kBAAkBD,EAAiBC,EAA4C,CACrF,IAAMO,EAAK,IAAIC,EAAY,WAAWT,CAAG,EACnCU,EAAWnB,GAAO,iBAAiB,0BAA0BiB,CAAE,EAAE,MAAM,EAE7E,GADkBF,GAAS,aAAaI,EAAS,UAAU,CAAC,EAC5C,EACd,MAAM,IAAI,MAAM,4CAA4C,EAE9D,KAAK,QAAU,CAAC,EAChB,QAASC,EAAI,EAAGA,EAAID,EAAS,kBAAkB,EAAGC,IAAK,CACrD,IAAMC,EAAUF,EAAS,YAAYC,CAAC,EACtC,KAAK,QAAQ,KAAK,CAAC,OAAQC,GAAS,OAAO,EAAa,QAASN,GAAS,aAAaM,EAAQ,QAAQ,CAAE,CAAC,CAAC,CAC7G,CAEA,KAAK,OAASL,GAAM,KAAKG,EAAS,MAAM,EAAIT,CAAgB,CAC9D,CAGA,IAAI,OAAe,CACjB,OAAO,KAAK,MACd,CAGA,IAAI,QAA2B,CAC7B,OAAO,KAAK,OACd,CACF,ICjFA,IAwBaY,GAxBbC,GAAAC,EAAA,kBAGAC,KACAC,KAEAC,KACAC,KAiBaN,GAAN,KAAc,CACnB,YAAYO,EAAyB,CAAC,EAAG,CACvC,KAAK,aAAe,GACpB,KAAK,YAAcA,EAAO,YAC1B,KAAK,SAAWC,GAAS,OAAOD,EAAO,QAAQ,EAC/C,KAAK,QAAU,CAAC,SAAU,KAAK,SAAU,gBAAiB,CAAC,EAAG,eAAgB,CAAC,CAAC,CAClF,CAEA,IAAI,YAAgC,CAClC,OAAO,KAAK,OAAO,MAAM,cAAc,CACzC,CACA,IAAI,aAAiC,CACnC,OAAO,KAAK,OAAO,MAAM,eAAe,CAC1C,CAEA,gBAAiB,CACf,KAAK,SAAS,MAAM,CACtB,CAEA,cAAe,CACb,KAAK,SAAS,KAAK,CACrB,CAKA,MAAM,UAAUE,EAAoCC,EAAqBC,EAAgC,CACvG,MAAM,KAAK,SAAS,MAAM,UAAW,oBAAqB,SAAY,CAEpE,IAAMC,EAAU,MAAMC,GAAe,KAAK,WAAW,EAIrD,GAHA,KAAK,eAAiBD,EAAQ,qBAAqB,KAAK,OAAO,EAE/D,KAAK,OAAS,IAAIE,GACd,OAAOL,GAAQ,SAAU,CAC3B,IAAMM,EAAcN,EAAI,SAAS,MAAM,EAMhC,CAGL,IAAMO,EAAM,MADK,MAAM,MAAMP,CAAG,GACL,YAAY,EACvC,KAAK,WAAW,IAAI,WAAWO,CAAG,EAAGD,CAAW,CAClD,CACF,SAAY,YAAY,OAAON,CAAG,EAMhC,KAAK,WAAWA,CAAG,MANgB,CAEnC,IAAMQ,EAAM,IAAI,WAAWR,EAAKC,GAAc,EAAGC,GAAUF,EAAI,UAAU,EACzE,KAAK,WAAWQ,CAAG,CACrB,CAIF,CAAC,CACH,CAEQ,WAAWC,EAA4BH,EAA6B,CAC1E,GAAI,KAAK,aACP,MAAM,IAAI,MAAM,qBAAqB,EAGvC,KAAK,SAAS,MAAM,UAAW,qBAAsB,IAAM,CAEzD,IAAMI,EACF,KAAK,eAAe,eAAiB,KAAK,eAAsC,OACpF,KAAK,OAAO,KAAKD,EAAgBC,EAAkBJ,CAAW,EAG1D,KAAK,eAAe,oBACtB,KAAK,eAAe,mBAAmB,KAAK,OAAO,KAAK,EAG1D,KAAK,cAAc,KAAK,OAAO,KAAK,EAGpC,KAAK,eAAiB,IAAIK,GAAc,KAAK,OAAO,MAAO,KAAK,KAAM,KAAK,QAAQ,CACrF,CAAC,EAED,KAAK,aAAe,EACtB,CAEA,MAAM,IAAIC,EAAoE,CAC5E,GAAI,CAAC,KAAK,aACR,MAAM,IAAI,MAAM,6BAA6B,EAG/C,OAAO,KAAK,SAAS,MAAM,UAAW,cAAe,SAAY,CAC/D,IAAMC,EAAe,KAAK,2BAA2BD,CAAM,EAErDE,EAAgB,MAAM,KAAK,eAAe,QAAQ,KAAK,eAAgBD,CAAY,EAEzF,OAAO,KAAK,aAAaC,CAAa,CACxC,CAAC,CACH,CAEQ,2BAA2BF,EAAgD,CACjF,IAAMG,EAAkB,KAAK,OAAO,MAAM,cAAc,EAIxD,GAAI,MAAM,QAAQH,CAAM,GACtB,GAAIA,EAAO,SAAWG,EAAgB,OACpC,MAAM,IAAI,MAAM,0CAA0CA,EAAgB,MAAM,YAAYH,EAAO,MAAM,EAAE,MAK1G,CACH,GAAIA,EAAO,OAASG,EAAgB,OAClC,MAAM,IAAI,MAAM,sCAAsCA,EAAgB,MAAM,YAAYH,EAAO,IAAI,EAAE,EAGvG,IAAMI,EAAe,IAAI,MAAcJ,EAAO,IAAI,EAC9CK,EAAoB,EACxB,QAAS,EAAI,EAAG,EAAIF,EAAgB,OAAQ,EAAE,EAAG,CAC/C,IAAMG,EAASN,EAAO,IAAIG,EAAgB,CAAC,CAAC,EAC5C,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,8BAA8B,IAAI,GAAG,EAEvDF,EAAaC,GAAmB,EAAIC,CACtC,CAEAN,EAASI,CACX,CAIA,GAAI,CAAC,KAAK,QAAQ,iBAAmB,KAAK,QAAQ,gBAAgB,SAAW,GAAK,CAAC,KAAK,QAAQ,gBAC5F,KAAK,QAAQ,eAAe,SAAW,EAAG,CAC5C,IAAMG,EAAoB,KAAK,OAAO,MAAM,gBAAgB,EACtDC,EAAc,KAAK,OAAO,MAAM,UAAU,EAE1CC,EAAiB,IAAI,MAAyBF,EAAkB,MAAM,EAE5E,QAASG,EAAI,EAAGA,EAAIH,EAAkB,OAAQ,EAAEG,EAAG,CACjD,IAAMC,EAAaH,EAAYD,EAAkBG,CAAC,CAAC,EACnDD,EAAeC,CAAC,EAAIC,EAAW,KAAM,MAAM,KAI3C,KAAK,QAAQ,gBAAiB,KAAKA,EAAW,KAAM,UAAU,EAC9D,KAAK,QAAQ,eAAgB,KAAKX,EAAOU,CAAC,EAAE,IAAI,CAClD,CAEA,KAAK,wBAAwBD,EAAgBT,EAAQ,EAAI
,CAC3D,MAIE,KAAK,wBAAwB,KAAK,QAAQ,eAAgBA,EAAQ,EAAK,EAIzE,YAAK,yBAAyB,KAAK,QAAQ,gBAAkBA,CAAM,EAE5DA,CACT,CAEQ,yBAAyBY,EAAoCC,EAAuB,CAC1F,QAASH,EAAI,EAAGA,EAAIG,EAAY,OAAQH,IAAK,CAC3C,IAAMI,EAAeF,EAAgBF,CAAC,EAChCK,EAAaF,EAAYH,CAAC,EAAE,KAClC,GAAII,IAAiBC,EACnB,MAAM,IAAI,MAAM,gBAAgBL,CAAC,kCAAkCI,CAAY,aAAaC,CAAU,EAAE,CAE5G,CACF,CAEQ,wBACJN,EAA0CI,EAAuBG,EAA2B,CAC9F,QAASN,EAAI,EAAGA,EAAIG,EAAY,OAAQH,IAAK,CAC3C,IAAMO,EAAeR,EAAeC,CAAC,EAC/BQ,EAAaL,EAAYH,CAAC,EAAE,KAClC,GAAI,CAAC,KAAK,kBAAkBO,EAAcC,EAAYF,CAAgB,EACpE,MAAM,IAAI,MAAM,gBAAgBN,CAAC,oCAAoCO,EAAa,KAAK,GAAG,CAAC,eACvFC,EAAW,KAAK,GAAG,CAAC,GAAG,CAE/B,CACF,CAEQ,kBAAkBD,EAAiCC,EAA+BF,EAC9E,CACV,GAAIC,EAAa,SAAWC,EAAW,OACrC,MAAO,GAGT,QAASR,EAAI,EAAGA,EAAIO,EAAa,OAAQ,EAAEP,EACzC,GAAIO,EAAaP,CAAC,IAAMQ,EAAWR,CAAC,IAAM,CAACM,GAAoBC,EAAaP,CAAC,IAAM,GAEjF,MAAO,GAIX,MAAO,EACT,CAEQ,aAAaR,EAA8C,CACjE,IAAMiB,EAAmB,KAAK,OAAO,MAAM,eAAe,EAC1D,GAAIjB,EAAc,SAAWiB,EAAiB,OAC5C,MAAM,IAAI,MAAM,qEAAqE,EAGvF,IAAMC,EAAS,IAAI,IACnB,QAASV,EAAI,EAAGA,EAAIS,EAAiB,OAAQ,EAAET,EAC7CU,EAAO,IAAID,EAAiBT,CAAC,EAAGR,EAAcQ,CAAC,CAAC,EAGlD,OAAOU,CACT,CAEQ,cAAcC,EAAoB,CACxC,IAAMC,EAAQD,EAAM,SAAS,EAC7B,KAAK,KAAO,IAAI,MAAMC,EAAM,MAAM,EAElC,QAASZ,EAAI,EAAGA,EAAIY,EAAM,OAAQZ,IAChC,KAAK,KAAKA,CAAC,EAAI,KAAK,eAAe,QAAQY,EAAMZ,CAAC,EAAG,KAAK,OAAO,OAAQW,CAAK,CAElF,CAaF,IC/PA,IAQaE,GARbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEaJ,GAAN,KAA8D,CACnE,YAAoBK,EAAkB,CAAlB,aAAAA,EAClB,KAAK,WAAa,KAAK,QAAQ,WAC/B,KAAK,YAAc,KAAK,QAAQ,WAClC,CAEA,MAAM,SAAyB,CAAC,CAGhC,MAAM,IACFC,EAAiCC,EACjCC,EAA2E,CAC7E,IAAMC,EAAW,IAAI,IACrB,QAAWC,KAAQJ,EACjB,GAAI,OAAO,eAAe,KAAKA,EAAOI,CAAI,EAAG,CAC3C,IAAMC,EAAOL,EAAMI,CAAI,EACvBD,EAAS,IACLC,EACA,IAAIE,GACAD,EAAK,KAAMA,EAAK,KAA+B,OAAW,OAC1DA,EAAK,IAA+B,CAAC,CAC/C,CAEF,IAAME,EAAY,MAAM,KAAK,QAAQ,IAAIJ,CAAQ,EAC3CK,EAAoC,CAAC,EAC3C,OAAAD,EAAU,QAAQ,CAACE,EAAQL,IAAS,CAClCI,EAAOJ,CAAI,EAAI,IAAIE,GAAOG,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,CACjE,CAAC,EACMD,CACT,CACA,gBAAuB,CACrB,KAAK,QAAQ,eAAe,CAC9B,CACA,cAAqB,CACnB,KAAK,QAAQ,aAAa,CAC5B,CACF,IC5CA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,mBAAAE,KAAA,IASMC,GAuBOD,GAhCbE,GAAAC,EAAA,kBAMAC,KACAC,KAEMJ,GAAN,KAAuC,CAErC,MAAM,MAAsB,CAAC,CAE7B,MAAM,8BAA8BK,EAAiCC,EAChC,CAKnC,IAAMC,EAAU,IAAIC,GAAQF,CAAoC,EAGhE,OAAI,OAAOD,GAAiB,SAC1B,MAAME,EAAQ,UAAUF,CAAY,EAEpC,MAAME,EAAQ,UAAUF,CAAY,EAG/B,IAAII,GAAqBF,CAAO,CACzC,CACF,EAEaR,GAAgB,IAAIC,KChCjC,IAAAU,GAAAC,EAAA,oBCAA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IA0FMC,GACAC,GAwFCF,GAnLPG,GAAAC,EAAA,kBAsFAC,KACAC,KACAC,KAEMN,GAAc,wBACdC,GAAgB,WAAW,MAAM,OAASD,GAE5CC,KAEF,KAAK,UAAaM,GAA2C,CAC3D,GAAM,CAAC,KAAAC,EAAM,GAAKC,CAAO,EAAIF,EAAG,KAChC,GAAI,CACF,OAAQC,EAAM,CACZ,IAAK,YACHE,GAAsBD,EAAS,IAAI,EAC9B,KACG,IAAM,CACJE,GAAYF,CAAQ,EAAE,KAClB,IAAM,CACJ,YAAY,CAAC,KAAAD,CAAI,CAAC,CACpB,EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,CACP,EACAA,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,MACF,IAAK,UAAW,CACd,GAAM,CAAC,OAAAC,EAAQ,IAAAC,CAAG,EAAIL,EACtBM,GAAOD,EAAKD,CAAM,EACb,KACG,IAAM,CACJ,YAAY,CAAC,KAAAL,CAAI,CAAC,CACpB,EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,YAAa,CAChB,GAAM,CAAC,OAAAI,CAAM,EAAIP,EACXQ,EAAaC,GAAuBF,CAAM,EAChD,YAAY,CAAC,KAAAR,EAAM,IAAKS,CAAU,CAAmB,EACrD,KACF,CACA,IAAK,SAAU,CACb,GAAM,CAAC,MAAAE,EAAO,QAAAC,CAAO,EAAIX,EACzBY,GAAcF,EAAOC,CAAO,EACvB,KACGE,GAAmB,CACjB,YAAY,CAAC,KAAAd,EAAM,IAAKc,CAAe,CAAmB,CAC5D,EACAV,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,UACHW,GAAed,CAAQ,EACvB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,IAAK,MAAO,CACV,GAAM,CAAC,UAAAgB,EAAW,aAAAC,EAAc,OAAAC,EAAQ,cAAAC,EAAe,QAAAP,CAAO,EAAIX,EAClEmB,G
AAIJ,EAAWC,EAAcC,EAAQC,EAAe,IAAI,MAAMA,EAAc,MAAM,EAAE,KAAK,IAAI,EAAGP,CAAO,EAClG,KACGS,GAAW,CACLA,EAAQ,KAAKC,GAAKA,EAAE,CAAC,IAAM,KAAK,EAClC,YAAY,CAAC,KAAAtB,EAAM,IAAK,iDAAiD,CAAC,EAE1E,YACI,CAAC,KAAAA,EAAM,IAAKqB,CAAO,EACnBE,GAA2B,CAAC,GAAGL,EAAQ,GAAGG,CAAO,CAAiC,CAAC,CAE3F,EACAjB,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,gBACHoB,GAAavB,CAAQ,EACrB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,QACF,CACF,OAASI,EAAK,CACZ,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAmB,CAC3C,CACF,GAGKb,GAAQE,GACX,KACCgC,GACG,IAAI,OAAOA,GAAeC,GAAY,CAAC,KAA0B,SAAsB,KAAMlC,EAAW,CAAC,ICtLjH,IAAAmC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IAAIC,GAAEC,GAA4s0CF,GAAlt0CG,GAAAC,EAAA,kBAAMF,IAAGD,GAAE,YAAY,IAAI,eAAe,EAAE,CAAC,EAAE,CAAC,SAASI,GAAG,CAAC,OAAOC,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAED,EAAC,CAAC,SAAS,GAAG,CAAC,OAAOD,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEC,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOJ,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEG,EAAC,CAAC,SAAS,GAAG,CAAC,OAAOL,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEI,EAAC,CAAC,SAAS,GAAG,CAAC,OAAON,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEK,EAAC,CAAC,SAAS,GAAG,CAAC,OAAOP,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEM,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOT,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEQ,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOX,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEU,EAAC,CAAC,IAAI,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAE,CAAC,EAAEC,EAAE,IAAI,QAAS,CAACpB,EAAEC,IAAI,CAAC,EAAED,EAAEkB,EAAEjB,CAAC,CAAE,EAAEoB,EAAY,OAAO,QAAjB,SAAwBC,EAAc,OAAO,eAAnB,WAAiCC,EAAED,GAAiB,KAAK,MAAnB,aAAwBH,EAAE,kBAAkB,CAACnB,EAAEC,IAAI,EAAEkB,EAAE,KAAKA,EAAE,GAAG,IAAI,MAAM,IAAInB,EAAEC,CAAC,CAAC,EAAEkB,EAAE,oBAAoB,IAAI,CAAC,OAAOA,EAAE,EAAE,EAAE,IAAIK,EAAE,WAAW,mBAAmB,IAAI,YAAY,OAAO,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,YAAY,IAAIC,EAAE,IAAI,CAAC,IAAMzB,EAAE,CAACA,EAAEC,EAAEG,IAAI,IAAIsB,IAAI,CAAC,IAAMjB,EAAEkB,GAAGC,EAAE3B,IAAI,EAAEyB,EAAE1B,EAAE,GAAG0B,CAAC,EAAE,IAAMG,EAAE5B,IAAI,EAAE,OAAO2B,IAAIC,IAAI7B,EAAE6B,EAAEzB,EAAEwB,CAAC,EAAE3B,EAAEG,EAAE,MAAMuB,IAAIlB,EAAE,IAAI,QAAS,CAACT,EAAEC,KAAI,CAAC6B,GAAG,CAAC,QAAQ9B,EAAE,OAAOC,EAAC,CAAC,CAAE,EAAEyB,CAAC,EAAEzB,EAAED,GAAG,SAASC,IAAI,CAAC,GAAG,CAAC,GAAGkB,EAAE,GAAG,MAAM,MAAM,yBAAyB,EAAE,IAAMf,EAAEe,EAAE,GAAG,CAAC,GAAGlB,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAEyB,EAAE,MAAM1B,EAAE,GAAGC,CAAC,EAAE,GAAGkB,EAAE,KAAKf,EAAE,MAAM,MAAM,kBAAkB,EAAEe,EAAE,IAAI,MAAM,EAAE,IAAMV,EAAEL,EAAE,OAAO,GAAG,EAAEK,EAAE,OAAO,CAAC,IAAIT,EAAE,MAAM,QAAQ,IAAIS,CAAC,EAAE,GAAGT,EAAEA,EAAE,OAAQA,GAAGA,CAAE,EAAE,EAAEA,EAAE,OAAO,MAAM,MAAMA,EAAE,KAAK;AAAA,CAAI,CAAC,CAAC,CAAC,OAAO0B,CAAC,QAAC,CAAQP,EAAE,GAAG,IAAI,CAAC,EAAEA,EAAE,kBAAkBnB,EAAEmB,EAAE,kBAAmB,IAAIA,EAAE,kBAAoBnB,GAAGmB,EAAE,kBAAkBnB,CAAE,EAAEmB,EAAE,QAAQlB,EAAED,EAAEmB,EAAE,QAAS,IAAIA,EAAE,QAAUnB,GAAGmB,EAAE,QAAQnB,CAAE,CAAC,EAAEmB,EAAE,mBAAmBlB,EAAED,EAAEmB,EAAE,mBAAoB,IAAIA,EAAE,mBAAqBnB,GAAGmB,EAAE,mBAAmBnB,CAAE,CAAC,EAAEmB,EAAE,cAAcnB,EAAEmB,EAAE,cAAe,IAAIA,EAAE,cAAgBnB,GAAGmB,EAAE,cAAcnB,CAAE,EAAEyB,EAAE,MAAM,EAAEN,EAAE,SAAS,CAACnB,EAAEC,IAAI,CAAC,GAAGwB,IAAI,EAAazB,IAAX,SAAa,CAAC,CAACmB,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,EAAE,EAAElB,EAAE,IAAMD,EAAEmB,EAAE,GAAGA,EAAE,mBAAmB,CAAClB,EAAEG,EAAEsB,EAAEjB,IAAIT,EAAE,eAAeC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEU,EAAE,cAAclB,GAAGD,EAAE,UAAUC,CAAC,EAAEkB,EAAE,qBAAqB,CAAClB,EAAEG,EAAEsB,IAAI1B,EAAE,iBAAiBC,EAAEG,EAAEsB,CAAC,EAAEP,EAAE,qBAAqBlB,GAAG,CAACD,EAAE,iBAAiBC,CAAC,CAAC,EAAEkB,EAAE,eAAelB,GAAGD,EAAE,WAAWC,CAAC,CAAC,CAAC,EAAE,IAAI8B,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAEd,CAAC,EAAEe,EAAE,iBAAiBC,EAAE,CAACnC,EAAEC,IAAI,CAAC,MAAMA,CAAC,EAAEmC,EAAE,IAAIf,GAAGC,KAAKA,EAAEc,EAA
E,KAAK,SAAS,KAAkB,OAAO,SAApB,KAA8B,SAAS,gBAAgBA,EAAE,SAAS,cAAc,KAAKpC,KAAIoC,EAAEpC,IAAGoC,EAAEA,EAAE,WAAW,OAAO,EAAE,GAAGA,EAAE,OAAO,EAAEA,EAAE,QAAQ,SAAS,EAAE,EAAE,YAAY,GAAG,EAAE,CAAC,EAAsFd,IAAIU,EAAEhC,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAOA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,aAAa,cAAcA,EAAE,KAAK,IAAI,EAAE,IAAI,WAAWA,EAAE,QAAQ,CAAC,GAAG8B,EAAE,CAAC/B,EAAEC,EAAEG,IAAI,CAAC,IAAIsB,EAAE,IAAI,eAAeA,EAAE,KAAK,MAAM1B,EAAE,EAAE,EAAE0B,EAAE,aAAa,cAAcA,EAAE,OAAO,IAAI,CAAMA,EAAE,QAAP,KAAkBA,EAAE,QAAL,GAAaA,EAAE,SAASzB,EAAEyB,EAAE,QAAQ,EAAEtB,EAAE,CAAC,EAAEsB,EAAE,QAAQtB,EAAEsB,EAAE,KAAK,IAAI,CAAC,GAAG,IAAIW,EAAE,QAAQ,IAAI,KAAK,OAAO,EAAEC,EAAE,QAAQ,MAAM,KAAK,OAAO,EAAEC,EAAEF,EAAEG,EAAEF,EAAE,GAAG,OAAO,OAAOnB,EAAEc,CAAC,EAAEA,EAAE,KAAKV,EAAE,CAAY,IAASkB,EAAT,SAAYzC,EAAE,CAAC,GAAG,CAAC,IAAIC,EAAED,EAAE,KAAKI,EAAEH,EAAE,IAAI,GAAYG,IAAT,OAAW,CAAC,IAAIJ,EAAE,CAAC,EAAE,KAAK,UAAUC,GAAGD,EAAE,KAAKC,CAAC,EAAE,KAAK,YAAY,IAAI,CAAC,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAE,QAAQA,KAAKD,EAAEyC,EAAGxC,CAAC,EAAE,KAAK,UAAUwC,CAAE,EAAE,QAAUzC,KAAKC,EAAE,SAASkB,EAAEnB,CAAC,GAAG,CAACmB,EAAEnB,CAAC,EAAE,QAAQmB,EAAEnB,CAAC,EAAE,IAAIC,IAAI,CAAC,YAAY,CAAC,GAAG,cAAc,GAAGD,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAWD,GAAT,UAAauC,EAAEpB,EAAEnB,CAAC,GAAeA,GAAZ,aAAgBwC,EAAErB,EAAEnB,CAAC,IAAIK,GAAEJ,EAAE,WAAWM,GAAE,EAAEmC,GAAEzC,EAAE,UAAU,CAAC,SAAiBG,IAAR,MAAU,CAACuC,GAAG1C,EAAE,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE2C,GAAG3C,EAAE,WAAW,EAAE4C,GAAG,EAAEC,GAAG,EAAEC,KAAIC,GAAG,EAAED,GAAE,IAAI,GAAG,CAACE,GAAGhD,EAAE,cAAcA,EAAE,GAAG,CAAC,OAAOD,EAAE,CAAC,GAAaA,GAAV,SAAY,MAAMA,CAAC,CAAC,MAAgBI,IAAX,SAAa8C,GAAG,GAAGC,GAAG,EAAE,EAAmBlD,EAAE,SAAnB,iBAA6CG,IAAjB,eAAmB2C,IAAGK,GAAG,EAAEhD,IAAIoC,EAAE,oCAAoCpC,CAAC,EAAE,EAAEoC,EAAEvC,CAAC,GAAG,OAAOD,EAAE,CAAC,MAAMqD,GAAG,EAAErD,CAAC,CAAC,EAAjqB,IAAAyC,KAAhBC,GAAEK,GAAE,GAA8qBP,EAAE,YAAYxC,EAAE,CAACA,EAAEA,EAAE,KAAK,GAAG,EAAE,QAAQ,MAAMA,CAAC,CAAC,EAAE,KAAK,MAAM,YAAYA,EAAE,CAAC,YAAY,CAAC,GAAG,QAAQ,KAAKA,EAAE,KAAK,GAAG,EAAE,GAAGkD,GAAG,CAAC,CAAC,CAAC,EAAE/B,EAAE,gBAAgB,CAACnB,EAAEC,IAAI,IAAI,QAASD,GAAG,CAAC0C,GAAEtC,GAAG,CAACA,EAAE,IAAI,YAAY,SAASA,EAAEkD,GAAG,CAAC,EAAErD,EAAEG,CAAC,EAAEJ,EAAE,CAAC,CAAC,CAAE,EAAE,KAAK,qBAAqBA,GAAG,CAAC,MAAMA,EAAE,QAAQA,CAAC,EAAE,KAAK,UAAUyC,CAAE,CAAC,IAAIpC,GAAEkD,GAAEC,EAAElD,GAAEE,GAAEE,GAAEC,GAAEC,GAAEC,GAAEE,GAAE0C,EAAEC,GAAEzC,GAAE0C,GAAE,GAAG,SAASpD,IAAG,CAAC,IAAIP,EAAEK,GAAE,OAAOc,EAAE,MAAMb,GAAE,IAAI,UAAUN,CAAC,EAAEmB,EAAE,OAAOT,GAAE,IAAI,WAAWV,CAAC,EAAEmB,EAAE,OAAOX,GAAE,IAAI,WAAWR,CAAC,EAAEmB,EAAE,QAAQR,GAAE,IAAI,YAAYX,CAAC,EAAEmB,EAAE,OAAOP,GAAE,IAAI,WAAWZ,CAAC,EAAEmB,EAAE,QAAQN,GAAE,IAAI,YAAYb,CAAC,EAAEmB,EAAE,QAAQJ,GAAE,IAAI,aAAaf,CAAC,EAAEmB,EAAE,QAAQF,GAAE,IAAI,aAAajB,CAAC,EAAEmB,EAAE,OAAOsC,EAAE,IAAI,cAAczD,CAAC,EAAEmB,EAAE,QAAQuC,GAAE,IAAI,eAAe1D,CAAC,CAAC,CAAC,GAAG,CAACuB,EAAE,CAAC,GAAGJ,EAAE,WAAWd,GAAEc,EAAE,mBAAmB,GAAGd,GAAE,IAAI,YAAY,OAAO,CAAC,QAAQ,IAAI,QAAQ,MAAM,OAAO,EAAE,CAAC,GAAG,kBAAkBmB,GAAG,MAAMgB,EAAE,6NAA6N,EAAE,MAAM,YAAY,EAAEjC,GAAE,CAAC,CAAC,IAAIqD,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,EAAEC,GAAE,KAAKC,GAAE,KAAK,SAASC,IAAG,CAAC,GAAM,EAAEH,IAAL,IAAgBC,KAAP,OAAW,cAAcA,EAAC,EAAEA,GAAE,MAAMC,IAAG,CAAC,IAAIjE,EAAEiE,GAAEA,GAAE,KAAKjE,EAAE,CAAC,CAAC,CAAC,SAASmE,GAAEnE,EAAE,CAAC,MAAMwC,EAAExC,EAAE,WAAWA,EAAE,GAAG,EAAE2D,GAAE,GAAGH,EAAE,EAAExD,EAAE,IAAI,YAAY,aAAaA,EAAE,0CAA0C,EAAEkB,EAAElB,CAAC,EAAEA,CAAC,CAAC,IAAIoE,GAAGC,GAAGrE,GAAGA,EAAE,WAAW,uCAAuC,EAAEsE,GAAGtE,GAAGA,EAAE,WAAW,SAAS,EAAE,SAASuE,GAAGvE,EAAE,CAAC,GAAGgC,EAAE,OAAOA,EAAEhC,CAAC,EAAE,KAAK,iDAAiD,CAAC,SAASwE,GAAGxE,EAAEC,EAAEG,EAAE,CAAC,OAAO,SAASJ,EAAE,CAAC,GAAGqB,GAAGC,EAAE,CAAC,GAAe,OA
AO,OAAnB,YAA0B,CAACgD,GAAGtE,CAAC,EAAE,OAAO,MAAMA,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMC,GAAG,CAAC,GAAG,CAACA,EAAE,GAAG,KAAK,uCAAuCD,CAAC,IAAI,OAAOC,EAAE,YAAY,CAAC,CAAE,EAAE,MAAO,IAAIsE,GAAGvE,CAAC,CAAE,EAAE,GAAG+B,EAAE,OAAO,IAAI,QAAS,CAAC9B,EAAEG,IAAI,CAAC2B,EAAE/B,EAAGA,GAAGC,EAAE,IAAI,WAAWD,CAAC,CAAC,EAAGI,CAAC,CAAC,CAAE,CAAC,CAAC,OAAO,QAAQ,QAAQ,EAAE,KAAM,IAAImE,GAAGvE,CAAC,CAAE,CAAC,EAAEA,CAAC,EAAE,KAAMA,GAAG,YAAY,YAAYA,EAAEC,CAAC,CAAE,EAAE,KAAKG,EAAGJ,GAAG,CAACwC,EAAE,0CAA0CxC,CAAC,EAAE,EAAEmE,GAAEnE,CAAC,CAAC,CAAE,CAAC,CAAC,SAASsD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAEmB,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAG5D,GAAG,GAAG6D,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAE/I,GAAE,GAAGgJ,GAAG,GAAGC,GAAG,EAAEC,EAAE,CAAC,CAAC,CAAC,IAAIC,GAAG,CAAC,QAAQ,CAACxJ,EAAEC,EAAEG,EAAEK,IAAI,CAAC,GAAYU,IAAT,QAAY,CAACA,EAAE,GAAG,MAAO,GAAE,IAAInB,EAAEyJ,GAAGzJ,IAAI,CAAC,GAAG,WAAW,IAAI,IAAIA,EAAEA,EAAE,UAAU,CAAC,GAAG,EAAEA,EAAEmB,EAAE,GAAG,IAAInB,CAAC,GAAG,MAAO,GAAE,GAAGS,KAAK,GAAGR,KAAK,IAAIG,KAAK,GAAGJ,EAAE,WAAW,MAAO,GAAE,GAAG,CAAC,OAAO,EAAE,EAAE,IAAIA,EAAE,SAASC,EAAEA,EAAEG,CAAC,EAAEK,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,MAAO,EAAC,CAAC,EAAE,QAAQ,IAAI,CAACU,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQnB,GAAGmB,EAAE,GAAGnB,CAAC,EAAE,QAAQA,GAAGmB,EAAE,GAAGnB,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAGnB,EAAEC,EAAEG,EAAE,EAAE,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAGnB,EAAEC,EAAEG,CAAC,CAAC,EAAE,QAAQ,IAAiB,OAAO,oBAApB,IAAwC,QAAQJ,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,aAAanB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,UAAUnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,cAAcnB,EAAE,CAAC,MAAMC,EAAE,KAAKG,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,G
AAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,OAAOnB,EAAE,CAAC,IAAIC,EAAE,IAAIG,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,MAAMnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,YAAYnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,OAAOnB,EAAE,CAAC,GAAGC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,MAAMnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,UAAUnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,iBAAiBnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,cAAcnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,aAAanB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,aAAanB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,WAAWnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,WAAWnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,eAAenB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,YAAYnB,EAAE,CAAC,KAAKC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEG,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,eAAenB,EAAE,C
AAC,UAAUC,EAAE,KAAKwJ,GAAGrJ,CAAC,EAAE,OAAOsB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,eAAenB,EAAE,CAAC,UAAUC,EAAE,KAAKwJ,GAAGrJ,CAAC,EAAE,OAAOsB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE8H,EAAE5I,EAAEE,EAAE2I,EAAEzI,EAAEE,GAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAO2J,EAAE,OAAO,OAAO,QAAQ1J,EAAE,UAAU,CAACyB,CAAC,EAAE,MAAMjB,EAAE,YAAY,CAACmB,CAAC,EAAE,KAAK,CAAC8H,EAAE5I,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAACZ,EAAE,EAAEc,IAAI,CAAC,EAAE,cAAcE,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWkI,GAAGjI,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE8H,EAAE5I,EAAEE,EAAE2I,EAAEzI,EAAEE,GAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOgB,EAAE,OAAO,OAAO,QAAQf,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASyB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAMjB,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASmB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAAS8H,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAAS5I,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACV,EAAE,EAAEuJ,IAAI,CAAC,EAAE,cAAczI,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWmI,GAAGlI,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACvB,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE8H,EAAE5I,EAAEE,EAAE2I,EAAEzI,EAAEE,GAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAO2J,EAAE,OAAO,OAAO,QAAQ1J,EAAE,UAAU,CAACyB,CAAC,EAAE,MAAMjB,EAAE,YAAY,CAACmB,CAAC,EAAE,KAAK,CAAC8H,EAAE5I,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAACZ,EAAE,EAAEc,IAAI,CAAC,EAAE,cAAcE,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWkI,GAAGjI,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE8H,EAAE5I,EAAEE,EAAE2I,EAAEzI,EAAEE,GAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOgB,EAAE,OAAO,OAAO,QAAQf,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASyB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAMjB,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASmB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAAS8H,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAAS5I,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACV,EAAE,EAAEuJ,IAAI,CAAC,EAAE,cAAczI,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWmI,GAAGlI,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACvB,EAAEC,IAAI,CAACkB,EAAE,GAAG,oBAAoBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,EAAE5I,EAAEE,EAAE2I,GAAEzI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC6H,EAAE5I,CAAC,EAAE,KAAK,CAACE,EAAE2I,GAAEzI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,IAAI,CAACkB,EAAE,GAAG,oBAAoBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,EAAE5I,EAAEE,EAAE2I,GAAEzI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC6H,EAAE5I,CAAC,EAAE,KAAK,CAACE,EAAE2I,GAAEzI,GAAEE,
EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,IAAI,CAACkB,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,EAAE5I,EAAEE,EAAE2I,GAAEzI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC6H,EAAE5I,CAAC,EAAE,KAAK,CAACE,EAAE2I,GAAEzI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,IAAI,CAACkB,EAAE,GAAG,gBAAgBnB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,EAAE5I,EAAEE,EAAE2I,GAAEzI,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUnB,EAAE,CAAC,OAAOuB,GAAE,OAAO,OAAO,SAAStB,EAAE,UAAUG,EAAE,kBAAkBsB,EAAE,cAAcjB,EAAE,UAAU,CAACmB,EAAEC,CAAC,EAAE,aAAa,CAAC6H,EAAE5I,CAAC,EAAE,KAAK,CAACE,EAAE2I,GAAEzI,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACtB,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,OAAOnB,EAAE,CAAC,MAAMC,EAAE,KAAKG,EAAE,OAAOsB,EAAE,OAAOjB,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACmB,EAAE,GAAG,SAASnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,SAASnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACG,EAAE,KAAKsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,SAASnB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACG,EAAE,KAAKsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,IAAI,CAACkB,EAAE,GAAG,UAAUnB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,SAASnB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,QAAQnB,EAAE,CAAC,KAAKC,EAAE,WAAWG,EAAE,WAAWsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACmB,EAAE,GAAG,SAASnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,SAASnB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACkB,EAAE,GAAG,iBAAiBnB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE8H,EAAE5I,EAAEE,EAAE2I,EAAEzI,KAAI,CAACC,EAAE,GAAG,SAASnB,EAAE,CAAC,UAAUC,EAAE,KAAKG,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEsB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,wBAAwB+H,GAAGhJ,CAAC,EAAE,YAAYmB,EAAE,eAAe8H,EAAE,mBAAmB5I,EAAE,sBAAsB2I,GAAGzI,CAAC,EAAE,KAAKyI,GAAGE,CAAC,EAAE,YAAYF,GAAGvI,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAClB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE8H,IAAI,CAACvI,EAAE,GAAG,QAAQnB,EAAE,CAAC,OAAOC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEG,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKmB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE8H,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ1J,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,wBAAwBnB,EAAE,CAAC,QAAQC,EAAE,OAAOG,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,wBAAwBnB,EAAE,CAAC,QAAQC,EAAE,OAAOG,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAG,QAAQnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACkB,EAAE,GAAG,SAASnB,EAAE,CAAC,SAASyJ,GAAGxJ,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,MAAMnB,EAAE,CAAC,KAAKC,EAAE,MAAMG,EAAE,KAAKsB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,QAAQC,EAAE,SAASG,EAAE,QAAQ,CAAC,CAACK,EAAE,aAAa,CAAC,CAACiB,EAAE,OAAOE,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAEG,EA
AEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,QAAQC,EAAE,SAASG,EAAE,QAAQ,CAAC,CAACK,EAAE,aAAa,CAAC,CAACiB,EAAE,OAAOE,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,SAASnB,EAAE,CAAC,UAAU,OAAOC,CAAC,EAAE,QAAQ,OAAOG,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE8H,EAAE5I,EAAEE,IAAI,CAACG,EAAE,GAAG,YAAYnB,EAAE,CAAC,SAASC,EAAE,iBAAiBG,EAAE,gBAAgBsB,EAAE,MAAMjB,EAAE,SAASmB,EAAE,eAAe8H,EAAE,MAAM,KAAK,EAAE,EAAE,SAAS,OAAO5I,CAAC,IAAI,EAAE,OAAOA,CAAC,EAAE4I,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,uBAAuB,CAAC,CAAC1I,CAAC,CAAC,CAAC,EAAE,QAAQhB,GAAG,CAACmB,EAAE,GAAG,UAAUnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,gBAAgBnB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACmB,EAAE,GAAG,WAAWnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEyB,EAAEjB,EAAEmB,EAAE8H,EAAE1I,EAAE2I,EAAEzI,EAAEE,EAAEC,GAAEC,GAAEC,GAAEC,EAAEC,GAAEM,KAAI,CAACZ,EAAE,GAAG,OAAOnB,EAAE,CAAC,OAAOsB,GAAE,OAAO,OAAO,SAASrB,EAAE,UAAUyB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,MAAMmB,EAAE,aAAa8H,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE1I,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK2I,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEzI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,IAAI,CAAC,CAACjB,EAAE,EAAEmB,KAAI,CAAC,EAAE,WAAWkI,GAAGjI,CAAC,EAAE,kBAAkBC,GAAE,MAAM,KAAKX,EAAE,EAAE,SAASW,KAAI,EAAEM,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ/B,GAAG,CAACmB,EAAE,GAAG,OAAOnB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,sBAAsBnB,EAAE,CAAC,SAASC,EAAE,WAAWG,EAAE,MAAMsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,KAAKC,EAAE,QAAQG,EAAE,WAAW,CAAC,CAACsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,IAAI,CAACP,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,KAAKC,EAAE,QAAQG,EAAE,WAAW,CAAC,CAACsB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,cAAcnB,EAAE,CAAC,EAAEC,EAAE,EAAEG,EAAE,cAAcsB,EAAE,KAAKjB,EAAE,UAAUmB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,IAAI,CAACT,EAAE,GAAG,qBAAqBnB,EAAE,CAAC,SAASC,EAAE,iBAAiBG,EAAE,gBAAgBsB,EAAE,MAAMjB,EAAE,SAASmB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,IAAI,CAACkB,EAAE,GAAG,YAAYnB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEG,EAAEsB,EAAEjB,IAAI,CAACU,EAAE,GAAG,kBAAkBnB,EAAE,CAAC,YAAY,CAAC,CAACC,EAAE,SAASG,EAAE,mBAAmBsB,EAAE,MAAMjB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,yBAAyBnB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAACG,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACmB,EAAE,GAAGnB,CAAC,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAIkB,EAAE,GAAGnB,EAAEC,EAAEkB,EAAE,GAAG,GAAGA,EAAE,GAAG,MAAM,EAAE,QAAQ,CAACnB,EAAEC,EAAEG,IAAI,CAACe,EAAE,GAAG,yBAAyBnB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAACG,CAAC,CAAC,CAAC,CAAC,EAAE,SAASsE,GAAG1E,EAAEC,EAAEG,EAAE,CAAC,OAAOwJ,GAAI,SAAS,CAAC,MAAMzI,EAAE,GAAGnB,EAAEC,EAAEG,CAAC,CAAC,CAAE,CAAC,CAAC,SAASqE,IAAI,CAAC,OAAmB,OAAO,oBAApB,GAAuC,CAAC,SAASoF,GAAG7J,EAAE,CAAC,KAAK,KAAK,aAAa,KAAK,QAAQ,gCAAgCA,CAAC,IAAI,KAAK,OAAOA,CAAC,CAAC,IAAI8J,GAAG9J,GAAG,CAACA,EAAE,UAAU,EAAEA,EAAE,UAAU,IAAI,CAAC,CAAC,EAAE+J,GAAG/J,GAAG,CAAIgK,GAAG,QAAN,IAAeC,GAAG,EAAEC,GAAGF,GAAG,CAAC,CAAC,GAAG,IAAI/J,EAAE+J,GAAG,IAAI,EAAE,GAAG,CAAC/J,EAAE,MAAO,GAAEkK,GAAG,KAAKlK,CAAC,EAAEmK,GAAGpK,EAAE,EAAE,EAAEC,EAAEA,EAAE,GAAGD,EAAE,GAAG,IAAII,EAAE,CAAC,IAAI,MAAM,cAAcJ,EAAE,GAAG,IAAIA,EAAE,GAAG,YAAYA,EAAE,EAAE,EAAE,OAAOC,EAAE,YAAYG,EAAEJ,EAAE,EAAE,EAAE,CAAC,EAAEqK,GAAG,EAAEC,GAAG,CAACtK,EAAEC,KAAKG,IAAI,CAAC,QAAQsB,EAAE,EAAEtB,EAAE,OAAOK,EAAE8J,GAAG,EAAE3I,EAAE4I,GAAG,EAAE9I,CAAC,EAAEG
,EAAED,IAAI,EAAE8H,EAAE,EAAEA,EAAEtJ,EAAE,OAAOsJ,IAAI,CAAC,IAAI5I,EAAEV,EAAEsJ,CAAC,EAAY,OAAO5I,GAAjB,UAAoB2C,EAAE5B,EAAE,EAAE6H,CAAC,EAAE,GAAGjG,EAAE5B,EAAE,EAAE6H,EAAE,CAAC,EAAE5I,IAAI2C,EAAE5B,EAAE,EAAE6H,CAAC,EAAE,GAAG1I,EAAE,EAAEa,EAAE,EAAE6H,EAAE,IAAI,CAAC,EAAE5I,EAAE,CAAC,OAAOd,EAAEyK,GAAGzK,EAAE,EAAE0B,EAAEE,EAAE3B,CAAC,EAAEyK,GAAGjK,CAAC,EAAET,CAAC,EAAE,SAASqJ,GAAGrJ,EAAE,CAAC,GAAGuB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,CAAC,EAAE,GAAGwD,EAAExD,EAAE,EAAE,EAAEqK,IAAI,CAAC,QAAQpK,KAAKkK,GAAGL,GAAG7J,CAAC,EAAE,IAAIA,KAAK+J,GAAGF,GAAG7J,CAAC,EAAE+J,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEjJ,EAAE,SAASnB,CAAC,EAAE2D,GAAE,EAAE,CAACxB,EAAEnC,EAAE,IAAI6J,GAAG7J,CAAC,CAAC,CAAC,CAAC,SAAS2K,GAAG3K,EAAE,CAAC,GAAGuB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,CAAC,EAAEgJ,GAAGhJ,CAAC,CAAC,CAAC,IAAIgJ,GAAGhJ,GAAG,CAAC,GAAGwD,EAAExD,EAAEuB,EAAE,MAAMoJ,GAAG3K,CAAC,EAAE,SAASqJ,GAAGrJ,CAAC,CAAC,EAAEgK,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAES,GAAG,CAAC,EAAER,GAAG,CAAC,EAAES,GAAG7K,GAAG,CAAC,IAAIC,EAAED,EAAE,GAAG,OAAOoK,GAAGnK,CAAC,EAAE+J,GAAG,KAAKhK,CAAC,EAAEmK,GAAG,OAAOA,GAAG,QAAQnK,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,EAAE8K,GAAG7K,CAAC,CAAC,EAAE,SAAS6C,IAAI,CAAC8H,GAAG,QAAS5K,GAAGA,EAAE,CAAE,CAAC,CAAC,IAAIkK,GAAGlK,GAAG,IAAI,QAASC,GAAG,CAACD,EAAE,UAAUI,GAAG,CAAC,IAAIsB,GAAGtB,EAAEA,EAAE,MAAM,IAAI,GAAGA,EAAE,cAAcA,EAAE,cAAc8C,GAAG,EAAE,CAAC,IAAIzC,EAAE2J,GAAGhK,EAAE,YAAY,EAAEK,EAAEA,EAAE,YAAYL,EAAEA,EAAE,YAAY,EAAEoC,EAAE,0CAA0Cd,CAAC,uBAAuBtB,EAAE,YAAY,qCAAqC,CAAC,MAAsBsB,IAAjB,eAAmB0B,GAAG,EAAkB1B,IAAhB,cAAkBqI,GAAG3J,CAAC,EAAoBsB,IAAlB,gBAAoBmJ,GAAGT,GAAGhK,EAAE,MAAM,CAAC,EAAiBsB,IAAf,cAAkBtB,EAAEA,EAAE,OAAOsB,EAAE0I,GAAGhK,CAAC,EAAE,OAAOgK,GAAGhK,CAAC,EAAE0J,GAAGpI,CAAC,EAAEoJ,GAAG1K,CAAC,EAAE+J,GAAG,OAAOA,GAAG,QAAQzI,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,GAAoBA,IAAjB,eAAmB0I,GAAGhK,EAAE,MAAM,EAAE,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAasB,IAAX,UAAc1B,EAAE,OAAO,GAAGC,EAAED,CAAC,GAAa0B,IAAV,QAAY,MAAM,UAAUtB,EAAE,QAAQ,KAAKA,EAAE,IAAI,EAAE,EAAmBA,EAAE,SAAnB,eAA0BJ,EAAE,YAAYI,CAAC,EAAkBsB,IAAhB,cAAkBP,EAAEf,EAAE,OAAO,EAAE,GAAGA,EAAE,IAAI,EAAEsB,GAAGc,EAAE,kCAAkCd,CAAC,EAAE,CAAC,EAAE1B,EAAE,QAAQA,GAAG,CAAC,MAAMwC,EAAE,yBAAyBxC,EAAE,QAAQ,IAAIA,EAAE,MAAM,KAAKA,EAAE,OAAO,EAAE,EAAEA,CAAC,EAAE,IAAII,EAAEsB,EAAE,CAAC,EAAE,IAAItB,IAAI,CAAC,QAAQ,EAAEe,EAAE,eAAef,CAAC,GAAGsB,EAAE,KAAKtB,CAAC,EAAEJ,EAAE,YAAY,CAAC,IAAI,OAAO,SAAS0B,EAAE,WAAWrB,GAAE,WAAWkD,EAAC,CAAC,CAAC,CAAE,EAAE,SAAS0G,IAAI,CAAC,IAAIjK,EAAE,IAAI,OAAO,IAAI,IAAI,YAAY,GAAG,EAAE,CAAC,KAAK,SAAS,WAAW,aAAa,KAAK,YAAY,CAAC,EAAEgK,GAAG,KAAKhK,CAAC,CAAC,CAAC,IAAI+K,GAAG/K,GAAG,CAAC,KAAK,EAAEA,EAAE,QAAQA,EAAE,MAAM,EAAEmB,CAAC,CAAC,EAAE0B,GAAG,IAAI,CAAC,IAAI7C,EAAEkD,GAAG,EAAEjD,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEA,EAAE,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAEgL,GAAG/K,EAAEA,EAAED,CAAC,EAAE0K,GAAGzK,CAAC,CAAC,EAAEgD,GAAG,CAACjD,EAAEC,IAAI,CAACoK,GAAG,EAAErK,EAAEiL,GAAGjL,EAAEC,CAAC,EAAE,EAAEoK,GAAG7G,EAAExD,EAAEmD,GAAGnD,CAAC,CAAC,EAAE,MAAMkL,EAAE,CAAC,YAAYlL,EAAE,CAAC,KAAK,GAAGA,EAAE,EAAE,CAAC,CAAC,SAAS2E,GAAG3E,EAAEC,EAAEG,EAAE,CAAC,IAAIsB,EAAE,IAAIwJ,GAAGlL,KAAK,CAAC,EAAE,MAAMC,KAAK,EAAEG,KAAK,EAAE,EAAE,EAAEsB,EAAE,GAAG,KAAK,IAAI,CAAC,EAAE,EAAE,EAAE,EAAEA,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEzB,EAAE,EAAE,EAAEyB,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEtB,EAAEJ,CAAC,CAAC,SAASmL,GAAGnL,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOH,EAAE+I,GAAG,EAAE,EAAEtK,EAAEC,EAAEG,EAAEsB,CAAC,EAAEkD,GAAG5E,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAASkD,GAAG5E,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAG1B,KAAK,EAAEC,KAAK,EAAEG,KAAK,EAAEsB,KAAK,EAAWF,IAAT,OAAW,OAAOgB,EAAE,qFAAqF,EAAE,EAAE,IAAI/B,EAAE,CAAC,EAAE,OAAOc,GAAOd,EAAE,SAAN,EAAa0K,GAAGnL,EAAEC,EAAEG,EAAEsB,CAAC,G
AAG1B,EAAE,CAAC,GAAGI,EAAE,GAAGJ,EAAE,GAAG0B,EAAE,GAAGjB,CAAC,EAAEc,GAAGvB,EAAE,GAAG,cAAc,YAAYA,EAAES,CAAC,EAAE,GAAGsJ,GAAG/J,CAAC,EAAE,CAAC,IAAIoL,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,MAAM,EAAE,OAAOC,GAAG,CAACrL,EAAEC,EAAEG,IAAI,CAAC,IAAIsB,GAAGzB,KAAK,GAAGG,EAAE,IAAIA,EAAEH,EAAED,EAAEI,CAAC,GAAG,EAAEA,GAAGsB,IAAI,EAAEtB,EAAE,GAAG,GAAGA,EAAEH,GAAGD,EAAE,QAAQoL,GAAG,OAAOA,GAAG,OAAOpL,EAAE,kBAAkBwB,EAAExB,EAAE,MAAMC,EAAEG,CAAC,EAAEJ,EAAE,SAASC,EAAEG,CAAC,CAAC,EAAE,IAAIsB,EAAE,GAAGzB,EAAEG,GAAG,CAAC,IAAIK,EAAET,EAAEC,GAAG,EAAE,GAAG,IAAIQ,EAAE,CAAC,IAAImB,EAAE,GAAG5B,EAAEC,GAAG,EAAE,IAAS,IAAIQ,IAAV,IAAaiB,GAAG,OAAO,cAAc,GAAGjB,IAAI,EAAEmB,CAAC,MAAM,CAAC,IAAIC,EAAE,GAAG7B,EAAEC,GAAG,EAAE,OAAOQ,GAAQ,IAAIA,IAAV,KAAc,GAAGA,IAAI,GAAGmB,GAAG,EAAEC,GAAG,EAAEpB,IAAI,GAAGmB,GAAG,GAAGC,GAAG,EAAE,GAAG7B,EAAEC,GAAG,GAAGyB,GAAG,OAAO,aAAajB,CAAC,GAAGA,GAAG,MAAMiB,GAAG,OAAO,aAAa,MAAMjB,GAAG,GAAG,MAAM,KAAKA,CAAC,EAAE,CAAC,MAAMiB,GAAG,OAAO,aAAajB,CAAC,CAAC,CAAC,OAAOiB,CAAC,EAAE+H,GAAG,CAACzJ,EAAEC,KAAKD,KAAK,GAAGqL,GAAG,EAAE,EAAErL,EAAEC,CAAC,EAAE,GAAG,SAAS4E,GAAG7E,EAAEC,EAAEG,EAAE,CAAC,OAAOmB,EAAE+I,GAAG,EAAE,EAAEtK,EAAEC,EAAEG,CAAC,EAAE,CAAC,CAAC,SAAS0E,GAAG9E,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,EAAEC,CAAC,CAAC,CAAC,IAAIqL,GAAGtL,GAAG,CAAC,QAAQC,EAAE,EAAEG,EAAE,EAAEA,EAAEJ,EAAE,OAAO,EAAEI,EAAE,CAAC,IAAIsB,EAAE1B,EAAE,WAAWI,CAAC,EAAE,KAAKsB,EAAEzB,IAAI,MAAMyB,EAAEzB,GAAG,EAAE,OAAOyB,GAAG,OAAOA,GAAGzB,GAAG,EAAE,EAAEG,GAAGH,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEsL,GAAG,CAACvL,EAAEC,EAAEG,EAAEsB,IAAI,CAAC,GAAG,EAAE,EAAEA,GAAG,MAAO,GAAE,IAAIjB,EAAEL,KAAK,EAAEsB,EAAEtB,EAAEsB,EAAE,EAAE,QAAQE,EAAE,EAAEA,EAAE5B,EAAE,OAAO,EAAE4B,EAAE,CAAC,IAAIC,EAAE7B,EAAE,WAAW4B,CAAC,EAAE,GAAG,OAAOC,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK7B,EAAE,WAAW,EAAE4B,CAAC,GAAG,KAAKC,EAAE,CAAC,GAAGzB,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAEyB,CAAC,KAAK,CAAC,GAAG,MAAMA,EAAE,CAAC,GAAGzB,EAAE,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,CAAC,KAAK,CAAC,GAAG,OAAOA,EAAE,CAAC,GAAGzB,EAAE,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,EAAE,KAAK,CAAC,GAAGzB,EAAE,GAAGsB,EAAE,MAAMzB,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,GAAG5B,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,GAAG,EAAE,CAAC5B,EAAEG,MAAM,CAAC,EAAE,IAAIyB,GAAG,EAAE,EAAE,CAAC5B,EAAEG,MAAM,CAAC,EAAE,IAAI,GAAGyB,CAAC,CAAC,CAAC,OAAO5B,EAAEG,IAAI,CAAC,EAAE,EAAEA,EAAEK,CAAC,EAAE+K,GAAG,CAACxL,EAAEC,EAAEG,IAAImL,GAAGvL,EAAE,EAAE,EAAEC,EAAEG,CAAC,EAAE,SAAS2E,GAAG/E,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,EAAEC,CAAC,CAAC,CAAC,SAAS+E,GAAGhF,EAAEC,EAAEG,EAAE,CAAC,GAAGmB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,EAAEC,EAAEG,CAAC,CAAC,CAAC,SAAS6E,GAAGjF,EAAEC,EAAEG,EAAE,CAAC,OAAOmB,EAAE+I,GAAG,EAAE,EAAEtK,EAAEC,EAAEG,CAAC,EAAE,CAAC,CAAC,SAAS8E,GAAGlF,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,EAAEC,CAAC,CAAC,CAAC,SAASkF,GAAGnF,EAAEC,EAAEG,EAAE,CAAC,GAAGmB,EAAE,OAAO+I,GAAG,EAAE,EAAEtK,EAAEC,EAAEG,CAAC,CAAC,CAAC,SAASgF,GAAGpF,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAGH,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAAS2D,GAAGrF,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAGH,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAAS4D,GAAGtF,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,GAAGH,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,SAAS6D,GAAGvF,EAAE,CAAC,GAAGuB,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,CAAC,CAAC,CAAC,SAASwF,GAAGxF,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,CAAC,CAAC,CAAC,SAASwF,GAAGzF,EAAEC,EAAEG,EAAE,CAAC,GAAGmB,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,CAAC,CAAC,CAAC,IAAIqL,GAAGC,GAAGhG,GAAG,IAAI,CAACvB,GAAE,EAAE,CAAC,EAAEwH,GAAG3L,GAAG,CAAC,QAAQC,EAAE,GAAG,EAAE,E
AAED,IAAI,CAAC,GAAGC,GAAGwL,GAAG,EAAE,EAAEzL,MAAM,CAAC,CAAC,EAAE,OAAOC,CAAC,EAAE2L,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASC,GAAG/L,EAAEC,EAAEG,EAAE,CAAC,EAAE,CAAC,GAAG,EAAE,mBAAmBH,GAAG,MAAM,IAAI,UAAU,yDAAyD,EAAE,OAAO,SAASD,EAAEC,EAAEG,EAAE,CAAC,EAAE,CAAC,IAAIsB,EAAEzB,EAAE,KAAK,GAAG,CAACD,EAAE,MAAM,IAAI0L,GAAG,SAAShK,CAAC,+CAA+C,EAAE,GAAGmK,GAAG,eAAe7L,CAAC,EAAE,CAAC,GAAGI,EAAE,GAAG,OAAO,MAAM,IAAIsL,GAAG,yBAAyBhK,CAAC,SAAS,CAAC,CAACmK,GAAG7L,CAAC,EAAEC,EAAE,OAAO6L,GAAG9L,CAAC,EAAE4L,GAAG,eAAe5L,CAAC,IAAIC,EAAE2L,GAAG5L,CAAC,EAAE,OAAO4L,GAAG5L,CAAC,EAAEC,EAAE,QAASD,GAAGA,EAAE,CAAE,EAAE,EAAEA,EAAEC,EAAEG,CAAC,CAAC,CAAC,IAAI4L,GAAG,CAAChM,EAAEC,EAAEa,IAAI,CAAC,OAAOb,EAAE,CAAC,IAAK,GAAE,OAAOa,EAAEd,GAAGI,EAAE,EAAEJ,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOc,EAAEd,GAAGS,EAAE,EAAET,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOc,EAAEd,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOc,EAAEd,GAAGyD,EAAEzD,IAAI,CAAC,EAAEA,GAAG0D,GAAE1D,IAAI,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,0BAA0BC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS2F,GAAG3F,EAAEC,EAAEG,EAAE,CAACA,KAAK,EAAE2L,GAAG/L,KAAK,EAAE,CAAC,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,GAAa,OAAOA,GAAjB,UAA8B,OAAOA,GAAjB,SAAmB,MAAMA,EAASA,IAAP,KAAS,QAAkBD,EAAE,OAAOC,IAApB,UAAkCD,IAAV,SAA0BA,IAAb,WAAeC,EAAE,SAAS,EAAE,GAAGA,EAAE,IAAI,UAAU,mBAAmBA,CAAC,QAAQ,KAAK,IAAI,EAAE,EAAE,OAAgB,OAAOA,GAAjB,WAAqBA,EAAE,OAAOA,CAAC,GAAGA,CAAC,EAAE,eAAegM,GAAG,qBAAqBD,GAAG/L,EAAEG,EAAMH,EAAE,QAAQ,GAAG,GAAjB,EAAkB,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAIgM,GAAG,EAAE,SAASrG,GAAG5F,EAAEC,EAAEG,EAAEK,EAAE,CAACsL,GAAG/L,KAAK,EAAE,CAAC,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,aAAa,SAASD,EAAE,CAAC,MAAM,CAAC,CAACA,CAAC,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,OAAOA,EAAEG,EAAEK,CAAC,EAAE,eAAewL,GAAG,qBAAqB,SAASjM,EAAE,CAAC,OAAO,KAAK,aAAa,EAAE,EAAEA,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAIkM,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASrF,GAAG9G,EAAE,CAAC,GAAGA,KAAK,IAAO,EAAEmM,GAAGnM,EAAE,CAAC,GAAX,IAAemM,GAAGnM,CAAC,EAAE,OAAOkM,GAAG,KAAKlM,CAAC,EAAE,CAAC,IAAIoM,GAAGpM,GAAG,CAAC,GAAG,CAACA,EAAE,MAAM,IAAI0L,GAAG,oCAAoC1L,CAAC,EAAE,OAAOmM,GAAGnM,CAAC,CAAC,EAAEqM,GAAGrM,GAAG,CAAC,OAAOA,EAAE,CAAC,KAAK,OAAO,MAAO,GAAE,KAAK,KAAK,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,QAAQ,IAAMC,EAAEiM,GAAG,IAAI,GAAGC,GAAG,OAAO,OAAOA,GAAGlM,CAAC,EAAED,EAAEmM,GAAGlM,EAAE,CAAC,EAAE,EAAEA,CAAC,CAAC,EAAE,SAASqM,GAAGtM,EAAE,CAAC,OAAO,KAAK,aAAa,EAAE,EAAEA,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIuM,GAAG,CAAC,KAAK,kBAAkB,aAAavM,GAAG,CAAC,IAAIC,EAAEmM,GAAGpM,CAAC,EAAE,OAAO8G,GAAG9G,CAAC,EAAEC,CAAC,EAAE,WAAW,CAACD,EAAEC,IAAIoM,GAAGpM,CAAC,EAAE,eAAegM,GAAG,qBAAqBK,GAAG,GAAG,IAAI,EAAE,SAASzG,GAAG7F,EAAE,CAAC,OAAO+L,GAAG/L,IAAI,EAAEuM,EAAE,CAAC,CAAC,IAAIC,GAAG,CAACxM,EAAEC,IAAI,CAAC,OAAOA,EAAE,CAAC,IAAK,GAAE,OAAO,SAASD,EAAE,CAAC,OAAO,KAAK,aAAac,EAAE,EAAEd,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,IAAK,GAAE,OAAO,SAASA,EAAE,CAAC,OAAO,KAAK,aAAagB,EAAE,EAAEhB,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,wBAAwBC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS8F,GAAG9F,EAAEC,EAAEG,EAAE,CAACA,KAAK,EAAE2L,GAAG/L,KAAK,EAAE,CAAC,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,CAACA,EAAEC,IAAIA,EAAE,eAAegM,GAAG,qBAAqBO,GAAGvM,EAAEG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAAS2F,GAAG/F,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAE,CAAC,GAAGT,KAAK,EAAEI,KAAK,EAAEH,EAAE0L,GAAG1L,IAAI,CAAC,EAAOQ,IAAL,KAASA,EAAE,YAAYA,EAAET,GAAGA,EAAM0B,IAAJ,EAAM,CAAC,IAAIE,EAAE,GAAG,EAAExB,EAAEK,EAAET,GAAGA,GAAG4B,IAAIA,CAAC,CAAC,IAAIC,EAAE5B,EAAE,SAAS,UAAU,EAAE,
SAASD,EAAEC,EAAE,CAAC,OAAOA,IAAI,CAAC,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,CAAC,EAAE8L,GAAG/L,EAAE,CAAC,KAAKC,EAAE,aAAaQ,EAAE,WAAWoB,EAAE,eAAeoK,GAAG,qBAAqBD,GAAG/L,EAAEG,EAAMsB,IAAJ,CAAK,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAASsE,GAAGhG,EAAEC,EAAEyB,EAAE,CAAC,SAASjB,EAAET,EAAE,CAAC,IAAIC,EAAE,EAAE,EAAED,IAAI,IAAI,CAAC,EAAE,OAAOA,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,IAAI4B,EAAExB,EAAE,EAAE,OAAOJ,EAAEC,CAAC,CAAC,CAAC,IAAI2B,EAAE,CAAC,UAAU,WAAW,WAAW,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,cAAc,EAAE3B,CAAC,EAAE8L,GAAG/L,KAAK,EAAE,CAAC,KAAK0B,EAAEiK,GAAGjK,IAAI,CAAC,EAAE,aAAajB,EAAE,eAAewL,GAAG,qBAAqBxL,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,SAASwF,GAAGjG,EAAEC,EAAE,CAACD,KAAK,EAAE,IAAII,GAAmBH,EAAE0L,GAAG1L,IAAI,CAAC,KAA3B,cAA8B8L,GAAG/L,EAAE,CAAC,KAAKC,EAAE,aAAa,SAASD,EAAE,CAAC,IAAIC,EAAE,EAAE,EAAED,IAAI,IAAI,CAAC,EAAES,EAAET,EAAE,EAAE,GAAGI,EAAE,QAAQwB,EAAEnB,EAAEoB,EAAE,EAAEA,GAAG5B,EAAE,EAAE4B,EAAE,CAAC,IAAIf,EAAEL,EAAEoB,EAAE,GAAGA,GAAG5B,GAAM,EAAE,EAAEa,IAAI,CAAC,GAAZ,EAAc,CAAC,GAAGc,EAAE6H,GAAG7H,EAAEd,EAAEc,CAAC,EAAWZ,IAAT,OAAW,IAAIA,EAAEY,OAAOZ,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGY,EAAEA,EAAEd,EAAE,CAAC,CAAC,KAAK,CAAC,IAAIE,EAAE,MAAMf,CAAC,EAAE4B,EAAE,EAAEA,EAAE5B,EAAE,EAAE4B,EAAEb,EAAEa,CAAC,EAAE,OAAO,aAAa,EAAE,EAAEpB,EAAEoB,IAAI,CAAC,CAAC,EAAEb,EAAEA,EAAE,KAAK,EAAE,CAAC,CAAC,OAAOyL,GAAGzM,CAAC,EAAEgB,CAAC,EAAE,WAAW,SAAShB,EAAEC,EAAE,CAACA,aAAa,cAAcA,EAAE,IAAI,WAAWA,CAAC,GAAG,IAAIQ,EAAY,OAAOR,GAAjB,SAAmB,GAAG,EAAEQ,GAAGR,aAAa,YAAYA,aAAa,mBAAmBA,aAAa,WAAW,MAAM,IAAIyL,GAAG,uCAAuC,EAAE,IAAI9J,EAAExB,GAAGK,EAAE6K,GAAGrL,CAAC,EAAEA,EAAE,OAAO4B,EAAE6K,GAAG,EAAE9K,EAAE,CAAC,EAAEd,EAAEe,EAAE,EAAE,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAED,EAAExB,GAAGK,EAAE+K,GAAGvL,EAAEa,EAAEc,EAAE,CAAC,UAAUnB,EAAE,IAAIA,EAAE,EAAEA,EAAEmB,EAAE,EAAEnB,EAAE,CAAC,IAAIO,EAAEf,EAAE,WAAWQ,CAAC,EAAE,GAAG,IAAIO,EAAE,MAAMyL,GAAG3L,CAAC,EAAE,IAAI4K,GAAG,wDAAwD,EAAE,EAAE,EAAE5K,EAAEL,IAAI,CAAC,EAAEO,CAAC,KAAM,KAAIP,EAAE,EAAEA,EAAEmB,EAAE,EAAEnB,EAAE,EAAE,EAAEK,EAAEL,IAAI,CAAC,EAAER,EAAEQ,CAAC,EAAE,OAAcT,IAAP,MAAUA,EAAE,KAAKyM,GAAG5K,CAAC,EAAEA,CAAC,EAAE,eAAeoK,GAAG,qBAAqBK,GAAG,GAAGtM,EAAE,CAACyM,GAAGzM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI2M,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,UAAU,EAAE,OAAOC,GAAG,CAAC5M,EAAEC,IAAI,CAAC,QAAQG,EAAEJ,GAAG,EAAE6B,EAAEzB,EAAEH,EAAE,EAAE,EAAEG,GAAGyB,IAAI,EAAE,EAAEzB,IAAI,CAAC,GAAG,EAAEA,EAAE,GAAG,IAAIA,IAAI,GAAGJ,GAAG2M,GAAG,OAAOA,GAAG,OAAO,EAAE,EAAE,MAAM3M,EAAEI,CAAC,CAAC,EAAE,IAAIA,EAAE,GAAGyB,EAAE,EAAE,EAAEA,GAAG5B,EAAE,GAAG,EAAE4B,EAAE,CAAC,IAAI6H,EAAEjJ,EAAE,EAAET,EAAE,EAAE6B,IAAI,IAAI,CAAC,EAAE,GAAM6H,GAAH,EAAK,MAAMtJ,GAAG,OAAO,aAAasJ,CAAC,CAAC,CAAC,OAAOtJ,CAAC,EAAEyM,GAAG,CAAC7M,EAAEC,EAAEG,IAAI,CAAC,GAAGA,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIsB,EAAEzB,EAAEG,GAAGA,GAAG,GAAG,EAAEJ,EAAE,OAAOI,EAAE,EAAEJ,EAAE,OAAO,QAAQ4B,EAAE,EAAEA,EAAExB,EAAE,EAAEwB,EAAE,CAAC,IAAIC,EAAE7B,EAAE,WAAW4B,CAAC,EAAEnB,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE4B,EAAE5B,GAAG,CAAC,CAAC,OAAOQ,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEyB,CAAC,EAAEoL,GAAG9M,GAAG,EAAEA,EAAE,OAAO+M,GAAG,CAAC/M,EAAEC,IAAI,CAAC,QAAQG,EAAE,EAAEsB,EAAE,GAAG,EAAEtB,GAAGH,EAAE,IAAI,CAAC,IAAIQ,EAAE,EAAE,EAAET,EAAE,EAAEI,IAAI,IAAI,CAAC,EAAE,GAAMK,GAAH,EAAK,MAAM,EAAEL,EAAE,OAAOK,GAAGA,GAAG,MAAMiB,GAAG,OAAO,aAAa,MAAMjB,GAAG,GAAG,MAAM,KAAKA,CAAC,GAAGiB,GAAG,OAAO,aAAajB,CAAC,CAAC,CAAC,OAAOiB,CAAC,EAAEsL,GAAG,CAAChN,EAAEC,EAAEG,IAAI,CAAC,GAAGH,KAAK,EAAEG,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIsB,EAAEzB,EAAEG,EAAEsB,EAAEtB,EAAE,EAAE,QAAQK,EAAE,EAAEA,EAAET,EAAE,OAAO,EAAES,EAAE,CAAC,IAAImB,EAAE5B,EAAE,WAAWS,CAAC,EAAE,GAAG,OAAOmB,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK5B,EAAE,WAAW,EAAES,CA
AC,GAAG,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE2B,GAAG3B,GAAG,GAAG,EAAEG,EAAE,KAAK,CAAC,OAAO,EAAE,EAAEH,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEyB,CAAC,EAAEuL,GAAGjN,GAAG,CAAC,QAAQC,EAAE,EAAEG,EAAE,EAAEA,EAAEJ,EAAE,OAAO,EAAEI,EAAE,CAAC,IAAIsB,EAAE1B,EAAE,WAAWI,CAAC,EAAE,OAAOsB,GAAG,OAAOA,GAAG,EAAEtB,EAAEH,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAE,SAASiG,GAAGlG,EAAEC,EAAEG,EAAE,CAAC,GAAGJ,KAAK,EAAEC,KAAK,EAAEG,EAAEuL,GAAGvL,KAAK,CAAC,EAAMH,IAAJ,EAAM,IAAIyB,EAAEkL,GAAGnM,EAAEoM,GAAGhL,EAAEiL,GAAGhM,EAAEd,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,OAAWC,IAAJ,IAAQyB,EAAEqL,GAAGtM,EAAEuM,GAAGnL,EAAEoL,GAAGnM,EAAEd,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,GAAG+L,GAAG/L,EAAE,CAAC,KAAKI,EAAE,aAAaJ,GAAG,CAAC,QAAQI,EAAEK,EAAE,EAAE,EAAET,IAAI,IAAI,CAAC,EAAE4B,GAAE5B,EAAE,EAAE6B,GAAE,EAAEA,IAAGpB,EAAE,EAAEoB,GAAE,CAAC,IAAIb,GAAEhB,EAAE,EAAE6B,GAAE5B,EAAE4B,IAAGpB,GAAMK,EAAEE,EAAC,GAAN,IAAUY,GAAEF,EAAEE,GAAEZ,GAAEY,EAAC,EAAWxB,IAAT,OAAWA,EAAEwB,IAAGxB,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGwB,IAAGA,GAAEZ,GAAEf,EAAE,CAAC,OAAOwM,GAAGzM,CAAC,EAAEI,CAAC,EAAE,WAAW,CAACJ,EAAE0B,IAAI,CAAC,GAAa,OAAOA,GAAjB,SAAmB,MAAM,IAAIgK,GAAG,6CAA6CtL,CAAC,EAAE,EAAE,IAAIwB,EAAEC,EAAEH,CAAC,EAAEZ,GAAE4L,GAAG,EAAE9K,EAAE3B,CAAC,EAAE,OAAO,EAAE,EAAEa,KAAI,IAAI,CAAC,EAAEc,EAAE3B,EAAEQ,EAAEiB,EAAEZ,GAAE,EAAEc,EAAE3B,CAAC,EAASD,IAAP,MAAUA,EAAE,KAAKyM,GAAG3L,EAAC,EAAEA,EAAC,EAAE,eAAemL,GAAG,qBAAqBK,GAAG,GAAGtM,EAAE,CAACyM,GAAGzM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAASmG,GAAGnG,EAAEC,EAAE,CAAC8L,GAAG/L,KAAK,EAAE,CAAC,GAAG,GAAG,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,eAAe,EAAE,aAAa,IAAI,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAImG,GAAG,IAAI,EAAE,SAASC,GAAGrG,EAAE,CAAC2C,GAAG3C,IAAI,EAAE,CAACsB,EAAE,EAAE,CAACD,EAAE,OAAO,EAAE,EAAEyB,GAAG,CAAC,CAAC,IAAIoK,GAAGlN,GAAG,CAAC,GAAG,CAAC2D,GAAE,GAAG,CAAC,GAAG3D,EAAE,EAAE,EAAE,EAAEqK,IAAI,GAAG,CAAC9I,EAAE4B,GAAGK,CAAC,EAAEwF,GAAGxF,CAAC,CAAC,OAAOxD,EAAE,CAACA,aAAa6J,IAAc7J,GAAV,UAAamC,EAAE,EAAEnC,CAAC,CAAC,CAAC,OAAOA,EAAE,CAACA,aAAa6J,IAAc7J,GAAV,UAAamC,EAAE,EAAEnC,CAAC,CAAC,CAAC,EAAE,SAAS4C,GAAG5C,EAAE,CAACA,KAAK,EAAc,OAAO,QAAQ,IAA3B,aAAgC,QAAQ,GAAG,EAAE,EAAEA,IAAI,EAAEA,CAAC,EAAE,MAAM,KAAKoD,EAAE,EAAEpD,GAAG,IAAI,QAAQ,MAAM,EAAE,EAAEA,IAAI,EAAE,CAAC,EAAE,CAAC,IAAIoD,GAAG,IAAI,CAAC,IAAIpD,EAAEkD,GAAG,EAAElD,IAAI4C,GAAG5C,CAAC,EAAEkN,GAAGC,EAAE,EAAE,EAAE,SAAS7G,GAAGtG,EAAEC,EAAE,EAAED,KAAK,IAAIC,IAAI,EAAE,WAAWmD,EAAE,EAAE7B,EAAE,YAAY,CAAC,aAAavB,EAAE,IAAI,cAAc,CAAC,GAAGA,EAAEoK,GAAGpK,CAAC,IAAIA,EAAE,YAAY,CAAC,IAAI,cAAc,CAAC,CAAC,CAAC,IAAIoN,GAAG,CAAC,EAAE,SAAS7G,GAAGvG,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAE,CAAC,IAAIR,KAAK,EAAEyB,GAAG,EAAE0L,GAAG,OAAO1L,EAAEtB,EAAEK,IAAI,IAAI,EAAEA,EAAE,EAAEA,EAAEiB,EAAEjB,IAAI2M,GAAG3M,CAAC,EAAEgD,EAAErD,EAAE,EAAEK,CAAC,EAAEgD,EAAErD,EAAE,EAAEK,EAAE,CAAC,EAAEO,EAAE,EAAEZ,EAAE,EAAEK,EAAE,IAAI,CAAC,EAAE,OAAOR,EAAEuJ,GAAGvJ,CAAC,EAAEoN,GAAGrN,CAAC,GAAG,GAAGoN,EAAE,CAAC,CAAC,SAAS5G,GAAGxG,EAAE,CAACA,KAAK,EAAEuB,EAAE,YAAY,CAAC,IAAI,gBAAgB,OAAOvB,CAAC,CAAC,EAAE6K,GAAGT,GAAGpK,CAAC,CAAC,CAAC,CAAC,SAASyG,GAAGzG,EAAE,CAAC,CAAC,IAAIsN,GAAG,CAACtN,EAAEC,IAAI,CAAC,IAAIG,EAAEyL,GAAG7L,CAAC,EAAE,GAAYI,IAAT,OAAW,MAAMJ,EAAEuN,GAAGvN,CAAC,EAAEI,EAAEuL,GAAG3L,CAAC,EAAEyM,GAAGzM,CAAC,EAAE,IAAI0L,GAAG,GAAGzL,CAAC,qBAAqBG,CAAC,EAAE,EAAE,OAAOA,CAAC,EAAEoN,GAAG,CAACxN,EAAEC,EAAEG,IAAI,CAAC,IAAIsB,EAAE,CAAC,EAAE,OAAO1B,EAAEA,EAAE,WAAW0B,EAAEtB,CAAC,EAAEsB,EAAE,SAAS,EAAE,EAAEzB,IAAI,IAAI,CAAC,EAAEoM,GAAG3K,CAAC,GAAG1B,CAAC,EAAE,SAAS0G,GAAG1G,EAAEC,EAAEG,EAAE,CAAC,OAAOH,KAAK,EAAEG,KAAK,EAAEJ,EAAEoM,GAAGpM,IAAI,CAAC,EAAEC,EAAEqN,GAAGrN,EAAE,WAAW,EAAEuN,GAAGvN,EAAEG,EAAEJ,CAAC,CAAC,CAAC,IAAIyN,GAAGzN,GAAG,CAAC,GAAG,CAACA,EAAE,CAAC,OAAOA,EAAE,CAACmE,GAAEnE,CA
AC,CAAC,CAAC,EAAE0N,GAAG,EAAE/L,GAAG,KAAKgM,GAAG,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAEjM,GAAG,KAAKkM,GAAG,CAAC,EAAE,SAASpE,GAAG5J,EAAE,CAAC,OAAO,SAASA,EAAE,CAAC,GAAG,CAAC2D,GAAE,CAAC,GAAO+J,KAAJ,EAAO,CAAC,IAAIzN,EAAE,GAAGG,EAAE,GAAGJ,EAAG,CAACA,EAAE,IAAI,CAAC,GAAG,CAAC2D,KAAIgK,GAAG3N,EAAEC,EAAE,GAAGG,GAAG,CAACsN,GAAG,EAAED,GAAI,IAAIQ,GAAGtM,EAAE,CAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAE3B,EAAE,GAAG,GAAG,CAAC,IAAI0B,EAAE,UAAU,CAAC,IAAI1B,EAAE,EAAE,EAAE2B,GAAG,IAAI,IAAI,CAAC,EAAE,OAAO3B,EAAEkO,GAAGJ,GAAG9N,CAAC,CAAC,EAAE,EAAEqK,GAAGrK,EAAE,CAAC,EAAE,CAAC,OAAOC,EAAE,CAACyB,EAAEzB,EAAED,EAAE,EAAE,CAAC,IAAIS,EAAE,GAAG,GAAG,CAACkB,GAAG,CAAC,IAAIC,EAAEE,GAAGF,IAAIE,GAAG,MAAM9B,EAAE4B,EAAE,OAAOA,EAAE,SAASF,CAAC,EAAEjB,EAAE,GAAG,CAAC,GAAGT,GAAG,CAACS,EAAE,MAAMiB,CAAC,CAAC,CAAE,EAAEtB,EAAE,GAAGH,IAAIyN,GAAG,EAAE/L,GAAG,UAAU,CAAC,IAAI3B,EAAE0M,GAAG,KAAK,EAAEzM,EAAED,EAAE,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAEC,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,MAAMA,EAAE2N,GAAG,CAAC,EAAE,IAAIxN,EAAEyN,GAAG5N,CAAC,EAAE,OAAgBG,IAAT,SAAaA,EAAE2N,KAAKF,GAAG5N,CAAC,EAAEG,EAAE0N,GAAG1N,CAAC,EAAEH,GAAGA,EAAEG,EAAE,EAAE,EAAEJ,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAED,CAAC,EAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,MAAM,EAAEyN,GAAI,IAAIU,GAAGxM,EAAE,CAAE,EAAE,MAAU+L,KAAJ,GAAQA,GAAG,EAAED,GAAGW,EAAE,EAAE3B,GAAG9K,EAAE,EAAEA,GAAG,KAAKqM,GAAG,QAAQd,EAAE,GAAG/I,GAAE,kBAAkBuJ,EAAE,EAAE,EAAE,OAAOC,EAAE,CAAC,EAAG1N,GAAG,CAACD,EAAE,EAAE,KAAKC,CAAC,CAAC,CAAE,CAAC,CAAC,SAAS0G,GAAG3G,EAAE,CAAC,OAAOA,KAAK,EAAE4J,GAAI,KAAK5J,EAAEoM,GAAGpM,CAAC,GAAG,KAAKqM,EAAE,CAAE,CAAC,CAAC,IAAIgC,GAAG,CAAC,EAAE,SAASzH,GAAG5G,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOtB,KAAK,EAAEsB,KAAK,GAAG1B,EAAEqO,GAAGrO,IAAI,CAAC,GAAG,KAAKC,EAAEmM,GAAGnM,IAAI,CAAC,EAAEG,EAAEsB,CAAC,CAAC,CAAC,IAAI4M,GAAG,CAAC,EAAEC,GAAGvO,GAAG,CAAC,IAAIC,EAAEqO,GAAGtO,CAAC,EAAE,OAAgBC,IAAT,OAAW0L,GAAG3L,CAAC,EAAEC,CAAC,EAAE,SAAS4G,GAAG7G,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAE,CAAC,OAAOL,KAAK,EAAEsB,KAAK,EAAEjB,KAAK,GAAGT,EAAEqO,GAAGrO,IAAI,CAAC,GAAGC,EAAEmM,GAAGnM,IAAI,CAAC,EAAEA,EAAEG,EAAEmO,GAAGnO,CAAC,CAAC,EAAEsB,EAAEjB,CAAC,CAAC,CAAC,IAAI+N,GAAG,IAAc,OAAO,YAAjB,SAA4B,WAAW,SAAS,aAAa,EAAE,EAAE,SAASzH,GAAG/G,EAAE,CAAC,OAAWA,KAAK,IAAT,EAAYqM,GAAGmC,GAAG,CAAC,GAAGxO,EAAEuO,GAAGvO,CAAC,EAAEqM,GAAGmC,GAAG,EAAExO,CAAC,CAAC,EAAE,CAAC,IAAIyO,GAAGzO,GAAG,CAAC,IAAIC,EAAEoO,GAAG,OAAO,OAAOA,GAAG,KAAKrO,CAAC,EAAEC,CAAC,EAAEyO,GAAG,CAAC1O,EAAEC,IAAI,CAAC,QAAQG,EAAE,MAAMJ,CAAC,EAAE0B,EAAE,EAAEA,EAAE1B,EAAE,EAAE0B,EAAEtB,EAAEsB,CAAC,EAAE4L,GAAG,EAAE,EAAErN,EAAE,EAAEyB,IAAI,IAAI,CAAC,EAAE,aAAaA,CAAC,EAAE,OAAOtB,CAAC,EAAEuO,GAAG,CAAC3O,EAAEC,IAAI,OAAO,eAAeA,EAAE,OAAO,CAAC,MAAMD,CAAC,CAAC,EAAE,SAASgH,GAAGhH,EAAEC,EAAEG,EAAE,CAAC,IAAIsB,GAAGzB,EAAEyO,GAAG1O,EAAEC,IAAI,CAAC,GAAG,MAAM,EAAED,IAAI,IAAIS,EAAE;AAAA,EAAwDmB,EAAE,EAAEC,EAAE,CAAC,EAAMzB,IAAJ,GAAOyB,EAAE,KAAK,KAAK,EAAE,QAAQ6H,EAAE,CAAC,SAAS,EAAE5I,EAAE,CAACY,CAAC,EAAEV,EAAE,EAAEA,EAAEhB,EAAE,EAAEgB,EAAEa,EAAE,KAAK,MAAMb,CAAC,EAAE0I,EAAE,KAAK,UAAU1I,CAAC,EAAEF,EAAE,KAAKb,EAAEe,CAAC,CAAC,EAAEP,GAAG,YAAYO,CAAC,aAAaA,CAAC,6BAA6BY,EAAE,IAAIA,EAAE,EAAE;AAAA,EAAOA,GAAG3B,EAAEe,CAAC,EAAE,eAAe,OAAOP,GAAG,cAAkBL,IAAJ,EAAM,WAAW,WAAW,IAAIyB,EAAE,KAAK,IAAI,CAAC;AAAA,EAAOH,EAAE,KAAKgI,EAAE,KAAK,mBAAmB,EAAE5I,EAAE,KAAK0M,EAAE,EAAE/M,GAAG;AAAA,GAA8DiJ,EAAE,KAAKjJ,EAAE;AAAA,CAAM,EAAET,EAAE,SAASA,GAAE,CAAC,IAAIC,GAAE,SAAS,GAAG,EAAEA,cAAa,UAAU,MAAM,IAAI,UAAU,qCAAqC,OAAOA,EAAC,0BAA0B,EAAE,IAAIG,GAAEuO,GAAG1O,GAAE,MAAM,sBAAuB,UAAU,CAAC,CAAE,EAAE,OAAOG,GAAE,UAAUH,GAAE,UAAUG,GAAE,IAAIA,IAAGJ,GAAEC,GAAE,MAAMG,GAAEJ,EAAC,aAAa,OAAOA,
GAAEI,EAAC,EAAEsJ,CAAC,EAAE,GAAG5I,CAAC,EAAEV,EAAE,iBAAiBH,EAAE,IAAKD,IAAGA,GAAE,IAAK,EAAE,KAAK,IAAI,CAAC,QAAQ0B,EAAE,IAAI,IAAI+M,GAAGE,GAAGvO,EAAEJ,CAAC,CAAC,CAAC,CAAC,SAASiH,GAAGjH,EAAE,CAAC,OAAOA,EAAEuO,GAAGvO,IAAI,CAAC,EAAEqM,GAAGlL,EAAEnB,CAAC,CAAC,CAAC,CAAC,SAASkH,GAAGlH,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,EAAEoM,GAAGpM,IAAI,CAAC,EAAEC,EAAEmM,GAAGnM,CAAC,EAAEoM,GAAGrM,EAAEC,CAAC,CAAC,CAAC,CAAC,SAASkH,GAAGnH,EAAE,CAAC,GAAGA,KAAK,KAAKmM,GAAGnM,EAAE,CAAC,GAAG,EAAE,CAAC,SAASoH,IAAI,CAAC,OAAOiF,GAAG,CAAC,CAAC,CAAC,CAAC,SAAShF,GAAGrH,EAAE,CAACA,EAAEoM,GAAGpM,IAAI,CAAC,EAAE,QAAQC,EAAE,MAAMD,EAAE,MAAM,EAAEI,EAAE,EAAEA,EAAEJ,EAAE,OAAOI,IAAIH,EAAEG,CAAC,EAAEJ,EAAEI,CAAC,EAAE,OAAOiM,GAAGpM,CAAC,CAAC,CAAC,SAASqH,GAAGtH,EAAE,CAAC,OAAOqM,GAAGkC,GAAGvO,IAAI,CAAC,CAAC,CAAC,CAAC,SAASuH,IAAI,CAAC,OAAO8E,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS7E,GAAGxH,EAAE,CAAC,QAAQC,EAAEmM,GAAGpM,KAAK,CAAC,EAAEC,EAAE,QAAQ,CAAC,IAAIG,EAAEH,EAAE,IAAI,EAAEA,EAAE,IAAI,EAAEG,CAAC,CAAC,CAAC0G,GAAG9G,CAAC,CAAC,CAAC,SAASyH,GAAGzH,EAAEC,EAAEG,EAAE,CAACH,KAAK,EAAEG,KAAK,EAAEJ,EAAEoM,GAAGpM,IAAI,CAAC,EAAEC,EAAEmM,GAAGnM,CAAC,EAAEG,EAAEgM,GAAGhM,CAAC,EAAEJ,EAAEC,CAAC,EAAEG,CAAC,CAAC,SAASsH,GAAG1H,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,GAAGA,EAAEsN,GAAGtN,IAAI,EAAE,mBAAmB,GAAG,qBAAqBC,CAAC,EAAEoM,GAAGrM,CAAC,CAAC,CAAC,SAAS2H,GAAG3H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,eAAe,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,UAAU,EAAEA,GAAGA,EAAE,QAAQ,EAAE,KAAK,IAAIA,EAAE,eAAe,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,MAAM,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,IAAI4O,GAAG5O,GAAMA,EAAE,GAAL,IAAYA,EAAE,KAAL,GAAaA,EAAE,KAAL,GAAU6O,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAE,SAASlH,GAAG5H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,QAAQ,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,OAAO,EAAE,IAAII,GAAGwO,GAAG5O,EAAE,YAAY,CAAC,EAAE6O,GAAGC,IAAI9O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAEG,EAAE,EAAE,EAAEH,EAAE,KAAK,IAAI,CAAC,EAAE,IAAID,EAAE,kBAAkB,EAAEI,EAAE,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE,IAAI0B,EAAE,IAAI,KAAK1B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEA,EAAE,GAAGI,GAAGsB,GAAG1B,EAAE,kBAAkB,GAAG,KAAK,IAAI0B,EAAEtB,CAAC,GAAG,EAAE,EAAEH,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,SAAS6H,GAAG7H,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,IAAI,KAAK,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAE,KAAK,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,CAAC,EAAEI,EAAE,EAAE,EAAEJ,EAAE,KAAK,IAAI,CAAC,EAAE0B,EAAEzB,EAAE,kBAAkB,EAAEQ,EAAE,IAAI,KAAKR,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE2B,EAAE,IAAI,KAAK3B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEyJ,EAAE,KAAK,IAAI9H,
EAAEnB,CAAC,EAAE,MAAO,GAAEL,EAAE,EAAE,EAAEJ,EAAE,KAAK,IAAI,CAAC,EAAE,EAAOS,GAAGmB,GAAG8H,GAAGhI,GAAG,EAAEtB,IAAIsJ,GAAGhI,KAAKjB,EAAE,KAAK,IAAImB,EAAEnB,CAAC,EAAER,EAAE,QAAQA,EAAE,QAAQ,EAAE,MAAM,EAAEG,EAAEsJ,EAAEjJ,GAAGiB,EAAE,GAAG,EAAE,EAAE1B,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,OAAO,EAAEG,GAAGwO,GAAG3O,EAAE,YAAY,CAAC,EAAE4O,GAAGC,IAAI7O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEI,EAAE,EAAE,EAAEJ,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAED,EAAEC,EAAE,QAAQ,EAAE,OAAO,MAAMD,CAAC,EAAE,GAAGA,EAAE,GAAG,CAAC,CAAC,SAAS8H,GAAG9H,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE,CAAC,OAAON,EAAE+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,CAAC,EAAE,GAAG,CAAC,SAASkG,GAAG/H,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAE,CAAC,GAAGL,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,CAAC,CAAC,CAAC,SAASoG,GAAGhI,EAAEC,EAAEG,EAAEsB,EAAE,CAAC1B,KAAK,EAAEC,KAAK,EAAEG,KAAK,EAAEsB,KAAK,EAAE,IAAIjB,EAAG,IAAI,OAAM,YAAY,EAAEmB,EAAE,IAAI,KAAKnB,EAAE,EAAE,CAAC,EAAEK,EAAE,IAAI,KAAKL,EAAE,EAAE,CAAC,EAAEA,EAAEmB,EAAE,kBAAkB,EAAE,IAAIZ,EAAEF,EAAE,kBAAkB,EAAE6I,EAAE,KAAK,IAAIlJ,EAAEO,CAAC,EAAE,EAAE,EAAEhB,IAAI,IAAI,CAAC,EAAE,GAAG2J,EAAE,EAAE,EAAE1J,IAAI,IAAI,CAAC,EAAE,EAAOQ,GAAGO,GAAGY,GAAG5B,EAAEA,GAAGA,EAAE,mBAAmB,OAAO,CAAC,OAAO,GAAG,aAAa,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC,GAAG4B,CAAC,EAAEd,EAAEd,EAAEc,CAAC,EAAEE,EAAEP,GAAG+K,GAAG5J,EAAExB,EAAE,EAAE,EAAEoL,GAAG1K,EAAEY,EAAE,EAAE,IAAI8J,GAAG5J,EAAEF,EAAE,EAAE,EAAE8J,GAAG1K,EAAEV,EAAE,EAAE,EAAE,CAAC,IAAI2O,GAAG,CAAC,EAAEC,GAAG,CAAChP,EAAEC,IAAI,CAAC8O,GAAG,OAAO,EAAE,QAAQ3O,EAAEA,EAAE,EAAE,EAAEJ,MAAM,CAAC,GAAG,CAAC,IAAIS,EAAOL,GAAL,IAAOH,IAAIQ,GAAQL,GAAL,MAASH,EAAE,EAAE,EAAE,EAAE8O,GAAG,KAAU3O,GAAL,IAAO,EAAE,EAAEH,IAAI,IAAI,CAAC,EAAOG,GAAL,IAAOqD,EAAExD,IAAI,CAAC,EAAOG,GAAL,IAAO,EAAE,EAAEH,IAAI,IAAI,CAAC,EAAEe,EAAE,EAAEf,IAAI,IAAI,CAAC,CAAC,EAAEA,GAAGQ,EAAE,EAAE,CAAC,CAAC,OAAOsO,EAAE,EAAE,SAAS9G,GAAGjI,EAAEC,EAAEG,EAAE,CAAC,OAAOJ,KAAK,EAAEC,EAAE+O,GAAG/O,IAAI,EAAEG,IAAI,CAAC,EAAEoJ,GAAGxJ,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,SAASiI,GAAGlI,EAAEC,EAAEG,EAAE,CAAC,OAAOJ,KAAK,EAAEC,EAAE+O,GAAG/O,IAAI,EAAEG,IAAI,CAAC,EAAEoJ,GAAGxJ,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,IAAIkI,GAAG,IAAI,CAAC,EAAEC,GAAG,IAAI,KAAK,IAAI,EAAE,SAASC,GAAGrI,EAAEC,EAAE,CAAC,OAAOuC,EAAEiH,GAAGzJ,IAAI,EAAEC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIuI,GAAGF,GAAG,IAAI,CAAC,MAAM+B,IAAI,EAAE,QAAQ,EAAE,SAAS9B,IAAI,CAAC,MAAO,WAAU,CAACC,GAAG,IAAI,YAAY,WAAW,YAAY,IAAI,EAAE,IAAIC,GAAG,IAAI,UAAU,oBAAoB,SAASC,IAAI,CAAC,OAAOvE,GAAE,sEAAsE,EAAE,CAAC,CAAC,SAASwE,GAAG3I,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,EAAE,EAAE,OAAO,GAAGD,GAAGC,GAAG,WAAWD,EAAE,MAAM,GAAG,QAAQI,EAAE,EAAE,GAAGA,EAAEA,GAAG,EAAE,CAAC,IAAIK,EAAER,GAAG,EAAE,GAAGG,GAAGK,EAAE,KAAK,IAAIA,EAAET,EAAE,SAAS,EAAE,IAAI4B,EAAE,KAAKnB,EAAE,KAAK,IAAIT,EAAES,CAAC,EAAE,EAAE,CAACmB,GAAGA,EAAE,IAAI,KAAKA,EAAE,WAAWnB,GAAG,MAAMA,EAAE,OAAO,KAAK,EAAEJ,GAAE,OAAO,WAAW,OAAO,MAAM,GAAG,CAACA,GAAE,KAAKuB,CAAC,EAAErB,GAAE,EAAE,IAAIsB,EAAE,EAAE,MAAM,CAAC,MAAS,CAAC,CAACA,EAAE,MAAM,CAAC,GAAGA,EAAE,MAAM,EAAE,CAAC,MAAM,EAAE,CAAC,IAAIoN,GAAG,KAAK9K,GAAE,iGAAiG,EAAE,GAAG+K,GAAG,CAAC,EAAEC,GAAGnP,GAAG,CAACA,EAAE,QAASA,GAAG,CAAC,IAAIC,EAAEgP,GAAG,EAAEhP,IAAIiP,GAAGjP,CAAC,EAAED,EAAE,CAAE,CAAC,EAAE,SAAS4I,IAAI,CAAC,IAAI5I,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,EAAE,OAAeA,EAAE,CAAC,GAAZ,SAAeA,EAAE,MAAM,EAAEmP,GAAGnP,CAAC,EAAE
kP,GAAG,GAAGD,GAAG,EAAEC,GAAG,GAAGlP,EAAEkP,GAAG,EAAE,CAAC,SAASrG,GAAG7I,EAAEC,EAAEG,EAAE,CAAC,GAAGJ,KAAK,EAAEC,KAAK,EAAEiP,GAAG,IAAIlP,EAAE,IAAI0B,EAAEwN,GAAG,QAAiBxN,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,GAAG,CAAC,GAAnD,SAAsDA,EAAE,MAAM,EAAEyN,GAAGzN,CAAC,EAAE,QAAQjB,EAAE,EAAEiB,EAAEjB,CAAC,GAAGwO,GAAG,GAAGjP,GAAG,EAAES,EAAE,IAAIT,EAAE,EAAEA,EAAEI,GAAGsB,EAAE1B,EAAES,CAAC,EAAE,EAAET,EAAE,EAAE,EAAEC,EAAE,EAAED,IAAI,IAAI,CAAC,EAAEiP,GAAG,EAAE,OAAOjP,CAAC,CAAC,IAAIoP,GAAGC,GAAG,CAAC,EAAEC,GAAG,IAAI,CAAC,GAAG,CAACF,GAAG,CAAC,IAAIpP,EAAEC,EAAE,CAAC,KAAK,WAAW,QAAQ,WAAW,KAAK,IAAI,IAAI,IAAI,KAAK,iBAAiB,MAAgB,OAAO,WAAjB,UAA4B,UAAU,WAAW,UAAU,UAAU,CAAC,GAAG,KAAK,QAAQ,IAAI,GAAG,EAAE,SAAS,EAAEiC,GAAG,gBAAgB,EAAE,IAAIlC,KAAKqP,GAAYA,GAAGrP,CAAC,IAAb,OAAe,OAAOC,EAAED,CAAC,EAAEC,EAAED,CAAC,EAAEqP,GAAGrP,CAAC,EAAE,IAAII,EAAE,CAAC,EAAE,IAAIJ,KAAKC,EAAEG,EAAE,KAAK,GAAGJ,CAAC,IAAIC,EAAED,CAAC,CAAC,EAAE,EAAEoP,GAAGhP,CAAC,CAAC,OAAOgP,EAAE,EAAE,SAAStG,GAAG9I,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIyB,EAAE,EAAE,OAAO4N,GAAG,EAAE,QAAS,CAAC7O,EAAEmB,IAAI,CAAC,IAAIC,EAAE5B,EAAEyB,EAAE,IAAIE,EAAE,EAAE,EAAE5B,EAAE,EAAE4B,IAAI,IAAI,CAAC,EAAEC,EAAEA,EAAE,EAAEA,EAAEpB,EAAE,OAAO,EAAEoB,EAAEzB,EAAE,EAAEwB,MAAM,CAAC,EAAEnB,EAAE,WAAWoB,CAAC,EAAEzB,EAAE,EAAEwB,IAAI,CAAC,EAAE,EAAEF,GAAGjB,EAAE,OAAO,CAAC,CAAE,EAAE,CAAC,CAAC,SAASsI,GAAG/I,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIG,EAAEkP,GAAG,EAAE,EAAE,EAAEtP,IAAI,IAAI,CAAC,EAAEI,EAAE,OAAO,IAAIsB,EAAE,EAAE,OAAOtB,EAAE,QAASJ,GAAG0B,GAAG1B,EAAE,OAAO,CAAE,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAEyB,EAAE,CAAC,CAAC,SAASuH,GAAGjJ,EAAE,CAAC,OAAOuB,EAAE+I,GAAG,GAAG,EAAEtK,CAAC,EAAE,EAAE,CAAC,SAASkJ,GAAGlJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOH,EAAE+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,CAAC,EAAE,EAAE,CAAC,SAASyH,GAAGnJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAOH,EAAE+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEsB,CAAC,EAAE,EAAE,CAAC,IAAI6N,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,SAASnG,GAAGpJ,EAAEC,EAAEG,EAAEK,EAAE,CAAC,GAAGc,EAAE,OAAO+I,GAAG,GAAG,EAAEtK,EAAEC,EAAEG,EAAEK,CAAC,EAAER,KAAK,EAAEG,KAAK,EAAEK,KAAK,EAAE,QAAQmB,EAAE,EAAEC,EAAE,EAAEA,EAAEzB,EAAEyB,IAAI,CAAC,IAAIf,EAAE,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAEe,EAAE,EAAE,EAAEf,EAAE,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,QAAQ0J,EAAE,EAAEA,EAAE3I,EAAE2I,IAAI,CAAC,IAAIzI,EAAE,EAAE,EAAEJ,EAAE6I,IAAI,CAAC,EAAExI,GAAEoO,GAAGvP,CAAC,EAAMkB,IAAJ,GAAYA,IAAL,KAAalB,IAAJ,EAAMuC,EAAEC,GAAG6I,GAAGlK,GAAE,CAAC,CAAC,EAAEA,GAAE,OAAO,GAAGA,GAAE,KAAKD,CAAC,CAAC,CAACU,GAAGZ,CAAC,CAAC,OAAO,EAAE,EAAEP,IAAI,IAAI,CAAC,EAAEmB,EAAE,CAAC,CAAC,IAAI4N,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC1P,EAAEC,IAAI,CAACG,EAAE,EAAE,IAAIJ,EAAEC,IAAI,CAAC,CAAC,EAAE,SAASqJ,GAAGtJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,SAASjB,EAAET,EAAEC,GAAEG,GAAE,CAAC,IAAIJ,EAAY,OAAOA,GAAjB,SAAmBA,EAAE,SAAS,EAAEA,GAAG,GAAGA,EAAE,OAAOC,IAAGD,EAAEI,GAAE,CAAC,EAAEJ,EAAE,OAAOA,CAAC,CAAC,SAAS4B,EAAE5B,EAAEC,GAAE,CAAC,OAAOQ,EAAET,EAAEC,GAAE,GAAG,CAAC,CAAC,SAASa,EAAEd,EAAEC,GAAE,CAAC,SAASG,GAAEJ,GAAE,CAAC,MAAO,GAAEA,GAAE,GAAG,EAAEA,GAAE,EAAE,CAAC,CAAC,IAAI0B,GAAE,OAAYA,GAAEtB,GAAEJ,EAAE,YAAY,EAAEC,GAAE,YAAY,CAAC,KAAxC,IAAiDyB,GAAEtB,GAAEJ,EAAE,SAAS,EAAEC,GAAE,SAAS,CAAC,KAAlC,IAAuCyB,GAAEtB,GAAEJ,EAAE,QAAQ,EAAEC,GAAE,QAAQ,CAAC,GAAGyB,EAAC,CAAC,SAASV,EAAEhB,EAAE,CAAC,OAAOA,EAAE,OAAO,EAAE,CAAC,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAOA,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,C
AAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAAS2J,EAAE3J,EAAE,CAAC,IAAIC,GAAED,EAAE,GAAG,IAAIA,EAAE,IAAI,KAAK,IAAI,KAAKA,EAAE,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,EAAE,EAAEC,IAAG,CAAC,IAAIG,GAAEJ,EAAE,SAAS,EAAE0B,IAAGkN,GAAG5O,EAAE,YAAY,CAAC,EAAEwP,GAAGC,IAAIrP,EAAC,EAAE,GAAG,EAAEH,GAAEyB,GAAE1B,EAAE,QAAQ,GAAG,CAACA,EAAE,QAAQA,EAAE,QAAQ,EAAEC,EAAC,EAAE,KAAK,CAACA,IAAGyB,GAAE1B,EAAE,QAAQ,EAAE,EAAEA,EAAE,QAAQ,CAAC,EAAE,GAAGI,GAAEJ,EAAE,SAASI,GAAE,CAAC,GAAGJ,EAAE,SAAS,CAAC,EAAEA,EAAE,YAAYA,EAAE,YAAY,EAAE,CAAC,EAAE,CAAC,OAAOI,GAAE,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,EAAEC,GAAEe,EAAE,IAAI,KAAKhB,EAAE,YAAY,EAAE,EAAE,CAAC,CAAC,EAAEI,GAAEY,EAAEZ,EAAC,EAAE,GAAGU,EAAEb,GAAED,CAAC,EAAE,GAAGc,EAAEV,GAAEJ,CAAC,EAAEA,EAAE,YAAY,EAAE,EAAEA,EAAE,YAAY,EAAEA,EAAE,YAAY,EAAE,CAAC,CAACA,KAAK,EAAEC,KAAK,EAAEG,KAAK,EAAEsB,KAAK,EAAE,IAAIR,EAAE,EAAE,EAAEQ,EAAE,KAAK,IAAI,CAAC,EAAE,QAAQP,MAAKO,EAAE,CAAC,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAGR,EAAEuI,GAAGvI,CAAC,EAAE,EAAE,EAAEd,EAAEqJ,GAAGrJ,CAAC,EAAEc,EAAE,CAAC,KAAK,uBAAuB,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK,KAAK,cAAc,KAAK,QAAQ,KAAK,WAAW,KAAK,WAAW,KAAK,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,WAAW,MAAM,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,IAAI,EAAEd,EAAEA,EAAE,QAAQ,IAAI,OAAOe,GAAE,GAAG,EAAED,EAAEC,EAAC,CAAC,EAAE,IAAIC,GAAE,2DAA2D,MAAM,GAAG,EAAEC,GAAE,wFAAwF,MAAM,GAAG,EAAE,IAAIF,MAAKD,EAAE,CAAC,KAAKlB,GAAGoB,GAAEpB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGoB,GAAEpB,EAAE,EAAE,EAAE,KAAKA,GAAGqB,GAAErB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGqB,GAAErB,EAAE,EAAE,EAAE,KAAKA,GAAG4B,GAAG5B,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAKA,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAKA,GAAGS,EAAET,EAAE,GAAG,EAAE,GAAG,EAAE,KAAKA,GAAG2J,EAAE3J,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,EAAE,KAAK2J,EAAE,KAAK3J,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAKA,KAAQA,EAAEA,EAAE,KAAR,EAAYA,EAAE,GAAG,GAAGA,IAAIA,GAAG,IAAI4B,EAAE5B,EAAE,CAAC,GAAG,KAAKA,GAAG,CAAC,QAAQC,GAAE,EAAEG,GAAE,EAAEA,IAAGJ,EAAE,GAAG,EAAEC,KAAI2O,GAAG5O,EAAE,GAAG,IAAI,EAAEwP,GAAGC,IAAIrP,IAAG,EAAE,CAAC,OAAOwB,EAAE5B,EAAE,GAAGC,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAG4B,EAAE5B,EAAE,GAAG,EAAE,CAAC,EAAE,KAAKA,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,EAAK,KAAKA,GAAG,GAAGA,EAAE,IAAI,GAAGA,EAAE,GAAG,KAAK,KAAK,KAAKA,GAAG4B,EAAE5B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI,IAAK,KAAKA,GAAGA,EAAE,IAAI,EAAE,KAAKA,GAAG4B,EAAE,KAAK,OAAO5B,EAAE,GAAG,EAAEA,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,KAAKA,GAAG,CAAC,IAAIC,GAAE,KAAK,OAAOD,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,GAAG,IAAIA,EAAE,GAAG,IAAIA,EAAE,GAAG,GAAG,GAAGC,KAAIA,GAAMA,IAAJ,MAAYG,IAAGJ,EAAE,GAAG,IAAIA,EAAE,IAAI,IAAtB,GAA6BI,IAAH,GAAMwO,GAAG5O,EAAE,EAAE,IAAIC,GAAE,QAAQ,CAACA,GAAE,GAAG,IAAIG,IAAGJ,EAAE,GAAG,EAAEA,EAAE,GAAG,GAAG,GAAMI,IAAH,GAASA,IAAH,GAAMwO,GAAG5O,EAAE,GAAG,IAAI,CAAC,IAAIC,IAAG,CAAC,OAAO2B,EAAE3B,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAGA,EAAE,GAAG,KAAKA,GAAG4B,EAAE,KAAK,OAAO5B,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,KAAKA,IAAIA
,EAAE,GAAG,MAAM,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,KAAKA,GAAG,CAAC,IAAIC,GAAE,IAAID,EAAEA,EAAE,IAAI,OAAOA,EAAE,KAAK,IAAIA,CAAC,EAAE,IAAIC,GAAE,IAAI,MAAY,QAAQD,EAAE,GAAG,IAAIA,EAAE,KAAK,MAAM,EAAE,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,IAAI,GAAG,EAAEI,EAAEA,EAAE,QAAQ,MAAM,MAAM,EAAEc,EAAEd,EAAE,SAASe,EAAC,IAAIf,EAAEA,EAAE,QAAQ,IAAI,OAAOe,GAAE,GAAG,EAAED,EAAEC,EAAC,EAAEO,CAAC,CAAC,GAAG,OAAOP,GAAE,SAASnB,EAAE,CAAC,IAAIC,GAAE,MAAMqL,GAAGtL,CAAC,EAAE,CAAC,EAAE,OAAOuL,GAAGvL,EAAEC,GAAE,EAAEA,GAAE,MAAM,EAAEA,EAAC,EAAEG,EAAEA,EAAE,QAAQ,QAAQ,GAAG,CAAC,EAAEe,GAAE,OAAOlB,EAAE,GAAGyP,GAAGvO,GAAEnB,CAAC,EAAEmB,GAAE,OAAO,EAAE,CAAC,SAASoI,GAAGvJ,EAAEC,EAAEG,EAAEsB,EAAE,CAAC,OAAO4H,GAAGtJ,IAAI,EAAEC,IAAI,EAAEG,IAAI,EAAEsB,IAAI,CAAC,CAAC,CAACH,GAAG,UAAU,CAAC,QAAQvB,EAAEmB,EAAE,WAAW,EAAEnB,KAAKiK,GAAG,EAAErG,GAAE,QAAS,IAAI,CAACG,KAAI,SAAS/D,EAAE,CAACuB,EAAEvB,EAAE,EAAE,QAAQ,IAAIgK,GAAG,IAAIE,EAAE,CAAC,EAAE,KAAKlK,CAAC,CAAC,EAAG,IAAIkE,GAAE,CAAE,CAAC,CAAE,CAAC,EAAE,EAAE,QAAQyL,GAAG,MAAM,GAAG,EAAEC,GAAG,EAAE,IAAIA,GAAG,EAAEA,GAAGD,GAAGC,EAAE,EAAE,OAAO,aAAaA,EAAE,EAAEnE,GAAGkE,GAAGjE,GAAGvK,EAAE,aAAa,cAAc,KAAK,CAAC,YAAYnB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,cAAc,CAAC,EAAEmB,EAAE,cAAc,cAAc,KAAK,CAAC,YAAYnB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,eAAe,CAAC,EAAEmM,GAAG,KAAK,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,CAAC,EAAEhL,EAAE,oBAAoB,IAAIgL,GAAG,OAAO,EAAE,EAAED,GAAG,OAAO,IAAImB,GAAG,CAAChE,GAAGsB,GAAGQ,GAAGtG,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGqC,GAAGC,GAAGe,GAAGC,GAAGE,GAAGC,GAAGC,GAAGC,EAAE,EAAE8E,GAAG,UAAU,CAAC,SAASlO,EAAEA,EAAEC,EAAE,CAAC,OAAOiO,GAAGlO,EAAE,QAAQkO,GAAG,UAAU,CAAC,IAAIlO,EAAEkO,GAAGjO,EAAE,CAAC,EAAE,OAAO,CAACG,EAAEsB,CAAC,IAAI,OAAO,QAAQ1B,CAAC,EAAEC,EAAEG,CAAC,EAAc,OAAOsB,GAAnB,WAAqB,IAAI1B,IAAI,CAAC4N,GAAG,KAAKxN,CAAC,EAAE,GAAG,CAAC,OAAOsB,EAAE,GAAG1B,CAAC,CAAC,QAAC,CAAQ2D,KAAIiK,GAAG,IAAI,EAAEjM,IAAQ+L,KAAJ,GAAYE,GAAG,SAAP,IAAgBF,GAAG,EAAErD,IAAI,EAAEoD,GAAGoC,EAAE,EAAe,OAAO,OAApB,KAA4B,OAAO,GAAG,GAAG,CAAC,EAAEnO,EAAE,OAAOzB,CAAC,EAAE,EAAEiO,GAAG,UAAU,CAAC,IAAIlO,EAAEkO,GAAGjO,EAAED,GAAGC,GAAGD,EAAEC,CAAC,IAAI,EAAEG,EAAEJ,GAAG,IAAIA,EAAE,IAAI,EAAE,OAAOA,EAAE,OAAO,OAAO,CAAC,EAAEA,CAAC,GAAG,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAGI,EAAEJ,EAAE,EAAE,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,kCAAkCI,EAAEJ,EAAE,iCAAiC,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAGI,EAAEJ,EAAE,EAAE,EAAEA,CAAC,EAAE,EAAE4K,GAAG,KAAKsD,GAAG,EAAE,EAAErK,GAAE,QAAQqK,GAAG,EAAE,EAAE3K,GAAEtD,EAAEiE,GAAE,EAAEgK,EAAE,CAAC,IAAIjO,EAAEqD,GAAG,EAAE,GAAGS,KAAI5C,EAAE,gBAAgB,GAAG,CAAC,OAAOA,EAAE,gBAAgBlB,EAAED,CAAC,CAAC,OAAOA,EAAE,CAACwC,EAAE,sDAAsDxC,CAAC,EAAE,EAAEkB,EAAElB,CAAC,CAAC,CAAC,OAAOoE,KAAKjD,EAAE,WAAWkD,GAAG,kCAAkC,EAAE,mCAAmClD,EAAE,WAAWA,EAAE,WAAW,mCAAmCiB,CAAC,EAAEA,EAAE,mCAAmC,IAAI,IAAI,mCAAmC,YAAY,GAAG,EAAE,KAAK,SAASpC,EAAEC,EAAE,CAAC,IAAIG,EAAEgE,GAAG,OAAkB,OAAO,YAAY,sBAA/B,YAAqDC,GAAGjE,CAAC,GAAGkE,GAAGlE,CAAC,GAAe,OAAO,OAAnB,WAAyBoE,GAAGpE,EAAEJ,EAAEC,CAAC,EAAE,MAAMG,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMsB,GAAG,YAAY,qBAAqBA,EAAE1B,CAAC,EAAE,KAAKC,EAAG,SAASyB,EAAE,CAAC,OAAOc,EAAE,kCAAkCd,CAAC,EAAE,EAAEc,EAAE,2CAA2C,EAAEgC,GAAGpE,EAAEJ,EAAEC,CAAC,CAAC,CAAE,CAAE,CAAC,EAAEA,EAAG,SAASA,EAAE,CAACD,EAAEC,EAAE,SAASA,EAAE,MAAM,CAAC,CAAE,EAAE,MAAMiB,CAAC,EAAE,CAAC,CAAC,EAAE,EAAEqM,GAAGvN,IAAIuN,GAAGW,GAAG,IAAIlO,CAAC,EAAEgD,GAAG,KAAKA,GAAGkL,GAAG,IAAI,EAAE/M,EAAE,SAAS,CAACnB,EAAEC,KAAKkB,EAAE,SAAS+M,GAAG,IAAIlO,EAAEC,CAAC,EAAEkB,EAAE,iBAAiB,CAACnB,EAAEC,KAAKkB,EAAE,iBAAiB+M,GAAG,IAAIlO,EAAEC,CAAC,EAAEkB,EAAE,yBAAyB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6
H,EAAE5I,EAAEE,KAAKG,EAAE,yBAAyB+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,EAAE5I,EAAEE,CAAC,EAAEG,EAAE,4BAA4B,CAACnB,EAAEC,KAAKkB,EAAE,4BAA4B+M,GAAG,IAAIlO,EAAEC,CAAC,EAAEkB,EAAE,6BAA6B,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,6BAA6B+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,0BAA0B,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,0BAA0B+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,0BAA0BnB,IAAImB,EAAE,0BAA0B+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,kBAAkB,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,kBAAkB+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,mBAAmBnB,IAAImB,EAAE,mBAAmB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,wBAAwB,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,wBAAwB+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,iBAAiB,CAACnB,EAAEC,KAAKkB,EAAE,iBAAiB+M,GAAG,IAAIlO,EAAEC,CAAC,EAAEkB,EAAE,kBAAkB,CAACnB,EAAEC,KAAKkB,EAAE,kBAAkB+M,GAAG,IAAIlO,EAAEC,CAAC,EAAEkB,EAAE,SAASnB,IAAImB,EAAE,SAAS+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,iBAAiB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,KAAKT,EAAE,iBAAiB+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,CAAC,EAAET,EAAE,kBAAkB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,KAAKU,EAAE,kBAAkB+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEU,EAAE,kBAAkBnB,IAAImB,EAAE,kBAAkB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,qBAAqB,CAACnB,EAAEC,EAAEG,EAAEsB,KAAKP,EAAE,qBAAqB+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,CAAC,EAAEP,EAAE,sBAAsB,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,sBAAsB+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,sBAAsBnB,IAAImB,EAAE,sBAAsB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,kBAAkBnB,IAAImB,EAAE,kBAAkB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,cAAc,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,cAAc+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,eAAe,CAACnB,EAAEC,EAAEG,EAAEsB,KAAKP,EAAE,eAAe+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,CAAC,EAAEP,EAAE,sBAAsBnB,IAAImB,EAAE,sBAAsB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,mBAAmBnB,IAAImB,EAAE,mBAAmB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,mBAAmB,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,KAAKU,EAAE,mBAAmB+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEU,EAAE,QAAQ,CAACnB,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,KAAKvI,EAAE,QAAQ+M,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,EAAEC,EAAE6H,CAAC,EAAEvI,EAAE,iBAAiBnB,IAAImB,EAAE,iBAAiB+M,GAAG,IAAIlO,CAAC,EAAEmB,EAAE,YAAY,CAACnB,EAAEC,EAAEG,KAAKe,EAAE,YAAY+M,GAAG,IAAIlO,EAAEC,EAAEG,CAAC,EAAEe,EAAE,iBAAiBnB,IAAImB,EAAE,iBAAiB+M,GAAG,IAAIlO,CAAC,EAAE,IAAI8P,GAAG5M,GAAG,KAAKA,GAAGgL,GAAG,IAAI,EAAExB,GAAGvL,EAAE,QAAQnB,IAAI0M,GAAGvL,EAAE,QAAQ+M,GAAG,IAAIlO,CAAC,EAAEyM,GAAGtL,EAAE,MAAMnB,IAAIyM,GAAGtL,EAAE,MAAM+M,GAAG,IAAIlO,CAAC,EAAE2C,GAAG,CAAC3C,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,KAAKe,GAAGuL,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,EAAEmB,CAAC,EAAEyB,GAAG,KAAKA,GAAG6K,GAAG,IAAI,EAAEzD,GAAG,CAACzK,EAAEC,EAAEG,EAAEsB,EAAEjB,KAAKgK,GAAGyD,GAAG,IAAIlO,EAAEC,EAAEG,EAAEsB,EAAEjB,CAAC,EAAEqK,GAAG9K,IAAI8K,GAAGoD,GAAG,IAAIlO,CAAC,EAAEmD,GAAGnD,IAAImD,GAAG+K,GAAG,IAAIlO,CAAC,EAAEmN,GAAG,KAAKA,GAAGe,GAAG,IAAI,EAAElD,GAAG,CAAChL,EAAEC,KAAK+K,GAAGkD,GAAG,IAAIlO,EAAEC,CAAC,EAAEyK,GAAG1K,IAAI0K,GAAGwD,GAAG,IAAIlO,CAAC,EAAEwK,GAAGxK,IAAIwK,GAAG0D,GAAG,IAAIlO,CAAC,EAAEuK,GAAG,KAAKA,GAAG2D,GAAG,IAAI,EAAEjD,GAAG9J,EAAE,WAAW,CAACnB,EAAEC,KAAKgL,GAAG9J,EAAE,WAAW+M,GAAG,IAAIlO,EAAEC,CAAC,EAAEkO,GAAGnO,IAAImO,GAAGD,GAAG,IAAIlO,CAAC,EAAE6P,GAAG,KAAKA,GAAG3B,GAAG,IAAI,EAAED,GAAGjO,IAAIiO,GAAGC,GAAG,IAAIlO,CAAC,EAAEoO,GAAG,KAAKA,GAAGF,GAAG,IAAI,EAAE,SAAS6B,IAAI,CAAC,GAAG,EAAE,EAAEhM,IAAG,GAAGxC,EAAE,EAAEJ,CAAC,EAAEI,GAAGwJ,GAAGlH,EAAC,EAAE,YAAY1C,CAAC,MAAM,CAAC,GAAGA,EAAE,OAAO,IAAgB,OAAOA,EAAE,QAArB,aAA8BA,EAAE,OAAO,CAACA,EAAE,MAAM,GAAGA,EAAE,OAAO,QAAQyC,GAAE,QAAQzC,EAAE,OAAO,MAAM,CAAC,EAAE4J,GAAGnH,EAAC,EAAE,EAAEG,IAAG+L,KAAKA,GAAG,GAAG3O,EAAE,UAAU,GAAGwC,KAAIpC,GAAGwJ,GAAGlH,EAAC
,EAAE,EAAE1C,CAAC,EAAEI,GAAGwJ,GAAGjH,EAAC,GAAG,CAAC,CAAC,OAAO3C,EAAE,eAAe,QAAQA,EAAE,cAAc,QAAQA,EAAE,UAAU,IAAIoJ,GAAG,EAAEpJ,EAAE,aAAanB,GAAG0K,GAAG1K,CAAC,EAAEmB,EAAE,WAAWnB,GAAGwK,GAAGxK,CAAC,EAAEmB,EAAE,aAAasI,GAAGtI,EAAE,aAAaqK,GAAGrK,EAAE,gBAAgBmK,GAAGrH,GAAE,SAASjE,GAAG,CAAC8P,IAAIC,GAAG,EAAED,KAAK7L,GAAEjE,EAAE,EAAE+P,GAAG,EAAE3O,CAAC,GAAUrB,GAAQE,GAAiB,WAAW,MAAM,OAAhC,cAAsCA,GAAE,ICApw0C,IAWa+P,GAePC,GAKAC,GAwCAC,GAsBAC,GAeOC,GAoBPC,GAsBOC,GAtJbC,GAAAC,EAAA,kBAIAC,KAOaV,GAET,GAAS,OAEA,kBAEJ,OAAO,SAAa,IAAe,SAAS,eAAqC,IAE9C,OAAO,KAAS,IAAc,KAAK,UAAU,KAAO,QAO1FC,GAAS,IAAU,OAAO,SAAa,IAAc,OAAY,SAAS,OAK1EC,GAAe,CAACS,EAAkBC,IAA4B,CAClE,GAAI,CACF,IAAMC,EAAUD,GAAkBZ,GAElC,OADYa,EAAU,IAAI,IAAIF,EAAUE,CAAO,EAAI,IAAI,IAAIF,CAAQ,GACxD,SAAWV,EACxB,MAAQ,CACN,MAAO,EACT,CACF,EAgCME,GAAU,MAAMW,GAAyC,CAE7D,IAAMC,EAAO,MADI,MAAM,MAAMD,EAAa,CAAC,YAAa,aAAa,CAAC,GAC1C,KAAK,EACjC,OAAO,IAAI,gBAAgBC,CAAI,CACjC,EAkBMX,GAE0C,cAA+B,QAalEC,GAAoB,SAAkD,CACjF,GAAI,CAACL,GACH,MAAM,IAAI,MAAM,sEAAsE,EAIxF,GAAIE,GAAaF,EAAS,EACxB,MAAO,CAAC,OAAWI,GAAmB,CAAC,EAIzC,IAAMY,EAAM,MAAMb,GAAQH,EAAS,EACnC,MAAO,CAACgB,EAAKZ,GAAmBY,CAAG,CAAC,CACtC,EAOMV,GAGF,cAIK,QAeIC,GAAmB,MAC5BU,EAA+BL,EAC/BM,IAEO,CAAC,OAAWZ,EAAmB,IC1J1C,IAQIa,GACAC,GACAC,GACAC,GAEEC,GAwBAC,GAyBOC,GA+GAC,GA7KbC,GAAAC,EAAA,kBAMAC,KAGIT,GAAc,GACdC,GAAe,GACfC,GAAU,GAERC,GAAyB,IAAe,CAE5C,GAAI,OAAO,kBAAsB,IAC/B,MAAO,GAGT,GAAI,CAGF,OAAI,OAAO,eAAmB,KAC5B,IAAI,eAAe,EAAE,MAAM,YAAY,IAAI,kBAAkB,CAAC,CAAC,EAK1D,YAAY,SAAS,IAAI,WAAW,CACzC,EAAG,GAAI,IAAK,IAAK,EAAG,EAAI,EAAI,EAAG,EAAG,EAAG,EAAI,GAAI,EAAK,EAAI,EAAG,EAAG,EAAI,EAAG,EACnE,EAAG,EAAI,EAAK,EAAK,EAAG,GAAI,GAAI,EAAG,EAAG,EAAG,GAAI,EAAI,IAAK,GAAI,EAAG,EAAG,GAAI,EAClE,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEMC,GAAkB,IAAe,CACrC,GAAI,CAeF,OAAO,YAAY,SAAS,IAAI,WAAW,CACzC,EAAK,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,GAAI,EAAK,GAAK,EAAG,GAAI,EACvF,IAAK,GAAI,IAAK,GAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAI,IAAK,IAAK,EAAG,GAAI,EACzF,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEaC,GAAwB,MAAMK,GAA+C,CACxF,GAAIV,GACF,OAAO,QAAQ,QAAQ,EAEzB,GAAIC,GACF,MAAM,IAAI,MAAM,uDAAyD,EAE3E,GAAIC,GACF,MAAM,IAAI,MAAM,oDAAsD,EAGxED,GAAe,GAGf,IAAMU,EAAUD,EAAM,YAClBE,EAAaF,EAAM,WAGvB,GAAI,CAACN,GAAgB,EACnB,MAAM,IAAI,MAAM,+DAA+D,EAIjF,IAAMS,EAAuBV,GAAuB,EAChDS,EAAa,GAAK,CAACC,IACjB,OAAO,KAAS,KAAe,CAAC,KAAK,qBAEvC,QAAQ,KACJ,iCAAmCD,EACnC,uIACkE,EAIxE,QAAQ,KACJ,4GACmC,EAGvCF,EAAM,WAAaE,EAAa,GAGlC,IAAME,EAAYJ,EAAM,UAClBK,EAAqB,OAAOD,GAAc,SAAWA,EAAY,OACjEE,EAAuBF,GAAiC,IACxDG,EAAmBD,GAA6B,MAAQA,EACxDE,EAAwBJ,GAAiC,KACzDK,EAAoBD,GAA8B,MAAQA,EAE1D,CAACE,EAAWC,CAAc,EAAK,MAAMC,GAAiBL,EAAiBF,EAAoBH,EAAa,CAAC,EAE3GW,EAAY,GAEVC,EAA8B,CAAC,EAqDrC,GAlDIb,EAAU,GACZa,EAAM,KAAK,IAAI,QAASC,GAAY,CAClC,WAAW,IAAM,CACfF,EAAY,GACZE,EAAQ,CACV,EAAGd,CAAO,CACZ,CAAC,CAAC,EAIJa,EAAM,KAAK,IAAI,QAAQ,CAACC,EAASC,IAAW,CAC1C,IAAMC,EAAiC,CAKrC,WAAAf,CACF,GAEIO,GAAoBJ,KAMtBY,EAAO,WAAa,CAACC,EAAUC,IAC3BV,IAAqBJ,GAAsBc,GAAmBD,GAGpEP,EAAeM,CAAM,EAAE,KAEnBG,GAAU,CACR7B,GAAe,GACfD,GAAc,GACdD,GAAO+B,EACPL,EAAQ,EACJL,GACF,IAAI,gBAAgBA,CAAS,CAEjC,EAECW,GAAS,CACR9B,GAAe,GACfC,GAAU,GACVwB,EAAOK,CAAI,CACb,CAAC,CACP,CAAC,CAAC,EAEF,MAAM,QAAQ,KAAKP,CAAK,EAEpBD,EACF,MAAM,IAAI,MAAM,2DAA2DZ,CAAO,IAAI,CAE1F,EAEaL,GAAc,IAAqB,CAC9C,GAAIN,IAAeD,GACjB,OAAOA,GAGT,MAAM,IAAI,MAAM,qCAAqC,CACvD,ICnLA,IAKaiC,GAeAC,GA6BAC,GAjDbC,GAAAC,EAAA,kBAGAC,KAEaL,GAAkB,CAACM,EAAcC,IAA6B,CACzE,IAAMC,EAAOC,GAAY,EAEnBC,EAAaF,EAAK,gBAAgBF,CAAI,EAAI,EAC1CK,EAAaH,EAAK,QAAQE,CAAU,EAC1C,OAAAF,EAAK,aAAaF,EAAMK,EAAYD,CAAU,EAC9CH,EAAO,KAAKI,CAA
U,EAEfA,CACT,EAMaV,GACT,CAACW,EAAkCC,EAAgBC,EAClDC,IAAuC,CACtC,GAAI,OAAOH,GAAW,UAAYA,IAAY,KAAM,CAClD,GAAIE,EAAK,IAAIF,CAAO,EAClB,MAAM,IAAI,MAAM,+BAA+B,EAE/CE,EAAK,IAAIF,CAAO,CAEpB,CAEA,OAAO,QAAQA,CAAO,EAAE,QAAQ,CAAC,CAACI,EAAKC,CAAK,IAAM,CAChD,IAAMC,EAAQL,EAAUA,EAASG,EAAMA,EACvC,GAAI,OAAOC,GAAU,SACnBhB,GAAoBgB,EAAkCC,EAAO,IAAKJ,EAAMC,CAAO,UACtE,OAAOE,GAAU,UAAY,OAAOA,GAAU,SACvDF,EAAQG,EAAMD,EAAM,SAAS,CAAC,UACrB,OAAOA,GAAU,UAC1BF,EAAQG,EAAOD,EAAS,IAAM,GAAG,MAEjC,OAAM,IAAI,MAAM,mCAAmC,OAAOA,CAAK,EAAE,CAErE,CAAC,CACH,EAMSf,GAAkBiB,GAA0B,CACvD,IAAMX,EAAOC,GAAY,EAEnBW,EAAQZ,EAAK,UAAU,EAC7B,GAAI,CACF,IAAMa,EAAeb,EAAK,WAAW,CAAC,EACtCA,EAAK,iBAAiBa,EAAcA,EAAe,CAAC,EACpD,IAAMC,EAAYd,EAAK,OAAOa,EAAe,CAAC,EACxCE,EAAsBf,EAAK,QAAQa,EAAe,EAAI,CAAC,EACvDG,EAAeD,EAAsBf,EAAK,aAAae,CAAmB,EAAI,GACpF,MAAM,IAAI,MAAM,GAAGJ,CAAO,gBAAgBG,CAAS,oBAAoBE,CAAY,EAAE,CACvF,QAAE,CACAhB,EAAK,aAAaY,CAAK,CACzB,CACF,IC/DA,IAQaK,GARbC,GAAAC,EAAA,kBAKAC,KACAC,KAEaJ,GAAiBK,GAA6D,CACzF,IAAMC,EAAOC,GAAY,EACrBC,EAAmB,EACjBC,EAAmB,CAAC,EAEpBC,EAA0CL,GAAW,CAAC,EAE5D,GAAI,CACF,GAAIA,GAAS,mBAAqB,OAChCK,EAAW,iBAAmB,UAE5B,OAAOL,EAAQ,kBAAqB,UAAY,CAAC,OAAO,UAAUA,EAAQ,gBAAgB,GAC1FA,EAAQ,iBAAmB,GAAKA,EAAQ,iBAAmB,EAC7D,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,gBAAgB,EAAE,EAGjF,GAAIA,GAAS,oBAAsB,OACjCK,EAAW,kBAAoB,UACtB,OAAOL,EAAQ,mBAAsB,UAAY,CAAC,OAAO,UAAUA,EAAQ,iBAAiB,EACrG,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,iBAAiB,EAAE,EAG9EA,GAAS,YAAc,SACzBK,EAAW,UAAY,IAGzB,IAAIC,EAAgB,EACpB,OAAIN,GAAS,MAAQ,SACnBM,EAAgBC,GAAgBP,EAAQ,IAAKI,CAAM,GAGrDD,EAAmBF,EAAK,qBACpBI,EAAW,iBAAmBA,EAAW,kBAAoB,CAAC,CAACA,EAAW,UAAYC,CAAa,EACnGH,IAAqB,GACvBK,GAAe,2BAA4B,EAGzCR,GAAS,QAAU,QACrBS,GAAoBT,EAAQ,MAAO,GAAI,IAAI,QAAoC,CAACU,EAAKC,IAAU,CAC7F,IAAMC,EAAgBL,GAAgBG,EAAKN,CAAM,EAC3CS,EAAkBN,GAAgBI,EAAOP,CAAM,EAEjDH,EAAK,sBAAsBE,EAAkBS,EAAeC,CAAe,IAAM,GACnFL,GAAe,iCAAiCE,CAAG,MAAMC,CAAK,GAAG,CAErE,CAAC,EAGI,CAACR,EAAkBC,CAAM,CAClC,OAASU,EAAG,CACV,MAAIX,IAAqB,GACvBF,EAAK,sBAAsBE,CAAgB,EAE7CC,EAAO,QAAQW,GAASd,EAAK,MAAMc,CAAK,CAAC,EACnCD,CACR,CACF,IChEA,IAQME,GAeAC,GAWAC,GAoBAC,GAwDOC,GA9GbC,GAAAC,EAAA,kBAKAC,KACAC,KAEMR,GAA4BS,GAAmD,CACnF,OAAQA,EAAwB,CAC9B,IAAK,WACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,IAAK,MACH,MAAO,IACT,QACE,MAAM,IAAI,MAAM,yCAAyCA,CAAsB,EAAE,CACrF,CACF,EAEMR,GAAoBS,GAAmD,CAC3E,OAAQA,EAAe,CACrB,IAAK,aACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,+BAA+BA,CAAa,EAAE,CAClE,CACF,EAEMR,GAAwBS,GAAmD,CAC1EA,EAAQ,QACXA,EAAQ,MAAQ,CAAC,GAEdA,EAAQ,MAAM,UACjBA,EAAQ,MAAM,QAAU,CAAC,GAE3B,IAAMC,EAAUD,EAAQ,MAAM,QACzBC,EAAQ,+BAEXA,EAAQ,6BAA+B,KAIrCD,EAAQ,oBACRA,EAAQ,mBAAmB,KAAKE,IAAO,OAAOA,GAAO,SAAWA,EAAKA,EAAG,QAAU,QAAQ,IAC5FF,EAAQ,iBAAmB,GAE/B,EAEMR,GACF,CAACW,EAA8BC,EAC9BC,IAA2B,CAC1B,QAAWH,KAAME,EAAoB,CACnC,IAAIE,EAAS,OAAOJ,GAAO,SAAWA,EAAKA,EAAG,KAG9C,OAAQI,EAAQ,CACd,IAAK,QAEH,GADAA,EAAS,QACL,OAAOJ,GAAO,SAAU,CAG1B,IAAMK,EAFeL,GAEsD,WAC3E,GAAIK,EAAY,CACd,IAAMC,EAAgBC,GAAgB,aAAcJ,CAAM,EACpDK,EAAkBD,GAAgBF,EAAYF,CAAM,EACtDM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GAAe,oDAAoDL,CAAU,GAAG,CAEpF,CACF,CACA,MACF,IAAK,SAEH,GADAD,EAAS,KACL,OAAOJ,GAAO,SAAU,CAC1B,IAAMW,EAAgBX,EACtB,GAAIW,GAAe,gBAAiB,CAClC,GAAIA,EAAc,kBAAoB,QAAUA,EAAc,kBAAoB,OAChF,MAAM,IAAI,MAAM,oDAAoDA,EAAc,eAAe,EAAE,EAErG,IAAML,EAAgBC,GAAgB,kBAAmBJ,CAAM,EACzDK,EAAkBD,GAAgBI,EAAc,gBAAiBR,CAAM,EACzEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GACI,yDAAyDC,EAAc,eAAe,GAAG,CAEjG,CACF,CACA,MACF,IAAK,OACL,IAAK,MACH,SACF,QACE,MAAM,IAAI,MAAM,qCAAqCP,CAAM,EAAE,CACjE,CAEA,IAAMQ,EAAmBL,GAAgBH,EAAQD,CAAM,EACnDM,GAAY,EAAE,4BAA4BR,EAAsBW,CAAgB,IAAM,GACxFF,GAAe,oCAAoCN,CAAM,GAAG,CAEhE,CACF,EAESb,GAAqBO,GAAkE,CAClG,IAAM
e,EAAOJ,GAAY,EACrBR,EAAuB,EACrBE,EAAmB,CAAC,EAEpBW,EAAkDhB,GAAW,CAAC,EACpET,GAAqByB,CAAc,EAEnC,GAAI,CACF,IAAMlB,EAAyBT,GAAyB2B,EAAe,wBAA0B,KAAK,EAChGjB,EAAgBT,GAAiB0B,EAAe,eAAiB,YAAY,EAC7EC,EACF,OAAOD,EAAe,OAAU,SAAWP,GAAgBO,EAAe,MAAOX,CAAM,EAAI,EAEzFa,EAAmBF,EAAe,kBAAoB,EAC5D,GAAI,CAAC,OAAO,UAAUE,CAAgB,GAAKA,EAAmB,GAAKA,EAAmB,EACpF,MAAM,IAAI,MAAM,qCAAqCA,CAAgB,EAAE,EAGzE,IAAMC,EAAoBH,EAAe,mBAAqB,EAC9D,GAAI,CAAC,OAAO,UAAUG,CAAiB,GAAKA,EAAoB,GAAKA,EAAoB,EACvF,MAAM,IAAI,MAAM,qCAAqCA,CAAiB,EAAE,EAG1E,IAAMC,EAA+B,OAAOJ,EAAe,wBAA2B,SAClFP,GAAgBO,EAAe,uBAAwBX,CAAM,EAC7D,EAcJ,GAZAF,EAAuBY,EAAK,yBACxBjB,EAAwB,CAAC,CAACkB,EAAe,kBAAmB,CAAC,CAACA,EAAe,iBAAkBjB,EAC/F,CAAC,CAACiB,EAAe,gBAAiB,EAAGC,EAAiBC,EAAkBC,EACxEC,CAA4B,EAC5BjB,IAAyB,GAC3BS,GAAe,+BAAgC,EAG7CI,EAAe,oBACjBxB,GAAsBW,EAAsBa,EAAe,mBAAoBX,CAAM,EAGnFW,EAAe,qBAAuB,OAAW,CACnD,GAAI,OAAOA,EAAe,oBAAuB,UAC/C,MAAM,IAAI,MAAM,+CAA+CA,EAAe,kBAAkB,EAAE,EAEpG,IAAMR,EAAgBC,GAAgB,qBAAsBJ,CAAM,EAC5DK,EAAkBD,GAAgBO,EAAe,mBAAmB,SAAS,EAAGX,CAAM,EACxFU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GACI,4DAA4DI,EAAe,kBAAkB,GAAG,CAExG,CAEA,GAAIA,EAAe,uBACjB,OAAW,CAACK,EAAMC,CAAK,IAAK,OAAO,QAAQN,EAAe,sBAAsB,EAAG,CACjF,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,MAAM,kDAAkDA,CAAI,EAAE,EAE1E,GAAI,OAAOC,GAAU,UAAY,CAAC,OAAO,UAAUA,CAAK,GAAKA,EAAQ,EACnE,MAAM,IAAI,MAAM,iEAAiEA,CAAK,EAAE,EAE1F,IAAMC,EAAad,GAAgBY,EAAMhB,CAAM,EAC3CU,EAAK,6BAA6BZ,EAAsBoB,EAAYD,CAAK,IAAM,GACjFV,GAAe,wCAAwCS,CAAI,MAAMC,CAAK,GAAG,CAE7E,CAGF,OAAIN,EAAe,QAAU,QAC3BQ,GAAoBR,EAAe,MAAO,GAAI,IAAI,QAAoC,CAACS,EAAKH,IAAU,CACpG,IAAMd,EAAgBC,GAAgBgB,EAAKpB,CAAM,EAC3CK,EAAkBD,GAAgBa,EAAOjB,CAAM,EAEjDU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GAAe,qCAAqCa,CAAG,MAAMH,CAAK,GAAG,CAEzE,CAAC,EAGI,CAACnB,EAAsBE,CAAM,CACtC,OAASqB,EAAG,CACV,MAAIvB,IAAyB,GAC3BY,EAAK,0BAA0BZ,CAAoB,EAErDE,EAAO,QAAQsB,GAASZ,EAAK,MAAMY,CAAK,CAAC,EACnCD,CACR,CACF,ICpMA,IAuCaE,GAqCAC,GAsCAC,GAMAC,GAqCAC,GAoBAC,GAOAC,GAxLbC,GAAAC,EAAA,kBAuCaR,GAA8BS,GAA2B,CACpE,OAAQA,EAAM,CACZ,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IACT,IAAK,UACH,MAAO,IACT,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,IACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,EAKaR,GAA8BS,GAAqC,CAC9E,OAAQA,EAAW,CACjB,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,UACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,CACzD,CACF,EAMaR,GAAwBS,GACpB,CAAC,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,OAAW,MAAS,EAAEA,CAAQ,EAKxGR,GAAqCM,GAEoD,CAChG,OAAQA,EAAM,CACZ,IAAK,UAEH,OAAO,OAAO,aAAiB,KAAe,aAAa,KAAO,aAAe,YACnF,IAAK,UACH,OAAO,aACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,WACT,IAAK,UACH,OAAO,aACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,SACH,OAAO,eACT,QACE,MAAM,IAAI,MAAM,qBAAqBA,CAAI,EAAE,CAC/C,CACF,EAKSL,GAAwBQ,GAAkE,CACrG,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,EAKaP,GAA4BI,GAAyDA,IAAS,WACvGA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAAYA,IAAS,SAC5FA,IAAS,OAKAH,GAA4BO,GAA0C,CACjF,OAAQA,EAAU,CAChB,IAAK,OACH,MAAO,GACT,IAAK,MACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EA
AE,CAC5D,CACF,ICvMA,IAWaC,GAXbC,GAAAC,EAAA,kBAGAC,KAQaH,GAAW,MAAMI,GAAsE,CAClG,GAAI,OAAOA,GAAS,SAClB,GAAI,GAEF,GAAI,CACF,GAAM,CAAC,SAAAC,CAAQ,EAAI,GAAQ,kBAAkB,EAC7C,OAAO,IAAI,WAAW,MAAMA,EAASD,CAAI,CAAC,CAC5C,OAAS,EAAG,CACV,GAAI,EAAE,OAAS,wBAAyB,CAEtC,GAAM,CAAC,iBAAAE,CAAgB,EAAI,GAAQ,SAAS,EACtCC,EAASD,EAAiBF,CAAI,EAC9BI,EAAuB,CAAC,EAC9B,cAAiBC,KAASF,EACxBC,EAAO,KAAKC,CAAK,EAEnB,OAAO,IAAI,WAAW,OAAO,OAAOD,CAAM,CAAC,CAC7C,CACA,MAAM,CACR,KACK,CAEL,IAAME,EAAW,MAAM,MAAMN,CAAI,EACjC,GAAI,CAACM,EAAS,GACZ,MAAM,IAAI,MAAM,sCAAsCN,CAAI,EAAE,EAE9D,IAAMO,EAAsBD,EAAS,QAAQ,IAAI,gBAAgB,EAC3DE,EAAWD,EAAsB,SAASA,EAAqB,EAAE,EAAI,EAC3E,GAAIC,EAAW,WAGb,OAAO,IAAI,WAAW,MAAMF,EAAS,YAAY,CAAC,EAC7C,CAEL,GAAI,CAACA,EAAS,KACZ,MAAM,IAAI,MAAM,sCAAsCN,CAAI,qBAAqB,EAEjF,IAAMS,EAASH,EAAS,KAAK,UAAU,EAEnCI,EACJ,GAAI,CAEFA,EAAS,IAAI,YAAYF,CAAQ,CACnC,OAASG,EAAG,CACV,GAAIA,aAAa,WAAY,CAE3B,IAAMC,EAAQ,KAAK,KAAKJ,EAAW,KAAK,EACxCE,EAAS,IAAI,YAAY,OAAO,CAAC,QAASE,EAAO,QAASA,CAAK,CAAC,EAAE,MACpE,KACE,OAAMD,CAEV,CAEA,IAAIE,EAAS,EAEb,OAAa,CACX,GAAM,CAAC,KAAAC,EAAM,MAAAC,CAAK,EAAI,MAAMN,EAAO,KAAK,EACxC,GAAIK,EACF,MAEF,IAAME,EAAYD,EAAM,WACV,IAAI,WAAWL,EAAQG,EAAQG,CAAS,EAChD,IAAID,CAAK,EACfF,GAAUG,CACZ,CACA,OAAO,IAAI,WAAWN,EAAQ,EAAGF,CAAQ,CAC3C,CACF,KAEK,QAAIR,aAAgB,KAClB,IAAI,WAAW,MAAMA,EAAK,YAAY,CAAC,EACrCA,aAAgB,WAClBA,EAEA,IAAI,WAAWA,CAAI,CAE9B,ICvFA,IAYMiB,GAEAC,GAKFC,GACAC,GAESC,GAQAC,GAWAC,GAzCbC,GAAAC,EAAA,kBAKAC,KAOMT,GAAiB,CAAC,IAAK,IAAK,IAAK,IAAK,GAAG,EAEzCC,GAAQ,CAACS,EAAeC,IAA0B,CAEtD,QAAQ,IAAI,IAAIX,GAAeU,CAAK,CAAC,IAAI,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIC,CAAO,EAAE,CAChF,EAKaP,GAAkB,CAACQ,EAA2BC,IAA0B,CACnFX,GAAiBU,EACjBT,GAAQU,CACV,EAKaR,GAAM,CAACS,EAAoBC,IAAuB,CAC7D,IAAMC,EAAeC,GAAqBH,CAAQ,EAC5CI,EAAcD,GAAqBf,EAAc,EACnDc,GAAgBE,GAClBjB,GAAMe,EAAc,OAAOD,GAAQ,WAAaA,EAAI,EAAIA,CAAG,CAE/D,EAKaT,GAAwB,IAAIa,IAAiC,CACpEhB,IACFE,GAAI,GAAGc,CAAI,CAEf,IC7CA,IAOaC,GAPbC,GAAAC,EAAA,kBAKAC,KAEaH,GAAa,CAACI,EAAyBC,IAE5C,IAAKC,GAAkCD,CAAI,GAAGD,CAAU,ICThE,IAAAG,GAAAC,EAAA,oBCAA,IA8EMC,GA+BAC,GAKAC,GAKAC,GAWFC,GACEC,GAYOC,GAkCPC,GAoSOC,GArdbC,GAAAC,EAAA,kBAIAC,KAEAC,KAwEMZ,GAAsC,IAAI,IAAI,CAClD,CAAC,GAAI,GAAG,EACR,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,EAAE,EACT,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,CAAC,EACZ,CAAC,SAAU,CAAC,EAGZ,CAAC,SAAU,CAAC,EACZ,CAAC,UAAW,CAAC,EACb,CAAC,UAAW,CAAC,CACf,CAAC,EAEKC,GAAsB,CAAC,EAKvBC,GAA4BW,GAAiB,KAAK,KAAKA,EAAO,EAAE,EAAI,GAKpEV,GAAwBU,GAAiB,CAC7C,QAASC,EAAM,EAAGA,EAAMb,GAAU,OAAQa,IAAO,CAC/C,IAAMC,EAAgBd,GAAUa,CAAG,EACnC,GAAID,GAAQE,EACV,OAAOA,CAEX,CAEA,OAAO,KAAK,KAAKF,EAAO,EAAE,EAAI,EAChC,EAEIT,GAAO,EACLC,GAAqB,IAAMD,KAYpBE,GACT,MAAMU,EAAwBC,EAAsBC,EAAsBC,IAC/C,CACrB,IAAMC,EAAalB,GAAyBgB,CAAY,EAClDG,EAAgBL,EAAQ,OAAO,aAEjC,CAAC,KAAMI,EAAY,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAChF,GAAI,CACF,IAAME,EAAiBN,EAAQ,kBAAkB,EACjDA,EAAQ,eAAe,EACvBM,EAAe,mBACXL,EAA+B,EAAuBI,EACtD,EAA4BD,CAChC,EACAJ,EAAQ,MAAM,EAEd,MAAMK,EAAc,SAAS,WAAW,IAAI,EAE5C,IAAME,EAAcF,EAAc,eAAe,EACjD,GAAIF,EAAiB,CAEnB,IAAMK,EAAeL,EAAgB,EACrC,OAAAK,EAAa,IAAI,IAAI,WAAWD,EAAa,EAAGL,CAAY,CAAC,EACtDM,CACT,KAGE,QAAO,IAAI,WAAWD,EAAY,MAAM,EAAGL,CAAY,CAAC,CAE5D,QAAE,CACAG,EAAc,QAAQ,CACxB,CACF,EAEFd,GAAN,KAAmD,CAqBjD,YAAoBS,EAAwB,CAAxB,aAAAA,EAClB,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,2BAA6B,CAAC,EACnC,KAAK,eAAiB,CAAC,EA
CvB,KAAK,gBAAkB,IAAI,IAC3B,KAAK,uBAAyB,IAAI,IAElC,OAAW,CAACS,CAAK,IAAKzB,GACpBC,GAAU,KAAKwB,CAAG,EAClB,KAAK,YAAY,IAAIA,EAAK,CAAC,CAAC,EAC5B,KAAK,mBAAmB,IAAIA,EAAK,CAAC,CAAC,CAEvC,CAEA,OAAOC,EAAeC,EAAwB,CAC5C,IAAMC,EAAiBD,EAAK,OACtBE,EAAYF,EAAK,WACjBG,EAAYH,EAAK,WACjBd,EAAOX,GAAyB4B,CAAS,EAGzCC,EAAe,KAAK,aAAa,IAAIL,CAAE,EAC7C,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,uCAAuC,EAEzD,GAAIA,EAAa,eAAiBD,EAChC,MAAM,IAAI,MAAM,yCAAyCC,EAAa,YAAY,eAAeD,CAAS,EAAE,EAI9G,IAAME,EAAwB,KAAK,QAAQ,OAAO,aAE9C,CAAC,iBAAkB,GAAM,KAAAnB,EAAM,MAAO,eAAe,UAAY,eAAe,QAAQ,CAAC,EAGvFU,EAAcS,EAAsB,eAAe,EACzD,IAAI,WAAWT,CAAW,EAAE,IAAI,IAAI,WAAWK,EAAgBC,EAAWC,CAAS,CAAC,EACpFE,EAAsB,MAAM,EAI5B,IAAMV,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBAAmBU,EAAuB,EAAGD,EAAa,QAAQ,OAAQ,EAAGlB,CAAI,EAEhGoB,GAAU,UAAW,IAAM,qCAAqCP,CAAE,GAAG,EAErE,KAAK,2BAA2B,KAAKM,CAAqB,CAC5D,CAEA,OAAOE,EAAqBC,EAAgC,CAE1D,IAAMC,EAAqB,KAAK,aAAa,IAAIF,CAAQ,EACzD,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,2CAA2C,EAG7D,IAAMC,EAA0B,KAAK,aAAa,IAAIF,CAAa,EACnE,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAID,EAAmB,eAAiBC,EAAwB,aAC9D,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMxB,EAAOX,GAAyBkC,EAAmB,YAAY,EAG/Dd,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBACXc,EAAmB,QAAQ,OAAQ,EAAGC,EAAwB,QAAQ,OAAQ,EAAGxB,CAAI,CAC3F,CAEA,uBAAuByB,EAAmBpB,EAAsBqB,EAAoC,CAClG,IAAIb,EACJ,GAAIa,EAAgB,CAElB,GADAb,EAAK,KAAK,gBAAgB,IAAIa,CAAc,EACxCb,IAAO,OACT,MAAM,IAAI,MAAM,mCAAmC,EAErD,GAAIY,IAAWC,EACb,OAAAN,GACI,UACA,IAAM,uDAAuDf,CAAY,WACrEQ,CAAE,6BAA6B,EAChCA,EACF,GAAI,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC5E,MAAM,IAAI,MAAM;AAAA,sDAC8B,EAEhD,KAAK,gBAAgB,OAAOa,CAAc,CAC5C,MACEb,EAAKrB,GAAmB,EAG1B,YAAK,aAAa,IAAIqB,EAAI,CAAC,QAAS,CAAC,GAAAA,EAAI,OAA2B,OAAAY,CAAM,EAAG,aAAApB,CAAY,CAAC,EAC1F,KAAK,gBAAgB,IAAIoB,EAAQZ,CAAE,EACnCO,GACI,UACA,IAAM,uDAAuDf,CAAY,WAAWQ,CAAE,eAAe,EAClGA,CACT,CAEA,yBAAyBY,EAAyB,CAChD,IAAMZ,EAAK,KAAK,gBAAgB,IAAIY,CAAM,EACtCZ,IAAO,SACT,KAAK,aAAa,OAAOA,CAAE,EAC3B,KAAK,gBAAgB,OAAOY,CAAM,EAClCL,GAAU,UAAW,IAAM,4DAA4DP,CAAE,EAAE,EAE/F,CAGA,OAAOb,EAAc2B,EAAQ,eAAe,QAAU,eAAe,SAAW,eAAe,SAAmB,CAChH,IAAMpB,EAAajB,GAAqBU,CAAI,EAExCI,EAGEwB,GAAaD,EAAQ,eAAe,WAAa,eAAe,QAEhEE,GAAaF,EAAQ,eAAe,WAAa,eAAe,QACtE,GAAIC,GAAaC,EAAW,CAE1B,IAAMC,GADcF,EAAY,KAAK,YAAc,KAAK,oBAC5B,IAAIrB,CAAU,EACrCuB,EAICA,EAAQ,OAAS,EAEnB1B,EAAY0B,EAAQ,IAAI,EAGxB1B,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAPxEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,CAU1E,MAEEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAGxE,IAAMI,EAAU,CAAC,GAAIvC,GAAmB,EAAG,OAA2B,OAAQY,CAAS,EACvF,YAAK,aAAa,IAAI2B,EAAQ,GAAI,CAAC,QAAAA,EAAS,aAAc/B,CAAI,CAAC,EAE/DoB,GAAU,UAAW,IAAM,uCAAuCpB,CAAI,WAAW+B,EAAQ,EAAE,EAAE,EACtFA,CACT,CAEA,IAAIlB,EAAkC,CACpC,OAAO,KAAK,aAAa,IAAIA,CAAE,GAAG,OACpC,CAEA,QAAQA,EAAuB,CAC7B,IAAMmB,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAAZ,GAAU,UAAW,IAAM,sCAAsCP,CAAE,gBAAgBmB,EAAW,QAAQ,EAAE,EAAE,EAE1G,KAAK,aAAa,OAAOnB,CAAE,EAC3B,KAAK,eAAe,KAAKmB,EAAW,QAAQ,MAAM,EAG3CA,EAAW,YACpB,CAEA,MAAM,SAASnB,EAAeP,EAAkD,CAC9E,IAAM0B,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,qBAAqB,EAEvC,MAAMvC,GAAgB,KAAK,QAASuC,EAAW,QAAQ,OAAQA,EAAW,aAAc1B,CAAe,CACzG,CAEA,uBAA8B,CAC5B,QAAWmB,KAAU,KAAK,2BAExBA,EAAO,QAAQ,EAIjB,GAFA,KAAK,2BAA6B,CAAC,EAE/B,KAAK,eAAe,SAAW,EAInC,GAAI,KAAK,QAAQ,gBAAkB,UAAW,CAC5C,QAAWA,KAAU,KAAK,eAAgB,CACxC,IAAMQ,EAAgB9C,GAAe,IAAIsC,EAAO,IAAI,EAGpD,IAAKA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAEtE,IAAMS,EAAW,KAAK,YAAY,IAAIT,EAAO,IAAI,GAAK,CAAC,EACnDQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAGxB,UAAYA,EAAO,MA
AQ,eAAe,WAAa,eAAe,QAAS,CAE7E,IAAMS,EAAW,KAAK,mBAAmB,IAAIT,EAAO,IAAI,GAAK,CAAC,EAC1DQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAExB,MACEA,EAAO,QAAQ,CAEnB,CACA,KAAK,eAAiB,CAAC,CACzB,KAAO,CAGL,IAAIU,EAAkB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,gBAAiB,EAC/EA,IACHA,EAAkB,CAAC,EACnB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,iBAAmBA,CAAe,GAEjF,QAAWV,KAAU,KAAK,eACxBU,EAAgB,KAAKV,CAAM,EAE7B,KAAK,eAAiB,CAAC,CACzB,CACF,CAEA,SAAU,CACR,KAAK,YAAY,QAASK,GAAY,CACpCA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,mBAAmB,QAASK,GAAY,CAC3CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EAED,KAAK,aAAa,QAASW,GAAY,CACrCA,EAAQ,QAAQ,OAAO,QAAQ,CACjC,CAAC,EAED,KAAK,uBAAuB,QAASN,GAAY,CAC/CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,uBAAyB,IAAI,GACpC,CAEA,iBAAiBY,EAAmB,CAElC,IAAMC,EAAiB,KAAK,uBAAuB,IAAID,CAAS,EAC5DC,IACFA,EAAe,QAAQb,GAAU,CAC/BA,EAAO,QAAQ,CACjB,CAAC,EACD,KAAK,uBAAuB,OAAOY,CAAS,EAEhD,CACF,EAEa1C,GAAuB,IAAI4C,IACpC,IAAI7C,GAAmB,GAAG6C,CAAI,ICtdlC,IAGMC,GAsBOC,GAzBbC,GAAAC,EAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EASaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,IC1B3C,IAKaE,GAaAC,GA6EAC,EA6IAC,GA0MAC,GAkDAC,GACAC,GAzebC,GAAAC,EAAA,kBAKaR,GAAN,KAAiB,CAOtB,OAAO,gBAAgBS,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAGaT,GAAN,KAAoB,CAQzB,OAAO,UAAUU,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFlB,GAAW,gBAAgB,CAACW,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAASC,EAAIN,EAAW,EAAI,EAAGM,GAAKH,EAAOG,IAAK,CAC9C,IAAMC,EAAON,EAAQK,EAAI,EAAI,EAAIR,EAAMG,EAAQK,CAAC,EAC1CE,EAAON,EAAQI,EAAI,EAAI,EAAIP,EAAMG,EAAQI,CAAC,EAEhD,GAAIC,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEF,IAAMC,EAAM,KAAK,IAAIF,EAAMC,CAAI,EAC/B,GAAID,GAAQC,EACVJ,EAAMD,EAAQG,CAAC,EAAI,KAAK,IAAIC,EAAMC,CAAI,MACjC,CAEL,GAAIC,EAAM,EACR,OAEFL,EAAMD,EAAQG,CAAC,EAAI,CACrB,CACF,CAEA,OAAOF,CACT,CAOA,OAAO,iBAAiBM,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAAS,EAAI,EAAG,GAAKD,EAAW,IAC9B,GAAIF,EAAME,EAAY,CAAC,IAAM,GAAKF,EAAME,EAAY,CAAC,IAAMD,EAAWE,EAAY,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CACF,EAGaxB,EAAN,MAAMyB,CAAU,CAIrB,OAAO,KAAKC,EAAiC,CAC3C,OAAOD,EAAU,0BAA0BC,EAAM,EAAGA,EAAK,MAAM,CACjE,CAKA,OAAO,aAAaA,EAAyBC,EAAO,EAAsB,CACxE,IAAMC,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EAEV,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC1B,EAAIA,EAAO,EACf,KAAO,GAAK,GAAG,CACb,GAAIF,EAAK,CAAC,EAAIC,IAAS,EAAG,CACxBE,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAAIC,EACvB,KACF,CACA,GAAIA,EAAOD,EAAK,CAAC,IAAM,EACrB,MAAM,IAAI,MAAM,sBAAsB,EAExCG,EAAQ,CAAC,EAAI,EACbF,GAAQD,EAAK,CAAC,EACd,GACF,CACA,IAAK,IAAK,GAAK,EAAG,IAChBG,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAErB,OAAOG,CACT,CAKA,OAAO,kBAAkBH,EAAyBI,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,wCAAwCJ,EAAK,MAAM,cAAc,EAE/G,OAAOD,EAAU,0BAA0BC,EAAMI,EAAMJ,EAAK,MAAM,CACpE,CAKA,OAAO,gBAAgBA,EAAyBI,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,sCAAsCJ,EAAK,MAAM,cAAc,EAE7G,OAAOD,EAAU,0BAA0BC,
EAAM,EAAGI,CAAI,CAC1D,CAKA,OAAO,0BAA0BJ,EAAyBK,EAAeC,EAAqB,CAC5F,IAAIL,EAAO,EACX,QAAS,EAAII,EAAO,EAAIC,EAAK,IAAK,CAGhC,GAAIN,EAAK,CAAC,EAAI,EACZ,MAAM,IAAI,MAEN,+GAA+G,EAErHC,GAAQD,EAAK,CAAC,CAChB,CACA,OAAOC,CACT,CAEA,OAAO,eAAeD,EAA4C,CAChE,IAAME,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMK,EAAU,IAAI,MAAML,CAAI,EAC9BK,EAAQL,EAAO,CAAC,EAAI,EACpBK,EAAQL,EAAO,CAAC,EAAIF,EAAKE,EAAO,CAAC,EACjC,QAASX,EAAIW,EAAO,EAAGX,GAAK,EAAG,EAAEA,EAC/BgB,EAAQhB,CAAC,EAAIgB,EAAQhB,EAAI,CAAC,EAAIS,EAAKT,EAAI,CAAC,EAE1C,OAAOgB,CACT,CAKA,OAAO,cAAcH,EAAcI,EAA4B,CAC7D,GAAIJ,EAAO,CAACI,GAAcJ,GAAQI,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOJ,EAAO,EAAIA,EAAOI,EAAaJ,CACxC,CAEA,OAAO,cAAcK,EAAyBD,EAA+B,CAC3E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,GAAcC,EAAK,MAAM,CAAC,CACvE,CAQA,OAAO,gBAAgB5B,EAAsB8B,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAM/B,EAAE+B,CAAC,CAAC,EAEpB/B,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASmB,EAAyBa,EAA2C,CAClF,IAAMX,EAAOF,EAAK,OAClB,OAAOA,EAAK,IAAI,CAACY,EAAG,IAAMA,EAAIC,EAAI,CAAC,EAAIA,EAAI,EAAIX,CAAI,CAAC,CACtD,CAOA,OAAO,SAASY,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGrB,IAAMqB,IAAMG,EAAOxB,CAAC,CAAC,CAC/C,CACF,EAEahB,GAAN,MAAMyC,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBZ,EAChFa,EAAqBC,EAAsB,CAC7C,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,MAAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IACxCA,GAAOH,EAAY,OACrBA,EAAY,KAAKD,EAAUI,EAAM,CAAC,CAAC,EAEnCH,EAAYG,CAAG,EAAIJ,EAAUI,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMf,EAAQ,QAChB,GAAIA,EAAQe,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhEf,EAAQ,KAAK,CAAC,EAKlB,QAASe,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMF,EAAU,QAClB,GAAIA,EAAUE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEF,EAAU,KAAK,CAAC,EAKpB,QAASE,EAAM,EAAGA,EAAMH,EAAY,OAAS,EAAGG,IAC9C,GAAIA,EAAMD,EAAK,QACb,GAAIA,EAAKC,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DD,EAAK,KAAK,CAAC,EAKf,QAASC,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAAO,CACjD,GAAIH,EAAYG,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAID,EAAKC,CAAG,GAAKH,EAAYG,CAAG,GAAKD,EAAKC,EAAMH,EAAY,MAAM,GAAKA,EAAYG,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACHJ,EAA8BX,EAA4Ba,EAC1DD,EAAgCE,EAAgBE,EAAwBC,EAAwB,CAClG,GAAKA,EAIL,IAAIH,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIX,EAAQ,SAAYW,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAASI,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CN,EAAa,wBACTE,EAAUI,GAAOC,EAAgB,EAAI,EAAE,EAAGhB,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAChGA,EAAMJ,EAAU,OAAS,EAAGM,CAAO,EAE3C,CAaA,OAAO,uBACHP,EAA2BC,EAA8BX,EAAmBa,EAC5ED,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMO,EAAa,CAACP,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACpFC,CACT,CAYA,OAAO,uBACHP,EAA8BQ,EAA+BnB,EAAmBa,EAChFD,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,GAAKQ,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMD,EAAa,CAACP,EAAU,CAAC,EAAGQ,EAAW,CAAC,CAAC,EAE/C,OAAAV,EAAa,mBAAmB,GAAOE,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACrGC,CACT,CAKA,OAAe,mBACXR,EAA2BC,EAA8BO,EAAsBlB,EAC/Ea,EAA8BD,EAAgCE,EAAgBG,EAAkB,CAClG,GAAIP,EACF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAK,CAAC,MAGnB,SAASH,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAKT,EAAa,wBACzBE,EAAUI,EAAM,CAAC,EAAGf,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAAKA,EAAMJ,EAAU,OAAS,EACxGM,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXG,EAAgBC,EAAgBC,EAAkBC,EAAgBT,EAAgBU,EAClFC,EAAsBR,EAA0B,CA
ClD,IAAMS,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIN,GAAWA,IAAY,SACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAH,EAAKU,CAAY,EAAI,EACrBV,EAAKW,CAAY,EAAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAN,EAAKU,CAAY,EACgB,KAAK,MAAjCP,IAAY,cAA4BU,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/Db,EAAKW,CAAY,EAAIE,EAAYb,EAAKU,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASN,EAAKU,CAAY,EAAIV,EAAKW,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEapD,GAAN,KAAe,CAIpB,OAAO,qBACH2D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAGahE,GAAW,sBACXC,GAAW,uBCzexB,IAiBakE,GAsMPC,GAoCOC,GAKAC,GAKAC,EAeAC,GAiBAC,GAcAC,GAgBAC,GAmBAC,EA+BPC,GAiTOC,EAaAC,EAaAC,GAgFPC,GAwJOC,GAaAC,GAr7BbC,GAAAC,EAAA,kBAGAC,KACAC,KAaapB,GAAiB,GAsMxBC,GAAoB,CAACoB,EAAcC,IAAiD,CACxF,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,OAAQD,EAAM,CACZ,QACE,OAAOC,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,QACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,QACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,OACE,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mBAAmB,EAErC,MAAO,CAAC,MAAO,YAAY,EAE7B,QACE,MAAM,IAAI,MAAM,sBAAsBD,CAAI,EAAE,CAChD,CACF,EAEanB,GAA8B,CAACmB,EAAgBC,EAAsB,IAAM,CACtF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEapB,GAA4B,CAACkB,EAAgBC,EAAsB,IAAM,CACpF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEanB,EAA6B,IAAIoB,IAA6D,CACzG,IAAMC,EAAoC,CAAC,EAC3C,OAAAD,EAAK,QAAQE,GAAO,CACdA,EAAI,SAAW,GACjBD,EAAgB,KACZ,CAAC,QAAuB,KAAMC,CAAG,EAAG,CAAC,QAAuB,KAAMC,EAAU,eAAeD,CAAG,CAAC,CAAC,CAExG,CAAC,EACMD,CACT,EAMapB,GAAoBuB,GAE3BA,EAAO,IAAM,EACR,EACEA,EAAO,IAAM,EACf,EAGF,EASItB,GAAa,CAACuB,EAAW,MAAOP,EAAqBQ,EAAQ,MACpE,CAACR,GAAcA,IAAe,EACzB,GAAGO,CAAQ,IAAIC,CAAK,IAGtB,MAAMR,CAAU,IAAIO,CAAQ,KAAKC,CAAK,IASlCvB,GAAY,CAACsB,EAAkBP,EAAoBQ,IAC1DD,IAAa,MACRC,EAELR,IAAe,EACV,OAAOQ,CAAK,IAGd,MAAMR,CAAU,SAASQ,CAAK,IAQ1BtB,GAAY,CAACuB,EAAcT,IAClCA,IAAe,EACV,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAC1CT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,MAClBT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAGlCA,EAUItB,EACT,CAACsB,EAAcC,EAAsBC,EAAgBZ,IAC/CU,EAAK,WAAW,WAAW,GAAKE,EAAS,EACvC,OAAQD,GAAW,SACjBX,IAAS,MACJ,GAAGU,CAAI,KAAKC,CAAK,WAAWA,CAAK,eAAeA,CAAK,aAErD,GAAGD,CAAI,KAAKC,CAAK,WAAWA,CAAK,SAGtCX,IAAS,MACJ,GAAGU,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAK,KAAK,MAAMA,EAAQ,EAAI,CAAC,CAAC,KAAKA,EAAQ,EAAI,CAAC,IAEhF,GAAGD,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAKA,EAAQ,CAAC,IAIlDC,EAAS,EAAI,GAAGF,CAAI,IAAIC,CAAK,IAAMD,EAc5CrB,GACF,CAACqB,EAAcG,EAAoBC,EAAuCC,EACzEd,IAAuC,CACtC,IAAMe,EAAa,OAAOF,GAAgB,SACpCG,EAAOD,EAAaF,EAAcA,EAAY,OAC9CI,EAAe,CAAC,GAAG,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAC,EACzCE,EAAcF,EAAO,EAAI,MAAQA,GAAQ,EAAI,MAAMA,CAAI,QAAU,cAAcA,CAAI,IACnFf,EAAatB,GAAkBiC,EAAYZ,C
AAU,EACrDmB,EAAY,OAAOlB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACtEmB,EAAc,OAAOnB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACxEF,EAAO,CAAC,QAASmB,EAAa,MAAOC,EAAW,QAASC,EAAa,OAAQR,CAAU,EAExFS,EAAgBjB,GAA+B,OAAOA,GAAQ,SAAWA,EAAM,GAAGA,CAAG,IAErFkB,EAAqB,CACzB,gBAAiB,GACjB,gBAAiB,GACjB,2BAA4B,GAC5B,IAAK,GACL,aAAc,GACd,IAAK,GACL,aAAc,EAChB,EAEMC,EAAgBR,EAAa,YAAc,GAC3CS,EAAQ,GAAGD,CAAa,GAAGd,CAAI,SAC/BgB,EAAU,GAAGF,CAAa,GAAGd,CAAI,WAEnCiB,EAAa,GACjB,QAASC,EAAI,EAAGA,EAAIX,EAAO,EAAGW,IAC5BD,GAAc;AAAA,aACTC,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC9CW,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC/CW,CAAC,UAAUA,CAAC;AAAA,oBACNA,CAAC;AAAA,MAGfD,GAAc,WAAWV,EAAO,CAAC,eAEjC,IAAMY,EAAgCZ,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,oBAAoBV,EAAK,OAAO;AAAA,mBAC5BA,EAAK,OAAO;AAAA;AAAA,MAEzB2B,CAAU;AAAA;AAAA,KAIJG,EAAmBC,IACvBR,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIc,EAAY,OAAOrB,CAAI,IAAIqB,CAAS,KAGlDC,EAAoB,CAAC,EAC3B,GAAIf,GAAQ,EACV,QAASW,EAAIX,EAAO,EAAGW,GAAK,EAAGA,IAC7BI,EAAQ,KAAK,GAAG5C,EAAasC,EAASE,EAAGX,CAAI,CAAC,eAAeW,CAAC,IAAI,EAItE,IAAMK,EAAgChB,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,aAAaV,EAAK,OAAO;AAAA,aAC3BgC,EAAQ,KAAK,GAAG,CAAC;AAAA,KAGlBE,EAAmBC,IACvBZ,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIkB,EAAa,OAAOzB,CAAI,IAAIyB,CAAU,KAGpDC,EAAU,IAAIC,IAChBpB,IAAS,EAAI,KAAO,GAAGjB,EAAK,OAAO,IAAIqC,EAAK,IAAIf,CAAY,EAAE,KAAK,GAAG,CAAC,IAErEgB,EAAa,CAACH,EAAoBI,KAClCtB,EAAO,EACF,GAAGkB,CAAU,GAEb,GAAG/C,EAAa+C,EAAYI,GAAKtB,CAAI,CAAC,GAI3CuB,EAAa,CAACL,EAAoBI,GAAoB9B,KACtDQ,EAAO,EACF,GAAGkB,CAAU,IAAI1B,EAAK,IAEtB,GAAGrB,EAAa+C,EAAYI,GAAKtB,CAAI,CAAC,IAAIR,EAAK,IAIpDgC,EAAoE,CAAC,EACrEC,GAA6B,CAACP,EAAoBQ,KAA0B,CAChFpB,EAAmB,2BAA6B,GAChD,IAAMqB,GAAU,GAAGD,GAAO,IAAI,uBAAuBjC,CAAI,SACzD,GAAIkC,MAAWH,EACb,MAAO,GAAGG,EAAO,IAAIT,CAAU,IAEjC,IAAMH,GAAU,CAAC,EACjB,QAASJ,GAAIX,EAAO,EAAGW,IAAK,EAAGA,KAAK,CAClC,IAAMW,GAAMI,GAAO,WAAW,gBAAiBf,GAAIe,GAAO,KAAO1B,CAAI,EACrEe,GAAQ,KAAK,GAAGM,EAAWZ,EAASE,EAAC,CAAC,OAAOW,EAAG,MAAMD,EAAWb,EAAOG,EAAC,CAAC,GAAG,CAC/E,CACA,OAAAa,EAAyCG,EAAO,EAC5C,MAAMA,EAAO,mBAAmBD,GAAO,KAAK,OAAO;AAAA,sBACzCX,GAAQ,OAAS,EAAIA,GAAQ,KAAK,GAAG,EAAI,IAAI;AAAA,cAGpD,GAAGY,EAAO,IAAIT,CAAU,GACjC,EAEMU,GAAc,CAACC,EAAuBrC,MAAmB,IAAM,CACnE,GAAIT,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,KAAKrC,EAAK,IAC7B,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,EAAK,8BAA8BA,EAAK,UAC9E,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,EAAK,UAC3C,GAAIT,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,8DAA8DrC,EAAK,MAE3F,MAAM,IAAI,MAAM,6CAA6CT,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEG+C,GAAeD,IAA2B,IAAM,CACpD,GAAI9C,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,IACnB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,mBAAmBU,CAAI,IAAIoC,CAAM,oBAAoBpC,CAAI,IAAIoC,CAAM,sBAAsBpC,CAAI,IAChGoC,CAAM,wBAAwBpC,CAAI,IAAIoC,CAAM,oBAEhD,MAAM,IAAI,MAAM,6CAA6C9C,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEGgD,GAA6B/B,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,QAAQoB,CAAS;AAAA,aACrD2B,GAAY,OAAOrC,CAAI,WAAW,CAAC;AAAA,KAGpCuC,EAAoBhC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,GAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,QAAQ9B,CAAS;AAAA,iBACjCV,CAAI,aAAa0B,EAAQe,EAAU,CAAC;AAAA,IAE/C,GAAG,EAEGC,GAAM,IAAIhB,IAA0C,CACxD,GAAIA,EAAQ,SAAWnB,EACrB,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAGlD,IAAMoC,GAAoBjB,EAAQ,IAAId,CAAY,EAAE,KAAK,GAAG,EAE5D,OAAIL,IAAS,EACJ8B,GAAY,IAAI,
EACd9B,IAAS,EACX8B,GAAYM,GAAkB,CAAC,CAAC,GAEvC9B,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,IAE3C,EAEMC,GAAgBnB,GAChBlB,EAAO,EACF8B,GAAYZ,CAAU,GAE7BZ,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAIvCoB,GAA6BtC,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,YAAYoB,CAAS;AAAA,MAChEyB,GAAY,OAAOnC,CAAI,YAAa,OAAO,CAAC;AAAA,KAGtC8C,GAAoBvC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,GAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,YAAY9B,CAAS;AAAA,UAC5CV,CAAI,aAAa0B,EAAQe,EAAU,CAAC;AAAA,IAExC,GAAG,EA0EH,MAAO,CACL,KAxCW,IAAM,CACjB,IAAMM,EAAQ,CAAC,EACXC,GAAmB,GACvB,OAAInC,EAAmB,kBACrBkC,EAAM,KAAK5B,CAA6B,EACxC6B,GAAmB,IAEjBnC,EAAmB,kBACrBkC,EAAM,KAAKxB,CAA6B,EACxCyB,GAAmB,IAEjBnC,EAAmB,6BACrB,OAAO,OAAOkB,CAAwC,EAAE,QAAQkB,IAAQF,EAAM,KAAKE,EAAI,CAAC,EACxFD,GAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKD,EAAiB,EAC5BE,GAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKF,EAA0B,EACrCG,GAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKR,CAAiB,EAC5BS,GAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKT,EAA0B,EACrCU,GAAmB,IAEjB,CAAC1C,GAAc0C,IACjBD,EAAM,QACF,SAAShC,CAAK,MAAMzB,EAAK,OAAO,IAAIc,EAAY,KAAK,GAAG,CAAC,KACzD,SAASY,CAAO,MAAM1B,EAAK,OAAO,IAAIM,EAAU,eAAeQ,CAAW,EAAE,KAAK,GAAG,CAAC,IAAI,EAExF2C,EAAM,KAAK;AAAA,CAAI,CACxB,EAIE,KAAAzD,EACA,gBAAA8B,EACA,gBAAAI,EACA,2BAAAQ,GACA,QAAAN,EACA,WAAAE,EACA,WAAAE,EACA,IAjFU,IAAIoB,IAAkD,CAChE,GAAIA,EAAgB,SAAW3C,EAAO,EACpC,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAElD,IAAMR,GAAQmD,EAAgB3C,CAAI,EAClC,GAAI,OAAOR,IAAU,SACnB,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAM4C,GAAoBO,EAAgB,MAAM,EAAG3C,CAAI,EAAE,IAAIK,CAAY,EAAE,KAAK,GAAG,EAEnF,OAAIL,IAAS,EACJ4B,GAAY,KAAMpC,EAAK,EACrBQ,IAAS,EACX4B,GAAYQ,GAAkB,CAAC,EAAG5C,EAAK,GAE9Cc,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,KAAK5C,EAAK,IAErD,EA6DE,YAAAoC,GACA,aA5DmB,CAACV,EAAoB1B,KACpCQ,EAAO,EACF4B,GAAYV,EAAY1B,EAAK,GAEpCc,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAAK1B,EAAK,MAuDrD,IAAA2C,GACA,YAAAL,GACA,aAAAO,GAEA,MAAAvC,EACA,KAAAL,EACA,QAAAgB,EACA,MAAAD,EACA,KAAAR,CACF,CACF,EAWS3B,EACT,CAACoB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,QAASb,CAAU,EAW3DV,EACT,CAACmB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,SAAUb,CAAU,EAW5DT,GACT,CAACkB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,WAAYb,CAAU,EA8ErER,GAAN,KAA+C,CAC7C,YAAoBoE,EAA2DC,EAA4B,CAAvF,6BAAAD,EAA2D,YAAAC,EAoG/E,KAAQ,kBAAqC,CAAC,EAC9C,KAAQ,UAA6B,CAAC,EACtC,KAAQ,SAA8B,CAAC,EAwBvC,KAAQ,cAAgB,CA9HoF,CAE5G,sCAAsCvD,EAA6B,CAGjE,MAAO,qBADY,OAAOA,GAAS,SAAW,GAAGA,CAAI,IAAMA,CACrB,eACxC,CAEA,UAAUwD,EAAiDpF,GAAgB,CACzE,IAAMqF,EAAiB,OAAOD,GAAkB,SAAWA,EAAgBA,EAAc,CAAC,EACpFE,EAAiB,OAAOF,GAAkB,SAAW,EAAIA,EAAc,CAAC,EACxEG,EAAiB,OAAOH,GAAkB,SAAW,EAAIA,EAAc,CAAC,EAE9E,GAAIC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,yBAC/B,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,yCAAyC,KAAK,OAAO,wBAAwB,KAC3F,KAAK,OAAO,wBAAwB,KAAK,KAAK,OAAO,wBAAwB,IAAI,EAGvF,GAAIF,EAAiBC,EAAiBC,EAAiB,KAAK,OAAO,kCACjE,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,+CACd,KAAK,OAAO,iCAAiC,GAAG,EAGtD,IAAMC,EAAuB,KAAK,wBAAwB,CAAC,IAAM,GAAK,KAAK,wBAAwB,CAAC,IAAM,EACpGC,EAAYD,EAAuB;AAAA;AAAA,wDAGA;AAAA;AAAA;AAAA;AAAA,yDAKnCE,EAAsBF,EACxB,4DACA;AAAA,mEAEIH,EAAiBC,EAAiBC,CAAc,iBAExD,MAAO,4BAA4BF,CAAc,KAAKC,CAAc,KAAKC,CAAc;AAAA,YAC/EE,CAAS;AAAA,MACfC,CAAmB;AAAA,GAEvB,CAEQ,uBAAuBC,EAA+B,CACxDA,EAAS,OAAS,IAChBA,EAAS,MAAM,WAAW,WAAW,GACvC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,MAAM,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC
,EAEpGA,EAAS,QAAQ,WAAW,WAAW,GACzC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,QAAQ,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAG9G,CAEQ,gBAAgBA,EAAyBC,EAA8B,CAC7E,GAAID,EAAS,QAAU,WACrB,MAAM,IAAI,MAAM,+FAA+F,EAEjH,KAAK,UAAU,KAAKA,CAAQ,EAC5B,KAAK,uBAAuBA,CAAQ,EAEpC,IAAME,EAASF,EAAS,QAAU,QAAU,OAAS,aAC/CjD,EAAciD,EAAS,KAAK,QAClC,MAAO,sBAAsBC,CAAY,kBAAkBC,CAAM,KAAKF,EAAS,IAAI,WAAWjD,CAAW,IAC3G,CAEA,oBAAoBoD,EAAoC,CACtD,OAAOA,EAAU,IAAIC,GAAK,KAAK,gBAAgBA,EAAG,KAAK,eAAe,CAAC,EAAE,KAAK;AAAA,CAAI,CACpF,CAEQ,yBAAyBJ,EAA+B,CAC9D,GAAIA,EAAS,QAAU,WACrB,MAAM,IAAI,MACN,sGAAsG,EAG5G,KAAK,kBAAkB,KAAKA,CAAQ,EACpC,KAAK,uBAAuBA,CAAQ,CACtC,CAEA,6BAA6BG,EAA0C,CACrE,OAAAA,EAAU,QAAQC,GAAK,KAAK,yBAAyBA,CAAC,CAAC,EAChD,IACT,CAEA,gBAAgBhE,EAAcV,EAA8BY,EAAS,EAAiB,CACpF,YAAK,SAAS,KAAK,CAAC,KAAAF,EAAM,KAAAV,EAAM,OAAAY,CAAM,CAAC,EAChC,IACT,CAEA,iBAAiB+D,EAAqD,CACpE,YAAK,SAAW,KAAK,SAAS,OAAOA,CAAkB,EAChD,IACT,CAKQ,oBAA6B,CACnC,GAAI,KAAK,SAAS,SAAW,EAC3B,MAAO,GAGT,IAAMC,EAA4B,CAAC,EACnC,OAAW,CAAC,KAAAlE,EAAM,KAAAV,EAAM,OAAAY,CAAM,IAAK,KAAK,SACtC,GAAIA,GAAUA,EAAS,EACjBZ,IAAS,MACX4E,EAAgB,KAAK,cAAclE,CAAI,iBAAiBV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,EAE1FgE,EAAgB,KAAK,GAAGlE,CAAI,eAAeV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,MAE1E,CACL,IAAMiE,EAAWjE,GAAU,MAAQA,IAAW,EAAIZ,EAAO,MAAMY,CAAM,IAAIZ,CAAI,IAC7E4E,EAAgB,KAAK,GAAGlE,CAAI,IAAImE,CAAQ,EAAE,CAC5C,CAGF,MAAO;AAAA,0BACeD,EAAgB,KAAK,IAAI,CAAC;AAAA,2BACzB,KAAK,aAAa,oCAC3C,CAMA,IAAI,2BAAoC,CACtC,OAAO,KAAK,mBAAmB,EAAI,KAAK,UAAU,IAAIhD,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,EAC1E,KAAK,kBAAkB,IAAIA,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,CACzD,CAKA,IAAI,eAAwD,CAC1D,GAAI,KAAK,SAAS,SAAW,EAC3B,OAGF,IAAMkD,EAA6B9E,GAC9B,UACe,EAAE,CAAC,MAAO,MAAO,MAAO,KAAK,EAAE,QAAQA,CAAI,CAAC,EAChE,OAAO,KAAK,SAAS,IAAI+E,GAAM,CAACD,EAA0BC,EAAE,IAAI,EAAGA,EAAE,QAAU,CAAC,CAAE,CACpF,CACF,EAEarF,GAAqB,CAACsF,EAAyClB,IACxE,IAAIrE,GAAiBuF,EAAelB,CAAM,EAYjCnE,GAAmB,CAACsF,EAA4BC,IAA0C,CACrG,IAAMC,EAASF,EAAQ,OACjB9E,EAAiB,CAAC,EACxB,QAASyB,EAAI,EAAGA,EAAIuD,EAAQvD,IAAK,CAC/B,IAAMvB,EAAM8E,EAAS,EAAIvD,EACnBwD,EAAIH,EAAQ5E,CAAG,GAAK,GAChB6E,EAASA,EAAS,OAAS,EAAItD,CAAC,GAAK,GACvC,GAAKwD,IAAM,GACjBjF,EAAK,QAAQE,CAAG,CAEpB,CACA,OAAOF,CACT,ICj8BA,IAeMkF,GAMAC,GAGAC,GAGAC,GAWOC,GA4DAC,GAKAC,GAvGbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEMZ,GAAkB,CAACa,EAAmBC,IACvCA,GAAQA,EAAK,SAAWD,EAAa,CAAC,GAAI,IAAI,MAAMA,CAAS,EAAE,KAAK,CAAE,EAAE,QAAQ,EAAIC,EAEnFb,GAAiB,CAACc,EAA+BD,IACnDE,EAAU,gBAAgBD,EAAYf,GAAgBe,EAAW,OAAQD,CAAI,CAAC,EAE5EZ,GAAmB,CAACY,EAAgBG,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKJ,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAM,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEajB,GAA6B,CAACkB,EAAyBC,IAAoC,CACtG,IAAMC,EAAgBF,EAAY,SAC5BR,EAAYQ,EAAY,KAAK,OAC7BP,EAAOd,GAAgBa,EAAWS,CAAQ,EAC1CE,EAAcvB,GAAeoB,EAAY,KAAMP,CAAI,EACnDK,EAASM,EAAe,SAAUF,EAAeC,EAAY,MAAM,EACnEN,EAAQQ,EAAc,IAAKH,EAAeV,CAAS,EACrDc,EACJ,GAAIb,EAAK,SAAW,GAAKA,EAAK,CAAC,IAAM,GAAKA,EAAK,CAAC,IAAM,EAAG,CACvD,IAAMc,EAAWT,EAAO,KAAK,MACvBU,EAA0C,CAAC,GAAI,GAAI,CAAC,EAC1DF,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA,sCAChDS,CAAQ,KAAKC,EAAc,CAAC,EAAI,CAAC,MAAMA,EAAc,CAAC,CAAC;AAAA,IACzFC,EAAa,UAAUD,CAAa,CAAC;AAAA,+BACVA,EAAc,CAAC,CAAC;AAAA,+BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,uCAIRX,EAAM,YAAY,eAAe,CAAC;AAAA;AAAA;AAAA,2BAG9CW,EAAc,CAAC,CAAC;AAAA,2BAChBA,EAAc,CAAC,CAA
C;AAAA;AAAA,QAEnCV,EAAO,YAAY,iBAAkB,8BAA8B,CAAC;AAAA;AAAA,IAG1E,MACEQ,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA;AAAA,IAElFjB,GAAiBY,EAAMD,EAAWK,EAAOC,CAAM,CAAC;AAAA;AAAA,IAEhDW,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DX,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGlDA,EAAO,YAAY,aAAcD,EAAM,aAAa,UAAU,CAAC,CAAC;AAAA,KAGpE,MAAO,CACL,KAAM,YACN,YAAa,CAAC,KAAM,GAAGI,CAAQ,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC9D,WAAaV,GAAW,CACtB,IAAMmB,EAAaf,EAAU,KAAKQ,CAAW,EAC7C,MAAO,CACL,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKmB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGC,EAA2BpB,EAAO,CAAC,EAAE,KAAMY,CAAW,CAAC,CAC5G,CACF,EACA,gBAAAG,CACF,CACF,EAEavB,GAAY,CAAC6B,EAAyBC,IAA0C,CAC3FnC,GAAekC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ9B,GAA2B8B,EAAQ,OAAO,CAAC,EAAGC,EAAW,IAAI,CAAC,CAChF,EAEa7B,GAA4B6B,GACrCC,GAA4B,CAAC,KAAMD,EAAW,IAAgB,CAAC,ICxGnE,IAYME,GAaAC,GAaAC,GAaAC,GAYAC,GAQAC,GAYAC,GAcAC,GASAC,GAaOC,GAyEPC,GAkCOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAtQbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAEM3B,GAAqC,CACzC,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,oCACX,UAAW,6BACX,GAAI,6BACJ,GAAI,oCACJ,OAAQ,uBACV,EAEMC,GAA2C,CAC/C,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,wBACX,UAAW,wBACX,GAAI,wBACJ,GAAI,wBACJ,OAAQ,uBACV,EAEMC,GAA4C,CAChD,IAAK,aACL,IAAK,aACL,KAAM,IACN,IAAK,IACL,KAAM,IACN,UAAW,IACX,UAAW,IACX,GAAI,IACJ,GAAI,IACJ,OAAQ,GACV,EAEMC,GAA8C,CAClD,IAAK,YACL,IAAK,YACL,IAAK,YACL,KAAM,YACN,UAAW,YACX,UAAW,iBACX,GAAI,YACJ,GAAI,kBACJ,OAAQ,gBACV,EAEMC,GAAmB,CAACwB,EAAsBC,IAA2B,CACzE,IAAMC,EAAM,CAAC,EACb,QAASC,EAAIF,EAAOD,EAAcG,EAAIF,EAAM,EAAEE,EAC5CD,EAAI,KAAKC,CAAC,EAEZ,OAAOD,CACT,EAEMzB,GAA4B,CAAC2B,EAA0BC,IAAkD,CAC7G,IAAMC,EAAc,CAAC,EACfL,EAAOG,EAAM,OACnB,QAASG,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,IACxBD,EAAY,KAAKF,EAAMG,CAAG,CAAC,EAG/B,IAAMC,EAAcH,EAAK,IAAIE,GAAOH,EAAMG,CAAG,CAAC,EAC9C,MAAO,CAACD,EAAaE,CAAW,CAClC,EAEM9B,GAAuB,CAAC0B,EAAiBC,IAA6B,CAC1E,IAAMJ,EAAOG,EAAM,OAASC,EAAK,OAC3BI,EAAc,CAAC,EACjBC,EAAW,EACf,QAASH,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,GACxBE,EAAY,KAAKL,EAAMM,GAAU,CAAC,EAElCD,EAAY,KAAK,CAAC,EAGtB,OAAOA,CACT,EAEM9B,GAAuB,CAAC0B,EAAgBJ,IAA0B,CACtE,QAASE,EAAI,EAAGA,EAAIE,EAAK,OAAQ,EAAEF,EACjC,GAAIE,EAAKA,EAAK,OAASF,EAAI,CAAC,IAAMF,EAAO,EAAIE,EAC3C,MAAO,GAGX,MAAO,EACT,EAEMvB,GAAqB,CAACyB,EAAgBJ,IAA2B,CACrE,IAAMC,EAAM,CAAC,EACb,GAAI,CAACvB,GAAqB0B,EAAMJ,CAAI,EAAG,CACrC,QAASE,EAAI,EAAGA,EAAIF,EAAM,EAAEE,EACtBE,EAAK,QAAQF,CAAC,IAAM,IACtBD,EAAI,KAAKC,CAAC,EAGdE,EAAK,QAAQM,GAAQT,EAAI,KAAKS,CAAI,CAAC,CACrC,CACA,OAAOT,CACT,EAEarB,GACT,CAAC+B,EAAcC,EAAqCC,EAA+BC,EAClFC,EAA0BV,EAAuBE,IAAuC,CACvF,IAAMS,EAAaH,EAAO,CAAC,EAAE,KAEvBI,EAAaC,EAAU,KAAKb,CAAW,EACvCc,EAAaD,EAAU,KAAKX,CAAW,EAEvCa,EAAQC,EAAc,KAAMR,EAAO,CAAC,EAAE,SAAUG,CAAU,EAC1DM,EAASC,EAAe,SAAUR,EAAgBV,CAAW,EAE7DmB,EAAgB,GAEhBC,EAAsB;AAAA,oDACkBD,CAAa;AAAA,SA+C3D,MAAO,CACL,KAAAb,EACA,YAAAC,EACA,gBA/CuBc,GAA+B;AAAA,UACpDA,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBN,EAAOE,CAAM,CAAC;AAAA,UACjFG,CAAmB;AAAA;AAAA;AAAA;AAAA,WAIlBC,EAAa,UAAUF,CAAa,CAAC;AAAA;AAAA,2CAELA,CAAa;AAAA;AAAA;AAAA,gCAGxBnD,GAAiByC,CAAU,CAAC;AAAA;AAAA,wDAEJU,CAAa;AAAA,iCACpCJ,EAAM,YAAY,YAAY,CAAC;AAAA,yBACvCjD,GAAU2C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,wCAKNU,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAM3BpD,GAAgB0C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAS3CQ,EAAO,YACH,cACA,GACIR,IAAe,OAAS,GAAGQ,EAAO,KAAK,OAAO,yCACtB,GAAGA,EAAO,KAAK,OAAO,IAAIhD,GAAmBwC,CAAU,CAAC,GAAG,EAAE,CAAC;AAAA;AAAA,WAShG,WAAY,KAAO,CACj
B,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUU,CAAc,CAAC,EACvD,cAAe,CAAC,EAAGE,CAAU,EAC7B,gBAAiB,CAAC,CAAC,QAAuB,KAAME,CAAU,CAAC,CAC7D,EACF,CACF,EAEEtC,GACF,CAAC8C,EAAyBhB,EAAciB,EACvCd,IAAiG,CAChG,IAAMe,EACFF,EAAQ,OAAO,SAAW,EAAIC,EAAaE,GAAiCH,EAAQ,OAAQC,CAAU,EAEtGG,EAAcF,EAAkB,KAChCE,EAAY,SAAW,GAAK,CAACF,EAAkB,oBACjDE,EAAcJ,EAAQ,OAAO,CAAC,EAAE,KAAK,IAAI,CAACK,EAAM9B,IAAMA,CAAC,GAEzD,IAAM+B,EAAgBf,EAAU,cAAca,EAAaJ,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EAEpFvB,EAAO6B,EACPb,EAAQO,EAAQ,OAAO,CAAC,EACtBO,EAAevD,GAAmByB,EAAMuB,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EACvEO,EAAa,OAAS,IACxBd,EAAQO,EAAQ,QACZQ,GAA2BR,EAAQ,OAAO,CAAC,EAAGO,CAAY,EAAG,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAChG9B,EAAO7B,GAAiB6B,EAAK,OAAQgB,EAAM,KAAK,MAAM,GAGxD,GAAM,CAACf,EAAaE,CAAW,EAAI/B,GAA0B4C,EAAM,KAAMhB,CAAI,EACzEgC,EAAmB/B,EACnBwB,EAAkB,WACpBO,EAAmB3D,GAAqB4B,EAAa4B,CAAa,GAGpEN,EAAQ,QACJ/C,GACI+B,EAAM,CAAC,KAAMkB,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACT,CAAK,EAAGN,EAChFa,EAAQ,OAAO,CAAC,EAAE,SAAUS,EAAkB7B,CAAW,EAC7D,CAAC,OAAQ,CAACa,CAAK,CAAC,CAAC,CACvB,EAEStC,GAAmB,CAAC6C,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEa7C,GAAiB,CAAC4C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa5C,GAAiB,CAAC2C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa3C,GAAwB,CAAC0C,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEa1C,GAAkB,CAACyC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEazC,GAAkB,CAACwC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEaxC,GAAmB,CAACuC,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEavC,GAAkB,CAACsC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEatC,GAAwB,CAACqC,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEarC,GAAqB,CAACoC,EAAyBC,IAAuC,CACjG/C,GAAa8C,EAAS,qBAAsBC,EAAY,QAAQ,CAClE,ICxQA,IAYMS,GAoBAC,GACOC,GA2EAC,GAUPC,GAeAC,GAWAC,GAWAC,GAWAC,GAWAC,GAoBAC,GAqBAC,GAoBAC,GAWAC,GAWAC,GAWAC,GAsBOC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GA7WbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KACAC,KAEMhC,GAAkBiC,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,0BAA0B,CAE9C,EAYMhC,GAAkBiC,GAAU,CAAC,GAAI,GAAI,eAAeA,EAAM,aAAa,eAAe,CAAC,IAAK,EAAE,EACvFhC,GACT,CAACiC,EAAcC,EAAqCH,EAA+BI,EAClFC,EAAqBC,EAA0BC,EAAW,GAAOC,EAAoB,KAAuB,CAC3G,IAAMC,EAAwB,CAAC,EACzBC,EAAaV,EAAO,CAAC,EAAE,KACvBW,EAAYD,EAAW,OACvBE,EAAOC,EAAU,cAAcR,EAAWM,CAAS,EACnDG,EAAkB,CAACN,GAAqBI,EAAK,SAAW,EAC9DF,EAAW,QAAQ,CAACK,EAAGC,IAAM,CACvBF,GAAmBF,EAAK,QAAQI,CAAC,GAAK,EACpCT,GACFE,EAAY,KAAK,CAAC,EAGpBA,EAAY,KAAKM,CAAC,CAEtB,CAAC,EACD,IAAME,EAAaR,EAAY,OACzBS,EAAaL,EAAU,KAAKJ,CAAW,EA4C7C,MAAO,CACL,KAAAP,EACA,YAAAC,EACA,gBA9CuBgB,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EAErBnB,EAAQoB,EAAc,KAAMrB,EAAO,CAAC,EAAE,SAAUW,CAAS,EACzDW,EAASC,EAAe,SAAUjB,EAAgBW,CAAU,EAC5DO,EAAMpB,EAASH,EAAOqB,EAAQV,CAAI,EACpCa,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGC,EAAI,EAAGD,EAAIf,EAAWe,IAEhCZ,GAAmBF,EAAK,QAAQc,CAAC,GAAK,GACpCnB,GACFoB,IAGFF,EAAY,YAAYC,CAAC,eAAeA,CAAC,MAAMhB,EAAWgB,CAAC,CAAC,MAAMA,CAAC;AAAA,oBAC3DF,EAAI,CAAC,EAAE,SAAS,YAAY,EAAI,qBAAqBE,CAAC,IAAM,EAAE;AAAA,oBAC9DzB,EAAM,WAAW,gBAAiByB,EAAG,IAAIA,CAAC,EAAE,CAAC;AAAA,oBAC7CD,CAAS;AAAA,qBAGjBL,EAAQ,KAAK,GAAGnB,EAAM,WAAW,gBAAiByB,EAAGJ,EAAO,WAAW,iBAAkBK,CAAC,CAAC,CAAC,GAAG,EAC/FA,KAGJ,MAAO;AAAA;AAAA,UAELR,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBlB,EAAOqB,CAAM,CAAC;AAAA;AAAA,UAElFH,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,+BACvDlB,EAAM,KAAK,OAAO;AAAA,iCAChBqB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDF,EAAQ
,KAAK;AAAA,CAAI,CAAC;AAAA,YAClBI,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,CAAC,CAAC;AAAA,YACNC,CAAS;AAAA,YACTD,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,SAAW,EAAIF,EAAO,YAAY,aAAc,OAAO,EAAIE,EAAI,MAAM,CAAC,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,UAE5F,EAME,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUH,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGU,EAA2BlB,EAAYD,CAAW,CAAC,CACxG,EACF,CACF,EAESvC,GACT,CAAC8B,EAA+B6B,IAAmD,CACjF,IAAMjB,EAAiB,CAAC,EACxB,OAAIZ,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,GACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQ8B,GAAKlB,EAAK,KAAK,OAAOkB,CAAC,CAAC,CAAC,EAEzDC,GACH,CAAC,KAAAnB,EAAM,SAAUiB,EAAW,SAAU,kBAAmBA,EAAW,iBAAiB,CAAC,CAC5F,EAEE1D,GACF,CAAC6D,EAAyB9B,EAAc2B,EAA8BzB,IAA6B,CACjG,IAAMJ,EAASgC,EAAQ,OACjBC,EACFjC,EAAO,SAAW,EAAI6B,EAAa3D,GAAiC8B,EAAQ6B,CAAU,EAE1FG,EAAQ,QACJ/D,GACIiC,EAAM,CAAC,KAAM+B,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACjC,EAAO,CAAC,CAAC,EACjFiC,EAAkB,mBAAqBA,EAAkB,KAAK,SAAW,EAAIjE,GAAOoC,EACpF6B,EAAkB,KAAMjC,EAAO,CAAC,EAAE,SAAUiC,EAAkB,SAC9DA,EAAkB,iBAAiB,EACvC,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEE7D,GAAoB,CAAC4D,EAAyBH,IAAuC,CACzF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,eAAgBH,EANf,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,qBACL,CAC8D,CAChE,EAEM5B,GAAgB,CAAC2D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,EACL,CAC0D,CAC5D,EAEM3B,GAAgB,CAAC0D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,sBAC1C,sBACL,CAC0D,CAC5D,EAEM1B,GAAuB,CAACyD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,qBACL,CACiE,CACnE,EAEMzB,GAAiB,CAACwD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAKlC,EAAM,WAAW,gBAAiByB,EAAG,CAAC,CAAC,EAIxD,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMxB,GAAkB,CAACuD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAiB7B7D,GAAiB6D,EAAS,aAAcH,EAhBb,CAAC5B,EAAOqB,EAAQV,IAAS,CAClD,IAAIwB,EAAO,EACX,QAASV,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,KAE1CwB,GAAQJ,EAAQ,OAAO,CAAC,EAAE,KAAKN,CAAC,GAIpC,MAAO,CACL,oBACA,GACA,cAAczB,EAAM,aAAa,eAAe,CAAC,KACjD,eAAeqB,EAAO,KAAK,KAAK,UAAUc,CAAI,IAChD,CACF,CAC4D,CAC9D,EAEM1D,GAAiB,CAACsD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAK,iBAAiBT,CAAC,QAAQ,EAI3C,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMtB,GAAkB,CAACqD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,aAAcH,EANb,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC4D,CAC9D,EAEMrB,GAAiB,CAACoD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,YAAaH,EANZ,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC2D,CAC7D,EAEMpB,GAAuB,CAACmD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,
EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,oBAC1C,EACL,CACiE,CACnE,EAEMnB,GACF,CAACuD,EAA0BzB,EAAyBJ,IAAwC,CAC1F,GAAII,EAAK,SAAW,EAClB,OAAOJ,EAGT,IAAIU,EAAa,EACboB,EAAa,EACjB,QAASC,EAAM,EAAGA,EAAM3B,EAAK,OAAQ2B,IAC/B3B,EAAK,QAAQ2B,CAAG,IAAM,GACxBrB,GAAcmB,EAAME,CAAG,EAEvBD,GAAcD,EAAME,CAAG,EAO3B,OAAOD,EAAa,IAAMpB,EAAa,IACzC,EAESnC,GAAa,CAACiD,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FpD,GAAgBuD,EAASH,CAAU,EAEnCW,GAAiBR,EAASH,CAAU,CAExC,EAEa7C,GAAW,CAACgD,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FxD,GAAc2D,EAASH,CAAU,EAEjCY,GAAeT,EAASH,CAAU,CAEtC,EAEa5C,GAAW,CAAC+C,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FvD,GAAc0D,EAASH,CAAU,EAEjCa,GAAeV,EAASH,CAAU,CAEtC,EAEa3C,GAAkB,CAAC8C,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FtD,GAAqByD,EAASH,CAAU,EAExCc,GAAsBX,EAASH,CAAU,CAE7C,EAEa1C,GAAY,CAAC6C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FrD,GAAewD,EAASH,CAAU,EAElCe,GAAgBZ,EAASH,CAAU,CAEvC,EAEazC,GAAY,CAAC4C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FnD,GAAesD,EAASH,CAAU,EAElCgB,GAAgBb,EAASH,CAAU,CAEvC,EAEaxC,GAAa,CAAC2C,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FlD,GAAgBqD,EAASH,CAAU,EAEnCiB,GAAiBd,EAASH,CAAU,CAExC,EAEavC,GAAY,CAAC0C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FjD,GAAeoD,EAASH,CAAU,EAElCkB,GAAgBf,EAASH,CAAU,CAEvC,EAEatC,GAAkB,CAACyC,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FhD,GAAqBmD,EAASH,CAAU,EAExCmB,GAAsBhB,EAASH,CAAU,CAE7C,EAEarC,GAAe,CAACwC,EAAyBH,IAAuC,CACvF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FzD,GAAkB4D,EAASH,CAAU,EAErCoB,GAAmBjB,EAASH,CAAU,CAE1C,ICnXA,IAcMqB,GAeOC,GA0BAC,GA0BAC,GAjFbC,GAAAC,EAAA,kBAOAC,KAEAC,KAGAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAQaR,GAAS,CAACS,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaT,GAAS,CAACQ,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaR,GAA4BQ,GACrCQ,GAA4BR,CAA
oE,IClFpG,IAuEMS,GAmKAC,GAsGAC,GA2JAC,GA0HOC,GAqCPC,GAmHOC,GA7vBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAgEMX,GAA0B,CAACY,EAA+BC,IAAoD,CAmClH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAUH,EAAO,CAAC,EAClBI,EAAOJ,EAAO,CAAC,EACfK,EAAYL,EAAO,CAAC,EACpBM,EAAON,EAAO,CAAC,EACfO,EAAuBP,EAAO,CAAC,EAErC,GAAIM,GAAQC,EACV,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIL,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMM,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAkBR,EAAM,KAAK,CAAC,EAEpC,GAAIE,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIA,EAAQ,KAAK,CAAC,IAAMO,EACtB,MAAM,IAAI,MAAM,uEAAuE,EAGzF,GAAIN,EAAK,KAAK,CAAC,IAAMD,EAAQ,KAAK,CAAC,EACjC,MAAM,IAAI,MAAM,oFAAoF,EAGtG,IAAIQ,EAAcP,EAAK,KAAK,CAAC,EAAI,EAC7BQ,EAAcD,EACdE,EAAcD,EAClB,GAAIX,EAAW,eAAe,OAAS,EAAG,CACxC,GAAIA,EAAW,eAAe,SAAW,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAErE,QAAWa,KAAMb,EAAW,eAC1B,GAAIa,EAAKb,EAAW,WAAa,EAC/B,MAAM,IAAI,MAAM,mDAAmD,EAIvEU,EAAcV,EAAW,eAAe,CAAC,EACzCW,EAAcX,EAAW,eAAe,CAAC,EACzCY,EAAcZ,EAAW,eAAe,CAAC,CAC3C,CAEA,IAAMc,EAAmBN,EAEzB,GAAIE,IAAgBC,EAClB,MAAM,IAAI,MAAM,6DAA6D,EAG/E,GAAIR,EAAK,KAAK,CAAC,IAAMO,EAAcC,EAAcC,EAC/C,MAAM,IAAI,MAAM,+EAA+E,EAGjG,IAAIG,EAAqB,EACzB,GAAIV,EAAM,CACR,GAAIM,IAAgBC,EAClB,MAAM,IAAI,MAAM,oDAAoD,EAEtE,GAAIP,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,qCAAqC,EAEvD,GAAIA,EAAK,KAAK,CAAC,IAAM,EACnB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,KAAK,CAAC,IAAME,EACnB,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIF,EAAK,KAAK,CAAC,IAAML,EAAW,SAC9B,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAIK,EAAK,KAAK,CAAC,IAAMM,EAAcX,EAAW,SAC5C,MAAM,IAAI,MAAM,gEAAgE,EAG7EA,EAAW,yBACde,EAAqBV,EAAK,KAAK,CAAC,EAGpC,CAEA,IAAMW,EAAsBF,EAAmBC,EACzCE,EAAoB,GAEpBC,EAAW,EACjB,GAAId,EAGF,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAIC,EACF,MAAM,IAAI,MAAM,uBAAuB,EAGzC,MAAO,CACL,UAAAE,EACA,eAAAC,EACA,mBAAAO,EACA,iBAAAD,EACA,oBAAAE,EACA,kBAAAC,EACA,gBAAAR,EACA,WAAYC,EACZ,YAAAE,EACA,SAAU,KAAK,MAAMF,EAAcV,EAAW,QAAQ,EACtD,UAAW,KAAK,MAAMY,EAAcZ,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAqB,GACrB,aAAc,GACd,UAAW,CACb,CACF,EAEMZ,GAAkC,CAAC+B,EAA0BlB,EAAmB,EAAWmB,IAAc,CAC7G,IAAMC,EAAaC,GAAiBF,CAAC,EACjCG,EAAK,GACHC,EAAQJ,EAAIC,EACdG,EAAQD,EACVA,EAAK,EACIC,EAAQ,EAAI,KACrBD,EAAK,KAAK,KAAKC,EAAQ,CAAC,GAE1B,IAAMC,EAAoB,KAAK,KAAKL,EAAIC,EAAaE,CAAE,EACjDG,EAAoC,CACxC,CAAC,KAAMzB,EAAM,SAAU,KAAM,EAAImB,CAAC,EAAG,CAAC,QAAuB,KAAMI,CAAK,EACxE,CAAC,QAAuB,KAAMC,CAAiB,CACjD,EACME,EAAWC,GAA4B3B,EAAM,SAAUoB,CAAU,EACjEQ,EAAUC,KAA0CT,CAAU,EAE9DU,EAAmBC,GAA+B,CACtD,IAAMC,EAAcC,EAAe,IAAKjC,EAAM,SAAUA,EAAM,KAAMoB,CAAU,EAExEc,EAA8B,CAClC,CAAC,KAAM,QAAS,KAFIL,GAA0B7B,EAAM,QAAQ,CAEC,EAAG,CAAC,KAAM,SAAU,KAAM,KAAK,EAC5F,CAAC,KAAM,sBAAuB,KAAM,KAAK,CAC3C,EAEA,MAAO;AAAA,0CAC+BsB,CAAE;AAAA,0CACFA,CAAE;AAAA,IACxCS,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBF,CAAW,CAAC;AAAA,IACrED,EAAa,UAAU,CACrBT,EAAI,EAAG,CACT,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,8BAIwBM,CAAO;AAAA;AAAA,gCAELA,CAAO;AAAA;AAAA,+BAER,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,gDACT,IAAK,GACH,MAAO,oGACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA,uBAINM,CAAO;AAAA;AAAA,0BAEJA,CAAO;AAAA;AAAA,+BAEF,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,aACT,IAAK,GACH,MAAO,8BACT,IAAK,GACH,MAAO,4DACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAMHU,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,yBAIvBJ,CAAO;AAAA,0BACNI,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA,IAI9C,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CAAC,KAAM,GAAGV,CAAE,IAAII,CAAQ,IAAIN,CAAU,EAAE,EACrD
,gBAAAU,EACA,WAAY,KAAO,CAAC,QAAS,CAAC,EAAG,cAAe,CAAC,EAAG,CAAC,EAAG,gBAAAL,CAAe,EACzE,CACF,EAEMrC,GACF,CAAC+C,EAAyBC,EAAeC,EAAiBC,EACzDjC,EAA4CkC,EAAiCxC,EAC7Ee,IAA+B,CAC9B,IAAMC,EAAsBD,EAAqByB,EAAW,iBACtDC,EAAa,CAACD,EAAW,UAAWA,EAAW,SAAUA,EAAW,eAAgBxB,CAAmB,EACvG0B,EAAaF,EAAW,aAAe,QAAaJ,EAAQ,YAAc,EAC1EO,EAAkBD,EACpB,CAACF,EAAW,UAAWA,EAAW,SAAUxB,EAAqBwB,EAAW,QAAQ,EACpF,OAIEI,EAAQ5C,EAAW,QAAU,EAAI,EAAM,KAAK,KAAKwC,EAAW,QAAQ,EAAIxC,EAAW,MACnFqB,EAAaC,GAAiBkB,EAAW,QAAQ,EACjDK,EAAqBL,EAAW,SAAWnB,EAC3CyB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAK/B,EAAsB8B,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACMd,EAAoC,CACxC,CAAC,QAAuB,KAAMc,EAAW,cAAc,EAAG,CAAC,QAAuB,KAAMK,CAAkB,EAC1G,CAAC,QAAuB,KAAM7B,CAAmB,EAAG,CAAC,QAAuB,KAAMwB,EAAW,QAAQ,EACrG,CAAC,OAAsB,KAAMI,CAAK,EAAG,CAAC,QAAuB,KAAM7B,CAAkB,EACrF,CAAC,QAAuB,KAAMyB,EAAW,gBAAgB,CAC3D,EAEMQ,EAAwD,CAAC,OAAQ,MAAM,EACzET,GACFS,EAAkB,KAAK,MAAM,EAE3B1C,GACF0C,EAAkB,KAAK,MAAM,EAE/B,IAAMC,EAAU,CAAC,CAAC,KAAMR,EAAY,SAAUJ,EAAE,SAAU,aAAgC,CAAC,EACvFK,GACFO,EAAQ,KAAK,CAAC,KAAMN,EAAkB,SAAUN,EAAE,SAAU,aAAgC,CAAC,EAE/F,IAAMN,EAAmBC,GAA+B,CACtD,IAAMkB,EAASC,EAAc,IAAKd,EAAE,SAAUA,EAAE,KAAMhB,CAAU,EAC1D+B,EAASD,EAAc,MAAOb,EAAI,SAAUA,EAAI,KAAMjB,CAAU,EAChEgC,EAAY,CAACH,EAAQE,CAAM,EACjC,GAAIb,EAAS,CACX,IAAMe,GAAeH,EAAc,WAAYZ,EAAQ,SAAUA,EAAQ,KAAMlB,CAAU,EACzFgC,EAAU,KAAKC,EAAY,CAC7B,CACIhD,GACF+C,EAAU,KACNF,EAAc,yBAA0B7C,EAAqB,SAAUA,EAAqB,IAAI,CAAC,EAEvG,IAAMiD,EAASrB,EAAe,SAAUG,EAAE,SAAUI,CAAU,EACxDe,EAAa,CAACD,CAAM,EACtBb,GACFc,EAAW,KAAKtB,EAAe,cAAeG,EAAE,SAAUM,EAAkBtB,CAAU,CAAC,EAEzF,IAAMQ,EAAUC,KAA0CT,CAAU,EAE9Dc,GAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAM,KAA+B,EACvF,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA;AAAA,gCAECI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,gCAC7CI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,IACzEd,EAAa,iBAAiBG,EAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMH,IACOP,GAAWG,EACN;AAAA;AAAA,+EAIA;AAAA,wEAGR,CAAC;AAAA,MACNA,EAAa,4DAA8D,EAAE;AAAA,kBACjEb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOhB,IACKU,GAAWG,EACN;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAQA,yEAER,CAAC;AAAA,QAEAA,EACI,+FACA,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA,mBAKCb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBASF,IAAM,CACpB,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,wCACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA,8BACkBkC,EAAO,KAAK,KAAK,6BACnCjD,EAAuB,oCAAsC,KAAK;AAAA;AAAA,IAGxE,EACA,MAAO,CACL,KAAM,iBACN,YAAa,CACX,KAAM,GAAGe,CAAU,IAAIf,IAAyB,MAAS,IAAIiC,IAAY,MAAS,IAAIH,EAAQ,WAAW,GACzG,kBAAAY,CACF,EACA,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAGEzC,GACF,CAAC8C,EAAyBqB,EAAmBC,EAAeC,EAC3DC,EAA6B7C,IAA+B,CAC3D,IAAMC,EAAsBD,EAAqB6C,EAAO,iBAClDC,EAAQD,EAAO,MAAQA,EAAO,MAAQ,EACtCE,EAAsBF,EAAO,YAAcC,EAC3CE,EAAeH,EAAO,YAAc,MAAQxB,EAAQ,YAAc,EAClE4B,EACFD,EAAe,CAACH,EAAO,UAAWA,EAAO,SAAU5C,EAAqB4C,EAAO,QAAQ,EAAI,OACzFK,EAAc,CAACL,EAAO,UAAWA,EAAO,eAAgBE,CAAmB,EAC3EhB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKa,EAAO,UAAYd,CAAS,EACzC,EAAG,KAAK,KAAKc,EAAO,eAAiBd,CAAS,EAC9C,EAAGc,EAAO,UAAYA,EAAO,QAC/B,EAEMlC,EAAoC,CACxC,CAAC,QAAuB,KAAMkC,EAAO,cAAc,EAAG,CAAC,QAAuB,KAAM5C,CAAmB,EACvG,CAAC,QAAuB,KAAM4C,EAAO,SAAS,EAAG,CAAC,QAAuB,KAAMA,EAAO,QAAQ,EAC9F,CAAC,QAAuB,KAAME,CAAmB,EAAG,CAAC,QAAuB,KAAM/C,CAAkB,EACpG,CAAC,QAAuB,KAAM6C,EAAO,gBAAgB,CACvD,EACMZ,EACFW,EAAY,CAAC,OAAQ,OAA
Q,MAAM,EAAI,CAAC,OAAQ,MAAM,EACpDV,EAAU,CAAC,CAAC,KAAMgB,EAAa,SAAUR,EAAM,SAAU,aAAgC,CAAC,EAC5FM,GACFd,EAAQ,KAAK,CAAC,KAAMe,EAAoB,SAAUP,EAAM,SAAU,aAAgC,CAAC,EAErG,IAAM1B,EAAmBC,GAA+B,CACtD,IAAMkC,EAAcf,EAAc,QAASM,EAAM,SAAUA,EAAM,IAAI,EAC/DU,EAAUhB,EAAc,IAAKO,EAAE,SAAUA,EAAE,IAAI,EAC/CL,EAAY,CAACa,EAAaC,CAAO,EACnCR,GACFN,EAAU,KAAKF,EAAc,aAAcQ,EAAU,SAAUA,EAAU,IAAI,CAAC,EAGhF,IAAMH,EAAa,CADJtB,EAAe,SAAUuB,EAAM,SAAUQ,CAAW,CACzC,EACtBF,GACFP,EAAW,KAAKtB,EAAe,gBAAiBuB,EAAM,SAAUO,CAAkB,CAAC,EAErF,IAAM7B,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,gBAAiB,KAAM,KAAK,EACrE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA,gCACCoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,gCAChDoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,IAC5Ed,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMJ,IACQa,GAAaI,EACR;AAAA;AAAA;AAAA,QAKA;AAAA;AAAA,eAIR,CAAC;AAAA,MACNA,EAAe,kEAAoE,EAAE;AAAA,iBAC1EG,EAAY,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAO9B,IACGP,GAAaI,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQA;AAAA;AAAA,SAIR,CAAC;AAAA,UACFA,EAAe,kFAAoF,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAkBzG,EAEA,MAAO,CACL,KAAM,iBACN,YAAa,CAAC,KAAM,GAAGJ,IAAc,MAAS,IAAIvB,EAAQ,WAAW,GAAI,kBAAAY,CAAiB,EAC1F,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAESxC,GACT,CAAC6C,EAAyBC,EAAe+B,EAAeV,EAAeW,EACtEC,EAA6B/B,EAA+BoB,EAC5DrD,EAA4CkC,EAAiCxC,IAA+B,CAC3G,IAAMuE,EAAcnC,EAAQ,YACtBrB,EACFyB,EAAW,aAAe,QAAa+B,EAAc,EAAI/B,EAAW,mBAAqB,EACvFxB,EAAsBD,EAAqByB,EAAW,iBAEtDgC,EAAWhC,EAAW,aAAe,QAAa+B,EAAc,GAAKhC,EAAW,CAACF,EAAG+B,EAAG7B,CAAO,EAAI,CAACF,EAAG+B,CAAC,EACzG9D,GACFkE,EAAQ,KAAKlE,CAAoB,EAInC,IAAMmD,EAAQrB,EAAQ,QAClB/C,GACI+C,EAASC,EAAG+B,EAAGG,EAAc,EAAIhC,EAAU,OAAWjC,EAAsBkC,EAAYxC,EACxFe,CAAkB,EACtB,CAAC,OAAQyD,EAAS,QAAUhC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,GAAI,CAAC,EAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAG5GnC,EAAQ,QACJhD,GACIgD,EAASqB,EAAOjB,EAAW,UAAYA,EAAW,SAAWA,EAAW,eACxExB,CAAmB,EACvB,CAAC,OAAQ,CAACyC,CAAK,EAAG,QAAS,CAAC,CAAC,CAAC,EAGlC,IAAMgB,EACDjC,EAAW,aAAe,QAAa+B,EAAc,GAAKZ,EAAa,CAACF,EAAOC,EAAGC,CAAS,EAAI,CAACF,EAAOC,CAAC,EAC7GtB,EAAQ,QACJ9C,GACI8C,EAASqB,EAAOC,EAAGa,EAAc,GAAKZ,EAAYA,EAAY,OAAWnB,EAAYzB,CAAkB,EAC3G,CAAC,OAAQ0D,EAAS,QAAUjC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,EAAG,CAAC,EAAI,CAAC,CAAC,CAAC,CAAC,CACzG,EAEE/E,GAAU,CAAC4C,EAAyBI,IAAoC,CAC5E,IAAMyB,EAAc,CAClBzB,EAAW,UACXA,EAAW,SACXA,EAAW,eACXA,EAAW,QACb,EACMkC,EAAIlC,EAAW,eACfmC,EAAInC,EAAW,gBACfoC,EAAIpC,EAAW,SACfM,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKP,EAAW,SAAWM,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACMzC,EAAS,CAACqC,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,CAAC,EACjEV,EAAoC,CACxC,CAAC,QAAuB,KAAMgD,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EACnG,CAAC,QAAuB,KAAMpC,EAAW,QAAQ,EAAG,CAAC,QAAuB,KAAMA,EAAW,QAAQ,EACrG,CAAC,QAAuB,KAAMA,EAAW,UAAU,EACnD,CAAC,QAAuB,KAAMA,EAAW,WAAaA,EAAW,WAAaA,EAAW,WAAW,CACtG,EAEMT,EAAmBC,GAA+B,CACtD,IAAM6C,EAAU3C,EAAe,WAAYnC,EAAO,CAAC,EAAE,SAAUkE,CAAW,EACpEa,EAAU5C,EAAe,WAAYnC,EAAO,CAAC,EAAE,SAAUkE,CAAW,EACpEc,EAAU7C,EAAe,WAAYnC,EAAO,CAAC,EAAE,SAAUkE,CAAW,EACpEhE,EAAQkD,EAAc,QAASpD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEiF,EAAS7B,EAAc,SAAUpD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACnEI,EAAOgD,EAAc,OAAQpD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/D4B,EAAW1B,EAAM,KAAK,QAEtB
kC,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAC7G,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,MAAO,KAAM,KAAK,CACjG,EACA,MAAO;AAAA,sBACWW,CAAS;AAAA,oCACKnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAChCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,IACpEd,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBlC,EAAO+E,EAAQ7E,EAAM0E,EAASC,EAASC,CAAO,CAAC;AAAA,IACxG/C,EAAa,UAAU,CACrBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWanB,CAAQ;AAAA,mBACRA,CAAQ;AAAA,mBACRA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAoCzB,EAEA,OAAOS,EAAQ,QACX,CACE,KAAM,mBACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,CAC5F,EACA,cAAeW,EACf,gBAAArB,CACF,GACA,gBAAAK,CACF,EACA,CAAC,OAAAhC,EAAQ,QAAS,CAAC,GAAI,GAAI,EAAE,CAAC,CAAC,CACrC,EAEaN,GAAY,CAAC2C,EAAyBpC,IAAqC,CACtF,IAAM4D,EAASzE,GAAwBiD,EAAQ,OAAQpC,CAAU,EAE3D,CAACqC,EAAG+B,EAAGV,CAAC,EAAIlE,GAAQ4C,EAASwB,CAAM,EAEzC,OAAOrE,GACH6C,EAASC,EAAG+B,EAAGV,EAAGtB,EAAQ,OAAO,CAAC,EAAG,OAAW,OAAW,OAAWA,EAAQ,OAAO,CAAC,EAAGwB,EAAQ5D,CAAU,CACjH,ICpwBA,IAsBMiF,GAkCAC,GAgFOC,GAGAC,GA3IbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAGAC,KAWMV,GAAiB,CAACW,EAA+BC,IAA0C,CAC/F,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAME,EAAkB,CAACC,EAA2BC,EAA6BC,IAAoB,CACnG,IAAMC,EAAIF,EAAS,OACnB,GAAIE,IAAMH,EAAO,OACf,MAAM,IAAI,MAAM,GAAGE,CAAO,uBAAuBC,CAAC,EAAE,EAEtDF,EAAS,QAAQ,CAACG,EAAGC,IAAM,CACzB,GAAID,IAAMJ,EAAOK,CAAC,EAChB,MAAM,IAAI,MAAM,GAAGH,CAAO,SAASG,CAAC,gBAAgB,CAExD,CAAC,CACH,EAEA,GAAIR,EAAO,CAAC,EAAE,KAAK,OAAS,EAAG,CAC7B,IAAMS,EAAQR,EAAW,SAAW,OAC/BA,EAAW,QAAUD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EACvBA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EAAE,OAAOA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,CAAC,EACxGA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGC,EAAW,QAAU,EAAI,MAAS,EAC9DC,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,qBAAqB,EAC5DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,iBAAiB,EACxDP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,oBAAoB,EAC3DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,mBAAmB,CAC5D,MACEP,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,qBAAqB,EAC1DE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,iBAAiB,EACtDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,oBAAoB,EACzDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,mBAAmB,CAE5D,EAEMV,GACF,CAACU,EAA+BC,IAAiD,CAC/E,GAAM,CAAC,QAAAS,EAAS,QAAAC,EAAS,OAAAC,CAAM,EAAIX,EAC7BY,EAASb,EAAO,CAAC,EAAE,KACnBc,EAAaH,EAAUI,GAAiBF,EAAOA,EAAO,OAAS,CAAC,CAAC,EAAI,EACrEG,EAAcJ,IAAW,QAAUC,EAAO,OAAS,EAAIC,EAAa,EACpEG,EAAaC,EAAU,KAAKL,CAAM,EAAIC,EAEtCK,EAAoBR,EACpBS,EAAcD,EAAoBN,EAAO,OAASA,EAClDQ,EAAIC,EAAc,IAAKtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMc,CAAU,EACrES,EAAQD,EAAc,QAAStB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC9EQ,EAAOF,EAAc,OAAQtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC5ES,EAAYH,EAAc,YAAatB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACtFU,EAAWJ,EAAc,WAAYtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACpFW,EAAIC,EAAe,IAAK5B,EAAO,CAAC,EAAE,SAAUoB,EAAaN,CAAU,EAGnEe,EAAc,IAAc,CAChC,IAAIC,EAAU,GACd,GAAInB,EACFmB,EAAU,iBACNjB,EAAO,SAAW,EAAM,KACpBD,IAAW,OAAS,iBAAiBC,EAAO,OAAS,CAAC,OAAOC,CAAU,GACnD,kBAAkB,YAE1CF,IAAW,OACbkB,EAAU;AAAA,cACRH,EAAE,WAAW,gBAA
iB,IAAK,GAAG,CAAC;AAAA,4BACzBA,EAAE,gBAAgB,eAAe,CAAC,QAC7C,CAELG,EAAU,kBAAkBP,EAAM,KAAK,OAAO;AAAA,qDACLV,EAAO,OAAS,CAAC,KAE1D,QAASL,EAAI,EAAGA,EAAIe,EAAM,KAAMf,IAC9BsB,GAAW,YAAYtB,CAAC,qBAAqBA,CAAC,KAEhDsB,GAAW,iBAAiBP,EAAM,gBAAgB,UAAU,CAAC,GAC/D,CAEF,OAAOO,CACT,EACMC,EAAgCC,GAAyB;AAAA,oBACjDtB,CAAO;AAAA,IACvBsB,EAAO,gBAAgB,aAAc,KAAK,EAAE,iBAAiBX,EAAGE,EAAOC,EAAMC,EAAWC,EAAUC,CAAC,CAAC;AAAA,IACpGK,EAAO,UAAU,CAAC;AAAA,IAClBA,EAAO,sCAAsC,qBAAqB,CAAC;AAAA,0BAC7CL,EAAE,gBAAgB,gBAAgBb,CAAU,EAAE,CAAC;AAAA,MACnEe,EAAY,CAAC;AAAA,kBACDN,EAAM,YAAY,SAAS,CAAC;AAAA,iBAC7BC,EAAK,YAAY,SAAS,CAAC;AAAA,sBACtBC,EAAU,YAAY,SAAS,CAAC;AAAA,qBACjCC,EAAS,YAAY,SAAS,CAAC;AAAA,cACtCL,EAAE,YAAY,YAAY,CAAC;AAAA;AAAA,MAEnCM,EAAE,YAAY,aAAc,OAAO,CAAC;AAAA,KAEpC,MAAO,CACL,KAAM,qBACN,YAAa,CACX,KAAM,GAAG1B,EAAW,OAAO,IAAIA,EAAW,MAAM,IAAIU,CAAO,IAAIG,CAAU,GACzE,kBAAmBK,EAAoB,CAAC,OAAQ,OAAQ,OAAQ,OAAQ,MAAM,EAAI,MACpF,EACA,gBAAiBY,EACjB,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM/B,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKiB,EAAa,EAAuB,CAAC,EAClE,gBAAiBE,EACb,CACE,CAAC,QAAuB,KAAMF,CAAU,EACxC,GAAGgB,EAA2BpB,CAAM,CACtC,EACA,CACE,CAAC,QAAuB,KAAMI,CAAU,CAC1C,CACN,EACF,CACF,EAES1B,GAA4BU,GACrCiC,GAA4BjC,CAAoE,EAEvFT,GAAY,CAAC2C,EAAyBlC,IAA8C,CAC/F,GAAM,CAAC,OAAAD,EAAQ,YAAAoC,CAAW,EAAID,EACxBE,EAAoB9C,GAAyB,CAAC,GAAGU,EAAY,YAAAmC,CAAW,CAAC,EAI/E,GAHIE,GAAI,OAAO,sBACbjD,GAAeW,EAAQqC,CAAiB,EAEtCpC,EAAW,aACb,MAAM,IAAI,MAAM,uDAAuD,EAEvEkC,EAAQ,QAAQ7C,GAAoCU,EAAQqC,CAAiB,CAAC,CAElF,ICtJA,IASME,GAkBAC,GAkCOC,GA7DbC,GAAAC,EAAA,kBAIAC,KAGAC,KAEMN,GAAkBO,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,IAAK,IAAK,IAAI,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC9C,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMN,GAA4BM,GAA+C,CAC/E,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAExBE,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAE3BG,EAAaC,EAAU,KAAKH,CAAW,EAAI,EAE3CI,EAAWL,EAAO,CAAC,EAAE,SACrBM,EAAQC,EAAc,QAASF,EAAUJ,EAAa,CAAC,EACvDO,EAAOD,EAAc,OAAQF,EAAU,CAACH,CAAQ,EAAG,CAAC,EACpDO,EAAWF,EAAc,WAAYF,EAAUJ,EAAa,CAAC,EAC7DS,EAASC,EAAe,SAAUN,EAAUJ,EAAa,CAAC,EAahE,MAAO,CACL,KAAM,UACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,CACpE,GACA,gBAjBuBS,GAA+B;AAAA,qBACrCV,CAAQ;AAAA,IACzBU,EAAa,iBAAiBN,EAAOE,EAAMC,EAAUC,CAAM,CAAC;AAAA;AAAA,IAE5DE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCT,CAAU,CAAC;AAAA,kBAClDG,EAAM,YAAY,YAAY,CAAC;AAAA,UACvCE,EAAK,YAAY,uBAAuB,CAAC,MAAMC,EAAS,YAAY,YAAY,CAAC;AAAA,MACrFC,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAU7C,CACF,EAEaf,GAAWkB,GAAkC,CACxDpB,GAAeoB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQnB,GAAyBmB,EAAQ,MAAM,CAAC,CAC1D,IChEA,IAeMC,GA4BAC,GAiBOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAGAC,GASAC,GAIAC,GA8BPC,GAMOC,GAaAC,GAIAC,GAIAC,GAQAC,GAGAC,GAgBAC,GAcAC,GAKAC,GAIAC,GAIAC,GAMAC,GAOAC,GAIAC,GAIAC,GAIAC,GAMAC,GASAC,GAMAC,GASAC,GAIAC,GAIAC,GAIAC,GAIAC,GAEAC,GAKAC,GAUAC,GAGAC,GAOAC,GAQAC,GAIAC,GAmBAC,GAEAC,GAlVbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMlD,GACF,CAACmD,EAA4BC,EAAkBC,EAAuBC,EACrEC,EAAmCC,IAA8C,CAChF,IAAMC,EAAU,KAAK,KAAKL,EAAW,CAAC,EAElCM,EAAa,GACb,OAAOH,GAAa,SACtBG,EAAa,GAAGH,CAAQ,MAExBG,EAAaH,EAAS,GAAG,EAG3B,IAAMI,EAAQC,EAAc,YAAaP,EAAe,CAACI,CAAO,EAAG,CAAC,EAC9DI,EAASC,EAAe,aAAcR,EAAgB,CAACG,CAAO,EAAG,CAAC,EAExE,MAAO;AAAA,QACLN,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBQ,EAAOE,CAAM,CAAC;AAAA;AAAA,IAEnFL,GAA4B,EAAE;AAAA;AAAA,IAE9BL,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA;AAAA,cAE/
DQ,EAAM,YAAY,YAAY,CAAC;AAAA,MACvCE,EAAO,YAAY,aAAcH,CAAU,CAAC;AAAA,IAE9C,EAEEzD,GACF,CAAC0D,EAAmBI,EAAcR,EAAmCC,EACpEQ,EAAmBV,EAAyBK,EAAM,YAA2B,CAC5E,KAAAI,EACA,YAAa,CAAC,KAAMC,EAAU,kBAAmB,CAAC,MAAM,CAAC,EACzD,gBAAiBb,GAAgBnD,GAC7BmD,EAAcc,EAAU,KAAKN,EAAM,IAAI,EAAGA,EAAM,SAAUL,EAAgBC,EAAUC,CAAwB,EAChH,WAAaU,IAAkB,CAC7B,QAAS,CAAC,CAAC,KAAMP,EAAM,KAAM,SAAUL,CAAc,CAAC,EACtD,cACI,CAAC,EAAG,KAAK,KAAKW,EAAU,KAAKC,EAAa,CAAC,EAAE,IAAI,EAAI,GAA0B,CAAgB,CAAC,EACpG,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKD,EAAU,KAAKN,EAAM,IAAI,EAAI,CAAC,CAAC,CACzE,CACF,EACF,GAESzD,GAAOiE,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahE,GAAQgE,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa/D,GAAS+D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa9D,GAAQ8D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa7D,GAAS6D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa5D,GAAQ4D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EACa3D,GAAS2D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAOa1D,GAAuB2D,GAChCC,GAA4BD,CAA0B,EAG7C1D,GAAO,CAACyD,EAAyBC,IAAqC,CACjF,IAAIE,EACJ,OAAQF,EAAW,GAAI,CACrB,QACEE,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,QACEA,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,OACEA,EAAO,aACP,MACF,QACE,MAAM,IAAI,WAAW,0EAA0EF,EAAW,EAAE,EAAE,CAClH,CACAD,EAAQ,QACJlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQG,EAAM,OAAWF,EAAW,SAAUA,EAAW,EAAE,CAAC,CAClH,EAOMzD,GAAoC4D,GAAkD,CAC1F,IAAMC,EAAOD,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAIE,GACtFC,EAAOH,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAII,GAC5F,OAAON,GAA4B,CAAC,IAAAG,EAAK,IAAAE,CAAG,CAAC,CAC/C,EAEa9D,GAAO,CAACuD,EAAyBS,IAAyC,CACrF,IAAMR,EAAaD,EAAQ,OAAO,SAAW,EAAIS,EAAiBjE,GAAiCwD,EAAQ,MAAM,EAC3GU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QACJlE,GACIkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,0BAA2B;AAAA,4BACnDF,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,4BAC9CS,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,EAEhEA,EAAW,QAAQ,EACvB,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEavD,GAAQsD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEarD,GAAOqD,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEapD,GAAQoD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAManD,GAAwBoD,GACjCC,GAA4BD,CAA6B,EAEhDnD,GAAM,CAACkD,EAAyBC,IAAsC,CACjF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK;AAAA,uBAChCF,CAAQ,IAAIT,EAAW,KAAK;AAAA;AAAA,kBAEjCS,CAAQ,QAAQA,CAAQ;AAAA;AAAA;AAAA;AAAA,wBAIlBA,CAAQ,cAAcA,CAAQ;AAAA;AAAA,KAGhDT,EAAW,QAAQ,CAAC,CAC1B,EAEalD,GAAU,CAAC8D,EAAU,QAAU;AAAA,YAChCA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA;AAAA,sBAEGA,CAAO,cAAcA,CAAO;AAAA;AAAA;AAAA;AAAA,GAMrC7D,GAAOgD,GAAkC,CACpD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK7D,GAAQ2D,CAAQ,CAAC,CAAC,CAClH,EAEazD,GAAO+C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa9C,GAAS8C,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa7C,GAAQ6C,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,sBAAsBA,CAAC,0BAA2B7D,GAAQ2D,CAAQ,CAAC,CAAC,CACpH,EAEatD,GAAY,CAAC4C,EAAyBC,IAAsC,CACvF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,
EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaY,GAAK,8BAA8BA,CAAC,KAAKA,CAAC,KAAKA,CAAC,YAAYF,CAAQ,UACpG,6BAA6BA,CAAQ,IAAIT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,CACzF,EAEa5C,GAAO2C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEatD,GAAO0C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEarD,GAAcyC,GAAkC,CAC3DA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,aAAcY,GAAK,OAAOA,CAAC,EAAE,CAAC,CAChG,EAEapD,GAAQwC,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,WAAWF,CAAQ,SAAS,CAAC,CAC5G,EAEajD,GAAWuC,GAAkC,CACxDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,UAAWY,GAAK,sBAAsBA,CAAC,KAAK,CAAC,CAC/G,EAOalD,GAA8BuC,GACvCC,GAA4BD,CAG3B,EAEQtC,GAAc,CAACqC,EAAyBC,IAA4C,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,cACnBY,GAAK,YAAYF,CAAQ,oBAAoBA,CAAQ,WAAWT,EAAW,KAAK,MAAMW,CAAC,WAAWF,CAAQ,KACtGT,EAAW,IAAI,MACnB,OAAWA,EAAW,QAAQ,CAAC,CACrC,EAEarC,GAAOoC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEanC,GAAQmC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEalC,GAAQkC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEajC,GAAOiC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahC,GAAkB4C,GAAc,QAAQA,CAAC,yBAAyBA,CAAC,2BAA2BA,CAAC,MAE/F3C,GAAQ+B,GAAkC,CAErDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQhC,EAAc,CAAC,CACzF,EAEaE,GAAe,CAAC2C,EAAU,QAAU;AAAA,qBAC5BA,CAAO;AAAA,qBACPA,CAAO;AAAA,qBACPA,CAAO;AAAA;AAAA,oBAERA,CAAO,cAAcA,CAAO;AAAA,WACrC7C,GAAe,GAAG,CAAC;AAAA;AAAA,EAIjBG,GAAsB2C,GAC/B,uCAAuCA,CAAC,qBAAqBA,CAAC,MAAMA,CAAC,uBAAuBA,CAAC,GAEpF1C,GAAY4B,GAAkC,CACzD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,WAAY7B,GAAoBD,GAAawC,CAAQ,EAAG,OAC3EV,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,EAEa3B,GAAkB,CAAC2B,EAAyBC,IAAwC,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrE,OAAAA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,kBAAmBY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,8BACpF,wCAAwCF,CAAQ,KAAKT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,EAC5F,CACT,EAEa3B,GAAO0B,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEazB,GAAgB,CAACsC,EAAiBE,IAAkB;AAAA,qBAC5CF,CAAO,KAAKE,CAAK;AAAA,cACxBF,CAAO;AAAA,eACNA,CAAO;AAAA;AAAA,6BAEOA,CAAO,cAAcA,CAAO;AAAA;AAAA,kBAEvCA,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYZrC,GAAuBsC,GAAc,mBAAmBA,CAAC,IAEzDrC,GAAY,CAACuB,EAAyBC,IAAsC,CACvF,IAAMe,EAAQL,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAClEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaxB,GAAqBD,GAAcyC,EAAOf,EAAW,KAAK,EAAGA,EAAW,SACxGD,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,ICvVA,IAUMiB,GAkBAC,GAyCOC,GArEbC,GAAAC,EAAA,kBAIAC,KAGAC,KACAC,KAEMP,GAAkBQ,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,KAAM,KAAM,KAAK,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EACjD,MAAM,IAAI,MAAM,4CAA4C,EAG9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMP,GAAkCO,GAA+C,CACrF,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAAK,MAAM,EACzCC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAI,EAElC,IAAMC,EAAQC,EAAc,QAASH,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM,CAAC,EACpEI,EAAOD,EAAc,OAAQH,EAAO,CAAC,EAAE,SAAU,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAAG,CAAC,EACvEK,EAASC,EAAe,SAAUN,EAAO,CAAC,EAAE,SAAUC,EAAa,CAAC,EAEpEM,EAAaC,EAAU,KAAKP,CAAW,EAAI,EAC3CQ,
EAAWC,GAA4BV,EAAO,CAAC,EAAE,QAAQ,EAsB/D,MAAO,CACL,KAAM,gBACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKO,EAAa,EAAuB,CAAC,CACpE,GACA,gBA1BuBI,GAA+B;AAAA;AAAA,yBAEjCX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,EAAI,CAAC;AAAA;AAAA,IAE9CW,EAAa,iBAAiBT,EAAOE,EAAMC,CAAM,CAAC;AAAA;AAAA,IAElDO,GAAQH,CAAQ,CAAC;AAAA;AAAA,IAEjBE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCJ,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQ9DF,EAAO,YAAY,aAAc,uBAAuB,CAAC;AAAA,IAU7D,CACF,EAEaX,GAAiBmB,GAAkC,CAC9DrB,GAAeqB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQpB,GAA+BoB,EAAQ,MAAM,CAAC,CAChE,ICxEA,IAiBMC,GAqGAC,GAsEAC,GAQOC,GAIAC,GAIAC,GAMAC,GAIAC,GAsBAC,GAIAC,GAMAC,GAMAC,GAMAC,GAlQbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KASMjB,GACF,CAACkB,EAA4BC,EAA0BC,EAA0BC,EAChFC,EAAoBC,EAAsBC,EAAsCC,EAChFC,EAAeC,EAAeC,EAAoBC,IAAsC,CACvF,IAAIC,EACAC,EACA,OAAON,GAAa,SACtBK,EAAmBC,EAAmB,CAACC,EAAGC,IAAM,GAAGR,CAAQ,KAAKO,CAAC,MAAMC,CAAC,KAC/D,OAAOR,GAAa,WAC7BK,EAAmBC,EAAmBN,GAEtCK,EAAmBL,EAAS,OAC5BM,EAAmBN,EAAS,QAG9B,IAAMS,EAASC,EAAe,aAAcP,EAAYP,EAAW,OAAQ,CAAC,EACtEW,EAAII,EAAc,QAASV,EAAOP,EAAM,OAAQ,CAAC,EACjDc,EAAIG,EAAc,QAAST,EAAOP,EAAM,OAAQ,CAAC,EAEnDiB,EACJ,GAAIf,EACF,GAAIC,EAAa,CACf,IAAMe,EAAgBC,EAAU,KAAKpB,CAAK,IAAM,EAC1CqB,EAAgBD,EAAU,KAAKnB,CAAK,IAAM,EAC1CqB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC3EuB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC7EkB,GAAiBE,EACnBH,EAAaH,EAAO,YAChB,aACAH,EACIO,EAAgB,GAAGN,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,EACvFQ,EAAgB,GAAGP,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,CAAC,CAAC,EAEjGI,EAAa;AAAA,kCACSH,EAAO,gBAAgB,iBAAiB,CAAC;AAAA,4BAC/CF,EAAE,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,4BACrDD,EAAE,2BAA2B,gBAAiBC,CAAM,CAAC;AAAA,cAEjEA,EAAO,YACH,aACAH,EACIP,GAA+BiB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,kBACpDR,GAA+BkB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAAA,WAGvF,MACEI,EAAaH,EAAO,YAChB,aAAcH,EAAiBC,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAEzF,CACL,GAAI,CAACV,EACH,MAAM,IAAI,MAAM,sFAAsF,EAGxG,IAAMoB,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,eAAeF,CAAC,eAAeA,CAAC,IAC9CG,EAAc,eAAeH,CAAC,eAAeA,CAAC,IACpD,MAAO;AAAA,+BACcA,CAAC,MAAMX,EAAO,gBAAgB,qBAAqBW,CAAC,GAAG,CAAC;AAAA,yBAC9DA,CAAC,MAAMb,EAAE,2BAA2B,gBAAgBa,CAAC,GAAIX,CAAM,CAAC;AAAA,yBAChEW,CAAC,MAAMZ,EAAE,2BAA2B,gBAAgBY,CAAC,GAAIX,CAAM,CAAC;AAAA,wBACjEW,CAAC,aAAaA,CAAC;AAAA,wBACfA,CAAC,aAAaA,CAAC;AAAA,4BACXA,CAAC,aAAaA,CAAC;AAAA,4BACfA,CAAC,aAAaA,CAAC;AAAA,cAC7BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIhB,EAAiBiB,EAAaC,CAAW,CAAC;AAAA,WAE9E,EACIpB,IAAe,EACjBS,EAAa;AAAA;AAAA,cAETM,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,uGAGtCN,EAAa;AAAA,cACTM,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,WAGrD,CAEA,MAAO;AAAA,UACHzB,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBc,EAAGC,EAAGC,CAAM,CAAC;AAAA;AAAA,UAE9EL,GAA4B,EAAE;AAAA;AAAA,UAE9BX,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEmB,CAAU;AAAA,QAEhB,EAEEpC,GACF,CAACgD,EAAcC,EAAkBlB,EAAeC,EAAeR,EAC9DI,EAAmCsB,EAAyBnB,EAAE,WAA0B,CACvF,IAAMoB,EAAc,CAACb,EAAU,SAASP,EAAE,KAAMC,EAAE,IAAI,EAClDoB,EAAcrB,EAAE,KAChBsB,EAAaf,EAAU,KAAKP,EAAE,IAAI,EAElCV,EAAY,GACZE,EAA8B,GAG5B+B,EAAc,CAACH,CAAW,EAChC,GAAIA,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUzB,EAAE,KAAMC,EAAE,KAAM,EAAK,EACrE,GAAI,CAACuB,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEH,EAAcG,EACdF,EAAaf,EAAU,KA
AKc,CAAW,EACvC,IAAMf,EAAgBC,EAAU,KAAKP,EAAE,IAAI,IAAM,EAC3CQ,EAAgBD,EAAU,KAAKN,EAAE,IAAI,IAAM,EAC3CQ,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EAC9EU,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EACpFsB,EAAY,KAAKjB,CAAa,EAC9BiB,EAAY,KAAKf,CAAa,EAC9Be,EAAY,KAAKd,CAAoB,EACrCc,EAAY,KAAKb,CAAoB,EAErC,IAAIgB,EAAkB,EACtB,QAASC,EAAI,EAAGA,EAAIN,EAAY,OAAQM,IAAK,CAC3C,IAAMC,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS2B,CAAC,GAAK,EACpCE,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS0B,CAAC,GAAK,EAC1C,GAAIC,IAASC,EACXH,GAAmBE,MAEnB,MAEJ,CACIF,EAAkB,IAAM,GAC1BlC,EAA8B,GAC9BF,EAAY,KACHgB,GAAiBE,GAAiBC,GAAwBC,KACnEpB,EAAY,GAEhB,MAEEA,EAAY,GAEd,OAAAiC,EAAY,KAAKjC,CAAS,EAEnB,CACL,KAAA2B,EACA,YAAa,CACX,KAAMC,EAAWK,EAAY,IAAKV,GAAMA,EAAE,SAAS,CAAC,EAAE,KAAK,GAAG,EAC9D,kBAAmB,CAAC,OAAQ,MAAM,CACpC,EACA,gBAAkB3B,GAAiBlB,GAC/BkB,EAAcc,EAAE,KAAMC,EAAE,KAAMoB,EAAa/B,EAAW8B,EAAa5B,EAA6BC,EAChGO,EAAE,SAAUC,EAAE,SAAUkB,EAAgBtB,CAAwB,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAsB,CAAC,EAC3F,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKf,EAAU,KAAKc,CAAW,EAAI,CAAC,CAAC,EACxE,GAAGS,EAA2B9B,EAAE,KAAMC,EAAE,KAAMoB,CAAW,CAC3D,CACF,EACF,CACF,EAEEnD,GACF,CAAC6D,EAAyBd,EAAcxB,EAA8BI,EACrEqB,EAAmBC,IAAkC,CACpDY,EAAQ,QAAQ9D,GACZgD,EAAMC,GAAY,GAAIa,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGtC,EAAUI,EACtEsB,CAAc,CAAC,CACrB,EAEShD,GAAO4D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa7B,GAAO2D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa5B,GAAS0D,GAAkC,CACtD7D,GACI6D,EAAS,QAAU,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEa3B,GAAOyD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa1B,GAAOwD,GAAkC,CACpD,IAAMC,EAAO5B,EAAc,QAAS2B,EAAQ,OAAO,CAAC,EAAE,SAAUA,EAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,MAE7F7D,GACI6D,EAAS,MAAQ,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,cAAcD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,qBAAqBD,CAAC,IAAIC,CAAC,GAAG,EAC7G;AAAA,wBACkB+B,CAAI,SAASA,CAAI,QAAQA,CAAI;AAAA,iBACpCA,CAAI;AAAA,iBACJA,CAAI;AAAA,uBACEA,CAAI;AAAA,iBACVA,CAAI;AAAA;AAAA,+BAEUA,CAAI,6BAA6BA,CAAI,qBAAqBA,CAAI,IAV1EA,IAAS,MAAQ,QAAU,EAW5B;AAAA;AAAA,oCAEkBA,CAAI,eAAeA,CAAI,cAAcA,CAAI;AAAA;AAAA,oBAEzDA,CAAI;AAAA;AAAA,OAEjB,CACP,EAEaxD,GAAOuD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEaxB,GAAWsD,GAAkC,CACxD7D,GACI6D,EAAS,UAAY,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEavB,GAAQqD,GAAkC,CACrD7D,GACI6D,EAAS,OAAS,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACnG,QAAwB,CAC9B,EAEatB,GAAkBoD,GAAkC,CAC/D7D,GACI6D,EAAS,iBAAmB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAC3G,OAAW,QAAwB,CACzC,EAEarB,GAAemD,GAAkC,CAC5D7D,GACI6D,EAAS,cAAgB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EACxG,OAAW,QAAwB,CACzC,ICtQA,IAeMgC,GA4BAC,GAWAC,GAmBAC,GAkEOC,GAcAC,GAzJbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMX,GAAiB,CAACY,EAA+BC,IAAuB,CAC5E,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,IAAME,EAAiB,EACjBC,EAAiBH,EAAOE,CAAc,EACtCE,EAAYD,EAAe,SAC3BE,EAAYF,EAAe,KAAK,OACtCH,EAAO,QAAQ,CAACM,EAAOC,IAAM,CAC3B,GAAIA,IAAML,EAIV,IAAII,EAAM,WAAaF,EACrB,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIE,EAAM,KAAK,SAAWD,EA
CxB,MAAM,IAAI,MAAM,0CAA0C,EAE5DC,EAAM,KAAK,QAAQ,CAACE,EAAKD,IAAM,CAC7B,GAAIA,IAAMN,GAAQO,IAAQL,EAAe,KAAKI,CAAC,EAC7C,MAAM,IAAI,MAAM,kCAAkC,CAEtD,CAAC,EACH,CAAC,CACH,EAEMlB,GAA0B,CAACoB,EAAyBC,IAAwC;AAAA;AAAA,wCAE1DD,CAAe,MAAMC,CAAmB;AAAA,gCAChDD,CAAe;AAAA;AAAA;AAAA;AAAA;AAAA,aAKlCA,CAAe;AAAA,KAGtBnB,GAAmB,CAACU,EAAkCW,IAA0B,CACpF,IAAMF,EAAkBT,EAAO,OAEzBY,EAAsB,CAAC,EAC7B,QAASL,EAAI,EAAGA,EAAIE,EAAiB,EAAEF,EAAG,CACxC,IAAMM,EAAgBF,EAAO,YAAY,aAAcX,EAAOO,CAAC,EAAE,aAAa,SAAS,CAAC,EACpFE,IAAoB,EACtBG,EAAU,KAAKC,CAAa,EACnBN,IAAM,EACfK,EAAU,KAAK,qBAAqBL,CAAC,QAAQM,CAAa,IAAI,EACrDN,IAAME,EAAkB,EACjCG,EAAU,KAAK,UAAUC,CAAa,IAAI,EAE1CD,EAAU,KAAK,0BAA0BL,CAAC,OAAOM,CAAa,IAAI,CAEtE,CACA,OAAOD,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMrB,GACF,CAACS,EAA+Bc,EAAsBC,EAAuBC,IAAoC,CAC/G,IAAMC,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAmB,IAAI,MAAcnB,EAAO,MAAM,EAClDoB,EAAY,IAAI,MAAqBpB,EAAO,MAAM,EAEpDqB,EAAc,EACZC,EAAwD,CAAC,EACzDC,EAAa,CAAC,EACdC,EAAoC,CAAC,CAAC,QAAuB,KAAMP,CAAU,CAAC,EACpF,QAASV,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCc,GAAerB,EAAOO,CAAC,EAAE,KAAKO,CAAY,EAC1CK,EAAiBZ,CAAC,EAAIc,EACtBE,EAAW,KAAKvB,EAAOO,CAAC,EAAE,KAAK,MAAM,EACrCa,EAAUb,CAAC,EAAIkB,EAAc,QAAQlB,CAAC,GAAIS,EAAUO,EAAWhB,CAAC,CAAC,EACjEe,EAAkB,KAAK,MAAM,EAC7BE,EAAgB,KAAK,CAAC,QAAuB,KAAML,EAAiBZ,CAAC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCiB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAOO,CAAC,EAAE,IAAI,CAAC,EAEpEiB,EAAgB,KAAK,GAAGE,EAA2BX,CAAW,CAAC,EAE/D,IAAMJ,EAASgB,EAAe,SAAUX,EAAUD,EAAY,MAAM,EAC9Da,EAAcjB,EAAO,WAAW,UAAWG,CAAY,EACvDJ,EACF,MAAM,KAAK,MAAMS,EAAiB,MAAM,EAAE,KAAK,CAAC,EAAE,IAAIZ,GAAK,4BAA4BA,CAAC,EAAE,EAAE,KAAK,GAAG,EAClGsB,EAAmBC,GAA+B;AAAA;AAAA,KAEzD,IAAM,CACHA,EAAa,gBAAgB,aAAc,KAAK,EAChD,QAASvB,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjCuB,EAAa,gBAAgB,mBAAmBvB,CAAC,GAAI,KAAK,EAE5D,OAAOuB,EAAa,iBAAiB,GAAGV,EAAWT,CAAM,CAC3D,GAAG,CAAC;AAAA;AAAA,IAENtB,GAAwB8B,EAAiB,OAAQT,CAAmB,CAAC;AAAA;AAAA,IAErEoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DnB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,2CAEbiB,CAAW;AAAA;AAAA,0CAEZT,EAAiB,MAAM,MAAMT,CAAmB;AAAA,QAClFkB,CAAW;AAAA;AAAA;AAAA,MAGbtC,GAAiB8B,EAAWT,CAAM,CAAC;AAAA,KAGnC,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGG,CAAY,GAAI,kBAAAQ,CAAiB,EACxD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAa,SAAAC,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKC,EAAa,EAAuB,CAAC,EAClE,gBAAAO,CACF,GACA,gBAAAK,CACF,CACF,EAESrC,GAAS,CAACuC,EAAyBC,IAAuC,CACrF,IAAMhC,EAAS+B,EAAQ,OACjBE,EAAajC,EAAO,CAAC,EAAE,KACvBc,EAAeI,EAAU,cAAcc,EAAW,KAAMC,EAAW,MAAM,EAC/E7C,GAAeY,EAAQc,CAAY,EACnC,IAAMC,EAAckB,EAAW,MAAM,EACrClB,EAAYD,CAAY,EACpBd,EAAO,OAAO,CAACkC,EAAK5B,IAAU4B,GAAO5B,EAAM,KAAK,OAASQ,EAAeR,EAAM,KAAKQ,CAAY,EAAI,GAAI,CAAC,EAE5G,IAAMqB,EAAiBnC,EAAO,OAAOM,GAASY,EAAU,KAAKZ,EAAM,IAAI,EAAI,CAAC,EAC5EyB,EAAQ,QACJxC,GAAwB4C,EAAgBrB,EAAcC,EAAaf,EAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQmC,CAAc,CAAC,CACtH,EAEa1C,GAAyBuC,GAClCI,GAA4B,CAAC,KAAMJ,EAAW,IAAc,CAAC,IC1JjE,IAiBaK,GAuBAC,GAaAC,GAUAC,GA/DbC,GAAAC,EAAA,kBAGAC,KACAC,KAaaP,GACT,CAACQ,EAA0CC,EAAmBC,EAAW,QAAkB,CACzF,OAAQF,EAAW,WAAY,CAC7B,IAAK,OACH,MAAO,sBAAsBC,CAAS,UACxC,IAAK,UACH,MAAO,YAAYA,CAAS,YAAYA,CAAS,yBACnD,IAAK,OACH,MAAO,wBAAwBA,CAAS,IAAIC,CAAQ,yBAAyBD,CAAS,IAClFC,CAAQ,yBACd,IAAK,cACH,MAAO,eAAeD,CAAS,cAAcA,CAAS,UAAUC,CAAQ,8BACpEA,CAAQ,qBACd,IAAK,YACH,MAAO,kBAAkBA,CAAQ,6CAA6CD,CAAS,UACzF,IAAK,GACH,MAAO,GAET,QACE,MAAM,IAAI,MAAM,0BAA0BD,EAAW,UAAU,EAAE,CACrE,CACF,EAESP,GACT,CAACO,EAA0CG,IAAqC,CAC1EH,EAAW,aAAe,OAC5BG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,OAAQ,EAAG,CAAC,OAAsB,KAAMA,EAAW,OAAQ,CAAC,EAC/FA,EAAW,aAAe,cACnCG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,KAAM,EAAG,CAAC,OAAsB,KAAMA,EAAW,IAAK,CAAC,EAC1FA,EAAW,aAAe
,aACnCG,EAAe,KAAK,CAAC,OAAsB,KAAMH,EAAW,KAAM,CAAC,CAEvE,EAESN,GAA2B,CAACM,EAA0CI,IAAgC,CAC7GJ,EAAW,aAAe,OAC5BI,EAAS,KAAK,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,WAAY,KAAM,KAAK,CAAC,EACrEJ,EAAW,aAAe,cACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAAC,EAC9DJ,EAAW,aAAe,aACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAE9C,EAEaT,GACRK,GAAgF,CAC/E,IAAMK,EAAaL,GAAY,YAAwB,GACvD,GAAIK,IAAe,cAAe,CAChC,GAAM,CAACC,EAAOC,CAAI,EAAIP,GAAY,mBAAyC,CAAC,GAAK,EAAG,EACpF,MAAO,CAAC,WAAAK,EAAY,MAAAC,EAAO,KAAAC,CAAI,CACjC,SAAWF,IAAe,OAAQ,CAChC,GAAM,CAACG,EAASC,CAAO,EAAIT,GAAY,mBAAyC,CAACU,GAAUC,EAAQ,EACnG,MAAO,CAAC,WAAAN,EAAY,QAAAI,EAAS,QAAAD,CAAO,CACtC,SAAWH,IAAe,YAAa,CACrC,GAAM,CAACC,CAAK,EAAIN,GAAY,mBAAiC,CAAC,GAAI,EAClE,MAAO,CAAC,WAAAK,EAAY,MAAAC,CAAK,CAC3B,CACA,MAAO,CAAC,WAAAD,CAAU,CACpB,IC7EJ,IAqBaO,GAeAC,GApCbC,GAAAC,EAAA,kBAqBaH,GAAc,CAACI,EAAmBC,IAAqB,CAClE,OAAQD,EAAW,CACjB,IAAK,GACH,OAAOC,EACT,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,QACE,MAAM,IAAI,MAAM,GAAGD,CAAS,8BAA8B,CAC9D,CACF,EAEaH,GAAeK,GAA6B;AAAA,QACjDA,EAAU,iDAAmD,EAAE;UCrCvE,IAqBaC,GArBbC,GAAAC,EAAA,kBAqBaF,GAAiBG,GAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAO3CA,CAAS,YAAYA,CAAS,YAAYA,CAAS;AAAA;IC5B7D,IA8BMC,GAiBAC,GAyBOC,GAuFPC,GAiBAC,GAKOC,GA0JPC,GA8EOC,GA7ZbC,GAAAC,EAAA,kBAqBAC,KAEAC,KAEAC,KACAC,KAEAC,KAEMd,GAA6B,CAACe,EAAoBC,IAClDD,EACK;AAAA;AAAA;AAAA,wDAG6CC,EAAY,iBAAmB,EAAE;AAAA,UAI9E;AAAA;AAAA;AAAA,gDAGqCA,EAAY,iBAAmB,EAAE;AAAA,UAK3Ef,GAAyB,CAACgB,EAAqBC,IAC/CD,EACK;AAAA;AAAA;AAAA;AAAA,UAIDC,IAAqB,EAAI,GAAK,6DAA6D;AAAA;AAAA;AAAA;AAAA;AAAA,YAKzFA,IAAqB,EAAI,GAAK,2CAA2C;AAAA,WAG1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMCA,IAAqB,EAAI,GAAK,yCAAyC;AAAA,WAKtEhB,GACT,CAACiB,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,KAAe,CACpF,IAAMC,EAAaL,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CO,EAAaN,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CQ,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EACtCP,EAAmBS,EAAaP,EAAc,CAAC,EAC/CS,EAAgBP,EAAYF,EAAc,CAAC,EAEjD,GAAI,GAAIH,GAAcC,IAAqB,GAAKC,EAAc,CAAC,IAAM,GAC7D,CAACF,IAAeC,IAAqB,GAAKA,IAAqB,KACjES,EAAaP,EAAc,CAAC,IAAM,GAAKE,EAAYF,EAAc,CAAC,IAAM,GAAKD,EAAc,CAAC,IAAM,GACtG,MAAM,IAAI,MAAM,iBAAiBF,CAAU,8BACvCC,CAAgB,yBAAyBC,EAAc,CAAC,CAAC;AAAA,oCACjCD,CAAgB;AAAA,eACrCS,CAAU,yCAAyCP,EAAc,CAAC,CAAC,eACtEE,CAAS,0CAA0CF,EAAc,CAAC,CAAC,kBACnED,EAAc,CAAC,CAAC,aAAa,EAEnC,MAAO;AAAA,yCAC4BD,CAAgB,IAAIG,CAAI,MAAMM,EAAaT,CAAgB,MAAMU,CAAU;AAAA,2CACzEP,CAAI,MAAMK,EAAaP,EAAc,CAAC,CAAC,MAAMG,CAAS;AAAA;AAAA,uBAE1EH,EAAc,CAAC,CAAC;AAAA,uBAChBA,EAAc,CAAC,CAAC;AAAA,2BACZD,CAAgB;AAAA,oBACvBI,CAAS;AAAA;AAAA,2BAEFF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAUrEG,EAAS,IAAM,iBAAiB;AAAA,IAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,8CACvCS,CAAU;AAAA;AAAA,oBAEpCF,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,iBACpGC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,wBAE9CH,CAAI;AAAA;AAAA;AAAA,8BAGEQ,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAM/B7B,GAA2BiB,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,0CAInBa,CAAa;AAAA;AAAA;AAAA,sFAI7Cb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAU/BE,IAAqB,EAAI,GAAK,4DAA4D;AAAA;AAAA,YAE1FjB,GAAuBgB,EAAYC,CAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5D,EAEEf,GAAyB,CAACY,EAAoBC,IAC9CD,EACK;AAAA;AAAA;AAAA,yCAG8BC,EAAY,iBAAmB,EAAE;AAAA,cAI/D;AAAA;AAAA;AAAA,iCAGsBA,EAAY,iBAAmB,EAAE;AAAA,cAK5DZ,GAA2Ba,GAC7BA,EAAa,gDAAkD,gDAItDZ,GACT,CAACc,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,GACtEM,EAA4B,KAAkB,CAC7C,IAAML,EAAaN,EA
Ac,CAAC,EAAIC,EAAc,CAAC,EAC/CM,EAAaP,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CO,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EAE5C,GAAI,EAAEG,EAAaR,EAAc,CAAC,IAAM,GAAKO,EAAaP,EAAc,CAAC,IAAM,GACzEE,EAAYF,EAAc,CAAC,IAAM,GACrC,MAAM,IAAI,MAAM,cAAcQ,CAAU,yCACpCR,EAAc,CAAC,CAAC,gBAAgBO,CAAU,yCAC1CP,EAAc,CAAC,CAAC,eAAeE,CAAS,yCAAyCF,EAAc,CAAC,CAAC,EAAE,EAEzG,IAAMW,EAAgBH,EAAaR,EAAc,CAAC,EAC5CY,EAAgBL,EAAaP,EAAc,CAAC,EAC5CS,EAAgBP,EAAYF,EAAc,CAAC,EAC3Ca,EAAgBH,EAClB;AAAA;AAAA;AAAA,gDAGsCL,CAAU;AAAA,gDACVC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA,iDAKTE,CAAU,2BAA2BR,EAAc,CAAC,CAAC;AAAA,mDACnDO,CAAU,2BAA2BP,EAAc,CAAC,CAAC;AAAA,YAC5FjB,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,iDAIRM,CAAS,2BAA2BF,EAAc,CAAC,CAAC;AAAA,uDAC9CM,CAAU,2BAA2BN,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,uCAGrEJ,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAO5CK,CAAI;AAAA;AAAA;AAAA,2DAG2BD,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,0BAI7DH,EAAa,oCAAoCG,EAAc,CAAC,CAAC,KACpD,iCAAiCA,EAAc,CAAC,CAAC,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAUzBA,EAAc,CAAC,CAAC;AAAA;AAAA,4DAEdA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,MAKlE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4CAMkCK,CAAU;AAAA;AAAA,kCAEpBM,CAAa;AAAA,kCACbC,CAAa;AAAA,kCACbH,CAAa;AAAA;AAAA;AAAA;AAAA,sCAITE,CAAa;AAAA,wCACXC,CAAa;AAAA;AAAA;AAAA,QAG7C7B,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sCAKfa,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMrBb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOvCK,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOpBjB,GAAwBa,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBrC,MAAO;AAAA,yCAC4BI,CAAI,KAAKM,CAAU,MAAMC,CAAU;AAAA,yCACnCP,CAAI,KAAKK,CAAU,MAAMJ,CAAS;AAAA,yBAClDH,EAAc,CAAC,CAAC;AAAA,yBAChBA,EAAc,CAAC,CAAC;AAAA,sBACnBG,CAAS;AAAA;AAAA,2BAEJF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,kBAInEG,EAAS,IAAM,iBAAiB;AAAA,MAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,sBAE7EO,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,mBACxFC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,4BAE5CH,CAAI;AAAA,MAC1BY,CAAa;AAAA;AAAA,CAGf,EAEE3B,GACF,CAAC4B,EAAmBC,EAAkBC,EAAyBC,EAC9DC,EAAuCC,EAAiB,KAAkB,CACzE,GAAM,CAACC,EAAaC,EAAaC,CAAU,EAAIJ,EACzC,CAACK,EAAeC,EAAWC,EAAWC,CAAc,EAAIT,EACxDU,EAAiBC,GAAiBR,EAAaE,CAAU,EACzDO,EAAiBD,GAAiBP,EAAaC,CAAU,EACzDQ,EAAWC,GAA4Bd,EAAU,CAAC,EAAE,KAAK,MAAM,EAC/De,EAAc,IAAM,CACxB,IAAMC,EAAQT,EAAU,KAClBU,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBX,EAAU,KAAK,OAAO,IACpD,QAASY,EAAIH,EAAQ,EAAI,EAAGI,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAV,EAAe,QAAQS,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcF,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBE,CACT,EACMG,EAAc,IAAM,CACxB,IAAMC,EAAQd,EAAU,KAClBS,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBV,EAAU,KAAK,OAAO,IACpD,QAASW,EAAIG,EAAQ,EAAI,EAAGF,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAR,EAAe,QAAQO,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcI,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBJ,CACT,EAwCA,MAvCe;AAAA,kEAC6CZ,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBkB,EAAY,CAAC;AAAA,kBACLR,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kEAKcD,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBwB,EAAY,CAAC;AAAA,kBACLb,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6DAKSe,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BACnEhB,CAAS;AAAA;AAAA;AAAA;AAAA,UAKzBC,EACI,mBAAmBI,EAAiB,cAAgB,GAAGqB,GA
AY1B,EAAWgB,CAAQ,CAAC,aAAa,IAChE,EAAsC;AAAA,UAC9Ed,CAAe;AAAA,UACfU,EAAe,aAAa,oBAAqB,OAAO,CAAC;AAAA;AAAA;AAAA,KAK/D,EAESvC,GACT,CAACsD,EAA+BC,EAAoDC,EACnFC,EACAzB,EAAiB,KAAyD,CACzE,IAAM0B,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAASL,EAAO,CAAC,EAAE,KACnBM,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAYL,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAC5FO,EAAYC,EAAU,KAAKF,CAAS,EACpCG,EAAYP,EAAOA,EAAO,OAAS,CAAC,EACpCQ,EAAWR,EAAOA,EAAO,OAAS,CAAC,EACnCS,EAAYR,EAAOA,EAAO,OAAS,CAAC,EACpCS,EAASF,EAAW,IAAM,GAAKC,EAAY,IAAM,EAGjDE,EAAoBJ,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDpD,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDyD,EAAW,CACf,KAAK,KAAKH,EAAYtD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKJ,EAAYpD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKN,EAAYlD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,CAC/D,EAEME,EAAaH,EAAS,EAAI,EAC1BI,EAAa,CAAC,GAAGZ,EAAYK,EAAWC,EAAWK,CAAU,EAC7DzB,EAAQ0B,EAAW,OACnBC,EAAa,CAAC,GAAGZ,EAAYK,EAAUC,EAAYI,CAAU,EAC7DnB,EAAQqB,EAAW,OACnBC,EAAkB,CAACX,EAAWE,EAAWE,EAAYI,CAAU,EAC/DI,EAAoC,CACxC,CAAC,OAAsB,KAAMV,CAAS,EAAG,CAAC,OAAsB,KAAME,CAAS,EAC/E,CAAC,OAAsB,KAAMD,CAAQ,CACvC,EACAU,GAA6BrB,EAAsBoB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2Bf,EAAWU,EAAYC,CAAU,CAAC,EACrF,IAAMK,EAAwD,CAAC,OAAQ,MAAM,EAEvElD,EAAU0B,EAAO,OAAS,EAC5B1B,IACF+C,EAAgB,KAAK,GAAGE,EAA2BvB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEwB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BH,CAAe,CAAC,EAEnE,IAAMK,EAAmBC,IAA+B,CACtD,IAAMjC,GAAYe,EAAU,OACtBrD,GAAYwE,GAAiB,YAAa3B,EAAO,CAAC,EAAE,SAAUP,GAAW,CAAC,EAC1EJ,GAAWC,GAA4BU,EAAO,CAAC,EAAE,QAAQ,EAEzD4B,EAAIC,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUR,EAAOyB,CAAU,EAC5Da,GAAID,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUF,EAAOmB,CAAU,EAC5Dc,GAAS9C,EAAe,SAAUe,EAAO,CAAC,EAAE,SAAUoB,EAAgB,OAAQH,CAAU,EACxFe,GAAiB,CAACJ,EAAGE,EAAC,EAC5B,GAAIxD,EAAS,CACX,IAAM2D,EAAiBvD,EAAiBuC,EAAa,EACrDe,GAAe,KAAKH,EAAc,OAAQ7B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQiC,CAAc,CAAC,CACtG,CACA,IAAMC,GACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAC7GC,GAAyBlC,EAAsBiC,EAAQ,EACvD,IAAME,GAAW9C,GAA4ByC,GAAO,KAAK,MAAM,EACzDxD,GAAkB8D,GAAqBpC,EAAsB8B,GAAO,KAAK,MAAOK,EAAQ,EACxFE,GAAmB7F,GACrBwE,EAAY3C,EAASC,GAAiB,CAACpB,GAAWyE,EAAGE,GAAGC,EAAM,EAAG,CAACzB,EAAYC,EAAYC,CAAS,EACnG9B,CAAc,EAClB,MAAO;AAAA,IAEHgD,GAAa,iBAAiBQ,EAAQ,EAAE,0BAA0B/E,EAAS,EAAE,iBACzE,GAAG6E,GAAgBD,EAAM,CAAC;AAAA,IACtCO,EAAgB;AAAA,IAERxB,EAASzE,GAA2B0E,EAAmBxD,EAAe8B,GAAUlC,EAAS,EAChFX,GAAuBuE,EAAmBxD,EAAe8B,GAAUlC,EAAS,CAAC;AAAA,oBAE5F,EACA,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAG4D,CAAiB,IAAId,EAAqB,UAAU,IAAIa,CAAM,IAAIpC,CAAc,GACzF,kBAAA8C,CACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGgB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAK,CACF,GACA,gBAAAI,CACF,CACF,ICtfJ,IAiCMc,GA4HOC,GA7JbC,GAAAC,EAAA,kBAqBAC,KACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAoBC,EAAoBC,EAAmBC,EAAU,GAC9FC,EAA4BC,EAAoB,EAAGC,EAAoB,EAAGC,EAAmB,EAC7FC,EAAW,QAAkB,CAC5B,IAAMC,EAAeF,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,uBACT,IAAK,GACH,MAAO,kBAAkBC,CAAQ,8CACnC,IAAK,GACH,MAAO,2BACT,QACE,MAAM,IAAI,MAAM,oBAAoBD,CAAgB,oBAAoB,CAC5E,CACF,EACMG,EAAeH,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,oDACT,IAAK,GACH,MAAO,wDACT,QACE,MAAM,IAAI,MAAM,oBAAoBA,CAAgB,oBAAoB,CAC5E,CACF,EACMI,EAAgBZ,EAAiB;AAAA;AAAA,MAGA;AAAA;AAAA,MAIjCa,EAAkBb,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCc,EAAUd,EAAiB,2BAA6B,2BACxDe,EAASf,EAAiB,2BAA6B,2BACvDgB,EAAMhB,EAAiB,MAAQ,MAC/BiB,EAAMjB,EAAiB,MAAQ,MAC/BkB,EAAe;AAAA;AAAA,qB
AENlB,EAAiB,gCAAkC,+BAA+B;AAAA,mBACpFgB,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA,iBAELC,CAAG;AAAA,iBACHA,CAAG;AAAA;AAAA;AAAA,gBAGJA,CAAG;AAAA,oBACCE,GAAYb,EAAmBG,CAAQ,CAAC;AAAA;AAAA;AAAA,8BAG9BK,CAAO,2BAA2BC,CAAM;AAAA,QAC9DH,CAAa;AAAA;AAAA,QAEbF,EAAYJ,CAAiB,CAAC;AAAA;AAAA,qBAI1Bc,EAAUpB,EAAkBC,GAAaE,EAAW;AAAA,wBACxCG,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SACbN,GAAYD,EAAY;AAAA,wBACxCI,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SAEzCY,EAAU,GAAGV,EAAYJ,CAAiB,CAAC,GAE3Ce,EAAUH,GAAYX,EAAkBC,CAAQ,EAChDc,EACFvB,EAAiBmB,GAAYb,EAAmBG,CAAQ,EAAIU,GAAYZ,EAAmBE,CAAQ,EACjGe,EACFxB,EAAiBmB,GAAYZ,EAAmBE,CAAQ,EAAIU,GAAYb,EAAmBG,CAAQ,EACjGgB,EAAkBC,GAAqBrB,EAAYiB,EAASb,CAAQ,EAsB1E,MArBiB;AAAA,yDACkCc,CAAK;AAAA,QACtDvB,EAAiBoB,EAAUC,CAAO;AAAA;AAAA;AAAA,yDAGeG,CAAK;AAAA,QACtDxB,EAAiBqB,EAAUD,CAAO;AAAA;AAAA;AAAA,gEAGsBE,CAAO;AAAA,0BAC7Cd,CAAgB;AAAA;AAAA;AAAA;AAAA,uBAInBR,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGa,CAAe;AAAA,QACfc,GAAYvB,CAAO,CAAC;AAAA,QACpBqB,CAAe;AAAA;AAAA;AAAA,MAKnB,EAESnC,GACT,CAACsC,EAA+BvB,EAA4BwB,EAAgCC,EAC3FC,EAAmBC,EAAkBC,EAAkBC,IAAoD,CAC1G,IAAMlC,EAAiBK,EAAW,SAAW,OACvC8B,EAAanC,EAAiB4B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAWrC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAYtC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAcvC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAASxC,IAAmBmC,EAAa,IAAM,GAAKA,EAAa,IAAM,IAAMI,EAAc,IAAM,EAGjGE,EAAYzC,EAAiBuC,EAAcF,EAAWC,EACtDI,EAAY1C,EAAiBqC,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,iCAAiCD,CAAQ,EAAE,EAEtE,IAAMrC,EAAmBgC,EAAUxC,GAAkBmC,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EY,EAAaJ,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDI,EAAaL,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDK,EAAY,KAAK,IAAIN,EAAc,CAAC,EAAInC,EAAkBmC,EAAc,CAAC,CAAC,EAC1E1C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAW6B,EAAWiB,IAAc,EACpCC,EAAeV,EAAS,CAAChC,EAAkB,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EAE3D2C,GAAoC,CACxC,CAAC,OAAsB,KAAMrB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAM,CAAC3B,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EAC7G,CAAC,OAAsB,KAAMA,EAAW,OAAO,EAAG,CAAC,OAAsB,KAAMA,EAAW,SAAS,CACrG,EACA+C,GAA6B/C,EAAY8C,EAAe,EACxDA,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAM0B,GAAwD,CAAC,OAAQ,MAAM,EACzErB,IACFkB,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE0B,GAAkB,KAAK,MAAM,GAE/BH,GAAgB,KAAK,GAAGE,EAA2BxB,CAAW,CAAC,EAE/D,IAAM0B,GAAmBC,IAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,MAAO,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ,CAAC,EAC9E,CAAC,KAAM,WAAY,KAAM,MAAO,OAAQ,CAAC,CAC3C,EACAC,GAAyBrD,EAAYoD,CAAQ,EAG7C,IAAME,GAAanB,EAAS,EAAI,EAC1BoB,GAAIC,GAA4BjC,EAAO,CAAC,EAAE,QAAQ,EACpDkC,GAAmB;AAAA,qDACsBtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,8BAChDpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,6EAEsBpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,qCAEjEpB,EAAS,MAAQ,EAAE;AAAA,SAE1CuB,GAAIC,EACN,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQpB,IAAqB,EAAI,EAAIA,CAAgB,EAC3FyD,GAAID,EAAc,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EAC5EO,GAAiB,CAACH,GAAGE,EAAC,EACtBE,GAASC,EAAe,SAAUxC,EAAO,CAAC,EAAE
,SAAUC,EAAY,OAAQ8B,EAAU,EAC1F,GAAI1B,EAAS,CACX,IAAMoC,EAAOL,EAAc,OAAQpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EACxFO,GAAe,KAAKG,CAAI,EACxBP,IAAoB;AAAA,0DAC4BtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,+BACpD5D,EAAiB,IAAM,GAAG,GAAGwC,EAAS,MAAQ,EAAE;AAAA,UAEvE,CAEA,MAAO;AAAA,UACL8B,GAAc,yBAAyB,CAAC;AAAA;AAAA;AAAA;AAAA,UAIxCd,GAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGS,GAAgBC,EAAM,CAAC;AAAA,UACnFL,EAAgB;AAAA,UAEdzE,GACIW,EAAgBC,EAAWC,EAAWC,EAAU8B,EAAS5B,EAAY6C,EAAa,CAAC,EAAGA,EAAa,CAAC,EACpGA,EAAa,CAAC,EAAGU,EAAC,CAAC;AAAA,UAEvBpB,EACI+B,GAA2B3B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,CAAS,EACrGuB,GACI5B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,EAAW,GAAO,OACnFf,CAAyB,CAAC,EACxC,EACA,MAAO,CACL,KAAM,eACN,YAAa,CACX,KAAM,GAAG7B,EAAW,QAAQ,IAAIG,CAAgB,IAAIgC,CAAM,IAAIvC,CAAS,IAAIC,CAAS,IAAIC,CAAQ,IAC5F4C,CAAU,IAAIC,CAAU,IAAIC,CAAS,GACzC,kBAAAK,EACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMzB,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,EACF,GACA,gBAAAI,EACF,CACF,IC9QJ,IA6BMkB,GAQAC,GAGAC,GAQAC,GAOAC,GAgBAC,GAmFOC,GA+DAC,GAzNbC,GAAAC,EAAA,kBAqBAC,KACAC,KAEAC,KAEAC,KAGMb,GAAgBc,GAAkB,CACtC,IAAIC,EAAU,EACd,QAASC,EAAI,EAAGA,EAAIF,EAAI,OAAQE,IAC9BD,GAAWD,EAAIE,CAAC,EAElB,OAAOD,CACT,EAEMd,GAAoBgB,GACtB,OAAOA,GAAU,SAAW,CAACA,EAAOA,EAAOA,CAAK,EAAIA,EAElDf,GAAyB,CAACgB,EAAoBC,IAC9CA,GAAY,EACPD,EAGFA,GAAcA,EAAa,IAAMC,EAAW,GAG/ChB,GACF,CAACiB,EAA+DC,EAAmBC,EAAgBH,EAAW,IAChG,CACR,IAAMI,EAAqBrB,GAAuBmB,EAAWF,CAAQ,EACrE,OAAO,KAAK,OAAOC,EAAW,CAAC,GAAKE,EAAS,GAAKA,EAASC,GAAsB,CAAC,CACpF,EAEFnB,GACF,CAACoB,EAA2CC,EAAuCC,EAClFC,EAAmCC,IAAuD,CACrFA,GAAW,OAEbA,EAAUzB,GAAkBqB,EAASC,EAAY,CAAC,EAAGE,EAAQ,CAAC,CAAC,GAEjE,IAAME,EAA6C,CAAC,EAAG,EAAG,EAAGH,CAAW,EACxE,QAASI,EAAQ,EAAGA,EAAQ,EAAGA,IACzBN,EAAQM,CAAK,EAAI,EAAIF,GAAWH,EAAYK,CAAK,IACnDD,EAASC,CAAK,EAAI,KAAK,OAAON,EAAQM,CAAK,EAAIL,EAAYK,CAAK,EAAI,EAAIF,GAAWD,EAAQG,CAAK,EAAI,CAAC,GAGzG,OAAOD,CACT,EAEExB,GACF,CAAC0B,EAA6BC,EAAiBC,EAAkBC,EAAiBC,EACjFC,EAAsBC,EAAqBC,EAAqBC,EAChEC,IAAqG,CACpG,IAAIC,EACAC,EACAC,EACAC,EAOJ,GALIb,IAAQ,UAEVA,EAAM,GAGJ,OAAOA,GAAQ,SAAU,CAC3BU,EAAU,CAAC,IAAKV,EAAK,OAAQA,EAAK,KAAMA,EAAK,MAAOA,EAAK,MAAOA,EAAK,KAAMA,CAAG,EAC9E,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,CAAG,EACjDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAW,MAAM,QAAQE,CAAG,EAAG,CAC7B,GAAI,CAACA,EAAI,MAAM,CAACc,EAAKC,EAAGhC,IAAQ+B,IAAQ/B,EAAI,CAAC,CAAC,EAC5C,MAAM,MAAM,kCAAkCiB,CAAG,EAAE,EAErDU,EAAU,CAAC,IAAKV,EAAI,CAAC,EAAG,OAAQA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,CAAC,EAChG,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,EAAI,CAAC,CAAC,EACpDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAWE,IAAQ,aAAc,CAE/BW,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1CQ,EAAY,KAAK,KAAKV,EAAWG,CAAY,EAC7CQ,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1C,IAAMU,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,GAAkBL,EAAY,GAAKP,EAAeG,EAAeN,EACjEgB,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,EAAQ,KAAK,MAAMH,EAAgB,CAAC,EACpCI,EAAOJ,EAAgBG,EACvBE,EAAM,KAAK,MAAMJ,EAAiB,CAAC,EACnCK,EAASL,EAAiBI,EAC1BE,EAAO,KAAK,MAAML,EAAgB,CAAC,EACnCM,EAAQN,EAAgBK,EAE9Bb,EAAU,CAAC,IAAAW,EAAK,OAAAC,EAAQ,KAAAC,EAAM,MAAAC,EAAO,MAAAL,EAAO,KAAAC,CAAI,CAClD,KACE,OAAM,MAAM,8BAA8BpB,CAAG,EAAE,EAEjD,MAAO,CAAC,QAAAU,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,CAAQ,CAChD,EA8BStC,GACT,CAACkB,EAAmDC,EACnDE,EAA0C6B,EAA4CzB,EACtF0B,EAAY,GAAOC,EAA6C,iBAA+B,CAC9F,IAAIC,EAA
W3B,EAASC,EAAUC,EAAS0B,EAC3C,GAAIF,IAAe,eACjB,CAACC,EAAW3B,EAASC,EAAUC,EAAS0B,CAAU,EAAIpC,UAC7CkC,IAAe,gBACxB,CAACC,EAAWC,EAAY5B,EAASC,EAAUC,CAAO,EAAIV,MAEtD,OAAM,IAAI,MAAM,sBAAsBkC,CAAU,EAAE,EAEpD,GAAM,CAACG,EAAgB,CAAEvB,EAAaC,EAAcC,CAAW,EAAIf,EAE7D,CAACU,EAAaC,EAAcC,CAAW,EAAIpC,GAAiB0B,CAAO,EACnE,CAACmC,EAAeC,EAAgBC,CAAa,EAAI/D,GAAiBuD,CAAS,EAE3ES,EAAuB/D,GAAuBoC,EAAawB,CAAa,EACxEI,EAAwBhE,GAAuBqC,EAAcwB,CAAc,EAC3EI,EAAuBjE,GAAuBsC,EAAawB,CAAa,EACxE,CAAC,QAAAvB,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,EAAQ,EAAIvC,GAC7C0B,EAAKC,EAASC,EAAUC,EAASC,EAAaC,EAAcC,EAAa4B,EACzEC,EAAuBC,CAAoB,EAEzCzC,GAAc+B,EAAYI,EAAiBD,EAAaC,EAE1DhC,GAAqD,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACvE,OAAI6B,IAAe,gBACjB7B,GAAW,CAAC8B,EAAWjC,GAAagB,EAAUC,EAAWC,EAAQ,EACxDc,IAAe,iBACxB7B,GAAW,CAAC8B,EAAWjB,EAAUC,EAAWC,GAAUlB,EAAW,GAG5D,CACL,UAAAiC,EACA,WAAAD,EACA,QAAA1B,EACA,SAAAC,EACA,QAAAC,EACA,WAAA0B,EACA,SAAAlB,EACA,UAAAC,EACA,SAAAC,GACA,YAAAlB,GACA,QAAAe,EACA,YAAAN,EACA,aAAAC,EACA,YAAAC,EACA,YAAAC,EACA,aAAAC,EACA,YAAAC,EACA,qBAAAyB,EACA,sBAAAC,EACA,qBAAAC,EACA,cAAAL,EACA,eAAAC,EACA,cAAAC,EACA,QAAAxC,EACA,SAAAK,GACA,YAAAJ,CACF,CACF,EAESlB,GACT,CAAC6D,EAA+BC,EAA4BC,EAC3DC,EAA+BC,EAAyBd,IAAoC,CAC3F,IAAMe,EAAiBf,IAAe,eAChCE,EAAaa,EAAiBL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAElEM,EAAS,GACTC,EAA0C,CAAC,GAAI,EAAG,CAAC,EACnDC,EAAiB,CAAC,EAAGN,EAAY,IAAI,CAACxB,EAAG9B,IAAMA,CAAC,CAAC,EACjD6D,EAAW,CAAC,KAAK,KAAK7E,GAAa4E,EAAe,EAAE,IAAIE,GAAKR,EAAYQ,CAAC,CAAC,CAAC,EAAKH,EAAc,CAAC,CAAE,EAAG,EAAG,CAAC,EAE/GI,GAAU,UAAW,IAAM,oCAAoCF,CAAQ,EAAE,EAEzE,IAAMG,EAAmBN,EAAUD,GAAkBb,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EqB,EAAaC,EAAU,KAAKZ,CAAW,EACvCa,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,CAAU,EACnF,CAAC,QAAuB,KAAMC,CAAI,EAAG,CAAC,QAAuB,KAAMH,EAAW,OAAO,EACrF,CAAC,QAAuB,KAAMA,EAAW,SAAS,CACpD,EACAc,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAMiB,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAUlB,EAAO,SAAW,EAC9BkB,IACFH,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEiB,EAAkB,KAAK,MAAM,GAE/BF,EAAgB,KAAK,GAAGC,EAA2Bd,CAAW,CAAC,EAE/D,IAAMiB,EAAmBC,GAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQlB,EAAW,MAAM,EAChG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAC/C,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQH,EAAW,QAAQ,MAAM,EAChE,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,UAAU,MAAM,CACtE,EAEMqB,EAAahB,EAAS,EAAI,EAC1BiB,EAAIC,GAA4BxB,EAAO,CAAC,EAAE,QAAQ,EAElDyB,EAAIC,EACN,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQY,IAAqB,EAAI,EAAIA,CAAgB,EAC3Fe,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EAC5EM,EAAiB,CAACH,EAAGE,CAAC,EACtBE,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUE,EAAY,OAAQoB,CAAU,EACtFS,EAAmB,GACvB,GAAIb,EAAS,CACX,IAAMc,EAAON,EAAc,OAAQ1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EACxFM,EAAe,KAAKI,CAAI,EACxBD,GAAoB;AAAA,8DACgCzB,EAAS,QAAQiB,CAAC,IAAMA,CAAC;AAAA,wBAC/DlB,EAAiB4B,EAAa,SAAU,EAAG,CAAC,EAAIA,EAAa,SAAU,EAAG,CAAC,CAAC,GACtF3B,EAAS,MAAQ,EAAE;AAAA,UAEzB,CAEA,MAAO;AAAA,cACDyB,CAAgB;AAAA;AAAA;AAAA,uBAGPN,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA,uBAI1BE,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA,YAErCP,EAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGO,EAAgBC,CAAM,CAAC;AAAA,YACnFT,EAAa,UAAU,CAAC;AAAA,YACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACzDS,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACrCI,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,yBAEjDpB,EAAiB4B,EAAa,SAAUR,EAAE,KAAO,EAAGA,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,2CAE/FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAClFpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;
AAAA,gBAE1FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAChCQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA,8BAKlDpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAyB1GpB,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAUjBA,EAAiB;AAAA,0EAEA;AAAA,yEAC4C;AAAA;AAAA,wBAG7DA,EAAiB;AAAA;AAAA;AAAA,wBAIA;AAAA;AAAA;AAAA,qBAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOTA,EAAiB;AAAA;AAAA;AAAA;AAAA,wBAKA;AAAA;AAAA;AAAA;AAAA,qBAIR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAULa,EAAU,oDAAsD,EAAE;AAAA;AAAA,YAG5E,EACA,MAAO,CACL,KAAM,cACN,YACI,CAAC,KAAM,GAAGjB,EAAW,QAAQ,IAAII,CAAc,IAAIO,CAAgB,IAAIM,CAAO,GAAI,kBAAAD,CAAiB,EACvG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGS,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,CACF,GACA,gBAAAI,CACF,CACF,ICtZJ,IAgBae,GAuGAC,GAvHbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAMaR,GACT,CAACS,EAA+BC,EAC/BC,IAAqF,CACpF,IAAMC,EAAUH,EAAO,OAAS,EAC1BI,EAAcD,EAAU,8BAAgC,GACxDE,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KACnBO,EAAyBD,EAAO,CAAC,EAAIL,EAAW,MAEhDO,EAAgBP,EAAW,SAAW,OACtCQ,EAAcC,GAChBL,EAAQC,EAAQL,EAAW,UAAWA,EAAW,KAAMA,EAAW,QAASO,CAAa,EACtFG,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,EAAW,SAAS,EAC7F,CAAC,QAAuB,KAAM,CAACA,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC5E,CAAC,QAAuB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EACtE,CAAC,QAAuB,KAAMM,CAAsB,CACtD,EACAO,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,CAAM,CAAC,EAClE,IAAMU,EAAwD,CAAC,OAAQ,MAAM,EACzEb,IACFU,EAAgB,KAAK,GAAGE,EAA2Bf,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEgB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BN,CAAW,CAAC,EAE/D,IAAMQ,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEY,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,MAAM,EACxDsB,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,MAAM,EACxDsB,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,CAAC,EAG9E,IAAM6B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ5B,EAAW,UAAU,MAAM,EACxG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,EAChF,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACA,OAAA6B,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA;AAAA,IAE9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,0BAEtDC,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,8CAEhBX,EAAgB,EAAI,CAAC;AAAA,yDACVA,EAAgB,EAAI,CAAC,oBAClEA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA,iBAGhBW,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMCX,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMrBA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA,uBAKnEA,EAAgBiB,EAAE,IAAI,QAAS,UAAW,SAAU,eAAe,EACnDA,EAAE,IAAI,QAAS,gBAAiB,UAAW,QAAQ,CAAC;AAAA,uBACzDE,EAAE,IAAI,iBAAkB,aAAc,UAAW,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,MAK3EvB,CAAW;AAAA,MACXmB,CAAe;AAAA,MACfJ,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAEzC,EACA,MAAO,CACL,KAAM,cACN,YAAa,CAAC,KAAMlB,EAAW,SAAU,
kBAAAe,CAAiB,EAC1D,WAAY,KAAO,CACjB,QAAS,CAAC,CACR,KAAMd,EAA6BA,EAA2BO,CAAW,EAAIA,EAC7E,SAAUT,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,EAESzB,GACT,CAACQ,EAA+BC,EAA4BQ,IAAgD,CAC1G,IAAMN,EAAUH,EAAO,OAAS,EAC1B+B,EAAaC,GAAiBvB,EAAY,CAAC,CAAC,EAC5CwB,EAAeD,GAAiBvB,EAAY,CAAC,CAAC,EAC9CE,EAAaC,EAAU,KAAKH,CAAW,EAAIsB,EAAaE,EACxD5B,EAAS,CAACL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGzB,EAAS,CAACN,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGG,EAAsB,CAACzB,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAIsB,CAAU,EAElGlB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EACxC,CAAC,OAAsB,KAAM,CAACV,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC3E,CAAC,OAAsB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,CACvE,EACAa,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,EAAQ4B,CAAmB,CAAC,EACvF,IAAMC,GAAWF,EAAe,GAAKhC,EAAW,QAAQ,CAAC,EAAIK,EAAO,CAAC,EAC/DW,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUkC,EAAoB,OAAQH,CAAU,EAC5FV,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQ0B,CAAU,EACpEJ,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQyB,CAAU,EACpEH,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM+B,CAAU,CAAC,EAEnF,IAAM3B,EAAcD,EAAU,8BAAgC,GACxD0B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EACxC,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,CACvC,EACA,OAAAC,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA,IAC9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,8CAIlCe,CAAY;AAAA,oCACtBA,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOxBR,EAAE,KAAK,KAAK,KAAKU,CAAO;AAAA,wBACxBhB,EAAO,KAAK,KAAK,KAAKc,CAAY;AAAA;AAAA;AAAA,8CAGZ3B,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGzB6B,CAAO;AAAA;AAAA;AAAA,0BAGXV,EAAE,IAAI,QAAS,gBAAiB,eAAgB,eAAe,CAAC;AAAA;AAAA,0BAEhEA,EAAE,KAAK,KAAK;AAAA;AAAA;AAAA,gDAGUnB,EAAO,CAAC,CAAC;AAAA,wBACjCqB,EAAE,IAAI,WAAY,UAAW,IAAK,gBAAgB,CAAC;AAAA,iCAC1CM,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOlBA,CAAY;AAAA;AAAA,QAE/B7B,CAAW;AAAA,QACXmB,CAAe;AAAA,QACfJ,EAAO,IAAI,QAAS,MAAO,UAAW,iBAAkB,OAAO,CAAC;AAAA;AAAA,IAGlE,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CACX,KAAM,GAAGlB,EAAW,QAAQ,IAAI8B,CAAU,IAAIE,CAAY,IAAIE,CAAO,IAAI7B,EAAO,CAAC,CAAC,IAAIA,EAAO,CAAC,CAAC,GAC/F,kBAAmBH,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMM,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,ICvNJ,IAYamB,GA6IPC,GAUOC,GAnKbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAEaT,GACT,CAACU,EAA+BC,EAAoDC,EACnFC,EACAC,EAAiB,KAAyD,CACzE,IAAMC,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KAEnBO,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIJ,EAAOA,EAAO,OAAS,CAAC,EAC5BK,EAAaC,GAAiBH,CAAC,EAC/BI,EAAcD,GAAiBF,CAAC,EAChCI,EAAeF,GAAiBJ,CAAC,EACjCO,EAAaC,EAAU,KAAKb,CAAW,EAAIQ,EAAaG,EACxDG,EAAUhB,EAAO,OAAS,EAC1BiB,EAAYd,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAE5FgB,EAAsB,CADVH,EAAU,KAAKE,CAAS,EACFV,EAAGC,CAAC,EAEtCW,EAAoC,CACxC,CAAC,QAAuB,KAAML,CAAU,EAAG,CAAC,QAAuB,KAAMP,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,CACjC,EACAW,GAA6BnB,EAAsBkB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2BJ,EAAW
Z,EAAQC,CAAM,CAAC,EACzEU,GACFG,EAAgB,KAAK,GAAGE,EAA2BrB,EAAO,CAAC,EAAE,IAAI,CAAC,EAEpEmB,EAAgB,KAAK,GAAGE,EAA2BH,CAAmB,CAAC,EAEvE,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAYC,GAAiB,aAAczB,EAAO,CAAC,EAAE,SAAUiB,EAAU,MAAM,EAC/ES,EAAIC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQO,CAAW,EACrEgB,EAAID,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQI,CAAU,EACpEmB,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUkB,EAAoB,OAAQR,CAAU,EAC5FqB,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBjC,EAAsB4B,EAAO,KAAK,MAAOE,CAAQ,EACxFI,EAAiB,CAACT,EAAGE,CAAC,EACxBQ,GAAc,GAClB,GAAIpB,EAAS,CACX,IAAMqB,GAAiBjC,EAAiBM,EAAa,EACrDyB,EAAe,KAAKR,EAAc,OAAQ3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQqC,EAAc,CAAC,EACpGD,GAAc,GACVhC,EAAiB,uBAAuBiC,EAAc,KACrC,YAAYR,EAAO,KAAK,KAAK,kBAAkB,EACtE,CAEA,IAAMS,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAiBC,GAAiBH,GAAYrB,CAAS,EACvDyB,EAAiBD,GAAiBF,GAAYtB,CAAS,EACvD0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EACrF,CAAC,KAAM,IAAK,KAAM,KAAK,CACzB,EACAC,GAAyB3C,EAAsB0C,EAAQ,EAEvD,IAAME,GAAa,CAACC,GAAyBC,KAA4B,CACvE,IAAMC,GAAOF,GAAS,KAChBG,GAAOH,GAAS,KACtB,GAAIE,KAAS,EACX,MAAO,OAAOC,EAAI,cAAcH,GAAS,KAAK,OAAO,YAEvD,IAAMI,EAAY1B,EAAU,KACxB2B,GAAS,OAAOF,EAAI,aAAaH,GAAS,KAAK,OAAO,IAC1D,QAASM,GAAIJ,GAAO,EAAI,EAAGK,GAAIH,EAAY,EAAGE,IAAK,EAAGA,KAAKC,KACzDF,IAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,OAAOF,EAAY,EAAI,iBAAiBG,EAAC,IAAM,eAAe,IAEhG,OAAAN,GAAc,QAAQK,IAAK,CACzBD,IAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,QAClC,CAAC,EACDD,IAAU,GAAGF,EAAI,YAAYD,GAAO,CAAC;AAAA,uBACxBC,EAAI,YAAYD,GAAO,CAAC,UAC9BG,EACT,EAEMG,GAAa,IAAc,CAC/B,IAAIC,GAAU,eAAe7B,EAAE,KAAK,KAAK,IACzC,QAAS0B,GAAI,EAAGA,GAAIxC,EAAawC,KAC/BG,IAAW;AAAA,0BACGH,EAAC,yBAAyBA,EAAC,2BAA2B1C,CAAU,KAEhF,QAAS0C,GAAI,EAAGA,GAAIvC,EAAcuC,KAAK,CACrCG,IAAW,iCAAiCH,EAAC,yBAAyBxC,CAAW,KAEjF,QAASyC,GAAI,EAAGA,GAAIzC,EAAayC,KAC/BE,IAAW;AAAA,qBACJH,EAAC,WAAWxB,EAAE,KAAK,KAAK,UAAUhB,IAAgB,EAAI,GAAK,IAAIyC,EAAC,GAAG,YAAYA,EAAC,YACnFD,EAAC;AAAA,CAET,CACA,OAAOG,EACT,EAEA,MAAO;AAAA,IAEHhC,EAAa,iBAAiBoB,EAAQ,EAAE,0BAA0BnB,CAAS,EAAE,iBACzE,GAAGW,EAAgBN,CAAM,CAAC;AAAA,IACtCN,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,4CACpCb,CAAU,QAAQA,CAAU;AAAA,8CAC1BA,CAAU;AAAA,iCACvBG,CAAY;AAAA,qCACRA,CAAY;AAAA;AAAA;AAAA,MAG3CX,EAAY,SAAW,EAAI,GAAK,uBAAuBsB,EAAU,gBAAgB,OAAO,CAAC,GAAG;AAAA,MAC5FqB,GAAWnB,EAAGc,EAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,MAC7CmB,GAAWjB,EAAGc,CAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,wBAC3BC,EAAO,KAAK,KAAK,KAAKhB,CAAY;AAAA,oDACND,CAAW;AAAA,QACvD0C,GAAW,CAAC;AAAA;AAAA,2BAEOzC,CAAY;AAAA;AAAA,QAE/BuB,EAAW;AAAA,QACXH,CAAe;AAAA,0BACGJ,EAAO,KAAK,OAAO;AAAA,qBACxBA,EAAO,gBAAgB,aAAa,CAAC;AAAA,QAClDA,EAAO,YAAY,YAAYnB,CAAU,GAAI,OAAO,CAAC;AAAA;AAAA;AAAA,GAIvD,EACA,MAAO,CACL,KAAM,cACN,YAAa,CACX,KAAM,GAAGT,EAAqB,UAAU,IAAIS,CAAU,IAAIE,CAAW,IAAIC,CAAY,IAAIT,CAAc,GACvG,kBAAmBY,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMd,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAClE,gBAAAK,CACF,GACA,gBAAAG,CACF,CACF,EAEE/B,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,CAEtD,EAEaR,GAAUgE,GAAkC,CACvDjE,GAAeiE,EAAQ,MAAM,EAC7B,IAAMtD,EAAcuD,GAAc,UAAUD,EAAQ,OAAO,CAAC,EAAE,KAAMA,EAAQ,OAAO,CAAC,EAAE,KAAM,EAAI,EAChG,GAAI,CAACtD,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMM,EAAIN,EAAYA,EAAY,OAAS,CAAC,EACtCO,EAAI+C,EAAQ,OAAO,CAAC,EAAE,KAAKA,EAAQ,OAAO,CAAC,EAAE,
KAAK,OAAS,CAAC,EAC9DhD,EAAI,GAAKC,EAAI,EACf+C,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,EAE3FsD,EAAQ,QAAQE,GAAwBF,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,CAE1F,IChLA,IAgBayD,GA6BPC,GAEAC,GAkDAC,GAmBOC,GA0BPC,GAyIAC,GA0BAC,GAeOC,GAhUbC,GAAAC,EAAA,kBAIAC,KAIAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEalB,GACT,CAACmB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,EAA4BC,IAAqC,CAC/F,IAAMC,EAAYN,EAAW,CAAC,EACxBO,EAAoBP,EAAW,MAAMK,EAAgB,EAAI,EAAGA,EAAgB,EAAI,CAAC,EACjFG,EAAcD,EAAkB,OAChCE,EAAcR,EAAY,CAAC,EAE3BS,EADqBT,EAAY,MAAM,CAAC,EACA,IAAI,CAACU,EAAGC,IAAMD,GAAKA,EAAI,IAAMT,EAAUU,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIR,EAAWS,CAAC,EAAIT,EAAWS,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIR,EAAQQ,CAAC,GAAKR,EAAQQ,CAAC,CAAC,CAAC,EAC5G,OAAAC,EAAY,OAAO,EAAG,EAAGP,CAAS,EAClCO,EAAY,OAAOR,EAAgB,EAAI,EAAG,EAAGI,CAAW,EACjDI,CACT,EAcE/B,GAA2B,CAAC,EAAG,EAAG,EAAG,CAAC,EAEtCC,GAAiB,CAAC+B,EAA+BC,IAAqC,CAG1F,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAME,EAAcF,EAAO,CAAC,EAAE,KAAKC,EAAW,SAAW,OAASD,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFG,EAAkBH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAIC,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAIH,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMN,EAAcM,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWP,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIO,EAAW,QAAQ,SAAWP,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIO,EAAW,KAAK,SAAWP,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIO,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAEM9B,GAA4B,CAA2B+B,EAAeD,IAAqC,CAC/G,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,QAAS,EAAI,EAAG,EAAID,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAE,EACvCb,EAAY,EAAI,CAAC,IAAM,IACzBA,EAAY,EAAI,CAAC,EAAIa,EAAO,CAAC,EAAE,KAAK,CAAC,GAGzC,IAAMI,EAAOH,EAAW,KAAK,MAAM,EACnCI,GAAa,yBACTL,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAaiB,EAAMH,EAAW,SAAW,OACnGA,EAAW,OAAO,EAGtB,IAAMK,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAiB,CAAI,CAAC,EACzCE,CACT,EAEanC,GAAuB8B,GAAwD,CAC1F,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBS,EAAU,CAAC,SAAU,QAAS,aAAc,YAAY,EAAET,EAAW,QAAkB,EACvFb,EAAYa,EAAW,UACvBU,EAAQV,EAAW,MACnBd,EAAcc,EAAW,aACzBG,EAAOH,EAAW,KAClBX,EAAUW,EAAW,QACrBW,EAAYX,EAAW,WAA6B,EAE1D,MAAO,CACL,QAAAS,EACA,OAAAD,EACA,UAAArB,EACA,MAAAuB,EACA,YAAAxB,EACA,KAAAiB,EACA,QAAAd,EACA,SAAAsB,EACA,GAAGL,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEMnC,GAAS,CAACyC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EAKjEe,EAAiBd,EAAW,SAAW,OAC7C,GAAIA,EAAW,QAAU,EAAG,CAM1B,GADmC,CAACY,EAAQ,YAAY,eAAe,QAAQ,GAC7CE,GAAkBf,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMC,EAAW,OACjFD,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAKC,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,EAAG,CAC7F,IAAMF,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZC,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAEhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3ChB,EAAO,SAAW,GACpBkB,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAE3Ba,EAAQ,QACJM,GAAsCD,EAAYJ,EAAoBf,CAAW,EAAG,CAAC,OAAQmB,CAAU,CAAC,CAC9G,MACEL,EAAQ,Q
AAQO,GAA6BpB,EAAQc,CAAkB,CAAC,EAE1E,MACF,CAEA,IAAMO,EAAUrB,EAAO,SAAW,EAC5BsB,EAActB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACnDQ,EAAavB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EAClDS,EAAgBxB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACrDU,EAAezB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/B0B,EAAc1B,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BD,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZY,EAAY5B,EAAYgB,EAAiB,EAAI,CAAC,EAC9Ca,EAAW7B,EAAYgB,EAAiB,EAAI,CAAC,EAC7CpB,EAAcI,EAAYgB,EAAiB,EAAI,CAAC,EAEhDc,EAAWd,GAAkBU,IAAiBH,GAAeI,IAAgBH,GAC/EtB,EAAW,KAAK,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,EACvD,GAAI4B,GACCJ,IAAiB,GAAKC,IAAgB,GAAKzB,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,GACxGA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,GACrFA,EAAW,KAAK,CAAC,IAAM,EAAI,CAE9B,IAAM6B,EAAQ/B,EAAY,CAAC,EACvBgC,EAAWC,EAAWC,EACpBC,EAAe,CAAC,EACtB,GAAInB,EAAgB,CAClB,IAAMC,GAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAIlE,GAHIA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,IAE5Ba,EAAU,CACZ,IAAMM,GAAYb,EAAcC,EAAaC,EAC7CO,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAG8B,EAAOK,EAAS,CAAC,EACnDH,EAAYhB,GAAiB,QAAQ,CAAC,EAAGmB,GAAWxC,CAAW,CAAC,EAChEsC,EAAoB,CAAC,EAAGH,EAAOnC,CAAW,CAC5C,MACEoC,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAOR,EAAcC,EAAYC,CAAa,CAAC,EAC9EQ,EAAYhB,GAAiB,QAAQ,CAAC,EAAGQ,EAAe7B,CAAW,CAAC,EACpEsC,EAAoB,CAACH,EAAOH,EAAYC,EAAUjC,CAAW,EAE/DuC,EAAa,KAAKH,CAAS,EAC3BG,EAAa,KAAKF,CAAS,CAC7B,MACED,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAON,EAAeF,EAAcC,CAAU,CAAC,EAC9ES,EAAYhC,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAGL,EAAa6B,CAAa,CAAC,EAC7DS,EAAoB,CAACH,EAAOnC,EAAagC,EAAYC,CAAQ,EAC7DM,EAAa,KAAKF,CAAS,EAC3BE,EAAa,KAAKH,CAAS,EAEzBV,GACFa,EAAa,KAAKlC,EAAO,CAAC,CAAC,EAE7B,IAAMoC,EAAIH,EAAkB,CAAC,EACvBI,GAAIH,EAAa,CAAC,EAAE,KAAKA,EAAa,CAAC,EAAE,KAAK,OAAS,CAAC,EAE1DE,EAAI,GAAKC,GAAI,EACfxB,EAAQ,QACJyB,GACIJ,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACpF,CAAC,OAAQmB,CAAY,CAAC,EAE1BrB,EAAQ,QACJ0B,GAAwBL,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACxG,CAAC,OAAQmB,CAAY,CAAC,EAE5B,MACF,CAIA,IAAMM,EAAgE,GAGhExB,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAIhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3CK,GACFH,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAI3B,IAAMyC,EAAY1B,EAAiBY,EAAYC,EAAWjC,EACpD+C,EAAY3B,EAAiBpB,EAAcgC,EAAYC,EACvDe,EAAWlB,EAAeC,EAAcF,EAC9CX,EAAQ,QACJ+B,GACI1B,EAAYJ,EAAoBf,EAAa0C,EAAWC,EAAWC,EAAUtB,EAC7EmB,CAAyB,EAC7B,CAAC,OAAQtB,CAAU,CAAC,CAC1B,EAEM7C,GAAS,CAACwC,EAAyBZ,IAAqC,CAE5E,IAAMV,EAAgBU,EAAW,SAAW,OACtCD,EAAS,CACba,EAAQ,OAAO,CAAC,EAAE,QACdtB,EAEI,CAACsB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5Bb,EAAO,KAAKa,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAMT,EAAO,CAAC,EAAGH,EAAW,KAAK,CAAC,EAAG,EAAGA,EAAW,KAAK,CAAC,CAAC,EACpDX,EAAU,CAAC,CAAC,EAAE,OAAOW,EAAW,OAAO,EACvCb,EAAY,CAAC,CAAC,EAAE,OAAOa,EAAW,SAAS,EAC3Cd,EAAc,CAAC,CAAC,EAAE,OAAOc,EAAW,WAAW,EAC/Ca,EAAqB5C,GAA0B,CAAC,GAAG+B,EAAY,KAAAG,EAAM,QAAAd,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGa,CAAM,EACnHa,EAAQ,QAAQO,GACZpB,EAAQc,EA
CRf,GAAeR,EAAgB,CAACQ,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAAI,CAAC,CAAC,CAAC,CAC3F,EAEMzB,GAAS,CAACuC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMQ,EAASR,EAAW,SAAW,OAAS,eAAiB,gBACzDa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EACjEI,EAAOH,EAAW,UAAY,SAAWA,EAAW,KAAOA,EAAW,QACtE4C,EAAWC,GACb9C,EAAO,CAAC,EAAE,KACVA,EAAO,CAAC,EAAE,KACVC,EAAW,QACXA,EAAW,UAAgDG,EAA2B,GAAOK,CAAM,EACvGI,EAAQ,QAAQkC,GACZ/C,EAAQc,EAAoB+B,EAAS,SACrC,CAACA,EAAS,YAAaA,EAAS,aAAcA,EAAS,WAAW,EAClE,CAACA,EAAS,QAAQ,MAAOA,EAAS,QAAQ,IAAKA,EAAS,QAAQ,IAAI,EAAGpC,CAAM,CAAC,CACpF,EAEalC,GAAO,CAACsC,EAAyBZ,IAAqC,CACjFhC,GAAe4C,EAAQ,OAAQZ,CAAU,EACrCY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpCxC,GAAOwC,EAASZ,CAAU,EACjBY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAC3CvC,GAAOuC,EAASA,EAAQ,OAAQZ,CAAU,EAE1C7B,GAAOyC,EAASA,EAAQ,OAAQZ,CAAU,CAE9C,ICzUA,IAiCM+C,GA2HOC,GA5JbC,GAAAC,EAAA,kBAqBAC,KACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAU,GAAOC,EAAqCC,EAC/EC,EAAmB,IAAc,CAChC,IAAMC,EAAeD,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,sEACT,IAAK,GACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQED,CAAI;AAAA,cAEf,QACE,MAAM,IAAI,MAAM,oBAAoBC,CAAgB,oBAAoB,CAC5E,CACF,EACME,EAAgBN,EAAiB;AAAA;AAAA,QAGA;AAAA;AAAA,QAIjCO,EAAkBP,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCQ,EAAUR,EAAiB,2BAA6B,2BACxDS,EAAST,EAAiB,2BAA6B,2BACvDU,EAAMV,EAAiB,MAAQ,MAC/BW,EAAMX,EAAiB,MAAQ,MAE/BY,EAAe;AAAA,yBACFZ,EAAiB,2BAA6B,0BAA0B;AAAA,uBAC1EA,EAAiB,gCAAkC,+BAA+B;AAAA,qBACpFU,CAAG;AAAA,qBACHA,CAAG;AAAA;AAAA,mBAELC,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA;AAAA,kCAGYH,CAAO;AAAA,iBACxBL,CAAI;AAAA;AAAA,kCAEaM,CAAM;AAAA,iBACvBN,CAAI;AAAA;AAAA;AAAA;AAAA,kBAIHQ,CAAG;AAAA,QACbL,CAAa;AAAA,0EACqDF,CAAgB,KAE9ES,EAAUb,EAAiB;AAAA,0BACbI,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SACoB;AAAA,0BACbC,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SAEPW,EAAU;AAAA,0BACIV,CAAgB;AAAA,yBACjBJ,EAAiB,2BAA6B,0BAA0B;AAAA;AAAA;AAAA,YAIvFA,EAAiB,yDACA,wDAAwD;AAAA;AAAA;AAAA,UAGzEK,EAAYD,CAAgB,CAAC;AAAA;AAAA,eAExBD,CAAI;AAAA,QAGPY,EAAkBC,GAAqBd,EAAYC,CAAI,EAqB7D,MApBiB;AAAA,uDACgCA,CAAI;AAAA,MACrDH,EAAiBa,EAAUC,CAAO;AAAA;AAAA;AAAA,uDAGeX,CAAI;AAAA,MACrDH,EAAiBc,EAAUD,CAAO;AAAA;AAAA;AAAA,iEAGyBV,CAAI;AAAA,wBAC7CC,CAAgB;AAAA;AAAA;AAAA,uBAGjBJ,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGO,CAAe;AAAA,QACfU,GAAYhB,CAAO,CAAC;AAAA,QACpBc,CAAe;AAAA,8EACuDX,CAAgB;AAAA;AAAA,IAI1F,EAESd,GACT,CAAC4B,EAA+BhB,EAAqCiB,EACpEC,EAAmBC,EAAmBC,EAAkBC,EACxDC,IAAoD,CACnD,IAAMxB,EAAiBE,EAAW,SAAW,OACvCuB,EAAazB,EAAiBkB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAW3B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAY5B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAc7B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAAS9B,GAAmByB,EAAa,IAAM,GAAKA,EAAa,GAAMI,EAAc,IAAM,EAG3FE,EAAY/B,EAAiB6B,EAAcF,EAAWC,EACtDI,EAAYhC,EAAiB2B,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,wCAAwCD,CAAQ,EAAE,EAE7E,IAAM/B,EAAmB0B,EAAS,EAAI,EAChCO,EAAY,KAAK,IAAIJ,EAAc,CAAC,EAAI7B,EAAkB6B,EAAc,CAAC,CAAC,EAC1EK,EAAaR,EAAS,EAAI,EAC1BS,EACF,CAACrC,EAAW,YAAYF,EAAiB,EAAI,CAAC,EAAGE,EAAW,YAAYF,EAAiB,EAAI,CAAC,CAAC,EAC7FwC,EAAsB,CAC1BD,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,IACrGqC,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,GACvG,EACMuC,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI
,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFsC,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,CACvF,EAEMwC,EAAoC,CACxC,CAAC,OAAsB,KAAMtB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAMpB,EAAW,OAAO,EACvF,CAAC,OAAsB,KAAMA,EAAW,SAAS,EAAG,CAAC,OAAsB,KAAMqC,CAAU,EAC3F,CAAC,OAAsB,KAAME,CAAI,CACnC,EACAE,GAA6BzC,EAAYwC,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAElF,IAAM2B,EAAwD,CAAC,OAAQ,MAAM,EACzEtB,IACFmB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE2B,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BzB,CAAW,CAAC,EAE/D,IAAM2B,GAAmBC,IAA+B,CACtD,IAAMC,GAAIC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EAC5EY,GAAID,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACnEiC,EAASC,EAAe,SAAUlC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQmB,CAAU,EACpFe,GAAiB,CAACL,GAAGE,EAAC,EAExBI,GAAmB,GACvB,GAAI/B,EAAS,CACX,IAAMgC,GAAON,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EACxFe,GAAe,KAAKE,EAAI,EACxBD,IAAoB;AAAA,4DAC8BC,GAAK,KAAK,KAAK;AAAA,iCAC1CvD,EAAiB,IAAM,GAAG,GAAG8B,EAAS,MAAQ,EAAE;AAAA,YAEzE,CAEA,IAAM0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ,CAAC,EACrF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQjB,EAAW,MAAM,EAC5D,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQE,EAAK,MAAM,CACjD,EACAgB,GAAyBvD,EAAYsD,EAAQ,EAC7C,IAAME,GAAWC,GAA4BzC,EAAO,CAAC,EAAE,SAAU,CAAC,EAClE,GAAIwC,KAAa,OAASA,KAAa,MACrC,MAAM,IAAI,MAAM,YAAYA,EAAQ,oBAAoB,EAE1D,MAAO;AAAA,UACLE,GAAc,yBAAyB,CAAC;AAAA,UACxCb,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGH,GAAgBF,CAAM,CAAC;AAAA,UACnFG,EAAgB;AAAA,UAChBjE,GAA6BW,EAAgBuB,EAASrB,EAAY8C,GAAE,KAAK,MAAO5C,CAAgB,CAAC;AAAA,UAE/F0B,EAAS+B,GACI3B,EAAmBD,EAAeyB,GAAU,OAAW,CAAC1D,EAAgBqC,CAAS,EACrFyB,GACI5B,EAAmBD,EAAeyB,GAAU,OAAW,CAAC1D,EAAgBqC,EAAW,GACnF,OAAWb,CAAyB,CAAC,EACxD,EAEA,MAAO,CACL,KAAM,wBACN,YACI,CAAC,KAAM,GAAGtB,EAAW,QAAQ,IAAIgC,CAAiB,IAAID,CAAa,IAAIH,CAAM,GAAI,kBAAAe,CAAiB,EACtG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM1B,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAO,CACF,GACA,gBAAAI,EACF,CACF,ICvQJ,IA2BMiB,GAiMOC,GA5NbC,GAAAC,EAAA,kBAmBAC,KACAC,KAEAC,KAEAC,KAGMP,GACF,CAACQ,EAA4BC,EAA+BC,EAAgCC,EAC3FC,EAA+BC,EAAS,GAAOC,EAAkBC,EACjEC,EAAiB,KAAkB,CAClC,IAAMC,EAASD,EAAiB,EAAI,EAC9BE,EAASF,EAAiB,EAAI,EAC9BG,EAAaH,EAAiB,EAAI,EAClCI,EAAgBP,EAAS,EAAI,EAE/BQ,EAAmB;AAAA,iDACoBR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,0BAC9DD,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,KAEvDH,IACFU,GAAoB;AAAA,sDAC0BR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,2BAClEE,EAAiB,IAAM,GAAG,GAAGH,EAAS,MAAQ,EAAE;AAAA,QAGrE,IAAMS,EAAaT,EAAS,EAAI,EAC1BU,EAAIC,EAAc,IAAKf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC5EG,EAAKD,EAAc,KAAMf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC9EI,EAAiB,CAACD,EAAIF,CAAC,EACzBZ,GACFe,EAAe,KAAKF,EAAc,OAAQf,EAAO,CAAC,EAAE,SAAU,CAACC,EAAYS,CAAU,CAAC,EAAE,OAAQG,CAAU,CAAC,EAE7G,IAAMK,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQY,CAAU,EAEpFO,EAAe;AAAA,2BACAjB,EAAuB,cAAgB,gBAAgB;AAAA,kBAChEA,EAAuB,cAAgB,gBAAgB;AAAA,kBACvDA,EAAuB,cAAgB,gBAAgB,MAAMQ,CAAa;AAAA,wBACpER,EAAuB,cAAgB,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM7CE,CAAQ,MAAMM,CAAa;AAAA,8BAC/BA,CAAa;AAAA,8BACbN,CAAQ;AAAA;AAAA;AAAA,uBAGfA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,oCAExCA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAOnBA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA,0BACpDA,CAAQ,wBAAwBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA,sCAO/CA,CAAQ;AAAA;AAAA;AAAA;AAAA,wCAINA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAUhBS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAMhBW,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA;AAAA,iDAEjBX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iDAMRK,CAAU;AAAA;AAAA,gCAE3BI,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCASZS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA,oCACjCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAUTM,CAAa;AAAA,qCACXT,EAAU,YAAc,QAAQG,CAAQ,QAAQ;AAAA,YACzEa,EAAO,IAAI,QAAS,IAAK,QAAS,KAAM,OAAO,CAAC;AAAA;AAAA,SAGhDG,EAAc;AAAA,gCACMH,EAAO,gBAAgB,YAAY,CAAC;AAAA,wBAC5CA,EAAO,WAAW,gBAAiB,CAAC,CAAC;AAAA,qBACxCA,EAAO,WAAW,gBAAiBR,CAAU,CAAC;AAAA,oBAC/CQ,EAAO,WAAW,gBAAiBV,CAAM,CAAC;AAAA,oBAC1CU,EAAO,WAAW,gBAAiBT,CAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAQpCJ,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,yBAKTA,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,sCAEvCA,CAAQ,sBAAsBG,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAU/CH,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,wCAEvCA,CAAQ,sBAAsBI,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAQlEF,EAAiBS,EAAG,IAAI,QAAS,OAAQ,OAAQ,cAAc,EAC9CA,EAAG,IAAI,QAAS,eAAgB,OAAQ,MAAM,CAAC;AAAA,+BAC3CF,EAAE,IAAI,eAAgB,cAAe,cAAe,aAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM/DZ,EAAU,WAAa,GAAGG,CAAQ,OAAO;AAAA,YAC/Da,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,UAG/C,MAAO;AAAA,IACTnB,EAAa,iBAAiBO,CAAQ,EAAE,iBAAiB,GAAGW,EAAgBC,CAAM,CAAC;AAAA,IACnFN,CAAgB;AAAA;AAAA,MAEdb,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,IAC5EK,EAASgB,EAAeC,CAAW,GACnC,EAES7B,GACT,CAACQ,EAA+BsB,EAC/BC,IAAqF,CACpF,IAAMrB,EAAUF,EAAO,OAAS,EAE1BC,EAAcqB,EAAW,YACzBE,EAAaC,EAAU,KAAKxB,CAAW,EAMvCyB,EAAW,CACf,KAAK,KAAKF,EAAa,EAAE,EACzB,EACA,CACF,EACAG,GAAU,UAAW,IAAM,uCAAuCD,CAAQ,EAAE,EAE5E,IAAMnB,EAAiBe,EAAW,SAAW,OACvCM,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAU,CAACP,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,EACvDQ,EACF,CAACR,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAGe,EAAW,YAAYf,EAAiB,EAAI,CAAC,CAAC,EAC7FwB,EAAY,CAACT,EAAW,UAAU,CAAC,EAAGA,EAAW,UAAU,CAAC,CAAC,EAC7DU,EAAsB,CAC1BF,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,IAC3FQ,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,GAC7F,EACMW,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFU,EAAoB,CAAC,EAAI,EAAI,KAAK,MAAMV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,CAAC,EAAI,CACrF,EAEMlB,EAAS,GACT8B,EAAQZ,EAAW,MACnBa,EAASnC,EAAO,CAAC,EAAE,KACnBoC,EAAwBD,EAAO,CAAC,EAAID,EACpCG,EAAyBF,EAAO,CAAC,EAEjCG,EAAoC,CACxC,CAAC,QAAuB,KAAMd,CAAU,EAAG,CAAC,QAAuB,KAAMK,CAAO,EAChF,CAAC,QAAuB,KAAMC,CAAU,EAAG,CAAC,QAAuB,KAAMC,CAAS,EAClF,CAAC,QAAuB,KAAMC,CAAmB,EAAG,CAAC,OAAsB,KAAMC,CAAI,EACrF,CAAC,QAAuB,KAAMG,CAAqB,EAAG,CAAC,QAAuB,KAAMC,CAAsB,EAC1G,GAAGE,EAA2BvC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9D,EACIE,IACFoC,EAAgB,KAAK,GAAGC,EAA2BvC,EAA
O,CAAC,EAAE,IAAI,CAAC,EAClE4B,EAAkB,KAAK,MAAM,GAE/BU,EAAgB,KAAK,GAAGC,EAA2BtC,CAAW,CAAC,EAE/D,IAAME,EAAuBuB,EAAS,CAAC,IAAM,GAAKA,EAAS,CAAC,IAAM,EAC5Dc,EAAmBzC,GAA+B,CACtD,IAAMO,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQuB,EAAQ,MAAM,EACzF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQC,EAAW,MAAM,EAC5D,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,MAAM,EAC1D,CAAC,KAAM,wBAAyB,KAAM,MAAO,OAAQE,EAAoB,MAAM,EAC/E,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAAG,CAAC,KAAM,2BAA4B,KAAM,KAAK,EAChG,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACM5B,EAAWoC,GAA4BzC,EAAO,CAAC,EAAE,QAAQ,EAC/D,MAAO,GACHT,GACIQ,EAAcC,EAAQC,EAAaC,EAASC,EAAsBC,EAAQC,EAAUC,EACpFC,CAAc,CAAC,EACzB,EACA,MAAO,CACL,KAAM,kBACN,YAAa,CAAC,KAAM,GAAGe,EAAW,QAAQ,IAAK,kBAAAM,CAAiB,EAChE,WAAY,KAAO,CACjB,cAAe,CAAC,EAAGF,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,QAAS,CAAC,CACR,KAAMH,EAA6BA,EAA2BtB,CAAW,EAAIA,EAC7E,SAAUD,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,gBAAAsC,CACF,GACA,gBAAAE,CACF,CACF,ICpTJ,IAYME,GAIAC,GAWAC,GAiCAC,GAwCOC,GA+BPC,GAqEAC,GAEAC,GAsDAC,GA6COC,GA7SbC,GAAAC,EAAA,kBAMAC,KACAC,KAEAC,KACAC,KAEMf,GACF,CAACgB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DpB,GAAoB,CAACqB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEMzB,GACF,CAAC0B,EAA+BC,EAAgCC,EAA8BP,EAC7FQ,EAAeP,EAAgBQ,EAA4BC,EAAwBC,EACnFC,IAA0B,CACzB,IAAMC,EAAcR,EAAW,OAAS,EAClCS,EAAoBF,EAAY,SAAW,EACjD,GAAID,EAAc,SAAW,EAC3B,QAASI,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EACjCJ,EAAc,KAAK,CAAC,EAGxB,IAAMK,EAAYX,EAAW,CAAC,EACxBY,EAAcX,EAAYI,EAAgB,EAAI,CAAC,EAAIF,EACzD,QAASO,EAAI,EAAGG,EAAIb,EAAW,OAASQ,GAAeH,EAAgB,EAAI,GAAIK,EAAIF,EAAa,EAAEE,EAAG,EAAEG,EAAG,CACxG,IAAMC,EAASd,EAAWa,CAAC,EACrBpB,EAAUgB,EAAoBK,EAASV,EAAQM,CAAC,EAAIH,EAAYG,CAAC,EACjEhB,EAAWtB,GAAgB0C,EAAQV,EAAQM,CAAC,EAAGd,EAAKc,CAAC,EAAGT,EAAYY,CAAC,EAAGX,EAAUQ,CAAC,EAAGjB,CAAO,EACnGpB,GAAkBqB,EAAUC,EAASC,EAAMc,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRH,EAAQM,CAAC,GAAKI,EAAS,GAAKR,EAAcI,CAAC,GAAKT,EAAYY,CAAC,EAAI,GAAKX,EAAUQ,CAAC,EAAI,EAAId,EAAKc,CAAC,EAC/Fd,EAAKc,EAAIF,CAAW,CAAC,CAE7B,CACAD,EAAY,OAAO,EAAG,EAAGI,CAAS,EAClCJ,EAAY,OAAOF,EAAgB,EAAI,EAAG,EAAGO,CAAW,CAC1D,EAOErC,GACF,CAAoCwC,EAAeC,IAAqC,CACtF,IAAMf,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAClGjB,EAAY,OAAS,EACrB,QAASS,EAAI,EAAGA,EAAIM,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEN,EAC3CT,EAAY,KAAKe,EAAO,CAAC,EAAE,KAAKN,CAAC,CAAC,CAEtC,CACA,IAAMS,EAAiBJ,EAAW,SAAW,OAC7Cd,EAAY,OAAO,EAAG,EAAGe,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC1Cf,EAAY,OAAOkB,EAAiB,EAAI,EAAG,EAAGH,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAE/D,IAAMpB,EAAOmB,EAAW,KAAK,MAAM,EAC7BR,EAAcQ,EAAW,YAAY,MAAM,EAC3CT,EAAgBS,EAAW,cAAc,MAAM,EAC/Cf,EAAagB,EAAO,CAAC,EAAE,KACzBd,EAAYa,EAAW,UAAU,MAAM,EAC3C,GAAIb,EAAU,OAAO,CAACe,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC9C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5Cd,EAAY,IAAI,MAAMM,CAAW,EAAE,KAAK,CAAC,CAC3C,CACA,IAAIJ,EAAUW,EAAW,QAAQ,MAAM,EACvC,GAAIX,EAAQ,OAAO,CAACa,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC5C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5CZ,EAAU,IAAI,MAAMI,CAAW,EAAE,KAAK,CAAC,CACzC,CAGAlC,GACI0B,EAAYC,EAAaC,EAAWa,EAAW,QAASA,EAAW,MAAOnB,EAAMQ,EAASe,EACzFb,EAAeC,CAAW,EAG9B,IAAMa,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAL,EAAM,cAAAU,EAAe,YAAAC,EAAa,UAAAL,EAAW,QAAAE,CAAO,CAAC,EACzFgB,CACT,EAES5C,GAAgCuC,GAAiE,CAC5G,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBpB,EACF,CAAC,SAAU,QAAS,aAC
nB,YAAY,EAAE,OAAOoB,EAAW,QAAW,IAAc,EAAIA,EAAW,OAAiB,EACxFb,EAAYa,EAAW,UACvBZ,EAAQY,EAAW,MACnBd,EAAcc,EAAW,YACzBnB,EAAOmB,EAAW,KAClBX,EAAUW,EAAW,QACrBS,EAAYT,EAAW,SAA2B,EAClDT,EAAgBS,EAAW,cAC3BR,EAAcQ,EAAW,YAC/B,MAAO,CACL,QAAApB,EACA,OAAA4B,EACA,UAAArB,EACA,MAAAC,EACA,YAAAF,EACA,cAAAK,EACA,YAAAC,EACA,KAAAX,EACA,QAAAQ,EACA,SAAAoB,EACA,GAAGH,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEM5C,GAAiB,CAACuC,EAA+BD,IAA8C,CAGnG,GAAI,CAACC,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAG7D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAMS,EAAcT,EAAO,CAAC,EAAE,KAAKD,EAAW,SAAW,OAASC,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFU,EAAkBV,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIS,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAcX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAID,EAAW,MAGnD,GAAIC,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMW,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMnB,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAG5C,GAFqBD,EAAW,UAAU,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEnDH,EAAW,UAAU,SAAWP,EAClD,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAKvD,GAFmBO,EAAW,QAAQ,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEjDH,EAAW,QAAQ,SAAWP,EAC9C,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAKrD,GADgBO,EAAW,KAAK,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAC9CH,EAAW,KAAK,SAAWP,EAAc,EACtD,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIO,EAAW,cAAc,SAAWP,GAAeO,EAAW,cAAc,SAAW,EACzF,MAAM,IAAI,MAAM,4BAA4BP,CAAW,GAAG,EAM5D,GADuBO,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GACrDH,EAAW,YAAY,SAAW,GACpDA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5D,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAID,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAGMtC,GAAsB,CAAC,EAAG,EAAG,EAAG,CAAC,EAEjCC,GACF,CAACiD,EAAyBZ,EAA+BD,IAA8C,CACrG,IAAMc,EAAqBtD,GAAmCwC,EAAYC,CAAM,EAC1EG,EAAiBJ,EAAW,SAAW,OACvCR,EAAcsB,EAAmB,YACjCjB,EAAcL,EAAYY,EAAiB,EAAI,CAAC,EAChDW,EAAgBd,EAAO,CAAC,EAAE,KAAKG,EAAiB,EAAI,CAAC,EAI3D,GAAIU,EAAmB,QAAU,GAAMjB,IAAgB,GAAKkB,IAAkB,EAAI,CAChFF,EAAQ,QAAQG,GAAiCf,EAAQa,CAAkB,CAAC,EAC5E,MACF,CACA,IAAMG,EAAYzB,EAAYY,EAAiB,EAAI,CAAC,EAC9Cc,EAAW1B,EAAYY,EAAiB,EAAI,CAAC,EAC7Ce,EAAelB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/BmB,EAAcnB,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BoB,EAAYjB,EAAiBa,EAAYC,EAAWrB,EACpDyB,EAAYlB,EAAiBP,EAAcoB,EAAYC,EACvDK,EAAWJ,EAAeC,EAAcL,EAExCS,EAAgE,GAIhEC,EAAoBZ,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJa,GAA2BzB,EAAO,CAAC,EAAGtC,EAAmB,EACzD,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACqC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACa,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKY,GAIhC,IAAME,EAAsB,CAAC1B,EAAO,CAAC,EAAGwB,CAAgB,EAClDG,EAAU3B,EAAO,SAAW,EAC9B2B,IACE,CAACxB,GAAkBH,EAAO,CAAC,EAAE,KAAK,SAAW,EAC/C0B,EAAoB,KAAK1B,EAAO,CAAC,EAAE,QAAQ,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,CAAC,CAAC,EAErE0B,EAAoB,KAAK1B,EAAO,CAAC,CAAC,GAKtCY,EAAQ,QACJgB,GACIF,EAAqBb,EAAoBtB,EAAa6B,EAAWC,EAAWC,EAAUK,EACtFJ,CAAyB,EAC7B,CAAC,OAAQG,CAAmB,CAAC,CACnC,EAEE9D,GAAkB,CAACgD,EAAyBb,IAA8C,CAE9F,IAAMV,EAAgBU,EAAW,SAAW,OAEtCC,EAAS,CACbY,EAAQ,OAAO,CAAC,EAAE,QACdvB,EAEI,CAACuB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,C
AChH,EACIA,EAAQ,OAAO,SAAW,GAC5BZ,EAAO,KAAKY,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAI3B,EAAcc,EAAW,aACzBd,EAAY,SAAW,GAAKA,EAAY,CAAC,IAAM,KACjDA,EAAc,CAAC2B,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,GAE1C,IAAI1B,EAAYa,EAAW,WACvBb,EAAU,SAAW,GAAKA,EAAU,CAAC,IAAM,KAC7CA,EAAY,CAAC,CAAC,GAEhB,IAAIE,EAAUW,EAAW,SACrBX,EAAQ,SAAW,GAAKA,EAAQ,CAAC,IAAM,KACzCA,EAAU,CAAC,CAAC,GAEd,IAAIR,EAAOmB,EAAW,KAClBnB,EAAK,SAAW,IAClBA,EAAO,CAAC,EAAG,CAAC,GAEdA,EAAO,CAAC,EAAGA,EAAK,CAAC,EAAG,EAAGA,EAAK,CAAC,CAAC,EAC9BQ,EAAU,CAAC,CAAC,EAAE,OAAOA,CAAO,EAC5BF,EAAY,CAAC,CAAC,EAAE,OAAOA,CAAS,EAChCD,EAAc,CAAC,CAAC,EAAE,OAAOA,CAAW,EACpC,IAAM4B,EACFtD,GAAmC,CAAC,GAAGwC,EAAY,KAAAnB,EAAM,QAAAQ,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGe,CAAM,EACrGY,EAAQ,QAAQG,GACZf,EAAQa,EACRtB,GAAeF,EAAgB,CAACE,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAC/C,CAACA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,CAAC,CAAC,CACtF,EAEa1B,GAAgB,CAAC+C,EAAyBb,IAA8C,CACnGtC,GAAemD,EAAQ,OAAQb,CAAU,EACrCa,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpChD,GAAgBgD,EAASb,CAAU,EAEnCpC,GAAgBiD,EAASA,EAAQ,OAAQb,CAAU,CAEvD,ICpTA,IAgBM8B,GAkDOC,GAOAC,GAzEbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAOMR,GACF,CAACS,EAAmBC,EAA+BC,EAAuBC,IACvD,CACb,IAAMC,EAAaC,EAAU,KAAKJ,CAAU,EACtCK,EAAOL,EAAW,OAClBM,EAAQC,EAAc,QAASR,EAAWM,CAAI,EAC9CG,EAASC,EAAe,SAAUV,EAAWM,CAAI,EACjDK,EAAYT,EAAU,WAAa,EAAiBA,EAAU,cAAc,EAAE,CAAC,EAC3B,OAAOA,EAAU,iBAAiB,EAAE,CAAC,CAAC,EAC1FU,EAAOP,EAAU,cAAcM,EAAWL,CAAI,EAC9CO,EAAmBC,GAA+B,CACtD,IAAMC,EAAQ,QAAQR,EAAM,WAAW,eAAgB,eAAe,CAAC,KACjES,EAAMC,EAAa,uBAAwB,gBAAiBX,CAAI,EAChEY,EAAaf,EAAW,QAAUY,GAASZ,EAAW,UAAY,OAAS,IAAM,IACjFgB,EAAahB,EAAW,QAAUa,EAAMD,GAASZ,EAAW,UAAY,GAAK,QACnF,MAAO;AAAA,kBAEHW,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,CAAM,CAAC;AAAA,kBAClCK,EAAa,UAAU,CAAC;AAAA,oBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,uCACtDL,EAAO,gBAAgB,YAAY,CAAC;AAAA,8BAC7CA,EAAO,KAAK,KAAK;AAAA,sCACTS,CAAU;AAAA,qCACXC,CAAU;AAAA;AAAA,sBAEzBZ,EAAM,WAAW,eAAgB,gBAAiB,QAAQ,CAAC;AAAA,kCAC/CA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,oBAEhDE,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,kBAEjD,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMF,EAAY,SAAUD,CAAS,CAAC,EACjD,cAAe,CAAC,EAAG,KAAK,KAAKI,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,QAAuB,KAAMQ,CAAI,EAC7E,GAAGQ,EAA2BnB,EAAYA,CAAU,CACtD,CAEF,GACA,gBAAAY,CACF,CACF,EAGKrB,GAAS,CAAC6B,EAAyBlB,IAAuC,CACrF,IAAMF,EAAaoB,EAAQ,OAAO,CAAC,EAAE,KAC/BrB,EAAYqB,EAAQ,OAAO,CAAC,EAAE,SAC9BT,EAAOS,EAAQ,OAAO,CAAC,EAC7BA,EAAQ,QAAQ9B,GAAwBS,EAAWC,EAAYW,EAAMT,CAAU,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACjG,EAEaV,GAAyBU,GAA0D,CAC9F,IAAMmB,EAAYnB,EAAW,YAAwB,EAC/CoB,EAAUpB,EAAW,UAAsB,EACjD,OAAOqB,GAA4B,CAAC,UAAAF,EAAW,QAAAC,CAAO,CAAC,CACzD,IC7EA,IAoBME,GASAC,GAWAC,GA2DOC,GAKAC,GAxGbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAWMV,GAAkBW,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,gCAAgC,EAElD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,iCAAiC,CAErD,EAEMV,GAAmB,CAACW,EAAgBC,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKF,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAI,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMd,GAAgC,CAACe,EAAyBC,IAAoD,CAClH,IAAI,EAAWC,EAAWC,EAAWC,EACjCC,EACAV,EACEW,EAAgBL,EAAW,SAAW,OACtCM,EAAYN,EAAW,UACvBO,EAAYP,EAAW,OAAS,MAClCK,GACF,CAAC,EAAGJ,EAAGC,EAAGC,CAAC,EAAIJ,EAAY,KAC3BK,EAAQG,EAAY,CAAC,EAAGN,EAAGC,EAAGI,EAAWA,EAAWH,EAAKG,GAAa,CAAE,EACpD,CAAC,EAAGL,EAAGC,EAAGC,EAAKG,GAAa,EAAIA,EAAWA,CAAS,EACxEZ,EA
AOa,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,IAEzD,CAAC,EAAGN,EAAGC,EAAGC,CAAC,EAAI,CAACJ,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,CAAC,EAClGK,EAAQG,EAAY,CAAC,EAAGD,EAAWA,EAAWH,EAAKG,GAAa,EAAIL,EAAGC,CAAC,EACpD,CAAC,EAAGC,EAAKG,GAAa,EAAIA,EAAWA,EAAWL,EAAGC,CAAC,EACxER,EAAOa,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,GAE3D,IAAMC,EAAsBT,EAAY,QAAQK,CAAK,EAC/CK,EAAoBD,EAAoB,KAAK,OAC7CE,EAAgBX,EAAY,SAE5BY,EAAgBC,EAAc,IAAKF,EAAeD,CAAiB,EACnEI,EAAeC,EAAe,SAAUJ,EAAeD,CAAiB,EAExEM,EAAmBC,GAA+B;AAAA,IACtDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEhG9B,GAAiBW,EAAMe,EAAmBE,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEtEG,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DH,EAAa,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGxDA,EAAa,YAAY,aAAcF,EAAc,aAAa,UAAU,CAAC,CAAC;AAAA,KAGlF,MAAO,CACL,KAAM,eACN,YAAa,CAAC,KAAM,GAAGZ,EAAY,IAAI,IAAIC,EAAW,SAAS,IAAIA,EAAW,IAAI,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACjH,WAAaP,GAAW,CACtB,IAAMwB,EAAcZ,EAAgB,CAAC,EAAGJ,EAAIK,EAAWJ,EAAII,EAAWH,EAAKG,GAAa,CAAE,EACtD,CAAC,EAAGH,EAAKG,GAAa,EAAIL,EAAIK,EAAWJ,EAAII,CAAS,EACpFY,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAkBZ,EAAoB,KACtCa,EAAiBF,EAAU,gBAAgBC,EAAiB1B,CAAI,EACtE,MAAO,CACL,QAAS,CAAC,CAAC,KAAMuB,EAAa,SAAUxB,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKyB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGI,EAA2BF,EAAiBC,CAAc,CAAC,CAChH,CACF,EACA,gBAAAN,CACF,CACF,EAEa9B,GAAe,CAACsC,EAAyBvB,IAA6C,CACjGlB,GAAeyC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAA8BuC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CAC9E,EAEad,GAA+Bc,GACxCwB,GAA4B,CAC1B,UAAWxB,EAAW,UACtB,KAAMA,EAAW,KACjB,OAAQA,EAAW,MACrB,CAAC,IC7GL,IAsBMyB,GAEAC,GACAC,GACAC,GACAC,GAQAC,GAqBAC,GA4HAC,GAEAC,GA+GOC,GAOAC,GA5SbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAaMhB,GACF,qBACEC,GAAc,IAAMD,GAAgB,KACpCE,GAAkB,IAAMD,GAAc,IACtCE,GAAa,IAAMF,GAAc,MAAQA,GACzCG,GAAiB,IAAMD,GAAa,IAQpCE,GAAN,KAAiB,CACf,YAAYY,EAAa,GAAI,CAC3B,KAAK,gBAAkB,IAAI,IAC3B,KAAK,WAAaA,CACpB,CAGA,UAAUC,EAAgBC,EAAe,CACvC,IAAIC,EAAQ,KAAK,gBAAgB,IAAIF,CAAM,EACvCE,IAAU,OACZA,EAAQ,CAACD,CAAK,EAEdC,EAAM,KAAKD,CAAK,EAElB,KAAK,gBAAgB,IAAID,EAAQE,CAAK,CACxC,CAIF,EAEMd,GAAN,KAAqB,CACnB,YAAYe,EAA+CC,EAAkB,CAAlB,cAAAA,EACzD,KAAK,YAAc,GACnB,KAAK,aAAe,IAAI,IACxB,KAAK,IAAM,IAAI,MACf,KAAK,WAAa,CAAC,EAGnB,GAAI,CAACC,EAAKC,CAAG,EAAIF,EAAS,SAAS,IAAI,EAAIA,EAAS,MAAM,KAAM,CAAC,EAAI,CAACA,EAAU,EAAE,EAClF,GAAI,CAACC,EAAI,MAAM,OAAOnB,EAAc,CAAC,EACnC,MAAM,IAAI,MAAM,kBAAkB,EAapC,GAXmBmB,EAAI,MAAM,GAAG,EACrB,QAAQ,CAACE,EAAWN,IAAU,CACvC,IAAMO,EAAOL,EAAOF,CAAK,EAAE,KAAK,MAAM,EACtC,GAAI,CAACM,EAAU,MAAM,OAAOvB,EAAe,CAAC,EAC1C,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMyB,EAAa,KAAK,YAAYF,EAAW,GAAMC,EAAMP,CAAK,EAChE,KAAK,IAAI,KAAKQ,CAAU,CAC1B,CAAC,EAGGH,IAAQ,GAEVA,GAAO,CAAC,GAAG,KAAK,aAAa,QAAQ,CAAC,EAC1B,OAAO,CAAC,CAACI,EAAKC,CAAI,IAAOA,EAAK,QAAU,GAAKD,IAAQ,KAAM,EAC3D,IAAI,CAAC,CAACA,CAAG,IAAMA,CAAG,EAClB,KAAK,EAAE,UAEf,CAACJ,EAAI,MAAM,OAAOvB,EAAW,CAAC,EAChC,MAAM,IAAI,MAAM,aAAa,EAKduB,EAAI,MAAM,OAAOxB,GAAe,GAAG,CAAC,GAC3C,QAASkB,GAAW,CAC9B,GAAIA,IAAW,MACb,KAAK,WAAa,KAAK,WAAW,OAAO,KAAK,YAAY,MACrD,CACL,IAAMW,EAAO,KAAK,aAAa,IAAIX,CAAM,EACzC,GAAIW,IAAS,OACX,MAAM,IAAI,MAAM,oBAAoB,EAEtC,KAAK,WAAW,KAAKA,EAAK,QAAQ,CACpC,CACF,CAAC,EACD,KAAK,IAAM,KAAK,YAAYL,EAAK,GAAO,KAAK,UAAU,CACzD,CAGA,UAAUN,EAAgBY,EAAkBb,EAAoB,CAC9D,IAAIY,EAAO,KAAK,aAAa,IAAIX,CAAM,EACvC,GAAIW,IAAS,OAAW,CACtB,GAAIA,EAAK,WAAaC,GAAYD,EAAK,QAAU,EAC/C,MAAM,IAAI,MAAM,oBAAoB,EAEpCA,EAAK,QACLA,EAAK,aAAa,KAAKZ,CAAU,CAErC,MACEY,EAAO,CAAC,MAAO,EAAG,SAAAC,EAAU,aAAc,CAACb,CAAU,CAAC,EAExD,KAAK,aAAa,IA
AIC,EAAQW,CAAI,CACpC,CAGA,YAAYE,EAAcC,EAAkBN,EAAyBP,EAAQ,GAAgB,CAC3F,IAAMc,EAAOP,EAAK,OACdQ,EAAW,GACXC,EAAe,CAAC,EAChBC,EAAU,EAEd,GAAI,CAACL,EAAK,MAAM,OAAO7B,EAAe,CAAC,GAAM,CAAC8B,GAAWD,IAAS,GAChE,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMM,EAAeN,EAAK,MAAM,OAAO/B,GAAe,GAAG,CAAC,EACpD2B,EAAa,IAAItB,GAAWc,CAAK,EAEvC,OAAAkB,GAAc,QAAQ,CAACnB,EAAgBoB,IAAc,CACnD,GAAIpB,IAAW,MAAO,CACpB,GAAIgB,EACF,MAAM,IAAI,MAAM,6CAA6C,EAE/DA,EAAW,GACX,IAAMK,EAAoBN,EAAOI,EAAa,OAAS,EACvD,GAAIE,EAAoB,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GADAJ,EAAeT,EAAK,MAAMU,EAASA,EAAUG,CAAiB,EAC1D,KAAK,aACP,GAAI,KAAK,aAAa,SAAWJ,EAAa,QAC1C,KAAK,aAAa,SAAS,IAAMA,EAAa,SAAS,EACzD,MAAM,IAAI,MAAM,8BAA8B,UAEvCH,EACT,KAAK,YAAc,GACnB,KAAK,aAAeG,MAEpB,OAAM,IAAI,MAAM,uCAAuC,EAGzD,QAASK,EAAI,EAAGA,EAAIL,EAAa,OAAQK,IAAK,CAC5C,IAAMtB,EAAS,OAAO,aAAa,IAAI,WAAW,CAAC,EAAIsB,CAAC,EACxDb,EAAW,UAAUT,EAAQoB,EAAIE,CAAC,EAClC,KAAK,UAAUtB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAC/C,CACF,MACEQ,EAAW,UAAUT,EAAQoB,GAAK,KAAK,YAAc,KAAK,aAAa,OAAS,EAAI,EAAE,EACtF,KAAK,UAAUpB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAEjD,CAAC,EACMQ,CACT,CAQF,EAEMpB,GAAakC,GAAyBA,EAAO,OAE7CjC,GACF,CAACkC,EAAuCC,EAAkBC,EACzDC,IAAgD,CAE/C,IAAMC,EADQJ,EAAY,IAAKhB,GAASA,EAAK,MAAM,EAC3B,IAAI,CAACO,EAAMd,IAAU4B,EAAc,QAAQ5B,CAAK,GAAIwB,EAAUV,CAAI,CAAC,EACrFe,EAAaC,EAAU,KAAKJ,CAAW,EACvCK,EAASC,EAAe,SAAUR,EAAUE,EAAY,MAAM,EAC9DO,EACF,CAAC,GAAGR,EAAe,aAAa,KAAK,CAAC,EAAE,OAAQ1B,GAAW,CAAC0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,CAAC,EACxGmC,EAAmBC,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EACrBC,EAAW,kBACXC,EAAU,iBACVC,EAAY,eACZC,EAAgC,CAAC,EACjCC,EAAiC,CAAC,EAClCC,EAAiC,CAAC,EAClCC,EAA4B,CAAC,EAC7BC,EAAyBnB,EAAe,aAAa,OAASA,EAAe,IAAI,gBAAgB,KACvGA,EAAe,aAAa,QAAQ,CAACf,EAAMX,IAAW,CACpD,GAAI0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,EAAG,CAClD,IAAM8C,EAAcpB,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,IAAI,CAAC,EAClE8C,IAAgB,QAClBpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBoC,EAAQ,KAAK,GACTT,EAAUR,CAAC,EAAE,WACT,QAAQA,CAAC,UAAWnB,EAAO+B,EAAO,WAAW,gBAAiBc,CAAW,CAAC,CAAC,EAAE,CACvF,CAAC,CACH,CACF,CAAC,CAEL,MACEpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBwC,EAAoB,KAAK,GAAGb,EAAUR,CAAC,EAAE,WAAW,QAAQA,CAAC,UAAWnB,EAAO,GAAGD,CAAM,EAAE,CAAC,EAAE,CAC/F,CAAC,EACD4C,EAAgB,KAAK,WAAWhB,EAAUR,CAAC,EAAE,aAAa,QAAQA,CAAC,SAAS,CAAC,GAAG,CAClF,CACF,CAAC,EACDsB,EAAqB,KACjB,WAAW1C,CAAM,cAAcA,CAAM,eAAeX,GAAUW,CAAM,CAAC,KAAKA,CAAM,OAAO,EAC3F2C,EAAqB,KAAK,GAAG,CAEjC,CAAC,EACD,IAAMK,EAAYH,EACd,CACE,GAAGR,EACH,aAAaT,EAAU,IAAI,CAACqB,EAAU7B,IAAM6B,EAAS,aAAa,QAAQ7B,CAAC,SAAS,CAAC,EAAE,KAAK,KAAK,CAAC,GACpG,EACA,CACE,GAAGiB,EACHE,EACA,GAAGG,EACH,GAAGD,EACHH,EACA,GAAGM,EACHJ,EACA,GAAGG,CACL,EACJ,MAAO;AAAA,cAEHP,EACK,iBAAiBF,EAAgB,IAAKlC,IAAY,CAAC,KAAM,GAAGX,GAAUW,CAAM,CAAC,GAAI,KAAM,KAAK,EAAE,CAAC,EAC/F,gBAAgB,aAAc,KAAK,EACnC,iBAAiB,GAAG4B,EAAWI,CAAM,CAAC;AAAA;AAAA,cAEzCI,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,kCACrDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA,cACxDJ,EAAU,IAAI,CAACsB,EAAM9B,IAAM,YAAYA,CAAC,YAAYQ,EAAUR,CAAC,EAAE,KAAK,OAAO,GAAG,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,cAC5F4B,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,cACpBhB,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,YAE/C,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAe,SAAU,kBAAmBF,EAAY,IAAI,IAAM,MAAM,CAAC,EAC7F,WAAY,IAAM,CAGhB,IAAM2B,EACFjB,EAAgB,OAAQlC,GAAW0B,EAAe,aAAa,IAAI1B,CAAM,CAAC,EACrE,IACIA,IACI,CAAC,QAAuB,KAAM0B,EAAe,aAAa,IAAI1B,CAAM,GAAG,UAAY,CAAC,EAAE,EACvGmD,EAAoB,KAAK,CAAC,QAA
uB,KAAMrB,CAAU,CAAC,EAClE,IAAMsB,EACF5B,EAAY,IAAI,CAAChB,EAAM6C,IAAM,CAAC,GAAGC,EAA2B9C,CAAI,CAAC,CAAC,EAC7D,OAAO,CAAC+C,EAAKC,IAAyBD,EAAI,OAAOC,CAAoB,EAAGL,CAAmB,EACpG,OAAAC,EAAgB,KAAK,GAAGE,EAA2B3B,CAAW,CAAC,EACvD,CACN,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAAsB,CACF,CACF,EACA,gBAAAjB,CACF,CACF,EAES5C,GAAS,CAACkE,EAAyBC,IAAuC,CACrF,IAAMhC,EAAiB,IAAItC,GAAeqE,EAAQ,OAAQC,EAAW,QAAQ,EACvE/B,EAAcD,EAAe,WAC7BF,EAAciC,EAAQ,OAAO,IAAI,CAACE,EAAON,IAAMM,EAAM,IAAI,EAC/DF,EAAQ,QAAQnE,GAAwBkC,EAAaiC,EAAQ,OAAO,CAAC,EAAE,SAAU/B,EAAgBC,CAAW,CAAC,CAC/G,EAEanC,GAAyBkE,GAA0D,CAC9F,IAAMtD,EAAYsD,EAAW,SAAoB,QAAQ,OAAQ,EAAE,EACnE,OAAOE,GAA4B,CAAC,SAAAxD,CAAQ,CAAC,CAC/C,IC/SA,IAUMyD,GAiBAC,GAYAC,GAIAC,GAyDOC,GApGbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0BAA0B,EAE5C,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EAEzDG,EAAaD,EAAM,OAASD,EAAW,OAAS,EAAIC,EAAM,OAASD,EAAW,OAC9EG,EAAkBH,EAAW,OAASC,EAAM,OAAS,EAAID,EAAW,OAASC,EAAM,OACvF,KAAOC,EAAaD,EAAM,QAAUE,EAAkBH,EAAW,OAAQ,EAAEE,EAAY,EAAEC,EACvF,GAAIF,EAAMC,CAAU,IAAMF,EAAWG,CAAe,GAAKF,EAAMC,CAAU,IAAM,GAC3EF,EAAWG,CAAe,IAAM,EAClC,MAAM,IAAI,MAAM,oDAAoD,CAG1E,EAEMb,GAAmB,CAACc,EAA2BC,IAAwC,CAC3F,IAAMC,EAAOF,EAAO,OAASC,EAAO,OAC9BJ,EAAkB,CAAC,EACzB,QAASM,EAAI,EAAGA,EAAID,EAAM,EAAEC,EAC1BN,EAAM,KAAKG,EAAOG,CAAC,CAAC,EAEtB,QAASA,EAAI,EAAGA,EAAIF,EAAO,OAAQ,EAAEE,EACnCN,EAAM,KAAKI,EAAOE,CAAC,IAAM,EAAIH,EAAOG,EAAID,CAAI,EAAID,EAAOE,CAAC,CAAC,EAE3D,OAAON,CACT,EAEMV,GAAuB,CAACS,EAA+BC,IACxDD,EAAW,OAASC,EAAM,OAAUX,GAAiBU,EAAYC,CAAK,EAAIX,GAAiBW,EAAOD,CAAU,EAG3GR,GAA2BO,GAA+C,CAC9E,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EACvDS,EAAwBjB,GAAqBS,EAAYC,CAAK,EAC9DQ,EAAWV,EAAO,CAAC,EAAE,SACrBW,EAAaD,IAAa,EAAgB,EAAI,EAC9CE,EAAa,KAAK,KAAKC,EAAU,KAAKJ,CAAW,EAAIE,CAAU,EAE/DG,EAAmBC,GAA+B,CACtD,IAAMC,EAAQC,EAAc,QAASP,EAAUT,EAAW,OAAQU,CAAU,EACtEO,EAASC,EAAe,SAAUT,EAAUD,EAAY,OAAQE,CAAU,EAC5ES,EACJ,GAAIV,IAAa,EAAe,CAC9B,IAAMW,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO;AAAA,6BAChDD,CAAC,MAAML,EAAO,gBAAgB,kBAAkBK,CAAC,GAAG,CAAC;AAAA,sBAC5DA,CAAC,MAAMP,EAAM,2BAA2B,gBAAgBO,CAAC,GAAIL,CAAM,CAAC;AAAA,qBACrEK,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIR,EAAM,YAAY,QAAQO,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAEhFH,EAAa;AAAA,0CACuBT,CAAU;AAAA;AAAA,UAE1CU,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCH,EAAO,YAAY,aAAc,MAAM,CAAC;AAAA,QAE9C,MACEE,EAAa;AAAA,8BACWF,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACtCF,EAAM,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,UAC3EA,EAAO,YAAY,aAAcF,EAAM,YAAY,aAAa,CAAC,CAAC;AAAA,SAGxE,MAAO;AAAA,MACLD,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBC,EAAOE,CAAM,CAAC;AAAA,MAC/EH,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,MACvEK,CAAU,EACd,EAEMK,EACF,CAAC,CAAC,QAAuB,KAAMb,CAAU,EAAG,GAAGc,EAA2BzB,EAAYQ,CAAW,CAAC,EACtG,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGA,EAAY,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACxE,gBAAAK,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAML,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBAAAa,CACF,EACF,CACF,EAEa/B,GAAUiC,GAAkC,CACvDrC,GAAeqC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAwBkC,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxE,ICvGA,IAaMC,GAiDOC,GA9DbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KACAC,KAIMP,GAA6BQ,GAAqD,CACtF,IAAMC,EAAWD,EAAa,CAAC,EAAE,SAC3BE,EAAaC,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAChDI,EAAaD,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAEh
DK,EAAUD,EAAa,IAAM,EAC7BE,EAAmBC,GAAuC,CAC9D,IAAMC,EAAIC,EAAc,IAAKR,EAAU,CAAC,CAAC,EAAG,CAAC,EACvCS,EAAOD,EAAc,OAAQR,EAAU,CAAC,CAAC,EAAG,CAAC,EAC7CU,EAAIC,EAAe,IAAKX,EAAU,CAAC,CAAC,EAAG,CAAC,EAExCY,EAA8B,CAAC,CAAC,KAAM,kBAAmB,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAEvGC,EAAqBC,GAAe;AAAA,gBAC9BA,CAAC,oCAAoCA,CAAC;AAAA,gBACtCA,CAAC,MAAML,EAAK,YAAY,OAAOK,CAAC,aAAa,CAAC,QAAQA,CAAC,gBAC7DC,EAAoBX,EACtB;AAAA,mBACWK,EAAK,YAAY,uCAAuC,CAAC,IACpE,GAAGI,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC;AAAA,mBACjFN,EAAE,KAAK,KAAK,gCAE3B,MAAO,GAAGD,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBL,EAAGE,EAAMC,CAAC,CAAC;AAAA;AAAA,MAEtEM,GAAaC,GAA0BjB,CAAQ,CAAC,CAAC;AAAA;AAAA,MAEvDM,EAAa,UAAUY,EAAc,CAAC;AAAA,QACpCZ,EAAa,sCAAsC,0BAA0B,CAAC;AAAA;AAAA,gBAEtEC,EAAE,YAAY,YAAY,CAAC;AAAA,QACnCQ,CAAiB;AAAA;AAAA,QAEjBL,EAAE,YAAY,aAAoBS,GAAmB,MAAM,CAAC,CAAC;AAAA,MAEnE,EAEA,MAAO,CACL,KAAM,mBACN,YAAa,CAAC,KAAM,GAAGf,CAAO,GAAI,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACrE,gBAAAC,EACA,WAAae,IAAY,CACvB,QAAS,CAAC,CAAC,KAAMA,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,gBACI,CAAC,CAAC,QAAuB,KAAM,KAAK,KAAKnB,EAAa,CAAC,CAAC,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACxG,cAAe,CAAC,EAAG,KAAK,KAAKF,EAAaiB,GAAiB,CAAC,CAAC,CAC/D,EACF,CACF,EAEa1B,GAAY6B,GAAkC,CACrDA,EAAQ,OAAO,OAAS,GAAKnB,EAAU,KAAKmB,EAAQ,OAAO,CAAC,EAAE,IAAI,IAAM,EACpE7B,GAAS6B,CAAO,EAEtBA,EAAQ,QAAQ9B,GAA0B8B,EAAQ,MAAM,CAAC,CAE7D,ICpEA,IAeMC,GAMAC,GAsGOC,GAGAC,GA9HbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,EAEMT,GAA0B,CAACS,EAA+BC,IAA8C,CAC5G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAeH,EAAO,CAAC,EAAE,KAEzBI,EAAYF,EAAW,OACvBG,EAAOC,EAAU,cAAcL,EAAW,KAAMG,CAAS,EAEzDG,EAAcL,EAAW,MAAM,CAAC,EACtCK,EAAY,OAAOF,EAAM,EAAG,GAAGF,CAAY,EAE3C,IAAMK,EAAeN,EAAWG,CAAI,EAC9BI,EAAaT,EAAO,CAAC,EAAE,WAAa,EAAgB,EAAI,EACxDU,EAAa,KAAK,KAAKJ,EAAU,KAAKC,CAAW,EAAIE,CAAU,EAE/DE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMH,CAAI,EAAG,GAAGO,EAA2BZ,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMO,CAAW,CAChH,EAEMM,EAAmBC,GAA+B,CACtD,IAAMC,EAAOC,EAAc,OAAQhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQS,CAAU,EAClFQ,EAAUD,EAAc,eAAgBhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACjFkB,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUO,EAAY,OAAQE,CAAU,EAEpFW,EAAmBC,GAA6B,CACpD,IAAMC,EAAcnB,EAAa,OAC7BoB,EAAU,qBAAqBF,CAAC,OAAOJ,EAAQ,KAAK,OAAO,OAC/D,QAASO,EAAI,EAAGA,EAAIF,EAAaE,IAC/BD,GAAW,GAAGD,EAAc,EAAI,iBAAiBD,CAAC,IAAIG,CAAC,IAAM,iBAAiBH,CAAC,EAAE,MAC7Ed,EAAY,OAAS,EAAI,gBAAgBc,CAAC,oBAAoBG,CAAC,IAAM,gBAAgBH,CAAC,EAAE,IAE9FE,GAAW;AAAA,mBACEF,CAAC,MAAMJ,EAAQ,aAAa,iBAAiBI,CAAC,EAAE,CAAC;AAAA,mBACjDA,CAAC;AAAA,iBACHA,CAAC,SAASA,CAAC;AAAA;AAAA,2BAEDA,CAAC,MAAMN,EAAK,KAAK,OAAO;AAAA,UAE7C,QAASS,EAAI,EAAGC,EAAI,EAAGD,EAAIpB,EAAWoB,IAChCA,IAAMnB,GACRkB,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,aAAaA,CAAC,KACvFI,GAAKH,IAELC,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,MACrEd,EAAY,OAAS,EAAI,gBAAgBc,CAAC,IAAII,CAAC,IAAM,gBAAgBJ,CAAC,EAAE,IAC5EI,KAGJ,OAAOF,CACT,EACIG,EACJ,GAAI1B,EAAO,CAAC,EAAE,WAAa,EAAe,CACxC,IAAM2B,EAAmB,CAACC,EAAgBP,EAAWQ,EAAW,KAAO;AAAA,6BAChDR,CAAC,MAAMH,EAAO,gBAAgB,kBAAkBG,CAAC,GAAG,CAAC;AAAA,YACtED,EAAgBC,CAAC,CAAC;AAAA,sBACRA,CAAC,MAAMN,EAAK,gBAAgB,cAAcM,CAAC,EAAE,CAAC;AAAA,qBAC/CA,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BO,CAAM,IAAIP,CAAC,OAAOQ,CAAQ,IAAId,EAAK,YAAY,QAAQM,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAE/EK,EAAa;AAAA,0CACuBjB,CAAU;AAAA;AAAA,UAE1CkB,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UAC
nCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCT,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,OAE/C,MACEQ,EAAa;AAAA,4BACSR,EAAO,gBAAgB,YAAY,CAAC;AAAA,QACxDE,EAAgB,EAAE,CAAC;AAAA,oBACPL,EAAK,aAAa,aAAa,CAAC;AAAA,QAC5CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,QAG7C,MAAO;AAAA,QAEHJ,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBC,EAAME,EAASC,CAAM,CAAC;AAAA,QAC5CJ,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,UACzEY,CAAU;AAAA,QAElB,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMzB,EAAW,SAAU,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMM,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAE,CACF,CACF,EAEarB,GAAyBS,GAClC6B,GAA4B,CAAC,KAAM7B,EAAW,IAAc,CAAC,EAEpDR,GAAS,CAACsC,EAAyB9B,IAAuC,CACrF,IAAMD,EAAS+B,EAAQ,OACvBzC,GAAeU,CAAM,EACrB+B,EAAQ,QAAQxC,GAAwBwC,EAAQ,OAAQ9B,CAAU,CAAC,CACrE,IClIA,IAeM+B,GAeAC,GA+DOC,GAGAC,GAhGbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM;AAAA,4DACwC,CAE5D,EAEMT,GACF,CAACS,EAA+BC,IAAsD,CACpF,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAsBH,EAAO,CAAC,EAAE,SAChCI,EAAYF,EAAW,OAEvBG,EAAeL,EAAO,CAAC,EAAE,KACzBM,EAAkBN,EAAO,CAAC,EAAE,SAC5BO,EAAOC,EAAU,cAAcP,EAAW,KAAMG,CAAS,EACzDK,EAAeP,EAAWK,CAAI,EAE9BG,EAAcL,EAAa,MAAM,CAAC,EAClCM,EAAaH,EAAU,KAAKE,CAAW,EAEvCE,EAAQC,EAAc,QAASV,EAAqBC,CAAS,EAC7DU,EAAUD,EAAc,eAAgBP,EAAiBD,EAAa,MAAM,EAC5EU,EAASC,EAAe,SAAUb,EAAqBO,EAAY,MAAM,EAGzEO,EAAoC,CACxC,CAAC,QAAuB,KAAMN,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMF,CAAI,CACpC,EACA,OAAAU,EAAgB,KAAK,GAAGC,EAA2BhB,EAAYG,EAAcK,CAAW,CAAC,EA4BlF,CACL,KAAM,iBACN,YAAa,CAAC,kBA7B8C,CAAC,OAAQ,MAAM,CA6B5C,EAC/B,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAM,CACF,GACA,gBA9BuBE,GAA+B;AAAA,QAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,EAASC,CAAM,CAAC;AAAA,QAC/CI,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,4BAErDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,kBAE9CD,EAAQ,YAAY,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA,2BAIxBF,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,eAAgB,gBAAiB,UAAU,CAAC;AAAA,oBACjDA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,QAE9CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAY3C,CACF,EAESvB,GAAiCS,GAC1CmB,GAA4B,CAAC,KAAMnB,EAAW,IAAc,CAAC,EAEpDR,GAAiB,CAAC4B,EAAyBpB,IAA+C,CACrG,IAAMD,EAASqB,EAAQ,OACvB/B,GAAeU,CAAM,EACrBqB,EAAQ,QAAQ9B,GAAgC8B,EAAQ,OAAQpB,CAAU,CAAC,CAC7E,ICpGA,IAWMqB,GA0BAC,GAwFOC,GAQAC,GArIbC,GAAAC,EAAA,kBAGAC,KAEAC,KAIAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,UACjCA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,SAC3D,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EASMR,GAAwB,CAACQ,EAA+BC,IAA4C,CACxG,IAAMC,EAASF,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BG,EAASH,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACI,EAAGC,EAAGC,CAAC,EAAIC,GAAS,qBACvBL,EAAQD,EAAW,OAAQE,EAAQF,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGQ,EAAc,CAACJ,EAAGC,CAAC,EACzB,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAMC,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAML,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,OAAsB,KA
AML,EAAW,KAAK,EAC/E,CAAC,OAAsB,KAAMA,EAAW,IAAI,CAC9C,EACMW,EAAwD,CAAC,OAAQ,MAAM,EACzEZ,EAAO,SAAW,IACpBW,EAAgB,KAAK,GAAGE,EAA2Bb,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEY,EAAkB,KAAK,MAAM,GAE/BD,EAAgB,KAAK,GAAGE,EAA2BL,CAAW,CAAC,EAE/D,IAAMM,EAAmBC,GAA+B,CACtD,IAAIC,EAAO,GACPf,EAAW,QAAUA,EAAW,OAClCe,EAAO,0DACEf,EAAW,QAAU,CAACA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAUA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAU,CAACA,EAAW,SAC3Ce,EAAO,2DAGT,IAAMC,EAAiBhB,EAAW,QAAU,EAAI,GAAK,2BAC/CiB,EAAIC,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDoB,EAAID,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDqB,EAAWH,EAAE,KAAK,MACpBI,EAAwB,KACtBC,EAAY,CAACL,EAAGE,CAAC,EACnBpB,EAAO,SAAW,IACpBsB,EAAIH,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAChEuB,EAAU,KAAKD,CAAC,GAElB,IAAME,EAASC,EAAe,SAAUzB,EAAO,CAAC,EAAE,SAAUQ,EAAY,MAAM,EAC9Ee,EAAU,KAAKC,CAAM,EACrB,IAAME,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC/G,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAC1D,EACA,MAAO;AAAA,IACPX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA;AAAA,IAEtER,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kBAK9DM,CAAQ;AAAA;AAAA,QAElBL,CAAI;AAAA;AAAA;AAAA,MAGNC,CAAc;AAAA,OACb,IACGK,GAAK,KACA,iBAAiBA,EAAE,2BAA2B,aAAcE,CAAM,CAAC,cACtEH,CAAQ,qBAAqBC,EAAE,YAAY,SAAS,CAAC,IAEpD,IACN,CAAC;AAAA;AAAA,IAGN,EAEA,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGrB,EAAW,QAAQ,GAAI,kBAAAW,CAAiB,EAC/D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAUR,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKS,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAG,CACF,CACF,EAEarB,GAAuBQ,GAAwD,CAC1F,IAAM0B,EAAS1B,EAAW,OACpB2B,EAAS3B,EAAW,OACpB4B,EAAQ5B,EAAW,MACnB6B,EAAO7B,EAAW,KACxB,MAAO,CAAC,OAAA0B,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,SAAU,GAAG7B,EAAW,MAAM,IAAIA,EAAW,MAAM,IAAIA,EAAW,QAAU,CAAC,EAAE,CACtH,EAEaP,GAAO,CAACqC,EAAyB9B,IAAqC,CACjFV,GAAewC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAAsBuC,EAAQ,OAAQ9B,CAAU,CAAC,CACnE,ICxIA,IAaM+B,GAGAC,GAiOOC,GAGPC,GAEAC,GA0COC,GA2BAC,GA3TbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMf,GAAW,CAACgB,EAA+BC,IAC5CD,EAAO,OAASC,GAAOD,EAAOC,CAAC,EAAE,KAAK,OAAS,GAAOC,EAAU,KAAKF,EAAOC,CAAC,EAAE,IAAI,EAAK,EAAID,EAAOC,CAAC,EAAI,OAEvGhB,GAAiB,CAACe,EAA+BG,IAAoD,CACzG,IAAMC,EAAQJ,EAAO,CAAC,EAChBK,EAAMrB,GAASgB,EAAQ,CAAC,EACxBM,EAAQtB,GAASgB,EAAQ,CAAC,EAC1BO,EAAOvB,GAASgB,EAAQ,CAAC,EACzBQ,EAAiBxB,GAASgB,EAAQ,CAAC,EACnCS,EAAuBzB,GAASgB,EAAQ,CAAC,EACzCU,EAAU1B,GAASgB,EAAQ,CAAC,EAC5BW,EAAY3B,GAASgB,EAAQ,CAAC,EAoCpC,GAAII,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMQ,EAAe,GACfC,EAAYT,EAAM,KAAK,CAAC,EACxBU,EAAiBV,EAAM,KAAK,CAAC,EAC7BW,EAAaX,EAAM,KAAK,SAAW,EAAKQ,EAAeR,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3EY,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaZ,EAAW,QAAQ,EAC5D,GAAIO,GAAWC,EAAW,CACxB,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIA,EAAQ,KAAK,CAAC,IAAMG,GAAaH,EAAQ,KAAK,CAAC,IAAMP,EAAW,UAAYO,EAAQ,KAAK,CAAC,IAAMS,EAClG,MAAM,IAAI,MAAM,iFAAiF,EAEnG,GAAIR,EAAU,KAAK,CAAC,IAAME,GAAaF,EAAU,KAAK,CAAC,IAAMR,EAAW,UACpEQ,EAAU,KAAK,CAAC,IAAMQ,EACxB,MAAM,IAAI,MAAM,mFAAmF,EAErG,GAAIT,EAAQ,KAAK,CAAC,IAAMC,EAAU,KAAK,CAAC,EACtC,MAAM,IAAI,MAAM,gFAAgF,EAElG,GAAIA,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEvEM,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,CACpC,SAAWA,GAAWC,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIS,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI
,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAIA,EAAI,KAAK,CAAC,IAAMD,EAAM,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,6DAA6D,EAE/EgB,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMc,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIb,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMc,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GC,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,GAAIb,EAAM,CACR,GAAIA,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8CAA8C,EAGhE,GAAID,GACEF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,CAAC,IAAM,EAC/C,MAAM,IAAI,MAAM,oCAAoC,CAG1D,CAEA,IAAIiB,IACJ,GAAIb,EAAgB,CAClBa,EAAW,EACX,IAAMC,EAAWd,EAAe,KAUhC,MATIc,EAAS,SAAW,EAClBA,EAAS,CAAC,IAAMT,EAClBQ,EAAW,EACFC,EAAS,CAAC,IAAM,EAAIT,EAAY,IACzCQ,EAAW,GAEJC,EAAS,SAAW,GAAKA,EAAS,CAAC,IAAMT,GAAaS,EAAS,CAAC,IAAMN,IAC/EK,EAAW,GAETA,IAAa,EACT,IAAI,MAAM,0FAA0F,EAEtG,IAAI,MAAM,oBAAoB,CACtC,CAEA,IAAIE,EAAe,GACfC,EAAcT,EAClB,GAAIT,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FkB,EAAclB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGkB,EAAclB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CiB,EAAe,EACjB,CACF,CAEA,IAAME,EAAsBR,EAAqBD,EAC3CU,EAAsB,GAE5B,GAAIlB,EACF,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIC,EAAsB,CACxB,GAAIA,EAAqB,KAAK,SAAW,EACvC,MAAM,IAAI,MAAM,iEAAiE,EAEnF,GAAKA,EAAqB,KAAK,CAAC,IAAMI,GAAaJ,EAAqB,KAAK,CAAC,IAAM,GAChFA,EAAqB,KAAK,CAAC,IAAMN,EAAW,UAAYM,EAAqB,KAAK,CAAC,IAAMK,GACzFL,EAAqB,KAAK,CAAC,IAAMgB,EACnC,MAAM,IAAI,MAAM,2FAA2F,CAE/G,CAEA,MAAO,CACL,UAAAZ,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAS,EACA,kBAAAP,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAS,EACA,SAAAL,EACA,UAAW,KAAK,MAAMK,EAAcrB,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAuB,EACA,aAAAH,EACA,UAAAH,CACF,CACF,EAEalC,GAAqCiB,GAC9CwB,GAA4B,CAAC,GAAGxB,CAAU,CAAC,EAEzChB,GAAgDwC,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhGvC,GACF,CAACwC,EAAyBC,EAAiBtB,EAAkBM,EAAmBC,EAC/EC,EAAoBe,IAAuB,CAC1C,IAAMC,EAAc,CAAClB,EAAWC,EAAgBC,CAAU,EACpDiB,EAAa9B,EAAU,KAAK6B,CAAW,EACvCE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,QAAuB,KAAMF,CAAU,EACnF,CAAC,QAAuB,KAAMf,CAAU,CAC1C,EAEMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,gBAAiBR,EAAI,SAAUE,CAAW,EAClEO,EAAWC,EAAc,MAAOV,EAAI,SAAUE,CAAW,EACzDS,EAAYD,EAAc,OAAQhC,EAAK,SAAUwB,CAAW,EAE5DU,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,CAC3G,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBH,EAAUE,EAAWJ,CAAM,CAAC;AAAA,IACrFD,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,IAK1E,EAEA,OAAOP,EAAQ,QACX,CACE,KAAM,4BACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACjD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMG,EAAa,SAAUF,EAAI,SAAU,aAAgC,CAAC,EACvF,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAC,CACF,EACA,CAAC,OAAQ,CAACL,EAAKtB,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC7C,EAESlB,GACT,CAACuC,EAAyBf,EAAm
B6B,EAAkB5B,EAAwBK,EACtFwB,EAAmBpC,EAAmBuB,IAAwB,CAG7D,IAAIc,EAAgBD,EACpB,GAAKpC,EAOE,CACL,GAAIO,IAAmB,EACrB,MAAM,IAAI,MAAM,mFAAmF,EAEnG,OAAA8B,EACIxD,GAAiBwC,EAASe,EAAOpC,EAAMM,EAAWC,EAAgB4B,EAAWvB,EAAUW,CAAW,EACtGc,EAAgBA,EAAc,QAAQ,CAAC/B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,EAC9ES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAEnD,KAjBE,QAAID,EAAM,KAAK,SAAW,IACxBC,EAAgBD,EAAM,QAAQ,CAAC9B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,GAExES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAanD,EAEStD,GAAqB,CAACsC,EAAyBzB,IAAqC,CAC/F,IAAM2C,EAAS7D,GAAe2C,EAAQ,OAAQzB,CAAU,EAClDC,EAAQwB,EAAQ,OAAO,CAAC,EACxBvB,EAAMrB,GAAS4C,EAAQ,OAAQ,CAAC,EAChCtB,EAAQtB,GAAS4C,EAAQ,OAAQ,CAAC,EAClCrB,EAAOvB,GAAS4C,EAAQ,OAAQ,CAAC,EACjCpB,EAAiBxB,GAAS4C,EAAQ,OAAQ,CAAC,EAC3CnB,EAAuBzB,GAAS4C,EAAQ,OAAQ,CAAC,EACjDlB,EAAU1B,GAAS4C,EAAQ,OAAQ,CAAC,EACpCjB,EAAY3B,GAAS4C,EAAQ,OAAQ,CAAC,EAC5C,GAAIxB,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIC,GAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8BAA8B,EAIhD,IAAM0C,EAAS1C,GAAOC,GAASD,EAAI,KAAK,SAAW,GAAKC,EAAM,KAAK,SAAW,EAExE0C,EAAI3D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAU1C,EAAOG,EAAM,CAAC,EAEtG,GAAIwC,EACF,OAAOE,GACHrB,EAASoB,EAAG3C,EAAKC,EAAOE,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAC7F3C,CAAU,EAEhB,GAAI,CAACE,GAAO,CAACC,EACX,MAAM,IAAI,MAAM,gCAAgC,EAElD,IAAM4C,EAAI7D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,SAAUzC,EAAKE,EAC3FuC,EAAO,UAAU,EAEfK,EAAI9D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,UAAWxC,EAAOC,EAC9F,EAAIuC,EAAO,UAAU,EAEzBG,GACIrB,EAASoB,EAAGE,EAAGC,EAAG3C,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAAQ3C,CAAU,CAC/G,ICrWA,IAUMiD,GAIAC,GAyBAC,GAUOC,GAuCAC,GAxFbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMT,GAAcU,GAChB,MAAM,KAAKA,EAAkB,iBAAiB,EAAG,MAAM,EAGrDT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,IAChEA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,oCAAoC,EAKtD,GAFmCX,GAAWW,EAAO,CAAC,CAAC,EAE3C,SAAWA,EAAO,CAAC,EAAE,KAAK,OACpC,MAAM,IAAI,MAAM,uFAAuF,CAE3G,EAEMT,GAAiB,CAACU,EAA+BC,IAAkD,CACvG,IAAMC,EAAwB,CAAC,EAE/B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQ,EAAEG,EACvCD,EAAY,KAAKF,EAAWG,CAAC,EAAIF,EAAQE,CAAC,CAAC,EAG7C,OAAOD,CACT,EAEaX,GAAwB,CAACQ,EAA+BK,IAAkC,CACrG,IAAMJ,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAA6BG,GAAgBhB,GAAWW,EAAO,CAAC,CAAC,EACjEG,EAAcZ,GAAeU,EAAYC,CAAO,EAChDI,EAAaC,EAAU,KAAKJ,CAAW,EAEvCK,EAAWR,EAAO,CAAC,EAAE,SACrBS,EAAQC,EAAc,QAASF,EAAUP,EAAW,MAAM,EAC1DU,EAASC,EAAe,SAAUJ,EAAUL,EAAY,MAAM,EAE9DU,EAAmBC,GAA+B;AAAA,2BAC/BL,EAAM,QAAQ,GAAGR,CAAU,CAAC;AAAA,QAC/Ca,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAOE,CAAM,CAAC;AAAA,QAClFG,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACrDH,EAAO,gBAAgB,YAAY,CAAC;AAAA,2BACtCF,EAAM,KAAK,OAAO;AAAA,4BACjBR,EAAW,MAAM;AAAA,4BACjBQ,EAAM,WAAW,uBAAwB,GAAG,CAAC;AAAA,gCACzCE,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,UAE9DF,EAAM,WAAW,gBAAiB,IAAK,iBAAiB,CAAC;AAAA;AAAA,QAE3DE,EAAO,YAAY,aAAcF,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,OAG3E,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGP,CAAO,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC7D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKM,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGS,EAA2Bf,EAAO,CAAC,EAAE,KAAMG,CAAW,CAAC,CAC5G,GACA,gBAAAU,CACF,CACF,EAEapB,GAAQuB,GAAkC,CACrD1B,GAAe0B,EAAQ,MAAM,EAC7BA,EAAQ,QAAQx
B,GAAsBwB,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACtE,IC3FA,IAeaC,GA6KPC,GAuGOC,GAGPC,GAEAC,GA+BOC,GAvUbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KACAC,KACAC,KACAC,KACAC,KAEaf,GAAiB,CAACgB,EAA+BC,IAAoD,CAChH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAMH,EAAO,CAAC,EACdI,EAAQJ,EAAO,CAAC,EAChBK,EAAUL,EAAO,CAAC,EAClBM,EAAYN,EAAO,CAAC,EA+B1B,GAAIE,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMK,EAAe,GACfC,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAaR,EAAM,KAAK,SAAW,EAAKK,EAAeL,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3ES,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaT,EAAW,QAAQ,EACtDc,EAAaV,GAAWA,EAAQ,KAAK,SAAW,EAChDW,EAAeV,GAAaA,EAAU,KAAK,SAAW,EAEtDW,EAAe,GACrB,GAAIF,GAAcC,EAAc,CAC9B,GAAIX,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIC,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEnEW,GAEFL,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,IAGlCO,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,EAEtC,SAAWU,GAAcC,EACvB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIE,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAID,EAAM,KAAK,CAAC,EAAIC,EAAI,KAAK,CAAC,IAAM,EAClC,MAAM,IAAI,MAAM,sDAAsD,EAExEe,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMW,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIV,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMW,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GI,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,IAAMC,IACFC,EAAe,GACfC,EAAcX,EAClB,GAAIN,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FiB,EAAcjB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGiB,EAAcjB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CgB,EAAe,EACjB,CACF,CACA,IAAME,EAAsBV,EAAqBD,EAC3CY,EAAsB,GAE5B,MAAO,CACL,UAAAf,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAW,EACA,kBAAAT,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAW,EACA,SAAAP,EACA,UAAW,KAAK,MAAMO,EAAcpB,EAAW,UAAW,EAC1D,SAAUA,EAAW,SACrB,WAAYA,EAAW,WACvB,MAAOA,EAAW,SAAWA,EAAW,WACxC,uBAAwB,GACxB,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAsB,EACA,aAAAH,EACA,UAAAF,EACA,aAAAD,CACF,CACF,EAEMhC,GACF,CAACuC,EAAeC,EAAyBC,EAAoBC,IAA6C,CACxG,IAAMC,EAAc,CAACD,EAAO,UAAWA,EAAO,oBAAqBA,EAAO,WAAaA,EAAO,QAAQ,EAChGE,EAAY,EACZC,EAAaC,EAAU,KAAKH,CAAW,EAAIC,EAC3CG,EAAwBL,EAAO,oBAC/BM,EAASC,EAAe,aAAcR,EAAUE,EAAY,OAAQC,CAAS,EAC7EM,EAASC,EAAc,SAAUZ,EAAE,SAAUA,EAAE,KAAK,OAAQK,CAAS,EACrEQ,EAASZ,EAAIW,EAAc,UAAWX,EAAE,SAAUA,EAAE,KAAK,OAAQI,CAAS,EAAI,OAE9ES,EAAI,KAAK,KAAKX,EAAO,SAAWE,CAAS,EACzCU,EAAW,CAAC,EAAGP,EAAuB,EAAGR,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,EAExDgB,EAAwDf,EAAI,CAAC,OAAQ,MAAM,EAAI,CAAC,MAAM,EAEtFgB,EAAoC,CACxC,CAAC,QAAuB,KAAMX,CAAU,EAAG,CAAC,QAAuB,KAAMH,EAAO,kBAAkB,EAClG,CAAC,QAAuB,KAAMA,EAAO,gBAAgB,EACrD,CAAC,QAAuB,KAAMA,EAAO,mBAAmB,CAC1D,EAEM3B,EAAS,CAACmC,CAAM,EAClBE,GA
CFI,EAAgB,KACZ,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2BjB,EAAG,IAAI,EAC5E,GAAGiB,EAA2Bd,CAAW,CAAC,EAC9C5B,EAAO,KAAKqC,CAAM,GAElBI,EAAgB,KAAK,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2Bd,CAAW,CAAC,EAExG,IAAMe,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,EACxG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CACtC,EAEMC,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sDAOVC,EAAS;AAAA;AAAA;AAAA;AAAA,qDAKTC,EAAYrB,EAAI;AAAA,UAClBmB,CAAO;AAAA;AAAA,UAEPC,CAAM;AAAA,WAEY;AAAA,YAChBA,CAAM;AAAA,WAINE,EAAmBC,GAA+B;AAAA;AAAA,IAE1DA,EAAa,iBAAiBL,CAAQ,EAAE,iBAAiB,GAAG3C,EAAQiC,CAAM,CAAC;AAAA,IAC3Ee,EAAa,UAAU,CACnBV,EAAGX,EAAO,WAAa,CACzB,CAAC,CAAC;AAAA,MACFqB,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,oBAC5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKlCN,EAAO,UAAW;AAAA,cAC1BW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKKX,EAAO,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAajCmB,CAAS;AAAA,KAGT,MAAO,CACL,KAAM,gBACN,YAAa,CAAC,KAAM,GAAGnB,EAAO,UAAW,GAAGW,CAAC,GAAG,CAAC,CAACb,CAAC,GAAI,kBAAAe,CAAiB,EACxE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMZ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAea,EACf,gBAAAE,CACF,GACA,gBAAAM,CACF,CACF,EAES7D,GAAsCe,GAC/CgD,GAA4B,CAAC,GAAGhD,CAAU,CAAC,EAEzCd,GAAgD8D,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhG7D,GACF,CAAC8D,EAAyBC,EAAmBC,EAA8BzB,EAC1E0B,IAAwB,CACvB,IAAIC,EAAgBH,EACdI,EAAW5B,EAAO,WAClB6B,EAAQ7B,EAAO,MACrB,OAAIwB,EAAM,KAAK,SAAW,GAAKxB,EAAO,mBAAqB,IACzD2B,EAAgBH,EAAM,QAAQ,CAACxB,EAAO,UAAWA,EAAO,iBAAkB4B,EAAU5B,EAAO,QAAQ,CAAC,GAGlGyB,EACFE,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAeF,EAAQE,EAAc,SAAU3B,CAAM,EAC7E,CAAC,OAAQ,CAAC2B,EAAeF,CAAM,EAAG,QAAS,CAACzB,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAE3FC,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAe,OAAWA,EAAc,SAAU3B,CAAM,EAChF,CAAC,OAAQ,CAAC2B,CAAa,EAAG,QAAS,CAAC3B,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjFG,IAAU,IACZF,EAAgBJ,EAAQ,QACpBO,GAAsB,CAACH,CAAa,EAAG,CAAC,EAAG,EAAG,EAAGE,CAAK,CAAC,EAAG,CAAC,OAAQ,CAACF,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EACzGA,EACIA,EAAc,QAAQ,CAAC3B,EAAO,UAAWA,EAAO,oBAAqB4B,EAAWC,EAAO7B,EAAO,QAAQ,CAAC,GAGtGuB,EAAQ,QACXQ,GAA2BJ,EAAenE,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACmE,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CACjD,EAESjE,GAAsB,CAAC6D,EAAyBjD,IAAqC,CAChG,IAAM0B,EAAS3C,GAAekE,EAAQ,OAAQjD,CAAU,EACxD,GAAIiD,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpC,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIA,EAAQ,OAAO,CAAC,GAAG,KAAK,SAAW,EACrC,MAAM,IAAI,MAAM,8BAA8B,EAGhD,IAAMS,EAAIC,GACNV,EAASvB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAUuB,EAAQ,OAAO,CAAC,EAAG,OACvG,CAAC,EACC7C,EAAU6C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OACzF5C,EAAY4C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OAC3FW,EAAIzE,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG7C,EAASsB,EAAQ,CAAC,EAChFmC,EAAI1E,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG5C,EAAWqB,EAAQ,CAAC,EACxFoC,GAAeb,EAASS,EAAGE,EAAGC,EAAG,OAAW,OAAW,OAAW,OAAW,OAAWnC,EAAQ1B,CAAU,CAC5G,ICzVA,IAeM+D,GAwGAC,GAwHAC,GAoDOC,GAnSbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAOMR,GACF,CAACS,EAA+BC,IAAoD,CAClF,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdE,EAAO,EACPC,EAAYC,EAAU,gBAAgBJ,EAAQE,CAAI,EAClDG,EAAWD,EAAU,kBAAkBJ,EAAQE,CAAI,EACnDI,EAAaC,GAAiBF,CAAQ,EACtCG,EAAiBH,EAAWC,EAC5BG,EAAa,CAACT,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGQ,CAAc,EAClDE,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EC,EACF,CAAC,CAAC,QAAuB,KAAMN,CAAQ,EAAG,CAAC,QAAuB,KAAMG,CAAc,CAAC,EAC3FG,EAAgB,KAAK,GAAGC,EAA2BH,EAAYA,CAAU,CAAC,EAE1E,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAKlB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACxEW,EAAQD,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEoB,EAAOF,EAAc,OAAQlB,
EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/DqB,EAASC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACnFe,EAAY,CAACN,EAAGE,EAAOC,EAAMC,CAAM,EACnCG,EAAWP,EAAE,KAAK,MAClBQ,EAAUjB,IAAe,EAAI,MAAQ,MAAMA,CAAU,QACrDkB,EAAgB,GAEhBC,EAA8B,CAAC,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CAAC,EAC3G,MAAO;AAAA;AAAA;AAAA,2CAG4BF,CAAO,KAAKC,CAAa;AAAA,0BAC1CA,CAAa;AAAA,IACnCV,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGJ,CAAS,CAAC;AAAA,IACtEP,EAAa,UAAUU,CAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOrBD,CAAO;AAAA;AAAA,4BAECA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAahDW,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKhDiB,CAAO;AAAA;AAAA,yBAEEA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,OAAOQ,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAcpDG,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,mFAIYP,EAAW,OAAO;AAAA,yCAC5DkB,EAAM,YAAY,SAAS,CAAC;AAAA,6BACxCC,EAAK,YAAY,SAAS,CAAC;AAAA;AAAA,oBAEpCH,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,MAAMO,CAAQ,IAAIC,CAAO,qBAAqBD,CAAQ,IAC5FC,CAAO;AAAA,QACXJ,EAAO,IAAI,QAAS,UAAW,IAAK,OAAO,CAAC;AAAA;AAAA,IAG9C,EACA,MAAO,CACD,KAAM,wBAEV,YAAa,CAAC,KAAM,GAAGpB,EAAW,OAAO,IAAIO,CAAU,GAAI,kBAAAI,CAAiB,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAGK,CAAS,EAC5B,gBAAAQ,CACF,GACA,gBAAAE,CACF,CACF,EAEEvB,GACF,CAACqC,EAAyBC,EAAmBX,EAAmBC,EAAkBW,EAAWC,EAAWC,EACvGC,IAAoB,CACnB,IAAM1B,EAAaC,GAAiBwB,CAAC,EAC/BE,EAAK,GAGLC,EAAa5B,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC5D6B,EAAc7B,IAAe,EAAI,MAAQ,MAAMA,CAAU,IACzD8B,EAAiB,CAACC,EAAcC,IAAiB,GAAGJ,CAAU,IAAIG,CAAI,KAAKC,CAAI,IAC/EC,EAAcV,EAAIE,EAAIzB,EACtBkC,EAAS,KAAK,KAAKV,EAAIG,CAAE,EAEzBQ,EAA4D,CAAC,MAAM,EACnEC,EAAwC,CAC5C,CAAC,QAAuB,KAAMF,CAAM,EAAG,CAAC,QAAuB,KAAMV,CAAC,EACtE,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAMwB,EAAIC,EAAIzB,CAAU,CAAC,CAC9D,EAEMqC,EAAuB7B,GAA+B,CAC1D,IAAM8B,EAAc5B,EAAc,QAASY,EAAM,SAAUA,EAAM,KAAMtB,CAAU,EACjF,MAAO;AAAA,IACXQ,EAAa,iBAAiB8B,CAAW,CAAC;AAAA,kEACoBV,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAUmB,CAAE,CAAC;AAAA,4CACcA,CAAE;AAAA,+CACCA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAQjCY,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA;AAAA,sBAE9B6B,CAAW;AAAA;AAAA;AAAA;AAAA,2BAINC,EAAe,MAAO,YAAY,CAAC;AAAA,IAExD,EAEMU,EAAanB,EAAQ,QACvB,CACE,KAAM,0BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAmBmC,CAAqB,EAC7E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACZ,EAAGE,EAAGE,EAAI,CAAC,EAAG,UAAwB,CAChD,EACA,cAAe,CAAC,EAAGJ,EAAIE,EAAIzB,CAAU,EACrC,gBAAiBoC,CACnB,GACA,gBAAiBC,CACnB,EACA,CAAC,OAAQ,CAACf,CAAK,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjCjB,EAAoC,CACxC,CAAC,QAAuB,KAAM4B,CAAW,EAAG,CAAC,QAAuB,KAAMT,CAAC,EAC3E,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAM2B,EAAKF,EAAIzB,CAAU,CAAC,CAC/D,EACMI,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EG,EAAmBC,GAA+B,CACtD,IAAMiC,EAAc/B,EAAc,QAASC,EAAM,SAAUA,EAAM,KAAMX,CAAU,EAC3E0C,EAAahC,EAAc,OAAQE,EAAK,SAAUA,EAAK,KAAMZ,CAAU,EAC7E,MAAO;AAAA,2DAC4C4B,CAAU;AAAA,2DACVa,EAAY,KAAK,OAAO;AAAA,0DACzBC,EAAW,KAAK,OAAO;AAAA,kEACfd,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,wBAAwB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKlE+B,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA,mCACjB2B,CAAE;AAAA,gEAC2BA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+DAMHD,CAAO;AAAA,qCACjCG,CAAW;AAAA,yBACvBA,CAAW;AAAA;AAAA,2BAETC,EAAe,eAAgB,cAAc,CAAC;AAAA,IAEnE,EACA,OAAOT,EAAQ,QACX,CACE,KAAM,uCAEN,YAAa,CAAC,KAAM,GAAGrB,CAAU,IAAI0B,CAAO,GAAI,kBAAAtB,CAAiB,EACjE,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACmB
,EAAGE,EAAG,CAAC,EAAG,UAAwB,CAC5C,EACA,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAc,EAAuB,CAAC,EACnE,gBAAA5B,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACiC,EAAY7B,EAAOC,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC3D,EAEE3B,GACF,CAACoC,EAAyB7B,EAA+BC,IAAuC,CAC9F,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdiD,EAAIjD,EAAO,CAAC,EACZkD,EAAIlD,EAAOA,EAAO,OAAS,CAAC,EAC5BmD,EAAI/C,EAAU,kBAAkBJ,EAAQ,CAAC,EAAIkD,EAC7C5C,EAAaC,GAAiB2C,CAAC,EAC/BE,EAAahD,EAAU,KAAKH,CAAW,EAAIK,EAC3CK,EACF,CAAC,CAAC,QAAuB,KAAMwC,CAAC,EAAG,CAAC,QAAuB,KAAM,KAAK,MAAMD,EAAI5C,CAAU,CAAC,CAAC,EAC1FI,EAAwD,CAAC,OAAQ,MAAM,EAEvE2C,EAAoB/D,GAAYqC,EAAS7B,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGmD,EAAGE,EAAGD,EAAGnD,EAAW,OAAO,EACrGc,EAAmBC,GAA+B,CACtD,IAAMQ,EAAWgC,GAA4BxD,EAAO,CAAC,EAAE,QAAQ,EACzDyD,EAAYjD,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC3DkD,EAAgBlD,IAAe,EAAIgB,EAAW,MAAMhB,CAAU,IAAIgB,CAAQ,IAE1EsB,EAAc5B,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMQ,CAAU,EACnFmD,EAAerC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUG,EAAaK,CAAU,EAEzF,MAAO;AAAA,2DAC4CsC,EAAY,KAAK,OAAO;AAAA,gEACnBW,CAAS;AAAA,kEACPE,EAAa,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,IAIvF3C,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kDAMsB0C,CAAa,eAAeA,CAAa;AAAA,IAErF,EACA7B,EAAQ,QACJ,CACE,KAAM,4BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAAI,CAAiB,EACtD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKsD,EAAa,EAAuB,CAAC,EAClE,gBAAAzC,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACf,EAAO,CAAC,EAAGuD,CAAiB,CAAC,CAAC,CAC9C,EAES7D,GAAe,CAACmC,EAAyB5B,IAA6C,CAC7FA,EAAW,SAAW,OACxBR,GAAkCoC,EAASA,EAAQ,OAAQ5B,CAAU,EAErE4B,EAAQ,QAAQtC,GAA8BsC,EAAQ,OAAQ5B,CAAU,CAAC,CAE7E,ICzSA,IAgBM2D,GAMAC,GA6GOC,GAnIbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAQMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,CAE3D,EAEMP,GACF,CAACO,EAA+BC,EAAiCC,IAAqC,CACpG,IAAMC,EAAaF,EAAW,WAExBG,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAAQL,EAAO,CAAC,EAChBM,EAAO,CAACH,GAAcH,EAAO,CAAC,EAE9BO,EAAcH,EACdI,EAAOC,EAAU,cAAcR,EAAW,KAAMG,EAAO,MAAM,EAC7DM,EAAYD,EAAU,gBAAgBL,EAAQI,CAAI,EAClDG,EAAWF,EAAU,kBAAkBL,EAAQI,CAAI,EAEnDI,EAAYH,EAAU,KAAKJ,EAAM,IAAI,EACrCQ,EAAWP,EAAOG,EAAU,KAAKH,EAAK,IAAI,EAAI,EACpD,GAAIM,IAAcD,GAAaL,GAAQO,IAAaF,EAClD,MAAM,IAAI,MAAM,+BAA+BA,CAAQ;AAAA;AAAA,2BAEpCC,CAAS,qBAAqBC,CAAQ,EAAE,EAG7D,IAAMC,EAA6B,CAAC,EACpC,QAASC,EAAI,EAAGA,EAAIX,EAAO,OAAQ,EAAEW,EAC/BA,EAAIP,EACNM,EAAiB,KAAKV,EAAOW,CAAC,CAAC,EAE/BD,EAAiB,KAAK,CAAC,EAG3B,IAAME,EAAaC,GAAiBN,CAAQ,EACtCO,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAoC,CACxC,CAAC,QAAuB,KAAMT,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAQ,EAC/E,CAAC,QAAuB,KAAM,KAAK,MAAMA,EAAWK,CAAU,CAAC,EAC/D,CAAC,OAAsB,KAAMf,EAAW,OAAO,CACjD,EACIK,GACFY,EAAkB,KAAK,MAAM,EAE/B,IAAME,EAAoBlB,EAAc,EAClCmB,EAAkBnB,EAAc,EAEhCoB,EAAmBC,GAA+B,CACtD,IAAMC,EAAWC,GAA4BzB,EAAO,CAAC,EAAE,QAAQ,EACzD0B,EAAY,CAChBC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAU,EACjEW,EAAc,QAAStB,EAAM,SAAUA,EAAM,KAAMW,CAAU,CAC/D,EACIV,GACFoB,EAAU,KAAKC,EAAc,OAAQrB,EAAK,SAAUA,EAAK,KAAMU,CAAU,CAAC,EAE5EU,EAAU,KAAKE,EAAe,SAAU5B,EAAO,CAAC,EAAE,SAAUO,EAAaS,CAAU,CAAC,EAChFI,GACFM,EAAU,KAAKE,EAAe,qBAAoCd,CAAgB,CAAC,EAEjFO,GACFK,EAAU,KAAKE,EAAe,mBAAkCd,CAAgB,CAAC,EAGnF,IAAMe,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAClE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,CAC5E,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA,IACtEH,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,wBAEvDO,GAAW,MAAOd,CAAU,CAAC;AAAA,+BACtBc,GAAW,MAAOd,CAAU,CAAC;AAAA;AAAA;AAAA,oBAGxCe,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA,iBAInDgB,GAAU,cAAehB,CAAU,CAAC;AAAA,oC
ACjBgB,GAAU,qBAAsBhB,CAAU,CAAC,yBACnEb,EAAa,GAAK,eAAe;AAAA;AAAA;AAAA,uBAGtB4B,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA,uBAChDe,GAAUP,EAAUR,EAAY,UAAU,CAAC;AAAA,6BACrCU,EAAU,CAAC,EAAE,KAAK,KAAK,cAAcvB,EAAa,GAAK,QAAQ;AAAA,UAClFG,EAAO,KAAKyB,GAAUP,EAAUR,EAAY,SAAS,CAAC,GAAK,EAAE;AAAA;AAAA;AAAA;AAAA,MAIjEI,EAAoB,sCAAwC,EAAE;AAAA,MAC9DC,EAAkB,2CAA6C,EAAE;AAAA,IAEjE,EACMY,EAAU,CAAC,CAAC,KAAM1B,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIoB,GACFa,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAE7DO,GACFY,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAG1D,CACL,KAAM,qBACN,YAAa,CAAC,KAAM,GAAGE,CAAU,IAAId,CAAW,IAAIC,CAAU,GAAI,kBAAAe,CAAiB,EACnF,WAAY,KACP,CAAC,QAAAe,EAAS,cAAe,CAAC,EAAG,KAAK,KAAKvB,EAAY,EAAuB,CAAC,EAAG,gBAAAS,CAAe,GAClG,gBAAAG,CACF,CACF,EAES5B,GAAY,CAACwC,EAAyBjC,IAA0C,CAC3FT,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQzC,GAA2ByC,EAAQ,OAAQjC,EAAYiC,EAAQ,WAAW,CAAC,CAC7F,ICtIA,IAoBMC,GA+BOC,GAqPAC,GAQAC,GAhTbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAWMT,GAAiB,CAACU,EAA+BC,IAA4C,CACjG,GAAID,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,oCAAoC,EAEtD,IAAME,EAAIF,EAAO,CAAC,EACZG,EAAQD,EAAE,KAAK,OACrB,GAAIA,EAAE,KAAKC,EAAQ,CAAC,IAAMF,EAAW,EACnC,MAAM,IAAI,MAAM,wDAAwD,EAE1E,IAAMG,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3FI,EAAWJ,EAAW,UAAY,EAAIA,EAAW,KACjDK,EAAIN,EAAO,CAAC,EAClB,GAAI,CAACO,EAAU,SAASD,EAAE,KAAM,CAACL,EAAW,EAAGG,EAAeC,CAAQ,CAAC,EACrE,MAAM,IAAI,MAAM,6EAA6E,EAG/F,IAAMG,EADSR,EAAO,CAAC,EACI,KAC3B,GAAIO,EAAU,KAAKC,CAAW,IAAMP,EAAW,EAAIG,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAE5C,GAAIJ,EAAO,SAAW,EAAG,CAEvB,IAAMS,EADaT,EAAO,CAAC,EACQ,KAC7BU,EACFT,EAAW,KAAO,EAAKA,EAAW,EAAIG,EAAiBH,EAAW,EAAI,KAAK,OAAOG,EAAgB,GAAK,CAAC,EAC5G,GAAIG,EAAU,KAAKE,CAAe,IAAMC,EACtC,MAAM,IAAI,MAAM,8BAA8B,CAElD,CACF,EAEanB,GACT,CAACS,EAA+BC,EAC/BU,EAAoDC,IAAwD,CAC3G,IAAMC,EAAab,EAAO,CAAC,EAAE,KACvBG,EAAQU,EAAW,OACnBT,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3Fa,EAAYD,EAAWV,EAAQ,CAAC,EAChCY,EAAWd,EAAW,EACtBe,EAAYf,EAAW,EACvBgB,EAAYJ,EAAW,MAAM,EAAGV,EAAQ,CAAC,EACzCe,EAAYX,EAAU,KAAKU,CAAS,EAEpCE,EADWlB,EAAW,UAAY,EAAIA,EAAW,KACpB,EAC7BmB,EAAWpB,EAAO,CAAC,EAAE,SACrBqB,EAAeC,GAAiBR,CAAS,EACzCS,EAAcD,GAAiBrB,EAAW,CAAC,EAC3CuB,EAAcF,GAAiBH,CAAe,EAC9CM,EAAcC,GAAqBN,CAAQ,EAC3CO,EAAsBb,EAAYV,EAAgBqB,EAClDG,EAAwB,KAAK,MAAMhB,EAAiCe,CAAmB,EACvFE,EAA0BzB,GAAiBO,EAAyB,CAAC,GAAKiB,EAAwB,EAClGE,EAAc,CAACD,GAA2BD,GAAyB,EAAKN,GAAiBN,CAAS,EAClGY,GAAyB,GAAMN,GAAiBN,CAAS,GAAK,EAAU,EACA,EACxEe,EAAcd,EAAU,OAAO,CAACH,EAAWE,CAAS,CAAC,EACrDgB,EAAazB,EAAU,KAAKwB,CAAW,EAAID,EAAaT,EAExDY,EAAoCJ,EACtC,CAAC,EACD,CAAC,CAAC,QAAuB,KAAMG,CAAU,EAAG,CAAC,QAAuB,KAAM/B,EAAW,SAAS,CAAC,EAC7FiC,EAAiB,CAAChB,EAAWJ,EAAWC,EAAWQ,CAAW,EAC9DY,EAAS5B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAC5DmC,EAAO,OAAO,GAAI,EAAGhB,EAAkBK,CAAW,EAClDS,EAAgB,KAAK,GAAGG,EAA2BF,CAAc,CAAC,EAClED,EAAgB,KAAK,GAAGG,EAA2BD,CAAM,CAAC,EAC1DF,EAAgB,KAAK,GAAGG,EAA2BpC,EAAO,CAAC,EAAE,IAAI,CAAC,EAC9DA,EAAO,SAAW,GACpBiC,EAAgB,KAAK,GAAGG,EAA2B7B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAE5F,IAAMqC,GAAkB,CAACnB,EAAWJ,EAAWE,EAAYc,CAAU,EACrEG,EAAgB,KAAK,GAAGG,EAA2BC,EAAe,CAAC,EACnE,IAAMC,GAAmBC,IAA+B,CACtD,IAAMC,GAAYN,EAAe,OAC3BhC,EAAIuC,EAAc,IAAKzC,EAAO,CAAC,EAAE,SAAUwC,GAAWjB,CAAW,EACjEjB,GAAImC,EAAc,OAAsBN,EAAO,OAAQX,CAAW,EAClEkB,GAASD,EAAc,SAAUzC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC1E2C,GAAiB,CAACzC,EAAGI,GAAGoC,EAAM,EAC9BE,GACF5C,EAAO,SAAW,EAAIyC,EAAc,iBAAgCzC,EAAO,CAAC,EAAE,KAAK,MAAM,EAAI,OAC7F4C,IACFD,GAAe,KAAKC,EAAU,EAEhC,IAAMC,GAAaR,GAAgB,OAC7BS,GAASC,EAAe,SAAU/C,EAAO,CAAC,EAAE,SAAU6C,GAAYf,CAAU,EAC5EkB,GAA8B,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACpG5B,EAA
W6B,GAA4BjD,EAAO,CAAC,EAAE,QAAQ,EAEzDkD,IAAe,IAAM,CACzB,OAAQ3B,EAAa,CACnB,IAAK,GACH,MAAO,SAASH,CAAQ,OAC1B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,QACE,MAAM,IAAI,MAAM,GAAGG,CAAW,8BAA8B,CAChE,CACF,GAAG,EAEG4B,GAAkB;AAAA,yCACShC,CAAe,aAAaK,CAAW;AAAA,YACpElB,GAAE,WAAW,YAAa,IAAK,MAAM,CAAC;AAAA,yBACzBA,GAAE,aAAa,WAAW,CAAC;AAAA,qCACfkB,CAAW;AAAA,iCACfA,IAAgB,EAAI,SAAW,kBAAkB;AAAA;AAAA;AAAA;AAAA,uCAI3C0B,EAAW,IACtC,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,GAAGjC,CAAQ,kBAAkBiC,EAAC,OAAOjC,CAAQ,kBAAkBiC,EAAC,IAAI,EACjG,KAAK,IAAI,CAAC;AAAA,0CACe,IAC5B9B,IAAgB,EACX,GAAG2B,EAAW,IACjB,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,uBAAuBA,EAAC,yBAAyB,EAAE,KAAK,IAAI,CAAC,KAE5F,yBAAyBH,EAAW,IAAI,MAAM,CAAC,EAAE,KAAK,YAAY,EAAE,KAAK,GAAG,CAAC,eAErF,CAAC;AAAA;AAAA,uCAE2BrB,EAA0Bf,EAAYO,CAAY;AAAA,gBACzEnB,EAAE,WAAW,YAAasC,GAAY,EAAGX,EAA0B,IAAM,SAASR,CAAY,MAAM,CAAC;AAAA,gBACrGnB,EAAE,WAAW,YAAasC,GAAY,EAAG,aAAa,CAAC;AAAA,mCACpCtC,EAAE,gBAAgB,WAAW,CAAC;AAAA,4BACrCgD,EAAW;AAAA,yCACE,EAAI3B,CAAW;AAAA,8BAC1BrB,EAAE,YAAY,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG3C2B,EAA0B,gDAAkD,kBAAkB,GAClGC,EAAa,EAAI,MAAQ,EAAE,OAC3B,MACK,KACG,CAAC,OAAQ,EAAIP,CAAW,EACxB,CAAC6B,GAAGC,KAAM,GACN9B,IAAgB,EAAI,UAAU8B,EAAC,4BAA4BA,EAAC,IACxC,cAAcA,EAAC,2BAA2BA,EAAC,IAAI,EAAE,EAC5E,KAAK,KAAK,CAAC;AAAA;AAAA,6BAEC,EAAI9B,CAAW;AAAA;AAAA,WAG9B+B,GAAuBV,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKlBA,GAAW,YAAY,kBAAkB,CAAC;AAAA,aAExB,GAE1C,OAAOf,EAA0B;AAAA,iDACQiB,GAAO,KAAK,KAAK,KAAKhC,EAAYV,CAAa;AAAA,UACtFmC,GAAa,iBAAiB,GAAGI,GAAgBG,EAAM,CAAC;AAAA,UACxDP,GAAa,UAAU,CACvBnC,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA,2BACiBF,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAI7BA,EAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,qCAEd4B,CAAU;AAAA,sDACOA,CAAU;AAAA,gBAEnBc,GAAa;AAAA,mDACPxC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yCAMvBwC,GAAW,YAAY,uBAAuB,CAAC,8BAC9B,EAAE;AAAA,6BAC/BtC,GAAE,KAAK,OAAO;AAAA,cAC7BA,GAAE,WAAW,YAAa,IAAK,6BAA6B,CAAC;AAAA;AAAA,+DAEZF,CAAa;AAAA,0BAClDsC,GAAO,YAAY,cAAc,CAAC;AAAA;AAAA,+BAE7BtB,CAAQ,IAAIwB,GAAa,2BAA6B,CAAG;AAAA,cAC1EtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA,6CACRL,EAAW,UAAYsB,CAAW;AAAA,yDACtBT,CAAS;AAAA,cACpDqC,EAAe;AAAA;AAAA;AAAA,gCAGGL,GAAO,KAAK,OAAO;AAAA,2CACR,KAAK,KAAKhC,EAAYV,CAAa,CAAC;AAAA,YACnE0C,GAAO,WAAW,iBAAkB,IAAK,OAAO,CAAC;AAAA,YACjDA,GAAO,WAAW,iBAAkBD,GAAa,EAAG,KAAK,CAAC;AAAA,YAC1DC,GAAO,WAAW,iBAAkBD,GAAa,EAAG,kCAAkC,CAAC;AAAA,gCACnEC,GAAO,gBAAgB,gBAAgB,CAAC;AAAA;AAAA;AAAA,wBAGhDhC,CAAS;AAAA,kCACCgC,GAAO,KAAK,KAAK,MAAMA,GAAO,KAAK,KAAK;AAAA;AAAA,0CAEhC1C,CAAa;AAAA;AAAA,6CAEVU,CAAS;AAAA;AAAA,gBAEtCgC,GAAO,YAAY,gBAAiB,cAAc,CAAC;AAAA,iCAClC9B,EAAYc,CAAU;AAAA;AAAA;AAAA,WAId;AAAA,UAC/BS,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGL,GAAgBG,EAAM,CAAC;AAAA,UACnFP,GAAa,UAAU,CAAC;AAAA,YACtBA,GAAa,sCAAsC,sBAAsB,CAAC;AAAA,qCACjDO,GAAO,KAAK,KAAK,KAAKzB,CAAY;AAAA,iCACtCyB,GAAO,gBAAgB,YAAY,CAAC;AAAA,sBAC/CA,GAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,sBACnDC,GAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,2BAC9C3C,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAKI0C,GAAa;AAAA,8CACZd,CAAU,QAAQ1B,CAAa;AAAA;AAAA,uCAEtCwC,GAAW,YAAY,kBAAkB,CAAC;AAAA,yEAEvB,EAAE;AAAA,oCACxBxC,EAAgB0B,CAAU;AAAA,2BACnCxB,GAAE,KAAK,OAAO;AAAA,qCACJwB,CAAU;AAAA,cACjCxB,GAAE,WAAW,YAAa,IAAK,SAASwB,CAAU,MAAM,CAAC;AAAA;AAAA,+CAExB1B,CAAa;AAAA;AAAA,4BAEhCsC,GAAO,YAAY,aAAa,CAAC;AAAA;AAAA,iCAE5BtB,CAAQ,IAAIwB,GAAa,qDAAuD,CAAG;AAAA,gBACpGtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,gBAEvC6C,EAAe;AAAA;AAAA,gBAEfG,EAAoB;AAAA,sDACkB/B,CAAW;AAAA;AAAA;AAAA,cAIpBqB,GAAa;AAAA,kBACxCU,EAAoB;AAAA,iBAEoB,EAAE;AAAA;AAAA,wCAEpBjC,CAAY;AAAA,gBACpCyB,GAAO,WAAW,iBAAkBD,GAAa,EAAG,GAAGxB,CAAY,YAAY,CAAC;AAAA,gBAChFyB,GAAO,aAAa,iBAAkB,kBAAkB,
CAAC;AAAA;AAAA,UAGnE,EACA,MAAO,CACL,KAAMjB,EAA0B,uBAAyB,cACzD,YAAa,CACX,KAAM,GAAG5B,EAAW,QAAQ,IAAIa,CAAS,IAAIM,CAAQ,IAAIpB,EAAO,MAAM,GACtE,kBAAmB,MAAMA,EAAO,MAAM,EAAE,KAAK,MAAM,CACrD,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM+B,EAAa,SAAAX,CAAQ,CAAC,EACvC,KAAMS,EAA0B,uBAAyB,cACzD,cAAeA,EAA0B,CAAC,EAAG,EAAG,EAAG,KAAK,KAAKb,EAAYc,CAAU,EAAG,EAAGZ,CAAS,EACzD,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAC5F,gBAAAC,CACF,GACA,gBAAAK,EACF,CACF,EAES9C,GAAc,CAAC+D,EAAyBtD,IAA4C,CAC/FX,GAAeiE,EAAQ,OAAQtD,CAAU,EACzC,IAAMU,EAAqD4C,EAAQ,4BAA4B,EACzF3C,EAAiC2C,EAAQ,kCAAkC,EACjFA,EAAQ,QAAQhE,GACZgE,EAAQ,OAAQtD,EAAYU,EAA0BC,CAA8B,CAAC,CAC3F,EAEanB,GAA8BQ,GACvCuD,GAA4BvD,CAAsE,ICjTtG,IAiBMwD,GAmBAC,GA0BAC,GA2BAC,GAuBAC,GAuBAC,GAeAC,GAiDAC,GA0BOC,GAjObC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KASMb,GAAkBc,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,sCAAsC,EAGxD,GAAIA,EAAO,QAAU,EAAG,CACtB,IAAIC,EAAYD,EAAO,CAAC,EAAE,KAAK,OAAS,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EAI9D,GAHIA,EAAO,SAAW,IACpBC,EAAYD,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAEpD,CAACC,EACH,MAAM,IAAI,MAAM,6EAA6E,CAEjG,CACF,EAEMd,GAAiB,CAACe,EAAuBC,EAAmBC,IAA+B,CAC/F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,sBACSH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,2BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA,gCAGzCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,UAI9E,MAAO;AAAA,oBACWD,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,cAIvBG,CAAK;AAAA;AAAA;AAAA,OAInB,EAEMjB,GAAgB,CAACc,EAAuBC,EAAmBC,IAA+B,CAC9F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,yCAKnEG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,gCAEvDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,oCAI1CI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMhB,GAAa,CAACa,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,+BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,4BACjDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEtCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMf,GAAa,CAACY,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA,6BAE/EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,+BAE5CI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,6BAChDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEvCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMd,GAAgB,CAACW,EAAuBC,EAAmBK,IAAsC,CACrG,OAAQA,EAAW,KAAM,CACvB,IAAK,GACH,OAAOrB,GAAee,EAAQC,EAAWK,EAAW,KAAK,MAAM,EACjE,IAAK,GACH,OAAOpB,GAAcc,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAChE,IAAK,GACH,OAAOnB,GAAWa,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,IAAK,GACH,OAAOlB,GAAWY,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMhB,GAAuB,CAACQ,EAA+BQ,IAA2C,CACtG,IAAMC,EAAcC,EAAU,SAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGQ,EAAW,IAAI,EACxEG,EAAYX,EAAO,CAAC,EAAE,KACtBY,EAAaF,EAAU,KAAKD,CAAW,EACvCI,EACF,CAAC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMJ,EAAW,IAAI,CAAC,EACzFA,EAAW,OAAS,GACtBK,EAAgB,KAAK,CAAC,KAAMb,EAAO,CAAC,EAAE,SAAU,KAAMQ,EAAW,KAAK,CAAC,EAGzEK,EAAgB,KAAK,GAAGC,EAA2Bd,EAAO,CAAC,EAAE,KAAMS,CAAW,CAAC,EAC/E,IAAMM,EAAwD,CAAC,MAAM,EAE/DC,EAAmBC,GAA+B,CACt
D,IAAMf,EAASgB,EAAe,SAAUlB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEU,EAAQC,EAAc,IAAKpB,EAAO,CAAC,EAAE,SAAUW,EAAU,MAAM,EAC/DU,EAAWF,EAAM,KAAK,MACtBG,EAAa/B,GAAcW,EAAQS,EAAU,OAAQH,CAAU,EAC/De,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQf,EAAW,KAAK,MAAM,CAAC,EACpG,OAAIA,EAAW,OAAS,GACtBe,EAAS,KAAK,CAAC,KAAM,iBAAkB,KAAMF,CAAkC,CAAC,EAG3E;AAAA,cACGJ,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBJ,EAAOjB,CAAM,CAAC;AAAA,cACvEe,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,4BAE5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,0BAEtCmB,CAAQ;AAAA,cACpBC,CAAU;AAAA;AAAA,UAGtB,EAEA,MAAO,CACL,KAAM,MACN,YAAa,CAAC,KAAM,GAAGd,EAAW,IAAI,GAAI,kBAAAO,CAAiB,EAC3D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMN,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAU,KAAKD,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAAG,CACF,CACF,EAEMvB,GAAgC,CAACO,EAA+BQ,IAA6C,CACjH,GAAIR,EAAO,OAAS,EAAG,CACrB,IAAMwB,EAAexB,EAAO,CAAC,EAAE,iBAAiB,EAC1CyB,EAASzB,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,KAAQA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAI,EAElFG,EAAYH,EAAO,CAAC,EAAE,KAAK,OAC3B0B,EAAa,IAAI,WAAW,EAAIvB,CAAS,EAAE,KAAK,CAAC,EACvD,GAAIH,EAAO,QAAU,EAAG,CACtB,IAAM2B,EAAO3B,EAAO,CAAC,EAAE,iBAAiB,EACxC,QAASM,EAAI,EAAGA,EAAIqB,EAAK,OAAQrB,IAC/BoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,CAAC,EAAI,OAAOkB,EAAalB,CAAC,CAAC,EACpDoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,EAAIH,CAAS,EAAI,OAAOqB,EAAalB,EAAIqB,EAAK,MAAM,CAAC,CAElF,MACEH,EAAa,QAAQ,CAACI,EAAGtB,IAAMoB,EAAW,OAAOpB,CAAC,CAAC,EAAK,OAAOsB,CAAC,CAAE,EAGpE,IAAMC,EAAiB,CAAC,EACxB,OAAAH,EAAW,QAAQE,GAAKC,EAAK,KAAKD,CAAC,CAAC,EAE7B,CAAC,KAAMpB,EAAW,KAAM,MAAAiB,EAAO,KAAAI,CAAI,CAC5C,KACE,QAAOrB,CAEX,EAEad,GAAM,CAACoC,EAAyBtB,IAAoC,CAC/EtB,GAAe4C,EAAQ,MAAM,EAC7B,IAAMC,EAAoBtC,GAA8BqC,EAAQ,OAAQtB,CAAU,EAClFsB,EAAQ,QAAQtC,GAAqBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxF,ICrOA,IAmBMC,GAMAC,GA4BAC,GA2DAC,GAsJAC,GAGAC,GAGAC,GAGAC,GAaAC,GAiCOC,GAYAC,GAKPC,GAWOC,GAKAC,GAUPC,GA6BOC,GAKAC,GAgBAC,GAKAC,GA/ZbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KAIAC,KAQMxB,GAAkByB,GAAwC,CAC9D,GAAIC,GAAI,OAAO,uBAAyB,CAACD,GAAUA,EAAO,SAAW,GACnE,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EAEMxB,GAA0C,CAC5C0B,EAAmBC,EAA2BC,IAAyD,CACzG,IAAMC,EAAiBF,EAAW,SAAW,OACvCG,EAA2BJ,EAAM,KAAK,MAAM,EAC9CG,GACFC,EAAyB,OAAO,EAAG,EAAGA,EAAyB,IAAI,CAAE,EAEvE,IAAMC,EAAe,OAAO,eAAe,KAAKJ,EAAY,WAAW,EACjEK,EAAcL,EAAW,YAAY,MAAM,EAC3CM,EAAUN,EAAW,QAAQ,MAAM,EACnCO,EAAsBH,EAAgBJ,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCS,GAAa,qBAAqBR,EAAkBE,EAA0BE,EAAaC,EAASC,EAAWC,CAAI,EAEnH,IAAME,EAA4BD,GAAa,uBAC3CR,EAAkBE,EAA0BG,EAASC,EAAWF,EAAaG,EAAMR,EAAW,OAAO,EAEnGW,EAAgB,OAAO,OAAO,CAAC,EAAGX,CAAU,EAC9CI,EACF,OAAO,OAAOO,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,UAAAD,EAAW,SAAUP,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOW,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAE1F,IAAMY,EAA2BF,EAA0B,MAAM,EACjE,OAAAE,EAAyB,KAAKA,EAAyB,OAAO,EAAG,CAAC,EAAE,CAAC,CAAC,EAC/D,CAACD,EAAeT,EAAiBU,EAA2BF,CAAyB,CAC9F,EAEMpC,GAAuB,CACzBuC,EACAb,IAAgG,CAClG,IAAME,EAAiBF,EAAW,SAAW,OACvCc,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAaD,EAAU,KAAKf,EAAW,WAAW,EAClDiB,EACF,CAAC,CAAC,QAAuB,KAAMH,CAAU,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACnFE,EAA8B,CAAC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACzG,GAAIlB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAMmB,EAAKnB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoB,EAAKpB,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqB,EAAUrB,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsB,EAAQtB,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuB,EAAoB,CAAC,EAAEF,EAAUC,GACvCL,EAAgB,KACZ,CAAC,QAAuB,KAAME,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAO,EACrC,CAAC,QAAuB,KAAMC,CAAK,CACvC,EACAJ,EA
AS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,EAEhC,IAAIM,EAAoB,GACxB,GAAIxB,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMyB,EAAKzB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D0B,EAAK1B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD2B,EAAU3B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD4B,EAAQ5B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EACxDwB,EAAoB,CAAC,EAAEG,EAAUC,GACjCX,EAAgB,KACZ,CAAC,QAAuB,KAAMQ,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAO,EAC3G,CAAC,QAAuB,KAAMC,CAAK,CAAC,EAExCV,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAClC,CACA,MAAO,CAACD,EAAiBC,EAAU,GAAMK,EAAmBC,CAAiB,CAC/E,KAAO,CACL,GAAItB,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM2B,EAAgBd,EAAU,eAAef,EAAW,WAAW,EACrEiB,EAAgB,KACZ,CAAC,QAAuB,KAAMY,CAAa,EAAG,CAAC,QAAuB,KAAM7B,EAAW,IAAI,EAC3F,CAAC,QAAuB,KAAMA,EAAW,OAAO,CAAC,EACrDkB,EAAS,KACL,CAAC,KAAM,gBAAiB,KAAM,MAAO,OAAQW,EAAc,MAAM,EACjE,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ7B,EAAW,KAAK,MAAM,EAC1D,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQA,EAAW,QAAQ,MAAM,CAAC,EAErE,IAAM8B,EAAU9B,EAAW,KAAK,OAAO,CAAC+B,EAAKC,IAAQD,EAAMC,CAAG,EAC9D,MAAO,CAACf,EAAiBC,EAAU,CAAC,CAACY,EAAS,GAAO,EAAK,CAC5D,CACF,EAEMvD,GAAsB,CACxB0D,EAA4BC,EAAkBC,EAAcC,EAAyBpC,EACrFqC,EAAaC,EAAaC,EAAerB,EAA6BY,EAAkBP,EACxFC,IAAuC,CACzC,IAAMtB,EAAiBF,EAAW,SAAW,OACvCwC,EAAWN,EAAE,KAAK,MAClBO,EAASC,EAAe,SAAUR,EAAE,KAAK,OAAQE,CAAe,EAEtE,GAAIpC,EAAW,YAAY,QAAU,EAAG,CACtC,IAAI2C,EAAQ,GACRC,EAAQ,GACRC,EAAW,GACTC,EAAUX,GAAQjC,EAAiB,EAAI,GAsB7C,GArBIqB,EACFoB,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO;AAAA,4CACxBA,CAAO;AAAA;AAAA;AAAA;AAAA,kCAIjBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAGjBM,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,kCACxBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAIfrC,EAAW,YAAY,SAAW,EAAG,CACvC,IAAM+C,EAAUZ,GAAQjC,EAAiB,EAAI,GACzCsB,EACFoB,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO,yBAAyBA,CAAO;AAAA;AAAA;AAAA;AAAA,gBAM5FH,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,kBAGpDF,EAAW;AAAA;AAAA,aAGb,CAoBA,MAlBoB;AAAA,cACVZ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,8BAE3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,4BAEvCD,CAAQ,IAAID,CAAK;AAAA;AAAA,gBAE7BK,CAAK;AAAA,gBACLD,CAAK;AAAA,gBACLE,CAAQ;AAAA,gBACRP,CAAG;AAAA;AAAA;AAAA,cAKjB,KAAO,CACL,GAAIpC,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM8C,EAAchD,EAAW,YAAY,OACrCiD,EAAWjD,EAAW,KAAK,OAC7BkD,EAAU,GACd,OAAIpB,EACFoB,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAQgBhB,EAAE,gBAAgB,UAAU,CAAC;AAAA,kBAC3CG,CAAG;AAAA,iBAGfa,EAAU;AAAA;AAAA,8BAEchB,EAAE,gBAAgB,UAAU,CAAC;AAAA,gBAC3CG,CAAG;AAAA,cAGK;AAAA,cACVJ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,8BAC3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,wCAE3BO,CAAW;AAAA;AAAA,4BAEvBR,CAAQ,IAAID,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uCAMNS,EAAc,CAAC;AAAA,0CACZG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA,2CACvDG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA;AAAA,0BAEzEA,EAAc,CAAC;AAAA;AAAA;AAAA,+BAGVb,EAAOa,CAAW,UAAUb,CAAI;AAAA,+CAEvDgB,EAAa,mBAAoB,OAAOhB,EAAOa,CAAW,IAAKA,CAAW,CAAC;AAAA,oCAC/Cb,EAAOa,CAAW,QAAQG,EAAa,gBAAiB,SAAUF,CAAQ,CAAC;AAAA,oBAC3FC,CAAO;AAAA;AAAA,gBAEXZ,CAAG;AAAA;AAAA;AAAA,cAKjB,CACF,EAcM9D,GAAiCwB,GAClC,GAAGA,EAAW,MAAM,IAAIA,EAAW,QAAQ,IAAIA,EAAW,OAAO,IAAIA,EAAW,YAAY,MAAM,GAEjGvB,GAA4CuB,GAC7C,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,eAAe,GAEzEtB,GAAwCsB,GACzC,GAAGxB
,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,YAAY,IAAIA,EAAW,SAAS,GAE9FrB,GAA6BqB,IAA+D,CAChG,OAAQA,EAAW,OACnB,QAAS,CAAC,SAAU,QAAS,aAAc,YAAY,EAAEA,EAAW,QAAkB,EACtF,SAAUA,EAAW,UACrB,YAAaA,EAAW,aACxB,QAASA,EAAW,QACpB,KAAMA,EAAW,IACnB,GAMMpB,GACF,CAACwE,EAAcrD,EAAmBE,EAA2BD,IAAmD,CAC9G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEiC,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyC,EAAWN,EAAE,KAAK,MAElBG,EAAM,kBACRC,EAAM,GACNe,EAAmB,gBACrBf,GAAO,YAAYE,CAAQ,yBAE3BF,GAAO,YAAYE,CAAQ,oCAE7B,GAAM,CAACvB,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxDpC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EAC3E,IAAM2C,EAAwD,CAAC,MAAM,EACrE,MAAO,CACL,KAAAJ,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAAK,EAAKpB,EAC3FY,EAASP,EAAmBC,CAAiB,CACnD,CACF,EAES3C,GAA8BmB,GAA+D,CACxG,IAAMyD,EAAmBzD,EAAW,oBAAiC,EAE/D0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI0D,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAE1F,IAAMC,EAAwB,CAAC,gBAAAF,EAAiB,GAAGC,EAAM,SAAU,EAAE,EACrE,MAAO,CAAC,GAAGC,EAAuB,SAAUlF,GAAyCkF,CAAqB,CAAC,CAC7G,EAEa7E,GAAc,CAAC8E,EAAyB5D,IAA4C,CAC/F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,cAAegF,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CACnG,EAEMjB,GAAuB,CAC3B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,CACd,EAEaC,GAAoCgB,GAA+D,CAC9G,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEa5E,GAAoB,CAAC2E,EAAyB5D,IAA4C,CACrG5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,oBAAqBgF,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CACxG,EAOMd,GACF,CAACkE,EAAcrD,EAAmBE,EAA2BD,IAA+C,CAC1G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEoC,EAAM;AAAA;AAAA,MAGNC,EAAM,GACNJ,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyD,EAAwD,CAAC,MAAM,EAC/D,CAACvC,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxD,OAAApC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EACpE,CACL,KAAAuC,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAChFvC,EAAM,WAAa,GAAoB,OAAS,KAAMmB,EAAUY,EAASP,EAC1EC,CAAiB,CACvB,CACF,EAESrC,GAAU,CAACyE,EAAyB5D,IAAwC,CACvF5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,UAAW0E,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CAC3F,EAEaZ,GAA0BY,GAA2D,CAChG,IAAM8D,EAAe9D,EAAW,cAC1BO,EAAYP,EAAW,UAEvB0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI8D,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAIJ,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,oEAAoE,EAEtF,IAAMK,EAAoB,CAAC,aAAAD,EAAc,UAAAvD,EAAW,GAAGmD,EAAM,SAAU,EAAE,EACzE,MAAO,CAAC,GAAGK,EAAmB,SAAUrF,GAAqCqF,CAAiB,CAAC,CACjG,EAEa1E,GAAgCW,GAA2D,CACtG,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEavE,GAAgB,CAACsE,EAAyB5D,IAAwC,CAC7F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,gBAAiB0E,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CAChG,IClaA,IAUMgE,GAUAC,GAoCOC,GAxDbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMP,GAAwB,CAACQ,EAAeC,EAAeC,IAAwB,CACnF,IAAMC,EAAiBH,IAAUC,EAC3BG,EAA8BJ,EAAQC,GAASC,EAAQ,EACvDG,EAA8BL,EAAQC,GAASC,EAAQ,EAE7D,GAAIC,GAAkBC,GAA+BC,EACnD,MAAM,IAAI,MAAM,2CAA4C,CAEhE,EAEMZ,GAAyB,CAACO,EAAeC,EAAeC,EAAeI,IAAoC,CAC/G,I
AAMC,EAAc,KAAK,IAAI,KAAK,MAAMN,EAAQD,GAASE,CAAK,CAAC,EACzDM,EAAwB,CAACD,CAAW,EACpCE,EAAaF,EACbG,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,KAAMH,EAAU,KAAMN,CAAK,EAAG,CAAC,KAAMM,EAAU,KAAMJ,CAAK,EACtG,GAAGS,EAA2BH,CAAW,CAC3C,EAEMI,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUT,EAAUE,EAAY,MAAM,EAC9DQ,EAAWF,EAAO,KAAK,MACvBG,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAMD,CAAkC,EAC3F,CAAC,KAAM,QAAS,KAAMA,CAAkC,CAC1D,EACA,MAAO;AAAA,UACDH,EAAa,iBAAiBI,CAAQ,EAAE,iBAAiBH,CAAM,CAAC;AAAA,UAChED,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,gDACnCG,CAAQ;AAAA,QAEtD,EAEA,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGV,CAAQ,EAAE,EACjC,gBAAAM,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,EACF,CACF,EAEahB,GAASwB,GAAkC,CACtD,IAAIlB,EAAQ,EACRC,EAAQ,EACRC,EAAQ,EACRgB,EAAQ,OAAO,CAAC,EAAE,WAAa,GACjClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,GAClCA,EAAQ,OAAO,CAAC,EAAE,WAAa,IACxClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,GAE3CC,GAAI,OAAO,sBACb3B,GAAsBQ,EAAOC,EAAOC,CAAK,EAG3CgB,EAAQ,QAAQzB,GAAuBO,EAAOC,EAAOC,EAAOgB,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CACvG,IC1EA,IAiCME,GAuBAC,GASAC,GA6CAC,GAkDAC,GAkCAC,GAaAC,GAwBAC,GAyBAC,GAuBAC,GAkCAC,GAWAC,GAQAC,GAsDAC,GA6EAC,GAwEAC,GAoHAC,GAOOC,GAiBAC,GAnqBbC,GAAAC,EAAA,kBAIAC,KAEAC,KACAC,KAGAC,KAuBMxB,GAAiB,CAACyB,EAAkBC,IAAuC,CAK/E,GAJAD,EAAO,MAAOE,GAAUA,EAAQ,IAAM,IAAM,CAClB,MAAM,IAAI,MAAM,oDAAoD,CACtE,EAAE,EAEtBF,EAAO,OAAS,GAClB,GAAIC,EAAW,OAAS,UACtB,GAAI,EAAED,EAAO,SAAW,GAAKA,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACtGA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACxDA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MACN;AAAA,oGACwF,UAErFC,EAAW,OAAS,SACzB,EAAED,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC/EA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MAAM,+DAA+D,EAIvF,EAEMxB,GAAe,CAACwB,EAA2BG,EAAyBC,IAA2B,CACnGD,EAAK,MAAOD,GAAUA,GAAS,GAAKA,EAAQE,IAAS,IAAM,CACnC,MAAM,IAAI,MAAM,qEAAqE,CACvF,EAAE,EACxB,IAAMC,EAAY,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAG,EAC1C,OAAAD,EAAK,QAAQ,CAACD,EAAOI,IAAUD,EAAUH,CAAK,EAAIF,EAAOM,CAAK,CAAC,EACxDD,CACT,EAEM5B,GACF,CAAC8B,EAA+BN,EAA8BO,EAAsBR,EACnFS,EAAiBC,IAAwB,CACxC,GAAM,CAACC,EAAeC,EAAkBC,CAAe,EAClDL,EAAe,GAAM,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,GAAKD,EAAO,OAAS,EAAK,EAAI,GAAI,EAAE,EACrEH,EAAOG,EAAO,CAAC,EAAE,KAAK,OAC5B,GAAII,EAAgB,GAAKJ,EAAO,OAASI,GAAiBJ,EAAOI,CAAa,EAAE,KAAK,OAAS,EAC5FJ,EAAOI,CAAa,EAAE,gBAAgB,EAAE,QAAST,GAAUQ,EAAI,KAAKR,CAAK,CAAC,UACjED,EAAW,0BAA4B,qBAChD,MAAM,IAAI,MAAM,2FAA2F,EAG7G,GAAIW,EAAmB,GAAKL,EAAO,OAASK,GAAoBL,EAAOK,CAAgB,EAAE,KAAK,OAAS,EAAG,CAExG,GADAL,EAAOK,CAAgB,EAAE,gBAAgB,EAAE,QAASV,GAAUF,EAAO,KAAKE,CAAK,CAAC,EAC5EF,EAAO,SAAW,GACjBA,EAAO,SAAWI,GAASI,GAAgB,IAAMR,EAAO,SAAWC,EAAW,KAAK,OACtF,MAAM,IAAI,MACN,6FAA6F,EAEnG1B,GAAeyB,EAAQC,CAAU,EAC7BA,EAAW,KAAK,OAAS,GAC3BzB,GAAawB,EAAQC,EAAW,KAAMG,CAAI,EAAE,QAAQ,CAACF,EAAOI,IAAUN,EAAOM,CAAK,EAAIJ,CAAK,CAE/F,CACA,GAAIW,EAAkB,GAAKN,EAAO,OAASM,IACzCN,EAAOM,CAAe,EAAE,iBAAiB,EAAE,QAASX,GAAUO,EAAM,KAAK,OAAOP,CAAK,CAAC,CAAC,EACnFO,EAAM,SAAWL,GAASI,GAAgB,IAAMC,EAAM,SAAWR,EAAW,KAAK,QACnF,MAAM,IAAI,MAAM,4FAA4F,EAIhH,GAAIA,EAAW,KAAK,OAAS,EAAG,CAC9B,GAAID,EAAO,SAAWC,EAAW,KAAK,OACpC,MAAM,IAAI,MAAM,0FAA0F,EAE5G,GAAIQ,EAAM,SAAWR,EAAW,KAAK,OACnC,MAAM,IAAI,MACN,8FAA8F,CAEtG,
CACA,GAAI,OAAOD,EAAW,KAAe,OAAOS,EAAU,KAAeT,EAAO,OAAS,GAAKS,EAAM,OAASL,EACvG,MAAM,IAAI,MAAM,yDAAyD,CAE7E,EAEE1B,GACF,CAACoC,EAAiDC,IAC9C;AAAA,2DACmDA,CAAK,OAC3D,IAAM,CACD,OAAQD,EAAwB,CAC9B,IAAK,aACH,MAAO,UAAUC,CAAK,gBAAgBA,CAAK,YAC7C,IAAK,qBACH,MAAO;AAAA,8BACSA,CAAK,uBAAuBA,CAAK;AAAA;AAAA;AAAA,qBAInD,IAAK,uBACH,MAAO,WAAWA,CAAK,uBAAuBA,CAAK,YACrD,IAAK,gBACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMaA,CAAK;AAAA;AAAA,0BAEbA,CAAK,6DAA6DA,CAAK;AAAA;AAAA,qBAGrF,IAAK,qBACH,MAAO;AAAA,6BACQA,CAAK,gBAAgBA,CAAK;AAAA,2BAC5BA,CAAK,gBAAgBA,CAAK,yBAAyBA,CAAK;AAAA,0BACzDA,CAAK;AAAA;AAAA,mCAEIA,CAAK,yBAAyBA,CAAK;AAAA,qBAE1D,IAAK,uBACH,MAAO,uBAAuBA,CAAK,YAAYA,CAAK;AAAA,uCAC3BA,CAAK;AAAA,mCACTA,CAAK;AAAA;AAAA,sCAEFA,CAAK,uBAAuBA,CAAK,mBAC3D,IAAK,aACH,MAAO,YAAYA,CAAK,uBAAuBA,CAAK,mBACtD,QACE,MAAM,IAAI,MAAM,6BAA6BD,CAAsB,mBAAmB,CAC1F,CACF,GAAG,EACP,IAEEnC,GAA8B,CAACqC,EAA0BR,EAAsBO,IACjF,6CAA6CA,CAAK,4BAA4BA,CAAK,MAAQ,IAAM,CAC/F,OAAQC,EAAa,CACnB,IAAK,oBACH,MAAO,yIAKT,IAAK,QACH,MAAO,2BACT,IAAK,OACH,MAAO,0BACT,IAAK,qBACH,MAAO,0KAKT,IAAK,SACL,QACE,GAAIR,EAAe,GACjB,MAAO,mLAOT,MAAM,IAAI,MAAM,gBAAgBQ,CAAW,mBAAmB,CAClE,CACF,GAAG,EACH,IAEEpC,GAAY,CAAC8B,EAAwBP,EAAyBC,IAA2B,CAC7F,IAAMa,EAAS,IAAI,MAAMb,CAAI,EAAE,KAAK,CAAC,EAAE,OAAO,IAAI,MAAMA,CAAI,EAAE,KAAK,CAAC,CAAC,EAC/Dc,EAAWR,EAAI,SAAW,EAAIO,EAASP,EAAI,MAAM,EACvD,OAAIP,EAAK,OAAS,GAChBA,EAAK,QAAQ,CAACgB,EAAGC,IAAM,CACrBH,EAAOE,CAAC,EAAID,EAASE,CAAC,EACtBH,EAAOG,EAAIhB,CAAI,EAAIc,EAASf,EAAK,OAASiB,CAAC,CAC7C,CAAC,EACMH,GAEFC,CACT,EAEMrC,GACF,CAACwC,EAA+BrB,EAA2BS,EAA0BN,IACrE,CACV,IAAImB,EAAwB,CAAC,EAC7B,GAAIb,EAAM,OAAS,EACjB,GAAIN,EAAK,OAAS,EAAG,CAEnB,GADAkB,EAAW,QAASF,GAAMG,EAAY,KAAKH,CAAC,CAAC,EACzC,KAAK,IAAI,GAAGhB,CAAI,EAAIkB,EAAW,OACjC,MAAM,IAAI,MAAM,sBAAsB,EAExClB,EAAK,QAAQ,CAACgB,EAAGC,IAAME,EAAYH,CAAC,EAAIV,EAAMW,CAAC,CAAC,CAClD,MACEX,EAAM,QAASU,GAAMG,EAAY,KAAKH,CAAC,CAAC,MAErC,CACL,GAAInB,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyC,EAEzDsB,EAAcD,EAAW,IAAI,CAACnB,EAAOI,IAAU,KAAK,MAAMJ,EAAQF,EAAOM,CAAK,CAAC,CAAC,CAEpF,CACA,OAAOgB,CACT,EAEFxC,GAAoB,CAACuC,EAA+BrB,EAAkBC,IAAiC,CAC3G,IAAMsB,GAAiB,IAAM,CAC3B,OAAQtB,EAAW,sBAAuB,CACxC,IAAK,aACH,OAAOA,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,IAAK,cACH,OAAOC,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,QACE,MAAM,IAAI,MAAM,4BAA4BC,EAAW,qBAAqB,mBAAmB,CACnG,CACF,GAAG,EACHD,EAAO,KAAK,EAAK,EAAGA,EAAO,MAAM,EACjC,IAAMwB,EAAsBH,EAAW,MAAM,EAC7C,OAAIpB,EAAW,KAAK,OAAS,GAC3BA,EAAW,KAAK,QAASkB,GAAMnB,EAAOmB,CAAC,EAAII,CAAa,EACxDtB,EAAW,KAAK,QAASkB,GAAMK,EAAoBL,CAAC,EAAI,KAAK,MAAME,EAAWF,CAAC,EAAInB,EAAOmB,CAAC,CAAC,CAAC,IAE7FnB,EAAO,KAAKuB,EAAe,EAAGvB,EAAO,MAAM,EAC3CwB,EAAoB,QAAQ,CAACL,EAAGC,IAAMI,EAAoBJ,CAAC,EAAI,KAAK,MAAMD,EAAInB,EAAOoB,CAAC,CAAC,CAAC,GAEnFI,CACT,EAEMzC,GACF,CAAC0C,EAAuBJ,EAA+BC,EAAgCI,EACtFC,IAA8B;AAAA,mEACgCF,EAAO,KAAK,OAAO,cAC9EA,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,oCACZG,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,gCAC5CA,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA,sBAC/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA,wBAChDE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,uBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA;AAAA,kCAExDF,EAAO,KAAK,KAAK;AAAA;AAAA,gCAEnBG,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAQzFtC,GACF,CAAC6C,EAAsBJ,EAAuBJ,EAA+BC,EAC5EI,EAAsBC,EAAmBG,IAAsC;AAAA,gEACpBL,EAAO,KAAK,OAAO,QAAQI,EAAM,KAAK,OAAO;AAAA,2BAClFA,EAAM,KAAK,OAAO;AAAA,gCACbP,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB
,GAAG,CAAC;AAAA;AAAA,sBAE/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA;AAAA;AAAA;AAAA,0BAI9CE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,yBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA,gCAC5DC,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA,iBAG9EQ,CAAgB,4CAA4CL,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA,wCAGtDA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAS/CI,EAAM,WAAW,gBAAiB,IAAK,cAAc,CAAC;AAAA;AAAA;AAAA,OAI1D5C,GAAoB,CAAC4C,EAAsBR,IAA0C;AAAA,0CACjDQ,EAAM,KAAK,OAAO;AAAA,gCAC5BR,EAAW,MAAM;AAAA,4BACrBQ,EAAM,WAAW,gBAAiB,GAAG,CAAC;AAAA,gDAClBD,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,OAOtGnC,GACF,CAAC2C,EAAsBE,EAAoBC,EAAkBC,IACzDJ,EAAM,KAAOI,EAAc;AAAA,MAC7BJ,EAAM,WAAW,gBAAiBE,EAAY,SAAS,CAAC;AAAA,MACxDF,EAAM,WAAW,gBAAiBG,EAAU,OAAO,CAAC;AAAA,EAEvB,GAE7B7C,GACF,CAAC0C,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUG,EAAWC,EAAUL,CAAU,EAC5CV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,CAAC,EAC9DN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wEAC2Dd,CAAK;AAAA,2BAClDc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBM,EAAW,mBAAmBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QAC9FN,EAAM,WAAW,gBAAiBO,EAAU,mBAAmBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC5FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,+CAGHJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,gBAE/DA,CAAK,sBAAsBoB,CAAS;AAAA,gBACpCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAEzCN,EACI,yBAAyBT,EAAWc,CAAS,CAAC,8BAA8Bd,EAAWe,CAAQ,CAAC;AAAA,iBAC7FF,CAAkB;AAAA,SAErB,EAAE;AAAA,8BACcb,EAAWc,CAAS,CAAC;AAAA,8BACrBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,2BAKvBf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA,iBAC1EjB,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWjC,EAEE3B,GACF,CAACyC,EAAsBJ,EAAuBJ,EAA+BC,EAC5EtB,EAA2BU,EAAwB2B,EAAqBP,EACxEI,EAA4BI,IAAoC,CAC/D,IAAMC,EAAOlB,EAAW,SAAW,EAC7BmB,EAAS,GACT,CAACL,EAAWC,CAAQ,EAAIG,EAAO,CAAC,EAAG,CAAC,EAAIC,EAAS,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAC/DzB,EAAQc,EAAM,KAAK,MACnBY,EAAoCC,GAAwB,CAChE,IAAMC,EAAYD,IAAQP,EAAY,MAAQ,MAC9C,MAAO;AAAA,WACJQ,CAAS,qCAAqCd,EAAM,KAAK,OAAO,qBAC/DJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,6BACfU,EAAO,WAAW,iBAAkBiB,CAAG,CAAC;AAAA,2BAC1C3B,CAAK,+DAA+Df,EAAO0C,CAAG,CAAC;AAAA,UAChGpB,EAAYoB,CAAG,CAAC,KAAKrB,EAAWqB,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,MAAMrB,EAAW,MAAM;AAAA,gCAC/DN,CAAK;AAAA;AAAA;AAAA,cAGvBe,CAAgB,0CAA0CT,EAAWqB,CAAG,CAAC;AAAA,mBACpER,CAAkB;AAAA;AAAA,0BAEXnB,CAAK,gBAAgBA,CAAK;AAAA;AAAA,gBAEpC4B,CAAS,KAAK5B,CAAK,oBAAoBA,CAAK;AAAA,gBAC5C4B,CAAS,WAAWA,CAAS,OAAOtB,EAAWqB,CAAG,CAAC;AAAA,eACpD,IACDJ,EACK;AAAA,mCAEER,EACF,UAAUI,CAAkB,IAE5B,GAAGS,CAAS,iBAAiBA,CAAS,KAAKtB,EAAWqB,CAAG,CAAC,WAElE,CAAC;AAAA;AAAA,kCAEsBb,EAAM,KAAK,OAAO;AAAA,YACxCA,EAAM,WAAW,qBAAsBa,EAAK,OAAOC,CAAS,GAAG,CAAC;AAAA,0BAEhED,IAAQP,EAAYN,EAAM,aAAa,oBAAoB,EACvC,2DAA2D;AAAA;AAAA;AAAA,QAIrF,EAEA,MAAO;AAAA,MACPY,EAAiCN,CAAS,CAAC;AAAA,MAC3CM,EAAiCL,CAAQ,CAAC;AAAA,qCACXrB,CAAK,cAAcA,CAAK;AAAA;AAAA,wBAErCA,CAAK,gBAAgBA,CAAK;AAAA,wBAC1BA,CAAK;AAAA,wBACLA,CAAK;AAAA,uBACNA,CAAK;AAAA,oBACRsB,CAAW,wBAAwBA,CAAW,yBACxDA,CAAW,yBAAyBA,CAAW;AAAA,oBACrCA,CAAW,mBAAmBA,CAAW;AAAA,oBACzCA,CAAW,2BAA2BA,CAAW;AAAA,oBACjDA,CAAW,yBAAyBA,CAAW,0BACzDA,CAAW,0BAA0BA,CAAW;AAAA;AAAA;AAAA;AAAA,qCAIrBtB,CAAK,sBAAsBA,CAAK,YAAYA,CAAK;AAAA,oBAClEA,CAAK;AAAA;AAAA;AAAA;AAAA,4CAImBU,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,yBACnDc,EAAM,KAAK,OAAO;AAAA;AAAA;AAAA
,KAIvC,EAEExC,GACF,CAACwC,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUY,EAAUT,EAAWC,EAAUL,CAAU,EACtDV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACpEN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wFAC2Ed,CAAK;AAAA,2BAClEc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBe,EAAU,qBAAqBvB,EAAWuB,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9Ff,EAAM,WAAW,gBAAiBM,EAAW,sBAAsBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QACjGN,EAAM,WAAW,gBAAiBO,EAAU,qBAAqBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,gDAGFJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,kBAE9DA,CAAK,sBAAsB6B,CAAQ;AAAA,mBAClC7B,CAAK,sBAAsBoB,CAAS;AAAA,kBACrCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAE3CN,EAAmB,6BAA6BT,EAAWuB,CAAQ,CAAC,oCAC7CvB,EAAWc,CAAS,CAAC,kCAAkCd,EAAWe,CAAQ,CAAC;AAAA,eAC7FF,CAAkB;AAAA,WAEJ,EAAE;AAAA;AAAA,gCAECb,EAAWuB,CAAQ,CAAC;AAAA,oCAChBvB,EAAWc,CAAS,CAAC;AAAA,kCACvBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAO3Bf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA;AAAA,kBAEzEjB,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,iBACNA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,mBAAmBA,CAAK;AAAA,iBAC7BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAgBjC,EAEEzB,GACF,CAACuD,EAAyB5C,EAA8BO,EAAsBsC,EAC7ErC,EAA0BsC,IAA6C,CACtE,IAAM1B,EAAawB,EAAY,KACzBnC,EAAM9B,GAAUmE,EAAU9C,EAAW,KAAMoB,EAAW,MAAM,EAE9DC,EAAczC,GAAgBwC,EAAYyB,EAAarC,EAAOR,EAAW,IAAI,EAC7ED,EAAS8C,EAAY,MAAM,EAC3BA,EAAY,SAAW,IACzB9C,EAASqB,EAAW,IAAI,CAACnB,EAAOI,IAAUJ,IAAU,EAAI,EAAMoB,EAAYhB,CAAK,EAAIJ,CAAK,EACpFD,EAAW,wBAA0B,YACvCqB,EAAcxC,GAAkBuC,EAAYrB,EAAQC,CAAU,IAGlE,IAAMwB,EAASuB,EAAe,SAAUH,EAAY,SAAUvB,EAAY,MAAM,EAC1EO,EAAQoB,EAAc,QAASJ,EAAY,SAAUxB,EAAW,MAAM,EACtE6B,EAAaC,EAAU,KAAK7B,CAAW,EACvC8B,EAAU/B,EAAW,SAAWC,EAAY,QAAUD,EAAW,MAAM,CAACgC,EAAGjC,IAAMiC,IAAM/B,EAAYF,CAAC,CAAC,EACrGU,EAAmB7B,EAAW,0BAA4B,qBAC1DiC,EAAqBjC,EAAW,mBAChCqD,EAAWzB,EAAM,KAAK,MACtB0B,EAAmBC,GAA+B;AAAA,QACtDJ,EAAU,GAAK;AAAA,QACf1E,GAA2CuB,EAAW,wBAAyBqD,CAAQ,CAAC;AAAA,SACvF,IAAM,CACP,OAAQrD,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA,gBACHhB,GAAkB4C,EAAOR,CAAU,CAAC;AAAA,gBACpC1C,GAA4BsB,EAAW,YAAaO,EAAc8C,CAAQ,CAAC;AAAA,gBAE3EtE,GACI6C,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,OAAQoB,CAAgB,CAAC;AAAA,gBAE9F,IAAK,SACH,MAAO;AAAA,gBACH/C,GAA0C0C,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,MAAM,CAAC;AAAA,iBACpG,IAAM,CACT,GAAIW,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GAAGlC,GAAsB0C,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAC3F,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EAC1D,MAAO,GAAGhC,GAAuBwC,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAEjG,MAAM,MAAM,kFAAkF,CAElG,GAAG,CAAC;AAAA,cAEN,IAAK,QACH,MAAO;AAAA,eACJ,IAAM,CACP,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GACHjC,GACIyC,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAQU,EAAKT,EAAW,YAAa6B,EAC7E7B,EAAW,mBAAoBA,EAAW,cAAc,CAAC,GAEjE,MAAM,MAAM,2EAA2E,CAE3F,GAAG,CAAC;AAAA,cAEN,QACE,MAAM,MAAM,qBAAqB,CACrC,CACF,GAAG,CAAC;AAAA,OACH;AAAA,QAEGuD,EAAa,gBAAgB,cAAe,KAAK,EAC5C,gBAAgB,SAAU,MAAOxD,EAAO,MAAM,EAC9C,gBAAgB,MAAO,MAAOU,EAAI,MAAM,EACxC,iBAAiBmB,EAAOJ,CAAM,CAAC;AAAA,QACtC+B,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,UAC1EJ,EAAU,0CAA4C;AAAA,+BACjC3B,EAAO,gBAAgB,YAAY,CAAC;AAAA,6BACtCI,EAAM,KAAK,OAAO;AAAA,WACpC,IAAM,CACT,OAAQ5B,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA;AAAA,yCAEsB4B,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,yCAEnC5B,EAAW,kBAAkB;AAAA,mBAE5D,IAAK,SACH,MAAO,wBACFoB,EAAW,SAAW,GAAK
A,EAAW,SAAW,EAAK,wBACA,wBAAwB,oBACrF,IAAK,QACH,MAAO,6DACT,QACE,MAAM,MAAM,4BAA4BpB,EAAW,IAAI,EAAE,CAC7D,CACF,GAAG,CAAC;AAAA,CACT;AAAA,SAGK,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAGA,EAAW,QAAQ,IAAIO,CAAY,IAAIR,EAAO,OAAS,EAAIA,EAAS,EAAE,IAC3ES,EAAM,OAAS,EAAIA,EAAQ,EAAE,IAAIC,EAAI,OAAS,EAAIA,EAAM,EAAE,IAAI0C,CAAO,IAAI/B,CAAU,GACvF,kBAAmB,CAAC,MAAM,CAC5B,EACA,gBAAAkC,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMjC,EAAa,SAAUuB,EAAY,QAAQ,CAAC,EAC7D,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,OAAsB,KAAMlD,CAAM,EAC9E,CAAC,OAAsB,KAAMU,CAAG,EAAG,GAAG+C,EAA2BpC,EAAYC,CAAW,CAC1F,CACF,EACF,CACF,EAEE/B,GAAuCmE,GAAoC,CAC/E,IAAMC,EAAmBD,EAAQ,iBAGjC,OAF2B,IAAI,YAAYC,EAAkBA,EAAiB,WAAY,CAAC,EACnD,CAAC,CAE3C,EAEanE,GAAS,CAACkE,EAAyBzD,IAAuC,CACrF,IAAMD,EAAmB,CAAC,EACpBS,EAAkB,CAAC,EACnBC,EAAgB,CAAC,EAKjBF,EAAejB,GAAoCmE,CAAO,EAChE,GAAIzD,EAAW,YAAc,EAC3B,MAAM,MAAM,6DAA6D,EAE3ExB,GAAeiF,EAAQ,OAAQzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAC3EgD,EAAQ,QACJpE,GAAwBoE,EAAQ,OAAO,CAAC,EAAGzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC7G,EAEajB,GAAyBQ,GAA0D,CAC9F,IAAM2D,EAAY3D,EAAW,UACvBE,EAAOF,EAAW,KAClB4D,EACF5D,EAAW,wBACToC,EAAcpC,EAAW,YACzBqC,EAAiBrC,EAAW,iBAA6B,EACzDiC,EAAqBjC,EAAW,mBAChC6D,EAA+C7D,EAAW,sBAC1D8D,EAAa9D,EAAW,KAExBe,EAA4Bf,EAAW,cAAgB,GAAK,SAAWA,EAAW,YACxF,OAAO+D,GAA4B,CACjC,UAAAJ,EACA,KAAAzD,EACA,wBAAA0D,EACA,YAAAxB,EACA,eAAAC,EACA,mBAAAJ,EACA,sBAAA4B,EACA,KAAAC,EACA,YAAA/C,CACF,CAAC,CACH,IC1rBA,IAkBMiD,GAqDAC,GA+FOC,GAtKbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KASMR,GAAiB,CAACS,EAA+BC,IAAgD,CACrG,GAAM,CAACC,EAAOC,EAAaC,EAAUC,CAAQ,EAAIL,EAC3C,CAAC,SAAAM,EAAU,mBAAAC,CAAkB,EAAIN,EAEvC,GAAIC,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wDAAwDA,EAAM,KAAK,MAAM,EAAE,EAE7F,GAAI,CAACM,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,GAAK,CAACK,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,CAAC,GACtFA,EAAY,KAAK,SAAW,EAC9B,MAAM,IAAI,MAAM,uEAAuEA,EAAY,KAAK,MAAM,EAAE,EAElH,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAI,CAACG,EAAU,SAASJ,EAAS,KAAMC,EAAS,IAAI,EAClD,MAAM,IAAI,MAAM,wEAA4E,EAG9F,GAAIE,EAAqB,GAAKD,IAAa,EACzC,MAAM,IAAI,MAAM,iEAAiE,EAGnF,IAAMG,EAAYP,EAAM,KAAK,CAAC,EACxBQ,EAAiBR,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACjDS,EAAoBP,EAAS,KAAK,CAAC,EACnCQ,EAAaJ,EAAU,kBAAkBN,EAAM,KAAM,CAAC,EAAIQ,EAC1DG,EAAWN,IAAuB,EAAIH,EAAS,KAAK,CAAC,EAAI,EAAIQ,EAAaN,EAChF,GAAIC,EAAqBM,EACvB,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIV,EAAY,KAAK,SAAW,EAAG,CACjC,GAAIM,IAAcN,EAAY,KAAK,CAAC,EAClC,MAAM,IAAI,MAAM,sEAAsEA,EAAY,KAAK,CAAC,CAAC,EAAE,EAE7G,GAAIO,IAAmBP,EAAY,KAAK,CAAC,EACvC,MAAM,IAAI,MAAM,2EAA2EA,EAAY,KAAK,CAAC,CAAC,EAAE,CAEpH,CAEA,GAAIU,EAAW,IAAMT,EAAS,KAAK,CAAC,GAAKG,EAAqB,IAAMH,EAAS,KAAK,CAAC,EACjF,MAAM,IAAI,MAAM,kGACZA,EAAS,KAAK,CAAC,CAAC,EAAE,EAGxB,GAAIM,EAAiBC,EACnB,MAAM,IAAI,MAAM,gFAAgF,CAEpG,EAEMnB,GACF,CAACQ,EAA+BC,IAAuD,CACrF,GAAM,CAAC,YAAAa,EAAa,SAAAR,EAAU,mBAAAC,EAAoB,MAAAQ,CAAK,EAAId,EACrDQ,EAAYT,EAAO,CAAC,EAAE,KAAK,CAAC,EAC5BgB,EAAcR,EAAU,kBAAkBR,EAAO,CAAC,EAAE,KAAM,CAAC,EAC3DU,EAAiBV,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACzDY,EAAaI,EAAcN,EAC3BO,EAAyBjB,EAAO,CAAC,EAAE,KAAK,CAAC,EACzCa,EAAWN,IAAuB,EAAIU,EAAyB,EAAIL,EAAaN,EAKhFY,EACF,IAAI,MAAcT,EAAWC,EAAgBE,EAAaC,EAAUA,EAAWI,CAAsB,EACnGE,EAAgBX,EAAU,eAAeU,CAAW,EAEpDE,EAAoC,CACxC,CAAC,OAAsB,KAAML,CAAK,EAClC,CAAC,QAAuB,KAAMG,CAAW,EACzC,CAAC,QAAuB,KAAMC,CAAa,EAI3C,GAAInB,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MAAsB,CAAC,QAAuB,KAAM,CAACgB,EAAaJ,EAAYC,EAAU,CAAC,CAAC,CAAC,EAC/F,CAAC,EACT,GAAIb,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,M
ACA,CAAC,QAAuB,KAAM,CAACgB,EAAaH,EAAUH,EAAiBG,EAAU,CAAC,CAAC,CAAC,EACxF,CAAC,EAET,GAAGQ,EAA2BrB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9G,EAEMsB,EAAmBC,GAA+B,CACtD,IAAMrB,EAAQsB,EAAc,QAASxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEG,EAAcqB,EAAc,eAAgBxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACrFI,EAAWoB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EK,EAAWmB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EyB,EAASC,EAAe,SAAU1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAEjF,OAAAuB,EAAa,iBAAiB,CAC5B,CAAC,KAAM,QAAS,KAAM,KAAK,EAC3B,CAAC,KAAM,eAAgB,KAAM,MAAO,OAAQL,EAAY,MAAM,EAC9D,CAAC,KAAM,iBAAkB,KAAM,MAAO,OAAQC,EAAc,MAAM,EAClE,CAAC,KAAM,uBAAwB,KAAM,MAAO,OAAQA,EAAc,MAAM,CAC1E,CAAC,EAEM;AAAA,UACLI,EAAa,iBAAiBrB,EAAOC,EAAaC,EAAUC,EAAUoB,CAAM,CAAC;AAAA;AAAA,UAE7EF,EAAa,UAAUI,EAAc,CAAC;AAAA,+CACDvB,EAAS,IAAI;AAAA;AAAA;AAAA,YAGhDmB,EAAa,sCAAsC,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA,kBAIpDpB,EAAY,2BAA2B,UAAWuB,EAAe,GAAIvB,EAAY,KAAK,OAAQ,CAAC,CAAC,CAAC;AAAA;AAAA,sBAE7FA,EAAY,YAAY,kBAAkB,CAAC;AAAA,oFACmBW,CAAW;AAAA,yDACtCA,CAAW;AAAA,uBAC7CZ,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEF,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEoB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA,uBACpBvB,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEH,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEqB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA;AAAA;AAAA,cAG7BA,EAAO,YAAY,IAAKvB,EAAM,YAAY,GAAG,CAAC,CAAC;AAAA;AAAA,UAGvD,EAEA,MAAO,CACL,KAAM,kBACN,YAAa,CACX,KAAM0B,GAA4B,CAC1B,YAAAd,CACF,CAAC,EAAE,SACT,kBAAmB,CAAC,OAAQ,OAAQ,OAAQ,MAAM,CACpD,EACA,gBAAAQ,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAU,KAAKU,CAAW,EAAIS,EAAc,CAAC,EAC1E,gBAAAP,CACF,EACF,CACF,EAES3B,GAAkB,CAACoC,EAAyB5B,IAAgD,CACvGV,GAAesC,EAAQ,OAAQ5B,CAAU,EACzC4B,EAAQ,QAAQrC,GAAiCqC,EAAQ,OAAQ5B,CAAU,CAAC,CAC9E,ICzKA,IAeM6B,GAwDAC,GA4IOC,GAnNbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAOMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMC,EAAoBD,EAAO,CAAC,EAC5BE,EAAmBF,EAAO,CAAC,EAC3BG,EAAoBH,EAAO,CAAC,EAElC,GAAIC,EAAM,WAAaC,EAAK,UAAYD,EAAM,WAAaE,EAAM,SAC/D,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIC,EAAK,KAAK,SAAW,GAAKA,EAAK,KAAK,SAAW,EACjD,MAAM,IAAI,MAAM,uBAAuB,EAGzC,IAAME,EAAaH,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EAC7CI,EAAiBJ,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACvD,GAAIC,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAME,EACtC,MAAM,IAAI,MAAM,8CAA8C,EAEhE,GAAIF,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMG,EACtC,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIF,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,IAAMC,EACxC,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBN,EAAO,CAAC,EACjC,GAAIM,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMF,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACA,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMO,EAAmBP,EAAO,CAAC,EACjC,GAAIO,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMH,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACF,EAEMX,GACF,CAACO,EAA+BQ,EAAqCC,EAAqBC,IACvE,CACb,IAAMC,EAAaH,EAAW,WAExBI,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAcH,EACdI,EAAaH,EACbT,EAAaQ,EAAW,MAAM,EAAE,EAAE,CAAC,EACnCK,EAAmBP,EAAaE,EAAW,MAAM,EAAG,EAAE,EAAE,OAAO,CAAC,EAAI,CAAC,EACrEM,EAAe,CAACP,GAAcX,EAAO,OAAS,EAC9CmB,EAAenB,EAAO,OAAS,EAC/BoB,EAAgBV,GAAcD,EAAc,EAC
5CY,EAAqBX,GAAcD,EAAc,EACjDa,EAA4Bb,EAAc,EAC1Cc,EAAgB,GAEhBC,EAAaC,GAAiBrB,CAAU,EAExCsB,EAAoC,CACxC,CAAC,QAAuB,KAAMV,CAAU,EACxC,CAAC,QAAuB,KAAMQ,CAAU,EACxC,CAAC,QAAuB,KAAMpB,CAAU,EACxC,CAAC,OAAsB,KAAMI,EAAW,OAAO,CACjD,EACMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAAmC,CACvC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,aAAc,KAAM,KAAK,EAChC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,KAAK,CAC/B,EACMC,EAAY,CAChBC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACjEO,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACpEO,EAAc,QAAS/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CACvE,EACIN,GACFY,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAElFL,GACFW,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAEtFM,EAAU,KAAKE,EAAe,SAAUhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAChFJ,GACFU,EAAU,KAAKE,EAAe,gBAA+Bf,CAAgB,CAAC,EAE5EI,GACFS,EAAU,KAAKE,EAAe,mBAAkCf,CAAgB,CAAC,EAE/EK,GACFQ,EAAU,KAAKE,EAAe,sBAAuBhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAEnG,IAAMS,EAAWC,GAA4BlC,EAAO,CAAC,EAAE,QAAQ,EACzDmC,EAAcD,KAA4CV,CAAU,EAC1E,MAAO;AAAA;AAAA,QAEXI,EAAa,iBAAiBC,CAAa,EAAE,iBAAiB,GAAGC,CAAS,CAAC;AAAA,0CACzCK,CAAW,KAAKZ,CAAa;AAAA,kDACrBY,CAAW,KAAKZ,CAAa;AAAA;AAAA,QAEvEK,EAAa,UAAU,CACjBL,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA;AAAA,iCAEmBA,CAAa;AAAA;AAAA;AAAA,gDAGEA,CAAa;AAAA;AAAA;AAAA,oBAGzCA,EAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKRJ,EAAe,qBAAuBc,EAAW,OAAO;AAAA;AAAA;AAAA,YAGzEX,EAA4B,2CAA6C,EAAE;AAAA;AAAA,4BAE3Dc,GAAUH,EAAUT,EAAY,OAAO,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMlCD,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAY1Bc,GAAU,MAAOb,CAAU,CAAC;AAAA,wCACTa,GAAU,aAAcb,CAAU,CAAC,gCAC3Db,EAAa,GAAK,eAAe;AAAA,UACvCS,EAAgB,kCAAoC,EAAE;AAAA,UACtDC,EAAqB,4CAA8C,EAAE;AAAA;AAAA;AAAA,qDAG1BV,EAAa,GAAK,KAAKsB,CAAQ,QAAQ;AAAA,cAC9EA,CAAQ;AAAA,cACRf,EAAe,uBAAyB,EAAE;AAAA;AAAA,QAG9C,EACMoB,EAAU,CAAC,CAAC,KAAMvB,EAAa,SAAUf,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIS,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAM1B,EAAY,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAExD,CACL,KAAM,yBACN,YAAa,CACX,KAAM,GAAGwB,CAAU,IAAIJ,CAAa,IAAIC,CAAkB,IAAIC,CAAyB,GACvF,kBAAmBtB,EAAO,IAAI,CAACuC,EAAQC,IAAW,MAAM,CAC1D,EACA,gBAAAb,EACA,WAAY,KAAO,CACjB,QAAAW,EACA,cAAe,CACb,EAAG,KAAK,KAAKtB,EAAaZ,CAAU,CACtC,EACA,gBAAAsB,CACF,EACF,CACF,EAEKhC,GAAgB,CAAC+C,EAAyBjC,IAA8C,CAGnGhB,GAAeiD,EAAQ,MAAM,EAG7B,IAAMH,EAAU,CAAC,CAAC,EACdG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAK,CAAC,EAEhBG,EAAQ,QACJhD,GAA+BgD,EAAQ,OAAQjC,EAAYiC,EAAQ,YAAa,EAAU,EAAG,CAAC,QAAAH,CAAO,CAAC,CAC5G,ICrOA,IAiBMI,GAkBAC,GAcAC,GAeAC,GAcAC,GAsBAC,GAmFOC,GAYAC,GAnMbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAQMb,GAAiB,CAACc,EAA+BC,IAAsC,CAC3F,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIC,EAAW,KAAK,SAAW,GAC7B,GAAIA,EAAW,KAAK,SAAWA,EAAW,OAAO,QAAUA,EAAW,KAAK,SAAWA,EAAW,KAAK,OACpG,MAAM,IAAI,MAAM,iDAAiD,UAE1DA,EAAW,OAAO,SAAWA,EAAW,KAAK,OACtD,MAAM,IAAI,MAAM,2CAA2C,EAE7DD,EAAO,MAAM,CAAC,EAAE,QAAQ,CAACE,EAAGC,IAAQ,CAClC,GAAIH,EAAOG,EAAM,CAAC,EAAE,WAAa,GAAkBH,EAAOG,EAAM,CAAC,EAAE,WAAa,EAC9E,MAAM,IAAI,MAAM,SAASA,CAAG,qCAAqC,CAErE,CAAC,CACH,EAEMhB,GAAY,CAACa,EAA+BG,IAA0B,CAC1E,IAAMC,EAAkB,CAAC,EACzB,GAAIJ,EAAO,OAASG,EAClB,GAAIH,EAAOG,CAAG,EAAE,WAAa,EAC3BH,EAAOG,CAAG,EAAE,iBAAiB,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,UACxDL,EAAOG,CAAG,EAAE,WAAa,EAClCH,EAAOG,CAAG,EAAE,cAAc,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,MAE9D,OAAM,IAAI,MAAM,SAASF,
CAAG,qCAAqC,EAGrE,OAAOC,CACT,EAEMhB,GACF,CAACY,EAA+BC,IAAiD,CAC/E,GAAID,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBnB,GAAUa,EAAQ,CAAC,EACtCO,EAAiBpB,GAAUa,EAAQ,CAAC,EACtCQ,EAAiBrB,GAAUa,EAAQ,CAAC,EACxC,OAAIQ,EAAK,SAAW,IAClBA,EAAO,CAAC,GAAG,MAAMR,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,KAAK,CAAC,GAEzCS,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,KACE,QAAOP,CAEX,EAEEZ,GACF,CAACqB,EAAeC,EAAeC,EAA+BJ,EAAyBK,IACzE,CACR,IAAIC,EAAWJ,EAIf,OAHIA,EAAQ,IACVI,GAAYF,EAAWJ,EAAKG,CAAK,CAAC,GAEhCE,EAAMF,CAAK,EAAI,EACV,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,EAAI,CAAC,CAAC,EAE3D,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,CAAC,CAAC,CAElE,EAEFrB,GACF,CAACc,EAAsBW,EAAuBH,IAC1C,4CAA4CG,EAAO,KAAK,OAAO,QAAQX,EAAM,KAAK,OAAO;AAAA,+BAClEA,EAAM,KAAK,OAAO;AAAA;AAAA,yBAExBQ,EAAW,MAAM;AAAA,kCACRI,EAAa,uBAAwB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BAClEI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BACtDI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,6BACrDI,EAAa,kBAAmB,IAAKJ,EAAW,MAAM,CAAC;AAAA,iCACnDG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAO3DX,EAAM,WAAW,gBAAiB,IAAK,aAAa,CAAC;AAAA;AAAA;AAAA,SAK7Db,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBiB,EAAYC,EAAU,KAAKN,CAAU,EACrCJ,EAAQP,EAAW,KAAK,OAAS,EAAKiB,EAAU,cAAcjB,EAAW,KAAMW,EAAW,MAAM,EAC1D,CAAC,GAAG,MAAMA,EAAW,MAAM,EAAE,KAAK,CAAC,EAC3EC,EAAQ1B,GAAUa,EAAQ,CAAC,EAC/Ba,EAAM,QAASM,GAASA,IAAS,IAAM,IAAM,CACnB,MAAM,IAAI,MAAM,kBAAkB,CACpC,EAAE,EACtBN,EAAM,SAAW,IACnBA,EAAQ,MAAML,EAAK,MAAM,EAAE,KAAK,CAAC,GAEnC,IAAMF,EAASL,EAAW,OAAO,IAAI,CAACmB,EAAOC,IAAMhC,GAAkB+B,EAAOC,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAEjGN,EAAON,EAAW,KAAK,IAAI,CAACqB,EAAKD,IAAMhC,GAAkBiC,EAAKD,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAE/F,GAAIL,EAAK,SAAWF,EAAO,QAAUE,EAAK,SAAWD,EAAK,OACxD,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIC,EAAK,SAAWI,EAAW,OAC7B,QAASS,EAAI,EAAGA,EAAIT,EAAW,OAAQ,EAAES,EAClCb,EAAK,SAASa,CAAC,IAClBf,EAAO,OAAOe,EAAG,EAAG,CAAC,EACrBd,EAAK,OAAOc,EAAG,EAAGT,EAAWS,CAAC,CAAC,EAC/BR,EAAM,OAAOQ,EAAG,EAAG,CAAC,GAI1B,IAAME,EAAQV,EAAM,IAAIM,GAAQ,KAAK,KAAKA,CAAI,CAAC,EAE/CN,EAAM,QAAQ,CAACM,EAAME,EAAGG,IAAU,CAChC,GAAIL,EAAO,EAAG,CACZ,IAAMM,GAAYlB,EAAKc,CAAC,EAAIf,EAAOe,CAAC,GAAKF,EACnCO,EAASpB,EAAOe,CAAC,EACjBM,EAAWD,EAASD,EAAWZ,EAAMQ,CAAC,EAC5Cf,EAAOe,CAAC,EAAIM,EACZpB,EAAKc,CAAC,EAAIK,EACVF,EAAMH,CAAC,EAAI,CAACF,CACd,CACF,CAAC,EAED,IAAMS,EAAchB,EAAW,MAAM,CAAC,EACtCJ,EAAK,QAAQ,CAACqB,EAAM3B,IAAM,CACxB0B,EAAYC,CAAI,EAAI,KAAK,MAAMtB,EAAKsB,CAAI,EAAIvB,EAAOuB,CAAI,GAAKhB,EAAMgB,CAAI,CAAC,CACzE,CAAC,EACD,IAAMC,EAA+B,CAAC,KAAMF,EAAa,SAAU5B,EAAO,CAAC,EAAE,QAAQ,EAE/Ee,EAASgB,EAAe,SAAU/B,EAAO,CAAC,EAAE,SAAU4B,EAAY,MAAM,EACxExB,EAAQ4B,EAAc,QAAShC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEiC,EAAaf,EAAU,KAAKU,CAAW,EACvCM,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ5B,EAAO,MAAM,EACtF,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQiB,EAAM,MAAM,EAAG,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQV,EAAM,MAAM,CACvG,EAEMsB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAM3B,CAAM,EAC/E,CAAC,OAAsB,KAAMiB,CAAK,EAAG,CAAC,QAAuB,KAAMV,CAAK,EACxE,GAAGuB,EAA2BpC,EAAO,CAAC,EAAE,KAAM4B,CAAW,CAC3D,EAEMS,EAAmBC,GAA+B;AAAA,QAClDA,EAAa,iBAAiBJ,CAAQ,EAAE,iBAAiB9B,EAAOW,CAAM,CAAC;AAAA,UACrEzB,GAA0Bc,EAAOW,EAAQH,CAAU,CAAC;AAAA,UACpD0B,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,iCACpDvB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDA,EAAO,YAAY,aAAcX,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,SAE/E,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGmB,EAAM,MAAM,IAAIjB,EAAO,MAAM,IAAIO,EAAM,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACnG,gBAAAwB,EACA,WAAY,KAAO,CACjB,QAAS,CAACP,CAAgB,EAC1B,cAAe,CAAC,EAAG,KAAK,KAAKb,EAAY,EAAuB,CAAC,EACjE,gBAAAkB,CACF,EACF,CACF,
EAEa3C,GAAQ,CAAC+C,EAAyBtC,IAAsC,CACnFf,GAAeqD,EAAQ,OAAQtC,CAAU,EACzC,IAAMuC,EAAoBpD,GAAgCmD,EAAQ,OAAQtC,CAAU,EACpFsC,EAAQ,QAAQhD,GAAuBgD,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAO1F,EAEa/C,GAAwBQ,GAAyD,CAC5F,IAAMK,EAASL,EAAW,OACpBM,EAAON,EAAW,KAClBO,EAAOP,EAAW,KACxB,OAAOQ,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,ICxMA,IAeMiC,GAUAC,GAwHOC,GAKAC,GAtJbC,GAAAC,EAAA,kBAOAC,KAEAC,KACAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,CAElD,EAMMT,GAA2B,CAACU,EAAmBC,IAA+C,CAClG,IAAMC,EAAQF,EAAM,KACdG,EAAaC,EAAU,KAAKF,CAAK,EACjCG,EAAK,GACPC,EAAOL,EAAW,KAItB,GAHIK,EAAO,IACTA,EAAOJ,EAAM,OAASI,GAEpBA,EAAOJ,EAAM,OAAS,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMK,EAAOL,EAAMI,CAAI,EACjBE,EAAOL,EAAaI,EACpBE,EAAaC,GAAiBH,CAAI,EAClCI,EAAaJ,EAAOE,EAEpBG,EAAY,CAACC,EAAcJ,IAC3BA,IAAe,EACV,WAAWI,CAAI,OAAOA,CAAI,YAAYA,CAAI,OAAOA,CAAI,OACnDJ,IAAe,EACjB,OAAOI,CAAI,OAAOA,CAAI,MACpBJ,IAAe,EACjB,WAAWI,CAAI,OAAOA,CAAI,QAAQA,CAAI,MAGxCA,EAEHC,EAAIC,EAAc,IAAKf,EAAM,SAAUA,EAAM,KAAMS,CAAU,EAC7DO,EAASC,EAAe,SAAUjB,EAAM,SAAUA,EAAM,KAAMS,CAAU,EACxES,EAAYJ,EAAE,KAAK,MAEnBK,EAAgBC,GAA4BpB,EAAM,QAAQ,IAAM,MAClE,mBAAmBkB,CAAS,oBAC5B,mBAAmBA,CAAS,eAC1BG,EAAmBC,GAA+B;AAAA,sCACpBJ,CAAS;AAAA,sCACTA,CAAS;AAAA,4CACHA,CAAS,KAAKb,CAAE;AAAA;AAAA,4DAEAa,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA,gEAKLA,CAAS;AAAA;AAAA;AAAA;AAAA,QAIjEI,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBR,EAAGE,CAAM,CAAC;AAAA,QAC7EM,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA,qBAGXjB,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAMbc,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAmBID,CAAS,IAAIN,EAAU,kBAAmBH,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKtDS,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAeRA,CAAS,IAAIK,GAAU,kBAAmBd,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAU9E,MAAO,CACL,KAAM,UACN,YAAa,CAAC,KAAM,GAAGA,CAAU,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAChE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAO,SAAUF,EAAM,QAAQ,CAAC,EACjD,cAAe,CAAC,EAAGQ,CAAI,EACvB,gBAAiB,CAAC,CAAC,OAAsB,KAAMG,CAAU,CAAC,CAC5D,GACA,gBAAAU,CACF,CACF,EAEa9B,GAAU,CAACiC,EAAyBvB,IAAwC,CACvFZ,GAAemC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAyBkC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CACzE,EAEaT,GAA0BS,GACnCwB,GAA4B,CAAC,KAAMxB,EAAW,IAAc,CAAC,ICvJjE,IAiBMyB,GAMAC,GAWAC,GASAC,GAqBAC,GAuDOC,GAOAC,GA9HbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAQMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,CAEpC,EAEMZ,GACF,CAACY,EAA+BC,IAAiD,CAC/E,IAAMC,EAAuB,CAAC,EAC1BC,EAAqBF,EAAW,WACpC,OAAID,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQI,GAAKF,EAAW,KAAK,OAAOE,CAAC,CAAC,CAAC,EACpED,EAAaD,EAAW,QAEnBG,GAA4B,CAAC,WAAAF,EAAY,KAAMF,EAAW,KAAM,WAAAC,CAAU,CAAC,CACpF,EAEEb,GAA4BiB,GAAoC;AAAA;AAAA,gCAEtCA,CAAe;AAAA,kBAC7BC,EAAa,8BAA+B,IAAKD,CAAe,CAAC;AAAA;AAAA;AAAA;AAAA,aAItEA,CAAe;AAAA,GAEtBhB,GAAuBkB,GAAsC,CACjE,IAAMF,EAAkBE,EAAQ,OAC1BC,EAAsB,CAAC,EAC7B,QAASC,EAAI,EAAGA,EAAIJ,EAAiB,EAAEI,EAAG,CACxC,IAAMC,EAAgBH,EAAQE,CAAC,EAAE,aAAa,UAAW,mBAAmB,EACxEJ,IAAoB,EACtBG,EAAU,KAAKE,CAAa,EACnBD,IAAM,EACfD,EAAU,KAAK,wBAAwBC,CAAC,QAAQC,CAAa,IAAI,EACxDD,IAAMJ,EAAkB,EACjCG,EAAU,KAAK,UAAUE,CAAa,IAAI,EAE1CF,EAAU,KAAK,6BAA6BC,CAAC,OAAOC,CAAa,IAAI,CAEzE,CACA,MAAO;AAAA,wDAC+CH,EAAQ,CAAC,EAAE,KAAK,OAAO;AAAA,UACrEC,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,QAE9B,EAEMlB,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAWf,EAAO,CAAC,EAAE,SACrBgB,EAAOF,EAAU,cAAcb,EAAW,KAAMW,EAAW,MAAM,EACjEJ,EAAU,IAAI,MAAqBP,EAAW,UAAU,EACxDgB,EAAQC,EAAc,QAASH,EAAUH,EAAW,MAAM,EAC1DO,EAAkB,IAAI,MA
AclB,EAAW,UAAU,EACzDmB,EAAkC,CAAC,EACnCC,EAA2B,CAAC,EAC9BC,EAAc,EACZC,EAAoC,CAAC,CAAC,QAAuB,KAAMV,CAAS,CAAC,EACnF,QAASH,EAAI,EAAGA,EAAIT,EAAW,WAAYS,IAAK,CAC9CY,GAAerB,EAAW,WAAWS,CAAC,EACtCS,EAAgBT,CAAC,EAAIY,EACrB,IAAME,EAAcZ,EAAW,MAAM,EACrCY,EAAYvB,EAAW,IAAI,EAAIA,EAAW,WAAWS,CAAC,EACtDW,EAAa,KAAKG,CAAW,EAC7BhB,EAAQE,CAAC,EAAIe,EAAe,SAASf,CAAC,GAAIK,EAAUS,EAAY,MAAM,EACtEJ,EAAkB,KAAK,CAAC,KAAMC,EAAaX,CAAC,EAAG,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,CAC9E,CACAuB,EAAgB,KACZ,CAAC,QAAuB,KAAMJ,CAAe,EAAG,GAAGO,EAA2Bd,EAAY,GAAGS,CAAY,CAAC,EAC9G,IAAMM,EAAmBC,GAA+B;AAAA,IAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,qBAAsB,MAAOT,EAAgB,MAAM,EACnE,iBAAiBF,EAAO,GAAGT,CAAO,CAAC;AAAA,IAC1CnB,GAAyB8B,EAAgB,MAAM,CAAC;AAAA,IAChD7B,GAAoBkB,CAAO,CAAC;AAAA;AAAA,IAE5BoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DX,EAAM,gBAAgB,YAAY,CAAC;AAAA,kBACrCA,EAAM,WAAW,UAAWD,CAAI,CAAC;AAAA;AAAA;AAAA,iBAGlCT,EAAa,8BAA+B,qBAAsBY,EAAgB,MAAM,CAAC;AAAA,QAClGF,EAAM,WAAW,UAAWD,EAAM,OAAO,CAAC;AAAA;AAAA;AAAA,KAIhD,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAMf,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,gBAAA0B,EACA,WAAY,KAAO,CACjB,QAASP,EACT,cAAe,CAAC,EAAG,KAAK,KAAKP,EAAY,EAAuB,CAAC,EACjE,gBAAAU,CACF,EACF,CACF,EAEa/B,GAAQ,CAACqC,EAAyB5B,IAAsC,CACnFd,GAAe0C,EAAQ,MAAM,EAC7B,IAAMC,EACFD,EAAQ,OAAO,SAAW,EAAI5B,EAAab,GAAgCyC,EAAQ,OAAQ5B,CAAU,EACzG4B,EAAQ,QAAQtC,GAAuBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC1F,EAEarC,GAAwBQ,GAAyD,CAC5F,IAAMe,EAAOf,EAAW,KAClBC,EAAuBD,EAAW,WAClCE,EAAaF,EAAW,WAAuB,EAAIC,EAAW,OAASD,EAAW,WACxF,GAAIE,IAAeD,EAAW,OAC5B,MAAM,IAAI,MAAM,+CAA+C,EAEjE,OAAOG,GAA4B,CAAC,KAAAW,EAAM,WAAAb,EAAY,WAAAD,CAAU,CAAC,CACnE,ICtIA,IAUM6B,GA4DAC,GAoCOC,GA1GbC,GAAAC,EAAA,kBAGAC,KAEAC,KAGAC,KAEMP,GACF,CAACQ,EAA4BC,EAA+BC,EAA+BC,EAC1FC,IAAuB,CACtB,IAAMC,EAASC,EAAe,cAAeF,EAAYF,EAAW,OAAQ,CAAC,EACvEK,EAAIC,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEQ,EAAID,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxES,EAAIF,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EAE1EU,EACEC,EAAa,CAACL,EAAWE,EAAWC,IAAc,UAAUD,CAAC,KAAKF,CAAC,KAAKG,CAAC,IAC/E,GAAI,CAACP,EACHQ,EAAaN,EAAO,YAChB,aACAO,EAAWL,EAAE,YAAY,YAAY,EAAGE,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAChG,CACL,IAAMG,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,iBAAiBF,CAAC,gBAAgBA,CAAC,IACjDG,EAAc,iBAAiBH,CAAC,gBAAgBA,CAAC,IAEjDI,EAAc,sBAAsBJ,CAAC,6BAA6BA,CAAC,UACzE,MAAO;AAAA,gCACeA,CAAC,MAAMV,EAAO,gBAAgB,qBAAqBU,CAAC,GAAG,CAAC;AAAA,0BAC9DA,CAAC,MAAMR,EAAE,2BAA2B,iBAAiBQ,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMN,EAAE,2BAA2B,iBAAiBM,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAML,EAAE,2BAA2B,iBAAiBK,CAAC,GAAIV,CAAM,CAAC;AAAA,yBAClEU,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,6BACZA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,cAC/BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIJ,EAAWK,EAAaC,EAAaC,CAAW,CAAC;AAAA,WAErF,EACIf,IAAe,EACjBO,EAAa;AAAA;AAAA,cAETE,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,wGAGtCF,EAAa;AAAA,cACTE,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,WAGtD,CAEA,MAAO;AAAA,UACHb,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBU,EAAGH,EAAGE,EAAGJ,CAAM,CAAC;AAAA,UACjFL,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEW,CAAU;AAAA,QAEhB,EAEElB,GAA4BQ,GAA+C,CAC/E,IAAMmB,EAAQnB,EAAO,CAAC,EAAE,KAClBoB,EAAQpB,EAAO,CAAC,EAAE,KAClBqB,EAAQrB,EAAO,CAAC,EAAE,KAClBsB,EAAiBtB,EAAO
,CAAC,EAAE,SAE3BE,EAAc,EAAEqB,EAAU,SAASJ,EAAOC,CAAK,GAAKG,EAAU,SAASH,EAAOC,CAAK,GACrFG,EAAcL,EACdM,EAAaF,EAAU,KAAKJ,CAAK,EAGrC,GAAIjB,EAAa,CACf,IAAMwB,EAAkBC,GAAc,UAAUA,GAAc,UAAUR,EAAOC,EAAO,EAAK,EAAIC,EAAO,EAAK,EAC3G,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,6CAA8C,EAEhEF,EAAcE,EACdD,EAAaF,EAAU,KAAKC,CAAW,CACzC,CAEA,IAAMI,EAAU,KAAK,KAAKH,EAAa,CAAC,EAExC,MAAO,CACL,KAAM,QACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,gBAAkB1B,GACdR,GAA2BQ,EAAcC,EAAQwB,EAAatB,EAAaoB,CAAc,EAC7F,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAME,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAgB,CAAC,EACrF,gBACI,CAAC,CAAC,QAAuB,KAAMG,CAAO,EAAG,GAAGC,EAA2BR,EAAOF,EAAOC,EAAOI,CAAW,CAAC,CAC9G,EACF,CACF,EAEa/B,GAASqC,GAAkC,CACtDA,EAAQ,QAAQtC,GAAyBsC,EAAQ,MAAM,CAAC,CAC1D,IC5GA,IA8CaC,GA9CbC,GAAAC,EAAA,kBAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAOavC,GAA+D,IAAI,IAAI,CAClF,CAAC,MAAO,CAAUwC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAwB,CAAC,EAC7C,CAAC,SAAU,CAACC,GAAQD,EAAwB,CAAC,EAC7C,CAAC,OAAQ,CAAUE,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACC,EAAS,CAAC,EAEzB,CAAC,cAAe,CAAMC,GAAkBC,EAA0B,CAAC,EACnE,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,UAAW,CAACC,EAAO,CAAC,EACrB,CAAC,gBAAiB,CAACC,EAAa,CAAC,EACjC,CAAC,OAAQ,CAAUC,GAAeC,EAAmB,CAAC,EACtD,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,gBAAiB,CAACC,GAAeC,EAA4B,CAAC,EAC/D,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,eAAgB,CAACC,GAAcC,EAA2B,CAAC,EAC5D,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,MAAO,CAAUC,GAAcC,EAAoB,CAAC,EACrD,CAAC,QAAS,CAAWC,EAAK,CAAC,EAC3B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACpB,GAAMC,EAAmB,CAAC,EACzC,CAAC,SAAU,CAACoB,GAAQC,EAAqB,CAAC,EAC1C,CAAC,iBAAkB,CAACC,GAAgBC,EAA6B,CAAC,EAClE,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,oBAAqB,CAAMC,GAAwBC,EAAgC,CAAC,EACrF,CAAC,gBAAiB,CAAMC,GAAoBC,EAA4B,CAAC,EACzE,CAAC,UAAW,CAAWC,EAAO,CAAC,EAC/B,CAAC,iBAAkB,CAAWC,EAAc,CAAC,EAC7C,CAAC,sBAAuB,CAACC,GAAqBC,EAAkC,CAAC,EACjF,CAAC,cAAe,CAAUC,GAAsBC,EAA0B,CAAC,EAC3E,CAAC,wBAAyB,CAACC,EAAY,CAAC,EACxC,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,YAAa,CAAUC,GAAoB1B,EAAoB,CAAC,EACjE,CAAC,OAAQ,CAAW2B,EAAI,CAAC,EACzB,CAAC,cAAe,CAAWC,EAAW,CAAC,EACvC,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,cAAe,CAACC,GAAaC,EAA0B,CAAC,EAEzD,CAAC,UAAW,CAAMC,GAAcC,EAAsB,CAAC,EACvD,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,qBAAsB,CAACC,GAAoBC,EAAiC,CAAC,EAC9E,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAACC,EAAG,CAAC,EACb,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,YAAa,CAAUC,GAAoB1C,EAAoB,CAAC,EACjE,CAAC,QAAS,CAAC2C,EAAK,CAAC,EACjB,CAAC,aAAc,CAAUC,EAAU,CAAC,EACpC,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,eAAgB,CAACC,EAAY,CAAC,EAC/B,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC
,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,UAAW,CAAUC,EAAO,CAAC,EAC9B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,yBAA0B,CAACC,EAAa,CAAC,EAC1C,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,UAAW,CAACC,GAASC,EAAsB,CAAC,EAC7C,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,kBAAmB,CAAUC,GAA0BzE,EAAoB,CAAC,EAC7E,CAAC,OAAQ,CAAC0E,EAAI,CAAC,EACf,CAAC,YAAa,CAACC,GAAWC,EAAwB,CAAC,EACnD,CAAC,QAAS,CAACC,EAAK,CAAC,CACnB,CAAC,IC5ID,IAoBaC,GApBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEAC,KAYaL,GAAN,KAAqB,CAI1B,YAAoBM,EAAwB,CAAxB,aAAAA,EAClB,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAmBC,EAAoBC,EAChEC,EAA0D,CAC5DC,GAAiBL,EAAc,YAAY,IAAI,EAC/C,IAAMM,EAAS,KAAK,QAAQ,OACtBC,EAAqB,KAAK,QAAQ,sBAAsB,EAC9D,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,CAAC,EAClE,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAASR,EAClBO,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQC,EAAM,MAAM,CAAC,CAAC,EAE1E,QAAWC,KAAUR,EACnBM,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQE,EAAO,MAAM,CAAC,CAAC,EAEvEN,GACFI,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAUJ,CAAoB,CAAC,EAExE,IAAMO,EAAYL,EAAO,gBACrB,CAAC,OAAQN,EAAc,gBAAgB,mBAAmB,CAAC,EAAG,QAAAQ,EAAS,MAAOR,EAAc,YAAY,IAAI,CAAC,EAEjH,GAAI,KAAK,QAAQ,gBAAkB,YAAa,CAC9C,IAAMY,EAAc,CAClB,SAAU,KAAK,QAAQ,gBACvB,gBAAiBZ,EAAc,gBAC/B,UAAAW,EACA,cAAAR,CACF,EAC2B,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC1E,KAAKS,CAAW,CACtC,CAEAL,EAAmB,YAAYP,EAAc,eAAe,EAC5DO,EAAmB,aAAa,EAAGI,CAAS,EAC5CJ,EAAmB,mBAAmB,GAAGJ,CAAa,EACtD,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,EAAI,CAAC,EACtE,KAAK,QAAQ,yBAET,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACnD,KAAK,QAAQ,YAAc,cAC7B,KAAK,QAAQ,eAAe,EAE1B,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACrD,KAAK,QAAQ,MAAM,EAErBU,GAAeb,EAAc,YAAY,IAAI,CAC/C,CACA,SAAgB,CAEhB,CACA,MAAMc,EAA0BC,EAAiE,CAC/FV,GAAiBS,EAAY,IAAI,EACjC,IAAMR,EAAS,KAAK,QAAQ,OACtBU,EAAuB,CAAC,EAC1BV,EAAO,SAAS,IAAI,YAAY,GAClCU,EAAW,KAAK,aAAa,EAE/B,IAAMC,EAAeC,GAAmBH,EAA6B,KAAK,QAAQ,OAAO,MAAM,EACzFI,EAAWL,EAAY,gBAAgBG,CAAY,EACnDG,EAAO,GAAGJ,EAAW,KAAK;AAAA,CAAI,CAAC;AAAA,EAAKC,EAAa,yBAAyB;AAAA,EAAKE,CAAQ,GACvFE,EAAef,EAAO,mBAAmB,CAAC,KAAAc,EAAM,MAAON,EAAY,IAAI,CAAC,EAC9EQ,GAAU,UAAW,IAAM,YAAYR,EAAY,IAAI,iBAAiBM,CAAI,EAAE,EAE9E,IAAMG,EAAkBjB,EAAO,sBAC3B,CAAC,QAAS,CAAC,OAAQe,EAAc,WAAY,MAAM,EAAG,OAAQ,OAAQ,MAAOP,EAAY,IAAI,CAAC,EAElG,OAAAD,GAAeC,EAAY,IAAI,EACxB,CAAC,YAAAA,EAAa,gBAAAS,EAAiB,qBAAsBN,EAAa,aAAa,CACxF,CAEA,2BAA2Bd,EACE,CAC3B,IAAMqB,EAAI,OAAOrB,GAAkB,SAAWA,EAAgBA,EAAc,EACtEsB,EAAI,OAAOtB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEuB,EAAI,OAAOvB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEwB,EAAoB,KAAK,QAAQ,OAAO,OAAO,iCACrD,GAAIH,GAAKG,GAAqBF,GAAKE,GAAqBD,GAAKC,EAC3D,MAAO,CAACH,EAAGC,EAAGC,CAAC,EAEjB,IAAME,EAAOJ,EAAIC,EAAIC,EACjBG,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EAC/C,GAAIC,EAAkBF,EAAmB,CAEvC,GADAE,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EACvCC,EAAkBF,EACpB,MAAM,IAAI,MAAM,6CAA6C,EAE/D,MAAO,CAACE,EAAiBA,EAAiBA,CAAe,CAC3D,KACE,OAAO,CAACA,EAAiBA,EAAiB,CAAC,CAE/C,CACF,IC3HA,IAmCMC,GA4CAC,GAiBAC,GAwBOC,GAxHbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KACAC,KACAC,KACAC,KAwBMZ,GACF,CAACa,EAAqCC,IAA2E,CAC/G,GAAIA,EAAkB,SAAWD,EAAa,OAC5C,MAAM,IAAI,MAAM,4BAA4BC,EAAkB,MAAM,wCAChED,EAAa,MAAM,GAAG,EAG5B,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIH,EAAa,OAAQ,EAAEG,EAAG,CAC5C,IAAMC,EAAOJ,EAAaG,CAAC,EAAE,SAC7B,OAAQF,EAAkBE,CAAC,EAAG,CAC5B,IAAK,OAAQ,CACXD,EAAW,KAAK,EAAE,EAClB,K
ACF,CACA,IAAK,OAAQ,CACXA,EAAW,KAAK,GAAGE,CAAI,EAAE,EACzB,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAOL,EAAaG,CAAC,EAAE,KAAK,OAClCD,EAAW,KAAK,GAAGE,CAAI,IAAIC,CAAI,EAAE,EACjC,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAON,EAAaG,CAAC,EAAE,KAAK,KAAK,GAAG,EAC1CD,EAAW,KAAK,GAAGE,CAAI,IAAIE,CAAI,EAAE,EACjC,KACF,CACA,QACE,MAAM,IAAI,MAAM,iCAAiCL,EAAkBE,CAAC,CAAC,EAAE,CAC3E,CACF,CAEA,OAAOD,EAAW,KAAK,GAAG,CAC5B,EASEd,GACF,CAACmB,EAA0BP,EAAqCQ,IAA0C,CAGxG,IAAIC,EAAMF,EAAY,KACtB,OAAIA,EAAY,aAAa,OAC3BE,GAAO,IAAMF,EAAY,YAAY,KAAO,KAE9CE,GAAO,IAAMD,EACT,IACOrB,GACIa,EACAO,EAAY,aAAa,mBACrB,IAAI,MAAwCP,EAAa,MAAM,EAAE,KAAK,MAAM,CAAC,CAAC,GAC1FS,CACT,EAEEpB,GAAN,KAA6C,CAI3C,YAAYqB,EAA6B,CACnCA,IACF,KAAK,aAAeA,EAAY,aAChC,KAAK,OAASA,EAAY,OAE9B,CAEA,eAAeC,EAAwC,CACrD,OAAO,KAAK,eAAiBA,CAC/B,CAEA,SAASC,EAA4B,CACnC,OAAO,KAAK,SAAWA,CACzB,CACF,EAMatB,GAAN,KAAoB,CAApB,cAkBL,sBAAgC,KAOhC,qBAA+B,KAgC/B,KAAQ,eAAyC,KACjD,KAAQ,mBAAiD,KACzD,uBAAoB,GACpB,2BAAwB,EAGxB,KAAQ,eAAsC,CAAC,EAE/C,KAAQ,eAAsD,IAAI,IAOlE,mBAA8B,UAI9B,yBAAkD,IAAI,IAKtD,KAAQ,uBAA2D,IAAI,IAKvE,gCAA4E,IAAI,IA7ChF,IAAI,yBAAoD,CACtD,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,yEAAyE,EAG3F,IAAIuB,EAAO,KAAK,iBAAiB,IAAI,KAAK,eAAe,EACzD,OAAKA,IACHA,EAAO,CAAC,EACR,KAAK,iBAAiB,IAAI,KAAK,gBAAiBA,CAAI,GAG/CA,CACT,CAmCA,MAAM,WAAWC,EAAUC,EAAoC,CAC7D,KAAK,IAAMD,EACX,IAAME,EAAqC,CAAC,EACtCC,EAAwC,CAC5C,eAAgB,CACd,+BAAgCF,EAAQ,OAAO,+BAC/C,iCAAkCA,EAAQ,OAAO,iCACjD,4BAA6BA,EAAQ,OAAO,4BAC5C,cAAeA,EAAQ,OAAO,cAC9B,kCAAmCA,EAAQ,OAAO,kCAClD,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,wBAC3C,EACA,iBAAAC,CACF,EAEID,EAAQ,SAAS,IAAI,qDAAqD,EAC5EC,EAAiB,KAAK,qDAAuE,EACpFD,EAAQ,SAAS,IAAI,iBAAiB,GAC/CC,EAAiB,KAAK,iBAAiB,EAErCD,EAAQ,SAAS,IAAI,YAAY,GACnCC,EAAiB,KAAK,YAAY,EAGpC,KAAK,OAAS,MAAMD,EAAQ,cAAcE,CAAgB,EAC1D,KAAK,YAAc,IAAI5B,GAAgB0B,EAAQ,MAAQ,MAAMA,EAAQ,mBAAmB,CAAC,EACzF,KAAK,eAAiBG,GAAqB,IAAI,EAC/C,KAAK,eAAiB,IAAIC,GAAe,IAAI,EAC7C,KAAK,QAAU,IAAI,IACnB,KAAK,qBAAuB,IAAI,IAChC,KAAK,iBAAmB,IAAI,IAG5BC,GAAgBN,EAAI,SAAW,CAAC,CAACA,EAAI,KAAK,EAI1C,KAAK,OAAO,kBAAoBO,GAAM,CAChCA,EAAG,iBAAiB,oBAEtB,QAAQ,MAAM,mDAAmDA,EAAG,MAAM,OAAO,EAAE,CAEvF,EAEA,OAAO,eACH,KAAK,IAAI,OAAQ,SAAU,CAAC,MAAO,KAAK,OAAQ,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAC3G,OAAO,eACH,KAAK,IAAI,OAAQ,UAAW,CAAC,MAAON,EAAS,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAGxG,KAAK,aAAa,CACpB,CAEA,SAAgB,CACV,OAAO,KAAK,SAAa,KAC3B,KAAK,SAAS,QAAQ,EAExB,KAAK,eAAe,QAAQ,CAC9B,CAEA,mBAAuC,CACrC,OAAK,KAAK,iBACR,KAAK,eAAiB,KAAK,OAAO,qBAAqB,GAElD,KAAK,cACd,CAEA,uBAA+C,CAC7C,GAAI,CAAC,KAAK,mBAAoB,CAC5B,IAAMO,EAAiB,KAAK,kBAAkB,EACxCC,EAAkD,CAAC,EAErD,KAAK,YAAc,cACrBA,EAAsB,gBAAkB,CACtC,SAAU,KAAK,SACf,0BAA2B,KAAK,sBAAwB,EACxD,oBAAqB,KAAK,sBAAwB,EAAI,CACxD,GAGF,KAAK,mBAAqBD,EAAe,iBAAiBC,CAAqB,CACjF,CACA,OAAO,KAAK,kBACd,CAEA,gBAAuB,CACjB,KAAK,qBACP,KAAK,mBAAmB,IAAI,EAC5B,KAAK,mBAAqB,KAE9B,CAEA,OAAc,CACZ,GAAI,CAAC,KAAK,eACR,OAGFC,GAAiB,EAEjB,KAAK,eAAe,EACpB,IAAIC,EACA,KAAK,YAAc,SACrB,KAAK,eAAe,gBAChB,KAAK,SAAW,EAAG,KAAK,sBAAwB,EAAG,KAAK,mBAAqB,CAAC,EAElFA,EAAkB,KAAK,OAAO,aAE1B,CAAC,KAAM,KAAK,sBAAwB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAExG,KAAK,eAAe,IAAIA,EAAiB,KAAK,cAAc,EAC5D,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAe,mBAChB,KAAK,mBAAqB,EAAGA,EAAiB,EAAG,KAAK,sBAAwB,EAAI,CAAC,GAGzF,KAAK,OAAO,MAAM,OAAO,CAAC,KAAK,eAAe,OAAO,CAAC,CAAC,EACvD,KAAK,eAAe,sBAAsB,EAC1C,KAAK,eAAiB,KACtB,KAAK,sBAAwB,EAEzB,KAAK,YAAc,QAChBA,EAAiB,SAAS,WAAW,IAAI,EAAE,KAAK,IAAM,CACzD,IAAMC,EAAa,IAAI,eAAeD,EAAgB,eAAe,CAAC,EAChEE,EAAiB,KAAK,eAAe,IAAIF,CAAe,EAC9D,QAAStB,EAAI,EAAGA,EAAIuB,EAAW,OAAS,EAAGvB,IAAK,CAC9C,IAAMyB,EAAoBD,EAAexB,CAAC,EACpC0B,EAAWD,EAAkB,SAC7BE,EAAa,KAAK,QAAQ,IAAID,CAAQ,EACtCE,EAAaD
,EAAW,WACxBE,EAAaF,EAAW,WACxBG,EAAcL,EAAkB,YAChCM,EAAmBN,EAAkB,iBACrCO,EAAoBP,EAAkB,kBACtCQ,EAAeV,EAAWvB,EAAI,CAAC,EAC/BkC,EAAaX,EAAWvB,EAAI,EAAI,CAAC,EAEnC,OAAO,KAAK,cAAkB,MAChC,KAAK,cAAgBiC,GAGvB,IAAME,EAAY,OAAOF,EAAe,KAAK,aAAa,EACpDG,EAAU,OAAOF,EAAa,KAAK,aAAa,EAEtD,GAAI,CAAC,OAAO,cAAcC,CAAS,GAAK,CAAC,OAAO,cAAcC,CAAO,EACnE,MAAM,IAAI,WAAW,2BAA2B,EAGlD,GAAI,KAAK,IAAI,OAAO,WAAW,OAC7B,KAAK,IAAI,OAAO,UAAU,OAAO,CAC/B,QAAS,EACT,eAAgBL,EAAiB,IAC7BM,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,gBAAiBL,EAAkB,IAC/BK,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,SAAAX,EACA,WAAAE,EACA,WAAAC,EACA,YAAAC,EACA,UAAAK,EACA,QAAAC,CACF,CAAC,MACI,CAEL,IAAIG,EAAc,GAClBR,EAAiB,QAAQ,CAACM,EAAOrC,IAAM,CACrCuC,GAAe,SAASvC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC7F,CAAC,EACD,IAAIG,EAAe,GACnBR,EAAkB,QAAQ,CAACK,EAAOrC,IAAM,CACtCwC,GAAgB,UAAUxC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC/F,CAAC,EAED,QAAQ,IAAI,uBAAuBX,CAAQ,IAAIE,CAAU,IAAIC,CAAU,IAAIC,CAAW,KAAKS,CAAW,GAClGC,CAAY,mBAAmBJ,EAAUD,CAAS,KAAK,CAC7D,CACAM,GAAM,MAAO,GAAGX,CAAW,KAAKG,CAAY,KAAKC,CAAU,EAAE,CAC/D,CACAZ,EAAgB,MAAM,EACtB,KAAK,eAAe,OAAOA,CAAe,CAC5C,CAAC,EAEHoB,GAAe,CACjB,CAaA,IAAIC,EAAsBZ,EAAyCa,EAC/DC,EACAC,EACAC,EAAmC,CACrC1B,GAAiBsB,EAAQ,IAAI,EAE7B,IAAMK,EAAwB,CAAC,EAC/B,QAAShD,EAAI,EAAGA,EAAI+B,EAAiB,OAAQ,EAAE/B,EAAG,CAChD,IAAMU,EAAOqB,EAAiB/B,CAAC,EAAE,KAEjC,GAAIU,IAAS,EACX,SAEF,IAAMuC,EAAU,KAAK,eAAe,IAAIvC,CAAI,EAC5C,GAAI,CAACuC,EACH,MAAM,IAAI,MAAM,0BAA0BvC,CAAI,EAAE,EAElDsC,EAAW,KAAKC,CAAO,CACzB,CAEA,GAAM,CAAC,QAAAC,EAAS,cAAAC,EAAe,gBAAAC,CAAe,EAAIT,EAAQ,WAAWZ,CAAgB,EAG/EsB,EAAyBT,EAAc,SAAW,EAAIM,EAAQ,IAAI,CAACI,EAAGtD,IAAMA,CAAC,EAAI4C,EACvF,GAAIS,EAAuB,SAAWH,EAAQ,OAC5C,MAAM,IAAI,MAAM,eAAeG,EAAuB,MAAM,qBAAqBH,EAAQ,MAAM,GAAG,EAIpG,IAAMlB,EAAkC,CAAC,EACnCuB,EAAyB,CAAC,EAChC,QAASvD,EAAI,EAAGA,EAAIkD,EAAQ,OAAQ,EAAElD,EAAG,CAIvC,GAAI,CAAC,OAAO,UAAUqD,EAAuBrD,CAAC,CAAC,GAAKqD,EAAuBrD,CAAC,EAAI,IAC5EqD,EAAuBrD,CAAC,GAAK+C,EAC/B,MAAM,IAAI,MAAM,yBAAyBM,EAAuBrD,CAAC,CAAC,EAAE,EAEtE,GAAIqD,EAAuBrD,CAAC,IAAM,GAChC,SAEF,IAAMwD,EAAcH,EAAuBrD,CAAC,IAAM,GAC5CyD,EAAeJ,EAAuBrD,CAAC,IAAM,GAC7C0D,EAAcF,GAAeC,EAC/BX,EAAyBI,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAC7D6C,EAAmBQ,EAAuBrD,CAAC,EAAGkD,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAGtF,GAFAgC,EAAkB,KAAK0B,CAAU,EAE7BA,EAAW,OAAS,EACtB,SAEF,IAAMT,EAAU,KAAK,eAAe,IAAIS,EAAW,IAAI,EACvD,GAAI,CAACT,EACH,MAAM,IAAI,MAAM,2BAA2BS,EAAW,IAAI,EAAE,EAK9D,GAHIF,GACF,KAAK,cAAc,KAAKP,CAAO,EAE7BQ,EAAc,CAChB,IAAIE,EAAiB,KAAK,qBAAqB,IAAI,KAAK,eAAgB,EACnEA,IACHA,EAAiB,CAAC,EAClB,KAAK,qBAAqB,IAAI,KAAK,gBAAkBA,CAAc,GAErEA,EAAe,KAAKV,CAAO,CAC7B,CACAM,EAAY,KAAKN,CAAO,CAC1B,CAIA,GAAID,EAAW,SAAWjB,EAAiB,QAAUwB,EAAY,SAAWvB,EAAkB,OAAQ,CAEpG,GAAIuB,EAAY,SAAW,EACzB,OAAAb,GAAeC,EAAQ,IAAI,EACpBX,EAMT,MAAM,IAAI,MACN,WAAWW,EAAQ,IAAI,4EAA4E,CACzG,CAKA,IAAIiB,EACJ,GAAIR,EAAiB,CACnB,IAAIS,EAAgB,EACdC,EAAoB,CAAC,EAE3BV,EAAgB,QAAQW,GAAK,CAC3B,IAAMrD,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIrD,EAAK,SAAW,EAClB,OAGF,IAAMsD,EAAgBD,EAAE,OAAS,GAAmB,EAAI,EACpDE,EACAC,GACAH,EAAE,OAAS,IACbG,GAAgBxD,EAAK,OAAS,EAAI,GAAMA,EAAK,OAAS,EAAI,EAAIA,EAAK,OAASsD,EAC5EC,EAAiBvD,EAAK,OAAS,EAAI,GAAKsD,EAAgBtD,EAAK,SAE7DwD,GAAgBxD,EAAK,QAAU,EAAIA,EAAK,OAASsD,EAAgB,GACjEC,EAAiB,IAEnBJ,EAAgB,KAAK,KAAKA,EAAgBK,EAAa,EAAIA,GAC3DJ,EAAQ,KAAKD,CAAa,EAM1B,IAAMM,GAAqBJ,EAAE,OAAS,GAAmB,EAAI,EAC7DF,GAAiBnD,EAAK,OAAS,EAAI,KAAK,KAAKA,EAAK,OAASyD,EAAkB,EAAIF,EAC9CvD,EAAK,OAASsD,CACnD,CAAC,EAID,IAAMI,EAAsB,GAC5BP,EAAgB,KAAK,KAAKA,EAAgBO,CAAmB,EAAIA,EACjE,IAAMC,EAAc,IAAI,YAAYR,CAAa,EACjDT,EAAgB,Q
AAQ,CAACW,EAAG/D,IAAM,CAChC,IAAMsE,EAASR,EAAQ9D,CAAC,EAClBU,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIA,EAAE,OAAS,EACb,IAAI,WAAWM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UAChDqD,EAAE,OAAS,GACpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,GAEpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,EACpB,IAAI,aAAaM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,MAE3D,OAAM,IAAI,MAAM,6BAA6B4B,GAA2ByB,EAAE,IAAI,CAAC,EAAE,CAErF,CAAC,EAED,IAAMQ,EAEF,KAAK,eAAe,OAAOV,EAAe,eAAe,SAAW,eAAe,OAAO,EAC9F,KAAK,OAAO,MAAM,YAAYU,EAAkB,OAAQ,EAAGF,EAAa,EAAGR,CAAa,EACxF,KAAK,eAAe,QAAQU,EAAkB,EAAE,EAChDX,EAAuB,CAAC,OAAQ,EAAG,KAAMC,EAAe,OAAQU,EAAkB,MAAM,CAC1F,CAEA,IAAMC,EAA0B,KAAK,eAAe,2BAA2BrB,CAAa,EACtF9C,EAAuBmE,EAAwB,CAAC,IAAM,GAAKA,EAAwB,CAAC,IAAM,EAE1FlE,EAAMrB,GAAwB0D,EAASZ,EAAkB1B,CAAoB,EAC/EoE,EAAW,KAAK,eAAe,YAAYnE,CAAG,EAQlD,GAPKmE,IACHA,EAAW,KAAK,eAAe,MAAM9B,EAAS6B,CAAuB,EACrE,KAAK,eAAe,YAAYlE,EAAKmE,CAAQ,EAC7CC,GAAU,OAAQ,IAAM,mBAAmBpE,CAAG,kBAAkBqC,EAAQ,IAAI,EAAE,GAI5ES,GAAmBqB,EAAS,qBAAsB,CACpD,GAAIrB,EAAgB,SAAWqB,EAAS,qBAAqB,OAC3D,MAAM,IAAI,MAAM,4CAA4CA,EAAS,qBAAqB,MAAM,SAC5FrB,EAAgB,MAAM,gBAAgBqB,EAAS,YAAY,IAAI,IAAI,EAEzE,QAASzE,EAAI,EAAGA,EAAIoD,EAAgB,OAAQpD,IAAK,CAC/C,IAAM2E,EAAUvB,EAAgBpD,CAAC,EAC3B4E,EAAaD,EAAQ,KACrBE,EAAe,OAAOF,EAAQ,MAAS,SAAW,EAAIA,EAAQ,KAAK,OACnE,CAAC1E,EAAM6E,CAAM,EAAIL,EAAS,qBAAqBzE,CAAC,EACtD,GAAI4E,IAAe3E,GAAQ4E,IAAiBC,EAC1C,MAAM,IAAI,MAAM,oBAAoB9E,CAAC,0BAA0BC,CAAI,cAAc6E,CAAM,cACnFF,CAAU,cAAcC,CAAY,gBAAgBJ,EAAS,YAAY,IAAI,IAAI,CAEzF,CACF,CAOA,GALAC,GACI,OACA,IAAM,yBAAyB/B,EAAQ,IAAI,UAAUrC,CAAG,UAAUkE,EAAwB,CAAC,CAAC,IACxFA,EAAwB,CAAC,CAAC,IAAIA,EAAwB,CAAC,CAAC,EAAE,EAE9D,KAAK,YAAc,QAAU,KAAK,gBAAkB,YAAa,CACnE,IAAM/C,EAAuC,CAC3C,SAAU,KAAK,gBACf,YAAagD,EAAS,YAAY,KAClC,iBAAA1C,EACA,kBAAAC,CACF,EACA,KAAK,eAAe,KAAKP,CAAiB,EAEtC,KAAK,gBAAkB,aACK,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC7D,KAAKA,CAAiB,CAEjD,CAEA,YAAK,eAAe,IAAIgD,EAAUzB,EAAYO,EAAaiB,EAAyBZ,CAAoB,EAExGlB,GAAeC,EAAQ,IAAI,EACpBX,CACT,CAEA,OAAO+C,EAAmBrE,EAAwB,CAChD,KAAK,eAAe,OAAOqE,EAAWrE,CAAI,CAC5C,CAEA,OAAOsE,EAAaC,EAAmB,CACrC,KAAK,eAAe,OAAOD,EAAKC,CAAG,CACrC,CAEA,MAAM,SAASF,EAAmBG,EAAkD,CAGlF,MAAM,KAAK,eAAe,SAASH,EAAWG,CAAe,CAC/D,CAEA,MAAMC,EAAsB,CAC1B,OAAO,KAAK,eAAe,OAAOA,CAAI,EAAE,EAC1C,CAEA,KAAKC,EAAqB,CACxB,OAAO,KAAK,eAAe,QAAQA,CAAG,CACxC,CAEA,aAAaxD,EAAoBF,EAAkB2D,EAAoBxD,EAA0B,CAC/F,IAAMyD,EAAKC,GAAwB,IAAI3D,CAAU,EACjD,GAAI,CAAC0D,EACH,MAAM,IAAI,MAAM,2BAA2B1D,CAAU,EAAE,EAGzD,IAAMD,EAAyB,CAC7B,WAAAC,EACA,WAAAC,EACA,YAAayD,EAAG,CAAC,EACjB,WAAY,CAACA,EAAG,CAAC,EAAGD,CAAS,CAC/B,EACA,KAAK,QAAQ,IAAI3D,EAAUC,CAAU,CACvC,CAEA,cAAcD,EAAwB,CACpC,IAAMiC,EAAiB,KAAK,qBAAqB,IAAIjC,CAAQ,EAC7D,GAAIiC,EAAgB,CAClB,QAAWjD,KAAQiD,EACjB,KAAK,eAAe,QAAQjD,EAAK,EAAE,EAErC,KAAK,qBAAqB,OAAOgB,CAAQ,CAC3C,CAEA,KAAK,iBAAiB,OAAOA,CAAQ,EACrC,KAAK,QAAQ,OAAOA,CAAQ,CAC9B,CAEA,cAAcA,EAAkB8D,EAAyBC,EAA6C,CACpG,IAAMC,EAAS,KAAK,QAAQ,IAAIhE,CAAQ,EACxC,GAAI,CAACgE,EACH,MAAM,IAAI,MAAM,uBAAuBhE,CAAQ,EAAE,EAEnD,IAAME,EAAa8D,EAAO,WACpB7D,EAAa6D,EAAO,WACpBC,EAAcD,EAAO,YACrBE,EAAaF,EAAO,WAC1B,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,YAAY9D,CAAU,KAAKC,CAAU,2CAA2C,EAElG,KAAK,gBAAkBH,EAGnBkE,EAAW,CAAC,IACdA,EAAW,CAAC,EAAIA,EAAW,CAAC,EAAEA,EAAW,CAAC,CAAC,EAC3CA,EAAW,CAAC,EAAI,QAGlBlB,GAAU,OAAQ,IAAM,kCAAkC9C,CAAU,KAAKC,CAAU,MAAM,EAEzF,IAAMgE,EAAgB,KAAK,IAAI,MAE/B,KAAK,cAAgB,CAAC,EACtB,GAAI,CACF,OAAIA,GACF,KAAK,OAAO,eAAe,YAAY,EAGzCF,EAAYH,EAASI,EAAW,CAAC,CAAC,EAC3B,CACT,OAASE,EAAG,CACV,OAAAL,EAAO,KAAK,QAAQ,QAAQ,qBAAqB7D,CAAU,KAAKC,CAAU,aAAaiE,CAAC,EAAE,CAAC,EACpF,CACT,QAAE,CACID,GACFJ,EAAO,KAAK,KAAK,OAAO,cAAc,EAAE,KACpCM,G
AAOA,EAAM,qCAAqCnE,CAAU,KAAKC,CAAU,MAAMkE,EAAI,OAAO,GAAK,IAAI,CAAC,EAG5G,QAAWrF,KAAQ,KAAK,cACtB,KAAK,eAAe,QAAQA,EAAK,EAAE,EAErC,KAAK,cAAgB,CAAC,EACtB,KAAK,gBAAkB,IACzB,CACF,CAGA,eAAesF,EAAmBC,EAAeC,EAAmBf,EAAsB,CACxF,IAAIgB,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EACxEG,IACHA,EAA4B,IAAI,IAChC,KAAK,2BAA2B,IAAIH,EAAWG,CAAyB,GAG1E,IAAMC,EAAiBD,EAA0B,IAAIF,CAAK,EACpDI,EAAK,KAAK,eAAe,uBAAuBH,EAAQf,EAAMiB,IAAiB,CAAC,CAAC,EACvF,OAAAD,EAA0B,IAAIF,EAAO,CAACI,EAAIH,CAAM,CAAC,EAC1CG,CACT,CACA,kBAAkBL,EAAyB,CACzC,IAAMG,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EAC3EG,IACFA,EAA0B,QAAQG,GAAc,KAAK,eAAe,yBAAyBA,EAAW,CAAC,CAAC,CAAC,EAC3G,KAAK,2BAA2B,OAAON,CAAS,EAEpD,CACA,UAAUjB,EAA8B,CACtC,IAAM9B,EAAU,KAAK,eAAe,IAAI8B,CAAS,EACjD,GAAI,CAAC9B,EACH,MAAM,IAAI,MAAM,2BAA2B8B,CAAS,EAAE,EAExD,OAAO9B,EAAQ,MACjB,CACA,iBAAiBsD,EAAsBpB,EAAclF,EAClB,CACjC,MAAO,UAAY,CACjB,IAAMS,EAAO,MAAM8F,GAAgB,KAAMD,EAAWpB,CAAI,EACxD,OAAOsB,GAAW/F,EAAK,OAAQT,CAAI,CACrC,CACF,CAEA,eAAegG,EAAqB,CAC9B,KAAK,YAAc,iBAKtB,KAAK,mBAA2B,eAAe,KAAK,SAAUA,CAAK,CACtE,CACA,cAAqB,CACnB,KAAK,UAAY,QACb,KAAK,IAAI,OAAO,WAAW,OAAS,YACnC,OAAO,KAAK,IAAI,MAAU,IAAc,KAAK,IAAI,KAAK,MAAQ,KAAK,IAAI,UACtE,KAAK,OAAO,SAAS,IAAI,qDAAqD,EAChF,KAAK,UAAY,gBACR,KAAK,OAAO,SAAS,IAAI,iBAAiB,IACnD,KAAK,UAAY,aAGf,KAAK,YAAc,QAAU,OAAO,KAAK,SAAa,MACxD,KAAK,SAAW,KAAK,OAAO,eAAe,CACzC,KAAM,YACN,MAAO,KAAK,kBAAoB,CAClC,CAAC,EACD,KAAK,mBAAqB,KAAK,OAAO,aAElC,CAAC,KAAM,KAAK,kBAAoB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,aAAa,CAAC,GAG/G,CAEA,cAAqB,CACnBvB,GAAU,OAAQ,cAAc,EAC3B,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,GACtD,KAAK,oBAAoB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAEpD,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,GACzD,KAAK,uBAAuB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAG5D,KAAK,MAAM,EACX,KAAK,cAAgB,WACvB,CACA,YAAmB,CACjBA,GAAU,OAAQ,YAAY,EAE9B,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CACA,QAAe,CACbA,GAAU,OAAQ,QAAQ,EAC1B,KAAK,cAAgB,YACrB,IAAMgC,EAAqB,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,EACxEC,EAAwB,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC9E7B,EAAS4B,EAAoB,OACnC,KAAK,eAAiB,CAAC,EACvB,QAAS1G,EAAI,EAAGA,EAAI8E,EAAQ9E,IAAK,CAC/B,IAAM4G,EAAqB,KAAK,sBAAsB,EAChDC,EAAUH,EAAoB1G,CAAC,EACrC,KAAK,eAAe,KAAK,sBAAwB,CAAC,EAClD4G,EAAmB,YAAYC,EAAQ,eAAe,EACtDD,EAAmB,aAAa,EAAGC,EAAQ,SAAS,EACpDD,EAAmB,mBAAmB,GAAGC,EAAQ,aAAa,EAC9D,KAAK,eAAe,KAAK,sBAAwB,EAAI,CAAC,EACtD,KAAK,wBACD,KAAK,YAAc,QACrB,KAAK,eAAe,KAAKF,EAAuB3G,CAAC,CAAC,GAEhD,KAAK,uBAAyB,KAAK,mBAAqB,KAAK,YAAc,cAC7E,KAAK,eAAe,EAElB,KAAK,uBAAyB,KAAK,mBACrC,KAAK,MAAM,CAEf,CAEA,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CAEA,iBAAiBgG,EAAyB,CACxC,KAAK,kBAAkBA,CAAS,EAC5B,KAAK,oBAAoB,IAAIA,CAAS,GACxC,KAAK,oBAAoB,OAAOA,CAAS,EAEvC,KAAK,uBAAuB,IAAIA,CAAS,GAC3C,KAAK,uBAAuB,OAAOA,CAAS,EAE9C,KAAK,eAAe,iBAAiBA,CAAS,CAChD,CAEA,WAAWA,EAAyB,CAClC,KAAK,iBAAmBA,EACxB,KAAK,aAAa,CACpB,CACF,ICx0BA,IAAAc,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAgBMC,GAuCAC,GAoHOF,GA3KbG,GAAAC,EAAA,kBAMAC,KAEAC,KACAC,KAEAC,KAKMP,GAAN,MAAMQ,CAAqC,CACzC,YACYC,EAAuCC,EAAkCC,EACjEC,EAAyB,CADjC,YAAAH,EAAuC,cAAAC,EAAkC,UAAAC,EACjE,UAAAC,CAA0B,CAE9C,iBAAgC,CAC9B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMC,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,aACJ,IAAI,aAAa,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CAChG,CAEA,kBAAkC,CAChC,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,cACJ,IAAI,cAAc,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjG,CAEA,eAA4B,CAC1B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,WAAe,IAAI,WAAW,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjH,CAEA,QAAQE,EAAwC,CAC9C,GAAID,EAAU,KAAKC,CAAO,IAAMD,EAAU,KAAK,KAAK,IAAI,EACtD,MAAM,IAAI,MAAM,mBAAmB,EAErC,OAAO,IAAIN,EAAe,KAAK,OAAQ,KA
AK,SAAU,KAAK,KAAMO,CAAO,CAC1E,CACF,EAEMd,GAAN,KAAmD,CAajD,YAAoBQ,EAA+BO,EAAwBC,EAA2B,CAAlF,YAAAR,EAA+B,aAAAO,EAFnD,KAAQ,iBAAmB,EAC3B,KAAQ,eAAiB,EAEvB,KAAK,YAAcA,EAAQ,YAC3B,IAAME,EAAUT,EAAO,QAGnBU,EAAaF,IAAsB,EACvC,KAAK,gBAAkBC,EAAQC,GAAW,EAC1C,IAAMC,EAAaF,EAAQC,GAAW,EACtC,KAAK,YAAcD,EAAQC,GAAW,EACtC,KAAK,iBAAmBD,EAAQC,GAAW,EAC3C,KAAK,eAAiBD,EAAQC,GAAW,EAEzC,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIF,EAAYE,IAAK,CACnC,IAAMZ,EAAWQ,EAAQC,GAAW,EAC9BR,EAAOO,EAAQC,GAAW,EAC1BI,EAAML,EAAQC,GAAW,EACzBP,EAAiB,CAAC,EACxB,QAASY,EAAI,EAAGA,EAAID,EAAKC,IACvBZ,EAAK,KAAKM,EAAQC,GAAW,CAAC,EAEhCE,EAAO,KAAK,IAAIrB,GAAeS,EAAQC,EAAUC,EAAMC,CAAI,CAAC,CAC9D,CACA,KAAK,OAASS,CAChB,CAhCA,IAAI,kBAA6C,CAC/C,OAAO,KAAK,QAAQ,uBACtB,CACA,IAAI,kBAA+B,CACjC,OAAO,KAAK,OAAO,OAAO,SAAS,KAAK,iBAAkB,KAAK,iBAAmB,KAAK,cAAc,CACvG,CA6BA,6BAAwD,CACtD,MAAO,CACL,KAAK,QAAQ,OAAO,OAAO,yBAA0B,KAAK,QAAQ,OAAO,OAAO,yBAChF,KAAK,QAAQ,OAAO,OAAO,wBAC7B,CACF,CAEA,mCAA4C,CAC1C,OAAO,KAAK,QAAQ,OAAO,OAAO,8BACpC,CAEA,QAAQI,EAAsBC,EAAyE,CAErG,IAAMC,EACFD,GAAsB,QAAQ,IAAIJ,GAAK,OAAOA,GAAM,SAAW,KAAK,OAAOA,CAAC,EAAIA,CAAC,GAAK,KAAK,OAEzFM,EAAgBF,GAAsB,SAAW,CAAC,EAClDG,EAAqB,CAACC,EAAepB,EAAkBE,IACzD,IAAIZ,GAAe,KAAK,OAAQU,EAAU,KAAK,OAAOoB,EAAOlB,CAAI,EAAGA,CAAI,EACtEmB,EAAwB,CAACrB,EAAkBE,IAAwC,CACvF,IAAMoB,EAAcC,GAAqBvB,CAAQ,EACjD,GAAI,CAACsB,EACH,MAAM,IAAI,MAAM,0BAA0BtB,CAAQ,EAAE,EAEtD,IAAMwB,EAAaF,EAAclB,EAAU,KAAKF,CAAI,EAC9CuB,EAAYD,EAAa,EAAI,KAAK,QAAQ,eAAe,OAAOA,CAAU,EAAE,GAAK,EACvF,OAAO,IAAIlC,GAAe,KAAK,OAAQU,EAAUyB,EAAWvB,CAAI,CAClE,EACA,OAAO,KAAK,QAAQ,IAChBa,EAASE,EAAcC,EAAeC,EAAoBE,EAAuB,KAAK,WAAW,CACvG,CAEA,OAAOD,EAAelB,EAAiC,CACrD,IAAMwB,EAAQ,KAAK,OAAO,UAAU,EACpC,GAAI,CACF,IAAMzB,EAAO,KAAK,OAAO,YAAY,EAAIC,EAAK,QAAU,CAAsB,EAC1EyB,EAAS1B,GAAQ,EACrB,KAAK,OAAO,QAAQ0B,GAAQ,EAAIzB,EAAK,OACrC,QAASU,EAAI,EAAGA,EAAIV,EAAK,OAAQU,IAC/B,KAAK,OAAO,QAAQe,GAAQ,EAAIzB,EAAKU,CAAC,EAExC,OAAO,KAAK,OAAO,YAAa,KAAK,gBAAiBQ,EAAOnB,CAAI,CACnE,OAAS2B,EAAG,CACV,MAAM,IAAI,MACN,sCAAsCR,CAAK,gBAAgBlB,CAAI,8GAErD0B,CAAC,EAAE,CACnB,QAAE,CACA,KAAK,OAAO,aAAaF,CAAK,CAChC,CACF,CACF,EA0BarC,GACT,MAAMwC,EAAwB9B,EAAuB+B,EAAUC,IAA2C,CAC5G,IAAMC,EAAWjC,EAAO,SACxB,GAAI,CAACiC,EACH,MAAM,IAAI,MAAM,mFAAmF,EAGrG,GAAIH,IAAS,SAAU,CACrB,IAAMvB,EAAU,IAAI2B,GACpB,MAAM3B,EAAQ,WAAWwB,EAAKC,CAAW,EAEzCC,EAAS,SAAU,CAEjB1B,EAGC4B,GAAiB5B,EAAQ,MAAM4B,CAAI,EAGnCC,GAAgB7B,EAAQ,KAAK6B,CAAG,EAGjC,CAACC,EAAaC,EAAaH,EAAcI,EAAc,KAAU,CAC/D,GAAIA,EACFC,GAAU,UAAW,IAAM,kCAAkCH,CAAG,SAASC,CAAG,UAAUH,CAAI,EAAE,EAC5F5B,EAAQ,OAAO8B,EAAKC,CAAG,MAClB,CACLE,GAAU,UAAW,IAAM,yCAAyCH,CAAG,eAAeC,CAAG,UAAUH,CAAI,EAAE,EACzG,IAAMjC,EAAOF,EAAO,OAAO,SAASqC,IAAQ,GAAIA,IAAQ,GAAKF,CAAI,EACjE5B,EAAQ,OAAO+B,EAAKpC,CAAI,CAC1B,CACF,EAGA,MAAMwB,EAAmBe,EAAoBN,IACxB,CACfK,GACI,UACA,IAAM,wCAAwCd,CAAS,gBAAgBe,CAAU,UAAUN,CAAI,EAAE,EAErG,MAAM5B,EAAQ,SACVmB,EAAW,IAAM1B,EAAO,OAAO,SAASyC,IAAe,GAAIA,IAAe,GAAKN,CAAI,CAAC,CAC1F,EAGJ,CAACO,EAAoBC,EAAkBC,IAAuBrC,EAAQ,aAClEmC,EAAYC,EAAUC,EAAW5C,EAAO,aAAaA,EAAO,iBAAkB2C,CAAQ,CAAC,CAAC,EAG3FE,GAAmBtC,EAAQ,cAAcsC,CAAM,EAGhD,CAACA,EAAgBrC,EAA2BsC,EAAuBC,IAAwC,CACzGP,GACI,UACA,IAAM,mCAAmCM,CAAa,YAAYD,CAAM,uBACpErC,CAAiB,EAAE,EAC3B,IAAMwC,EAAU,IAAIxD,GAAmBQ,EAAQO,EAASC,CAAiB,EACzE,OAAOD,EAAQ,cAAcsC,EAAQG,EAASD,CAAM,CACtD,EAEA,IAAMxC,EAAQ,aAAa,EAE3B,IAAMA,EAAQ,WAAW,EAEzB,IAAMA,EAAQ,OAAO,CACvB,CAAC,CACH,MACE0B,EAAS,OAAO,CAEpB,ICjPA,IAoEMgB,GAWOC,GAWAC,GAoFPC,GAOAC,GAqBOC,GAkBAC,GAmKAC,GAuBAC,GA+EAC,GA6OAC,GAgBAC,GAluBbC,GAAAC,EAAA,kBAWAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAoDMnB,GAAU,CAACoB,EAAoBC,IAA+B,CAChDC,GAAY,EAAE,SAASF,EAAYC,CAAY,IAC/C,GAChBE,GAAe,+BAAgC,CAEnD,EAMatB,GAAc,MAAMuB,GAA4B,CAE3DxB,GAAQwB,EAAI,KA
AK,WAAaC,GAAqBD,EAAI,QAAQ,CAAC,CAClE,EAQatB,GAAS,MAAMsB,EAAUE,IAAkC,CACxC,CAE5B,IAAMC,EAAW,cAAuB,KAExC,GAAID,IAAW,SAAU,CAEvB,GAAI,OAAO,UAAc,KAAe,CAAC,UAAU,IACjD,MAAM,IAAI,MAAM,gDAAgD,EAGlE,IAAIE,EAAUJ,EAAI,OAAO,QACzB,GAAKI,GAmBH,GAAI,OAAOA,EAAQ,QAAW,UAAY,OAAOA,EAAQ,UAAa,UAClE,OAAOA,EAAQ,eAAkB,WACnC,MAAM,IAAI,MAAM,kFAAkF,MArBxF,CAEZ,IAAMC,EAAkBL,EAAI,OAAO,gBACnC,GAAIK,IAAoB,QAAaA,IAAoB,aACrDA,IAAoB,mBACtB,MAAM,IAAI,MAAM,qCAAqCA,CAAe,GAAG,EAEzE,IAAMC,EAAuBN,EAAI,OAAO,qBACxC,GAAIM,IAAyB,QAAa,OAAOA,GAAyB,UACxE,MAAM,IAAI,MAAM,0CAA0CA,CAAoB,GAAG,EAGnF,GADAF,EAAU,MAAM,UAAU,IAAI,eAAe,CAAC,gBAAAC,EAAiB,qBAAAC,CAAoB,CAAC,EAChF,CAACF,EACH,MAAM,IAAI,MACN,0GAC+E,CAEvF,CAQA,MAAMD,EAAS,SAAUL,GAAY,EAAGE,EAAKI,CAAO,CACtD,CACA,GAAIF,IAAW,QAAS,CAEtB,GAAI,OAAO,UAAc,KAAe,CAAE,UAAuC,GAC/E,MAAM,IAAI,MAAM,+CAA+C,EAGjE,MAAMC,EAAS,QAASL,GAAY,EAAGE,CAAG,CAC5C,CACF,CACF,EAoCMrB,GAAiB,IAAI,IAOrBC,GAA8B2B,GAA4C,CAC9E,IAAMC,EAAOV,GAAY,EACnBW,EAAQD,EAAK,UAAU,EAC7B,GAAI,CACF,IAAME,EAAaF,EAAK,WAAW,CAAC,EAEpC,OADkBA,EAAK,wBAAwBD,EAAeG,EAAYA,EAAa,CAAC,IACtE,GAChBX,GAAe,uCAAwC,EAElD,CAACS,EAAK,OAAOE,EAAa,CAAC,EAAGF,EAAK,OAAOE,EAAa,EAAI,CAAC,CAAC,CACtE,QAAE,CACAF,EAAK,aAAaC,CAAK,CACzB,CACF,EAQa5B,GAA0B8B,GAAwC,CAC7E,IAAMH,EAAOV,GAAY,EACnBc,EAAkBJ,EAAK,QAAQG,EAAM,UAAU,EACrD,GAAIC,IAAoB,EACtB,MAAM,IAAI,MAAM,+DAA+DD,EAAM,UAAU,GAAG,EAEpG,OAAAH,EAAK,OAAO,IAAIG,EAAOC,CAAe,EAC/B,CAACA,EAAiBD,EAAM,UAAU,CAC3C,EAUa7B,GAAgB,MACzB+B,EACAC,IAAoF,CACtF,IAAIF,EAAyBG,EACvBP,EAAOV,GAAY,EAErB,MAAM,QAAQe,CAAS,EAEzB,CAACD,EAAiBG,CAAe,EAAIF,EAC5BA,EAAU,SAAWL,EAAK,OAAO,OAE1C,CAACI,EAAiBG,CAAe,EAAI,CAACF,EAAU,WAAYA,EAAU,UAAU,EAGhF,CAACD,EAAiBG,CAAe,EAAIlC,GAAuBgC,CAAS,EAGvE,IAAIN,EAAgB,EAChBS,EAAuB,EACvBC,EAAkB,EAClBC,EAAmB,CAAC,EAClBC,EAAwB,CAAC,EACzBC,EAAyB,CAAC,EAEhC,GAAI,CAGF,GAFA,CAACJ,EAAsBE,CAAM,EAAIG,GAAkBP,CAAO,EAEtDA,GAAS,cAAgBN,EAAK,kBAAmB,CACnD,IAAMc,EAAkB,CAAC,EACzB,QAAWC,KAAQT,EAAQ,aAAc,CACvC,IAAMU,EAAO,OAAOD,GAAS,SAAWA,EAAOA,EAAK,KACpDD,EAAgB,KAAKG,GAAS,OAAOF,GAAS,SAAWA,EAAOA,EAAK,IAAI,EAAE,KAAKG,GAAQ,CACtFlB,EAAK,kBAAmBgB,EAAME,CAAI,CACpC,CAAC,CAAC,CACJ,CAGA,MAAM,QAAQ,IAAIJ,CAAe,CACnC,CAEA,QAAWK,KAAYb,GAAS,oBAAsB,CAAC,EAErD,IADqB,OAAOa,GAAa,SAAWA,EAAWA,EAAS,QACnD,QAAS,CAC5B,GAAInB,EAAK,eACP,MAAM,IAAI,MAAM,0CAA0C,EAE5D,GAAI,OAAOmB,GAAa,SAAU,CAChC,IAAMC,EAAeD,EACfE,EAAWD,GAA6D,QACxEE,EAAaF,GAAsD,UACnEG,EAAcH,GAAuD,WACrEhC,EAAcgC,GAAuD,WACrEvB,EAAmBuB,GAAuD,gBAC5EC,EACFrB,EAAK,eAAiBqB,EACbC,EACTtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAcsB,CAAS,EAEhEtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,CAAC,WAAAuB,EAAY,WAAAnC,EAAY,gBAAAS,CAAe,CAAC,CAEpG,MACEG,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,EAEzD,KACF,CAGFD,EAAgB,MAAMC,EAAK,kBAAkBI,EAAiBG,EAAiBC,CAAoB,EAC/FT,IAAkB,GACpBR,GAAe,yBAA0B,EAIvCS,EAAK,iBACPA,EAAK,eAAiB,QAGxB,GAAM,CAACwB,EAAYC,CAAW,EAAIrD,GAA2B2B,CAAa,EAEpE2B,EAAqB,CAAC,CAACpB,GAAS,mBAEhCqB,EAAa,CAAC,EACdC,EAAc,CAAC,EACfC,EAAwE,CAAC,EAC/E,QAASC,EAAI,EAAGA,EAAIN,EAAYM,IAAK,CACnC,IAAMC,EAAO/B,EAAK,iBAAiBD,EAAe+B,CAAC,EAC/CC,IAAS,GACXxC,GAAe,0BAA2B,EAE5CoB,EAAsB,KAAKoB,CAAI,EAC/BJ,EAAW,KAAK3B,EAAK,aAAa+B,CAAI,CAAC,CACzC,CACA,QAASD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMC,EAAO/B,EAAK,kBAAkBD,EAAe+B,CAAC,EAChDC,IAAS,GACXxC,GAAe,2BAA4B,EAE7CqB,EAAuB,KAAKmB,CAAI,EAChC,IAAMC,EAAahC,EAAK,aAAa+B,CAAI,EACzCH,EAAY,KAAKI,CAAU,EAEG,CAC5B,GAAIN,GAAsBpB,GAAS,0BAA4B,OAAW,CACxEuB,EAAyB,KAAK,YAAY,EAC1C,QACF,CACA,IAAMI,EAAW,OAAO3B,GAAS,yBAA4B,SACzDA,EAAQ,wBACRA,GAAS,0BAA0B0B,CAAU,GAAK,MACtD,GAAIC,IAAa,OAASA,IAAa,cAAgBA,IAAa,aAClE,MAAM,IAAI,MAAM,4CAA4CA,CAAQ,GAAG,EAEzE,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MAAM,4CACZA,CAAQ,4EAA4E,EAE1FJ,EAAyB,KAAKI,CAAQ,CACxC,CACF,CAGA,IAAIC,EAAoC,KAC
xC,OAAgCL,EAAyB,KAAKM,GAAKA,IAAM,YAAY,IACnF1B,EAAkBT,EAAK,kBAAkBD,CAAa,EAClDU,IAAoB,GACtBlB,GAAe,0BAA2B,EAG5C2C,EAAe,CACb,OAAQzB,EACR,yBAAAoB,EACA,gCAAiCA,EAAyB,IAAIM,GAAKC,GAAyBD,CAAC,CAAC,CAChG,GAGFhE,GAAe,IACX4B,EACA,CAACA,EAAeY,EAAuBC,EAAwBsB,EAAcR,EAAoB,EAAK,CAAC,EACpG,CAAC3B,EAAe4B,EAAYC,CAAW,CAChD,OAASS,EAAG,CACV,MAAA1B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EAEpD7B,IAAoB,GACtBT,EAAK,mBAAmBS,CAAe,EAGrCV,IAAkB,GACpBC,EAAK,mBAAmBD,CAAa,EAEjCsC,CACR,QAAE,CACArC,EAAK,MAAMI,CAAe,EACtBI,IAAyB,GAC3BR,EAAK,0BAA0BQ,CAAoB,EAErDE,EAAO,QAAQ6B,GAASvC,EAAK,MAAMuC,CAAK,CAAC,EAGzCvC,EAAK,sBAAsB,CAC7B,CACF,EAEazB,GAAkBiE,GAA4B,CACzD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,+CAA+CD,CAAS,EAAE,EAE5E,GAAM,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,CAAkB,EAAIe,EAEvGC,IACEhB,GACF1B,EAAK,sBAAsB0C,EAAe,MAAM,EAElD1C,EAAK,mBAAmB0C,EAAe,MAAM,GAG/C1C,EAAK,uBAAuBwC,CAAS,EAErC7B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACxDtC,EAAK,mBAAmBD,CAAa,EACrC5B,GAAe,OAAOqE,CAAS,CACjC,EAEahE,GACT,CAACmE,EAA6BC,EAAyBlC,EAAkB8B,EAAmBK,EAC3FnB,EAAqB,KAAgB,CACpC,GAAI,CAACiB,EAAQ,CACXC,EAAc,KAAK,CAAC,EACpB,MACF,CAEA,IAAM5C,EAAOV,GAAY,EAEnBwD,EAAWH,EAAO,CAAC,EACnBI,EAAOJ,EAAO,CAAC,EACfV,EAAWU,EAAO,CAAC,EAErBK,EACAC,EAEJ,GAAIH,IAAa,UAAYb,IAAa,aACxC,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MACN,2DAA2DY,CAAK,mCAAmC,EAGzG,GAAIZ,IAAa,aAAc,CAC7B,IAAMiB,EAAYP,EAAO,CAAC,EAAE,UACtBQ,EAAqBC,GAAqBC,GAA2BP,CAAQ,CAAC,EACpFG,EAAiBF,EAAK,OAAO,CAACO,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAIJ,EAEnD,IAAMK,EAAiBxD,EAAK,mBAC5B,GAAI,CAACwD,EACH,MAAM,IAAI,MAAM,qEAAqE,EAEvFR,EAAUQ,EAAehB,EAAWK,EAAOK,EAAWD,CAAc,CACtE,KAAO,CACL,IAAM/B,EAAOyB,EAAO,CAAC,EAErB,GAAI,MAAM,QAAQzB,CAAI,EAAG,CAEvB+B,EAAiB,EAAI/B,EAAK,OAC1B8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnB,IAAIS,EAAYT,EAAU,EAC1B,QAASlB,EAAI,EAAGA,EAAIZ,EAAK,OAAQY,IAAK,CACpC,GAAI,OAAOZ,EAAKY,CAAC,GAAM,SACrB,MAAM,IAAI,UAAU,wBAAwBA,CAAC,kBAAkB,EAEjE9B,EAAK,QAAQyD,GAAW,EAAIC,GAAgBxC,EAAKY,CAAC,EAAGpB,CAAM,CAC7D,CACF,MACEuC,EAAiB/B,EAAK,WACtB8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnBhD,EAAK,OAAO,IAAI,IAAI,WAAWkB,EAAK,OAAQA,EAAK,WAAY+B,CAAc,EAAGD,CAAO,CAEzF,CAEA,IAAM/C,EAAQD,EAAK,UAAU,EACvB2D,EAAa3D,EAAK,WAAW,EAAI+C,EAAK,MAAM,EAClD,GAAI,CACF,IAAIa,EAAWD,EAAa,EAC5BZ,EAAK,QAAQc,GAAK7D,EAAK,OAAO4D,GAAU,EAAIC,CAAC,EAC7C,IAAMlB,EAAS3C,EAAK,iBAChBqD,GAA2BP,CAAQ,EAAGE,EAASC,EAAgBU,EAAYZ,EAAK,OAChFX,GAAyBH,CAAQ,CAAC,EAClCU,IAAW,GACbpD,GAAe,iDAAiDiD,CAAS,WAAWK,CAAK,GAAG,EAE9FD,EAAc,KAAKD,CAAM,CAC3B,QAAE,CACA3C,EAAK,aAAaC,CAAK,CACzB,CACF,EAKSxB,GAAM,MACf+D,EAAmBsB,EAAwBC,EAAgCC,EAC3EC,EAA2C3D,IAAoE,CACjH,IAAMN,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,6CAA6CD,CAAS,EAAE,EAE1E,IAAMzC,EAAgB0C,EAAQ,CAAC,EACzB9B,EAAwB8B,EAAQ,CAAC,EACjC7B,EAAyB6B,EAAQ,CAAC,EAClCC,EAAiBD,EAAQ,CAAC,EAC1Bf,EAAqBe,EAAQ,CAAC,EAC9ByB,EAAmBzB,EAAQ,CAAC,EAE5BjB,EAAasC,EAAa,OAC1BrC,EAAcuC,EAAc,OAE9BG,EAAmB,EACnBC,EAA6B,CAAC,EAE5BC,EAA+B,CAAC,EAChCC,EAAgC,CAAC,EACjCC,EAA8B,CAAC,EAE/BC,EAAiBxE,EAAK,UAAU,EAChCyE,EAAoBzE,EAAK,WAAWwB,EAAa,CAAC,EAClDkD,EAAmB1E,EAAK,WAAWwB,EAAa,CAAC,EACjDmD,EAAqB3E,EAAK,WAAWyB,EAAc,CAAC,EACpDmD,EAAoB5E,EAAK,WAAWyB,EAAc,CAAC,EAEzD,GAAI,CACF,CAAC0C,EAAkBC,CAAgB,EAAIS,GAAcvE,CAAO,EAG5D,QAASwB,EAAI,EAAGA,EAAIN,EAAYM,IAC9BtD,GACIuF,EAAajC,CAAC,EAAGuC,EAAoBE,EAAmB/B,EAAWsB,EAAahC,CAAC,EAAGJ,CAAkB,EAI5G,QAASI,EAAI,EAAGA,EAAIL,EAAaK,IAC/BtD,GACIyF,EAAcnC,CAAC,EAAGwC,EAAqBC,EAAmB/B,EAAWhB,EAAawC,EAAclC,CAAC,EACjGJ,
CAAkB,EAGxB,IAAIoD,EAAmBL,EAAoB,EACvCM,EAAkBL,EAAmB,EACrCM,GAAoBL,EAAqB,EACzCM,GAAmBL,EAAoB,EAC3C,QAAS9C,EAAI,EAAGA,EAAIN,EAAYM,IAC9B9B,EAAK,QAAQ8E,GAAkB,EAAIT,EAAmBvC,CAAC,EACvD9B,EAAK,QAAQ+E,GAAiB,EAAIpE,EAAsBmD,EAAahC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIL,EAAaK,IAC/B9B,EAAK,QAAQgF,IAAmB,EAAIV,EAAoBxC,CAAC,EACzD9B,EAAK,QAAQiF,IAAkB,EAAIrE,EAAuBoD,EAAclC,CAAC,CAAC,EAG5E,GAAgCY,GAAkB,CAACwB,EAAkB,CACnE,GAAM,CAAC,OAAAgB,EAAQ,yBAAArD,GAA0B,gCAAAsD,EAA+B,EAAIzC,EAE5E,GAAI/B,EAAsB,SAAWa,EACnC,MAAM,IAAI,MAAM,2BACZA,CAAU,4DAA4Db,EAAsB,MAAM,IAAI,EAI5G,QAASmB,GAAI,EAAGA,GAAIN,EAAYM,KAAK,CACnC,IAAMe,GAAQiB,EAAahC,EAAC,EACV,MAAM9B,EAAK,cAAckF,EAAQvE,EAAsBkC,EAAK,EAAGwB,EAAmBvC,EAAC,CAAC,IACpF,GAChBvC,GAAe,oBAAoBuC,EAAC,iBAAiBU,CAAS,GAAG,CAErE,CAGA,QAASV,GAAI,EAAGA,GAAIL,EAAaK,KAAK,CACpC,IAAMe,GAAQmB,EAAclC,EAAC,EACZmC,EAAcnC,EAAC,IAAI,CAAC,EAIjB9B,EAAK,eAAekF,EAAQtE,EAAuBiC,EAAK,EAAGyB,EAAoBxC,EAAC,EAAG,CAAC,IACpF,GAChBvC,GAAe,mCAAmCuC,EAAC,iBAAiBU,CAAS,GAAG,EAK9ExC,EAAK,eAAekF,EAAQtE,EAAuBiC,EAAK,EAAG,EAAGsC,GAAgCtC,EAAK,CAAC,IACtF,GAChBtD,GAAe,qBAAqBuC,EAAC,QAAQD,GAAyBC,EAAC,CAAC,gBAAgBU,CAAS,GAAG,CAG1G,CACArE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAI,CAAC,CAC9G,CAEA1B,EAAK,iBAAiBD,CAAa,EACnC,IAAIqF,GAC4B1C,EAC9B0C,GAAY,MAAMpF,EAAK,mBACnBD,EAAe2C,EAAe,OAAQjB,EAAakD,EAAoBR,CAAgB,EAE3FiB,GAAY,MAAMpF,EAAK,QACnBD,EAAe2E,EAAkBD,EAAmBjD,EAAYoD,EAAmBnD,EACnFkD,EAAoBR,CAAgB,EAGtCiB,KAAc,GAChB7F,GAAe,0BAA0B,EAG3C,IAAM8F,GAA2B,CAAC,EAElC,QAASvD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMa,GAAS3C,EAAK,QAAQ2E,EAAqB,EAAI7C,CAAC,EACtD,GAAIa,KAAW2B,EAAoBxC,CAAC,EAAG,CAErCuD,GAAO,KAAKpB,EAAcnC,CAAC,CAAE,EAC7B,QACF,CAEA,IAAMwD,GAA2BtF,EAAK,UAAU,EAE1CuF,GAAmBvF,EAAK,WAAW,EAAI,CAAC,EAE1CwF,GAAmB,GACnBC,GAA6BvF,GAAa,EAC9C,GAAI,CACgBF,EAAK,kBACnB2C,GAAQ4C,GAAkBA,GAAmB,EAAGA,GAAmB,EAAGA,GAAmB,EAAE,IAC7E,GAChBhG,GAAe,4CAA4CuC,CAAC,GAAG,EAEjE,IAAI4D,EAAkBH,GAAmB,EACnCzC,GAAW9C,EAAK,QAAQ0F,GAAiB,EAC/CxF,GAAaF,EAAK,QAAQ0F,GAAiB,EAC3C,IAAM/B,GAAa3D,EAAK,QAAQ0F,GAAiB,EAC3CC,GAAa3F,EAAK,QAAQ0F,GAAiB,EAC3C3C,GAAO,CAAC,EACd,QAASjB,GAAI,EAAGA,GAAI6D,GAAY7D,KAC9BiB,GAAK,KAAK/C,EAAK,QAAQ2D,GAAa,EAAI7B,EAAC,CAAC,EAE5C9B,EAAK,SAAS2D,EAAU,EAExB,IAAMiC,GAAO7C,GAAK,OAAO,CAACO,GAAGC,KAAMD,GAAIC,GAAG,CAAC,EAC3CkC,GAAOI,GAA2B/C,EAAQ,EAE1C,IAAMgD,GAAoBpD,GAAgB,yBAAyBsB,EAAclC,CAAC,CAAC,EAEnF,GAAI2D,KAAS,SAAU,CACrB,GAAIK,KAAsB,aACxB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMC,GAAuB,CAAC,EAC1BtC,GAAYvD,GAAa,EAC7B,QAAS4B,GAAI,EAAGA,GAAI8D,GAAM9D,KAAK,CAC7B,IAAMkE,GAAShG,EAAK,QAAQyD,IAAW,EACjCwC,GAAiBnE,KAAM8D,GAAO,EAAI,OAAY5F,EAAK,QAAQyD,EAAS,EAAIuC,GAC9ED,GAAW,KAAK/F,EAAK,aAAagG,GAAQC,EAAc,CAAC,CAC3D,CACAZ,GAAO,KAAK,CAACI,GAAM1C,GAAMgD,GAAY,KAAK,CAAC,CAC7C,SAGMD,KAAsB,cAAgBF,GAAO,EAAG,CAClD,IAAMM,GAAYlG,EAAK,cACvB,GAAI,CAACkG,GACH,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAMhD,GAAYgD,GAAUhG,EAAU,EAChCiG,GAAc/C,GAAqBN,EAAQ,EACjD,GAAIqD,KAAgB,QAAa,CAACC,GAAyBX,EAAI,EAC7D,MAAM,IAAI,MAAM,0BAA0BA,EAAI,EAAE,EAIlDD,GAAmB,GAEnBH,GAAO,KAAK,CACVI,GAAM1C,GAAM,CACV,UAAAG,GACA,SAAUlD,EAAK,qBAAsBkD,GAAW0C,GAAOO,GAAaV,EAAI,EACxE,QAAS,IAAM,CACbzF,EAAK,kBAAkB2C,EAAM,CAC/B,CACF,EACA,YACF,CAAC,CACH,KAAO,CACL,IAAM0D,GAAwBC,GAAkCb,EAAI,EAC9DvE,GAAO,IAAImF,GAAsBT,EAAI,EAC3C,IAAI,WAAW1E,GAAK,OAAQA,GAAK,WAAYA,GAAK,UAAU,EACvD,IAAIlB,EAAK,OAAO,SAASE,GAAYA,GAAagB,GAAK,UAAU,CAAC,EACvEmE,GAAO,KAAK,CAACI,GAAM1C,GAAM7B,GAAM,KAAK,CAAC,CACvC,CAEJ,QAAE,CACAlB,EAAK,aAAasF,EAAwB,EACtCG,KAAS,UAAYvF,IACvBF,EAAK,MAAME,EAAU,EAElBsF,IACHxF,EAAK,kBAAkB2C,EAAM,CAEjC,CACF,CAEA,OAAID,GAAkB,CAAChB,IACrB1B,EAAK,sBAAsB0C,EAAe,MAAM,EAChDvE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EA
AoB,EAAK,CAAC,GAExG2D,EACT,QAAE,CACArF,EAAK,aAAawE,CAAc,EAEhCH,EAAmB,QAAQkC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EACzDjC,EAAoB,QAAQiC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EAC1DhC,EAAkB,QAAQiC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,EAExCrC,IAAqB,GACvBnE,EAAK,sBAAsBmE,CAAgB,EAE7CC,EAAiB,QAAQoC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,CAC7C,CACF,EAKa9H,GAAgB8D,GAA4B,CACvD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,oBAAoB,EAEtC,IAAM1C,EAAgB0C,EAAQ,CAAC,EAGzBgE,EAAkBzG,EAAK,iBAAiBD,CAAa,EACvD0G,IAAoB,GACtBlH,GAAe,iCAAkC,EAEnDS,EAAK,SAASyG,CAAe,CAC/B,EAEa9H,GAA8B+H,GAAsE,CAC/G,IAAMC,EAA6B,CAAC,EACpC,QAAWhE,KAAU+D,EAAS,CAC5B,IAAMxF,EAAOyB,EAAO,CAAC,EACjB,CAAC,MAAM,QAAQzB,CAAI,GAAK,WAAYA,GACtCyF,EAAQ,KAAKzF,EAAK,MAAM,CAE5B,CACA,OAAOyF,CACT,IC3uBA,IAUMC,GACFC,GACAC,GACAC,GACAC,GACAC,GAGAC,GACEC,GAEAC,GASAC,GAMAC,GAmCOC,GA8CAC,GAaAC,GAaAC,GAuBAC,GAaAC,GAyBAC,GA5MbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEMvB,GAAU,IAAe,CAAC,CAACwB,GAAI,KAAK,OAAS,OAAO,SAAa,IAEnEtB,GAAe,GACfC,GAAc,GACdC,GAAU,GAKRG,GAAiF,IAAI,IAErFC,GAAmB,CAACiB,EAA8BC,IAA+C,CACrG,IAAMC,EAAQpB,GAAgB,IAAIkB,CAAI,EAClCE,EACFA,EAAM,KAAKD,CAAS,EAEpBnB,GAAgB,IAAIkB,EAAM,CAACC,CAAS,CAAC,CAEzC,EAEMjB,GAAe,IAAY,CAC/B,GAAIP,IAAgB,CAACC,IAAeC,IAAW,CAACH,GAC9C,MAAM,IAAI,MAAM,kBAAkB,CAEtC,EAEMS,GAAwBkB,GAA2C,CACvE,OAAQA,EAAG,KAAK,KAAM,CACpB,IAAK,YACH1B,GAAe,GACX0B,EAAG,KAAK,KACVxB,GAAU,GACVE,GAAkB,CAAC,EAAEsB,EAAG,KAAK,GAAG,IAEhCzB,GAAc,GACdG,GAAkB,CAAC,EAAE,GAEnBD,KACF,IAAI,gBAAgBA,EAAkB,EACtCA,GAAqB,QAEvB,MACF,IAAK,UACL,IAAK,YACL,IAAK,SACL,IAAK,UACL,IAAK,MACL,IAAK,gBAAiB,CACpB,IAAMqB,EAAYnB,GAAgB,IAAIqB,EAAG,KAAK,IAAI,EAC9CA,EAAG,KAAK,IACVF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAG,EAEjCF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAI,EAEpC,KACF,CACA,QACF,CACF,EAGajB,GAAqC,SAA0B,CAC1E,GAAI,CAAAR,GAGJ,IAAID,GACF,MAAM,IAAI,MAAM,0CAA4C,EAE9D,GAAIE,GACF,MAAM,IAAI,MAAM,uCAAyC,EAK3D,GAFAF,GAAe,GAEuBF,GAAQ,EAC5C,OAAO,IAAI,QAAc,CAAC6B,EAASC,IAAW,CAC5C7B,IAAa,UAAU,EAElB8B,GAAkB,EAAE,KAAK,CAAC,CAACC,EAAWC,CAAM,IAAM,CACrD,GAAI,CACFhC,GAAcgC,EACdhC,GAAY,QAAW2B,GAAmBE,EAAOF,CAAE,EACnD3B,GAAY,UAAYS,GACxBJ,GAAoB,CAACuB,EAASC,CAAM,EACpC,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAKV,EAAG,EAC5DvB,GAAY,YAAYiC,CAAO,EAC/B7B,GAAqB2B,CACvB,OAASG,EAAG,CACVL,EAAOK,CAAC,CACV,CACF,EAAGL,CAAM,CACX,CAAC,EAGD,GAAI,CACF,MAAMM,GAAsBZ,GAAI,IAAI,EACpC,MAAWa,GAAYb,EAAG,EAC1BrB,GAAc,EAChB,OAASgC,EAAG,CACV,MAAA/B,GAAU,GACJ+B,CACR,QAAE,CACAjC,GAAe,EACjB,EAEJ,EAEaU,GAAkB,MAAM0B,GAAkC,CACrE,GAAsCtC,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAK,CAAC,OAAAI,EAAQ,IAAAd,EAAG,CAAC,EACpEvB,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAED,MAAWK,GAAOf,GAAKc,CAAM,CAEjC,EAEazB,GAAyB,MAAM2B,GACJxC,GAAQ,GAC5CS,GAAa,EACN,IAAI,QAAoC,CAACoB,EAASC,IAAW,CAClEtB,GAAiB,YAAa,CAACqB,EAASC,CAAM,CAAC,EAC/C,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAK,CAAC,OAAAM,CAAM,CAAC,EACjEvC,GAAa,YAAYiC,EAAS,CAACM,EAAO,MAAM,CAAC,CACnD,CAAC,GAEW3B,GAAuB2B,CAAM,EAIhC1B,GACT,MAAM2B,EAA8CC,IACR,CACtC,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI0C,GAAS,wBACX,MAAM,IAAI,MAAM,sEAAsE,EAExF,OAAAjC,GAAa,EACN,IAAI,QAAqC,CAACoB,EAASC,IAAW,CACnEtB,GAAiB,SAAU,CAACqB,EAASC,CAAM,CAAC,EAC5C,IAAMI,EAA0B,CAAC,KAAM,SAAU,GAAK,CAAC,MAAAO,EAAO,QAAS,CAAC,GAAGC,CAAO,CAAC,CAAC,EAC9EC,EAA+B,CAAC,EAClCF,aAAiB,YACnBE,EAAa,KAAKF,EAAM,MAAM,EAEhCxC,GAAa,YAAYiC,EAASS,CAAY,CAChD,CAAC,CACH,KACE,QAAY7B,GAAc2B,EAAOC,CAAO,CAE5C,EAEK3B,GAAiB,MAAM6B,GAAqC,CACvE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAKU,CAAS,EAChE3C,GAAa,YAAYiC,CAA
O,CAClC,CAAC,EAEInB,GAAe6B,CAAS,CAEjC,EAEa5B,GAAM,MACf4B,EAAmBC,EAAwBC,EAA0BC,EACrEC,EAAqCN,IAAoE,CAC3G,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI8C,EAAO,KAAKG,GAAKA,EAAE,CAAC,IAAM,KAAK,EACjC,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAID,EAAQ,KAAKC,GAAKA,CAAC,EACrB,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAAxC,GAAa,EACN,IAAI,QAAsC,CAACoB,EAASC,IAAW,CACpEtB,GAAiB,MAAO,CAACqB,EAASC,CAAM,CAAC,EACzC,IAAMoB,EAAqBJ,EACrBZ,EACF,CAAC,KAAM,MAAO,GAAK,CAAC,UAAAU,EAAW,aAAAC,EAAc,OAAQK,EAAoB,cAAAH,EAAe,QAAAL,CAAO,CAAC,EACpGzC,GAAa,YAAYiC,EAAciB,GAA2BD,CAAkB,CAAC,CACvF,CAAC,CACH,KACE,QAAYlC,GAAI4B,EAAWC,EAAcC,EAAQC,EAAeC,EAASN,CAAO,CAEpF,EAEazB,GAAe,MAAM2B,GAAqC,CACrE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,gBAAiB,CAACqB,EAASC,CAAM,CAAC,EACnD,IAAMI,EAA0B,CAAC,KAAM,gBAAiB,GAAKU,CAAS,EACtE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEIjB,GAAa2B,CAAS,CAE/B,ICvNA,IAWaQ,GAWAC,GAiBAC,GAvCbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KACAC,KAEaT,GAAuB,CAACU,EAAgBC,IAA0C,CAC7F,OAAQD,EAAO,SAAU,CACvB,IAAK,MACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAMA,EAAO,KAAM,KAAK,EACtD,IAAK,aACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAM,CAAC,UAAWA,EAAO,SAAS,EAAG,YAAY,EAC/E,QACE,MAAM,IAAI,MAAM,0BAA0BA,EAAO,QAAQ,QAAQC,EAAQ,CAAC,EAAE,CAChF,CACF,EAEaV,GAAwBS,GAAmC,CACtE,OAAQA,EAAO,CAAC,EAAG,CACjB,IAAK,MACH,OAAO,IAAIE,GAAOF,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EACnD,IAAK,aAAc,CACjB,IAAMG,EAAWH,EAAO,CAAC,EACzB,GAAI,CAACI,GAAyBD,CAAQ,EACpC,MAAM,IAAI,MAAM,4BAA4BA,CAAQ,+BAA+B,EAErF,GAAM,CAAC,UAAAE,EAAW,SAAAC,EAAU,QAAAC,CAAO,EAAIP,EAAO,CAAC,EAC/C,OAAOE,GAAO,cAAcG,EAAW,CAAC,SAAAF,EAAU,KAAMH,EAAO,CAAC,EAAG,SAAAM,EAAU,QAAAC,CAAO,CAAC,CACvF,CACA,QACE,MAAM,IAAI,MAAM,0BAA0BP,EAAO,CAAC,CAAC,EAAE,CACzD,CACF,EAEaR,GAAN,KAA8E,CAMnF,MAAM,8BAA8BgB,EAAmD,CAErF,OAAOC,GAAuB,MAAMC,GAASF,CAAI,CAAC,CACpD,CAEA,MAAM,UAAUG,EAAiCC,EAA0D,CACzGC,GAAiB,EACjB,IAAIC,EAEA,OAAOH,GAAiB,SACtB,GAEFG,EAAQ,MAAMJ,GAASC,CAAY,EAInCG,EAAQ,MAAM,KAAK,8BAA8BH,CAAY,EAG/DG,EAAQH,EAGV,CAAC,KAAK,UAAW,KAAK,WAAY,KAAK,WAAW,EAAI,MAAMI,GAAcD,EAAOF,CAAO,EACxFI,GAAe,CACjB,CAEA,MAAM,SAAyB,CAC7B,OAAOC,GAAe,KAAK,SAAS,CACtC,CAEA,MAAM,IAAIC,EAAiCC,EAAqCP,EACzC,CACrCC,GAAiB,EACjB,IAAMO,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAChC,OAAO,QAAQH,CAAK,EAAE,QAAQI,GAAO,CACnC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,WAAW,QAAQD,CAAI,EAC1C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,kBAAkBD,CAAI,GAAG,EAE3CH,EAAW,KAAKpB,CAAM,EACtBqB,EAAa,KAAKG,CAAK,CACzB,CAAC,EAED,IAAMC,EAAkC,CAAC,EACnCC,EAA0B,CAAC,EACjC,OAAO,QAAQP,CAAO,EAAE,QAAQG,GAAO,CACrC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,YAAY,QAAQD,CAAI,EAC3C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,mBAAmBD,CAAI,GAAG,EAE5CE,EAAY,KAAKzB,CAAM,EACvB0B,EAAc,KAAKF,CAAK,CAC1B,CAAC,EAED,IAAMG,EACFP,EAAW,IAAI,CAACQ,EAAGC,IAAMvC,GAAqBsC,EAAG,IAAM,UAAU,KAAK,WAAWP,EAAaQ,CAAC,CAAC,CAAC,GAAG,CAAC,EACnGC,EAAUL,EAAY,IACxB,CAACG,EAAGC,IAAMD,EAAItC,GAAqBsC,EAAG,IAAM,WAAW,KAAK,YAAYF,EAAcG,CAAC,CAAC,CAAC,GAAG,EAAI,IAAI,EAElGE,EAAU,MAAMC,GAAI,KAAK,UAAWX,EAAcM,EAAQD,EAAeI,EAASlB,CAAO,EAEzFqB,EAAuC,CAAC,EAC9C,QAASJ,EAAI,EAAGA,EAAIE,EAAQ,OAAQF,IAClCI,EAAU,KAAK,YAAYP,EAAcG,CAAC,CAAC,CAAC,EAAIJ,EAAYI,CAAC,GAAKtC,GAAqBwC,EAAQF,CAAC,CAAC,EAEnG,OAAAb,GAAe,EACRiB,CACT,CAEA,gBAAuB,CAEvB,CAEA,cAAqB,CACdC,GAAa,KAAK,SAAS,CAClC,CACF,IC9HA,IAeaC,GAiDAC,GAhEbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAQaP,GAAkB,IAAY,CAoBzC,IAnBI,OAAOQ,GAAI,KAAK,aAAgB,UAAYA,GAAI,KAAK,YAAc,KACrEA,GAAI,KAAK,YAAc,GAGrBA,GAAI,KAAK,OAAS,IAEpB,QAAQ,KACJ,8HACyE,EAG3E,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,YAAe,UAAY,CAAC,OAAO,UAAUA,GAAI,KAAK,UAAU,GAAKA,GAAI,KAAK,YAAc,EAY9G,
GAAI,OAAO,KAAS,KAAe,CAAC,KAAK,oBACvCA,GAAI,KAAK,WAAa,MACjB,CACL,IAAMC,EACF,OAAO,UAAc,IAAc,GAAQ,SAAS,EAAE,KAAK,EAAE,OAAS,UAAU,oBACpFD,GAAI,KAAK,WAAa,KAAK,IAAI,EAAG,KAAK,MAAMC,GAAsB,GAAK,CAAC,CAAC,CAC5E,CASJ,EAEaR,GAAN,KAAuD,CAS5D,MAAM,KAAKS,EAAoC,CAE7CV,GAAgB,EAGhB,MAAMW,GAAmC,EAGzC,MAAMC,GAAgBF,CAAW,CACnC,CAKA,MAAM,8BAA8BG,EAAiCC,EAChC,CACnC,IAAMC,EAAU,IAAIC,GACpB,aAAMD,EAAQ,UAAUF,EAAcC,CAAO,EACtC,QAAQ,QAAQC,CAAO,CAChC,CACF,IC7FA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,KAAA,IAIaA,GAJbC,GAAAC,EAAA,kBAGAC,KACaH,GAAc,IAAII,KCK/BC,KACAA,KAGAA,KCPO,IAAMC,GAAU,SDKvB,IAAOC,GAAQC,GAKgB,CAC7B,IAAMC,EAAgB,cAA4B,cAClDC,GAAgB,QAASD,EAAe,GAAG,CAC7C,CAE8B,CAC5B,IAAME,EAA4C,cAAoC,YAGpFD,GAAgB,SAAUC,EAAa,CAAC,EACxCD,GAAgB,QAASC,EAAa,CAAC,EAEzCD,GAAgB,MAAOC,EAAa,EAAE,EACtCD,GAAgB,OAAQC,EAAa,EAAE,CACzC,CAEA,OAAO,eAAeC,GAAI,SAAU,MAAO,CAAC,MAAOC,GAAS,WAAY,EAAI,CAAC", + "names": ["backends", "backendsSortedByPriority", "registerBackend", "tryResolveAndInitializeBackend", "resolveBackendAndExecutionProviders", "init_backend_impl", "__esmMin", "name", "backend", "priority", "currentBackend", "i", "backendName", "backendInfo", "isInitializing", "e", "options", "eps", "backendHints", "backendNames", "errors", "availableBackendNames", "resolveResult", "err", "filteredEps", "target", "prop", "init_backend", "__esmMin", "init_backend_impl", "version", "init_version", "__esmMin", "logLevelValue", "env", "init_env_impl", "__esmMin", "init_version", "version", "value", "env", "init_env", "__esmMin", "init_env_impl", "tensorToDataURL", "tensorToImageData", "init_tensor_conversion_impl", "__esmMin", "tensor", "options", "canvas", "pixels2DContext", "width", "height", "inputformat", "norm", "normMean", "normBias", "stride", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "j", "R", "G", "B", "A", "image", "channels", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "bufferToTensor", "tensorFromImage", "tensorFromTexture", "tensorFromGpuBuffer", "tensorFromPinnedBuffer", "init_tensor_factory_impl", "__esmMin", "init_tensor_impl", "buffer", "options", "height", "width", "norm", "normMean", "normBias", "inputformat", "outputformat", "stride", "float32Data", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "Tensor", "image", "isHTMLImageEle", "isImageDataEle", "isImageBitmap", "isString", "data", "bufferToTensorOptions", "createCanvas", "createCanvasContext", "canvas", "pixels2DContext", "tempCanvas", "resolve", "reject", "context", "newImage", "img", "texture", "download", "dispose", "dims", "gpuBuffer", "dataType", "type", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "isTypedArrayChecked", "checkTypedArray", "init_tensor_impl_type_mapping", "__esmMin", "isBigInt64ArrayAvailable", "isBigUint64ArrayAvailable", "isFloat16ArrayAvailable", "calculateSize", "tensorReshape", "init_tensor_utils_impl", "__esmMin", "init_tensor_impl", "dims", "size", "i", "dim", "tensor", "Tensor", "Tensor", "init_tensor_impl", "__esmMin", "init_tensor_conversion_impl", "init_tensor_factory_impl", "init_tensor_impl_type_mapping", "init_tensor_utils_impl", "arg0", "arg1", "arg2", "checkTypedArray", "type", "dims", "expectedTypedArrayConstructor", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "data", "maybeDims", "typedArrayConstructor", "firstElementType", "mappedType", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "size", "calculateSize", "image", "options", 
"tensorFromImage", "texture", "tensorFromTexture", "gpuBuffer", "tensorFromGpuBuffer", "buffer", "tensorFromPinnedBuffer", "tensorToDataURL", "tensorToImageData", "releaseData", "tensorReshape", "Tensor", "init_tensor", "__esmMin", "init_tensor_impl", "TRACE", "TRACE_FUNC", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "init_trace", "__esmMin", "init_env_impl", "deviceType", "label", "env", "msg", "extraMsg", "stack", "hasTraceFunc", "i", "InferenceSession", "init_inference_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "init_trace", "_InferenceSession", "handler", "feeds", "arg1", "arg2", "TRACE_FUNC_BEGIN", "fetches", "options", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "TRACE_FUNC_END", "arg0", "arg3", "filePathOrUint8Array", "buffer", "byteOffset", "byteLength", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "InferenceSession", "init_inference_session", "__esmMin", "init_inference_session_impl", "init_tensor_conversion", "__esmMin", "init_tensor_factory", "__esmMin", "init_onnx_model", "__esmMin", "init_onnx_value", "__esmMin", "noBackendErrMsg", "TrainingSession", "init_training_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "_TrainingSession", "handler", "hasOptimizerModel", "hasEvalModel", "trainingOptions", "sessionOptions", "evalModel", "optimizerModel", "options", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "inputNames", "outputNames", "feeds", "arg1", "arg2", "fetches", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "trainableOnly", "array", "paramsSize", "TrainingSession", "init_training_session", "__esmMin", "init_training_session_impl", "esm_exports", "__export", "InferenceSession", "TRACE", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "Tensor", "TrainingSession", "env", "registerBackend", "init_esm", "__esmMin", "init_backend", "init_env", "init_inference_session", "init_tensor", "init_tensor_conversion", "init_tensor_factory", "init_trace", "init_onnx_model", "init_onnx_value", "init_training_session", "log", "arg0", "arg1", "arg2", "arg3", "createCategorizedLogger", "logInternal", "category", "severity", "content", "_stack", "config", "LOGGER_CONFIG_MAP", "SEVERITY_VALUE", "LOGGER_PROVIDER_MAP", "NoOpLoggerProvider", "ConsoleLoggerProvider", "LOGGER_DEFAULT_CONFIG", "Logger", "Event", "EventRecord", "Profiler", "now", "init_instrument", "__esmMin", "_severity", "_content", "_category", "verbose", "info", "warning", "error", "fatal", "reset", "set", "previousConfig", "setWithEnv", "env", "name", "startTime", "endCallback", "timer", "ctx", "endTime", "maxNumberEvents", "flushBatchSize", "flushIntervalInMilliseconds", "func", "event", "isPromise", "res", "resolve", "reject", "value", "reason", "eventRes", "e", "currentTime", "previousPointer", "resolveOperator", "node", "opsets", "rules", "rule", "opType", "domain", "versionSelector", "opImpl", "opInit", "opset", "matchSelector", "set", "version", "selector", "rangeStart", "pair", "rangeEnd", "init_opset", "__esmMin", "require_guid", "__commonJSMin", "exports", "Guid", "guid", "value", "count", "out", "i", "other", "Long", "low", "high", "unsigned", "isLong", "obj", "ctz32", "value", "c", "fromInt", "cachedObj", "cache", "UINT_CACHE", "fromBits", "INT_CACHE", "fromNumber", "UZERO", "ZERO", "TWO_PWR_64_DBL", "MAX_UNSIGNED_VALUE", "TWO_PWR_63_DBL", "MIN_VALUE", "MAX_VALUE", "TWO_PWR_32_DBL", "lowBits", "highBits", 
"fromString", "str", "radix", "p", "radixToPower", "pow_dbl", "result", "i", "size", "power", "fromValue", "val", "wasm", "TWO_PWR_16_DBL", "TWO_PWR_24_DBL", "TWO_PWR_24", "ONE", "UONE", "NEG_ONE", "LongPrototype", "long_default", "init_long", "__esmMin", "radixLong", "div", "rem1", "rem", "remDiv", "intval", "digits", "bit", "other", "thisNeg", "otherNeg", "addend", "a48", "a32", "a16", "a00", "b48", "b32", "b16", "b00", "c48", "c32", "c16", "c00", "subtrahend", "multiplier", "divisor", "approx", "res", "halfThis", "log2", "delta", "approxRes", "approxRem", "numBits", "b", "le", "hi", "lo", "bytes", "flatbuffers", "init_flatbuffers", "__esmMin", "low", "high", "other", "opt_initial_size", "initial_size", "forceDefaults", "size", "additional_bytes", "align_size", "old_buf_size", "byte_size", "i", "value", "voffset", "defaultValue", "obj", "bb", "new_buf_size", "nbb", "offset", "numfields", "vtableloc", "trimmed_size", "standard_fields", "len", "existing_vtable", "vt1", "outer_loop", "vt2", "j", "root_table", "opt_file_identifier", "opt_size_prefix", "size_prefix", "file_identifier", "table", "field", "table_start", "vtable_start", "ok", "elem_size", "num_elems", "alignment", "s", "utf8", "codePoint", "a", "b", "bytes", "position", "result", "bb_pos", "vtable_offset", "vtable", "t", "opt_encoding", "length", "c", "d", "ident", "onnxruntime", "init_ort_generated", "__esmMin", "init_flatbuffers", "experimental", "fbs", "AttributeType", "DimensionValueType", "TensorDataType", "NodeType", "TypeInfoValue", "Shape", "i", "bb", "obj", "flatbuffers", "index", "offset", "builder", "dimOffset", "data", "numElems", "Dimension", "optionalEncoding", "valueOffset", "denotationOffset", "DimensionValue", "dimType", "dimValue", "dimParamOffset", "TensorTypeAndShape", "elemType", "shapeOffset", "MapType", "keyType", "valueTypeOffset", "SequenceType", "elemTypeOffset", "EdgeEnd", "node_index", "src_arg_index", "dst_arg_index", "NodeEdge", "nodeIndex", "inputEdgesOffset", "outputEdgesOffset", "Node", "nameOffset", "docStringOffset", "domainOffset", "sinceVersion", "opTypeOffset", "type", "executionProviderTypeOffset", "inputsOffset", "outputsOffset", "attributesOffset", "inputArgCountsOffset", "implicitInputsOffset", "ValueInfo", "typeOffset", "TypeInfo", "valueType", "OperatorSetId", "version", "Tensor", "dimsOffset", "dataType", "rawDataOffset", "stringDataOffset", "SparseTensor", "valuesOffset", "indicesOffset", "Attribute", "f", "sOffset", "tOffset", "gOffset", "floatsOffset", "intsOffset", "stringsOffset", "tensorsOffset", "graphsOffset", "Graph", "initializersOffset", "nodeArgsOffset", "nodesOffset", "maxNodeIndex", "nodeEdgesOffset", "sparseInitializersOffset", "Model", "irVersion", "opsetImportOffset", "producerNameOffset", "producerVersionOffset", "modelVersion", "graphOffset", "graphDocStringOffset", "KernelCreateInfos", "nodeIndicesOffset", "kernelDefHashesOffset", "SubGraphSessionState", "graphIdOffset", "sessionStateOffset", "SessionState", "kernelsOffset", "subGraphSessionStatesOffset", "InferenceSession", "ortVersionOffset", "modelOffset", "require_aspromise", "__commonJSMin", "exports", "module", "asPromise", "fn", "ctx", "params", "offset", "index", "pending", "resolve", "reject", "err", "require_base64", "__commonJSMin", "exports", "base64", "string", "p", "n", "b64", "s64", "i", "buffer", "start", "end", "parts", "chunk", "j", "t", "b", "invalidEncoding", "offset", "c", "require_eventemitter", "__commonJSMin", "exports", "module", "EventEmitter", "evt", "fn", "ctx", "listeners", "i", "args", 
"require_float", "__commonJSMin", "exports", "module", "factory", "f32", "f8b", "le", "writeFloat_f32_cpy", "val", "buf", "pos", "writeFloat_f32_rev", "readFloat_f32_cpy", "readFloat_f32_rev", "writeFloat_ieee754", "writeUint", "sign", "exponent", "mantissa", "writeUintLE", "writeUintBE", "readFloat_ieee754", "readUint", "uint", "readUintLE", "readUintBE", "f64", "writeDouble_f64_cpy", "writeDouble_f64_rev", "readDouble_f64_cpy", "readDouble_f64_rev", "writeDouble_ieee754", "off0", "off1", "readDouble_ieee754", "lo", "hi", "require_inquire", "__commonJSMin", "e", "require_utf8", "__commonJSMin", "exports", "utf8", "string", "len", "c", "i", "buffer", "start", "end", "parts", "chunk", "t", "offset", "c1", "c2", "require_pool", "__commonJSMin", "exports", "module", "pool", "alloc", "slice", "size", "SIZE", "MAX", "slab", "offset", "buf", "require_longbits", "__commonJSMin", "exports", "module", "LongBits", "util", "lo", "hi", "zero", "zeroHash", "value", "sign", "unsigned", "charCodeAt", "hash", "mask", "part0", "part1", "part2", "require_minimal", "__commonJSMin", "exports", "util", "value", "obj", "prop", "Buffer", "sizeOrArray", "hash", "unsigned", "bits", "merge", "dst", "src", "ifNotSet", "keys", "i", "str", "newError", "name", "CustomError", "message", "properties", "fieldNames", "fieldMap", "encoding", "size", "require_writer", "__commonJSMin", "exports", "module", "Writer", "util", "BufferWriter", "LongBits", "base64", "utf8", "Op", "fn", "len", "val", "noop", "State", "writer", "create", "size", "writeByte", "buf", "pos", "writeVarint32", "VarintOp", "value", "writeVarint64", "bits", "writeFixed32", "writeBytes", "i", "head", "tail", "BufferWriter_", "require_writer_buffer", "__commonJSMin", "exports", "module", "BufferWriter", "Writer", "util", "val", "buf", "pos", "i", "value", "len", "writeStringBuffer", "require_reader", "__commonJSMin", "exports", "module", "Reader", "util", "BufferReader", "LongBits", "utf8", "indexOutOfRange", "reader", "writeLength", "buffer", "create_array", "create", "value", "readLongVarint", "bits", "i", "readFixed32_end", "buf", "end", "readFixed64", "length", "start", "nativeBuffer", "bytes", "wireType", "BufferReader_", "fn", "require_reader_buffer", "__commonJSMin", "exports", "module", "BufferReader", "Reader", "util", "buffer", "len", "require_service", "__commonJSMin", "exports", "module", "Service", "util", "rpcImpl", "requestDelimited", "responseDelimited", "rpcCall", "method", "requestCtor", "responseCtor", "request", "callback", "self", "err", "response", "endedByRPC", "require_rpc", "__commonJSMin", "exports", "rpc", "require_roots", "__commonJSMin", "exports", "module", "require_index_minimal", "__commonJSMin", "exports", "protobuf", "configure", "require_minimal", "__commonJSMin", "exports", "module", "require_onnx", "__commonJSMin", "exports", "module", "$protobuf", "$Reader", "$Writer", "$util", "$root", "onnx", "valuesById", "values", "AttributeProto", "properties", "keys", "i", "message", "writer", "reader", "length", "end", "tag", "end2", "error", "object", "options", "long", "j", "typeUrlPrefix", "ValueInfoProto", "NodeProto", "TrainingInfoProto", "ModelProto", "StringStringEntryProto", "TensorAnnotation", "GraphProto", "TensorProto", "Segment", "SparseTensorProto", "TensorShapeProto", "Dimension", "$oneOfFields", "TypeProto", "Tensor", "Sequence", "Map", "Optional", "SparseTensor", "OperatorSetIdProto", "FunctionProto", "assert", "expr", "msg", "decodeUtf8String", "buffer", "import_onnx", "ArrayUtil", "MatMulUtil", "BroadcastUtil", 
"GemmUtil", "ProtoUtil", "LongUtil", "ShapeUtil", "SplitUtil", "PoolConvUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", "init_flatbuffers", "init_long", "init_tensor", "n1", "n2", "i", "dimsA", "dimsB", "a", "b", "outputShape", "aRank", "bRank", "_BroadcastUtil", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "aLen", "bLen", "broadcastedIndices", "originalShape", "originalIndices", "dimOffset", "op", "inplace", "resultType", "size", "c", "Tensor", "outputIndices", "originalIndicesA", "originalIndicesB", "valA", "valB", "isAScalar", "isBScalar", "rest", "j", "shape", "finalShape", "inputRank", "finalRank", "inputShape", "inRank", "dims", "dim", "leftShape", "transLeft", "rightShape", "transRight", "biasShape", "M", "K", "N", "kDim", "_ProtoUtil", "typeProto", "type", "d", "long_default", "valueType", "tensor", "node", "attributes", "n", "unsigned", "flatbuffers", "_ShapeUtil", "axis", "start", "end", "rank", "strides", "indices", "offset", "tensorRank", "axes", "x", "index", "axisToIncrementOn", "k", "originalDims", "shapeHints", "nDims", "reshapedDims", "unknownDimension", "newTensorSize", "oldTensorSize", "perm", "v", "pad", "shape1", "shape2", "total", "y", "right", "outputDims", "inSqueezeList", "inputDimsIterator", "_SplitUtil", "split", "numOutputs", "shapes", "offsets", "numElementsAlongAxis", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "autoPad", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "sizeof", "type", "sizeofProto", "createView", "dataBuffer", "dataviewConstructor", "longToNumber", "i", "ortFbs", "readProto", "view", "byteOffset", "long_default", "import_guid_typescript", "import_onnx", "Tensor", "init_tensor", "__esmMin", "init_long", "init_ort_generated", "init_util", "onnxruntime", "_Tensor", "dims", "dataProvider", "asyncDataProvider", "cache", "dataId", "ShapeUtil", "size", "empty", "constructor", "buf", "data", "indices", "value", "tensorProto", "ProtoUtil", "str", "decodeUtf8String", "dataDest", "dataSource", "elementSize", "length", "n", "array", "element", "ortTensor", "getGlsl", "version", "GLSL_ES_2_0", "GLSL_ES_3_0", "getVertexShaderSource", "glsl", "getFragShaderPreamble", "getDefaultFragShaderMain", "outputShapeLength", "init_glsl_source", "__esmMin", "init_types", "__esmMin", "repeatedTry", "checkFn", "delayFn", "_counter", "maxCounter", "resolve", "reject", "tryCount", "tryFn", "nextBackoff", "generateShaderFuncNameFromInputSamplerName", "samplerName", "assert", "generateShaderFuncNameFromInputSamplerNameAtOutCoords", "squeezeInputShape", "inputShape", "squeezedShape", "newInputShape", "getSqueezedParams", "params", "keptDims", "d", "getCoordsDataType", "rank", "getGlChannels", "init_utils", "__esmMin", "init_util", "getVecChannels", "name", "rank", "getGlChannels", "d", "getChannels", "unpackFromChannel", "init_packing_utils", "__esmMin", "init_utils", "getOutOfBoundsCondition", "rank", "shape", "dims", "cond", "i", "getOutput", "coord00", "coord01", "coord10", "coord11", "D", "getSetup", "rows", "cols", "packProgramMetadata", "createPackProgramInfo", "createPackProgramInfoLoader", "init_pack", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "handler", "input", "glsl", "getGlsl", "inputShape", "inputRank", "outputRank", "coordsDataType", "getCoordsDataType", "channels", "getChannels", "setup", "reversedInputWH", "outOfBoundsCondition", "output", "shaderSource", 
"processDims3D", "shape", "batch", "i", "isReshapeCheap", "dims", "reshapedDims", "isCheapReshape", "getReshapedInputCoords", "strides", "ShapeUtil", "coords", "index", "stride", "line1", "line2", "getFlattenedIndexFrom3D", "createPackedReshape3DProgramMetadata", "createPackedReshape3DProgramInfo", "createPackedReshape3DProgramInfoLoader", "init_reshape_packed", "__esmMin", "init_util", "init_glsl_source", "init_types", "init_packing_utils", "outputShape3D", "handler", "input3D", "metadata", "inputShape3D", "squeezedOutputShape", "mainLoop", "outputCoords", "glsl", "getGlsl", "shaderSource", "unpackFromChannel", "encodeAsUint8", "init_uint8_encode", "__esmMin", "init_glsl_source", "init_types", "inferenceHandler", "input", "outputShape", "glsl", "getGlsl", "shaderSource", "programInfo", "getSourceCoords", "rank", "dims", "coords", "i", "unpackProgramMetadata", "createUnpackProgramInfo", "createUnpackProgramInfoLoader", "init_unpack", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "handler", "input", "channels", "getChannels", "innerDims", "coordsDataType", "getCoordsDataType", "unpackChannel", "unpackFromChannel", "sourceCoords", "glsl", "getGlsl", "shaderSource", "RedFloat32DataEncoder", "RGBAFloatDataEncoder", "Uint8DataEncoder", "init_texture_data_encoder", "__esmMin", "init_instrument", "gl", "channels", "src", "textureSize", "result", "source", "Logger", "v", "i", "size", "buffer", "dataSize", "_value", "index", "textureType", "dest", "_textureSize", "createTextureLayoutFromTextureType", "calculateTextureWidthAndHeight", "createTextureLayoutFromShape", "init_texture_layout", "__esmMin", "init_util", "init_types", "textureLayoutStrategy", "shape", "textureType", "channel", "isPacked", "reverseWH", "breakAxis", "unpackedShape", "d", "i", "layout", "channels", "prefs", "width", "height", "rank", "inferredDims", "ShapeUtil", "getProgramInfoUniqueKey", "WebGLInferenceHandler", "init_inference_handler", "__esmMin", "init_instrument", "init_tensor", "init_util", "init_pack", "init_reshape_packed", "init_uint8_encode", "init_unpack", "init_texture_data_encoder", "init_texture_layout", "init_types", "programInfo", "inputTextureDatas", "inputs", "texture", "key", "session", "shape", "textureType", "calculateTextureWidthAndHeight", "program", "i", "artifact", "outputTextureLayout", "createTextureLayoutFromTextureType", "outputTextureData", "output", "tensor", "td", "layout", "adjustedKernelShape", "adjustedLayout", "buffer", "numFeatureMaps", "oldRowSize", "newRowSize", "newSize", "f", "oldOffset", "newOffset", "unpackedTextureLayout", "createTextureLayoutFromShape", "unpackedTextureData", "dataType", "data", "usage", "Logger", "input", "reshapedDims", "inputTD", "newTextureLayout", "ShapeUtil", "isReshapeCheap", "squeezedInputShape", "processDims3D", "squeezedOutputShape", "squeezedInputTensor", "squeezedOutputTensor", "createPackedReshape3DProgramInfoLoader", "type", "tensorId", "textureData", "Tensor", "_id", "isPacked", "encodeAsUint8", "createPackProgramInfoLoader", "createUnpackProgramInfoLoader", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "batchNormalizationProgramMetadata", "batchNormalization", "parseBatchNormalizationAttributes", "createBatchNormalizationProgramInfo", "validateInputs", "init_batch_normalization", "__esmMin", "init_attribute_with_cache_key", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "attributes", "node", "epsilon", "momentum", 
"spatial", "createAttributeWithCacheKey", "glsl", "getGlsl", "rank", "scaleWidth", "scaleHeight", "shaderSource", "X", "scale", "B", "mean", "var_", "GlslContext", "GlslLib", "GlslLibRoutine", "GlslLibRoutineNode", "TopologicalSortGlslRoutines", "init_glsl_definitions", "__esmMin", "glContext", "programInfo", "inputTextureLayouts", "outputTextureLayout", "context", "routineBody", "dependencies", "name", "node", "nodes", "cycleCheck", "alreadyTraversed", "result", "graphNodes", "root", "i", "glslAdd", "name", "glslDiv", "glslMul", "glslSub", "glslEqual", "glslGreater", "glslLess", "glslAnd", "glslOr", "glslXor", "glslPow", "glslBuiltinBinary", "glslPRelu", "fname", "createBinaryProgramInfoLoader", "createBinaryProgramInfo", "add", "and", "div", "equal", "greater", "less", "mul", "or", "pow", "pRelu", "sub", "xor", "init_binary_op", "__esmMin", "init_util", "init_glsl_definitions", "init_glsl_source", "init_types", "handler", "inputs", "glslFunc", "outputTensorType", "cacheKey", "textureType", "isBroadcast", "ShapeUtil", "outputShape", "usePackedTexture", "calculatedShape", "BroadcastUtil", "outputRank", "aRank", "bRank", "aBcast", "bBcast", "glsl", "getGlsl", "shaderSource", "cast", "parseCastAttributes", "validateInputs", "init_cast", "__esmMin", "init_util", "handler", "inputs", "to", "node", "ProtoUtil", "createPackedConcatProgramMetadata", "createPackedConcatProgramInfo", "createPackedConcatProgramInfoLoader", "getShiftedChannelsSnippet", "init_concat_packed", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "inputCount", "cacheHint", "_v", "i", "handler", "metadata", "inputs", "axis", "inputShape", "outputShape", "dataNShape", "axisIndex", "rank", "coords", "getChannels", "dtype", "getCoordsDataType", "unpackChannel", "unpackFromChannel", "shapes", "channels", "getGlChannels", "offsets", "channel", "lastChannels", "allChannels", "getValueSnippet", "shift", "lastIndex", "glsl", "getGlsl", "shaderSource", "x", "attributes", "channelIdx", "c", "idx", "concat", "createUnpackedConcatProgramMetadata", "createUnpackedConcatProgramInfo", "createUnpackedConcatProgramInfoLoader", "getTextureIndexWhereDataResidesLinearSearch", "getTextureIndexWhereDataResidesBinarySearch", "getFetchDataFromCorrectTextureMethod", "getGetSizeInConcatAxisValueFromIndexMethod", "parseConcatAttributes", "validateInputs", "init_concat", "__esmMin", "init_attribute_with_cache_key", "init_types", "init_concat_packed", "inferenceHandler", "inputs", "attributes", "createPackedConcatProgramInfoLoader", "inputCount", "cacheHint", "_v", "i", "_handler", "metadata", "axis", "inputShape", "outputShape", "dataNShape", "axisIndex", "rank", "sizeInConcatAxis", "previousSum", "getTextureIndexWhereDataResidesMethod", "fetchDataFromCorrectTextureMethod", "getSizeInConcatAxisValueFromIndexMethod", "shaderSource", "handler", "size", "numberOfTensors", "tensorRank", "codeLines", "node", "createAttributeWithCacheKey", "inputType", "inputDimensionality", "input", "glslAbs", "glslBuiltinUnary", "glslAcos", "glslAsin", "glslAtan", "glslCeil", "glslCos", "glslElu", "alpha", "name", "glslExp", "glslFloor", "glslClip", "min", "max", "glslIdentity", "glslLeakyRelu", "glslLog", "glslNeg", "glslNot", "glslSin", "glslRelu", "glslSigmoid", "glslSqrt", "glslTan", "glslTanh", "createElementwiseProgramInfo", "createElementwiseProgramInfoLoader", "abs", "acos", "asin", "atan", "clip", "parseClipAttributes", "clipV11", "generateClipAttributesFromInputs", "ceil", "cos", "elu", "parseEluAttributes", "exp", "floor", 
"identity", "leakyRelu", "parseLeakyReluAttributes", "log", "neg", "not", "relu", "sigmoid", "sin", "sqrt", "tan", "tanh", "init_unary_op", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_glsl_definitions", "init_glsl_source", "init_types", "handler", "metadata", "input", "glslFunc", "textureType", "glsl", "getGlsl", "cacheKey", "inputs", "attributes", "node", "createAttributeWithCacheKey", "MIN_CLIP", "MAX_CLIP", "getActivationSnippet", "attributes", "func", "glslRelu", "glslSigmoid", "glslClip", "activationName", "activationFunction", "applyActivation", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_util", "init_unary_op", "activation", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "createUnpackedGroupedConvProgramMetadata", "createUnpackedGroupedConvProgramInfo", "createUnpackedGroupedConvProgramInfoLoader", "init_conv_grouped", "__esmMin", "init_instrument", "init_glsl_source", "init_types", "init_conv", "init_fuse_utils", "hasBias", "cacheHint", "inferenceHandler", "inputs", "metadata", "attributes", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "Logger", "outputShape", "calculateOutputShape", "glsl", "getGlsl", "activationFunction", "applyActivation", "getActivationSnippet", "shaderSource", "createPackedIm2ColProgramMetadata", "createPackedIm2ColProgramInfo", "createPackedIm2ColProgramInfoLoader", "init_im2col_pack", "__esmMin", "init_glsl_source", "init_types", "init_packing_utils", "cacheHint", "inferenceHandler", "metadata", "x", "w", "outputShape", "attributes", "xshape", "wshape", "rowDim", "colDim", "rank", "im2colShape", "kernelSize", "unpackChannel", "unpackFromChannel", "glsl", "getGlsl", "unrolled", "row", "col", "shaderSource", "createMatmulProgramInfo", "metadata", "inputs", "activationAttributes", "aShape", "bShape", "outputShape", "BroadcastUtil", "coordsDataType", "getCoordsDataType", "allGlChannels", "getGlChannels", "activationFunction", "applyActivation", "getActivationSnippet", "hasBias", "processBias", "getBiasForMatmulSnippet", "getBiasForMatmul", "rank", "arank", "brank", "sharedDim", "shaderSource", "createMatmulProgramInfoLoader", "createMatmulProgramMetadata", "inShape", "outShape", "isPacked", "unpackedCoordsSnippet", "inRank", "outRank", "rankDiff", "_s", "i", "coordsSnippet", "d", "isInputScalar", "ShapeUtil", "output", "matMul", "parseMatMulAttributes", "validateInputs", "init_matmul", "__esmMin", "init_util", "init_types", "init_utils", "init_fuse_utils", "init_matmul_pack", "inferenceHandler", "attributes", "createPackedMatmulProgramInfoLoader", "node", "parseInternalActivationAttributes", "cacheHint", "getBcastSamplerForMatmul", "coordsDataType", "allGlChannels", "inputs", "outShape", "unpackedACoordsSnippet", "unpackedBCoordsSnippet", "inAShape", "inBShape", "inARank", "inBRank", "outRank", "rankADiff", "rankBDiff", "_s", "i", "broadcastADims", "BroadcastUtil", "broadcastBDims", "coordsASnippet", "d", "coordsBSnippet", "swapDimSnippet", "getA", "rank", "res", "getB", "createPackedMatmulProgramMetadata", "createPackedMatmulProgramInfo", "createPackedMatmulProgramInfoLoader", "init_matmul_pack", "__esmMin", "init_util", "init_glsl_source", "init_types", "init_utils", "init_fuse_utils", "init_matmul", "hasBias", "cacheHint", "inferenceHandler", "metadata", "activationAttributes", "processBias", "aShape", "bShape", "outputShape", "isBroadcast", "ShapeUtil", "sharedDim", "sharedDimIndex", "aRank", "bRank", "glsl", "getGlsl", "getCoordsDataType", "getGlChannels", "activationFunction", 
"applyActivation", "getActivationSnippet", "getBiasForMatmulSnippet", "getBiasForMatmul", "getBcastedSamplerForMatmulSnippet", "getSamplerAInLoopSnippet", "getSamplerBInLoopSnippet", "getOutputCoordsSnippet", "shaderSource", "conv2DPacked", "init_conv_pack", "__esmMin", "init_conv", "init_im2col_pack", "init_matmul_pack", "inferenceHandler", "inputs", "attributes", "xshape", "kshape", "outputShape", "calculateOutputShape", "im2colOutput", "createPackedIm2ColProgramInfoLoader", "kernelReshaped", "matmulInputs", "matmulOutput", "createPackedMatmulProgramInfoLoader", "createIm2ColProgramMetadata", "createIm2ColProgramInfo", "createIm2ColProgramInfoLoader", "calculateIm2ColDims", "init_im2col", "__esmMin", "init_types", "cacheHint", "_inferenceHandler", "metadata", "x", "w", "outputShape", "attributes", "xshape", "wshape", "rank", "im2colDims", "shaderSource", "inferenceHandler", "inputShape", "kernelShape", "channels", "createDotProductProgramMetadata", "createDotProductProgramInfo", "createDotProductProgramInfoLoader", "init_dot_product", "__esmMin", "init_util", "init_glsl_source", "init_types", "init_fuse_utils", "init_im2col", "hasBias", "attributes", "inferenceHandler", "metadata", "inputs", "outputShape", "xshape", "kshape", "adjustedKernelShape", "im2colShape", "calculateIm2ColDims", "kWidth", "kHeight", "im2colStrides", "ShapeUtil", "im2colWidth", "im2colHeight", "rank", "initValue", "sharedDim", "activationFunction", "applyActivation", "getActivationSnippet", "glsl", "getGlsl", "shaderSource", "calculateOutputShape", "conv", "conv2d", "conv2DUnpackedPointwise", "conv2DUnpacked", "getAdjustedConvAttributes", "parseConvAttributes", "validateInputs", "init_conv", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_conv_grouped", "init_conv_pack", "init_dot_product", "init_fuse_utils", "init_im2col", "init_matmul", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputSpatialShape", "inferenceHandler", "inputs", "attributes", "adjustedAttributes", "packMode", "isPointwise", "createUnpackedGroupedConvProgramInfoLoader", "conv2DPacked", "xshape", "kshape", "outputShape", "reshapedX", "reshapedK", "matmulInputs", "matmulOutput", "createMatmulProgramInfoLoader", "xIm2Col", "createIm2ColProgramInfoLoader", "dotProductInputs", "createDotProductProgramInfoLoader", "pads", "PoolConvUtil", "newAttributes", "node", "activationAttributes", "parseInternalActivationAttributes", "autoPad", "group", "createAttributeWithCacheKey", "dataChannel", "filterInChannel", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "convTranspose", "convTranspose2d", "createConvTransposeProgramMetadata", "createUnpackedConvTransposeProgramInfo", "createUnpackedConvTransposeProgramInfoLoader", "convTranspose2DUnpacked", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "init_conv_transpose", "__esmMin", "init_attribute_with_cache_key", "init_glsl_source", "init_types", "init_fuse_utils", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "strides", "outputPadding", "outputShape", "spatialRank", "updateShape", "i", "inferenceHandler", "inputs", "attributes", "adjustedAttributes", "hasBias", "cacheHint", "metadata", "valueInit", "xShape", "wShape", "outputChannelsPerGroup", "inputChannelsPerGroup", "glsl", "getGlsl", 
"activationFunction", "applyActivation", "getActivationSnippet", "shaderSource", "newAttributes", "node", "activationAttributes", "parseInternalActivationAttributes", "group", "createAttributeWithCacheKey", "dataChannel", "filterInChannel", "featureMaps", "transposeProgramMetadata", "transpose", "parseTransposeAttributes", "createTransposeProgramInfo", "getAdjustedPerm", "getOutputShape", "getPermFunctionBody", "validateInputs", "init_transpose", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "createAttributeWithCacheKey", "_inferenceHandler", "input", "perm", "inputShape", "unpackedOutputShape", "rank", "shaderSource", "ShapeUtil", "name", "reverseFunc", "i", "depthToSpace", "parseDepthToSpaceAttributes", "validateInputs", "init_depth_to_space", "__esmMin", "init_transpose", "inferenceHandler", "inputs", "attributes", "blocksize", "blocksizeSqr", "transposePerm", "firstReshapeShape", "firstReshapedTensor", "transposeAttributes", "transposeOutput", "transpose", "secondReshapeShape", "node", "mode", "flatten", "parseFlattenAttributes", "validateInputs", "init_flatten", "__esmMin", "init_util", "inferenceHandler", "inputs", "axis", "outputDims", "ShapeUtil", "node", "r", "NUMBER_TYPES", "init_operators", "__esmMin", "gather", "parseGatherAttributes", "gatherProgramMetadata", "createGatherProgramInfo", "createGatherProgramInfoLoader", "validateInputs", "init_gather", "__esmMin", "init_attribute_with_cache_key", "init_operators", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "createAttributeWithCacheKey", "_handler", "metadata", "axis", "inputShape", "indexDataShape", "outputShape", "ShapeUtil", "indexCopyOps", "i", "orank", "irank", "iDrank", "shaderSource", "handler", "tensorRank", "NUMBER_TYPES", "gemm", "parseGemmAttributes", "parseGemmAttributesV7", "parseGemmAttributesV11", "createGemmProgramInfoLoader", "createGemmProgramInfo", "validateInputs", "init_gemm", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "isOptionalC", "transA", "transB", "alpha", "beta", "createAttributeWithCacheKey", "metadata", "aShape", "bShape", "M", "N", "GemmUtil", "outputShape", "sharedDim", "line", "rank", "declareC", "broadcastC", "calculateC", "shaderSource", "imageScaler", "parseImageScalerAttributes", "imageScalerProgramMetadata", "createImageScalerProgramInfo", "createImageScalerProgramInfoLoader", "createGetBiasMethod", "validateInputs", "init_image_scaler", "__esmMin", "init_attribute_with_cache_key", "init_types", "inferenceHandler", "inputs", "attributes", "node", "scale", "bias", "createAttributeWithCacheKey", "_handler", "metadata", "outputShape", "rank", "shaderSource", "handler", "numChannels", "codeLines", "i", "instanceNormalization", "parseInstanceNormalizationAttributes", "meanAndVarianceProgramMetadata", "createMeanAndVarianceProgramInfo", "createMeanAndVarianceProgramInfoLoader", "computeOutputProgramMetadata", "createComputeOutputProgramInfo", "createComputeOutputProgramInfoLoader", "validateInputs", "init_instance_normalization", "__esmMin", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "epsilon", "meanAndVariance", "node", "metadata", "input", "xDims", "channel", "channelSize", "outputShape", "shaderSource", "meanAndVarianceShape", "glsl", "getGlsl", "textureWidth", "textureHeight", "meanAndVarianceWidth", "meanAndVarianceHeight", "X", "scale", "B", "createLrnProgramInfo", "inputs", 
"attributes", "C", "rank", "from", "to", "alpha", "bias", "beta", "shaderSource", "lrnProgramMetadata", "createLrnProgramInfoLoader", "lrn", "parseLrnAttributes", "validateInputs", "init_lrn", "__esmMin", "init_attribute_with_cache_key", "init_types", "inferenceHandler", "node", "size", "createAttributeWithCacheKey", "padProgramMetadata", "padV2", "parsePadAttributesV2", "padV11", "parsePadAttributesV11", "generatePadAttributesFromInputs", "createPadProgramInfo", "validateInputsV2", "validateInputsV11", "getPadFunction", "getPadConstant", "getPadReflect", "getPadEdge", "init_pad", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "attributes", "node", "mode", "value", "pads", "createAttributeWithCacheKey", "attrubutes", "input", "outputShape", "ShapeUtil", "rank", "shaderSource", "glsl", "getGlsl", "width", "height", "strides", "shape", "block", "i", "averagePool", "parseAveragePoolAttributes", "createAveragePoolProgramInfo", "globalAveragePool", "parseGlobalAveragePoolAttributes", "maxPool", "parseMaxPoolAttributes", "createMaxPoolProgramInfo", "getAdjustedPoolAttributesAndOutputShape", "globalMaxPoolAttributes", "globalMaxPoolMetadata", "globalMaxPool", "validateInputs", "generatePoolingCode", "copyArray", "offsetToIndices", "init_pool", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "metadata", "node", "autoPad", "ceilMode", "countIncludePad", "kernelShape", "strides", "pads", "createAttributeWithCacheKey", "isGlobalOperator", "adjustedAttributes", "outputShape", "kernelSize", "ShapeUtil", "op1", "op2", "shaderSource", "storageOrder", "dilations", "inputShape", "hasDilations", "PoolConvUtil", "newAttributes", "inputDims", "start", "rank", "kw", "sw", "pwStart", "pwEnd", "dimW", "codeW", "codeH", "codeHEnd", "kh", "sh", "phStart", "phEnd", "dimH", "kernelStrides", "stridesRank", "padsRank", "offsetToIndicesFunction", "copyInputDims", "copyPads", "copyKernelStrides", "copyStrides", "hasPads", "sum", "cur", "padCode", "array", "arrayName", "block", "i", "reduce", "parseReduceAttributes", "createReduceProgramInfo", "validateInputs", "reduceSum", "reduceMean", "reduceMax", "reduceMin", "reduceProd", "reduceLogSum", "reduceLogSumSquare", "init_reduce", "__esmMin", "init_attribute_with_cache_key", "init_operators", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "name", "reduceOp", "reduceProgramMetadata", "node", "axes", "keepDims", "createAttributeWithCacheKey", "_handler", "_name", "outputShape", "iRank", "idxCopy", "ShapeUtil", "ops", "reduceOps", "k", "shaderSource", "NUMBER_TYPES", "size", "idxZero", "reshape", "init_reshape", "__esmMin", "init_util", "handler", "inputs", "reshapedDims", "ShapeUtil", "upsampleProgramMetadata", "upsample", "parseUpsampleAttributesV7", "parseUpsampleAttributesV9", "parseUpsampleAttributes", "createUpsampleProgramInfo", "validateInputs", "scalesValidation", "init_upsample", "__esmMin", "init_attribute_with_cache_key", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "attributes", "node", "opset", "isResize", "mode", "scales", "extrapolationValue", "coordinateTransformMode", "needRoiInput", "useExtrapolation", "nearestMode", "cubicCoefficientA", "excludeOutside", "useNearest2xOptimization", "roiInputIdx", "scalesInputIdx", "sizesInputIdx", "createAttributeWithCacheKey", "glsl", "getGlsl", "inputWidth", "inputHeight", "outputShape", "dim", "i", "outputWidth", "outputHeight", 
"outputPitches", "inputPitches", "precalculatedPitches", "d", "getInputFloatFunction", "shaderSource", "x", "attribute", "scale", "resizeProgramMetadata", "resize", "parseResizeAttributesV10", "parseResizeAttributesV11", "createPackedResizeProgramInfo", "prepareInputs", "parseScalesData", "parseScalesDataFromOutputSize", "init_resize_packed", "__esmMin", "init_glsl_source", "init_types", "init_utils", "init_packing_utils", "init_upsample", "inferenceHandler", "inputs", "attributes", "validateInputs", "node", "parseUpsampleAttributes", "glsl", "getGlsl", "scales", "outputShape", "s", "dim", "outputHeight", "outputWidth", "inputShape", "inputHeight", "inputWidth", "scalesHeight", "scalesWidth", "getSourceFracIndex", "coordsDataType", "getCoordsDataType", "unpackChannel", "unpackFromChannel", "shaderSource", "xDims", "outputSizes", "scalesTensor", "sizesTensor", "yDims", "i", "scale", "mode", "isResize", "scalesValidation", "length", "end", "shape", "validateInputs", "init_shape", "__esmMin", "init_tensor", "_inferenceHandler", "inputs", "Tensor", "sliceProgramMetadata", "slice", "parseSliceAttributes", "createSliceProgramInfo", "validateInputs", "sliceV10", "generateSliceAttributesFromInputs", "validateInputsV10", "init_slice", "__esmMin", "init_attribute_with_cache_key", "init_operators", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "node", "starts", "ends", "axes", "createAttributeWithCacheKey", "_inferenceHandler", "input", "_val", "i", "normalizedAxes", "ShapeUtil", "start", "end", "outputShape", "sliceOps", "shaderSource", "NUMBER_TYPES", "cacheKey", "softmaxComputeMaxProgramMetadata", "softmaxComputeScaleProgramMetadata", "softmaxProgramMetadata", "softmax", "parseSoftmaxAttributes", "parseSoftmaxAttributesV13", "softmaxV13", "computeSoftmax", "createComputeMaxProgramInfo", "createComputScaleProgramInfo", "createSoftMaxProgramInfo", "validateInputs", "init_softmax", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_glsl_source", "init_types", "init_transpose", "inferenceHandler", "inputs", "attributes", "inputShape", "axis", "ShapeUtil", "logicalRowCount", "featureCount", "node", "createAttributeWithCacheKey", "rank", "isTransposeRequired", "transposedInputShape", "perm", "transposedInputs", "transposeAttribute", "_", "i", "p", "transpose", "output", "computeMaxProgramInfo", "max", "computeScaleProgramInfo", "scale", "softMaxProgramInfo", "input", "outputShape", "textureWidth", "textureHeight", "glsl", "getGlsl", "shaderSource", "maxElementPerLogicalRow", "normalizationPerLogicalRow", "splitProgramMetadata", "split", "parseSplitAttributes", "getProgramCount", "createSplitProgramInfo", "validateInputs", "init_split", "__esmMin", "init_attribute_with_cache_key", "init_util", "init_types", "inferenceHandler", "inputs", "attributes", "axis", "ShapeUtil", "count", "output", "i", "node", "numOutputs", "createAttributeWithCacheKey", "_inferenceHandler", "offsets", "SplitUtil", "input", "index", "shapes", "offset", "outputShape", "shaderSource", "squeeze", "squeezeV13", "parseSqueezeAttributes", "validateInputs", "validateInputsV13", "init_squeeze", "__esmMin", "init_util", "inferenceHandler", "inputs", "axes", "outputShape", "ShapeUtil", "node", "sum", "createSumProgramInfo", "validateInputs", "init_sum", "__esmMin", "init_glsl_source", "init_types", "inferenceHandler", "inputs", "sumProgramMetadata", "_v", "glsl", "getGlsl", "outputShape", "shaderSource", "i", "length", "j", "tile", "createTileProgramInfo", "validateInputs", "init_tile", "__esmMin", 
"init_operators", "init_types", "inferenceHandler", "inputs", "tileProgramMetadata", "_handler", "inputShape", "outputShape", "tileOps", "i", "rank", "shaderSource", "NUMBER_TYPES", "unsqueeze", "unsqueezeV13", "parseUnsqueezeAttributes", "validateInputs", "validateInputsV13", "init_unsqueeze", "__esmMin", "init_util", "inferenceHandler", "inputs", "axes", "outputShape", "ShapeUtil", "node", "WEBGL_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_batch_normalization", "init_binary_op", "init_cast", "init_concat", "init_conv", "init_conv_transpose", "init_depth_to_space", "init_flatten", "init_gather", "init_gemm", "init_image_scaler", "init_instance_normalization", "init_lrn", "init_matmul", "init_pad", "init_pool", "init_reduce", "init_reshape", "init_resize_packed", "init_shape", "init_slice", "init_softmax", "init_split", "init_squeeze", "init_sum", "init_tile", "init_transpose", "init_unary_op", "init_unsqueeze", "init_upsample", "abs", "acos", "add", "and", "asin", "atan", "averagePool", "parseAveragePoolAttributes", "batchNormalization", "parseBatchNormalizationAttributes", "cast", "parseCastAttributes", "ceil", "clip", "parseClipAttributes", "clipV11", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "div", "identity", "depthToSpace", "parseDepthToSpaceAttributes", "equal", "elu", "parseEluAttributes", "exp", "flatten", "parseFlattenAttributes", "floor", "gather", "parseGatherAttributes", "gemm", "parseGemmAttributesV7", "parseGemmAttributesV11", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "greater", "imageScaler", "parseImageScalerAttributes", "instanceNormalization", "parseInstanceNormalizationAttributes", "leakyRelu", "parseLeakyReluAttributes", "less", "lrn", "parseLrnAttributes", "log", "matMul", "parseMatMulAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "neg", "not", "or", "padV2", "parsePadAttributesV2", "padV11", "parsePadAttributesV11", "pow", "pRelu", "reduceLogSum", "parseReduceAttributes", "reduceMax", "reduceMean", "reduceMin", "reduceProd", "reduceSum", "reduceLogSumSquare", "relu", "reshape", "resize", "parseResizeAttributesV10", "parseResizeAttributesV11", "shape", "sigmoid", "sin", "sliceV10", "slice", "parseSliceAttributes", "softmax", "parseSoftmaxAttributes", "softmaxV13", "parseSoftmaxAttributesV13", "split", "parseSplitAttributes", "sqrt", "squeeze", "parseSqueezeAttributes", "squeezeV13", "sub", "sum", "tan", "tanh", "tile", "transpose", "parseTransposeAttributes", "upsample", "parseUpsampleAttributesV7", "parseUpsampleAttributesV9", "unsqueeze", "parseUnsqueezeAttributes", "unsqueezeV13", "xor", "replaceInlines", "script", "inlineDefs", "match", "INLINE_FUNC_DEF_REGEX", "params", "s", "tokens", "v", "name", "regexString", "FUNC_CALL_REGEX", "regex", "type", "variable", "declLine", "newBody", "paramRedecLine", "i", "replacement", "init_glsl_function_inliner", "__esmMin", "squeezeShape", "shape", "axis", "newShape", "keptDims", "isEmptyArray", "axes", "parseAxisParam", "j", "i", "rank", "_s", "assert", "ax", "isInt", "a", "sizeFromShape", "size", "sizeToSquarishShape", "width", "PreferLogicalStrategy", "init_texture_layout_strategy", "__esmMin", "init_instrument", "init_util", "maxTextureSize", "prefs", "wh", "isPacked", "wsize", "b", "hsize", "Logger", "logShape", "_d", "d", "CoordsGlslLib", "init_glsl_coordinate_lib", "__esmMin", "init_util", "init_glsl_definitions", "init_glsl_source", "init_texture_layout_strategy", "init_utils", 
"GlslLib", "context", "funcName", "GlslLibRoutine", "outputLayout", "outShape", "outTexShape", "result", "floatTextureSetRGBASource", "getGlsl", "floatTextureSetRGBAFuncName", "floatTextureSetRSource", "floatTextureSetRFuncName", "_shape", "texShape", "packedTexShape", "source", "shape", "ArrayUtil", "texelsInLogicalRow", "texelsInBatch", "texelsInBatchN", "batches", "coords", "b", "rank", "strides", "i", "coordsToCompute", "coordsFromIndexSnippet", "stride", "line1", "line2", "glsl", "samplerName", "inputLayout", "generateShaderFuncNameFromInputSamplerName", "outCoordFuncName", "generateShaderFuncNameFromInputSamplerNameAtOutCoords", "name", "inShape", "texFuncSnippet", "inRank", "outRank", "broadcastDims", "BroadcastUtil", "type", "getCoordsDataType", "rankDiff", "coordsSnippet", "fields", "getGlChannels", "d", "unpackedCoordsSnippet", "_s", "output", "isInputScalar", "ShapeUtil", "isOutputScalar", "rows", "cols", "swapLastDimsSnippet", "inTexShape", "texNumR", "texNumC", "packedSampler", "valuesPerRow", "squeezedShape", "keptDims", "newInputShape", "squeezeInputShape", "params", "newInputLayout", "samplerRoutine", "getSqueezedParams", "index", "tNumR", "tNumC", "newShape", "squeezeShape", "stride0", "stride1", "routine", "revDims", "stride2", "stride3", "stride4", "xScale", "yScale", "stridesBlock", "body", "layout", "varName", "width", "height", "transpose", "EncodingGlslLib", "init_glsl_encoding_lib", "__esmMin", "init_glsl_definitions", "_EncodingGlslLib", "GlslLib", "context", "GlslLibRoutine", "endianness", "b", "a", "c", "FragColorGlslLib", "init_glsl_fragcolor_lib", "__esmMin", "init_glsl_definitions", "init_glsl_source", "GlslLib", "context", "glsl", "getGlsl", "GlslLibRoutine", "ShapeUtilsGlslLib", "init_glsl_shape_utils_lib", "__esmMin", "init_glsl_definitions", "_ShapeUtilsGlslLib", "GlslLib", "context", "outputRank", "result", "name", "i", "shape", "rank", "dimOffset", "funcName", "block", "body", "GlslLibRoutine", "strides", "stridesBlock", "shapeInit", "VecGlslLib", "init_glsl_vec_lib", "__esmMin", "init_glsl_definitions", "GlslLib", "context", "rank", "nameOp", "result", "name", "fname", "assignmentBlock", "i", "body", "GlslLibRoutine", "block", "glslRegistry", "init_glsl_registered_libs", "__esmMin", "init_glsl_coordinate_lib", "init_glsl_encoding_lib", "init_glsl_fragcolor_lib", "init_glsl_shape_utils_lib", "init_glsl_vec_lib", "EncodingGlslLib", "FragColorGlslLib", "VecGlslLib", "ShapeUtilsGlslLib", "CoordsGlslLib", "GlslPreprocessor", "init_glsl_preprocessor", "__esmMin", "init_glsl_definitions", "init_glsl_function_inliner", "init_glsl_registered_libs", "init_glsl_source", "glContext", "programInfo", "inputTextureLayouts", "outputTextureLayout", "GlslContext", "glslRegistry", "name", "lib", "map", "libName", "routinesInLib", "routine", "key", "currentNode", "GlslLibRoutineNode", "dependencies", "i", "node", "source", "getDefaultFragShaderMain", "replaceInlines", "getFragShaderPreamble", "script", "routinesIncluded", "routines", "nodes", "classAndRoutine", "TopologicalSortGlslRoutines", "samplers", "variables", "uniformLines", "sampler", "variable", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_instrument", "init_glsl_preprocessor", "init_glsl_source", "profiler", "glContext", "textureLayoutStrategy", "key", "artifact", "buildArtifact", "inputs", "output", "gl", "program", "err", "Logger", "a", "programInfo", "inputTextureLayouts", "outputTextureLayout", "preprocessor", "GlslPreprocessor", "fragScript", "fragShaderScript", 
"vertexShaderScript", "getVertexShaderSource", "env", "fragShader", "td", "width", "height", "attribLocations", "positionHandle", "textureCoordHandle", "uniformLocations", "variables", "textures", "texturePosition", "name", "type", "location", "arrayLength", "value", "v", "uniformHandle", "position", "samplers", "sampler", "variable", "reference", "TextureManager", "init_texture_manager", "__esmMin", "init_instrument", "init_texture_data_encoder", "glContext", "layoutStrategy", "profiler", "config", "dataType", "layout", "data", "usage", "textureDataType", "encoder", "width", "height", "key", "inUseTextures", "idleTextures", "texture", "Logger", "td", "channels", "dataSize", "a", "b", "dataId", "subscribers", "resolve", "tensorData", "textureData", "deleteTexture", "index", "_dataType", "WebGLSessionHandler", "init_session_handler", "__esmMin", "init_instrument", "init_opset", "init_inference_handler", "init_op_resolve_rules", "init_program_manager", "init_texture_layout_strategy", "init_texture_manager", "backend", "context", "PreferLogicalStrategy", "ProgramManager", "TextureManager", "WebGLInferenceHandler", "graph", "initializers", "v", "tensorId", "isPacked", "textureData", "Logger", "td", "node", "opsets", "op", "resolveOperator", "WEBGL_OP_RESOLVE_RULES", "linearSearchLastTrue", "arr", "i", "WebGLContext", "init_webgl_context", "__esmMin", "init_esm", "init_texture_data_encoder", "init_utils", "gl", "version", "width", "height", "encoder", "data", "texture", "buffer", "dataSize", "dataType", "channels", "positionHandle", "textureCoordHandle", "vertexShader", "fragShader", "program", "shaderSource", "shaderType", "shader", "position", "uniformHandle", "env", "error", "label", "usage", "RedFloat32DataEncoder", "RGBAFloatDataEncoder", "Uint8DataEncoder", "unit", "geometry", "fb", "internalFormat", "frameBuffer", "isComplete", "fragmentShader", "gl2", "ext", "query", "available", "disjoint", "timeElapsed", "repeatedTry", "fenceContext", "isFencePassed", "status", "resolve", "index", "x", "resolveFn", "isDoneFn", "createWebGLContext", "contextId", "context", "cache", "offscreenCanvas", "createOffscreenCanvas", "createNewWebGLContext", "canvas", "createCanvas", "gl", "contextAttributes", "ca", "WebGLContext", "err", "Logger", "init_webgl_context_factory", "__esmMin", "init_instrument", "init_webgl_context", "WebGLBackend", "init_backend_webgl", "__esmMin", "init_esm", "init_instrument", "init_session_handler", "init_webgl_context_factory", "env", "value", "createWebGLContext", "Logger", "context", "WebGLSessionHandler", "resolveBackend", "hint", "hints", "backendHint", "cache", "backendsCache", "backend", "tryLoadBackend", "backendObj", "isBackend", "init", "obj", "o", "init_backend", "__esmMin", "init_backend_webgl", "WebGLBackend", "KernelOp", "ExecutionPlan", "init_execution_plan", "__esmMin", "init_instrument", "op", "node", "graph", "ops", "profiler", "graphNodes", "i", "resolved", "input", "sessionHandler", "modelInputs", "inferenceHandler", "graphInputs", "index", "sequence", "graphValues", "rear", "thisOpIndex", "thisOp", "inputList", "inputTensors", "Logger", "t", "outputList", "output", "j", "downstreamNodes", "_output", "currentDownstreamNodeIndex", "currentDownstreamNode", "k", "outputIndex", "outputTensor", "import_onnx", "ortFbs", "Attribute", "init_attribute", "__esmMin", "init_ort_generated", "init_tensor", "init_util", "onnxruntime", "_Attribute", "attributes", "attr", "key", "type", "value", "defaultValue", "valueAndType", "attrType", "LongUtil", "arr", "numberValue", 
"i", "maybeLong", "Tensor", "decodeUtf8String", "ints", "strings", "tensors", "import_onnx", "ortFbs", "Graph", "Value", "Node", "GraphImpl", "init_graph", "__esmMin", "init_attribute", "init_ort_generated", "init_tensor", "init_util", "onnxruntime", "graphProto", "initializer", "valueInfo", "ProtoUtil", "_nodeProto", "name", "Attribute", "graph", "graphInitializer", "dataIndices", "nodesIndices", "inputValueNames", "currentIndex", "index", "value", "Tensor", "nodeProto", "pick", "node", "output", "dataIndex", "input", "inputName", "j", "type", "shape", "dims", "k", "LongUtil", "outputName", "starters", "i", "nodesStack", "nodesState", "nodeIndex", "outgoingEdgeIndex", "data", "downstreamNodeIndex", "offset", "newIndices", "nodePossition", "ind", "currentData", "inputValueIndex", "outputValueIndex", "nodesConsumingOutput", "delIndex", "replaceIndex", "n", "next", "child", "MIN_CLIP", "MAX_CLIP", "import_onnx", "ortFbs", "Model", "init_model", "__esmMin", "init_flatbuffers", "init_graph", "init_ort_generated", "init_util", "onnxruntime", "buf", "graphInitializer", "isOrtFormat", "onnxError", "e", "modelProto", "LongUtil", "Graph", "fb", "flatbuffers", "ortModel", "i", "opsetId", "Session", "init_session", "__esmMin", "init_backend", "init_execution_plan", "init_instrument", "init_model", "config", "Profiler", "arg", "byteOffset", "length", "backend", "resolveBackend", "Model", "isOrtFormat", "buf", "arr", "modelProtoBlob", "graphInitializer", "ExecutionPlan", "inputs", "inputTensors", "outputTensors", "modelInputNames", "sortedInputs", "sortedInputsIndex", "tensor", "modelInputIndices", "modelValues", "graphInputDims", "i", "graphInput", "graphInputTypes", "givenInputs", "expectedType", "actualType", "noneDimSupported", "expectedDims", "actualDims", "modelOutputNames", "output", "graph", "nodes", "OnnxjsSessionHandler", "init_session_handler_inference", "__esmMin", "init_esm", "init_tensor", "session", "feeds", "_fetches", "_options", "inputMap", "name", "feed", "Tensor", "outputMap", "output", "tensor", "backend_onnxjs_exports", "__export", "onnxjsBackend", "OnnxjsBackend", "init_backend_onnxjs", "__esmMin", "init_session", "init_session_handler_inference", "pathOrBuffer", "options", "session", "Session", "OnnxjsSessionHandler", "init_wasm_utils_env", "__esmMin", "main_exports", "__export", "main_default", "WORKER_NAME", "isProxyWorker", "init_main", "__esmMin", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "ev", "type", "message", "initializeWebAssembly", "initRuntime", "err", "epName", "env", "initEp", "buffer", "bufferData", "copyFromExternalBuffer", "model", "options", "createSession", "sessionMetadata", "releaseSession", "sessionId", "inputIndices", "inputs", "outputIndices", "run", "outputs", "o", "extractTransferableBuffers", "endProfiling", "urlOverride", "scriptSrc", "ort_wasm_simd_threaded_jsep_exports", "__export", "ort_wasm_simd_threaded_jsep_default", "e", "r", "init_ort_wasm_simd_threaded_jsep", "__esmMin", "t", "x", "R", "L", "H", "a", "D", "F", "P", "B", "u", "I", "f", "$", "c", "d", "b", "m", "p", "h", "g", "v", "n", "zr", "o", "i", "Qr", "y", "w", "A", "_", "C", "O", "j", "T", "S", "W", "Pn", "E", "_n", "Rr", "Te", "_e", "M", "gn", "Se", "yn", "Tn", "Hr", "Cn", "oe", "N", "k", "U", "G", "Y", "z", "V", "q", "J", "X", "K", "Q", "Z", "ee", "re", "te", "ne", "ae", "ue", "se", "Ee", "xe", "He", "De", "Ie", "Ue", "Ge", "$e", "Ye", "Le", "ze", "Ve", "qe", "Je", "Xe", "Ze", "ir", "ur", "hr", "vr", "yr", "wr", "Ar", "Er", "Mr", "xr", "Nr", "Dr", "Pr", "Br", 
"Ir", "$r", "rt", "nt", "it", "cr", "ut", "dt", "bt", "mt", "pt", "ht", "gt", "vt", "yt", "wt", "At", "_t", "Ct", "St", "Wt", "Et", "Mt", "xt", "Rt", "Ht", "Dt", "Ft", "Pt", "It", "Ut", "Bt", "Gt", "$t", "Yt", "qt", "Jt", "Zt", "en", "he", "rn", "tn", "nn", "on", "me", "ln", "cn", "ie", "Re", "s", "l", "et", "fe", "le", "ce", "ge", "Oe", "Ce", "ve", "we", "de", "be", "xn", "Mn", "On", "En", "pe", "ye", "Ae", "jn", "je", "Wn", "Nn", "We", "Me", "Ne", "ke", "Fe", "Pe", "Be", "Ke", "Qe", "er", "rr", "tr", "nr", "ar", "or", "sr", "fr", "lr", "dr", "br", "mr", "pr", "gr", "An", "wn", "_r", "Cr", "Or", "jr", "Tr", "Sr", "Wr", "kr", "Sn", "Fr", "mn", "Ur", "hn", "Gr", "Yr", "Lr", "Vr", "qr", "Jr", "Xr", "Kr", "Zr", "Hn", "pn", "kn", "Dn", "tt", "at", "ot", "st", "ft", "lt", "ct", "Ot", "jt", "Tt", "Nt", "kt", "Lt", "zt", "Vt", "Xt", "Kt", "Qt", "an", "sn", "un", "fn", "dn", "bn", "Rn", "vn", "Fn", "scriptSrc", "origin", "isSameOrigin", "preload", "createProxyWorker", "importProxyWorker", "embeddedWasmModule", "importWasmModule", "init_wasm_utils_import", "__esmMin", "init_wasm_utils_env", "filename", "prefixOverride", "baseUrl", "absoluteUrl", "blob", "url", "urlOverride", "isMultiThreaded", "wasm", "initialized", "initializing", "aborted", "isMultiThreadSupported", "isSimdSupported", "initializeWebAssembly", "getInstance", "init_wasm_factory", "__esmMin", "init_wasm_utils_import", "flags", "timeout", "numThreads", "multiThreadSupported", "wasmPaths", "wasmPrefixOverride", "mjsPathOverrideFlag", "mjsPathOverride", "wasmPathOverrideFlag", "wasmPathOverride", "objectUrl", "ortWasmFactory", "importWasmModule", "isTimeout", "tasks", "resolve", "reject", "config", "fileName", "scriptDirectory", "module", "what", "allocWasmString", "iterateExtraOptions", "checkLastError", "init_wasm_utils", "__esmMin", "init_wasm_factory", "data", "allocs", "wasm", "getInstance", "dataLength", "dataOffset", "options", "prefix", "seen", "handler", "key", "value", "name", "message", "stack", "paramsOffset", "errorCode", "errorMessagePointer", "errorMessage", "setRunOptions", "init_run_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "options", "wasm", "getInstance", "runOptionsHandle", "allocs", "runOptions", "tagDataOffset", "allocWasmString", "checkLastError", "iterateExtraOptions", "key", "value", "keyDataOffset", "valueDataOffset", "e", "alloc", "getGraphOptimzationLevel", "getExecutionMode", "appendDefaultOptions", "setExecutionProviders", "setSessionOptions", "init_session_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "graphOptimizationLevel", "executionMode", "options", "session", "ep", "sessionOptionsHandle", "executionProviders", "allocs", "epName", "deviceType", "keyDataOffset", "allocWasmString", "valueDataOffset", "getInstance", "checkLastError", "webgpuOptions", "epNameDataOffset", "wasm", "sessionOptions", "logIdDataOffset", "logSeverityLevel", "logVerbosityLevel", "optimizedModelFilePathOffset", "name", "value", "nameOffset", "iterateExtraOptions", "key", "e", "alloc", "tensorDataTypeStringToEnum", "tensorDataTypeEnumToString", "getTensorElementSize", "tensorTypeToTypedArrayConstructor", "logLevelStringToEnum", "isGpuBufferSupportedType", "dataLocationStringToEnum", "init_wasm_common", "__esmMin", "type", "typeProto", "dateType", "logLevel", "location", "loadFile", "init_wasm_utils_load_file", "__esmMin", "init_wasm_utils_env", "file", "readFile", "createReadStream", "stream", "chunks", "chunk", "response", "contentLengthHeader", "fileSize", "reader", "buffer", "e", "pages", 
"offset", "done", "value", "chunkSize", "logLevelPrefix", "doLog", "configLogLevel", "debug", "configureLogger", "LOG", "LOG_DEBUG", "init_log", "__esmMin", "init_wasm_common", "level", "message", "$configLogLevel", "$debug", "logLevel", "msg", "messageLevel", "logLevelStringToEnum", "configLevel", "args", "createView", "init_tensor_view", "__esmMin", "init_wasm_common", "dataBuffer", "type", "tensorTypeToTypedArrayConstructor", "init_types", "__esmMin", "bucketFreelist", "bucketArr", "calcNormalizedBufferSize", "calcBucketBufferSize", "guid", "createNewGpuDataId", "downloadGpuData", "GpuDataManagerImpl", "createGpuDataManager", "init_gpu_data_manager", "__esmMin", "init_log", "init_types", "size", "idx", "sizeForBucket", "backend", "gpuBuffer", "originalSize", "getTargetBuffer", "bufferSize", "gpuReadBuffer", "commandEncoder", "arrayBuffer", "targetBuffer", "key", "id", "data", "srcArrayBuffer", "srcOffset", "srcLength", "gpuDataCache", "gpuBufferForUploading", "LOG_DEBUG", "sourceId", "destinationId", "sourceGpuDataCache", "destinationGpuDataCache", "buffer", "previousBuffer", "usage", "isStorage", "isUniform", "buffers", "gpuData", "cachedData", "maxInFreeList", "freelist", "capturedBuffers", "storage", "sessionId", "pendingBuffers", "args", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "MatMulUtil", "BroadcastUtil", "ShapeUtil", "PoolConvUtil", "GemmUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", "a", "b", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "i", "aLen", "bLen", "max", "shape", "finalShape", "inputRank", "finalRank", "_ShapeUtil", "dims", "size", "rank", "newDims", "axis", "start", "end", "strides", "tensorRank", "axes", "x", "perm", "v", "pad", "shape1", "shape2", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "dim", "isChannelLast", "autoPad", "outputDims", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "leftShape", "transLeft", "rightShape", "transRight", "biasShape", "M", "K", "N", "kDim", "WORKGROUP_SIZE", "getWgslMappedType", "tensorTypeToWsglStorageType", "tensorTypeToWsglValueType", "createTensorShapeVariables", "getMaxComponents", "fillVector", "castToF32", "sumVector", "getElementAt", "createIndicesHelper", "inputVariable", "outputVariable", "internalVariable", "ShaderHelperImpl", "createShaderHelper", "getBroadcastDims", "init_common", "__esmMin", "init_wasm_common", "init_util", "type", "components", "mappedType", "dims", "programUniforms", "dim", "ShapeUtil", "size", "dataType", "value", "name", "index", "length", "tensorType", "shapeOrRank", "usage", "useUniform", "rank", "rankIdentity", "indicesType", "valueType", "storageType", "normalizeDim", "implementationUsed", "uniformPrefix", "shape", "strides", "o2iSnippet", "i", "offsetToIndicesImplementation", "offsetToIndices", "varOffset", "offsets", "indicesToOffsetImplementation", "indicesToOffset", "varIndices", "indices", "init", "indicesGet", "idx", "indicesSet", "broadcastedIndicesToOffsetImplementation", "broadcastedIndicesToOffset", "output", "implKey", "setByOffset", "offset", "getByOffset", "getByIndicesImplementation", "getImplementation", "functionParams", "dimsParams", "get", "normalizedIndices", "getByIndices", "setByIndicesImplementation", "setImplementation", "impls", "needShapeStrides", "impl", "indicesAndValue", "normalizedDispatchGroup", "limits", "workgroupSize", 
"workgroupSizeX", "workgroupSizeY", "workgroupSizeZ", "is1DimensionDispatch", "paramList", "globalIdxDefinition", "variable", "bindingIndex", "access", "variables", "v", "additionalUniforms", "uniformSnippets", "typeTemp", "uniformWgslTypeToDataType", "u", "dispatchGroup", "inShape", "outShape", "inRank", "a", "validateInputs", "getAdjustedPerm", "getOutputShape", "permFunctionBody", "createTransposeProgramInfo", "transpose", "parseTransposeAttributes", "init_transpose", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "inputRank", "perm", "inputShape", "ShapeUtil", "rank", "input", "output", "reverseFunc", "inputTensor", "permAttr", "inputDataType", "outputShape", "outputVariable", "inputVariable", "getShaderSource", "wgslType", "workgroupSize", "shaderHelper", "outputSize", "createTensorShapeVariables", "context", "attributes", "createAttributeWithCacheKey", "reduceOps", "reduceSharedOps", "reduceInitValues", "reduceOutputValues", "getInnerMostAxes", "computeOutAndReduceShapes", "expandShapeToKeepDim", "areAxesInnerMostDims", "getAxesPermutation", "createReduceSharedProgramInfo", "reduceCommon", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "init_reduce_shared", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_reduce", "init_transpose", "numInnerAxes", "rank", "res", "i", "shape", "axes", "outputShape", "dim", "reduceShape", "expandShape", "shapeIdx", "axis", "name", "shaderCache", "inputs", "reduceType", "outputDataType", "inputShape", "outputSize", "ShapeUtil", "reduceSize", "input", "inputVariable", "output", "outputVariable", "workgroupSize", "sharedMemorySnippet", "shaderHelper", "context", "attributes", "updatedAttributes", "createReduceAttributesFromInputs", "updatedAxes", "_dim", "normalizeAxes", "permutedAxes", "createTransposeProgramInfo", "finalOutputShape", "validateInputs", "noOp", "createReduceProgramInfo", "createReduceAttributesFromInputs", "runReduceProgram", "reduceLogSumNaive", "reduceL1Naive", "reduceL2Naive", "reduceLogSumExpNaive", "reduceMaxNaive", "reduceMeanNaive", "reduceMinNaive", "reduceProdNaive", "reduceSumNaive", "reduceSumSquareNaive", "useNaiveReduceMethod", "reduceMean", "reduceL1", "reduceL2", "reduceLogSumExp", "reduceMax", "reduceMin", "reduceProd", "reduceSum", "reduceSumSquare", "reduceLogSum", "init_reduce", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "init_reduce_shared", "inputs", "input", "name", "shaderCache", "reduceOp", "axesInput", "outputDataType", "keepDims", "noopWithEmptyAxes", "outputShape", "inputShape", "inputRank", "axes", "ShapeUtil", "reduceOnAllAxes", "d", "i", "outputRank", "outputSize", "shaderHelper", "idxCopy", "inputVariable", "output", "outputVariable", "ops", "reduceOps", "k", "l", "createTensorShapeVariables", "attributes", "v", "createAttributeWithCacheKey", "context", "updatedAttributes", "_output", "idxZero", "size", "shape", "reduceSize", "dim", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "validateInputs", "argMin", "argMax", "parseArgMinMaxAttributes", "init_argminmax", "__esmMin", "init_wasm_common", "init_attribute_with_cache_key", "init_reduce", "inputs", "context", "attributes", 
"argMinMaxOp", "input", "output", "axes", "idxZero", "k", "createReduceProgramInfo", "createAttributeWithCacheKey", "validateAttentionInputs", "createInPlaceSoftmaxProgramInfo", "createAttentionProbsProgramInfo", "createVxAttentionScoreProgramInfo", "applyAttention", "prepare", "attention", "init_attention", "__esmMin", "init_wasm_common", "init_types", "init_common", "inputs", "attributes", "input", "weights", "bias", "maskIndex", "past", "relativePositionBias", "batchSize", "sequenceLength", "inputHiddenSize", "qHiddenSize", "kHiddenSize", "vHiddenSize", "sz", "kvSequenceLength", "pastSequenceLength", "totalSequenceLength", "maxSequenceLength", "maskType", "_context", "d", "components", "getMaxComponents", "WG", "dComp", "elementsPerThread", "programUniforms", "dataType", "tensorTypeToWsglStorageType", "f32Type", "tensorTypeToWsglValueType", "getShaderSource", "shaderHelper", "inputHelper", "outputVariable", "uniforms", "context", "q", "key", "pastKey", "parameters", "probsShape", "presentKey", "presentKeyShape", "alpha", "vectorizedHeadSize", "TILE_SIZE", "dispatch", "inputDependencies", "outputs", "qInput", "inputVariable", "kInput", "inputVars", "pastKeyInput", "output", "outputVars", "probs", "v", "pastValue", "params", "nReps", "repeatedVHiddenSize", "presentValue", "presentValueShape", "outputShape", "probsHelper", "vHelper", "k", "_maskIndex", "_past", "outputCount", "inputsK", "inputsV", "M", "K", "N", "outputQ", "outputK", "outputV", "weight", "validateInputs", "createBatchNormInferenceProgramInfo", "parseBatchNormAttributes", "batchNorm", "init_batch_norm", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "checkShapeEqual", "actual", "expected", "message", "r", "v", "i", "shape", "epsilon", "spatial", "format", "yShape", "components", "getMaxComponents", "cComponents", "outputSize", "ShapeUtil", "useShapesUniforms", "shapeOrRank", "x", "inputVariable", "scale", "bias", "inputMean", "inputVar", "y", "outputVariable", "calcCOffset", "cOffset", "getInferenceModeShaderSource", "helper", "createTensorShapeVariables", "createAttributeWithCacheKey", "context", "outputCount", "updatedAttributes", "env", "validateInputs", "createBiasAddProgramInfo", "biasAdd", "init_bias_add", "__esmMin", "init_util", "init_common", "inputs", "outputShape", "channels", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "bias", "residual", "output", "outputVariable", "shaderHelper", "context", "createElementwiseProgramShader", "createElementwiseProgramInfo", "abs", "acos", "acosh", "asin", "asinh", "atan", "atanh", "parseCastAttributes", "cast", "generateClipAttributesFromInputs", "clip", "ceil", "cos", "cosh", "parseAlphaAttributes", "elu", "erfImpl", "erf", "exp", "floor", "gelu", "leakyRelu", "not", "neg", "reciprocal", "relu", "sigmoid", "parseHardSigmoidAttributes", "hardSigmoid", "sin", "sinh", "sqrt", "tan", "tanhExpression", "tanh", "fastGeluImpl", "fastGeluExpression", "fastGelu", "thresholdedRelu", "log", "quickGeluImpl", "quickGeluExpression", "quickgelu", "init_unary_op", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "shaderHelper", "datasize", "inputDataType", "outputDataType", "funcCall", "additionalImplementation", "vecSize", "expression", "input", "inputVariable", "output", "outputVariable", "name", "cacheKey", "ShapeUtil", "inputTensors", "context", "attributes", "createAttributeWithCacheKey", "func", "inputs", "min", "MIN_CLIP", "max", "MAX_CLIP", 
"clipAttributes", "dataType", "tensorTypeToWsglValueType", "a", "varType", "x", "alpha", "dType", "validateInputs", "createBiasSplitGeluProgramInfo", "biasSplitGelu", "init_bias_split_gelu", "__esmMin", "init_util", "init_common", "init_unary_op", "inputs", "outputShape", "input", "inputVariable", "bias", "output", "outputVariable", "outputSize", "ShapeUtil", "dataType", "tensorTypeToWsglStorageType", "shaderHelper", "erfImpl", "context", "createBinaryOpProgramShader", "createBinaryOpProgramInfo", "runBinaryOp", "add", "div", "equal", "mul", "pow", "sub", "greater", "less", "greaterOrEqual", "lessOrEqual", "init_binary_op", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "dimsA", "dimsB", "dimsOutput", "vectorize", "doBroadcast", "sharedDimensionDivisibleBy4", "funcCall", "typeA", "typeB", "typeOutput", "additionalImplementation", "expressionScalar", "expressionVector", "a", "b", "output", "outputVariable", "inputVariable", "assignment", "isAOneElement", "ShapeUtil", "isBOneElement", "aLastDimDivisibleBy4", "bLastDimDivisibleBy4", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "name", "cacheKey", "outputDataType", "isBroadcast", "outputShape", "outputSize", "cacheKeyAux", "calculatedShape", "BroadcastUtil", "sharedDimension", "i", "dimA", "dimB", "createTensorShapeVariables", "context", "type", "validateInputs", "calculateInputIndexImpl", "assignOutputData", "createConcatProgramInfo", "concat", "parseConcatAttributes", "init_concat", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "axis", "referenceIndex", "referenceInput", "inputType", "inputRank", "input", "i", "dim", "numberOfTensors", "sizeInConcatAxisStr", "output", "codeLines", "returnSnippet", "adjustedAxis", "outputShape", "dataType", "outputSize", "ShapeUtil", "sizeInConcatAxis", "inputVars", "previousSum", "inputDependencies", "inputRanks", "programUniforms", "inputVariable", "createTensorShapeVariables", "outputVariable", "indicesAxis", "getShaderSource", "shaderHelper", "context", "attributes", "inputShape", "sum", "nonEmptyInputs", "createAttributeWithCacheKey", "getActivationSnippet", "appendActivationUniformsData", "appendActivationUniforms", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_wasm_common", "init_util", "attributes", "valueType", "baseType", "programUniform", "uniforms", "activation", "alpha", "beta", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "typeSnippet", "biasSnippet", "init_activation_util", "__esmMin", "component", "dataType", "hasBias", "utilFunctions", "init_conv_util", "__esmMin", "strideStr", "writeDataToSubAVec4Snippet", "calculateResultSnippet", "makeMatMulPackedVec4Source", "writeDataToSubASnippet", "readDataFromSubASnippet", "makeMatMulPackedSource", "matMulReadWriteFnSource", "createMatmulProgramInfo", "init_matmul_packed_webgpu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_fuse_utils", "init_activation_util", "transpose", "batchDims", "transposeA", "innerElementSize", "workPerThread", "workgroupSize", "type", "tileInner", "splitK", "splitedDimInner", "tileAOuter", "tileBOuter", "tileAWidth", "tileAHight", "rowPerThreadB", "sequentialAccessByThreads", "rowPerThreadA", "colPerThreadA", "matmulSnippet", "component", "hasBias", "applyActivation", "variables", "batchShapes", "isChannelsLast", "batchAShape", "batchBShape", "batchShape", "batchVariable", "aVariable", "bVariable", "outputVariable", "broadCastADims", "getBroadcastDims", 
"broadCastBDims", "dataType", "tensorTypeToWsglStorageType", "getAIndices", "aRank", "batchRank", "resStr", "i", "j", "getBIndices", "bRank", "typeSnippet", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "aShape", "bShape", "outerDimsA", "outerDimsB", "outerDims", "batchSize", "ShapeUtil", "dimAOuter", "dimInner", "dimBOuter", "isVec4", "elementsPerThread", "dispatch", "components", "aShapeTemp", "bShapeTemp", "outputShapeTemp", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "internalVariable", "A", "inputVariable", "B", "output", "inputVariables", "biasComponents", "uniforms", "appendActivationUniforms", "baseType", "getActivationSnippet", "declareFunctions", "conv2dCommonSnippet", "createConv2DMatMulProgramInfo", "init_conv2d_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "fitAOuter", "fitBOuter", "fitInner", "addBias", "attributes", "innerElementSizeX", "innerElementSizeW", "innerElementSize", "dataType", "getXSnippet", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readXSnippet", "typeSnippet", "sampleX", "sampleW", "resType", "aType", "bType", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileAOuter", "tileBOuter", "tileInner", "elementsSize", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "uniforms", "appendActivationUniforms", "components", "t", "tensorTypeToWsglStorageType", "declareFunctions", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "bias", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "arrayProduct", "parse3TupleParam", "getEffectiveFilterSize", "computeDefaultPad", "computeOutputShape4D", "get3DPadAndOutInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "init_conv3d_naive_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "arr", "product", "i", "param", "filterSize", "dilation", "inputShape", "fieldSize", "stride", "effectiveFieldSize", "inShape", "filterShape", "outChannels", "strides", "zeroPad", "outShape", "index", "pad", "inDepth", "inHeight", "inWidth", "strideDepth", "strideHeight", "strideWidth", "filterDepth", "filterHeight", "filterWidth", "padInfo", "outDepth", "outHeight", "outWidth", "val", "_", "padAlongDepth", "padAlongHeight", "padAlongWidth", "front", "back", "top", "bottom", "left", "right", "dilations", "depthwise", "dataFormat", "batchSize", "inChannels", "filterChannels", "dilationDepth", "dilationHeight", "dilationWidth", "effectiveFilterDepth", "effectiveFilterHeight", "effectiveFilterWidth", "inputs", "attributes", "outputShape", "filterDims", "pads", "isChannelsLast", "isVec4", "workGroupSize", "dispatchLayout", "dispatch", "d", "LOG_DEBUG", "innerElementSize", "outputSize", "ShapeUtil", "programUniforms", "createTensorShapeVariables", "inputDependencies", "hasBias", "getShaderSource", "shaderHelper", "uniforms", "components", "t", "tensorTypeToWsglStorageType", "x", "inputVariable", "w", "inputVariables", 
"output", "outputVariable", "declareFunctions", "bias", "getElementAt", "createGroupedConvProgramInfo", "createGroupedConvVectorizeProgramInfo", "init_conv_grouped", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_conv", "init_fuse_utils", "inputs", "attributes", "squeezeOutputShapeFunction", "hasBias", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "isChannelLast", "outputShape", "calculateOutputShape", "outputSize", "ShapeUtil", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "x", "inputVariable", "w", "inputVars", "uniforms", "appendActivationUniforms", "components", "getMaxComponents", "outputNumber", "outputShapeInShader", "xNumber", "createNaiveMatmulProgramInfo", "validateInputs", "matMul", "init_matmul", "__esmMin", "init_wasm_common", "init_util", "init_matmul_packed_webgpu", "init_common", "init_fuse_utils", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "isChannelsLast", "aShape", "bShape", "M", "N", "K", "components", "getMaxComponents", "aComponents", "outputNumber", "outputSize", "ShapeUtil", "hasBias", "outerDims", "outputShapeInShader", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "batchDims", "internalVariable", "a", "inputVariable", "b", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "inputVariables", "processBias", "biasComponents", "outerDimsA", "outerDimsB", "broadCastADims", "getBroadcastDims", "broadCastBDims", "uniforms", "appendActivationUniforms", "getIndices", "variable", "broadCastDims", "rank", "name", "batchRank", "resStr", "i", "j", "calcResult", "calcStr", "context", "BroadcastUtil", "createMatmulProgramInfo", "calculateOutputShape", "weightTransposeAttribute", "validateInputs", "getAdjustedConvAttributes", "parseConvAttributes", "conv2d", "conv1d", "conv3d", "conv", "init_conv", "__esmMin", "init_util", "init_conv2d_mm_webgpu", "init_conv3d_naive_webgpu", "init_matmul_packed_webgpu", "init_conv_grouped", "init_fuse_utils", "init_matmul", "init_transpose", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "isChannelLast", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputShape", "inputs", "attributes", "dataChannel", "filterInChannel", "pads", "PoolConvUtil", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "autoPad", "group", "wIsConst", "context", "adjustedAttributes", "isChannelsLast", "transposedWeight", "createTransposeProgramInfo", "convInputs", "createGroupedConvVectorizeProgramInfo", "createGroupedConvProgramInfo", "hasBias", "inputHeight", "inputWidth", "inputChannels", "weightHeight", "weightWidth", "outHeight", "outWidth", "sameSize", "batch", "xReshaped", "wReshaped", "matmulOutputShape", "matmulInputs", "sharedDim", "N", "K", "createNaiveMatmulProgramInfo", "createMatmulProgramInfo", "sequentialAccessByThreads", "dimAOuter", "dimBOuter", "dimInner", "createConv2DMatMulProgramInfo", "convInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "conv2dTransposeCommonSnippet", "createConv2DTransposeMatMulProgramInfo", "init_conv_backprop_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", 
"init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "addBias", "attributes", "type", "innerElementSize", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readASnippet", "sampleA", "sampleW", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileInner", "components", "filterDims", "effectiveFilterDims", "pads", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "x", "inputVariable", "w", "output", "outputVariable", "inputVariables", "declareFunctions", "bias", "uniforms", "appendActivationUniforms", "elemType", "tensorTypeToWsglStorageType", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "createConvTranspose2DOpProgramShaderSource", "createConvTranspose2DProgramInfo", "init_conv_backprop_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "shaderHelper", "inputs", "outputShape", "hasBias", "is1DimensionDispatch", "isVec4", "dataType", "uniforms", "isChannelsLast", "rowDim", "colDim", "channelDim", "workPerThread", "declareFunctions", "components", "w", "inputVariable", "dy", "inputVariables", "output", "outputVariable", "codeSnippet4", "codeSnippet", "attributes", "squeezeOutputShapeFunction", "outputSize", "ShapeUtil", "dispatch", "LOG_DEBUG", "inputDependencies", "strides", "filterDims", "dilations", "effectiveFilterDims", "pads", "group", "wShape", "inputChannelsPerGroup", "outputChannelsPerGroup", "programUniforms", "createTensorShapeVariables", "getShaderSource", "tensorTypeToWsglStorageType", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "weightTransposePerm", "convTranspose2d", "convTranspose1d", "convTranspose", "init_conv_transpose", "__esmMin", "init_conv_backprop_mm_webgpu", "init_conv_backprop_webgpu", "init_fuse_utils", "init_transpose", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "group", "strides", "isChannelLast", "outputPadding", "outputShape", "spatialRank", "updateOutputShape", "i", "batchSize", "outChannels", "j", "inSize", "attributes", "inputs", "a", "b", "isChannelsLast", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "wIsConst", "dataChannel", "filterInChannel", "featureMaps", "context", "adjustedAttributes", "inputChannels", "createConvTranspose2DProgramInfo", "outHeight", "outWidth", "weightHeight", "weightWidth", "dimAOuter", "dimBOuter", "dimInner", "sequentialAccessByThreads", "transposedWeight", "createTransposeProgramInfo", "convTransposeInputs", "hasBias", "createConv2DTransposeMatMulProgramInfo", "createCumsumProgramInfo", "cumsum", "parseCumSumAttributes", "init_cumsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputType", "inputShape", "axisInput", "attributes", "outputSize", "ShapeUtil", "rank", "input", "inputVariable", "output", "outputVariable", "axisValue", "axis", "getShaderSource", "shaderHelper", "index", "max", "getElementAt", "lowerLimit", 
"upperLimit", "createTensorShapeVariables", "context", "exclusive", "reverse", "createAttributeWithCacheKey", "validateInputs", "permFunctionBody", "createDepthToSpaceProgramInfo", "depthToSpace", "parseDepthToSpaceAttributes", "init_depth_to_space", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "perm", "rank", "input", "output", "reverseFunc", "inputTensor", "attributes", "h", "w", "c", "shape", "isChannelLast", "blocksize", "isDCRmode", "reshapedInputTensor", "reshapedInputRank", "inputDataType", "reshapedInput", "inputVariable", "permedOutput", "outputVariable", "getShaderSource", "shaderHelper", "outputShape", "outputSize", "ShapeUtil", "shapeBeforePerm", "shapeAfterPerm", "createTensorShapeVariables", "context", "createAttributeWithCacheKey", "symbolPattern", "termPattern", "termPatternOnly", "lhsPattern", "lhsPatternOnly", "EinsumTerm", "EinsumEquation", "appendMax", "createEinsumProgramInfo", "einsum", "parseEinsumAttributes", "init_einsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputIndex", "symbol", "index", "value", "inputs", "equation", "lhs", "rhs", "inputTerm", "dims", "einsumTerm", "sym", "info", "dimValue", "term", "isInput", "rank", "ellipsis", "ellipsisDims", "nextDim", "indexSymbols", "i", "ellipsisDimLength", "j", "name", "inputShapes", "dataType", "einsumEquation", "outputShape", "inputVars", "inputVariable", "outputSize", "ShapeUtil", "output", "outputVariable", "uniformsSymbols", "getShaderSource", "shaderHelper", "idxCopy", "initProd", "initSum", "updateSum", "reduceOpsSetIndices", "reduceOpsLoopHeaders", "reduceOpsLoopFooters", "reduceOpCompute", "isReduceOpsWithoutLoop", "outputIndex", "indices", "reduceOps", "inputVar", "_var", "programUniformsInit", "programUniforms", "_", "createTensorShapeVariables", "acc", "inputProgramUniforms", "context", "attributes", "input", "createAttributeWithCacheKey", "validateInputs", "getAdjustedShape", "calculateOutputShape", "createExpandProgramInfo", "expand", "init_expand", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "inputShape", "shape", "shapeIndex", "inputShapeIndex", "shape1", "shape2", "diff", "i", "outputShape", "dataType", "components", "outputSize", "ShapeUtil", "getShaderSource", "shaderHelper", "input", "inputVariable", "output", "outputVariable", "assignment", "singleAssignment", "resStr", "x", "typeCast", "programUniforms", "createTensorShapeVariables", "context", "createFastGeluProgramInfo", "fastGelu", "init_fast_gelu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_unary_op", "inputTensors", "dataType", "outputSize", "ShapeUtil", "biasLength", "useVec4", "getShaderSource", "shaderHelper", "x", "inputVariable", "bias", "y", "outputVariable", "uniforms", "singleElementBias", "i", "biasGetExpression", "fastGeluImpl", "tensorTypeToWsglValueType", "WORKGROUP_SIZE", "fastGeluExpression", "inputs", "context", "validateInputs", "createGatherProgramInfo", "parseGatherAttributes", "gather", "init_gather", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "indicesShape", "inputRank", "axis", "ShapeUtil", "outputShape", "axisDimLimit", "components", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "data", "inputVariable", "indices", "output", "outputVariable", "calcDataIndices", "x", "indicesRank", "calcStr", "i", "j", "assignment", 
"singleAssignment", "resStr", "typeCast", "createAttributeWithCacheKey", "context", "validateInputs", "createGatherElementsProgramInfo", "parseGatherElementsAttributes", "gatherElements", "init_gather_elements", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "inputOutputDataType", "inputRank", "indicesShape", "indicesDataType", "axis", "ShapeUtil", "axisDimLimit", "outputShape", "outputSize", "input", "inputVariable", "indices", "output", "outputVariable", "programUniforms", "createTensorShapeVariables", "shaderHelper", "createAttributeWithCacheKey", "context", "validateInputs", "createGemmProgramInfo", "parseGemmAttributes", "gemm", "init_gemm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "aShape", "bShape", "M", "N", "K", "GemmUtil", "outputShape", "outputSize", "ShapeUtil", "programUniforms", "inputDependencies", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "line", "calculateAlpha", "a", "inputVariable", "b", "dataType", "c", "variables", "output", "outputVariable", "uniforms", "transA", "transB", "alpha", "beta", "context", "getInput", "validateInputs", "parseMultiHeadAttentionAttributes", "weightTransposeAttribute", "addBiasTranspose", "maybeTransposeToBNSHAndAddBias", "multiHeadAttention", "init_multihead_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_types", "init_attention", "init_common", "init_transpose", "inputs", "i", "ShapeUtil", "attributes", "query", "key", "value", "bias", "keyPaddingMask", "relativePositionBias", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "qkvFormat", "maskType", "maskDims", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "createAttributeWithCacheKey", "context", "qkv", "biasOffset", "outputShape", "outputSize", "programUniforms", "getShaderSource", "shaderHelper", "output", "outputVariable", "qkvInput", "inputVariable", "biasInput", "uniforms", "numHeads", "input", "reshapedInput", "createTransposeProgramInfo", "params", "kvBNSH", "Q", "applyAttention", "K", "V", "getRepeats", "validateInputs", "getOutputShape", "createTileProgramInfo", "tile", "init_tile", "__esmMin", "init_wasm_common", "init_util", "init_common", "repeatsTensorView", "inputs", "inputShape", "repeats", "outputShape", "i", "shape", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "output", "outputVariable", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "validateInputs", "createConcatProgramInfo", "parseGroupQueryAttentionAttributes", "weightTransposeAttribute", "maybeExpandAndTransposeToBNSH", "groupQueryAttention", "init_group_query_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_attention", "init_common", "init_multihead_attention", "init_tile", "init_transpose", "inputs", "attributes", "query", "key", "value", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "hasPastKey", "hasPastValue", "isPastkvBSNH", "qkvFormat", "maskType", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "a", "b", "dataType", "params", "outputShape", "component", "outputSize", "ShapeUtil", "presentSequenceLength", "output", "outputVariable", "inputA", 
"inputVariable", "inputB", "H", "dispatch", "inputDependencies", "programUniforms", "createTensorShapeVariables", "uniforms", "pastStr", "newStr", "concatStr", "getShaderSource", "shaderHelper", "createAttributeWithCacheKey", "context", "input", "pastKV", "outputIndex", "reshapedInput", "numHeads", "nReps", "createTileProgramInfo", "createTransposeProgramInfo", "Q", "maybeTransposeToBNSHAndAddBias", "K", "V", "applyAttention", "createInstanceNormProgramInfo", "computeMean", "createInstanceNormNHWCProgramInfo", "instanceNorm", "init_instance_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "xShape", "outputShape", "axis", "normCount", "ShapeUtil", "normSize", "components", "getMaxComponents", "normPackedSize", "inputShape", "inputDependencies", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "x", "inputVariable", "scale", "bias", "output", "outputVariable", "variables", "dataType", "f32Type", "workgroupSize", "uniforms", "sumVector", "context", "input", "n", "h", "c", "epsilon", "WG", "outputType", "sumCastType", "setOutputValue", "var1", "var2", "unitsOfWork", "wgSize", "meanInputDependencies", "meanProgramUniforms", "getMeanShaderSource", "inputHelper", "fillVector", "meanValues", "scaleHelper", "biasHelper", "N", "C", "H", "outputSize", "channelScaleShift", "tensorTypeToWsglStorageType", "scaleType", "scaleCastType", "outputHelper", "validateInputs", "createLayerNormProgramInfo", "layerNorm", "init_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "outputCount", "simplified", "xShape", "scale", "bias", "outputShape", "axis", "ShapeUtil", "normCount", "normSize", "scaleSize", "biasSize", "meanInvStdDevDim", "i", "components", "getMaxComponents", "inputDependencies", "programUniforms", "hasMeanDataOutput", "hasInvStdOutput", "getShaderSource", "shaderHelper", "dataType", "tensorTypeToWsglStorageType", "variables", "inputVariable", "outputVariable", "uniforms", "fillVector", "castToF32", "sumVector", "outputs", "context", "validateInputs", "createMatMulNBitsProgramInfo", "matMulNBits", "parseMatMulNBitsAttributes", "init_matmulnbits", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "a", "aRank", "nBlocksPerCol", "blobSize", "b", "ShapeUtil", "scalesShape", "zeroPointsShape", "expectedZeroPointsSize", "maxComputeWorkgroupSizes", "maxComputeWorkgroupStorageSize", "inputShape", "dimAOuter", "dimInner", "dimBOuter", "batchDims", "batchSize", "blobSizeInWords", "dataType", "outputNumber", "getMaxComponents", "aComponents", "bComponents", "elementSize", "getTensorElementSize", "workgroupOutputSize", "maxNumberOfComponents", "useBlockwiseMatMulNBits", "components", "outputShape", "outputSize", "programUniforms", "inputShapeTemp", "bShape", "createTensorShapeVariables", "outputShapeTemp", "getShaderSource", "shaderHelper", "inputRank", "inputVariable", "scales", "inputVariables", "zeroPoints", "outputRank", "output", "outputVariable", "uniforms", "tensorTypeToWsglStorageType", "qDqDataType", "processOneBlock", "_", "i", "updateZeroPointIndex", "context", "createAttributeWithCacheKey", "validateInputs", "getPadConstant", "getPadReflect", "getPadEdge", "getPadWrap", "getPadSnippet", "createPadProgramInfo", "createPadAttributesFromInputs", "pad", "init_pad", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "validPads", "output", "inputRank", "padsLength", "block", "i", "getElementAt", 
"attributes", "outputShape", "ShapeUtil", "inputDims", "outputSize", "programUniforms", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "outputVariable", "input", "inputVariable", "dataType", "padSnippet", "uniforms", "bigInt64Pads", "value", "updatePads", "axes", "v", "pads", "context", "updatedAttributes", "validateInputs", "getAdjustedPoolAttributesAndOutputShape", "getUniformAndPadInfo", "generatePoolingCode", "createShaderKeyFromAttributes", "createAveragePoolShaderKeyFromAttributes", "createMaxPoolShaderKeyFromAttributes", "parsePoolCommonAttributes", "createAveragePoolProgramInfo", "parseAveragePoolAttributes", "averagePool", "globalPoolAttributes", "parseGlobalAveragePoolAttributes", "globalAveragePool", "createMaxPoolProgramInfo", "maxPool", "parseMaxPoolAttributes", "parseGlobalMaxPoolAttributes", "globalMaxPool", "init_pool", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_common", "inputs", "env", "input", "attributes", "isGlobalOperator", "isChannelsLast", "inputShapeAsChannelFirst", "hasDilations", "kernelShape", "strides", "dilations", "pads", "PoolConvUtil", "outputShapeAsChannelFirst", "newAttributes", "outputShapeAsChannelLast", "outputShape", "outputSize", "ShapeUtil", "kernelSize", "programUniforms", "uniforms", "kw", "sw", "pwStart", "pwEnd", "pwStartEndNotZero", "phStartEndNotZero", "kh", "sh", "phStart", "phEnd", "kernelStrides", "hasPads", "sum", "cur", "shaderHelper", "x", "rank", "outputShapeRank", "op1", "op2", "start", "dataType", "output", "outputVariable", "codeW", "codeH", "codeHEnd", "dimIdxW", "dimIdxH", "stridesRank", "padsRank", "padCode", "getElementAt", "name", "adjustedAttributes", "inputVariable", "createTensorShapeVariables", "inputDependencies", "countIncludePad", "attr", "averagePoolAttributes", "context", "format", "storageOrder", "maxPoolAttributes", "validateInputsContent", "createRangeProgramInfo", "range", "init_range", "__esmMin", "init_esm", "init_wasm_common", "init_common", "start", "limit", "delta", "sameStartLimit", "increasingRangeNegativeStep", "decreasingRangePositiveStep", "dataType", "numElements", "outputShape", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "output", "outputVariable", "wgslType", "uniforms", "context", "env", "validateScales", "updateScales", "validateInputs", "getOriginalCoordinateFromResizedCoordinate", "getNearestPixelFromOriginal", "updateRoI", "initOutputShape", "adjustOutputShape", "calculateOriginalIndicesFromOutputIndices", "calculateInputIndicesFromOutputIndices", "checkInputIndices", "setChannelAndBatchIndices", "bilinearInterpolation", "bicubicInterpolation", "trilinearInterpolation", "createResizeProgramInfo", "getOpsetVersionFromCustomDataBuffer", "resize", "parseResizeAttributes", "init_resize", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "scales", "attributes", "value", "axes", "rank", "newScales", "index", "inputs", "opsetVersion", "sizes", "roi", "roiInputIndex", "scalesInputIndex", "sizesInputIndex", "coordinateTransferMode", "dType", "nearestMode", "roiTmp", "roiLocal", "v", "i", "inputShape", "outputShape", "scaleInPolicy", "adjustedOutputShape", "output", "scalesLength", "roiLength", "getElementAt", "input", "useExtrapolation", "channelIdx", "batchIdx", "spacialDims", "extrapolationValue", "heightIdx", "widthIdx", "cubicCoeffA", "excludeOutside", "is2D", "isNchw", "createCubicInterpolationFunction", "idx", "direction", "depthIdx", 
"inputTensor", "scalesInput", "roiInput", "outputVariable", "inputVariable", "outputSize", "ShapeUtil", "noScale", "d", "dataType", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "customDataBuffer", "antialias", "coordinateTransformMode", "keepAspectRatioPolicy", "mode", "createAttributeWithCacheKey", "validateInputs", "createRotaryEmbeddingProgramInfo", "rotaryEmbedding", "init_rotary_embedding", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "input", "positionIds", "cosCache", "sinCache", "numHeads", "rotaryEmbeddingDim", "ShapeUtil", "batchSize", "sequenceLength", "maxSequenceLength", "hiddenSize", "headSize", "interleaved", "scale", "batchStride", "halfRotaryEmbeddingDim", "globalShape", "globalStrides", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "inputVariable", "output", "outputVariable", "WORKGROUP_SIZE", "createAttributeWithCacheKey", "context", "validateInputs", "createSkipLayerNormProgramInfo", "skipLayerNorm", "init_skip_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "input", "skip", "gamma", "hiddenSize", "sequenceLength", "beta", "bias", "attributes", "outputCount", "isTraining", "simplified", "inputShape", "inputSize", "ShapeUtil", "outputShape", "outputSize", "meanInvStdDevDim", "hasBetaInput", "hasBiasInput", "hasMeanOutput", "hasInvStdDevOutput", "hasInputSkipBiasSumOutput", "workgroupSize", "components", "getMaxComponents", "programUniforms", "getShaderSource", "shaderHelper", "uniformsArray", "variables", "inputVariable", "outputVariable", "dataType", "tensorTypeToWsglStorageType", "vecDataType", "castToF32", "sumVector", "outputs", "_input", "_index", "context", "validateInputs", "readInput", "createSliceAttributesFromInputs", "fixStartEndValues", "calculateInputIndicesImpl", "createSliceProgramInfo", "slice", "parseSliceAttributes", "init_slice", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "_", "idx", "input", "v", "starts", "ends", "axes", "createAttributeWithCacheKey", "value", "index", "inputShape", "steps", "newValue", "output", "getElementAt", "inputSize", "ShapeUtil", "step", "start", "i", "end", "signs", "array", "numSteps", "newEnd", "newStart", "outputShape", "axis", "outputTensorInfo", "outputVariable", "inputVariable", "outputSize", "uniforms", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "validateInputs", "createSoftmaxProgramInfo", "softmax", "parseSoftmaxAttributes", "init_softmax", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "input", "attributes", "shape", "outputSize", "ShapeUtil", "WG", "axis", "cols", "rows", "components", "getMaxComponents", "packedCols", "maxVector", "name", "x", "inputVariable", "output", "outputVariable", "valueType", "threadMaxDecl", "tensorTypeToWsglStorageType", "getShaderSource", "shaderHelper", "sumVector", "context", "createAttributeWithCacheKey", "validateInputs", "createSplitAttributesFromInputs", "calculateOutputIndexImpl", "writeBufferDataImpl", "createSplitProgramInfo", "split", "parseSplitAttributes", "init_split", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "splitSizes", "numOutputs", "v", "createAttributeWithCacheKey", "numberOfTensors", "getElementAt", "outputs", 
"codeLines", "i", "returnSnippet", "inputShape", "inputSize", "ShapeUtil", "dataType", "axis", "input", "inputVariable", "sizeInSplitAxis", "outputsTensorInfo", "outputShapes", "previousSum", "programUniforms", "outputShape", "outputVariable", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "createWhereOpProgramShader", "createWhereOpProgramInfo", "where", "init_where", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "inputs", "dimsOutput", "isBroadcast", "typeOutput", "output", "outputVariable", "a", "inputVariable", "b", "c", "assignment", "expression", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "expressionC", "dimsA", "dimsB", "dimsC", "outputDataType", "ShapeUtil", "outputShape", "outputSize", "calculatedShape", "BroadcastUtil", "vecSize", "createTensorShapeVariables", "context", "WEBGPU_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_argminmax", "init_attention", "init_batch_norm", "init_bias_add", "init_bias_split_gelu", "init_binary_op", "init_concat", "init_conv", "init_conv_transpose", "init_cumsum", "init_depth_to_space", "init_einsum", "init_expand", "init_fast_gelu", "init_gather", "init_gather_elements", "init_gemm", "init_group_query_attention", "init_instance_norm", "init_layer_norm", "init_matmul", "init_matmulnbits", "init_multihead_attention", "init_pad", "init_pool", "init_range", "init_reduce", "init_resize", "init_rotary_embedding", "init_skip_layer_norm", "init_slice", "init_softmax", "init_split", "init_tile", "init_transpose", "init_unary_op", "init_where", "abs", "acos", "acosh", "add", "argMax", "parseArgMinMaxAttributes", "argMin", "asin", "asinh", "atan", "atanh", "attention", "averagePool", "parseAveragePoolAttributes", "batchNorm", "biasAdd", "biasSplitGelu", "cast", "parseCastAttributes", "ceil", "clip", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "cosh", "cumsum", "parseCumSumAttributes", "depthToSpace", "parseDepthToSpaceAttributes", "div", "einsum", "parseEinsumAttributes", "elu", "parseAlphaAttributes", "equal", "erf", "exp", "expand", "fastGelu", "floor", "gather", "parseGatherAttributes", "gatherElements", "parseGatherElementsAttributes", "gelu", "gemm", "parseGemmAttributes", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "parseGlobalMaxPoolAttributes", "greater", "greaterOrEqual", "groupQueryAttention", "parseGroupQueryAttentionAttributes", "hardSigmoid", "parseHardSigmoidAttributes", "instanceNorm", "layerNorm", "leakyRelu", "less", "lessOrEqual", "log", "matMul", "matMulNBits", "parseMatMulNBitsAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "multiHeadAttention", "parseMultiHeadAttentionAttributes", "neg", "not", "pad", "pow", "quickgelu", "range", "reciprocal", "reduceMin", "reduceMean", "reduceMax", "reduceSum", "reduceProd", "reduceL1", "reduceL2", "reduceLogSum", "reduceLogSumExp", "reduceSumSquare", "relu", "resize", "parseResizeAttributes", "rotaryEmbedding", "sigmoid", "sin", "sinh", "slice", "parseSliceAttributes", "skipLayerNorm", "split", "parseSplitAttributes", "sqrt", "softmax", "parseSoftmaxAttributes", "sub", "tan", "tanh", "thresholdedRelu", "tile", "transpose", "parseTransposeAttributes", "where", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_log", "init_common", "backend", "key", "artifact", "buildArtifact", "inputs", "outputs", "dispatchGroup", "uniformBufferBinding", 
"TRACE_FUNC_BEGIN", "device", "computePassEncoder", "entries", "input", "output", "bindGroup", "commandInfo", "TRACE_FUNC_END", "programInfo", "normalizedDispatchGroupSize", "extensions", "shaderHelper", "createShaderHelper", "userCode", "code", "shaderModule", "LOG_DEBUG", "computePipeline", "x", "y", "z", "limitPerDimension", "size", "dispatchAverage", "getProgramInputTensorInfoDependencyKey", "getProgramInfoUniqueKey", "AdapterInfoImpl", "WebGpuBackend", "init_backend_webgpu", "__esmMin", "init_esm", "init_wasm_common", "init_log", "init_tensor_view", "init_gpu_data_manager", "init_op_resolve_rules", "init_program_manager", "inputTensors", "inputDependencies", "inputInfos", "i", "type", "rank", "dims", "programInfo", "is1DimensionDispatch", "key", "adapterInfo", "architecture", "vendor", "data", "env", "adapter", "requiredFeatures", "deviceDescriptor", "createGpuDataManager", "ProgramManager", "configureLogger", "ev", "commandEncoder", "computePassDescriptor", "TRACE_FUNC_BEGIN", "queryReadBuffer", "mappedData", "pendingKernels", "pendingKernelInfo", "kernelId", "kernelInfo", "kernelType", "kernelName", "programName", "inputTensorViews", "outputTensorViews", "startTimeU64", "endTimeU64", "startTime", "endTime", "value", "tensorDataTypeEnumToString", "inputShapes", "outputShapes", "TRACE", "TRACE_FUNC_END", "program", "outputIndices", "createKernelOutput", "createIntermediateOutput", "outputCount", "inputDatas", "gpuData", "outputs", "dispatchGroup", "programUniforms", "validatedOutputIndices", "_", "outputDatas", "isTemporary", "isPersistent", "tensorView", "persistentData", "uniformBufferBinding", "currentOffset", "offsets", "v", "sizeOfElement", "sizeOfVecOrMat", "baseAlignment", "elementPerVecOrMat", "maxAlignmentOfField", "arrayBuffer", "offset", "uniformBufferData", "normalizedDispatchGroup", "artifact", "LOG_DEBUG", "uniform", "actualType", "actualLength", "length", "gpuDataId", "src", "dst", "getTargetBuffer", "size", "ptr", "attribute", "op", "WEBGPU_OP_RESOLVE_RULES", "context", "errors", "kernel", "kernelEntry", "attributes", "useErrorScope", "e", "err", "sessionId", "index", "buffer", "sessionInputOutputMapping", "previousBuffer", "id", "bufferInfo", "gpuBuffer", "downloadGpuData", "createView", "sessionCommandList", "sessionPendingKernels", "computePassEncoder", "command", "init_exports", "__export", "init", "TensorViewImpl", "ComputeContextImpl", "init_init", "__esmMin", "init_wasm_common", "init_backend_webgpu", "init_log", "init_util", "_TensorViewImpl", "module", "dataType", "data", "dims", "elementCount", "ShapeUtil", "newDims", "backend", "contextDataOffset", "heapU32", "dataIndex", "inputCount", "inputs", "i", "dim", "d", "program", "inputsOutputsMapping", "mappedInputs", "outputIndices", "createKernelOutput", "index", "createTemporaryOutput", "elementSize", "getTensorElementSize", "bufferSize", "gpuDataId", "stack", "offset", "e", "name", "env", "gpuAdapter", "jsepInit", "WebGpuBackend", "size", "ptr", "src", "dst", "isSourceGpu", "LOG_DEBUG", "dataOffset", "kernelType", "kernelId", "attribute", "kernel", "sessionHandle", "errors", "context", "initOrt", "initRuntime", "initEp", "activeSessions", "getSessionInputOutputCount", "copyFromExternalBuffer", "createSession", "releaseSession", "prepareInputOutputTensor", "run", "endProfiling", "extractTransferableBuffers", "init_wasm_core_impl", "__esmMin", "init_run_options", "init_session_options", "init_wasm_common", "init_wasm_factory", "init_wasm_utils", "init_wasm_utils_load_file", "numThreads", "loggingLevel", 
"getInstance", "checkLastError", "env", "logLevelStringToEnum", "epName", "initJsep", "adapter", "powerPreference", "forceFallbackAdapter", "sessionHandle", "wasm", "stack", "dataOffset", "model", "modelDataOffset", "modelData", "options", "modelDataLength", "sessionOptionsHandle", "ioBindingHandle", "allocs", "inputNamesUTF8Encoded", "outputNamesUTF8Encoded", "setSessionOptions", "loadingPromises", "file", "path", "loadFile", "data", "provider", "webnnOptions", "context", "gpuDevice", "deviceType", "inputCount", "outputCount", "enableGraphCapture", "inputNames", "outputNames", "outputPreferredLocations", "i", "name", "nameString", "location", "bindingState", "l", "dataLocationStringToEnum", "e", "buf", "alloc", "sessionId", "session", "ioBindingState", "tensor", "tensorHandles", "index", "dataType", "dims", "rawData", "dataByteLength", "gpuBuffer", "elementSizeInBytes", "getTensorElementSize", "tensorDataTypeStringToEnum", "a", "b", "registerBuffer", "dataIndex", "allocWasmString", "dimsOffset", "dimIndex", "d", "inputIndices", "inputTensors", "outputIndices", "outputTensors", "inputOutputBound", "runOptionsHandle", "runOptionsAllocs", "inputTensorHandles", "outputTensorHandles", "inputOutputAllocs", "beforeRunStack", "inputValuesOffset", "inputNamesOffset", "outputValuesOffset", "outputNamesOffset", "setRunOptions", "inputValuesIndex", "inputNamesIndex", "outputValuesIndex", "outputNamesIndex", "handle", "outputPreferredLocationsEncoded", "errorCode", "output", "beforeGetTensorDataStack", "tensorDataOffset", "keepOutputTensor", "type", "tensorDataIndex", "dimsLength", "size", "tensorDataTypeEnumToString", "preferredLocation", "stringData", "offset", "maxBytesToRead", "getBuffer", "elementSize", "isGpuBufferSupportedType", "typedArrayConstructor", "tensorTypeToTypedArrayConstructor", "v", "p", "profileFileName", "tensors", "buffers", "isProxy", "proxyWorker", "initializing", "initialized", "aborted", "temporaryObjectUrl", "initWasmCallbacks", "queuedCallbacks", "enqueueCallbacks", "ensureWorker", "onProxyWorkerMessage", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "copyFromExternalBuffer", "createSession", "releaseSession", "run", "endProfiling", "init_proxy_wrapper", "__esmMin", "init_esm", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "env", "type", "callbacks", "queue", "ev", "resolve", "reject", "importProxyWorker", "objectUrl", "worker", "message", "e", "initializeWebAssembly", "initRuntime", "epName", "initEp", "buffer", "model", "options", "transferable", "sessionId", "inputIndices", "inputs", "outputIndices", "outputs", "t", "serializableInputs", "extractTransferableBuffers", "encodeTensorMetadata", "decodeTensorMetadata", "OnnxruntimeWebAssemblySessionHandler", "init_session_handler_inference", "__esmMin", "init_esm", "init_proxy_wrapper", "init_wasm_common", "init_wasm_utils_env", "init_wasm_utils_load_file", "tensor", "getName", "Tensor", "dataType", "isGpuBufferSupportedType", "gpuBuffer", "download", "dispose", "path", "copyFromExternalBuffer", "loadFile", "pathOrBuffer", "options", "TRACE_FUNC_BEGIN", "model", "createSession", "TRACE_FUNC_END", "releaseSession", "feeds", "fetches", "inputArray", "inputIndices", "kvp", "name", "index", "outputArray", "outputIndices", "inputs", "t", "i", "outputs", "results", "run", "resultMap", "endProfiling", "initializeFlags", "OnnxruntimeWebAssemblyBackend", "init_backend_wasm", "__esmMin", "init_esm", "init_proxy_wrapper", "init_session_handler_inference", "init_wasm_utils_import", "env", 
"numCpuLogicalCores", "backendName", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "pathOrBuffer", "options", "handler", "OnnxruntimeWebAssemblySessionHandler", "backend_wasm_inference_exports", "__export", "wasmBackend", "init_backend_wasm_inference", "__esmMin", "init_backend_wasm", "OnnxruntimeWebAssemblyBackend", "init_esm", "version", "lib_default", "esm_exports", "onnxjsBackend", "registerBackend", "wasmBackend", "env", "version"] } diff --git a/assets/dist/ort.webgpu.bundle.min.mjs b/assets/dist/ort.webgpu.bundle.min.mjs index 8ce72dd..310b3a7 100644 --- a/assets/dist/ort.webgpu.bundle.min.mjs +++ b/assets/dist/ort.webgpu.bundle.min.mjs @@ -3,17 +3,17 @@ * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. */ -var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Object.getOwnPropertyNames;var np=Object.prototype.hasOwnProperty;var Bn=(e=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(e,{get:(t,r)=>(typeof require<"u"?require:t)[r]}):e)(function(e){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+e+'" is not supported')});var U=(e,t)=>()=>(e&&(t=e(e=0)),t);var Lt=(e,t)=>{for(var r in t)Dn(e,r,{get:t[r],enumerable:!0})},op=(e,t,r,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of rp(t))!np.call(e,o)&&o!==r&&Dn(e,o,{get:()=>t[o],enumerable:!(n=tp(t,o))||n.enumerable});return e};var yr=e=>op(Dn({},"__esModule",{value:!0}),e);var br,$t,_t,ip,wr,vr=U(()=>{"use strict";br=new Map,$t=[],_t=(e,t,r)=>{if(t&&typeof t.init=="function"&&typeof t.createInferenceSessionHandler=="function"){let n=br.get(e);if(n===void 0)br.set(e,{backend:t,priority:r});else{if(n.priority>r)return;if(n.priority===r&&n.backend!==t)throw new Error(`cannot register backend "${e}" using priority ${r}`)}if(r>=0){let o=$t.indexOf(e);o!==-1&&$t.splice(o,1);for(let i=0;i<$t.length;i++)if(br.get($t[i]).priority<=r){$t.splice(i,0,e);return}$t.push(e)}return}throw new TypeError("not a valid backend")},ip=async e=>{let t=br.get(e);if(!t)return"backend not found.";if(t.initialized)return t.backend;if(t.aborted)return t.error;{let r=!!t.initPromise;try{return r||(t.initPromise=t.backend.init(e)),await t.initPromise,t.initialized=!0,t.backend}catch(n){return r||(t.error=`${n}`,t.aborted=!0),t.error}finally{delete t.initPromise}}},wr=async e=>{let t=e.executionProviders||[],r=t.map(d=>typeof d=="string"?d:d.name),n=r.length===0?$t:r,o,i=[],a=new Set;for(let d of n){let p=await ip(d);typeof p=="string"?i.push({name:d,err:p}):(o||(o=p),o===p&&a.add(d))}if(!o)throw new Error(`no available backend found. 
ERR: ${i.map(d=>`[${d.name}] ${d.err}`).join(", ")}`);for(let{name:d,err:p}of i)r.includes(d)&&console.warn(`removing requested execution provider "${d}" from session options because it is not available: ${p}`);let l=t.filter(d=>a.has(typeof d=="string"?d:d.name));return[o,new Proxy(e,{get:(d,p)=>p==="executionProviders"?l:Reflect.get(d,p)})]}});var ji=U(()=>{"use strict";vr()});var Yi,Xi=U(()=>{"use strict";Yi="1.19.0"});var Zi,Ne,Rn=U(()=>{"use strict";Xi();Zi="warning",Ne={wasm:{},webgl:{},webgpu:{},versions:{common:Yi},set logLevel(e){if(e!==void 0){if(typeof e!="string"||["verbose","info","warning","error","fatal"].indexOf(e)===-1)throw new Error(`Unsupported logging level: ${e}`);Zi=e}},get logLevel(){return Zi}};Object.defineProperty(Ne,"logLevel",{enumerable:!0})});var ye,Qi=U(()=>{"use strict";Rn();ye=Ne});var Ji,ea,ta=U(()=>{"use strict";Ji=(e,t)=>{let r=typeof document<"u"?document.createElement("canvas"):new OffscreenCanvas(1,1);r.width=e.dims[3],r.height=e.dims[2];let n=r.getContext("2d");if(n!=null){let o,i;t?.tensorLayout!==void 0&&t.tensorLayout==="NHWC"?(o=e.dims[2],i=e.dims[3]):(o=e.dims[3],i=e.dims[2]);let a=t?.format!==void 0?t.format:"RGB",l=t?.norm,d,p;l===void 0||l.mean===void 0?d=[255,255,255,255]:typeof l.mean=="number"?d=[l.mean,l.mean,l.mean,l.mean]:(d=[l.mean[0],l.mean[1],l.mean[2],0],l.mean[3]!==void 0&&(d[3]=l.mean[3])),l===void 0||l.bias===void 0?p=[0,0,0,0]:typeof l.bias=="number"?p=[l.bias,l.bias,l.bias,l.bias]:(p=[l.bias[0],l.bias[1],l.bias[2],0],l.bias[3]!==void 0&&(p[3]=l.bias[3]));let m=i*o,u=0,h=m,w=m*2,g=-1;a==="RGBA"?(u=0,h=m,w=m*2,g=m*3):a==="RGB"?(u=0,h=m,w=m*2):a==="RBG"&&(u=0,w=m,h=m*2);for(let b=0;b{let r=typeof document<"u"?document.createElement("canvas").getContext("2d"):new OffscreenCanvas(1,1).getContext("2d"),n;if(r!=null){let o,i,a;t?.tensorLayout!==void 0&&t.tensorLayout==="NHWC"?(o=e.dims[2],i=e.dims[1],a=e.dims[3]):(o=e.dims[3],i=e.dims[2],a=e.dims[1]);let l=t!==void 0&&t.format!==void 0?t.format:"RGB",d=t?.norm,p,m;d===void 0||d.mean===void 0?p=[255,255,255,255]:typeof d.mean=="number"?p=[d.mean,d.mean,d.mean,d.mean]:(p=[d.mean[0],d.mean[1],d.mean[2],255],d.mean[3]!==void 0&&(p[3]=d.mean[3])),d===void 0||d.bias===void 0?m=[0,0,0,0]:typeof d.bias=="number"?m=[d.bias,d.bias,d.bias,d.bias]:(m=[d.bias[0],d.bias[1],d.bias[2],0],d.bias[3]!==void 0&&(m[3]=d.bias[3]));let u=i*o;if(t!==void 0&&(t.format!==void 0&&a===4&&t.format!=="RGBA"||a===3&&t.format!=="RGB"&&t.format!=="BGR"))throw new Error("Tensor format doesn't match input tensor dims");let h=4,w=0,g=1,b=2,x=3,_=0,$=u,S=u*2,I=-1;l==="RGBA"?(_=0,$=u,S=u*2,I=u*3):l==="RGB"?(_=0,$=u,S=u*2):l==="RBG"&&(_=0,S=u,$=u*2),n=r.createImageData(o,i);for(let T=0;T{"use strict";$r();Mn=(e,t)=>{if(e===void 0)throw new Error("Image buffer must be defined");if(t.height===void 0||t.width===void 0)throw new Error("Image height and width must be defined");if(t.tensorLayout==="NHWC")throw new Error("NHWC Tensor layout is not supported yet");let{height:r,width:n}=t,o=t.norm??{mean:255,bias:0},i,a;typeof o.mean=="number"?i=[o.mean,o.mean,o.mean,o.mean]:i=[o.mean[0],o.mean[1],o.mean[2],o.mean[3]??255],typeof o.bias=="number"?a=[o.bias,o.bias,o.bias,o.bias]:a=[o.bias[0],o.bias[1],o.bias[2],o.bias[3]??0];let l=t.format!==void 0?t.format:"RGBA",d=t.tensorFormat!==void 0&&t.tensorFormat!==void 0?t.tensorFormat:"RGB",p=r*n,m=d==="RGBA"?new Float32Array(p*4):new 
Float32Array(p*3),u=4,h=0,w=1,g=2,b=3,x=0,_=p,$=p*2,S=-1;l==="RGB"&&(u=3,h=0,w=1,g=2,b=-1),d==="RGBA"?S=p*3:d==="RBG"?(x=0,$=p,_=p*2):d==="BGR"&&($=0,_=p,x=p*2);for(let T=0;T{let r=typeof HTMLImageElement<"u"&&e instanceof HTMLImageElement,n=typeof ImageData<"u"&&e instanceof ImageData,o=typeof ImageBitmap<"u"&&e instanceof ImageBitmap,i=typeof e=="string",a,l=t??{},d=()=>{if(typeof document<"u")return document.createElement("canvas");if(typeof OffscreenCanvas<"u")return new OffscreenCanvas(1,1);throw new Error("Canvas is not supported")},p=m=>m instanceof HTMLCanvasElement||m instanceof OffscreenCanvas?m.getContext("2d"):null;if(r){let m=d();m.width=e.width,m.height=e.height;let u=p(m);if(u!=null){let h=e.height,w=e.width;if(t!==void 0&&t.resizedHeight!==void 0&&t.resizedWidth!==void 0&&(h=t.resizedHeight,w=t.resizedWidth),t!==void 0){if(l=t,t.tensorFormat!==void 0)throw new Error("Image input config format must be RGBA for HTMLImageElement");l.tensorFormat="RGBA",l.height=h,l.width=w}else l.tensorFormat="RGBA",l.height=h,l.width=w;u.drawImage(e,0,0),a=u.getImageData(0,0,w,h).data}else throw new Error("Can not access image data")}else if(n){let m,u;if(t!==void 0&&t.resizedWidth!==void 0&&t.resizedHeight!==void 0?(m=t.resizedHeight,u=t.resizedWidth):(m=e.height,u=e.width),t!==void 0&&(l=t),l.format="RGBA",l.height=m,l.width=u,t!==void 0){let h=d();h.width=u,h.height=m;let w=p(h);if(w!=null)w.putImageData(e,0,0),a=w.getImageData(0,0,u,m).data;else throw new Error("Can not access image data")}else a=e.data}else if(o){if(t===void 0)throw new Error("Please provide image config with format for Imagebitmap");let m=d();m.width=e.width,m.height=e.height;let u=p(m);if(u!=null){let h=e.height,w=e.width;return u.drawImage(e,0,0,w,h),a=u.getImageData(0,0,w,h).data,l.height=h,l.width=w,Mn(a,l)}else throw new Error("Can not access image data")}else{if(i)return new Promise((m,u)=>{let h=d(),w=p(h);if(!e||!w)return u();let g=new Image;g.crossOrigin="Anonymous",g.src=e,g.onload=()=>{h.width=g.width,h.height=g.height,w.drawImage(g,0,0,h.width,h.height);let b=w.getImageData(0,0,h.width,h.height);l.height=h.height,l.width=h.width,m(Mn(b.data,l))}});throw new Error("Input data provided is not supported - aborted tensor creation")}if(a!==void 0)return Mn(a,l);throw new Error("Input data provided is not supported - aborted tensor creation")},na=(e,t)=>{let{width:r,height:n,download:o,dispose:i}=t,a=[1,n,r,4];return new Re({location:"texture",type:"float32",texture:e,dims:a,download:o,dispose:i})},oa=(e,t)=>{let{dataType:r,dims:n,download:o,dispose:i}=t;return new Re({location:"gpu-buffer",type:r??"float32",gpuBuffer:e,dims:n,download:o,dispose:i})},ia=(e,t,r)=>new Re({location:"cpu-pinned",type:e,data:t,dims:r??[t.length]})});var xt,Ft,sa,ua,da=U(()=>{"use strict";xt=new Map([["float32",Float32Array],["uint8",Uint8Array],["int8",Int8Array],["uint16",Uint16Array],["int16",Int16Array],["int32",Int32Array],["bool",Uint8Array],["float64",Float64Array],["uint32",Uint32Array]]),Ft=new Map([[Float32Array,"float32"],[Uint8Array,"uint8"],[Int8Array,"int8"],[Uint16Array,"uint16"],[Int16Array,"int16"],[Int32Array,"int32"],[Float64Array,"float64"],[Uint32Array,"uint32"]]),sa=!1,ua=()=>{if(!sa){sa=!0;let e=typeof BigInt64Array<"u"&&BigInt64Array.from,t=typeof BigUint64Array<"u"&&BigUint64Array.from,r=typeof 
Float16Array<"u"&&Float16Array.from;e&&(xt.set("int64",BigInt64Array),Ft.set(BigInt64Array,"int64")),t&&(xt.set("uint64",BigUint64Array),Ft.set(BigUint64Array,"uint64")),r?(xt.set("float16",Float16Array),Ft.set(Float16Array,"float16")):xt.set("float16",Uint16Array)}}});var la,ca,pa=U(()=>{"use strict";$r();la=e=>{let t=1;for(let r=0;r{switch(e.location){case"cpu":return new Re(e.type,e.data,t);case"cpu-pinned":return new Re({location:"cpu-pinned",data:e.data,type:e.type,dims:t});case"texture":return new Re({location:"texture",texture:e.texture,type:e.type,dims:t});case"gpu-buffer":return new Re({location:"gpu-buffer",gpuBuffer:e.gpuBuffer,type:e.type,dims:t});default:throw new Error(`tensorReshape: tensor location ${e.location} is not supported`)}}});var Re,$r=U(()=>{"use strict";ta();aa();da();pa();Re=class{constructor(t,r,n){ua();let o,i;if(typeof t=="object"&&"location"in t)switch(this.dataLocation=t.location,o=t.type,i=t.dims,t.location){case"cpu-pinned":{let l=xt.get(o);if(!l)throw new TypeError(`unsupported type "${o}" to create tensor from pinned buffer`);if(!(t.data instanceof l))throw new TypeError(`buffer should be of type ${l.name}`);this.cpuData=t.data;break}case"texture":{if(o!=="float32")throw new TypeError(`unsupported type "${o}" to create tensor from texture`);this.gpuTextureData=t.texture,this.downloader=t.download,this.disposer=t.dispose;break}case"gpu-buffer":{if(o!=="float32"&&o!=="float16"&&o!=="int32"&&o!=="int64"&&o!=="uint32"&&o!=="uint8"&&o!=="bool")throw new TypeError(`unsupported type "${o}" to create tensor from gpu buffer`);this.gpuBufferData=t.gpuBuffer,this.downloader=t.download,this.disposer=t.dispose;break}default:throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`)}else{let l,d;if(typeof t=="string")if(o=t,d=n,t==="string"){if(!Array.isArray(r))throw new TypeError("A string tensor's data must be a string array.");l=r}else{let p=xt.get(t);if(p===void 0)throw new TypeError(`Unsupported tensor type: ${t}.`);if(Array.isArray(r)){if(t==="float16"&&p===Uint16Array)throw new TypeError("Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.");t==="uint64"||t==="int64"?l=p.from(r,BigInt):l=p.from(r)}else if(r instanceof p)l=r;else throw new TypeError(`A ${o} tensor's data must be type of ${p}`)}else if(d=r,Array.isArray(t)){if(t.length===0)throw new TypeError("Tensor type cannot be inferred from an empty array.");let p=typeof t[0];if(p==="string")o="string",l=t;else if(p==="boolean")o="bool",l=Uint8Array.from(t);else throw new TypeError(`Invalid element type of data array: ${p}.`)}else{let p=Ft.get(t.constructor);if(p===void 0)throw new TypeError(`Unsupported type for tensor data: ${t.constructor}.`);o=p,l=t}if(d===void 0)d=[l.length];else if(!Array.isArray(d))throw new TypeError("A tensor's dims must be a number array");i=d,this.cpuData=l,this.dataLocation="cpu"}let a=la(i);if(this.cpuData&&a!==this.cpuData.length)throw new Error(`Tensor's size(${a}) does not match data length(${this.cpuData.length}).`);this.type=o,this.dims=i,this.size=a}static async fromImage(t,r){return ra(t,r)}static fromTexture(t,r){return na(t,r)}static fromGpuBuffer(t,r){return oa(t,r)}static fromPinnedBuffer(t,r,n){return ia(t,r,n)}toDataURL(t){return Ji(this,t)}toImageData(t){return ea(this,t)}get data(){if(this.ensureValid(),!this.cpuData)throw new Error("The data is not on CPU. 
Use `getData()` to download GPU data to CPU, or use `texture` or `gpuBuffer` property to access the GPU data directly.");return this.cpuData}get location(){return this.dataLocation}get texture(){if(this.ensureValid(),!this.gpuTextureData)throw new Error("The data is not stored as a WebGL texture.");return this.gpuTextureData}get gpuBuffer(){if(this.ensureValid(),!this.gpuBufferData)throw new Error("The data is not stored as a WebGPU buffer.");return this.gpuBufferData}async getData(t){switch(this.ensureValid(),this.dataLocation){case"cpu":case"cpu-pinned":return this.data;case"texture":case"gpu-buffer":{if(!this.downloader)throw new Error("The current tensor is not created with a specified data downloader.");if(this.isDownloading)throw new Error("The current tensor is being downloaded.");try{this.isDownloading=!0;let r=await this.downloader();return this.downloader=void 0,this.dataLocation="cpu",this.cpuData=r,t&&this.disposer&&(this.disposer(),this.disposer=void 0),r}finally{this.isDownloading=!1}}default:throw new Error(`cannot get data from location: ${this.dataLocation}`)}}dispose(){if(this.isDownloading)throw new Error("The current tensor is being downloaded.");this.disposer&&(this.disposer(),this.disposer=void 0),this.cpuData=void 0,this.gpuTextureData=void 0,this.gpuBufferData=void 0,this.downloader=void 0,this.isDownloading=void 0,this.dataLocation="none"}ensureValid(){if(this.dataLocation==="none")throw new Error("The tensor is disposed.")}reshape(t){if(this.ensureValid(),this.downloader||this.disposer)throw new Error("Cannot reshape a tensor that owns GPU resource.");return ca(this,t)}}});var ze,_r=U(()=>{"use strict";$r();ze=Re});var xr,ma,We,Me,Un=U(()=>{"use strict";Rn();xr=(e,t)=>{(typeof Ne.trace>"u"?!Ne.wasm.trace:!Ne.trace)||console.timeStamp(`${e}::ORT::${t}`)},ma=(e,t)=>{let r=new Error().stack?.split(/\r\n|\r|\n/g)||[],n=!1;for(let o=0;o{(typeof Ne.trace>"u"?!Ne.wasm.trace:!Ne.trace)||ma("BEGIN",e)},Me=e=>{(typeof Ne.trace>"u"?!Ne.wasm.trace:!Ne.trace)||ma("END",e)}});var Sr,fa=U(()=>{"use strict";vr();_r();Un();Sr=class e{constructor(t){this.handler=t}async run(t,r,n){We();let o={},i={};if(typeof t!="object"||t===null||t instanceof ze||Array.isArray(t))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let a=!0;if(typeof r=="object"){if(r===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(r instanceof ze)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(r)){if(r.length===0)throw new TypeError("'fetches' cannot be an empty array.");a=!1;for(let p of r){if(typeof p!="string")throw new TypeError("'fetches' must be a string array or an object.");if(this.outputNames.indexOf(p)===-1)throw new RangeError(`'fetches' contains invalid output name: ${p}.`);o[p]=null}if(typeof n=="object"&&n!==null)i=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else{let p=!1,m=Object.getOwnPropertyNames(r);for(let u of this.outputNames)if(m.indexOf(u)!==-1){let h=r[u];(h===null||h instanceof ze)&&(p=!0,a=!1,o[u]=h)}if(p){if(typeof n=="object"&&n!==null)i=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else i=r}}else if(typeof r<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let p of this.inputNames)if(typeof t[p]>"u")throw new Error(`input '${p}' is missing in 'feeds'.`);if(a)for(let p of this.outputNames)o[p]=null;let l=await this.handler.run(t,o,i),d={};for(let p in 
l)if(Object.hasOwnProperty.call(l,p)){let m=l[p];m instanceof ze?d[p]=m:d[p]=new ze(m.type,m.data,m.dims)}return Me(),d}async release(){return this.handler.dispose()}static async create(t,r,n,o){We();let i,a={};if(typeof t=="string"){if(i=t,typeof r=="object"&&r!==null)a=r;else if(typeof r<"u")throw new TypeError("'options' must be an object.")}else if(t instanceof Uint8Array){if(i=t,typeof r=="object"&&r!==null)a=r;else if(typeof r<"u")throw new TypeError("'options' must be an object.")}else if(t instanceof ArrayBuffer||typeof SharedArrayBuffer<"u"&&t instanceof SharedArrayBuffer){let m=t,u=0,h=t.byteLength;if(typeof r=="object"&&r!==null)a=r;else if(typeof r=="number"){if(u=r,!Number.isSafeInteger(u))throw new RangeError("'byteOffset' must be an integer.");if(u<0||u>=m.byteLength)throw new RangeError(`'byteOffset' is out of range [0, ${m.byteLength}).`);if(h=t.byteLength-u,typeof n=="number"){if(h=n,!Number.isSafeInteger(h))throw new RangeError("'byteLength' must be an integer.");if(h<=0||u+h>m.byteLength)throw new RangeError(`'byteLength' is out of range (0, ${m.byteLength-u}].`);if(typeof o=="object"&&o!==null)a=o;else if(typeof o<"u")throw new TypeError("'options' must be an object.")}else if(typeof n<"u")throw new TypeError("'byteLength' must be a number.")}else if(typeof r<"u")throw new TypeError("'options' must be an object.");i=new Uint8Array(m,u,h)}else throw new TypeError("Unexpected argument[0]: must be 'path' or 'buffer'.");let[l,d]=await wr(a),p=await l.createInferenceSessionHandler(i,d);return Me(),new e(p)}startProfiling(){this.handler.startProfiling()}endProfiling(){this.handler.endProfiling()}get inputNames(){return this.handler.inputNames}get outputNames(){return this.handler.outputNames}}});var ap,ha=U(()=>{"use strict";fa();ap=Sr});var ga=U(()=>{"use strict"});var ya=U(()=>{"use strict"});var ba=U(()=>{"use strict"});var wa=U(()=>{"use strict"});var sp,Ir,va=U(()=>{"use strict";vr();_r();sp="Training backend could not be resolved. 
Make sure you're using the correct configuration & WebAssembly files.",Ir=class e{constructor(t,r,n){this.handler=t,this.hasOptimizerModel=r,this.hasEvalModel=n}get trainingInputNames(){return this.handler.inputNames}get trainingOutputNames(){return this.handler.outputNames}get evalInputNames(){if(this.hasEvalModel)return this.handler.evalInputNames;throw new Error("This training session has no evalModel loaded.")}get evalOutputNames(){if(this.hasEvalModel)return this.handler.evalOutputNames;throw new Error("This training session has no evalModel loaded.")}static async create(t,r){let n=t.evalModel||"",o=t.optimizerModel||"",i=r||{},[a,l]=await wr(i);if(a.createTrainingSessionHandler){let d=await a.createTrainingSessionHandler(t.checkpointState,t.trainModel,n,o,l);return new e(d,!!t.optimizerModel,!!t.evalModel)}else throw new Error(sp)}typeNarrowingForRunStep(t,r,n,o,i){let a={},l={};if(typeof n!="object"||n===null||n instanceof ze||Array.isArray(n))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let d=!0;if(typeof o=="object"){if(o===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(o instanceof ze)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(o)){if(o.length===0)throw new TypeError("'fetches' cannot be an empty array.");d=!1;for(let p of o){if(typeof p!="string")throw new TypeError("'fetches' must be a string array or an object.");if(r.indexOf(p)===-1)throw new RangeError(`'fetches' contains invalid output name: ${p}.`);a[p]=null}if(typeof i=="object"&&i!==null)l=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else{let p=!1,m=Object.getOwnPropertyNames(o);for(let u of r)if(m.indexOf(u)!==-1){let h=o[u];(h===null||h instanceof ze)&&(p=!0,d=!1,a[u]=h)}if(p){if(typeof i=="object"&&i!==null)l=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else l=o}}else if(typeof o<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let p of t)if(typeof n[p]>"u")throw new Error(`input '${p}' is missing in 'feeds'.`);if(d)for(let p of r)a[p]=null;return[a,l]}convertHandlerReturnTypeToMapOfTensors(t){let r={};for(let n in t)if(Object.hasOwnProperty.call(t,n)){let o=t[n];o instanceof ze?r[n]=o:r[n]=new ze(o.type,o.data,o.dims)}return r}async lazyResetGrad(){await this.handler.lazyResetGrad()}async runTrainStep(t,r,n){let[o,i]=this.typeNarrowingForRunStep(this.trainingInputNames,this.trainingOutputNames,t,r,n),a=await this.handler.runTrainStep(t,o,i);return this.convertHandlerReturnTypeToMapOfTensors(a)}async runOptimizerStep(t){if(this.hasOptimizerModel)await this.handler.runOptimizerStep(t||{});else throw new Error("This TrainingSession has no OptimizerModel loaded.")}async runEvalStep(t,r,n){if(this.hasEvalModel){let[o,i]=this.typeNarrowingForRunStep(this.evalInputNames,this.evalOutputNames,t,r,n),a=await this.handler.runEvalStep(t,o,i);return this.convertHandlerReturnTypeToMapOfTensors(a)}else throw new Error("This TrainingSession has no EvalModel loaded.")}async getParametersSize(t=!0){return this.handler.getParametersSize(t)}async loadParametersBuffer(t,r=!0){let n=await this.getParametersSize(r);if(t.length!==4*n)throw new Error("Size of the buffer passed into loadParametersBuffer must match the number of parameters in the model. 
Please use getParametersSize method to check.");return this.handler.loadParametersBuffer(t,r)}async getContiguousParameters(t=!0){return this.handler.getContiguousParameters(t)}async release(){return this.handler.dispose()}}});var up,$a=U(()=>{"use strict";va();up=Ir});var Vn={};Lt(Vn,{InferenceSession:()=>ap,TRACE:()=>xr,TRACE_FUNC_BEGIN:()=>We,TRACE_FUNC_END:()=>Me,Tensor:()=>ze,TrainingSession:()=>up,env:()=>ye,registerBackend:()=>_t});var Le=U(()=>{"use strict";ji();Qi();ha();_r();ga();ya();Un();ba();wa();$a()});var Cr=U(()=>{"use strict"});var Ia={};Lt(Ia,{default:()=>dp});var xa,Sa,dp,Ca=U(()=>{"use strict";Nn();St();qt();xa="ort-wasm-proxy-worker",Sa=globalThis.self?.name===xa;Sa&&(self.onmessage=e=>{let{type:t,in:r}=e.data;try{switch(t){case"init-wasm":Tr(r.wasm).then(()=>{Ar(r).then(()=>{postMessage({type:t})},n=>{postMessage({type:t,err:n})})},n=>{postMessage({type:t,err:n})});break;case"init-ep":{let{epName:n,env:o}=r;Er(o,n).then(()=>{postMessage({type:t})},i=>{postMessage({type:t,err:i})});break}case"copy-from":{let{buffer:n}=r,o=Kt(n);postMessage({type:t,out:o});break}case"create":{let{model:n,options:o}=r;kr(n,o).then(i=>{postMessage({type:t,out:i})},i=>{postMessage({type:t,err:i})});break}case"release":Pr(r),postMessage({type:t});break;case"run":{let{sessionId:n,inputIndices:o,inputs:i,outputIndices:a,options:l}=r;Or(n,o,i,a,new Array(a.length).fill(null),l).then(d=>{d.some(p=>p[3]!=="cpu")?postMessage({type:t,err:"Proxy does not support non-cpu tensor location."}):postMessage({type:t,out:d},Dr([...i,...d]))},d=>{postMessage({type:t,err:d})});break}case"end-profiling":zr(r),postMessage({type:t});break;default:}}catch(n){postMessage({type:t,err:n})}});dp=Sa?null:e=>new Worker(e??Mt,{type:"module",name:xa})});var Aa={};Lt(Aa,{default:()=>lp});var Wn,Ta,lp,Ea=U(()=>{"use strict";Ta=(Wn=import.meta.url,async function(e={}){function t(){return X.buffer!=ie.buffer&&ve(),ie}function r(){return X.buffer!=ie.buffer&&ve(),le}function n(){return X.buffer!=ie.buffer&&ve(),se}function o(){return X.buffer!=ie.buffer&&ve(),Z}function i(){return X.buffer!=ie.buffer&&ve(),re}function a(){return X.buffer!=ie.buffer&&ve(),J}function l(){return X.buffer!=ie.buffer&&ve(),Pe}function d(){return X.buffer!=ie.buffer&&ve(),ue}var p,m,u=Object.assign({},e),h=new Promise((s,c)=>{p=s,m=c}),w=typeof window=="object",g=typeof importScripts=="function",b=g&&self.name=="em-pthread";u.mountExternalData=(s,c)=>{(u.Cb||(u.Cb=new Map)).set(s,c)},u.unmountExternalData=()=>{delete u.Cb};var x=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let _=()=>{let s=(f,y,v)=>(...C)=>{let O=Je,B=y?.();C=f(...C);let N=y?.();return B!==N&&(f=N,v(B),y=v=null),Je!=O?new Promise((L,j)=>{Cn={resolve:L,reject:j}}):C},c=f=>async(...y)=>{try{if(u.Bb)throw Error("Session already started");let v=u.Bb={Zb:y[0],errors:[]},C=await f(...y);if(u.Bb!==v)throw Error("Session mismatch");u.Jb?.flush();let O=v.errors;if(0N),0u._OrtCreateSession,f=>u._OrtCreateSession=f),u._OrtRun=c(s(u._OrtRun,()=>u._OrtRun,f=>u._OrtRun=f)),u._OrtRunWithBinding=c(s(u._OrtRunWithBinding,()=>u._OrtRunWithBinding,f=>u._OrtRunWithBinding=f)),u._OrtBindInput=s(u._OrtBindInput,()=>u._OrtBindInput,f=>u._OrtBindInput=f),_=void 0};u.jsepInit=(s,c)=>{if(_?.(),s==="webgpu"){[u.Jb,u.Qb,u.Ub,u.Kb,u.Tb,u.gb,u.Vb,u.Xb,u.Rb,u.Sb,u.Wb]=c;let 
f=u.Jb;u.jsepRegisterBuffer=(y,v,C,O)=>f.registerBuffer(y,v,C,O),u.jsepGetBuffer=y=>f.getBuffer(y),u.jsepCreateDownloader=(y,v,C)=>f.createDownloader(y,v,C),u.jsepOnReleaseSession=y=>{f.onReleaseSession(y)},u.jsepOnRunStart=y=>f.onRunStart(y)}};var $,S,I=Object.assign({},u),T="./this.program",A=(s,c)=>{throw c},z="";(w||g)&&(g?z=self.location.href:typeof document<"u"&&document.currentScript&&(z=document.currentScript.src),Wn&&(z=Wn),z=z.startsWith("blob:")?"":z.substr(0,z.replace(/[?#].*/,"").lastIndexOf("/")+1),g&&(S=s=>{var c=new XMLHttpRequest;return c.open("GET",s,!1),c.responseType="arraybuffer",c.send(null),new Uint8Array(c.response)}),$=(s,c,f)=>{var y=new XMLHttpRequest;y.open("GET",s,!0),y.responseType="arraybuffer",y.onload=()=>{y.status==200||y.status==0&&y.response?c(y.response):f()},y.onerror=f,y.send(null)});var D=console.log.bind(console),H=console.error.bind(console),W=D,F=H;if(Object.assign(u,I),I=null,b){let s=function(c){try{var f=c.data,y=f.cmd;if(y==="load"){let v=[];self.onmessage=C=>v.push(C),self.startWorker=()=>{postMessage({cmd:"loaded"});for(let C of v)s(C);self.onmessage=s};for(let C of f.handlers)u[C]&&!u[C].proxy||(u[C]=(...O)=>{postMessage({Ib:"callHandler",hc:C,args:O})},C=="print"&&(W=u[C]),C=="printErr"&&(F=u[C]));X=f.wasmMemory,ve(),de(f.wasmModule)}else if(y==="run"){kn(f.pthread_ptr,0,0,1,0,0),xn(f.pthread_ptr),Nl(),Vo(),ce||(Ri(),ce=!0);try{Wl(f.start_routine,f.arg)}catch(v){if(v!="unwind")throw v}}else y==="cancel"?Rt()&&hr(-1):f.target!=="setimmediate"&&(y==="checkMailbox"?ce&&ar():y&&(F(`worker: received unknown command ${y}`),F(f)))}catch(v){throw Mi(),v}};var _h=s,de,ce=!1;F=function(...c){c=c.join(" "),console.error(c)},self.alert=function(...c){postMessage({Ib:"alert",text:c.join(" "),jc:Rt()})},u.instantiateWasm=(c,f)=>new Promise(y=>{de=v=>{v=new WebAssembly.Instance(v,zo()),f(v),y()}}),self.onunhandledrejection=c=>{throw c.reason||c},self.onmessage=s}var X,xe,q,ie,le,se,Z,re,J,Pe,R,Y,ue,Te=!1;function ve(){var s=X.buffer;u.HEAP8=ie=new Int8Array(s),u.HEAP16=se=new Int16Array(s),u.HEAPU8=le=new Uint8Array(s),u.HEAPU16=Z=new Uint16Array(s),u.HEAP32=re=new Int32Array(s),u.HEAPU32=J=new Uint32Array(s),u.HEAPF32=Pe=new Float32Array(s),u.HEAPF64=ue=new Float64Array(s),u.HEAP64=R=new BigInt64Array(s),u.HEAPU64=Y=new BigUint64Array(s)}if(!b){if(u.wasmMemory)X=u.wasmMemory;else if(!((X=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof x))throw F("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"),Error("bad memory");ve()}var Se=[],Ot=[],Be=[],Ae=0,Xe=null,lt=null;function or(){if(--Ae==0&&(Xe!==null&&(clearInterval(Xe),Xe=null),lt)){var s=lt;lt=null,s()}}function Gt(s){throw F(s="Aborted("+s+")"),Te=!0,q=1,s=new WebAssembly.RuntimeError(s+". 
Build with -sASSERTIONS for more info."),m(s),s}var Ao,Eo=s=>s.startsWith("data:application/octet-stream;base64,"),ko=s=>s.startsWith("file://");function Po(s){if(S)return S(s);throw"both async and sync fetching of the wasm failed"}function Oo(s,c,f){return function(y){if(w||g){if(typeof fetch=="function"&&!ko(y))return fetch(y,{credentials:"same-origin"}).then(v=>{if(!v.ok)throw`failed to load wasm binary file at '${y}'`;return v.arrayBuffer()}).catch(()=>Po(y));if($)return new Promise((v,C)=>{$(y,O=>v(new Uint8Array(O)),C)})}return Promise.resolve().then(()=>Po(y))}(s).then(y=>WebAssembly.instantiate(y,c)).then(f,y=>{F(`failed to asynchronously prepare wasm: ${y}`),Gt(y)})}function zo(){return{a:{wa:Vl,b:Gl,Y:Go,y:qo,ma:Ko,U:Yo,W:Xo,na:Zo,ka:Qo,da:Jo,ja:ei,I:ti,V:ri,S:ni,la:oi,T:ii,sa:Ll,C:ql,M:Kl,L:Yl,B:Zl,s:Ql,p:Jl,D:ec,x:sc,N:uc,ra:dc,ga:lc,Q:cc,Z:pc,E:mc,fa:xn,pa:fc,u:hc,A:bc,o:wc,k:$c,c:$n,n:_c,j:Ic,xa:Cc,r:Tc,d:Ac,v:Ec,m:kc,g:Pc,l:Oc,i:zc,h:Dc,e:Bc,aa:Rc,ba:Mc,ca:Uc,_:wi,$:vi,P:Vc,f:Nc,K:Wc,F:Hc,J:Gc,ta:Lc,oa:Fc,R:qc,t:_i,w:Kc,O:jc,va:Yc,ua:Xc,ha:Ii,ia:Ci,X:gn,z:Ti,H:Ai,ea:Ei,G:ki,a:X,qa:zi,q:Jc}}}var mn={1336340:(s,c,f,y)=>{if(u===void 0||!u.Cb)return 1;if((s=ke(s>>>0)).startsWith("./")&&(s=s.substring(2)),!(s=u.Cb.get(s)))return 2;if(y>>>=0,(c>>>=0)+(f>>>=0)>s.byteLength)return 3;try{return r().set(s.subarray(c,c+f),y>>>0),0}catch{return 4}},1336841:()=>{u.Rb()},1336872:()=>{u.Sb()},1336901:()=>{u.Wb()},1336926:s=>u.Qb(s),1336959:s=>u.Ub(s),1336991:(s,c,f)=>{u.Kb(s,c,f,!0)},1337030:(s,c,f)=>{u.Kb(s,c,f)},1337063:()=>typeof wasmOffsetConverter<"u",1337120:s=>{u.gb("Abs",s,void 0)},1337171:s=>{u.gb("Neg",s,void 0)},1337222:s=>{u.gb("Floor",s,void 0)},1337275:s=>{u.gb("Ceil",s,void 0)},1337327:s=>{u.gb("Reciprocal",s,void 0)},1337385:s=>{u.gb("Sqrt",s,void 0)},1337437:s=>{u.gb("Exp",s,void 0)},1337488:s=>{u.gb("Erf",s,void 0)},1337539:s=>{u.gb("Sigmoid",s,void 0)},1337594:(s,c,f)=>{u.gb("HardSigmoid",s,{alpha:c,beta:f})},1337673:s=>{u.gb("Log",s,void 0)},1337724:s=>{u.gb("Sin",s,void 0)},1337775:s=>{u.gb("Cos",s,void 0)},1337826:s=>{u.gb("Tan",s,void 0)},1337877:s=>{u.gb("Asin",s,void 0)},1337929:s=>{u.gb("Acos",s,void 0)},1337981:s=>{u.gb("Atan",s,void 0)},1338033:s=>{u.gb("Sinh",s,void 0)},1338085:s=>{u.gb("Cosh",s,void 0)},1338137:s=>{u.gb("Asinh",s,void 0)},1338190:s=>{u.gb("Acosh",s,void 0)},1338243:s=>{u.gb("Atanh",s,void 0)},1338296:s=>{u.gb("Tanh",s,void 0)},1338348:s=>{u.gb("Not",s,void 0)},1338399:(s,c,f)=>{u.gb("Clip",s,{min:c,max:f})},1338468:s=>{u.gb("Clip",s,void 0)},1338520:(s,c)=>{u.gb("Elu",s,{alpha:c})},1338578:s=>{u.gb("Relu",s,void 0)},1338630:(s,c)=>{u.gb("LeakyRelu",s,{alpha:c})},1338694:(s,c)=>{u.gb("ThresholdedRelu",s,{alpha:c})},1338764:(s,c)=>{u.gb("Cast",s,{to:c})},1338822:s=>{u.gb("Add",s,void 0)},1338873:s=>{u.gb("Sub",s,void 0)},1338924:s=>{u.gb("Mul",s,void 0)},1338975:s=>{u.gb("Div",s,void 0)},1339026:s=>{u.gb("Pow",s,void 0)},1339077:s=>{u.gb("Equal",s,void 0)},1339130:s=>{u.gb("Greater",s,void 0)},1339185:s=>{u.gb("GreaterOrEqual",s,void 0)},1339247:s=>{u.gb("Less",s,void 0)},1339299:s=>{u.gb("LessOrEqual",s,void 
0)},1339358:(s,c,f,y,v)=>{u.gb("ReduceMean",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1339517:(s,c,f,y,v)=>{u.gb("ReduceMax",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1339675:(s,c,f,y,v)=>{u.gb("ReduceMin",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1339833:(s,c,f,y,v)=>{u.gb("ReduceProd",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1339992:(s,c,f,y,v)=>{u.gb("ReduceSum",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340150:(s,c,f,y,v)=>{u.gb("ReduceL1",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340307:(s,c,f,y,v)=>{u.gb("ReduceL2",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340464:(s,c,f,y,v)=>{u.gb("ReduceLogSum",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340625:(s,c,f,y,v)=>{u.gb("ReduceSumSquare",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340789:(s,c,f,y,v)=>{u.gb("ReduceLogSumExp",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340953:s=>{u.gb("Where",s,void 0)},1341006:(s,c,f)=>{u.gb("Transpose",s,{perm:c?Array.from(i().subarray(c>>>0,f>>>0)):[]})},1341114:(s,c,f,y)=>{u.gb("DepthToSpace",s,{blocksize:c,mode:ke(f),format:y?"NHWC":"NCHW"})},1341247:(s,c,f,y)=>{u.gb("DepthToSpace",s,{blocksize:c,mode:ke(f),format:y?"NHWC":"NCHW"})},1341380:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee)=>{u.gb("ConvTranspose",s,{format:N?"NHWC":"NCHW",autoPad:c,dilations:[f],group:y,kernelShape:[v],pads:[C,O],strides:[B],wIsConst:()=>!!t()[L>>>0],outputPadding:j?Array.from(i().subarray(j>>>0,pe>>>0)):[],outputShape:fe?Array.from(i().subarray(fe>>>0,P>>>0)):[],activation:ke(ee)})},1341781:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P)=>{u.gb("ConvTranspose",s,{format:B?"NHWC":"NCHW",autoPad:c,dilations:Array.from(i().subarray(f>>>0,2+(f>>>0)>>>0)),group:y,kernelShape:Array.from(i().subarray(v>>>0,2+(v>>>0)>>>0)),pads:Array.from(i().subarray(C>>>0,4+(C>>>0)>>>0)),strides:Array.from(i().subarray(O>>>0,2+(O>>>0)>>>0)),wIsConst:()=>!!t()[N>>>0],outputPadding:L?Array.from(i().subarray(L>>>0,j>>>0)):[],outputShape:pe?Array.from(i().subarray(pe>>>0,fe>>>0)):[],activation:ke(P)})},1342346:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee)=>{u.gb("ConvTranspose",s,{format:N?"NHWC":"NCHW",autoPad:c,dilations:[f],group:y,kernelShape:[v],pads:[C,O],strides:[B],wIsConst:()=>!!t()[L>>>0],outputPadding:j?Array.from(i().subarray(j>>>0,pe>>>0)):[],outputShape:fe?Array.from(i().subarray(fe>>>0,P>>>0)):[],activation:ke(ee)})},1342747:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P)=>{u.gb("ConvTranspose",s,{format:B?"NHWC":"NCHW",autoPad:c,dilations:Array.from(i().subarray(f>>>0,2+(f>>>0)>>>0)),group:y,kernelShape:Array.from(i().subarray(v>>>0,2+(v>>>0)>>>0)),pads:Array.from(i().subarray(C>>>0,4+(C>>>0)>>>0)),strides:Array.from(i().subarray(O>>>0,2+(O>>>0)>>>0)),wIsConst:()=>!!t()[N>>>0],outputPadding:L?Array.from(i().subarray(L>>>0,j>>>0)):[],outputShape:pe?Array.from(i().subarray(pe>>>0,fe>>>0)):[],activation:ke(P)})},1343312:(s,c)=>{u.gb("GlobalAveragePool",s,{format:c?"NHWC":"NCHW"})},1343403:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee,ge)=>{u.gb("AveragePool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,j,pe,fe],strides:[P,ee]})},1343687:(s,c)=>{u.gb("GlobalAv
eragePool",s,{format:c?"NHWC":"NCHW"})},1343778:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee,ge)=>{u.gb("AveragePool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,j,pe,fe],strides:[P,ee]})},1344062:(s,c)=>{u.gb("GlobalMaxPool",s,{format:c?"NHWC":"NCHW"})},1344149:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee,ge)=>{u.gb("MaxPool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,j,pe,fe],strides:[P,ee]})},1344429:(s,c)=>{u.gb("GlobalMaxPool",s,{format:c?"NHWC":"NCHW"})},1344516:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee,ge)=>{u.gb("MaxPool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,j,pe,fe],strides:[P,ee]})},1344796:(s,c,f,y,v)=>{u.gb("Gemm",s,{alpha:c,beta:f,transA:y,transB:v})},1344900:s=>{u.gb("MatMul",s,void 0)},1344954:(s,c,f,y)=>{u.gb("ArgMax",s,{keepDims:!!c,selectLastIndex:!!f,axis:y})},1345062:(s,c,f,y)=>{u.gb("ArgMin",s,{keepDims:!!c,selectLastIndex:!!f,axis:y})},1345170:(s,c)=>{u.gb("Softmax",s,{axis:c})},1345233:(s,c)=>{u.gb("Concat",s,{axis:c})},1345293:(s,c,f,y,v)=>{u.gb("Split",s,{axis:c,numOutputs:f,splitSizes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1345433:s=>{u.gb("Expand",s,void 0)},1345487:(s,c)=>{u.gb("Gather",s,{axis:Number(c)})},1345558:(s,c)=>{u.gb("GatherElements",s,{axis:Number(c)})},1345637:(s,c,f,y,v,C,O,B,N,L,j)=>{u.gb("Resize",s,{antialias:c,axes:f?Array.from(i().subarray(f>>>0,y>>>0)):[],coordinateTransformMode:ke(v),cubicCoeffA:C,excludeOutside:O,extrapolationValue:B,keepAspectRatioPolicy:ke(N),mode:ke(L),nearestMode:ke(j)})},1345983:(s,c,f,y,v,C,O)=>{u.gb("Slice",s,{starts:c?Array.from(i().subarray(c>>>0,f>>>0)):[],ends:y?Array.from(i().subarray(y>>>0,v>>>0)):[],axes:C?Array.from(i().subarray(C>>>0,O>>>0)):[]})},1346199:s=>{u.gb("Tile",s,void 0)},1346251:(s,c,f)=>{u.gb("InstanceNormalization",s,{epsilon:c,format:f?"NHWC":"NCHW"})},1346365:(s,c,f)=>{u.gb("InstanceNormalization",s,{epsilon:c,format:f?"NHWC":"NCHW"})},1346479:s=>{u.gb("Range",s,void 0)},1346532:(s,c)=>{u.gb("Einsum",s,{equation:ke(c)})},1346613:(s,c,f,y,v)=>{u.gb("Pad",s,{mode:c,value:f,pads:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1346740:(s,c,f,y,v,C)=>{u.gb("BatchNormalization",s,{epsilon:c,momentum:f,spatial:!!v,trainingMode:!!y,format:C?"NHWC":"NCHW"})},1346909:(s,c,f,y,v,C)=>{u.gb("BatchNormalization",s,{epsilon:c,momentum:f,spatial:!!v,trainingMode:!!y,format:C?"NHWC":"NCHW"})},1347078:(s,c,f)=>{u.gb("CumSum",s,{exclusive:Number(c),reverse:Number(f)})},1347175:(s,c,f,y,v,C,O,B,N)=>{u.gb("Attention",s,{numHeads:c,isUnidirectional:f,maskFilterValue:y,scale:v,doRotary:C,qkvHiddenSizes:O?Array.from(i().subarray(Number(B)>>>0,Number(B)+O>>>0)):[],pastPresentShareBuffer:!!N})},1347447:s=>{u.gb("BiasAdd",s,void 0)},1347502:s=>{u.gb("BiasSplitGelu",s,void 0)},1347563:s=>{u.gb("FastGelu",s,void 0)},1347619:(s,c,f,y,v,C,O,B,N,L,j,pe,fe,P,ee,ge)=>{u.gb("Conv",s,{format:pe?"NHWC":"NCHW",auto_pad:c,dilations:f?Array.from(i().subarray(f>>>0,y>>>0)):[],group:v,kernel_shape:C?Array.from(i().subarray(C>>>0,O>>>0)):[],pads:B?Array.from(i().subarray(B>>>0,N>>>0)):[],strides:L?Array.from(i().subarray(L>>>0,j>>>0)):[],w_is_const:()=>!!t()[fe>>>0],activation:ke(P),activation_params:ee?Array.from(l().subarray(ee>>>0,ge>>>0)):[]})},1348115:s=>{u.gb("Gelu",s,void 
0)},1348167:(s,c,f,y)=>{u.gb("GroupQueryAttention",s,{numHeads:c,kvNumHeads:f,scale:y})},1348280:(s,c,f,y)=>{u.gb("LayerNormalization",s,{axis:c,epsilon:f,simplified:!!y})},1348391:(s,c,f,y)=>{u.gb("LayerNormalization",s,{axis:c,epsilon:f,simplified:!!y})},1348502:(s,c,f,y,v,C)=>{u.gb("MatMulNBits",s,{k:c,n:f,accuracyLevel:y,bits:v,blockSize:C})},1348629:(s,c,f,y,v,C)=>{u.gb("MultiHeadAttention",s,{numHeads:c,isUnidirectional:f,maskFilterValue:y,scale:v,doRotary:C})},1348788:(s,c)=>{u.gb("QuickGelu",s,{alpha:c})},1348852:(s,c,f,y,v)=>{u.gb("RotaryEmbedding",s,{interleaved:!!c,numHeads:f,rotaryEmbeddingDim:y,scale:v})},1348991:(s,c,f)=>{u.gb("SkipLayerNormalization",s,{epsilon:c,simplified:!!f})},1349093:s=>{u.Vb(s)},1349127:(s,c)=>u.Xb(s,c,u.Bb.Zb,u.Bb.errors),1349239:(s,c,f)=>{u.gb("SkipLayerNormalization",s,{epsilon:c,simplified:!!f})}};function Vl(s,c,f){return fi(async()=>{await u.Tb(s,c,f)})}function fn(s){this.name="ExitStatus",this.message=`Program terminated with exit(${s})`,this.status=s}var hn=s=>{s.terminate(),s.onmessage=()=>{}},Do=s=>{ct.length==0&&(Wo(),No(ct[0]));var c=ct.pop();if(!c)return 6;wt.push(c),Ze[s.xb]=c,c.xb=s.xb;var f={cmd:"run",start_routine:s.$b,arg:s.Mb,pthread_ptr:s.xb};return c.postMessage(f,s.ec),0},bt=0,_e=(s,c,...f)=>{for(var y=2*f.length,v=zn(),C=On(8*y),O=C>>>3,B=0;B>>0]=N)}return s=Ui(s,0,y,C,c),gr(v),s};function Bo(s){if(b)return _e(0,1,s);if(q=s,!(0{if(q=s,b)throw Ro(s),"unwind";Bo(s)},ct=[],wt=[],Mo=[],Ze={},Uo=s=>{var c=s.xb;delete Ze[c],ct.push(s),wt.splice(wt.indexOf(s),1),s.xb=0,Pn(c)};function Vo(){Mo.forEach(s=>s())}var No=s=>new Promise(c=>{s.onmessage=v=>{var C=(v=v.data).cmd;if(v.targetThread&&v.targetThread!=Rt()){var O=Ze[v.targetThread];O?O.postMessage(v,v.transferList):F(`Internal error! Worker sent a message "${C}" to target pthread ${v.targetThread}, but that thread no longer exists!`)}else C==="checkMailbox"?ar():C==="spawnThread"?Do(v):C==="cleanupThread"?Uo(Ze[v.thread]):C==="killThread"?(v=v.thread,C=Ze[v],delete Ze[v],hn(C),Pn(v),wt.splice(wt.indexOf(C),1),C.xb=0):C==="cancelThread"?Ze[v.thread].postMessage({cmd:"cancel"}):C==="loaded"?(s.loaded=!0,c(s)):C==="alert"?alert(`Thread ${v.threadId}: ${v.text}`):v.target==="setimmediate"?s.postMessage(v):C==="callHandler"?u[v.handler](...v.args):C&&F(`worker sent an unknown command ${C}`)},s.onerror=v=>{throw F(`worker sent an error! 
${v.filename}:${v.lineno}: ${v.message}`),v};var f,y=[];for(f of["onExit"])u.hasOwnProperty(f)&&y.push(f);s.postMessage({cmd:"load",handlers:y,wasmMemory:X,wasmModule:xe})});function Wo(){var s=new Worker(new URL(import.meta.url),{type:"module",workerData:"em-pthread",name:"em-pthread"});ct.push(s)}var ir=s=>{for(;0{var s=Rt(),c=a()[s+52>>>2>>>0];s=a()[s+56>>>2>>>0],Ni(c,c-s),gr(c)},Wl=(s,c)=>{bt=0,s=Wi(s,c),0>>=0);throw c>>>=0,f>>>=0,a()[y.Fb+16>>>2>>>0]=0,a()[y.Fb+4>>>2>>>0]=c,a()[y.Fb+8>>>2>>>0]=f,s}function Ho(s,c,f,y){return b?_e(2,1,s,c,f,y):Go(s,c,f,y)}function Go(s,c,f,y){if(s>>>=0,c>>>=0,f>>>=0,y>>>=0,x===void 0)return F("Current environment does not support SharedArrayBuffer, pthreads are not available!"),6;var v=[];return b&&v.length===0?Ho(s,c,f,y):(s={$b:f,xb:s,Mb:y,ec:v},b?(s.Ib="spawnThread",postMessage(s,v),0):Do(s))}var Lo=typeof TextDecoder<"u"?new TextDecoder("utf8"):void 0,Fo=(s,c,f)=>{var y=(c>>>=0)+f;for(f=c;s[f]&&!(f>=y);)++f;if(16(v=(240&v)==224?(15&v)<<12|C<<6|O:(7&v)<<18|C<<12|O<<6|63&s[c++])?y+=String.fromCharCode(v):(v-=65536,y+=String.fromCharCode(55296|v>>10,56320|1023&v))}}else y+=String.fromCharCode(v)}return y},ke=(s,c)=>(s>>>=0)?Fo(r(),s,c):"";function qo(s,c,f){return b?_e(3,1,s,c,f):0}function Ko(s,c){if(b)return _e(4,1,s,c)}var yn=s=>{for(var c=0,f=0;f=y?c++:2047>=y?c+=2:55296<=y&&57343>=y?(c+=4,++f):c+=3}return c},jo=(s,c,f,y)=>{if(!(0>>=0;y=f+y-1;for(var C=0;C=O&&(O=65536+((1023&O)<<10)|1023&s.charCodeAt(++C)),127>=O){if(f>=y)break;c[f++>>>0]=O}else{if(2047>=O){if(f+1>=y)break;c[f++>>>0]=192|O>>6}else{if(65535>=O){if(f+2>=y)break;c[f++>>>0]=224|O>>12}else{if(f+3>=y)break;c[f++>>>0]=240|O>>18,c[f++>>>0]=128|O>>12&63}c[f++>>>0]=128|O>>6&63}c[f++>>>0]=128|63&O}}return c[f>>>0]=0,f-v},zt=(s,c,f)=>jo(s,r(),c,f);function Yo(s,c){if(b)return _e(5,1,s,c)}function Xo(s,c,f){if(b)return _e(6,1,s,c,f)}function Zo(s,c,f){return b?_e(7,1,s,c,f):0}function Qo(s,c){if(b)return _e(8,1,s,c)}function Jo(s,c,f){if(b)return _e(9,1,s,c,f)}function ei(s,c,f,y){if(b)return _e(10,1,s,c,f,y)}function ti(s,c,f,y){if(b)return _e(11,1,s,c,f,y)}function ri(s,c,f,y){if(b)return _e(12,1,s,c,f,y)}function ni(s){if(b)return _e(13,1,s)}function oi(s,c){if(b)return _e(14,1,s,c)}function ii(s,c,f){if(b)return _e(15,1,s,c,f)}var ai,pt,Ll=()=>{Gt("")},Qe=s=>{for(var c="";r()[s>>>0];)c+=ai[r()[s++>>>0]];return c},bn={},wn={},Fl={};function st(s,c,f={}){if(!("argPackAdvance"in c))throw new TypeError("registerType registeredInstance requires argPackAdvance");return function(y,v,C={}){var O=v.name;if(!y)throw new pt(`type "${O}" must have a positive integer typeid pointer`);if(wn.hasOwnProperty(y)){if(C.Ob)return;throw new pt(`Cannot register type '${O}' twice`)}wn[y]=v,delete Fl[y],bn.hasOwnProperty(y)&&(v=bn[y],delete bn[y],v.forEach(B=>B()))}(s,c,f)}var si=(s,c,f)=>{switch(c){case 1:return f?y=>t()[y>>>0]:y=>r()[y>>>0];case 2:return f?y=>n()[y>>>1>>>0]:y=>o()[y>>>1>>>0];case 4:return f?y=>i()[y>>>2>>>0]:y=>a()[y>>>2>>>0];case 8:return f?y=>R[y>>>3]:y=>Y[y>>>3];default:throw new TypeError(`invalid integer width (${c}): ${s}`)}};function ql(s,c,f){f>>>=0,st(s>>>=0,{name:c=Qe(c>>>0),fromWireType:y=>y,toWireType:function(y,v){if(typeof v!="bigint"&&typeof v!="number")throw v=v===null?"null":(y=typeof v)=="object"||y==="array"||y==="function"?v.toString():""+v,new TypeError(`Cannot convert "${v}" to ${this.name}`);return typeof v=="number"&&(v=BigInt(v)),v},argPackAdvance:mt,readValueFromPointer:si(c,f,c.indexOf("u")==-1),Ab:null})}var mt=8;function 
Kl(s,c,f,y){st(s>>>=0,{name:c=Qe(c>>>0),fromWireType:function(v){return!!v},toWireType:function(v,C){return C?f:y},argPackAdvance:mt,readValueFromPointer:function(v){return this.fromWireType(r()[v>>>0])},Ab:null})}var vn=[],ut=[];function $n(s){9<(s>>>=0)&&--ut[s+1]==0&&(ut[s]=void 0,vn.push(s))}var He=s=>{if(!s)throw new pt("Cannot use deleted val. handle = "+s);return ut[s]},Ge=s=>{switch(s){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:let c=vn.pop()||ut.length;return ut[c]=s,ut[c+1]=1,c}};function _n(s){return this.fromWireType(a()[s>>>2>>>0])}var jl={name:"emscripten::val",fromWireType:s=>{var c=He(s);return $n(s),c},toWireType:(s,c)=>Ge(c),argPackAdvance:mt,readValueFromPointer:_n,Ab:null};function Yl(s){return st(s>>>0,jl)}var Xl=(s,c)=>{switch(c){case 4:return function(f){return this.fromWireType(l()[f>>>2>>>0])};case 8:return function(f){return this.fromWireType(d()[f>>>3>>>0])};default:throw new TypeError(`invalid float width (${c}): ${s}`)}};function Zl(s,c,f){f>>>=0,st(s>>>=0,{name:c=Qe(c>>>0),fromWireType:y=>y,toWireType:(y,v)=>v,argPackAdvance:mt,readValueFromPointer:Xl(c,f),Ab:null})}function Ql(s,c,f,y,v){if(s>>>=0,f>>>=0,c=Qe(c>>>0),v===-1&&(v=4294967295),v=B=>B,y===0){var C=32-8*f;v=B=>B<>>C}var O=c.includes("unsigned")?function(B,N){return N>>>0}:function(B,N){return N};st(s,{name:c,fromWireType:v,toWireType:O,argPackAdvance:mt,readValueFromPointer:si(c,f,y!==0),Ab:null})}function Jl(s,c,f){function y(C){var O=a()[C>>>2>>>0];return C=a()[C+4>>>2>>>0],new v(t().buffer,C,O)}var v=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][c];st(s>>>=0,{name:f=Qe(f>>>0),fromWireType:y,argPackAdvance:mt,readValueFromPointer:y},{Ob:!0})}function ec(s,c){s>>>=0;var f=(c=Qe(c>>>0))==="std::string";st(s,{name:c,fromWireType:function(y){var v=a()[y>>>2>>>0],C=y+4;if(f)for(var O=C,B=0;B<=v;++B){var N=C+B;if(B==v||r()[N>>>0]==0){if(O=ke(O,N-O),L===void 0)var L=O;else L+=String.fromCharCode(0),L+=O;O=N+1}}else{for(L=Array(v),B=0;B>>0]);L=L.join("")}return et(y),L},toWireType:function(y,v){v instanceof ArrayBuffer&&(v=new Uint8Array(v));var C=typeof v=="string";if(!(C||v instanceof Uint8Array||v instanceof Uint8ClampedArray||v instanceof Int8Array))throw new pt("Cannot pass non-string to std::string");var O=f&&C?yn(v):v.length,B=fr(4+O+1),N=B+4;if(a()[B>>>2>>>0]=O,f&&C)zt(v,N,O+1);else if(C)for(C=0;C>>0]=L}else for(C=0;C>>0]=v[C];return y!==null&&y.push(et,B),B},argPackAdvance:mt,readValueFromPointer:_n,Ab(y){et(y)}})}var ui=typeof TextDecoder<"u"?new TextDecoder("utf-16le"):void 0,tc=(s,c)=>{for(var f=s>>1,y=f+c/2;!(f>=y)&&o()[f>>>0];)++f;if(32<(f<<=1)-s&&ui)return ui.decode(r().slice(s,f));for(f="",y=0;!(y>=c/2);++y){var v=n()[s+2*y>>>1>>>0];if(v==0)break;f+=String.fromCharCode(v)}return f},rc=(s,c,f)=>{if(f??=2147483647,2>f)return 0;var y=c;f=(f-=2)<2*s.length?f/2:s.length;for(var v=0;v>>1>>>0]=C,c+=2}return n()[c>>>1>>>0]=0,c-y},nc=s=>2*s.length,oc=(s,c)=>{for(var f=0,y="";!(f>=c/4);){var v=i()[s+4*f>>>2>>>0];if(v==0)break;++f,65536<=v?(v-=65536,y+=String.fromCharCode(55296|v>>10,56320|1023&v)):y+=String.fromCharCode(v)}return y},ic=(s,c,f)=>{if(c>>>=0,f??=2147483647,4>f)return 0;var y=c;f=y+f-4;for(var v=0;v=C&&(C=65536+((1023&C)<<10)|1023&s.charCodeAt(++v)),i()[c>>>2>>>0]=C,(c+=4)+4>f)break}return i()[c>>>2>>>0]=0,c-y},ac=s=>{for(var c=0,f=0;f=y&&++f,c+=4}return c};function sc(s,c,f){if(s>>>=0,c>>>=0,f=Qe(f>>>=0),c===2)var y=tc,v=rc,C=nc,O=B=>o()[B>>>1>>>0];else 
c===4&&(y=oc,v=ic,C=ac,O=B=>a()[B>>>2>>>0]);st(s,{name:f,fromWireType:B=>{for(var N,L=a()[B>>>2>>>0],j=B+4,pe=0;pe<=L;++pe){var fe=B+4+pe*c;pe!=L&&O(fe)!=0||(j=y(j,fe-j),N===void 0?N=j:(N+=String.fromCharCode(0),N+=j),j=fe+c)}return et(B),N},toWireType:(B,N)=>{if(typeof N!="string")throw new pt(`Cannot pass non-string to C++ string type ${f}`);var L=C(N),j=fr(4+L+c);return a()[j>>>2>>>0]=L/c,v(N,j+4,L+c),B!==null&&B.push(et,j),j},argPackAdvance:mt,readValueFromPointer:_n,Ab(B){et(B)}})}function uc(s,c){st(s>>>=0,{Pb:!0,name:c=Qe(c>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var dc=()=>1;function lc(s){kn(s>>>0,!g,1,!w,131072,!1),Vo()}var di=s=>{if(!Te)try{if(s(),!(0>>=0,typeof Atomics.fc=="function"&&(Atomics.fc(i(),s>>>2,s).value.then(ar),s+=128,Atomics.store(i(),s>>>2,1))}var ar=()=>{var s=Rt();s&&(xn(s),di(Vi))};function cc(s,c){(s>>>=0)==c>>>0?setTimeout(ar):b?postMessage({targetThread:s,cmd:"checkMailbox"}):(s=Ze[s])&&s.postMessage({cmd:"checkMailbox"})}var Sn=[];function pc(s,c,f,y,v){for(c>>>=0,y/=2,Sn.length=y,f=v>>>0>>>3,v=0;v>>0];return(c?mn[c]:ep[s])(...Sn)}function mc(s){s>>>=0,b?postMessage({cmd:"cleanupThread",thread:s}):Uo(Ze[s])}function fc(s){}var In=(s,c)=>{var f=wn[s];if(f===void 0)throw s=Bi(s),f=Qe(s),et(s),new pt(`${c} has unknown type ${f}`);return f},li=(s,c,f)=>{var y=[];return s=s.toWireType(y,f),y.length&&(a()[c>>>2>>>0]=Ge(y)),s};function hc(s,c,f){return c>>>=0,f>>>=0,s=He(s>>>0),c=In(c,"emval::as"),li(c,f,s)}var sr=s=>{try{s()}catch(c){Gt(c)}},ft=0,Je=null,ci=0,ur=[],pi={},mi={},gc=0,Cn=null,yc=[];function fi(s){return function(c){if(!Te){if(ft===0){var f=!1,y=!1;c((v=0)=>{if(!Te&&(ci=v,f=!0,y)){ft=2,sr(()=>Li(Je)),typeof Browser<"u"&&Browser.Gb.Nb&&Browser.Gb.resume(),v=!1;try{var C=function(){var N=i()[Je+8>>>2>>>0];return N=K[mi[N]],--bt,N()}()}catch(N){C=N,v=!0}var O=!1;if(!Je){var B=Cn;B&&(Cn=null,(v?B.reject:B.resolve)(C),O=!0)}if(v&&!O)throw C}}),y=!0,f||(ft=1,Je=function(){var v=fr(65548),C=v+12;a()[v>>>2>>>0]=C,a()[v+4>>>2>>>0]=C+65536,C=ur[0];var O=pi[C];return O===void 0&&(O=gc++,pi[C]=O,mi[O]=C),C=O,i()[v+8>>>2>>>0]=C,v}(),typeof Browser<"u"&&Browser.Gb.Nb&&Browser.Gb.pause(),sr(()=>Hi(Je)))}else ft===2?(ft=0,sr(Fi),et(Je),Je=null,yc.forEach(di)):Gt(`invalid state: ${ft}`);return ci}}(c=>{s().then(c)})}function bc(s){return s>>>=0,fi(()=>(s=He(s)).then(Ge))}var dr=[];function wc(s,c,f,y){return f>>>=0,y>>>=0,(s=dr[s>>>0])(null,c=He(c>>>0),f,y)}var vc={},lr=s=>{var c=vc[s];return c===void 0?Qe(s):c};function $c(s,c,f,y,v){return f>>>=0,y>>>=0,v>>>=0,(s=dr[s>>>0])(c=He(c>>>0),c[f=lr(f)],y,v)}var hi=()=>typeof globalThis=="object"?globalThis:Function("return this")();function _c(s){return(s>>>=0)==0?Ge(hi()):(s=lr(s),Ge(hi()[s]))}var xc=s=>{var c=dr.length;return dr.push(s),c},Sc=(s,c)=>{for(var f=Array(s),y=0;y>>2>>>0],"parameter "+y);return f},gi=(s,c)=>Object.defineProperty(c,"name",{value:s});function Ic(s,c,f){var y=(c=Sc(s,c>>>0)).shift();s--;var v=`return function (obj, func, destructorsRef, args) { +var Bn=Object.defineProperty;var np=Object.getOwnPropertyDescriptor;var op=Object.getOwnPropertyNames;var ip=Object.prototype.hasOwnProperty;var Rn=(e=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(e,{get:(t,r)=>(typeof require<"u"?require:t)[r]}):e)(function(e){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+e+'" is not supported')});var U=(e,t)=>()=>(e&&(t=e(e=0)),t);var Lt=(e,t)=>{for(var r in t)Bn(e,r,{get:t[r],enumerable:!0})},ap=(e,t,r,n)=>{if(t&&typeof 
t=="object"||typeof t=="function")for(let o of op(t))!ip.call(e,o)&&o!==r&&Bn(e,o,{get:()=>t[o],enumerable:!(n=np(t,o))||n.enumerable});return e};var yr=e=>ap(Bn({},"__esModule",{value:!0}),e);var br,$t,_t,sp,wr,vr=U(()=>{"use strict";br=new Map,$t=[],_t=(e,t,r)=>{if(t&&typeof t.init=="function"&&typeof t.createInferenceSessionHandler=="function"){let n=br.get(e);if(n===void 0)br.set(e,{backend:t,priority:r});else{if(n.priority>r)return;if(n.priority===r&&n.backend!==t)throw new Error(`cannot register backend "${e}" using priority ${r}`)}if(r>=0){let o=$t.indexOf(e);o!==-1&&$t.splice(o,1);for(let i=0;i<$t.length;i++)if(br.get($t[i]).priority<=r){$t.splice(i,0,e);return}$t.push(e)}return}throw new TypeError("not a valid backend")},sp=async e=>{let t=br.get(e);if(!t)return"backend not found.";if(t.initialized)return t.backend;if(t.aborted)return t.error;{let r=!!t.initPromise;try{return r||(t.initPromise=t.backend.init(e)),await t.initPromise,t.initialized=!0,t.backend}catch(n){return r||(t.error=`${n}`,t.aborted=!0),t.error}finally{delete t.initPromise}}},wr=async e=>{let t=e.executionProviders||[],r=t.map(d=>typeof d=="string"?d:d.name),n=r.length===0?$t:r,o,i=[],a=new Set;for(let d of n){let p=await sp(d);typeof p=="string"?i.push({name:d,err:p}):(o||(o=p),o===p&&a.add(d))}if(!o)throw new Error(`no available backend found. ERR: ${i.map(d=>`[${d.name}] ${d.err}`).join(", ")}`);for(let{name:d,err:p}of i)r.includes(d)&&console.warn(`removing requested execution provider "${d}" from session options because it is not available: ${p}`);let l=t.filter(d=>a.has(typeof d=="string"?d:d.name));return[o,new Proxy(e,{get:(d,p)=>p==="executionProviders"?l:Reflect.get(d,p)})]}});var Ki=U(()=>{"use strict";vr()});var Yi,Xi=U(()=>{"use strict";Yi="1.19.0"});var Zi,Ne,Mn=U(()=>{"use strict";Xi();Zi="warning",Ne={wasm:{},webgl:{},webgpu:{},versions:{common:Yi},set logLevel(e){if(e!==void 0){if(typeof e!="string"||["verbose","info","warning","error","fatal"].indexOf(e)===-1)throw new Error(`Unsupported logging level: ${e}`);Zi=e}},get logLevel(){return Zi}};Object.defineProperty(Ne,"logLevel",{enumerable:!0})});var ye,Qi=U(()=>{"use strict";Mn();ye=Ne});var Ji,ea,ta=U(()=>{"use strict";Ji=(e,t)=>{let r=typeof document<"u"?document.createElement("canvas"):new OffscreenCanvas(1,1);r.width=e.dims[3],r.height=e.dims[2];let n=r.getContext("2d");if(n!=null){let o,i;t?.tensorLayout!==void 0&&t.tensorLayout==="NHWC"?(o=e.dims[2],i=e.dims[3]):(o=e.dims[3],i=e.dims[2]);let a=t?.format!==void 0?t.format:"RGB",l=t?.norm,d,p;l===void 0||l.mean===void 0?d=[255,255,255,255]:typeof l.mean=="number"?d=[l.mean,l.mean,l.mean,l.mean]:(d=[l.mean[0],l.mean[1],l.mean[2],0],l.mean[3]!==void 0&&(d[3]=l.mean[3])),l===void 0||l.bias===void 0?p=[0,0,0,0]:typeof l.bias=="number"?p=[l.bias,l.bias,l.bias,l.bias]:(p=[l.bias[0],l.bias[1],l.bias[2],0],l.bias[3]!==void 0&&(p[3]=l.bias[3]));let m=i*o,u=0,h=m,w=m*2,g=-1;a==="RGBA"?(u=0,h=m,w=m*2,g=m*3):a==="RGB"?(u=0,h=m,w=m*2):a==="RBG"&&(u=0,w=m,h=m*2);for(let b=0;b{let r=typeof document<"u"?document.createElement("canvas").getContext("2d"):new OffscreenCanvas(1,1).getContext("2d"),n;if(r!=null){let o,i,a;t?.tensorLayout!==void 0&&t.tensorLayout==="NHWC"?(o=e.dims[2],i=e.dims[1],a=e.dims[3]):(o=e.dims[3],i=e.dims[2],a=e.dims[1]);let l=t!==void 0&&t.format!==void 0?t.format:"RGB",d=t?.norm,p,m;d===void 0||d.mean===void 0?p=[255,255,255,255]:typeof d.mean=="number"?p=[d.mean,d.mean,d.mean,d.mean]:(p=[d.mean[0],d.mean[1],d.mean[2],255],d.mean[3]!==void 0&&(p[3]=d.mean[3])),d===void 
0||d.bias===void 0?m=[0,0,0,0]:typeof d.bias=="number"?m=[d.bias,d.bias,d.bias,d.bias]:(m=[d.bias[0],d.bias[1],d.bias[2],0],d.bias[3]!==void 0&&(m[3]=d.bias[3]));let u=i*o;if(t!==void 0&&(t.format!==void 0&&a===4&&t.format!=="RGBA"||a===3&&t.format!=="RGB"&&t.format!=="BGR"))throw new Error("Tensor format doesn't match input tensor dims");let h=4,w=0,g=1,b=2,x=3,_=0,$=u,S=u*2,I=-1;l==="RGBA"?(_=0,$=u,S=u*2,I=u*3):l==="RGB"?(_=0,$=u,S=u*2):l==="RBG"&&(_=0,S=u,$=u*2),n=r.createImageData(o,i);for(let T=0;T{"use strict";$r();Un=(e,t)=>{if(e===void 0)throw new Error("Image buffer must be defined");if(t.height===void 0||t.width===void 0)throw new Error("Image height and width must be defined");if(t.tensorLayout==="NHWC")throw new Error("NHWC Tensor layout is not supported yet");let{height:r,width:n}=t,o=t.norm??{mean:255,bias:0},i,a;typeof o.mean=="number"?i=[o.mean,o.mean,o.mean,o.mean]:i=[o.mean[0],o.mean[1],o.mean[2],o.mean[3]??255],typeof o.bias=="number"?a=[o.bias,o.bias,o.bias,o.bias]:a=[o.bias[0],o.bias[1],o.bias[2],o.bias[3]??0];let l=t.format!==void 0?t.format:"RGBA",d=t.tensorFormat!==void 0&&t.tensorFormat!==void 0?t.tensorFormat:"RGB",p=r*n,m=d==="RGBA"?new Float32Array(p*4):new Float32Array(p*3),u=4,h=0,w=1,g=2,b=3,x=0,_=p,$=p*2,S=-1;l==="RGB"&&(u=3,h=0,w=1,g=2,b=-1),d==="RGBA"?S=p*3:d==="RBG"?(x=0,$=p,_=p*2):d==="BGR"&&($=0,_=p,x=p*2);for(let T=0;T{let r=typeof HTMLImageElement<"u"&&e instanceof HTMLImageElement,n=typeof ImageData<"u"&&e instanceof ImageData,o=typeof ImageBitmap<"u"&&e instanceof ImageBitmap,i=typeof e=="string",a,l=t??{},d=()=>{if(typeof document<"u")return document.createElement("canvas");if(typeof OffscreenCanvas<"u")return new OffscreenCanvas(1,1);throw new Error("Canvas is not supported")},p=m=>m instanceof HTMLCanvasElement||m instanceof OffscreenCanvas?m.getContext("2d"):null;if(r){let m=d();m.width=e.width,m.height=e.height;let u=p(m);if(u!=null){let h=e.height,w=e.width;if(t!==void 0&&t.resizedHeight!==void 0&&t.resizedWidth!==void 0&&(h=t.resizedHeight,w=t.resizedWidth),t!==void 0){if(l=t,t.tensorFormat!==void 0)throw new Error("Image input config format must be RGBA for HTMLImageElement");l.tensorFormat="RGBA",l.height=h,l.width=w}else l.tensorFormat="RGBA",l.height=h,l.width=w;u.drawImage(e,0,0),a=u.getImageData(0,0,w,h).data}else throw new Error("Can not access image data")}else if(n){let m,u;if(t!==void 0&&t.resizedWidth!==void 0&&t.resizedHeight!==void 0?(m=t.resizedHeight,u=t.resizedWidth):(m=e.height,u=e.width),t!==void 0&&(l=t),l.format="RGBA",l.height=m,l.width=u,t!==void 0){let h=d();h.width=u,h.height=m;let w=p(h);if(w!=null)w.putImageData(e,0,0),a=w.getImageData(0,0,u,m).data;else throw new Error("Can not access image data")}else a=e.data}else if(o){if(t===void 0)throw new Error("Please provide image config with format for Imagebitmap");let m=d();m.width=e.width,m.height=e.height;let u=p(m);if(u!=null){let h=e.height,w=e.width;return u.drawImage(e,0,0,w,h),a=u.getImageData(0,0,w,h).data,l.height=h,l.width=w,Un(a,l)}else throw new Error("Can not access image data")}else{if(i)return new Promise((m,u)=>{let h=d(),w=p(h);if(!e||!w)return u();let g=new Image;g.crossOrigin="Anonymous",g.src=e,g.onload=()=>{h.width=g.width,h.height=g.height,w.drawImage(g,0,0,h.width,h.height);let b=w.getImageData(0,0,h.width,h.height);l.height=h.height,l.width=h.width,m(Un(b.data,l))}});throw new Error("Input data provided is not supported - aborted tensor creation")}if(a!==void 0)return Un(a,l);throw new Error("Input data provided is not supported - aborted tensor 
creation")},na=(e,t)=>{let{width:r,height:n,download:o,dispose:i}=t,a=[1,n,r,4];return new Re({location:"texture",type:"float32",texture:e,dims:a,download:o,dispose:i})},oa=(e,t)=>{let{dataType:r,dims:n,download:o,dispose:i}=t;return new Re({location:"gpu-buffer",type:r??"float32",gpuBuffer:e,dims:n,download:o,dispose:i})},ia=(e,t,r)=>new Re({location:"cpu-pinned",type:e,data:t,dims:r??[t.length]})});var xt,Ft,sa,ua,da=U(()=>{"use strict";xt=new Map([["float32",Float32Array],["uint8",Uint8Array],["int8",Int8Array],["uint16",Uint16Array],["int16",Int16Array],["int32",Int32Array],["bool",Uint8Array],["float64",Float64Array],["uint32",Uint32Array]]),Ft=new Map([[Float32Array,"float32"],[Uint8Array,"uint8"],[Int8Array,"int8"],[Uint16Array,"uint16"],[Int16Array,"int16"],[Int32Array,"int32"],[Float64Array,"float64"],[Uint32Array,"uint32"]]),sa=!1,ua=()=>{if(!sa){sa=!0;let e=typeof BigInt64Array<"u"&&BigInt64Array.from,t=typeof BigUint64Array<"u"&&BigUint64Array.from,r=typeof Float16Array<"u"&&Float16Array.from;e&&(xt.set("int64",BigInt64Array),Ft.set(BigInt64Array,"int64")),t&&(xt.set("uint64",BigUint64Array),Ft.set(BigUint64Array,"uint64")),r?(xt.set("float16",Float16Array),Ft.set(Float16Array,"float16")):xt.set("float16",Uint16Array)}}});var la,ca,pa=U(()=>{"use strict";$r();la=e=>{let t=1;for(let r=0;r{switch(e.location){case"cpu":return new Re(e.type,e.data,t);case"cpu-pinned":return new Re({location:"cpu-pinned",data:e.data,type:e.type,dims:t});case"texture":return new Re({location:"texture",texture:e.texture,type:e.type,dims:t});case"gpu-buffer":return new Re({location:"gpu-buffer",gpuBuffer:e.gpuBuffer,type:e.type,dims:t});default:throw new Error(`tensorReshape: tensor location ${e.location} is not supported`)}}});var Re,$r=U(()=>{"use strict";ta();aa();da();pa();Re=class{constructor(t,r,n){ua();let o,i;if(typeof t=="object"&&"location"in t)switch(this.dataLocation=t.location,o=t.type,i=t.dims,t.location){case"cpu-pinned":{let l=xt.get(o);if(!l)throw new TypeError(`unsupported type "${o}" to create tensor from pinned buffer`);if(!(t.data instanceof l))throw new TypeError(`buffer should be of type ${l.name}`);this.cpuData=t.data;break}case"texture":{if(o!=="float32")throw new TypeError(`unsupported type "${o}" to create tensor from texture`);this.gpuTextureData=t.texture,this.downloader=t.download,this.disposer=t.dispose;break}case"gpu-buffer":{if(o!=="float32"&&o!=="float16"&&o!=="int32"&&o!=="int64"&&o!=="uint32"&&o!=="uint8"&&o!=="bool")throw new TypeError(`unsupported type "${o}" to create tensor from gpu buffer`);this.gpuBufferData=t.gpuBuffer,this.downloader=t.download,this.disposer=t.dispose;break}default:throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`)}else{let l,d;if(typeof t=="string")if(o=t,d=n,t==="string"){if(!Array.isArray(r))throw new TypeError("A string tensor's data must be a string array.");l=r}else{let p=xt.get(t);if(p===void 0)throw new TypeError(`Unsupported tensor type: ${t}.`);if(Array.isArray(r)){if(t==="float16"&&p===Uint16Array)throw new TypeError("Creating a float16 tensor from number array is not supported. 
Please use Uint16Array as data.");t==="uint64"||t==="int64"?l=p.from(r,BigInt):l=p.from(r)}else if(r instanceof p)l=r;else throw new TypeError(`A ${o} tensor's data must be type of ${p}`)}else if(d=r,Array.isArray(t)){if(t.length===0)throw new TypeError("Tensor type cannot be inferred from an empty array.");let p=typeof t[0];if(p==="string")o="string",l=t;else if(p==="boolean")o="bool",l=Uint8Array.from(t);else throw new TypeError(`Invalid element type of data array: ${p}.`)}else{let p=Ft.get(t.constructor);if(p===void 0)throw new TypeError(`Unsupported type for tensor data: ${t.constructor}.`);o=p,l=t}if(d===void 0)d=[l.length];else if(!Array.isArray(d))throw new TypeError("A tensor's dims must be a number array");i=d,this.cpuData=l,this.dataLocation="cpu"}let a=la(i);if(this.cpuData&&a!==this.cpuData.length)throw new Error(`Tensor's size(${a}) does not match data length(${this.cpuData.length}).`);this.type=o,this.dims=i,this.size=a}static async fromImage(t,r){return ra(t,r)}static fromTexture(t,r){return na(t,r)}static fromGpuBuffer(t,r){return oa(t,r)}static fromPinnedBuffer(t,r,n){return ia(t,r,n)}toDataURL(t){return Ji(this,t)}toImageData(t){return ea(this,t)}get data(){if(this.ensureValid(),!this.cpuData)throw new Error("The data is not on CPU. Use `getData()` to download GPU data to CPU, or use `texture` or `gpuBuffer` property to access the GPU data directly.");return this.cpuData}get location(){return this.dataLocation}get texture(){if(this.ensureValid(),!this.gpuTextureData)throw new Error("The data is not stored as a WebGL texture.");return this.gpuTextureData}get gpuBuffer(){if(this.ensureValid(),!this.gpuBufferData)throw new Error("The data is not stored as a WebGPU buffer.");return this.gpuBufferData}async getData(t){switch(this.ensureValid(),this.dataLocation){case"cpu":case"cpu-pinned":return this.data;case"texture":case"gpu-buffer":{if(!this.downloader)throw new Error("The current tensor is not created with a specified data downloader.");if(this.isDownloading)throw new Error("The current tensor is being downloaded.");try{this.isDownloading=!0;let r=await this.downloader();return this.downloader=void 0,this.dataLocation="cpu",this.cpuData=r,t&&this.disposer&&(this.disposer(),this.disposer=void 0),r}finally{this.isDownloading=!1}}default:throw new Error(`cannot get data from location: ${this.dataLocation}`)}}dispose(){if(this.isDownloading)throw new Error("The current tensor is being downloaded.");this.disposer&&(this.disposer(),this.disposer=void 0),this.cpuData=void 0,this.gpuTextureData=void 0,this.gpuBufferData=void 0,this.downloader=void 0,this.isDownloading=void 0,this.dataLocation="none"}ensureValid(){if(this.dataLocation==="none")throw new Error("The tensor is disposed.")}reshape(t){if(this.ensureValid(),this.downloader||this.disposer)throw new Error("Cannot reshape a tensor that owns GPU resource.");return ca(this,t)}}});var De,_r=U(()=>{"use strict";$r();De=Re});var xr,ma,We,Me,Vn=U(()=>{"use strict";Mn();xr=(e,t)=>{(typeof Ne.trace>"u"?!Ne.wasm.trace:!Ne.trace)||console.timeStamp(`${e}::ORT::${t}`)},ma=(e,t)=>{let r=new Error().stack?.split(/\r\n|\r|\n/g)||[],n=!1;for(let o=0;o{(typeof Ne.trace>"u"?!Ne.wasm.trace:!Ne.trace)||ma("BEGIN",e)},Me=e=>{(typeof Ne.trace>"u"?!Ne.wasm.trace:!Ne.trace)||ma("END",e)}});var Sr,fa=U(()=>{"use strict";vr();_r();Vn();Sr=class e{constructor(t){this.handler=t}async run(t,r,n){We();let o={},i={};if(typeof t!="object"||t===null||t instanceof De||Array.isArray(t))throw new TypeError("'feeds' must be an object that use input names as 
keys and OnnxValue as corresponding values.");let a=!0;if(typeof r=="object"){if(r===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(r instanceof De)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(r)){if(r.length===0)throw new TypeError("'fetches' cannot be an empty array.");a=!1;for(let p of r){if(typeof p!="string")throw new TypeError("'fetches' must be a string array or an object.");if(this.outputNames.indexOf(p)===-1)throw new RangeError(`'fetches' contains invalid output name: ${p}.`);o[p]=null}if(typeof n=="object"&&n!==null)i=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else{let p=!1,m=Object.getOwnPropertyNames(r);for(let u of this.outputNames)if(m.indexOf(u)!==-1){let h=r[u];(h===null||h instanceof De)&&(p=!0,a=!1,o[u]=h)}if(p){if(typeof n=="object"&&n!==null)i=n;else if(typeof n<"u")throw new TypeError("'options' must be an object.")}else i=r}}else if(typeof r<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let p of this.inputNames)if(typeof t[p]>"u")throw new Error(`input '${p}' is missing in 'feeds'.`);if(a)for(let p of this.outputNames)o[p]=null;let l=await this.handler.run(t,o,i),d={};for(let p in l)if(Object.hasOwnProperty.call(l,p)){let m=l[p];m instanceof De?d[p]=m:d[p]=new De(m.type,m.data,m.dims)}return Me(),d}async release(){return this.handler.dispose()}static async create(t,r,n,o){We();let i,a={};if(typeof t=="string"){if(i=t,typeof r=="object"&&r!==null)a=r;else if(typeof r<"u")throw new TypeError("'options' must be an object.")}else if(t instanceof Uint8Array){if(i=t,typeof r=="object"&&r!==null)a=r;else if(typeof r<"u")throw new TypeError("'options' must be an object.")}else if(t instanceof ArrayBuffer||typeof SharedArrayBuffer<"u"&&t instanceof SharedArrayBuffer){let m=t,u=0,h=t.byteLength;if(typeof r=="object"&&r!==null)a=r;else if(typeof r=="number"){if(u=r,!Number.isSafeInteger(u))throw new RangeError("'byteOffset' must be an integer.");if(u<0||u>=m.byteLength)throw new RangeError(`'byteOffset' is out of range [0, ${m.byteLength}).`);if(h=t.byteLength-u,typeof n=="number"){if(h=n,!Number.isSafeInteger(h))throw new RangeError("'byteLength' must be an integer.");if(h<=0||u+h>m.byteLength)throw new RangeError(`'byteLength' is out of range (0, ${m.byteLength-u}].`);if(typeof o=="object"&&o!==null)a=o;else if(typeof o<"u")throw new TypeError("'options' must be an object.")}else if(typeof n<"u")throw new TypeError("'byteLength' must be a number.")}else if(typeof r<"u")throw new TypeError("'options' must be an object.");i=new Uint8Array(m,u,h)}else throw new TypeError("Unexpected argument[0]: must be 'path' or 'buffer'.");let[l,d]=await wr(a),p=await l.createInferenceSessionHandler(i,d);return Me(),new e(p)}startProfiling(){this.handler.startProfiling()}endProfiling(){this.handler.endProfiling()}get inputNames(){return this.handler.inputNames}get outputNames(){return this.handler.outputNames}}});var up,ha=U(()=>{"use strict";fa();up=Sr});var ga=U(()=>{"use strict"});var ya=U(()=>{"use strict"});var ba=U(()=>{"use strict"});var wa=U(()=>{"use strict"});var dp,Ir,va=U(()=>{"use strict";vr();_r();dp="Training backend could not be resolved. 
Make sure you're using the correct configuration & WebAssembly files.",Ir=class e{constructor(t,r,n){this.handler=t,this.hasOptimizerModel=r,this.hasEvalModel=n}get trainingInputNames(){return this.handler.inputNames}get trainingOutputNames(){return this.handler.outputNames}get evalInputNames(){if(this.hasEvalModel)return this.handler.evalInputNames;throw new Error("This training session has no evalModel loaded.")}get evalOutputNames(){if(this.hasEvalModel)return this.handler.evalOutputNames;throw new Error("This training session has no evalModel loaded.")}static async create(t,r){let n=t.evalModel||"",o=t.optimizerModel||"",i=r||{},[a,l]=await wr(i);if(a.createTrainingSessionHandler){let d=await a.createTrainingSessionHandler(t.checkpointState,t.trainModel,n,o,l);return new e(d,!!t.optimizerModel,!!t.evalModel)}else throw new Error(dp)}typeNarrowingForRunStep(t,r,n,o,i){let a={},l={};if(typeof n!="object"||n===null||n instanceof De||Array.isArray(n))throw new TypeError("'feeds' must be an object that use input names as keys and OnnxValue as corresponding values.");let d=!0;if(typeof o=="object"){if(o===null)throw new TypeError("Unexpected argument[1]: cannot be null.");if(o instanceof De)throw new TypeError("'fetches' cannot be a Tensor");if(Array.isArray(o)){if(o.length===0)throw new TypeError("'fetches' cannot be an empty array.");d=!1;for(let p of o){if(typeof p!="string")throw new TypeError("'fetches' must be a string array or an object.");if(r.indexOf(p)===-1)throw new RangeError(`'fetches' contains invalid output name: ${p}.`);a[p]=null}if(typeof i=="object"&&i!==null)l=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else{let p=!1,m=Object.getOwnPropertyNames(o);for(let u of r)if(m.indexOf(u)!==-1){let h=o[u];(h===null||h instanceof De)&&(p=!0,d=!1,a[u]=h)}if(p){if(typeof i=="object"&&i!==null)l=i;else if(typeof i<"u")throw new TypeError("'options' must be an object.")}else l=o}}else if(typeof o<"u")throw new TypeError("Unexpected argument[1]: must be 'fetches' or 'options'.");for(let p of t)if(typeof n[p]>"u")throw new Error(`input '${p}' is missing in 'feeds'.`);if(d)for(let p of r)a[p]=null;return[a,l]}convertHandlerReturnTypeToMapOfTensors(t){let r={};for(let n in t)if(Object.hasOwnProperty.call(t,n)){let o=t[n];o instanceof De?r[n]=o:r[n]=new De(o.type,o.data,o.dims)}return r}async lazyResetGrad(){await this.handler.lazyResetGrad()}async runTrainStep(t,r,n){let[o,i]=this.typeNarrowingForRunStep(this.trainingInputNames,this.trainingOutputNames,t,r,n),a=await this.handler.runTrainStep(t,o,i);return this.convertHandlerReturnTypeToMapOfTensors(a)}async runOptimizerStep(t){if(this.hasOptimizerModel)await this.handler.runOptimizerStep(t||{});else throw new Error("This TrainingSession has no OptimizerModel loaded.")}async runEvalStep(t,r,n){if(this.hasEvalModel){let[o,i]=this.typeNarrowingForRunStep(this.evalInputNames,this.evalOutputNames,t,r,n),a=await this.handler.runEvalStep(t,o,i);return this.convertHandlerReturnTypeToMapOfTensors(a)}else throw new Error("This TrainingSession has no EvalModel loaded.")}async getParametersSize(t=!0){return this.handler.getParametersSize(t)}async loadParametersBuffer(t,r=!0){let n=await this.getParametersSize(r);if(t.length!==4*n)throw new Error("Size of the buffer passed into loadParametersBuffer must match the number of parameters in the model. 
Please use getParametersSize method to check.");return this.handler.loadParametersBuffer(t,r)}async getContiguousParameters(t=!0){return this.handler.getContiguousParameters(t)}async release(){return this.handler.dispose()}}});var lp,$a=U(()=>{"use strict";va();lp=Ir});var Nn={};Lt(Nn,{InferenceSession:()=>up,TRACE:()=>xr,TRACE_FUNC_BEGIN:()=>We,TRACE_FUNC_END:()=>Me,Tensor:()=>De,TrainingSession:()=>lp,env:()=>ye,registerBackend:()=>_t});var Le=U(()=>{"use strict";Ki();Qi();ha();_r();ga();ya();Vn();ba();wa();$a()});var Cr=U(()=>{"use strict"});var Ia={};Lt(Ia,{default:()=>cp});var xa,Sa,cp,Ca=U(()=>{"use strict";Wn();St();qt();xa="ort-wasm-proxy-worker",Sa=globalThis.self?.name===xa;Sa&&(self.onmessage=e=>{let{type:t,in:r}=e.data;try{switch(t){case"init-wasm":Tr(r.wasm).then(()=>{Ar(r).then(()=>{postMessage({type:t})},n=>{postMessage({type:t,err:n})})},n=>{postMessage({type:t,err:n})});break;case"init-ep":{let{epName:n,env:o}=r;Er(o,n).then(()=>{postMessage({type:t})},i=>{postMessage({type:t,err:i})});break}case"copy-from":{let{buffer:n}=r,o=jt(n);postMessage({type:t,out:o});break}case"create":{let{model:n,options:o}=r;kr(n,o).then(i=>{postMessage({type:t,out:i})},i=>{postMessage({type:t,err:i})});break}case"release":Pr(r),postMessage({type:t});break;case"run":{let{sessionId:n,inputIndices:o,inputs:i,outputIndices:a,options:l}=r;Or(n,o,i,a,new Array(a.length).fill(null),l).then(d=>{d.some(p=>p[3]!=="cpu")?postMessage({type:t,err:"Proxy does not support non-cpu tensor location."}):postMessage({type:t,out:d},zr([...i,...d]))},d=>{postMessage({type:t,err:d})});break}case"end-profiling":Dr(r),postMessage({type:t});break;default:}}catch(n){postMessage({type:t,err:n})}});cp=Sa?null:e=>new Worker(e??Ut,{type:"module",name:xa})});var Aa={};Lt(Aa,{default:()=>pp});var Hn,Ta,pp,Ea=U(()=>{"use strict";Ta=(Hn=import.meta.url,async function(e={}){function t(){return X.buffer!=ie.buffer&&ve(),ie}function r(){return X.buffer!=ie.buffer&&ve(),le}function n(){return X.buffer!=ie.buffer&&ve(),se}function o(){return X.buffer!=ie.buffer&&ve(),Z}function i(){return X.buffer!=ie.buffer&&ve(),re}function a(){return X.buffer!=ie.buffer&&ve(),J}function l(){return X.buffer!=ie.buffer&&ve(),Pe}function d(){return X.buffer!=ie.buffer&&ve(),ue}var p,m,u=Object.assign({},e),h=new Promise((s,c)=>{p=s,m=c}),w=typeof window=="object",g=typeof importScripts=="function",b=g&&self.name=="em-pthread";u.mountExternalData=(s,c)=>{(u.Fb||(u.Fb=new Map)).set(s,c)},u.unmountExternalData=()=>{delete u.Fb};var x=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let _=()=>{let s=(f,y,v)=>(...C)=>{let O=Je,B=y?.();C=f(...C);let N=y?.();return B!==N&&(f=N,v(B),y=v=null),Je!=O?new Promise((L,K)=>{Tn={resolve:L,reject:K}}):C},c=f=>async(...y)=>{try{if(u.Eb)throw Error("Session already started");let v=u.Eb={bc:y[0],errors:[]},C=await f(...y);if(u.Eb!==v)throw Error("Session mismatch");u.Mb?.flush();let O=v.errors;if(0N),0u._OrtCreateSession,f=>u._OrtCreateSession=f),u._OrtRun=c(s(u._OrtRun,()=>u._OrtRun,f=>u._OrtRun=f)),u._OrtRunWithBinding=c(s(u._OrtRunWithBinding,()=>u._OrtRunWithBinding,f=>u._OrtRunWithBinding=f)),u._OrtBindInput=s(u._OrtBindInput,()=>u._OrtBindInput,f=>u._OrtBindInput=f),_=void 0};u.jsepInit=(s,c)=>{if(_?.(),s==="webgpu"){[u.Mb,u.Tb,u.Xb,u.Nb,u.Wb,u.jb,u.Yb,u.$b,u.Ub,u.Vb,u.Zb]=c;let 
f=u.Mb;u.jsepRegisterBuffer=(y,v,C,O)=>f.registerBuffer(y,v,C,O),u.jsepGetBuffer=y=>f.getBuffer(y),u.jsepCreateDownloader=(y,v,C)=>f.createDownloader(y,v,C),u.jsepOnReleaseSession=y=>{f.onReleaseSession(y)},u.jsepOnRunStart=y=>f.onRunStart(y)}};var $,S,I=Object.assign({},u),T="./this.program",A=(s,c)=>{throw c},D="";(w||g)&&(g?D=self.location.href:typeof document<"u"&&document.currentScript&&(D=document.currentScript.src),Hn&&(D=Hn),D=D.startsWith("blob:")?"":D.substr(0,D.replace(/[?#].*/,"").lastIndexOf("/")+1),g&&(S=s=>{var c=new XMLHttpRequest;return c.open("GET",s,!1),c.responseType="arraybuffer",c.send(null),new Uint8Array(c.response)}),$=(s,c,f)=>{var y=new XMLHttpRequest;y.open("GET",s,!0),y.responseType="arraybuffer",y.onload=()=>{y.status==200||y.status==0&&y.response?c(y.response):f()},y.onerror=f,y.send(null)});var z=console.log.bind(console),H=console.error.bind(console),W=z,F=H;if(Object.assign(u,I),I=null,b){let s=function(c){try{var f=c.data,y=f.cmd;if(y==="load"){let v=[];self.onmessage=C=>v.push(C),self.startWorker=()=>{postMessage({cmd:"loaded"});for(let C of v)s(C);self.onmessage=s};for(let C of f.handlers)u[C]&&!u[C].proxy||(u[C]=(...O)=>{postMessage({Lb:"callHandler",kc:C,args:O})},C=="print"&&(W=u[C]),C=="printErr"&&(F=u[C]));X=f.wasmMemory,ve(),de(f.wasmModule)}else if(y==="run"){Pn(f.pthread_ptr,0,0,1,0,0),Sn(f.pthread_ptr),Wl(),Vo(),ce||(Ri(),ce=!0);try{Hl(f.start_routine,f.arg)}catch(v){if(v!="unwind")throw v}}else y==="cancel"?Mt()&&hr(-1):f.target!=="setimmediate"&&(y==="checkMailbox"?ce&&ar():y&&(F(`worker: received unknown command ${y}`),F(f)))}catch(v){throw Mi(),v}};var Sh=s,de,ce=!1;F=function(...c){c=c.join(" "),console.error(c)},self.alert=function(...c){postMessage({Lb:"alert",text:c.join(" "),mc:Mt()})},u.instantiateWasm=(c,f)=>new Promise(y=>{de=v=>{v=new WebAssembly.Instance(v,zo()),f(v),y()}}),self.onunhandledrejection=c=>{throw c.reason||c},self.onmessage=s}var X,xe,q,ie,le,se,Z,re,J,Pe,R,Y,ue,Te=!1;function ve(){var s=X.buffer;u.HEAP8=ie=new Int8Array(s),u.HEAP16=se=new Int16Array(s),u.HEAPU8=le=new Uint8Array(s),u.HEAPU16=Z=new Uint16Array(s),u.HEAP32=re=new Int32Array(s),u.HEAPU32=J=new Uint32Array(s),u.HEAPF32=Pe=new Float32Array(s),u.HEAPF64=ue=new Float64Array(s),u.HEAP64=R=new BigInt64Array(s),u.HEAPU64=Y=new BigUint64Array(s)}if(!b){if(u.wasmMemory)X=u.wasmMemory;else if(!((X=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof x))throw F("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"),Error("bad memory");ve()}var Se=[],Ot=[],Be=[],Ae=0,Xe=null,lt=null;function or(){if(--Ae==0&&(Xe!==null&&(clearInterval(Xe),Xe=null),lt)){var s=lt;lt=null,s()}}function Dt(s){throw F(s="Aborted("+s+")"),Te=!0,q=1,s=new WebAssembly.RuntimeError(s+". 
Build with -sASSERTIONS for more info."),m(s),s}var Eo,ko=s=>s.startsWith("data:application/octet-stream;base64,"),Po=s=>s.startsWith("file://");function Oo(s){if(S)return S(s);throw"both async and sync fetching of the wasm failed"}function Do(s,c,f){return function(y){if(w||g){if(typeof fetch=="function"&&!Po(y))return fetch(y,{credentials:"same-origin"}).then(v=>{if(!v.ok)throw`failed to load wasm binary file at '${y}'`;return v.arrayBuffer()}).catch(()=>Oo(y));if($)return new Promise((v,C)=>{$(y,O=>v(new Uint8Array(O)),C)})}return Promise.resolve().then(()=>Oo(y))}(s).then(y=>WebAssembly.instantiate(y,c)).then(f,y=>{F(`failed to asynchronously prepare wasm: ${y}`),Dt(y)})}function zo(){return{a:{M:Nl,za:Vl,b:Ll,$:Go,z:qo,pa:jo,X:Yo,Z:Xo,qa:Zo,na:Qo,ga:Jo,ma:ei,J:ti,Y:ri,V:ni,oa:oi,W:ii,va:Fl,D:jl,P:Kl,O:Xl,C:Ql,s:Jl,p:ec,E:tc,y:uc,Q:dc,ta:lc,ja:cc,T:pc,aa:mc,F:fc,ia:Sn,sa:hc,u:gc,B:wc,o:vc,k:_c,c:_n,n:xc,j:Cc,Aa:Tc,r:Ac,d:Ec,v:kc,m:Pc,g:Oc,l:Dc,i:zc,h:Bc,e:Rc,da:Mc,ea:Uc,fa:Vc,ba:wi,ca:vi,S:Nc,f:Wc,N:Hc,G:Gc,K:Lc,w:Fc,ra:qc,U:jc,t:_i,x:Kc,L:Yc,R:Xc,ya:Zc,xa:Qc,ka:Ii,la:Ci,_:yn,A:Ti,I:Ai,ha:Ei,H:ki,a:X,wa:gn,ua:Di,q:tp}}}var mn={1337716:(s,c,f,y)=>{if(u===void 0||!u.Fb)return 1;if((s=ke(s>>>0)).startsWith("./")&&(s=s.substring(2)),!(s=u.Fb.get(s)))return 2;if(y>>>=0,(c>>>=0)+(f>>>=0)>s.byteLength)return 3;try{return r().set(s.subarray(c,c+f),y>>>0),0}catch{return 4}},1338217:()=>{u.Ub()},1338248:()=>{u.Vb()},1338277:()=>{u.Zb()},1338302:s=>u.Tb(s),1338335:s=>u.Xb(s),1338367:(s,c,f)=>{u.Nb(s,c,f,!0)},1338406:(s,c,f)=>{u.Nb(s,c,f)},1338439:()=>typeof wasmOffsetConverter<"u",1338496:s=>{u.jb("Abs",s,void 0)},1338547:s=>{u.jb("Neg",s,void 0)},1338598:s=>{u.jb("Floor",s,void 0)},1338651:s=>{u.jb("Ceil",s,void 0)},1338703:s=>{u.jb("Reciprocal",s,void 0)},1338761:s=>{u.jb("Sqrt",s,void 0)},1338813:s=>{u.jb("Exp",s,void 0)},1338864:s=>{u.jb("Erf",s,void 0)},1338915:s=>{u.jb("Sigmoid",s,void 0)},1338970:(s,c,f)=>{u.jb("HardSigmoid",s,{alpha:c,beta:f})},1339049:s=>{u.jb("Log",s,void 0)},1339100:s=>{u.jb("Sin",s,void 0)},1339151:s=>{u.jb("Cos",s,void 0)},1339202:s=>{u.jb("Tan",s,void 0)},1339253:s=>{u.jb("Asin",s,void 0)},1339305:s=>{u.jb("Acos",s,void 0)},1339357:s=>{u.jb("Atan",s,void 0)},1339409:s=>{u.jb("Sinh",s,void 0)},1339461:s=>{u.jb("Cosh",s,void 0)},1339513:s=>{u.jb("Asinh",s,void 0)},1339566:s=>{u.jb("Acosh",s,void 0)},1339619:s=>{u.jb("Atanh",s,void 0)},1339672:s=>{u.jb("Tanh",s,void 0)},1339724:s=>{u.jb("Not",s,void 0)},1339775:(s,c,f)=>{u.jb("Clip",s,{min:c,max:f})},1339844:s=>{u.jb("Clip",s,void 0)},1339896:(s,c)=>{u.jb("Elu",s,{alpha:c})},1339954:s=>{u.jb("Relu",s,void 0)},1340006:(s,c)=>{u.jb("LeakyRelu",s,{alpha:c})},1340070:(s,c)=>{u.jb("ThresholdedRelu",s,{alpha:c})},1340140:(s,c)=>{u.jb("Cast",s,{to:c})},1340198:s=>{u.jb("Add",s,void 0)},1340249:s=>{u.jb("Sub",s,void 0)},1340300:s=>{u.jb("Mul",s,void 0)},1340351:s=>{u.jb("Div",s,void 0)},1340402:s=>{u.jb("Pow",s,void 0)},1340453:s=>{u.jb("Equal",s,void 0)},1340506:s=>{u.jb("Greater",s,void 0)},1340561:s=>{u.jb("GreaterOrEqual",s,void 0)},1340623:s=>{u.jb("Less",s,void 0)},1340675:s=>{u.jb("LessOrEqual",s,void 
0)},1340734:(s,c,f,y,v)=>{u.jb("ReduceMean",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1340893:(s,c,f,y,v)=>{u.jb("ReduceMax",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1341051:(s,c,f,y,v)=>{u.jb("ReduceMin",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1341209:(s,c,f,y,v)=>{u.jb("ReduceProd",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1341368:(s,c,f,y,v)=>{u.jb("ReduceSum",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1341526:(s,c,f,y,v)=>{u.jb("ReduceL1",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1341683:(s,c,f,y,v)=>{u.jb("ReduceL2",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1341840:(s,c,f,y,v)=>{u.jb("ReduceLogSum",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1342001:(s,c,f,y,v)=>{u.jb("ReduceSumSquare",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1342165:(s,c,f,y,v)=>{u.jb("ReduceLogSumExp",s,{keepDims:!!c,noopWithEmptyAxes:!!f,axes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1342329:s=>{u.jb("Where",s,void 0)},1342382:(s,c,f)=>{u.jb("Transpose",s,{perm:c?Array.from(i().subarray(c>>>0,f>>>0)):[]})},1342490:(s,c,f,y)=>{u.jb("DepthToSpace",s,{blocksize:c,mode:ke(f),format:y?"NHWC":"NCHW"})},1342623:(s,c,f,y)=>{u.jb("DepthToSpace",s,{blocksize:c,mode:ke(f),format:y?"NHWC":"NCHW"})},1342756:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee)=>{u.jb("ConvTranspose",s,{format:N?"NHWC":"NCHW",autoPad:c,dilations:[f],group:y,kernelShape:[v],pads:[C,O],strides:[B],wIsConst:()=>!!t()[L>>>0],outputPadding:K?Array.from(i().subarray(K>>>0,pe>>>0)):[],outputShape:fe?Array.from(i().subarray(fe>>>0,P>>>0)):[],activation:ke(ee)})},1343157:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P)=>{u.jb("ConvTranspose",s,{format:B?"NHWC":"NCHW",autoPad:c,dilations:Array.from(i().subarray(f>>>0,2+(f>>>0)>>>0)),group:y,kernelShape:Array.from(i().subarray(v>>>0,2+(v>>>0)>>>0)),pads:Array.from(i().subarray(C>>>0,4+(C>>>0)>>>0)),strides:Array.from(i().subarray(O>>>0,2+(O>>>0)>>>0)),wIsConst:()=>!!t()[N>>>0],outputPadding:L?Array.from(i().subarray(L>>>0,K>>>0)):[],outputShape:pe?Array.from(i().subarray(pe>>>0,fe>>>0)):[],activation:ke(P)})},1343722:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee)=>{u.jb("ConvTranspose",s,{format:N?"NHWC":"NCHW",autoPad:c,dilations:[f],group:y,kernelShape:[v],pads:[C,O],strides:[B],wIsConst:()=>!!t()[L>>>0],outputPadding:K?Array.from(i().subarray(K>>>0,pe>>>0)):[],outputShape:fe?Array.from(i().subarray(fe>>>0,P>>>0)):[],activation:ke(ee)})},1344123:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P)=>{u.jb("ConvTranspose",s,{format:B?"NHWC":"NCHW",autoPad:c,dilations:Array.from(i().subarray(f>>>0,2+(f>>>0)>>>0)),group:y,kernelShape:Array.from(i().subarray(v>>>0,2+(v>>>0)>>>0)),pads:Array.from(i().subarray(C>>>0,4+(C>>>0)>>>0)),strides:Array.from(i().subarray(O>>>0,2+(O>>>0)>>>0)),wIsConst:()=>!!t()[N>>>0],outputPadding:L?Array.from(i().subarray(L>>>0,K>>>0)):[],outputShape:pe?Array.from(i().subarray(pe>>>0,fe>>>0)):[],activation:ke(P)})},1344688:(s,c)=>{u.jb("GlobalAveragePool",s,{format:c?"NHWC":"NCHW"})},1344779:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee,ge)=>{u.jb("AveragePool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,K,pe,fe],strides:[P,ee]})},1345063:(s,c)=>{u.jb("GlobalAv
eragePool",s,{format:c?"NHWC":"NCHW"})},1345154:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee,ge)=>{u.jb("AveragePool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,K,pe,fe],strides:[P,ee]})},1345438:(s,c)=>{u.jb("GlobalMaxPool",s,{format:c?"NHWC":"NCHW"})},1345525:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee,ge)=>{u.jb("MaxPool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,K,pe,fe],strides:[P,ee]})},1345805:(s,c)=>{u.jb("GlobalMaxPool",s,{format:c?"NHWC":"NCHW"})},1345892:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee,ge)=>{u.jb("MaxPool",s,{format:ge?"NHWC":"NCHW",auto_pad:c,ceil_mode:f,count_include_pad:y,storage_order:v,dilations:[C,O],kernel_shape:[B,N],pads:[L,K,pe,fe],strides:[P,ee]})},1346172:(s,c,f,y,v)=>{u.jb("Gemm",s,{alpha:c,beta:f,transA:y,transB:v})},1346276:s=>{u.jb("MatMul",s,void 0)},1346330:(s,c,f,y)=>{u.jb("ArgMax",s,{keepDims:!!c,selectLastIndex:!!f,axis:y})},1346438:(s,c,f,y)=>{u.jb("ArgMin",s,{keepDims:!!c,selectLastIndex:!!f,axis:y})},1346546:(s,c)=>{u.jb("Softmax",s,{axis:c})},1346609:(s,c)=>{u.jb("Concat",s,{axis:c})},1346669:(s,c,f,y,v)=>{u.jb("Split",s,{axis:c,numOutputs:f,splitSizes:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1346809:s=>{u.jb("Expand",s,void 0)},1346863:(s,c)=>{u.jb("Gather",s,{axis:Number(c)})},1346934:(s,c)=>{u.jb("GatherElements",s,{axis:Number(c)})},1347013:(s,c,f,y,v,C,O,B,N,L,K)=>{u.jb("Resize",s,{antialias:c,axes:f?Array.from(i().subarray(f>>>0,y>>>0)):[],coordinateTransformMode:ke(v),cubicCoeffA:C,excludeOutside:O,extrapolationValue:B,keepAspectRatioPolicy:ke(N),mode:ke(L),nearestMode:ke(K)})},1347359:(s,c,f,y,v,C,O)=>{u.jb("Slice",s,{starts:c?Array.from(i().subarray(c>>>0,f>>>0)):[],ends:y?Array.from(i().subarray(y>>>0,v>>>0)):[],axes:C?Array.from(i().subarray(C>>>0,O>>>0)):[]})},1347575:s=>{u.jb("Tile",s,void 0)},1347627:(s,c,f)=>{u.jb("InstanceNormalization",s,{epsilon:c,format:f?"NHWC":"NCHW"})},1347741:(s,c,f)=>{u.jb("InstanceNormalization",s,{epsilon:c,format:f?"NHWC":"NCHW"})},1347855:s=>{u.jb("Range",s,void 0)},1347908:(s,c)=>{u.jb("Einsum",s,{equation:ke(c)})},1347989:(s,c,f,y,v)=>{u.jb("Pad",s,{mode:c,value:f,pads:y?Array.from(i().subarray(y>>>0,v>>>0)):[]})},1348116:(s,c,f,y,v,C)=>{u.jb("BatchNormalization",s,{epsilon:c,momentum:f,spatial:!!v,trainingMode:!!y,format:C?"NHWC":"NCHW"})},1348285:(s,c,f,y,v,C)=>{u.jb("BatchNormalization",s,{epsilon:c,momentum:f,spatial:!!v,trainingMode:!!y,format:C?"NHWC":"NCHW"})},1348454:(s,c,f)=>{u.jb("CumSum",s,{exclusive:Number(c),reverse:Number(f)})},1348551:(s,c,f,y,v,C,O,B,N)=>{u.jb("Attention",s,{numHeads:c,isUnidirectional:f,maskFilterValue:y,scale:v,doRotary:C,qkvHiddenSizes:O?Array.from(i().subarray(Number(B)>>>0,Number(B)+O>>>0)):[],pastPresentShareBuffer:!!N})},1348823:s=>{u.jb("BiasAdd",s,void 0)},1348878:s=>{u.jb("BiasSplitGelu",s,void 0)},1348939:s=>{u.jb("FastGelu",s,void 0)},1348995:(s,c,f,y,v,C,O,B,N,L,K,pe,fe,P,ee,ge)=>{u.jb("Conv",s,{format:pe?"NHWC":"NCHW",auto_pad:c,dilations:f?Array.from(i().subarray(f>>>0,y>>>0)):[],group:v,kernel_shape:C?Array.from(i().subarray(C>>>0,O>>>0)):[],pads:B?Array.from(i().subarray(B>>>0,N>>>0)):[],strides:L?Array.from(i().subarray(L>>>0,K>>>0)):[],w_is_const:()=>!!t()[fe>>>0],activation:ke(P),activation_params:ee?Array.from(l().subarray(ee>>>0,ge>>>0)):[]})},1349491:s=>{u.jb("Gelu",s,void 
0)},1349543:(s,c,f,y)=>{u.jb("GroupQueryAttention",s,{numHeads:c,kvNumHeads:f,scale:y})},1349656:(s,c,f,y)=>{u.jb("LayerNormalization",s,{axis:c,epsilon:f,simplified:!!y})},1349767:(s,c,f,y)=>{u.jb("LayerNormalization",s,{axis:c,epsilon:f,simplified:!!y})},1349878:(s,c,f,y,v,C)=>{u.jb("MatMulNBits",s,{k:c,n:f,accuracyLevel:y,bits:v,blockSize:C})},1350005:(s,c,f,y,v,C)=>{u.jb("MultiHeadAttention",s,{numHeads:c,isUnidirectional:f,maskFilterValue:y,scale:v,doRotary:C})},1350164:(s,c)=>{u.jb("QuickGelu",s,{alpha:c})},1350228:(s,c,f,y,v)=>{u.jb("RotaryEmbedding",s,{interleaved:!!c,numHeads:f,rotaryEmbeddingDim:y,scale:v})},1350367:(s,c,f)=>{u.jb("SkipLayerNormalization",s,{epsilon:c,simplified:!!f})},1350469:s=>{u.Yb(s)},1350503:(s,c)=>u.$b(s,c,u.Eb.bc,u.Eb.errors),1350615:(s,c,f)=>{u.jb("SkipLayerNormalization",s,{epsilon:c,simplified:!!f})}};function Vl(s,c,f){return fi(async()=>{await u.Wb(s,c,f)})}function Nl(){return typeof wasmOffsetConverter<"u"}function fn(s){this.name="ExitStatus",this.message=`Program terminated with exit(${s})`,this.status=s}var hn=s=>{s.terminate(),s.onmessage=()=>{}},Bo=s=>{ct.length==0&&(Wo(),No(ct[0]));var c=ct.pop();if(!c)return 6;wt.push(c),Ze[s.Ab]=c,c.Ab=s.Ab;var f={cmd:"run",start_routine:s.cc,arg:s.Pb,pthread_ptr:s.Ab};return c.postMessage(f,s.ic),0},bt=0,_e=(s,c,...f)=>{for(var y=2*f.length,v=zn(),C=Dn(8*y),O=C>>>3,B=0;B>>0]=N)}return s=Ui(s,0,y,C,c),gr(v),s};function gn(s){if(b)return _e(0,1,s);if(q=s,!(0{if(q=s,b)throw Ro(s),"unwind";gn(s)},ct=[],wt=[],Mo=[],Ze={},Uo=s=>{var c=s.Ab;delete Ze[c],ct.push(s),wt.splice(wt.indexOf(s),1),s.Ab=0,On(c)};function Vo(){Mo.forEach(s=>s())}var No=s=>new Promise(c=>{s.onmessage=v=>{var C=(v=v.data).cmd;if(v.targetThread&&v.targetThread!=Mt()){var O=Ze[v.targetThread];O?O.postMessage(v,v.transferList):F(`Internal error! Worker sent a message "${C}" to target pthread ${v.targetThread}, but that thread no longer exists!`)}else C==="checkMailbox"?ar():C==="spawnThread"?Bo(v):C==="cleanupThread"?Uo(Ze[v.thread]):C==="killThread"?(v=v.thread,C=Ze[v],delete Ze[v],hn(C),On(v),wt.splice(wt.indexOf(C),1),C.Ab=0):C==="cancelThread"?Ze[v.thread].postMessage({cmd:"cancel"}):C==="loaded"?(s.loaded=!0,c(s)):C==="alert"?alert(`Thread ${v.threadId}: ${v.text}`):v.target==="setimmediate"?s.postMessage(v):C==="callHandler"?u[v.handler](...v.args):C&&F(`worker sent an unknown command ${C}`)},s.onerror=v=>{throw F(`worker sent an error! 
${v.filename}:${v.lineno}: ${v.message}`),v};var f,y=[];for(f of["onExit"])u.hasOwnProperty(f)&&y.push(f);s.postMessage({cmd:"load",handlers:y,wasmMemory:X,wasmModule:xe})});function Wo(){var s=new Worker(new URL(import.meta.url),{type:"module",workerData:"em-pthread",name:"em-pthread"});ct.push(s)}var ir=s=>{for(;0{var s=Mt(),c=a()[s+52>>>2>>>0];s=a()[s+56>>>2>>>0],Ni(c,c-s),gr(c)},Hl=(s,c)=>{bt=0,s=Wi(s,c),0>>=0);throw c>>>=0,f>>>=0,a()[y.Ib+16>>>2>>>0]=0,a()[y.Ib+4>>>2>>>0]=c,a()[y.Ib+8>>>2>>>0]=f,s}function Ho(s,c,f,y){return b?_e(2,1,s,c,f,y):Go(s,c,f,y)}function Go(s,c,f,y){if(s>>>=0,c>>>=0,f>>>=0,y>>>=0,x===void 0)return F("Current environment does not support SharedArrayBuffer, pthreads are not available!"),6;var v=[];return b&&v.length===0?Ho(s,c,f,y):(s={cc:f,Ab:s,Pb:y,ic:v},b?(s.Lb="spawnThread",postMessage(s,v),0):Bo(s))}var Lo=typeof TextDecoder<"u"?new TextDecoder("utf8"):void 0,Fo=(s,c,f)=>{var y=(c>>>=0)+f;for(f=c;s[f]&&!(f>=y);)++f;if(16(v=(240&v)==224?(15&v)<<12|C<<6|O:(7&v)<<18|C<<12|O<<6|63&s[c++])?y+=String.fromCharCode(v):(v-=65536,y+=String.fromCharCode(55296|v>>10,56320|1023&v))}}else y+=String.fromCharCode(v)}return y},ke=(s,c)=>(s>>>=0)?Fo(r(),s,c):"";function qo(s,c,f){return b?_e(3,1,s,c,f):0}function jo(s,c){if(b)return _e(4,1,s,c)}var bn=s=>{for(var c=0,f=0;f=y?c++:2047>=y?c+=2:55296<=y&&57343>=y?(c+=4,++f):c+=3}return c},Ko=(s,c,f,y)=>{if(!(0>>=0;y=f+y-1;for(var C=0;C=O&&(O=65536+((1023&O)<<10)|1023&s.charCodeAt(++C)),127>=O){if(f>=y)break;c[f++>>>0]=O}else{if(2047>=O){if(f+1>=y)break;c[f++>>>0]=192|O>>6}else{if(65535>=O){if(f+2>=y)break;c[f++>>>0]=224|O>>12}else{if(f+3>=y)break;c[f++>>>0]=240|O>>18,c[f++>>>0]=128|O>>12&63}c[f++>>>0]=128|O>>6&63}c[f++>>>0]=128|63&O}}return c[f>>>0]=0,f-v},zt=(s,c,f)=>Ko(s,r(),c,f);function Yo(s,c){if(b)return _e(5,1,s,c)}function Xo(s,c,f){if(b)return _e(6,1,s,c,f)}function Zo(s,c,f){return b?_e(7,1,s,c,f):0}function Qo(s,c){if(b)return _e(8,1,s,c)}function Jo(s,c,f){if(b)return _e(9,1,s,c,f)}function ei(s,c,f,y){if(b)return _e(10,1,s,c,f,y)}function ti(s,c,f,y){if(b)return _e(11,1,s,c,f,y)}function ri(s,c,f,y){if(b)return _e(12,1,s,c,f,y)}function ni(s){if(b)return _e(13,1,s)}function oi(s,c){if(b)return _e(14,1,s,c)}function ii(s,c,f){if(b)return _e(15,1,s,c,f)}var ai,pt,Fl=()=>{Dt("")},Qe=s=>{for(var c="";r()[s>>>0];)c+=ai[r()[s++>>>0]];return c},wn={},vn={},ql={};function st(s,c,f={}){if(!("argPackAdvance"in c))throw new TypeError("registerType registeredInstance requires argPackAdvance");return function(y,v,C={}){var O=v.name;if(!y)throw new pt(`type "${O}" must have a positive integer typeid pointer`);if(vn.hasOwnProperty(y)){if(C.Rb)return;throw new pt(`Cannot register type '${O}' twice`)}vn[y]=v,delete ql[y],wn.hasOwnProperty(y)&&(v=wn[y],delete wn[y],v.forEach(B=>B()))}(s,c,f)}var si=(s,c,f)=>{switch(c){case 1:return f?y=>t()[y>>>0]:y=>r()[y>>>0];case 2:return f?y=>n()[y>>>1>>>0]:y=>o()[y>>>1>>>0];case 4:return f?y=>i()[y>>>2>>>0]:y=>a()[y>>>2>>>0];case 8:return f?y=>R[y>>>3]:y=>Y[y>>>3];default:throw new TypeError(`invalid integer width (${c}): ${s}`)}};function jl(s,c,f){f>>>=0,st(s>>>=0,{name:c=Qe(c>>>0),fromWireType:y=>y,toWireType:function(y,v){if(typeof v!="bigint"&&typeof v!="number")throw v=v===null?"null":(y=typeof v)=="object"||y==="array"||y==="function"?v.toString():""+v,new TypeError(`Cannot convert "${v}" to ${this.name}`);return typeof v=="number"&&(v=BigInt(v)),v},argPackAdvance:mt,readValueFromPointer:si(c,f,c.indexOf("u")==-1),Db:null})}var mt=8;function 
Kl(s,c,f,y){st(s>>>=0,{name:c=Qe(c>>>0),fromWireType:function(v){return!!v},toWireType:function(v,C){return C?f:y},argPackAdvance:mt,readValueFromPointer:function(v){return this.fromWireType(r()[v>>>0])},Db:null})}var $n=[],ut=[];function _n(s){9<(s>>>=0)&&--ut[s+1]==0&&(ut[s]=void 0,$n.push(s))}var He=s=>{if(!s)throw new pt("Cannot use deleted val. handle = "+s);return ut[s]},Ge=s=>{switch(s){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:let c=$n.pop()||ut.length;return ut[c]=s,ut[c+1]=1,c}};function xn(s){return this.fromWireType(a()[s>>>2>>>0])}var Yl={name:"emscripten::val",fromWireType:s=>{var c=He(s);return _n(s),c},toWireType:(s,c)=>Ge(c),argPackAdvance:mt,readValueFromPointer:xn,Db:null};function Xl(s){return st(s>>>0,Yl)}var Zl=(s,c)=>{switch(c){case 4:return function(f){return this.fromWireType(l()[f>>>2>>>0])};case 8:return function(f){return this.fromWireType(d()[f>>>3>>>0])};default:throw new TypeError(`invalid float width (${c}): ${s}`)}};function Ql(s,c,f){f>>>=0,st(s>>>=0,{name:c=Qe(c>>>0),fromWireType:y=>y,toWireType:(y,v)=>v,argPackAdvance:mt,readValueFromPointer:Zl(c,f),Db:null})}function Jl(s,c,f,y,v){if(s>>>=0,f>>>=0,c=Qe(c>>>0),v===-1&&(v=4294967295),v=B=>B,y===0){var C=32-8*f;v=B=>B<>>C}var O=c.includes("unsigned")?function(B,N){return N>>>0}:function(B,N){return N};st(s,{name:c,fromWireType:v,toWireType:O,argPackAdvance:mt,readValueFromPointer:si(c,f,y!==0),Db:null})}function ec(s,c,f){function y(C){var O=a()[C>>>2>>>0];return C=a()[C+4>>>2>>>0],new v(t().buffer,C,O)}var v=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][c];st(s>>>=0,{name:f=Qe(f>>>0),fromWireType:y,argPackAdvance:mt,readValueFromPointer:y},{Rb:!0})}function tc(s,c){s>>>=0;var f=(c=Qe(c>>>0))==="std::string";st(s,{name:c,fromWireType:function(y){var v=a()[y>>>2>>>0],C=y+4;if(f)for(var O=C,B=0;B<=v;++B){var N=C+B;if(B==v||r()[N>>>0]==0){if(O=ke(O,N-O),L===void 0)var L=O;else L+=String.fromCharCode(0),L+=O;O=N+1}}else{for(L=Array(v),B=0;B>>0]);L=L.join("")}return et(y),L},toWireType:function(y,v){v instanceof ArrayBuffer&&(v=new Uint8Array(v));var C=typeof v=="string";if(!(C||v instanceof Uint8Array||v instanceof Uint8ClampedArray||v instanceof Int8Array))throw new pt("Cannot pass non-string to std::string");var O=f&&C?bn(v):v.length,B=fr(4+O+1),N=B+4;if(a()[B>>>2>>>0]=O,f&&C)zt(v,N,O+1);else if(C)for(C=0;C>>0]=L}else for(C=0;C>>0]=v[C];return y!==null&&y.push(et,B),B},argPackAdvance:mt,readValueFromPointer:xn,Db(y){et(y)}})}var ui=typeof TextDecoder<"u"?new TextDecoder("utf-16le"):void 0,rc=(s,c)=>{for(var f=s>>1,y=f+c/2;!(f>=y)&&o()[f>>>0];)++f;if(32<(f<<=1)-s&&ui)return ui.decode(r().slice(s,f));for(f="",y=0;!(y>=c/2);++y){var v=n()[s+2*y>>>1>>>0];if(v==0)break;f+=String.fromCharCode(v)}return f},nc=(s,c,f)=>{if(f??=2147483647,2>f)return 0;var y=c;f=(f-=2)<2*s.length?f/2:s.length;for(var v=0;v>>1>>>0]=C,c+=2}return n()[c>>>1>>>0]=0,c-y},oc=s=>2*s.length,ic=(s,c)=>{for(var f=0,y="";!(f>=c/4);){var v=i()[s+4*f>>>2>>>0];if(v==0)break;++f,65536<=v?(v-=65536,y+=String.fromCharCode(55296|v>>10,56320|1023&v)):y+=String.fromCharCode(v)}return y},ac=(s,c,f)=>{if(c>>>=0,f??=2147483647,4>f)return 0;var y=c;f=y+f-4;for(var v=0;v=C&&(C=65536+((1023&C)<<10)|1023&s.charCodeAt(++v)),i()[c>>>2>>>0]=C,(c+=4)+4>f)break}return i()[c>>>2>>>0]=0,c-y},sc=s=>{for(var c=0,f=0;f=y&&++f,c+=4}return c};function uc(s,c,f){if(s>>>=0,c>>>=0,f=Qe(f>>>=0),c===2)var y=rc,v=nc,C=oc,O=B=>o()[B>>>1>>>0];else 
c===4&&(y=ic,v=ac,C=sc,O=B=>a()[B>>>2>>>0]);st(s,{name:f,fromWireType:B=>{for(var N,L=a()[B>>>2>>>0],K=B+4,pe=0;pe<=L;++pe){var fe=B+4+pe*c;pe!=L&&O(fe)!=0||(K=y(K,fe-K),N===void 0?N=K:(N+=String.fromCharCode(0),N+=K),K=fe+c)}return et(B),N},toWireType:(B,N)=>{if(typeof N!="string")throw new pt(`Cannot pass non-string to C++ string type ${f}`);var L=C(N),K=fr(4+L+c);return a()[K>>>2>>>0]=L/c,v(N,K+4,L+c),B!==null&&B.push(et,K),K},argPackAdvance:mt,readValueFromPointer:xn,Db(B){et(B)}})}function dc(s,c){st(s>>>=0,{Sb:!0,name:c=Qe(c>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var lc=()=>1;function cc(s){Pn(s>>>0,!g,1,!w,131072,!1),Vo()}var di=s=>{if(!Te)try{if(s(),!(0>>=0,typeof Atomics.jc=="function"&&(Atomics.jc(i(),s>>>2,s).value.then(ar),s+=128,Atomics.store(i(),s>>>2,1))}var ar=()=>{var s=Mt();s&&(Sn(s),di(Vi))};function pc(s,c){(s>>>=0)==c>>>0?setTimeout(ar):b?postMessage({targetThread:s,cmd:"checkMailbox"}):(s=Ze[s])&&s.postMessage({cmd:"checkMailbox"})}var In=[];function mc(s,c,f,y,v){for(c>>>=0,y/=2,In.length=y,f=v>>>0>>>3,v=0;v>>0];return(c?mn[c]:rp[s])(...In)}function fc(s){s>>>=0,b?postMessage({cmd:"cleanupThread",thread:s}):Uo(Ze[s])}function hc(s){}var Cn=(s,c)=>{var f=vn[s];if(f===void 0)throw s=Bi(s),f=Qe(s),et(s),new pt(`${c} has unknown type ${f}`);return f},li=(s,c,f)=>{var y=[];return s=s.toWireType(y,f),y.length&&(a()[c>>>2>>>0]=Ge(y)),s};function gc(s,c,f){return c>>>=0,f>>>=0,s=He(s>>>0),c=Cn(c,"emval::as"),li(c,f,s)}var sr=s=>{try{s()}catch(c){Dt(c)}},ft=0,Je=null,ci=0,ur=[],pi={},mi={},yc=0,Tn=null,bc=[];function fi(s){return function(c){if(!Te){if(ft===0){var f=!1,y=!1;c((v=0)=>{if(!Te&&(ci=v,f=!0,y)){ft=2,sr(()=>Li(Je)),typeof Browser<"u"&&Browser.Jb.Qb&&Browser.Jb.resume(),v=!1;try{var C=function(){var N=i()[Je+8>>>2>>>0];return N=j[mi[N]],--bt,N()}()}catch(N){C=N,v=!0}var O=!1;if(!Je){var B=Tn;B&&(Tn=null,(v?B.reject:B.resolve)(C),O=!0)}if(v&&!O)throw C}}),y=!0,f||(ft=1,Je=function(){var v=fr(65548),C=v+12;a()[v>>>2>>>0]=C,a()[v+4>>>2>>>0]=C+65536,C=ur[0];var O=pi[C];return O===void 0&&(O=yc++,pi[C]=O,mi[O]=C),C=O,i()[v+8>>>2>>>0]=C,v}(),typeof Browser<"u"&&Browser.Jb.Qb&&Browser.Jb.pause(),sr(()=>Hi(Je)))}else ft===2?(ft=0,sr(Fi),et(Je),Je=null,bc.forEach(di)):Dt(`invalid state: ${ft}`);return ci}}(c=>{s().then(c)})}function wc(s){return s>>>=0,fi(()=>(s=He(s)).then(Ge))}var dr=[];function vc(s,c,f,y){return f>>>=0,y>>>=0,(s=dr[s>>>0])(null,c=He(c>>>0),f,y)}var $c={},lr=s=>{var c=$c[s];return c===void 0?Qe(s):c};function _c(s,c,f,y,v){return f>>>=0,y>>>=0,v>>>=0,(s=dr[s>>>0])(c=He(c>>>0),c[f=lr(f)],y,v)}var hi=()=>typeof globalThis=="object"?globalThis:Function("return this")();function xc(s){return(s>>>=0)==0?Ge(hi()):(s=lr(s),Ge(hi()[s]))}var Sc=s=>{var c=dr.length;return dr.push(s),c},Ic=(s,c)=>{for(var f=Array(s),y=0;y>>2>>>0],"parameter "+y);return f},gi=(s,c)=>Object.defineProperty(c,"name",{value:s});function Cc(s,c,f){var y=(c=Ic(s,c>>>0)).shift();s--;var v=`return function (obj, func, destructorsRef, args) { `,C=0,O=[];f===0&&O.push("obj");for(var B=["retType"],N=[y],L=0;Lj.name).join(", ")}) => ${y.name}>`,xc(gi(f,s))}function Cc(s){return s=lr(s>>>0),Ge(u[s])}function Tc(s,c){return c>>>=0,s=He(s>>>0),c=He(c),Ge(s[c])}function Ac(s){9<(s>>>=0)&&(ut[s+1]+=1)}function Ec(){return Ge([])}function kc(s){s=He(s>>>0);for(var c=Array(s.length),f=0;f>>0))}function Oc(){return Ge({})}function zc(s){for(var c=He(s>>>=0);c.length;){var f=c.pop();c.pop()(f)}$n(s)}function Dc(s,c,f){c>>>=0,f>>>=0,s=He(s>>>0),c=He(c),f=He(f),s[c]=f}function 
Bc(s,c){return c>>>=0,s=(s=In(s>>>0,"_emval_take_value")).readValueFromPointer(c),Ge(s)}function Rc(s,c){s=-9007199254740992>s||9007199254740992>>=0,s=new Date(1e3*s),i()[c>>>2>>>0]=s.getUTCSeconds(),i()[c+4>>>2>>>0]=s.getUTCMinutes(),i()[c+8>>>2>>>0]=s.getUTCHours(),i()[c+12>>>2>>>0]=s.getUTCDate(),i()[c+16>>>2>>>0]=s.getUTCMonth(),i()[c+20>>>2>>>0]=s.getUTCFullYear()-1900,i()[c+24>>>2>>>0]=s.getUTCDay(),s=(s.getTime()-Date.UTC(s.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[c+28>>>2>>>0]=s}var Dt=s=>s%4==0&&(s%100!=0||s%400==0),yi=[0,31,60,91,121,152,182,213,244,274,305,335],bi=[0,31,59,90,120,151,181,212,243,273,304,334];function Mc(s,c){s=-9007199254740992>s||9007199254740992>>=0,s=new Date(1e3*s),i()[c>>>2>>>0]=s.getSeconds(),i()[c+4>>>2>>>0]=s.getMinutes(),i()[c+8>>>2>>>0]=s.getHours(),i()[c+12>>>2>>>0]=s.getDate(),i()[c+16>>>2>>>0]=s.getMonth(),i()[c+20>>>2>>>0]=s.getFullYear()-1900,i()[c+24>>>2>>>0]=s.getDay();var f=(Dt(s.getFullYear())?yi:bi)[s.getMonth()]+s.getDate()-1|0;i()[c+28>>>2>>>0]=f,i()[c+36>>>2>>>0]=-60*s.getTimezoneOffset(),f=new Date(s.getFullYear(),6,1).getTimezoneOffset();var y=new Date(s.getFullYear(),0,1).getTimezoneOffset();s=0|(f!=y&&s.getTimezoneOffset()==Math.min(y,f)),i()[c+32>>>2>>>0]=s}function Uc(s){s>>>=0;var c=new Date(i()[s+20>>>2>>>0]+1900,i()[s+16>>>2>>>0],i()[s+12>>>2>>>0],i()[s+8>>>2>>>0],i()[s+4>>>2>>>0],i()[s>>>2>>>0],0),f=i()[s+32>>>2>>>0],y=c.getTimezoneOffset(),v=new Date(c.getFullYear(),6,1).getTimezoneOffset(),C=new Date(c.getFullYear(),0,1).getTimezoneOffset(),O=Math.min(C,v);return 0>f?i()[s+32>>>2>>>0]=+(v!=C&&O==y):0>>2>>>0]=c.getDay(),f=(Dt(c.getFullYear())?yi:bi)[c.getMonth()]+c.getDate()-1|0,i()[s+28>>>2>>>0]=f,i()[s>>>2>>>0]=c.getSeconds(),i()[s+4>>>2>>>0]=c.getMinutes(),i()[s+8>>>2>>>0]=c.getHours(),i()[s+12>>>2>>>0]=c.getDate(),i()[s+16>>>2>>>0]=c.getMonth(),i()[s+20>>>2>>>0]=c.getYear(),s=c.getTime(),BigInt(isNaN(s)?-1:s/1e3)}function wi(s,c,f,y,v,C,O){return b?_e(16,1,s,c,f,y,v,C,O):-52}function vi(s,c,f,y,v,C){if(b)return _e(17,1,s,c,f,y,v,C)}function Vc(s,c,f,y){s>>>=0,c>>>=0,f>>>=0,y>>>=0;var v=new Date().getFullYear(),C=new Date(v,0,1),O=new Date(v,6,1);v=C.getTimezoneOffset();var B=O.getTimezoneOffset(),N=Math.max(v,B);a()[s>>>2>>>0]=60*N,i()[c>>>2>>>0]=+(v!=B),C=(s=L=>L.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:"short"}).split(" ")[1])(C),O=s(O),B{Tn.length=0;for(var f;f=r()[s++>>>0];){var y=f!=105;c+=(y&=f!=112)&&c%8?4:0,Tn.push(f==112?a()[c>>>2>>>0]:f==106?R[c>>>3]:f==105?i()[c>>>2>>>0]:d()[c>>>3>>>0]),c+=y?8:4}return Tn};function Nc(s,c,f){return s>>>=0,c=$i(c>>>0,f>>>0),mn[s](...c)}function Wc(s,c,f){return s>>>=0,c=$i(c>>>0,f>>>0),mn[s](...c)}var Hc=()=>{},Gc=()=>Date.now();function Lc(s,c){return F(ke(s>>>0,c>>>0))}var _i,Fc=()=>{throw bt+=1,"unwind"};function qc(){return 4294901760}_i=()=>performance.timeOrigin+performance.now();var Kc=()=>navigator.hardwareConcurrency;function jc(s){s>>>=0;var c=r().length;if(s<=c||4294901760=f;f*=2){var y=c*(1+.2/f);y=Math.min(y,s+100663296);var v=Math;y=Math.max(s,y);e:{v=(v.min.call(v,4294901760,y+(65536-y%65536)%65536)-X.buffer.byteLength+65535)/65536;try{X.grow(v),ve();var C=1;break e}catch{}C=void 0}if(C)return!0}return!1}var cr=()=>(Gt("Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER"),0),Bt={},xi=s=>{s.forEach(c=>{var f=cr();f&&(Bt[f]=c)})};function Yc(){var s=Error().stack.toString().split(` -`);return s[0]=="Error"&&s.shift(),xi(s),Bt.Lb=cr(),Bt.Yb=s,Bt.Lb}function Xc(s,c,f){if(s>>>=0,c>>>=0,Bt.Lb==s)var 
y=Bt.Yb;else(y=Error().stack.toString().split(` -`))[0]=="Error"&&y.shift(),xi(y);for(var v=3;y[v]&&cr()!=s;)++v;for(s=0;s>>2>>>0]=cr();return s}var An,En={},Si=()=>{if(!An){var s,c={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:(typeof navigator=="object"&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8",_:T||"./this.program"};for(s in En)En[s]===void 0?delete c[s]:c[s]=En[s];var f=[];for(s in c)f.push(`${s}=${c[s]}`);An=f}return An};function Ii(s,c){if(b)return _e(18,1,s,c);s>>>=0,c>>>=0;var f=0;return Si().forEach((y,v)=>{var C=c+f;for(v=a()[s+4*v>>>2>>>0]=C,C=0;C>>0]=y.charCodeAt(C);t()[v>>>0]=0,f+=y.length+1}),0}function Ci(s,c){if(b)return _e(19,1,s,c);s>>>=0,c>>>=0;var f=Si();a()[s>>>2>>>0]=f.length;var y=0;return f.forEach(v=>y+=v.length+1),a()[c>>>2>>>0]=y,0}function Ti(s){return b?_e(20,1,s):52}function Ai(s,c,f,y){return b?_e(21,1,s,c,f,y):52}function Ei(s,c,f,y){return b?_e(22,1,s,c,f,y):70}var Zc=[null,[],[]];function ki(s,c,f,y){if(b)return _e(23,1,s,c,f,y);c>>>=0,f>>>=0,y>>>=0;for(var v=0,C=0;C>>2>>>0],B=a()[c+4>>>2>>>0];c+=8;for(var N=0;N>>0],j=Zc[s];L===0||L===10?((s===1?W:F)(Fo(j,0)),j.length=0):j.push(L)}v+=B}return a()[y>>>2>>>0]=v,0}var Pi=[31,29,31,30,31,30,31,31,30,31,30,31],Oi=[31,28,31,30,31,30,31,31,30,31,30,31],Qc=(s,c)=>{t().set(s,c>>>0)};function zi(s,c,f,y){function v(P,ee,ge){for(P=typeof P=="number"?P.toString():P||"";P.lengthKi?-1:0vt-P.getDate())){P.setDate(P.getDate()+ee);break}ee-=vt-P.getDate()+1,P.setDate(1),11>ge?P.setMonth(ge+1):(P.setMonth(0),P.setFullYear(P.getFullYear()+1))}return ge=new Date(P.getFullYear()+1,0,4),ee=B(new Date(P.getFullYear(),0,4)),ge=B(ge),0>=O(ee,P)?0>=O(ge,P)?P.getFullYear()+1:P.getFullYear():P.getFullYear()-1}s>>>=0,c>>>=0,f>>>=0,y>>>=0;var L=a()[y+40>>>2>>>0];for(var j in y={cc:i()[y>>>2>>>0],bc:i()[y+4>>>2>>>0],Db:i()[y+8>>>2>>>0],Hb:i()[y+12>>>2>>>0],Eb:i()[y+16>>>2>>>0],zb:i()[y+20>>>2>>>0],rb:i()[y+24>>>2>>>0],yb:i()[y+28>>>2>>>0],kc:i()[y+32>>>2>>>0],ac:i()[y+36>>>2>>>0],dc:L?ke(L):""},f=ke(f),L={"%c":"%a %b %d %H:%M:%S %Y","%D":"%m/%d/%y","%F":"%Y-%m-%d","%h":"%b","%r":"%I:%M:%S %p","%R":"%H:%M","%T":"%H:%M:%S","%x":"%m/%d/%y","%X":"%H:%M:%S","%Ec":"%c","%EC":"%C","%Ex":"%m/%d/%y","%EX":"%H:%M:%S","%Ey":"%y","%EY":"%Y","%Od":"%d","%Oe":"%e","%OH":"%H","%OI":"%I","%Om":"%m","%OM":"%M","%OS":"%S","%Ou":"%u","%OU":"%U","%OV":"%V","%Ow":"%w","%OW":"%W","%Oy":"%y"})f=f.replace(new RegExp(j,"g"),L[j]);var pe="Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),fe="January February March April May June July August September October November December".split(" ");for(j in L={"%a":P=>pe[P.rb].substring(0,3),"%A":P=>pe[P.rb],"%b":P=>fe[P.Eb].substring(0,3),"%B":P=>fe[P.Eb],"%C":P=>C((P.zb+1900)/100|0,2),"%d":P=>C(P.Hb,2),"%e":P=>v(P.Hb,2," "),"%g":P=>N(P).toString().substring(2),"%G":N,"%H":P=>C(P.Db,2),"%I":P=>((P=P.Db)==0?P=12:12{for(var ee=0,ge=0;ge<=P.Eb-1;ee+=(Dt(P.zb+1900)?Pi:Oi)[ge++]);return C(P.Hb+ee,3)},"%m":P=>C(P.Eb+1,2),"%M":P=>C(P.bc,2),"%n":()=>` -`,"%p":P=>0<=P.Db&&12>P.Db?"AM":"PM","%S":P=>C(P.cc,2),"%t":()=>" ","%u":P=>P.rb||7,"%U":P=>C(Math.floor((P.yb+7-P.rb)/7),2),"%V":P=>{var ee=Math.floor((P.yb+7-(P.rb+6)%7)/7);if(2>=(P.rb+371-P.yb-2)%7&&ee++,ee)ee==53&&((ge=(P.rb+371-P.yb)%7)==4||ge==3&&Dt(P.zb)||(ee=1));else{ee=52;var ge=(P.rb+7-P.yb-1)%7;(ge==4||ge==5&&Dt(P.zb%400-1))&&ee++}return C(ee,2)},"%w":P=>P.rb,"%W":P=>C(Math.floor((P.yb+7-(P.rb+6)%7)/7),2),"%y":P=>(P.zb+1900).toString().substring(2),"%Y":P=>P.zb+1900,"%z":P=>{var 
ee=0<=(P=P.ac);return P=Math.abs(P)/60,(ee?"+":"-")+("0000"+(P/60*100+P%60)).slice(-4)},"%Z":P=>P.dc,"%%":()=>"%"},f=f.replace(/%%/g,"\0\0"),L)f.includes(j)&&(f=f.replace(new RegExp(j,"g"),L[j](y)));return j=function(P){var ee=Array(yn(P)+1);return jo(P,ee,0,ee.length),ee}(f=f.replace(/\0\0/g,"%")),j.length>c?0:(Qc(j,s),j.length-1)}function Jc(s,c,f,y){return zi(s>>>0,c>>>0,f>>>0,y>>>0)}b||function(){for(var s=u.numThreads-1;s--;)Wo();Se.unshift(()=>{Ae++,function(c){b?c():Promise.all(ct.map(No)).then(c)}(()=>or())})}();for(var Di=Array(256),pr=0;256>pr;++pr)Di[pr]=String.fromCharCode(pr);ai=Di,pt=u.BindingError=class extends Error{constructor(s){super(s),this.name="BindingError"}},u.InternalError=class extends Error{constructor(s){super(s),this.name="InternalError"}},ut.push(0,1,void 0,1,null,1,!0,1,!1,1),u.count_emval_handles=()=>ut.length/2-5-vn.length;var ep=[Bo,Ro,Ho,qo,Ko,Yo,Xo,Zo,Qo,Jo,ei,ti,ri,ni,oi,ii,wi,vi,Ii,Ci,Ti,Ai,Ei,ki],K=function(){function s(f,y){return K=f.exports,K=function(){var v=K,C={};for(let[O,B]of Object.entries(v))C[O]=typeof B=="function"?(...N)=>{ur.push(O);try{return B(...N)}finally{Te||(ur.pop(),Je&&ft===1&&ur.length===0&&(ft=0,bt+=1,sr(Gi),typeof Fibers<"u"&&Fibers.lc()))}}:B;return C}(),K=function(){var v=K,C=B=>N=>B(N)>>>0,O=B=>()=>B()>>>0;return(v=Object.assign({},v)).za=C(v.za),v.cb=O(v.cb),v.db=C(v.db),v.emscripten_main_runtime_thread_id=O(v.emscripten_main_runtime_thread_id),v.pb=C(v.pb),v.qb=O(v.qb),v}(),Mo.push(K.fb),Ot.unshift(K.ya),xe=y,or(),K}var c=zo();if(Ae++,u.instantiateWasm)try{return u.instantiateWasm(c,s)}catch(f){F(`Module.instantiateWasm callback failed with error: ${f}`),m(f)}return Ao||=u.locateFile?Eo("ort-wasm-simd-threaded.jsep.wasm")?"ort-wasm-simd-threaded.jsep.wasm":u.locateFile?u.locateFile("ort-wasm-simd-threaded.jsep.wasm",z):z+"ort-wasm-simd-threaded.jsep.wasm":new URL("ort-wasm-simd-threaded.jsep.wasm",import.meta.url).href,function(f,y){var v=Ao;return typeof WebAssembly.instantiateStreaming!="function"||Eo(v)||ko(v)||typeof fetch!="function"?Oo(v,f,y):fetch(v,{credentials:"same-origin"}).then(C=>WebAssembly.instantiateStreaming(C,f).then(y,function(O){return F(`wasm streaming compile failed: ${O}`),F("falling back to ArrayBuffer 
instantiation"),Oo(v,f,y)}))}(c,function(f){s(f.instance,f.module)}).catch(m),{}}(),Bi=s=>(Bi=K.za)(s),Ri=()=>(Ri=K.Aa)();u._OrtInit=(s,c)=>(u._OrtInit=K.Ba)(s,c),u._OrtGetLastError=(s,c)=>(u._OrtGetLastError=K.Ca)(s,c),u._OrtCreateSessionOptions=(s,c,f,y,v,C,O,B,N,L)=>(u._OrtCreateSessionOptions=K.Da)(s,c,f,y,v,C,O,B,N,L),u._OrtAppendExecutionProvider=(s,c)=>(u._OrtAppendExecutionProvider=K.Ea)(s,c),u._OrtAddFreeDimensionOverride=(s,c,f)=>(u._OrtAddFreeDimensionOverride=K.Fa)(s,c,f),u._OrtAddSessionConfigEntry=(s,c,f)=>(u._OrtAddSessionConfigEntry=K.Ga)(s,c,f),u._OrtReleaseSessionOptions=s=>(u._OrtReleaseSessionOptions=K.Ha)(s),u._OrtCreateSession=(s,c,f)=>(u._OrtCreateSession=K.Ia)(s,c,f),u._OrtReleaseSession=s=>(u._OrtReleaseSession=K.Ja)(s),u._OrtGetInputOutputCount=(s,c,f)=>(u._OrtGetInputOutputCount=K.Ka)(s,c,f),u._OrtGetInputName=(s,c)=>(u._OrtGetInputName=K.La)(s,c),u._OrtGetOutputName=(s,c)=>(u._OrtGetOutputName=K.Ma)(s,c),u._OrtFree=s=>(u._OrtFree=K.Na)(s),u._OrtCreateTensor=(s,c,f,y,v,C)=>(u._OrtCreateTensor=K.Oa)(s,c,f,y,v,C),u._OrtGetTensorData=(s,c,f,y,v)=>(u._OrtGetTensorData=K.Pa)(s,c,f,y,v),u._OrtReleaseTensor=s=>(u._OrtReleaseTensor=K.Qa)(s),u._OrtCreateRunOptions=(s,c,f,y)=>(u._OrtCreateRunOptions=K.Ra)(s,c,f,y),u._OrtAddRunConfigEntry=(s,c,f)=>(u._OrtAddRunConfigEntry=K.Sa)(s,c,f),u._OrtReleaseRunOptions=s=>(u._OrtReleaseRunOptions=K.Ta)(s),u._OrtCreateBinding=s=>(u._OrtCreateBinding=K.Ua)(s),u._OrtBindInput=(s,c,f)=>(u._OrtBindInput=K.Va)(s,c,f),u._OrtBindOutput=(s,c,f,y)=>(u._OrtBindOutput=K.Wa)(s,c,f,y),u._OrtClearBoundOutputs=s=>(u._OrtClearBoundOutputs=K.Xa)(s),u._OrtReleaseBinding=s=>(u._OrtReleaseBinding=K.Ya)(s),u._OrtRunWithBinding=(s,c,f,y,v)=>(u._OrtRunWithBinding=K.Za)(s,c,f,y,v),u._OrtRun=(s,c,f,y,v,C,O,B)=>(u._OrtRun=K._a)(s,c,f,y,v,C,O,B),u._OrtEndProfiling=s=>(u._OrtEndProfiling=K.$a)(s),u._JsepOutput=(s,c,f)=>(u._JsepOutput=K.ab)(s,c,f),u._JsepGetNodeName=s=>(u._JsepGetNodeName=K.bb)(s);var mr,Rt=()=>(Rt=K.cb)(),fr=u._malloc=s=>(fr=u._malloc=K.db)(s),et=u._free=s=>(et=u._free=K.eb)(s),kn=(s,c,f,y,v,C)=>(kn=K.hb)(s,c,f,y,v,C),Mi=()=>(Mi=K.ib)(),Ui=(s,c,f,y,v)=>(Ui=K.jb)(s,c,f,y,v),Pn=s=>(Pn=K.kb)(s),hr=s=>(hr=K.lb)(s),Vi=()=>(Vi=K.mb)(),Ni=(s,c)=>(Ni=K.nb)(s,c),gr=s=>(gr=K.ob)(s),On=s=>(On=K.pb)(s),zn=()=>(zn=K.qb)(),Wi=u.dynCall_ii=(s,c)=>(Wi=u.dynCall_ii=K.sb)(s,c),Hi=s=>(Hi=K.tb)(s),Gi=()=>(Gi=K.ub)(),Li=s=>(Li=K.vb)(s),Fi=()=>(Fi=K.wb)();function qi(){if(!(0zn(),u.stackRestore=s=>gr(s),u.stackAlloc=s=>On(s),u.UTF8ToString=ke,u.stringToUTF8=zt,u.lengthBytesUTF8=yn,lt=function s(){mr||qi(),mr||(lt=s)},qi(),h}),lp=Ta;globalThis.self?.name==="em-pthread"&&Ta()});var Mt,cp,pp,mp,ka,Pa,fp,Oa,qt=U(()=>{"use strict";Cr();Mt=!1?void 0:import.meta.url??(typeof document<"u"?document.currentScript?.src:typeof self<"u"?self.location?.href:void 0),cp=!1||typeof location>"u"?void 0:location.origin,pp=(e,t)=>{try{let r=t??Mt;return(r?new URL(e,r):new URL(e)).origin===cp}catch{return!1}},mp=async e=>{let r=await(await fetch(e,{credentials:"same-origin"})).blob();return URL.createObjectURL(r)},ka=(Ca(),yr(Ia)).default,Pa=async()=>{if(!Mt)throw new Error("Failed to load proxy worker: cannot determine the script source URL.");if(pp(Mt))return[void 0,ka()];let e=await mp(Mt);return[e,ka(e)]},fp=(Ea(),yr(Aa)).default,Oa=async(e,t,r)=>[void 0,fp]});var Hn,Gn,Br,za,hp,gp,Tr,Ie,St=U(()=>{"use strict";qt();Gn=!1,Br=!1,za=!1,hp=()=>{if(typeof SharedArrayBuffer>"u")return!1;try{return typeof MessageChannel<"u"&&new MessageChannel().port1.postMessage(new 
SharedArrayBuffer(1)),WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,5,4,1,3,1,1,10,11,1,9,0,65,0,254,16,2,0,26,11]))}catch{return!1}},gp=()=>{try{return WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,10,30,1,28,0,65,0,253,15,253,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,253,186,1,26,11]))}catch{return!1}},Tr=async e=>{if(Gn)return Promise.resolve();if(Br)throw new Error("multiple calls to 'initializeWebAssembly()' detected.");if(za)throw new Error("previous call to 'initializeWebAssembly()' failed.");Br=!0;let t=e.initTimeout,r=e.numThreads;if(!gp())throw new Error("WebAssembly SIMD is not supported in the current environment.");let n=hp();r>1&&!n&&(typeof self<"u"&&!self.crossOriginIsolated&&console.warn("env.wasm.numThreads is set to "+r+", but this will not work unless you enable crossOriginIsolated mode. See https://web.dev/cross-origin-isolation-guide/ for more info."),console.warn("WebAssembly multi-threading is not supported in the current environment. Falling back to single-threading."),e.numThreads=r=1);let o=e.wasmPaths,i=typeof o=="string"?o:void 0,a=o?.mjs,l=a?.href??a,d=o?.wasm,p=d?.href??d,[m,u]=await Oa(l,i,r>1),h=!1,w=[];if(t>0&&w.push(new Promise(g=>{setTimeout(()=>{h=!0,g()},t)})),w.push(new Promise((g,b)=>{let x={numThreads:r};(p||i)&&(x.locateFile=(_,$)=>p??(i??$)+_),u(x).then(_=>{Br=!1,Gn=!0,Hn=_,g(),m&&URL.revokeObjectURL(m)},_=>{Br=!1,za=!0,b(_)})})),await Promise.race(w),h)throw new Error(`WebAssembly backend initializing failed due to timeout: ${t}ms`)},Ie=()=>{if(Gn&&Hn)return Hn;throw new Error("WebAssembly is not initialized yet.")}});var Ee,jt,$e,Rr=U(()=>{"use strict";St();Ee=(e,t)=>{let r=Ie(),n=r.lengthBytesUTF8(e)+1,o=r._malloc(n);return r.stringToUTF8(e,o,n),t.push(o),o},jt=(e,t,r,n)=>{if(typeof e=="object"&&e!==null){if(r.has(e))throw new Error("Circular reference in options");r.add(e)}Object.entries(e).forEach(([o,i])=>{let a=t?t+o:o;if(typeof i=="object")jt(i,a+".",r,n);else if(typeof i=="string"||typeof i=="number")n(a,i.toString());else if(typeof i=="boolean")n(a,i?"1":"0");else throw new Error(`Can't handle extra config type: ${typeof i}`)})},$e=e=>{let t=Ie(),r=t.stackSave();try{let n=t.stackAlloc(8);t._OrtGetLastError(n,n+4);let o=t.HEAP32[n/4],i=t.HEAPU32[n/4+1],a=i?t.UTF8ToString(i):"";throw new Error(`${e} ERROR_CODE: ${o}, ERROR_MESSAGE: ${a}`)}finally{t.stackRestore(r)}}});var Da,Ba=U(()=>{"use strict";St();Rr();Da=e=>{let t=Ie(),r=0,n=[],o=e||{};try{if(e?.logSeverityLevel===void 0)o.logSeverityLevel=2;else if(typeof e.logSeverityLevel!="number"||!Number.isInteger(e.logSeverityLevel)||e.logSeverityLevel<0||e.logSeverityLevel>4)throw new Error(`log serverity level is not valid: ${e.logSeverityLevel}`);if(e?.logVerbosityLevel===void 0)o.logVerbosityLevel=0;else if(typeof e.logVerbosityLevel!="number"||!Number.isInteger(e.logVerbosityLevel))throw new Error(`log verbosity level is not valid: ${e.logVerbosityLevel}`);e?.terminate===void 0&&(o.terminate=!1);let i=0;return e?.tag!==void 0&&(i=Ee(e.tag,n)),r=t._OrtCreateRunOptions(o.logSeverityLevel,o.logVerbosityLevel,!!o.terminate,i),r===0&&$e("Can't create run options."),e?.extra!==void 0&&jt(e.extra,"",new WeakSet,(a,l)=>{let d=Ee(a,n),p=Ee(l,n);t._OrtAddRunConfigEntry(r,d,p)!==0&&$e(`Can't set a run config entry: ${a} - ${l}.`)}),[r,n]}catch(i){throw r!==0&&t._OrtReleaseRunOptions(r),n.forEach(a=>t._free(a)),i}}});var yp,bp,wp,vp,Ra,Ma=U(()=>{"use strict";St();Rr();yp=e=>{switch(e){case"disabled":return 0;case"basic":return 
1;case"extended":return 2;case"all":return 99;default:throw new Error(`unsupported graph optimization level: ${e}`)}},bp=e=>{switch(e){case"sequential":return 0;case"parallel":return 1;default:throw new Error(`unsupported execution mode: ${e}`)}},wp=e=>{e.extra||(e.extra={}),e.extra.session||(e.extra.session={});let t=e.extra.session;t.use_ort_model_bytes_directly||(t.use_ort_model_bytes_directly="1"),e.executionProviders&&e.executionProviders.some(r=>(typeof r=="string"?r:r.name)==="webgpu")&&(e.enableMemPattern=!1)},vp=(e,t,r)=>{for(let n of t){let o=typeof n=="string"?n:n.name;switch(o){case"webnn":if(o="WEBNN",typeof n!="string"){let l=n?.deviceType;if(l){let d=Ee("deviceType",r),p=Ee(l,r);Ie()._OrtAddSessionConfigEntry(e,d,p)!==0&&$e(`Can't set a session config entry: 'deviceType' - ${l}.`)}}break;case"webgpu":if(o="JS",typeof n!="string"){let a=n;if(a?.preferredLayout){if(a.preferredLayout!=="NCHW"&&a.preferredLayout!=="NHWC")throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${a.preferredLayout}`);let l=Ee("preferredLayout",r),d=Ee(a.preferredLayout,r);Ie()._OrtAddSessionConfigEntry(e,l,d)!==0&&$e(`Can't set a session config entry: 'preferredLayout' - ${a.preferredLayout}.`)}}break;case"wasm":case"cpu":continue;default:throw new Error(`not supported execution provider: ${o}`)}let i=Ee(o,r);Ie()._OrtAppendExecutionProvider(e,i)!==0&&$e(`Can't append execution provider: ${o}.`)}},Ra=e=>{let t=Ie(),r=0,n=[],o=e||{};wp(o);try{let i=yp(o.graphOptimizationLevel??"all"),a=bp(o.executionMode??"sequential"),l=typeof o.logId=="string"?Ee(o.logId,n):0,d=o.logSeverityLevel??2;if(!Number.isInteger(d)||d<0||d>4)throw new Error(`log serverity level is not valid: ${d}`);let p=o.logVerbosityLevel??0;if(!Number.isInteger(p)||p<0||p>4)throw new Error(`log verbosity level is not valid: ${p}`);let m=typeof o.optimizedModelFilePath=="string"?Ee(o.optimizedModelFilePath,n):0;if(r=t._OrtCreateSessionOptions(i,!!o.enableCpuMemArena,!!o.enableMemPattern,a,!!o.enableProfiling,0,l,d,p,m),r===0&&$e("Can't create session options."),o.executionProviders&&vp(r,o.executionProviders,n),o.enableGraphCapture!==void 0){if(typeof o.enableGraphCapture!="boolean")throw new Error(`enableGraphCapture must be a boolean value: ${o.enableGraphCapture}`);let u=Ee("enableGraphCapture",n),h=Ee(o.enableGraphCapture.toString(),n);t._OrtAddSessionConfigEntry(r,u,h)!==0&&$e(`Can't set a session config entry: 'enableGraphCapture' - ${o.enableGraphCapture}.`)}if(o.freeDimensionOverrides)for(let[u,h]of Object.entries(o.freeDimensionOverrides)){if(typeof u!="string")throw new Error(`free dimension override name must be a string: ${u}`);if(typeof h!="number"||!Number.isInteger(h)||h<0)throw new Error(`free dimension override value must be a non-negative integer: ${h}`);let w=Ee(u,n);t._OrtAddFreeDimensionOverride(r,w,h)!==0&&$e(`Can't set a free dimension override: ${u} - ${h}.`)}return o.extra!==void 0&&jt(o.extra,"",new WeakSet,(u,h)=>{let w=Ee(u,n),g=Ee(h,n);t._OrtAddSessionConfigEntry(r,w,g)!==0&&$e(`Can't set a session config entry: ${u} - ${h}.`)}),[r,n]}catch(i){throw r!==0&&t._OrtReleaseSessionOptions(r),n.forEach(a=>t._free(a)),i}}});var Ln,ht,It,Mr,Yt,Ur,Fn,Q=U(()=>{"use strict";Ln=e=>{switch(e){case"int8":return 3;case"uint8":return 2;case"bool":return 9;case"int16":return 5;case"uint16":return 4;case"int32":return 6;case"uint32":return 12;case"float16":return 10;case"float32":return 1;case"float64":return 11;case"string":return 8;case"int64":return 7;case"uint64":return 13;default:throw new Error(`unsupported 
data type: ${e}`)}},ht=e=>{switch(e){case 3:return"int8";case 2:return"uint8";case 9:return"bool";case 5:return"int16";case 4:return"uint16";case 6:return"int32";case 12:return"uint32";case 10:return"float16";case 1:return"float32";case 11:return"float64";case 8:return"string";case 7:return"int64";case 13:return"uint64";default:throw new Error(`unsupported data type: ${e}`)}},It=e=>[void 0,4,1,1,2,2,4,8,void 0,1,2,8,4,8,void 0,void 0,void 0][e],Mr=e=>{switch(e){case"float16":return typeof Float16Array<"u"&&Float16Array.from?Float16Array:Uint16Array;case"float32":return Float32Array;case"uint8":return Uint8Array;case"int8":return Int8Array;case"uint16":return Uint16Array;case"int16":return Int16Array;case"int32":return Int32Array;case"bool":return Uint8Array;case"float64":return Float64Array;case"uint32":return Uint32Array;case"int64":return BigInt64Array;case"uint64":return BigUint64Array;default:throw new Error(`unsupported type: ${e}`)}},Yt=e=>{switch(e){case"verbose":return 0;case"info":return 1;case"warning":return 2;case"error":return 3;case"fatal":return 4;default:throw new Error(`unsupported logging level: ${e}`)}},Ur=e=>e==="float32"||e==="float16"||e==="int32"||e==="int64"||e==="uint32"||e==="uint8"||e==="bool",Fn=e=>{switch(e){case"none":return 0;case"cpu":return 1;case"cpu-pinned":return 2;case"texture":return 3;case"gpu-buffer":return 4;default:throw new Error(`unsupported data location: ${e}`)}}});var Xt,qn=U(()=>{"use strict";Cr();Xt=async e=>{if(typeof e=="string")if(!1)try{let{readFile:t}=Bn("node:fs/promises");return new Uint8Array(await t(e))}catch(t){if(t.code==="ERR_FS_FILE_TOO_LARGE"){let{createReadStream:r}=Bn("node:fs"),n=r(e),o=[];for await(let i of n)o.push(i);return new Uint8Array(Buffer.concat(o))}throw t}else{let t=await fetch(e);if(!t.ok)throw new Error(`failed to load external data file: ${e}`);let r=t.headers.get("Content-Length"),n=r?parseInt(r,10):0;if(n<1073741824)return new Uint8Array(await t.arrayBuffer());{if(!t.body)throw new Error(`failed to load external data file: ${e}, no response body.`);let o=t.body.getReader(),i;try{i=new ArrayBuffer(n)}catch(l){if(l instanceof RangeError){let d=Math.ceil(n/65536);i=new WebAssembly.Memory({initial:d,maximum:d}).buffer}else throw l}let a=0;for(;;){let{done:l,value:d}=await o.read();if(l)break;let p=d.byteLength;new Uint8Array(i,a,p).set(d),a+=p}return new Uint8Array(i,0,n)}}else return e instanceof Blob?new Uint8Array(await e.arrayBuffer()):e instanceof Uint8Array?e:new Uint8Array(e)}});var $p,_p,Ua,Va,Na,xp,be,dt=U(()=>{"use strict";Q();$p=["V","I","W","E","F"],_p=(e,t)=>{console.log(`[${$p[e]},${new Date().toISOString()}]${t}`)},Na=(e,t)=>{Ua=e,Va=t},xp=(e,t)=>{let r=Yt(e),n=Yt(Ua);r>=n&&_p(r,typeof t=="function"?t():t)},be=(...e)=>{Va&&xp(...e)}});var Wa,Ha=U(()=>{"use strict";Q();Wa=(e,t)=>new(Mr(t))(e)});var Vr=U(()=>{"use strict"});var Ga,Kn,jn,Sp,Ip,La,Xn,Yn,qa,Ka=U(()=>{"use strict";dt();Vr();Ga=new Map([[64,250],[128,200],[256,200],[512,200],[2048,230],[4096,200],[8192,50],[16384,50],[32768,50],[65536,50],[131072,50],[262144,50],[524288,50],[1048576,50],[2097152,30],[4194304,20],[8388608,10],[12582912,10],[16777216,10],[26214400,15],[33554432,22],[44236800,2],[58982400,6],[67108864,6],[134217728,6],[167772160,6]]),Kn=[],jn=e=>Math.ceil(e/16)*16,Sp=e=>{for(let t=0;tIp++,Xn=async(e,t,r,n)=>{let o=jn(r),i=e.device.createBuffer({size:o,usage:GPUBufferUsage.COPY_DST|GPUBufferUsage.MAP_READ});try{let a=e.getCommandEncoder();e.endComputePass(),a.copyBufferToBuffer(t,0,i,0,o),e.flush(),await 
i.mapAsync(GPUMapMode.READ);let l=i.getMappedRange();if(n){let d=n();return d.set(new Uint8Array(l,0,r)),d}else return new Uint8Array(l.slice(0,r))}finally{i.destroy()}},Yn=class{constructor(t){this.backend=t;this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.buffersForUploadingPending=[],this.buffersPending=[],this.externalBuffers=new Map,this.capturedPendingBuffers=new Map;for(let[r]of Ga)Kn.push(r),this.freeBuffers.set(r,[]),this.freeUniformBuffers.set(r,[])}upload(t,r){let n=r.buffer,o=r.byteOffset,i=r.byteLength,a=jn(i),l=this.storageCache.get(t);if(!l)throw new Error("gpu data for uploading does not exist");if(l.originalSize!==i)throw new Error(`inconsistent data size. gpu data size=${l.originalSize}, data size=${i}`);let d=this.backend.device.createBuffer({mappedAtCreation:!0,size:a,usage:GPUBufferUsage.MAP_WRITE|GPUBufferUsage.COPY_SRC}),p=d.getMappedRange();new Uint8Array(p).set(new Uint8Array(n,o,i)),d.unmap();let m=this.backend.getCommandEncoder();this.backend.endComputePass(),m.copyBufferToBuffer(d,0,l.gpuData.buffer,0,a),be("verbose",()=>`[WebGPU] GpuDataManager.upload(id=${t})`),this.buffersForUploadingPending.push(d)}memcpy(t,r){let n=this.storageCache.get(t);if(!n)throw new Error("source gpu data for memcpy does not exist");let o=this.storageCache.get(r);if(!o)throw new Error("destination gpu data for memcpy does not exist");if(n.originalSize!==o.originalSize)throw new Error("inconsistent source and destination gpu data size");let i=jn(n.originalSize),a=this.backend.getCommandEncoder();this.backend.endComputePass(),a.copyBufferToBuffer(n.gpuData.buffer,0,o.gpuData.buffer,0,i)}registerExternalBuffer(t,r,n){let o;if(n){if(o=this.externalBuffers.get(n),o===void 0)throw new Error("previous buffer is not registered");if(t===n)return be("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${r}) => id=${o}, buffer is the same, skip.`),o;if(this.backend.capturedCommandList.has(this.backend.currentSessionId))throw new Error(`Registering a different external buffer under graph capture mode is not supported yet. 
- Please use the previous external buffer!`);this.externalBuffers.delete(n)}else o=La();return this.storageCache.set(o,{gpuData:{id:o,type:0,buffer:t},originalSize:r}),this.externalBuffers.set(t,o),be("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${r}) => id=${o}, registered.`),o}unregisterExternalBuffer(t){let r=this.externalBuffers.get(t);r!==void 0&&(this.storageCache.delete(r),this.externalBuffers.delete(t),be("verbose",()=>`[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${r}`))}create(t,r=GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_SRC|GPUBufferUsage.COPY_DST){let n=Sp(t),o,i=(r&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE,a=(r&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM;if(i||a){let p=(i?this.freeBuffers:this.freeUniformBuffers).get(n);p?p.length>0?o=p.pop():o=this.backend.device.createBuffer({size:n,usage:r}):o=this.backend.device.createBuffer({size:n,usage:r})}else o=this.backend.device.createBuffer({size:n,usage:r});let l={id:La(),type:0,buffer:o};return this.storageCache.set(l.id,{gpuData:l,originalSize:t}),be("verbose",()=>`[WebGPU] GpuDataManager.create(size=${t}) => id=${l.id}`),l}get(t){return this.storageCache.get(t)?.gpuData}release(t){let r=this.storageCache.get(t);if(!r)throw new Error("releasing data does not exist");return be("verbose",()=>`[WebGPU] GpuDataManager.release(id=${t}), gpuDataId=${r.gpuData.id}`),this.storageCache.delete(t),this.buffersPending.push(r.gpuData.buffer),r.originalSize}async download(t,r){let n=this.storageCache.get(t);if(!n)throw new Error("data does not exist");await Xn(this.backend,n.gpuData.buffer,n.originalSize,r)}refreshPendingBuffers(){for(let t of this.buffersForUploadingPending)t.destroy();if(this.buffersForUploadingPending=[],this.buffersPending.length!==0)if(this.backend.sessionStatus==="default"){for(let t of this.buffersPending){let r=Ga.get(t.size);if((t.usage&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE){let n=this.freeBuffers.get(t.size)||[];r===void 0||n.length>=r?t.destroy():n.push(t)}else if((t.usage&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM){let n=this.freeUniformBuffers.get(t.size)||[];r===void 0||n.length>=r?t.destroy():n.push(t)}else t.destroy()}this.buffersPending=[]}else{let t=this.capturedPendingBuffers.get(this.backend.currentSessionId);t||(t=[],this.capturedPendingBuffers.set(this.backend.currentSessionId,t));for(let r of this.buffersPending)t.push(r);this.buffersPending=[]}}dispose(){this.freeBuffers.forEach(t=>{t.forEach(r=>{r.destroy()})}),this.freeUniformBuffers.forEach(t=>{t.forEach(r=>{r.destroy()})}),this.storageCache.forEach(t=>{t.gpuData.buffer.destroy()}),this.capturedPendingBuffers.forEach(t=>{t.forEach(r=>{r.destroy()})}),this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.capturedPendingBuffers=new Map}onReleaseSession(t){let r=this.capturedPendingBuffers.get(t);r&&(r.forEach(n=>{n.destroy()}),this.capturedPendingBuffers.delete(t))}},qa=(...e)=>new Yn(...e)});var Zn,te,Ce=U(()=>{"use strict";Zn=class{constructor(t){Object.assign(this,t)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(t=>`${this[t]}`).join(";")),this.key}},te=e=>new Zn(e)});var Qn,tt,k,Ct,Nr,Wr,Hr,ae=U(()=>{"use strict";Qn=class{static calcMatMulShape(t,r){return t[1]!==r[0]?void 0:[t[0],r[1]]}},tt=class{static calcShape(t,r,n=!1){let o=t.length,i=r.length;if(o===0)return r;if(i===0)return t;let a=Math.max(t.length,r.length),l=new Array(a);if(n){if(o<2||i<2)return;let 
d=Qn.calcMatMulShape([t[o-2],t[o-1]],[r[i-2],r[i-1]]);if(d===void 0)return;[l[a-2],l[a-1]]=d}for(let d=n?3:1;d<=a;d++){let p=o-d<0?1:t[o-d],m=i-d<0?1:r[i-d];if(p!==m&&p>1&&m>1)return;let u=Math.max(p,m);if(p&&m)l[a-d]=Math.max(p,m);else{if(u>1)return;l[a-d]=0}}return l}static isValidBroadcast(t,r){let n=t.length,o=r.length;if(n>o)return!1;for(let i=1;i<=n;i++)if(t[n-i]!==1&&t[n-i]!==r[o-i])return!1;return!0}},k=class e{static size(t){return e.getSizeFromDimensionRange(t,0,t.length)}static convertShape(t,r=4){let n=t.length;if(n===0)return[];let o=new Array(n),i=n-1;for(;i>=0;){if(t[i]%r===0){o[i]=t[i]/r;break}if(r%t[i]!==0)throw new Error("cannot convert shape");o[i]=1,r/=t[i],i--}for(i--;i>=0;i--)o[i]=t[i];return o}static sizeFromDimension(t,r){if(r<0||r>t.length)throw new Error(`invalid dimension of ${r} for sizeFromDimension as Tensor has ${t.length} dimensions.`);return e.getSizeFromDimensionRange(t,r,t.length)}static sizeToDimension(t,r){if(r<0||r>t.length)throw new Error(`invalid dimension of ${r} for sizeToDimension as Tensor has ${t.length} dimensions.`);return e.getSizeFromDimensionRange(t,0,r)}static getSizeFromDimensionRange(t,r,n){let o=1;for(let i=r;i=0;--o)n[o]=n[o+1]*t[o+1];return n}static normalizeAxis(t,r){if(t<-r&&t>=r)throw new Error("unsupported axis for this operation.");return t<0?t+r:t}static normalizeAxes(t,r){return t.map(n=>this.normalizeAxis(n,r??t.length))}static sortBasedOnPerm(t,r){return r?r.map(n=>t[n]):t.slice().reverse()}static padShape(t,r){let n=t.length;return t.map((o,i)=>o+r[i]+r[i+n])}static areEqual(t,r){return t.length!==r.length?!1:t.every((n,o)=>n===r[o])}},Ct=class e{static adjustPoolAttributes(t,r,n,o,i,a){if(!t&&n.length!==r.length-2)throw new Error("length of specified kernel shapes should be 2 less than length of input dimensions");if(t)for(let l=0;l=n.length?n.push(r[l+2]):n[l]=r[l+2];for(let l=0;l=n[l]||a[l+n.length]>=n[l])throw new Error("pads should be smaller than kernel")}}static adjustPadsBasedOnAutoPad(t,r,n,o,i,a,l){if(l){if(i.length!==2*(t.length-2))throw new Error("length of pads should be twice the length of data dimensions");if(r.length!==t.length-2)throw new Error("length of strides should be the length of data dimensions");if(o.length!==t.length-2)throw new Error("length of kernel shapes should be the length of data dimensions");for(let d=0;d{"use strict";Q();ae();Tt=64,eo=(e,t)=>{if(t===3)throw new Error("vec3 has same alignment as vec4, use vec4 instead");switch(e){case 10:return t>1?`vec${t}`:"f16";case 1:return t>1?`vec${t}`:"f32";case 6:return t>1?`vec${t}`:"i32";case 12:return t>1?`vec${t}`:"u32";case 7:if(t>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","i32"];case 13:if(t>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","u32"];case 9:if(t!==4)throw new Error("bool must be vec4");return["u32","vec4"];default:throw new Error(`Unknown data type: ${e}`)}},he=(e,t=1)=>{let r=eo(e,t);return typeof r=="string"?r:r[0]},Oe=(e,t=1)=>{let r=eo(e,t);return typeof r=="string"?r:r[1]},V=(...e)=>{let t=[];return e.forEach(r=>{r.length!==0&&t.push({type:12,data:r},{type:12,data:k.computeStrides(r)})}),t},we=e=>e%4===0?4:e%2===0?2:1,gt=(e="f32",t,r="0")=>!t||t===1?`${e}(${r})`:`vec${t}<${e}>(${r})`,At=(e,t,r)=>e==="f32"?r:t===1?`f32(${r})`:`vec${t}(${r})`,qe=(e,t)=>t===4?`(${e}.x + ${e}.y + ${e}.z + ${e}.w)`:t===2?`(${e}.x + ${e}.y)`:t===3?`(${e}.x + ${e}.y + ${e}.z)`:e,G=(e,t,r,n)=>e.startsWith("uniforms.")&&r>4?typeof t=="string"?n==="f16"?`${e}[(${t}) / 8][(${t}) % 8 / 
4][(${t}) % 8 % 4]`:`${e}[(${t}) / 4][(${t}) % 4]`:n==="f16"?`${e}[${Math.floor(t/8)}][${Math.floor(t%8/4)}][${t%8%4}]`:`${e}[${Math.floor(t/4)}][${t%4}]`:r>1?`${e}[${t}]`:e,to=(e,t,r,n,o)=>{let i=typeof r=="number",a=i?r:r.length,l=[...new Array(a).keys()],d=a<2?"u32":a<=4?`vec${a}`:`array`,p=eo(t,o),m=typeof p=="string"?p:p[1],u=typeof p=="string"?p:p[0],h={indices:d,value:m,storage:u,tensor:t},w=R=>typeof R=="string"?R:`${R}u`,g={offsetToIndices:!1,indicesToOffset:!1,broadcastedIndicesToOffset:!1,set:!1,setByIndices:!1,get:!1,getByIndices:!1},b=i?"uniforms.":"",x=`${b}${e}_shape`,_=`${b}${e}_strides`,$="";for(let R=0;RK.name).join(", ")}) => ${y.name}>`,Sc(gi(f,s))}function Tc(s){return s=lr(s>>>0),Ge(u[s])}function Ac(s,c){return c>>>=0,s=He(s>>>0),c=He(c),Ge(s[c])}function Ec(s){9<(s>>>=0)&&(ut[s+1]+=1)}function kc(){return Ge([])}function Pc(s){s=He(s>>>0);for(var c=Array(s.length),f=0;f>>0))}function Dc(){return Ge({})}function zc(s){for(var c=He(s>>>=0);c.length;){var f=c.pop();c.pop()(f)}_n(s)}function Bc(s,c,f){c>>>=0,f>>>=0,s=He(s>>>0),c=He(c),f=He(f),s[c]=f}function Rc(s,c){return c>>>=0,s=(s=Cn(s>>>0,"_emval_take_value")).readValueFromPointer(c),Ge(s)}function Mc(s,c){s=-9007199254740992>s||9007199254740992>>=0,s=new Date(1e3*s),i()[c>>>2>>>0]=s.getUTCSeconds(),i()[c+4>>>2>>>0]=s.getUTCMinutes(),i()[c+8>>>2>>>0]=s.getUTCHours(),i()[c+12>>>2>>>0]=s.getUTCDate(),i()[c+16>>>2>>>0]=s.getUTCMonth(),i()[c+20>>>2>>>0]=s.getUTCFullYear()-1900,i()[c+24>>>2>>>0]=s.getUTCDay(),s=(s.getTime()-Date.UTC(s.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[c+28>>>2>>>0]=s}var Bt=s=>s%4==0&&(s%100!=0||s%400==0),yi=[0,31,60,91,121,152,182,213,244,274,305,335],bi=[0,31,59,90,120,151,181,212,243,273,304,334];function Uc(s,c){s=-9007199254740992>s||9007199254740992>>=0,s=new Date(1e3*s),i()[c>>>2>>>0]=s.getSeconds(),i()[c+4>>>2>>>0]=s.getMinutes(),i()[c+8>>>2>>>0]=s.getHours(),i()[c+12>>>2>>>0]=s.getDate(),i()[c+16>>>2>>>0]=s.getMonth(),i()[c+20>>>2>>>0]=s.getFullYear()-1900,i()[c+24>>>2>>>0]=s.getDay();var f=(Bt(s.getFullYear())?yi:bi)[s.getMonth()]+s.getDate()-1|0;i()[c+28>>>2>>>0]=f,i()[c+36>>>2>>>0]=-60*s.getTimezoneOffset(),f=new Date(s.getFullYear(),6,1).getTimezoneOffset();var y=new Date(s.getFullYear(),0,1).getTimezoneOffset();s=0|(f!=y&&s.getTimezoneOffset()==Math.min(y,f)),i()[c+32>>>2>>>0]=s}function Vc(s){s>>>=0;var c=new Date(i()[s+20>>>2>>>0]+1900,i()[s+16>>>2>>>0],i()[s+12>>>2>>>0],i()[s+8>>>2>>>0],i()[s+4>>>2>>>0],i()[s>>>2>>>0],0),f=i()[s+32>>>2>>>0],y=c.getTimezoneOffset(),v=new Date(c.getFullYear(),6,1).getTimezoneOffset(),C=new Date(c.getFullYear(),0,1).getTimezoneOffset(),O=Math.min(C,v);return 0>f?i()[s+32>>>2>>>0]=+(v!=C&&O==y):0>>2>>>0]=c.getDay(),f=(Bt(c.getFullYear())?yi:bi)[c.getMonth()]+c.getDate()-1|0,i()[s+28>>>2>>>0]=f,i()[s>>>2>>>0]=c.getSeconds(),i()[s+4>>>2>>>0]=c.getMinutes(),i()[s+8>>>2>>>0]=c.getHours(),i()[s+12>>>2>>>0]=c.getDate(),i()[s+16>>>2>>>0]=c.getMonth(),i()[s+20>>>2>>>0]=c.getYear(),s=c.getTime(),BigInt(isNaN(s)?-1:s/1e3)}function wi(s,c,f,y,v,C,O){return b?_e(16,1,s,c,f,y,v,C,O):-52}function vi(s,c,f,y,v,C){if(b)return _e(17,1,s,c,f,y,v,C)}function Nc(s,c,f,y){s>>>=0,c>>>=0,f>>>=0,y>>>=0;var v=new Date().getFullYear(),C=new Date(v,0,1),O=new Date(v,6,1);v=C.getTimezoneOffset();var B=O.getTimezoneOffset(),N=Math.max(v,B);a()[s>>>2>>>0]=60*N,i()[c>>>2>>>0]=+(v!=B),C=(s=L=>L.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:"short"}).split(" ")[1])(C),O=s(O),B{An.length=0;for(var f;f=r()[s++>>>0];){var 
y=f!=105;c+=(y&=f!=112)&&c%8?4:0,An.push(f==112?a()[c>>>2>>>0]:f==106?R[c>>>3]:f==105?i()[c>>>2>>>0]:d()[c>>>3>>>0]),c+=y?8:4}return An};function Wc(s,c,f){return s>>>=0,c=$i(c>>>0,f>>>0),mn[s](...c)}function Hc(s,c,f){return s>>>=0,c=$i(c>>>0,f>>>0),mn[s](...c)}var Gc=()=>{},Lc=()=>Date.now();function Fc(s,c){return F(ke(s>>>0,c>>>0))}var _i,qc=()=>{throw bt+=1,"unwind"};function jc(){return 4294901760}_i=()=>performance.timeOrigin+performance.now();var Kc=()=>navigator.hardwareConcurrency;function Yc(){return Dt("Cannot use emscripten_pc_get_function without -sUSE_OFFSET_CONVERTER"),0}function Xc(s){s>>>=0;var c=r().length;if(s<=c||4294901760=f;f*=2){var y=c*(1+.2/f);y=Math.min(y,s+100663296);var v=Math;y=Math.max(s,y);e:{v=(v.min.call(v,4294901760,y+(65536-y%65536)%65536)-X.buffer.byteLength+65535)/65536;try{X.grow(v),ve();var C=1;break e}catch{}C=void 0}if(C)return!0}return!1}var cr=()=>(Dt("Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER"),0),Rt={},xi=s=>{s.forEach(c=>{var f=cr();f&&(Rt[f]=c)})};function Zc(){var s=Error().stack.toString().split(` +`);return s[0]=="Error"&&s.shift(),xi(s),Rt.Ob=cr(),Rt.ac=s,Rt.Ob}function Qc(s,c,f){if(s>>>=0,c>>>=0,Rt.Ob==s)var y=Rt.ac;else(y=Error().stack.toString().split(` +`))[0]=="Error"&&y.shift(),xi(y);for(var v=3;y[v]&&cr()!=s;)++v;for(s=0;s>>2>>>0]=cr();return s}var En,kn={},Si=()=>{if(!En){var s,c={USER:"web_user",LOGNAME:"web_user",PATH:"/",PWD:"/",HOME:"/home/web_user",LANG:(typeof navigator=="object"&&navigator.languages&&navigator.languages[0]||"C").replace("-","_")+".UTF-8",_:T||"./this.program"};for(s in kn)kn[s]===void 0?delete c[s]:c[s]=kn[s];var f=[];for(s in c)f.push(`${s}=${c[s]}`);En=f}return En};function Ii(s,c){if(b)return _e(18,1,s,c);s>>>=0,c>>>=0;var f=0;return Si().forEach((y,v)=>{var C=c+f;for(v=a()[s+4*v>>>2>>>0]=C,C=0;C>>0]=y.charCodeAt(C);t()[v>>>0]=0,f+=y.length+1}),0}function Ci(s,c){if(b)return _e(19,1,s,c);s>>>=0,c>>>=0;var f=Si();a()[s>>>2>>>0]=f.length;var y=0;return f.forEach(v=>y+=v.length+1),a()[c>>>2>>>0]=y,0}function Ti(s){return b?_e(20,1,s):52}function Ai(s,c,f,y){return b?_e(21,1,s,c,f,y):52}function Ei(s,c,f,y){return b?_e(22,1,s,c,f,y):70}var Jc=[null,[],[]];function ki(s,c,f,y){if(b)return _e(23,1,s,c,f,y);c>>>=0,f>>>=0,y>>>=0;for(var v=0,C=0;C>>2>>>0],B=a()[c+4>>>2>>>0];c+=8;for(var N=0;N>>0],K=Jc[s];L===0||L===10?((s===1?W:F)(Fo(K,0)),K.length=0):K.push(L)}v+=B}return a()[y>>>2>>>0]=v,0}var Pi=[31,29,31,30,31,30,31,31,30,31,30,31],Oi=[31,28,31,30,31,30,31,31,30,31,30,31],ep=(s,c)=>{t().set(s,c>>>0)};function Di(s,c,f,y){function v(P,ee,ge){for(P=typeof P=="number"?P.toString():P||"";P.lengthji?-1:0vt-P.getDate())){P.setDate(P.getDate()+ee);break}ee-=vt-P.getDate()+1,P.setDate(1),11>ge?P.setMonth(ge+1):(P.setMonth(0),P.setFullYear(P.getFullYear()+1))}return ge=new Date(P.getFullYear()+1,0,4),ee=B(new Date(P.getFullYear(),0,4)),ge=B(ge),0>=O(ee,P)?0>=O(ge,P)?P.getFullYear()+1:P.getFullYear():P.getFullYear()-1}s>>>=0,c>>>=0,f>>>=0,y>>>=0;var L=a()[y+40>>>2>>>0];for(var K in y={fc:i()[y>>>2>>>0],ec:i()[y+4>>>2>>>0],Gb:i()[y+8>>>2>>>0],Kb:i()[y+12>>>2>>>0],Hb:i()[y+16>>>2>>>0],Cb:i()[y+20>>>2>>>0],ub:i()[y+24>>>2>>>0],Bb:i()[y+28>>>2>>>0],nc:i()[y+32>>>2>>>0],dc:i()[y+36>>>2>>>0],hc:L?ke(L):""},f=ke(f),L={"%c":"%a %b %d %H:%M:%S %Y","%D":"%m/%d/%y","%F":"%Y-%m-%d","%h":"%b","%r":"%I:%M:%S 
%p","%R":"%H:%M","%T":"%H:%M:%S","%x":"%m/%d/%y","%X":"%H:%M:%S","%Ec":"%c","%EC":"%C","%Ex":"%m/%d/%y","%EX":"%H:%M:%S","%Ey":"%y","%EY":"%Y","%Od":"%d","%Oe":"%e","%OH":"%H","%OI":"%I","%Om":"%m","%OM":"%M","%OS":"%S","%Ou":"%u","%OU":"%U","%OV":"%V","%Ow":"%w","%OW":"%W","%Oy":"%y"})f=f.replace(new RegExp(K,"g"),L[K]);var pe="Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),fe="January February March April May June July August September October November December".split(" ");for(K in L={"%a":P=>pe[P.ub].substring(0,3),"%A":P=>pe[P.ub],"%b":P=>fe[P.Hb].substring(0,3),"%B":P=>fe[P.Hb],"%C":P=>C((P.Cb+1900)/100|0,2),"%d":P=>C(P.Kb,2),"%e":P=>v(P.Kb,2," "),"%g":P=>N(P).toString().substring(2),"%G":N,"%H":P=>C(P.Gb,2),"%I":P=>((P=P.Gb)==0?P=12:12{for(var ee=0,ge=0;ge<=P.Hb-1;ee+=(Bt(P.Cb+1900)?Pi:Oi)[ge++]);return C(P.Kb+ee,3)},"%m":P=>C(P.Hb+1,2),"%M":P=>C(P.ec,2),"%n":()=>` +`,"%p":P=>0<=P.Gb&&12>P.Gb?"AM":"PM","%S":P=>C(P.fc,2),"%t":()=>" ","%u":P=>P.ub||7,"%U":P=>C(Math.floor((P.Bb+7-P.ub)/7),2),"%V":P=>{var ee=Math.floor((P.Bb+7-(P.ub+6)%7)/7);if(2>=(P.ub+371-P.Bb-2)%7&&ee++,ee)ee==53&&((ge=(P.ub+371-P.Bb)%7)==4||ge==3&&Bt(P.Cb)||(ee=1));else{ee=52;var ge=(P.ub+7-P.Bb-1)%7;(ge==4||ge==5&&Bt(P.Cb%400-1))&&ee++}return C(ee,2)},"%w":P=>P.ub,"%W":P=>C(Math.floor((P.Bb+7-(P.ub+6)%7)/7),2),"%y":P=>(P.Cb+1900).toString().substring(2),"%Y":P=>P.Cb+1900,"%z":P=>{var ee=0<=(P=P.dc);return P=Math.abs(P)/60,(ee?"+":"-")+("0000"+(P/60*100+P%60)).slice(-4)},"%Z":P=>P.hc,"%%":()=>"%"},f=f.replace(/%%/g,"\0\0"),L)f.includes(K)&&(f=f.replace(new RegExp(K,"g"),L[K](y)));return K=function(P){var ee=Array(bn(P)+1);return Ko(P,ee,0,ee.length),ee}(f=f.replace(/\0\0/g,"%")),K.length>c?0:(ep(K,s),K.length-1)}function tp(s,c,f,y){return Di(s>>>0,c>>>0,f>>>0,y>>>0)}b||function(){for(var s=u.numThreads-1;s--;)Wo();Se.unshift(()=>{Ae++,function(c){b?c():Promise.all(ct.map(No)).then(c)}(()=>or())})}();for(var zi=Array(256),pr=0;256>pr;++pr)zi[pr]=String.fromCharCode(pr);ai=zi,pt=u.BindingError=class extends Error{constructor(s){super(s),this.name="BindingError"}},u.InternalError=class extends Error{constructor(s){super(s),this.name="InternalError"}},ut.push(0,1,void 0,1,null,1,!0,1,!1,1),u.count_emval_handles=()=>ut.length/2-5-$n.length;var rp=[gn,Ro,Ho,qo,jo,Yo,Xo,Zo,Qo,Jo,ei,ti,ri,ni,oi,ii,wi,vi,Ii,Ci,Ti,Ai,Ei,ki],j=function(){function s(f,y){return j=f.exports,j=function(){var v=j,C={};for(let[O,B]of Object.entries(v))C[O]=typeof B=="function"?(...N)=>{ur.push(O);try{return B(...N)}finally{Te||(ur.pop(),Je&&ft===1&&ur.length===0&&(ft=0,bt+=1,sr(Gi),typeof Fibers<"u"&&Fibers.oc()))}}:B;return C}(),j=function(){var v=j,C=B=>N=>B(N)>>>0,O=B=>()=>B()>>>0;return(v=Object.assign({},v)).Ca=C(v.Ca),v.fb=O(v.fb),v.gb=C(v.gb),v.emscripten_main_runtime_thread_id=O(v.emscripten_main_runtime_thread_id),v.sb=C(v.sb),v.tb=O(v.tb),v}(),Mo.push(j.ib),Ot.unshift(j.Ba),xe=y,or(),j}var c=zo();if(Ae++,u.instantiateWasm)try{return u.instantiateWasm(c,s)}catch(f){F(`Module.instantiateWasm callback failed with error: ${f}`),m(f)}return Eo||=u.locateFile?ko("ort-wasm-simd-threaded.jsep.wasm")?"ort-wasm-simd-threaded.jsep.wasm":u.locateFile?u.locateFile("ort-wasm-simd-threaded.jsep.wasm",D):D+"ort-wasm-simd-threaded.jsep.wasm":new URL("ort-wasm-simd-threaded.jsep.wasm",import.meta.url).href,function(f,y){var v=Eo;return typeof WebAssembly.instantiateStreaming!="function"||ko(v)||Po(v)||typeof 
fetch!="function"?Do(v,f,y):fetch(v,{credentials:"same-origin"}).then(C=>WebAssembly.instantiateStreaming(C,f).then(y,function(O){return F(`wasm streaming compile failed: ${O}`),F("falling back to ArrayBuffer instantiation"),Do(v,f,y)}))}(c,function(f){s(f.instance,f.module)}).catch(m),{}}(),Bi=s=>(Bi=j.Ca)(s),Ri=()=>(Ri=j.Da)();u._OrtInit=(s,c)=>(u._OrtInit=j.Ea)(s,c),u._OrtGetLastError=(s,c)=>(u._OrtGetLastError=j.Fa)(s,c),u._OrtCreateSessionOptions=(s,c,f,y,v,C,O,B,N,L)=>(u._OrtCreateSessionOptions=j.Ga)(s,c,f,y,v,C,O,B,N,L),u._OrtAppendExecutionProvider=(s,c)=>(u._OrtAppendExecutionProvider=j.Ha)(s,c),u._OrtAddFreeDimensionOverride=(s,c,f)=>(u._OrtAddFreeDimensionOverride=j.Ia)(s,c,f),u._OrtAddSessionConfigEntry=(s,c,f)=>(u._OrtAddSessionConfigEntry=j.Ja)(s,c,f),u._OrtReleaseSessionOptions=s=>(u._OrtReleaseSessionOptions=j.Ka)(s),u._OrtCreateSession=(s,c,f)=>(u._OrtCreateSession=j.La)(s,c,f),u._OrtReleaseSession=s=>(u._OrtReleaseSession=j.Ma)(s),u._OrtGetInputOutputCount=(s,c,f)=>(u._OrtGetInputOutputCount=j.Na)(s,c,f),u._OrtGetInputName=(s,c)=>(u._OrtGetInputName=j.Oa)(s,c),u._OrtGetOutputName=(s,c)=>(u._OrtGetOutputName=j.Pa)(s,c),u._OrtFree=s=>(u._OrtFree=j.Qa)(s),u._OrtCreateTensor=(s,c,f,y,v,C)=>(u._OrtCreateTensor=j.Ra)(s,c,f,y,v,C),u._OrtGetTensorData=(s,c,f,y,v)=>(u._OrtGetTensorData=j.Sa)(s,c,f,y,v),u._OrtReleaseTensor=s=>(u._OrtReleaseTensor=j.Ta)(s),u._OrtCreateRunOptions=(s,c,f,y)=>(u._OrtCreateRunOptions=j.Ua)(s,c,f,y),u._OrtAddRunConfigEntry=(s,c,f)=>(u._OrtAddRunConfigEntry=j.Va)(s,c,f),u._OrtReleaseRunOptions=s=>(u._OrtReleaseRunOptions=j.Wa)(s),u._OrtCreateBinding=s=>(u._OrtCreateBinding=j.Xa)(s),u._OrtBindInput=(s,c,f)=>(u._OrtBindInput=j.Ya)(s,c,f),u._OrtBindOutput=(s,c,f,y)=>(u._OrtBindOutput=j.Za)(s,c,f,y),u._OrtClearBoundOutputs=s=>(u._OrtClearBoundOutputs=j._a)(s),u._OrtReleaseBinding=s=>(u._OrtReleaseBinding=j.$a)(s),u._OrtRunWithBinding=(s,c,f,y,v)=>(u._OrtRunWithBinding=j.ab)(s,c,f,y,v),u._OrtRun=(s,c,f,y,v,C,O,B)=>(u._OrtRun=j.bb)(s,c,f,y,v,C,O,B),u._OrtEndProfiling=s=>(u._OrtEndProfiling=j.cb)(s),u._JsepOutput=(s,c,f)=>(u._JsepOutput=j.db)(s,c,f),u._JsepGetNodeName=s=>(u._JsepGetNodeName=j.eb)(s);var mr,Mt=()=>(Mt=j.fb)(),fr=u._malloc=s=>(fr=u._malloc=j.gb)(s),et=u._free=s=>(et=u._free=j.hb)(s),Pn=(s,c,f,y,v,C)=>(Pn=j.kb)(s,c,f,y,v,C),Mi=()=>(Mi=j.lb)(),Ui=(s,c,f,y,v)=>(Ui=j.mb)(s,c,f,y,v),On=s=>(On=j.nb)(s),hr=s=>(hr=j.ob)(s),Vi=()=>(Vi=j.pb)(),Ni=(s,c)=>(Ni=j.qb)(s,c),gr=s=>(gr=j.rb)(s),Dn=s=>(Dn=j.sb)(s),zn=()=>(zn=j.tb)(),Wi=u.dynCall_ii=(s,c)=>(Wi=u.dynCall_ii=j.vb)(s,c),Hi=s=>(Hi=j.wb)(s),Gi=()=>(Gi=j.xb)(),Li=s=>(Li=j.yb)(s),Fi=()=>(Fi=j.zb)();function qi(){if(!(0zn(),u.stackRestore=s=>gr(s),u.stackAlloc=s=>Dn(s),u.UTF8ToString=ke,u.stringToUTF8=zt,u.lengthBytesUTF8=bn,lt=function s(){mr||qi(),mr||(lt=s)},qi(),h}),pp=Ta;globalThis.self?.name==="em-pthread"&&Ta()});var Ut,mp,fp,hp,ka,Pa,gp,Oa,qt=U(()=>{"use strict";Cr();Ut=!1?void 0:import.meta.url??(typeof document<"u"?document.currentScript?.src:typeof self<"u"?self.location?.href:void 0),mp=!1||typeof location>"u"?void 0:location.origin,fp=(e,t)=>{try{let r=t??Ut;return(r?new URL(e,r):new URL(e)).origin===mp}catch{return!1}},hp=async e=>{let r=await(await fetch(e,{credentials:"same-origin"})).blob();return URL.createObjectURL(r)},ka=(Ca(),yr(Ia)).default,Pa=async()=>{if(!Ut)throw new Error("Failed to load proxy worker: cannot determine the script source URL.");if(fp(Ut))return[void 0,ka()];let e=await hp(Ut);return[e,ka(e)]},gp=(Ea(),yr(Aa)).default,Oa=async(e,t,r)=>[void 0,gp]});var 
Gn,Ln,Br,Da,yp,bp,Tr,Ie,St=U(()=>{"use strict";qt();Ln=!1,Br=!1,Da=!1,yp=()=>{if(typeof SharedArrayBuffer>"u")return!1;try{return typeof MessageChannel<"u"&&new MessageChannel().port1.postMessage(new SharedArrayBuffer(1)),WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,5,4,1,3,1,1,10,11,1,9,0,65,0,254,16,2,0,26,11]))}catch{return!1}},bp=()=>{try{return WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,10,30,1,28,0,65,0,253,15,253,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,253,186,1,26,11]))}catch{return!1}},Tr=async e=>{if(Ln)return Promise.resolve();if(Br)throw new Error("multiple calls to 'initializeWebAssembly()' detected.");if(Da)throw new Error("previous call to 'initializeWebAssembly()' failed.");Br=!0;let t=e.initTimeout,r=e.numThreads;if(!bp())throw new Error("WebAssembly SIMD is not supported in the current environment.");let n=yp();r>1&&!n&&(typeof self<"u"&&!self.crossOriginIsolated&&console.warn("env.wasm.numThreads is set to "+r+", but this will not work unless you enable crossOriginIsolated mode. See https://web.dev/cross-origin-isolation-guide/ for more info."),console.warn("WebAssembly multi-threading is not supported in the current environment. Falling back to single-threading."),e.numThreads=r=1);let o=e.wasmPaths,i=typeof o=="string"?o:void 0,a=o?.mjs,l=a?.href??a,d=o?.wasm,p=d?.href??d,[m,u]=await Oa(l,i,r>1),h=!1,w=[];if(t>0&&w.push(new Promise(g=>{setTimeout(()=>{h=!0,g()},t)})),w.push(new Promise((g,b)=>{let x={numThreads:r};(p||i)&&(x.locateFile=(_,$)=>p??(i??$)+_),u(x).then(_=>{Br=!1,Ln=!0,Gn=_,g(),m&&URL.revokeObjectURL(m)},_=>{Br=!1,Da=!0,b(_)})})),await Promise.race(w),h)throw new Error(`WebAssembly backend initializing failed due to timeout: ${t}ms`)},Ie=()=>{if(Ln&&Gn)return Gn;throw new Error("WebAssembly is not initialized yet.")}});var Ee,Kt,$e,Rr=U(()=>{"use strict";St();Ee=(e,t)=>{let r=Ie(),n=r.lengthBytesUTF8(e)+1,o=r._malloc(n);return r.stringToUTF8(e,o,n),t.push(o),o},Kt=(e,t,r,n)=>{if(typeof e=="object"&&e!==null){if(r.has(e))throw new Error("Circular reference in options");r.add(e)}Object.entries(e).forEach(([o,i])=>{let a=t?t+o:o;if(typeof i=="object")Kt(i,a+".",r,n);else if(typeof i=="string"||typeof i=="number")n(a,i.toString());else if(typeof i=="boolean")n(a,i?"1":"0");else throw new Error(`Can't handle extra config type: ${typeof i}`)})},$e=e=>{let t=Ie(),r=t.stackSave();try{let n=t.stackAlloc(8);t._OrtGetLastError(n,n+4);let o=t.HEAP32[n/4],i=t.HEAPU32[n/4+1],a=i?t.UTF8ToString(i):"";throw new Error(`${e} ERROR_CODE: ${o}, ERROR_MESSAGE: ${a}`)}finally{t.stackRestore(r)}}});var za,Ba=U(()=>{"use strict";St();Rr();za=e=>{let t=Ie(),r=0,n=[],o=e||{};try{if(e?.logSeverityLevel===void 0)o.logSeverityLevel=2;else if(typeof e.logSeverityLevel!="number"||!Number.isInteger(e.logSeverityLevel)||e.logSeverityLevel<0||e.logSeverityLevel>4)throw new Error(`log serverity level is not valid: ${e.logSeverityLevel}`);if(e?.logVerbosityLevel===void 0)o.logVerbosityLevel=0;else if(typeof e.logVerbosityLevel!="number"||!Number.isInteger(e.logVerbosityLevel))throw new Error(`log verbosity level is not valid: ${e.logVerbosityLevel}`);e?.terminate===void 0&&(o.terminate=!1);let i=0;return e?.tag!==void 0&&(i=Ee(e.tag,n)),r=t._OrtCreateRunOptions(o.logSeverityLevel,o.logVerbosityLevel,!!o.terminate,i),r===0&&$e("Can't create run options."),e?.extra!==void 0&&Kt(e.extra,"",new WeakSet,(a,l)=>{let d=Ee(a,n),p=Ee(l,n);t._OrtAddRunConfigEntry(r,d,p)!==0&&$e(`Can't set a run config entry: ${a} - 
${l}.`)}),[r,n]}catch(i){throw r!==0&&t._OrtReleaseRunOptions(r),n.forEach(a=>t._free(a)),i}}});var wp,vp,$p,_p,Ra,Ma=U(()=>{"use strict";St();Rr();wp=e=>{switch(e){case"disabled":return 0;case"basic":return 1;case"extended":return 2;case"all":return 99;default:throw new Error(`unsupported graph optimization level: ${e}`)}},vp=e=>{switch(e){case"sequential":return 0;case"parallel":return 1;default:throw new Error(`unsupported execution mode: ${e}`)}},$p=e=>{e.extra||(e.extra={}),e.extra.session||(e.extra.session={});let t=e.extra.session;t.use_ort_model_bytes_directly||(t.use_ort_model_bytes_directly="1"),e.executionProviders&&e.executionProviders.some(r=>(typeof r=="string"?r:r.name)==="webgpu")&&(e.enableMemPattern=!1)},_p=(e,t,r)=>{for(let n of t){let o=typeof n=="string"?n:n.name;switch(o){case"webnn":if(o="WEBNN",typeof n!="string"){let l=n?.deviceType;if(l){let d=Ee("deviceType",r),p=Ee(l,r);Ie()._OrtAddSessionConfigEntry(e,d,p)!==0&&$e(`Can't set a session config entry: 'deviceType' - ${l}.`)}}break;case"webgpu":if(o="JS",typeof n!="string"){let a=n;if(a?.preferredLayout){if(a.preferredLayout!=="NCHW"&&a.preferredLayout!=="NHWC")throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${a.preferredLayout}`);let l=Ee("preferredLayout",r),d=Ee(a.preferredLayout,r);Ie()._OrtAddSessionConfigEntry(e,l,d)!==0&&$e(`Can't set a session config entry: 'preferredLayout' - ${a.preferredLayout}.`)}}break;case"wasm":case"cpu":continue;default:throw new Error(`not supported execution provider: ${o}`)}let i=Ee(o,r);Ie()._OrtAppendExecutionProvider(e,i)!==0&&$e(`Can't append execution provider: ${o}.`)}},Ra=e=>{let t=Ie(),r=0,n=[],o=e||{};$p(o);try{let i=wp(o.graphOptimizationLevel??"all"),a=vp(o.executionMode??"sequential"),l=typeof o.logId=="string"?Ee(o.logId,n):0,d=o.logSeverityLevel??2;if(!Number.isInteger(d)||d<0||d>4)throw new Error(`log serverity level is not valid: ${d}`);let p=o.logVerbosityLevel??0;if(!Number.isInteger(p)||p<0||p>4)throw new Error(`log verbosity level is not valid: ${p}`);let m=typeof o.optimizedModelFilePath=="string"?Ee(o.optimizedModelFilePath,n):0;if(r=t._OrtCreateSessionOptions(i,!!o.enableCpuMemArena,!!o.enableMemPattern,a,!!o.enableProfiling,0,l,d,p,m),r===0&&$e("Can't create session options."),o.executionProviders&&_p(r,o.executionProviders,n),o.enableGraphCapture!==void 0){if(typeof o.enableGraphCapture!="boolean")throw new Error(`enableGraphCapture must be a boolean value: ${o.enableGraphCapture}`);let u=Ee("enableGraphCapture",n),h=Ee(o.enableGraphCapture.toString(),n);t._OrtAddSessionConfigEntry(r,u,h)!==0&&$e(`Can't set a session config entry: 'enableGraphCapture' - ${o.enableGraphCapture}.`)}if(o.freeDimensionOverrides)for(let[u,h]of Object.entries(o.freeDimensionOverrides)){if(typeof u!="string")throw new Error(`free dimension override name must be a string: ${u}`);if(typeof h!="number"||!Number.isInteger(h)||h<0)throw new Error(`free dimension override value must be a non-negative integer: ${h}`);let w=Ee(u,n);t._OrtAddFreeDimensionOverride(r,w,h)!==0&&$e(`Can't set a free dimension override: ${u} - ${h}.`)}return o.extra!==void 0&&Kt(o.extra,"",new WeakSet,(u,h)=>{let w=Ee(u,n),g=Ee(h,n);t._OrtAddSessionConfigEntry(r,w,g)!==0&&$e(`Can't set a session config entry: ${u} - ${h}.`)}),[r,n]}catch(i){throw r!==0&&t._OrtReleaseSessionOptions(r),n.forEach(a=>t._free(a)),i}}});var Fn,ht,It,Mr,Yt,Ur,qn,Q=U(()=>{"use strict";Fn=e=>{switch(e){case"int8":return 3;case"uint8":return 2;case"bool":return 9;case"int16":return 5;case"uint16":return 
4;case"int32":return 6;case"uint32":return 12;case"float16":return 10;case"float32":return 1;case"float64":return 11;case"string":return 8;case"int64":return 7;case"uint64":return 13;default:throw new Error(`unsupported data type: ${e}`)}},ht=e=>{switch(e){case 3:return"int8";case 2:return"uint8";case 9:return"bool";case 5:return"int16";case 4:return"uint16";case 6:return"int32";case 12:return"uint32";case 10:return"float16";case 1:return"float32";case 11:return"float64";case 8:return"string";case 7:return"int64";case 13:return"uint64";default:throw new Error(`unsupported data type: ${e}`)}},It=e=>[void 0,4,1,1,2,2,4,8,void 0,1,2,8,4,8,void 0,void 0,void 0][e],Mr=e=>{switch(e){case"float16":return typeof Float16Array<"u"&&Float16Array.from?Float16Array:Uint16Array;case"float32":return Float32Array;case"uint8":return Uint8Array;case"int8":return Int8Array;case"uint16":return Uint16Array;case"int16":return Int16Array;case"int32":return Int32Array;case"bool":return Uint8Array;case"float64":return Float64Array;case"uint32":return Uint32Array;case"int64":return BigInt64Array;case"uint64":return BigUint64Array;default:throw new Error(`unsupported type: ${e}`)}},Yt=e=>{switch(e){case"verbose":return 0;case"info":return 1;case"warning":return 2;case"error":return 3;case"fatal":return 4;default:throw new Error(`unsupported logging level: ${e}`)}},Ur=e=>e==="float32"||e==="float16"||e==="int32"||e==="int64"||e==="uint32"||e==="uint8"||e==="bool",qn=e=>{switch(e){case"none":return 0;case"cpu":return 1;case"cpu-pinned":return 2;case"texture":return 3;case"gpu-buffer":return 4;default:throw new Error(`unsupported data location: ${e}`)}}});var Xt,jn=U(()=>{"use strict";Cr();Xt=async e=>{if(typeof e=="string")if(!1)try{let{readFile:t}=Rn("node:fs/promises");return new Uint8Array(await t(e))}catch(t){if(t.code==="ERR_FS_FILE_TOO_LARGE"){let{createReadStream:r}=Rn("node:fs"),n=r(e),o=[];for await(let i of n)o.push(i);return new Uint8Array(Buffer.concat(o))}throw t}else{let t=await fetch(e);if(!t.ok)throw new Error(`failed to load external data file: ${e}`);let r=t.headers.get("Content-Length"),n=r?parseInt(r,10):0;if(n<1073741824)return new Uint8Array(await t.arrayBuffer());{if(!t.body)throw new Error(`failed to load external data file: ${e}, no response body.`);let o=t.body.getReader(),i;try{i=new ArrayBuffer(n)}catch(l){if(l instanceof RangeError){let d=Math.ceil(n/65536);i=new WebAssembly.Memory({initial:d,maximum:d}).buffer}else throw l}let a=0;for(;;){let{done:l,value:d}=await o.read();if(l)break;let p=d.byteLength;new Uint8Array(i,a,p).set(d),a+=p}return new Uint8Array(i,0,n)}}else return e instanceof Blob?new Uint8Array(await e.arrayBuffer()):e instanceof Uint8Array?e:new Uint8Array(e)}});var xp,Sp,Ua,Va,Na,Ip,be,dt=U(()=>{"use strict";Q();xp=["V","I","W","E","F"],Sp=(e,t)=>{console.log(`[${xp[e]},${new Date().toISOString()}]${t}`)},Na=(e,t)=>{Ua=e,Va=t},Ip=(e,t)=>{let r=Yt(e),n=Yt(Ua);r>=n&&Sp(r,typeof t=="function"?t():t)},be=(...e)=>{Va&&Ip(...e)}});var Wa,Ha=U(()=>{"use strict";Q();Wa=(e,t)=>new(Mr(t))(e)});var Vr=U(()=>{"use strict"});var Ga,Kn,Yn,Cp,Tp,La,Zn,Xn,qa,ja=U(()=>{"use strict";dt();Vr();Ga=new Map([[64,250],[128,200],[256,200],[512,200],[2048,230],[4096,200],[8192,50],[16384,50],[32768,50],[65536,50],[131072,50],[262144,50],[524288,50],[1048576,50],[2097152,30],[4194304,20],[8388608,10],[12582912,10],[16777216,10],[26214400,15],[33554432,22],[44236800,2],[58982400,6],[67108864,6],[134217728,6],[167772160,6]]),Kn=[],Yn=e=>Math.ceil(e/16)*16,Cp=e=>{for(let 
t=0;tTp++,Zn=async(e,t,r,n)=>{let o=Yn(r),i=e.device.createBuffer({size:o,usage:GPUBufferUsage.COPY_DST|GPUBufferUsage.MAP_READ});try{let a=e.getCommandEncoder();e.endComputePass(),a.copyBufferToBuffer(t,0,i,0,o),e.flush(),await i.mapAsync(GPUMapMode.READ);let l=i.getMappedRange();if(n){let d=n();return d.set(new Uint8Array(l,0,r)),d}else return new Uint8Array(l.slice(0,r))}finally{i.destroy()}},Xn=class{constructor(t){this.backend=t;this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.buffersForUploadingPending=[],this.buffersPending=[],this.externalBuffers=new Map,this.capturedPendingBuffers=new Map;for(let[r]of Ga)Kn.push(r),this.freeBuffers.set(r,[]),this.freeUniformBuffers.set(r,[])}upload(t,r){let n=r.buffer,o=r.byteOffset,i=r.byteLength,a=Yn(i),l=this.storageCache.get(t);if(!l)throw new Error("gpu data for uploading does not exist");if(l.originalSize!==i)throw new Error(`inconsistent data size. gpu data size=${l.originalSize}, data size=${i}`);let d=this.backend.device.createBuffer({mappedAtCreation:!0,size:a,usage:GPUBufferUsage.MAP_WRITE|GPUBufferUsage.COPY_SRC}),p=d.getMappedRange();new Uint8Array(p).set(new Uint8Array(n,o,i)),d.unmap();let m=this.backend.getCommandEncoder();this.backend.endComputePass(),m.copyBufferToBuffer(d,0,l.gpuData.buffer,0,a),be("verbose",()=>`[WebGPU] GpuDataManager.upload(id=${t})`),this.buffersForUploadingPending.push(d)}memcpy(t,r){let n=this.storageCache.get(t);if(!n)throw new Error("source gpu data for memcpy does not exist");let o=this.storageCache.get(r);if(!o)throw new Error("destination gpu data for memcpy does not exist");if(n.originalSize!==o.originalSize)throw new Error("inconsistent source and destination gpu data size");let i=Yn(n.originalSize),a=this.backend.getCommandEncoder();this.backend.endComputePass(),a.copyBufferToBuffer(n.gpuData.buffer,0,o.gpuData.buffer,0,i)}registerExternalBuffer(t,r,n){let o;if(n){if(o=this.externalBuffers.get(n),o===void 0)throw new Error("previous buffer is not registered");if(t===n)return be("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${r}) => id=${o}, buffer is the same, skip.`),o;if(this.backend.capturedCommandList.has(this.backend.currentSessionId))throw new Error(`Registering a different external buffer under graph capture mode is not supported yet. 
+ Please use the previous external buffer!`);this.externalBuffers.delete(n)}else o=La();return this.storageCache.set(o,{gpuData:{id:o,type:0,buffer:t},originalSize:r}),this.externalBuffers.set(t,o),be("verbose",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${r}) => id=${o}, registered.`),o}unregisterExternalBuffer(t){let r=this.externalBuffers.get(t);r!==void 0&&(this.storageCache.delete(r),this.externalBuffers.delete(t),be("verbose",()=>`[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${r}`))}create(t,r=GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_SRC|GPUBufferUsage.COPY_DST){let n=Cp(t),o,i=(r&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE,a=(r&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM;if(i||a){let p=(i?this.freeBuffers:this.freeUniformBuffers).get(n);p?p.length>0?o=p.pop():o=this.backend.device.createBuffer({size:n,usage:r}):o=this.backend.device.createBuffer({size:n,usage:r})}else o=this.backend.device.createBuffer({size:n,usage:r});let l={id:La(),type:0,buffer:o};return this.storageCache.set(l.id,{gpuData:l,originalSize:t}),be("verbose",()=>`[WebGPU] GpuDataManager.create(size=${t}) => id=${l.id}`),l}get(t){return this.storageCache.get(t)?.gpuData}release(t){let r=this.storageCache.get(t);if(!r)throw new Error("releasing data does not exist");return be("verbose",()=>`[WebGPU] GpuDataManager.release(id=${t}), gpuDataId=${r.gpuData.id}`),this.storageCache.delete(t),this.buffersPending.push(r.gpuData.buffer),r.originalSize}async download(t,r){let n=this.storageCache.get(t);if(!n)throw new Error("data does not exist");await Zn(this.backend,n.gpuData.buffer,n.originalSize,r)}refreshPendingBuffers(){for(let t of this.buffersForUploadingPending)t.destroy();if(this.buffersForUploadingPending=[],this.buffersPending.length!==0)if(this.backend.sessionStatus==="default"){for(let t of this.buffersPending){let r=Ga.get(t.size);if((t.usage&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE){let n=this.freeBuffers.get(t.size)||[];r===void 0||n.length>=r?t.destroy():n.push(t)}else if((t.usage&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM){let n=this.freeUniformBuffers.get(t.size)||[];r===void 0||n.length>=r?t.destroy():n.push(t)}else t.destroy()}this.buffersPending=[]}else{let t=this.capturedPendingBuffers.get(this.backend.currentSessionId);t||(t=[],this.capturedPendingBuffers.set(this.backend.currentSessionId,t));for(let r of this.buffersPending)t.push(r);this.buffersPending=[]}}dispose(){this.freeBuffers.forEach(t=>{t.forEach(r=>{r.destroy()})}),this.freeUniformBuffers.forEach(t=>{t.forEach(r=>{r.destroy()})}),this.storageCache.forEach(t=>{t.gpuData.buffer.destroy()}),this.capturedPendingBuffers.forEach(t=>{t.forEach(r=>{r.destroy()})}),this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.capturedPendingBuffers=new Map}onReleaseSession(t){let r=this.capturedPendingBuffers.get(t);r&&(r.forEach(n=>{n.destroy()}),this.capturedPendingBuffers.delete(t))}},qa=(...e)=>new Xn(...e)});var Qn,te,Ce=U(()=>{"use strict";Qn=class{constructor(t){Object.assign(this,t)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(t=>`${this[t]}`).join(";")),this.key}},te=e=>new Qn(e)});var Jn,tt,k,Ct,Nr,Wr,Hr,ae=U(()=>{"use strict";Jn=class{static calcMatMulShape(t,r){return t[1]!==r[0]?void 0:[t[0],r[1]]}},tt=class{static calcShape(t,r,n=!1){let o=t.length,i=r.length;if(o===0)return r;if(i===0)return t;let a=Math.max(t.length,r.length),l=new Array(a);if(n){if(o<2||i<2)return;let 
d=Jn.calcMatMulShape([t[o-2],t[o-1]],[r[i-2],r[i-1]]);if(d===void 0)return;[l[a-2],l[a-1]]=d}for(let d=n?3:1;d<=a;d++){let p=o-d<0?1:t[o-d],m=i-d<0?1:r[i-d];if(p!==m&&p>1&&m>1)return;let u=Math.max(p,m);if(p&&m)l[a-d]=Math.max(p,m);else{if(u>1)return;l[a-d]=0}}return l}static isValidBroadcast(t,r){let n=t.length,o=r.length;if(n>o)return!1;for(let i=1;i<=n;i++)if(t[n-i]!==1&&t[n-i]!==r[o-i])return!1;return!0}},k=class e{static size(t){return e.getSizeFromDimensionRange(t,0,t.length)}static convertShape(t,r=4){let n=t.length;if(n===0)return[];let o=new Array(n),i=n-1;for(;i>=0;){if(t[i]%r===0){o[i]=t[i]/r;break}if(r%t[i]!==0)throw new Error("cannot convert shape");o[i]=1,r/=t[i],i--}for(i--;i>=0;i--)o[i]=t[i];return o}static sizeFromDimension(t,r){if(r<0||r>t.length)throw new Error(`invalid dimension of ${r} for sizeFromDimension as Tensor has ${t.length} dimensions.`);return e.getSizeFromDimensionRange(t,r,t.length)}static sizeToDimension(t,r){if(r<0||r>t.length)throw new Error(`invalid dimension of ${r} for sizeToDimension as Tensor has ${t.length} dimensions.`);return e.getSizeFromDimensionRange(t,0,r)}static getSizeFromDimensionRange(t,r,n){let o=1;for(let i=r;i=0;--o)n[o]=n[o+1]*t[o+1];return n}static normalizeAxis(t,r){if(t<-r&&t>=r)throw new Error("unsupported axis for this operation.");return t<0?t+r:t}static normalizeAxes(t,r){return t.map(n=>this.normalizeAxis(n,r??t.length))}static sortBasedOnPerm(t,r){return r?r.map(n=>t[n]):t.slice().reverse()}static padShape(t,r){let n=t.length;return t.map((o,i)=>o+r[i]+r[i+n])}static areEqual(t,r){return t.length!==r.length?!1:t.every((n,o)=>n===r[o])}},Ct=class e{static adjustPoolAttributes(t,r,n,o,i,a){if(!t&&n.length!==r.length-2)throw new Error("length of specified kernel shapes should be 2 less than length of input dimensions");if(t)for(let l=0;l=n.length?n.push(r[l+2]):n[l]=r[l+2];for(let l=0;l=n[l]||a[l+n.length]>=n[l])throw new Error("pads should be smaller than kernel")}}static adjustPadsBasedOnAutoPad(t,r,n,o,i,a,l){if(l){if(i.length!==2*(t.length-2))throw new Error("length of pads should be twice the length of data dimensions");if(r.length!==t.length-2)throw new Error("length of strides should be the length of data dimensions");if(o.length!==t.length-2)throw new Error("length of kernel shapes should be the length of data dimensions");for(let d=0;d{"use strict";Q();ae();Tt=64,to=(e,t)=>{if(t===3)throw new Error("vec3 has same alignment as vec4, use vec4 instead");switch(e){case 10:return t>1?`vec${t}`:"f16";case 1:return t>1?`vec${t}`:"f32";case 6:return t>1?`vec${t}`:"i32";case 12:return t>1?`vec${t}`:"u32";case 7:if(t>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","i32"];case 13:if(t>1)throw new Error("currently not supported vecX of uint64 yet");return["vec2","u32"];case 9:if(t!==4)throw new Error("bool must be vec4");return["u32","vec4"];default:throw new Error(`Unknown data type: ${e}`)}},he=(e,t=1)=>{let r=to(e,t);return typeof r=="string"?r:r[0]},Oe=(e,t=1)=>{let r=to(e,t);return typeof r=="string"?r:r[1]},V=(...e)=>{let t=[];return e.forEach(r=>{r.length!==0&&t.push({type:12,data:r},{type:12,data:k.computeStrides(r)})}),t},we=e=>e%4===0?4:e%2===0?2:1,gt=(e="f32",t,r="0")=>!t||t===1?`${e}(${r})`:`vec${t}<${e}>(${r})`,At=(e,t,r)=>e==="f32"?r:t===1?`f32(${r})`:`vec${t}(${r})`,qe=(e,t)=>t===4?`(${e}.x + ${e}.y + ${e}.z + ${e}.w)`:t===2?`(${e}.x + ${e}.y)`:t===3?`(${e}.x + ${e}.y + ${e}.z)`:e,G=(e,t,r,n)=>e.startsWith("uniforms.")&&r>4?typeof t=="string"?n==="f16"?`${e}[(${t}) / 8][(${t}) % 8 / 
4][(${t}) % 8 % 4]`:`${e}[(${t}) / 4][(${t}) % 4]`:n==="f16"?`${e}[${Math.floor(t/8)}][${Math.floor(t%8/4)}][${t%8%4}]`:`${e}[${Math.floor(t/4)}][${t%4}]`:r>1?`${e}[${t}]`:e,ro=(e,t,r,n,o)=>{let i=typeof r=="number",a=i?r:r.length,l=[...new Array(a).keys()],d=a<2?"u32":a<=4?`vec${a}`:`array`,p=to(t,o),m=typeof p=="string"?p:p[1],u=typeof p=="string"?p:p[0],h={indices:d,value:m,storage:u,tensor:t},w=R=>typeof R=="string"?R:`${R}u`,g={offsetToIndices:!1,indicesToOffset:!1,broadcastedIndicesToOffset:!1,set:!1,setByIndices:!1,get:!1,getByIndices:!1},b=i?"uniforms.":"",x=`${b}${e}_shape`,_=`${b}${e}_strides`,$="";for(let R=0;R(g.offsetToIndices=!0,a<2?R:`o2i_${e}(${R})`),T=[];if(a>=2)for(let R=a-1;R>=0;R--)T.push(`${G(_,R,a)} * (indices[${R}])`);let A=a<2?"":` fn i2o_${e}(indices: ${h.indices}) -> u32 { return ${T.join("+")}; - }`,z=R=>(g.indicesToOffset=!0,a<2?R:`i2o_${e}(${R})`),D=(...R)=>a===0?"0u":`${h.indices}(${R.map(w).join(",")})`,H=(R,Y)=>a<2?`${R}`:`${G(R,Y,a)}`,W=(R,Y,ue)=>a<2?`${R}=${ue};`:`${G(R,Y,a)}=${ue};`,F={},de=(R,Y)=>{g.broadcastedIndicesToOffset=!0;let ue=`${Y.name}broadcastedIndicesTo${e}Offset`;if(ue in F)return`${ue}(${R})`;let Te=[];for(let ve=a-1;ve>=0;ve--){let Se=Y.indicesGet("outputIndices",ve+Y.rank-a);Te.push(`${H(_,ve)} * (${Se} % ${H(x,ve)})`)}return F[ue]=`fn ${ue}(outputIndices: ${Y.type.indices}) -> u32 { + }`,D=R=>(g.indicesToOffset=!0,a<2?R:`i2o_${e}(${R})`),z=(...R)=>a===0?"0u":`${h.indices}(${R.map(w).join(",")})`,H=(R,Y)=>a<2?`${R}`:`${G(R,Y,a)}`,W=(R,Y,ue)=>a<2?`${R}=${ue};`:`${G(R,Y,a)}=${ue};`,F={},de=(R,Y)=>{g.broadcastedIndicesToOffset=!0;let ue=`${Y.name}broadcastedIndicesTo${e}Offset`;if(ue in F)return`${ue}(${R})`;let Te=[];for(let ve=a-1;ve>=0;ve--){let Se=Y.indicesGet("outputIndices",ve+Y.rank-a);Te.push(`${H(_,ve)} * (${Se} % ${H(x,ve)})`)}return F[ue]=`fn ${ue}(outputIndices: ${Y.type.indices}) -> u32 { return ${Te.length>0?Te.join("+"):"0u"}; }`,`${ue}(${R})`},ce=(R,Y)=>(()=>{if(h.storage===h.value)return`${e}[${R}]=${Y};`;if(h.storage==="vec2"&&h.value==="i32")return`${e}[${R}]=vec2(u32(${Y}), select(0u, 0xFFFFFFFFu, ${Y} < 0));`;if(h.storage==="vec2"&&h.value==="u32")return`${e}[${R}]=vec2(u32(${Y}), 0u);`;if(h.storage==="u32"&&h.value==="vec4")return`${e}[${R}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${Y}));`;throw new Error(`not supported combination of storage type ${h.storage} and value type ${h.value} yet`)})(),X=R=>(()=>{if(h.storage===h.value)return`${e}[${R}]`;if(h.storage==="vec2"&&h.value==="i32")return`i32(${e}[${R}].x)`;if(h.storage==="vec2"&&h.value==="u32")return`u32(${e}[${R}].x)`;if(h.storage==="u32"&&h.value==="vec4")return`vec4(bool(${e}[${R}] & 0xFFu), bool(${e}[${R}] & 0xFF00u), bool(${e}[${R}] & 0xFF0000u), bool(${e}[${R}] & 0xFF000000u))`;throw new Error(`not supported combination of storage type ${h.storage} and value type ${h.value} yet`)})(),xe=a<2?"":` fn get_${e}ByIndices(indices: ${h.indices}) -> ${m} { return ${X(`i2o_${e}(indices)`)}; }`,q=a<2?"":(()=>{let R=l.map(ue=>`d${ue}: u32`).join(", "),Y=l.map(ue=>`d${ue}`).join(", ");return` fn get_${e}(${R}) -> ${m} { - return get_${e}ByIndices(${D(Y)}); + return get_${e}ByIndices(${z(Y)}); }`})(),ie=(...R)=>{if(R.length!==a)throw new Error(`indices length must be ${a}`);let Y=R.map(w).join(",");return a===0?X("0u"):a===1?X(Y[0]):(g.get=!0,g.getByIndices=!0,g.indicesToOffset=!0,`get_${e}(${Y})`)},le=R=>a<2?X(R):(g.getByIndices=!0,g.indicesToOffset=!0,`get_${e}ByIndices(${R})`),se=a<2?"":` fn set_${e}ByIndices(indices: ${h.indices}, value: ${m}) { 
${ce(`i2o_${e}(indices)`,"value")} }`,Z=a<2?"":(()=>{let R=l.map(ue=>`d${ue}: u32`).join(", "),Y=l.map(ue=>`d${ue}`).join(", ");return` fn set_${e}(${R}, value: ${m}) { - set_${e}ByIndices(${D(Y)}, value); + set_${e}ByIndices(${z(Y)}, value); }`})();return{impl:()=>{let R=[],Y=!1;return g.offsetToIndices&&(R.push(S),Y=!0),g.indicesToOffset&&(R.push(A),Y=!0),g.broadcastedIndicesToOffset&&(Object.values(F).forEach(ue=>R.push(ue)),Y=!0),g.set&&(R.push(Z),Y=!0),g.setByIndices&&(R.push(se),Y=!0),g.get&&(R.push(q),Y=!0),g.getByIndices&&(R.push(xe),Y=!0),!i&&Y&&R.unshift(`const ${x} = ${h.indices}(${r.join(",")});`,`const ${_} = ${h.indices}(${k.computeStrides(r).join(",")});`),R.join(` -`)},type:h,offsetToIndices:I,indicesToOffset:z,broadcastedIndicesToOffset:de,indices:D,indicesGet:H,indicesSet:W,set:(...R)=>{if(R.length!==a+1)throw new Error(`indices length must be ${a}`);let Y=R[a];if(typeof Y!="string")throw new Error("value must be string");let ue=R.slice(0,a).map(w).join(",");return a===0?ce("0u",Y):a===1?ce(ue[0],Y):(g.set=!0,g.setByIndices=!0,g.indicesToOffset=!0,`set_${e}(${ue}, ${Y})`)},setByOffset:ce,setByIndices:(R,Y)=>a<2?ce(R,Y):(g.setByIndices=!0,g.indicesToOffset=!0,`set_${e}ByIndices(${R}, ${Y});`),get:ie,getByOffset:X,getByIndices:le,usage:n,name:e,strides:_,shape:x,rank:a}},E=(e,t,r,n=1)=>to(e,t,r,"input",n),M=(e,t,r,n=1)=>to(e,t,r,"output",n),Gr=(e,t,r,n=1)=>to(e,t,r,"internal",n),Jn=class{constructor(t,r){this.normalizedDispatchGroup=t;this.limits=r;this.internalVariables=[];this.variables=[];this.uniforms=[];this.variableIndex=0}guardAgainstOutOfBoundsWorkgroupSizes(t){return`if (global_idx >= ${typeof t=="number"?`${t}u`:t}) { return; }`}mainStart(t=Tt){let r=typeof t=="number"?t:t[0],n=typeof t=="number"?1:t[1],o=typeof t=="number"?1:t[2];if(r>this.limits.maxComputeWorkgroupSizeX||n>this.limits.maxComputeWorkgroupSizeY||o>this.limits.maxComputeWorkgroupSizeZ)throw new Error(`workgroup size [${r}, ${n}, ${o}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);if(r*n*o>this.limits.maxComputeInvocationsPerWorkgroup)throw new Error(`workgroup size [${r}, ${n}, ${o}] exceeds the maximum workgroup invocations ${this.limits.maxComputeInvocationsPerWorkgroup}.`);let i=this.normalizedDispatchGroup[1]===1&&this.normalizedDispatchGroup[2]===1,a=i?`@builtin(global_invocation_id) global_id : vec3, +`)},type:h,offsetToIndices:I,indicesToOffset:D,broadcastedIndicesToOffset:de,indices:z,indicesGet:H,indicesSet:W,set:(...R)=>{if(R.length!==a+1)throw new Error(`indices length must be ${a}`);let Y=R[a];if(typeof Y!="string")throw new Error("value must be string");let ue=R.slice(0,a).map(w).join(",");return a===0?ce("0u",Y):a===1?ce(ue[0],Y):(g.set=!0,g.setByIndices=!0,g.indicesToOffset=!0,`set_${e}(${ue}, ${Y})`)},setByOffset:ce,setByIndices:(R,Y)=>a<2?ce(R,Y):(g.setByIndices=!0,g.indicesToOffset=!0,`set_${e}ByIndices(${R}, ${Y});`),get:ie,getByOffset:X,getByIndices:le,usage:n,name:e,strides:_,shape:x,rank:a}},E=(e,t,r,n=1)=>ro(e,t,r,"input",n),M=(e,t,r,n=1)=>ro(e,t,r,"output",n),Gr=(e,t,r,n=1)=>ro(e,t,r,"internal",n),eo=class{constructor(t,r){this.normalizedDispatchGroup=t;this.limits=r;this.internalVariables=[];this.variables=[];this.uniforms=[];this.variableIndex=0}guardAgainstOutOfBoundsWorkgroupSizes(t){return`if (global_idx >= ${typeof t=="number"?`${t}u`:t}) { return; }`}mainStart(t=Tt){let r=typeof t=="number"?t:t[0],n=typeof t=="number"?1:t[1],o=typeof 
t=="number"?1:t[2];if(r>this.limits.maxComputeWorkgroupSizeX||n>this.limits.maxComputeWorkgroupSizeY||o>this.limits.maxComputeWorkgroupSizeZ)throw new Error(`workgroup size [${r}, ${n}, ${o}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);if(r*n*o>this.limits.maxComputeInvocationsPerWorkgroup)throw new Error(`workgroup size [${r}, ${n}, ${o}] exceeds the maximum workgroup invocations ${this.limits.maxComputeInvocationsPerWorkgroup}.`);let i=this.normalizedDispatchGroup[1]===1&&this.normalizedDispatchGroup[2]===1,a=i?`@builtin(global_invocation_id) global_id : vec3, @builtin(workgroup_id) workgroup_id : vec3, @builtin(local_invocation_id) local_id : vec3`:`@builtin(global_invocation_id) global_id : vec3, @builtin(local_invocation_id) local_id : vec3, @@ -57,9 +57,9 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec struct Uniforms { ${t.join(", ")} }; @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`}get additionalImplementations(){return this.uniformDeclaration()+this.variables.map(t=>t.impl()).join(` `)+this.internalVariables.map(t=>t.impl()).join(` -`)}get variablesInfo(){if(this.uniforms.length===0)return;let t=r=>[12,10,1,6][["u32","f16","f32","i32"].indexOf(r)];return this.uniforms.map(r=>[t(r.type),r.length??1])}},ja=(e,t)=>new Jn(e,t),Vt=(e,t)=>{let r=e.length,n=[];for(let o=0;o1&&a===1&&n.unshift(i)}return n}});var Cp,Ya,Tp,Ap,Ue,Xa,Za,Et=U(()=>{"use strict";Q();ae();Ce();oe();Cp=e=>{if(!e||e.length!==1)throw new Error("Transpose requires 1 input.")},Ya=(e,t)=>t&&t.length!==e?[...new Array(e).keys()].reverse():t,Tp=(e,t)=>k.sortBasedOnPerm(e,Ya(e.length,t)),Ap=(e,t,r,n)=>{let o=[];o.push(`fn perm(i: ${n.type.indices}) -> ${r.type.indices} { +`)}get variablesInfo(){if(this.uniforms.length===0)return;let t=r=>[12,10,1,6][["u32","f16","f32","i32"].indexOf(r)];return this.uniforms.map(r=>[t(r.type),r.length??1])}},Ka=(e,t)=>new eo(e,t),Nt=(e,t)=>{let r=e.length,n=[];for(let o=0;o1&&a===1&&n.unshift(i)}return n}});var Ap,Ya,Ep,kp,Ue,Xa,Za,Et=U(()=>{"use strict";Q();ae();Ce();oe();Ap=e=>{if(!e||e.length!==1)throw new Error("Transpose requires 1 input.")},Ya=(e,t)=>t&&t.length!==e?[...new Array(e).keys()].reverse():t,Ep=(e,t)=>k.sortBasedOnPerm(e,Ya(e.length,t)),kp=(e,t,r,n)=>{let o=[];o.push(`fn perm(i: ${n.type.indices}) -> ${r.type.indices} { var a: ${r.type.indices};`);for(let i=0;i{let r=e.dataType,n=e.dims.length,o=Ya(n,t),i=Tp(e.dims,o),a=M("output",r,i.length),l=E("a",r,n),d;if(o.length===2&&o[0]===1&&o[1]===0){let p=a.type.value,m=[16,16,1];d=u=>` +`)},Ue=(e,t)=>{let r=e.dataType,n=e.dims.length,o=Ya(n,t),i=Ep(e.dims,o),a=M("output",r,i.length),l=E("a",r,n),d;if(o.length===2&&o[0]===1&&o[1]===0){let p=a.type.value,m=[16,16,1];d=u=>` ${u.registerUniform("output_size","u32").declareVariables(l,a)} var tile : array, ${m[0]}>; ${u.mainStart(m)} @@ -79,7 +79,7 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec }`}else d=p=>` ${p.registerUniform("output_size","u32").declareVariables(l,a)} - ${Ap(o,n,l,a)} + ${kp(o,n,l,a)} ${p.mainStart()} ${p.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -88,7 +88,7 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec let aIndices = perm(indices); ${a.setByOffset("global_idx",l.getByIndices("aIndices"))} - }`;return{name:"Transpose",shaderCache:{hint:`${t}`,inputDependencies:["rank"]},getRunData:p=>{let 
m=k.size(i);return{outputs:[{dims:i,dataType:p[0].dataType}],dispatchGroup:{x:Math.ceil(m/64)},programUniforms:[{type:12,data:m},...V(p[0].dims,i)]}},getShaderSource:d}},Xa=(e,t)=>{Cp(e.inputs),e.compute(Ue(e.inputs[0],t.perm))},Za=e=>te({perm:e.perm})});var Ep,kp,Pp,Op,zp,Dp,Bp,Rp,Mp,Up,rt,Qa,Ja,es,ts,rs,ns,os,is,as,ss,us=U(()=>{"use strict";Q();ae();oe();Lr();Et();Ep={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate * candidate",logSumExp:"bestValue + exp(candidate)",l1:"bestValue + abs(candidate)",l2:"bestValue + candidate * candidate",logSum:"bestValue + candidate"},kp={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate",logSumExp:"bestValue + candidate",l1:"bestValue + candidate",l2:"bestValue + candidate",logSum:"bestValue + candidate"},Pp={max:"_A[offset]",min:"_A[offset]",mean:"0",sum:"0",prod:"1",sumSquare:"0",logSumExp:"0",l1:"0",l2:"0",logSum:"0"},Op={max:"bestValue",min:"bestValue",sum:"bestValue",prod:"bestValue",sumSquare:"bestValue",logSumExp:"log(bestValue)",l1:"bestValue",l2:"sqrt(bestValue)",logSum:"log(bestValue)"},zp=(e,t)=>{let r=[];for(let n=t-e;n{let r=[],n=e.length;for(let i=0;ie[i]);return[r,o]},Bp=(e,t)=>{let r=e.length+t.length,n=[],o=0;for(let i=0;i{for(let r=0;r{let r=[];if(!Rp(e,t)){for(let n=0;nr.push(n))}return r},Up=(e,t,r,n,o,i,a)=>{let l=r[0].dims,d=k.size(i),p=k.size(a),m=E("_A",r[0].dataType,l),u=M("output",o,i),h=32,w=` + }`;return{name:"Transpose",shaderCache:{hint:`${t}`,inputDependencies:["rank"]},getRunData:p=>{let m=k.size(i);return{outputs:[{dims:i,dataType:p[0].dataType}],dispatchGroup:{x:Math.ceil(m/64)},programUniforms:[{type:12,data:m},...V(p[0].dims,i)]}},getShaderSource:d}},Xa=(e,t)=>{Ap(e.inputs),e.compute(Ue(e.inputs[0],t.perm))},Za=e=>te({perm:e.perm})});var Pp,Op,Dp,zp,Bp,Rp,Mp,Up,Vp,Np,rt,Qa,Ja,es,ts,rs,ns,os,is,as,ss,us=U(()=>{"use strict";Q();ae();oe();Lr();Et();Pp={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate * candidate",logSumExp:"bestValue + exp(candidate)",l1:"bestValue + abs(candidate)",l2:"bestValue + candidate * candidate",logSum:"bestValue + candidate"},Op={max:"select(bestValue, candidate, candidate > bestValue)",min:"select(bestValue, candidate, candidate < bestValue)",mean:"bestValue + candidate",sum:"bestValue + candidate",prod:"bestValue * candidate",sumSquare:"bestValue + candidate",logSumExp:"bestValue + candidate",l1:"bestValue + candidate",l2:"bestValue + candidate",logSum:"bestValue + candidate"},Dp={max:"_A[offset]",min:"_A[offset]",mean:"0",sum:"0",prod:"1",sumSquare:"0",logSumExp:"0",l1:"0",l2:"0",logSum:"0"},zp={max:"bestValue",min:"bestValue",sum:"bestValue",prod:"bestValue",sumSquare:"bestValue",logSumExp:"log(bestValue)",l1:"bestValue",l2:"sqrt(bestValue)",logSum:"log(bestValue)"},Bp=(e,t)=>{let r=[];for(let n=t-e;n{let r=[],n=e.length;for(let i=0;ie[i]);return[r,o]},Mp=(e,t)=>{let r=e.length+t.length,n=[],o=0;for(let i=0;i{for(let r=0;r{let r=[];if(!Up(e,t)){for(let n=0;nr.push(n))}return r},Np=(e,t,r,n,o,i,a)=>{let 
l=r[0].dims,d=k.size(i),p=k.size(a),m=E("_A",r[0].dataType,l),u=M("output",o,i),h=32,w=` var aBestValues : array; `;return{name:e,shaderCache:t,getShaderSource:b=>` ${b.registerUniform("reduceSize","u32").declareVariables(m,u)} @@ -101,11 +101,11 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec let outputIndex = global_idx / ${h}; let offset = outputIndex * uniforms.reduceSize; - var bestValue = f32(${Pp[n]}); + var bestValue = f32(${Dp[n]}); let Length = uniforms.reduceSize; for (var k = local_idx; k < Length; k = k + ${h}) { let candidate = f32(${m.getByOffset("offset + k")}); - bestValue = ${Ep[n]}; + bestValue = ${Pp[n]}; } aBestValues[local_idx] = bestValue; workgroupBarrier(); @@ -116,7 +116,7 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec let interval = DIV_CEIL(reduceSize, 2u); if (local_idx < currentSize) { let candidate = aBestValues[local_idx + interval]; - bestValue = ${kp[n]}; + bestValue = ${Op[n]}; aBestValues[local_idx] = bestValue; } reduceSize = interval; @@ -124,13 +124,13 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec } if (local_idx == 0u) { - ${u.setByOffset("outputIndex",`${n==="mean"?`${u.type.storage}(bestValue / f32(uniforms.reduceSize))`:`${u.type.storage}(${Op[n]})`}`)}; + ${u.setByOffset("outputIndex",`${n==="mean"?`${u.type.storage}(bestValue / f32(uniforms.reduceSize))`:`${u.type.storage}(${zp[n]})`}`)}; } - }`,getRunData:()=>({outputs:[{dims:i,dataType:o}],dispatchGroup:{x:d},programUniforms:[{type:12,data:p}]})}},rt=(e,t,r,n)=>{let o=e.inputs.length===1?r:ro(e.inputs,r),i=o.axes;i.length===0&&!o.noopWithEmptyAxes&&(i=e.inputs[0].dims.map((w,g)=>g));let a=k.normalizeAxes(i,e.inputs[0].dims.length),l=a,d=e.inputs[0],p=Mp(l,e.inputs[0].dims.length);p.length>0&&(d=e.compute(Ue(e.inputs[0],p),{inputs:[0],outputs:[-1]})[0],l=zp(l.length,d.dims.length));let[m,u]=Dp(d.dims,l),h=m;o.keepDims&&(h=Bp(m,a)),e.compute(Up(t,{hint:o.cacheKey,inputDependencies:["type"]},[d],n,e.inputs[0].dataType,h,u),{inputs:[d]})},Qa=(e,t)=>{rt(e,"ReduceMeanShared",t,"mean")},Ja=(e,t)=>{rt(e,"ReduceL1Shared",t,"l1")},es=(e,t)=>{rt(e,"ReduceL2Shared",t,"l2")},ts=(e,t)=>{rt(e,"ReduceLogSumExpShared",t,"logSumExp")},rs=(e,t)=>{rt(e,"ReduceMaxShared",t,"max")},ns=(e,t)=>{rt(e,"ReduceMinShared",t,"min")},os=(e,t)=>{rt(e,"ReduceProdShared",t,"prod")},is=(e,t)=>{rt(e,"ReduceSumShared",t,"sum")},as=(e,t)=>{rt(e,"ReduceSumSquareShared",t,"sumSquare")},ss=(e,t)=>{rt(e,"ReduceLogSumShared",t,"logSum")}});var nt,Vp,Fr,ro,ot,Np,Wp,Hp,Gp,Lp,Fp,qp,Kp,jp,Yp,it,ds,ls,cs,ps,ms,fs,hs,gs,ys,bs,Lr=U(()=>{"use strict";Q();ae();Ce();oe();us();nt=e=>{if(!e||e.length===0||e.length>2)throw new Error("Reduce op requires 1 or 2 inputs.");if(e.length===2&&e[1].dims.length!==1)throw new Error("Invalid axes input dims.")},Vp=e=>["","",`var value = ${e.getByIndices("input_indices")};`,""],Fr=(e,t,r,n,o,i,a=!1,l=!1)=>{let d=[],p=r[0].dims,m=p.length,u=k.normalizeAxes(o,m),h=!l&&u.length===0;p.forEach((x,_)=>{h||u.indexOf(_)>=0?a&&d.push(1):d.push(x)});let w=d.length,g=k.size(d);return{name:e,shaderCache:t,getShaderSource:x=>{let _=[],$=E("_A",r[0].dataType,m),S=M("output",i,w),I=n($,S,u),T=I[2];for(let A=0,z=0;A=0?(a&&z++,T=`for(var j${A}: u32 = 0; j${A} < ${p[A]}; j${A}++) { + }`,getRunData:()=>({outputs:[{dims:i,dataType:o}],dispatchGroup:{x:d},programUniforms:[{type:12,data:p}]})}},rt=(e,t,r,n)=>{let 
o=e.inputs.length===1?r:no(e.inputs,r),i=o.axes;i.length===0&&!o.noopWithEmptyAxes&&(i=e.inputs[0].dims.map((w,g)=>g));let a=k.normalizeAxes(i,e.inputs[0].dims.length),l=a,d=e.inputs[0],p=Vp(l,e.inputs[0].dims.length);p.length>0&&(d=e.compute(Ue(e.inputs[0],p),{inputs:[0],outputs:[-1]})[0],l=Bp(l.length,d.dims.length));let[m,u]=Rp(d.dims,l),h=m;o.keepDims&&(h=Mp(m,a)),e.compute(Np(t,{hint:o.cacheKey,inputDependencies:["type"]},[d],n,e.inputs[0].dataType,h,u),{inputs:[d]})},Qa=(e,t)=>{rt(e,"ReduceMeanShared",t,"mean")},Ja=(e,t)=>{rt(e,"ReduceL1Shared",t,"l1")},es=(e,t)=>{rt(e,"ReduceL2Shared",t,"l2")},ts=(e,t)=>{rt(e,"ReduceLogSumExpShared",t,"logSumExp")},rs=(e,t)=>{rt(e,"ReduceMaxShared",t,"max")},ns=(e,t)=>{rt(e,"ReduceMinShared",t,"min")},os=(e,t)=>{rt(e,"ReduceProdShared",t,"prod")},is=(e,t)=>{rt(e,"ReduceSumShared",t,"sum")},as=(e,t)=>{rt(e,"ReduceSumSquareShared",t,"sumSquare")},ss=(e,t)=>{rt(e,"ReduceLogSumShared",t,"logSum")}});var nt,Wp,Fr,no,ot,Hp,Gp,Lp,Fp,qp,jp,Kp,Yp,Xp,Zp,it,ds,ls,cs,ps,ms,fs,hs,gs,ys,bs,Lr=U(()=>{"use strict";Q();ae();Ce();oe();us();nt=e=>{if(!e||e.length===0||e.length>2)throw new Error("Reduce op requires 1 or 2 inputs.");if(e.length===2&&e[1].dims.length!==1)throw new Error("Invalid axes input dims.")},Wp=e=>["","",`var value = ${e.getByIndices("input_indices")};`,""],Fr=(e,t,r,n,o,i,a=!1,l=!1)=>{let d=[],p=r[0].dims,m=p.length,u=k.normalizeAxes(o,m),h=!l&&u.length===0;p.forEach((x,_)=>{h||u.indexOf(_)>=0?a&&d.push(1):d.push(x)});let w=d.length,g=k.size(d);return{name:e,shaderCache:t,getShaderSource:x=>{let _=[],$=E("_A",r[0].dataType,m),S=M("output",i,w),I=n($,S,u),T=I[2];for(let A=0,D=0;A=0?(a&&D++,T=`for(var j${A}: u32 = 0; j${A} < ${p[A]}; j${A}++) { ${I[2].includes("last_index")?`let last_index = j${A};`:""} ${$.indicesSet("input_indices",A,`j${A}`)} ${T} - }`):(_.push(`${$.indicesSet("input_indices",A,S.indicesGet("output_indices",z))};`),z++);return` + }`):(_.push(`${$.indicesSet("input_indices",A,S.indicesGet("output_indices",D))};`),D++);return` ${x.registerUniform("output_size","u32").declareVariables($,S)} @@ -147,9 +147,9 @@ var Dn=Object.defineProperty;var tp=Object.getOwnPropertyDescriptor;var rp=Objec ${I[3]} ${I.length===4?S.setByOffset("global_idx","value"):I.slice(4).join(` `)} - }`},getRunData:()=>({outputs:[{dims:d,dataType:i}],dispatchGroup:{x:Math.ceil(g/64)},programUniforms:[{type:12,data:g},...V(p,d)]})}},ro=(e,t)=>{let r=[];return e[1].dims[0]>0&&e[1].getBigInt64Array().forEach(n=>r.push(Number(n))),te({axes:r,keepDims:t.keepDims,noopWithEmptyAxes:t.noopWithEmptyAxes})},ot=(e,t,r,n)=>{let o=e.inputs,i=o.length===1?r:ro(o,r);e.compute(Fr(t,{hint:i.cacheKey,inputDependencies:["rank"]},[o[0]],i.noopWithEmptyAxes&&i.axes.length===0?Vp:n,i.axes,o[0].dataType,i.keepDims,i.noopWithEmptyAxes),{inputs:[0]})},Np=(e,t)=>{nt(e.inputs),ot(e,"ReduceLogSum",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${n.getByIndices("input_indices")};`,"value = log(value);"])},Wp=(e,t)=>{nt(e.inputs),ot(e,"ReduceL1",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += abs(${n.getByIndices("input_indices")});`,""])},Hp=(e,t)=>{nt(e.inputs),ot(e,"ReduceL2",t,(n,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${n.getByIndices("input_indices")}; value += (t * t);`,"value = sqrt(value);"])},Gp=(e,t)=>{nt(e.inputs),ot(e,"ReduceLogSumExp",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += exp(${n.getByIndices("input_indices")});`,"value = log(value);"])},Lp=(e,t)=>{nt(e.inputs),ot(e,"ReduceMax",t,(n,o,i)=>{let 
a=[];for(let l=0;l=0||i.length===0)&&a.push(n.indicesSet("input_indices",l,0));return[`${a.join(` -`)}`,`var value = ${n.getByIndices("input_indices")};`,`value = max(value, ${n.getByIndices("input_indices")});`,""]})},Fp=(e,t)=>{nt(e.inputs),ot(e,"ReduceMean",t,(n,o,i)=>{let a=1;for(let l=0;l=0||i.length===0)&&(a*=e.inputs[0].dims[l]);return["var sum = f32(0);","",`sum += f32(${n.getByIndices("input_indices")});`,`let value = ${o.type.value}(sum / ${a});`]})},qp=(e,t)=>{nt(e.inputs),ot(e,"ReduceMin",t,(n,o,i)=>{let a=[];for(let l=0;l=0||i.length===0)&&a.push(`input_indices[${l}] = 0;`);return[`${a.join(` -`)}`,`var value = ${n.getByIndices("input_indices")};`,`value = min(value, ${n.getByIndices("input_indices")});`,""]})},Kp=(e,t)=>{nt(e.inputs),ot(e,"ReduceProd",t,(n,o)=>[`var value = ${o.type.storage}(1);`,"",`value *= ${n.getByIndices("input_indices")};`,""])},jp=(e,t)=>{nt(e.inputs),ot(e,"ReduceSum",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${n.getByIndices("input_indices")};`,""])},Yp=(e,t)=>{nt(e.inputs),ot(e,"ReduceSumSquare",t,(n,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${n.getByIndices("input_indices")}; value += t * t;`,""])},it=(e,t,r)=>{if(t.length===0)return r;let n=1,o=1;for(let i=0;i1024},ds=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Fp(e,t):Qa(e,t)},ls=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Wp(e,t):Ja(e,t)},cs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Hp(e,t):es(e,t)},ps=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Gp(e,t):ts(e,t)},ms=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Lp(e,t):rs(e,t)},fs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?qp(e,t):ns(e,t)},hs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Kp(e,t):os(e,t)},gs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?jp(e,t):is(e,t)},ys=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Yp(e,t):as(e,t)},bs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Np(e,t):ss(e,t)}});var ws,vs,$s,no,_s=U(()=>{"use strict";Q();Ce();Lr();ws=e=>{if(!e||e.length===0||e.length>2)throw new Error("ArgMinMaxOp op requires 1 or 2 inputs.");if(e[0].dataType!==1)throw new Error("Invalid input type.")},vs=(e,t)=>{ws(e.inputs);let r=(n,o,i)=>{let a=[];for(let l=0;l=0||i.length===0)&&a.push(`input_indices[${l}] = 0;`);return[`${a.join(` + }`},getRunData:()=>({outputs:[{dims:d,dataType:i}],dispatchGroup:{x:Math.ceil(g/64)},programUniforms:[{type:12,data:g},...V(p,d)]})}},no=(e,t)=>{let r=[];return e[1].dims[0]>0&&e[1].getBigInt64Array().forEach(n=>r.push(Number(n))),te({axes:r,keepDims:t.keepDims,noopWithEmptyAxes:t.noopWithEmptyAxes})},ot=(e,t,r,n)=>{let o=e.inputs,i=o.length===1?r:no(o,r);e.compute(Fr(t,{hint:i.cacheKey,inputDependencies:["rank"]},[o[0]],i.noopWithEmptyAxes&&i.axes.length===0?Wp:n,i.axes,o[0].dataType,i.keepDims,i.noopWithEmptyAxes),{inputs:[0]})},Hp=(e,t)=>{nt(e.inputs),ot(e,"ReduceLogSum",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${n.getByIndices("input_indices")};`,"value = log(value);"])},Gp=(e,t)=>{nt(e.inputs),ot(e,"ReduceL1",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += abs(${n.getByIndices("input_indices")});`,""])},Lp=(e,t)=>{nt(e.inputs),ot(e,"ReduceL2",t,(n,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${n.getByIndices("input_indices")}; value += (t * t);`,"value = sqrt(value);"])},Fp=(e,t)=>{nt(e.inputs),ot(e,"ReduceLogSumExp",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += 
exp(${n.getByIndices("input_indices")});`,"value = log(value);"])},qp=(e,t)=>{nt(e.inputs),ot(e,"ReduceMax",t,(n,o,i)=>{let a=[];for(let l=0;l=0||i.length===0)&&a.push(n.indicesSet("input_indices",l,0));return[`${a.join(` +`)}`,`var value = ${n.getByIndices("input_indices")};`,`value = max(value, ${n.getByIndices("input_indices")});`,""]})},jp=(e,t)=>{nt(e.inputs),ot(e,"ReduceMean",t,(n,o,i)=>{let a=1;for(let l=0;l=0||i.length===0)&&(a*=e.inputs[0].dims[l]);return["var sum = f32(0);","",`sum += f32(${n.getByIndices("input_indices")});`,`let value = ${o.type.value}(sum / ${a});`]})},Kp=(e,t)=>{nt(e.inputs),ot(e,"ReduceMin",t,(n,o,i)=>{let a=[];for(let l=0;l=0||i.length===0)&&a.push(`input_indices[${l}] = 0;`);return[`${a.join(` +`)}`,`var value = ${n.getByIndices("input_indices")};`,`value = min(value, ${n.getByIndices("input_indices")});`,""]})},Yp=(e,t)=>{nt(e.inputs),ot(e,"ReduceProd",t,(n,o)=>[`var value = ${o.type.storage}(1);`,"",`value *= ${n.getByIndices("input_indices")};`,""])},Xp=(e,t)=>{nt(e.inputs),ot(e,"ReduceSum",t,(n,o)=>[`var value = ${o.type.storage}(0);`,"",`value += ${n.getByIndices("input_indices")};`,""])},Zp=(e,t)=>{nt(e.inputs),ot(e,"ReduceSumSquare",t,(n,o)=>[`var t = ${o.type.value}(0); var value = ${o.type.value}(0);`,"",`t = ${n.getByIndices("input_indices")}; value += t * t;`,""])},it=(e,t,r)=>{if(t.length===0)return r;let n=1,o=1;for(let i=0;i1024},ds=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?jp(e,t):Qa(e,t)},ls=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Gp(e,t):Ja(e,t)},cs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Lp(e,t):es(e,t)},ps=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Fp(e,t):ts(e,t)},ms=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?qp(e,t):rs(e,t)},fs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Kp(e,t):ns(e,t)},hs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Yp(e,t):os(e,t)},gs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Xp(e,t):is(e,t)},ys=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Zp(e,t):as(e,t)},bs=(e,t)=>{it(e.inputs[0].dims,t.axes,t.noopWithEmptyAxes)?Hp(e,t):ss(e,t)}});var ws,vs,$s,oo,_s=U(()=>{"use strict";Q();Ce();Lr();ws=e=>{if(!e||e.length===0||e.length>2)throw new Error("ArgMinMaxOp op requires 1 or 2 inputs.");if(e[0].dataType!==1)throw new Error("Invalid input type.")},vs=(e,t)=>{ws(e.inputs);let r=(n,o,i)=>{let a=[];for(let l=0;l=0||i.length===0)&&a.push(`input_indices[${l}] = 0;`);return[`${a.join(` `)}`,`var value = ${n.getByIndices("input_indices")}; var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLastIndex>0?"<=":"<"} value) { value = ${n.getByIndices("input_indices")}; @@ -159,7 +159,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLastIndex>0?">=":">"} value) { value = ${n.getByIndices("input_indices")}; best_index = i32(last_index); - }`,"",o.setByOffset("global_idx","best_index")]};e.compute(Fr("argMax",{hint:t.cacheKey,inputDependencies:["rank"]},[e.inputs[0]],r,[t.axis],7,t.keepDims),{inputs:[0]})},no=e=>te(e)});var Xp,Zp,Qp,Jp,Nt,em,xs,qr=U(()=>{"use strict";Q();Vr();oe();Xp=(e,t)=>{let r=e[0],n=e[1],o=e[2],i=e[3],a=e[4],l=e[5];if(a&&l)throw new Error("Attention cannot have both past and relative_position_bias");if(r.dims.length!==3)throw new Error('Input "input" must have 3 dimensions');let d=r.dims[0],p=r.dims[1],m=r.dims[2];if(o.dims.length!==1)throw new Error('Input 
"bias" is expected to have 1 dimensions');if(n.dims.length!==2)throw new Error('Input "weights" is expected to have 2 dimensions');if(n.dims[0]!==m)throw new Error("Input 1 dimension 0 should have same length as dimension 2 of input 0");if(o.dims[0]!==n.dims[1])throw new Error('Input "bias" dimension 0 should have same length as dimension 1 of input "weights"');let u=o.dims[0]/3,h=u,w=h;if(t.qkvHiddenSizes.length>0){if(t.qkvHiddenSizes.length!==3)throw new Error("qkv_hidden_sizes attribute should have 3 elements");for(let S of t.qkvHiddenSizes)if(S%t.numHeads!==0)throw new Error("qkv_hidden_sizes should be divisible by num_heads");u=t.qkvHiddenSizes[0],h=t.qkvHiddenSizes[1],w=t.qkvHiddenSizes[2]}let g=p;if(u!==h)throw new Error("qkv_hidden_sizes first element should be same as the second");if(o.dims[0]!==u+h+w)throw new Error('Input "bias" dimension 0 should have same length as sum of Q/K/V hidden sizes');let b=0;if(a){if(h!==w)throw new Error('Input "past" expect k_hidden_size == v_hidden_size');if(a.dims.length!==5)throw new Error('Input "past" must have 5 dimensions');if(a.dims[0]!==2)throw new Error('Input "past" first dimension must be 2');if(a.dims[1]!==d)throw new Error('Input "past" second dimension must be batch_size');if(a.dims[2]!==t.numHeads)throw new Error('Input "past" third dimension must be num_heads');if(a.dims[4]!==h/t.numHeads)throw new Error('Input "past" fifth dimension must be k_hidden_size / num_heads');t.pastPresentShareBuffer||(b=a.dims[3])}let x=g+b,_=-1,$=0;if(i)throw new Error("Mask not supported");if(a)throw new Error("past is not supported");return{batchSize:d,sequenceLength:p,pastSequenceLength:b,kvSequenceLength:g,totalSequenceLength:x,maxSequenceLength:_,inputHiddenSize:m,hiddenSize:u,vHiddenSize:w,headSize:Math.floor(u/t.numHeads),vHeadSize:Math.floor(w/t.numHeads),numHeads:t.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:t.maskFilterValue,maskType:$,scale:t.scale,broadcastResPosBias:!1,passPastInKv:!1,qkvFormat:1}},Zp=(e,t,r,n)=>{let o=we(n),i=64,a=n/o;a{let w=M("x",t.dataType,t.dims,o),b=[{name:"d_inv",type:Oe(t.dataType)},{name:"d_comp",type:"u32"},{name:"elements_per_thread",type:"u32"}];return` + }`,"",o.setByOffset("global_idx","best_index")]};e.compute(Fr("argMax",{hint:t.cacheKey,inputDependencies:["rank"]},[e.inputs[0]],r,[t.axis],7,t.keepDims),{inputs:[0]})},oo=e=>te(e)});var Qp,Jp,em,tm,Wt,rm,xs,qr=U(()=>{"use strict";Q();Vr();oe();Qp=(e,t)=>{let r=e[0],n=e[1],o=e[2],i=e[3],a=e[4],l=e[5];if(a&&l)throw new Error("Attention cannot have both past and relative_position_bias");if(r.dims.length!==3)throw new Error('Input "input" must have 3 dimensions');let d=r.dims[0],p=r.dims[1],m=r.dims[2];if(o.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimensions');if(n.dims.length!==2)throw new Error('Input "weights" is expected to have 2 dimensions');if(n.dims[0]!==m)throw new Error("Input 1 dimension 0 should have same length as dimension 2 of input 0");if(o.dims[0]!==n.dims[1])throw new Error('Input "bias" dimension 0 should have same length as dimension 1 of input "weights"');let u=o.dims[0]/3,h=u,w=h;if(t.qkvHiddenSizes.length>0){if(t.qkvHiddenSizes.length!==3)throw new Error("qkv_hidden_sizes attribute should have 3 elements");for(let S of t.qkvHiddenSizes)if(S%t.numHeads!==0)throw new Error("qkv_hidden_sizes should be divisible by num_heads");u=t.qkvHiddenSizes[0],h=t.qkvHiddenSizes[1],w=t.qkvHiddenSizes[2]}let g=p;if(u!==h)throw new Error("qkv_hidden_sizes first element should be same as the 
second");if(o.dims[0]!==u+h+w)throw new Error('Input "bias" dimension 0 should have same length as sum of Q/K/V hidden sizes');let b=0;if(a){if(h!==w)throw new Error('Input "past" expect k_hidden_size == v_hidden_size');if(a.dims.length!==5)throw new Error('Input "past" must have 5 dimensions');if(a.dims[0]!==2)throw new Error('Input "past" first dimension must be 2');if(a.dims[1]!==d)throw new Error('Input "past" second dimension must be batch_size');if(a.dims[2]!==t.numHeads)throw new Error('Input "past" third dimension must be num_heads');if(a.dims[4]!==h/t.numHeads)throw new Error('Input "past" fifth dimension must be k_hidden_size / num_heads');t.pastPresentShareBuffer||(b=a.dims[3])}let x=g+b,_=-1,$=0;if(i)throw new Error("Mask not supported");if(a)throw new Error("past is not supported");return{batchSize:d,sequenceLength:p,pastSequenceLength:b,kvSequenceLength:g,totalSequenceLength:x,maxSequenceLength:_,inputHiddenSize:m,hiddenSize:u,vHiddenSize:w,headSize:Math.floor(u/t.numHeads),vHeadSize:Math.floor(w/t.numHeads),numHeads:t.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:t.maskFilterValue,maskType:$,scale:t.scale,broadcastResPosBias:!1,passPastInKv:!1,qkvFormat:1}},Jp=(e,t,r,n)=>{let o=we(n),i=64,a=n/o;a{let w=M("x",t.dataType,t.dims,o),b=[{name:"d_inv",type:Oe(t.dataType)},{name:"d_comp",type:"u32"},{name:"elements_per_thread",type:"u32"}];return` var thread_max: array; var thread_sum: array; ${h.registerUniforms(b).declareVariables(w)} @@ -201,12 +201,12 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas x[offset + i] = ${w.type.value}(exp(f32input - max_value) / sum); } } - }`};return{name:"AttentionProbsSoftmax",shaderCache:{hint:`${i};${p};${o}`},getShaderSource:u,getRunData:()=>({outputs:[],dispatchGroup:{x:r},programUniforms:d})}},Qp=(e,t,r,n,o,i,a,l)=>{let d=l+i.kvSequenceLength,p=[i.batchSize,i.numHeads,i.sequenceLength,d],m=i.kvNumHeads===void 0&&e.outputCount>1,u=m?[i.batchSize,i.numHeads,d,i.headSize]:void 0,h=a.scale===0?1/Math.sqrt(i.headSize):a.scale,w=we(i.headSize),g=i.headSize/w,b=12,x={x:Math.ceil(d/b),y:Math.ceil(i.sequenceLength/b),z:i.batchSize*i.numHeads},_=[{type:12,data:i.sequenceLength},{type:12,data:g},{type:12,data:d},{type:12,data:i.numHeads},{type:1,data:h},{type:12,data:l},{type:12,data:i.kvSequenceLength}],$=["type","type"];n&&$.push("type"),o&&$.push("type");let S=[{dims:p,dataType:t.dataType,gpuDataType:0}];m&&S.push({dims:u,dataType:t.dataType,gpuDataType:0});let I=T=>{let A=E("q",t.dataType,t.dims,w),z=E("key",r.dataType,r.dims,w),D=[A,z];if(n){let ce=E("past_key",n.dataType,n.dims,w);D.push(ce)}o&&D.push(E("relative_position_bias",o.dataType,o.dims));let H=M("output",t.dataType,p),W=[H];m&&W.push(M("present_key",t.dataType,u,w));let F=Oe(1,w),de=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"alpha",type:"f32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` + }`};return{name:"AttentionProbsSoftmax",shaderCache:{hint:`${i};${p};${o}`},getShaderSource:u,getRunData:()=>({outputs:[],dispatchGroup:{x:r},programUniforms:d})}},em=(e,t,r,n,o,i,a,l)=>{let d=l+i.kvSequenceLength,p=[i.batchSize,i.numHeads,i.sequenceLength,d],m=i.kvNumHeads===void 0&&e.outputCount>1,u=m?[i.batchSize,i.numHeads,d,i.headSize]:void 
0,h=a.scale===0?1/Math.sqrt(i.headSize):a.scale,w=we(i.headSize),g=i.headSize/w,b=12,x={x:Math.ceil(d/b),y:Math.ceil(i.sequenceLength/b),z:i.batchSize*i.numHeads},_=[{type:12,data:i.sequenceLength},{type:12,data:g},{type:12,data:d},{type:12,data:i.numHeads},{type:1,data:h},{type:12,data:l},{type:12,data:i.kvSequenceLength}],$=["type","type"];n&&$.push("type"),o&&$.push("type");let S=[{dims:p,dataType:t.dataType,gpuDataType:0}];m&&S.push({dims:u,dataType:t.dataType,gpuDataType:0});let I=T=>{let A=E("q",t.dataType,t.dims,w),D=E("key",r.dataType,r.dims,w),z=[A,D];if(n){let ce=E("past_key",n.dataType,n.dims,w);z.push(ce)}o&&z.push(E("relative_position_bias",o.dataType,o.dims));let H=M("output",t.dataType,p),W=[H];m&&W.push(M("present_key",t.dataType,u,w));let F=Oe(1,w),de=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"alpha",type:"f32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` const TILE_SIZE = ${b}u; var tileQ: array<${A.type.storage}, ${b*b}>; var tileK: array<${A.type.storage}, ${b*b}>; - ${T.registerUniforms(de).declareVariables(...D,...W)} + ${T.registerUniforms(de).declareVariables(...z,...W)} ${T.mainStart([b,b,1])} // x holds the N and y holds the M let headIdx = workgroup_id.z; @@ -249,11 +249,11 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas var sum: f32 = ${(()=>{switch(w){case 1:return"value";case 2:return"value.x + value.y";case 4:return"value.x + value.y + value.z + value.w";default:throw new Error(`Unsupported components: ${w}`)}})()}; output[outputIdx] = ${H.type.value} (sum * uniforms.alpha) + ${o?"relative_position_bias[outputIdx]":"0.0"}; } - }`};return{name:"AttentionProbs",shaderCache:{hint:`${w};${o!==void 0};${n!==void 0};${e.outputCount}`,inputDependencies:$},getRunData:()=>({outputs:S,dispatchGroup:x,programUniforms:_}),getShaderSource:I}},Jp=(e,t,r,n,o,i)=>{let a=i+o.kvSequenceLength,l=o.nReps?o.nReps:1,d=o.vHiddenSize*l,p=o.kvNumHeads==null&&e.outputCount>1,m=p?[o.batchSize,o.numHeads,a,o.headSize]:void 0,u=[o.batchSize,o.sequenceLength,d],h=12,w={x:Math.ceil(o.vHeadSize/h),y:Math.ceil(o.sequenceLength/h),z:o.batchSize*o.numHeads},g=[{type:12,data:o.sequenceLength},{type:12,data:a},{type:12,data:o.vHeadSize},{type:12,data:o.numHeads},{type:12,data:d},{type:12,data:i},{type:12,data:o.kvSequenceLength}],b=n?["type","type","type"]:["type","type"],x=[{dims:u,dataType:t.dataType,gpuDataType:0}];p&&x.push({dims:m,dataType:t.dataType,gpuDataType:0});let _=$=>{let S=E("probs",t.dataType,t.dims),I=E("v",r.dataType,r.dims),T=[S,I];n&&T.push(E("past_value",n.dataType,n.dims));let z=[M("output",t.dataType,u)];p&&z.push(M("present_value",t.dataType,m));let D=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"v_hidden_size",type:"u32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` + }`};return{name:"AttentionProbs",shaderCache:{hint:`${w};${o!==void 0};${n!==void 0};${e.outputCount}`,inputDependencies:$},getRunData:()=>({outputs:S,dispatchGroup:x,programUniforms:_}),getShaderSource:I}},tm=(e,t,r,n,o,i)=>{let a=i+o.kvSequenceLength,l=o.nReps?o.nReps:1,d=o.vHiddenSize*l,p=o.kvNumHeads==null&&e.outputCount>1,m=p?[o.batchSize,o.numHeads,a,o.headSize]:void 
0,u=[o.batchSize,o.sequenceLength,d],h=12,w={x:Math.ceil(o.vHeadSize/h),y:Math.ceil(o.sequenceLength/h),z:o.batchSize*o.numHeads},g=[{type:12,data:o.sequenceLength},{type:12,data:a},{type:12,data:o.vHeadSize},{type:12,data:o.numHeads},{type:12,data:d},{type:12,data:i},{type:12,data:o.kvSequenceLength}],b=n?["type","type","type"]:["type","type"],x=[{dims:u,dataType:t.dataType,gpuDataType:0}];p&&x.push({dims:m,dataType:t.dataType,gpuDataType:0});let _=$=>{let S=E("probs",t.dataType,t.dims),I=E("v",r.dataType,r.dims),T=[S,I];n&&T.push(E("past_value",n.dataType,n.dims));let D=[M("output",t.dataType,u)];p&&D.push(M("present_value",t.dataType,m));let z=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"v_hidden_size",type:"u32"},{name:"past_sequence_length",type:"u32"},{name:"kv_sequence_length",type:"u32"}];return` const TILE_SIZE = ${h}u; var tileQ: array<${S.type.value}, ${h*h}>; var tileK: array<${S.type.value}, ${h*h}>; - ${$.registerUniforms(D).declareVariables(...T,...z)} + ${$.registerUniforms(z).declareVariables(...T,...D)} ${$.mainStart([h,h,1])} let headIdx = workgroup_id.z; let m = global_id.y; @@ -300,7 +300,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas + currentBatchHeadNumber * uniforms.N + n; output[outputIdx] = value; } - }`};return{name:"AttentionScore",shaderCache:{hint:`${n!==void 0};${e.outputCount}`,inputDependencies:b},getRunData:()=>({outputs:x,dispatchGroup:w,programUniforms:g}),getShaderSource:_}},Nt=(e,t,r,n,o,i,a,l,d,p,m)=>{let u=e.outputCount,h=p.kvNumHeads!==void 0||u>1?p.pastSequenceLength:0,w=h+p.kvSequenceLength,g=p.kvNumHeads===void 0&&u>1&&a?[t,r,a]:[t,r];d&&g.push(d);let b=e.compute(Qp(e,t,r,u>1?a:void 0,d,p,m,h),{inputs:g,outputs:p.kvNumHeads===void 0&&u>1?[-1,1]:[-1]})[0];e.compute(Zp(e,b,p.batchSize*p.numHeads*p.sequenceLength,w),{inputs:[b],outputs:[]});let x=p.kvNumHeads===void 0&&u>1&&l?[b,n,l]:[b,n];e.compute(Jp(e,b,n,u>1&&l?l:void 0,p,h),{inputs:x,outputs:p.kvNumHeads===void 0&&u>1?[0,2]:[0]})},em=(e,t)=>{let r=[t.batchSize,t.numHeads,t.sequenceLength,t.headSize],n=t.sequenceLength,o=t.inputHiddenSize,i=t.headSize,a=12,l={x:Math.ceil(t.headSize/a),y:Math.ceil(t.sequenceLength/a),z:t.batchSize*t.numHeads},d=[e.inputs[0],e.inputs[1],e.inputs[2]],p=[{type:12,data:n},{type:12,data:o},{type:12,data:i},{type:12,data:t.numHeads},{type:12,data:t.headSize},{type:12,data:t.hiddenSize},{type:12,data:t.hiddenSize+t.hiddenSize+t.vHiddenSize}],m=u=>{let h=M("output_q",d[0].dataType,r),w=M("output_k",d[0].dataType,r),g=M("output_v",d[0].dataType,r),b=E("input",d[0].dataType,d[0].dims),x=E("weight",d[1].dataType,d[1].dims),_=E("bias",d[2].dataType,d[2].dims),$=b.type.storage,S=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"head_size",type:"u32"},{name:"hidden_size",type:"u32"},{name:"ldb",type:"u32"}];return` + }`};return{name:"AttentionScore",shaderCache:{hint:`${n!==void 0};${e.outputCount}`,inputDependencies:b},getRunData:()=>({outputs:x,dispatchGroup:w,programUniforms:g}),getShaderSource:_}},Wt=(e,t,r,n,o,i,a,l,d,p,m)=>{let u=e.outputCount,h=p.kvNumHeads!==void 0||u>1?p.pastSequenceLength:0,w=h+p.kvSequenceLength,g=p.kvNumHeads===void 0&&u>1&&a?[t,r,a]:[t,r];d&&g.push(d);let b=e.compute(em(e,t,r,u>1?a:void 0,d,p,m,h),{inputs:g,outputs:p.kvNumHeads===void 0&&u>1?[-1,1]:[-1]})[0];e.compute(Jp(e,b,p.batchSize*p.numHeads*p.sequenceLength,w),{inputs:[b],outputs:[]});let x=p.kvNumHeads===void 
0&&u>1&&l?[b,n,l]:[b,n];e.compute(tm(e,b,n,u>1&&l?l:void 0,p,h),{inputs:x,outputs:p.kvNumHeads===void 0&&u>1?[0,2]:[0]})},rm=(e,t)=>{let r=[t.batchSize,t.numHeads,t.sequenceLength,t.headSize],n=t.sequenceLength,o=t.inputHiddenSize,i=t.headSize,a=12,l={x:Math.ceil(t.headSize/a),y:Math.ceil(t.sequenceLength/a),z:t.batchSize*t.numHeads},d=[e.inputs[0],e.inputs[1],e.inputs[2]],p=[{type:12,data:n},{type:12,data:o},{type:12,data:i},{type:12,data:t.numHeads},{type:12,data:t.headSize},{type:12,data:t.hiddenSize},{type:12,data:t.hiddenSize+t.hiddenSize+t.vHiddenSize}],m=u=>{let h=M("output_q",d[0].dataType,r),w=M("output_k",d[0].dataType,r),g=M("output_v",d[0].dataType,r),b=E("input",d[0].dataType,d[0].dims),x=E("weight",d[1].dataType,d[1].dims),_=E("bias",d[2].dataType,d[2].dims),$=b.type.storage,S=[{name:"M",type:"u32"},{name:"K",type:"u32"},{name:"N",type:"u32"},{name:"num_heads",type:"u32"},{name:"head_size",type:"u32"},{name:"hidden_size",type:"u32"},{name:"ldb",type:"u32"}];return` const TILE_SIZE = ${a}u; var tileInput: array<${$}, ${a*a}>; var tileWeightQ: array<${$}, ${a*a}>; @@ -355,7 +355,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas output_k[outputIdx] = valueK; output_v[outputIdx] = valueV; } - }`};return e.compute({name:"AttentionPrepare",shaderCache:{inputDependencies:["type","type","type"]},getRunData:()=>({outputs:[{dims:r,dataType:e.inputs[0].dataType,gpuDataType:0},{dims:r,dataType:e.inputs[0].dataType,gpuDataType:0},{dims:r,dataType:e.inputs[0].dataType,gpuDataType:0}],dispatchGroup:l,programUniforms:p}),getShaderSource:m},{inputs:d,outputs:[-1,-1,-1]})},xs=(e,t)=>{let r=Xp(e.inputs,t),[n,o,i]=em(e,r);return Nt(e,n,o,i,e.inputs[4],void 0,void 0,void 0,e.inputs[5],r,t)}});var tm,rm,nm,Ss,Is=U(()=>{"use strict";Le();Q();ae();Ce();oe();tm=(e,t)=>{if(!e||e.length!==5)throw new Error("BatchNormalization requires 5 inputs");let r=(n,o,i)=>{let a=o.length;if(a!==n.length)throw new Error(`${i}: num dimensions != ${a}`);o.forEach((l,d)=>{if(l!==n[d])throw new Error(`${i}: dim[${d}] do not match`)})};if(e[0].dims.length>1){let n=t.format==="NHWC"?t.spatial?e[0].dims.slice(-1):e[0].dims.slice(-1).concat(e[0].dims.slice(1,e[0].dims.length-1)):e[0].dims.slice(1,t.spatial?2:void 0);r(e[1].dims,n,"Invalid input scale"),r(e[2].dims,n,"Invalid input B"),r(e[3].dims,n,"Invalid input mean"),r(e[4].dims,n,"Invalid input var")}else r(e[1].dims,[1],"Invalid input scale"),r(e[2].dims,[1],"Invalid input B"),r(e[3].dims,[1],"Invalid input mean"),r(e[4].dims,[1],"Invalid input var")},rm=(e,t)=>{let{epsilon:r,spatial:n,format:o}=t,i=e[0].dims,a=n?we(i[i.length-1]):1,l=o==="NHWC"&&i.length>1?a:1,d=k.size(i)/a,p=n,m=p?i.length:i,u=E("x",e[0].dataType,e[0].dims,a),h=E("scale",e[1].dataType,e[1].dims,l),w=E("bias",e[2].dataType,e[2].dims,l),g=E("inputMean",e[3].dataType,e[3].dims,l),b=E("inputVar",e[4].dataType,e[4].dims,l),x=M("y",e[0].dataType,m,a),_=()=>{let S="";if(n)S=`let cOffset = ${i.length===1?"0u":o==="NHWC"?`outputIndices[${i.length-1}] / ${a}`:"outputIndices[1]"};`;else if(o==="NCHW")S=` + }`};return e.compute({name:"AttentionPrepare",shaderCache:{inputDependencies:["type","type","type"]},getRunData:()=>({outputs:[{dims:r,dataType:e.inputs[0].dataType,gpuDataType:0},{dims:r,dataType:e.inputs[0].dataType,gpuDataType:0},{dims:r,dataType:e.inputs[0].dataType,gpuDataType:0}],dispatchGroup:l,programUniforms:p}),getShaderSource:m},{inputs:d,outputs:[-1,-1,-1]})},xs=(e,t)=>{let r=Qp(e.inputs,t),[n,o,i]=rm(e,r);return Wt(e,n,o,i,e.inputs[4],void 0,void 0,void 
0,e.inputs[5],r,t)}});var nm,om,im,Ss,Is=U(()=>{"use strict";Le();Q();ae();Ce();oe();nm=(e,t)=>{if(!e||e.length!==5)throw new Error("BatchNormalization requires 5 inputs");let r=(n,o,i)=>{let a=o.length;if(a!==n.length)throw new Error(`${i}: num dimensions != ${a}`);o.forEach((l,d)=>{if(l!==n[d])throw new Error(`${i}: dim[${d}] do not match`)})};if(e[0].dims.length>1){let n=t.format==="NHWC"?t.spatial?e[0].dims.slice(-1):e[0].dims.slice(-1).concat(e[0].dims.slice(1,e[0].dims.length-1)):e[0].dims.slice(1,t.spatial?2:void 0);r(e[1].dims,n,"Invalid input scale"),r(e[2].dims,n,"Invalid input B"),r(e[3].dims,n,"Invalid input mean"),r(e[4].dims,n,"Invalid input var")}else r(e[1].dims,[1],"Invalid input scale"),r(e[2].dims,[1],"Invalid input B"),r(e[3].dims,[1],"Invalid input mean"),r(e[4].dims,[1],"Invalid input var")},om=(e,t)=>{let{epsilon:r,spatial:n,format:o}=t,i=e[0].dims,a=n?we(i[i.length-1]):1,l=o==="NHWC"&&i.length>1?a:1,d=k.size(i)/a,p=n,m=p?i.length:i,u=E("x",e[0].dataType,e[0].dims,a),h=E("scale",e[1].dataType,e[1].dims,l),w=E("bias",e[2].dataType,e[2].dims,l),g=E("inputMean",e[3].dataType,e[3].dims,l),b=E("inputVar",e[4].dataType,e[4].dims,l),x=M("y",e[0].dataType,m,a),_=()=>{let S="";if(n)S=`let cOffset = ${i.length===1?"0u":o==="NHWC"?`outputIndices[${i.length-1}] / ${a}`:"outputIndices[1]"};`;else if(o==="NCHW")S=` ${x.indicesSet("outputIndices","0","0")} let cOffset = ${x.indicesToOffset("outputIndices")};`;else{S=`var cIndices = ${h.type.indices}(0); cIndices[0] = outputIndices[${i.length-1}];`;for(let I=1;I` @@ -372,7 +372,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas let x = ${u.getByOffset("global_idx")}; let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias; ${x.setByOffset("global_idx","value")} - }`;return{name:"BatchNormalization",shaderCache:{hint:`${t.epsilon}_${t.format}_${n}_${a}`,inputDependencies:p?["rank","type","type","type","type"]:void 0},getShaderSource:$,getRunData:()=>({outputs:[{dims:e[0].dims,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(d/64)},programUniforms:p?[{type:12,data:d},...V(i)]:[{type:12,data:d}]})}},nm=e=>te(e),Ss=(e,t)=>{let{inputs:r,outputCount:n}=e,o=nm({...t,outputCount:n});if(ye.webgpu.validateInputContent&&tm(r,o),t.trainingMode)throw new Error("BatchNormalization trainingMode is not supported yet.");e.compute(rm(r,o))}});var om,im,Cs,Ts=U(()=>{"use strict";ae();oe();om=e=>{if(e[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![320,640,1280].includes(e[0].dims[2]))throw new Error("number of channels should be 320, 640 or 1280");if(e[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(e[0].dims[2]!==e[1].dims[0])throw new Error("last dimension of input and bias are not the same")},im=e=>{let t=e[0].dims,r=e[0].dims[2],n=k.size(t)/4,o=e[0].dataType,i=E("input",o,t,4),a=E("bias",o,[r],4),l=E("residual",o,t,4),d=M("output",o,t,4);return{name:"BiasAdd",getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(n/64)}}),getShaderSource:m=>` + }`;return{name:"BatchNormalization",shaderCache:{hint:`${t.epsilon}_${t.format}_${n}_${a}`,inputDependencies:p?["rank","type","type","type","type"]:void 
0},getShaderSource:$,getRunData:()=>({outputs:[{dims:e[0].dims,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(d/64)},programUniforms:p?[{type:12,data:d},...V(i)]:[{type:12,data:d}]})}},im=e=>te(e),Ss=(e,t)=>{let{inputs:r,outputCount:n}=e,o=im({...t,outputCount:n});if(ye.webgpu.validateInputContent&&nm(r,o),t.trainingMode)throw new Error("BatchNormalization trainingMode is not supported yet.");e.compute(om(r,o))}});var am,sm,Cs,Ts=U(()=>{"use strict";ae();oe();am=e=>{if(e[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![320,640,1280].includes(e[0].dims[2]))throw new Error("number of channels should be 320, 640 or 1280");if(e[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(e[0].dims[2]!==e[1].dims[0])throw new Error("last dimension of input and bias are not the same")},sm=e=>{let t=e[0].dims,r=e[0].dims[2],n=k.size(t)/4,o=e[0].dataType,i=E("input",o,t,4),a=E("bias",o,[r],4),l=E("residual",o,t,4),d=M("output",o,t,4);return{name:"BiasAdd",getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(n/64)}}),getShaderSource:m=>` const channels = ${r}u / 4; ${m.declareVariables(i,a,l,d)} @@ -381,7 +381,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas let value = ${i.getByOffset("global_idx")} + ${a.getByOffset("global_idx % channels")} + ${l.getByOffset("global_idx")}; ${d.setByOffset("global_idx","value")} - }`}},Cs=e=>{om(e.inputs),e.compute(im(e.inputs))}});var am,me,As,Es,ks,Ps,Os,zs,Ds,Bs,Rs,sm,Ms,Us,Vs,Ns,Zt,Ws,Kr,Hs,Gs,Ls,Fs,qs,Ks,js,Ys,Xs,Zs,Qs,Js,eu,tu,ru,nu,ou,iu,oo,io,au,su,uu,um,dm,du,jr=U(()=>{"use strict";Q();ae();Ce();oe();am=(e,t,r,n,o,i)=>{let a=Math.ceil(t/4),l="";typeof o=="string"?l=`${o}(a)`:l=o("a");let d=E("inputData",r,[a],4),p=M("outputData",n,[a],4);return` + }`}},Cs=e=>{am(e.inputs),e.compute(sm(e.inputs))}});var um,me,As,Es,ks,Ps,Os,Ds,zs,Bs,Rs,dm,Ms,Us,Vs,Ns,Zt,Ws,jr,Hs,Gs,Ls,Fs,qs,js,Ks,Ys,Xs,Zs,Qs,Js,eu,tu,ru,nu,ou,iu,io,ao,au,su,uu,lm,cm,du,Kr=U(()=>{"use strict";Q();ae();Ce();oe();um=(e,t,r,n,o,i)=>{let a=Math.ceil(t/4),l="";typeof o=="string"?l=`${o}(a)`:l=o("a");let d=E("inputData",r,[a],4),p=M("outputData",n,[a],4);return` ${e.registerUniform("vec_size","u32").declareVariables(d,p)} ${i??""} @@ -391,7 +391,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas let a = ${d.getByOffset("global_idx")}; ${p.setByOffset("global_idx",l)} - }`},me=(e,t,r,n,o,i=e.dataType)=>({name:t,shaderCache:{hint:o,inputDependencies:["type"]},getShaderSource:a=>am(a,k.size(e.dims),e.dataType,i,r,n),getRunData:a=>({outputs:[{dims:e.dims,dataType:i}],dispatchGroup:{x:Math.ceil(k.size(a[0].dims)/64/4)},programUniforms:[{type:12,data:Math.ceil(k.size(e.dims)/4)}]})}),As=e=>{e.compute(me(e.inputs[0],"Abs","abs"))},Es=e=>{e.compute(me(e.inputs[0],"Acos","acos"))},ks=e=>{e.compute(me(e.inputs[0],"Acosh","acosh"))},Ps=e=>{e.compute(me(e.inputs[0],"Asin","asin"))},Os=e=>{e.compute(me(e.inputs[0],"Asinh","asinh"))},zs=e=>{e.compute(me(e.inputs[0],"Atan","atan"))},Ds=e=>{e.compute(me(e.inputs[0],"Atanh","atanh"))},Bs=e=>te(e),Rs=(e,t)=>{let r;switch(t.to){case 10:r="vec4";break;case 1:r="vec4";break;case 12:r="vec4";break;case 6:r="vec4";break;case 9:r="vec4";break;default:throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${t.to}`)}e.compute(me(e.inputs[0],"Cast",r,void 0,t.cacheKey,t.to))},sm=e=>{let 
t=e.length>=2&&e[1].data!==0?e[1].getFloat32Array()[0]:Wr,r=e.length>=3&&e[2].data!==0?e[2].getFloat32Array()[0]:Hr;return te({min:t,max:r})},Ms=(e,t)=>{let r=e.inputs.length===1?t:sm(e.inputs),n=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Clip",o=>`clamp(${o}, clip_min_, clip_max_)`,` + }`},me=(e,t,r,n,o,i=e.dataType)=>({name:t,shaderCache:{hint:o,inputDependencies:["type"]},getShaderSource:a=>um(a,k.size(e.dims),e.dataType,i,r,n),getRunData:a=>({outputs:[{dims:e.dims,dataType:i}],dispatchGroup:{x:Math.ceil(k.size(a[0].dims)/64/4)},programUniforms:[{type:12,data:Math.ceil(k.size(e.dims)/4)}]})}),As=e=>{e.compute(me(e.inputs[0],"Abs","abs"))},Es=e=>{e.compute(me(e.inputs[0],"Acos","acos"))},ks=e=>{e.compute(me(e.inputs[0],"Acosh","acosh"))},Ps=e=>{e.compute(me(e.inputs[0],"Asin","asin"))},Os=e=>{e.compute(me(e.inputs[0],"Asinh","asinh"))},Ds=e=>{e.compute(me(e.inputs[0],"Atan","atan"))},zs=e=>{e.compute(me(e.inputs[0],"Atanh","atanh"))},Bs=e=>te(e),Rs=(e,t)=>{let r;switch(t.to){case 10:r="vec4";break;case 1:r="vec4";break;case 12:r="vec4";break;case 6:r="vec4";break;case 9:r="vec4";break;default:throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${t.to}`)}e.compute(me(e.inputs[0],"Cast",r,void 0,t.cacheKey,t.to))},dm=e=>{let t=e.length>=2&&e[1].data!==0?e[1].getFloat32Array()[0]:Wr,r=e.length>=3&&e[2].data!==0?e[2].getFloat32Array()[0]:Hr;return te({min:t,max:r})},Ms=(e,t)=>{let r=e.inputs.length===1?t:dm(e.inputs),n=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Clip",o=>`clamp(${o}, clip_min_, clip_max_)`,` const clip_min_: vec4<${n}> = vec4(${n}(${r.min})); const clip_max_: vec4<${n}> = vec4(${n}(${r.max})); `,r.cacheKey),{inputs:[0]})},Us=e=>{e.compute(me(e.inputs[0],"Ceil","ceil"))},Vs=e=>{e.compute(me(e.inputs[0],"Cos","cos"))},Ns=e=>{e.compute(me(e.inputs[0],"Cosh","cosh"))},Zt=e=>te(e),Ws=(e,t)=>{let r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Elu",n=>`elu_vf32(${n})`,` @@ -403,7 +403,7 @@ var best_index : i32 = 0;`,`if (${n.getByIndices("input_indices")} ${t.selectLas fn elu_vf32(v: vec4<${r}>) -> vec4<${r}> { return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w)); - }`,t.cacheKey))},Kr=(e="f32")=>` + }`,t.cacheKey))},jr=(e="f32")=>` const r0: ${e} = 0.3275911; const r1: ${e} = 0.254829592; const r2: ${e} = -0.284496736; @@ -415,7 +415,7 @@ fn erf_vf32(v: vec4<${e}>) -> vec4<${e}> { let absv = abs(v); let x = 1.0 / (1.0 + r0 * absv); return sign(v) * (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv)); -}`,Hs=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Erf",r=>`erf_vf32(${r})`,Kr(t)))},Gs=e=>{e.compute(me(e.inputs[0],"Exp","exp"))},Ls=e=>{e.compute(me(e.inputs[0],"Floor","floor"))},Fs=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Gelu",r=>`0.5 * ${r} * (1.0 + erf_vf32(${r} * 0.7071067811865475))`,Kr(t)))},qs=(e,t)=>{let r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"LeakyRelu",n=>`select(leaky_relu_alpha_ * ${n}, ${n}, ${n} >= vec4<${r}>(0.0))`,`const leaky_relu_alpha_ = ${r}(${t.alpha});`,t.cacheKey))},Ks=e=>{e.compute(me(e.inputs[0],"Not",t=>`!${t}`))},js=e=>{e.compute(me(e.inputs[0],"Neg",t=>`-${t}`))},Ys=e=>{e.compute(me(e.inputs[0],"Reciprocal",t=>`1.0/${t}`))},Xs=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Relu",r=>`select(vec4<${t}>(0.0), ${r}, ${r} > vec4<${t}>(0.0))`))},Zs=e=>{e.compute(me(e.inputs[0],"Sigmoid",t=>`(1.0 / (1.0 + exp(-${t})))`))},Qs=e=>te(e),Js=(e,t)=>{let 
r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"HardSigmoid",n=>`max(vec4<${r}>(0.0), min(vec4<${r}>(1.0), ${t.alpha} * ${n} + vec4<${r}>(${t.beta})))`,void 0,t.cacheKey))},eu=e=>{e.compute(me(e.inputs[0],"Sin","sin"))},tu=e=>{e.compute(me(e.inputs[0],"Sinh","sinh"))},ru=e=>{e.compute(me(e.inputs[0],"Sqrt","sqrt"))},nu=e=>{e.compute(me(e.inputs[0],"Tan","tan"))},ou=e=>`sign(${e}) * (1 - exp(-2 * abs(${e}))) / (1 + exp(-2 * abs(${e})))`,iu=e=>{e.compute(me(e.inputs[0],"Tanh",ou))},oo=(e="f32")=>` +}`,Hs=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Erf",r=>`erf_vf32(${r})`,jr(t)))},Gs=e=>{e.compute(me(e.inputs[0],"Exp","exp"))},Ls=e=>{e.compute(me(e.inputs[0],"Floor","floor"))},Fs=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Gelu",r=>`0.5 * ${r} * (1.0 + erf_vf32(${r} * 0.7071067811865475))`,jr(t)))},qs=(e,t)=>{let r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"LeakyRelu",n=>`select(leaky_relu_alpha_ * ${n}, ${n}, ${n} >= vec4<${r}>(0.0))`,`const leaky_relu_alpha_ = ${r}(${t.alpha});`,t.cacheKey))},js=e=>{e.compute(me(e.inputs[0],"Not",t=>`!${t}`))},Ks=e=>{e.compute(me(e.inputs[0],"Neg",t=>`-${t}`))},Ys=e=>{e.compute(me(e.inputs[0],"Reciprocal",t=>`1.0/${t}`))},Xs=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"Relu",r=>`select(vec4<${t}>(0.0), ${r}, ${r} > vec4<${t}>(0.0))`))},Zs=e=>{e.compute(me(e.inputs[0],"Sigmoid",t=>`(1.0 / (1.0 + exp(-${t})))`))},Qs=e=>te(e),Js=(e,t)=>{let r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"HardSigmoid",n=>`max(vec4<${r}>(0.0), min(vec4<${r}>(1.0), ${t.alpha} * ${n} + vec4<${r}>(${t.beta})))`,void 0,t.cacheKey))},eu=e=>{e.compute(me(e.inputs[0],"Sin","sin"))},tu=e=>{e.compute(me(e.inputs[0],"Sinh","sinh"))},ru=e=>{e.compute(me(e.inputs[0],"Sqrt","sqrt"))},nu=e=>{e.compute(me(e.inputs[0],"Tan","tan"))},ou=e=>`sign(${e}) * (1 - exp(-2 * abs(${e}))) / (1 + exp(-2 * abs(${e})))`,iu=e=>{e.compute(me(e.inputs[0],"Tanh",ou))},io=(e="f32")=>` const fast_gelu_a: ${e} = 0.5; const fast_gelu_b: ${e} = 0.7978845608028654; const fast_gelu_c: ${e} = 0.035677408136300125; @@ -423,7 +423,7 @@ const fast_gelu_c: ${e} = 0.035677408136300125; fn tanh_v(v: vec4<${e}>) -> vec4<${e}> { return ${ou("v")}; } -`,io=e=>`(fast_gelu_a + fast_gelu_a * tanh_v(${e} * (fast_gelu_c * ${e} * ${e} + fast_gelu_b))) * ${e}`,au=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"FastGelu",io,oo(t),void 0,e.inputs[0].dataType))},su=(e,t)=>{let r=Oe(e.inputs[0].dataType);return e.compute(me(e.inputs[0],"ThresholdedRelu",n=>`select(vec4<${r}>(0.0), ${n}, ${n} > thresholded_relu_alpha_)`,`const thresholded_relu_alpha_ = vec4<${r}>(${t.alpha});`,t.cacheKey)),0},uu=e=>{e.compute(me(e.inputs[0],"Log","log"))},um=(e,t)=>` +`,ao=e=>`(fast_gelu_a + fast_gelu_a * tanh_v(${e} * (fast_gelu_c * ${e} * ${e} + fast_gelu_b))) * ${e}`,au=e=>{let t=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"FastGelu",ao,io(t),void 0,e.inputs[0].dataType))},su=(e,t)=>{let r=Oe(e.inputs[0].dataType);return e.compute(me(e.inputs[0],"ThresholdedRelu",n=>`select(vec4<${r}>(0.0), ${n}, ${n} > thresholded_relu_alpha_)`,`const thresholded_relu_alpha_ = vec4<${r}>(${t.alpha});`,t.cacheKey)),0},uu=e=>{e.compute(me(e.inputs[0],"Log","log"))},lm=(e,t)=>` const alpha = vec4<${e}>(${t}); const one = ${e}(1.0); const zero = ${e}(0.0); @@ -440,13 +440,13 @@ fn quick_gelu_impl(x: vec4<${e}>) -> vec4<${e}> { } return x * x1; } -`,dm=e=>`quick_gelu_impl(${e})`,du=(e,t)=>{let 
r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"QuickGelu",dm,um(r,t.alpha),t.cacheKey,e.inputs[0].dataType))}});var lm,cm,cu,pu=U(()=>{"use strict";ae();oe();jr();lm=e=>{if(e[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![2560,5120,10240].includes(e[0].dims[2]))throw new Error("hidden state should be 2560, 5120 or 10240");if(e[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(e[0].dims[2]!==e[1].dims[0])throw new Error("last dimension of input and bias are not the same")},cm=e=>{let t=e[0].dims.slice();t[2]=t[2]/2;let r=E("input",e[0].dataType,e[0].dims,4),n=E("bias",e[0].dataType,[e[0].dims[2]],4),o=M("output",e[0].dataType,t,4),i=k.size(t)/4,a=he(e[0].dataType);return{name:"BiasSplitGelu",getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)}}),getShaderSource:d=>` +`,cm=e=>`quick_gelu_impl(${e})`,du=(e,t)=>{let r=Oe(e.inputs[0].dataType);e.compute(me(e.inputs[0],"QuickGelu",cm,lm(r,t.alpha),t.cacheKey,e.inputs[0].dataType))}});var pm,mm,cu,pu=U(()=>{"use strict";ae();oe();Kr();pm=e=>{if(e[0].dims.length!==3)throw new Error("input should have 3 dimensions");if(![2560,5120,10240].includes(e[0].dims[2]))throw new Error("hidden state should be 2560, 5120 or 10240");if(e[1].dims.length!==1)throw new Error("bias is expected to have 1 dimensions");if(e[0].dims[2]!==e[1].dims[0])throw new Error("last dimension of input and bias are not the same")},mm=e=>{let t=e[0].dims.slice();t[2]=t[2]/2;let r=E("input",e[0].dataType,e[0].dims,4),n=E("bias",e[0].dataType,[e[0].dims[2]],4),o=M("output",e[0].dataType,t,4),i=k.size(t)/4,a=he(e[0].dataType);return{name:"BiasSplitGelu",getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)}}),getShaderSource:d=>` const M_SQRT2 = sqrt(2.0); const halfChannels = ${e[0].dims[2]/4/2}u; ${d.declareVariables(r,n,o)} - ${Kr(a)} + ${jr(a)} ${d.mainStart()} ${d.guardAgainstOutOfBoundsWorkgroupSizes(i)} @@ -458,12 +458,12 @@ fn quick_gelu_impl(x: vec4<${e}>) -> vec4<${e}> { let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1); ${o.setByOffset("global_idx","valueLeft * geluRight")} - }`}},cu=e=>{lm(e.inputs),e.compute(cm(e.inputs))}});var pm,mm,at,mu,fu,hu,gu,yu,bu,wu,vu,$u,_u,xu=U(()=>{"use strict";Q();ae();oe();pm=(e,t,r,n,o,i,a,l,d,p,m,u)=>{let h,w;typeof l=="string"?h=w=($,S)=>`${l}((${$}),(${S}))`:typeof l=="function"?h=w=l:(h=l.scalar,w=l.vector);let g=M("outputData",m,n.length,4),b=E("aData",d,t.length,4),x=E("bData",p,r.length,4),_;if(o)if(i){let $=k.size(t)===1,S=k.size(r)===1,I=t.length>0&&t[t.length-1]%4===0,T=r.length>0&&r[r.length-1]%4===0;$||S?_=g.setByOffset("global_idx",w($?`${b.type.value}(${b.getByOffset("0")}.x)`:b.getByOffset("global_idx"),S?`${x.type.value}(${x.getByOffset("0")}.x)`:x.getByOffset("global_idx"))):_=` + }`}},cu=e=>{pm(e.inputs),e.compute(mm(e.inputs))}});var fm,hm,at,mu,fu,hu,gu,yu,bu,wu,vu,$u,_u,xu=U(()=>{"use strict";Q();ae();oe();fm=(e,t,r,n,o,i,a,l,d,p,m,u)=>{let h,w;typeof l=="string"?h=w=($,S)=>`${l}((${$}),(${S}))`:typeof l=="function"?h=w=l:(h=l.scalar,w=l.vector);let g=M("outputData",m,n.length,4),b=E("aData",d,t.length,4),x=E("bData",p,r.length,4),_;if(o)if(i){let $=k.size(t)===1,S=k.size(r)===1,I=t.length>0&&t[t.length-1]%4===0,T=r.length>0&&r[r.length-1]%4===0;$||S?_=g.setByOffset("global_idx",w($?`${b.type.value}(${b.getByOffset("0")}.x)`:b.getByOffset("global_idx"),S?`${x.type.value}(${x.getByOffset("0")}.x)`:x.getByOffset("global_idx"))):_=` let outputIndices = 
${g.offsetToIndices("global_idx * 4u")}; let offsetA = ${b.broadcastedIndicesToOffset("outputIndices",g)}; let offsetB = ${x.broadcastedIndicesToOffset("outputIndices",g)}; ${g.setByOffset("global_idx",w(a||I?b.getByOffset("offsetA / 4u"):`${b.type.value}(${b.getByOffset("offsetA / 4u")}[offsetA % 4u])`,a||T?x.getByOffset("offsetB / 4u"):`${x.type.value}(${x.getByOffset("offsetB / 4u")}[offsetB % 4u])`))} - `}else _=g.setByOffset("global_idx",w(b.getByOffset("global_idx"),x.getByOffset("global_idx")));else{if(!i)throw new Error("no necessary to use scalar implementation for element-wise binary op implementation.");let $=(S,I,T="")=>{let A=`aData[indexA${I}][componentA${I}]`,z=`bData[indexB${I}][componentB${I}]`;return` + `}else _=g.setByOffset("global_idx",w(b.getByOffset("global_idx"),x.getByOffset("global_idx")));else{if(!i)throw new Error("no necessary to use scalar implementation for element-wise binary op implementation.");let $=(S,I,T="")=>{let A=`aData[indexA${I}][componentA${I}]`,D=`bData[indexB${I}][componentB${I}]`;return` let outputIndices${I} = ${g.offsetToIndices(`global_idx * 4u + ${I}u`)}; let offsetA${I} = ${b.broadcastedIndicesToOffset(`outputIndices${I}`,g)}; let offsetB${I} = ${x.broadcastedIndicesToOffset(`outputIndices${I}`,g)}; @@ -471,7 +471,7 @@ fn quick_gelu_impl(x: vec4<${e}>) -> vec4<${e}> { let indexB${I} = offsetB${I} / 4u; let componentA${I} = offsetA${I} % 4u; let componentB${I} = offsetB${I} % 4u; - ${S}[${I}] = ${T}(${h(A,z)}); + ${S}[${I}] = ${T}(${h(A,D)}); `};m===9?_=` var data = vec4(0); ${$("data",0,"u32")} @@ -491,7 +491,7 @@ fn quick_gelu_impl(x: vec4<${e}>) -> vec4<${e}> { ${e.mainStart()} ${e.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.vec_size")} ${_} - }`},mm=(e,t,r,n,o,i,a=r.dataType)=>{let l=!k.areEqual(r.dims,n.dims),d=r.dims,p=k.size(r.dims),m=!1,u=!1,h=[l];if(l){let w=tt.calcShape(r.dims,n.dims,!1);if(!w)throw new Error("Can't perform binary op on the given tensors");d=w,p=k.size(d);let g=k.size(r.dims)===1,b=k.size(n.dims)===1,x=r.dims.length>0&&r.dims[r.dims.length-1]%4===0,_=n.dims.length>0&&n.dims[n.dims.length-1]%4===0;h.push(g),h.push(b),h.push(x),h.push(_);let $=1;for(let S=1;Sw.toString()).join("_"),inputDependencies:["rank","rank"]},getShaderSource:w=>pm(w,r.dims,n.dims,d,m,l,u,o,r.dataType,n.dataType,a,i),getRunData:()=>({outputs:[{dims:d,dataType:a}],dispatchGroup:{x:Math.ceil(p/64/4)},programUniforms:[{type:12,data:Math.ceil(k.size(d)/4)},...V(r.dims,n.dims,d)]})}},at=(e,t,r,n,o,i)=>{e.compute(mm(t,o??"",e.inputs[0],e.inputs[1],r,n,i))},mu=e=>{at(e,"Add",(t,r)=>`${t}+${r}`)},fu=e=>{at(e,"Div",(t,r)=>`${t}/${r}`)},hu=e=>{at(e,"Equal",{scalar:(t,r)=>`u32(${t}==${r})`,vector:(t,r)=>`vec4(${t}==${r})`},void 0,void 0,9)},gu=e=>{at(e,"Mul",(t,r)=>`${t}*${r}`)},yu=e=>{let t=E("input",e.inputs[0].dataType,e.inputs[0].dims).type.value;at(e,"Pow",{scalar:(n,o)=>`pow_custom(${n},${o})`,vector:(n,o)=>`pow_vector_custom(${n},${o})`},` + }`},hm=(e,t,r,n,o,i,a=r.dataType)=>{let l=!k.areEqual(r.dims,n.dims),d=r.dims,p=k.size(r.dims),m=!1,u=!1,h=[l];if(l){let w=tt.calcShape(r.dims,n.dims,!1);if(!w)throw new Error("Can't perform binary op on the given tensors");d=w,p=k.size(d);let g=k.size(r.dims)===1,b=k.size(n.dims)===1,x=r.dims.length>0&&r.dims[r.dims.length-1]%4===0,_=n.dims.length>0&&n.dims[n.dims.length-1]%4===0;h.push(g),h.push(b),h.push(x),h.push(_);let $=1;for(let 
S=1;Sw.toString()).join("_"),inputDependencies:["rank","rank"]},getShaderSource:w=>fm(w,r.dims,n.dims,d,m,l,u,o,r.dataType,n.dataType,a,i),getRunData:()=>({outputs:[{dims:d,dataType:a}],dispatchGroup:{x:Math.ceil(p/64/4)},programUniforms:[{type:12,data:Math.ceil(k.size(d)/4)},...V(r.dims,n.dims,d)]})}},at=(e,t,r,n,o,i)=>{e.compute(hm(t,o??"",e.inputs[0],e.inputs[1],r,n,i))},mu=e=>{at(e,"Add",(t,r)=>`${t}+${r}`)},fu=e=>{at(e,"Div",(t,r)=>`${t}/${r}`)},hu=e=>{at(e,"Equal",{scalar:(t,r)=>`u32(${t}==${r})`,vector:(t,r)=>`vec4(${t}==${r})`},void 0,void 0,9)},gu=e=>{at(e,"Mul",(t,r)=>`${t}*${r}`)},yu=e=>{let t=E("input",e.inputs[0].dataType,e.inputs[0].dims).type.value;at(e,"Pow",{scalar:(n,o)=>`pow_custom(${n},${o})`,vector:(n,o)=>`pow_vector_custom(${n},${o})`},` fn pow_custom(a : ${t}, b : ${t}) -> ${t} { if (b == ${t}(0.0)) { return ${t}(1.0); @@ -504,7 +504,7 @@ fn quick_gelu_impl(x: vec4<${e}>) -> vec4<${e}> { // TODO: implement vectorized pow return vec4<${t}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w)); } - `)},bu=e=>{at(e,"Sub",(t,r)=>`${t}-${r}`)},wu=e=>{at(e,"Greater",{scalar:(t,r)=>`u32(${t}>${r})`,vector:(t,r)=>`vec4(${t}>${r})`},void 0,void 0,9)},vu=e=>{at(e,"Less",{scalar:(t,r)=>`u32(${t}<${r})`,vector:(t,r)=>`vec4(${t}<${r})`},void 0,void 0,9)},$u=e=>{at(e,"GreaterOrEqual",{scalar:(t,r)=>`u32(${t}>=${r})`,vector:(t,r)=>`vec4(${t}>=${r})`},void 0,void 0,9)},_u=e=>{at(e,"LessOrEqual",{scalar:(t,r)=>`u32(${t}<=${r})`,vector:(t,r)=>`vec4(${t}<=${r})`},void 0,void 0,9)}});var hm,gm,ym,bm,Su,Iu,Cu=U(()=>{"use strict";Q();ae();Ce();oe();hm=(e,t)=>{if(!e||e.length<1)throw new Error("too few inputs");let r=0,n=e[r],o=n.dataType,i=n.dims.length;e.forEach((a,l)=>{if(l!==r){if(a.dataType!==o)throw new Error("input tensors should be one type");if(a.dims.length!==i)throw new Error("input tensors should have the same shape");a.dims.forEach((d,p)=>{if(p!==t&&d!==n.dims[p])throw new Error("non concat dimensions must match")})}})},gm=(e,t)=>` + `)},bu=e=>{at(e,"Sub",(t,r)=>`${t}-${r}`)},wu=e=>{at(e,"Greater",{scalar:(t,r)=>`u32(${t}>${r})`,vector:(t,r)=>`vec4(${t}>${r})`},void 0,void 0,9)},vu=e=>{at(e,"Less",{scalar:(t,r)=>`u32(${t}<${r})`,vector:(t,r)=>`vec4(${t}<${r})`},void 0,void 0,9)},$u=e=>{at(e,"GreaterOrEqual",{scalar:(t,r)=>`u32(${t}>=${r})`,vector:(t,r)=>`vec4(${t}>=${r})`},void 0,void 0,9)},_u=e=>{at(e,"LessOrEqual",{scalar:(t,r)=>`u32(${t}<=${r})`,vector:(t,r)=>`vec4(${t}<=${r})`},void 0,void 0,9)}});var ym,bm,wm,vm,Su,Iu,Cu=U(()=>{"use strict";Q();ae();Ce();oe();ym=(e,t)=>{if(!e||e.length<1)throw new Error("too few inputs");let r=0,n=e[r],o=n.dataType,i=n.dims.length;e.forEach((a,l)=>{if(l!==r){if(a.dataType!==o)throw new Error("input tensors should be one type");if(a.dims.length!==i)throw new Error("input tensors should have the same shape");a.dims.forEach((d,p)=>{if(p!==t&&d!==n.dims[p])throw new Error("non concat dimensions must match")})}})},bm=(e,t)=>` fn calculateInputIndex(index: u32) -> u32 { let sizeInConcatAxis = array(${t}); for (var i: u32 = 0u; i < ${e}; i += 1u ) { @@ -513,12 +513,12 @@ fn quick_gelu_impl(x: vec4<${e}>) -> vec4<${e}> { } } return ${e}u; - }`,ym=(e,t)=>{let r=e.length,n=[];for(let o=0;o{let o=k.size(r),i=new Array(e.length),a=new Array(e.length),l=0,d=[],p=[],m=[{type:12,data:o}];for(let b=0;b`uniforms.sizeInConcatAxis${b}`).join(","),g=b=>` + }`,wm=(e,t)=>{let r=e.length,n=[];for(let o=0;o{let o=k.size(r),i=new Array(e.length),a=new Array(e.length),l=0,d=[],p=[],m=[{type:12,data:o}];for(let 
b=0;b`uniforms.sizeInConcatAxis${b}`).join(","),g=b=>` ${(()=>{b.registerUniform("outputSize","u32");for(let x=0;x) -> vec4<${e}> { ${h} -= sizeInConcatAxis[inputIndex - 1u]; } - ${ym(a,u)} - }`;return{name:"Concat",shaderCache:{hint:`${t}`,inputDependencies:d},getRunData:()=>({outputs:[{dims:r,dataType:n}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:m}),getShaderSource:g}},Su=(e,t)=>{let r=e.inputs,n=r[0].dims,o=k.normalizeAxis(t.axis,n.length);hm(r,o);let i=n.slice();i[o]=r.reduce((l,d)=>l+(d.dims.length>o?d.dims[o]:0),0);let a=r.filter(l=>k.size(l.dims)>0);e.compute(bm(a,o,i,r[0].dataType),{inputs:a})},Iu=e=>te({axis:e.axis})});var Ke,je,Ye,Yr,yt=U(()=>{"use strict";Q();ae();Ke=(e,t,r="f32")=>{switch(e.activation){case"Relu":return`value = max(value, ${t}(0.0));`;case"Sigmoid":return`value = (${t}(1.0) / (${t}(1.0) + exp(-value)));`;case"Clip":return`value = clamp(value, ${t}(${r}(uniforms.clip_min)), ${t}(${r}(uniforms.clip_max)));`;case"HardSigmoid":return`value = max(${t}(0.0), min(${t}(1.0), ${r}(uniforms.alpha) * value + ${r}(uniforms.beta)));`;case"LeakyRelu":return`value = select(${r}(uniforms.alpha) * value, value, value >= ${t}(0.0));`;case"":return"";default:throw new Error(`Unsupported activation ${e.activation}`)}},je=(e,t)=>{e.activation==="Clip"?t.push({type:1,data:e.clipMax},{type:1,data:e.clipMin}):e.activation==="HardSigmoid"?t.push({type:1,data:e.alpha},{type:1,data:e.beta}):e.activation==="LeakyRelu"&&t.push({type:1,data:e.alpha})},Ye=(e,t)=>{e.activation==="Clip"?t.push({name:"clip_max",type:"f32"},{name:"clip_min",type:"f32"}):e.activation==="HardSigmoid"?t.push({name:"alpha",type:"f32"},{name:"beta",type:"f32"}):e.activation==="LeakyRelu"&&t.push({name:"alpha",type:"f32"})},Yr=e=>{let t=e?.activation||"";if(t==="HardSigmoid"){let[r,n]=e?.activation_params||[.2,.5];return{activation:t,alpha:r,beta:n}}else if(t==="Clip"){let[r,n]=e?.activation_params||[Wr,Hr];return{activation:t,clipMax:n,clipMin:r}}else if(t==="LeakyRelu"){let[r]=e?.activation_params||[.01];return{activation:t,alpha:r}}return{activation:t}}});var De,Xr,Zr=U(()=>{"use strict";De=(e,t)=>{switch(e){case 1:return t;case 2:return`vec2<${t}>`;case 3:return`vec3<${t}>`;case 4:return`vec4<${t}>`;default:throw new Error(`${e}-component is not supported.`)}},Xr=e=>` + ${wm(a,u)} + }`;return{name:"Concat",shaderCache:{hint:`${t}`,inputDependencies:d},getRunData:()=>({outputs:[{dims:r,dataType:n}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:m}),getShaderSource:g}},Su=(e,t)=>{let r=e.inputs,n=r[0].dims,o=k.normalizeAxis(t.axis,n.length);ym(r,o);let i=n.slice();i[o]=r.reduce((l,d)=>l+(d.dims.length>o?d.dims[o]:0),0);let a=r.filter(l=>k.size(l.dims)>0);e.compute(vm(a,o,i,r[0].dataType),{inputs:a})},Iu=e=>te({axis:e.axis})});var je,Ke,Ye,Yr,yt=U(()=>{"use strict";Q();ae();je=(e,t,r="f32")=>{switch(e.activation){case"Relu":return`value = max(value, ${t}(0.0));`;case"Sigmoid":return`value = (${t}(1.0) / (${t}(1.0) + exp(-value)));`;case"Clip":return`value = clamp(value, ${t}(${r}(uniforms.clip_min)), ${t}(${r}(uniforms.clip_max)));`;case"HardSigmoid":return`value = max(${t}(0.0), min(${t}(1.0), ${r}(uniforms.alpha) * value + ${r}(uniforms.beta)));`;case"LeakyRelu":return`value = select(${r}(uniforms.alpha) * value, value, value >= ${t}(0.0));`;case"":return"";default:throw new Error(`Unsupported activation 
${e.activation}`)}},Ke=(e,t)=>{e.activation==="Clip"?t.push({type:1,data:e.clipMax},{type:1,data:e.clipMin}):e.activation==="HardSigmoid"?t.push({type:1,data:e.alpha},{type:1,data:e.beta}):e.activation==="LeakyRelu"&&t.push({type:1,data:e.alpha})},Ye=(e,t)=>{e.activation==="Clip"?t.push({name:"clip_max",type:"f32"},{name:"clip_min",type:"f32"}):e.activation==="HardSigmoid"?t.push({name:"alpha",type:"f32"},{name:"beta",type:"f32"}):e.activation==="LeakyRelu"&&t.push({name:"alpha",type:"f32"})},Yr=e=>{let t=e?.activation||"";if(t==="HardSigmoid"){let[r,n]=e?.activation_params||[.2,.5];return{activation:t,alpha:r,beta:n}}else if(t==="Clip"){let[r,n]=e?.activation_params||[Wr,Hr];return{activation:t,clipMax:n,clipMin:r}}else if(t==="LeakyRelu"){let[r]=e?.activation_params||[.01];return{activation:t,alpha:r}}return{activation:t}}});var ze,Xr,Zr=U(()=>{"use strict";ze=(e,t)=>{switch(e){case 1:return t;case 2:return`vec2<${t}>`;case 3:return`vec3<${t}>`;case 4:return`vec4<${t}>`;default:throw new Error(`${e}-component is not supported.`)}},Xr=e=>` ${e?"value = value + getBiasByOutputCoords(coords);":""} - `});var Qr,ao=U(()=>{"use strict";Qr=e=>` + `});var Qr,so=U(()=>{"use strict";Qr=e=>` fn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 { return dot(coords, vec4( shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1)); @@ -543,7 +543,7 @@ fn getOutputIndexFromCoords(coords : vec4) -> i32 { return dot(coords, vec4( i32(${e}.x), i32(${e}.y), i32(${e}.z), 1)); } -`});var wm,vm,Qt,Tu,$m,Jt,_m,Jr,er=U(()=>{"use strict";Q();ae();oe();yt();Zr();wm=(e,t)=>e?` +`});var $m,_m,Qt,Tu,xm,Jt,Sm,Jr,er=U(()=>{"use strict";Q();ae();oe();yt();Zr();$m=(e,t)=>e?` mm_Asub[inputRow][inputCol] = mm_readA(batch, kStart + inputRow, globalRowStart / innerElementSize + inputCol${t?", batchIndices":""}); @@ -551,7 +551,7 @@ fn getOutputIndexFromCoords(coords : vec4) -> i32 { mm_Asub[inputRow][inputCol] = mm_readA(batch, globalRow + innerRow, kStart / innerElementSize + inputCol${t?", batchIndices":""}); - `,vm=(e,t)=>e?` + `,_m=(e,t)=>e?` let ACached0 = mm_Asub[k * innerElementSize][localRow]; let ACached1 = mm_Asub[k * innerElementSize + 1][localRow]; let ACached2 = mm_Asub[k * innerElementSize + 2][localRow]; @@ -605,7 +605,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) { let inputRow = tileRow + innerRow; let inputCol = tileCol; - ${wm(o,n)} + ${$m(o,n)} } // Load one tile of B into local memory. 
@@ -624,7 +624,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol]; ${h===3?"":"let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];"} - ${vm(o,h)} + ${_m(o,h)} } workgroupBarrier(); @@ -641,7 +641,7 @@ fn main(@builtin(local_invocation_id) localId : vec3, mm_Asub[inputRow][inputCol] = mm_readA(batch, globalRowStart + inputRow, kStart + inputCol${t?", batchIndices":""}); - `,$m=e=>e?"let ACached = mm_Asub[k][tileRow + innerRow];":"let ACached = mm_Asub[tileRow + innerRow][k];",Jt=(e,t,r="f32",n,o=!1,i=32,a=!1,l=32,d=!1)=>{let p=e[1]*t[1],m=e[0]*t[0],u=o?p:i,h=o?i:p;if(!(h%t[1]===0&&u%t[0]===0&&i%t[1]===0))throw new Error(`tileAHight ${h} must be divisible by workgroupSize[1]${t[1]}, tileAWidth ${u} must be divisible by workgroupSize[0]${t[0]}, tileInner ${i} must be divisible by workgroupSize[1]${t[1]}`);let w=h/t[1],g=u/t[0],b=i/t[1],x=d?` + `,xm=e=>e?"let ACached = mm_Asub[k][tileRow + innerRow];":"let ACached = mm_Asub[tileRow + innerRow][k];",Jt=(e,t,r="f32",n,o=!1,i=32,a=!1,l=32,d=!1)=>{let p=e[1]*t[1],m=e[0]*t[0],u=o?p:i,h=o?i:p;if(!(h%t[1]===0&&u%t[0]===0&&i%t[1]===0))throw new Error(`tileAHight ${h} must be divisible by workgroupSize[1]${t[1]}, tileAWidth ${u} must be divisible by workgroupSize[0]${t[0]}, tileInner ${i} must be divisible by workgroupSize[1]${t[1]}`);let w=h/t[1],g=u/t[0],b=i/t[1],x=d?` let localRow = i32(localId.y); let localCol = i32(localId.x); let globalRowStart = i32(workgroupId.y) * ${p}; @@ -732,7 +732,7 @@ for (var t = 0; t < num_tiles; t = t + 1) { } for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) { - ${$m(o)} + ${xm(o)} for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) { acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol]; } @@ -767,17 +767,17 @@ fn main(@builtin(local_invocation_id) localId : vec3, var acc : array, rowPerThread>; ${x} } -`},_m=(e,t,r,n,o,i=!1)=>{let[a,l,d]=o,[p,m,u,h]=n,w=Vt(a,d),g=Vt(l,d),b=he(n[0].type.tensor),x=()=>{let S=m.rank,I=p.rank,T=`var aIndices: ${m.type.indices};`;for(let A=S-2-1,z=I-1;A>=0;A--,z--)T+=` -aIndices[${A}] = ${I>1?`batchIndices[${z}]`:"batchIndices"};`;return w.forEach(A=>{T+=` +`},Sm=(e,t,r,n,o,i=!1)=>{let[a,l,d]=o,[p,m,u,h]=n,w=Nt(a,d),g=Nt(l,d),b=he(n[0].type.tensor),x=()=>{let S=m.rank,I=p.rank,T=`var aIndices: ${m.type.indices};`;for(let A=S-2-1,D=I-1;A>=0;A--,D--)T+=` +aIndices[${A}] = ${I>1?`batchIndices[${D}]`:"batchIndices"};`;return w.forEach(A=>{T+=` aIndices[${A}] = 0;`}),T+=` aIndices[${S-2}] = u32(row); - aIndices[${S-1}] = u32(colIn);`,T},_=()=>{let S=u.rank,I=p.rank,T=`var bIndices: ${u.type.indices};`;for(let A=S-2-1,z=I-1;A>=0;A--,z--)T+=` -bIndices[${A}] = ${I>1?`batchIndices[${z}]`:"batchIndices"};`;return g.forEach(A=>{T+=` + aIndices[${S-1}] = u32(colIn);`,T},_=()=>{let S=u.rank,I=p.rank,T=`var bIndices: ${u.type.indices};`;for(let A=S-2-1,D=I-1;A>=0;A--,D--)T+=` +bIndices[${A}] = ${I>1?`batchIndices[${D}]`:"batchIndices"};`;return g.forEach(A=>{T+=` bIndices[${A}] = 0;`}),T+=` bIndices[${S-2}] = u32(row); bIndices[${S-1}] = u32(colIn);`,T};return` - fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${p.type.indices}) -> ${De(e,b)} { - var value = ${De(e,b)}(0.0); + fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${p.type.indices}) -> ${ze(e,b)} { + var value = ${ze(e,b)}(0.0); let col = colIn * ${e}; if(row < uniforms.dim_a_outer && col < uniforms.dim_inner) { @@ -787,8 +787,8 @@ bIndices[${S-2}] = u32(row); return value; } 
- fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${p.type.indices}) -> ${De(e,b)} { - var value = ${De(e,b)}(0.0); + fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${p.type.indices}) -> ${ze(e,b)} { + var value = ${ze(e,b)}(0.0); let col = colIn * ${e}; if(row < uniforms.dim_inner && col < uniforms.dim_b_outer) { @@ -798,21 +798,21 @@ bIndices[${S-2}] = u32(row); return value; } - fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${De(e,b)}) { + fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${ze(e,b)}) { let col = colIn * ${e}; if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) { var value = valueIn; let coords = vec3(batch, row, colIn); - ${t?`value = value + ${i?"bias[colIn]":`${De(e,b)}(bias[row])`};`:""} + ${t?`value = value + ${i?"bias[colIn]":`${ze(e,b)}(bias[row])`};`:""} ${r} ${h.setByIndices("vec3(coords)","value")} } } - `},Jr=(e,t,r,n,o=!1)=>{let i=e[0].dims,a=e[1].dims,l=i.slice(0,-2),d=a.slice(0,-2),p=n?n.slice(0,-2):r.slice(0,-2),m=k.size(p),u=i[i.length-2],h=i[i.length-1],w=a[a.length-1],g=h%4===0&&w%4===0,b=u<=8?[4,1,1]:[4,4,1],x=[8,8,1],_=[Math.ceil(w/x[0]/b[0]),Math.ceil(u/x[1]/b[1]),Math.ceil(m/x[2]/b[2])],$=g?4:1,S=[...l,u,h/$],I=S.length,T=[...d,h,w/$],A=T.length,z=[m,u,w/$],D=[{type:6,data:u},{type:6,data:w},{type:6,data:h}];je(t,D),D.push(...V(p,S,T));let H=["rank","rank"],W=e.length>2;W&&(D.push(...V(e[2].dims)),H.push("rank")),D.push(...V(z));let F=de=>{let ce=p.length,X=Gr("batchDims",e[0].dataType,ce,1),xe=he(e[0].dataType),q=E("a",e[0].dataType,I,$),ie=E("b",e[1].dataType,A,$),le=M("result",e[0].dataType,z.length,$),se=[q,ie];if(W){let R=o?$:1;se.push(E("bias",e[2].dataType,e[2].dims.length,R))}let Z=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"}];Ye(t,Z);let re=he(le.type.tensor),J=Ke(t,le.type.value,re),Pe=_m($,W,J,[X,q,ie,le],[l,d,p],o);return` + `},Jr=(e,t,r,n,o=!1)=>{let i=e[0].dims,a=e[1].dims,l=i.slice(0,-2),d=a.slice(0,-2),p=n?n.slice(0,-2):r.slice(0,-2),m=k.size(p),u=i[i.length-2],h=i[i.length-1],w=a[a.length-1],g=h%4===0&&w%4===0,b=u<=8?[4,1,1]:[4,4,1],x=[8,8,1],_=[Math.ceil(w/x[0]/b[0]),Math.ceil(u/x[1]/b[1]),Math.ceil(m/x[2]/b[2])],$=g?4:1,S=[...l,u,h/$],I=S.length,T=[...d,h,w/$],A=T.length,D=[m,u,w/$],z=[{type:6,data:u},{type:6,data:w},{type:6,data:h}];Ke(t,z),z.push(...V(p,S,T));let H=["rank","rank"],W=e.length>2;W&&(z.push(...V(e[2].dims)),H.push("rank")),z.push(...V(D));let F=de=>{let ce=p.length,X=Gr("batchDims",e[0].dataType,ce,1),xe=he(e[0].dataType),q=E("a",e[0].dataType,I,$),ie=E("b",e[1].dataType,A,$),le=M("result",e[0].dataType,D.length,$),se=[q,ie];if(W){let R=o?$:1;se.push(E("bias",e[2].dataType,e[2].dims.length,R))}let Z=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"}];Ye(t,Z);let re=he(le.type.tensor),J=je(t,le.type.value,re),Pe=Sm($,W,J,[X,q,ie,le],[l,d,p],o);return` ${de.registerUniforms(Z).registerInternalVariables(X).declareVariables(...se,le)} ${Pe} ${g?Qt(b,x,xe,X):Jt(b,x,xe,X)} - `};return{name:"MatMul",shaderCache:{hint:`${b};${t.activation};${g};${o}`,inputDependencies:H},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:_[0],y:_[1],z:_[2]},programUniforms:D}),getShaderSource:F}}});var xm,Au,Eu=U(()=>{"use strict";Q();dt();oe();yt();Zr();ao();er();xm=(e,t,r,n,o=!1,i,a=4,l=4,d=4,p="f32")=>{let m=W=>{switch(W){case 1:return"resData = x[xIndex];";case 3:return`resData = vec3<${p}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;case 4:return"resData = x[xIndex / 
4];";default:throw new Error(`innerElementSize ${W} is not supported.`)}},u=W=>{switch(W){case 1:return"return w[row * i32(uniforms.w_shape[3]) + colIn];";case 4:return"return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];";default:throw new Error(`innerElementSize ${W} is not supported.`)}},h=e?` + `};return{name:"MatMul",shaderCache:{hint:`${b};${t.activation};${g};${o}`,inputDependencies:H},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:_[0],y:_[1],z:_[2]},programUniforms:z}),getShaderSource:F}}});var Im,Au,Eu=U(()=>{"use strict";Q();dt();oe();yt();Zr();so();er();Im=(e,t,r,n,o=!1,i,a=4,l=4,d=4,p="f32")=>{let m=W=>{switch(W){case 1:return"resData = x[xIndex];";case 3:return`resData = vec3<${p}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;case 4:return"resData = x[xIndex / 4];";default:throw new Error(`innerElementSize ${W} is not supported.`)}},u=W=>{switch(W){case 1:return"return w[row * i32(uniforms.w_shape[3]) + colIn];";case 4:return"return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];";default:throw new Error(`innerElementSize ${W} is not supported.`)}},h=e?` let coord = vec4(batch, xRow, xCol, xCh); `:` let coord = vec4(batch, xCh, xRow, xCol); @@ -839,7 +839,7 @@ bIndices[${S-2}] = u32(row); let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0]; let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1]; let xCh = ${_} % inChannels; - var resData = ${De(a,p)}(0.0); + var resData = ${ze(a,p)}(0.0); // The bounds checking is always needed since we use it to pad zero for // the 'same' padding type. if (xRow >= 0 && xRow < ${g} && xCol >= 0 && xCol < ${b}) { @@ -854,19 +854,19 @@ bIndices[${S-2}] = u32(row); if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) { ${$} } - return ${De(a,p)}(0.0);`:n&&r?` + return ${ze(a,p)}(0.0);`:n&&r?` let col = colIn * ${a}; ${$}`:` let col = colIn * ${a}; if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) { ${$} } - return ${De(a,p)}(0.0);`,I=`${u(l)}`,T=De(d,p),A=e?De(a,p):De(l,p),z=e?De(l,p):De(a,p),D=Ke(i,T,p);return` + return ${ze(a,p)}(0.0);`,I=`${u(l)}`,T=ze(d,p),A=e?ze(a,p):ze(l,p),D=e?ze(l,p):ze(a,p),z=je(i,T,p);return` fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${A} { ${e?S:I} } - fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${z} { + fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${D} { ${e?I:S} } @@ -878,10 +878,10 @@ bIndices[${S-2}] = u32(row); let outWidth = ${e?"i32(uniforms.result_shape[2])":"i32(uniforms.result_shape[3])"}; ${w} ${Xr(o)} - ${D} + ${z} setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value); } - }`},Au=(e,t,r,n,o,i,a,l)=>{let d=t.format==="NHWC",p=d?e[0].dims[3]:e[0].dims[1],m=r[0],u=d?r[2]:r[3],h=d?r[1]:r[2],w=d?r[3]:r[1],g=d&&(p%4===0||p%3===0)&&w%4===0,b=d?w:u*h,x=d?u*h:w,_=[8,8,1],$=n<=8?[4,1,1]:[4,4,1],S=[Math.ceil(b/_[0]/$[0]),Math.ceil(x/_[1]/$[1]),Math.ceil(m/_[2]/$[2])];be("verbose",()=>`[conv2d_mm_webgpu] dispatch = ${S}`);let I=g?d&&p%4!==0?3:4:1,T=_[1]*$[1],A=_[0]*$[0],z=Math.max(_[0]*I,_[1]),D=n%T===0,H=o%A===0,W=i%z===0,F=g?[I,4,4]:[1,1,1],de=[{type:6,data:n},{type:6,data:o},{type:6,data:i},{type:6,data:[t.pads[0],t.pads[1]]},{type:6,data:t.strides},{type:6,data:t.dilations}];je(t,de),de.push(...V(e[0].dims,e[1].dims));let ce=["rank","rank"];a&&(de.push(...V(e[2].dims)),ce.push("rank")),de.push(...V(r));let X=xe=>{let 
q=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"pad",type:"i32",length:2},{name:"stride",type:"i32",length:2},{name:"dilation",type:"i32",length:2}];Ye(t,q);let ie=g?4:1,le=he(e[0].dataType),se=` + }`},Au=(e,t,r,n,o,i,a,l)=>{let d=t.format==="NHWC",p=d?e[0].dims[3]:e[0].dims[1],m=r[0],u=d?r[2]:r[3],h=d?r[1]:r[2],w=d?r[3]:r[1],g=d&&(p%4===0||p%3===0)&&w%4===0,b=d?w:u*h,x=d?u*h:w,_=[8,8,1],$=n<=8?[4,1,1]:[4,4,1],S=[Math.ceil(b/_[0]/$[0]),Math.ceil(x/_[1]/$[1]),Math.ceil(m/_[2]/$[2])];be("verbose",()=>`[conv2d_mm_webgpu] dispatch = ${S}`);let I=g?d&&p%4!==0?3:4:1,T=_[1]*$[1],A=_[0]*$[0],D=Math.max(_[0]*I,_[1]),z=n%T===0,H=o%A===0,W=i%D===0,F=g?[I,4,4]:[1,1,1],de=[{type:6,data:n},{type:6,data:o},{type:6,data:i},{type:6,data:[t.pads[0],t.pads[1]]},{type:6,data:t.strides},{type:6,data:t.dilations}];Ke(t,de),de.push(...V(e[0].dims,e[1].dims));let ce=["rank","rank"];a&&(de.push(...V(e[2].dims)),ce.push("rank")),de.push(...V(r));let X=xe=>{let q=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"pad",type:"i32",length:2},{name:"stride",type:"i32",length:2},{name:"dilation",type:"i32",length:2}];Ye(t,q);let ie=g?4:1,le=he(e[0].dataType),se=` fn setOutputAtIndex(flatIndex : i32, value : ${g?`vec4<${le}>`:le}) { result[flatIndex] = ${g?`vec4<${le}>`:le}(value); } @@ -898,8 +898,8 @@ bIndices[${S-2}] = u32(row); // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 }; ${xe.registerUniforms(q).declareVariables(...J,Pe)} ${se} - ${xm(d,D,H,W,a,t,F[0],F[1],F[2],le)} - ${g?Qt($,_,le,void 0,!d,z):Jt($,_,le,void 0,!d,z,!1,void 0,l)}`};return{name:"Conv2DMatMul",shaderCache:{hint:`${t.cacheKey};${I};${g};${D};${H};${W};${T};${A};${z}`,inputDependencies:ce},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:de}),getShaderSource:X}}});var Sm,ku,en,Im,Pu,Cm,Ou,zu,Du=U(()=>{"use strict";Q();dt();ae();oe();Sm=e=>{let t=1;for(let r=0;rtypeof e=="number"?[e,e,e]:e,en=(e,t)=>t<=1?e:e+(e-1)*(t-1),Im=(e,t,r,n=1)=>{let o=en(t,n);return Math.floor((e[0]*(r-1)-r+o)/2)},Pu=(e,t,r,n,o)=>{o==null&&(o=Im(e,t[0],n[0]));let i=[0,0,0,r];for(let a=0;a<3;a++)e[a]+2*o>=t[a]&&(i[a]=Math.trunc((e[a]-t[a]+2*o)/n[a]+1));return i},Cm=(e,t,r,n,o,i,a,l,d,p)=>{let m,u,h,w;if(e==="VALID"&&(e=0),typeof e=="number"){m={top:e,bottom:e,left:e,right:e,front:e,back:e};let g=Pu([t,r,n,1],[l,d,p],1,[o,i,a],e);u=g[0],h=g[1],w=g[2]}else if(Array.isArray(e)){if(!e.every((b,x,_)=>b===_[0]))throw Error(`Unsupported padding parameter: ${e}`);m={top:e[0],bottom:e[1],left:e[2],right:e[3],front:e[4],back:e[5]};let g=Pu([t,r,n,1],[l,d,p],1,[o,i,a],e[0]);u=g[0],h=g[1],w=g[2]}else if(e==="SAME_UPPER"){u=Math.ceil(t/o),h=Math.ceil(r/i),w=Math.ceil(n/a);let g=(u-1)*o+l-t,b=(h-1)*i+d-r,x=(w-1)*a+p-n,_=Math.floor(g/2),$=g-_,S=Math.floor(b/2),I=b-S,T=Math.floor(x/2),A=x-T;m={top:S,bottom:I,left:T,right:A,front:_,back:$}}else throw Error(`Unknown padding parameter: ${e}`);return{padInfo:m,outDepth:u,outHeight:h,outWidth:w}},Ou=(e,t,r,n,o,i=!1,a="channelsLast")=>{let l,d,p,m,u;if(a==="channelsLast")[l,d,p,m,u]=e;else if(a==="channelsFirst")[l,u,d,p,m]=e;else throw new Error(`Unknown dataFormat ${a}`);let[h,,w,g,b]=t,[x,_,$]=ku(r),[S,I,T]=ku(n),A=en(w,S),z=en(g,I),D=en(b,T),{padInfo:H,outDepth:W,outHeight:F,outWidth:de}=Cm(o,d,p,m,x,_,$,A,z,D),ce=i?h*u:h,X=[0,0,0,0,0];return 
a==="channelsFirst"?X=[l,ce,W,F,de]:a==="channelsLast"&&(X=[l,W,F,de,ce]),{batchSize:l,dataFormat:a,inDepth:d,inHeight:p,inWidth:m,inChannels:u,outDepth:W,outHeight:F,outWidth:de,outChannels:ce,padInfo:H,strideDepth:x,strideHeight:_,strideWidth:$,filterDepth:w,filterHeight:g,filterWidth:b,effectiveFilterDepth:A,effectiveFilterHeight:z,effectiveFilterWidth:D,dilationDepth:S,dilationHeight:I,dilationWidth:T,inShape:e,outShape:X,filterShape:t}},zu=(e,t,r,n,o,i)=>{let a=i==="channelsLast",l=a?e[0].dims[3]:e[0].dims[1],d=!1,p=[64,1,1],m={x:r.map(($,S)=>S)},u=[Math.ceil(Sm(m.x.map($=>r[$]))/p[0]),1,1];be("verbose",()=>`[conv3d_naive_webgpu] dispatch = ${u}`);let h=d?a&&l%4!==0?3:4:1,w=k.size(r),g=[{type:12,data:w},{type:12,data:n},{type:12,data:o},{type:12,data:t.strides},{type:12,data:t.dilations}];g.push(...V(e[0].dims,e[1].dims));let b=["rank","rank"],x=e.length===3;x&&(g.push(...V(e[2].dims)),b.push("rank")),g.push(...V(r));let _=$=>{let S=[{name:"output_size",type:"u32"},{name:"filter_dims",type:"u32",length:n.length},{name:"pads",type:"u32",length:o.length},{name:"strides",type:"u32",length:t.strides.length},{name:"dilations",type:"u32",length:t.dilations.length}],I=d?4:1,T=he(e[0].dataType),A=E("x",e[0].dataType,e[0].dims.length,h===3?1:h),z=E("W",e[1].dataType,e[1].dims.length,I),D=[A,z],H=M("result",e[0].dataType,r.length,I),W="";if(x){let F=E("bias",e[2].dataType,e[2].dims.length,I);D.push(F),W+=` + ${Im(d,z,H,W,a,t,F[0],F[1],F[2],le)} + ${g?Qt($,_,le,void 0,!d,D):Jt($,_,le,void 0,!d,D,!1,void 0,l)}`};return{name:"Conv2DMatMul",shaderCache:{hint:`${t.cacheKey};${I};${g};${z};${H};${W};${T};${A};${D}`,inputDependencies:ce},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:de}),getShaderSource:X}}});var Cm,ku,en,Tm,Pu,Am,Ou,Du,zu=U(()=>{"use strict";Q();dt();ae();oe();Cm=e=>{let t=1;for(let r=0;rtypeof e=="number"?[e,e,e]:e,en=(e,t)=>t<=1?e:e+(e-1)*(t-1),Tm=(e,t,r,n=1)=>{let o=en(t,n);return Math.floor((e[0]*(r-1)-r+o)/2)},Pu=(e,t,r,n,o)=>{o==null&&(o=Tm(e,t[0],n[0]));let i=[0,0,0,r];for(let a=0;a<3;a++)e[a]+2*o>=t[a]&&(i[a]=Math.trunc((e[a]-t[a]+2*o)/n[a]+1));return i},Am=(e,t,r,n,o,i,a,l,d,p)=>{let m,u,h,w;if(e==="VALID"&&(e=0),typeof e=="number"){m={top:e,bottom:e,left:e,right:e,front:e,back:e};let g=Pu([t,r,n,1],[l,d,p],1,[o,i,a],e);u=g[0],h=g[1],w=g[2]}else if(Array.isArray(e)){if(!e.every((b,x,_)=>b===_[0]))throw Error(`Unsupported padding parameter: ${e}`);m={top:e[0],bottom:e[1],left:e[2],right:e[3],front:e[4],back:e[5]};let g=Pu([t,r,n,1],[l,d,p],1,[o,i,a],e[0]);u=g[0],h=g[1],w=g[2]}else if(e==="SAME_UPPER"){u=Math.ceil(t/o),h=Math.ceil(r/i),w=Math.ceil(n/a);let g=(u-1)*o+l-t,b=(h-1)*i+d-r,x=(w-1)*a+p-n,_=Math.floor(g/2),$=g-_,S=Math.floor(b/2),I=b-S,T=Math.floor(x/2),A=x-T;m={top:S,bottom:I,left:T,right:A,front:_,back:$}}else throw Error(`Unknown padding parameter: ${e}`);return{padInfo:m,outDepth:u,outHeight:h,outWidth:w}},Ou=(e,t,r,n,o,i=!1,a="channelsLast")=>{let l,d,p,m,u;if(a==="channelsLast")[l,d,p,m,u]=e;else if(a==="channelsFirst")[l,u,d,p,m]=e;else throw new Error(`Unknown dataFormat ${a}`);let[h,,w,g,b]=t,[x,_,$]=ku(r),[S,I,T]=ku(n),A=en(w,S),D=en(g,I),z=en(b,T),{padInfo:H,outDepth:W,outHeight:F,outWidth:de}=Am(o,d,p,m,x,_,$,A,D,z),ce=i?h*u:h,X=[0,0,0,0,0];return 
a==="channelsFirst"?X=[l,ce,W,F,de]:a==="channelsLast"&&(X=[l,W,F,de,ce]),{batchSize:l,dataFormat:a,inDepth:d,inHeight:p,inWidth:m,inChannels:u,outDepth:W,outHeight:F,outWidth:de,outChannels:ce,padInfo:H,strideDepth:x,strideHeight:_,strideWidth:$,filterDepth:w,filterHeight:g,filterWidth:b,effectiveFilterDepth:A,effectiveFilterHeight:D,effectiveFilterWidth:z,dilationDepth:S,dilationHeight:I,dilationWidth:T,inShape:e,outShape:X,filterShape:t}},Du=(e,t,r,n,o,i)=>{let a=i==="channelsLast",l=a?e[0].dims[3]:e[0].dims[1],d=!1,p=[64,1,1],m={x:r.map(($,S)=>S)},u=[Math.ceil(Cm(m.x.map($=>r[$]))/p[0]),1,1];be("verbose",()=>`[conv3d_naive_webgpu] dispatch = ${u}`);let h=d?a&&l%4!==0?3:4:1,w=k.size(r),g=[{type:12,data:w},{type:12,data:n},{type:12,data:o},{type:12,data:t.strides},{type:12,data:t.dilations}];g.push(...V(e[0].dims,e[1].dims));let b=["rank","rank"],x=e.length===3;x&&(g.push(...V(e[2].dims)),b.push("rank")),g.push(...V(r));let _=$=>{let S=[{name:"output_size",type:"u32"},{name:"filter_dims",type:"u32",length:n.length},{name:"pads",type:"u32",length:o.length},{name:"strides",type:"u32",length:t.strides.length},{name:"dilations",type:"u32",length:t.dilations.length}],I=d?4:1,T=he(e[0].dataType),A=E("x",e[0].dataType,e[0].dims.length,h===3?1:h),D=E("W",e[1].dataType,e[1].dims.length,I),z=[A,D],H=M("result",e[0].dataType,r.length,I),W="";if(x){let F=E("bias",e[2].dataType,e[2].dims.length,I);z.push(F),W+=` fn getBiasByOutputCoords(coords : array) -> ${d?`vec4<${T}>`:T} { return bias[${a?G("coords",4,5):G("coords",1,5)}${d?"/ 4":""}]; }`}return` @@ -910,9 +910,9 @@ bIndices[${S-2}] = u32(row); } fn getW(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 { let aIndices = array(d0, d1, d2, d3, d4); - return ${z.getByIndices("aIndices")}; + return ${D.getByIndices("aIndices")}; } - ${$.registerUniforms(S).declareVariables(...D,H)} + ${$.registerUniforms(S).declareVariables(...z,H)} ${$.mainStart()} ${$.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} let coords = ${H.offsetToIndices("global_idx")}; @@ -1006,7 +1006,7 @@ bIndices[${S-2}] = u32(row); } ${x?"dotProd = dotProd + getBiasByOutputCoords(coords)":""}; result[global_idx] = f32(dotProd); - }`};return{name:"Conv3DNaive",shaderCache:{hint:`${t.cacheKey};${a};${h};${x}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:u[0],y:u[1],z:u[2]},programUniforms:g}),getShaderSource:_}}});var so,Bu,Ru=U(()=>{"use strict";Q();ae();oe();uo();yt();so=(e,t,r)=>{let n=e.length>2,o=n?"value += b[output_channel];":"",i=e[0].dims,a=e[1].dims,l=a[0]/t.group,d=t.format==="NHWC",p=tn(i,a,t.dilations,t.pads,t.strides,d),m=k.size(p),u=[{type:12,data:m},{type:12,data:t.dilations},{type:12,data:[t.strides[0],t.strides[1]]},{type:12,data:[t.pads[0],t.pads[1]]},{type:12,data:l}];je(t,u),u.push(...V(i,a));let h=["rank","rank"];n&&(u.push(...V(e[2].dims)),h.push("rank")),u.push(...V(p));let w=g=>{let b=M("output",e[0].dataType,p.length),x=he(b.type.tensor),_=Ke(t,b.type.value,x),$=E("x",e[0].dataType,i.length),S=E("w",e[1].dataType,a.length),I=[$,S];n&&I.push(E("b",e[2].dataType,e[2].dims.length));let T=[{name:"output_size",type:"u32"},{name:"dilations",type:"u32",length:t.dilations.length},{name:"strides",type:"u32",length:2},{name:"pads",type:"u32",length:2},{name:"output_channels_per_group",type:"u32"}];return Ye(t,T),` + 
}`};return{name:"Conv3DNaive",shaderCache:{hint:`${t.cacheKey};${a};${h};${x}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:u[0],y:u[1],z:u[2]},programUniforms:g}),getShaderSource:_}}});var uo,Bu,Ru=U(()=>{"use strict";Q();ae();oe();lo();yt();uo=(e,t,r)=>{let n=e.length>2,o=n?"value += b[output_channel];":"",i=e[0].dims,a=e[1].dims,l=a[0]/t.group,d=t.format==="NHWC",p=tn(i,a,t.dilations,t.pads,t.strides,d),m=k.size(p),u=[{type:12,data:m},{type:12,data:t.dilations},{type:12,data:[t.strides[0],t.strides[1]]},{type:12,data:[t.pads[0],t.pads[1]]},{type:12,data:l}];Ke(t,u),u.push(...V(i,a));let h=["rank","rank"];n&&(u.push(...V(e[2].dims)),h.push("rank")),u.push(...V(p));let w=g=>{let b=M("output",e[0].dataType,p.length),x=he(b.type.tensor),_=je(t,b.type.value,x),$=E("x",e[0].dataType,i.length),S=E("w",e[1].dataType,a.length),I=[$,S];n&&I.push(E("b",e[2].dataType,e[2].dims.length));let T=[{name:"output_size",type:"u32"},{name:"dilations",type:"u32",length:t.dilations.length},{name:"strides",type:"u32",length:2},{name:"pads",type:"u32",length:2},{name:"output_channels_per_group",type:"u32"}];return Ye(t,T),` ${g.registerUniforms(T).declareVariables(...I,b)} ${g.mainStart()} @@ -1043,7 +1043,7 @@ bIndices[${S-2}] = u32(row); ${o} ${_} ${b.setByOffset("global_idx","value")} - }`};return{name:"GroupedConv",shaderCache:{hint:t.cacheKey,inputDependencies:h},getRunData:()=>({outputs:[{dims:r?r(p):p,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(m/64)},programUniforms:u}),getShaderSource:w}},Bu=(e,t,r)=>{let n=e.length>2,o=we(r[3]),i=we(r[2]),a=k.size(r)/o/i,l=[e[0].dims[0],e[0].dims[1],e[0].dims[2],e[0].dims[3]/o],d=[e[1].dims[0],e[1].dims[1],e[1].dims[2],e[1].dims[3]/o],p=[r[0],r[1],r[2],r[3]/o],m=[{type:12,data:a},{type:6,data:[t.strides[0],t.strides[1]]},{type:6,data:[t.pads[0],t.pads[1]]}];je(t,m),m.push(...V(l,d,p));let u=(i-1)*t.strides[1]+d[1],h=w=>{let g=M("output",e[0].dataType,p.length,o),b=he(g.type.tensor),x=Ke(t,g.type.value,b),_=E("x",e[0].dataType,l.length,o),$=E("w",e[1].dataType,d.length,o),S=[_,$];n&&S.push(E("b",e[2].dataType,e[2].dims,o));let I=n?"value += b[output_channel];":"",T=[{name:"output_size",type:"u32"},{name:"strides",type:"i32",length:2},{name:"pads",type:"i32",length:2}];return Ye(t,T),` + }`};return{name:"GroupedConv",shaderCache:{hint:t.cacheKey,inputDependencies:h},getRunData:()=>({outputs:[{dims:r?r(p):p,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(m/64)},programUniforms:u}),getShaderSource:w}},Bu=(e,t,r)=>{let n=e.length>2,o=we(r[3]),i=we(r[2]),a=k.size(r)/o/i,l=[e[0].dims[0],e[0].dims[1],e[0].dims[2],e[0].dims[3]/o],d=[e[1].dims[0],e[1].dims[1],e[1].dims[2],e[1].dims[3]/o],p=[r[0],r[1],r[2],r[3]/o],m=[{type:12,data:a},{type:6,data:[t.strides[0],t.strides[1]]},{type:6,data:[t.pads[0],t.pads[1]]}];Ke(t,m),m.push(...V(l,d,p));let u=(i-1)*t.strides[1]+d[1],h=w=>{let g=M("output",e[0].dataType,p.length,o),b=he(g.type.tensor),x=je(t,g.type.value,b),_=E("x",e[0].dataType,l.length,o),$=E("w",e[1].dataType,d.length,o),S=[_,$];n&&S.push(E("b",e[2].dataType,e[2].dims,o));let I=n?"value += b[output_channel];":"",T=[{name:"output_size",type:"u32"},{name:"strides",type:"i32",length:2},{name:"pads",type:"i32",length:2}];return Ye(t,T),` ${w.registerUniforms(T).declareVariables(...S,g)} ${w.mainStart()} ${w.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -1088,14 +1088,14 @@ bIndices[${S-2}] = u32(row); ${x} ${g.set("batch","row","col + i","output_channel","value")}; } - 
}`};return{name:"GroupedConv-Vectorize",shaderCache:{hint:`${t.cacheKey};${o};${i};${u};${d[0]};${d[1]}`,inputDependencies:n?["rank","rank","type"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:m}),getShaderSource:h}}});var lo,Tm,Mu,co=U(()=>{"use strict";Q();ae();er();oe();yt();lo=(e,t,r,n,o=!1)=>{let i=e[0].dims,a=e[1].dims,l=i[i.length-2],d=a[a.length-1],p=i[i.length-1],m=we(d),u=we(p),h=we(l),w=k.size(r)/m/h,g=e.length>2,b=n?n.slice(0,-2):r.slice(0,-2),_=[k.size(b),l,d],$=[{type:12,data:w},{type:12,data:l},{type:12,data:d},{type:12,data:p}];je(t,$),$.push(...V(b,i,a)),g&&$.push(...V(e[2].dims)),$.push(...V(_));let S=I=>{let T=Gr("batch_dims",e[0].dataType,b.length),A=E("a",e[0].dataType,i.length,u),z=E("b",e[1].dataType,a.length,m),D=M("output",e[0].dataType,_.length,m),H=he(D.type.tensor),W=Ke(t,D.type.value,H),F=[A,z],de="";if(g){let Z=o?m:1;F.push(E("bias",e[2].dataType,e[2].dims.length,Z)),de=`${o?`value += bias[col / ${Z}];`:`value += ${D.type.value}(bias[row + i]);`}`}let ce=i.slice(0,-2),X=a.slice(0,-2),xe=Vt(ce,b),q=Vt(X,b),ie=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"}];Ye(t,ie);let le=(Z,re)=>{let J=Z.rank,Pe=Z.name;if(J===2)return`var ${Pe}_indices = ${Z.type.indices}(0u, 0u);`;let R=T.rank,Y=`var ${Pe}_indices: ${Z.type.indices};`;for(let ue=J-2-1,Te=R-1;ue>=0;ue--,Te--)Y+=` + }`};return{name:"GroupedConv-Vectorize",shaderCache:{hint:`${t.cacheKey};${o};${i};${u};${d[0]};${d[1]}`,inputDependencies:n?["rank","rank","type"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:m}),getShaderSource:h}}});var co,Em,Mu,po=U(()=>{"use strict";Q();ae();er();oe();yt();co=(e,t,r,n,o=!1)=>{let i=e[0].dims,a=e[1].dims,l=i[i.length-2],d=a[a.length-1],p=i[i.length-1],m=we(d),u=we(p),h=we(l),w=k.size(r)/m/h,g=e.length>2,b=n?n.slice(0,-2):r.slice(0,-2),_=[k.size(b),l,d],$=[{type:12,data:w},{type:12,data:l},{type:12,data:d},{type:12,data:p}];Ke(t,$),$.push(...V(b,i,a)),g&&$.push(...V(e[2].dims)),$.push(...V(_));let S=I=>{let T=Gr("batch_dims",e[0].dataType,b.length),A=E("a",e[0].dataType,i.length,u),D=E("b",e[1].dataType,a.length,m),z=M("output",e[0].dataType,_.length,m),H=he(z.type.tensor),W=je(t,z.type.value,H),F=[A,D],de="";if(g){let Z=o?m:1;F.push(E("bias",e[2].dataType,e[2].dims.length,Z)),de=`${o?`value += bias[col / ${Z}];`:`value += ${z.type.value}(bias[row + i]);`}`}let ce=i.slice(0,-2),X=a.slice(0,-2),xe=Nt(ce,b),q=Nt(X,b),ie=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"}];Ye(t,ie);let le=(Z,re)=>{let J=Z.rank,Pe=Z.name;if(J===2)return`var ${Pe}_indices = ${Z.type.indices}(0u, 0u);`;let R=T.rank,Y=`var ${Pe}_indices: ${Z.type.indices};`;for(let ue=J-2-1,Te=R-1;ue>=0;ue--,Te--)Y+=` ${Pe}_indices[${ue}] = ${R>1?`batch_indices[${Te}]`:"batch_indices"};`;return re.forEach(ue=>{Y+=` ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${Pe}_indices[${J-1}] = 0u;`,Y},se=()=>{let Z=`var a_data: ${A.type.value};`;for(let re=0;re; + ${le(D,q)} + let b_offset = ${D.indicesToOffset("b_indices")}; + var values: array<${z.type.value}, ${h}>; for (var k: u32 = 0u; k < uniforms.K; k = k + ${u}) { ${se()} } @@ -1117,12 +1117,12 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; var value = values[i]; ${de} ${W} - let cur_indices = ${D.type.indices}(batch, row + i, col); - let offset = ${D.indicesToOffset("cur_indices")}; 
- ${D.setByOffset(`offset / ${m}`,"value")}; + let cur_indices = ${z.type.indices}(batch, row + i, col); + let offset = ${z.indicesToOffset("cur_indices")}; + ${z.setByOffset(`offset / ${m}`,"value")}; } } - `};return{name:"MatMulNaive",shaderCache:{hint:`${t.activation};${m};${u};${h};${o}`,inputDependencies:g?["rank","rank","rank"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(w/64)},programUniforms:$}),getShaderSource:S}},Tm=e=>{if(!e||e.length!==2)throw new Error("MatMul requires 2 inputs.");if(e[0].dims[e[0].dims.length-1]!==e[1].dims[e[1].dims.length-2])throw new Error("shared dimension does not match.")},Mu=e=>{Tm(e.inputs);let t=tt.calcShape(e.inputs[0].dims,e.inputs[1].dims,!0);if(!t)throw new Error("Can't use matmul on the given tensors");let r=t[t.length-1],n=e.inputs[0].dims[e.inputs[0].dims.length-1];r<8&&n<8?e.compute(lo(e.inputs,{activation:""},t)):e.compute(Jr(e.inputs,{activation:""},t))}});var tn,po,Am,mo,fo,Em,km,Pm,ho,uo=U(()=>{"use strict";ae();Eu();Du();er();Ru();yt();co();Et();tn=(e,t,r,n,o,i)=>{let a=e[0],l=e.slice(i?1:2,i?3:4),d=l.length,p=t[0],u=t.slice(2).map((g,b)=>g+(g-1)*(r[b]-1)),w=l.map((g,b)=>g+n[b]+n[b+d]).map((g,b)=>Math.floor((g-u[b]+o[b])/o[b]));return w.splice(0,0,a),w.splice(i?3:1,0,p),w},po=[2,3,1,0],Am=(e,t)=>{if(!e||e.length!==2&&e.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(e[0].dims.length>5)throw new Error("greater than 5D is not supported");if(e[0].dims.length!==e[1].dims.length)throw new Error("filter does not have same dimension as input");let r=e[0].dims[t.format==="NHWC"?e[0].dims.length-1:1],n=e[1].dims[1]*t.group;if(r!==n)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");if(e.length===3&&(e[2].dims.length!==1||e[1].dims[0]!==e[2].dims[0]))throw new Error("invalid bias");let o=e[0].dims.length-2;if(t.dilations.length!==o)throw new Error(`dilations should be ${o}D`);if(t.strides.length!==o)throw new Error(`strides should be ${o}D`);if(t.pads.length!==o*2)throw new Error(`pads should be ${o*2}D`);if(t.kernelShape.length!==0&&t.kernelShape.length!==e[1].dims.length-2)throw new Error("invalid kernel shape")},mo=(e,t)=>{let r=e.kernelShape.slice();for(let i=2;i{let t=Yr(e),r=e.format,n=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][e.auto_pad],o=e.dilations,i=e.group,a=e.kernel_shape,l=e.pads,d=e.strides,p=e.w_is_const();return{autoPad:n,format:r,dilations:o,group:i,kernelShape:a,pads:l,strides:d,wIsConst:p,...t,cacheKey:`${e.format};${t.activation};`}},Em=(e,t,r)=>{let n=mo(r,t),o=r.format==="NHWC";if(r.group!==1){if(!e.adapterInfo.isArchitecture("ampere")&&o&&t[1].dims[0]===r.group&&t[1].dims[1]===1&&r.dilations[0]===1&&r.dilations[1]===1){let z=tn(t[0].dims,t[1].dims,r.dilations,n.pads,r.strides,o),D=e.kernelCustomData.wT??e.compute(Ue(t[1],po),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=D);let H=[t[0],D];t.length===3&&H.push(t[2]),e.compute(Bu(H,n,z),{inputs:H})}else e.compute(so(t,n));return}let i=t.length===3,a=t[0].dims[o?1:2],l=t[0].dims[o?2:3],d=t[0].dims[o?3:1],p=t[1].dims[2],m=t[1].dims[3],u=tn(t[0].dims,t[1].dims,r.dilations,n.pads,r.strides,o),h=u[o?1:2],w=u[o?2:3],g=u[o?3:1],b=o&&p===a&&m===l&&r.pads[0]===0&&r.pads[1]===0;if(b||p===1&&m===1&&r.dilations[0]===1&&r.dilations[1]===1&&r.strides[0]===1&&r.strides[1]===1&&r.pads[0]===0&&r.pads[1]===0){let A=u[0],z,D,H,W=[];if(o){let 
ce=e.kernelCustomData.wT??e.compute(Ue(t[1],po),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];if(r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=ce),b){let X=a*l*d;z=t[0].reshape([1,A,X]),D=ce.reshape([1,X,g]),H=[1,A,g]}else z=t[0].reshape([A,a*l,d]),D=ce.reshape([1,d,g]),H=[A,h*w,g];W.push(z),W.push(D)}else z=t[0].reshape([A,d,a*l]),D=t[1].reshape([1,g,d]),H=[A,g,h*w],W.push(D),W.push(z);i&&W.push(t[2]);let F=H[2],de=W[0].dims[W[0].dims.length-1];F<8&&de<8?e.compute(lo(W,n,u,H,o),{inputs:W}):e.compute(Jr(W,n,u,H,o),{inputs:W});return}let x=!0,_=e.kernelCustomData.wT??e.compute(Ue(t[1],po),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=_);let $=[t[0],_];i&&$.push(t[2]);let S=o?h*w:g,I=o?g:h*w,T=p*m*d;e.compute(Au($,n,u,S,I,T,i,x),{inputs:$})},km=(e,t)=>{let r=t.format==="NHWC",n=[e.inputs[0].reshape(r?[e.inputs[0].dims[0],1,e.inputs[0].dims[1],e.inputs[0].dims[2]]:[e.inputs[0].dims[0],e.inputs[0].dims[1],1,e.inputs[0].dims[2]]),e.inputs[1].reshape([e.inputs[1].dims[0],e.inputs[1].dims[1],1,e.inputs[1].dims[2]])];e.inputs.length===3&&n.push(e.inputs[2]);let o=[0,t.pads[0],0,t.pads[1]],i=[1].concat(t.strides),a=[1].concat(t.dilations),l=[1].concat(t.kernelShape),d=mo({...t,pads:o,strides:i,dilations:a,kernelShape:l},n);e.compute(so(n,d,p=>r?[p[0],p[2],p[3]]:[]))},Pm=(e,t,r)=>{let n=r.format==="NHWC"?"channelsLast":"channelsFirst",o=mo(r,t),i=r.autoPad==="NOTSET"?r.pads:r.autoPad,a=Ou(t[0].dims,t[1].dims,r.strides,r.dilations,i,!1,n);e.compute(zu(t,o,a.outShape,[a.filterDepth,a.filterHeight,a.filterWidth],[a.padInfo.front,a.padInfo.top,a.padInfo.left],n))},ho=(e,t)=>{Am(e.inputs,t),e.inputs[0].dims.length===3?km(e,t):e.inputs[0].dims.length===5?Pm(e,e.inputs,t):Em(e,e.inputs,t)}});var Om,Uu,Vu=U(()=>{"use strict";Q();dt();oe();yt();Zr();ao();er();Om=(e,t=!1,r,n,o=4)=>{let i=_=>{switch(_){case 1:return"return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];";case 4:return` + `};return{name:"MatMulNaive",shaderCache:{hint:`${t.activation};${m};${u};${h};${o}`,inputDependencies:g?["rank","rank","rank"]:["rank","rank"]},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(w/64)},programUniforms:$}),getShaderSource:S}},Em=e=>{if(!e||e.length!==2)throw new Error("MatMul requires 2 inputs.");if(e[0].dims[e[0].dims.length-1]!==e[1].dims[e[1].dims.length-2])throw new Error("shared dimension does not match.")},Mu=e=>{Em(e.inputs);let t=tt.calcShape(e.inputs[0].dims,e.inputs[1].dims,!0);if(!t)throw new Error("Can't use matmul on the given tensors");let r=t[t.length-1],n=e.inputs[0].dims[e.inputs[0].dims.length-1];r<8&&n<8?e.compute(co(e.inputs,{activation:""},t)):e.compute(Jr(e.inputs,{activation:""},t))}});var tn,mo,km,fo,ho,Pm,Om,Dm,go,lo=U(()=>{"use strict";ae();Eu();zu();er();Ru();yt();po();Et();tn=(e,t,r,n,o,i)=>{let a=e[0],l=e.slice(i?1:2,i?3:4),d=l.length,p=t[0],u=t.slice(2).map((g,b)=>g+(g-1)*(r[b]-1)),w=l.map((g,b)=>g+n[b]+n[b+d]).map((g,b)=>Math.floor((g-u[b]+o[b])/o[b]));return w.splice(0,0,a),w.splice(i?3:1,0,p),w},mo=[2,3,1,0],km=(e,t)=>{if(!e||e.length!==2&&e.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(e[0].dims.length>5)throw new Error("greater than 5D is not supported");if(e[0].dims.length!==e[1].dims.length)throw new Error("filter does not have same dimension as input");let r=e[0].dims[t.format==="NHWC"?e[0].dims.length-1:1],n=e[1].dims[1]*t.group;if(r!==n)throw new Error("FILTER_IN_CHANNEL should be equal to 
DATA_CHANNEL");if(e.length===3&&(e[2].dims.length!==1||e[1].dims[0]!==e[2].dims[0]))throw new Error("invalid bias");let o=e[0].dims.length-2;if(t.dilations.length!==o)throw new Error(`dilations should be ${o}D`);if(t.strides.length!==o)throw new Error(`strides should be ${o}D`);if(t.pads.length!==o*2)throw new Error(`pads should be ${o*2}D`);if(t.kernelShape.length!==0&&t.kernelShape.length!==e[1].dims.length-2)throw new Error("invalid kernel shape")},fo=(e,t)=>{let r=e.kernelShape.slice();for(let i=2;i{let t=Yr(e),r=e.format,n=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][e.auto_pad],o=e.dilations,i=e.group,a=e.kernel_shape,l=e.pads,d=e.strides,p=e.w_is_const();return{autoPad:n,format:r,dilations:o,group:i,kernelShape:a,pads:l,strides:d,wIsConst:p,...t,cacheKey:`${e.format};${t.activation};`}},Pm=(e,t,r)=>{let n=fo(r,t),o=r.format==="NHWC";if(r.group!==1){if(!e.adapterInfo.isArchitecture("ampere")&&o&&t[1].dims[0]===r.group&&t[1].dims[1]===1&&r.dilations[0]===1&&r.dilations[1]===1){let D=tn(t[0].dims,t[1].dims,r.dilations,n.pads,r.strides,o),z=e.kernelCustomData.wT??e.compute(Ue(t[1],mo),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=z);let H=[t[0],z];t.length===3&&H.push(t[2]),e.compute(Bu(H,n,D),{inputs:H})}else e.compute(uo(t,n));return}let i=t.length===3,a=t[0].dims[o?1:2],l=t[0].dims[o?2:3],d=t[0].dims[o?3:1],p=t[1].dims[2],m=t[1].dims[3],u=tn(t[0].dims,t[1].dims,r.dilations,n.pads,r.strides,o),h=u[o?1:2],w=u[o?2:3],g=u[o?3:1],b=o&&p===a&&m===l&&r.pads[0]===0&&r.pads[1]===0;if(b||p===1&&m===1&&r.dilations[0]===1&&r.dilations[1]===1&&r.strides[0]===1&&r.strides[1]===1&&r.pads[0]===0&&r.pads[1]===0){let A=u[0],D,z,H,W=[];if(o){let ce=e.kernelCustomData.wT??e.compute(Ue(t[1],mo),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];if(r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=ce),b){let X=a*l*d;D=t[0].reshape([1,A,X]),z=ce.reshape([1,X,g]),H=[1,A,g]}else D=t[0].reshape([A,a*l,d]),z=ce.reshape([1,d,g]),H=[A,h*w,g];W.push(D),W.push(z)}else D=t[0].reshape([A,d,a*l]),z=t[1].reshape([1,g,d]),H=[A,g,h*w],W.push(z),W.push(D);i&&W.push(t[2]);let F=H[2],de=W[0].dims[W[0].dims.length-1];F<8&&de<8?e.compute(co(W,n,u,H,o),{inputs:W}):e.compute(Jr(W,n,u,H,o),{inputs:W});return}let x=!0,_=e.kernelCustomData.wT??e.compute(Ue(t[1],mo),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=_);let $=[t[0],_];i&&$.push(t[2]);let S=o?h*w:g,I=o?g:h*w,T=p*m*d;e.compute(Au($,n,u,S,I,T,i,x),{inputs:$})},Om=(e,t)=>{let r=t.format==="NHWC",n=[e.inputs[0].reshape(r?[e.inputs[0].dims[0],1,e.inputs[0].dims[1],e.inputs[0].dims[2]]:[e.inputs[0].dims[0],e.inputs[0].dims[1],1,e.inputs[0].dims[2]]),e.inputs[1].reshape([e.inputs[1].dims[0],e.inputs[1].dims[1],1,e.inputs[1].dims[2]])];e.inputs.length===3&&n.push(e.inputs[2]);let o=[0,t.pads[0],0,t.pads[1]],i=[1].concat(t.strides),a=[1].concat(t.dilations),l=[1].concat(t.kernelShape),d=fo({...t,pads:o,strides:i,dilations:a,kernelShape:l},n);e.compute(uo(n,d,p=>r?[p[0],p[2],p[3]]:[]))},Dm=(e,t,r)=>{let n=r.format==="NHWC"?"channelsLast":"channelsFirst",o=fo(r,t),i=r.autoPad==="NOTSET"?r.pads:r.autoPad,a=Ou(t[0].dims,t[1].dims,r.strides,r.dilations,i,!1,n);e.compute(Du(t,o,a.outShape,[a.filterDepth,a.filterHeight,a.filterWidth],[a.padInfo.front,a.padInfo.top,a.padInfo.left],n))},go=(e,t)=>{km(e.inputs,t),e.inputs[0].dims.length===3?Om(e,t):e.inputs[0].dims.length===5?Dm(e,e.inputs,t):Pm(e,e.inputs,t)}});var zm,Uu,Vu=U(()=>{"use 
strict";Q();dt();oe();yt();Zr();so();er();zm=(e,t=!1,r,n,o=4)=>{let i=_=>{switch(_){case 1:return"return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];";case 4:return` let coord1 = vec4(coordX, coordY, col + 1, rowInner); let coord2 = vec4(coordX, coordY, col + 2, rowInner); let coord3 = vec4(coordX, coordY, col + 3, rowInner); @@ -1188,7 +1188,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${i(o)} } return ${n}(0.0); - `,b=Ke(r,n);return` + `,b=je(r,n);return` fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${n} { ${e?w:g} } @@ -1207,15 +1207,15 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${b} result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${o}] = value; } - }`},Uu=(e,t,r,n,o,i,a,l)=>{let d=t.format==="NHWC",p=d?e[0].dims[3]:e[0].dims[1],m=r[0],u=d?r[2]:r[3],h=d?r[1]:r[2],w=d?r[3]:r[1],g=d&&p%4===0&&p%3&&w%4===0,b=d?w:u*h,x=d?u*h:w,_=[8,8,1],$=n<=8?[4,1,1]:[4,4,1],S=[Math.ceil(b/_[0]/$[0]),Math.ceil(x/_[1]/$[1]),Math.ceil(m/_[2]/$[2])];be("verbose",()=>`[conv_backprop_mm_webgpu] dispatch = ${S}`);let I=g?4:1,T=Math.max(_[0]*I,_[1]),A=g?4:1,z=[t.kernelShape[d?1:2],t.kernelShape[d?2:3]],D=[z[0]+(t.dilations[0]<=1?0:(z[0]-1)*(t.dilations[0]-1)),z[1]+(t.dilations[1]<=1?0:(z[1]-1)*(t.dilations[1]-1))],H=[D[0]-1-Math.floor((t.pads[0]+t.pads[2])/2),D[1]-1-Math.floor((t.pads[1]+t.pads[3])/2)],W=[{type:6,data:n},{type:6,data:o},{type:6,data:i},{type:6,data:t.strides},{type:6,data:t.dilations},{type:6,data:z},{type:6,data:H}];je(t,W),W.push(...V(e[0].dims,e[1].dims));let F=["rank","rank"];a&&(W.push(...V(e[2].dims)),F.push("rank")),W.push(...V(r));let de=ce=>{let X=E("x",e[0].dataType,e[0].dims.length,A),xe=E("w",e[1].dataType,e[1].dims.length,1),q=M("result",e[0].dataType,r.length,A),ie=[X,xe],le="";if(a){let re=E("bias",e[2].dataType,e[2].dims.length,A);ie.push(re),le+=` + }`},Uu=(e,t,r,n,o,i,a,l)=>{let d=t.format==="NHWC",p=d?e[0].dims[3]:e[0].dims[1],m=r[0],u=d?r[2]:r[3],h=d?r[1]:r[2],w=d?r[3]:r[1],g=d&&p%4===0&&p%3&&w%4===0,b=d?w:u*h,x=d?u*h:w,_=[8,8,1],$=n<=8?[4,1,1]:[4,4,1],S=[Math.ceil(b/_[0]/$[0]),Math.ceil(x/_[1]/$[1]),Math.ceil(m/_[2]/$[2])];be("verbose",()=>`[conv_backprop_mm_webgpu] dispatch = ${S}`);let I=g?4:1,T=Math.max(_[0]*I,_[1]),A=g?4:1,D=[t.kernelShape[d?1:2],t.kernelShape[d?2:3]],z=[D[0]+(t.dilations[0]<=1?0:(D[0]-1)*(t.dilations[0]-1)),D[1]+(t.dilations[1]<=1?0:(D[1]-1)*(t.dilations[1]-1))],H=[z[0]-1-Math.floor((t.pads[0]+t.pads[2])/2),z[1]-1-Math.floor((t.pads[1]+t.pads[3])/2)],W=[{type:6,data:n},{type:6,data:o},{type:6,data:i},{type:6,data:t.strides},{type:6,data:t.dilations},{type:6,data:D},{type:6,data:H}];Ke(t,W),W.push(...V(e[0].dims,e[1].dims));let F=["rank","rank"];a&&(W.push(...V(e[2].dims)),F.push("rank")),W.push(...V(r));let de=ce=>{let X=E("x",e[0].dataType,e[0].dims.length,A),xe=E("w",e[1].dataType,e[1].dims.length,1),q=M("result",e[0].dataType,r.length,A),ie=[X,xe],le="";if(a){let re=E("bias",e[2].dataType,e[2].dims.length,A);ie.push(re),le+=` fn getBiasByOutputCoords(coords : vec4) -> ${re.type.value} { return bias[coords.${d?"w":"y"}${g?"/ 4":""}]; - }`}let se=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"strides",type:"i32",length:2},{name:"dilations",type:"i32",length:2},{name:"filter_dims",type:"i32",length:z.length},{name:"pads",type:"i32",length:H.length}];Ye(t,se);let Z=he(e[0].dataType,1);if(Z!=="f16"&&Z!=="f32")throw new Error(`elemType ${Z} is not supported.`);return` + }`}let 
se=[{name:"dim_a_outer",type:"i32"},{name:"dim_b_outer",type:"i32"},{name:"dim_inner",type:"i32"},{name:"strides",type:"i32",length:2},{name:"dilations",type:"i32",length:2},{name:"filter_dims",type:"i32",length:D.length},{name:"pads",type:"i32",length:H.length}];Ye(t,se);let Z=he(e[0].dataType,1);if(Z!=="f16"&&Z!=="f32")throw new Error(`elemType ${Z} is not supported.`);return` ${Qr("uniforms.result_strides")} ${ce.registerUniforms(se).declareVariables(...ie,q)}; ${le} - ${Om(d,a,t,X.type.value,I)} - ${g?Qt($,_,Z,void 0,!d,T):Jt($,_,Z,void 0,!d,T,!1,void 0,l)}`};return{name:"Conv2DTransposeMatMul",shaderCache:{hint:`${t.cacheKey};${$};${_};${g}`,inputDependencies:F},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:W}),getShaderSource:de}}});var zm,go,Nu=U(()=>{"use strict";Q();dt();ae();oe();zm=(e,t,r,n,o,i=!1,a,l,d=!1)=>{let p=d?1:2,m=d?2:3,u=d?3:1,h=i?2:1,w=` + ${zm(d,a,t,X.type.value,I)} + ${g?Qt($,_,Z,void 0,!d,T):Jt($,_,Z,void 0,!d,T,!1,void 0,l)}`};return{name:"Conv2DTransposeMatMul",shaderCache:{hint:`${t.cacheKey};${$};${_};${g}`,inputDependencies:F},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:S[0],y:S[1],z:S[2]},programUniforms:W}),getShaderSource:de}}});var Bm,yo,Nu=U(()=>{"use strict";Q();dt();ae();oe();Bm=(e,t,r,n,o,i=!1,a,l,d=!1)=>{let p=d?1:2,m=d?2:3,u=d?3:1,h=i?2:1,w=` fn setOutputAtIndex(flatIndex : u32, value : ${i?`vec4<${a}>`:a}) { result[flatIndex] = ${i?`vec4<${a}>`:a}(value); }`;n&&(w+=` @@ -1378,7 +1378,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${e.mainStart()} ${e.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")}; - ${i?S:I}}`},go=(e,t,r)=>{let n=e.length>2,o=t.outputShape,i=k.size(o),a=[Math.ceil(i/64),1,1];be("verbose",()=>`[conv2d_backprop_webgpu] dispatch = ${a}`);let l=t.format==="NHWC",d=["rank","rank"],p=[t.strides[0],t.strides[1]],m=[t.kernelShape[l?1:2],t.kernelShape[l?2:3]],u=[t.dilations[0],t.dilations[1]],h=[m[0]+(t.dilations[0]<=1?0:(t.kernelShape[l?1:2]-1)*(t.dilations[0]-1)),m[1]+(t.dilations[1]<=1?0:(t.kernelShape[l?2:3]-1)*(t.dilations[1]-1))],w=[h[0]-1-Math.floor((t.pads[0]+t.pads[2])/2),h[1]-1-Math.floor(t.pads[1]+t.pads[3])/2],g=!1,b=t.group,x=e[1].dims,_=x[0]/b,$=x[1],S=[{type:12,data:i},{type:12,data:p},{type:12,data:m},{type:12,data:u},{type:12,data:h},{type:6,data:w},{type:12,data:_},{type:12,data:$},...V(e[0].dims,e[1].dims)];n&&(S.push(...V(e[2].dims)),d.push("rank")),S.push(...V(o));let I=a[1]===1&&a[2]===1,T=A=>{let z=[{name:"output_size",type:"u32"},{name:"strides",type:"u32",length:p.length},{name:"filter_dims",type:"u32",length:m.length},{name:"dilations",type:"u32",length:m.length},{name:"effective_filter_dims",type:"u32",length:h.length},{name:"pads",type:"i32",length:w.length},{name:"input_channels_per_group",type:"u32"},{name:"output_channels_per_group",type:"u32"}],D=he(e[0].dataType);return`${zm(A,e,o,n,I,g,D,z,l)}`};return{name:"ConvTranspose2D",shaderCache:{hint:`${t.cacheKey};`,inputDependencies:d},getRunData:()=>({dispatchGroup:{x:a[0],y:a[1],z:a[2]},outputs:[{dims:r?r(o):o,dataType:e[0].dataType}],programUniforms:S}),getShaderSource:T}}});var Dm,Bm,Rm,Wu,Hu,Mm,Um,Vm,Nm,Gu,Lu=U(()=>{"use strict";Vu();Nu();yt();Et();Dm=(e,t,r,n,o,i)=>(e-1)*t+r+(n-1)*o+1-i,Bm=(e,t,r,n,o)=>{let i=Math.floor(e/2);t==="SAME_UPPER"?(r[n]=i,r[o]=e-i):t==="SAME_LOWER"&&(r[n]=e-i,r[o]=i)},Rm=(e,t,r,n,o,i,a,l,d,p)=>{let m=e.length-2,u=p.length===0;if(d.length===0)for(let g=0;g{let 
r=e.kernelShape.slice();if(e.kernelShape.length===0||e.kernelShape.reduce((u,h)=>u*h,1)===0){r.length=0;for(let u=2;uu+h,0)===0){let u=t[0].dims.length-2;d=new Array(u).fill(1)}let p=e.strides.slice();if(p.reduce((u,h)=>u+h,0)===0){let u=t[0].dims.length-2;p=new Array(u).fill(1)}Rm(l,r,d,e.autoPad,e.group,o,p,n,a,i);let m=Object.assign({},e);return Object.assign(m,{kernelShape:r,pads:o,outputPadding:a,outputShape:i,dilations:d,strides:p}),m},Hu=e=>{let t=Yr(e),r=e.format,n=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][typeof e.autoPad>"u"?0:e.autoPad],o=e.dilations,i=e.group,a=e.kernelShape,l=e.pads,d=e.strides,p=e.wIsConst(),m=e.outputPadding,u=e.outputShape;return{autoPad:n,format:r,dilations:o,group:i,kernelShape:a,outputPadding:m,outputShape:u,pads:l,strides:d,wIsConst:p,...t,cacheKey:`${e.format};${t.activation};`}},Mm=(e,t)=>{if(!e||e.length!==2&&e.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(e[0].dims.length!==4&&e[0].dims.length!==3)throw new Error("currently only support 2-dimensional conv");if(e[0].dims.length!==e[1].dims.length)throw new Error("filter does not have same dimension as input");let r=e[0].dims[t.format==="NHWC"?e[0].dims.length-1:1],n=e[1].dims[0];if(r!==n)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");let o=e[1].dims[1]*t.group;if(e.length===3&&(e[2].dims.length!==1||e[2].dims[0]!==o))throw new Error("invalid bias");let i=e[0].dims.length-2;if(t.dilations.reduce((m,u)=>m+u,0)>0&&t.dilations.length!==i)throw new Error(`dilations should be ${i}D`);if(t.strides.reduce((m,u)=>m+u,0)>0&&t.strides.length!==i)throw new Error(`strides should be ${i}D`);if(t.pads.reduce((m,u)=>m+u,0)>0&&t.pads.length!==i*2)throw new Error(`pads should be ${i*2}D`);if(t.outputPadding.length!==i&&t.outputPadding.length!==0)throw new Error(`output_padding should be ${i}D`);if(t.kernelShape.reduce((m,u)=>m+u,0)>0&&t.kernelShape.length!==0&&t.kernelShape.length!==e[1].dims.length-2)throw new Error("invalid kernel shape");if(t.outputShape.length!==0&&t.outputShape.length!==e[0].dims.length-2)throw new Error("invalid output shape")},Um=[2,3,1,0],Vm=(e,t,r)=>{let n=Wu(r,t),o=r.format==="NHWC",i=n.outputShape,a=i[o?3:1],l=t[0].dims[o?3:1];if(n.group!==1||a===1&&l===1){e.compute(go(t,n));return}let d=i[o?1:2],p=i[o?2:3],m=t[1].dims[2],u=t[1].dims[3],h=o?d*p:a,w=o?a:d*p,g=m*u*l,b=!0,x=e.kernelCustomData.wT??e.compute(Ue(t[1],Um),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=x);let _=[t[0],x],$=t.length===3;$&&(!o&&t[2].dims.length===1?_.push(t[2].reshape([t[2].dims[0],1,1])):_.push(t[2])),e.compute(Uu(_,n,i,h,w,g,$,b),{inputs:_})},Nm=(e,t)=>{let r=t.format==="NHWC",n=[e.inputs[0].reshape(r?[e.inputs[0].dims[0],1,e.inputs[0].dims[1],e.inputs[0].dims[2]]:[e.inputs[0].dims[0],e.inputs[0].dims[1],1,e.inputs[0].dims[2]]),e.inputs[1].reshape([e.inputs[1].dims[0],e.inputs[1].dims[1],1,e.inputs[1].dims[2]])];e.inputs.length===3&&n.push(e.inputs[2]);let o=t.kernelShape;(o.length===0||o[0]===0)&&(o=[e.inputs[1].dims[2]]);let i=t.dilations;(i.length===0||i[0]===0)&&(i=[1]);let a=t.strides;(a.length===0||a[0]===0)&&(a=[1]);let l=t.pads;l.length===0&&(l=[0,0]),l=[0,l[0],0,l[1]],a=[1].concat(a),i=[1].concat(i),o=[1].concat(o);let d=Wu({...t,pads:l,strides:a,dilations:i,kernelShape:o},n);e.compute(go(n,d,p=>r?[p[0],p[2],p[3]]:[p[0],p[1],p[3]]))},Gu=(e,t)=>{Mm(e.inputs,t),e.inputs[0].dims.length===3?Nm(e,t):Vm(e,e.inputs,t)}});var Wm,Fu,qu,Ku=U(()=>{"use strict";Q();ae();Ce();oe();Wm=(e,t,r,n)=>{let 
o=k.size(t),i=t.length,a=E("input",e,i),l=M("output",e,i),d=r.dataType===6?r.getInt32Array()[0]:Number(r.getBigInt64Array()[0]),p=k.normalizeAxis(d,i),m=u=>{let h=` i32(${a.indicesGet("inputIndices","uniforms.axis")}) `,w=G("uniforms.input_shape","uniforms.axis",i),g=n.reverse?h+(n.exclusive?" + 1":""):"0",b=n.reverse?w:h+(n.exclusive?"":" + 1");return` + ${i?S:I}}`},yo=(e,t,r)=>{let n=e.length>2,o=t.outputShape,i=k.size(o),a=[Math.ceil(i/64),1,1];be("verbose",()=>`[conv2d_backprop_webgpu] dispatch = ${a}`);let l=t.format==="NHWC",d=["rank","rank"],p=[t.strides[0],t.strides[1]],m=[t.kernelShape[l?1:2],t.kernelShape[l?2:3]],u=[t.dilations[0],t.dilations[1]],h=[m[0]+(t.dilations[0]<=1?0:(t.kernelShape[l?1:2]-1)*(t.dilations[0]-1)),m[1]+(t.dilations[1]<=1?0:(t.kernelShape[l?2:3]-1)*(t.dilations[1]-1))],w=[h[0]-1-Math.floor((t.pads[0]+t.pads[2])/2),h[1]-1-Math.floor(t.pads[1]+t.pads[3])/2],g=!1,b=t.group,x=e[1].dims,_=x[0]/b,$=x[1],S=[{type:12,data:i},{type:12,data:p},{type:12,data:m},{type:12,data:u},{type:12,data:h},{type:6,data:w},{type:12,data:_},{type:12,data:$},...V(e[0].dims,e[1].dims)];n&&(S.push(...V(e[2].dims)),d.push("rank")),S.push(...V(o));let I=a[1]===1&&a[2]===1,T=A=>{let D=[{name:"output_size",type:"u32"},{name:"strides",type:"u32",length:p.length},{name:"filter_dims",type:"u32",length:m.length},{name:"dilations",type:"u32",length:m.length},{name:"effective_filter_dims",type:"u32",length:h.length},{name:"pads",type:"i32",length:w.length},{name:"input_channels_per_group",type:"u32"},{name:"output_channels_per_group",type:"u32"}],z=he(e[0].dataType);return`${Bm(A,e,o,n,I,g,z,D,l)}`};return{name:"ConvTranspose2D",shaderCache:{hint:`${t.cacheKey};`,inputDependencies:d},getRunData:()=>({dispatchGroup:{x:a[0],y:a[1],z:a[2]},outputs:[{dims:r?r(o):o,dataType:e[0].dataType}],programUniforms:S}),getShaderSource:T}}});var Rm,Mm,Um,Wu,Hu,Vm,Nm,Wm,Hm,Gu,Lu=U(()=>{"use strict";Vu();Nu();yt();Et();Rm=(e,t,r,n,o,i)=>(e-1)*t+r+(n-1)*o+1-i,Mm=(e,t,r,n,o)=>{let i=Math.floor(e/2);t==="SAME_UPPER"?(r[n]=i,r[o]=e-i):t==="SAME_LOWER"&&(r[n]=e-i,r[o]=i)},Um=(e,t,r,n,o,i,a,l,d,p)=>{let m=e.length-2,u=p.length===0;if(d.length===0)for(let g=0;g{let r=e.kernelShape.slice();if(e.kernelShape.length===0||e.kernelShape.reduce((u,h)=>u*h,1)===0){r.length=0;for(let u=2;uu+h,0)===0){let u=t[0].dims.length-2;d=new Array(u).fill(1)}let p=e.strides.slice();if(p.reduce((u,h)=>u+h,0)===0){let u=t[0].dims.length-2;p=new Array(u).fill(1)}Um(l,r,d,e.autoPad,e.group,o,p,n,a,i);let m=Object.assign({},e);return Object.assign(m,{kernelShape:r,pads:o,outputPadding:a,outputShape:i,dilations:d,strides:p}),m},Hu=e=>{let t=Yr(e),r=e.format,n=["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][typeof e.autoPad>"u"?0:e.autoPad],o=e.dilations,i=e.group,a=e.kernelShape,l=e.pads,d=e.strides,p=e.wIsConst(),m=e.outputPadding,u=e.outputShape;return{autoPad:n,format:r,dilations:o,group:i,kernelShape:a,outputPadding:m,outputShape:u,pads:l,strides:d,wIsConst:p,...t,cacheKey:`${e.format};${t.activation};`}},Vm=(e,t)=>{if(!e||e.length!==2&&e.length!==3)throw new Error("Conv requires 2 or 3 inputs");if(e[0].dims.length!==4&&e[0].dims.length!==3)throw new Error("currently only support 2-dimensional conv");if(e[0].dims.length!==e[1].dims.length)throw new Error("filter does not have same dimension as input");let r=e[0].dims[t.format==="NHWC"?e[0].dims.length-1:1],n=e[1].dims[0];if(r!==n)throw new Error("FILTER_IN_CHANNEL should be equal to DATA_CHANNEL");let o=e[1].dims[1]*t.group;if(e.length===3&&(e[2].dims.length!==1||e[2].dims[0]!==o))throw new 
Error("invalid bias");let i=e[0].dims.length-2;if(t.dilations.reduce((m,u)=>m+u,0)>0&&t.dilations.length!==i)throw new Error(`dilations should be ${i}D`);if(t.strides.reduce((m,u)=>m+u,0)>0&&t.strides.length!==i)throw new Error(`strides should be ${i}D`);if(t.pads.reduce((m,u)=>m+u,0)>0&&t.pads.length!==i*2)throw new Error(`pads should be ${i*2}D`);if(t.outputPadding.length!==i&&t.outputPadding.length!==0)throw new Error(`output_padding should be ${i}D`);if(t.kernelShape.reduce((m,u)=>m+u,0)>0&&t.kernelShape.length!==0&&t.kernelShape.length!==e[1].dims.length-2)throw new Error("invalid kernel shape");if(t.outputShape.length!==0&&t.outputShape.length!==e[0].dims.length-2)throw new Error("invalid output shape")},Nm=[2,3,1,0],Wm=(e,t,r)=>{let n=Wu(r,t),o=r.format==="NHWC",i=n.outputShape,a=i[o?3:1],l=t[0].dims[o?3:1];if(n.group!==1||a===1&&l===1){e.compute(yo(t,n));return}let d=i[o?1:2],p=i[o?2:3],m=t[1].dims[2],u=t[1].dims[3],h=o?d*p:a,w=o?a:d*p,g=m*u*l,b=!0,x=e.kernelCustomData.wT??e.compute(Ue(t[1],Nm),{inputs:[1],outputs:[r.wIsConst?-2:-1]})[0];r.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=x);let _=[t[0],x],$=t.length===3;$&&(!o&&t[2].dims.length===1?_.push(t[2].reshape([t[2].dims[0],1,1])):_.push(t[2])),e.compute(Uu(_,n,i,h,w,g,$,b),{inputs:_})},Hm=(e,t)=>{let r=t.format==="NHWC",n=[e.inputs[0].reshape(r?[e.inputs[0].dims[0],1,e.inputs[0].dims[1],e.inputs[0].dims[2]]:[e.inputs[0].dims[0],e.inputs[0].dims[1],1,e.inputs[0].dims[2]]),e.inputs[1].reshape([e.inputs[1].dims[0],e.inputs[1].dims[1],1,e.inputs[1].dims[2]])];e.inputs.length===3&&n.push(e.inputs[2]);let o=t.kernelShape;(o.length===0||o[0]===0)&&(o=[e.inputs[1].dims[2]]);let i=t.dilations;(i.length===0||i[0]===0)&&(i=[1]);let a=t.strides;(a.length===0||a[0]===0)&&(a=[1]);let l=t.pads;l.length===0&&(l=[0,0]),l=[0,l[0],0,l[1]],a=[1].concat(a),i=[1].concat(i),o=[1].concat(o);let d=Wu({...t,pads:l,strides:a,dilations:i,kernelShape:o},n);e.compute(yo(n,d,p=>r?[p[0],p[2],p[3]]:[p[0],p[1],p[3]]))},Gu=(e,t)=>{Vm(e.inputs,t),e.inputs[0].dims.length===3?Hm(e,t):Wm(e,e.inputs,t)}});var Gm,Fu,qu,ju=U(()=>{"use strict";Q();ae();Ce();oe();Gm=(e,t,r,n)=>{let o=k.size(t),i=t.length,a=E("input",e,i),l=M("output",e,i),d=r.dataType===6?r.getInt32Array()[0]:Number(r.getBigInt64Array()[0]),p=k.normalizeAxis(d,i),m=u=>{let h=` i32(${a.indicesGet("inputIndices","uniforms.axis")}) `,w=G("uniforms.input_shape","uniforms.axis",i),g=n.reverse?h+(n.exclusive?" 
+ 1":""):"0",b=n.reverse?w:h+(n.exclusive?"":" + 1");return` ${u.registerUniform("outputSize","u32").registerUniform("axis","u32").declareVariables(a,l)} ${u.mainStart()} ${u.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} @@ -1391,12 +1391,12 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; sum = sum + ${a.getByIndices("inputIndices")}; } ${l.setByOffset("global_idx","sum")}; - }`};return{name:"CumSum",shaderCache:{hint:n.cacheKey,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:t,dataType:e}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:[{type:12,data:o},{type:12,data:p},...V(t,t)]}),getShaderSource:m}},Fu=(e,t)=>{let r=e.inputs[0].dims,n=e.inputs[0].dataType,o=e.inputs[1];e.compute(Wm(n,r,o,t),{inputs:[0]})},qu=e=>{let t=e.exclusive===1,r=e.reverse===1;return te({exclusive:t,reverse:r})}});var Hm,Gm,Lm,ju,Yu,Xu=U(()=>{"use strict";Q();ae();Ce();oe();Hm=e=>{if(!e||e.length!==1)throw new Error("DepthToSpace requires 1 input.");if(e[0].dims.length!==4)throw new Error("DepthToSpace requires 4D input.")},Gm=(e,t,r,n)=>{let o=[];o.push(`fn perm(i: ${n.type.indices}) -> ${r.type.indices} { + }`};return{name:"CumSum",shaderCache:{hint:n.cacheKey,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:t,dataType:e}],dispatchGroup:{x:Math.ceil(o/64)},programUniforms:[{type:12,data:o},{type:12,data:p},...V(t,t)]}),getShaderSource:m}},Fu=(e,t)=>{let r=e.inputs[0].dims,n=e.inputs[0].dataType,o=e.inputs[1];e.compute(Gm(n,r,o,t),{inputs:[0]})},qu=e=>{let t=e.exclusive===1,r=e.reverse===1;return te({exclusive:t,reverse:r})}});var Lm,Fm,qm,Ku,Yu,Xu=U(()=>{"use strict";Q();ae();Ce();oe();Lm=e=>{if(!e||e.length!==1)throw new Error("DepthToSpace requires 1 input.");if(e[0].dims.length!==4)throw new Error("DepthToSpace requires 4D input.")},Fm=(e,t,r,n)=>{let o=[];o.push(`fn perm(i: ${n.type.indices}) -> ${r.type.indices} { var a: ${r.type.indices};`);for(let i=0;i{let r,n,o,i,a,l,d=t.format==="NHWC",p=t.blocksize,m=t.mode==="DCR";d?([r,n,o,i]=e.dims,a=m?[r,n,o,p,p,i/p**2]:[r,n,o,i/p**2,p,p],l=m?[0,1,3,2,4,5]:[0,1,4,2,5,3]):([r,n,o,i]=[e.dims[0],e.dims[2],e.dims[3],e.dims[1]],a=m?[r,p,p,i/p**2,n,o]:[r,i/p**2,p,p,n,o],l=m?[0,3,4,1,5,2]:[0,1,4,2,5,3]);let u=e.reshape(a),h=u.dims.length,w=e.dataType,g=E("a",w,h),b=M("output",w,h),x=_=>` +`)},qm=(e,t)=>{let r,n,o,i,a,l,d=t.format==="NHWC",p=t.blocksize,m=t.mode==="DCR";d?([r,n,o,i]=e.dims,a=m?[r,n,o,p,p,i/p**2]:[r,n,o,i/p**2,p,p],l=m?[0,1,3,2,4,5]:[0,1,4,2,5,3]):([r,n,o,i]=[e.dims[0],e.dims[2],e.dims[3],e.dims[1]],a=m?[r,p,p,i/p**2,n,o]:[r,i/p**2,p,p,n,o],l=m?[0,3,4,1,5,2]:[0,1,4,2,5,3]);let u=e.reshape(a),h=u.dims.length,w=e.dataType,g=E("a",w,h),b=M("output",w,h),x=_=>` ${_.registerUniform("output_size","u32").declareVariables(g,b)} - ${Gm(l,h,g,b)} + ${Fm(l,h,g,b)} ${_.mainStart()} ${_.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -1405,7 +1405,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let aIndices = perm(indices); ${b.setByOffset("global_idx",g.getByIndices("aIndices"))} - }`;return{name:"DepthToSpace",shaderCache:{hint:`${e.dims};${t.blocksize};${t.mode}`,inputDependencies:["rank"]},getRunData:_=>{let $=d?[r,n*p,o*p,i/p**2]:[r,i/p**2,n*p,o*p],S=k.size($),I=u.dims,T=k.sortBasedOnPerm(I,l);return{outputs:[{dims:$,dataType:_[0].dataType}],dispatchGroup:{x:Math.ceil(S/64)},programUniforms:[{type:12,data:S},...V(I,T)]}},getShaderSource:x}},ju=(e,t)=>{Hm(e.inputs),e.compute(Lm(e.inputs[0],t))},Yu=e=>te({blocksize:e.blocksize,mode:e.mode,format:e.format})});var 
yo,rn,Zu,Fm,qm,bo,wo,Qu,Km,Ju,ed,td=U(()=>{"use strict";Q();ae();Ce();oe();yo="[a-zA-Z]|\\.\\.\\.",rn="("+yo+")+",Zu="^"+rn+"$",Fm="("+rn+",)*"+rn,qm="^"+Fm+"$",bo=class{constructor(t=-1){this.symbolToIndices=new Map,this.inputIndex=t}addSymbol(t,r){let n=this.symbolToIndices.get(t);n===void 0?n=[r]:n.push(r),this.symbolToIndices.set(t,n)}},wo=class{constructor(t,r){this.equation=r;this.hasEllipsis=!1,this.symbolToInfo=new Map,this.lhs=new Array,this.outputDims=[];let[n,o]=r.includes("->")?r.split("->",2):[r,""];if(!n.match(RegExp(qm)))throw new Error("Invalid LHS term");if(n.split(",").forEach((l,d)=>{let p=t[d].dims.slice();if(!l.match(RegExp(Zu)))throw new Error("Invalid LHS term");let m=this.processTerm(l,!0,p,d);this.lhs.push(m)}),o==="")o+=[...this.symbolToInfo.entries()].filter(([l,d])=>d.count===1||l==="...").map(([l])=>l).join("");else if(!o.match(RegExp(rn)))throw new Error("Invalid RHS");o.match(RegExp(yo,"g"))?.forEach(l=>{if(l==="...")this.outputDims=this.outputDims.concat(this.ellipsisDims);else{let d=this.symbolToInfo.get(l);if(d===void 0)throw new Error("Invalid RHS symbol");this.outputDims.push(d.dimValue)}}),this.rhs=this.processTerm(o,!1,this.outputDims)}addSymbol(t,r,n){let o=this.symbolToInfo.get(t);if(o!==void 0){if(o.dimValue!==r&&o.count!==1)throw new Error("Dimension mismatch");o.count++,o.inputIndices.push(n)}else o={count:1,dimValue:r,inputIndices:[n]};this.symbolToInfo.set(t,o)}processTerm(t,r,n,o=-1){let i=n.length,a=!1,l=[],d=0;if(!t.match(RegExp(Zu))&&!r&&t!=="")throw new Error("Invalid LHS term");let p=t.match(RegExp(yo,"g")),m=new bo(o);return p?.forEach((u,h)=>{if(u==="..."){if(a)throw new Error("Only one ellipsis is allowed per input term");a=!0;let w=i-p.length+1;if(w<0)throw new Error("Ellipsis out of bounds");if(l=n.slice(d,d+w),this.hasEllipsis){if(this.ellipsisDims.length!==l.length||this.ellipsisDims.toString()!==l.toString())throw new Error("Ellipsis dimensions mismatch")}else if(r)this.hasEllipsis=!0,this.ellipsisDims=l;else throw new Error("Ellipsis must be specified in the LHS");for(let g=0;ge+"_max",Km=(e,t,r,n)=>{let i=e.map(m=>m.length).map((m,u)=>E(`input${u}`,t,m)),a=k.size(n),l=M("output",t,n.length),d=[...r.symbolToInfo.keys()].filter(m=>!r.rhs.symbolToIndices.has(m)),p=m=>{let u=[],h="var prod = 1.0;",w="var sum = 0.0;",g="sum += prod;",b=[],x=[],_=[],$=[],S=r.symbolToInfo.size===r.rhs.symbolToIndices.size;r.symbolToInfo.forEach((T,A)=>{if(r.rhs.symbolToIndices.has(A)){let z=r.rhs.symbolToIndices.get(A)?.[0];z!==void 0&&r.lhs.forEach((D,H)=>{if(T.inputIndices.includes(H)){let W=D.symbolToIndices.get(A);if(W===void 0)throw new Error("Invalid symbol error");W.forEach(F=>{u.push(`${i[H].indicesSet(`input${H}Indices`,F,l.indicesGet("outputIndices",z))}`)})}})}else r.lhs.forEach((z,D)=>{if(T.inputIndices.includes(D)){let H=z.symbolToIndices.get(A);if(H===void 0)throw new Error("Invalid symbol error");H.forEach(W=>{b.push(`${i[D].indicesSet(`input${D}Indices`,W,`${A}`)}`)}),$.push(`prod *= ${i[D].getByIndices(`input${D}Indices`)};`)}}),x.push(`for(var ${A}: u32 = 0; ${A} < uniforms.${Qu(A)}; ${A}++) {`),_.push("}")});let I=S?[...u,`let sum = ${i.map((T,A)=>T.getByIndices(`input${A}Indices`)).join(" * ")};`]:[...u,w,...x,...b,h,...$,g,..._];return` + }`;return{name:"DepthToSpace",shaderCache:{hint:`${e.dims};${t.blocksize};${t.mode}`,inputDependencies:["rank"]},getRunData:_=>{let 
$=d?[r,n*p,o*p,i/p**2]:[r,i/p**2,n*p,o*p],S=k.size($),I=u.dims,T=k.sortBasedOnPerm(I,l);return{outputs:[{dims:$,dataType:_[0].dataType}],dispatchGroup:{x:Math.ceil(S/64)},programUniforms:[{type:12,data:S},...V(I,T)]}},getShaderSource:x}},Ku=(e,t)=>{Lm(e.inputs),e.compute(qm(e.inputs[0],t))},Yu=e=>te({blocksize:e.blocksize,mode:e.mode,format:e.format})});var bo,rn,Zu,jm,Km,wo,vo,Qu,Ym,Ju,ed,td=U(()=>{"use strict";Q();ae();Ce();oe();bo="[a-zA-Z]|\\.\\.\\.",rn="("+bo+")+",Zu="^"+rn+"$",jm="("+rn+",)*"+rn,Km="^"+jm+"$",wo=class{constructor(t=-1){this.symbolToIndices=new Map,this.inputIndex=t}addSymbol(t,r){let n=this.symbolToIndices.get(t);n===void 0?n=[r]:n.push(r),this.symbolToIndices.set(t,n)}},vo=class{constructor(t,r){this.equation=r;this.hasEllipsis=!1,this.symbolToInfo=new Map,this.lhs=new Array,this.outputDims=[];let[n,o]=r.includes("->")?r.split("->",2):[r,""];if(!n.match(RegExp(Km)))throw new Error("Invalid LHS term");if(n.split(",").forEach((l,d)=>{let p=t[d].dims.slice();if(!l.match(RegExp(Zu)))throw new Error("Invalid LHS term");let m=this.processTerm(l,!0,p,d);this.lhs.push(m)}),o==="")o+=[...this.symbolToInfo.entries()].filter(([l,d])=>d.count===1||l==="...").map(([l])=>l).join("");else if(!o.match(RegExp(rn)))throw new Error("Invalid RHS");o.match(RegExp(bo,"g"))?.forEach(l=>{if(l==="...")this.outputDims=this.outputDims.concat(this.ellipsisDims);else{let d=this.symbolToInfo.get(l);if(d===void 0)throw new Error("Invalid RHS symbol");this.outputDims.push(d.dimValue)}}),this.rhs=this.processTerm(o,!1,this.outputDims)}addSymbol(t,r,n){let o=this.symbolToInfo.get(t);if(o!==void 0){if(o.dimValue!==r&&o.count!==1)throw new Error("Dimension mismatch");o.count++,o.inputIndices.push(n)}else o={count:1,dimValue:r,inputIndices:[n]};this.symbolToInfo.set(t,o)}processTerm(t,r,n,o=-1){let i=n.length,a=!1,l=[],d=0;if(!t.match(RegExp(Zu))&&!r&&t!=="")throw new Error("Invalid LHS term");let p=t.match(RegExp(bo,"g")),m=new wo(o);return p?.forEach((u,h)=>{if(u==="..."){if(a)throw new Error("Only one ellipsis is allowed per input term");a=!0;let w=i-p.length+1;if(w<0)throw new Error("Ellipsis out of bounds");if(l=n.slice(d,d+w),this.hasEllipsis){if(this.ellipsisDims.length!==l.length||this.ellipsisDims.toString()!==l.toString())throw new Error("Ellipsis dimensions mismatch")}else if(r)this.hasEllipsis=!0,this.ellipsisDims=l;else throw new Error("Ellipsis must be specified in the LHS");for(let g=0;ge+"_max",Ym=(e,t,r,n)=>{let i=e.map(m=>m.length).map((m,u)=>E(`input${u}`,t,m)),a=k.size(n),l=M("output",t,n.length),d=[...r.symbolToInfo.keys()].filter(m=>!r.rhs.symbolToIndices.has(m)),p=m=>{let u=[],h="var prod = 1.0;",w="var sum = 0.0;",g="sum += prod;",b=[],x=[],_=[],$=[],S=r.symbolToInfo.size===r.rhs.symbolToIndices.size;r.symbolToInfo.forEach((T,A)=>{if(r.rhs.symbolToIndices.has(A)){let D=r.rhs.symbolToIndices.get(A)?.[0];D!==void 0&&r.lhs.forEach((z,H)=>{if(T.inputIndices.includes(H)){let W=z.symbolToIndices.get(A);if(W===void 0)throw new Error("Invalid symbol error");W.forEach(F=>{u.push(`${i[H].indicesSet(`input${H}Indices`,F,l.indicesGet("outputIndices",D))}`)})}})}else r.lhs.forEach((D,z)=>{if(T.inputIndices.includes(z)){let H=D.symbolToIndices.get(A);if(H===void 0)throw new Error("Invalid symbol error");H.forEach(W=>{b.push(`${i[z].indicesSet(`input${z}Indices`,W,`${A}`)}`)}),$.push(`prod *= ${i[z].getByIndices(`input${z}Indices`)};`)}}),x.push(`for(var ${A}: u32 = 0; ${A} < uniforms.${Qu(A)}; ${A}++) {`),_.push("}")});let I=S?[...u,`let sum = 
${i.map((T,A)=>T.getByIndices(`input${A}Indices`)).join(" * ")};`]:[...u,w,...x,...b,h,...$,g,..._];return` ${m.registerUniforms(d.map(T=>({name:`${Qu(T)}`,type:"u32"}))).registerUniform("outputSize","u32").declareVariables(...i,l)} ${m.mainStart()} @@ -1416,7 +1416,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${I.join(` `)}; ${l.setByOffset("global_idx","sum")}; - }`};return{name:"Einsum",shaderCache:{hint:r.equation,inputDependencies:e.map(()=>"rank")},getRunData:()=>{let m=d.filter(h=>r.symbolToInfo.has(h)).map(h=>({type:12,data:r.symbolToInfo.get(h)?.dimValue||0}));m.push({type:12,data:a});let u=e.map((h,w)=>[...V(h)]).reduce((h,w)=>h.concat(w),m);return u.push(...V(n)),{outputs:[{dims:n,dataType:t}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:u}},getShaderSource:p}},Ju=(e,t)=>{let r=new wo(e.inputs,t.equation),n=r.outputDims,o=e.inputs.map((i,a)=>i.dims);e.compute(Km(o,e.inputs[0].dataType,r,n))},ed=e=>{let t=e.equation.replace(/\s+/g,"");return te({equation:t})}});var jm,rd,Ym,Xm,nd,od=U(()=>{"use strict";Q();ae();oe();jm=e=>{if(!e||e.length!==2)throw new Error("Expand requires 2 input.");let t=e[0].dims,r=Array.from(e[1].getBigInt64Array(),Number),n=r.length{let r=e.length-t.length,n=[];for(let o=0;oe.length>t.length?rd(e,t):rd(t,e),Xm=e=>{let t=e[0].dims,r=Array.from(e[1].getBigInt64Array(),Number),n=Ym(t,r),o=e[0].dataType,i=o===9?4:1,a=Math.ceil(k.size(n)/i),l=p=>{let m=E("input",o,t.length,i),u=M("output",o,n.length,i),h;if(o===9){let w=(g,b,x="")=>` + }`};return{name:"Einsum",shaderCache:{hint:r.equation,inputDependencies:e.map(()=>"rank")},getRunData:()=>{let m=d.filter(h=>r.symbolToInfo.has(h)).map(h=>({type:12,data:r.symbolToInfo.get(h)?.dimValue||0}));m.push({type:12,data:a});let u=e.map((h,w)=>[...V(h)]).reduce((h,w)=>h.concat(w),m);return u.push(...V(n)),{outputs:[{dims:n,dataType:t}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:u}},getShaderSource:p}},Ju=(e,t)=>{let r=new vo(e.inputs,t.equation),n=r.outputDims,o=e.inputs.map((i,a)=>i.dims);e.compute(Ym(o,e.inputs[0].dataType,r,n))},ed=e=>{let t=e.equation.replace(/\s+/g,"");return te({equation:t})}});var Xm,rd,Zm,Qm,nd,od=U(()=>{"use strict";Q();ae();oe();Xm=e=>{if(!e||e.length!==2)throw new Error("Expand requires 2 input.");let t=e[0].dims,r=Array.from(e[1].getBigInt64Array(),Number),n=r.length{let r=e.length-t.length,n=[];for(let o=0;oe.length>t.length?rd(e,t):rd(t,e),Qm=e=>{let t=e[0].dims,r=Array.from(e[1].getBigInt64Array(),Number),n=Zm(t,r),o=e[0].dataType,i=o===9?4:1,a=Math.ceil(k.size(n)/i),l=p=>{let m=E("input",o,t.length,i),u=M("output",o,n.length,i),h;if(o===9){let w=(g,b,x="")=>` let outputIndices${b} = ${u.offsetToIndices(`outputOffset + ${b}u`)}; let offset${b} = ${m.broadcastedIndicesToOffset(`outputIndices${b}`,u)}; let index${b} = offset${b} / 4u; @@ -1438,13 +1438,13 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${p.registerUniform("vec_size","u32").declareVariables(m,u)} ${p.mainStart()} ${p.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.vec_size")} - ${h}`},d=[{type:12,data:a},...V(t,n)];return{name:"Expand",shaderCache:{hint:`${n.length}`,inputDependencies:["rank"]},getShaderSource:l,getRunData:()=>({outputs:[{dims:n,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:d})}},nd=e=>{jm(e.inputs),e.compute(Xm(e.inputs),{inputs:[0]})}});var Zm,id,ad=U(()=>{"use strict";Q();ae();oe();jr();Zm=e=>{let t=e[0].dataType,r=k.size(e[0].dims),n=k.size(e[1].dims),o=n%4===0,i=a=>{let 
l=E("x",t,[1],4),d=E("bias",t,[1],4),p=M("y",t,[1],4),m=[{name:"output_vec_size",type:"u32"},{name:"bias_size",type:"u32"}],u=w=>` + ${h}`},d=[{type:12,data:a},...V(t,n)];return{name:"Expand",shaderCache:{hint:`${n.length}`,inputDependencies:["rank"]},getShaderSource:l,getRunData:()=>({outputs:[{dims:n,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:d})}},nd=e=>{Xm(e.inputs),e.compute(Qm(e.inputs),{inputs:[0]})}});var Jm,id,ad=U(()=>{"use strict";Q();ae();oe();Kr();Jm=e=>{let t=e[0].dataType,r=k.size(e[0].dims),n=k.size(e[1].dims),o=n%4===0,i=a=>{let l=E("x",t,[1],4),d=E("bias",t,[1],4),p=M("y",t,[1],4),m=[{name:"output_vec_size",type:"u32"},{name:"bias_size",type:"u32"}],u=w=>` let bias${w}_offset: u32 = (global_idx * 4 + ${w}) % uniforms.bias_size; let bias${w} = ${d.getByOffset(`bias${w}_offset / 4`)}[bias${w}_offset % 4];`,h=o?` let bias = ${d.getByOffset("global_idx % (uniforms.bias_size / 4)")};`:`${u(0)}${u(1)}${u(2)}${u(3)} let bias = ${l.type.value}(bias0, bias1, bias2, bias3);`;return`${a.registerUniforms(m).declareVariables(l,d,p)} - ${oo(Oe(t))} + ${io(Oe(t))} ${a.mainStart(Tt)} ${a.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_vec_size")} @@ -1452,8 +1452,8 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let x = ${l.getByOffset("global_idx")}; ${h} let x_in = x + bias; - ${p.setByOffset("global_idx",io("x_in"))} - }`};return{name:"FastGeluWithBias",shaderCache:{hint:`${o}`,inputDependencies:["type","type"]},getShaderSource:i,getRunData:a=>({outputs:[{dims:a[0].dims,dataType:a[0].dataType}],programUniforms:[{type:12,data:Math.ceil(r/4)},{type:12,data:n}],dispatchGroup:{x:Math.ceil(r/Tt/4)}})}},id=e=>{e.inputs.length<2||k.size(e.inputs[1].dims)===0?au(e):e.compute(Zm(e.inputs))}});var Qm,Jm,sd,ud,dd=U(()=>{"use strict";Q();ae();Ce();oe();Qm=e=>{if(!e||e.length!==2)throw new Error("Gather requires 2 inputs.")},Jm=(e,t)=>{let r=e[0].dims,n=e[1].dims,o=r.length,i=k.normalizeAxis(t.axis,o),a=r.slice(0);a.splice(i,1,...n);let l=r[i],d=e[0].dataType===9?4:1,p=Math.ceil(k.size(a)/d),m=[{type:12,data:p},{type:6,data:l},{type:12,data:i},...V(e[0].dims,e[1].dims,a)],u=h=>{let w=E("data",e[0].dataType,e[0].dims.length,d),g=E("inputIndices",e[1].dataType,e[1].dims.length),b=M("output",e[0].dataType,a.length,d),x=$=>{let S=n.length,I=`var indicesIndices${$} = ${g.type.indices}(0);`;for(let T=0;T1?`indicesIndices${$}[${T}]`:`indicesIndices${$}`} = ${a.length>1?`outputIndices${$}[uniforms.axis + ${T}]`:`outputIndices${$}`};`;I+=` + ${p.setByOffset("global_idx",ao("x_in"))} + }`};return{name:"FastGeluWithBias",shaderCache:{hint:`${o}`,inputDependencies:["type","type"]},getShaderSource:i,getRunData:a=>({outputs:[{dims:a[0].dims,dataType:a[0].dataType}],programUniforms:[{type:12,data:Math.ceil(r/4)},{type:12,data:n}],dispatchGroup:{x:Math.ceil(r/Tt/4)}})}},id=e=>{e.inputs.length<2||k.size(e.inputs[1].dims)===0?au(e):e.compute(Jm(e.inputs))}});var ef,tf,sd,ud,dd=U(()=>{"use strict";Q();ae();Ce();oe();ef=e=>{if(!e||e.length!==2)throw new Error("Gather requires 2 inputs.")},tf=(e,t)=>{let r=e[0].dims,n=e[1].dims,o=r.length,i=k.normalizeAxis(t.axis,o),a=r.slice(0);a.splice(i,1,...n);let l=r[i],d=e[0].dataType===9?4:1,p=Math.ceil(k.size(a)/d),m=[{type:12,data:p},{type:6,data:l},{type:12,data:i},...V(e[0].dims,e[1].dims,a)],u=h=>{let w=E("data",e[0].dataType,e[0].dims.length,d),g=E("inputIndices",e[1].dataType,e[1].dims.length),b=M("output",e[0].dataType,a.length,d),x=$=>{let S=n.length,I=`var indicesIndices${$} = ${g.type.indices}(0);`;for(let 
T=0;T1?`indicesIndices${$}[${T}]`:`indicesIndices${$}`} = ${a.length>1?`outputIndices${$}[uniforms.axis + ${T}]`:`outputIndices${$}`};`;I+=` var idx${$} = ${g.getByIndices(`indicesIndices${$}`)}; if (idx${$} < 0) { idx${$} = idx${$} + uniforms.axisDimLimit; @@ -1484,8 +1484,8 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${h.mainStart()} ${h.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} ${_} - }`};return{name:"Gather",shaderCache:{hint:t.cacheKey,inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:a,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(p/64)},programUniforms:m}),getShaderSource:u}},sd=e=>te({axis:e.axis}),ud=(e,t)=>{let r=e.inputs;Qm(r),e.compute(Jm(e.inputs,t))}});var ef,tf,ld,cd,pd=U(()=>{"use strict";Q();ae();Ce();oe();ef=e=>{if(!e||e.length!==2)throw new Error("GatherElements requires 2 inputs.");if(e[0].dims.length<1)throw new Error("GatherElements requires that the data input be rank >= 1.");if(e[0].dims.length!==e[1].dims.length)throw new Error(`GatherElements requires that the data input and - indices input tensors be of same rank.`)},tf=(e,t)=>{let r=e[0].dims,n=e[0].dataType,o=r.length,i=e[1].dims,a=e[1].dataType,l=k.normalizeAxis(t.axis,o),d=r[l],p=i.slice(0),m=k.size(p),u=E("input",n,o),h=E("indicesInput",a,i.length),w=M("output",n,p.length),g=[{type:12,data:m},{type:6,data:d},{type:12,data:l}];return g.push(...V(r,i,p)),{name:"GatherElements",shaderCache:{inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:p,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(m/64)},programUniforms:g}),getShaderSource:_=>` + }`};return{name:"Gather",shaderCache:{hint:t.cacheKey,inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:a,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(p/64)},programUniforms:m}),getShaderSource:u}},sd=e=>te({axis:e.axis}),ud=(e,t)=>{let r=e.inputs;ef(r),e.compute(tf(e.inputs,t))}});var rf,nf,ld,cd,pd=U(()=>{"use strict";Q();ae();Ce();oe();rf=e=>{if(!e||e.length!==2)throw new Error("GatherElements requires 2 inputs.");if(e[0].dims.length<1)throw new Error("GatherElements requires that the data input be rank >= 1.");if(e[0].dims.length!==e[1].dims.length)throw new Error(`GatherElements requires that the data input and + indices input tensors be of same rank.`)},nf=(e,t)=>{let r=e[0].dims,n=e[0].dataType,o=r.length,i=e[1].dims,a=e[1].dataType,l=k.normalizeAxis(t.axis,o),d=r[l],p=i.slice(0),m=k.size(p),u=E("input",n,o),h=E("indicesInput",a,i.length),w=M("output",n,p.length),g=[{type:12,data:m},{type:6,data:d},{type:12,data:l}];return g.push(...V(r,i,p)),{name:"GatherElements",shaderCache:{inputDependencies:["rank","rank"]},getRunData:()=>({outputs:[{dims:p,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(m/64)},programUniforms:g}),getShaderSource:_=>` ${_.registerUniform("outputSize","u32").registerUniform("axisDimLimit","i32").registerUniform("axis","u32").declareVariables(u,h,w)} ${_.mainStart()} ${_.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} @@ -1501,7 +1501,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let value = ${u.getByIndices("inputIndices")}; ${w.setByOffset("global_idx","value")}; - }`}},ld=e=>te({axis:e.axis}),cd=(e,t)=>{let r=e.inputs;ef(r),e.compute(tf(e.inputs,t))}});var rf,nf,md,fd,hd=U(()=>{"use strict";Q();ae();oe();rf=e=>{if(!e)throw new Error("Input is missing");if(e.length<2||e.length>3)throw new Error("Invaid input number.");if(e.length===3&&e[2].dims.length>2)throw new Error("Invalid input shape of 
C");if(e[0].dataType!==e[1].dataType||e.length===3&&e[0].dataType!==e[2].dataType)throw new Error("Input types are mismatched")},nf=(e,t)=>{let r=e[0].dims.slice(),n=e[1].dims.slice(),[o,i,a]=Nr.getShapeOfGemmResult(r,t.transA,n,t.transB,e.length===3?e[2].dims:void 0),l=[o,i];if(!l)throw new Error("Can't use gemm on the given tensors");let d=k.size(l),p=[{type:12,data:d},{type:12,data:o},{type:12,data:i},{type:12,data:a},{type:1,data:t.alpha},{type:1,data:t.beta}],m=["type","type"];e.length===3&&(p.push(...V(e[2].dims)),m.push("rank")),p.push(...V(l));let u=h=>{let w="";t.transA&&t.transB?w="value += a[k * uniforms.M + m] * b[n * uniforms.K + k];":t.transA&&!t.transB?w="value += a[k * uniforms.M + m] * b[k * uniforms.N + n];":!t.transA&&t.transB?w="value += a[m * uniforms.K + k] * b[n * uniforms.K + k];":!t.transA&&!t.transB&&(w="value += a[m * uniforms.K + k] * b[k * uniforms.N + n];");let g=t.alpha===1?"":"value *= uniforms.alpha;",b=E("a",e[0].dataType,e[0].dims),x=E("b",e[1].dataType,e[1].dims),_=b.type.value,$=null,S=[b,x];e.length===3&&($=E("c",e[2].dataType,e[2].dims.length),S.push($));let I=M("output",e[0].dataType,l.length);S.push(I);let T=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"},{name:"alpha",type:"f32"},{name:"beta",type:"f32"}];return` + }`}},ld=e=>te({axis:e.axis}),cd=(e,t)=>{let r=e.inputs;rf(r),e.compute(nf(e.inputs,t))}});var of,af,md,fd,hd=U(()=>{"use strict";Q();ae();oe();of=e=>{if(!e)throw new Error("Input is missing");if(e.length<2||e.length>3)throw new Error("Invaid input number.");if(e.length===3&&e[2].dims.length>2)throw new Error("Invalid input shape of C");if(e[0].dataType!==e[1].dataType||e.length===3&&e[0].dataType!==e[2].dataType)throw new Error("Input types are mismatched")},af=(e,t)=>{let r=e[0].dims.slice(),n=e[1].dims.slice(),[o,i,a]=Nr.getShapeOfGemmResult(r,t.transA,n,t.transB,e.length===3?e[2].dims:void 0),l=[o,i];if(!l)throw new Error("Can't use gemm on the given tensors");let d=k.size(l),p=[{type:12,data:d},{type:12,data:o},{type:12,data:i},{type:12,data:a},{type:1,data:t.alpha},{type:1,data:t.beta}],m=["type","type"];e.length===3&&(p.push(...V(e[2].dims)),m.push("rank")),p.push(...V(l));let u=h=>{let w="";t.transA&&t.transB?w="value += a[k * uniforms.M + m] * b[n * uniforms.K + k];":t.transA&&!t.transB?w="value += a[k * uniforms.M + m] * b[k * uniforms.N + n];":!t.transA&&t.transB?w="value += a[m * uniforms.K + k] * b[n * uniforms.K + k];":!t.transA&&!t.transB&&(w="value += a[m * uniforms.K + k] * b[k * uniforms.N + n];");let g=t.alpha===1?"":"value *= uniforms.alpha;",b=E("a",e[0].dataType,e[0].dims),x=E("b",e[1].dataType,e[1].dims),_=b.type.value,$=null,S=[b,x];e.length===3&&($=E("c",e[2].dataType,e[2].dims.length),S.push($));let I=M("output",e[0].dataType,l.length);S.push(I);let T=[{name:"output_size",type:"u32"},{name:"M",type:"u32"},{name:"N",type:"u32"},{name:"K",type:"u32"},{name:"alpha",type:"f32"},{name:"beta",type:"f32"}];return` ${h.registerUniforms(T).declareVariables(...S)} ${h.mainStart()} @@ -1518,14 +1518,14 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${g} ${(()=>$!=null?`let cOffset = ${$.broadcastedIndicesToOffset("vec2(m, n)",I)}; value += ${_}(uniforms.beta) * ${$.getByOffset("cOffset")};`:"")()} output[global_idx] = value; - }`};return{name:"Gemm",shaderCache:{hint:`${t.cacheKey}`,inputDependencies:m},getRunData:()=>({outputs:[{dims:l,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(d/64)},programUniforms:p}),getShaderSource:u}},md=e=>{let 
t=e.transA,r=e.transB,n=e.alpha,o=e.beta;return{transA:t,transB:r,alpha:n,beta:o,cacheKey:`${e.transA};${e.transB};${e.alpha===1}`}},fd=(e,t)=>{rf(e.inputs),e.compute(nf(e.inputs,t))}});var Ve,sf,yd,gd,uf,tr,bd,vo=U(()=>{"use strict";Q();ae();Ce();Vr();qr();oe();Et();Ve=(e,t)=>e.length>t&&e[t].dims.length>0&&k.size(e[t].dims)>0?e[t]:void 0,sf=(e,t)=>{let r=e[0],n=Ve(e,1),o=Ve(e,2),i=Ve(e,3),a=Ve(e,4),l=Ve(e,5),d=Ve(e,6),p=Ve(e,7);if(r.dims.length!==3&&r.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let m=!1,u=r.dims[0],h=r.dims[1],w=r.dims.length===3?m?r.dims[2]/3:r.dims[2]:t.numHeads*r.dims[4],g=h,b=0,x=0,_=Math.floor(w/t.numHeads);if(d&&p){if(d.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(d.dims[0]!==u||d.dims[1]!==t.numHeads||d.dims[3]!==_)throw new Error('Input "past_key" shape (batch_size, num_heads, past_sequence_length, head_size)');if(p.dims[0]!==u||p.dims[1]!==t.numHeads||p.dims[3]!==_)throw new Error('Input "past_value" shape (batch_size, num_heads, past_sequence_length, head_size)');if(d.dims[2]!==p.dims[2])throw new Error('Input "past_key" and "past_value" shall have same dim 2 (past_sequence_length)');if(p.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 dimensions');b=d.dims[2],x=d.dims[2]}else if(d||p)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let $;if(n){if(r.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(n.dims.length<3||n.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(r.dims[0]!==n.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch size)');if(n.dims.length===3){if(n.dims[2]!==r.dims[2])throw new Error('Input "query" and "key" shall have same dim 2 (hidden_size)');$=2,g=n.dims[1]}else if(n.dims.length===5){if(n.dims[2]!==t.numHeads||n.dims[3]!==2||n.dims[4]!==_)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect "value" be none when "key" has packed kv format.');$=5,g=n.dims[1]}else{if(n.dims[1]!==t.numHeads||n.dims[3]!==_)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');$=0,g=n.dims[2]}}else{if(r.dims.length!==3&&r.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(r.dims.length===5&&(r.dims[2]!==t.numHeads||r.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');$=3}if(i){if(i.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimension');if(o&&r.dims.length===5&&r.dims[3]===2)throw new Error("bias is not allowed for packed kv.")}let S=0;if(a){S=8;let D=a.dims;throw D.length===1?D[0]===u?S=1:D[0]===3*u+2&&(S=3):D.length===2&&D[0]===u&&D[1]===g&&(S=5),S===8?new Error('Input "key_padding_mask" shape shall be (batch_size) or (batch_size, kv_sequence_length)'):new Error("Mask not supported")}let I=!1,T=w;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(r.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(g!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');T=o.dims[2]}else{if(g!==o.dims[2])throw new Error('Input "past_key" 
and "past_value" shall have the same dim 2 (kv_sequence_length)');T=o.dims[1]*o.dims[3],I=!0}}let A=b+g,z=!1;if(a)throw new Error("Key padding mask is not supported");if(l){if(l.dims.length!==4)throw new Error('Input "relative_position_bias" is expected to have 4 dimensions');if(l.dims[0]!==u&&l.dims[0]!==1||l.dims[1]!==t.numHeads||l.dims[2]!==h||l.dims[3]!==A)throw new Error('Input "relative_position_bias" shape (batch_size, 1, sequence_length, kv_sequence_length)')}return{batchSize:u,sequenceLength:h,pastSequenceLength:b,kvSequenceLength:g,totalSequenceLength:A,maxSequenceLength:x,inputHiddenSize:0,hiddenSize:w,vHiddenSize:T,headSize:_,vHeadSize:Math.floor(T/t.numHeads),numHeads:t.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:t.maskFilterValue,maskType:S,scale:t.scale,broadcastResPosBias:z,passPastInKv:I,qkvFormat:$}},yd=e=>te({...e}),gd=te({perm:[0,2,1,3]}),uf=(e,t,r,n,o,i,a)=>{let l=[n,o,i],d=k.size(l),p=[{type:12,data:d},{type:12,data:a},{type:12,data:i}],m=u=>{let h=M("qkv_with_bias",t.dataType,l),w=E("qkv",t.dataType,l),g=E("bias",r.dataType,l),b=[{name:"output_size",type:"u32"},{name:"bias_offset",type:"u32"},{name:"hidden_size",type:"u32"}];return` + }`};return{name:"Gemm",shaderCache:{hint:`${t.cacheKey}`,inputDependencies:m},getRunData:()=>({outputs:[{dims:l,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(d/64)},programUniforms:p}),getShaderSource:u}},md=e=>{let t=e.transA,r=e.transB,n=e.alpha,o=e.beta;return{transA:t,transB:r,alpha:n,beta:o,cacheKey:`${e.transA};${e.transB};${e.alpha===1}`}},fd=(e,t)=>{of(e.inputs),e.compute(af(e.inputs,t))}});var Ve,df,yd,gd,lf,tr,bd,$o=U(()=>{"use strict";Q();ae();Ce();Vr();qr();oe();Et();Ve=(e,t)=>e.length>t&&e[t].dims.length>0&&k.size(e[t].dims)>0?e[t]:void 0,df=(e,t)=>{let r=e[0],n=Ve(e,1),o=Ve(e,2),i=Ve(e,3),a=Ve(e,4),l=Ve(e,5),d=Ve(e,6),p=Ve(e,7);if(r.dims.length!==3&&r.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let m=!1,u=r.dims[0],h=r.dims[1],w=r.dims.length===3?m?r.dims[2]/3:r.dims[2]:t.numHeads*r.dims[4],g=h,b=0,x=0,_=Math.floor(w/t.numHeads);if(d&&p){if(d.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(d.dims[0]!==u||d.dims[1]!==t.numHeads||d.dims[3]!==_)throw new Error('Input "past_key" shape (batch_size, num_heads, past_sequence_length, head_size)');if(p.dims[0]!==u||p.dims[1]!==t.numHeads||p.dims[3]!==_)throw new Error('Input "past_value" shape (batch_size, num_heads, past_sequence_length, head_size)');if(d.dims[2]!==p.dims[2])throw new Error('Input "past_key" and "past_value" shall have same dim 2 (past_sequence_length)');if(p.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 dimensions');b=d.dims[2],x=d.dims[2]}else if(d||p)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let $;if(n){if(r.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(n.dims.length<3||n.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(r.dims[0]!==n.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch size)');if(n.dims.length===3){if(n.dims[2]!==r.dims[2])throw new Error('Input "query" and "key" shall have same dim 2 (hidden_size)');$=2,g=n.dims[1]}else if(n.dims.length===5){if(n.dims[2]!==t.numHeads||n.dims[3]!==2||n.dims[4]!==_)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect 
"value" be none when "key" has packed kv format.');$=5,g=n.dims[1]}else{if(n.dims[1]!==t.numHeads||n.dims[3]!==_)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');$=0,g=n.dims[2]}}else{if(r.dims.length!==3&&r.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(r.dims.length===5&&(r.dims[2]!==t.numHeads||r.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');$=3}if(i){if(i.dims.length!==1)throw new Error('Input "bias" is expected to have 1 dimension');if(o&&r.dims.length===5&&r.dims[3]===2)throw new Error("bias is not allowed for packed kv.")}let S=0;if(a){S=8;let z=a.dims;throw z.length===1?z[0]===u?S=1:z[0]===3*u+2&&(S=3):z.length===2&&z[0]===u&&z[1]===g&&(S=5),S===8?new Error('Input "key_padding_mask" shape shall be (batch_size) or (batch_size, kv_sequence_length)'):new Error("Mask not supported")}let I=!1,T=w;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(r.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(g!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');T=o.dims[2]}else{if(g!==o.dims[2])throw new Error('Input "past_key" and "past_value" shall have the same dim 2 (kv_sequence_length)');T=o.dims[1]*o.dims[3],I=!0}}let A=b+g,D=!1;if(a)throw new Error("Key padding mask is not supported");if(l){if(l.dims.length!==4)throw new Error('Input "relative_position_bias" is expected to have 4 dimensions');if(l.dims[0]!==u&&l.dims[0]!==1||l.dims[1]!==t.numHeads||l.dims[2]!==h||l.dims[3]!==A)throw new Error('Input "relative_position_bias" shape (batch_size, 1, sequence_length, kv_sequence_length)')}return{batchSize:u,sequenceLength:h,pastSequenceLength:b,kvSequenceLength:g,totalSequenceLength:A,maxSequenceLength:x,inputHiddenSize:0,hiddenSize:w,vHiddenSize:T,headSize:_,vHeadSize:Math.floor(T/t.numHeads),numHeads:t.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:t.maskFilterValue,maskType:S,scale:t.scale,broadcastResPosBias:D,passPastInKv:I,qkvFormat:$}},yd=e=>te({...e}),gd=te({perm:[0,2,1,3]}),lf=(e,t,r,n,o,i,a)=>{let l=[n,o,i],d=k.size(l),p=[{type:12,data:d},{type:12,data:a},{type:12,data:i}],m=u=>{let h=M("qkv_with_bias",t.dataType,l),w=E("qkv",t.dataType,l),g=E("bias",r.dataType,l),b=[{name:"output_size",type:"u32"},{name:"bias_offset",type:"u32"},{name:"hidden_size",type:"u32"}];return` ${u.registerUniforms(b).declareVariables(w,g,h)} ${u.mainStart()} ${u.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset; qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx]; - }`};return e.compute({name:"MultiHeadAttentionAddBias",shaderCache:{inputDependencies:["type","type"]},getRunData:()=>({outputs:[{dims:l,dataType:t.dataType,gpuDataType:0}],dispatchGroup:{x:Math.ceil(d/64)},programUniforms:p}),getShaderSource:m},{inputs:[t,r],outputs:[-1]})[0]},tr=(e,t,r,n,o,i,a,l)=>{let d=i;if(a){if(n===1)throw new Error("AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV");return d=uf(e,i,a,t,n,r*o,l),d=d.reshape([t,n,r,o]),e.compute(Ue(d,gd.perm),{inputs:[d],outputs:[-1]})[0]}else return i.dims.length===3&&(d=i.reshape([t,n,r,o])),e.compute(Ue(d,gd.perm),{inputs:[d],outputs:[-1]})[0]},bd=(e,t)=>{let r=sf(e.inputs,t),n=e.inputs[0],o=Ve(e.inputs,1),i=Ve(e.inputs,2),a=Ve(e.inputs,3),l=Ve(e.inputs,4),d=Ve(e.inputs,5),p=Ve(e.inputs,6),m=Ve(e.inputs,7);if(n.dims.length===5)throw new Error("Packed QKV is not implemented");if(o?.dims.length===5)throw new Error("Packed KV is not implemented");let u=o&&i&&o.dims.length===4&&i.dims.length===4,h=tr(e,r.batchSize,r.numHeads,r.sequenceLength,r.headSize,n,a,0);if(u)return Nt(e,h,o,i,l,void 0,p,m,d,r,t);if(!o||!i)throw new Error("key and value must be provided");let w=tr(e,r.batchSize,r.numHeads,r.kvSequenceLength,r.headSize,o,a,r.hiddenSize),g=tr(e,r.batchSize,r.numHeads,r.kvSequenceLength,r.vHeadSize,i,a,2*r.hiddenSize);Nt(e,h,w,g,l,void 0,p,m,d,r,t)}});var wd,df,lf,$o,vd,_o=U(()=>{"use strict";Q();ae();oe();wd=e=>Array.from(e.getBigInt64Array(),Number),df=e=>{if(!e||e.length!==2)throw new Error("Tile requires 2 inputs.");if(e[0].dataType!==1&&e[0].dataType!==10&&e[0].dataType!==6&&e[0].dataType!==12)throw new Error("Tile only support float, float16, int32, and uint32 data types");if(e[1].dataType!==7)throw new Error("Tile `repeats` input should be of int64 data type");if(e[1].dims.length!==1)throw new Error("Tile `repeats` input should be 1-D");if(wd(e[1]).length!==e[0].dims.length)throw new Error("Tile `repeats` input should have same number of elements as rank of input data tensor")},lf=(e,t)=>{let r=[];for(let n=0;n{let r=e[0].dims,n=t??wd(e[1]),o=lf(r,n),i=k.size(o),a=e[0].dataType,l=E("input",a,r.length),d=M("output",a,o.length),p=m=>` + }`};return e.compute({name:"MultiHeadAttentionAddBias",shaderCache:{inputDependencies:["type","type"]},getRunData:()=>({outputs:[{dims:l,dataType:t.dataType,gpuDataType:0}],dispatchGroup:{x:Math.ceil(d/64)},programUniforms:p}),getShaderSource:m},{inputs:[t,r],outputs:[-1]})[0]},tr=(e,t,r,n,o,i,a,l)=>{let d=i;if(a){if(n===1)throw new Error("AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV");return d=lf(e,i,a,t,n,r*o,l),d=d.reshape([t,n,r,o]),e.compute(Ue(d,gd.perm),{inputs:[d],outputs:[-1]})[0]}else return i.dims.length===3&&(d=i.reshape([t,n,r,o])),e.compute(Ue(d,gd.perm),{inputs:[d],outputs:[-1]})[0]},bd=(e,t)=>{let r=df(e.inputs,t),n=e.inputs[0],o=Ve(e.inputs,1),i=Ve(e.inputs,2),a=Ve(e.inputs,3),l=Ve(e.inputs,4),d=Ve(e.inputs,5),p=Ve(e.inputs,6),m=Ve(e.inputs,7);if(n.dims.length===5)throw new Error("Packed QKV is not implemented");if(o?.dims.length===5)throw new Error("Packed KV is not implemented");let u=o&&i&&o.dims.length===4&&i.dims.length===4,h=tr(e,r.batchSize,r.numHeads,r.sequenceLength,r.headSize,n,a,0);if(u)return Wt(e,h,o,i,l,void 0,p,m,d,r,t);if(!o||!i)throw new Error("key and value must be provided");let w=tr(e,r.batchSize,r.numHeads,r.kvSequenceLength,r.headSize,o,a,r.hiddenSize),g=tr(e,r.batchSize,r.numHeads,r.kvSequenceLength,r.vHeadSize,i,a,2*r.hiddenSize);Wt(e,h,w,g,l,void 0,p,m,d,r,t)}});var wd,cf,pf,_o,vd,xo=U(()=>{"use strict";Q();ae();oe();wd=e=>Array.from(e.getBigInt64Array(),Number),cf=e=>{if(!e||e.length!==2)throw new Error("Tile requires 2 inputs.");if(e[0].dataType!==1&&e[0].dataType!==10&&e[0].dataType!==6&&e[0].dataType!==12)throw new Error("Tile only support float, float16, int32, and uint32 data types");if(e[1].dataType!==7)throw new Error("Tile `repeats` input should be of int64 data type");if(e[1].dims.length!==1)throw new Error("Tile `repeats` input should be 1-D");if(wd(e[1]).length!==e[0].dims.length)throw new Error("Tile `repeats` input should have same number of elements as rank of input data tensor")},pf=(e,t)=>{let r=[];for(let n=0;n{let r=e[0].dims,n=t??wd(e[1]),o=pf(r,n),i=k.size(o),a=e[0].dataType,l=E("input",a,r.length),d=M("output",a,o.length),p=m=>` const inputShape = ${l.indices(...r)}; ${m.registerUniform("output_size","u32").declareVariables(l,d)} ${m.mainStart()} @@ -1539,7 +1539,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${l.indicesSet("input_indices","i","input_dim_value")} } ${d.setByOffset("global_idx",l.getByIndices("input_indices"))} - }`;return{name:"Tile",shaderCache:{hint:`${n}`,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:o,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)},programUniforms:[{type:12,data:i},...V(e[0].dims,o)]}),getShaderSource:p}},vd=e=>{df(e.inputs),e.compute($o(e.inputs),{inputs:[0]})}});var cf,$d,xd,pf,_d,Sd,Id=U(()=>{"use strict";Q();ae();Ce();qr();oe();vo();_o();Et();cf=(e,t)=>{let r=e[0],n=e[1],o=e[2],i=e[3],a=e[4];if(r.dims.length!==3&&r.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let l=!1,d=r.dims[0],p=r.dims[1],m=r.dims.length===3?l?r.dims[2]/3:r.dims[2]:t.numHeads*r.dims[4],u=p,h=0,w=0,g=Math.floor(m/t.numHeads),b=i&&i.dims.length!==0,x=a&&a.dims.length!==0,_=!0;if(b&&x){if(i.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(a.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 dimensions');_?(h=i.dims[1],w=i.dims[1]):(h=i.dims[2],w=i.dims[2])}else if(b||x)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let $;if(n){if(r.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(n.dims.length<3||n.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(r.dims[0]!==n.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch 
size)');if(n.dims.length===3){if(r.dims[2]%n.dims[2]!==0)throw new Error('Dimension 2 of "query" should be a multiple of "key"');$=2,u=n.dims[1]}else if(n.dims.length===5){if(n.dims[2]!==t.numHeads||n.dims[3]!==2||n.dims[4]!==g)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect "value" be none when "key" has packed kv format.');$=5,u=n.dims[1]}else{if(n.dims[1]!==t.numHeads||n.dims[3]!==g)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');$=0,u=n.dims[2]}}else{if(r.dims.length!==3&&r.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(r.dims.length===5&&(r.dims[2]!==t.numHeads||r.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');$=3}let S=0,I=!1,T=m;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(r.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(u!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');T=o.dims[2]}else{if(u!==o.dims[2])throw new Error('Input "past_key" and "past_value" shall have the same dim 2 (kv_sequence_length)');T=o.dims[1]*o.dims[3],I=!0}}let A=h+u,z=!1;return{batchSize:d,sequenceLength:p,pastSequenceLength:h,kvSequenceLength:u,totalSequenceLength:A,maxSequenceLength:w,inputHiddenSize:0,hiddenSize:m,vHiddenSize:T,headSize:g,vHeadSize:Math.floor(T/t.kvNumHeads),numHeads:t.numHeads,kvNumHeads:t.kvNumHeads,nReps:t.numHeads/t.kvNumHeads,pastPresentShareBuffer:!1,maskType:S,scale:t.scale,broadcastResPosBias:z,passPastInKv:I,qkvFormat:$,isPastkvBSNH:_}},$d=(e,t,r,n)=>{let o=[n.batchSize,n.totalSequenceLength,n.kvNumHeads,n.headSize],i=4,a=k.size(o)/i,l=n.totalSequenceLength,d=M("present_kv",r,o.length,i),p=E("new_kv",e.dataType,e.dims.length,i),m=t?E("past_kv",t.dataType,t.dims.length,i):void 0,u=Math.ceil(n.headSize/i),h={x:l,y:e.dims[0],z:1},w=t?["rank","rank"]:["rank"],g=[{type:12,data:a},{type:12,data:n.pastSequenceLength},{type:12,data:n.kvSequenceLength},{type:12,data:n.totalSequenceLength}],b=[p];m?(g.push(...V(e.dims),...V(t.dims),...V(o)),b.push(m)):g.push(...V(e.dims),...V(o));let x=[{name:"output_size",type:"u32"},{name:"past_seqlen",type:"u32"},{name:"new_seqlen",type:"u32"},{name:"present_seqlen",type:"u32"}],_=` let past_batch_stride = uniforms.past_seqlen * num_heads * H; + }`;return{name:"Tile",shaderCache:{hint:`${n}`,inputDependencies:["rank"]},getRunData:()=>({outputs:[{dims:o,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(i/64)},programUniforms:[{type:12,data:i},...V(e[0].dims,o)]}),getShaderSource:p}},vd=e=>{cf(e.inputs),e.compute(_o(e.inputs),{inputs:[0]})}});var mf,$d,xd,ff,_d,Sd,Id=U(()=>{"use strict";Q();ae();Ce();qr();oe();$o();xo();Et();mf=(e,t)=>{let r=e[0],n=e[1],o=e[2],i=e[3],a=e[4];if(r.dims.length!==3&&r.dims.length!==5)throw new Error("Input query is expected to have 3 or 5 dimensions");let l=!1,d=r.dims[0],p=r.dims[1],m=r.dims.length===3?l?r.dims[2]/3:r.dims[2]:t.numHeads*r.dims[4],u=p,h=0,w=0,g=Math.floor(m/t.numHeads),b=i&&i.dims.length!==0,x=a&&a.dims.length!==0,_=!0;if(b&&x){if(i.dims.length!==4)throw new Error('Input "past_key" is expected to have 4 dimensions');if(a.dims.length!==4)throw new Error('Input "past_value" is expected to have 4 
dimensions');_?(h=i.dims[1],w=i.dims[1]):(h=i.dims[2],w=i.dims[2])}else if(b||x)throw new Error('Input "past_key" and "past_value" shall be both present or both absent');let $;if(n){if(r.dims.length!==3)throw new Error('Input "query" is expected to have 3 dimensions when key is given');if(n.dims.length<3||n.dims.length>5)throw new Error('Input "key" is expected to have 3, 4, or 5 dimensions');if(r.dims[0]!==n.dims[0])throw new Error('Input "query" and "key" shall have same dim 0 (batch size)');if(n.dims.length===3){if(r.dims[2]%n.dims[2]!==0)throw new Error('Dimension 2 of "query" should be a multiple of "key"');$=2,u=n.dims[1]}else if(n.dims.length===5){if(n.dims[2]!==t.numHeads||n.dims[3]!==2||n.dims[4]!==g)throw new Error('Expect "key" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(o)throw new Error('Expect "value" be none when "key" has packed kv format.');$=5,u=n.dims[1]}else{if(n.dims[1]!==t.numHeads||n.dims[3]!==g)throw new Error('Expect "key" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');$=0,u=n.dims[2]}}else{if(r.dims.length!==3&&r.dims.length!==5)throw new Error('Input "query" is expected to have 3 or 5 dimensions when key is empty');if(r.dims.length===5&&(r.dims[2]!==t.numHeads||r.dims[3]!==3))throw new Error('Expect "query" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');$=3}let S=0,I=!1,T=m;if(o){if(o.dims.length!==3&&o.dims.length!==4)throw new Error('Input "value" is expected to have 3 or 4 dimensions');if(r.dims[0]!==o.dims[0])throw new Error('Input "query" and "value" shall have same dim 0 (batch_size)');if(o.dims.length===3){if(u!==o.dims[1])throw new Error('Input "key" and "value" shall have the same dim 1 (kv_sequence_length)');T=o.dims[2]}else{if(u!==o.dims[2])throw new Error('Input "past_key" and "past_value" shall have the same dim 2 (kv_sequence_length)');T=o.dims[1]*o.dims[3],I=!0}}let A=h+u,D=!1;return{batchSize:d,sequenceLength:p,pastSequenceLength:h,kvSequenceLength:u,totalSequenceLength:A,maxSequenceLength:w,inputHiddenSize:0,hiddenSize:m,vHiddenSize:T,headSize:g,vHeadSize:Math.floor(T/t.kvNumHeads),numHeads:t.numHeads,kvNumHeads:t.kvNumHeads,nReps:t.numHeads/t.kvNumHeads,pastPresentShareBuffer:!1,maskType:S,scale:t.scale,broadcastResPosBias:D,passPastInKv:I,qkvFormat:$,isPastkvBSNH:_}},$d=(e,t,r,n)=>{let o=[n.batchSize,n.totalSequenceLength,n.kvNumHeads,n.headSize],i=4,a=k.size(o)/i,l=n.totalSequenceLength,d=M("present_kv",r,o.length,i),p=E("new_kv",e.dataType,e.dims.length,i),m=t?E("past_kv",t.dataType,t.dims.length,i):void 0,u=Math.ceil(n.headSize/i),h={x:l,y:e.dims[0],z:1},w=t?["rank","rank"]:["rank"],g=[{type:12,data:a},{type:12,data:n.pastSequenceLength},{type:12,data:n.kvSequenceLength},{type:12,data:n.totalSequenceLength}],b=[p];m?(g.push(...V(e.dims),...V(t.dims),...V(o)),b.push(m)):g.push(...V(e.dims),...V(o));let x=[{name:"output_size",type:"u32"},{name:"past_seqlen",type:"u32"},{name:"new_seqlen",type:"u32"},{name:"present_seqlen",type:"u32"}],_=` let past_batch_stride = uniforms.past_seqlen * num_heads * H; var past_head_stride = uniforms.past_seqlen * H; if (is_bsnh) { past_head_stride = H; @@ -1585,7 +1585,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let out_offset = b * present_batch_stride + s * row_stride + n * present_head_stride + h; ${S} - 
}`;return{name:"ConcatPastNew",shaderCache:{hint:`${n.kvNumHeads}${u}${!!t}`,inputDependencies:w},getRunData:()=>({outputs:[{dims:o,dataType:r}],dispatchGroup:h,programUniforms:g}),getShaderSource:I}},xd=e=>te({...e}),pf=te({perm:[0,2,1,3]}),_d=(e,t,r,n,o)=>{let i=t,a=n.kvNumHeads,l=n.nReps;return t.dims.length===3&&n.kvSequenceLength!==0&&(i=t.reshape([n.batchSize,n.kvSequenceLength,a,n.headSize])),r?i=e.compute($d(i,r,i.dataType,n),{inputs:[i,r],outputs:[n.isPastkvBSNH?o:-1]})[0]:i=e.compute($d(i,void 0,i.dataType,n),{inputs:[i],outputs:[n.isPastkvBSNH?o:-1]})[0],l!==1&&(i=e.compute($o([i],[1,1,1,l]),{inputs:[i],outputs:[-1]})[0],i=i.reshape([n.batchSize,n.totalSequenceLength,a*l,n.headSize])),e.compute(Ue(i,pf.perm),{inputs:[i],outputs:[-1]})[0]},Sd=(e,t)=>{let r=cf(e.inputs,t);if(e.inputs[0].dims.length===5)throw new Error("Packed QKV is not implemented");if(e.inputs[1]?.dims.length===5)throw new Error("Packed KV is not implemented");let n=tr(e,r.batchSize,r.numHeads,r.sequenceLength,r.headSize,e.inputs[0],void 0,0),o=e.inputs[3]&&e.inputs[3].dims.length!==0?e.inputs[3]:void 0,i=e.inputs[4]&&e.inputs[4].dims.length!==0?e.inputs[4]:void 0,a=_d(e,e.inputs[1],o,r,1),l=_d(e,e.inputs[2],i,r,2);Nt(e,n,a,l,void 0,void 0,void 0,void 0,void 0,r,t)}});var mf,ff,hf,Cd,Td=U(()=>{"use strict";Q();ae();oe();mf=(e,t)=>{let r=e[0].dims,n=r,o=2,i=k.sizeToDimension(r,o),a=k.sizeFromDimension(r,o),l=we(a),d=a/l,p=[r[0],r[1],d],m=["rank","type","type"],u=[{type:12,data:a},{type:12,data:d}];u.push(...V(p,p));let h=w=>{let g=E("x",e[0].dataType,p.length,l),b=E("scale",e[1].dataType,e[1].dims),x=E("bias",e[2].dataType,e[2].dims),_=M("output",e[0].dataType,p.length,l),$=[g,b,x,_],S=g.type.value,I=l===1?"f32":`vec${l}`,T=64,A=[{name:"normSize",type:"u32"},{name:"normPackedSize",type:"u32"}];return` + }`;return{name:"ConcatPastNew",shaderCache:{hint:`${n.kvNumHeads}${u}${!!t}`,inputDependencies:w},getRunData:()=>({outputs:[{dims:o,dataType:r}],dispatchGroup:h,programUniforms:g}),getShaderSource:I}},xd=e=>te({...e}),ff=te({perm:[0,2,1,3]}),_d=(e,t,r,n,o)=>{let i=t,a=n.kvNumHeads,l=n.nReps;return t.dims.length===3&&n.kvSequenceLength!==0&&(i=t.reshape([n.batchSize,n.kvSequenceLength,a,n.headSize])),r?i=e.compute($d(i,r,i.dataType,n),{inputs:[i,r],outputs:[n.isPastkvBSNH?o:-1]})[0]:i=e.compute($d(i,void 0,i.dataType,n),{inputs:[i],outputs:[n.isPastkvBSNH?o:-1]})[0],l!==1&&(i=e.compute(_o([i],[1,1,1,l]),{inputs:[i],outputs:[-1]})[0],i=i.reshape([n.batchSize,n.totalSequenceLength,a*l,n.headSize])),e.compute(Ue(i,ff.perm),{inputs:[i],outputs:[-1]})[0]},Sd=(e,t)=>{let r=mf(e.inputs,t);if(e.inputs[0].dims.length===5)throw new Error("Packed QKV is not implemented");if(e.inputs[1]?.dims.length===5)throw new Error("Packed KV is not implemented");let n=tr(e,r.batchSize,r.numHeads,r.sequenceLength,r.headSize,e.inputs[0],void 0,0),o=e.inputs[3]&&e.inputs[3].dims.length!==0?e.inputs[3]:void 0,i=e.inputs[4]&&e.inputs[4].dims.length!==0?e.inputs[4]:void 0,a=_d(e,e.inputs[1],o,r,1),l=_d(e,e.inputs[2],i,r,2);Wt(e,n,a,l,void 0,void 0,void 0,void 0,void 0,r,t)}});var hf,gf,yf,Cd,Td=U(()=>{"use strict";Q();ae();oe();hf=(e,t)=>{let r=e[0].dims,n=r,o=2,i=k.sizeToDimension(r,o),a=k.sizeFromDimension(r,o),l=we(a),d=a/l,p=[r[0],r[1],d],m=["rank","type","type"],u=[{type:12,data:a},{type:12,data:d}];u.push(...V(p,p));let h=w=>{let 
g=E("x",e[0].dataType,p.length,l),b=E("scale",e[1].dataType,e[1].dims),x=E("bias",e[2].dataType,e[2].dims),_=M("output",e[0].dataType,p.length,l),$=[g,b,x,_],S=g.type.value,I=l===1?"f32":`vec${l}`,T=64,A=[{name:"normSize",type:"u32"},{name:"normPackedSize",type:"u32"}];return` var meanShared : f32; var squaredNormShared : f32; var workgroupShared : array<${I}, ${T}>; @@ -1645,8 +1645,8 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let value = ${g.get("batch","channel","h")} * ${S}(${I}(channelScale)) + ${S}(${I}(channelShift)); ${_.set("batch","channel","h","value")}; } - }`};return{name:"InstanceNormalization",shaderCache:{hint:`${t.epsilon};${l}`,inputDependencies:m},getRunData:()=>({outputs:[{dims:n,dataType:e[0].dataType}],dispatchGroup:{x:i},programUniforms:u}),getShaderSource:h}},ff=(e,t,r,n,o,i,a,l)=>{let d=we(a),p=64,m=d===1?"vec2f":`mat2x${d}f`,u=d===1?"f32":`vec${d}f`,h=(A,z)=>`${m}(${A}, ${z})`,w=o*a/d,g=Math.ceil(i/p),b=["type"],x=[{type:12,data:g},{type:12,data:i},{type:12,data:Math.floor(a/d)},{type:12,data:Math.floor(i*a/d)}],_=A=>{let z=E("input",t.dataType,t.dims,d);return` - ${A.declareVariables(z)} + }`};return{name:"InstanceNormalization",shaderCache:{hint:`${t.epsilon};${l}`,inputDependencies:m},getRunData:()=>({outputs:[{dims:n,dataType:e[0].dataType}],dispatchGroup:{x:i},programUniforms:u}),getShaderSource:h}},gf=(e,t,r,n,o,i,a,l)=>{let d=we(a),p=64,m=d===1?"vec2f":`mat2x${d}f`,u=d===1?"f32":`vec${d}f`,h=(A,D)=>`${m}(${A}, ${D})`,w=o*a/d,g=Math.ceil(i/p),b=["type"],x=[{type:12,data:g},{type:12,data:i},{type:12,data:Math.floor(a/d)},{type:12,data:Math.floor(i*a/d)}],_=A=>{let D=E("input",t.dataType,t.dims,d);return` + ${A.declareVariables(D)} @group(0) @binding(1) var output : array<${m}>; struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32}; @group(0) @binding(2) var uniforms: Uniforms; @@ -1669,10 +1669,10 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; squaredSum += value * value; } output[global_idx] = ${h("sum","squaredSum")}; - }`},$=e.compute({name:"InstanceNormComputeMean",shaderCache:{hint:`${d}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:[o,a,p,2],dataType:1}],dispatchGroup:{x:o*a/d},programUniforms:x}),getShaderSource:_},{inputs:[t],outputs:[-1]})[0],S=[{type:12,data:w},{type:12,data:i},{type:12,data:Math.floor(a/d)},{type:12,data:Math.floor(p*a/d)}],I=["type","type","type"],T=A=>{let z=E("scale",r.dataType,r.dims,d),D=E("bias",n.dataType,n.dims,d);return` + }`},$=e.compute({name:"InstanceNormComputeMean",shaderCache:{hint:`${d}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:[o,a,p,2],dataType:1}],dispatchGroup:{x:o*a/d},programUniforms:x}),getShaderSource:_},{inputs:[t],outputs:[-1]})[0],S=[{type:12,data:w},{type:12,data:i},{type:12,data:Math.floor(a/d)},{type:12,data:Math.floor(p*a/d)}],I=["type","type","type"],T=A=>{let D=E("scale",r.dataType,r.dims,d),z=E("bias",n.dataType,n.dims,d);return` @group(0) @binding(0) var input : array<${m}>; - @group(0) @binding(1) var scale : array<${z.type.storage}>; - @group(0) @binding(2) var bias : array<${D.type.storage}>; + @group(0) @binding(1) var scale : array<${D.type.storage}>; + @group(0) @binding(2) var bias : array<${z.type.storage}>; @group(0) @binding(3) var output : array<${m}>; struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32}; @group(0) @binding(4) var uniforms: Uniforms; @@ -1697,7 +1697,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let channelShift = ${u}(bias[currentChannelNumber]) - sum * channelScale; 
output[global_idx] = ${h("channelScale","channelShift")}; - }`};return e.compute({name:"InstanceNormComputeChannelScaleShift",shaderCache:{hint:`${d};${l}`,inputDependencies:I},getRunData:()=>({outputs:[{dims:[o,a,2],dataType:1}],dispatchGroup:{x:Math.ceil(w/64)},programUniforms:S}),getShaderSource:T},{inputs:[$,r,n],outputs:[-1]})[0]},hf=(e,t,r)=>{let n=t[0].dims,o=n,i=n[0],a=n[n.length-1],l=k.sizeFromDimension(n,1)/a,d=we(a),p=k.size(o)/d,m=[{type:12,data:l},{type:12,data:Math.floor(a/d)}],u=["type","type"],h=ff(e,t[0],t[1],t[2],i,l,a,r.epsilon),w=g=>{let b=he(t[0].dataType),x=d===1?"vec2f":`mat2x${d}f`,_=d===1?b:`vec${d}<${b}>`,$=E("input",t[0].dataType,t[0].dims,d),S=M("output",t[0].dataType,o,d);return` + }`};return e.compute({name:"InstanceNormComputeChannelScaleShift",shaderCache:{hint:`${d};${l}`,inputDependencies:I},getRunData:()=>({outputs:[{dims:[o,a,2],dataType:1}],dispatchGroup:{x:Math.ceil(w/64)},programUniforms:S}),getShaderSource:T},{inputs:[$,r,n],outputs:[-1]})[0]},yf=(e,t,r)=>{let n=t[0].dims,o=n,i=n[0],a=n[n.length-1],l=k.sizeFromDimension(n,1)/a,d=we(a),p=k.size(o)/d,m=[{type:12,data:l},{type:12,data:Math.floor(a/d)}],u=["type","type"],h=gf(e,t[0],t[1],t[2],i,l,a,r.epsilon),w=g=>{let b=he(t[0].dataType),x=d===1?"vec2f":`mat2x${d}f`,_=d===1?b:`vec${d}<${b}>`,$=E("input",t[0].dataType,t[0].dims,d),S=M("output",t[0].dataType,o,d);return` @group(0) @binding(0) var input : array<${$.type.storage}>; @group(0) @binding(1) var scaleInput : array<${x}>; @group(0) @binding(2) var output : array<${S.type.storage}>; @@ -1711,10 +1711,10 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber; let scale = scaleInput[scaleOffset]; output[global_idx] = fma(input[global_idx], ${_}(scale[0]), ${_}(scale[1])); - }`};e.compute({name:"InstanceNormalizationNHWC",shaderCache:{hint:`${d}`,inputDependencies:u},getRunData:()=>({outputs:[{dims:o,dataType:t[0].dataType}],dispatchGroup:{x:Math.ceil(p/64)},programUniforms:m}),getShaderSource:w},{inputs:[t[0],h]})},Cd=(e,t)=>{t.format==="NHWC"?hf(e,e.inputs,t):e.compute(mf(e.inputs,t))}});var gf,yf,Ad,Ed=U(()=>{"use strict";Q();ae();oe();gf=e=>{if(!e||e.length<2)throw new Error("layerNorm requires at least 2 inputs.")},yf=(e,t,r)=>{let n=t.simplified,o=e[0].dims,i=e[1],a=!n&&e[2],l=o,d=k.normalizeAxis(t.axis,o.length),p=k.sizeToDimension(o,d),m=k.sizeFromDimension(o,d),u=k.size(i.dims),h=a?k.size(a.dims):0;if(u!==m||a&&h!==m)throw new Error(`Size of X.shape()[axis:] == ${m}. + }`};e.compute({name:"InstanceNormalizationNHWC",shaderCache:{hint:`${d}`,inputDependencies:u},getRunData:()=>({outputs:[{dims:o,dataType:t[0].dataType}],dispatchGroup:{x:Math.ceil(p/64)},programUniforms:m}),getShaderSource:w},{inputs:[t[0],h]})},Cd=(e,t)=>{t.format==="NHWC"?yf(e,e.inputs,t):e.compute(hf(e.inputs,t))}});var bf,wf,Ad,Ed=U(()=>{"use strict";Q();ae();oe();bf=e=>{if(!e||e.length<2)throw new Error("layerNorm requires at least 2 inputs.")},wf=(e,t,r)=>{let n=t.simplified,o=e[0].dims,i=e[1],a=!n&&e[2],l=o,d=k.normalizeAxis(t.axis,o.length),p=k.sizeToDimension(o,d),m=k.sizeFromDimension(o,d),u=k.size(i.dims),h=a?k.size(a.dims):0;if(u!==m||a&&h!==m)throw new Error(`Size of X.shape()[axis:] == ${m}. Size of scale and bias (if provided) must match this. 
- Got scale size of ${u} and bias size of ${h}`);let w=[];for(let T=0;T1,$=r>2,S=T=>{let A=he(e[0].dataType),z=[E("x",e[0].dataType,e[0].dims,g),E("scale",i.dataType,i.dims,g)];a&&z.push(E("bias",a.dataType,a.dims,g)),z.push(M("output",e[0].dataType,l,g)),_&&z.push(M("mean_data_output",1,w)),$&&z.push(M("inv_std_output",1,w));let D=[{name:"norm_count",type:"u32"},{name:"norm_size",type:"f32"},{name:"norm_size_vectorized",type:"u32"},{name:"epsilon",type:"f32"}];return` - ${T.registerUniforms(D).declareVariables(...z)} + Got scale size of ${u} and bias size of ${h}`);let w=[];for(let T=0;T1,$=r>2,S=T=>{let A=he(e[0].dataType),D=[E("x",e[0].dataType,e[0].dims,g),E("scale",i.dataType,i.dims,g)];a&&D.push(E("bias",a.dataType,a.dims,g)),D.push(M("output",e[0].dataType,l,g)),_&&D.push(M("mean_data_output",1,w)),$&&D.push(M("inv_std_output",1,w));let z=[{name:"norm_count",type:"u32"},{name:"norm_size",type:"f32"},{name:"norm_size_vectorized",type:"u32"},{name:"epsilon",type:"f32"}];return` + ${T.registerUniforms(z).declareVariables(...D)} ${T.mainStart()} ${T.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.norm_count")} let offset = global_idx * uniforms.norm_size_vectorized; @@ -1732,14 +1732,14 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) { let f32input = ${At(A,g,"x[j + offset]")}; let f32scale = ${At(A,g,"scale[j]")}; - output[j + offset] = ${z[0].type.value}((f32input ${n?"":"- mean"}) * inv_std_dev * f32scale + output[j + offset] = ${D[0].type.value}((f32input ${n?"":"- mean"}) * inv_std_dev * f32scale ${a?`+ ${At(A,g,"bias[j]")}`:""} ); } ${_?"mean_data_output[global_idx] = mean":""}; ${$?"inv_std_output[global_idx] = inv_std_dev":""}; - }`},I=[{dims:l,dataType:e[0].dataType}];return _&&I.push({dims:w,dataType:1}),$&&I.push({dims:w,dataType:1}),{name:"LayerNormalization",shaderCache:{hint:`${g};${r};${n}`,inputDependencies:b},getRunData:()=>({outputs:I,dispatchGroup:{x:Math.ceil(p/64)},programUniforms:x}),getShaderSource:S}},Ad=(e,t)=>{gf(e.inputs),e.compute(yf(e.inputs,t,e.outputCount))}});var bf,wf,kd,Pd,Od=U(()=>{"use strict";Q();ae();Ce();oe();bf=(e,t)=>{if(e.length<3||e.length>4)throw new Error("MatMulNBits requires 3 or 4 inputs");let r=e[0],n=r.dims.length;if(r.dims[n-1]!==t.k)throw new Error("The last dim of input shape does not match the k value");let o=Math.floor((t.k+t.blockSize-1)/t.blockSize),i=t.blockSize/8*t.bits,a=e[1];if(!k.areEqual(a.dims,[t.n,o,i]))throw new Error("The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize");let d=e[2].dims;if(k.size(d)!==t.n*o)throw new Error("scales input size error.");if(e.length===4){let m=e[3].dims,u=t.bits>4?t.n*o:t.n*Math.floor((o+1)/2);if(k.size(m)!==u)throw new Error("zeroPoints input size error.")}},wf=(e,t,r,n)=>{let o=e[0].dims,i=o.length,a=Math.floor((t.k+t.blockSize-1)/t.blockSize),l=o[i-2],d=t.k,p=t.n,m=o.slice(0,i-2),u=k.size(m),w=t.blockSize/8*t.bits/4,g=e[0].dataType,b=we(l),x=we(t.k),_=we(w),$=It(g),S=l*a*$,I=Math.floor(n/S),T=a<=r[0]&&I>0,A=!T||I>=4?we(p):I>=2&&we(p)>=2?2:1,z=m.concat([l,p]),D=k.size(z)/A/b,H=T?[]:[{type:12,data:D},{type:12,data:t.blockSize}],W=[u,l,d/x],F=k.convertShape(e[1].dims).slice();F.splice(-1,1,w/_),H.push(...V(W)),H.push(...V(F)),H.push(...V(e[2].dims)),e.length===4&&H.push(...V(k.convertShape(e[3].dims)));let de=[u,l,p/A];H.push(...V(de));let ce=X=>{let 
xe=W.length,q=E("a",e[0].dataType,xe,x),ie=E("b",12,F.length,_),le=E("scales",e[2].dataType,e[2].dims.length),se=[q,ie,le],Z=e.length===4?E("zero_points",12,e[3].dims.length):void 0;Z&&se.push(Z);let re=de.length,J=M("output",e[0].dataType,re,A),Pe=[{name:"output_size",type:"u32"},{name:"block_size",type:"u32"}],R=he(e[0].dataType),Y=(()=>{switch(x){case 1:return`array<${R}, 8>`;case 2:return`mat4x2<${R}>`;case 4:return`mat2x4<${R}>`;default:throw new Error(`${x}-component is not supported.`)}})(),ue=` + }`},I=[{dims:l,dataType:e[0].dataType}];return _&&I.push({dims:w,dataType:1}),$&&I.push({dims:w,dataType:1}),{name:"LayerNormalization",shaderCache:{hint:`${g};${r};${n}`,inputDependencies:b},getRunData:()=>({outputs:I,dispatchGroup:{x:Math.ceil(p/64)},programUniforms:x}),getShaderSource:S}},Ad=(e,t)=>{bf(e.inputs),e.compute(wf(e.inputs,t,e.outputCount))}});var vf,$f,kd,Pd,Od=U(()=>{"use strict";Q();ae();Ce();oe();vf=(e,t)=>{if(e.length<3||e.length>4)throw new Error("MatMulNBits requires 3 or 4 inputs");let r=e[0],n=r.dims.length;if(r.dims[n-1]!==t.k)throw new Error("The last dim of input shape does not match the k value");let o=Math.floor((t.k+t.blockSize-1)/t.blockSize),i=t.blockSize/8*t.bits,a=e[1];if(!k.areEqual(a.dims,[t.n,o,i]))throw new Error("The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize");let d=e[2].dims;if(k.size(d)!==t.n*o)throw new Error("scales input size error.");if(e.length===4){let m=e[3].dims,u=t.bits>4?t.n*o:t.n*Math.floor((o+1)/2);if(k.size(m)!==u)throw new Error("zeroPoints input size error.")}},$f=(e,t,r,n)=>{let o=e[0].dims,i=o.length,a=Math.floor((t.k+t.blockSize-1)/t.blockSize),l=o[i-2],d=t.k,p=t.n,m=o.slice(0,i-2),u=k.size(m),w=t.blockSize/8*t.bits/4,g=e[0].dataType,b=we(l),x=we(t.k),_=we(w),$=It(g),S=l*a*$,I=Math.floor(n/S),T=a<=r[0]&&I>0,A=!T||I>=4?we(p):I>=2&&we(p)>=2?2:1,D=m.concat([l,p]),z=k.size(D)/A/b,H=T?[]:[{type:12,data:z},{type:12,data:t.blockSize}],W=[u,l,d/x],F=k.convertShape(e[1].dims).slice();F.splice(-1,1,w/_),H.push(...V(W)),H.push(...V(F)),H.push(...V(e[2].dims)),e.length===4&&H.push(...V(k.convertShape(e[3].dims)));let de=[u,l,p/A];H.push(...V(de));let ce=X=>{let xe=W.length,q=E("a",e[0].dataType,xe,x),ie=E("b",12,F.length,_),le=E("scales",e[2].dataType,e[2].dims.length),se=[q,ie,le],Z=e.length===4?E("zero_points",12,e[3].dims.length):void 0;Z&&se.push(Z);let re=de.length,J=M("output",e[0].dataType,re,A),Pe=[{name:"output_size",type:"u32"},{name:"block_size",type:"u32"}],R=he(e[0].dataType),Y=(()=>{switch(x){case 1:return`array<${R}, 8>`;case 2:return`mat4x2<${R}>`;case 4:return`mat2x4<${R}>`;default:throw new Error(`${x}-component is not supported.`)}})(),ue=` for (var word: u32 = 0; word < ${w}; word += ${_}) { ${ie.indicesSet("b_indices","2","word")}; let b_data = ${ie.getByIndices("b_indices")}; @@ -1865,7 +1865,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${J.indicesSet("output_indices",re-2,`${b} * row + k`)}; ${J.setByIndices("output_indices","output_values[k]")} } - }`};return{name:T?"BlockwiseMatMulNBits":"MatMulNBits",shaderCache:{hint:`${t.cacheKey};${l};${g};${e.length}`,inputDependencies:Array(e.length).fill("rank")},getRunData:()=>({outputs:[{dims:z,dataType:g}],name:T?"BlockwiseMatMulNBits":"MatMulNBits",dispatchGroup:T?{x:1,y:Math.ceil(p/A),z:u}:{x:Math.ceil(D/64)},programUniforms:H}),getShaderSource:ce}},kd=(e,t)=>{bf(e.inputs,t);let r=e.getMaxComputeWorkgroupSizes(),n=e.getMaxComputeWorkgroupStoragesize();e.compute(wf(e.inputs,t,r,n))},Pd=e=>te(e)});var 
vf,$f,_f,xf,Sf,If,Cf,Tf,zd,Dd=U(()=>{"use strict";Q();ae();oe();vf=e=>{if(!e||e.length<1)throw new Error("Too few inputs");if(e[0].dataType!==1&&e[0].dataType!==10)throw new Error("Input type must be float or float16.");if(e.length>=2){let t=e[0].dims.length*2===e[1].dims[0];if(e.length===4&&(t=e[3].dims[0]*2===e[1].dims[0]),!t)throw new Error("The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].")}},$f=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` + }`};return{name:T?"BlockwiseMatMulNBits":"MatMulNBits",shaderCache:{hint:`${t.cacheKey};${l};${g};${e.length}`,inputDependencies:Array(e.length).fill("rank")},getRunData:()=>({outputs:[{dims:D,dataType:g}],name:T?"BlockwiseMatMulNBits":"MatMulNBits",dispatchGroup:T?{x:1,y:Math.ceil(p/A),z:u}:{x:Math.ceil(z/64)},programUniforms:H}),getShaderSource:ce}},kd=(e,t)=>{vf(e.inputs,t);let r=e.getMaxComputeWorkgroupSizes(),n=e.getMaxComputeWorkgroupStoragesize();e.compute($f(e.inputs,t,r,n))},Pd=e=>te(e)});var _f,xf,Sf,If,Cf,Tf,Af,Ef,Dd,zd=U(()=>{"use strict";Q();ae();oe();_f=e=>{if(!e||e.length<1)throw new Error("Too few inputs");if(e[0].dataType!==1&&e[0].dataType!==10)throw new Error("Input type must be float or float16.");if(e.length>=2){let t=e[0].dims.length*2===e[1].dims[0];if(e.length===4&&(t=e[3].dims[0]*2===e[1].dims[0]),!t)throw new Error("The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].")}},xf=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` k = i32(${e.indicesGet("indices",o)}) - ${G("uniforms.pads",o,r)}; if (k < 0) { break; @@ -1882,7 +1882,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${n} value = x[offset]; } - `},_f=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` + `},Sf=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` k = i32(${e.indicesGet("indices",o)}) - ${G("uniforms.pads",o,r)}; if (k < 0) { k = -k; @@ -1900,7 +1900,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; var k = 0; ${n} value = x[offset]; - `},xf=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` + `},If=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` k = i32(${e.indicesGet("indices",o)}) - ${G("uniforms.pads",o,r)}; if (k < 0) { k = 0; @@ -1914,7 +1914,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; var k = 0; ${n} value = x[offset]; - `},Sf=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` + `},Cf=(e,t,r)=>{let n="";for(let o=t-1;o>=0;--o)n+=` k = i32(${e.indicesGet("indices",o)}) - ${G("uniforms.pads",o,r)}; if (k < 0) { k += i32(${G("uniforms.x_shape",o,t)}]); @@ -1928,7 +1928,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; var k = 0; ${n} value = x[offset]; - `},If=(e,t,r)=>{switch(r.mode){case 0:return $f(e,t,r.pads.length);case 1:return _f(e,t,r.pads.length);case 2:return xf(e,t,r.pads.length);case 3:return Sf(e,t,r.pads.length);default:throw new Error("Invalid mode")}},Cf=(e,t)=>{let r=k.padShape(e[0].dims.slice(),t.pads),n=e[0].dims,o=k.size(r),i=[{type:12,data:o},{type:6,data:t.pads}];t.mode===0&&i.push({type:e[0].dataType,data:t.value}),i.push(...V(e[0].dims,r));let a=["rank"],l=d=>{let p=M("output",e[0].dataType,r.length),m=E("x",e[0].dataType,n.length),u=m.type.value,h=If(p,n.length,t),w=[{name:"output_size",type:"u32"},{name:"pads",type:"i32",length:t.pads.length}];return t.mode===0&&w.push({name:"constant_value",type:u}),` + `},Tf=(e,t,r)=>{switch(r.mode){case 0:return xf(e,t,r.pads.length);case 1:return Sf(e,t,r.pads.length);case 2:return If(e,t,r.pads.length);case 3:return Cf(e,t,r.pads.length);default:throw new Error("Invalid 
mode")}},Af=(e,t)=>{let r=k.padShape(e[0].dims.slice(),t.pads),n=e[0].dims,o=k.size(r),i=[{type:12,data:o},{type:6,data:t.pads}];t.mode===0&&i.push({type:e[0].dataType,data:t.value}),i.push(...V(e[0].dims,r));let a=["rank"],l=d=>{let p=M("output",e[0].dataType,r.length),m=E("x",e[0].dataType,n.length),u=m.type.value,h=Tf(p,n.length,t),w=[{name:"output_size",type:"u32"},{name:"pads",type:"i32",length:t.pads.length}];return t.mode===0&&w.push({name:"constant_value",type:u}),` ${d.registerUniforms(w).declareVariables(m,p)} ${d.mainStart()} ${d.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.output_size")} @@ -1938,7 +1938,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; var value = ${u}(0); ${h} output[global_idx] = value; - }`};return{name:"Pad",shaderCache:{hint:`${t.mode}`,inputDependencies:a},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(k.size(r)/64)},programUniforms:i}),getShaderSource:l}},Tf=(e,t)=>{if(e.length>1){let r=e[1].getBigInt64Array(),n=e.length>=3&&e[2].data?e[2].getFloat32Array()[0]:0,o=e[0].dims.length,i=new Int32Array(2*o).fill(0);if(e.length>=4){let l=e[3].getBigInt64Array();for(let d=0;di[Number(d)]=Number(l));let a=[];return i.forEach(l=>a.push(l)),{mode:t.mode,value:n,pads:a}}else return t},zd=(e,t)=>{vf(e.inputs);let r=Tf(e.inputs,t);e.compute(Cf(e.inputs,r),{inputs:[0]})}});var nn,Bd,Rd,Md,Ud,Af,Ef,Vd,Nd,Wd,Hd,Gd,Ld,Fd,qd,Kd,jd,Yd,Xd,Zd=U(()=>{"use strict";Le();Q();ae();oe();nn=e=>{if(ye.webgpu.validateInputContent&&(!e||e.length!==1))throw new Error("Pool ops requires 1 input.")},Bd=(e,t,r)=>{let n=t.format==="NHWC",o=e.dims.slice();n&&o.splice(1,0,o.pop());let i=Object.hasOwnProperty.call(t,"dilations"),a=t.kernelShape.slice(),l=t.strides.slice(),d=i?t.dilations.slice():[],p=t.pads.slice();Ct.adjustPoolAttributes(r,o,a,l,d,p);let m=Ct.computePoolOutputShape(r,o,l,d,a,p,t.autoPad),u=Object.assign({},t);i?Object.assign(u,{kernelShape:a,strides:l,pads:p,dilations:d,cacheKey:t.cacheKey}):Object.assign(u,{kernelShape:a,strides:l,pads:p,cacheKey:t.cacheKey});let h=m.slice();return h.push(h.splice(1,1)[0]),[u,n?h:m]},Rd=(e,t)=>{let r=t.format==="NHWC",n=k.size(e),o=k.size(t.kernelShape),i=[{type:12,data:n},{type:12,data:o}],a=[{name:"outputSize",type:"u32"},{name:"kernelSize",type:"u32"}];if(t.kernelShape.length<=2){let l=t.kernelShape[t.kernelShape.length-1],d=t.strides[t.strides.length-1],p=t.pads[t.pads.length/2-1],m=t.pads[t.pads.length-1],u=!!(p+m);i.push({type:12,data:l},{type:12,data:d},{type:12,data:p},{type:12,data:m}),a.push({name:"kw",type:"u32"},{name:"sw",type:"u32"},{name:"pwStart",type:"u32"},{name:"pwEnd",type:"u32"});let h=!1;if(t.kernelShape.length===2){let w=t.kernelShape[t.kernelShape.length-2],g=t.strides[t.strides.length-2],b=t.pads[t.pads.length/2-2],x=t.pads[t.pads.length-2];h=!!(b+x),i.push({type:12,data:w},{type:12,data:g},{type:12,data:b},{type:12,data:x}),a.push({name:"kh",type:"u32"},{name:"sh",type:"u32"},{name:"phStart",type:"u32"},{name:"phEnd",type:"u32"})}return[i,a,!0,u,h]}else{if(r)throw new Error("Pooling with kernelShape.length > 2 is not supported for NHWC format.");let l=k.computeStrides(t.kernelShape);i.push({type:12,data:l},{type:12,data:t.pads},{type:12,data:t.strides}),a.push({name:"kernelStrides",type:"u32",length:l.length},{name:"pads",type:"u32",length:t.pads.length},{name:"strides",type:"u32",length:t.strides.length});let d=t.pads.reduce((p,m)=>p+m);return[i,a,!!d,!1,!1]}},Md=(e,t,r,n,o,i,a,l,d,p,m,u)=>{let 
h=o.format==="NHWC",w=t.type.value,g=M("output",t.type.tensor,n);if(o.kernelShape.length<=2){let b="",x="",_="",$=r-(h?2:1);if(m?b=` + }`};return{name:"Pad",shaderCache:{hint:`${t.mode}`,inputDependencies:a},getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(k.size(r)/64)},programUniforms:i}),getShaderSource:l}},Ef=(e,t)=>{if(e.length>1){let r=e[1].getBigInt64Array(),n=e.length>=3&&e[2].data?e[2].getFloat32Array()[0]:0,o=e[0].dims.length,i=new Int32Array(2*o).fill(0);if(e.length>=4){let l=e[3].getBigInt64Array();for(let d=0;di[Number(d)]=Number(l));let a=[];return i.forEach(l=>a.push(l)),{mode:t.mode,value:n,pads:a}}else return t},Dd=(e,t)=>{_f(e.inputs);let r=Ef(e.inputs,t);e.compute(Af(e.inputs,r),{inputs:[0]})}});var nn,Bd,Rd,Md,Ud,kf,Pf,Vd,Nd,Wd,Hd,Gd,Ld,Fd,qd,jd,Kd,Yd,Xd,Zd=U(()=>{"use strict";Le();Q();ae();oe();nn=e=>{if(ye.webgpu.validateInputContent&&(!e||e.length!==1))throw new Error("Pool ops requires 1 input.")},Bd=(e,t,r)=>{let n=t.format==="NHWC",o=e.dims.slice();n&&o.splice(1,0,o.pop());let i=Object.hasOwnProperty.call(t,"dilations"),a=t.kernelShape.slice(),l=t.strides.slice(),d=i?t.dilations.slice():[],p=t.pads.slice();Ct.adjustPoolAttributes(r,o,a,l,d,p);let m=Ct.computePoolOutputShape(r,o,l,d,a,p,t.autoPad),u=Object.assign({},t);i?Object.assign(u,{kernelShape:a,strides:l,pads:p,dilations:d,cacheKey:t.cacheKey}):Object.assign(u,{kernelShape:a,strides:l,pads:p,cacheKey:t.cacheKey});let h=m.slice();return h.push(h.splice(1,1)[0]),[u,n?h:m]},Rd=(e,t)=>{let r=t.format==="NHWC",n=k.size(e),o=k.size(t.kernelShape),i=[{type:12,data:n},{type:12,data:o}],a=[{name:"outputSize",type:"u32"},{name:"kernelSize",type:"u32"}];if(t.kernelShape.length<=2){let l=t.kernelShape[t.kernelShape.length-1],d=t.strides[t.strides.length-1],p=t.pads[t.pads.length/2-1],m=t.pads[t.pads.length-1],u=!!(p+m);i.push({type:12,data:l},{type:12,data:d},{type:12,data:p},{type:12,data:m}),a.push({name:"kw",type:"u32"},{name:"sw",type:"u32"},{name:"pwStart",type:"u32"},{name:"pwEnd",type:"u32"});let h=!1;if(t.kernelShape.length===2){let w=t.kernelShape[t.kernelShape.length-2],g=t.strides[t.strides.length-2],b=t.pads[t.pads.length/2-2],x=t.pads[t.pads.length-2];h=!!(b+x),i.push({type:12,data:w},{type:12,data:g},{type:12,data:b},{type:12,data:x}),a.push({name:"kh",type:"u32"},{name:"sh",type:"u32"},{name:"phStart",type:"u32"},{name:"phEnd",type:"u32"})}return[i,a,!0,u,h]}else{if(r)throw new Error("Pooling with kernelShape.length > 2 is not supported for NHWC format.");let l=k.computeStrides(t.kernelShape);i.push({type:12,data:l},{type:12,data:t.pads},{type:12,data:t.strides}),a.push({name:"kernelStrides",type:"u32",length:l.length},{name:"pads",type:"u32",length:t.pads.length},{name:"strides",type:"u32",length:t.strides.length});let d=t.pads.reduce((p,m)=>p+m);return[i,a,!!d,!1,!1]}},Md=(e,t,r,n,o,i,a,l,d,p,m,u)=>{let h=o.format==="NHWC",w=t.type.value,g=M("output",t.type.tensor,n);if(o.kernelShape.length<=2){let b="",x="",_="",$=r-(h?2:1);if(m?b=` for (var i: u32 = 0u; i < uniforms.kw; i++) { xIndices[${$}] = indices[${$}] * uniforms.sw - uniforms.pwStart + i; if (xIndices[${$}] < 0 || xIndices[${$}] @@ -2027,15 +2027,15 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${a} output[global_idx] = value; - 
}`}},Ud=e=>`${e.format};${e.ceilMode};${e.autoPad};${e.kernelShape.length}`,Af=e=>`${Ud(e)};${e.countIncludePad}`,Ef=e=>`${Ud(e)};${e.storageOrder};${e.dilations}`,Vd=e=>({format:e.format,autoPad:["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][e.auto_pad],ceilMode:e.ceil_mode,kernelShape:e.kernel_shape,strides:e.strides,pads:e.pads}),Nd=(e,t,r,n)=>{let[o,i]=Bd(t,n,r),a=E("x",t.dataType,t.dims.length),l=a.type.value,d="value += x_val;",p="";o.countIncludePad?p+=`value /= ${l}(uniforms.kernelSize);`:p+=`value /= ${l}(i32(uniforms.kernelSize) - pad);`;let[m,u,h,w,g]=Rd(i,o);m.push(...V(t.dims,i));let b=["rank"];return{name:e,shaderCache:{hint:`${n.cacheKey};${h};${w};${g}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:i,dataType:t.dataType}],dispatchGroup:{x:Math.ceil(k.size(i)/64)},programUniforms:m}),getShaderSource:x=>Md(x,a,t.dims.length,i.length,o,d,p,0,u,h,w,g)}},Wd=e=>{let t=e.count_include_pad!==0,r=Vd(e);if(r.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for AveragePool");let n={countIncludePad:t,...r,cacheKey:""};return{...n,cacheKey:Af(n)}},Hd=(e,t)=>{nn(e.inputs),e.compute(Nd("AveragePool",e.inputs[0],!1,t))},Gd={autoPad:"",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[]},Ld=e=>{let t=e.format;return{format:t,...Gd,cacheKey:t}},Fd=(e,t)=>{nn(e.inputs),e.compute(Nd("GlobalAveragePool",e.inputs[0],!0,t))},qd=(e,t,r,n)=>{let[o,i]=Bd(t,n,r),a=` + }`}},Ud=e=>`${e.format};${e.ceilMode};${e.autoPad};${e.kernelShape.length}`,kf=e=>`${Ud(e)};${e.countIncludePad}`,Pf=e=>`${Ud(e)};${e.storageOrder};${e.dilations}`,Vd=e=>({format:e.format,autoPad:["NOTSET","VALID","SAME_UPPER","SAME_LOWER"][e.auto_pad],ceilMode:e.ceil_mode,kernelShape:e.kernel_shape,strides:e.strides,pads:e.pads}),Nd=(e,t,r,n)=>{let[o,i]=Bd(t,n,r),a=E("x",t.dataType,t.dims.length),l=a.type.value,d="value += x_val;",p="";o.countIncludePad?p+=`value /= ${l}(uniforms.kernelSize);`:p+=`value /= ${l}(i32(uniforms.kernelSize) - pad);`;let[m,u,h,w,g]=Rd(i,o);m.push(...V(t.dims,i));let b=["rank"];return{name:e,shaderCache:{hint:`${n.cacheKey};${h};${w};${g}`,inputDependencies:b},getRunData:()=>({outputs:[{dims:i,dataType:t.dataType}],dispatchGroup:{x:Math.ceil(k.size(i)/64)},programUniforms:m}),getShaderSource:x=>Md(x,a,t.dims.length,i.length,o,d,p,0,u,h,w,g)}},Wd=e=>{let t=e.count_include_pad!==0,r=Vd(e);if(r.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for AveragePool");let n={countIncludePad:t,...r,cacheKey:""};return{...n,cacheKey:kf(n)}},Hd=(e,t)=>{nn(e.inputs),e.compute(Nd("AveragePool",e.inputs[0],!1,t))},Gd={autoPad:"",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[]},Ld=e=>{let t=e.format;return{format:t,...Gd,cacheKey:t}},Fd=(e,t)=>{nn(e.inputs),e.compute(Nd("GlobalAveragePool",e.inputs[0],!0,t))},qd=(e,t,r,n)=>{let[o,i]=Bd(t,n,r),a=` value = max(x_val, value); - `,l="",d=E("x",t.dataType,t.dims.length),p=["rank"],[m,u,h,w,g]=Rd(i,o);return m.push(...V(t.dims,i)),{name:e,shaderCache:{hint:`${n.cacheKey};${h};${w};${g}`,inputDependencies:p},getRunData:()=>({outputs:[{dims:i,dataType:t.dataType}],dispatchGroup:{x:Math.ceil(k.size(i)/64)},programUniforms:m}),getShaderSource:b=>Md(b,d,t.dims.length,i.length,o,a,l,t.dataType===10?-65504:-1e5,u,h,w,g)}},Kd=(e,t)=>{nn(e.inputs),e.compute(qd("MaxPool",e.inputs[0],!1,t))},jd=e=>{let t=e.storage_order,r=e.dilations,n=Vd(e);if(t!==0)throw new Error("column major storage order is not yet supported for 
MaxPool");if(n.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for MaxPool");let o={storageOrder:t,dilations:r,...n,cacheKey:""};return{...o,cacheKey:Ef(o)}},Yd=e=>{let t=e.format;return{format:t,...Gd,cacheKey:t}},Xd=(e,t)=>{nn(e.inputs),e.compute(qd("GlobalMaxPool",e.inputs[0],!0,t))}});var Pf,Of,Qd,Jd=U(()=>{"use strict";Le();Q();oe();Pf=(e,t,r)=>{let n=e===t,o=et&&r>0;if(n||o||i)throw new Error("Range these inputs' contents are invalid.")},Of=(e,t,r,n)=>{let o=Math.abs(Math.ceil((t-e)/r)),i=[o],a=o,l=[{type:12,data:a},{type:n,data:e},{type:n,data:r},...V(i)],d=p=>{let m=M("output",n,i.length),u=m.type.value,h=[{name:"outputSize",type:"u32"},{name:"start",type:u},{name:"delta",type:u}];return` + `,l="",d=E("x",t.dataType,t.dims.length),p=["rank"],[m,u,h,w,g]=Rd(i,o);return m.push(...V(t.dims,i)),{name:e,shaderCache:{hint:`${n.cacheKey};${h};${w};${g}`,inputDependencies:p},getRunData:()=>({outputs:[{dims:i,dataType:t.dataType}],dispatchGroup:{x:Math.ceil(k.size(i)/64)},programUniforms:m}),getShaderSource:b=>Md(b,d,t.dims.length,i.length,o,a,l,t.dataType===10?-65504:-1e5,u,h,w,g)}},jd=(e,t)=>{nn(e.inputs),e.compute(qd("MaxPool",e.inputs[0],!1,t))},Kd=e=>{let t=e.storage_order,r=e.dilations,n=Vd(e);if(t!==0)throw new Error("column major storage order is not yet supported for MaxPool");if(n.ceilMode!==0)throw new Error("using ceil() in shape computation is not yet supported for MaxPool");let o={storageOrder:t,dilations:r,...n,cacheKey:""};return{...o,cacheKey:Pf(o)}},Yd=e=>{let t=e.format;return{format:t,...Gd,cacheKey:t}},Xd=(e,t)=>{nn(e.inputs),e.compute(qd("GlobalMaxPool",e.inputs[0],!0,t))}});var Df,zf,Qd,Jd=U(()=>{"use strict";Le();Q();oe();Df=(e,t,r)=>{let n=e===t,o=et&&r>0;if(n||o||i)throw new Error("Range these inputs' contents are invalid.")},zf=(e,t,r,n)=>{let o=Math.abs(Math.ceil((t-e)/r)),i=[o],a=o,l=[{type:12,data:a},{type:n,data:e},{type:n,data:r},...V(i)],d=p=>{let m=M("output",n,i.length),u=m.type.value,h=[{name:"outputSize",type:"u32"},{name:"start",type:u},{name:"delta",type:u}];return` ${p.registerUniforms(h).declareVariables(m)} ${p.mainStart()} ${p.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} output[global_idx] = uniforms.start + ${u}(global_idx) * uniforms.delta; - }`};return{name:"Range",shaderCache:{hint:`${n}`},getShaderSource:d,getRunData:()=>({outputs:[{dims:i,dataType:n}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:l})}},Qd=e=>{let t=0,r=0,n=0;e.inputs[0].dataType===6?(t=e.inputs[0].getInt32Array()[0],r=e.inputs[1].getInt32Array()[0],n=e.inputs[2].getInt32Array()[0]):e.inputs[0].dataType===1&&(t=e.inputs[0].getFloat32Array()[0],r=e.inputs[1].getFloat32Array()[0],n=e.inputs[2].getFloat32Array()[0]),ye.webgpu.validateInputContent&&Pf(t,r,n),e.compute(Of(t,r,n,e.inputs[0].dataType),{inputs:[]})}});var zf,Df,Bf,Rf,Mf,Uf,Vf,Nf,Wf,Hf,Gf,el,Lf,Ff,qf,Kf,jf,tl,rl,nl=U(()=>{"use strict";Q();ae();Ce();oe();zf=(e,t)=>{if(e.every(r=>r>0||(()=>{throw new Error("Resize requires scales input values to be positive")})),e.length>0){if(t.mode==="linear"){if(!(e.length===2||e.length===3||e.length===4&&e[0]===1&&e[1]===1||e.length===4&&e[0]===1&&e[3]===1||e.length===5&&e[0]===1&&e[1]===1))throw new Error(`For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and - one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`)}else if(t.mode==="cubic"&&!(e.length===2||e.length===4&&e[0]===1&&e[1]===1||e.length===4&&e[0]===1&&e[3]===1))throw new 
Error("Resize requires scales input size to be 2 or 4 for cubic mode")}},Df=(e,t,r)=>{t.every(o=>o>=0&&o{throw new Error("Resize requires axes input values to be positive and less than rank")}));let n=new Array(r).fill(1);return t.forEach((o,i)=>n[o]=e[i]),n},Bf=(e,t,r,n,o,i)=>{let[a,l,d]=r>10?[1,2,3]:[-1,e.length>1?1:-1,-1],p=e[0].dims.length;if(a>0&&e.length>a&&e[a].dims.length>0)e[a].getFloat32Array().forEach(m=>i.push(m));else if(t.coordinateTransformMode==="tf_crop_and_resize")throw new Error("Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize");if(l>0&&e.length>l&&e[l].dims.length>0){if(e[l].getFloat32Array().forEach(m=>n.push(m)),n.length!==0&&n.length!==p&&r>=18&&n.length!==t.axes.length)throw new Error("Resize requires scales input size to be same as input rank or axes size for opset 18 and up");zf(n,t),t.axes.length>0&&Df(n,t.axes,p).forEach((m,u)=>n[u]=m)}if(d>0&&e.length>d&&(e[d].getBigInt64Array().forEach(m=>o.push(Number(m))),o.length!==p||r>=18&&o.length===t.axes.length))throw new Error("Resize requires sizes input size to be same as input rank or axes size for opset 18 and up");if(t.axes.length>0){if(n.length!==t.axes.length)throw new Error('Resize requires "scales" input size to be of axes rank when axes attributes is specified');if(o.length!==t.axes.length)throw new Error('Resize requires "sizes" input size to be of rank axes rank when axes attributes is specified')}if(typeof n<"u"&&typeof o<"u"&&n.length>0&&o.length>p)throw new Error("Resize requires only of scales or sizes to be specified")},Rf=(e,t)=>`fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32, + }`};return{name:"Range",shaderCache:{hint:`${n}`},getShaderSource:d,getRunData:()=>({outputs:[{dims:i,dataType:n}],dispatchGroup:{x:Math.ceil(a/64)},programUniforms:l})}},Qd=e=>{let t=0,r=0,n=0;e.inputs[0].dataType===6?(t=e.inputs[0].getInt32Array()[0],r=e.inputs[1].getInt32Array()[0],n=e.inputs[2].getInt32Array()[0]):e.inputs[0].dataType===1&&(t=e.inputs[0].getFloat32Array()[0],r=e.inputs[1].getFloat32Array()[0],n=e.inputs[2].getFloat32Array()[0]),ye.webgpu.validateInputContent&&Df(t,r,n),e.compute(zf(t,r,n,e.inputs[0].dataType),{inputs:[]})}});var Bf,Rf,Mf,Uf,Vf,Nf,Wf,Hf,Gf,Lf,Ff,el,qf,jf,Kf,Yf,Xf,tl,rl,nl=U(()=>{"use strict";Q();ae();Ce();oe();Bf=(e,t)=>{if(e.every(r=>r>0||(()=>{throw new Error("Resize requires scales input values to be positive")})),e.length>0){if(t.mode==="linear"){if(!(e.length===2||e.length===3||e.length===4&&e[0]===1&&e[1]===1||e.length===4&&e[0]===1&&e[3]===1||e.length===5&&e[0]===1&&e[1]===1))throw new Error(`For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and + one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`)}else if(t.mode==="cubic"&&!(e.length===2||e.length===4&&e[0]===1&&e[1]===1||e.length===4&&e[0]===1&&e[3]===1))throw new Error("Resize requires scales input size to be 2 or 4 for cubic mode")}},Rf=(e,t,r)=>{t.every(o=>o>=0&&o{throw new Error("Resize requires axes input values to be positive and less than rank")}));let n=new Array(r).fill(1);return t.forEach((o,i)=>n[o]=e[i]),n},Mf=(e,t,r,n,o,i)=>{let[a,l,d]=r>10?[1,2,3]:[-1,e.length>1?1:-1,-1],p=e[0].dims.length;if(a>0&&e.length>a&&e[a].dims.length>0)e[a].getFloat32Array().forEach(m=>i.push(m));else if(t.coordinateTransformMode==="tf_crop_and_resize")throw new Error("Resize requires RoI input to be specified when coordinateTransformMode is 
tfCropAndResize");if(l>0&&e.length>l&&e[l].dims.length>0){if(e[l].getFloat32Array().forEach(m=>n.push(m)),n.length!==0&&n.length!==p&&r>=18&&n.length!==t.axes.length)throw new Error("Resize requires scales input size to be same as input rank or axes size for opset 18 and up");Bf(n,t),t.axes.length>0&&Rf(n,t.axes,p).forEach((m,u)=>n[u]=m)}if(d>0&&e.length>d&&(e[d].getBigInt64Array().forEach(m=>o.push(Number(m))),o.length!==p||r>=18&&o.length===t.axes.length))throw new Error("Resize requires sizes input size to be same as input rank or axes size for opset 18 and up");if(t.axes.length>0){if(n.length!==t.axes.length)throw new Error('Resize requires "scales" input size to be of axes rank when axes attributes is specified');if(o.length!==t.axes.length)throw new Error('Resize requires "sizes" input size to be of rank axes rank when axes attributes is specified')}if(typeof n<"u"&&typeof o<"u"&&n.length>0&&o.length>p)throw new Error("Resize requires only of scales or sizes to be specified")},Uf=(e,t)=>`fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32, lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${t} { `+(()=>{switch(e){case"asymmetric":return`return ${t}(xResized) / ${t}(xScale);`;case"pytorch_half_pixel":return`if (lengthResized > 1) { return (${t}(xResized) + 0.5) / ${t}(xScale) - 0.5; } else { @@ -2060,7 +2060,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; const adjustment = ${t}(lengthResized) / outputWidth; const center = ${t}(lengthOriginal) / 2; const offset = center * (1 - adjustment); - return offset + ((${t}(xResized) + 0.5) / ${t}(xScale)) - 0.5;`;case"half_pixel":return`return ((${t}(xResized) + 0.5) / ${t}(xScale)) - 0.5;`;default:throw new Error(`Coordinate transform mode ${e} is not supported`)}})()+"}",Mf=(e,t,r)=>`fn getNearestPixelFromOriginal(xOriginal: ${r}, isDownSample: bool) -> ${r} {`+(()=>{switch(e){case"round_prefer_ceil":return"if (fract(xOriginal) == 0.5) { return ceil(xOriginal); } else { return round(xOriginal); }";case"floor":return"return floor(xOriginal);";case"ceil":return"return ceil(xOriginal);";case"round_prefer_floor":return"if (fract(xOriginal) == 0.5) { return floor(xOriginal); } else { return round(xOriginal); }";case"simple":default:if(t<11)return"if (isDownSample) { return ceil(xOriginal); } else { return xOriginal; }";throw new Error(`Nearest mode ${e} is not supported`)}})()+"}",Uf=(e,t,r)=>{let n=new Array(r).fill(0).concat(new Array(r).fill(1)),o=e.length===0?n:e.slice();return t.length>0?(t.forEach((i,a)=>{n[i]=o[a],n[a+r]=o[t.length+a]}),n):o},Vf=(e,t,r,n)=>{let o=[];if(r.length>0)if(n.length>0){if(e.forEach(i=>o.push(i)),Math.max(...n)>e.length)throw new Error("axes is out of bound");n.forEach((i,a)=>o[i]=r[a])}else r.forEach(i=>o.push(i));else{if(t.length===0)throw new Error("Resize requires either scales or sizes.");o=e.map((i,a)=>Math.round(i*t[a]))}return o},Nf=(e,t,r)=>{let n=(()=>{switch(r.keepAspectRatioPolicy){case"not_larger":return r.axes.length>0?Math.min(...r.axes.map(i=>t[i]),Number.MAX_VALUE):Math.min(...t,Number.MAX_VALUE);case"not_smaller":return r.axes.length>0?Math.max(...r.axes.map(i=>t[i]),Number.MIN_VALUE):Math.max(...t,Number.MIN_VALUE);default:throw new Error(`Keep aspect ratio policy ${r.keepAspectRatioPolicy} is not supported`)}})();t.fill(1,0,t.length);let o=e.slice();return r.axes.length>0?(r.axes.forEach(i=>t[i]=n),r.axes.forEach(i=>o[i]=Math.round(e[i]*t[i]))):(t.fill(n,0,t.length),o.forEach((i,a)=>o[a]=Math.round(i*t[a]))),o},Wf=(e,t,r,n,o)=>` + 
return offset + ((${t}(xResized) + 0.5) / ${t}(xScale)) - 0.5;`;case"half_pixel":return`return ((${t}(xResized) + 0.5) / ${t}(xScale)) - 0.5;`;default:throw new Error(`Coordinate transform mode ${e} is not supported`)}})()+"}",Vf=(e,t,r)=>`fn getNearestPixelFromOriginal(xOriginal: ${r}, isDownSample: bool) -> ${r} {`+(()=>{switch(e){case"round_prefer_ceil":return"if (fract(xOriginal) == 0.5) { return ceil(xOriginal); } else { return round(xOriginal); }";case"floor":return"return floor(xOriginal);";case"ceil":return"return ceil(xOriginal);";case"round_prefer_floor":return"if (fract(xOriginal) == 0.5) { return floor(xOriginal); } else { return round(xOriginal); }";case"simple":default:if(t<11)return"if (isDownSample) { return ceil(xOriginal); } else { return xOriginal; }";throw new Error(`Nearest mode ${e} is not supported`)}})()+"}",Nf=(e,t,r)=>{let n=new Array(r).fill(0).concat(new Array(r).fill(1)),o=e.length===0?n:e.slice();return t.length>0?(t.forEach((i,a)=>{n[i]=o[a],n[a+r]=o[t.length+a]}),n):o},Wf=(e,t,r,n)=>{let o=[];if(r.length>0)if(n.length>0){if(e.forEach(i=>o.push(i)),Math.max(...n)>e.length)throw new Error("axes is out of bound");n.forEach((i,a)=>o[i]=r[a])}else r.forEach(i=>o.push(i));else{if(t.length===0)throw new Error("Resize requires either scales or sizes.");o=e.map((i,a)=>Math.round(i*t[a]))}return o},Hf=(e,t,r)=>{let n=(()=>{switch(r.keepAspectRatioPolicy){case"not_larger":return r.axes.length>0?Math.min(...r.axes.map(i=>t[i]),Number.MAX_VALUE):Math.min(...t,Number.MAX_VALUE);case"not_smaller":return r.axes.length>0?Math.max(...r.axes.map(i=>t[i]),Number.MIN_VALUE):Math.max(...t,Number.MIN_VALUE);default:throw new Error(`Keep aspect ratio policy ${r.keepAspectRatioPolicy} is not supported`)}})();t.fill(1,0,t.length);let o=e.slice();return r.axes.length>0?(r.axes.forEach(i=>t[i]=n),r.axes.forEach(i=>o[i]=Math.round(e[i]*t[i]))):(t.fill(n,0,t.length),o.forEach((i,a)=>o[a]=Math.round(i*t[a]))),o},Gf=(e,t,r,n,o)=>` fn calculateOriginalIndicesFromOutputIndices(output_indices: ${e.type.indices}) -> array<${e.type.value}, ${r.length}> { var original_indices: array<${e.type.value}, ${r.length}>; for (var i:u32 = 0; i < ${r.length}; i++) { @@ -2078,7 +2078,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; } } return original_indices; - }`,Hf=(e,t,r,n,o,i,a)=>` + }`,Lf=(e,t,r,n,o,i,a)=>` fn calculateInputIndicesFromOutputIndices(output_indices: ${t.type.indices}) -> ${e.type.indices} { var input_indices: ${e.type.indices}; for (var i:u32 = 0; i < ${n.length}; i++) { @@ -2109,7 +2109,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${e.indicesSet("input_indices","i"," input_index")} } return input_indices; - }`,Gf=(e,t)=>` + }`,Ff=(e,t)=>` fn checkInputIndices(input_indices: ${e.type.indices}) -> bool { for (var i:u32 = 0; i < ${t.length}; i++) { var input_index = ${e.indicesGet("input_indices","i")}; @@ -2121,7 +2121,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; }`,el=(e,t,r,n)=>e.rank>n?` ${e.indicesSet("input_indices",t,"channel")}; ${e.indicesSet("input_indices",r,"batch")}; -`:"",Lf=(e,t,r,n,o)=>{let[a,l,d,p]=r.length===2?[-1,0,1,-1]:[0,2,3,1],m=e.type.value;return` +`:"",qf=(e,t,r,n,o)=>{let[a,l,d,p]=r.length===2?[-1,0,1,-1]:[0,2,3,1],m=e.type.value;return` fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${m} { var input_indices: ${e.type.indices}; ${e.indicesSet("input_indices",l,`max(0, min(row, ${r[l]} - 1))`)}; @@ -2162,7 +2162,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; dy2 = 
0.5; } return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1); - }`},Ff=(e,t,r,n,o,i,a,l,d,p)=>{let m=r.length===2,u=!0,[h,w]=m?[0,1]:u?[2,3]:[1,2],g=e.type.value,b=x=>{let _=x===h?"row":"col";return` + }`},jf=(e,t,r,n,o,i,a,l,d,p)=>{let m=r.length===2,u=!0,[h,w]=m?[0,1]:u?[2,3]:[1,2],g=e.type.value,b=x=>{let _=x===h?"row":"col";return` fn ${_}CubicInterpolation(input_indices: ${e.type.indices}, output_indices: ${t.type.indices}) -> ${g} { var output_index = ${t.indicesGet("output_indices",x)}; var originalIdx: ${g} = getOriginalCoordinateFromResizedCoordinate(output_index, ${o[x]}, @@ -2210,7 +2210,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; var input_indices: ${e.type.indices} = output_indices; return colCubicInterpolation(input_indices, output_indices); } - `},qf=(e,t,r,n,o)=>{let[a,l,d,p,m]=r.length===3?[-1,0,1,2,-1]:[0,2,3,4,1],u=e.type.value;return` + `},Kf=(e,t,r,n,o)=>{let[a,l,d,p,m]=r.length===3?[-1,0,1,2,-1]:[0,2,3,4,1],u=e.type.value;return` fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${u} { var input_indices: ${e.type.indices}; ${e.indicesSet("input_indices",l,`max(0, min(depth, ${r[l]} - 1))`)}; @@ -2269,18 +2269,18 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; } return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 + x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1); - }`},Kf=(e,t,r,n,o,i)=>{let a=e.dims,l=Uf(i,t.axes,a.length),d=Vf(a,n,o,t.axes),p=n.slice();n.length===0&&(p=a.map(($,S)=>$===0?1:d[S]/$),t.keepAspectRatioPolicy!=="stretch"&&(d=Nf(a,p,t)));let m=M("output",e.dataType,d.length),u=E("input",e.dataType,a.length),h=k.size(d),w=a.length===d.length&&a.every(($,S)=>$===d[S]),g=t.coordinateTransformMode==="tf_crop_and_resize",b=t.extrapolationValue,x=u.type.value,_=$=>` + }`},Yf=(e,t,r,n,o,i)=>{let a=e.dims,l=Nf(i,t.axes,a.length),d=Wf(a,n,o,t.axes),p=n.slice();n.length===0&&(p=a.map(($,S)=>$===0?1:d[S]/$),t.keepAspectRatioPolicy!=="stretch"&&(d=Hf(a,p,t)));let m=M("output",e.dataType,d.length),u=E("input",e.dataType,a.length),h=k.size(d),w=a.length===d.length&&a.every(($,S)=>$===d[S]),g=t.coordinateTransformMode==="tf_crop_and_resize",b=t.extrapolationValue,x=u.type.value,_=$=>` ${w?"":` - ${Rf(t.coordinateTransformMode,x)}; + ${Uf(t.coordinateTransformMode,x)}; ${(()=>{switch(t.mode){case"nearest":return` - ${Gf(u,a)}; - ${Mf(t.nearestMode,r,x)}; - ${Hf(u,m,a,d,p.length,l.length,g)}; + ${Ff(u,a)}; + ${Vf(t.nearestMode,r,x)}; + ${Lf(u,m,a,d,p.length,l.length,g)}; `;case"linear":return` - ${Wf(m,a,d,p.length,l.length)}; - ${(()=>{if(a.length===2||a.length===4)return`${Lf(u,m,a,g,b)}`;if(a.length===3||a.length===5)return`${qf(u,m,a,g,b)}`;throw Error("Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.")})()}; + ${Gf(m,a,d,p.length,l.length)}; + ${(()=>{if(a.length===2||a.length===4)return`${qf(u,m,a,g,b)}`;if(a.length===3||a.length===5)return`${Kf(u,m,a,g,b)}`;throw Error("Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.")})()}; `;case"cubic":return` - ${(()=>{if(a.length===2||a.length===4)return`${Ff(u,m,a,d,p,l,t.cubicCoeffA,g,t.extrapolationValue,t.excludeOutside)}`;throw Error("Cubic mode only supports input dims 2 and 4 are supported in linear mode.")})()}; + ${(()=>{if(a.length===2||a.length===4)return`${jf(u,m,a,d,p,l,t.cubicCoeffA,g,t.extrapolationValue,t.excludeOutside)}`;throw Error("Cubic mode only 
supports input dims 2 and 4 are supported in linear mode.")})()}; `;default:throw Error("Invalid resize mode")}})()}; `} ${$.registerUniform("output_size","u32").registerUniform("scales","f32",p.length).registerUniform("roi","f32",l.length).declareVariables(u,m)} @@ -2296,7 +2296,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; output[global_idx] = ${t.extrapolationValue}; }`;case"linear":return`output[global_idx] = ${a.length===2||a.length===4?"bilinearInterpolation":"trilinearInterpolation"}(output_indices);`;case"cubic":return"output[global_idx] = bicubicInterpolation(output_indices);";default:throw Error(`Unsupported resize mode: ${t.mode}`)}})()}; `} - }`;return{name:"Resize",shaderCache:{hint:`${t.cacheKey}|${r}|${p.length>0?p:""}|${o.length>0?o:""}|${l.length>0?l:""}|${w}|${a}`,inputDependencies:["rank"]},getShaderSource:_,getRunData:()=>({outputs:[{dims:d,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(h/64)},programUniforms:[{type:12,data:h},{type:1,data:p},{type:1,data:l},...V(a,d)]})}},jf=e=>{let t=e.customDataBuffer;return new Uint32Array(t,t.byteOffset,1)[0]},tl=(e,t)=>{let r=[],n=[],o=[],i=jf(e);if(t.antialias!==0)throw Error("Only default value (0) for Antialias attribute is supported");Bf(e.inputs,t,i,r,n,o),e.compute(Kf(e.inputs[0],t,i,r,n,o),{inputs:[0]})},rl=e=>{let t=e.antialias,r=e.axes,n=e.coordinateTransformMode,o=e.cubicCoeffA,i=e.excludeOutside!==0,a=e.extrapolationValue,l=e.keepAspectRatioPolicy,d=e.mode,p=e.nearestMode===""?"simple":e.nearestMode;return te({antialias:t,axes:r,coordinateTransformMode:n,cubicCoeffA:o,excludeOutside:i,extrapolationValue:a,keepAspectRatioPolicy:l,mode:d,nearestMode:p})}});var Yf,Xf,ol,il=U(()=>{"use strict";Q();ae();Ce();oe();Yf=(e,t)=>{let[r,n,o,i]=e,{numHeads:a,rotaryEmbeddingDim:l}=t;if(r.dims.length!==3&&r.dims.length!==4)throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${r.dims.length}`);if(!k.areEqual(n.dims,[])&&!k.areEqual(n.dims,[1])&&n.dims.length!==2)throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${n.dims.length}`);if(o.dims.length!==2)throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${o.dims.length}`);if(i.dims.length!==2)throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${i.dims.length}`);if(!k.areEqual(o.dims,i.dims))throw new Error("Inputs 'cos_cache' and 'sin_cache' are expected to have the same shape");if(l>0&&a===0)throw new Error("num_heads must be provided if rotary_embedding_dim is specified");let d=r.dims[0],p=r.dims[r.dims.length-2],m=o.dims[0],u=k.sizeFromDimension(r.dims,1)/p,h=l===0?o.dims[1]*2:u/a;if(l>h)throw new Error("rotary_embedding_dim must be less than or equal to head_size");if(n.dims.length===2){if(d!==n.dims[0])throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${n.dims[0]}`);if(p!==n.dims[1])throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${n.dims[1]}`)}if(h/2!==o.dims[1]&&l/2!==o.dims[1])throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${o.dims[1]}`);if(p>m)throw new Error("Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported")},Xf=(e,t)=>{let{interleaved:r,numHeads:n,rotaryEmbeddingDim:o,scale:i}=t,a=e[0].dims[0],l=k.sizeFromDimension(e[0].dims,1),d=e[0].dims[e[0].dims.length-2],p=l/d,m=e[2].dims[1],u=o===0?m*2:p/n,h=new 
Array(a,d,p/u,u-m),w=k.computeStrides(h),g=[{type:1,data:i},{type:12,data:h},{type:12,data:w},...e[0].dims.length===3?new Array({type:12,data:[l,p,u,1]}):[],...e[0].dims.length===4?new Array({type:12,data:[l,u,d*u,1]}):[],...V(e[0].dims,e[1].dims,e[2].dims,e[3].dims,e[0].dims)],b=x=>{let _=E("input",e[0].dataType,e[0].dims.length),$=E("position_ids",e[1].dataType,e[1].dims.length),S=E("cos_cache",e[2].dataType,e[2].dims.length),I=E("sin_cache",e[3].dataType,e[3].dims.length),T=M("output",e[0].dataType,e[0].dims.length);return x.registerUniforms([{name:"scale",type:"f32"},{name:"global_shape",type:"u32",length:h.length},{name:"global_strides",type:"u32",length:w.length},{name:"input_output_strides",type:"u32",length:w.length}]),` + }`;return{name:"Resize",shaderCache:{hint:`${t.cacheKey}|${r}|${p.length>0?p:""}|${o.length>0?o:""}|${l.length>0?l:""}|${w}|${a}`,inputDependencies:["rank"]},getShaderSource:_,getRunData:()=>({outputs:[{dims:d,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(h/64)},programUniforms:[{type:12,data:h},{type:1,data:p},{type:1,data:l},...V(a,d)]})}},Xf=e=>{let t=e.customDataBuffer;return new Uint32Array(t,t.byteOffset,1)[0]},tl=(e,t)=>{let r=[],n=[],o=[],i=Xf(e);if(t.antialias!==0)throw Error("Only default value (0) for Antialias attribute is supported");Mf(e.inputs,t,i,r,n,o),e.compute(Yf(e.inputs[0],t,i,r,n,o),{inputs:[0]})},rl=e=>{let t=e.antialias,r=e.axes,n=e.coordinateTransformMode,o=e.cubicCoeffA,i=e.excludeOutside!==0,a=e.extrapolationValue,l=e.keepAspectRatioPolicy,d=e.mode,p=e.nearestMode===""?"simple":e.nearestMode;return te({antialias:t,axes:r,coordinateTransformMode:n,cubicCoeffA:o,excludeOutside:i,extrapolationValue:a,keepAspectRatioPolicy:l,mode:d,nearestMode:p})}});var Zf,Qf,ol,il=U(()=>{"use strict";Q();ae();Ce();oe();Zf=(e,t)=>{let[r,n,o,i]=e,{numHeads:a,rotaryEmbeddingDim:l}=t;if(r.dims.length!==3&&r.dims.length!==4)throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${r.dims.length}`);if(!k.areEqual(n.dims,[])&&!k.areEqual(n.dims,[1])&&n.dims.length!==2)throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${n.dims.length}`);if(o.dims.length!==2)throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${o.dims.length}`);if(i.dims.length!==2)throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${i.dims.length}`);if(!k.areEqual(o.dims,i.dims))throw new Error("Inputs 'cos_cache' and 'sin_cache' are expected to have the same shape");if(l>0&&a===0)throw new Error("num_heads must be provided if rotary_embedding_dim is specified");let d=r.dims[0],p=r.dims[r.dims.length-2],m=o.dims[0],u=k.sizeFromDimension(r.dims,1)/p,h=l===0?o.dims[1]*2:u/a;if(l>h)throw new Error("rotary_embedding_dim must be less than or equal to head_size");if(n.dims.length===2){if(d!==n.dims[0])throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${n.dims[0]}`);if(p!==n.dims[1])throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${n.dims[1]}`)}if(h/2!==o.dims[1]&&l/2!==o.dims[1])throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${o.dims[1]}`);if(p>m)throw new Error("Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported")},Qf=(e,t)=>{let{interleaved:r,numHeads:n,rotaryEmbeddingDim:o,scale:i}=t,a=e[0].dims[0],l=k.sizeFromDimension(e[0].dims,1),d=e[0].dims[e[0].dims.length-2],p=l/d,m=e[2].dims[1],u=o===0?m*2:p/n,h=new 
Array(a,d,p/u,u-m),w=k.computeStrides(h),g=[{type:1,data:i},{type:12,data:h},{type:12,data:w},...e[0].dims.length===3?new Array({type:12,data:[l,p,u,1]}):[],...e[0].dims.length===4?new Array({type:12,data:[l,u,d*u,1]}):[],...V(e[0].dims,e[1].dims,e[2].dims,e[3].dims,e[0].dims)],b=x=>{let _=E("input",e[0].dataType,e[0].dims.length),$=E("position_ids",e[1].dataType,e[1].dims.length),S=E("cos_cache",e[2].dataType,e[2].dims.length),I=E("sin_cache",e[3].dataType,e[3].dims.length),T=M("output",e[0].dataType,e[0].dims.length);return x.registerUniforms([{name:"scale",type:"f32"},{name:"global_shape",type:"u32",length:h.length},{name:"global_strides",type:"u32",length:w.length},{name:"input_output_strides",type:"u32",length:w.length}]),` ${x.declareVariables(_,$,S,I,T)} ${x.mainStart(Tt)} @@ -2322,9 +2322,9 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim; ${T.setByOffset("k",_.getByOffset("k"))} } - }`};return{name:"RotaryEmbedding",shaderCache:{hint:te({interleaved:r}).cacheKey,inputDependencies:["rank","rank","rank","rank"]},getShaderSource:b,getRunData:()=>({outputs:[{dims:e[0].dims,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(k.size(h)/Tt)},programUniforms:g})}},ol=(e,t)=>{Yf(e.inputs,t),e.compute(Xf(e.inputs,t))}});var Zf,Qf,al,sl=U(()=>{"use strict";Q();ae();oe();Zf=e=>{if(!e||e.length<3)throw new Error("layerNorm requires at least 3 inputs.");let t=e[0],r=e[1],n=e[2];if(t.dataType!==r.dataType||t.dataType!==n.dataType)throw new Error("All inputs must have the same data type");if(t.dims.length!==3&&t.dims.length!==2)throw new Error("Input must be 2D or 3D");if(r.dims.length!==3&&r.dims.length!==2)throw new Error("Skip must be 2D or 3D");let o=t.dims[t.dims.length-1],i=t.dims[t.dims.length-2];if(r.dims[r.dims.length-1]!==o)throw new Error("Skip must have the same hidden size as input");if(r.dims[r.dims.length-2]!==i)throw new Error("Skip must have the same sequence length as input");if(n.dims.length!==1)throw new Error("Gamma must be 1D");if(n.dims[n.dims.length-1]!==o)throw new Error("Gamma must have the same hidden size as input");if(e.length>3){let a=e[3];if(a.dims.length!==1)throw new Error("Beta must be 1D");if(a.dims[a.dims.length-1]!==o)throw new Error("Beta must have the same hidden size as input")}if(e.length>4){let a=e[4];if(a.dims.length!==1)throw new Error("Bias must be 1D");if(a.dims[a.dims.length-1]!==o)throw new Error("Bias must have the same hidden size as input")}},Qf=(e,t,r,n)=>{let o=t.simplified,i=e[0].dims,a=k.size(i),l=i,d=a,p=i.slice(-1)[0],m=n?i.slice(0,-1).concat(1):[],u=!o&&e.length>3,h=e.length>4,w=n&&r>1,g=n&&r>2,b=r>3,x=64,_=we(p),$=[{type:12,data:d},{type:12,data:_},{type:12,data:p},{type:1,data:t.epsilon}],S=T=>{let A=[{name:"output_size",type:"u32"},{name:"components",type:"u32"},{name:"hidden_size",type:"u32"},{name:"epsilon",type:"f32"}],z=[E("x",e[0].dataType,e[0].dims,_),E("skip",e[1].dataType,e[1].dims,_),E("gamma",e[2].dataType,e[2].dims,_)];u&&z.push(E("beta",e[3].dataType,e[3].dims,_)),h&&z.push(E("bias",e[4].dataType,e[4].dims,_)),z.push(M("output",e[0].dataType,l,_)),w&&z.push(M("mean_output",1,m)),g&&z.push(M("inv_std_output",1,m)),b&&z.push(M("input_skip_bias_sum",e[0].dataType,l,_));let D=he(e[0].dataType),H=he(1,_);return` + 
}`};return{name:"RotaryEmbedding",shaderCache:{hint:te({interleaved:r}).cacheKey,inputDependencies:["rank","rank","rank","rank"]},getShaderSource:b,getRunData:()=>({outputs:[{dims:e[0].dims,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(k.size(h)/Tt)},programUniforms:g})}},ol=(e,t)=>{Zf(e.inputs,t),e.compute(Qf(e.inputs,t))}});var Jf,eh,al,sl=U(()=>{"use strict";Q();ae();oe();Jf=e=>{if(!e||e.length<3)throw new Error("layerNorm requires at least 3 inputs.");let t=e[0],r=e[1],n=e[2];if(t.dataType!==r.dataType||t.dataType!==n.dataType)throw new Error("All inputs must have the same data type");if(t.dims.length!==3&&t.dims.length!==2)throw new Error("Input must be 2D or 3D");if(r.dims.length!==3&&r.dims.length!==2)throw new Error("Skip must be 2D or 3D");let o=t.dims[t.dims.length-1],i=t.dims[t.dims.length-2];if(r.dims[r.dims.length-1]!==o)throw new Error("Skip must have the same hidden size as input");if(r.dims[r.dims.length-2]!==i)throw new Error("Skip must have the same sequence length as input");if(n.dims.length!==1)throw new Error("Gamma must be 1D");if(n.dims[n.dims.length-1]!==o)throw new Error("Gamma must have the same hidden size as input");if(e.length>3){let a=e[3];if(a.dims.length!==1)throw new Error("Beta must be 1D");if(a.dims[a.dims.length-1]!==o)throw new Error("Beta must have the same hidden size as input")}if(e.length>4){let a=e[4];if(a.dims.length!==1)throw new Error("Bias must be 1D");if(a.dims[a.dims.length-1]!==o)throw new Error("Bias must have the same hidden size as input")}},eh=(e,t,r,n)=>{let o=t.simplified,i=e[0].dims,a=k.size(i),l=i,d=a,p=i.slice(-1)[0],m=n?i.slice(0,-1).concat(1):[],u=!o&&e.length>3,h=e.length>4,w=n&&r>1,g=n&&r>2,b=r>3,x=64,_=we(p),$=[{type:12,data:d},{type:12,data:_},{type:12,data:p},{type:1,data:t.epsilon}],S=T=>{let A=[{name:"output_size",type:"u32"},{name:"components",type:"u32"},{name:"hidden_size",type:"u32"},{name:"epsilon",type:"f32"}],D=[E("x",e[0].dataType,e[0].dims,_),E("skip",e[1].dataType,e[1].dims,_),E("gamma",e[2].dataType,e[2].dims,_)];u&&D.push(E("beta",e[3].dataType,e[3].dims,_)),h&&D.push(E("bias",e[4].dataType,e[4].dims,_)),D.push(M("output",e[0].dataType,l,_)),w&&D.push(M("mean_output",1,m)),g&&D.push(M("inv_std_output",1,m)),b&&D.push(M("input_skip_bias_sum",e[0].dataType,l,_));let z=he(e[0].dataType),H=he(1,_);return` - ${T.registerUniforms(A).declareVariables(...z)} + ${T.registerUniforms(A).declareVariables(...D)} var sum_shared : array<${H}, ${x}>; var sum_squared_shared : array<${H}, ${x}>; @@ -2341,12 +2341,12 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; } for (var i: u32 = 0; i < stride; i++) { let skip_value = skip[offset + i]; - let bias_value = ${h?"bias[offset1d + i]":D+"(0.0)"}; + let bias_value = ${h?"bias[offset1d + i]":z+"(0.0)"}; let input_value = x[offset + i]; let value = input_value + skip_value + bias_value; ${b?"input_skip_bias_sum[offset + i] = value;":""} output[offset + i] = value; - let f32_value = ${At(D,_,"value")}; + let f32_value = ${At(z,_,"value")}; sum_shared[ix] += f32_value; sum_squared_shared[ix] += f32_value * f32_value; } @@ -2370,11 +2370,11 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${g?"inv_std_output[global_idx] = inv_std_dev;":""} for (var i: u32 = 0; i < stride; i++) { - output[offset + i] = (output[offset + i] ${o?"":`- ${D}(mean)`}) * - ${D}(inv_std_dev) * gamma[offset1d + i] + output[offset + i] = (output[offset + i] ${o?"":`- ${z}(mean)`}) * + ${z}(inv_std_dev) * gamma[offset1d + i] ${u?"+ beta[offset1d + i]":""}; } - 
}`},I=[{dims:l,dataType:e[0].dataType}];return r>1&&I.push({dims:m,dataType:1}),r>2&&I.push({dims:m,dataType:1}),r>3&&I.push({dims:i,dataType:e[0].dataType}),{name:"SkipLayerNormalization",shaderCache:{hint:`${_};${w};${g};${b}`,inputDependencies:e.map((T,A)=>"type")},getShaderSource:S,getRunData:()=>({outputs:I,dispatchGroup:{x:Math.ceil(d/p)},programUniforms:$})}},al=(e,t)=>{Zf(e.inputs);let n=[0];e.outputCount>1&&n.push(-3),e.outputCount>2&&n.push(-3),e.outputCount>3&&n.push(3),e.compute(Qf(e.inputs,t,e.outputCount,!1),{outputs:n})}});var Jf,on,eh,ul,th,rh,dl,ll,cl=U(()=>{"use strict";Q();ae();Ce();oe();Jf=(e,t)=>{if(!e||e.length<1)throw new Error("too few inputs");if(t.axes.length!==0){if(t.axes.length!==t.starts.length||t.axes.length!==t.ends.length)throw new Error("axes, starts and ends must have the same length")}else if(t.starts.length!==t.ends.length)throw new Error("starts and ends must have the same length");e.slice(1).forEach((r,n)=>{if(e[n+1].dataType!==6&&e[n+1].dataType!==7)throw new Error(`Input ${n} must be an array of int32 or int64`)})},on=(e,t)=>{let r=[];if(e.length>t)if(e[t].dataType===7)e[t].getBigInt64Array().forEach(n=>r.push(Number(n)));else if(e[t].dataType===6)e[t].getInt32Array().forEach(n=>r.push(Number(n)));else throw new Error(`Input ${t} must be an array of int32 or int64`);return r},eh=(e,t)=>{if(e.length>1){let r=on(e,1),n=on(e,2),o=on(e,3);return o.length===0&&(o=[...Array(e[0].dims.length).keys()]),te({starts:r,ends:n,axes:o})}else return t},ul=(e,t,r,n,o)=>{let i=e;return e<0&&(i+=r[n[t]]),o[t]<0?Math.max(0,Math.min(i,r[n[t]]-1)):Math.max(0,Math.min(i,r[n[t]]))},th=(e,t,r)=>`fn calculateInputIndices(output_indices: ${t.type.indices}) -> ${e.type.indices} { + }`},I=[{dims:l,dataType:e[0].dataType}];return r>1&&I.push({dims:m,dataType:1}),r>2&&I.push({dims:m,dataType:1}),r>3&&I.push({dims:i,dataType:e[0].dataType}),{name:"SkipLayerNormalization",shaderCache:{hint:`${_};${w};${g};${b}`,inputDependencies:e.map((T,A)=>"type")},getShaderSource:S,getRunData:()=>({outputs:I,dispatchGroup:{x:Math.ceil(d/p)},programUniforms:$})}},al=(e,t)=>{Jf(e.inputs);let n=[0];e.outputCount>1&&n.push(-3),e.outputCount>2&&n.push(-3),e.outputCount>3&&n.push(3),e.compute(eh(e.inputs,t,e.outputCount,!1),{outputs:n})}});var th,on,rh,ul,nh,oh,dl,ll,cl=U(()=>{"use strict";Q();ae();Ce();oe();th=(e,t)=>{if(!e||e.length<1)throw new Error("too few inputs");if(t.axes.length!==0){if(t.axes.length!==t.starts.length||t.axes.length!==t.ends.length)throw new Error("axes, starts and ends must have the same length")}else if(t.starts.length!==t.ends.length)throw new Error("starts and ends must have the same length");e.slice(1).forEach((r,n)=>{if(e[n+1].dataType!==6&&e[n+1].dataType!==7)throw new Error(`Input ${n} must be an array of int32 or int64`)})},on=(e,t)=>{let r=[];if(e.length>t)if(e[t].dataType===7)e[t].getBigInt64Array().forEach(n=>r.push(Number(n)));else if(e[t].dataType===6)e[t].getInt32Array().forEach(n=>r.push(Number(n)));else throw new Error(`Input ${t} must be an array of int32 or int64`);return r},rh=(e,t)=>{if(e.length>1){let r=on(e,1),n=on(e,2),o=on(e,3);return o.length===0&&(o=[...Array(e[0].dims.length).keys()]),te({starts:r,ends:n,axes:o})}else return t},ul=(e,t,r,n,o)=>{let i=e;return e<0&&(i+=r[n[t]]),o[t]<0?Math.max(0,Math.min(i,r[n[t]]-1)):Math.max(0,Math.min(i,r[n[t]]))},nh=(e,t,r)=>`fn calculateInputIndices(output_indices: ${t.type.indices}) -> ${e.type.indices} { var input_indices: ${e.type.indices}; var carry = 0u; for (var i = ${r.length}; i >= 0; i--) { @@ -2392,15 
+2392,15 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; ${e.indicesSet("input_indices","i","input_index")}; } return input_indices; - }`,rh=(e,t)=>{let r=e[0].dims,n=k.size(r),o=t.axes.length>0?k.normalizeAxes(t.axes,r.length):[...Array(r.length).keys()],i=on(e,4);i.forEach(_=>_!==0||(()=>{throw new Error("step cannot be 0")})),i.length===0&&(i=Array(o.length).fill(1));let a=t.starts.map((_,$)=>ul(_,$,r,o,i)),l=t.ends.map((_,$)=>ul(_,$,r,o,i));if(o.length!==a.length||o.length!==l.length)throw new Error("start, ends and axes should have the same number of elements");if(o.length!==r.length)for(let _=0;_Math.sign(_));i.forEach((_,$,S)=>{if(_<0){let I=(l[$]-a[$])/_,T=a[$],A=T+I*i[$];a[$]=A,l[$]=T,S[$]=-_}});let p=r.slice(0);o.forEach((_,$)=>{p[_]=Math.ceil((l[_]-a[_])/i[_])});let m={dims:p,dataType:e[0].dataType},u=M("output",e[0].dataType,p.length),h=E("input",e[0].dataType,e[0].dims.length),w=k.size(p),g=[{name:"outputSize",type:"u32"},{name:"starts",type:"u32",length:a.length},{name:"signs",type:"i32",length:d.length},{name:"steps",type:"u32",length:i.length}],b=[{type:12,data:w},{type:12,data:a},{type:6,data:d},{type:12,data:i},...V(e[0].dims,p)],x=_=>` + }`,oh=(e,t)=>{let r=e[0].dims,n=k.size(r),o=t.axes.length>0?k.normalizeAxes(t.axes,r.length):[...Array(r.length).keys()],i=on(e,4);i.forEach(_=>_!==0||(()=>{throw new Error("step cannot be 0")})),i.length===0&&(i=Array(o.length).fill(1));let a=t.starts.map((_,$)=>ul(_,$,r,o,i)),l=t.ends.map((_,$)=>ul(_,$,r,o,i));if(o.length!==a.length||o.length!==l.length)throw new Error("start, ends and axes should have the same number of elements");if(o.length!==r.length)for(let _=0;_Math.sign(_));i.forEach((_,$,S)=>{if(_<0){let I=(l[$]-a[$])/_,T=a[$],A=T+I*i[$];a[$]=A,l[$]=T,S[$]=-_}});let p=r.slice(0);o.forEach((_,$)=>{p[_]=Math.ceil((l[_]-a[_])/i[_])});let m={dims:p,dataType:e[0].dataType},u=M("output",e[0].dataType,p.length),h=E("input",e[0].dataType,e[0].dims.length),w=k.size(p),g=[{name:"outputSize",type:"u32"},{name:"starts",type:"u32",length:a.length},{name:"signs",type:"i32",length:d.length},{name:"steps",type:"u32",length:i.length}],b=[{type:12,data:w},{type:12,data:a},{type:6,data:d},{type:12,data:i},...V(e[0].dims,p)],x=_=>` ${_.registerUniforms(g).declareVariables(h,u)} - ${th(h,u,r)} + ${nh(h,u,r)} ${_.mainStart()} ${_.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.outputSize")} let output_indices = ${u.offsetToIndices("global_idx")}; let input_indices = calculateInputIndices(output_indices); ${u.setByOffset("global_idx",h.getByIndices("input_indices"))} - }`;return{name:"Slice",shaderCache:{hint:`${d.length}_${a.length}_${i.length}`,inputDependencies:["rank"]},getShaderSource:x,getRunData:()=>({outputs:[m],dispatchGroup:{x:Math.ceil(n/64)},programUniforms:b})}},dl=(e,t)=>{Jf(e.inputs,t);let r=eh(e.inputs,t);e.compute(rh(e.inputs,r),{inputs:[0]})},ll=e=>{let t=e.starts,r=e.ends,n=e.axes;return te({starts:t,ends:r,axes:n})}});var nh,oh,pl,ml,fl=U(()=>{"use strict";Q();ae();Ce();oe();nh=e=>{if(!e||e.length!==1)throw new Error("Softmax op requires 1 input.")},oh=(e,t)=>{let r=e.dims,n=k.size(r),o=64,i=t.axis;if(i<0&&(i=r.length+i),i_===4?`max(max(${x}.x, ${x}.y), max(${x}.z, ${x}.w))`:_===2?`max(${x}.x, ${x}.y)`:_===3?`max(max(${x}.x, ${x}.y), ${x}.z)`:x,u=E("x",e.dataType,e.dims,d),h=M("result",e.dataType,e.dims,d),w=u.type.value,g=he(e.dataType)==="f32"?`var threadMax = ${w}(-3.402823e+38f);`:`var threadMax = ${w}(-65504.0h);`,b=x=>` + 
}`;return{name:"Slice",shaderCache:{hint:`${d.length}_${a.length}_${i.length}`,inputDependencies:["rank"]},getShaderSource:x,getRunData:()=>({outputs:[m],dispatchGroup:{x:Math.ceil(n/64)},programUniforms:b})}},dl=(e,t)=>{th(e.inputs,t);let r=rh(e.inputs,t);e.compute(oh(e.inputs,r),{inputs:[0]})},ll=e=>{let t=e.starts,r=e.ends,n=e.axes;return te({starts:t,ends:r,axes:n})}});var ih,ah,pl,ml,fl=U(()=>{"use strict";Q();ae();Ce();oe();ih=e=>{if(!e||e.length!==1)throw new Error("Softmax op requires 1 input.")},ah=(e,t)=>{let r=e.dims,n=k.size(r),o=64,i=t.axis;if(i<0&&(i=r.length+i),i_===4?`max(max(${x}.x, ${x}.y), max(${x}.z, ${x}.w))`:_===2?`max(${x}.x, ${x}.y)`:_===3?`max(max(${x}.x, ${x}.y), ${x}.z)`:x,u=E("x",e.dataType,e.dims,d),h=M("result",e.dataType,e.dims,d),w=u.type.value,g=he(e.dataType)==="f32"?`var threadMax = ${w}(-3.402823e+38f);`:`var threadMax = ${w}(-65504.0h);`,b=x=>` var rowMaxShared : ${w}; var rowSumShared : ${w}; var threadShared : array<${w}, ${o}>; @@ -2472,7 +2472,7 @@ ${Pe}_indices[${ue}] = 0;`}),Y+=`${Pe}_indices[${J-2}] = 0u; let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared; setValue(row, col, row_stride, value); } - }`;return{name:"Softmax",shaderCache:{hint:`${d}`,inputDependencies:["type"]},getRunData:()=>({outputs:[{dims:r,dataType:e.dataType}],dispatchGroup:{x:l},programUniforms:[{type:6,data:p}]}),getShaderSource:b}},pl=(e,t)=>{nh(e.inputs),e.compute(oh(e.inputs[0],t))},ml=e=>te({axis:e.axis})});var ih,ah,sh,uh,dh,hl,gl,yl=U(()=>{"use strict";Q();ae();Ce();oe();ih=e=>{if(!e||e.length<1)throw new Error("too few inputs")},ah=(e,t)=>{let r=[],n=t.numOutputs;return e[1].dims[0]>0&&(e[1].getBigInt64Array().forEach(o=>r.push(Number(o))),n=r.length),te({numOutputs:n,axis:t.axis,splitSizes:r})},sh=e=>` + }`;return{name:"Softmax",shaderCache:{hint:`${d}`,inputDependencies:["type"]},getRunData:()=>({outputs:[{dims:r,dataType:e.dataType}],dispatchGroup:{x:l},programUniforms:[{type:6,data:p}]}),getShaderSource:b}},pl=(e,t)=>{ih(e.inputs),e.compute(ah(e.inputs[0],t))},ml=e=>te({axis:e.axis})});var sh,uh,dh,lh,ch,hl,gl,yl=U(()=>{"use strict";Q();ae();Ce();oe();sh=e=>{if(!e||e.length<1)throw new Error("too few inputs")},uh=(e,t)=>{let r=[],n=t.numOutputs;return e[1].dims[0]>0&&(e[1].getBigInt64Array().forEach(o=>r.push(Number(o))),n=r.length),te({numOutputs:n,axis:t.axis,splitSizes:r})},dh=e=>` fn calculateOutputIndex(index: u32) -> u32 { for (var i: u32 = 0u; i < ${e}u; i += 1u ) { if (index < ${G("uniforms.size_in_split_axis","i",e)}) { @@ -2480,14 +2480,14 @@ fn calculateOutputIndex(index: u32) -> u32 { } } return ${e}u; -}`,uh=e=>{let t=e.length,r=[];for(let n=0;n{let t=e.length,r=[];for(let n=0;n{let r=e[0].dims,n=k.size(r),o=e[0].dataType,i=k.normalizeAxis(t.axis,r.length),a=new Array(t.numOutputs),l=E("input",o,r.length),d=new Array(t.numOutputs),p=[],m=[],u=0,h=[{type:12,data:n}];for(let g=0;g` + }`},ch=(e,t)=>{let r=e[0].dims,n=k.size(r),o=e[0].dataType,i=k.normalizeAxis(t.axis,r.length),a=new Array(t.numOutputs),l=E("input",o,r.length),d=new Array(t.numOutputs),p=[],m=[],u=0,h=[{type:12,data:n}];for(let g=0;g` ${g.registerUniform("input_size","u32").registerUniform("size_in_split_axis","u32",d.length).declareVariables(l,...a)} - ${sh(d.length)} - ${uh(a)} + ${dh(d.length)} + ${lh(a)} ${g.mainStart()} ${g.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.input_size")} @@ -2500,7 +2500,7 @@ fn calculateOutputIndex(index: u32) -> u32 { ${l.indicesSet("indices",i,"index")}; } writeBufferData(output_number, indices, global_idx); - 
}`;return{name:"Split",shaderCache:{hint:t.cacheKey,inputDependencies:["rank"]},getShaderSource:w,getRunData:()=>({outputs:p,dispatchGroup:{x:Math.ceil(n/64)},programUniforms:h})}},hl=(e,t)=>{ih(e.inputs);let r=e.inputs.length===1?t:ah(e.inputs,t);e.compute(dh(e.inputs,r),{inputs:[0]})},gl=e=>{let t=e.axis,r=e.splitSizes,n=e.numOutputs<0?r.length:e.numOutputs;if(n!==r.length)throw new Error("numOutputs and splitSizes lengh must be equal");return te({axis:t,numOutputs:n,splitSizes:r})}});var lh,ch,bl,wl=U(()=>{"use strict";Q();ae();oe();lh=(e,t,r,n,o)=>{let i=M("output_data",o,r.length,4),a=E("a_data",t[1].dataType,t[1].dims.length,4),l=E("b_data",t[2].dataType,t[2].dims.length,4),d=E("c_data",t[0].dataType,t[0].dims.length,4),p,m=(u,h,w)=>`select(${h}, ${u}, ${w})`;if(!n)p=i.setByOffset("global_idx",m(a.getByOffset("global_idx"),l.getByOffset("global_idx"),d.getByOffset("global_idx")));else{let u=(h,w,g="")=>{let b=`a_data[index_a${w}][component_a${w}]`,x=`b_data[index_b${w}][component_b${w}]`,_=`bool(c_data[index_c${w}] & (0xffu << (component_c${w} * 8)))`;return` + }`;return{name:"Split",shaderCache:{hint:t.cacheKey,inputDependencies:["rank"]},getShaderSource:w,getRunData:()=>({outputs:p,dispatchGroup:{x:Math.ceil(n/64)},programUniforms:h})}},hl=(e,t)=>{sh(e.inputs);let r=e.inputs.length===1?t:uh(e.inputs,t);e.compute(ch(e.inputs,r),{inputs:[0]})},gl=e=>{let t=e.axis,r=e.splitSizes,n=e.numOutputs<0?r.length:e.numOutputs;if(n!==r.length)throw new Error("numOutputs and splitSizes lengh must be equal");return te({axis:t,numOutputs:n,splitSizes:r})}});var ph,mh,bl,wl=U(()=>{"use strict";Q();ae();oe();ph=(e,t,r,n,o)=>{let i=M("output_data",o,r.length,4),a=E("a_data",t[1].dataType,t[1].dims.length,4),l=E("b_data",t[2].dataType,t[2].dims.length,4),d=E("c_data",t[0].dataType,t[0].dims.length,4),p,m=(u,h,w)=>`select(${h}, ${u}, ${w})`;if(!n)p=i.setByOffset("global_idx",m(a.getByOffset("global_idx"),l.getByOffset("global_idx"),d.getByOffset("global_idx")));else{let u=(h,w,g="")=>{let b=`a_data[index_a${w}][component_a${w}]`,x=`b_data[index_b${w}][component_b${w}]`,_=`bool(c_data[index_c${w}] & (0xffu << (component_c${w} * 8)))`;return` let output_indices${w} = ${i.offsetToIndices(`global_idx * 4u + ${w}u`)}; let offset_a${w} = ${a.broadcastedIndicesToOffset(`output_indices${w}`,i)}; let offset_b${w} = ${l.broadcastedIndicesToOffset(`output_indices${w}`,i)}; @@ -2528,10 +2528,10 @@ fn calculateOutputIndex(index: u32) -> u32 { ${e.mainStart()} ${e.guardAgainstOutOfBoundsWorkgroupSizes("uniforms.vec_size")} ${p} - }`},ch=e=>{let t=e[1].dims,r=e[2].dims,n=e[0].dims,o=e[1].dataType,i=!(k.areEqual(t,r)&&k.areEqual(r,n)),a=t,l=k.size(t);if(i){let p=tt.calcShape(tt.calcShape(t,r,!1),n,!1);if(!p)throw new Error("Can't perform where op on the given tensors");a=p,l=k.size(a)}let d=Math.ceil(l/4);return{name:"Where",shaderCache:{inputDependencies:["rank","rank","rank"]},getShaderSource:p=>lh(p,e,a,i,o),getRunData:()=>({outputs:[{dims:a,dataType:o}],dispatchGroup:{x:Math.ceil(l/64/4)},programUniforms:[{type:12,data:d},...V(n,t,r,a)]})}},bl=e=>{e.compute(ch(e.inputs))}});var vl,$l=U(()=>{"use strict";_s();qr();Is();Ts();pu();xu();Cu();uo();Lu();Ku();Xu();td();od();ad();dd();pd();hd();Id();Td();Ed();co();Od();vo();Dd();Zd();Jd();Lr();nl();il();sl();cl();fl();yl();_o();Et();jr();wl();vl=new 
Map([["Abs",[As]],["Acos",[Es]],["Acosh",[ks]],["Add",[mu]],["ArgMax",[$s,no]],["ArgMin",[vs,no]],["Asin",[Ps]],["Asinh",[Os]],["Atan",[zs]],["Atanh",[Ds]],["Attention",[xs]],["AveragePool",[Hd,Wd]],["BatchNormalization",[Ss]],["BiasAdd",[Cs]],["BiasSplitGelu",[cu]],["Cast",[Rs,Bs]],["Ceil",[Us]],["Clip",[Ms]],["Concat",[Su,Iu]],["Conv",[ho,fo]],["ConvTranspose",[Gu,Hu]],["Cos",[Vs]],["Cosh",[Ns]],["CumSum",[Fu,qu]],["DepthToSpace",[ju,Yu]],["Div",[fu]],["Einsum",[Ju,ed]],["Elu",[Ws,Zt]],["Equal",[hu]],["Erf",[Hs]],["Exp",[Gs]],["Expand",[nd]],["FastGelu",[id]],["Floor",[Ls]],["FusedConv",[ho,fo]],["Gather",[ud,sd]],["GatherElements",[cd,ld]],["Gelu",[Fs]],["Gemm",[fd,md]],["GlobalAveragePool",[Fd,Ld]],["GlobalMaxPool",[Xd,Yd]],["Greater",[wu]],["GreaterOrEqual",[$u]],["GroupQueryAttention",[Sd,xd]],["HardSigmoid",[Js,Qs]],["InstanceNormalization",[Cd]],["LayerNormalization",[Ad]],["LeakyRelu",[qs,Zt]],["Less",[vu]],["LessOrEqual",[_u]],["Log",[uu]],["MatMul",[Mu]],["MatMulNBits",[kd,Pd]],["MaxPool",[Kd,jd]],["Mul",[gu]],["MultiHeadAttention",[bd,yd]],["Neg",[js]],["Not",[Ks]],["Pad",[zd]],["Pow",[yu]],["QuickGelu",[du,Zt]],["Range",[Qd]],["Reciprocal",[Ys]],["ReduceMin",[fs]],["ReduceMean",[ds]],["ReduceMax",[ms]],["ReduceSum",[gs]],["ReduceProd",[hs]],["ReduceL1",[ls]],["ReduceL2",[cs]],["ReduceLogSum",[bs]],["ReduceLogSumExp",[ps]],["ReduceSumSquare",[ys]],["Relu",[Xs]],["Resize",[tl,rl]],["RotaryEmbedding",[ol]],["Sigmoid",[Zs]],["Sin",[eu]],["Sinh",[tu]],["Slice",[dl,ll]],["SkipLayerNormalization",[al]],["Split",[hl,gl]],["Sqrt",[ru]],["Softmax",[pl,ml]],["Sub",[bu]],["Tan",[nu]],["Tanh",[iu]],["ThresholdedRelu",[su,Zt]],["Tile",[vd]],["Transpose",[Xa,Za]],["Where",[bl]]])});var an,_l=U(()=>{"use strict";Le();dt();oe();an=class{constructor(t){this.backend=t;this.repo=new Map,this.attributesBound=!1}getArtifact(t){return this.repo.get(t)}setArtifact(t,r){this.repo.set(t,r)}run(t,r,n,o,i){We(t.programInfo.name);let a=this.backend.device,l=this.backend.getComputePassEncoder();this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2);let d=[];for(let m of r)d.push({binding:d.length,resource:{buffer:m.buffer}});for(let m of n)d.push({binding:d.length,resource:{buffer:m.buffer}});i&&d.push({binding:d.length,resource:i});let p=a.createBindGroup({layout:t.computePipeline.getBindGroupLayout(0),entries:d,label:t.programInfo.name});if(this.backend.sessionStatus==="capturing"){let m={kernelId:this.backend.currentKernelId,computePipeline:t.computePipeline,bindGroup:p,dispatchGroup:o};this.backend.capturedCommandList.get(this.backend.currentSessionId).push(m)}l.setPipeline(t.computePipeline),l.setBindGroup(0,p),l.dispatchWorkgroups(...o),this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2+1),this.backend.pendingDispatchNumber++,(this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber||this.backend.queryType==="at-passes")&&this.backend.endComputePass(),this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber&&this.backend.flush(),Me(t.programInfo.name)}dispose(){}build(t,r){We(t.name);let n=this.backend.device,o=[];n.features.has("shader-f16")&&o.push("enable f16;");let i=ja(r,this.backend.device.limits),a=t.getShaderSource(i),l=`${o.join(` + }`},mh=e=>{let t=e[1].dims,r=e[2].dims,n=e[0].dims,o=e[1].dataType,i=!(k.areEqual(t,r)&&k.areEqual(r,n)),a=t,l=k.size(t);if(i){let p=tt.calcShape(tt.calcShape(t,r,!1),n,!1);if(!p)throw new Error("Can't perform where op on the given tensors");a=p,l=k.size(a)}let 
d=Math.ceil(l/4);return{name:"Where",shaderCache:{inputDependencies:["rank","rank","rank"]},getShaderSource:p=>ph(p,e,a,i,o),getRunData:()=>({outputs:[{dims:a,dataType:o}],dispatchGroup:{x:Math.ceil(l/64/4)},programUniforms:[{type:12,data:d},...V(n,t,r,a)]})}},bl=e=>{e.compute(mh(e.inputs))}});var vl,$l=U(()=>{"use strict";_s();qr();Is();Ts();pu();xu();Cu();lo();Lu();ju();Xu();td();od();ad();dd();pd();hd();Id();Td();Ed();po();Od();$o();zd();Zd();Jd();Lr();nl();il();sl();cl();fl();yl();xo();Et();Kr();wl();vl=new Map([["Abs",[As]],["Acos",[Es]],["Acosh",[ks]],["Add",[mu]],["ArgMax",[$s,oo]],["ArgMin",[vs,oo]],["Asin",[Ps]],["Asinh",[Os]],["Atan",[Ds]],["Atanh",[zs]],["Attention",[xs]],["AveragePool",[Hd,Wd]],["BatchNormalization",[Ss]],["BiasAdd",[Cs]],["BiasSplitGelu",[cu]],["Cast",[Rs,Bs]],["Ceil",[Us]],["Clip",[Ms]],["Concat",[Su,Iu]],["Conv",[go,ho]],["ConvTranspose",[Gu,Hu]],["Cos",[Vs]],["Cosh",[Ns]],["CumSum",[Fu,qu]],["DepthToSpace",[Ku,Yu]],["Div",[fu]],["Einsum",[Ju,ed]],["Elu",[Ws,Zt]],["Equal",[hu]],["Erf",[Hs]],["Exp",[Gs]],["Expand",[nd]],["FastGelu",[id]],["Floor",[Ls]],["FusedConv",[go,ho]],["Gather",[ud,sd]],["GatherElements",[cd,ld]],["Gelu",[Fs]],["Gemm",[fd,md]],["GlobalAveragePool",[Fd,Ld]],["GlobalMaxPool",[Xd,Yd]],["Greater",[wu]],["GreaterOrEqual",[$u]],["GroupQueryAttention",[Sd,xd]],["HardSigmoid",[Js,Qs]],["InstanceNormalization",[Cd]],["LayerNormalization",[Ad]],["LeakyRelu",[qs,Zt]],["Less",[vu]],["LessOrEqual",[_u]],["Log",[uu]],["MatMul",[Mu]],["MatMulNBits",[kd,Pd]],["MaxPool",[jd,Kd]],["Mul",[gu]],["MultiHeadAttention",[bd,yd]],["Neg",[Ks]],["Not",[js]],["Pad",[Dd]],["Pow",[yu]],["QuickGelu",[du,Zt]],["Range",[Qd]],["Reciprocal",[Ys]],["ReduceMin",[fs]],["ReduceMean",[ds]],["ReduceMax",[ms]],["ReduceSum",[gs]],["ReduceProd",[hs]],["ReduceL1",[ls]],["ReduceL2",[cs]],["ReduceLogSum",[bs]],["ReduceLogSumExp",[ps]],["ReduceSumSquare",[ys]],["Relu",[Xs]],["Resize",[tl,rl]],["RotaryEmbedding",[ol]],["Sigmoid",[Zs]],["Sin",[eu]],["Sinh",[tu]],["Slice",[dl,ll]],["SkipLayerNormalization",[al]],["Split",[hl,gl]],["Sqrt",[ru]],["Softmax",[pl,ml]],["Sub",[bu]],["Tan",[nu]],["Tanh",[iu]],["ThresholdedRelu",[su,Zt]],["Tile",[vd]],["Transpose",[Xa,Za]],["Where",[bl]]])});var an,_l=U(()=>{"use strict";Le();dt();oe();an=class{constructor(t){this.backend=t;this.repo=new Map,this.attributesBound=!1}getArtifact(t){return this.repo.get(t)}setArtifact(t,r){this.repo.set(t,r)}run(t,r,n,o,i){We(t.programInfo.name);let a=this.backend.device,l=this.backend.getComputePassEncoder();this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2);let d=[];for(let m of r)d.push({binding:d.length,resource:{buffer:m.buffer}});for(let m of n)d.push({binding:d.length,resource:{buffer:m.buffer}});i&&d.push({binding:d.length,resource:i});let p=a.createBindGroup({layout:t.computePipeline.getBindGroupLayout(0),entries:d,label:t.programInfo.name});if(this.backend.sessionStatus==="capturing"){let 
m={kernelId:this.backend.currentKernelId,computePipeline:t.computePipeline,bindGroup:p,dispatchGroup:o};this.backend.capturedCommandList.get(this.backend.currentSessionId).push(m)}l.setPipeline(t.computePipeline),l.setBindGroup(0,p),l.dispatchWorkgroups(...o),this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2+1),this.backend.pendingDispatchNumber++,(this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber||this.backend.queryType==="at-passes")&&this.backend.endComputePass(),this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber&&this.backend.flush(),Me(t.programInfo.name)}dispose(){}build(t,r){We(t.name);let n=this.backend.device,o=[];n.features.has("shader-f16")&&o.push("enable f16;");let i=Ka(r,this.backend.device.limits),a=t.getShaderSource(i),l=`${o.join(` `)} ${i.additionalImplementations} -${a}`,d=n.createShaderModule({code:l,label:t.name});be("verbose",()=>`[WebGPU] ${t.name} shader code: ${l}`);let p=n.createComputePipeline({compute:{module:d,entryPoint:"main"},layout:"auto",label:t.name});return Me(t.name),{programInfo:t,computePipeline:p,uniformVariablesInfo:i.variablesInfo}}normalizeDispatchGroupSize(t){let r=typeof t=="number"?t:t.x,n=typeof t=="number"?1:t.y||1,o=typeof t=="number"?1:t.z||1,i=this.backend.device.limits.maxComputeWorkgroupsPerDimension;if(r<=i&&n<=i&&o<=i)return[r,n,o];let a=r*n*o,l=Math.ceil(Math.sqrt(a));if(l>i){if(l=Math.ceil(Math.cbrt(a)),l>i)throw new Error("Total dispatch size exceeds WebGPU maximum.");return[l,l,l]}else return[l,l,1]}}});var ph,mh,xo,sn,xl=U(()=>{"use strict";Le();Q();dt();Ha();Ka();$l();_l();ph=(e,t)=>{if(t.length!==e.length)throw new Error(`inputDependencies length ${t.length} is not equal to inputTensors length ${e.length}.`);let r=[];for(let n=0;n{let n=e.name;return e.shaderCache?.hint&&(n+="["+e.shaderCache.hint+"]"),n+=":"+r+`:${ph(t,e.shaderCache?.inputDependencies??new Array(t.length).fill("dims"))}`,n},xo=class{constructor(t){t&&(this.architecture=t.architecture,this.vendor=t.vendor)}isArchitecture(t){return this.architecture===t}isVendor(t){return this.vendor===t}},sn=class{constructor(){this.currentSessionId=null;this.currentKernelId=null;this.commandEncoder=null;this.computePassEncoder=null;this.maxDispatchNumber=16;this.pendingDispatchNumber=0;this.pendingKernels=[];this.pendingQueries=new Map;this.sessionStatus="default";this.capturedCommandList=new Map;this.capturedPendingKernels=new Map;this.sessionExternalDataMapping=new Map}get currentKernelCustomData(){if(this.currentKernelId===null)throw new Error("currentKernelCustomData(): currentKernelId is null. 
(should not happen)");let t=this.kernelCustomData.get(this.currentKernelId);return t||(t={},this.kernelCustomData.set(this.currentKernelId,t)),t}async initialize(t,r){this.env=t;let n=[],o={requiredLimits:{maxComputeWorkgroupStorageSize:r.limits.maxComputeWorkgroupStorageSize,maxComputeWorkgroupsPerDimension:r.limits.maxComputeWorkgroupsPerDimension,maxStorageBufferBindingSize:r.limits.maxStorageBufferBindingSize,maxBufferSize:r.limits.maxBufferSize,maxComputeInvocationsPerWorkgroup:r.limits.maxComputeInvocationsPerWorkgroup,maxComputeWorkgroupSizeX:r.limits.maxComputeWorkgroupSizeX,maxComputeWorkgroupSizeY:r.limits.maxComputeWorkgroupSizeY,maxComputeWorkgroupSizeZ:r.limits.maxComputeWorkgroupSizeZ},requiredFeatures:n};r.features.has("chromium-experimental-timestamp-query-inside-passes")?n.push("chromium-experimental-timestamp-query-inside-passes"):r.features.has("timestamp-query")&&n.push("timestamp-query"),r.features.has("shader-f16")&&n.push("shader-f16"),this.device=await r.requestDevice(o),this.adapterInfo=new xo(r.info||await r.requestAdapterInfo()),this.gpuDataManager=qa(this),this.programManager=new an(this),this.kernels=new Map,this.kernelPersistentData=new Map,this.kernelCustomData=new Map,Na(t.logLevel,!!t.debug),this.device.onuncapturederror=i=>{i.error instanceof GPUValidationError&&console.error(`An uncaught WebGPU validation error was raised: ${i.error.message}`)},Object.defineProperty(this.env.webgpu,"device",{value:this.device,writable:!1,enumerable:!0,configurable:!1}),Object.defineProperty(this.env.webgpu,"adapter",{value:r,writable:!1,enumerable:!0,configurable:!1}),this.setQueryType()}dispose(){typeof this.querySet<"u"&&this.querySet.destroy(),this.gpuDataManager.dispose()}getCommandEncoder(){return this.commandEncoder||(this.commandEncoder=this.device.createCommandEncoder()),this.commandEncoder}getComputePassEncoder(){if(!this.computePassEncoder){let t=this.getCommandEncoder(),r={};this.queryType==="at-passes"&&(r.timestampWrites={querySet:this.querySet,beginningOfPassWriteIndex:this.pendingDispatchNumber*2,endOfPassWriteIndex:this.pendingDispatchNumber*2+1}),this.computePassEncoder=t.beginComputePass(r)}return this.computePassEncoder}endComputePass(){this.computePassEncoder&&(this.computePassEncoder.end(),this.computePassEncoder=null)}flush(){if(!this.commandEncoder)return;We(),this.endComputePass();let t;this.queryType!=="none"&&(this.commandEncoder.resolveQuerySet(this.querySet,0,this.pendingDispatchNumber*2,this.queryResolveBuffer,0),t=this.device.createBuffer({size:this.pendingDispatchNumber*2*8,usage:GPUBufferUsage.MAP_READ|GPUBufferUsage.COPY_DST}),this.pendingQueries.set(t,this.pendingKernels),this.pendingKernels=[],this.commandEncoder.copyBufferToBuffer(this.queryResolveBuffer,0,t,0,this.pendingDispatchNumber*2*8)),this.device.queue.submit([this.commandEncoder.finish()]),this.gpuDataManager.refreshPendingBuffers(),this.commandEncoder=null,this.pendingDispatchNumber=0,this.queryType!=="none"&&t.mapAsync(GPUMapMode.READ).then(()=>{let r=new BigUint64Array(t.getMappedRange()),n=this.pendingQueries.get(t);for(let o=0;o"u"&&(this.queryTimeBase=w);let b=Number(w-this.queryTimeBase),x=Number(g-this.queryTimeBase);if(!Number.isSafeInteger(b)||!Number.isSafeInteger(x))throw new RangeError("incorrect timestamp 
range");if(this.env.webgpu.profiling?.ondata)this.env.webgpu.profiling.ondata({version:1,inputsMetadata:u.map(_=>({dims:_.dims,dataType:ht(_.dataType)})),outputsMetadata:h.map(_=>({dims:_.dims,dataType:ht(_.dataType)})),kernelId:a,kernelType:d,kernelName:p,programName:m,startTime:b,endTime:x});else{let _="";u.forEach((S,I)=>{_+=`input[${I}]: [${S.dims}] | ${ht(S.dataType)}, `});let $="";h.forEach((S,I)=>{$+=`output[${I}]: [${S.dims}] | ${ht(S.dataType)}, `}),console.log(`[profiling] kernel "${a}|${d}|${p}|${m}" ${_}${$}execution time: ${x-b} ns`)}xr("GPU",`${m}::${w}::${g}`)}t.unmap(),this.pendingQueries.delete(t)}),Me()}run(t,r,n,o,i,a){We(t.name);let l=[];for(let S=0;SI):n;if(u.length!==d.length)throw new Error(`Output size ${u.length} must be equal to ${d.length}.`);let h=[],w=[];for(let S=0;S=a)throw new Error(`Invalid output index: ${u[S]}`);if(u[S]===-3)continue;let I=u[S]===-1,T=u[S]===-2,A=I||T?i(d[S].dataType,d[S].dims):o(u[S],d[S].dataType,d[S].dims);if(h.push(A),A.data===0)continue;let z=this.gpuDataManager.get(A.data);if(!z)throw new Error(`no GPU data for output: ${A.data}`);if(I&&this.temporaryData.push(z),T){let D=this.kernelPersistentData.get(this.currentKernelId);D||(D=[],this.kernelPersistentData.set(this.currentKernelId,D)),D.push(z)}w.push(z)}if(l.length!==r.length||w.length!==h.length){if(w.length===0)return Me(t.name),h;throw new Error(`Program ${t.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`)}let g;if(m){let S=0,I=[];m.forEach(D=>{let H=typeof D.data=="number"?[D.data]:D.data;if(H.length===0)return;let W=D.type===10?2:4,F,de;D.type===10?(de=H.length>4?16:H.length>2?8:H.length*W,F=H.length>4?16:W*H.length):(de=H.length<=2?H.length*W:16,F=16),S=Math.ceil(S/de)*de,I.push(S);let ce=D.type===10?8:4;S+=H.length>4?Math.ceil(H.length/ce)*F:H.length*W});let T=16;S=Math.ceil(S/T)*T;let A=new ArrayBuffer(S);m.forEach((D,H)=>{let W=I[H],F=typeof D.data=="number"?[D.data]:D.data;if(D.type===6)new Int32Array(A,W,F.length).set(F);else if(D.type===12)new Uint32Array(A,W,F.length).set(F);else if(D.type===10)new Uint16Array(A,W,F.length).set(F);else if(D.type===1)new Float32Array(A,W,F.length).set(F);else throw new Error(`Unsupported uniform type: ${ht(D.type)}`)});let z=this.gpuDataManager.create(S,GPUBufferUsage.COPY_DST|GPUBufferUsage.UNIFORM);this.device.queue.writeBuffer(z.buffer,0,A,0,S),this.gpuDataManager.release(z.id),g={offset:0,size:S,buffer:z.buffer}}let b=this.programManager.normalizeDispatchGroupSize(p),x=b[1]===1&&b[2]===1,_=mh(t,r,x),$=this.programManager.getArtifact(_);if($||($=this.programManager.build(t,b),this.programManager.setArtifact(_,$),be("info",()=>`[artifact] key: ${_}, programName: ${t.name}`)),m&&$.uniformVariablesInfo){if(m.length!==$.uniformVariablesInfo.length)throw new Error(`Uniform variables count mismatch: expect ${$.uniformVariablesInfo.length}, got ${m.length} in program "${$.programInfo.name}".`);for(let S=0;S`[ProgramManager] run "${t.name}" (key=${_}) with ${b[0]}x${b[1]}x${b[2]}`),this.queryType!=="none"||this.sessionStatus==="capturing"){let S={kernelId:this.currentKernelId,programName:$.programInfo.name,inputTensorViews:r,outputTensorViews:h};this.pendingKernels.push(S),this.sessionStatus==="capturing"&&this.capturedPendingKernels.get(this.currentSessionId).push(S)}return this.programManager.run($,l,w,b,g),Me(t.name),h}upload(t,r){this.gpuDataManager.upload(t,r)}memcpy(t,r){this.gpuDataManager.memcpy(t,r)}async download(t,r){await this.gpuDataManager.download(t,r)}alloc(t){return 
this.gpuDataManager.create(t).id}free(t){return this.gpuDataManager.release(t)}createKernel(t,r,n,o){let i=vl.get(t);if(!i)throw new Error(`kernel not implemented: ${t}`);let a={kernelType:t,kernelName:o,kernelEntry:i[0],attributes:[i[1],n]};this.kernels.set(r,a)}releaseKernel(t){let r=this.kernelPersistentData.get(t);if(r){for(let n of r)this.gpuDataManager.release(n.id);this.kernelPersistentData.delete(t)}this.kernelCustomData.delete(t),this.kernels.delete(t)}computeKernel(t,r,n){let o=this.kernels.get(t);if(!o)throw new Error(`kernel not created: ${t}`);let i=o.kernelType,a=o.kernelName,l=o.kernelEntry,d=o.attributes;if(this.currentKernelId!==null)throw new Error(`kernel "[${i}] ${a}" is not allowed to be called recursively`);this.currentKernelId=t,d[0]&&(d[1]=d[0](d[1]),d[0]=void 0),be("info",()=>`[WebGPU] Start to run kernel "[${i}] ${a}"...`);let p=this.env.debug;this.temporaryData=[];try{return p&&this.device.pushErrorScope("validation"),l(r,d[1]),0}catch(m){return n.push(Promise.resolve(`[WebGPU] Kernel "[${i}] ${a}" failed. ${m}`)),1}finally{p&&n.push(this.device.popErrorScope().then(m=>m?`GPU validation error for kernel "[${i}] ${a}": ${m.message}`:null));for(let m of this.temporaryData)this.gpuDataManager.release(m.id);this.temporaryData=[],this.currentKernelId=null}}registerBuffer(t,r,n,o){let i=this.sessionExternalDataMapping.get(t);i||(i=new Map,this.sessionExternalDataMapping.set(t,i));let a=i.get(r),l=this.gpuDataManager.registerExternalBuffer(n,o,a?.[1]);return i.set(r,[l,n]),l}unregisterBuffers(t){let r=this.sessionExternalDataMapping.get(t);r&&(r.forEach(n=>this.gpuDataManager.unregisterExternalBuffer(n[1])),this.sessionExternalDataMapping.delete(t))}getBuffer(t){let r=this.gpuDataManager.get(t);if(!r)throw new Error(`no GPU data for buffer: ${t}`);return r.buffer}createDownloader(t,r,n){return async()=>{let o=await Xn(this,t,r);return Wa(o.buffer,n)}}writeTimestamp(t){this.queryType==="inside-passes"&&this.computePassEncoder.writeTimestamp(this.querySet,t)}setQueryType(){this.queryType="none",(this.env.webgpu.profiling?.mode==="default"||(typeof this.env.trace>"u"?this.env.wasm.trace:this.env.trace))&&(this.device.features.has("chromium-experimental-timestamp-query-inside-passes")?this.queryType="inside-passes":this.device.features.has("timestamp-query")&&(this.queryType="at-passes"),this.queryType!=="none"&&typeof this.querySet>"u"&&(this.querySet=this.device.createQuerySet({type:"timestamp",count:this.maxDispatchNumber*2}),this.queryResolveBuffer=this.device.createBuffer({size:this.maxDispatchNumber*2*8,usage:GPUBufferUsage.COPY_SRC|GPUBufferUsage.QUERY_RESOLVE})))}captureBegin(){be("info","captureBegin"),this.capturedCommandList.get(this.currentSessionId)||this.capturedCommandList.set(this.currentSessionId,[]),this.capturedPendingKernels.get(this.currentSessionId)||this.capturedPendingKernels.set(this.currentSessionId,[]),this.flush(),this.sessionStatus="capturing"}captureEnd(){be("info","captureEnd"),this.flush(),this.sessionStatus="default"}replay(){be("info","replay"),this.sessionStatus="replaying";let t=this.capturedCommandList.get(this.currentSessionId),r=this.capturedPendingKernels.get(this.currentSessionId),n=t.length;this.pendingKernels=[];for(let 
o=0;o=this.maxDispatchNumber||this.queryType==="at-passes")&&this.endComputePass(),this.pendingDispatchNumber>=this.maxDispatchNumber&&this.flush()}this.flush(),this.sessionStatus="default"}onReleaseSession(t){this.unregisterBuffers(t),this.capturedCommandList.has(t)&&this.capturedCommandList.delete(t),this.capturedPendingKernels.has(t)&&this.capturedPendingKernels.delete(t),this.gpuDataManager.onReleaseSession(t)}onRunStart(t){this.currentSessionId=t,this.setQueryType()}}});var Sl={};Lt(Sl,{init:()=>fh});var rr,So,fh,Il=U(()=>{"use strict";Q();xl();dt();ae();rr=class e{constructor(t,r,n,o){this.module=t;this.dataType=r;this.data=n;this.dims=o}getFloat32Array(){if(this.dataType!==1)throw new Error("Invalid data type");let t=k.size(this.dims);return t===0?new Float32Array:new Float32Array(this.module.HEAP8.buffer,this.data,t)}getBigInt64Array(){if(this.dataType!==7)throw new Error("Invalid data type");let t=k.size(this.dims);return t===0?new BigInt64Array:new BigInt64Array(this.module.HEAP8.buffer,this.data,t)}getInt32Array(){if(this.dataType!==6)throw new Error("Invalid data type");let t=k.size(this.dims);return t===0?new Int32Array:new Int32Array(this.module.HEAP8.buffer,this.data,t)}reshape(t){if(k.size(t)!==k.size(this.dims))throw new Error("Invalid new shape");return new e(this.module,this.dataType,this.data,t)}},So=class{constructor(t,r,n){this.module=t;this.backend=r;this.customDataOffset=0;this.customDataSize=0;this.adapterInfo=r.adapterInfo;let o=t.HEAPU32,i=n>>>2;this.opKernelContext=o[i++];let a=o[i++];this.outputCount=o[i++],this.customDataOffset=o[i++],this.customDataSize=o[i++];let l=[];for(let d=0;dtypeof l=="number"?this.inputs[l]:l)??this.inputs,o=r?.outputs??[],i=(l,d,p)=>new rr(this.module,d,this.output(l,p),p),a=(l,d)=>{let p=It(l);if(!p)throw new Error(`Unsupported data type: ${l}`);let m=p*k.size(d),u=m>0?this.backend.gpuDataManager.create(m).id:0;return new rr(this.module,l,u,d)};return this.backend.run(t,n,o,i,a,this.outputCount)}output(t,r){let n=this.module.stackSave();try{let o=this.module.stackAlloc((1+r.length)*4),i=o>>2;this.module.HEAPU32[i++]=r.length;for(let a=0;a{let o=t.jsepInit;if(!o)throw new Error("Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.");if(e==="webgpu"){let i=new sn;await i.initialize(r,n),o("webgpu",[i,a=>i.alloc(a),a=>i.free(a),(a,l,d,p=!1)=>{if(p)be("verbose",()=>`[WebGPU] jsepCopyGpuToGpu: src=${a}, dst=${l}, size=${d}`),i.memcpy(a,l);else{be("verbose",()=>`[WebGPU] jsepCopyCpuToGpu: dataOffset=${a}, gpuDataId=${l}, size=${d}`);let m=t.HEAPU8.subarray(a>>>0,(a>>>0)+d);i.upload(l,m)}},async(a,l,d)=>{be("verbose",()=>`[WebGPU] jsepCopyGpuToCpu: gpuDataId=${a}, dataOffset=${l}, size=${d}`),await i.download(a,()=>t.HEAPU8.subarray(l>>>0,(l>>>0)+d))},(a,l,d)=>i.createKernel(a,l,d,t.UTF8ToString(t._JsepGetNodeName(l))),a=>i.releaseKernel(a),(a,l,d,p)=>{be("verbose",()=>`[WebGPU] jsepRun: sessionHandle=${d}, kernel=${a}, contextDataOffset=${l}`);let m=new So(t,i,l);return i.computeKernel(a,m,p)},()=>i.captureBegin(),()=>i.captureEnd(),()=>i.replay()])}else o("webnn")}});var hh,Ar,Er,kt,gh,Kt,kr,Pr,Cl,Or,zr,Dr,Nn=U(()=>{"use strict";Ba();Ma();Q();St();Rr();qn();hh=(e,t)=>{Ie()._OrtInit(e,t)!==0&&$e("Can't initialize onnxruntime.")},Ar=async e=>{hh(e.wasm.numThreads,Yt(e.logLevel))},Er=async(e,t)=>{{let r=(Il(),yr(Sl)).init;if(t==="webgpu"){if(typeof navigator>"u"||!navigator.gpu)throw new Error("WebGPU is not supported in current environment");let n=e.webgpu.adapter;if(n){if(typeof n.limits!="object"||typeof n.features!="object"||typeof n.requestDevice!="function")throw new Error("Invalid GPU adapter set in `env.webgpu.adapter`. It must be a GPUAdapter object.")}else{let o=e.webgpu.powerPreference;if(o!==void 0&&o!=="low-power"&&o!=="high-performance")throw new Error(`Invalid powerPreference setting: "${o}"`);let i=e.webgpu.forceFallbackAdapter;if(i!==void 0&&typeof i!="boolean")throw new Error(`Invalid forceFallbackAdapter setting: "${i}"`);if(n=await navigator.gpu.requestAdapter({powerPreference:o,forceFallbackAdapter:i}),!n)throw new Error('Failed to get GPU adapter. You may need to enable flag "--enable-unsafe-webgpu" if you are using Chrome.')}await r("webgpu",Ie(),e,n)}if(t==="webnn"){if(typeof navigator>"u"||!navigator.ml)throw new Error("WebNN is not supported in current environment");await r("webnn",Ie(),e)}}},kt=new Map,gh=e=>{let t=Ie(),r=t.stackSave();try{let n=t.stackAlloc(8);return t._OrtGetInputOutputCount(e,n,n+4)!==0&&$e("Can't get session input/output count."),[t.HEAP32[n/4],t.HEAP32[n/4+1]]}finally{t.stackRestore(r)}},Kt=e=>{let t=Ie(),r=t._malloc(e.byteLength);if(r===0)throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${e.byteLength}.`);return t.HEAPU8.set(e,r),[r,e.byteLength]},kr=async(e,t)=>{let r,n,o=Ie();Array.isArray(e)?[r,n]=e:e.buffer===o.HEAPU8.buffer?[r,n]=[e.byteOffset,e.byteLength]:[r,n]=Kt(e);let i=0,a=0,l=0,d=[],p=[],m=[];try{if([a,d]=Ra(t),t?.externalData&&o.mountExternalData){let $=[];for(let S of t.externalData){let I=typeof S=="string"?S:S.path;$.push(Xt(typeof S=="string"?S:S.data).then(T=>{o.mountExternalData(I,T)}))}await Promise.all($)}for(let $ of t?.executionProviders??[])if((typeof $=="string"?$:$.name)==="webnn"){if(o.currentContext)throw new Error("WebNN execution provider is already set.");if(typeof $!="string"){let I=$,T=I?.context,A=I?.gpuDevice,z=I?.deviceType,D=I?.numThreads,H=I?.powerPreference;T?o.currentContext=T:A?o.currentContext=await navigator.ml.createContext(A):o.currentContext=await navigator.ml.createContext({deviceType:z,numThreads:D,powerPreference:H})}else o.currentContext=await navigator.ml.createContext();break}i=await o._OrtCreateSession(r,n,a),i===0&&$e("Can't create a session."),o.currentContext&&(o.currentContext=void 0);let[u,h]=gh(i),w=!!t?.enableGraphCapture,g=[],b=[],x=[];for(let $=0;$$==="gpu-buffer")&&(l=o._OrtCreateBinding(i),l===0&&$e("Can't create IO binding."),_={handle:l,outputPreferredLocations:x,outputPreferredLocationsEncoded:x.map($=>Fn($))}),kt.set(i,[i,p,m,_,w,!1]),[i,g,b]}catch(u){throw p.forEach(h=>o._OrtFree(h)),m.forEach(h=>o._OrtFree(h)),l!==0&&o._OrtReleaseBinding(l),i!==0&&o._OrtReleaseSession(i),u}finally{o._free(r),a!==0&&o._OrtReleaseSessionOptions(a),d.forEach(u=>o._free(u)),o.unmountExternalData?.()}},Pr=e=>{let t=Ie(),r=kt.get(e);if(!r)throw new Error(`cannot release session. invalid session id: ${e}`);let[n,o,i,a,l]=r;a&&(l&&t._OrtClearBoundOutputs(a.handle),t._OrtReleaseBinding(a.handle)),t.jsepOnReleaseSession?.(e),o.forEach(d=>t._OrtFree(d)),i.forEach(d=>t._OrtFree(d)),t._OrtReleaseSession(n),kt.delete(e)},Cl=(e,t,r,n,o,i=!1)=>{if(!e){t.push(0);return}let a=Ie(),l=e[0],d=e[1],p=e[3],m,u;if(l==="string"&&p==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");if(i&&p!=="gpu-buffer")throw new Error(`External buffer must be provided for input/output index ${o} when enableGraphCapture is true.`);if(p==="gpu-buffer"){let g=e[2].gpuBuffer,b=It(Ln(l));u=d.reduce((_,$)=>_*$,1)*b;let x=a.jsepRegisterBuffer;if(!x)throw new Error('Tensor location "gpu-buffer" is not supported without using WebGPU.');m=x(n,o,g,u)}else{let g=e[2];if(Array.isArray(g)){u=4*g.length,m=a._malloc(u),r.push(m);let b=m/4;for(let x=0;xa.HEAP32[g++]=x);let b=a._OrtCreateTensor(Ln(l),m,u,w,d.length,Fn(p));b===0&&$e(`Can't create tensor for input/output. session=${n}, index=${o}.`),t.push(b)}finally{a.stackRestore(h)}},Or=async(e,t,r,n,o,i)=>{let a=Ie(),l=kt.get(e);if(!l)throw new Error(`cannot run inference. 
invalid session id: ${e}`);let d=l[0],p=l[1],m=l[2],u=l[3],h=l[4],w=l[5],g=t.length,b=n.length,x=0,_=[],$=[],S=[],I=[],T=a.stackSave(),A=a.stackAlloc(g*4),z=a.stackAlloc(g*4),D=a.stackAlloc(b*4),H=a.stackAlloc(b*4);try{[x,_]=Da(i);for(let q=0;qBe*Ae,1);re=ht(Y);let Ot=u?.outputPreferredLocations[n[q]];if(re==="string"){if(Ot==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");let Be=[],Ae=J/4;for(let Xe=0;Xe0){let Be=a.jsepGetBuffer;if(!Be)throw new Error('preferredLocation "gpu-buffer" is not supported without using WebGPU.');let Ae=Be(J),Xe=It(Y);if(Xe===void 0||!Ur(re))throw new Error(`Unsupported data type: ${re}`);Z=!0,xe.push([re,ve,{gpuBuffer:Ae,download:a.jsepCreateDownloader(Ae,Se*Xe,re),dispose:()=>{a._OrtReleaseTensor(ie)}},"gpu-buffer"])}else{let Be=Mr(re),Ae=new Be(Se);new Uint8Array(Ae.buffer,Ae.byteOffset,Ae.byteLength).set(a.HEAPU8.subarray(J,J+Ae.byteLength)),xe.push([re,ve,Ae,"cpu"])}}finally{a.stackRestore(le),re==="string"&&J&&a._free(J),Z||a._OrtReleaseTensor(ie)}}return u&&!h&&(a._OrtClearBoundOutputs(u.handle),kt.set(e,[d,p,m,u,h,!1])),xe}finally{a.stackRestore(T),$.forEach(W=>a._OrtReleaseTensor(W)),S.forEach(W=>a._OrtReleaseTensor(W)),I.forEach(W=>a._free(W)),x!==0&&a._OrtReleaseRunOptions(x),_.forEach(W=>a._free(W))}},zr=e=>{let t=Ie(),r=kt.get(e);if(!r)throw new Error("invalid session id");let n=r[0],o=t._OrtEndProfiling(n);o===0&&$e("Can't get an profile file name."),t._OrtFree(o)},Dr=e=>{let t=[];for(let r of e){let n=r[2];!Array.isArray(n)&&"buffer"in n&&t.push(n.buffer)}return t}});var Pt,Fe,nr,dn,ln,un,Io,Co,Wt,Ht,bh,Tl,Al,El,kl,Pl,Ol,zl,To=U(()=>{"use strict";Le();Nn();St();qt();Pt=()=>!!ye.wasm.proxy&&typeof document<"u",nr=!1,dn=!1,ln=!1,Co=new Map,Wt=(e,t)=>{let r=Co.get(e);r?r.push(t):Co.set(e,[t])},Ht=()=>{if(nr||!dn||ln||!Fe)throw new Error("worker not ready")},bh=e=>{switch(e.data.type){case"init-wasm":nr=!1,e.data.err?(ln=!0,Io[1](e.data.err)):(dn=!0,Io[0]()),un&&(URL.revokeObjectURL(un),un=void 0);break;case"init-ep":case"copy-from":case"create":case"release":case"run":case"end-profiling":{let t=Co.get(e.data.type);e.data.err?t.shift()[1](e.data.err):t.shift()[0](e.data.out);break}default:}},Tl=async()=>{if(!dn){if(nr)throw new Error("multiple calls to 'initWasm()' detected.");if(ln)throw new Error("previous call to 'initWasm()' failed.");if(nr=!0,Pt())return new Promise((e,t)=>{Fe?.terminate(),Pa().then(([r,n])=>{try{Fe=n,Fe.onerror=i=>t(i),Fe.onmessage=bh,Io=[e,t];let o={type:"init-wasm",in:ye};Fe.postMessage(o),un=r}catch(o){t(o)}},t)});try{await Tr(ye.wasm),await Ar(ye),dn=!0}catch(e){throw ln=!0,e}finally{nr=!1}}},Al=async e=>{if(Pt())return Ht(),new Promise((t,r)=>{Wt("init-ep",[t,r]);let n={type:"init-ep",in:{epName:e,env:ye}};Fe.postMessage(n)});await Er(ye,e)},El=async e=>Pt()?(Ht(),new Promise((t,r)=>{Wt("copy-from",[t,r]);let n={type:"copy-from",in:{buffer:e}};Fe.postMessage(n,[e.buffer])})):Kt(e),kl=async(e,t)=>{if(Pt()){if(t?.preferredOutputLocation)throw new Error('session option "preferredOutputLocation" is not supported for proxy.');return Ht(),new Promise((r,n)=>{Wt("create",[r,n]);let o={type:"create",in:{model:e,options:{...t}}},i=[];e instanceof Uint8Array&&i.push(e.buffer),Fe.postMessage(o,i)})}else return kr(e,t)},Pl=async e=>{if(Pt())return Ht(),new Promise((t,r)=>{Wt("release",[t,r]);let n={type:"release",in:e};Fe.postMessage(n)});Pr(e)},Ol=async(e,t,r,n,o,i)=>{if(Pt()){if(r.some(a=>a[3]!=="cpu"))throw new Error("input tensor on GPU is not supported for proxy.");if(o.some(a=>a))throw new 
Error("pre-allocated output tensor is not supported for proxy.");return Ht(),new Promise((a,l)=>{Wt("run",[a,l]);let d=r,p={type:"run",in:{sessionId:e,inputIndices:t,inputs:d,outputIndices:n,options:i}};Fe.postMessage(p,Dr(d))})}else return Or(e,t,r,n,o,i)},zl=async e=>{if(Pt())return Ht(),new Promise((t,r)=>{Wt("end-profiling",[t,r]);let n={type:"end-profiling",in:e};Fe.postMessage(n)});zr(e)}});var Dl,wh,cn,Bl=U(()=>{"use strict";Le();To();Q();Cr();qn();Dl=(e,t)=>{switch(e.location){case"cpu":return[e.type,e.dims,e.data,"cpu"];case"gpu-buffer":return[e.type,e.dims,{gpuBuffer:e.gpuBuffer},"gpu-buffer"];default:throw new Error(`invalid data location: ${e.location} for ${t()}`)}},wh=e=>{switch(e[3]){case"cpu":return new ze(e[0],e[2],e[1]);case"gpu-buffer":{let t=e[0];if(!Ur(t))throw new Error(`not supported data type: ${t} for deserializing GPU tensor`);let{gpuBuffer:r,download:n,dispose:o}=e[2];return ze.fromGpuBuffer(r,{dataType:t,dims:e[1],download:n,dispose:o})}default:throw new Error(`invalid data location: ${e[3]}`)}},cn=class{async fetchModelAndCopyToWasmMemory(t){return El(await Xt(t))}async loadModel(t,r){We();let n;typeof t=="string"?!1?n=await Xt(t):n=await this.fetchModelAndCopyToWasmMemory(t):n=t,[this.sessionId,this.inputNames,this.outputNames]=await kl(n,r),Me()}async dispose(){return Pl(this.sessionId)}async run(t,r,n){We();let o=[],i=[];Object.entries(t).forEach(h=>{let w=h[0],g=h[1],b=this.inputNames.indexOf(w);if(b===-1)throw new Error(`invalid input '${w}'`);o.push(g),i.push(b)});let a=[],l=[];Object.entries(r).forEach(h=>{let w=h[0],g=h[1],b=this.outputNames.indexOf(w);if(b===-1)throw new Error(`invalid output '${w}'`);a.push(g),l.push(b)});let d=o.map((h,w)=>Dl(h,()=>`input "${this.inputNames[i[w]]}"`)),p=a.map((h,w)=>h?Dl(h,()=>`output "${this.outputNames[l[w]]}"`):null),m=await Ol(this.sessionId,i,d,l,p,n),u={};for(let h=0;h{"use strict";Le();To();Bl();qt();vh=()=>{if((typeof ye.wasm.initTimeout!="number"||ye.wasm.initTimeout<0)&&(ye.wasm.initTimeout=0),ye.wasm.simd===!1&&console.warn('Deprecated property "env.wasm.simd" is set to false. 
non-SIMD build is no longer provided, and this setting will be ignored.'),typeof ye.wasm.proxy!="boolean"&&(ye.wasm.proxy=!1),typeof ye.wasm.trace!="boolean"&&(ye.wasm.trace=!1),typeof ye.wasm.numThreads!="number"||!Number.isInteger(ye.wasm.numThreads)||ye.wasm.numThreads<=0)if(typeof self<"u"&&!self.crossOriginIsolated)ye.wasm.numThreads=1;else{let e=typeof navigator>"u"?Bn("node:os").cpus().length:navigator.hardwareConcurrency;ye.wasm.numThreads=Math.min(4,Math.ceil((e||1)/2))}},pn=class{async init(t){vh(),await Tl(),await Al(t)}async createInferenceSessionHandler(t,r){let n=new cn;return await n.loadModel(t,r),Promise.resolve(n)}}});var Ml={};Lt(Ml,{wasmBackend:()=>$h});var $h,Ul=U(()=>{"use strict";Rl();$h=new pn});Le();Le();Le();var _a="1.19.0";var G_=Vn;{let e=(Ul(),yr(Ml)).wasmBackend;_t("webgpu",e,5),_t("webnn",e,5),_t("cpu",e,10),_t("wasm",e,10)}Object.defineProperty(ye.versions,"web",{value:_a,enumerable:!0});export{ap as InferenceSession,xr as TRACE,We as TRACE_FUNC_BEGIN,Me as TRACE_FUNC_END,ze as Tensor,up as TrainingSession,G_ as default,ye as env,_t as registerBackend}; +${a}`,d=n.createShaderModule({code:l,label:t.name});be("verbose",()=>`[WebGPU] ${t.name} shader code: ${l}`);let p=n.createComputePipeline({compute:{module:d,entryPoint:"main"},layout:"auto",label:t.name});return Me(t.name),{programInfo:t,computePipeline:p,uniformVariablesInfo:i.variablesInfo}}normalizeDispatchGroupSize(t){let r=typeof t=="number"?t:t.x,n=typeof t=="number"?1:t.y||1,o=typeof t=="number"?1:t.z||1,i=this.backend.device.limits.maxComputeWorkgroupsPerDimension;if(r<=i&&n<=i&&o<=i)return[r,n,o];let a=r*n*o,l=Math.ceil(Math.sqrt(a));if(l>i){if(l=Math.ceil(Math.cbrt(a)),l>i)throw new Error("Total dispatch size exceeds WebGPU maximum.");return[l,l,l]}else return[l,l,1]}}});var fh,hh,So,sn,xl=U(()=>{"use strict";Le();Q();dt();Ha();ja();$l();_l();fh=(e,t)=>{if(t.length!==e.length)throw new Error(`inputDependencies length ${t.length} is not equal to inputTensors length ${e.length}.`);let r=[];for(let n=0;n{let n=e.name;return e.shaderCache?.hint&&(n+="["+e.shaderCache.hint+"]"),n+=":"+r+`:${fh(t,e.shaderCache?.inputDependencies??new Array(t.length).fill("dims"))}`,n},So=class{constructor(t){t&&(this.architecture=t.architecture,this.vendor=t.vendor)}isArchitecture(t){return this.architecture===t}isVendor(t){return this.vendor===t}},sn=class{constructor(){this.currentSessionId=null;this.currentKernelId=null;this.commandEncoder=null;this.computePassEncoder=null;this.maxDispatchNumber=16;this.pendingDispatchNumber=0;this.pendingKernels=[];this.pendingQueries=new Map;this.sessionStatus="default";this.capturedCommandList=new Map;this.capturedPendingKernels=new Map;this.sessionExternalDataMapping=new Map}get currentKernelCustomData(){if(this.currentKernelId===null)throw new Error("currentKernelCustomData(): currentKernelId is null. 
(should not happen)");let t=this.kernelCustomData.get(this.currentKernelId);return t||(t={},this.kernelCustomData.set(this.currentKernelId,t)),t}async initialize(t,r){this.env=t;let n=[],o={requiredLimits:{maxComputeWorkgroupStorageSize:r.limits.maxComputeWorkgroupStorageSize,maxComputeWorkgroupsPerDimension:r.limits.maxComputeWorkgroupsPerDimension,maxStorageBufferBindingSize:r.limits.maxStorageBufferBindingSize,maxBufferSize:r.limits.maxBufferSize,maxComputeInvocationsPerWorkgroup:r.limits.maxComputeInvocationsPerWorkgroup,maxComputeWorkgroupSizeX:r.limits.maxComputeWorkgroupSizeX,maxComputeWorkgroupSizeY:r.limits.maxComputeWorkgroupSizeY,maxComputeWorkgroupSizeZ:r.limits.maxComputeWorkgroupSizeZ},requiredFeatures:n};r.features.has("chromium-experimental-timestamp-query-inside-passes")?n.push("chromium-experimental-timestamp-query-inside-passes"):r.features.has("timestamp-query")&&n.push("timestamp-query"),r.features.has("shader-f16")&&n.push("shader-f16"),this.device=await r.requestDevice(o),this.adapterInfo=new So(r.info||await r.requestAdapterInfo()),this.gpuDataManager=qa(this),this.programManager=new an(this),this.kernels=new Map,this.kernelPersistentData=new Map,this.kernelCustomData=new Map,Na(t.logLevel,!!t.debug),this.device.onuncapturederror=i=>{i.error instanceof GPUValidationError&&console.error(`An uncaught WebGPU validation error was raised: ${i.error.message}`)},Object.defineProperty(this.env.webgpu,"device",{value:this.device,writable:!1,enumerable:!0,configurable:!1}),Object.defineProperty(this.env.webgpu,"adapter",{value:r,writable:!1,enumerable:!0,configurable:!1}),this.setQueryType()}dispose(){typeof this.querySet<"u"&&this.querySet.destroy(),this.gpuDataManager.dispose()}getCommandEncoder(){return this.commandEncoder||(this.commandEncoder=this.device.createCommandEncoder()),this.commandEncoder}getComputePassEncoder(){if(!this.computePassEncoder){let t=this.getCommandEncoder(),r={};this.queryType==="at-passes"&&(r.timestampWrites={querySet:this.querySet,beginningOfPassWriteIndex:this.pendingDispatchNumber*2,endOfPassWriteIndex:this.pendingDispatchNumber*2+1}),this.computePassEncoder=t.beginComputePass(r)}return this.computePassEncoder}endComputePass(){this.computePassEncoder&&(this.computePassEncoder.end(),this.computePassEncoder=null)}flush(){if(!this.commandEncoder)return;We(),this.endComputePass();let t;this.queryType!=="none"&&(this.commandEncoder.resolveQuerySet(this.querySet,0,this.pendingDispatchNumber*2,this.queryResolveBuffer,0),t=this.device.createBuffer({size:this.pendingDispatchNumber*2*8,usage:GPUBufferUsage.MAP_READ|GPUBufferUsage.COPY_DST}),this.pendingQueries.set(t,this.pendingKernels),this.pendingKernels=[],this.commandEncoder.copyBufferToBuffer(this.queryResolveBuffer,0,t,0,this.pendingDispatchNumber*2*8)),this.device.queue.submit([this.commandEncoder.finish()]),this.gpuDataManager.refreshPendingBuffers(),this.commandEncoder=null,this.pendingDispatchNumber=0,this.queryType!=="none"&&t.mapAsync(GPUMapMode.READ).then(()=>{let r=new BigUint64Array(t.getMappedRange()),n=this.pendingQueries.get(t);for(let o=0;o"u"&&(this.queryTimeBase=w);let b=Number(w-this.queryTimeBase),x=Number(g-this.queryTimeBase);if(!Number.isSafeInteger(b)||!Number.isSafeInteger(x))throw new RangeError("incorrect timestamp 
range");if(this.env.webgpu.profiling?.ondata)this.env.webgpu.profiling.ondata({version:1,inputsMetadata:u.map(_=>({dims:_.dims,dataType:ht(_.dataType)})),outputsMetadata:h.map(_=>({dims:_.dims,dataType:ht(_.dataType)})),kernelId:a,kernelType:d,kernelName:p,programName:m,startTime:b,endTime:x});else{let _="";u.forEach((S,I)=>{_+=`input[${I}]: [${S.dims}] | ${ht(S.dataType)}, `});let $="";h.forEach((S,I)=>{$+=`output[${I}]: [${S.dims}] | ${ht(S.dataType)}, `}),console.log(`[profiling] kernel "${a}|${d}|${p}|${m}" ${_}${$}execution time: ${x-b} ns`)}xr("GPU",`${m}::${w}::${g}`)}t.unmap(),this.pendingQueries.delete(t)}),Me()}run(t,r,n,o,i,a){We(t.name);let l=[];for(let S=0;SI):n;if(u.length!==d.length)throw new Error(`Output size ${u.length} must be equal to ${d.length}.`);let h=[],w=[];for(let S=0;S=a)throw new Error(`Invalid output index: ${u[S]}`);if(u[S]===-3)continue;let I=u[S]===-1,T=u[S]===-2,A=I||T?i(d[S].dataType,d[S].dims):o(u[S],d[S].dataType,d[S].dims);if(h.push(A),A.data===0)continue;let D=this.gpuDataManager.get(A.data);if(!D)throw new Error(`no GPU data for output: ${A.data}`);if(I&&this.temporaryData.push(D),T){let z=this.kernelPersistentData.get(this.currentKernelId);z||(z=[],this.kernelPersistentData.set(this.currentKernelId,z)),z.push(D)}w.push(D)}if(l.length!==r.length||w.length!==h.length){if(w.length===0)return Me(t.name),h;throw new Error(`Program ${t.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`)}let g;if(m){let S=0,I=[];m.forEach(z=>{let H=typeof z.data=="number"?[z.data]:z.data;if(H.length===0)return;let W=z.type===10?2:4,F,de;z.type===10?(de=H.length>4?16:H.length>2?8:H.length*W,F=H.length>4?16:W*H.length):(de=H.length<=2?H.length*W:16,F=16),S=Math.ceil(S/de)*de,I.push(S);let ce=z.type===10?8:4;S+=H.length>4?Math.ceil(H.length/ce)*F:H.length*W});let T=16;S=Math.ceil(S/T)*T;let A=new ArrayBuffer(S);m.forEach((z,H)=>{let W=I[H],F=typeof z.data=="number"?[z.data]:z.data;if(z.type===6)new Int32Array(A,W,F.length).set(F);else if(z.type===12)new Uint32Array(A,W,F.length).set(F);else if(z.type===10)new Uint16Array(A,W,F.length).set(F);else if(z.type===1)new Float32Array(A,W,F.length).set(F);else throw new Error(`Unsupported uniform type: ${ht(z.type)}`)});let D=this.gpuDataManager.create(S,GPUBufferUsage.COPY_DST|GPUBufferUsage.UNIFORM);this.device.queue.writeBuffer(D.buffer,0,A,0,S),this.gpuDataManager.release(D.id),g={offset:0,size:S,buffer:D.buffer}}let b=this.programManager.normalizeDispatchGroupSize(p),x=b[1]===1&&b[2]===1,_=hh(t,r,x),$=this.programManager.getArtifact(_);if($||($=this.programManager.build(t,b),this.programManager.setArtifact(_,$),be("info",()=>`[artifact] key: ${_}, programName: ${t.name}`)),m&&$.uniformVariablesInfo){if(m.length!==$.uniformVariablesInfo.length)throw new Error(`Uniform variables count mismatch: expect ${$.uniformVariablesInfo.length}, got ${m.length} in program "${$.programInfo.name}".`);for(let S=0;S`[ProgramManager] run "${t.name}" (key=${_}) with ${b[0]}x${b[1]}x${b[2]}`),this.queryType!=="none"||this.sessionStatus==="capturing"){let S={kernelId:this.currentKernelId,programName:$.programInfo.name,inputTensorViews:r,outputTensorViews:h};this.pendingKernels.push(S),this.sessionStatus==="capturing"&&this.capturedPendingKernels.get(this.currentSessionId).push(S)}return this.programManager.run($,l,w,b,g),Me(t.name),h}upload(t,r){this.gpuDataManager.upload(t,r)}memcpy(t,r){this.gpuDataManager.memcpy(t,r)}async download(t,r){await this.gpuDataManager.download(t,r)}alloc(t){return 
this.gpuDataManager.create(t).id}free(t){return this.gpuDataManager.release(t)}createKernel(t,r,n,o){let i=vl.get(t);if(!i)throw new Error(`kernel not implemented: ${t}`);let a={kernelType:t,kernelName:o,kernelEntry:i[0],attributes:[i[1],n]};this.kernels.set(r,a)}releaseKernel(t){let r=this.kernelPersistentData.get(t);if(r){for(let n of r)this.gpuDataManager.release(n.id);this.kernelPersistentData.delete(t)}this.kernelCustomData.delete(t),this.kernels.delete(t)}computeKernel(t,r,n){let o=this.kernels.get(t);if(!o)throw new Error(`kernel not created: ${t}`);let i=o.kernelType,a=o.kernelName,l=o.kernelEntry,d=o.attributes;if(this.currentKernelId!==null)throw new Error(`kernel "[${i}] ${a}" is not allowed to be called recursively`);this.currentKernelId=t,d[0]&&(d[1]=d[0](d[1]),d[0]=void 0),be("info",()=>`[WebGPU] Start to run kernel "[${i}] ${a}"...`);let p=this.env.debug;this.temporaryData=[];try{return p&&this.device.pushErrorScope("validation"),l(r,d[1]),0}catch(m){return n.push(Promise.resolve(`[WebGPU] Kernel "[${i}] ${a}" failed. ${m}`)),1}finally{p&&n.push(this.device.popErrorScope().then(m=>m?`GPU validation error for kernel "[${i}] ${a}": ${m.message}`:null));for(let m of this.temporaryData)this.gpuDataManager.release(m.id);this.temporaryData=[],this.currentKernelId=null}}registerBuffer(t,r,n,o){let i=this.sessionExternalDataMapping.get(t);i||(i=new Map,this.sessionExternalDataMapping.set(t,i));let a=i.get(r),l=this.gpuDataManager.registerExternalBuffer(n,o,a?.[1]);return i.set(r,[l,n]),l}unregisterBuffers(t){let r=this.sessionExternalDataMapping.get(t);r&&(r.forEach(n=>this.gpuDataManager.unregisterExternalBuffer(n[1])),this.sessionExternalDataMapping.delete(t))}getBuffer(t){let r=this.gpuDataManager.get(t);if(!r)throw new Error(`no GPU data for buffer: ${t}`);return r.buffer}createDownloader(t,r,n){return async()=>{let o=await Zn(this,t,r);return Wa(o.buffer,n)}}writeTimestamp(t){this.queryType==="inside-passes"&&this.computePassEncoder.writeTimestamp(this.querySet,t)}setQueryType(){this.queryType="none",(this.env.webgpu.profiling?.mode==="default"||(typeof this.env.trace>"u"?this.env.wasm.trace:this.env.trace))&&(this.device.features.has("chromium-experimental-timestamp-query-inside-passes")?this.queryType="inside-passes":this.device.features.has("timestamp-query")&&(this.queryType="at-passes"),this.queryType!=="none"&&typeof this.querySet>"u"&&(this.querySet=this.device.createQuerySet({type:"timestamp",count:this.maxDispatchNumber*2}),this.queryResolveBuffer=this.device.createBuffer({size:this.maxDispatchNumber*2*8,usage:GPUBufferUsage.COPY_SRC|GPUBufferUsage.QUERY_RESOLVE})))}captureBegin(){be("info","captureBegin"),this.capturedCommandList.get(this.currentSessionId)||this.capturedCommandList.set(this.currentSessionId,[]),this.capturedPendingKernels.get(this.currentSessionId)||this.capturedPendingKernels.set(this.currentSessionId,[]),this.flush(),this.sessionStatus="capturing"}captureEnd(){be("info","captureEnd"),this.flush(),this.sessionStatus="default"}replay(){be("info","replay"),this.sessionStatus="replaying";let t=this.capturedCommandList.get(this.currentSessionId),r=this.capturedPendingKernels.get(this.currentSessionId),n=t.length;this.pendingKernels=[];for(let 
o=0;o=this.maxDispatchNumber||this.queryType==="at-passes")&&this.endComputePass(),this.pendingDispatchNumber>=this.maxDispatchNumber&&this.flush()}this.flush(),this.sessionStatus="default"}onReleaseSession(t){this.unregisterBuffers(t),this.capturedCommandList.has(t)&&this.capturedCommandList.delete(t),this.capturedPendingKernels.has(t)&&this.capturedPendingKernels.delete(t),this.gpuDataManager.onReleaseSession(t)}onRunStart(t){this.currentSessionId=t,this.setQueryType()}}});var Sl={};Lt(Sl,{init:()=>gh});var rr,Io,gh,Il=U(()=>{"use strict";Q();xl();dt();ae();rr=class e{constructor(t,r,n,o){this.module=t;this.dataType=r;this.data=n;this.dims=o}getFloat32Array(){if(this.dataType!==1)throw new Error("Invalid data type");let t=k.size(this.dims);return t===0?new Float32Array:new Float32Array(this.module.HEAP8.buffer,this.data,t)}getBigInt64Array(){if(this.dataType!==7)throw new Error("Invalid data type");let t=k.size(this.dims);return t===0?new BigInt64Array:new BigInt64Array(this.module.HEAP8.buffer,this.data,t)}getInt32Array(){if(this.dataType!==6)throw new Error("Invalid data type");let t=k.size(this.dims);return t===0?new Int32Array:new Int32Array(this.module.HEAP8.buffer,this.data,t)}reshape(t){if(k.size(t)!==k.size(this.dims))throw new Error("Invalid new shape");return new e(this.module,this.dataType,this.data,t)}},Io=class{constructor(t,r,n){this.module=t;this.backend=r;this.customDataOffset=0;this.customDataSize=0;this.adapterInfo=r.adapterInfo;let o=t.HEAPU32,i=n>>>2;this.opKernelContext=o[i++];let a=o[i++];this.outputCount=o[i++],this.customDataOffset=o[i++],this.customDataSize=o[i++];let l=[];for(let d=0;dtypeof l=="number"?this.inputs[l]:l)??this.inputs,o=r?.outputs??[],i=(l,d,p)=>new rr(this.module,d,this.output(l,p),p),a=(l,d)=>{let p=It(l);if(!p)throw new Error(`Unsupported data type: ${l}`);let m=p*k.size(d),u=m>0?this.backend.gpuDataManager.create(m).id:0;return new rr(this.module,l,u,d)};return this.backend.run(t,n,o,i,a,this.outputCount)}output(t,r){let n=this.module.stackSave();try{let o=this.module.stackAlloc((1+r.length)*4),i=o>>2;this.module.HEAPU32[i++]=r.length;for(let a=0;a{let o=t.jsepInit;if(!o)throw new Error("Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.");if(e==="webgpu"){let i=new sn;await i.initialize(r,n),o("webgpu",[i,a=>i.alloc(a),a=>i.free(a),(a,l,d,p=!1)=>{if(p)be("verbose",()=>`[WebGPU] jsepCopyGpuToGpu: src=${a}, dst=${l}, size=${d}`),i.memcpy(a,l);else{be("verbose",()=>`[WebGPU] jsepCopyCpuToGpu: dataOffset=${a}, gpuDataId=${l}, size=${d}`);let m=t.HEAPU8.subarray(a>>>0,(a>>>0)+d);i.upload(l,m)}},async(a,l,d)=>{be("verbose",()=>`[WebGPU] jsepCopyGpuToCpu: gpuDataId=${a}, dataOffset=${l}, size=${d}`),await i.download(a,()=>t.HEAPU8.subarray(l>>>0,(l>>>0)+d))},(a,l,d)=>i.createKernel(a,l,d,t.UTF8ToString(t._JsepGetNodeName(l))),a=>i.releaseKernel(a),(a,l,d,p)=>{be("verbose",()=>`[WebGPU] jsepRun: sessionHandle=${d}, kernel=${a}, contextDataOffset=${l}`);let m=new Io(t,i,l);return i.computeKernel(a,m,p)},()=>i.captureBegin(),()=>i.captureEnd(),()=>i.replay()])}else o("webnn")}});var yh,Ar,Er,kt,bh,jt,kr,Pr,Cl,Or,Dr,zr,Wn=U(()=>{"use strict";Ba();Ma();Q();St();Rr();jn();yh=(e,t)=>{Ie()._OrtInit(e,t)!==0&&$e("Can't initialize onnxruntime.")},Ar=async e=>{yh(e.wasm.numThreads,Yt(e.logLevel))},Er=async(e,t)=>{{let r=(Il(),yr(Sl)).init;if(t==="webgpu"){if(typeof navigator>"u"||!navigator.gpu)throw new Error("WebGPU is not supported in current environment");let n=e.webgpu.adapter;if(n){if(typeof n.limits!="object"||typeof n.features!="object"||typeof n.requestDevice!="function")throw new Error("Invalid GPU adapter set in `env.webgpu.adapter`. It must be a GPUAdapter object.")}else{let o=e.webgpu.powerPreference;if(o!==void 0&&o!=="low-power"&&o!=="high-performance")throw new Error(`Invalid powerPreference setting: "${o}"`);let i=e.webgpu.forceFallbackAdapter;if(i!==void 0&&typeof i!="boolean")throw new Error(`Invalid forceFallbackAdapter setting: "${i}"`);if(n=await navigator.gpu.requestAdapter({powerPreference:o,forceFallbackAdapter:i}),!n)throw new Error('Failed to get GPU adapter. You may need to enable flag "--enable-unsafe-webgpu" if you are using Chrome.')}await r("webgpu",Ie(),e,n)}if(t==="webnn"){if(typeof navigator>"u"||!navigator.ml)throw new Error("WebNN is not supported in current environment");await r("webnn",Ie(),e)}}},kt=new Map,bh=e=>{let t=Ie(),r=t.stackSave();try{let n=t.stackAlloc(8);return t._OrtGetInputOutputCount(e,n,n+4)!==0&&$e("Can't get session input/output count."),[t.HEAP32[n/4],t.HEAP32[n/4+1]]}finally{t.stackRestore(r)}},jt=e=>{let t=Ie(),r=t._malloc(e.byteLength);if(r===0)throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${e.byteLength}.`);return t.HEAPU8.set(e,r),[r,e.byteLength]},kr=async(e,t)=>{let r,n,o=Ie();Array.isArray(e)?[r,n]=e:e.buffer===o.HEAPU8.buffer?[r,n]=[e.byteOffset,e.byteLength]:[r,n]=jt(e);let i=0,a=0,l=0,d=[],p=[],m=[];try{if([a,d]=Ra(t),t?.externalData&&o.mountExternalData){let $=[];for(let S of t.externalData){let I=typeof S=="string"?S:S.path;$.push(Xt(typeof S=="string"?S:S.data).then(T=>{o.mountExternalData(I,T)}))}await Promise.all($)}for(let $ of t?.executionProviders??[])if((typeof $=="string"?$:$.name)==="webnn"){if(o.currentContext)throw new Error("WebNN execution provider is already set.");if(typeof $!="string"){let I=$,T=I?.context,A=I?.gpuDevice,D=I?.deviceType,z=I?.numThreads,H=I?.powerPreference;T?o.currentContext=T:A?o.currentContext=await navigator.ml.createContext(A):o.currentContext=await navigator.ml.createContext({deviceType:D,numThreads:z,powerPreference:H})}else o.currentContext=await navigator.ml.createContext();break}i=await o._OrtCreateSession(r,n,a),i===0&&$e("Can't create a session."),o.currentContext&&(o.currentContext=void 0);let[u,h]=bh(i),w=!!t?.enableGraphCapture,g=[],b=[],x=[];for(let $=0;$$==="gpu-buffer")&&(l=o._OrtCreateBinding(i),l===0&&$e("Can't create IO binding."),_={handle:l,outputPreferredLocations:x,outputPreferredLocationsEncoded:x.map($=>qn($))}),kt.set(i,[i,p,m,_,w,!1]),[i,g,b]}catch(u){throw p.forEach(h=>o._OrtFree(h)),m.forEach(h=>o._OrtFree(h)),l!==0&&o._OrtReleaseBinding(l),i!==0&&o._OrtReleaseSession(i),u}finally{o._free(r),a!==0&&o._OrtReleaseSessionOptions(a),d.forEach(u=>o._free(u)),o.unmountExternalData?.()}},Pr=e=>{let t=Ie(),r=kt.get(e);if(!r)throw new Error(`cannot release session. invalid session id: ${e}`);let[n,o,i,a,l]=r;a&&(l&&t._OrtClearBoundOutputs(a.handle),t._OrtReleaseBinding(a.handle)),t.jsepOnReleaseSession?.(e),o.forEach(d=>t._OrtFree(d)),i.forEach(d=>t._OrtFree(d)),t._OrtReleaseSession(n),kt.delete(e)},Cl=(e,t,r,n,o,i=!1)=>{if(!e){t.push(0);return}let a=Ie(),l=e[0],d=e[1],p=e[3],m,u;if(l==="string"&&p==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");if(i&&p!=="gpu-buffer")throw new Error(`External buffer must be provided for input/output index ${o} when enableGraphCapture is true.`);if(p==="gpu-buffer"){let g=e[2].gpuBuffer,b=It(Fn(l));u=d.reduce((_,$)=>_*$,1)*b;let x=a.jsepRegisterBuffer;if(!x)throw new Error('Tensor location "gpu-buffer" is not supported without using WebGPU.');m=x(n,o,g,u)}else{let g=e[2];if(Array.isArray(g)){u=4*g.length,m=a._malloc(u),r.push(m);let b=m/4;for(let x=0;xa.HEAP32[g++]=x);let b=a._OrtCreateTensor(Fn(l),m,u,w,d.length,qn(p));b===0&&$e(`Can't create tensor for input/output. session=${n}, index=${o}.`),t.push(b)}finally{a.stackRestore(h)}},Or=async(e,t,r,n,o,i)=>{let a=Ie(),l=kt.get(e);if(!l)throw new Error(`cannot run inference. 
invalid session id: ${e}`);let d=l[0],p=l[1],m=l[2],u=l[3],h=l[4],w=l[5],g=t.length,b=n.length,x=0,_=[],$=[],S=[],I=[],T=a.stackSave(),A=a.stackAlloc(g*4),D=a.stackAlloc(g*4),z=a.stackAlloc(b*4),H=a.stackAlloc(b*4);try{[x,_]=za(i);for(let q=0;qBe*Ae,1);re=ht(Y);let Ot=u?.outputPreferredLocations[n[q]];if(re==="string"){if(Ot==="gpu-buffer")throw new Error("String tensor is not supported on GPU.");let Be=[],Ae=J/4;for(let Xe=0;Xe0){let Be=a.jsepGetBuffer;if(!Be)throw new Error('preferredLocation "gpu-buffer" is not supported without using WebGPU.');let Ae=Be(J),Xe=It(Y);if(Xe===void 0||!Ur(re))throw new Error(`Unsupported data type: ${re}`);Z=!0,xe.push([re,ve,{gpuBuffer:Ae,download:a.jsepCreateDownloader(Ae,Se*Xe,re),dispose:()=>{a._OrtReleaseTensor(ie)}},"gpu-buffer"])}else{let Be=Mr(re),Ae=new Be(Se);new Uint8Array(Ae.buffer,Ae.byteOffset,Ae.byteLength).set(a.HEAPU8.subarray(J,J+Ae.byteLength)),xe.push([re,ve,Ae,"cpu"])}}finally{a.stackRestore(le),re==="string"&&J&&a._free(J),Z||a._OrtReleaseTensor(ie)}}return u&&!h&&(a._OrtClearBoundOutputs(u.handle),kt.set(e,[d,p,m,u,h,!1])),xe}finally{a.stackRestore(T),$.forEach(W=>a._OrtReleaseTensor(W)),S.forEach(W=>a._OrtReleaseTensor(W)),I.forEach(W=>a._free(W)),x!==0&&a._OrtReleaseRunOptions(x),_.forEach(W=>a._free(W))}},Dr=e=>{let t=Ie(),r=kt.get(e);if(!r)throw new Error("invalid session id");let n=r[0],o=t._OrtEndProfiling(n);o===0&&$e("Can't get an profile file name."),t._OrtFree(o)},zr=e=>{let t=[];for(let r of e){let n=r[2];!Array.isArray(n)&&"buffer"in n&&t.push(n.buffer)}return t}});var Pt,Fe,nr,dn,ln,un,Co,To,Ht,Gt,vh,Tl,Al,El,kl,Pl,Ol,Dl,Ao=U(()=>{"use strict";Le();Wn();St();qt();Pt=()=>!!ye.wasm.proxy&&typeof document<"u",nr=!1,dn=!1,ln=!1,To=new Map,Ht=(e,t)=>{let r=To.get(e);r?r.push(t):To.set(e,[t])},Gt=()=>{if(nr||!dn||ln||!Fe)throw new Error("worker not ready")},vh=e=>{switch(e.data.type){case"init-wasm":nr=!1,e.data.err?(ln=!0,Co[1](e.data.err)):(dn=!0,Co[0]()),un&&(URL.revokeObjectURL(un),un=void 0);break;case"init-ep":case"copy-from":case"create":case"release":case"run":case"end-profiling":{let t=To.get(e.data.type);e.data.err?t.shift()[1](e.data.err):t.shift()[0](e.data.out);break}default:}},Tl=async()=>{if(!dn){if(nr)throw new Error("multiple calls to 'initWasm()' detected.");if(ln)throw new Error("previous call to 'initWasm()' failed.");if(nr=!0,Pt())return new Promise((e,t)=>{Fe?.terminate(),Pa().then(([r,n])=>{try{Fe=n,Fe.onerror=i=>t(i),Fe.onmessage=vh,Co=[e,t];let o={type:"init-wasm",in:ye};Fe.postMessage(o),un=r}catch(o){t(o)}},t)});try{await Tr(ye.wasm),await Ar(ye),dn=!0}catch(e){throw ln=!0,e}finally{nr=!1}}},Al=async e=>{if(Pt())return Gt(),new Promise((t,r)=>{Ht("init-ep",[t,r]);let n={type:"init-ep",in:{epName:e,env:ye}};Fe.postMessage(n)});await Er(ye,e)},El=async e=>Pt()?(Gt(),new Promise((t,r)=>{Ht("copy-from",[t,r]);let n={type:"copy-from",in:{buffer:e}};Fe.postMessage(n,[e.buffer])})):jt(e),kl=async(e,t)=>{if(Pt()){if(t?.preferredOutputLocation)throw new Error('session option "preferredOutputLocation" is not supported for proxy.');return Gt(),new Promise((r,n)=>{Ht("create",[r,n]);let o={type:"create",in:{model:e,options:{...t}}},i=[];e instanceof Uint8Array&&i.push(e.buffer),Fe.postMessage(o,i)})}else return kr(e,t)},Pl=async e=>{if(Pt())return Gt(),new Promise((t,r)=>{Ht("release",[t,r]);let n={type:"release",in:e};Fe.postMessage(n)});Pr(e)},Ol=async(e,t,r,n,o,i)=>{if(Pt()){if(r.some(a=>a[3]!=="cpu"))throw new Error("input tensor on GPU is not supported for proxy.");if(o.some(a=>a))throw new 
Error("pre-allocated output tensor is not supported for proxy.");return Gt(),new Promise((a,l)=>{Ht("run",[a,l]);let d=r,p={type:"run",in:{sessionId:e,inputIndices:t,inputs:d,outputIndices:n,options:i}};Fe.postMessage(p,zr(d))})}else return Or(e,t,r,n,o,i)},Dl=async e=>{if(Pt())return Gt(),new Promise((t,r)=>{Ht("end-profiling",[t,r]);let n={type:"end-profiling",in:e};Fe.postMessage(n)});Dr(e)}});var zl,$h,cn,Bl=U(()=>{"use strict";Le();Ao();Q();Cr();jn();zl=(e,t)=>{switch(e.location){case"cpu":return[e.type,e.dims,e.data,"cpu"];case"gpu-buffer":return[e.type,e.dims,{gpuBuffer:e.gpuBuffer},"gpu-buffer"];default:throw new Error(`invalid data location: ${e.location} for ${t()}`)}},$h=e=>{switch(e[3]){case"cpu":return new De(e[0],e[2],e[1]);case"gpu-buffer":{let t=e[0];if(!Ur(t))throw new Error(`not supported data type: ${t} for deserializing GPU tensor`);let{gpuBuffer:r,download:n,dispose:o}=e[2];return De.fromGpuBuffer(r,{dataType:t,dims:e[1],download:n,dispose:o})}default:throw new Error(`invalid data location: ${e[3]}`)}},cn=class{async fetchModelAndCopyToWasmMemory(t){return El(await Xt(t))}async loadModel(t,r){We();let n;typeof t=="string"?!1?n=await Xt(t):n=await this.fetchModelAndCopyToWasmMemory(t):n=t,[this.sessionId,this.inputNames,this.outputNames]=await kl(n,r),Me()}async dispose(){return Pl(this.sessionId)}async run(t,r,n){We();let o=[],i=[];Object.entries(t).forEach(h=>{let w=h[0],g=h[1],b=this.inputNames.indexOf(w);if(b===-1)throw new Error(`invalid input '${w}'`);o.push(g),i.push(b)});let a=[],l=[];Object.entries(r).forEach(h=>{let w=h[0],g=h[1],b=this.outputNames.indexOf(w);if(b===-1)throw new Error(`invalid output '${w}'`);a.push(g),l.push(b)});let d=o.map((h,w)=>zl(h,()=>`input "${this.inputNames[i[w]]}"`)),p=a.map((h,w)=>h?zl(h,()=>`output "${this.outputNames[l[w]]}"`):null),m=await Ol(this.sessionId,i,d,l,p,n),u={};for(let h=0;h{"use strict";Le();Ao();Bl();qt();_h=()=>{if((typeof ye.wasm.initTimeout!="number"||ye.wasm.initTimeout<0)&&(ye.wasm.initTimeout=0),ye.wasm.simd===!1&&console.warn('Deprecated property "env.wasm.simd" is set to false. non-SIMD build is no longer provided, and this setting will be ignored.'),typeof ye.wasm.proxy!="boolean"&&(ye.wasm.proxy=!1),typeof ye.wasm.trace!="boolean"&&(ye.wasm.trace=!1),typeof ye.wasm.numThreads!="number"||!Number.isInteger(ye.wasm.numThreads)||ye.wasm.numThreads<=0)if(typeof self<"u"&&!self.crossOriginIsolated)ye.wasm.numThreads=1;else{let e=typeof navigator>"u"?Rn("node:os").cpus().length:navigator.hardwareConcurrency;ye.wasm.numThreads=Math.min(4,Math.ceil((e||1)/2))}},pn=class{async init(t){_h(),await Tl(),await Al(t)}async createInferenceSessionHandler(t,r){let n=new cn;return await n.loadModel(t,r),Promise.resolve(n)}}});var Ml={};Lt(Ml,{wasmBackend:()=>xh});var xh,Ul=U(()=>{"use strict";Rl();xh=new pn});Le();Le();Le();var _a="1.19.0";var F_=Nn;{let e=(Ul(),yr(Ml)).wasmBackend;_t("webgpu",e,5),_t("webnn",e,5),_t("cpu",e,10),_t("wasm",e,10)}Object.defineProperty(ye.versions,"web",{value:_a,enumerable:!0});export{up as InferenceSession,xr as TRACE,We as TRACE_FUNC_BEGIN,Me as TRACE_FUNC_END,De as Tensor,lp as TrainingSession,F_ as default,ye as env,_t as registerBackend}; /** * @license * Copyright 2021 Google LLC. All Rights Reserved. 
diff --git a/assets/dist/ort.webgpu.bundle.min.mjs.map b/assets/dist/ort.webgpu.bundle.min.mjs.map index f74beae..228fafa 100644 --- a/assets/dist/ort.webgpu.bundle.min.mjs.map +++ b/assets/dist/ort.webgpu.bundle.min.mjs.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["../../common/lib/backend-impl.ts", "../../common/lib/backend.ts", "../../common/lib/version.ts", "../../common/lib/env-impl.ts", "../../common/lib/env.ts", "../../common/lib/tensor-conversion-impl.ts", "../../common/lib/tensor-factory-impl.ts", "../../common/lib/tensor-impl-type-mapping.ts", "../../common/lib/tensor-utils-impl.ts", "../../common/lib/tensor-impl.ts", "../../common/lib/tensor.ts", "../../common/lib/trace.ts", "../../common/lib/inference-session-impl.ts", "../../common/lib/inference-session.ts", "../../common/lib/tensor-conversion.ts", "../../common/lib/tensor-factory.ts", "../../common/lib/onnx-model.ts", "../../common/lib/onnx-value.ts", "../../common/lib/training-session-impl.ts", "../../common/lib/training-session.ts", "../../common/lib/index.ts", "../lib/wasm/wasm-utils-env.ts", "../lib/wasm/proxy-worker/main.ts", "ort-wasm-simd-threaded.jsep.mjs", "../lib/wasm/wasm-utils-import.ts", "../lib/wasm/wasm-factory.ts", "../lib/wasm/wasm-utils.ts", "../lib/wasm/run-options.ts", "../lib/wasm/session-options.ts", "../lib/wasm/wasm-common.ts", "../lib/wasm/wasm-utils-load-file.ts", "../lib/wasm/jsep/log.ts", "../lib/wasm/jsep/tensor-view.ts", "../lib/wasm/jsep/webgpu/types.ts", "../lib/wasm/jsep/webgpu/gpu-data-manager.ts", "../lib/wasm/jsep/webgpu/attribute-with-cache-key.ts", "../lib/wasm/jsep/util.ts", "../lib/wasm/jsep/webgpu/ops/common.ts", "../lib/wasm/jsep/webgpu/ops/transpose.ts", "../lib/wasm/jsep/webgpu/ops/reduce-shared.ts", "../lib/wasm/jsep/webgpu/ops/reduce.ts", "../lib/wasm/jsep/webgpu/ops/argminmax.ts", "../lib/wasm/jsep/webgpu/ops/attention.ts", "../lib/wasm/jsep/webgpu/ops/batch-norm.ts", "../lib/wasm/jsep/webgpu/ops/bias-add.ts", "../lib/wasm/jsep/webgpu/ops/unary-op.ts", "../lib/wasm/jsep/webgpu/ops/bias-split-gelu.ts", "../lib/wasm/jsep/webgpu/ops/binary-op.ts", "../lib/wasm/jsep/webgpu/ops/concat.ts", "../lib/wasm/jsep/webgpu/ops/fuse-utils.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/activation_util.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv_util.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/matmul_packed_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv2d_mm_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv3d_naive_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/conv-grouped.ts", "../lib/wasm/jsep/webgpu/ops/matmul.ts", "../lib/wasm/jsep/webgpu/ops/conv.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv_backprop_mm_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/3rd-party/conv_backprop_webgpu.ts", "../lib/wasm/jsep/webgpu/ops/conv-transpose.ts", "../lib/wasm/jsep/webgpu/ops/cumsum.ts", "../lib/wasm/jsep/webgpu/ops/depth-to-space.ts", "../lib/wasm/jsep/webgpu/ops/einsum.ts", "../lib/wasm/jsep/webgpu/ops/expand.ts", "../lib/wasm/jsep/webgpu/ops/fast-gelu.ts", "../lib/wasm/jsep/webgpu/ops/gather.ts", "../lib/wasm/jsep/webgpu/ops/gather-elements.ts", "../lib/wasm/jsep/webgpu/ops/gemm.ts", "../lib/wasm/jsep/webgpu/ops/multihead-attention.ts", "../lib/wasm/jsep/webgpu/ops/tile.ts", "../lib/wasm/jsep/webgpu/ops/group-query-attention.ts", "../lib/wasm/jsep/webgpu/ops/instance-norm.ts", "../lib/wasm/jsep/webgpu/ops/layer-norm.ts", "../lib/wasm/jsep/webgpu/ops/matmulnbits.ts", "../lib/wasm/jsep/webgpu/ops/pad.ts", "../lib/wasm/jsep/webgpu/ops/pool.ts", "../lib/wasm/jsep/webgpu/ops/range.ts", 
"../lib/wasm/jsep/webgpu/ops/resize.ts", "../lib/wasm/jsep/webgpu/ops/rotary-embedding.ts", "../lib/wasm/jsep/webgpu/ops/skip-layer-norm.ts", "../lib/wasm/jsep/webgpu/ops/slice.ts", "../lib/wasm/jsep/webgpu/ops/softmax.ts", "../lib/wasm/jsep/webgpu/ops/split.ts", "../lib/wasm/jsep/webgpu/ops/where.ts", "../lib/wasm/jsep/webgpu/op-resolve-rules.ts", "../lib/wasm/jsep/webgpu/program-manager.ts", "../lib/wasm/jsep/backend-webgpu.ts", "../lib/wasm/jsep/init.ts", "../lib/wasm/wasm-core-impl.ts", "../lib/wasm/proxy-wrapper.ts", "../lib/wasm/session-handler-inference.ts", "../lib/backend-wasm.ts", "../lib/backend-wasm-inference.ts", "../lib/index.ts", "../lib/version.ts"], - "sourcesContent": ["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend} from './backend.js';\nimport {InferenceSession} from './inference-session.js';\n\ninterface BackendInfo {\n backend: Backend;\n priority: number;\n\n initPromise?: Promise;\n initialized?: boolean;\n aborted?: boolean;\n error?: string;\n}\n\nconst backends: Map = new Map();\nconst backendsSortedByPriority: string[] = [];\n\n/**\n * Register a backend.\n *\n * @param name - the name as a key to lookup as an execution provider.\n * @param backend - the backend object.\n * @param priority - an integer indicating the priority of the backend. Higher number means higher priority. if priority\n * < 0, it will be considered as a 'beta' version and will not be used as a fallback backend by default.\n *\n * @ignore\n */\nexport const registerBackend = (name: string, backend: Backend, priority: number): void => {\n if (backend && typeof backend.init === 'function' && typeof backend.createInferenceSessionHandler === 'function') {\n const currentBackend = backends.get(name);\n if (currentBackend === undefined) {\n backends.set(name, {backend, priority});\n } else if (currentBackend.priority > priority) {\n // same name is already registered with a higher priority. 
skip registeration.\n return;\n } else if (currentBackend.priority === priority) {\n if (currentBackend.backend !== backend) {\n throw new Error(`cannot register backend \"${name}\" using priority ${priority}`);\n }\n }\n\n if (priority >= 0) {\n const i = backendsSortedByPriority.indexOf(name);\n if (i !== -1) {\n backendsSortedByPriority.splice(i, 1);\n }\n\n for (let i = 0; i < backendsSortedByPriority.length; i++) {\n if (backends.get(backendsSortedByPriority[i])!.priority <= priority) {\n backendsSortedByPriority.splice(i, 0, name);\n return;\n }\n }\n backendsSortedByPriority.push(name);\n }\n return;\n }\n\n throw new TypeError('not a valid backend');\n};\n\n/**\n * Try to resolve and initialize a backend.\n *\n * @param backendName - the name of the backend.\n * @returns the backend instance if resolved and initialized successfully, or an error message if failed.\n */\nconst tryResolveAndInitializeBackend = async(backendName: string): Promise => {\n const backendInfo = backends.get(backendName);\n if (!backendInfo) {\n return 'backend not found.';\n }\n\n if (backendInfo.initialized) {\n return backendInfo.backend;\n } else if (backendInfo.aborted) {\n return backendInfo.error!;\n } else {\n const isInitializing = !!backendInfo.initPromise;\n try {\n if (!isInitializing) {\n backendInfo.initPromise = backendInfo.backend.init(backendName);\n }\n await backendInfo.initPromise;\n backendInfo.initialized = true;\n return backendInfo.backend;\n } catch (e) {\n if (!isInitializing) {\n backendInfo.error = `${e}`;\n backendInfo.aborted = true;\n }\n return backendInfo.error!;\n } finally {\n delete backendInfo.initPromise;\n }\n }\n};\n\n/**\n * Resolve execution providers from the specific session options.\n *\n * @param options - the session options object.\n * @returns a promise that resolves to a tuple of an initialized backend instance and a session options object with\n * filtered EP list.\n *\n * @ignore\n */\nexport const resolveBackendAndExecutionProviders = async(options: InferenceSession.SessionOptions):\n Promise<[backend: Backend, options: InferenceSession.SessionOptions]> => {\n // extract backend hints from session options\n const eps = options.executionProviders || [];\n const backendHints = eps.map(i => typeof i === 'string' ? i : i.name);\n const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;\n\n // try to resolve and initialize all requested backends\n let backend: Backend|undefined;\n const errors = [];\n const availableBackendNames = new Set();\n for (const backendName of backendNames) {\n const resolveResult = await tryResolveAndInitializeBackend(backendName);\n if (typeof resolveResult === 'string') {\n errors.push({name: backendName, err: resolveResult});\n } else {\n if (!backend) {\n backend = resolveResult;\n }\n if (backend === resolveResult) {\n availableBackendNames.add(backendName);\n }\n }\n }\n\n // if no backend is available, throw error.\n if (!backend) {\n throw new Error(`no available backend found. ERR: ${errors.map(e => `[${e.name}] ${e.err}`).join(', ')}`);\n }\n\n // for each explicitly requested backend, if it's not available, output warning message.\n for (const {name, err} of errors) {\n if (backendHints.includes(name)) {\n // eslint-disable-next-line no-console\n console.warn(`removing requested execution provider \"${\n name}\" from session options because it is not available: ${err}`);\n }\n }\n\n const filteredEps = eps.filter(i => availableBackendNames.has(typeof i === 'string' ? 
i : i.name));\n\n return [\n backend, new Proxy(options, {\n get: (target, prop) => {\n if (prop === 'executionProviders') {\n return filteredEps;\n }\n return Reflect.get(target, prop);\n }\n })\n ];\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession} from './training-session.js';\n\n/**\n * @ignore\n */\nexport declare namespace SessionHandler {\n type FeedsType = {[name: string]: OnnxValue};\n type FetchesType = {[name: string]: OnnxValue | null};\n type ReturnType = {[name: string]: OnnxValue};\n}\n\n/**\n * Represents shared SessionHandler functionality\n *\n * @ignore\n */\ninterface SessionHandler {\n dispose(): Promise;\n\n readonly inputNames: readonly string[];\n readonly outputNames: readonly string[];\n}\n\n/**\n * Represent a handler instance of an inference session.\n *\n * @ignore\n */\nexport interface InferenceSessionHandler extends SessionHandler {\n startProfiling(): void;\n endProfiling(): void;\n\n run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n}\n\n/**\n * Represent a handler instance of a training inference session.\n *\n * @ignore\n */\nexport interface TrainingSessionHandler extends SessionHandler {\n readonly evalInputNames: readonly string[];\n readonly evalOutputNames: readonly string[];\n\n lazyResetGrad(): Promise;\n runTrainStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n runOptimizerStep(options: InferenceSession.RunOptions): Promise;\n runEvalStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n\n getParametersSize(trainableOnly: boolean): Promise;\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n getContiguousParameters(trainableOnly: boolean): Promise;\n}\n\n/**\n * Represent a backend that provides implementation of model inferencing.\n *\n * @ignore\n */\nexport interface Backend {\n /**\n * Initialize the backend asynchronously. Should throw when failed.\n */\n init(backendName: string): Promise;\n\n createInferenceSessionHandler(uriOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n\n createTrainingSessionHandler?\n (checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,\n evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,\n options: InferenceSession.SessionOptions): Promise;\n}\n\nexport {registerBackend} from './backend-impl.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from './env.js';\nimport {version} from './version.js';\n\ntype LogLevelType = Env['logLevel'];\n\nlet logLevelValue: Required = 'warning';\n\nexport const env: Env = {\n wasm: {} as Env.WebAssemblyFlags,\n webgl: {} as Env.WebGLFlags,\n webgpu: {} as Env.WebGpuFlags,\n versions: {common: version},\n\n set logLevel(value: LogLevelType) {\n if (value === undefined) {\n return;\n }\n if (typeof value !== 'string' || ['verbose', 'info', 'warning', 'error', 'fatal'].indexOf(value) === -1) {\n throw new Error(`Unsupported logging level: ${value}`);\n }\n logLevelValue = value;\n },\n get logLevel(): Required {\n return logLevelValue;\n },\n};\n\n// set property 'logLevel' so that they can be correctly transferred to worker by `postMessage()`.\nObject.defineProperty(env, 'logLevel', {enumerable: true});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env as envImpl} from './env-impl.js';\n\nexport declare namespace Env {\n export type WasmPathPrefix = string;\n export interface WasmFilePaths {\n /**\n * Specify the override path for the main .wasm file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .wasm file is:\n * - `ort-wasm-simd-threaded.wasm` for default build\n * - `ort-wasm-simd-threaded.jsep.wasm` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.wasm` for training build\n */\n wasm?: URL|string;\n /**\n * Specify the override path for the main .mjs file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .mjs file is:\n * - `ort-wasm-simd-threaded.mjs` for default build\n * - `ort-wasm-simd-threaded.jsep.mjs` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.mjs` for training build\n */\n mjs?: URL|string;\n }\n export type WasmPrefixOrFilePaths = WasmPathPrefix|WasmFilePaths;\n export interface WebAssemblyFlags {\n /**\n * set or get number of thread(s). If omitted or set to 0, number of thread(s) will be determined by system. If set\n * to 1, no worker thread will be spawned.\n *\n * This setting is available only when WebAssembly multithread feature is available in current context.\n *\n * @defaultValue `0`\n */\n numThreads?: number;\n\n /**\n * set or get a boolean value indicating whether to enable SIMD. If set to false, SIMD will be forcely disabled.\n *\n * This setting is available only when WebAssembly SIMD feature is available in current context.\n *\n * @deprecated This property is deprecated. Since SIMD is supported by all major JavaScript engines, non-SIMD\n * build is no longer provided. This property will be removed in future release.\n * @defaultValue `true`\n */\n simd?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @deprecated Use `env.trace` instead. If `env.trace` is set, this property will be ignored.\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Set or get a number specifying the timeout for initialization of WebAssembly backend, in milliseconds. A zero\n * value indicates no timeout is set.\n *\n * @defaultValue `0`\n */\n initTimeout?: number;\n\n /**\n * Set a custom URL prefix to the .wasm/.mjs files, or an object of overrides for both .wasm/.mjs file. 
The override\n * path should be an absolute path.\n */\n wasmPaths?: WasmPrefixOrFilePaths;\n\n /**\n * Set or get a boolean value indicating whether to proxy the execution of main thread to a worker thread.\n *\n * @defaultValue `false`\n */\n proxy?: boolean;\n }\n\n export interface WebGLFlags {\n /**\n * Set or get the WebGL Context ID (webgl or webgl2).\n *\n * @defaultValue `'webgl2'`\n */\n contextId?: 'webgl'|'webgl2';\n /**\n * Get the WebGL rendering context.\n */\n readonly context: WebGLRenderingContext;\n /**\n * Set or get the maximum batch size for matmul. 0 means to disable batching.\n *\n * @deprecated\n */\n matmulMaxBatchSize?: number;\n /**\n * Set or get the texture cache mode.\n *\n * @defaultValue `'full'`\n */\n textureCacheMode?: 'initializerOnly'|'full';\n /**\n * Set or get the packed texture mode\n *\n * @defaultValue `false`\n */\n pack?: boolean;\n /**\n * Set or get whether enable async download.\n *\n * @defaultValue `false`\n */\n async?: boolean;\n }\n\n export interface WebGpuProfilingDataV1TensorMetadata {\n dims: readonly number[];\n dataType: string;\n }\n export interface WebGpuProfilingDataV1 {\n version: 1;\n inputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n outputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n kernelId: number;\n kernelType: string;\n kernelName: string;\n programName: string;\n startTime: number;\n endTime: number;\n }\n\n export type WebGpuProfilingData = WebGpuProfilingDataV1;\n\n export interface WebGpuFlags {\n /**\n * Set or get the profiling mode.\n *\n * @deprecated Use `env.webgpu.profiling.mode` instead. If `env.webgpu.profiling.mode` is set, this property will be\n * ignored.\n */\n profilingMode?: 'off'|'default';\n /**\n * Set or get the profiling configuration.\n */\n profiling?: {\n /**\n * Set or get the profiling mode.\n *\n * @defaultValue `'off'`\n */\n mode?: 'off'|'default';\n\n /**\n * Set or get a callback function when a profiling data is received. If not set, the profiling data will be\n * printed to console.\n */\n ondata?: (data: WebGpuProfilingData) => void;\n };\n /**\n * Set or get the power preference.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n powerPreference?: 'low-power'|'high-performance';\n /**\n * Set or get the force fallback adapter flag.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n forceFallbackAdapter?: boolean;\n /**\n * Set or get the adapter for WebGPU.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as the GPU adapter for the underlying WebGPU backend to create GPU device.\n *\n * If this property is not set, it will be available to get after the first WebGPU inference session is created. 
The\n * value will be the GPU adapter that created by the underlying WebGPU backend.\n *\n * When use with TypeScript, the type of this property is `GPUAdapter` defined in \"@webgpu/types\".\n * Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType}\n */\n adapter: unknown;\n /**\n * Get the device for WebGPU.\n *\n * This property is only available after the first WebGPU inference session is created.\n *\n * When use with TypeScript, the type of this property is `GPUDevice` defined in \"@webgpu/types\".\n * Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType} for more details about why not use types defined in \"@webgpu/types\".\n */\n readonly device: unknown;\n /**\n * Set or get whether validate input content.\n *\n * @defaultValue `false`\n */\n validateInputContent?: boolean;\n }\n}\n\nexport interface Env {\n /**\n * set the severity level for logging.\n *\n * @defaultValue `'warning'`\n */\n logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal';\n\n /**\n * Indicate whether run in debug mode.\n *\n * @defaultValue `false`\n */\n debug?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Get version of the current package.\n */\n readonly versions: {\n readonly common: string;\n readonly web?: string;\n readonly node?: string;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n readonly 'react-native'?: string;\n };\n\n /**\n * Represent a set of flags for WebAssembly\n */\n readonly wasm: Env.WebAssemblyFlags;\n\n /**\n * Represent a set of flags for WebGL\n */\n readonly webgl: Env.WebGLFlags;\n\n /**\n * Represent a set of flags for WebGPU\n */\n readonly webgpu: Env.WebGpuFlags;\n\n [name: string]: unknown;\n}\n\n/**\n * Represent a set of flags as a global singleton.\n */\nexport const env: Env = envImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {Tensor} from './tensor.js';\n\n/**\n * implementation of Tensor.toDataURL()\n */\nexport const tensorToDataURL = (tensor: Tensor, options?: TensorToDataUrlOptions): string => {\n const canvas = typeof document !== 'undefined' ? document.createElement('canvas') : (new OffscreenCanvas(1, 1));\n canvas.width = tensor.dims[3];\n canvas.height = tensor.dims[2];\n const pixels2DContext =\n canvas.getContext('2d') as (CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D | null);\n\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n }\n\n const inputformat = options?.format !== undefined ? 
options.format : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 0];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n // Default pointer assignments\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < height; i++) {\n for (let j = 0; j < width; j++) {\n const R = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n const G = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n const B = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n const A = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n // eslint-disable-next-line @typescript-eslint/restrict-plus-operands\n pixels2DContext.fillStyle = 'rgba(' + R + ',' + G + ',' + B + ',' + A + ')';\n pixels2DContext.fillRect(j, i, 1, 1);\n }\n }\n if ('toDataURL' in canvas) {\n return canvas.toDataURL();\n } else {\n throw new Error('toDataURL is not supported');\n }\n } else {\n throw new Error('Can not access image data');\n }\n};\n\n/**\n * implementation of Tensor.toImageData()\n */\nexport const tensorToImageData = (tensor: Tensor, options?: TensorToImageDataOptions): ImageData => {\n const pixels2DContext = typeof document !== 'undefined' ?\n document.createElement('canvas').getContext('2d') :\n new OffscreenCanvas(1, 1).getContext('2d') as OffscreenCanvasRenderingContext2D;\n let image: ImageData;\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n let channels: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[1];\n channels = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n channels = tensor.dims[1];\n }\n const inputformat = options !== undefined ? (options.format !== undefined ? 
options.format : 'RGB') : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 255];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n if (options !== undefined) {\n if (options.format !== undefined && (channels === 4 && options.format !== 'RGBA') ||\n (channels === 3 && (options.format !== 'RGB' && options.format !== 'BGR'))) {\n throw new Error('Tensor format doesn\\'t match input tensor dims');\n }\n }\n\n // Default pointer assignments\n const step = 4;\n let rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n image = pixels2DContext.createImageData(width, height);\n\n for (let i = 0; i < height * width;\n rImagePointer += step, gImagePointer += step, bImagePointer += step, aImagePointer += step, i++) {\n image.data[rImagePointer] = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n image.data[gImagePointer] = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n image.data[bImagePointer] = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n image.data[aImagePointer] = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n }\n\n } else {\n throw new Error('Can not access image data');\n }\n return image;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsDimensions, OptionsFormat, OptionsNormalizationParameters, OptionsTensorFormat, OptionsTensorLayout, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\ninterface BufferToTensorOptions extends OptionsDimensions, OptionsTensorLayout, OptionsNormalizationParameters,\n OptionsFormat, OptionsTensorFormat {}\n\n/**\n * Create a new tensor object from image object\n *\n * @param buffer - Extracted image buffer data - assuming RGBA format\n * @param imageFormat - input image configuration - required configurations height, width, format\n * @param tensorFormat - output tensor configuration - Default is RGB format\n */\nexport const bufferToTensor = (buffer: Uint8ClampedArray|undefined, options: BufferToTensorOptions): Tensor => {\n if (buffer === undefined) {\n throw new Error('Image buffer must be defined');\n }\n if (options.height === undefined || options.width === undefined) {\n throw new Error('Image height and width must be defined');\n }\n if (options.tensorLayout === 'NHWC') {\n throw new Error('NHWC Tensor layout is not supported yet');\n }\n\n const {height, width} = options;\n\n const norm = options.norm ?? {mean: 255, bias: 0};\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean![0], norm.mean![1], norm.mean![2], norm.mean![3] ?? 255];\n }\n\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias![0], norm.bias![1], norm.bias![2], norm.bias![3] ?? 0];\n }\n\n const inputformat = options.format !== undefined ? options.format : 'RGBA';\n // default value is RGBA since imagedata and HTMLImageElement uses it\n\n const outputformat =\n options.tensorFormat !== undefined ? (options.tensorFormat !== undefined ? options.tensorFormat : 'RGB') : 'RGB';\n const stride = height * width;\n const float32Data = outputformat === 'RGBA' ? 
new Float32Array(stride * 4) : new Float32Array(stride * 3);\n\n // Default pointer assignments\n let step = 4, rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGB') {\n step = 3;\n rImagePointer = 0;\n gImagePointer = 1;\n bImagePointer = 2;\n aImagePointer = -1;\n }\n\n // Updating the pointer assignments based on the output tensor format\n if (outputformat === 'RGBA') {\n aTensorPointer = stride * 3;\n } else if (outputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n } else if (outputformat === 'BGR') {\n bTensorPointer = 0;\n gTensorPointer = stride;\n rTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < stride;\n i++, rImagePointer += step, bImagePointer += step, gImagePointer += step, aImagePointer += step) {\n float32Data[rTensorPointer++] = (buffer[rImagePointer] + normBias[0]) / normMean[0];\n float32Data[gTensorPointer++] = (buffer[gImagePointer] + normBias[1]) / normMean[1];\n float32Data[bTensorPointer++] = (buffer[bImagePointer] + normBias[2]) / normMean[2];\n if (aTensorPointer !== -1 && aImagePointer !== -1) {\n float32Data[aTensorPointer++] = (buffer[aImagePointer] + normBias[3]) / normMean[3];\n }\n }\n\n // Float32Array -> ort.Tensor\n const outputTensor = outputformat === 'RGBA' ? new Tensor('float32', float32Data, [1, 4, height, width]) :\n new Tensor('float32', float32Data, [1, 3, height, width]);\n return outputTensor;\n};\n\n/**\n * implementation of Tensor.fromImage().\n */\nexport const tensorFromImage = async(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise => {\n // checking the type of image object\n const isHTMLImageEle = typeof (HTMLImageElement) !== 'undefined' && image instanceof HTMLImageElement;\n const isImageDataEle = typeof (ImageData) !== 'undefined' && image instanceof ImageData;\n const isImageBitmap = typeof (ImageBitmap) !== 'undefined' && image instanceof ImageBitmap;\n const isString = typeof image === 'string';\n\n let data: Uint8ClampedArray|undefined;\n let bufferToTensorOptions: BufferToTensorOptions = options ?? 
{};\n\n const createCanvas = () => {\n if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n } else if (typeof OffscreenCanvas !== 'undefined') {\n return new OffscreenCanvas(1, 1);\n } else {\n throw new Error('Canvas is not supported');\n }\n };\n const createCanvasContext = (canvas: HTMLCanvasElement|OffscreenCanvas) => {\n if (canvas instanceof HTMLCanvasElement) {\n return canvas.getContext('2d');\n } else if (canvas instanceof OffscreenCanvas) {\n return canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n } else {\n return null;\n }\n };\n // filling and checking image configuration options\n if (isHTMLImageEle) {\n // HTMLImageElement - image object - format is RGBA by default\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n let height = image.height;\n let width = image.width;\n if (options !== undefined && options.resizedHeight !== undefined && options.resizedWidth !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n if (options.tensorFormat !== undefined) {\n throw new Error('Image input config format must be RGBA for HTMLImageElement');\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n }\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n }\n\n pixels2DContext.drawImage(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isImageDataEle) {\n let height: number;\n let width: number;\n\n if (options !== undefined && options.resizedWidth !== undefined && options.resizedHeight !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n } else {\n height = image.height;\n width = image.width;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n }\n bufferToTensorOptions.format = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n\n if (options !== undefined) {\n const tempCanvas = createCanvas();\n\n tempCanvas.width = width;\n tempCanvas.height = height;\n\n const pixels2DContext = createCanvasContext(tempCanvas);\n\n if (pixels2DContext != null) {\n pixels2DContext.putImageData(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else {\n data = image.data;\n }\n } else if (isImageBitmap) {\n // ImageBitmap - image object - format must be provided by user\n if (options === undefined) {\n throw new Error('Please provide image config with format for Imagebitmap');\n }\n\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n const height = image.height;\n const width = image.width;\n pixels2DContext.drawImage(image, 0, 0, width, height);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isString) {\n return 
new Promise((resolve, reject) => {\n const canvas = createCanvas();\n const context = createCanvasContext(canvas);\n if (!image || !context) {\n return reject();\n }\n const newImage = new Image();\n newImage.crossOrigin = 'Anonymous';\n newImage.src = image;\n newImage.onload = () => {\n canvas.width = newImage.width;\n canvas.height = newImage.height;\n context.drawImage(newImage, 0, 0, canvas.width, canvas.height);\n const img = context.getImageData(0, 0, canvas.width, canvas.height);\n\n bufferToTensorOptions.height = canvas.height;\n bufferToTensorOptions.width = canvas.width;\n resolve(bufferToTensor(img.data, bufferToTensorOptions));\n };\n });\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n\n if (data !== undefined) {\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n};\n\n/**\n * implementation of Tensor.fromTexture().\n */\nexport const tensorFromTexture = (\n texture: TensorInterface.TextureType, options: TensorFromTextureOptions): Tensor => {\n const {width, height, download, dispose} = options;\n // Always assume RGBAF32. TODO: support different texture format\n const dims = [1, height, width, 4];\n return new Tensor({location: 'texture', type: 'float32', texture, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromGpuBuffer().\n */\nexport const tensorFromGpuBuffer = (\n gpuBuffer: TensorInterface.GpuBufferType, options: TensorFromGpuBufferOptions): Tensor => {\n const {dataType, dims, download, dispose} = options;\n return new Tensor({location: 'gpu-buffer', type: dataType ?? 'float32', gpuBuffer, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromPinnedBuffer().\n */\nexport const tensorFromPinnedBuffer = (\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor =>\n new Tensor({location: 'cpu-pinned', type, data: buffer, dims: dims ?? [buffer.length]});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type SupportedTypedArrayConstructors = Float32ArrayConstructor|Uint8ArrayConstructor|Int8ArrayConstructor|\n Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|Uint8ArrayConstructor|\n Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor;\nexport type SupportedTypedArray = InstanceType;\n\n// a runtime map that maps type string to TypedArray constructor. Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP = new Map([\n ['float32', Float32Array],\n ['uint8', Uint8Array],\n ['int8', Int8Array],\n ['uint16', Uint16Array],\n ['int16', Int16Array],\n ['int32', Int32Array],\n ['bool', Uint8Array],\n ['float64', Float64Array],\n ['uint32', Uint32Array],\n]);\n\n// a runtime map that maps type string to TypedArray constructor. 
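// A usage sketch for the Tensor.fromImage() implementation above, assuming the public
// package re-exports it (e.g. 'onnxruntime-web') and a browser environment with ImageData.
import { Tensor } from 'onnxruntime-web';

const pixels = new ImageData(224, 224);                // RGBA pixel source (placeholder content)
const imageTensor = await Tensor.fromImage(pixels);    // defaults: RGBA input -> 'RGB' float32 output
// imageTensor.dims is [1, 3, 224, 224]; each value is (pixel + 0) / 255 per the default norm.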
Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP = new Map([\n [Float32Array, 'float32'],\n [Uint8Array, 'uint8'],\n [Int8Array, 'int8'],\n [Uint16Array, 'uint16'],\n [Int16Array, 'int16'],\n [Int32Array, 'int32'],\n [Float64Array, 'float64'],\n [Uint32Array, 'uint32'],\n]);\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// the following code allows delaying execution of BigInt/Float16Array checking. This allows lazy initialization for\n// NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP and NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, which allows BigInt/Float16Array\n// polyfill if available.\nlet isTypedArrayChecked = false;\nexport const checkTypedArray = () => {\n if (!isTypedArrayChecked) {\n isTypedArrayChecked = true;\n const isBigInt64ArrayAvailable = typeof BigInt64Array !== 'undefined' && BigInt64Array.from;\n const isBigUint64ArrayAvailable = typeof BigUint64Array !== 'undefined' && BigUint64Array.from;\n const isFloat16ArrayAvailable = typeof Float16Array !== 'undefined' && Float16Array.from;\n\n if (isBigInt64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('int64', BigInt64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigInt64Array, 'int64');\n }\n if (isBigUint64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('uint64', BigUint64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigUint64Array, 'uint64');\n }\n if (isFloat16ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Float16Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(Float16Array, 'float16');\n } else {\n // if Float16Array is not available, use 'Uint16Array' to store the data.\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Uint16Array);\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TextureConstructorParameters} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\n\n/**\n * calculate size from dims.\n *\n * @param dims the dims array. 
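// Illustration of the lazy registration above. These maps and checkTypedArray() are
// internal to this module (not public API); the sketch only shows what the lookups
// resolve to after the one-time check has run.
checkTypedArray();
const int64Ctor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get('int64');     // BigInt64Array where supported
const float16Ctor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get('float16'); // Float16Array polyfill if present, otherwise Uint16Array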
May be an illegal input.\n */\nexport const calculateSize = (dims: readonly unknown[]): number => {\n let size = 1;\n for (let i = 0; i < dims.length; i++) {\n const dim = dims[i];\n if (typeof dim !== 'number' || !Number.isSafeInteger(dim)) {\n throw new TypeError(`dims[${i}] must be an integer, got: ${dim}`);\n }\n if (dim < 0) {\n throw new RangeError(`dims[${i}] must be a non-negative integer, got: ${dim}`);\n }\n size *= dim;\n }\n return size;\n};\n\n/**\n * implementation of Tensor.reshape()\n */\nexport const tensorReshape = (tensor: Tensor, dims: readonly number[]): Tensor => {\n switch (tensor.location) {\n case 'cpu':\n return new Tensor(tensor.type, tensor.data, dims);\n case 'cpu-pinned':\n return new Tensor({\n location: 'cpu-pinned',\n data: tensor.data as CpuPinnedConstructorParameters['data'],\n type: tensor.type as CpuPinnedConstructorParameters['type'],\n dims,\n });\n case 'texture':\n return new Tensor({\n location: 'texture',\n texture: tensor.texture,\n type: tensor.type as TextureConstructorParameters['type'],\n dims,\n });\n case 'gpu-buffer':\n return new Tensor({\n location: 'gpu-buffer',\n gpuBuffer: tensor.gpuBuffer,\n type: tensor.type as GpuBufferConstructorParameters['type'],\n dims,\n });\n default:\n throw new Error(`tensorReshape: tensor location ${tensor.location} is not supported`);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {tensorToDataURL, tensorToImageData} from './tensor-conversion-impl.js';\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {tensorFromGpuBuffer, tensorFromImage, tensorFromPinnedBuffer, tensorFromTexture} from './tensor-factory-impl.js';\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions, TextureConstructorParameters} from './tensor-factory.js';\nimport {checkTypedArray, NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP, NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, SupportedTypedArray, SupportedTypedArrayConstructors} from './tensor-impl-type-mapping.js';\nimport {calculateSize, tensorReshape} from './tensor-utils-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\n// type aliases for those exported from Tensor interface\n\ntype TensorType = TensorInterface.Type;\ntype TensorDataType = TensorInterface.DataType;\ntype TensorDataLocation = TensorInterface.DataLocation;\ntype TensorTextureType = TensorInterface.TextureType;\ntype TensorGpuBufferType = TensorInterface.GpuBufferType;\n\n/**\n * the implementation of Tensor interface.\n *\n * @ignore\n */\nexport class Tensor implements TensorInterface {\n // #region constructors\n\n /**\n * Construct a new CPU tensor object from the given type, data and dims.\n */\n constructor(\n type: TensorType, data: TensorDataType|readonly string[]|readonly number[]|readonly boolean[],\n dims?: readonly number[]);\n /**\n * Construct a new CPU tensor object from the given data and dims. 
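// A usage sketch for the reshape path above via the public Tensor.reshape() method,
// which delegates to tensorReshape() for CPU tensors (package name assumed).
import { Tensor } from 'onnxruntime-web';

const square = new Tensor('float32', Float32Array.from([1, 2, 3, 4]), [2, 2]);
const column = square.reshape([4, 1]);   // same backing data, new dims
// calculateSize([4, 1]) === 4 matches square.data.length, so the new shape is accepted.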
Type is inferred from data.\n */\n constructor(data: TensorDataType|readonly string[]|readonly boolean[], dims?: readonly number[]);\n /**\n * Construct a new tensor object from the pinned CPU data with the given type and dims.\n *\n * Tensor's location will be set to 'cpu-pinned'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: CpuPinnedConstructorParameters);\n /**\n * Construct a new tensor object from the WebGL texture with the given type and dims.\n *\n * Tensor's location will be set to 'texture'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: TextureConstructorParameters);\n /**\n * Construct a new tensor object from the WebGPU buffer with the given type and dims.\n *\n * Tensor's location will be set to 'gpu-buffer'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: GpuBufferConstructorParameters);\n\n /**\n * implementation.\n */\n constructor(\n arg0: TensorType|TensorDataType|readonly string[]|readonly boolean[]|CpuPinnedConstructorParameters|\n TextureConstructorParameters|GpuBufferConstructorParameters,\n arg1?: TensorDataType|readonly number[]|readonly string[]|readonly boolean[], arg2?: readonly number[]) {\n // perform one-time check for BigInt/Float16Array support\n checkTypedArray();\n\n let type: TensorType;\n let dims: readonly number[];\n\n if (typeof arg0 === 'object' && 'location' in arg0) {\n //\n // constructing tensor from specific location\n //\n this.dataLocation = arg0.location;\n type = arg0.type;\n dims = arg0.dims;\n switch (arg0.location) {\n case 'cpu-pinned': {\n const expectedTypedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(type);\n if (!expectedTypedArrayConstructor) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from pinned buffer`);\n }\n if (!(arg0.data instanceof expectedTypedArrayConstructor)) {\n throw new TypeError(`buffer should be of type ${expectedTypedArrayConstructor.name}`);\n }\n this.cpuData = arg0.data;\n break;\n }\n case 'texture': {\n if (type !== 'float32') {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from texture`);\n }\n this.gpuTextureData = arg0.texture;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n case 'gpu-buffer': {\n if ((type !== 'float32' && type !== 'float16' && type !== 'int32' && type !== 'int64' && type !== 'uint32' &&\n type !== 'uint8' && type !== 'bool')) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from gpu buffer`);\n }\n this.gpuBufferData = arg0.gpuBuffer;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n default:\n throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`);\n }\n } else {\n //\n // constructing tensor of location 'cpu'\n //\n let data: TensorDataType;\n let maybeDims: typeof arg1|typeof arg2;\n // check whether arg0 is type or data\n if (typeof arg0 === 'string') {\n //\n // Override: constructor(type, data, ...)\n //\n type = arg0;\n maybeDims = arg2;\n if (arg0 === 'string') {\n // string tensor\n if (!Array.isArray(arg1)) {\n throw new TypeError('A string tensor\\'s data must be a string array.');\n }\n // we don't check whether every element in the array is string; this is too slow. 
we assume it's correct and\n // error will be populated at inference\n data = arg1;\n } else {\n // numeric tensor\n const typedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(arg0);\n if (typedArrayConstructor === undefined) {\n throw new TypeError(`Unsupported tensor type: ${arg0}.`);\n }\n if (Array.isArray(arg1)) {\n if (arg0 === 'float16' && typedArrayConstructor === Uint16Array) {\n // When no Float16Array polyfill is used, we cannot create 'float16' tensor from number array.\n //\n // Throw error here because when user try to use number array as data,\n // e.g. new Tensor('float16', [1, 2, 3, 4], dims)), it will actually call\n // Uint16Array.from(arg1) which generates wrong data.\n throw new TypeError(\n 'Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.');\n } else if (arg0 === 'uint64' || arg0 === 'int64') {\n // use 'as any' here because:\n // 1. TypeScript's check on type of 'Array.isArray()' does not work with readonly arrays.\n // see https://github.com/microsoft/TypeScript/issues/17002\n // 2. TypeScript's check on union type of '(BigInt64ArrayConstructor|BigUint64ArrayConstructor).from()'\n // does not accept parameter mapFn.\n // 3. parameters of 'SupportedTypedArrayConstructors.from()' does not match the requirement of the union\n // type.\n\n // assume 'arg1' is of type \"readonly number[]|readonly bigint[]\" here.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1, BigInt);\n } else {\n // assume 'arg1' is of type \"readonly number[]\" here.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1);\n }\n } else if (arg1 instanceof typedArrayConstructor) {\n data = arg1;\n } else {\n throw new TypeError(`A ${type} tensor's data must be type of ${typedArrayConstructor}`);\n }\n }\n } else {\n //\n // Override: constructor(data, ...)\n //\n maybeDims = arg1;\n if (Array.isArray(arg0)) {\n // only boolean[] and string[] is supported\n if (arg0.length === 0) {\n throw new TypeError('Tensor type cannot be inferred from an empty array.');\n }\n const firstElementType = typeof arg0[0];\n if (firstElementType === 'string') {\n type = 'string';\n data = arg0;\n } else if (firstElementType === 'boolean') {\n type = 'bool';\n // 'arg0' is of type 'boolean[]'. Uint8Array.from(boolean[]) actually works, but typescript thinks this is\n // wrong type. 
We use 'as any' to make it happy.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = Uint8Array.from(arg0 as any[]);\n } else {\n throw new TypeError(`Invalid element type of data array: ${firstElementType}.`);\n }\n } else {\n // get tensor type from TypedArray\n const mappedType =\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.get(arg0.constructor as SupportedTypedArrayConstructors);\n if (mappedType === undefined) {\n throw new TypeError(`Unsupported type for tensor data: ${arg0.constructor}.`);\n }\n type = mappedType;\n data = arg0 as SupportedTypedArray;\n }\n }\n\n // type and data is processed, now processing dims\n if (maybeDims === undefined) {\n // assume 1-D tensor if dims omitted\n maybeDims = [data.length];\n } else if (!Array.isArray(maybeDims)) {\n throw new TypeError('A tensor\\'s dims must be a number array');\n }\n dims = maybeDims as readonly number[];\n\n this.cpuData = data;\n this.dataLocation = 'cpu';\n }\n\n // perform check on dims\n const size = calculateSize(dims);\n // if data is on CPU, check whether data length matches tensor size\n if (this.cpuData && size !== this.cpuData.length) {\n throw new Error(`Tensor's size(${size}) does not match data length(${this.cpuData.length}).`);\n }\n\n this.type = type;\n this.dims = dims;\n this.size = size;\n }\n // #endregion\n\n // #region factory\n static async fromImage(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise {\n return tensorFromImage(image, options);\n }\n\n static fromTexture(\n texture: TensorTextureType, options: TensorFromTextureOptions): TensorInterface {\n return tensorFromTexture(texture, options);\n }\n\n static fromGpuBuffer(\n gpuBuffer: TensorGpuBufferType, options: TensorFromGpuBufferOptions): TensorInterface {\n return tensorFromGpuBuffer(gpuBuffer, options);\n }\n\n static fromPinnedBuffer(\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor {\n return tensorFromPinnedBuffer(type, buffer, dims);\n }\n\n // #endregion\n\n // #region conversions\n toDataURL(options?: TensorToDataUrlOptions): string {\n return tensorToDataURL(this, options);\n }\n\n toImageData(options?: TensorToImageDataOptions): ImageData {\n return tensorToImageData(this, options);\n }\n // #endregion\n\n // #region public fields\n readonly dims: readonly number[];\n readonly type: TensorType;\n readonly size: number;\n // #endregion\n\n // #region private fields\n\n /**\n * stores the location of the data.\n */\n private dataLocation: TensorDataLocation;\n\n /**\n * stores the data on CPU, if location is 'cpu' or 'cpu-pinned'. otherwise empty.\n */\n private cpuData?: TensorDataType;\n\n /**\n * stores the underlying texture when location is 'texture'. otherwise empty.\n */\n private gpuTextureData?: TensorTextureType;\n\n /**\n * stores the underlying GPU buffer when location is 'gpu-buffer'. 
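// Constructor usage sketches matching the overload resolution above (public re-export
// of Tensor assumed, e.g. from 'onnxruntime-web').
import { Tensor } from 'onnxruntime-web';

const a = new Tensor('float32', Float32Array.from([1, 2, 3, 4]), [2, 2]); // explicit type + typed array
const b = new Tensor(new Int32Array([1, 2, 3]));   // type inferred as 'int32'; dims default to [3]
const c = new Tensor(['x', 'y', 'z']);             // string[] infers a 'string' tensor
const d = new Tensor('int64', [1, 2, 3]);          // number[] converted via BigInt64Array.from(..., BigInt)
// new Tensor('float16', [1, 2, 3]) throws unless a Float16Array polyfill is installed (see above).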
otherwise empty.\n */\n private gpuBufferData?: TensorGpuBufferType;\n\n /**\n * stores an optional downloader function to download data from GPU to CPU.\n */\n private downloader?(): Promise;\n\n /**\n * a flag indicating whether the data is being downloaded from GPU to CPU.\n */\n private isDownloading?: boolean;\n\n /**\n * stores an optional disposer function to dispose the underlying data.\n */\n private disposer?(): void;\n // #endregion\n\n // #region properties\n get data(): TensorDataType {\n this.ensureValid();\n if (!this.cpuData) {\n throw new Error(\n 'The data is not on CPU. Use `getData()` to download GPU data to CPU, ' +\n 'or use `texture` or `gpuBuffer` property to access the GPU data directly.');\n }\n return this.cpuData;\n }\n\n get location(): TensorDataLocation {\n return this.dataLocation;\n }\n\n get texture(): TensorTextureType {\n this.ensureValid();\n if (!this.gpuTextureData) {\n throw new Error('The data is not stored as a WebGL texture.');\n }\n return this.gpuTextureData;\n }\n\n get gpuBuffer(): TensorGpuBufferType {\n this.ensureValid();\n if (!this.gpuBufferData) {\n throw new Error('The data is not stored as a WebGPU buffer.');\n }\n return this.gpuBufferData;\n }\n // #endregion\n\n // #region methods\n\n async getData(releaseData?: boolean): Promise {\n this.ensureValid();\n switch (this.dataLocation) {\n case 'cpu':\n case 'cpu-pinned':\n return this.data;\n case 'texture':\n case 'gpu-buffer': {\n if (!this.downloader) {\n throw new Error('The current tensor is not created with a specified data downloader.');\n }\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n try {\n this.isDownloading = true;\n const data = await this.downloader();\n this.downloader = undefined;\n this.dataLocation = 'cpu';\n this.cpuData = data;\n\n if (releaseData && this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n\n return data;\n\n } finally {\n this.isDownloading = false;\n }\n }\n default:\n throw new Error(`cannot get data from location: ${this.dataLocation}`);\n }\n }\n\n dispose(): void {\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n\n if (this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n this.cpuData = undefined;\n this.gpuTextureData = undefined;\n this.gpuBufferData = undefined;\n this.downloader = undefined;\n this.isDownloading = undefined;\n\n this.dataLocation = 'none';\n }\n\n // #endregion\n\n // #region tensor utilities\n private ensureValid(): void {\n if (this.dataLocation === 'none') {\n throw new Error('The tensor is disposed.');\n }\n }\n\n reshape(dims: readonly number[]): TensorInterface {\n this.ensureValid();\n if (this.downloader || this.disposer) {\n throw new Error('Cannot reshape a tensor that owns GPU resource.');\n }\n return tensorReshape(this, dims);\n }\n // #endregion\n}\n", "// Copyright (c) Microsoft Corporation. 
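// Sketch of the download path above for a GPU-resident tensor. The buffer and the
// download callback are placeholders; in practice the WebGPU backend supplies them.
import { Tensor } from 'onnxruntime-web';
declare const someGpuBuffer: Tensor.GpuBufferType;   // placeholder WebGPU buffer

const gpuTensor = Tensor.fromGpuBuffer(someGpuBuffer, {
  dataType: 'float32',
  dims: [1, 1000],
  download: async () => new Float32Array(1000),      // placeholder downloader for illustration
});
const cpuData = await gpuTensor.getData(true);
// getData() runs the downloader, caches the result on CPU and sets location to 'cpu';
// with releaseData=true it would also call dispose() if one had been provided.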
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorFactory} from './tensor-factory.js';\nimport {Tensor as TensorImpl} from './tensor-impl.js';\nimport {TypedTensorUtils} from './tensor-utils.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\n/**\n * represent a basic tensor with specified dimensions and data type.\n */\ninterface TypedTensorBase {\n /**\n * Get the dimensions of the tensor.\n */\n readonly dims: readonly number[];\n /**\n * Get the data type of the tensor.\n */\n readonly type: T;\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is not on CPU (eg. it's in the form of WebGL texture or WebGPU buffer), throw error.\n */\n readonly data: Tensor.DataTypeMap[T];\n /**\n * Get the location of the data.\n */\n readonly location: Tensor.DataLocation;\n /**\n * Get the WebGL texture that holds the tensor data.\n *\n * If the data is not on GPU as WebGL texture, throw error.\n */\n readonly texture: Tensor.TextureType;\n /**\n * Get the WebGPU buffer that holds the tensor data.\n *\n * If the data is not on GPU as WebGPU buffer, throw error.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is on CPU, returns the data immediately.\n * If the data is on GPU, downloads the data and returns the promise.\n *\n * @param releaseData - whether release the data on GPU. Ignore if data is already on CPU.\n */\n getData(releaseData?: boolean): Promise;\n\n /**\n * Dispose the tensor data.\n *\n * If the data is on CPU, remove its internal reference to the underlying data.\n * If the data is on GPU, release the data on GPU.\n *\n * After calling this function, the tensor is considered no longer valid. Its location will be set to 'none'.\n */\n dispose(): void;\n}\n\nexport declare namespace Tensor {\n interface DataTypeMap {\n float32: Float32Array;\n uint8: Uint8Array;\n int8: Int8Array;\n uint16: Uint16Array;\n int16: Int16Array;\n int32: Int32Array;\n int64: BigInt64Array;\n string: string[];\n bool: Uint8Array;\n float16: Uint16Array; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: Float64Array;\n uint32: Uint32Array;\n uint64: BigUint64Array;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n interface ElementTypeMap {\n float32: number;\n uint8: number;\n int8: number;\n uint16: number;\n int16: number;\n int32: number;\n int64: bigint;\n string: string;\n bool: boolean;\n float16: number; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: number;\n uint32: number;\n uint64: bigint;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n type DataType = DataTypeMap[Type];\n type ElementType = ElementTypeMap[Type];\n\n /**\n * supported data types for constructing a tensor from a pinned CPU buffer\n */\n export type CpuPinnedDataTypes = Exclude;\n\n /**\n * type alias for WebGL texture\n */\n export type TextureType = WebGLTexture;\n\n /**\n * supported data types for constructing a tensor from a WebGL texture\n */\n export type TextureDataTypes = 'float32';\n\n /**\n * type alias for WebGPU buffer\n *\n * The reason why we don't use type \"GPUBuffer\" defined in webgpu.d.ts from @webgpu/types is because \"@webgpu/types\"\n * requires \"@types/dom-webcodecs\" as peer dependency when using TypeScript < v5.1 and its version need to be chosen\n * carefully according to the TypeScript version being used. 
This means so far there is not a way to keep every\n * TypeScript version happy. It turns out that we will easily broke users on some TypeScript version.\n *\n * for more info see https://github.com/gpuweb/types/issues/127\n */\n export type GpuBufferType = {size: number; mapState: 'unmapped' | 'pending' | 'mapped'};\n\n /**\n * supported data types for constructing a tensor from a WebGPU buffer\n */\n export type GpuBufferDataTypes = 'float32'|'float16'|'int32'|'int64'|'uint32'|'uint8'|'bool';\n\n /**\n * represent where the tensor data is stored\n */\n export type DataLocation = 'none'|'cpu'|'cpu-pinned'|'texture'|'gpu-buffer';\n\n /**\n * represent the data type of a tensor\n */\n export type Type = keyof DataTypeMap;\n}\n\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface TypedTensor extends TypedTensorBase, TypedTensorUtils {}\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface Tensor extends TypedTensorBase, TypedTensorUtils {}\n\n/**\n * type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.\n */\nexport interface TensorConstructor extends TensorFactory {\n // #region CPU tensor - specify element type\n /**\n * Construct a new string tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'string', data: Tensor.DataTypeMap['string']|readonly string[],\n dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'bool', data: Tensor.DataTypeMap['bool']|readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new 64-bit integer typed tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(\n type: T, data: Tensor.DataTypeMap[T]|readonly bigint[]|readonly number[],\n dims?: readonly number[]): TypedTensor;\n\n /**\n * Construct a new numeric tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new>(\n type: T, data: Tensor.DataTypeMap[T]|readonly number[], dims?: readonly number[]): TypedTensor;\n // #endregion\n\n // #region CPU tensor - infer element types\n\n /**\n * Construct a new float32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float32Array, dims?: readonly number[]): TypedTensor<'float32'>;\n\n /**\n * Construct a new int8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(data: Int8Array, dims?: readonly number[]): TypedTensor<'int8'>;\n\n /**\n * Construct a new uint8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint8Array, dims?: readonly number[]): TypedTensor<'uint8'>;\n\n /**\n * Construct a new uint16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint16Array, dims?: readonly number[]): TypedTensor<'uint16'>;\n\n /**\n * Construct a new int16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int16Array, dims?: readonly number[]): TypedTensor<'int16'>;\n\n /**\n * Construct a new int32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int32Array, dims?: readonly number[]): TypedTensor<'int32'>;\n\n /**\n * Construct a new int64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigInt64Array, dims?: readonly number[]): TypedTensor<'int64'>;\n\n /**\n * Construct a new string tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly string[], dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new float64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float64Array, dims?: readonly number[]): TypedTensor<'float64'>;\n\n /**\n * Construct a new uint32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint32Array, dims?: readonly number[]): TypedTensor<'uint32'>;\n\n /**\n * Construct a new uint64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigUint64Array, dims?: readonly number[]): TypedTensor<'uint64'>;\n\n // #endregion\n\n // #region CPU tensor - fall back to non-generic tensor type declaration\n\n /**\n * Construct a new tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(type: Tensor.Type, data: Tensor.DataType|readonly number[]|readonly string[]|readonly bigint[]|readonly boolean[],\n dims?: readonly number[]): Tensor;\n\n /**\n * Construct a new tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Tensor.DataType, dims?: readonly number[]): Tensor;\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const Tensor = TensorImpl as TensorConstructor;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from './env-impl.js';\n\n/**\n * @ignore\n */\nexport const TRACE = (deviceType: string, label: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n // eslint-disable-next-line no-console\n console.timeStamp(`${deviceType}::ORT::${label}`);\n};\n\nconst TRACE_FUNC = (msg: string, extraMsg?: string) => {\n const stack = new Error().stack?.split(/\\r\\n|\\r|\\n/g) || [];\n let hasTraceFunc = false;\n for (let i = 0; i < stack.length; i++) {\n if (hasTraceFunc && !stack[i].includes('TRACE_FUNC')) {\n let label = `FUNC_${msg}::${stack[i].trim().split(' ')[1]}`;\n if (extraMsg) {\n label += `::${extraMsg}`;\n }\n TRACE('CPU', label);\n return;\n }\n if (stack[i].includes('TRACE_FUNC')) {\n hasTraceFunc = true;\n }\n }\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_BEGIN = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('BEGIN', extraMsg);\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_END = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('END', extraMsg);\n};\n", "// Copyright (c) Microsoft Corporation. 
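// Sketch of turning on the trace markers defined above: when `env.trace` (or the older
// `env.wasm.trace`) is true, TRACE_FUNC_BEGIN/END call console.timeStamp with labels of
// the form "CPU::ORT::FUNC_BEGIN::<caller>", visible in the browser performance timeline.
import { env } from 'onnxruntime-web';   // assumed public re-export of the env object

env.trace = true;                        // enable before creating or running sessions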
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {InferenceSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSessionInterface} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from './trace.js';\n\ntype SessionOptions = InferenceSessionInterface.SessionOptions;\ntype RunOptions = InferenceSessionInterface.RunOptions;\ntype FeedsType = InferenceSessionInterface.FeedsType;\ntype FetchesType = InferenceSessionInterface.FetchesType;\ntype ReturnType = InferenceSessionInterface.ReturnType;\n\nexport class InferenceSession implements InferenceSessionInterface {\n private constructor(handler: InferenceSessionHandler) {\n this.handler = handler;\n }\n run(feeds: FeedsType, options?: RunOptions): Promise;\n run(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async run(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n TRACE_FUNC_BEGIN();\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (this.outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of this.outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSessionInterface.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of this.inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output 
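// Usage sketches for the run() argument handling above. Model path, input and output
// names are placeholders; a real session exposes them via inputNames/outputNames.
import { InferenceSession, Tensor } from 'onnxruntime-web';

const session = await InferenceSession.create('model.onnx');
const feeds = { input: new Tensor('float32', new Float32Array(1 * 3 * 224 * 224), [1, 3, 224, 224]) };

const all = await session.run(feeds);                                           // fetches omitted: all outputs returned
const some = await session.run(feeds, ['logits']);                              // fetches as an output-name array
const preAlloc = await session.run(feeds, { logits: null }, { tag: 'warmup' }); // fetches map + RunOptions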
names list\n if (isFetchesEmpty) {\n for (const name of this.outputNames) {\n fetches[name] = null;\n }\n }\n\n // feeds, fetches and options are prepared\n\n const results = await this.handler.run(feeds, fetches, options);\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n TRACE_FUNC_END();\n return returnValue;\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n\n static create(path: string, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: SessionOptions):\n Promise;\n static create(buffer: Uint8Array, options?: SessionOptions): Promise;\n static async create(\n arg0: string|ArrayBufferLike|Uint8Array, arg1?: SessionOptions|number, arg2?: number,\n arg3?: SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n // either load from a file or buffer\n let filePathOrUint8Array: string|Uint8Array;\n let options: SessionOptions = {};\n\n if (typeof arg0 === 'string') {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (arg0 instanceof Uint8Array) {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (\n arg0 instanceof ArrayBuffer ||\n (typeof SharedArrayBuffer !== 'undefined' && arg0 instanceof SharedArrayBuffer)) {\n const buffer = arg0;\n let byteOffset = 0;\n let byteLength = arg0.byteLength;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 === 'number') {\n byteOffset = arg1;\n if (!Number.isSafeInteger(byteOffset)) {\n throw new RangeError('\\'byteOffset\\' must be an integer.');\n }\n if (byteOffset < 0 || byteOffset >= buffer.byteLength) {\n throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);\n }\n byteLength = arg0.byteLength - byteOffset;\n if (typeof arg2 === 'number') {\n byteLength = arg2;\n if (!Number.isSafeInteger(byteLength)) {\n throw new RangeError('\\'byteLength\\' must be an integer.');\n }\n if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {\n throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);\n }\n if (typeof arg3 === 'object' && arg3 !== null) {\n options = arg3;\n } else if (typeof arg3 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'byteLength\\' must be a number.');\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);\n } else {\n throw new TypeError('Unexpected argument[0]: must be \\'path\\' or \\'buffer\\'.');\n }\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n const handler = await 
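// Sketch of the ArrayBuffer overload being validated above: load a model that lives
// inside a larger container buffer at a known offset and length (all values placeholders).
import { InferenceSession } from 'onnxruntime-web';

const container = await (await fetch('bundle.bin')).arrayBuffer();   // hypothetical container file
const session = await InferenceSession.create(container, 1024, 4_000_000);
// equivalent to: InferenceSession.create(new Uint8Array(container, 1024, 4_000_000))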
backend.createInferenceSessionHandler(filePathOrUint8Array, optionsWithValidatedEPs);\n TRACE_FUNC_END();\n return new InferenceSession(handler);\n }\n\n startProfiling(): void {\n this.handler.startProfiling();\n }\n endProfiling(): void {\n this.handler.endProfiling();\n }\n\n get inputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get outputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n private handler: InferenceSessionHandler;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession as InferenceSessionImpl} from './inference-session-impl.js';\nimport {OnnxModelOptions} from './onnx-model.js';\nimport {OnnxValue, OnnxValueDataLocation} from './onnx-value.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace InferenceSession {\n // #region input/output types\n\n type OnnxValueMapType = {readonly [name: string]: OnnxValue};\n type NullableOnnxValueMapType = {readonly [name: string]: OnnxValue | null};\n\n /**\n * A feeds (model inputs) is an object that uses input names as keys and OnnxValue as corresponding values.\n */\n type FeedsType = OnnxValueMapType;\n\n /**\n * A fetches (model outputs) could be one of the following:\n *\n * - Omitted. Use model's output names definition.\n * - An array of string indicating the output names.\n * - An object that use output names as keys and OnnxValue or null as corresponding values.\n *\n * @remark\n * different from input argument, in output, OnnxValue is optional. If an OnnxValue is present it will be\n * used as a pre-allocated value by the inference engine; if omitted, inference engine will allocate buffer\n * internally.\n */\n type FetchesType = readonly string[]|NullableOnnxValueMapType;\n\n /**\n * A inferencing return type is an object that uses output names as keys and OnnxValue as corresponding values.\n */\n type ReturnType = OnnxValueMapType;\n\n // #endregion\n\n // #region session options\n\n /**\n * A set of configurations for session behavior.\n */\n export interface SessionOptions extends OnnxModelOptions {\n /**\n * An array of execution provider options.\n *\n * An execution provider option can be a string indicating the name of the execution provider,\n * or an object of corresponding type.\n */\n executionProviders?: readonly ExecutionProviderConfig[];\n\n /**\n * The intra OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n intraOpNumThreads?: number;\n\n /**\n * The inter OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n interOpNumThreads?: number;\n\n /**\n * The free dimension override.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n freeDimensionOverrides?: {readonly [dimensionName: string]: number};\n\n /**\n * The optimization level.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n graphOptimizationLevel?: 'disabled'|'basic'|'extended'|'all';\n\n /**\n * Whether enable CPU memory arena.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n enableCpuMemArena?: boolean;\n\n /**\n * Whether enable memory pattern.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n 
enableMemPattern?: boolean;\n\n /**\n * Execution mode.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n executionMode?: 'sequential'|'parallel';\n\n /**\n * Optimized model file path.\n *\n * If this setting is specified, the optimized model will be dumped. In browser, a blob will be created\n * with a pop-up window.\n */\n optimizedModelFilePath?: string;\n\n /**\n * Whether enable profiling.\n *\n * This setting is a placeholder for a future use.\n */\n enableProfiling?: boolean;\n\n /**\n * File prefix for profiling.\n *\n * This setting is a placeholder for a future use.\n */\n profileFilePrefix?: string;\n\n /**\n * Log ID.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logId?: string;\n\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Specify string as a preferred data location for all outputs, or an object that use output names as keys and a\n * preferred data location as corresponding values.\n *\n * This setting is available only in ONNXRuntime Web for WebGL and WebGPU EP.\n */\n preferredOutputLocation?: OnnxValueDataLocation|{readonly [outputName: string]: OnnxValueDataLocation};\n\n /**\n * Whether enable graph capture.\n * This setting is available only in ONNXRuntime Web for WebGPU EP.\n */\n enableGraphCapture?: boolean;\n\n /**\n * Store configurations for a session. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_session_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
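// A SessionOptions sketch composed from the fields documented above (WebAssembly/WebGPU
// oriented; the model path and the free dimension name are placeholders).
import { InferenceSession } from 'onnxruntime-web';

const options: InferenceSession.SessionOptions = {
  executionProviders: ['webgpu', 'wasm'],       // strings or option objects; resolved in order
  graphOptimizationLevel: 'all',
  freeDimensionOverrides: { batch_size: 1 },    // hypothetical free dimension name
  logSeverityLevel: 2,
  extra: { session: { disable_prepacking: '1' } },
};
const session = await InferenceSession.create('model.onnx', options);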
Will support Node.js binding and react-native later\n *\n * @example\n * ```js\n * extra: {\n * session: {\n * set_denormal_as_zero: \"1\",\n * disable_prepacking: \"1\"\n * },\n * optimization: {\n * enable_gelu_approximation: \"1\"\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #region execution providers\n\n // Currently, we have the following backends to support execution providers:\n // Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).\n // Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.\n // Backend ONNX.js: supports 'webgl'.\n // Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).\n interface ExecutionProviderOptionMap {\n coreml: CoreMLExecutionProviderOption;\n cpu: CpuExecutionProviderOption;\n cuda: CudaExecutionProviderOption;\n dml: DmlExecutionProviderOption;\n nnapi: NnapiExecutionProviderOption;\n tensorrt: TensorRtExecutionProviderOption;\n wasm: WebAssemblyExecutionProviderOption;\n webgl: WebGLExecutionProviderOption;\n webgpu: WebGpuExecutionProviderOption;\n webnn: WebNNExecutionProviderOption;\n qnn: QnnExecutionProviderOption;\n xnnpack: XnnpackExecutionProviderOption;\n }\n\n type ExecutionProviderName = keyof ExecutionProviderOptionMap;\n type ExecutionProviderConfig =\n ExecutionProviderOptionMap[ExecutionProviderName]|ExecutionProviderOption|ExecutionProviderName|string;\n\n export interface ExecutionProviderOption {\n readonly name: string;\n }\n export interface CpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cpu';\n useArena?: boolean;\n }\n export interface CudaExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cuda';\n deviceId?: number;\n }\n export interface DmlExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'dml';\n deviceId?: number;\n }\n export interface TensorRtExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'tensorrt';\n deviceId?: number;\n }\n export interface WebAssemblyExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'wasm';\n }\n export interface WebGLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgl';\n // TODO: add flags\n }\n export interface XnnpackExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'xnnpack';\n }\n export interface WebGpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgpu';\n preferredLayout?: 'NCHW'|'NHWC';\n }\n\n // #region WebNN options\n\n interface WebNNExecutionProviderName extends ExecutionProviderOption {\n readonly name: 'webnn';\n }\n\n /**\n * Represents a set of options for creating a WebNN MLContext.\n *\n * @see https://www.w3.org/TR/webnn/#dictdef-mlcontextoptions\n */\n export interface WebNNContextOptions {\n deviceType?: 'cpu'|'gpu'|'npu';\n numThreads?: number;\n powerPreference?: 'default'|'low-power'|'high-performance';\n }\n\n /**\n * Represents a set of options for WebNN execution provider without MLContext.\n */\n export interface WebNNOptionsWithoutMLContext extends WebNNExecutionProviderName, WebNNContextOptions {\n context?: never;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext.\n *\n * When MLContext is provided, the deviceType is also required so that the WebNN EP can determine the preferred\n * channel layout.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext\n */\n export interface WebNNOptionsWithMLContext extends 
WebNNExecutionProviderName,\n Omit,\n Required> {\n context: unknown /* MLContext */;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext which is created from GPUDevice.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext-gpudevice\n */\n export interface WebNNOptionsWebGpu extends WebNNExecutionProviderName {\n context: unknown /* MLContext */;\n gpuDevice: unknown /* GPUDevice */;\n }\n\n /**\n * Options for WebNN execution provider.\n */\n export type WebNNExecutionProviderOption = WebNNOptionsWithoutMLContext|WebNNOptionsWithMLContext|WebNNOptionsWebGpu;\n\n // #endregion\n\n export interface QnnExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'qnn';\n // TODO add flags\n }\n export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'coreml';\n /**\n * The bit flags for CoreML execution provider.\n *\n * ```\n * COREML_FLAG_USE_CPU_ONLY = 0x001\n * COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002\n * COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004\n * COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008\n * COREML_FLAG_CREATE_MLPROGRAM = 0x010\n * ```\n *\n * See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.\n *\n * This flag is available only in ONNXRuntime (Node.js binding).\n */\n coreMlFlags?: number;\n /**\n * Specify whether to use CPU only in CoreML EP.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n useCPUOnly?: boolean;\n /**\n * Specify whether to enable CoreML EP on subgraph.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n enableOnSubgraph?: boolean;\n /**\n * Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n onlyEnableDeviceWithANE?: boolean;\n }\n export interface NnapiExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'nnapi';\n useFP16?: boolean;\n useNCHW?: boolean;\n cpuDisabled?: boolean;\n cpuOnly?: boolean;\n }\n // #endregion\n\n // #endregion\n\n // #region run options\n\n /**\n * A set of configurations for inference run behavior\n */\n export interface RunOptions {\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Terminate all incomplete OrtRun calls as soon as possible if true\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n terminate?: boolean;\n\n /**\n * A tag for the Run() calls using this\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n tag?: string;\n\n /**\n * Set a single run configuration entry. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_run_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
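// Sketch of a WebNN execution provider entry built from WebNNContextOptions above
// (no MLContext supplied, so the EP creates one from deviceType/powerPreference).
import { InferenceSession } from 'onnxruntime-web';

const session = await InferenceSession.create('model.onnx', {
  executionProviders: [{ name: 'webnn', deviceType: 'gpu', powerPreference: 'high-performance' }],
});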
Will support Node.js binding and react-native later\n *\n * @example\n *\n * ```js\n * extra: {\n * memory: {\n * enable_memory_arena_shrinkage: \"1\",\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #endregion\n\n // #region value metadata\n\n // eslint-disable-next-line @typescript-eslint/no-empty-interface\n interface ValueMetadata {\n // TBD\n }\n\n // #endregion\n}\n\n/**\n * Represent a runtime instance of an ONNX model.\n */\nexport interface InferenceSession {\n // #region run()\n\n /**\n * Execute the model asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Execute the model asynchronously with the given feeds, fetches and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param fetches - Representation of the model output. See type description of `InferenceSession.OutputType` for\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n\n // #endregion\n\n // #region profiling\n\n /**\n * Start profiling.\n */\n startProfiling(): void;\n\n /**\n * End profiling.\n */\n endProfiling(): void;\n\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded model.\n */\n readonly inputNames: readonly string[];\n\n /**\n * Get output names of the loaded model.\n */\n readonly outputNames: readonly string[];\n\n // /**\n // * Get input metadata of the loaded model.\n // */\n // readonly inputMetadata: ReadonlyArray>;\n\n // /**\n // * Get output metadata of the loaded model.\n // */\n // readonly outputMetadata: ReadonlyArray>;\n\n // #endregion\n}\n\nexport interface InferenceSessionFactory {\n // #region create()\n\n /**\n * Create a new inference session and load model asynchronously from an ONNX model file.\n *\n * @param uri - The URI or file path of the model to load.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(uri: string, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from segment of an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param byteOffset - The 
beginning of the specified portion of the array buffer.\n * @param byteLength - The length in bytes of the array buffer.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: InferenceSession.SessionOptions):\n Promise;\n\n /**\n * Create a new inference session and load model asynchronously from a Uint8Array.\n *\n * @param buffer - A Uint8Array representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: Uint8Array, options?: InferenceSession.SessionOptions): Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const InferenceSession: InferenceSessionFactory = InferenceSessionImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsFormat, OptionsNormalizationParameters, OptionsTensorLayout} from './tensor-factory.js';\n\nexport interface TensorToDataUrlOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface TensorToImageDataOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface ConversionUtils {\n /**\n * creates a DataURL instance from tensor\n *\n * @param options - An optional object representing options for creating a DataURL instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns a DataURL string representing the image converted from tensor data\n */\n toDataURL(options?: TensorToDataUrlOptions): string;\n\n /**\n * creates an ImageData instance from tensor\n *\n * @param options - An optional object representing options for creating an ImageData instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns an ImageData instance representing the image converted from tensor data\n */\n toImageData(options?: TensorToImageDataOptions): ImageData;\n}\n", "// Copyright (c) Microsoft Corporation. 
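The `InferenceSessionFactory.create` overloads and the `RunOptions` fields documented above compose as follows. A minimal sketch, assuming an ES module context (top-level await) and a model whose single input is named `input` with shape `[1, 3, 224, 224]`; both the model path and the input name/shape are placeholders:

```ts
import * as ort from 'onnxruntime-web';

// Create a session from a model URL; ArrayBuffer / Uint8Array overloads also exist.
const session = await ort.InferenceSession.create('./model.onnx');

// Feeds are keyed by input name; the name and shape here are placeholders.
const feeds = {
  input: new ort.Tensor('float32', new Float32Array(1 * 3 * 224 * 224), [1, 3, 224, 224]),
};

// RunOptions: `tag`, `logSeverityLevel` and the `extra` run-config entry are the
// fields documented above (extra example taken from the embedded JSDoc).
const results = await session.run(feeds, {
  tag: 'demo-run',
  logSeverityLevel: 2,
  extra: {memory: {enable_memory_arena_shrinkage: '1'}},
});

console.log(session.outputNames, results[session.outputNames[0]].dims);
await session.release();
```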
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor, TypedTensor} from './tensor.js';\n\nexport type ImageFormat = 'RGB'|'RGBA'|'BGR'|'RBG';\nexport type ImageTensorLayout = 'NHWC'|'NCHW';\n\n// the following region contains type definitions for constructing tensor from a specific location.\n\n// #region types for constructing a tensor from a specific location\n\n/**\n * represent common properties of the parameter for constructing a tensor from a specific location.\n */\ninterface CommonConstructorParameters extends Pick {\n /**\n * Specify the data type of the tensor.\n */\n readonly type: T;\n}\n\n/**\n * represent the parameter for constructing a tensor from a GPU resource.\n */\ninterface GpuResourceConstructorParameters {\n /**\n * an optional callback function to download data from GPU to CPU.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n download?(): Promise;\n\n /**\n * an optional callback function that will be called when the tensor is disposed.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n dispose?(): void;\n}\n\n/**\n * represent the parameter for constructing a tensor from a pinned CPU buffer\n */\nexport interface CpuPinnedConstructorParameters extends\n CommonConstructorParameters {\n /**\n * Specify the location of the data to be 'cpu-pinned'.\n */\n readonly location: 'cpu-pinned';\n /**\n * Specify the CPU pinned buffer that holds the tensor data.\n */\n readonly data: Tensor.DataTypeMap[T];\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGL texture\n */\nexport interface TextureConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'texture'.\n */\n readonly location: 'texture';\n /**\n * Specify the WebGL texture that holds the tensor data.\n */\n readonly texture: Tensor.TextureType;\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGPU buffer\n */\nexport interface GpuBufferConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'gpu-buffer'.\n */\n readonly location: 'gpu-buffer';\n /**\n * Specify the WebGPU buffer that holds the tensor data.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n}\n\n// #endregion\n\n// the following region contains type definitions of each individual options.\n// the tensor factory functions use a composition of those options as the parameter type.\n\n// #region Options fields\n\nexport interface OptionsFormat {\n /**\n * Describes the image format represented in RGBA color space.\n */\n format?: ImageFormat;\n}\n\nexport interface OptionsTensorFormat {\n /**\n * Describes the image format of the tensor.\n *\n * NOTE: this is different from option 'format'. While option 'format' represents the original image, 'tensorFormat'\n * represents the target format of the tensor. 
A transpose will be performed if they are different.\n */\n tensorFormat?: ImageFormat;\n}\n\nexport interface OptionsTensorDataType {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: 'float32'|'uint8';\n}\n\nexport interface OptionsTensorLayout {\n /**\n * Describes the tensor layout when representing data of one or more image(s).\n */\n tensorLayout?: ImageTensorLayout;\n}\n\nexport interface OptionsDimensions {\n /**\n * Describes the image height in pixel\n */\n height?: number;\n /**\n * Describes the image width in pixel\n */\n width?: number;\n}\n\nexport interface OptionResizedDimensions {\n /**\n * Describes the resized height. If omitted, original height will be used.\n */\n resizedHeight?: number;\n /**\n * Describes resized width - can be accessed via tensor dimensions as well\n */\n resizedWidth?: number;\n}\n\nexport interface OptionsNormalizationParameters {\n /**\n * Describes normalization parameters when preprocessing the image as model input.\n *\n * Data element are ranged from 0 to 255.\n */\n norm?: {\n /**\n * The 'bias' value for image normalization.\n * - If omitted, use default value 0.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n bias?: number|[number, number, number]|[number, number, number, number];\n /**\n * The 'mean' value for image normalization.\n * - If omitted, use default value 255.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n mean?: number | [number, number, number] | [number, number, number, number];\n };\n}\n\n// #endregion\n\n// #region Options composition\n\nexport interface TensorFromImageDataOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromImageElementOptions extends OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromUrlOptions extends OptionsDimensions, OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromImageBitmapOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromTextureOptions extends\n Required, OptionsFormat, GpuResourceConstructorParameters/* TODO: add more */ {}\n\nexport interface TensorFromGpuBufferOptions extends\n Pick, GpuResourceConstructorParameters {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: T;\n}\n\n// #endregion\n\n/**\n * type TensorFactory defines the factory functions of 'Tensor' to create tensor instances from existing data or\n * resources.\n */\nexport interface TensorFactory {\n /**\n * create a tensor from an ImageData object\n *\n * @param imageData - the ImageData object to create tensor from\n * @param options - An optional object representing options for creating tensor from ImageData.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n 
*/\n fromImage(imageData: ImageData, options?: TensorFromImageDataOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a HTMLImageElement object\n *\n * @param imageElement - the HTMLImageElement object to create tensor from\n * @param options - An optional object representing options for creating tensor from HTMLImageElement.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from URL\n *\n * @param urlSource - a string as a URL to the image or a data URL containing the image data.\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(urlSource: string, options?: TensorFromUrlOptions): Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from an ImageBitmap object\n *\n * @param bitmap - the ImageBitmap object to create tensor from\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(bitmap: ImageBitmap, options: TensorFromImageBitmapOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a WebGL texture\n *\n * @param texture - the WebGLTexture object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGL texture.\n *\n * The options include following properties:\n * - `width`: the width of the texture. Required.\n * - `height`: the height of the texture. Required.\n * - `format`: the format of the texture. If omitted, assume 'RGBA'.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromTexture(\n texture: Tensor.TextureType, options: TensorFromTextureOptions): TypedTensor<'float32'>;\n\n /**\n * create a tensor from a WebGPU buffer\n *\n * @param buffer - the GPUBuffer object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGPU buffer.\n *\n * The options include following properties:\n * - `dataType`: the data type of the tensor. If omitted, assume 'float32'.\n * - `dims`: the dimension of the tensor. Required.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. 
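The `fromImage` overloads and the option interfaces above (tensorFormat, tensorLayout, dataType, resized dimensions, norm) can be combined as in the sketch below; the image URL and the 224x224 target size are placeholders, and the conversion utilities (`toImageData`/`toDataURL`) documented earlier in these sources are used on the result:

```ts
import {Tensor} from 'onnxruntime-web';

// Decode an image URL into an NCHW float32 tensor, resized and normalized.
// All option fields are the ones documented in TensorFromUrlOptions.
const imageTensor = await Tensor.fromImage('https://example.com/cat.png', {
  tensorFormat: 'RGB',
  tensorLayout: 'NCHW',
  dataType: 'float32',
  resizedWidth: 224,
  resizedHeight: 224,
  // bias defaults to 0 and mean to 255; 3- or 4-element arrays apply per channel.
  norm: {bias: 0, mean: 255},
});

// Convert back for inspection; defaults are format 'RGB', tensorLayout 'NCHW'.
const imageData = imageTensor.toImageData();
console.log(imageTensor.dims, imageData.width, imageData.height);
```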
If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromGpuBuffer(\n buffer: Tensor.GpuBufferType, options: TensorFromGpuBufferOptions): TypedTensor;\n\n /**\n * create a tensor from a pre-allocated buffer. The buffer will be used as a pinned buffer.\n *\n * @param type - the tensor element type.\n * @param buffer - a TypedArray corresponding to the type.\n * @param dims - specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n *\n * @returns a tensor object\n */\n fromPinnedBuffer>(\n type: T, buffer: Tensor.DataTypeMap[T], dims?: readonly number[]): TypedTensor;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * A string that represents a file's URL or path.\n *\n * Path is vailable only in onnxruntime-node or onnxruntime-web running in Node.js.\n */\nexport type FileUrlOrPath = string;\n\n/**\n * A Blob object that represents a file.\n */\nexport type FileBlob = Blob;\n\n/**\n * A Uint8Array, ArrayBuffer or SharedArrayBuffer object that represents a file content.\n *\n * When it is an ArrayBuffer or SharedArrayBuffer, the whole buffer is assumed to be the file content.\n */\nexport type FileData = Uint8Array|ArrayBufferLike;\n\n/**\n * Represents a file that can be loaded by the ONNX Runtime JavaScript API.\n */\nexport type FileType = FileUrlOrPath|FileBlob|FileData;\n\n/**\n * Represents an external data file.\n */\nexport interface ExternalDataFileDescription {\n /**\n * Specify the external data file.\n */\n data: FileType;\n /**\n * Specify the file path.\n */\n path: string;\n}\n\n/**\n * Represents an external data file.\n *\n * When using a string, it should be a file URL or path that in the same directory as the model file.\n */\nexport type ExternalDataFileType = ExternalDataFileDescription|FileUrlOrPath;\n\n/**\n * Options for model loading.\n */\nexport interface OnnxModelOptions {\n /**\n * Specifying a list of files that represents the external data.\n */\n externalData?: readonly ExternalDataFileType[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type NonTensorType = never;\n\n/**\n * Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.\n *\n * NOTE: currently not support non-tensor\n */\nexport type OnnxValue = Tensor|NonTensorType;\n\n/**\n * Type OnnxValueDataLocation represents the location of the data of an OnnxValue.\n */\nexport type OnnxValueDataLocation = Tensor.DataLocation;\n", "// Copyright (c) Microsoft Corporation. 
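For GPU-resident and caller-owned data, the factory functions above take an explicit location: `fromGpuBuffer` wraps a WebGPU buffer (with `dims` required and `dataType` defaulting to `'float32'`), and `fromPinnedBuffer` reuses a TypedArray without copying. A minimal sketch; the `GPUBuffer` is assumed to come from elsewhere (for example a WebGPU backend output) and is only declared here:

```ts
import {Tensor} from 'onnxruntime-web';

// Assumed to be produced elsewhere, e.g. by a WebGPU-backed inference output.
declare const gpuBuffer: Tensor.GpuBufferType;

// Wrap an existing WebGPU buffer; without a `download` callback the data
// cannot be read back to the CPU (it is treated as an external resource).
const gpuTensor = Tensor.fromGpuBuffer(gpuBuffer, {
  dataType: 'float32',
  dims: [1, 1000],
});

// Use a pre-allocated TypedArray as a pinned CPU buffer (no copy is made).
const pinned = Tensor.fromPinnedBuffer('float32', new Float32Array(1000), [1, 1000]);
console.log(gpuTensor.location, pinned.location);
```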
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {SessionHandler, TrainingSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TrainingSession as TrainingSessionInterface, TrainingSessionCreateOptions} from './training-session.js';\n\ntype SessionOptions = InferenceSession.SessionOptions;\ntype FeedsType = InferenceSession.FeedsType;\ntype FetchesType = InferenceSession.FetchesType;\ntype ReturnType = InferenceSession.ReturnType;\ntype RunOptions = InferenceSession.RunOptions;\n\nconst noBackendErrMsg: string = 'Training backend could not be resolved. ' +\n 'Make sure you\\'re using the correct configuration & WebAssembly files.';\n\nexport class TrainingSession implements TrainingSessionInterface {\n private constructor(handler: TrainingSessionHandler, hasOptimizerModel: boolean, hasEvalModel: boolean) {\n this.handler = handler;\n this.hasOptimizerModel = hasOptimizerModel;\n this.hasEvalModel = hasEvalModel;\n }\n private handler: TrainingSessionHandler;\n private hasOptimizerModel: boolean;\n private hasEvalModel: boolean;\n\n get trainingInputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get trainingOutputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n get evalInputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalInputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n get evalOutputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalOutputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n\n static async create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: SessionOptions):\n Promise {\n const evalModel: string|Uint8Array = trainingOptions.evalModel || '';\n const optimizerModel: string|Uint8Array = trainingOptions.optimizerModel || '';\n const options: SessionOptions = sessionOptions || {};\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n if (backend.createTrainingSessionHandler) {\n const handler = await backend.createTrainingSessionHandler(\n trainingOptions.checkpointState, trainingOptions.trainModel, evalModel, optimizerModel,\n optionsWithValidatedEPs);\n return new TrainingSession(handler, !!trainingOptions.optimizerModel, !!trainingOptions.evalModel);\n } else {\n throw new Error(noBackendErrMsg);\n }\n }\n\n /**\n * Helper function for runTrainStep and future runStep methods that handles the type-narrowing conversion from\n * the given parameters to SessionHandler.FetchesType and RunOptions.\n *\n * @param inputNames the feeds object is checked that they contain all input names in the provided list of input\n * names.\n * @param outputNames the fetches object is checked that their keys match up with valid names in the list of output\n * names.\n * @param feeds the required input\n * @param arg1 narrowed & converted into the SessionHandler.FetchesType or RunOptions object\n * @param arg2 optional RunOptions object.\n * @returns\n */\n typeNarrowingForRunStep(\n inputNames: readonly string[], outputNames: readonly string[], feeds: FeedsType, arg1?: FetchesType|RunOptions,\n arg2?: 
RunOptions): [SessionHandler.FetchesType, RunOptions] {\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSession.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output names list\n if (isFetchesEmpty) {\n for (const name of outputNames) {\n fetches[name] = null;\n }\n }\n\n return [fetches, options];\n }\n\n /**\n * Helper method for runTrainStep and any other runStep methods. 
Takes the ReturnType result from the SessionHandler\n * and changes it into a map of Tensors.\n *\n * @param results\n * @returns\n */\n convertHandlerReturnTypeToMapOfTensors(results: SessionHandler.ReturnType): ReturnType {\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n return returnValue;\n }\n\n async lazyResetGrad(): Promise {\n await this.handler.lazyResetGrad();\n }\n\n runTrainStep(feeds: FeedsType, options?: RunOptions): Promise;\n runTrainStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async runTrainStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.trainingInputNames, this.trainingOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runTrainStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n }\n\n async runOptimizerStep(options?: InferenceSession.RunOptions|undefined): Promise {\n if (this.hasOptimizerModel) {\n await this.handler.runOptimizerStep(options || {});\n } else {\n throw new Error('This TrainingSession has no OptimizerModel loaded.');\n }\n }\n\n runEvalStep(feeds: FeedsType, options?: RunOptions|undefined): Promise;\n runEvalStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions|undefined): Promise;\n async runEvalStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n if (this.hasEvalModel) {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.evalInputNames, this.evalOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runEvalStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n } else {\n throw new Error('This TrainingSession has no EvalModel loaded.');\n }\n }\n\n async getParametersSize(trainableOnly = true): Promise {\n return this.handler.getParametersSize(trainableOnly);\n }\n\n async loadParametersBuffer(array: Uint8Array, trainableOnly = true): Promise {\n const paramsSize = await this.getParametersSize(trainableOnly);\n // checking that the size of the Uint8Array is equivalent to the byte length of a Float32Array of the number\n // of parameters\n if (array.length !== 4 * paramsSize) {\n throw new Error(\n 'Size of the buffer passed into loadParametersBuffer must match the number of parameters in ' +\n 'the model. Please use getParametersSize method to check.');\n }\n return this.handler.loadParametersBuffer(array, trainableOnly);\n }\n\n async getContiguousParameters(trainableOnly = true): Promise {\n return this.handler.getContiguousParameters(trainableOnly);\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
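The implementation above enforces that the buffer handed to `loadParametersBuffer` holds exactly four bytes per parameter (Float32). A minimal sketch of the parameter copy-out/copy-in round trip, assuming a `trainingSession` created as in the surrounding sources; in practice the buffer would contain real (for example, aggregated) Float32 parameter values rather than zeros:

```ts
import type {TrainingSession} from 'onnxruntime-common';

// Assumed to have been created via TrainingSession.create(...) beforehand.
declare const trainingSession: TrainingSession;

// Each parameter is a Float32, so the exchange buffer is 4 bytes per parameter.
const paramCount = await trainingSession.getParametersSize(/* trainableOnly */ true);
const buffer = new Uint8Array(4 * paramCount);

// Copy parameters out as one contiguous Float32 OnnxValue (e.g. for federated learning)...
const contiguous = await trainingSession.getContiguousParameters(true);
console.log('parameters:', paramCount, contiguous.dims);

// ...and load updated parameters back in; a size mismatch throws.
await trainingSession.loadParametersBuffer(buffer, true);
```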
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession as TrainingSessionImpl} from './training-session-impl.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace TrainingSession {\n /**\n * Either URI file path (string) or Uint8Array containing model or checkpoint information.\n */\n type UriOrBuffer = string|Uint8Array;\n}\n\n/**\n * Represent a runtime instance of an ONNX training session,\n * which contains a model that can be trained, and, optionally,\n * an eval and optimizer model.\n */\nexport interface TrainingSession {\n // #region run()\n\n /**\n * Lazily resets the gradients of all trainable parameters to zero. Should happen after the invocation of\n * runOptimizerStep.\n */\n lazyResetGrad(): Promise;\n\n /**\n * Run TrainStep asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for\n detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n runTrainStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single train step with the given inputs and options.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runTrainStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Runs a single optimizer step, which performs weight updates for the trainable parameters using the optimizer model.\n *\n * @param options - Optional. A set of options that controls the behavior of model optimizing.\n */\n runOptimizerStep(options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region copy parameters\n\n /**\n * Retrieves the size of all parameters for the training state. 
Calculates the total number of primitive (datatype of\n * the parameters) elements of all the parameters in the training state.\n *\n * @param trainableOnly - When set to true, the size is calculated for trainable params only. Default value is true.\n */\n getParametersSize(trainableOnly: boolean): Promise;\n\n /**\n * Copies parameter values from the given buffer to the training state. Currently, only supporting models with\n * parameters of type Float32.\n *\n * @param buffer - A Uint8Array representation of Float32 parameters.\n * @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.\n */\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n\n /**\n * Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.\n * Currently, only supporting models with parameters of type Float32.\n *\n * @param trainableOnly - When set to true, only trainable parameters are copied. Trainable parameters are parameters\n * for which requires_grad is set to true. Default value is true.\n * @returns A promise that resolves to a Float32 OnnxValue of the requested parameters.\n */\n getContiguousParameters(trainableOnly: boolean): Promise;\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded training model.\n */\n readonly trainingInputNames: readonly string[];\n\n /**\n * Get output names of the loaded training model.\n */\n readonly trainingOutputNames: readonly string[];\n\n /**\n * Get input names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalInputNames: readonly string[];\n\n /**\n * Get output names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalOutputNames: readonly string[];\n\n // #endregion\n}\n\n/**\n * Represents the optional parameters that can be passed into the TrainingSessionFactory.\n */\nexport interface TrainingSessionCreateOptions {\n /**\n * URI or buffer for a .ckpt file that contains the checkpoint for the training model.\n */\n checkpointState: TrainingSession.UriOrBuffer;\n /**\n * URI or buffer for the .onnx training file.\n */\n trainModel: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx optimizer model file.\n */\n optimizerModel?: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx eval model file.\n */\n evalModel?: TrainingSession.UriOrBuffer;\n}\n\n/**\n * Defines method overload possibilities for creating a TrainingSession.\n */\nexport interface TrainingSessionFactory {\n // #region create()\n\n /**\n * Creates a new TrainingSession and asynchronously loads any models passed in through trainingOptions\n *\n * @param trainingOptions specify models and checkpoints to load into the Training Session\n * @param sessionOptions specify configuration for training session behavior\n *\n * @returns Promise that resolves to a TrainingSession object\n */\n create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: InferenceSession.SessionOptions):\n Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const TrainingSession: TrainingSessionFactory = TrainingSessionImpl;\n", "// Copyright (c) Microsoft Corporation. 
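Putting the training interface above together, one iteration consists of a train step, an optimizer step, and a lazy gradient reset, optionally followed by an eval step. A minimal sketch; the import path for the training-enabled web bundle, the artifact file names, and the input names/shapes are all assumptions for illustration:

```ts
// Training-enabled bundle; the exact entry point depends on the package/build used.
import * as ort from 'onnxruntime-web/training';

const trainingSession = await ort.TrainingSession.create({
  checkpointState: './checkpoint.ckpt',     // placeholder artifact paths
  trainModel: './training_model.onnx',
  evalModel: './eval_model.onnx',            // optional
  optimizerModel: './optimizer_model.onnx',  // optional
});

// One iteration: forward/backward, weight update, then lazily zero the gradients.
const feeds = {
  input: new ort.Tensor('float32', new Float32Array(2 * 10), [2, 10]),  // placeholder names/shapes
  labels: new ort.Tensor('int64', new BigInt64Array(2), [2]),
};
const trainOutputs = await trainingSession.runTrainStep(feeds);
await trainingSession.runOptimizerStep();
await trainingSession.lazyResetGrad();

// Evaluate with the eval model, then release the native resources.
const evalOutputs = await trainingSession.runEvalStep(feeds);
console.log(Object.keys(trainOutputs), Object.keys(evalOutputs));
await trainingSession.release();
```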
All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * # ONNX Runtime JavaScript API\n *\n * ONNX Runtime JavaScript API is a unified API for all JavaScript usages, including the following NPM packages:\n *\n * - [onnxruntime-node](https://www.npmjs.com/package/onnxruntime-node)\n * - [onnxruntime-web](https://www.npmjs.com/package/onnxruntime-web)\n * - [onnxruntime-react-native](https://www.npmjs.com/package/onnxruntime-react-native)\n *\n * See also:\n * - [Get Started](https://onnxruntime.ai/docs/get-started/with-javascript/)\n * - [Inference examples](https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js)\n *\n * @packageDocumentation\n */\n\nexport * from './backend.js';\nexport * from './env.js';\nexport * from './inference-session.js';\nexport * from './tensor.js';\nexport * from './tensor-conversion.js';\nexport * from './tensor-factory.js';\nexport * from './trace.js';\nexport * from './onnx-model.js';\nexport * from './onnx-value.js';\nexport * from './training-session.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nexport const isNode = !!(typeof process !== 'undefined' && process.versions && process.versions.node);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/// \n\n//\n// * type hack for \"HTMLImageElement\"\n//\n// in typescript, the type of \"HTMLImageElement\" is defined in lib.dom.d.ts, which is conflict with lib.webworker.d.ts.\n// when we use webworker, the lib.webworker.d.ts will be used, which does not have HTMLImageElement defined.\n//\n// we will get the following errors complaining that HTMLImageElement is not defined:\n//\n// ====================================================================================================================\n//\n// ../common/dist/cjs/tensor-factory.d.ts:187:29 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 187 fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n// Promise | TypedTensor<'uint8'>>;\n// ~~~~~~~~~~~~~~~~\n//\n// node_modules/@webgpu/types/dist/index.d.ts:83:7 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 83 | HTMLImageElement\n// ~~~~~~~~~~~~~~~~\n//\n// ====================================================================================================================\n//\n// `HTMLImageElement` is only used in type declaration and not in real code. So we define it as `unknown` here to\n// bypass the type check.\n\n//\n// * type hack for \"document\"\n//\n// in typescript, the type of \"document\" is defined in lib.dom.d.ts, so it's not available in webworker.\n//\n// we will get the following errors complaining that document is not defined:\n//\n// ====================================================================================================================\n//\n// lib/wasm/wasm-utils-import.ts:7:33 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:61 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? 
Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:88 - error TS2552: Cannot find name 'HTMLScriptElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~~~~~~~~~~\n// ====================================================================================================================\n//\n// `document` is used to get the current script URL, which is not available in webworker. This file is served as a\n// \"dual\" file for entries of both webworker and the esm module.\n//\ndeclare global {\n type HTMLImageElement = unknown;\n type HTMLScriptElement = {src?: string};\n const document: undefined|{currentScript?: HTMLScriptElement};\n}\n\n/**\n * @summary\n *\n * This file is served as a \"dual\" file for both entries of the following:\n * - The proxy worker itself.\n * - When used as a worker, it listens to the messages from the main thread and performs the corresponding operations.\n * - Should be imported directly using `new Worker()` in the main thread.\n *\n * - The ESM module that creates the proxy worker (as a worker launcher).\n * - When used as a worker launcher, it creates the proxy worker and returns it.\n * - Should be imported using `import()` in the main thread, with the query parameter `import=1`.\n *\n * This file will be always compiling into ESM format.\n */\n\nimport type {OrtWasmMessage, SerializableTensorMetadata} from '../proxy-messages.js';\nimport {createSession, copyFromExternalBuffer, endProfiling, extractTransferableBuffers, initEp, initRuntime, releaseSession, run} from '../wasm-core-impl.js';\nimport {initializeWebAssembly} from '../wasm-factory.js';\nimport {scriptSrc} from '../wasm-utils-import.js';\n\nconst WORKER_NAME = 'ort-wasm-proxy-worker';\nconst isProxyWorker = globalThis.self?.name === WORKER_NAME;\n\nif (isProxyWorker) {\n // Worker thread\n self.onmessage = (ev: MessageEvent): void => {\n const {type, in : message} = ev.data;\n try {\n switch (type) {\n case 'init-wasm':\n initializeWebAssembly(message!.wasm)\n .then(\n () => {\n initRuntime(message!).then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n },\n err => {\n postMessage({type, err});\n });\n break;\n case 'init-ep': {\n const {epName, env} = message!;\n initEp(env, epName)\n .then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'copy-from': {\n const {buffer} = message!;\n const bufferData = copyFromExternalBuffer(buffer);\n postMessage({type, out: bufferData} as OrtWasmMessage);\n break;\n }\n case 'create': {\n const {model, options} = message!;\n createSession(model, options)\n .then(\n sessionMetadata => {\n postMessage({type, out: sessionMetadata} as OrtWasmMessage);\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'release':\n releaseSession(message!);\n postMessage({type});\n break;\n case 'run': {\n const {sessionId, inputIndices, inputs, outputIndices, options} = message!;\n run(sessionId, inputIndices, inputs, outputIndices, new Array(outputIndices.length).fill(null), options)\n .then(\n outputs => {\n if (outputs.some(o => o[3] !== 'cpu')) {\n postMessage({type, err: 'Proxy does not support non-cpu tensor location.'});\n } else {\n postMessage(\n {type, out: outputs} 
as OrtWasmMessage,\n extractTransferableBuffers([...inputs, ...outputs] as SerializableTensorMetadata[]));\n }\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'end-profiling':\n endProfiling(message!);\n postMessage({type});\n break;\n default:\n }\n } catch (err) {\n postMessage({type, err} as OrtWasmMessage);\n }\n };\n}\n\nexport default isProxyWorker ?\n null :\n (urlOverride?: string) =>\n new Worker(urlOverride ?? scriptSrc!, {type: BUILD_DEFS.IS_ESM ? 'module' : 'classic', name: WORKER_NAME});\n", "var r,e=(r=import.meta.url,async function(e={}){function t(){return k.buffer!=R.buffer&&Y(),R}function n(){return k.buffer!=R.buffer&&Y(),P}function a(){return k.buffer!=R.buffer&&Y(),D}function o(){return k.buffer!=R.buffer&&Y(),F}function i(){return k.buffer!=R.buffer&&Y(),B}function s(){return k.buffer!=R.buffer&&Y(),I}function u(){return k.buffer!=R.buffer&&Y(),U}function f(){return k.buffer!=R.buffer&&Y(),G}var l,c,d=Object.assign({},e),b=new Promise(((r,e)=>{l=r,c=e})),g=\"object\"==typeof window,m=\"function\"==typeof importScripts,p=m&&\"em-pthread\"==self.name;d.mountExternalData=(r,e)=>{(d.Cb||(d.Cb=new Map)).set(r,e)},d.unmountExternalData=()=>{delete d.Cb};var h=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let v=()=>{const r=(r,e,t)=>(...n)=>{const a=Ye,o=e?.();n=r(...n);const i=e?.();return o!==i&&(r=i,t(o),e=t=null),Ye!=a?new Promise(((r,e)=>{Qe={resolve:r,reject:e}})):n},e=r=>async(...e)=>{try{if(d.Bb)throw Error(\"Session already started\");const t=d.Bb={Zb:e[0],errors:[]},n=await r(...e);if(d.Bb!==t)throw Error(\"Session mismatch\");d.Jb?.flush();const a=t.errors;if(0r)),0d._OrtCreateSession),(r=>d._OrtCreateSession=r)),d._OrtRun=e(r(d._OrtRun,(()=>d._OrtRun),(r=>d._OrtRun=r))),d._OrtRunWithBinding=e(r(d._OrtRunWithBinding,(()=>d._OrtRunWithBinding),(r=>d._OrtRunWithBinding=r))),d._OrtBindInput=r(d._OrtBindInput,(()=>d._OrtBindInput),(r=>d._OrtBindInput=r)),v=void 0};d.jsepInit=(r,e)=>{if(v?.(),\"webgpu\"===r){[d.Jb,d.Qb,d.Ub,d.Kb,d.Tb,d.gb,d.Vb,d.Xb,d.Rb,d.Sb,d.Wb]=e;const r=d.Jb;d.jsepRegisterBuffer=(e,t,n,a)=>r.registerBuffer(e,t,n,a),d.jsepGetBuffer=e=>r.getBuffer(e),d.jsepCreateDownloader=(e,t,n)=>r.createDownloader(e,t,n),d.jsepOnReleaseSession=e=>{r.onReleaseSession(e)},d.jsepOnRunStart=e=>r.onRunStart(e)}};var y,w,A=Object.assign({},d),_=\"./this.program\",C=(r,e)=>{throw e},O=\"\";(g||m)&&(m?O=self.location.href:\"undefined\"!=typeof document&&document.currentScript&&(O=document.currentScript.src),r&&(O=r),O=O.startsWith(\"blob:\")?\"\":O.substr(0,O.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1),r=>{var e=new XMLHttpRequest;return e.open(\"GET\",r,!1),e.send(null),e.responseText},m&&(w=r=>{var e=new XMLHttpRequest;return e.open(\"GET\",r,!1),e.responseType=\"arraybuffer\",e.send(null),new Uint8Array(e.response)}),y=(r,e,t)=>{var n=new XMLHttpRequest;n.open(\"GET\",r,!0),n.responseType=\"arraybuffer\",n.onload=()=>{200==n.status||0==n.status&&n.response?e(n.response):t()},n.onerror=t,n.send(null)});var T=console.log.bind(console),S=console.error.bind(console),W=T,E=S;if(Object.assign(d,A),A=null,p){var x,M=!1;function Dn(r){try{var e=r.data,t=e.cmd;if(\"load\"===t){let r=[];self.onmessage=e=>r.push(e),self.startWorker=()=>{postMessage({cmd:\"loaded\"});for(let e of r)Dn(e);self.onmessage=Dn};for(const r of e.handlers)d[r]&&!d[r].proxy||(d[r]=(...e)=>{postMessage({Ib:\"callHandler\",hc:r,args:e})},\"print\"==r&&(W=d[r]),\"printErr\"==r&&(E=d[r]));k=e.wasmMemory,Y(),x(e.wasmModule)}else 
if(\"run\"===t){wn(e.pthread_ptr,0,0,1,0,0),He(e.pthread_ptr),Tr(),Ar(),M||(mn(),M=!0);try{Sr(e.start_routine,e.arg)}catch(r){if(\"unwind\"!=r)throw r}}else\"cancel\"===t?hn()&&On(-1):\"setimmediate\"!==e.target&&(\"checkMailbox\"===t?M&&Re():t&&(E(`worker: received unknown command ${t}`),E(e)))}catch(r){throw An(),r}}E=function(...r){r=r.join(\" \"),console.error(r)},self.alert=function(...r){postMessage({Ib:\"alert\",text:r.join(\" \"),jc:hn()})},d.instantiateWasm=(r,e)=>new Promise((r=>{x=t=>{t=new WebAssembly.Instance(t,or()),e(t),r()}})),self.onunhandledrejection=r=>{throw r.reason||r},self.onmessage=Dn}var k,N,H,R,P,D,F,B,I,U,j,$,G,z=!1;function Y(){var r=k.buffer;d.HEAP8=R=new Int8Array(r),d.HEAP16=D=new Int16Array(r),d.HEAPU8=P=new Uint8Array(r),d.HEAPU16=F=new Uint16Array(r),d.HEAP32=B=new Int32Array(r),d.HEAPU32=I=new Uint32Array(r),d.HEAPF32=U=new Float32Array(r),d.HEAPF64=G=new Float64Array(r),d.HEAP64=j=new BigInt64Array(r),d.HEAPU64=$=new BigUint64Array(r)}if(!p){if(d.wasmMemory)k=d.wasmMemory;else if(!((k=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof h))throw E(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\"),Error(\"bad memory\");Y()}var L=[],V=[],q=[],J=0,X=null,Q=null;function Z(){if(0==--J&&(null!==X&&(clearInterval(X),X=null),Q)){var r=Q;Q=null,r()}}function K(r){throw E(r=\"Aborted(\"+r+\")\"),z=!0,H=1,r=new WebAssembly.RuntimeError(r+\". Build with -sASSERTIONS for more info.\"),c(r),r}var rr,er=r=>r.startsWith(\"data:application/octet-stream;base64,\"),tr=r=>r.startsWith(\"file://\");function nr(r){if(w)return w(r);throw\"both async and sync fetching of the wasm failed\"}function ar(r,e,t){return function(r){if(g||m){if(\"function\"==typeof fetch&&!tr(r))return fetch(r,{credentials:\"same-origin\"}).then((e=>{if(!e.ok)throw`failed to load wasm binary file at '${r}'`;return e.arrayBuffer()})).catch((()=>nr(r)));if(y)return new Promise(((e,t)=>{y(r,(r=>e(new Uint8Array(r))),t)}))}return Promise.resolve().then((()=>nr(r)))}(r).then((r=>WebAssembly.instantiate(r,e))).then(t,(r=>{E(`failed to asynchronously prepare wasm: ${r}`),K(r)}))}function or(){return{a:{wa:sr,b:Er,Y:Mr,y:Rr,ma:Pr,U:Ir,W:Ur,na:jr,ka:$r,da:Gr,ja:zr,I:Yr,V:Lr,S:Vr,la:qr,T:Jr,sa:Zr,C:oe,M:se,L:me,B:he,s:ve,p:ye,D:we,x:Ee,N:xe,ra:Me,ga:ke,Q:Pe,Z:Fe,E:Be,fa:He,pa:Ie,u:$e,A:rt,o:tt,k:ot,c:le,n:st,j:ct,xa:dt,r:bt,d:gt,v:mt,m:pt,g:ht,l:vt,i:yt,h:wt,e:At,aa:_t,ba:St,ca:Wt,_:Et,$:xt,P:Mt,f:Ht,K:Rt,F:Pt,J:Dt,ta:Ft,oa:It,R:Ut,t:Bt,w:jt,O:$t,va:Lt,ua:Vt,ha:Qt,ia:Zt,X:mr,z:Kt,H:rn,ea:en,G:nn,a:k,qa:un,q:fn}}}var ir={1336340:(r,e,t,a)=>{if(void 0===d||!d.Cb)return 1;if((r=Hr(r>>>0)).startsWith(\"./\")&&(r=r.substring(2)),!(r=d.Cb.get(r)))return 2;if(a>>>=0,(e>>>=0)+(t>>>=0)>r.byteLength)return 3;try{return n().set(r.subarray(e,e+t),a>>>0),0}catch{return 4}},1336841:()=>{d.Rb()},1336872:()=>{d.Sb()},1336901:()=>{d.Wb()},1336926:r=>d.Qb(r),1336959:r=>d.Ub(r),1336991:(r,e,t)=>{d.Kb(r,e,t,!0)},1337030:(r,e,t)=>{d.Kb(r,e,t)},1337063:()=>\"undefined\"!=typeof wasmOffsetConverter,1337120:r=>{d.gb(\"Abs\",r,void 0)},1337171:r=>{d.gb(\"Neg\",r,void 0)},1337222:r=>{d.gb(\"Floor\",r,void 0)},1337275:r=>{d.gb(\"Ceil\",r,void 0)},1337327:r=>{d.gb(\"Reciprocal\",r,void 0)},1337385:r=>{d.gb(\"Sqrt\",r,void 0)},1337437:r=>{d.gb(\"Exp\",r,void 0)},1337488:r=>{d.gb(\"Erf\",r,void 0)},1337539:r=>{d.gb(\"Sigmoid\",r,void 
0)},1337594:(r,e,t)=>{d.gb(\"HardSigmoid\",r,{alpha:e,beta:t})},1337673:r=>{d.gb(\"Log\",r,void 0)},1337724:r=>{d.gb(\"Sin\",r,void 0)},1337775:r=>{d.gb(\"Cos\",r,void 0)},1337826:r=>{d.gb(\"Tan\",r,void 0)},1337877:r=>{d.gb(\"Asin\",r,void 0)},1337929:r=>{d.gb(\"Acos\",r,void 0)},1337981:r=>{d.gb(\"Atan\",r,void 0)},1338033:r=>{d.gb(\"Sinh\",r,void 0)},1338085:r=>{d.gb(\"Cosh\",r,void 0)},1338137:r=>{d.gb(\"Asinh\",r,void 0)},1338190:r=>{d.gb(\"Acosh\",r,void 0)},1338243:r=>{d.gb(\"Atanh\",r,void 0)},1338296:r=>{d.gb(\"Tanh\",r,void 0)},1338348:r=>{d.gb(\"Not\",r,void 0)},1338399:(r,e,t)=>{d.gb(\"Clip\",r,{min:e,max:t})},1338468:r=>{d.gb(\"Clip\",r,void 0)},1338520:(r,e)=>{d.gb(\"Elu\",r,{alpha:e})},1338578:r=>{d.gb(\"Relu\",r,void 0)},1338630:(r,e)=>{d.gb(\"LeakyRelu\",r,{alpha:e})},1338694:(r,e)=>{d.gb(\"ThresholdedRelu\",r,{alpha:e})},1338764:(r,e)=>{d.gb(\"Cast\",r,{to:e})},1338822:r=>{d.gb(\"Add\",r,void 0)},1338873:r=>{d.gb(\"Sub\",r,void 0)},1338924:r=>{d.gb(\"Mul\",r,void 0)},1338975:r=>{d.gb(\"Div\",r,void 0)},1339026:r=>{d.gb(\"Pow\",r,void 0)},1339077:r=>{d.gb(\"Equal\",r,void 0)},1339130:r=>{d.gb(\"Greater\",r,void 0)},1339185:r=>{d.gb(\"GreaterOrEqual\",r,void 0)},1339247:r=>{d.gb(\"Less\",r,void 0)},1339299:r=>{d.gb(\"LessOrEqual\",r,void 0)},1339358:(r,e,t,n,a)=>{d.gb(\"ReduceMean\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339517:(r,e,t,n,a)=>{d.gb(\"ReduceMax\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339675:(r,e,t,n,a)=>{d.gb(\"ReduceMin\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339833:(r,e,t,n,a)=>{d.gb(\"ReduceProd\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1339992:(r,e,t,n,a)=>{d.gb(\"ReduceSum\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340150:(r,e,t,n,a)=>{d.gb(\"ReduceL1\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340307:(r,e,t,n,a)=>{d.gb(\"ReduceL2\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340464:(r,e,t,n,a)=>{d.gb(\"ReduceLogSum\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340625:(r,e,t,n,a)=>{d.gb(\"ReduceSumSquare\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340789:(r,e,t,n,a)=>{d.gb(\"ReduceLogSumExp\",r,{keepDims:!!e,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340953:r=>{d.gb(\"Where\",r,void 
0)},1341006:(r,e,t)=>{d.gb(\"Transpose\",r,{perm:e?Array.from(i().subarray(e>>>0,t>>>0)):[]})},1341114:(r,e,t,n)=>{d.gb(\"DepthToSpace\",r,{blocksize:e,mode:Hr(t),format:n?\"NHWC\":\"NCHW\"})},1341247:(r,e,t,n)=>{d.gb(\"DepthToSpace\",r,{blocksize:e,mode:Hr(t),format:n?\"NHWC\":\"NCHW\"})},1341380:(r,e,n,a,o,s,u,f,l,c,b,g,m,p,h)=>{d.gb(\"ConvTranspose\",r,{format:l?\"NHWC\":\"NCHW\",autoPad:e,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,g>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Hr(h)})},1341781:(r,e,n,a,o,s,u,f,l,c,b,g,m,p)=>{d.gb(\"ConvTranspose\",r,{format:f?\"NHWC\":\"NCHW\",autoPad:e,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:g?Array.from(i().subarray(g>>>0,m>>>0)):[],activation:Hr(p)})},1342346:(r,e,n,a,o,s,u,f,l,c,b,g,m,p,h)=>{d.gb(\"ConvTranspose\",r,{format:l?\"NHWC\":\"NCHW\",autoPad:e,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,g>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Hr(h)})},1342747:(r,e,n,a,o,s,u,f,l,c,b,g,m,p)=>{d.gb(\"ConvTranspose\",r,{format:f?\"NHWC\":\"NCHW\",autoPad:e,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:g?Array.from(i().subarray(g>>>0,m>>>0)):[],activation:Hr(p)})},1343312:(r,e)=>{d.gb(\"GlobalAveragePool\",r,{format:e?\"NHWC\":\"NCHW\"})},1343403:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"AveragePool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1343687:(r,e)=>{d.gb(\"GlobalAveragePool\",r,{format:e?\"NHWC\":\"NCHW\"})},1343778:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"AveragePool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1344062:(r,e)=>{d.gb(\"GlobalMaxPool\",r,{format:e?\"NHWC\":\"NCHW\"})},1344149:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"MaxPool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1344429:(r,e)=>{d.gb(\"GlobalMaxPool\",r,{format:e?\"NHWC\":\"NCHW\"})},1344516:(r,e,t,n,a,o,i,s,u,f,l,c,b,g,m,p)=>{d.gb(\"MaxPool\",r,{format:p?\"NHWC\":\"NCHW\",auto_pad:e,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[g,m]})},1344796:(r,e,t,n,a)=>{d.gb(\"Gemm\",r,{alpha:e,beta:t,transA:n,transB:a})},1344900:r=>{d.gb(\"MatMul\",r,void 
0)},1344954:(r,e,t,n)=>{d.gb(\"ArgMax\",r,{keepDims:!!e,selectLastIndex:!!t,axis:n})},1345062:(r,e,t,n)=>{d.gb(\"ArgMin\",r,{keepDims:!!e,selectLastIndex:!!t,axis:n})},1345170:(r,e)=>{d.gb(\"Softmax\",r,{axis:e})},1345233:(r,e)=>{d.gb(\"Concat\",r,{axis:e})},1345293:(r,e,t,n,a)=>{d.gb(\"Split\",r,{axis:e,numOutputs:t,splitSizes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1345433:r=>{d.gb(\"Expand\",r,void 0)},1345487:(r,e)=>{d.gb(\"Gather\",r,{axis:Number(e)})},1345558:(r,e)=>{d.gb(\"GatherElements\",r,{axis:Number(e)})},1345637:(r,e,t,n,a,o,s,u,f,l,c)=>{d.gb(\"Resize\",r,{antialias:e,axes:t?Array.from(i().subarray(t>>>0,n>>>0)):[],coordinateTransformMode:Hr(a),cubicCoeffA:o,excludeOutside:s,extrapolationValue:u,keepAspectRatioPolicy:Hr(f),mode:Hr(l),nearestMode:Hr(c)})},1345983:(r,e,t,n,a,o,s)=>{d.gb(\"Slice\",r,{starts:e?Array.from(i().subarray(e>>>0,t>>>0)):[],ends:n?Array.from(i().subarray(n>>>0,a>>>0)):[],axes:o?Array.from(i().subarray(o>>>0,s>>>0)):[]})},1346199:r=>{d.gb(\"Tile\",r,void 0)},1346251:(r,e,t)=>{d.gb(\"InstanceNormalization\",r,{epsilon:e,format:t?\"NHWC\":\"NCHW\"})},1346365:(r,e,t)=>{d.gb(\"InstanceNormalization\",r,{epsilon:e,format:t?\"NHWC\":\"NCHW\"})},1346479:r=>{d.gb(\"Range\",r,void 0)},1346532:(r,e)=>{d.gb(\"Einsum\",r,{equation:Hr(e)})},1346613:(r,e,t,n,a)=>{d.gb(\"Pad\",r,{mode:e,value:t,pads:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1346740:(r,e,t,n,a,o)=>{d.gb(\"BatchNormalization\",r,{epsilon:e,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1346909:(r,e,t,n,a,o)=>{d.gb(\"BatchNormalization\",r,{epsilon:e,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1347078:(r,e,t)=>{d.gb(\"CumSum\",r,{exclusive:Number(e),reverse:Number(t)})},1347175:(r,e,t,n,a,o,s,u,f)=>{d.gb(\"Attention\",r,{numHeads:e,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o,qkvHiddenSizes:s?Array.from(i().subarray(Number(u)>>>0,Number(u)+s>>>0)):[],pastPresentShareBuffer:!!f})},1347447:r=>{d.gb(\"BiasAdd\",r,void 0)},1347502:r=>{d.gb(\"BiasSplitGelu\",r,void 0)},1347563:r=>{d.gb(\"FastGelu\",r,void 0)},1347619:(r,e,n,a,o,s,f,l,c,b,g,m,p,h,v,y)=>{d.gb(\"Conv\",r,{format:m?\"NHWC\":\"NCHW\",auto_pad:e,dilations:n?Array.from(i().subarray(n>>>0,a>>>0)):[],group:o,kernel_shape:s?Array.from(i().subarray(s>>>0,f>>>0)):[],pads:l?Array.from(i().subarray(l>>>0,c>>>0)):[],strides:b?Array.from(i().subarray(b>>>0,g>>>0)):[],w_is_const:()=>!!t()[p>>>0],activation:Hr(h),activation_params:v?Array.from(u().subarray(v>>>0,y>>>0)):[]})},1348115:r=>{d.gb(\"Gelu\",r,void 0)},1348167:(r,e,t,n)=>{d.gb(\"GroupQueryAttention\",r,{numHeads:e,kvNumHeads:t,scale:n})},1348280:(r,e,t,n)=>{d.gb(\"LayerNormalization\",r,{axis:e,epsilon:t,simplified:!!n})},1348391:(r,e,t,n)=>{d.gb(\"LayerNormalization\",r,{axis:e,epsilon:t,simplified:!!n})},1348502:(r,e,t,n,a,o)=>{d.gb(\"MatMulNBits\",r,{k:e,n:t,accuracyLevel:n,bits:a,blockSize:o})},1348629:(r,e,t,n,a,o)=>{d.gb(\"MultiHeadAttention\",r,{numHeads:e,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o})},1348788:(r,e)=>{d.gb(\"QuickGelu\",r,{alpha:e})},1348852:(r,e,t,n,a)=>{d.gb(\"RotaryEmbedding\",r,{interleaved:!!e,numHeads:t,rotaryEmbeddingDim:n,scale:a})},1348991:(r,e,t)=>{d.gb(\"SkipLayerNormalization\",r,{epsilon:e,simplified:!!t})},1349093:r=>{d.Vb(r)},1349127:(r,e)=>d.Xb(r,e,d.Bb.Zb,d.Bb.errors),1349239:(r,e,t)=>{d.gb(\"SkipLayerNormalization\",r,{epsilon:e,simplified:!!t})}};function sr(r,e,t){return Ke((async()=>{await d.Tb(r,e,t)}))}function ur(r){this.name=\"ExitStatus\",this.message=`Program terminated 
with exit(${r})`,this.status=r}var fr=r=>{r.terminate(),r.onmessage=()=>{}},lr=r=>{0==pr.length&&(Cr(),_r(pr[0]));var e=pr.pop();if(!e)return 6;hr.push(e),yr[r.xb]=e,e.xb=r.xb;var t={cmd:\"run\",start_routine:r.$b,arg:r.Mb,pthread_ptr:r.xb};return e.postMessage(t,r.ec),0},cr=0,dr=(r,e,...t)=>{for(var n=2*t.length,a=xn(),o=En(8*n),i=o>>>3,s=0;s>>0]=u)}return r=_n(r,0,n,o,e),Wn(a),r};function br(r){if(p)return dr(0,1,r);if(H=r,!(0{if(H=r,p)throw gr(r),\"unwind\";br(r)},pr=[],hr=[],vr=[],yr={},wr=r=>{var e=r.xb;delete yr[e],pr.push(r),hr.splice(hr.indexOf(r),1),r.xb=0,Cn(e)};function Ar(){vr.forEach((r=>r()))}var _r=r=>new Promise((e=>{r.onmessage=t=>{var n=(t=t.data).cmd;if(t.targetThread&&t.targetThread!=hn()){var a=yr[t.targetThread];a?a.postMessage(t,t.transferList):E(`Internal error! Worker sent a message \"${n}\" to target pthread ${t.targetThread}, but that thread no longer exists!`)}else\"checkMailbox\"===n?Re():\"spawnThread\"===n?lr(t):\"cleanupThread\"===n?wr(yr[t.thread]):\"killThread\"===n?(t=t.thread,n=yr[t],delete yr[t],fr(n),Cn(t),hr.splice(hr.indexOf(n),1),n.xb=0):\"cancelThread\"===n?yr[t.thread].postMessage({cmd:\"cancel\"}):\"loaded\"===n?(r.loaded=!0,e(r)):\"alert\"===n?alert(`Thread ${t.threadId}: ${t.text}`):\"setimmediate\"===t.target?r.postMessage(t):\"callHandler\"===n?d[t.handler](...t.args):n&&E(`worker sent an unknown command ${n}`)},r.onerror=r=>{throw E(`worker sent an error! ${r.filename}:${r.lineno}: ${r.message}`),r};var t,n=[];for(t of[\"onExit\"])d.hasOwnProperty(t)&&n.push(t);r.postMessage({cmd:\"load\",handlers:n,wasmMemory:k,wasmModule:N})}));function Cr(){var r=new Worker(new URL(import.meta.url),{type:\"module\",workerData:\"em-pthread\",name:\"em-pthread\"});pr.push(r)}var Or=r=>{for(;0{var r=hn(),e=s()[r+52>>>2>>>0];r=s()[r+56>>>2>>>0],Sn(e,e-r),Wn(e)},Sr=(r,e)=>{cr=0,r=Mn(r,e),0>>=0);throw e>>>=0,t>>>=0,s()[n.Fb+16>>>2>>>0]=0,s()[n.Fb+4>>>2>>>0]=e,s()[n.Fb+8>>>2>>>0]=t,r}function xr(r,e,t,n){return p?dr(2,1,r,e,t,n):Mr(r,e,t,n)}function Mr(r,e,t,n){if(r>>>=0,e>>>=0,t>>>=0,n>>>=0,void 0===h)return E(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\"),6;var a=[];return p&&0===a.length?xr(r,e,t,n):(r={$b:t,xb:r,Mb:n,ec:a},p?(r.Ib=\"spawnThread\",postMessage(r,a),0):lr(r))}var kr=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf8\"):void 0,Nr=(r,e,t)=>{var n=(e>>>=0)+t;for(t=e;r[t]&&!(t>=n);)++t;if(16(a=224==(240&a)?(15&a)<<12|o<<6|i:(7&a)<<18|o<<12|i<<6|63&r[e++])?n+=String.fromCharCode(a):(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a))}}else n+=String.fromCharCode(a)}return n},Hr=(r,e)=>(r>>>=0)?Nr(n(),r,e):\"\";function Rr(r,e,t){return p?dr(3,1,r,e,t):0}function Pr(r,e){if(p)return dr(4,1,r,e)}var Dr=r=>{for(var e=0,t=0;t=n?e++:2047>=n?e+=2:55296<=n&&57343>=n?(e+=4,++t):e+=3}return e},Fr=(r,e,t,n)=>{if(!(0>>=0;n=t+n-1;for(var o=0;o=i&&(i=65536+((1023&i)<<10)|1023&r.charCodeAt(++o)),127>=i){if(t>=n)break;e[t++>>>0]=i}else{if(2047>=i){if(t+1>=n)break;e[t++>>>0]=192|i>>6}else{if(65535>=i){if(t+2>=n)break;e[t++>>>0]=224|i>>12}else{if(t+3>=n)break;e[t++>>>0]=240|i>>18,e[t++>>>0]=128|i>>12&63}e[t++>>>0]=128|i>>6&63}e[t++>>>0]=128|63&i}}return e[t>>>0]=0,t-a},Br=(r,e,t)=>Fr(r,n(),e,t);function Ir(r,e){if(p)return dr(5,1,r,e)}function Ur(r,e,t){if(p)return dr(6,1,r,e,t)}function jr(r,e,t){return p?dr(7,1,r,e,t):0}function $r(r,e){if(p)return dr(8,1,r,e)}function Gr(r,e,t){if(p)return dr(9,1,r,e,t)}function zr(r,e,t,n){if(p)return dr(10,1,r,e,t,n)}function Yr(r,e,t,n){if(p)return 
dr(11,1,r,e,t,n)}function Lr(r,e,t,n){if(p)return dr(12,1,r,e,t,n)}function Vr(r){if(p)return dr(13,1,r)}function qr(r,e){if(p)return dr(14,1,r,e)}function Jr(r,e,t){if(p)return dr(15,1,r,e,t)}var Xr,Qr,Zr=()=>{K(\"\")},Kr=r=>{for(var e=\"\";n()[r>>>0];)e+=Xr[n()[r++>>>0]];return e},re={},ee={},te={};function ne(r,e,t={}){if(!(\"argPackAdvance\"in e))throw new TypeError(\"registerType registeredInstance requires argPackAdvance\");return function(r,e,t={}){var n=e.name;if(!r)throw new Qr(`type \"${n}\" must have a positive integer typeid pointer`);if(ee.hasOwnProperty(r)){if(t.Ob)return;throw new Qr(`Cannot register type '${n}' twice`)}ee[r]=e,delete te[r],re.hasOwnProperty(r)&&(e=re[r],delete re[r],e.forEach((r=>r())))}(r,e,t)}var ae=(r,e,u)=>{switch(e){case 1:return u?r=>t()[r>>>0]:r=>n()[r>>>0];case 2:return u?r=>a()[r>>>1>>>0]:r=>o()[r>>>1>>>0];case 4:return u?r=>i()[r>>>2>>>0]:r=>s()[r>>>2>>>0];case 8:return u?r=>j[r>>>3]:r=>$[r>>>3];default:throw new TypeError(`invalid integer width (${e}): ${r}`)}};function oe(r,e,t){t>>>=0,ne(r>>>=0,{name:e=Kr(e>>>0),fromWireType:r=>r,toWireType:function(r,e){if(\"bigint\"!=typeof e&&\"number\"!=typeof e)throw e=null===e?\"null\":\"object\"==(r=typeof e)||\"array\"===r||\"function\"===r?e.toString():\"\"+e,new TypeError(`Cannot convert \"${e}\" to ${this.name}`);return\"number\"==typeof e&&(e=BigInt(e)),e},argPackAdvance:ie,readValueFromPointer:ae(e,t,-1==e.indexOf(\"u\")),Ab:null})}var ie=8;function se(r,e,t,a){ne(r>>>=0,{name:e=Kr(e>>>0),fromWireType:function(r){return!!r},toWireType:function(r,e){return e?t:a},argPackAdvance:ie,readValueFromPointer:function(r){return this.fromWireType(n()[r>>>0])},Ab:null})}var ue=[],fe=[];function le(r){9<(r>>>=0)&&0==--fe[r+1]&&(fe[r]=void 0,ue.push(r))}var ce=r=>{if(!r)throw new Qr(\"Cannot use deleted val. 
handle = \"+r);return fe[r]},de=r=>{switch(r){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:const e=ue.pop()||fe.length;return fe[e]=r,fe[e+1]=1,e}};function be(r){return this.fromWireType(s()[r>>>2>>>0])}var ge={name:\"emscripten::val\",fromWireType:r=>{var e=ce(r);return le(r),e},toWireType:(r,e)=>de(e),argPackAdvance:ie,readValueFromPointer:be,Ab:null};function me(r){return ne(r>>>0,ge)}var pe=(r,e)=>{switch(e){case 4:return function(r){return this.fromWireType(u()[r>>>2>>>0])};case 8:return function(r){return this.fromWireType(f()[r>>>3>>>0])};default:throw new TypeError(`invalid float width (${e}): ${r}`)}};function he(r,e,t){t>>>=0,ne(r>>>=0,{name:e=Kr(e>>>0),fromWireType:r=>r,toWireType:(r,e)=>e,argPackAdvance:ie,readValueFromPointer:pe(e,t),Ab:null})}function ve(r,e,t,n,a){if(r>>>=0,t>>>=0,e=Kr(e>>>0),-1===a&&(a=4294967295),a=r=>r,0===n){var o=32-8*t;a=r=>r<>>o}var i=e.includes(\"unsigned\")?function(r,e){return e>>>0}:function(r,e){return e};ne(r,{name:e,fromWireType:a,toWireType:i,argPackAdvance:ie,readValueFromPointer:ae(e,t,0!==n),Ab:null})}function ye(r,e,n){function a(r){var e=s()[r>>>2>>>0];return r=s()[r+4>>>2>>>0],new o(t().buffer,r,e)}var o=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][e];ne(r>>>=0,{name:n=Kr(n>>>0),fromWireType:a,argPackAdvance:ie,readValueFromPointer:a},{Ob:!0})}function we(r,e){r>>>=0;var t=\"std::string\"===(e=Kr(e>>>0));ne(r,{name:e,fromWireType:function(r){var e=s()[r>>>2>>>0],a=r+4;if(t)for(var o=a,i=0;i<=e;++i){var u=a+i;if(i==e||0==n()[u>>>0]){if(o=Hr(o,u-o),void 0===f)var f=o;else f+=String.fromCharCode(0),f+=o;o=u+1}}else{for(f=Array(e),i=0;i>>0]);f=f.join(\"\")}return yn(r),f},toWireType:function(r,e){e instanceof ArrayBuffer&&(e=new Uint8Array(e));var a=\"string\"==typeof e;if(!(a||e instanceof Uint8Array||e instanceof Uint8ClampedArray||e instanceof Int8Array))throw new Qr(\"Cannot pass non-string to std::string\");var o=t&&a?Dr(e):e.length,i=vn(4+o+1),u=i+4;if(s()[i>>>2>>>0]=o,t&&a)Br(e,u,o+1);else if(a)for(a=0;a>>0]=f}else for(a=0;a>>0]=e[a];return null!==r&&r.push(yn,i),i},argPackAdvance:ie,readValueFromPointer:be,Ab(r){yn(r)}})}var Ae=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf-16le\"):void 0,_e=(r,e)=>{for(var t=r>>1,i=t+e/2;!(t>=i)&&o()[t>>>0];)++t;if(32<(t<<=1)-r&&Ae)return Ae.decode(n().slice(r,t));for(t=\"\",i=0;!(i>=e/2);++i){var s=a()[r+2*i>>>1>>>0];if(0==s)break;t+=String.fromCharCode(s)}return t},Ce=(r,e,t)=>{if(t??=2147483647,2>t)return 0;var n=e;t=(t-=2)<2*r.length?t/2:r.length;for(var o=0;o>>1>>>0]=i,e+=2}return a()[e>>>1>>>0]=0,e-n},Oe=r=>2*r.length,Te=(r,e)=>{for(var t=0,n=\"\";!(t>=e/4);){var a=i()[r+4*t>>>2>>>0];if(0==a)break;++t,65536<=a?(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a)):n+=String.fromCharCode(a)}return n},Se=(r,e,t)=>{if(e>>>=0,t??=2147483647,4>t)return 0;var n=e;t=n+t-4;for(var a=0;a=o&&(o=65536+((1023&o)<<10)|1023&r.charCodeAt(++a)),i()[e>>>2>>>0]=o,(e+=4)+4>t)break}return i()[e>>>2>>>0]=0,e-n},We=r=>{for(var e=0,t=0;t=n&&++t,e+=4}return e};function Ee(r,e,t){if(r>>>=0,e>>>=0,t=Kr(t>>>=0),2===e)var n=_e,a=Ce,i=Oe,u=r=>o()[r>>>1>>>0];else 4===e&&(n=Te,a=Se,i=We,u=r=>s()[r>>>2>>>0]);ne(r,{name:t,fromWireType:r=>{for(var t,a=s()[r>>>2>>>0],o=r+4,i=0;i<=a;++i){var f=r+4+i*e;i!=a&&0!=u(f)||(o=n(o,f-o),void 0===t?t=o:(t+=String.fromCharCode(0),t+=o),o=f+e)}return yn(r),t},toWireType:(r,n)=>{if(\"string\"!=typeof n)throw new Qr(`Cannot pass non-string to C++ string type 
${t}`);var o=i(n),u=vn(4+o+e);return s()[u>>>2>>>0]=o/e,a(n,u+4,o+e),null!==r&&r.push(yn,u),u},argPackAdvance:ie,readValueFromPointer:be,Ab(r){yn(r)}})}function xe(r,e){ne(r>>>=0,{Pb:!0,name:e=Kr(e>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var Me=()=>1;function ke(r){wn(r>>>0,!m,1,!g,131072,!1),Ar()}var Ne=r=>{if(!z)try{if(r(),!(0>>=0,\"function\"==typeof Atomics.fc&&(Atomics.fc(i(),r>>>2,r).value.then(Re),r+=128,Atomics.store(i(),r>>>2,1))}var Re=()=>{var r=hn();r&&(He(r),Ne(Tn))};function Pe(r,e){(r>>>=0)==e>>>0?setTimeout(Re):p?postMessage({targetThread:r,cmd:\"checkMailbox\"}):(r=yr[r])&&r.postMessage({cmd:\"checkMailbox\"})}var De=[];function Fe(r,e,t,n,a){for(e>>>=0,n/=2,De.length=n,t=a>>>0>>>3,a=0;a>>0];return(e?ir[e]:dn[r])(...De)}function Be(r){r>>>=0,p?postMessage({cmd:\"cleanupThread\",thread:r}):wr(yr[r])}function Ie(r){}var Ue=(r,e)=>{var t=ee[r];if(void 0===t)throw r=gn(r),t=Kr(r),yn(r),new Qr(`${e} has unknown type ${t}`);return t},je=(r,e,t)=>{var n=[];return r=r.toWireType(n,t),n.length&&(s()[e>>>2>>>0]=de(n)),r};function $e(r,e,t){return e>>>=0,t>>>=0,r=ce(r>>>0),e=Ue(e,\"emval::as\"),je(e,t,r)}var Ge=r=>{try{r()}catch(r){K(r)}},ze=0,Ye=null,Le=0,Ve=[],qe={},Je={},Xe=0,Qe=null,Ze=[];function Ke(r){return function(r){if(!z){if(0===ze){var e=!1,t=!1;r(((r=0)=>{if(!z&&(Le=r,e=!0,t)){ze=2,Ge((()=>Hn(Ye))),\"undefined\"!=typeof Browser&&Browser.Gb.Nb&&Browser.Gb.resume(),r=!1;try{var n=function(){var r=i()[Ye+8>>>2>>>0];return r=bn[Je[r]],--cr,r()}()}catch(e){n=e,r=!0}var a=!1;if(!Ye){var o=Qe;o&&(Qe=null,(r?o.reject:o.resolve)(n),a=!0)}if(r&&!a)throw n}})),t=!0,e||(ze=1,Ye=function(){var r=vn(65548),e=r+12;s()[r>>>2>>>0]=e,s()[r+4>>>2>>>0]=e+65536,e=Ve[0];var t=qe[e];return void 0===t&&(t=Xe++,qe[e]=t,Je[t]=e),e=t,i()[r+8>>>2>>>0]=e,r}(),\"undefined\"!=typeof Browser&&Browser.Gb.Nb&&Browser.Gb.pause(),Ge((()=>kn(Ye))))}else 2===ze?(ze=0,Ge(Rn),yn(Ye),Ye=null,Ze.forEach(Ne)):K(`invalid state: ${ze}`);return Le}}((e=>{r().then(e)}))}function rt(r){return r>>>=0,Ke((()=>(r=ce(r)).then(de)))}var et=[];function tt(r,e,t,n){return t>>>=0,n>>>=0,(r=et[r>>>0])(null,e=ce(e>>>0),t,n)}var nt={},at=r=>{var e=nt[r];return void 0===e?Kr(r):e};function ot(r,e,t,n,a){return t>>>=0,n>>>=0,a>>>=0,(r=et[r>>>0])(e=ce(e>>>0),e[t=at(t)],n,a)}var it=()=>\"object\"==typeof globalThis?globalThis:Function(\"return this\")();function st(r){return 0==(r>>>=0)?de(it()):(r=at(r),de(it()[r]))}var ut=r=>{var e=et.length;return et.push(r),e},ft=(r,e)=>{for(var t=Array(r),n=0;n>>2>>>0],\"parameter \"+n);return t},lt=(r,e)=>Object.defineProperty(e,\"name\",{value:r});function ct(r,e,t){var n=(e=ft(r,e>>>0)).shift();r--;var a=\"return function (obj, func, destructorsRef, args) {\\n\",o=0,i=[];0===t&&i.push(\"obj\");for(var s=[\"retType\"],u=[n],f=0;fr.name)).join(\", \")}) => ${n.name}>`,ut(lt(t,r))}function dt(r){return r=at(r>>>0),de(d[r])}function bt(r,e){return e>>>=0,r=ce(r>>>0),e=ce(e),de(r[e])}function gt(r){9<(r>>>=0)&&(fe[r+1]+=1)}function mt(){return de([])}function pt(r){r=ce(r>>>0);for(var e=Array(r.length),t=0;t>>0))}function vt(){return de({})}function yt(r){for(var e=ce(r>>>=0);e.length;){var t=e.pop();e.pop()(t)}le(r)}function wt(r,e,t){e>>>=0,t>>>=0,r=ce(r>>>0),e=ce(e),t=ce(t),r[e]=t}function At(r,e){return e>>>=0,r=(r=Ue(r>>>0,\"_emval_take_value\")).readValueFromPointer(e),de(r)}function _t(r,e){r=-9007199254740992>r||9007199254740992>>=0,r=new 
Date(1e3*r),i()[e>>>2>>>0]=r.getUTCSeconds(),i()[e+4>>>2>>>0]=r.getUTCMinutes(),i()[e+8>>>2>>>0]=r.getUTCHours(),i()[e+12>>>2>>>0]=r.getUTCDate(),i()[e+16>>>2>>>0]=r.getUTCMonth(),i()[e+20>>>2>>>0]=r.getUTCFullYear()-1900,i()[e+24>>>2>>>0]=r.getUTCDay(),r=(r.getTime()-Date.UTC(r.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[e+28>>>2>>>0]=r}var Ct=r=>0==r%4&&(0!=r%100||0==r%400),Ot=[0,31,60,91,121,152,182,213,244,274,305,335],Tt=[0,31,59,90,120,151,181,212,243,273,304,334];function St(r,e){r=-9007199254740992>r||9007199254740992>>=0,r=new Date(1e3*r),i()[e>>>2>>>0]=r.getSeconds(),i()[e+4>>>2>>>0]=r.getMinutes(),i()[e+8>>>2>>>0]=r.getHours(),i()[e+12>>>2>>>0]=r.getDate(),i()[e+16>>>2>>>0]=r.getMonth(),i()[e+20>>>2>>>0]=r.getFullYear()-1900,i()[e+24>>>2>>>0]=r.getDay();var t=(Ct(r.getFullYear())?Ot:Tt)[r.getMonth()]+r.getDate()-1|0;i()[e+28>>>2>>>0]=t,i()[e+36>>>2>>>0]=-60*r.getTimezoneOffset(),t=new Date(r.getFullYear(),6,1).getTimezoneOffset();var n=new Date(r.getFullYear(),0,1).getTimezoneOffset();r=0|(t!=n&&r.getTimezoneOffset()==Math.min(n,t)),i()[e+32>>>2>>>0]=r}function Wt(r){r>>>=0;var e=new Date(i()[r+20>>>2>>>0]+1900,i()[r+16>>>2>>>0],i()[r+12>>>2>>>0],i()[r+8>>>2>>>0],i()[r+4>>>2>>>0],i()[r>>>2>>>0],0),t=i()[r+32>>>2>>>0],n=e.getTimezoneOffset(),a=new Date(e.getFullYear(),6,1).getTimezoneOffset(),o=new Date(e.getFullYear(),0,1).getTimezoneOffset(),s=Math.min(o,a);return 0>t?i()[r+32>>>2>>>0]=Number(a!=o&&s==n):0>>2>>>0]=e.getDay(),t=(Ct(e.getFullYear())?Ot:Tt)[e.getMonth()]+e.getDate()-1|0,i()[r+28>>>2>>>0]=t,i()[r>>>2>>>0]=e.getSeconds(),i()[r+4>>>2>>>0]=e.getMinutes(),i()[r+8>>>2>>>0]=e.getHours(),i()[r+12>>>2>>>0]=e.getDate(),i()[r+16>>>2>>>0]=e.getMonth(),i()[r+20>>>2>>>0]=e.getYear(),r=e.getTime(),BigInt(isNaN(r)?-1:r/1e3)}function Et(r,e,t,n,a,o,i){return p?dr(16,1,r,e,t,n,a,o,i):-52}function xt(r,e,t,n,a,o){if(p)return dr(17,1,r,e,t,n,a,o)}function Mt(r,e,t,n){r>>>=0,e>>>=0,t>>>=0,n>>>=0;var a=(new Date).getFullYear(),o=new Date(a,0,1),u=new Date(a,6,1);a=o.getTimezoneOffset();var f=u.getTimezoneOffset(),l=Math.max(a,f);s()[r>>>2>>>0]=60*l,i()[e>>>2>>>0]=Number(a!=f),o=(r=r=>r.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:\"short\"}).split(\" \")[1])(o),u=r(u),f{kt.length=0;for(var t;t=n()[r++>>>0];){var a=105!=t;e+=(a&=112!=t)&&e%8?4:0,kt.push(112==t?s()[e>>>2>>>0]:106==t?j[e>>>3]:105==t?i()[e>>>2>>>0]:f()[e>>>3>>>0]),e+=a?8:4}return kt};function Ht(r,e,t){return r>>>=0,e=Nt(e>>>0,t>>>0),ir[r](...e)}function Rt(r,e,t){return r>>>=0,e=Nt(e>>>0,t>>>0),ir[r](...e)}var Pt=()=>{},Dt=()=>Date.now();function Ft(r,e){return E(Hr(r>>>0,e>>>0))}var Bt,It=()=>{throw cr+=1,\"unwind\"};function Ut(){return 4294901760}Bt=()=>performance.timeOrigin+performance.now();var jt=()=>navigator.hardwareConcurrency;function $t(r){r>>>=0;var e=n().length;if(r<=e||4294901760=t;t*=2){var a=e*(1+.2/t);a=Math.min(a,r+100663296);var o=Math;a=Math.max(r,a);r:{o=(o.min.call(o,4294901760,a+(65536-a%65536)%65536)-k.buffer.byteLength+65535)/65536;try{k.grow(o),Y();var i=1;break r}catch(r){}i=void 0}if(i)return!0}return!1}var Gt=()=>(K(\"Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER\"),0),zt={},Yt=r=>{r.forEach((r=>{var e=Gt();e&&(zt[e]=r)}))};function Lt(){var r=Error().stack.toString().split(\"\\n\");return\"Error\"==r[0]&&r.shift(),Yt(r),zt.Lb=Gt(),zt.Yb=r,zt.Lb}function Vt(r,e,t){if(r>>>=0,e>>>=0,zt.Lb==r)var n=zt.Yb;else\"Error\"==(n=Error().stack.toString().split(\"\\n\"))[0]&&n.shift(),Yt(n);for(var a=3;n[a]&&Gt()!=r;)++a;for(r=0;r>>2>>>0]=Gt();return 
r}var qt,Jt={},Xt=()=>{if(!qt){var r,e={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(\"object\"==typeof navigator&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:_||\"./this.program\"};for(r in Jt)void 0===Jt[r]?delete e[r]:e[r]=Jt[r];var t=[];for(r in e)t.push(`${r}=${e[r]}`);qt=t}return qt};function Qt(r,e){if(p)return dr(18,1,r,e);r>>>=0,e>>>=0;var n=0;return Xt().forEach(((a,o)=>{var i=e+n;for(o=s()[r+4*o>>>2>>>0]=i,i=0;i>>0]=a.charCodeAt(i);t()[o>>>0]=0,n+=a.length+1})),0}function Zt(r,e){if(p)return dr(19,1,r,e);r>>>=0,e>>>=0;var t=Xt();s()[r>>>2>>>0]=t.length;var n=0;return t.forEach((r=>n+=r.length+1)),s()[e>>>2>>>0]=n,0}function Kt(r){return p?dr(20,1,r):52}function rn(r,e,t,n){return p?dr(21,1,r,e,t,n):52}function en(r,e,t,n){return p?dr(22,1,r,e,t,n):70}var tn=[null,[],[]];function nn(r,e,t,a){if(p)return dr(23,1,r,e,t,a);e>>>=0,t>>>=0,a>>>=0;for(var o=0,i=0;i>>2>>>0],f=s()[e+4>>>2>>>0];e+=8;for(var l=0;l>>0],d=tn[r];0===c||10===c?((1===r?W:E)(Nr(d,0)),d.length=0):d.push(c)}o+=f}return s()[a>>>2>>>0]=o,0}var an=[31,29,31,30,31,30,31,31,30,31,30,31],on=[31,28,31,30,31,30,31,31,30,31,30,31],sn=(r,e)=>{t().set(r,e>>>0)};function un(r,e,t,n){function a(r,e,t){for(r=\"number\"==typeof r?r.toString():r||\"\";r.lengthr?-1:0n-r.getDate())){r.setDate(r.getDate()+e);break}e-=n-r.getDate()+1,r.setDate(1),11>t?r.setMonth(t+1):(r.setMonth(0),r.setFullYear(r.getFullYear()+1))}return t=new Date(r.getFullYear()+1,0,4),e=f(new Date(r.getFullYear(),0,4)),t=f(t),0>=u(e,r)?0>=u(t,r)?r.getFullYear()+1:r.getFullYear():r.getFullYear()-1}r>>>=0,e>>>=0,t>>>=0,n>>>=0;var c=s()[n+40>>>2>>>0];for(var d in n={cc:i()[n>>>2>>>0],bc:i()[n+4>>>2>>>0],Db:i()[n+8>>>2>>>0],Hb:i()[n+12>>>2>>>0],Eb:i()[n+16>>>2>>>0],zb:i()[n+20>>>2>>>0],rb:i()[n+24>>>2>>>0],yb:i()[n+28>>>2>>>0],kc:i()[n+32>>>2>>>0],ac:i()[n+36>>>2>>>0],dc:c?Hr(c):\"\"},t=Hr(t),c={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"})t=t.replace(new RegExp(d,\"g\"),c[d]);var b=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),g=\"January February March April May June July August September October November December\".split(\" \");for(d in c={\"%a\":r=>b[r.rb].substring(0,3),\"%A\":r=>b[r.rb],\"%b\":r=>g[r.Eb].substring(0,3),\"%B\":r=>g[r.Eb],\"%C\":r=>o((r.zb+1900)/100|0,2),\"%d\":r=>o(r.Hb,2),\"%e\":r=>a(r.Hb,2,\" \"),\"%g\":r=>l(r).toString().substring(2),\"%G\":l,\"%H\":r=>o(r.Db,2),\"%I\":r=>(0==(r=r.Db)?r=12:12{for(var e=0,t=0;t<=r.Eb-1;e+=(Ct(r.zb+1900)?an:on)[t++]);return o(r.Hb+e,3)},\"%m\":r=>o(r.Eb+1,2),\"%M\":r=>o(r.bc,2),\"%n\":()=>\"\\n\",\"%p\":r=>0<=r.Db&&12>r.Db?\"AM\":\"PM\",\"%S\":r=>o(r.cc,2),\"%t\":()=>\"\\t\",\"%u\":r=>r.rb||7,\"%U\":r=>o(Math.floor((r.yb+7-r.rb)/7),2),\"%V\":r=>{var e=Math.floor((r.yb+7-(r.rb+6)%7)/7);if(2>=(r.rb+371-r.yb-2)%7&&e++,e)53==e&&(4==(t=(r.rb+371-r.yb)%7)||3==t&&Ct(r.zb)||(e=1));else{e=52;var t=(r.rb+7-r.yb-1)%7;(4==t||5==t&&Ct(r.zb%400-1))&&e++}return 
o(e,2)},\"%w\":r=>r.rb,\"%W\":r=>o(Math.floor((r.yb+7-(r.rb+6)%7)/7),2),\"%y\":r=>(r.zb+1900).toString().substring(2),\"%Y\":r=>r.zb+1900,\"%z\":r=>{var e=0<=(r=r.ac);return r=Math.abs(r)/60,(e?\"+\":\"-\")+String(\"0000\"+(r/60*100+r%60)).slice(-4)},\"%Z\":r=>r.dc,\"%%\":()=>\"%\"},t=t.replace(/%%/g,\"\\0\\0\"),c)t.includes(d)&&(t=t.replace(new RegExp(d,\"g\"),c[d](n)));return d=function(r){var e=Array(Dr(r)+1);return Fr(r,e,0,e.length),e}(t=t.replace(/\\0\\0/g,\"%\")),d.length>e?0:(sn(d,r),d.length-1)}function fn(r,e,t,n){return un(r>>>0,e>>>0,t>>>0,n>>>0)}p||function(){for(var r=d.numThreads-1;r--;)Cr();L.unshift((()=>{J++,function(r){p?r():Promise.all(pr.map(_r)).then(r)}((()=>Z()))}))}();for(var ln=Array(256),cn=0;256>cn;++cn)ln[cn]=String.fromCharCode(cn);Xr=ln,Qr=d.BindingError=class extends Error{constructor(r){super(r),this.name=\"BindingError\"}},d.InternalError=class extends Error{constructor(r){super(r),this.name=\"InternalError\"}},fe.push(0,1,void 0,1,null,1,!0,1,!1,1),d.count_emval_handles=()=>fe.length/2-5-ue.length;var dn=[br,gr,xr,Rr,Pr,Ir,Ur,jr,$r,Gr,zr,Yr,Lr,Vr,qr,Jr,Et,xt,Qt,Zt,Kt,rn,en,nn],bn=function(){function r(r,e){return bn=r.exports,bn=function(){var r=bn,e={};for(let[t,n]of Object.entries(r))e[t]=\"function\"==typeof n?(...r)=>{Ve.push(t);try{return n(...r)}finally{z||(Ve.pop(),Ye&&1===ze&&0===Ve.length&&(ze=0,cr+=1,Ge(Nn),\"undefined\"!=typeof Fibers&&Fibers.lc()))}}:n;return e}(),bn=function(){var r=bn,e=r=>e=>r(e)>>>0,t=r=>()=>r()>>>0;return(r=Object.assign({},r)).za=e(r.za),r.cb=t(r.cb),r.db=e(r.db),r.emscripten_main_runtime_thread_id=t(r.emscripten_main_runtime_thread_id),r.pb=e(r.pb),r.qb=t(r.qb),r}(),vr.push(bn.fb),V.unshift(bn.ya),N=e,Z(),bn}var e=or();if(J++,d.instantiateWasm)try{return d.instantiateWasm(e,r)}catch(r){E(`Module.instantiateWasm callback failed with error: ${r}`),c(r)}return rr||=d.locateFile?er(\"ort-wasm-simd-threaded.jsep.wasm\")?\"ort-wasm-simd-threaded.jsep.wasm\":d.locateFile?d.locateFile(\"ort-wasm-simd-threaded.jsep.wasm\",O):O+\"ort-wasm-simd-threaded.jsep.wasm\":new URL(\"ort-wasm-simd-threaded.jsep.wasm\",import.meta.url).href,function(r,e){var t=rr;return\"function\"!=typeof WebAssembly.instantiateStreaming||er(t)||tr(t)||\"function\"!=typeof fetch?ar(t,r,e):fetch(t,{credentials:\"same-origin\"}).then((n=>WebAssembly.instantiateStreaming(n,r).then(e,(function(n){return E(`wasm streaming compile failed: ${n}`),E(\"falling back to ArrayBuffer 
instantiation\"),ar(t,r,e)}))))}(e,(function(e){r(e.instance,e.module)})).catch(c),{}}(),gn=r=>(gn=bn.za)(r),mn=()=>(mn=bn.Aa)();d._OrtInit=(r,e)=>(d._OrtInit=bn.Ba)(r,e),d._OrtGetLastError=(r,e)=>(d._OrtGetLastError=bn.Ca)(r,e),d._OrtCreateSessionOptions=(r,e,t,n,a,o,i,s,u,f)=>(d._OrtCreateSessionOptions=bn.Da)(r,e,t,n,a,o,i,s,u,f),d._OrtAppendExecutionProvider=(r,e)=>(d._OrtAppendExecutionProvider=bn.Ea)(r,e),d._OrtAddFreeDimensionOverride=(r,e,t)=>(d._OrtAddFreeDimensionOverride=bn.Fa)(r,e,t),d._OrtAddSessionConfigEntry=(r,e,t)=>(d._OrtAddSessionConfigEntry=bn.Ga)(r,e,t),d._OrtReleaseSessionOptions=r=>(d._OrtReleaseSessionOptions=bn.Ha)(r),d._OrtCreateSession=(r,e,t)=>(d._OrtCreateSession=bn.Ia)(r,e,t),d._OrtReleaseSession=r=>(d._OrtReleaseSession=bn.Ja)(r),d._OrtGetInputOutputCount=(r,e,t)=>(d._OrtGetInputOutputCount=bn.Ka)(r,e,t),d._OrtGetInputName=(r,e)=>(d._OrtGetInputName=bn.La)(r,e),d._OrtGetOutputName=(r,e)=>(d._OrtGetOutputName=bn.Ma)(r,e),d._OrtFree=r=>(d._OrtFree=bn.Na)(r),d._OrtCreateTensor=(r,e,t,n,a,o)=>(d._OrtCreateTensor=bn.Oa)(r,e,t,n,a,o),d._OrtGetTensorData=(r,e,t,n,a)=>(d._OrtGetTensorData=bn.Pa)(r,e,t,n,a),d._OrtReleaseTensor=r=>(d._OrtReleaseTensor=bn.Qa)(r),d._OrtCreateRunOptions=(r,e,t,n)=>(d._OrtCreateRunOptions=bn.Ra)(r,e,t,n),d._OrtAddRunConfigEntry=(r,e,t)=>(d._OrtAddRunConfigEntry=bn.Sa)(r,e,t),d._OrtReleaseRunOptions=r=>(d._OrtReleaseRunOptions=bn.Ta)(r),d._OrtCreateBinding=r=>(d._OrtCreateBinding=bn.Ua)(r),d._OrtBindInput=(r,e,t)=>(d._OrtBindInput=bn.Va)(r,e,t),d._OrtBindOutput=(r,e,t,n)=>(d._OrtBindOutput=bn.Wa)(r,e,t,n),d._OrtClearBoundOutputs=r=>(d._OrtClearBoundOutputs=bn.Xa)(r),d._OrtReleaseBinding=r=>(d._OrtReleaseBinding=bn.Ya)(r),d._OrtRunWithBinding=(r,e,t,n,a)=>(d._OrtRunWithBinding=bn.Za)(r,e,t,n,a),d._OrtRun=(r,e,t,n,a,o,i,s)=>(d._OrtRun=bn._a)(r,e,t,n,a,o,i,s),d._OrtEndProfiling=r=>(d._OrtEndProfiling=bn.$a)(r),d._JsepOutput=(r,e,t)=>(d._JsepOutput=bn.ab)(r,e,t),d._JsepGetNodeName=r=>(d._JsepGetNodeName=bn.bb)(r);var pn,hn=()=>(hn=bn.cb)(),vn=d._malloc=r=>(vn=d._malloc=bn.db)(r),yn=d._free=r=>(yn=d._free=bn.eb)(r),wn=(r,e,t,n,a,o)=>(wn=bn.hb)(r,e,t,n,a,o),An=()=>(An=bn.ib)(),_n=(r,e,t,n,a)=>(_n=bn.jb)(r,e,t,n,a),Cn=r=>(Cn=bn.kb)(r),On=r=>(On=bn.lb)(r),Tn=()=>(Tn=bn.mb)(),Sn=(r,e)=>(Sn=bn.nb)(r,e),Wn=r=>(Wn=bn.ob)(r),En=r=>(En=bn.pb)(r),xn=()=>(xn=bn.qb)(),Mn=d.dynCall_ii=(r,e)=>(Mn=d.dynCall_ii=bn.sb)(r,e),kn=r=>(kn=bn.tb)(r),Nn=()=>(Nn=bn.ub)(),Hn=r=>(Hn=bn.vb)(r),Rn=()=>(Rn=bn.wb)();function Pn(){if(!(0xn(),d.stackRestore=r=>Wn(r),d.stackAlloc=r=>En(r),d.UTF8ToString=Hr,d.stringToUTF8=Br,d.lengthBytesUTF8=Dr,Q=function r(){pn||Pn(),pn||(Q=r)},Pn(),b});export default e;\"em-pthread\"===globalThis.self?.name&&e();", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {isNode} from './wasm-utils-env';\n\n/**\n * The classic script source URL. This is not always available in non ESModule environments.\n *\n * In Node.js, this is undefined.\n */\nexport const scriptSrc =\n // if Nodejs, return undefined\n isNode ? undefined :\n // if It's ESM, use import.meta.url\n BUILD_DEFS.ESM_IMPORT_META_URL ??\n // use `document.currentScript.src` if available\n (typeof document !== 'undefined' ? (document.currentScript as HTMLScriptElement)?.src :\n // use `self.location.href` if available\n (typeof self !== 'undefined' ? 
self.location?.href : undefined));\n\n/**\n * The origin of the current location.\n *\n * In Node.js, this is undefined.\n */\nconst origin = isNode || typeof location === 'undefined' ? undefined : location.origin;\n\n/**\n * Check if the given filename with prefix is from the same origin.\n */\nconst isSameOrigin = (filename: string, prefixOverride?: string) => {\n try {\n const baseUrl = prefixOverride ?? scriptSrc;\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.origin === origin;\n } catch {\n return false;\n }\n};\n\n/**\n * Normalize the inputs to an absolute URL with the given prefix override. If failed, return undefined.\n */\nconst normalizeUrl = (filename: string, prefixOverride?: string) => {\n const baseUrl = prefixOverride ?? scriptSrc;\n try {\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.href;\n } catch {\n return undefined;\n }\n};\n\n/**\n * Create a fallback URL if an absolute URL cannot be created by the normalizeUrl function.\n */\nconst fallbackUrl = (filename: string, prefixOverride?: string) => `${prefixOverride ?? './'}${filename}`;\n\n/**\n * This helper function is used to preload a module from a URL.\n *\n * If the origin of the worker URL is different from the current origin, the worker cannot be loaded directly.\n * See discussions in https://github.com/webpack-contrib/worker-loader/issues/154\n *\n * In this case, we will fetch the worker URL and create a new Blob URL with the same origin as a workaround.\n *\n * @param absoluteUrl - The absolute URL to preload.\n *\n * @returns - A promise that resolves to a new Blob URL\n */\nconst preload = async(absoluteUrl: string): Promise => {\n const response = await fetch(absoluteUrl, {credentials: 'same-origin'});\n const blob = await response.blob();\n return URL.createObjectURL(blob);\n};\n\n/**\n * This helper function is used to dynamically import a module from a URL.\n *\n * The build script has special handling for this function to ensure that the URL is not bundled into the final output.\n *\n * @param url - The URL to import.\n *\n * @returns - A promise that resolves to the default export of the module.\n */\nconst dynamicImportDefault = async(url: string): Promise => (await import(/* webpackIgnore: true */ url)).default;\n\n/**\n * The proxy worker factory imported from the proxy worker module.\n *\n * This is only available when the WebAssembly proxy is not disabled.\n */\nconst createProxyWorker: ((urlOverride?: string) => Worker)|undefined =\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n BUILD_DEFS.DISABLE_WASM_PROXY ? undefined : require('./proxy-worker/main').default;\n\n/**\n * Import the proxy worker.\n *\n * This function will perform the following steps:\n * 1. If a preload is needed, it will preload the module and return the object URL.\n * 2. 
Use the proxy worker factory to create the proxy worker.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The proxy worker.\n */\nexport const importProxyWorker = async(): Promise<[undefined | string, Worker]> => {\n if (!scriptSrc) {\n throw new Error('Failed to load proxy worker: cannot determine the script source URL.');\n }\n\n // If the script source is from the same origin, we can use the embedded proxy module directly.\n if (isSameOrigin(scriptSrc)) {\n return [undefined, createProxyWorker!()];\n }\n\n // Otherwise, need to preload\n const url = await preload(scriptSrc);\n return [url, createProxyWorker!(url)];\n};\n\n/**\n * The embedded WebAssembly module.\n *\n * This is only available in ESM and when embedding is not disabled.\n */\nconst embeddedWasmModule: EmscriptenModuleFactory|undefined =\n BUILD_DEFS.IS_ESM && BUILD_DEFS.DISABLE_DYNAMIC_IMPORT ?\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n require(\n !BUILD_DEFS.DISABLE_TRAINING ? '../../dist/ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? '../../dist/ort-wasm-simd-threaded.jsep.mjs' :\n '../../dist/ort-wasm-simd-threaded.mjs')\n .default :\n undefined;\n\n/**\n * Import the WebAssembly module.\n *\n * This function will perform the following steps:\n * 1. If BUILD_DEFS.DISABLE_DYNAMIC_IMPORT is true, use the embedded module.\n * 2. If a preload is needed, it will preload the module and return the object URL.\n * 3. Otherwise, it will perform a dynamic import of the module.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The default export of the module, which is a factory function to create the WebAssembly module.\n */\nexport const importWasmModule = async(\n urlOverride: string|undefined, prefixOverride: string|undefined,\n isMultiThreaded: boolean): Promise<[undefined | string, EmscriptenModuleFactory]> => {\n if (BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n return [undefined, embeddedWasmModule!];\n } else {\n const wasmModuleFilename = !BUILD_DEFS.DISABLE_TRAINING ? 'ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? 'ort-wasm-simd-threaded.jsep.mjs' :\n 'ort-wasm-simd-threaded.mjs';\n const wasmModuleUrl = urlOverride ?? normalizeUrl(wasmModuleFilename, prefixOverride);\n // need to preload if all of the following conditions are met:\n // 1. not in Node.js.\n // - Node.js does not have the same origin policy for creating workers.\n // 2. multi-threaded is enabled.\n // - If multi-threaded is disabled, no worker will be created. So we don't need to preload the module.\n // 3. the absolute URL is available.\n // - If the absolute URL is failed to be created, the origin cannot be determined. In this case, we will not\n // preload the module.\n // 4. the worker URL is not from the same origin.\n // - If the worker URL is from the same origin, we can create the worker directly.\n const needPreload = !isNode && isMultiThreaded && wasmModuleUrl && !isSameOrigin(wasmModuleUrl, prefixOverride);\n const url = needPreload ? (await preload(wasmModuleUrl)) :\n (wasmModuleUrl ?? fallbackUrl(wasmModuleFilename, prefixOverride));\n return [needPreload ? url : undefined, await dynamicImportDefault>(url)];\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {importWasmModule} from './wasm-utils-import';\n\nlet wasm: OrtWasmModule|undefined;\nlet initialized = false;\nlet initializing = false;\nlet aborted = false;\n\nconst isMultiThreadSupported = (): boolean => {\n // If 'SharedArrayBuffer' is not available, WebAssembly threads will not work.\n if (typeof SharedArrayBuffer === 'undefined') {\n return false;\n }\n\n try {\n // Test for transferability of SABs (for browsers. needed for Firefox)\n // https://groups.google.com/forum/#!msg/mozilla.dev.platform/IHkBZlHETpA/dwsMNchWEQAJ\n if (typeof MessageChannel !== 'undefined') {\n new MessageChannel().port1.postMessage(new SharedArrayBuffer(1));\n }\n\n // Test for WebAssembly threads capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing threaded instructions.\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 5,\n 4, 1, 3, 1, 1, 10, 11, 1, 9, 0, 65, 0, 254, 16, 2, 0, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nconst isSimdSupported = (): boolean => {\n try {\n // Test for WebAssembly SIMD capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing SIMD instructions.\n\n // The binary data is generated from the following code by wat2wasm:\n //\n // (module\n // (type $t0 (func))\n // (func $f0 (type $t0)\n // (drop\n // (i32x4.dot_i16x8_s\n // (i8x16.splat\n // (i32.const 0))\n // (v128.const i32x4 0x00000000 0x00000000 0x00000000 0x00000000)))))\n\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 10, 30, 1, 28, 0, 65, 0,\n 253, 15, 253, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 253, 186, 1, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nexport const initializeWebAssembly = async(flags: Env.WebAssemblyFlags): Promise => {\n if (initialized) {\n return Promise.resolve();\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initializeWebAssembly()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initializeWebAssembly()\\' failed.');\n }\n\n initializing = true;\n\n // wasm flags are already initialized\n const timeout = flags.initTimeout!;\n let numThreads = flags.numThreads!;\n\n // ensure SIMD is supported\n if (!isSimdSupported()) {\n throw new Error('WebAssembly SIMD is not supported in the current environment.');\n }\n\n // check if multi-threading is supported\n const multiThreadSupported = isMultiThreadSupported();\n if (numThreads > 1 && !multiThreadSupported) {\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n // eslint-disable-next-line no-console\n console.warn(\n 'env.wasm.numThreads is set to ' + numThreads +\n ', but this will not work unless you enable crossOriginIsolated mode. ' +\n 'See https://web.dev/cross-origin-isolation-guide/ for more info.');\n }\n\n // eslint-disable-next-line no-console\n console.warn(\n 'WebAssembly multi-threading is not supported in the current environment. ' +\n 'Falling back to single-threading.');\n\n // set flags.numThreads to 1 so that OrtInit() will not create a global thread pool.\n flags.numThreads = numThreads = 1;\n }\n\n const wasmPaths = flags.wasmPaths;\n const wasmPrefixOverride = typeof wasmPaths === 'string' ? 
wasmPaths : undefined;\n const mjsPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.mjs;\n const mjsPathOverride = (mjsPathOverrideFlag as URL)?.href ?? mjsPathOverrideFlag;\n const wasmPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.wasm;\n const wasmPathOverride = (wasmPathOverrideFlag as URL)?.href ?? wasmPathOverrideFlag;\n\n const [objectUrl, ortWasmFactory] = (await importWasmModule(mjsPathOverride, wasmPrefixOverride, numThreads > 1));\n\n let isTimeout = false;\n\n const tasks: Array> = [];\n\n // promise for timeout\n if (timeout > 0) {\n tasks.push(new Promise((resolve) => {\n setTimeout(() => {\n isTimeout = true;\n resolve();\n }, timeout);\n }));\n }\n\n // promise for module initialization\n tasks.push(new Promise((resolve, reject) => {\n const config: Partial = {\n /**\n * The number of threads. WebAssembly will create (Module.numThreads - 1) workers. If it is 1, no worker will be\n * created.\n */\n numThreads,\n };\n\n if (wasmPathOverride || wasmPrefixOverride) {\n /**\n * A callback function to locate the WebAssembly file. The function should return the full path of the file.\n *\n * Since Emscripten 3.1.58, this function is only called for the .wasm file.\n */\n config.locateFile = (fileName, scriptDirectory) =>\n wasmPathOverride ?? (wasmPrefixOverride ?? scriptDirectory) + fileName;\n }\n\n ortWasmFactory(config).then(\n // wasm module initialized successfully\n module => {\n initializing = false;\n initialized = true;\n wasm = module;\n resolve();\n if (objectUrl) {\n URL.revokeObjectURL(objectUrl);\n }\n },\n // wasm module failed to initialize\n (what) => {\n initializing = false;\n aborted = true;\n reject(what);\n });\n }));\n\n await Promise.race(tasks);\n\n if (isTimeout) {\n throw new Error(`WebAssembly backend initializing failed due to timeout: ${timeout}ms`);\n }\n};\n\nexport const getInstance = (): OrtWasmModule => {\n if (initialized && wasm) {\n return wasm;\n }\n\n throw new Error('WebAssembly is not initialized yet.');\n};\n\nexport const dispose = (): void => {\n if (initialized && !initializing && !aborted) {\n // TODO: currently \"PThread.terminateAllThreads()\" is not exposed in the wasm module.\n // And this function is not yet called by any code.\n // If it is needed in the future, we should expose it in the wasm module and uncomment the following line.\n\n // wasm?.PThread?.terminateAllThreads();\n wasm = undefined;\n\n initializing = false;\n initialized = false;\n aborted = true;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getInstance} from './wasm-factory';\n\nexport const allocWasmString = (data: string, allocs: number[]): number => {\n const wasm = getInstance();\n\n const dataLength = wasm.lengthBytesUTF8(data) + 1;\n const dataOffset = wasm._malloc(dataLength);\n wasm.stringToUTF8(data, dataOffset, dataLength);\n allocs.push(dataOffset);\n\n return dataOffset;\n};\n\ninterface ExtraOptionsHandler {\n (name: string, value: string): void;\n}\n\nexport const iterateExtraOptions =\n (options: Record, prefix: string, seen: WeakSet>,\n handler: ExtraOptionsHandler): void => {\n if (typeof options == 'object' && options !== null) {\n if (seen.has(options)) {\n throw new Error('Circular reference in options');\n } else {\n seen.add(options);\n }\n }\n\n Object.entries(options).forEach(([key, value]) => {\n const name = (prefix) ? 
prefix + key : key;\n if (typeof value === 'object') {\n iterateExtraOptions(value as Record, name + '.', seen, handler);\n } else if (typeof value === 'string' || typeof value === 'number') {\n handler(name, value.toString());\n } else if (typeof value === 'boolean') {\n handler(name, (value) ? '1' : '0');\n } else {\n throw new Error(`Can't handle extra config type: ${typeof value}`);\n }\n });\n };\n\n/**\n * check web assembly API's last error and throw error if any error occurred.\n * @param message a message used when an error occurred.\n */\nexport const checkLastError = (message: string): void => {\n const wasm = getInstance();\n\n const stack = wasm.stackSave();\n try {\n const paramsOffset = wasm.stackAlloc(8);\n wasm._OrtGetLastError(paramsOffset, paramsOffset + 4);\n const errorCode = wasm.HEAP32[paramsOffset / 4];\n const errorMessagePointer = wasm.HEAPU32[paramsOffset / 4 + 1];\n const errorMessage = errorMessagePointer ? wasm.UTF8ToString(errorMessagePointer) : '';\n throw new Error(`${message} ERROR_CODE: ${errorCode}, ERROR_MESSAGE: ${errorMessage}`);\n } finally {\n wasm.stackRestore(stack);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nexport const setRunOptions = (options: InferenceSession.RunOptions): [number, number[]] => {\n const wasm = getInstance();\n let runOptionsHandle = 0;\n const allocs: number[] = [];\n\n const runOptions: InferenceSession.RunOptions = options || {};\n\n try {\n if (options?.logSeverityLevel === undefined) {\n runOptions.logSeverityLevel = 2; // Default to warning\n } else if (\n typeof options.logSeverityLevel !== 'number' || !Number.isInteger(options.logSeverityLevel) ||\n options.logSeverityLevel < 0 || options.logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${options.logSeverityLevel}`);\n }\n\n if (options?.logVerbosityLevel === undefined) {\n runOptions.logVerbosityLevel = 0; // Default to 0\n } else if (typeof options.logVerbosityLevel !== 'number' || !Number.isInteger(options.logVerbosityLevel)) {\n throw new Error(`log verbosity level is not valid: ${options.logVerbosityLevel}`);\n }\n\n if (options?.terminate === undefined) {\n runOptions.terminate = false;\n }\n\n let tagDataOffset = 0;\n if (options?.tag !== undefined) {\n tagDataOffset = allocWasmString(options.tag, allocs);\n }\n\n runOptionsHandle = wasm._OrtCreateRunOptions(\n runOptions.logSeverityLevel!, runOptions.logVerbosityLevel!, !!runOptions.terminate!, tagDataOffset);\n if (runOptionsHandle === 0) {\n checkLastError('Can\\'t create run options.');\n }\n\n if (options?.extra !== undefined) {\n iterateExtraOptions(options.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddRunConfigEntry(runOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(`Can't set a run config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [runOptionsHandle, allocs];\n } catch (e) {\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nconst getGraphOptimzationLevel = (graphOptimizationLevel: string|unknown): number => {\n switch (graphOptimizationLevel) {\n case 'disabled':\n return 0;\n case 'basic':\n return 1;\n case 'extended':\n return 2;\n case 'all':\n return 99;\n default:\n throw new Error(`unsupported graph optimization level: ${graphOptimizationLevel}`);\n }\n};\n\nconst getExecutionMode = (executionMode: 'sequential'|'parallel'): number => {\n switch (executionMode) {\n case 'sequential':\n return 0;\n case 'parallel':\n return 1;\n default:\n throw new Error(`unsupported execution mode: ${executionMode}`);\n }\n};\n\nconst appendDefaultOptions = (options: InferenceSession.SessionOptions): void => {\n if (!options.extra) {\n options.extra = {};\n }\n if (!options.extra.session) {\n options.extra.session = {};\n }\n const session = options.extra.session as Record;\n if (!session.use_ort_model_bytes_directly) {\n // eslint-disable-next-line camelcase\n session.use_ort_model_bytes_directly = '1';\n }\n\n // if using JSEP with WebGPU, always disable memory pattern\n if (options.executionProviders &&\n options.executionProviders.some(ep => (typeof ep === 'string' ? ep : ep.name) === 'webgpu')) {\n options.enableMemPattern = false;\n }\n};\n\nconst setExecutionProviders =\n (sessionOptionsHandle: number, executionProviders: readonly InferenceSession.ExecutionProviderConfig[],\n allocs: number[]): void => {\n for (const ep of executionProviders) {\n let epName = typeof ep === 'string' ? ep : ep.name;\n\n // check EP name\n switch (epName) {\n case 'webnn':\n epName = 'WEBNN';\n if (typeof ep !== 'string') {\n const webnnOptions = ep as InferenceSession.WebNNExecutionProviderOption;\n // const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n if (deviceType) {\n const keyDataOffset = allocWasmString('deviceType', allocs);\n const valueDataOffset = allocWasmString(deviceType, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(`Can't set a session config entry: 'deviceType' - ${deviceType}.`);\n }\n }\n }\n break;\n case 'webgpu':\n epName = 'JS';\n if (typeof ep !== 'string') {\n const webgpuOptions = ep as InferenceSession.WebGpuExecutionProviderOption;\n if (webgpuOptions?.preferredLayout) {\n if (webgpuOptions.preferredLayout !== 'NCHW' && webgpuOptions.preferredLayout !== 'NHWC') {\n throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${webgpuOptions.preferredLayout}`);\n }\n const keyDataOffset = allocWasmString('preferredLayout', allocs);\n const valueDataOffset = allocWasmString(webgpuOptions.preferredLayout, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(\n `Can't set a session config entry: 'preferredLayout' - ${webgpuOptions.preferredLayout}.`);\n }\n }\n }\n break;\n case 'wasm':\n case 'cpu':\n continue;\n default:\n throw new Error(`not supported execution provider: ${epName}`);\n }\n\n const epNameDataOffset = allocWasmString(epName, allocs);\n if (getInstance()._OrtAppendExecutionProvider(sessionOptionsHandle, epNameDataOffset) !== 0) {\n 
checkLastError(`Can't append execution provider: ${epName}.`);\n }\n }\n };\n\nexport const setSessionOptions = (options?: InferenceSession.SessionOptions): [number, number[]] => {\n const wasm = getInstance();\n let sessionOptionsHandle = 0;\n const allocs: number[] = [];\n\n const sessionOptions: InferenceSession.SessionOptions = options || {};\n appendDefaultOptions(sessionOptions);\n\n try {\n const graphOptimizationLevel = getGraphOptimzationLevel(sessionOptions.graphOptimizationLevel ?? 'all');\n const executionMode = getExecutionMode(sessionOptions.executionMode ?? 'sequential');\n const logIdDataOffset =\n typeof sessionOptions.logId === 'string' ? allocWasmString(sessionOptions.logId, allocs) : 0;\n\n const logSeverityLevel = sessionOptions.logSeverityLevel ?? 2; // Default to 2 - warning\n if (!Number.isInteger(logSeverityLevel) || logSeverityLevel < 0 || logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${logSeverityLevel}`);\n }\n\n const logVerbosityLevel = sessionOptions.logVerbosityLevel ?? 0; // Default to 0 - verbose\n if (!Number.isInteger(logVerbosityLevel) || logVerbosityLevel < 0 || logVerbosityLevel > 4) {\n throw new Error(`log verbosity level is not valid: ${logVerbosityLevel}`);\n }\n\n const optimizedModelFilePathOffset = typeof sessionOptions.optimizedModelFilePath === 'string' ?\n allocWasmString(sessionOptions.optimizedModelFilePath, allocs) :\n 0;\n\n sessionOptionsHandle = wasm._OrtCreateSessionOptions(\n graphOptimizationLevel, !!sessionOptions.enableCpuMemArena, !!sessionOptions.enableMemPattern, executionMode,\n !!sessionOptions.enableProfiling, 0, logIdDataOffset, logSeverityLevel, logVerbosityLevel,\n optimizedModelFilePathOffset);\n if (sessionOptionsHandle === 0) {\n checkLastError('Can\\'t create session options.');\n }\n\n if (sessionOptions.executionProviders) {\n setExecutionProviders(sessionOptionsHandle, sessionOptions.executionProviders, allocs);\n }\n\n if (sessionOptions.enableGraphCapture !== undefined) {\n if (typeof sessionOptions.enableGraphCapture !== 'boolean') {\n throw new Error(`enableGraphCapture must be a boolean value: ${sessionOptions.enableGraphCapture}`);\n }\n const keyDataOffset = allocWasmString('enableGraphCapture', allocs);\n const valueDataOffset = allocWasmString(sessionOptions.enableGraphCapture.toString(), allocs);\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(\n `Can't set a session config entry: 'enableGraphCapture' - ${sessionOptions.enableGraphCapture}.`);\n }\n }\n\n if (sessionOptions.freeDimensionOverrides) {\n for (const [name, value] of Object.entries(sessionOptions.freeDimensionOverrides)) {\n if (typeof name !== 'string') {\n throw new Error(`free dimension override name must be a string: ${name}`);\n }\n if (typeof value !== 'number' || !Number.isInteger(value) || value < 0) {\n throw new Error(`free dimension override value must be a non-negative integer: ${value}`);\n }\n const nameOffset = allocWasmString(name, allocs);\n if (wasm._OrtAddFreeDimensionOverride(sessionOptionsHandle, nameOffset, value) !== 0) {\n checkLastError(`Can't set a free dimension override: ${name} - ${value}.`);\n }\n }\n }\n\n if (sessionOptions.extra !== undefined) {\n iterateExtraOptions(sessionOptions.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, 
valueDataOffset) !== 0) {\n checkLastError(`Can't set a session config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [sessionOptionsHandle, allocs];\n } catch (e) {\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// This file includes common definitions. They do NOT have dependency on the WebAssembly instance.\n\n/**\n * Copied from ONNX definition. Use this to drop dependency 'onnx_proto' to decrease compiled .js file size.\n */\nexport const enum DataType {\n undefined = 0,\n float = 1,\n uint8 = 2,\n int8 = 3,\n uint16 = 4,\n int16 = 5,\n int32 = 6,\n int64 = 7,\n string = 8,\n bool = 9,\n float16 = 10,\n double = 11,\n uint32 = 12,\n uint64 = 13,\n complex64 = 14,\n complex128 = 15,\n bfloat16 = 16\n}\n\n/**\n * Map string tensor data to enum value\n */\nexport const tensorDataTypeStringToEnum = (type: string): DataType => {\n switch (type) {\n case 'int8':\n return DataType.int8;\n case 'uint8':\n return DataType.uint8;\n case 'bool':\n return DataType.bool;\n case 'int16':\n return DataType.int16;\n case 'uint16':\n return DataType.uint16;\n case 'int32':\n return DataType.int32;\n case 'uint32':\n return DataType.uint32;\n case 'float16':\n return DataType.float16;\n case 'float32':\n return DataType.float;\n case 'float64':\n return DataType.double;\n case 'string':\n return DataType.string;\n case 'int64':\n return DataType.int64;\n case 'uint64':\n return DataType.uint64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n};\n\n/**\n * Map enum value to string tensor data\n */\nexport const tensorDataTypeEnumToString = (typeProto: DataType): Tensor.Type => {\n switch (typeProto) {\n case DataType.int8:\n return 'int8';\n case DataType.uint8:\n return 'uint8';\n case DataType.bool:\n return 'bool';\n case DataType.int16:\n return 'int16';\n case DataType.uint16:\n return 'uint16';\n case DataType.int32:\n return 'int32';\n case DataType.uint32:\n return 'uint32';\n case DataType.float16:\n return 'float16';\n case DataType.float:\n return 'float32';\n case DataType.double:\n return 'float64';\n case DataType.string:\n return 'string';\n case DataType.int64:\n return 'int64';\n case DataType.uint64:\n return 'uint64';\n\n default:\n throw new Error(`unsupported data type: ${typeProto}`);\n }\n};\n\n/**\n * get tensor element size in bytes by the given data type\n * @returns size in integer or undefined if the data type is not supported\n */\nexport const getTensorElementSize = (dateType: number): number|\n undefined => [undefined, 4, 1, 1, 2, 2, 4, 8, undefined, 1, 2, 8, 4, 8, undefined, undefined, undefined][dateType];\n\n/**\n * get typed array constructor by the given tensor type\n */\nexport const tensorTypeToTypedArrayConstructor = (type: Tensor.Type): Float32ArrayConstructor|Uint8ArrayConstructor|\n Int8ArrayConstructor|Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|\n Uint8ArrayConstructor|Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor => {\n switch (type) {\n case 'float16':\n // allow 
Float16Array polyfill.\n return typeof Float16Array !== 'undefined' && Float16Array.from ? Float16Array : Uint16Array;\n case 'float32':\n return Float32Array;\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'uint16':\n return Uint16Array;\n case 'int16':\n return Int16Array;\n case 'int32':\n return Int32Array;\n case 'bool':\n return Uint8Array;\n case 'float64':\n return Float64Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'uint64':\n return BigUint64Array;\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n };\n\n/**\n * Map string log level to integer value\n */\nexport const logLevelStringToEnum = (logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal'): number => {\n switch (logLevel) {\n case 'verbose':\n return 0;\n case 'info':\n return 1;\n case 'warning':\n return 2;\n case 'error':\n return 3;\n case 'fatal':\n return 4;\n default:\n throw new Error(`unsupported logging level: ${logLevel}`);\n }\n};\n\n/**\n * Check whether the given tensor type is supported by GPU buffer\n */\nexport const isGpuBufferSupportedType = (type: Tensor.Type): type is Tensor.GpuBufferDataTypes => type === 'float32' ||\n type === 'float16' || type === 'int32' || type === 'int64' || type === 'uint32' || type === 'uint8' ||\n type === 'bool';\n\n/**\n * Map string data location to integer value\n */\nexport const dataLocationStringToEnum = (location: Tensor.DataLocation): number => {\n switch (location) {\n case 'none':\n return 0;\n case 'cpu':\n return 1;\n case 'cpu-pinned':\n return 2;\n case 'texture':\n return 3;\n case 'gpu-buffer':\n return 4;\n default:\n throw new Error(`unsupported data location: ${location}`);\n }\n};\n\n/**\n * Map integer data location to string value\n */\nexport const dataLocationEnumToString = (location: number): Tensor.DataLocation|undefined =>\n (['none', 'cpu', 'cpu-pinned', 'texture', 'gpu-buffer'] as const)[location];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {isNode} from './wasm-utils-env';\n\n/**\n * Load a file into a Uint8Array.\n *\n * @param file - the file to load. Can be a URL/path, a Blob, an ArrayBuffer, or a Uint8Array.\n * @returns a Uint8Array containing the file data.\n */\nexport const loadFile = async(file: string|Blob|ArrayBufferLike|Uint8Array): Promise => {\n if (typeof file === 'string') {\n if (isNode) {\n // load file into ArrayBuffer in Node.js\n try {\n const {readFile} = require('node:fs/promises');\n return new Uint8Array(await readFile(file));\n } catch (e) {\n if (e.code === 'ERR_FS_FILE_TOO_LARGE') {\n // file is too large, use fs.createReadStream instead\n const {createReadStream} = require('node:fs');\n const stream = createReadStream(file);\n const chunks: Uint8Array[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n return new Uint8Array(Buffer.concat(chunks));\n }\n throw e;\n }\n } else {\n // load file into ArrayBuffer in browsers\n const response = await fetch(file);\n if (!response.ok) {\n throw new Error(`failed to load external data file: ${file}`);\n }\n const contentLengthHeader = response.headers.get('Content-Length');\n const fileSize = contentLengthHeader ? parseInt(contentLengthHeader, 10) : 0;\n if (fileSize < 1073741824 /* 1GB */) {\n // when Content-Length header is not set, we cannot determine the file size. 
We assume it is small enough to\n // load into memory.\n return new Uint8Array(await response.arrayBuffer());\n } else {\n // file is too large, use stream instead\n if (!response.body) {\n throw new Error(`failed to load external data file: ${file}, no response body.`);\n }\n const reader = response.body.getReader();\n\n let buffer;\n try {\n // try to create ArrayBuffer directly\n buffer = new ArrayBuffer(fileSize);\n } catch (e) {\n if (e instanceof RangeError) {\n // use WebAssembly Memory to allocate larger ArrayBuffer\n const pages = Math.ceil(fileSize / 65536);\n buffer = new WebAssembly.Memory({initial: pages, maximum: pages}).buffer;\n } else {\n throw e;\n }\n }\n\n let offset = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const {done, value} = await reader.read();\n if (done) {\n break;\n }\n const chunkSize = value.byteLength;\n const chunk = new Uint8Array(buffer, offset, chunkSize);\n chunk.set(value);\n offset += chunkSize;\n }\n return new Uint8Array(buffer, 0, fileSize);\n }\n }\n\n } else if (file instanceof Blob) {\n return new Uint8Array(await file.arrayBuffer());\n } else if (file instanceof Uint8Array) {\n return file;\n } else {\n return new Uint8Array(file);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {logLevelStringToEnum} from '../wasm-common';\n\ntype LogLevel = NonNullable;\ntype MessageString = string;\ntype MessageFunction = () => string;\ntype Message = MessageString|MessageFunction;\n\nconst logLevelPrefix = ['V', 'I', 'W', 'E', 'F'];\n\nconst doLog = (level: number, message: string): void => {\n // eslint-disable-next-line no-console\n console.log(`[${logLevelPrefix[level]},${new Date().toISOString()}]${message}`);\n};\n\nlet configLogLevel: LogLevel|undefined;\nlet debug: boolean|undefined;\n\nexport const configureLogger = ($configLogLevel: LogLevel, $debug: boolean): void => {\n configLogLevel = $configLogLevel;\n debug = $debug;\n};\n\n/**\n * A simple logging utility to log messages to the console.\n */\nexport const LOG = (logLevel: LogLevel, msg: Message): void => {\n const messageLevel = logLevelStringToEnum(logLevel);\n const configLevel = logLevelStringToEnum(configLogLevel);\n if (messageLevel >= configLevel) {\n doLog(messageLevel, typeof msg === 'function' ? msg() : msg);\n }\n};\n\n/**\n * A simple logging utility to log messages to the console. Only logs when debug is enabled.\n */\nexport const LOG_DEBUG: typeof LOG = (...args: Parameters) => {\n if (debug) {\n LOG(...args);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\nimport {tensorTypeToTypedArrayConstructor} from '../wasm-common';\n\nexport const createView = (dataBuffer: ArrayBuffer, type: Tensor.Type): Int32Array|Uint32Array|BigInt64Array|\n BigUint64Array|Uint8Array|Float32Array|Float64Array|Int8Array|Int16Array|Uint16Array =>\n new (tensorTypeToTypedArrayConstructor(type))(dataBuffer);\n\n/**\n * a TensorView does not own the data.\n */\nexport interface TensorView {\n readonly data: number;\n readonly dataType: number;\n readonly dims: readonly number[];\n\n /**\n * get a Float32Array data view of the tensor data. tensor data must be on CPU.\n */\n getFloat32Array(): Float32Array;\n\n /**\n * get a BigInt64Array data view of the tensor data. 
tensor data must be on CPU.\n */\n getBigInt64Array(): BigInt64Array;\n\n /**\n * get a Int32Array data view of the tensor data. tensor data must be on CPU.\n */\n getInt32Array(): Int32Array;\n\n /**\n * create a new tensor view with the same data but different dimensions.\n */\n reshape(newDims: readonly number[]): TensorView;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../wasm-common';\nimport {TensorView} from '../tensor-view';\n\nimport {ShaderHelper} from './ops/common';\n\nexport type SessionState = 'default'|'capturing'|'replaying';\n\nexport enum GpuDataType {\n default = 0,\n upload = 1,\n profile = 2\n}\nexport type GpuDataId = number;\n\nexport type GpuArchitecture = 'ampere';\nexport type GpuVendor = 'amd'|'intel'|'nvidia';\nexport interface AdapterInfo {\n isArchitecture: (architecture: GpuArchitecture) => boolean;\n isVendor: (vendor: GpuVendor) => boolean;\n}\n\nexport interface GpuData {\n type: GpuDataType;\n id: GpuDataId;\n buffer: GPUBuffer;\n}\n\nexport interface TensorInfo {\n dims: readonly number[];\n dataType: number;\n}\n\nexport interface ProgramUniform {\n type: DataType;\n data: number|readonly number[];\n}\n\nexport type ProgramUniformVariableInfo = [type: DataType, length: number];\n\n/**\n * Represent the dependency of a program on a specific input tensor.\n *\n * - 'none': the shader/uniform does not depend on this input's info\n * - 'type': the shader/uniform depends on data type of this input\n * - 'rank': the shader/uniform depends on data type and the rank of this input\n * - 'dims': the shader/uniform depends on data type and the dims of this input\n * - 'data': the shader/uniform depends on data type, the dims and the data of this input\n */\nexport type ProgramInputTensorInfoDependency = 'none'|'type'|'rank'|'dims'|'data';\n\n/**\n * Represent information about a program's cache for shader.\n */\nexport interface ProgramShaderCacheInfo {\n /**\n * an optional string as a cache hint in the artifact cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains initializing-time information, such as the attributes or any information of\n * initializers. It should NOT contain any runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'dims' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n/**\n * Represent information about a program's cache for uniform.\n */\nexport interface ProgramUniformCacheInfo {\n /**\n * an optional string as a cache hint in the uniform cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'none' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n\n /**\n * an optional object describing the cache information of the program shader.\n *\n * If this is not specified, assume hint is empty and inputDependencies are ['dims'] for all inputs.\n */\n shaderCache?: ProgramShaderCacheInfo;\n\n /**\n * the shader's processing source code.\n *\n * This function will be called when shader cache missed.\n */\n getShaderSource: (shaderHelper: ShaderHelper) => string;\n\n /**\n * A function to get run data required to run the program.\n *\n * This function will be called every time the program is executed. Should keep this function as simple as possible.\n */\n getRunData: (inputs: readonly TensorView[]) => {\n outputs: readonly TensorInfo[];\n dispatchGroup: {x: number; y?: number; z?: number};\n programUniforms?: readonly ProgramUniform[];\n };\n}\n\nexport interface Artifact {\n programInfo: ProgramInfo;\n computePipeline: GPUComputePipeline;\n uniformVariablesInfo: readonly ProgramUniformVariableInfo[]|undefined;\n}\n\nexport interface ComputeContextInputsOutputsMapping {\n /**\n * specify the mapping to the program's inputs. the value can be a number or a tensor view.\n * - if it's a number, it's the index of the kernel's input\n * - if it's a tensor view, it's an existing tensor view that will be used as the input\n *\n * if inputs is not specified, the mapping will be the kernel's inputs in order.\n */\n readonly inputs?: ReadonlyArray;\n /**\n * specify the mapping to the program's outputs. the value must be a number.\n * - if it's a non-negative number, it's the index of the kernel's output\n * - if it's -1, it's an output that will be created as a temporary value. this value will be released after\n * the kernel is executed.\n * - if it's -2, it's an output that will be created as a persistent value. this value will be released when the\n * kernel is released.\n *\n * if outputs is not specified, the mapping will be the kernel's outputs in order.\n */\n readonly outputs?: readonly number[];\n}\n\n/**\n * A ComputeContext instance carries the states that representing the current running of a kernel.\n */\nexport interface ComputeContext {\n /**\n * gpu adapter info\n */\n readonly adapterInfo: AdapterInfo;\n\n /**\n * stores the pointer to OpKernelContext\n */\n readonly opKernelContext: number;\n\n /**\n * a list of inputs, each input is an instance of TensorView\n */\n readonly inputs: readonly TensorView[];\n\n /**\n * a custom data object that can be used to store any data that is needed by the kernel\n */\n readonly kernelCustomData: {[key: string]: unknown};\n\n /**\n * a buffer that can be used to access custom data created each time the kernel is executed\n */\n readonly customDataBuffer: Uint8Array;\n\n /**\n * a number of outputs for the node\n */\n readonly outputCount: number;\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[];\n output(index: number, dims: readonly number[]): number;\n getMaxComputeWorkgroupSizes(): [number, number, number];\n getMaxComputeWorkgroupStoragesize(): number;\n}\n\nexport type TimestampQuery = 'none'|'inside-passes'|'at-passes';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {GpuData, GpuDataId, GpuDataType} from './types';\n\n/**\n * manages GpuDataId -> GpuBuffer\n */\nexport interface GpuDataManager {\n /**\n * copy data from CPU to GPU.\n */\n upload(id: GpuDataId, data: Uint8Array): void;\n /**\n * copy data from GPU to GPU.\n */\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void;\n /**\n * create new data on GPU.\n */\n create(size: number, usage?: number): GpuData;\n /**\n * get GPU data by ID.\n */\n get(id: GpuDataId): GpuData|undefined;\n /**\n * release the data on GPU by ID.\n *\n * @return size of the data released\n */\n release(id: GpuDataId): number;\n /**\n * copy data from GPU to CPU.\n */\n download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise;\n\n /**\n * refresh the buffers that marked for release.\n *\n * when release() is called, the buffer is not released immediately. this is because we need to wait for the commands\n * to be submitted to the GPU. this function is called after the commands are submitted so that the buffers can be\n * actually released.\n */\n refreshPendingBuffers(): void;\n\n /**\n * register an external buffer for IO Binding. If the buffer is already registered, return the existing GPU data ID.\n *\n * GPU data manager only manages a mapping between the buffer and the GPU data ID. It will not manage the lifecycle of\n * the external buffer.\n */\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number;\n\n /**\n * unregister an external buffer for IO Binding.\n */\n unregisterExternalBuffer(buffer: GPUBuffer): void;\n\n /**\n * destroy all gpu buffers.\n */\n dispose(): void;\n\n /**\n * release session related data.\n * @param sessionId - specify the session ID.\n */\n onReleaseSession(sessionId: number): void;\n}\n\ninterface StorageCacheValue {\n gpuData: GpuData;\n originalSize: number;\n}\n\nconst bucketFreelist: Map = new Map([\n [64, 250],\n [128, 200],\n [256, 200],\n [512, 200],\n [2048, 230],\n [4096, 200],\n [8192, 50],\n [16384, 50],\n [32768, 50],\n [65536, 50],\n [131072, 50],\n [262144, 50],\n [524288, 50],\n [1048576, 50],\n [2097152, 30],\n [4194304, 20],\n [8388608, 10],\n [12582912, 10],\n [16777216, 10],\n [26214400, 15],\n [33554432, 22],\n [44236800, 2],\n [58982400, 6],\n // we don't want to cache the bucket sizes below but not caching them\n // results in some major performance hits for models like sd-turbo.\n [67108864, 6],\n [134217728, 6],\n [167772160, 6],\n]);\n\nconst bucketArr: number[] = [];\n\n/**\n * normalize the buffer size so that it fits the 128-bits (16 bytes) alignment.\n */\nconst calcNormalizedBufferSize = (size: number) => Math.ceil(size / 16) * 16;\n\n/**\n * calculate the buffer size so that it fits into buckets.\n */\nconst calcBucketBufferSize = (size: number) => {\n for (let idx = 0; idx < bucketArr.length; idx++) {\n const sizeForBucket = bucketArr[idx];\n if (size <= sizeForBucket) {\n return sizeForBucket;\n }\n }\n // not in bucket list -> caller will not cache, round up to 16.\n return Math.ceil(size / 16) * 16;\n};\n\nlet guid = 1;\nconst createNewGpuDataId = () => guid++;\n\n/**\n * exported standard download function. 
This function is used by the session to download the data from GPU, and also by\n * factory to create GPU tensors with the capacity of downloading data from GPU.\n *\n * @param backend - the WebGPU backend\n * @param gpuBuffer - the GPU buffer to download\n * @param originalSize - the original size of the data\n * @param getTargetBuffer - optional. If provided, the data will be copied to the target buffer. Otherwise, a new buffer\n * will be created and returned.\n */\nexport const downloadGpuData =\n async(backend: WebGpuBackend, gpuBuffer: GPUBuffer, originalSize: number, getTargetBuffer?: () => Uint8Array):\n Promise => {\n const bufferSize = calcNormalizedBufferSize(originalSize);\n const gpuReadBuffer = backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: bufferSize, usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ});\n try {\n const commandEncoder = backend.getCommandEncoder();\n backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n gpuBuffer /* source buffer */, 0 /* source offset */, gpuReadBuffer /* destination buffer */,\n 0 /* destination offset */, bufferSize /* size */\n );\n backend.flush();\n\n await gpuReadBuffer.mapAsync(GPUMapMode.READ);\n\n const arrayBuffer = gpuReadBuffer.getMappedRange();\n if (getTargetBuffer) {\n // if we already have a CPU buffer to accept the data, no need to clone the ArrayBuffer.\n const targetBuffer = getTargetBuffer();\n targetBuffer.set(new Uint8Array(arrayBuffer, 0, originalSize));\n return targetBuffer;\n } else {\n // the mapped ArrayBuffer will be released when the GPU buffer is destroyed. Need to clone the\n // ArrayBuffer.\n return new Uint8Array(arrayBuffer.slice(0, originalSize));\n }\n } finally {\n gpuReadBuffer.destroy();\n }\n };\n\nclass GpuDataManagerImpl implements GpuDataManager {\n // GPU Data ID => GPU Data ( storage buffer )\n private storageCache: Map;\n\n // pending buffers for uploading ( data is unmapped )\n private buffersForUploadingPending: GPUBuffer[];\n // pending buffers for computing\n private buffersPending: GPUBuffer[];\n\n // The reusable storage buffers for computing.\n private freeBuffers: Map;\n // The reusable uniform buffers\n private freeUniformBuffers: Map;\n\n // The external buffers registered users for IO Binding.\n private externalBuffers: Map;\n\n // The pendingBuffers for capture graph.\n // a SessionID -> GPUBuffer[] mapping.\n private capturedPendingBuffers: Map;\n\n constructor(private backend: WebGpuBackend) {\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.buffersForUploadingPending = [];\n this.buffersPending = [];\n this.externalBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n\n for (const [key, ] of bucketFreelist) {\n bucketArr.push(key);\n this.freeBuffers.set(key, []);\n this.freeUniformBuffers.set(key, []);\n }\n }\n\n upload(id: GpuDataId, data: Uint8Array): void {\n const srcArrayBuffer = data.buffer;\n const srcOffset = data.byteOffset;\n const srcLength = data.byteLength;\n const size = calcNormalizedBufferSize(srcLength);\n\n // get destination gpu buffer\n const gpuDataCache = this.storageCache.get(id);\n if (!gpuDataCache) {\n throw new Error('gpu data for uploading does not exist');\n }\n if (gpuDataCache.originalSize !== srcLength) {\n throw new Error(`inconsistent data size. 
gpu data size=${gpuDataCache.originalSize}, data size=${srcLength}`);\n }\n\n // create gpu buffer\n const gpuBufferForUploading = this.backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {mappedAtCreation: true, size, usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC});\n\n // copy (upload) data\n const arrayBuffer = gpuBufferForUploading.getMappedRange();\n new Uint8Array(arrayBuffer).set(new Uint8Array(srcArrayBuffer, srcOffset, srcLength));\n gpuBufferForUploading.unmap();\n\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(gpuBufferForUploading, 0, gpuDataCache.gpuData.buffer, 0, size);\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.upload(id=${id})`);\n\n this.buffersForUploadingPending.push(gpuBufferForUploading);\n }\n\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void {\n // get source gpu buffer\n const sourceGpuDataCache = this.storageCache.get(sourceId);\n if (!sourceGpuDataCache) {\n throw new Error('source gpu data for memcpy does not exist');\n }\n // get destination gpu buffer\n const destinationGpuDataCache = this.storageCache.get(destinationId);\n if (!destinationGpuDataCache) {\n throw new Error('destination gpu data for memcpy does not exist');\n }\n if (sourceGpuDataCache.originalSize !== destinationGpuDataCache.originalSize) {\n throw new Error('inconsistent source and destination gpu data size');\n }\n\n const size = calcNormalizedBufferSize(sourceGpuDataCache.originalSize);\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n sourceGpuDataCache.gpuData.buffer, 0, destinationGpuDataCache.gpuData.buffer, 0, size);\n }\n\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number {\n let id: number|undefined;\n if (previousBuffer) {\n id = this.externalBuffers.get(previousBuffer);\n if (id === undefined) {\n throw new Error('previous buffer is not registered');\n }\n if (buffer === previousBuffer) {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${\n id}, buffer is the same, skip.`);\n return id;\n } else if (this.backend.capturedCommandList.has(this.backend.currentSessionId!)) {\n throw new Error(`Registering a different external buffer under graph capture mode is not supported yet.\n Please use the previous external buffer!`);\n }\n this.externalBuffers.delete(previousBuffer);\n } else {\n id = createNewGpuDataId();\n }\n\n this.storageCache.set(id, {gpuData: {id, type: GpuDataType.default, buffer}, originalSize});\n this.externalBuffers.set(buffer, id);\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${id}, registered.`);\n return id;\n }\n\n unregisterExternalBuffer(buffer: GPUBuffer): void {\n const id = this.externalBuffers.get(buffer);\n if (id !== undefined) {\n this.storageCache.delete(id);\n this.externalBuffers.delete(buffer);\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${id}`);\n }\n }\n\n // eslint-disable-next-line no-bitwise\n create(size: number, usage = GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST): GpuData {\n const bufferSize = calcBucketBufferSize(size);\n\n let gpuBuffer;\n // Currently, only storage buffers are reused.\n // eslint-disable-next-line no-bitwise\n const isStorage = 
(usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE;\n // eslint-disable-next-line no-bitwise\n const isUniform = (usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM;\n if (isStorage || isUniform) {\n const freeBuffers = isStorage ? this.freeBuffers : this.freeUniformBuffers;\n const buffers = freeBuffers.get(bufferSize);\n if (!buffers) {\n // no such bucket/freelist - create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n } else {\n if (buffers.length > 0) {\n // in freelist, use it\n gpuBuffer = buffers.pop() as GPUBuffer;\n } else {\n // bucket empty, create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n }\n } else {\n // create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n\n const gpuData = {id: createNewGpuDataId(), type: GpuDataType.default, buffer: gpuBuffer};\n this.storageCache.set(gpuData.id, {gpuData, originalSize: size});\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.create(size=${size}) => id=${gpuData.id}`);\n return gpuData;\n }\n\n get(id: GpuDataId): GpuData|undefined {\n return this.storageCache.get(id)?.gpuData;\n }\n\n release(id: GpuDataId): number {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('releasing data does not exist');\n }\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.release(id=${id}), gpuDataId=${cachedData.gpuData.id}`);\n\n this.storageCache.delete(id);\n this.buffersPending.push(cachedData.gpuData.buffer);\n // cachedData.gpuData.buffer.destroy();\n\n return cachedData.originalSize;\n }\n\n async download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('data does not exist');\n }\n await downloadGpuData(this.backend, cachedData.gpuData.buffer, cachedData.originalSize, getTargetBuffer);\n }\n\n refreshPendingBuffers(): void {\n for (const buffer of this.buffersForUploadingPending) {\n // upload buffer is only useful in the session creation time. 
So we don't need to reuse them in session running.\n buffer.destroy();\n }\n this.buffersForUploadingPending = [];\n\n if (this.buffersPending.length === 0) {\n return;\n }\n\n if (this.backend.sessionStatus === 'default') {\n for (const buffer of this.buffersPending) {\n const maxInFreeList = bucketFreelist.get(buffer.size);\n\n // eslint-disable-next-line no-bitwise\n if ((buffer.usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE) {\n // Put the pending buffer to freeBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n // eslint-disable-next-line no-bitwise\n } else if ((buffer.usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM) {\n // Put the pending buffer to freeUniformBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeUniformBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n } else {\n buffer.destroy();\n }\n }\n this.buffersPending = [];\n } else {\n // Don't release intermediate tensors in non-default mode.\n // TODO: reuse the storage buffers in non-default mode.\n let capturedBuffers = this.capturedPendingBuffers.get(this.backend.currentSessionId!);\n if (!capturedBuffers) {\n capturedBuffers = [];\n this.capturedPendingBuffers.set(this.backend.currentSessionId!, capturedBuffers);\n }\n for (const buffer of this.buffersPending) {\n capturedBuffers.push(buffer);\n }\n this.buffersPending = [];\n }\n }\n\n dispose() {\n this.freeBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.freeUniformBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n\n this.storageCache.forEach((storage) => {\n storage.gpuData.buffer.destroy();\n });\n\n this.capturedPendingBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n }\n\n onReleaseSession(sessionId: number) {\n // release the captured pending buffers.\n const pendingBuffers = this.capturedPendingBuffers.get(sessionId);\n if (pendingBuffers) {\n pendingBuffers.forEach(buffer => {\n buffer.destroy();\n });\n this.capturedPendingBuffers.delete(sessionId);\n }\n }\n}\n\nexport const createGpuDataManager = (...args: ConstructorParameters): GpuDataManager =>\n new GpuDataManagerImpl(...args);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\n/**\n * create a new object from the given attribute, and add a cacheKey property to it\n */\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable no-param-reassign */\n\nexport class MatMulUtil {\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n const max = Math.max(aLen, bLen);\n if (aLen && bLen) {\n cdims[crank - i] = Math.max(aLen, bLen);\n } else {\n // when either aLen or bLen is 0, the other should be either 0 or 1, otherwise it is not broadcastable.\n if (max > 1) {\n return undefined;\n }\n cdims[crank - i] = 0;\n }\n }\n\n return cdims;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n}\n\n\nexport class ShapeUtil {\n /**\n * calculate the size (number of elements)\n */\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n /**\n * convert dims corresponding to type change to pack. ex. 
uint8 data to uint32\n */\n static convertShape(dims: readonly number[], size = 4): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n }\n const newDims = new Array(rank);\n let i = rank - 1;\n while (i >= 0) {\n if (dims[i] % size === 0) {\n newDims[i] = dims[i] / size;\n break;\n }\n if (size % dims[i] !== 0) {\n throw new Error('cannot convert shape');\n }\n newDims[i] = 1;\n size /= dims[i];\n i--;\n }\n for (i--; i >= 0; i--) {\n newDims[i] = dims[i];\n }\n return newDims;\n }\n\n /**\n * calculate the size (number of elements) from the given axis (inclusive)\n */\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n /**\n * calculate the size (number of elements) to the given axis (exclusive)\n */\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n /**\n * calculate the size (number of elements) from and to the given axis [start, end)\n */\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be negative.\n if (dims[i] < 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. Most likely the range contains negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank?: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank ?? 
axes.length));\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]): void {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], 
isChannelLast: boolean, autoPad?: string): void {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + (isChannelLast ? 1 : 2)], strides[dim], dilations[dim], kernelShape[dim], pads, dim,\n dim + inputDims.length - 2, autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {ShapeUtil} from '../../util';\nimport {ProgramUniform, ProgramUniformVariableInfo} from '../types';\n\n/**\n * constant value for a workgroup size.\n *\n * We definitely can do further optimization in future, but for now we use 64.\n *\n * rule of thumb: Use [a workgroup size of] 64 unless you know what GPU you are targeting or that your workload\n * needs something different.\n *\n * from: https://surma.dev/things/webgpu/\n **/\nexport const WORKGROUP_SIZE = 64;\n\ninterface IndicesHelperTypes {\n /**\n * WGSL type of indices expression\n */\n readonly indices: string;\n\n /**\n * WGSL type of a value\n */\n readonly value: string;\n\n /**\n * WGSL type of storage type representing a value\n *\n * This is usually the same to `value`, but for some type (eg. bool), we need to use `u32` as storage type for\n * value type `vec4`\n */\n readonly storage: string;\n\n /**\n * tensor type as represented in TensorView\n */\n readonly tensor: number;\n}\n\n/**\n * A helper class for generating WGSL code for manipulating indices and data for a shader's input or output.\n *\n * This class is designed to offer a unified way to generate WGSL code for manipulating indices and data for a shader's\n * input or output.\n *\n * The following is a list of terminologies used in this class:\n * - `offset`: a uint32 value representing the offset of an element in the data buffer.\n * - `indices`: an abstraction of a multi-dimensional array's indices representing the data's index on each dimension.\n * - `value`: a value of a data element.\n *\n * Users are expected to create an instance of this class for each shader's input or output, and use the instance to\n * generate WGSL code for manipulating indices and data. The following 2 exported functions are for users to call to\n * create an instance of an indices helper:\n * - `inputVariable()`: create an indices helper instance for an input.\n * - `outputVariable()`: create an indices helper instance for an output.\n * - `internalVariable()`: create an indices helper instance for an internal variable.\n *\n * An indices helper instance contains helper functions for the following operations:\n * - access readonly basic information, including: `name`(the name of the input or output), `usage`(whether it's an\n * input, an output or an internal variable) and `shape`(the passed in shape).\n * - `type`: access readonly type information, including: `indices`(the type of indices), `value`(the type of value at\n * runtime), `storage`(the type of value at storage) and `tensor`(the tensor type as represented in TensorView).\n * - generate WGSL code for getting indices from offset. 
Use `offsetToIndices()` for WGSL code snippet to calculate\n * indices from offset, and use `indicesToOffset()` for WGSL code snippet to calculate offset from indices.\n * - to manipulate an instance of indices, use `setIndices()` and `getIndices()` to set and get the indices on an\n * indices variable.\n * - to manipulate data, use `set()`/`get()` to access data at the given indices from parameter list, use\n * `setByIndices()`/`getByIndices()` to access data at the given indices from an indices variable, and use\n * `setByOffset()`/`getByOffset()` to access data at the given offset.\n * - `impl`: get WGSL code of function implementation for the util functions mentioned above.\n */\nexport interface IndicesHelper {\n /**\n * get WGSL code of function implementation for the util functions.\n *\n */\n readonly impl: () => string;\n\n /**\n * get type info\n */\n readonly type: IndicesHelperTypes;\n\n /**\n * WGSL code of a expression for getting indices from offset.\n *\n * @param varOffset - a u32 expression representing the offset.\n *\n * @returns an `type.indices` expression\n */\n readonly offsetToIndices: (varOffset: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting offset from indices.\n *\n * @param varIndices - a `type.indices` expression representing the indices.\n *\n * @returns an `u32` expression\n */\n readonly indicesToOffset: (varIndices: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting original offset from broadcasted indices.\n *\n * @param varIndices - a `type.indices` expression representing the output indices.\n * @param output - output IndicesHelper.\n *\n * @returns an `u32` expression\n */\n readonly broadcastedIndicesToOffset: (varIndices: string, output: IndicesHelper) => string;\n\n /**\n * WGSL code of generating an indices literal\n *\n * @param init - initial value.\n */\n readonly indices: (...init: ReadonlyArray) => string;\n\n /**\n * WGSL code of a statement for setting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to set. can be a number or a string (WGSL `u32` expression).\n * @param value - the value to set. can be a number or a string (WGSL `u32` expression).\n *\n * @returns a WGSL statement\n */\n readonly indicesSet: (varIndices: string, idx: number|string, value: number|string) => void;\n\n /**\n * WGSL code of an `u32` expression for getting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to get. can be a number or a string (WGSL `u32` expression).\n *\n * @returns an `u32` expression\n */\n readonly indicesGet: (varIndices: string, idx: number|string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices.\n *\n * @param indicesAndValue - an array of numbers or strings (WGSL `u32` expression) representing the indices, followed\n * by the value to set. This array should have exactly `shape.length + 1` elements.\n */\n readonly set: (...indicesAndValue: ReadonlyArray) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n * @param value - the value to set. 
should be a WGSL expression.\n */\n readonly setByIndices: (varIndices: string, value: string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n * @param value - the value to set. should be a WGSL expression.\n */\n readonly setByOffset: (offset: number|string, value: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices.\n *\n * @param indices - an array of numbers or strings (WGSL `u32` expression) representing the indices.\n */\n readonly get: (...indices: ReadonlyArray) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n */\n readonly getByIndices: (varIndices: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n */\n readonly getByOffset: (offset: number|string) => string;\n\n /**\n * name of the data variable\n */\n readonly name: string;\n\n /**\n * whether the helper is for an input, an output or an internal variable.\n */\n readonly usage: 'input'|'output'|'internal';\n\n /**\n * the rank of the input or output.\n */\n readonly rank: number;\n\n /**\n * a string representing the variable name for the shape of the input or output.\n */\n readonly shape: string;\n\n /**\n * a string representing the variable name for the strides of the input or output.\n */\n readonly strides: string;\n}\n\nconst getWgslMappedType = (type: number, components: 1|2|3|4): string|[string, string] => {\n if (components === 3) {\n throw new Error('vec3 has same alignment as vec4, use vec4 instead');\n }\n\n // return type is [ storage type, runtime type ] or a single string for both\n switch (type) {\n case DataType.float16:\n return components > 1 ? `vec${components}` : 'f16';\n case DataType.float:\n return components > 1 ? `vec${components}` : 'f32';\n case DataType.int32:\n return components > 1 ? `vec${components}` : 'i32';\n case DataType.uint32:\n return components > 1 ? `vec${components}` : 'u32';\n case DataType.int64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'i32'];\n case DataType.uint64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'u32'];\n case DataType.bool:\n if (components !== 4) {\n throw new Error('bool must be vec4');\n }\n return ['u32', 'vec4'];\n\n default:\n throw new Error(`Unknown data type: ${type}`);\n }\n};\n\nexport const tensorTypeToWsglStorageType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? mappedType : mappedType[0];\n};\n\nexport const tensorTypeToWsglValueType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? 
mappedType : mappedType[1];\n};\n\nexport const createTensorShapeVariables = (...dims: ReadonlyArray): ProgramUniform[] => {\n const programUniforms: ProgramUniform[] = [];\n dims.forEach(dim => {\n if (dim.length !== 0) {\n programUniforms.push(\n {type: DataType.uint32, data: dim}, {type: DataType.uint32, data: ShapeUtil.computeStrides(dim)});\n }\n });\n return programUniforms;\n};\n\n/**\n * A helper function to get maximum vector size for specified data length\n * @param size\n */\nexport const getMaxComponents = (size: number) => {\n // we cannot use vec3 type since it has alignment of 16 bytes\n if (size % 4 === 0) {\n return 4;\n } else if (size % 2 === 0) {\n return 2;\n }\n\n return 1;\n};\n\n/**\n * A helper function that initializes variable as a scalar or vector. e.g. f32(0) or vec4f(0,0,0,0)\n * @param dataType\n * @param components\n * @param value\n */\nexport const fillVector = (dataType = 'f32', components?: number, value = '0') => {\n if (!components || components === 1) {\n return `${dataType}(${value})`;\n }\n\n return `vec${components}<${dataType}>(${value})`;\n};\n\n/**\n * A helper function that casts value or vector to f32\n * @param dataType\n * @param components\n * @param value\n */\nexport const castToF32 = (dataType: string, components: number, value: string) => {\n if (dataType === 'f32') {\n return value;\n }\n if (components === 1) {\n return `f32(${value})`;\n }\n\n return `vec${components}(${value})`;\n};\n\n/**\n * A helper function that returns scalar or sums all components of a vector\n * @param name\n * @param components\n */\nexport const sumVector = (name: string, components: number) => {\n if (components === 4) {\n return `(${name}.x + ${name}.y + ${name}.z + ${name}.w)`;\n } else if (components === 2) {\n return `(${name}.x + ${name}.y)`;\n } else if (components === 3) {\n return `(${name}.x + ${name}.y + ${name}.z)`;\n }\n\n return name;\n};\n\n/**\n * A helper function that returns variable element at index.\n * @param name - the name of variable.\n * @param index - the index of variable element.\n * @param length - the length of variable.\n * @param type - the type of variable, optional.\n */\nexport const getElementAt =\n (name: string, index: number|string, length: number, type?: UniformDataElementType): string => {\n if (name.startsWith('uniforms.') && length > 4) {\n if (typeof (index) === 'string') {\n if (type === 'f16') {\n return `${name}[(${index}) / 8][(${index}) % 8 / 4][(${index}) % 8 % 4]`;\n } else {\n return `${name}[(${index}) / 4][(${index}) % 4]`;\n }\n } else {\n if (type === 'f16') {\n return `${name}[${Math.floor(index / 8)}][${Math.floor(index % 8 / 4)}][${index % 8 % 4}]`;\n } else {\n return `${name}[${Math.floor(index / 4)}][${index % 4}]`;\n }\n }\n } else {\n return length > 1 ? `${name}[${index}]` : name;\n }\n };\n\n/**\n * A helper function to get a IndicesHelper for a given input or output.\n *\n * @param name - the name of the input or output.\n * @param tensorType - the tensor type of the input or output.\n * @param shapeOrRank - the tensor shape or the rank of the input or output.\n * @param usage - the usage of the indices helper.\n * @param components - indicates the number of components of each element. 
1 for scalar, 2 for vec2, 3 for vec3, 4 for\n * vec4.\n */\nconst createIndicesHelper =\n (name: string, tensorType: number, shapeOrRank: number|readonly number[], usage: IndicesHelper['usage'],\n components: 1|2|3|4): IndicesHelper => {\n const useUniform = typeof shapeOrRank === 'number';\n const rank = useUniform ? shapeOrRank : shapeOrRank.length;\n const rankIdentity = [...new Array(rank).keys()];\n const indicesType = rank < 2 ? 'u32' : rank <= 4 ? `vec${rank}` : `array`;\n const mappedType = getWgslMappedType(tensorType, components);\n const valueType = typeof mappedType === 'string' ? mappedType : mappedType[1];\n const storageType = typeof mappedType === 'string' ? mappedType : mappedType[0];\n const type = {indices: indicesType, value: valueType, storage: storageType, tensor: tensorType};\n\n const normalizeDim = (dim: number|string): string => typeof dim === 'string' ? dim : `${dim}u`;\n\n const implementationUsed = {\n offsetToIndices: false,\n indicesToOffset: false,\n broadcastedIndicesToOffset: false,\n set: false,\n setByIndices: false,\n get: false,\n getByIndices: false,\n };\n\n const uniformPrefix = useUniform ? 'uniforms.' : '';\n const shape = `${uniformPrefix}${name}_shape`;\n const strides = `${uniformPrefix}${name}_strides`;\n\n let o2iSnippet = '';\n for (let i = 0; i < rank - 1; i++) {\n o2iSnippet += `\n let dim${i} = current / ${getElementAt(strides, i, rank)};\n let rest${i} = current % ${getElementAt(strides, i, rank)};\n indices[${i}] = dim${i};\n current = rest${i};\n `;\n }\n o2iSnippet += `indices[${rank - 1}] = current;`;\n\n const offsetToIndicesImplementation = rank < 2 ? '' : `\n fn o2i_${name}(offset: u32) -> ${type.indices} {\n var indices: ${type.indices};\n var current = offset;\n ${o2iSnippet}\n return indices;\n }`;\n\n const offsetToIndices = (varOffset: string) => {\n implementationUsed.offsetToIndices = true;\n return rank < 2 ? varOffset : `o2i_${name}(${varOffset})`;\n };\n\n const offsets: string[] = [];\n if (rank >= 2) {\n for (let i = rank - 1; i >= 0; i--) {\n offsets.push(`${getElementAt(strides, i, rank)} * (indices[${i}])`);\n }\n }\n\n const indicesToOffsetImplementation = rank < 2 ? '' : `\n fn i2o_${name}(indices: ${type.indices}) -> u32 {\n return ${offsets.join('+')};\n }`;\n\n const indicesToOffset = (varIndices: string) => {\n implementationUsed.indicesToOffset = true;\n return rank < 2 ? varIndices : `i2o_${name}(${varIndices})`;\n };\n\n const indices = (...init: ReadonlyArray) =>\n rank === 0 ? 
'0u' : `${type.indices}(${init.map(normalizeDim).join(',')})`;\n\n const indicesGet = (varIndices: string, idx: number|string) => {\n if (rank < 2) {\n return `${varIndices}`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}`;\n }\n };\n\n const indicesSet = (varIndices: string, idx: number|string, value: string) => {\n if (rank < 2) {\n return `${varIndices}=${value};`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}=${value};`;\n }\n };\n\n const broadcastedIndicesToOffsetImplementation: {[key: string]: string} = {};\n const broadcastedIndicesToOffset = (varIndices: string, output: IndicesHelper) => {\n implementationUsed.broadcastedIndicesToOffset = true;\n const implKey = `${output.name}broadcastedIndicesTo${name}Offset`;\n if (implKey in broadcastedIndicesToOffsetImplementation) {\n return `${implKey}(${varIndices})`;\n }\n const offsets = [];\n for (let i = rank - 1; i >= 0; i--) {\n const idx = output.indicesGet('outputIndices', i + output.rank - rank);\n offsets.push(`${indicesGet(strides, i)} * (${idx} % ${indicesGet(shape, i)})`);\n }\n broadcastedIndicesToOffsetImplementation[implKey] =\n `fn ${implKey}(outputIndices: ${output.type.indices}) -> u32 {\n return ${offsets.length > 0 ? offsets.join('+') : '0u'};\n }`;\n\n return `${implKey}(${varIndices})`;\n };\n\n const setByOffset = (offset: number|string, value: string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]=${value};`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), select(0u, 0xFFFFFFFFu, ${value} < 0));`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), 0u);`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `${name}[${offset}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${value}));`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByOffset = (offset: number|string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `i32(${name}[${offset}].x)`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `u32(${name}[${offset}].x)`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `vec4(bool(${name}[${offset}] & 0xFFu), bool(${name}[${offset}] & 0xFF00u), bool(${name}[${\n offset}] & 0xFF0000u), bool(${name}[${offset}] & 0xFF000000u))`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByIndicesImplementation = rank < 2 ? '' : `\n fn get_${name}ByIndices(indices: ${type.indices}) -> ${valueType} {\n return ${getByOffset(`i2o_${name}(indices)`)};\n }`;\n\n const getImplementation = rank < 2 ? 
'' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn get_${name}(${functionParams}) -> ${valueType} {\n return get_${name}ByIndices(${indices(dimsParams)});\n }`;\n })();\n\n const get = (...indices: ReadonlyArray) => {\n if (indices.length !== rank) {\n throw new Error(`indices length must be ${rank}`);\n }\n\n const normalizedIndices = indices.map(normalizeDim).join(',');\n\n if (rank === 0) {\n return getByOffset('0u');\n } else if (rank === 1) {\n return getByOffset(normalizedIndices[0]);\n } else {\n implementationUsed.get = true;\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}(${normalizedIndices})`;\n }\n };\n\n const getByIndices = (varIndices: string) => {\n if (rank < 2) {\n return getByOffset(varIndices);\n } else {\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}ByIndices(${varIndices})`;\n }\n };\n\n const setByIndicesImplementation = rank < 2 ? '' : `\n fn set_${name}ByIndices(indices: ${type.indices}, value: ${valueType}) {\n ${setByOffset(`i2o_${name}(indices)`, 'value')}\n }`;\n\n const setImplementation = rank < 2 ? '' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn set_${name}(${functionParams}, value: ${valueType}) {\n set_${name}ByIndices(${indices(dimsParams)}, value);\n }`;\n })();\n\n const set = (...indicesAndValue: ReadonlyArray) => {\n if (indicesAndValue.length !== rank + 1) {\n throw new Error(`indices length must be ${rank}`);\n }\n const value = indicesAndValue[rank];\n if (typeof value !== 'string') {\n throw new Error('value must be string');\n }\n\n const normalizedIndices = indicesAndValue.slice(0, rank).map(normalizeDim).join(',');\n\n if (rank === 0) {\n return setByOffset('0u', value);\n } else if (rank === 1) {\n return setByOffset(normalizedIndices[0], value);\n } else {\n implementationUsed.set = true;\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}(${normalizedIndices}, ${value})`;\n }\n };\n\n const setByIndices = (varIndices: string, value: string) => {\n if (rank < 2) {\n return setByOffset(varIndices, value);\n } else {\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}ByIndices(${varIndices}, ${value});`;\n }\n };\n\n const impl = () => {\n const impls = [];\n let needShapeStrides = false;\n if (implementationUsed.offsetToIndices) {\n impls.push(offsetToIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.indicesToOffset) {\n impls.push(indicesToOffsetImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.broadcastedIndicesToOffset) {\n Object.values(broadcastedIndicesToOffsetImplementation).forEach(impl => impls.push(impl));\n needShapeStrides = true;\n }\n if (implementationUsed.set) {\n impls.push(setImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.setByIndices) {\n impls.push(setByIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.get) {\n impls.push(getImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.getByIndices) {\n impls.push(getByIndicesImplementation);\n needShapeStrides = true;\n }\n if (!useUniform && needShapeStrides) {\n impls.unshift(\n `const 
${shape} = ${type.indices}(${shapeOrRank.join(',')});`,\n `const ${strides} = ${type.indices}(${ShapeUtil.computeStrides(shapeOrRank).join(',')});`);\n }\n return impls.join('\\n');\n };\n\n return {\n impl,\n type,\n offsetToIndices,\n indicesToOffset,\n broadcastedIndicesToOffset,\n indices,\n indicesGet,\n indicesSet,\n set,\n setByOffset,\n setByIndices,\n get,\n getByOffset,\n getByIndices,\n // isVec4,\n usage,\n name,\n strides,\n shape,\n rank\n };\n };\n\n/**\n * Create a IndicesHelper for an input.\n *\n * @param name - the name of the input.\n * @param type - the tensor type of the input.\n * @param shapeOrRank - the tensor shape or the rank of the input.\n * @param components - the number of components of the input. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the input.\n */\nexport const inputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'input', components);\n\n/**\n * Create a IndicesHelper for an output.\n *\n * @param name - the name of the output.\n * @param type - the tensor type of the output.\n * @param shapeOrRank - the tensor shape or the rank of the output.\n * @param components - the number of components of the output. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the output.\n */\nexport const outputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'output', components);\n\n/**\n * Create a IndicesHelper for an internal variable.\n *\n * @param name - the name of the variable.\n * @param type - the tensor type of the variable.\n * @param shapeOrRank - the tensor shape or the rank of the variable.\n * @param components - the number of components of the variable. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the variable.\n */\nexport const internalVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'internal', components);\n\nexport type UniformDataElementType = 'u32'|'f16'|'f32'|'i32';\nexport type UniformsArrayType = Array<{name: string; type: UniformDataElementType; length?: number}>;\n\n/**\n * A ShaderHelper is a helper class for generating WGSL code.\n */\nexport interface ShaderHelper {\n /**\n * A helper function to generate the start of main function in WGSL source code.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param workgroupSize - an optional workgroup size. default is WORKGROUP_SIZE.\n */\n mainStart(workgroupSize?: number|[number, number, number]): string;\n\n /**\n * A helper function to generate the code snippet for guarding against out-of-bounds size.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n *\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param size - the size of the data to guard against. 
can be a number or a string (WGSL `u32` expression).\n */\n guardAgainstOutOfBoundsWorkgroupSizes(size: unknown): string;\n\n /**\n * A helper function to generate the code snippet for declaring multiple inputs or outputs.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n declareVariables(...variables: IndicesHelper[]): string;\n\n /**\n * A helper function to register one uniform. Can be called multiple times to register multiple uniforms.\n *\n * @param name - the name of the uniform.\n * @param type - the type of the uniform.\n * @param length - the length of the uniform, default to 1 when it is not provided.\n */\n registerUniform(name: string, type: string, length?: number): ShaderHelper;\n\n /**\n * A helper function to register multiple uniforms. Can be called multiple times to register multiple uniforms.\n *\n * @param uniforms - an array of uniforms. Each element of the array is an object with 2 properties: `name` and\n * `type`.\n */\n registerUniforms(uniforms: UniformsArrayType): ShaderHelper;\n\n /**\n * A helper function to register multiple internal variables. Can be called multiple times to register multiple\n * internal variables.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper;\n}\n\nclass ShaderHelperImpl implements ShaderHelper {\n constructor(private normalizedDispatchGroup: [number, number, number], private limits: GPUSupportedLimits) {}\n\n guardAgainstOutOfBoundsWorkgroupSizes(size: number|string): string {\n // Guard against out-of-bounds work group sizes\n const sizeInCode = typeof size === 'number' ? `${size}u` : size;\n return `if (global_idx >= ${sizeInCode}) { return; }`;\n }\n\n mainStart(workgroupSize: number|[number, number, number] = WORKGROUP_SIZE) {\n const workgroupSizeX = typeof workgroupSize === 'number' ? workgroupSize : workgroupSize[0];\n const workgroupSizeY = typeof workgroupSize === 'number' ? 1 : workgroupSize[1];\n const workgroupSizeZ = typeof workgroupSize === 'number' ? 1 : workgroupSize[2];\n\n if (workgroupSizeX > this.limits.maxComputeWorkgroupSizeX ||\n workgroupSizeY > this.limits.maxComputeWorkgroupSizeY ||\n workgroupSizeZ > this.limits.maxComputeWorkgroupSizeZ) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${\n this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);\n }\n\n if (workgroupSizeX * workgroupSizeY * workgroupSizeZ > this.limits.maxComputeInvocationsPerWorkgroup) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup invocations ${\n this.limits.maxComputeInvocationsPerWorkgroup}.`);\n }\n\n const is1DimensionDispatch = this.normalizedDispatchGroup[1] === 1 && this.normalizedDispatchGroup[2] === 1;\n const paramList = is1DimensionDispatch ? 
`@builtin(global_invocation_id) global_id : vec3,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(local_invocation_id) local_id : vec3` :\n `@builtin(global_invocation_id) global_id : vec3,\n @builtin(local_invocation_id) local_id : vec3,\n @builtin(local_invocation_index) local_idx : u32,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(num_workgroups) num_workgroups : vec3`;\n const globalIdxDefinition = is1DimensionDispatch ?\n 'let global_idx = global_id.x; let local_idx = local_id.x;' :\n `let global_idx = (workgroup_id.z * num_workgroups[0] * num_workgroups[1] +\n workgroup_id.y * num_workgroups[0] + workgroup_id.x) * ${\n workgroupSizeX * workgroupSizeY * workgroupSizeZ}u + local_idx;`;\n\n return `@compute @workgroup_size(${workgroupSizeX}, ${workgroupSizeY}, ${workgroupSizeZ})\n fn main(${paramList}) {\n ${globalIdxDefinition}\n `;\n }\n\n private appendVariableUniforms(variable: IndicesHelper): void {\n if (variable.rank !== 0) {\n if (variable.shape.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.shape.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n if (variable.strides.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.strides.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n }\n }\n\n private declareVariable(variable: IndicesHelper, bindingIndex: number): string {\n if (variable.usage === 'internal') {\n throw new Error('cannot use internal variable with declareVariable(). use registerInternalVariables() instead.');\n }\n this.variables.push(variable);\n this.appendVariableUniforms(variable);\n\n const access = variable.usage === 'input' ? 'read' : 'read_write';\n const storageType = variable.type.storage;\n return `@group(0) @binding(${bindingIndex}) var ${variable.name}: array<${storageType}>;`;\n }\n\n declareVariables(...variables: IndicesHelper[]): string {\n return variables.map(v => this.declareVariable(v, this.variableIndex++)).join('\\n');\n }\n\n private registerInternalVariable(variable: IndicesHelper): void {\n if (variable.usage !== 'internal') {\n throw new Error(\n 'cannot use input or output variable with registerInternalVariable(). use declareVariables() instead.');\n }\n\n this.internalVariables.push(variable);\n this.appendVariableUniforms(variable);\n }\n\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper {\n variables.forEach(v => this.registerInternalVariable(v));\n return this;\n }\n\n registerUniform(name: string, type: UniformDataElementType, length = 1): ShaderHelper {\n this.uniforms.push({name, type, length});\n return this;\n }\n\n registerUniforms(additionalUniforms: UniformsArrayType): ShaderHelper {\n this.uniforms = this.uniforms.concat(additionalUniforms);\n return this;\n }\n\n private internalVariables: IndicesHelper[] = [];\n private variables: IndicesHelper[] = [];\n private uniforms: UniformsArrayType = [];\n private uniformDeclaration(): string {\n if (this.uniforms.length === 0) {\n return '';\n }\n\n const uniformSnippets: string[] = [];\n for (const {name, type, length} of this.uniforms) {\n if (length && length > 4) {\n if (type === 'f16') {\n uniformSnippets.push(`@align(16) ${name}:array, ${Math.ceil(length / 8)}>`);\n } else {\n uniformSnippets.push(`${name}:array, ${Math.ceil(length / 4)}>`);\n }\n } else {\n const typeTemp = length == null || length === 1 ? 
type : `vec${length}<${type}>`;\n uniformSnippets.push(`${name}:${typeTemp}`);\n }\n }\n\n return `\n struct Uniforms { ${uniformSnippets.join(', ')} };\n @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`;\n }\n private variableIndex = 0;\n\n /**\n * Get additional implementation that needs to be added to the shader source.\n */\n get additionalImplementations(): string {\n return this.uniformDeclaration() + this.variables.map(i => i.impl()).join('\\n') +\n this.internalVariables.map(i => i.impl()).join('\\n');\n }\n\n /**\n * Get the variable info of the shader program.\n */\n get variablesInfo(): ProgramUniformVariableInfo[]|undefined {\n if (this.uniforms.length === 0) {\n return undefined;\n }\n\n const uniformWgslTypeToDataType = (type: UniformDataElementType) =>\n ([DataType.uint32, DataType.float16, DataType.float,\n DataType.int32][['u32', 'f16', 'f32', 'i32'].indexOf(type)]);\n return this.uniforms.map(u => ([uniformWgslTypeToDataType(u.type), u.length ?? 1]));\n }\n}\n\nexport const createShaderHelper = (dispatchGroup: [number, number, number], limits: GPUSupportedLimits) =>\n new ShaderHelperImpl(dispatchGroup, limits);\n\n/**\n * This function comes from https://github.com/tensorflow/tfjs/blob/master/tfjs-core/src/ops/broadcast_util.ts#L18-L40\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport const getBroadcastDims = (inShape: readonly number[], outShape: readonly number[]): number[] => {\n const inRank = inShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n};\n\nconst getAdjustedPerm = (inputRank: number, perm: number[]): number[] =>\n (perm && perm.length !== inputRank) ? 
[...(new Array(inputRank).keys())].reverse() : perm;\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] =>\n ShapeUtil.sortBasedOnPerm(inputShape, getAdjustedPerm(inputShape.length, perm));\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nexport const createTransposeProgramInfo = (inputTensor: TensorView, permAttr: number[]): ProgramInfo => {\n const inputDataType = inputTensor.dataType;\n const inputRank = inputTensor.dims.length;\n const perm = getAdjustedPerm(inputRank, permAttr);\n const outputShape = getOutputShape(inputTensor.dims, perm);\n const output = outputVariable('output', inputDataType, outputShape.length);\n const input = inputVariable('a', inputDataType, inputRank);\n let getShaderSource;\n if (perm.length === 2 && perm[0] === 1 && perm[1] === 0) {\n const wgslType = output.type.value;\n const workgroupSize: [number, number, number] = [16, 16, 1];\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n var tile : array, ${workgroupSize[0]}>;\n ${shaderHelper.mainStart(workgroupSize)}\n var x = workgroup_id.x * ${workgroupSize[0]}u + local_id.x;\n var y = workgroup_id.y * ${workgroupSize[0]}u + local_id.y;\n let width = uniforms.output_shape[0];\n let height = uniforms.output_shape[1];\n if (x < width && y < height) {\n tile[local_id.y][local_id.x] = ${input.getByOffset('y * width + x')};\n }\n workgroupBarrier();\n x = workgroup_id.y * ${workgroupSize[0]}u + local_id.x;\n y = workgroup_id.x * ${workgroupSize[0]}u + local_id.y;\n if (x < height && y < width) {\n ${output.setByOffset('y * height + x', 'tile[local_id.x][local_id.y]')}\n }\n }`;\n } else {\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${permFunctionBody(perm, inputRank, input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${output.setByOffset('global_idx', input.getByIndices('aIndices'))}\n }`;\n }\n return {\n name: 'Transpose',\n shaderCache: {hint: `${permAttr}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputSize = ShapeUtil.size(outputShape);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const transpose = (context: ComputeContext, attributes: TransposeAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createTransposeProgramInfo(context.inputs[0], attributes.perm));\n};\n\nexport const parseTransposeAttributes = (attributes: Record): TransposeAttributes =>\n createAttributeWithCacheKey({perm: attributes.perm as number[]});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\nimport {createReduceAttributesFromInputs, ReduceAttributes} from './reduce';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst reduceOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate * candidate',\n logSumExp: 'bestValue + exp(candidate)',\n l1: 'bestValue + abs(candidate)',\n l2: 'bestValue + candidate * candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceSharedOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate',\n logSumExp: 'bestValue + candidate',\n l1: 'bestValue + candidate',\n l2: 'bestValue + candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceInitValues: {[key: string]: string} = {\n max: '_A[offset]',\n min: '_A[offset]',\n mean: '0',\n sum: '0',\n prod: '1',\n sumSquare: '0',\n logSumExp: '0',\n l1: '0',\n l2: '0',\n logSum: '0'\n};\n\nconst reduceOutputValues: {[key: string]: string} = {\n max: 'bestValue',\n min: 'bestValue',\n sum: 'bestValue',\n prod: 'bestValue',\n sumSquare: 'bestValue',\n logSumExp: 'log(bestValue)',\n l1: 'bestValue',\n l2: 'sqrt(bestValue)',\n logSum: 'log(bestValue)'\n};\n\nconst getInnerMostAxes = (numInnerAxes: number, rank: number): number[] => {\n const res = [];\n for (let i = rank - numInnerAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n};\n\nconst computeOutAndReduceShapes = (shape: readonly number[], axes: readonly number[]): [number[], number[]] => {\n const outputShape = [];\n const rank = shape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputShape.push(shape[dim]);\n }\n }\n const reduceShape = axes.map(dim => shape[dim]);\n return [outputShape, reduceShape];\n};\n\nconst expandShapeToKeepDim = (shape: number[], axes: number[]): number[] => {\n const rank = shape.length + axes.length;\n const expandShape = [];\n let shapeIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n expandShape.push(shape[shapeIdx++]);\n } else {\n expandShape.push(1);\n }\n }\n return expandShape;\n};\n\nconst areAxesInnerMostDims = (axes: number[], rank: number): boolean => {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n};\n\nconst getAxesPermutation = (axes: number[], rank: number): number[] => {\n const res = [];\n if (!areAxesInnerMostDims(axes, rank)) {\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n res.push(i);\n }\n }\n axes.forEach(axis => res.push(axis));\n }\n return res;\n};\n\nexport const createReduceSharedProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceType: string,\n outputDataType: DataType, outputShape: number[], reduceShape: number[]): ProgramInfo 
=> {\n const inputShape = inputs[0].dims;\n\n const outputSize = ShapeUtil.size(outputShape);\n const reduceSize = ShapeUtil.size(reduceShape);\n\n const input = inputVariable('_A', inputs[0].dataType, inputShape);\n const output = outputVariable('output', outputDataType, outputShape);\n\n const workgroupSize = 32;\n\n const sharedMemorySnippet = `\n var aBestValues : array;\n `;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('reduceSize', 'u32').declareVariables(input, output)}\n ${sharedMemorySnippet}\n fn DIV_CEIL(a : u32, b : u32) -> u32 {\n return ((a - 1u) / b + 1u);\n }\n ${shaderHelper.mainStart(workgroupSize)}\n\n let outputIndex = global_idx / ${workgroupSize};\n let offset = outputIndex * uniforms.reduceSize;\n\n var bestValue = f32(${reduceInitValues[reduceType]});\n let Length = uniforms.reduceSize;\n for (var k = local_idx; k < Length; k = k + ${workgroupSize}) {\n let candidate = f32(${input.getByOffset('offset + k')});\n bestValue = ${reduceOps[reduceType]};\n }\n aBestValues[local_idx] = bestValue;\n workgroupBarrier();\n\n var reduceSize = min(Length, ${workgroupSize}u);\n for (var currentSize = reduceSize / 2u; reduceSize > 1u;\n currentSize = reduceSize / 2u) {\n let interval = DIV_CEIL(reduceSize, 2u);\n if (local_idx < currentSize) {\n let candidate = aBestValues[local_idx + interval];\n bestValue = ${reduceSharedOps[reduceType]};\n aBestValues[local_idx] = bestValue;\n }\n reduceSize = interval;\n workgroupBarrier();\n }\n\n if (local_idx == 0u) {\n ${\n output.setByOffset(\n 'outputIndex',\n `${\n reduceType === 'mean' ? `${output.type.storage}(bestValue / f32(uniforms.reduceSize))` :\n `${output.type.storage}(${reduceOutputValues[reduceType]})`}`)};\n }\n }`;\n\n // One work group is responsible for only one element of output.\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: outputSize},\n programUniforms: [{type: DataType.uint32, data: reduceSize}]\n }),\n };\n };\n\nconst reduceCommon =\n (context: ComputeContext, name: string, attributes: ReduceAttributes,\n reduceType: 'sum'|'sumSquare'|'prod'|'min'|'max'|'mean'|'logSumExp'|'l1'|'l2'|'logSum'): void => {\n const updatedAttributes: ReduceAttributes =\n context.inputs.length === 1 ? 
attributes : createReduceAttributesFromInputs(context.inputs, attributes);\n\n let updatedAxes = updatedAttributes.axes;\n if (updatedAxes.length === 0 && !updatedAttributes.noopWithEmptyAxes) {\n updatedAxes = context.inputs[0].dims.map((_dim, i) => i);\n }\n const normalizeAxes = ShapeUtil.normalizeAxes(updatedAxes, context.inputs[0].dims.length);\n\n let axes = normalizeAxes;\n let input = context.inputs[0];\n const permutedAxes = getAxesPermutation(axes, context.inputs[0].dims.length);\n if (permutedAxes.length > 0) {\n input = context.compute(\n createTransposeProgramInfo(context.inputs[0], permutedAxes), {inputs: [0], outputs: [-1]})[0];\n axes = getInnerMostAxes(axes.length, input.dims.length);\n }\n\n const [outputShape, reduceShape] = computeOutAndReduceShapes(input.dims, axes);\n let finalOutputShape = outputShape;\n if (updatedAttributes.keepDims) {\n finalOutputShape = expandShapeToKeepDim(outputShape, normalizeAxes);\n }\n\n context.compute(\n createReduceSharedProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['type']}, [input], reduceType,\n context.inputs[0].dataType, finalOutputShape, reduceShape),\n {inputs: [input]});\n };\n\nexport const reduceMeanShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMeanShared', attributes, 'mean');\n};\n\nexport const reduceL1Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL1Shared', attributes, 'l1');\n};\n\nexport const reduceL2Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL2Shared', attributes, 'l2');\n};\n\nexport const reduceLogSumExpShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumExpShared', attributes, 'logSumExp');\n};\n\nexport const reduceMaxShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMaxShared', attributes, 'max');\n};\n\nexport const reduceMinShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMinShared', attributes, 'min');\n};\n\nexport const reduceProdShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceProdShared', attributes, 'prod');\n};\n\nexport const reduceSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumShared', attributes, 'sum');\n};\n\nexport const reduceSumSquareShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumSquareShared', attributes, 'sumSquare');\n};\n\nexport const reduceLogSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumShared', attributes, 'logSum');\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\nimport {reduceL1Shared, reduceL2Shared, reduceLogSumExpShared, reduceLogSumShared, reduceMaxShared, reduceMeanShared, reduceMinShared, reduceProdShared, reduceSumShared, reduceSumSquareShared} from './reduce-shared';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('Reduce op requires 1 or 2 inputs.');\n }\n\n if (inputs.length === 2 && inputs[1].dims.length !== 1) {\n throw new Error('Invalid axes input dims.');\n }\n};\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n noopWithEmptyAxes: boolean;\n axes: number[];\n}\n\nexport type ReduceOp =\n (input: IndicesHelper, output: IndicesHelper,\n axes: readonly number[]) => [string, string, string, string, ...string[]];\n\nconst noOp: ReduceOp = (input) => ['', '', `var value = ${input.getByIndices('input_indices')};`, ''];\nexport const createReduceProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceOp: ReduceOp,\n axesInput: number[], outputDataType: DataType, keepDims = false, noopWithEmptyAxes = false): ProgramInfo => {\n const outputShape: number[] = [];\n const inputShape = inputs[0].dims;\n const inputRank = inputShape.length;\n const axes = ShapeUtil.normalizeAxes(axesInput, inputRank);\n const reduceOnAllAxes = !noopWithEmptyAxes && axes.length === 0;\n inputShape.forEach((d, i) => {\n if (reduceOnAllAxes || axes.indexOf(i) >= 0) {\n if (keepDims) {\n outputShape.push(1);\n } // else { // skip this axis}\n } else {\n outputShape.push(d);\n }\n });\n const outputRank = outputShape.length;\n const outputSize = ShapeUtil.size(outputShape);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = []; // copy output indexes to input indexes\n\n const input = inputVariable('_A', inputs[0].dataType, inputRank);\n const output = outputVariable('output', outputDataType, outputRank);\n const ops = reduceOp(input, output, axes);\n let reduceOps = ops[2];\n\n for (let k = 0, l = 0; k < inputRank; k++) {\n // if this axis is reduced\n if (reduceOnAllAxes || axes.indexOf(k) >= 0) {\n if (keepDims) {\n l++;\n }\n // loop over the d-th axis\n reduceOps = `for(var j${k}: u32 = 0; j${k} < ${inputShape[k]}; j${k}++) {\n ${ops[2].includes('last_index') ? `let last_index = j${k};` : ''}\n ${input.indicesSet('input_indices', k, `j${k}`)}\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`${input.indicesSet('input_indices', k, output.indicesGet('output_indices', l))};`);\n l++;\n }\n }\n return `\n\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var input_indices: ${input.type.indices};\n let output_indices = ${output.offsetToIndices('global_idx')};\n\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${ops[1]}\n ${reduceOps}\n ${ops[3]}\n ${ops.length === 4 ? 
output.setByOffset('global_idx', 'value') : ops.slice(4).join('\\n')}\n }`;\n };\n\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)]\n }),\n };\n };\n\nexport const createReduceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: ReduceAttributes): ReduceAttributes => {\n const axes: number[] = [];\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => axes.push(Number(v)));\n }\n return createAttributeWithCacheKey(\n {axes, keepDims: attributes.keepDims, noopWithEmptyAxes: attributes.noopWithEmptyAxes});\n };\n\nconst runReduceProgram =\n (context: ComputeContext, name: string, attributes: ReduceAttributes, reduceOp: ReduceOp): void => {\n const inputs = context.inputs;\n const updatedAttributes: ReduceAttributes =\n inputs.length === 1 ? attributes : createReduceAttributesFromInputs(inputs, attributes);\n\n context.compute(\n createReduceProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['rank']}, [inputs[0]],\n updatedAttributes.noopWithEmptyAxes && updatedAttributes.axes.length === 0 ? noOp : reduceOp,\n updatedAttributes.axes, inputs[0].dataType, updatedAttributes.keepDims,\n updatedAttributes.noopWithEmptyAxes),\n {inputs: [0]});\n };\n\nconst reduceLogSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSum', attributes, reduceOp);\n};\n\nconst reduceL1Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += abs(${input.getByIndices('input_indices')});`,\n '',\n ];\n runReduceProgram(context, 'ReduceL1', attributes, reduceOp);\n};\n\nconst reduceL2Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += (t * t);`,\n 'value = sqrt(value);',\n ];\n runReduceProgram(context, 'ReduceL2', attributes, reduceOp);\n};\n\nconst reduceLogSumExpNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += exp(${input.getByIndices('input_indices')});`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSumExp', attributes, reduceOp);\n};\n\nconst reduceMaxNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(input.indicesSet('input_indices', k, 0));\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = max(value, 
${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMax', attributes, reduceOp);\n};\n\nconst reduceMeanNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output, axes) => {\n let size = 1.0;\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n // TODO: this depends on the input dims. If we want to use uniform, this need to be updated.\n size *= context.inputs[0].dims[k];\n }\n }\n\n return [\n 'var sum = f32(0);',\n '',\n `sum += f32(${input.getByIndices('input_indices')});`,\n `let value = ${output.type.value}(sum / ${size});`,\n ];\n };\n runReduceProgram(context, 'ReduceMean', attributes, reduceOp);\n};\n\nconst reduceMinNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = min(value, ${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMin', attributes, reduceOp);\n};\n\nconst reduceProdNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(1);`,\n '',\n `value *= ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceProd', attributes, reduceOp);\n};\n\nconst reduceSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceSum', attributes, reduceOp);\n};\n\nconst reduceSumSquareNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += t * t;`,\n '',\n ];\n runReduceProgram(context, 'ReduceSumSquare', attributes, reduceOp);\n};\n\nconst useNaiveReduceMethod =\n (shape: readonly number[], axes: readonly number[], noopWithEmptyAxes: boolean): boolean => {\n if (axes.length === 0) {\n return noopWithEmptyAxes;\n }\n\n let outputSize = 1;\n let reduceSize = 1;\n for (let dim = 0; dim < axes.length; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputSize *= shape[dim];\n } else {\n reduceSize *= shape[dim];\n }\n }\n\n // The condition data is very rough, although considering the count of Execution Unit (EU), the potential\n // work groups in a EU and the counts of loops in the naive and shared methods, also doing experiments\n // on some machines.\n return reduceSize < 32 && outputSize > 1024;\n };\n\nexport const reduceMean = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMeanNaive(context, attributes);\n } else {\n reduceMeanShared(context, attributes);\n }\n};\n\nexport const reduceL1 = (context: 
ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL1Naive(context, attributes);\n } else {\n reduceL1Shared(context, attributes);\n }\n};\n\nexport const reduceL2 = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL2Naive(context, attributes);\n } else {\n reduceL2Shared(context, attributes);\n }\n};\n\nexport const reduceLogSumExp = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumExpNaive(context, attributes);\n } else {\n reduceLogSumExpShared(context, attributes);\n }\n};\n\nexport const reduceMax = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMaxNaive(context, attributes);\n } else {\n reduceMaxShared(context, attributes);\n }\n};\n\nexport const reduceMin = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMinNaive(context, attributes);\n } else {\n reduceMinShared(context, attributes);\n }\n};\n\nexport const reduceProd = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceProdNaive(context, attributes);\n } else {\n reduceProdShared(context, attributes);\n }\n};\n\nexport const reduceSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumNaive(context, attributes);\n } else {\n reduceSumShared(context, attributes);\n }\n};\n\nexport const reduceSumSquare = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumSquareNaive(context, attributes);\n } else {\n reduceSumSquareShared(context, attributes);\n }\n};\n\nexport const reduceLogSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumNaive(context, attributes);\n } else {\n reduceLogSumShared(context, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. 
Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createReduceProgramInfo, ReduceOp} from './reduce';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('ArgMinMaxOp op requires 1 or 2 inputs.');\n }\n if (inputs[0].dataType !== DataType.float) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport interface ArgMinMaxAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n axis: number;\n selectLastIndex: number;\n}\n\nexport const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '<=' : '<'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'ArgMin', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const argMax = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '>=' : '>'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'argMax', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const parseArgMinMaxAttributes = (attributes: Record): ArgMinMaxAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext, GpuDataType, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, tensorTypeToWsglValueType, UniformDataElementType, UniformsArrayType} from './common';\n\nexport const enum AttentionQkvFormat {\n unknown, // enum value not set, or depends on qkv projection implementation details\n qkvBNSH, // for non-packed qkv, permuted\n qkvBSNH, // for non-packed qkv, not permuted, used by memory efficient attention or MultiHeadAttention\n qkvBSN3H, // for TRT fused attention, qkv are packed\n qkvBNSHqkvBS3NH, // for TRT fused causal attention, data has two formats (qkv is 3BNSH, gemm_buffer is BS3NH)\n qKvBSNHxBSN2H, // for TRT fused cross attention, kv are packed\n qkvTNH, // for memory efficient attention, qkv are not packed, and paddings are removed.\n qkvTN3H, // for TRT fused attention, qkv are packed and paddings are removed\n}\n\nexport const enum AttentionMaskType {\n none, // No mask\n mask1dKeySeqLen, // [batch_size], key sequence length\n mask1dEndStart, // [2 * batch_size] with end positions and start positions\n mask1DKeySeqLenStart, // [3 * batch_size + 2] with [key_len[0], ..., key_len[batch_size - 1], query_start[0],\n // ..., query_start[batch_size - 1], query_end[batch_size - 1], key_start[0], ...,\n // key_start[batch_size - 1], key_end[batch_size - 1]]\n mask2dDummy, // dummy mask with shape [1, 1] or [batch_size, 1]. It has same effect as no mask.\n mask2dKeyPadding, // [batch_size, total_sequence_length]\n mask3dAttention, // [batch_size, sequence_length, total_sequence_length]\n mask4dMegatron, // Megatron causal mask with shape [batch_size, 1, max_sequence_length, max_sequence_length]\n maskUnknown\n}\n\nexport interface AttentionParameters {\n batchSize: number;\n sequenceLength: number;\n pastSequenceLength: number;\n kvSequenceLength: number;\n totalSequenceLength: number;\n maxSequenceLength: number;\n inputHiddenSize: number;\n hiddenSize: number;\n vHiddenSize: number;\n headSize: number;\n vHeadSize: number;\n numHeads: number;\n kvNumHeads?: number;\n nReps?: number;\n isUnidirectional?: boolean;\n pastPresentShareBuffer: boolean;\n maskFilterValue?: number;\n maskType: AttentionMaskType;\n scale: number;\n broadcastResPosBias: boolean;\n passPastInKv: boolean;\n qkvFormat: AttentionQkvFormat;\n isPastkvBSNH?: boolean;\n}\n\nexport interface AttentionAttrs {\n numHeads: number;\n kvNumHeads?: number;\n isUnidirectional?: number;\n maskFilterValue?: number;\n scale: number;\n doRotary: number;\n qkvHiddenSizes: number[];\n pastPresentShareBuffer: boolean;\n}\n\nconst validateAttentionInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = 
num_heads * v_head_size\n\n // When past state is used, Q, K and V should have same hidden size (unless we split it into past_key and past_value).\n\n // Input shapes:\n // input (Q/K/V) : (B, S, D_i)\n // weights (Q/K/V) : (D_i, D + D + D_v)\n // bias (Q/K/V) : (D + D + D_v)\n // mask_index : see below\n // past (K/V) : (2, B, N, P, H) or NULL\n // relative_position_bias : (B, N, S, T) or NULL\n\n // For mask_index, the following shapes are supported:\n // NULL, (B, 1), (1, 1)\n // (B), (2 * B), (3 * B + 2)\n // (B, T)\n // (B, S, T)\n // (B, 1, M, M)\n //\n // When a model is pruned (like some attention heads are removed in Q/K/V), input_hidden_size could be larger\n // than hidden dimension of Q, K and V.\n\n const input = inputs[0];\n const weights = inputs[1];\n const bias = inputs[2];\n const maskIndex = inputs[3];\n const past = inputs[4];\n const relativePositionBias = inputs[5];\n\n if (past && relativePositionBias) {\n throw new Error('Attention cannot have both past and relative_position_bias');\n }\n\n if (input.dims.length !== 3) {\n throw new Error('Input \"input\" must have 3 dimensions');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[1];\n const inputHiddenSize = input.dims[2];\n\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimensions');\n }\n\n if (weights.dims.length !== 2) {\n throw new Error('Input \"weights\" is expected to have 2 dimensions');\n }\n\n if (weights.dims[0] !== inputHiddenSize) {\n throw new Error('Input 1 dimension 0 should have same length as dimension 2 of input 0');\n }\n\n if (bias.dims[0] !== weights.dims[1]) {\n throw new Error('Input \"bias\" dimension 0 should have same length as dimension 1 of input \"weights\"');\n }\n\n let qHiddenSize = bias.dims[0] / 3;\n let kHiddenSize = qHiddenSize;\n let vHiddenSize = kHiddenSize;\n if (attributes.qkvHiddenSizes.length > 0) {\n if (attributes.qkvHiddenSizes.length !== 3) {\n throw new Error('qkv_hidden_sizes attribute should have 3 elements');\n }\n for (const sz of attributes.qkvHiddenSizes) {\n if (sz % attributes.numHeads !== 0) {\n throw new Error('qkv_hidden_sizes should be divisible by num_heads');\n }\n }\n\n qHiddenSize = attributes.qkvHiddenSizes[0];\n kHiddenSize = attributes.qkvHiddenSizes[1];\n vHiddenSize = attributes.qkvHiddenSizes[2];\n }\n\n const kvSequenceLength = sequenceLength;\n\n if (qHiddenSize !== kHiddenSize) {\n throw new Error('qkv_hidden_sizes first element should be same as the second');\n }\n\n if (bias.dims[0] !== qHiddenSize + kHiddenSize + vHiddenSize) {\n throw new Error('Input \"bias\" dimension 0 should have same length as sum of Q/K/V hidden sizes');\n }\n\n let pastSequenceLength = 0;\n if (past) {\n if (kHiddenSize !== vHiddenSize) {\n throw new Error('Input \"past\" expect k_hidden_size == v_hidden_size');\n }\n if (past.dims.length !== 5) {\n throw new Error('Input \"past\" must have 5 dimensions');\n }\n if (past.dims[0] !== 2) {\n throw new Error('Input \"past\" first dimension must be 2');\n }\n if (past.dims[1] !== batchSize) {\n throw new Error('Input \"past\" second dimension must be batch_size');\n }\n if (past.dims[2] !== attributes.numHeads) {\n throw new Error('Input \"past\" third dimension must be num_heads');\n }\n if (past.dims[4] !== kHiddenSize / attributes.numHeads) {\n throw new Error('Input \"past\" fifth dimension must be k_hidden_size / num_heads');\n }\n\n if (!attributes.pastPresentShareBuffer) {\n pastSequenceLength = past.dims[3];\n }\n // TODO: handle 
past_seq_len\n }\n\n const totalSequenceLength = kvSequenceLength + pastSequenceLength;\n const maxSequenceLength = -1;\n\n const maskType = AttentionMaskType.none;\n if (maskIndex) {\n // maskType = AttentionMaskType.MASK_UNKNOWN;\n // TODO: handle mask\n throw new Error('Mask not supported');\n }\n\n if (past) {\n throw new Error('past is not supported');\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize,\n hiddenSize: qHiddenSize,\n vHiddenSize,\n headSize: Math.floor(qHiddenSize / attributes.numHeads),\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias: false,\n passPastInKv: false,\n qkvFormat: AttentionQkvFormat.qkvBNSH,\n };\n};\n\nconst createInPlaceSoftmaxProgramInfo = (_context: ComputeContext, input: TensorView, n: number, d: number) => {\n const components = getMaxComponents(d);\n let WG = 64;\n const dComp = d / components;\n if (dComp < WG) {\n WG = 1;\n } else if (dComp / 8 < 64) {\n WG = Math.ceil(dComp / 8);\n }\n const elementsPerThread = Math.ceil(d / components / WG);\n const programUniforms: ProgramUniform[] = [\n {type: input.dataType, data: 1 / d}, {type: DataType.uint32, data: dComp},\n {type: DataType.uint32, data: elementsPerThread}\n ];\n const dataType = tensorTypeToWsglStorageType(input.dataType, components);\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = outputVariable('x', input.dataType, input.dims, components);\n const elemValueType = tensorTypeToWsglValueType(input.dataType);\n const uniforms: UniformsArrayType = [\n {name: 'd_inv', type: elemValueType as UniformDataElementType}, {name: 'd_comp', type: 'u32'},\n {name: 'elements_per_thread', type: 'u32'}\n ];\n\n return `\n var thread_max: array;\n var thread_sum: array;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(inputHelper)}\n ${shaderHelper.mainStart([\n WG, 1, 1\n ])}\n let local_offset = local_idx * uniforms.elements_per_thread;\n let offset = workgroup_id.x * uniforms.d_comp + local_offset;\n\n var thread_max_vector = ${f32Type}(-3.402823e+38f);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n thread_max_vector = max(${f32Type}(x[offset + i]), thread_max_vector);\n }\n thread_max[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'thread_max_vector';\n case 2:\n return 'max(thread_max_vector.x, thread_max_vector.y)';\n case 4:\n return 'max(max(thread_max_vector.x, thread_max_vector.y), max(thread_max_vector.z, thread_max_vector.w))';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var max_value = f32(-3.402823e+38f);\n for (var i = 0u; i < ${WG}; i++) {\n max_value = max(thread_max[i], max_value);\n }\n\n var sum_vector = ${f32Type}(0);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n sum_vector += exp(${f32Type}(x[offset + i]) - max_value);\n }\n thread_sum[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'sum_vector';\n case 2:\n return 'sum_vector.x + sum_vector.y';\n case 4:\n return 'sum_vector.x + sum_vector.y + sum_vector.z + sum_vector.w';\n default:\n throw 
new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var sum: f32 = 0;\n for (var i = 0u; i < ${WG}; i++) {\n sum += thread_sum[i];\n }\n\n if (sum == 0) {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n x[offset + i] = ${inputHelper.type.value}(uniforms.d_inv);\n }\n } else {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n var f32input = ${f32Type}(x[offset + i]);\n x[offset + i] = ${inputHelper.type.value}(exp(f32input - max_value) / sum);\n }\n }\n }`;\n };\n\n return {\n name: 'AttentionProbsSoftmax',\n shaderCache: {hint: `${WG};${dataType};${components}`},\n getShaderSource,\n getRunData: () => ({outputs: [], dispatchGroup: {x: n}, programUniforms}),\n };\n};\n\nconst createAttentionProbsProgramInfo =\n (context: ComputeContext, q: TensorView, key: TensorView, pastKey: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs,\n pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n const probsShape = [parameters.batchSize, parameters.numHeads, parameters.sequenceLength, totalSequenceLength];\n const presentKey = parameters.kvNumHeads === undefined && context.outputCount > 1;\n const presentKeyShape = presentKey ?\n [parameters.batchSize, parameters.numHeads, totalSequenceLength, parameters.headSize] :\n undefined;\n\n // TODO: handle mask\n\n const alpha = attributes.scale === 0 ? 1.0 / Math.sqrt(parameters.headSize) : attributes.scale;\n const components = getMaxComponents(parameters.headSize);\n const vectorizedHeadSize = parameters.headSize / components;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(totalSequenceLength / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: parameters.sequenceLength}, {type: DataType.uint32, data: vectorizedHeadSize},\n {type: DataType.uint32, data: totalSequenceLength}, {type: DataType.uint32, data: parameters.numHeads},\n {type: DataType.float, data: alpha}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: parameters.kvSequenceLength}\n ];\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (pastKey) {\n inputDependencies.push('type');\n }\n if (relativePositionBias) {\n inputDependencies.push('type');\n }\n const outputs = [{dims: probsShape, dataType: q.dataType, gpuDataType: GpuDataType.default}];\n if (presentKey) {\n outputs.push({dims: presentKeyShape!, dataType: q.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const qInput = inputVariable('q', q.dataType, q.dims, components);\n const kInput = inputVariable('key', key.dataType, key.dims, components);\n const inputVars = [qInput, kInput];\n if (pastKey) {\n const pastKeyInput = inputVariable('past_key', pastKey.dataType, pastKey.dims, components);\n inputVars.push(pastKeyInput);\n }\n if (relativePositionBias) {\n inputVars.push(\n inputVariable('relative_position_bias', relativePositionBias.dataType, relativePositionBias.dims));\n }\n const output = outputVariable('output', q.dataType, probsShape);\n const outputVars = [output];\n if (presentKey) {\n outputVars.push(outputVariable('present_key', q.dataType, presentKeyShape!, 
components));\n }\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'alpha', type: 'f32' as UniformDataElementType},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n\n var tileQ: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n // x holds the N and y holds the M\n let headIdx = workgroup_id.z;\n let m = workgroup_id.y * TILE_SIZE;\n let n = workgroup_id.x * TILE_SIZE;\n let qOffset = uniforms.M * uniforms.K * headIdx + m * uniforms.K;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n let kOffset = uniforms.kv_sequence_length * uniforms.K * headIdx;\n let pastKeyOffset = uniforms.past_sequence_length * uniforms.K * headIdx;`;\n } else {\n return `\n let kOffset = uniforms.N * uniforms.K * headIdx + n * uniforms.K;`;\n }\n })()}\n ${presentKey ? 'let presentKeyOffset = headIdx * uniforms.N * uniforms.K;' : ''}\n var value = ${f32Type}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (global_id.y < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = q[qOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n if (n + local_id.y < uniforms.N && w + local_id.x < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n if (n + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_key[pastKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x];\n } else {\n tileK[idx] =\n key[kOffset + (n + local_id.y - uniforms.past_sequence_length) * uniforms.K + w + local_id.x];\n }`;\n } else {\n return 'tileK[idx] = key[kOffset + local_id.y * uniforms.K + w + local_id.x];';\n }\n })()}\n ${\n presentKey ?\n 'present_key[presentKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x] = tileK[idx];' :\n ''}\n }\n workgroupBarrier();\n\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += ${f32Type}(tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * local_id.x + k]);\n }\n\n workgroupBarrier();\n }\n\n let headOffset = headIdx * uniforms.M * uniforms.N;\n if (global_id.y < uniforms.M && global_id.x < uniforms.N) {\n let outputIdx = headOffset + global_id.y * uniforms.N + global_id.x;\n var sum: f32 = ${(() => {\n switch (components) {\n case 1:\n return 'value';\n case 2:\n return 'value.x + value.y';\n case 4:\n return 'value.x + value.y + value.z + value.w';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n output[outputIdx] = ${output.type.value} (sum * uniforms.alpha) + ${\n relativePositionBias ? 
'relative_position_bias[outputIdx]' : '0.0'};\n }\n }`;\n };\n return {\n name: 'AttentionProbs',\n shaderCache: {\n hint: `${components};${relativePositionBias !== undefined};${pastKey !== undefined};${context.outputCount}`,\n inputDependencies\n },\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\n\nconst createVxAttentionScoreProgramInfo =\n (context: ComputeContext, probs: TensorView, v: TensorView, pastValue: TensorView|undefined,\n params: AttentionParameters, pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + params.kvSequenceLength;\n const nReps = params.nReps ? params.nReps : 1;\n const repeatedVHiddenSize = params.vHiddenSize * nReps;\n const presentValue = params.kvNumHeads == null && context.outputCount > 1;\n const presentValueShape =\n presentValue ? [params.batchSize, params.numHeads, totalSequenceLength, params.headSize] : undefined;\n const outputShape = [params.batchSize, params.sequenceLength, repeatedVHiddenSize];\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(params.vHeadSize / TILE_SIZE),\n y: Math.ceil(params.sequenceLength / TILE_SIZE),\n z: params.batchSize * params.numHeads\n };\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: params.sequenceLength}, {type: DataType.uint32, data: totalSequenceLength},\n {type: DataType.uint32, data: params.vHeadSize}, {type: DataType.uint32, data: params.numHeads},\n {type: DataType.uint32, data: repeatedVHiddenSize}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] =\n pastValue ? ['type', 'type', 'type'] : ['type', 'type'];\n const outputs = [{dims: outputShape, dataType: probs.dataType, gpuDataType: GpuDataType.default}];\n if (presentValue) {\n outputs.push({dims: presentValueShape!, dataType: probs.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const probsHelper = inputVariable('probs', probs.dataType, probs.dims);\n const vHelper = inputVariable('v', v.dataType, v.dims);\n const inputVars = [probsHelper, vHelper];\n if (pastValue) {\n inputVars.push(inputVariable('past_value', pastValue.dataType, pastValue.dims));\n }\n const output = outputVariable('output', probs.dataType, outputShape);\n const outputVars = [output];\n if (presentValue) {\n outputVars.push(outputVariable('present_value', probs.dataType, presentValueShape!));\n }\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'v_hidden_size', type: 'u32'},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileQ: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let headIdx = workgroup_id.z;\n let m = global_id.y;\n let n = global_id.x;\n\n let offsetA = headIdx * (uniforms.M * uniforms.K) + m * uniforms.K;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n let pastValueOffset = headIdx * uniforms.N * uniforms.past_sequence_length + n;\n let vOffset = headIdx * uniforms.N * uniforms.kv_sequence_length + 
n;\n `;\n } else {\n return `\n let offsetB = headIdx * uniforms.N * uniforms.K + n;\n `;\n }\n })()}\n ${presentValue ? 'let presentValueOffset = headIdx * uniforms.N * uniforms.K + n;' : ''}\n var value = ${probsHelper.type.storage}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = probs[offsetA + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n if (w + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_value[pastValueOffset + (w + local_id.y) * uniforms.N];\n } else {\n tileK[idx] = v[vOffset + (w + local_id.y - uniforms.past_sequence_length) * uniforms.N];\n }\n `;\n } else {\n return `\n tileK[idx] = v[offsetB + (w + local_id.y) * uniforms.N];\n `;\n }\n })()}\n ${presentValue ? 'present_value[presentValueOffset + (w + local_id.y) * uniforms.N] = tileK[idx];' : ''}\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * k + local_id.x];\n }\n workgroupBarrier();\n }\n\n // we need to transpose output from BNSH_v to BSND_v\n let batchIdx = workgroup_id.z / uniforms.num_heads;\n let currentBatchHeadNumber = workgroup_id.z % uniforms.num_heads;\n if (m < uniforms.M && n < uniforms.N) {\n let outputIdx = batchIdx * uniforms.M * uniforms.v_hidden_size + m * uniforms.v_hidden_size\n + currentBatchHeadNumber * uniforms.N + n;\n output[outputIdx] = value;\n }\n }`;\n };\n\n return {\n name: 'AttentionScore',\n shaderCache: {hint: `${pastValue !== undefined};${context.outputCount}`, inputDependencies},\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const applyAttention =\n (context: ComputeContext, q: TensorView, k: TensorView, v: TensorView, _maskIndex: TensorView|undefined,\n _past: TensorView|undefined, pastKey: TensorView|undefined, pastValue: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs) => {\n const outputCount = context.outputCount;\n const pastSequenceLength =\n parameters.kvNumHeads !== undefined || outputCount > 1 ? parameters.pastSequenceLength : 0;\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n\n const inputsK = (parameters.kvNumHeads === undefined && outputCount > 1 && pastKey) ? [q, k, pastKey] : [q, k];\n if (relativePositionBias) {\n inputsK.push(relativePositionBias);\n }\n\n // Run AttentionProbs\n const probs = context.compute(\n createAttentionProbsProgramInfo(\n context, q, k, outputCount > 1 ? pastKey : undefined, relativePositionBias, parameters, attributes,\n pastSequenceLength),\n {inputs: inputsK, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [-1, 1] : [-1]})[0];\n\n // Run Softmax\n context.compute(\n createInPlaceSoftmaxProgramInfo(\n context, probs, parameters.batchSize * parameters.numHeads * parameters.sequenceLength,\n totalSequenceLength),\n {inputs: [probs], outputs: []});\n\n // Run AttrionScore\n const inputsV =\n (parameters.kvNumHeads === undefined && outputCount > 1 && pastValue) ? [probs, v, pastValue] : [probs, v];\n context.compute(\n createVxAttentionScoreProgramInfo(\n context, probs, v, outputCount > 1 && pastValue ? 
pastValue : undefined, parameters, pastSequenceLength),\n {inputs: inputsV, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [0, 2] : [0]});\n };\n\nconst prepare = (context: ComputeContext, parameters: AttentionParameters) => {\n const outputShape = [\n parameters.batchSize,\n parameters.numHeads,\n parameters.sequenceLength,\n parameters.headSize,\n ];\n const M = parameters.sequenceLength;\n const K = parameters.inputHiddenSize;\n const N = parameters.headSize;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(parameters.headSize / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const inputs = [context.inputs[0], context.inputs[1], context.inputs[2]];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: M}, {type: DataType.uint32, data: K}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: parameters.numHeads}, {type: DataType.uint32, data: parameters.headSize},\n {type: DataType.uint32, data: parameters.hiddenSize},\n {type: DataType.uint32, data: parameters.hiddenSize + parameters.hiddenSize + parameters.vHiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const outputQ = outputVariable('output_q', inputs[0].dataType, outputShape);\n const outputK = outputVariable('output_k', inputs[0].dataType, outputShape);\n const outputV = outputVariable('output_v', inputs[0].dataType, outputShape);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims);\n const weight = inputVariable('weight', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const dataType = input.type.storage;\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'num_heads', type: 'u32'},\n {name: 'head_size', type: 'u32'}, {name: 'hidden_size', type: 'u32'}, {name: 'ldb', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileInput: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightQ: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightK: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightV: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, weight, bias, outputQ, outputK, outputV)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let batchIndex = workgroup_id.z / uniforms.num_heads;\n let headNumber = workgroup_id.z % uniforms.num_heads;\n let m = global_id.y;\n let n = global_id.x;\n\n let inputOffset = batchIndex * (uniforms.M * uniforms.K) + m * uniforms.K;\n let biasOffsetQ = headNumber * uniforms.head_size;\n let biasOffsetK = uniforms.hidden_size + biasOffsetQ;\n let biasOffsetV = uniforms.hidden_size + biasOffsetK;\n\n var valueQ = ${dataType}(0);\n var valueK = ${dataType}(0);\n var valueV = ${dataType}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileInput[TILE_SIZE * local_id.y + local_id.x] = input[inputOffset + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n let offset = n + (w + local_id.y) * uniforms.ldb;\n tileWeightQ[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetQ + offset];\n tileWeightK[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetK + offset];\n tileWeightV[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetV + offset];\n }\n 
workgroupBarrier();\n for (var k: u32 = 0u; k ({\n outputs: [\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n ],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n },\n {inputs, outputs: [-1, -1, -1]});\n};\n\nexport const attention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateAttentionInputs(context.inputs, attributes);\n\n const [q, k, v] = prepare(context, params);\n\n return applyAttention(\n context, q, k, v, context.inputs[4], undefined, undefined, undefined, context.inputs[5], params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface BatchNormAttributes extends AttributeWithCacheKey {\n readonly epsilon: number;\n readonly momentum: number;\n readonly spatial: boolean;\n readonly trainingMode: boolean;\n readonly format: 'NHWC'|'NCHW';\n readonly outputCount: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: BatchNormAttributes): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs');\n }\n\n const checkShapeEqual = (actual: readonly number[], expected: readonly number[], message: string) => {\n const r = expected.length;\n if (r !== actual.length) {\n throw new Error(`${message}: num dimensions != ${r}`);\n }\n expected.forEach((v, i) => {\n if (v !== actual[i]) {\n throw new Error(`${message}: dim[${i}] do not match`);\n }\n });\n };\n\n if (inputs[0].dims.length > 1) {\n const shape = attributes.format === 'NHWC' ?\n (attributes.spatial ? inputs[0].dims.slice(-1) :\n inputs[0].dims.slice(-1).concat(inputs[0].dims.slice(1, inputs[0].dims.length - 1))) :\n inputs[0].dims.slice(1, attributes.spatial ? 2 : undefined);\n checkShapeEqual(inputs[1].dims, shape, 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, shape, 'Invalid input B');\n checkShapeEqual(inputs[3].dims, shape, 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, shape, 'Invalid input var');\n } else {\n checkShapeEqual(inputs[1].dims, [1], 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, [1], 'Invalid input B');\n checkShapeEqual(inputs[3].dims, [1], 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, [1], 'Invalid input var');\n }\n};\n\nconst createBatchNormInferenceProgramInfo =\n (inputs: readonly TensorView[], attributes: BatchNormAttributes): ProgramInfo => {\n const {epsilon, spatial, format} = attributes;\n const yShape = inputs[0].dims;\n const components = spatial ? getMaxComponents(yShape[yShape.length - 1]) : 1;\n const cComponents = format === 'NHWC' && yShape.length > 1 ? 
components : 1;\n const outputSize = ShapeUtil.size(yShape) / components;\n // Only support uniforms for opset version >= 9 (spatial = true).\n const useShapesUniforms = spatial;\n const shapeOrRank = useShapesUniforms ? yShape.length : yShape;\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims, cComponents);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims, cComponents);\n const inputMean = inputVariable('inputMean', inputs[3].dataType, inputs[3].dims, cComponents);\n const inputVar = inputVariable('inputVar', inputs[4].dataType, inputs[4].dims, cComponents);\n const y = outputVariable('y', inputs[0].dataType, shapeOrRank, components);\n // TODO: support inputs with different data type. Current we need to make sure all inputs have the same data type.\n // Otherwise, the shader compilation will fail.\n const calcCOffset = (): string => {\n let cOffset = '';\n if (spatial) {\n cOffset = `let cOffset = ${\n yShape.length === 1 ? '0u' :\n format === 'NHWC' ? `outputIndices[${yShape.length - 1}] / ${components}` :\n 'outputIndices[1]'};`;\n } else {\n if (format === 'NCHW') {\n cOffset = `\n ${y.indicesSet('outputIndices', '0', '0')}\n let cOffset = ${y.indicesToOffset('outputIndices')};`;\n } else {\n // update C channel.\n cOffset = `var cIndices = ${scale.type.indices}(0);\n cIndices[0] = outputIndices[${yShape.length - 1}];`;\n // update D1 x ... x Dn channels.\n for (let i = 1; i < scale.rank; i++) {\n cOffset += `cIndices[${i}] = outputIndices[${i}];`;\n }\n cOffset += `let cOffset = ${scale.indicesToOffset('cIndices')};`;\n }\n }\n return cOffset;\n };\n const getInferenceModeShaderSource = (helper: ShaderHelper) => `\n const epsilon = ${epsilon};\n ${helper.registerUniform('outputSize', 'u32').declareVariables(x, scale, bias, inputMean, inputVar, y)}\n ${helper.mainStart()}\n ${helper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${y.offsetToIndices(`global_idx * ${components}`)};\n ${calcCOffset()}\n let scale = ${scale.getByOffset('cOffset')};\n let bias = ${bias.getByOffset('cOffset')};\n let inputMean = ${inputMean.getByOffset('cOffset')};\n let inputVar = ${inputVar.getByOffset('cOffset')};\n let x = ${x.getByOffset('global_idx')};\n let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias;\n ${y.setByOffset('global_idx', 'value')}\n }`;\n return {\n name: 'BatchNormalization',\n shaderCache: {\n hint: `${attributes.epsilon}_${attributes.format}_${spatial}_${components}`,\n inputDependencies: useShapesUniforms ? 
['rank', 'type', 'type', 'type', 'type'] : undefined,\n },\n getShaderSource: getInferenceModeShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: useShapesUniforms ?\n [\n {type: DataType.uint32, data: outputSize},\n ...createTensorShapeVariables(yShape),\n ] :\n [\n {type: DataType.uint32, data: outputSize},\n ],\n }),\n };\n };\n\nexport const parseBatchNormAttributes = (attributes: Record): BatchNormAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n\nexport const batchNorm = (context: ComputeContext, attributes: Record): void => {\n const {inputs, outputCount} = context;\n const updatedAttributes = parseBatchNormAttributes({...attributes, outputCount});\n if (env.webgpu.validateInputContent) {\n validateInputs(inputs, updatedAttributes);\n }\n if (attributes.trainingMode) {\n throw new Error('BatchNormalization trainingMode is not supported yet.');\n } else {\n context.compute(createBatchNormInferenceProgramInfo(inputs, updatedAttributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![320, 640, 1280].includes(inputs[0].dims[2])) {\n throw new Error('number of channels should be 320, 640 or 1280');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasAddProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims;\n\n const channels = inputs[0].dims[2];\n // since channel number can be only 320/640/1280, it's always divisable by 4\n const outputSize = ShapeUtil.size(outputShape) / 4;\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, outputShape, 4);\n const bias = inputVariable('bias', dataType, [channels], 4);\n const residual = inputVariable('residual', dataType, outputShape, 4);\n const output = outputVariable('output', dataType, outputShape, 4);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const channels = ${channels}u / 4;\n ${shaderHelper.declareVariables(input, bias, residual, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let value = ${input.getByOffset('global_idx')}\n + ${bias.getByOffset('global_idx % channels')} + ${residual.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', 'value')}\n }`;\n\n return {\n name: 'BiasAdd',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasAdd = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasAddProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {MAX_CLIP, MIN_CLIP, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType} from './common';\n\ntype BuiltinFunctionName = string;\ntype ElementwiseCustomExpression = (expression: string) => string;\ntype ElementwiseFunctionCall = BuiltinFunctionName|ElementwiseCustomExpression;\n\nconst createElementwiseProgramShader =\n (shaderHelper: ShaderHelper, datasize: number, inputDataType: number, outputDataType: number,\n funcCall: ElementwiseFunctionCall, additionalImplementation?: string): string => {\n const vecSize = Math.ceil(datasize / 4);\n\n let expression = '';\n if (typeof funcCall === 'string') {\n expression = `${funcCall}(a)`;\n } else {\n expression = funcCall('a');\n }\n\n const input = inputVariable('inputData', inputDataType, [vecSize], 4);\n const output = outputVariable('outputData', outputDataType, [vecSize], 4);\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n\n let a = ${input.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', expression)}\n }`;\n };\n\nconst createElementwiseProgramInfo =\n (input: TensorView, name: string, funcCall: ElementwiseFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType: number = input.dataType): ProgramInfo => ({\n name,\n shaderCache: {hint: cacheKey, inputDependencies: ['type']},\n getShaderSource: shaderHelper => createElementwiseProgramShader(\n shaderHelper, ShapeUtil.size(input.dims), input.dataType, outputDataType, funcCall, additionalImplementation),\n getRunData: (inputTensors) => ({\n outputs: [{dims: input.dims, dataType: outputDataType}],\n dispatchGroup:\n {x: Math.ceil(ShapeUtil.size(inputTensors[0].dims) / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(input.dims) / 4)},\n ],\n })\n });\n\nexport const abs = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Abs', 'abs'));\n};\n\nexport const acos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acos', 'acos'));\n};\n\nexport const acosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acosh', 'acosh'));\n};\n\nexport const asin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asin', 'asin'));\n};\n\nexport const asinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asinh', 'asinh'));\n};\n\nexport const atan = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atan', 'atan'));\n};\nexport const atanh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atanh', 'atanh'));\n};\n\nexport interface CastAttributes extends AttributeWithCacheKey {\n readonly to: number;\n readonly saturate?: boolean;\n}\n\nexport const 
parseCastAttributes = (attributes: Record): CastAttributes =>\n createAttributeWithCacheKey(attributes as {to: number});\n\n\nexport const cast = (context: ComputeContext, attributes: CastAttributes): void => {\n let func: ElementwiseFunctionCall;\n switch (attributes.to) {\n case DataType.float16:\n func = 'vec4';\n break;\n case DataType.float:\n func = 'vec4';\n break;\n case DataType.uint32:\n func = 'vec4';\n break;\n case DataType.int32:\n func = 'vec4';\n break;\n case DataType.bool:\n func = 'vec4';\n break;\n default:\n throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${attributes.to}`);\n }\n context.compute(\n createElementwiseProgramInfo(context.inputs[0], 'Cast', func, undefined, attributes.cacheKey, attributes.to));\n};\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nconst generateClipAttributesFromInputs = (inputs: readonly TensorView[]): ClipAttributes => {\n const min = (inputs.length >= 2 && inputs[1].data !== 0) ? inputs[1].getFloat32Array()[0] : MIN_CLIP;\n const max = (inputs.length >= 3 && inputs[2].data !== 0) ? inputs[2].getFloat32Array()[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const clip = (context: ComputeContext, clipAttributes: ClipAttributes): void => {\n const attributes = context.inputs.length === 1 ? clipAttributes : generateClipAttributesFromInputs(context.inputs);\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(\n createElementwiseProgramInfo(\n context.inputs[0], 'Clip', a => `clamp(${a}, clip_min_, clip_max_)`, `\n const clip_min_: vec4<${dataType}> = vec4(${dataType}(${attributes.min}));\n const clip_max_: vec4<${dataType}> = vec4(${dataType}(${attributes.max}));\n`,\n attributes.cacheKey),\n {inputs: [0]});\n};\n\nexport const ceil = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Ceil', 'ceil'));\n};\n\nexport const cos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cos', 'cos'));\n};\n\nexport const cosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cosh', 'cosh'));\n};\n\nexport interface AlphaAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const parseAlphaAttributes = (attributes: Record): AlphaAttributes =>\n createAttributeWithCacheKey(attributes as {alpha: number});\n\nexport const elu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Elu', a => `elu_vf32(${a})`, `\n const elu_alpha_ = ${dataType}(${attributes.alpha});\n\n fn elu_f32(a: ${dataType}) -> ${dataType} {\n return select((exp(a) - 1.0) * elu_alpha_, a, a >= 0.0);\n }\n\n fn elu_vf32(v: vec4<${dataType}>) -> vec4<${dataType}> {\n return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w));\n }`,\n attributes.cacheKey));\n};\n\nexport const erfImpl = (varType = 'f32') => `\nconst r0: ${varType} = 0.3275911;\nconst r1: ${varType} = 0.254829592;\nconst r2: ${varType} = -0.284496736;\nconst r3: ${varType} = 1.421413741;\nconst r4: ${varType} = -1.453152027;\nconst r5: ${varType} = 1.061405429;\n\nfn erf_vf32(v: vec4<${varType}>) -> vec4<${varType}> {\n let absv = abs(v);\n let x = 1.0 / (1.0 + r0 * absv);\n return sign(v) 
* (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv));\n}`;\n\nexport const erf = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Erf', a => `erf_vf32(${a})`, erfImpl(dataType)));\n};\n\nexport const exp = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Exp', 'exp'));\n};\n\nexport const floor = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Floor', 'floor'));\n};\n\nexport const gelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Gelu', a => `0.5 * ${a} * (1.0 + erf_vf32(${a} * 0.7071067811865475))`, erfImpl(dataType)));\n};\n\nexport const leakyRelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'LeakyRelu', a => `select(leaky_relu_alpha_ * ${a}, ${a}, ${a} >= vec4<${dataType}>(0.0))`,\n `const leaky_relu_alpha_ = ${dataType}(${attributes.alpha});`, attributes.cacheKey));\n};\n\nexport const not = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Not', a => `!${a}`));\n};\n\nexport const neg = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Neg', a => `-${a}`));\n};\n\nexport const reciprocal = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Reciprocal', a => `1.0/${a}`));\n};\n\nexport const relu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Relu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > vec4<${dataType}>(0.0))`));\n};\n\nexport const sigmoid = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sigmoid', a => `(1.0 / (1.0 + exp(-${a})))`));\n};\n\nexport interface HardSigmoidAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n readonly beta: number;\n}\n\nexport const parseHardSigmoidAttributes = (attributes: Record): HardSigmoidAttributes =>\n createAttributeWithCacheKey(attributes as {\n alpha: number;\n beta: number;\n });\n\nexport const hardSigmoid = (context: ComputeContext, attributes: HardSigmoidAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'HardSigmoid',\n a => `max(vec4<${dataType}>(0.0), min(vec4<${dataType}>(1.0), ${attributes.alpha} * ${a} + vec4<${dataType}>(${\n attributes.beta})))`,\n undefined, attributes.cacheKey));\n};\n\nexport const sin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sin', 'sin'));\n};\n\nexport const sinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sinh', 'sinh'));\n};\n\nexport const sqrt = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sqrt', 'sqrt'));\n};\n\nexport const tan = (context: ComputeContext): void => {\n 
context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tan', 'tan'));\n};\n\nexport const tanhExpression = (a: string) => `sign(${a}) * (1 - exp(-2 * abs(${a}))) / (1 + exp(-2 * abs(${a})))`;\n\nexport const tanh = (context: ComputeContext): void => {\n // TODO: revisit after https://github.com/gpuweb/gpuweb/issues/4458 is resolved\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tanh', tanhExpression));\n};\n\nexport const fastGeluImpl = (varType = 'f32') => `\nconst fast_gelu_a: ${varType} = 0.5;\nconst fast_gelu_b: ${varType} = 0.7978845608028654;\nconst fast_gelu_c: ${varType} = 0.035677408136300125;\n\nfn tanh_v(v: vec4<${varType}>) -> vec4<${varType}> {\n return ${tanhExpression('v')};\n}\n`;\n\nexport const fastGeluExpression = (x: string) =>\n `(fast_gelu_a + fast_gelu_a * tanh_v(${x} * (fast_gelu_c * ${x} * ${x} + fast_gelu_b))) * ${x}`;\n\nexport const fastGelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'FastGelu', fastGeluExpression, fastGeluImpl(dataType), undefined,\n context.inputs[0].dataType));\n};\n\nexport const thresholdedRelu = (context: ComputeContext, attributes: AlphaAttributes): number => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'ThresholdedRelu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > thresholded_relu_alpha_)`,\n `const thresholded_relu_alpha_ = vec4<${dataType}>(${attributes.alpha});`, attributes.cacheKey));\n return 0;\n};\n\nexport const log = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Log', 'log'));\n};\n\nexport const quickGeluImpl = (varType: string, alpha: number) => `\nconst alpha = vec4<${varType}>(${alpha});\nconst one = ${varType}(1.0);\nconst zero = ${varType}(0.0);\n\nfn quick_gelu_impl(x: vec4<${varType}>) -> vec4<${varType}> {\n let v = x *alpha;\n var x1 : vec4<${varType}>;\n for (var i = 0; i < 4; i = i + 1) {\n if (v[i] >= zero) {\n x1[i] = one / (one + exp(-v[i]));\n } else {\n x1[i] = one - one / (one + exp(v[i]));\n }\n }\n return x * x1;\n}\n`;\n\nexport const quickGeluExpression = (x: string) => `quick_gelu_impl(${x})`;\n\nexport const quickgelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'QuickGelu', quickGeluExpression, quickGeluImpl(dType, attributes.alpha), attributes.cacheKey,\n context.inputs[0].dataType));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType} from './common';\nimport {erfImpl} from './unary-op';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![2560, 5120, 10240].includes(inputs[0].dims[2])) {\n throw new Error('hidden state should be 2560, 5120 or 10240');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasSplitGeluProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n outputShape[2] = outputShape[2] / 2;\n\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims, 4);\n const bias = inputVariable('bias', inputs[0].dataType, [inputs[0].dims[2]], 4);\n const output = outputVariable('output', inputs[0].dataType, outputShape, 4);\n\n const outputSize = ShapeUtil.size(outputShape) / 4;\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const M_SQRT2 = sqrt(2.0);\n const halfChannels = ${inputs[0].dims[2] / 4 / 2}u;\n\n ${shaderHelper.declareVariables(input, bias, output)}\n\n ${erfImpl(dataType)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let biasIdx = global_idx % halfChannels;\n let batchIndex = global_idx / halfChannels;\n let inputOffset = biasIdx + batchIndex * halfChannels * 2;\n let valueLeft = input[inputOffset] + bias[biasIdx];\n let valueRight = input[inputOffset + halfChannels] + bias[biasIdx + halfChannels];\n let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1);\n\n ${output.setByOffset('global_idx', 'valueLeft * geluRight')}\n }`;\n\n return {\n name: 'BiasSplitGelu',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasSplitGelu = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasSplitGeluProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype BuiltinFunctionName = string;\ntype BinaryCustomExpression = (expressionA: string, expressionB: string) => string;\ntype BinaryFunctionCall = BuiltinFunctionName|BinaryCustomExpression|{\n scalar: BinaryCustomExpression;\n vector: BinaryCustomExpression;\n};\n\nconst createBinaryOpProgramShader =\n (shaderHelper: ShaderHelper, dimsA: readonly number[], dimsB: readonly number[], dimsOutput: readonly number[],\n vectorize: boolean, doBroadcast: boolean, sharedDimensionDivisibleBy4: boolean, funcCall: BinaryFunctionCall,\n typeA: number, typeB: number, typeOutput: number, additionalImplementation?: string) => {\n let expressionScalar: BinaryCustomExpression;\n let expressionVector: BinaryCustomExpression;\n if (typeof funcCall === 'string') {\n expressionScalar = expressionVector = (a, b) => `${funcCall}((${a}),(${b}))`;\n } else if (typeof funcCall === 'function') {\n expressionScalar = expressionVector = funcCall;\n } else {\n expressionScalar = funcCall.scalar;\n expressionVector = funcCall.vector;\n }\n\n const output = outputVariable('outputData', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('aData', typeA, dimsA.length, 4);\n const b = inputVariable('bData', typeB, dimsB.length, 4);\n\n let assignment: string;\n if (vectorize) {\n if (doBroadcast) {\n const isAOneElement = ShapeUtil.size(dimsA) === 1;\n const isBOneElement = ShapeUtil.size(dimsB) === 1;\n const aLastDimDivisibleBy4 = dimsA.length > 0 && dimsA[dimsA.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = dimsB.length > 0 && dimsB[dimsB.length - 1] % 4 === 0;\n if (isAOneElement || isBOneElement) {\n assignment = output.setByOffset(\n 'global_idx',\n expressionVector(\n isAOneElement ? `${a.type.value}(${a.getByOffset('0')}.x)` : a.getByOffset('global_idx'),\n isBOneElement ? 
`${b.type.value}(${b.getByOffset('0')}.x)` : b.getByOffset('global_idx')));\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx * 4u')};\n let offsetA = ${a.broadcastedIndicesToOffset('outputIndices', output)};\n let offsetB = ${b.broadcastedIndicesToOffset('outputIndices', output)};\n ${\n output.setByOffset(\n 'global_idx',\n expressionVector(\n sharedDimensionDivisibleBy4 || aLastDimDivisibleBy4 ?\n a.getByOffset('offsetA / 4u') :\n `${a.type.value}(${a.getByOffset('offsetA / 4u')}[offsetA % 4u])`,\n sharedDimensionDivisibleBy4 || bLastDimDivisibleBy4 ?\n b.getByOffset('offsetB / 4u') :\n `${b.type.value}(${b.getByOffset('offsetB / 4u')}[offsetB % 4u])`))}\n `;\n }\n } else {\n assignment = output.setByOffset(\n 'global_idx', expressionVector(a.getByOffset('global_idx'), b.getByOffset('global_idx')));\n }\n } else {\n if (!doBroadcast) {\n throw new Error('no necessary to use scalar implementation for element-wise binary op implementation.');\n }\n\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `aData[indexA${x}][componentA${x}]`;\n const expressionB = `bData[indexB${x}][componentB${x}]`;\n return `\n let outputIndices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offsetA${x} = ${a.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let offsetB${x} = ${b.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let indexA${x} = offsetA${x} / 4u;\n let indexB${x} = offsetB${x} / 4u;\n let componentA${x} = offsetA${x} % 4u;\n let componentB${x} = offsetB${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expressionScalar(expressionA, expressionB)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n outputData[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('outputData[global_idx]', 0)}\n ${singleAssignment('outputData[global_idx]', 1)}\n ${singleAssignment('outputData[global_idx]', 2)}\n ${singleAssignment('outputData[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(a, b, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createBinaryOpProgramInfo =\n (name: string, cacheKey: string, a: TensorView, b: TensorView, funcCall: BinaryFunctionCall,\n additionalImplementation?: string, outputDataType: number = a.dataType): ProgramInfo => {\n const isBroadcast = !ShapeUtil.areEqual(a.dims, b.dims);\n let outputShape = a.dims;\n let outputSize = ShapeUtil.size(a.dims);\n\n let vectorize = false;\n let sharedDimensionDivisibleBy4 = false;\n\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n const cacheKeyAux = [isBroadcast];\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(a.dims, b.dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n const isAOneElement = ShapeUtil.size(a.dims) === 1;\n const isBOneElement = ShapeUtil.size(b.dims) === 1;\n const aLastDimDivisibleBy4 = a.dims.length > 0 && a.dims[a.dims.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = b.dims.length > 0 && b.dims[b.dims.length - 1] % 4 === 0;\n cacheKeyAux.push(isAOneElement);\n cacheKeyAux.push(isBOneElement);\n cacheKeyAux.push(aLastDimDivisibleBy4);\n cacheKeyAux.push(bLastDimDivisibleBy4);\n // check whether vectorize can be enabled\n let sharedDimension = 1;\n for (let i = 1; i < outputShape.length; i++) {\n const dimA = a.dims[a.dims.length - i] ?? 1;\n const dimB = b.dims[b.dims.length - i] ?? 1;\n if (dimA === dimB) {\n sharedDimension *= dimA;\n } else {\n break;\n }\n }\n if (sharedDimension % 4 === 0) {\n sharedDimensionDivisibleBy4 = true;\n vectorize = true;\n } else if (isAOneElement || isBOneElement || aLastDimDivisibleBy4 || bLastDimDivisibleBy4) {\n vectorize = true;\n }\n } else {\n // element-wise\n vectorize = true;\n }\n cacheKeyAux.push(vectorize);\n\n return {\n name,\n shaderCache: {\n hint: cacheKey + cacheKeyAux.map((x) => x.toString()).join('_'),\n inputDependencies: ['rank', 'rank'],\n },\n getShaderSource: (shaderHelper) => createBinaryOpProgramShader(\n shaderHelper, a.dims, b.dims, outputShape, vectorize, isBroadcast, sharedDimensionDivisibleBy4, funcCall,\n a.dataType, b.dataType, outputDataType, additionalImplementation),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* component size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(outputShape) / 4)},\n ...createTensorShapeVariables(a.dims, b.dims, outputShape)\n ],\n }),\n };\n };\n\nconst runBinaryOp =\n (context: ComputeContext, name: string, funcCall: BinaryFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType?: number): void => {\n context.compute(createBinaryOpProgramInfo(\n name, cacheKey ?? '', context.inputs[0], context.inputs[1], funcCall, additionalImplementation,\n outputDataType));\n };\n\nexport const add = (context: ComputeContext): void => {\n runBinaryOp(context, 'Add', (a, b) => `${a}+${b}`);\n};\n\nexport const div = (context: ComputeContext): void => {\n runBinaryOp(context, 'Div', (a, b) => `${a}/${b}`);\n};\n\nexport const equal = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Equal', ({scalar: (a, b) => `u32(${a}==${b})`, vector: (a, b) => `vec4(${a}==${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const mul = (context: ComputeContext): void => {\n runBinaryOp(context, 'Mul', (a, b) => `${a}*${b}`);\n};\n\nexport const pow = (context: ComputeContext): void => {\n const type = inputVariable('input', context.inputs[0].dataType, context.inputs[0].dims).type.value;\n const roundStr = type === 'i32' ? 
'round' : '';\n runBinaryOp(\n context, 'Pow', ({scalar: (a, b) => `pow_custom(${a},${b})`, vector: (a, b) => `pow_vector_custom(${a},${b})`}),\n `\n fn pow_custom(a : ${type}, b : ${type}) -> ${type} {\n if (b == ${type}(0.0)) {\n return ${type}(1.0);\n } else if (a < ${type}(0.0) && f32(b) != floor(f32(b))) {\n return ${type}(pow(f32(a), f32(b))); // NaN\n }\n return select(sign(a), ${type}(1.0), round(f32(abs(b) % ${type}(2.0))) != 1.0) * ${type}(${\n roundStr}(pow(f32(abs(a)), f32(b))));\n }\n fn pow_vector_custom(a : vec4<${type}>, b : vec4<${type}>) -> vec4<${type}> {\n // TODO: implement vectorized pow\n return vec4<${type}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w));\n }\n `);\n};\n\nexport const sub = (context: ComputeContext): void => {\n runBinaryOp(context, 'Sub', (a, b) => `${a}-${b}`);\n};\n\nexport const greater = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Greater', ({scalar: (a, b) => `u32(${a}>${b})`, vector: (a, b) => `vec4(${a}>${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const less = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Less', ({scalar: (a, b) => `u32(${a}<${b})`, vector: (a, b) => `vec4(${a}<${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const greaterOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'GreaterOrEqual', ({scalar: (a, b) => `u32(${a}>=${b})`, vector: (a, b) => `vec4(${a}>=${b})`}),\n undefined, undefined, DataType.bool);\n};\n\nexport const lessOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'LessOrEqual', ({scalar: (a, b) => `u32(${a}<=${b})`, vector: (a, b) => `vec4(${a}<=${b})`}),\n undefined, undefined, DataType.bool);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], axis: number): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n const referenceIndex = 0;\n const referenceInput = inputs[referenceIndex];\n const inputType = referenceInput.dataType;\n const inputRank = referenceInput.dims.length;\n inputs.forEach((input, i) => {\n if (i === referenceIndex) {\n return;\n }\n // make sure types of all inputs match\n if (input.dataType !== inputType) {\n throw new Error('input tensors should be one type');\n }\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputRank) {\n throw new Error('input tensors should have the same shape');\n }\n input.dims.forEach((dim, i) => {\n if (i !== axis && dim !== referenceInput.dims[i]) {\n throw new Error('non concat dimensions must match');\n }\n });\n });\n};\n\nconst calculateInputIndexImpl = (numberOfTensors: number, sizeInConcatAxisStr: string): string => `\n fn calculateInputIndex(index: u32) -> u32 {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n for (var i: u32 = 0u; i < ${numberOfTensors}; i += 1u ) {\n if (index < sizeInConcatAxis[i]) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n }`;\n\nconst assignOutputData = (inputs: readonly IndicesHelper[], output: IndicesHelper) => {\n const numberOfTensors = inputs.length;\n\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = output.setByOffset('global_idx', inputs[i].getByIndices('indices'));\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (inputIndex == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (inputIndex == ${i}) { ${returnSnippet} }`);\n }\n }\n return codeLines.join('\\n');\n};\n\nconst createConcatProgramInfo =\n (inputs: readonly TensorView[], adjustedAxis: number, outputShape: number[], dataType: DataType): ProgramInfo => {\n const outputSize = ShapeUtil.size(outputShape);\n\n const sizeInConcatAxis = new Array(inputs.length);\n const inputVars = new Array(inputs.length);\n\n let previousSum = 0;\n const inputDependencies: ProgramInputTensorInfoDependency[] = [];\n const inputRanks = [];\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: outputSize}];\n for (let i = 0; i < inputs.length; ++i) {\n previousSum += inputs[i].dims[adjustedAxis];\n sizeInConcatAxis[i] = previousSum;\n inputRanks.push(inputs[i].dims.length);\n inputVars[i] = inputVariable(`input${i}`, dataType, inputRanks[i]);\n inputDependencies.push('rank');\n programUniforms.push({type: DataType.uint32, data: sizeInConcatAxis[i]});\n }\n for (let i = 0; i < inputs.length; ++i) {\n programUniforms.push(...createTensorShapeVariables(inputs[i].dims));\n }\n 
programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const output = outputVariable('output', dataType, outputShape.length);\n const indicesAxis = output.indicesGet('indices', adjustedAxis);\n const sizeInConcatAxisStr =\n Array.from(Array(sizeInConcatAxis.length).keys()).map(i => `uniforms.sizeInConcatAxis${i}`).join(',');\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${(() => {\n shaderHelper.registerUniform('outputSize', 'u32');\n for (let i = 0; i < inputs.length; i++) {\n shaderHelper.registerUniform(`sizeInConcatAxis${i}`, 'u32');\n }\n return shaderHelper.declareVariables(...inputVars, output);\n })()}\n\n ${calculateInputIndexImpl(sizeInConcatAxis.length, sizeInConcatAxisStr)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n var indices = ${output.offsetToIndices('global_idx')};\n\n let inputIndex = calculateInputIndex(${indicesAxis});\n if (inputIndex != 0u) {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n ${indicesAxis} -= sizeInConcatAxis[inputIndex - 1u];\n }\n\n ${assignOutputData(inputVars, output)}\n }`;\n\n return {\n name: 'Concat',\n shaderCache: {hint: `${adjustedAxis}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const concat = (context: ComputeContext, attributes: ConcatAttributes): void => {\n const inputs = context.inputs;\n const inputShape = inputs[0].dims;\n const adjustedAxis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n validateInputs(inputs, adjustedAxis);\n const outputShape = inputShape.slice();\n outputShape[adjustedAxis] =\n inputs.reduce((sum, input) => sum + (input.dims.length > adjustedAxis ? input.dims[adjustedAxis] : 0), 0);\n // 0 length tensors are valid for concat, remove them\n const nonEmptyInputs = inputs.filter(input => ShapeUtil.size(input.dims) > 0);\n context.compute(\n createConcatProgramInfo(nonEmptyInputs, adjustedAxis, outputShape, inputs[0].dataType), {inputs: nonEmptyInputs});\n};\n\nexport const parseConcatAttributes = (attributes: Record): ConcatAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {MAX_CLIP, MIN_CLIP} from '../../util';\nimport {ProgramUniform} from '../types';\n\nimport {UniformsArrayType} from './common';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly alpha?: number;\n readonly beta?: number;\n}\n\nexport const getActivationSnippet =\n (attributes: InternalActivationAttributes, valueType: string, baseType = 'f32'): string => {\n switch (attributes.activation) {\n case 'Relu':\n return `value = max(value, ${valueType}(0.0));`;\n case 'Sigmoid':\n return `value = (${valueType}(1.0) / (${valueType}(1.0) + exp(-value)));`;\n case 'Clip':\n return `value = clamp(value, ${valueType}(${baseType}(uniforms.clip_min)), ${valueType}(${\n baseType}(uniforms.clip_max)));`;\n case 'HardSigmoid':\n return `value = max(${valueType}(0.0), min(${valueType}(1.0), ${baseType}(uniforms.alpha) * value + ${\n baseType}(uniforms.beta)));`;\n case 'LeakyRelu':\n return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;\n case '':\n return '';\n // TODO: adding other activations that can be fused.\n default:\n throw new Error(`Unsupported activation ${attributes.activation}`);\n }\n };\n\nexport const appendActivationUniformsData =\n (attributes: InternalActivationAttributes, programUniform: ProgramUniform[]) => {\n if (attributes.activation === 'Clip') {\n programUniform.push(\n {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});\n } else if (attributes.activation === 'HardSigmoid') {\n programUniform.push(\n {type: DataType.float, data: attributes.alpha!}, {type: DataType.float, data: attributes.beta!});\n } else if (attributes.activation === 'LeakyRelu') {\n programUniform.push({type: DataType.float, data: attributes.alpha!});\n }\n };\n\nexport const appendActivationUniforms = (attributes: InternalActivationAttributes, uniforms: UniformsArrayType) => {\n if (attributes.activation === 'Clip') {\n uniforms.push({name: 'clip_max', type: 'f32'}, {name: 'clip_min', type: 'f32'});\n } else if (attributes.activation === 'HardSigmoid') {\n uniforms.push({name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'});\n } else if (attributes.activation === 'LeakyRelu') {\n uniforms.push({name: 'alpha', type: 'f32'});\n }\n};\n\nexport const parseInternalActivationAttributes =\n (attributes: Record|undefined): InternalActivationAttributes => {\n const activation = attributes?.activation as string || '';\n if (activation === 'HardSigmoid') {\n const [alpha, beta] = attributes?.activation_params as [number, number] || [0.2, 0.5];\n return {activation, alpha, beta};\n } else if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes?.activation_params as [number, number] || [MIN_CLIP, MAX_CLIP];\n return {activation, clipMax, clipMin};\n } else if (activation === 'LeakyRelu') {\n const [alpha] = attributes?.activation_params as [number] || [0.01];\n return {activation, alpha};\n }\n return {activation};\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/activation_util.ts\n//\n// modified to fit the needs of the project\n\nexport const typeSnippet = (component: number, dataType: string) => {\n switch (component) {\n case 1:\n return dataType;\n case 2:\n return `vec2<${dataType}>`;\n case 3:\n return `vec3<${dataType}>`;\n case 4:\n return `vec4<${dataType}>`;\n default:\n throw new Error(`${component}-component is not supported.`);\n }\n};\n\nexport const biasSnippet = (hasBias: boolean): string => `\n ${hasBias ? 'value = value + getBiasByOutputCoords(coords);' : ''}\n `;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-core/src/ops/conv_util.ts\n//\n// modified to fit the needs of the project\n\nexport const utilFunctions = (strideStr: string) => (`\nfn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 {\n return dot(coords, vec4(\n shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1));\n}\nfn getOutputIndexFromCoords(coords : vec4) -> i32 {\n return dot(coords, vec4(\n i32(${strideStr}.x), i32(${strideStr}.y), i32(${strideStr}.z), 1));\n}\n`);\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/matmul_packed_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getBroadcastDims, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from '../fuse-utils';\n\nimport {typeSnippet} from './activation_util';\n\nconst writeDataToSubAVec4Snippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRow + innerRow,\n kStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst calculateResultSnippet = (transposeA: boolean, innerElementSize: number) => {\n if (transposeA) {\n return `\n let ACached0 = mm_Asub[k * innerElementSize][localRow];\n let ACached1 = mm_Asub[k * innerElementSize + 1][localRow];\n let ACached2 = mm_Asub[k * innerElementSize + 2][localRow];\n ${innerElementSize === 3 ? '' : 'let ACached3 = mm_Asub[k * innerElementSize + 3][localRow];'}\n for (var i = 0; i < rowPerThread; i = i + 1) {\n acc[i] = BCached0 * ACached0[i] + acc[i];\n acc[i] = BCached1 * ACached1[i] + acc[i];\n acc[i] = BCached2 * ACached2[i] + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached3[i] + acc[i];'}\n }`;\n } else {\n return `\n for (var i = 0; i < rowPerThread; i = i + 1) {\n let ACached = mm_Asub[tileRow + i][k];\n acc[i] = BCached0 * ACached.x + acc[i];\n acc[i] = BCached1 * ACached.y + acc[i];\n acc[i] = BCached2 * ACached.z + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached.w + acc[i];'}\n }`;\n }\n};\n\nexport const makeMatMulPackedVec4Source =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32): string => {\n const tileAOuter = workgroupSize[1] * workPerThread[1];\n const tileBOuter = workgroupSize[0] * workPerThread[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? 
tileInner : tileAOuter;\n const innerElementSize = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n\n if (!(((transposeA && innerElementSize === 4 && workPerThread[1] === 4) ||\n (!transposeA && (innerElementSize === 3 || innerElementSize === 4))) &&\n tileAWidth % workgroupSize[0] === 0 && tileInner % workgroupSize[1] === 0 && workPerThread[0] === 4)) {\n throw new Error(`If transposeA ${transposeA} is true, innerElementSize ${\n innerElementSize} and workPerThread[1] ${workPerThread[1]} must be 4.\n Otherwise, innerElementSize ${innerElementSize} must be 3 or 4.\n tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${workgroupSize[0]}. tileInner ${\n tileInner} must be divisible by workgroupSize[1] ${workgroupSize[1]}. colPerThread ${\n workPerThread[0]} must be 4.`);\n }\n return `\nvar mm_Asub: array, ${tileAWidth / innerElementSize}>, ${tileAHight}>;\nvar mm_Bsub: array, ${tileBOuter / workPerThread[0]}>, ${tileInner}>;\n\nconst rowPerThread = ${workPerThread[1]};\nconst colPerThread = ${workPerThread[0]};\nconst innerElementSize = ${innerElementSize};\nconst tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let localRow = i32(localId.y);\n let tileRow = localRow * rowPerThread;\n let tileCol = i32(localId.x);\n\n let globalRow =i32(globalId.y) * rowPerThread;\n let globalCol = i32(globalId.x);\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\n let num_tiles = ${splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc: array, rowPerThread>;\n\n // Loop over shared dimension.\n let tileRowB = localRow * ${rowPerThreadB};\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let inputRow = tileRow + innerRow;\n let inputCol = tileCol;\n ${writeDataToSubAVec4Snippet(transposeA, batchDims)}\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch, kStart + inputRow, globalCol${\n batchDims ? ', batchIndices' : ''});\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n for (var k = 0; k < tileInner / innerElementSize; k = k + 1) {\n let BCached0 = mm_Bsub[k * innerElementSize][tileCol];\n let BCached1 = mm_Bsub[k * innerElementSize + 1][tileCol];\n let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol];\n ${innerElementSize === 3 ? 
'' : 'let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];'}\n\n ${calculateResultSnippet(transposeA, innerElementSize)}\n }\n\n workgroupBarrier();\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n mm_write(batch, globalRow + innerRow, globalCol, acc[innerRow]);\n }\n}`;\n };\n\nconst writeDataToSubASnippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRowStart + inputRow,\n kStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst readDataFromSubASnippet = (transposeA: boolean) =>\n transposeA ? 'let ACached = mm_Asub[k][tileRow + innerRow];' : 'let ACached = mm_Asub[tileRow + innerRow][k];';\n\n// sequentialAccessByThreads means sequential data in memory is accessed by\n// threads, instead of a single thread (default behavior).\nexport const makeMatMulPackedSource =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32,\n sequentialAccessByThreads = false): string => {\n const tileAOuter = workPerThread[1] * workgroupSize[1];\n const tileBOuter = workPerThread[0] * workgroupSize[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? tileInner : tileAOuter;\n\n if (!(tileAHight % workgroupSize[1] === 0 && tileAWidth % workgroupSize[0] === 0 &&\n tileInner % workgroupSize[1] === 0)) {\n throw new Error(`tileAHight ${tileAHight} must be divisible by workgroupSize[1]${\n workgroupSize[1]}, tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${\n workgroupSize[0]}, tileInner ${tileInner} must be divisible by workgroupSize[1]${workgroupSize[1]}`);\n }\n const rowPerThreadA = tileAHight / workgroupSize[1];\n const colPerThreadA = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n const matmulSnippet = sequentialAccessByThreads ?\n `\n let localRow = i32(localId.y);\n let localCol = i32(localId.x);\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n let globalColStart = i32(workgroupId.x) * ${tileBOuter};\n\n // Loop over shared dimension.\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var inputRow = localRow; inputRow < ${tileAHight}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileAWidth}; inputCol = inputCol + ${workgroupSize[0]}) {\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n // Load one tile of B into local memory.\n for (var inputRow = localRow; inputRow < ${tileInner}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileBOuter}; inputCol = inputCol + ${workgroupSize[0]}) {\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalColStart + inputCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][localCol + inner * ${workgroupSize[0]}];\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let ACached = ${\n transposeA ? `mm_Asub[k][localRow + innerRow * ${workgroupSize[1]}];` :\n `mm_Asub[localRow + innerRow * ${workgroupSize[1]}][k];`}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] +\n ACached * BCached[innerCol];\n }\n }\n }\n workgroupBarrier();\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let gRow = globalRowStart + localRow + innerRow * ${workgroupSize[1]};\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let gCol = globalColStart + localCol + innerCol * ${workgroupSize[0]};\n mm_write(batch, gRow, gCol, acc[innerRow][innerCol]);\n }\n }\n ` :\n `\nlet tileRow = i32(localId.y) * rowPerThread;\nlet tileCol = i32(localId.x) * colPerThread;\n\nlet globalRow = i32(globalId.y) * rowPerThread;\nlet globalCol = i32(globalId.x) * colPerThread;\nlet globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\nlet tileRowA = i32(localId.y) * ${rowPerThreadA};\nlet tileColA = i32(localId.x) * ${colPerThreadA};\nlet tileRowB = i32(localId.y) * ${rowPerThreadB};\n// Loop over shared dimension.\nfor (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadA}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < ${colPerThreadA}; innerCol = innerCol + 1) {\n let inputRow = tileRowA + innerRow;\n let inputCol = tileColA + innerCol;\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol + innerCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalCol + innerCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][tileCol + inner];\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n ${readDataFromSubASnippet(transposeA)}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol];\n }\n }\n }\n\n workgroupBarrier();\n}\n\nfor (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n mm_write(batch, globalRow + innerRow, globalCol + innerCol,\n acc[innerRow][innerCol]);\n }\n}\n`;\n\n return `\n var mm_Asub : array, ${tileAHight}>;\n var mm_Bsub : array, ${tileInner}>;\n const rowPerThread = ${workPerThread[1]};\n const colPerThread = ${workPerThread[0]};\n const tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let num_tiles = ${\n splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc : array, rowPerThread>;\n ${matmulSnippet}\n }\n`;\n };\n\nconst matMulReadWriteFnSource =\n (component: number, hasBias: boolean, applyActivation: string, variables: IndicesHelper[],\n batchShapes: Array, isChannelsLast = false): string => {\n const [batchAShape, batchBShape, batchShape] = batchShapes;\n const [batchVariable, aVariable, bVariable, outputVariable] = variables;\n const broadCastADims = getBroadcastDims(batchAShape, batchShape);\n const broadCastBDims = getBroadcastDims(batchBShape, batchShape);\n const dataType = tensorTypeToWsglStorageType(variables[0].type.tensor);\n const getAIndices = () => {\n const aRank = aVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var aIndices: ${aVariable.type.indices};`;\n for (let i = aRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\naIndices[${i}] = ${batchRank > 1 ? `batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastADims.forEach(i => {\n resStr += `\\naIndices[${i}] = 0;`;\n });\n resStr += `\\naIndices[${aRank - 2}] = u32(row);\n aIndices[${aRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const getBIndices = () => {\n const bRank = bVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var bIndices: ${bVariable.type.indices};`;\n for (let i = bRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\nbIndices[${i}] = ${batchRank > 1 ? 
`batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastBDims.forEach(i => {\n resStr += `\\nbIndices[${i}] = 0;`;\n });\n resStr += `\\nbIndices[${bRank - 2}] = u32(row);\n bIndices[${bRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const source = `\n fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_a_outer && col < uniforms.dim_inner)\n {\n ${getAIndices()}\n value = ${aVariable.getByIndices('aIndices')};\n }\n return value;\n }\n\n fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_inner && col < uniforms.dim_b_outer)\n {\n ${getBIndices()}\n value = ${bVariable.getByIndices('bIndices')};\n }\n return value;\n }\n\n fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${typeSnippet(component, dataType)}) {\n let col = colIn * ${component};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueIn;\n let coords = vec3(batch, row, colIn);\n ${\n hasBias ?\n `value = value + ${isChannelsLast ? 'bias[colIn]' : `${typeSnippet(component, dataType)}(bias[row])`};` :\n '' }\n ${applyActivation}\n ${outputVariable.setByIndices('vec3(coords)', 'value')}\n }\n }\n `;\n return source;\n };\n\nexport const createMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const dimAOuter = aShape[aShape.length - 2];\n const dimInner = aShape[aShape.length - 1];\n const dimBOuter = bShape[bShape.length - 1];\n const isVec4 = dimInner % 4 === 0 && dimBOuter % 4 === 0;\n\n // TODO: fine tune size\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const workgroupSize: [number, number, number] = [8, 8, 1];\n const dispatch = [\n Math.ceil(dimBOuter / workgroupSize[0] / elementsPerThread[0]),\n Math.ceil(dimAOuter / workgroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workgroupSize[2] / elementsPerThread[2])\n ];\n\n const components = isVec4 ? 
4 : 1;\n const aShapeTemp = [...outerDimsA, dimAOuter, dimInner / components];\n const aRank = aShapeTemp.length;\n const bShapeTemp = [...outerDimsB, dimInner, dimBOuter / components];\n const bRank = bShapeTemp.length;\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShapeTemp, bShapeTemp));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n const hasBias = inputs.length > 2;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchRank = outerDims.length;\n const batchDims = internalVariable('batchDims', inputs[0].dataType, batchRank, 1);\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const A = inputVariable('a', inputs[0].dataType, aRank, components);\n const B = inputVariable('b', inputs[1].dataType, bRank, components);\n const output = outputVariable('result', inputs[0].dataType, outputShapeTemp.length, components);\n const inputVariables = [A, B];\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n }\n const uniforms: UniformsArrayType =\n [{name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'}];\n appendActivationUniforms(activationAttributes, uniforms);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const declareFunctions = matMulReadWriteFnSource(\n components, hasBias, applyActivation, [batchDims, A, B, output], [outerDimsA, outerDimsB, outerDims],\n isChannelsLast);\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${declareFunctions}\n ${\n isVec4 ? makeMatMulPackedVec4Source(elementsPerThread, workgroupSize, dataType, batchDims) :\n makeMatMulPackedSource(elementsPerThread, workgroupSize, dataType, batchDims)}\n `;\n };\n return {\n name: 'MatMul',\n shaderCache: {\n hint: `${elementsPerThread};${activationAttributes.activation};${isVec4};${isChannelsLast}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv2d_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet, typeSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dCommonSnippet =\n (isChannelsLast: boolean, fitAOuter: boolean, fitBOuter: boolean, fitInner: boolean, addBias = false,\n attributes: ConvAttributes, innerElementSizeX = 4, innerElementSizeW = 4, innerElementSize = 4,\n dataType = 'f32'): string => {\n const getXSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'resData = x[xIndex];';\n case 3:\n return `resData = vec3<${dataType}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;\n case 4:\n return 'resData = x[xIndex / 4];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[row * i32(uniforms.w_shape[3]) + colIn];';\n case 4:\n return 'return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, xRow, xCol, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, xRow, xCol);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n const readXSnippet = `\n let inChannels = i32(uniforms.w_shape[2]);\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (i32(uniforms.w_shape[1]) * inChannels);\n let WCol = ${col} / inChannels % i32(uniforms.w_shape[1]);\n let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0];\n let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1];\n let xCh = ${col} % inChannels;\n var resData = ${typeSnippet(innerElementSizeX, dataType)}(0.0);\n // The bounds checking is always needed since we use it to pad zero for\n // the 'same' padding type.\n if (xRow >= 0 && xRow < ${xHeight} && xCol >= 0 && xCol < ${xWidth}) {\n ${coordASnippet}\n let xIndex = getIndexFromCoords4D(coord, vec4(uniforms.x_shape));\n ${getXSnippet(innerElementSizeX)}\n }\n return resData;`;\n\n const sampleX = isChannelsLast ? (fitAOuter && fitInner ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`) :\n (fitInner && fitBOuter ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`);\n\n const sampleW = `${getWSnippet(innerElementSizeW)}`;\n\n const resType = typeSnippet(innerElementSize, dataType);\n const aType =\n isChannelsLast ? typeSnippet(innerElementSizeX, dataType) : typeSnippet(innerElementSizeW, dataType);\n const bType =\n isChannelsLast ? typeSnippet(innerElementSizeW, dataType) : typeSnippet(innerElementSizeX, dataType);\n const applyActivation = getActivationSnippet(attributes, resType, dataType);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${aType} {\n ${isChannelsLast ? sampleX : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${bType} {\n ${isChannelsLast ? sampleW : sampleX}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueIn : ${resType}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer)\n {\n var value = valueIn;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value);\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[], dimAOuter: number,\n dimBOuter: number, dimInner: number, hasBias: boolean, sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 || inChannels % 3 === 0) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv2d_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const tileAOuter = workGroupSize[1] * elementsPerThread[1];\n const tileBOuter = workGroupSize[0] * elementsPerThread[0];\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const fitAOuter = dimAOuter % tileAOuter === 0;\n const fitBOuter = dimBOuter % tileBOuter === 0;\n const fitInner = dimInner % tileInner === 0;\n const elementsSize = isVec4 ? [innerElementSize, 4, 4] : [1, 1, 1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.int32, data: attributes.strides}, {type: DataType.int32, data: attributes.dilations}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'pad', type: 'i32', length: 2}, {name: 'stride', type: 'i32', length: 2},\n {name: 'dilation', type: 'i32', length: 2}\n ];\n appendActivationUniforms(attributes, uniforms);\n\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n result[flatIndex] = ${isVec4 ? `vec4<${t}>` : t}(value);\n }\n fn setOutputAtCoords(d0 : i32, d1 : i32, d2 : i32, d3 : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n let flatIndex = getOutputIndexFromCoords(vec4(d0, d1, d2, d3));\n setOutputAtIndex(flatIndex ${isVec4 ? '/ 4' : ''}, value);\n }`;\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n return `\n ${utilFunctions('uniforms.result_strides')}\n //struct Uniforms { xShape : vec4, wShape : vec4, outShape : vec4,\n // outShapeStrides: vec3, filterDims : vec2, pad : vec2, stride : vec2,\n // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 };\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n ${\n conv2dCommonSnippet(\n isChannelsLast, fitAOuter, fitBOuter, fitInner, hasBias, attributes, elementsSize[0], elementsSize[1],\n elementsSize[2], t)}\n ${\n isVec4 ?\n makeMatMulPackedVec4Source(elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner, false, undefined,\n sequentialAccessByThreads)}`;\n };\n return {\n name: 'Conv2DMatMul',\n shaderCache: {\n hint: `${attributes.cacheKey};${innerElementSize};${isVec4};${fitAOuter};${fitBOuter};${fitInner};${\n tileAOuter};${tileBOuter};${tileInner}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv3d_naive_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\n\nconst arrayProduct = (arr: number[]) => {\n let product = 1;\n for (let i = 0; i < arr.length; i++) {\n product *= arr[i];\n }\n return product;\n};\n\nconst parse3TupleParam = (param: number|[number, number, number]): [number, number, number] =>\n typeof param === 'number' ? 
[param, param, param] : param;\n\nconst getEffectiveFilterSize = (filterSize: number, dilation: number): number => {\n if (dilation <= 1) {\n return filterSize;\n }\n\n return filterSize + (filterSize - 1) * (dilation - 1);\n};\n\nconst computeDefaultPad =\n (inputShape: [number, number]|[number, number, number, number], fieldSize: number, stride: number, dilation = 1):\n number => {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n };\n\nconst computeOutputShape4D =\n (inShape: [number, number, number, number], filterShape: [number, number, number], outChannels: number,\n strides: [number, number, number], zeroPad?: number): [number, number, number, number] => {\n if (zeroPad == null) {\n // eslint-disable-next-line no-param-reassign\n zeroPad = computeDefaultPad(inShape, filterShape[0], strides[0]);\n }\n const outShape: [number, number, number, number] = [0, 0, 0, outChannels];\n for (let index = 0; index < 3; index++) {\n if (inShape[index] + 2 * zeroPad >= filterShape[index]) {\n outShape[index] = Math.trunc((inShape[index] - filterShape[index] + 2 * zeroPad) / strides[index] + 1);\n }\n }\n return outShape;\n };\n\nconst get3DPadAndOutInfo =\n (pad: number|string|number[], inDepth: number, inHeight: number, inWidth: number, strideDepth: number,\n strideHeight: number, strideWidth: number, filterDepth: number, filterHeight: number,\n filterWidth: number): {padInfo: PadInfo3D; outDepth: number; outHeight: number; outWidth: number} => {\n let padInfo: PadInfo3D;\n let outDepth: number;\n let outHeight: number;\n let outWidth: number;\n\n if (pad === 'VALID') {\n // eslint-disable-next-line no-param-reassign\n pad = 0;\n }\n\n if (typeof pad === 'number') {\n padInfo = {top: pad, bottom: pad, left: pad, right: pad, front: pad, back: pad};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (Array.isArray(pad)) {\n if (!pad.every((val, _, arr) => val === arr[0])) {\n throw Error(`Unsupported padding parameter: ${pad}`);\n }\n padInfo = {top: pad[0], bottom: pad[1], left: pad[2], right: pad[3], front: pad[4], back: pad[5]};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad[0]);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (pad === 'SAME_UPPER') {\n // TODO: support 'SAME_LOWER'.\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n\n padInfo = {top, bottom, left, right, front, back};\n } else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return {padInfo, outDepth, outHeight, outWidth};\n };\n\ntype PadInfo3D = {\n top: 
number; left: number; right: number; bottom: number; front: number; back: number;\n};\n\nexport type Conv3DInfo = {\n batchSize: number; inDepth: number; inHeight: number; inWidth: number; inChannels: number; outDepth: number;\n outHeight: number;\n outWidth: number;\n outChannels: number;\n dataFormat: 'channelsFirst' | 'channelsLast';\n strideDepth: number;\n strideHeight: number;\n strideWidth: number;\n dilationDepth: number;\n dilationHeight: number;\n dilationWidth: number;\n filterDepth: number;\n filterHeight: number;\n filterWidth: number;\n effectiveFilterDepth: number;\n effectiveFilterHeight: number;\n effectiveFilterWidth: number;\n padInfo: PadInfo3D;\n inShape: [number, number, number, number, number];\n outShape: [number, number, number, number, number];\n filterShape: [number, number, number, number, number];\n};\n\nexport const computeConv3DInfo =\n (inShape: [number, number, number, number, number], filterShape: [number, number, number, number, number],\n strides: number|[number, number, number], dilations: number|[number, number, number], pad: number|string|number[],\n depthwise = false, dataFormat: 'channelsFirst'|'channelsLast' = 'channelsLast'): Conv3DInfo => {\n let batchSize, inDepth, inHeight, inWidth, inChannels;\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n } else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n } else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterChannels, , filterDepth, filterHeight, filterWidth] = filterShape;\n\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const {padInfo, outDepth, outHeight, outWidth} = get3DPadAndOutInfo(\n pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth,\n effectiveFilterHeight, effectiveFilterWidth);\n\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n\n let outShape: [number, number, number, number, number] = [0, 0, 0, 0, 0];\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n } else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n };\n\nexport const createConv3DNaiveProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[],\n filterDims: readonly number[], pads: readonly number[], dataFormat: string): ProgramInfo => {\n const isChannelsLast = dataFormat === 'channelsLast';\n const inChannels = isChannelsLast ? 
inputs[0].dims[3] : inputs[0].dims[1];\n // TODO: enable vec4.\n const isVec4 = false;\n const workGroupSize: [number, number, number] = [64, 1, 1];\n const dispatchLayout = {x: outputShape.map((_, i) => i)};\n const dispatch = [Math.ceil(arrayProduct(dispatchLayout.x.map(d => outputShape[d])) / (workGroupSize[0])), 1, 1];\n\n LOG_DEBUG('verbose', () => `[conv3d_naive_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: filterDims},\n {type: DataType.uint32, data: pads}, {type: DataType.uint32, data: attributes.strides},\n {type: DataType.uint32, data: attributes.dilations}\n ];\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'pads', type: 'u32', length: pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length},\n {name: 'dilations', type: 'u32', length: attributes.dilations.length}\n ];\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : array) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[${isChannelsLast ? getElementAt('coords', 4, 5) : getElementAt('coords', 1, 5)}${\n isVec4 ? '/ 4' : ''}];\n }`;\n }\n\n return `\n ${declareFunctions}\n fn getX(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${x.getByIndices('aIndices')};\n }\n fn getW(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${w.getByIndices('aIndices')};\n }\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let coords = ${output.offsetToIndices('global_idx')};\n let batch = ${getElementAt('coords', 0, x.rank)};\n let d2 = ${\n isChannelsLast ? getElementAt('coords', x.rank - 1, x.rank) : getElementAt('coords', 1, x.rank)};\n let xFRCCorner = vec3(${\n isChannelsLast ? getElementAt('coords', 1, x.rank) : getElementAt('coords', 2, x.rank)},\n ${isChannelsLast ? getElementAt('coords', 2, x.rank) : getElementAt('coords', 3, x.rank)},\n ${\n isChannelsLast ? 
getElementAt('coords', 3, x.rank) :\n getElementAt('coords', 4, x.rank)}) * uniforms.strides - uniforms.pads;\n let xFCorner = xFRCCorner.x;\n let xRCorner = xFRCCorner.y;\n let xCCorner = xFRCCorner.z;\n let xShapeY = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 1, x.rank) : getElementAt('uniforms.x_shape', 2, x.rank)};\n let xShapeZ = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 2, x.rank) : getElementAt('uniforms.x_shape', 3, x.rank)};\n let xShapeW = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 3, x.rank) : getElementAt('uniforms.x_shape', 4, x.rank)};\n let xShapeU = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 4, x.rank) : getElementAt('uniforms.x_shape', 1, x.rank)};\n let inputDepthNearestVec4 = (xShapeU / 4) * 4;\n let inputDepthVec4Remainder = xShapeU % 4;\n\n var dotProd = 0.0;\n for (var wF = 0u; wF < uniforms.filter_dims[0]; wF++) {\n let xF = xFCorner + wF * uniforms.dilations[0];\n if (xF < 0 || xF >= xShapeY) {\n continue;\n }\n\n for (var wR = 0u; wR < uniforms.filter_dims[1]; wR++) {\n let xR = xRCorner + wR * uniforms.dilations[1];\n if (xR < 0 || xR >= xShapeZ) {\n continue;\n }\n\n for (var wC = 0u; wC < uniforms.filter_dims[2]; wC++) {\n let xC = xCCorner + wC * uniforms.dilations[2];\n if (xC < 0 || xC >= xShapeW) {\n continue;\n }\n\n for (var d1 = 0u; d1 < inputDepthNearestVec4; d1 += 4) {\n ${\n isChannelsLast ? `let xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3));\n ` :\n `let xValues = vec4(\n getX(batch, d1, xF, xR, xC),\n getX(batch, d1 + 1, xF, xR, xC),\n getX(batch, d1 + 2, xF, xR, xC),\n getX(batch, d1 + 3, xF, xR, xC));\n `}\n let wValues = vec4(\n getW(d2, d1, wF, wR, wC),\n getW(d2, d1 + 1, wF, wR, wC),\n getW(d2, d1 + 2, wF, wR, wC),\n getW(d2, d1 + 3, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n if (inputDepthVec4Remainder == 1) {\n ${\n isChannelsLast ? `dotProd += getX(batch, xF, xR, xC, inputDepthNearestVec4)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);` :\n `dotProd += getX(batch, inputDepthNearestVec4, xF, xR, xC)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);`}\n } else if (inputDepthVec4Remainder == 2) {\n ${\n isChannelsLast ? `let xValues = vec2(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1));\n ` :\n `let xValues = vec2(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC));\n `}\n let wValues = vec2(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n } else if (inputDepthVec4Remainder == 3) {\n ${\n isChannelsLast ? `let xValues = vec3(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 2));\n ` :\n `let xValues = vec3(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 2, xF, xR, xC));\n `}\n let wValues = vec3(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 2, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n ${hasBias ? 
'dotProd = dotProd + getBiasByOutputCoords(coords)' : ''};\n result[global_idx] = f32(dotProd);\n }`;\n };\n return {\n name: 'Conv3DNaive',\n shaderCache:\n {hint: `${attributes.cacheKey};${isChannelsLast};${innerElementSize};${hasBias}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from './fuse-utils';\n\n/**\n * naive grouped conv implementation, supports 1d/2d conv\n * @param squeezeOutputShapeFunction - an optional function to squeeze the output shape, only used in conv1d\n */\nexport const createGroupedConvProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += b[output_channel];' : '';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n\n const isChannelLast = attributes.format === 'NHWC';\n const outputShape = calculateOutputShape(\n xShape, wShape, attributes.dilations, attributes.pads, attributes.strides, isChannelLast);\n const outputSize = ShapeUtil.size(outputShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.dilations},\n {type: DataType.uint32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.uint32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.uint32, data: outputChannelsPerGroup}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length);\n const w = inputVariable('w', inputs[1].dataType, wShape.length);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims.length));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'dilations', type: 'u32', length: attributes.dilations.length},\n {name: 'strides', type: 'u32', length: 2}, {name: 'pads', type: 'u32', length: 
2},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch: u32 = outputIndices[0];\n let output_channel: u32 = outputIndices[${isChannelLast ? 3 : 1}];\n let xRCCorner: vec2 = vec2(outputIndices[${isChannelLast ? 1 : 2}], outputIndices[${\n isChannelLast ? 2 : 3}]) * uniforms.strides - uniforms.pads;\n let group_id: u32 = output_channel / uniforms.output_channels_per_group;\n\n var value: ${output.type.value} = ${output.type.value}(0);\n for (var wInChannel: u32 = 0u; wInChannel < uniforms.w_shape[1]; wInChannel++) {\n let input_channel = group_id * uniforms.w_shape[1] + wInChannel;\n for (var wHeight: u32 = 0u; wHeight < uniforms.w_shape[2]; wHeight++) {\n let xHeight = xRCCorner.x + wHeight * uniforms.dilations[0];\n\n if (xHeight < 0u || xHeight >= uniforms.x_shape[${isChannelLast ? 1 : 2}]) {\n continue;\n }\n\n for (var wWidth: u32 = 0u; wWidth < uniforms.w_shape[3]; wWidth++) {\n let xWidth = xRCCorner.y + wWidth * uniforms.dilations[1];\n if (xWidth < 0u || xWidth >= uniforms.x_shape[${isChannelLast ? 2 : 3}]) {\n continue;\n }\n\n let xVal = ${\n isChannelLast ? x.get('batch', 'xHeight', 'xWidth', 'input_channel') :\n x.get('batch', 'input_channel', 'xHeight', 'xWidth')};\n let wVal = ${w.get('output_channel', 'wInChannel', 'wHeight', 'wWidth')};\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${output.setByOffset('global_idx', 'value')}\n }`;\n };\n return {\n name: 'GroupedConv',\n shaderCache: {hint: attributes.cacheKey, inputDependencies},\n getRunData: () => ({\n outputs: [{\n dims: squeezeOutputShapeFunction ? 
squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const createGroupedConvVectorizeProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const components = getMaxComponents(outputShape[3]);\n const outputNumber = getMaxComponents(outputShape[2]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const xShape = [inputs[0].dims[0], inputs[0].dims[1], inputs[0].dims[2], inputs[0].dims[3] / components];\n const wShape = [inputs[1].dims[0], inputs[1].dims[1], inputs[1].dims[2], inputs[1].dims[3] / components];\n const outputShapeInShader = [outputShape[0], outputShape[1], outputShape[2], outputShape[3] / components];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.int32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape, outputShapeInShader));\n const xNumber = (outputNumber - 1) * attributes.strides[1] + wShape[1];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length, components);\n const w = inputVariable('w', inputs[1].dataType, wShape.length, components);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims, components));\n }\n const processBias = hasBias ? 
'value += b[output_channel];' : '';\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'strides', type: 'i32', length: 2},\n {name: 'pads', type: 'i32', length: 2},\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let width0 = uniforms.output_shape[3];\n let output_channel = global_idx % width0;\n var index1 = global_idx / width0;\n let width1 = uniforms.output_shape[2] / ${outputNumber}u;\n let col = (index1 % width1) * ${outputNumber}u;\n index1 = index1 / width1;\n let row = index1 % uniforms.output_shape[1];\n let batch = index1 / uniforms.output_shape[1];\n\n let x_corner = vec2(i32(row), i32(col)) * uniforms.strides - uniforms.pads;\n\n var x_vals: array<${x.type.value}, ${xNumber}>;\n var values: array<${output.type.value}, ${outputNumber}>;\n let input_channel = output_channel;\n // Use constant instead of uniform can give better performance for w's height/width.\n for (var w_height: u32 = 0u; w_height < ${wShape[0]}; w_height++) {\n let x_height = x_corner.x + i32(w_height);\n if (x_height >= 0 && u32(x_height) < uniforms.x_shape[1]) {\n for (var i = 0; i < ${xNumber}; i++) {\n let x_width = x_corner.y + i;\n if (x_width >= 0 && u32(x_width) < uniforms.x_shape[2]) {\n x_vals[i] = ${x.get('batch', 'u32(x_height)', 'u32(x_width)', 'input_channel')};\n } else {\n x_vals[i] = ${x.type.value}(0);\n }\n }\n for (var w_width: u32 = 0u; w_width < ${wShape[1]}; w_width++) {\n let w_val = ${w.get('w_height', 'w_width', '0', 'output_channel')};\n for (var i = 0u; i < ${outputNumber}u; i++) {\n values[i] = fma(x_vals[i * u32(uniforms.strides[1]) + w_width], w_val, values[i]);\n }\n }\n }\n }\n\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n ${output.set('batch', 'row', 'col + i', 'output_channel', 'value')};\n }\n }`;\n };\n\n return {\n name: 'GroupedConv-Vectorize',\n shaderCache: {\n hint: `${attributes.cacheKey};${components};${outputNumber};${xNumber};${wShape[0]};${wShape[1]}`,\n inputDependencies: hasBias ? ['rank', 'rank', 'type'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createTensorShapeVariables, getBroadcastDims, getMaxComponents, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\n\nexport const createNaiveMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n\n const M = aShape[aShape.length - 2];\n const N = bShape[bShape.length - 1];\n const K = aShape[aShape.length - 1];\n const components = getMaxComponents(N);\n const aComponents = getMaxComponents(K);\n const outputNumber = getMaxComponents(M);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const hasBias = inputs.length > 2;\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const outputShapeInShader = [batchSize, M, N];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShape, bShape));\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeInShader));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchDims = internalVariable('batch_dims', inputs[0].dataType, outerDims.length);\n const a = inputVariable('a', inputs[0].dataType, aShape.length, aComponents);\n const b = inputVariable('b', inputs[1].dataType, bShape.length, components);\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const inputVariables = [a, b];\n let processBias = '';\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n processBias = `${\n isChannelsLast ? 
`value += bias[col / ${biasComponents}];` :\n `value += ${output.type.value}(bias[row + i]);`}`;\n }\n\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const broadCastADims = getBroadcastDims(outerDimsA, outerDims);\n const broadCastBDims = getBroadcastDims(outerDimsB, outerDims);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'K', type: 'u32'}\n ];\n appendActivationUniforms(activationAttributes, uniforms);\n\n const getIndices = (variable: IndicesHelper, broadCastDims: number[]) => {\n const rank = variable.rank;\n const name = variable.name;\n if (rank === 2) {\n return `var ${name}_indices = ${variable.type.indices}(0u, 0u);`;\n }\n const batchRank = batchDims.rank;\n let resStr = `var ${name}_indices: ${variable.type.indices};`;\n for (let i = rank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\n${name}_indices[${i}] = ${batchRank > 1 ? `batch_indices[${j}]` : 'batch_indices'};`;\n }\n broadCastDims.forEach(i => {\n resStr += `\\n${name}_indices[${i}] = 0;`;\n });\n resStr += `${name}_indices[${rank - 2}] = 0u;\n ${name}_indices[${rank - 1}] = 0u;`;\n return resStr;\n };\n\n const calcResult = (): string => {\n let calcStr = `var a_data: ${a.type.value};`;\n for (let i = 0; i < aComponents; i++) {\n calcStr += `\n let b_data${i} = b[(b_offset + (k + ${i}) * uniforms.N + col) / ${components}];`;\n }\n for (let i = 0; i < outputNumber; i++) {\n calcStr += `a_data = a[(a_offset + (row + ${i}) * uniforms.K + k) / ${aComponents}];`;\n\n for (let j = 0; j < aComponents; j++) {\n calcStr += `\n values[${i}] = fma(${b.type.value}(a_data${aComponents === 1 ? '' : `[${j}]`}), b_data${j}, values[${\n i}]);\\n`;\n }\n }\n return calcStr;\n };\n\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let col = (global_idx % (uniforms.N / ${components})) * ${components};\n var index1 = global_idx / (uniforms.N / ${components});\n let stride1 = uniforms.M / ${outputNumber};\n let row = (index1 % stride1) * ${outputNumber};\n let batch = index1 / stride1;\n\n ${outputShape.length === 2 ? '' : `let batch_indices = ${batchDims.offsetToIndices('batch')};`}\n ${getIndices(a, broadCastADims)}\n let a_offset = ${a.indicesToOffset('a_indices')};\n ${getIndices(b, broadCastBDims)}\n let b_offset = ${b.indicesToOffset('b_indices')};\n var values: array<${output.type.value}, ${outputNumber}>;\n for (var k: u32 = 0u; k < uniforms.K; k = k + ${aComponents}) {\n ${calcResult()}\n }\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n let cur_indices = ${output.type.indices}(batch, row + i, col);\n let offset = ${output.indicesToOffset('cur_indices')};\n ${output.setByOffset(`offset / ${components}`, 'value')};\n }\n }\n `;\n };\n return {\n name: 'MatMulNaive',\n shaderCache: {\n hint: `${activationAttributes.activation};${components};${aComponents};${outputNumber};${isChannelsLast}`,\n inputDependencies: hasBias ? 
['rank', 'rank', 'rank'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n};\n\nexport const matMul = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n const outputShape = BroadcastUtil.calcShape(context.inputs[0].dims, context.inputs[1].dims, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const N = outputShape[outputShape.length - 1];\n const K = context.inputs[0].dims[context.inputs[0].dims.length - 1];\n if (N < 8 && K < 8) {\n context.compute(createNaiveMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n } else {\n context.compute(createMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DMatMulProgramInfo} from './3rd-party/conv2d_mm_webgpu';\nimport {computeConv3DInfo, createConv3DNaiveProgramInfo} from './3rd-party/conv3d_naive_webgpu';\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createGroupedConvProgramInfo, createGroupedConvVectorizeProgramInfo} from './conv-grouped';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createNaiveMatmulProgramInfo} from './matmul';\nimport {createTransposeProgramInfo} from './transpose';\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[], isChannelLast: boolean): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(isChannelLast ? 1 : 2, isChannelLast ? 3 : 4);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 
3 : 1, 0, outChannels);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly format: 'NHWC'|'NCHW';\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n readonly wIsConst: boolean;\n}\n\n// for transposing weight tensor from [M, C/group, KH, KW] to [KH, KW, C/group, M]\nconst weightTransposeAttribute = [2, 3, 1, 0];\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/master/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n if (inputs[0].dims.length > 5) {\n throw new Error('greater than 5D is not supported');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n};\n\nconst getAdjustedConvAttributes = (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n if (kernelShape[i - 2] === 0) {\n kernelShape[i - 2] = inputs[1].dims[i];\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.format === 'NHWC',\n attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads});\n return newAttributes;\n};\n\nexport const parseConvAttributes = (attributes: Record): ConvAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default 
attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad = ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number];\n const dilations = attributes.dilations as number[];\n const group = attributes.group as number;\n const kernelShape = attributes.kernel_shape as number[];\n const pads = attributes.pads as number[];\n const strides = attributes.strides as number[];\n const wIsConst = (attributes.w_is_const as () => boolean)();\n\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst conv2d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n\n // check attributes\n\n // const hasPreluActivationWeights = false; /* TODO: add support for prelu activation weights */\n const isChannelsLast = attributes.format === 'NHWC';\n if (attributes.group !== 1) {\n // NVIDIA GPU with ampere architecture fails with below 2 cases, but we couldn't repro them with any other\n // GPUs. So just disable vectorize on NVIDIA ampere to ensure always correct outputs.\n // [webgpu]Conv - conv - vectorize group - B\n // [webgpu]Conv - conv - vectorize group - D\n const enableGroupedConvVectorize = !context.adapterInfo.isArchitecture('ampere');\n if (enableGroupedConvVectorize && isChannelsLast && inputs[1].dims[0] === attributes.group &&\n inputs[1].dims[1] === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1) {\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n const convInputs = [inputs[0], transposedWeight];\n if (inputs.length === 3) {\n convInputs.push(inputs[2]);\n }\n context.compute(\n createGroupedConvVectorizeProgramInfo(convInputs, adjustedAttributes, outputShape), {inputs: convInputs});\n } else {\n context.compute(createGroupedConvProgramInfo(inputs, adjustedAttributes));\n }\n return;\n }\n\n const hasBias = inputs.length === 3;\n const inputHeight = inputs[0].dims[isChannelsLast ? 1 : 2];\n const inputWidth = inputs[0].dims[isChannelsLast ? 2 : 3];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const outChannels = outputShape[isChannelsLast ? 
3 : 1];\n\n const sameSize = isChannelsLast && weightHeight === inputHeight && weightWidth === inputWidth &&\n attributes.pads[0] === 0 && attributes.pads[1] === 0;\n if (sameSize ||\n (weightHeight === 1 && weightWidth === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1 &&\n attributes.strides[0] === 1 && attributes.strides[1] === 1 && attributes.pads[0] === 0 &&\n attributes.pads[1] === 0)) {\n // conv2dByMatMul\n const batch = outputShape[0];\n let xReshaped, wReshaped, matmulOutputShape;\n const matmulInputs = [];\n if (isChannelsLast) {\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n if (sameSize) {\n const sharedDim = inputHeight * inputWidth * inputChannels;\n xReshaped = inputs[0].reshape([1, batch, sharedDim]);\n wReshaped = transposedWeight.reshape([1, sharedDim, outChannels]);\n matmulOutputShape = [1, batch, outChannels];\n } else {\n xReshaped = inputs[0].reshape([batch, inputHeight * inputWidth, inputChannels]);\n wReshaped = transposedWeight.reshape([1, inputChannels, outChannels]);\n matmulOutputShape = [batch, outHeight * outWidth, outChannels];\n }\n matmulInputs.push(xReshaped);\n matmulInputs.push(wReshaped);\n } else {\n xReshaped = inputs[0].reshape([batch, inputChannels, inputHeight * inputWidth]);\n wReshaped = inputs[1].reshape([1, outChannels, inputChannels]);\n matmulOutputShape = [batch, outChannels, outHeight * outWidth];\n matmulInputs.push(wReshaped);\n matmulInputs.push(xReshaped);\n }\n if (hasBias) {\n matmulInputs.push(inputs[2]);\n }\n const N = matmulOutputShape[2];\n const K = matmulInputs[0].dims[matmulInputs[0].dims.length - 1];\n // Tune the threshold.\n if (N < 8 && K < 8) {\n context.compute(\n createNaiveMatmulProgramInfo(\n matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n } else {\n context.compute(\n createMatmulProgramInfo(matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n }\n return;\n }\n\n // TODO: implement conv2dWithIm2Col()\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convInputs = [inputs[0], transposedWeight];\n if (hasBias) {\n convInputs.push(inputs[2]);\n }\n\n // STEP.3: compute matmul\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n context.compute(\n createConv2DMatMulProgramInfo(\n convInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convInputs});\n};\n\nconst conv1d = (context: ComputeContext, attributes: ConvAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n const pads = [0, attributes.pads[0], 0, attributes.pads[1]];\n const strides = [1].concat(attributes.strides);\n const dilations = [1].concat(attributes.dilations);\n const kernelShape = [1].concat(attributes.kernelShape);\n const adjustedAttributes = getAdjustedConvAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createGroupedConvProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] : []));\n};\n\nconst conv3d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const format = attributes.format === 'NHWC' ? 'channelsLast' : 'channelsFirst';\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n const pads = attributes.autoPad === 'NOTSET' ? attributes.pads : attributes.autoPad;\n const convInfo = computeConv3DInfo(\n inputs[0].dims as [number, number, number, number, number],\n inputs[1].dims as [number, number, number, number, number],\n attributes.strides as number | [number, number, number],\n attributes.dilations as number | [number, number, number], pads as string | number[], false, format);\n context.compute(createConv3DNaiveProgramInfo(\n inputs, adjustedAttributes, convInfo.outShape,\n [convInfo.filterDepth, convInfo.filterHeight, convInfo.filterWidth],\n [convInfo.padInfo.front, convInfo.padInfo.top, convInfo.padInfo.left], format));\n};\n\nexport const conv = (context: ComputeContext, attributes: ConvAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n conv1d(context, attributes);\n } else if (context.inputs[0].dims.length === 5) {\n conv3d(context, context.inputs, attributes);\n } else {\n conv2d(context, context.inputs, attributes);\n }\n};\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dTransposeCommonSnippet =\n (isChannelsLast: boolean, addBias = false, attributes: ConvTransposeAttributes, type: string,\n innerElementSize = 4): string => {\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];';\n case 4:\n return `\n let coord1 = vec4(coordX, coordY, col + 1, rowInner);\n let coord2 = vec4(coordX, coordY, col + 2, rowInner);\n let coord3 = vec4(coordX, coordY, col + 3, rowInner);\n let v0 = w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\n let v1 = w[getIndexFromCoords4D(coord1, vec4(uniforms.w_shape))];\n let v2 = w[getIndexFromCoords4D(coord2, vec4(uniforms.w_shape))];\n let v3 = w[getIndexFromCoords4D(coord3, vec4(uniforms.w_shape))];\n return ${type}(v0, v1, v2, v3);\n `;\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, iXR, iXC, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, iXR, iXC);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n\n const readASnippet = `\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (uniforms.filter_dims[1] * inChannels);\n let WCol = ${col} / inChannels % uniforms.filter_dims[1];\n let xR = f32(outRow - uniforms.pads[0] + uniforms.dilations[0] * WRow) / f32(uniforms.strides[0]);\n let xC = f32(outCol - uniforms.pads[1] + uniforms.dilations[1] * WCol) / f32(uniforms.strides[1]);\n if (xR < 0.0 || xR >= f32(${xHeight}) || fract(xR) > 0.0) {\n return ${type}(0.0);\n }\n if (xC < 0.0 || xC >= f32(${xWidth}) || fract(xC) > 0.0) {\n return ${type}(0.0);\n }\n let iXR = i32(xR);\n let iXC = i32(xC);\n let xCh = ${col} % inChannels;\n ${coordASnippet}\n return x[getIndexFromCoords4D(coord, vec4(uniforms.x_shape))/${innerElementSize}];`;\n\n const sampleA = isChannelsLast ? `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readASnippet}\n }\n return ${type}(0.0);` :\n `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readASnippet}\n }\n return ${type}(0.0);`;\n\n const sampleW = `\n let col = colIn * ${innerElementSize};\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let coordX = uniforms.filter_dims[0] - 1 - row / (uniforms.filter_dims[1] * inChannels);\n let coordY = uniforms.filter_dims[1] - 1 - (row / inChannels) % uniforms.filter_dims[1];\n if (${\n isChannelsLast ? 'row < uniforms.dim_inner && col < uniforms.dim_b_outer' :\n 'row < uniforms.dim_inner && col < uniforms.dim_a_outer'} && coordX >= 0 && coordY >= 0) {\n let rowInner = row % inChannels;\n let coord = vec4(coordX, coordY, col, rowInner);\n ${getWSnippet(innerElementSize)}\n }\n return ${type}(0.0);\n `;\n\n const applyActivation = getActivationSnippet(attributes, type);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleA : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleW : sampleA}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueInput : ${type}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueInput;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${innerElementSize}] = value;\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DTransposeMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes, outputShape: readonly number[],\n dimAOuter: number, dimBOuter: number, dimInner: number, hasBias: boolean,\n sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 && inChannels % 3) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv_backprop_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? 4 : 1;\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const components = isVec4 ? 4 : 1;\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const effectiveFilterDims = [\n filterDims[0] + (attributes.dilations[0] <= 1 ? 0 : (filterDims[0] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] + (attributes.dilations[1] <= 1 ? 0 : (filterDims[1] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor((attributes.pads[1] + attributes.pads[3]) / 2)\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: attributes.strides},\n {type: DataType.int32, data: attributes.dilations}, {type: DataType.int32, data: filterDims},\n {type: DataType.int32, data: pads}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims.length, components);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, 1);\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n const inputVariables = [x, w];\n\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${bias.type.value} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'strides', type: 'i32', length: 2}, {name: 'dilations', type: 'i32', length: 2},\n {name: 'filter_dims', type: 'i32', length: filterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}\n ];\n appendActivationUniforms(attributes, uniforms);\n const elemType = tensorTypeToWsglStorageType(inputs[0].dataType, 1);\n if (elemType !== 'f16' && elemType !== 'f32') {\n throw new Error(`elemType ${elemType} is not supported.`);\n }\n return `\n ${utilFunctions('uniforms.result_strides')}\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)};\n ${declareFunctions}\n ${conv2dTransposeCommonSnippet(isChannelsLast, hasBias, attributes, x.type.value, innerElementSize)}\n ${\n isVec4 ? makeMatMulPackedVec4Source(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner, false,\n undefined, sequentialAccessByThreads)}`;\n };\n\n return {\n name: 'Conv2DTransposeMatMul',\n shaderCache:\n {hint: `${attributes.cacheKey};${elementsPerThread};${workGroupSize};${isVec4}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_webgpu.ts\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\n\nconst createConvTranspose2DOpProgramShaderSource =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], outputShape: readonly number[], hasBias: boolean,\n is1DimensionDispatch: boolean, isVec4 = false, dataType: string, uniforms: UniformsArrayType,\n isChannelsLast = false): string => {\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n const workPerThread = isVec4 ? 2 : 1;\n\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : u32, value : ${isVec4 ? `vec4<${dataType}>` : dataType}) {\n result[flatIndex] = ${isVec4 ? 
`vec4<${dataType}>` : dataType}(value);\n }`;\n if (hasBias) {\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${dataType}>` : dataType} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? '/ 4' : ''}];\n }`;\n }\n const components = isVec4 ? 4 : 1;\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const dy = inputVariable('Dy', inputs[0].dataType, inputs[0].dims.length, components);\n const inputVariables = [dy, w];\n if (hasBias) {\n inputVariables.push(inputVariable('bias', inputs[2].dataType, [outputShape[channelDim]].length, components));\n }\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n\n const codeSnippet4 = `{\n let batch: u32 = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} / uniforms.result_shape[1];\n let r = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} % uniforms.result_shape[1];\n let c = ${is1DimensionDispatch ? 'global_id.y' : 'workgroup_id.y'} * ${workPerThread};\n let d1: u32 = ${is1DimensionDispatch ? 'global_id.x' : 'workgroup_id.x'} * 4;\n\n let dyCorner = vec2(i32(r), i32(c)) - vec2(uniforms.pads);\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd: array, ${workPerThread}>;\n for (var i = 0; i < ${workPerThread}; i++) {\n dotProd[i] = vec4<${dataType}>(0.0);\n }\n for (var wR: u32 = 0; wR < uniforms.filter_dims[0]; wR = wR + 1) {\n var dyR = (${dataType}(dyCorner.x) + ${dataType}(wR)) / ${dataType}(uniforms.strides.x);\n let wRPerm = uniforms.filter_dims[0] - 1 - wR;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[1]) ||\n fract(dyR) > 0.0 || wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.filter_dims[1]; wC = wC + 1) {\n let dyC = (${dataType}(dyCorner.y) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let dyC2 = (${dataType}(dyCorner.y) + 1.0 + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims[1] - 1 - wC;\n if (wCPerm < 0) {\n continue;\n }\n var bDyCVal = true;\n var bDyCVal2 = true;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC) > 0.0) {\n bDyCVal = false;\n }\n if (dyC2 < 0.0 || dyC2 >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC2) > 0.0) {\n bDyCVal2 = false;\n }\n\n let idyC: u32 = u32(dyC);\n let idyC2: u32 = u32(dyC2);\n if (bDyCVal && bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2 :u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n\n xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n\n dotProd[1] = dotProd[1] + vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n }\n } else if (bDyCVal) {\n let d2Length = uniforms.Dy_shape[${channelDim}];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = 
${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n }\n } else if (bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[1] = dotProd[1] + tmpval;\n }\n }\n }\n }\n\n for (var i: u32 = 0; i < ${workPerThread}; i = i + 1) {\n let value = dotProd[i] + ${hasBias ? 'bias[c+i]' : `vec4<${dataType}>(0.0)`};\n ${output.set('batch', 'r', 'c + i', 'd1', 'value')};\n }\n }`;\n const codeSnippet = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch = ${output.indicesGet('outputIndices', 0)};\n let d1 = ${output.indicesGet('outputIndices', channelDim)};\n let r = ${output.indicesGet('outputIndices', rowDim)};\n let c = ${output.indicesGet('outputIndices', colDim)};\n let dyCorner = vec2(i32(r), i32(c)) - uniforms.pads;\n let dyRCorner = dyCorner.x;\n let dyCCorner = dyCorner.y;\n let groupId = d1 / uniforms.output_channels_per_group;\n let wOutChannel = d1 - groupId * uniforms.output_channels_per_group;\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd = ${dataType}(0.0);\n for (var wR: u32 = 0; wR < uniforms.effective_filter_dims.x; wR = wR + 1) {\n if (wR % uniforms.dilations.x != 0) {\n continue;\n }\n let dyR = (${dataType}(dyRCorner) + ${dataType}(wR)) / ${dataType}(uniforms.strides[0]);\n let wRPerm = uniforms.filter_dims.x - 1 - wR / uniforms.dilations.x;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[${rowDim}]) || fract(dyR) > 0.0 ||\n wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.effective_filter_dims.y; wC = wC + 1) {\n if (wC % uniforms.dilations.y != 0) {\n continue;\n }\n let dyC = (${dataType}(dyCCorner) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims.y - 1 - wC / uniforms.dilations.y;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[${colDim}]) ||\n fract(dyC) > 0.0 || wCPerm < 0) {\n continue;\n }\n let idyC: u32 = u32(dyC);\n var inputChannel = groupId * uniforms.input_channels_per_group;\n for (var d2: u32 = 0; d2 < uniforms.input_channels_per_group; d2 = d2 + 1) {\n let xValue = ${\n isChannelsLast ? dy.get('batch', 'idyR', 'idyC', 'inputChannel') :\n dy.get('batch', 'inputChannel', 'idyR', 'idyC')};\n let wValue = ${w.get('inputChannel', 'wOutChannel', 'u32(wRPerm)', 'u32(wCPerm)')};\n dotProd = dotProd + xValue * wValue;\n inputChannel = inputChannel + 1;\n }\n }\n }\n let value = dotProd + ${hasBias ? 
'bias[d1]' : `${dataType}(0.0)`};\n ${output.setByOffset('global_idx', 'value')};\n `;\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')};\n ${isVec4 ? codeSnippet4 : codeSnippet}}`;\n };\n\nexport const createConvTranspose2DProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n // const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = attributes.outputShape;\n const outputSize = ShapeUtil.size(outputShape);\n\n // const inChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // TODO Enable isVec4 for performance\n // Disabled due to weight matrix layout issue\n // const isVec4 = attributes.group === 1 && isChannelsLast && inChannels % 4 === 0 && outChannels % 4 === 0;\n const dispatch = [\n Math.ceil(outputSize / 64),\n 1,\n 1,\n ];\n LOG_DEBUG('verbose', () => `[conv2d_backprop_webgpu] dispatch = ${dispatch}`);\n\n const isChannelsLast = attributes.format === 'NHWC';\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const strides = [attributes.strides[0], attributes.strides[1]];\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const dilations = [attributes.dilations[0], attributes.dilations[1]];\n const effectiveFilterDims = [\n filterDims[0] +\n (attributes.dilations[0] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 1 : 2] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] +\n (attributes.dilations[1] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 
2 : 3] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor(attributes.pads[1] + attributes.pads[3]) / 2\n ];\n\n const isVec4 = false;\n const group = attributes.group;\n const wShape = inputs[1].dims;\n const inputChannelsPerGroup = wShape[0] / group;\n const outputChannelsPerGroup = wShape[1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: strides},\n {type: DataType.uint32, data: filterDims}, {type: DataType.uint32, data: dilations},\n {type: DataType.uint32, data: effectiveFilterDims}, {type: DataType.int32, data: pads},\n {type: DataType.uint32, data: inputChannelsPerGroup}, {type: DataType.uint32, data: outputChannelsPerGroup},\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims)\n ];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const is1DimensionDispatch = dispatch[1] === 1 && dispatch[2] === 1;\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'strides', type: 'u32', length: strides.length},\n {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'dilations', type: 'u32', length: filterDims.length},\n {name: 'effective_filter_dims', type: 'u32', length: effectiveFilterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}, {name: 'input_channels_per_group', type: 'u32'},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n return `${\n createConvTranspose2DOpProgramShaderSource(\n shaderHelper, inputs, outputShape, hasBias, is1DimensionDispatch, isVec4, dataType, uniforms,\n isChannelsLast)}`;\n };\n return {\n name: 'ConvTranspose2D',\n shaderCache: {hint: `${attributes.cacheKey};`, inputDependencies},\n getRunData: () => ({\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n outputs: [{\n dims: squeezeOutputShapeFunction ? squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n programUniforms\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DTransposeMatMulProgramInfo} from './3rd-party/conv_backprop_mm_webgpu';\nimport {createConvTranspose2DProgramInfo} from './3rd-party/conv_backprop_webgpu';\nimport {ConvAttributes} from './conv';\nimport {parseInternalActivationAttributes} from './fuse-utils';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n group: number, pads: number[], strides: readonly number[], isChannelLast: boolean, outputPadding: number[],\n outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateOutputShape = outputShape.length === 0;\n if (outputPadding.length === 0) {\n for (let i = 0; i < spatialRank; ++i) {\n outputPadding.push(0);\n }\n }\n const batchSize = inputShape[0];\n const outChannels = kernelShape[isChannelLast ? 3 : 1] * group;\n for (let i = 0, j = inputShape.length - spatialRank - (isChannelLast ? 1 : 0); i < spatialRank; ++i, ++j) {\n const inSize = inputShape[j];\n const outSize = updateOutputShape ? inSize * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inSize, strides[i], pads[i], kernelShape[j], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateOutputShape) {\n outputShape.push(\n strides[i] * (inSize - 1) + outputPadding[i] + (kernelShape[j] - 1) * dilations[i] + 1 - pads[i] -\n pads[i + spatialRank]);\n }\n }\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 3 : 1, 0, outChannels);\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nconst getAdjustedConvTransposeAttributes =\n (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0 || attributes.kernelShape.reduce((a, b) => a * b, 1) === 0) {\n kernelShape.length = 0;\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const isChannelsLast = attributes.format === 'NHWC';\n kernelShape.splice(0, 0, inputs[1].dims[0]);\n kernelShape.splice(isChannelsLast ? 
3 : 1, 0, inputs[1].dims[1]);\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const outputPadding = attributes.outputPadding.slice();\n const inputShape = inputs[0].dims;\n let dilations = attributes.dilations.slice();\n if (dilations.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n dilations = new Array(spatialRank).fill(1);\n }\n let strides = attributes.strides.slice();\n if (strides.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n strides = new Array(spatialRank).fill(1);\n }\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, dilations, attributes.autoPad, attributes.group, pads, strides, isChannelsLast,\n outputPadding, outputShape);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputPadding, outputShape, dilations, strides});\n return newAttributes;\n };\n\nexport const parseConvTransposeAttributes = (attributes: Record): ConvTransposeAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad =\n ['NOTSET', 'VALID', 'SAME_UPPER',\n 'SAME_LOWER'][typeof attributes.autoPad == 'undefined' ? 0 : attributes.autoPad as number];\n const dilations = attributes.dilations as [number, number];\n const group = attributes.group as number;\n const kernelShape = attributes.kernelShape as [number, number];\n const pads = attributes.pads as [number, number, number, number];\n const strides = attributes.strides as [number, number];\n const wIsConst = (attributes.wIsConst as () => boolean)();\n const outputPadding = attributes.outputPadding as [number, number, number, number];\n const outputShape = attributes.outputShape as [number, number];\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n outputPadding,\n outputShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#ConvTranspose\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 && inputs[0].dims.length !== 3) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? 
inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n const dilationsSet = attributes.dilations.reduce((a, b) => a + b, 0) > 0;\n // wrong dilations dimension\n if (dilationsSet && attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n const stridesSet = attributes.strides.reduce((a, b) => a + b, 0) > 0;\n // Wrong strides dimension\n if (stridesSet && attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n const padsSet = attributes.pads.reduce((a, b) => a + b, 0) > 0;\n if (padsSet && attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank && attributes.outputPadding.length !== 0) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n const kernelShapeSet = attributes.kernelShape.reduce((a, b) => a + b, 0) > 0;\n if (kernelShapeSet && attributes.kernelShape.length !== 0 &&\n attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n};\n\n// for transposing weight tensor from [C, M/group, KH, KW] to [KH, KW, M/group, C]\nconst weightTransposePerm = [2, 3, 1, 0];\n\nconst convTranspose2d =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = adjustedAttributes.outputShape;\n const outChannels = outputShape[isChannelsLast ? 3 : 1];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // Switch to naive method when outChannels and inputChannels are very small. It's because that in this case it's\n // not suitable for matmul version since matmul uses tile size 32x32 resulting the underlying execution unit\n // utilization rate is very low.\n if (adjustedAttributes.group !== 1 || (outChannels === 1 && inputChannels === 1)) {\n context.compute(createConvTranspose2DProgramInfo(inputs, adjustedAttributes));\n return;\n }\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposePerm),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convTransposeInputs = [inputs[0], transposedWeight];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n if (!isChannelsLast && inputs[2].dims.length === 1) {\n convTransposeInputs.push(inputs[2].reshape([inputs[2].dims[0], 1, 1]));\n } else {\n convTransposeInputs.push(inputs[2]);\n }\n }\n\n // STEP.3: compute matmul\n context.compute(\n createConv2DTransposeMatMulProgramInfo(\n convTransposeInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convTransposeInputs});\n };\n\nconst convTranspose1d = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n let kernelShape = attributes.kernelShape;\n if (kernelShape.length === 0 || kernelShape[0] === 0) {\n kernelShape = [context.inputs[1].dims[2]];\n }\n let dilations = attributes.dilations;\n if (dilations.length === 0 || dilations[0] === 0) {\n dilations = [1];\n }\n let strides = attributes.strides;\n if (strides.length === 0 || strides[0] === 0) {\n strides = [1];\n }\n let pads = attributes.pads;\n if (pads.length === 0) {\n pads = [0, 0];\n }\n pads = [0, pads[0], 0, pads[1]];\n strides = [1].concat(strides);\n dilations = [1].concat(dilations);\n kernelShape = [1].concat(kernelShape);\n const adjustedAttributes =\n getAdjustedConvTransposeAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createConvTranspose2DProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] :\n [outputShape[0], outputShape[1], outputShape[3]]));\n};\n\nexport const convTranspose = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n convTranspose1d(context, attributes);\n } else {\n convTranspose2d(context, context.inputs, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper} from './common';\n\n\nexport interface CumSumAttributes extends AttributeWithCacheKey {\n readonly exclusive: boolean;\n readonly reverse: boolean;\n}\nconst createCumsumProgramInfo =\n (inputType: number, inputShape: readonly number[], axisInput: TensorView, attributes: CumSumAttributes):\n ProgramInfo => {\n const outputSize = ShapeUtil.size(inputShape); // outputShape is same as inputShape.\n const rank = inputShape.length; // input/output rank\n const input = inputVariable('input', inputType, rank);\n const output = outputVariable('output', inputType, rank);\n const axisValue = axisInput.dataType === DataType.int32 ? axisInput.getInt32Array()[0] :\n Number(axisInput.getBigInt64Array()[0]);\n const axis = ShapeUtil.normalizeAxis(axisValue, rank);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const index = ` i32(${input.indicesGet('inputIndices', 'uniforms.axis')}) `;\n const max = getElementAt('uniforms.input_shape', 'uniforms.axis', rank);\n const lowerLimit = attributes.reverse ? index + (attributes.exclusive ? ' + 1' : '') : '0';\n const upperLimit = attributes.reverse ? max : index + (attributes.exclusive ? '' : ' + 1');\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var inputIndices = ${output.offsetToIndices('global_idx')};\n var sum = ${output.type.value}(0);\n let first : i32 = ${lowerLimit};\n let last : i32 = ${upperLimit};\n for (var i : i32 = first; i < last; i++) {\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(i)')};\n sum = sum + ${input.getByIndices('inputIndices')};\n }\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'CumSum',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: inputShape, dataType: inputType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: axis},\n ...createTensorShapeVariables(inputShape, inputShape)\n ]\n\n }),\n getShaderSource\n };\n };\n\n\nexport const cumsum = (context: ComputeContext, attributes: CumSumAttributes): void => {\n const inputShape = context.inputs[0].dims;\n const inputType = context.inputs[0].dataType;\n const axis = context.inputs[1];\n context.compute(createCumsumProgramInfo(inputType, inputShape, axis, attributes), {inputs: [0]});\n};\n\nexport const parseCumSumAttributes = (attributes: Record): CumSumAttributes => {\n const exclusive = attributes.exclusive as number === 1;\n const reverse = attributes.reverse as number === 1;\n return createAttributeWithCacheKey({exclusive, reverse});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface DepthToSpaceAttributes extends FormatAttributes, AttributeWithCacheKey {\n readonly blocksize: number;\n readonly mode: string;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('DepthToSpace requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('DepthToSpace requires 4D input.');\n }\n};\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nconst createDepthToSpaceProgramInfo = (inputTensor: TensorView, attributes: DepthToSpaceAttributes): ProgramInfo => {\n let n: number, h: number, w: number, c: number;\n let shape: number[];\n let perm: number[];\n const isChannelLast = attributes.format === 'NHWC';\n const blocksize = attributes.blocksize;\n const isDCRmode = attributes.mode === 'DCR';\n if (isChannelLast) {\n [n, h, w, c] = inputTensor.dims;\n shape = isDCRmode ? [n, h, w, blocksize, blocksize, c / (blocksize ** 2)] :\n [n, h, w, c / (blocksize ** 2), blocksize, blocksize];\n perm = isDCRmode ? [0, 1, 3, 2, 4, 5] : [0, 1, 4, 2, 5, 3];\n } else {\n [n, h, w, c] = [inputTensor.dims[0], inputTensor.dims[2], inputTensor.dims[3], inputTensor.dims[1]];\n shape = isDCRmode ? [n, blocksize, blocksize, c / (blocksize ** 2), h, w] :\n [n, c / (blocksize ** 2), blocksize, blocksize, h, w];\n perm = isDCRmode ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n }\n const reshapedInputTensor = inputTensor.reshape(shape);\n const reshapedInputRank = reshapedInputTensor.dims.length;\n const inputDataType = inputTensor.dataType;\n\n const reshapedInput = inputVariable('a', inputDataType, reshapedInputRank);\n const permedOutput = outputVariable('output', inputDataType, reshapedInputRank);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(reshapedInput, permedOutput)}\n\n ${permFunctionBody(perm, reshapedInputRank, reshapedInput, permedOutput)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${permedOutput.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${permedOutput.setByOffset('global_idx', reshapedInput.getByIndices('aIndices'))}\n }`;\n\n return {\n name: 'DepthToSpace',\n shaderCache: {hint: `${inputTensor.dims};${attributes.blocksize};${attributes.mode}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputShape = isChannelLast ? 
[n, h * blocksize, w * blocksize, c / (blocksize ** 2)] :\n [n, c / (blocksize ** 2), h * blocksize, w * blocksize];\n const outputSize = ShapeUtil.size(outputShape);\n const shapeBeforePerm = reshapedInputTensor.dims;\n const shapeAfterPerm = ShapeUtil.sortBasedOnPerm(shapeBeforePerm, perm);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(shapeBeforePerm, shapeAfterPerm)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const depthToSpace = (context: ComputeContext, attributes: DepthToSpaceAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createDepthToSpaceProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseDepthToSpaceAttributes = (attributes: Record): DepthToSpaceAttributes =>\n createAttributeWithCacheKey({\n blocksize: attributes.blocksize as number,\n mode: attributes.mode as string,\n format: attributes.format as 'NHWC' | 'NCHW'\n });\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface EinsumAttributes extends AttributeWithCacheKey {\n readonly equation: string;\n}\n// The equation attribute value is a string which consists of left hand side (LHS) and optionally right hand side (RHS)\n// separated by '->'. Ex. \"ij,jk -> ik\" expresses matrix multiplication\n// \"ij->ji\" expresses matrix transpose\n// \"ii->i\" diagonal elements of a square matrix\n// LHS consists of a sequence of terms separated by commas. Each term corresponds to an input variable.\n// Each symbol corresponds to a dimension in the input variable. The symbol can be either a letter, 'a' to 'z' or 'A' to\n// 'Z' or '...' 
to represent arbitrary dimensions.\n\nconst symbolPattern =\n '[a-zA-Z]|\\\\.\\\\.\\\\.'; // The pattern each symbol in each term in the symbolic equation should match\nconst termPattern = '(' + symbolPattern + ')+'; // The pattern each term in the symbolic equation should match\nconst termPatternOnly = '^' + termPattern + '$'; // The patterns only matchs a term begin to end.\nconst lhsPattern = '(' + termPattern + ',)*' + termPattern; // The pattern the LHS should match\nconst lhsPatternOnly = '^' + lhsPattern + '$'; // The patterns only matchs a LHS begin to end.\n\ninterface SymbolInfo {\n count: number; // Symbol corresponding to a dimmension of an input\n inputIndices: number[]; // Number of input variables the symbol corresponds to\n dimValue: number; // Number of dimensions the symbol corresponds to\n}\n\nclass EinsumTerm {\n constructor(inputIndex = -1) {\n this.symbolToIndices = new Map();\n this.inputIndex = inputIndex;\n }\n\n // Add a symbol to the term\n addSymbol(symbol: string, index: number) {\n let value = this.symbolToIndices.get(symbol);\n if (value === undefined) {\n value = [index];\n } else {\n value.push(index);\n }\n this.symbolToIndices.set(symbol, value);\n }\n\n symbolToIndices: Map; // Map from symbol to dimensions of the input corresponding to the term\n inputIndex: number; // -1 for output and 0, 1, 2, ... for inputs\n}\n\nclass EinsumEquation {\n constructor(inputs: readonly TensorView[], public readonly equation: string) {\n this.hasEllipsis = false;\n this.symbolToInfo = new Map();\n this.lhs = new Array();\n this.outputDims = [];\n // As rhs needs to be updated allow using let instead of const for both lhs and rhs.\n // eslint-disable-next-line prefer-const\n let [lhs, rhs] = equation.includes('->') ? equation.split('->', 2) : [equation, ''];\n if (!lhs.match(RegExp(lhsPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const inputTerms = lhs.split(',');\n inputTerms.forEach((inputTerm, index) => {\n const dims = inputs[index].dims.slice();\n if (!inputTerm.match(RegExp(termPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const einsumTerm = this.processTerm(inputTerm, true, dims, index);\n this.lhs.push(einsumTerm);\n });\n\n // Initialize the RHS if not specified\n if (rhs === '') {\n // Construct RHS from LHS terms/symbols\n rhs += [...this.symbolToInfo.entries()]\n .filter(([sym, info]) => (info.count === 1 || sym === '...'))\n .map(([sym]) => sym)\n .join('');\n } else {\n if (!rhs.match(RegExp(termPattern))) {\n throw new Error('Invalid RHS');\n }\n }\n\n // Compute output dims\n const rhsSymbols = rhs.match(RegExp(symbolPattern, 'g'));\n rhsSymbols?.forEach((symbol) => {\n if (symbol === '...') {\n this.outputDims = this.outputDims.concat(this.ellipsisDims);\n } else {\n const info = this.symbolToInfo.get(symbol);\n if (info === undefined) {\n throw new Error('Invalid RHS symbol');\n }\n this.outputDims.push(info.dimValue);\n }\n });\n this.rhs = this.processTerm(rhs, false, this.outputDims);\n } // End of EinsumEqation constructor\n\n // Add a symbol to the equation\n addSymbol(symbol: string, dimValue: number, inputIndex: number) {\n let info = this.symbolToInfo.get(symbol);\n if (info !== undefined) {\n if (info.dimValue !== dimValue && info.count !== 1) {\n throw new Error('Dimension mismatch');\n } else {\n info.count++;\n info.inputIndices.push(inputIndex);\n }\n } else {\n info = {count: 1, dimValue, inputIndices: [inputIndex]};\n }\n this.symbolToInfo.set(symbol, info);\n }\n\n // Process one input/output term\n 
processTerm(term: string, isInput: boolean, dims: readonly number[], index = -1): EinsumTerm {\n const rank = dims.length;\n let ellipsis = false;\n let ellipsisDims = [];\n let nextDim = 0;\n // For output empty string is allowed because the output may be reduced to a scalar value\n if (!term.match(RegExp(termPatternOnly)) && (!isInput && term !== '')) {\n throw new Error('Invalid LHS term');\n }\n const indexSymbols = term.match(RegExp(symbolPattern, 'g'));\n const einsumTerm = new EinsumTerm(index);\n // symbol can be either a lettre, 'a' to 'z' or 'A' to 'Z', or '...'\n indexSymbols?.forEach((symbol: string, i: number) => {\n if (symbol === '...') {\n if (ellipsis) {\n throw new Error('Only one ellipsis is allowed per input term');\n }\n ellipsis = true;\n const ellipsisDimLength = rank - indexSymbols.length + 1;\n if (ellipsisDimLength < 0) {\n throw new Error('Ellipsis out of bounds');\n }\n ellipsisDims = dims.slice(nextDim, nextDim + ellipsisDimLength);\n if (this.hasEllipsis) {\n if (this.ellipsisDims.length !== ellipsisDims.length ||\n this.ellipsisDims.toString() !== ellipsisDims.toString()) {\n throw new Error('Ellipsis dimensions mismatch');\n }\n } else if (isInput) {\n this.hasEllipsis = true;\n this.ellipsisDims = ellipsisDims;\n } else {\n throw new Error('Ellipsis must be specified in the LHS');\n }\n // Add '0', '1', '2', '3', '4', etc to represent ellipsis dimensions to avoid special handling\n for (let j = 0; j < ellipsisDims.length; j++) {\n const symbol = String.fromCharCode('0'.charCodeAt(0) + j);\n einsumTerm.addSymbol(symbol, i + j);\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n } else {\n einsumTerm.addSymbol(symbol, i + (this.hasEllipsis ? this.ellipsisDims.length - 1 : 0));\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n });\n return einsumTerm;\n }\n\n symbolToInfo: Map; // All symbols in the equation\n hasEllipsis: boolean; // The equation has ellipsis or not\n ellipsisDims: number[]; // The dimensions of the equation ellipsis corresponds to.\n lhs: EinsumTerm[]; // Terms on the left-hand side of the equation\n rhs: EinsumTerm; // Term on the right-hand side of the equation\n outputDims: number[]; // Output dimensions of the equation\n} // End of class EinsumEquation\n\nconst appendMax = (name: string): string => name + '_max';\n\nconst createEinsumProgramInfo =\n (inputShapes: Array, dataType: number, einsumEquation: EinsumEquation,\n outputShape: readonly number[]): ProgramInfo => {\n const ranks = inputShapes.map((dims) => dims.length);\n const inputVars = ranks.map((rank, index) => inputVariable(`input${index}`, dataType, rank));\n const outputSize = ShapeUtil.size(outputShape);\n const output = outputVariable('output', dataType, outputShape.length);\n const uniformsSymbols =\n [...einsumEquation.symbolToInfo.keys()].filter((symbol) => !einsumEquation.rhs.symbolToIndices.has(symbol));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = [];\n const initProd = 'var prod = 1.0;';\n const initSum = 'var sum = 0.0;';\n const updateSum = 'sum += prod;';\n const reduceOpsSetIndices: string[] = [];\n const reduceOpsLoopHeaders: string[] = [];\n const reduceOpsLoopFooters: string[] = [];\n const reduceOpCompute: string[] = [];\n const isReduceOpsWithoutLoop = einsumEquation.symbolToInfo.size === einsumEquation.rhs.symbolToIndices.size;\n einsumEquation.symbolToInfo.forEach((info, symbol) => {\n if (einsumEquation.rhs.symbolToIndices.has(symbol)) {\n const outputIndex = 
einsumEquation.rhs.symbolToIndices.get(symbol)?.[0];\n if (outputIndex !== undefined) {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n idxCopy.push(`${\n inputVars[i].indicesSet(\n `input${i}Indices`, index, output.indicesGet('outputIndices', outputIndex))}`);\n });\n }\n });\n }\n } else {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n reduceOpsSetIndices.push(`${inputVars[i].indicesSet(`input${i}Indices`, index, `${symbol}`)}`);\n });\n reduceOpCompute.push(`prod *= ${inputVars[i].getByIndices(`input${i}Indices`)};`);\n }\n });\n reduceOpsLoopHeaders.push(\n `for(var ${symbol}: u32 = 0; ${symbol} < uniforms.${appendMax(symbol)}; ${symbol}++) {`);\n reduceOpsLoopFooters.push('}');\n }\n });\n const reduceOps = isReduceOpsWithoutLoop ?\n [\n ...idxCopy,\n `let sum = ${inputVars.map((inputVar, i) => inputVar.getByIndices(`input${i}Indices`)).join(' * ')};`\n ] :\n [\n ...idxCopy,\n initSum,\n ...reduceOpsLoopHeaders,\n ...reduceOpsSetIndices,\n initProd,\n ...reduceOpCompute,\n updateSum,\n ...reduceOpsLoopFooters,\n ];\n return `\n ${\n shaderHelper\n .registerUniforms(uniformsSymbols.map((symbol) => ({name: `${appendMax(symbol)}`, type: 'u32'})))\n .registerUniform('outputSize', 'u32')\n .declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${output.offsetToIndices('global_idx')};\n ${inputVars.map((_var, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\\n')}\n ${reduceOps.join('\\n')};\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'Einsum',\n shaderCache: {hint: einsumEquation.equation, inputDependencies: inputShapes.map(() => 'rank')},\n getRunData: () => {\n // The symbols from uniformSymbols array are guaranteed to exist in einsumEquations.symbolToInfo map. 
The\n // filter is added to make sure that dimValue is never 0.\n const programUniformsInit: ProgramUniform[] =\n uniformsSymbols.filter((symbol) => einsumEquation.symbolToInfo.has(symbol))\n .map(\n (symbol) =>\n ({type: DataType.uint32, data: einsumEquation.symbolToInfo.get(symbol)?.dimValue || 0}));\n programUniformsInit.push({type: DataType.uint32, data: outputSize});\n const programUniforms: ProgramUniform[] =\n inputShapes.map((dims, _) => [...createTensorShapeVariables(dims)])\n .reduce((acc, inputProgramUniforms) => acc.concat(inputProgramUniforms), programUniformsInit);\n programUniforms.push(...createTensorShapeVariables(outputShape));\n return ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n });\n },\n getShaderSource,\n };\n };\n\nexport const einsum = (context: ComputeContext, attributes: EinsumAttributes): void => {\n const einsumEquation = new EinsumEquation(context.inputs, attributes.equation);\n const outputShape = einsumEquation.outputDims;\n const inputShapes = context.inputs.map((input, _) => input.dims);\n context.compute(createEinsumProgramInfo(inputShapes, context.inputs[0].dataType, einsumEquation, outputShape));\n};\n\nexport const parseEinsumAttributes = (attributes: Record): EinsumAttributes => {\n const equation = (attributes.equation as string).replace(/\\s+/g, '');\n return createAttributeWithCacheKey({equation});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Expand requires 2 input.');\n }\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n\n let shapeIndex = shape.length < inputShape.length ? 0 : shape.length - inputShape.length;\n let inputShapeIndex = inputShape.length < shape.length ? 0 : inputShape.length - shape.length;\n for (; shapeIndex < shape.length && inputShapeIndex < inputShape.length; ++shapeIndex, ++inputShapeIndex) {\n if (shape[shapeIndex] !== inputShape[inputShapeIndex] && shape[shapeIndex] !== 1 &&\n inputShape[inputShapeIndex] !== 1) {\n throw new Error('Expand requires shape to be broadcastable to input');\n }\n }\n};\n\nconst getAdjustedShape = (shape1: readonly number[], shape2: readonly number[]): number[] => {\n const diff = shape1.length - shape2.length;\n const shape: number[] = [];\n for (let i = 0; i < diff; ++i) {\n shape.push(shape1[i]);\n }\n for (let i = 0; i < shape2.length; ++i) {\n shape.push(shape2[i] === 1 ? shape1[i + diff] : shape2[i]);\n }\n return shape;\n};\n\nconst calculateOutputShape = (inputShape: readonly number[], shape: readonly number[]): number[] =>\n (inputShape.length > shape.length) ? 
getAdjustedShape(inputShape, shape) : getAdjustedShape(shape, inputShape);\n\n\nconst createExpandProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n const outputShape: number[] = calculateOutputShape(inputShape, shape);\n const dataType = inputs[0].dataType;\n const components = dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', dataType, inputShape.length, components);\n const output = outputVariable('output', dataType, outputShape.length, components);\n let assignment: string;\n if (dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n let offset${x} = ${input.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${input.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n ${output.setByOffset('global_idx', 'data')}\n }`;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let inputOffset = ${input.broadcastedIndicesToOffset('outputIndices', output)};\n ${output.setByOffset('global_idx', input.getByOffset('inputOffset'))}\n }`;\n }\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}`;\n };\n\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)];\n return {\n name: 'Expand',\n shaderCache: {hint: `${outputShape.length}`, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const expand = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createExpandProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType, UniformsArrayType, WORKGROUP_SIZE} from './common';\nimport * as unary from './unary-op';\n\n// GELU is defined as Y=0.5*X*(1+tanh(0.797885*X+0.035677*X*X*X)), where X may pre-add a bias.\n\nconst createFastGeluProgramInfo = (inputTensors: readonly TensorView[]): ProgramInfo => {\n const dataType = inputTensors[0].dataType;\n const outputSize = ShapeUtil.size(inputTensors[0].dims);\n const biasLength = ShapeUtil.size(inputTensors[1].dims);\n // can only use vec4 when bias length is multiple of 4\n const useVec4 = biasLength % 4 === 0;\n const getShaderSource = (shaderHelper: ShaderHelper): string => {\n const x = inputVariable('x', dataType, [1], 4);\n const bias = inputVariable('bias', dataType, [1], 4);\n const y = outputVariable('y', dataType, [1], 4);\n\n const uniforms: UniformsArrayType = [{name: 'output_vec_size', type: 'u32'}, {name: 'bias_size', type: 'u32'}];\n\n const singleElementBias = (i: 0|1|2|3) => `\n let bias${i}_offset: u32 = (global_idx * 4 + ${i}) % uniforms.bias_size;\n let bias${i} = ${bias.getByOffset(`bias${i}_offset / 4`)}[bias${i}_offset % 4];`;\n const biasGetExpression = useVec4 ?\n `\n let bias = ${bias.getByOffset('global_idx % (uniforms.bias_size / 4)')};` :\n `${singleElementBias(0)}${singleElementBias(1)}${singleElementBias(2)}${singleElementBias(3)}\n let bias = ${x.type.value}(bias0, bias1, bias2, bias3);`;\n\n return `${shaderHelper.registerUniforms(uniforms).declareVariables(x, bias, y)}\n\n ${unary.fastGeluImpl(tensorTypeToWsglValueType(dataType))}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_vec_size')}\n\n let x = ${x.getByOffset('global_idx')};\n ${biasGetExpression}\n let x_in = x + bias;\n ${y.setByOffset('global_idx', unary.fastGeluExpression('x_in'))}\n }`;\n };\n\n return {\n name: 'FastGeluWithBias',\n shaderCache: {hint: `${useVec4}`, inputDependencies: ['type', 'type']},\n getShaderSource,\n getRunData: (inputs) => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n programUniforms:\n [{type: DataType.uint32, data: Math.ceil(outputSize / 4)}, {type: DataType.uint32, data: biasLength}],\n dispatchGroup: {x: Math.ceil(outputSize / WORKGROUP_SIZE / 4)}\n })\n };\n};\n\nexport const fastGelu = (context: ComputeContext): void => {\n if (context.inputs.length < 2 || ShapeUtil.size(context.inputs[1].dims) === 0) {\n unary.fastGelu(context);\n } else {\n context.compute(createFastGeluProgramInfo(context.inputs));\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n};\n\nconst createGatherProgramInfo = (inputs: readonly TensorView[], attributes: GatherAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const indicesShape = inputs[1].dims;\n\n const inputRank = inputShape.length;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n\n const outputShape = inputShape.slice(0);\n outputShape.splice(axis, 1, ...indicesShape);\n\n const axisDimLimit = inputShape[axis];\n const components = inputs[0].dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}, ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const data = inputVariable('data', inputs[0].dataType, inputs[0].dims.length, components);\n const indices = inputVariable('inputIndices', inputs[1].dataType, inputs[1].dims.length);\n const output = outputVariable('output', inputs[0].dataType, outputShape.length, components);\n\n const calcDataIndices = (x: number|string): string => {\n const indicesRank = indicesShape.length;\n let calcStr = `var indicesIndices${x} = ${indices.type.indices}(0);`;\n for (let i = 0; i < indicesRank; i++) {\n calcStr += `${indicesRank > 1 ? `indicesIndices${x}[${i}]` : `indicesIndices${x}`} = ${\n outputShape.length > 1 ? `outputIndices${x}[uniforms.axis + ${i}]` : `outputIndices${x}`};`;\n }\n calcStr += `\n var idx${x} = ${indices.getByIndices(`indicesIndices${x}`)};\n if (idx${x} < 0) {\n idx${x} = idx${x} + uniforms.axisDimLimit;\n }\n var dataIndices${x} : ${data.type.indices};\n `;\n for (let i = 0, j = 0; i < inputRank; i++) {\n if (i === axis) {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = u32(idx${x});`;\n j += indicesRank;\n } else {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = ${\n outputShape.length > 1 ? 
`outputIndices${x}[${j}]` : `outputIndices${x}`};`;\n j++;\n }\n }\n return calcStr;\n };\n let assignment: string;\n if (inputs[0].dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n ${calcDataIndices(x)};\n let offset${x} = ${data.indicesToOffset(`dataIndices${x}`)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${data.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var value = vec4(0);\n ${singleAssignment('value', 0, 'u32')}\n ${singleAssignment('value', 1, 'u32')}\n ${singleAssignment('value', 2, 'u32')}\n ${singleAssignment('value', 3, 'u32')}\n ${output.setByOffset('global_idx', 'value')}\n `;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n ${calcDataIndices('')};\n let value = ${data.getByIndices('dataIndices')};\n ${output.setByOffset('global_idx', 'value')};\n `;\n }\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(data, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n ${assignment}\n }`;\n };\n return {\n name: 'Gather',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank', 'rank']},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGatherAttributes = (attributes: Record): GatherAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gather = (context: ComputeContext, attributes: GatherAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherElementsAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('GatherElements requires 2 inputs.');\n }\n\n if (inputs[0].dims.length < 1) {\n throw new Error('GatherElements requires that the data input be rank >= 1.');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error(`GatherElements requires that the data input and\n indices input tensors be of same rank.`);\n }\n};\n\nconst createGatherElementsProgramInfo =\n (inputs: readonly TensorView[], attributes: GatherElementsAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputOutputDataType = inputs[0].dataType;\n const inputRank = inputShape.length;\n\n const indicesShape = inputs[1].dims;\n const indicesDataType = inputs[1].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n const axisDimLimit = inputShape[axis];\n\n const outputShape = indicesShape.slice(0);\n const outputSize = ShapeUtil.size(outputShape);\n\n const input = inputVariable('input', inputOutputDataType, inputRank);\n const indices = inputVariable('indicesInput', indicesDataType, indicesShape.length);\n const output = outputVariable('output', inputOutputDataType, outputShape.length);\n\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}\n ];\n programUniforms.push(...createTensorShapeVariables(inputShape, indicesShape, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n // int64 indices would be treated as little endian i32 with assumption they fall in i32 limits\n // That assumption is safe as it's not possible to allocate >2gb buffer for input tensor\n // Input data will be treated as u32 or two u32 for 8-byte tensors\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n\n var idx = ${indices.getByOffset('global_idx')};\n if (idx < 0) {\n idx = idx + uniforms.axisDimLimit;\n }\n var inputIndices = ${input.type.indices}(outputIndices);\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(idx)')};\n let value = ${input.getByIndices('inputIndices')};\n\n ${output.setByOffset('global_idx', 'value')};\n }`;\n\n return {\n name: 'GatherElements',\n shaderCache: {inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const 
parseGatherElementsAttributes = (attributes: Record): GatherElementsAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gatherElements = (context: ComputeContext, attributes: GatherElementsAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherElementsProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {GemmUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (inputs.length < 2 || inputs.length > 3) {\n throw new Error('Invaid input number.');\n }\n\n // 'C' can be of dimensionality 0, 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length > 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].dataType !== inputs[1].dataType) ||\n (inputs.length === 3 && inputs[0].dataType !== inputs[2].dataType)) {\n throw new Error('Input types are mismatched');\n }\n};\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n}\n\nconst createGemmProgramInfo = (inputs: readonly TensorView[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N, K] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}, {type: DataType.float, data: attributes.alpha},\n {type: DataType.float, data: attributes.beta}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (inputs.length === 3) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n let line = '';\n if (attributes.transA && attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[n * uniforms.K + k];';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[k * uniforms.N + n];';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[n * uniforms.K + k];';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[k * uniforms.N + n];';\n }\n\n const calculateAlpha = attributes.alpha === 1 ? 
'' : 'value *= uniforms.alpha;';\n const a = inputVariable('a', inputs[0].dataType, inputs[0].dims);\n const b = inputVariable('b', inputs[1].dataType, inputs[1].dims);\n const dataType = a.type.value;\n let c: IndicesHelper|null = null;\n const variables = [a, b];\n if (inputs.length === 3) {\n c = inputVariable('c', inputs[2].dataType, inputs[2].dims.length);\n variables.push(c);\n }\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n variables.push(output);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'K', type: 'u32'},\n {name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let m = global_idx / uniforms.N;\n let n = global_idx % uniforms.N;\n\n var value = ${dataType}(0);\n for (var k: u32 = 0u; k < uniforms.K; k++) {\n ${line}\n }\n\n ${calculateAlpha}\n ${(() => {\n if (c != null) {\n return `let cOffset = ${c.broadcastedIndicesToOffset('vec2(m, n)', output)}; value += ${\n dataType}(uniforms.beta) * ${c.getByOffset('cOffset')};`;\n }\n return '';\n })()}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Gemm',\n shaderCache: {hint: `${attributes.cacheKey}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGemmAttributes = (attributes: Record): GemmAttributes => {\n const transA = attributes.transA as boolean;\n const transB = attributes.transB as boolean;\n const alpha = attributes.alpha as number;\n const beta = attributes.beta as number;\n return {transA, transB, alpha, beta, cacheKey: `${attributes.transA};${attributes.transB};${attributes.alpha === 1}`};\n};\n\nexport const gemm = (context: ComputeContext, attributes: GemmAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createGemmProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, GpuDataType, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nconst getInput = (inputs: readonly TensorView[], i: number) =>\n (inputs.length > i) && (inputs[i].dims.length > 0) && (ShapeUtil.size(inputs[i].dims)) > 0 ? 
inputs[i] : undefined;\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = getInput(inputs, 1);\n const value = getInput(inputs, 2);\n const bias = getInput(inputs, 3);\n const keyPaddingMask = getInput(inputs, 4);\n const relativePositionBias = getInput(inputs, 5);\n const pastKey = getInput(inputs, 6);\n const pastValue = getInput(inputs, 7);\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // key_padding_mask (K/V) : (B) or (2*B + 1) or (B, L) or None\n // relative_position_bias : (B, 1, S, L)\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // bias (Q/K/V) : (D + D + D_v)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // bias (Q/K/V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n // bias (Q/K/V) : None or (D + D + D_v)\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? 
query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n if (pastKey && pastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastKey.dims[0] !== batchSize || pastKey.dims[1] !== attributes.numHeads || pastKey.dims[3] !== headSize) {\n throw new Error('Input \"past_key\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastValue.dims[0] !== batchSize || pastValue.dims[1] !== attributes.numHeads ||\n pastValue.dims[3] !== headSize) {\n throw new Error('Input \"past_value\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastKey.dims[2] !== pastValue.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have same dim 2 (past_sequence_length)');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n } else if (pastKey || pastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (key.dims[2] !== query.dims[2]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 2 (hidden_size)');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n }\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n if (bias) {\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimension');\n }\n\n if (value) {\n if (query.dims.length === 5 && query.dims[3] === 2) {\n throw new Error('bias is not allowed for packed kv.');\n }\n }\n }\n\n let maskType: AttentionMaskType = 
AttentionMaskType.none;\n if (keyPaddingMask) {\n maskType = AttentionMaskType.maskUnknown;\n const maskDims = keyPaddingMask.dims;\n if (maskDims.length === 1) {\n if (maskDims[0] === batchSize) {\n maskType = AttentionMaskType.mask1dKeySeqLen;\n } else if (maskDims[0] === 3 * batchSize + 2) {\n maskType = AttentionMaskType.mask1DKeySeqLenStart;\n }\n } else if (maskDims.length === 2 && maskDims[0] === batchSize && maskDims[1] === kvSequenceLength) {\n maskType = AttentionMaskType.mask2dKeyPadding;\n }\n if (maskType === AttentionMaskType.maskUnknown) {\n throw new Error('Input \"key_padding_mask\" shape shall be (batch_size) or (batch_size, kv_sequence_length)');\n }\n throw new Error('Mask not supported');\n }\n\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n if (keyPaddingMask) {\n throw new Error('Key padding mask is not supported');\n }\n\n if (relativePositionBias) {\n if (relativePositionBias.dims.length !== 4) {\n throw new Error('Input \"relative_position_bias\" is expected to have 4 dimensions');\n }\n if ((relativePositionBias.dims[0] !== batchSize && relativePositionBias.dims[0] !== 1) ||\n relativePositionBias.dims[1] !== attributes.numHeads || relativePositionBias.dims[2] !== sequenceLength ||\n relativePositionBias.dims[3] !== totalSequenceLength) {\n throw new Error('Input \"relative_position_bias\" shape (batch_size, 1, sequence_length, kv_sequence_length)');\n }\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n };\n};\n\nexport const parseMultiHeadAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst addBiasTranspose =\n (context: ComputeContext, qkv: TensorView, bias: TensorView, batchSize: number, sequenceLength: number,\n hiddenSize: number, biasOffset: number) => {\n const outputShape = [batchSize, sequenceLength, hiddenSize];\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: biasOffset},\n {type: DataType.uint32, data: hiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: 
ShaderHelper) => {\n const output = outputVariable('qkv_with_bias', qkv.dataType, outputShape);\n const qkvInput = inputVariable('qkv', qkv.dataType, outputShape);\n const biasInput = inputVariable('bias', bias.dataType, outputShape);\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'bias_offset', type: 'u32'}, {name: 'hidden_size', type: 'u32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(qkvInput, biasInput, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset;\n\n qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx];\n }`;\n };\n\n return context.compute(\n {\n name: 'MultiHeadAttentionAddBias',\n shaderCache: {inputDependencies: ['type', 'type']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: qkv.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [qkv, bias], outputs: [-1]})[0];\n };\n\nexport const maybeTransposeToBNSHAndAddBias =\n (context: ComputeContext, batchSize: number, numHeads: number, sequenceLength: number, headSize: number,\n input: TensorView, bias?: TensorView, biasOffset?: number) => {\n // const newDims = [];\n\n let reshapedInput = input;\n if (!bias) {\n if (input.dims.length === 3) {\n reshapedInput = input.reshape([batchSize, sequenceLength, numHeads, headSize]);\n }\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n } else {\n if (sequenceLength === 1) {\n throw new Error('AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV');\n } else {\n reshapedInput =\n addBiasTranspose(context, input, bias, batchSize, sequenceLength, numHeads * headSize, biasOffset!);\n reshapedInput = reshapedInput.reshape([batchSize, sequenceLength, numHeads, headSize]);\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n }\n }\n };\n\nexport const multiHeadAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n const query = context.inputs[0];\n const key = getInput(context.inputs, 1);\n const value = getInput(context.inputs, 2);\n const bias = getInput(context.inputs, 3);\n const keyPaddingMask = getInput(context.inputs, 4);\n const relativePositionBias = getInput(context.inputs, 5);\n const pastKey = getInput(context.inputs, 6);\n const pastValue = getInput(context.inputs, 7);\n if (query.dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (key?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n // applyAttention expects BNSH inputs\n const kvBNSH = key && value && key.dims.length === 4 && value.dims.length === 4;\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, query, bias, 0);\n\n if (kvBNSH) {\n return applyAttention(\n context, Q, key, value, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params,\n attributes);\n }\n if (!key || !value) {\n throw new Error('key and value must be provided');\n }\n const K = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.headSize, key, bias,\n params.hiddenSize);\n\n const V = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.vHeadSize, value, bias,\n 2 * params.hiddenSize);\n\n applyAttention(\n context, Q, K, V, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst getRepeats = (repeatsTensorView: TensorView): readonly number[] =>\n Array.from(repeatsTensorView.getBigInt64Array(), Number);\n\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 inputs.');\n }\n\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16 &&\n inputs[0].dataType !== DataType.int32 && inputs[0].dataType !== DataType.uint32) {\n throw new Error('Tile only support float, float16, int32, and uint32 data types');\n }\n\n if (inputs[1].dataType !== DataType.int64) {\n throw new Error('Tile `repeats` input should be of int64 data type');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('Tile `repeats` input should be 1-D');\n }\n\n const repeats: readonly number[] = getRepeats(inputs[1]);\n\n if (repeats.length !== inputs[0].dims.length) {\n throw new Error('Tile `repeats` input should have same number of elements as rank of input data tensor');\n }\n};\n\nconst getOutputShape = (inputShape: readonly number[], repeats: readonly number[]): readonly number[] => {\n const outputShape: number[] = [];\n\n for (let i = 0; i < inputShape.length; ++i) {\n outputShape.push(inputShape[i] * repeats[i]);\n }\n\n return outputShape;\n};\n\nexport const createTileProgramInfo = (inputs: readonly TensorView[], shape?: number[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const repeats: readonly number[] = shape == null ? 
getRepeats(inputs[1]) : shape;\n const outputShape = getOutputShape(inputShape, repeats);\n const outputSize = ShapeUtil.size(outputShape);\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, inputShape.length);\n const output = outputVariable('output', dataType, outputShape.length);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const inputShape = ${input.indices(...inputShape)};\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n for (var i = 0; i < ${inputShape.length}; i++) {\n let input_dim_i = ${input.indicesGet('uniforms.input_shape', 'i')};\n let input_dim_value = ${output.indicesGet('output_indices', 'i')} % input_dim_i;\n\n ${input.indicesSet('input_indices', 'i', 'input_dim_value')}\n }\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n\n return {\n name: 'Tile',\n shaderCache: {hint: `${repeats}`, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n }),\n getShaderSource,\n };\n};\n\nexport const tile = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createTileProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {maybeTransposeToBNSHAndAddBias} from './multihead-attention';\nimport {createTileProgramInfo} from './tile';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nexport const validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = inputs[1];\n const value = inputs[2];\n const pastKey = inputs[3];\n const pastValue = inputs[4];\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, 
S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n const hasPastKey = pastKey && pastKey.dims.length !== 0;\n const hasPastValue = pastValue && pastValue.dims.length !== 0;\n // TODO : this should be from attributes.\n const isPastkvBSNH = true;\n if (hasPastKey && hasPastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n if (isPastkvBSNH) {\n // For BSNH\n pastSequenceLength = pastKey.dims[1];\n maxSequenceLength = pastKey.dims[1];\n } else {\n // For BNSH\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n }\n } else if (hasPastKey || hasPastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (query.dims[2] % key.dims[2] !== 0) {\n throw new Error('Dimension 2 of \"query\" should be a multiple of \"key\"');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n 
}\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n const maskType: AttentionMaskType = AttentionMaskType.none;\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.kvNumHeads!),\n numHeads: attributes.numHeads,\n kvNumHeads: attributes.kvNumHeads,\n nReps: attributes.numHeads / attributes.kvNumHeads!,\n pastPresentShareBuffer: false,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n isPastkvBSNH,\n };\n};\n\nconst createConcatProgramInfo =\n (a: TensorView, b: TensorView|undefined, dataType: DataType, params: AttentionParameters): ProgramInfo => {\n const outputShape = [params.batchSize, params.totalSequenceLength, params.kvNumHeads!, params.headSize];\n const component = 4;\n const outputSize = ShapeUtil.size(outputShape) / component;\n const presentSequenceLength = params.totalSequenceLength;\n const output = outputVariable('present_kv', dataType, outputShape.length, component);\n const inputA = inputVariable('new_kv', a.dataType, a.dims.length, component);\n const inputB = b ? inputVariable('past_kv', b.dataType, b.dims.length, component) : undefined;\n\n const H = Math.ceil(params.headSize / component);\n const dispatch = {x: presentSequenceLength, y: a.dims[0], z: 1};\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = b ? 
['rank', 'rank'] : ['rank'];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: params.pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength},\n {type: DataType.uint32, data: params.totalSequenceLength}\n ];\n\n const inputs = [inputA];\n if (inputB) {\n programUniforms.push(\n ...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(b!.dims),\n ...createTensorShapeVariables(outputShape));\n inputs.push(inputB);\n } else {\n programUniforms.push(...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(outputShape));\n }\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'past_seqlen', type: 'u32'}, {name: 'new_seqlen', type: 'u32'},\n {name: 'present_seqlen', type: 'u32'}\n ];\n\n const pastStr = ` let past_batch_stride = uniforms.past_seqlen * num_heads * H;\n var past_head_stride = uniforms.past_seqlen * H;\n if (is_bsnh) {\n past_head_stride = H;\n }\n let in_offset = b * past_batch_stride + s * row_stride + n * past_head_stride + h;\n present_kv[out_offset] = past_kv[in_offset];`;\n const newStr = ` let new_batch_stride = uniforms.new_seqlen * num_heads * H;\n let new_row_stride = num_heads * H;\n let new_head_stride = H;\n let in_offset = b * new_batch_stride + (s - past_seqlen) * new_row_stride + n * new_head_stride + h;\n present_kv[out_offset] = new_kv[in_offset];`;\n const concatStr = b ? `if (s < past_seqlen) {\n ${pastStr}\n } else if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }` :\n `if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }`;\n\n // TODO: handle H * params.kvNumHeads greater than maxComputeInvocationsPerWorkgroup limit.\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputs, output)}\n ${shaderHelper.mainStart([\n H, params.kvNumHeads!, 1\n ])}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var indices = ${output.offsetToIndices('global_idx')};\n let h = local_id.x;\n let n = local_id.y;\n let s = workgroup_id.x;\n let b = workgroup_id.y;\n let num_heads = ${params.kvNumHeads!}u;\n let H = ${H}u;\n\n let present_seqlen = uniforms.present_seqlen;\n let present_batch_stride = present_seqlen * num_heads * H;\n var row_stride = H;\n let is_bsnh = ${params.isPastkvBSNH};\n\n if (is_bsnh) {\n row_stride = num_heads * H;\n }\n var present_head_stride = present_seqlen * H;\n if (is_bsnh) {\n present_head_stride = H;\n }\n\n let past_seqlen = uniforms.past_seqlen;\n\n let out_offset = b * present_batch_stride + s * row_stride + n * present_head_stride + h;\n ${concatStr}\n }`;\n\n return {\n name: 'ConcatPastNew',\n shaderCache: {hint: `${params.kvNumHeads!}${H}${!!b}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: dispatch,\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const parseGroupQueryAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst maybeExpandAndTransposeToBNSH =\n (context: ComputeContext, input: TensorView, pastKV: TensorView|undefined, params: AttentionParameters,\n outputIndex: number) => {\n let reshapedInput = input;\n const numHeads = params.kvNumHeads!;\n const nReps = params.nReps!;\n if (input.dims.length === 3 && 
params.kvSequenceLength !== 0) {\n reshapedInput = input.reshape([params.batchSize, params.kvSequenceLength, numHeads, params.headSize]);\n }\n\n if (pastKV) {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, pastKV, reshapedInput.dataType, params),\n {inputs: [reshapedInput, pastKV], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n } else {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, undefined, reshapedInput.dataType, params),\n {inputs: [reshapedInput], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n }\n if (nReps !== 1) {\n reshapedInput = context.compute(\n createTileProgramInfo([reshapedInput], [1, 1, 1, nReps]), {inputs: [reshapedInput], outputs: [-1]})[0];\n reshapedInput =\n reshapedInput.reshape([params.batchSize, params.totalSequenceLength, numHeads * nReps, params.headSize]);\n }\n\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n };\n\nexport const groupQueryAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (context.inputs[1]?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, context.inputs[0], undefined,\n 0);\n const pastKey = context.inputs[3] && context.inputs[3].dims.length !== 0 ? context.inputs[3] : undefined;\n const pastValue = context.inputs[4] && context.inputs[4].dims.length !== 0 ? context.inputs[4] : undefined;\n const K = maybeExpandAndTransposeToBNSH(context, context.inputs[1], pastKey, params, 1);\n const V = maybeExpandAndTransposeToBNSH(context, context.inputs[2], pastValue, params, 2);\n applyAttention(context, Q, K, V, undefined, undefined, undefined, undefined, undefined, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface InstanceNormAttributes {\n epsilon: number;\n format: 'NHWC'|'NCHW';\n}\n\nconst createInstanceNormProgramInfo =\n (inputs: readonly TensorView[], attributes: InstanceNormAttributes): ProgramInfo => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const axis = 2;\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n const components = getMaxComponents(normSize);\n const normPackedSize = normSize / components;\n const inputShape = [xShape[0], xShape[1], normPackedSize];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'type', 'type'];\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: normSize}, {type: DataType.uint32, data: normPackedSize}];\n programUniforms.push(...createTensorShapeVariables(inputShape, inputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputShape.length, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const output = outputVariable('output', inputs[0].dataType, inputShape.length, components);\n const variables = [x, scale, bias, output];\n const dataType = x.type.value;\n const f32Type = components === 1 ? 
'f32' : `vec${components}`;\n const workgroupSize = 64;\n\n const uniforms: UniformsArrayType = [{name: 'normSize', type: 'u32'}, {name: 'normPackedSize', type: 'u32'}];\n return `\n var meanShared : f32;\n var squaredNormShared : f32;\n var workgroupShared : array<${f32Type}, ${workgroupSize}>;\n const workgroupSize = ${workgroupSize}u;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart(workgroupSize)}\n let norm = global_idx / workgroupSize;\n let batch = norm / uniforms.x_shape[1];\n let channel = norm % uniforms.x_shape[1];\n let localIndex = local_id.x;\n\n // initialize workgroup memory\n var initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n initial = initial + ${f32Type}(${x.get('batch', 'channel', 'h')});\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the mean of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n meanShared = ${sumVector('workgroupShared[0]', components)} / f32(uniforms.normSize);\n }\n workgroupBarrier();\n\n // reinitialize workgroup memory.\n initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let deviation = ${f32Type}(${x.get('batch', 'channel', 'h')}) - ${f32Type}(meanShared);\n initial = initial + deviation * deviation;\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the sum of square of deviation of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n squaredNormShared = ${sumVector('workgroupShared[0]', components)};\n }\n workgroupBarrier();\n\n let invStdDev = inverseSqrt(squaredNormShared / f32(uniforms.normSize) + f32(${attributes.epsilon}));\n let channelScale = invStdDev * f32(${scale.getByOffset('channel')});\n let channelShift = f32(${bias.getByOffset('channel')}) - meanShared * channelScale;\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let value = ${x.get('batch', 'channel', 'h')} * ${dataType}(${f32Type}(channelScale)) + ${dataType}(${\n f32Type}(channelShift));\n ${output.set('batch', 'channel', 'h', 'value')};\n }\n }`;\n };\n return {\n ...{name: 'InstanceNormalization'},\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${attributes.epsilon};${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: normCount},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nconst computeMean =\n (context: ComputeContext, input: TensorView, scale: TensorView, bias: TensorView, n: number, h: number, c: number,\n epsilon: number) => {\n const components = getMaxComponents(c);\n const WG = 64;\n // we will store channel scale and channel shift in [2, components] matrix\n // or in vec2 when components == 1\n const outputType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const sumCastType = components === 1 ? 
'f32' : `vec${components}f`;\n const setOutputValue = (var1: string, var2: string) => `${outputType}(${var1}, ${var2})`;\n const unitsOfWork = n * c / components;\n const wgSize = Math.ceil(h / WG);\n\n const meanInputDependencies: ProgramInputTensorInfoDependency[] = ['type'];\n const meanProgramUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: wgSize}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(h * c / components)}\n ];\n\n const getMeanShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = inputVariable('input', input.dataType, input.dims, components);\n return `\n ${shaderHelper.declareVariables(inputHelper)}\n @group(0) @binding(1) var output : array<${outputType}>;\n struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32};\n @group(0) @binding(2) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart(WG)}\n let currentImageNumber = global_idx / ${WG} / uniforms.C;\n let currentChannelNumber = (global_idx / ${WG}) % uniforms.C;\n let wgOffset = local_id.x * uniforms.wg_size;\n if (wgOffset >= uniforms.H) {\n return;\n }\n let wgMax = min(wgOffset + uniforms.wg_size, uniforms.H);\n\n let offset = currentImageNumber * uniforms.image_size + currentChannelNumber;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = wgOffset; i < wgMax; i++) {\n let value = ${sumCastType}(input[offset + i * uniforms.C]);\n sum += value;\n squaredSum += value * value;\n }\n output[global_idx] = ${setOutputValue('sum', 'squaredSum')};\n }`;\n };\n\n const meanValues = context.compute(\n {\n name: 'InstanceNormComputeMean',\n shaderCache: {hint: `${components}`, inputDependencies: meanInputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, WG, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: n * c / components},\n programUniforms: meanProgramUniforms\n }),\n getShaderSource: getMeanShaderSource,\n },\n {inputs: [input], outputs: [-1]})[0];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: unitsOfWork}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(WG * c / components)}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type', 'type'];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const scaleHelper = inputVariable('scale', scale.dataType, scale.dims, components);\n const biasHelper = inputVariable('bias', bias.dataType, bias.dims, components);\n return `\n @group(0) @binding(0) var input : array<${outputType}>;\n @group(0) @binding(1) var scale : array<${scaleHelper.type.storage}>;\n @group(0) @binding(2) var bias : array<${biasHelper.type.storage}>;\n @group(0) @binding(3) var output : array<${outputType}>;\n struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32};\n @group(0) @binding(4) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.units_of_work')}\n let currentImageNumber = global_idx / uniforms.C;\n let currentChannelNumber = global_idx % uniforms.C;\n\n let offset = currentImageNumber * uniforms.image_size;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = 0; i < min(${WG}, uniforms.H); i++) {\n let value = input[offset + i + currentChannelNumber * ${WG}];\n sum += 
value[0];\n squaredSum += value[1];\n }\n sum = sum / f32(uniforms.H);\n squaredSum = squaredSum / f32(uniforms.H);\n let invStdDev = inverseSqrt(squaredSum - sum * sum + f32(${epsilon}));\n let channelScale = invStdDev * ${sumCastType}(scale[currentChannelNumber]);\n let channelShift = ${sumCastType}(bias[currentChannelNumber]) - sum * channelScale;\n\n output[global_idx] = ${setOutputValue('channelScale', 'channelShift')};\n }`;\n };\n return context.compute(\n {\n name: 'InstanceNormComputeChannelScaleShift',\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${components};${epsilon}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: Math.ceil(unitsOfWork / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [meanValues, scale, bias], outputs: [-1]})[0];\n };\n\nconst createInstanceNormNHWCProgramInfo =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: InstanceNormAttributes) => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const N = xShape[0];\n const C = xShape[xShape.length - 1];\n const H = ShapeUtil.sizeFromDimension(xShape, 1) / C;\n const components = getMaxComponents(C);\n const outputSize = ShapeUtil.size(outputShape) / components;\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: H}, {type: DataType.uint32, data: Math.floor(C / components)}];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n // first compute mean\n const channelScaleShift = computeMean(context, inputs[0], inputs[1], inputs[2], N, H, C, attributes.epsilon);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const scaleType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const scaleCastType = components === 1 ? 
dataType : `vec${components}<${dataType}>`;\n\n const inputHelper = inputVariable('input', inputs[0].dataType, inputs[0].dims, components);\n const outputHelper = outputVariable('output', inputs[0].dataType, outputShape, components);\n\n return `\n @group(0) @binding(0) var input : array<${inputHelper.type.storage}>;\n @group(0) @binding(1) var scaleInput : array<${scaleType}>;\n @group(0) @binding(2) var output : array<${outputHelper.type.storage}>;\n struct Uniforms {H: u32, C : u32};\n @group(0) @binding(3) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n let currentImageNumber = global_idx / (uniforms.C * uniforms.H);\n let currentChannelNumber = global_idx % uniforms.C;\n\n let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber;\n let scale = scaleInput[scaleOffset];\n output[global_idx] = fma(input[global_idx], ${scaleCastType}(scale[0]), ${scaleCastType}(scale[1]));\n }`;\n };\n context.compute(\n {\n name: 'InstanceNormalizationNHWC',\n shaderCache: {hint: `${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [inputs[0], channelScaleShift]});\n };\n\nexport const instanceNorm = (context: ComputeContext, attributes: InstanceNormAttributes): void => {\n if (attributes.format === 'NHWC') {\n createInstanceNormNHWCProgramInfo(context, context.inputs, attributes);\n } else {\n context.compute(createInstanceNormProgramInfo(context.inputs, attributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {castToF32, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType,} from './common';\n\ninterface LayerNormAttributes {\n simplified: boolean;\n axis: number;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 2) {\n throw new Error('layerNorm requires at least 2 inputs.');\n }\n};\n\nconst createLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: LayerNormAttributes, outputCount: number): ProgramInfo => {\n const simplified = attributes.simplified;\n\n const xShape = inputs[0].dims;\n const scale = inputs[1];\n const bias = !simplified && inputs[2];\n\n const outputShape = xShape;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, xShape.length);\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n\n const scaleSize = ShapeUtil.size(scale.dims);\n const biasSize = bias ? 
ShapeUtil.size(bias.dims) : 0;\n if (scaleSize !== normSize || (bias && biasSize !== normSize)) {\n throw new Error(`Size of X.shape()[axis:] == ${normSize}.\n Size of scale and bias (if provided) must match this.\n Got scale size of ${scaleSize} and bias size of ${biasSize}`);\n }\n\n const meanInvStdDevDim: number[] = [];\n for (let i = 0; i < xShape.length; ++i) {\n if (i < axis) {\n meanInvStdDevDim.push(xShape[i]);\n } else {\n meanInvStdDevDim.push(1);\n }\n }\n const components = getMaxComponents(normSize);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: normCount}, {type: DataType.float, data: normSize},\n {type: DataType.uint32, data: Math.floor(normSize / components)},\n {type: DataType.float, data: attributes.epsilon}\n ];\n if (bias) {\n inputDependencies.push('type');\n }\n const hasMeanDataOutput = outputCount > 1;\n const hasInvStdOutput = outputCount > 2;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('scale', scale.dataType, scale.dims, components),\n ];\n if (bias) {\n variables.push(inputVariable('bias', bias.dataType, bias.dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanDataOutput) {\n variables.push(outputVariable('mean_data_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'norm_count', type: 'u32'}, {name: 'norm_size', type: 'f32'},\n {name: 'norm_size_vectorized', type: 'u32'}, {name: 'epsilon', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.norm_count')}\n let offset = global_idx * uniforms.norm_size_vectorized;\n var mean_vector = ${fillVector('f32', components)};\n var mean_square_vector = ${fillVector('f32', components)};\n\n for (var h: u32 = 0u; h < uniforms.norm_size_vectorized; h++) {\n let value = ${castToF32(dataType, components, 'x[h + offset]')};\n mean_vector += value;\n mean_square_vector += value * value;\n }\n let mean = ${sumVector('mean_vector', components)} / uniforms.norm_size;\n let inv_std_dev = inverseSqrt(${sumVector('mean_square_vector', components)} / uniforms.norm_size ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n\n for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) {\n let f32input = ${castToF32(dataType, components, 'x[j + offset]')};\n let f32scale = ${castToF32(dataType, components, 'scale[j]')};\n output[j + offset] = ${variables[0].type.value}((f32input ${simplified ? '' : '- mean'}) * inv_std_dev * f32scale\n ${bias ? `+ ${castToF32(dataType, components, 'bias[j]')}` : ''}\n );\n }\n\n ${hasMeanDataOutput ? 'mean_data_output[global_idx] = mean' : ''};\n ${hasInvStdOutput ? 
'inv_std_output[global_idx] = inv_std_dev' : ''};\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (hasMeanDataOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (hasInvStdOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n\n return {\n name: 'LayerNormalization',\n shaderCache: {hint: `${components};${outputCount};${simplified}`, inputDependencies},\n getRunData: () =>\n ({outputs, dispatchGroup: {x: Math.ceil(normCount / 64 /* workgroup size */)}, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const layerNorm = (context: ComputeContext, attributes: LayerNormAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createLayerNormProgramInfo(context.inputs, attributes, context.outputCount));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType, getTensorElementSize} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\n// TODO support quantization bits not equal to 4\nexport interface MatMulNBitsAttributes extends AttributeWithCacheKey {\n k: number;\n n: number;\n accuracyLevel: number;\n bits: number;\n blockSize: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes): void => {\n if (inputs.length < 3 || inputs.length > 4) {\n throw new Error('MatMulNBits requires 3 or 4 inputs');\n }\n const a = inputs[0];\n const aRank = a.dims.length;\n if (a.dims[aRank - 1] !== attributes.k) {\n throw new Error('The last dim of input shape does not match the k value');\n }\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const b = inputs[1];\n if (!ShapeUtil.areEqual(b.dims, [attributes.n, nBlocksPerCol, blobSize])) {\n throw new Error('The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize');\n }\n const scales = inputs[2];\n const scalesShape = scales.dims;\n if (ShapeUtil.size(scalesShape) !== attributes.n * nBlocksPerCol) {\n throw new Error('scales input size error.');\n }\n if (inputs.length === 4) {\n const zeroPoints = inputs[3];\n const zeroPointsShape = zeroPoints.dims;\n const expectedZeroPointsSize =\n attributes.bits > 4 ? 
(attributes.n * nBlocksPerCol) : attributes.n * Math.floor((nBlocksPerCol + 1) / 2);\n if (ShapeUtil.size(zeroPointsShape) !== expectedZeroPointsSize) {\n throw new Error('zeroPoints input size error.');\n }\n }\n};\n\nexport const createMatMulNBitsProgramInfo =\n (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes,\n maxComputeWorkgroupSizes: [number, number, number], maxComputeWorkgroupStorageSize: number): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const aRank = inputShape.length;\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const dimAOuter = inputShape[aRank - 2];\n const dimInner = attributes.k;\n const dimBOuter = attributes.n;\n const batchDims = inputShape.slice(0, aRank - 2);\n const batchSize = ShapeUtil.size(batchDims);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const blobSizeInWords = blobSize / 4;\n const dataType = inputs[0].dataType;\n const outputNumber = getMaxComponents(dimAOuter);\n const aComponents = getMaxComponents(attributes.k);\n const bComponents = getMaxComponents(blobSizeInWords);\n const elementSize = getTensorElementSize(dataType)!;\n const workgroupOutputSize = dimAOuter * nBlocksPerCol * elementSize;\n const maxNumberOfComponents = Math.floor(maxComputeWorkgroupStorageSize / workgroupOutputSize);\n const useBlockwiseMatMulNBits = nBlocksPerCol <= maxComputeWorkgroupSizes[0] && maxNumberOfComponents > 0;\n const components = (!useBlockwiseMatMulNBits || maxNumberOfComponents >= 4) ? getMaxComponents(dimBOuter) :\n ((maxNumberOfComponents >= 2) && getMaxComponents(dimBOuter) >= 2) ? 2 :\n 1;\n const outputShape = batchDims.concat([dimAOuter, dimBOuter]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n\n const programUniforms: ProgramUniform[] = useBlockwiseMatMulNBits ?\n [] :\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.blockSize}];\n const inputShapeTemp = [batchSize, dimAOuter, dimInner / aComponents];\n const bShape = ShapeUtil.convertShape(inputs[1].dims).slice();\n bShape.splice(-1, 1, blobSizeInWords / bComponents);\n programUniforms.push(...createTensorShapeVariables(inputShapeTemp));\n programUniforms.push(...createTensorShapeVariables(bShape));\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n if (inputs.length === 4) {\n programUniforms.push(...createTensorShapeVariables(ShapeUtil.convertShape(inputs[3].dims)));\n }\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputRank = inputShapeTemp.length;\n const a = inputVariable('a', inputs[0].dataType, inputRank, aComponents);\n const b = inputVariable('b', DataType.uint32, bShape.length, bComponents);\n const scales = inputVariable('scales', inputs[2].dataType, inputs[2].dims.length);\n const inputVariables = [a, b, scales];\n const zeroPoints =\n inputs.length === 4 ? 
inputVariable('zero_points', DataType.uint32, inputs[3].dims.length) : undefined;\n if (zeroPoints) {\n inputVariables.push(zeroPoints);\n }\n const outputRank = outputShapeTemp.length;\n const output = outputVariable('output', inputs[0].dataType, outputRank, components);\n const uniforms: UniformsArrayType = [{name: 'output_size', type: 'u32'}, {name: 'block_size', type: 'u32'}];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const qDqDataType = (() => {\n switch (aComponents) {\n case 1:\n return `array<${dataType}, 8>`;\n case 2:\n return `mat4x2<${dataType}>`;\n case 4:\n return `mat2x4<${dataType}>`;\n default:\n throw new Error(`${aComponents}-component is not supported.`);\n }\n })();\n\n const processOneBlock = `\n for (var word: u32 = 0; word < ${blobSizeInWords}; word += ${bComponents}) {\n ${b.indicesSet('b_indices', '2', 'word')};\n let b_data = ${b.getByIndices('b_indices')};\n for (var i: u32 = 0; i < ${bComponents}; i++) {\n let b_value: u32 = ${bComponents === 1 ? 'b_data' : 'b_data[word + i]'};\n let b_mask: u32 = 0x0F0F0F0Fu;\n let b_value_lower: vec4 = unpack4xU8(b_value & b_mask);\n let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask);\n let b_quantized_values = ${qDqDataType}(${\n Array.from({length: 4}, (_, i) => `${dataType}(b_value_lower[${i}]), ${dataType}(b_value_upper[${i}])`)\n .join(', ')});\n let b_dequantized_values = ${(() => {\n if (aComponents === 1) {\n return `${qDqDataType}(${\n Array.from({length: 8}, (_, i) => `(b_quantized_values[${i}] - zero_point) * scale`).join(', ')});`;\n } else {\n return `(b_quantized_values - ${qDqDataType}(${Array(8).fill('zero_point').join(',')})) * scale;`;\n }\n })()};\n // Number of B elements per 32-bit word is 32/bits = 32/4 = 8\n for (var m: u32 = 0; m < ${useBlockwiseMatMulNBits ? dimAOuter : outputNumber}u; m++) {\n ${a.indicesSet('a_indices', inputRank - 2, useBlockwiseMatMulNBits ? 'm' : `row * ${outputNumber} + m`)};\n ${a.indicesSet('a_indices', inputRank - 1, 'word_offset')};\n var input_offset = ${a.indicesToOffset('a_indices')};\n var a_data: ${qDqDataType};\n for (var j: u32 = 0; j < ${8 / aComponents}; j++) {\n a_data[j] = ${a.getByOffset('input_offset')};\n input_offset++;\n }\n ${useBlockwiseMatMulNBits ? 'workgroup_shared[workgroup_shared_offset + m]' : 'output_values[m]'}${\n components > 1 ? '[c]' : ''} += ${\n Array\n .from(\n {length: 8 / aComponents},\n (_, i) => `${\n aComponents === 1 ? `a_data[${i}] * b_dequantized_values[${i}]` :\n `dot(a_data[${i}], b_dequantized_values[${i}])`}`)\n .join(' + ')};\n }\n word_offset += ${8 / aComponents};\n }\n }`;\n const updateZeroPointIndex = zeroPoints ? `\n zero_point_offset += 4;\n if (zero_point_offset == 32) {\n zero_point_offset = 0;\n zero_point_index++;\n zero_point_word = ${zeroPoints.getByOffset('zero_point_index')};\n }` :\n '';\n\n return useBlockwiseMatMulNBits ? `\n var workgroup_shared: array<${output.type.value}, ${dimAOuter * nBlocksPerCol}>;\n ${shaderHelper.declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart([\n nBlocksPerCol, 1, 1\n ])}\n var a_indices: ${a.type.indices};\n var block = local_id.x;\n var col = workgroup_id.y;\n var batch = workgroup_id.z;\n ${a.indicesSet('a_indices', '0', 'batch')};\n // Two zero points are packed into one byte when uniforms.bits is 4.\n for (var c: u32 = 0; c < ${components}; c++) {\n let col_times_components_plus_c = col * ${components} + c;\n ${\n zeroPoints ? 
`\n var zero_point_bytes_per_col: u32 = (${nBlocksPerCol} + 1) / 2;\n var zero_point_byte_count: u32 = col_times_components_plus_c * zero_point_bytes_per_col + (block >> 0x1u);\n var zero_point_word_index: u32 = zero_point_byte_count >> 0x2u;\n var zero_point_byte_offset: u32 = zero_point_byte_count & 0x3u;\n var zero_point_nibble_offset: u32 = block & 0x1u;\n var zero_point_bits_offset: u32 = (zero_point_byte_offset << 3) + (zero_point_nibble_offset << 2);\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_word_index')} >> zero_point_bits_offset;` :\n ''}\n var b_indices: ${b.type.indices};\n ${b.indicesSet('b_indices', '0', 'col_times_components_plus_c')};\n // The scale and zero points are computed per block.\n var scales_index = col_times_components_plus_c * ${nBlocksPerCol} + block;\n let scale = ${scales.getByOffset('scales_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? '(zero_point_word) & 0xFu' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block * ${attributes.blockSize / aComponents};\n var workgroup_shared_offset: u32 = block * ${dimAOuter};\n ${processOneBlock}\n }\n workgroupBarrier();\n var output_indices: ${output.type.indices};\n var elements_per_thread: u32 = ${Math.ceil(dimAOuter / nBlocksPerCol)};\n ${output.indicesSet('output_indices', '0', 'batch')};\n ${output.indicesSet('output_indices', outputRank - 1, 'col')};\n ${output.indicesSet('output_indices', outputRank - 2, 'local_id.x * elements_per_thread')};\n var output_offset = ${output.indicesToOffset('output_indices')};\n for (var m: u32 = 0u; m < elements_per_thread; m++) {\n var row = m + local_id.x * elements_per_thread;\n if (row < ${dimAOuter}) {\n var output_value: ${output.type.value} = ${output.type.value}(0);\n var workgroup_shared_offset: u32 = row;\n for (var b: u32 = 0u; b < ${nBlocksPerCol}u; b++) {\n output_value += workgroup_shared[workgroup_shared_offset];\n workgroup_shared_offset += ${dimAOuter};\n }\n ${output.setByOffset('output_offset', 'output_value')};\n output_offset += ${dimBOuter / components};\n }\n }\n }` :\n `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var output_values: array<${output.type.value}, ${outputNumber}>;\n var output_indices = ${output.offsetToIndices('global_idx')};\n var col = ${output.indicesGet('output_indices', outputRank - 1)};\n var row = ${output.indicesGet('output_indices', outputRank - 2)};\n var a_indices: ${a.type.indices} = output_indices;\n // Two zero points are packed into one byte because uniforms.bits <= 4.\n // zero_point_offset is either 0 or 4. It is bit offset within one byte.\n // TODO support zero_point_offset for bits > 4\n ${\n zeroPoints ? 
`\n var zero_point_abs_offset = col * ${components} * ((${nBlocksPerCol} + 1) / 2);\n var zero_point_index: u32 = zero_point_abs_offset / 4;\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_index')};\n var zero_point_offset: u32 = (zero_point_abs_offset % 4) * 8;` :\n ''}\n var scale_index = col * ${nBlocksPerCol * components};\n var b_indices: ${b.type.indices};\n for (var c: u32 = 0; c < ${components}; c++) {\n ${b.indicesSet('b_indices', '0', `col * ${components} + c`)};\n var block_offset: u32 = 0;\n for (var block: u32 = 0; block < ${nBlocksPerCol}; block++) {\n // The scale and zero points are computed per block.\n let scale = ${scales.getByOffset('scale_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? 'extractBits(zero_point_word, zero_point_offset, 4)' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block_offset;\n ${processOneBlock}\n scale_index++;\n ${updateZeroPointIndex}\n block_offset += uniforms.block_size / ${aComponents};\n }\n // Drop the trailing 4 bits if the zero_poit_offset is not a byte boundary to align with the next byte.\n ${\n zeroPoints ? `if (zero_point_offset % 8 > 0) {\n ${updateZeroPointIndex}\n }` :\n ''}\n }\n for (var k: u32 = 0u; k < ${outputNumber}u; k++) {\n ${output.indicesSet('output_indices', outputRank - 2, `${outputNumber} * row + k`)};\n ${output.setByIndices('output_indices', 'output_values[k]')}\n }\n }`;\n };\n return {\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n shaderCache: {\n hint: `${attributes.cacheKey};${dimAOuter};${dataType};${inputs.length}`,\n inputDependencies: Array(inputs.length).fill('rank')\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n dispatchGroup: useBlockwiseMatMulNBits ? {x: 1, y: Math.ceil(dimBOuter / components), z: batchSize} :\n {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nexport const matMulNBits = (context: ComputeContext, attributes: MatMulNBitsAttributes): void => {\n validateInputs(context.inputs, attributes);\n const maxComputeWorkgroupSizes: [number, number, number] = context.getMaxComputeWorkgroupSizes();\n const maxComputeWorkgroupStorageSize = context.getMaxComputeWorkgroupStoragesize();\n context.compute(createMatMulNBitsProgramInfo(\n context.inputs, attributes, maxComputeWorkgroupSizes, maxComputeWorkgroupStorageSize));\n};\n\nexport const parseMatMulNBitsAttributes = (attributes: Record): MatMulNBitsAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\ninterface PadAttributes {\n // 0-constant, 1-reflect, 2-edge, 3-wrap\n readonly mode: number;\n readonly value: number;\n readonly pads: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('Too few inputs');\n }\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16) {\n throw new Error('Input type must be float or float16.');\n }\n\n if (inputs.length >= 2) {\n let validPads = inputs[0].dims.length * 2 === inputs[1].dims[0];\n if (inputs.length === 4) {\n validPads = inputs[3].dims[0] * 2 === inputs[1].dims[0];\n }\n if (!validPads) {\n throw new Error('The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].');\n }\n }\n};\n\nconst getPadConstant = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n break;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n break;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n value = ${output.type.value}(uniforms.constant_value);\n for (var i = 0; i < 1; i++) {\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n }\n `;\n};\n\nconst getPadReflect = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = -k;\n }\n {\n let _2n_1 = 2 * (i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1);\n k = k % _2n_1;\n if(k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = _2n_1 - k;\n }\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadEdge = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = 0;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadWrap = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k += i32(${getElementAt('uniforms.x_shape', i, inputRank)}]);\n }\n if (k >= 
i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k -= i32(${getElementAt('uniforms.x_shape', i, inputRank)});\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadSnippet = (output: IndicesHelper, inputRank: number, attributes: PadAttributes): string => {\n switch (attributes.mode) {\n case 0:\n return getPadConstant(output, inputRank, attributes.pads.length);\n case 1:\n return getPadReflect(output, inputRank, attributes.pads.length);\n case 2:\n return getPadEdge(output, inputRank, attributes.pads.length);\n case 3:\n return getPadWrap(output, inputRank, attributes.pads.length);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst createPadProgramInfo = (inputs: readonly TensorView[], attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(inputs[0].dims.slice(), attributes.pads);\n const inputDims = inputs[0].dims;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: attributes.pads}];\n if (attributes.mode === 0) {\n programUniforms.push({type: inputs[0].dataType, data: attributes.value});\n }\n\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('x', inputs[0].dataType, inputDims.length);\n const dataType = input.type.value;\n const padSnippet = getPadSnippet(output, inputDims.length, attributes);\n const uniforms: UniformsArrayType =\n [{name: 'output_size', type: 'u32'}, {name: 'pads', type: 'i32', length: attributes.pads.length}];\n if (attributes.mode === 0) {\n uniforms.push({name: 'constant_value', type: dataType as UniformDataElementType});\n }\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(0);\n ${padSnippet}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Pad',\n shaderCache: {hint: `${attributes.mode}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nconst createPadAttributesFromInputs = (inputs: readonly TensorView[], attributes: PadAttributes): PadAttributes => {\n if (inputs.length > 1) {\n const bigInt64Pads = inputs[1].getBigInt64Array();\n const value = (inputs.length >= 3 && inputs[2].data) ? 
inputs[2].getFloat32Array()[0] : 0.0;\n\n const inputRank = inputs[0].dims.length;\n const updatePads = new Int32Array(2 * inputRank).fill(0);\n if (inputs.length >= 4) {\n const axes = inputs[3].getBigInt64Array();\n for (let i = 0; i < axes.length; i++) {\n updatePads[Number(axes[i])] = Number(bigInt64Pads[i]);\n updatePads[Number(axes[i]) + inputRank] = Number(bigInt64Pads[i + axes.length]);\n }\n } else {\n bigInt64Pads.forEach((v, i) => updatePads[Number(i)] = (Number(v)));\n }\n\n const pads: number[] = [];\n updatePads.forEach(v => pads.push(v));\n\n return {mode: attributes.mode, value, pads};\n } else {\n return attributes;\n }\n};\n\nexport const pad = (context: ComputeContext, attributes: PadAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes = createPadAttributesFromInputs(context.inputs, attributes);\n context.compute(createPadProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\n// TODO: support:\n// - ceil_mode \"test_maxpool_2d_ceil\"\n// - storage_order \"test_maxpool_with_argmax_2d_precomputed_strides\"\n// - [MaxPool] dilations \"test_maxpool_2d_dilations\"\n// - [MaxPool] output[1] \"test_maxpool_with_argmax_2d_precomputed_pads\"\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (env.webgpu.validateInputContent && (!inputs || inputs.length !== 1)) {\n throw new Error('Pool ops requires 1 input.');\n }\n};\n\nconst getAdjustedPoolAttributesAndOutputShape = (\n input: TensorView, attributes: AttributeType, isGlobalOperator: boolean): [AttributeType, number[]] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inputShapeAsChannelFirst = input.dims.slice();\n if (isChannelsLast) {\n inputShapeAsChannelFirst.splice(1, 0, inputShapeAsChannelFirst.pop()!); // Move channel to the second position.\n }\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? (attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShapeAsChannelFirst, kernelShape, strides, dilations, pads);\n\n const outputShapeAsChannelFirst = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShapeAsChannelFirst, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n const outputShapeAsChannelLast = outputShapeAsChannelFirst.slice();\n outputShapeAsChannelLast.push(outputShapeAsChannelLast.splice(1, 1)[0]);\n return [newAttributes, isChannelsLast ? 
outputShapeAsChannelLast : outputShapeAsChannelFirst];\n};\n\nconst getUniformAndPadInfo = (\n outputShape: readonly number[],\n attributes: AttributeType): [ProgramUniform[], UniformsArrayType, boolean, boolean, boolean] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const outputSize = ShapeUtil.size(outputShape);\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: kernelSize}];\n const uniforms: UniformsArrayType = [{name: 'outputSize', type: 'u32'}, {name: 'kernelSize', type: 'u32'}];\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const pwStartEndNotZero = !!(pwStart + pwEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kw},\n {type: DataType.uint32, data: sw},\n {type: DataType.uint32, data: pwStart},\n {type: DataType.uint32, data: pwEnd},\n );\n uniforms.push(\n {name: 'kw', type: 'u32'}, {name: 'sw', type: 'u32'}, {name: 'pwStart', type: 'u32'},\n {name: 'pwEnd', type: 'u32'});\n\n let phStartEndNotZero = false;\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n phStartEndNotZero = !!(phStart + phEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kh}, {type: DataType.uint32, data: sh}, {type: DataType.uint32, data: phStart},\n {type: DataType.uint32, data: phEnd});\n\n uniforms.push(\n {name: 'kh', type: 'u32'}, {name: 'sh', type: 'u32'}, {name: 'phStart', type: 'u32'},\n {name: 'phEnd', type: 'u32'});\n }\n return [programUniforms, uniforms, true, pwStartEndNotZero, phStartEndNotZero];\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n programUniforms.push(\n {type: DataType.uint32, data: kernelStrides}, {type: DataType.uint32, data: attributes.pads},\n {type: DataType.uint32, data: attributes.strides});\n uniforms.push(\n {name: 'kernelStrides', type: 'u32', length: kernelStrides.length},\n {name: 'pads', type: 'u32', length: attributes.pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length});\n\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n return [programUniforms, uniforms, !!hasPads, false, false];\n }\n};\n\nconst generatePoolingCode = (\n shaderHelper: ShaderHelper, x: IndicesHelper, rank: number, outputShapeRank: number, attributes: AttributeType,\n op1: string, op2: string, start: number, uniforms: UniformsArrayType, hasPads: boolean, pwStartEndNotZero: boolean,\n phStartEndNotZero: boolean): string => {\n const isChannelsLast = attributes.format === 'NHWC';\n const dataType = x.type.value;\n const output = outputVariable('output', x.type.tensor, outputShapeRank);\n\n if (attributes.kernelShape.length <= 2) {\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n const dimIdxW = rank - (isChannelsLast ? 
2 : 1);\n if (pwStartEndNotZero) {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n if (xIndices[${dimIdxW}] < 0 || xIndices[${dimIdxW}]\n >= uniforms.x_shape[${dimIdxW}]) {\n pad++;\n continue;\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const dimIdxH = rank - (isChannelsLast ? 3 : 2);\n if (phStartEndNotZero) {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n if (xIndices[${dimIdxH}] < 0 || xIndices[${dimIdxH}] >= uniforms.x_shape[${dimIdxH}]) {\n pad += i32(uniforms.kw);\n continue;\n }\n `;\n } else {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(${start});\n var pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const stridesRank = attributes.kernelShape.length;\n const padsRank = attributes.pads.length;\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (xIndices[j] >= uniforms.x_shape[j]) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n padCode = `\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n `;\n }\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var offsets: array;\n\n var value = ${dataType}(${start});\n var pad = 0;\n var isPad = false;\n\n for (var i: u32 = 0u; i < uniforms.kernelSize; i++) {\n var offset = i;\n for (var j = 0u; j < ${stridesRank - 1}u; j++) {\n offsets[j] = offset / ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n offset -= offsets[j] * ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n }\n offsets[${stridesRank - 1}] = offset;\n\n isPad = false;\n for (var j = ${rank - stridesRank}u; j < ${rank}u; j++) {\n xIndices[j] = indices[j] * ${\n getElementAt('uniforms.strides', `j - ${rank - stridesRank}u`, stridesRank)}\n + offsets[j - ${rank - stridesRank}u] - ${getElementAt('uniforms.pads', 'j - 2u', padsRank)};\n ${padCode}\n }\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n }\n};\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface PoolCommonAttributes extends FormatAttributes {\n readonly autoPad: string;\n readonly ceilMode: 
number;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nconst createShaderKeyFromAttributes = (attributes: PoolCommonAttributes): string =>\n (`${attributes.format};${attributes.ceilMode};${attributes.autoPad};${attributes.kernelShape.length}`);\n\nconst createAveragePoolShaderKeyFromAttributes = (attributes: AveragePoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.countIncludePad}`);\n\nconst createMaxPoolShaderKeyFromAttributes = (attributes: MaxPoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.storageOrder};${attributes.dilations}`);\n\nconst parsePoolCommonAttributes = (attributes: Record): PoolCommonAttributes => ({\n format: attributes.format as FormatAttributes['format'],\n autoPad: ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number],\n ceilMode: attributes.ceil_mode as number,\n kernelShape: attributes.kernel_shape as [number, number],\n strides: attributes.strides as [number, number],\n pads: attributes.pads as [number, number, number, number]\n});\n\nexport interface AveragePoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly countIncludePad: boolean;\n}\n\nconst createAveragePoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: AveragePoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const x = inputVariable('x', input.dataType, input.dims.length);\n const dataType = x.type.value;\n\n const op1 = 'value += x_val;';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= ${dataType}(uniforms.kernelSize);`;\n } else {\n op2 += `value /= ${dataType}(i32(uniforms.kernelSize) - pad);`;\n }\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2, 0.0, uniforms,\n hasPads, pwStartEndNotZero, phStartEndNotZero),\n };\n };\n\nexport const parseAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const countIncludePad = (attributes.count_include_pad as number) === 0 ? 
false : true;\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode'\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n const averagePoolAttributes = {countIncludePad, ...attr, cacheKey: ''};\n return {...averagePoolAttributes, cacheKey: createAveragePoolShaderKeyFromAttributes(averagePoolAttributes)};\n};\n\nexport const averagePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('AveragePool', context.inputs[0], false, attributes));\n};\n\nconst globalPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: []\n};\n\nexport const parseGlobalAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalAveragePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('GlobalAveragePool', context.inputs[0], true, attributes));\n};\n\nexport interface MaxPoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nconst createMaxPoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: MaxPoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const op1 = `\n value = max(x_val, value);\n `;\n const op2 = '';\n const x = inputVariable('x', input.dataType, input.dims.length);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2,\n (input.dataType === DataType.float16) ? 
-65504 : -1e5, uniforms, hasPads, pwStartEndNotZero,\n phStartEndNotZero),\n };\n };\n\nexport const maxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('MaxPool', context.inputs[0], false, attributes));\n};\n\nexport const parseMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const storageOrder = attributes.storage_order as number;\n const dilations = attributes.dilations as [number, number];\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n const maxPoolAttributes = {storageOrder, dilations, ...attr, cacheKey: ''};\n return {...maxPoolAttributes, cacheKey: createMaxPoolShaderKeyFromAttributes(maxPoolAttributes)};\n};\n\nexport const parseGlobalMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalMaxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('GlobalMaxPool', context.inputs[0], true, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\nconst validateInputsContent = (start: number, limit: number, delta: number): void => {\n const sameStartLimit = start === limit;\n const increasingRangeNegativeStep = start < limit && delta < 0;\n const decreasingRangePositiveStep = start > limit && delta > 0;\n\n if (sameStartLimit || increasingRangeNegativeStep || decreasingRangePositiveStep) {\n throw new Error('Range these inputs\\' contents are invalid.');\n }\n};\n\nconst createRangeProgramInfo = (start: number, limit: number, delta: number, dataType: DataType): ProgramInfo => {\n const numElements = Math.abs(Math.ceil((limit - start) / delta));\n const outputShape: number[] = [numElements];\n const outputSize = numElements;\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: dataType, data: start}, {type: dataType, data: delta},\n ...createTensorShapeVariables(outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', dataType, outputShape.length);\n const wgslType = output.type.value;\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'start', type: wgslType as UniformDataElementType},\n {name: 'delta', type: wgslType as UniformDataElementType}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n output[global_idx] = uniforms.start + ${wgslType}(global_idx) * uniforms.delta;\n }`;\n };\n\n return {\n name: 'Range',\n shaderCache: {hint: `${dataType}`},\n getShaderSource,\n 
getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const range = (context: ComputeContext): void => {\n let start = 0;\n let limit = 0;\n let delta = 0;\n if (context.inputs[0].dataType === DataType.int32) {\n start = context.inputs[0].getInt32Array()[0];\n limit = context.inputs[1].getInt32Array()[0];\n delta = context.inputs[2].getInt32Array()[0];\n } else if (context.inputs[0].dataType === DataType.float) {\n start = context.inputs[0].getFloat32Array()[0];\n limit = context.inputs[1].getFloat32Array()[0];\n delta = context.inputs[2].getFloat32Array()[0];\n }\n if (env.webgpu.validateInputContent) {\n validateInputsContent(start, limit, delta);\n }\n\n context.compute(createRangeProgramInfo(start, limit, delta, context.inputs[0].dataType), {inputs: []});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype CoordinateTransformMode = 'half_pixel'|'asymmetric'|'pytorch_half_pixel'|'tf_half_pixel_for_nn'|'align_corners'|\n 'tf_crop_and_resize'|'half_pixel_symmetric';\n\ntype KeepAspectRatioPolicy = 'stretch'|'not_smaller'|'not_larger';\n\ntype Mode = 'nearest'|'linear'|'cubic';\n\ntype NearestMode = 'round_prefer_floor'|'round_prefer_ceil'|'floor'|'ceil'|'simple';\n\nexport interface ResizeAttributes extends AttributeWithCacheKey {\n antialias: number;\n axes: number[];\n coordinateTransformMode: CoordinateTransformMode;\n cubicCoeffA: number;\n excludeOutside: boolean;\n extrapolationValue: number;\n keepAspectRatioPolicy: KeepAspectRatioPolicy;\n mode: Mode;\n nearestMode: NearestMode;\n}\n\nconst validateScales = (scales: number[], attributes: ResizeAttributes): void => {\n scales.every((value) => value > 0 || (() => {\n throw new Error('Resize requires scales input values to be positive');\n }));\n // Check scales dims based on mode: LINEAR, CUBIC\n if (scales.length > 0) {\n if (attributes.mode === 'linear') {\n if (!(scales.length === 2 || scales.length === 3 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1) ||\n (scales.length === 5 && scales[0] === 1 && scales[1] === 1))) {\n throw new Error(\n `For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and\n one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`);\n }\n } else if (attributes.mode === 'cubic') {\n if (!(scales.length === 2 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1))) {\n throw new Error('Resize requires scales input size to be 2 or 4 for cubic mode');\n }\n }\n }\n};\n\nconst updateScales = (scales: readonly number[], axes: readonly number[], rank: number): number[] => {\n axes.every((value) => value >= 0 && value < rank || (() => {\n throw new Error('Resize requires axes input values to be positive and less than rank');\n }));\n const newScales = new Array(rank).fill(1.0);\n 
axes.forEach((value, index) => newScales[value] = scales[index]);\n return newScales;\n};\n\nconst validateInputs =\n (inputs: readonly TensorView[], attributes: ResizeAttributes, opsetVersion: number, scales: number[],\n sizes: number[], roi: number[]): void => {\n const [roiInputIndex, scalesInputIndex, sizesInputIndex] =\n (opsetVersion > 10) ? [1, 2, 3] : [-1, (inputs.length > 1) ? 1 : -1, -1];\n const rank = inputs[0].dims.length;\n if (roiInputIndex > 0 && inputs.length > roiInputIndex && inputs[roiInputIndex].dims.length > 0) {\n inputs[roiInputIndex].getFloat32Array().forEach((value) => roi.push(value));\n } else if (attributes.coordinateTransformMode === 'tf_crop_and_resize') {\n throw new Error('Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize');\n }\n\n if (scalesInputIndex > 0 && inputs.length > scalesInputIndex && inputs[scalesInputIndex].dims.length > 0) {\n inputs[scalesInputIndex].getFloat32Array().forEach((value) => scales.push(value));\n if (scales.length !== 0 &&\n (scales.length !== rank && (opsetVersion >= 18 && scales.length !== attributes.axes.length))) {\n throw new Error(\n 'Resize requires scales input size to be same as input rank or axes size for opset 18 and up');\n }\n validateScales(scales, attributes);\n if (attributes.axes.length > 0) {\n updateScales(scales, attributes.axes, rank).forEach((value, index) => scales[index] = value);\n }\n }\n if (sizesInputIndex > 0 && inputs.length > sizesInputIndex) {\n inputs[sizesInputIndex].getBigInt64Array().forEach((value) => sizes.push(Number(value)));\n if (sizes.length !== rank || (opsetVersion >= 18 && sizes.length === attributes.axes.length)) {\n throw new Error('Resize requires sizes input size to be same as input rank or axes size for opset 18 and up');\n }\n }\n\n if (attributes.axes.length > 0) {\n if (scales.length !== attributes.axes.length) {\n throw new Error('Resize requires \"scales\" input size to be of axes rank when axes attributes is specified');\n }\n if (sizes.length !== attributes.axes.length) {\n throw new Error(\n 'Resize requires \"sizes\" input size to be of rank axes rank when axes attributes is specified');\n }\n }\n if (typeof scales !== 'undefined' && typeof sizes !== 'undefined' && scales.length > 0 && sizes.length > rank) {\n throw new Error('Resize requires only of scales or sizes to be specified');\n }\n };\n\nconst getOriginalCoordinateFromResizedCoordinate =\n (coordinateTransferMode: CoordinateTransformMode, dType: string): string =>\n `fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32,\n lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${dType} { ` +\n (() => {\n switch (coordinateTransferMode) {\n case 'asymmetric':\n return `return ${dType}(xResized) / ${dType}(xScale);`;\n case 'pytorch_half_pixel':\n return `if (lengthResized > 1) {\n return (${dType}(xResized) + 0.5) / ${dType}(xScale) - 0.5;\n } else {\n return 0.0;\n }`;\n case 'tf_half_pixel_for_nn':\n return `return (${dType}(xResized) + 0.5) / ${dType}(xScale);`;\n case 'align_corners':\n return `if (lengthResized == 1) {\n return 0.0;\n } else {\n // The whole part and the fractional part are calculated separately due to inaccuracy of floating\n // point division. As an example, f32(21) / f32(7) may evaluate to 2.99... 
instead of 3, causing an\n // offset-by-one error later in floor().\n let whole = ${dType}(xResized * (lengthOriginal - 1) / (lengthResized - 1));\n let fract =\n ${dType}(xResized * (lengthOriginal - 1) % (lengthResized - 1)) / ${dType}(lengthResized - 1);\n return whole + fract;\n }`;\n case 'tf_crop_and_resize':\n return `if (lengthResized > 1) {\n return ${dType}(roiStart) * ${dType}(lengthOriginal - 1) +\n (${dType}(xResized) * ${dType}(roiEnd - roiStart) * ${dType}(lengthOriginal - 1)) /\n ${dType}(lengthResized - 1);\n } else {\n return 0.5 * ${dType}(roiStart + roiEnd) * ${dType}(lengthOriginal - 1);\n }`;\n case 'half_pixel_symmetric':\n return `const outputWidth = ${dType}xScale * ${dType}(lengthResized);\n const adjustment = ${dType}(lengthResized) / outputWidth;\n const center = ${dType}(lengthOriginal) / 2;\n const offset = center * (1 - adjustment);\n return offset + ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n case 'half_pixel':\n return `return ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n default:\n throw new Error(`Coordinate transform mode ${coordinateTransferMode} is not supported`);\n }\n })() +\n '}';\n\nconst getNearestPixelFromOriginal = (nearestMode: NearestMode, opsetVersion: number, dType: string): string =>\n `fn getNearestPixelFromOriginal(xOriginal: ${dType}, isDownSample: bool) -> ${dType} {` + (() => {\n switch (nearestMode) {\n case 'round_prefer_ceil':\n return 'if (fract(xOriginal) == 0.5) { \\\n return ceil(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'floor':\n return 'return floor(xOriginal);';\n case 'ceil':\n return 'return ceil(xOriginal);';\n case 'round_prefer_floor':\n return 'if (fract(xOriginal) == 0.5) { \\\n return floor(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'simple':\n default:\n if (opsetVersion < 11) {\n return 'if (isDownSample) \\\n { \\\n return ceil(xOriginal); \\\n } else { \\\n return xOriginal; \\\n }';\n }\n throw new Error(`Nearest mode ${nearestMode} is not supported`);\n }\n })() +\n '}';\n\nconst updateRoI = (roi: readonly number[], axes: readonly number[], rank: number): number[] => {\n const roiTmp = new Array(rank).fill(0).concat(new Array(rank).fill(1));\n const roiLocal = roi.length === 0 ? roiTmp : roi.slice();\n if (axes.length > 0) {\n axes.forEach((v, i) => {\n roiTmp[v] = roiLocal[i];\n roiTmp[i + rank] = roiLocal[axes.length + i];\n });\n return roiTmp;\n }\n return roiLocal;\n};\n\nconst initOutputShape =\n (inputShape: readonly number[], scales: readonly number[], sizes: readonly number[], axes: readonly number[]):\n number[] => {\n let outputShape: number[] = [];\n if (sizes.length > 0) {\n if (axes.length > 0) {\n inputShape.forEach((v) => outputShape.push(v));\n if (Math.max(...axes) > inputShape.length) {\n throw new Error('axes is out of bound');\n }\n axes.forEach((v, i) => outputShape[v] = sizes[i]);\n } else {\n sizes.forEach((v) => outputShape.push(v));\n }\n } else {\n if (scales.length === 0) {\n throw new Error('Resize requires either scales or sizes.');\n } else {\n outputShape = inputShape.map((value, index) => Math.round(value * scales[index]));\n }\n }\n return outputShape;\n };\n\nconst adjustOutputShape = (inputShape: readonly number[], scales: number[], attributes: ResizeAttributes) => {\n const scaleInPolicy = (() => {\n switch (attributes.keepAspectRatioPolicy) {\n case 'not_larger':\n return attributes.axes.length > 0 ? 
Math.min(...attributes.axes.map(i => scales[i]), Number.MAX_VALUE) :\n Math.min(...scales, Number.MAX_VALUE);\n case 'not_smaller':\n return attributes.axes.length > 0 ? Math.max(...attributes.axes.map(i => scales[i]), Number.MIN_VALUE) :\n Math.max(...scales, Number.MIN_VALUE);\n default:\n throw new Error(`Keep aspect ratio policy ${attributes.keepAspectRatioPolicy} is not supported`);\n }\n })();\n scales.fill(1.0, 0, scales.length);\n const adjustedOutputShape = inputShape.slice();\n if (attributes.axes.length > 0) {\n attributes.axes.forEach((v) => scales[v] = scaleInPolicy);\n attributes.axes.forEach((v) => adjustedOutputShape[v] = Math.round(inputShape[v] * scales[v]));\n } else {\n scales.fill(scaleInPolicy, 0, scales.length);\n adjustedOutputShape.forEach((v, i) => adjustedOutputShape[i] = Math.round(v * scales[i]));\n }\n return adjustedOutputShape;\n};\n\nconst calculateOriginalIndicesFromOutputIndices =\n (output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[], scalesLength: number,\n roiLength: number): string => `\n fn calculateOriginalIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> array<${\n output.type.value}, ${outputShape.length}> {\n var original_indices: array<${output.type.value}, ${outputShape.length}>;\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n if (scale == 1.0) {\n original_indices[i] = ${output.type.value}(output_index);\n } else {\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n original_indices[i] = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n }\n }\n return original_indices;\n }`;\n\nconst calculateInputIndicesFromOutputIndices =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scalesLength: number, roiLength: number, useExtrapolation: boolean): string => `\n fn calculateInputIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var input_index: u32;\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n if (scale == 1.0) {\n input_index = output_index;\n } else {\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n var original_idx = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n if (!${useExtrapolation} || (original_idx >= 0 && original_idx < ${output.type.value}(input_shape_i))) {\n if (original_idx < 0) {\n input_index = 0;\n } else if (original_idx > ${output.type.value}(input_shape_i - 1)) {\n input_index = input_shape_i - 1;\n } else {\n input_index = 
u32(getNearestPixelFromOriginal(original_idx, scale < 1));\n }\n } else {\n input_index = u32(original_idx);\n }\n }\n ${input.indicesSet('input_indices', 'i', ' input_index')}\n }\n return input_indices;\n }`;\nconst checkInputIndices = (input: IndicesHelper, inputShape: readonly number[]): string => `\n fn checkInputIndices(input_indices: ${input.type.indices}) -> bool {\n for (var i:u32 = 0; i < ${inputShape.length}; i++) {\n var input_index = ${input.indicesGet('input_indices', 'i')};\n if (input_index < 0 || input_index >= ${getElementAt('uniforms.input_shape', 'i', inputShape.length)}) {\n return false;\n }\n }\n return true;\n }`;\n\nconst setChannelAndBatchIndices =\n (input: IndicesHelper, channelIdx: number, batchIdx: number, spacialDims: number): string =>\n input.rank > spacialDims ? `\n ${input.indicesSet('input_indices', channelIdx, 'channel')};\n ${input.indicesSet('input_indices', batchIdx, 'batch')};\n` :\n '';\n\nconst bilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 2 ? [-1, 0, 1, -1] : (isNchw ? [0, 2, 3, 1] : [0, 1, 2, 3]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(row, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(col, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 2)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn bilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var row:${dType} = originalIndices[${heightIdx}];\n var col:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ?\n `if (row < 0 || row > (${inputShape[heightIdx]} - 1) || col < 0 || col > (${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n row = max(0, min(row, ${inputShape[heightIdx]} - 1));\n col = max(0, min(col, ${inputShape[widthIdx]} - 1));\n var row1: u32 = u32(row);\n var col1: u32 = u32(col);\n var row2: u32 = u32(row + 1);\n var col2: u32 = u32(col + 1);\n var channel: u32 = ${inputShape.length > 2 ? `u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 2 ? 
`u32(originalIndices[${batchIdx}])` : '0'};\n var x11: ${dType} = getInputValue(batch, channel, row1, col1);\n var x12: ${dType} = getInputValue(batch, channel, row1, col2);\n var x21: ${dType} = getInputValue(batch, channel, row2, col1);\n var x22: ${dType} = getInputValue(batch, channel, row2, col2);\n var dx1: ${dType} = abs(row - ${dType}(row1));\n var dx2: ${dType} = abs(${dType}(row2) - row);\n var dy1: ${dType} = abs(col - ${dType}(col1));\n var dy2: ${dType} = abs(${dType}(col2) - col);\n if (row1 == row2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (col1 == col2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1);\n }`;\n };\n\nconst bicubicInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scales: readonly number[], roi: readonly number[], cubicCoeffA: number, useExtrapolation: boolean,\n extrapolationValue: number, excludeOutside: boolean): string => {\n const is2D = inputShape.length === 2;\n const isNchw = true;\n const [heightIdx, widthIdx] = is2D ? [0, 1] : isNchw ? [2, 3] : [1, 2];\n const dType = input.type.value;\n const createCubicInterpolationFunction = (idx: number): string => {\n const direction = idx === heightIdx ? 'row' : 'col';\n return `\n fn ${direction}CubicInterpolation(input_indices: ${input.type.indices}, output_indices: ${\n output.type.indices}) -> ${dType} {\n var output_index = ${output.indicesGet('output_indices', idx)};\n var originalIdx: ${dType} = getOriginalCoordinateFromResizedCoordinate(output_index, ${scales[idx]},\n ${outputShape[idx]}, ${inputShape[idx]}, ${roi[idx]}, ${roi[idx]} + ${inputShape.length});\n var fractOriginalIdx: ${dType} = originalIdx - floor(originalIdx);\n var coefs = getCubicInterpolationCoefs(fractOriginalIdx);\n\n if (${useExtrapolation} && (originalIdx < 0 || originalIdx > (${inputShape[idx]} - 1))) {\n return ${extrapolationValue};\n }\n var data: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n for (var i: i32 = -1; i < 3; i++) {\n var ${direction}: ${dType} = originalIdx + ${dType}(i);\n if (${direction} < 0 || ${direction} >= ${inputShape[idx]}) {\n ${(() => {\n if (excludeOutside) {\n return `coefs[i + 1] = 0.0;\n continue;`;\n } else if (useExtrapolation) {\n return `return ${extrapolationValue};`;\n } else {\n return `${direction} = max(0, min(${direction}, ${inputShape[idx]} - 1));`;\n }\n })()};\n }\n var input_indices_copy: ${input.type.indices} = input_indices;\n ${input.indicesSet('input_indices_copy', idx, `u32(${direction})`)};\n data[i + 1] = ${\n idx === heightIdx ? 
input.getByIndices('input_indices_copy') :\n 'rowCubicInterpolation(input_indices_copy, output_indices)'};\n }\n return cubicInterpolation1D(data, coefs);\n }`;\n };\n\n return `\n ${createCubicInterpolationFunction(heightIdx)};\n ${createCubicInterpolationFunction(widthIdx)};\n fn getCubicInterpolationCoefs(s: ${dType}) -> array<${dType}, 4> {\n var absS = abs(s);\n var coeffs: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n var oneMinusAbsS: ${dType} = 1.0 - absS;\n var twoMinusAbsS: ${dType} = 2.0 - absS;\n var onePlusAbsS: ${dType} = 1.0 + absS;\n coeffs[0] = ((${cubicCoeffA} * onePlusAbsS - 5 * ${cubicCoeffA}) * onePlusAbsS + 8 * ${\n cubicCoeffA}) * onePlusAbsS - 4 * ${cubicCoeffA};\n coeffs[1] = ((${cubicCoeffA} + 2) * absS - (${cubicCoeffA} + 3)) * absS * absS + 1;\n coeffs[2] = ((${cubicCoeffA} + 2) * oneMinusAbsS - (${cubicCoeffA} + 3)) * oneMinusAbsS * oneMinusAbsS + 1;\n coeffs[3] = ((${cubicCoeffA} * twoMinusAbsS - 5 * ${cubicCoeffA}) * twoMinusAbsS + 8 * ${\n cubicCoeffA}) * twoMinusAbsS - 4 * ${cubicCoeffA};\n return coeffs;\n }\n\n fn cubicInterpolation1D(x: array<${dType}, 4>, coefs: array<${dType}, 4>) -> ${dType} {\n var coefsSum: ${dType} = coefs[0] + coefs[1] + coefs[2] + coefs[3];\n return (x[0] * coefs[0] + x[1] * coefs[1]+ x[2] * coefs[2]+ x[3] * coefs[3]) / coefsSum;\n }\n\n fn bicubicInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var input_indices: ${input.type.indices} = output_indices;\n return colCubicInterpolation(input_indices, output_indices);\n }\n `;\n };\n\nconst trilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, depthIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 3 ? [-1, 0, 1, 2, -1] : (isNchw ? [0, 2, 3, 4, 1] : [0, 1, 2, 3, 4]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', depthIdx, `max(0, min(depth, ${inputShape[depthIdx]} - 1))`)};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(height, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(width, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 3)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn trilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var depth:${dType} = originalIndices[${depthIdx}];\n var height:${dType} = originalIndices[${heightIdx}];\n var width:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ? `if (depth < 0 || depth > (${inputShape[depthIdx]} - 1) || height < 0 || height > (${\n inputShape[heightIdx]} - 1) || width < 0 || (width > ${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n\n depth = max(0, min(depth, ${inputShape[depthIdx]} - 1));\n height = max(0, min(height, ${inputShape[heightIdx]} - 1));\n width = max(0, min(width, ${inputShape[widthIdx]} - 1));\n var depth1: u32 = u32(depth);\n var height1: u32 = u32(height);\n var width1: u32 = u32(width);\n var depth2: u32 = u32(depth + 1);\n var height2: u32 = u32(height + 1);\n var width2: u32 = u32(width + 1);\n var channel: u32 = ${inputShape.length > 3 ? 
`u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 3 ? `u32(originalIndices[${batchIdx}])` : '0'};\n\n var x111: ${dType} = getInputValue(batch, channel, depth1, height1, width1);\n var x112: ${dType} = getInputValue(batch, channel, depth1, height1, width2);\n var x121: ${dType} = getInputValue(batch, channel, depth1, height2, width1);\n var x122: ${dType} = getInputValue(batch, channel, depth1, height2, width2);\n var x211: ${dType} = getInputValue(batch, channel, depth2, height1, width1);\n var x212: ${dType} = getInputValue(batch, channel, depth2, height1, width2);\n var x221: ${dType} = getInputValue(batch, channel, depth2, height2, width1);\n var x222: ${dType} = getInputValue(batch, channel, depth2, height2, width2);\n var dx1: ${dType} = abs(depth - ${dType}(depth1));\n var dx2: ${dType} = abs(${dType}(depth2) - depth);\n var dy1: ${dType} = abs(height - ${dType}(height1));\n var dy2: ${dType} = abs(${dType}(height2) - height);\n var dz1: ${dType} = abs(width - ${dType}(width1));\n var dz2: ${dType} = abs(${dType}(width2) - width);\n if (depth1 == depth2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (height1 == height2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n if (width1 == width2) {\n dz1 = 0.5;\n dz2 = 0.5;\n }\n return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 +\n x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1);\n }`;\n };\n\nconst createResizeProgramInfo =\n (inputTensor: TensorView, attributes: ResizeAttributes, opsetVersion: number, scalesInput: readonly number[],\n sizes: readonly number[], roiInput: readonly number[]): ProgramInfo => {\n const inputShape = inputTensor.dims;\n const roi = updateRoI(roiInput, attributes.axes, inputShape.length);\n\n let outputShape = initOutputShape(inputShape, scalesInput, sizes, attributes.axes);\n let scales = scalesInput.slice();\n if (scalesInput.length === 0) {\n scales = inputShape.map((value, index) => value === 0 ? 1.0 : outputShape[index] / value);\n if (attributes.keepAspectRatioPolicy !== 'stretch') {\n outputShape = adjustOutputShape(inputShape, scales, attributes);\n }\n }\n const output = outputVariable('output', inputTensor.dataType, outputShape.length);\n const input = inputVariable('input', inputTensor.dataType, inputShape.length);\n const outputSize = ShapeUtil.size(outputShape);\n const noScale = inputShape.length === outputShape.length && inputShape.every((d, i) => d === outputShape[i]);\n const useExtrapolation = attributes.coordinateTransformMode === 'tf_crop_and_resize';\n const extrapolationValue = attributes.extrapolationValue;\n const dataType = input.type.value;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${noScale ? 
'' : `\n ${getOriginalCoordinateFromResizedCoordinate(attributes.coordinateTransformMode, dataType)};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `\n ${checkInputIndices(input, inputShape)};\n ${getNearestPixelFromOriginal(attributes.nearestMode, opsetVersion, dataType)};\n ${\n calculateInputIndicesFromOutputIndices(\n input, output, inputShape, outputShape, scales.length, roi.length, useExtrapolation)};\n `;\n case 'linear':\n return `\n ${calculateOriginalIndicesFromOutputIndices(output, inputShape, outputShape, scales.length, roi.length)};\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${bilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else if (inputShape.length === 3 || inputShape.length === 5) {\n return `${trilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else {\n throw Error('Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.');\n }\n })()};\n `;\n case 'cubic':\n return `\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${\n bicubicInterpolation(\n input, output, inputShape, outputShape, scales, roi, attributes.cubicCoeffA, useExtrapolation,\n attributes.extrapolationValue, attributes.excludeOutside)}`;\n } else {\n throw Error('Cubic mode only supports input dims 2 and 4 are supported in linear mode.');\n }\n })()};\n `;\n default:\n throw Error('Invalid resize mode');\n }\n })()};\n `}\n ${\n shaderHelper.registerUniform('output_size', 'u32')\n .registerUniform('scales', 'f32', scales.length)\n .registerUniform('roi', 'f32', roi.length)\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n ${noScale ? 'output[global_idx] = input[global_idx];' : `\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `input_indices = calculateInputIndicesFromOutputIndices(output_indices);\n if (checkInputIndices(input_indices)) {\n output[global_idx] = ${input.getByIndices('input_indices')};\n } else {\n output[global_idx] = ${attributes.extrapolationValue};\n }`;\n case 'linear':\n return `output[global_idx] = ${\n (inputShape.length === 2 || inputShape.length === 4) ? 'bilinearInterpolation' :\n 'trilinearInterpolation'}(output_indices);`;\n case 'cubic':\n return 'output[global_idx] = bicubicInterpolation(output_indices);';\n default:\n throw Error(`Unsupported resize mode: ${attributes.mode}`);\n }\n })()};\n`}\n }`;\n\n return {\n name: 'Resize',\n shaderCache: {\n hint: `${attributes.cacheKey}|${opsetVersion}|${scales.length > 0 ? scales : ''}|${\n sizes.length > 0 ? sizes : ''}|${roi.length > 0 ? 
roi : ''}|${noScale}|${inputShape}`,\n inputDependencies: ['rank']\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputTensor.dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.float, data: scales},\n {type: DataType.float, data: roi}, ...createTensorShapeVariables(inputShape, outputShape)\n ]\n })\n };\n };\n\nconst getOpsetVersionFromCustomDataBuffer = (context: ComputeContext): number => {\n const customDataBuffer = context.customDataBuffer;\n const customDataBuffer32 = new Uint32Array(customDataBuffer, customDataBuffer.byteOffset, 1);\n const opsetVersion = customDataBuffer32[0];\n return opsetVersion;\n};\n\nexport const resize = (context: ComputeContext, attributes: ResizeAttributes): void => {\n const scales: number[] = [];\n const sizes: number[] = [];\n const roi: number[] = [];\n\n // Note that scales in resize are always f32. roi can be f32 or f16.\n // TODO: Currently this code does not support f16 for roi when passed as optional input.\n\n const opsetVersion = getOpsetVersionFromCustomDataBuffer(context);\n if (attributes.antialias !== 0) {\n throw Error('Only default value (0) for Antialias attribute is supported');\n }\n validateInputs(context.inputs, attributes, opsetVersion, scales, sizes, roi);\n context.compute(\n createResizeProgramInfo(context.inputs[0], attributes, opsetVersion, scales, sizes, roi), {inputs: [0]});\n};\n\nexport const parseResizeAttributes = (attributes: Record): ResizeAttributes => {\n const antialias = attributes.antialias as number;\n const axes = attributes.axes as number[];\n const coordinateTransformMode: CoordinateTransformMode =\n attributes.coordinateTransformMode as CoordinateTransformMode;\n const cubicCoeffA = attributes.cubicCoeffA as number;\n const excludeOutside = attributes.excludeOutside as number !== 0;\n const extrapolationValue = attributes.extrapolationValue as number;\n const keepAspectRatioPolicy: KeepAspectRatioPolicy = attributes.keepAspectRatioPolicy as KeepAspectRatioPolicy;\n const mode: Mode = attributes.mode as Mode;\n // If nearestMode is not specified, use simple mode.\n const nearestMode: NearestMode = (attributes.nearestMode === '' ? 'simple' : attributes.nearestMode) as NearestMode;\n return createAttributeWithCacheKey({\n antialias,\n axes,\n coordinateTransformMode,\n cubicCoeffA,\n excludeOutside,\n extrapolationValue,\n keepAspectRatioPolicy,\n mode,\n nearestMode\n });\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, WORKGROUP_SIZE} from './common';\n\nexport interface RotaryEmbeddingAttributes {\n readonly interleaved: boolean;\n readonly numHeads: number;\n readonly rotaryEmbeddingDim: number;\n readonly scale: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): void => {\n const [input, positionIds, cosCache, sinCache] = inputs;\n const {numHeads, rotaryEmbeddingDim} = attributes;\n\n if (input.dims.length !== 3 && input.dims.length !== 4) {\n throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${input.dims.length}`);\n }\n if (!ShapeUtil.areEqual(positionIds.dims, []) && !ShapeUtil.areEqual(positionIds.dims, [1]) &&\n positionIds.dims.length !== 2) {\n throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${positionIds.dims.length}`);\n }\n if (cosCache.dims.length !== 2) {\n throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${cosCache.dims.length}`);\n }\n if (sinCache.dims.length !== 2) {\n throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${sinCache.dims.length}`);\n }\n if (!ShapeUtil.areEqual(cosCache.dims, sinCache.dims)) {\n throw new Error('Inputs \\'cos_cache\\' and \\'sin_cache\\' are expected to have the same shape');\n }\n\n if (rotaryEmbeddingDim > 0 && numHeads === 0) {\n throw new Error('num_heads must be provided if rotary_embedding_dim is specified');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[input.dims.length - 2];\n const maxSequenceLength = cosCache.dims[0];\n const hiddenSize = ShapeUtil.sizeFromDimension(input.dims, 1) / sequenceLength;\n const headSize = rotaryEmbeddingDim === 0 ? 
cosCache.dims[1] * 2 : hiddenSize / numHeads;\n if (rotaryEmbeddingDim > headSize) {\n throw new Error('rotary_embedding_dim must be less than or equal to head_size');\n }\n\n if (positionIds.dims.length === 2) {\n if (batchSize !== positionIds.dims[0]) {\n throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${positionIds.dims[0]}`);\n }\n if (sequenceLength !== positionIds.dims[1]) {\n throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${positionIds.dims[1]}`);\n }\n }\n\n if (headSize / 2 !== cosCache.dims[1] && rotaryEmbeddingDim / 2 !== cosCache.dims[1]) {\n throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${\n cosCache.dims[1]}`);\n }\n\n if (sequenceLength > maxSequenceLength) {\n throw new Error('Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported');\n }\n};\n\nconst createRotaryEmbeddingProgramInfo =\n (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): ProgramInfo => {\n const {interleaved, numHeads, rotaryEmbeddingDim, scale} = attributes;\n const batchSize = inputs[0].dims[0];\n const batchStride = ShapeUtil.sizeFromDimension(inputs[0].dims, 1);\n const sequenceLength = inputs[0].dims[inputs[0].dims.length - 2];\n const hiddenSize = batchStride / sequenceLength;\n const halfRotaryEmbeddingDim = inputs[2].dims[1];\n const headSize = rotaryEmbeddingDim === 0 ? halfRotaryEmbeddingDim * 2 : hiddenSize / numHeads;\n\n // Rotary embeddings will be calculated in a pair-wise fashion. In accordance, use the shape\n // [batch size, sequence length, num of heads, num of pairs to rotate + num of dims to copy]\n // to unfold the global index in shader.\n const globalShape =\n new Array(batchSize, sequenceLength, hiddenSize / headSize, headSize - halfRotaryEmbeddingDim);\n const globalStrides = ShapeUtil.computeStrides(globalShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.float, data: scale},\n {type: DataType.uint32, data: globalShape},\n {type: DataType.uint32, data: globalStrides},\n\n // strides for addressing the input/output tensor, in permutated order to align with the unfolded global index,\n // i.e. 
BSNH\n ...(inputs[0].dims.length === 3 ?\n new Array({type: DataType.uint32, data: [batchStride, hiddenSize, headSize, 1]}) :\n []),\n ...(inputs[0].dims.length === 4 ?\n new Array(\n {type: DataType.uint32, data: [batchStride, headSize, sequenceLength * headSize, 1]}) :\n []),\n\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, inputs[2].dims, inputs[3].dims, inputs[0].dims),\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const positionIds = inputVariable('position_ids', inputs[1].dataType, inputs[1].dims.length);\n const cosCache = inputVariable('cos_cache', inputs[2].dataType, inputs[2].dims.length);\n const sinCache = inputVariable('sin_cache', inputs[3].dataType, inputs[3].dims.length);\n const output = outputVariable('output', inputs[0].dataType, inputs[0].dims.length);\n\n shaderHelper.registerUniforms([\n {name: 'scale', type: 'f32'},\n {name: 'global_shape', type: 'u32', length: globalShape.length},\n {name: 'global_strides', type: 'u32', length: globalStrides.length},\n {name: 'input_output_strides', type: 'u32', length: globalStrides.length},\n ]);\n\n return `\n ${shaderHelper.declareVariables(input, positionIds, cosCache, sinCache, output)}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n let half_rotary_emb_dim = uniforms.${cosCache.name}_shape[1];\n let bsnh = global_idx / uniforms.global_strides % uniforms.global_shape;\n let size = uniforms.global_shape[0] * uniforms.global_strides[0];\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('size')}\n\n if (bsnh[3] < half_rotary_emb_dim) {\n let position_ids_idx =\n ${positionIds.broadcastedIndicesToOffset('bsnh.xy', outputVariable('', positionIds.type.tensor, 2))};\n let position_id =\n u32(${positionIds.getByOffset('position_ids_idx')}) + select(0, bsnh[1], position_ids_idx == 0);\n let i = dot(bsnh, uniforms.input_output_strides) + select(0, bsnh[3], ${interleaved});\n let j = i + select(half_rotary_emb_dim, 1, ${interleaved});\n let re = ${input.getByOffset('i')} * ${cosCache.get('position_id', 'bsnh[3]')} -\n ${input.getByOffset('j')} * ${sinCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('i', 're')}\n let im = ${input.getByOffset('i')} * ${sinCache.get('position_id', 'bsnh[3]')} +\n ${input.getByOffset('j')} * ${cosCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('j', 'im')}\n } else {\n let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim;\n ${output.setByOffset('k', input.getByOffset('k'))}\n }\n }`;\n };\n\n return {\n name: 'RotaryEmbedding',\n shaderCache: {\n hint: createAttributeWithCacheKey({\n interleaved,\n }).cacheKey,\n inputDependencies: ['rank', 'rank', 'rank', 'rank'],\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(globalShape) / WORKGROUP_SIZE)},\n programUniforms,\n }),\n };\n };\n\nexport const rotaryEmbedding = (context: ComputeContext, attributes: RotaryEmbeddingAttributes): void => {\n validateInputs(context.inputs, attributes);\n context.compute(createRotaryEmbeddingProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {castToF32, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface SkipLayerNormAttributes {\n simplified: boolean;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 3) {\n throw new Error('layerNorm requires at least 3 inputs.');\n }\n\n const input: TensorView = inputs[0];\n const skip: TensorView = inputs[1];\n const gamma: TensorView = inputs[2];\n\n if (input.dataType !== skip.dataType || input.dataType !== gamma.dataType) {\n throw new Error('All inputs must have the same data type');\n }\n\n if (input.dims.length !== 3 && input.dims.length !== 2) {\n throw new Error('Input must be 2D or 3D');\n }\n\n if (skip.dims.length !== 3 && skip.dims.length !== 2) {\n throw new Error('Skip must be 2D or 3D');\n }\n\n const hiddenSize = input.dims[input.dims.length - 1];\n const sequenceLength = input.dims[input.dims.length - 2];\n if (skip.dims[skip.dims.length - 1] !== hiddenSize) {\n throw new Error('Skip must have the same hidden size as input');\n }\n if (skip.dims[skip.dims.length - 2] !== sequenceLength) {\n throw new Error('Skip must have the same sequence length as input');\n }\n\n if (gamma.dims.length !== 1) {\n throw new Error('Gamma must be 1D');\n }\n if (gamma.dims[gamma.dims.length - 1] !== hiddenSize) {\n throw new Error('Gamma must have the same hidden size as input');\n }\n if (inputs.length > 3) {\n const beta: TensorView = inputs[3];\n if (beta.dims.length !== 1) {\n throw new Error('Beta must be 1D');\n }\n if (beta.dims[beta.dims.length - 1] !== hiddenSize) {\n throw new Error('Beta must have the same hidden size as input');\n }\n }\n if (inputs.length > 4) {\n const bias: TensorView = inputs[4];\n if (bias.dims.length !== 1) {\n throw new Error('Bias must be 1D');\n }\n if (bias.dims[bias.dims.length - 1] !== hiddenSize) {\n throw new Error('Bias must have the same hidden size as input');\n }\n }\n};\n\nconst createSkipLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: SkipLayerNormAttributes, outputCount: number, isTraining: boolean):\n ProgramInfo => {\n const simplified = attributes.simplified;\n\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const outputShape = inputShape;\n const outputSize = inputSize;\n const hiddenSize = inputShape.slice(-1)[0];\n const meanInvStdDevDim = isTraining ? 
inputShape.slice(0, -1).concat(1) : [];\n const hasBetaInput = !simplified && inputs.length > 3;\n const hasBiasInput = inputs.length > 4;\n const hasMeanOutput = isTraining && outputCount > 1;\n const hasInvStdDevOutput = isTraining && outputCount > 2;\n const hasInputSkipBiasSumOutput = outputCount > 3;\n const workgroupSize = 64;\n\n const components = getMaxComponents(hiddenSize);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.uint32, data: components},\n {type: DataType.uint32, data: hiddenSize},\n {type: DataType.float, data: attributes.epsilon},\n ];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniformsArray: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'components', type: 'u32'},\n {name: 'hidden_size', type: 'u32'},\n {name: 'epsilon', type: 'f32'},\n ];\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('skip', inputs[1].dataType, inputs[1].dims, components),\n inputVariable('gamma', inputs[2].dataType, inputs[2].dims, components),\n ];\n if (hasBetaInput) {\n variables.push(inputVariable('beta', inputs[3].dataType, inputs[3].dims, components));\n }\n if (hasBiasInput) {\n variables.push(inputVariable('bias', inputs[4].dataType, inputs[4].dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanOutput) {\n variables.push(outputVariable('mean_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdDevOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInputSkipBiasSumOutput) {\n variables.push(outputVariable('input_skip_bias_sum', inputs[0].dataType, outputShape, components));\n }\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const vecDataType = tensorTypeToWsglStorageType(DataType.float, components);\n return `\n\n ${shaderHelper.registerUniforms(uniformsArray).declareVariables(...variables)}\n var sum_shared : array<${vecDataType}, ${workgroupSize}>;\n var sum_squared_shared : array<${vecDataType}, ${workgroupSize}>;\n\n ${shaderHelper.mainStart([\n workgroupSize, 1, 1\n ])}\n let ix = local_id.x;\n let iy = global_id.x / ${workgroupSize};\n\n let hidden_size_vectorized: u32 = uniforms.hidden_size / uniforms.components;\n var stride = hidden_size_vectorized / ${workgroupSize};\n let offset = ix * stride + iy * hidden_size_vectorized;\n let offset1d = stride * ix;\n if (ix == ${workgroupSize - 1}) {\n stride = hidden_size_vectorized - stride * ix;\n }\n for (var i: u32 = 0; i < stride; i++) {\n let skip_value = skip[offset + i];\n let bias_value = ${hasBiasInput ? 'bias[offset1d + i]' : dataType + '(0.0)'};\n let input_value = x[offset + i];\n let value = input_value + skip_value + bias_value;\n ${hasInputSkipBiasSumOutput ? 
'input_skip_bias_sum[offset + i] = value;' : ''}\n output[offset + i] = value;\n let f32_value = ${castToF32(dataType, components, 'value')};\n sum_shared[ix] += f32_value;\n sum_squared_shared[ix] += f32_value * f32_value;\n }\n workgroupBarrier();\n\n var reduce_size : u32 = ${workgroupSize};\n for (var curr_size = reduce_size >> 1; curr_size > 0; curr_size = reduce_size >> 1) {\n reduce_size = curr_size + (reduce_size & 1);\n if (ix < curr_size) {\n sum_shared[ix] += sum_shared[ix + reduce_size];\n sum_squared_shared[ix] += sum_squared_shared[ix + reduce_size];\n }\n workgroupBarrier();\n }\n\n let sum = sum_shared[0];\n let square_sum = sum_squared_shared[0];\n let mean = ${sumVector('sum', components)} / f32(uniforms.hidden_size);\n let inv_std_dev = inverseSqrt(${sumVector('square_sum', components)} / f32(uniforms.hidden_size) ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n ${hasMeanOutput ? 'mean_output[global_idx] = mean;' : ''}\n ${hasInvStdDevOutput ? 'inv_std_output[global_idx] = inv_std_dev;' : ''}\n\n for (var i: u32 = 0; i < stride; i++) {\n output[offset + i] = (output[offset + i] ${simplified ? '' : `- ${dataType}(mean)`}) *\n ${dataType}(inv_std_dev) * gamma[offset1d + i]\n ${hasBetaInput ? '+ beta[offset1d + i]' : ''};\n }\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (outputCount > 1) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 2) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 3) {\n outputs.push({dims: inputShape, dataType: inputs[0].dataType});\n }\n return {\n name: 'SkipLayerNormalization',\n shaderCache: {\n hint: `${components};${hasMeanOutput};${hasInvStdDevOutput};${hasInputSkipBiasSumOutput}`,\n inputDependencies: inputs.map((_input, _index) => 'type')\n },\n getShaderSource,\n getRunData: () => ({\n outputs,\n dispatchGroup: {\n x: Math.ceil(outputSize / hiddenSize),\n },\n programUniforms\n }),\n };\n };\n\nexport const skipLayerNorm = (context: ComputeContext, attributes: SkipLayerNormAttributes): void => {\n // TODO: initialize isTraining from ComputeContext\n const isTraining = false;\n validateInputs(context.inputs);\n // Mean and InvStdDev are only used in training mode and are not required for inference.\n // They are added here for completeness only.\n const outputs = [0];\n if (context.outputCount > 1) {\n outputs.push(isTraining ? 1 : -3);\n }\n if (context.outputCount > 2) {\n outputs.push(isTraining ? 2 : -3);\n }\n if (context.outputCount > 3) {\n outputs.push(3);\n }\n context.compute(\n createSkipLayerNormProgramInfo(context.inputs, attributes, context.outputCount, isTraining), {outputs});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly starts: number[];\n readonly ends: number[];\n readonly axes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: SliceAttributes): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n if (attributes.axes.length !== 0) {\n if (attributes.axes.length !== attributes.starts.length || attributes.axes.length !== attributes.ends.length) {\n throw new Error('axes, starts and ends must have the same length');\n }\n } else if (attributes.starts.length !== attributes.ends.length) {\n throw new Error('starts and ends must have the same length');\n }\n inputs.slice(1).forEach((_, idx) => {\n if (inputs[idx + 1].dataType !== DataType.int32 && inputs[idx + 1].dataType !== DataType.int64) {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n });\n};\n\nconst readInput = (inputs: readonly TensorView[], idx: number): number[] => {\n const input: number[] = [];\n if (inputs.length > idx) {\n if (inputs[idx].dataType === DataType.int64) {\n inputs[idx].getBigInt64Array().forEach(v => input.push(Number(v)));\n } else if (inputs[idx].dataType === DataType.int32) {\n inputs[idx].getInt32Array().forEach(v => input.push(Number(v)));\n } else {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n }\n return input;\n};\n\nconst createSliceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SliceAttributes): SliceAttributes => {\n if (inputs.length > 1) {\n const starts: number[] = readInput(inputs, 1);\n const ends: number[] = readInput(inputs, 2);\n let axes: number[] = readInput(inputs, 3);\n if (axes.length === 0) {\n axes = [...Array(inputs[0].dims.length).keys()];\n }\n return createAttributeWithCacheKey({starts, ends, axes});\n } else {\n return attributes;\n }\n };\n\nconst fixStartEndValues =\n (value: number, index: number, inputShape: readonly number[], axes: readonly number[], steps: readonly number[]):\n number => {\n let newValue = value;\n if (value < 0) {\n newValue += inputShape[axes[index]];\n }\n if (steps[index] < 0) {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]] - 1));\n } else {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]]));\n }\n };\n\nconst calculateInputIndicesImpl =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[]): string =>\n `fn calculateInputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n var carry = 0u;\n for (var i = ${inputShape.length}; i >= 0; i--) {\n let input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n let steps_i = ${getElementAt('uniforms.steps', 'i', inputShape.length)};\n let signs_i = ${getElementAt('uniforms.signs', 'i', inputShape.length)};\n let starts_i = ${getElementAt('uniforms.starts', 'i', inputShape.length)};\n var output_index = ${output.indicesGet('output_indices', 
'i')};\n var input_index = output_index * steps_i + starts_i + carry;\n carry = input_index / input_shape_i;\n input_index = input_index % input_shape_i;\n if (signs_i < 0) {\n input_index = input_shape_i - input_index - 1u + starts_i;\n }\n ${input.indicesSet('input_indices', 'i', 'input_index')};\n }\n return input_indices;\n }`;\n\nconst createSliceProgramInfo = (inputs: readonly TensorView[], attributes: SliceAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const axes = (attributes.axes.length > 0) ? ShapeUtil.normalizeAxes(attributes.axes, inputShape.length) :\n [...Array(inputShape.length).keys()];\n let steps = readInput(inputs, 4);\n steps.forEach((step) => step !== 0 || (() => {\n throw new Error('step cannot be 0');\n }));\n if (steps.length === 0) {\n steps = Array(axes.length).fill(1);\n }\n const starts = attributes.starts.map((start, i) => fixStartEndValues(start, i, inputShape, axes, steps));\n\n const ends = attributes.ends.map((end, i) => fixStartEndValues(end, i, inputShape, axes, steps));\n\n if (axes.length !== starts.length || axes.length !== ends.length) {\n throw new Error('start, ends and axes should have the same number of elements');\n }\n\n if (axes.length !== inputShape.length) {\n for (let i = 0; i < inputShape.length; ++i) {\n if (!axes.includes(i)) {\n starts.splice(i, 0, 0);\n ends.splice(i, 0, inputShape[i]);\n steps.splice(i, 0, 1);\n }\n }\n }\n const signs = steps.map(step => Math.sign(step));\n // Convert negative steps to positive steps and reverse starts and ends\n steps.forEach((step, i, array) => {\n if (step < 0) {\n const numSteps = (ends[i] - starts[i]) / step;\n const newEnd = starts[i];\n const newStart = newEnd + numSteps * steps[i];\n starts[i] = newStart;\n ends[i] = newEnd;\n array[i] = -step;\n }\n });\n // Output rank is expected to be less than or equal to the input rank.\n const outputShape = inputShape.slice(0);\n axes.forEach((axis, _) => {\n outputShape[axis] = Math.ceil((ends[axis] - starts[axis]) / steps[axis]);\n });\n const outputTensorInfo: TensorInfo = {dims: outputShape, dataType: inputs[0].dataType};\n\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const outputSize = ShapeUtil.size(outputShape);\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'starts', type: 'u32', length: starts.length},\n {name: 'signs', type: 'i32', length: signs.length}, {name: 'steps', type: 'u32', length: steps.length}\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: starts},\n {type: DataType.int32, data: signs}, {type: DataType.uint32, data: steps},\n ...createTensorShapeVariables(inputs[0].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${calculateInputIndicesImpl(input, output, inputShape)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n let input_indices = calculateInputIndices(output_indices);\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n return {\n name: 'Slice',\n shaderCache: {hint: `${signs.length}_${starts.length}_${steps.length}`, inputDependencies: ['rank']},\n 
getShaderSource,\n getRunData: () => ({\n outputs: [outputTensorInfo],\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const slice = (context: ComputeContext, attributes: SliceAttributes): void => {\n validateInputs(context.inputs, attributes);\n const updatedAttributes = createSliceAttributesFromInputs(context.inputs, attributes);\n context.compute(createSliceProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n // if (ShapeUtil.size(program.outputs[0].dims) > 0) {\n // context.compute(programInfoLoader, {inputs: [0]});\n // } else {\n // // TODO: support empty output\n // throw new Error('slice: output size is 0');\n // }\n};\n\nexport const parseSliceAttributes = (attributes: Record): SliceAttributes => {\n const starts = attributes.starts as number[];\n const ends = attributes.ends as number[];\n const axes = attributes.axes as number[];\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax op requires 1 input.');\n }\n};\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst createSoftmaxProgramInfo = (input: TensorView, attributes: SoftmaxAttributes): ProgramInfo => {\n const shape = input.dims;\n const outputSize = ShapeUtil.size(shape);\n const WG = 64;\n let axis = attributes.axis;\n if (axis < 0) {\n axis = shape.length + axis;\n }\n if (axis < shape.length - 1) {\n throw new Error('softmax only supports last axis for now.');\n }\n\n const cols = shape[axis];\n const rows = outputSize / cols;\n const components = getMaxComponents(cols);\n const packedCols = cols / components;\n\n const maxVector = (name: string, components: number) => {\n if (components === 4) {\n return `max(max(${name}.x, ${name}.y), max(${name}.z, ${name}.w))`;\n } else if (components === 2) {\n return `max(${name}.x, ${name}.y)`;\n } else if (components === 3) {\n return `max(max(${name}.x, ${name}.y), ${name}.z)`;\n }\n\n return name;\n };\n const x = inputVariable('x', input.dataType, input.dims, components);\n const output = outputVariable('result', input.dataType, input.dims, components);\n const valueType = x.type.value;\n // 6.2.4 in wgsl spec\n const threadMaxDecl = tensorTypeToWsglStorageType(input.dataType) === 'f32' ?\n `var threadMax = ${valueType}(-3.402823e+38f);` :\n `var threadMax = ${valueType}(-65504.0h);`;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n var rowMaxShared : ${valueType};\n var rowSumShared : ${valueType};\n var threadShared : array<${valueType}, ${WG}>;\n\n fn getValue(row: i32, col: i32, row_stride: i32) -> ${valueType} {\n let index = row * row_stride + col;\n return x[index];\n }\n\n fn setValue(row: 
i32, col: i32, row_stride: i32, value: ${valueType}) {\n let index = row * row_stride + col;\n result[index] = value;\n }\n ${shaderHelper.registerUniform('packedCols', 'i32').declareVariables(x, output)}\n ${shaderHelper.mainStart()}\n let gindex = i32(global_idx);\n let lindex = i32(local_idx);\n const wg = ${WG};\n let row = gindex / wg;\n let cols = uniforms.packedCols;\n let row_stride : i32 = uniforms.packedCols;\n\n // find the rows max\n ${threadMaxDecl}\n for (var col = lindex; col < cols; col += wg) {\n let value = getValue(row, col, row_stride);\n threadMax = max(threadMax, value);\n }\n if (lindex < cols) {\n threadShared[lindex] = threadMax;\n }\n workgroupBarrier();\n\n var reduceSize = min(cols, wg);\n for (var currSize = reduceSize >> 1; currSize > 0; currSize = reduceSize >> 1) {\n reduceSize = currSize + (reduceSize & 1);\n if (lindex < currSize) {\n threadShared[lindex] = max(threadShared[lindex], threadShared[lindex + reduceSize]);\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowMaxShared = ${valueType}(${maxVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // find the rows sum\n var threadSum = ${valueType}(0.0);\n for (var col = lindex; col < cols; col += wg) {\n let subExp = exp(getValue(row, col, row_stride) - rowMaxShared);\n threadSum += subExp;\n }\n threadShared[lindex] = threadSum;\n workgroupBarrier();\n\n for (var currSize = wg >> 1; currSize > 0; currSize = currSize >> 1) {\n if (lindex < currSize) {\n threadShared[lindex] = threadShared[lindex] + threadShared[lindex + currSize];\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowSumShared = ${valueType}(${sumVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // calculate final value for each element in the row\n for (var col = lindex; col < cols; col += wg) {\n let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared;\n setValue(row, col, row_stride, value);\n }\n }`;\n return {\n name: 'Softmax',\n shaderCache: {hint: `${components}`, inputDependencies: ['type']},\n getRunData: () => ({\n outputs: [{dims: shape, dataType: input.dataType}],\n dispatchGroup: {x: rows},\n programUniforms: [{type: DataType.int32, data: packedCols}]\n }),\n getShaderSource,\n };\n};\n\nexport const softmax = (context: ComputeContext, attributes: SoftmaxAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createSoftmaxProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseSoftmaxAttributes = (attributes: Record): SoftmaxAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly numOutputs: number;\n readonly splitSizes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n};\n\nconst createSplitAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SplitAttributes): SplitAttributes => {\n const splitSizes: number[] = [];\n let numOutputs: number = attributes.numOutputs;\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => splitSizes.push(Number(v)));\n numOutputs = splitSizes.length;\n }\n return createAttributeWithCacheKey({numOutputs, axis: attributes.axis, splitSizes});\n };\n\nconst calculateOutputIndexImpl = (numberOfTensors: number): string => `\nfn calculateOutputIndex(index: u32) -> u32 {\n for (var i: u32 = 0u; i < ${numberOfTensors}u; i += 1u ) {\n if (index < ${getElementAt('uniforms.size_in_split_axis', 'i', numberOfTensors)}) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n}`;\nconst writeBufferDataImpl = (outputs: readonly IndicesHelper[]) => {\n const numberOfTensors = outputs.length;\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = outputs[i].setByIndices('indices', 'input[global_idx]');\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (output_number == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (output_number == ${i}) { ${returnSnippet} }`);\n }\n }\n return `\n fn writeBufferData(output_number: u32, indices: ${outputs[0].type.indices}, global_idx: u32) {\n ${codeLines.join('\\n')}\n }`;\n};\n\nconst createSplitProgramInfo = (inputs: readonly TensorView[], attributes: SplitAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const dataType = inputs[0].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const outputs = new Array(attributes.numOutputs);\n const input = inputVariable('input', dataType, inputShape.length);\n const sizeInSplitAxis = new Array(attributes.numOutputs);\n const outputsTensorInfo: TensorInfo[] = [];\n const outputShapes: number[][] = [];\n let previousSum = 0;\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: inputSize}];\n for (let i = 0; i < attributes.numOutputs; i++) {\n previousSum += attributes.splitSizes[i];\n sizeInSplitAxis[i] = previousSum;\n const outputShape = inputShape.slice();\n outputShape[attributes.axis] = attributes.splitSizes[i];\n outputShapes.push(outputShape);\n outputs[i] = outputVariable(`output${i}`, dataType, outputShape.length);\n outputsTensorInfo.push({dims: outputShapes[i], dataType: inputs[0].dataType});\n }\n programUniforms.push(\n {type: DataType.uint32, data: sizeInSplitAxis}, 
...createTensorShapeVariables(inputShape, ...outputShapes));\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('input_size', 'u32')\n .registerUniform('size_in_split_axis', 'u32', sizeInSplitAxis.length)\n .declareVariables(input, ...outputs)}\n ${calculateOutputIndexImpl(sizeInSplitAxis.length)}\n ${writeBufferDataImpl(outputs)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.input_size')}\n\n var indices = ${input.offsetToIndices('global_idx')};\n var index = ${input.indicesGet('indices', axis)};\n let output_number = calculateOutputIndex(index);\n if (output_number != 0) {\n index -= ${getElementAt('uniforms.size_in_split_axis', 'output_number - 1u', sizeInSplitAxis.length)};\n ${input.indicesSet('indices', axis, 'index')};\n }\n writeBufferData(output_number, indices, global_idx);\n }`;\n return {\n name: 'Split',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: outputsTensorInfo,\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const split = (context: ComputeContext, attributes: SplitAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes =\n context.inputs.length === 1 ? attributes : createSplitAttributesFromInputs(context.inputs, attributes);\n context.compute(createSplitProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n\nexport const parseSplitAttributes = (attributes: Record): SplitAttributes => {\n const axis = attributes.axis as number;\n const splitSizes: number[] = attributes.splitSizes as number[];\n const numOutputs = attributes.numOutputs as number < 0 ? splitSizes.length : attributes.numOutputs as number;\n if (numOutputs !== splitSizes.length) {\n throw new Error('numOutputs and splitSizes lengh must be equal');\n }\n return createAttributeWithCacheKey({axis, numOutputs, splitSizes});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst createWhereOpProgramShader =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], dimsOutput: readonly number[], isBroadcast: boolean,\n typeOutput: number) => {\n const output = outputVariable('output_data', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('a_data', inputs[1].dataType, inputs[1].dims.length, 4);\n const b = inputVariable('b_data', inputs[2].dataType, inputs[2].dims.length, 4);\n const c = inputVariable('c_data', inputs[0].dataType, inputs[0].dims.length, 4);\n\n let assignment: string;\n const expression = (a: string, b: string, c: string) => `select(${b}, ${a}, ${c})`;\n if (!isBroadcast) {\n assignment = output.setByOffset(\n 'global_idx',\n expression(a.getByOffset('global_idx'), b.getByOffset('global_idx'), c.getByOffset('global_idx')));\n } else {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `a_data[index_a${x}][component_a${x}]`;\n const expressionB = `b_data[index_b${x}][component_b${x}]`;\n // eslint-disable-next-line no-bitwise\n const expressionC = `bool(c_data[index_c${x}] & (0xffu << (component_c${x} * 8)))`;\n return `\n let output_indices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offset_a${x} = ${a.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_b${x} = ${b.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_c${x} = ${c.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let index_a${x} = offset_a${x} / 4u;\n let index_b${x} = offset_b${x} / 4u;\n let index_c${x} = offset_c${x} / 4u;\n let component_a${x} = offset_a${x} % 4u;\n let component_b${x} = offset_b${x} % 4u;\n let component_c${x} = offset_c${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expression(expressionA, expressionB, expressionC)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n output_data[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('output_data[global_idx]', 0)}\n ${singleAssignment('output_data[global_idx]', 1)}\n ${singleAssignment('output_data[global_idx]', 2)}\n ${singleAssignment('output_data[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(c, a, b, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createWhereOpProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const dimsA = inputs[1].dims;\n const dimsB = inputs[2].dims;\n const dimsC = inputs[0].dims;\n const outputDataType = inputs[1].dataType;\n\n const isBroadcast = !(ShapeUtil.areEqual(dimsA, dimsB) && ShapeUtil.areEqual(dimsB, dimsC));\n let outputShape = dimsA;\n let outputSize = ShapeUtil.size(dimsA);\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(BroadcastUtil.calcShape(dimsA, dimsB, false)!, dimsC, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform where op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n }\n\n const vecSize = Math.ceil(outputSize / 4);\n\n return {\n name: 'Where',\n shaderCache: {inputDependencies: ['rank', 'rank', 'rank']},\n getShaderSource: (shaderHelper) =>\n createWhereOpProgramShader(shaderHelper, inputs, outputShape, isBroadcast, outputDataType),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms:\n [{type: DataType.uint32, data: vecSize}, ...createTensorShapeVariables(dimsC, dimsA, dimsB, outputShape)],\n }),\n };\n};\n\nexport const where = (context: ComputeContext): void => {\n context.compute(createWhereOpProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {argMax, argMin, parseArgMinMaxAttributes} from './ops/argminmax';\nimport {attention} from './ops/attention';\nimport {batchNorm} from './ops/batch-norm';\nimport {biasAdd} from './ops/bias-add';\nimport {biasSplitGelu} from './ops/bias-split-gelu';\nimport * as binaryOps from './ops/binary-op';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {cumsum, parseCumSumAttributes} from './ops/cumsum';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {einsum, parseEinsumAttributes} from './ops/einsum';\nimport {expand} from './ops/expand';\nimport {fastGelu} from './ops/fast-gelu';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gatherElements, parseGatherElementsAttributes} from './ops/gather-elements';\nimport {gemm, parseGemmAttributes} from './ops/gemm';\nimport {groupQueryAttention, parseGroupQueryAttentionAttributes} from './ops/group-query-attention';\nimport {instanceNorm} from './ops/instance-norm';\nimport {layerNorm} from './ops/layer-norm';\nimport {matMul} from './ops/matmul';\nimport {matMulNBits, parseMatMulNBitsAttributes} from './ops/matmulnbits';\nimport {multiHeadAttention, parseMultiHeadAttentionAttributes} from './ops/multihead-attention';\nimport {pad} from './ops/pad';\nimport * as pool from './ops/pool';\nimport {range} from './ops/range';\nimport {reduceL1, reduceL2, reduceLogSum, reduceLogSumExp, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum, reduceSumSquare} from './ops/reduce';\nimport {parseResizeAttributes, resize} from './ops/resize';\nimport {rotaryEmbedding} from './ops/rotary-embedding';\nimport {skipLayerNorm} from './ops/skip-layer-norm';\nimport {parseSliceAttributes, slice} from './ops/slice';\nimport {parseSoftmaxAttributes, softmax} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {where} from './ops/where';\nimport {ComputeContext} from './types';\n\nexport type RunFunction = (context: ComputeContext, attribute?: unknown) => void;\nexport type ParseAttributeFunction = (attributeRaw: unknown) => unknown;\nexport type 
OperatorImplementation = [RunFunction]|[RunFunction, ParseAttributeFunction];\n\nexport const WEBGPU_OP_RESOLVE_RULES: Map = new Map([\n ['Abs', [unaryOps.abs]],\n ['Acos', [unaryOps.acos]],\n ['Acosh', [unaryOps.acosh]],\n ['Add', [binaryOps.add]],\n ['ArgMax', [argMax, parseArgMinMaxAttributes]],\n ['ArgMin', [argMin, parseArgMinMaxAttributes]],\n ['Asin', [unaryOps.asin]],\n ['Asinh', [unaryOps.asinh]],\n ['Atan', [unaryOps.atan]],\n ['Atanh', [unaryOps.atanh]],\n ['Attention', [attention]],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', [pool.averagePool, pool.parseAveragePoolAttributes]],\n ['BatchNormalization', [batchNorm]],\n ['BiasAdd', [biasAdd]],\n ['BiasSplitGelu', [biasSplitGelu]],\n ['Cast', [unaryOps.cast, unaryOps.parseCastAttributes]],\n ['Ceil', [unaryOps.ceil]],\n ['Clip', [unaryOps.clip]],\n ['Concat', [concat, parseConcatAttributes]],\n ['Conv', [conv, parseConvAttributes]],\n ['ConvTranspose', [convTranspose, parseConvTransposeAttributes]],\n ['Cos', [unaryOps.cos]],\n ['Cosh', [unaryOps.cosh]],\n ['CumSum', [cumsum, parseCumSumAttributes]],\n ['DepthToSpace', [depthToSpace, parseDepthToSpaceAttributes]],\n ['Div', [binaryOps.div]],\n ['Einsum', [einsum, parseEinsumAttributes]],\n ['Elu', [unaryOps.elu, unaryOps.parseAlphaAttributes]],\n ['Equal', [binaryOps.equal]],\n ['Erf', [unaryOps.erf]],\n ['Exp', [unaryOps.exp]],\n ['Expand', [expand]],\n ['FastGelu', [fastGelu]],\n ['Floor', [unaryOps.floor]],\n ['FusedConv', [conv, parseConvAttributes]],\n ['Gather', [gather, parseGatherAttributes]],\n ['GatherElements', [gatherElements, parseGatherElementsAttributes]],\n ['Gelu', [unaryOps.gelu]],\n ['Gemm', [gemm, parseGemmAttributes]],\n ['GlobalAveragePool', [pool.globalAveragePool, pool.parseGlobalAveragePoolAttributes]],\n ['GlobalMaxPool', [pool.globalMaxPool, pool.parseGlobalMaxPoolAttributes]],\n ['Greater', [binaryOps.greater]],\n ['GreaterOrEqual', [binaryOps.greaterOrEqual]],\n ['GroupQueryAttention', [groupQueryAttention, parseGroupQueryAttentionAttributes]],\n ['HardSigmoid', [unaryOps.hardSigmoid, unaryOps.parseHardSigmoidAttributes]],\n ['InstanceNormalization', [instanceNorm]],\n ['LayerNormalization', [layerNorm]],\n ['LeakyRelu', [unaryOps.leakyRelu, unaryOps.parseAlphaAttributes]],\n ['Less', [binaryOps.less]],\n ['LessOrEqual', [binaryOps.lessOrEqual]],\n ['Log', [unaryOps.log]],\n ['MatMul', [matMul]],\n ['MatMulNBits', [matMulNBits, parseMatMulNBitsAttributes]],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', [pool.maxPool, pool.parseMaxPoolAttributes]],\n ['Mul', [binaryOps.mul]],\n ['MultiHeadAttention', [multiHeadAttention, parseMultiHeadAttentionAttributes]],\n ['Neg', [unaryOps.neg]],\n ['Not', [unaryOps.not]],\n ['Pad', [pad]],\n ['Pow', [binaryOps.pow]],\n ['QuickGelu', [unaryOps.quickgelu, unaryOps.parseAlphaAttributes]],\n ['Range', [range]],\n ['Reciprocal', [unaryOps.reciprocal]],\n ['ReduceMin', [reduceMin]],\n ['ReduceMean', [reduceMean]],\n ['ReduceMax', [reduceMax]],\n ['ReduceSum', [reduceSum]],\n ['ReduceProd', [reduceProd]],\n ['ReduceL1', [reduceL1]],\n ['ReduceL2', [reduceL2]],\n ['ReduceLogSum', [reduceLogSum]],\n ['ReduceLogSumExp', [reduceLogSumExp]],\n ['ReduceSumSquare', [reduceSumSquare]],\n ['Relu', [unaryOps.relu]],\n ['Resize', [resize, parseResizeAttributes]],\n ['RotaryEmbedding', [rotaryEmbedding]],\n ['Sigmoid', [unaryOps.sigmoid]],\n ['Sin', [unaryOps.sin]],\n ['Sinh', [unaryOps.sinh]],\n ['Slice', [slice, parseSliceAttributes]],\n ['SkipLayerNormalization', 
[skipLayerNorm]],\n ['Split', [split, parseSplitAttributes]],\n ['Sqrt', [unaryOps.sqrt]],\n ['Softmax', [softmax, parseSoftmaxAttributes]],\n ['Sub', [binaryOps.sub]],\n ['Tan', [unaryOps.tan]],\n ['Tanh', [unaryOps.tanh]],\n ['ThresholdedRelu', [unaryOps.thresholdedRelu, unaryOps.parseAlphaAttributes]],\n ['Tile', [tile]],\n ['Transpose', [transpose, parseTransposeAttributes]],\n ['Where', [where]],\n]);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {createShaderHelper} from './ops/common';\nimport {Artifact, GpuData, ProgramInfo} from './types';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n attributesBound: boolean;\n\n constructor(private backend: WebGpuBackend) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: GpuData[], outputs: GpuData[], dispatchGroup: [number, number, number],\n uniformBufferBinding: GPUBindingResource|undefined): void {\n TRACE_FUNC_BEGIN(buildArtifact.programInfo.name);\n const device = this.backend.device;\n const computePassEncoder = this.backend.getComputePassEncoder();\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2);\n const entries = [];\n for (const input of inputs) {\n entries.push({binding: entries.length, resource: {buffer: input.buffer}});\n }\n for (const output of outputs) {\n entries.push({binding: entries.length, resource: {buffer: output.buffer}});\n }\n if (uniformBufferBinding) {\n entries.push({binding: entries.length, resource: uniformBufferBinding});\n }\n const bindGroup = device.createBindGroup(\n {layout: buildArtifact.computePipeline.getBindGroupLayout(0), entries, label: buildArtifact.programInfo.name});\n\n if (this.backend.sessionStatus === 'capturing') {\n const commandInfo = {\n kernelId: this.backend.currentKernelId!,\n computePipeline: buildArtifact.computePipeline,\n bindGroup,\n dispatchGroup\n };\n const sessionCommandList = this.backend.capturedCommandList.get(this.backend.currentSessionId!);\n sessionCommandList!.push(commandInfo);\n }\n\n computePassEncoder.setPipeline(buildArtifact.computePipeline);\n computePassEncoder.setBindGroup(0, bindGroup);\n computePassEncoder.dispatchWorkgroups(...dispatchGroup);\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2 + 1);\n this.backend.pendingDispatchNumber++;\n\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber ||\n this.backend.queryType === 'at-passes') {\n this.backend.endComputePass();\n }\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber) {\n this.backend.flush();\n }\n TRACE_FUNC_END(buildArtifact.programInfo.name);\n }\n dispose(): void {\n // this.repo.forEach(a => 
this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, normalizedDispatchGroupSize: [number, number, number]): Artifact {\n TRACE_FUNC_BEGIN(programInfo.name);\n const device = this.backend.device;\n const extensions: string[] = [];\n if (device.features.has('shader-f16')) {\n extensions.push('enable f16;');\n }\n const shaderHelper = createShaderHelper(normalizedDispatchGroupSize, this.backend.device.limits);\n const userCode = programInfo.getShaderSource(shaderHelper);\n const code = `${extensions.join('\\n')}\\n${shaderHelper.additionalImplementations}\\n${userCode}`;\n const shaderModule = device.createShaderModule({code, label: programInfo.name});\n LOG_DEBUG('verbose', () => `[WebGPU] ${programInfo.name} shader code: ${code}`);\n\n const computePipeline = device.createComputePipeline(\n {compute: {module: shaderModule, entryPoint: 'main'}, layout: 'auto', label: programInfo.name});\n\n TRACE_FUNC_END(programInfo.name);\n return {programInfo, computePipeline, uniformVariablesInfo: shaderHelper.variablesInfo};\n }\n\n normalizeDispatchGroupSize(dispatchGroup: ReturnType['dispatchGroup']):\n [number, number, number] {\n const x = typeof dispatchGroup === 'number' ? dispatchGroup : dispatchGroup.x;\n const y = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.y || 1);\n const z = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.z || 1);\n const limitPerDimension = this.backend.device.limits.maxComputeWorkgroupsPerDimension;\n if (x <= limitPerDimension && y <= limitPerDimension && z <= limitPerDimension) {\n return [x, y, z];\n }\n const size = x * y * z;\n let dispatchAverage = Math.ceil(Math.sqrt(size));\n if (dispatchAverage > limitPerDimension) {\n dispatchAverage = Math.ceil(Math.cbrt(size));\n if (dispatchAverage > limitPerDimension) {\n throw new Error('Total dispatch size exceeds WebGPU maximum.');\n }\n return [dispatchAverage, dispatchAverage, dispatchAverage];\n } else {\n return [dispatchAverage, dispatchAverage, 1];\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env, Tensor, TRACE, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {DataType, tensorDataTypeEnumToString} from '../wasm-common';\n\nimport {configureLogger, LOG_DEBUG} from './log';\nimport {createView, TensorView} from './tensor-view';\nimport {createGpuDataManager, downloadGpuData, GpuDataManager} from './webgpu/gpu-data-manager';\nimport {RunFunction, WEBGPU_OP_RESOLVE_RULES} from './webgpu/op-resolve-rules';\nimport {ProgramManager} from './webgpu/program-manager';\nimport {AdapterInfo, ComputeContext, GpuArchitecture, GpuData, GpuVendor, ProgramInfo, ProgramInputTensorInfoDependency, SessionState, TimestampQuery} from './webgpu/types';\n\ninterface CommandInfo {\n readonly kernelId: number;\n readonly computePipeline: GPUComputePipeline;\n readonly bindGroup: GPUBindGroup;\n readonly dispatchGroup: [number, number, number];\n}\n\ninterface KernelInfo {\n readonly kernelType: string;\n readonly kernelName: string;\n readonly kernelEntry: RunFunction;\n readonly attributes: [((attribute: unknown) => unknown)|undefined, unknown];\n}\n\ninterface PendingKernelInfo {\n readonly kernelId: number;\n readonly programName: string;\n readonly inputTensorViews: readonly TensorView[];\n readonly outputTensorViews: readonly TensorView[];\n}\n\nconst getProgramInputTensorInfoDependencyKey =\n (inputTensors: readonly TensorView[], inputDependencies: readonly ProgramInputTensorInfoDependency[]): string => {\n if (inputDependencies.length !== inputTensors.length) {\n throw new Error(`inputDependencies length ${inputDependencies.length} is not equal to inputTensors length ${\n inputTensors.length}.`);\n }\n\n const inputInfos: string[] = [];\n for (let i = 0; i < inputTensors.length; ++i) {\n const type = inputTensors[i].dataType;\n switch (inputDependencies[i]) {\n case 'none': {\n inputInfos.push('');\n break;\n }\n case 'type': {\n inputInfos.push(`${type}`);\n break;\n }\n case 'rank': {\n const rank = inputTensors[i].dims.length;\n inputInfos.push(`${type};${rank}`);\n break;\n }\n case 'dims': {\n const dims = inputTensors[i].dims.join(',');\n inputInfos.push(`${type};${dims}`);\n break;\n }\n default:\n throw new Error(`unsupported input dependency: ${inputDependencies[i]}`);\n }\n }\n\n return inputInfos.join('|');\n };\n\n/**\n * get a unique key representing the program from the program info, input shapes and types.\n *\n * @returns a unique key is a shorter string than the shader source, which contains all the information to identify a\n * program. 
if the key is the same, the program shader source should be the same, so we can reuse the program.\n *\n */\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo, inputTensors: readonly TensorView[], is1DimensionDispatch: boolean): string => {\n // final key format:\n // []:is1DimensionDispatch:||...\n let key = programInfo.name;\n if (programInfo.shaderCache?.hint) {\n key += '[' + programInfo.shaderCache.hint + ']';\n }\n key += ':' + is1DimensionDispatch +\n `:${\n getProgramInputTensorInfoDependencyKey(\n inputTensors,\n programInfo.shaderCache?.inputDependencies ??\n new Array(inputTensors.length).fill('dims'))}`;\n return key;\n };\n\nclass AdapterInfoImpl implements AdapterInfo {\n readonly architecture?: string;\n readonly vendor?: string;\n\n constructor(adapterInfo: GPUAdapterInfo) {\n if (adapterInfo) {\n this.architecture = adapterInfo.architecture;\n this.vendor = adapterInfo.vendor;\n }\n }\n\n isArchitecture(architecture: GpuArchitecture): boolean {\n return this.architecture === architecture;\n }\n\n isVendor(vendor: GpuVendor): boolean {\n return this.vendor === vendor;\n }\n}\n\n/**\n * this class is designed to store status and being used as a singleton for JSEP. It will be passed to jsepInit() as\n * the first parameter so that it is stored for future use.\n */\nexport class WebGpuBackend {\n adapterInfo: AdapterInfoImpl;\n device: GPUDevice;\n /**\n * an instance of GpuDataManager to manage a GpuDataId -> GpuBuffer mapping\n */\n gpuDataManager: GpuDataManager;\n /**\n * an instance of ProgramManager to build and run WebGPU compute shader program, and manage a ProgramKey -> Program\n * artifacts mapping\n */\n programManager: ProgramManager;\n\n /**\n * representing the session ID of which is currently being run.\n * `null` means no session is being run.\n * only valid when session.run is executed.\n */\n currentSessionId: number|null = null;\n\n /**\n * representing the kernel ID of which is currently being computed (CPU code perspective).\n * `null` means no kernel is being computed.\n * only one kernel can be computed at a moment.\n */\n currentKernelId: number|null = null;\n /**\n * a list of temporary GPU data for the current kernel. should release when the kernel done computation.\n */\n private temporaryData: GpuData[];\n /**\n * a KernelID -> a GPU data list, which stores persistent GPU data owned by the specific kernel.\n */\n private kernelPersistentData: Map;\n /**\n * a KernelID -> a custom data, which stores custom data owned by the specific kernel.\n */\n private kernelCustomData: Map;\n /**\n * get the custom data of the current kernel\n */\n get currentKernelCustomData(): {[key: string]: unknown} {\n if (this.currentKernelId === null) {\n throw new Error('currentKernelCustomData(): currentKernelId is null. 
(should not happen)');\n }\n\n let data = this.kernelCustomData.get(this.currentKernelId);\n if (!data) {\n data = {};\n this.kernelCustomData.set(this.currentKernelId, data);\n }\n\n return data;\n }\n\n // KernelID -> kernelInfo mapping\n kernels: Map;\n private commandEncoder: GPUCommandEncoder|null = null;\n private computePassEncoder: GPUComputePassEncoder|null = null;\n maxDispatchNumber = 16;\n pendingDispatchNumber = 0;\n\n // info of kernels pending submission for a single batch\n private pendingKernels: PendingKernelInfo[] = [];\n // queryReadBuffer -> pendingKernels mapping for all the batches\n private pendingQueries: Map = new Map();\n private queryResolveBuffer?: GPUBuffer;\n private querySet?: GPUQuerySet;\n private queryTimeBase?: bigint;\n queryType: TimestampQuery;\n\n env: Env;\n sessionStatus: SessionState = 'default';\n /**\n * a SessionID -> CommandInfo[] mapping. It's used to record all GPU commands for corresponding session.\n */\n capturedCommandList: Map = new Map();\n\n /**\n * a SessionID -> PendingKernelInfo[] mapping for profiling.\n */\n private capturedPendingKernels: Map = new Map();\n\n /**\n * a SessionID -> a Map of (InputOutputIndex -> [ID, GPUBuffer]) mapping.\n */\n sessionExternalDataMapping: Map> = new Map();\n\n async initialize(env: Env, adapter: GPUAdapter): Promise {\n this.env = env;\n const requiredFeatures: GPUFeatureName[] = [];\n const deviceDescriptor: GPUDeviceDescriptor = {\n requiredLimits: {\n maxComputeWorkgroupStorageSize: adapter.limits.maxComputeWorkgroupStorageSize,\n maxComputeWorkgroupsPerDimension: adapter.limits.maxComputeWorkgroupsPerDimension,\n maxStorageBufferBindingSize: adapter.limits.maxStorageBufferBindingSize,\n maxBufferSize: adapter.limits.maxBufferSize,\n maxComputeInvocationsPerWorkgroup: adapter.limits.maxComputeInvocationsPerWorkgroup,\n maxComputeWorkgroupSizeX: adapter.limits.maxComputeWorkgroupSizeX,\n maxComputeWorkgroupSizeY: adapter.limits.maxComputeWorkgroupSizeY,\n maxComputeWorkgroupSizeZ: adapter.limits.maxComputeWorkgroupSizeZ,\n },\n requiredFeatures,\n };\n\n if (adapter.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n requiredFeatures.push('chromium-experimental-timestamp-query-inside-passes' as GPUFeatureName);\n } else if (adapter.features.has('timestamp-query')) {\n requiredFeatures.push('timestamp-query');\n }\n if (adapter.features.has('shader-f16')) {\n requiredFeatures.push('shader-f16');\n }\n\n this.device = await adapter.requestDevice(deviceDescriptor);\n this.adapterInfo = new AdapterInfoImpl(adapter.info || await adapter.requestAdapterInfo());\n this.gpuDataManager = createGpuDataManager(this);\n this.programManager = new ProgramManager(this);\n this.kernels = new Map();\n this.kernelPersistentData = new Map();\n this.kernelCustomData = new Map();\n\n // set up flags for logger\n configureLogger(env.logLevel!, !!env.debug);\n\n // TODO: set up flags\n\n this.device.onuncapturederror = ev => {\n if (ev.error instanceof GPUValidationError) {\n // eslint-disable-next-line no-console\n console.error(`An uncaught WebGPU validation error was raised: ${ev.error.message}`);\n }\n };\n\n Object.defineProperty(\n this.env.webgpu, 'device', {value: this.device, writable: false, enumerable: true, configurable: false});\n Object.defineProperty(\n this.env.webgpu, 'adapter', {value: adapter, writable: false, enumerable: true, configurable: false});\n\n // init queryType, which is necessary for InferenceSession.create\n this.setQueryType();\n }\n\n dispose(): void {\n if 
(typeof this.querySet !== 'undefined') {\n this.querySet.destroy();\n }\n this.gpuDataManager.dispose();\n }\n\n getCommandEncoder(): GPUCommandEncoder {\n if (!this.commandEncoder) {\n this.commandEncoder = this.device.createCommandEncoder();\n }\n return this.commandEncoder;\n }\n\n getComputePassEncoder(): GPUComputePassEncoder {\n if (!this.computePassEncoder) {\n const commandEncoder = this.getCommandEncoder();\n const computePassDescriptor: GPUComputePassDescriptor = {};\n\n if (this.queryType === 'at-passes') {\n computePassDescriptor.timestampWrites = {\n querySet: this.querySet!,\n beginningOfPassWriteIndex: this.pendingDispatchNumber * 2,\n endOfPassWriteIndex: this.pendingDispatchNumber * 2 + 1,\n };\n }\n\n this.computePassEncoder = commandEncoder.beginComputePass(computePassDescriptor);\n }\n return this.computePassEncoder;\n }\n\n endComputePass(): void {\n if (this.computePassEncoder) {\n this.computePassEncoder.end();\n this.computePassEncoder = null;\n }\n }\n\n flush(): void {\n if (!this.commandEncoder) {\n return;\n }\n\n TRACE_FUNC_BEGIN();\n\n this.endComputePass();\n let queryReadBuffer: GPUBuffer;\n if (this.queryType !== 'none') {\n this.commandEncoder.resolveQuerySet(\n this.querySet!, 0, this.pendingDispatchNumber * 2, this.queryResolveBuffer!, 0);\n\n queryReadBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.pendingDispatchNumber * 2 * 8, usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST});\n\n this.pendingQueries.set(queryReadBuffer, this.pendingKernels);\n this.pendingKernels = [];\n this.commandEncoder.copyBufferToBuffer(\n this.queryResolveBuffer!, 0, queryReadBuffer, 0, this.pendingDispatchNumber * 2 * 8);\n }\n\n this.device.queue.submit([this.commandEncoder.finish()]);\n this.gpuDataManager.refreshPendingBuffers();\n this.commandEncoder = null;\n this.pendingDispatchNumber = 0;\n\n if (this.queryType !== 'none') {\n void queryReadBuffer!.mapAsync(GPUMapMode.READ).then(() => {\n const mappedData = new BigUint64Array(queryReadBuffer.getMappedRange());\n const pendingKernels = this.pendingQueries.get(queryReadBuffer)!;\n for (let i = 0; i < mappedData.length / 2; i++) {\n const pendingKernelInfo = pendingKernels[i];\n const kernelId = pendingKernelInfo.kernelId;\n const kernelInfo = this.kernels.get(kernelId)!;\n const kernelType = kernelInfo.kernelType;\n const kernelName = kernelInfo.kernelName;\n const programName = pendingKernelInfo.programName;\n const inputTensorViews = pendingKernelInfo.inputTensorViews;\n const outputTensorViews = pendingKernelInfo.outputTensorViews;\n const startTimeU64 = mappedData[i * 2];\n const endTimeU64 = mappedData[i * 2 + 1];\n\n if (typeof this.queryTimeBase === 'undefined') {\n this.queryTimeBase = startTimeU64;\n }\n\n const startTime = Number(startTimeU64 - this.queryTimeBase);\n const endTime = Number(endTimeU64 - this.queryTimeBase);\n\n if (!Number.isSafeInteger(startTime) || !Number.isSafeInteger(endTime)) {\n throw new RangeError('incorrect timestamp range');\n }\n\n if (this.env.webgpu.profiling?.ondata) {\n this.env.webgpu.profiling.ondata({\n version: 1,\n inputsMetadata: inputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n outputsMetadata: outputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n kernelId,\n kernelType,\n kernelName,\n programName,\n startTime,\n endTime,\n });\n } else {\n // if no callback is provided, print the profiling message to 
console\n let inputShapes = '';\n inputTensorViews.forEach((value, i) => {\n inputShapes += `input[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n let outputShapes = '';\n outputTensorViews.forEach((value, i) => {\n outputShapes += `output[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n // eslint-disable-next-line no-console\n console.log(`[profiling] kernel \"${kernelId}|${kernelType}|${kernelName}|${programName}\" ${inputShapes}${\n outputShapes}execution time: ${endTime - startTime} ns`);\n }\n TRACE('GPU', `${programName}::${startTimeU64}::${endTimeU64}`);\n }\n queryReadBuffer.unmap();\n this.pendingQueries.delete(queryReadBuffer);\n });\n }\n TRACE_FUNC_END();\n }\n\n /**\n * run a WebGPU program.\n * @param program a ProgramInfo instance\n * @param inputTensorViews a TensorView array. each element represents a value already exists in GPU.\n * @param outputIndices an indices array. each element can be either -1 (temporary data), -2 (persistent data) or an\n * index to the kernel's output.\n * @param createKernelOutput a callback function that create a value to kernel's output with the given index\n * @param createIntermediateOutput a callback function that create a value as a intermediate value, either temporary\n * or persistent (owned by the current kernel)\n * @returns a TensorView array representing the result.\n */\n run(program: ProgramInfo, inputTensorViews: readonly TensorView[], outputIndices: readonly number[],\n createKernelOutput: (index: number, dataType: number, dims: readonly number[]) => TensorView,\n createIntermediateOutput: (dataType: number, dims: readonly number[]) => TensorView,\n outputCount: number): TensorView[] {\n TRACE_FUNC_BEGIN(program.name);\n // create info for inputs\n const inputDatas: GpuData[] = [];\n for (let i = 0; i < inputTensorViews.length; ++i) {\n const data = inputTensorViews[i].data;\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(data);\n if (!gpuData) {\n throw new Error(`no GPU data for input: ${data}`);\n }\n inputDatas.push(gpuData);\n }\n\n const {outputs, dispatchGroup, programUniforms} = program.getRunData(inputTensorViews);\n\n // check output indices\n const validatedOutputIndices = outputIndices.length === 0 ? outputs.map((_, i) => i) : outputIndices;\n if (validatedOutputIndices.length !== outputs.length) {\n throw new Error(`Output size ${validatedOutputIndices.length} must be equal to ${outputs.length}.`);\n }\n\n // create info for outputs\n const outputTensorViews: TensorView[] = [];\n const outputDatas: GpuData[] = [];\n for (let i = 0; i < outputs.length; ++i) {\n // value -1 and -2 are used for creating temporary and persistent outputs.\n // value -3 is used for placeholder output. So -3, -2, -1 and 0, 1, 2, ... are valid\n // output indices. 
see type definition of ComputeContextInputsOutputsMapping for more details.\n if (!Number.isInteger(validatedOutputIndices[i]) || validatedOutputIndices[i] < -3 ||\n validatedOutputIndices[i] >= outputCount) {\n throw new Error(`Invalid output index: ${validatedOutputIndices[i]}`);\n }\n if (validatedOutputIndices[i] === -3) {\n continue;\n }\n const isTemporary = validatedOutputIndices[i] === -1;\n const isPersistent = validatedOutputIndices[i] === -2;\n const tensorView = (isTemporary || isPersistent) ?\n createIntermediateOutput(outputs[i].dataType, outputs[i].dims) :\n createKernelOutput(validatedOutputIndices[i], outputs[i].dataType, outputs[i].dims);\n outputTensorViews.push(tensorView);\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (tensorView.data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(tensorView.data);\n if (!gpuData) {\n throw new Error(`no GPU data for output: ${tensorView.data}`);\n }\n if (isTemporary) {\n this.temporaryData.push(gpuData);\n }\n if (isPersistent) {\n let persistentData = this.kernelPersistentData.get(this.currentKernelId!);\n if (!persistentData) {\n persistentData = [];\n this.kernelPersistentData.set(this.currentKernelId!, persistentData);\n }\n persistentData.push(gpuData);\n }\n outputDatas.push(gpuData);\n }\n\n // when there are any zero-sized tensor in the inputs or outputs, we should report error unless all outputs are\n // zero-sized tensors.\n if (inputDatas.length !== inputTensorViews.length || outputDatas.length !== outputTensorViews.length) {\n // if all outputs are zero-sized tensors, there is no need to run the program.\n if (outputDatas.length === 0) {\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n // if some outputs are zero-sized tensors, report an error.\n //\n // TODO: so far we don't see any use case that outputs include both zero-sized tensors and non-zero-sized tensors.\n // If we see such use case, we need to make a change here to support it.\n throw new Error(\n `Program ${program.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`);\n }\n\n // load uniforms\n // TODO: add cache for uniform (is it necessary?)\n //\n let uniformBufferBinding: GPUBindingResource|undefined;\n if (programUniforms) {\n let currentOffset = 0;\n const offsets: number[] = [];\n\n programUniforms.forEach(v => {\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (data.length === 0) {\n return;\n }\n // https://www.w3.org/TR/WGSL/#alignof\n const sizeOfElement = v.type === DataType.float16 ? 2 : 4;\n let sizeOfVecOrMat;\n let baseAlignment;\n if (v.type === DataType.float16) {\n baseAlignment = data.length > 4 ? 16 : (data.length > 2 ? 8 : data.length * sizeOfElement);\n sizeOfVecOrMat = data.length > 4 ? 16 : sizeOfElement * data.length;\n } else {\n baseAlignment = data.length <= 2 ? data.length * sizeOfElement : 16;\n sizeOfVecOrMat = 16;\n }\n currentOffset = Math.ceil(currentOffset / baseAlignment) * baseAlignment;\n offsets.push(currentOffset);\n // For non-float16 type, when data.length > 4, the uniform variable is of type array,N>, where\n // N = Math.ceil(data.length / 4) and SizeOf(vec4) = 16. The total byte length is N *\n // SizeOf(vec4). For float16 type, when data.length > 4, the uniform variable is of type\n // array,N>, where N = Math.ceil(data.length / 8) and SizeOf(mat2x4) = 16. The total byte\n // length is N * SizeOf(mat2x4).\n const elementPerVecOrMat = v.type === DataType.float16 ? 
8 : 4;\n currentOffset += data.length > 4 ? Math.ceil(data.length / elementPerVecOrMat) * sizeOfVecOrMat :\n data.length * sizeOfElement;\n });\n\n // Meet alignment of struct here: https://www.w3.org/TR/WGSL/#alignment-and-size. For simplicity, set\n // maxAlignmentOfField to 16 since the underlying buffer has been rounded up to 16.\n const maxAlignmentOfField = 16;\n currentOffset = Math.ceil(currentOffset / maxAlignmentOfField) * maxAlignmentOfField;\n const arrayBuffer = new ArrayBuffer(currentOffset);\n programUniforms.forEach((v, i) => {\n const offset = offsets[i];\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (v.type === DataType.int32) {\n new Int32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.uint32) {\n new Uint32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float16) {\n // TODO: use Float16Array.\n new Uint16Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float) {\n new Float32Array(arrayBuffer, offset, data.length).set(data);\n } else {\n throw new Error(`Unsupported uniform type: ${tensorDataTypeEnumToString(v.type)}`);\n }\n });\n\n const uniformBufferData =\n // eslint-disable-next-line no-bitwise\n this.gpuDataManager.create(currentOffset, GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM);\n this.device.queue.writeBuffer(uniformBufferData.buffer, 0, arrayBuffer, 0, currentOffset);\n this.gpuDataManager.release(uniformBufferData.id);\n uniformBufferBinding = {offset: 0, size: currentOffset, buffer: uniformBufferData.buffer};\n }\n\n const normalizedDispatchGroup = this.programManager.normalizeDispatchGroupSize(dispatchGroup);\n const is1DimensionDispatch = normalizedDispatchGroup[1] === 1 && normalizedDispatchGroup[2] === 1;\n // get program info\n const key = getProgramInfoUniqueKey(program, inputTensorViews, is1DimensionDispatch);\n let artifact = this.programManager.getArtifact(key);\n if (!artifact) {\n artifact = this.programManager.build(program, normalizedDispatchGroup);\n this.programManager.setArtifact(key, artifact);\n LOG_DEBUG('info', () => `[artifact] key: ${key}, programName: ${program.name}`);\n }\n\n // validate uniform variables\n if (programUniforms && artifact.uniformVariablesInfo) {\n if (programUniforms.length !== artifact.uniformVariablesInfo.length) {\n throw new Error(`Uniform variables count mismatch: expect ${artifact.uniformVariablesInfo.length}, got ${\n programUniforms.length} in program \"${artifact.programInfo.name}\".`);\n }\n for (let i = 0; i < programUniforms.length; i++) {\n const uniform = programUniforms[i];\n const actualType = uniform.type;\n const actualLength = typeof uniform.data === 'number' ? 
1 : uniform.data.length;\n const [type, length] = artifact.uniformVariablesInfo[i];\n if (actualType !== type || actualLength !== length) {\n throw new Error(`Uniform variable ${i} mismatch: expect type ${type} with size ${length}, got type ${\n actualType} with size ${actualLength} in program \"${artifact.programInfo.name}\".`);\n }\n }\n }\n\n LOG_DEBUG(\n 'info',\n () => `[ProgramManager] run \"${program.name}\" (key=${key}) with ${normalizedDispatchGroup[0]}x${\n normalizedDispatchGroup[1]}x${normalizedDispatchGroup[2]}`);\n\n if (this.queryType !== 'none' || this.sessionStatus === 'capturing') {\n const pendingKernelInfo: PendingKernelInfo = {\n kernelId: this.currentKernelId!,\n programName: artifact.programInfo.name,\n inputTensorViews,\n outputTensorViews,\n };\n this.pendingKernels.push(pendingKernelInfo);\n\n if (this.sessionStatus === 'capturing') {\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n sessionPendingKernels!.push(pendingKernelInfo);\n }\n }\n\n this.programManager.run(artifact, inputDatas, outputDatas, normalizedDispatchGroup, uniformBufferBinding);\n\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n\n upload(gpuDataId: number, data: Uint8Array): void {\n this.gpuDataManager.upload(gpuDataId, data);\n }\n\n memcpy(src: number, dst: number): void {\n this.gpuDataManager.memcpy(src, dst);\n }\n\n async download(gpuDataId: number, getTargetBuffer: () => Uint8Array): Promise {\n // the underlying buffer may be changed after the async function is called. so we use a getter function to make sure\n // the buffer is up-to-date.\n await this.gpuDataManager.download(gpuDataId, getTargetBuffer);\n }\n\n alloc(size: number): number {\n return this.gpuDataManager.create(size).id;\n }\n\n free(ptr: number): number {\n return this.gpuDataManager.release(ptr);\n }\n\n createKernel(kernelType: string, kernelId: number, attribute: unknown, kernelName: string): void {\n const op = WEBGPU_OP_RESOLVE_RULES.get(kernelType);\n if (!op) {\n throw new Error(`kernel not implemented: ${kernelType}`);\n }\n\n const kernelInfo: KernelInfo = {\n kernelType,\n kernelName,\n kernelEntry: op[0],\n attributes: [op[1], attribute],\n };\n this.kernels.set(kernelId, kernelInfo);\n }\n\n releaseKernel(kernelId: number): void {\n const persistentData = this.kernelPersistentData.get(kernelId);\n if (persistentData) {\n for (const data of persistentData) {\n this.gpuDataManager.release(data.id);\n }\n this.kernelPersistentData.delete(kernelId);\n }\n\n this.kernelCustomData.delete(kernelId);\n this.kernels.delete(kernelId);\n }\n\n computeKernel(kernelId: number, context: ComputeContext, errors: Array>): number {\n const kernel = this.kernels.get(kernelId);\n if (!kernel) {\n throw new Error(`kernel not created: ${kernelId}`);\n }\n const kernelType = kernel.kernelType;\n const kernelName = kernel.kernelName;\n const kernelEntry = kernel.kernelEntry;\n const attributes = kernel.attributes;\n if (this.currentKernelId !== null) {\n throw new Error(`kernel \"[${kernelType}] ${kernelName}\" is not allowed to be called recursively`);\n }\n this.currentKernelId = kernelId;\n\n // parse attributes if necessary\n if (attributes[0]) {\n attributes[1] = attributes[0](attributes[1]);\n attributes[0] = undefined;\n }\n\n LOG_DEBUG('info', () => `[WebGPU] Start to run kernel \"[${kernelType}] ${kernelName}\"...`);\n\n const useErrorScope = this.env.debug;\n\n this.temporaryData = [];\n try {\n if (useErrorScope) {\n this.device.pushErrorScope('validation');\n 
}\n\n kernelEntry(context, attributes[1]);\n return 0; // ORT_OK\n } catch (e) {\n errors.push(Promise.resolve(`[WebGPU] Kernel \"[${kernelType}] ${kernelName}\" failed. ${e}`));\n return 1; // ORT_FAIL\n } finally {\n if (useErrorScope) {\n errors.push(this.device.popErrorScope().then(\n err => err ? `GPU validation error for kernel \"[${kernelType}] ${kernelName}\": ${err.message}` : null));\n }\n\n for (const data of this.temporaryData) {\n this.gpuDataManager.release(data.id);\n }\n this.temporaryData = [];\n this.currentKernelId = null;\n }\n }\n\n // #region external buffer\n registerBuffer(sessionId: number, index: number, buffer: GPUBuffer, size: number): number {\n let sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (!sessionInputOutputMapping) {\n sessionInputOutputMapping = new Map();\n this.sessionExternalDataMapping.set(sessionId, sessionInputOutputMapping);\n }\n\n const previousBuffer = sessionInputOutputMapping.get(index);\n const id = this.gpuDataManager.registerExternalBuffer(buffer, size, previousBuffer?.[1]);\n sessionInputOutputMapping.set(index, [id, buffer]);\n return id;\n }\n unregisterBuffers(sessionId: number): void {\n const sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (sessionInputOutputMapping) {\n sessionInputOutputMapping.forEach(bufferInfo => this.gpuDataManager.unregisterExternalBuffer(bufferInfo[1]));\n this.sessionExternalDataMapping.delete(sessionId);\n }\n }\n getBuffer(gpuDataId: number): GPUBuffer {\n const gpuData = this.gpuDataManager.get(gpuDataId);\n if (!gpuData) {\n throw new Error(`no GPU data for buffer: ${gpuDataId}`);\n }\n return gpuData.buffer;\n }\n createDownloader(gpuBuffer: GPUBuffer, size: number, type: Tensor.GpuBufferDataTypes):\n () => Promise {\n return async () => {\n const data = await downloadGpuData(this, gpuBuffer, size);\n return createView(data.buffer, type);\n };\n }\n // #endregion\n writeTimestamp(index: number): void {\n if (this.queryType !== 'inside-passes') {\n return;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (this.computePassEncoder as any).writeTimestamp(this.querySet, index);\n }\n setQueryType(): void {\n this.queryType = 'none';\n if (this.env.webgpu.profiling?.mode === 'default' ||\n (typeof this.env.trace === 'undefined' ? 
this.env.wasm.trace : this.env.trace)) {\n if (this.device.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n this.queryType = 'inside-passes';\n } else if (this.device.features.has('timestamp-query')) {\n this.queryType = 'at-passes';\n }\n\n if (this.queryType !== 'none' && typeof this.querySet === 'undefined') {\n this.querySet = this.device.createQuerySet({\n type: 'timestamp',\n count: this.maxDispatchNumber * 2,\n });\n this.queryResolveBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.maxDispatchNumber * 2 * 8, usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE});\n }\n }\n }\n\n captureBegin(): void {\n LOG_DEBUG('info', 'captureBegin');\n if (!this.capturedCommandList.get(this.currentSessionId!)) {\n this.capturedCommandList.set(this.currentSessionId!, []);\n }\n if (!this.capturedPendingKernels.get(this.currentSessionId!)) {\n this.capturedPendingKernels.set(this.currentSessionId!, []);\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'capturing';\n }\n captureEnd(): void {\n LOG_DEBUG('info', 'captureEnd');\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n replay(): void {\n LOG_DEBUG('info', 'replay');\n this.sessionStatus = 'replaying';\n const sessionCommandList = this.capturedCommandList.get(this.currentSessionId!);\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n const length = sessionCommandList!.length;\n this.pendingKernels = [];\n for (let i = 0; i < length; i++) {\n const computePassEncoder = this.getComputePassEncoder();\n const command = sessionCommandList![i];\n this.writeTimestamp(this.pendingDispatchNumber * 2);\n computePassEncoder.setPipeline(command.computePipeline);\n computePassEncoder.setBindGroup(0, command.bindGroup);\n computePassEncoder.dispatchWorkgroups(...command.dispatchGroup);\n this.writeTimestamp(this.pendingDispatchNumber * 2 + 1);\n this.pendingDispatchNumber++;\n if (this.queryType !== 'none') {\n this.pendingKernels.push(sessionPendingKernels![i]);\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber || this.queryType === 'at-passes') {\n this.endComputePass();\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber) {\n this.flush();\n }\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n\n onReleaseSession(sessionId: number): void {\n this.unregisterBuffers(sessionId);\n if (this.capturedCommandList.has(sessionId)) {\n this.capturedCommandList.delete(sessionId);\n }\n if (this.capturedPendingKernels.has(sessionId)) {\n this.capturedPendingKernels.delete(sessionId);\n }\n this.gpuDataManager.onReleaseSession(sessionId);\n }\n\n onRunStart(sessionId: number): void {\n this.currentSessionId = sessionId;\n this.setQueryType();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from '../wasm-types';\nimport {DataType, getTensorElementSize} from '../wasm-common';\n\nimport {WebGpuBackend} from './backend-webgpu';\nimport {LOG_DEBUG} from './log';\nimport {TensorView} from './tensor-view';\nimport {ShapeUtil} from './util';\nimport {AdapterInfo, ComputeContext, ComputeContextInputsOutputsMapping, ProgramInfo} from './webgpu/types';\n\n/* eslint-disable no-bitwise */\n\nclass TensorViewImpl implements TensorView {\n constructor(\n private module: OrtWasmModule, public readonly dataType: number, public readonly data: number,\n public readonly dims: readonly number[]) {}\n\n getFloat32Array(): Float32Array {\n if (this.dataType !== DataType.float) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Float32Array() :\n new Float32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getBigInt64Array(): BigInt64Array {\n if (this.dataType !== DataType.int64) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new BigInt64Array() :\n new BigInt64Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getInt32Array(): Int32Array {\n if (this.dataType !== DataType.int32) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Int32Array() : new Int32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n reshape(newDims: readonly number[]): TensorView {\n if (ShapeUtil.size(newDims) !== ShapeUtil.size(this.dims)) {\n throw new Error('Invalid new shape');\n }\n return new TensorViewImpl(this.module, this.dataType, this.data, newDims);\n }\n}\n\nclass ComputeContextImpl implements ComputeContext {\n readonly adapterInfo: AdapterInfo;\n readonly opKernelContext: number;\n readonly inputs: readonly TensorView[];\n readonly outputCount: number;\n get kernelCustomData(): {[key: string]: unknown} {\n return this.backend.currentKernelCustomData;\n }\n get customDataBuffer(): Uint8Array {\n return this.module.HEAPU8.subarray(this.customDataOffset, this.customDataOffset + this.customDataSize);\n }\n private customDataOffset = 0;\n private customDataSize = 0;\n constructor(private module: OrtWasmModule, private backend: WebGpuBackend, contextDataOffset: number) {\n this.adapterInfo = backend.adapterInfo;\n const heapU32 = module.HEAPU32;\n\n // extract context data\n let dataIndex = (contextDataOffset >>> 2);\n this.opKernelContext = heapU32[dataIndex++];\n const inputCount = heapU32[dataIndex++];\n this.outputCount = heapU32[dataIndex++];\n this.customDataOffset = heapU32[dataIndex++];\n this.customDataSize = heapU32[dataIndex++];\n\n const inputs: TensorView[] = [];\n for (let i = 0; i < inputCount; i++) {\n const dataType = heapU32[dataIndex++];\n const data = heapU32[dataIndex++];\n const dim = heapU32[dataIndex++];\n const dims: number[] = [];\n for (let d = 0; d < dim; d++) {\n dims.push(heapU32[dataIndex++]);\n }\n inputs.push(new TensorViewImpl(module, dataType, data, dims));\n }\n this.inputs = inputs;\n }\n\n getMaxComputeWorkgroupSizes(): [number, number, number] {\n return [\n this.backend.device.limits.maxComputeWorkgroupSizeX, this.backend.device.limits.maxComputeWorkgroupSizeY,\n this.backend.device.limits.maxComputeWorkgroupSizeZ\n ];\n }\n\n getMaxComputeWorkgroupStoragesize(): number 
{\n return this.backend.device.limits.maxComputeWorkgroupStorageSize;\n }\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[] {\n // prepare inputs. inputs should always be valid data.\n const mappedInputs =\n inputsOutputsMapping?.inputs?.map(i => typeof i === 'number' ? this.inputs[i] : i) ?? this.inputs;\n // prepare outputs.\n const outputIndices = inputsOutputsMapping?.outputs ?? [];\n const createKernelOutput = (index: number, dataType: number, dims: readonly number[]): TensorView =>\n new TensorViewImpl(this.module, dataType, this.output(index, dims), dims);\n const createTemporaryOutput = (dataType: number, dims: readonly number[]): TensorView => {\n const elementSize = getTensorElementSize(dataType);\n if (!elementSize) {\n throw new Error(`Unsupported data type: ${dataType}`);\n }\n const bufferSize = elementSize * ShapeUtil.size(dims);\n const gpuDataId = bufferSize > 0 ? this.backend.gpuDataManager.create(bufferSize).id : 0;\n return new TensorViewImpl(this.module, dataType, gpuDataId, dims);\n };\n return this.backend.run(\n program, mappedInputs, outputIndices, createKernelOutput, createTemporaryOutput, this.outputCount);\n }\n\n output(index: number, dims: readonly number[]): number {\n const stack = this.module.stackSave();\n try {\n const data = this.module.stackAlloc((1 + dims.length) * 4 /* sizeof(size_t) */);\n let offset = data >> 2;\n this.module.HEAPU32[offset++] = dims.length;\n for (let i = 0; i < dims.length; i++) {\n this.module.HEAPU32[offset++] = dims[i];\n }\n return this.module._JsepOutput!(this.opKernelContext, index, data);\n } catch (e) {\n throw new Error(\n `Failed to generate kernel's output[${index}] with dims [${dims}]. ` +\n 'If you are running with pre-allocated output, please make sure the output type/dims are correct. ' +\n `Error: ${e}`);\n } finally {\n this.module.stackRestore(stack);\n }\n }\n}\n\n/**\n * Initialize JSEP with WebGPU backend.\n *\n * This function will be called after the WebAssembly module is loaded and initialized (\"_OrtInit\" is called), once for\n * each of the following EPs if they are specified:\n * - \"webgpu\"\n * - \"webnn\"\n *\n * For WebGPU, this function expects:\n * - WebGPU is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebGPU is available in current environment. (a valid GPUAdapter is passed in)\n *\n * For WebNN, this function expects:\n * - WebNN is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebNN is available in current environment. (navigator.ml is not undefined)\n *\n * If the WebAssembly module is not built with JSEP support, this function will throw an error. This will invalidate\n * 'webgpu'/'webnn' backend.\n *\n * @param name - the name of the EP, either \"webgpu\" or \"webnn\"\n * @param module - the ORT WebAssembly module\n * @param env - the ORT environment variable (ort.env)\n * @param gpuAdapter - the pre-created GPU adapter\n */\nexport const init =\n async(name: 'webgpu'|'webnn', module: OrtWasmModule, env: Env, gpuAdapter?: GPUAdapter): Promise => {\n const jsepInit = module.jsepInit;\n if (!jsepInit) {\n throw new Error('Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.');\n }\n\n if (name === 'webgpu') {\n const backend = new WebGpuBackend();\n await backend.initialize(env, gpuAdapter!);\n\n jsepInit('webgpu', [\n // backend\n backend,\n\n // jsepAlloc()\n (size: number) => backend.alloc(size),\n\n // jsepFree()\n (ptr: number) => backend.free(ptr),\n\n // jsepCopy(src, dst, size, isSourceGpu)\n (src: number, dst: number, size: number, isSourceGpu = false) => {\n if (isSourceGpu) {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyGpuToGpu: src=${src}, dst=${dst}, size=${size}`);\n backend.memcpy(src, dst);\n } else {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyCpuToGpu: dataOffset=${src}, gpuDataId=${dst}, size=${size}`);\n const data = module.HEAPU8.subarray(src >>> 0, (src >>> 0) + size);\n backend.upload(dst, data);\n }\n },\n\n // jsepCopyAsync(src, dst, size)\n async(gpuDataId: number, dataOffset: number, size: number):\n Promise => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepCopyGpuToCpu: gpuDataId=${gpuDataId}, dataOffset=${dataOffset}, size=${size}`);\n\n await backend.download(\n gpuDataId, () => module.HEAPU8.subarray(dataOffset >>> 0, (dataOffset >>> 0) + size));\n },\n\n // jsepCreateKernel\n (kernelType: string, kernelId: number, attribute: unknown) => backend.createKernel(\n kernelType, kernelId, attribute, module.UTF8ToString(module._JsepGetNodeName!(kernelId))),\n\n // jsepReleaseKernel\n (kernel: number) => backend.releaseKernel(kernel),\n\n // jsepRun\n (kernel: number, contextDataOffset: number, sessionHandle: number, errors: Array>) => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepRun: sessionHandle=${sessionHandle}, kernel=${kernel}, contextDataOffset=${\n contextDataOffset}`);\n const context = new ComputeContextImpl(module, backend, contextDataOffset);\n return backend.computeKernel(kernel, context, errors);\n },\n // jsepCaptureBegin\n () => backend.captureBegin(),\n // jsepCaptureEnd\n () => backend.captureEnd(),\n // jsepReplay\n () => backend.replay()\n ]);\n } else {\n jsepInit('webnn');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// WebNN API currently does not have a TypeScript definition file. This file is a workaround with types generated from\n// WebNN API specification.\n// https://github.com/webmachinelearning/webnn/issues/677\n/// \n\nimport {Env, InferenceSession, Tensor} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport {setRunOptions} from './run-options';\nimport {setSessionOptions} from './session-options';\nimport {dataLocationStringToEnum, getTensorElementSize, isGpuBufferSupportedType, logLevelStringToEnum, tensorDataTypeEnumToString, tensorDataTypeStringToEnum, tensorTypeToTypedArrayConstructor} from './wasm-common';\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError} from './wasm-utils';\nimport {loadFile} from './wasm-utils-load-file';\n\n// #region Initializations\n\n/**\n * There are 4 different \"initialization\" steps for ORT. They happen in different places and different time.\n *\n * 1. JavaScript initialization for onnxruntime-common and onnxruntime-web.\n * This is the first initialization step. In this step, onnxruntime-web calls onnxruntime-common's registerBackend()\n * function multiple times to register all the available backends. The backend registration is very fast. 
It only\n * registers the backend name with the uninitialized backend object. No heavy initialization is done in this step.\n * Refer to web/lib/index.ts for the backend registration.\n *\n * 2. WebAssembly artifact initialization.\n * This happens when any registered wasm backend is used for the first time (ie. `ort.InferenceSession.create()` or\n * `ort.TrainingSession.create()` is called). In this step, onnxruntime-web does the followings:\n * - create a proxy worker and make sure the proxy worker is ready to receive messages, if proxy is enabled.\n * - perform feature detection, locate correct WebAssembly artifact path and call the Emscripten generated\n * JavaScript code to initialize the WebAssembly runtime.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-wasm'.\n * - downloading the 'ort-wasm{...}.wasm' file is done in this step.\n * - if multi-thread is enabled, one or more webworker will be created to initialize the PThread threadpool.\n *\n * 3. ORT environment initialization.\n * This happens after step 2. In this step, onnxruntime-web performs ONNX Runtime environment initialization.\n * Function `_OrtInit()` is called in this step.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-ort'.\n * - logging level (ort.env.logLevel) and thread number (ort.env.wasm.numThreads) are set in this step.\n *\n * 4. Session initialization.\n * This happens when `ort.InferenceSession.create()` or `ort.TrainingSession.create()` is called. Unlike the first 3\n * steps (they only called once), this step will be done for each session. In this step, onnxruntime-web does the\n * followings:\n * If the parameter is a URL:\n * - download the model data from the URL.\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - dereference the model buffer. This step allows the original ArrayBuffer to be garbage collected.\n * - call `_OrtCreateSession()` to create the session. (proxy: 'create')\n *\n * If the parameter is a Uint8Array object:\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - call `_OrtCreateSession()` to create the session. 
(proxy: 'create')\n *\n *\n */\n\n/**\n * initialize ORT environment.\n *\n * @param numThreads SetGlobalIntraOpNumThreads(numThreads)\n * @param loggingLevel CreateEnv(static_cast(logging_level))\n */\nconst initOrt = (numThreads: number, loggingLevel: number): void => {\n const errorCode = getInstance()._OrtInit(numThreads, loggingLevel);\n if (errorCode !== 0) {\n checkLastError('Can\\'t initialize onnxruntime.');\n }\n};\n\n/**\n * initialize runtime environment.\n * @param env passed in the environment config object.\n */\nexport const initRuntime = async(env: Env): Promise => {\n // init ORT\n initOrt(env.wasm.numThreads!, logLevelStringToEnum(env.logLevel));\n};\n\n/**\n * perform EP specific initialization.\n *\n * @param env\n * @param epName\n */\nexport const initEp = async(env: Env, epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_JSEP) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n const initJsep = require('./jsep/init').init;\n\n if (epName === 'webgpu') {\n // perform WebGPU availability check\n if (typeof navigator === 'undefined' || !navigator.gpu) {\n throw new Error('WebGPU is not supported in current environment');\n }\n\n let adapter = env.webgpu.adapter as GPUAdapter | null;\n if (!adapter) {\n // if adapter is not set, request a new adapter.\n const powerPreference = env.webgpu.powerPreference;\n if (powerPreference !== undefined && powerPreference !== 'low-power' &&\n powerPreference !== 'high-performance') {\n throw new Error(`Invalid powerPreference setting: \"${powerPreference}\"`);\n }\n const forceFallbackAdapter = env.webgpu.forceFallbackAdapter;\n if (forceFallbackAdapter !== undefined && typeof forceFallbackAdapter !== 'boolean') {\n throw new Error(`Invalid forceFallbackAdapter setting: \"${forceFallbackAdapter}\"`);\n }\n adapter = await navigator.gpu.requestAdapter({powerPreference, forceFallbackAdapter});\n if (!adapter) {\n throw new Error(\n 'Failed to get GPU adapter. ' +\n 'You may need to enable flag \"--enable-unsafe-webgpu\" if you are using Chrome.');\n }\n } else {\n // if adapter is set, validate it.\n if (typeof adapter.limits !== 'object' || typeof adapter.features !== 'object' ||\n typeof adapter.requestDevice !== 'function') {\n throw new Error('Invalid GPU adapter set in `env.webgpu.adapter`. 
It must be a GPUAdapter object.');\n }\n }\n\n await initJsep('webgpu', getInstance(), env, adapter);\n }\n if (epName === 'webnn') {\n // perform WebNN availability check\n if (typeof navigator === 'undefined' || !(navigator as unknown as {ml: unknown}).ml) {\n throw new Error('WebNN is not supported in current environment');\n }\n\n await initJsep('webnn', getInstance(), env);\n }\n }\n};\n\n// #endregion Initializations\n\n/**\n * valid data locations for input/output tensors.\n */\ntype SupportedTensorDataLocationForInputOutput = 'cpu'|'cpu-pinned'|'gpu-buffer';\n\ntype IOBindingState = {\n /**\n * the handle of IO binding.\n */\n readonly handle: number;\n\n /**\n * the preferred location for each output tensor.\n *\n * value is one of 'cpu', 'cpu-pinned', 'gpu-buffer'.\n */\n readonly outputPreferredLocations: readonly SupportedTensorDataLocationForInputOutput[];\n\n /**\n * enum value of the preferred location for each output tensor.\n */\n readonly outputPreferredLocationsEncoded: readonly number[];\n};\n\n/**\n * tuple elements are: InferenceSession ID; inputNamesUTF8Encoded; outputNamesUTF8Encoded; bindingState\n */\ntype SessionMetadata = [\n inferenceSessionId: number, inputNamesUTF8Encoded: number[], outputNamesUTF8Encoded: number[],\n bindingState: IOBindingState|null, enableGraphCapture: boolean, inputOutputBound: boolean\n];\n\nconst activeSessions = new Map();\n\n/**\n * get the input/output count of the session.\n * @param sessionHandle the handle representing the session. should be non-zero.\n * @returns a tuple including 2 numbers, representing the input count and output count.\n */\nconst getSessionInputOutputCount = (sessionHandle: number): [number, number] => {\n const wasm = getInstance();\n const stack = wasm.stackSave();\n try {\n const dataOffset = wasm.stackAlloc(8);\n const errorCode = wasm._OrtGetInputOutputCount(sessionHandle, dataOffset, dataOffset + 4);\n if (errorCode !== 0) {\n checkLastError('Can\\'t get session input/output count.');\n }\n return [wasm.HEAP32[dataOffset / 4], wasm.HEAP32[dataOffset / 4 + 1]];\n } finally {\n wasm.stackRestore(stack);\n }\n};\n\n/**\n * allocate the memory and memcpy the external buffer.\n *\n * @param model - the external buffer containing the model data. Must not be the same buffer as the WASM heap.\n * @returns a 2-elements tuple - the pointer and size of the allocated buffer\n */\nexport const copyFromExternalBuffer = (model: Uint8Array): [number, number] => {\n const wasm = getInstance();\n const modelDataOffset = wasm._malloc(model.byteLength);\n if (modelDataOffset === 0) {\n throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${model.byteLength}.`);\n }\n wasm.HEAPU8.set(model, modelDataOffset);\n return [modelDataOffset, model.byteLength];\n};\n\n/**\n * create an inference session from a model data buffer.\n *\n * @param modelData - either a Uint8Array object representing the model data, or a 2-elements tuple containing the\n * pointer and size of the model data buffer.\n * @param options an optional session options object.\n * @returns a 3-elements tuple containing [session handle, input names, output names]\n */\nexport const createSession = async(\n modelData: Uint8Array|SerializableInternalBuffer,\n options?: InferenceSession.SessionOptions): Promise => {\n let modelDataOffset: number, modelDataLength: number;\n const wasm = getInstance();\n\n if (Array.isArray(modelData)) {\n // if model data is an array, it must be a 2-elements tuple containing the pointer and size of the model data\n [modelDataOffset, modelDataLength] = modelData;\n } else if (modelData.buffer === wasm.HEAPU8.buffer) {\n // if model data uses the same buffer as the WASM heap, we don't need to copy it.\n [modelDataOffset, modelDataLength] = [modelData.byteOffset, modelData.byteLength];\n } else {\n // otherwise, copy the model data to the WASM heap.\n [modelDataOffset, modelDataLength] = copyFromExternalBuffer(modelData);\n }\n\n let sessionHandle = 0;\n let sessionOptionsHandle = 0;\n let ioBindingHandle = 0;\n let allocs: number[] = [];\n const inputNamesUTF8Encoded = [];\n const outputNamesUTF8Encoded = [];\n\n try {\n [sessionOptionsHandle, allocs] = setSessionOptions(options);\n\n if (options?.externalData && wasm.mountExternalData) {\n const loadingPromises = [];\n for (const file of options.externalData) {\n const path = typeof file === 'string' ? file : file.path;\n loadingPromises.push(loadFile(typeof file === 'string' ? file : file.data).then(data => {\n wasm.mountExternalData!(path, data);\n }));\n }\n\n // wait for all external data files to be loaded\n await Promise.all(loadingPromises);\n }\n\n for (const provider of options?.executionProviders ?? []) {\n const providerName = typeof provider === 'string' ? 
provider : provider.name;\n if (providerName === 'webnn') {\n if (wasm.currentContext) {\n throw new Error('WebNN execution provider is already set.');\n }\n if (typeof provider !== 'string') {\n const webnnOptions = provider as InferenceSession.WebNNExecutionProviderOption;\n const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const gpuDevice = (webnnOptions as InferenceSession.WebNNOptionsWebGpu)?.gpuDevice;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n const numThreads = (webnnOptions as InferenceSession.WebNNContextOptions)?.numThreads;\n const powerPreference = (webnnOptions as InferenceSession.WebNNContextOptions)?.powerPreference;\n if (context) {\n wasm.currentContext = context as MLContext;\n } else if (gpuDevice) {\n wasm.currentContext = await navigator.ml.createContext(gpuDevice);\n } else {\n wasm.currentContext = await navigator.ml.createContext({deviceType, numThreads, powerPreference});\n }\n } else {\n wasm.currentContext = await navigator.ml.createContext();\n }\n break;\n }\n }\n\n sessionHandle = await wasm._OrtCreateSession(modelDataOffset, modelDataLength, sessionOptionsHandle);\n if (sessionHandle === 0) {\n checkLastError('Can\\'t create a session.');\n }\n\n // clear current MLContext after session creation\n if (wasm.currentContext) {\n wasm.currentContext = undefined;\n }\n\n const [inputCount, outputCount] = getSessionInputOutputCount(sessionHandle);\n\n const enableGraphCapture = !!options?.enableGraphCapture;\n\n const inputNames = [];\n const outputNames = [];\n const outputPreferredLocations: SupportedTensorDataLocationForInputOutput[] = [];\n for (let i = 0; i < inputCount; i++) {\n const name = wasm._OrtGetInputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an input name.');\n }\n inputNamesUTF8Encoded.push(name);\n inputNames.push(wasm.UTF8ToString(name));\n }\n for (let i = 0; i < outputCount; i++) {\n const name = wasm._OrtGetOutputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an output name.');\n }\n outputNamesUTF8Encoded.push(name);\n const nameString = wasm.UTF8ToString(name);\n outputNames.push(nameString);\n\n if (!BUILD_DEFS.DISABLE_JSEP) {\n if (enableGraphCapture && options?.preferredOutputLocation === undefined) {\n outputPreferredLocations.push('gpu-buffer');\n continue;\n }\n const location = typeof options?.preferredOutputLocation === 'string' ?\n options.preferredOutputLocation :\n options?.preferredOutputLocation?.[nameString] ?? 'cpu';\n if (location !== 'cpu' && location !== 'cpu-pinned' && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${location}.`);\n }\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${\n location}. 
Only 'gpu-buffer' location is supported when enableGraphCapture is true.`);\n }\n outputPreferredLocations.push(location);\n }\n }\n\n // use IO binding only when at least one output is preffered to be on GPU.\n let bindingState: IOBindingState|null = null;\n if (!BUILD_DEFS.DISABLE_JSEP && outputPreferredLocations.some(l => l === 'gpu-buffer')) {\n ioBindingHandle = wasm._OrtCreateBinding(sessionHandle);\n if (ioBindingHandle === 0) {\n checkLastError('Can\\'t create IO binding.');\n }\n\n bindingState = {\n handle: ioBindingHandle,\n outputPreferredLocations,\n outputPreferredLocationsEncoded: outputPreferredLocations.map(l => dataLocationStringToEnum(l)),\n };\n }\n\n activeSessions.set(\n sessionHandle,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, bindingState, enableGraphCapture, false]);\n return [sessionHandle, inputNames, outputNames];\n } catch (e) {\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n\n if (ioBindingHandle !== 0) {\n wasm._OrtReleaseBinding(ioBindingHandle);\n }\n\n if (sessionHandle !== 0) {\n wasm._OrtReleaseSession(sessionHandle);\n }\n throw e;\n } finally {\n wasm._free(modelDataOffset);\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n\n // unmount external data if necessary\n wasm.unmountExternalData?.();\n }\n};\n\nexport const releaseSession = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot release session. invalid session id: ${sessionId}`);\n }\n const [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture] = session;\n\n if (ioBindingState) {\n if (enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n }\n wasm._OrtReleaseBinding(ioBindingState.handle);\n }\n\n wasm.jsepOnReleaseSession?.(sessionId);\n\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n wasm._OrtReleaseSession(sessionHandle);\n activeSessions.delete(sessionId);\n};\n\nexport const prepareInputOutputTensor =\n (tensor: TensorMetadata|null, tensorHandles: number[], allocs: number[], sessionId: number, index: number,\n enableGraphCapture = false): void => {\n if (!tensor) {\n tensorHandles.push(0);\n return;\n }\n\n const wasm = getInstance();\n\n const dataType = tensor[0];\n const dims = tensor[1];\n const location = tensor[3];\n\n let rawData: number;\n let dataByteLength: number;\n\n if (dataType === 'string' && location === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(\n `External buffer must be provided for input/output index ${index} when enableGraphCapture is true.`);\n }\n\n if (location === 'gpu-buffer') {\n const gpuBuffer = tensor[2].gpuBuffer as GPUBuffer;\n const elementSizeInBytes = getTensorElementSize(tensorDataTypeStringToEnum(dataType))!;\n dataByteLength = dims.reduce((a, b) => a * b, 1) * elementSizeInBytes;\n\n const registerBuffer = wasm.jsepRegisterBuffer;\n if (!registerBuffer) {\n throw new Error('Tensor location \"gpu-buffer\" is not supported without using WebGPU.');\n }\n rawData = registerBuffer(sessionId, index, gpuBuffer, dataByteLength);\n } else {\n const data = tensor[2];\n\n if (Array.isArray(data)) {\n // string tensor\n 
dataByteLength = 4 * data.length;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n let dataIndex = rawData / 4;\n for (let i = 0; i < data.length; i++) {\n if (typeof data[i] !== 'string') {\n throw new TypeError(`tensor data at index ${i} is not a string`);\n }\n wasm.HEAPU32[dataIndex++] = allocWasmString(data[i], allocs);\n }\n } else {\n dataByteLength = data.byteLength;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n wasm.HEAPU8.set(new Uint8Array(data.buffer, data.byteOffset, dataByteLength), rawData);\n }\n }\n\n const stack = wasm.stackSave();\n const dimsOffset = wasm.stackAlloc(4 * dims.length);\n try {\n let dimIndex = dimsOffset / 4;\n dims.forEach(d => wasm.HEAP32[dimIndex++] = d);\n const tensor = wasm._OrtCreateTensor(\n tensorDataTypeStringToEnum(dataType), rawData, dataByteLength, dimsOffset, dims.length,\n dataLocationStringToEnum(location));\n if (tensor === 0) {\n checkLastError(`Can't create tensor for input/output. session=${sessionId}, index=${index}.`);\n }\n tensorHandles.push(tensor);\n } finally {\n wasm.stackRestore(stack);\n }\n };\n\n/**\n * perform inference run\n */\nexport const run = async(\n sessionId: number, inputIndices: number[], inputTensors: TensorMetadata[], outputIndices: number[],\n outputTensors: Array, options: InferenceSession.RunOptions): Promise => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot run inference. invalid session id: ${sessionId}`);\n }\n const sessionHandle = session[0];\n const inputNamesUTF8Encoded = session[1];\n const outputNamesUTF8Encoded = session[2];\n const ioBindingState = session[3];\n const enableGraphCapture = session[4];\n const inputOutputBound = session[5];\n\n const inputCount = inputIndices.length;\n const outputCount = outputIndices.length;\n\n let runOptionsHandle = 0;\n let runOptionsAllocs: number[] = [];\n\n const inputTensorHandles: number[] = [];\n const outputTensorHandles: number[] = [];\n const inputOutputAllocs: number[] = [];\n\n const beforeRunStack = wasm.stackSave();\n const inputValuesOffset = wasm.stackAlloc(inputCount * 4);\n const inputNamesOffset = wasm.stackAlloc(inputCount * 4);\n const outputValuesOffset = wasm.stackAlloc(outputCount * 4);\n const outputNamesOffset = wasm.stackAlloc(outputCount * 4);\n\n try {\n [runOptionsHandle, runOptionsAllocs] = setRunOptions(options);\n\n // create input tensors\n for (let i = 0; i < inputCount; i++) {\n prepareInputOutputTensor(\n inputTensors[i], inputTensorHandles, inputOutputAllocs, sessionId, inputIndices[i], enableGraphCapture);\n }\n\n // create output tensors\n for (let i = 0; i < outputCount; i++) {\n prepareInputOutputTensor(\n outputTensors[i], outputTensorHandles, inputOutputAllocs, sessionId, inputCount + outputIndices[i],\n enableGraphCapture);\n }\n\n let inputValuesIndex = inputValuesOffset / 4;\n let inputNamesIndex = inputNamesOffset / 4;\n let outputValuesIndex = outputValuesOffset / 4;\n let outputNamesIndex = outputNamesOffset / 4;\n for (let i = 0; i < inputCount; i++) {\n wasm.HEAPU32[inputValuesIndex++] = inputTensorHandles[i];\n wasm.HEAPU32[inputNamesIndex++] = inputNamesUTF8Encoded[inputIndices[i]];\n }\n for (let i = 0; i < outputCount; i++) {\n wasm.HEAPU32[outputValuesIndex++] = outputTensorHandles[i];\n wasm.HEAPU32[outputNamesIndex++] = outputNamesUTF8Encoded[outputIndices[i]];\n }\n\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState && !inputOutputBound) {\n const {handle, outputPreferredLocations, 
outputPreferredLocationsEncoded} = ioBindingState;\n\n if (inputNamesUTF8Encoded.length !== inputCount) {\n throw new Error(`input count from feeds (${\n inputCount}) is expected to be always equal to model's input count (${inputNamesUTF8Encoded.length}).`);\n }\n\n // process inputs\n for (let i = 0; i < inputCount; i++) {\n const index = inputIndices[i];\n const errorCode = await wasm._OrtBindInput(handle, inputNamesUTF8Encoded[index], inputTensorHandles[i]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind input[${i}] for session=${sessionId}.`);\n }\n }\n\n // process pre-allocated outputs\n for (let i = 0; i < outputCount; i++) {\n const index = outputIndices[i];\n const location = outputTensors[i]?.[3]; // undefined means output is not pre-allocated.\n\n if (location) {\n // output is pre-allocated. bind the tensor.\n const errorCode = wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], outputTensorHandles[i], 0);\n if (errorCode !== 0) {\n checkLastError(`Can't bind pre-allocated output[${i}] for session=${sessionId}.`);\n }\n } else {\n // output is not pre-allocated. reset preferred location.\n const errorCode =\n wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], 0, outputPreferredLocationsEncoded[index]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind output[${i}] to ${outputPreferredLocations[i]} for session=${sessionId}.`);\n }\n }\n }\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, true]);\n }\n\n wasm.jsepOnRunStart?.(sessionHandle);\n let errorCode: number;\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState) {\n errorCode = await wasm._OrtRunWithBinding(\n sessionHandle, ioBindingState.handle, outputCount, outputValuesOffset, runOptionsHandle);\n } else {\n errorCode = await wasm._OrtRun(\n sessionHandle, inputNamesOffset, inputValuesOffset, inputCount, outputNamesOffset, outputCount,\n outputValuesOffset, runOptionsHandle);\n }\n\n if (errorCode !== 0) {\n checkLastError('failed to call OrtRun().');\n }\n\n const output: TensorMetadata[] = [];\n\n for (let i = 0; i < outputCount; i++) {\n const tensor = wasm.HEAPU32[outputValuesOffset / 4 + i];\n if (tensor === outputTensorHandles[i]) {\n // output tensor is pre-allocated. 
no need to copy data.\n output.push(outputTensors[i]!);\n continue;\n }\n\n const beforeGetTensorDataStack = wasm.stackSave();\n // stack allocate 4 pointer value\n const tensorDataOffset = wasm.stackAlloc(4 * 4);\n\n let keepOutputTensor = false;\n let type: Tensor.Type|undefined, dataOffset = 0;\n try {\n const errorCode = wasm._OrtGetTensorData(\n tensor, tensorDataOffset, tensorDataOffset + 4, tensorDataOffset + 8, tensorDataOffset + 12);\n if (errorCode !== 0) {\n checkLastError(`Can't access output tensor data on index ${i}.`);\n }\n let tensorDataIndex = tensorDataOffset / 4;\n const dataType = wasm.HEAPU32[tensorDataIndex++];\n dataOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsLength = wasm.HEAPU32[tensorDataIndex++];\n const dims = [];\n for (let i = 0; i < dimsLength; i++) {\n dims.push(wasm.HEAPU32[dimsOffset / 4 + i]);\n }\n wasm._OrtFree(dimsOffset);\n\n const size = dims.reduce((a, b) => a * b, 1);\n type = tensorDataTypeEnumToString(dataType);\n\n const preferredLocation = ioBindingState?.outputPreferredLocations[outputIndices[i]];\n\n if (type === 'string') {\n if (preferredLocation === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n const stringData: string[] = [];\n let dataIndex = dataOffset / 4;\n for (let i = 0; i < size; i++) {\n const offset = wasm.HEAPU32[dataIndex++];\n const maxBytesToRead = i === size - 1 ? undefined : wasm.HEAPU32[dataIndex] - offset;\n stringData.push(wasm.UTF8ToString(offset, maxBytesToRead));\n }\n output.push([type, dims, stringData, 'cpu']);\n } else {\n // If a certain output's preferred location is GPU but the tensor is empty, we still need to create a CPU\n // tensor for it. There is no mapping GPU buffer for an empty tensor.\n if (preferredLocation === 'gpu-buffer' && size > 0) {\n const getBuffer = wasm.jsepGetBuffer;\n if (!getBuffer) {\n throw new Error('preferredLocation \"gpu-buffer\" is not supported without using WebGPU.');\n }\n const gpuBuffer = getBuffer(dataOffset);\n const elementSize = getTensorElementSize(dataType);\n if (elementSize === undefined || !isGpuBufferSupportedType(type)) {\n throw new Error(`Unsupported data type: ${type}`);\n }\n\n // do not release the tensor right now. 
it will be released when user calls tensor.dispose().\n keepOutputTensor = true;\n\n output.push([\n type, dims, {\n gpuBuffer,\n download: wasm.jsepCreateDownloader!(gpuBuffer, size * elementSize, type),\n dispose: () => {\n wasm._OrtReleaseTensor(tensor);\n }\n },\n 'gpu-buffer'\n ]);\n } else {\n const typedArrayConstructor = tensorTypeToTypedArrayConstructor(type);\n const data = new typedArrayConstructor(size);\n new Uint8Array(data.buffer, data.byteOffset, data.byteLength)\n .set(wasm.HEAPU8.subarray(dataOffset, dataOffset + data.byteLength));\n output.push([type, dims, data, 'cpu']);\n }\n }\n } finally {\n wasm.stackRestore(beforeGetTensorDataStack);\n if (type === 'string' && dataOffset) {\n wasm._free(dataOffset);\n }\n if (!keepOutputTensor) {\n wasm._OrtReleaseTensor(tensor);\n }\n }\n }\n\n if (ioBindingState && !enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, false]);\n }\n return output;\n } finally {\n wasm.stackRestore(beforeRunStack);\n\n inputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n outputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n inputOutputAllocs.forEach(p => wasm._free(p));\n\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n runOptionsAllocs.forEach(p => wasm._free(p));\n }\n};\n\n/**\n * end profiling\n */\nexport const endProfiling = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error('invalid session id');\n }\n const sessionHandle = session[0];\n\n // profile file name is not used yet, but it must be freed.\n const profileFileName = wasm._OrtEndProfiling(sessionHandle);\n if (profileFileName === 0) {\n checkLastError('Can\\'t get an profile file name.');\n }\n wasm._OrtFree(profileFileName);\n};\n\nexport const extractTransferableBuffers = (tensors: readonly SerializableTensorMetadata[]): ArrayBufferLike[] => {\n const buffers: ArrayBufferLike[] = [];\n for (const tensor of tensors) {\n const data = tensor[2];\n if (!Array.isArray(data) && 'buffer' in data) {\n buffers.push(data.buffer);\n }\n }\n return buffers;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env, InferenceSession} from 'onnxruntime-common';\n\nimport {OrtWasmMessage, SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport * as core from './wasm-core-impl';\nimport {initializeWebAssembly} from './wasm-factory';\nimport {importProxyWorker} from './wasm-utils-import';\n\nconst isProxy = (): boolean => !!env.wasm.proxy && typeof document !== 'undefined';\nlet proxyWorker: Worker|undefined;\nlet initializing = false;\nlet initialized = false;\nlet aborted = false;\nlet temporaryObjectUrl: string|undefined;\n\ntype PromiseCallbacks = [resolve: (result: T) => void, reject: (reason: unknown) => void];\nlet initWasmCallbacks: PromiseCallbacks;\nconst queuedCallbacks: Map>> = new Map();\n\nconst enqueueCallbacks = (type: OrtWasmMessage['type'], callbacks: PromiseCallbacks): void => {\n const queue = queuedCallbacks.get(type);\n if (queue) {\n queue.push(callbacks);\n } else {\n queuedCallbacks.set(type, [callbacks]);\n }\n};\n\nconst ensureWorker = (): void => {\n if (initializing || !initialized || aborted || !proxyWorker) {\n throw new Error('worker not ready');\n }\n};\n\nconst onProxyWorkerMessage = (ev: MessageEvent): void => {\n switch (ev.data.type) {\n case 'init-wasm':\n initializing = false;\n if (ev.data.err) {\n aborted = true;\n initWasmCallbacks[1](ev.data.err);\n } else {\n initialized = true;\n initWasmCallbacks[0]();\n }\n if (temporaryObjectUrl) {\n URL.revokeObjectURL(temporaryObjectUrl);\n temporaryObjectUrl = undefined;\n }\n break;\n case 'init-ep':\n case 'copy-from':\n case 'create':\n case 'release':\n case 'run':\n case 'end-profiling': {\n const callbacks = queuedCallbacks.get(ev.data.type)!;\n if (ev.data.err) {\n callbacks.shift()![1](ev.data.err);\n } else {\n callbacks.shift()![0](ev.data.out!);\n }\n break;\n }\n default:\n }\n};\n\n\nexport const initializeWebAssemblyAndOrtRuntime = async(): Promise => {\n if (initialized) {\n return;\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initWasm()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initWasm()\\' failed.');\n }\n\n initializing = true;\n\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n return new Promise((resolve, reject) => {\n proxyWorker?.terminate();\n\n void importProxyWorker().then(([objectUrl, worker]) => {\n try {\n proxyWorker = worker;\n proxyWorker.onerror = (ev: ErrorEvent) => reject(ev);\n proxyWorker.onmessage = onProxyWorkerMessage;\n initWasmCallbacks = [resolve, reject];\n const message: OrtWasmMessage = {type: 'init-wasm', in : env};\n proxyWorker.postMessage(message);\n temporaryObjectUrl = objectUrl;\n } catch (e) {\n reject(e);\n }\n }, reject);\n });\n\n } else {\n try {\n await initializeWebAssembly(env.wasm);\n await core.initRuntime(env);\n initialized = true;\n } catch (e) {\n aborted = true;\n throw e;\n } finally {\n initializing = false;\n }\n }\n};\n\nexport const initializeOrtEp = async(epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('init-ep', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'init-ep', in : {epName, env}};\n proxyWorker!.postMessage(message);\n });\n } else {\n await core.initEp(env, epName);\n }\n};\n\nexport const copyFromExternalBuffer = async(buffer: Uint8Array): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) 
{\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('copy-from', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'copy-from', in : {buffer}};\n proxyWorker!.postMessage(message, [buffer.buffer]);\n });\n } else {\n return core.copyFromExternalBuffer(buffer);\n }\n};\n\nexport const createSession =\n async(model: SerializableInternalBuffer|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check unsupported options\n if (options?.preferredOutputLocation) {\n throw new Error('session option \"preferredOutputLocation\" is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('create', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'create', in : {model, options: {...options}}};\n const transferable: Transferable[] = [];\n if (model instanceof Uint8Array) {\n transferable.push(model.buffer);\n }\n proxyWorker!.postMessage(message, transferable);\n });\n } else {\n return core.createSession(model, options);\n }\n };\n\nexport const releaseSession = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('release', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'release', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.releaseSession(sessionId);\n }\n};\n\nexport const run = async(\n sessionId: number, inputIndices: number[], inputs: TensorMetadata[], outputIndices: number[],\n outputs: Array, options: InferenceSession.RunOptions): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check inputs location\n if (inputs.some(t => t[3] !== 'cpu')) {\n throw new Error('input tensor on GPU is not supported for proxy.');\n }\n // check outputs location\n if (outputs.some(t => t)) {\n throw new Error('pre-allocated output tensor is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('run', [resolve, reject]);\n const serializableInputs = inputs as SerializableTensorMetadata[]; // every input is on CPU.\n const message: OrtWasmMessage =\n {type: 'run', in : {sessionId, inputIndices, inputs: serializableInputs, outputIndices, options}};\n proxyWorker!.postMessage(message, core.extractTransferableBuffers(serializableInputs));\n });\n } else {\n return core.run(sessionId, inputIndices, inputs, outputIndices, outputs, options);\n }\n};\n\nexport const endProfiling = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('end-profiling', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'end-profiling', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.endProfiling(sessionId);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, TensorMetadata} from './proxy-messages';\nimport {copyFromExternalBuffer, createSession, endProfiling, releaseSession, run} from './proxy-wrapper';\nimport {isGpuBufferSupportedType} from './wasm-common';\nimport {isNode} from './wasm-utils-env';\nimport {loadFile} from './wasm-utils-load-file';\n\nexport const encodeTensorMetadata = (tensor: Tensor, getName: () => string): TensorMetadata => {\n switch (tensor.location) {\n case 'cpu':\n return [tensor.type, tensor.dims, tensor.data, 'cpu'];\n case 'gpu-buffer':\n return [tensor.type, tensor.dims, {gpuBuffer: tensor.gpuBuffer}, 'gpu-buffer'];\n default:\n throw new Error(`invalid data location: ${tensor.location} for ${getName()}`);\n }\n};\n\nexport const decodeTensorMetadata = (tensor: TensorMetadata): Tensor => {\n switch (tensor[3]) {\n case 'cpu':\n return new Tensor(tensor[0], tensor[2], tensor[1]);\n case 'gpu-buffer': {\n const dataType = tensor[0];\n if (!isGpuBufferSupportedType(dataType)) {\n throw new Error(`not supported data type: ${dataType} for deserializing GPU tensor`);\n }\n const {gpuBuffer, download, dispose} = tensor[2];\n return Tensor.fromGpuBuffer(gpuBuffer, {dataType, dims: tensor[1], download, dispose});\n }\n default:\n throw new Error(`invalid data location: ${tensor[3]}`);\n }\n};\n\nexport class OnnxruntimeWebAssemblySessionHandler implements InferenceSessionHandler {\n private sessionId: number;\n\n inputNames: string[];\n outputNames: string[];\n\n async fetchModelAndCopyToWasmMemory(path: string): Promise {\n // fetch model from url and move to wasm heap.\n return copyFromExternalBuffer(await loadFile(path));\n }\n\n async loadModel(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n let model: Parameters[0];\n\n if (typeof pathOrBuffer === 'string') {\n if (isNode) {\n // node\n model = await loadFile(pathOrBuffer);\n } else {\n // browser\n // fetch model and copy to wasm heap.\n model = await this.fetchModelAndCopyToWasmMemory(pathOrBuffer);\n }\n } else {\n model = pathOrBuffer;\n }\n\n [this.sessionId, this.inputNames, this.outputNames] = await createSession(model, options);\n TRACE_FUNC_END();\n }\n\n async dispose(): Promise {\n return releaseSession(this.sessionId);\n }\n\n async run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType, options: InferenceSession.RunOptions):\n Promise {\n TRACE_FUNC_BEGIN();\n const inputArray: Tensor[] = [];\n const inputIndices: number[] = [];\n Object.entries(feeds).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.inputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid input '${name}'`);\n }\n inputArray.push(tensor);\n inputIndices.push(index);\n });\n\n const outputArray: Array = [];\n const outputIndices: number[] = [];\n Object.entries(fetches).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.outputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid output '${name}'`);\n }\n outputArray.push(tensor);\n outputIndices.push(index);\n });\n\n const inputs =\n inputArray.map((t, i) => encodeTensorMetadata(t, () => `input \"${this.inputNames[inputIndices[i]]}\"`));\n const outputs = outputArray.map(\n (t, i) => t ? 
encodeTensorMetadata(t, () => `output \"${this.outputNames[outputIndices[i]]}\"`) : null);\n\n const results = await run(this.sessionId, inputIndices, inputs, outputIndices, outputs, options);\n\n const resultMap: SessionHandler.ReturnType = {};\n for (let i = 0; i < results.length; i++) {\n resultMap[this.outputNames[outputIndices[i]]] = outputArray[i] ?? decodeTensorMetadata(results[i]);\n }\n TRACE_FUNC_END();\n return resultMap;\n }\n\n startProfiling(): void {\n // TODO: implement profiling\n }\n\n endProfiling(): void {\n void endProfiling(this.sessionId);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend, env, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {initializeOrtEp, initializeWebAssemblyAndOrtRuntime} from './wasm/proxy-wrapper';\nimport {OnnxruntimeWebAssemblySessionHandler} from './wasm/session-handler-inference';\nimport {scriptSrc} from './wasm/wasm-utils-import';\n\n/**\n * This function initializes all flags for WebAssembly.\n *\n * Those flags are accessible from `ort.env.wasm`. Users are allow to set those flags before the first inference session\n * being created, to override default value.\n */\nexport const initializeFlags = (): void => {\n if (typeof env.wasm.initTimeout !== 'number' || env.wasm.initTimeout < 0) {\n env.wasm.initTimeout = 0;\n }\n\n if (env.wasm.simd === false) {\n // eslint-disable-next-line no-console\n console.warn(\n 'Deprecated property \"env.wasm.simd\" is set to false. ' +\n 'non-SIMD build is no longer provided, and this setting will be ignored.');\n }\n\n if (typeof env.wasm.proxy !== 'boolean') {\n env.wasm.proxy = false;\n }\n\n if (typeof env.wasm.trace !== 'boolean') {\n env.wasm.trace = false;\n }\n\n if (typeof env.wasm.numThreads !== 'number' || !Number.isInteger(env.wasm.numThreads) || env.wasm.numThreads <= 0) {\n // The following logic only applies when `ort.env.wasm.numThreads` is not set by user. We will always honor user's\n // setting if it is provided.\n\n // Browser: when crossOriginIsolated is false, SharedArrayBuffer is not available so WebAssembly threads will not\n // work. In this case, we will set numThreads to 1.\n //\n // There is an exception: when the browser is configured to force-enable SharedArrayBuffer (e.g. Chromuim with\n // --enable-features=SharedArrayBuffer), it is possible that `self.crossOriginIsolated` is false and\n // SharedArrayBuffer is available at the same time. This is usually for testing. In this case, we will still set\n // numThreads to 1 here. If we want to enable multi-threading in test, we should set `ort.env.wasm.numThreads` to a\n // value greater than 1.\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n env.wasm.numThreads = 1;\n } else {\n const numCpuLogicalCores =\n typeof navigator === 'undefined' ? require('node:os').cpus().length : navigator.hardwareConcurrency;\n env.wasm.numThreads = Math.min(4, Math.ceil((numCpuLogicalCores || 1) / 2));\n }\n }\n\n if (!BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n // overwrite wasm paths override if not set\n if (env.wasm.wasmPaths === undefined && scriptSrc && scriptSrc.indexOf('blob:') !== 0) {\n env.wasm.wasmPaths = scriptSrc.substring(0, scriptSrc.lastIndexOf('/') + 1);\n }\n }\n};\n\nexport class OnnxruntimeWebAssemblyBackend implements Backend {\n /**\n * This function initializes the WebAssembly backend.\n *\n * This function will be called only once for each backend name. 
It will be called the first time when\n * `ort.InferenceSession.create()` is called with a registered backend name.\n *\n * @param backendName - the registered backend name.\n */\n async init(backendName: string): Promise {\n // populate wasm flags\n initializeFlags();\n\n // init wasm\n await initializeWebAssemblyAndOrtRuntime();\n\n // performe EP specific initialization\n await initializeOrtEp(backendName);\n }\n createInferenceSessionHandler(path: string, options?: InferenceSession.SessionOptions):\n Promise;\n createInferenceSessionHandler(buffer: Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise {\n const handler = new OnnxruntimeWebAssemblySessionHandler();\n await handler.loadModel(pathOrBuffer, options);\n return Promise.resolve(handler);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OnnxruntimeWebAssemblyBackend} from './backend-wasm';\nexport const wasmBackend = new OnnxruntimeWebAssemblyBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports */\n\n// We use \"require\" instead of \"import\" here because import statement must be put in top level. Our current code does\n// not allow bundler to tree-shaking code as expected because some codes are treated as having side effects.\n// So we import code inside the if-clause to allow bundler remove the code safely.\n\nexport * from 'onnxruntime-common';\nimport * as ort from 'onnxruntime-common';\nexport default ort;\n\nimport {registerBackend, env} from 'onnxruntime-common';\nimport {version} from './version';\n\nif (!BUILD_DEFS.DISABLE_WEBGL) {\n const onnxjsBackend = require('./backend-onnxjs').onnxjsBackend;\n registerBackend('webgl', onnxjsBackend, -10);\n}\n\nif (!BUILD_DEFS.DISABLE_WASM) {\n const wasmBackend = BUILD_DEFS.DISABLE_TRAINING ? require('./backend-wasm-inference').wasmBackend :\n require('./backend-wasm-training').wasmBackend;\n if (!BUILD_DEFS.DISABLE_JSEP) {\n registerBackend('webgpu', wasmBackend, 5);\n registerBackend('webnn', wasmBackend, 5);\n }\n registerBackend('cpu', wasmBackend, 10);\n registerBackend('wasm', wasmBackend, 10);\n}\n\nObject.defineProperty(env.versions, 'web', {value: version, enumerable: true});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n"], - "mappings": ";;;;;usBAAA,IAgBMA,GACAC,GAYOC,GAwCPC,GAwCOC,GA7GbC,GAAAC,EAAA,kBAgBMN,GAAqC,IAAI,IACzCC,GAAqC,CAAA,EAY9BC,GAAkB,CAACK,EAAcC,EAAkBC,IAA0B,CACxF,GAAID,GAAW,OAAOA,EAAQ,MAAS,YAAc,OAAOA,EAAQ,+BAAkC,WAAY,CAChH,IAAME,EAAiBV,GAAS,IAAIO,CAAI,EACxC,GAAIG,IAAmB,OACrBV,GAAS,IAAIO,EAAM,CAAC,QAAAC,EAAS,SAAAC,CAAQ,CAAC,MACjC,IAAIC,EAAe,SAAWD,EAEnC,OACK,GAAIC,EAAe,WAAaD,GACjCC,EAAe,UAAYF,EAC7B,MAAM,IAAI,MAAM,4BAA4BD,CAAI,oBAAoBE,CAAQ,EAAE,EAIlF,GAAIA,GAAY,EAAG,CACjB,IAAME,EAAIV,GAAyB,QAAQM,CAAI,EAC3CI,IAAM,IACRV,GAAyB,OAAOU,EAAG,CAAC,EAGtC,QAAS,EAAI,EAAG,EAAIV,GAAyB,OAAQ,IACnD,GAAID,GAAS,IAAIC,GAAyB,CAAC,CAAC,EAAG,UAAYQ,EAAU,CACnER,GAAyB,OAAO,EAAG,EAAGM,CAAI,EAC1C,OAGJN,GAAyB,KAAKM,CAAI,EAEpC,OAGF,MAAM,IAAI,UAAU,qBAAqB,CAC3C,EAQMJ,GAAiC,MAAMS,GAAgD,CAC3F,IAAMC,EAAcb,GAAS,IAAIY,CAAW,EAC5C,GAAI,CAACC,EACH,MAAO,qBAGT,GAAIA,EAAY,YACd,OAAOA,EAAY,QACd,GAAIA,EAAY,QACrB,OAAOA,EAAY,MACd,CACL,IAAMC,EAAiB,CAAC,CAACD,EAAY,YACrC,GAAI,CACF,OAAKC,IACHD,EAAY,YAAcA,EAAY,QAAQ,KAAKD,CAAW,GAEhE,MAAMC,EAAY,YAClBA,EAAY,YAAc,GACnBA,EAAY,cACZE,EAAG,CACV,OAAKD,IACHD,EAAY,MAAQ,GAAGE,CAAC,GACxBF,EAAY,QAAU,IAEjBA,EAAY,cAEnB,OAAOA,EAAY,aAGzB,EAWaT,GAAsC,MAAMY,GACmB,CAEtE,IAAMC,EAAMD,EAAQ,oBAAsB,CAAA,EACpCE,EAAeD,EAAI,IAAIN,GAAK,OAAOA,GAAM,SAAWA,EAAIA,EAAE,IAAI,EAC9DQ,EAAeD,EAAa,SAAW,EAAIjB,GAA2BiB,EAGxEV,EACEY,EAAS,CAAA,EACTC,EAAwB,IAAI,IAClC,QAAWT,KAAeO,EAAc,CACtC,IAAMG,EAAgB,MAAMnB,GAA+BS,CAAW,EAClE,OAAOU,GAAkB,SAC3BF,EAAO,KAAK,CAAC,KAAMR,EAAa,IAAKU,CAAa,CAAC,GAE9Cd,IACHA,EAAUc,GAERd,IAAYc,GACdD,EAAsB,IAAIT,CAAW,GAM3C,GAAI,CAACJ,EACH,MAAM,IAAI,MAAM,oCAAoCY,EAAO,IAAIL,GAAK,IAAIA,EAAE,IAAI,KAAKA,EAAE,GAAG,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,EAI1G,OAAW,CAAC,KAAAR,EAAM,IAAAgB,CAAG,IAAKH,EACpBF,EAAa,SAASX,CAAI,GAE5B,QAAQ,KAAK,0CACTA,CAAI,uDAAuDgB,CAAG,EAAE,EAIxE,IAAMC,EAAcP,EAAI,OAAON,GAAKU,EAAsB,IAAI,OAAOV,GAAM,SAAWA,EAAIA,EAAE,IAAI,CAAC,EAEjG,MAAO,CACLH,EAAS,IAAI,MAAMQ,EAAS,CAC1B,IAAK,CAACS,EAAQC,IACRA,IAAS,qBACJF,EAEF,QAAQ,IAAIC,EAAQC,CAAI,EAElC,EAEL,IChKJ,IAAAC,GAAAC,EAAA,kBAoFAC,OCpFA,IAMaC,GANbC,GAAAC,EAAA,kBAMaF,GAAU,WCNvB,IAQIG,GAESC,GAVbC,GAAAC,EAAA,kBAIAC,KAIIJ,GAAwC,UAE/BC,GAAW,CACtB,KAAM,CAAA,EACN,MAAO,CAAA,EACP,OAAQ,CAAA,EACR,SAAU,CAAC,OAAQI,EAAO,EAE1B,IAAI,SAASC,EAAmB,CAC9B,GAAIA,IAAU,OAGd,IAAI,OAAOA,GAAU,UAAY,CAAC,UAAW,OAAQ,UAAW,QAAS,OAAO,EAAE,QAAQA,CAAK,IAAM,GACnG,MAAM,IAAI,MAAM,8BAA8BA,CAAK,EAAE,EAEvDN,GAAgBM,EAClB,EACA,IAAI,UAAQ,CACV,OAAON,EACT,GAIF,OAAO,eAAeC,GAAK,WAAY,CAAC,WAAY,EAAI,CAAC,IC/BzD,IAmRaM,GAnRbC,GAAAC,EAAA,kBAGAC,KAgRaH,GAAWA,KCnRxB,IASaI,GA+FAC,GAxGbC,GAAAC,EAAA,kBASaH,GAAkB,CAACI,EAAgBC,IAA4C,CAC1F,IAAMC,EAAS,OAAO,SAAa,IAAc,SAAS,cAAc,QAAQ,EAAK,IAAI,gBAAgB,EAAG,CAAC,EAC7GA,EAAO,MAAQF,EAAO,KAAK,CAAC,EAC5BE,EAAO,OAASF,EAAO,KAAK,CAAC,EAC7B,IAAMG,EACFD,EAAO,WAAW,IAAI,EAE1B,GAAIC,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAJ,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,IAEtBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,GAGxB,IAAMM,EAAcL,GAAS,SAAW,OAAYA,EAAQ,OAAS,MAE/DM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,
SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EAEpBO,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5B,QAASK,EAAI,EAAGA,EAAIV,EAAQU,IAC1B,QAASC,EAAI,EAAGA,EAAIZ,EAAOY,IAAK,CAC9B,IAAMC,GAAMjB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EU,GAAMlB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EW,GAAMnB,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EY,EAAIN,IAAmB,GACzB,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,EAE1EL,EAAgB,UAAY,QAAUc,EAAI,IAAMC,EAAI,IAAMC,EAAI,IAAMC,EAAI,IACxEjB,EAAgB,SAASa,EAAGD,EAAG,EAAG,CAAC,EAGvC,GAAI,cAAeb,EACjB,OAAOA,EAAO,UAAS,EAEvB,MAAM,IAAI,MAAM,4BAA4B,MAG9C,OAAM,IAAI,MAAM,2BAA2B,CAE/C,EAKaL,GAAoB,CAACG,EAAgBC,IAAiD,CACjG,IAAME,EAAkB,OAAO,SAAa,IACxC,SAAS,cAAc,QAAQ,EAAE,WAAW,IAAI,EAChD,IAAI,gBAAgB,EAAG,CAAC,EAAE,WAAW,IAAI,EACzCkB,EACJ,GAAIlB,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAiB,EACArB,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,IAExBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,GAE1B,IAAMM,EAAcL,IAAY,QAAaA,EAAQ,SAAW,OAAYA,EAAQ,OAAkB,MAEhGM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,GAAG,EACrDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EACxB,GAAIH,IAAY,SACVA,EAAQ,SAAW,QAAcqB,IAAa,GAAKrB,EAAQ,SAAW,QACrEqB,IAAa,GAAMrB,EAAQ,SAAW,OAASA,EAAQ,SAAW,OACrE,MAAM,IAAI,MAAM,+CAAgD,EAKpE,IAAMsB,EAAO,EACTC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACzEhB,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5BW,EAAQlB,EAAgB,gBAAgBC,EAAOC,CAAM,EAErD,QAASU,EAAI,EAAGA,EAAIV,EAASD,EACxBoB,GAAiBD,EAAME,GAAiBF,EAAMG,GAAiBH,EAAMI,GAAiBJ,EAAMR,IAC/FM,EAAM,KAAKG,CAAa,GAAMxB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKI,CAAa,GAAMzB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKK,CAAa,GAAM1B,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKM,CAAa,EAAIb,IAAmB,GAC3C,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,MAI5E,OAAM,IAAI,MAAM,2BAA2B,EAE7C,OAAOa,CACT,ICtMA,IAiBaO,GAkFAC,GAgKAC,GAWAC,GASAC,GAvRbC,GAAAC,EAAA,kBAIAC,KAaaP,GAAiB,CAACQ,EAAqCC,IAA0C,CAC5G,GAAID,IAAW,OACb,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAIC,EAAQ,SAAW,QAAaA,EAAQ,QAAU,OACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAM,CAAC,OAAAC,EAAQ,MAAAC,CAAK,EAAIF,EAElBG,EAAOH,EAAQ,MAAQ,CAAC,KAAM,IAAK,KAAM,CAAC,EAC5CI,EACAC,EAEA,OAAQF,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAA
I,EAEtDC,EAAW,CAACD,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,GAAG,EAG3E,OAAQA,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,EAEtDE,EAAW,CAACF,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,CAAC,EAG7E,IAAMG,EAAcN,EAAQ,SAAW,OAAYA,EAAQ,OAAS,OAG9DO,EACFP,EAAQ,eAAiB,QAAaA,EAAQ,eAAiB,OAAYA,EAAQ,aAAwB,MACzGQ,EAASP,EAASC,EAClBO,EAAcF,IAAiB,OAAS,IAAI,aAAaC,EAAS,CAAC,EAAI,IAAI,aAAaA,EAAS,CAAC,EAGpGE,EAAO,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACnFC,EAAiB,EAAGC,EAAiBR,EAAQS,EAAiBT,EAAS,EAAGU,EAAiB,GAG3FZ,IAAgB,QAClBI,EAAO,EACPC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,IAIdP,IAAiB,OACnBW,EAAiBV,EAAS,EACjBD,IAAiB,OAC1BQ,EAAiB,EACjBE,EAAiBT,EACjBQ,EAAiBR,EAAS,GACjBD,IAAiB,QAC1BU,EAAiB,EACjBD,EAAiBR,EACjBO,EAAiBP,EAAS,GAG5B,QAASW,EAAI,EAAGA,EAAIX,EACfW,IAAKR,GAAiBD,EAAMG,GAAiBH,EAAME,GAAiBF,EAAMI,GAAiBJ,EAC9FD,EAAYM,GAAgB,GAAKhB,EAAOY,CAAa,EAAIN,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYO,GAAgB,GAAKjB,EAAOa,CAAa,EAAIP,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYQ,GAAgB,GAAKlB,EAAOc,CAAa,EAAIR,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC9Ec,IAAmB,IAAMJ,IAAkB,KAC7CL,EAAYS,GAAgB,GAAKnB,EAAOe,CAAa,EAAIT,EAAS,CAAC,GAAKD,EAAS,CAAC,GAOtF,OAFqBG,IAAiB,OAAS,IAAIa,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,EACxD,IAAIkB,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,CAEzG,EAKaV,GAAkB,MAC3B6B,EACArB,IACyC,CAE3C,IAAMsB,EAAiB,OAAQ,iBAAsB,KAAeD,aAAiB,iBAC/EE,EAAiB,OAAQ,UAAe,KAAeF,aAAiB,UACxEG,EAAgB,OAAQ,YAAiB,KAAeH,aAAiB,YACzEI,EAAW,OAAOJ,GAAU,SAE9BK,EACAC,EAA+C3B,GAAW,CAAA,EAExD4B,EAAe,IAAK,CACxB,GAAI,OAAO,SAAa,IACtB,OAAO,SAAS,cAAc,QAAQ,EACjC,GAAI,OAAO,gBAAoB,IACpC,OAAO,IAAI,gBAAgB,EAAG,CAAC,EAE/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,EACMC,EAAuBC,GACvBA,aAAkB,mBAEXA,aAAkB,gBADpBA,EAAO,WAAW,IAAI,EAItB,KAIX,GAAIR,EAAgB,CAElB,IAAMQ,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAI9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MAMlB,GALIrB,IAAY,QAAaA,EAAQ,gBAAkB,QAAaA,EAAQ,eAAiB,SAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,cAGdA,IAAY,OAAW,CAEzB,GADA2B,EAAwB3B,EACpBA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,6DAA6D,EAE7E2B,EAAsB,aAAe,OAEvCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,OAE9ByB,EAAsB,aAAe,OACrCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAGhC6B,EAAgB,UAAUV,EAAO,EAAG,CAAC,EACrCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,UAEpCsB,EAAgB,CACzB,IAAItB,EACAC,EAiBJ,GAfIF,IAAY,QAAaA,EAAQ,eAAiB,QAAaA,EAAQ,gBAAkB,QAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,eAEhBC,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,OAGZrB,IAAY,SACd2B,EAAwB3B,GAE1B2B,EAAsB,OAAS,OAC/BA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAE1BF,IAAY,OAAW,CACzB,IAAMgC,EAAaJ,EAAY,EAE/BI,EAAW,MAAQ9B,EACnB8B,EAAW,OAAS/B,EAEpB,IAAM8B,EAAkBF,EAAoBG,CAAU,EAEtD,GAAID,GAAmB,KACrBA,EAAgB,aAAaV,EAAO,EAAG,CAAC,EACxCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,OAG7CyB,EAAOL,EAAM,aAENG,EAAe,CAExB,GAAIxB,IAAY,OACd,MAAM,IAAI,MAAM,yDAAyD,EAG3E,IAAM8B,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAM9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MACpB,OAAAU,EAAgB,UAAUV,EAAO,EAAG,EAAGnB,EAAOD,CAAM,EACpDyB,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,KACzD0B,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EACvBX,GAAemC,EAAMC,CAAqB,MAEjD,OAAM,IAAI,MAAM,2BAA2B,MAExC,IAAIF,EACT,OAAO,IAAI,QAAQ,CAACQ,EAASC,IAAU,CACrC,IAAMJ,EAASF,EAAY,EACrBO,EAAUN,EAAoBC,CAAM,EAC1C,GAAI,CAACT,GAAS,CAACc,EACb,OAAOD,EAAM,EAEf,IAAME,EAAW,IAAI,MACrBA,EAAS,YAAc,YACvBA,EAAS,IAAMf,EACfe,EAAS,OAAS,IAAK,CACrBN,EAAO,MAAQM,EAAS,M
ACxBN,EAAO,OAASM,EAAS,OACzBD,EAAQ,UAAUC,EAAU,EAAG,EAAGN,EAAO,MAAOA,EAAO,MAAM,EAC7D,IAAMO,EAAMF,EAAQ,aAAa,EAAG,EAAGL,EAAO,MAAOA,EAAO,MAAM,EAElEH,EAAsB,OAASG,EAAO,OACtCH,EAAsB,MAAQG,EAAO,MACrCG,EAAQ1C,GAAe8C,EAAI,KAAMV,CAAqB,CAAC,CACzD,CACF,CAAC,EAED,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAID,IAAS,OACX,OAAOnC,GAAemC,EAAMC,CAAqB,EAEjD,MAAM,IAAI,MAAM,gEAAgE,CAEpF,EAKalC,GAAoB,CAC7B6C,EAAsCtC,IAAgD,CACxF,GAAM,CAAC,MAAAE,EAAO,OAAAD,EAAQ,SAAAsC,EAAU,QAAAC,CAAO,EAAIxC,EAErCyC,EAAO,CAAC,EAAGxC,EAAQC,EAAO,CAAC,EACjC,OAAO,IAAIkB,GAAO,CAAC,SAAU,UAAW,KAAM,UAAW,QAAAkB,EAAS,KAAAG,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC5F,EAKa9C,GAAsB,CAC/BgD,EAA0C1C,IAAkD,CAC9F,GAAM,CAAC,SAAA2C,EAAU,KAAAF,EAAM,SAAAF,EAAU,QAAAC,CAAO,EAAIxC,EAC5C,OAAO,IAAIoB,GAAO,CAAC,SAAU,aAAc,KAAMuB,GAAY,UAAW,UAAAD,EAAW,KAAAD,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC7G,EAKa7C,GAAyB,CAClCiD,EAAS7C,EAAwC0C,IACjD,IAAIrB,GAAO,CAAC,SAAU,aAAc,KAAAwB,EAAM,KAAM7C,EAAQ,KAAM0C,GAAQ,CAAC1C,EAAO,MAAM,CAAC,CAAC,ICzR1F,IAWa8C,GAaAC,GAoBTC,GACSC,GA7CbC,GAAAC,EAAA,kBAWaL,GAAwC,IAAI,IAA6C,CACpG,CAAC,UAAW,YAAY,EACxB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,SAAS,EAClB,CAAC,SAAU,WAAW,EACtB,CAAC,QAAS,UAAU,EACpB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,UAAU,EACnB,CAAC,UAAW,YAAY,EACxB,CAAC,SAAU,WAAW,EACvB,EAGYC,GAAwC,IAAI,IAAkD,CACzG,CAAC,aAAc,SAAS,EACxB,CAAC,WAAY,OAAO,EACpB,CAAC,UAAW,MAAM,EAClB,CAAC,YAAa,QAAQ,EACtB,CAAC,WAAY,OAAO,EACpB,CAAC,WAAY,OAAO,EACpB,CAAC,aAAc,SAAS,EACxB,CAAC,YAAa,QAAQ,EACvB,EAWGC,GAAsB,GACbC,GAAkB,IAAK,CAClC,GAAI,CAACD,GAAqB,CACxBA,GAAsB,GACtB,IAAMI,EAA2B,OAAO,cAAkB,KAAe,cAAc,KACjFC,EAA4B,OAAO,eAAmB,KAAe,eAAe,KACpFC,EAA0B,OAAO,aAAiB,KAAe,aAAa,KAEhFF,IACFN,GAAsC,IAAI,QAAS,aAAa,EAChEC,GAAsC,IAAI,cAAe,OAAO,GAE9DM,IACFP,GAAsC,IAAI,SAAU,cAAc,EAClEC,GAAsC,IAAI,eAAgB,QAAQ,GAEhEO,GACFR,GAAsC,IAAI,UAAW,YAAY,EACjEC,GAAsC,IAAI,aAAc,SAAS,GAGjED,GAAsC,IAAI,UAAW,WAAW,EAGtE,ICpEA,IAWaS,GAkBAC,GA7BbC,GAAAC,EAAA,kBAIAC,KAOaJ,GAAiBK,GAAoC,CAChE,IAAIC,EAAO,EACX,QAASC,EAAI,EAAGA,EAAIF,EAAK,OAAQE,IAAK,CACpC,IAAMC,EAAMH,EAAKE,CAAC,EAClB,GAAI,OAAOC,GAAQ,UAAY,CAAC,OAAO,cAAcA,CAAG,EACtD,MAAM,IAAI,UAAU,QAAQD,CAAC,8BAA8BC,CAAG,EAAE,EAElE,GAAIA,EAAM,EACR,MAAM,IAAI,WAAW,QAAQD,CAAC,0CAA0CC,CAAG,EAAE,EAE/EF,GAAQE,EAEV,OAAOF,CACT,EAKaL,GAAgB,CAACQ,EAAgBJ,IAAmC,CAC/E,OAAQI,EAAO,SAAU,CACvB,IAAK,MACH,OAAO,IAAIC,GAAOD,EAAO,KAAMA,EAAO,KAAMJ,CAAI,EAClD,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,KAAMD,EAAO,KACb,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,UACH,OAAO,IAAIK,GAAO,CAChB,SAAU,UACV,QAASD,EAAO,QAChB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,UAAWD,EAAO,UAClB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,QACE,MAAM,IAAI,MAAM,kCAAkCI,EAAO,QAAQ,mBAAmB,EAE1F,ICzDA,IAwBaE,GAxBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAgBaN,GAAP,KAAa,CAyCjB,YACIO,EAEAC,EAA8EC,EAAwB,CAExGC,GAAe,EAEf,IAAIC,EACAC,EAEJ,GAAI,OAAOL,GAAS,UAAY,aAAcA,EAO5C,OAHA,KAAK,aAAeA,EAAK,SACzBI,EAAOJ,EAAK,KACZK,EAAOL,EAAK,KACJA,EAAK,SAAU,CACrB,IAAK,aAAc,CACjB,IAAMM,EAAgCC,GAAsC,IAAIH,CAAI,EACpF,GAAI,CAACE,EACH,MAAM,IAAI,UAAU,qBAAqBF,CAAI,uCAAuC,EAEtF,GAAI,EAAEJ,EAAK,gBAAgBM,GACzB,MAAM,IAAI,UAAU,4BAA4BA,EAA8B,IAAI,EAAE,EAEtF,KAAK,QAAUN,EAAK,KACpB,MAEF,IAAK,UAAW,CACd,GAAII,IAAS,UACX,MAAM,IAAI,UAAU,qBAAqBA,CAAI,iCAAiC,EAEhF,KAAK,eAAiBJ,EAAK,QAC3B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,IAAK,aAAc,CACjB,GAAKI,IAAS,WAAaA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAC7FA,IAAS,SAAWA,IAAS,OAChC,MAAM,IAAI,UAAU,qBAAqBA,CAAI,oCAAoC,EAEnF,KAAK,cAAgBJ,EAAK,UAC1B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,QACE,MAAM,IAAI,MAAM,6CAA6C,KAAK,YAAY,GAAG,MAEhF,CAIL,IAAIQ,EACAC,EAEJ,GAAI,OAAOT,GAAS,SAMlB,GAFAI,EAAOJ,EACPS,EAAYP,EACRF,IAAS,SAAU,CAErB,GAAI,
CAAC,MAAM,QAAQC,CAAI,EACrB,MAAM,IAAI,UAAU,gDAAiD,EAIvEO,EAAOP,MACF,CAEL,IAAMS,EAAwBH,GAAsC,IAAIP,CAAI,EAC5E,GAAIU,IAA0B,OAC5B,MAAM,IAAI,UAAU,4BAA4BV,CAAI,GAAG,EAEzD,GAAI,MAAM,QAAQC,CAAI,EAAG,CACvB,GAAID,IAAS,WAAaU,IAA0B,YAMlD,MAAM,IAAI,UACN,+FAA+F,EAC1FV,IAAS,UAAYA,IAAS,QAYvCQ,EAAQE,EAA8B,KAAKT,EAAM,MAAM,EAIvDO,EAAQE,EAA8B,KAAKT,CAAI,UAExCA,aAAgBS,EACzBF,EAAOP,MAEP,OAAM,IAAI,UAAU,KAAKG,CAAI,kCAAkCM,CAAqB,EAAE,UAO1FD,EAAYR,EACR,MAAM,QAAQD,CAAI,EAAG,CAEvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qDAAqD,EAE3E,IAAMW,EAAmB,OAAOX,EAAK,CAAC,EACtC,GAAIW,IAAqB,SACvBP,EAAO,SACPI,EAAOR,UACEW,IAAqB,UAC9BP,EAAO,OAIPI,EAAO,WAAW,KAAKR,CAAa,MAEpC,OAAM,IAAI,UAAU,uCAAuCW,CAAgB,GAAG,MAE3E,CAEL,IAAMC,EACFC,GAAsC,IAAIb,EAAK,WAA8C,EACjG,GAAIY,IAAe,OACjB,MAAM,IAAI,UAAU,qCAAqCZ,EAAK,WAAW,GAAG,EAE9EI,EAAOQ,EACPJ,EAAOR,EAKX,GAAIS,IAAc,OAEhBA,EAAY,CAACD,EAAK,MAAM,UACf,CAAC,MAAM,QAAQC,CAAS,EACjC,MAAM,IAAI,UAAU,wCAAyC,EAE/DJ,EAAOI,EAEP,KAAK,QAAUD,EACf,KAAK,aAAe,MAItB,IAAMM,EAAOC,GAAcV,CAAI,EAE/B,GAAI,KAAK,SAAWS,IAAS,KAAK,QAAQ,OACxC,MAAM,IAAI,MAAM,iBAAiBA,CAAI,gCAAgC,KAAK,QAAQ,MAAM,IAAI,EAG9F,KAAK,KAAOV,EACZ,KAAK,KAAOC,EACZ,KAAK,KAAOS,CACd,CAIA,aAAa,UACTE,EACAC,EACoB,CACtB,OAAOC,GAAgBF,EAAOC,CAAO,CACvC,CAEA,OAAO,YACHE,EAA4BF,EAAoC,CAClE,OAAOG,GAAkBD,EAASF,CAAO,CAC3C,CAEA,OAAO,cACHI,EAAgCJ,EAAsC,CACxE,OAAOK,GAAoBD,EAAWJ,CAAO,CAC/C,CAEA,OAAO,iBACHb,EAASmB,EAAwClB,EAAwB,CAC3E,OAAOmB,GAAuBpB,EAAMmB,EAAQlB,CAAI,CAClD,CAKA,UAAUY,EAAgC,CACxC,OAAOQ,GAAgB,KAAMR,CAAO,CACtC,CAEA,YAAYA,EAAkC,CAC5C,OAAOS,GAAkB,KAAMT,CAAO,CACxC,CAgDA,IAAI,MAAI,CAEN,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,QACR,MAAM,IAAI,MACN,gJAC2E,EAEjF,OAAO,KAAK,OACd,CAEA,IAAI,UAAQ,CACV,OAAO,KAAK,YACd,CAEA,IAAI,SAAO,CAET,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,eACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,cACd,CAEA,IAAI,WAAS,CAEX,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,cACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,aACd,CAKA,MAAM,QAAQU,EAAqB,CAEjC,OADA,KAAK,YAAW,EACR,KAAK,aAAc,CACzB,IAAK,MACL,IAAK,aACH,OAAO,KAAK,KACd,IAAK,UACL,IAAK,aAAc,CACjB,GAAI,CAAC,KAAK,WACR,MAAM,IAAI,MAAM,qEAAqE,EAEvF,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAE3D,GAAI,CACF,KAAK,cAAgB,GACrB,IAAMnB,EAAO,MAAM,KAAK,WAAU,EAClC,YAAK,WAAa,OAClB,KAAK,aAAe,MACpB,KAAK,QAAUA,EAEXmB,GAAe,KAAK,WACtB,KAAK,SAAQ,EACb,KAAK,SAAW,QAGXnB,UAGP,KAAK,cAAgB,IAGzB,QACE,MAAM,IAAI,MAAM,kCAAkC,KAAK,YAAY,EAAE,EAE3E,CAEA,SAAO,CACL,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAGvD,KAAK,WACP,KAAK,SAAQ,EACb,KAAK,SAAW,QAElB,KAAK,QAAU,OACf,KAAK,eAAiB,OACtB,KAAK,cAAgB,OACrB,KAAK,WAAa,OAClB,KAAK,cAAgB,OAErB,KAAK,aAAe,MACtB,CAKQ,aAAW,CACjB,GAAI,KAAK,eAAiB,OACxB,MAAM,IAAI,MAAM,yBAAyB,CAE7C,CAEA,QAAQH,EAAuB,CAE7B,GADA,KAAK,YAAW,EACZ,KAAK,YAAc,KAAK,SAC1B,MAAM,IAAI,MAAM,iDAAiD,EAEnE,OAAOuB,GAAc,KAAMvB,CAAI,CACjC,KCpaF,IAwUawB,GAxUbC,GAAAC,EAAA,kBAIAC,KAoUaH,GAASA,KCxUtB,IAQaI,GAQPC,GAqBOC,GAUAC,GA/CbC,GAAAC,EAAA,kBAGAC,KAKaN,GAAQ,CAACO,EAAoBC,IAAiB,EACrD,OAAOC,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAI9D,QAAQ,UAAU,GAAGF,CAAU,UAAUC,CAAK,EAAE,CAClD,EAEMP,GAAa,CAACS,EAAaC,IAAqB,CACpD,IAAMC,EAAQ,IAAI,MAAK,EAAG,OAAO,MAAM,aAAa,GAAK,CAAA,EACrDC,EAAe,GACnB,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,GAAID,GAAgB,CAACD,EAAME,CAAC,EAAE,SAAS,YAAY,EAAG,CACpD,IAAIN,EAAQ,QAAQE,CAAG,KAAKE,EAAME,CAAC,EAAE,KAAI,EAAG,MAAM,GAAG,EAAE,CAAC,CAAC,GACrDH,IACFH,GAAS,KAAKG,CAAQ,IAExBX,GAAM,MAAOQ,CAAK,EAClB,OAEEI,EAAME,CAAC,EAAE,SAAS,YAAY,IAChCD,EAAe,IAGrB,EAKaX,GAAoBS,GAAqB,EAChD,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,QAASU,CAAQ,CAC9B,EAKaR,GAAkBQ,GAAqB,EAC9C,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,MAAOU,CAAQ,CAC5B,ICpDA,IAgBaI,GAhBbC,GAAAC,EAAA,kBAGAC,
KAIAC,KACAC,KAQaL,GAAP,MAAOM,CAAgB,CAC3B,YAAoBC,EAAgC,CAClD,KAAK,QAAUA,CACjB,CAGA,MAAM,IAAIC,EAAkBC,EAA+BC,EAAiB,CAC1EC,GAAgB,EAChB,IAAMC,EAA4C,CAAA,EAC9CC,EAAsB,CAAA,EAE1B,GAAI,OAAOL,GAAU,UAAYA,IAAU,MAAQA,aAAiBM,IAAU,MAAM,QAAQN,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIO,EAAiB,GAErB,GAAI,OAAON,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBK,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQL,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DM,EAAiB,GAEjB,QAAWC,KAAQP,EAAM,CACvB,GAAI,OAAOO,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAI,KAAK,YAAY,QAAQA,CAAI,IAAM,GACrC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEJ,EAAQI,CAAI,EAAI,KAGlB,GAAI,OAAON,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIO,EAAY,GACVC,EAAW,OAAO,oBAAoBT,CAAI,EAChD,QAAWO,KAAQ,KAAK,YACtB,GAAIE,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKV,EAA4DO,CAAI,GACvEG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBH,EAAQI,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOP,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDG,EAAUJ,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWO,KAAQ,KAAK,WACtB,GAAI,OAAOR,EAAMQ,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQ,KAAK,YACtBJ,EAAQI,CAAI,EAAI,KAMpB,IAAMI,EAAU,MAAM,KAAK,QAAQ,IAAIZ,EAAOI,EAASC,CAAO,EACxDQ,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAAC,GAAc,EACPH,CACT,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,CAOA,aAAa,OACTI,EAAyChB,EAA8BC,EACvEgB,EAAqB,CACvBf,GAAgB,EAEhB,IAAIgB,EACAd,EAA0B,CAAA,EAE9B,GAAI,OAAOY,GAAS,UAElB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7CgB,aAAgB,YAEzB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAGpDgB,aAAgB,aACf,OAAO,kBAAsB,KAAeA,aAAgB,kBAAoB,CACnF,IAAMG,EAASH,EACXI,EAAa,EACbC,EAAaL,EAAK,WACtB,GAAI,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,GAAS,SAAU,CAEnC,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,EAAa,GAAKA,GAAcD,EAAO,WACzC,MAAM,IAAI,WAAW,oCAAoCA,EAAO,UAAU,IAAI,EAGhF,GADAE,EAAaL,EAAK,WAAaI,EAC3B,OAAOnB,GAAS,SAAU,CAE5B,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,GAAc,GAAKD,EAAaC,EAAaF,EAAO,WACtD,MAAM,IAAI,WAAW,oCAAoCA,EAAO,WAAaC,CAAU,IAAI,EAE7F,GAAI,OAAOH,GAAS,UAAYA,IAAS,KACvCb,EAAUa,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7C,OAAOhB,EAAS,IACzB,MAAM,IAAI,UAAU,gCAAkC,UAE/C,OAAOD,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,EAEtDkB,EAAuB,IAAI,WAAWC,EAAQC,EAAYC,CAAU,MAEpE,OAAM,IAAI,UAAU,qDAAyD,EAI/E,GAAM,CAACC,EAASC,CAAuB,EAAI,MAAMC,GAAoCpB,CAAO,EACtFN,EAAU,MAAMwB,EAAQ,8BAA8BJ,EAAsBK,CAAuB,EACzG,OAAAR,GAAc,EACP,IAAIlB,EAAiBC,CAAO,CACrC,CAEA,gBAAc,CACZ,KAAK,QAAQ,eAAc,CAC7B,CACA,cAAY,CACV,KAAK,QAAQ,aAAY,CAC3B,CAEA,IAAI,YAAU,CACZ,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,aAAW,CACb,OAAO,KAAK,QAAQ,WACtB,KCxNF,IA8hBa2B,GA9hBbC,GAAAC,EAAA,kBAGAC,KA2hBaH,GAA4CA,KC9hBzD,IAAAI,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAgBMC,GAGOC,GAnBbC,GAAAC,EAAA,kBAGAC,KAIAC,KASML,GAA0B,gHAGnBC,GAAP,MAAOK,CAAe,CAC1B,YAAoBC,EAAiCC,EAA4BC,EAAqB,CACpG,KAAK,QAAUF,EACf,KAAK,kBAAoBC,EACzB,KAAK,aAAeC,CACtB,CAKA,IAAI,oBAAkB,CACpB,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,qBAAmB,CACrB,OAAO,KAAK,QAAQ,WACtB,CAEA,IAAI,gBAAc,CAChB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,eAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CACA,IAAI,iBAAe,C
ACjB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,gBAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CAEA,aAAa,OAAOC,EAA+CC,EAA+B,CAEhG,IAAMC,EAA+BF,EAAgB,WAAa,GAC5DG,EAAoCH,EAAgB,gBAAkB,GACtEI,EAA0BH,GAAkB,CAAA,EAG5C,CAACI,EAASC,CAAuB,EAAI,MAAMC,GAAoCH,CAAO,EAC5F,GAAIC,EAAQ,6BAA8B,CACxC,IAAMR,EAAU,MAAMQ,EAAQ,6BAC1BL,EAAgB,gBAAiBA,EAAgB,WAAYE,EAAWC,EACxEG,CAAuB,EAC3B,OAAO,IAAIV,EAAgBC,EAAS,CAAC,CAACG,EAAgB,eAAgB,CAAC,CAACA,EAAgB,SAAS,MAEjG,OAAM,IAAI,MAAMV,EAAe,CAEnC,CAeA,wBACIkB,EAA+BC,EAAgCC,EAAkBC,EACjFC,EAAiB,CACnB,IAAMC,EAA4C,CAAA,EAC9CT,EAAsB,CAAA,EAE1B,GAAI,OAAOM,GAAU,UAAYA,IAAU,MAAQA,aAAiBI,IAAU,MAAM,QAAQJ,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIK,EAAiB,GAErB,GAAI,OAAOJ,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBG,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQH,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DI,EAAiB,GAEjB,QAAWC,KAAQL,EAAM,CACvB,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAIP,EAAY,QAAQO,CAAI,IAAM,GAChC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEH,EAAQG,CAAI,EAAI,KAGlB,GAAI,OAAOJ,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIK,EAAY,GACVC,EAAW,OAAO,oBAAoBP,CAAI,EAChD,QAAWK,KAAQP,EACjB,GAAIS,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKR,EAAmDK,CAAI,GAC9DG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBF,EAAQG,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOL,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDR,EAAUO,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWK,KAAQR,EACjB,GAAI,OAAOE,EAAMM,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQP,EACjBI,EAAQG,CAAI,EAAI,KAIpB,MAAO,CAACH,EAAST,CAAO,CAC1B,CASA,uCAAuCgB,EAAkC,CACvE,IAAMC,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAOF,CACT,CAEA,MAAM,eAAa,CACjB,MAAM,KAAK,QAAQ,cAAa,CAClC,CAIA,MAAM,aAAaX,EAAkBC,EAA+BC,EAAiB,CACnF,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,mBAAoB,KAAK,oBAAqBM,EAAOC,EAAMC,CAAI,EAC/FQ,EAAU,MAAM,KAAK,QAAQ,aAAaV,EAAOG,EAAST,CAAO,EACvE,OAAO,KAAK,uCAAuCgB,CAAO,CAC5D,CAEA,MAAM,iBAAiBhB,EAA+C,CACpE,GAAI,KAAK,kBACP,MAAM,KAAK,QAAQ,iBAAiBA,GAAW,CAAA,CAAE,MAEjD,OAAM,IAAI,MAAM,oDAAoD,CAExE,CAIA,MAAM,YAAYM,EAAkBC,EAA+BC,EAAiB,CAClF,GAAI,KAAK,aAAc,CACrB,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,eAAgB,KAAK,gBAAiBM,EAAOC,EAAMC,CAAI,EACvFQ,EAAU,MAAM,KAAK,QAAQ,YAAYV,EAAOG,EAAST,CAAO,EACtE,OAAO,KAAK,uCAAuCgB,CAAO,MAE1D,OAAM,IAAI,MAAM,+CAA+C,CAEnE,CAEA,MAAM,kBAAkBI,EAAgB,GAAI,CAC1C,OAAO,KAAK,QAAQ,kBAAkBA,CAAa,CACrD,CAEA,MAAM,qBAAqBC,EAAmBD,EAAgB,GAAI,CAChE,IAAME,EAAa,MAAM,KAAK,kBAAkBF,CAAa,EAG7D,GAAIC,EAAM,SAAW,EAAIC,EACvB,MAAM,IAAI,MACN,qJAC0D,EAEhE,OAAO,KAAK,QAAQ,qBAAqBD,EAAOD,CAAa,CAC/D,CAEA,MAAM,wBAAwBA,EAAgB,GAAI,CAChD,OAAO,KAAK,QAAQ,wBAAwBA,CAAa,CAC3D,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,KCzPF,IAmMaG,GAnMbC,GAAAC,EAAA,kBAKAC,KA8LaH,GAA0CA,KCnMvD,IAAAI,GAAA,GAAAC,GAAAD,GAAA,sBAAAE,GAAA,UAAAC,GAAA,qBAAAC,GAAA,mBAAAC,GAAA,WAAAC,GAAA,oBAAAC,GAAA,QAAAC,GAAA,oBAAAC,KAAA,IAAAC,GAAAC,EAAA,kBAmBAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,OC5BA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IA0FMC,GACAC,GAwFCF,GAnLPG,GAAAC,EAAA,kBAsFAC,KACAC,KACAC,KAEMN,GAAc,wBACdC,GAAgB,WAAW,MAAM,OAASD,GAE5CC,KAEF,KAAK,UAAaM,GAA2C,CAC3D,GAAM,CAAC,KAAAC,EAAM,GAAKC,CAAO,EAAIF,EAAG,KAChC,GAAI,CACF,OAAQC,EAAM,CACZ,IAAK,YACHE,GAAsBD,EAAS,IAAI,EAC9B,KACG,IAAM,CACJE,GAAYF,CAAQ,EAAE,KAClB,IAAM,CACJ,YAAY,CAAC,KAAAD,CAAI,CAAC,CACpB,
EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,CACP,EACAA,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,MACF,IAAK,UAAW,CACd,GAAM,CAAC,OAAAC,EAAQ,IAAAC,CAAG,EAAIL,EACtBM,GAAOD,EAAKD,CAAM,EACb,KACG,IAAM,CACJ,YAAY,CAAC,KAAAL,CAAI,CAAC,CACpB,EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,YAAa,CAChB,GAAM,CAAC,OAAAI,CAAM,EAAIP,EACXQ,EAAaC,GAAuBF,CAAM,EAChD,YAAY,CAAC,KAAAR,EAAM,IAAKS,CAAU,CAAmB,EACrD,KACF,CACA,IAAK,SAAU,CACb,GAAM,CAAC,MAAAE,EAAO,QAAAC,CAAO,EAAIX,EACzBY,GAAcF,EAAOC,CAAO,EACvB,KACGE,GAAmB,CACjB,YAAY,CAAC,KAAAd,EAAM,IAAKc,CAAe,CAAmB,CAC5D,EACAV,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,UACHW,GAAed,CAAQ,EACvB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,IAAK,MAAO,CACV,GAAM,CAAC,UAAAgB,EAAW,aAAAC,EAAc,OAAAC,EAAQ,cAAAC,EAAe,QAAAP,CAAO,EAAIX,EAClEmB,GAAIJ,EAAWC,EAAcC,EAAQC,EAAe,IAAI,MAAMA,EAAc,MAAM,EAAE,KAAK,IAAI,EAAGP,CAAO,EAClG,KACGS,GAAW,CACLA,EAAQ,KAAKC,GAAKA,EAAE,CAAC,IAAM,KAAK,EAClC,YAAY,CAAC,KAAAtB,EAAM,IAAK,iDAAiD,CAAC,EAE1E,YACI,CAAC,KAAAA,EAAM,IAAKqB,CAAO,EACnBE,GAA2B,CAAC,GAAGL,EAAQ,GAAGG,CAAO,CAAiC,CAAC,CAE3F,EACAjB,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,gBACHoB,GAAavB,CAAQ,EACrB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,QACF,CACF,OAASI,EAAK,CACZ,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAmB,CAC3C,CACF,GAGKb,GAAQE,GACX,KACCgC,GACG,IAAI,OAAOA,GAAeC,GAAY,CAAC,KAA0B,SAAsB,KAAMlC,EAAW,CAAC,ICtLjH,IAAAmC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IAAIC,GAAEC,GAA6h0CF,GAAni0CG,GAAAC,EAAA,kBAAMF,IAAGD,GAAE,YAAY,IAAI,eAAe,EAAE,CAAC,EAAE,CAAC,SAAS,GAAG,CAAC,OAAOI,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAED,EAAC,CAAC,SAASE,GAAG,CAAC,OAAOH,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEE,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOL,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEI,EAAC,CAAC,SAAS,GAAG,CAAC,OAAON,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEK,CAAC,CAAC,SAAS,GAAG,CAAC,OAAOP,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEM,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOT,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEQ,CAAC,CAAC,SAASC,GAAG,CAAC,OAAOX,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEU,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOb,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEY,EAAC,CAAC,IAAIC,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAE,CAAC,EAAEC,EAAE,IAAI,QAAS,CAACtB,EAAEC,IAAI,CAACkB,EAAEnB,EAAEoB,EAAEnB,CAAC,CAAE,EAAEsB,EAAY,OAAO,QAAjB,SAAwBC,EAAc,OAAO,eAAnB,WAAiCC,EAAED,GAAiB,KAAK,MAAnB,aAAwBH,EAAE,kBAAkB,CAACrB,EAAEC,IAAI,EAAEoB,EAAE,KAAKA,EAAE,GAAG,IAAI,MAAM,IAAIrB,EAAEC,CAAC,CAAC,EAAEoB,EAAE,oBAAoB,IAAI,CAAC,OAAOA,EAAE,EAAE,EAAE,IAAIK,EAAE,WAAW,mBAAmB,IAAI,YAAY,OAAO,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,YAAY,IAAIC,EAAE,IAAI,CAAC,IAAM3B,EAAE,CAACA,EAAEC,EAAE2B,IAAI,IAAIrB,IAAI,CAAC,IAAME,EAAEoB,GAAGC,EAAE7B,IAAI,EAAEM,EAAEP,EAAE,GAAGO,CAAC,EAAE,IAAMwB,EAAE9B,IAAI,EAAE,OAAO6B,IAAIC,IAAI/B,EAAE+B,EAAEH,EAAEE,CAAC,EAAE7B,EAAE2B,EAAE,MAAMC,IAAIpB,EAAE,IAAI,QAAS,CAACT,EAAEC,IAAI,CAAC+B,GAAG,CAAC,QAAQhC,EAAE,OAAOC,CAAC,CAAC,CAAE,EAAEM,CAAC,EAAEN,EAAED,GAAG,SAASC,IAAI,CAAC,GAAG,CAAC,GAAGoB,EAAE,GAAG,MAAM,MAAM,yBAAyB,EAAE,IAAMO,EAAEP,EAAE,GAAG,CAAC,GAAGpB,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAEM,EAAE,MAAMP,EAAE,GAAGC,CAAC,EAAE,GAAGoB,EAAE,KAAKO,EAAE,MAAM,MAAM,kBAAkB,EAAEP,EAAE,IAAI,MAAM,EAAE,IAAMZ,EAAEmB,EAAE,OAAO,GAAG,EAAEnB,EAAE,OAAO,CAAC,IAAIT,EAAE,MAAM,QAAQ,IAAIS,CAAC,EAAE,GAAGT,EAAEA,EAAE,OAAQA,GAAGA,CAAE,EAAE,EAAEA,EAAE,OAAO,MAAM,MAAMA,EAAE,KAAK;AAAA,CAAI,CAAC,CAAC,CAAC,OAAOO,CAAC,QAAC,CAAQc,EAAE,GAAG,IAAI,CAAC,EAAEA,EAAE,kBAAkBrB,EAAEqB,EAAE,kBAAmB,IAAIA,EAAE,kBAAoBrB,GAAGqB,EAAE,kBAAkBrB,CAAE,EAAEqB,EAAE,QAAQpB,EAAED,EAAEqB,EAAE,QAAS,IAAIA,EAAE,QAAUrB,GAAGqB,EAAE,QAAQrB,CAAE,CAAC,EAAEqB,EAAE,mBAAmBpB,EA
AED,EAAEqB,EAAE,mBAAoB,IAAIA,EAAE,mBAAqBrB,GAAGqB,EAAE,mBAAmBrB,CAAE,CAAC,EAAEqB,EAAE,cAAcrB,EAAEqB,EAAE,cAAe,IAAIA,EAAE,cAAgBrB,GAAGqB,EAAE,cAAcrB,CAAE,EAAE2B,EAAE,MAAM,EAAEN,EAAE,SAAS,CAACrB,EAAEC,IAAI,CAAC,GAAG0B,IAAI,EAAa3B,IAAX,SAAa,CAAC,CAACqB,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,EAAE,EAAEpB,EAAE,IAAMD,EAAEqB,EAAE,GAAGA,EAAE,mBAAmB,CAACpB,EAAE2B,EAAErB,EAAEE,IAAIT,EAAE,eAAeC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEY,EAAE,cAAcpB,GAAGD,EAAE,UAAUC,CAAC,EAAEoB,EAAE,qBAAqB,CAACpB,EAAE2B,EAAErB,IAAIP,EAAE,iBAAiBC,EAAE2B,EAAErB,CAAC,EAAEc,EAAE,qBAAqBpB,GAAG,CAACD,EAAE,iBAAiBC,CAAC,CAAC,EAAEoB,EAAE,eAAepB,GAAGD,EAAE,WAAWC,CAAC,CAAC,CAAC,EAAE,IAAIgC,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAEd,CAAC,EAAEe,EAAE,iBAAiBC,EAAE,CAACrC,EAAEC,IAAI,CAAC,MAAMA,CAAC,EAAEqC,EAAE,IAAIf,GAAGC,KAAKA,EAAEc,EAAE,KAAK,SAAS,KAAkB,OAAO,SAApB,KAA8B,SAAS,gBAAgBA,EAAE,SAAS,cAAc,KAAKtC,KAAIsC,EAAEtC,IAAGsC,EAAEA,EAAE,WAAW,OAAO,EAAE,GAAGA,EAAE,OAAO,EAAEA,EAAE,QAAQ,SAAS,EAAE,EAAE,YAAY,GAAG,EAAE,CAAC,EAAsFd,IAAIU,EAAElC,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAOA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,aAAa,cAAcA,EAAE,KAAK,IAAI,EAAE,IAAI,WAAWA,EAAE,QAAQ,CAAC,GAAGgC,EAAE,CAACjC,EAAEC,EAAE2B,IAAI,CAAC,IAAIrB,EAAE,IAAI,eAAeA,EAAE,KAAK,MAAMP,EAAE,EAAE,EAAEO,EAAE,aAAa,cAAcA,EAAE,OAAO,IAAI,CAAMA,EAAE,QAAP,KAAkBA,EAAE,QAAL,GAAaA,EAAE,SAASN,EAAEM,EAAE,QAAQ,EAAEqB,EAAE,CAAC,EAAErB,EAAE,QAAQqB,EAAErB,EAAE,KAAK,IAAI,CAAC,GAAG,IAAIgC,EAAE,QAAQ,IAAI,KAAK,OAAO,EAAEC,EAAE,QAAQ,MAAM,KAAK,OAAO,EAAE,EAAED,EAAEE,EAAED,EAAE,GAAG,OAAO,OAAOnB,EAAEc,CAAC,EAAEA,EAAE,KAAKV,EAAE,CAAY,IAASiB,EAAT,SAAY1C,EAAE,CAAC,GAAG,CAAC,IAAIC,EAAED,EAAE,KAAK4B,EAAE3B,EAAE,IAAI,GAAY2B,IAAT,OAAW,CAAC,IAAI5B,EAAE,CAAC,EAAE,KAAK,UAAUC,GAAGD,EAAE,KAAKC,CAAC,EAAE,KAAK,YAAY,IAAI,CAAC,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAE,QAAQA,KAAKD,EAAE0C,EAAGzC,CAAC,EAAE,KAAK,UAAUyC,CAAE,EAAE,QAAU1C,KAAKC,EAAE,SAASoB,EAAErB,CAAC,GAAG,CAACqB,EAAErB,CAAC,EAAE,QAAQqB,EAAErB,CAAC,EAAE,IAAIC,IAAI,CAAC,YAAY,CAAC,GAAG,cAAc,GAAGD,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAWD,GAAT,UAAa,EAAEqB,EAAErB,CAAC,GAAeA,GAAZ,aAAgByC,EAAEpB,EAAErB,CAAC,IAAII,EAAEH,EAAE,WAAWK,GAAE,EAAEqC,GAAE1C,EAAE,UAAU,CAAC,SAAiB2B,IAAR,MAAU,CAACgB,GAAG3C,EAAE,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE4C,GAAG5C,EAAE,WAAW,EAAE6C,GAAG,EAAEC,GAAG,EAAEC,KAAIC,GAAG,EAAED,GAAE,IAAI,GAAG,CAACE,GAAGjD,EAAE,cAAcA,EAAE,GAAG,CAAC,OAAOD,EAAE,CAAC,GAAaA,GAAV,SAAY,MAAMA,CAAC,CAAC,MAAgB4B,IAAX,SAAauB,GAAG,GAAGC,GAAG,EAAE,EAAmBnD,EAAE,SAAnB,iBAA6C2B,IAAjB,eAAmBoB,IAAGK,GAAG,EAAEzB,IAAIa,EAAE,oCAAoCb,CAAC,EAAE,EAAEa,EAAExC,CAAC,GAAG,OAAOD,EAAE,CAAC,MAAMsD,GAAG,EAAEtD,CAAC,CAAC,EAAjqB,IAAA0C,KAAhBC,GAAEK,GAAE,GAA8qBP,EAAE,YAAYzC,EAAE,CAACA,EAAEA,EAAE,KAAK,GAAG,EAAE,QAAQ,MAAMA,CAAC,CAAC,EAAE,KAAK,MAAM,YAAYA,EAAE,CAAC,YAAY,CAAC,GAAG,QAAQ,KAAKA,EAAE,KAAK,GAAG,EAAE,GAAGmD,GAAG,CAAC,CAAC,CAAC,EAAE9B,EAAE,gBAAgB,CAACrB,EAAEC,IAAI,IAAI,QAASD,GAAG,CAAC2C,GAAEf,GAAG,CAACA,EAAE,IAAI,YAAY,SAASA,EAAE2B,GAAG,CAAC,EAAEtD,EAAE2B,CAAC,EAAE5B,EAAE,CAAC,CAAC,CAAE,EAAE,KAAK,qBAAqBA,GAAG,CAAC,MAAMA,EAAE,QAAQA,CAAC,EAAE,KAAK,UAAU0C,CAAE,CAAC,IAAItC,EAAEoD,GAAEC,EAAEpD,GAAEG,GAAEE,GAAEC,EAAEC,GAAEE,EAAEE,GAAE0C,EAAEC,EAAEzC,GAAE0C,GAAE,GAAG,SAAStD,IAAG,CAAC,IAAIN,EAAEI,EAAE,OAAOiB,EAAE,MAAMhB,GAAE,IAAI,UAAUL,CAAC,EAAEqB,EAAE,OAAOX,GAAE,IAAI,WAAWV,CAAC,EAAEqB,EAAE,OAAOb,GAAE,IAAI,WAAWR,CAAC,EAAEqB,EAAE,QAAQV,EAAE,IAAI,YAAYX,CAAC,EAAEqB,EAAE,OAAOT,GAAE,IAAI,WAAWZ,CAAC,EAAEqB,EAAE,QAAQP,EAAE,IAAI,YAAYd,CAAC,EAAEqB,EAAE,QAAQL,GAAE,IAAI,aAAahB,CAAC,EAAEqB,EAAE,QAAQH,GAAE,IAAI,aAAalB,CAAC,EAAEqB,EAAE,OAAOqC,EAAE,IAAI,cAAc1D,CAAC,EAAEqB,EAAE,Q
AAQsC,EAAE,IAAI,eAAe3D,CAAC,CAAC,CAAC,GAAG,CAACyB,EAAE,CAAC,GAAGJ,EAAE,WAAWjB,EAAEiB,EAAE,mBAAmB,GAAGjB,EAAE,IAAI,YAAY,OAAO,CAAC,QAAQ,IAAI,QAAQ,MAAM,OAAO,EAAE,CAAC,GAAG,kBAAkBsB,GAAG,MAAMe,EAAE,6NAA6N,EAAE,MAAM,YAAY,EAAEnC,GAAE,CAAC,CAAC,IAAIuD,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,EAAEC,GAAE,KAAKC,GAAE,KAAK,SAASC,IAAG,CAAC,GAAM,EAAEH,IAAL,IAAgBC,KAAP,OAAW,cAAcA,EAAC,EAAEA,GAAE,MAAMC,IAAG,CAAC,IAAIlE,EAAEkE,GAAEA,GAAE,KAAKlE,EAAE,CAAC,CAAC,CAAC,SAASoE,GAAEpE,EAAE,CAAC,MAAMyC,EAAEzC,EAAE,WAAWA,EAAE,GAAG,EAAE4D,GAAE,GAAGH,EAAE,EAAEzD,EAAE,IAAI,YAAY,aAAaA,EAAE,0CAA0C,EAAEoB,EAAEpB,CAAC,EAAEA,CAAC,CAAC,IAAIqE,GAAGC,GAAGtE,GAAGA,EAAE,WAAW,uCAAuC,EAAEuE,GAAGvE,GAAGA,EAAE,WAAW,SAAS,EAAE,SAASwE,GAAGxE,EAAE,CAAC,GAAGkC,EAAE,OAAOA,EAAElC,CAAC,EAAE,KAAK,iDAAiD,CAAC,SAASyE,GAAGzE,EAAEC,EAAE2B,EAAE,CAAC,OAAO,SAAS5B,EAAE,CAAC,GAAGuB,GAAGC,EAAE,CAAC,GAAe,OAAO,OAAnB,YAA0B,CAAC+C,GAAGvE,CAAC,EAAE,OAAO,MAAMA,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMC,GAAG,CAAC,GAAG,CAACA,EAAE,GAAG,KAAK,uCAAuCD,CAAC,IAAI,OAAOC,EAAE,YAAY,CAAC,CAAE,EAAE,MAAO,IAAIuE,GAAGxE,CAAC,CAAE,EAAE,GAAGiC,EAAE,OAAO,IAAI,QAAS,CAAChC,EAAE2B,IAAI,CAACK,EAAEjC,EAAGA,GAAGC,EAAE,IAAI,WAAWD,CAAC,CAAC,EAAG4B,CAAC,CAAC,CAAE,CAAC,CAAC,OAAO,QAAQ,QAAQ,EAAE,KAAM,IAAI4C,GAAGxE,CAAC,CAAE,CAAC,EAAEA,CAAC,EAAE,KAAMA,GAAG,YAAY,YAAYA,EAAEC,CAAC,CAAE,EAAE,KAAK2B,EAAG5B,GAAG,CAACyC,EAAE,0CAA0CzC,CAAC,EAAE,EAAEoE,GAAEpE,CAAC,CAAC,CAAE,CAAC,CAAC,SAASuD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,GAAGmB,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAG3D,GAAG,GAAG4D,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAE/I,EAAE,GAAGgJ,GAAG,EAAEC,EAAE,CAAC,CAAC,CAAC,IAAIC,GAAG,CAAC,QAAQ,CAACtJ,EAAEC,EAAE2B,EAAEnB,IAAI,CAAC,GAAYY,IAAT,QAAY,CAACA,EAAE,GAAG,MAAO,GAAE,IAAIrB,EAAEuJ,GAAGvJ,IAAI,CAAC,GAAG,WAAW,IAAI,IAAIA,EAAEA,EAAE,UAAU,CAAC,GAAG,EAAEA,EAAEqB,EAAE,GAAG,IAAIrB,CAAC,GAAG,MAAO,GAAE,GAAGS,KAAK,GAAGR,KAAK,IAAI2B,KAAK,GAAG5B,EAAE,WAAW,MAAO,GAAE,GAAG,CAAC,OAAOO,EAAE,EAAE,IAAIP,EAAE,SAASC,EAAEA,EAAE2B,CAAC,EAAEnB,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,MAAO,EAAC,CAAC,EAAE,QAAQ,IAAI,CAACY,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQrB,GAAGqB,EAAE,GAAGrB,CAAC,EAAE,QAAQA,GAAGqB,EAAE,GAAGrB,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAGrB,EAAEC,EAAE2B,EAAE,EAAE,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAGrB,EAAEC,EAAE2B,CAAC,CAAC,EAAE,QAAQ,IAAiB,OAAO,oBAApB,IAAwC,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,aAAarB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,
GAAG,UAAUrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,cAAcrB,EAAE,CAAC,MAAMC,EAAE,KAAK2B,CAAC,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,OAAOrB,EAAE,CAAC,IAAIC,EAAE,IAAI2B,CAAC,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,MAAMrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,YAAYrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,OAAOrB,EAAE,CAAC,GAAGC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,UAAUrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,iBAAiBrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,cAAcrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,aAAarB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,aAAarB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,WAAWrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,WAAWrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,eAA
erB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,YAAYrB,EAAE,CAAC,KAAKC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE2B,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,eAAerB,EAAE,CAAC,UAAUC,EAAE,KAAKsJ,GAAG3H,CAAC,EAAE,OAAOrB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,eAAerB,EAAE,CAAC,UAAUC,EAAE,KAAKsJ,GAAG3H,CAAC,EAAE,OAAOrB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOmB,EAAE,OAAO,OAAO,QAAQlB,EAAE,UAAU,CAACM,CAAC,EAAE,MAAME,EAAE,YAAY,CAACqB,CAAC,EAAE,KAAK,CAACjB,EAAEE,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEG,IAAI,CAAC,EAAE,cAAcE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW8H,GAAG7H,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOiB,EAAE,OAAO,OAAO,QAAQhB,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASM,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAME,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASqB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAASjB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAASE,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEI,IAAI,CAAC,EAAE,cAAcC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW+H,GAAG9H,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACzB,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOmB,EAAE,OAAO,OAAO,QAAQlB,EAAE,UAAU,CAACM,CAAC,EAAE,MAAME,EAAE,YAAY,CAACqB,CAAC,EAAE,KAAK,CAACjB,EAAEE,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEG,IAAI,CAAC,EAAE,cAAcE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW8H,GAAG7H,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOiB,EAAE,OAAO,OAAO,QAAQhB,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASM,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAME,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASqB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAASjB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAASE,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEI,IAAI,CAAC,EAAE,cAAcC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW+H,GAAG9H,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACzB,EAAEC,IAAI,CAACoB
,EAAE,GAAG,oBAAoBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,IAAI,CAACoB,EAAE,GAAG,oBAAoBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,IAAI,CAACoB,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,IAAI,CAACoB,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,OAAOrB,EAAE,CAAC,MAAMC,EAAE,KAAK2B,EAAE,OAAOrB,EAAE,OAAOE,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACqB,EAAE,GAAG,SAASrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,SAASrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAAC2B,EAAE,KAAKrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,SAASrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAAC2B,EAAE,KAAKrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,IAAI,CAACoB,EAAE,GAAG,UAAUrB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,SAASrB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,QAAQrB,EAAE,CAAC,KAAKC,EAAE,WAAW2B,EAAE,WAAWrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACqB,EAAE,GAAG,SAASrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,SAASrB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,iBAAiBrB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,IAAI,CAACC,EAAE,GAAG,SAASrB,EAAE,CAAC,UAAUC,EAAE,KAAK2B,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAErB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,wBAAwBgJ,GAAG9I,CAAC,EAAE,YAAYqB,EAAE,eAAejB,EAAE,mBAAmBE,EAAE,sBAAsBwI,GAAGtI,CAAC,EAAE,KAAKsI,GAAGpI,CAAC,EAAE,YAAYoI,GAAGnI,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACpB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEjB,IAAI,CAACQ,EAAE,GAAG,QAAQrB,EAAE,CAAC,OAAOC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE2B,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKqB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQb,GAAG,CAACqB,EAAE,GAAG,OA
AOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,wBAAwBrB,EAAE,CAAC,QAAQC,EAAE,OAAO2B,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,wBAAwBrB,EAAE,CAAC,QAAQC,EAAE,OAAO2B,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,SAASrB,EAAE,CAAC,SAASuJ,GAAGtJ,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,MAAMrB,EAAE,CAAC,KAAKC,EAAE,MAAM2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,QAAQC,EAAE,SAAS2B,EAAE,QAAQ,CAAC,CAACnB,EAAE,aAAa,CAAC,CAACF,EAAE,OAAOuB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,QAAQC,EAAE,SAAS2B,EAAE,QAAQ,CAAC,CAACnB,EAAE,aAAa,CAAC,CAACF,EAAE,OAAOuB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,SAASrB,EAAE,CAAC,UAAU,OAAOC,CAAC,EAAE,QAAQ,OAAO2B,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,IAAI,CAACI,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAASC,EAAE,iBAAiB2B,EAAE,gBAAgBrB,EAAE,MAAME,EAAE,SAASqB,EAAE,eAAejB,EAAE,MAAM,KAAK,EAAE,EAAE,SAAS,OAAOE,CAAC,IAAI,EAAE,OAAOA,CAAC,EAAEF,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,uBAAuB,CAAC,CAACI,CAAC,CAAC,CAAC,EAAE,QAAQjB,GAAG,CAACqB,EAAE,GAAG,UAAUrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,gBAAgBrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,WAAWrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEI,EAAEE,EAAEC,EAAEE,EAAEC,EAAEC,GAAEC,GAAEC,EAAEC,GAAEM,KAAI,CAACZ,EAAE,GAAG,OAAOrB,EAAE,CAAC,OAAOwB,GAAE,OAAO,OAAO,SAASvB,EAAE,UAAUM,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,MAAMqB,EAAE,aAAajB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,EAAE,EAAEE,KAAI,CAAC,EAAE,WAAW8H,GAAG7H,CAAC,EAAE,kBAAkBC,GAAE,MAAM,KAAKZ,EAAE,EAAE,SAASY,KAAI,EAAEM,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQjC,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,sBAAsBrB,EAAE,CAAC,SAASC,EAAE,WAAW2B,EAAE,MAAMrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,KAAKC,EAAE,QAAQ2B,EAAE,WAAW,CAAC,CAACrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,KAAKC,EAAE,QAAQ2B,EAAE,WAAW,CAAC,CAACrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,cAAcrB,EAAE,CAAC,EAAEC,EAAE,EAAE2B,EAAE,cAAcrB,EAAE,KAAKE,EAAE,UAAUqB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,SAASC,EAAE,iBAAiB2B,EAAE,gBAAgBrB,EAAE,MAAME,EAAE,SAASqB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,IAAI,CAACoB,EAAE,GAAG,YAAYrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,YAAY,CAAC,CAACC,EAAE,SAAS2B,EAAE,mBAAmBrB,EAAE,MAAME,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,yBAAyBrB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAAC2B,CAAC,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAGrB,CAAC,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAIoB,EAAE,GAAGrB,EAAEC,EAAEoB,EAAE,GAAG,GAAGA,EAAE,GAAG,MAAM,EAAE,QAAQ,CAACrB,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,yBAAyBrB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAAC2B,
CAAC,CAAC,CAAC,CAAC,EAAE,SAAS8C,GAAG1E,EAAEC,EAAE2B,EAAE,CAAC,OAAO4H,GAAI,SAAS,CAAC,MAAMnI,EAAE,GAAGrB,EAAEC,EAAE2B,CAAC,CAAC,CAAE,CAAC,CAAC,SAAS6H,GAAGzJ,EAAE,CAAC,KAAK,KAAK,aAAa,KAAK,QAAQ,gCAAgCA,CAAC,IAAI,KAAK,OAAOA,CAAC,CAAC,IAAI0J,GAAG1J,GAAG,CAACA,EAAE,UAAU,EAAEA,EAAE,UAAU,IAAI,CAAC,CAAC,EAAE2J,GAAG3J,GAAG,CAAI4J,GAAG,QAAN,IAAeC,GAAG,EAAEC,GAAGF,GAAG,CAAC,CAAC,GAAG,IAAI3J,EAAE2J,GAAG,IAAI,EAAE,GAAG,CAAC3J,EAAE,MAAO,GAAE8J,GAAG,KAAK9J,CAAC,EAAE+J,GAAGhK,EAAE,EAAE,EAAEC,EAAEA,EAAE,GAAGD,EAAE,GAAG,IAAI4B,EAAE,CAAC,IAAI,MAAM,cAAc5B,EAAE,GAAG,IAAIA,EAAE,GAAG,YAAYA,EAAE,EAAE,EAAE,OAAOC,EAAE,YAAY2B,EAAE5B,EAAE,EAAE,EAAE,CAAC,EAAEiK,GAAG,EAAEC,GAAG,CAAClK,EAAEC,KAAK2B,IAAI,CAAC,QAAQrB,EAAE,EAAEqB,EAAE,OAAOnB,EAAE0J,GAAG,EAAErI,EAAEsI,GAAG,EAAE7J,CAAC,EAAEwB,EAAED,IAAI,EAAEjB,EAAE,EAAEA,EAAEe,EAAE,OAAOf,IAAI,CAAC,IAAIE,EAAEa,EAAEf,CAAC,EAAY,OAAOE,GAAjB,UAAoB2C,EAAE3B,EAAE,EAAElB,CAAC,EAAE,GAAG6C,EAAE3B,EAAE,EAAElB,EAAE,CAAC,EAAEE,IAAI2C,EAAE3B,EAAE,EAAElB,CAAC,EAAE,GAAGI,EAAE,EAAEc,EAAE,EAAElB,EAAE,IAAI,CAAC,EAAEE,EAAE,CAAC,OAAOf,EAAEqK,GAAGrK,EAAE,EAAEO,EAAEuB,EAAE7B,CAAC,EAAEqK,GAAG7J,CAAC,EAAET,CAAC,EAAE,SAASuK,GAAGvK,EAAE,CAAC,GAAGyB,EAAE,OAAOyI,GAAG,EAAE,EAAElK,CAAC,EAAE,GAAGyD,EAAEzD,EAAE,EAAE,EAAEiK,IAAI,CAAC,QAAQhK,KAAK8J,GAAGL,GAAGzJ,CAAC,EAAE,IAAIA,KAAK2J,GAAGF,GAAGzJ,CAAC,EAAE2J,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE3I,EAAE,SAASrB,CAAC,EAAE4D,GAAE,EAAE,CAACvB,EAAErC,EAAE,IAAIyJ,GAAGzJ,CAAC,CAAC,CAAC,CAAC,SAASwK,GAAGxK,EAAE,CAAC,GAAGyB,EAAE,OAAOyI,GAAG,EAAE,EAAElK,CAAC,EAAE+I,GAAG/I,CAAC,CAAC,CAAC,IAAI+I,GAAG/I,GAAG,CAAC,GAAGyD,EAAEzD,EAAEyB,EAAE,MAAM+I,GAAGxK,CAAC,EAAE,SAASuK,GAAGvK,CAAC,CAAC,EAAE4J,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAEU,GAAG,CAAC,EAAET,GAAG,CAAC,EAAEU,GAAG1K,GAAG,CAAC,IAAIC,EAAED,EAAE,GAAG,OAAOgK,GAAG/J,CAAC,EAAE2J,GAAG,KAAK5J,CAAC,EAAE+J,GAAG,OAAOA,GAAG,QAAQ/J,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,EAAE2K,GAAG1K,CAAC,CAAC,EAAE,SAAS8C,IAAI,CAAC0H,GAAG,QAASzK,GAAGA,EAAE,CAAE,CAAC,CAAC,IAAI8J,GAAG9J,GAAG,IAAI,QAASC,GAAG,CAACD,EAAE,UAAU4B,GAAG,CAAC,IAAIrB,GAAGqB,EAAEA,EAAE,MAAM,IAAI,GAAGA,EAAE,cAAcA,EAAE,cAAcuB,GAAG,EAAE,CAAC,IAAI1C,EAAEuJ,GAAGpI,EAAE,YAAY,EAAEnB,EAAEA,EAAE,YAAYmB,EAAEA,EAAE,YAAY,EAAEa,EAAE,0CAA0ClC,CAAC,uBAAuBqB,EAAE,YAAY,qCAAqC,CAAC,MAAsBrB,IAAjB,eAAmB8C,GAAG,EAAkB9C,IAAhB,cAAkBoJ,GAAG/H,CAAC,EAAoBrB,IAAlB,gBAAoBmK,GAAGV,GAAGpI,EAAE,MAAM,CAAC,EAAiBrB,IAAf,cAAkBqB,EAAEA,EAAE,OAAOrB,EAAEyJ,GAAGpI,CAAC,EAAE,OAAOoI,GAAGpI,CAAC,EAAE8H,GAAGnJ,CAAC,EAAEoK,GAAG/I,CAAC,EAAEmI,GAAG,OAAOA,GAAG,QAAQxJ,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,GAAoBA,IAAjB,eAAmByJ,GAAGpI,EAAE,MAAM,EAAE,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAarB,IAAX,UAAcP,EAAE,OAAO,GAAGC,EAAED,CAAC,GAAaO,IAAV,QAAY,MAAM,UAAUqB,EAAE,QAAQ,KAAKA,EAAE,IAAI,EAAE,EAAmBA,EAAE,SAAnB,eAA0B5B,EAAE,YAAY4B,CAAC,EAAkBrB,IAAhB,cAAkBc,EAAEO,EAAE,OAAO,EAAE,GAAGA,EAAE,IAAI,EAAErB,GAAGkC,EAAE,kCAAkClC,CAAC,EAAE,CAAC,EAAEP,EAAE,QAAQA,GAAG,CAAC,MAAMyC,EAAE,yBAAyBzC,EAAE,QAAQ,IAAIA,EAAE,MAAM,KAAKA,EAAE,OAAO,EAAE,EAAEA,CAAC,EAAE,IAAI4B,EAAErB,EAAE,CAAC,EAAE,IAAIqB,IAAI,CAAC,QAAQ,EAAEP,EAAE,eAAeO,CAAC,GAAGrB,EAAE,KAAKqB,CAAC,EAAE5B,EAAE,YAAY,CAAC,IAAI,OAAO,SAASO,EAAE,WAAWH,EAAE,WAAWoD,EAAC,CAAC,CAAC,CAAE,EAAE,SAASqG,IAAI,CAAC,IAAI7J,EAAE,IAAI,OAAO,IAAI,IAAI,YAAY,GAAG,EAAE,CAAC,KAAK,SAAS,WAAW,aAAa,KAAK,YAAY,CAAC,EAAE4J,GAAG,KAAK5J,CAAC,CAAC,CAAC,IAAI4K,GAAG5K,GAAG,CAAC,KAAK,EAAEA,EAAE,QAAQA,EAAE,MAAM,EAAEqB,CAAC,CAAC,EAAEyB,GAAG,IAAI,CAAC,IAAI9C,EAAEmD,GAAG,EAAElD,EAAEY,EAAE,EAAEb,EAAE,KAAK,IAAI,CAAC,EAAEA,EAAEa,EAAE,EAAEb,EAAE,KAAK,IAAI,CAAC,EAAE6K,GAAG5K,EAAEA,EAAED,CAAC,EAAEsK,GAAGrK,CAAC,CAAC,EAAEiD,GAAG,CAAClD,EAAEC,IAAI,CAACgK,GAAG,EAAEjK,EAA
E8K,GAAG9K,EAAEC,CAAC,EAAE,EAAEgK,GAAGxG,EAAEzD,EAAEoD,GAAGpD,CAAC,CAAC,EAAE,MAAM+K,EAAE,CAAC,YAAY/K,EAAE,CAAC,KAAK,GAAGA,EAAE,EAAE,CAAC,CAAC,SAAS2E,GAAG3E,EAAEC,EAAE2B,EAAE,CAAC,IAAIrB,EAAE,IAAIwK,GAAG/K,KAAK,CAAC,EAAE,MAAMC,KAAK,EAAE2B,KAAK,EAAEf,EAAE,EAAEN,EAAE,GAAG,KAAK,IAAI,CAAC,EAAE,EAAEM,EAAE,EAAEN,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEN,EAAEY,EAAE,EAAEN,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEqB,EAAE5B,CAAC,CAAC,SAASgL,GAAGhL,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOkB,EAAEyI,GAAG,EAAE,EAAElK,EAAEC,EAAE2B,EAAErB,CAAC,EAAEqE,GAAG5E,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAASqE,GAAG5E,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGP,KAAK,EAAEC,KAAK,EAAE2B,KAAK,EAAErB,KAAK,EAAWmB,IAAT,OAAW,OAAOe,EAAE,qFAAqF,EAAE,EAAE,IAAIhC,EAAE,CAAC,EAAE,OAAOgB,GAAOhB,EAAE,SAAN,EAAauK,GAAGhL,EAAEC,EAAE2B,EAAErB,CAAC,GAAGP,EAAE,CAAC,GAAG4B,EAAE,GAAG5B,EAAE,GAAGO,EAAE,GAAGE,CAAC,EAAEgB,GAAGzB,EAAE,GAAG,cAAc,YAAYA,EAAES,CAAC,EAAE,GAAGkJ,GAAG3J,CAAC,EAAE,CAAC,IAAIiL,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,MAAM,EAAE,OAAOC,GAAG,CAAClL,EAAEC,EAAE2B,IAAI,CAAC,IAAIrB,GAAGN,KAAK,GAAG2B,EAAE,IAAIA,EAAE3B,EAAED,EAAE4B,CAAC,GAAG,EAAEA,GAAGrB,IAAI,EAAEqB,EAAE,GAAG,GAAGA,EAAE3B,GAAGD,EAAE,QAAQiL,GAAG,OAAOA,GAAG,OAAOjL,EAAE,kBAAkB0B,EAAE1B,EAAE,MAAMC,EAAE2B,CAAC,EAAE5B,EAAE,SAASC,EAAE2B,CAAC,CAAC,EAAE,IAAIrB,EAAE,GAAGN,EAAE2B,GAAG,CAAC,IAAInB,EAAET,EAAEC,GAAG,EAAE,GAAG,IAAIQ,EAAE,CAAC,IAAIqB,EAAE,GAAG9B,EAAEC,GAAG,EAAE,IAAS,IAAIQ,IAAV,IAAaF,GAAG,OAAO,cAAc,GAAGE,IAAI,EAAEqB,CAAC,MAAM,CAAC,IAAIC,EAAE,GAAG/B,EAAEC,GAAG,EAAE,OAAOQ,GAAQ,IAAIA,IAAV,KAAc,GAAGA,IAAI,GAAGqB,GAAG,EAAEC,GAAG,EAAEtB,IAAI,GAAGqB,GAAG,GAAGC,GAAG,EAAE,GAAG/B,EAAEC,GAAG,GAAGM,GAAG,OAAO,aAAaE,CAAC,GAAGA,GAAG,MAAMF,GAAG,OAAO,aAAa,MAAME,GAAG,GAAG,MAAM,KAAKA,CAAC,EAAE,CAAC,MAAMF,GAAG,OAAO,aAAaE,CAAC,CAAC,CAAC,OAAOF,CAAC,EAAEgJ,GAAG,CAACvJ,EAAEC,KAAKD,KAAK,GAAGkL,GAAG3K,EAAE,EAAEP,EAAEC,CAAC,EAAE,GAAG,SAAS4E,GAAG7E,EAAEC,EAAE2B,EAAE,CAAC,OAAOH,EAAEyI,GAAG,EAAE,EAAElK,EAAEC,EAAE2B,CAAC,EAAE,CAAC,CAAC,SAASkD,GAAG9E,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAOyI,GAAG,EAAE,EAAElK,EAAEC,CAAC,CAAC,CAAC,IAAIkL,GAAGnL,GAAG,CAAC,QAAQC,EAAE,EAAE2B,EAAE,EAAEA,EAAE5B,EAAE,OAAO,EAAE4B,EAAE,CAAC,IAAIrB,EAAEP,EAAE,WAAW4B,CAAC,EAAE,KAAKrB,EAAEN,IAAI,MAAMM,EAAEN,GAAG,EAAE,OAAOM,GAAG,OAAOA,GAAGN,GAAG,EAAE,EAAE2B,GAAG3B,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEmL,GAAG,CAACpL,EAAEC,EAAE2B,EAAErB,IAAI,CAAC,GAAG,EAAE,EAAEA,GAAG,MAAO,GAAE,IAAIE,EAAEmB,KAAK,EAAErB,EAAEqB,EAAErB,EAAE,EAAE,QAAQuB,EAAE,EAAEA,EAAE9B,EAAE,OAAO,EAAE8B,EAAE,CAAC,IAAIC,EAAE/B,EAAE,WAAW8B,CAAC,EAAE,GAAG,OAAOC,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK/B,EAAE,WAAW,EAAE8B,CAAC,GAAG,KAAKC,EAAE,CAAC,GAAGH,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAEG,CAAC,KAAK,CAAC,GAAG,MAAMA,EAAE,CAAC,GAAGH,EAAE,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,CAAC,KAAK,CAAC,GAAG,OAAOA,EAAE,CAAC,GAAGH,EAAE,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,EAAE,KAAK,CAAC,GAAGH,EAAE,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,GAAG9B,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,GAAG,EAAE,CAAC9B,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,EAAE,EAAE,CAAC9B,EAAE2B,MAAM,CAAC,EAAE,IAAI,GAAGG,CAAC,CAAC,CAAC,OAAO9B,EAAE2B,IAAI,CAAC,EAAE,EAAEA,EAAEnB,CAAC,EAAE4K,GAAG,CAACrL,EAAEC,EAAE2B,IAAIwJ,GAAGpL,EAAEO,EAAE,EAAEN,EAAE2B,CAAC,EAAE,SAASmD,GAAG/E,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAOyI,GAAG,EAAE,EAAElK,EAAEC,CAAC,CAAC,CAAC,SAAS+E,GAAGhF,EAAEC,EAAE2B,EAAE,CAAC,GAAGH,EAAE,OAAOyI,GAAG,EAAE,EAAElK,EAAEC,EAAE2B,CAAC,CAAC,CAAC,SAASqD,GAAGjF,EAAEC,EAAE2B,EAAE,CAAC,OAAOH,EAAEyI,GAAG,EAAE,EAAElK,EAAEC,EAAE2B,CAAC,EAAE,CAAC,CAAC,SAASsD,GAAGlF,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAOyI,GAAG,EAAE,EAAElK,EAAEC,CAAC,CAAC,CAAC,SAASkF,GAAGnF,EAAE
C,EAAE2B,EAAE,CAAC,GAAGH,EAAE,OAAOyI,GAAG,EAAE,EAAElK,EAAEC,EAAE2B,CAAC,CAAC,CAAC,SAASwD,GAAGpF,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGkB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAAS8E,GAAGrF,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGkB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAAS+E,GAAGtF,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGkB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAASgF,GAAGvF,EAAE,CAAC,GAAGyB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,CAAC,CAAC,CAAC,SAASwF,GAAGxF,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,CAAC,CAAC,CAAC,SAASwF,GAAGzF,EAAEC,EAAE2B,EAAE,CAAC,GAAGH,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,CAAC,CAAC,CAAC,IAAI0J,GAAGC,GAAG7F,GAAG,IAAI,CAACtB,GAAE,EAAE,CAAC,EAAEoH,GAAGxL,GAAG,CAAC,QAAQC,EAAE,GAAGM,EAAE,EAAEP,IAAI,CAAC,GAAGC,GAAGqL,GAAG/K,EAAE,EAAEP,MAAM,CAAC,CAAC,EAAE,OAAOC,CAAC,EAAEwL,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASC,GAAG5L,EAAEC,EAAE2B,EAAE,CAAC,EAAE,CAAC,GAAG,EAAE,mBAAmB3B,GAAG,MAAM,IAAI,UAAU,yDAAyD,EAAE,OAAO,SAASD,EAAEC,EAAE2B,EAAE,CAAC,EAAE,CAAC,IAAIrB,EAAEN,EAAE,KAAK,GAAG,CAACD,EAAE,MAAM,IAAIuL,GAAG,SAAShL,CAAC,+CAA+C,EAAE,GAAGmL,GAAG,eAAe1L,CAAC,EAAE,CAAC,GAAG4B,EAAE,GAAG,OAAO,MAAM,IAAI2J,GAAG,yBAAyBhL,CAAC,SAAS,CAAC,CAACmL,GAAG1L,CAAC,EAAEC,EAAE,OAAO0L,GAAG3L,CAAC,EAAEyL,GAAG,eAAezL,CAAC,IAAIC,EAAEwL,GAAGzL,CAAC,EAAE,OAAOyL,GAAGzL,CAAC,EAAEC,EAAE,QAASD,GAAGA,EAAE,CAAE,EAAE,EAAEA,EAAEC,EAAE2B,CAAC,CAAC,CAAC,IAAIiK,GAAG,CAAC7L,EAAEC,EAAEc,IAAI,CAAC,OAAOd,EAAE,CAAC,IAAK,GAAE,OAAOc,EAAEf,GAAG,EAAE,EAAEA,IAAI,CAAC,EAAEA,GAAGO,EAAE,EAAEP,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOe,EAAEf,GAAGS,EAAE,EAAET,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOe,EAAEf,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAEA,GAAGa,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOe,EAAEf,GAAG0D,EAAE1D,IAAI,CAAC,EAAEA,GAAG2D,EAAE3D,IAAI,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,0BAA0BC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS2F,GAAG3F,EAAEC,EAAE2B,EAAE,CAACA,KAAK,EAAEgK,GAAG5L,KAAK,EAAE,CAAC,KAAKC,EAAEuL,GAAGvL,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,GAAa,OAAOA,GAAjB,UAA8B,OAAOA,GAAjB,SAAmB,MAAMA,EAASA,IAAP,KAAS,QAAkBD,EAAE,OAAOC,IAApB,UAAkCD,IAAV,SAA0BA,IAAb,WAAeC,EAAE,SAAS,EAAE,GAAGA,EAAE,IAAI,UAAU,mBAAmBA,CAAC,QAAQ,KAAK,IAAI,EAAE,EAAE,OAAgB,OAAOA,GAAjB,WAAqBA,EAAE,OAAOA,CAAC,GAAGA,CAAC,EAAE,eAAe6L,GAAG,qBAAqBD,GAAG5L,EAAE2B,EAAM3B,EAAE,QAAQ,GAAG,GAAjB,EAAkB,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI6L,GAAG,EAAE,SAASlG,GAAG5F,EAAEC,EAAE2B,EAAEnB,EAAE,CAACmL,GAAG5L,KAAK,EAAE,CAAC,KAAKC,EAAEuL,GAAGvL,IAAI,CAAC,EAAE,aAAa,SAASD,EAAE,CAAC,MAAM,CAAC,CAACA,CAAC,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,OAAOA,EAAE2B,EAAEnB,CAAC,EAAE,eAAeqL,GAAG,qBAAqB,SAAS9L,EAAE,CAAC,OAAO,KAAK,aAAaO,EAAE,EAAEP,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI+L,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASlF,GAAG9G,EAAE,CAAC,GAAGA,KAAK,IAAO,EAAEgM,GAAGhM,EAAE,CAAC,GAAX,IAAegM,GAAGhM,CAAC,EAAE,OAAO+L,GAAG,KAAK/L,CAAC,EAAE,CAAC,IAAIiM,GAAGjM,GAAG,CAAC,GAAG,CAACA,EAAE,MAAM,IAAIuL,GAAG,oCAAoCvL,CAAC,EAAE,OAAOgM,GAAGhM,CAAC,CAAC,EAAEkM,GAAGlM,GAAG,CAAC,OAAOA,EAAE,CAAC,KAAK,OAAO,MAAO,GAAE,KAAK,KAAK,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,QAAQ,IAAMC,EAAE8L,GAAG,IAAI,GAAGC,GAAG,OAAO,OAAOA,GAAG/L,CAAC,EAAED,EAAEgM,GAAG/L,EAAE,CAAC,EAAE,EAAEA,CAAC,CAAC,EAAE,SAASkM,GAAGnM,EAAE,CAAC,OAAO,KAAK,aAAaa,EAAE,EAAEb,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIoM,GAAG,CAAC,KAAK,kBAAkB,aAAapM,GAAG,CAAC,IAAIC,EAAEgM,GAAGjM,CAAC,EAAE,OAAO8G,GAAG9G,CAAC,EAAEC,CAAC,EAAE,WAAW,CAACD,EAAEC,IAAIiM,GAAGjM,CAAC,EAAE,eAAe6L,GAAG,qBAAqBK,GAAG,GAAG,IAAI,EAAE,SAAStG,GAAG7F,EAAE,CAAC,OAAO4L,GAA
G5L,IAAI,EAAEoM,EAAE,CAAC,CAAC,IAAIC,GAAG,CAACrM,EAAEC,IAAI,CAAC,OAAOA,EAAE,CAAC,IAAK,GAAE,OAAO,SAASD,EAAE,CAAC,OAAO,KAAK,aAAae,EAAE,EAAEf,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,IAAK,GAAE,OAAO,SAASA,EAAE,CAAC,OAAO,KAAK,aAAaiB,EAAE,EAAEjB,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,wBAAwBC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS8F,GAAG9F,EAAEC,EAAE2B,EAAE,CAACA,KAAK,EAAEgK,GAAG5L,KAAK,EAAE,CAAC,KAAKC,EAAEuL,GAAGvL,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,CAACA,EAAEC,IAAIA,EAAE,eAAe6L,GAAG,qBAAqBO,GAAGpM,EAAE2B,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAASmE,GAAG/F,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE,CAAC,GAAGT,KAAK,EAAE4B,KAAK,EAAE3B,EAAEuL,GAAGvL,IAAI,CAAC,EAAOQ,IAAL,KAASA,EAAE,YAAYA,EAAET,GAAGA,EAAMO,IAAJ,EAAM,CAAC,IAAIuB,EAAE,GAAG,EAAEF,EAAEnB,EAAET,GAAGA,GAAG8B,IAAIA,CAAC,CAAC,IAAIC,EAAE9B,EAAE,SAAS,UAAU,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,IAAI,CAAC,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,CAAC,EAAE2L,GAAG5L,EAAE,CAAC,KAAKC,EAAE,aAAaQ,EAAE,WAAWsB,EAAE,eAAe+J,GAAG,qBAAqBD,GAAG5L,EAAE2B,EAAMrB,IAAJ,CAAK,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAASyF,GAAGhG,EAAEC,EAAEM,EAAE,CAAC,SAASE,EAAET,EAAE,CAAC,IAAIC,EAAEY,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE,OAAOA,EAAEa,EAAE,EAAEb,EAAE,IAAI,IAAI,CAAC,EAAE,IAAI8B,EAAE,EAAE,EAAE,OAAO9B,EAAEC,CAAC,CAAC,CAAC,IAAI6B,EAAE,CAAC,UAAU,WAAW,WAAW,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,cAAc,EAAE7B,CAAC,EAAE2L,GAAG5L,KAAK,EAAE,CAAC,KAAKO,EAAEiL,GAAGjL,IAAI,CAAC,EAAE,aAAaE,EAAE,eAAeqL,GAAG,qBAAqBrL,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,SAASwF,GAAGjG,EAAEC,EAAE,CAACD,KAAK,EAAE,IAAI4B,GAAmB3B,EAAEuL,GAAGvL,IAAI,CAAC,KAA3B,cAA8B2L,GAAG5L,EAAE,CAAC,KAAKC,EAAE,aAAa,SAASD,EAAE,CAAC,IAAIC,EAAEY,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAES,EAAET,EAAE,EAAE,GAAG4B,EAAE,QAAQE,EAAErB,EAAEsB,EAAE,EAAEA,GAAG9B,EAAE,EAAE8B,EAAE,CAAC,IAAIhB,EAAEN,EAAEsB,EAAE,GAAGA,GAAG9B,GAAMM,EAAE,EAAEQ,IAAI,CAAC,GAAZ,EAAc,CAAC,GAAGe,EAAEyH,GAAGzH,EAAEf,EAAEe,CAAC,EAAWb,IAAT,OAAW,IAAIA,EAAEa,OAAOb,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGa,EAAEA,EAAEf,EAAE,CAAC,CAAC,KAAK,CAAC,IAAIE,EAAE,MAAMhB,CAAC,EAAE8B,EAAE,EAAEA,EAAE9B,EAAE,EAAE8B,EAAEd,EAAEc,CAAC,EAAE,OAAO,aAAaxB,EAAE,EAAEE,EAAEsB,IAAI,CAAC,CAAC,EAAEd,EAAEA,EAAE,KAAK,EAAE,CAAC,CAAC,OAAOqL,GAAGtM,CAAC,EAAEiB,CAAC,EAAE,WAAW,SAASjB,EAAEC,EAAE,CAACA,aAAa,cAAcA,EAAE,IAAI,WAAWA,CAAC,GAAG,IAAIQ,EAAY,OAAOR,GAAjB,SAAmB,GAAG,EAAEQ,GAAGR,aAAa,YAAYA,aAAa,mBAAmBA,aAAa,WAAW,MAAM,IAAIsL,GAAG,uCAAuC,EAAE,IAAIzJ,EAAEF,GAAGnB,EAAE0K,GAAGlL,CAAC,EAAEA,EAAE,OAAO8B,EAAEwK,GAAG,EAAEzK,EAAE,CAAC,EAAEf,EAAEgB,EAAE,EAAE,GAAGlB,EAAE,EAAEkB,IAAI,IAAI,CAAC,EAAED,EAAEF,GAAGnB,EAAE4K,GAAGpL,EAAEc,EAAEe,EAAE,CAAC,UAAUrB,EAAE,IAAIA,EAAE,EAAEA,EAAEqB,EAAE,EAAErB,EAAE,CAAC,IAAIQ,EAAEhB,EAAE,WAAWQ,CAAC,EAAE,GAAG,IAAIQ,EAAE,MAAMqL,GAAGvL,CAAC,EAAE,IAAIwK,GAAG,wDAAwD,EAAEhL,EAAE,EAAEQ,EAAEN,IAAI,CAAC,EAAEQ,CAAC,KAAM,KAAIR,EAAE,EAAEA,EAAEqB,EAAE,EAAErB,EAAEF,EAAE,EAAEQ,EAAEN,IAAI,CAAC,EAAER,EAAEQ,CAAC,EAAE,OAAcT,IAAP,MAAUA,EAAE,KAAKsM,GAAGvK,CAAC,EAAEA,CAAC,EAAE,eAAe+J,GAAG,qBAAqBK,GAAG,GAAGnM,EAAE,CAACsM,GAAGtM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAIwM,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,UAAU,EAAE,OAAOC,GAAG,CAACzM,EAAEC,IAAI,CAAC,QAAQ2B,EAAE5B,GAAG,EAAE+B,EAAEH,EAAE3B,EAAE,EAAE,EAAE2B,GAAGG,IAAI,EAAE,EAAEH,IAAI,CAAC,GAAG,EAAEA,EAAE,GAAG,IAAIA,IAAI,GAAG5B,GAAGwM,GAAG,OAAOA,GAAG,OAAOjM,EAAE,EAAE,MAAMP,EAAE4B,CAAC,CAAC,EAAE,IAAIA,EAAE,GAAGG,EAAE,EAAE,EAAEA,GAAG9B,EAAE,GAAG,EAAE8B,EAAE,CAAC,IAAIlB,EAAEJ,EAAE,EAAET,EAAE,EAAE+B,IAAI,IAAI,CAAC,EAAE,GAAMlB,GAAH,EAAK,MAAMe,GAAG,OAAO,aAAaf,CAAC,CAAC,CAAC,OAAOe,CAAC,EAAE8K,GAAG,CAAC1M,EAAEC,EAAE2B,IAAI,CAAC,GAAGA,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIrB,EAAEN,EAAE2B,GAAGA,GAAG,GAAG,EAAE5B,EAAE,OAAO4B,EAAE,EAAE5B,EAAE,OAA
O,QAAQ8B,EAAE,EAAEA,EAAEF,EAAE,EAAEE,EAAE,CAAC,IAAIC,EAAE/B,EAAE,WAAW8B,CAAC,EAAErB,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE8B,EAAE9B,GAAG,CAAC,CAAC,OAAOQ,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEM,CAAC,EAAEoM,GAAG3M,GAAG,EAAEA,EAAE,OAAO4M,GAAG,CAAC5M,EAAEC,IAAI,CAAC,QAAQ2B,EAAE,EAAErB,EAAE,GAAG,EAAEqB,GAAG3B,EAAE,IAAI,CAAC,IAAIQ,EAAE,EAAE,EAAET,EAAE,EAAE4B,IAAI,IAAI,CAAC,EAAE,GAAMnB,GAAH,EAAK,MAAM,EAAEmB,EAAE,OAAOnB,GAAGA,GAAG,MAAMF,GAAG,OAAO,aAAa,MAAME,GAAG,GAAG,MAAM,KAAKA,CAAC,GAAGF,GAAG,OAAO,aAAaE,CAAC,CAAC,CAAC,OAAOF,CAAC,EAAEsM,GAAG,CAAC7M,EAAEC,EAAE2B,IAAI,CAAC,GAAG3B,KAAK,EAAE2B,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIrB,EAAEN,EAAE2B,EAAErB,EAAEqB,EAAE,EAAE,QAAQnB,EAAE,EAAEA,EAAET,EAAE,OAAO,EAAES,EAAE,CAAC,IAAIqB,EAAE9B,EAAE,WAAWS,CAAC,EAAE,GAAG,OAAOqB,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK9B,EAAE,WAAW,EAAES,CAAC,GAAG,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE6B,GAAG7B,GAAG,GAAG,EAAE2B,EAAE,KAAK,CAAC,OAAO,EAAE,EAAE3B,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEM,CAAC,EAAEuM,GAAG9M,GAAG,CAAC,QAAQC,EAAE,EAAE2B,EAAE,EAAEA,EAAE5B,EAAE,OAAO,EAAE4B,EAAE,CAAC,IAAIrB,EAAEP,EAAE,WAAW4B,CAAC,EAAE,OAAOrB,GAAG,OAAOA,GAAG,EAAEqB,EAAE3B,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAE,SAASiG,GAAGlG,EAAEC,EAAE2B,EAAE,CAAC,GAAG5B,KAAK,EAAEC,KAAK,EAAE2B,EAAE4J,GAAG5J,KAAK,CAAC,EAAM3B,IAAJ,EAAM,IAAIM,EAAEkM,GAAGhM,EAAEiM,GAAG3K,EAAE4K,GAAG5L,EAAEf,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,OAAWC,IAAJ,IAAQM,EAAEqM,GAAGnM,EAAEoM,GAAG9K,EAAE+K,GAAG/L,EAAEf,GAAGa,EAAE,EAAEb,IAAI,IAAI,CAAC,GAAG4L,GAAG5L,EAAE,CAAC,KAAK4B,EAAE,aAAa5B,GAAG,CAAC,QAAQ4B,EAAEnB,EAAEI,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE8B,EAAE9B,EAAE,EAAE+B,GAAE,EAAEA,IAAGtB,EAAE,EAAEsB,GAAE,CAAC,IAAId,GAAEjB,EAAE,EAAE+B,GAAE9B,EAAE8B,IAAGtB,GAAMM,EAAEE,EAAC,GAAN,IAAUa,EAAEvB,EAAEuB,EAAEb,GAAEa,CAAC,EAAWF,IAAT,OAAWA,EAAEE,GAAGF,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGE,GAAGA,EAAEb,GAAEhB,EAAE,CAAC,OAAOqM,GAAGtM,CAAC,EAAE4B,CAAC,EAAE,WAAW,CAAC5B,EAAEO,IAAI,CAAC,GAAa,OAAOA,GAAjB,SAAmB,MAAM,IAAIgL,GAAG,6CAA6C3J,CAAC,EAAE,EAAE,IAAIE,EAAEC,EAAExB,CAAC,EAAEQ,EAAEwL,GAAG,EAAEzK,EAAE7B,CAAC,EAAE,OAAOY,EAAE,EAAEE,IAAI,IAAI,CAAC,EAAEe,EAAE7B,EAAEQ,EAAEF,EAAEQ,EAAE,EAAEe,EAAE7B,CAAC,EAASD,IAAP,MAAUA,EAAE,KAAKsM,GAAGvL,CAAC,EAAEA,CAAC,EAAE,eAAe+K,GAAG,qBAAqBK,GAAG,GAAGnM,EAAE,CAACsM,GAAGtM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAASmG,GAAGnG,EAAEC,EAAE,CAAC2L,GAAG5L,KAAK,EAAE,CAAC,GAAG,GAAG,KAAKC,EAAEuL,GAAGvL,IAAI,CAAC,EAAE,eAAe,EAAE,aAAa,IAAI,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAImG,GAAG,IAAI,EAAE,SAASC,GAAGrG,EAAE,CAAC4C,GAAG5C,IAAI,EAAE,CAACwB,EAAE,EAAE,CAACD,EAAE,OAAO,EAAE,EAAEwB,GAAG,CAAC,CAAC,IAAIgK,GAAG/M,GAAG,CAAC,GAAG,CAAC4D,GAAE,GAAG,CAAC,GAAG5D,EAAE,EAAE,EAAE,EAAEiK,IAAI,GAAG,CAACxI,EAAE2B,GAAGK,CAAC,EAAEsF,GAAGtF,CAAC,CAAC,OAAOzD,EAAE,CAACA,aAAayJ,IAAczJ,GAAV,UAAaqC,EAAE,EAAErC,CAAC,CAAC,CAAC,OAAOA,EAAE,CAACA,aAAayJ,IAAczJ,GAAV,UAAaqC,EAAE,EAAErC,CAAC,CAAC,CAAC,EAAE,SAAS6C,GAAG7C,EAAE,CAACA,KAAK,EAAc,OAAO,QAAQ,IAA3B,aAAgC,QAAQ,GAAG,EAAE,EAAEA,IAAI,EAAEA,CAAC,EAAE,MAAM,KAAKqD,EAAE,EAAErD,GAAG,IAAI,QAAQ,MAAM,EAAE,EAAEA,IAAI,EAAE,CAAC,EAAE,CAAC,IAAIqD,GAAG,IAAI,CAAC,IAAIrD,EAAEmD,GAAG,EAAEnD,IAAI6C,GAAG7C,CAAC,EAAE+M,GAAGC,EAAE,EAAE,EAAE,SAAS1G,GAAGtG,EAAEC,EAAE,EAAED,KAAK,IAAIC,IAAI,EAAE,WAAWoD,EAAE,EAAE5B,EAAE,YAAY,CAAC,aAAazB,EAAE,IAAI,cAAc,CAAC,GAAGA,EAAEgK,GAAGhK,CAAC,IAAIA,EAAE,YAAY,CAAC,IAAI,cAAc,CAAC,CAAC,CAAC,IAAIiN,GAAG,CAAC,EAAE,SAAS1G,GAAGvG,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE,CAAC,IAAIR,KAAK,EAAEM,GAAG,EAAE0M,GAAG,OAAO1M,EAAEqB,EAAEnB,IAAI,IAAI,EAAEA,EAAE,EAAEA,EAAEF,EAAEE,IAAIwM,GAAGxM,CAAC,EAAEiD,EAAE9B,EAAE,EAAEnB,CAAC,EAAEiD,EAAE9B,EAAE,EAAEnB,EAAE,CAAC,EAAEQ,EAAE,EAAEW,EAAE,EAAEnB,EAAE,IAAI,CAAC,EAAE,OAAOR,E
AAEqJ,GAAGrJ,CAAC,EAAEiN,GAAGlN,CAAC,GAAG,GAAGiN,EAAE,CAAC,CAAC,SAASzG,GAAGxG,EAAE,CAACA,KAAK,EAAEyB,EAAE,YAAY,CAAC,IAAI,gBAAgB,OAAOzB,CAAC,CAAC,EAAE0K,GAAGV,GAAGhK,CAAC,CAAC,CAAC,CAAC,SAASyG,GAAGzG,EAAE,CAAC,CAAC,IAAImN,GAAG,CAACnN,EAAEC,IAAI,CAAC,IAAI2B,EAAE8J,GAAG1L,CAAC,EAAE,GAAY4B,IAAT,OAAW,MAAM5B,EAAEoN,GAAGpN,CAAC,EAAE4B,EAAE4J,GAAGxL,CAAC,EAAEsM,GAAGtM,CAAC,EAAE,IAAIuL,GAAG,GAAGtL,CAAC,qBAAqB2B,CAAC,EAAE,EAAE,OAAOA,CAAC,EAAEyL,GAAG,CAACrN,EAAEC,EAAE2B,IAAI,CAAC,IAAIrB,EAAE,CAAC,EAAE,OAAOP,EAAEA,EAAE,WAAWO,EAAEqB,CAAC,EAAErB,EAAE,SAASM,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAEiM,GAAG3L,CAAC,GAAGP,CAAC,EAAE,SAAS0G,GAAG1G,EAAEC,EAAE2B,EAAE,CAAC,OAAO3B,KAAK,EAAE2B,KAAK,EAAE5B,EAAEiM,GAAGjM,IAAI,CAAC,EAAEC,EAAEkN,GAAGlN,EAAE,WAAW,EAAEoN,GAAGpN,EAAE2B,EAAE5B,CAAC,CAAC,CAAC,IAAIsN,GAAGtN,GAAG,CAAC,GAAG,CAACA,EAAE,CAAC,OAAOA,EAAE,CAACoE,GAAEpE,CAAC,CAAC,CAAC,EAAEuN,GAAG,EAAE1L,GAAG,KAAK2L,GAAG,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAE5L,GAAG,KAAK6L,GAAG,CAAC,EAAE,SAASrE,GAAGxJ,EAAE,CAAC,OAAO,SAASA,EAAE,CAAC,GAAG,CAAC4D,GAAE,CAAC,GAAO2J,KAAJ,EAAO,CAAC,IAAItN,EAAE,GAAG2B,EAAE,GAAG5B,EAAG,CAACA,EAAE,IAAI,CAAC,GAAG,CAAC4D,KAAI4J,GAAGxN,EAAEC,EAAE,GAAG2B,GAAG,CAAC2L,GAAG,EAAED,GAAI,IAAIQ,GAAGjM,EAAE,CAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAE7B,EAAE,GAAG,GAAG,CAAC,IAAIO,EAAE,UAAU,CAAC,IAAIP,EAAE,EAAE,EAAE6B,GAAG,IAAI,IAAI,CAAC,EAAE,OAAO7B,EAAE+N,EAAGJ,GAAG3N,CAAC,CAAC,EAAE,EAAEiK,GAAGjK,EAAE,CAAC,EAAE,CAAC,OAAOC,EAAE,CAACM,EAAEN,EAAED,EAAE,EAAE,CAAC,IAAIS,EAAE,GAAG,GAAG,CAACoB,GAAG,CAAC,IAAIC,EAAEE,GAAGF,IAAIE,GAAG,MAAMhC,EAAE8B,EAAE,OAAOA,EAAE,SAASvB,CAAC,EAAEE,EAAE,GAAG,CAAC,GAAGT,GAAG,CAACS,EAAE,MAAMF,CAAC,CAAC,CAAE,EAAEqB,EAAE,GAAG3B,IAAIsN,GAAG,EAAE1L,GAAG,UAAU,CAAC,IAAI7B,EAAEuM,GAAG,KAAK,EAAEtM,EAAED,EAAE,GAAGa,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAEC,EAAEY,EAAE,EAAEb,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,MAAMA,EAAEwN,GAAG,CAAC,EAAE,IAAI7L,EAAE8L,GAAGzN,CAAC,EAAE,OAAgB2B,IAAT,SAAaA,EAAEgM,KAAKF,GAAGzN,CAAC,EAAE2B,EAAE+L,GAAG/L,CAAC,EAAE3B,GAAGA,EAAE2B,EAAE,EAAE,EAAE5B,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAED,CAAC,EAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,MAAM,EAAEsN,GAAI,IAAIU,GAAGnM,EAAE,CAAE,EAAE,MAAU0L,KAAJ,GAAQA,GAAG,EAAED,GAAGW,EAAE,EAAE3B,GAAGzK,EAAE,EAAEA,GAAG,KAAKgM,GAAG,QAAQd,EAAE,GAAG3I,GAAE,kBAAkBmJ,EAAE,EAAE,EAAE,OAAOC,EAAE,CAAC,EAAGvN,GAAG,CAACD,EAAE,EAAE,KAAKC,CAAC,CAAC,CAAE,CAAC,CAAC,SAAS0G,GAAG3G,EAAE,CAAC,OAAOA,KAAK,EAAEwJ,GAAI,KAAKxJ,EAAEiM,GAAGjM,CAAC,GAAG,KAAKkM,EAAE,CAAE,CAAC,CAAC,IAAIgC,GAAG,CAAC,EAAE,SAAStH,GAAG5G,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOqB,KAAK,EAAErB,KAAK,GAAGP,EAAEkO,GAAGlO,IAAI,CAAC,GAAG,KAAKC,EAAEgM,GAAGhM,IAAI,CAAC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,IAAI4N,GAAG,CAAC,EAAEC,GAAGpO,GAAG,CAAC,IAAIC,EAAEkO,GAAGnO,CAAC,EAAE,OAAgBC,IAAT,OAAWuL,GAAGxL,CAAC,EAAEC,CAAC,EAAE,SAAS4G,GAAG7G,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE,CAAC,OAAOmB,KAAK,EAAErB,KAAK,EAAEE,KAAK,GAAGT,EAAEkO,GAAGlO,IAAI,CAAC,GAAGC,EAAEgM,GAAGhM,IAAI,CAAC,EAAEA,EAAE2B,EAAEwM,GAAGxM,CAAC,CAAC,EAAErB,EAAEE,CAAC,CAAC,CAAC,IAAI4N,GAAG,IAAc,OAAO,YAAjB,SAA4B,WAAW,SAAS,aAAa,EAAE,EAAE,SAAStH,GAAG/G,EAAE,CAAC,OAAWA,KAAK,IAAT,EAAYkM,GAAGmC,GAAG,CAAC,GAAGrO,EAAEoO,GAAGpO,CAAC,EAAEkM,GAAGmC,GAAG,EAAErO,CAAC,CAAC,EAAE,CAAC,IAAIsO,GAAGtO,GAAG,CAAC,IAAIC,EAAEiO,GAAG,OAAO,OAAOA,GAAG,KAAKlO,CAAC,EAAEC,CAAC,EAAEsO,GAAG,CAACvO,EAAEC,IAAI,CAAC,QAAQ2B,EAAE,MAAM5B,CAAC,EAAEO,EAAE,EAAEA,EAAEP,EAAE,EAAEO,EAAEqB,EAAErB,CAAC,EAAE4M,GAAGtM,EAAE,EAAEZ,EAAE,EAAEM,IAAI,IAAI,CAAC,EAAE,aAAaA,CAAC,EAAE,OAAOqB,CAAC,EAAE4M,GAAG,CAACxO,EAAEC,IAAI,OAAO,eAAeA,EAAE,OAAO,CAAC,MAAMD,CAAC,CAAC,EAAE,SAASgH,GAAGhH,EAAEC,EAAE2B,EAAE,CA
AC,IAAIrB,GAAGN,EAAEsO,GAAGvO,EAAEC,IAAI,CAAC,GAAG,MAAM,EAAED,IAAI,IAAIS,EAAE;AAAA,EAAwDqB,EAAE,EAAEC,EAAE,CAAC,EAAMH,IAAJ,GAAOG,EAAE,KAAK,KAAK,EAAE,QAAQlB,EAAE,CAAC,SAAS,EAAEE,EAAE,CAACR,CAAC,EAAEU,EAAE,EAAEA,EAAEjB,EAAE,EAAEiB,EAAEc,EAAE,KAAK,MAAMd,CAAC,EAAEJ,EAAE,KAAK,UAAUI,CAAC,EAAEF,EAAE,KAAKd,EAAEgB,CAAC,CAAC,EAAER,GAAG,YAAYQ,CAAC,aAAaA,CAAC,6BAA6Ba,EAAE,IAAIA,EAAE,EAAE;AAAA,EAAOA,GAAG7B,EAAEgB,CAAC,EAAE,eAAe,OAAOR,GAAG,cAAkBmB,IAAJ,EAAM,WAAW,WAAW,IAAIG,EAAE,KAAK,IAAI,CAAC;AAAA,EAAOxB,EAAE,KAAKM,EAAE,KAAK,mBAAmB,EAAEE,EAAE,KAAKsM,EAAE,EAAE5M,GAAG;AAAA,GAA8DI,EAAE,KAAKJ,EAAE;AAAA,CAAM,EAAET,EAAE,SAASA,EAAE,CAAC,IAAIC,GAAE,SAAS,GAAG,EAAEA,cAAa,UAAU,MAAM,IAAI,UAAU,qCAAqC,OAAOA,EAAC,0BAA0B,EAAE,IAAI2B,GAAE4M,GAAGvO,GAAE,MAAM,sBAAuB,UAAU,CAAC,CAAE,EAAE,OAAO2B,GAAE,UAAU3B,GAAE,UAAU2B,GAAE,IAAIA,IAAG5B,EAAEC,GAAE,MAAM2B,GAAE5B,CAAC,aAAa,OAAOA,EAAE4B,EAAC,EAAEf,CAAC,EAAE,GAAGE,CAAC,EAAEa,EAAE,iBAAiB3B,EAAE,IAAKD,GAAGA,EAAE,IAAK,EAAE,KAAK,IAAI,CAAC,QAAQO,EAAE,IAAI,IAAI+N,GAAGE,GAAG5M,EAAE5B,CAAC,CAAC,CAAC,CAAC,SAASiH,GAAGjH,EAAE,CAAC,OAAOA,EAAEoO,GAAGpO,IAAI,CAAC,EAAEkM,GAAG7K,EAAErB,CAAC,CAAC,CAAC,CAAC,SAASkH,GAAGlH,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,EAAEiM,GAAGjM,IAAI,CAAC,EAAEC,EAAEgM,GAAGhM,CAAC,EAAEiM,GAAGlM,EAAEC,CAAC,CAAC,CAAC,CAAC,SAASkH,GAAGnH,EAAE,CAAC,GAAGA,KAAK,KAAKgM,GAAGhM,EAAE,CAAC,GAAG,EAAE,CAAC,SAASoH,IAAI,CAAC,OAAO8E,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS7E,GAAGrH,EAAE,CAACA,EAAEiM,GAAGjM,IAAI,CAAC,EAAE,QAAQC,EAAE,MAAMD,EAAE,MAAM,EAAE4B,EAAE,EAAEA,EAAE5B,EAAE,OAAO4B,IAAI3B,EAAE2B,CAAC,EAAE5B,EAAE4B,CAAC,EAAE,OAAOsK,GAAGjM,CAAC,CAAC,CAAC,SAASqH,GAAGtH,EAAE,CAAC,OAAOkM,GAAGkC,GAAGpO,IAAI,CAAC,CAAC,CAAC,CAAC,SAASuH,IAAI,CAAC,OAAO2E,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS1E,GAAGxH,EAAE,CAAC,QAAQC,EAAEgM,GAAGjM,KAAK,CAAC,EAAEC,EAAE,QAAQ,CAAC,IAAI2B,EAAE3B,EAAE,IAAI,EAAEA,EAAE,IAAI,EAAE2B,CAAC,CAAC,CAACkF,GAAG9G,CAAC,CAAC,CAAC,SAASyH,GAAGzH,EAAEC,EAAE2B,EAAE,CAAC3B,KAAK,EAAE2B,KAAK,EAAE5B,EAAEiM,GAAGjM,IAAI,CAAC,EAAEC,EAAEgM,GAAGhM,CAAC,EAAE2B,EAAEqK,GAAGrK,CAAC,EAAE5B,EAAEC,CAAC,EAAE2B,CAAC,CAAC,SAAS8F,GAAG1H,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,GAAGA,EAAEmN,GAAGnN,IAAI,EAAE,mBAAmB,GAAG,qBAAqBC,CAAC,EAAEiM,GAAGlM,CAAC,CAAC,CAAC,SAAS2H,GAAG3H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,eAAe,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,UAAU,EAAEA,GAAGA,EAAE,QAAQ,EAAE,KAAK,IAAIA,EAAE,eAAe,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,MAAM,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,IAAIyO,GAAGzO,GAAMA,EAAE,GAAL,IAAYA,EAAE,KAAL,GAAaA,EAAE,KAAL,GAAU0O,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAE,SAAS/G,GAAG5H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,QAAQ,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,OAAO,EAAE,IAAI4B,GAAG6M,GAAGzO,EAAE,YAAY,CAAC,EAAE0O,GAAGC,IAAI3O,EAAE,SAA
S,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAE2B,EAAE,EAAE,EAAE3B,EAAE,KAAK,IAAI,CAAC,EAAE,IAAID,EAAE,kBAAkB,EAAE4B,EAAE,IAAI,KAAK5B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE,IAAIO,EAAE,IAAI,KAAKP,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEA,EAAE,GAAG4B,GAAGrB,GAAGP,EAAE,kBAAkB,GAAG,KAAK,IAAIO,EAAEqB,CAAC,GAAG,EAAE,EAAE3B,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,SAAS6H,GAAG7H,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,IAAI,KAAK,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAE,KAAK,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,CAAC,EAAE4B,EAAE,EAAE,EAAE5B,EAAE,KAAK,IAAI,CAAC,EAAEO,EAAEN,EAAE,kBAAkB,EAAEQ,EAAE,IAAI,KAAKR,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE6B,EAAE,IAAI,KAAK7B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEY,EAAE,KAAK,IAAIiB,EAAErB,CAAC,EAAE,MAAO,GAAEmB,EAAE,EAAE,EAAE5B,EAAE,KAAK,IAAI,CAAC,EAAE,EAAOS,GAAGqB,GAAGjB,GAAGN,GAAG,EAAEqB,IAAIf,GAAGN,KAAKE,EAAE,KAAK,IAAIqB,EAAErB,CAAC,EAAER,EAAE,QAAQA,EAAE,QAAQ,EAAE,MAAM,EAAE2B,EAAEf,EAAEJ,GAAGF,EAAE,GAAG,EAAE,EAAEP,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,OAAO,EAAE2B,GAAG6M,GAAGxO,EAAE,YAAY,CAAC,EAAEyO,GAAGC,IAAI1O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAE4B,EAAE,EAAE,EAAE5B,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAED,EAAEC,EAAE,QAAQ,EAAE,OAAO,MAAMD,CAAC,EAAE,GAAGA,EAAE,GAAG,CAAC,CAAC,SAAS8H,GAAG9H,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAE,CAAC,OAAON,EAAEyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,CAAC,EAAE,GAAG,CAAC,SAASgG,GAAG/H,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAE,CAAC,GAAGL,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,CAAC,CAAC,CAAC,SAASkG,GAAGhI,EAAEC,EAAE2B,EAAErB,EAAE,CAACP,KAAK,EAAEC,KAAK,EAAE2B,KAAK,EAAErB,KAAK,EAAE,IAAIE,EAAG,IAAI,OAAM,YAAY,EAAEqB,EAAE,IAAI,KAAKrB,EAAE,EAAE,CAAC,EAAEM,EAAE,IAAI,KAAKN,EAAE,EAAE,CAAC,EAAEA,EAAEqB,EAAE,kBAAkB,EAAE,IAAIb,EAAEF,EAAE,kBAAkB,EAAEI,EAAE,KAAK,IAAIV,EAAEQ,CAAC,EAAEJ,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE,GAAGmB,EAAE,EAAE,EAAElB,IAAI,IAAI,CAAC,EAAE,EAAOQ,GAAGQ,GAAGa,GAAG9B,EAAEA,GAAGA,EAAE,mBAAmB,OAAO,CAAC,OAAO,GAAG,aAAa,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC,GAAG8B,CAAC,EAAEf,EAAEf,EAAEe,CAAC,EAAEE,EAAER,GAAG4K,GAAGvJ,EAAEF,EAAE,EAAE,EAAEyJ,GAAGtK,EAAER,EAAE,EAAE,IAAI8K,GAAGvJ,EAAEvB,EAAE,EAAE,EAAE8K,GAAGtK,EAAEa,EAAE,EAAE,EAAE,CAAC,IAAIgN,GAAG,CAAC,EAAEC,GAAG,CAAC7O,EAAEC,IAAI,CAAC2O,GAAG,OAAO,EAAE,QAAQhN,EAAEA,EAAErB,EAAE,EAAEP,MAAM,CAAC,GAAG,CAAC,IAAIS,EAAOmB,GAAL,IAAO3B,IAAIQ,GAAQmB,GAAL,MAAS3B,EAAE,EAAE,EAAE,EAAE2O,GAAG,KAAUhN,GAAL,IAAOf,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAO2B,GAAL,IAAO8B,EAAEzD,IAAI,CAAC,EAAO2B,GAAL,IAAO,EAAE,EAAE3B,IAAI,IAAI,CAAC,EAAEgB,EAAE,EAAEhB,IAAI,IAAI,CAAC,CAAC,EAAEA,GAAGQ,EAAE,EAAE,CAAC,CAAC,OAAOmO,EAAE,EAAE,SAAS3G,GAAGjI,EAAEC,EAAE2B,EAAE,CAAC,OAAO5B,KAAK,EAAEC,EAAE4O,GAAG5O,IAAI,EAAE2B,IAAI,CAAC,EAAE0H,GAAGtJ,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,SAASiI,GAAGlI,EAAEC,EAAE2B,EAAE,CAAC,OAAO5B,KAAK,EAAEC,EAAE4O,GAAG5O,IAAI,EAAE2B,IAAI,CAAC,EAAE0H,GAAGtJ,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,IAAIkI,GAAG,IAAI,CAAC,EAAEC,GAAG,IAAI,KAAK,IAAI,EAAE,SAASC,GAAGrI,EAAEC,EAAE,CAAC,OAAOwC,EAAE8G,GAAGvJ,IAAI,EAAEC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIuI,GAAGF,GAAG,IAAI,CAAC,MAAM2B,IAAI,EAAE,QAAQ,EAAE,SAAS1B,IAAI,CAAC,MAAO,WAAU,CAACC,GAAG,IAAI,YAAY,WAAW,YAAY,IAAI,EAAE,IAAI
C,GAAG,IAAI,UAAU,oBAAoB,SAASC,GAAG1I,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAEM,EAAE,EAAE,OAAO,GAAGP,GAAGC,GAAG,WAAWD,EAAE,MAAM,GAAG,QAAQ4B,EAAE,EAAE,GAAGA,EAAEA,GAAG,EAAE,CAAC,IAAInB,EAAER,GAAG,EAAE,GAAG2B,GAAGnB,EAAE,KAAK,IAAIA,EAAET,EAAE,SAAS,EAAE,IAAI8B,EAAE,KAAKrB,EAAE,KAAK,IAAIT,EAAES,CAAC,EAAET,EAAE,CAAC8B,GAAGA,EAAE,IAAI,KAAKA,EAAE,WAAWrB,GAAG,MAAMA,EAAE,OAAO,KAAK,EAAEL,EAAE,OAAO,WAAW,OAAO,MAAM,GAAG,CAACA,EAAE,KAAK0B,CAAC,EAAExB,GAAE,EAAE,IAAIyB,EAAE,EAAE,MAAM/B,CAAC,MAAS,CAAC,CAAC+B,EAAE,MAAM,CAAC,GAAGA,EAAE,MAAM,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI+M,GAAG,KAAK1K,GAAE,iGAAiG,EAAE,GAAG2K,GAAG,CAAC,EAAEC,GAAGhP,GAAG,CAACA,EAAE,QAASA,GAAG,CAAC,IAAIC,EAAE6O,GAAG,EAAE7O,IAAI8O,GAAG9O,CAAC,EAAED,EAAE,CAAE,CAAC,EAAE,SAAS2I,IAAI,CAAC,IAAI3I,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,EAAE,OAAeA,EAAE,CAAC,GAAZ,SAAeA,EAAE,MAAM,EAAEgP,GAAGhP,CAAC,EAAE+O,GAAG,GAAGD,GAAG,EAAEC,GAAG,GAAG/O,EAAE+O,GAAG,EAAE,CAAC,SAASnG,GAAG5I,EAAEC,EAAE2B,EAAE,CAAC,GAAG5B,KAAK,EAAEC,KAAK,EAAE8O,GAAG,IAAI/O,EAAE,IAAIO,EAAEwO,GAAG,QAAiBxO,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,GAAG,CAAC,GAAnD,SAAsDA,EAAE,MAAM,EAAEyO,GAAGzO,CAAC,EAAE,QAAQE,EAAE,EAAEF,EAAEE,CAAC,GAAGqO,GAAG,GAAG9O,GAAG,EAAES,EAAE,IAAIT,EAAE,EAAEA,EAAE4B,GAAGrB,EAAEP,EAAES,CAAC,EAAE,EAAET,EAAE,EAAE,EAAEC,EAAE,EAAED,IAAI,IAAI,CAAC,EAAE8O,GAAG,EAAE,OAAO9O,CAAC,CAAC,IAAIiP,GAAGC,GAAG,CAAC,EAAEC,GAAG,IAAI,CAAC,GAAG,CAACF,GAAG,CAAC,IAAIjP,EAAEC,EAAE,CAAC,KAAK,WAAW,QAAQ,WAAW,KAAK,IAAI,IAAI,IAAI,KAAK,iBAAiB,MAAgB,OAAO,WAAjB,UAA4B,UAAU,WAAW,UAAU,UAAU,CAAC,GAAG,KAAK,QAAQ,IAAI,GAAG,EAAE,SAAS,EAAEmC,GAAG,gBAAgB,EAAE,IAAIpC,KAAKkP,GAAYA,GAAGlP,CAAC,IAAb,OAAe,OAAOC,EAAED,CAAC,EAAEC,EAAED,CAAC,EAAEkP,GAAGlP,CAAC,EAAE,IAAI4B,EAAE,CAAC,EAAE,IAAI5B,KAAKC,EAAE2B,EAAE,KAAK,GAAG5B,CAAC,IAAIC,EAAED,CAAC,CAAC,EAAE,EAAEiP,GAAGrN,CAAC,CAAC,OAAOqN,EAAE,EAAE,SAASpG,GAAG7I,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIM,EAAE,EAAE,OAAO4O,GAAG,EAAE,QAAS,CAAC1O,EAAEqB,IAAI,CAAC,IAAIC,EAAE9B,EAAEM,EAAE,IAAIuB,EAAEjB,EAAE,EAAEb,EAAE,EAAE8B,IAAI,IAAI,CAAC,EAAEC,EAAEA,EAAE,EAAEA,EAAEtB,EAAE,OAAO,EAAEsB,EAAE,EAAE,EAAED,MAAM,CAAC,EAAErB,EAAE,WAAWsB,CAAC,EAAE,EAAE,EAAED,IAAI,CAAC,EAAE,EAAEvB,GAAGE,EAAE,OAAO,CAAC,CAAE,EAAE,CAAC,CAAC,SAASqI,GAAG9I,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAI2B,EAAEuN,GAAG,EAAEtO,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE4B,EAAE,OAAO,IAAIrB,EAAE,EAAE,OAAOqB,EAAE,QAAS5B,GAAGO,GAAGP,EAAE,OAAO,CAAE,EAAEa,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAEM,EAAE,CAAC,CAAC,SAASyI,GAAGhJ,EAAE,CAAC,OAAOyB,EAAEyI,GAAG,GAAG,EAAElK,CAAC,EAAE,EAAE,CAAC,SAASiJ,GAAGjJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOkB,EAAEyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,CAAC,EAAE,EAAE,CAAC,SAAS2I,GAAGlJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOkB,EAAEyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAErB,CAAC,EAAE,EAAE,CAAC,IAAI6O,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,SAASjG,GAAGnJ,EAAEC,EAAE2B,EAAEnB,EAAE,CAAC,GAAGgB,EAAE,OAAOyI,GAAG,GAAG,EAAElK,EAAEC,EAAE2B,EAAEnB,CAAC,EAAER,KAAK,EAAE2B,KAAK,EAAEnB,KAAK,EAAE,QAAQqB,EAAE,EAAEC,EAAE,EAAEA,EAAEH,EAAEG,IAAI,CAAC,IAAIhB,EAAEF,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAEgB,EAAEJ,EAAE,EAAEZ,EAAE,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,QAAQkB,EAAE,EAAEA,EAAEF,EAAEE,IAAI,CAAC,IAAIC,EAAEb,EAAE,EAAEQ,EAAEI,IAAI,CAAC,EAAEE,EAAE+N,GAAGpP,CAAC,EAAMoB,IAAJ,GAAYA,IAAL,KAAapB,IAAJ,EAAM,EAAEyC,GAAGyI,GAAG7J,EAAE,CAAC,CAAC,EAAEA,EAAE,OAAO,GAAGA,EAAE,KAAKD,CAAC,CAAC,CAACU,GAAGb,CAAC,CAAC,OAAOJ,EAAE,EAAEJ,IAAI,IAAI,CAAC,EAAEqB,EAAE,CAAC,CAAC,IAAIuN,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,G
AAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAACvP,EAAEC,IAAI,CAAC,EAAE,EAAE,IAAID,EAAEC,IAAI,CAAC,CAAC,EAAE,SAASmJ,GAAGpJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,SAASE,EAAET,EAAEC,GAAE2B,GAAE,CAAC,IAAI5B,EAAY,OAAOA,GAAjB,SAAmBA,EAAE,SAAS,EAAEA,GAAG,GAAGA,EAAE,OAAOC,IAAGD,EAAE4B,GAAE,CAAC,EAAE5B,EAAE,OAAOA,CAAC,CAAC,SAAS8B,EAAE9B,EAAEC,GAAE,CAAC,OAAOQ,EAAET,EAAEC,GAAE,GAAG,CAAC,CAAC,SAASc,EAAEf,EAAEC,GAAE,CAAC,SAAS2B,GAAE5B,GAAE,CAAC,MAAO,GAAEA,GAAE,GAAG,EAAEA,GAAE,EAAE,CAAC,CAAC,IAAIO,GAAE,OAAYA,GAAEqB,GAAE5B,EAAE,YAAY,EAAEC,GAAE,YAAY,CAAC,KAAxC,IAAiDM,GAAEqB,GAAE5B,EAAE,SAAS,EAAEC,GAAE,SAAS,CAAC,KAAlC,IAAuCM,GAAEqB,GAAE5B,EAAE,QAAQ,EAAEC,GAAE,QAAQ,CAAC,GAAGM,EAAC,CAAC,SAASU,EAAEjB,EAAE,CAAC,OAAOA,EAAE,OAAO,EAAE,CAAC,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAOA,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAASmB,EAAEnB,EAAE,CAAC,IAAIC,GAAED,EAAE,GAAG,IAAIA,EAAE,IAAI,KAAK,IAAI,KAAKA,EAAE,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,EAAE,EAAEC,IAAG,CAAC,IAAI2B,GAAE5B,EAAE,SAAS,EAAEO,IAAGkO,GAAGzO,EAAE,YAAY,CAAC,EAAEqP,GAAGC,IAAI1N,EAAC,EAAE,GAAG,EAAE3B,GAAEM,GAAEP,EAAE,QAAQ,GAAG,CAACA,EAAE,QAAQA,EAAE,QAAQ,EAAEC,EAAC,EAAE,KAAK,CAACA,IAAGM,GAAEP,EAAE,QAAQ,EAAE,EAAEA,EAAE,QAAQ,CAAC,EAAE,GAAG4B,GAAE5B,EAAE,SAAS4B,GAAE,CAAC,GAAG5B,EAAE,SAAS,CAAC,EAAEA,EAAE,YAAYA,EAAE,YAAY,EAAE,CAAC,EAAE,CAAC,OAAO4B,GAAE,IAAI,KAAK5B,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,EAAEC,GAAEgB,EAAE,IAAI,KAAKjB,EAAE,YAAY,EAAE,EAAE,CAAC,CAAC,EAAE4B,GAAEX,EAAEW,EAAC,EAAE,GAAGb,EAAEd,GAAED,CAAC,EAAE,GAAGe,EAAEa,GAAE5B,CAAC,EAAEA,EAAE,YAAY,EAAE,EAAEA,EAAE,YAAY,EAAEA,EAAE,YAAY,EAAE,CAAC,CAACA,KAAK,EAAEC,KAAK,EAAE2B,KAAK,EAAErB,KAAK,EAAE,IAAIa,EAAEP,EAAE,EAAEN,EAAE,KAAK,IAAI,CAAC,EAAE,QAAQc,KAAKd,EAAE,CAAC,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAGa,EAAEmI,GAAGnI,CAAC,EAAE,EAAE,EAAEQ,EAAE2H,GAAG3H,CAAC,EAAER,EAAE,CAAC,KAAK,uBAAuB,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK,KAAK,cAAc,KAAK,QAAQ,KAAK,WAAW,KAAK,WAAW,KAAK,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,WAAW,MAAM,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,IAAI,EAAEQ,EAAEA,EAAE,QAAQ,IAAI,OAAOP,EAAE,GAAG,EAAED,EAAEC,CAAC,CAAC,EAAE,IAAIC,GAAE,2DAA2D,MAAM,GAAG,EAAEC,GAAE,wFAAwF,MAAM,GAAG,EAAE,IAAIF,KAAKD,EAAE,CAAC,KAAKpB,GAAGsB,GAAEtB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGsB,GAAEtB,EAAE,EAAE,EAAE,KAAKA,GAAGuB,GAAEvB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGuB,GAAEvB,EAAE,EAAE,EAAE,KAAKA,GAAG8B,GAAG9B,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAKA,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAKA,GAAGS,EAAET,EAAE,GAAG,EAAE,GAAG,EAAE,KAAKA,GAAGmB,EAAEnB,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKmB,EAAE,KAAKnB,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAKA,KAAQA,EAAEA,EAAE,KAAR,EAAYA,EAAE,GAAG,GAAGA,IAAIA,GAAG,IAAI8B,EAAE9B,EAAE,CAAC,GAAG,KAAKA,GAAG,CAAC,QAAQC,GAAE,EAAE2B,GAAE,EAAEA,IAAG5B,EAAE,GAAG,EAAEC,KAAIwO,GAAGzO,EAAE,GAAG,IAAI,EAAEqP,GAAGC,IAAI1N,IAAG,EAAE,CAAC,OAAOE,EAAE9B,EAAE,GAA
GC,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAG8B,EAAE9B,EAAE,GAAG,EAAE,CAAC,EAAE,KAAKA,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,EAAK,KAAKA,GAAG,GAAGA,EAAE,IAAI,GAAGA,EAAE,GAAG,KAAK,KAAK,KAAKA,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI,IAAK,KAAKA,GAAGA,EAAE,IAAI,EAAE,KAAKA,GAAG8B,EAAE,KAAK,OAAO9B,EAAE,GAAG,EAAEA,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,KAAKA,GAAG,CAAC,IAAIC,GAAE,KAAK,OAAOD,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,GAAG,IAAIA,EAAE,GAAG,IAAIA,EAAE,GAAG,GAAG,GAAGC,KAAIA,GAAMA,IAAJ,MAAY2B,IAAG5B,EAAE,GAAG,IAAIA,EAAE,IAAI,IAAtB,GAA6B4B,IAAH,GAAM6M,GAAGzO,EAAE,EAAE,IAAIC,GAAE,QAAQ,CAACA,GAAE,GAAG,IAAI2B,IAAG5B,EAAE,GAAG,EAAEA,EAAE,GAAG,GAAG,GAAM4B,IAAH,GAASA,IAAH,GAAM6M,GAAGzO,EAAE,GAAG,IAAI,CAAC,IAAIC,IAAG,CAAC,OAAO6B,EAAE7B,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAGA,EAAE,GAAG,KAAKA,GAAG8B,EAAE,KAAK,OAAO9B,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,KAAKA,IAAIA,EAAE,GAAG,MAAM,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,KAAKA,GAAG,CAAC,IAAIC,GAAE,IAAID,EAAEA,EAAE,IAAI,OAAOA,EAAE,KAAK,IAAIA,CAAC,EAAE,IAAIC,GAAE,IAAI,MAAY,QAAQD,EAAE,GAAG,IAAIA,EAAE,KAAK,MAAM,EAAE,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,IAAI,GAAG,EAAE4B,EAAEA,EAAE,QAAQ,MAAM,MAAM,EAAER,EAAEQ,EAAE,SAASP,CAAC,IAAIO,EAAEA,EAAE,QAAQ,IAAI,OAAOP,EAAE,GAAG,EAAED,EAAEC,CAAC,EAAEd,CAAC,CAAC,GAAG,OAAOc,EAAE,SAASrB,EAAE,CAAC,IAAIC,GAAE,MAAMkL,GAAGnL,CAAC,EAAE,CAAC,EAAE,OAAOoL,GAAGpL,EAAEC,GAAE,EAAEA,GAAE,MAAM,EAAEA,EAAC,EAAE2B,EAAEA,EAAE,QAAQ,QAAQ,GAAG,CAAC,EAAEP,EAAE,OAAOpB,EAAE,GAAGsP,GAAGlO,EAAErB,CAAC,EAAEqB,EAAE,OAAO,EAAE,CAAC,SAASgI,GAAGrJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAO6I,GAAGpJ,IAAI,EAAEC,IAAI,EAAE2B,IAAI,EAAErB,IAAI,CAAC,CAAC,CAACkB,GAAG,UAAU,CAAC,QAAQzB,EAAEqB,EAAE,WAAW,EAAErB,KAAK6J,GAAG,EAAEhG,GAAE,QAAS,IAAI,CAACG,KAAI,SAAShE,EAAE,CAACyB,EAAEzB,EAAE,EAAE,QAAQ,IAAI4J,GAAG,IAAIE,EAAE,CAAC,EAAE,KAAK9J,CAAC,CAAC,EAAG,IAAImE,GAAE,CAAE,CAAC,CAAE,CAAC,EAAE,EAAE,QAAQqL,GAAG,MAAM,GAAG,EAAEC,GAAG,EAAE,IAAIA,GAAG,EAAEA,GAAGD,GAAGC,EAAE,EAAE,OAAO,aAAaA,EAAE,EAAEnE,GAAGkE,GAAGjE,GAAGlK,EAAE,aAAa,cAAc,KAAK,CAAC,YAAYrB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,cAAc,CAAC,EAAEqB,EAAE,cAAc,cAAc,KAAK,CAAC,YAAYrB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,eAAe,CAAC,EAAEgM,GAAG,KAAK,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE3K,EAAE,oBAAoB,IAAI2K,GAAG,OAAO,EAAE,EAAED,GAAG,OAAO,IAAImB,GAAG,CAAC3C,GAAGC,GAAGQ,GAAGnG,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGqC,GAAGC,GAAGc,GAAGC,GAAGE,GAAGC,GAAGC,GAAGC,EAAE,EAAE4E,EAAG,UAAU,CAAC,SAAS/N,EAAEA,EAAEC,EAAE,CAAC,OAAO8N,EAAG/N,EAAE,QAAQ+N,EAAG,UAAU,CAAC,IAAI/N,EAAE+N,EAAG9N,EAAE,CAAC,EAAE,OAAO,CAAC2B,EAAErB,CAAC,IAAI,OAAO,QAAQP,CAAC,EAAEC,EAAE2B,CAAC,EAAc,OAAOrB,GAAnB,WAAqB,IAAIP,IAAI,CAACyN,GAAG,KAAK7L,CAAC,EAAE,GAAG,CAAC,OAAOrB,EAAE,GAAGP,CAAC,CAAC,QAAC,CAAQ4D,KAAI6J,GAAG,IAAI,EAAE5L,IAAQ0L,KAAJ,GAAYE,GAAG,SAAP,IAAgBF,GAAG,EAAEtD,IAAI,EAAEqD,GAAGoC,EAAE,EAAe,OAAO,OAApB,KAA4B,OAAO,GAAG,GAAG,CAAC,EAAEnP,EAAE,OAAON,CAAC,EAAE,EAAE8N,EAAG,UAAU,CAAC,IAAI/N,EAAE+N,EAAG9N,EAAED,GAAGC,GAAGD,EAAEC,CAAC,IAAI,EAAE2B,EAAE5B,GAAG,IAAIA,EAAE,IAAI,EAAE,OAAOA,EAAE,OAAO,OAAO,CAAC,EAAEA,CAAC,GAAG,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAG4B,EAAE5B,EAAE,EAAE,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,kCAAkC4B,EAAE5B,EAAE,iCAAiC,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAG4B,EAAE5B,EAAE,EAAE,EAAEA,CAAC,EAAE,EAAEyK,GAAG,KAAKsD,EAAG,EAAE,EAAEjK,GAAE,QAAQiK,EAAG,EAAE,EAAEvK,GAAEvD,EAAEkE,GAAE,EAAE4J,CAAE,CAAC,IAAI9N,EAAEsD,GAAG,EAAE,GAAGS,KAAI3C,EAAE,gBAAgB,GAAG,CAAC,OAAOA,EAAE,gBAAgBpB,EAAED,CAAC,CAAC,OAAOA,EAAE,CAACyC,EAAE,sDAAsDzC,CAAC,EAAE,EAAEoB,EAAEpB,CAAC,CAAC,CAAC,OAAOqE,KAAKhD,EA
AE,WAAWiD,GAAG,kCAAkC,EAAE,mCAAmCjD,EAAE,WAAWA,EAAE,WAAW,mCAAmCiB,CAAC,EAAEA,EAAE,mCAAmC,IAAI,IAAI,mCAAmC,YAAY,GAAG,EAAE,KAAK,SAAStC,EAAEC,EAAE,CAAC,IAAI2B,EAAEyC,GAAG,OAAkB,OAAO,YAAY,sBAA/B,YAAqDC,GAAG1C,CAAC,GAAG2C,GAAG3C,CAAC,GAAe,OAAO,OAAnB,WAAyB6C,GAAG7C,EAAE5B,EAAEC,CAAC,EAAE,MAAM2B,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMrB,GAAG,YAAY,qBAAqBA,EAAEP,CAAC,EAAE,KAAKC,EAAG,SAASM,EAAE,CAAC,OAAOkC,EAAE,kCAAkClC,CAAC,EAAE,EAAEkC,EAAE,2CAA2C,EAAEgC,GAAG7C,EAAE5B,EAAEC,CAAC,CAAC,CAAE,CAAE,CAAC,EAAEA,EAAG,SAASA,EAAE,CAACD,EAAEC,EAAE,SAASA,EAAE,MAAM,CAAC,CAAE,EAAE,MAAMmB,CAAC,EAAE,CAAC,CAAC,EAAE,EAAEgM,GAAGpN,IAAIoN,GAAGW,EAAG,IAAI/N,CAAC,EAAEiD,GAAG,KAAKA,GAAG8K,EAAG,IAAI,EAAE1M,EAAE,SAAS,CAACrB,EAAEC,KAAKoB,EAAE,SAAS0M,EAAG,IAAI/N,EAAEC,CAAC,EAAEoB,EAAE,iBAAiB,CAACrB,EAAEC,KAAKoB,EAAE,iBAAiB0M,EAAG,IAAI/N,EAAEC,CAAC,EAAEoB,EAAE,yBAAyB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,KAAKI,EAAE,yBAAyB0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,CAAC,EAAEI,EAAE,4BAA4B,CAACrB,EAAEC,KAAKoB,EAAE,4BAA4B0M,EAAG,IAAI/N,EAAEC,CAAC,EAAEoB,EAAE,6BAA6B,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,6BAA6B0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,0BAA0B,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,0BAA0B0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,0BAA0BrB,IAAIqB,EAAE,0BAA0B0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,kBAAkB,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,kBAAkB0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,mBAAmBrB,IAAIqB,EAAE,mBAAmB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,wBAAwB,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,wBAAwB0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,iBAAiB,CAACrB,EAAEC,KAAKoB,EAAE,iBAAiB0M,EAAG,IAAI/N,EAAEC,CAAC,EAAEoB,EAAE,kBAAkB,CAACrB,EAAEC,KAAKoB,EAAE,kBAAkB0M,EAAG,IAAI/N,EAAEC,CAAC,EAAEoB,EAAE,SAASrB,IAAIqB,EAAE,SAAS0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,iBAAiB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,KAAKT,EAAE,iBAAiB0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,CAAC,EAAET,EAAE,kBAAkB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,KAAKY,EAAE,kBAAkB0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEY,EAAE,kBAAkBrB,IAAIqB,EAAE,kBAAkB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,qBAAqB,CAACrB,EAAEC,EAAE2B,EAAErB,KAAKc,EAAE,qBAAqB0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,CAAC,EAAEc,EAAE,sBAAsB,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,sBAAsB0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,sBAAsBrB,IAAIqB,EAAE,sBAAsB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,kBAAkBrB,IAAIqB,EAAE,kBAAkB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,cAAc,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,cAAc0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,eAAe,CAACrB,EAAEC,EAAE2B,EAAErB,KAAKc,EAAE,eAAe0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,CAAC,EAAEc,EAAE,sBAAsBrB,IAAIqB,EAAE,sBAAsB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,mBAAmBrB,IAAIqB,EAAE,mBAAmB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,mBAAmB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,KAAKY,EAAE,mBAAmB0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEY,EAAE,QAAQ,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,KAAKQ,EAAE,QAAQ0M,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,CAAC,EAAEQ,EAAE,iBAAiBrB,IAAIqB,EAAE,iBAAiB0M,EAAG,IAAI/N,CAAC,EAAEqB,EAAE,YAAY,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,YAAY0M,EAAG,IAAI/N,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,iBAAiBrB,IAAIqB,EAAE,iBAAiB0M,EAAG,IAAI/N,CAAC,EAAE,IAAI2P,GAAGxM,GAAG,KAAKA,GAAG4K,EAAG,IAAI,EAAExB,GAAGlL,EAAE,QAAQrB,IAAIuM,GAAGlL,EAAE,QAAQ0M,EAAG,IAAI/N,CAAC,EAAEsM,GAAGjL,EAAE,MAAMrB,IAAIsM,GAAGjL,EAAE,MAAM0M,EAAG,IAAI/N,CAAC,EAAE4C,GAAG,CAAC5C,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,KAAKc,GAAGmL,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,CAAC,EAAEwB,GAAG,KAAKA,GAAGyK,EAAG,IAAI,EAAE1D,GAAG,CAACrK,EAAEC,EAAE2B,EAAErB,EAAEE,KAAK4J,GAAG0D,EAAG,IAAI/N,EAAEC,EAAE2B,EAAErB,EAAE
E,CAAC,EAAEkK,GAAG3K,IAAI2K,GAAGoD,EAAG,IAAI/N,CAAC,EAAEoD,GAAGpD,IAAIoD,GAAG2K,EAAG,IAAI/N,CAAC,EAAEgN,GAAG,KAAKA,GAAGe,EAAG,IAAI,EAAElD,GAAG,CAAC7K,EAAEC,KAAK4K,GAAGkD,EAAG,IAAI/N,EAAEC,CAAC,EAAEqK,GAAGtK,IAAIsK,GAAGyD,EAAG,IAAI/N,CAAC,EAAEoK,GAAGpK,IAAIoK,GAAG2D,EAAG,IAAI/N,CAAC,EAAEmK,GAAG,KAAKA,GAAG4D,EAAG,IAAI,EAAEjD,GAAGzJ,EAAE,WAAW,CAACrB,EAAEC,KAAK6K,GAAGzJ,EAAE,WAAW0M,EAAG,IAAI/N,EAAEC,CAAC,EAAE+N,GAAGhO,IAAIgO,GAAGD,EAAG,IAAI/N,CAAC,EAAE0P,GAAG,KAAKA,GAAG3B,EAAG,IAAI,EAAED,GAAG9N,IAAI8N,GAAGC,EAAG,IAAI/N,CAAC,EAAEiO,GAAG,KAAKA,GAAGF,EAAG,IAAI,EAAE,SAAS6B,IAAI,CAAC,GAAG,EAAE,EAAE5L,IAAG,GAAGvC,EAAEN,EAAEE,CAAC,EAAEI,GAAGmJ,GAAG9G,EAAC,EAAE,YAAYzC,CAAC,MAAM,CAAC,GAAGA,EAAE,OAAO,IAAgB,OAAOA,EAAE,QAArB,aAA8BA,EAAE,OAAO,CAACA,EAAE,MAAM,GAAGA,EAAE,OAAO,QAAQwC,GAAE,QAAQxC,EAAE,OAAO,MAAM,CAAC,EAAEuJ,GAAG/G,EAAC,EAAE,EAAEG,IAAG2L,KAAKA,GAAG,GAAGtO,EAAE,UAAU,GAAGuC,KAAInC,GAAGmJ,GAAG9G,EAAC,EAAE3C,EAAEE,CAAC,EAAEI,GAAGmJ,GAAG7G,EAAC,GAAG,CAAC,CAAC,OAAO1C,EAAE,eAAe,QAAQA,EAAE,cAAc,QAAQA,EAAE,UAAU,IAAI8I,GAAG,EAAE9I,EAAE,aAAarB,GAAGsK,GAAGtK,CAAC,EAAEqB,EAAE,WAAWrB,GAAGoK,GAAGpK,CAAC,EAAEqB,EAAE,aAAakI,GAAGlI,EAAE,aAAagK,GAAGhK,EAAE,gBAAgB8J,GAAGjH,GAAE,SAASlE,GAAG,CAAC2P,IAAIC,GAAG,EAAED,KAAKzL,GAAElE,EAAE,EAAE4P,GAAG,EAAEtO,CAAC,GAAUvB,GAAQE,GAAiB,WAAW,MAAM,OAAhC,cAAsCA,GAAE,ICArl0C,IAWa4P,GAePC,GAKAC,GAwCAC,GAsBAC,GAeOC,GAoBPC,GAsBOC,GAtJbC,GAAAC,EAAA,kBAIAC,KAOaV,GAET,GAAS,OAEA,kBAEJ,OAAO,SAAa,IAAe,SAAS,eAAqC,IAE9C,OAAO,KAAS,IAAc,KAAK,UAAU,KAAO,QAO1FC,GAAS,IAAU,OAAO,SAAa,IAAc,OAAY,SAAS,OAK1EC,GAAe,CAACS,EAAkBC,IAA4B,CAClE,GAAI,CACF,IAAMC,EAAUD,GAAkBZ,GAElC,OADYa,EAAU,IAAI,IAAIF,EAAUE,CAAO,EAAI,IAAI,IAAIF,CAAQ,GACxD,SAAWV,EACxB,MAAQ,CACN,MAAO,EACT,CACF,EAgCME,GAAU,MAAMW,GAAyC,CAE7D,IAAMC,EAAO,MADI,MAAM,MAAMD,EAAa,CAAC,YAAa,aAAa,CAAC,GAC1C,KAAK,EACjC,OAAO,IAAI,gBAAgBC,CAAI,CACjC,EAkBMX,GAE0C,cAA+B,QAalEC,GAAoB,SAAkD,CACjF,GAAI,CAACL,GACH,MAAM,IAAI,MAAM,sEAAsE,EAIxF,GAAIE,GAAaF,EAAS,EACxB,MAAO,CAAC,OAAWI,GAAmB,CAAC,EAIzC,IAAMY,EAAM,MAAMb,GAAQH,EAAS,EACnC,MAAO,CAACgB,EAAKZ,GAAmBY,CAAG,CAAC,CACtC,EAOMV,GAGF,cAIK,QAeIC,GAAmB,MAC5BU,EAA+BL,EAC/BM,IAEO,CAAC,OAAWZ,EAAmB,IC1J1C,IAQIa,GACAC,GACAC,GACAC,GAEEC,GAwBAC,GAyBOC,GA+GAC,GA7KbC,GAAAC,EAAA,kBAMAC,KAGIT,GAAc,GACdC,GAAe,GACfC,GAAU,GAERC,GAAyB,IAAe,CAE5C,GAAI,OAAO,kBAAsB,IAC/B,MAAO,GAGT,GAAI,CAGF,OAAI,OAAO,eAAmB,KAC5B,IAAI,eAAe,EAAE,MAAM,YAAY,IAAI,kBAAkB,CAAC,CAAC,EAK1D,YAAY,SAAS,IAAI,WAAW,CACzC,EAAG,GAAI,IAAK,IAAK,EAAG,EAAI,EAAI,EAAG,EAAG,EAAG,EAAI,GAAI,EAAK,EAAI,EAAG,EAAG,EAAI,EAAG,EACnE,EAAG,EAAI,EAAK,EAAK,EAAG,GAAI,GAAI,EAAG,EAAG,EAAG,GAAI,EAAI,IAAK,GAAI,EAAG,EAAG,GAAI,EAClE,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEMC,GAAkB,IAAe,CACrC,GAAI,CAeF,OAAO,YAAY,SAAS,IAAI,WAAW,CACzC,EAAK,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,GAAI,EAAK,GAAK,EAAG,GAAI,EACvF,IAAK,GAAI,IAAK,GAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAI,IAAK,IAAK,EAAG,GAAI,EACzF,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEaC,GAAwB,MAAMK,GAA+C,CACxF,GAAIV,GACF,OAAO,QAAQ,QAAQ,EAEzB,GAAIC,GACF,MAAM,IAAI,MAAM,uDAAyD,EAE3E,GAAIC,GACF,MAAM,IAAI,MAAM,oDAAsD,EAGxED,GAAe,GAGf,IAAMU,EAAUD,EAAM,YAClBE,EAAaF,EAAM,WAGvB,GAAI,CAACN,GAAgB,EACnB,MAAM,IAAI,MAAM,+DAA+D,EAIjF,IAAMS,EAAuBV,GAAuB,EAChDS,EAAa,GAAK,CAACC,IACjB,OAAO,KAAS,KAAe,CAAC,KAAK,qBAEvC,QAAQ,KACJ,iCAAmCD,EACnC,uIACkE,EAIxE,QAAQ,KACJ,4GACmC,EAGvCF,EAAM,WAAaE,EAAa,GAGlC,IAAME,EAAYJ,EAAM,UAClBK,EAAqB,OAAOD,GAAc,SAAWA,EAAY,OACjEE,EAAuBF,GAAiC,IACxDG,EAAmBD,GAA6B,MAAQA,EACxDE,EAAwBJ,GAAiC,KACzDK,EAAoBD,GAA8B,MAAQA,EAE1D,CAACE,EAAWC,CAAc,EAAK,MAAMC,GAAi
BL,EAAiBF,EAAoBH,EAAa,CAAC,EAE3GW,EAAY,GAEVC,EAA8B,CAAC,EAqDrC,GAlDIb,EAAU,GACZa,EAAM,KAAK,IAAI,QAASC,GAAY,CAClC,WAAW,IAAM,CACfF,EAAY,GACZE,EAAQ,CACV,EAAGd,CAAO,CACZ,CAAC,CAAC,EAIJa,EAAM,KAAK,IAAI,QAAQ,CAACC,EAASC,IAAW,CAC1C,IAAMC,EAAiC,CAKrC,WAAAf,CACF,GAEIO,GAAoBJ,KAMtBY,EAAO,WAAa,CAACC,EAAUC,IAC3BV,IAAqBJ,GAAsBc,GAAmBD,GAGpEP,EAAeM,CAAM,EAAE,KAEnBG,GAAU,CACR7B,GAAe,GACfD,GAAc,GACdD,GAAO+B,EACPL,EAAQ,EACJL,GACF,IAAI,gBAAgBA,CAAS,CAEjC,EAECW,GAAS,CACR9B,GAAe,GACfC,GAAU,GACVwB,EAAOK,CAAI,CACb,CAAC,CACP,CAAC,CAAC,EAEF,MAAM,QAAQ,KAAKP,CAAK,EAEpBD,EACF,MAAM,IAAI,MAAM,2DAA2DZ,CAAO,IAAI,CAE1F,EAEaL,GAAc,IAAqB,CAC9C,GAAIN,IAAeD,GACjB,OAAOA,GAGT,MAAM,IAAI,MAAM,qCAAqC,CACvD,ICnLA,IAKaiC,GAeAC,GA6BAC,GAjDbC,GAAAC,EAAA,kBAGAC,KAEaL,GAAkB,CAACM,EAAcC,IAA6B,CACzE,IAAMC,EAAOC,GAAY,EAEnBC,EAAaF,EAAK,gBAAgBF,CAAI,EAAI,EAC1CK,EAAaH,EAAK,QAAQE,CAAU,EAC1C,OAAAF,EAAK,aAAaF,EAAMK,EAAYD,CAAU,EAC9CH,EAAO,KAAKI,CAAU,EAEfA,CACT,EAMaV,GACT,CAACW,EAAkCC,EAAgBC,EAClDC,IAAuC,CACtC,GAAI,OAAOH,GAAW,UAAYA,IAAY,KAAM,CAClD,GAAIE,EAAK,IAAIF,CAAO,EAClB,MAAM,IAAI,MAAM,+BAA+B,EAE/CE,EAAK,IAAIF,CAAO,CAEpB,CAEA,OAAO,QAAQA,CAAO,EAAE,QAAQ,CAAC,CAACI,EAAKC,CAAK,IAAM,CAChD,IAAMC,EAAQL,EAAUA,EAASG,EAAMA,EACvC,GAAI,OAAOC,GAAU,SACnBhB,GAAoBgB,EAAkCC,EAAO,IAAKJ,EAAMC,CAAO,UACtE,OAAOE,GAAU,UAAY,OAAOA,GAAU,SACvDF,EAAQG,EAAMD,EAAM,SAAS,CAAC,UACrB,OAAOA,GAAU,UAC1BF,EAAQG,EAAOD,EAAS,IAAM,GAAG,MAEjC,OAAM,IAAI,MAAM,mCAAmC,OAAOA,CAAK,EAAE,CAErE,CAAC,CACH,EAMSf,GAAkBiB,GAA0B,CACvD,IAAMX,EAAOC,GAAY,EAEnBW,EAAQZ,EAAK,UAAU,EAC7B,GAAI,CACF,IAAMa,EAAeb,EAAK,WAAW,CAAC,EACtCA,EAAK,iBAAiBa,EAAcA,EAAe,CAAC,EACpD,IAAMC,EAAYd,EAAK,OAAOa,EAAe,CAAC,EACxCE,EAAsBf,EAAK,QAAQa,EAAe,EAAI,CAAC,EACvDG,EAAeD,EAAsBf,EAAK,aAAae,CAAmB,EAAI,GACpF,MAAM,IAAI,MAAM,GAAGJ,CAAO,gBAAgBG,CAAS,oBAAoBE,CAAY,EAAE,CACvF,QAAE,CACAhB,EAAK,aAAaY,CAAK,CACzB,CACF,IC/DA,IAQaK,GARbC,GAAAC,EAAA,kBAKAC,KACAC,KAEaJ,GAAiBK,GAA6D,CACzF,IAAMC,EAAOC,GAAY,EACrBC,EAAmB,EACjBC,EAAmB,CAAC,EAEpBC,EAA0CL,GAAW,CAAC,EAE5D,GAAI,CACF,GAAIA,GAAS,mBAAqB,OAChCK,EAAW,iBAAmB,UAE5B,OAAOL,EAAQ,kBAAqB,UAAY,CAAC,OAAO,UAAUA,EAAQ,gBAAgB,GAC1FA,EAAQ,iBAAmB,GAAKA,EAAQ,iBAAmB,EAC7D,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,gBAAgB,EAAE,EAGjF,GAAIA,GAAS,oBAAsB,OACjCK,EAAW,kBAAoB,UACtB,OAAOL,EAAQ,mBAAsB,UAAY,CAAC,OAAO,UAAUA,EAAQ,iBAAiB,EACrG,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,iBAAiB,EAAE,EAG9EA,GAAS,YAAc,SACzBK,EAAW,UAAY,IAGzB,IAAIC,EAAgB,EACpB,OAAIN,GAAS,MAAQ,SACnBM,EAAgBC,GAAgBP,EAAQ,IAAKI,CAAM,GAGrDD,EAAmBF,EAAK,qBACpBI,EAAW,iBAAmBA,EAAW,kBAAoB,CAAC,CAACA,EAAW,UAAYC,CAAa,EACnGH,IAAqB,GACvBK,GAAe,2BAA4B,EAGzCR,GAAS,QAAU,QACrBS,GAAoBT,EAAQ,MAAO,GAAI,IAAI,QAAoC,CAACU,EAAKC,IAAU,CAC7F,IAAMC,EAAgBL,GAAgBG,EAAKN,CAAM,EAC3CS,EAAkBN,GAAgBI,EAAOP,CAAM,EAEjDH,EAAK,sBAAsBE,EAAkBS,EAAeC,CAAe,IAAM,GACnFL,GAAe,iCAAiCE,CAAG,MAAMC,CAAK,GAAG,CAErE,CAAC,EAGI,CAACR,EAAkBC,CAAM,CAClC,OAASU,EAAG,CACV,MAAIX,IAAqB,GACvBF,EAAK,sBAAsBE,CAAgB,EAE7CC,EAAO,QAAQW,GAASd,EAAK,MAAMc,CAAK,CAAC,EACnCD,CACR,CACF,IChEA,IAQME,GAeAC,GAWAC,GAoBAC,GAwDOC,GA9GbC,GAAAC,EAAA,kBAKAC,KACAC,KAEMR,GAA4BS,GAAmD,CACnF,OAAQA,EAAwB,CAC9B,IAAK,WACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,IAAK,MACH,MAAO,IACT,QACE,MAAM,IAAI,MAAM,yCAAyCA,CAAsB,EAAE,CACrF,CACF,EAEMR,GAAoBS,GAAmD,CAC3E,OAAQA,EAAe,CACrB,IAAK,aACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,+BAA+BA,CAAa,EAAE,CAClE,CACF,EAEMR,GAAwBS,GAAmD,CAC1EA,EAAQ,QACXA,EAAQ,MAAQ,CAAC,GAEdA,EAAQ,MAAM,UACjBA,EAAQ,MAAM,QAAU,CAAC,GAE3B,IAAMC,EAAUD,EAAQ,MAAM,QACzBC,EAAQ,+BAEXA,EAAQ,6BAA+B,KAIrCD,EAAQ,oBACRA,EAAQ,mBAAmB,KAAKE,IAAO,OAAOA,GAAO,SAAWA,EAAKA,EAAG,QAAU,QAAQ,IAC5FF,EAAQ,iBAAmB,GAE/B,EAEMR,GACF,CAACW,EAA8BC,EAC9BC,IAA2B,CAC1B
,QAAWH,KAAME,EAAoB,CACnC,IAAIE,EAAS,OAAOJ,GAAO,SAAWA,EAAKA,EAAG,KAG9C,OAAQI,EAAQ,CACd,IAAK,QAEH,GADAA,EAAS,QACL,OAAOJ,GAAO,SAAU,CAG1B,IAAMK,EAFeL,GAEsD,WAC3E,GAAIK,EAAY,CACd,IAAMC,EAAgBC,GAAgB,aAAcJ,CAAM,EACpDK,EAAkBD,GAAgBF,EAAYF,CAAM,EACtDM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GAAe,oDAAoDL,CAAU,GAAG,CAEpF,CACF,CACA,MACF,IAAK,SAEH,GADAD,EAAS,KACL,OAAOJ,GAAO,SAAU,CAC1B,IAAMW,EAAgBX,EACtB,GAAIW,GAAe,gBAAiB,CAClC,GAAIA,EAAc,kBAAoB,QAAUA,EAAc,kBAAoB,OAChF,MAAM,IAAI,MAAM,oDAAoDA,EAAc,eAAe,EAAE,EAErG,IAAML,EAAgBC,GAAgB,kBAAmBJ,CAAM,EACzDK,EAAkBD,GAAgBI,EAAc,gBAAiBR,CAAM,EACzEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GACI,yDAAyDC,EAAc,eAAe,GAAG,CAEjG,CACF,CACA,MACF,IAAK,OACL,IAAK,MACH,SACF,QACE,MAAM,IAAI,MAAM,qCAAqCP,CAAM,EAAE,CACjE,CAEA,IAAMQ,EAAmBL,GAAgBH,EAAQD,CAAM,EACnDM,GAAY,EAAE,4BAA4BR,EAAsBW,CAAgB,IAAM,GACxFF,GAAe,oCAAoCN,CAAM,GAAG,CAEhE,CACF,EAESb,GAAqBO,GAAkE,CAClG,IAAMe,EAAOJ,GAAY,EACrBR,EAAuB,EACrBE,EAAmB,CAAC,EAEpBW,EAAkDhB,GAAW,CAAC,EACpET,GAAqByB,CAAc,EAEnC,GAAI,CACF,IAAMlB,EAAyBT,GAAyB2B,EAAe,wBAA0B,KAAK,EAChGjB,EAAgBT,GAAiB0B,EAAe,eAAiB,YAAY,EAC7EC,EACF,OAAOD,EAAe,OAAU,SAAWP,GAAgBO,EAAe,MAAOX,CAAM,EAAI,EAEzFa,EAAmBF,EAAe,kBAAoB,EAC5D,GAAI,CAAC,OAAO,UAAUE,CAAgB,GAAKA,EAAmB,GAAKA,EAAmB,EACpF,MAAM,IAAI,MAAM,qCAAqCA,CAAgB,EAAE,EAGzE,IAAMC,EAAoBH,EAAe,mBAAqB,EAC9D,GAAI,CAAC,OAAO,UAAUG,CAAiB,GAAKA,EAAoB,GAAKA,EAAoB,EACvF,MAAM,IAAI,MAAM,qCAAqCA,CAAiB,EAAE,EAG1E,IAAMC,EAA+B,OAAOJ,EAAe,wBAA2B,SAClFP,GAAgBO,EAAe,uBAAwBX,CAAM,EAC7D,EAcJ,GAZAF,EAAuBY,EAAK,yBACxBjB,EAAwB,CAAC,CAACkB,EAAe,kBAAmB,CAAC,CAACA,EAAe,iBAAkBjB,EAC/F,CAAC,CAACiB,EAAe,gBAAiB,EAAGC,EAAiBC,EAAkBC,EACxEC,CAA4B,EAC5BjB,IAAyB,GAC3BS,GAAe,+BAAgC,EAG7CI,EAAe,oBACjBxB,GAAsBW,EAAsBa,EAAe,mBAAoBX,CAAM,EAGnFW,EAAe,qBAAuB,OAAW,CACnD,GAAI,OAAOA,EAAe,oBAAuB,UAC/C,MAAM,IAAI,MAAM,+CAA+CA,EAAe,kBAAkB,EAAE,EAEpG,IAAMR,EAAgBC,GAAgB,qBAAsBJ,CAAM,EAC5DK,EAAkBD,GAAgBO,EAAe,mBAAmB,SAAS,EAAGX,CAAM,EACxFU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GACI,4DAA4DI,EAAe,kBAAkB,GAAG,CAExG,CAEA,GAAIA,EAAe,uBACjB,OAAW,CAACK,EAAMC,CAAK,IAAK,OAAO,QAAQN,EAAe,sBAAsB,EAAG,CACjF,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,MAAM,kDAAkDA,CAAI,EAAE,EAE1E,GAAI,OAAOC,GAAU,UAAY,CAAC,OAAO,UAAUA,CAAK,GAAKA,EAAQ,EACnE,MAAM,IAAI,MAAM,iEAAiEA,CAAK,EAAE,EAE1F,IAAMC,EAAad,GAAgBY,EAAMhB,CAAM,EAC3CU,EAAK,6BAA6BZ,EAAsBoB,EAAYD,CAAK,IAAM,GACjFV,GAAe,wCAAwCS,CAAI,MAAMC,CAAK,GAAG,CAE7E,CAGF,OAAIN,EAAe,QAAU,QAC3BQ,GAAoBR,EAAe,MAAO,GAAI,IAAI,QAAoC,CAACS,EAAKH,IAAU,CACpG,IAAMd,EAAgBC,GAAgBgB,EAAKpB,CAAM,EAC3CK,EAAkBD,GAAgBa,EAAOjB,CAAM,EAEjDU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GAAe,qCAAqCa,CAAG,MAAMH,CAAK,GAAG,CAEzE,CAAC,EAGI,CAACnB,EAAsBE,CAAM,CACtC,OAASqB,EAAG,CACV,MAAIvB,IAAyB,GAC3BY,EAAK,0BAA0BZ,CAAoB,EAErDE,EAAO,QAAQsB,GAASZ,EAAK,MAAMY,CAAK,CAAC,EACnCD,CACR,CACF,ICpMA,IAuCaE,GAqCAC,GAsCAC,GAMAC,GAqCAC,GAoBAC,GAOAC,GAxLbC,EAAAC,EAAA,kBAuCaR,GAA8BS,GAA2B,CACpE,OAAQA,EAAM,CACZ,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IACT,IAAK,UACH,MAAO,IACT,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,IACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,EAKaR,GAA8BS,GAAqC,CAC9E,OAAQA,EAAW,CACjB,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,UACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,CACzD,CACF,EAMaR,GAAwBS,GACpB,CAAC,OAAW,EAAG,EAAG,
EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,OAAW,MAAS,EAAEA,CAAQ,EAKxGR,GAAqCM,GAEoD,CAChG,OAAQA,EAAM,CACZ,IAAK,UAEH,OAAO,OAAO,aAAiB,KAAe,aAAa,KAAO,aAAe,YACnF,IAAK,UACH,OAAO,aACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,WACT,IAAK,UACH,OAAO,aACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,SACH,OAAO,eACT,QACE,MAAM,IAAI,MAAM,qBAAqBA,CAAI,EAAE,CAC/C,CACF,EAKSL,GAAwBQ,GAAkE,CACrG,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,EAKaP,GAA4BI,GAAyDA,IAAS,WACvGA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAAYA,IAAS,SAC5FA,IAAS,OAKAH,GAA4BO,GAA0C,CACjF,OAAQA,EAAU,CAChB,IAAK,OACH,MAAO,GACT,IAAK,MACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,ICvMA,IAWaC,GAXbC,GAAAC,EAAA,kBAGAC,KAQaH,GAAW,MAAMI,GAAsE,CAClG,GAAI,OAAOA,GAAS,SAClB,GAAI,GAEF,GAAI,CACF,GAAM,CAAC,SAAAC,CAAQ,EAAI,GAAQ,kBAAkB,EAC7C,OAAO,IAAI,WAAW,MAAMA,EAASD,CAAI,CAAC,CAC5C,OAASE,EAAG,CACV,GAAIA,EAAE,OAAS,wBAAyB,CAEtC,GAAM,CAAC,iBAAAC,CAAgB,EAAI,GAAQ,SAAS,EACtCC,EAASD,EAAiBH,CAAI,EAC9BK,EAAuB,CAAC,EAC9B,cAAiBC,KAASF,EACxBC,EAAO,KAAKC,CAAK,EAEnB,OAAO,IAAI,WAAW,OAAO,OAAOD,CAAM,CAAC,CAC7C,CACA,MAAMH,CACR,KACK,CAEL,IAAMK,EAAW,MAAM,MAAMP,CAAI,EACjC,GAAI,CAACO,EAAS,GACZ,MAAM,IAAI,MAAM,sCAAsCP,CAAI,EAAE,EAE9D,IAAMQ,EAAsBD,EAAS,QAAQ,IAAI,gBAAgB,EAC3DE,EAAWD,EAAsB,SAASA,EAAqB,EAAE,EAAI,EAC3E,GAAIC,EAAW,WAGb,OAAO,IAAI,WAAW,MAAMF,EAAS,YAAY,CAAC,EAC7C,CAEL,GAAI,CAACA,EAAS,KACZ,MAAM,IAAI,MAAM,sCAAsCP,CAAI,qBAAqB,EAEjF,IAAMU,EAASH,EAAS,KAAK,UAAU,EAEnCI,EACJ,GAAI,CAEFA,EAAS,IAAI,YAAYF,CAAQ,CACnC,OAASP,EAAG,CACV,GAAIA,aAAa,WAAY,CAE3B,IAAMU,EAAQ,KAAK,KAAKH,EAAW,KAAK,EACxCE,EAAS,IAAI,YAAY,OAAO,CAAC,QAASC,EAAO,QAASA,CAAK,CAAC,EAAE,MACpE,KACE,OAAMV,CAEV,CAEA,IAAIW,EAAS,EAEb,OAAa,CACX,GAAM,CAAC,KAAAC,EAAM,MAAAC,CAAK,EAAI,MAAML,EAAO,KAAK,EACxC,GAAII,EACF,MAEF,IAAME,EAAYD,EAAM,WACV,IAAI,WAAWJ,EAAQE,EAAQG,CAAS,EAChD,IAAID,CAAK,EACfF,GAAUG,CACZ,CACA,OAAO,IAAI,WAAWL,EAAQ,EAAGF,CAAQ,CAC3C,CACF,KAEK,QAAIT,aAAgB,KAClB,IAAI,WAAW,MAAMA,EAAK,YAAY,CAAC,EACrCA,aAAgB,WAClBA,EAEA,IAAI,WAAWA,CAAI,CAE9B,ICvFA,IAYMiB,GAEAC,GAKFC,GACAC,GAESC,GAQAC,GAWAC,GAzCbC,GAAAC,EAAA,kBAKAC,IAOMT,GAAiB,CAAC,IAAK,IAAK,IAAK,IAAK,GAAG,EAEzCC,GAAQ,CAACS,EAAeC,IAA0B,CAEtD,QAAQ,IAAI,IAAIX,GAAeU,CAAK,CAAC,IAAI,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIC,CAAO,EAAE,CAChF,EAKaP,GAAkB,CAACQ,EAA2BC,IAA0B,CACnFX,GAAiBU,EACjBT,GAAQU,CACV,EAKaR,GAAM,CAACS,EAAoBC,IAAuB,CAC7D,IAAMC,EAAeC,GAAqBH,CAAQ,EAC5CI,EAAcD,GAAqBf,EAAc,EACnDc,GAAgBE,GAClBjB,GAAMe,EAAc,OAAOD,GAAQ,WAAaA,EAAI,EAAIA,CAAG,CAE/D,EAKaT,GAAwB,IAAIa,IAAiC,CACpEhB,IACFE,GAAI,GAAGc,CAAI,CAEf,IC7CA,IAOaC,GAPbC,GAAAC,EAAA,kBAKAC,IAEaH,GAAa,CAACI,EAAyBC,IAE5C,IAAKC,GAAkCD,CAAI,GAAGD,CAAU,ICThE,IAAAG,GAAAC,EAAA,oBCAA,IA8EMC,GA+BAC,GAKAC,GAKAC,GAWFC,GACEC,GAYOC,GAkCPC,GAoSOC,GArdbC,GAAAC,EAAA,kBAIAC,KAEAC,KAwEMZ,GAAsC,IAAI,IAAI,CAClD,CAAC,GAAI,GAAG,EACR,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,EAAE,EACT,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,CAAC,EACZ,CAAC,SAAU,CAAC,EAGZ,CAAC,SAAU,CAAC,EACZ,CAAC,UAAW,CAAC,EACb,CAAC,UAAW,CAAC,CACf,CAAC,EAEKC,GAAsB,CAAC,EAKvBC,GAA4BW,GAAiB,KAAK,KAAK
A,EAAO,EAAE,EAAI,GAKpEV,GAAwBU,GAAiB,CAC7C,QAASC,EAAM,EAAGA,EAAMb,GAAU,OAAQa,IAAO,CAC/C,IAAMC,EAAgBd,GAAUa,CAAG,EACnC,GAAID,GAAQE,EACV,OAAOA,CAEX,CAEA,OAAO,KAAK,KAAKF,EAAO,EAAE,EAAI,EAChC,EAEIT,GAAO,EACLC,GAAqB,IAAMD,KAYpBE,GACT,MAAMU,EAAwBC,EAAsBC,EAAsBC,IAC/C,CACrB,IAAMC,EAAalB,GAAyBgB,CAAY,EAClDG,EAAgBL,EAAQ,OAAO,aAEjC,CAAC,KAAMI,EAAY,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAChF,GAAI,CACF,IAAME,EAAiBN,EAAQ,kBAAkB,EACjDA,EAAQ,eAAe,EACvBM,EAAe,mBACXL,EAA+B,EAAuBI,EACtD,EAA4BD,CAChC,EACAJ,EAAQ,MAAM,EAEd,MAAMK,EAAc,SAAS,WAAW,IAAI,EAE5C,IAAME,EAAcF,EAAc,eAAe,EACjD,GAAIF,EAAiB,CAEnB,IAAMK,EAAeL,EAAgB,EACrC,OAAAK,EAAa,IAAI,IAAI,WAAWD,EAAa,EAAGL,CAAY,CAAC,EACtDM,CACT,KAGE,QAAO,IAAI,WAAWD,EAAY,MAAM,EAAGL,CAAY,CAAC,CAE5D,QAAE,CACAG,EAAc,QAAQ,CACxB,CACF,EAEFd,GAAN,KAAmD,CAqBjD,YAAoBS,EAAwB,CAAxB,aAAAA,EAClB,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,2BAA6B,CAAC,EACnC,KAAK,eAAiB,CAAC,EACvB,KAAK,gBAAkB,IAAI,IAC3B,KAAK,uBAAyB,IAAI,IAElC,OAAW,CAACS,CAAK,IAAKzB,GACpBC,GAAU,KAAKwB,CAAG,EAClB,KAAK,YAAY,IAAIA,EAAK,CAAC,CAAC,EAC5B,KAAK,mBAAmB,IAAIA,EAAK,CAAC,CAAC,CAEvC,CAEA,OAAOC,EAAeC,EAAwB,CAC5C,IAAMC,EAAiBD,EAAK,OACtBE,EAAYF,EAAK,WACjBG,EAAYH,EAAK,WACjBd,EAAOX,GAAyB4B,CAAS,EAGzCC,EAAe,KAAK,aAAa,IAAIL,CAAE,EAC7C,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,uCAAuC,EAEzD,GAAIA,EAAa,eAAiBD,EAChC,MAAM,IAAI,MAAM,yCAAyCC,EAAa,YAAY,eAAeD,CAAS,EAAE,EAI9G,IAAME,EAAwB,KAAK,QAAQ,OAAO,aAE9C,CAAC,iBAAkB,GAAM,KAAAnB,EAAM,MAAO,eAAe,UAAY,eAAe,QAAQ,CAAC,EAGvFU,EAAcS,EAAsB,eAAe,EACzD,IAAI,WAAWT,CAAW,EAAE,IAAI,IAAI,WAAWK,EAAgBC,EAAWC,CAAS,CAAC,EACpFE,EAAsB,MAAM,EAI5B,IAAMV,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBAAmBU,EAAuB,EAAGD,EAAa,QAAQ,OAAQ,EAAGlB,CAAI,EAEhGoB,GAAU,UAAW,IAAM,qCAAqCP,CAAE,GAAG,EAErE,KAAK,2BAA2B,KAAKM,CAAqB,CAC5D,CAEA,OAAOE,EAAqBC,EAAgC,CAE1D,IAAMC,EAAqB,KAAK,aAAa,IAAIF,CAAQ,EACzD,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,2CAA2C,EAG7D,IAAMC,EAA0B,KAAK,aAAa,IAAIF,CAAa,EACnE,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAID,EAAmB,eAAiBC,EAAwB,aAC9D,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMxB,EAAOX,GAAyBkC,EAAmB,YAAY,EAG/Dd,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBACXc,EAAmB,QAAQ,OAAQ,EAAGC,EAAwB,QAAQ,OAAQ,EAAGxB,CAAI,CAC3F,CAEA,uBAAuByB,EAAmBpB,EAAsBqB,EAAoC,CAClG,IAAIb,EACJ,GAAIa,EAAgB,CAElB,GADAb,EAAK,KAAK,gBAAgB,IAAIa,CAAc,EACxCb,IAAO,OACT,MAAM,IAAI,MAAM,mCAAmC,EAErD,GAAIY,IAAWC,EACb,OAAAN,GACI,UACA,IAAM,uDAAuDf,CAAY,WACrEQ,CAAE,6BAA6B,EAChCA,EACF,GAAI,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC5E,MAAM,IAAI,MAAM;AAAA,sDAC8B,EAEhD,KAAK,gBAAgB,OAAOa,CAAc,CAC5C,MACEb,EAAKrB,GAAmB,EAG1B,YAAK,aAAa,IAAIqB,EAAI,CAAC,QAAS,CAAC,GAAAA,EAAI,OAA2B,OAAAY,CAAM,EAAG,aAAApB,CAAY,CAAC,EAC1F,KAAK,gBAAgB,IAAIoB,EAAQZ,CAAE,EACnCO,GACI,UACA,IAAM,uDAAuDf,CAAY,WAAWQ,CAAE,eAAe,EAClGA,CACT,CAEA,yBAAyBY,EAAyB,CAChD,IAAMZ,EAAK,KAAK,gBAAgB,IAAIY,CAAM,EACtCZ,IAAO,SACT,KAAK,aAAa,OAAOA,CAAE,EAC3B,KAAK,gBAAgB,OAAOY,CAAM,EAClCL,GAAU,UAAW,IAAM,4DAA4DP,CAAE,EAAE,EAE/F,CAGA,OAAOb,EAAc2B,EAAQ,eAAe,QAAU,eAAe,SAAW,eAAe,SAAmB,CAChH,IAAMpB,EAAajB,GAAqBU,CAAI,EAExCI,EAGEwB,GAAaD,EAAQ,eAAe,WAAa,eAAe,QAEhEE,GAAaF,EAAQ,eAAe,WAAa,eAAe,QACtE,GAAIC,GAAaC,EAAW,CAE1B,IAAMC,GADcF,EAAY,KAAK,YAAc,KAAK,oBAC5B,IAAIrB,CAAU,EACrCuB,EAICA,EAAQ,OAAS,EAEnB1B,EAAY0B,EAAQ,IAAI,EAGxB1B,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAPxEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,CAU1E,MAEEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAGxE,IAAMI,EAAU,CAAC,GAAIvC,GAAmB,EAAG,OAA2B,OAAQY,CAAS,EACvF,YAAK,aAAa,IAAI2B,EAAQ,GAAI,CAAC,QAAAA,EAAS,aAAc/B,CAAI,CAAC,EAE/DoB,GAAU,UAAW,IAAM,uCAAuCpB,CAAI,WAAW+B,EAAQ,EA
AE,EAAE,EACtFA,CACT,CAEA,IAAIlB,EAAkC,CACpC,OAAO,KAAK,aAAa,IAAIA,CAAE,GAAG,OACpC,CAEA,QAAQA,EAAuB,CAC7B,IAAMmB,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAAZ,GAAU,UAAW,IAAM,sCAAsCP,CAAE,gBAAgBmB,EAAW,QAAQ,EAAE,EAAE,EAE1G,KAAK,aAAa,OAAOnB,CAAE,EAC3B,KAAK,eAAe,KAAKmB,EAAW,QAAQ,MAAM,EAG3CA,EAAW,YACpB,CAEA,MAAM,SAASnB,EAAeP,EAAkD,CAC9E,IAAM0B,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,qBAAqB,EAEvC,MAAMvC,GAAgB,KAAK,QAASuC,EAAW,QAAQ,OAAQA,EAAW,aAAc1B,CAAe,CACzG,CAEA,uBAA8B,CAC5B,QAAWmB,KAAU,KAAK,2BAExBA,EAAO,QAAQ,EAIjB,GAFA,KAAK,2BAA6B,CAAC,EAE/B,KAAK,eAAe,SAAW,EAInC,GAAI,KAAK,QAAQ,gBAAkB,UAAW,CAC5C,QAAWA,KAAU,KAAK,eAAgB,CACxC,IAAMQ,EAAgB9C,GAAe,IAAIsC,EAAO,IAAI,EAGpD,IAAKA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAEtE,IAAMS,EAAW,KAAK,YAAY,IAAIT,EAAO,IAAI,GAAK,CAAC,EACnDQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAGxB,UAAYA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAE7E,IAAMS,EAAW,KAAK,mBAAmB,IAAIT,EAAO,IAAI,GAAK,CAAC,EAC1DQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAExB,MACEA,EAAO,QAAQ,CAEnB,CACA,KAAK,eAAiB,CAAC,CACzB,KAAO,CAGL,IAAIU,EAAkB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,gBAAiB,EAC/EA,IACHA,EAAkB,CAAC,EACnB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,iBAAmBA,CAAe,GAEjF,QAAWV,KAAU,KAAK,eACxBU,EAAgB,KAAKV,CAAM,EAE7B,KAAK,eAAiB,CAAC,CACzB,CACF,CAEA,SAAU,CACR,KAAK,YAAY,QAASK,GAAY,CACpCA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,mBAAmB,QAASK,GAAY,CAC3CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EAED,KAAK,aAAa,QAASW,GAAY,CACrCA,EAAQ,QAAQ,OAAO,QAAQ,CACjC,CAAC,EAED,KAAK,uBAAuB,QAASN,GAAY,CAC/CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,uBAAyB,IAAI,GACpC,CAEA,iBAAiBY,EAAmB,CAElC,IAAMC,EAAiB,KAAK,uBAAuB,IAAID,CAAS,EAC5DC,IACFA,EAAe,QAAQb,GAAU,CAC/BA,EAAO,QAAQ,CACjB,CAAC,EACD,KAAK,uBAAuB,OAAOY,CAAS,EAEhD,CACF,EAEa1C,GAAuB,IAAI4C,IACpC,IAAI7C,GAAmB,GAAG6C,CAAI,ICtdlC,IAGMC,GAsBOC,GAzBbC,GAAAC,EAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EASaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,IC1B3C,IAKaE,GAaAC,GA6EAC,EA6IAC,GA0MAC,GAkDAC,GACAC,GAzebC,GAAAC,EAAA,kBAKaR,GAAN,KAAiB,CAOtB,OAAO,gBAAgBS,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAGaT,GAAN,KAAoB,CAQzB,OAAO,UAAUU,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFlB,GAAW,gBAAgB,CAACW,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAASC,EAAIN,EAAW,EAAI,EAAGM,GAAKH,EAAOG,IAAK,CAC9C,IAAMC,EAAON,EAAQK,EAAI,EAAI,EAAIR,EAAMG,EAAQK,CAAC,EAC1CE,EAAON,EAAQI,EAAI,EAAI,EAAIP,EAAMG,EAAQI,CAAC,EAEhD,GAAIC,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEF,IAAMC,EAAM,KAAK,IAAIF,EAAMC,CAAI,EAC/B,GAAID,GAAQC,EACVJ,EAAMD,EAAQG,CAAC,EAAI,KAAK,IAAIC,EAAMC,CAAI,MACjC,CAEL,GAAIC,EAAM,EACR,OAEFL,EAAMD,EAAQG,CAAC,EAAI,CACrB,CACF,CAEA,OAAOF,CACT,CAOA,OAAO,iBAAiBM,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAAS,EAAI,EAAG,GAAKD,EAAW,IAC9B,GAAIF,EAAME,EAAY,CAAC,IAAM,GAAKF,EAAME,EAAY,CAAC,
IAAMD,EAAWE,EAAY,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CACF,EAGaxB,EAAN,MAAMyB,CAAU,CAIrB,OAAO,KAAKC,EAAiC,CAC3C,OAAOD,EAAU,0BAA0BC,EAAM,EAAGA,EAAK,MAAM,CACjE,CAKA,OAAO,aAAaA,EAAyBC,EAAO,EAAsB,CACxE,IAAMC,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EAEV,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC1B,EAAIA,EAAO,EACf,KAAO,GAAK,GAAG,CACb,GAAIF,EAAK,CAAC,EAAIC,IAAS,EAAG,CACxBE,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAAIC,EACvB,KACF,CACA,GAAIA,EAAOD,EAAK,CAAC,IAAM,EACrB,MAAM,IAAI,MAAM,sBAAsB,EAExCG,EAAQ,CAAC,EAAI,EACbF,GAAQD,EAAK,CAAC,EACd,GACF,CACA,IAAK,IAAK,GAAK,EAAG,IAChBG,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAErB,OAAOG,CACT,CAKA,OAAO,kBAAkBH,EAAyBI,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,wCAAwCJ,EAAK,MAAM,cAAc,EAE/G,OAAOD,EAAU,0BAA0BC,EAAMI,EAAMJ,EAAK,MAAM,CACpE,CAKA,OAAO,gBAAgBA,EAAyBI,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,sCAAsCJ,EAAK,MAAM,cAAc,EAE7G,OAAOD,EAAU,0BAA0BC,EAAM,EAAGI,CAAI,CAC1D,CAKA,OAAO,0BAA0BJ,EAAyBK,EAAeC,EAAqB,CAC5F,IAAIL,EAAO,EACX,QAAS,EAAII,EAAO,EAAIC,EAAK,IAAK,CAGhC,GAAIN,EAAK,CAAC,EAAI,EACZ,MAAM,IAAI,MAEN,+GAA+G,EAErHC,GAAQD,EAAK,CAAC,CAChB,CACA,OAAOC,CACT,CAEA,OAAO,eAAeD,EAA4C,CAChE,IAAME,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMK,EAAU,IAAI,MAAML,CAAI,EAC9BK,EAAQL,EAAO,CAAC,EAAI,EACpBK,EAAQL,EAAO,CAAC,EAAIF,EAAKE,EAAO,CAAC,EACjC,QAASX,EAAIW,EAAO,EAAGX,GAAK,EAAG,EAAEA,EAC/BgB,EAAQhB,CAAC,EAAIgB,EAAQhB,EAAI,CAAC,EAAIS,EAAKT,EAAI,CAAC,EAE1C,OAAOgB,CACT,CAKA,OAAO,cAAcH,EAAcI,EAA4B,CAC7D,GAAIJ,EAAO,CAACI,GAAcJ,GAAQI,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOJ,EAAO,EAAIA,EAAOI,EAAaJ,CACxC,CAEA,OAAO,cAAcK,EAAyBD,EAA+B,CAC3E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,GAAcC,EAAK,MAAM,CAAC,CACvE,CAQA,OAAO,gBAAgB5B,EAAsB8B,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAM/B,EAAE+B,CAAC,CAAC,EAEpB/B,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASmB,EAAyBa,EAA2C,CAClF,IAAMX,EAAOF,EAAK,OAClB,OAAOA,EAAK,IAAI,CAACY,EAAG,IAAMA,EAAIC,EAAI,CAAC,EAAIA,EAAI,EAAIX,CAAI,CAAC,CACtD,CAOA,OAAO,SAASY,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGrB,IAAMqB,IAAMG,EAAOxB,CAAC,CAAC,CAC/C,CACF,EAEahB,GAAN,MAAMyC,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBZ,EAChFa,EAAqBC,EAAsB,CAC7C,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,MAAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IACxCA,GAAOH,EAAY,OACrBA,EAAY,KAAKD,EAAUI,EAAM,CAAC,CAAC,EAEnCH,EAAYG,CAAG,EAAIJ,EAAUI,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMf,EAAQ,QAChB,GAAIA,EAAQe,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhEf,EAAQ,KAAK,CAAC,EAKlB,QAASe,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMF,EAAU,QAClB,GAAIA,EAAUE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEF,EAAU,KAAK,CAAC,EAKpB,QAASE,EAAM,EAAGA,EAAMH,EAAY,OAAS,EAAGG,IAC9C,GAAIA,EAAMD,EAAK,QACb,GAAIA,EAAKC,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DD,EAAK,KAAK,CAAC,EAKf,QAASC,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAAO,CACjD,GAAIH,EAAYG,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAID,EAAKC,CAAG,GAAKH,EAAYG,CAAG,GAAKD,EAAKC,EAAMH,EAAY,MAAM,GAAKA,EAAYG,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACHJ,EAA8BX,EAA4Ba,EAC1DD,EAAgCE,EAAgBE,EAAwBC,EAAwB,CAClG,GAAKA,EAIL,IAAIH,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIX,EAAQ,SAAYW,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAASI,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CN,EAAa,wBACTE,EAAUI,GAAOC,EAAgB,EAAI,EAAE,EAAGhB,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAChGA,EAAMJ,EAAU,OAAS,EAAGM,CAAO,EAE3C,CAaA,OAAO,uBACHP,EAA2BC,EAA8BX,EAAmBa,EAC
5ED,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMO,EAAa,CAACP,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACpFC,CACT,CAYA,OAAO,uBACHP,EAA8BQ,EAA+BnB,EAAmBa,EAChFD,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,GAAKQ,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMD,EAAa,CAACP,EAAU,CAAC,EAAGQ,EAAW,CAAC,CAAC,EAE/C,OAAAV,EAAa,mBAAmB,GAAOE,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACrGC,CACT,CAKA,OAAe,mBACXR,EAA2BC,EAA8BO,EAAsBlB,EAC/Ea,EAA8BD,EAAgCE,EAAgBG,EAAkB,CAClG,GAAIP,EACF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAK,CAAC,MAGnB,SAASH,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAKT,EAAa,wBACzBE,EAAUI,EAAM,CAAC,EAAGf,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAAKA,EAAMJ,EAAU,OAAS,EACxGM,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXG,EAAgBC,EAAgBC,EAAkBC,EAAgBT,EAAgBU,EAClFC,EAAsBR,EAA0B,CAClD,IAAMS,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIN,GAAWA,IAAY,SACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAH,EAAKU,CAAY,EAAI,EACrBV,EAAKW,CAAY,EAAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAN,EAAKU,CAAY,EACgB,KAAK,MAAjCP,IAAY,cAA4BU,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/Db,EAAKW,CAAY,EAAIE,EAAYb,EAAKU,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASN,EAAKU,CAAY,EAAIV,EAAKW,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEapD,GAAN,KAAe,CAIpB,OAAO,qBACH2D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAGahE,GAAW,sBACXC,GAAW,uBCzexB,IAiBakE,GAsMPC,GAoCOC,GAKAC,GAKAC,EAeAC,GAiBAC,GAcAC,GAgBAC,GAmBAC,EA+BPC,GAiTOC,EAaAC,EAaAC,GAgFPC,GAwJOC,GAaAC,GAr7BbC,GAAAC,EAAA,kBAGAC,IACAC,KAaapB,GAAiB,GAsMxBC,GAAoB,CAACoB,EAAcC,IAAiD,CACxF,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,OAAQD,EAAM,CACZ,QACE,OAAOC,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,QACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,QACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,OACE,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mBAAmB,EAErC,MAAO,CAAC,MAAO,YAAY,EAE7B,QACE,MAAM,IAAI,MAAM,sBAAsBD,CAAI,EAAE,CAChD,CACF,EAEanB,GAA8B,CAACmB,EAAgBC,EAAsB,IAAM,CACtF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEapB,GAA4B,CAACkB,EAAgBC,EAAsB,IAAM,CACpF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEanB,EAA6B,IAAIoB,IAA6D,CACzG,IAAMC,EAAoC,CAAC,EAC3C,OAAAD,EAAK,QAAQE,GAAO,CACdA,EAAI,SAAW,GACjBD,EAAgB,KACZ,CAAC,QAAuB,KAAMC,CAAG,EAAG,CAAC,QAAuB,KAAMC,EAAU,eAAeD,CAAG,CAAC,CAAC,CAExG,CAAC,EACMD,CACT,EAMapB,GAAoBuB,GAE3BA,EAAO,IAAM,EACR,EACEA,EAAO,IAAM,EACf,EAGF,EASItB,GAAa,CAACuB,EAAW,MAAOP,EAAqBQ,EAAQ,MACpE,CAACR,GAAcA,IAAe,EACzB,GAAGO,CAAQ,IAAIC,CAAK,IAGtB,MAAMR,CAAU,IAAIO,CAAQ,KAAKC,CAAK,IASlCvB,GAAY,CAACsB,EAAkBP,EAAoBQ,IAC1DD,IAAa,MACRC,EAELR
,IAAe,EACV,OAAOQ,CAAK,IAGd,MAAMR,CAAU,SAASQ,CAAK,IAQ1BtB,GAAY,CAACuB,EAAcT,IAClCA,IAAe,EACV,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAC1CT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,MAClBT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAGlCA,EAUItB,EACT,CAACsB,EAAcC,EAAsBC,EAAgBZ,IAC/CU,EAAK,WAAW,WAAW,GAAKE,EAAS,EACvC,OAAQD,GAAW,SACjBX,IAAS,MACJ,GAAGU,CAAI,KAAKC,CAAK,WAAWA,CAAK,eAAeA,CAAK,aAErD,GAAGD,CAAI,KAAKC,CAAK,WAAWA,CAAK,SAGtCX,IAAS,MACJ,GAAGU,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAK,KAAK,MAAMA,EAAQ,EAAI,CAAC,CAAC,KAAKA,EAAQ,EAAI,CAAC,IAEhF,GAAGD,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAKA,EAAQ,CAAC,IAIlDC,EAAS,EAAI,GAAGF,CAAI,IAAIC,CAAK,IAAMD,EAc5CrB,GACF,CAACqB,EAAcG,EAAoBC,EAAuCC,EACzEd,IAAuC,CACtC,IAAMe,EAAa,OAAOF,GAAgB,SACpCG,EAAOD,EAAaF,EAAcA,EAAY,OAC9CI,EAAe,CAAC,GAAG,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAC,EACzCE,EAAcF,EAAO,EAAI,MAAQA,GAAQ,EAAI,MAAMA,CAAI,QAAU,cAAcA,CAAI,IACnFf,EAAatB,GAAkBiC,EAAYZ,CAAU,EACrDmB,EAAY,OAAOlB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACtEmB,EAAc,OAAOnB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACxEF,EAAO,CAAC,QAASmB,EAAa,MAAOC,EAAW,QAASC,EAAa,OAAQR,CAAU,EAExFS,EAAgBjB,GAA+B,OAAOA,GAAQ,SAAWA,EAAM,GAAGA,CAAG,IAErFkB,EAAqB,CACzB,gBAAiB,GACjB,gBAAiB,GACjB,2BAA4B,GAC5B,IAAK,GACL,aAAc,GACd,IAAK,GACL,aAAc,EAChB,EAEMC,EAAgBR,EAAa,YAAc,GAC3CS,EAAQ,GAAGD,CAAa,GAAGd,CAAI,SAC/BgB,EAAU,GAAGF,CAAa,GAAGd,CAAI,WAEnCiB,EAAa,GACjB,QAASC,EAAI,EAAGA,EAAIX,EAAO,EAAGW,IAC5BD,GAAc;AAAA,aACTC,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC9CW,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC/CW,CAAC,UAAUA,CAAC;AAAA,oBACNA,CAAC;AAAA,MAGfD,GAAc,WAAWV,EAAO,CAAC,eAEjC,IAAMY,EAAgCZ,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,oBAAoBV,EAAK,OAAO;AAAA,mBAC5BA,EAAK,OAAO;AAAA;AAAA,MAEzB2B,CAAU;AAAA;AAAA,KAIJG,EAAmBC,IACvBR,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIc,EAAY,OAAOrB,CAAI,IAAIqB,CAAS,KAGlDC,EAAoB,CAAC,EAC3B,GAAIf,GAAQ,EACV,QAASW,EAAIX,EAAO,EAAGW,GAAK,EAAGA,IAC7BI,EAAQ,KAAK,GAAG5C,EAAasC,EAASE,EAAGX,CAAI,CAAC,eAAeW,CAAC,IAAI,EAItE,IAAMK,EAAgChB,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,aAAaV,EAAK,OAAO;AAAA,aAC3BgC,EAAQ,KAAK,GAAG,CAAC;AAAA,KAGlBE,EAAmBC,IACvBZ,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIkB,EAAa,OAAOzB,CAAI,IAAIyB,CAAU,KAGpDC,EAAU,IAAIC,IAChBpB,IAAS,EAAI,KAAO,GAAGjB,EAAK,OAAO,IAAIqC,EAAK,IAAIf,CAAY,EAAE,KAAK,GAAG,CAAC,IAErEgB,EAAa,CAACH,EAAoBI,IAClCtB,EAAO,EACF,GAAGkB,CAAU,GAEb,GAAG/C,EAAa+C,EAAYI,EAAKtB,CAAI,CAAC,GAI3CuB,EAAa,CAACL,EAAoBI,EAAoB9B,KACtDQ,EAAO,EACF,GAAGkB,CAAU,IAAI1B,EAAK,IAEtB,GAAGrB,EAAa+C,EAAYI,EAAKtB,CAAI,CAAC,IAAIR,EAAK,IAIpDgC,EAAoE,CAAC,EACrEC,GAA6B,CAACP,EAAoBQ,IAA0B,CAChFpB,EAAmB,2BAA6B,GAChD,IAAMqB,GAAU,GAAGD,EAAO,IAAI,uBAAuBjC,CAAI,SACzD,GAAIkC,MAAWH,EACb,MAAO,GAAGG,EAAO,IAAIT,CAAU,IAEjC,IAAMH,GAAU,CAAC,EACjB,QAASJ,GAAIX,EAAO,EAAGW,IAAK,EAAGA,KAAK,CAClC,IAAMW,GAAMI,EAAO,WAAW,gBAAiBf,GAAIe,EAAO,KAAO1B,CAAI,EACrEe,GAAQ,KAAK,GAAGM,EAAWZ,EAASE,EAAC,CAAC,OAAOW,EAAG,MAAMD,EAAWb,EAAOG,EAAC,CAAC,GAAG,CAC/E,CACA,OAAAa,EAAyCG,EAAO,EAC5C,MAAMA,EAAO,mBAAmBD,EAAO,KAAK,OAAO;AAAA,sBACzCX,GAAQ,OAAS,EAAIA,GAAQ,KAAK,GAAG,EAAI,IAAI;AAAA,cAGpD,GAAGY,EAAO,IAAIT,CAAU,GACjC,EAEMU,GAAc,CAACC,EAAuBrC,KAAmB,IAAM,CACnE,GAAIT,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,KAAKrC,CAAK,IAC7B,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,CAAK,8BAA8BA,CAAK,UAC9E,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,CAAK,UAC3C,GAAIT,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,8DAA8DrC,CAAK,MAE3F,MAAM,IAAI,MAAM,6CAA6CT,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEG+C,EAAeD,IAA2B,IAAM,CACpD,GAAI9C,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,IACnB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAA
U,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,mBAAmBU,CAAI,IAAIoC,CAAM,oBAAoBpC,CAAI,IAAIoC,CAAM,sBAAsBpC,CAAI,IAChGoC,CAAM,wBAAwBpC,CAAI,IAAIoC,CAAM,oBAEhD,MAAM,IAAI,MAAM,6CAA6C9C,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEGgD,GAA6B/B,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,QAAQoB,CAAS;AAAA,aACrD2B,EAAY,OAAOrC,CAAI,WAAW,CAAC;AAAA,KAGpCuC,EAAoBhC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,EAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,QAAQ9B,CAAS;AAAA,iBACjCV,CAAI,aAAa0B,EAAQe,CAAU,CAAC;AAAA,IAE/C,GAAG,EAEGC,GAAM,IAAIhB,IAA0C,CACxD,GAAIA,EAAQ,SAAWnB,EACrB,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAGlD,IAAMoC,EAAoBjB,EAAQ,IAAId,CAAY,EAAE,KAAK,GAAG,EAE5D,OAAIL,IAAS,EACJ8B,EAAY,IAAI,EACd9B,IAAS,EACX8B,EAAYM,EAAkB,CAAC,CAAC,GAEvC9B,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,CAAiB,IAE3C,EAEMC,GAAgBnB,GAChBlB,EAAO,EACF8B,EAAYZ,CAAU,GAE7BZ,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAIvCoB,GAA6BtC,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,YAAYoB,CAAS;AAAA,MAChEyB,GAAY,OAAOnC,CAAI,YAAa,OAAO,CAAC;AAAA,KAGtC8C,EAAoBvC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,EAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,YAAY9B,CAAS;AAAA,UAC5CV,CAAI,aAAa0B,EAAQe,CAAU,CAAC;AAAA,IAExC,GAAG,EA0EH,MAAO,CACL,KAxCW,IAAM,CACjB,IAAMM,EAAQ,CAAC,EACXC,EAAmB,GACvB,OAAInC,EAAmB,kBACrBkC,EAAM,KAAK5B,CAA6B,EACxC6B,EAAmB,IAEjBnC,EAAmB,kBACrBkC,EAAM,KAAKxB,CAA6B,EACxCyB,EAAmB,IAEjBnC,EAAmB,6BACrB,OAAO,OAAOkB,CAAwC,EAAE,QAAQkB,IAAQF,EAAM,KAAKE,EAAI,CAAC,EACxFD,EAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKD,CAAiB,EAC5BE,EAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKF,EAA0B,EACrCG,EAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKR,CAAiB,EAC5BS,EAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKT,EAA0B,EACrCU,EAAmB,IAEjB,CAAC1C,GAAc0C,GACjBD,EAAM,QACF,SAAShC,CAAK,MAAMzB,EAAK,OAAO,IAAIc,EAAY,KAAK,GAAG,CAAC,KACzD,SAASY,CAAO,MAAM1B,EAAK,OAAO,IAAIM,EAAU,eAAeQ,CAAW,EAAE,KAAK,GAAG,CAAC,IAAI,EAExF2C,EAAM,KAAK;AAAA,CAAI,CACxB,EAIE,KAAAzD,EACA,gBAAA8B,EACA,gBAAAI,EACA,2BAAAQ,GACA,QAAAN,EACA,WAAAE,EACA,WAAAE,EACA,IAjFU,IAAIoB,IAAkD,CAChE,GAAIA,EAAgB,SAAW3C,EAAO,EACpC,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAElD,IAAMR,EAAQmD,EAAgB3C,CAAI,EAClC,GAAI,OAAOR,GAAU,SACnB,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAM4C,GAAoBO,EAAgB,MAAM,EAAG3C,CAAI,EAAE,IAAIK,CAAY,EAAE,KAAK,GAAG,EAEnF,OAAIL,IAAS,EACJ4B,GAAY,KAAMpC,CAAK,EACrBQ,IAAS,EACX4B,GAAYQ,GAAkB,CAAC,EAAG5C,CAAK,GAE9Cc,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,KAAK5C,CAAK,IAErD,EA6DE,YAAAoC,GACA,aA5DmB,CAACV,EAAoB1B,IACpCQ,EAAO,EACF4B,GAAYV,EAAY1B,CAAK,GAEpCc,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAAK1B,CAAK,MAuDrD,IAAA2C,GACA,YAAAL,EACA,aAAAO,GAEA,MAAAvC,EACA,KAAAL,EACA,QAAAgB,EACA,MAAAD,EACA,KAAAR,CACF,CACF,EAWS3B,EACT,CAACoB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,QAASb,CAAU,EAW3DV,EACT,CAACmB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,SAAUb,CAAU,EAW5DT,GACT,CAACkB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,WAAYb,CAAU,EA8ErER,GAAN,KAA+C,CAC7C,YAAoBoE,EAA2DC,EAA4B,CAAvF,6BAAAD,EAA2D,YAAAC,EAoG/E,KAAQ,kBAAqC,CAAC,EAC9C,KAAQ,UAA6B,CAAC,EACtC,KAAQ,SAA8B,CAAC,EAwBvC,KAAQ,cAAgB,CA9HoF,CAE5G,sCAAsCvD,EAA6B,CAGjE,MAAO,qBADY,OAAOA,GAAS,SAAW,GAAGA,CAAI,IAAMA,CACrB,eACxC,CAEA,UAAUwD,EAAiDpF,GAAgB,C
ACzE,IAAMqF,EAAiB,OAAOD,GAAkB,SAAWA,EAAgBA,EAAc,CAAC,EACpFE,EAAiB,OAAOF,GAAkB,SAAW,EAAIA,EAAc,CAAC,EACxEG,EAAiB,OAAOH,GAAkB,SAAW,EAAIA,EAAc,CAAC,EAE9E,GAAIC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,yBAC/B,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,yCAAyC,KAAK,OAAO,wBAAwB,KAC3F,KAAK,OAAO,wBAAwB,KAAK,KAAK,OAAO,wBAAwB,IAAI,EAGvF,GAAIF,EAAiBC,EAAiBC,EAAiB,KAAK,OAAO,kCACjE,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,+CACd,KAAK,OAAO,iCAAiC,GAAG,EAGtD,IAAMC,EAAuB,KAAK,wBAAwB,CAAC,IAAM,GAAK,KAAK,wBAAwB,CAAC,IAAM,EACpGC,EAAYD,EAAuB;AAAA;AAAA,wDAGA;AAAA;AAAA;AAAA;AAAA,yDAKnCE,EAAsBF,EACxB,4DACA;AAAA,mEAEIH,EAAiBC,EAAiBC,CAAc,iBAExD,MAAO,4BAA4BF,CAAc,KAAKC,CAAc,KAAKC,CAAc;AAAA,YAC/EE,CAAS;AAAA,MACfC,CAAmB;AAAA,GAEvB,CAEQ,uBAAuBC,EAA+B,CACxDA,EAAS,OAAS,IAChBA,EAAS,MAAM,WAAW,WAAW,GACvC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,MAAM,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAEpGA,EAAS,QAAQ,WAAW,WAAW,GACzC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,QAAQ,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAG9G,CAEQ,gBAAgBA,EAAyBC,EAA8B,CAC7E,GAAID,EAAS,QAAU,WACrB,MAAM,IAAI,MAAM,+FAA+F,EAEjH,KAAK,UAAU,KAAKA,CAAQ,EAC5B,KAAK,uBAAuBA,CAAQ,EAEpC,IAAME,EAASF,EAAS,QAAU,QAAU,OAAS,aAC/CjD,EAAciD,EAAS,KAAK,QAClC,MAAO,sBAAsBC,CAAY,kBAAkBC,CAAM,KAAKF,EAAS,IAAI,WAAWjD,CAAW,IAC3G,CAEA,oBAAoBoD,EAAoC,CACtD,OAAOA,EAAU,IAAIC,GAAK,KAAK,gBAAgBA,EAAG,KAAK,eAAe,CAAC,EAAE,KAAK;AAAA,CAAI,CACpF,CAEQ,yBAAyBJ,EAA+B,CAC9D,GAAIA,EAAS,QAAU,WACrB,MAAM,IAAI,MACN,sGAAsG,EAG5G,KAAK,kBAAkB,KAAKA,CAAQ,EACpC,KAAK,uBAAuBA,CAAQ,CACtC,CAEA,6BAA6BG,EAA0C,CACrE,OAAAA,EAAU,QAAQC,GAAK,KAAK,yBAAyBA,CAAC,CAAC,EAChD,IACT,CAEA,gBAAgBhE,EAAcV,EAA8BY,EAAS,EAAiB,CACpF,YAAK,SAAS,KAAK,CAAC,KAAAF,EAAM,KAAAV,EAAM,OAAAY,CAAM,CAAC,EAChC,IACT,CAEA,iBAAiB+D,EAAqD,CACpE,YAAK,SAAW,KAAK,SAAS,OAAOA,CAAkB,EAChD,IACT,CAKQ,oBAA6B,CACnC,GAAI,KAAK,SAAS,SAAW,EAC3B,MAAO,GAGT,IAAMC,EAA4B,CAAC,EACnC,OAAW,CAAC,KAAAlE,EAAM,KAAAV,EAAM,OAAAY,CAAM,IAAK,KAAK,SACtC,GAAIA,GAAUA,EAAS,EACjBZ,IAAS,MACX4E,EAAgB,KAAK,cAAclE,CAAI,iBAAiBV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,EAE1FgE,EAAgB,KAAK,GAAGlE,CAAI,eAAeV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,MAE1E,CACL,IAAMiE,EAAWjE,GAAU,MAAQA,IAAW,EAAIZ,EAAO,MAAMY,CAAM,IAAIZ,CAAI,IAC7E4E,EAAgB,KAAK,GAAGlE,CAAI,IAAImE,CAAQ,EAAE,CAC5C,CAGF,MAAO;AAAA,0BACeD,EAAgB,KAAK,IAAI,CAAC;AAAA,2BACzB,KAAK,aAAa,oCAC3C,CAMA,IAAI,2BAAoC,CACtC,OAAO,KAAK,mBAAmB,EAAI,KAAK,UAAU,IAAIhD,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,EAC1E,KAAK,kBAAkB,IAAIA,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,CACzD,CAKA,IAAI,eAAwD,CAC1D,GAAI,KAAK,SAAS,SAAW,EAC3B,OAGF,IAAMkD,EAA6B9E,GAC9B,UACe,EAAE,CAAC,MAAO,MAAO,MAAO,KAAK,EAAE,QAAQA,CAAI,CAAC,EAChE,OAAO,KAAK,SAAS,IAAI+E,GAAM,CAACD,EAA0BC,EAAE,IAAI,EAAGA,EAAE,QAAU,CAAC,CAAE,CACpF,CACF,EAEarF,GAAqB,CAACsF,EAAyClB,IACxE,IAAIrE,GAAiBuF,EAAelB,CAAM,EAYjCnE,GAAmB,CAACsF,EAA4BC,IAA0C,CACrG,IAAMC,EAASF,EAAQ,OACjB9E,EAAiB,CAAC,EACxB,QAASyB,EAAI,EAAGA,EAAIuD,EAAQvD,IAAK,CAC/B,IAAMvB,EAAM8E,EAAS,EAAIvD,EACnB,EAAIqD,EAAQ5E,CAAG,GAAK,GAChB6E,EAASA,EAAS,OAAS,EAAItD,CAAC,GAAK,GACvC,GAAK,IAAM,GACjBzB,EAAK,QAAQE,CAAG,CAEpB,CACA,OAAOF,CACT,ICj8BA,IAeMiF,GAMAC,GAGAC,GAGAC,GAWOC,GA4DAC,GAKAC,GAvGbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEMZ,GAAkB,CAACa,EAAmBC,IACvCA,GAAQA,EAAK,SAAWD,EAAa,CAAC,GAAI,IAAI,MAAMA,CAAS,EAAE,KAAK,CAAE,EAAE,QAAQ,EAAIC,EAEnFb,GAAiB,CAACc,EAA+BD,IACnDE,EAAU,gBAAgBD,EAAYf,GAAgBe,EAAW,OAAQD,CAAI,CAAC,EAE5EZ,GAAmB,CAACY,EAAgBG,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO
;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKJ,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAM,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEajB,GAA6B,CAACkB,EAAyBC,IAAoC,CACtG,IAAMC,EAAgBF,EAAY,SAC5BR,EAAYQ,EAAY,KAAK,OAC7BP,EAAOd,GAAgBa,EAAWS,CAAQ,EAC1CE,EAAcvB,GAAeoB,EAAY,KAAMP,CAAI,EACnDK,EAASM,EAAe,SAAUF,EAAeC,EAAY,MAAM,EACnEN,EAAQQ,EAAc,IAAKH,EAAeV,CAAS,EACrDc,EACJ,GAAIb,EAAK,SAAW,GAAKA,EAAK,CAAC,IAAM,GAAKA,EAAK,CAAC,IAAM,EAAG,CACvD,IAAMc,EAAWT,EAAO,KAAK,MACvBU,EAA0C,CAAC,GAAI,GAAI,CAAC,EAC1DF,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA,sCAChDS,CAAQ,KAAKC,EAAc,CAAC,EAAI,CAAC,MAAMA,EAAc,CAAC,CAAC;AAAA,IACzFC,EAAa,UAAUD,CAAa,CAAC;AAAA,+BACVA,EAAc,CAAC,CAAC;AAAA,+BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,uCAIRX,EAAM,YAAY,eAAe,CAAC;AAAA;AAAA;AAAA,2BAG9CW,EAAc,CAAC,CAAC;AAAA,2BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA,QAEnCV,EAAO,YAAY,iBAAkB,8BAA8B,CAAC;AAAA;AAAA,IAG1E,MACEQ,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA;AAAA,IAElFjB,GAAiBY,EAAMD,EAAWK,EAAOC,CAAM,CAAC;AAAA;AAAA,IAEhDW,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DX,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGlDA,EAAO,YAAY,aAAcD,EAAM,aAAa,UAAU,CAAC,CAAC;AAAA,KAGpE,MAAO,CACL,KAAM,YACN,YAAa,CAAC,KAAM,GAAGI,CAAQ,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC9D,WAAaV,GAAW,CACtB,IAAMmB,EAAaf,EAAU,KAAKQ,CAAW,EAC7C,MAAO,CACL,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKmB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGC,EAA2BpB,EAAO,CAAC,EAAE,KAAMY,CAAW,CAAC,CAC5G,CACF,EACA,gBAAAG,CACF,CACF,EAEavB,GAAY,CAAC6B,EAAyBC,IAA0C,CAC3FnC,GAAekC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ9B,GAA2B8B,EAAQ,OAAO,CAAC,EAAGC,EAAW,IAAI,CAAC,CAChF,EAEa7B,GAA4B6B,GACrCC,GAA4B,CAAC,KAAMD,EAAW,IAAgB,CAAC,ICxGnE,IAYME,GAaAC,GAaAC,GAaAC,GAYAC,GAQAC,GAYAC,GAcAC,GASAC,GAaOC,GAyEPC,GAkCOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAtQbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KACAC,KAEM3B,GAAqC,CACzC,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,oCACX,UAAW,6BACX,GAAI,6BACJ,GAAI,oCACJ,OAAQ,uBACV,EAEMC,GAA2C,CAC/C,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,wBACX,UAAW,wBACX,GAAI,wBACJ,GAAI,wBACJ,OAAQ,uBACV,EAEMC,GAA4C,CAChD,IAAK,aACL,IAAK,aACL,KAAM,IACN,IAAK,IACL,KAAM,IACN,UAAW,IACX,UAAW,IACX,GAAI,IACJ,GAAI,IACJ,OAAQ,GACV,EAEMC,GAA8C,CAClD,IAAK,YACL,IAAK,YACL,IAAK,YACL,KAAM,YACN,UAAW,YACX,UAAW,iBACX,GAAI,YACJ,GAAI,kBACJ,OAAQ,gBACV,EAEMC,GAAmB,CAACwB,EAAsBC,IAA2B,CACzE,IAAMC,EAAM,CAAC,EACb,QAASC,EAAIF,EAAOD,EAAcG,EAAIF,EAAM,EAAEE,EAC5CD,EAAI,KAAKC,CAAC,EAEZ,OAAOD,CACT,EAEMzB,GAA4B,CAAC2B,EAA0BC,IAAkD,CAC7G,IAAMC,EAAc,CAAC,EACfL,EAAOG,EAAM,OACnB,QAASG,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,IACxBD,EAAY,KAAKF,EAAMG,CAAG,CAAC,EAG/B,IAAMC,EAAcH,EAAK,IAAIE,GAAOH,EAAMG,CAAG,CAAC,EAC9C,MAAO,CAACD,EAAaE,CAAW,CAClC,EAEM9B,GAAuB,CAAC0B,EAAiBC,IAA6B,CAC1E,IAAMJ,EAAOG,EAAM,OAASC,EAAK,OAC3BI,EAAc,CAAC,EACjBC,EAAW,EACf,QAASH,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,GACxBE,EAAY,KAAKL,EAAMM,GAAU,CAAC,EAElCD,EAAY,KAAK,CAAC,EAGtB,OAAOA,CACT,EAEM9B,GAAuB,CAAC0B,EAAgBJ,IAA0B,CACtE,QAASE,EAAI,EAAGA,EAAIE,EAAK,OAAQ,EAAEF,EACjC,GAAIE,EAAKA,EAAK,OAASF,EAAI,CAAC,IAAMF,EAAO,EAAIE,EAC3C,MAAO,GAGX,MAAO,EACT,EAEMvB,GAAqB,CAACyB,EAAgBJ,IAA2B,CACrE,IAAMC,EAAM,CAAC,EACb,GAAI,CAACvB,GAAqB0B,EAAMJ,CAAI,EAAG,CACrC,QAASE,EAAI,EAAGA,EAAIF,EAAM,EAAEE,EACtBE,EAAK,QAAQF,CAAC,IAAM,IACtBD,EAAI,KAAKC,CAAC,EAGdE,EAAK,QAAQM,GAAQT,EAAI,KAAKS,CAAI,CAAC,CACrC,CACA,OAAOT,CACT,EAEarB,GACT,
CAAC+B,EAAcC,EAAqCC,EAA+BC,EAClFC,EAA0BV,EAAuBE,IAAuC,CACvF,IAAMS,EAAaH,EAAO,CAAC,EAAE,KAEvBI,EAAaC,EAAU,KAAKb,CAAW,EACvCc,EAAaD,EAAU,KAAKX,CAAW,EAEvCa,EAAQC,EAAc,KAAMR,EAAO,CAAC,EAAE,SAAUG,CAAU,EAC1DM,EAASC,EAAe,SAAUR,EAAgBV,CAAW,EAE7DmB,EAAgB,GAEhBC,EAAsB;AAAA,oDACkBD,CAAa;AAAA,SA+C3D,MAAO,CACL,KAAAb,EACA,YAAAC,EACA,gBA/CuBc,GAA+B;AAAA,UACpDA,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBN,EAAOE,CAAM,CAAC;AAAA,UACjFG,CAAmB;AAAA;AAAA;AAAA;AAAA,WAIlBC,EAAa,UAAUF,CAAa,CAAC;AAAA;AAAA,2CAELA,CAAa;AAAA;AAAA;AAAA,gCAGxBnD,GAAiByC,CAAU,CAAC;AAAA;AAAA,wDAEJU,CAAa;AAAA,iCACpCJ,EAAM,YAAY,YAAY,CAAC;AAAA,yBACvCjD,GAAU2C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,wCAKNU,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAM3BpD,GAAgB0C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAS3CQ,EAAO,YACH,cACA,GACIR,IAAe,OAAS,GAAGQ,EAAO,KAAK,OAAO,yCACtB,GAAGA,EAAO,KAAK,OAAO,IAAIhD,GAAmBwC,CAAU,CAAC,GAAG,EAAE,CAAC;AAAA;AAAA,WAShG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUU,CAAc,CAAC,EACvD,cAAe,CAAC,EAAGE,CAAU,EAC7B,gBAAiB,CAAC,CAAC,QAAuB,KAAME,CAAU,CAAC,CAC7D,EACF,CACF,EAEEtC,GACF,CAAC8C,EAAyBhB,EAAciB,EACvCd,IAAiG,CAChG,IAAMe,EACFF,EAAQ,OAAO,SAAW,EAAIC,EAAaE,GAAiCH,EAAQ,OAAQC,CAAU,EAEtGG,EAAcF,EAAkB,KAChCE,EAAY,SAAW,GAAK,CAACF,EAAkB,oBACjDE,EAAcJ,EAAQ,OAAO,CAAC,EAAE,KAAK,IAAI,CAACK,EAAM9B,IAAMA,CAAC,GAEzD,IAAM+B,EAAgBf,EAAU,cAAca,EAAaJ,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EAEpFvB,EAAO6B,EACPb,EAAQO,EAAQ,OAAO,CAAC,EACtBO,EAAevD,GAAmByB,EAAMuB,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EACvEO,EAAa,OAAS,IACxBd,EAAQO,EAAQ,QACZQ,GAA2BR,EAAQ,OAAO,CAAC,EAAGO,CAAY,EAAG,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAChG9B,EAAO7B,GAAiB6B,EAAK,OAAQgB,EAAM,KAAK,MAAM,GAGxD,GAAM,CAACf,EAAaE,CAAW,EAAI/B,GAA0B4C,EAAM,KAAMhB,CAAI,EACzEgC,EAAmB/B,EACnBwB,EAAkB,WACpBO,EAAmB3D,GAAqB4B,EAAa4B,CAAa,GAGpEN,EAAQ,QACJ/C,GACI+B,EAAM,CAAC,KAAMkB,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACT,CAAK,EAAGN,EAChFa,EAAQ,OAAO,CAAC,EAAE,SAAUS,EAAkB7B,CAAW,EAC7D,CAAC,OAAQ,CAACa,CAAK,CAAC,CAAC,CACvB,EAEStC,GAAmB,CAAC6C,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEa7C,GAAiB,CAAC4C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa5C,GAAiB,CAAC2C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa3C,GAAwB,CAAC0C,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEa1C,GAAkB,CAACyC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEazC,GAAkB,CAACwC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEaxC,GAAmB,CAACuC,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEavC,GAAkB,CAACsC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEatC,GAAwB,CAACqC,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEarC,GAAqB,CAACoC,EAAyBC,IAAuC,CACjG/C,GAAa8C,EAAS,qBAAsBC,EAAY,QAAQ,CAClE,ICxQA,IAYMS,GAoBAC,GACOC,GA2EAC,GAUPC,GAeAC,GAWAC,GAWAC,GAWAC,GAWAC,GAoBAC,GAqBAC,GAoBAC,GAWAC,GAWAC,GAWAC,GAsBOC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GA7WbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KACAC,KAEMhC,GAAkBiC,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,0BAA0B,CAE9C,EAYMhC,GAAkBiC,GAAU,CAAC,GAAI,GAAI,eAAeA,EAAM,aAAa,eAAe,CAAC,IAAK,EAAE,EACvFhC,GACT,CAACiC,EAAcC,EAAqCH,EAA+BI,EAClFC,EAAqBC,EAA0BC,EAAW,GAAOC,EAAoB,KAAuB,CAC3G,IAAMC,EAAwB,CAAC,EACzBC,EAAaV,EAAO,CAAC,EAAE,KACvBW,EAAYD,EAAW,OACvBE,EAAOC,EAAU,cAAcR,EAAWM,CAAS,EACnDG,EAAkB,CAACN,GAAqBI,EAAK,SAAW,EAC9DF,EAAW,QAAQ,CAACK,EAAGC,IAAM,CACvBF,GAAmBF,EAAK,QAAQI,CAAC,GAAK,EACpCT,GACFE,EAAY,KAAK,CAAC,EAGpBA,EAAY,KAAKM,CAAC,CAEtB,CAAC,EACD,IAAME,EAAa
R,EAAY,OACzBS,EAAaL,EAAU,KAAKJ,CAAW,EA4C7C,MAAO,CACL,KAAAP,EACA,YAAAC,EACA,gBA9CuBgB,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EAErBnB,EAAQoB,EAAc,KAAMrB,EAAO,CAAC,EAAE,SAAUW,CAAS,EACzDW,EAASC,EAAe,SAAUjB,EAAgBW,CAAU,EAC5DO,EAAMpB,EAASH,EAAOqB,EAAQV,CAAI,EACpCa,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGC,EAAI,EAAGD,EAAIf,EAAWe,IAEhCZ,GAAmBF,EAAK,QAAQc,CAAC,GAAK,GACpCnB,GACFoB,IAGFF,EAAY,YAAYC,CAAC,eAAeA,CAAC,MAAMhB,EAAWgB,CAAC,CAAC,MAAMA,CAAC;AAAA,oBAC3DF,EAAI,CAAC,EAAE,SAAS,YAAY,EAAI,qBAAqBE,CAAC,IAAM,EAAE;AAAA,oBAC9DzB,EAAM,WAAW,gBAAiByB,EAAG,IAAIA,CAAC,EAAE,CAAC;AAAA,oBAC7CD,CAAS;AAAA,qBAGjBL,EAAQ,KAAK,GAAGnB,EAAM,WAAW,gBAAiByB,EAAGJ,EAAO,WAAW,iBAAkBK,CAAC,CAAC,CAAC,GAAG,EAC/FA,KAGJ,MAAO;AAAA;AAAA,UAELR,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBlB,EAAOqB,CAAM,CAAC;AAAA;AAAA,UAElFH,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,+BACvDlB,EAAM,KAAK,OAAO;AAAA,iCAChBqB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDF,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,YAClBI,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,CAAC,CAAC;AAAA,YACNC,CAAS;AAAA,YACTD,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,SAAW,EAAIF,EAAO,YAAY,aAAc,OAAO,EAAIE,EAAI,MAAM,CAAC,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,UAE5F,EAME,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUH,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGU,EAA2BlB,EAAYD,CAAW,CAAC,CACxG,EACF,CACF,EAESvC,GACT,CAAC8B,EAA+B6B,IAAmD,CACjF,IAAMjB,EAAiB,CAAC,EACxB,OAAIZ,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,GACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQ8B,GAAKlB,EAAK,KAAK,OAAOkB,CAAC,CAAC,CAAC,EAEzDC,GACH,CAAC,KAAAnB,EAAM,SAAUiB,EAAW,SAAU,kBAAmBA,EAAW,iBAAiB,CAAC,CAC5F,EAEE1D,GACF,CAAC6D,EAAyB9B,EAAc2B,EAA8BzB,IAA6B,CACjG,IAAMJ,EAASgC,EAAQ,OACjBC,EACFjC,EAAO,SAAW,EAAI6B,EAAa3D,GAAiC8B,EAAQ6B,CAAU,EAE1FG,EAAQ,QACJ/D,GACIiC,EAAM,CAAC,KAAM+B,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACjC,EAAO,CAAC,CAAC,EACjFiC,EAAkB,mBAAqBA,EAAkB,KAAK,SAAW,EAAIjE,GAAOoC,EACpF6B,EAAkB,KAAMjC,EAAO,CAAC,EAAE,SAAUiC,EAAkB,SAC9DA,EAAkB,iBAAiB,EACvC,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEE7D,GAAoB,CAAC4D,EAAyBH,IAAuC,CACzF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,eAAgBH,EANf,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,qBACL,CAC8D,CAChE,EAEM5B,GAAgB,CAAC2D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,EACL,CAC0D,CAC5D,EAEM3B,GAAgB,CAAC0D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,sBAC1C,sBACL,CAC0D,CAC5D,EAEM1B,GAAuB,CAACyD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,qBACL,CACiE,CACnE,EAEMzB,GAAiB,CAACwD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAKlC,EAAM,WAAW,gBAAiByB,EAAG,CAAC,CAAC,EAIxD,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMxB,GAAkB,CAACuD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAiB7B7D,GAAiB6D,EAAS,aAAcH,EAhBb,CAAC5B,EAAOqB,EAAQV,IAAS,CAClD,IAAIwB,EAAO,EACX,QAASV,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,KAE1CwB,GAAQJ,EAAQ,OAAO,CAAC,EAAE,KAAKN,CAAC,GAIpC,MAAO,CACL,oBACA,GACA,cAAczB,EAAM,aAAa,eAAe,CAAC,KACjD,eAAeqB,
EAAO,KAAK,KAAK,UAAUc,CAAI,IAChD,CACF,CAC4D,CAC9D,EAEM1D,GAAiB,CAACsD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAK,iBAAiBT,CAAC,QAAQ,EAI3C,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMtB,GAAkB,CAACqD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,aAAcH,EANb,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC4D,CAC9D,EAEMrB,GAAiB,CAACoD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,YAAaH,EANZ,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC2D,CAC7D,EAEMpB,GAAuB,CAACmD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,oBAC1C,EACL,CACiE,CACnE,EAEMnB,GACF,CAACuD,EAA0BzB,EAAyBJ,IAAwC,CAC1F,GAAII,EAAK,SAAW,EAClB,OAAOJ,EAGT,IAAIU,EAAa,EACboB,EAAa,EACjB,QAASC,EAAM,EAAGA,EAAM3B,EAAK,OAAQ2B,IAC/B3B,EAAK,QAAQ2B,CAAG,IAAM,GACxBrB,GAAcmB,EAAME,CAAG,EAEvBD,GAAcD,EAAME,CAAG,EAO3B,OAAOD,EAAa,IAAMpB,EAAa,IACzC,EAESnC,GAAa,CAACiD,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FpD,GAAgBuD,EAASH,CAAU,EAEnCW,GAAiBR,EAASH,CAAU,CAExC,EAEa7C,GAAW,CAACgD,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FxD,GAAc2D,EAASH,CAAU,EAEjCY,GAAeT,EAASH,CAAU,CAEtC,EAEa5C,GAAW,CAAC+C,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FvD,GAAc0D,EAASH,CAAU,EAEjCa,GAAeV,EAASH,CAAU,CAEtC,EAEa3C,GAAkB,CAAC8C,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FtD,GAAqByD,EAASH,CAAU,EAExCc,GAAsBX,EAASH,CAAU,CAE7C,EAEa1C,GAAY,CAAC6C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FrD,GAAewD,EAASH,CAAU,EAElCe,GAAgBZ,EAASH,CAAU,CAEvC,EAEazC,GAAY,CAAC4C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FnD,GAAesD,EAASH,CAAU,EAElCgB,GAAgBb,EAASH,CAAU,CAEvC,EAEaxC,GAAa,CAAC2C,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FlD,GAAgBqD,EAASH,CAAU,EAEnCiB,GAAiBd,EAASH,CAAU,CAExC,EAEavC,GAAY,CAAC0C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FjD,GAAeoD,EAASH,CAAU,EAElCkB,GAAgBf,EAASH,CAAU,CAEvC,EAEatC,GAAkB,CAACyC,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FhD,GAAqBmD,EAASH,CAAU,EAExCmB,GAAsBhB,EAASH,CAAU,CAE7C,EAEarC,GAAe,CAACwC,EAAyBH,IAAuC,CACvF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FzD,GAAkB4D,EAASH,CAAU,EAErCoB,GAAmBjB,EAASH,CAAU,CAE1C,ICnXA,IAcMqB,GAeOC,GA0BAC,GA0BAC,GAjFbC,GAAAC,EAAA,kBAOAC,IAEAC,KAGAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAQaR,GAAS,CAACS,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EA
AO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaT,GAAS,CAACQ,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaR,GAA4BQ,GACrCQ,GAA4BR,CAAoE,IClFpG,IAuEMS,GAmKAC,GAsGAC,GA2JAC,GA0HOC,GAqCPC,GAmHOC,GA7vBbC,GAAAC,EAAA,kBAGAC,IAEAC,KAEAC,KAgEMX,GAA0B,CAACY,EAA+BC,IAAoD,CAmClH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAUH,EAAO,CAAC,EAClBI,EAAOJ,EAAO,CAAC,EACfK,EAAYL,EAAO,CAAC,EACpBM,EAAON,EAAO,CAAC,EACfO,EAAuBP,EAAO,CAAC,EAErC,GAAIM,GAAQC,EACV,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIL,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMM,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAkBR,EAAM,KAAK,CAAC,EAEpC,GAAIE,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIA,EAAQ,KAAK,CAAC,IAAMO,EACtB,MAAM,IAAI,MAAM,uEAAuE,EAGzF,GAAIN,EAAK,KAAK,CAAC,IAAMD,EAAQ,KAAK,CAAC,EACjC,MAAM,IAAI,MAAM,oFAAoF,EAGtG,IAAIQ,EAAcP,EAAK,KAAK,CAAC,EAAI,EAC7BQ,EAAcD,EACdE,EAAcD,EAClB,GAAIX,EAAW,eAAe,OAAS,EAAG,CACxC,GAAIA,EAAW,eAAe,SAAW,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAErE,QAAWa,KAAMb,EAAW,eAC1B,GAAIa,EAAKb,EAAW,WAAa,EAC/B,MAAM,IAAI,MAAM,mDAAmD,EAIvEU,EAAcV,EAAW,eAAe,CAAC,EACzCW,EAAcX,EAAW,eAAe,CAAC,EACzCY,EAAcZ,EAAW,eAAe,CAAC,CAC3C,CAEA,IAAMc,EAAmBN,EAEzB,GAAIE,IAAgBC,EAClB,MAAM,IAAI,MAAM,6DAA6D,EAG/E,GAAIR,EAAK,KAAK,CAAC,IAAMO,EAAcC,EAAcC,EAC/C,MAAM,IAAI,MAAM,+EAA+E,EAGjG,IAAIG,EAAqB,EACzB,GAAIV,EAAM,CACR,GAAIM,IAAgBC,EAClB,MAAM,IAAI,MAAM,oDAAoD,EAEtE,GAAIP,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,qCAAqC,EAEvD,GAAIA,EAAK,KAAK,CAAC,IAAM,EACnB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,KAAK,CAAC,IAAME,EACnB,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIF,EAAK,KAAK,CAAC,IAAML,EAAW,SAC9B,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAIK,EAAK,KAAK,CAAC,IAAMM,EAAcX,EAAW,SAC5C,MAAM,IAAI,MAAM,gEAAgE,EAG7EA,EAAW,yBACde,EAAqBV,EAAK,KAAK,CAAC,EAGpC,CAEA,IAAMW,EAAsBF,EAAmBC,EACzCE,EAAoB,GAEpBC,EAAW,EACjB,GAAId,EAGF,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAIC,EACF,MAAM,IAAI,MAAM,uBAAuB,EAGzC,MAAO,CACL,UAAAE,EACA,eAAAC,EACA,mBAAAO,EACA,iBAAAD,EACA,oBAAAE,EACA,kBAAAC,EACA,gBAAAR,EACA,WAAYC,EACZ,YAAAE,EACA,SAAU,KAAK,MAAMF,EAAcV,EAAW,QAAQ,EACtD,UAAW,KAAK,MAAMY,EAAcZ,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAqB,GACrB,aAAc,GACd,UAAW,CACb,CACF,EAEMZ,GAAkC,CAAC+B,EAA0BlB,EAAmBmB,EAAWC,IAAc,CAC7G,IAAMC,EAAaC,GAAiBF,CAAC,EACjCG,EAAK,GACHC,EAAQJ,EAAIC,EACdG,EAAQD,EACVA,EAAK,EACIC,EAAQ,EAAI,KACrBD,EAAK,KAAK,KAAKC,EAAQ,CAAC,GAE1B,IAAMC,EAAoB,KAAK,KAAKL,EAAIC,EAAaE,CAAE,EACjDG,EAAoC,CACxC,CAAC,KAAM1B,EAAM,SAAU,KAAM,EAAIoB,CAAC,EAAG,CAAC,QAAuB,KAAMI,CAAK,EACxE,CAAC,QAAuB,KAAMC,CAAiB,CACjD,EACME,EAAWC,GAA4B5B,EAAM,SAAUqB,CAAU,EACjEQ,EAAUC,KAA0CT,CAAU,EAE9DU,EAAmBC,GAA+B,CACtD,IAAMC,EAAcC,EAAe,IAAKlC,EAAM,SAAUA,EAAM,KAAMqB,CAAU,EAExEc,EAA8B,CAClC,CAAC,KAAM,QAAS,KAFIL,GAA0B9B,EAAM,QAAQ,CAEC,EAAG,CAAC,KAAM,SAAU,KAAM,KAAK,EAC5F,CAA
C,KAAM,sBAAuB,KAAM,KAAK,CAC3C,EAEA,MAAO;AAAA,0CAC+BuB,CAAE;AAAA,0CACFA,CAAE;AAAA,IACxCS,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBF,CAAW,CAAC;AAAA,IACrED,EAAa,UAAU,CACrBT,EAAI,EAAG,CACT,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,8BAIwBM,CAAO;AAAA;AAAA,gCAELA,CAAO;AAAA;AAAA,+BAER,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,gDACT,IAAK,GACH,MAAO,oGACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA,uBAINM,CAAO;AAAA;AAAA,0BAEJA,CAAO;AAAA;AAAA,+BAEF,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,aACT,IAAK,GACH,MAAO,8BACT,IAAK,GACH,MAAO,4DACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAMHU,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,yBAIvBJ,CAAO;AAAA,0BACNI,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA,IAI9C,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CAAC,KAAM,GAAGV,CAAE,IAAII,CAAQ,IAAIN,CAAU,EAAE,EACrD,gBAAAU,EACA,WAAY,KAAO,CAAC,QAAS,CAAC,EAAG,cAAe,CAAC,EAAGZ,CAAC,EAAG,gBAAAO,CAAe,EACzE,CACF,EAEMtC,GACF,CAACgD,EAAyBC,EAAeC,EAAiBC,EACzDlC,EAA4CmC,EAAiCzC,EAC7Ee,IAA+B,CAC9B,IAAMC,EAAsBD,EAAqB0B,EAAW,iBACtDC,EAAa,CAACD,EAAW,UAAWA,EAAW,SAAUA,EAAW,eAAgBzB,CAAmB,EACvG2B,EAAaF,EAAW,aAAe,QAAaJ,EAAQ,YAAc,EAC1EO,EAAkBD,EACpB,CAACF,EAAW,UAAWA,EAAW,SAAUzB,EAAqByB,EAAW,QAAQ,EACpF,OAIEI,EAAQ7C,EAAW,QAAU,EAAI,EAAM,KAAK,KAAKyC,EAAW,QAAQ,EAAIzC,EAAW,MACnFsB,EAAaC,GAAiBkB,EAAW,QAAQ,EACjDK,EAAqBL,EAAW,SAAWnB,EAC3CyB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKhC,EAAsB+B,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACMd,EAAoC,CACxC,CAAC,QAAuB,KAAMc,EAAW,cAAc,EAAG,CAAC,QAAuB,KAAMK,CAAkB,EAC1G,CAAC,QAAuB,KAAM9B,CAAmB,EAAG,CAAC,QAAuB,KAAMyB,EAAW,QAAQ,EACrG,CAAC,OAAsB,KAAMI,CAAK,EAAG,CAAC,QAAuB,KAAM9B,CAAkB,EACrF,CAAC,QAAuB,KAAM0B,EAAW,gBAAgB,CAC3D,EAEMQ,EAAwD,CAAC,OAAQ,MAAM,EACzET,GACFS,EAAkB,KAAK,MAAM,EAE3B3C,GACF2C,EAAkB,KAAK,MAAM,EAE/B,IAAMC,EAAU,CAAC,CAAC,KAAMR,EAAY,SAAUJ,EAAE,SAAU,aAAgC,CAAC,EACvFK,GACFO,EAAQ,KAAK,CAAC,KAAMN,EAAkB,SAAUN,EAAE,SAAU,aAAgC,CAAC,EAE/F,IAAMN,EAAmBC,GAA+B,CACtD,IAAMkB,EAASC,EAAc,IAAKd,EAAE,SAAUA,EAAE,KAAMhB,CAAU,EAC1D+B,EAASD,EAAc,MAAOb,EAAI,SAAUA,EAAI,KAAMjB,CAAU,EAChEgC,EAAY,CAACH,EAAQE,CAAM,EACjC,GAAIb,EAAS,CACX,IAAMe,GAAeH,EAAc,WAAYZ,EAAQ,SAAUA,EAAQ,KAAMlB,CAAU,EACzFgC,EAAU,KAAKC,EAAY,CAC7B,CACIjD,GACFgD,EAAU,KACNF,EAAc,yBAA0B9C,EAAqB,SAAUA,EAAqB,IAAI,CAAC,EAEvG,IAAMkD,EAASrB,EAAe,SAAUG,EAAE,SAAUI,CAAU,EACxDe,EAAa,CAACD,CAAM,EACtBb,GACFc,EAAW,KAAKtB,EAAe,cAAeG,EAAE,SAAUM,EAAkBtB,CAAU,CAAC,EAEzF,IAAMQ,EAAUC,KAA0CT,CAAU,EAE9Dc,GAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAM,KAA+B,EACvF,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA;AAAA,gCAECI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,gCAC7CI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,IACzEd,EAAa,iBAAiBG,EAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMH,IACOP,GAAWG,EACN;AAAA;AAAA,+EAIA;AAAA,wEAGR,CAAC;AAAA,MACNA,EAAa,4DAA8D,EAAE;AAAA,kBACjEb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOhB,IACKU,GAAWG,EACN;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAQA,yEAER,CAAC;AAAA,QAEAA,EACI,+FACA,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA,mBAKCb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBASF,IAAM,CACpB,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,wCACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA,8BACkBkC,EAAO,KAAK,KAAK,6BACnClD,EAAuB,oCAAsC,KAAK;AAAA;AAAA,IAGxE,EACA,MAA
O,CACL,KAAM,iBACN,YAAa,CACX,KAAM,GAAGgB,CAAU,IAAIhB,IAAyB,MAAS,IAAIkC,IAAY,MAAS,IAAIH,EAAQ,WAAW,GACzG,kBAAAY,CACF,EACA,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAGE1C,GACF,CAAC+C,EAAyBqB,EAAmBC,EAAeC,EAC3DC,EAA6B9C,IAA+B,CAC3D,IAAMC,EAAsBD,EAAqB8C,EAAO,iBAClDC,EAAQD,EAAO,MAAQA,EAAO,MAAQ,EACtCE,EAAsBF,EAAO,YAAcC,EAC3CE,EAAeH,EAAO,YAAc,MAAQxB,EAAQ,YAAc,EAClE4B,EACFD,EAAe,CAACH,EAAO,UAAWA,EAAO,SAAU7C,EAAqB6C,EAAO,QAAQ,EAAI,OACzFK,EAAc,CAACL,EAAO,UAAWA,EAAO,eAAgBE,CAAmB,EAC3EhB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKa,EAAO,UAAYd,CAAS,EACzC,EAAG,KAAK,KAAKc,EAAO,eAAiBd,CAAS,EAC9C,EAAGc,EAAO,UAAYA,EAAO,QAC/B,EAEMlC,EAAoC,CACxC,CAAC,QAAuB,KAAMkC,EAAO,cAAc,EAAG,CAAC,QAAuB,KAAM7C,CAAmB,EACvG,CAAC,QAAuB,KAAM6C,EAAO,SAAS,EAAG,CAAC,QAAuB,KAAMA,EAAO,QAAQ,EAC9F,CAAC,QAAuB,KAAME,CAAmB,EAAG,CAAC,QAAuB,KAAMhD,CAAkB,EACpG,CAAC,QAAuB,KAAM8C,EAAO,gBAAgB,CACvD,EACMZ,EACFW,EAAY,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,EACpDV,EAAU,CAAC,CAAC,KAAMgB,EAAa,SAAUR,EAAM,SAAU,aAAgC,CAAC,EAC5FM,GACFd,EAAQ,KAAK,CAAC,KAAMe,EAAoB,SAAUP,EAAM,SAAU,aAAgC,CAAC,EAErG,IAAM1B,EAAmBC,GAA+B,CACtD,IAAMkC,EAAcf,EAAc,QAASM,EAAM,SAAUA,EAAM,IAAI,EAC/DU,EAAUhB,EAAc,IAAKO,EAAE,SAAUA,EAAE,IAAI,EAC/CL,EAAY,CAACa,EAAaC,CAAO,EACnCR,GACFN,EAAU,KAAKF,EAAc,aAAcQ,EAAU,SAAUA,EAAU,IAAI,CAAC,EAGhF,IAAMH,EAAa,CADJtB,EAAe,SAAUuB,EAAM,SAAUQ,CAAW,CACzC,EACtBF,GACFP,EAAW,KAAKtB,EAAe,gBAAiBuB,EAAM,SAAUO,CAAkB,CAAC,EAErF,IAAM7B,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,gBAAiB,KAAM,KAAK,EACrE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA,gCACCoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,gCAChDoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,IAC5Ed,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMJ,IACQa,GAAaI,EACR;AAAA;AAAA;AAAA,QAKA;AAAA;AAAA,eAIR,CAAC;AAAA,MACNA,EAAe,kEAAoE,EAAE;AAAA,iBAC1EG,EAAY,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAO9B,IACGP,GAAaI,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQA;AAAA;AAAA,SAIR,CAAC;AAAA,UACFA,EAAe,kFAAoF,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAkBzG,EAEA,MAAO,CACL,KAAM,iBACN,YAAa,CAAC,KAAM,GAAGJ,IAAc,MAAS,IAAIvB,EAAQ,WAAW,GAAI,kBAAAY,CAAiB,EAC1F,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAESzC,GACT,CAAC8C,EAAyBC,EAAe+B,EAAeV,EAAeW,EACtEC,EAA6B/B,EAA+BoB,EAC5DtD,EAA4CmC,EAAiCzC,IAA+B,CAC3G,IAAMwE,EAAcnC,EAAQ,YACtBtB,EACF0B,EAAW,aAAe,QAAa+B,EAAc,EAAI/B,EAAW,mBAAqB,EACvFzB,EAAsBD,EAAqB0B,EAAW,iBAEtDgC,EAAWhC,EAAW,aAAe,QAAa+B,EAAc,GAAKhC,EAAW,CAACF,EAAG+B,EAAG7B,CAAO,EAAI,CAACF,EAAG+B,CAAC,EACzG/D,GACFmE,EAAQ,KAAKnE,CAAoB,EAInC,IAAMoD,EAAQrB,EAAQ,QAClBhD,GACIgD,EAASC,EAAG+B,EAAGG,EAAc,EAAIhC,EAAU,OAAWlC,EAAsBmC,EAAYzC,EACxFe,CAAkB,EACtB,CAAC,OAAQ0D,EAAS,QAAUhC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,GAAI,CAAC,EAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAG5GnC,EAAQ,QACJjD,GACIiD,EAASqB,EAAOjB,EAAW,UAAYA,EAAW,SAAWA,EAAW,eACxEzB,CAAmB,EACvB,CAAC,OAAQ,CAAC0C,CAAK,EAAG,QAAS,CAAC,CAAC,CAAC,EAGlC,IAAMgB,EACDjC,EAAW,aAAe,QAAa+B,EAAc,GAAKZ,EAAa,CAACF,EAAOC,EAAGC,CAAS,EAAI,CAACF,EAAOC,CAAC,EAC7GtB,EAAQ,QACJ/C,GACI+C,EAASqB,EAAOC,EAAGa,EAAc,GAAKZ,EAAYA,EAAY,OAAWnB,EAAY1B,CAAkB,EAC3G,CAAC,OAAQ2D,EAAS,QAAUjC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,EAAG,CAAC,EAAI,CAAC,CAAC,CAAC,CAAC,CACzG,EAEEhF,GAAU,CAAC6C,EAAyBI,IAAoC,CAC5E,IAAMyB,EAAc,CAClBzB,EAAW,UACXA,EAAW,SACXA,EAAW,eACXA,EAAW,QACb,EACMkC,EAAIlC,EAAW,eACfmC,EAAInC,EA
AW,gBACfoC,EAAIpC,EAAW,SACfM,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKP,EAAW,SAAWM,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACM1C,EAAS,CAACsC,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,CAAC,EACjEV,EAAoC,CACxC,CAAC,QAAuB,KAAMgD,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EACnG,CAAC,QAAuB,KAAMpC,EAAW,QAAQ,EAAG,CAAC,QAAuB,KAAMA,EAAW,QAAQ,EACrG,CAAC,QAAuB,KAAMA,EAAW,UAAU,EACnD,CAAC,QAAuB,KAAMA,EAAW,WAAaA,EAAW,WAAaA,EAAW,WAAW,CACtG,EAEMT,EAAmBC,GAA+B,CACtD,IAAM6C,EAAU3C,EAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUmE,CAAW,EACpEa,EAAU5C,EAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUmE,CAAW,EACpEc,EAAU7C,EAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUmE,CAAW,EACpEjE,EAAQmD,EAAc,QAASrD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEkF,EAAS7B,EAAc,SAAUrD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACnEI,EAAOiD,EAAc,OAAQrD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/D6B,EAAW3B,EAAM,KAAK,QAEtBmC,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAC7G,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,MAAO,KAAM,KAAK,CACjG,EACA,MAAO;AAAA,sBACWW,CAAS;AAAA,oCACKnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAChCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,IACpEd,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBnC,EAAOgF,EAAQ9E,EAAM2E,EAASC,EAASC,CAAO,CAAC;AAAA,IACxG/C,EAAa,UAAU,CACrBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWanB,CAAQ;AAAA,mBACRA,CAAQ;AAAA,mBACRA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAoCzB,EAEA,OAAOS,EAAQ,QACX,CACE,KAAM,mBACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,CAC5F,EACA,cAAeW,EACf,gBAAArB,CACF,GACA,gBAAAK,CACF,EACA,CAAC,OAAAjC,EAAQ,QAAS,CAAC,GAAI,GAAI,EAAE,CAAC,CAAC,CACrC,EAEaN,GAAY,CAAC4C,EAAyBrC,IAAqC,CACtF,IAAM6D,EAAS1E,GAAwBkD,EAAQ,OAAQrC,CAAU,EAE3D,CAACsC,EAAG+B,EAAGV,CAAC,EAAInE,GAAQ6C,EAASwB,CAAM,EAEzC,OAAOtE,GACH8C,EAASC,EAAG+B,EAAGV,EAAGtB,EAAQ,OAAO,CAAC,EAAG,OAAW,OAAW,OAAWA,EAAQ,OAAO,CAAC,EAAGwB,EAAQ7D,CAAU,CACjH,ICpwBA,IAsBMkF,GAkCAC,GAgFOC,GAGAC,GA3IbC,GAAAC,EAAA,kBAGAC,KAEAC,IAEAC,KACAC,KAGAC,KAWMV,GAAiB,CAACW,EAA+BC,IAA0C,CAC/F,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAME,EAAkB,CAACC,EAA2BC,EAA6BC,IAAoB,CACnG,IAAMC,EAAIF,EAAS,OACnB,GAAIE,IAAMH,EAAO,OACf,MAAM,IAAI,MAAM,GAAGE,CAAO,uBAAuBC,CAAC,EAAE,EAEtDF,EAAS,QAAQ,CAACG,EAAGC,IAAM,CACzB,GAAID,IAAMJ,EAAOK,CAAC,EAChB,MAAM,IAAI,MAAM,GAAGH,CAAO,SAASG,CAAC,gBAAgB,CAExD,CAAC,CACH,EAEA,GAAIR,EAAO,CAAC,EAAE,KAAK,OAAS,EAAG,CAC7B,IAAMS,EAAQR,EAAW,SAAW,OAC/BA,EAAW,QAAUD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EACvBA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EAAE,OAAOA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,CAAC,EACxGA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGC,EAAW,QAAU,EAAI,MAAS,EAC9DC,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,qBAAqB,EAC5DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,iBAAiB,EACxDP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,oBAAoB,EAC3DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,mBAAmB,CAC5D,MACEP,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,qBAAqB,EAC1DE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,iBAAiB,EACtDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,oBAAoB,EACzDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,mBAAmB,CAE5D,EA
EMV,GACF,CAACU,EAA+BC,IAAiD,CAC/E,GAAM,CAAC,QAAAS,EAAS,QAAAC,EAAS,OAAAC,CAAM,EAAIX,EAC7BY,EAASb,EAAO,CAAC,EAAE,KACnBc,EAAaH,EAAUI,GAAiBF,EAAOA,EAAO,OAAS,CAAC,CAAC,EAAI,EACrEG,EAAcJ,IAAW,QAAUC,EAAO,OAAS,EAAIC,EAAa,EACpEG,EAAaC,EAAU,KAAKL,CAAM,EAAIC,EAEtCK,EAAoBR,EACpBS,EAAcD,EAAoBN,EAAO,OAASA,EAClDQ,EAAIC,EAAc,IAAKtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMc,CAAU,EACrES,EAAQD,EAAc,QAAStB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC9EQ,EAAOF,EAAc,OAAQtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC5ES,EAAYH,EAAc,YAAatB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACtFU,EAAWJ,EAAc,WAAYtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACpFW,EAAIC,EAAe,IAAK5B,EAAO,CAAC,EAAE,SAAUoB,EAAaN,CAAU,EAGnEe,EAAc,IAAc,CAChC,IAAIC,EAAU,GACd,GAAInB,EACFmB,EAAU,iBACNjB,EAAO,SAAW,EAAM,KACpBD,IAAW,OAAS,iBAAiBC,EAAO,OAAS,CAAC,OAAOC,CAAU,GACnD,kBAAkB,YAE1CF,IAAW,OACbkB,EAAU;AAAA,cACRH,EAAE,WAAW,gBAAiB,IAAK,GAAG,CAAC;AAAA,4BACzBA,EAAE,gBAAgB,eAAe,CAAC,QAC7C,CAELG,EAAU,kBAAkBP,EAAM,KAAK,OAAO;AAAA,qDACLV,EAAO,OAAS,CAAC,KAE1D,QAASL,EAAI,EAAGA,EAAIe,EAAM,KAAMf,IAC9BsB,GAAW,YAAYtB,CAAC,qBAAqBA,CAAC,KAEhDsB,GAAW,iBAAiBP,EAAM,gBAAgB,UAAU,CAAC,GAC/D,CAEF,OAAOO,CACT,EACMC,EAAgCC,GAAyB;AAAA,oBACjDtB,CAAO;AAAA,IACvBsB,EAAO,gBAAgB,aAAc,KAAK,EAAE,iBAAiBX,EAAGE,EAAOC,EAAMC,EAAWC,EAAUC,CAAC,CAAC;AAAA,IACpGK,EAAO,UAAU,CAAC;AAAA,IAClBA,EAAO,sCAAsC,qBAAqB,CAAC;AAAA,0BAC7CL,EAAE,gBAAgB,gBAAgBb,CAAU,EAAE,CAAC;AAAA,MACnEe,EAAY,CAAC;AAAA,kBACDN,EAAM,YAAY,SAAS,CAAC;AAAA,iBAC7BC,EAAK,YAAY,SAAS,CAAC;AAAA,sBACtBC,EAAU,YAAY,SAAS,CAAC;AAAA,qBACjCC,EAAS,YAAY,SAAS,CAAC;AAAA,cACtCL,EAAE,YAAY,YAAY,CAAC;AAAA;AAAA,MAEnCM,EAAE,YAAY,aAAc,OAAO,CAAC;AAAA,KAEpC,MAAO,CACL,KAAM,qBACN,YAAa,CACX,KAAM,GAAG1B,EAAW,OAAO,IAAIA,EAAW,MAAM,IAAIU,CAAO,IAAIG,CAAU,GACzE,kBAAmBK,EAAoB,CAAC,OAAQ,OAAQ,OAAQ,OAAQ,MAAM,EAAI,MACpF,EACA,gBAAiBY,EACjB,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM/B,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKiB,EAAa,EAAuB,CAAC,EAClE,gBAAiBE,EACb,CACE,CAAC,QAAuB,KAAMF,CAAU,EACxC,GAAGgB,EAA2BpB,CAAM,CACtC,EACA,CACE,CAAC,QAAuB,KAAMI,CAAU,CAC1C,CACN,EACF,CACF,EAES1B,GAA4BU,GACrCiC,GAA4BjC,CAAoE,EAEvFT,GAAY,CAAC2C,EAAyBlC,IAA8C,CAC/F,GAAM,CAAC,OAAAD,EAAQ,YAAAoC,CAAW,EAAID,EACxBE,EAAoB9C,GAAyB,CAAC,GAAGU,EAAY,YAAAmC,CAAW,CAAC,EAI/E,GAHIE,GAAI,OAAO,sBACbjD,GAAeW,EAAQqC,CAAiB,EAEtCpC,EAAW,aACb,MAAM,IAAI,MAAM,uDAAuD,EAEvEkC,EAAQ,QAAQ7C,GAAoCU,EAAQqC,CAAiB,CAAC,CAElF,ICtJA,IASME,GAkBAC,GAkCOC,GA7DbC,GAAAC,EAAA,kBAIAC,KAGAC,KAEMN,GAAkBO,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,IAAK,IAAK,IAAI,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC9C,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMN,GAA4BM,GAA+C,CAC/E,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAExBE,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAE3BG,EAAaC,EAAU,KAAKH,CAAW,EAAI,EAE3CI,EAAWL,EAAO,CAAC,EAAE,SACrBM,EAAQC,EAAc,QAASF,EAAUJ,EAAa,CAAC,EACvDO,EAAOD,EAAc,OAAQF,EAAU,CAACH,CAAQ,EAAG,CAAC,EACpDO,EAAWF,EAAc,WAAYF,EAAUJ,EAAa,CAAC,EAC7DS,EAASC,EAAe,SAAUN,EAAUJ,EAAa,CAAC,EAahE,MAAO,CACL,KAAM,UACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,CACpE,GACA,gBAjBuBS,GAA+B;AAAA,qBACrCV,CAAQ;AAAA,IACzBU,EAAa,iBAAiBN,EAAOE,EAAMC,EAAUC,CAAM,CAAC;AAAA;AAAA,IAE5DE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCT,CAAU,CAAC;AAAA,kBAClDG,EAAM,YAAY,YAAY,CAAC;AAAA,UACvCE,EAAK,YAAY,uBAAuB,CAAC,MAAMC,EAAS,YAAY,YAAY,CAAC;AAAA,MACrFC,EAAO,YAAY
,aAAc,OAAO,CAAC;AAAA,IAU7C,CACF,EAEaf,GAAWkB,GAAkC,CACxDpB,GAAeoB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQnB,GAAyBmB,EAAQ,MAAM,CAAC,CAC1D,IChEA,IAeMC,GA4BAC,GAiBOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAGAC,GASAC,GAIAC,GA8BPC,GAMOC,GAaAC,GAIAC,GAIAC,GAQAC,GAGAC,GAgBAC,GAcAC,GAKAC,GAIAC,GAIAC,GAMAC,GAOAC,GAIAC,GAIAC,GAIAC,GAMAC,GASAC,GAMAC,GASAC,GAIAC,GAIAC,GAIAC,GAIAC,GAEAC,GAKAC,GAUAC,GAGAC,GAOAC,GAQAC,GAIAC,GAmBAC,GAEAC,GAlVbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMlD,GACF,CAACmD,EAA4BC,EAAkBC,EAAuBC,EACrEC,EAAmCC,IAA8C,CAChF,IAAMC,EAAU,KAAK,KAAKL,EAAW,CAAC,EAElCM,EAAa,GACb,OAAOH,GAAa,SACtBG,EAAa,GAAGH,CAAQ,MAExBG,EAAaH,EAAS,GAAG,EAG3B,IAAMI,EAAQC,EAAc,YAAaP,EAAe,CAACI,CAAO,EAAG,CAAC,EAC9DI,EAASC,EAAe,aAAcR,EAAgB,CAACG,CAAO,EAAG,CAAC,EAExE,MAAO;AAAA,QACLN,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBQ,EAAOE,CAAM,CAAC;AAAA;AAAA,IAEnFL,GAA4B,EAAE;AAAA;AAAA,IAE9BL,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA;AAAA,cAE/DQ,EAAM,YAAY,YAAY,CAAC;AAAA,MACvCE,EAAO,YAAY,aAAcH,CAAU,CAAC;AAAA,IAE9C,EAEEzD,GACF,CAAC0D,EAAmBI,EAAcR,EAAmCC,EACpEQ,EAAmBV,EAAyBK,EAAM,YAA2B,CAC5E,KAAAI,EACA,YAAa,CAAC,KAAMC,EAAU,kBAAmB,CAAC,MAAM,CAAC,EACzD,gBAAiBb,GAAgBnD,GAC7BmD,EAAcc,EAAU,KAAKN,EAAM,IAAI,EAAGA,EAAM,SAAUL,EAAgBC,EAAUC,CAAwB,EAChH,WAAaU,IAAkB,CAC7B,QAAS,CAAC,CAAC,KAAMP,EAAM,KAAM,SAAUL,CAAc,CAAC,EACtD,cACI,CAAC,EAAG,KAAK,KAAKW,EAAU,KAAKC,EAAa,CAAC,EAAE,IAAI,EAAI,GAA0B,CAAgB,CAAC,EACpG,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKD,EAAU,KAAKN,EAAM,IAAI,EAAI,CAAC,CAAC,CACzE,CACF,EACF,GAESzD,GAAOiE,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahE,GAAQgE,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa/D,GAAS+D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa9D,GAAQ8D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa7D,GAAS6D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa5D,GAAQ4D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EACa3D,GAAS2D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAOa1D,GAAuB2D,GAChCC,GAA4BD,CAA0B,EAG7C1D,GAAO,CAACyD,EAAyBC,IAAqC,CACjF,IAAIE,EACJ,OAAQF,EAAW,GAAI,CACrB,QACEE,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,QACEA,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,OACEA,EAAO,aACP,MACF,QACE,MAAM,IAAI,WAAW,0EAA0EF,EAAW,EAAE,EAAE,CAClH,CACAD,EAAQ,QACJlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQG,EAAM,OAAWF,EAAW,SAAUA,EAAW,EAAE,CAAC,CAClH,EAOMzD,GAAoC4D,GAAkD,CAC1F,IAAMC,EAAOD,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAIE,GACtFC,EAAOH,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAII,GAC5F,OAAON,GAA4B,CAAC,IAAAG,EAAK,IAAAE,CAAG,CAAC,CAC/C,EAEa9D,GAAO,CAACuD,EAAyBS,IAAyC,CACrF,IAAMR,EAAaD,EAAQ,OAAO,SAAW,EAAIS,EAAiBjE,GAAiCwD,EAAQ,MAAM,EAC3GU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QACJlE,GACIkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,0BAA2B;AAAA,4BACnDF,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,4BAC9CS,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,EAEhEA,EAAW,QAAQ,EACvB,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEavD,GAAQsD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEarD,GAAOqD,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEapD,GAAQoD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAManD,GAAwBoD,GACjCC,GAA4BD,CAA6B,EAEhDnD,GAAM,CAACkD,EAAyBC,IAAsC,CACjF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK;AAAA,uBAChCF,CAAQ,IAAIT,EAAW,KA
AK;AAAA;AAAA,kBAEjCS,CAAQ,QAAQA,CAAQ;AAAA;AAAA;AAAA;AAAA,wBAIlBA,CAAQ,cAAcA,CAAQ;AAAA;AAAA,KAGhDT,EAAW,QAAQ,CAAC,CAC1B,EAEalD,GAAU,CAAC8D,EAAU,QAAU;AAAA,YAChCA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA;AAAA,sBAEGA,CAAO,cAAcA,CAAO;AAAA;AAAA;AAAA;AAAA,GAMrC7D,GAAOgD,GAAkC,CACpD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK7D,GAAQ2D,CAAQ,CAAC,CAAC,CAClH,EAEazD,GAAO+C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa9C,GAAS8C,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa7C,GAAQ6C,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,sBAAsBA,CAAC,0BAA2B7D,GAAQ2D,CAAQ,CAAC,CAAC,CACpH,EAEatD,GAAY,CAAC4C,EAAyBC,IAAsC,CACvF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaY,GAAK,8BAA8BA,CAAC,KAAKA,CAAC,KAAKA,CAAC,YAAYF,CAAQ,UACpG,6BAA6BA,CAAQ,IAAIT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,CACzF,EAEa5C,GAAO2C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEatD,GAAO0C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEarD,GAAcyC,GAAkC,CAC3DA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,aAAcY,GAAK,OAAOA,CAAC,EAAE,CAAC,CAChG,EAEapD,GAAQwC,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,WAAWF,CAAQ,SAAS,CAAC,CAC5G,EAEajD,GAAWuC,GAAkC,CACxDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,UAAWY,GAAK,sBAAsBA,CAAC,KAAK,CAAC,CAC/G,EAOalD,GAA8BuC,GACvCC,GAA4BD,CAG3B,EAEQtC,GAAc,CAACqC,EAAyBC,IAA4C,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,cACnBY,GAAK,YAAYF,CAAQ,oBAAoBA,CAAQ,WAAWT,EAAW,KAAK,MAAMW,CAAC,WAAWF,CAAQ,KACtGT,EAAW,IAAI,MACnB,OAAWA,EAAW,QAAQ,CAAC,CACrC,EAEarC,GAAOoC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEanC,GAAQmC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEalC,GAAQkC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEajC,GAAOiC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahC,GAAkB4C,GAAc,QAAQA,CAAC,yBAAyBA,CAAC,2BAA2BA,CAAC,MAE/F3C,GAAQ+B,GAAkC,CAErDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQhC,EAAc,CAAC,CACzF,EAEaE,GAAe,CAAC2C,EAAU,QAAU;AAAA,qBAC5BA,CAAO;AAAA,qBACPA,CAAO;AAAA,qBACPA,CAAO;AAAA;AAAA,oBAERA,CAAO,cAAcA,CAAO;AAAA,WACrC7C,GAAe,GAAG,CAAC;AAAA;AAAA,EAIjBG,GAAsB2C,GAC/B,uCAAuCA,CAAC,qBAAqBA,CAAC,MAAMA,CAAC,uBAAuBA,CAAC,GAEpF1C,GAAY4B,GAAkC,CACzD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,WAAY7B,GAAoBD,GAAawC,CAAQ,EAAG,OAC3EV,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,EAEa3B,GAAkB,CAAC2B,EAAyBC,IAAwC,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrE,OAAAA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,kBAAmBY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,8BACpF,wCAAwCF,CAAQ,KAAKT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,EAC5F,CACT,EAEa3B,GAAO0B,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEazB,GAAgB,CAACsC,EAAiBE,IAAkB;AAAA,qBAC5CF,CAAO,KAAKE,CAAK;AAAA,cACxBF,CAAO;AAAA,eACNA,CAAO;AAAA;AAAA,6BAEOA,CAAO,cAAcA,CAAO;AAAA;AAAA,kBAEvCA,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYZrC,GAAuBsC,GAAc,mBAAmBA,CAAC,IAEzDrC,GAAY,CAACuB,EAAyBC,IAAsC,CACvF,IAAMe,EAAQL,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAClEA,EA
AQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaxB,GAAqBD,GAAcyC,EAAOf,EAAW,KAAK,EAAGA,EAAW,SACxGD,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,ICvVA,IAUMiB,GAkBAC,GAyCOC,GArEbC,GAAAC,EAAA,kBAIAC,KAGAC,KACAC,KAEMP,GAAkBQ,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,KAAM,KAAM,KAAK,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EACjD,MAAM,IAAI,MAAM,4CAA4C,EAG9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMP,GAAkCO,GAA+C,CACrF,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAAK,MAAM,EACzCC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAI,EAElC,IAAMC,EAAQC,EAAc,QAASH,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM,CAAC,EACpEI,EAAOD,EAAc,OAAQH,EAAO,CAAC,EAAE,SAAU,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAAG,CAAC,EACvEK,EAASC,EAAe,SAAUN,EAAO,CAAC,EAAE,SAAUC,EAAa,CAAC,EAEpEM,EAAaC,EAAU,KAAKP,CAAW,EAAI,EAC3CQ,EAAWC,GAA4BV,EAAO,CAAC,EAAE,QAAQ,EAsB/D,MAAO,CACL,KAAM,gBACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKO,EAAa,EAAuB,CAAC,CACpE,GACA,gBA1BuBI,GAA+B;AAAA;AAAA,yBAEjCX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,EAAI,CAAC;AAAA;AAAA,IAE9CW,EAAa,iBAAiBT,EAAOE,EAAMC,CAAM,CAAC;AAAA;AAAA,IAElDO,GAAQH,CAAQ,CAAC;AAAA;AAAA,IAEjBE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCJ,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQ9DF,EAAO,YAAY,aAAc,uBAAuB,CAAC;AAAA,IAU7D,CACF,EAEaX,GAAiBmB,GAAkC,CAC9DrB,GAAeqB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQpB,GAA+BoB,EAAQ,MAAM,CAAC,CAChE,ICxEA,IAiBMC,GAqGAC,GAsEAC,GAQOC,GAIAC,GAIAC,GAMAC,GAIAC,GAsBAC,GAIAC,GAMAC,GAMAC,GAMAC,GAlQbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KASMjB,GACF,CAACkB,EAA4BC,EAA0BC,EAA0BC,EAChFC,EAAoBC,EAAsBC,EAAsCC,EAChFC,EAAeC,EAAeC,EAAoBC,IAAsC,CACvF,IAAIC,EACAC,EACA,OAAON,GAAa,SACtBK,EAAmBC,EAAmB,CAACC,EAAGC,IAAM,GAAGR,CAAQ,KAAKO,CAAC,MAAMC,CAAC,KAC/D,OAAOR,GAAa,WAC7BK,EAAmBC,EAAmBN,GAEtCK,EAAmBL,EAAS,OAC5BM,EAAmBN,EAAS,QAG9B,IAAMS,EAASC,EAAe,aAAcP,EAAYP,EAAW,OAAQ,CAAC,EACtEW,EAAII,EAAc,QAASV,EAAOP,EAAM,OAAQ,CAAC,EACjDc,EAAIG,EAAc,QAAST,EAAOP,EAAM,OAAQ,CAAC,EAEnDiB,EACJ,GAAIf,EACF,GAAIC,EAAa,CACf,IAAMe,EAAgBC,EAAU,KAAKpB,CAAK,IAAM,EAC1CqB,EAAgBD,EAAU,KAAKnB,CAAK,IAAM,EAC1CqB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC3EuB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC7EkB,GAAiBE,EACnBH,EAAaH,EAAO,YAChB,aACAH,EACIO,EAAgB,GAAGN,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,EACvFQ,EAAgB,GAAGP,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,CAAC,CAAC,EAEjGI,EAAa;AAAA,kCACSH,EAAO,gBAAgB,iBAAiB,CAAC;AAAA,4BAC/CF,EAAE,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,4BACrDD,EAAE,2BAA2B,gBAAiBC,CAAM,CAAC;AAAA,cAEjEA,EAAO,YACH,aACAH,EACIP,GAA+BiB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,kBACpDR,GAA+BkB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAAA,WAGvF,MACEI,EAAaH,EAAO,YAChB,aAAcH,EAAiBC,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAEzF,CACL,GAAI,CAACV,EACH,MAAM,IAAI,MAAM,sFAAsF,EAGxG,IAAMoB,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,eAAeF,CAAC,eAAeA,CAAC,IAC9CG,EAAc,eAAeH,CAAC,eAAeA,CAAC,IACpD,MAAO;AAAA,+BACcA,CAAC,MAAMX,EAAO,gBAAgB,qBAAqBW,CAAC,GAAG,CAAC;AAAA,yBAC9DA,CAAC,MAAMb,EAAE,2BAA2B,gBAAgBa,CAAC,GAAIX,CAAM,CAAC;AAAA,yBAChEW,CAAC,MAAMZ,EAAE,2BAA2B,gBAAgBY,CAAC,GAAIX,CAAM,CAAC;AAAA,wBACjEW,CAAC,aAAaA,CAAC;AAAA,wBACfA,CAAC,aAAaA,CAAC;AAAA,4BACXA,CAAC,aAAaA,CAAC;AAAA,4BACfA,CAAC,aAAaA,CAAC;AAAA,cAC7BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIhB,EAAiBiB,EAAaC,CAAW,CAAC;AAAA,WAE9E,EACIpB,IAAe,EACjBS,EAAa;AAAA;AA
AA,cAETM,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,uGAGtCN,EAAa;AAAA,cACTM,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,WAGrD,CAEA,MAAO;AAAA,UACHzB,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBc,EAAGC,EAAGC,CAAM,CAAC;AAAA;AAAA,UAE9EL,GAA4B,EAAE;AAAA;AAAA,UAE9BX,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEmB,CAAU;AAAA,QAEhB,EAEEpC,GACF,CAACgD,EAAcC,EAAkBlB,EAAeC,EAAeR,EAC9DI,EAAmCsB,EAAyBnB,EAAE,WAA0B,CACvF,IAAMoB,EAAc,CAACb,EAAU,SAASP,EAAE,KAAMC,EAAE,IAAI,EAClDoB,EAAcrB,EAAE,KAChBsB,EAAaf,EAAU,KAAKP,EAAE,IAAI,EAElCV,EAAY,GACZE,EAA8B,GAG5B+B,EAAc,CAACH,CAAW,EAChC,GAAIA,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUzB,EAAE,KAAMC,EAAE,KAAM,EAAK,EACrE,GAAI,CAACuB,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEH,EAAcG,EACdF,EAAaf,EAAU,KAAKc,CAAW,EACvC,IAAMf,EAAgBC,EAAU,KAAKP,EAAE,IAAI,IAAM,EAC3CQ,EAAgBD,EAAU,KAAKN,EAAE,IAAI,IAAM,EAC3CQ,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EAC9EU,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EACpFsB,EAAY,KAAKjB,CAAa,EAC9BiB,EAAY,KAAKf,CAAa,EAC9Be,EAAY,KAAKd,CAAoB,EACrCc,EAAY,KAAKb,CAAoB,EAErC,IAAIgB,EAAkB,EACtB,QAASC,EAAI,EAAGA,EAAIN,EAAY,OAAQM,IAAK,CAC3C,IAAMC,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS2B,CAAC,GAAK,EACpCE,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS0B,CAAC,GAAK,EAC1C,GAAIC,IAASC,EACXH,GAAmBE,MAEnB,MAEJ,CACIF,EAAkB,IAAM,GAC1BlC,EAA8B,GAC9BF,EAAY,KACHgB,GAAiBE,GAAiBC,GAAwBC,KACnEpB,EAAY,GAEhB,MAEEA,EAAY,GAEd,OAAAiC,EAAY,KAAKjC,CAAS,EAEnB,CACL,KAAA2B,EACA,YAAa,CACX,KAAMC,EAAWK,EAAY,IAAKV,GAAMA,EAAE,SAAS,CAAC,EAAE,KAAK,GAAG,EAC9D,kBAAmB,CAAC,OAAQ,MAAM,CACpC,EACA,gBAAkB3B,GAAiBlB,GAC/BkB,EAAcc,EAAE,KAAMC,EAAE,KAAMoB,EAAa/B,EAAW8B,EAAa5B,EAA6BC,EAChGO,EAAE,SAAUC,EAAE,SAAUkB,EAAgBtB,CAAwB,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAsB,CAAC,EAC3F,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKf,EAAU,KAAKc,CAAW,EAAI,CAAC,CAAC,EACxE,GAAGS,EAA2B9B,EAAE,KAAMC,EAAE,KAAMoB,CAAW,CAC3D,CACF,EACF,CACF,EAEEnD,GACF,CAAC6D,EAAyBd,EAAcxB,EAA8BI,EACrEqB,EAAmBC,IAAkC,CACpDY,EAAQ,QAAQ9D,GACZgD,EAAMC,GAAY,GAAIa,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGtC,EAAUI,EACtEsB,CAAc,CAAC,CACrB,EAEShD,GAAO4D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa7B,GAAO2D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa5B,GAAS0D,GAAkC,CACtD7D,GACI6D,EAAS,QAAU,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEa3B,GAAOyD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa1B,GAAOwD,GAAkC,CACpD,IAAMC,EAAO5B,EAAc,QAAS2B,EAAQ,OAAO,CAAC,EAAE,SAAUA,EAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,MAE7F7D,GACI6D,EAAS,MAAQ,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,cAAcD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,qBAAqBD,CAAC,IAAIC,CAAC,GAAG,EAC7G;AAAA,wBACkB+B,CAAI,SAASA,CAAI,QAAQA,CAAI;AAAA,iBACpCA,CAAI;AAAA,iBACJA,CAAI;AAAA,uBACEA,CAAI;AAAA,iBACVA,CAAI;AAAA;AAAA,+BAEUA,CAAI,6BAA6BA,CAAI,qBAAqBA,CAAI,IAV1EA,IAAS,MAAQ,QAAU,EAW5B;AAAA;AAAA,oCAEkBA,CAAI,eAAeA,CAAI,cAAcA,CAAI;AAAA;AAAA,oBAEzDA,CAAI;AAAA;AAAA,OAEjB,CACP,EAEaxD,GAAOuD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEaxB,GAAWsD,GAAkC,CACxD7D,GACI6D,EAAS,UAAY,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEavB,G
AAQqD,GAAkC,CACrD7D,GACI6D,EAAS,OAAS,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACnG,QAAwB,CAC9B,EAEatB,GAAkBoD,GAAkC,CAC/D7D,GACI6D,EAAS,iBAAmB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAC3G,OAAW,QAAwB,CACzC,EAEarB,GAAemD,GAAkC,CAC5D7D,GACI6D,EAAS,cAAgB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EACxG,OAAW,QAAwB,CACzC,ICtQA,IAeMgC,GA4BAC,GAWAC,GAmBAC,GAkEOC,GAcAC,GAzJbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMX,GAAiB,CAACY,EAA+BC,IAAuB,CAC5E,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,IAAME,EAAiB,EACjBC,EAAiBH,EAAOE,CAAc,EACtCE,EAAYD,EAAe,SAC3BE,EAAYF,EAAe,KAAK,OACtCH,EAAO,QAAQ,CAACM,EAAOC,IAAM,CAC3B,GAAIA,IAAML,EAIV,IAAII,EAAM,WAAaF,EACrB,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIE,EAAM,KAAK,SAAWD,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAE5DC,EAAM,KAAK,QAAQ,CAACE,EAAKD,IAAM,CAC7B,GAAIA,IAAMN,GAAQO,IAAQL,EAAe,KAAKI,CAAC,EAC7C,MAAM,IAAI,MAAM,kCAAkC,CAEtD,CAAC,EACH,CAAC,CACH,EAEMlB,GAA0B,CAACoB,EAAyBC,IAAwC;AAAA;AAAA,wCAE1DD,CAAe,MAAMC,CAAmB;AAAA,gCAChDD,CAAe;AAAA;AAAA;AAAA;AAAA;AAAA,aAKlCA,CAAe;AAAA,KAGtBnB,GAAmB,CAACU,EAAkCW,IAA0B,CACpF,IAAMF,EAAkBT,EAAO,OAEzBY,EAAsB,CAAC,EAC7B,QAASL,EAAI,EAAGA,EAAIE,EAAiB,EAAEF,EAAG,CACxC,IAAMM,EAAgBF,EAAO,YAAY,aAAcX,EAAOO,CAAC,EAAE,aAAa,SAAS,CAAC,EACpFE,IAAoB,EACtBG,EAAU,KAAKC,CAAa,EACnBN,IAAM,EACfK,EAAU,KAAK,qBAAqBL,CAAC,QAAQM,CAAa,IAAI,EACrDN,IAAME,EAAkB,EACjCG,EAAU,KAAK,UAAUC,CAAa,IAAI,EAE1CD,EAAU,KAAK,0BAA0BL,CAAC,OAAOM,CAAa,IAAI,CAEtE,CACA,OAAOD,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMrB,GACF,CAACS,EAA+Bc,EAAsBC,EAAuBC,IAAoC,CAC/G,IAAMC,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAmB,IAAI,MAAcnB,EAAO,MAAM,EAClDoB,EAAY,IAAI,MAAqBpB,EAAO,MAAM,EAEpDqB,EAAc,EACZC,EAAwD,CAAC,EACzDC,EAAa,CAAC,EACdC,EAAoC,CAAC,CAAC,QAAuB,KAAMP,CAAU,CAAC,EACpF,QAASV,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCc,GAAerB,EAAOO,CAAC,EAAE,KAAKO,CAAY,EAC1CK,EAAiBZ,CAAC,EAAIc,EACtBE,EAAW,KAAKvB,EAAOO,CAAC,EAAE,KAAK,MAAM,EACrCa,EAAUb,CAAC,EAAIkB,EAAc,QAAQlB,CAAC,GAAIS,EAAUO,EAAWhB,CAAC,CAAC,EACjEe,EAAkB,KAAK,MAAM,EAC7BE,EAAgB,KAAK,CAAC,QAAuB,KAAML,EAAiBZ,CAAC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCiB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAOO,CAAC,EAAE,IAAI,CAAC,EAEpEiB,EAAgB,KAAK,GAAGE,EAA2BX,CAAW,CAAC,EAE/D,IAAMJ,EAASgB,EAAe,SAAUX,EAAUD,EAAY,MAAM,EAC9Da,EAAcjB,EAAO,WAAW,UAAWG,CAAY,EACvDJ,EACF,MAAM,KAAK,MAAMS,EAAiB,MAAM,EAAE,KAAK,CAAC,EAAE,IAAIZ,GAAK,4BAA4BA,CAAC,EAAE,EAAE,KAAK,GAAG,EAClGsB,EAAmBC,GAA+B;AAAA;AAAA,KAEzD,IAAM,CACHA,EAAa,gBAAgB,aAAc,KAAK,EAChD,QAASvB,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjCuB,EAAa,gBAAgB,mBAAmBvB,CAAC,GAAI,KAAK,EAE5D,OAAOuB,EAAa,iBAAiB,GAAGV,EAAWT,CAAM,CAC3D,GAAG,CAAC;AAAA;AAAA,IAENtB,GAAwB8B,EAAiB,OAAQT,CAAmB,CAAC;AAAA;AAAA,IAErEoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DnB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,2CAEbiB,CAAW;AAAA;AAAA,0CAEZT,EAAiB,MAAM,MAAMT,CAAmB;AAAA,QAClFkB,CAAW;AAAA;AAAA;AAAA,MAGbtC,GAAiB8B,EAAWT,CAAM,CAAC;AAAA,KAGnC,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGG,CAAY,GAAI,kBAAAQ,CAAiB,EACxD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAa,SAAAC,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKC,EAAa,EAAuB,CAAC,EAClE,gBAAAO,CACF,GACA,gBAAAK,CACF,CACF,EAESrC,GAAS,CAACuC,EAAyBC,IAAuC,CACrF,IAAMhC,EAAS+B,EAAQ,OACjBE,EAAajC,EAAO,CAAC,EAAE,KACvBc,EAAeI,EAAU,cAAcc,EAAW,KAAMC,EAAW,MAAM,EAC/E7C,GAAeY,EAAQc,CAAY,EACnC,IAAMC,EAAckB,EAAW,MAAM,EACrClB,EAAYD,CAAY,EACpBd,EAAO,OAAO,CAACkC,EAAK5B,IAAU4B,GAAO5B,EAAM,KAAK,OAASQ,EAAeR,EAAM,KAAKQ,CAAY,EAAI,GAAI,CAAC,EAE5G,IAAMqB,EAAiBnC,EAAO,O
AAOM,GAASY,EAAU,KAAKZ,EAAM,IAAI,EAAI,CAAC,EAC5EyB,EAAQ,QACJxC,GAAwB4C,EAAgBrB,EAAcC,EAAaf,EAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQmC,CAAc,CAAC,CACtH,EAEa1C,GAAyBuC,GAClCI,GAA4B,CAAC,KAAMJ,EAAW,IAAc,CAAC,IC1JjE,IAiBaK,GAuBAC,GAaAC,GAUAC,GA/DbC,GAAAC,EAAA,kBAGAC,IACAC,KAaaP,GACT,CAACQ,EAA0CC,EAAmBC,EAAW,QAAkB,CACzF,OAAQF,EAAW,WAAY,CAC7B,IAAK,OACH,MAAO,sBAAsBC,CAAS,UACxC,IAAK,UACH,MAAO,YAAYA,CAAS,YAAYA,CAAS,yBACnD,IAAK,OACH,MAAO,wBAAwBA,CAAS,IAAIC,CAAQ,yBAAyBD,CAAS,IAClFC,CAAQ,yBACd,IAAK,cACH,MAAO,eAAeD,CAAS,cAAcA,CAAS,UAAUC,CAAQ,8BACpEA,CAAQ,qBACd,IAAK,YACH,MAAO,kBAAkBA,CAAQ,6CAA6CD,CAAS,UACzF,IAAK,GACH,MAAO,GAET,QACE,MAAM,IAAI,MAAM,0BAA0BD,EAAW,UAAU,EAAE,CACrE,CACF,EAESP,GACT,CAACO,EAA0CG,IAAqC,CAC1EH,EAAW,aAAe,OAC5BG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,OAAQ,EAAG,CAAC,OAAsB,KAAMA,EAAW,OAAQ,CAAC,EAC/FA,EAAW,aAAe,cACnCG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,KAAM,EAAG,CAAC,OAAsB,KAAMA,EAAW,IAAK,CAAC,EAC1FA,EAAW,aAAe,aACnCG,EAAe,KAAK,CAAC,OAAsB,KAAMH,EAAW,KAAM,CAAC,CAEvE,EAESN,GAA2B,CAACM,EAA0CI,IAAgC,CAC7GJ,EAAW,aAAe,OAC5BI,EAAS,KAAK,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,WAAY,KAAM,KAAK,CAAC,EACrEJ,EAAW,aAAe,cACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAAC,EAC9DJ,EAAW,aAAe,aACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAE9C,EAEaT,GACRK,GAAgF,CAC/E,IAAMK,EAAaL,GAAY,YAAwB,GACvD,GAAIK,IAAe,cAAe,CAChC,GAAM,CAACC,EAAOC,CAAI,EAAIP,GAAY,mBAAyC,CAAC,GAAK,EAAG,EACpF,MAAO,CAAC,WAAAK,EAAY,MAAAC,EAAO,KAAAC,CAAI,CACjC,SAAWF,IAAe,OAAQ,CAChC,GAAM,CAACG,EAASC,CAAO,EAAIT,GAAY,mBAAyC,CAACU,GAAUC,EAAQ,EACnG,MAAO,CAAC,WAAAN,EAAY,QAAAI,EAAS,QAAAD,CAAO,CACtC,SAAWH,IAAe,YAAa,CACrC,GAAM,CAACC,CAAK,EAAIN,GAAY,mBAAiC,CAAC,GAAI,EAClE,MAAO,CAAC,WAAAK,EAAY,MAAAC,CAAK,CAC3B,CACA,MAAO,CAAC,WAAAD,CAAU,CACpB,IC7EJ,IAqBaO,GAeAC,GApCbC,GAAAC,EAAA,kBAqBaH,GAAc,CAACI,EAAmBC,IAAqB,CAClE,OAAQD,EAAW,CACjB,IAAK,GACH,OAAOC,EACT,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,QACE,MAAM,IAAI,MAAM,GAAGD,CAAS,8BAA8B,CAC9D,CACF,EAEaH,GAAeK,GAA6B;AAAA,QACjDA,EAAU,iDAAmD,EAAE;UCrCvE,IAqBaC,GArBbC,GAAAC,EAAA,kBAqBaF,GAAiBG,GAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAO3CA,CAAS,YAAYA,CAAS,YAAYA,CAAS;AAAA;IC5B7D,IA8BMC,GAiBAC,GAyBOC,GAuFPC,GAiBAC,GAKOC,GA0JPC,GA8EOC,GA7ZbC,GAAAC,EAAA,kBAqBAC,IAEAC,KAEAC,KACAC,KAEAC,KAEMd,GAA6B,CAACe,EAAoBC,IAClDD,EACK;AAAA;AAAA;AAAA,wDAG6CC,EAAY,iBAAmB,EAAE;AAAA,UAI9E;AAAA;AAAA;AAAA,gDAGqCA,EAAY,iBAAmB,EAAE;AAAA,UAK3Ef,GAAyB,CAACgB,EAAqBC,IAC/CD,EACK;AAAA;AAAA;AAAA;AAAA,UAIDC,IAAqB,EAAI,GAAK,6DAA6D;AAAA;AAAA;AAAA;AAAA;AAAA,YAKzFA,IAAqB,EAAI,GAAK,2CAA2C;AAAA,WAG1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMCA,IAAqB,EAAI,GAAK,yCAAyC;AAAA,WAKtEhB,GACT,CAACiB,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,KAAe,CACpF,IAAMC,EAAaL,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CO,EAAaN,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CQ,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EACtCP,EAAmBS,EAAaP,EAAc,CAAC,EAC/CS,EAAgBP,EAAYF,EAAc,CAAC,EAEjD,GAAI,GAAIH,GAAcC,IAAqB,GAAKC,EAAc,CAAC,IAAM,GAC7D,CAACF,IAAeC,IAAqB,GAAKA,IAAqB,KACjES,EAAaP,EAAc,CAAC,IAAM,GAAKE,EAAYF,EAAc,CAAC,IAAM,GAAKD,EAAc,CAAC,IAAM,GACtG,MAAM,IAAI,MAAM,iBAAiBF,CAAU,8BACvCC,CAAgB,yBAAyBC,EAAc,CAAC,CAAC;AAAA,oCACjCD,CAAgB;AAAA,eACrCS,CAAU,yCAAyCP,EAAc,CAAC,CAAC,eACtEE,CAAS,0CAA0CF,EAAc,CAAC,CAAC,kBACnED,EAAc,CAAC,CAAC,aAAa,EAEnC,MAAO;AAAA,yCAC4BD,CAAgB,IAAIG,CAAI,MAAMM,EAAaT,CAAgB,MAAMU,CAAU;AAAA,2CACzEP,CAAI,MAAMK,EAAaP,EAAc,CAAC,CAAC,MAAMG,CAAS;AAAA;AAAA,uBAE1EH,EAAc,CAAC,CAAC;AAAA,uBAChBA,EAAc,CAAC,CAAC;AAAA,2BACZD,CAAgB;AAAA,oBACvBI,CAAS;AAAA;AAAA,2BAEFF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAUrEG,EAAS,IAAM,iBAAiB;AAAA,IAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,8CACvCS,CAAU;AAAA;AAAA,oBAEpCF,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,iBACpGC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,wBAE9CH,CAAI;AAAA;AAAA;AAAA,8BAGEQ,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAM/B7B,GAA2BiB,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,0CAInBa,CAAa;AAAA;AAAA;AAAA,sFAI7Cb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAU/BE,IAAqB,EAAI,GAAK,4DAA4D;AAAA;AAAA,YAE1FjB,GAAuBgB,EAAYC,CAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5D,EAEEf,GAAyB,CAACY,EAAoBC,IAC9CD,EACK;AAAA;AAAA;AAAA,yCAG8BC,EAAY,iBAAmB,EAAE;AAAA,cAI/D;AAAA;AAAA;AAAA,iCAGsBA,EAAY,iBAAmB,EAAE;AAAA,cAK5DZ,GAA2Ba,GAC7BA,EAAa,gDAAkD,gDAItDZ,GACT,CAACc,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,GACtEM,EAA4B,KAAkB,CAC7C,IAAML,EAAaN,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CM,EAAaP,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CO,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EAE5C,GAAI,EAAEG,EAAaR,EAAc,CAAC,IAAM,GAAKO,EAAaP,EAAc,CAAC,IAAM,GACzEE,EAAYF,EAAc,CAAC,IAAM,GACrC,MAAM,IAAI,MAAM,cAAcQ,CAAU,yCACpCR,EAAc,CAAC,CAAC,gBAAgBO,CAAU,yCAC1CP,EAAc,CAAC,CAAC,eAAeE,CAAS,yCAAyCF,EAAc,CAAC,CAAC,EAAE,EAEzG,IAAMW,EAAgBH,EAAaR,EAAc,CAAC,EAC5CY,EAAgBL,EAAaP,EAAc,CAAC,EAC5CS,EAAgBP,EAAYF,EAAc,CAAC,EAC3Ca,EAAgBH,EAClB;AAAA;AAAA;AAAA,gDAGsCL,CAAU;AAAA,gDACVC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA,iDAKTE,CAAU,2BAA2BR,EAAc,CAAC,CAAC;AAAA,mDACnDO,CAAU,2BAA2BP,EAAc,CAAC,CAAC;AAAA,YAC5FjB,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,iDAIRM,CAAS,2BAA2BF,EAAc,CAAC,CAAC;AAAA,uDAC9CM,CAAU,2BAA2BN,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,uCAGrEJ,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAO5CK,CAAI;AAAA;AAAA;AAAA,2DAG2BD,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,0BAI7DH,EAAa,oCAAoCG,EAAc,CAAC,CAAC,KACpD,iCAAiCA,EAAc,CAAC,CAAC,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAUzBA,EAAc,CAAC,CAAC;AAAA;AAAA,4DAEdA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,MAKlE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4CAMkCK,CAAU;AAAA;AAAA,kCAEpBM,CAAa;AAAA,kCACbC,CAAa;AAAA,kCACbH,CAAa;AAAA;AAAA;AAAA;AAAA,sCAITE,CAAa;AAAA,wCACXC,CAAa;AAAA;AAAA;AAAA,QAG7C7B,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sCAKfa,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMrBb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOvCK,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOpBjB,GAAwBa,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBrC,MAAO;AAAA,yCAC4BI,CAAI,KAAKM,CAAU,MAAMC,CAAU;AAAA,yCACnCP,CAAI,KAAKK,CAAU,MAAMJ,CAAS;AAAA,yBAClDH,EAAc,CAAC,CAAC;AAAA,yBAChBA,EAAc,CAAC,CAAC;AAAA,sBACnBG,CAAS;AAAA;AAAA,2BAEJF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,kBAInEG,EAAS,IAAM,iBAAiB;AAAA,MAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,sBAE7EO,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,mBACxFC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,4BAE5CH,CAAI;AAAA,MAC1BY,CAAa;AAAA;AAAA,CAGf,EAEE3B,GACF,CAAC4B,EAAmBC,EAAkBC,EAAyBC,EAC9DC,EAAuCC,EAAiB,KAAkB,CACzE,GAAM,CAACC,EAAaC,EAAaC,CAAU,EAAIJ,EACzC,CAACK,EAAeC,EAAWC,EAAWC,CAAc,EAAIT,EACxDU,EAAiBC,GAAiBR,EAAaE,CAAU,EACzDO,EAAiBD,GAAiBP,EAAaC,CAAU,EACzDQ,EAAWC,GAA4Bd,EAAU,CAAC,EAAE,KAAK,MAAM,EAC/De,EAAc,IAAM,CACxB,IAAMC,EAAQT,EAAU,KAClBU,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBX,EAAU,KAAK,OAAO,IACpD,QAASY,EAAIH,EAAQ,EAAI,EAAGI,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAV,EAAe,QAAQS,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcF,EAAQ,CAAC;AAAA,8BACXA,EAAQ
,CAAC,kBACxBE,CACT,EACMG,EAAc,IAAM,CACxB,IAAMC,EAAQd,EAAU,KAClBS,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBV,EAAU,KAAK,OAAO,IACpD,QAASW,EAAIG,EAAQ,EAAI,EAAGF,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAR,EAAe,QAAQO,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcI,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBJ,CACT,EAwCA,MAvCe;AAAA,kEAC6CZ,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBkB,EAAY,CAAC;AAAA,kBACLR,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kEAKcD,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBwB,EAAY,CAAC;AAAA,kBACLb,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6DAKSe,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BACnEhB,CAAS;AAAA;AAAA;AAAA;AAAA,UAKzBC,EACI,mBAAmBI,EAAiB,cAAgB,GAAGqB,GAAY1B,EAAWgB,CAAQ,CAAC,aAAa,IAChE,EAAsC;AAAA,UAC9Ed,CAAe;AAAA,UACfU,EAAe,aAAa,oBAAqB,OAAO,CAAC;AAAA;AAAA;AAAA,KAK/D,EAESvC,GACT,CAACsD,EAA+BC,EAAoDC,EACnFC,EACAzB,EAAiB,KAAyD,CACzE,IAAM0B,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAASL,EAAO,CAAC,EAAE,KACnBM,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAYL,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAC5FO,EAAYC,EAAU,KAAKF,CAAS,EACpCG,EAAYP,EAAOA,EAAO,OAAS,CAAC,EACpCQ,EAAWR,EAAOA,EAAO,OAAS,CAAC,EACnCS,EAAYR,EAAOA,EAAO,OAAS,CAAC,EACpCS,EAASF,EAAW,IAAM,GAAKC,EAAY,IAAM,EAGjDE,EAAoBJ,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDpD,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDyD,EAAW,CACf,KAAK,KAAKH,EAAYtD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKJ,EAAYpD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKN,EAAYlD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,CAC/D,EAEME,EAAaH,EAAS,EAAI,EAC1BI,EAAa,CAAC,GAAGZ,EAAYK,EAAWC,EAAWK,CAAU,EAC7DzB,EAAQ0B,EAAW,OACnBC,EAAa,CAAC,GAAGZ,EAAYK,EAAUC,EAAYI,CAAU,EAC7DnB,EAAQqB,EAAW,OACnBC,EAAkB,CAACX,EAAWE,EAAWE,EAAYI,CAAU,EAC/DI,EAAoC,CACxC,CAAC,OAAsB,KAAMV,CAAS,EAAG,CAAC,OAAsB,KAAME,CAAS,EAC/E,CAAC,OAAsB,KAAMD,CAAQ,CACvC,EACAU,GAA6BrB,EAAsBoB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2Bf,EAAWU,EAAYC,CAAU,CAAC,EACrF,IAAMK,EAAwD,CAAC,OAAQ,MAAM,EAEvElD,EAAU0B,EAAO,OAAS,EAC5B1B,IACF+C,EAAgB,KAAK,GAAGE,EAA2BvB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEwB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BH,CAAe,CAAC,EAEnE,IAAMK,EAAmBC,IAA+B,CACtD,IAAMjC,GAAYe,EAAU,OACtBrD,EAAYwE,GAAiB,YAAa3B,EAAO,CAAC,EAAE,SAAUP,GAAW,CAAC,EAC1EJ,GAAWC,GAA4BU,EAAO,CAAC,EAAE,QAAQ,EAEzD4B,EAAIC,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUR,EAAOyB,CAAU,EAC5Da,GAAID,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUF,EAAOmB,CAAU,EAC5Dc,GAAS9C,EAAe,SAAUe,EAAO,CAAC,EAAE,SAAUoB,EAAgB,OAAQH,CAAU,EACxFe,GAAiB,CAACJ,EAAGE,EAAC,EAC5B,GAAIxD,EAAS,CACX,IAAM2D,EAAiBvD,EAAiBuC,EAAa,EACrDe,GAAe,KAAKH,EAAc,OAAQ7B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQiC,CAAc,CAAC,CACtG,CACA,IAAMC,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAC7GC,GAAyBlC,EAAsBiC,CAAQ,EACvD,IAAME,GAAW9C,GAA4ByC,GAAO,KAAK,MAAM,EACzDxD,EAAkB8D,GAAqBpC,EAAsB8B,GAAO,KAAK,MAAOK,EAAQ,EACxFE,GAAmB7F,GACrBwE,EAAY3C,EAASC,EAAiB,CAACpB,EAAWyE,EAAGE,GAAGC,EAAM,EAAG,CAACzB,EAAYC,EAAYC,CAAS,EACnG9B,CAAc,EAClB,MAAO;AAAA,IAEHgD,GAAa,iBAAiBQ,CAAQ,EAAE,0BAA0B/E,CAAS,EAAE,iBACzE,GAAG6E,GAAgBD,EAAM,CAAC;AAAA,IACtCO,EAAgB;AAAA,IAERxB,EAASzE,GAA2B0E,EAAmBxD,EAAe8B,GAAUlC,CAAS,EAChFX,GAAuBuE,EAAmBxD,EAAe8B,GAAUlC,CAAS,CAAC;AAAA,oBAE5F,EACA,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAG4D,CAAiB,IAAId,EAAqB,UAAU,IAAIa,CAAM,IAAIpC,CAAc,GACzF,kBAAA8C,CACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAa,SAAUF,EAA
O,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGgB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAK,CACF,GACA,gBAAAI,CACF,CACF,ICtfJ,IAiCMc,GA4HOC,GA7JbC,GAAAC,EAAA,kBAqBAC,IACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAoBC,EAAoBC,EAAmBC,EAAU,GAC9FC,EAA4BC,EAAoB,EAAGC,EAAoB,EAAGC,EAAmB,EAC7FC,EAAW,QAAkB,CAC5B,IAAMC,EAAeF,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,uBACT,IAAK,GACH,MAAO,kBAAkBC,CAAQ,8CACnC,IAAK,GACH,MAAO,2BACT,QACE,MAAM,IAAI,MAAM,oBAAoBD,CAAgB,oBAAoB,CAC5E,CACF,EACMG,EAAeH,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,oDACT,IAAK,GACH,MAAO,wDACT,QACE,MAAM,IAAI,MAAM,oBAAoBA,CAAgB,oBAAoB,CAC5E,CACF,EACMI,EAAgBZ,EAAiB;AAAA;AAAA,MAGA;AAAA;AAAA,MAIjCa,EAAkBb,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCc,EAAUd,EAAiB,2BAA6B,2BACxDe,EAASf,EAAiB,2BAA6B,2BACvDgB,EAAMhB,EAAiB,MAAQ,MAC/BiB,EAAMjB,EAAiB,MAAQ,MAC/BkB,EAAe;AAAA;AAAA,qBAENlB,EAAiB,gCAAkC,+BAA+B;AAAA,mBACpFgB,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA,iBAELC,CAAG;AAAA,iBACHA,CAAG;AAAA;AAAA;AAAA,gBAGJA,CAAG;AAAA,oBACCE,GAAYb,EAAmBG,CAAQ,CAAC;AAAA;AAAA;AAAA,8BAG9BK,CAAO,2BAA2BC,CAAM;AAAA,QAC9DH,CAAa;AAAA;AAAA,QAEbF,EAAYJ,CAAiB,CAAC;AAAA;AAAA,qBAI1Bc,EAAUpB,EAAkBC,GAAaE,EAAW;AAAA,wBACxCG,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SACbN,GAAYD,EAAY;AAAA,wBACxCI,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SAEzCY,EAAU,GAAGV,EAAYJ,CAAiB,CAAC,GAE3Ce,EAAUH,GAAYX,EAAkBC,CAAQ,EAChDc,EACFvB,EAAiBmB,GAAYb,EAAmBG,CAAQ,EAAIU,GAAYZ,EAAmBE,CAAQ,EACjGe,EACFxB,EAAiBmB,GAAYZ,EAAmBE,CAAQ,EAAIU,GAAYb,EAAmBG,CAAQ,EACjGgB,EAAkBC,GAAqBrB,EAAYiB,EAASb,CAAQ,EAsB1E,MArBiB;AAAA,yDACkCc,CAAK;AAAA,QACtDvB,EAAiBoB,EAAUC,CAAO;AAAA;AAAA;AAAA,yDAGeG,CAAK;AAAA,QACtDxB,EAAiBqB,EAAUD,CAAO;AAAA;AAAA;AAAA,gEAGsBE,CAAO;AAAA,0BAC7Cd,CAAgB;AAAA;AAAA;AAAA;AAAA,uBAInBR,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGa,CAAe;AAAA,QACfc,GAAYvB,CAAO,CAAC;AAAA,QACpBqB,CAAe;AAAA;AAAA;AAAA,MAKnB,EAESnC,GACT,CAACsC,EAA+BvB,EAA4BwB,EAAgCC,EAC3FC,EAAmBC,EAAkBC,EAAkBC,IAAoD,CAC1G,IAAMlC,EAAiBK,EAAW,SAAW,OACvC8B,EAAanC,EAAiB4B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAWrC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAYtC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAcvC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAASxC,IAAmBmC,EAAa,IAAM,GAAKA,EAAa,IAAM,IAAMI,EAAc,IAAM,EAGjGE,EAAYzC,EAAiBuC,EAAcF,EAAWC,EACtDI,EAAY1C,EAAiBqC,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,iCAAiCD,CAAQ,EAAE,EAEtE,IAAMrC,EAAmBgC,EAAUxC,GAAkBmC,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EY,EAAaJ,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDI,EAAaL,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDK,EAAY,KAAK,IAAIN,EAAc,CAAC,EAAInC,EAAkBmC,EAAc,CAAC,CAAC,EAC1E1C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAW6B,EAAWiB,IAAc,EACpCC,EAAeV,EAAS,CAAChC,EAAkB,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EAE3D2C,GAAoC,CACxC,CAAC,OAAsB,KAAMrB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAM,CAAC3B,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EAC7G,CAAC,OAAsB,KAAMA,EAAW,OAAO,EAAG,CAAC,OAAsB,KAAMA,EAAW,SAAS,CACrG,EACA+C,GAA6B/C,EAAY8C,EAAe,EACxDA,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAM0B,GAAwD,CAAC,OAAQ,MAAM,EACzErB,IAC
FkB,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE0B,GAAkB,KAAK,MAAM,GAE/BH,GAAgB,KAAK,GAAGE,EAA2BxB,CAAW,CAAC,EAE/D,IAAM0B,EAAmBC,IAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,MAAO,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ,CAAC,EAC9E,CAAC,KAAM,WAAY,KAAM,MAAO,OAAQ,CAAC,CAC3C,EACAC,GAAyBrD,EAAYoD,CAAQ,EAG7C,IAAME,GAAanB,EAAS,EAAI,EAC1BoB,GAAIC,GAA4BjC,EAAO,CAAC,EAAE,QAAQ,EACpDkC,GAAmB;AAAA,qDACsBtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,8BAChDpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,6EAEsBpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,qCAEjEpB,EAAS,MAAQ,EAAE;AAAA,SAE1CuB,EAAIC,EACN,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQpB,IAAqB,EAAI,EAAIA,CAAgB,EAC3FyD,GAAID,EAAc,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EAC5EO,EAAiB,CAACH,EAAGE,EAAC,EACtBE,GAASC,EAAe,SAAUxC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQ8B,EAAU,EAC1F,GAAI1B,EAAS,CACX,IAAMoC,EAAOL,EAAc,OAAQpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EACxFO,EAAe,KAAKG,CAAI,EACxBP,IAAoB;AAAA,0DAC4BtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,+BACpD5D,EAAiB,IAAM,GAAG,GAAGwC,EAAS,MAAQ,EAAE;AAAA,UAEvE,CAEA,MAAO;AAAA,UACL8B,GAAc,yBAAyB,CAAC;AAAA;AAAA;AAAA;AAAA,UAIxCd,GAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGS,EAAgBC,EAAM,CAAC;AAAA,UACnFL,EAAgB;AAAA,UAEdzE,GACIW,EAAgBC,EAAWC,EAAWC,EAAU8B,EAAS5B,EAAY6C,EAAa,CAAC,EAAGA,EAAa,CAAC,EACpGA,EAAa,CAAC,EAAGU,EAAC,CAAC;AAAA,UAEvBpB,EACI+B,GAA2B3B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,CAAS,EACrGuB,GACI5B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,EAAW,GAAO,OACnFf,CAAyB,CAAC,EACxC,EACA,MAAO,CACL,KAAM,eACN,YAAa,CACX,KAAM,GAAG7B,EAAW,QAAQ,IAAIG,CAAgB,IAAIgC,CAAM,IAAIvC,CAAS,IAAIC,CAAS,IAAIC,CAAQ,IAC5F4C,CAAU,IAAIC,CAAU,IAAIC,CAAS,GACzC,kBAAAK,EACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMzB,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,EACF,GACA,gBAAAI,CACF,CACF,IC9QJ,IA6BMkB,GAQAC,GAGAC,GAQAC,GAOAC,GAgBAC,GAmFOC,GA+DAC,GAzNbC,GAAAC,EAAA,kBAqBAC,IACAC,KAEAC,KAEAC,KAGMb,GAAgBc,GAAkB,CACtC,IAAIC,EAAU,EACd,QAASC,EAAI,EAAGA,EAAIF,EAAI,OAAQE,IAC9BD,GAAWD,EAAIE,CAAC,EAElB,OAAOD,CACT,EAEMd,GAAoBgB,GACtB,OAAOA,GAAU,SAAW,CAACA,EAAOA,EAAOA,CAAK,EAAIA,EAElDf,GAAyB,CAACgB,EAAoBC,IAC9CA,GAAY,EACPD,EAGFA,GAAcA,EAAa,IAAMC,EAAW,GAG/ChB,GACF,CAACiB,EAA+DC,EAAmBC,EAAgBH,EAAW,IAChG,CACR,IAAMI,EAAqBrB,GAAuBmB,EAAWF,CAAQ,EACrE,OAAO,KAAK,OAAOC,EAAW,CAAC,GAAKE,EAAS,GAAKA,EAASC,GAAsB,CAAC,CACpF,EAEFnB,GACF,CAACoB,EAA2CC,EAAuCC,EAClFC,EAAmCC,IAAuD,CACrFA,GAAW,OAEbA,EAAUzB,GAAkBqB,EAASC,EAAY,CAAC,EAAGE,EAAQ,CAAC,CAAC,GAEjE,IAAME,EAA6C,CAAC,EAAG,EAAG,EAAGH,CAAW,EACxE,QAASI,EAAQ,EAAGA,EAAQ,EAAGA,IACzBN,EAAQM,CAAK,EAAI,EAAIF,GAAWH,EAAYK,CAAK,IACnDD,EAASC,CAAK,EAAI,KAAK,OAAON,EAAQM,CAAK,EAAIL,EAAYK,CAAK,EAAI,EAAIF,GAAWD,EAAQG,CAAK,EAAI,CAAC,GAGzG,OAAOD,CACT,EAEExB,GACF,CAAC0B,EAA6BC,EAAiBC,EAAkBC,EAAiBC,EACjFC,EAAsBC,EAAqBC,EAAqBC,EAChEC,IAAqG,CACpG,IAAIC,EACAC,EACAC,EACAC,EAOJ,GALIb,IAAQ,UAEVA,EAAM,GAGJ,OAAOA,GAAQ,SAAU,CAC3BU,EAAU,CAAC,IAAKV,EAAK,OAAQA,EAAK,KAAMA,EAAK,MAAOA,EAAK,MAAOA,EAAK,KAAMA,CAAG,EAC9E,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,CAAG,EACjDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAW,MAAM,QAAQE,CAAG,EAAG,CAC7B,GAAI,CAACA,EAAI,MAAM,CAACc,EAAKC,EAAGhC,IAAQ+B,IAAQ/B,EAAI,CAAC,CAAC,EAC5C,MAAM,MAAM,kCAAkCiB,CAAG,EAAE,EAErDU,EAAU,CAAC,IAAKV,EAAI,CAAC,EAAG,OAAQA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,KAAMA,EA
AI,CAAC,CAAC,EAChG,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,EAAI,CAAC,CAAC,EACpDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAWE,IAAQ,aAAc,CAE/BW,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1CQ,EAAY,KAAK,KAAKV,EAAWG,CAAY,EAC7CQ,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1C,IAAMU,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,GAAkBL,EAAY,GAAKP,EAAeG,EAAeN,EACjEgB,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,EAAQ,KAAK,MAAMH,EAAgB,CAAC,EACpCI,EAAOJ,EAAgBG,EACvBE,EAAM,KAAK,MAAMJ,EAAiB,CAAC,EACnCK,EAASL,EAAiBI,EAC1BE,EAAO,KAAK,MAAML,EAAgB,CAAC,EACnCM,EAAQN,EAAgBK,EAE9Bb,EAAU,CAAC,IAAAW,EAAK,OAAAC,EAAQ,KAAAC,EAAM,MAAAC,EAAO,MAAAL,EAAO,KAAAC,CAAI,CAClD,KACE,OAAM,MAAM,8BAA8BpB,CAAG,EAAE,EAEjD,MAAO,CAAC,QAAAU,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,CAAQ,CAChD,EA8BStC,GACT,CAACkB,EAAmDC,EACnDE,EAA0C6B,EAA4CzB,EACtF0B,EAAY,GAAOC,EAA6C,iBAA+B,CAC9F,IAAIC,EAAW3B,EAASC,EAAUC,EAAS0B,EAC3C,GAAIF,IAAe,eACjB,CAACC,EAAW3B,EAASC,EAAUC,EAAS0B,CAAU,EAAIpC,UAC7CkC,IAAe,gBACxB,CAACC,EAAWC,EAAY5B,EAASC,EAAUC,CAAO,EAAIV,MAEtD,OAAM,IAAI,MAAM,sBAAsBkC,CAAU,EAAE,EAEpD,GAAM,CAACG,EAAgB,CAAEvB,EAAaC,EAAcC,CAAW,EAAIf,EAE7D,CAACU,EAAaC,EAAcC,CAAW,EAAIpC,GAAiB0B,CAAO,EACnE,CAACmC,EAAeC,EAAgBC,CAAa,EAAI/D,GAAiBuD,CAAS,EAE3ES,EAAuB/D,GAAuBoC,EAAawB,CAAa,EACxEI,EAAwBhE,GAAuBqC,EAAcwB,CAAc,EAC3EI,EAAuBjE,GAAuBsC,EAAawB,CAAa,EACxE,CAAC,QAAAvB,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,EAAQ,EAAIvC,GAC7C0B,EAAKC,EAASC,EAAUC,EAASC,EAAaC,EAAcC,EAAa4B,EACzEC,EAAuBC,CAAoB,EAEzCzC,GAAc+B,EAAYI,EAAiBD,EAAaC,EAE1DhC,EAAqD,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACvE,OAAI6B,IAAe,gBACjB7B,EAAW,CAAC8B,EAAWjC,GAAagB,EAAUC,EAAWC,EAAQ,EACxDc,IAAe,iBACxB7B,EAAW,CAAC8B,EAAWjB,EAAUC,EAAWC,GAAUlB,EAAW,GAG5D,CACL,UAAAiC,EACA,WAAAD,EACA,QAAA1B,EACA,SAAAC,EACA,QAAAC,EACA,WAAA0B,EACA,SAAAlB,EACA,UAAAC,EACA,SAAAC,GACA,YAAAlB,GACA,QAAAe,EACA,YAAAN,EACA,aAAAC,EACA,YAAAC,EACA,YAAAC,EACA,aAAAC,EACA,YAAAC,EACA,qBAAAyB,EACA,sBAAAC,EACA,qBAAAC,EACA,cAAAL,EACA,eAAAC,EACA,cAAAC,EACA,QAAAxC,EACA,SAAAK,EACA,YAAAJ,CACF,CACF,EAESlB,GACT,CAAC6D,EAA+BC,EAA4BC,EAC3DC,EAA+BC,EAAyBd,IAAoC,CAC3F,IAAMe,EAAiBf,IAAe,eAChCE,EAAaa,EAAiBL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAElEM,EAAS,GACTC,EAA0C,CAAC,GAAI,EAAG,CAAC,EACnDC,EAAiB,CAAC,EAAGN,EAAY,IAAI,CAACxB,EAAG9B,IAAMA,CAAC,CAAC,EACjD6D,EAAW,CAAC,KAAK,KAAK7E,GAAa4E,EAAe,EAAE,IAAIE,GAAKR,EAAYQ,CAAC,CAAC,CAAC,EAAKH,EAAc,CAAC,CAAE,EAAG,EAAG,CAAC,EAE/GI,GAAU,UAAW,IAAM,oCAAoCF,CAAQ,EAAE,EAEzE,IAAMG,EAAmBN,EAAUD,GAAkBb,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EqB,EAAaC,EAAU,KAAKZ,CAAW,EACvCa,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,CAAU,EACnF,CAAC,QAAuB,KAAMC,CAAI,EAAG,CAAC,QAAuB,KAAMH,EAAW,OAAO,EACrF,CAAC,QAAuB,KAAMA,EAAW,SAAS,CACpD,EACAc,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAMiB,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAUlB,EAAO,SAAW,EAC9BkB,IACFH,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEiB,EAAkB,KAAK,MAAM,GAE/BF,EAAgB,KAAK,GAAGC,EAA2Bd,CAAW,CAAC,EAE/D,IAAMiB,EAAmBC,GAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQlB,EAAW,MAAM,EAChG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAC/C,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQH,EAAW,QAAQ,MAAM,EAChE,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,UAAU,MAAM,CACtE,EAEMqB,EAAahB,EAAS,EAAI,EAC1BiB,EAAIC,GAA4BxB,EAAO,CAAC,EAAE,QAAQ,EAElDyB,EAAIC,EACN,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQY,IAAqB,EAAI,EAAIA,CAAgB,EAC3Fe,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EAC5EM,EAAiB,CAACH,EAAGE,CAAC,EACtBE,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUE,E
AAY,OAAQoB,CAAU,EACtFS,EAAmB,GACvB,GAAIb,EAAS,CACX,IAAMc,EAAON,EAAc,OAAQ1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EACxFM,EAAe,KAAKI,CAAI,EACxBD,GAAoB;AAAA,8DACgCzB,EAAS,QAAQiB,CAAC,IAAMA,CAAC;AAAA,wBAC/DlB,EAAiB4B,EAAa,SAAU,EAAG,CAAC,EAAIA,EAAa,SAAU,EAAG,CAAC,CAAC,GACtF3B,EAAS,MAAQ,EAAE;AAAA,UAEzB,CAEA,MAAO;AAAA,cACDyB,CAAgB;AAAA;AAAA;AAAA,uBAGPN,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA,uBAI1BE,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA,YAErCP,EAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGO,EAAgBC,CAAM,CAAC;AAAA,YACnFT,EAAa,UAAU,CAAC;AAAA,YACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACzDS,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACrCI,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,yBAEjDpB,EAAiB4B,EAAa,SAAUR,EAAE,KAAO,EAAGA,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,2CAE/FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAClFpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAE1FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAChCQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA,8BAKlDpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAyB1GpB,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAUjBA,EAAiB;AAAA,0EAEA;AAAA,yEAC4C;AAAA;AAAA,wBAG7DA,EAAiB;AAAA;AAAA;AAAA,wBAIA;AAAA;AAAA;AAAA,qBAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOTA,EAAiB;AAAA;AAAA;AAAA;AAAA,wBAKA;AAAA;AAAA;AAAA;AAAA,qBAIR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAULa,EAAU,oDAAsD,EAAE;AAAA;AAAA,YAG5E,EACA,MAAO,CACL,KAAM,cACN,YACI,CAAC,KAAM,GAAGjB,EAAW,QAAQ,IAAII,CAAc,IAAIO,CAAgB,IAAIM,CAAO,GAAI,kBAAAD,CAAiB,EACvG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGS,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,CACF,GACA,gBAAAI,CACF,CACF,ICtZJ,IAgBae,GAuGAC,GAvHbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KACAC,KAMaR,GACT,CAACS,EAA+BC,EAC/BC,IAAqF,CACpF,IAAMC,EAAUH,EAAO,OAAS,EAC1BI,EAAcD,EAAU,8BAAgC,GACxDE,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KACnBO,EAAyBD,EAAO,CAAC,EAAIL,EAAW,MAEhDO,EAAgBP,EAAW,SAAW,OACtCQ,EAAcC,GAChBL,EAAQC,EAAQL,EAAW,UAAWA,EAAW,KAAMA,EAAW,QAASO,CAAa,EACtFG,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,EAAW,SAAS,EAC7F,CAAC,QAAuB,KAAM,CAACA,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC5E,CAAC,QAAuB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EACtE,CAAC,QAAuB,KAAMM,CAAsB,CACtD,EACAO,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,CAAM,CAAC,EAClE,IAAMU,EAAwD,CAAC,OAAQ,MAAM,EACzEb,IACFU,EAAgB,KAAK,GAAGE,EAA2Bf,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEgB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BN,CAAW,CAAC,EAE/D,IAAMQ,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEY,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,MAAM,EACxDsB,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,MAAM,EACxDsB,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,CAAC,EAG9E,IAAM6B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ5B,E
AAW,UAAU,MAAM,EACxG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,EAChF,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACA,OAAA6B,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA;AAAA,IAE9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,0BAEtDC,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,8CAEhBX,EAAgB,EAAI,CAAC;AAAA,yDACVA,EAAgB,EAAI,CAAC,oBAClEA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA,iBAGhBW,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMCX,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMrBA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA,uBAKnEA,EAAgBiB,EAAE,IAAI,QAAS,UAAW,SAAU,eAAe,EACnDA,EAAE,IAAI,QAAS,gBAAiB,UAAW,QAAQ,CAAC;AAAA,uBACzDE,EAAE,IAAI,iBAAkB,aAAc,UAAW,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,MAK3EvB,CAAW;AAAA,MACXmB,CAAe;AAAA,MACfJ,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAEzC,EACA,MAAO,CACL,KAAM,cACN,YAAa,CAAC,KAAMlB,EAAW,SAAU,kBAAAe,CAAiB,EAC1D,WAAY,KAAO,CACjB,QAAS,CAAC,CACR,KAAMd,EAA6BA,EAA2BO,CAAW,EAAIA,EAC7E,SAAUT,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,EAESzB,GACT,CAACQ,EAA+BC,EAA4BQ,IAAgD,CAC1G,IAAMN,EAAUH,EAAO,OAAS,EAC1B+B,EAAaC,GAAiBvB,EAAY,CAAC,CAAC,EAC5CwB,EAAeD,GAAiBvB,EAAY,CAAC,CAAC,EAC9CE,EAAaC,EAAU,KAAKH,CAAW,EAAIsB,EAAaE,EACxD5B,EAAS,CAACL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGzB,EAAS,CAACN,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGG,EAAsB,CAACzB,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAIsB,CAAU,EAElGlB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EACxC,CAAC,OAAsB,KAAM,CAACV,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC3E,CAAC,OAAsB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,CACvE,EACAa,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,EAAQ4B,CAAmB,CAAC,EACvF,IAAMC,GAAWF,EAAe,GAAKhC,EAAW,QAAQ,CAAC,EAAIK,EAAO,CAAC,EAC/DW,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUkC,EAAoB,OAAQH,CAAU,EAC5FV,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQ0B,CAAU,EACpEJ,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQyB,CAAU,EACpEH,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM+B,CAAU,CAAC,EAEnF,IAAM3B,EAAcD,EAAU,8BAAgC,GACxD0B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EACxC,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,CACvC,EACA,OAAAC,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA,IAC9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,8CAIlCe,CAAY;AAAA,oCACtBA,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOxBR,EAAE,KAAK,KAAK,KAAKU,CAAO;AAAA,wBACxBhB,EAAO,KAAK,KAAK,KAAKc,CAAY;AAAA;AAAA;AAAA,8CAGZ3B,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGzB6B,CAAO;AAAA;AAAA;AAAA,0BAGXV,EAAE,IAAI,QAAS,gBAAiB,eAAgB,eAAe,CAAC;AAAA;AAAA,0BAEhEA,EAAE,KAAK,KAAK;AAAA;AAAA;AAAA,gDAGUnB,EAAO,CAAC,CAAC;AAAA,wBACjCqB,EAAE,IAAI,WAAY,UAAW,IAAK,gBAAgB,CAAC;AAAA,iCAC1CM,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOlBA,CAAY;AAAA;AAAA,QAE/B7B,CAAW;AAAA,QACXmB,CAAe;AAAA,QACfJ,EAAO,IAAI,QAAS,MAAO,UAAW,iBAAkB,OAAO,CAAC;AAAA;AAAA,IAGlE,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CACX,KAAM,GAAGlB,EAAW,QAAQ,IAAI8B,CAAU,IAAIE,CAAY,IAAIE,CAAO,IAAI7B,EAAO,CAAC,CAAC,IAAIA,EAAO,CAAC,CAAC,GAC/F,kBAAmBH,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EAC
A,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMM,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,ICvNJ,IAYamB,GA6IPC,GAUOC,GAnKbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KACAC,KAEaT,GACT,CAACU,EAA+BC,EAAoDC,EACnFC,EACAC,EAAiB,KAAyD,CACzE,IAAMC,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KAEnBO,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIJ,EAAOA,EAAO,OAAS,CAAC,EAC5BK,EAAaC,GAAiBH,CAAC,EAC/BI,EAAcD,GAAiBF,CAAC,EAChCI,EAAeF,GAAiBJ,CAAC,EACjCO,EAAaC,EAAU,KAAKb,CAAW,EAAIQ,EAAaG,EACxDG,EAAUhB,EAAO,OAAS,EAC1BiB,EAAYd,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAE5FgB,EAAsB,CADVH,EAAU,KAAKE,CAAS,EACFV,EAAGC,CAAC,EAEtCW,EAAoC,CACxC,CAAC,QAAuB,KAAML,CAAU,EAAG,CAAC,QAAuB,KAAMP,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,CACjC,EACAW,GAA6BnB,EAAsBkB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2BJ,EAAWZ,EAAQC,CAAM,CAAC,EACzEU,GACFG,EAAgB,KAAK,GAAGE,EAA2BrB,EAAO,CAAC,EAAE,IAAI,CAAC,EAEpEmB,EAAgB,KAAK,GAAGE,EAA2BH,CAAmB,CAAC,EAEvE,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAYC,GAAiB,aAAczB,EAAO,CAAC,EAAE,SAAUiB,EAAU,MAAM,EAC/ES,EAAIC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQO,CAAW,EACrEgB,EAAID,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQI,CAAU,EACpEmB,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUkB,EAAoB,OAAQR,CAAU,EAC5FqB,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBjC,EAAsB4B,EAAO,KAAK,MAAOE,CAAQ,EACxFI,EAAiB,CAACT,EAAGE,CAAC,EACxBQ,GAAc,GAClB,GAAIpB,EAAS,CACX,IAAMqB,EAAiBjC,EAAiBM,EAAa,EACrDyB,EAAe,KAAKR,EAAc,OAAQ3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQqC,CAAc,CAAC,EACpGD,GAAc,GACVhC,EAAiB,uBAAuBiC,CAAc,KACrC,YAAYR,EAAO,KAAK,KAAK,kBAAkB,EACtE,CAEA,IAAMS,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,EAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAiBC,GAAiBH,GAAYrB,CAAS,EACvDyB,EAAiBD,GAAiBF,EAAYtB,CAAS,EACvD0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EACrF,CAAC,KAAM,IAAK,KAAM,KAAK,CACzB,EACAC,GAAyB3C,EAAsB0C,EAAQ,EAEvD,IAAME,GAAa,CAACC,EAAyBC,KAA4B,CACvE,IAAMC,EAAOF,EAAS,KAChBG,GAAOH,EAAS,KACtB,GAAIE,IAAS,EACX,MAAO,OAAOC,EAAI,cAAcH,EAAS,KAAK,OAAO,YAEvD,IAAMI,EAAY1B,EAAU,KACxB2B,EAAS,OAAOF,EAAI,aAAaH,EAAS,KAAK,OAAO,IAC1D,QAASM,GAAIJ,EAAO,EAAI,EAAGK,GAAIH,EAAY,EAAGE,IAAK,EAAGA,KAAKC,KACzDF,GAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,OAAOF,EAAY,EAAI,iBAAiBG,EAAC,IAAM,eAAe,IAEhG,OAAAN,GAAc,QAAQK,IAAK,CACzBD,GAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,QAClC,CAAC,EACDD,GAAU,GAAGF,EAAI,YAAYD,EAAO,CAAC;AAAA,uBACxBC,EAAI,YAAYD,EAAO,CAAC,UAC9BG,CACT,EAEMG,GAAa,IAAc,CAC/B,IAAIC,EAAU,eAAe7B,EAAE,KAAK,KAAK,IACzC,QAAS0B,GAAI,EAAGA,GAAIxC,EAAawC,KAC/BG,GAAW;AAAA,0BACGH,EAAC,yBAAyBA,EAAC,2BAA2B1C,CAAU,KAEhF,QAAS0C,GAAI,EAAGA,GAAIvC,EAAcuC,KAAK,CACrCG,GAAW,iCAAiCH,EAAC,yBAAyBxC,CAAW,KAEjF,QAASyC,EAAI,EAAGA,EAAIzC,EAAayC,IAC/BE,GAAW;AAAA,qBACJH,EAAC,WAAWxB,EAAE,KAAK,KAAK,UAAUhB,IAAgB,EAAI,GAAK,IAAIyC,CAAC,GAAG,YAAYA,CAAC,YACnFD,EAAC;AAAA,CAET,CACA,OAAOG,CACT,EAEA,MAAO;AAAA,IAEHhC,EAAa,iBAAiBoB,EAAQ,EAAE,0BAA0BnB,CAAS,EAAE,iBACzE,GAAGW,EAAgBN,CAAM,CAAC;AAAA,IACtCN,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,4CACpCb,CAAU,QAAQA,CAAU;AAAA,8CAC1BA,CAAU;AAAA,iCACvBG,CAAY;AAAA,qCACRA,CAAY;AAAA;AAAA;AAAA,MAG3CX,EAAY,SAAW,EAAI,GAAK,uBAAuBsB,EAAU,gBAAgB,OAAO,CAAC,GAAG;AAAA,MAC5FqB,GAAWnB,EAAGc,EAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,MAC7CmB,GAAWjB,EAAGc,CAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,wBAC3BC,EAAO,KAAK,KAAK,KAAKhB,CAAY;AAAA,oDACND,CAAW;AAAA,QACvD0C,GAAW,CAAC;AAAA;AAAA,2BAEOzC,CAAY;AAAA;AAAA,QAE/BuB,EAAW;AAAA,QACXH,CAAe;AAAA,0BACGJ,EAAO,KAAK,OAAO;AAAA,qBACxBA,EAAO,gBAAgB,aAAa,CAAC;AAA
A,QAClDA,EAAO,YAAY,YAAYnB,CAAU,GAAI,OAAO,CAAC;AAAA;AAAA;AAAA,GAIvD,EACA,MAAO,CACL,KAAM,cACN,YAAa,CACX,KAAM,GAAGT,EAAqB,UAAU,IAAIS,CAAU,IAAIE,CAAW,IAAIC,CAAY,IAAIT,CAAc,GACvG,kBAAmBY,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMd,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAClE,gBAAAK,CACF,GACA,gBAAAG,CACF,CACF,EAEE/B,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,CAEtD,EAEaR,GAAUgE,GAAkC,CACvDjE,GAAeiE,EAAQ,MAAM,EAC7B,IAAMtD,EAAcuD,GAAc,UAAUD,EAAQ,OAAO,CAAC,EAAE,KAAMA,EAAQ,OAAO,CAAC,EAAE,KAAM,EAAI,EAChG,GAAI,CAACtD,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMM,EAAIN,EAAYA,EAAY,OAAS,CAAC,EACtCO,EAAI+C,EAAQ,OAAO,CAAC,EAAE,KAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EAC9DhD,EAAI,GAAKC,EAAI,EACf+C,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,EAE3FsD,EAAQ,QAAQE,GAAwBF,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,CAE1F,IChLA,IAgBayD,GA6BPC,GAEAC,GAkDAC,GAmBOC,GA0BPC,GAyIAC,GA0BAC,GAeOC,GAhUbC,GAAAC,EAAA,kBAIAC,KAIAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEalB,GACT,CAACmB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,EAA4BC,IAAqC,CAC/F,IAAMC,EAAYN,EAAW,CAAC,EACxBO,EAAoBP,EAAW,MAAMK,EAAgB,EAAI,EAAGA,EAAgB,EAAI,CAAC,EACjFG,EAAcD,EAAkB,OAChCE,EAAcR,EAAY,CAAC,EAE3BS,EADqBT,EAAY,MAAM,CAAC,EACA,IAAI,CAACU,EAAGC,IAAMD,GAAKA,EAAI,IAAMT,EAAUU,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIR,EAAWS,CAAC,EAAIT,EAAWS,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIR,EAAQQ,CAAC,GAAKR,EAAQQ,CAAC,CAAC,CAAC,EAC5G,OAAAC,EAAY,OAAO,EAAG,EAAGP,CAAS,EAClCO,EAAY,OAAOR,EAAgB,EAAI,EAAG,EAAGI,CAAW,EACjDI,CACT,EAcE/B,GAA2B,CAAC,EAAG,EAAG,EAAG,CAAC,EAEtCC,GAAiB,CAAC+B,EAA+BC,IAAqC,CAG1F,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAME,EAAcF,EAAO,CAAC,EAAE,KAAKC,EAAW,SAAW,OAASD,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFG,EAAkBH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAIC,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAIH,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMN,EAAcM,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWP,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIO,EAAW,QAAQ,SAAWP,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIO,EAAW,KAAK,SAAWP,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIO,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAEM9B,GAA4B,CAA2B+B,EAAeD,IAAqC,CAC/G,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,QAAS,EAAI,EAAG,EAAID,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAE,EACvCb,EAAY,EAAI,CAAC,IAAM,IACzBA,EAAY,EAAI,CAAC,EAAIa,EAAO,CAAC,EAAE,KAAK,CAAC,GAGzC,IAAMI,EAAOH,EAAW,KAAK,MAAM,EACnCI,GAAa,yBACTL,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAaiB,EAAMH,EAAW,SAAW,OACnGA,EAAW,OAAO,EAGtB,IAAMK,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAiB,CAAI,CAAC,EACzCE,CACT,EAEanC,GAAuB8B,GAAwD,CAC1F,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBS,EAAU,CAAC,SAAU,QAAS,aAAc,YAAY,EAAET,EAAW,QAAkB,EACvFb,EAAYa,EAAW,UACvBU,EAAQV,EAAW,MACnBd,EAAcc,EAAW,aACzBG,EAAOH,EAAW,KAClBX,EAAUW,EAAW,QACrBW,EAAYX,EAAW,WAA6B,EAE1D,MAAO,CACL,QAAAS,EACA,OAAAD,EACA,UAAArB,EACA,MAAAuB,EACA,YAAAxB,EACA,KAAAiB,EACA
,QAAAd,EACA,SAAAsB,EACA,GAAGL,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEMnC,GAAS,CAACyC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EAKjEe,EAAiBd,EAAW,SAAW,OAC7C,GAAIA,EAAW,QAAU,EAAG,CAM1B,GADmC,CAACY,EAAQ,YAAY,eAAe,QAAQ,GAC7CE,GAAkBf,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMC,EAAW,OACjFD,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAKC,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,EAAG,CAC7F,IAAMF,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZC,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAEhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3ChB,EAAO,SAAW,GACpBkB,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAE3Ba,EAAQ,QACJM,GAAsCD,EAAYJ,EAAoBf,CAAW,EAAG,CAAC,OAAQmB,CAAU,CAAC,CAC9G,MACEL,EAAQ,QAAQO,GAA6BpB,EAAQc,CAAkB,CAAC,EAE1E,MACF,CAEA,IAAMO,EAAUrB,EAAO,SAAW,EAC5BsB,EAActB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACnDQ,EAAavB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EAClDS,EAAgBxB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACrDU,EAAezB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/B0B,EAAc1B,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BD,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZY,EAAY5B,EAAYgB,EAAiB,EAAI,CAAC,EAC9Ca,EAAW7B,EAAYgB,EAAiB,EAAI,CAAC,EAC7CpB,EAAcI,EAAYgB,EAAiB,EAAI,CAAC,EAEhDc,EAAWd,GAAkBU,IAAiBH,GAAeI,IAAgBH,GAC/EtB,EAAW,KAAK,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,EACvD,GAAI4B,GACCJ,IAAiB,GAAKC,IAAgB,GAAKzB,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,GACxGA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,GACrFA,EAAW,KAAK,CAAC,IAAM,EAAI,CAE9B,IAAM6B,EAAQ/B,EAAY,CAAC,EACvBgC,EAAWC,EAAWC,EACpBC,EAAe,CAAC,EACtB,GAAInB,EAAgB,CAClB,IAAMC,GAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAIlE,GAHIA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,IAE5Ba,EAAU,CACZ,IAAMM,EAAYb,EAAcC,EAAaC,EAC7CO,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAG8B,EAAOK,CAAS,CAAC,EACnDH,EAAYhB,GAAiB,QAAQ,CAAC,EAAGmB,EAAWxC,CAAW,CAAC,EAChEsC,EAAoB,CAAC,EAAGH,EAAOnC,CAAW,CAC5C,MACEoC,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAOR,EAAcC,EAAYC,CAAa,CAAC,EAC9EQ,EAAYhB,GAAiB,QAAQ,CAAC,EAAGQ,EAAe7B,CAAW,CAAC,EACpEsC,EAAoB,CAACH,EAAOH,EAAYC,EAAUjC,CAAW,EAE/DuC,EAAa,KAAKH,CAAS,EAC3BG,EAAa,KAAKF,CAAS,CAC7B,MACED,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAON,EAAeF,EAAcC,CAAU,CAAC,EAC9ES,EAAYhC,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAGL,EAAa6B,CAAa,CAAC,EAC7DS,EAAoB,CAACH,EAAOnC,EAAagC,EAAYC,CAAQ,EAC7DM,EAAa,KAAKF,CAAS,EAC3BE,EAAa,KAAKH,CAAS,EAEzBV,GACFa,EAAa,KAAKlC,EAAO,CAAC,CAAC,EAE7B,IAAMoC,EAAIH,EAAkB,CAAC,EACvBI,GAAIH,EAAa,CAAC,EAAE,KAAKA,EAAa,CAAC,EAAE,KAAK,OAAS,CAAC,EAE1DE,EAAI,GAAKC,GAAI,EACfxB,EAAQ,QACJyB,GACIJ,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACpF,CAAC,OAAQmB,CAAY,CAAC,EAE1BrB,EAAQ,QACJ0B,GAAwBL,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACxG,CAAC,OAAQmB,CAAY,CAAC,EAE5B,MACF,CAIA,IAAMM,EAAgE,GAGhExB,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAIhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3CK,GACFH,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAI3B,IAAMyC,EAAY1B,EAAiBY,EAAYC,EAAWjC,EACpD+C,EAAY3B,EAAiBpB,EAAcgC,EAAYC,EACvDe,EAAWlB,EAAeC,EAAcF,EAC9CX,EAAQ,QACJ+B,GACI1B,EAAYJ,EAAoBf,EAAa0C,EAAWC,EAAWC,EAAUtB,EAC7EmB,CAAyB,EAC7B,CAAC,OAAQtB,CAAU,CAAC,CAC1B
,EAEM7C,GAAS,CAACwC,EAAyBZ,IAAqC,CAE5E,IAAMV,EAAgBU,EAAW,SAAW,OACtCD,EAAS,CACba,EAAQ,OAAO,CAAC,EAAE,QACdtB,EAEI,CAACsB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5Bb,EAAO,KAAKa,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAMT,EAAO,CAAC,EAAGH,EAAW,KAAK,CAAC,EAAG,EAAGA,EAAW,KAAK,CAAC,CAAC,EACpDX,EAAU,CAAC,CAAC,EAAE,OAAOW,EAAW,OAAO,EACvCb,EAAY,CAAC,CAAC,EAAE,OAAOa,EAAW,SAAS,EAC3Cd,EAAc,CAAC,CAAC,EAAE,OAAOc,EAAW,WAAW,EAC/Ca,EAAqB5C,GAA0B,CAAC,GAAG+B,EAAY,KAAAG,EAAM,QAAAd,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGa,CAAM,EACnHa,EAAQ,QAAQO,GACZpB,EAAQc,EACRf,GAAeR,EAAgB,CAACQ,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAAI,CAAC,CAAC,CAAC,CAC3F,EAEMzB,GAAS,CAACuC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMQ,EAASR,EAAW,SAAW,OAAS,eAAiB,gBACzDa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EACjEI,EAAOH,EAAW,UAAY,SAAWA,EAAW,KAAOA,EAAW,QACtE4C,EAAWC,GACb9C,EAAO,CAAC,EAAE,KACVA,EAAO,CAAC,EAAE,KACVC,EAAW,QACXA,EAAW,UAAgDG,EAA2B,GAAOK,CAAM,EACvGI,EAAQ,QAAQkC,GACZ/C,EAAQc,EAAoB+B,EAAS,SACrC,CAACA,EAAS,YAAaA,EAAS,aAAcA,EAAS,WAAW,EAClE,CAACA,EAAS,QAAQ,MAAOA,EAAS,QAAQ,IAAKA,EAAS,QAAQ,IAAI,EAAGpC,CAAM,CAAC,CACpF,EAEalC,GAAO,CAACsC,EAAyBZ,IAAqC,CACjFhC,GAAe4C,EAAQ,OAAQZ,CAAU,EACrCY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpCxC,GAAOwC,EAASZ,CAAU,EACjBY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAC3CvC,GAAOuC,EAASA,EAAQ,OAAQZ,CAAU,EAE1C7B,GAAOyC,EAASA,EAAQ,OAAQZ,CAAU,CAE9C,ICzUA,IAiCM+C,GA2HOC,GA5JbC,GAAAC,EAAA,kBAqBAC,IACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAU,GAAOC,EAAqCC,EAC/EC,EAAmB,IAAc,CAChC,IAAMC,EAAeD,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,sEACT,IAAK,GACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQED,CAAI;AAAA,cAEf,QACE,MAAM,IAAI,MAAM,oBAAoBC,CAAgB,oBAAoB,CAC5E,CACF,EACME,EAAgBN,EAAiB;AAAA;AAAA,QAGA;AAAA;AAAA,QAIjCO,EAAkBP,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCQ,EAAUR,EAAiB,2BAA6B,2BACxDS,EAAST,EAAiB,2BAA6B,2BACvDU,EAAMV,EAAiB,MAAQ,MAC/BW,EAAMX,EAAiB,MAAQ,MAE/BY,EAAe;AAAA,yBACFZ,EAAiB,2BAA6B,0BAA0B;AAAA,uBAC1EA,EAAiB,gCAAkC,+BAA+B;AAAA,qBACpFU,CAAG;AAAA,qBACHA,CAAG;AAAA;AAAA,mBAELC,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA;AAAA,kCAGYH,CAAO;AAAA,iBACxBL,CAAI;AAAA;AAAA,kCAEaM,CAAM;AAAA,iBACvBN,CAAI;AAAA;AAAA;AAAA;AAAA,kBAIHQ,CAAG;AAAA,QACbL,CAAa;AAAA,0EACqDF,CAAgB,KAE9ES,EAAUb,EAAiB;AAAA,0BACbI,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SACoB;AAAA,0BACbC,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SAEPW,EAAU;AAAA,0BACIV,CAAgB;AAAA,yBACjBJ,EAAiB,2BAA6B,0BAA0B;AAAA;AAAA;AAAA,YAIvFA,EAAiB,yDACA,wDAAwD;AAAA;AAAA;AAAA,UAGzEK,EAAYD,CAAgB,CAAC;AAAA;AAAA,eAExBD,CAAI;AAAA,QAGPY,EAAkBC,GAAqBd,EAAYC,CAAI,EAqB7D,MApBiB;AAAA,uDACgCA,CAAI;AAAA,MACrDH,EAAiBa,EAAUC,CAAO;AAAA;AAAA;AAAA,uDAGeX,CAAI;AAAA,MACrDH,EAAiBc,EAAUD,CAAO;AAAA;AAAA;AAAA,iEAGyBV,CAAI;AAAA,wBAC7CC,CAAgB;AAAA;AAAA;AAAA,uBAGjBJ,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGO,CAAe;AAAA,QACfU,GAAYhB,CAAO,CAAC;AAAA,QACpBc,CAAe;AAAA,8EACuDX,CAAgB;AAAA;AAAA,IAI1F,EAESd,GACT,CAAC4B,EAA+BhB,EAAqCiB,EACpEC,EAAmBC,EAAmBC,EAAkBC,EACxDC,IAAoD,CACnD,IAAMxB,EAAiBE,EAAW,SAAW,OACvCuB,EAAazB,EAAiBkB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAW3B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAY5B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAc7B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CA
AC,EAE7DW,EAAS9B,GAAmByB,EAAa,IAAM,GAAKA,EAAa,GAAMI,EAAc,IAAM,EAG3FE,EAAY/B,EAAiB6B,EAAcF,EAAWC,EACtDI,EAAYhC,EAAiB2B,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,wCAAwCD,CAAQ,EAAE,EAE7E,IAAM/B,EAAmB0B,EAAS,EAAI,EAChCO,EAAY,KAAK,IAAIJ,EAAc,CAAC,EAAI7B,EAAkB6B,EAAc,CAAC,CAAC,EAC1EK,EAAaR,EAAS,EAAI,EAC1BS,EACF,CAACrC,EAAW,YAAYF,EAAiB,EAAI,CAAC,EAAGE,EAAW,YAAYF,EAAiB,EAAI,CAAC,CAAC,EAC7FwC,EAAsB,CAC1BD,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,IACrGqC,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,GACvG,EACMuC,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFsC,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,CACvF,EAEMwC,EAAoC,CACxC,CAAC,OAAsB,KAAMtB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAMpB,EAAW,OAAO,EACvF,CAAC,OAAsB,KAAMA,EAAW,SAAS,EAAG,CAAC,OAAsB,KAAMqC,CAAU,EAC3F,CAAC,OAAsB,KAAME,CAAI,CACnC,EACAE,GAA6BzC,EAAYwC,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAElF,IAAM2B,EAAwD,CAAC,OAAQ,MAAM,EACzEtB,IACFmB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE2B,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BzB,CAAW,CAAC,EAE/D,IAAM2B,GAAmBC,IAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EAC5EY,GAAID,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACnEiC,EAASC,EAAe,SAAUlC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQmB,CAAU,EACpFe,GAAiB,CAACL,EAAGE,EAAC,EAExBI,GAAmB,GACvB,GAAI/B,EAAS,CACX,IAAMgC,GAAON,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EACxFe,GAAe,KAAKE,EAAI,EACxBD,IAAoB;AAAA,4DAC8BC,GAAK,KAAK,KAAK;AAAA,iCAC1CvD,EAAiB,IAAM,GAAG,GAAG8B,EAAS,MAAQ,EAAE;AAAA,YAEzE,CAEA,IAAM0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ,CAAC,EACrF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQjB,EAAW,MAAM,EAC5D,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQE,EAAK,MAAM,CACjD,EACAgB,GAAyBvD,EAAYsD,EAAQ,EAC7C,IAAME,EAAWC,GAA4BzC,EAAO,CAAC,EAAE,SAAU,CAAC,EAClE,GAAIwC,IAAa,OAASA,IAAa,MACrC,MAAM,IAAI,MAAM,YAAYA,CAAQ,oBAAoB,EAE1D,MAAO;AAAA,UACLE,GAAc,yBAAyB,CAAC;AAAA,UACxCb,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGH,GAAgBF,CAAM,CAAC;AAAA,UACnFG,EAAgB;AAAA,UAChBjE,GAA6BW,EAAgBuB,EAASrB,EAAY8C,EAAE,KAAK,MAAO5C,CAAgB,CAAC;AAAA,UAE/F0B,EAAS+B,GACI3B,EAAmBD,EAAeyB,EAAU,OAAW,CAAC1D,EAAgBqC,CAAS,EACrFyB,GACI5B,EAAmBD,EAAeyB,EAAU,OAAW,CAAC1D,EAAgBqC,EAAW,GACnF,OAAWb,CAAyB,CAAC,EACxD,EAEA,MAAO,CACL,KAAM,wBACN,YACI,CAAC,KAAM,GAAGtB,EAAW,QAAQ,IAAIgC,CAAiB,IAAID,CAAa,IAAIH,CAAM,GAAI,kBAAAe,CAAiB,EACtG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM1B,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAO,CACF,GACA,gBAAAI,EACF,CACF,ICvQJ,IA2BMiB,GAiMOC,GA5NbC,GAAAC,EAAA,kBAmBAC,IACAC,KAEAC,KAEAC,KAGMP,GACF,CAACQ,EAA4BC,EAA+BC,EAAgCC,EAC3FC,EAA+BC,EAAS,GAAOC,EAAkBC,EACjEC,EAAiB,KAAkB,CAClC,IAAMC,EAASD,EAAiB,EAAI,EAC9BE,EAASF,EAAiB,EAAI,EAC9BG,EAAaH,EAAiB,EAAI,EAClCI,EAAgBP,EAAS,EAAI,EAE/BQ,EAAmB;AAAA,iDACoBR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,0BAC9DD,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,KAEvDH,IACFU,GAAoB;AAAA,sDAC
0BR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,2BAClEE,EAAiB,IAAM,GAAG,GAAGH,EAAS,MAAQ,EAAE;AAAA,QAGrE,IAAMS,EAAaT,EAAS,EAAI,EAC1BU,EAAIC,EAAc,IAAKf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC5EG,EAAKD,EAAc,KAAMf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC9EI,EAAiB,CAACD,EAAIF,CAAC,EACzBZ,GACFe,EAAe,KAAKF,EAAc,OAAQf,EAAO,CAAC,EAAE,SAAU,CAACC,EAAYS,CAAU,CAAC,EAAE,OAAQG,CAAU,CAAC,EAE7G,IAAMK,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQY,CAAU,EAEpFO,EAAe;AAAA,2BACAjB,EAAuB,cAAgB,gBAAgB;AAAA,kBAChEA,EAAuB,cAAgB,gBAAgB;AAAA,kBACvDA,EAAuB,cAAgB,gBAAgB,MAAMQ,CAAa;AAAA,wBACpER,EAAuB,cAAgB,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM7CE,CAAQ,MAAMM,CAAa;AAAA,8BAC/BA,CAAa;AAAA,8BACbN,CAAQ;AAAA;AAAA;AAAA,uBAGfA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,oCAExCA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAOnBA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA,0BACpDA,CAAQ,wBAAwBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAO/CA,CAAQ;AAAA;AAAA;AAAA;AAAA,wCAINA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAUhBS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAMhBW,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA;AAAA,iDAEjBX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iDAMRK,CAAU;AAAA;AAAA,gCAE3BI,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCASZS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA,oCACjCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAUTM,CAAa;AAAA,qCACXT,EAAU,YAAc,QAAQG,CAAQ,QAAQ;AAAA,YACzEa,EAAO,IAAI,QAAS,IAAK,QAAS,KAAM,OAAO,CAAC;AAAA;AAAA,SAGhDG,EAAc;AAAA,gCACMH,EAAO,gBAAgB,YAAY,CAAC;AAAA,wBAC5CA,EAAO,WAAW,gBAAiB,CAAC,CAAC;AAAA,qBACxCA,EAAO,WAAW,gBAAiBR,CAAU,CAAC;AAAA,oBAC/CQ,EAAO,WAAW,gBAAiBV,CAAM,CAAC;AAAA,oBAC1CU,EAAO,WAAW,gBAAiBT,CAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAQpCJ,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,yBAKTA,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,sCAEvCA,CAAQ,sBAAsBG,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAU/CH,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,wCAEvCA,CAAQ,sBAAsBI,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAQlEF,EAAiBS,EAAG,IAAI,QAAS,OAAQ,OAAQ,cAAc,EAC9CA,EAAG,IAAI,QAAS,eAAgB,OAAQ,MAAM,CAAC;AAAA,+BAC3CF,EAAE,IAAI,eAAgB,cAAe,cAAe,aAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM/DZ,EAAU,WAAa,GAAGG,CAAQ,OAAO;AAAA,YAC/Da,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,UAG/C,MAAO;AAAA,IACTnB,EAAa,iBAAiBO,CAAQ,EAAE,iBAAiB,GAAGW,EAAgBC,CAAM,CAAC;AAAA,IACnFN,CAAgB;AAAA;AAAA,MAEdb,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,IAC5EK,EAASgB,EAAeC,CAAW,GACnC,EAES7B,GACT,CAACQ,EAA+BsB,EAC/BC,IAAqF,CACpF,IAAMrB,EAAUF,EAAO,OAAS,EAE1BC,EAAcqB,EAAW,YACzBE,EAAaC,EAAU,KAAKxB,CAAW,EAMvCyB,EAAW,CACf,KAAK,KAAKF,EAAa,EAAE,EACzB,EACA,CACF,EACAG,GAAU,UAAW,IAAM,uCAAuCD,CAAQ,EAAE,EAE5E,IAAMnB,EAAiBe,EAAW,SAAW,OACvCM,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAU,CAACP,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,EACvDQ,EACF,CAACR,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAGe,EAAW,YAAYf,EAAiB,EAAI,CAAC,CAAC,EAC7FwB,EAAY,CAACT,EAAW,UAAU,CAAC,EAAGA,
EAAW,UAAU,CAAC,CAAC,EAC7DU,EAAsB,CAC1BF,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,IAC3FQ,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,GAC7F,EACMW,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFU,EAAoB,CAAC,EAAI,EAAI,KAAK,MAAMV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,CAAC,EAAI,CACrF,EAEMlB,EAAS,GACT8B,EAAQZ,EAAW,MACnBa,EAASnC,EAAO,CAAC,EAAE,KACnBoC,EAAwBD,EAAO,CAAC,EAAID,EACpCG,EAAyBF,EAAO,CAAC,EAEjCG,EAAoC,CACxC,CAAC,QAAuB,KAAMd,CAAU,EAAG,CAAC,QAAuB,KAAMK,CAAO,EAChF,CAAC,QAAuB,KAAMC,CAAU,EAAG,CAAC,QAAuB,KAAMC,CAAS,EAClF,CAAC,QAAuB,KAAMC,CAAmB,EAAG,CAAC,OAAsB,KAAMC,CAAI,EACrF,CAAC,QAAuB,KAAMG,CAAqB,EAAG,CAAC,QAAuB,KAAMC,CAAsB,EAC1G,GAAGE,EAA2BvC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9D,EACIE,IACFoC,EAAgB,KAAK,GAAGC,EAA2BvC,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE4B,EAAkB,KAAK,MAAM,GAE/BU,EAAgB,KAAK,GAAGC,EAA2BtC,CAAW,CAAC,EAE/D,IAAME,EAAuBuB,EAAS,CAAC,IAAM,GAAKA,EAAS,CAAC,IAAM,EAC5Dc,EAAmBzC,GAA+B,CACtD,IAAMO,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQuB,EAAQ,MAAM,EACzF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQC,EAAW,MAAM,EAC5D,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,MAAM,EAC1D,CAAC,KAAM,wBAAyB,KAAM,MAAO,OAAQE,EAAoB,MAAM,EAC/E,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAAG,CAAC,KAAM,2BAA4B,KAAM,KAAK,EAChG,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACM5B,EAAWoC,GAA4BzC,EAAO,CAAC,EAAE,QAAQ,EAC/D,MAAO,GACHT,GACIQ,EAAcC,EAAQC,EAAaC,EAASC,EAAsBC,EAAQC,EAAUC,EACpFC,CAAc,CAAC,EACzB,EACA,MAAO,CACL,KAAM,kBACN,YAAa,CAAC,KAAM,GAAGe,EAAW,QAAQ,IAAK,kBAAAM,CAAiB,EAChE,WAAY,KAAO,CACjB,cAAe,CAAC,EAAGF,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,QAAS,CAAC,CACR,KAAMH,EAA6BA,EAA2BtB,CAAW,EAAIA,EAC7E,SAAUD,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,gBAAAsC,CACF,GACA,gBAAAE,CACF,CACF,ICpTJ,IAYME,GAIAC,GAWAC,GAiCAC,GAwCOC,GA+BPC,GAqEAC,GAEAC,GAsDAC,GA6COC,GA7SbC,GAAAC,EAAA,kBAMAC,KACAC,KAEAC,KACAC,KAEMf,GACF,CAACgB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DpB,GAAoB,CAACqB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEMzB,GACF,CAAC0B,EAA+BC,EAAgCC,EAA8BP,EAC7FQ,EAAeP,EAAgBQ,EAA4BC,EAAwBC,EACnFC,IAA0B,CACzB,IAAMC,EAAcR,EAAW,OAAS,EAClCS,EAAoBF,EAAY,SAAW,EACjD,GAAID,EAAc,SAAW,EAC3B,QAASI,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EACjCJ,EAAc,KAAK,CAAC,EAGxB,IAAMK,EAAYX,EAAW,CAAC,EACxBY,EAAcX,EAAYI,EAAgB,EAAI,CAAC,EAAIF,EACzD,QAASO,EAAI,EAAGG,EAAIb,EAAW,OAASQ,GAAeH,EAAgB,EAAI,GAAIK,EAAIF,EAAa,EAAEE,EAAG,EAAEG,EAAG,CACxG,IAAMC,EAASd,EAAWa,CAAC,EACrBpB,EAAUgB,EAAoBK,EAASV,EAAQM,CAAC,EAAIH,EAAYG,CAAC,EACjEhB,EAAWtB,GAAgB0C,EAAQV,EAAQM,CAAC,EAAGd,EAAKc,CAAC,EAAGT,EAAYY,CAAC,EAAGX,EAAUQ,CAAC,EAAGjB,CAAO,EACnGpB,GAAkBqB,EAAUC,EAASC,EAAMc,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRH,EAAQM,CAAC,GAAKI,EAAS,GAAKR,EAAcI,CAAC,GAAKT,EAAYY,CAAC,EAAI,GAAKX,EAAUQ,CAAC,EAAI,EAAId,EAAKc,CAAC,EAC/Fd,EAAKc,EAAIF,CAAW,CAAC,CAE7B,CACAD,EAAY,OAAO,EAAG,EAAGI,CAAS,EAClCJ,EAAY,OAAOF,EAAgB,EAAI,EAAG,EAAGO,CAAW,CAC1D,EAOErC,GACF,CAAoCwC,EAAeC,IAAqC,CACtF,IAAMf,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAClGjB,EAAY,OAAS,EACrB,QAASS,EAAI,EAAGA,EAAIM,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEN,EAC3CT,EAAY,KAAKe,EAAO,CAAC,EAAE,KAAKN,CAAC,CAAC,CAEtC,CACA,IAAMS,EAAiBJ,EAAW,SAAW,OAC7Cd,EAAY,OAAO,EAAG,EAAGe,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,E
AC1Cf,EAAY,OAAOkB,EAAiB,EAAI,EAAG,EAAGH,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAE/D,IAAMpB,EAAOmB,EAAW,KAAK,MAAM,EAC7BR,EAAcQ,EAAW,YAAY,MAAM,EAC3CT,EAAgBS,EAAW,cAAc,MAAM,EAC/Cf,EAAagB,EAAO,CAAC,EAAE,KACzBd,EAAYa,EAAW,UAAU,MAAM,EAC3C,GAAIb,EAAU,OAAO,CAACe,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC9C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5Cd,EAAY,IAAI,MAAMM,CAAW,EAAE,KAAK,CAAC,CAC3C,CACA,IAAIJ,EAAUW,EAAW,QAAQ,MAAM,EACvC,GAAIX,EAAQ,OAAO,CAACa,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC5C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5CZ,EAAU,IAAI,MAAMI,CAAW,EAAE,KAAK,CAAC,CACzC,CAGAlC,GACI0B,EAAYC,EAAaC,EAAWa,EAAW,QAASA,EAAW,MAAOnB,EAAMQ,EAASe,EACzFb,EAAeC,CAAW,EAG9B,IAAMa,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAL,EAAM,cAAAU,EAAe,YAAAC,EAAa,UAAAL,EAAW,QAAAE,CAAO,CAAC,EACzFgB,CACT,EAES5C,GAAgCuC,GAAiE,CAC5G,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBpB,EACF,CAAC,SAAU,QAAS,aACnB,YAAY,EAAE,OAAOoB,EAAW,QAAW,IAAc,EAAIA,EAAW,OAAiB,EACxFb,EAAYa,EAAW,UACvBZ,EAAQY,EAAW,MACnBd,EAAcc,EAAW,YACzBnB,EAAOmB,EAAW,KAClBX,EAAUW,EAAW,QACrBS,EAAYT,EAAW,SAA2B,EAClDT,EAAgBS,EAAW,cAC3BR,EAAcQ,EAAW,YAC/B,MAAO,CACL,QAAApB,EACA,OAAA4B,EACA,UAAArB,EACA,MAAAC,EACA,YAAAF,EACA,cAAAK,EACA,YAAAC,EACA,KAAAX,EACA,QAAAQ,EACA,SAAAoB,EACA,GAAGH,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEM5C,GAAiB,CAACuC,EAA+BD,IAA8C,CAGnG,GAAI,CAACC,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAG7D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAMS,EAAcT,EAAO,CAAC,EAAE,KAAKD,EAAW,SAAW,OAASC,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFU,EAAkBV,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIS,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAcX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAID,EAAW,MAGnD,GAAIC,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMW,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMnB,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAG5C,GAFqBD,EAAW,UAAU,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEnDH,EAAW,UAAU,SAAWP,EAClD,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAKvD,GAFmBO,EAAW,QAAQ,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEjDH,EAAW,QAAQ,SAAWP,EAC9C,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAKrD,GADgBO,EAAW,KAAK,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAC9CH,EAAW,KAAK,SAAWP,EAAc,EACtD,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIO,EAAW,cAAc,SAAWP,GAAeO,EAAW,cAAc,SAAW,EACzF,MAAM,IAAI,MAAM,4BAA4BP,CAAW,GAAG,EAM5D,GADuBO,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GACrDH,EAAW,YAAY,SAAW,GACpDA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5D,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAID,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAGMtC,GAAsB,CAAC,EAAG,EAAG,EAAG,CAAC,EAEjCC,GACF,CAACiD,EAAyBZ,EAA+BD,IAA8C,CACrG,IAAMc,EAAqBtD,GAAmCwC,EAAYC,CAAM,EAC1EG,EAAiBJ,EAAW,SAAW,OACvCR,EAAcsB,EAAmB,YACjCjB,EAAcL,EAAYY,EAAiB,EAAI,CAAC,EAChDW,EAAgBd,EAAO,CAAC,EAAE,KAAKG,EAAiB,EAAI,CAAC,EAI3D,GAAIU,EAAmB,QAAU,GAAMjB,IAAgB,GAAKkB,IAAkB,EAAI,CAChFF,EAAQ,QAAQG,GAAiCf,EAAQa,CAAkB,CAAC,EAC5E,MACF,CACA,IAAMG,EAAYzB,EAAYY,EAAiB,EAAI,CAAC,EAC9Cc,EAAW1B,EAAYY,EAAiB,EAAI,CAAC,EAC7Ce,EAAelB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/BmB,EAAcnB,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BoB,EAAYjB,EAAiBa,EAAYC,EAAWrB,EACpDyB,EAAYlB,EAAiBP,EAAcoB,EAAYC,EACvDK,EAAWJ,EAAeC,EAAcL,EAExCS,EAAgE,GAIhEC,EAAoBZ,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJa,GAA2BzB,EAAO,CAAC,EAAGtC,EAAmB,EACzD,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACqC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACa,EAAQ,iBAAiB,K
ACnDA,EAAQ,iBAAiB,GAAKY,GAIhC,IAAME,EAAsB,CAAC1B,EAAO,CAAC,EAAGwB,CAAgB,EAClDG,EAAU3B,EAAO,SAAW,EAC9B2B,IACE,CAACxB,GAAkBH,EAAO,CAAC,EAAE,KAAK,SAAW,EAC/C0B,EAAoB,KAAK1B,EAAO,CAAC,EAAE,QAAQ,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,CAAC,CAAC,EAErE0B,EAAoB,KAAK1B,EAAO,CAAC,CAAC,GAKtCY,EAAQ,QACJgB,GACIF,EAAqBb,EAAoBtB,EAAa6B,EAAWC,EAAWC,EAAUK,EACtFJ,CAAyB,EAC7B,CAAC,OAAQG,CAAmB,CAAC,CACnC,EAEE9D,GAAkB,CAACgD,EAAyBb,IAA8C,CAE9F,IAAMV,EAAgBU,EAAW,SAAW,OAEtCC,EAAS,CACbY,EAAQ,OAAO,CAAC,EAAE,QACdvB,EAEI,CAACuB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5BZ,EAAO,KAAKY,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAI3B,EAAcc,EAAW,aACzBd,EAAY,SAAW,GAAKA,EAAY,CAAC,IAAM,KACjDA,EAAc,CAAC2B,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,GAE1C,IAAI1B,EAAYa,EAAW,WACvBb,EAAU,SAAW,GAAKA,EAAU,CAAC,IAAM,KAC7CA,EAAY,CAAC,CAAC,GAEhB,IAAIE,EAAUW,EAAW,SACrBX,EAAQ,SAAW,GAAKA,EAAQ,CAAC,IAAM,KACzCA,EAAU,CAAC,CAAC,GAEd,IAAIR,EAAOmB,EAAW,KAClBnB,EAAK,SAAW,IAClBA,EAAO,CAAC,EAAG,CAAC,GAEdA,EAAO,CAAC,EAAGA,EAAK,CAAC,EAAG,EAAGA,EAAK,CAAC,CAAC,EAC9BQ,EAAU,CAAC,CAAC,EAAE,OAAOA,CAAO,EAC5BF,EAAY,CAAC,CAAC,EAAE,OAAOA,CAAS,EAChCD,EAAc,CAAC,CAAC,EAAE,OAAOA,CAAW,EACpC,IAAM4B,EACFtD,GAAmC,CAAC,GAAGwC,EAAY,KAAAnB,EAAM,QAAAQ,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGe,CAAM,EACrGY,EAAQ,QAAQG,GACZf,EAAQa,EACRtB,GAAeF,EAAgB,CAACE,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAC/C,CAACA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,CAAC,CAAC,CACtF,EAEa1B,GAAgB,CAAC+C,EAAyBb,IAA8C,CACnGtC,GAAemD,EAAQ,OAAQb,CAAU,EACrCa,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpChD,GAAgBgD,EAASb,CAAU,EAEnCpC,GAAgBiD,EAASA,EAAQ,OAAQb,CAAU,CAEvD,ICpTA,IAgBM8B,GAkDOC,GAOAC,GAzEbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAOMR,GACF,CAACS,EAAmBC,EAA+BC,EAAuBC,IACvD,CACb,IAAMC,EAAaC,EAAU,KAAKJ,CAAU,EACtCK,EAAOL,EAAW,OAClBM,EAAQC,EAAc,QAASR,EAAWM,CAAI,EAC9CG,EAASC,EAAe,SAAUV,EAAWM,CAAI,EACjDK,EAAYT,EAAU,WAAa,EAAiBA,EAAU,cAAc,EAAE,CAAC,EAC3B,OAAOA,EAAU,iBAAiB,EAAE,CAAC,CAAC,EAC1FU,EAAOP,EAAU,cAAcM,EAAWL,CAAI,EAC9CO,EAAmBC,GAA+B,CACtD,IAAMC,EAAQ,QAAQR,EAAM,WAAW,eAAgB,eAAe,CAAC,KACjES,EAAMC,EAAa,uBAAwB,gBAAiBX,CAAI,EAChEY,EAAaf,EAAW,QAAUY,GAASZ,EAAW,UAAY,OAAS,IAAM,IACjFgB,EAAahB,EAAW,QAAUa,EAAMD,GAASZ,EAAW,UAAY,GAAK,QACnF,MAAO;AAAA,kBAEHW,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,CAAM,CAAC;AAAA,kBAClCK,EAAa,UAAU,CAAC;AAAA,oBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,uCACtDL,EAAO,gBAAgB,YAAY,CAAC;AAAA,8BAC7CA,EAAO,KAAK,KAAK;AAAA,sCACTS,CAAU;AAAA,qCACXC,CAAU;AAAA;AAAA,sBAEzBZ,EAAM,WAAW,eAAgB,gBAAiB,QAAQ,CAAC;AAAA,kCAC/CA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,oBAEhDE,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,kBAEjD,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMF,EAAY,SAAUD,CAAS,CAAC,EACjD,cAAe,CAAC,EAAG,KAAK,KAAKI,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,QAAuB,KAAMQ,CAAI,EAC7E,GAAGQ,EAA2BnB,EAAYA,CAAU,CACtD,CAEF,GACA,gBAAAY,CACF,CACF,EAGKrB,GAAS,CAAC6B,EAAyBlB,IAAuC,CACrF,IAAMF,EAAaoB,EAAQ,OAAO,CAAC,EAAE,KAC/BrB,EAAYqB,EAAQ,OAAO,CAAC,EAAE,SAC9BT,EAAOS,EAAQ,OAAO,CAAC,EAC7BA,EAAQ,QAAQ9B,GAAwBS,EAAWC,EAAYW,EAAMT,CAAU,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACjG,EAEaV,GAAyBU,GAA0D,CAC9F,IAAMmB,EAAYnB,EAAW,YAAwB,EAC/CoB,EAAUpB,EAAW,UAAsB,EACjD,OAAOqB,GAA4B,CAAC,UA
AAF,EAAW,QAAAC,CAAO,CAAC,CACzD,IC7EA,IAoBME,GASAC,GAWAC,GA2DOC,GAKAC,GAxGbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAWMV,GAAkBW,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,gCAAgC,EAElD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,iCAAiC,CAErD,EAEMV,GAAmB,CAACW,EAAgBC,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKF,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAI,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMd,GAAgC,CAACe,EAAyBC,IAAoD,CAClH,IAAIC,EAAWC,EAAWC,EAAWC,EACjCC,EACAX,EACEY,EAAgBN,EAAW,SAAW,OACtCO,EAAYP,EAAW,UACvBQ,EAAYR,EAAW,OAAS,MAClCM,GACF,CAACL,EAAGC,EAAGC,EAAGC,CAAC,EAAIL,EAAY,KAC3BM,EAAQG,EAAY,CAACP,EAAGC,EAAGC,EAAGI,EAAWA,EAAWH,EAAKG,GAAa,CAAE,EACpD,CAACN,EAAGC,EAAGC,EAAGC,EAAKG,GAAa,EAAIA,EAAWA,CAAS,EACxEb,EAAOc,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,IAEzD,CAACP,EAAGC,EAAGC,EAAGC,CAAC,EAAI,CAACL,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,CAAC,EAClGM,EAAQG,EAAY,CAACP,EAAGM,EAAWA,EAAWH,EAAKG,GAAa,EAAIL,EAAGC,CAAC,EACpD,CAACF,EAAGG,EAAKG,GAAa,EAAIA,EAAWA,EAAWL,EAAGC,CAAC,EACxET,EAAOc,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,GAE3D,IAAMC,EAAsBV,EAAY,QAAQM,CAAK,EAC/CK,EAAoBD,EAAoB,KAAK,OAC7CE,EAAgBZ,EAAY,SAE5Ba,EAAgBC,EAAc,IAAKF,EAAeD,CAAiB,EACnEI,EAAeC,EAAe,SAAUJ,EAAeD,CAAiB,EAExEM,EAAmBC,GAA+B;AAAA,IACtDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEhG/B,GAAiBW,EAAMgB,EAAmBE,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEtEG,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DH,EAAa,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGxDA,EAAa,YAAY,aAAcF,EAAc,aAAa,UAAU,CAAC,CAAC;AAAA,KAGlF,MAAO,CACL,KAAM,eACN,YAAa,CAAC,KAAM,GAAGb,EAAY,IAAI,IAAIC,EAAW,SAAS,IAAIA,EAAW,IAAI,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACjH,WAAaP,GAAW,CACtB,IAAMyB,EAAcZ,EAAgB,CAACL,EAAGC,EAAIK,EAAWJ,EAAII,EAAWH,EAAKG,GAAa,CAAE,EACtD,CAACN,EAAGG,EAAKG,GAAa,EAAIL,EAAIK,EAAWJ,EAAII,CAAS,EACpFY,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAkBZ,EAAoB,KACtCa,EAAiBF,EAAU,gBAAgBC,EAAiB3B,CAAI,EACtE,MAAO,CACL,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUzB,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAK0B,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGI,EAA2BF,EAAiBC,CAAc,CAAC,CAChH,CACF,EACA,gBAAAN,CACF,CACF,EAEa/B,GAAe,CAACuC,EAAyBxB,IAA6C,CACjGlB,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQxC,GAA8BwC,EAAQ,OAAO,CAAC,EAAGxB,CAAU,CAAC,CAC9E,EAEad,GAA+Bc,GACxCyB,GAA4B,CAC1B,UAAWzB,EAAW,UACtB,KAAMA,EAAW,KACjB,OAAQA,EAAW,MACrB,CAAC,IC7GL,IAsBM0B,GAEAC,GACAC,GACAC,GACAC,GAQAC,GAqBAC,GA4HAC,GAEAC,GA+GOC,GAOAC,GA5SbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAaMhB,GACF,qBACEC,GAAc,IAAMD,GAAgB,KACpCE,GAAkB,IAAMD,GAAc,IACtCE,GAAa,IAAMF,GAAc,MAAQA,GACzCG,GAAiB,IAAMD,GAAa,IAQpCE,GAAN,KAAiB,CACf,YAAYY,EAAa,GAAI,CAC3B,KAAK,gBAAkB,IAAI,IAC3B,KAAK,WAAaA,CACpB,CAGA,UAAUC,EAAgBC,EAAe,CACvC,IAAIC,EAAQ,KAAK,gBAAgB,IAAIF,CAAM,EACvCE,IAAU,OACZA,EAAQ,CAACD,CAAK,EAEdC,EAAM,KAAKD,CAAK,EAElB,KAAK,gBAAgB,IAAID,EAAQE,CAAK,CACxC,CAIF,EAEMd,GAAN,KAAqB,CACnB,YAAYe,EAA+CC,EAAkB,CAAlB,cAAAA,EACzD,KAAK,YAAc,GACnB,KAAK,aAAe,IAAI,IACxB,KAAK,IAAM,IAAI,MACf,KAAK,WAAa,CAAC,EAGnB,GAAI,CAACC,EAAKC,CAAG,EAAIF,EAAS,SAAS,IAAI,EAAIA,EAAS,MAAM,KAAM,CAAC,EAAI,CAACA,EAAU,EAAE,EAClF,GAAI,CAACC,EAAI,MAAM,OAAOnB,EAAc,CAAC,EACnC,MAAM,IAAI,MAAM,kBAAkB,EAapC,GAXmBmB,EAAI,MAAM,GAAG,EACrB,QAAQ,CAACE,EAAWN,IAAU,CACvC,IAAMO,EAAOL,EAAOF,CAAK,EAAE,KAAK,MAAM,EACtC,GAAI,CAACM,EAAU,MAAM,OAAOvB,EAAe,CAAC,EAC1C,MAAM,IA
AI,MAAM,kBAAkB,EAEpC,IAAMyB,EAAa,KAAK,YAAYF,EAAW,GAAMC,EAAMP,CAAK,EAChE,KAAK,IAAI,KAAKQ,CAAU,CAC1B,CAAC,EAGGH,IAAQ,GAEVA,GAAO,CAAC,GAAG,KAAK,aAAa,QAAQ,CAAC,EAC1B,OAAO,CAAC,CAACI,EAAKC,CAAI,IAAOA,EAAK,QAAU,GAAKD,IAAQ,KAAM,EAC3D,IAAI,CAAC,CAACA,CAAG,IAAMA,CAAG,EAClB,KAAK,EAAE,UAEf,CAACJ,EAAI,MAAM,OAAOvB,EAAW,CAAC,EAChC,MAAM,IAAI,MAAM,aAAa,EAKduB,EAAI,MAAM,OAAOxB,GAAe,GAAG,CAAC,GAC3C,QAASkB,GAAW,CAC9B,GAAIA,IAAW,MACb,KAAK,WAAa,KAAK,WAAW,OAAO,KAAK,YAAY,MACrD,CACL,IAAMW,EAAO,KAAK,aAAa,IAAIX,CAAM,EACzC,GAAIW,IAAS,OACX,MAAM,IAAI,MAAM,oBAAoB,EAEtC,KAAK,WAAW,KAAKA,EAAK,QAAQ,CACpC,CACF,CAAC,EACD,KAAK,IAAM,KAAK,YAAYL,EAAK,GAAO,KAAK,UAAU,CACzD,CAGA,UAAUN,EAAgBY,EAAkBb,EAAoB,CAC9D,IAAIY,EAAO,KAAK,aAAa,IAAIX,CAAM,EACvC,GAAIW,IAAS,OAAW,CACtB,GAAIA,EAAK,WAAaC,GAAYD,EAAK,QAAU,EAC/C,MAAM,IAAI,MAAM,oBAAoB,EAEpCA,EAAK,QACLA,EAAK,aAAa,KAAKZ,CAAU,CAErC,MACEY,EAAO,CAAC,MAAO,EAAG,SAAAC,EAAU,aAAc,CAACb,CAAU,CAAC,EAExD,KAAK,aAAa,IAAIC,EAAQW,CAAI,CACpC,CAGA,YAAYE,EAAcC,EAAkBN,EAAyBP,EAAQ,GAAgB,CAC3F,IAAMc,EAAOP,EAAK,OACdQ,EAAW,GACXC,EAAe,CAAC,EAChBC,EAAU,EAEd,GAAI,CAACL,EAAK,MAAM,OAAO7B,EAAe,CAAC,GAAM,CAAC8B,GAAWD,IAAS,GAChE,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMM,EAAeN,EAAK,MAAM,OAAO/B,GAAe,GAAG,CAAC,EACpD2B,EAAa,IAAItB,GAAWc,CAAK,EAEvC,OAAAkB,GAAc,QAAQ,CAACnB,EAAgBoB,IAAc,CACnD,GAAIpB,IAAW,MAAO,CACpB,GAAIgB,EACF,MAAM,IAAI,MAAM,6CAA6C,EAE/DA,EAAW,GACX,IAAMK,EAAoBN,EAAOI,EAAa,OAAS,EACvD,GAAIE,EAAoB,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GADAJ,EAAeT,EAAK,MAAMU,EAASA,EAAUG,CAAiB,EAC1D,KAAK,aACP,GAAI,KAAK,aAAa,SAAWJ,EAAa,QAC1C,KAAK,aAAa,SAAS,IAAMA,EAAa,SAAS,EACzD,MAAM,IAAI,MAAM,8BAA8B,UAEvCH,EACT,KAAK,YAAc,GACnB,KAAK,aAAeG,MAEpB,OAAM,IAAI,MAAM,uCAAuC,EAGzD,QAASK,EAAI,EAAGA,EAAIL,EAAa,OAAQK,IAAK,CAC5C,IAAMtB,EAAS,OAAO,aAAa,IAAI,WAAW,CAAC,EAAIsB,CAAC,EACxDb,EAAW,UAAUT,EAAQoB,EAAIE,CAAC,EAClC,KAAK,UAAUtB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAC/C,CACF,MACEQ,EAAW,UAAUT,EAAQoB,GAAK,KAAK,YAAc,KAAK,aAAa,OAAS,EAAI,EAAE,EACtF,KAAK,UAAUpB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAEjD,CAAC,EACMQ,CACT,CAQF,EAEMpB,GAAakC,GAAyBA,EAAO,OAE7CjC,GACF,CAACkC,EAAuCC,EAAkBC,EACzDC,IAAgD,CAE/C,IAAMC,EADQJ,EAAY,IAAKhB,GAASA,EAAK,MAAM,EAC3B,IAAI,CAACO,EAAMd,IAAU4B,EAAc,QAAQ5B,CAAK,GAAIwB,EAAUV,CAAI,CAAC,EACrFe,EAAaC,EAAU,KAAKJ,CAAW,EACvCK,EAASC,EAAe,SAAUR,EAAUE,EAAY,MAAM,EAC9DO,EACF,CAAC,GAAGR,EAAe,aAAa,KAAK,CAAC,EAAE,OAAQ1B,GAAW,CAAC0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,CAAC,EACxGmC,EAAmBC,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EACrBC,EAAW,kBACXC,EAAU,iBACVC,EAAY,eACZC,EAAgC,CAAC,EACjCC,EAAiC,CAAC,EAClCC,EAAiC,CAAC,EAClCC,EAA4B,CAAC,EAC7BC,EAAyBnB,EAAe,aAAa,OAASA,EAAe,IAAI,gBAAgB,KACvGA,EAAe,aAAa,QAAQ,CAACf,EAAMX,IAAW,CACpD,GAAI0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,EAAG,CAClD,IAAM8C,EAAcpB,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,IAAI,CAAC,EAClE8C,IAAgB,QAClBpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBoC,EAAQ,KAAK,GACTT,EAAUR,CAAC,EAAE,WACT,QAAQA,CAAC,UAAWnB,EAAO+B,EAAO,WAAW,gBAAiBc,CAAW,CAAC,CAAC,EAAE,CACvF,CAAC,CACH,CACF,CAAC,CAEL,MACEpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBwC,EAAoB,KAAK,GAAGb,EAAUR,CAAC,EAAE,WAAW,QAAQA,CAAC,UAAWnB,EAAO,GAAGD,CAAM,EAAE,CAAC,EAAE,CAC/F,CAAC,EACD4C,EAAgB,KAAK,WAAWhB,EAAUR,CAAC,EAAE,aAAa,QAAQA,CAAC,SAAS,CAAC,GAAG,CAClF,CACF,CAAC,EACDsB,EAAqB,KACjB,WAAW1C,CAAM,cAAcA,CAAM,eAAeX,GAAUW,CAAM,CAAC,KAAKA,CAAM,OAAO,EAC3F2C,EAAqB,KAAK,GAAG,CAEjC,CAAC,EACD,IAAMK,EAAYH,EACd,CACE,GAAGR,EACH,aAA
aT,EAAU,IAAI,CAACqB,EAAU7B,IAAM6B,EAAS,aAAa,QAAQ7B,CAAC,SAAS,CAAC,EAAE,KAAK,KAAK,CAAC,GACpG,EACA,CACE,GAAGiB,EACHE,EACA,GAAGG,EACH,GAAGD,EACHH,EACA,GAAGM,EACHJ,EACA,GAAGG,CACL,EACJ,MAAO;AAAA,cAEHP,EACK,iBAAiBF,EAAgB,IAAKlC,IAAY,CAAC,KAAM,GAAGX,GAAUW,CAAM,CAAC,GAAI,KAAM,KAAK,EAAE,CAAC,EAC/F,gBAAgB,aAAc,KAAK,EACnC,iBAAiB,GAAG4B,EAAWI,CAAM,CAAC;AAAA;AAAA,cAEzCI,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,kCACrDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA,cACxDJ,EAAU,IAAI,CAACsB,EAAM9B,IAAM,YAAYA,CAAC,YAAYQ,EAAUR,CAAC,EAAE,KAAK,OAAO,GAAG,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,cAC5F4B,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,cACpBhB,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,YAE/C,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAe,SAAU,kBAAmBF,EAAY,IAAI,IAAM,MAAM,CAAC,EAC7F,WAAY,IAAM,CAGhB,IAAM2B,EACFjB,EAAgB,OAAQlC,GAAW0B,EAAe,aAAa,IAAI1B,CAAM,CAAC,EACrE,IACIA,IACI,CAAC,QAAuB,KAAM0B,EAAe,aAAa,IAAI1B,CAAM,GAAG,UAAY,CAAC,EAAE,EACvGmD,EAAoB,KAAK,CAAC,QAAuB,KAAMrB,CAAU,CAAC,EAClE,IAAMsB,EACF5B,EAAY,IAAI,CAAChB,EAAM6C,IAAM,CAAC,GAAGC,EAA2B9C,CAAI,CAAC,CAAC,EAC7D,OAAO,CAAC+C,EAAKC,IAAyBD,EAAI,OAAOC,CAAoB,EAAGL,CAAmB,EACpG,OAAAC,EAAgB,KAAK,GAAGE,EAA2B3B,CAAW,CAAC,EACvD,CACN,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAAsB,CACF,CACF,EACA,gBAAAjB,CACF,CACF,EAES5C,GAAS,CAACkE,EAAyBC,IAAuC,CACrF,IAAMhC,EAAiB,IAAItC,GAAeqE,EAAQ,OAAQC,EAAW,QAAQ,EACvE/B,EAAcD,EAAe,WAC7BF,EAAciC,EAAQ,OAAO,IAAI,CAACE,EAAON,IAAMM,EAAM,IAAI,EAC/DF,EAAQ,QAAQnE,GAAwBkC,EAAaiC,EAAQ,OAAO,CAAC,EAAE,SAAU/B,EAAgBC,CAAW,CAAC,CAC/G,EAEanC,GAAyBkE,GAA0D,CAC9F,IAAMtD,EAAYsD,EAAW,SAAoB,QAAQ,OAAQ,EAAE,EACnE,OAAOE,GAA4B,CAAC,SAAAxD,CAAQ,CAAC,CAC/C,IC/SA,IAUMyD,GAiBAC,GAYAC,GAIAC,GAyDOC,GApGbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0BAA0B,EAE5C,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EAEzDG,EAAaD,EAAM,OAASD,EAAW,OAAS,EAAIC,EAAM,OAASD,EAAW,OAC9EG,EAAkBH,EAAW,OAASC,EAAM,OAAS,EAAID,EAAW,OAASC,EAAM,OACvF,KAAOC,EAAaD,EAAM,QAAUE,EAAkBH,EAAW,OAAQ,EAAEE,EAAY,EAAEC,EACvF,GAAIF,EAAMC,CAAU,IAAMF,EAAWG,CAAe,GAAKF,EAAMC,CAAU,IAAM,GAC3EF,EAAWG,CAAe,IAAM,EAClC,MAAM,IAAI,MAAM,oDAAoD,CAG1E,EAEMb,GAAmB,CAACc,EAA2BC,IAAwC,CAC3F,IAAMC,EAAOF,EAAO,OAASC,EAAO,OAC9BJ,EAAkB,CAAC,EACzB,QAASM,EAAI,EAAGA,EAAID,EAAM,EAAEC,EAC1BN,EAAM,KAAKG,EAAOG,CAAC,CAAC,EAEtB,QAASA,EAAI,EAAGA,EAAIF,EAAO,OAAQ,EAAEE,EACnCN,EAAM,KAAKI,EAAOE,CAAC,IAAM,EAAIH,EAAOG,EAAID,CAAI,EAAID,EAAOE,CAAC,CAAC,EAE3D,OAAON,CACT,EAEMV,GAAuB,CAACS,EAA+BC,IACxDD,EAAW,OAASC,EAAM,OAAUX,GAAiBU,EAAYC,CAAK,EAAIX,GAAiBW,EAAOD,CAAU,EAG3GR,GAA2BO,GAA+C,CAC9E,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EACvDS,EAAwBjB,GAAqBS,EAAYC,CAAK,EAC9DQ,EAAWV,EAAO,CAAC,EAAE,SACrBW,EAAaD,IAAa,EAAgB,EAAI,EAC9CE,EAAa,KAAK,KAAKC,EAAU,KAAKJ,CAAW,EAAIE,CAAU,EAE/DG,EAAmBC,GAA+B,CACtD,IAAMC,EAAQC,EAAc,QAASP,EAAUT,EAAW,OAAQU,CAAU,EACtEO,EAASC,EAAe,SAAUT,EAAUD,EAAY,OAAQE,CAAU,EAC5ES,EACJ,GAAIV,IAAa,EAAe,CAC9B,IAAMW,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO;AAAA,6BAChDD,CAAC,MAAML,EAAO,gBAAgB,kBAAkBK,CAAC,GAAG,CAAC;AAAA,sBAC5DA,CAAC,MAAMP,EAAM,2BAA2B,gBAAgBO,CAAC,GAAIL,CAAM,CAAC;AAAA,qBACrEK,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIR,EAAM,YAAY,QAAQO,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAEhFH,EAAa;AAAA,0CACuBT,CAAU;AAAA;AAAA,UAE1CU,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCH,EAAO,YAAY,aAAc,MAAM,CAAC;AAAA,QAE9C,MACEE,EAAa;AAAA,8BACWF,EAAO,gBAAgB,YAA
Y,CAAC;AAAA,4BACtCF,EAAM,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,UAC3EA,EAAO,YAAY,aAAcF,EAAM,YAAY,aAAa,CAAC,CAAC;AAAA,SAGxE,MAAO;AAAA,MACLD,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBC,EAAOE,CAAM,CAAC;AAAA,MAC/EH,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,MACvEK,CAAU,EACd,EAEMK,EACF,CAAC,CAAC,QAAuB,KAAMb,CAAU,EAAG,GAAGc,EAA2BzB,EAAYQ,CAAW,CAAC,EACtG,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGA,EAAY,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACxE,gBAAAK,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAML,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBAAAa,CACF,EACF,CACF,EAEa/B,GAAUiC,GAAkC,CACvDrC,GAAeqC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAwBkC,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxE,ICvGA,IAaMC,GAiDOC,GA9DbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KAIMP,GAA6BQ,GAAqD,CACtF,IAAMC,EAAWD,EAAa,CAAC,EAAE,SAC3BE,EAAaC,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAChDI,EAAaD,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAEhDK,EAAUD,EAAa,IAAM,EAC7BE,EAAmBC,GAAuC,CAC9D,IAAMC,EAAIC,EAAc,IAAKR,EAAU,CAAC,CAAC,EAAG,CAAC,EACvCS,EAAOD,EAAc,OAAQR,EAAU,CAAC,CAAC,EAAG,CAAC,EAC7CU,EAAIC,EAAe,IAAKX,EAAU,CAAC,CAAC,EAAG,CAAC,EAExCY,EAA8B,CAAC,CAAC,KAAM,kBAAmB,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAEvGC,EAAqBC,GAAe;AAAA,gBAC9BA,CAAC,oCAAoCA,CAAC;AAAA,gBACtCA,CAAC,MAAML,EAAK,YAAY,OAAOK,CAAC,aAAa,CAAC,QAAQA,CAAC,gBAC7DC,EAAoBX,EACtB;AAAA,mBACWK,EAAK,YAAY,uCAAuC,CAAC,IACpE,GAAGI,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC;AAAA,mBACjFN,EAAE,KAAK,KAAK,gCAE3B,MAAO,GAAGD,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBL,EAAGE,EAAMC,CAAC,CAAC;AAAA;AAAA,MAEtEM,GAAaC,GAA0BjB,CAAQ,CAAC,CAAC;AAAA;AAAA,MAEvDM,EAAa,UAAUY,EAAc,CAAC;AAAA,QACpCZ,EAAa,sCAAsC,0BAA0B,CAAC;AAAA;AAAA,gBAEtEC,EAAE,YAAY,YAAY,CAAC;AAAA,QACnCQ,CAAiB;AAAA;AAAA,QAEjBL,EAAE,YAAY,aAAoBS,GAAmB,MAAM,CAAC,CAAC;AAAA,MAEnE,EAEA,MAAO,CACL,KAAM,mBACN,YAAa,CAAC,KAAM,GAAGf,CAAO,GAAI,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACrE,gBAAAC,EACA,WAAae,IAAY,CACvB,QAAS,CAAC,CAAC,KAAMA,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,gBACI,CAAC,CAAC,QAAuB,KAAM,KAAK,KAAKnB,EAAa,CAAC,CAAC,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACxG,cAAe,CAAC,EAAG,KAAK,KAAKF,EAAaiB,GAAiB,CAAC,CAAC,CAC/D,EACF,CACF,EAEa1B,GAAY6B,GAAkC,CACrDA,EAAQ,OAAO,OAAS,GAAKnB,EAAU,KAAKmB,EAAQ,OAAO,CAAC,EAAE,IAAI,IAAM,EACpE7B,GAAS6B,CAAO,EAEtBA,EAAQ,QAAQ9B,GAA0B8B,EAAQ,MAAM,CAAC,CAE7D,ICpEA,IAeMC,GAMAC,GAsGOC,GAGAC,GA9HbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,EAEMT,GAA0B,CAACS,EAA+BC,IAA8C,CAC5G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAeH,EAAO,CAAC,EAAE,KAEzBI,EAAYF,EAAW,OACvBG,EAAOC,EAAU,cAAcL,EAAW,KAAMG,CAAS,EAEzDG,EAAcL,EAAW,MAAM,CAAC,EACtCK,EAAY,OAAOF,EAAM,EAAG,GAAGF,CAAY,EAE3C,IAAMK,EAAeN,EAAWG,CAAI,EAC9BI,EAAaT,EAAO,CAAC,EAAE,WAAa,EAAgB,EAAI,EACxDU,EAAa,KAAK,KAAKJ,EAAU,KAAKC,CAAW,EAAIE,CAAU,EAE/DE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMH,CAAI,EAAG,GAAGO,EAA2BZ,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMO,CAAW,CAChH,EAEMM,EAAmBC,GAA+B,CACtD,IAAMC,EAAOC,EAAc,OAAQhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQS,CAAU,EAClFQ,EAAUD,EAAc,eAAgBhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACjFkB,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUO,EAAY,OAAQE,CAAU,EAEpFW,EAAmBC,GAA6B,CACpD,IAAMC,EAAcnB,EAAa,OAC7BoB,EAAU,qBAAqBF,CAAC,OAAOJ,EAAQ,KAAK,OAAO,OAC/D,QAASO,EAAI,EAAGA,EAAIF,EAAaE,IAC/BD,GAAW,GAAGD,EAAc,EAAI,iBAAiBD,CAAC,IAAIG,CAAC,IAAM,iBAAiBH,CAAC,EAAE,MAC7Ed,EAAY,OAAS,EAAI,gBAAgBc,CAAC,oBAAoBG,CAAC,IAAM,gBAAgBH,CAAC,EAAE,IAE9FE,GAAW;AAAA,mBACEF,CAAC,MAAMJ,EAAQ,aAAa,iBAAiBI,CAAC,EAAE,CAAC;AAA
A,mBACjDA,CAAC;AAAA,iBACHA,CAAC,SAASA,CAAC;AAAA;AAAA,2BAEDA,CAAC,MAAMN,EAAK,KAAK,OAAO;AAAA,UAE7C,QAASS,EAAI,EAAGC,EAAI,EAAGD,EAAIpB,EAAWoB,IAChCA,IAAMnB,GACRkB,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,aAAaA,CAAC,KACvFI,GAAKH,IAELC,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,MACrEd,EAAY,OAAS,EAAI,gBAAgBc,CAAC,IAAII,CAAC,IAAM,gBAAgBJ,CAAC,EAAE,IAC5EI,KAGJ,OAAOF,CACT,EACIG,EACJ,GAAI1B,EAAO,CAAC,EAAE,WAAa,EAAe,CACxC,IAAM2B,EAAmB,CAACC,EAAgBP,EAAWQ,EAAW,KAAO;AAAA,6BAChDR,CAAC,MAAMH,EAAO,gBAAgB,kBAAkBG,CAAC,GAAG,CAAC;AAAA,YACtED,EAAgBC,CAAC,CAAC;AAAA,sBACRA,CAAC,MAAMN,EAAK,gBAAgB,cAAcM,CAAC,EAAE,CAAC;AAAA,qBAC/CA,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BO,CAAM,IAAIP,CAAC,OAAOQ,CAAQ,IAAId,EAAK,YAAY,QAAQM,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAE/EK,EAAa;AAAA,0CACuBjB,CAAU;AAAA;AAAA,UAE1CkB,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCT,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,OAE/C,MACEQ,EAAa;AAAA,4BACSR,EAAO,gBAAgB,YAAY,CAAC;AAAA,QACxDE,EAAgB,EAAE,CAAC;AAAA,oBACPL,EAAK,aAAa,aAAa,CAAC;AAAA,QAC5CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,QAG7C,MAAO;AAAA,QAEHJ,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBC,EAAME,EAASC,CAAM,CAAC;AAAA,QAC5CJ,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,UACzEY,CAAU;AAAA,QAElB,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMzB,EAAW,SAAU,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMM,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAE,CACF,CACF,EAEarB,GAAyBS,GAClC6B,GAA4B,CAAC,KAAM7B,EAAW,IAAc,CAAC,EAEpDR,GAAS,CAACsC,EAAyB9B,IAAuC,CACrF,IAAMD,EAAS+B,EAAQ,OACvBzC,GAAeU,CAAM,EACrB+B,EAAQ,QAAQxC,GAAwBwC,EAAQ,OAAQ9B,CAAU,CAAC,CACrE,IClIA,IAeM+B,GAeAC,GA+DOC,GAGAC,GAhGbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM;AAAA,4DACwC,CAE5D,EAEMT,GACF,CAACS,EAA+BC,IAAsD,CACpF,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAsBH,EAAO,CAAC,EAAE,SAChCI,EAAYF,EAAW,OAEvBG,EAAeL,EAAO,CAAC,EAAE,KACzBM,EAAkBN,EAAO,CAAC,EAAE,SAC5BO,EAAOC,EAAU,cAAcP,EAAW,KAAMG,CAAS,EACzDK,EAAeP,EAAWK,CAAI,EAE9BG,EAAcL,EAAa,MAAM,CAAC,EAClCM,EAAaH,EAAU,KAAKE,CAAW,EAEvCE,EAAQC,EAAc,QAASV,EAAqBC,CAAS,EAC7DU,EAAUD,EAAc,eAAgBP,EAAiBD,EAAa,MAAM,EAC5EU,EAASC,EAAe,SAAUb,EAAqBO,EAAY,MAAM,EAGzEO,EAAoC,CACxC,CAAC,QAAuB,KAAMN,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMF,CAAI,CACpC,EACA,OAAAU,EAAgB,KAAK,GAAGC,EAA2BhB,EAAYG,EAAcK,CAAW,CAAC,EA4BlF,CACL,KAAM,iBACN,YAAa,CAAC,kBA7B8C,CAAC,OAAQ,MAAM,CA6B5C,EAC/B,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAM,CACF,GACA,gBA9BuBE,GAA+B;AAAA,QAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,EAASC,CAAM,CAAC;AAAA,QAC/CI,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,4BAErDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,kBAE9CD,EAAQ,YAAY,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA,2BAIxBF,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,eAAgB,gBAAiB,UAAU,CAAC;AAAA,oBACjDA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,QAE9CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAY3C,CACF,EAESvB,GAAiCS,GAC1CmB,GAA4B,CAAC,KAAMnB,EAAW,IAAc,CAAC,EAEpDR,GAAiB,CAAC4B,EAAyBpB,IAA+C,CACrG,IAAMD,EAASqB,EAAQ,OACvB/B,GAAeU,CAAM,EACrBqB,EAAQ,QAAQ9B,GAAgC8B,EAAQ,OAAQpB,C
AAU,CAAC,CAC7E,ICpGA,IAWMqB,GA0BAC,GAwFOC,GAQAC,GArIbC,GAAAC,EAAA,kBAGAC,IAEAC,KAIAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,UACjCA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,SAC3D,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EASMR,GAAwB,CAACQ,EAA+BC,IAA4C,CACxG,IAAMC,EAASF,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BG,EAASH,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACI,EAAGC,EAAGC,CAAC,EAAIC,GAAS,qBACvBL,EAAQD,EAAW,OAAQE,EAAQF,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGQ,EAAc,CAACJ,EAAGC,CAAC,EACzB,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAMC,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAML,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,OAAsB,KAAML,EAAW,KAAK,EAC/E,CAAC,OAAsB,KAAMA,EAAW,IAAI,CAC9C,EACMW,EAAwD,CAAC,OAAQ,MAAM,EACzEZ,EAAO,SAAW,IACpBW,EAAgB,KAAK,GAAGE,EAA2Bb,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEY,EAAkB,KAAK,MAAM,GAE/BD,EAAgB,KAAK,GAAGE,EAA2BL,CAAW,CAAC,EAE/D,IAAMM,EAAmBC,GAA+B,CACtD,IAAIC,EAAO,GACPf,EAAW,QAAUA,EAAW,OAClCe,EAAO,0DACEf,EAAW,QAAU,CAACA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAUA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAU,CAACA,EAAW,SAC3Ce,EAAO,2DAGT,IAAMC,EAAiBhB,EAAW,QAAU,EAAI,GAAK,2BAC/CiB,EAAIC,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDoB,EAAID,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDqB,EAAWH,EAAE,KAAK,MACpBI,EAAwB,KACtBC,EAAY,CAACL,EAAGE,CAAC,EACnBpB,EAAO,SAAW,IACpBsB,EAAIH,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAChEuB,EAAU,KAAKD,CAAC,GAElB,IAAME,EAASC,EAAe,SAAUzB,EAAO,CAAC,EAAE,SAAUQ,EAAY,MAAM,EAC9Ee,EAAU,KAAKC,CAAM,EACrB,IAAME,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC/G,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAC1D,EACA,MAAO;AAAA,IACPX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA;AAAA,IAEtER,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kBAK9DM,CAAQ;AAAA;AAAA,QAElBL,CAAI;AAAA;AAAA;AAAA,MAGNC,CAAc;AAAA,OACb,IACGK,GAAK,KACA,iBAAiBA,EAAE,2BAA2B,aAAcE,CAAM,CAAC,cACtEH,CAAQ,qBAAqBC,EAAE,YAAY,SAAS,CAAC,IAEpD,IACN,CAAC;AAAA;AAAA,IAGN,EAEA,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGrB,EAAW,QAAQ,GAAI,kBAAAW,CAAiB,EAC/D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAUR,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKS,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAG,CACF,CACF,EAEarB,GAAuBQ,GAAwD,CAC1F,IAAM0B,EAAS1B,EAAW,OACpB2B,EAAS3B,EAAW,OACpB4B,EAAQ5B,EAAW,MACnB6B,EAAO7B,EAAW,KACxB,MAAO,CAAC,OAAA0B,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,SAAU,GAAG7B,EAAW,MAAM,IAAIA,EAAW,MAAM,IAAIA,EAAW,QAAU,CAAC,EAAE,CACtH,EAEaP,GAAO,CAACqC,EAAyB9B,IAAqC,CACjFV,GAAewC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAAsBuC,EAAQ,OAAQ9B,CAAU,CAAC,CACnE,ICxIA,IAaM+B,GAGAC,GAiOOC,GAGPC,GAEAC,GA0COC,GA2BAC,GA3TbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMf,GAAW,CAACgB,EAA+BC,IAC5CD,EAAO,OAASC,GAAOD,EAAOC,CAAC,EAAE,KAAK,OAAS,GAAOC,EAAU,KAAKF,EAAOC,CAAC,EAAE,IAAI,EAAK,EAAID,EAAOC,CAAC,EAAI,OAEvGhB,GAAiB,CAACe,EAA+BG,IAAoD,CACzG,IAAMC,EAAQJ,EAAO,CAAC,EAChBK,EAAMrB,GAASgB,EAAQ,CAAC,EACxBM,EAAQtB,GAASgB,EAAQ,CAAC,EAC1BO,EAAOvB,GAASgB,EAAQ,CAAC,EACzBQ,EAAiBxB,GAASgB,EAAQ,CAAC,EACnCS,EAAuBzB,GAASgB,EAAQ,CAAC,EACzCU,EAAU1B,GAASgB,EAAQ,CAAC,EAC5BW,EAAY3B,GAASgB,EAAQ,CAAC,EAoCpC,GAAII,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMQ,EAAe,GAC
fC,EAAYT,EAAM,KAAK,CAAC,EACxBU,EAAiBV,EAAM,KAAK,CAAC,EAC7BW,EAAaX,EAAM,KAAK,SAAW,EAAKQ,EAAeR,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3EY,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaZ,EAAW,QAAQ,EAC5D,GAAIO,GAAWC,EAAW,CACxB,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIA,EAAQ,KAAK,CAAC,IAAMG,GAAaH,EAAQ,KAAK,CAAC,IAAMP,EAAW,UAAYO,EAAQ,KAAK,CAAC,IAAMS,EAClG,MAAM,IAAI,MAAM,iFAAiF,EAEnG,GAAIR,EAAU,KAAK,CAAC,IAAME,GAAaF,EAAU,KAAK,CAAC,IAAMR,EAAW,UACpEQ,EAAU,KAAK,CAAC,IAAMQ,EACxB,MAAM,IAAI,MAAM,mFAAmF,EAErG,GAAIT,EAAQ,KAAK,CAAC,IAAMC,EAAU,KAAK,CAAC,EACtC,MAAM,IAAI,MAAM,gFAAgF,EAElG,GAAIA,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEvEM,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,CACpC,SAAWA,GAAWC,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIS,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAIA,EAAI,KAAK,CAAC,IAAMD,EAAM,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,6DAA6D,EAE/EgB,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMc,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIb,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMc,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GC,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,GAAIb,EAAM,CACR,GAAIA,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8CAA8C,EAGhE,GAAID,GACEF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,CAAC,IAAM,EAC/C,MAAM,IAAI,MAAM,oCAAoC,CAG1D,CAEA,IAAIiB,IACJ,GAAIb,EAAgB,CAClBa,EAAW,EACX,IAAMC,EAAWd,EAAe,KAUhC,MATIc,EAAS,SAAW,EAClBA,EAAS,CAAC,IAAMT,EAClBQ,EAAW,EACFC,EAAS,CAAC,IAAM,EAAIT,EAAY,IACzCQ,EAAW,GAEJC,EAAS,SAAW,GAAKA,EAAS,CAAC,IAAMT,GAAaS,EAAS,CAAC,IAAMN,IAC/EK,EAAW,GAETA,IAAa,EACT,IAAI,MAAM,0FAA0F,EAEtG,IAAI,MAAM,oBAAoB,CACtC,CAEA,IAAIE,EAAe,GACfC,EAAcT,EAClB,GAAIT,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FkB,EAAclB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGkB,EAAclB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CiB,EAAe,EACjB,CACF,CAEA,IAAME,EAAsBR,EAAqBD,EAC3CU,EAAsB,GAE5B,GAAIlB,EACF,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIC,EAAsB,CACxB,GAAIA,EAAqB,KAAK,SAAW,EACvC,MAAM,IAAI,MAAM,iEAAiE,EAEnF,GAAKA,EAAqB,KAAK,CAAC,IAAMI,GAAaJ,EAAqB,KAAK,CAAC,IAAM,GAChFA,EAAqB,KAAK,CAAC,IAAMN,EAAW,UAAYM,EAAqB,KAAK,CAAC,IAAMK,GACzFL,EAAqB,KAAK,CAAC,IAAMgB,EACnC,MAAM,IAAI,MAAM,2FAA2F,CAE/G,CAEA,MAAO,CACL,UAAAZ,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAS,EACA,kBAAAP,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAS,EACA,SAAAL,EACA,UAAW,KAAK,MAAMK,EAAcrB,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAuB,EACA,aAAAH,EACA,UAAAH,CACF,CACF,EAEalC,GAAqCiB,GAC9CwB,GAA4B,CAAC,GAAGxB,CAAU,CAAC,EAEzChB,GAAgDwC,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhGvC,GACF,CAACwC,EAAyBC,EAAiBtB,EAAkBM,EAAmBC,EAC/EC,EAAoBe,IAAuB,CAC1C,IAAMC,EAAc,CAA
ClB,EAAWC,EAAgBC,CAAU,EACpDiB,EAAa9B,EAAU,KAAK6B,CAAW,EACvCE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,QAAuB,KAAMF,CAAU,EACnF,CAAC,QAAuB,KAAMf,CAAU,CAC1C,EAEMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,gBAAiBR,EAAI,SAAUE,CAAW,EAClEO,EAAWC,EAAc,MAAOV,EAAI,SAAUE,CAAW,EACzDS,EAAYD,EAAc,OAAQhC,EAAK,SAAUwB,CAAW,EAE5DU,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,CAC3G,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBH,EAAUE,EAAWJ,CAAM,CAAC;AAAA,IACrFD,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,IAK1E,EAEA,OAAOP,EAAQ,QACX,CACE,KAAM,4BACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACjD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMG,EAAa,SAAUF,EAAI,SAAU,aAAgC,CAAC,EACvF,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAC,CACF,EACA,CAAC,OAAQ,CAACL,EAAKtB,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC7C,EAESlB,GACT,CAACuC,EAAyBf,EAAmB6B,EAAkB5B,EAAwBK,EACtFwB,EAAmBpC,EAAmBuB,IAAwB,CAG7D,IAAIc,EAAgBD,EACpB,GAAKpC,EAOE,CACL,GAAIO,IAAmB,EACrB,MAAM,IAAI,MAAM,mFAAmF,EAEnG,OAAA8B,EACIxD,GAAiBwC,EAASe,EAAOpC,EAAMM,EAAWC,EAAgB4B,EAAWvB,EAAUW,CAAW,EACtGc,EAAgBA,EAAc,QAAQ,CAAC/B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,EAC9ES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAEnD,KAjBE,QAAID,EAAM,KAAK,SAAW,IACxBC,EAAgBD,EAAM,QAAQ,CAAC9B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,GAExES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAanD,EAEStD,GAAqB,CAACsC,EAAyBzB,IAAqC,CAC/F,IAAM2C,EAAS7D,GAAe2C,EAAQ,OAAQzB,CAAU,EAClDC,EAAQwB,EAAQ,OAAO,CAAC,EACxBvB,EAAMrB,GAAS4C,EAAQ,OAAQ,CAAC,EAChCtB,EAAQtB,GAAS4C,EAAQ,OAAQ,CAAC,EAClCrB,EAAOvB,GAAS4C,EAAQ,OAAQ,CAAC,EACjCpB,EAAiBxB,GAAS4C,EAAQ,OAAQ,CAAC,EAC3CnB,EAAuBzB,GAAS4C,EAAQ,OAAQ,CAAC,EACjDlB,EAAU1B,GAAS4C,EAAQ,OAAQ,CAAC,EACpCjB,EAAY3B,GAAS4C,EAAQ,OAAQ,CAAC,EAC5C,GAAIxB,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIC,GAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8BAA8B,EAIhD,IAAM0C,EAAS1C,GAAOC,GAASD,EAAI,KAAK,SAAW,GAAKC,EAAM,KAAK,SAAW,EAExE0C,EAAI3D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAU1C,EAAOG,EAAM,CAAC,EAEtG,GAAIwC,EACF,OAAOE,GACHrB,EAASoB,EAAG3C,EAAKC,EAAOE,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAC7F3C,CAAU,EAEhB,GAAI,CAACE,GAAO,CAACC,EACX,MAAM,IAAI,MAAM,gCAAgC,EAElD,IAAM4C,EAAI7D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,SAAUzC,EAAKE,EAC3FuC,EAAO,UAAU,EAEfK,EAAI9D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,UAAWxC,EAAOC,EAC9F,EAAIuC,EAAO,UAAU,EAEzBG,GACIrB,EAASoB,EAAGE,EAAGC,EAAG3C,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAAQ3C,CAAU,CAC/G,ICrWA,IAUMiD,GAIAC,GAyBAC,GAUOC,GAuCAC,GAxFbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAEMT,GAAcU,GAChB,MAAM,KAAKA,EAAkB,iBAAiB,EAAG,MAAM,EAGrDT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,IAChEA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,oCAAoC,EAKtD,GAFmCX,GAAWW,EAAO,CAAC,CAAC,EAE3C,SAAWA,EAAO,CAAC,EAAE,KAAK,OACpC,MAAM,IAAI,MAAM,uFAAuF,CAE3G,EAEMT,GAAiB,CAACU,EAA+BC,IAAkD,CACvG,IAAMC,EAAwB,CAAC,EAE/B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQ,EAAEG,EACvCD,EAAY,KAAKF,EAAWG,CAAC,EAAIF,EAAQE,CAAC,CAAC,EAG7C,OAAOD,CACT,EAEaX,GAAwB,CAACQ,EAA+BK,IAAkC,CACrG,IAAMJ,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAA6BG,GAAgBhB,GAAWW,EAAO,CAAC,CAAC,EACjEG,EAAcZ,GAAeU,EAAYC,CAAO,EAChDI,EAAaC,EAAU,KAAKJ,CAAW,EAEvCK,EAAWR,EAAO,CAAC,EAAE,SACrBS,EAA
QC,EAAc,QAASF,EAAUP,EAAW,MAAM,EAC1DU,EAASC,EAAe,SAAUJ,EAAUL,EAAY,MAAM,EAE9DU,EAAmBC,GAA+B;AAAA,2BAC/BL,EAAM,QAAQ,GAAGR,CAAU,CAAC;AAAA,QAC/Ca,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAOE,CAAM,CAAC;AAAA,QAClFG,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACrDH,EAAO,gBAAgB,YAAY,CAAC;AAAA,2BACtCF,EAAM,KAAK,OAAO;AAAA,4BACjBR,EAAW,MAAM;AAAA,4BACjBQ,EAAM,WAAW,uBAAwB,GAAG,CAAC;AAAA,gCACzCE,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,UAE9DF,EAAM,WAAW,gBAAiB,IAAK,iBAAiB,CAAC;AAAA;AAAA,QAE3DE,EAAO,YAAY,aAAcF,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,OAG3E,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGP,CAAO,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC7D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKM,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGS,EAA2Bf,EAAO,CAAC,EAAE,KAAMG,CAAW,CAAC,CAC5G,GACA,gBAAAU,CACF,CACF,EAEapB,GAAQuB,GAAkC,CACrD1B,GAAe0B,EAAQ,MAAM,EAC7BA,EAAQ,QAAQxB,GAAsBwB,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACtE,IC3FA,IAeaC,GA6KPC,GAuGOC,GAGPC,GAEAC,GA+BOC,GAvUbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KACAC,KACAC,KACAC,KACAC,KAEaf,GAAiB,CAACgB,EAA+BC,IAAoD,CAChH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAMH,EAAO,CAAC,EACdI,EAAQJ,EAAO,CAAC,EAChBK,EAAUL,EAAO,CAAC,EAClBM,EAAYN,EAAO,CAAC,EA+B1B,GAAIE,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMK,EAAe,GACfC,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAaR,EAAM,KAAK,SAAW,EAAKK,EAAeL,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3ES,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaT,EAAW,QAAQ,EACtDc,EAAaV,GAAWA,EAAQ,KAAK,SAAW,EAChDW,EAAeV,GAAaA,EAAU,KAAK,SAAW,EAEtDW,EAAe,GACrB,GAAIF,GAAcC,EAAc,CAC9B,GAAIX,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIC,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEnEW,GAEFL,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,IAGlCO,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,EAEtC,SAAWU,GAAcC,EACvB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIE,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAID,EAAM,KAAK,CAAC,EAAIC,EAAI,KAAK,CAAC,IAAM,EAClC,MAAM,IAAI,MAAM,sDAAsD,EAExEe,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMW,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIV,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMW,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GI,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,IAAMC,IACFC,EAAe,GACfC,EAAcX,EAClB,GAAIN,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FiB,EAAcjB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGiB,EAAcjB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CgB,EAAe,EACjB,CACF,CACA,IAAME,EAAsBV,EAAqBD,EAC3CY,EAAsB,GAE5B,MAAO,CACL,UAAAf,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAW,EACA,kBAAAT,EACA,gBAAiB,EA
CjB,WAAAH,EACA,YAAAW,EACA,SAAAP,EACA,UAAW,KAAK,MAAMO,EAAcpB,EAAW,UAAW,EAC1D,SAAUA,EAAW,SACrB,WAAYA,EAAW,WACvB,MAAOA,EAAW,SAAWA,EAAW,WACxC,uBAAwB,GACxB,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAsB,EACA,aAAAH,EACA,UAAAF,EACA,aAAAD,CACF,CACF,EAEMhC,GACF,CAACuC,EAAeC,EAAyBC,EAAoBC,IAA6C,CACxG,IAAMC,EAAc,CAACD,EAAO,UAAWA,EAAO,oBAAqBA,EAAO,WAAaA,EAAO,QAAQ,EAChGE,EAAY,EACZC,EAAaC,EAAU,KAAKH,CAAW,EAAIC,EAC3CG,EAAwBL,EAAO,oBAC/BM,EAASC,EAAe,aAAcR,EAAUE,EAAY,OAAQC,CAAS,EAC7EM,EAASC,EAAc,SAAUZ,EAAE,SAAUA,EAAE,KAAK,OAAQK,CAAS,EACrEQ,EAASZ,EAAIW,EAAc,UAAWX,EAAE,SAAUA,EAAE,KAAK,OAAQI,CAAS,EAAI,OAE9ES,EAAI,KAAK,KAAKX,EAAO,SAAWE,CAAS,EACzCU,EAAW,CAAC,EAAGP,EAAuB,EAAGR,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,EAExDgB,EAAwDf,EAAI,CAAC,OAAQ,MAAM,EAAI,CAAC,MAAM,EAEtFgB,EAAoC,CACxC,CAAC,QAAuB,KAAMX,CAAU,EAAG,CAAC,QAAuB,KAAMH,EAAO,kBAAkB,EAClG,CAAC,QAAuB,KAAMA,EAAO,gBAAgB,EACrD,CAAC,QAAuB,KAAMA,EAAO,mBAAmB,CAC1D,EAEM3B,EAAS,CAACmC,CAAM,EAClBE,GACFI,EAAgB,KACZ,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2BjB,EAAG,IAAI,EAC5E,GAAGiB,EAA2Bd,CAAW,CAAC,EAC9C5B,EAAO,KAAKqC,CAAM,GAElBI,EAAgB,KAAK,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2Bd,CAAW,CAAC,EAExG,IAAMe,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,EACxG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CACtC,EAEMC,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sDAOVC,EAAS;AAAA;AAAA;AAAA;AAAA,qDAKTC,EAAYrB,EAAI;AAAA,UAClBmB,CAAO;AAAA;AAAA,UAEPC,CAAM;AAAA,WAEY;AAAA,YAChBA,CAAM;AAAA,WAINE,EAAmBC,GAA+B;AAAA;AAAA,IAE1DA,EAAa,iBAAiBL,CAAQ,EAAE,iBAAiB,GAAG3C,EAAQiC,CAAM,CAAC;AAAA,IAC3Ee,EAAa,UAAU,CACnBV,EAAGX,EAAO,WAAa,CACzB,CAAC,CAAC;AAAA,MACFqB,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,oBAC5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKlCN,EAAO,UAAW;AAAA,cAC1BW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKKX,EAAO,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAajCmB,CAAS;AAAA,KAGT,MAAO,CACL,KAAM,gBACN,YAAa,CAAC,KAAM,GAAGnB,EAAO,UAAW,GAAGW,CAAC,GAAG,CAAC,CAACb,CAAC,GAAI,kBAAAe,CAAiB,EACxE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMZ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAea,EACf,gBAAAE,CACF,GACA,gBAAAM,CACF,CACF,EAES7D,GAAsCe,GAC/CgD,GAA4B,CAAC,GAAGhD,CAAU,CAAC,EAEzCd,GAAgD8D,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhG7D,GACF,CAAC8D,EAAyBC,EAAmBC,EAA8BzB,EAC1E0B,IAAwB,CACvB,IAAIC,EAAgBH,EACdI,EAAW5B,EAAO,WAClB6B,EAAQ7B,EAAO,MACrB,OAAIwB,EAAM,KAAK,SAAW,GAAKxB,EAAO,mBAAqB,IACzD2B,EAAgBH,EAAM,QAAQ,CAACxB,EAAO,UAAWA,EAAO,iBAAkB4B,EAAU5B,EAAO,QAAQ,CAAC,GAGlGyB,EACFE,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAeF,EAAQE,EAAc,SAAU3B,CAAM,EAC7E,CAAC,OAAQ,CAAC2B,EAAeF,CAAM,EAAG,QAAS,CAACzB,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAE3FC,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAe,OAAWA,EAAc,SAAU3B,CAAM,EAChF,CAAC,OAAQ,CAAC2B,CAAa,EAAG,QAAS,CAAC3B,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjFG,IAAU,IACZF,EAAgBJ,EAAQ,QACpBO,GAAsB,CAACH,CAAa,EAAG,CAAC,EAAG,EAAG,EAAGE,CAAK,CAAC,EAAG,CAAC,OAAQ,CAACF,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EACzGA,EACIA,EAAc,QAAQ,CAAC3B,EAAO,UAAWA,EAAO,oBAAqB4B,EAAWC,EAAO7B,EAAO,QAAQ,CAAC,GAGtGuB,EAAQ,QACXQ,GAA2BJ,EAAenE,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACmE,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CACjD,EAESjE,GAAsB,CAAC6D,EAAyBjD,IAAqC,CAChG,IAAM0B,EAAS3C,GAAekE,EAAQ,OAAQjD,CAAU,EACxD,GAAIiD,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpC,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIA,EAAQ,OAAO,CAAC,GAAG,KAAK,SAAW,EACrC,MAAM,IAAI,MAAM,8BAA8B,EAGhD,IAAMS,EAAIC,GACNV,EAASvB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAUuB,EAAQ,OAAO,CAAC,EAAG,OACvG,CAAC,EACC7C,EAAU6C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OACzF5C,EAAY4C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,
KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OAC3FW,EAAIzE,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG7C,EAASsB,EAAQ,CAAC,EAChFmC,EAAI1E,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG5C,EAAWqB,EAAQ,CAAC,EACxFoC,GAAeb,EAASS,EAAGE,EAAGC,EAAG,OAAW,OAAW,OAAW,OAAW,OAAWnC,EAAQ1B,CAAU,CAC5G,ICzVA,IAeM+D,GAwGAC,GAwHAC,GAoDOC,GAnSbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAOMR,GACF,CAACS,EAA+BC,IAAoD,CAClF,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdE,EAAO,EACPC,EAAYC,EAAU,gBAAgBJ,EAAQE,CAAI,EAClDG,EAAWD,EAAU,kBAAkBJ,EAAQE,CAAI,EACnDI,EAAaC,GAAiBF,CAAQ,EACtCG,EAAiBH,EAAWC,EAC5BG,EAAa,CAACT,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGQ,CAAc,EAClDE,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EC,EACF,CAAC,CAAC,QAAuB,KAAMN,CAAQ,EAAG,CAAC,QAAuB,KAAMG,CAAc,CAAC,EAC3FG,EAAgB,KAAK,GAAGC,EAA2BH,EAAYA,CAAU,CAAC,EAE1E,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAKlB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACxEW,EAAQD,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEoB,EAAOF,EAAc,OAAQlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/DqB,EAASC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACnFe,EAAY,CAACN,EAAGE,EAAOC,EAAMC,CAAM,EACnCG,EAAWP,EAAE,KAAK,MAClBQ,EAAUjB,IAAe,EAAI,MAAQ,MAAMA,CAAU,QACrDkB,EAAgB,GAEhBC,EAA8B,CAAC,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CAAC,EAC3G,MAAO;AAAA;AAAA;AAAA,2CAG4BF,CAAO,KAAKC,CAAa;AAAA,0BAC1CA,CAAa;AAAA,IACnCV,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGJ,CAAS,CAAC;AAAA,IACtEP,EAAa,UAAUU,CAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOrBD,CAAO;AAAA;AAAA,4BAECA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAahDW,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKhDiB,CAAO;AAAA;AAAA,yBAEEA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,OAAOQ,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAcpDG,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,mFAIYP,EAAW,OAAO;AAAA,yCAC5DkB,EAAM,YAAY,SAAS,CAAC;AAAA,6BACxCC,EAAK,YAAY,SAAS,CAAC;AAAA;AAAA,oBAEpCH,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,MAAMO,CAAQ,IAAIC,CAAO,qBAAqBD,CAAQ,IAC5FC,CAAO;AAAA,QACXJ,EAAO,IAAI,QAAS,UAAW,IAAK,OAAO,CAAC;AAAA;AAAA,IAG9C,EACA,MAAO,CACD,KAAM,wBAEV,YAAa,CAAC,KAAM,GAAGpB,EAAW,OAAO,IAAIO,CAAU,GAAI,kBAAAI,CAAiB,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAGK,CAAS,EAC5B,gBAAAQ,CACF,GACA,gBAAAE,CACF,CACF,EAEEvB,GACF,CAACqC,EAAyBC,EAAmBX,EAAmBC,EAAkBW,EAAWC,EAAWC,EACvGC,IAAoB,CACnB,IAAM1B,EAAaC,GAAiBwB,CAAC,EAC/BE,EAAK,GAGLC,EAAa5B,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC5D6B,EAAc7B,IAAe,EAAI,MAAQ,MAAMA,CAAU,IACzD8B,EAAiB,CAACC,EAAcC,IAAiB,GAAGJ,CAAU,IAAIG,CAAI,KAAKC,CAAI,IAC/EC,EAAcV,EAAIE,EAAIzB,EACtBkC,EAAS,KAAK,KAAKV,EAAIG,CAAE,EAEzBQ,EAA4D,CAAC,MAAM,EACnEC,EAAwC,CAC5C,CAAC,QAAuB,KAAMF,CAAM,EAAG,CAAC,QAAuB,KAAMV,CAAC,EACtE,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAMwB,EAAIC,EAAIzB,CAAU,CAAC,CAC9D,EAEMqC,EAAuB7B,GAA+B,CAC1D,IAAM8B,EAAc5B,EAAc,QAASY,EAAM,SAAUA,EAAM,KAAMtB,CAAU,EACjF,MAAO;AAAA,IACXQ,EAAa,iBAAiB8B,CAAW,CAAC;AAAA,kEACoBV,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAUmB,CAAE,CAAC;AAAA,4CACcA,CAAE;AAAA,+CACCA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAQjCY,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA;AAAA,sBAE9B6B,CAAW;AAAA;AAAA;AAAA;AAAA,2BAINC,EAAe,MAAO,YAAY,CAAC;AAAA,IAExD,EAEMU,EAAanB,EAAQ,QACvB,CACE,KAAM,0BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAmBmC,CAAqB,EAC7E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACZ,EAAGE,EAAGE,EAAI,CAAC,EAAG,UAAwB,CAChD,EACA,cAAe,CAAC,EAAGJ,EAAIE,EAAIzB,CAAU,EACrC,gBAAiBoC,CACnB,GACA,gBAAiBC,CACnB,EACA,CAAC,OAAQ,CAACf,CAAK,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjCjB
,EAAoC,CACxC,CAAC,QAAuB,KAAM4B,CAAW,EAAG,CAAC,QAAuB,KAAMT,CAAC,EAC3E,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAM2B,EAAKF,EAAIzB,CAAU,CAAC,CAC/D,EACMI,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EG,EAAmBC,GAA+B,CACtD,IAAMiC,EAAc/B,EAAc,QAASC,EAAM,SAAUA,EAAM,KAAMX,CAAU,EAC3E0C,EAAahC,EAAc,OAAQE,EAAK,SAAUA,EAAK,KAAMZ,CAAU,EAC7E,MAAO;AAAA,2DAC4C4B,CAAU;AAAA,2DACVa,EAAY,KAAK,OAAO;AAAA,0DACzBC,EAAW,KAAK,OAAO;AAAA,kEACfd,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,wBAAwB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKlE+B,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA,mCACjB2B,CAAE;AAAA,gEAC2BA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+DAMHD,CAAO;AAAA,qCACjCG,CAAW;AAAA,yBACvBA,CAAW;AAAA;AAAA,2BAETC,EAAe,eAAgB,cAAc,CAAC;AAAA,IAEnE,EACA,OAAOT,EAAQ,QACX,CACE,KAAM,uCAEN,YAAa,CAAC,KAAM,GAAGrB,CAAU,IAAI0B,CAAO,GAAI,kBAAAtB,CAAiB,EACjE,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACmB,EAAGE,EAAG,CAAC,EAAG,UAAwB,CAC5C,EACA,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAc,EAAuB,CAAC,EACnE,gBAAA5B,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACiC,EAAY7B,EAAOC,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC3D,EAEE3B,GACF,CAACoC,EAAyB7B,EAA+BC,IAAuC,CAC9F,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdiD,EAAIjD,EAAO,CAAC,EACZkD,EAAIlD,EAAOA,EAAO,OAAS,CAAC,EAC5BmD,EAAI/C,EAAU,kBAAkBJ,EAAQ,CAAC,EAAIkD,EAC7C5C,EAAaC,GAAiB2C,CAAC,EAC/BE,EAAahD,EAAU,KAAKH,CAAW,EAAIK,EAC3CK,EACF,CAAC,CAAC,QAAuB,KAAMwC,CAAC,EAAG,CAAC,QAAuB,KAAM,KAAK,MAAMD,EAAI5C,CAAU,CAAC,CAAC,EAC1FI,EAAwD,CAAC,OAAQ,MAAM,EAEvE2C,EAAoB/D,GAAYqC,EAAS7B,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGmD,EAAGE,EAAGD,EAAGnD,EAAW,OAAO,EACrGc,EAAmBC,GAA+B,CACtD,IAAMQ,EAAWgC,GAA4BxD,EAAO,CAAC,EAAE,QAAQ,EACzDyD,EAAYjD,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC3DkD,EAAgBlD,IAAe,EAAIgB,EAAW,MAAMhB,CAAU,IAAIgB,CAAQ,IAE1EsB,EAAc5B,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMQ,CAAU,EACnFmD,EAAerC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUG,EAAaK,CAAU,EAEzF,MAAO;AAAA,2DAC4CsC,EAAY,KAAK,OAAO;AAAA,gEACnBW,CAAS;AAAA,kEACPE,EAAa,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,IAIvF3C,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kDAMsB0C,CAAa,eAAeA,CAAa;AAAA,IAErF,EACA7B,EAAQ,QACJ,CACE,KAAM,4BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAAI,CAAiB,EACtD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKsD,EAAa,EAAuB,CAAC,EAClE,gBAAAzC,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACf,EAAO,CAAC,EAAGuD,CAAiB,CAAC,CAAC,CAC9C,EAES7D,GAAe,CAACmC,EAAyB5B,IAA6C,CAC7FA,EAAW,SAAW,OACxBR,GAAkCoC,EAASA,EAAQ,OAAQ5B,CAAU,EAErE4B,EAAQ,QAAQtC,GAA8BsC,EAAQ,OAAQ5B,CAAU,CAAC,CAE7E,ICzSA,IAgBM2D,GAMAC,GA6GOC,GAnIbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAQMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,CAE3D,EAEMP,GACF,CAACO,EAA+BC,EAAiCC,IAAqC,CACpG,IAAMC,EAAaF,EAAW,WAExBG,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAAQL,EAAO,CAAC,EAChBM,EAAO,CAACH,GAAcH,EAAO,CAAC,EAE9BO,EAAcH,EACdI,EAAOC,EAAU,cAAcR,EAAW,KAAMG,EAAO,MAAM,EAC7DM,EAAYD,EAAU,gBAAgBL,EAAQI,CAAI,EAClDG,EAAWF,EAAU,kBAAkBL,EAAQI,CAAI,EAEnDI,EAAYH,EAAU,KAAKJ,EAAM,IAAI,EACrCQ,EAAWP,EAAOG,EAAU,KAAKH,EAAK,IAAI,EAAI,EACpD,GAAIM,IAAcD,GAAaL,GAAQO,IAAaF,EAClD,MAAM,IAAI,MAAM,+BAA+BA,CAAQ;AAAA;AAAA,2BAEpCC,CAAS,qBAAqBC,CAAQ,EAAE,EAG7D,IAAMC,EAA6B,CAAC,EACpC,QAASC,EAAI,EAAGA,EAAIX,EAAO,OAAQ,EAAEW,EAC/BA,EAAIP,EACNM,EAAiB,KAAKV,EAAOW,CAAC,CAAC,EAE/BD,EAAiB,KAAK,CAAC,EAG3B,IAAME,EAAaC,GAAiBN,CAAQ,EACtCO,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAoC,CACxC,CAAC,QAAuB,KAAMT,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAQ,EAC/E,CAAC,QAAuB,KAAM,KAAK,MAAMA,EAAWK,CAAU,CAAC,EAC/D,CAAC,OAAsB,KAAMf,EAAW,OAAO,CACjD,EACIK,GACFY,EAAkB,KAAK,MAAM,EAE/B,IAAME,EAAo
BlB,EAAc,EAClCmB,EAAkBnB,EAAc,EAEhCoB,EAAmBC,GAA+B,CACtD,IAAMC,EAAWC,GAA4BzB,EAAO,CAAC,EAAE,QAAQ,EACzD0B,EAAY,CAChBC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAU,EACjEW,EAAc,QAAStB,EAAM,SAAUA,EAAM,KAAMW,CAAU,CAC/D,EACIV,GACFoB,EAAU,KAAKC,EAAc,OAAQrB,EAAK,SAAUA,EAAK,KAAMU,CAAU,CAAC,EAE5EU,EAAU,KAAKE,EAAe,SAAU5B,EAAO,CAAC,EAAE,SAAUO,EAAaS,CAAU,CAAC,EAChFI,GACFM,EAAU,KAAKE,EAAe,qBAAoCd,CAAgB,CAAC,EAEjFO,GACFK,EAAU,KAAKE,EAAe,mBAAkCd,CAAgB,CAAC,EAGnF,IAAMe,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAClE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,CAC5E,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA,IACtEH,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,wBAEvDO,GAAW,MAAOd,CAAU,CAAC;AAAA,+BACtBc,GAAW,MAAOd,CAAU,CAAC;AAAA;AAAA;AAAA,oBAGxCe,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA,iBAInDgB,GAAU,cAAehB,CAAU,CAAC;AAAA,oCACjBgB,GAAU,qBAAsBhB,CAAU,CAAC,yBACnEb,EAAa,GAAK,eAAe;AAAA;AAAA;AAAA,uBAGtB4B,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA,uBAChDe,GAAUP,EAAUR,EAAY,UAAU,CAAC;AAAA,6BACrCU,EAAU,CAAC,EAAE,KAAK,KAAK,cAAcvB,EAAa,GAAK,QAAQ;AAAA,UAClFG,EAAO,KAAKyB,GAAUP,EAAUR,EAAY,SAAS,CAAC,GAAK,EAAE;AAAA;AAAA;AAAA;AAAA,MAIjEI,EAAoB,sCAAwC,EAAE;AAAA,MAC9DC,EAAkB,2CAA6C,EAAE;AAAA,IAEjE,EACMY,EAAU,CAAC,CAAC,KAAM1B,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIoB,GACFa,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAE7DO,GACFY,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAG1D,CACL,KAAM,qBACN,YAAa,CAAC,KAAM,GAAGE,CAAU,IAAId,CAAW,IAAIC,CAAU,GAAI,kBAAAe,CAAiB,EACnF,WAAY,KACP,CAAC,QAAAe,EAAS,cAAe,CAAC,EAAG,KAAK,KAAKvB,EAAY,EAAuB,CAAC,EAAG,gBAAAS,CAAe,GAClG,gBAAAG,CACF,CACF,EAES5B,GAAY,CAACwC,EAAyBjC,IAA0C,CAC3FT,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQzC,GAA2ByC,EAAQ,OAAQjC,EAAYiC,EAAQ,WAAW,CAAC,CAC7F,ICtIA,IAoBMC,GA+BOC,GAqPAC,GAQAC,GAhTbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAWMT,GAAiB,CAACU,EAA+BC,IAA4C,CACjG,GAAID,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,oCAAoC,EAEtD,IAAME,EAAIF,EAAO,CAAC,EACZG,EAAQD,EAAE,KAAK,OACrB,GAAIA,EAAE,KAAKC,EAAQ,CAAC,IAAMF,EAAW,EACnC,MAAM,IAAI,MAAM,wDAAwD,EAE1E,IAAMG,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3FI,EAAWJ,EAAW,UAAY,EAAIA,EAAW,KACjDK,EAAIN,EAAO,CAAC,EAClB,GAAI,CAACO,EAAU,SAASD,EAAE,KAAM,CAACL,EAAW,EAAGG,EAAeC,CAAQ,CAAC,EACrE,MAAM,IAAI,MAAM,6EAA6E,EAG/F,IAAMG,EADSR,EAAO,CAAC,EACI,KAC3B,GAAIO,EAAU,KAAKC,CAAW,IAAMP,EAAW,EAAIG,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAE5C,GAAIJ,EAAO,SAAW,EAAG,CAEvB,IAAMS,EADaT,EAAO,CAAC,EACQ,KAC7BU,EACFT,EAAW,KAAO,EAAKA,EAAW,EAAIG,EAAiBH,EAAW,EAAI,KAAK,OAAOG,EAAgB,GAAK,CAAC,EAC5G,GAAIG,EAAU,KAAKE,CAAe,IAAMC,EACtC,MAAM,IAAI,MAAM,8BAA8B,CAElD,CACF,EAEanB,GACT,CAACS,EAA+BC,EAC/BU,EAAoDC,IAAwD,CAC3G,IAAMC,EAAab,EAAO,CAAC,EAAE,KACvBG,EAAQU,EAAW,OACnBT,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3Fa,EAAYD,EAAWV,EAAQ,CAAC,EAChCY,EAAWd,EAAW,EACtBe,EAAYf,EAAW,EACvBgB,EAAYJ,EAAW,MAAM,EAAGV,EAAQ,CAAC,EACzCe,EAAYX,EAAU,KAAKU,CAAS,EAEpCE,EADWlB,EAAW,UAAY,EAAIA,EAAW,KACpB,EAC7BmB,EAAWpB,EAAO,CAAC,EAAE,SACrBqB,EAAeC,GAAiBR,CAAS,EACzCS,EAAcD,GAAiBrB,EAAW,CAAC,EAC3CuB,EAAcF,GAAiBH,CAAe,EAC9CM,EAAcC,GAAqBN,CAAQ,EAC3CO,EAAsBb,EAAYV,EAAgBqB,EAClDG,EAAwB,KAAK,MAAMhB,EAAiCe,CAAmB,EACvFE,EAA0BzB,GAAiBO,EAAyB,CAAC,GAAKiB,EAAwB,EAClGE,EAAc,CAACD,GAA2BD,GAAyB,EAAKN,GAAiBN,CAAS,EAClGY,GAAyB,GAAMN,GAAiBN,CAAS,GAAK,EAAU,EACA,EACxEe,EAAcd,EAAU,OAAO,CAACH,EAAWE,CAAS,CAAC,EACrDgB,EAAazB,EAAU,KAAKwB,CAAW,EAAID,EAAaT,EAExDY,EAAoCJ,EACtC,CAAC,EACD,CAAC,CAAC,QAAuB,KAAMG,CAAU,EAAG,CAAC,QAAuB,KAAM/B,EAAW,SAAS,CAAC,EAC7FiC,EAAiB,CAAChB,EAAWJ,EAAWC,EAAWQ,CAAW,EAC9DY,EAAS5B,EAAU,aAAaP,EAAO,CAAC,EAA
E,IAAI,EAAE,MAAM,EAC5DmC,EAAO,OAAO,GAAI,EAAGhB,EAAkBK,CAAW,EAClDS,EAAgB,KAAK,GAAGG,EAA2BF,CAAc,CAAC,EAClED,EAAgB,KAAK,GAAGG,EAA2BD,CAAM,CAAC,EAC1DF,EAAgB,KAAK,GAAGG,EAA2BpC,EAAO,CAAC,EAAE,IAAI,CAAC,EAC9DA,EAAO,SAAW,GACpBiC,EAAgB,KAAK,GAAGG,EAA2B7B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAE5F,IAAMqC,GAAkB,CAACnB,EAAWJ,EAAWE,EAAYc,CAAU,EACrEG,EAAgB,KAAK,GAAGG,EAA2BC,EAAe,CAAC,EACnE,IAAMC,GAAmBC,GAA+B,CACtD,IAAMC,GAAYN,EAAe,OAC3BhC,EAAIuC,EAAc,IAAKzC,EAAO,CAAC,EAAE,SAAUwC,GAAWjB,CAAW,EACjEjB,GAAImC,EAAc,OAAsBN,EAAO,OAAQX,CAAW,EAClEkB,GAASD,EAAc,SAAUzC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC1E2C,GAAiB,CAACzC,EAAGI,GAAGoC,EAAM,EAC9BE,EACF5C,EAAO,SAAW,EAAIyC,EAAc,iBAAgCzC,EAAO,CAAC,EAAE,KAAK,MAAM,EAAI,OAC7F4C,GACFD,GAAe,KAAKC,CAAU,EAEhC,IAAMC,GAAaR,GAAgB,OAC7BS,EAASC,EAAe,SAAU/C,EAAO,CAAC,EAAE,SAAU6C,GAAYf,CAAU,EAC5EkB,GAA8B,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACpG5B,EAAW6B,GAA4BjD,EAAO,CAAC,EAAE,QAAQ,EAEzDkD,GAAe,IAAM,CACzB,OAAQ3B,EAAa,CACnB,IAAK,GACH,MAAO,SAASH,CAAQ,OAC1B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,QACE,MAAM,IAAI,MAAM,GAAGG,CAAW,8BAA8B,CAChE,CACF,GAAG,EAEG4B,GAAkB;AAAA,yCACShC,CAAe,aAAaK,CAAW;AAAA,YACpElB,GAAE,WAAW,YAAa,IAAK,MAAM,CAAC;AAAA,yBACzBA,GAAE,aAAa,WAAW,CAAC;AAAA,qCACfkB,CAAW;AAAA,iCACfA,IAAgB,EAAI,SAAW,kBAAkB;AAAA;AAAA;AAAA;AAAA,uCAI3C0B,CAAW,IACtC,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,GAAGjC,CAAQ,kBAAkBiC,EAAC,OAAOjC,CAAQ,kBAAkBiC,EAAC,IAAI,EACjG,KAAK,IAAI,CAAC;AAAA,0CACe,IAC5B9B,IAAgB,EACX,GAAG2B,CAAW,IACjB,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,uBAAuBA,EAAC,yBAAyB,EAAE,KAAK,IAAI,CAAC,KAE5F,yBAAyBH,CAAW,IAAI,MAAM,CAAC,EAAE,KAAK,YAAY,EAAE,KAAK,GAAG,CAAC,eAErF,CAAC;AAAA;AAAA,uCAE2BrB,EAA0Bf,EAAYO,CAAY;AAAA,gBACzEnB,EAAE,WAAW,YAAasC,GAAY,EAAGX,EAA0B,IAAM,SAASR,CAAY,MAAM,CAAC;AAAA,gBACrGnB,EAAE,WAAW,YAAasC,GAAY,EAAG,aAAa,CAAC;AAAA,mCACpCtC,EAAE,gBAAgB,WAAW,CAAC;AAAA,4BACrCgD,CAAW;AAAA,yCACE,EAAI3B,CAAW;AAAA,8BAC1BrB,EAAE,YAAY,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG3C2B,EAA0B,gDAAkD,kBAAkB,GAClGC,EAAa,EAAI,MAAQ,EAAE,OAC3B,MACK,KACG,CAAC,OAAQ,EAAIP,CAAW,EACxB,CAAC6B,GAAGC,KAAM,GACN9B,IAAgB,EAAI,UAAU8B,EAAC,4BAA4BA,EAAC,IACxC,cAAcA,EAAC,2BAA2BA,EAAC,IAAI,EAAE,EAC5E,KAAK,KAAK,CAAC;AAAA;AAAA,6BAEC,EAAI9B,CAAW;AAAA;AAAA,WAG9B+B,GAAuBV,EAAa;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKlBA,EAAW,YAAY,kBAAkB,CAAC;AAAA,aAExB,GAE1C,OAAOf,EAA0B;AAAA,iDACQiB,EAAO,KAAK,KAAK,KAAKhC,EAAYV,CAAa;AAAA,UACtFmC,EAAa,iBAAiB,GAAGI,GAAgBG,CAAM,CAAC;AAAA,UACxDP,EAAa,UAAU,CACvBnC,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA,2BACiBF,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAI7BA,EAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,qCAEd4B,CAAU;AAAA,sDACOA,CAAU;AAAA,gBAEnBc,EAAa;AAAA,mDACPxC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yCAMvBwC,EAAW,YAAY,uBAAuB,CAAC,8BAC9B,EAAE;AAAA,6BAC/BtC,GAAE,KAAK,OAAO;AAAA,cAC7BA,GAAE,WAAW,YAAa,IAAK,6BAA6B,CAAC;AAAA;AAAA,+DAEZF,CAAa;AAAA,0BAClDsC,GAAO,YAAY,cAAc,CAAC;AAAA;AAAA,+BAE7BtB,CAAQ,IAAIwB,EAAa,2BAA6B,CAAG;AAAA,cAC1EtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA,6CACRL,EAAW,UAAYsB,CAAW;AAAA,yDACtBT,CAAS;AAAA,cACpDqC,EAAe;AAAA;AAAA;AAAA,gCAGGL,EAAO,KAAK,OAAO;AAAA,2CACR,KAAK,KAAKhC,EAAYV,CAAa,CAAC;AAAA,YACnE0C,EAAO,WAAW,iBAAkB,IAAK,OAAO,CAAC;AAAA,YACjDA,EAAO,WAAW,iBAAkBD,GAAa,EAAG,KAAK,CAAC;AAAA,YAC1DC,EAAO,WAAW,iBAAkBD,GAAa,EAAG,kCAAkC,CAAC;AAAA,gCACnEC,EAAO,gBAAgB,gBAAgB,CAAC;AAAA;AAAA;AAAA,wBAGhDhC,CAAS;AAAA,kCACCgC,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA,0CAEhC1C,CAAa;AAAA;AAAA,6CAEVU,CAAS;AAAA;AAAA,gBAEtCgC,EAAO,YAAY,gBAAiB,cAAc,CAAC;AAAA,iCAClC9B,EAAYc,CAAU;AAAA;AAAA;AAAA,WAId;AAAA,UAC/BS,EAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGL,GAAgBG,CAAM,CAAC;
AAAA,UACnFP,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,qCACjDO,EAAO,KAAK,KAAK,KAAKzB,CAAY;AAAA,iCACtCyB,EAAO,gBAAgB,YAAY,CAAC;AAAA,sBAC/CA,EAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,sBACnDC,EAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,2BAC9C3C,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAKI0C,EAAa;AAAA,8CACZd,CAAU,QAAQ1B,CAAa;AAAA;AAAA,uCAEtCwC,EAAW,YAAY,kBAAkB,CAAC;AAAA,yEAEvB,EAAE;AAAA,oCACxBxC,EAAgB0B,CAAU;AAAA,2BACnCxB,GAAE,KAAK,OAAO;AAAA,qCACJwB,CAAU;AAAA,cACjCxB,GAAE,WAAW,YAAa,IAAK,SAASwB,CAAU,MAAM,CAAC;AAAA;AAAA,+CAExB1B,CAAa;AAAA;AAAA,4BAEhCsC,GAAO,YAAY,aAAa,CAAC;AAAA;AAAA,iCAE5BtB,CAAQ,IAAIwB,EAAa,qDAAuD,CAAG;AAAA,gBACpGtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,gBAEvC6C,EAAe;AAAA;AAAA,gBAEfG,EAAoB;AAAA,sDACkB/B,CAAW;AAAA;AAAA;AAAA,cAIpBqB,EAAa;AAAA,kBACxCU,EAAoB;AAAA,iBAEoB,EAAE;AAAA;AAAA,wCAEpBjC,CAAY;AAAA,gBACpCyB,EAAO,WAAW,iBAAkBD,GAAa,EAAG,GAAGxB,CAAY,YAAY,CAAC;AAAA,gBAChFyB,EAAO,aAAa,iBAAkB,kBAAkB,CAAC;AAAA;AAAA,UAGnE,EACA,MAAO,CACL,KAAMjB,EAA0B,uBAAyB,cACzD,YAAa,CACX,KAAM,GAAG5B,EAAW,QAAQ,IAAIa,CAAS,IAAIM,CAAQ,IAAIpB,EAAO,MAAM,GACtE,kBAAmB,MAAMA,EAAO,MAAM,EAAE,KAAK,MAAM,CACrD,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM+B,EAAa,SAAAX,CAAQ,CAAC,EACvC,KAAMS,EAA0B,uBAAyB,cACzD,cAAeA,EAA0B,CAAC,EAAG,EAAG,EAAG,KAAK,KAAKb,EAAYc,CAAU,EAAG,EAAGZ,CAAS,EACzD,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAC5F,gBAAAC,CACF,GACA,gBAAAK,EACF,CACF,EAES9C,GAAc,CAAC+D,EAAyBtD,IAA4C,CAC/FX,GAAeiE,EAAQ,OAAQtD,CAAU,EACzC,IAAMU,EAAqD4C,EAAQ,4BAA4B,EACzF3C,EAAiC2C,EAAQ,kCAAkC,EACjFA,EAAQ,QAAQhE,GACZgE,EAAQ,OAAQtD,EAAYU,EAA0BC,CAA8B,CAAC,CAC3F,EAEanB,GAA8BQ,GACvCuD,GAA4BvD,CAAsE,ICjTtG,IAiBMwD,GAmBAC,GA0BAC,GA2BAC,GAuBAC,GAuBAC,GAeAC,GAiDAC,GA0BOC,GAjObC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KASMb,GAAkBc,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,sCAAsC,EAGxD,GAAIA,EAAO,QAAU,EAAG,CACtB,IAAIC,EAAYD,EAAO,CAAC,EAAE,KAAK,OAAS,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EAI9D,GAHIA,EAAO,SAAW,IACpBC,EAAYD,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAEpD,CAACC,EACH,MAAM,IAAI,MAAM,6EAA6E,CAEjG,CACF,EAEMd,GAAiB,CAACe,EAAuBC,EAAmBC,IAA+B,CAC/F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,sBACSH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,2BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA,gCAGzCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,UAI9E,MAAO;AAAA,oBACWD,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,cAIvBG,CAAK;AAAA;AAAA;AAAA,OAInB,EAEMjB,GAAgB,CAACc,EAAuBC,EAAmBC,IAA+B,CAC9F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,yCAKnEG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,gCAEvDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,oCAI1CI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMhB,GAAa,CAACa,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,+BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,4BACjDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEtCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMf,GAAa,CAACY,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA,6BAE/EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,+BAE5CI,EAAa,mBAAoBD,EAAGH,C
AAS,CAAC;AAAA,6BAChDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEvCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMd,GAAgB,CAACW,EAAuBC,EAAmBK,IAAsC,CACrG,OAAQA,EAAW,KAAM,CACvB,IAAK,GACH,OAAOrB,GAAee,EAAQC,EAAWK,EAAW,KAAK,MAAM,EACjE,IAAK,GACH,OAAOpB,GAAcc,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAChE,IAAK,GACH,OAAOnB,GAAWa,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,IAAK,GACH,OAAOlB,GAAWY,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMhB,GAAuB,CAACQ,EAA+BQ,IAA2C,CACtG,IAAMC,EAAcC,EAAU,SAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGQ,EAAW,IAAI,EACxEG,EAAYX,EAAO,CAAC,EAAE,KACtBY,EAAaF,EAAU,KAAKD,CAAW,EACvCI,EACF,CAAC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMJ,EAAW,IAAI,CAAC,EACzFA,EAAW,OAAS,GACtBK,EAAgB,KAAK,CAAC,KAAMb,EAAO,CAAC,EAAE,SAAU,KAAMQ,EAAW,KAAK,CAAC,EAGzEK,EAAgB,KAAK,GAAGC,EAA2Bd,EAAO,CAAC,EAAE,KAAMS,CAAW,CAAC,EAC/E,IAAMM,EAAwD,CAAC,MAAM,EAE/DC,EAAmBC,GAA+B,CACtD,IAAMf,EAASgB,EAAe,SAAUlB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEU,EAAQC,EAAc,IAAKpB,EAAO,CAAC,EAAE,SAAUW,EAAU,MAAM,EAC/DU,EAAWF,EAAM,KAAK,MACtBG,EAAa/B,GAAcW,EAAQS,EAAU,OAAQH,CAAU,EAC/De,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQf,EAAW,KAAK,MAAM,CAAC,EACpG,OAAIA,EAAW,OAAS,GACtBe,EAAS,KAAK,CAAC,KAAM,iBAAkB,KAAMF,CAAkC,CAAC,EAG3E;AAAA,cACGJ,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBJ,EAAOjB,CAAM,CAAC;AAAA,cACvEe,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,4BAE5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,0BAEtCmB,CAAQ;AAAA,cACpBC,CAAU;AAAA;AAAA,UAGtB,EAEA,MAAO,CACL,KAAM,MACN,YAAa,CAAC,KAAM,GAAGd,EAAW,IAAI,GAAI,kBAAAO,CAAiB,EAC3D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMN,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAU,KAAKD,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAAG,CACF,CACF,EAEMvB,GAAgC,CAACO,EAA+BQ,IAA6C,CACjH,GAAIR,EAAO,OAAS,EAAG,CACrB,IAAMwB,EAAexB,EAAO,CAAC,EAAE,iBAAiB,EAC1CyB,EAASzB,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,KAAQA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAI,EAElFG,EAAYH,EAAO,CAAC,EAAE,KAAK,OAC3B0B,EAAa,IAAI,WAAW,EAAIvB,CAAS,EAAE,KAAK,CAAC,EACvD,GAAIH,EAAO,QAAU,EAAG,CACtB,IAAM2B,EAAO3B,EAAO,CAAC,EAAE,iBAAiB,EACxC,QAASM,EAAI,EAAGA,EAAIqB,EAAK,OAAQrB,IAC/BoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,CAAC,EAAI,OAAOkB,EAAalB,CAAC,CAAC,EACpDoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,EAAIH,CAAS,EAAI,OAAOqB,EAAalB,EAAIqB,EAAK,MAAM,CAAC,CAElF,MACEH,EAAa,QAAQ,CAACI,EAAGtB,IAAMoB,EAAW,OAAOpB,CAAC,CAAC,EAAK,OAAOsB,CAAC,CAAE,EAGpE,IAAMC,EAAiB,CAAC,EACxB,OAAAH,EAAW,QAAQE,GAAKC,EAAK,KAAKD,CAAC,CAAC,EAE7B,CAAC,KAAMpB,EAAW,KAAM,MAAAiB,EAAO,KAAAI,CAAI,CAC5C,KACE,QAAOrB,CAEX,EAEad,GAAM,CAACoC,EAAyBtB,IAAoC,CAC/EtB,GAAe4C,EAAQ,MAAM,EAC7B,IAAMC,EAAoBtC,GAA8BqC,EAAQ,OAAQtB,CAAU,EAClFsB,EAAQ,QAAQtC,GAAqBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxF,ICrOA,IAmBMC,GAMAC,GA4BAC,GA2DAC,GAsJAC,GAGAC,GAGAC,GAGAC,GAaAC,GAiCOC,GAYAC,GAKPC,GAWOC,GAKAC,GAUPC,GA6BOC,GAKAC,GAgBAC,GAKAC,GA/ZbC,GAAAC,EAAA,kBAGAC,KAEAC,IAEAC,KAIAC,KAQMxB,GAAkByB,GAAwC,CAC9D,GAAIC,GAAI,OAAO,uBAAyB,CAACD,GAAUA,EAAO,SAAW,GACnE,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EAEMxB,GAA0C,CAC5C0B,EAAmBC,EAA2BC,IAAyD,CACzG,IAAMC,EAAiBF,EAAW,SAAW,OACvCG,EAA2BJ,EAAM,KAAK,MAAM,EAC9CG,GACFC,EAAyB,OAAO,EAAG,EAAGA,EAAyB,IAAI,CAAE,EAEvE,IAAMC,EAAe,OAAO,eAAe,KAAKJ,EAAY,WAAW,EACjEK,EAAcL,EAAW,YAAY,MAAM,EAC3CM,EAAUN,EAAW,QAAQ,MAAM,EACnCO,EAAsBH,EAAgBJ,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCS,GAAa,qBAAqBR,EAAkBE,EAA0BE,EAAaC,EAASC,EAAWC,CAAI,EAEnH,IAAME,EAA4BD,GAAa,uBAC3CR,EAAkBE,EAA0BG,EAASC,EAAWF,EAAaG,EAAMR,EAAW,OAAO,EAEnGW,EAAgB,OAAO,OAAO,CAAC,EAAGX,CAAU,EAC9CI,EACF,OAAO,OAAOO,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM
,UAAAD,EAAW,SAAUP,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOW,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAE1F,IAAMY,EAA2BF,EAA0B,MAAM,EACjE,OAAAE,EAAyB,KAAKA,EAAyB,OAAO,EAAG,CAAC,EAAE,CAAC,CAAC,EAC/D,CAACD,EAAeT,EAAiBU,EAA2BF,CAAyB,CAC9F,EAEMpC,GAAuB,CACzBuC,EACAb,IAAgG,CAClG,IAAME,EAAiBF,EAAW,SAAW,OACvCc,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAaD,EAAU,KAAKf,EAAW,WAAW,EAClDiB,EACF,CAAC,CAAC,QAAuB,KAAMH,CAAU,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACnFE,EAA8B,CAAC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACzG,GAAIlB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAMmB,EAAKnB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoB,EAAKpB,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqB,EAAUrB,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsB,EAAQtB,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuB,EAAoB,CAAC,EAAEF,EAAUC,GACvCL,EAAgB,KACZ,CAAC,QAAuB,KAAME,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAO,EACrC,CAAC,QAAuB,KAAMC,CAAK,CACvC,EACAJ,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,EAEhC,IAAIM,EAAoB,GACxB,GAAIxB,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMyB,EAAKzB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D0B,EAAK1B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD2B,EAAU3B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD4B,EAAQ5B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EACxDwB,EAAoB,CAAC,EAAEG,EAAUC,GACjCX,EAAgB,KACZ,CAAC,QAAuB,KAAMQ,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAO,EAC3G,CAAC,QAAuB,KAAMC,CAAK,CAAC,EAExCV,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAClC,CACA,MAAO,CAACD,EAAiBC,EAAU,GAAMK,EAAmBC,CAAiB,CAC/E,KAAO,CACL,GAAItB,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM2B,EAAgBd,EAAU,eAAef,EAAW,WAAW,EACrEiB,EAAgB,KACZ,CAAC,QAAuB,KAAMY,CAAa,EAAG,CAAC,QAAuB,KAAM7B,EAAW,IAAI,EAC3F,CAAC,QAAuB,KAAMA,EAAW,OAAO,CAAC,EACrDkB,EAAS,KACL,CAAC,KAAM,gBAAiB,KAAM,MAAO,OAAQW,EAAc,MAAM,EACjE,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ7B,EAAW,KAAK,MAAM,EAC1D,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQA,EAAW,QAAQ,MAAM,CAAC,EAErE,IAAM8B,EAAU9B,EAAW,KAAK,OAAO,CAAC+B,EAAKC,IAAQD,EAAMC,CAAG,EAC9D,MAAO,CAACf,EAAiBC,EAAU,CAAC,CAACY,EAAS,GAAO,EAAK,CAC5D,CACF,EAEMvD,GAAsB,CACxB0D,EAA4BC,EAAkBC,EAAcC,EAAyBpC,EACrFqC,EAAaC,EAAaC,EAAerB,EAA6BY,EAAkBP,EACxFC,IAAuC,CACzC,IAAMtB,EAAiBF,EAAW,SAAW,OACvCwC,EAAWN,EAAE,KAAK,MAClBO,EAASC,EAAe,SAAUR,EAAE,KAAK,OAAQE,CAAe,EAEtE,GAAIpC,EAAW,YAAY,QAAU,EAAG,CACtC,IAAI2C,EAAQ,GACRC,EAAQ,GACRC,EAAW,GACTC,EAAUX,GAAQjC,EAAiB,EAAI,GAsB7C,GArBIqB,EACFoB,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO;AAAA,4CACxBA,CAAO;AAAA;AAAA;AAAA;AAAA,kCAIjBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAGjBM,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,kCACxBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAIfrC,EAAW,YAAY,SAAW,EAAG,CACvC,IAAM+C,EAAUZ,GAAQjC,EAAiB,EAAI,GACzCsB,EACFoB,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO,yBAAyBA,CAAO;AAAA;AAAA;AAAA;AAAA,gBAM5FH,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,kBAGpDF,EAAW;AAAA;AAAA,aAGb,CAoBA,MAlBoB;AAAA,cACVZ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,8BAE3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,4BAEvCD,CAAQ,IAAID,CAAK;AAAA;AAAA,gBAE7BK,CAAK;AAAA,gBACLD,CAAK;AAAA,gBACLE,CAAQ;AAAA,gBACRP,CAAG;AAAA;AAAA;AAAA,cAKjB,KAAO,CACL,GAAIpC,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM8C,EAAchD,EAAW,YAAY,OACrCiD,EAAWjD,EAAW,KAAK,OAC7BkD,EAAU,GACd,OAAIpB,EACFoB,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAQgBhB,EAAE,gBAAgB,UAAU,CAAC;AAAA
,kBAC3CG,CAAG;AAAA,iBAGfa,EAAU;AAAA;AAAA,8BAEchB,EAAE,gBAAgB,UAAU,CAAC;AAAA,gBAC3CG,CAAG;AAAA,cAGK;AAAA,cACVJ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,8BAC3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,wCAE3BO,CAAW;AAAA;AAAA,4BAEvBR,CAAQ,IAAID,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uCAMNS,EAAc,CAAC;AAAA,0CACZG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA,2CACvDG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA;AAAA,0BAEzEA,EAAc,CAAC;AAAA;AAAA;AAAA,+BAGVb,EAAOa,CAAW,UAAUb,CAAI;AAAA,+CAEvDgB,EAAa,mBAAoB,OAAOhB,EAAOa,CAAW,IAAKA,CAAW,CAAC;AAAA,oCAC/Cb,EAAOa,CAAW,QAAQG,EAAa,gBAAiB,SAAUF,CAAQ,CAAC;AAAA,oBAC3FC,CAAO;AAAA;AAAA,gBAEXZ,CAAG;AAAA;AAAA;AAAA,cAKjB,CACF,EAcM9D,GAAiCwB,GAClC,GAAGA,EAAW,MAAM,IAAIA,EAAW,QAAQ,IAAIA,EAAW,OAAO,IAAIA,EAAW,YAAY,MAAM,GAEjGvB,GAA4CuB,GAC7C,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,eAAe,GAEzEtB,GAAwCsB,GACzC,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,YAAY,IAAIA,EAAW,SAAS,GAE9FrB,GAA6BqB,IAA+D,CAChG,OAAQA,EAAW,OACnB,QAAS,CAAC,SAAU,QAAS,aAAc,YAAY,EAAEA,EAAW,QAAkB,EACtF,SAAUA,EAAW,UACrB,YAAaA,EAAW,aACxB,QAASA,EAAW,QACpB,KAAMA,EAAW,IACnB,GAMMpB,GACF,CAACwE,EAAcrD,EAAmBE,EAA2BD,IAAmD,CAC9G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEiC,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyC,EAAWN,EAAE,KAAK,MAElBG,EAAM,kBACRC,EAAM,GACNe,EAAmB,gBACrBf,GAAO,YAAYE,CAAQ,yBAE3BF,GAAO,YAAYE,CAAQ,oCAE7B,GAAM,CAACvB,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxDpC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EAC3E,IAAM2C,EAAwD,CAAC,MAAM,EACrE,MAAO,CACL,KAAAJ,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAAK,EAAKpB,EAC3FY,EAASP,EAAmBC,CAAiB,CACnD,CACF,EAES3C,GAA8BmB,GAA+D,CACxG,IAAMyD,EAAmBzD,EAAW,oBAAiC,EAE/D0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI0D,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAE1F,IAAMC,EAAwB,CAAC,gBAAAF,EAAiB,GAAGC,EAAM,SAAU,EAAE,EACrE,MAAO,CAAC,GAAGC,EAAuB,SAAUlF,GAAyCkF,CAAqB,CAAC,CAC7G,EAEa7E,GAAc,CAAC8E,EAAyB5D,IAA4C,CAC/F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,cAAegF,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CACnG,EAEMjB,GAAuB,CAC3B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,CACd,EAEaC,GAAoCgB,GAA+D,CAC9G,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEa5E,GAAoB,CAAC2E,EAAyB5D,IAA4C,CACrG5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,oBAAqBgF,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CACxG,EAOMd,GACF,CAACkE,EAAcrD,EAAmBE,EAA2BD,IAA+C,CAC1G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEoC,EAAM;AAAA;AAAA,MAGNC,EAAM,GACNJ,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyD,EAAwD,CAAC,MAAM,EAC/D,CAACvC,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxD,OAAApC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EACpE,CACL,KAAAuC,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAChFvC,EAAM,WAAa,GAAoB,OAAS,KAAMmB,EAAUY,EAASP,EAC1EC,CAAiB,CACvB,CACF,EAESrC,GAAU,CAACyE,EAAyB5D,IAAwC,CACvF5B,GAAewF,EAAQ,MAAM,EAC7BA
,EAAQ,QAAQ1E,GAAyB,UAAW0E,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CAC3F,EAEaZ,GAA0BY,GAA2D,CAChG,IAAM8D,EAAe9D,EAAW,cAC1BO,EAAYP,EAAW,UAEvB0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI8D,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAIJ,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,oEAAoE,EAEtF,IAAMK,EAAoB,CAAC,aAAAD,EAAc,UAAAvD,EAAW,GAAGmD,EAAM,SAAU,EAAE,EACzE,MAAO,CAAC,GAAGK,EAAmB,SAAUrF,GAAqCqF,CAAiB,CAAC,CACjG,EAEa1E,GAAgCW,GAA2D,CACtG,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEavE,GAAgB,CAACsE,EAAyB5D,IAAwC,CAC7F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,gBAAiB0E,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CAChG,IClaA,IAUMgE,GAUAC,GAoCOC,GAxDbC,GAAAC,EAAA,kBAGAC,KAEAC,IAGAC,KAEMP,GAAwB,CAACQ,EAAeC,EAAeC,IAAwB,CACnF,IAAMC,EAAiBH,IAAUC,EAC3BG,EAA8BJ,EAAQC,GAASC,EAAQ,EACvDG,EAA8BL,EAAQC,GAASC,EAAQ,EAE7D,GAAIC,GAAkBC,GAA+BC,EACnD,MAAM,IAAI,MAAM,2CAA4C,CAEhE,EAEMZ,GAAyB,CAACO,EAAeC,EAAeC,EAAeI,IAAoC,CAC/G,IAAMC,EAAc,KAAK,IAAI,KAAK,MAAMN,EAAQD,GAASE,CAAK,CAAC,EACzDM,EAAwB,CAACD,CAAW,EACpCE,EAAaF,EACbG,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,KAAMH,EAAU,KAAMN,CAAK,EAAG,CAAC,KAAMM,EAAU,KAAMJ,CAAK,EACtG,GAAGS,EAA2BH,CAAW,CAC3C,EAEMI,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUT,EAAUE,EAAY,MAAM,EAC9DQ,EAAWF,EAAO,KAAK,MACvBG,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAMD,CAAkC,EAC3F,CAAC,KAAM,QAAS,KAAMA,CAAkC,CAC1D,EACA,MAAO;AAAA,UACDH,EAAa,iBAAiBI,CAAQ,EAAE,iBAAiBH,CAAM,CAAC;AAAA,UAChED,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,gDACnCG,CAAQ;AAAA,QAEtD,EAEA,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGV,CAAQ,EAAE,EACjC,gBAAAM,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,EACF,CACF,EAEahB,GAASwB,GAAkC,CACtD,IAAIlB,EAAQ,EACRC,EAAQ,EACRC,EAAQ,EACRgB,EAAQ,OAAO,CAAC,EAAE,WAAa,GACjClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,GAClCA,EAAQ,OAAO,CAAC,EAAE,WAAa,IACxClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,GAE3CC,GAAI,OAAO,sBACb3B,GAAsBQ,EAAOC,EAAOC,CAAK,EAG3CgB,EAAQ,QAAQzB,GAAuBO,EAAOC,EAAOC,EAAOgB,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CACvG,IC1EA,IAiCME,GAuBAC,GASAC,GA6CAC,GAkDAC,GAkCAC,GAaAC,GAwBAC,GAyBAC,GAuBAC,GAkCAC,GAWAC,GAQAC,GAsDAC,GA6EAC,GAwEAC,GAoHAC,GAOOC,GAiBAC,GAnqBbC,GAAAC,EAAA,kBAIAC,IAEAC,KACAC,KAGAC,KAuBMxB,GAAiB,CAACyB,EAAkBC,IAAuC,CAK/E,GAJAD,EAAO,MAAOE,GAAUA,EAAQ,IAAM,IAAM,CAClB,MAAM,IAAI,MAAM,oDAAoD,CACtE,EAAE,EAEtBF,EAAO,OAAS,GAClB,GAAIC,EAAW,OAAS,UACtB,GAAI,EAAED,EAAO,SAAW,GAAKA,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACtGA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACxDA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MACN;AAAA,oGACwF,UAErFC,EAAW,OAAS,SACzB,EAAED,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC/EA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MAAM,+DAA+D,EAIvF,EAEMxB,GAAe,CAACwB,EAA2BG,EAAyBC,IAA2B,CACnGD,EAAK,MAAOD,GAAUA,GAAS,GAAKA,EAAQE,IAAS,IAAM,CACnC,MAAM,IAAI,MAAM,qEAAqE,CACvF,EAAE,EACxB,IAAMC,EAAY,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAG,EAC1C,OAAAD,EAAK,QAAQ,CAACD,EAAOI,IAAUD,EAAUH,CAAK,EAAIF,EAAOM,CAAK,CAAC,EACxDD,CACT,EAEM5B,GACF,CAAC8B,EAA+BN,EAA8BO,EAAsBR,EACnFS,EAAiBC,IAAwB,CACxC,GAAM,CAACC,EAAeC,EAAkBC,CAAe,EAClDL,EAAe,GAAM,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,GAAKD,EAAO,OAAS,EAAK,EAAI,GAAI,EAAE,EACrEH,EAAOG,EAAO,CAAC,EAAE,KAAK,OAC5B,GAAII,E
AAgB,GAAKJ,EAAO,OAASI,GAAiBJ,EAAOI,CAAa,EAAE,KAAK,OAAS,EAC5FJ,EAAOI,CAAa,EAAE,gBAAgB,EAAE,QAAST,GAAUQ,EAAI,KAAKR,CAAK,CAAC,UACjED,EAAW,0BAA4B,qBAChD,MAAM,IAAI,MAAM,2FAA2F,EAG7G,GAAIW,EAAmB,GAAKL,EAAO,OAASK,GAAoBL,EAAOK,CAAgB,EAAE,KAAK,OAAS,EAAG,CAExG,GADAL,EAAOK,CAAgB,EAAE,gBAAgB,EAAE,QAASV,GAAUF,EAAO,KAAKE,CAAK,CAAC,EAC5EF,EAAO,SAAW,GACjBA,EAAO,SAAWI,GAASI,GAAgB,IAAMR,EAAO,SAAWC,EAAW,KAAK,OACtF,MAAM,IAAI,MACN,6FAA6F,EAEnG1B,GAAeyB,EAAQC,CAAU,EAC7BA,EAAW,KAAK,OAAS,GAC3BzB,GAAawB,EAAQC,EAAW,KAAMG,CAAI,EAAE,QAAQ,CAACF,EAAOI,IAAUN,EAAOM,CAAK,EAAIJ,CAAK,CAE/F,CACA,GAAIW,EAAkB,GAAKN,EAAO,OAASM,IACzCN,EAAOM,CAAe,EAAE,iBAAiB,EAAE,QAASX,GAAUO,EAAM,KAAK,OAAOP,CAAK,CAAC,CAAC,EACnFO,EAAM,SAAWL,GAASI,GAAgB,IAAMC,EAAM,SAAWR,EAAW,KAAK,QACnF,MAAM,IAAI,MAAM,4FAA4F,EAIhH,GAAIA,EAAW,KAAK,OAAS,EAAG,CAC9B,GAAID,EAAO,SAAWC,EAAW,KAAK,OACpC,MAAM,IAAI,MAAM,0FAA0F,EAE5G,GAAIQ,EAAM,SAAWR,EAAW,KAAK,OACnC,MAAM,IAAI,MACN,8FAA8F,CAEtG,CACA,GAAI,OAAOD,EAAW,KAAe,OAAOS,EAAU,KAAeT,EAAO,OAAS,GAAKS,EAAM,OAASL,EACvG,MAAM,IAAI,MAAM,yDAAyD,CAE7E,EAEE1B,GACF,CAACoC,EAAiDC,IAC9C;AAAA,2DACmDA,CAAK,OAC3D,IAAM,CACD,OAAQD,EAAwB,CAC9B,IAAK,aACH,MAAO,UAAUC,CAAK,gBAAgBA,CAAK,YAC7C,IAAK,qBACH,MAAO;AAAA,8BACSA,CAAK,uBAAuBA,CAAK;AAAA;AAAA;AAAA,qBAInD,IAAK,uBACH,MAAO,WAAWA,CAAK,uBAAuBA,CAAK,YACrD,IAAK,gBACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMaA,CAAK;AAAA;AAAA,0BAEbA,CAAK,6DAA6DA,CAAK;AAAA;AAAA,qBAGrF,IAAK,qBACH,MAAO;AAAA,6BACQA,CAAK,gBAAgBA,CAAK;AAAA,2BAC5BA,CAAK,gBAAgBA,CAAK,yBAAyBA,CAAK;AAAA,0BACzDA,CAAK;AAAA;AAAA,mCAEIA,CAAK,yBAAyBA,CAAK;AAAA,qBAE1D,IAAK,uBACH,MAAO,uBAAuBA,CAAK,YAAYA,CAAK;AAAA,uCAC3BA,CAAK;AAAA,mCACTA,CAAK;AAAA;AAAA,sCAEFA,CAAK,uBAAuBA,CAAK,mBAC3D,IAAK,aACH,MAAO,YAAYA,CAAK,uBAAuBA,CAAK,mBACtD,QACE,MAAM,IAAI,MAAM,6BAA6BD,CAAsB,mBAAmB,CAC1F,CACF,GAAG,EACP,IAEEnC,GAA8B,CAACqC,EAA0BR,EAAsBO,IACjF,6CAA6CA,CAAK,4BAA4BA,CAAK,MAAQ,IAAM,CAC/F,OAAQC,EAAa,CACnB,IAAK,oBACH,MAAO,yIAKT,IAAK,QACH,MAAO,2BACT,IAAK,OACH,MAAO,0BACT,IAAK,qBACH,MAAO,0KAKT,IAAK,SACL,QACE,GAAIR,EAAe,GACjB,MAAO,mLAOT,MAAM,IAAI,MAAM,gBAAgBQ,CAAW,mBAAmB,CAClE,CACF,GAAG,EACH,IAEEpC,GAAY,CAAC8B,EAAwBP,EAAyBC,IAA2B,CAC7F,IAAMa,EAAS,IAAI,MAAMb,CAAI,EAAE,KAAK,CAAC,EAAE,OAAO,IAAI,MAAMA,CAAI,EAAE,KAAK,CAAC,CAAC,EAC/Dc,EAAWR,EAAI,SAAW,EAAIO,EAASP,EAAI,MAAM,EACvD,OAAIP,EAAK,OAAS,GAChBA,EAAK,QAAQ,CAACgB,EAAGC,IAAM,CACrBH,EAAOE,CAAC,EAAID,EAASE,CAAC,EACtBH,EAAOG,EAAIhB,CAAI,EAAIc,EAASf,EAAK,OAASiB,CAAC,CAC7C,CAAC,EACMH,GAEFC,CACT,EAEMrC,GACF,CAACwC,EAA+BrB,EAA2BS,EAA0BN,IACrE,CACV,IAAImB,EAAwB,CAAC,EAC7B,GAAIb,EAAM,OAAS,EACjB,GAAIN,EAAK,OAAS,EAAG,CAEnB,GADAkB,EAAW,QAASF,GAAMG,EAAY,KAAKH,CAAC,CAAC,EACzC,KAAK,IAAI,GAAGhB,CAAI,EAAIkB,EAAW,OACjC,MAAM,IAAI,MAAM,sBAAsB,EAExClB,EAAK,QAAQ,CAACgB,EAAGC,IAAME,EAAYH,CAAC,EAAIV,EAAMW,CAAC,CAAC,CAClD,MACEX,EAAM,QAASU,GAAMG,EAAY,KAAKH,CAAC,CAAC,MAErC,CACL,GAAInB,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyC,EAEzDsB,EAAcD,EAAW,IAAI,CAACnB,EAAOI,IAAU,KAAK,MAAMJ,EAAQF,EAAOM,CAAK,CAAC,CAAC,CAEpF,CACA,OAAOgB,CACT,EAEFxC,GAAoB,CAACuC,EAA+BrB,EAAkBC,IAAiC,CAC3G,IAAMsB,GAAiB,IAAM,CAC3B,OAAQtB,EAAW,sBAAuB,CACxC,IAAK,aACH,OAAOA,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,IAAK,cACH,OAAOC,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,QACE,MAAM,IAAI,MAAM,4BAA4BC,EAAW,qBAAqB,mBAAmB,CACnG,CACF,GAAG,EACHD,EAAO,KAAK,EAAK,EAAGA,EAAO,MAAM,EACjC,IAAMwB,EAAsBH,EAAW,MAAM,EAC7C,OAAIpB,EAAW,KAAK,OAAS,GAC3BA,EAAW,KAAK,QAASkB,GAAMnB,EAAOmB,CAAC,EAAII,CAAa,EACxDtB,EAAW,KAAK,QAASkB,GAAMK,EAAoBL,CAAC,EAA
I,KAAK,MAAME,EAAWF,CAAC,EAAInB,EAAOmB,CAAC,CAAC,CAAC,IAE7FnB,EAAO,KAAKuB,EAAe,EAAGvB,EAAO,MAAM,EAC3CwB,EAAoB,QAAQ,CAACL,EAAGC,IAAMI,EAAoBJ,CAAC,EAAI,KAAK,MAAMD,EAAInB,EAAOoB,CAAC,CAAC,CAAC,GAEnFI,CACT,EAEMzC,GACF,CAAC0C,EAAuBJ,EAA+BC,EAAgCI,EACtFC,IAA8B;AAAA,mEACgCF,EAAO,KAAK,OAAO,cAC9EA,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,oCACZG,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,gCAC5CA,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA,sBAC/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA,wBAChDE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,uBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA;AAAA,kCAExDF,EAAO,KAAK,KAAK;AAAA;AAAA,gCAEnBG,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAQzFtC,GACF,CAAC6C,EAAsBJ,EAAuBJ,EAA+BC,EAC5EI,EAAsBC,EAAmBG,IAAsC;AAAA,gEACpBL,EAAO,KAAK,OAAO,QAAQI,EAAM,KAAK,OAAO;AAAA,2BAClFA,EAAM,KAAK,OAAO;AAAA,gCACbP,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,sBAE/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA;AAAA;AAAA;AAAA,0BAI9CE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,yBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA,gCAC5DC,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA,iBAG9EQ,CAAgB,4CAA4CL,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA,wCAGtDA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAS/CI,EAAM,WAAW,gBAAiB,IAAK,cAAc,CAAC;AAAA;AAAA;AAAA,OAI1D5C,GAAoB,CAAC4C,EAAsBR,IAA0C;AAAA,0CACjDQ,EAAM,KAAK,OAAO;AAAA,gCAC5BR,EAAW,MAAM;AAAA,4BACrBQ,EAAM,WAAW,gBAAiB,GAAG,CAAC;AAAA,gDAClBD,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,OAOtGnC,GACF,CAAC2C,EAAsBE,EAAoBC,EAAkBC,IACzDJ,EAAM,KAAOI,EAAc;AAAA,MAC7BJ,EAAM,WAAW,gBAAiBE,EAAY,SAAS,CAAC;AAAA,MACxDF,EAAM,WAAW,gBAAiBG,EAAU,OAAO,CAAC;AAAA,EAEvB,GAE7B7C,GACF,CAAC0C,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUG,EAAWC,EAAUL,CAAU,EAC5CV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,CAAC,EAC9DN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wEAC2Dd,CAAK;AAAA,2BAClDc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBM,EAAW,mBAAmBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QAC9FN,EAAM,WAAW,gBAAiBO,EAAU,mBAAmBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC5FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,+CAGHJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,gBAE/DA,CAAK,sBAAsBoB,CAAS;AAAA,gBACpCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAEzCN,EACI,yBAAyBT,EAAWc,CAAS,CAAC,8BAA8Bd,EAAWe,CAAQ,CAAC;AAAA,iBAC7FF,CAAkB;AAAA,SAErB,EAAE;AAAA,8BACcb,EAAWc,CAAS,CAAC;AAAA,8BACrBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,2BAKvBf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA,iBAC1EjB,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWjC,EAEE3B,GACF,CAACyC,EAAsBJ,EAAuBJ,EAA+BC,EAC5EtB,EAA2BU,EAAwB2B,EAAqBP,EACxEI,EAA4BI,IAAoC,CAC/D,IAAMC,EAAOlB,EAAW,SAAW,EAC7BmB,EAAS,GACT,CAACL,EAAWC,CAAQ,EAAIG,EAAO,CAAC,EAAG,CAAC,EAAIC,EAAS,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAC/DzB,EAAQc,EAAM,KAAK,MACnBY,EAAoCC,GAAwB,CAChE,IAAMC,EAAYD,IAAQP,EAAY,MAAQ,MAC9C,MAAO;AAAA,WACJQ,CAAS,qCAAqCd,EAAM,KAAK,OAAO,qBAC/DJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,6BACfU,EAAO,WAAW,iBAAkBiB,CAAG,CAAC;AAAA,2BAC1C3B,CAAK,+DAA+Df,EAAO0C,CAAG,CAAC;AAAA,UAChGpB,EAAYoB,CAAG,CAAC,KAAKrB,EAAWqB,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,MAAMrB,EAAW,MAAM;AAAA,gCAC/DN,CAAK;AAAA;AAAA;AAAA,cAGvBe,CAAgB,0CAA0CT,EAAWqB,CAAG,CAAC;AAAA,mBACpER,CAAkB;AAAA;AAAA,0BAEXnB,CAAK,gBAAgBA,CA
AK;AAAA;AAAA,gBAEpC4B,CAAS,KAAK5B,CAAK,oBAAoBA,CAAK;AAAA,gBAC5C4B,CAAS,WAAWA,CAAS,OAAOtB,EAAWqB,CAAG,CAAC;AAAA,eACpD,IACDJ,EACK;AAAA,mCAEER,EACF,UAAUI,CAAkB,IAE5B,GAAGS,CAAS,iBAAiBA,CAAS,KAAKtB,EAAWqB,CAAG,CAAC,WAElE,CAAC;AAAA;AAAA,kCAEsBb,EAAM,KAAK,OAAO;AAAA,YACxCA,EAAM,WAAW,qBAAsBa,EAAK,OAAOC,CAAS,GAAG,CAAC;AAAA,0BAEhED,IAAQP,EAAYN,EAAM,aAAa,oBAAoB,EACvC,2DAA2D;AAAA;AAAA;AAAA,QAIrF,EAEA,MAAO;AAAA,MACPY,EAAiCN,CAAS,CAAC;AAAA,MAC3CM,EAAiCL,CAAQ,CAAC;AAAA,qCACXrB,CAAK,cAAcA,CAAK;AAAA;AAAA,wBAErCA,CAAK,gBAAgBA,CAAK;AAAA,wBAC1BA,CAAK;AAAA,wBACLA,CAAK;AAAA,uBACNA,CAAK;AAAA,oBACRsB,CAAW,wBAAwBA,CAAW,yBACxDA,CAAW,yBAAyBA,CAAW;AAAA,oBACrCA,CAAW,mBAAmBA,CAAW;AAAA,oBACzCA,CAAW,2BAA2BA,CAAW;AAAA,oBACjDA,CAAW,yBAAyBA,CAAW,0BACzDA,CAAW,0BAA0BA,CAAW;AAAA;AAAA;AAAA;AAAA,qCAIrBtB,CAAK,sBAAsBA,CAAK,YAAYA,CAAK;AAAA,oBAClEA,CAAK;AAAA;AAAA;AAAA;AAAA,4CAImBU,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,yBACnDc,EAAM,KAAK,OAAO;AAAA;AAAA;AAAA,KAIvC,EAEExC,GACF,CAACwC,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUY,EAAUT,EAAWC,EAAUL,CAAU,EACtDV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACpEN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wFAC2Ed,CAAK;AAAA,2BAClEc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBe,EAAU,qBAAqBvB,EAAWuB,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9Ff,EAAM,WAAW,gBAAiBM,EAAW,sBAAsBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QACjGN,EAAM,WAAW,gBAAiBO,EAAU,qBAAqBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,gDAGFJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,kBAE9DA,CAAK,sBAAsB6B,CAAQ;AAAA,mBAClC7B,CAAK,sBAAsBoB,CAAS;AAAA,kBACrCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAE3CN,EAAmB,6BAA6BT,EAAWuB,CAAQ,CAAC,oCAC7CvB,EAAWc,CAAS,CAAC,kCAAkCd,EAAWe,CAAQ,CAAC;AAAA,eAC7FF,CAAkB;AAAA,WAEJ,EAAE;AAAA;AAAA,gCAECb,EAAWuB,CAAQ,CAAC;AAAA,oCAChBvB,EAAWc,CAAS,CAAC;AAAA,kCACvBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAO3Bf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA;AAAA,kBAEzEjB,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,iBACNA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,mBAAmBA,CAAK;AAAA,iBAC7BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAgBjC,EAEEzB,GACF,CAACuD,EAAyB5C,EAA8BO,EAAsBsC,EAC7ErC,EAA0BsC,IAA6C,CACtE,IAAM1B,EAAawB,EAAY,KACzBnC,EAAM9B,GAAUmE,EAAU9C,EAAW,KAAMoB,EAAW,MAAM,EAE9DC,EAAczC,GAAgBwC,EAAYyB,EAAarC,EAAOR,EAAW,IAAI,EAC7ED,EAAS8C,EAAY,MAAM,EAC3BA,EAAY,SAAW,IACzB9C,EAASqB,EAAW,IAAI,CAACnB,EAAOI,IAAUJ,IAAU,EAAI,EAAMoB,EAAYhB,CAAK,EAAIJ,CAAK,EACpFD,EAAW,wBAA0B,YACvCqB,EAAcxC,GAAkBuC,EAAYrB,EAAQC,CAAU,IAGlE,IAAMwB,EAASuB,EAAe,SAAUH,EAAY,SAAUvB,EAAY,MAAM,EAC1EO,EAAQoB,EAAc,QAASJ,EAAY,SAAUxB,EAAW,MAAM,EACtE6B,EAAaC,EAAU,KAAK7B,CAAW,EACvC8B,EAAU/B,EAAW,SAAWC,EAAY,QAAUD,EAAW,MAAM,CAACgC,EAAGjC,IAAMiC,IAAM/B,EAAYF,CAAC,CAAC,EACrGU,EAAmB7B,EAAW,0BAA4B,qBAC1DiC,EAAqBjC,EAAW,mBAChCqD,EAAWzB,EAAM,KAAK,MACtB0B,EAAmBC,GAA+B;AAAA,QACtDJ,EAAU,GAAK;AAAA,QACf1E,GAA2CuB,EAAW,wBAAyBqD,CAAQ,CAAC;AAAA,SACvF,IAAM,CACP,OAAQrD,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA,gBACHhB,GAAkB4C,EAAOR,CAAU,CAAC;AAAA,gBACpC1C,GAA4BsB,EAAW,YAAaO,EAAc8C,CAAQ,CAAC;AAAA,gBAE3EtE,GACI6C,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,OAAQoB,CAAgB,CAAC;AAAA,gBAE9F,IAAK,SACH,MAAO;AAAA,gBACH/C,GAA0C0C,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,MAAM,CAAC;AAAA,iBACpG,IAAM,CACT,GAAIW,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GAAGlC,GAAsB0C,EAAOJ,EAA
QJ,EAAYS,EAAkBI,CAAkB,CAAC,GAC3F,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EAC1D,MAAO,GAAGhC,GAAuBwC,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAEjG,MAAM,MAAM,kFAAkF,CAElG,GAAG,CAAC;AAAA,cAEN,IAAK,QACH,MAAO;AAAA,eACJ,IAAM,CACP,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GACHjC,GACIyC,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAQU,EAAKT,EAAW,YAAa6B,EAC7E7B,EAAW,mBAAoBA,EAAW,cAAc,CAAC,GAEjE,MAAM,MAAM,2EAA2E,CAE3F,GAAG,CAAC;AAAA,cAEN,QACE,MAAM,MAAM,qBAAqB,CACrC,CACF,GAAG,CAAC;AAAA,OACH;AAAA,QAEGuD,EAAa,gBAAgB,cAAe,KAAK,EAC5C,gBAAgB,SAAU,MAAOxD,EAAO,MAAM,EAC9C,gBAAgB,MAAO,MAAOU,EAAI,MAAM,EACxC,iBAAiBmB,EAAOJ,CAAM,CAAC;AAAA,QACtC+B,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,UAC1EJ,EAAU,0CAA4C;AAAA,+BACjC3B,EAAO,gBAAgB,YAAY,CAAC;AAAA,6BACtCI,EAAM,KAAK,OAAO;AAAA,WACpC,IAAM,CACT,OAAQ5B,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA;AAAA,yCAEsB4B,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,yCAEnC5B,EAAW,kBAAkB;AAAA,mBAE5D,IAAK,SACH,MAAO,wBACFoB,EAAW,SAAW,GAAKA,EAAW,SAAW,EAAK,wBACA,wBAAwB,oBACrF,IAAK,QACH,MAAO,6DACT,QACE,MAAM,MAAM,4BAA4BpB,EAAW,IAAI,EAAE,CAC7D,CACF,GAAG,CAAC;AAAA,CACT;AAAA,SAGK,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAGA,EAAW,QAAQ,IAAIO,CAAY,IAAIR,EAAO,OAAS,EAAIA,EAAS,EAAE,IAC3ES,EAAM,OAAS,EAAIA,EAAQ,EAAE,IAAIC,EAAI,OAAS,EAAIA,EAAM,EAAE,IAAI0C,CAAO,IAAI/B,CAAU,GACvF,kBAAmB,CAAC,MAAM,CAC5B,EACA,gBAAAkC,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMjC,EAAa,SAAUuB,EAAY,QAAQ,CAAC,EAC7D,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,OAAsB,KAAMlD,CAAM,EAC9E,CAAC,OAAsB,KAAMU,CAAG,EAAG,GAAG+C,EAA2BpC,EAAYC,CAAW,CAC1F,CACF,EACF,CACF,EAEE/B,GAAuCmE,GAAoC,CAC/E,IAAMC,EAAmBD,EAAQ,iBAGjC,OAF2B,IAAI,YAAYC,EAAkBA,EAAiB,WAAY,CAAC,EACnD,CAAC,CAE3C,EAEanE,GAAS,CAACkE,EAAyBzD,IAAuC,CACrF,IAAMD,EAAmB,CAAC,EACpBS,EAAkB,CAAC,EACnBC,EAAgB,CAAC,EAKjBF,EAAejB,GAAoCmE,CAAO,EAChE,GAAIzD,EAAW,YAAc,EAC3B,MAAM,MAAM,6DAA6D,EAE3ExB,GAAeiF,EAAQ,OAAQzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAC3EgD,EAAQ,QACJpE,GAAwBoE,EAAQ,OAAO,CAAC,EAAGzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC7G,EAEajB,GAAyBQ,GAA0D,CAC9F,IAAM2D,EAAY3D,EAAW,UACvBE,EAAOF,EAAW,KAClB4D,EACF5D,EAAW,wBACToC,EAAcpC,EAAW,YACzBqC,EAAiBrC,EAAW,iBAA6B,EACzDiC,EAAqBjC,EAAW,mBAChC6D,EAA+C7D,EAAW,sBAC1D8D,EAAa9D,EAAW,KAExBe,EAA4Bf,EAAW,cAAgB,GAAK,SAAWA,EAAW,YACxF,OAAO+D,GAA4B,CACjC,UAAAJ,EACA,KAAAzD,EACA,wBAAA0D,EACA,YAAAxB,EACA,eAAAC,EACA,mBAAAJ,EACA,sBAAA4B,EACA,KAAAC,EACA,YAAA/C,CACF,CAAC,CACH,IC1rBA,IAkBMiD,GAqDAC,GA+FOC,GAtKbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KASMR,GAAiB,CAACS,EAA+BC,IAAgD,CACrG,GAAM,CAACC,EAAOC,EAAaC,EAAUC,CAAQ,EAAIL,EAC3C,CAAC,SAAAM,EAAU,mBAAAC,CAAkB,EAAIN,EAEvC,GAAIC,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wDAAwDA,EAAM,KAAK,MAAM,EAAE,EAE7F,GAAI,CAACM,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,GAAK,CAACK,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,CAAC,GACtFA,EAAY,KAAK,SAAW,EAC9B,MAAM,IAAI,MAAM,uEAAuEA,EAAY,KAAK,MAAM,EAAE,EAElH,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAI,CAACG,EAAU,SAASJ,EAAS,KAAMC,EAAS,IAAI,EAClD,MAAM,IAAI,MAAM,wEAA4E,EAG9F,GAAIE,EAAqB,GAAKD,IAAa,EACzC,MAAM,IAAI,MAAM,iEAAiE,EAGnF,IAAMG,EAAYP,EAAM,KAAK,CAAC,EACxBQ,EAAiBR,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACjDS,EAAoBP,EAAS,KAAK,CAAC,EACnCQ,EAAaJ,EAAU,kBAAkBN,EAAM,KAAM,CAAC,EAAIQ,EAC1DG,EAAWN,IAAuB,EAAIH,EAAS,KAAK,CAAC,EAAI,EAAIQ,EAAaN,EAChF,GAAIC,EAAqBM,EACvB,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIV,EAAY,KAAK,SAAW,EAAG,CACjC,GAAIM,IAAcN,EAAY,KAAK,CAAC,EAClC,MAAM,IAAI,MAAM,sEAAsEA,EAAY,KAAK,CAAC,CAAC,EAAE,EAE7G,GAAIO,IAAmBP,EAAY,KAAK,CAAC,EACvC,MAAM,IAAI,MAAM,2EAA2EA,
EAAY,KAAK,CAAC,CAAC,EAAE,CAEpH,CAEA,GAAIU,EAAW,IAAMT,EAAS,KAAK,CAAC,GAAKG,EAAqB,IAAMH,EAAS,KAAK,CAAC,EACjF,MAAM,IAAI,MAAM,kGACZA,EAAS,KAAK,CAAC,CAAC,EAAE,EAGxB,GAAIM,EAAiBC,EACnB,MAAM,IAAI,MAAM,gFAAgF,CAEpG,EAEMnB,GACF,CAACQ,EAA+BC,IAAuD,CACrF,GAAM,CAAC,YAAAa,EAAa,SAAAR,EAAU,mBAAAC,EAAoB,MAAAQ,CAAK,EAAId,EACrDQ,EAAYT,EAAO,CAAC,EAAE,KAAK,CAAC,EAC5BgB,EAAcR,EAAU,kBAAkBR,EAAO,CAAC,EAAE,KAAM,CAAC,EAC3DU,EAAiBV,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACzDY,EAAaI,EAAcN,EAC3BO,EAAyBjB,EAAO,CAAC,EAAE,KAAK,CAAC,EACzCa,EAAWN,IAAuB,EAAIU,EAAyB,EAAIL,EAAaN,EAKhFY,EACF,IAAI,MAAcT,EAAWC,EAAgBE,EAAaC,EAAUA,EAAWI,CAAsB,EACnGE,EAAgBX,EAAU,eAAeU,CAAW,EAEpDE,EAAoC,CACxC,CAAC,OAAsB,KAAML,CAAK,EAClC,CAAC,QAAuB,KAAMG,CAAW,EACzC,CAAC,QAAuB,KAAMC,CAAa,EAI3C,GAAInB,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MAAsB,CAAC,QAAuB,KAAM,CAACgB,EAAaJ,EAAYC,EAAU,CAAC,CAAC,CAAC,EAC/F,CAAC,EACT,GAAIb,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MACA,CAAC,QAAuB,KAAM,CAACgB,EAAaH,EAAUH,EAAiBG,EAAU,CAAC,CAAC,CAAC,EACxF,CAAC,EAET,GAAGQ,EAA2BrB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9G,EAEMsB,EAAmBC,GAA+B,CACtD,IAAMrB,EAAQsB,EAAc,QAASxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEG,EAAcqB,EAAc,eAAgBxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACrFI,EAAWoB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EK,EAAWmB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EyB,EAASC,EAAe,SAAU1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAEjF,OAAAuB,EAAa,iBAAiB,CAC5B,CAAC,KAAM,QAAS,KAAM,KAAK,EAC3B,CAAC,KAAM,eAAgB,KAAM,MAAO,OAAQL,EAAY,MAAM,EAC9D,CAAC,KAAM,iBAAkB,KAAM,MAAO,OAAQC,EAAc,MAAM,EAClE,CAAC,KAAM,uBAAwB,KAAM,MAAO,OAAQA,EAAc,MAAM,CAC1E,CAAC,EAEM;AAAA,UACLI,EAAa,iBAAiBrB,EAAOC,EAAaC,EAAUC,EAAUoB,CAAM,CAAC;AAAA;AAAA,UAE7EF,EAAa,UAAUI,EAAc,CAAC;AAAA,+CACDvB,EAAS,IAAI;AAAA;AAAA;AAAA,YAGhDmB,EAAa,sCAAsC,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA,kBAIpDpB,EAAY,2BAA2B,UAAWuB,EAAe,GAAIvB,EAAY,KAAK,OAAQ,CAAC,CAAC,CAAC;AAAA;AAAA,sBAE7FA,EAAY,YAAY,kBAAkB,CAAC;AAAA,oFACmBW,CAAW;AAAA,yDACtCA,CAAW;AAAA,uBAC7CZ,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEF,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEoB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA,uBACpBvB,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEH,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEqB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA;AAAA;AAAA,cAG7BA,EAAO,YAAY,IAAKvB,EAAM,YAAY,GAAG,CAAC,CAAC;AAAA;AAAA,UAGvD,EAEA,MAAO,CACL,KAAM,kBACN,YAAa,CACX,KAAM0B,GAA4B,CAC1B,YAAAd,CACF,CAAC,EAAE,SACT,kBAAmB,CAAC,OAAQ,OAAQ,OAAQ,MAAM,CACpD,EACA,gBAAAQ,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAU,KAAKU,CAAW,EAAIS,EAAc,CAAC,EAC1E,gBAAAP,CACF,EACF,CACF,EAES3B,GAAkB,CAACoC,EAAyB5B,IAAgD,CACvGV,GAAesC,EAAQ,OAAQ5B,CAAU,EACzC4B,EAAQ,QAAQrC,GAAiCqC,EAAQ,OAAQ5B,CAAU,CAAC,CAC9E,ICzKA,IAeM6B,GAwDAC,GA4IOC,GAnNbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAOMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMC,EAAoBD,EAAO,CAAC,EAC5BE,EAAmBF,EAAO,CAAC,EAC3BG,EAAoBH,EAAO,CAAC,EAElC,GAAIC,EAAM,WAAaC,EAAK,UAAYD,EAAM,WAAaE,EAAM,SAC/D,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIC,EAAK,KAAK,SAAW,GAAKA,EAAK,KAAK,SAAW,EACjD,MAAM,IAAI,MAAM,uBAAuB,EAGzC,IAAME,EAAaH,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EAC7CI,EAAiBJ,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACvD,GAAIC,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAME,EACtC,MAAM,IAAI,MAAM,8CAA8C,EAEhE,GAAIF,EAAK,KAA
KA,EAAK,KAAK,OAAS,CAAC,IAAMG,EACtC,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIF,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,IAAMC,EACxC,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBN,EAAO,CAAC,EACjC,GAAIM,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMF,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACA,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMO,EAAmBP,EAAO,CAAC,EACjC,GAAIO,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMH,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACF,EAEMX,GACF,CAACO,EAA+BQ,EAAqCC,EAAqBC,IACvE,CACb,IAAMC,EAAaH,EAAW,WAExBI,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAcH,EACdI,EAAaH,EACbT,EAAaQ,EAAW,MAAM,EAAE,EAAE,CAAC,EACnCK,EAAmBP,EAAaE,EAAW,MAAM,EAAG,EAAE,EAAE,OAAO,CAAC,EAAI,CAAC,EACrEM,EAAe,CAACP,GAAcX,EAAO,OAAS,EAC9CmB,EAAenB,EAAO,OAAS,EAC/BoB,EAAgBV,GAAcD,EAAc,EAC5CY,EAAqBX,GAAcD,EAAc,EACjDa,EAA4Bb,EAAc,EAC1Cc,EAAgB,GAEhBC,EAAaC,GAAiBrB,CAAU,EAExCsB,EAAoC,CACxC,CAAC,QAAuB,KAAMV,CAAU,EACxC,CAAC,QAAuB,KAAMQ,CAAU,EACxC,CAAC,QAAuB,KAAMpB,CAAU,EACxC,CAAC,OAAsB,KAAMI,EAAW,OAAO,CACjD,EACMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAAmC,CACvC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,aAAc,KAAM,KAAK,EAChC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,KAAK,CAC/B,EACMC,EAAY,CAChBC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACjEO,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACpEO,EAAc,QAAS/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CACvE,EACIN,GACFY,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAElFL,GACFW,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAEtFM,EAAU,KAAKE,EAAe,SAAUhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAChFJ,GACFU,EAAU,KAAKE,EAAe,gBAA+Bf,CAAgB,CAAC,EAE5EI,GACFS,EAAU,KAAKE,EAAe,mBAAkCf,CAAgB,CAAC,EAE/EK,GACFQ,EAAU,KAAKE,EAAe,sBAAuBhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAEnG,IAAMS,EAAWC,GAA4BlC,EAAO,CAAC,EAAE,QAAQ,EACzDmC,EAAcD,KAA4CV,CAAU,EAC1E,MAAO;AAAA;AAAA,QAEXI,EAAa,iBAAiBC,CAAa,EAAE,iBAAiB,GAAGC,CAAS,CAAC;AAAA,0CACzCK,CAAW,KAAKZ,CAAa;AAAA,kDACrBY,CAAW,KAAKZ,CAAa;AAAA;AAAA,QAEvEK,EAAa,UAAU,CACjBL,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA;AAAA,iCAEmBA,CAAa;AAAA;AAAA;AAAA,gDAGEA,CAAa;AAAA;AAAA;AAAA,oBAGzCA,EAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKRJ,EAAe,qBAAuBc,EAAW,OAAO;AAAA;AAAA;AAAA,YAGzEX,EAA4B,2CAA6C,EAAE;AAAA;AAAA,4BAE3Dc,GAAUH,EAAUT,EAAY,OAAO,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMlCD,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAY1Bc,GAAU,MAAOb,CAAU,CAAC;AAAA,wCACTa,GAAU,aAAcb,CAAU,CAAC,gCAC3Db,EAAa,GAAK,eAAe;AAAA,UACvCS,EAAgB,kCAAoC,EAAE;AAAA,UACtDC,EAAqB,4CAA8C,EAAE;AAAA;AAAA;AAAA,qDAG1BV,EAAa,GAAK,KAAKsB,CAAQ,QAAQ;AAAA,cAC9EA,CAAQ;AAAA,cACRf,EAAe,uBAAyB,EAAE;AAAA;AAAA,QAG9C,EACMoB,EAAU,CAAC,CAAC,KAAMvB,EAAa,SAAUf,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIS,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAM1B,EAAY,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAExD,CACL,KAAM,yBACN,YAAa,CACX,KAAM,GAAGwB,CAAU,IAAIJ,CAAa,IAAIC,CAAkB,IAAIC,CAAyB,GACvF,kBAAmBtB,EAAO,IAAI,CAACuC,EAAQC,IAAW,MAAM,CAC1D,EACA,gBAAAb,EACA,WAAY,KAAO,CACjB,QAAAW,EACA,cAAe,CACb,EAAG,KAAK,KAAKtB,EAAaZ,CAAU,CACtC,EACA,gBAAAsB,CACF,EACF,CACF,EAEKhC,GAAgB,CAAC+C,EAAyBjC,IAA8C,CAGnGhB,GAAeiD,EAAQ,MAAM,EAG7B,IAAMH,EAAU,CAAC,CAAC,EACdG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAK,CAAC,EAEhBG,EAAQ,QACJhD,GAA+BgD,EAAQ,OAAQjC,EAAYiC,E
AAQ,YAAa,EAAU,EAAG,CAAC,QAAAH,CAAO,CAAC,CAC5G,ICrOA,IAiBMI,GAkBAC,GAcAC,GAeAC,GAcAC,GAsBAC,GAmFOC,GAYAC,GAnMbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAQMb,GAAiB,CAACc,EAA+BC,IAAsC,CAC3F,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIC,EAAW,KAAK,SAAW,GAC7B,GAAIA,EAAW,KAAK,SAAWA,EAAW,OAAO,QAAUA,EAAW,KAAK,SAAWA,EAAW,KAAK,OACpG,MAAM,IAAI,MAAM,iDAAiD,UAE1DA,EAAW,OAAO,SAAWA,EAAW,KAAK,OACtD,MAAM,IAAI,MAAM,2CAA2C,EAE7DD,EAAO,MAAM,CAAC,EAAE,QAAQ,CAACE,EAAGC,IAAQ,CAClC,GAAIH,EAAOG,EAAM,CAAC,EAAE,WAAa,GAAkBH,EAAOG,EAAM,CAAC,EAAE,WAAa,EAC9E,MAAM,IAAI,MAAM,SAASA,CAAG,qCAAqC,CAErE,CAAC,CACH,EAEMhB,GAAY,CAACa,EAA+BG,IAA0B,CAC1E,IAAMC,EAAkB,CAAC,EACzB,GAAIJ,EAAO,OAASG,EAClB,GAAIH,EAAOG,CAAG,EAAE,WAAa,EAC3BH,EAAOG,CAAG,EAAE,iBAAiB,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,UACxDL,EAAOG,CAAG,EAAE,WAAa,EAClCH,EAAOG,CAAG,EAAE,cAAc,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,MAE9D,OAAM,IAAI,MAAM,SAASF,CAAG,qCAAqC,EAGrE,OAAOC,CACT,EAEMhB,GACF,CAACY,EAA+BC,IAAiD,CAC/E,GAAID,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBnB,GAAUa,EAAQ,CAAC,EACtCO,EAAiBpB,GAAUa,EAAQ,CAAC,EACtCQ,EAAiBrB,GAAUa,EAAQ,CAAC,EACxC,OAAIQ,EAAK,SAAW,IAClBA,EAAO,CAAC,GAAG,MAAMR,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,KAAK,CAAC,GAEzCS,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,KACE,QAAOP,CAEX,EAEEZ,GACF,CAACqB,EAAeC,EAAeC,EAA+BJ,EAAyBK,IACzE,CACR,IAAIC,EAAWJ,EAIf,OAHIA,EAAQ,IACVI,GAAYF,EAAWJ,EAAKG,CAAK,CAAC,GAEhCE,EAAMF,CAAK,EAAI,EACV,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,EAAI,CAAC,CAAC,EAE3D,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,CAAC,CAAC,CAElE,EAEFrB,GACF,CAACc,EAAsBW,EAAuBH,IAC1C,4CAA4CG,EAAO,KAAK,OAAO,QAAQX,EAAM,KAAK,OAAO;AAAA,+BAClEA,EAAM,KAAK,OAAO;AAAA;AAAA,yBAExBQ,EAAW,MAAM;AAAA,kCACRI,EAAa,uBAAwB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BAClEI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BACtDI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,6BACrDI,EAAa,kBAAmB,IAAKJ,EAAW,MAAM,CAAC;AAAA,iCACnDG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAO3DX,EAAM,WAAW,gBAAiB,IAAK,aAAa,CAAC;AAAA;AAAA;AAAA,SAK7Db,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBiB,EAAYC,EAAU,KAAKN,CAAU,EACrCJ,EAAQP,EAAW,KAAK,OAAS,EAAKiB,EAAU,cAAcjB,EAAW,KAAMW,EAAW,MAAM,EAC1D,CAAC,GAAG,MAAMA,EAAW,MAAM,EAAE,KAAK,CAAC,EAC3EC,EAAQ1B,GAAUa,EAAQ,CAAC,EAC/Ba,EAAM,QAASM,GAASA,IAAS,IAAM,IAAM,CACnB,MAAM,IAAI,MAAM,kBAAkB,CACpC,EAAE,EACtBN,EAAM,SAAW,IACnBA,EAAQ,MAAML,EAAK,MAAM,EAAE,KAAK,CAAC,GAEnC,IAAMF,EAASL,EAAW,OAAO,IAAI,CAACmB,EAAOC,IAAMhC,GAAkB+B,EAAOC,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAEjGN,EAAON,EAAW,KAAK,IAAI,CAACqB,EAAKD,IAAMhC,GAAkBiC,EAAKD,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAE/F,GAAIL,EAAK,SAAWF,EAAO,QAAUE,EAAK,SAAWD,EAAK,OACxD,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIC,EAAK,SAAWI,EAAW,OAC7B,QAASS,EAAI,EAAGA,EAAIT,EAAW,OAAQ,EAAES,EAClCb,EAAK,SAASa,CAAC,IAClBf,EAAO,OAAOe,EAAG,EAAG,CAAC,EACrBd,EAAK,OAAOc,EAAG,EAAGT,EAAWS,CAAC,CAAC,EAC/BR,EAAM,OAAOQ,EAAG,EAAG,CAAC,GAI1B,IAAME,EAAQV,EAAM,IAAIM,GAAQ,KAAK,KAAKA,CAAI,CAAC,EAE/CN,EAAM,QAAQ,CAACM,EAAME,EAAGG,IAAU,CAChC,GAAIL,EAAO,EAAG,CACZ,IAAMM,GAAYlB,EAAKc,CAAC,EAAIf,EAAOe,CAAC,GAAKF,EACnCO,EAASpB,EAAOe,CAAC,EACjBM,EAAWD,EAASD,EAAWZ,EAAMQ,CAAC,EAC5Cf,EAAOe,CAAC,EAAIM,EACZpB,EAAKc,CAAC,EAAIK,EACVF,EAAMH,CAAC,EAAI,CAACF,CACd,CACF,CAAC,EAED,IAAMS,EAAchB,EAAW,MAAM,CAAC,EACtCJ,EAAK,QAAQ,CAACqB,EAAM3B,IAAM,CACxB0B,EAAYC,CAAI,EAAI,KAAK,MAAMtB,EAAKsB,CAAI,EAAIvB,EAAOuB,CAAI,GAAKhB,EAAMgB,CAAI,CAAC,CACzE,CAAC,EACD,IAAMC,EAA+B,CAAC,KAAMF,EAAa,SAAU5B,EAAO,CAAC,EAAE,QAAQ,EAE/Ee,EAASgB,EAAe,SAAU/B,EAAO,CAAC,EAAE,SAAU4B,EAAY,MAAM,EACxExB,EAAQ4B,EAAc,QAAShC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEiC,EAAa
f,EAAU,KAAKU,CAAW,EACvCM,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ5B,EAAO,MAAM,EACtF,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQiB,EAAM,MAAM,EAAG,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQV,EAAM,MAAM,CACvG,EAEMsB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAM3B,CAAM,EAC/E,CAAC,OAAsB,KAAMiB,CAAK,EAAG,CAAC,QAAuB,KAAMV,CAAK,EACxE,GAAGuB,EAA2BpC,EAAO,CAAC,EAAE,KAAM4B,CAAW,CAC3D,EAEMS,EAAmBC,GAA+B;AAAA,QAClDA,EAAa,iBAAiBJ,CAAQ,EAAE,iBAAiB9B,EAAOW,CAAM,CAAC;AAAA,UACrEzB,GAA0Bc,EAAOW,EAAQH,CAAU,CAAC;AAAA,UACpD0B,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,iCACpDvB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDA,EAAO,YAAY,aAAcX,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,SAE/E,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGmB,EAAM,MAAM,IAAIjB,EAAO,MAAM,IAAIO,EAAM,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACnG,gBAAAwB,EACA,WAAY,KAAO,CACjB,QAAS,CAACP,CAAgB,EAC1B,cAAe,CAAC,EAAG,KAAK,KAAKb,EAAY,EAAuB,CAAC,EACjE,gBAAAkB,CACF,EACF,CACF,EAEa3C,GAAQ,CAAC+C,EAAyBtC,IAAsC,CACnFf,GAAeqD,EAAQ,OAAQtC,CAAU,EACzC,IAAMuC,EAAoBpD,GAAgCmD,EAAQ,OAAQtC,CAAU,EACpFsC,EAAQ,QAAQhD,GAAuBgD,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAO1F,EAEa/C,GAAwBQ,GAAyD,CAC5F,IAAMK,EAASL,EAAW,OACpBM,EAAON,EAAW,KAClBO,EAAOP,EAAW,KACxB,OAAOQ,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,ICxMA,IAeMiC,GAUAC,GAwHOC,GAKAC,GAtJbC,GAAAC,EAAA,kBAOAC,IAEAC,KACAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,CAElD,EAMMT,GAA2B,CAACU,EAAmBC,IAA+C,CAClG,IAAMC,EAAQF,EAAM,KACdG,EAAaC,EAAU,KAAKF,CAAK,EACjCG,EAAK,GACPC,EAAOL,EAAW,KAItB,GAHIK,EAAO,IACTA,EAAOJ,EAAM,OAASI,GAEpBA,EAAOJ,EAAM,OAAS,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMK,EAAOL,EAAMI,CAAI,EACjBE,EAAOL,EAAaI,EACpBE,EAAaC,GAAiBH,CAAI,EAClCI,EAAaJ,EAAOE,EAEpBG,EAAY,CAACC,EAAcJ,IAC3BA,IAAe,EACV,WAAWI,CAAI,OAAOA,CAAI,YAAYA,CAAI,OAAOA,CAAI,OACnDJ,IAAe,EACjB,OAAOI,CAAI,OAAOA,CAAI,MACpBJ,IAAe,EACjB,WAAWI,CAAI,OAAOA,CAAI,QAAQA,CAAI,MAGxCA,EAEHC,EAAIC,EAAc,IAAKf,EAAM,SAAUA,EAAM,KAAMS,CAAU,EAC7DO,EAASC,EAAe,SAAUjB,EAAM,SAAUA,EAAM,KAAMS,CAAU,EACxES,EAAYJ,EAAE,KAAK,MAEnBK,EAAgBC,GAA4BpB,EAAM,QAAQ,IAAM,MAClE,mBAAmBkB,CAAS,oBAC5B,mBAAmBA,CAAS,eAC1BG,EAAmBC,GAA+B;AAAA,sCACpBJ,CAAS;AAAA,sCACTA,CAAS;AAAA,4CACHA,CAAS,KAAKb,CAAE;AAAA;AAAA,4DAEAa,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA,gEAKLA,CAAS;AAAA;AAAA;AAAA;AAAA,QAIjEI,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBR,EAAGE,CAAM,CAAC;AAAA,QAC7EM,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA,qBAGXjB,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAMbc,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAmBID,CAAS,IAAIN,EAAU,kBAAmBH,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKtDS,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAeRA,CAAS,IAAIK,GAAU,kBAAmBd,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAU9E,MAAO,CACL,KAAM,UACN,YAAa,CAAC,KAAM,GAAGA,CAAU,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAChE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAO,SAAUF,EAAM,QAAQ,CAAC,EACjD,cAAe,CAAC,EAAGQ,CAAI,EACvB,gBAAiB,CAAC,CAAC,OAAsB,KAAMG,CAAU,CAAC,CAC5D,GACA,gBAAAU,CACF,CACF,EAEa9B,GAAU,CAACiC,EAAyBvB,IAAwC,CACvFZ,GAAemC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAyBkC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CACzE,EAEaT,GAA0BS,GACnCwB,GAA4B,CAAC,KAAMxB,EAAW,IAAc,CAAC,ICvJjE,IAiBMyB,GAMAC,GAWAC,GASAC,GAqBAC,GAuDOC,GAOAC,GA9HbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAQMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,CAEpC,EAEMZ,GACF,CAACY,EAA+BC,IAAiD,CAC/E,IAAMC,EAAuB,CAAC,EAC1BC,EAAqBF,EAAW,WACpC,OAAID,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQI,GAAKF,EAAW,KAAK,OAAOE,CAAC,CAAC,CAAC,EACpE
D,EAAaD,EAAW,QAEnBG,GAA4B,CAAC,WAAAF,EAAY,KAAMF,EAAW,KAAM,WAAAC,CAAU,CAAC,CACpF,EAEEb,GAA4BiB,GAAoC;AAAA;AAAA,gCAEtCA,CAAe;AAAA,kBAC7BC,EAAa,8BAA+B,IAAKD,CAAe,CAAC;AAAA;AAAA;AAAA;AAAA,aAItEA,CAAe;AAAA,GAEtBhB,GAAuBkB,GAAsC,CACjE,IAAMF,EAAkBE,EAAQ,OAC1BC,EAAsB,CAAC,EAC7B,QAASC,EAAI,EAAGA,EAAIJ,EAAiB,EAAEI,EAAG,CACxC,IAAMC,EAAgBH,EAAQE,CAAC,EAAE,aAAa,UAAW,mBAAmB,EACxEJ,IAAoB,EACtBG,EAAU,KAAKE,CAAa,EACnBD,IAAM,EACfD,EAAU,KAAK,wBAAwBC,CAAC,QAAQC,CAAa,IAAI,EACxDD,IAAMJ,EAAkB,EACjCG,EAAU,KAAK,UAAUE,CAAa,IAAI,EAE1CF,EAAU,KAAK,6BAA6BC,CAAC,OAAOC,CAAa,IAAI,CAEzE,CACA,MAAO;AAAA,wDAC+CH,EAAQ,CAAC,EAAE,KAAK,OAAO;AAAA,UACrEC,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,QAE9B,EAEMlB,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAWf,EAAO,CAAC,EAAE,SACrBgB,EAAOF,EAAU,cAAcb,EAAW,KAAMW,EAAW,MAAM,EACjEJ,EAAU,IAAI,MAAqBP,EAAW,UAAU,EACxDgB,EAAQC,EAAc,QAASH,EAAUH,EAAW,MAAM,EAC1DO,EAAkB,IAAI,MAAclB,EAAW,UAAU,EACzDmB,EAAkC,CAAC,EACnCC,EAA2B,CAAC,EAC9BC,EAAc,EACZC,EAAoC,CAAC,CAAC,QAAuB,KAAMV,CAAS,CAAC,EACnF,QAASH,EAAI,EAAGA,EAAIT,EAAW,WAAYS,IAAK,CAC9CY,GAAerB,EAAW,WAAWS,CAAC,EACtCS,EAAgBT,CAAC,EAAIY,EACrB,IAAME,EAAcZ,EAAW,MAAM,EACrCY,EAAYvB,EAAW,IAAI,EAAIA,EAAW,WAAWS,CAAC,EACtDW,EAAa,KAAKG,CAAW,EAC7BhB,EAAQE,CAAC,EAAIe,EAAe,SAASf,CAAC,GAAIK,EAAUS,EAAY,MAAM,EACtEJ,EAAkB,KAAK,CAAC,KAAMC,EAAaX,CAAC,EAAG,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,CAC9E,CACAuB,EAAgB,KACZ,CAAC,QAAuB,KAAMJ,CAAe,EAAG,GAAGO,EAA2Bd,EAAY,GAAGS,CAAY,CAAC,EAC9G,IAAMM,EAAmBC,GAA+B;AAAA,IAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,qBAAsB,MAAOT,EAAgB,MAAM,EACnE,iBAAiBF,EAAO,GAAGT,CAAO,CAAC;AAAA,IAC1CnB,GAAyB8B,EAAgB,MAAM,CAAC;AAAA,IAChD7B,GAAoBkB,CAAO,CAAC;AAAA;AAAA,IAE5BoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DX,EAAM,gBAAgB,YAAY,CAAC;AAAA,kBACrCA,EAAM,WAAW,UAAWD,CAAI,CAAC;AAAA;AAAA;AAAA,iBAGlCT,EAAa,8BAA+B,qBAAsBY,EAAgB,MAAM,CAAC;AAAA,QAClGF,EAAM,WAAW,UAAWD,EAAM,OAAO,CAAC;AAAA;AAAA;AAAA,KAIhD,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAMf,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,gBAAA0B,EACA,WAAY,KAAO,CACjB,QAASP,EACT,cAAe,CAAC,EAAG,KAAK,KAAKP,EAAY,EAAuB,CAAC,EACjE,gBAAAU,CACF,EACF,CACF,EAEa/B,GAAQ,CAACqC,EAAyB5B,IAAsC,CACnFd,GAAe0C,EAAQ,MAAM,EAC7B,IAAMC,EACFD,EAAQ,OAAO,SAAW,EAAI5B,EAAab,GAAgCyC,EAAQ,OAAQ5B,CAAU,EACzG4B,EAAQ,QAAQtC,GAAuBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC1F,EAEarC,GAAwBQ,GAAyD,CAC5F,IAAMe,EAAOf,EAAW,KAClBC,EAAuBD,EAAW,WAClCE,EAAaF,EAAW,WAAuB,EAAIC,EAAW,OAASD,EAAW,WACxF,GAAIE,IAAeD,EAAW,OAC5B,MAAM,IAAI,MAAM,+CAA+C,EAEjE,OAAOG,GAA4B,CAAC,KAAAW,EAAM,WAAAb,EAAY,WAAAD,CAAU,CAAC,CACnE,ICtIA,IAUM6B,GA4DAC,GAoCOC,GA1GbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAEMP,GACF,CAACQ,EAA4BC,EAA+BC,EAA+BC,EAC1FC,IAAuB,CACtB,IAAMC,EAASC,EAAe,cAAeF,EAAYF,EAAW,OAAQ,CAAC,EACvE,EAAIK,EAAc,SAAUN,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEO,EAAID,EAAc,SAAUN,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEQ,EAAIF,EAAc,SAAUN,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EAE1ES,EACEC,EAAa,CAACC,EAAWJ,EAAWC,IAAc,UAAUD,CAAC,KAAKI,CAAC,KAAKH,CAAC,IAC/E,GAAI,CAACN,EACHO,EAAaL,EAAO,YAChB,aACAM,EAAW,EAAE,YAAY,YAAY,EAAGH,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAChG,CACL,IAAMI,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,iBAAiBF,CAAC,gBAAgBA,CAAC,IACjDG,EAAc,iBAAiBH,CAAC,gBAAgBA,CAAC,IAEjDI,EAAc,sBAAsBJ,CAAC,6BAA6BA,CAAC,UACzE,MAAO;AAAA,gCACeA,CAAC,MAAMV,EAAO,gBAAgB,qBAAqBU,CAAC,GAAG,CAAC;AAAA,0BAC9DA,CAAC,MAAM,EAAE,2BAA2B,iBAAiBA,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMP,EAAE,2BAA2B,iBAAiBO,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMN,EAAE,2BAA2B,iBAAiBM,CAAC,GAAIV,CAAM,CAA
C;AAAA,yBAClEU,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,6BACZA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,cAC/BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIL,EAAWM,EAAaC,EAAaC,CAAW,CAAC;AAAA,WAErF,EACIf,IAAe,EACjBM,EAAa;AAAA;AAAA,cAETG,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,wGAGtCH,EAAa;AAAA,cACTG,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,WAGtD,CAEA,MAAO;AAAA,UACHb,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBS,EAAG,EAAGD,EAAGH,CAAM,CAAC;AAAA,UACjFL,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEU,CAAU;AAAA,QAEhB,EAEEjB,GAA4BQ,GAA+C,CAC/E,IAAMmB,EAAQnB,EAAO,CAAC,EAAE,KAClBoB,EAAQpB,EAAO,CAAC,EAAE,KAClBqB,EAAQrB,EAAO,CAAC,EAAE,KAClBsB,EAAiBtB,EAAO,CAAC,EAAE,SAE3BE,EAAc,EAAEqB,EAAU,SAASJ,EAAOC,CAAK,GAAKG,EAAU,SAASH,EAAOC,CAAK,GACrFG,EAAcL,EACdM,EAAaF,EAAU,KAAKJ,CAAK,EAGrC,GAAIjB,EAAa,CACf,IAAMwB,EAAkBC,GAAc,UAAUA,GAAc,UAAUR,EAAOC,EAAO,EAAK,EAAIC,EAAO,EAAK,EAC3G,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,6CAA8C,EAEhEF,EAAcE,EACdD,EAAaF,EAAU,KAAKC,CAAW,CACzC,CAEA,IAAMI,EAAU,KAAK,KAAKH,EAAa,CAAC,EAExC,MAAO,CACL,KAAM,QACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,gBAAkB1B,GACdR,GAA2BQ,EAAcC,EAAQwB,EAAatB,EAAaoB,CAAc,EAC7F,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAME,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAgB,CAAC,EACrF,gBACI,CAAC,CAAC,QAAuB,KAAMG,CAAO,EAAG,GAAGC,EAA2BR,EAAOF,EAAOC,EAAOI,CAAW,CAAC,CAC9G,EACF,CACF,EAEa/B,GAASqC,GAAkC,CACtDA,EAAQ,QAAQtC,GAAyBsC,EAAQ,MAAM,CAAC,CAC1D,IC5GA,IA8CaC,GA9CbC,GAAAC,EAAA,kBAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAOavC,GAA+D,IAAI,IAAI,CAClF,CAAC,MAAO,CAAUwC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAwB,CAAC,EAC7C,CAAC,SAAU,CAACC,GAAQD,EAAwB,CAAC,EAC7C,CAAC,OAAQ,CAAUE,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACC,EAAS,CAAC,EAEzB,CAAC,cAAe,CAAMC,GAAkBC,EAA0B,CAAC,EACnE,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,UAAW,CAACC,EAAO,CAAC,EACrB,CAAC,gBAAiB,CAACC,EAAa,CAAC,EACjC,CAAC,OAAQ,CAAUC,GAAeC,EAAmB,CAAC,EACtD,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,gBAAiB,CAACC,GAAeC,EAA4B,CAAC,EAC/D,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,eAAgB,CAACC,GAAcC,EAA2B,CAAC,EAC5D,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,MAAO,CAAUC,GAAcC,EAAoB,CAAC,EACrD,CAAC,QAAS,CAAWC,EAAK,CAAC,EAC3B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACpB,GAAMC,EAAmB,CAAC,EACzC,CAAC,SAAU,CAACoB,GAAQC,EAAqB,CAAC,EAC1C,CAAC,iBAAkB,CAACC,GAAgBC,EAA6B,CAAC,EAClE,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,oBAAqB,CAAMC,GAAwBC,EAAgC,CAAC,EACrF,CAAC,gBAAiB,CAAMC,GAAoBC,EAA4B,CAAC,EACzE,CAAC,UAAW,CAAWC,EAAO,CAAC,EAC/B,CAAC,iBAAkB,CAAWC,EAAc,CAAC,EAC7C,CAAC,sBAAuB,CAACC,GAAqBC,EAAkC,CAAC,EACjF,CAAC,cAAe,CAAUC,GAAsBC,EAA0B,CAAC,EAC3E,CAA
C,wBAAyB,CAACC,EAAY,CAAC,EACxC,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,YAAa,CAAUC,GAAoB1B,EAAoB,CAAC,EACjE,CAAC,OAAQ,CAAW2B,EAAI,CAAC,EACzB,CAAC,cAAe,CAAWC,EAAW,CAAC,EACvC,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,cAAe,CAACC,GAAaC,EAA0B,CAAC,EAEzD,CAAC,UAAW,CAAMC,GAAcC,EAAsB,CAAC,EACvD,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,qBAAsB,CAACC,GAAoBC,EAAiC,CAAC,EAC9E,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAACC,EAAG,CAAC,EACb,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,YAAa,CAAUC,GAAoB1C,EAAoB,CAAC,EACjE,CAAC,QAAS,CAAC2C,EAAK,CAAC,EACjB,CAAC,aAAc,CAAUC,EAAU,CAAC,EACpC,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,eAAgB,CAACC,EAAY,CAAC,EAC/B,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,UAAW,CAAUC,EAAO,CAAC,EAC9B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,yBAA0B,CAACC,EAAa,CAAC,EAC1C,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,UAAW,CAACC,GAASC,EAAsB,CAAC,EAC7C,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,kBAAmB,CAAUC,GAA0BzE,EAAoB,CAAC,EAC7E,CAAC,OAAQ,CAAC0E,EAAI,CAAC,EACf,CAAC,YAAa,CAACC,GAAWC,EAAwB,CAAC,EACnD,CAAC,QAAS,CAACC,EAAK,CAAC,CACnB,CAAC,IC5ID,IAoBaC,GApBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEAC,KAYaL,GAAN,KAAqB,CAI1B,YAAoBM,EAAwB,CAAxB,aAAAA,EAClB,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAmBC,EAAoBC,EAChEC,EAA0D,CAC5DC,GAAiBL,EAAc,YAAY,IAAI,EAC/C,IAAMM,EAAS,KAAK,QAAQ,OACtBC,EAAqB,KAAK,QAAQ,sBAAsB,EAC9D,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,CAAC,EAClE,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAASR,EAClBO,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQC,EAAM,MAAM,CAAC,CAAC,EAE1E,QAAWC,KAAUR,EACnBM,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQE,EAAO,MAAM,CAAC,CAAC,EAEvEN,GACFI,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAUJ,CAAoB,CAAC,EAExE,IAAMO,EAAYL,EAAO,gBACrB,CAAC,OAAQN,EAAc,gBAAgB,mBAAmB,CAAC,EAAG,QAAAQ,EAAS,MAAOR,EAAc,YAAY,IAAI,CAAC,EAEjH,GAAI,KAAK,QAAQ,gBAAkB,YAAa,CAC9C,IAAMY,EAAc,CAClB,SAAU,KAAK,QAAQ,gBACvB,gBAAiBZ,EAAc,gBAC/B,UAAAW,EACA,cAAAR,CACF,EAC2B,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC1E,KAAKS,CAAW,CACtC,CAEAL,EAAmB,YAAYP,EAAc,eAAe,EAC5DO,EAAmB,aAAa,EAAGI,CAAS,EAC5CJ,EAAmB,mBAAmB,GAAGJ,CAAa,EACtD,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,EAAI,CAAC,EACtE,KAAK,QAAQ,yBAET,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACnD,KAAK,QAAQ,YAAc,cAC7B,KAAK,QAAQ,eAAe,EAE1B,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACrD,KAAK,QAAQ,MAAM,EAErBU,GAAeb,EAAc,YAAY,IAAI,CAC/C,CACA,SAAgB,CAEhB,CACA,MAAMc,EAA0BC,EAAiE,CAC/FV,GAAiBS,EAAY,IAAI,EACjC,IAAMR,EAAS,KAAK,QAAQ,OACtBU,EAAuB,CAAC,EAC1BV,EAAO,SAAS,IAAI,YAAY,GAClCU,EAAW,KAAK,aAAa,EAE/B,IAAMC,EAAeC,GAAmBH,EAA6B,KAAK,QAAQ,OAAO,MAAM,EACzFI,EAAWL,EAAY,gBAAgBG,CAAY,EACnDG,EAAO,GAAGJ,EAAW,KAAK;AAAA,CAAI,CAAC;AAAA,EAAKC,EAAa,yBAAyB;AAAA,EAAKE,CAAQ,GACvFE,EAAef,EAAO,mBAAmB,CAAC,KAAAc,EAAM,MAAON,EAAY,IAAI,CAAC,EAC9EQ,GAAU,UAAW,IAAM,YAAYR,EAAY,IAAI,iBAAiBM,CAAI,EAAE,EAE9E,IAAMG,EAAkBjB,EAAO,sBAC3B,CAAC,QAAS,CAAC,OAAQe,EAAc,WAAY,MAAM,EAAG,OAAQ,OAAQ,MAAOP,EAAY,IAAI,CAAC,EAElG,OAAAD,GAAeC,EAAY,IAAI,EACxB,CAAC,YAAAA,EAAa,gBAAAS,EAAiB,qBAAsBN,EAAa,aAAa,CACxF,CAEA,2BAA2Bd,EACE,CAC3B,IAAMqB,EAA
I,OAAOrB,GAAkB,SAAWA,EAAgBA,EAAc,EACtEsB,EAAI,OAAOtB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEuB,EAAI,OAAOvB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEwB,EAAoB,KAAK,QAAQ,OAAO,OAAO,iCACrD,GAAIH,GAAKG,GAAqBF,GAAKE,GAAqBD,GAAKC,EAC3D,MAAO,CAACH,EAAGC,EAAGC,CAAC,EAEjB,IAAME,EAAOJ,EAAIC,EAAIC,EACjBG,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EAC/C,GAAIC,EAAkBF,EAAmB,CAEvC,GADAE,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EACvCC,EAAkBF,EACpB,MAAM,IAAI,MAAM,6CAA6C,EAE/D,MAAO,CAACE,EAAiBA,EAAiBA,CAAe,CAC3D,KACE,OAAO,CAACA,EAAiBA,EAAiB,CAAC,CAE/C,CACF,IC3HA,IAmCMC,GA4CAC,GAiBAC,GAwBOC,GAxHbC,GAAAC,EAAA,kBAGAC,KAEAC,IAEAC,KACAC,KACAC,KACAC,KACAC,KAwBMZ,GACF,CAACa,EAAqCC,IAA2E,CAC/G,GAAIA,EAAkB,SAAWD,EAAa,OAC5C,MAAM,IAAI,MAAM,4BAA4BC,EAAkB,MAAM,wCAChED,EAAa,MAAM,GAAG,EAG5B,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIH,EAAa,OAAQ,EAAEG,EAAG,CAC5C,IAAMC,EAAOJ,EAAaG,CAAC,EAAE,SAC7B,OAAQF,EAAkBE,CAAC,EAAG,CAC5B,IAAK,OAAQ,CACXD,EAAW,KAAK,EAAE,EAClB,KACF,CACA,IAAK,OAAQ,CACXA,EAAW,KAAK,GAAGE,CAAI,EAAE,EACzB,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAOL,EAAaG,CAAC,EAAE,KAAK,OAClCD,EAAW,KAAK,GAAGE,CAAI,IAAIC,CAAI,EAAE,EACjC,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAON,EAAaG,CAAC,EAAE,KAAK,KAAK,GAAG,EAC1CD,EAAW,KAAK,GAAGE,CAAI,IAAIE,CAAI,EAAE,EACjC,KACF,CACA,QACE,MAAM,IAAI,MAAM,iCAAiCL,EAAkBE,CAAC,CAAC,EAAE,CAC3E,CACF,CAEA,OAAOD,EAAW,KAAK,GAAG,CAC5B,EASEd,GACF,CAACmB,EAA0BP,EAAqCQ,IAA0C,CAGxG,IAAIC,EAAMF,EAAY,KACtB,OAAIA,EAAY,aAAa,OAC3BE,GAAO,IAAMF,EAAY,YAAY,KAAO,KAE9CE,GAAO,IAAMD,EACT,IACOrB,GACIa,EACAO,EAAY,aAAa,mBACrB,IAAI,MAAwCP,EAAa,MAAM,EAAE,KAAK,MAAM,CAAC,CAAC,GAC1FS,CACT,EAEEpB,GAAN,KAA6C,CAI3C,YAAYqB,EAA6B,CACnCA,IACF,KAAK,aAAeA,EAAY,aAChC,KAAK,OAASA,EAAY,OAE9B,CAEA,eAAeC,EAAwC,CACrD,OAAO,KAAK,eAAiBA,CAC/B,CAEA,SAASC,EAA4B,CACnC,OAAO,KAAK,SAAWA,CACzB,CACF,EAMatB,GAAN,KAAoB,CAApB,cAkBL,sBAAgC,KAOhC,qBAA+B,KAgC/B,KAAQ,eAAyC,KACjD,KAAQ,mBAAiD,KACzD,uBAAoB,GACpB,2BAAwB,EAGxB,KAAQ,eAAsC,CAAC,EAE/C,KAAQ,eAAsD,IAAI,IAOlE,mBAA8B,UAI9B,yBAAkD,IAAI,IAKtD,KAAQ,uBAA2D,IAAI,IAKvE,gCAA4E,IAAI,IA7ChF,IAAI,yBAAoD,CACtD,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,yEAAyE,EAG3F,IAAIuB,EAAO,KAAK,iBAAiB,IAAI,KAAK,eAAe,EACzD,OAAKA,IACHA,EAAO,CAAC,EACR,KAAK,iBAAiB,IAAI,KAAK,gBAAiBA,CAAI,GAG/CA,CACT,CAmCA,MAAM,WAAWC,EAAUC,EAAoC,CAC7D,KAAK,IAAMD,EACX,IAAME,EAAqC,CAAC,EACtCC,EAAwC,CAC5C,eAAgB,CACd,+BAAgCF,EAAQ,OAAO,+BAC/C,iCAAkCA,EAAQ,OAAO,iCACjD,4BAA6BA,EAAQ,OAAO,4BAC5C,cAAeA,EAAQ,OAAO,cAC9B,kCAAmCA,EAAQ,OAAO,kCAClD,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,wBAC3C,EACA,iBAAAC,CACF,EAEID,EAAQ,SAAS,IAAI,qDAAqD,EAC5EC,EAAiB,KAAK,qDAAuE,EACpFD,EAAQ,SAAS,IAAI,iBAAiB,GAC/CC,EAAiB,KAAK,iBAAiB,EAErCD,EAAQ,SAAS,IAAI,YAAY,GACnCC,EAAiB,KAAK,YAAY,EAGpC,KAAK,OAAS,MAAMD,EAAQ,cAAcE,CAAgB,EAC1D,KAAK,YAAc,IAAI5B,GAAgB0B,EAAQ,MAAQ,MAAMA,EAAQ,mBAAmB,CAAC,EACzF,KAAK,eAAiBG,GAAqB,IAAI,EAC/C,KAAK,eAAiB,IAAIC,GAAe,IAAI,EAC7C,KAAK,QAAU,IAAI,IACnB,KAAK,qBAAuB,IAAI,IAChC,KAAK,iBAAmB,IAAI,IAG5BC,GAAgBN,EAAI,SAAW,CAAC,CAACA,EAAI,KAAK,EAI1C,KAAK,OAAO,kBAAoBO,GAAM,CAChCA,EAAG,iBAAiB,oBAEtB,QAAQ,MAAM,mDAAmDA,EAAG,MAAM,OAAO,EAAE,CAEvF,EAEA,OAAO,eACH,KAAK,IAAI,OAAQ,SAAU,CAAC,MAAO,KAAK,OAAQ,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAC3G,OAAO,eACH,KAAK,IAAI,OAAQ,UAAW,CAAC,MAAON,EAAS,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAGxG,KAAK,aAAa,CACpB,CAEA,SAAgB,CACV,OAAO,KAAK,SAAa,KAC3B,KAAK,SAAS,QAAQ,EAExB,KAAK,eAAe,QAAQ,CAC9B,CAEA,mBAAuC,CACrC,OAAK,KAAK,iBACR,KAAK,eAAiB,KAAK,OAAO,qBAAqB,GAElD,KAAK,cACd,CAEA,uBAA+C,CAC7C,GAAI,CAAC,KAAK,mBAAoB,CAC5B,IAAMO,EAAiB,KAAK,kBAAkB,EACxCC,EAAkD,CAAC,EAErD,KAAK,YAAc,cACrBA,EAAsB,gBAAkB,CACtC,SAAU,KAAK,SACf,0BAA2B,KAAK,sBAAwB,EACxD,oBAAqB,KAAK,sBAAwB,EAAI,CACxD,GAGF,KA
AK,mBAAqBD,EAAe,iBAAiBC,CAAqB,CACjF,CACA,OAAO,KAAK,kBACd,CAEA,gBAAuB,CACjB,KAAK,qBACP,KAAK,mBAAmB,IAAI,EAC5B,KAAK,mBAAqB,KAE9B,CAEA,OAAc,CACZ,GAAI,CAAC,KAAK,eACR,OAGFC,GAAiB,EAEjB,KAAK,eAAe,EACpB,IAAIC,EACA,KAAK,YAAc,SACrB,KAAK,eAAe,gBAChB,KAAK,SAAW,EAAG,KAAK,sBAAwB,EAAG,KAAK,mBAAqB,CAAC,EAElFA,EAAkB,KAAK,OAAO,aAE1B,CAAC,KAAM,KAAK,sBAAwB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAExG,KAAK,eAAe,IAAIA,EAAiB,KAAK,cAAc,EAC5D,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAe,mBAChB,KAAK,mBAAqB,EAAGA,EAAiB,EAAG,KAAK,sBAAwB,EAAI,CAAC,GAGzF,KAAK,OAAO,MAAM,OAAO,CAAC,KAAK,eAAe,OAAO,CAAC,CAAC,EACvD,KAAK,eAAe,sBAAsB,EAC1C,KAAK,eAAiB,KACtB,KAAK,sBAAwB,EAEzB,KAAK,YAAc,QAChBA,EAAiB,SAAS,WAAW,IAAI,EAAE,KAAK,IAAM,CACzD,IAAMC,EAAa,IAAI,eAAeD,EAAgB,eAAe,CAAC,EAChEE,EAAiB,KAAK,eAAe,IAAIF,CAAe,EAC9D,QAAStB,EAAI,EAAGA,EAAIuB,EAAW,OAAS,EAAGvB,IAAK,CAC9C,IAAMyB,EAAoBD,EAAexB,CAAC,EACpC0B,EAAWD,EAAkB,SAC7BE,EAAa,KAAK,QAAQ,IAAID,CAAQ,EACtCE,EAAaD,EAAW,WACxBE,EAAaF,EAAW,WACxBG,EAAcL,EAAkB,YAChCM,EAAmBN,EAAkB,iBACrCO,EAAoBP,EAAkB,kBACtCQ,EAAeV,EAAWvB,EAAI,CAAC,EAC/BkC,EAAaX,EAAWvB,EAAI,EAAI,CAAC,EAEnC,OAAO,KAAK,cAAkB,MAChC,KAAK,cAAgBiC,GAGvB,IAAME,EAAY,OAAOF,EAAe,KAAK,aAAa,EACpDG,EAAU,OAAOF,EAAa,KAAK,aAAa,EAEtD,GAAI,CAAC,OAAO,cAAcC,CAAS,GAAK,CAAC,OAAO,cAAcC,CAAO,EACnE,MAAM,IAAI,WAAW,2BAA2B,EAGlD,GAAI,KAAK,IAAI,OAAO,WAAW,OAC7B,KAAK,IAAI,OAAO,UAAU,OAAO,CAC/B,QAAS,EACT,eAAgBL,EAAiB,IAC7BM,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,gBAAiBL,EAAkB,IAC/BK,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,SAAAX,EACA,WAAAE,EACA,WAAAC,EACA,YAAAC,EACA,UAAAK,EACA,QAAAC,CACF,CAAC,MACI,CAEL,IAAIG,EAAc,GAClBR,EAAiB,QAAQ,CAACM,EAAOrC,IAAM,CACrCuC,GAAe,SAASvC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC7F,CAAC,EACD,IAAIG,EAAe,GACnBR,EAAkB,QAAQ,CAACK,EAAOrC,IAAM,CACtCwC,GAAgB,UAAUxC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC/F,CAAC,EAED,QAAQ,IAAI,uBAAuBX,CAAQ,IAAIE,CAAU,IAAIC,CAAU,IAAIC,CAAW,KAAKS,CAAW,GAClGC,CAAY,mBAAmBJ,EAAUD,CAAS,KAAK,CAC7D,CACAM,GAAM,MAAO,GAAGX,CAAW,KAAKG,CAAY,KAAKC,CAAU,EAAE,CAC/D,CACAZ,EAAgB,MAAM,EACtB,KAAK,eAAe,OAAOA,CAAe,CAC5C,CAAC,EAEHoB,GAAe,CACjB,CAaA,IAAIC,EAAsBZ,EAAyCa,EAC/DC,EACAC,EACAC,EAAmC,CACrC1B,GAAiBsB,EAAQ,IAAI,EAE7B,IAAMK,EAAwB,CAAC,EAC/B,QAAShD,EAAI,EAAGA,EAAI+B,EAAiB,OAAQ,EAAE/B,EAAG,CAChD,IAAMU,EAAOqB,EAAiB/B,CAAC,EAAE,KAEjC,GAAIU,IAAS,EACX,SAEF,IAAMuC,EAAU,KAAK,eAAe,IAAIvC,CAAI,EAC5C,GAAI,CAACuC,EACH,MAAM,IAAI,MAAM,0BAA0BvC,CAAI,EAAE,EAElDsC,EAAW,KAAKC,CAAO,CACzB,CAEA,GAAM,CAAC,QAAAC,EAAS,cAAAC,EAAe,gBAAAC,CAAe,EAAIT,EAAQ,WAAWZ,CAAgB,EAG/EsB,EAAyBT,EAAc,SAAW,EAAIM,EAAQ,IAAI,CAACI,EAAGtD,IAAMA,CAAC,EAAI4C,EACvF,GAAIS,EAAuB,SAAWH,EAAQ,OAC5C,MAAM,IAAI,MAAM,eAAeG,EAAuB,MAAM,qBAAqBH,EAAQ,MAAM,GAAG,EAIpG,IAAMlB,EAAkC,CAAC,EACnCuB,EAAyB,CAAC,EAChC,QAASvD,EAAI,EAAGA,EAAIkD,EAAQ,OAAQ,EAAElD,EAAG,CAIvC,GAAI,CAAC,OAAO,UAAUqD,EAAuBrD,CAAC,CAAC,GAAKqD,EAAuBrD,CAAC,EAAI,IAC5EqD,EAAuBrD,CAAC,GAAK+C,EAC/B,MAAM,IAAI,MAAM,yBAAyBM,EAAuBrD,CAAC,CAAC,EAAE,EAEtE,GAAIqD,EAAuBrD,CAAC,IAAM,GAChC,SAEF,IAAMwD,EAAcH,EAAuBrD,CAAC,IAAM,GAC5CyD,EAAeJ,EAAuBrD,CAAC,IAAM,GAC7C0D,EAAcF,GAAeC,EAC/BX,EAAyBI,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAC7D6C,EAAmBQ,EAAuBrD,CAAC,EAAGkD,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAGtF,GAFAgC,EAAkB,KAAK0B,CAAU,EAE7BA,EAAW,OAAS,EACtB,SAEF,IAAMT,EAAU,KAAK,eAAe,IAAIS,EAAW,IAAI,EACvD,GAAI,CAACT,EACH,MAAM,IAAI,MAAM,2BAA2BS,EAAW,IAAI,EAAE,EAK9D,GAHIF,GACF,KAAK,cAAc,KAAKP,CAAO,EAE7BQ,EAAc,CAChB,IAAIE,EAAiB,KAAK,qBAAqB,IAAI,KAAK,eAAgB,EACnEA,IACHA,EAAiB,CAAC,EAClB,KAAK,qBAAqB,IAAI,KAAK,gBAAkBA,CAAc,GAErEA,EAAe,KAAKV,CAAO,CAC7B,CACAM,EAAY,KAAKN,CAAO
,CAC1B,CAIA,GAAID,EAAW,SAAWjB,EAAiB,QAAUwB,EAAY,SAAWvB,EAAkB,OAAQ,CAEpG,GAAIuB,EAAY,SAAW,EACzB,OAAAb,GAAeC,EAAQ,IAAI,EACpBX,EAMT,MAAM,IAAI,MACN,WAAWW,EAAQ,IAAI,4EAA4E,CACzG,CAKA,IAAIiB,EACJ,GAAIR,EAAiB,CACnB,IAAIS,EAAgB,EACdC,EAAoB,CAAC,EAE3BV,EAAgB,QAAQW,GAAK,CAC3B,IAAMrD,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIrD,EAAK,SAAW,EAClB,OAGF,IAAMsD,EAAgBD,EAAE,OAAS,GAAmB,EAAI,EACpDE,EACAC,GACAH,EAAE,OAAS,IACbG,GAAgBxD,EAAK,OAAS,EAAI,GAAMA,EAAK,OAAS,EAAI,EAAIA,EAAK,OAASsD,EAC5EC,EAAiBvD,EAAK,OAAS,EAAI,GAAKsD,EAAgBtD,EAAK,SAE7DwD,GAAgBxD,EAAK,QAAU,EAAIA,EAAK,OAASsD,EAAgB,GACjEC,EAAiB,IAEnBJ,EAAgB,KAAK,KAAKA,EAAgBK,EAAa,EAAIA,GAC3DJ,EAAQ,KAAKD,CAAa,EAM1B,IAAMM,GAAqBJ,EAAE,OAAS,GAAmB,EAAI,EAC7DF,GAAiBnD,EAAK,OAAS,EAAI,KAAK,KAAKA,EAAK,OAASyD,EAAkB,EAAIF,EAC9CvD,EAAK,OAASsD,CACnD,CAAC,EAID,IAAMI,EAAsB,GAC5BP,EAAgB,KAAK,KAAKA,EAAgBO,CAAmB,EAAIA,EACjE,IAAMC,EAAc,IAAI,YAAYR,CAAa,EACjDT,EAAgB,QAAQ,CAACW,EAAG/D,IAAM,CAChC,IAAMsE,EAASR,EAAQ9D,CAAC,EAClBU,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIA,EAAE,OAAS,EACb,IAAI,WAAWM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UAChDqD,EAAE,OAAS,GACpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,GAEpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,EACpB,IAAI,aAAaM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,MAE3D,OAAM,IAAI,MAAM,6BAA6B4B,GAA2ByB,EAAE,IAAI,CAAC,EAAE,CAErF,CAAC,EAED,IAAMQ,EAEF,KAAK,eAAe,OAAOV,EAAe,eAAe,SAAW,eAAe,OAAO,EAC9F,KAAK,OAAO,MAAM,YAAYU,EAAkB,OAAQ,EAAGF,EAAa,EAAGR,CAAa,EACxF,KAAK,eAAe,QAAQU,EAAkB,EAAE,EAChDX,EAAuB,CAAC,OAAQ,EAAG,KAAMC,EAAe,OAAQU,EAAkB,MAAM,CAC1F,CAEA,IAAMC,EAA0B,KAAK,eAAe,2BAA2BrB,CAAa,EACtF9C,EAAuBmE,EAAwB,CAAC,IAAM,GAAKA,EAAwB,CAAC,IAAM,EAE1FlE,EAAMrB,GAAwB0D,EAASZ,EAAkB1B,CAAoB,EAC/EoE,EAAW,KAAK,eAAe,YAAYnE,CAAG,EAQlD,GAPKmE,IACHA,EAAW,KAAK,eAAe,MAAM9B,EAAS6B,CAAuB,EACrE,KAAK,eAAe,YAAYlE,EAAKmE,CAAQ,EAC7CC,GAAU,OAAQ,IAAM,mBAAmBpE,CAAG,kBAAkBqC,EAAQ,IAAI,EAAE,GAI5ES,GAAmBqB,EAAS,qBAAsB,CACpD,GAAIrB,EAAgB,SAAWqB,EAAS,qBAAqB,OAC3D,MAAM,IAAI,MAAM,4CAA4CA,EAAS,qBAAqB,MAAM,SAC5FrB,EAAgB,MAAM,gBAAgBqB,EAAS,YAAY,IAAI,IAAI,EAEzE,QAASzE,EAAI,EAAGA,EAAIoD,EAAgB,OAAQpD,IAAK,CAC/C,IAAM2E,EAAUvB,EAAgBpD,CAAC,EAC3B4E,EAAaD,EAAQ,KACrBE,EAAe,OAAOF,EAAQ,MAAS,SAAW,EAAIA,EAAQ,KAAK,OACnE,CAAC1E,EAAM6E,CAAM,EAAIL,EAAS,qBAAqBzE,CAAC,EACtD,GAAI4E,IAAe3E,GAAQ4E,IAAiBC,EAC1C,MAAM,IAAI,MAAM,oBAAoB9E,CAAC,0BAA0BC,CAAI,cAAc6E,CAAM,cACnFF,CAAU,cAAcC,CAAY,gBAAgBJ,EAAS,YAAY,IAAI,IAAI,CAEzF,CACF,CAOA,GALAC,GACI,OACA,IAAM,yBAAyB/B,EAAQ,IAAI,UAAUrC,CAAG,UAAUkE,EAAwB,CAAC,CAAC,IACxFA,EAAwB,CAAC,CAAC,IAAIA,EAAwB,CAAC,CAAC,EAAE,EAE9D,KAAK,YAAc,QAAU,KAAK,gBAAkB,YAAa,CACnE,IAAM/C,EAAuC,CAC3C,SAAU,KAAK,gBACf,YAAagD,EAAS,YAAY,KAClC,iBAAA1C,EACA,kBAAAC,CACF,EACA,KAAK,eAAe,KAAKP,CAAiB,EAEtC,KAAK,gBAAkB,aACK,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC7D,KAAKA,CAAiB,CAEjD,CAEA,YAAK,eAAe,IAAIgD,EAAUzB,EAAYO,EAAaiB,EAAyBZ,CAAoB,EAExGlB,GAAeC,EAAQ,IAAI,EACpBX,CACT,CAEA,OAAO+C,EAAmBrE,EAAwB,CAChD,KAAK,eAAe,OAAOqE,EAAWrE,CAAI,CAC5C,CAEA,OAAOsE,EAAaC,EAAmB,CACrC,KAAK,eAAe,OAAOD,EAAKC,CAAG,CACrC,CAEA,MAAM,SAASF,EAAmBG,EAAkD,CAGlF,MAAM,KAAK,eAAe,SAASH,EAAWG,CAAe,CAC/D,CAEA,MAAMC,EAAsB,CAC1B,OAAO,KAAK,eAAe,OAAOA,CAAI,EAAE,EAC1C,CAEA,KAAKC,EAAqB,CACxB,OAAO,KAAK,eAAe,QAAQA,CAAG,CACxC,CAEA,aAAaxD,EAAoBF,EAAkB2D,EAAoBxD,EAA0B,CAC/F,IAAMyD,EAAKC,GAAwB,IAAI3D,CAAU,EACjD,GAAI,CAAC0D,EACH,MAAM,IAAI,MAAM,2BAA2B1D,CAAU,EAAE,EAGzD,IAAMD,EAAyB,CAC7B,WAAAC,EACA,WAAAC,EACA,YAAayD,EAAG,CAAC,EACjB,WAAY,CAACA,EAAG,CAAC,EAAGD,CAAS,CAC/B,EACA,KAAK,QAAQ,IAAI3D,EAAUC,CAAU,CACvC,CAEA,cAAcD,EAAwB,CACpC,IAAMiC,EAAiB,KAAK,qBAAqB,IAAIjC,CAAQ
,EAC7D,GAAIiC,EAAgB,CAClB,QAAWjD,KAAQiD,EACjB,KAAK,eAAe,QAAQjD,EAAK,EAAE,EAErC,KAAK,qBAAqB,OAAOgB,CAAQ,CAC3C,CAEA,KAAK,iBAAiB,OAAOA,CAAQ,EACrC,KAAK,QAAQ,OAAOA,CAAQ,CAC9B,CAEA,cAAcA,EAAkB8D,EAAyBC,EAA6C,CACpG,IAAMC,EAAS,KAAK,QAAQ,IAAIhE,CAAQ,EACxC,GAAI,CAACgE,EACH,MAAM,IAAI,MAAM,uBAAuBhE,CAAQ,EAAE,EAEnD,IAAME,EAAa8D,EAAO,WACpB7D,EAAa6D,EAAO,WACpBC,EAAcD,EAAO,YACrBE,EAAaF,EAAO,WAC1B,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,YAAY9D,CAAU,KAAKC,CAAU,2CAA2C,EAElG,KAAK,gBAAkBH,EAGnBkE,EAAW,CAAC,IACdA,EAAW,CAAC,EAAIA,EAAW,CAAC,EAAEA,EAAW,CAAC,CAAC,EAC3CA,EAAW,CAAC,EAAI,QAGlBlB,GAAU,OAAQ,IAAM,kCAAkC9C,CAAU,KAAKC,CAAU,MAAM,EAEzF,IAAMgE,EAAgB,KAAK,IAAI,MAE/B,KAAK,cAAgB,CAAC,EACtB,GAAI,CACF,OAAIA,GACF,KAAK,OAAO,eAAe,YAAY,EAGzCF,EAAYH,EAASI,EAAW,CAAC,CAAC,EAC3B,CACT,OAASE,EAAG,CACV,OAAAL,EAAO,KAAK,QAAQ,QAAQ,qBAAqB7D,CAAU,KAAKC,CAAU,aAAaiE,CAAC,EAAE,CAAC,EACpF,CACT,QAAE,CACID,GACFJ,EAAO,KAAK,KAAK,OAAO,cAAc,EAAE,KACpCM,GAAOA,EAAM,qCAAqCnE,CAAU,KAAKC,CAAU,MAAMkE,EAAI,OAAO,GAAK,IAAI,CAAC,EAG5G,QAAWrF,KAAQ,KAAK,cACtB,KAAK,eAAe,QAAQA,EAAK,EAAE,EAErC,KAAK,cAAgB,CAAC,EACtB,KAAK,gBAAkB,IACzB,CACF,CAGA,eAAesF,EAAmBC,EAAeC,EAAmBf,EAAsB,CACxF,IAAIgB,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EACxEG,IACHA,EAA4B,IAAI,IAChC,KAAK,2BAA2B,IAAIH,EAAWG,CAAyB,GAG1E,IAAMC,EAAiBD,EAA0B,IAAIF,CAAK,EACpDI,EAAK,KAAK,eAAe,uBAAuBH,EAAQf,EAAMiB,IAAiB,CAAC,CAAC,EACvF,OAAAD,EAA0B,IAAIF,EAAO,CAACI,EAAIH,CAAM,CAAC,EAC1CG,CACT,CACA,kBAAkBL,EAAyB,CACzC,IAAMG,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EAC3EG,IACFA,EAA0B,QAAQG,GAAc,KAAK,eAAe,yBAAyBA,EAAW,CAAC,CAAC,CAAC,EAC3G,KAAK,2BAA2B,OAAON,CAAS,EAEpD,CACA,UAAUjB,EAA8B,CACtC,IAAM9B,EAAU,KAAK,eAAe,IAAI8B,CAAS,EACjD,GAAI,CAAC9B,EACH,MAAM,IAAI,MAAM,2BAA2B8B,CAAS,EAAE,EAExD,OAAO9B,EAAQ,MACjB,CACA,iBAAiBsD,EAAsBpB,EAAclF,EAClB,CACjC,MAAO,UAAY,CACjB,IAAMS,EAAO,MAAM8F,GAAgB,KAAMD,EAAWpB,CAAI,EACxD,OAAOsB,GAAW/F,EAAK,OAAQT,CAAI,CACrC,CACF,CAEA,eAAegG,EAAqB,CAC9B,KAAK,YAAc,iBAKtB,KAAK,mBAA2B,eAAe,KAAK,SAAUA,CAAK,CACtE,CACA,cAAqB,CACnB,KAAK,UAAY,QACb,KAAK,IAAI,OAAO,WAAW,OAAS,YACnC,OAAO,KAAK,IAAI,MAAU,IAAc,KAAK,IAAI,KAAK,MAAQ,KAAK,IAAI,UACtE,KAAK,OAAO,SAAS,IAAI,qDAAqD,EAChF,KAAK,UAAY,gBACR,KAAK,OAAO,SAAS,IAAI,iBAAiB,IACnD,KAAK,UAAY,aAGf,KAAK,YAAc,QAAU,OAAO,KAAK,SAAa,MACxD,KAAK,SAAW,KAAK,OAAO,eAAe,CACzC,KAAM,YACN,MAAO,KAAK,kBAAoB,CAClC,CAAC,EACD,KAAK,mBAAqB,KAAK,OAAO,aAElC,CAAC,KAAM,KAAK,kBAAoB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,aAAa,CAAC,GAG/G,CAEA,cAAqB,CACnBvB,GAAU,OAAQ,cAAc,EAC3B,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,GACtD,KAAK,oBAAoB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAEpD,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,GACzD,KAAK,uBAAuB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAG5D,KAAK,MAAM,EACX,KAAK,cAAgB,WACvB,CACA,YAAmB,CACjBA,GAAU,OAAQ,YAAY,EAE9B,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CACA,QAAe,CACbA,GAAU,OAAQ,QAAQ,EAC1B,KAAK,cAAgB,YACrB,IAAMgC,EAAqB,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,EACxEC,EAAwB,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC9E7B,EAAS4B,EAAoB,OACnC,KAAK,eAAiB,CAAC,EACvB,QAAS1G,EAAI,EAAGA,EAAI8E,EAAQ9E,IAAK,CAC/B,IAAM4G,EAAqB,KAAK,sBAAsB,EAChDC,EAAUH,EAAoB1G,CAAC,EACrC,KAAK,eAAe,KAAK,sBAAwB,CAAC,EAClD4G,EAAmB,YAAYC,EAAQ,eAAe,EACtDD,EAAmB,aAAa,EAAGC,EAAQ,SAAS,EACpDD,EAAmB,mBAAmB,GAAGC,EAAQ,aAAa,EAC9D,KAAK,eAAe,KAAK,sBAAwB,EAAI,CAAC,EACtD,KAAK,wBACD,KAAK,YAAc,QACrB,KAAK,eAAe,KAAKF,EAAuB3G,CAAC,CAAC,GAEhD,KAAK,uBAAyB,KAAK,mBAAqB,KAAK,YAAc,cAC7E,KAAK,eAAe,EAElB,KAAK,uBAAyB,KAAK,mBACrC,KAAK,MAAM,CAEf,CAEA,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CAEA,iBAAiBgG,EAAyB,CACxC,KAAK,kBAAkBA,CAAS,EAC5B,KAAK,oBAAoB,IAAIA,CAAS,GACxC,KAAK,oBAAoB,OAAOA,CAAS,EAEvC,KAAK,uBAAuB,IAAIA,CAAS,GAC3C,KAAK,uBAAuB,OAAOA,CAAS,EAE9C,KAAK,eAAe,iBAAiBA,CAAS,CAChD,CAEA,WAAWA,EAAyB,CAClC,KAAK,iBAAmBA,EACxB,KAAK,aAAa,CACpB,C
ACF,ICx0BA,IAAAc,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAgBMC,GAuCAC,GAoHOF,GA3KbG,GAAAC,EAAA,kBAMAC,IAEAC,KACAC,KAEAC,KAKMP,GAAN,MAAMQ,CAAqC,CACzC,YACYC,EAAuCC,EAAkCC,EACjEC,EAAyB,CADjC,YAAAH,EAAuC,cAAAC,EAAkC,UAAAC,EACjE,UAAAC,CAA0B,CAE9C,iBAAgC,CAC9B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMC,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,aACJ,IAAI,aAAa,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CAChG,CAEA,kBAAkC,CAChC,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,cACJ,IAAI,cAAc,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjG,CAEA,eAA4B,CAC1B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,WAAe,IAAI,WAAW,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjH,CAEA,QAAQE,EAAwC,CAC9C,GAAID,EAAU,KAAKC,CAAO,IAAMD,EAAU,KAAK,KAAK,IAAI,EACtD,MAAM,IAAI,MAAM,mBAAmB,EAErC,OAAO,IAAIN,EAAe,KAAK,OAAQ,KAAK,SAAU,KAAK,KAAMO,CAAO,CAC1E,CACF,EAEMd,GAAN,KAAmD,CAajD,YAAoBQ,EAA+BO,EAAwBC,EAA2B,CAAlF,YAAAR,EAA+B,aAAAO,EAFnD,KAAQ,iBAAmB,EAC3B,KAAQ,eAAiB,EAEvB,KAAK,YAAcA,EAAQ,YAC3B,IAAME,EAAUT,EAAO,QAGnBU,EAAaF,IAAsB,EACvC,KAAK,gBAAkBC,EAAQC,GAAW,EAC1C,IAAMC,EAAaF,EAAQC,GAAW,EACtC,KAAK,YAAcD,EAAQC,GAAW,EACtC,KAAK,iBAAmBD,EAAQC,GAAW,EAC3C,KAAK,eAAiBD,EAAQC,GAAW,EAEzC,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIF,EAAYE,IAAK,CACnC,IAAMZ,EAAWQ,EAAQC,GAAW,EAC9BR,EAAOO,EAAQC,GAAW,EAC1BI,EAAML,EAAQC,GAAW,EACzBP,EAAiB,CAAC,EACxB,QAASY,EAAI,EAAGA,EAAID,EAAKC,IACvBZ,EAAK,KAAKM,EAAQC,GAAW,CAAC,EAEhCE,EAAO,KAAK,IAAIrB,GAAeS,EAAQC,EAAUC,EAAMC,CAAI,CAAC,CAC9D,CACA,KAAK,OAASS,CAChB,CAhCA,IAAI,kBAA6C,CAC/C,OAAO,KAAK,QAAQ,uBACtB,CACA,IAAI,kBAA+B,CACjC,OAAO,KAAK,OAAO,OAAO,SAAS,KAAK,iBAAkB,KAAK,iBAAmB,KAAK,cAAc,CACvG,CA6BA,6BAAwD,CACtD,MAAO,CACL,KAAK,QAAQ,OAAO,OAAO,yBAA0B,KAAK,QAAQ,OAAO,OAAO,yBAChF,KAAK,QAAQ,OAAO,OAAO,wBAC7B,CACF,CAEA,mCAA4C,CAC1C,OAAO,KAAK,QAAQ,OAAO,OAAO,8BACpC,CAEA,QAAQI,EAAsBC,EAAyE,CAErG,IAAMC,EACFD,GAAsB,QAAQ,IAAIJ,GAAK,OAAOA,GAAM,SAAW,KAAK,OAAOA,CAAC,EAAIA,CAAC,GAAK,KAAK,OAEzFM,EAAgBF,GAAsB,SAAW,CAAC,EAClDG,EAAqB,CAACC,EAAepB,EAAkBE,IACzD,IAAIZ,GAAe,KAAK,OAAQU,EAAU,KAAK,OAAOoB,EAAOlB,CAAI,EAAGA,CAAI,EACtEmB,EAAwB,CAACrB,EAAkBE,IAAwC,CACvF,IAAMoB,EAAcC,GAAqBvB,CAAQ,EACjD,GAAI,CAACsB,EACH,MAAM,IAAI,MAAM,0BAA0BtB,CAAQ,EAAE,EAEtD,IAAMwB,EAAaF,EAAclB,EAAU,KAAKF,CAAI,EAC9CuB,EAAYD,EAAa,EAAI,KAAK,QAAQ,eAAe,OAAOA,CAAU,EAAE,GAAK,EACvF,OAAO,IAAIlC,GAAe,KAAK,OAAQU,EAAUyB,EAAWvB,CAAI,CAClE,EACA,OAAO,KAAK,QAAQ,IAChBa,EAASE,EAAcC,EAAeC,EAAoBE,EAAuB,KAAK,WAAW,CACvG,CAEA,OAAOD,EAAelB,EAAiC,CACrD,IAAMwB,EAAQ,KAAK,OAAO,UAAU,EACpC,GAAI,CACF,IAAMzB,EAAO,KAAK,OAAO,YAAY,EAAIC,EAAK,QAAU,CAAsB,EAC1EyB,EAAS1B,GAAQ,EACrB,KAAK,OAAO,QAAQ0B,GAAQ,EAAIzB,EAAK,OACrC,QAASU,EAAI,EAAGA,EAAIV,EAAK,OAAQU,IAC/B,KAAK,OAAO,QAAQe,GAAQ,EAAIzB,EAAKU,CAAC,EAExC,OAAO,KAAK,OAAO,YAAa,KAAK,gBAAiBQ,EAAOnB,CAAI,CACnE,OAAS2B,EAAG,CACV,MAAM,IAAI,MACN,sCAAsCR,CAAK,gBAAgBlB,CAAI,8GAErD0B,CAAC,EAAE,CACnB,QAAE,CACA,KAAK,OAAO,aAAaF,CAAK,CAChC,CACF,CACF,EA0BarC,GACT,MAAMwC,EAAwB9B,EAAuB+B,EAAUC,IAA2C,CAC5G,IAAMC,EAAWjC,EAAO,SACxB,GAAI,CAACiC,EACH,MAAM,IAAI,MAAM,mFAAmF,EAGrG,GAAIH,IAAS,SAAU,CACrB,IAAMvB,EAAU,IAAI2B,GACpB,MAAM3B,EAAQ,WAAWwB,EAAKC,CAAW,EAEzCC,EAAS,SAAU,CAEjB1B,EAGC4B,GAAiB5B,EAAQ,MAAM4B,CAAI,EAGnCC,GAAgB7B,EAAQ,KAAK6B,CAAG,EAGjC,CAACC,EAAaC,EAAaH,EAAcI,EAAc,KAAU,CAC/D,GAAIA,EACFC,GAAU,UAAW,IAAM,kCAAkCH,CAAG,SAASC,CAAG,UAAUH,CAAI,EAAE,EAC5F5B,EAAQ,OAAO8B,EAAKC,CAAG,MAClB,CACLE,GAAU,UAAW,IAAM,yCAAyCH,CAAG,eAAeC,CAAG,UAAUH,CAAI,EAAE,EACzG,IAAMjC,EAAOF,EAAO,OAAO,SAASqC,IAAQ,GAAIA,IAAQ,GAAKF,CAAI,EACjE5B,EAAQ,OAAO+B,EAAKpC,CAAI,CAC1B,CACF,EAGA,MA
AMwB,EAAmBe,EAAoBN,IACxB,CACfK,GACI,UACA,IAAM,wCAAwCd,CAAS,gBAAgBe,CAAU,UAAUN,CAAI,EAAE,EAErG,MAAM5B,EAAQ,SACVmB,EAAW,IAAM1B,EAAO,OAAO,SAASyC,IAAe,GAAIA,IAAe,GAAKN,CAAI,CAAC,CAC1F,EAGJ,CAACO,EAAoBC,EAAkBC,IAAuBrC,EAAQ,aAClEmC,EAAYC,EAAUC,EAAW5C,EAAO,aAAaA,EAAO,iBAAkB2C,CAAQ,CAAC,CAAC,EAG3FE,GAAmBtC,EAAQ,cAAcsC,CAAM,EAGhD,CAACA,EAAgBrC,EAA2BsC,EAAuBC,IAAwC,CACzGP,GACI,UACA,IAAM,mCAAmCM,CAAa,YAAYD,CAAM,uBACpErC,CAAiB,EAAE,EAC3B,IAAMwC,EAAU,IAAIxD,GAAmBQ,EAAQO,EAASC,CAAiB,EACzE,OAAOD,EAAQ,cAAcsC,EAAQG,EAASD,CAAM,CACtD,EAEA,IAAMxC,EAAQ,aAAa,EAE3B,IAAMA,EAAQ,WAAW,EAEzB,IAAMA,EAAQ,OAAO,CACvB,CAAC,CACH,MACE0B,EAAS,OAAO,CAEpB,ICjPA,IAoEMgB,GAWOC,GAWAC,GAoFPC,GAOAC,GAqBOC,GAkBAC,GAmKAC,GAuBAC,GA+EAC,GA6OAC,GAgBAC,GAluBbC,GAAAC,EAAA,kBAWAC,KACAC,KACAC,IACAC,KACAC,KACAC,KAoDMnB,GAAU,CAACoB,EAAoBC,IAA+B,CAChDC,GAAY,EAAE,SAASF,EAAYC,CAAY,IAC/C,GAChBE,GAAe,+BAAgC,CAEnD,EAMatB,GAAc,MAAMuB,GAA4B,CAE3DxB,GAAQwB,EAAI,KAAK,WAAaC,GAAqBD,EAAI,QAAQ,CAAC,CAClE,EAQatB,GAAS,MAAMsB,EAAUE,IAAkC,CACxC,CAE5B,IAAMC,EAAW,cAAuB,KAExC,GAAID,IAAW,SAAU,CAEvB,GAAI,OAAO,UAAc,KAAe,CAAC,UAAU,IACjD,MAAM,IAAI,MAAM,gDAAgD,EAGlE,IAAIE,EAAUJ,EAAI,OAAO,QACzB,GAAKI,GAmBH,GAAI,OAAOA,EAAQ,QAAW,UAAY,OAAOA,EAAQ,UAAa,UAClE,OAAOA,EAAQ,eAAkB,WACnC,MAAM,IAAI,MAAM,kFAAkF,MArBxF,CAEZ,IAAMC,EAAkBL,EAAI,OAAO,gBACnC,GAAIK,IAAoB,QAAaA,IAAoB,aACrDA,IAAoB,mBACtB,MAAM,IAAI,MAAM,qCAAqCA,CAAe,GAAG,EAEzE,IAAMC,EAAuBN,EAAI,OAAO,qBACxC,GAAIM,IAAyB,QAAa,OAAOA,GAAyB,UACxE,MAAM,IAAI,MAAM,0CAA0CA,CAAoB,GAAG,EAGnF,GADAF,EAAU,MAAM,UAAU,IAAI,eAAe,CAAC,gBAAAC,EAAiB,qBAAAC,CAAoB,CAAC,EAChF,CAACF,EACH,MAAM,IAAI,MACN,0GAC+E,CAEvF,CAQA,MAAMD,EAAS,SAAUL,GAAY,EAAGE,EAAKI,CAAO,CACtD,CACA,GAAIF,IAAW,QAAS,CAEtB,GAAI,OAAO,UAAc,KAAe,CAAE,UAAuC,GAC/E,MAAM,IAAI,MAAM,+CAA+C,EAGjE,MAAMC,EAAS,QAASL,GAAY,EAAGE,CAAG,CAC5C,CACF,CACF,EAoCMrB,GAAiB,IAAI,IAOrBC,GAA8B2B,GAA4C,CAC9E,IAAMC,EAAOV,GAAY,EACnBW,EAAQD,EAAK,UAAU,EAC7B,GAAI,CACF,IAAME,EAAaF,EAAK,WAAW,CAAC,EAEpC,OADkBA,EAAK,wBAAwBD,EAAeG,EAAYA,EAAa,CAAC,IACtE,GAChBX,GAAe,uCAAwC,EAElD,CAACS,EAAK,OAAOE,EAAa,CAAC,EAAGF,EAAK,OAAOE,EAAa,EAAI,CAAC,CAAC,CACtE,QAAE,CACAF,EAAK,aAAaC,CAAK,CACzB,CACF,EAQa5B,GAA0B8B,GAAwC,CAC7E,IAAMH,EAAOV,GAAY,EACnBc,EAAkBJ,EAAK,QAAQG,EAAM,UAAU,EACrD,GAAIC,IAAoB,EACtB,MAAM,IAAI,MAAM,+DAA+DD,EAAM,UAAU,GAAG,EAEpG,OAAAH,EAAK,OAAO,IAAIG,EAAOC,CAAe,EAC/B,CAACA,EAAiBD,EAAM,UAAU,CAC3C,EAUa7B,GAAgB,MACzB+B,EACAC,IAAoF,CACtF,IAAIF,EAAyBG,EACvBP,EAAOV,GAAY,EAErB,MAAM,QAAQe,CAAS,EAEzB,CAACD,EAAiBG,CAAe,EAAIF,EAC5BA,EAAU,SAAWL,EAAK,OAAO,OAE1C,CAACI,EAAiBG,CAAe,EAAI,CAACF,EAAU,WAAYA,EAAU,UAAU,EAGhF,CAACD,EAAiBG,CAAe,EAAIlC,GAAuBgC,CAAS,EAGvE,IAAIN,EAAgB,EAChBS,EAAuB,EACvBC,EAAkB,EAClBC,EAAmB,CAAC,EAClBC,EAAwB,CAAC,EACzBC,EAAyB,CAAC,EAEhC,GAAI,CAGF,GAFA,CAACJ,EAAsBE,CAAM,EAAIG,GAAkBP,CAAO,EAEtDA,GAAS,cAAgBN,EAAK,kBAAmB,CACnD,IAAMc,EAAkB,CAAC,EACzB,QAAWC,KAAQT,EAAQ,aAAc,CACvC,IAAMU,EAAO,OAAOD,GAAS,SAAWA,EAAOA,EAAK,KACpDD,EAAgB,KAAKG,GAAS,OAAOF,GAAS,SAAWA,EAAOA,EAAK,IAAI,EAAE,KAAKG,GAAQ,CACtFlB,EAAK,kBAAmBgB,EAAME,CAAI,CACpC,CAAC,CAAC,CACJ,CAGA,MAAM,QAAQ,IAAIJ,CAAe,CACnC,CAEA,QAAWK,KAAYb,GAAS,oBAAsB,CAAC,EAErD,IADqB,OAAOa,GAAa,SAAWA,EAAWA,EAAS,QACnD,QAAS,CAC5B,GAAInB,EAAK,eACP,MAAM,IAAI,MAAM,0CAA0C,EAE5D,GAAI,OAAOmB,GAAa,SAAU,CAChC,IAAMC,EAAeD,EACfE,EAAWD,GAA6D,QACxEE,EAAaF,GAAsD,UACnEG,EAAcH,GAAuD,WACrEhC,EAAcgC,GAAuD,WACrEvB,EAAmBuB,GAAuD,gBAC5EC,EACFrB,EAAK,eAAiBqB,EACbC,EACTtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAcsB,CAAS,EAEhEtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,CAAC,WAAAuB,EAAY,WAAAnC,EAAY,gBAAAS,CAAe,CAAC,CAEpG,MACEG,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,EAEzD,KACF,CAGFD,EAAgB,MAAMC,EAAK,kBAAkBI,EAAiBG,EAAiBC,CAAoB,EAC/FT,IAAkB,GACpBR
,GAAe,yBAA0B,EAIvCS,EAAK,iBACPA,EAAK,eAAiB,QAGxB,GAAM,CAACwB,EAAYC,CAAW,EAAIrD,GAA2B2B,CAAa,EAEpE2B,EAAqB,CAAC,CAACpB,GAAS,mBAEhCqB,EAAa,CAAC,EACdC,EAAc,CAAC,EACfC,EAAwE,CAAC,EAC/E,QAASC,EAAI,EAAGA,EAAIN,EAAYM,IAAK,CACnC,IAAMC,EAAO/B,EAAK,iBAAiBD,EAAe+B,CAAC,EAC/CC,IAAS,GACXxC,GAAe,0BAA2B,EAE5CoB,EAAsB,KAAKoB,CAAI,EAC/BJ,EAAW,KAAK3B,EAAK,aAAa+B,CAAI,CAAC,CACzC,CACA,QAASD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMC,EAAO/B,EAAK,kBAAkBD,EAAe+B,CAAC,EAChDC,IAAS,GACXxC,GAAe,2BAA4B,EAE7CqB,EAAuB,KAAKmB,CAAI,EAChC,IAAMC,EAAahC,EAAK,aAAa+B,CAAI,EACzCH,EAAY,KAAKI,CAAU,EAEG,CAC5B,GAAIN,GAAsBpB,GAAS,0BAA4B,OAAW,CACxEuB,EAAyB,KAAK,YAAY,EAC1C,QACF,CACA,IAAMI,EAAW,OAAO3B,GAAS,yBAA4B,SACzDA,EAAQ,wBACRA,GAAS,0BAA0B0B,CAAU,GAAK,MACtD,GAAIC,IAAa,OAASA,IAAa,cAAgBA,IAAa,aAClE,MAAM,IAAI,MAAM,4CAA4CA,CAAQ,GAAG,EAEzE,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MAAM,4CACZA,CAAQ,4EAA4E,EAE1FJ,EAAyB,KAAKI,CAAQ,CACxC,CACF,CAGA,IAAIC,EAAoC,KACxC,OAAgCL,EAAyB,KAAKM,GAAKA,IAAM,YAAY,IACnF1B,EAAkBT,EAAK,kBAAkBD,CAAa,EAClDU,IAAoB,GACtBlB,GAAe,0BAA2B,EAG5C2C,EAAe,CACb,OAAQzB,EACR,yBAAAoB,EACA,gCAAiCA,EAAyB,IAAIM,GAAKC,GAAyBD,CAAC,CAAC,CAChG,GAGFhE,GAAe,IACX4B,EACA,CAACA,EAAeY,EAAuBC,EAAwBsB,EAAcR,EAAoB,EAAK,CAAC,EACpG,CAAC3B,EAAe4B,EAAYC,CAAW,CAChD,OAASS,EAAG,CACV,MAAA1B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EAEpD7B,IAAoB,GACtBT,EAAK,mBAAmBS,CAAe,EAGrCV,IAAkB,GACpBC,EAAK,mBAAmBD,CAAa,EAEjCsC,CACR,QAAE,CACArC,EAAK,MAAMI,CAAe,EACtBI,IAAyB,GAC3BR,EAAK,0BAA0BQ,CAAoB,EAErDE,EAAO,QAAQ6B,GAASvC,EAAK,MAAMuC,CAAK,CAAC,EAGzCvC,EAAK,sBAAsB,CAC7B,CACF,EAEazB,GAAkBiE,GAA4B,CACzD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,+CAA+CD,CAAS,EAAE,EAE5E,GAAM,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,CAAkB,EAAIe,EAEvGC,IACEhB,GACF1B,EAAK,sBAAsB0C,EAAe,MAAM,EAElD1C,EAAK,mBAAmB0C,EAAe,MAAM,GAG/C1C,EAAK,uBAAuBwC,CAAS,EAErC7B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACxDtC,EAAK,mBAAmBD,CAAa,EACrC5B,GAAe,OAAOqE,CAAS,CACjC,EAEahE,GACT,CAACmE,EAA6BC,EAAyBlC,EAAkB8B,EAAmBK,EAC3FnB,EAAqB,KAAgB,CACpC,GAAI,CAACiB,EAAQ,CACXC,EAAc,KAAK,CAAC,EACpB,MACF,CAEA,IAAM5C,EAAOV,GAAY,EAEnBwD,EAAWH,EAAO,CAAC,EACnBI,EAAOJ,EAAO,CAAC,EACfV,EAAWU,EAAO,CAAC,EAErBK,EACAC,EAEJ,GAAIH,IAAa,UAAYb,IAAa,aACxC,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MACN,2DAA2DY,CAAK,mCAAmC,EAGzG,GAAIZ,IAAa,aAAc,CAC7B,IAAMiB,EAAYP,EAAO,CAAC,EAAE,UACtBQ,EAAqBC,GAAqBC,GAA2BP,CAAQ,CAAC,EACpFG,EAAiBF,EAAK,OAAO,CAACO,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAIJ,EAEnD,IAAMK,EAAiBxD,EAAK,mBAC5B,GAAI,CAACwD,EACH,MAAM,IAAI,MAAM,qEAAqE,EAEvFR,EAAUQ,EAAehB,EAAWK,EAAOK,EAAWD,CAAc,CACtE,KAAO,CACL,IAAM/B,EAAOyB,EAAO,CAAC,EAErB,GAAI,MAAM,QAAQzB,CAAI,EAAG,CAEvB+B,EAAiB,EAAI/B,EAAK,OAC1B8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnB,IAAIS,EAAYT,EAAU,EAC1B,QAASlB,EAAI,EAAGA,EAAIZ,EAAK,OAAQY,IAAK,CACpC,GAAI,OAAOZ,EAAKY,CAAC,GAAM,SACrB,MAAM,IAAI,UAAU,wBAAwBA,CAAC,kBAAkB,EAEjE9B,EAAK,QAAQyD,GAAW,EAAIC,GAAgBxC,EAAKY,CAAC,EAAGpB,CAAM,CAC7D,CACF,MACEuC,EAAiB/B,EAAK,WACtB8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnBhD,EAAK,OAAO,IAAI,IAAI,WAAWkB,EAAK,OAAQA,EAAK,WAAY+B,CAAc,EAAGD,CAAO,CAEzF,CAEA,IAAM/C,EAAQD,EAAK,UAAU,EACvB2D,EAAa3D,EAAK,WAAW,EAAI+C,EAAK,MAAM,EAClD,GAAI,CACF,IAAIa,EAAWD,EAAa,EAC5BZ,EAAK,QAAQc,GAAK7D,EAAK,OAAO4D,GAAU,EAAIC,CAAC,EAC7C,IAAMlB,EAAS3C,EAAK,iBAChBqD,GAA2BP,CAAQ,EAAGE,EAASC,EAAgBU,EAAYZ,EAAK,OAChFX,GAAyBH,CAAQ,CAAC,EAClCU,IAAW,GACbpD,GAAe,iDAAiDiD,CAAS,WAAWK,CAAK,GAAG,EAE9FD,EAAc,KAAKD,CAAM,CAC3B,QAAE,CACA3C,EAAK,aAAaC,CAAK,CACzB,CACF,EAK
SxB,GAAM,MACf+D,EAAmBsB,EAAwBC,EAAgCC,EAC3EC,EAA2C3D,IAAoE,CACjH,IAAMN,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,6CAA6CD,CAAS,EAAE,EAE1E,IAAMzC,EAAgB0C,EAAQ,CAAC,EACzB9B,EAAwB8B,EAAQ,CAAC,EACjC7B,EAAyB6B,EAAQ,CAAC,EAClCC,EAAiBD,EAAQ,CAAC,EAC1Bf,EAAqBe,EAAQ,CAAC,EAC9ByB,EAAmBzB,EAAQ,CAAC,EAE5BjB,EAAasC,EAAa,OAC1BrC,EAAcuC,EAAc,OAE9BG,EAAmB,EACnBC,EAA6B,CAAC,EAE5BC,EAA+B,CAAC,EAChCC,EAAgC,CAAC,EACjCC,EAA8B,CAAC,EAE/BC,EAAiBxE,EAAK,UAAU,EAChCyE,EAAoBzE,EAAK,WAAWwB,EAAa,CAAC,EAClDkD,EAAmB1E,EAAK,WAAWwB,EAAa,CAAC,EACjDmD,EAAqB3E,EAAK,WAAWyB,EAAc,CAAC,EACpDmD,EAAoB5E,EAAK,WAAWyB,EAAc,CAAC,EAEzD,GAAI,CACF,CAAC0C,EAAkBC,CAAgB,EAAIS,GAAcvE,CAAO,EAG5D,QAASwB,EAAI,EAAGA,EAAIN,EAAYM,IAC9BtD,GACIuF,EAAajC,CAAC,EAAGuC,EAAoBE,EAAmB/B,EAAWsB,EAAahC,CAAC,EAAGJ,CAAkB,EAI5G,QAASI,EAAI,EAAGA,EAAIL,EAAaK,IAC/BtD,GACIyF,EAAcnC,CAAC,EAAGwC,EAAqBC,EAAmB/B,EAAWhB,EAAawC,EAAclC,CAAC,EACjGJ,CAAkB,EAGxB,IAAIoD,EAAmBL,EAAoB,EACvCM,EAAkBL,EAAmB,EACrCM,GAAoBL,EAAqB,EACzCM,GAAmBL,EAAoB,EAC3C,QAAS9C,EAAI,EAAGA,EAAIN,EAAYM,IAC9B9B,EAAK,QAAQ8E,GAAkB,EAAIT,EAAmBvC,CAAC,EACvD9B,EAAK,QAAQ+E,GAAiB,EAAIpE,EAAsBmD,EAAahC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIL,EAAaK,IAC/B9B,EAAK,QAAQgF,IAAmB,EAAIV,EAAoBxC,CAAC,EACzD9B,EAAK,QAAQiF,IAAkB,EAAIrE,EAAuBoD,EAAclC,CAAC,CAAC,EAG5E,GAAgCY,GAAkB,CAACwB,EAAkB,CACnE,GAAM,CAAC,OAAAgB,EAAQ,yBAAArD,GAA0B,gCAAAsD,EAA+B,EAAIzC,EAE5E,GAAI/B,EAAsB,SAAWa,EACnC,MAAM,IAAI,MAAM,2BACZA,CAAU,4DAA4Db,EAAsB,MAAM,IAAI,EAI5G,QAASmB,GAAI,EAAGA,GAAIN,EAAYM,KAAK,CACnC,IAAMe,EAAQiB,EAAahC,EAAC,EACV,MAAM9B,EAAK,cAAckF,EAAQvE,EAAsBkC,CAAK,EAAGwB,EAAmBvC,EAAC,CAAC,IACpF,GAChBvC,GAAe,oBAAoBuC,EAAC,iBAAiBU,CAAS,GAAG,CAErE,CAGA,QAASV,GAAI,EAAGA,GAAIL,EAAaK,KAAK,CACpC,IAAMe,EAAQmB,EAAclC,EAAC,EACZmC,EAAcnC,EAAC,IAAI,CAAC,EAIjB9B,EAAK,eAAekF,EAAQtE,EAAuBiC,CAAK,EAAGyB,EAAoBxC,EAAC,EAAG,CAAC,IACpF,GAChBvC,GAAe,mCAAmCuC,EAAC,iBAAiBU,CAAS,GAAG,EAK9ExC,EAAK,eAAekF,EAAQtE,EAAuBiC,CAAK,EAAG,EAAGsC,GAAgCtC,CAAK,CAAC,IACtF,GAChBtD,GAAe,qBAAqBuC,EAAC,QAAQD,GAAyBC,EAAC,CAAC,gBAAgBU,CAAS,GAAG,CAG1G,CACArE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAI,CAAC,CAC9G,CAEA1B,EAAK,iBAAiBD,CAAa,EACnC,IAAIqF,EAC4B1C,EAC9B0C,EAAY,MAAMpF,EAAK,mBACnBD,EAAe2C,EAAe,OAAQjB,EAAakD,EAAoBR,CAAgB,EAE3FiB,EAAY,MAAMpF,EAAK,QACnBD,EAAe2E,EAAkBD,EAAmBjD,EAAYoD,EAAmBnD,EACnFkD,EAAoBR,CAAgB,EAGtCiB,IAAc,GAChB7F,GAAe,0BAA0B,EAG3C,IAAM8F,GAA2B,CAAC,EAElC,QAASvD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMa,GAAS3C,EAAK,QAAQ2E,EAAqB,EAAI7C,CAAC,EACtD,GAAIa,KAAW2B,EAAoBxC,CAAC,EAAG,CAErCuD,GAAO,KAAKpB,EAAcnC,CAAC,CAAE,EAC7B,QACF,CAEA,IAAMwD,GAA2BtF,EAAK,UAAU,EAE1CuF,GAAmBvF,EAAK,WAAW,EAAI,CAAC,EAE1CwF,EAAmB,GACnBC,GAA6BvF,EAAa,EAC9C,GAAI,CACgBF,EAAK,kBACnB2C,GAAQ4C,GAAkBA,GAAmB,EAAGA,GAAmB,EAAGA,GAAmB,EAAE,IAC7E,GAChBhG,GAAe,4CAA4CuC,CAAC,GAAG,EAEjE,IAAI4D,EAAkBH,GAAmB,EACnCzC,EAAW9C,EAAK,QAAQ0F,GAAiB,EAC/CxF,EAAaF,EAAK,QAAQ0F,GAAiB,EAC3C,IAAM/B,GAAa3D,EAAK,QAAQ0F,GAAiB,EAC3CC,GAAa3F,EAAK,QAAQ0F,GAAiB,EAC3C3C,GAAO,CAAC,EACd,QAASjB,GAAI,EAAGA,GAAI6D,GAAY7D,KAC9BiB,GAAK,KAAK/C,EAAK,QAAQ2D,GAAa,EAAI7B,EAAC,CAAC,EAE5C9B,EAAK,SAAS2D,EAAU,EAExB,IAAMiC,GAAO7C,GAAK,OAAO,CAACO,GAAGC,KAAMD,GAAIC,GAAG,CAAC,EAC3CkC,GAAOI,GAA2B/C,CAAQ,EAE1C,IAAMgD,GAAoBpD,GAAgB,yBAAyBsB,EAAclC,CAAC,CAAC,EAEnF,GAAI2D,KAAS,SAAU,CACrB,GAAIK,KAAsB,aACxB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMC,GAAuB,CAAC,EAC1BtC,GAAYvD,EAAa,EAC7B,QAAS4B,GAAI,EAAGA,GAAI8D,GAAM9D,KAAK,CAC7B,IAAMkE,GAAShG,EAAK,QAAQyD,IAAW,EACjCwC,GAAiBnE,KAAM8D,GAAO,EAAI,OAAY5F,EAAK,QAAQyD,EAAS,EAAIuC,GAC9ED,GAAW,KAAK/F,EAAK,aAAagG,GAAQC,EAAc,CAAC,CAC3D,CACAZ,GAAO,KAAK,CAACI,GAAM1C,GAAMgD,G
AAY,KAAK,CAAC,CAC7C,SAGMD,KAAsB,cAAgBF,GAAO,EAAG,CAClD,IAAMM,GAAYlG,EAAK,cACvB,GAAI,CAACkG,GACH,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAMhD,GAAYgD,GAAUhG,CAAU,EAChCiG,GAAc/C,GAAqBN,CAAQ,EACjD,GAAIqD,KAAgB,QAAa,CAACC,GAAyBX,EAAI,EAC7D,MAAM,IAAI,MAAM,0BAA0BA,EAAI,EAAE,EAIlDD,EAAmB,GAEnBH,GAAO,KAAK,CACVI,GAAM1C,GAAM,CACV,UAAAG,GACA,SAAUlD,EAAK,qBAAsBkD,GAAW0C,GAAOO,GAAaV,EAAI,EACxE,QAAS,IAAM,CACbzF,EAAK,kBAAkB2C,EAAM,CAC/B,CACF,EACA,YACF,CAAC,CACH,KAAO,CACL,IAAM0D,GAAwBC,GAAkCb,EAAI,EAC9DvE,GAAO,IAAImF,GAAsBT,EAAI,EAC3C,IAAI,WAAW1E,GAAK,OAAQA,GAAK,WAAYA,GAAK,UAAU,EACvD,IAAIlB,EAAK,OAAO,SAASE,EAAYA,EAAagB,GAAK,UAAU,CAAC,EACvEmE,GAAO,KAAK,CAACI,GAAM1C,GAAM7B,GAAM,KAAK,CAAC,CACvC,CAEJ,QAAE,CACAlB,EAAK,aAAasF,EAAwB,EACtCG,KAAS,UAAYvF,GACvBF,EAAK,MAAME,CAAU,EAElBsF,GACHxF,EAAK,kBAAkB2C,EAAM,CAEjC,CACF,CAEA,OAAID,GAAkB,CAAChB,IACrB1B,EAAK,sBAAsB0C,EAAe,MAAM,EAChDvE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAK,CAAC,GAExG2D,EACT,QAAE,CACArF,EAAK,aAAawE,CAAc,EAEhCH,EAAmB,QAAQkC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EACzDjC,EAAoB,QAAQiC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EAC1DhC,EAAkB,QAAQiC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,EAExCrC,IAAqB,GACvBnE,EAAK,sBAAsBmE,CAAgB,EAE7CC,EAAiB,QAAQoC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,CAC7C,CACF,EAKa9H,GAAgB8D,GAA4B,CACvD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,oBAAoB,EAEtC,IAAM1C,EAAgB0C,EAAQ,CAAC,EAGzBgE,EAAkBzG,EAAK,iBAAiBD,CAAa,EACvD0G,IAAoB,GACtBlH,GAAe,iCAAkC,EAEnDS,EAAK,SAASyG,CAAe,CAC/B,EAEa9H,GAA8B+H,GAAsE,CAC/G,IAAMC,EAA6B,CAAC,EACpC,QAAWhE,KAAU+D,EAAS,CAC5B,IAAMxF,EAAOyB,EAAO,CAAC,EACjB,CAAC,MAAM,QAAQzB,CAAI,GAAK,WAAYA,GACtCyF,EAAQ,KAAKzF,EAAK,MAAM,CAE5B,CACA,OAAOyF,CACT,IC3uBA,IAUMC,GACFC,GACAC,GACAC,GACAC,GACAC,GAGAC,GACEC,GAEAC,GASAC,GAMAC,GAmCOC,GA8CAC,GAaAC,GAaAC,GAuBAC,GAaAC,GAyBAC,GA5MbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEMvB,GAAU,IAAe,CAAC,CAACwB,GAAI,KAAK,OAAS,OAAO,SAAa,IAEnEtB,GAAe,GACfC,GAAc,GACdC,GAAU,GAKRG,GAAiF,IAAI,IAErFC,GAAmB,CAACiB,EAA8BC,IAA+C,CACrG,IAAMC,EAAQpB,GAAgB,IAAIkB,CAAI,EAClCE,EACFA,EAAM,KAAKD,CAAS,EAEpBnB,GAAgB,IAAIkB,EAAM,CAACC,CAAS,CAAC,CAEzC,EAEMjB,GAAe,IAAY,CAC/B,GAAIP,IAAgB,CAACC,IAAeC,IAAW,CAACH,GAC9C,MAAM,IAAI,MAAM,kBAAkB,CAEtC,EAEMS,GAAwBkB,GAA2C,CACvE,OAAQA,EAAG,KAAK,KAAM,CACpB,IAAK,YACH1B,GAAe,GACX0B,EAAG,KAAK,KACVxB,GAAU,GACVE,GAAkB,CAAC,EAAEsB,EAAG,KAAK,GAAG,IAEhCzB,GAAc,GACdG,GAAkB,CAAC,EAAE,GAEnBD,KACF,IAAI,gBAAgBA,EAAkB,EACtCA,GAAqB,QAEvB,MACF,IAAK,UACL,IAAK,YACL,IAAK,SACL,IAAK,UACL,IAAK,MACL,IAAK,gBAAiB,CACpB,IAAMqB,EAAYnB,GAAgB,IAAIqB,EAAG,KAAK,IAAI,EAC9CA,EAAG,KAAK,IACVF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAG,EAEjCF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAI,EAEpC,KACF,CACA,QACF,CACF,EAGajB,GAAqC,SAA0B,CAC1E,GAAI,CAAAR,GAGJ,IAAID,GACF,MAAM,IAAI,MAAM,0CAA4C,EAE9D,GAAIE,GACF,MAAM,IAAI,MAAM,uCAAyC,EAK3D,GAFAF,GAAe,GAEuBF,GAAQ,EAC5C,OAAO,IAAI,QAAc,CAAC6B,EAASC,IAAW,CAC5C7B,IAAa,UAAU,EAElB8B,GAAkB,EAAE,KAAK,CAAC,CAACC,EAAWC,CAAM,IAAM,CACrD,GAAI,CACFhC,GAAcgC,EACdhC,GAAY,QAAW2B,GAAmBE,EAAOF,CAAE,EACnD3B,GAAY,UAAYS,GACxBJ,GAAoB,CAACuB,EAASC,CAAM,EACpC,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAKV,EAAG,EAC5DvB,GAAY,YAAYiC,CAAO,EAC/B7B,GAAqB2B,CACvB,OAASG,EAAG,CACVL,EAAOK,CAAC,CACV,CACF,EAAGL,CAAM,CACX,CAAC,EAGD,GAAI,CACF,MAAMM,GAAsBZ,GAAI,IAAI,EACpC,MAAWa,GAAYb,EAAG,EAC1BrB,GAAc,EAChB,OAAS,EAAG,CACV,MAAAC,GAAU,GACJ,CACR,QAAE,CACAF,GAAe,EACjB,EAEJ,EAEaU,GAAkB,MAAM0B,GAAkC,CACrE,GAAsCtC,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAK,CAAC,OAAAI,EAAQ,IAAAd,EAAG,CAAC,EACpEvB,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAED,MAA
WK,GAAOf,GAAKc,CAAM,CAEjC,EAEazB,GAAyB,MAAM2B,GACJxC,GAAQ,GAC5CS,GAAa,EACN,IAAI,QAAoC,CAACoB,EAASC,IAAW,CAClEtB,GAAiB,YAAa,CAACqB,EAASC,CAAM,CAAC,EAC/C,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAK,CAAC,OAAAM,CAAM,CAAC,EACjEvC,GAAa,YAAYiC,EAAS,CAACM,EAAO,MAAM,CAAC,CACnD,CAAC,GAEW3B,GAAuB2B,CAAM,EAIhC1B,GACT,MAAM2B,EAA8CC,IACR,CACtC,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI0C,GAAS,wBACX,MAAM,IAAI,MAAM,sEAAsE,EAExF,OAAAjC,GAAa,EACN,IAAI,QAAqC,CAACoB,EAASC,IAAW,CACnEtB,GAAiB,SAAU,CAACqB,EAASC,CAAM,CAAC,EAC5C,IAAMI,EAA0B,CAAC,KAAM,SAAU,GAAK,CAAC,MAAAO,EAAO,QAAS,CAAC,GAAGC,CAAO,CAAC,CAAC,EAC9EC,EAA+B,CAAC,EAClCF,aAAiB,YACnBE,EAAa,KAAKF,EAAM,MAAM,EAEhCxC,GAAa,YAAYiC,EAASS,CAAY,CAChD,CAAC,CACH,KACE,QAAY7B,GAAc2B,EAAOC,CAAO,CAE5C,EAEK3B,GAAiB,MAAM6B,GAAqC,CACvE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAKU,CAAS,EAChE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEInB,GAAe6B,CAAS,CAEjC,EAEa5B,GAAM,MACf4B,EAAmBC,EAAwBC,EAA0BC,EACrEC,EAAqCN,IAAoE,CAC3G,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI8C,EAAO,KAAKG,GAAKA,EAAE,CAAC,IAAM,KAAK,EACjC,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAID,EAAQ,KAAKC,GAAKA,CAAC,EACrB,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAAxC,GAAa,EACN,IAAI,QAAsC,CAACoB,EAASC,IAAW,CACpEtB,GAAiB,MAAO,CAACqB,EAASC,CAAM,CAAC,EACzC,IAAMoB,EAAqBJ,EACrBZ,EACF,CAAC,KAAM,MAAO,GAAK,CAAC,UAAAU,EAAW,aAAAC,EAAc,OAAQK,EAAoB,cAAAH,EAAe,QAAAL,CAAO,CAAC,EACpGzC,GAAa,YAAYiC,EAAciB,GAA2BD,CAAkB,CAAC,CACvF,CAAC,CACH,KACE,QAAYlC,GAAI4B,EAAWC,EAAcC,EAAQC,EAAeC,EAASN,CAAO,CAEpF,EAEazB,GAAe,MAAM2B,GAAqC,CACrE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,gBAAiB,CAACqB,EAASC,CAAM,CAAC,EACnD,IAAMI,EAA0B,CAAC,KAAM,gBAAiB,GAAKU,CAAS,EACtE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEIjB,GAAa2B,CAAS,CAE/B,ICvNA,IAWaQ,GAWAC,GAiBAC,GAvCbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,IACAC,KACAC,KAEaT,GAAuB,CAACU,EAAgBC,IAA0C,CAC7F,OAAQD,EAAO,SAAU,CACvB,IAAK,MACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAMA,EAAO,KAAM,KAAK,EACtD,IAAK,aACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAM,CAAC,UAAWA,EAAO,SAAS,EAAG,YAAY,EAC/E,QACE,MAAM,IAAI,MAAM,0BAA0BA,EAAO,QAAQ,QAAQC,EAAQ,CAAC,EAAE,CAChF,CACF,EAEaV,GAAwBS,GAAmC,CACtE,OAAQA,EAAO,CAAC,EAAG,CACjB,IAAK,MACH,OAAO,IAAIE,GAAOF,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EACnD,IAAK,aAAc,CACjB,IAAMG,EAAWH,EAAO,CAAC,EACzB,GAAI,CAACI,GAAyBD,CAAQ,EACpC,MAAM,IAAI,MAAM,4BAA4BA,CAAQ,+BAA+B,EAErF,GAAM,CAAC,UAAAE,EAAW,SAAAC,EAAU,QAAAC,CAAO,EAAIP,EAAO,CAAC,EAC/C,OAAOE,GAAO,cAAcG,EAAW,CAAC,SAAAF,EAAU,KAAMH,EAAO,CAAC,EAAG,SAAAM,EAAU,QAAAC,CAAO,CAAC,CACvF,CACA,QACE,MAAM,IAAI,MAAM,0BAA0BP,EAAO,CAAC,CAAC,EAAE,CACzD,CACF,EAEaR,GAAN,KAA8E,CAMnF,MAAM,8BAA8BgB,EAAmD,CAErF,OAAOC,GAAuB,MAAMC,GAASF,CAAI,CAAC,CACpD,CAEA,MAAM,UAAUG,EAAiCC,EAA0D,CACzGC,GAAiB,EACjB,IAAIC,EAEA,OAAOH,GAAiB,SACtB,GAEFG,EAAQ,MAAMJ,GAASC,CAAY,EAInCG,EAAQ,MAAM,KAAK,8BAA8BH,CAAY,EAG/DG,EAAQH,EAGV,CAAC,KAAK,UAAW,KAAK,WAAY,KAAK,WAAW,EAAI,MAAMI,GAAcD,EAAOF,CAAO,EACxFI,GAAe,CACjB,CAEA,MAAM,SAAyB,CAC7B,OAAOC,GAAe,KAAK,SAAS,CACtC,CAEA,MAAM,IAAIC,EAAiCC,EAAqCP,EACzC,CACrCC,GAAiB,EACjB,IAAMO,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAChC,OAAO,QAAQH,CAAK,EAAE,QAAQI,GAAO,CACnC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,WAAW,QAAQD,CAAI,EAC1C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,kBAAkBD,CAAI,GAAG,EAE3CH,EAAW,KAAKpB,CAAM,EACtBqB,EAAa,KAAKG,CAAK,CACzB,CAAC,EAED,IAAMC,EAAkC,CAAC,EACnCC,EAA0B,CAAC,EACjC,OAAO,QAAQP,CAAO,EAAE,QAAQG,GAAO,CACrC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,YAAY,QAAQD,CAAI,EAC3C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,mBAAmBD,CAAI,GAAG,EAE5CE,EAAY,KAAKzB,CAAM,EACvB0B,EAAc,KAAKF,CAAK,CAC1B,CAAC,EAED,IAAMG,EAC
FP,EAAW,IAAI,CAACQ,EAAGC,IAAMvC,GAAqBsC,EAAG,IAAM,UAAU,KAAK,WAAWP,EAAaQ,CAAC,CAAC,CAAC,GAAG,CAAC,EACnGC,EAAUL,EAAY,IACxB,CAACG,EAAGC,IAAMD,EAAItC,GAAqBsC,EAAG,IAAM,WAAW,KAAK,YAAYF,EAAcG,CAAC,CAAC,CAAC,GAAG,EAAI,IAAI,EAElGE,EAAU,MAAMC,GAAI,KAAK,UAAWX,EAAcM,EAAQD,EAAeI,EAASlB,CAAO,EAEzFqB,EAAuC,CAAC,EAC9C,QAASJ,EAAI,EAAGA,EAAIE,EAAQ,OAAQF,IAClCI,EAAU,KAAK,YAAYP,EAAcG,CAAC,CAAC,CAAC,EAAIJ,EAAYI,CAAC,GAAKtC,GAAqBwC,EAAQF,CAAC,CAAC,EAEnG,OAAAb,GAAe,EACRiB,CACT,CAEA,gBAAuB,CAEvB,CAEA,cAAqB,CACdC,GAAa,KAAK,SAAS,CAClC,CACF,IC9HA,IAeaC,GAiDAC,GAhEbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAQaP,GAAkB,IAAY,CAoBzC,IAnBI,OAAOQ,GAAI,KAAK,aAAgB,UAAYA,GAAI,KAAK,YAAc,KACrEA,GAAI,KAAK,YAAc,GAGrBA,GAAI,KAAK,OAAS,IAEpB,QAAQ,KACJ,8HACyE,EAG3E,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,YAAe,UAAY,CAAC,OAAO,UAAUA,GAAI,KAAK,UAAU,GAAKA,GAAI,KAAK,YAAc,EAY9G,GAAI,OAAO,KAAS,KAAe,CAAC,KAAK,oBACvCA,GAAI,KAAK,WAAa,MACjB,CACL,IAAMC,EACF,OAAO,UAAc,IAAc,GAAQ,SAAS,EAAE,KAAK,EAAE,OAAS,UAAU,oBACpFD,GAAI,KAAK,WAAa,KAAK,IAAI,EAAG,KAAK,MAAMC,GAAsB,GAAK,CAAC,CAAC,CAC5E,CASJ,EAEaR,GAAN,KAAuD,CAS5D,MAAM,KAAKS,EAAoC,CAE7CV,GAAgB,EAGhB,MAAMW,GAAmC,EAGzC,MAAMC,GAAgBF,CAAW,CACnC,CAKA,MAAM,8BAA8BG,EAAiCC,EAChC,CACnC,IAAMC,EAAU,IAAIC,GACpB,aAAMD,EAAQ,UAAUF,EAAcC,CAAO,EACtC,QAAQ,QAAQC,CAAO,CAChC,CACF,IC7FA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,KAAA,IAIaA,GAJbC,GAAAC,EAAA,kBAGAC,KACaH,GAAc,IAAII,KCK/BC,KACAA,KAGAA,KCPO,IAAMC,GAAU,SDKvB,IAAOC,GAAQC,GAUe,CAC5B,IAAMC,EAA4C,cAAoC,YAGpFC,GAAgB,SAAUD,EAAa,CAAC,EACxCC,GAAgB,QAASD,EAAa,CAAC,EAEzCC,GAAgB,MAAOD,EAAa,EAAE,EACtCC,GAAgB,OAAQD,EAAa,EAAE,CACzC,CAEA,OAAO,eAAeE,GAAI,SAAU,MAAO,CAAC,MAAOC,GAAS,WAAY,EAAI,CAAC", - "names": ["backends", "backendsSortedByPriority", "registerBackend", "tryResolveAndInitializeBackend", "resolveBackendAndExecutionProviders", "init_backend_impl", "__esmMin", "name", "backend", "priority", "currentBackend", "i", "backendName", "backendInfo", "isInitializing", "e", "options", "eps", "backendHints", "backendNames", "errors", "availableBackendNames", "resolveResult", "err", "filteredEps", "target", "prop", "init_backend", "__esmMin", "init_backend_impl", "version", "init_version", "__esmMin", "logLevelValue", "env", "init_env_impl", "__esmMin", "init_version", "version", "value", "env", "init_env", "__esmMin", "init_env_impl", "tensorToDataURL", "tensorToImageData", "init_tensor_conversion_impl", "__esmMin", "tensor", "options", "canvas", "pixels2DContext", "width", "height", "inputformat", "norm", "normMean", "normBias", "stride", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "j", "R", "G", "B", "A", "image", "channels", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "bufferToTensor", "tensorFromImage", "tensorFromTexture", "tensorFromGpuBuffer", "tensorFromPinnedBuffer", "init_tensor_factory_impl", "__esmMin", "init_tensor_impl", "buffer", "options", "height", "width", "norm", "normMean", "normBias", "inputformat", "outputformat", "stride", "float32Data", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "Tensor", "image", "isHTMLImageEle", "isImageDataEle", "isImageBitmap", "isString", "data", "bufferToTensorOptions", "createCanvas", "createCanvasContext", "canvas", "pixels2DContext", "tempCanvas", "resolve", "reject", "context", "newImage", "img", "texture", "download", "dispose", "dims", "gpuBuffer", "dataType", "type", 
"NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "isTypedArrayChecked", "checkTypedArray", "init_tensor_impl_type_mapping", "__esmMin", "isBigInt64ArrayAvailable", "isBigUint64ArrayAvailable", "isFloat16ArrayAvailable", "calculateSize", "tensorReshape", "init_tensor_utils_impl", "__esmMin", "init_tensor_impl", "dims", "size", "i", "dim", "tensor", "Tensor", "Tensor", "init_tensor_impl", "__esmMin", "init_tensor_conversion_impl", "init_tensor_factory_impl", "init_tensor_impl_type_mapping", "init_tensor_utils_impl", "arg0", "arg1", "arg2", "checkTypedArray", "type", "dims", "expectedTypedArrayConstructor", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "data", "maybeDims", "typedArrayConstructor", "firstElementType", "mappedType", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "size", "calculateSize", "image", "options", "tensorFromImage", "texture", "tensorFromTexture", "gpuBuffer", "tensorFromGpuBuffer", "buffer", "tensorFromPinnedBuffer", "tensorToDataURL", "tensorToImageData", "releaseData", "tensorReshape", "Tensor", "init_tensor", "__esmMin", "init_tensor_impl", "TRACE", "TRACE_FUNC", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "init_trace", "__esmMin", "init_env_impl", "deviceType", "label", "env", "msg", "extraMsg", "stack", "hasTraceFunc", "i", "InferenceSession", "init_inference_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "init_trace", "_InferenceSession", "handler", "feeds", "arg1", "arg2", "TRACE_FUNC_BEGIN", "fetches", "options", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "TRACE_FUNC_END", "arg0", "arg3", "filePathOrUint8Array", "buffer", "byteOffset", "byteLength", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "InferenceSession", "init_inference_session", "__esmMin", "init_inference_session_impl", "init_tensor_conversion", "__esmMin", "init_tensor_factory", "__esmMin", "init_onnx_model", "__esmMin", "init_onnx_value", "__esmMin", "noBackendErrMsg", "TrainingSession", "init_training_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "_TrainingSession", "handler", "hasOptimizerModel", "hasEvalModel", "trainingOptions", "sessionOptions", "evalModel", "optimizerModel", "options", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "inputNames", "outputNames", "feeds", "arg1", "arg2", "fetches", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "trainableOnly", "array", "paramsSize", "TrainingSession", "init_training_session", "__esmMin", "init_training_session_impl", "esm_exports", "__export", "InferenceSession", "TRACE", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "Tensor", "TrainingSession", "env", "registerBackend", "init_esm", "__esmMin", "init_backend", "init_env", "init_inference_session", "init_tensor", "init_tensor_conversion", "init_tensor_factory", "init_trace", "init_onnx_model", "init_onnx_value", "init_training_session", "init_wasm_utils_env", "__esmMin", "main_exports", "__export", "main_default", "WORKER_NAME", "isProxyWorker", "init_main", "__esmMin", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "ev", "type", "message", "initializeWebAssembly", "initRuntime", "err", "epName", "env", "initEp", "buffer", "bufferData", "copyFromExternalBuffer", "model", "options", "createSession", "sessionMetadata", "releaseSession", "sessionId", "inputIndices", "inputs", "outputIndices", "run", "outputs", "o", "extractTransferableBuffers", 
"endProfiling", "urlOverride", "scriptSrc", "ort_wasm_simd_threaded_jsep_exports", "__export", "ort_wasm_simd_threaded_jsep_default", "r", "e", "init_ort_wasm_simd_threaded_jsep", "__esmMin", "k", "R", "Y", "n", "P", "a", "D", "F", "B", "s", "I", "u", "U", "f", "G", "l", "c", "d", "b", "g", "m", "p", "h", "v", "t", "Ye", "o", "i", "Qe", "y", "w", "A", "_", "C", "O", "T", "S", "E", "Dn", "x", "wn", "He", "Tr", "Ar", "M", "mn", "Sr", "hn", "On", "Re", "An", "or", "N", "H", "j", "$", "z", "L", "V", "q", "J", "X", "Q", "Z", "K", "rr", "er", "tr", "nr", "ar", "sr", "Er", "Mr", "Rr", "Pr", "Ir", "Ur", "jr", "$r", "Gr", "zr", "Yr", "Lr", "Vr", "qr", "Jr", "Zr", "oe", "se", "me", "he", "ve", "ye", "we", "Ee", "xe", "Me", "ke", "Pe", "Fe", "Be", "Ie", "$e", "rt", "tt", "ot", "le", "st", "ct", "dt", "bt", "gt", "mt", "pt", "ht", "vt", "yt", "wt", "At", "_t", "St", "Wt", "Et", "xt", "Mt", "Ht", "Rt", "Pt", "Dt", "Ft", "It", "Ut", "Bt", "jt", "$t", "Lt", "Vt", "Qt", "Zt", "mr", "Kt", "rn", "en", "nn", "un", "fn", "ir", "Hr", "Ke", "ur", "fr", "lr", "pr", "Cr", "_r", "hr", "yr", "cr", "dr", "xn", "En", "_n", "Wn", "br", "gr", "vr", "wr", "Cn", "Or", "Sn", "Mn", "Wr", "xr", "kr", "Nr", "Dr", "Fr", "Br", "Xr", "Qr", "Kr", "re", "ee", "te", "ne", "ae", "ie", "ue", "fe", "ce", "de", "be", "ge", "pe", "yn", "vn", "Ae", "_e", "Ce", "Oe", "Te", "Se", "We", "Ne", "Tn", "De", "dn", "Ue", "gn", "je", "Ge", "ze", "Le", "Ve", "qe", "Je", "Xe", "Ze", "Hn", "bn", "kn", "Rn", "et", "nt", "at", "it", "ut", "ft", "lt", "Ct", "Ot", "Tt", "kt", "Nt", "Gt", "zt", "Yt", "qt", "Jt", "Xt", "tn", "an", "on", "sn", "ln", "cn", "Nn", "pn", "Pn", "scriptSrc", "origin", "isSameOrigin", "preload", "createProxyWorker", "importProxyWorker", "embeddedWasmModule", "importWasmModule", "init_wasm_utils_import", "__esmMin", "init_wasm_utils_env", "filename", "prefixOverride", "baseUrl", "absoluteUrl", "blob", "url", "urlOverride", "isMultiThreaded", "wasm", "initialized", "initializing", "aborted", "isMultiThreadSupported", "isSimdSupported", "initializeWebAssembly", "getInstance", "init_wasm_factory", "__esmMin", "init_wasm_utils_import", "flags", "timeout", "numThreads", "multiThreadSupported", "wasmPaths", "wasmPrefixOverride", "mjsPathOverrideFlag", "mjsPathOverride", "wasmPathOverrideFlag", "wasmPathOverride", "objectUrl", "ortWasmFactory", "importWasmModule", "isTimeout", "tasks", "resolve", "reject", "config", "fileName", "scriptDirectory", "module", "what", "allocWasmString", "iterateExtraOptions", "checkLastError", "init_wasm_utils", "__esmMin", "init_wasm_factory", "data", "allocs", "wasm", "getInstance", "dataLength", "dataOffset", "options", "prefix", "seen", "handler", "key", "value", "name", "message", "stack", "paramsOffset", "errorCode", "errorMessagePointer", "errorMessage", "setRunOptions", "init_run_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "options", "wasm", "getInstance", "runOptionsHandle", "allocs", "runOptions", "tagDataOffset", "allocWasmString", "checkLastError", "iterateExtraOptions", "key", "value", "keyDataOffset", "valueDataOffset", "e", "alloc", "getGraphOptimzationLevel", "getExecutionMode", "appendDefaultOptions", "setExecutionProviders", "setSessionOptions", "init_session_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "graphOptimizationLevel", "executionMode", "options", "session", "ep", "sessionOptionsHandle", "executionProviders", "allocs", "epName", "deviceType", "keyDataOffset", "allocWasmString", "valueDataOffset", "getInstance", "checkLastError", "webgpuOptions", 
"epNameDataOffset", "wasm", "sessionOptions", "logIdDataOffset", "logSeverityLevel", "logVerbosityLevel", "optimizedModelFilePathOffset", "name", "value", "nameOffset", "iterateExtraOptions", "key", "e", "alloc", "tensorDataTypeStringToEnum", "tensorDataTypeEnumToString", "getTensorElementSize", "tensorTypeToTypedArrayConstructor", "logLevelStringToEnum", "isGpuBufferSupportedType", "dataLocationStringToEnum", "init_wasm_common", "__esmMin", "type", "typeProto", "dateType", "logLevel", "location", "loadFile", "init_wasm_utils_load_file", "__esmMin", "init_wasm_utils_env", "file", "readFile", "e", "createReadStream", "stream", "chunks", "chunk", "response", "contentLengthHeader", "fileSize", "reader", "buffer", "pages", "offset", "done", "value", "chunkSize", "logLevelPrefix", "doLog", "configLogLevel", "debug", "configureLogger", "LOG", "LOG_DEBUG", "init_log", "__esmMin", "init_wasm_common", "level", "message", "$configLogLevel", "$debug", "logLevel", "msg", "messageLevel", "logLevelStringToEnum", "configLevel", "args", "createView", "init_tensor_view", "__esmMin", "init_wasm_common", "dataBuffer", "type", "tensorTypeToTypedArrayConstructor", "init_types", "__esmMin", "bucketFreelist", "bucketArr", "calcNormalizedBufferSize", "calcBucketBufferSize", "guid", "createNewGpuDataId", "downloadGpuData", "GpuDataManagerImpl", "createGpuDataManager", "init_gpu_data_manager", "__esmMin", "init_log", "init_types", "size", "idx", "sizeForBucket", "backend", "gpuBuffer", "originalSize", "getTargetBuffer", "bufferSize", "gpuReadBuffer", "commandEncoder", "arrayBuffer", "targetBuffer", "key", "id", "data", "srcArrayBuffer", "srcOffset", "srcLength", "gpuDataCache", "gpuBufferForUploading", "LOG_DEBUG", "sourceId", "destinationId", "sourceGpuDataCache", "destinationGpuDataCache", "buffer", "previousBuffer", "usage", "isStorage", "isUniform", "buffers", "gpuData", "cachedData", "maxInFreeList", "freelist", "capturedBuffers", "storage", "sessionId", "pendingBuffers", "args", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "MatMulUtil", "BroadcastUtil", "ShapeUtil", "PoolConvUtil", "GemmUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", "a", "b", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "i", "aLen", "bLen", "max", "shape", "finalShape", "inputRank", "finalRank", "_ShapeUtil", "dims", "size", "rank", "newDims", "axis", "start", "end", "strides", "tensorRank", "axes", "x", "perm", "v", "pad", "shape1", "shape2", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "dim", "isChannelLast", "autoPad", "outputDims", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "leftShape", "transLeft", "rightShape", "transRight", "biasShape", "M", "K", "N", "kDim", "WORKGROUP_SIZE", "getWgslMappedType", "tensorTypeToWsglStorageType", "tensorTypeToWsglValueType", "createTensorShapeVariables", "getMaxComponents", "fillVector", "castToF32", "sumVector", "getElementAt", "createIndicesHelper", "inputVariable", "outputVariable", "internalVariable", "ShaderHelperImpl", "createShaderHelper", "getBroadcastDims", "init_common", "__esmMin", "init_wasm_common", "init_util", "type", "components", "mappedType", "dims", "programUniforms", "dim", "ShapeUtil", "size", "dataType", "value", "name", "index", "length", "tensorType", "shapeOrRank", "usage", "useUniform", "rank", "rankIdentity", "indicesType", "valueType", 
"storageType", "normalizeDim", "implementationUsed", "uniformPrefix", "shape", "strides", "o2iSnippet", "i", "offsetToIndicesImplementation", "offsetToIndices", "varOffset", "offsets", "indicesToOffsetImplementation", "indicesToOffset", "varIndices", "indices", "init", "indicesGet", "idx", "indicesSet", "broadcastedIndicesToOffsetImplementation", "broadcastedIndicesToOffset", "output", "implKey", "setByOffset", "offset", "getByOffset", "getByIndicesImplementation", "getImplementation", "functionParams", "dimsParams", "get", "normalizedIndices", "getByIndices", "setByIndicesImplementation", "setImplementation", "impls", "needShapeStrides", "impl", "indicesAndValue", "normalizedDispatchGroup", "limits", "workgroupSize", "workgroupSizeX", "workgroupSizeY", "workgroupSizeZ", "is1DimensionDispatch", "paramList", "globalIdxDefinition", "variable", "bindingIndex", "access", "variables", "v", "additionalUniforms", "uniformSnippets", "typeTemp", "uniformWgslTypeToDataType", "u", "dispatchGroup", "inShape", "outShape", "inRank", "validateInputs", "getAdjustedPerm", "getOutputShape", "permFunctionBody", "createTransposeProgramInfo", "transpose", "parseTransposeAttributes", "init_transpose", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "inputRank", "perm", "inputShape", "ShapeUtil", "rank", "input", "output", "reverseFunc", "inputTensor", "permAttr", "inputDataType", "outputShape", "outputVariable", "inputVariable", "getShaderSource", "wgslType", "workgroupSize", "shaderHelper", "outputSize", "createTensorShapeVariables", "context", "attributes", "createAttributeWithCacheKey", "reduceOps", "reduceSharedOps", "reduceInitValues", "reduceOutputValues", "getInnerMostAxes", "computeOutAndReduceShapes", "expandShapeToKeepDim", "areAxesInnerMostDims", "getAxesPermutation", "createReduceSharedProgramInfo", "reduceCommon", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "init_reduce_shared", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_reduce", "init_transpose", "numInnerAxes", "rank", "res", "i", "shape", "axes", "outputShape", "dim", "reduceShape", "expandShape", "shapeIdx", "axis", "name", "shaderCache", "inputs", "reduceType", "outputDataType", "inputShape", "outputSize", "ShapeUtil", "reduceSize", "input", "inputVariable", "output", "outputVariable", "workgroupSize", "sharedMemorySnippet", "shaderHelper", "context", "attributes", "updatedAttributes", "createReduceAttributesFromInputs", "updatedAxes", "_dim", "normalizeAxes", "permutedAxes", "createTransposeProgramInfo", "finalOutputShape", "validateInputs", "noOp", "createReduceProgramInfo", "createReduceAttributesFromInputs", "runReduceProgram", "reduceLogSumNaive", "reduceL1Naive", "reduceL2Naive", "reduceLogSumExpNaive", "reduceMaxNaive", "reduceMeanNaive", "reduceMinNaive", "reduceProdNaive", "reduceSumNaive", "reduceSumSquareNaive", "useNaiveReduceMethod", "reduceMean", "reduceL1", "reduceL2", "reduceLogSumExp", "reduceMax", "reduceMin", "reduceProd", "reduceSum", "reduceSumSquare", "reduceLogSum", "init_reduce", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "init_reduce_shared", "inputs", "input", "name", "shaderCache", "reduceOp", "axesInput", "outputDataType", "keepDims", "noopWithEmptyAxes", "outputShape", "inputShape", "inputRank", "axes", "ShapeUtil", 
"reduceOnAllAxes", "d", "i", "outputRank", "outputSize", "shaderHelper", "idxCopy", "inputVariable", "output", "outputVariable", "ops", "reduceOps", "k", "l", "createTensorShapeVariables", "attributes", "v", "createAttributeWithCacheKey", "context", "updatedAttributes", "_output", "idxZero", "size", "shape", "reduceSize", "dim", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "validateInputs", "argMin", "argMax", "parseArgMinMaxAttributes", "init_argminmax", "__esmMin", "init_wasm_common", "init_attribute_with_cache_key", "init_reduce", "inputs", "context", "attributes", "argMinMaxOp", "input", "output", "axes", "idxZero", "k", "createReduceProgramInfo", "createAttributeWithCacheKey", "validateAttentionInputs", "createInPlaceSoftmaxProgramInfo", "createAttentionProbsProgramInfo", "createVxAttentionScoreProgramInfo", "applyAttention", "prepare", "attention", "init_attention", "__esmMin", "init_wasm_common", "init_types", "init_common", "inputs", "attributes", "input", "weights", "bias", "maskIndex", "past", "relativePositionBias", "batchSize", "sequenceLength", "inputHiddenSize", "qHiddenSize", "kHiddenSize", "vHiddenSize", "sz", "kvSequenceLength", "pastSequenceLength", "totalSequenceLength", "maxSequenceLength", "maskType", "_context", "n", "d", "components", "getMaxComponents", "WG", "dComp", "elementsPerThread", "programUniforms", "dataType", "tensorTypeToWsglStorageType", "f32Type", "tensorTypeToWsglValueType", "getShaderSource", "shaderHelper", "inputHelper", "outputVariable", "uniforms", "context", "q", "key", "pastKey", "parameters", "probsShape", "presentKey", "presentKeyShape", "alpha", "vectorizedHeadSize", "TILE_SIZE", "dispatch", "inputDependencies", "outputs", "qInput", "inputVariable", "kInput", "inputVars", "pastKeyInput", "output", "outputVars", "probs", "v", "pastValue", "params", "nReps", "repeatedVHiddenSize", "presentValue", "presentValueShape", "outputShape", "probsHelper", "vHelper", "k", "_maskIndex", "_past", "outputCount", "inputsK", "inputsV", "M", "K", "N", "outputQ", "outputK", "outputV", "weight", "validateInputs", "createBatchNormInferenceProgramInfo", "parseBatchNormAttributes", "batchNorm", "init_batch_norm", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "checkShapeEqual", "actual", "expected", "message", "r", "v", "i", "shape", "epsilon", "spatial", "format", "yShape", "components", "getMaxComponents", "cComponents", "outputSize", "ShapeUtil", "useShapesUniforms", "shapeOrRank", "x", "inputVariable", "scale", "bias", "inputMean", "inputVar", "y", "outputVariable", "calcCOffset", "cOffset", "getInferenceModeShaderSource", "helper", "createTensorShapeVariables", "createAttributeWithCacheKey", "context", "outputCount", "updatedAttributes", "env", "validateInputs", "createBiasAddProgramInfo", "biasAdd", "init_bias_add", "__esmMin", "init_util", "init_common", "inputs", "outputShape", "channels", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "bias", "residual", "output", "outputVariable", "shaderHelper", "context", "createElementwiseProgramShader", "createElementwiseProgramInfo", "abs", "acos", "acosh", "asin", "asinh", "atan", "atanh", "parseCastAttributes", "cast", "generateClipAttributesFromInputs", "clip", "ceil", "cos", "cosh", "parseAlphaAttributes", "elu", "erfImpl", "erf", "exp", "floor", "gelu", 
"leakyRelu", "not", "neg", "reciprocal", "relu", "sigmoid", "parseHardSigmoidAttributes", "hardSigmoid", "sin", "sinh", "sqrt", "tan", "tanhExpression", "tanh", "fastGeluImpl", "fastGeluExpression", "fastGelu", "thresholdedRelu", "log", "quickGeluImpl", "quickGeluExpression", "quickgelu", "init_unary_op", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "shaderHelper", "datasize", "inputDataType", "outputDataType", "funcCall", "additionalImplementation", "vecSize", "expression", "input", "inputVariable", "output", "outputVariable", "name", "cacheKey", "ShapeUtil", "inputTensors", "context", "attributes", "createAttributeWithCacheKey", "func", "inputs", "min", "MIN_CLIP", "max", "MAX_CLIP", "clipAttributes", "dataType", "tensorTypeToWsglValueType", "a", "varType", "x", "alpha", "dType", "validateInputs", "createBiasSplitGeluProgramInfo", "biasSplitGelu", "init_bias_split_gelu", "__esmMin", "init_util", "init_common", "init_unary_op", "inputs", "outputShape", "input", "inputVariable", "bias", "output", "outputVariable", "outputSize", "ShapeUtil", "dataType", "tensorTypeToWsglStorageType", "shaderHelper", "erfImpl", "context", "createBinaryOpProgramShader", "createBinaryOpProgramInfo", "runBinaryOp", "add", "div", "equal", "mul", "pow", "sub", "greater", "less", "greaterOrEqual", "lessOrEqual", "init_binary_op", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "dimsA", "dimsB", "dimsOutput", "vectorize", "doBroadcast", "sharedDimensionDivisibleBy4", "funcCall", "typeA", "typeB", "typeOutput", "additionalImplementation", "expressionScalar", "expressionVector", "a", "b", "output", "outputVariable", "inputVariable", "assignment", "isAOneElement", "ShapeUtil", "isBOneElement", "aLastDimDivisibleBy4", "bLastDimDivisibleBy4", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "name", "cacheKey", "outputDataType", "isBroadcast", "outputShape", "outputSize", "cacheKeyAux", "calculatedShape", "BroadcastUtil", "sharedDimension", "i", "dimA", "dimB", "createTensorShapeVariables", "context", "type", "validateInputs", "calculateInputIndexImpl", "assignOutputData", "createConcatProgramInfo", "concat", "parseConcatAttributes", "init_concat", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "axis", "referenceIndex", "referenceInput", "inputType", "inputRank", "input", "i", "dim", "numberOfTensors", "sizeInConcatAxisStr", "output", "codeLines", "returnSnippet", "adjustedAxis", "outputShape", "dataType", "outputSize", "ShapeUtil", "sizeInConcatAxis", "inputVars", "previousSum", "inputDependencies", "inputRanks", "programUniforms", "inputVariable", "createTensorShapeVariables", "outputVariable", "indicesAxis", "getShaderSource", "shaderHelper", "context", "attributes", "inputShape", "sum", "nonEmptyInputs", "createAttributeWithCacheKey", "getActivationSnippet", "appendActivationUniformsData", "appendActivationUniforms", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_wasm_common", "init_util", "attributes", "valueType", "baseType", "programUniform", "uniforms", "activation", "alpha", "beta", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "typeSnippet", "biasSnippet", "init_activation_util", "__esmMin", "component", "dataType", "hasBias", "utilFunctions", "init_conv_util", "__esmMin", "strideStr", "writeDataToSubAVec4Snippet", "calculateResultSnippet", "makeMatMulPackedVec4Source", "writeDataToSubASnippet", "readDataFromSubASnippet", 
"makeMatMulPackedSource", "matMulReadWriteFnSource", "createMatmulProgramInfo", "init_matmul_packed_webgpu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_fuse_utils", "init_activation_util", "transpose", "batchDims", "transposeA", "innerElementSize", "workPerThread", "workgroupSize", "type", "tileInner", "splitK", "splitedDimInner", "tileAOuter", "tileBOuter", "tileAWidth", "tileAHight", "rowPerThreadB", "sequentialAccessByThreads", "rowPerThreadA", "colPerThreadA", "matmulSnippet", "component", "hasBias", "applyActivation", "variables", "batchShapes", "isChannelsLast", "batchAShape", "batchBShape", "batchShape", "batchVariable", "aVariable", "bVariable", "outputVariable", "broadCastADims", "getBroadcastDims", "broadCastBDims", "dataType", "tensorTypeToWsglStorageType", "getAIndices", "aRank", "batchRank", "resStr", "i", "j", "getBIndices", "bRank", "typeSnippet", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "aShape", "bShape", "outerDimsA", "outerDimsB", "outerDims", "batchSize", "ShapeUtil", "dimAOuter", "dimInner", "dimBOuter", "isVec4", "elementsPerThread", "dispatch", "components", "aShapeTemp", "bShapeTemp", "outputShapeTemp", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "internalVariable", "A", "inputVariable", "B", "output", "inputVariables", "biasComponents", "uniforms", "appendActivationUniforms", "baseType", "getActivationSnippet", "declareFunctions", "conv2dCommonSnippet", "createConv2DMatMulProgramInfo", "init_conv2d_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "fitAOuter", "fitBOuter", "fitInner", "addBias", "attributes", "innerElementSizeX", "innerElementSizeW", "innerElementSize", "dataType", "getXSnippet", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readXSnippet", "typeSnippet", "sampleX", "sampleW", "resType", "aType", "bType", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileAOuter", "tileBOuter", "tileInner", "elementsSize", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "uniforms", "appendActivationUniforms", "components", "t", "tensorTypeToWsglStorageType", "declareFunctions", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "bias", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "arrayProduct", "parse3TupleParam", "getEffectiveFilterSize", "computeDefaultPad", "computeOutputShape4D", "get3DPadAndOutInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "init_conv3d_naive_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "arr", "product", "i", "param", "filterSize", "dilation", "inputShape", "fieldSize", "stride", "effectiveFieldSize", "inShape", "filterShape", "outChannels", "strides", "zeroPad", "outShape", "index", "pad", "inDepth", "inHeight", "inWidth", "strideDepth", "strideHeight", "strideWidth", "filterDepth", "filterHeight", "filterWidth", "padInfo", "outDepth", "outHeight", "outWidth", "val", "_", 
"padAlongDepth", "padAlongHeight", "padAlongWidth", "front", "back", "top", "bottom", "left", "right", "dilations", "depthwise", "dataFormat", "batchSize", "inChannels", "filterChannels", "dilationDepth", "dilationHeight", "dilationWidth", "effectiveFilterDepth", "effectiveFilterHeight", "effectiveFilterWidth", "inputs", "attributes", "outputShape", "filterDims", "pads", "isChannelsLast", "isVec4", "workGroupSize", "dispatchLayout", "dispatch", "d", "LOG_DEBUG", "innerElementSize", "outputSize", "ShapeUtil", "programUniforms", "createTensorShapeVariables", "inputDependencies", "hasBias", "getShaderSource", "shaderHelper", "uniforms", "components", "t", "tensorTypeToWsglStorageType", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "declareFunctions", "bias", "getElementAt", "createGroupedConvProgramInfo", "createGroupedConvVectorizeProgramInfo", "init_conv_grouped", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_conv", "init_fuse_utils", "inputs", "attributes", "squeezeOutputShapeFunction", "hasBias", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "isChannelLast", "outputShape", "calculateOutputShape", "outputSize", "ShapeUtil", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "x", "inputVariable", "w", "inputVars", "uniforms", "appendActivationUniforms", "components", "getMaxComponents", "outputNumber", "outputShapeInShader", "xNumber", "createNaiveMatmulProgramInfo", "validateInputs", "matMul", "init_matmul", "__esmMin", "init_wasm_common", "init_util", "init_matmul_packed_webgpu", "init_common", "init_fuse_utils", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "isChannelsLast", "aShape", "bShape", "M", "N", "K", "components", "getMaxComponents", "aComponents", "outputNumber", "outputSize", "ShapeUtil", "hasBias", "outerDims", "outputShapeInShader", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "batchDims", "internalVariable", "a", "inputVariable", "b", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "inputVariables", "processBias", "biasComponents", "outerDimsA", "outerDimsB", "broadCastADims", "getBroadcastDims", "broadCastBDims", "uniforms", "appendActivationUniforms", "getIndices", "variable", "broadCastDims", "rank", "name", "batchRank", "resStr", "i", "j", "calcResult", "calcStr", "context", "BroadcastUtil", "createMatmulProgramInfo", "calculateOutputShape", "weightTransposeAttribute", "validateInputs", "getAdjustedConvAttributes", "parseConvAttributes", "conv2d", "conv1d", "conv3d", "conv", "init_conv", "__esmMin", "init_util", "init_conv2d_mm_webgpu", "init_conv3d_naive_webgpu", "init_matmul_packed_webgpu", "init_conv_grouped", "init_fuse_utils", "init_matmul", "init_transpose", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "isChannelLast", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputShape", "inputs", "attributes", "dataChannel", "filterInChannel", "pads", "PoolConvUtil", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "autoPad", "group", "wIsConst", "context", "adjustedAttributes", "isChannelsLast", "transposedWeight", "createTransposeProgramInfo", 
"convInputs", "createGroupedConvVectorizeProgramInfo", "createGroupedConvProgramInfo", "hasBias", "inputHeight", "inputWidth", "inputChannels", "weightHeight", "weightWidth", "outHeight", "outWidth", "sameSize", "batch", "xReshaped", "wReshaped", "matmulOutputShape", "matmulInputs", "sharedDim", "N", "K", "createNaiveMatmulProgramInfo", "createMatmulProgramInfo", "sequentialAccessByThreads", "dimAOuter", "dimBOuter", "dimInner", "createConv2DMatMulProgramInfo", "convInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "conv2dTransposeCommonSnippet", "createConv2DTransposeMatMulProgramInfo", "init_conv_backprop_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "addBias", "attributes", "type", "innerElementSize", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readASnippet", "sampleA", "sampleW", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileInner", "components", "filterDims", "effectiveFilterDims", "pads", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "x", "inputVariable", "w", "output", "outputVariable", "inputVariables", "declareFunctions", "bias", "uniforms", "appendActivationUniforms", "elemType", "tensorTypeToWsglStorageType", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "createConvTranspose2DOpProgramShaderSource", "createConvTranspose2DProgramInfo", "init_conv_backprop_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "shaderHelper", "inputs", "outputShape", "hasBias", "is1DimensionDispatch", "isVec4", "dataType", "uniforms", "isChannelsLast", "rowDim", "colDim", "channelDim", "workPerThread", "declareFunctions", "components", "w", "inputVariable", "dy", "inputVariables", "output", "outputVariable", "codeSnippet4", "codeSnippet", "attributes", "squeezeOutputShapeFunction", "outputSize", "ShapeUtil", "dispatch", "LOG_DEBUG", "inputDependencies", "strides", "filterDims", "dilations", "effectiveFilterDims", "pads", "group", "wShape", "inputChannelsPerGroup", "outputChannelsPerGroup", "programUniforms", "createTensorShapeVariables", "getShaderSource", "tensorTypeToWsglStorageType", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "weightTransposePerm", "convTranspose2d", "convTranspose1d", "convTranspose", "init_conv_transpose", "__esmMin", "init_conv_backprop_mm_webgpu", "init_conv_backprop_webgpu", "init_fuse_utils", "init_transpose", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "group", "strides", "isChannelLast", "outputPadding", "outputShape", "spatialRank", "updateOutputShape", "i", "batchSize", "outChannels", "j", "inSize", "attributes", "inputs", "a", "b", "isChannelsLast", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "wIsConst", "dataChannel", "filterInChannel", "featureMaps", "context", "adjustedAttributes", 
"inputChannels", "createConvTranspose2DProgramInfo", "outHeight", "outWidth", "weightHeight", "weightWidth", "dimAOuter", "dimBOuter", "dimInner", "sequentialAccessByThreads", "transposedWeight", "createTransposeProgramInfo", "convTransposeInputs", "hasBias", "createConv2DTransposeMatMulProgramInfo", "createCumsumProgramInfo", "cumsum", "parseCumSumAttributes", "init_cumsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputType", "inputShape", "axisInput", "attributes", "outputSize", "ShapeUtil", "rank", "input", "inputVariable", "output", "outputVariable", "axisValue", "axis", "getShaderSource", "shaderHelper", "index", "max", "getElementAt", "lowerLimit", "upperLimit", "createTensorShapeVariables", "context", "exclusive", "reverse", "createAttributeWithCacheKey", "validateInputs", "permFunctionBody", "createDepthToSpaceProgramInfo", "depthToSpace", "parseDepthToSpaceAttributes", "init_depth_to_space", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "perm", "rank", "input", "output", "reverseFunc", "inputTensor", "attributes", "n", "h", "w", "c", "shape", "isChannelLast", "blocksize", "isDCRmode", "reshapedInputTensor", "reshapedInputRank", "inputDataType", "reshapedInput", "inputVariable", "permedOutput", "outputVariable", "getShaderSource", "shaderHelper", "outputShape", "outputSize", "ShapeUtil", "shapeBeforePerm", "shapeAfterPerm", "createTensorShapeVariables", "context", "createAttributeWithCacheKey", "symbolPattern", "termPattern", "termPatternOnly", "lhsPattern", "lhsPatternOnly", "EinsumTerm", "EinsumEquation", "appendMax", "createEinsumProgramInfo", "einsum", "parseEinsumAttributes", "init_einsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputIndex", "symbol", "index", "value", "inputs", "equation", "lhs", "rhs", "inputTerm", "dims", "einsumTerm", "sym", "info", "dimValue", "term", "isInput", "rank", "ellipsis", "ellipsisDims", "nextDim", "indexSymbols", "i", "ellipsisDimLength", "j", "name", "inputShapes", "dataType", "einsumEquation", "outputShape", "inputVars", "inputVariable", "outputSize", "ShapeUtil", "output", "outputVariable", "uniformsSymbols", "getShaderSource", "shaderHelper", "idxCopy", "initProd", "initSum", "updateSum", "reduceOpsSetIndices", "reduceOpsLoopHeaders", "reduceOpsLoopFooters", "reduceOpCompute", "isReduceOpsWithoutLoop", "outputIndex", "indices", "reduceOps", "inputVar", "_var", "programUniformsInit", "programUniforms", "_", "createTensorShapeVariables", "acc", "inputProgramUniforms", "context", "attributes", "input", "createAttributeWithCacheKey", "validateInputs", "getAdjustedShape", "calculateOutputShape", "createExpandProgramInfo", "expand", "init_expand", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "inputShape", "shape", "shapeIndex", "inputShapeIndex", "shape1", "shape2", "diff", "i", "outputShape", "dataType", "components", "outputSize", "ShapeUtil", "getShaderSource", "shaderHelper", "input", "inputVariable", "output", "outputVariable", "assignment", "singleAssignment", "resStr", "x", "typeCast", "programUniforms", "createTensorShapeVariables", "context", "createFastGeluProgramInfo", "fastGelu", "init_fast_gelu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_unary_op", "inputTensors", "dataType", "outputSize", "ShapeUtil", "biasLength", "useVec4", "getShaderSource", "shaderHelper", "x", "inputVariable", "bias", "y", "outputVariable", 
"uniforms", "singleElementBias", "i", "biasGetExpression", "fastGeluImpl", "tensorTypeToWsglValueType", "WORKGROUP_SIZE", "fastGeluExpression", "inputs", "context", "validateInputs", "createGatherProgramInfo", "parseGatherAttributes", "gather", "init_gather", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "indicesShape", "inputRank", "axis", "ShapeUtil", "outputShape", "axisDimLimit", "components", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "data", "inputVariable", "indices", "output", "outputVariable", "calcDataIndices", "x", "indicesRank", "calcStr", "i", "j", "assignment", "singleAssignment", "resStr", "typeCast", "createAttributeWithCacheKey", "context", "validateInputs", "createGatherElementsProgramInfo", "parseGatherElementsAttributes", "gatherElements", "init_gather_elements", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "inputOutputDataType", "inputRank", "indicesShape", "indicesDataType", "axis", "ShapeUtil", "axisDimLimit", "outputShape", "outputSize", "input", "inputVariable", "indices", "output", "outputVariable", "programUniforms", "createTensorShapeVariables", "shaderHelper", "createAttributeWithCacheKey", "context", "validateInputs", "createGemmProgramInfo", "parseGemmAttributes", "gemm", "init_gemm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "aShape", "bShape", "M", "N", "K", "GemmUtil", "outputShape", "outputSize", "ShapeUtil", "programUniforms", "inputDependencies", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "line", "calculateAlpha", "a", "inputVariable", "b", "dataType", "c", "variables", "output", "outputVariable", "uniforms", "transA", "transB", "alpha", "beta", "context", "getInput", "validateInputs", "parseMultiHeadAttentionAttributes", "weightTransposeAttribute", "addBiasTranspose", "maybeTransposeToBNSHAndAddBias", "multiHeadAttention", "init_multihead_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_types", "init_attention", "init_common", "init_transpose", "inputs", "i", "ShapeUtil", "attributes", "query", "key", "value", "bias", "keyPaddingMask", "relativePositionBias", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "qkvFormat", "maskType", "maskDims", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "createAttributeWithCacheKey", "context", "qkv", "biasOffset", "outputShape", "outputSize", "programUniforms", "getShaderSource", "shaderHelper", "output", "outputVariable", "qkvInput", "inputVariable", "biasInput", "uniforms", "numHeads", "input", "reshapedInput", "createTransposeProgramInfo", "params", "kvBNSH", "Q", "applyAttention", "K", "V", "getRepeats", "validateInputs", "getOutputShape", "createTileProgramInfo", "tile", "init_tile", "__esmMin", "init_wasm_common", "init_util", "init_common", "repeatsTensorView", "inputs", "inputShape", "repeats", "outputShape", "i", "shape", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "output", "outputVariable", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "validateInputs", "createConcatProgramInfo", "parseGroupQueryAttentionAttributes", "weightTransposeAttribute", "maybeExpandAndTransposeToBNSH", 
"groupQueryAttention", "init_group_query_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_attention", "init_common", "init_multihead_attention", "init_tile", "init_transpose", "inputs", "attributes", "query", "key", "value", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "hasPastKey", "hasPastValue", "isPastkvBSNH", "qkvFormat", "maskType", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "a", "b", "dataType", "params", "outputShape", "component", "outputSize", "ShapeUtil", "presentSequenceLength", "output", "outputVariable", "inputA", "inputVariable", "inputB", "H", "dispatch", "inputDependencies", "programUniforms", "createTensorShapeVariables", "uniforms", "pastStr", "newStr", "concatStr", "getShaderSource", "shaderHelper", "createAttributeWithCacheKey", "context", "input", "pastKV", "outputIndex", "reshapedInput", "numHeads", "nReps", "createTileProgramInfo", "createTransposeProgramInfo", "Q", "maybeTransposeToBNSHAndAddBias", "K", "V", "applyAttention", "createInstanceNormProgramInfo", "computeMean", "createInstanceNormNHWCProgramInfo", "instanceNorm", "init_instance_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "xShape", "outputShape", "axis", "normCount", "ShapeUtil", "normSize", "components", "getMaxComponents", "normPackedSize", "inputShape", "inputDependencies", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "x", "inputVariable", "scale", "bias", "output", "outputVariable", "variables", "dataType", "f32Type", "workgroupSize", "uniforms", "sumVector", "context", "input", "n", "h", "c", "epsilon", "WG", "outputType", "sumCastType", "setOutputValue", "var1", "var2", "unitsOfWork", "wgSize", "meanInputDependencies", "meanProgramUniforms", "getMeanShaderSource", "inputHelper", "fillVector", "meanValues", "scaleHelper", "biasHelper", "N", "C", "H", "outputSize", "channelScaleShift", "tensorTypeToWsglStorageType", "scaleType", "scaleCastType", "outputHelper", "validateInputs", "createLayerNormProgramInfo", "layerNorm", "init_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "outputCount", "simplified", "xShape", "scale", "bias", "outputShape", "axis", "ShapeUtil", "normCount", "normSize", "scaleSize", "biasSize", "meanInvStdDevDim", "i", "components", "getMaxComponents", "inputDependencies", "programUniforms", "hasMeanDataOutput", "hasInvStdOutput", "getShaderSource", "shaderHelper", "dataType", "tensorTypeToWsglStorageType", "variables", "inputVariable", "outputVariable", "uniforms", "fillVector", "castToF32", "sumVector", "outputs", "context", "validateInputs", "createMatMulNBitsProgramInfo", "matMulNBits", "parseMatMulNBitsAttributes", "init_matmulnbits", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "a", "aRank", "nBlocksPerCol", "blobSize", "b", "ShapeUtil", "scalesShape", "zeroPointsShape", "expectedZeroPointsSize", "maxComputeWorkgroupSizes", "maxComputeWorkgroupStorageSize", "inputShape", "dimAOuter", "dimInner", "dimBOuter", "batchDims", "batchSize", "blobSizeInWords", "dataType", "outputNumber", "getMaxComponents", "aComponents", "bComponents", "elementSize", "getTensorElementSize", "workgroupOutputSize", "maxNumberOfComponents", "useBlockwiseMatMulNBits", "components", "outputShape", 
"outputSize", "programUniforms", "inputShapeTemp", "bShape", "createTensorShapeVariables", "outputShapeTemp", "getShaderSource", "shaderHelper", "inputRank", "inputVariable", "scales", "inputVariables", "zeroPoints", "outputRank", "output", "outputVariable", "uniforms", "tensorTypeToWsglStorageType", "qDqDataType", "processOneBlock", "_", "i", "updateZeroPointIndex", "context", "createAttributeWithCacheKey", "validateInputs", "getPadConstant", "getPadReflect", "getPadEdge", "getPadWrap", "getPadSnippet", "createPadProgramInfo", "createPadAttributesFromInputs", "pad", "init_pad", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "validPads", "output", "inputRank", "padsLength", "block", "i", "getElementAt", "attributes", "outputShape", "ShapeUtil", "inputDims", "outputSize", "programUniforms", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "outputVariable", "input", "inputVariable", "dataType", "padSnippet", "uniforms", "bigInt64Pads", "value", "updatePads", "axes", "v", "pads", "context", "updatedAttributes", "validateInputs", "getAdjustedPoolAttributesAndOutputShape", "getUniformAndPadInfo", "generatePoolingCode", "createShaderKeyFromAttributes", "createAveragePoolShaderKeyFromAttributes", "createMaxPoolShaderKeyFromAttributes", "parsePoolCommonAttributes", "createAveragePoolProgramInfo", "parseAveragePoolAttributes", "averagePool", "globalPoolAttributes", "parseGlobalAveragePoolAttributes", "globalAveragePool", "createMaxPoolProgramInfo", "maxPool", "parseMaxPoolAttributes", "parseGlobalMaxPoolAttributes", "globalMaxPool", "init_pool", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_common", "inputs", "env", "input", "attributes", "isGlobalOperator", "isChannelsLast", "inputShapeAsChannelFirst", "hasDilations", "kernelShape", "strides", "dilations", "pads", "PoolConvUtil", "outputShapeAsChannelFirst", "newAttributes", "outputShapeAsChannelLast", "outputShape", "outputSize", "ShapeUtil", "kernelSize", "programUniforms", "uniforms", "kw", "sw", "pwStart", "pwEnd", "pwStartEndNotZero", "phStartEndNotZero", "kh", "sh", "phStart", "phEnd", "kernelStrides", "hasPads", "sum", "cur", "shaderHelper", "x", "rank", "outputShapeRank", "op1", "op2", "start", "dataType", "output", "outputVariable", "codeW", "codeH", "codeHEnd", "dimIdxW", "dimIdxH", "stridesRank", "padsRank", "padCode", "getElementAt", "name", "adjustedAttributes", "inputVariable", "createTensorShapeVariables", "inputDependencies", "countIncludePad", "attr", "averagePoolAttributes", "context", "format", "storageOrder", "maxPoolAttributes", "validateInputsContent", "createRangeProgramInfo", "range", "init_range", "__esmMin", "init_esm", "init_wasm_common", "init_common", "start", "limit", "delta", "sameStartLimit", "increasingRangeNegativeStep", "decreasingRangePositiveStep", "dataType", "numElements", "outputShape", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "output", "outputVariable", "wgslType", "uniforms", "context", "env", "validateScales", "updateScales", "validateInputs", "getOriginalCoordinateFromResizedCoordinate", "getNearestPixelFromOriginal", "updateRoI", "initOutputShape", "adjustOutputShape", "calculateOriginalIndicesFromOutputIndices", "calculateInputIndicesFromOutputIndices", "checkInputIndices", "setChannelAndBatchIndices", "bilinearInterpolation", "bicubicInterpolation", "trilinearInterpolation", "createResizeProgramInfo", "getOpsetVersionFromCustomDataBuffer", "resize", 
"parseResizeAttributes", "init_resize", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "scales", "attributes", "value", "axes", "rank", "newScales", "index", "inputs", "opsetVersion", "sizes", "roi", "roiInputIndex", "scalesInputIndex", "sizesInputIndex", "coordinateTransferMode", "dType", "nearestMode", "roiTmp", "roiLocal", "v", "i", "inputShape", "outputShape", "scaleInPolicy", "adjustedOutputShape", "output", "scalesLength", "roiLength", "getElementAt", "input", "useExtrapolation", "channelIdx", "batchIdx", "spacialDims", "extrapolationValue", "heightIdx", "widthIdx", "cubicCoeffA", "excludeOutside", "is2D", "isNchw", "createCubicInterpolationFunction", "idx", "direction", "depthIdx", "inputTensor", "scalesInput", "roiInput", "outputVariable", "inputVariable", "outputSize", "ShapeUtil", "noScale", "d", "dataType", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "customDataBuffer", "antialias", "coordinateTransformMode", "keepAspectRatioPolicy", "mode", "createAttributeWithCacheKey", "validateInputs", "createRotaryEmbeddingProgramInfo", "rotaryEmbedding", "init_rotary_embedding", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "input", "positionIds", "cosCache", "sinCache", "numHeads", "rotaryEmbeddingDim", "ShapeUtil", "batchSize", "sequenceLength", "maxSequenceLength", "hiddenSize", "headSize", "interleaved", "scale", "batchStride", "halfRotaryEmbeddingDim", "globalShape", "globalStrides", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "inputVariable", "output", "outputVariable", "WORKGROUP_SIZE", "createAttributeWithCacheKey", "context", "validateInputs", "createSkipLayerNormProgramInfo", "skipLayerNorm", "init_skip_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "input", "skip", "gamma", "hiddenSize", "sequenceLength", "beta", "bias", "attributes", "outputCount", "isTraining", "simplified", "inputShape", "inputSize", "ShapeUtil", "outputShape", "outputSize", "meanInvStdDevDim", "hasBetaInput", "hasBiasInput", "hasMeanOutput", "hasInvStdDevOutput", "hasInputSkipBiasSumOutput", "workgroupSize", "components", "getMaxComponents", "programUniforms", "getShaderSource", "shaderHelper", "uniformsArray", "variables", "inputVariable", "outputVariable", "dataType", "tensorTypeToWsglStorageType", "vecDataType", "castToF32", "sumVector", "outputs", "_input", "_index", "context", "validateInputs", "readInput", "createSliceAttributesFromInputs", "fixStartEndValues", "calculateInputIndicesImpl", "createSliceProgramInfo", "slice", "parseSliceAttributes", "init_slice", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "_", "idx", "input", "v", "starts", "ends", "axes", "createAttributeWithCacheKey", "value", "index", "inputShape", "steps", "newValue", "output", "getElementAt", "inputSize", "ShapeUtil", "step", "start", "i", "end", "signs", "array", "numSteps", "newEnd", "newStart", "outputShape", "axis", "outputTensorInfo", "outputVariable", "inputVariable", "outputSize", "uniforms", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "validateInputs", "createSoftmaxProgramInfo", "softmax", "parseSoftmaxAttributes", "init_softmax", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "input", "attributes", 
"shape", "outputSize", "ShapeUtil", "WG", "axis", "cols", "rows", "components", "getMaxComponents", "packedCols", "maxVector", "name", "x", "inputVariable", "output", "outputVariable", "valueType", "threadMaxDecl", "tensorTypeToWsglStorageType", "getShaderSource", "shaderHelper", "sumVector", "context", "createAttributeWithCacheKey", "validateInputs", "createSplitAttributesFromInputs", "calculateOutputIndexImpl", "writeBufferDataImpl", "createSplitProgramInfo", "split", "parseSplitAttributes", "init_split", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "splitSizes", "numOutputs", "v", "createAttributeWithCacheKey", "numberOfTensors", "getElementAt", "outputs", "codeLines", "i", "returnSnippet", "inputShape", "inputSize", "ShapeUtil", "dataType", "axis", "input", "inputVariable", "sizeInSplitAxis", "outputsTensorInfo", "outputShapes", "previousSum", "programUniforms", "outputShape", "outputVariable", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "createWhereOpProgramShader", "createWhereOpProgramInfo", "where", "init_where", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "inputs", "dimsOutput", "isBroadcast", "typeOutput", "output", "outputVariable", "inputVariable", "b", "c", "assignment", "expression", "a", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "expressionC", "dimsA", "dimsB", "dimsC", "outputDataType", "ShapeUtil", "outputShape", "outputSize", "calculatedShape", "BroadcastUtil", "vecSize", "createTensorShapeVariables", "context", "WEBGPU_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_argminmax", "init_attention", "init_batch_norm", "init_bias_add", "init_bias_split_gelu", "init_binary_op", "init_concat", "init_conv", "init_conv_transpose", "init_cumsum", "init_depth_to_space", "init_einsum", "init_expand", "init_fast_gelu", "init_gather", "init_gather_elements", "init_gemm", "init_group_query_attention", "init_instance_norm", "init_layer_norm", "init_matmul", "init_matmulnbits", "init_multihead_attention", "init_pad", "init_pool", "init_range", "init_reduce", "init_resize", "init_rotary_embedding", "init_skip_layer_norm", "init_slice", "init_softmax", "init_split", "init_tile", "init_transpose", "init_unary_op", "init_where", "abs", "acos", "acosh", "add", "argMax", "parseArgMinMaxAttributes", "argMin", "asin", "asinh", "atan", "atanh", "attention", "averagePool", "parseAveragePoolAttributes", "batchNorm", "biasAdd", "biasSplitGelu", "cast", "parseCastAttributes", "ceil", "clip", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "cosh", "cumsum", "parseCumSumAttributes", "depthToSpace", "parseDepthToSpaceAttributes", "div", "einsum", "parseEinsumAttributes", "elu", "parseAlphaAttributes", "equal", "erf", "exp", "expand", "fastGelu", "floor", "gather", "parseGatherAttributes", "gatherElements", "parseGatherElementsAttributes", "gelu", "gemm", "parseGemmAttributes", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "parseGlobalMaxPoolAttributes", "greater", "greaterOrEqual", "groupQueryAttention", "parseGroupQueryAttentionAttributes", "hardSigmoid", "parseHardSigmoidAttributes", "instanceNorm", "layerNorm", "leakyRelu", "less", "lessOrEqual", "log", "matMul", "matMulNBits", "parseMatMulNBitsAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "multiHeadAttention", 
"parseMultiHeadAttentionAttributes", "neg", "not", "pad", "pow", "quickgelu", "range", "reciprocal", "reduceMin", "reduceMean", "reduceMax", "reduceSum", "reduceProd", "reduceL1", "reduceL2", "reduceLogSum", "reduceLogSumExp", "reduceSumSquare", "relu", "resize", "parseResizeAttributes", "rotaryEmbedding", "sigmoid", "sin", "sinh", "slice", "parseSliceAttributes", "skipLayerNorm", "split", "parseSplitAttributes", "sqrt", "softmax", "parseSoftmaxAttributes", "sub", "tan", "tanh", "thresholdedRelu", "tile", "transpose", "parseTransposeAttributes", "where", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_log", "init_common", "backend", "key", "artifact", "buildArtifact", "inputs", "outputs", "dispatchGroup", "uniformBufferBinding", "TRACE_FUNC_BEGIN", "device", "computePassEncoder", "entries", "input", "output", "bindGroup", "commandInfo", "TRACE_FUNC_END", "programInfo", "normalizedDispatchGroupSize", "extensions", "shaderHelper", "createShaderHelper", "userCode", "code", "shaderModule", "LOG_DEBUG", "computePipeline", "x", "y", "z", "limitPerDimension", "size", "dispatchAverage", "getProgramInputTensorInfoDependencyKey", "getProgramInfoUniqueKey", "AdapterInfoImpl", "WebGpuBackend", "init_backend_webgpu", "__esmMin", "init_esm", "init_wasm_common", "init_log", "init_tensor_view", "init_gpu_data_manager", "init_op_resolve_rules", "init_program_manager", "inputTensors", "inputDependencies", "inputInfos", "i", "type", "rank", "dims", "programInfo", "is1DimensionDispatch", "key", "adapterInfo", "architecture", "vendor", "data", "env", "adapter", "requiredFeatures", "deviceDescriptor", "createGpuDataManager", "ProgramManager", "configureLogger", "ev", "commandEncoder", "computePassDescriptor", "TRACE_FUNC_BEGIN", "queryReadBuffer", "mappedData", "pendingKernels", "pendingKernelInfo", "kernelId", "kernelInfo", "kernelType", "kernelName", "programName", "inputTensorViews", "outputTensorViews", "startTimeU64", "endTimeU64", "startTime", "endTime", "value", "tensorDataTypeEnumToString", "inputShapes", "outputShapes", "TRACE", "TRACE_FUNC_END", "program", "outputIndices", "createKernelOutput", "createIntermediateOutput", "outputCount", "inputDatas", "gpuData", "outputs", "dispatchGroup", "programUniforms", "validatedOutputIndices", "_", "outputDatas", "isTemporary", "isPersistent", "tensorView", "persistentData", "uniformBufferBinding", "currentOffset", "offsets", "v", "sizeOfElement", "sizeOfVecOrMat", "baseAlignment", "elementPerVecOrMat", "maxAlignmentOfField", "arrayBuffer", "offset", "uniformBufferData", "normalizedDispatchGroup", "artifact", "LOG_DEBUG", "uniform", "actualType", "actualLength", "length", "gpuDataId", "src", "dst", "getTargetBuffer", "size", "ptr", "attribute", "op", "WEBGPU_OP_RESOLVE_RULES", "context", "errors", "kernel", "kernelEntry", "attributes", "useErrorScope", "e", "err", "sessionId", "index", "buffer", "sessionInputOutputMapping", "previousBuffer", "id", "bufferInfo", "gpuBuffer", "downloadGpuData", "createView", "sessionCommandList", "sessionPendingKernels", "computePassEncoder", "command", "init_exports", "__export", "init", "TensorViewImpl", "ComputeContextImpl", "init_init", "__esmMin", "init_wasm_common", "init_backend_webgpu", "init_log", "init_util", "_TensorViewImpl", "module", "dataType", "data", "dims", "elementCount", "ShapeUtil", "newDims", "backend", "contextDataOffset", "heapU32", "dataIndex", "inputCount", "inputs", "i", "dim", "d", "program", "inputsOutputsMapping", "mappedInputs", "outputIndices", "createKernelOutput", "index", 
"createTemporaryOutput", "elementSize", "getTensorElementSize", "bufferSize", "gpuDataId", "stack", "offset", "e", "name", "env", "gpuAdapter", "jsepInit", "WebGpuBackend", "size", "ptr", "src", "dst", "isSourceGpu", "LOG_DEBUG", "dataOffset", "kernelType", "kernelId", "attribute", "kernel", "sessionHandle", "errors", "context", "initOrt", "initRuntime", "initEp", "activeSessions", "getSessionInputOutputCount", "copyFromExternalBuffer", "createSession", "releaseSession", "prepareInputOutputTensor", "run", "endProfiling", "extractTransferableBuffers", "init_wasm_core_impl", "__esmMin", "init_run_options", "init_session_options", "init_wasm_common", "init_wasm_factory", "init_wasm_utils", "init_wasm_utils_load_file", "numThreads", "loggingLevel", "getInstance", "checkLastError", "env", "logLevelStringToEnum", "epName", "initJsep", "adapter", "powerPreference", "forceFallbackAdapter", "sessionHandle", "wasm", "stack", "dataOffset", "model", "modelDataOffset", "modelData", "options", "modelDataLength", "sessionOptionsHandle", "ioBindingHandle", "allocs", "inputNamesUTF8Encoded", "outputNamesUTF8Encoded", "setSessionOptions", "loadingPromises", "file", "path", "loadFile", "data", "provider", "webnnOptions", "context", "gpuDevice", "deviceType", "inputCount", "outputCount", "enableGraphCapture", "inputNames", "outputNames", "outputPreferredLocations", "i", "name", "nameString", "location", "bindingState", "l", "dataLocationStringToEnum", "e", "buf", "alloc", "sessionId", "session", "ioBindingState", "tensor", "tensorHandles", "index", "dataType", "dims", "rawData", "dataByteLength", "gpuBuffer", "elementSizeInBytes", "getTensorElementSize", "tensorDataTypeStringToEnum", "a", "b", "registerBuffer", "dataIndex", "allocWasmString", "dimsOffset", "dimIndex", "d", "inputIndices", "inputTensors", "outputIndices", "outputTensors", "inputOutputBound", "runOptionsHandle", "runOptionsAllocs", "inputTensorHandles", "outputTensorHandles", "inputOutputAllocs", "beforeRunStack", "inputValuesOffset", "inputNamesOffset", "outputValuesOffset", "outputNamesOffset", "setRunOptions", "inputValuesIndex", "inputNamesIndex", "outputValuesIndex", "outputNamesIndex", "handle", "outputPreferredLocationsEncoded", "errorCode", "output", "beforeGetTensorDataStack", "tensorDataOffset", "keepOutputTensor", "type", "tensorDataIndex", "dimsLength", "size", "tensorDataTypeEnumToString", "preferredLocation", "stringData", "offset", "maxBytesToRead", "getBuffer", "elementSize", "isGpuBufferSupportedType", "typedArrayConstructor", "tensorTypeToTypedArrayConstructor", "v", "p", "profileFileName", "tensors", "buffers", "isProxy", "proxyWorker", "initializing", "initialized", "aborted", "temporaryObjectUrl", "initWasmCallbacks", "queuedCallbacks", "enqueueCallbacks", "ensureWorker", "onProxyWorkerMessage", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "copyFromExternalBuffer", "createSession", "releaseSession", "run", "endProfiling", "init_proxy_wrapper", "__esmMin", "init_esm", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "env", "type", "callbacks", "queue", "ev", "resolve", "reject", "importProxyWorker", "objectUrl", "worker", "message", "e", "initializeWebAssembly", "initRuntime", "epName", "initEp", "buffer", "model", "options", "transferable", "sessionId", "inputIndices", "inputs", "outputIndices", "outputs", "t", "serializableInputs", "extractTransferableBuffers", "encodeTensorMetadata", "decodeTensorMetadata", "OnnxruntimeWebAssemblySessionHandler", "init_session_handler_inference", 
"__esmMin", "init_esm", "init_proxy_wrapper", "init_wasm_common", "init_wasm_utils_env", "init_wasm_utils_load_file", "tensor", "getName", "Tensor", "dataType", "isGpuBufferSupportedType", "gpuBuffer", "download", "dispose", "path", "copyFromExternalBuffer", "loadFile", "pathOrBuffer", "options", "TRACE_FUNC_BEGIN", "model", "createSession", "TRACE_FUNC_END", "releaseSession", "feeds", "fetches", "inputArray", "inputIndices", "kvp", "name", "index", "outputArray", "outputIndices", "inputs", "t", "i", "outputs", "results", "run", "resultMap", "endProfiling", "initializeFlags", "OnnxruntimeWebAssemblyBackend", "init_backend_wasm", "__esmMin", "init_esm", "init_proxy_wrapper", "init_session_handler_inference", "init_wasm_utils_import", "env", "numCpuLogicalCores", "backendName", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "pathOrBuffer", "options", "handler", "OnnxruntimeWebAssemblySessionHandler", "backend_wasm_inference_exports", "__export", "wasmBackend", "init_backend_wasm_inference", "__esmMin", "init_backend_wasm", "OnnxruntimeWebAssemblyBackend", "init_esm", "version", "lib_default", "esm_exports", "wasmBackend", "registerBackend", "env", "version"] + "sourcesContent": ["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend} from './backend.js';\nimport {InferenceSession} from './inference-session.js';\n\ninterface BackendInfo {\n backend: Backend;\n priority: number;\n\n initPromise?: Promise;\n initialized?: boolean;\n aborted?: boolean;\n error?: string;\n}\n\nconst backends: Map = new Map();\nconst backendsSortedByPriority: string[] = [];\n\n/**\n * Register a backend.\n *\n * @param name - the name as a key to lookup as an execution provider.\n * @param backend - the backend object.\n * @param priority - an integer indicating the priority of the backend. Higher number means higher priority. if priority\n * < 0, it will be considered as a 'beta' version and will not be used as a fallback backend by default.\n *\n * @ignore\n */\nexport const registerBackend = (name: string, backend: Backend, priority: number): void => {\n if (backend && typeof backend.init === 'function' && typeof backend.createInferenceSessionHandler === 'function') {\n const currentBackend = backends.get(name);\n if (currentBackend === undefined) {\n backends.set(name, {backend, priority});\n } else if (currentBackend.priority > priority) {\n // same name is already registered with a higher priority. 
skip registeration.\n return;\n } else if (currentBackend.priority === priority) {\n if (currentBackend.backend !== backend) {\n throw new Error(`cannot register backend \"${name}\" using priority ${priority}`);\n }\n }\n\n if (priority >= 0) {\n const i = backendsSortedByPriority.indexOf(name);\n if (i !== -1) {\n backendsSortedByPriority.splice(i, 1);\n }\n\n for (let i = 0; i < backendsSortedByPriority.length; i++) {\n if (backends.get(backendsSortedByPriority[i])!.priority <= priority) {\n backendsSortedByPriority.splice(i, 0, name);\n return;\n }\n }\n backendsSortedByPriority.push(name);\n }\n return;\n }\n\n throw new TypeError('not a valid backend');\n};\n\n/**\n * Try to resolve and initialize a backend.\n *\n * @param backendName - the name of the backend.\n * @returns the backend instance if resolved and initialized successfully, or an error message if failed.\n */\nconst tryResolveAndInitializeBackend = async(backendName: string): Promise => {\n const backendInfo = backends.get(backendName);\n if (!backendInfo) {\n return 'backend not found.';\n }\n\n if (backendInfo.initialized) {\n return backendInfo.backend;\n } else if (backendInfo.aborted) {\n return backendInfo.error!;\n } else {\n const isInitializing = !!backendInfo.initPromise;\n try {\n if (!isInitializing) {\n backendInfo.initPromise = backendInfo.backend.init(backendName);\n }\n await backendInfo.initPromise;\n backendInfo.initialized = true;\n return backendInfo.backend;\n } catch (e) {\n if (!isInitializing) {\n backendInfo.error = `${e}`;\n backendInfo.aborted = true;\n }\n return backendInfo.error!;\n } finally {\n delete backendInfo.initPromise;\n }\n }\n};\n\n/**\n * Resolve execution providers from the specific session options.\n *\n * @param options - the session options object.\n * @returns a promise that resolves to a tuple of an initialized backend instance and a session options object with\n * filtered EP list.\n *\n * @ignore\n */\nexport const resolveBackendAndExecutionProviders = async(options: InferenceSession.SessionOptions):\n Promise<[backend: Backend, options: InferenceSession.SessionOptions]> => {\n // extract backend hints from session options\n const eps = options.executionProviders || [];\n const backendHints = eps.map(i => typeof i === 'string' ? i : i.name);\n const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;\n\n // try to resolve and initialize all requested backends\n let backend: Backend|undefined;\n const errors = [];\n const availableBackendNames = new Set();\n for (const backendName of backendNames) {\n const resolveResult = await tryResolveAndInitializeBackend(backendName);\n if (typeof resolveResult === 'string') {\n errors.push({name: backendName, err: resolveResult});\n } else {\n if (!backend) {\n backend = resolveResult;\n }\n if (backend === resolveResult) {\n availableBackendNames.add(backendName);\n }\n }\n }\n\n // if no backend is available, throw error.\n if (!backend) {\n throw new Error(`no available backend found. ERR: ${errors.map(e => `[${e.name}] ${e.err}`).join(', ')}`);\n }\n\n // for each explicitly requested backend, if it's not available, output warning message.\n for (const {name, err} of errors) {\n if (backendHints.includes(name)) {\n // eslint-disable-next-line no-console\n console.warn(`removing requested execution provider \"${\n name}\" from session options because it is not available: ${err}`);\n }\n }\n\n const filteredEps = eps.filter(i => availableBackendNames.has(typeof i === 'string' ? 
i : i.name));\n\n return [\n backend, new Proxy(options, {\n get: (target, prop) => {\n if (prop === 'executionProviders') {\n return filteredEps;\n }\n return Reflect.get(target, prop);\n }\n })\n ];\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession} from './training-session.js';\n\n/**\n * @ignore\n */\nexport declare namespace SessionHandler {\n type FeedsType = {[name: string]: OnnxValue};\n type FetchesType = {[name: string]: OnnxValue | null};\n type ReturnType = {[name: string]: OnnxValue};\n}\n\n/**\n * Represents shared SessionHandler functionality\n *\n * @ignore\n */\ninterface SessionHandler {\n dispose(): Promise;\n\n readonly inputNames: readonly string[];\n readonly outputNames: readonly string[];\n}\n\n/**\n * Represent a handler instance of an inference session.\n *\n * @ignore\n */\nexport interface InferenceSessionHandler extends SessionHandler {\n startProfiling(): void;\n endProfiling(): void;\n\n run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n}\n\n/**\n * Represent a handler instance of a training inference session.\n *\n * @ignore\n */\nexport interface TrainingSessionHandler extends SessionHandler {\n readonly evalInputNames: readonly string[];\n readonly evalOutputNames: readonly string[];\n\n lazyResetGrad(): Promise;\n runTrainStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n runOptimizerStep(options: InferenceSession.RunOptions): Promise;\n runEvalStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n\n getParametersSize(trainableOnly: boolean): Promise;\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n getContiguousParameters(trainableOnly: boolean): Promise;\n}\n\n/**\n * Represent a backend that provides implementation of model inferencing.\n *\n * @ignore\n */\nexport interface Backend {\n /**\n * Initialize the backend asynchronously. Should throw when failed.\n */\n init(backendName: string): Promise;\n\n createInferenceSessionHandler(uriOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n\n createTrainingSessionHandler?\n (checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,\n evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,\n options: InferenceSession.SessionOptions): Promise;\n}\n\nexport {registerBackend} from './backend-impl.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from './env.js';\nimport {version} from './version.js';\n\ntype LogLevelType = Env['logLevel'];\n\nlet logLevelValue: Required = 'warning';\n\nexport const env: Env = {\n wasm: {} as Env.WebAssemblyFlags,\n webgl: {} as Env.WebGLFlags,\n webgpu: {} as Env.WebGpuFlags,\n versions: {common: version},\n\n set logLevel(value: LogLevelType) {\n if (value === undefined) {\n return;\n }\n if (typeof value !== 'string' || ['verbose', 'info', 'warning', 'error', 'fatal'].indexOf(value) === -1) {\n throw new Error(`Unsupported logging level: ${value}`);\n }\n logLevelValue = value;\n },\n get logLevel(): Required {\n return logLevelValue;\n },\n};\n\n// set property 'logLevel' so that they can be correctly transferred to worker by `postMessage()`.\nObject.defineProperty(env, 'logLevel', {enumerable: true});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env as envImpl} from './env-impl.js';\n\nexport declare namespace Env {\n export type WasmPathPrefix = string;\n export interface WasmFilePaths {\n /**\n * Specify the override path for the main .wasm file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .wasm file is:\n * - `ort-wasm-simd-threaded.wasm` for default build\n * - `ort-wasm-simd-threaded.jsep.wasm` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.wasm` for training build\n */\n wasm?: URL|string;\n /**\n * Specify the override path for the main .mjs file.\n *\n * This path should be an absolute path.\n *\n * If not modified, the filename of the .mjs file is:\n * - `ort-wasm-simd-threaded.mjs` for default build\n * - `ort-wasm-simd-threaded.jsep.mjs` for JSEP build (with WebGPU and WebNN)\n * - `ort-training-wasm-simd-threaded.mjs` for training build\n */\n mjs?: URL|string;\n }\n export type WasmPrefixOrFilePaths = WasmPathPrefix|WasmFilePaths;\n export interface WebAssemblyFlags {\n /**\n * set or get number of thread(s). If omitted or set to 0, number of thread(s) will be determined by system. If set\n * to 1, no worker thread will be spawned.\n *\n * This setting is available only when WebAssembly multithread feature is available in current context.\n *\n * @defaultValue `0`\n */\n numThreads?: number;\n\n /**\n * set or get a boolean value indicating whether to enable SIMD. If set to false, SIMD will be forcely disabled.\n *\n * This setting is available only when WebAssembly SIMD feature is available in current context.\n *\n * @deprecated This property is deprecated. Since SIMD is supported by all major JavaScript engines, non-SIMD\n * build is no longer provided. This property will be removed in future release.\n * @defaultValue `true`\n */\n simd?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @deprecated Use `env.trace` instead. If `env.trace` is set, this property will be ignored.\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Set or get a number specifying the timeout for initialization of WebAssembly backend, in milliseconds. A zero\n * value indicates no timeout is set.\n *\n * @defaultValue `0`\n */\n initTimeout?: number;\n\n /**\n * Set a custom URL prefix to the .wasm/.mjs files, or an object of overrides for both .wasm/.mjs file. 
The override\n * path should be an absolute path.\n */\n wasmPaths?: WasmPrefixOrFilePaths;\n\n /**\n * Set or get a boolean value indicating whether to proxy the execution of main thread to a worker thread.\n *\n * @defaultValue `false`\n */\n proxy?: boolean;\n }\n\n export interface WebGLFlags {\n /**\n * Set or get the WebGL Context ID (webgl or webgl2).\n *\n * @defaultValue `'webgl2'`\n */\n contextId?: 'webgl'|'webgl2';\n /**\n * Get the WebGL rendering context.\n */\n readonly context: WebGLRenderingContext;\n /**\n * Set or get the maximum batch size for matmul. 0 means to disable batching.\n *\n * @deprecated\n */\n matmulMaxBatchSize?: number;\n /**\n * Set or get the texture cache mode.\n *\n * @defaultValue `'full'`\n */\n textureCacheMode?: 'initializerOnly'|'full';\n /**\n * Set or get the packed texture mode\n *\n * @defaultValue `false`\n */\n pack?: boolean;\n /**\n * Set or get whether enable async download.\n *\n * @defaultValue `false`\n */\n async?: boolean;\n }\n\n export interface WebGpuProfilingDataV1TensorMetadata {\n dims: readonly number[];\n dataType: string;\n }\n export interface WebGpuProfilingDataV1 {\n version: 1;\n inputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n outputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n kernelId: number;\n kernelType: string;\n kernelName: string;\n programName: string;\n startTime: number;\n endTime: number;\n }\n\n export type WebGpuProfilingData = WebGpuProfilingDataV1;\n\n export interface WebGpuFlags {\n /**\n * Set or get the profiling mode.\n *\n * @deprecated Use `env.webgpu.profiling.mode` instead. If `env.webgpu.profiling.mode` is set, this property will be\n * ignored.\n */\n profilingMode?: 'off'|'default';\n /**\n * Set or get the profiling configuration.\n */\n profiling?: {\n /**\n * Set or get the profiling mode.\n *\n * @defaultValue `'off'`\n */\n mode?: 'off'|'default';\n\n /**\n * Set or get a callback function when a profiling data is received. If not set, the profiling data will be\n * printed to console.\n */\n ondata?: (data: WebGpuProfilingData) => void;\n };\n /**\n * Set or get the power preference.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n powerPreference?: 'low-power'|'high-performance';\n /**\n * Set or get the force fallback adapter flag.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n forceFallbackAdapter?: boolean;\n /**\n * Set or get the adapter for WebGPU.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as the GPU adapter for the underlying WebGPU backend to create GPU device.\n *\n * If this property is not set, it will be available to get after the first WebGPU inference session is created. 
The\n * value will be the GPU adapter that created by the underlying WebGPU backend.\n *\n * When use with TypeScript, the type of this property is `GPUAdapter` defined in \"@webgpu/types\".\n * Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType}\n */\n adapter: unknown;\n /**\n * Get the device for WebGPU.\n *\n * This property is only available after the first WebGPU inference session is created.\n *\n * When use with TypeScript, the type of this property is `GPUDevice` defined in \"@webgpu/types\".\n * Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType} for more details about why not use types defined in \"@webgpu/types\".\n */\n readonly device: unknown;\n /**\n * Set or get whether validate input content.\n *\n * @defaultValue `false`\n */\n validateInputContent?: boolean;\n }\n}\n\nexport interface Env {\n /**\n * set the severity level for logging.\n *\n * @defaultValue `'warning'`\n */\n logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal';\n\n /**\n * Indicate whether run in debug mode.\n *\n * @defaultValue `false`\n */\n debug?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Get version of the current package.\n */\n readonly versions: {\n readonly common: string;\n readonly web?: string;\n readonly node?: string;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n readonly 'react-native'?: string;\n };\n\n /**\n * Represent a set of flags for WebAssembly\n */\n readonly wasm: Env.WebAssemblyFlags;\n\n /**\n * Represent a set of flags for WebGL\n */\n readonly webgl: Env.WebGLFlags;\n\n /**\n * Represent a set of flags for WebGPU\n */\n readonly webgpu: Env.WebGpuFlags;\n\n [name: string]: unknown;\n}\n\n/**\n * Represent a set of flags as a global singleton.\n */\nexport const env: Env = envImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {Tensor} from './tensor.js';\n\n/**\n * implementation of Tensor.toDataURL()\n */\nexport const tensorToDataURL = (tensor: Tensor, options?: TensorToDataUrlOptions): string => {\n const canvas = typeof document !== 'undefined' ? document.createElement('canvas') : (new OffscreenCanvas(1, 1));\n canvas.width = tensor.dims[3];\n canvas.height = tensor.dims[2];\n const pixels2DContext =\n canvas.getContext('2d') as (CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D | null);\n\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n }\n\n const inputformat = options?.format !== undefined ? 
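The `env` flags documented above (`wasm.numThreads`, `wasm.proxy`, `wasm.wasmPaths`, `webgpu.powerPreference`, `logLevel`, …) are meant to be set before the first inference session is created. A minimal configuration sketch, assuming this bundle is consumed through the standard `onnxruntime-web` entry point:

```ts
// Illustrative only; the import path is an assumption about the consuming package.
import * as ort from 'onnxruntime-web';

// Worker-thread count for the WASM backend; 0 lets the runtime decide.
ort.env.wasm.numThreads = 4;
// Proxy execution of the main thread to a worker thread.
ort.env.wasm.proxy = true;
// Override where the .wasm/.mjs artifacts are loaded from (absolute prefix).
ort.env.wasm.wasmPaths = '/assets/dist/';
// Must be one of 'verbose' | 'info' | 'warning' | 'error' | 'fatal'.
ort.env.logLevel = 'warning';
// Adapter hint; only effective before the first WebGPU session is created.
ort.env.webgpu.powerPreference = 'high-performance';
```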
options.format : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 0];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n // Default pointer assignments\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < height; i++) {\n for (let j = 0; j < width; j++) {\n const R = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n const G = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n const B = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n const A = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n // eslint-disable-next-line @typescript-eslint/restrict-plus-operands\n pixels2DContext.fillStyle = 'rgba(' + R + ',' + G + ',' + B + ',' + A + ')';\n pixels2DContext.fillRect(j, i, 1, 1);\n }\n }\n if ('toDataURL' in canvas) {\n return canvas.toDataURL();\n } else {\n throw new Error('toDataURL is not supported');\n }\n } else {\n throw new Error('Can not access image data');\n }\n};\n\n/**\n * implementation of Tensor.toImageData()\n */\nexport const tensorToImageData = (tensor: Tensor, options?: TensorToImageDataOptions): ImageData => {\n const pixels2DContext = typeof document !== 'undefined' ?\n document.createElement('canvas').getContext('2d') :\n new OffscreenCanvas(1, 1).getContext('2d') as OffscreenCanvasRenderingContext2D;\n let image: ImageData;\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n let channels: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[1];\n channels = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n channels = tensor.dims[1];\n }\n const inputformat = options !== undefined ? (options.format !== undefined ? 
options.format : 'RGB') : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 255];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n if (options !== undefined) {\n if (options.format !== undefined && (channels === 4 && options.format !== 'RGBA') ||\n (channels === 3 && (options.format !== 'RGB' && options.format !== 'BGR'))) {\n throw new Error('Tensor format doesn\\'t match input tensor dims');\n }\n }\n\n // Default pointer assignments\n const step = 4;\n let rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n image = pixels2DContext.createImageData(width, height);\n\n for (let i = 0; i < height * width;\n rImagePointer += step, gImagePointer += step, bImagePointer += step, aImagePointer += step, i++) {\n image.data[rImagePointer] = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n image.data[gImagePointer] = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n image.data[bImagePointer] = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n image.data[aImagePointer] = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n }\n\n } else {\n throw new Error('Can not access image data');\n }\n return image;\n};\n", "// Copyright (c) Microsoft Corporation. 
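The conversion helpers above assume NCHW layout by default and map each channel as `(value - norm.bias) * norm.mean`, so with the default `mean` of 255 the tensor data is expected to lie in [0, 1]. A hedged usage sketch, where `tensor` is a placeholder `'float32'` image tensor with dims `[1, 3, H, W]`:

```ts
// Render the tensor as a data URL (e.g. for an <img> element).
const dataUrl = tensor.toDataURL({ format: 'RGB', norm: { mean: 255, bias: 0 } });

// Or produce an ImageData object and draw it onto a canvas 2D context.
const imageData = tensor.toImageData({ format: 'RGB', norm: { mean: 255, bias: 0 } });
document.querySelector('canvas')?.getContext('2d')?.putImageData(imageData, 0, 0);
```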
All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsDimensions, OptionsFormat, OptionsNormalizationParameters, OptionsTensorFormat, OptionsTensorLayout, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\ninterface BufferToTensorOptions extends OptionsDimensions, OptionsTensorLayout, OptionsNormalizationParameters,\n OptionsFormat, OptionsTensorFormat {}\n\n/**\n * Create a new tensor object from image object\n *\n * @param buffer - Extracted image buffer data - assuming RGBA format\n * @param imageFormat - input image configuration - required configurations height, width, format\n * @param tensorFormat - output tensor configuration - Default is RGB format\n */\nexport const bufferToTensor = (buffer: Uint8ClampedArray|undefined, options: BufferToTensorOptions): Tensor => {\n if (buffer === undefined) {\n throw new Error('Image buffer must be defined');\n }\n if (options.height === undefined || options.width === undefined) {\n throw new Error('Image height and width must be defined');\n }\n if (options.tensorLayout === 'NHWC') {\n throw new Error('NHWC Tensor layout is not supported yet');\n }\n\n const {height, width} = options;\n\n const norm = options.norm ?? {mean: 255, bias: 0};\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean![0], norm.mean![1], norm.mean![2], norm.mean![3] ?? 255];\n }\n\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias![0], norm.bias![1], norm.bias![2], norm.bias![3] ?? 0];\n }\n\n const inputformat = options.format !== undefined ? options.format : 'RGBA';\n // default value is RGBA since imagedata and HTMLImageElement uses it\n\n const outputformat =\n options.tensorFormat !== undefined ? (options.tensorFormat !== undefined ? options.tensorFormat : 'RGB') : 'RGB';\n const stride = height * width;\n const float32Data = outputformat === 'RGBA' ? 
new Float32Array(stride * 4) : new Float32Array(stride * 3);\n\n // Default pointer assignments\n let step = 4, rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGB') {\n step = 3;\n rImagePointer = 0;\n gImagePointer = 1;\n bImagePointer = 2;\n aImagePointer = -1;\n }\n\n // Updating the pointer assignments based on the output tensor format\n if (outputformat === 'RGBA') {\n aTensorPointer = stride * 3;\n } else if (outputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n } else if (outputformat === 'BGR') {\n bTensorPointer = 0;\n gTensorPointer = stride;\n rTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < stride;\n i++, rImagePointer += step, bImagePointer += step, gImagePointer += step, aImagePointer += step) {\n float32Data[rTensorPointer++] = (buffer[rImagePointer] + normBias[0]) / normMean[0];\n float32Data[gTensorPointer++] = (buffer[gImagePointer] + normBias[1]) / normMean[1];\n float32Data[bTensorPointer++] = (buffer[bImagePointer] + normBias[2]) / normMean[2];\n if (aTensorPointer !== -1 && aImagePointer !== -1) {\n float32Data[aTensorPointer++] = (buffer[aImagePointer] + normBias[3]) / normMean[3];\n }\n }\n\n // Float32Array -> ort.Tensor\n const outputTensor = outputformat === 'RGBA' ? new Tensor('float32', float32Data, [1, 4, height, width]) :\n new Tensor('float32', float32Data, [1, 3, height, width]);\n return outputTensor;\n};\n\n/**\n * implementation of Tensor.fromImage().\n */\nexport const tensorFromImage = async(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise => {\n // checking the type of image object\n const isHTMLImageEle = typeof (HTMLImageElement) !== 'undefined' && image instanceof HTMLImageElement;\n const isImageDataEle = typeof (ImageData) !== 'undefined' && image instanceof ImageData;\n const isImageBitmap = typeof (ImageBitmap) !== 'undefined' && image instanceof ImageBitmap;\n const isString = typeof image === 'string';\n\n let data: Uint8ClampedArray|undefined;\n let bufferToTensorOptions: BufferToTensorOptions = options ?? 
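`bufferToTensor` above fills a planar (channel-major) `Float32Array` from the interleaved RGBA buffer, mapping each 8-bit channel as `(pixel + norm.bias) / norm.mean`. A tiny worked example of that arithmetic with the default normalization:

```ts
// Per-channel mapping used by bufferToTensor, default norm {mean: 255, bias: 0}.
const norm = { mean: 255, bias: 0 };
const pixel = 128;                              // 8-bit channel value from the RGBA buffer
const value = (pixel + norm.bias) / norm.mean;  // ≈ 0.50196, stored as float32
```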
{};\n\n const createCanvas = () => {\n if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n } else if (typeof OffscreenCanvas !== 'undefined') {\n return new OffscreenCanvas(1, 1);\n } else {\n throw new Error('Canvas is not supported');\n }\n };\n const createCanvasContext = (canvas: HTMLCanvasElement|OffscreenCanvas) => {\n if (canvas instanceof HTMLCanvasElement) {\n return canvas.getContext('2d');\n } else if (canvas instanceof OffscreenCanvas) {\n return canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n } else {\n return null;\n }\n };\n // filling and checking image configuration options\n if (isHTMLImageEle) {\n // HTMLImageElement - image object - format is RGBA by default\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n let height = image.height;\n let width = image.width;\n if (options !== undefined && options.resizedHeight !== undefined && options.resizedWidth !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n if (options.tensorFormat !== undefined) {\n throw new Error('Image input config format must be RGBA for HTMLImageElement');\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n }\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n }\n\n pixels2DContext.drawImage(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isImageDataEle) {\n let height: number;\n let width: number;\n\n if (options !== undefined && options.resizedWidth !== undefined && options.resizedHeight !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n } else {\n height = image.height;\n width = image.width;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n }\n bufferToTensorOptions.format = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n\n if (options !== undefined) {\n const tempCanvas = createCanvas();\n\n tempCanvas.width = width;\n tempCanvas.height = height;\n\n const pixels2DContext = createCanvasContext(tempCanvas);\n\n if (pixels2DContext != null) {\n pixels2DContext.putImageData(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else {\n data = image.data;\n }\n } else if (isImageBitmap) {\n // ImageBitmap - image object - format must be provided by user\n if (options === undefined) {\n throw new Error('Please provide image config with format for Imagebitmap');\n }\n\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n const height = image.height;\n const width = image.width;\n pixels2DContext.drawImage(image, 0, 0, width, height);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isString) {\n return 
new Promise((resolve, reject) => {\n const canvas = createCanvas();\n const context = createCanvasContext(canvas);\n if (!image || !context) {\n return reject();\n }\n const newImage = new Image();\n newImage.crossOrigin = 'Anonymous';\n newImage.src = image;\n newImage.onload = () => {\n canvas.width = newImage.width;\n canvas.height = newImage.height;\n context.drawImage(newImage, 0, 0, canvas.width, canvas.height);\n const img = context.getImageData(0, 0, canvas.width, canvas.height);\n\n bufferToTensorOptions.height = canvas.height;\n bufferToTensorOptions.width = canvas.width;\n resolve(bufferToTensor(img.data, bufferToTensorOptions));\n };\n });\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n\n if (data !== undefined) {\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n};\n\n/**\n * implementation of Tensor.fromTexture().\n */\nexport const tensorFromTexture = (\n texture: TensorInterface.TextureType, options: TensorFromTextureOptions): Tensor => {\n const {width, height, download, dispose} = options;\n // Always assume RGBAF32. TODO: support different texture format\n const dims = [1, height, width, 4];\n return new Tensor({location: 'texture', type: 'float32', texture, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromGpuBuffer().\n */\nexport const tensorFromGpuBuffer = (\n gpuBuffer: TensorInterface.GpuBufferType, options: TensorFromGpuBufferOptions): Tensor => {\n const {dataType, dims, download, dispose} = options;\n return new Tensor({location: 'gpu-buffer', type: dataType ?? 'float32', gpuBuffer, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromPinnedBuffer().\n */\nexport const tensorFromPinnedBuffer = (\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor =>\n new Tensor({location: 'cpu-pinned', type, data: buffer, dims: dims ?? [buffer.length]});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type SupportedTypedArrayConstructors = Float32ArrayConstructor|Uint8ArrayConstructor|Int8ArrayConstructor|\n Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|Uint8ArrayConstructor|\n Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor;\nexport type SupportedTypedArray = InstanceType;\n\n// a runtime map that maps type string to TypedArray constructor. Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP = new Map([\n ['float32', Float32Array],\n ['uint8', Uint8Array],\n ['int8', Int8Array],\n ['uint16', Uint16Array],\n ['int16', Int16Array],\n ['int32', Int32Array],\n ['bool', Uint8Array],\n ['float64', Float64Array],\n ['uint32', Uint32Array],\n]);\n\n// a runtime map that maps type string to TypedArray constructor. 
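The tensor factory implementations above cover image, texture, GPU-buffer, and pinned-buffer sources. A usage sketch of the two CPU-facing factories, assuming the standard `onnxruntime-web` entry point; `imageData` and the URL are placeholders:

```ts
import { Tensor } from 'onnxruntime-web'; // assumed entry point

// From an ImageData object (RGBA input), resized and normalized per the options above.
const fromImageData = await Tensor.fromImage(imageData, {
  resizedWidth: 224,
  resizedHeight: 224,
  norm: { mean: 255, bias: 0 },
});

// From a URL (placeholder path); resolves once the image has been loaded and drawn.
const fromUrl = await Tensor.fromImage('/images/example.png');

// From a pre-allocated ("pinned") CPU buffer, avoiding an extra copy.
const pinned = Tensor.fromPinnedBuffer(
  'float32', new Float32Array(3 * 224 * 224), [1, 3, 224, 224]);
```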
Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP = new Map([\n [Float32Array, 'float32'],\n [Uint8Array, 'uint8'],\n [Int8Array, 'int8'],\n [Uint16Array, 'uint16'],\n [Int16Array, 'int16'],\n [Int32Array, 'int32'],\n [Float64Array, 'float64'],\n [Uint32Array, 'uint32'],\n]);\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// the following code allows delaying execution of BigInt/Float16Array checking. This allows lazy initialization for\n// NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP and NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, which allows BigInt/Float16Array\n// polyfill if available.\nlet isTypedArrayChecked = false;\nexport const checkTypedArray = () => {\n if (!isTypedArrayChecked) {\n isTypedArrayChecked = true;\n const isBigInt64ArrayAvailable = typeof BigInt64Array !== 'undefined' && BigInt64Array.from;\n const isBigUint64ArrayAvailable = typeof BigUint64Array !== 'undefined' && BigUint64Array.from;\n const isFloat16ArrayAvailable = typeof Float16Array !== 'undefined' && Float16Array.from;\n\n if (isBigInt64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('int64', BigInt64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigInt64Array, 'int64');\n }\n if (isBigUint64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('uint64', BigUint64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigUint64Array, 'uint64');\n }\n if (isFloat16ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Float16Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(Float16Array, 'float16');\n } else {\n // if Float16Array is not available, use 'Uint16Array' to store the data.\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Uint16Array);\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TextureConstructorParameters} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\n\n/**\n * calculate size from dims.\n *\n * @param dims the dims array. 
May be an illegal input.\n */\nexport const calculateSize = (dims: readonly unknown[]): number => {\n let size = 1;\n for (let i = 0; i < dims.length; i++) {\n const dim = dims[i];\n if (typeof dim !== 'number' || !Number.isSafeInteger(dim)) {\n throw new TypeError(`dims[${i}] must be an integer, got: ${dim}`);\n }\n if (dim < 0) {\n throw new RangeError(`dims[${i}] must be a non-negative integer, got: ${dim}`);\n }\n size *= dim;\n }\n return size;\n};\n\n/**\n * implementation of Tensor.reshape()\n */\nexport const tensorReshape = (tensor: Tensor, dims: readonly number[]): Tensor => {\n switch (tensor.location) {\n case 'cpu':\n return new Tensor(tensor.type, tensor.data, dims);\n case 'cpu-pinned':\n return new Tensor({\n location: 'cpu-pinned',\n data: tensor.data as CpuPinnedConstructorParameters['data'],\n type: tensor.type as CpuPinnedConstructorParameters['type'],\n dims,\n });\n case 'texture':\n return new Tensor({\n location: 'texture',\n texture: tensor.texture,\n type: tensor.type as TextureConstructorParameters['type'],\n dims,\n });\n case 'gpu-buffer':\n return new Tensor({\n location: 'gpu-buffer',\n gpuBuffer: tensor.gpuBuffer,\n type: tensor.type as GpuBufferConstructorParameters['type'],\n dims,\n });\n default:\n throw new Error(`tensorReshape: tensor location ${tensor.location} is not supported`);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {tensorToDataURL, tensorToImageData} from './tensor-conversion-impl.js';\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {tensorFromGpuBuffer, tensorFromImage, tensorFromPinnedBuffer, tensorFromTexture} from './tensor-factory-impl.js';\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions, TextureConstructorParameters} from './tensor-factory.js';\nimport {checkTypedArray, NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP, NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, SupportedTypedArray, SupportedTypedArrayConstructors} from './tensor-impl-type-mapping.js';\nimport {calculateSize, tensorReshape} from './tensor-utils-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\n// type aliases for those exported from Tensor interface\n\ntype TensorType = TensorInterface.Type;\ntype TensorDataType = TensorInterface.DataType;\ntype TensorDataLocation = TensorInterface.DataLocation;\ntype TensorTextureType = TensorInterface.TextureType;\ntype TensorGpuBufferType = TensorInterface.GpuBufferType;\n\n/**\n * the implementation of Tensor interface.\n *\n * @ignore\n */\nexport class Tensor implements TensorInterface {\n // #region constructors\n\n /**\n * Construct a new CPU tensor object from the given type, data and dims.\n */\n constructor(\n type: TensorType, data: TensorDataType|readonly string[]|readonly number[]|readonly boolean[],\n dims?: readonly number[]);\n /**\n * Construct a new CPU tensor object from the given data and dims. 
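`tensorReshape` above returns a new `Tensor` over the same underlying data, and the CPU path revalidates the element count via `calculateSize`. A small sketch:

```ts
// reshape() reuses the data buffer; tensors owning GPU resources are rejected.
const t = new Tensor('float32', new Float32Array([1, 2, 3, 4, 5, 6]), [2, 3]);
const r = t.reshape([3, 2]); // calculateSize([3, 2]) === 6 matches the data length
```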
Type is inferred from data.\n */\n constructor(data: TensorDataType|readonly string[]|readonly boolean[], dims?: readonly number[]);\n /**\n * Construct a new tensor object from the pinned CPU data with the given type and dims.\n *\n * Tensor's location will be set to 'cpu-pinned'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: CpuPinnedConstructorParameters);\n /**\n * Construct a new tensor object from the WebGL texture with the given type and dims.\n *\n * Tensor's location will be set to 'texture'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: TextureConstructorParameters);\n /**\n * Construct a new tensor object from the WebGPU buffer with the given type and dims.\n *\n * Tensor's location will be set to 'gpu-buffer'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: GpuBufferConstructorParameters);\n\n /**\n * implementation.\n */\n constructor(\n arg0: TensorType|TensorDataType|readonly string[]|readonly boolean[]|CpuPinnedConstructorParameters|\n TextureConstructorParameters|GpuBufferConstructorParameters,\n arg1?: TensorDataType|readonly number[]|readonly string[]|readonly boolean[], arg2?: readonly number[]) {\n // perform one-time check for BigInt/Float16Array support\n checkTypedArray();\n\n let type: TensorType;\n let dims: readonly number[];\n\n if (typeof arg0 === 'object' && 'location' in arg0) {\n //\n // constructing tensor from specific location\n //\n this.dataLocation = arg0.location;\n type = arg0.type;\n dims = arg0.dims;\n switch (arg0.location) {\n case 'cpu-pinned': {\n const expectedTypedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(type);\n if (!expectedTypedArrayConstructor) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from pinned buffer`);\n }\n if (!(arg0.data instanceof expectedTypedArrayConstructor)) {\n throw new TypeError(`buffer should be of type ${expectedTypedArrayConstructor.name}`);\n }\n this.cpuData = arg0.data;\n break;\n }\n case 'texture': {\n if (type !== 'float32') {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from texture`);\n }\n this.gpuTextureData = arg0.texture;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n case 'gpu-buffer': {\n if ((type !== 'float32' && type !== 'float16' && type !== 'int32' && type !== 'int64' && type !== 'uint32' &&\n type !== 'uint8' && type !== 'bool')) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from gpu buffer`);\n }\n this.gpuBufferData = arg0.gpuBuffer;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n default:\n throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`);\n }\n } else {\n //\n // constructing tensor of location 'cpu'\n //\n let data: TensorDataType;\n let maybeDims: typeof arg1|typeof arg2;\n // check whether arg0 is type or data\n if (typeof arg0 === 'string') {\n //\n // Override: constructor(type, data, ...)\n //\n type = arg0;\n maybeDims = arg2;\n if (arg0 === 'string') {\n // string tensor\n if (!Array.isArray(arg1)) {\n throw new TypeError('A string tensor\\'s data must be a string array.');\n }\n // we don't check whether every element in the array is string; this is too slow. 
we assume it's correct and\n // error will be populated at inference\n data = arg1;\n } else {\n // numeric tensor\n const typedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(arg0);\n if (typedArrayConstructor === undefined) {\n throw new TypeError(`Unsupported tensor type: ${arg0}.`);\n }\n if (Array.isArray(arg1)) {\n if (arg0 === 'float16' && typedArrayConstructor === Uint16Array) {\n // When no Float16Array polyfill is used, we cannot create 'float16' tensor from number array.\n //\n // Throw error here because when user try to use number array as data,\n // e.g. new Tensor('float16', [1, 2, 3, 4], dims)), it will actually call\n // Uint16Array.from(arg1) which generates wrong data.\n throw new TypeError(\n 'Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.');\n } else if (arg0 === 'uint64' || arg0 === 'int64') {\n // use 'as any' here because:\n // 1. TypeScript's check on type of 'Array.isArray()' does not work with readonly arrays.\n // see https://github.com/microsoft/TypeScript/issues/17002\n // 2. TypeScript's check on union type of '(BigInt64ArrayConstructor|BigUint64ArrayConstructor).from()'\n // does not accept parameter mapFn.\n // 3. parameters of 'SupportedTypedArrayConstructors.from()' does not match the requirement of the union\n // type.\n\n // assume 'arg1' is of type \"readonly number[]|readonly bigint[]\" here.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1, BigInt);\n } else {\n // assume 'arg1' is of type \"readonly number[]\" here.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1);\n }\n } else if (arg1 instanceof typedArrayConstructor) {\n data = arg1;\n } else {\n throw new TypeError(`A ${type} tensor's data must be type of ${typedArrayConstructor}`);\n }\n }\n } else {\n //\n // Override: constructor(data, ...)\n //\n maybeDims = arg1;\n if (Array.isArray(arg0)) {\n // only boolean[] and string[] is supported\n if (arg0.length === 0) {\n throw new TypeError('Tensor type cannot be inferred from an empty array.');\n }\n const firstElementType = typeof arg0[0];\n if (firstElementType === 'string') {\n type = 'string';\n data = arg0;\n } else if (firstElementType === 'boolean') {\n type = 'bool';\n // 'arg0' is of type 'boolean[]'. Uint8Array.from(boolean[]) actually works, but typescript thinks this is\n // wrong type. 
We use 'as any' to make it happy.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = Uint8Array.from(arg0 as any[]);\n } else {\n throw new TypeError(`Invalid element type of data array: ${firstElementType}.`);\n }\n } else {\n // get tensor type from TypedArray\n const mappedType =\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.get(arg0.constructor as SupportedTypedArrayConstructors);\n if (mappedType === undefined) {\n throw new TypeError(`Unsupported type for tensor data: ${arg0.constructor}.`);\n }\n type = mappedType;\n data = arg0 as SupportedTypedArray;\n }\n }\n\n // type and data is processed, now processing dims\n if (maybeDims === undefined) {\n // assume 1-D tensor if dims omitted\n maybeDims = [data.length];\n } else if (!Array.isArray(maybeDims)) {\n throw new TypeError('A tensor\\'s dims must be a number array');\n }\n dims = maybeDims as readonly number[];\n\n this.cpuData = data;\n this.dataLocation = 'cpu';\n }\n\n // perform check on dims\n const size = calculateSize(dims);\n // if data is on CPU, check whether data length matches tensor size\n if (this.cpuData && size !== this.cpuData.length) {\n throw new Error(`Tensor's size(${size}) does not match data length(${this.cpuData.length}).`);\n }\n\n this.type = type;\n this.dims = dims;\n this.size = size;\n }\n // #endregion\n\n // #region factory\n static async fromImage(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise {\n return tensorFromImage(image, options);\n }\n\n static fromTexture(\n texture: TensorTextureType, options: TensorFromTextureOptions): TensorInterface {\n return tensorFromTexture(texture, options);\n }\n\n static fromGpuBuffer(\n gpuBuffer: TensorGpuBufferType, options: TensorFromGpuBufferOptions): TensorInterface {\n return tensorFromGpuBuffer(gpuBuffer, options);\n }\n\n static fromPinnedBuffer(\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor {\n return tensorFromPinnedBuffer(type, buffer, dims);\n }\n\n // #endregion\n\n // #region conversions\n toDataURL(options?: TensorToDataUrlOptions): string {\n return tensorToDataURL(this, options);\n }\n\n toImageData(options?: TensorToImageDataOptions): ImageData {\n return tensorToImageData(this, options);\n }\n // #endregion\n\n // #region public fields\n readonly dims: readonly number[];\n readonly type: TensorType;\n readonly size: number;\n // #endregion\n\n // #region private fields\n\n /**\n * stores the location of the data.\n */\n private dataLocation: TensorDataLocation;\n\n /**\n * stores the data on CPU, if location is 'cpu' or 'cpu-pinned'. otherwise empty.\n */\n private cpuData?: TensorDataType;\n\n /**\n * stores the underlying texture when location is 'texture'. otherwise empty.\n */\n private gpuTextureData?: TensorTextureType;\n\n /**\n * stores the underlying GPU buffer when location is 'gpu-buffer'. 
otherwise empty.\n */\n private gpuBufferData?: TensorGpuBufferType;\n\n /**\n * stores an optional downloader function to download data from GPU to CPU.\n */\n private downloader?(): Promise;\n\n /**\n * a flag indicating whether the data is being downloaded from GPU to CPU.\n */\n private isDownloading?: boolean;\n\n /**\n * stores an optional disposer function to dispose the underlying data.\n */\n private disposer?(): void;\n // #endregion\n\n // #region properties\n get data(): TensorDataType {\n this.ensureValid();\n if (!this.cpuData) {\n throw new Error(\n 'The data is not on CPU. Use `getData()` to download GPU data to CPU, ' +\n 'or use `texture` or `gpuBuffer` property to access the GPU data directly.');\n }\n return this.cpuData;\n }\n\n get location(): TensorDataLocation {\n return this.dataLocation;\n }\n\n get texture(): TensorTextureType {\n this.ensureValid();\n if (!this.gpuTextureData) {\n throw new Error('The data is not stored as a WebGL texture.');\n }\n return this.gpuTextureData;\n }\n\n get gpuBuffer(): TensorGpuBufferType {\n this.ensureValid();\n if (!this.gpuBufferData) {\n throw new Error('The data is not stored as a WebGPU buffer.');\n }\n return this.gpuBufferData;\n }\n // #endregion\n\n // #region methods\n\n async getData(releaseData?: boolean): Promise {\n this.ensureValid();\n switch (this.dataLocation) {\n case 'cpu':\n case 'cpu-pinned':\n return this.data;\n case 'texture':\n case 'gpu-buffer': {\n if (!this.downloader) {\n throw new Error('The current tensor is not created with a specified data downloader.');\n }\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n try {\n this.isDownloading = true;\n const data = await this.downloader();\n this.downloader = undefined;\n this.dataLocation = 'cpu';\n this.cpuData = data;\n\n if (releaseData && this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n\n return data;\n\n } finally {\n this.isDownloading = false;\n }\n }\n default:\n throw new Error(`cannot get data from location: ${this.dataLocation}`);\n }\n }\n\n dispose(): void {\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n\n if (this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n this.cpuData = undefined;\n this.gpuTextureData = undefined;\n this.gpuBufferData = undefined;\n this.downloader = undefined;\n this.isDownloading = undefined;\n\n this.dataLocation = 'none';\n }\n\n // #endregion\n\n // #region tensor utilities\n private ensureValid(): void {\n if (this.dataLocation === 'none') {\n throw new Error('The tensor is disposed.');\n }\n }\n\n reshape(dims: readonly number[]): TensorInterface {\n this.ensureValid();\n if (this.downloader || this.disposer) {\n throw new Error('Cannot reshape a tensor that owns GPU resource.');\n }\n return tensorReshape(this, dims);\n }\n // #endregion\n}\n", "// Copyright (c) Microsoft Corporation. 
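The `getData()`/`dispose()` implementation above defines the download lifecycle for GPU-located tensors: the registered downloader is invoked once, the location flips to `'cpu'`, and `releaseData` optionally runs the disposer. A sketch where `gpuTensor` is a placeholder tensor created with location `'gpu-buffer'`:

```ts
console.log(gpuTensor.location);                 // 'gpu-buffer'
const cpuData = await gpuTensor.getData(true);   // downloads, releases the GPU buffer
console.log(gpuTensor.location);                 // 'cpu' after the download completes
gpuTensor.dispose();                             // location becomes 'none'; further use throws
```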
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorFactory} from './tensor-factory.js';\nimport {Tensor as TensorImpl} from './tensor-impl.js';\nimport {TypedTensorUtils} from './tensor-utils.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\n/**\n * represent a basic tensor with specified dimensions and data type.\n */\ninterface TypedTensorBase {\n /**\n * Get the dimensions of the tensor.\n */\n readonly dims: readonly number[];\n /**\n * Get the data type of the tensor.\n */\n readonly type: T;\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is not on CPU (eg. it's in the form of WebGL texture or WebGPU buffer), throw error.\n */\n readonly data: Tensor.DataTypeMap[T];\n /**\n * Get the location of the data.\n */\n readonly location: Tensor.DataLocation;\n /**\n * Get the WebGL texture that holds the tensor data.\n *\n * If the data is not on GPU as WebGL texture, throw error.\n */\n readonly texture: Tensor.TextureType;\n /**\n * Get the WebGPU buffer that holds the tensor data.\n *\n * If the data is not on GPU as WebGPU buffer, throw error.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is on CPU, returns the data immediately.\n * If the data is on GPU, downloads the data and returns the promise.\n *\n * @param releaseData - whether release the data on GPU. Ignore if data is already on CPU.\n */\n getData(releaseData?: boolean): Promise;\n\n /**\n * Dispose the tensor data.\n *\n * If the data is on CPU, remove its internal reference to the underlying data.\n * If the data is on GPU, release the data on GPU.\n *\n * After calling this function, the tensor is considered no longer valid. Its location will be set to 'none'.\n */\n dispose(): void;\n}\n\nexport declare namespace Tensor {\n interface DataTypeMap {\n float32: Float32Array;\n uint8: Uint8Array;\n int8: Int8Array;\n uint16: Uint16Array;\n int16: Int16Array;\n int32: Int32Array;\n int64: BigInt64Array;\n string: string[];\n bool: Uint8Array;\n float16: Uint16Array; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: Float64Array;\n uint32: Uint32Array;\n uint64: BigUint64Array;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n interface ElementTypeMap {\n float32: number;\n uint8: number;\n int8: number;\n uint16: number;\n int16: number;\n int32: number;\n int64: bigint;\n string: string;\n bool: boolean;\n float16: number; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: number;\n uint32: number;\n uint64: bigint;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n type DataType = DataTypeMap[Type];\n type ElementType = ElementTypeMap[Type];\n\n /**\n * supported data types for constructing a tensor from a pinned CPU buffer\n */\n export type CpuPinnedDataTypes = Exclude;\n\n /**\n * type alias for WebGL texture\n */\n export type TextureType = WebGLTexture;\n\n /**\n * supported data types for constructing a tensor from a WebGL texture\n */\n export type TextureDataTypes = 'float32';\n\n /**\n * type alias for WebGPU buffer\n *\n * The reason why we don't use type \"GPUBuffer\" defined in webgpu.d.ts from @webgpu/types is because \"@webgpu/types\"\n * requires \"@types/dom-webcodecs\" as peer dependency when using TypeScript < v5.1 and its version need to be chosen\n * carefully according to the TypeScript version being used. 
This means so far there is not a way to keep every\n * TypeScript version happy. It turns out that we will easily broke users on some TypeScript version.\n *\n * for more info see https://github.com/gpuweb/types/issues/127\n */\n export type GpuBufferType = {size: number; mapState: 'unmapped' | 'pending' | 'mapped'};\n\n /**\n * supported data types for constructing a tensor from a WebGPU buffer\n */\n export type GpuBufferDataTypes = 'float32'|'float16'|'int32'|'int64'|'uint32'|'uint8'|'bool';\n\n /**\n * represent where the tensor data is stored\n */\n export type DataLocation = 'none'|'cpu'|'cpu-pinned'|'texture'|'gpu-buffer';\n\n /**\n * represent the data type of a tensor\n */\n export type Type = keyof DataTypeMap;\n}\n\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface TypedTensor extends TypedTensorBase, TypedTensorUtils {}\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface Tensor extends TypedTensorBase, TypedTensorUtils {}\n\n/**\n * type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.\n */\nexport interface TensorConstructor extends TensorFactory {\n // #region CPU tensor - specify element type\n /**\n * Construct a new string tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'string', data: Tensor.DataTypeMap['string']|readonly string[],\n dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'bool', data: Tensor.DataTypeMap['bool']|readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new 64-bit integer typed tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(\n type: T, data: Tensor.DataTypeMap[T]|readonly bigint[]|readonly number[],\n dims?: readonly number[]): TypedTensor;\n\n /**\n * Construct a new numeric tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new>(\n type: T, data: Tensor.DataTypeMap[T]|readonly number[], dims?: readonly number[]): TypedTensor;\n // #endregion\n\n // #region CPU tensor - infer element types\n\n /**\n * Construct a new float32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float32Array, dims?: readonly number[]): TypedTensor<'float32'>;\n\n /**\n * Construct a new int8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(data: Int8Array, dims?: readonly number[]): TypedTensor<'int8'>;\n\n /**\n * Construct a new uint8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint8Array, dims?: readonly number[]): TypedTensor<'uint8'>;\n\n /**\n * Construct a new uint16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint16Array, dims?: readonly number[]): TypedTensor<'uint16'>;\n\n /**\n * Construct a new int16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int16Array, dims?: readonly number[]): TypedTensor<'int16'>;\n\n /**\n * Construct a new int32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int32Array, dims?: readonly number[]): TypedTensor<'int32'>;\n\n /**\n * Construct a new int64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigInt64Array, dims?: readonly number[]): TypedTensor<'int64'>;\n\n /**\n * Construct a new string tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly string[], dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new float64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float64Array, dims?: readonly number[]): TypedTensor<'float64'>;\n\n /**\n * Construct a new uint32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint32Array, dims?: readonly number[]): TypedTensor<'uint32'>;\n\n /**\n * Construct a new uint64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigUint64Array, dims?: readonly number[]): TypedTensor<'uint64'>;\n\n // #endregion\n\n // #region CPU tensor - fall back to non-generic tensor type declaration\n\n /**\n * Construct a new tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(type: Tensor.Type, data: Tensor.DataType|readonly number[]|readonly string[]|readonly bigint[]|readonly boolean[],\n dims?: readonly number[]): Tensor;\n\n /**\n * Construct a new tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Tensor.DataType, dims?: readonly number[]): Tensor;\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const Tensor = TensorImpl as TensorConstructor;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from './env-impl.js';\n\n/**\n * @ignore\n */\nexport const TRACE = (deviceType: string, label: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n // eslint-disable-next-line no-console\n console.timeStamp(`${deviceType}::ORT::${label}`);\n};\n\nconst TRACE_FUNC = (msg: string, extraMsg?: string) => {\n const stack = new Error().stack?.split(/\\r\\n|\\r|\\n/g) || [];\n let hasTraceFunc = false;\n for (let i = 0; i < stack.length; i++) {\n if (hasTraceFunc && !stack[i].includes('TRACE_FUNC')) {\n let label = `FUNC_${msg}::${stack[i].trim().split(' ')[1]}`;\n if (extraMsg) {\n label += `::${extraMsg}`;\n }\n TRACE('CPU', label);\n return;\n }\n if (stack[i].includes('TRACE_FUNC')) {\n hasTraceFunc = true;\n }\n }\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_BEGIN = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('BEGIN', extraMsg);\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_END = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('END', extraMsg);\n};\n", "// Copyright (c) Microsoft Corporation. 
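The `TensorConstructor` overloads declared above admit several call shapes; the values below are illustrative only. Note the float16 caveat from the implementation: without a `Float16Array` polyfill, data must be supplied as a `Uint16Array`.

```ts
const f32 = new Tensor('float32', new Float32Array([1, 2, 3, 4]), [2, 2]);
const inferred = new Tensor(new Float32Array(4), [2, 2]);      // type inferred as 'float32'
const strings = new Tensor('string', ['a', 'b'], [2]);
const bools = new Tensor([true, false]);                       // 1-D 'bool' tensor
const i64 = new Tensor('int64', [1, 2, 3]);                    // numbers converted via BigInt
const f16 = new Tensor('float16', new Uint16Array(4), [2, 2]); // number[] is rejected here
```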
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {InferenceSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSessionInterface} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from './trace.js';\n\ntype SessionOptions = InferenceSessionInterface.SessionOptions;\ntype RunOptions = InferenceSessionInterface.RunOptions;\ntype FeedsType = InferenceSessionInterface.FeedsType;\ntype FetchesType = InferenceSessionInterface.FetchesType;\ntype ReturnType = InferenceSessionInterface.ReturnType;\n\nexport class InferenceSession implements InferenceSessionInterface {\n private constructor(handler: InferenceSessionHandler) {\n this.handler = handler;\n }\n run(feeds: FeedsType, options?: RunOptions): Promise;\n run(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async run(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n TRACE_FUNC_BEGIN();\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (this.outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of this.outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSessionInterface.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of this.inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output 
names list\n if (isFetchesEmpty) {\n for (const name of this.outputNames) {\n fetches[name] = null;\n }\n }\n\n // feeds, fetches and options are prepared\n\n const results = await this.handler.run(feeds, fetches, options);\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n TRACE_FUNC_END();\n return returnValue;\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n\n static create(path: string, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: SessionOptions):\n Promise;\n static create(buffer: Uint8Array, options?: SessionOptions): Promise;\n static async create(\n arg0: string|ArrayBufferLike|Uint8Array, arg1?: SessionOptions|number, arg2?: number,\n arg3?: SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n // either load from a file or buffer\n let filePathOrUint8Array: string|Uint8Array;\n let options: SessionOptions = {};\n\n if (typeof arg0 === 'string') {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (arg0 instanceof Uint8Array) {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (\n arg0 instanceof ArrayBuffer ||\n (typeof SharedArrayBuffer !== 'undefined' && arg0 instanceof SharedArrayBuffer)) {\n const buffer = arg0;\n let byteOffset = 0;\n let byteLength = arg0.byteLength;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 === 'number') {\n byteOffset = arg1;\n if (!Number.isSafeInteger(byteOffset)) {\n throw new RangeError('\\'byteOffset\\' must be an integer.');\n }\n if (byteOffset < 0 || byteOffset >= buffer.byteLength) {\n throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);\n }\n byteLength = arg0.byteLength - byteOffset;\n if (typeof arg2 === 'number') {\n byteLength = arg2;\n if (!Number.isSafeInteger(byteLength)) {\n throw new RangeError('\\'byteLength\\' must be an integer.');\n }\n if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {\n throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);\n }\n if (typeof arg3 === 'object' && arg3 !== null) {\n options = arg3;\n } else if (typeof arg3 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'byteLength\\' must be a number.');\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);\n } else {\n throw new TypeError('Unexpected argument[0]: must be \\'path\\' or \\'buffer\\'.');\n }\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n const handler = await 
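The `run()` overload resolution above accepts feeds plus an optional fetches argument (either an array of output names or a name-to-OnnxValue/null map) and an optional options object; input and output names are validated against the model. A sketch with placeholder names `input`/`output` and a placeholder `session`:

```ts
const feeds = { input: new Tensor('float32', new Float32Array(4), [1, 4]) };

// 1. Fetch all model outputs.
const all = await session.run(feeds);

// 2. Fetch a subset of outputs by name (an optional RunOptions object may follow).
const subset = await session.run(feeds, ['output']);

// 3. Provide pre-allocated outputs; null lets the engine allocate the buffer.
const prealloc = await session.run(feeds, { output: null });
```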
backend.createInferenceSessionHandler(filePathOrUint8Array, optionsWithValidatedEPs);\n TRACE_FUNC_END();\n return new InferenceSession(handler);\n }\n\n startProfiling(): void {\n this.handler.startProfiling();\n }\n endProfiling(): void {\n this.handler.endProfiling();\n }\n\n get inputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get outputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n private handler: InferenceSessionHandler;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession as InferenceSessionImpl} from './inference-session-impl.js';\nimport {OnnxModelOptions} from './onnx-model.js';\nimport {OnnxValue, OnnxValueDataLocation} from './onnx-value.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace InferenceSession {\n // #region input/output types\n\n type OnnxValueMapType = {readonly [name: string]: OnnxValue};\n type NullableOnnxValueMapType = {readonly [name: string]: OnnxValue | null};\n\n /**\n * A feeds (model inputs) is an object that uses input names as keys and OnnxValue as corresponding values.\n */\n type FeedsType = OnnxValueMapType;\n\n /**\n * A fetches (model outputs) could be one of the following:\n *\n * - Omitted. Use model's output names definition.\n * - An array of string indicating the output names.\n * - An object that use output names as keys and OnnxValue or null as corresponding values.\n *\n * @remark\n * different from input argument, in output, OnnxValue is optional. If an OnnxValue is present it will be\n * used as a pre-allocated value by the inference engine; if omitted, inference engine will allocate buffer\n * internally.\n */\n type FetchesType = readonly string[]|NullableOnnxValueMapType;\n\n /**\n * A inferencing return type is an object that uses output names as keys and OnnxValue as corresponding values.\n */\n type ReturnType = OnnxValueMapType;\n\n // #endregion\n\n // #region session options\n\n /**\n * A set of configurations for session behavior.\n */\n export interface SessionOptions extends OnnxModelOptions {\n /**\n * An array of execution provider options.\n *\n * An execution provider option can be a string indicating the name of the execution provider,\n * or an object of corresponding type.\n */\n executionProviders?: readonly ExecutionProviderConfig[];\n\n /**\n * The intra OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n intraOpNumThreads?: number;\n\n /**\n * The inter OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n interOpNumThreads?: number;\n\n /**\n * The free dimension override.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n freeDimensionOverrides?: {readonly [dimensionName: string]: number};\n\n /**\n * The optimization level.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n graphOptimizationLevel?: 'disabled'|'basic'|'extended'|'all';\n\n /**\n * Whether enable CPU memory arena.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n enableCpuMemArena?: boolean;\n\n /**\n * Whether enable memory pattern.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n 
enableMemPattern?: boolean;\n\n /**\n * Execution mode.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n executionMode?: 'sequential'|'parallel';\n\n /**\n * Optimized model file path.\n *\n * If this setting is specified, the optimized model will be dumped. In browser, a blob will be created\n * with a pop-up window.\n */\n optimizedModelFilePath?: string;\n\n /**\n * Whether enable profiling.\n *\n * This setting is a placeholder for a future use.\n */\n enableProfiling?: boolean;\n\n /**\n * File prefix for profiling.\n *\n * This setting is a placeholder for a future use.\n */\n profileFilePrefix?: string;\n\n /**\n * Log ID.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logId?: string;\n\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Specify string as a preferred data location for all outputs, or an object that use output names as keys and a\n * preferred data location as corresponding values.\n *\n * This setting is available only in ONNXRuntime Web for WebGL and WebGPU EP.\n */\n preferredOutputLocation?: OnnxValueDataLocation|{readonly [outputName: string]: OnnxValueDataLocation};\n\n /**\n * Whether enable graph capture.\n * This setting is available only in ONNXRuntime Web for WebGPU EP.\n */\n enableGraphCapture?: boolean;\n\n /**\n * Store configurations for a session. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_session_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
Will support Node.js binding and react-native later\n *\n * @example\n * ```js\n * extra: {\n * session: {\n * set_denormal_as_zero: \"1\",\n * disable_prepacking: \"1\"\n * },\n * optimization: {\n * enable_gelu_approximation: \"1\"\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #region execution providers\n\n // Currently, we have the following backends to support execution providers:\n // Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).\n // Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.\n // Backend ONNX.js: supports 'webgl'.\n // Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).\n interface ExecutionProviderOptionMap {\n coreml: CoreMLExecutionProviderOption;\n cpu: CpuExecutionProviderOption;\n cuda: CudaExecutionProviderOption;\n dml: DmlExecutionProviderOption;\n nnapi: NnapiExecutionProviderOption;\n tensorrt: TensorRtExecutionProviderOption;\n wasm: WebAssemblyExecutionProviderOption;\n webgl: WebGLExecutionProviderOption;\n webgpu: WebGpuExecutionProviderOption;\n webnn: WebNNExecutionProviderOption;\n qnn: QnnExecutionProviderOption;\n xnnpack: XnnpackExecutionProviderOption;\n }\n\n type ExecutionProviderName = keyof ExecutionProviderOptionMap;\n type ExecutionProviderConfig =\n ExecutionProviderOptionMap[ExecutionProviderName]|ExecutionProviderOption|ExecutionProviderName|string;\n\n export interface ExecutionProviderOption {\n readonly name: string;\n }\n export interface CpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cpu';\n useArena?: boolean;\n }\n export interface CudaExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cuda';\n deviceId?: number;\n }\n export interface DmlExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'dml';\n deviceId?: number;\n }\n export interface TensorRtExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'tensorrt';\n deviceId?: number;\n }\n export interface WebAssemblyExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'wasm';\n }\n export interface WebGLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgl';\n // TODO: add flags\n }\n export interface XnnpackExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'xnnpack';\n }\n export interface WebGpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgpu';\n preferredLayout?: 'NCHW'|'NHWC';\n }\n\n // #region WebNN options\n\n interface WebNNExecutionProviderName extends ExecutionProviderOption {\n readonly name: 'webnn';\n }\n\n /**\n * Represents a set of options for creating a WebNN MLContext.\n *\n * @see https://www.w3.org/TR/webnn/#dictdef-mlcontextoptions\n */\n export interface WebNNContextOptions {\n deviceType?: 'cpu'|'gpu'|'npu';\n numThreads?: number;\n powerPreference?: 'default'|'low-power'|'high-performance';\n }\n\n /**\n * Represents a set of options for WebNN execution provider without MLContext.\n */\n export interface WebNNOptionsWithoutMLContext extends WebNNExecutionProviderName, WebNNContextOptions {\n context?: never;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext.\n *\n * When MLContext is provided, the deviceType is also required so that the WebNN EP can determine the preferred\n * channel layout.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext\n */\n export interface WebNNOptionsWithMLContext extends 
WebNNExecutionProviderName,\n Omit,\n Required> {\n context: unknown /* MLContext */;\n }\n\n /**\n * Represents a set of options for WebNN execution provider with MLContext which is created from GPUDevice.\n *\n * @see https://www.w3.org/TR/webnn/#dom-ml-createcontext-gpudevice\n */\n export interface WebNNOptionsWebGpu extends WebNNExecutionProviderName {\n context: unknown /* MLContext */;\n gpuDevice: unknown /* GPUDevice */;\n }\n\n /**\n * Options for WebNN execution provider.\n */\n export type WebNNExecutionProviderOption = WebNNOptionsWithoutMLContext|WebNNOptionsWithMLContext|WebNNOptionsWebGpu;\n\n // #endregion\n\n export interface QnnExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'qnn';\n // TODO add flags\n }\n export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'coreml';\n /**\n * The bit flags for CoreML execution provider.\n *\n * ```\n * COREML_FLAG_USE_CPU_ONLY = 0x001\n * COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002\n * COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004\n * COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008\n * COREML_FLAG_CREATE_MLPROGRAM = 0x010\n * ```\n *\n * See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.\n *\n * This flag is available only in ONNXRuntime (Node.js binding).\n */\n coreMlFlags?: number;\n /**\n * Specify whether to use CPU only in CoreML EP.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n useCPUOnly?: boolean;\n /**\n * Specify whether to enable CoreML EP on subgraph.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n enableOnSubgraph?: boolean;\n /**\n * Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n onlyEnableDeviceWithANE?: boolean;\n }\n export interface NnapiExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'nnapi';\n useFP16?: boolean;\n useNCHW?: boolean;\n cpuDisabled?: boolean;\n cpuOnly?: boolean;\n }\n // #endregion\n\n // #endregion\n\n // #region run options\n\n /**\n * A set of configurations for inference run behavior\n */\n export interface RunOptions {\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Terminate all incomplete OrtRun calls as soon as possible if true\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n terminate?: boolean;\n\n /**\n * A tag for the Run() calls using this\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n tag?: string;\n\n /**\n * Set a single run configuration entry. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_run_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
Will support Node.js binding and react-native later\n *\n * @example\n *\n * ```js\n * extra: {\n * memory: {\n * enable_memory_arena_shrinkage: \"1\",\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #endregion\n\n // #region value metadata\n\n // eslint-disable-next-line @typescript-eslint/no-empty-interface\n interface ValueMetadata {\n // TBD\n }\n\n // #endregion\n}\n\n/**\n * Represent a runtime instance of an ONNX model.\n */\nexport interface InferenceSession {\n // #region run()\n\n /**\n * Execute the model asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Execute the model asynchronously with the given feeds, fetches and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param fetches - Representation of the model output. See type description of `InferenceSession.OutputType` for\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n\n // #endregion\n\n // #region profiling\n\n /**\n * Start profiling.\n */\n startProfiling(): void;\n\n /**\n * End profiling.\n */\n endProfiling(): void;\n\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded model.\n */\n readonly inputNames: readonly string[];\n\n /**\n * Get output names of the loaded model.\n */\n readonly outputNames: readonly string[];\n\n // /**\n // * Get input metadata of the loaded model.\n // */\n // readonly inputMetadata: ReadonlyArray>;\n\n // /**\n // * Get output metadata of the loaded model.\n // */\n // readonly outputMetadata: ReadonlyArray>;\n\n // #endregion\n}\n\nexport interface InferenceSessionFactory {\n // #region create()\n\n /**\n * Create a new inference session and load model asynchronously from an ONNX model file.\n *\n * @param uri - The URI or file path of the model to load.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(uri: string, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from segment of an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param byteOffset - The 
beginning of the specified portion of the array buffer.\n * @param byteLength - The length in bytes of the array buffer.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: InferenceSession.SessionOptions):\n Promise;\n\n /**\n * Create a new inference session and load model asynchronously from a Uint8Array.\n *\n * @param buffer - A Uint8Array representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: Uint8Array, options?: InferenceSession.SessionOptions): Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const InferenceSession: InferenceSessionFactory = InferenceSessionImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsFormat, OptionsNormalizationParameters, OptionsTensorLayout} from './tensor-factory.js';\n\nexport interface TensorToDataUrlOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface TensorToImageDataOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface ConversionUtils {\n /**\n * creates a DataURL instance from tensor\n *\n * @param options - An optional object representing options for creating a DataURL instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns a DataURL string representing the image converted from tensor data\n */\n toDataURL(options?: TensorToDataUrlOptions): string;\n\n /**\n * creates an ImageData instance from tensor\n *\n * @param options - An optional object representing options for creating an ImageData instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns an ImageData instance representing the image converted from tensor data\n */\n toImageData(options?: TensorToImageDataOptions): ImageData;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor, TypedTensor} from './tensor.js';\n\nexport type ImageFormat = 'RGB'|'RGBA'|'BGR'|'RBG';\nexport type ImageTensorLayout = 'NHWC'|'NCHW';\n\n// the following region contains type definitions for constructing tensor from a specific location.\n\n// #region types for constructing a tensor from a specific location\n\n/**\n * represent common properties of the parameter for constructing a tensor from a specific location.\n */\ninterface CommonConstructorParameters extends Pick {\n /**\n * Specify the data type of the tensor.\n */\n readonly type: T;\n}\n\n/**\n * represent the parameter for constructing a tensor from a GPU resource.\n */\ninterface GpuResourceConstructorParameters {\n /**\n * an optional callback function to download data from GPU to CPU.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n download?(): Promise;\n\n /**\n * an optional callback function that will be called when the tensor is disposed.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n dispose?(): void;\n}\n\n/**\n * represent the parameter for constructing a tensor from a pinned CPU buffer\n */\nexport interface CpuPinnedConstructorParameters extends\n CommonConstructorParameters {\n /**\n * Specify the location of the data to be 'cpu-pinned'.\n */\n readonly location: 'cpu-pinned';\n /**\n * Specify the CPU pinned buffer that holds the tensor data.\n */\n readonly data: Tensor.DataTypeMap[T];\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGL texture\n */\nexport interface TextureConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'texture'.\n */\n readonly location: 'texture';\n /**\n * Specify the WebGL texture that holds the tensor data.\n */\n readonly texture: Tensor.TextureType;\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGPU buffer\n */\nexport interface GpuBufferConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'gpu-buffer'.\n */\n readonly location: 'gpu-buffer';\n /**\n * Specify the WebGPU buffer that holds the tensor data.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n}\n\n// #endregion\n\n// the following region contains type definitions of each individual options.\n// the tensor factory functions use a composition of those options as the parameter type.\n\n// #region Options fields\n\nexport interface OptionsFormat {\n /**\n * Describes the image format represented in RGBA color space.\n */\n format?: ImageFormat;\n}\n\nexport interface OptionsTensorFormat {\n /**\n * Describes the image format of the tensor.\n *\n * NOTE: this is different from option 'format'. While option 'format' represents the original image, 'tensorFormat'\n * represents the target format of the tensor. 
A transpose will be performed if they are different.\n */\n tensorFormat?: ImageFormat;\n}\n\nexport interface OptionsTensorDataType {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: 'float32'|'uint8';\n}\n\nexport interface OptionsTensorLayout {\n /**\n * Describes the tensor layout when representing data of one or more image(s).\n */\n tensorLayout?: ImageTensorLayout;\n}\n\nexport interface OptionsDimensions {\n /**\n * Describes the image height in pixel\n */\n height?: number;\n /**\n * Describes the image width in pixel\n */\n width?: number;\n}\n\nexport interface OptionResizedDimensions {\n /**\n * Describes the resized height. If omitted, original height will be used.\n */\n resizedHeight?: number;\n /**\n * Describes resized width - can be accessed via tensor dimensions as well\n */\n resizedWidth?: number;\n}\n\nexport interface OptionsNormalizationParameters {\n /**\n * Describes normalization parameters when preprocessing the image as model input.\n *\n * Data element are ranged from 0 to 255.\n */\n norm?: {\n /**\n * The 'bias' value for image normalization.\n * - If omitted, use default value 0.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n bias?: number|[number, number, number]|[number, number, number, number];\n /**\n * The 'mean' value for image normalization.\n * - If omitted, use default value 255.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n mean?: number | [number, number, number] | [number, number, number, number];\n };\n}\n\n// #endregion\n\n// #region Options composition\n\nexport interface TensorFromImageDataOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromImageElementOptions extends OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromUrlOptions extends OptionsDimensions, OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromImageBitmapOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromTextureOptions extends\n Required, OptionsFormat, GpuResourceConstructorParameters/* TODO: add more */ {}\n\nexport interface TensorFromGpuBufferOptions extends\n Pick, GpuResourceConstructorParameters {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: T;\n}\n\n// #endregion\n\n/**\n * type TensorFactory defines the factory functions of 'Tensor' to create tensor instances from existing data or\n * resources.\n */\nexport interface TensorFactory {\n /**\n * create a tensor from an ImageData object\n *\n * @param imageData - the ImageData object to create tensor from\n * @param options - An optional object representing options for creating tensor from ImageData.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n 
*/\n fromImage(imageData: ImageData, options?: TensorFromImageDataOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a HTMLImageElement object\n *\n * @param imageElement - the HTMLImageElement object to create tensor from\n * @param options - An optional object representing options for creating tensor from HTMLImageElement.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from URL\n *\n * @param urlSource - a string as a URL to the image or a data URL containing the image data.\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(urlSource: string, options?: TensorFromUrlOptions): Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from an ImageBitmap object\n *\n * @param bitmap - the ImageBitmap object to create tensor from\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(bitmap: ImageBitmap, options: TensorFromImageBitmapOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a WebGL texture\n *\n * @param texture - the WebGLTexture object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGL texture.\n *\n * The options include following properties:\n * - `width`: the width of the texture. Required.\n * - `height`: the height of the texture. Required.\n * - `format`: the format of the texture. If omitted, assume 'RGBA'.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromTexture(\n texture: Tensor.TextureType, options: TensorFromTextureOptions): TypedTensor<'float32'>;\n\n /**\n * create a tensor from a WebGPU buffer\n *\n * @param buffer - the GPUBuffer object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGPU buffer.\n *\n * The options include following properties:\n * - `dataType`: the data type of the tensor. If omitted, assume 'float32'.\n * - `dims`: the dimension of the tensor. Required.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. 
If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromGpuBuffer(\n buffer: Tensor.GpuBufferType, options: TensorFromGpuBufferOptions): TypedTensor;\n\n /**\n * create a tensor from a pre-allocated buffer. The buffer will be used as a pinned buffer.\n *\n * @param type - the tensor element type.\n * @param buffer - a TypedArray corresponding to the type.\n * @param dims - specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n *\n * @returns a tensor object\n */\n fromPinnedBuffer>(\n type: T, buffer: Tensor.DataTypeMap[T], dims?: readonly number[]): TypedTensor;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * A string that represents a file's URL or path.\n *\n * Path is vailable only in onnxruntime-node or onnxruntime-web running in Node.js.\n */\nexport type FileUrlOrPath = string;\n\n/**\n * A Blob object that represents a file.\n */\nexport type FileBlob = Blob;\n\n/**\n * A Uint8Array, ArrayBuffer or SharedArrayBuffer object that represents a file content.\n *\n * When it is an ArrayBuffer or SharedArrayBuffer, the whole buffer is assumed to be the file content.\n */\nexport type FileData = Uint8Array|ArrayBufferLike;\n\n/**\n * Represents a file that can be loaded by the ONNX Runtime JavaScript API.\n */\nexport type FileType = FileUrlOrPath|FileBlob|FileData;\n\n/**\n * Represents an external data file.\n */\nexport interface ExternalDataFileDescription {\n /**\n * Specify the external data file.\n */\n data: FileType;\n /**\n * Specify the file path.\n */\n path: string;\n}\n\n/**\n * Represents an external data file.\n *\n * When using a string, it should be a file URL or path that in the same directory as the model file.\n */\nexport type ExternalDataFileType = ExternalDataFileDescription|FileUrlOrPath;\n\n/**\n * Options for model loading.\n */\nexport interface OnnxModelOptions {\n /**\n * Specifying a list of files that represents the external data.\n */\n externalData?: readonly ExternalDataFileType[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type NonTensorType = never;\n\n/**\n * Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.\n *\n * NOTE: currently not support non-tensor\n */\nexport type OnnxValue = Tensor|NonTensorType;\n\n/**\n * Type OnnxValueDataLocation represents the location of the data of an OnnxValue.\n */\nexport type OnnxValueDataLocation = Tensor.DataLocation;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {SessionHandler, TrainingSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TrainingSession as TrainingSessionInterface, TrainingSessionCreateOptions} from './training-session.js';\n\ntype SessionOptions = InferenceSession.SessionOptions;\ntype FeedsType = InferenceSession.FeedsType;\ntype FetchesType = InferenceSession.FetchesType;\ntype ReturnType = InferenceSession.ReturnType;\ntype RunOptions = InferenceSession.RunOptions;\n\nconst noBackendErrMsg: string = 'Training backend could not be resolved. ' +\n 'Make sure you\\'re using the correct configuration & WebAssembly files.';\n\nexport class TrainingSession implements TrainingSessionInterface {\n private constructor(handler: TrainingSessionHandler, hasOptimizerModel: boolean, hasEvalModel: boolean) {\n this.handler = handler;\n this.hasOptimizerModel = hasOptimizerModel;\n this.hasEvalModel = hasEvalModel;\n }\n private handler: TrainingSessionHandler;\n private hasOptimizerModel: boolean;\n private hasEvalModel: boolean;\n\n get trainingInputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get trainingOutputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n get evalInputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalInputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n get evalOutputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalOutputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n\n static async create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: SessionOptions):\n Promise {\n const evalModel: string|Uint8Array = trainingOptions.evalModel || '';\n const optimizerModel: string|Uint8Array = trainingOptions.optimizerModel || '';\n const options: SessionOptions = sessionOptions || {};\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n if (backend.createTrainingSessionHandler) {\n const handler = await backend.createTrainingSessionHandler(\n trainingOptions.checkpointState, trainingOptions.trainModel, evalModel, optimizerModel,\n optionsWithValidatedEPs);\n return new TrainingSession(handler, !!trainingOptions.optimizerModel, !!trainingOptions.evalModel);\n } else {\n throw new Error(noBackendErrMsg);\n }\n }\n\n /**\n * Helper function for runTrainStep and future runStep methods that handles the type-narrowing conversion from\n * the given parameters to SessionHandler.FetchesType and RunOptions.\n *\n * @param inputNames the feeds object is checked that they contain all input names in the provided list of input\n * names.\n * @param outputNames the fetches object is checked that their keys match up with valid names in the list of output\n * names.\n * @param feeds the required input\n * @param arg1 narrowed & converted into the SessionHandler.FetchesType or RunOptions object\n * @param arg2 optional RunOptions object.\n * @returns\n */\n typeNarrowingForRunStep(\n inputNames: readonly string[], outputNames: readonly string[], feeds: FeedsType, arg1?: FetchesType|RunOptions,\n arg2?: 
RunOptions): [SessionHandler.FetchesType, RunOptions] {\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSession.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output names list\n if (isFetchesEmpty) {\n for (const name of outputNames) {\n fetches[name] = null;\n }\n }\n\n return [fetches, options];\n }\n\n /**\n * Helper method for runTrainStep and any other runStep methods. 
Takes the ReturnType result from the SessionHandler\n * and changes it into a map of Tensors.\n *\n * @param results\n * @returns\n */\n convertHandlerReturnTypeToMapOfTensors(results: SessionHandler.ReturnType): ReturnType {\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n return returnValue;\n }\n\n async lazyResetGrad(): Promise {\n await this.handler.lazyResetGrad();\n }\n\n runTrainStep(feeds: FeedsType, options?: RunOptions): Promise;\n runTrainStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async runTrainStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.trainingInputNames, this.trainingOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runTrainStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n }\n\n async runOptimizerStep(options?: InferenceSession.RunOptions|undefined): Promise {\n if (this.hasOptimizerModel) {\n await this.handler.runOptimizerStep(options || {});\n } else {\n throw new Error('This TrainingSession has no OptimizerModel loaded.');\n }\n }\n\n runEvalStep(feeds: FeedsType, options?: RunOptions|undefined): Promise;\n runEvalStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions|undefined): Promise;\n async runEvalStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n if (this.hasEvalModel) {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.evalInputNames, this.evalOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runEvalStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n } else {\n throw new Error('This TrainingSession has no EvalModel loaded.');\n }\n }\n\n async getParametersSize(trainableOnly = true): Promise {\n return this.handler.getParametersSize(trainableOnly);\n }\n\n async loadParametersBuffer(array: Uint8Array, trainableOnly = true): Promise {\n const paramsSize = await this.getParametersSize(trainableOnly);\n // checking that the size of the Uint8Array is equivalent to the byte length of a Float32Array of the number\n // of parameters\n if (array.length !== 4 * paramsSize) {\n throw new Error(\n 'Size of the buffer passed into loadParametersBuffer must match the number of parameters in ' +\n 'the model. Please use getParametersSize method to check.');\n }\n return this.handler.loadParametersBuffer(array, trainableOnly);\n }\n\n async getContiguousParameters(trainableOnly = true): Promise {\n return this.handler.getContiguousParameters(trainableOnly);\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession as TrainingSessionImpl} from './training-session-impl.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace TrainingSession {\n /**\n * Either URI file path (string) or Uint8Array containing model or checkpoint information.\n */\n type UriOrBuffer = string|Uint8Array;\n}\n\n/**\n * Represent a runtime instance of an ONNX training session,\n * which contains a model that can be trained, and, optionally,\n * an eval and optimizer model.\n */\nexport interface TrainingSession {\n // #region run()\n\n /**\n * Lazily resets the gradients of all trainable parameters to zero. Should happen after the invocation of\n * runOptimizerStep.\n */\n lazyResetGrad(): Promise;\n\n /**\n * Run TrainStep asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for\n detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n runTrainStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single train step with the given inputs and options.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runTrainStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Runs a single optimizer step, which performs weight updates for the trainable parameters using the optimizer model.\n *\n * @param options - Optional. A set of options that controls the behavior of model optimizing.\n */\n runOptimizerStep(options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region copy parameters\n\n /**\n * Retrieves the size of all parameters for the training state. 
Calculates the total number of primitive (datatype of\n * the parameters) elements of all the parameters in the training state.\n *\n * @param trainableOnly - When set to true, the size is calculated for trainable params only. Default value is true.\n */\n getParametersSize(trainableOnly: boolean): Promise;\n\n /**\n * Copies parameter values from the given buffer to the training state. Currently, only supporting models with\n * parameters of type Float32.\n *\n * @param buffer - A Uint8Array representation of Float32 parameters.\n * @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.\n */\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n\n /**\n * Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.\n * Currently, only supporting models with parameters of type Float32.\n *\n * @param trainableOnly - When set to true, only trainable parameters are copied. Trainable parameters are parameters\n * for which requires_grad is set to true. Default value is true.\n * @returns A promise that resolves to a Float32 OnnxValue of the requested parameters.\n */\n getContiguousParameters(trainableOnly: boolean): Promise;\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded training model.\n */\n readonly trainingInputNames: readonly string[];\n\n /**\n * Get output names of the loaded training model.\n */\n readonly trainingOutputNames: readonly string[];\n\n /**\n * Get input names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalInputNames: readonly string[];\n\n /**\n * Get output names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalOutputNames: readonly string[];\n\n // #endregion\n}\n\n/**\n * Represents the optional parameters that can be passed into the TrainingSessionFactory.\n */\nexport interface TrainingSessionCreateOptions {\n /**\n * URI or buffer for a .ckpt file that contains the checkpoint for the training model.\n */\n checkpointState: TrainingSession.UriOrBuffer;\n /**\n * URI or buffer for the .onnx training file.\n */\n trainModel: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx optimizer model file.\n */\n optimizerModel?: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx eval model file.\n */\n evalModel?: TrainingSession.UriOrBuffer;\n}\n\n/**\n * Defines method overload possibilities for creating a TrainingSession.\n */\nexport interface TrainingSessionFactory {\n // #region create()\n\n /**\n * Creates a new TrainingSession and asynchronously loads any models passed in through trainingOptions\n *\n * @param trainingOptions specify models and checkpoints to load into the Training Session\n * @param sessionOptions specify configuration for training session behavior\n *\n * @returns Promise that resolves to a TrainingSession object\n */\n create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: InferenceSession.SessionOptions):\n Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const TrainingSession: TrainingSessionFactory = TrainingSessionImpl;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * # ONNX Runtime JavaScript API\n *\n * ONNX Runtime JavaScript API is a unified API for all JavaScript usages, including the following NPM packages:\n *\n * - [onnxruntime-node](https://www.npmjs.com/package/onnxruntime-node)\n * - [onnxruntime-web](https://www.npmjs.com/package/onnxruntime-web)\n * - [onnxruntime-react-native](https://www.npmjs.com/package/onnxruntime-react-native)\n *\n * See also:\n * - [Get Started](https://onnxruntime.ai/docs/get-started/with-javascript/)\n * - [Inference examples](https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js)\n *\n * @packageDocumentation\n */\n\nexport * from './backend.js';\nexport * from './env.js';\nexport * from './inference-session.js';\nexport * from './tensor.js';\nexport * from './tensor-conversion.js';\nexport * from './tensor-factory.js';\nexport * from './trace.js';\nexport * from './onnx-model.js';\nexport * from './onnx-value.js';\nexport * from './training-session.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nexport const isNode = !!(typeof process !== 'undefined' && process.versions && process.versions.node);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/// \n\n//\n// * type hack for \"HTMLImageElement\"\n//\n// in typescript, the type of \"HTMLImageElement\" is defined in lib.dom.d.ts, which is conflict with lib.webworker.d.ts.\n// when we use webworker, the lib.webworker.d.ts will be used, which does not have HTMLImageElement defined.\n//\n// we will get the following errors complaining that HTMLImageElement is not defined:\n//\n// ====================================================================================================================\n//\n// ../common/dist/cjs/tensor-factory.d.ts:187:29 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 187 fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n// Promise | TypedTensor<'uint8'>>;\n// ~~~~~~~~~~~~~~~~\n//\n// node_modules/@webgpu/types/dist/index.d.ts:83:7 - error TS2552: Cannot find name 'HTMLImageElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 83 | HTMLImageElement\n// ~~~~~~~~~~~~~~~~\n//\n// ====================================================================================================================\n//\n// `HTMLImageElement` is only used in type declaration and not in real code. So we define it as `unknown` here to\n// bypass the type check.\n\n//\n// * type hack for \"document\"\n//\n// in typescript, the type of \"document\" is defined in lib.dom.d.ts, so it's not available in webworker.\n//\n// we will get the following errors complaining that document is not defined:\n//\n// ====================================================================================================================\n//\n// lib/wasm/wasm-utils-import.ts:7:33 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:61 - error TS2584: Cannot find name 'document'. Do you need to change your target\n// library? 
Try changing the 'lib' compiler option to include 'dom'.\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~\n//\n// lib/wasm/wasm-utils-import.ts:7:88 - error TS2552: Cannot find name 'HTMLScriptElement'. Did you mean\n// 'HTMLLIElement'?\n//\n// 7 export const scriptSrc = typeof document !== 'undefined' ? (document?.currentScript as HTMLScriptElement)?.src :\n// ~~~~~~~~~~~~~~~~~\n// ====================================================================================================================\n//\n// `document` is used to get the current script URL, which is not available in webworker. This file is served as a\n// \"dual\" file for entries of both webworker and the esm module.\n//\ndeclare global {\n type HTMLImageElement = unknown;\n type HTMLScriptElement = {src?: string};\n const document: undefined|{currentScript?: HTMLScriptElement};\n}\n\n/**\n * @summary\n *\n * This file is served as a \"dual\" file for both entries of the following:\n * - The proxy worker itself.\n * - When used as a worker, it listens to the messages from the main thread and performs the corresponding operations.\n * - Should be imported directly using `new Worker()` in the main thread.\n *\n * - The ESM module that creates the proxy worker (as a worker launcher).\n * - When used as a worker launcher, it creates the proxy worker and returns it.\n * - Should be imported using `import()` in the main thread, with the query parameter `import=1`.\n *\n * This file will be always compiling into ESM format.\n */\n\nimport type {OrtWasmMessage, SerializableTensorMetadata} from '../proxy-messages.js';\nimport {createSession, copyFromExternalBuffer, endProfiling, extractTransferableBuffers, initEp, initRuntime, releaseSession, run} from '../wasm-core-impl.js';\nimport {initializeWebAssembly} from '../wasm-factory.js';\nimport {scriptSrc} from '../wasm-utils-import.js';\n\nconst WORKER_NAME = 'ort-wasm-proxy-worker';\nconst isProxyWorker = globalThis.self?.name === WORKER_NAME;\n\nif (isProxyWorker) {\n // Worker thread\n self.onmessage = (ev: MessageEvent): void => {\n const {type, in : message} = ev.data;\n try {\n switch (type) {\n case 'init-wasm':\n initializeWebAssembly(message!.wasm)\n .then(\n () => {\n initRuntime(message!).then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n },\n err => {\n postMessage({type, err});\n });\n break;\n case 'init-ep': {\n const {epName, env} = message!;\n initEp(env, epName)\n .then(\n () => {\n postMessage({type});\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'copy-from': {\n const {buffer} = message!;\n const bufferData = copyFromExternalBuffer(buffer);\n postMessage({type, out: bufferData} as OrtWasmMessage);\n break;\n }\n case 'create': {\n const {model, options} = message!;\n createSession(model, options)\n .then(\n sessionMetadata => {\n postMessage({type, out: sessionMetadata} as OrtWasmMessage);\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'release':\n releaseSession(message!);\n postMessage({type});\n break;\n case 'run': {\n const {sessionId, inputIndices, inputs, outputIndices, options} = message!;\n run(sessionId, inputIndices, inputs, outputIndices, new Array(outputIndices.length).fill(null), options)\n .then(\n outputs => {\n if (outputs.some(o => o[3] !== 'cpu')) {\n postMessage({type, err: 'Proxy does not support non-cpu tensor location.'});\n } else {\n postMessage(\n {type, out: outputs} 
as OrtWasmMessage,\n extractTransferableBuffers([...inputs, ...outputs] as SerializableTensorMetadata[]));\n }\n },\n err => {\n postMessage({type, err});\n });\n break;\n }\n case 'end-profiling':\n endProfiling(message!);\n postMessage({type});\n break;\n default:\n }\n } catch (err) {\n postMessage({type, err} as OrtWasmMessage);\n }\n };\n}\n\nexport default isProxyWorker ?\n null :\n (urlOverride?: string) =>\n new Worker(urlOverride ?? scriptSrc!, {type: BUILD_DEFS.IS_ESM ? 'module' : 'classic', name: WORKER_NAME});\n", "var e,r=(e=import.meta.url,async function(r={}){function t(){return x.buffer!=R.buffer&&L(),R}function n(){return x.buffer!=R.buffer&&L(),H}function a(){return x.buffer!=R.buffer&&L(),D}function o(){return x.buffer!=R.buffer&&L(),F}function i(){return x.buffer!=R.buffer&&L(),P}function s(){return x.buffer!=R.buffer&&L(),B}function u(){return x.buffer!=R.buffer&&L(),I}function f(){return x.buffer!=R.buffer&&L(),$}var l,c,d=Object.assign({},r),b=new Promise(((e,r)=>{l=e,c=r})),m=\"object\"==typeof window,p=\"function\"==typeof importScripts,h=p&&\"em-pthread\"==self.name;d.mountExternalData=(e,r)=>{(d.Fb||(d.Fb=new Map)).set(e,r)},d.unmountExternalData=()=>{delete d.Fb};var g=globalThis.SharedArrayBuffer??new WebAssembly.Memory({initial:0,maximum:0,shared:!0}).buffer.constructor;let v=()=>{const e=(e,r,t)=>(...n)=>{const a=zr,o=r?.();n=e(...n);const i=r?.();return o!==i&&(e=i,t(o),r=t=null),zr!=a?new Promise(((e,r)=>{Qr={resolve:e,reject:r}})):n},r=e=>async(...r)=>{try{if(d.Eb)throw Error(\"Session already started\");const t=d.Eb={bc:r[0],errors:[]},n=await e(...r);if(d.Eb!==t)throw Error(\"Session mismatch\");d.Mb?.flush();const a=t.errors;if(0e)),0d._OrtCreateSession),(e=>d._OrtCreateSession=e)),d._OrtRun=r(e(d._OrtRun,(()=>d._OrtRun),(e=>d._OrtRun=e))),d._OrtRunWithBinding=r(e(d._OrtRunWithBinding,(()=>d._OrtRunWithBinding),(e=>d._OrtRunWithBinding=e))),d._OrtBindInput=e(d._OrtBindInput,(()=>d._OrtBindInput),(e=>d._OrtBindInput=e)),v=void 0};d.jsepInit=(e,r)=>{if(v?.(),\"webgpu\"===e){[d.Mb,d.Tb,d.Xb,d.Nb,d.Wb,d.jb,d.Yb,d.$b,d.Ub,d.Vb,d.Zb]=r;const e=d.Mb;d.jsepRegisterBuffer=(r,t,n,a)=>e.registerBuffer(r,t,n,a),d.jsepGetBuffer=r=>e.getBuffer(r),d.jsepCreateDownloader=(r,t,n)=>e.createDownloader(r,t,n),d.jsepOnReleaseSession=r=>{e.onReleaseSession(r)},d.jsepOnRunStart=r=>e.onRunStart(r)}};var y,w,A=Object.assign({},d),_=\"./this.program\",C=(e,r)=>{throw r},O=\"\";(m||p)&&(p?O=self.location.href:\"undefined\"!=typeof document&&document.currentScript&&(O=document.currentScript.src),e&&(O=e),O=O.startsWith(\"blob:\")?\"\":O.substr(0,O.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1),e=>{var r=new XMLHttpRequest;return r.open(\"GET\",e,!1),r.send(null),r.responseText},p&&(w=e=>{var r=new XMLHttpRequest;return r.open(\"GET\",e,!1),r.responseType=\"arraybuffer\",r.send(null),new Uint8Array(r.response)}),y=(e,r,t)=>{var n=new XMLHttpRequest;n.open(\"GET\",e,!0),n.responseType=\"arraybuffer\",n.onload=()=>{200==n.status||0==n.status&&n.response?r(n.response):t()},n.onerror=t,n.send(null)});var j=console.log.bind(console),T=console.error.bind(console),S=j,W=T;if(Object.assign(d,A),A=null,h){var E,M=!1;function Pn(e){try{var r=e.data,t=r.cmd;if(\"load\"===t){let e=[];self.onmessage=r=>e.push(r),self.startWorker=()=>{postMessage({cmd:\"loaded\"});for(let r of e)Pn(r);self.onmessage=Pn};for(const e of r.handlers)d[e]&&!d[e].proxy||(d[e]=(...r)=>{postMessage({Lb:\"callHandler\",kc:e,args:r})},\"print\"==e&&(S=d[e]),\"printErr\"==e&&(W=d[e]));x=r.wasmMemory,L(),E(r.wasmModule)}else 
if(\"run\"===t){_n(r.pthread_ptr,0,0,1,0,0),Rr(r.pthread_ptr),Te(),_e(),M||(gn(),M=!0);try{Se(r.start_routine,r.arg)}catch(e){if(\"unwind\"!=e)throw e}}else\"cancel\"===t?yn()&&Tn(-1):\"setimmediate\"!==r.target&&(\"checkMailbox\"===t?M&&Hr():t&&(W(`worker: received unknown command ${t}`),W(r)))}catch(e){throw Cn(),e}}W=function(...e){e=e.join(\" \"),console.error(e)},self.alert=function(...e){postMessage({Lb:\"alert\",text:e.join(\" \"),mc:yn()})},d.instantiateWasm=(e,r)=>new Promise((e=>{E=t=>{t=new WebAssembly.Instance(t,oe()),r(t),e()}})),self.onunhandledrejection=e=>{throw e.reason||e},self.onmessage=Pn}var x,N,k,R,H,D,F,P,B,I,U,G,$,Y=!1;function L(){var e=x.buffer;d.HEAP8=R=new Int8Array(e),d.HEAP16=D=new Int16Array(e),d.HEAPU8=H=new Uint8Array(e),d.HEAPU16=F=new Uint16Array(e),d.HEAP32=P=new Int32Array(e),d.HEAPU32=B=new Uint32Array(e),d.HEAPF32=I=new Float32Array(e),d.HEAPF64=$=new Float64Array(e),d.HEAP64=U=new BigInt64Array(e),d.HEAPU64=G=new BigUint64Array(e)}if(!h){if(d.wasmMemory)x=d.wasmMemory;else if(!((x=new WebAssembly.Memory({initial:256,maximum:65536,shared:!0})).buffer instanceof g))throw W(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\"),Error(\"bad memory\");L()}var z=[],V=[],q=[],J=0,X=null,K=null;function Q(){if(0==--J&&(null!==X&&(clearInterval(X),X=null),K)){var e=K;K=null,e()}}function Z(e){throw W(e=\"Aborted(\"+e+\")\"),Y=!0,k=1,e=new WebAssembly.RuntimeError(e+\". Build with -sASSERTIONS for more info.\"),c(e),e}var ee,re=e=>e.startsWith(\"data:application/octet-stream;base64,\"),te=e=>e.startsWith(\"file://\");function ne(e){if(w)return w(e);throw\"both async and sync fetching of the wasm failed\"}function ae(e,r,t){return function(e){if(m||p){if(\"function\"==typeof fetch&&!te(e))return fetch(e,{credentials:\"same-origin\"}).then((r=>{if(!r.ok)throw`failed to load wasm binary file at '${e}'`;return r.arrayBuffer()})).catch((()=>ne(e)));if(y)return new Promise(((r,t)=>{y(e,(e=>r(new Uint8Array(e))),t)}))}return Promise.resolve().then((()=>ne(e)))}(e).then((e=>WebAssembly.instantiate(e,r))).then(t,(e=>{W(`failed to asynchronously prepare wasm: ${e}`),Z(e)}))}function oe(){return{a:{M:ue,za:se,b:Ee,$:xe,z:He,pa:De,X:Ie,Z:Ue,qa:Ge,na:$e,ga:Ye,ma:Le,J:ze,Y:Ve,V:qe,oa:Je,W:Xe,va:Ze,D:ir,P:ur,O:hr,C:vr,s:yr,p:wr,E:Ar,y:Er,Q:Mr,ta:xr,ja:Nr,T:Dr,aa:Pr,F:Br,ia:Rr,sa:Ir,u:$r,B:rt,o:nt,k:it,c:cr,n:ut,j:dt,Aa:bt,r:mt,d:pt,v:ht,m:gt,g:vt,l:yt,i:wt,h:At,e:_t,da:Ct,ea:St,fa:Wt,ba:Et,ca:Mt,S:xt,f:Rt,N:Ht,G:Dt,K:Ft,w:Pt,ra:It,U:Ut,t:Bt,x:Gt,L:$t,R:Yt,ya:qt,xa:Jt,ka:Zt,la:en,_:he,A:rn,I:tn,ha:nn,H:on,a:x,wa:me,ua:ln,q:cn}}}var ie={1337716:(e,r,t,a)=>{if(void 0===d||!d.Fb)return 1;if((e=Re(e>>>0)).startsWith(\"./\")&&(e=e.substring(2)),!(e=d.Fb.get(e)))return 2;if(a>>>=0,(r>>>=0)+(t>>>=0)>e.byteLength)return 3;try{return n().set(e.subarray(r,r+t),a>>>0),0}catch{return 4}},1338217:()=>{d.Ub()},1338248:()=>{d.Vb()},1338277:()=>{d.Zb()},1338302:e=>d.Tb(e),1338335:e=>d.Xb(e),1338367:(e,r,t)=>{d.Nb(e,r,t,!0)},1338406:(e,r,t)=>{d.Nb(e,r,t)},1338439:()=>\"undefined\"!=typeof wasmOffsetConverter,1338496:e=>{d.jb(\"Abs\",e,void 0)},1338547:e=>{d.jb(\"Neg\",e,void 0)},1338598:e=>{d.jb(\"Floor\",e,void 0)},1338651:e=>{d.jb(\"Ceil\",e,void 0)},1338703:e=>{d.jb(\"Reciprocal\",e,void 0)},1338761:e=>{d.jb(\"Sqrt\",e,void 0)},1338813:e=>{d.jb(\"Exp\",e,void 0)},1338864:e=>{d.jb(\"Erf\",e,void 
0)},1338915:e=>{d.jb(\"Sigmoid\",e,void 0)},1338970:(e,r,t)=>{d.jb(\"HardSigmoid\",e,{alpha:r,beta:t})},1339049:e=>{d.jb(\"Log\",e,void 0)},1339100:e=>{d.jb(\"Sin\",e,void 0)},1339151:e=>{d.jb(\"Cos\",e,void 0)},1339202:e=>{d.jb(\"Tan\",e,void 0)},1339253:e=>{d.jb(\"Asin\",e,void 0)},1339305:e=>{d.jb(\"Acos\",e,void 0)},1339357:e=>{d.jb(\"Atan\",e,void 0)},1339409:e=>{d.jb(\"Sinh\",e,void 0)},1339461:e=>{d.jb(\"Cosh\",e,void 0)},1339513:e=>{d.jb(\"Asinh\",e,void 0)},1339566:e=>{d.jb(\"Acosh\",e,void 0)},1339619:e=>{d.jb(\"Atanh\",e,void 0)},1339672:e=>{d.jb(\"Tanh\",e,void 0)},1339724:e=>{d.jb(\"Not\",e,void 0)},1339775:(e,r,t)=>{d.jb(\"Clip\",e,{min:r,max:t})},1339844:e=>{d.jb(\"Clip\",e,void 0)},1339896:(e,r)=>{d.jb(\"Elu\",e,{alpha:r})},1339954:e=>{d.jb(\"Relu\",e,void 0)},1340006:(e,r)=>{d.jb(\"LeakyRelu\",e,{alpha:r})},1340070:(e,r)=>{d.jb(\"ThresholdedRelu\",e,{alpha:r})},1340140:(e,r)=>{d.jb(\"Cast\",e,{to:r})},1340198:e=>{d.jb(\"Add\",e,void 0)},1340249:e=>{d.jb(\"Sub\",e,void 0)},1340300:e=>{d.jb(\"Mul\",e,void 0)},1340351:e=>{d.jb(\"Div\",e,void 0)},1340402:e=>{d.jb(\"Pow\",e,void 0)},1340453:e=>{d.jb(\"Equal\",e,void 0)},1340506:e=>{d.jb(\"Greater\",e,void 0)},1340561:e=>{d.jb(\"GreaterOrEqual\",e,void 0)},1340623:e=>{d.jb(\"Less\",e,void 0)},1340675:e=>{d.jb(\"LessOrEqual\",e,void 0)},1340734:(e,r,t,n,a)=>{d.jb(\"ReduceMean\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1340893:(e,r,t,n,a)=>{d.jb(\"ReduceMax\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341051:(e,r,t,n,a)=>{d.jb(\"ReduceMin\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341209:(e,r,t,n,a)=>{d.jb(\"ReduceProd\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341368:(e,r,t,n,a)=>{d.jb(\"ReduceSum\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341526:(e,r,t,n,a)=>{d.jb(\"ReduceL1\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341683:(e,r,t,n,a)=>{d.jb(\"ReduceL2\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1341840:(e,r,t,n,a)=>{d.jb(\"ReduceLogSum\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1342001:(e,r,t,n,a)=>{d.jb(\"ReduceSumSquare\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1342165:(e,r,t,n,a)=>{d.jb(\"ReduceLogSumExp\",e,{keepDims:!!r,noopWithEmptyAxes:!!t,axes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1342329:e=>{d.jb(\"Where\",e,void 
0)},1342382:(e,r,t)=>{d.jb(\"Transpose\",e,{perm:r?Array.from(i().subarray(r>>>0,t>>>0)):[]})},1342490:(e,r,t,n)=>{d.jb(\"DepthToSpace\",e,{blocksize:r,mode:Re(t),format:n?\"NHWC\":\"NCHW\"})},1342623:(e,r,t,n)=>{d.jb(\"DepthToSpace\",e,{blocksize:r,mode:Re(t),format:n?\"NHWC\":\"NCHW\"})},1342756:(e,r,n,a,o,s,u,f,l,c,b,m,p,h,g)=>{d.jb(\"ConvTranspose\",e,{format:l?\"NHWC\":\"NCHW\",autoPad:r,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,m>>>0)):[],outputShape:p?Array.from(i().subarray(p>>>0,h>>>0)):[],activation:Re(g)})},1343157:(e,r,n,a,o,s,u,f,l,c,b,m,p,h)=>{d.jb(\"ConvTranspose\",e,{format:f?\"NHWC\":\"NCHW\",autoPad:r,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Re(h)})},1343722:(e,r,n,a,o,s,u,f,l,c,b,m,p,h,g)=>{d.jb(\"ConvTranspose\",e,{format:l?\"NHWC\":\"NCHW\",autoPad:r,dilations:[n],group:a,kernelShape:[o],pads:[s,u],strides:[f],wIsConst:()=>!!t()[c>>>0],outputPadding:b?Array.from(i().subarray(b>>>0,m>>>0)):[],outputShape:p?Array.from(i().subarray(p>>>0,h>>>0)):[],activation:Re(g)})},1344123:(e,r,n,a,o,s,u,f,l,c,b,m,p,h)=>{d.jb(\"ConvTranspose\",e,{format:f?\"NHWC\":\"NCHW\",autoPad:r,dilations:Array.from(i().subarray(n>>>0,2+(n>>>0)>>>0)),group:a,kernelShape:Array.from(i().subarray(o>>>0,2+(o>>>0)>>>0)),pads:Array.from(i().subarray(s>>>0,4+(s>>>0)>>>0)),strides:Array.from(i().subarray(u>>>0,2+(u>>>0)>>>0)),wIsConst:()=>!!t()[l>>>0],outputPadding:c?Array.from(i().subarray(c>>>0,b>>>0)):[],outputShape:m?Array.from(i().subarray(m>>>0,p>>>0)):[],activation:Re(h)})},1344688:(e,r)=>{d.jb(\"GlobalAveragePool\",e,{format:r?\"NHWC\":\"NCHW\"})},1344779:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"AveragePool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1345063:(e,r)=>{d.jb(\"GlobalAveragePool\",e,{format:r?\"NHWC\":\"NCHW\"})},1345154:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"AveragePool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1345438:(e,r)=>{d.jb(\"GlobalMaxPool\",e,{format:r?\"NHWC\":\"NCHW\"})},1345525:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"MaxPool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1345805:(e,r)=>{d.jb(\"GlobalMaxPool\",e,{format:r?\"NHWC\":\"NCHW\"})},1345892:(e,r,t,n,a,o,i,s,u,f,l,c,b,m,p,h)=>{d.jb(\"MaxPool\",e,{format:h?\"NHWC\":\"NCHW\",auto_pad:r,ceil_mode:t,count_include_pad:n,storage_order:a,dilations:[o,i],kernel_shape:[s,u],pads:[f,l,c,b],strides:[m,p]})},1346172:(e,r,t,n,a)=>{d.jb(\"Gemm\",e,{alpha:r,beta:t,transA:n,transB:a})},1346276:e=>{d.jb(\"MatMul\",e,void 
0)},1346330:(e,r,t,n)=>{d.jb(\"ArgMax\",e,{keepDims:!!r,selectLastIndex:!!t,axis:n})},1346438:(e,r,t,n)=>{d.jb(\"ArgMin\",e,{keepDims:!!r,selectLastIndex:!!t,axis:n})},1346546:(e,r)=>{d.jb(\"Softmax\",e,{axis:r})},1346609:(e,r)=>{d.jb(\"Concat\",e,{axis:r})},1346669:(e,r,t,n,a)=>{d.jb(\"Split\",e,{axis:r,numOutputs:t,splitSizes:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1346809:e=>{d.jb(\"Expand\",e,void 0)},1346863:(e,r)=>{d.jb(\"Gather\",e,{axis:Number(r)})},1346934:(e,r)=>{d.jb(\"GatherElements\",e,{axis:Number(r)})},1347013:(e,r,t,n,a,o,s,u,f,l,c)=>{d.jb(\"Resize\",e,{antialias:r,axes:t?Array.from(i().subarray(t>>>0,n>>>0)):[],coordinateTransformMode:Re(a),cubicCoeffA:o,excludeOutside:s,extrapolationValue:u,keepAspectRatioPolicy:Re(f),mode:Re(l),nearestMode:Re(c)})},1347359:(e,r,t,n,a,o,s)=>{d.jb(\"Slice\",e,{starts:r?Array.from(i().subarray(r>>>0,t>>>0)):[],ends:n?Array.from(i().subarray(n>>>0,a>>>0)):[],axes:o?Array.from(i().subarray(o>>>0,s>>>0)):[]})},1347575:e=>{d.jb(\"Tile\",e,void 0)},1347627:(e,r,t)=>{d.jb(\"InstanceNormalization\",e,{epsilon:r,format:t?\"NHWC\":\"NCHW\"})},1347741:(e,r,t)=>{d.jb(\"InstanceNormalization\",e,{epsilon:r,format:t?\"NHWC\":\"NCHW\"})},1347855:e=>{d.jb(\"Range\",e,void 0)},1347908:(e,r)=>{d.jb(\"Einsum\",e,{equation:Re(r)})},1347989:(e,r,t,n,a)=>{d.jb(\"Pad\",e,{mode:r,value:t,pads:n?Array.from(i().subarray(n>>>0,a>>>0)):[]})},1348116:(e,r,t,n,a,o)=>{d.jb(\"BatchNormalization\",e,{epsilon:r,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1348285:(e,r,t,n,a,o)=>{d.jb(\"BatchNormalization\",e,{epsilon:r,momentum:t,spatial:!!a,trainingMode:!!n,format:o?\"NHWC\":\"NCHW\"})},1348454:(e,r,t)=>{d.jb(\"CumSum\",e,{exclusive:Number(r),reverse:Number(t)})},1348551:(e,r,t,n,a,o,s,u,f)=>{d.jb(\"Attention\",e,{numHeads:r,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o,qkvHiddenSizes:s?Array.from(i().subarray(Number(u)>>>0,Number(u)+s>>>0)):[],pastPresentShareBuffer:!!f})},1348823:e=>{d.jb(\"BiasAdd\",e,void 0)},1348878:e=>{d.jb(\"BiasSplitGelu\",e,void 0)},1348939:e=>{d.jb(\"FastGelu\",e,void 0)},1348995:(e,r,n,a,o,s,f,l,c,b,m,p,h,g,v,y)=>{d.jb(\"Conv\",e,{format:p?\"NHWC\":\"NCHW\",auto_pad:r,dilations:n?Array.from(i().subarray(n>>>0,a>>>0)):[],group:o,kernel_shape:s?Array.from(i().subarray(s>>>0,f>>>0)):[],pads:l?Array.from(i().subarray(l>>>0,c>>>0)):[],strides:b?Array.from(i().subarray(b>>>0,m>>>0)):[],w_is_const:()=>!!t()[h>>>0],activation:Re(g),activation_params:v?Array.from(u().subarray(v>>>0,y>>>0)):[]})},1349491:e=>{d.jb(\"Gelu\",e,void 0)},1349543:(e,r,t,n)=>{d.jb(\"GroupQueryAttention\",e,{numHeads:r,kvNumHeads:t,scale:n})},1349656:(e,r,t,n)=>{d.jb(\"LayerNormalization\",e,{axis:r,epsilon:t,simplified:!!n})},1349767:(e,r,t,n)=>{d.jb(\"LayerNormalization\",e,{axis:r,epsilon:t,simplified:!!n})},1349878:(e,r,t,n,a,o)=>{d.jb(\"MatMulNBits\",e,{k:r,n:t,accuracyLevel:n,bits:a,blockSize:o})},1350005:(e,r,t,n,a,o)=>{d.jb(\"MultiHeadAttention\",e,{numHeads:r,isUnidirectional:t,maskFilterValue:n,scale:a,doRotary:o})},1350164:(e,r)=>{d.jb(\"QuickGelu\",e,{alpha:r})},1350228:(e,r,t,n,a)=>{d.jb(\"RotaryEmbedding\",e,{interleaved:!!r,numHeads:t,rotaryEmbeddingDim:n,scale:a})},1350367:(e,r,t)=>{d.jb(\"SkipLayerNormalization\",e,{epsilon:r,simplified:!!t})},1350469:e=>{d.Yb(e)},1350503:(e,r)=>d.$b(e,r,d.Eb.bc,d.Eb.errors),1350615:(e,r,t)=>{d.jb(\"SkipLayerNormalization\",e,{epsilon:r,simplified:!!t})}};function se(e,r,t){return et((async()=>{await d.Wb(e,r,t)}))}function ue(){return\"undefined\"!=typeof wasmOffsetConverter}function 
fe(e){this.name=\"ExitStatus\",this.message=`Program terminated with exit(${e})`,this.status=e}var le=e=>{e.terminate(),e.onmessage=()=>{}},ce=e=>{0==ge.length&&(Oe(),Ce(ge[0]));var r=ge.pop();if(!r)return 6;ve.push(r),we[e.Ab]=r,r.Ab=e.Ab;var t={cmd:\"run\",start_routine:e.cc,arg:e.Pb,pthread_ptr:e.Ab};return r.postMessage(t,e.ic),0},de=0,be=(e,r,...t)=>{for(var n=2*t.length,a=xn(),o=Mn(8*n),i=o>>>3,s=0;s>>0]=u)}return e=On(e,0,n,o,r),En(a),e};function me(e){if(h)return be(0,1,e);if(k=e,!(0{if(k=e,h)throw pe(e),\"unwind\";me(e)},ge=[],ve=[],ye=[],we={},Ae=e=>{var r=e.Ab;delete we[r],ge.push(e),ve.splice(ve.indexOf(e),1),e.Ab=0,jn(r)};function _e(){ye.forEach((e=>e()))}var Ce=e=>new Promise((r=>{e.onmessage=t=>{var n=(t=t.data).cmd;if(t.targetThread&&t.targetThread!=yn()){var a=we[t.targetThread];a?a.postMessage(t,t.transferList):W(`Internal error! Worker sent a message \"${n}\" to target pthread ${t.targetThread}, but that thread no longer exists!`)}else\"checkMailbox\"===n?Hr():\"spawnThread\"===n?ce(t):\"cleanupThread\"===n?Ae(we[t.thread]):\"killThread\"===n?(t=t.thread,n=we[t],delete we[t],le(n),jn(t),ve.splice(ve.indexOf(n),1),n.Ab=0):\"cancelThread\"===n?we[t.thread].postMessage({cmd:\"cancel\"}):\"loaded\"===n?(e.loaded=!0,r(e)):\"alert\"===n?alert(`Thread ${t.threadId}: ${t.text}`):\"setimmediate\"===t.target?e.postMessage(t):\"callHandler\"===n?d[t.handler](...t.args):n&&W(`worker sent an unknown command ${n}`)},e.onerror=e=>{throw W(`worker sent an error! ${e.filename}:${e.lineno}: ${e.message}`),e};var t,n=[];for(t of[\"onExit\"])d.hasOwnProperty(t)&&n.push(t);e.postMessage({cmd:\"load\",handlers:n,wasmMemory:x,wasmModule:N})}));function Oe(){var e=new Worker(new URL(import.meta.url),{type:\"module\",workerData:\"em-pthread\",name:\"em-pthread\"});ge.push(e)}var je=e=>{for(;0{var e=yn(),r=s()[e+52>>>2>>>0];e=s()[e+56>>>2>>>0],Wn(r,r-e),En(r)},Se=(e,r)=>{de=0,e=Nn(e,r),0>>=0);throw r>>>=0,t>>>=0,s()[n.Ib+16>>>2>>>0]=0,s()[n.Ib+4>>>2>>>0]=r,s()[n.Ib+8>>>2>>>0]=t,e}function Me(e,r,t,n){return h?be(2,1,e,r,t,n):xe(e,r,t,n)}function xe(e,r,t,n){if(e>>>=0,r>>>=0,t>>>=0,n>>>=0,void 0===g)return W(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\"),6;var a=[];return h&&0===a.length?Me(e,r,t,n):(e={cc:t,Ab:e,Pb:n,ic:a},h?(e.Lb=\"spawnThread\",postMessage(e,a),0):ce(e))}var Ne=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf8\"):void 0,ke=(e,r,t)=>{var n=(r>>>=0)+t;for(t=r;e[t]&&!(t>=n);)++t;if(16(a=224==(240&a)?(15&a)<<12|o<<6|i:(7&a)<<18|o<<12|i<<6|63&e[r++])?n+=String.fromCharCode(a):(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a))}}else n+=String.fromCharCode(a)}return n},Re=(e,r)=>(e>>>=0)?ke(n(),e,r):\"\";function He(e,r,t){return h?be(3,1,e,r,t):0}function De(e,r){if(h)return be(4,1,e,r)}var Fe=e=>{for(var r=0,t=0;t=n?r++:2047>=n?r+=2:55296<=n&&57343>=n?(r+=4,++t):r+=3}return r},Pe=(e,r,t,n)=>{if(!(0>>=0;n=t+n-1;for(var o=0;o=i&&(i=65536+((1023&i)<<10)|1023&e.charCodeAt(++o)),127>=i){if(t>=n)break;r[t++>>>0]=i}else{if(2047>=i){if(t+1>=n)break;r[t++>>>0]=192|i>>6}else{if(65535>=i){if(t+2>=n)break;r[t++>>>0]=224|i>>12}else{if(t+3>=n)break;r[t++>>>0]=240|i>>18,r[t++>>>0]=128|i>>12&63}r[t++>>>0]=128|i>>6&63}r[t++>>>0]=128|63&i}}return r[t>>>0]=0,t-a},Be=(e,r,t)=>Pe(e,n(),r,t);function Ie(e,r){if(h)return be(5,1,e,r)}function Ue(e,r,t){if(h)return be(6,1,e,r,t)}function Ge(e,r,t){return h?be(7,1,e,r,t):0}function $e(e,r){if(h)return be(8,1,e,r)}function Ye(e,r,t){if(h)return be(9,1,e,r,t)}function Le(e,r,t,n){if(h)return 
be(10,1,e,r,t,n)}function ze(e,r,t,n){if(h)return be(11,1,e,r,t,n)}function Ve(e,r,t,n){if(h)return be(12,1,e,r,t,n)}function qe(e){if(h)return be(13,1,e)}function Je(e,r){if(h)return be(14,1,e,r)}function Xe(e,r,t){if(h)return be(15,1,e,r,t)}var Ke,Qe,Ze=()=>{Z(\"\")},er=e=>{for(var r=\"\";n()[e>>>0];)r+=Ke[n()[e++>>>0]];return r},rr={},tr={},nr={};function ar(e,r,t={}){if(!(\"argPackAdvance\"in r))throw new TypeError(\"registerType registeredInstance requires argPackAdvance\");return function(e,r,t={}){var n=r.name;if(!e)throw new Qe(`type \"${n}\" must have a positive integer typeid pointer`);if(tr.hasOwnProperty(e)){if(t.Rb)return;throw new Qe(`Cannot register type '${n}' twice`)}tr[e]=r,delete nr[e],rr.hasOwnProperty(e)&&(r=rr[e],delete rr[e],r.forEach((e=>e())))}(e,r,t)}var or=(e,r,u)=>{switch(r){case 1:return u?e=>t()[e>>>0]:e=>n()[e>>>0];case 2:return u?e=>a()[e>>>1>>>0]:e=>o()[e>>>1>>>0];case 4:return u?e=>i()[e>>>2>>>0]:e=>s()[e>>>2>>>0];case 8:return u?e=>U[e>>>3]:e=>G[e>>>3];default:throw new TypeError(`invalid integer width (${r}): ${e}`)}};function ir(e,r,t){t>>>=0,ar(e>>>=0,{name:r=er(r>>>0),fromWireType:e=>e,toWireType:function(e,r){if(\"bigint\"!=typeof r&&\"number\"!=typeof r)throw r=null===r?\"null\":\"object\"==(e=typeof r)||\"array\"===e||\"function\"===e?r.toString():\"\"+r,new TypeError(`Cannot convert \"${r}\" to ${this.name}`);return\"number\"==typeof r&&(r=BigInt(r)),r},argPackAdvance:sr,readValueFromPointer:or(r,t,-1==r.indexOf(\"u\")),Db:null})}var sr=8;function ur(e,r,t,a){ar(e>>>=0,{name:r=er(r>>>0),fromWireType:function(e){return!!e},toWireType:function(e,r){return r?t:a},argPackAdvance:sr,readValueFromPointer:function(e){return this.fromWireType(n()[e>>>0])},Db:null})}var fr=[],lr=[];function cr(e){9<(e>>>=0)&&0==--lr[e+1]&&(lr[e]=void 0,fr.push(e))}var dr=e=>{if(!e)throw new Qe(\"Cannot use deleted val. 
handle = \"+e);return lr[e]},br=e=>{switch(e){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:const r=fr.pop()||lr.length;return lr[r]=e,lr[r+1]=1,r}};function mr(e){return this.fromWireType(s()[e>>>2>>>0])}var pr={name:\"emscripten::val\",fromWireType:e=>{var r=dr(e);return cr(e),r},toWireType:(e,r)=>br(r),argPackAdvance:sr,readValueFromPointer:mr,Db:null};function hr(e){return ar(e>>>0,pr)}var gr=(e,r)=>{switch(r){case 4:return function(e){return this.fromWireType(u()[e>>>2>>>0])};case 8:return function(e){return this.fromWireType(f()[e>>>3>>>0])};default:throw new TypeError(`invalid float width (${r}): ${e}`)}};function vr(e,r,t){t>>>=0,ar(e>>>=0,{name:r=er(r>>>0),fromWireType:e=>e,toWireType:(e,r)=>r,argPackAdvance:sr,readValueFromPointer:gr(r,t),Db:null})}function yr(e,r,t,n,a){if(e>>>=0,t>>>=0,r=er(r>>>0),-1===a&&(a=4294967295),a=e=>e,0===n){var o=32-8*t;a=e=>e<>>o}var i=r.includes(\"unsigned\")?function(e,r){return r>>>0}:function(e,r){return r};ar(e,{name:r,fromWireType:a,toWireType:i,argPackAdvance:sr,readValueFromPointer:or(r,t,0!==n),Db:null})}function wr(e,r,n){function a(e){var r=s()[e>>>2>>>0];return e=s()[e+4>>>2>>>0],new o(t().buffer,e,r)}var o=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][r];ar(e>>>=0,{name:n=er(n>>>0),fromWireType:a,argPackAdvance:sr,readValueFromPointer:a},{Rb:!0})}function Ar(e,r){e>>>=0;var t=\"std::string\"===(r=er(r>>>0));ar(e,{name:r,fromWireType:function(e){var r=s()[e>>>2>>>0],a=e+4;if(t)for(var o=a,i=0;i<=r;++i){var u=a+i;if(i==r||0==n()[u>>>0]){if(o=Re(o,u-o),void 0===f)var f=o;else f+=String.fromCharCode(0),f+=o;o=u+1}}else{for(f=Array(r),i=0;i>>0]);f=f.join(\"\")}return An(e),f},toWireType:function(e,r){r instanceof ArrayBuffer&&(r=new Uint8Array(r));var a=\"string\"==typeof r;if(!(a||r instanceof Uint8Array||r instanceof Uint8ClampedArray||r instanceof Int8Array))throw new Qe(\"Cannot pass non-string to std::string\");var o=t&&a?Fe(r):r.length,i=wn(4+o+1),u=i+4;if(s()[i>>>2>>>0]=o,t&&a)Be(r,u,o+1);else if(a)for(a=0;a>>0]=f}else for(a=0;a>>0]=r[a];return null!==e&&e.push(An,i),i},argPackAdvance:sr,readValueFromPointer:mr,Db(e){An(e)}})}var _r=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf-16le\"):void 0,Cr=(e,r)=>{for(var t=e>>1,i=t+r/2;!(t>=i)&&o()[t>>>0];)++t;if(32<(t<<=1)-e&&_r)return _r.decode(n().slice(e,t));for(t=\"\",i=0;!(i>=r/2);++i){var s=a()[e+2*i>>>1>>>0];if(0==s)break;t+=String.fromCharCode(s)}return t},Or=(e,r,t)=>{if(t??=2147483647,2>t)return 0;var n=r;t=(t-=2)<2*e.length?t/2:e.length;for(var o=0;o>>1>>>0]=i,r+=2}return a()[r>>>1>>>0]=0,r-n},jr=e=>2*e.length,Tr=(e,r)=>{for(var t=0,n=\"\";!(t>=r/4);){var a=i()[e+4*t>>>2>>>0];if(0==a)break;++t,65536<=a?(a-=65536,n+=String.fromCharCode(55296|a>>10,56320|1023&a)):n+=String.fromCharCode(a)}return n},Sr=(e,r,t)=>{if(r>>>=0,t??=2147483647,4>t)return 0;var n=r;t=n+t-4;for(var a=0;a=o&&(o=65536+((1023&o)<<10)|1023&e.charCodeAt(++a)),i()[r>>>2>>>0]=o,(r+=4)+4>t)break}return i()[r>>>2>>>0]=0,r-n},Wr=e=>{for(var r=0,t=0;t=n&&++t,r+=4}return r};function Er(e,r,t){if(e>>>=0,r>>>=0,t=er(t>>>=0),2===r)var n=Cr,a=Or,i=jr,u=e=>o()[e>>>1>>>0];else 4===r&&(n=Tr,a=Sr,i=Wr,u=e=>s()[e>>>2>>>0]);ar(e,{name:t,fromWireType:e=>{for(var t,a=s()[e>>>2>>>0],o=e+4,i=0;i<=a;++i){var f=e+4+i*r;i!=a&&0!=u(f)||(o=n(o,f-o),void 0===t?t=o:(t+=String.fromCharCode(0),t+=o),o=f+r)}return An(e),t},toWireType:(e,n)=>{if(\"string\"!=typeof n)throw new Qe(`Cannot pass non-string to C++ string type 
${t}`);var o=i(n),u=wn(4+o+r);return s()[u>>>2>>>0]=o/r,a(n,u+4,o+r),null!==e&&e.push(An,u),u},argPackAdvance:sr,readValueFromPointer:mr,Db(e){An(e)}})}function Mr(e,r){ar(e>>>=0,{Sb:!0,name:r=er(r>>>0),argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})}var xr=()=>1;function Nr(e){_n(e>>>0,!p,1,!m,131072,!1),_e()}var kr=e=>{if(!Y)try{if(e(),!(0>>=0,\"function\"==typeof Atomics.jc&&(Atomics.jc(i(),e>>>2,e).value.then(Hr),e+=128,Atomics.store(i(),e>>>2,1))}var Hr=()=>{var e=yn();e&&(Rr(e),kr(Sn))};function Dr(e,r){(e>>>=0)==r>>>0?setTimeout(Hr):h?postMessage({targetThread:e,cmd:\"checkMailbox\"}):(e=we[e])&&e.postMessage({cmd:\"checkMailbox\"})}var Fr=[];function Pr(e,r,t,n,a){for(r>>>=0,n/=2,Fr.length=n,t=a>>>0>>>3,a=0;a>>0];return(r?ie[r]:mn[e])(...Fr)}function Br(e){e>>>=0,h?postMessage({cmd:\"cleanupThread\",thread:e}):Ae(we[e])}function Ir(e){}var Ur=(e,r)=>{var t=tr[e];if(void 0===t)throw e=hn(e),t=er(e),An(e),new Qe(`${r} has unknown type ${t}`);return t},Gr=(e,r,t)=>{var n=[];return e=e.toWireType(n,t),n.length&&(s()[r>>>2>>>0]=br(n)),e};function $r(e,r,t){return r>>>=0,t>>>=0,e=dr(e>>>0),r=Ur(r,\"emval::as\"),Gr(r,t,e)}var Yr=e=>{try{e()}catch(e){Z(e)}},Lr=0,zr=null,Vr=0,qr=[],Jr={},Xr={},Kr=0,Qr=null,Zr=[];function et(e){return function(e){if(!Y){if(0===Lr){var r=!1,t=!1;e(((e=0)=>{if(!Y&&(Vr=e,r=!0,t)){Lr=2,Yr((()=>Hn(zr))),\"undefined\"!=typeof Browser&&Browser.Jb.Qb&&Browser.Jb.resume(),e=!1;try{var n=function(){var e=i()[zr+8>>>2>>>0];return e=pn[Xr[e]],--de,e()}()}catch(r){n=r,e=!0}var a=!1;if(!zr){var o=Qr;o&&(Qr=null,(e?o.reject:o.resolve)(n),a=!0)}if(e&&!a)throw n}})),t=!0,r||(Lr=1,zr=function(){var e=wn(65548),r=e+12;s()[e>>>2>>>0]=r,s()[e+4>>>2>>>0]=r+65536,r=qr[0];var t=Jr[r];return void 0===t&&(t=Kr++,Jr[r]=t,Xr[t]=r),r=t,i()[e+8>>>2>>>0]=r,e}(),\"undefined\"!=typeof Browser&&Browser.Jb.Qb&&Browser.Jb.pause(),Yr((()=>kn(zr))))}else 2===Lr?(Lr=0,Yr(Dn),An(zr),zr=null,Zr.forEach(kr)):Z(`invalid state: ${Lr}`);return Vr}}((r=>{e().then(r)}))}function rt(e){return e>>>=0,et((()=>(e=dr(e)).then(br)))}var tt=[];function nt(e,r,t,n){return t>>>=0,n>>>=0,(e=tt[e>>>0])(null,r=dr(r>>>0),t,n)}var at={},ot=e=>{var r=at[e];return void 0===r?er(e):r};function it(e,r,t,n,a){return t>>>=0,n>>>=0,a>>>=0,(e=tt[e>>>0])(r=dr(r>>>0),r[t=ot(t)],n,a)}var st=()=>\"object\"==typeof globalThis?globalThis:Function(\"return this\")();function ut(e){return 0==(e>>>=0)?br(st()):(e=ot(e),br(st()[e]))}var ft=e=>{var r=tt.length;return tt.push(e),r},lt=(e,r)=>{for(var t=Array(e),n=0;n>>2>>>0],\"parameter \"+n);return t},ct=(e,r)=>Object.defineProperty(r,\"name\",{value:e});function dt(e,r,t){var n=(r=lt(e,r>>>0)).shift();e--;var a=\"return function (obj, func, destructorsRef, args) {\\n\",o=0,i=[];0===t&&i.push(\"obj\");for(var s=[\"retType\"],u=[n],f=0;fe.name)).join(\", \")}) => ${n.name}>`,ft(ct(t,e))}function bt(e){return e=ot(e>>>0),br(d[e])}function mt(e,r){return r>>>=0,e=dr(e>>>0),r=dr(r),br(e[r])}function pt(e){9<(e>>>=0)&&(lr[e+1]+=1)}function ht(){return br([])}function gt(e){e=dr(e>>>0);for(var r=Array(e.length),t=0;t>>0))}function yt(){return br({})}function wt(e){for(var r=dr(e>>>=0);r.length;){var t=r.pop();r.pop()(t)}cr(e)}function At(e,r,t){r>>>=0,t>>>=0,e=dr(e>>>0),r=dr(r),t=dr(t),e[r]=t}function _t(e,r){return r>>>=0,e=(e=Ur(e>>>0,\"_emval_take_value\")).readValueFromPointer(r),br(e)}function Ct(e,r){e=-9007199254740992>e||9007199254740992>>=0,e=new 
Date(1e3*e),i()[r>>>2>>>0]=e.getUTCSeconds(),i()[r+4>>>2>>>0]=e.getUTCMinutes(),i()[r+8>>>2>>>0]=e.getUTCHours(),i()[r+12>>>2>>>0]=e.getUTCDate(),i()[r+16>>>2>>>0]=e.getUTCMonth(),i()[r+20>>>2>>>0]=e.getUTCFullYear()-1900,i()[r+24>>>2>>>0]=e.getUTCDay(),e=(e.getTime()-Date.UTC(e.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,i()[r+28>>>2>>>0]=e}var Ot=e=>0==e%4&&(0!=e%100||0==e%400),jt=[0,31,60,91,121,152,182,213,244,274,305,335],Tt=[0,31,59,90,120,151,181,212,243,273,304,334];function St(e,r){e=-9007199254740992>e||9007199254740992>>=0,e=new Date(1e3*e),i()[r>>>2>>>0]=e.getSeconds(),i()[r+4>>>2>>>0]=e.getMinutes(),i()[r+8>>>2>>>0]=e.getHours(),i()[r+12>>>2>>>0]=e.getDate(),i()[r+16>>>2>>>0]=e.getMonth(),i()[r+20>>>2>>>0]=e.getFullYear()-1900,i()[r+24>>>2>>>0]=e.getDay();var t=(Ot(e.getFullYear())?jt:Tt)[e.getMonth()]+e.getDate()-1|0;i()[r+28>>>2>>>0]=t,i()[r+36>>>2>>>0]=-60*e.getTimezoneOffset(),t=new Date(e.getFullYear(),6,1).getTimezoneOffset();var n=new Date(e.getFullYear(),0,1).getTimezoneOffset();e=0|(t!=n&&e.getTimezoneOffset()==Math.min(n,t)),i()[r+32>>>2>>>0]=e}function Wt(e){e>>>=0;var r=new Date(i()[e+20>>>2>>>0]+1900,i()[e+16>>>2>>>0],i()[e+12>>>2>>>0],i()[e+8>>>2>>>0],i()[e+4>>>2>>>0],i()[e>>>2>>>0],0),t=i()[e+32>>>2>>>0],n=r.getTimezoneOffset(),a=new Date(r.getFullYear(),6,1).getTimezoneOffset(),o=new Date(r.getFullYear(),0,1).getTimezoneOffset(),s=Math.min(o,a);return 0>t?i()[e+32>>>2>>>0]=Number(a!=o&&s==n):0>>2>>>0]=r.getDay(),t=(Ot(r.getFullYear())?jt:Tt)[r.getMonth()]+r.getDate()-1|0,i()[e+28>>>2>>>0]=t,i()[e>>>2>>>0]=r.getSeconds(),i()[e+4>>>2>>>0]=r.getMinutes(),i()[e+8>>>2>>>0]=r.getHours(),i()[e+12>>>2>>>0]=r.getDate(),i()[e+16>>>2>>>0]=r.getMonth(),i()[e+20>>>2>>>0]=r.getYear(),e=r.getTime(),BigInt(isNaN(e)?-1:e/1e3)}function Et(e,r,t,n,a,o,i){return h?be(16,1,e,r,t,n,a,o,i):-52}function Mt(e,r,t,n,a,o){if(h)return be(17,1,e,r,t,n,a,o)}function xt(e,r,t,n){e>>>=0,r>>>=0,t>>>=0,n>>>=0;var a=(new Date).getFullYear(),o=new Date(a,0,1),u=new Date(a,6,1);a=o.getTimezoneOffset();var f=u.getTimezoneOffset(),l=Math.max(a,f);s()[e>>>2>>>0]=60*l,i()[r>>>2>>>0]=Number(a!=f),o=(e=e=>e.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:\"short\"}).split(\" \")[1])(o),u=e(u),f{Nt.length=0;for(var t;t=n()[e++>>>0];){var a=105!=t;r+=(a&=112!=t)&&r%8?4:0,Nt.push(112==t?s()[r>>>2>>>0]:106==t?U[r>>>3]:105==t?i()[r>>>2>>>0]:f()[r>>>3>>>0]),r+=a?8:4}return Nt};function Rt(e,r,t){return e>>>=0,r=kt(r>>>0,t>>>0),ie[e](...r)}function Ht(e,r,t){return e>>>=0,r=kt(r>>>0,t>>>0),ie[e](...r)}var Dt=()=>{},Ft=()=>Date.now();function Pt(e,r){return W(Re(e>>>0,r>>>0))}var Bt,It=()=>{throw de+=1,\"unwind\"};function Ut(){return 4294901760}Bt=()=>performance.timeOrigin+performance.now();var Gt=()=>navigator.hardwareConcurrency;function $t(){return Z(\"Cannot use emscripten_pc_get_function without -sUSE_OFFSET_CONVERTER\"),0}function Yt(e){e>>>=0;var r=n().length;if(e<=r||4294901760=t;t*=2){var a=r*(1+.2/t);a=Math.min(a,e+100663296);var o=Math;a=Math.max(e,a);e:{o=(o.min.call(o,4294901760,a+(65536-a%65536)%65536)-x.buffer.byteLength+65535)/65536;try{x.grow(o),L();var i=1;break e}catch(e){}i=void 0}if(i)return!0}return!1}var Lt=()=>(Z(\"Cannot use convertFrameToPC (needed by __builtin_return_address) without -sUSE_OFFSET_CONVERTER\"),0),zt={},Vt=e=>{e.forEach((e=>{var r=Lt();r&&(zt[r]=e)}))};function qt(){var e=Error().stack.toString().split(\"\\n\");return\"Error\"==e[0]&&e.shift(),Vt(e),zt.Ob=Lt(),zt.ac=e,zt.Ob}function Jt(e,r,t){if(e>>>=0,r>>>=0,zt.Ob==e)var 
n=zt.ac;else\"Error\"==(n=Error().stack.toString().split(\"\\n\"))[0]&&n.shift(),Vt(n);for(var a=3;n[a]&&Lt()!=e;)++a;for(e=0;e>>2>>>0]=Lt();return e}var Xt,Kt={},Qt=()=>{if(!Xt){var e,r={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(\"object\"==typeof navigator&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:_||\"./this.program\"};for(e in Kt)void 0===Kt[e]?delete r[e]:r[e]=Kt[e];var t=[];for(e in r)t.push(`${e}=${r[e]}`);Xt=t}return Xt};function Zt(e,r){if(h)return be(18,1,e,r);e>>>=0,r>>>=0;var n=0;return Qt().forEach(((a,o)=>{var i=r+n;for(o=s()[e+4*o>>>2>>>0]=i,i=0;i>>0]=a.charCodeAt(i);t()[o>>>0]=0,n+=a.length+1})),0}function en(e,r){if(h)return be(19,1,e,r);e>>>=0,r>>>=0;var t=Qt();s()[e>>>2>>>0]=t.length;var n=0;return t.forEach((e=>n+=e.length+1)),s()[r>>>2>>>0]=n,0}function rn(e){return h?be(20,1,e):52}function tn(e,r,t,n){return h?be(21,1,e,r,t,n):52}function nn(e,r,t,n){return h?be(22,1,e,r,t,n):70}var an=[null,[],[]];function on(e,r,t,a){if(h)return be(23,1,e,r,t,a);r>>>=0,t>>>=0,a>>>=0;for(var o=0,i=0;i>>2>>>0],f=s()[r+4>>>2>>>0];r+=8;for(var l=0;l>>0],d=an[e];0===c||10===c?((1===e?S:W)(ke(d,0)),d.length=0):d.push(c)}o+=f}return s()[a>>>2>>>0]=o,0}var sn=[31,29,31,30,31,30,31,31,30,31,30,31],un=[31,28,31,30,31,30,31,31,30,31,30,31],fn=(e,r)=>{t().set(e,r>>>0)};function ln(e,r,t,n){function a(e,r,t){for(e=\"number\"==typeof e?e.toString():e||\"\";e.lengthe?-1:0n-e.getDate())){e.setDate(e.getDate()+r);break}r-=n-e.getDate()+1,e.setDate(1),11>t?e.setMonth(t+1):(e.setMonth(0),e.setFullYear(e.getFullYear()+1))}return t=new Date(e.getFullYear()+1,0,4),r=f(new Date(e.getFullYear(),0,4)),t=f(t),0>=u(r,e)?0>=u(t,e)?e.getFullYear()+1:e.getFullYear():e.getFullYear()-1}e>>>=0,r>>>=0,t>>>=0,n>>>=0;var c=s()[n+40>>>2>>>0];for(var d in n={fc:i()[n>>>2>>>0],ec:i()[n+4>>>2>>>0],Gb:i()[n+8>>>2>>>0],Kb:i()[n+12>>>2>>>0],Hb:i()[n+16>>>2>>>0],Cb:i()[n+20>>>2>>>0],ub:i()[n+24>>>2>>>0],Bb:i()[n+28>>>2>>>0],nc:i()[n+32>>>2>>>0],dc:i()[n+36>>>2>>>0],hc:c?Re(c):\"\"},t=Re(t),c={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"})t=t.replace(new RegExp(d,\"g\"),c[d]);var b=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),m=\"January February March April May June July August September October November December\".split(\" \");for(d in c={\"%a\":e=>b[e.ub].substring(0,3),\"%A\":e=>b[e.ub],\"%b\":e=>m[e.Hb].substring(0,3),\"%B\":e=>m[e.Hb],\"%C\":e=>o((e.Cb+1900)/100|0,2),\"%d\":e=>o(e.Kb,2),\"%e\":e=>a(e.Kb,2,\" \"),\"%g\":e=>l(e).toString().substring(2),\"%G\":l,\"%H\":e=>o(e.Gb,2),\"%I\":e=>(0==(e=e.Gb)?e=12:12{for(var r=0,t=0;t<=e.Hb-1;r+=(Ot(e.Cb+1900)?sn:un)[t++]);return o(e.Kb+r,3)},\"%m\":e=>o(e.Hb+1,2),\"%M\":e=>o(e.ec,2),\"%n\":()=>\"\\n\",\"%p\":e=>0<=e.Gb&&12>e.Gb?\"AM\":\"PM\",\"%S\":e=>o(e.fc,2),\"%t\":()=>\"\\t\",\"%u\":e=>e.ub||7,\"%U\":e=>o(Math.floor((e.Bb+7-e.ub)/7),2),\"%V\":e=>{var r=Math.floor((e.Bb+7-(e.ub+6)%7)/7);if(2>=(e.ub+371-e.Bb-2)%7&&r++,r)53==r&&(4==(t=(e.ub+371-e.Bb)%7)||3==t&&Ot(e.Cb)||(r=1));else{r=52;var 
t=(e.ub+7-e.Bb-1)%7;(4==t||5==t&&Ot(e.Cb%400-1))&&r++}return o(r,2)},\"%w\":e=>e.ub,\"%W\":e=>o(Math.floor((e.Bb+7-(e.ub+6)%7)/7),2),\"%y\":e=>(e.Cb+1900).toString().substring(2),\"%Y\":e=>e.Cb+1900,\"%z\":e=>{var r=0<=(e=e.dc);return e=Math.abs(e)/60,(r?\"+\":\"-\")+String(\"0000\"+(e/60*100+e%60)).slice(-4)},\"%Z\":e=>e.hc,\"%%\":()=>\"%\"},t=t.replace(/%%/g,\"\\0\\0\"),c)t.includes(d)&&(t=t.replace(new RegExp(d,\"g\"),c[d](n)));return d=function(e){var r=Array(Fe(e)+1);return Pe(e,r,0,r.length),r}(t=t.replace(/\\0\\0/g,\"%\")),d.length>r?0:(fn(d,e),d.length-1)}function cn(e,r,t,n){return ln(e>>>0,r>>>0,t>>>0,n>>>0)}h||function(){for(var e=d.numThreads-1;e--;)Oe();z.unshift((()=>{J++,function(e){h?e():Promise.all(ge.map(Ce)).then(e)}((()=>Q()))}))}();for(var dn=Array(256),bn=0;256>bn;++bn)dn[bn]=String.fromCharCode(bn);Ke=dn,Qe=d.BindingError=class extends Error{constructor(e){super(e),this.name=\"BindingError\"}},d.InternalError=class extends Error{constructor(e){super(e),this.name=\"InternalError\"}},lr.push(0,1,void 0,1,null,1,!0,1,!1,1),d.count_emval_handles=()=>lr.length/2-5-fr.length;var mn=[me,pe,Me,He,De,Ie,Ue,Ge,$e,Ye,Le,ze,Ve,qe,Je,Xe,Et,Mt,Zt,en,rn,tn,nn,on],pn=function(){function e(e,r){return pn=e.exports,pn=function(){var e=pn,r={};for(let[t,n]of Object.entries(e))r[t]=\"function\"==typeof n?(...e)=>{qr.push(t);try{return n(...e)}finally{Y||(qr.pop(),zr&&1===Lr&&0===qr.length&&(Lr=0,de+=1,Yr(Rn),\"undefined\"!=typeof Fibers&&Fibers.oc()))}}:n;return r}(),pn=function(){var e=pn,r=e=>r=>e(r)>>>0,t=e=>()=>e()>>>0;return(e=Object.assign({},e)).Ca=r(e.Ca),e.fb=t(e.fb),e.gb=r(e.gb),e.emscripten_main_runtime_thread_id=t(e.emscripten_main_runtime_thread_id),e.sb=r(e.sb),e.tb=t(e.tb),e}(),ye.push(pn.ib),V.unshift(pn.Ba),N=r,Q(),pn}var r=oe();if(J++,d.instantiateWasm)try{return d.instantiateWasm(r,e)}catch(e){W(`Module.instantiateWasm callback failed with error: ${e}`),c(e)}return ee||=d.locateFile?re(\"ort-wasm-simd-threaded.jsep.wasm\")?\"ort-wasm-simd-threaded.jsep.wasm\":d.locateFile?d.locateFile(\"ort-wasm-simd-threaded.jsep.wasm\",O):O+\"ort-wasm-simd-threaded.jsep.wasm\":new URL(\"ort-wasm-simd-threaded.jsep.wasm\",import.meta.url).href,function(e,r){var t=ee;return\"function\"!=typeof WebAssembly.instantiateStreaming||re(t)||te(t)||\"function\"!=typeof fetch?ae(t,e,r):fetch(t,{credentials:\"same-origin\"}).then((n=>WebAssembly.instantiateStreaming(n,e).then(r,(function(n){return W(`wasm streaming compile failed: ${n}`),W(\"falling back to ArrayBuffer 
instantiation\"),ae(t,e,r)}))))}(r,(function(r){e(r.instance,r.module)})).catch(c),{}}(),hn=e=>(hn=pn.Ca)(e),gn=()=>(gn=pn.Da)();d._OrtInit=(e,r)=>(d._OrtInit=pn.Ea)(e,r),d._OrtGetLastError=(e,r)=>(d._OrtGetLastError=pn.Fa)(e,r),d._OrtCreateSessionOptions=(e,r,t,n,a,o,i,s,u,f)=>(d._OrtCreateSessionOptions=pn.Ga)(e,r,t,n,a,o,i,s,u,f),d._OrtAppendExecutionProvider=(e,r)=>(d._OrtAppendExecutionProvider=pn.Ha)(e,r),d._OrtAddFreeDimensionOverride=(e,r,t)=>(d._OrtAddFreeDimensionOverride=pn.Ia)(e,r,t),d._OrtAddSessionConfigEntry=(e,r,t)=>(d._OrtAddSessionConfigEntry=pn.Ja)(e,r,t),d._OrtReleaseSessionOptions=e=>(d._OrtReleaseSessionOptions=pn.Ka)(e),d._OrtCreateSession=(e,r,t)=>(d._OrtCreateSession=pn.La)(e,r,t),d._OrtReleaseSession=e=>(d._OrtReleaseSession=pn.Ma)(e),d._OrtGetInputOutputCount=(e,r,t)=>(d._OrtGetInputOutputCount=pn.Na)(e,r,t),d._OrtGetInputName=(e,r)=>(d._OrtGetInputName=pn.Oa)(e,r),d._OrtGetOutputName=(e,r)=>(d._OrtGetOutputName=pn.Pa)(e,r),d._OrtFree=e=>(d._OrtFree=pn.Qa)(e),d._OrtCreateTensor=(e,r,t,n,a,o)=>(d._OrtCreateTensor=pn.Ra)(e,r,t,n,a,o),d._OrtGetTensorData=(e,r,t,n,a)=>(d._OrtGetTensorData=pn.Sa)(e,r,t,n,a),d._OrtReleaseTensor=e=>(d._OrtReleaseTensor=pn.Ta)(e),d._OrtCreateRunOptions=(e,r,t,n)=>(d._OrtCreateRunOptions=pn.Ua)(e,r,t,n),d._OrtAddRunConfigEntry=(e,r,t)=>(d._OrtAddRunConfigEntry=pn.Va)(e,r,t),d._OrtReleaseRunOptions=e=>(d._OrtReleaseRunOptions=pn.Wa)(e),d._OrtCreateBinding=e=>(d._OrtCreateBinding=pn.Xa)(e),d._OrtBindInput=(e,r,t)=>(d._OrtBindInput=pn.Ya)(e,r,t),d._OrtBindOutput=(e,r,t,n)=>(d._OrtBindOutput=pn.Za)(e,r,t,n),d._OrtClearBoundOutputs=e=>(d._OrtClearBoundOutputs=pn._a)(e),d._OrtReleaseBinding=e=>(d._OrtReleaseBinding=pn.$a)(e),d._OrtRunWithBinding=(e,r,t,n,a)=>(d._OrtRunWithBinding=pn.ab)(e,r,t,n,a),d._OrtRun=(e,r,t,n,a,o,i,s)=>(d._OrtRun=pn.bb)(e,r,t,n,a,o,i,s),d._OrtEndProfiling=e=>(d._OrtEndProfiling=pn.cb)(e),d._JsepOutput=(e,r,t)=>(d._JsepOutput=pn.db)(e,r,t),d._JsepGetNodeName=e=>(d._JsepGetNodeName=pn.eb)(e);var vn,yn=()=>(yn=pn.fb)(),wn=d._malloc=e=>(wn=d._malloc=pn.gb)(e),An=d._free=e=>(An=d._free=pn.hb)(e),_n=(e,r,t,n,a,o)=>(_n=pn.kb)(e,r,t,n,a,o),Cn=()=>(Cn=pn.lb)(),On=(e,r,t,n,a)=>(On=pn.mb)(e,r,t,n,a),jn=e=>(jn=pn.nb)(e),Tn=e=>(Tn=pn.ob)(e),Sn=()=>(Sn=pn.pb)(),Wn=(e,r)=>(Wn=pn.qb)(e,r),En=e=>(En=pn.rb)(e),Mn=e=>(Mn=pn.sb)(e),xn=()=>(xn=pn.tb)(),Nn=d.dynCall_ii=(e,r)=>(Nn=d.dynCall_ii=pn.vb)(e,r),kn=e=>(kn=pn.wb)(e),Rn=()=>(Rn=pn.xb)(),Hn=e=>(Hn=pn.yb)(e),Dn=()=>(Dn=pn.zb)();function Fn(){if(!(0xn(),d.stackRestore=e=>En(e),d.stackAlloc=e=>Mn(e),d.UTF8ToString=Re,d.stringToUTF8=Be,d.lengthBytesUTF8=Fe,K=function e(){vn||Fn(),vn||(K=e)},Fn(),b});export default r;\"em-pthread\"===globalThis.self?.name&&r();", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {isNode} from './wasm-utils-env';\n\n/**\n * The classic script source URL. This is not always available in non ESModule environments.\n *\n * In Node.js, this is undefined.\n */\nexport const scriptSrc =\n // if Nodejs, return undefined\n isNode ? undefined :\n // if It's ESM, use import.meta.url\n BUILD_DEFS.ESM_IMPORT_META_URL ??\n // use `document.currentScript.src` if available\n (typeof document !== 'undefined' ? (document.currentScript as HTMLScriptElement)?.src :\n // use `self.location.href` if available\n (typeof self !== 'undefined' ? 
self.location?.href : undefined));\n\n/**\n * The origin of the current location.\n *\n * In Node.js, this is undefined.\n */\nconst origin = isNode || typeof location === 'undefined' ? undefined : location.origin;\n\n/**\n * Check if the given filename with prefix is from the same origin.\n */\nconst isSameOrigin = (filename: string, prefixOverride?: string) => {\n try {\n const baseUrl = prefixOverride ?? scriptSrc;\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.origin === origin;\n } catch {\n return false;\n }\n};\n\n/**\n * Normalize the inputs to an absolute URL with the given prefix override. If failed, return undefined.\n */\nconst normalizeUrl = (filename: string, prefixOverride?: string) => {\n const baseUrl = prefixOverride ?? scriptSrc;\n try {\n const url = baseUrl ? new URL(filename, baseUrl) : new URL(filename);\n return url.href;\n } catch {\n return undefined;\n }\n};\n\n/**\n * Create a fallback URL if an absolute URL cannot be created by the normalizeUrl function.\n */\nconst fallbackUrl = (filename: string, prefixOverride?: string) => `${prefixOverride ?? './'}${filename}`;\n\n/**\n * This helper function is used to preload a module from a URL.\n *\n * If the origin of the worker URL is different from the current origin, the worker cannot be loaded directly.\n * See discussions in https://github.com/webpack-contrib/worker-loader/issues/154\n *\n * In this case, we will fetch the worker URL and create a new Blob URL with the same origin as a workaround.\n *\n * @param absoluteUrl - The absolute URL to preload.\n *\n * @returns - A promise that resolves to a new Blob URL\n */\nconst preload = async(absoluteUrl: string): Promise => {\n const response = await fetch(absoluteUrl, {credentials: 'same-origin'});\n const blob = await response.blob();\n return URL.createObjectURL(blob);\n};\n\n/**\n * This helper function is used to dynamically import a module from a URL.\n *\n * The build script has special handling for this function to ensure that the URL is not bundled into the final output.\n *\n * @param url - The URL to import.\n *\n * @returns - A promise that resolves to the default export of the module.\n */\nconst dynamicImportDefault = async(url: string): Promise => (await import(/* webpackIgnore: true */ url)).default;\n\n/**\n * The proxy worker factory imported from the proxy worker module.\n *\n * This is only available when the WebAssembly proxy is not disabled.\n */\nconst createProxyWorker: ((urlOverride?: string) => Worker)|undefined =\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n BUILD_DEFS.DISABLE_WASM_PROXY ? undefined : require('./proxy-worker/main').default;\n\n/**\n * Import the proxy worker.\n *\n * This function will perform the following steps:\n * 1. If a preload is needed, it will preload the module and return the object URL.\n * 2. 
Use the proxy worker factory to create the proxy worker.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The proxy worker.\n */\nexport const importProxyWorker = async(): Promise<[undefined | string, Worker]> => {\n if (!scriptSrc) {\n throw new Error('Failed to load proxy worker: cannot determine the script source URL.');\n }\n\n // If the script source is from the same origin, we can use the embedded proxy module directly.\n if (isSameOrigin(scriptSrc)) {\n return [undefined, createProxyWorker!()];\n }\n\n // Otherwise, need to preload\n const url = await preload(scriptSrc);\n return [url, createProxyWorker!(url)];\n};\n\n/**\n * The embedded WebAssembly module.\n *\n * This is only available in ESM and when embedding is not disabled.\n */\nconst embeddedWasmModule: EmscriptenModuleFactory|undefined =\n BUILD_DEFS.IS_ESM && BUILD_DEFS.DISABLE_DYNAMIC_IMPORT ?\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n require(\n !BUILD_DEFS.DISABLE_TRAINING ? '../../dist/ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? '../../dist/ort-wasm-simd-threaded.jsep.mjs' :\n '../../dist/ort-wasm-simd-threaded.mjs')\n .default :\n undefined;\n\n/**\n * Import the WebAssembly module.\n *\n * This function will perform the following steps:\n * 1. If BUILD_DEFS.DISABLE_DYNAMIC_IMPORT is true, use the embedded module.\n * 2. If a preload is needed, it will preload the module and return the object URL.\n * 3. Otherwise, it will perform a dynamic import of the module.\n *\n * @returns - A promise that resolves to a tuple of 2 elements:\n * - The object URL of the preloaded module, or undefined if no preload is needed.\n * - The default export of the module, which is a factory function to create the WebAssembly module.\n */\nexport const importWasmModule = async(\n urlOverride: string|undefined, prefixOverride: string|undefined,\n isMultiThreaded: boolean): Promise<[undefined | string, EmscriptenModuleFactory]> => {\n if (BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n return [undefined, embeddedWasmModule!];\n } else {\n const wasmModuleFilename = !BUILD_DEFS.DISABLE_TRAINING ? 'ort-training-wasm-simd-threaded.mjs' :\n !BUILD_DEFS.DISABLE_JSEP ? 'ort-wasm-simd-threaded.jsep.mjs' :\n 'ort-wasm-simd-threaded.mjs';\n const wasmModuleUrl = urlOverride ?? normalizeUrl(wasmModuleFilename, prefixOverride);\n // need to preload if all of the following conditions are met:\n // 1. not in Node.js.\n // - Node.js does not have the same origin policy for creating workers.\n // 2. multi-threaded is enabled.\n // - If multi-threaded is disabled, no worker will be created. So we don't need to preload the module.\n // 3. the absolute URL is available.\n // - If the absolute URL is failed to be created, the origin cannot be determined. In this case, we will not\n // preload the module.\n // 4. the worker URL is not from the same origin.\n // - If the worker URL is from the same origin, we can create the worker directly.\n const needPreload = !isNode && isMultiThreaded && wasmModuleUrl && !isSameOrigin(wasmModuleUrl, prefixOverride);\n const url = needPreload ? (await preload(wasmModuleUrl)) :\n (wasmModuleUrl ?? fallbackUrl(wasmModuleFilename, prefixOverride));\n return [needPreload ? url : undefined, await dynamicImportDefault>(url)];\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from './wasm-types';\nimport {importWasmModule} from './wasm-utils-import';\n\nlet wasm: OrtWasmModule|undefined;\nlet initialized = false;\nlet initializing = false;\nlet aborted = false;\n\nconst isMultiThreadSupported = (): boolean => {\n // If 'SharedArrayBuffer' is not available, WebAssembly threads will not work.\n if (typeof SharedArrayBuffer === 'undefined') {\n return false;\n }\n\n try {\n // Test for transferability of SABs (for browsers. needed for Firefox)\n // https://groups.google.com/forum/#!msg/mozilla.dev.platform/IHkBZlHETpA/dwsMNchWEQAJ\n if (typeof MessageChannel !== 'undefined') {\n new MessageChannel().port1.postMessage(new SharedArrayBuffer(1));\n }\n\n // Test for WebAssembly threads capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing threaded instructions.\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 5,\n 4, 1, 3, 1, 1, 10, 11, 1, 9, 0, 65, 0, 254, 16, 2, 0, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nconst isSimdSupported = (): boolean => {\n try {\n // Test for WebAssembly SIMD capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing SIMD instructions.\n\n // The binary data is generated from the following code by wat2wasm:\n //\n // (module\n // (type $t0 (func))\n // (func $f0 (type $t0)\n // (drop\n // (i32x4.dot_i16x8_s\n // (i8x16.splat\n // (i32.const 0))\n // (v128.const i32x4 0x00000000 0x00000000 0x00000000 0x00000000)))))\n\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 10, 30, 1, 28, 0, 65, 0,\n 253, 15, 253, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 253, 186, 1, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nexport const initializeWebAssembly = async(flags: Env.WebAssemblyFlags): Promise => {\n if (initialized) {\n return Promise.resolve();\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initializeWebAssembly()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initializeWebAssembly()\\' failed.');\n }\n\n initializing = true;\n\n // wasm flags are already initialized\n const timeout = flags.initTimeout!;\n let numThreads = flags.numThreads!;\n\n // ensure SIMD is supported\n if (!isSimdSupported()) {\n throw new Error('WebAssembly SIMD is not supported in the current environment.');\n }\n\n // check if multi-threading is supported\n const multiThreadSupported = isMultiThreadSupported();\n if (numThreads > 1 && !multiThreadSupported) {\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n // eslint-disable-next-line no-console\n console.warn(\n 'env.wasm.numThreads is set to ' + numThreads +\n ', but this will not work unless you enable crossOriginIsolated mode. ' +\n 'See https://web.dev/cross-origin-isolation-guide/ for more info.');\n }\n\n // eslint-disable-next-line no-console\n console.warn(\n 'WebAssembly multi-threading is not supported in the current environment. ' +\n 'Falling back to single-threading.');\n\n // set flags.numThreads to 1 so that OrtInit() will not create a global thread pool.\n flags.numThreads = numThreads = 1;\n }\n\n const wasmPaths = flags.wasmPaths;\n const wasmPrefixOverride = typeof wasmPaths === 'string' ? 
wasmPaths : undefined;\n const mjsPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.mjs;\n const mjsPathOverride = (mjsPathOverrideFlag as URL)?.href ?? mjsPathOverrideFlag;\n const wasmPathOverrideFlag = (wasmPaths as Env.WasmFilePaths)?.wasm;\n const wasmPathOverride = (wasmPathOverrideFlag as URL)?.href ?? wasmPathOverrideFlag;\n\n const [objectUrl, ortWasmFactory] = (await importWasmModule(mjsPathOverride, wasmPrefixOverride, numThreads > 1));\n\n let isTimeout = false;\n\n const tasks: Array> = [];\n\n // promise for timeout\n if (timeout > 0) {\n tasks.push(new Promise((resolve) => {\n setTimeout(() => {\n isTimeout = true;\n resolve();\n }, timeout);\n }));\n }\n\n // promise for module initialization\n tasks.push(new Promise((resolve, reject) => {\n const config: Partial = {\n /**\n * The number of threads. WebAssembly will create (Module.numThreads - 1) workers. If it is 1, no worker will be\n * created.\n */\n numThreads,\n };\n\n if (wasmPathOverride || wasmPrefixOverride) {\n /**\n * A callback function to locate the WebAssembly file. The function should return the full path of the file.\n *\n * Since Emscripten 3.1.58, this function is only called for the .wasm file.\n */\n config.locateFile = (fileName, scriptDirectory) =>\n wasmPathOverride ?? (wasmPrefixOverride ?? scriptDirectory) + fileName;\n }\n\n ortWasmFactory(config).then(\n // wasm module initialized successfully\n module => {\n initializing = false;\n initialized = true;\n wasm = module;\n resolve();\n if (objectUrl) {\n URL.revokeObjectURL(objectUrl);\n }\n },\n // wasm module failed to initialize\n (what) => {\n initializing = false;\n aborted = true;\n reject(what);\n });\n }));\n\n await Promise.race(tasks);\n\n if (isTimeout) {\n throw new Error(`WebAssembly backend initializing failed due to timeout: ${timeout}ms`);\n }\n};\n\nexport const getInstance = (): OrtWasmModule => {\n if (initialized && wasm) {\n return wasm;\n }\n\n throw new Error('WebAssembly is not initialized yet.');\n};\n\nexport const dispose = (): void => {\n if (initialized && !initializing && !aborted) {\n // TODO: currently \"PThread.terminateAllThreads()\" is not exposed in the wasm module.\n // And this function is not yet called by any code.\n // If it is needed in the future, we should expose it in the wasm module and uncomment the following line.\n\n // wasm?.PThread?.terminateAllThreads();\n wasm = undefined;\n\n initializing = false;\n initialized = false;\n aborted = true;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getInstance} from './wasm-factory';\n\nexport const allocWasmString = (data: string, allocs: number[]): number => {\n const wasm = getInstance();\n\n const dataLength = wasm.lengthBytesUTF8(data) + 1;\n const dataOffset = wasm._malloc(dataLength);\n wasm.stringToUTF8(data, dataOffset, dataLength);\n allocs.push(dataOffset);\n\n return dataOffset;\n};\n\ninterface ExtraOptionsHandler {\n (name: string, value: string): void;\n}\n\nexport const iterateExtraOptions =\n (options: Record, prefix: string, seen: WeakSet>,\n handler: ExtraOptionsHandler): void => {\n if (typeof options == 'object' && options !== null) {\n if (seen.has(options)) {\n throw new Error('Circular reference in options');\n } else {\n seen.add(options);\n }\n }\n\n Object.entries(options).forEach(([key, value]) => {\n const name = (prefix) ? 
prefix + key : key;\n if (typeof value === 'object') {\n iterateExtraOptions(value as Record, name + '.', seen, handler);\n } else if (typeof value === 'string' || typeof value === 'number') {\n handler(name, value.toString());\n } else if (typeof value === 'boolean') {\n handler(name, (value) ? '1' : '0');\n } else {\n throw new Error(`Can't handle extra config type: ${typeof value}`);\n }\n });\n };\n\n/**\n * check web assembly API's last error and throw error if any error occurred.\n * @param message a message used when an error occurred.\n */\nexport const checkLastError = (message: string): void => {\n const wasm = getInstance();\n\n const stack = wasm.stackSave();\n try {\n const paramsOffset = wasm.stackAlloc(8);\n wasm._OrtGetLastError(paramsOffset, paramsOffset + 4);\n const errorCode = wasm.HEAP32[paramsOffset / 4];\n const errorMessagePointer = wasm.HEAPU32[paramsOffset / 4 + 1];\n const errorMessage = errorMessagePointer ? wasm.UTF8ToString(errorMessagePointer) : '';\n throw new Error(`${message} ERROR_CODE: ${errorCode}, ERROR_MESSAGE: ${errorMessage}`);\n } finally {\n wasm.stackRestore(stack);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nexport const setRunOptions = (options: InferenceSession.RunOptions): [number, number[]] => {\n const wasm = getInstance();\n let runOptionsHandle = 0;\n const allocs: number[] = [];\n\n const runOptions: InferenceSession.RunOptions = options || {};\n\n try {\n if (options?.logSeverityLevel === undefined) {\n runOptions.logSeverityLevel = 2; // Default to warning\n } else if (\n typeof options.logSeverityLevel !== 'number' || !Number.isInteger(options.logSeverityLevel) ||\n options.logSeverityLevel < 0 || options.logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${options.logSeverityLevel}`);\n }\n\n if (options?.logVerbosityLevel === undefined) {\n runOptions.logVerbosityLevel = 0; // Default to 0\n } else if (typeof options.logVerbosityLevel !== 'number' || !Number.isInteger(options.logVerbosityLevel)) {\n throw new Error(`log verbosity level is not valid: ${options.logVerbosityLevel}`);\n }\n\n if (options?.terminate === undefined) {\n runOptions.terminate = false;\n }\n\n let tagDataOffset = 0;\n if (options?.tag !== undefined) {\n tagDataOffset = allocWasmString(options.tag, allocs);\n }\n\n runOptionsHandle = wasm._OrtCreateRunOptions(\n runOptions.logSeverityLevel!, runOptions.logVerbosityLevel!, !!runOptions.terminate!, tagDataOffset);\n if (runOptionsHandle === 0) {\n checkLastError('Can\\'t create run options.');\n }\n\n if (options?.extra !== undefined) {\n iterateExtraOptions(options.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddRunConfigEntry(runOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(`Can't set a run config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [runOptionsHandle, allocs];\n } catch (e) {\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nconst getGraphOptimzationLevel = (graphOptimizationLevel: string|unknown): number => {\n switch (graphOptimizationLevel) {\n case 'disabled':\n return 0;\n case 'basic':\n return 1;\n case 'extended':\n return 2;\n case 'all':\n return 99;\n default:\n throw new Error(`unsupported graph optimization level: ${graphOptimizationLevel}`);\n }\n};\n\nconst getExecutionMode = (executionMode: 'sequential'|'parallel'): number => {\n switch (executionMode) {\n case 'sequential':\n return 0;\n case 'parallel':\n return 1;\n default:\n throw new Error(`unsupported execution mode: ${executionMode}`);\n }\n};\n\nconst appendDefaultOptions = (options: InferenceSession.SessionOptions): void => {\n if (!options.extra) {\n options.extra = {};\n }\n if (!options.extra.session) {\n options.extra.session = {};\n }\n const session = options.extra.session as Record;\n if (!session.use_ort_model_bytes_directly) {\n // eslint-disable-next-line camelcase\n session.use_ort_model_bytes_directly = '1';\n }\n\n // if using JSEP with WebGPU, always disable memory pattern\n if (options.executionProviders &&\n options.executionProviders.some(ep => (typeof ep === 'string' ? ep : ep.name) === 'webgpu')) {\n options.enableMemPattern = false;\n }\n};\n\nconst setExecutionProviders =\n (sessionOptionsHandle: number, executionProviders: readonly InferenceSession.ExecutionProviderConfig[],\n allocs: number[]): void => {\n for (const ep of executionProviders) {\n let epName = typeof ep === 'string' ? ep : ep.name;\n\n // check EP name\n switch (epName) {\n case 'webnn':\n epName = 'WEBNN';\n if (typeof ep !== 'string') {\n const webnnOptions = ep as InferenceSession.WebNNExecutionProviderOption;\n // const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n if (deviceType) {\n const keyDataOffset = allocWasmString('deviceType', allocs);\n const valueDataOffset = allocWasmString(deviceType, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(`Can't set a session config entry: 'deviceType' - ${deviceType}.`);\n }\n }\n }\n break;\n case 'webgpu':\n epName = 'JS';\n if (typeof ep !== 'string') {\n const webgpuOptions = ep as InferenceSession.WebGpuExecutionProviderOption;\n if (webgpuOptions?.preferredLayout) {\n if (webgpuOptions.preferredLayout !== 'NCHW' && webgpuOptions.preferredLayout !== 'NHWC') {\n throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${webgpuOptions.preferredLayout}`);\n }\n const keyDataOffset = allocWasmString('preferredLayout', allocs);\n const valueDataOffset = allocWasmString(webgpuOptions.preferredLayout, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(\n `Can't set a session config entry: 'preferredLayout' - ${webgpuOptions.preferredLayout}.`);\n }\n }\n }\n break;\n case 'wasm':\n case 'cpu':\n continue;\n default:\n throw new Error(`not supported execution provider: ${epName}`);\n }\n\n const epNameDataOffset = allocWasmString(epName, allocs);\n if (getInstance()._OrtAppendExecutionProvider(sessionOptionsHandle, epNameDataOffset) !== 0) {\n 
checkLastError(`Can't append execution provider: ${epName}.`);\n }\n }\n };\n\nexport const setSessionOptions = (options?: InferenceSession.SessionOptions): [number, number[]] => {\n const wasm = getInstance();\n let sessionOptionsHandle = 0;\n const allocs: number[] = [];\n\n const sessionOptions: InferenceSession.SessionOptions = options || {};\n appendDefaultOptions(sessionOptions);\n\n try {\n const graphOptimizationLevel = getGraphOptimzationLevel(sessionOptions.graphOptimizationLevel ?? 'all');\n const executionMode = getExecutionMode(sessionOptions.executionMode ?? 'sequential');\n const logIdDataOffset =\n typeof sessionOptions.logId === 'string' ? allocWasmString(sessionOptions.logId, allocs) : 0;\n\n const logSeverityLevel = sessionOptions.logSeverityLevel ?? 2; // Default to 2 - warning\n if (!Number.isInteger(logSeverityLevel) || logSeverityLevel < 0 || logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${logSeverityLevel}`);\n }\n\n const logVerbosityLevel = sessionOptions.logVerbosityLevel ?? 0; // Default to 0 - verbose\n if (!Number.isInteger(logVerbosityLevel) || logVerbosityLevel < 0 || logVerbosityLevel > 4) {\n throw new Error(`log verbosity level is not valid: ${logVerbosityLevel}`);\n }\n\n const optimizedModelFilePathOffset = typeof sessionOptions.optimizedModelFilePath === 'string' ?\n allocWasmString(sessionOptions.optimizedModelFilePath, allocs) :\n 0;\n\n sessionOptionsHandle = wasm._OrtCreateSessionOptions(\n graphOptimizationLevel, !!sessionOptions.enableCpuMemArena, !!sessionOptions.enableMemPattern, executionMode,\n !!sessionOptions.enableProfiling, 0, logIdDataOffset, logSeverityLevel, logVerbosityLevel,\n optimizedModelFilePathOffset);\n if (sessionOptionsHandle === 0) {\n checkLastError('Can\\'t create session options.');\n }\n\n if (sessionOptions.executionProviders) {\n setExecutionProviders(sessionOptionsHandle, sessionOptions.executionProviders, allocs);\n }\n\n if (sessionOptions.enableGraphCapture !== undefined) {\n if (typeof sessionOptions.enableGraphCapture !== 'boolean') {\n throw new Error(`enableGraphCapture must be a boolean value: ${sessionOptions.enableGraphCapture}`);\n }\n const keyDataOffset = allocWasmString('enableGraphCapture', allocs);\n const valueDataOffset = allocWasmString(sessionOptions.enableGraphCapture.toString(), allocs);\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(\n `Can't set a session config entry: 'enableGraphCapture' - ${sessionOptions.enableGraphCapture}.`);\n }\n }\n\n if (sessionOptions.freeDimensionOverrides) {\n for (const [name, value] of Object.entries(sessionOptions.freeDimensionOverrides)) {\n if (typeof name !== 'string') {\n throw new Error(`free dimension override name must be a string: ${name}`);\n }\n if (typeof value !== 'number' || !Number.isInteger(value) || value < 0) {\n throw new Error(`free dimension override value must be a non-negative integer: ${value}`);\n }\n const nameOffset = allocWasmString(name, allocs);\n if (wasm._OrtAddFreeDimensionOverride(sessionOptionsHandle, nameOffset, value) !== 0) {\n checkLastError(`Can't set a free dimension override: ${name} - ${value}.`);\n }\n }\n }\n\n if (sessionOptions.extra !== undefined) {\n iterateExtraOptions(sessionOptions.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, 
valueDataOffset) !== 0) {\n checkLastError(`Can't set a session config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [sessionOptionsHandle, allocs];\n } catch (e) {\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// This file includes common definitions. They do NOT have dependency on the WebAssembly instance.\n\n/**\n * Copied from ONNX definition. Use this to drop dependency 'onnx_proto' to decrease compiled .js file size.\n */\nexport const enum DataType {\n undefined = 0,\n float = 1,\n uint8 = 2,\n int8 = 3,\n uint16 = 4,\n int16 = 5,\n int32 = 6,\n int64 = 7,\n string = 8,\n bool = 9,\n float16 = 10,\n double = 11,\n uint32 = 12,\n uint64 = 13,\n complex64 = 14,\n complex128 = 15,\n bfloat16 = 16\n}\n\n/**\n * Map string tensor data to enum value\n */\nexport const tensorDataTypeStringToEnum = (type: string): DataType => {\n switch (type) {\n case 'int8':\n return DataType.int8;\n case 'uint8':\n return DataType.uint8;\n case 'bool':\n return DataType.bool;\n case 'int16':\n return DataType.int16;\n case 'uint16':\n return DataType.uint16;\n case 'int32':\n return DataType.int32;\n case 'uint32':\n return DataType.uint32;\n case 'float16':\n return DataType.float16;\n case 'float32':\n return DataType.float;\n case 'float64':\n return DataType.double;\n case 'string':\n return DataType.string;\n case 'int64':\n return DataType.int64;\n case 'uint64':\n return DataType.uint64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n};\n\n/**\n * Map enum value to string tensor data\n */\nexport const tensorDataTypeEnumToString = (typeProto: DataType): Tensor.Type => {\n switch (typeProto) {\n case DataType.int8:\n return 'int8';\n case DataType.uint8:\n return 'uint8';\n case DataType.bool:\n return 'bool';\n case DataType.int16:\n return 'int16';\n case DataType.uint16:\n return 'uint16';\n case DataType.int32:\n return 'int32';\n case DataType.uint32:\n return 'uint32';\n case DataType.float16:\n return 'float16';\n case DataType.float:\n return 'float32';\n case DataType.double:\n return 'float64';\n case DataType.string:\n return 'string';\n case DataType.int64:\n return 'int64';\n case DataType.uint64:\n return 'uint64';\n\n default:\n throw new Error(`unsupported data type: ${typeProto}`);\n }\n};\n\n/**\n * get tensor element size in bytes by the given data type\n * @returns size in integer or undefined if the data type is not supported\n */\nexport const getTensorElementSize = (dateType: number): number|\n undefined => [undefined, 4, 1, 1, 2, 2, 4, 8, undefined, 1, 2, 8, 4, 8, undefined, undefined, undefined][dateType];\n\n/**\n * get typed array constructor by the given tensor type\n */\nexport const tensorTypeToTypedArrayConstructor = (type: Tensor.Type): Float32ArrayConstructor|Uint8ArrayConstructor|\n Int8ArrayConstructor|Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|\n Uint8ArrayConstructor|Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor => {\n switch (type) {\n case 'float16':\n // allow 
Float16Array polyfill.\n return typeof Float16Array !== 'undefined' && Float16Array.from ? Float16Array : Uint16Array;\n case 'float32':\n return Float32Array;\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'uint16':\n return Uint16Array;\n case 'int16':\n return Int16Array;\n case 'int32':\n return Int32Array;\n case 'bool':\n return Uint8Array;\n case 'float64':\n return Float64Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'uint64':\n return BigUint64Array;\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n };\n\n/**\n * Map string log level to integer value\n */\nexport const logLevelStringToEnum = (logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal'): number => {\n switch (logLevel) {\n case 'verbose':\n return 0;\n case 'info':\n return 1;\n case 'warning':\n return 2;\n case 'error':\n return 3;\n case 'fatal':\n return 4;\n default:\n throw new Error(`unsupported logging level: ${logLevel}`);\n }\n};\n\n/**\n * Check whether the given tensor type is supported by GPU buffer\n */\nexport const isGpuBufferSupportedType = (type: Tensor.Type): type is Tensor.GpuBufferDataTypes => type === 'float32' ||\n type === 'float16' || type === 'int32' || type === 'int64' || type === 'uint32' || type === 'uint8' ||\n type === 'bool';\n\n/**\n * Map string data location to integer value\n */\nexport const dataLocationStringToEnum = (location: Tensor.DataLocation): number => {\n switch (location) {\n case 'none':\n return 0;\n case 'cpu':\n return 1;\n case 'cpu-pinned':\n return 2;\n case 'texture':\n return 3;\n case 'gpu-buffer':\n return 4;\n default:\n throw new Error(`unsupported data location: ${location}`);\n }\n};\n\n/**\n * Map integer data location to string value\n */\nexport const dataLocationEnumToString = (location: number): Tensor.DataLocation|undefined =>\n (['none', 'cpu', 'cpu-pinned', 'texture', 'gpu-buffer'] as const)[location];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {isNode} from './wasm-utils-env';\n\n/**\n * Load a file into a Uint8Array.\n *\n * @param file - the file to load. Can be a URL/path, a Blob, an ArrayBuffer, or a Uint8Array.\n * @returns a Uint8Array containing the file data.\n */\nexport const loadFile = async(file: string|Blob|ArrayBufferLike|Uint8Array): Promise => {\n if (typeof file === 'string') {\n if (isNode) {\n // load file into ArrayBuffer in Node.js\n try {\n const {readFile} = require('node:fs/promises');\n return new Uint8Array(await readFile(file));\n } catch (e) {\n if (e.code === 'ERR_FS_FILE_TOO_LARGE') {\n // file is too large, use fs.createReadStream instead\n const {createReadStream} = require('node:fs');\n const stream = createReadStream(file);\n const chunks: Uint8Array[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n return new Uint8Array(Buffer.concat(chunks));\n }\n throw e;\n }\n } else {\n // load file into ArrayBuffer in browsers\n const response = await fetch(file);\n if (!response.ok) {\n throw new Error(`failed to load external data file: ${file}`);\n }\n const contentLengthHeader = response.headers.get('Content-Length');\n const fileSize = contentLengthHeader ? parseInt(contentLengthHeader, 10) : 0;\n if (fileSize < 1073741824 /* 1GB */) {\n // when Content-Length header is not set, we cannot determine the file size. 
We assume it is small enough to\n // load into memory.\n return new Uint8Array(await response.arrayBuffer());\n } else {\n // file is too large, use stream instead\n if (!response.body) {\n throw new Error(`failed to load external data file: ${file}, no response body.`);\n }\n const reader = response.body.getReader();\n\n let buffer;\n try {\n // try to create ArrayBuffer directly\n buffer = new ArrayBuffer(fileSize);\n } catch (e) {\n if (e instanceof RangeError) {\n // use WebAssembly Memory to allocate larger ArrayBuffer\n const pages = Math.ceil(fileSize / 65536);\n buffer = new WebAssembly.Memory({initial: pages, maximum: pages}).buffer;\n } else {\n throw e;\n }\n }\n\n let offset = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const {done, value} = await reader.read();\n if (done) {\n break;\n }\n const chunkSize = value.byteLength;\n const chunk = new Uint8Array(buffer, offset, chunkSize);\n chunk.set(value);\n offset += chunkSize;\n }\n return new Uint8Array(buffer, 0, fileSize);\n }\n }\n\n } else if (file instanceof Blob) {\n return new Uint8Array(await file.arrayBuffer());\n } else if (file instanceof Uint8Array) {\n return file;\n } else {\n return new Uint8Array(file);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {logLevelStringToEnum} from '../wasm-common';\n\ntype LogLevel = NonNullable;\ntype MessageString = string;\ntype MessageFunction = () => string;\ntype Message = MessageString|MessageFunction;\n\nconst logLevelPrefix = ['V', 'I', 'W', 'E', 'F'];\n\nconst doLog = (level: number, message: string): void => {\n // eslint-disable-next-line no-console\n console.log(`[${logLevelPrefix[level]},${new Date().toISOString()}]${message}`);\n};\n\nlet configLogLevel: LogLevel|undefined;\nlet debug: boolean|undefined;\n\nexport const configureLogger = ($configLogLevel: LogLevel, $debug: boolean): void => {\n configLogLevel = $configLogLevel;\n debug = $debug;\n};\n\n/**\n * A simple logging utility to log messages to the console.\n */\nexport const LOG = (logLevel: LogLevel, msg: Message): void => {\n const messageLevel = logLevelStringToEnum(logLevel);\n const configLevel = logLevelStringToEnum(configLogLevel);\n if (messageLevel >= configLevel) {\n doLog(messageLevel, typeof msg === 'function' ? msg() : msg);\n }\n};\n\n/**\n * A simple logging utility to log messages to the console. Only logs when debug is enabled.\n */\nexport const LOG_DEBUG: typeof LOG = (...args: Parameters) => {\n if (debug) {\n LOG(...args);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\nimport {tensorTypeToTypedArrayConstructor} from '../wasm-common';\n\nexport const createView = (dataBuffer: ArrayBuffer, type: Tensor.Type): Int32Array|Uint32Array|BigInt64Array|\n BigUint64Array|Uint8Array|Float32Array|Float64Array|Int8Array|Int16Array|Uint16Array =>\n new (tensorTypeToTypedArrayConstructor(type))(dataBuffer);\n\n/**\n * a TensorView does not own the data.\n */\nexport interface TensorView {\n readonly data: number;\n readonly dataType: number;\n readonly dims: readonly number[];\n\n /**\n * get a Float32Array data view of the tensor data. tensor data must be on CPU.\n */\n getFloat32Array(): Float32Array;\n\n /**\n * get a BigInt64Array data view of the tensor data. 
tensor data must be on CPU.\n */\n getBigInt64Array(): BigInt64Array;\n\n /**\n * get a Int32Array data view of the tensor data. tensor data must be on CPU.\n */\n getInt32Array(): Int32Array;\n\n /**\n * create a new tensor view with the same data but different dimensions.\n */\n reshape(newDims: readonly number[]): TensorView;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../wasm-common';\nimport {TensorView} from '../tensor-view';\n\nimport {ShaderHelper} from './ops/common';\n\nexport type SessionState = 'default'|'capturing'|'replaying';\n\nexport enum GpuDataType {\n default = 0,\n upload = 1,\n profile = 2\n}\nexport type GpuDataId = number;\n\nexport type GpuArchitecture = 'ampere';\nexport type GpuVendor = 'amd'|'intel'|'nvidia';\nexport interface AdapterInfo {\n isArchitecture: (architecture: GpuArchitecture) => boolean;\n isVendor: (vendor: GpuVendor) => boolean;\n}\n\nexport interface GpuData {\n type: GpuDataType;\n id: GpuDataId;\n buffer: GPUBuffer;\n}\n\nexport interface TensorInfo {\n dims: readonly number[];\n dataType: number;\n}\n\nexport interface ProgramUniform {\n type: DataType;\n data: number|readonly number[];\n}\n\nexport type ProgramUniformVariableInfo = [type: DataType, length: number];\n\n/**\n * Represent the dependency of a program on a specific input tensor.\n *\n * - 'none': the shader/uniform does not depend on this input's info\n * - 'type': the shader/uniform depends on data type of this input\n * - 'rank': the shader/uniform depends on data type and the rank of this input\n * - 'dims': the shader/uniform depends on data type and the dims of this input\n * - 'data': the shader/uniform depends on data type, the dims and the data of this input\n */\nexport type ProgramInputTensorInfoDependency = 'none'|'type'|'rank'|'dims'|'data';\n\n/**\n * Represent information about a program's cache for shader.\n */\nexport interface ProgramShaderCacheInfo {\n /**\n * an optional string as a cache hint in the artifact cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains initializing-time information, such as the attributes or any information of\n * initializers. It should NOT contain any runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'dims' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n/**\n * Represent information about a program's cache for uniform.\n */\nexport interface ProgramUniformCacheInfo {\n /**\n * an optional string as a cache hint in the uniform cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'none' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n\n /**\n * an optional object describing the cache information of the program shader.\n *\n * If this is not specified, assume hint is empty and inputDependencies are ['dims'] for all inputs.\n */\n shaderCache?: ProgramShaderCacheInfo;\n\n /**\n * the shader's processing source code.\n *\n * This function will be called when shader cache missed.\n */\n getShaderSource: (shaderHelper: ShaderHelper) => string;\n\n /**\n * A function to get run data required to run the program.\n *\n * This function will be called every time the program is executed. Should keep this function as simple as possible.\n */\n getRunData: (inputs: readonly TensorView[]) => {\n outputs: readonly TensorInfo[];\n dispatchGroup: {x: number; y?: number; z?: number};\n programUniforms?: readonly ProgramUniform[];\n };\n}\n\nexport interface Artifact {\n programInfo: ProgramInfo;\n computePipeline: GPUComputePipeline;\n uniformVariablesInfo: readonly ProgramUniformVariableInfo[]|undefined;\n}\n\nexport interface ComputeContextInputsOutputsMapping {\n /**\n * specify the mapping to the program's inputs. the value can be a number or a tensor view.\n * - if it's a number, it's the index of the kernel's input\n * - if it's a tensor view, it's an existing tensor view that will be used as the input\n *\n * if inputs is not specified, the mapping will be the kernel's inputs in order.\n */\n readonly inputs?: ReadonlyArray;\n /**\n * specify the mapping to the program's outputs. the value must be a number.\n * - if it's a non-negative number, it's the index of the kernel's output\n * - if it's -1, it's an output that will be created as a temporary value. this value will be released after\n * the kernel is executed.\n * - if it's -2, it's an output that will be created as a persistent value. this value will be released when the\n * kernel is released.\n *\n * if outputs is not specified, the mapping will be the kernel's outputs in order.\n */\n readonly outputs?: readonly number[];\n}\n\n/**\n * A ComputeContext instance carries the states that representing the current running of a kernel.\n */\nexport interface ComputeContext {\n /**\n * gpu adapter info\n */\n readonly adapterInfo: AdapterInfo;\n\n /**\n * stores the pointer to OpKernelContext\n */\n readonly opKernelContext: number;\n\n /**\n * a list of inputs, each input is an instance of TensorView\n */\n readonly inputs: readonly TensorView[];\n\n /**\n * a custom data object that can be used to store any data that is needed by the kernel\n */\n readonly kernelCustomData: {[key: string]: unknown};\n\n /**\n * a buffer that can be used to access custom data created each time the kernel is executed\n */\n readonly customDataBuffer: Uint8Array;\n\n /**\n * a number of outputs for the node\n */\n readonly outputCount: number;\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[];\n output(index: number, dims: readonly number[]): number;\n getMaxComputeWorkgroupSizes(): [number, number, number];\n getMaxComputeWorkgroupStoragesize(): number;\n}\n\nexport type TimestampQuery = 'none'|'inside-passes'|'at-passes';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {GpuData, GpuDataId, GpuDataType} from './types';\n\n/**\n * manages GpuDataId -> GpuBuffer\n */\nexport interface GpuDataManager {\n /**\n * copy data from CPU to GPU.\n */\n upload(id: GpuDataId, data: Uint8Array): void;\n /**\n * copy data from GPU to GPU.\n */\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void;\n /**\n * create new data on GPU.\n */\n create(size: number, usage?: number): GpuData;\n /**\n * get GPU data by ID.\n */\n get(id: GpuDataId): GpuData|undefined;\n /**\n * release the data on GPU by ID.\n *\n * @return size of the data released\n */\n release(id: GpuDataId): number;\n /**\n * copy data from GPU to CPU.\n */\n download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise;\n\n /**\n * refresh the buffers that marked for release.\n *\n * when release() is called, the buffer is not released immediately. this is because we need to wait for the commands\n * to be submitted to the GPU. this function is called after the commands are submitted so that the buffers can be\n * actually released.\n */\n refreshPendingBuffers(): void;\n\n /**\n * register an external buffer for IO Binding. If the buffer is already registered, return the existing GPU data ID.\n *\n * GPU data manager only manages a mapping between the buffer and the GPU data ID. It will not manage the lifecycle of\n * the external buffer.\n */\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number;\n\n /**\n * unregister an external buffer for IO Binding.\n */\n unregisterExternalBuffer(buffer: GPUBuffer): void;\n\n /**\n * destroy all gpu buffers.\n */\n dispose(): void;\n\n /**\n * release session related data.\n * @param sessionId - specify the session ID.\n */\n onReleaseSession(sessionId: number): void;\n}\n\ninterface StorageCacheValue {\n gpuData: GpuData;\n originalSize: number;\n}\n\nconst bucketFreelist: Map = new Map([\n [64, 250],\n [128, 200],\n [256, 200],\n [512, 200],\n [2048, 230],\n [4096, 200],\n [8192, 50],\n [16384, 50],\n [32768, 50],\n [65536, 50],\n [131072, 50],\n [262144, 50],\n [524288, 50],\n [1048576, 50],\n [2097152, 30],\n [4194304, 20],\n [8388608, 10],\n [12582912, 10],\n [16777216, 10],\n [26214400, 15],\n [33554432, 22],\n [44236800, 2],\n [58982400, 6],\n // we don't want to cache the bucket sizes below but not caching them\n // results in some major performance hits for models like sd-turbo.\n [67108864, 6],\n [134217728, 6],\n [167772160, 6],\n]);\n\nconst bucketArr: number[] = [];\n\n/**\n * normalize the buffer size so that it fits the 128-bits (16 bytes) alignment.\n */\nconst calcNormalizedBufferSize = (size: number) => Math.ceil(size / 16) * 16;\n\n/**\n * calculate the buffer size so that it fits into buckets.\n */\nconst calcBucketBufferSize = (size: number) => {\n for (let idx = 0; idx < bucketArr.length; idx++) {\n const sizeForBucket = bucketArr[idx];\n if (size <= sizeForBucket) {\n return sizeForBucket;\n }\n }\n // not in bucket list -> caller will not cache, round up to 16.\n return Math.ceil(size / 16) * 16;\n};\n\nlet guid = 1;\nconst createNewGpuDataId = () => guid++;\n\n/**\n * exported standard download function. 
This function is used by the session to download the data from GPU, and also by\n * factory to create GPU tensors with the capacity of downloading data from GPU.\n *\n * @param backend - the WebGPU backend\n * @param gpuBuffer - the GPU buffer to download\n * @param originalSize - the original size of the data\n * @param getTargetBuffer - optional. If provided, the data will be copied to the target buffer. Otherwise, a new buffer\n * will be created and returned.\n */\nexport const downloadGpuData =\n async(backend: WebGpuBackend, gpuBuffer: GPUBuffer, originalSize: number, getTargetBuffer?: () => Uint8Array):\n Promise => {\n const bufferSize = calcNormalizedBufferSize(originalSize);\n const gpuReadBuffer = backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: bufferSize, usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ});\n try {\n const commandEncoder = backend.getCommandEncoder();\n backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n gpuBuffer /* source buffer */, 0 /* source offset */, gpuReadBuffer /* destination buffer */,\n 0 /* destination offset */, bufferSize /* size */\n );\n backend.flush();\n\n await gpuReadBuffer.mapAsync(GPUMapMode.READ);\n\n const arrayBuffer = gpuReadBuffer.getMappedRange();\n if (getTargetBuffer) {\n // if we already have a CPU buffer to accept the data, no need to clone the ArrayBuffer.\n const targetBuffer = getTargetBuffer();\n targetBuffer.set(new Uint8Array(arrayBuffer, 0, originalSize));\n return targetBuffer;\n } else {\n // the mapped ArrayBuffer will be released when the GPU buffer is destroyed. Need to clone the\n // ArrayBuffer.\n return new Uint8Array(arrayBuffer.slice(0, originalSize));\n }\n } finally {\n gpuReadBuffer.destroy();\n }\n };\n\nclass GpuDataManagerImpl implements GpuDataManager {\n // GPU Data ID => GPU Data ( storage buffer )\n private storageCache: Map;\n\n // pending buffers for uploading ( data is unmapped )\n private buffersForUploadingPending: GPUBuffer[];\n // pending buffers for computing\n private buffersPending: GPUBuffer[];\n\n // The reusable storage buffers for computing.\n private freeBuffers: Map;\n // The reusable uniform buffers\n private freeUniformBuffers: Map;\n\n // The external buffers registered users for IO Binding.\n private externalBuffers: Map;\n\n // The pendingBuffers for capture graph.\n // a SessionID -> GPUBuffer[] mapping.\n private capturedPendingBuffers: Map;\n\n constructor(private backend: WebGpuBackend) {\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.buffersForUploadingPending = [];\n this.buffersPending = [];\n this.externalBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n\n for (const [key, ] of bucketFreelist) {\n bucketArr.push(key);\n this.freeBuffers.set(key, []);\n this.freeUniformBuffers.set(key, []);\n }\n }\n\n upload(id: GpuDataId, data: Uint8Array): void {\n const srcArrayBuffer = data.buffer;\n const srcOffset = data.byteOffset;\n const srcLength = data.byteLength;\n const size = calcNormalizedBufferSize(srcLength);\n\n // get destination gpu buffer\n const gpuDataCache = this.storageCache.get(id);\n if (!gpuDataCache) {\n throw new Error('gpu data for uploading does not exist');\n }\n if (gpuDataCache.originalSize !== srcLength) {\n throw new Error(`inconsistent data size. 
gpu data size=${gpuDataCache.originalSize}, data size=${srcLength}`);\n }\n\n // create gpu buffer\n const gpuBufferForUploading = this.backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {mappedAtCreation: true, size, usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC});\n\n // copy (upload) data\n const arrayBuffer = gpuBufferForUploading.getMappedRange();\n new Uint8Array(arrayBuffer).set(new Uint8Array(srcArrayBuffer, srcOffset, srcLength));\n gpuBufferForUploading.unmap();\n\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(gpuBufferForUploading, 0, gpuDataCache.gpuData.buffer, 0, size);\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.upload(id=${id})`);\n\n this.buffersForUploadingPending.push(gpuBufferForUploading);\n }\n\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void {\n // get source gpu buffer\n const sourceGpuDataCache = this.storageCache.get(sourceId);\n if (!sourceGpuDataCache) {\n throw new Error('source gpu data for memcpy does not exist');\n }\n // get destination gpu buffer\n const destinationGpuDataCache = this.storageCache.get(destinationId);\n if (!destinationGpuDataCache) {\n throw new Error('destination gpu data for memcpy does not exist');\n }\n if (sourceGpuDataCache.originalSize !== destinationGpuDataCache.originalSize) {\n throw new Error('inconsistent source and destination gpu data size');\n }\n\n const size = calcNormalizedBufferSize(sourceGpuDataCache.originalSize);\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n sourceGpuDataCache.gpuData.buffer, 0, destinationGpuDataCache.gpuData.buffer, 0, size);\n }\n\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number {\n let id: number|undefined;\n if (previousBuffer) {\n id = this.externalBuffers.get(previousBuffer);\n if (id === undefined) {\n throw new Error('previous buffer is not registered');\n }\n if (buffer === previousBuffer) {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${\n id}, buffer is the same, skip.`);\n return id;\n } else if (this.backend.capturedCommandList.has(this.backend.currentSessionId!)) {\n throw new Error(`Registering a different external buffer under graph capture mode is not supported yet.\n Please use the previous external buffer!`);\n }\n this.externalBuffers.delete(previousBuffer);\n } else {\n id = createNewGpuDataId();\n }\n\n this.storageCache.set(id, {gpuData: {id, type: GpuDataType.default, buffer}, originalSize});\n this.externalBuffers.set(buffer, id);\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${id}, registered.`);\n return id;\n }\n\n unregisterExternalBuffer(buffer: GPUBuffer): void {\n const id = this.externalBuffers.get(buffer);\n if (id !== undefined) {\n this.storageCache.delete(id);\n this.externalBuffers.delete(buffer);\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${id}`);\n }\n }\n\n // eslint-disable-next-line no-bitwise\n create(size: number, usage = GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST): GpuData {\n const bufferSize = calcBucketBufferSize(size);\n\n let gpuBuffer;\n // Currently, only storage buffers are reused.\n // eslint-disable-next-line no-bitwise\n const isStorage = 
(usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE;\n // eslint-disable-next-line no-bitwise\n const isUniform = (usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM;\n if (isStorage || isUniform) {\n const freeBuffers = isStorage ? this.freeBuffers : this.freeUniformBuffers;\n const buffers = freeBuffers.get(bufferSize);\n if (!buffers) {\n // no such bucket/freelist - create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n } else {\n if (buffers.length > 0) {\n // in freelist, use it\n gpuBuffer = buffers.pop() as GPUBuffer;\n } else {\n // bucket empty, create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n }\n } else {\n // create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n\n const gpuData = {id: createNewGpuDataId(), type: GpuDataType.default, buffer: gpuBuffer};\n this.storageCache.set(gpuData.id, {gpuData, originalSize: size});\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.create(size=${size}) => id=${gpuData.id}`);\n return gpuData;\n }\n\n get(id: GpuDataId): GpuData|undefined {\n return this.storageCache.get(id)?.gpuData;\n }\n\n release(id: GpuDataId): number {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('releasing data does not exist');\n }\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.release(id=${id}), gpuDataId=${cachedData.gpuData.id}`);\n\n this.storageCache.delete(id);\n this.buffersPending.push(cachedData.gpuData.buffer);\n // cachedData.gpuData.buffer.destroy();\n\n return cachedData.originalSize;\n }\n\n async download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('data does not exist');\n }\n await downloadGpuData(this.backend, cachedData.gpuData.buffer, cachedData.originalSize, getTargetBuffer);\n }\n\n refreshPendingBuffers(): void {\n for (const buffer of this.buffersForUploadingPending) {\n // upload buffer is only useful in the session creation time. 
So we don't need to reuse them in session running.\n buffer.destroy();\n }\n this.buffersForUploadingPending = [];\n\n if (this.buffersPending.length === 0) {\n return;\n }\n\n if (this.backend.sessionStatus === 'default') {\n for (const buffer of this.buffersPending) {\n const maxInFreeList = bucketFreelist.get(buffer.size);\n\n // eslint-disable-next-line no-bitwise\n if ((buffer.usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE) {\n // Put the pending buffer to freeBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n // eslint-disable-next-line no-bitwise\n } else if ((buffer.usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM) {\n // Put the pending buffer to freeUniformBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeUniformBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n } else {\n buffer.destroy();\n }\n }\n this.buffersPending = [];\n } else {\n // Don't release intermediate tensors in non-default mode.\n // TODO: reuse the storage buffers in non-default mode.\n let capturedBuffers = this.capturedPendingBuffers.get(this.backend.currentSessionId!);\n if (!capturedBuffers) {\n capturedBuffers = [];\n this.capturedPendingBuffers.set(this.backend.currentSessionId!, capturedBuffers);\n }\n for (const buffer of this.buffersPending) {\n capturedBuffers.push(buffer);\n }\n this.buffersPending = [];\n }\n }\n\n dispose() {\n this.freeBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.freeUniformBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n\n this.storageCache.forEach((storage) => {\n storage.gpuData.buffer.destroy();\n });\n\n this.capturedPendingBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n }\n\n onReleaseSession(sessionId: number) {\n // release the captured pending buffers.\n const pendingBuffers = this.capturedPendingBuffers.get(sessionId);\n if (pendingBuffers) {\n pendingBuffers.forEach(buffer => {\n buffer.destroy();\n });\n this.capturedPendingBuffers.delete(sessionId);\n }\n }\n}\n\nexport const createGpuDataManager = (...args: ConstructorParameters): GpuDataManager =>\n new GpuDataManagerImpl(...args);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\n/**\n * create a new object from the given attribute, and add a cacheKey property to it\n */\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable no-param-reassign */\n\nexport class MatMulUtil {\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n const max = Math.max(aLen, bLen);\n if (aLen && bLen) {\n cdims[crank - i] = Math.max(aLen, bLen);\n } else {\n // when either aLen or bLen is 0, the other should be either 0 or 1, otherwise it is not broadcastable.\n if (max > 1) {\n return undefined;\n }\n cdims[crank - i] = 0;\n }\n }\n\n return cdims;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n}\n\n\nexport class ShapeUtil {\n /**\n * calculate the size (number of elements)\n */\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n /**\n * convert dims corresponding to type change to pack. ex. 
uint8 data to uint32\n */\n static convertShape(dims: readonly number[], size = 4): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n }\n const newDims = new Array(rank);\n let i = rank - 1;\n while (i >= 0) {\n if (dims[i] % size === 0) {\n newDims[i] = dims[i] / size;\n break;\n }\n if (size % dims[i] !== 0) {\n throw new Error('cannot convert shape');\n }\n newDims[i] = 1;\n size /= dims[i];\n i--;\n }\n for (i--; i >= 0; i--) {\n newDims[i] = dims[i];\n }\n return newDims;\n }\n\n /**\n * calculate the size (number of elements) from the given axis (inclusive)\n */\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n /**\n * calculate the size (number of elements) to the given axis (exclusive)\n */\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n /**\n * calculate the size (number of elements) from and to the given axis [start, end)\n */\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be negative.\n if (dims[i] < 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. Most likely the range contains negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank?: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank ?? 
axes.length));\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]): void {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], 
isChannelLast: boolean, autoPad?: string): void {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + (isChannelLast ? 1 : 2)], strides[dim], dilations[dim], kernelShape[dim], pads, dim,\n dim + inputDims.length - 2, autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {ShapeUtil} from '../../util';\nimport {ProgramUniform, ProgramUniformVariableInfo} from '../types';\n\n/**\n * constant value for a workgroup size.\n *\n * We definitely can do further optimization in future, but for now we use 64.\n *\n * rule of thumb: Use [a workgroup size of] 64 unless you know what GPU you are targeting or that your workload\n * needs something different.\n *\n * from: https://surma.dev/things/webgpu/\n **/\nexport const WORKGROUP_SIZE = 64;\n\ninterface IndicesHelperTypes {\n /**\n * WGSL type of indices expression\n */\n readonly indices: string;\n\n /**\n * WGSL type of a value\n */\n readonly value: string;\n\n /**\n * WGSL type of storage type representing a value\n *\n * This is usually the same to `value`, but for some type (eg. bool), we need to use `u32` as storage type for\n * value type `vec4`\n */\n readonly storage: string;\n\n /**\n * tensor type as represented in TensorView\n */\n readonly tensor: number;\n}\n\n/**\n * A helper class for generating WGSL code for manipulating indices and data for a shader's input or output.\n *\n * This class is designed to offer a unified way to generate WGSL code for manipulating indices and data for a shader's\n * input or output.\n *\n * The following is a list of terminologies used in this class:\n * - `offset`: a uint32 value representing the offset of an element in the data buffer.\n * - `indices`: an abstraction of a multi-dimensional array's indices representing the data's index on each dimension.\n * - `value`: a value of a data element.\n *\n * Users are expected to create an instance of this class for each shader's input or output, and use the instance to\n * generate WGSL code for manipulating indices and data. The following 2 exported functions are for users to call to\n * create an instance of an indices helper:\n * - `inputVariable()`: create an indices helper instance for an input.\n * - `outputVariable()`: create an indices helper instance for an output.\n * - `internalVariable()`: create an indices helper instance for an internal variable.\n *\n * An indices helper instance contains helper functions for the following operations:\n * - access readonly basic information, including: `name`(the name of the input or output), `usage`(whether it's an\n * input, an output or an internal variable) and `shape`(the passed in shape).\n * - `type`: access readonly type information, including: `indices`(the type of indices), `value`(the type of value at\n * runtime), `storage`(the type of value at storage) and `tensor`(the tensor type as represented in TensorView).\n * - generate WGSL code for getting indices from offset. 
Use `offsetToIndices()` for WGSL code snippet to calculate\n * indices from offset, and use `indicesToOffset()` for WGSL code snippet to calculate offset from indices.\n * - to manipulate an instance of indices, use `setIndices()` and `getIndices()` to set and get the indices on an\n * indices variable.\n * - to manipulate data, use `set()`/`get()` to access data at the given indices from parameter list, use\n * `setByIndices()`/`getByIndices()` to access data at the given indices from an indices variable, and use\n * `setByOffset()`/`getByOffset()` to access data at the given offset.\n * - `impl`: get WGSL code of function implementation for the util functions mentioned above.\n */\nexport interface IndicesHelper {\n /**\n * get WGSL code of function implementation for the util functions.\n *\n */\n readonly impl: () => string;\n\n /**\n * get type info\n */\n readonly type: IndicesHelperTypes;\n\n /**\n * WGSL code of a expression for getting indices from offset.\n *\n * @param varOffset - a u32 expression representing the offset.\n *\n * @returns an `type.indices` expression\n */\n readonly offsetToIndices: (varOffset: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting offset from indices.\n *\n * @param varIndices - a `type.indices` expression representing the indices.\n *\n * @returns an `u32` expression\n */\n readonly indicesToOffset: (varIndices: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting original offset from broadcasted indices.\n *\n * @param varIndices - a `type.indices` expression representing the output indices.\n * @param output - output IndicesHelper.\n *\n * @returns an `u32` expression\n */\n readonly broadcastedIndicesToOffset: (varIndices: string, output: IndicesHelper) => string;\n\n /**\n * WGSL code of generating an indices literal\n *\n * @param init - initial value.\n */\n readonly indices: (...init: ReadonlyArray) => string;\n\n /**\n * WGSL code of a statement for setting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to set. can be a number or a string (WGSL `u32` expression).\n * @param value - the value to set. can be a number or a string (WGSL `u32` expression).\n *\n * @returns a WGSL statement\n */\n readonly indicesSet: (varIndices: string, idx: number|string, value: number|string) => void;\n\n /**\n * WGSL code of an `u32` expression for getting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to get. can be a number or a string (WGSL `u32` expression).\n *\n * @returns an `u32` expression\n */\n readonly indicesGet: (varIndices: string, idx: number|string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices.\n *\n * @param indicesAndValue - an array of numbers or strings (WGSL `u32` expression) representing the indices, followed\n * by the value to set. This array should have exactly `shape.length + 1` elements.\n */\n readonly set: (...indicesAndValue: ReadonlyArray) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n * @param value - the value to set. 
should be a WGSL expression.\n */\n readonly setByIndices: (varIndices: string, value: string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n * @param value - the value to set. should be a WGSL expression.\n */\n readonly setByOffset: (offset: number|string, value: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices.\n *\n * @param indices - an array of numbers or strings (WGSL `u32` expression) representing the indices.\n */\n readonly get: (...indices: ReadonlyArray) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n */\n readonly getByIndices: (varIndices: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n */\n readonly getByOffset: (offset: number|string) => string;\n\n /**\n * name of the data variable\n */\n readonly name: string;\n\n /**\n * whether the helper is for an input, an output or an internal variable.\n */\n readonly usage: 'input'|'output'|'internal';\n\n /**\n * the rank of the input or output.\n */\n readonly rank: number;\n\n /**\n * a string representing the variable name for the shape of the input or output.\n */\n readonly shape: string;\n\n /**\n * a string representing the variable name for the strides of the input or output.\n */\n readonly strides: string;\n}\n\nconst getWgslMappedType = (type: number, components: 1|2|3|4): string|[string, string] => {\n if (components === 3) {\n throw new Error('vec3 has same alignment as vec4, use vec4 instead');\n }\n\n // return type is [ storage type, runtime type ] or a single string for both\n switch (type) {\n case DataType.float16:\n return components > 1 ? `vec${components}` : 'f16';\n case DataType.float:\n return components > 1 ? `vec${components}` : 'f32';\n case DataType.int32:\n return components > 1 ? `vec${components}` : 'i32';\n case DataType.uint32:\n return components > 1 ? `vec${components}` : 'u32';\n case DataType.int64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'i32'];\n case DataType.uint64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'u32'];\n case DataType.bool:\n if (components !== 4) {\n throw new Error('bool must be vec4');\n }\n return ['u32', 'vec4'];\n\n default:\n throw new Error(`Unknown data type: ${type}`);\n }\n};\n\nexport const tensorTypeToWsglStorageType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? mappedType : mappedType[0];\n};\n\nexport const tensorTypeToWsglValueType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? 
mappedType : mappedType[1];\n};\n\nexport const createTensorShapeVariables = (...dims: ReadonlyArray): ProgramUniform[] => {\n const programUniforms: ProgramUniform[] = [];\n dims.forEach(dim => {\n if (dim.length !== 0) {\n programUniforms.push(\n {type: DataType.uint32, data: dim}, {type: DataType.uint32, data: ShapeUtil.computeStrides(dim)});\n }\n });\n return programUniforms;\n};\n\n/**\n * A helper function to get maximum vector size for specified data length\n * @param size\n */\nexport const getMaxComponents = (size: number) => {\n // we cannot use vec3 type since it has alignment of 16 bytes\n if (size % 4 === 0) {\n return 4;\n } else if (size % 2 === 0) {\n return 2;\n }\n\n return 1;\n};\n\n/**\n * A helper function that initializes variable as a scalar or vector. e.g. f32(0) or vec4f(0,0,0,0)\n * @param dataType\n * @param components\n * @param value\n */\nexport const fillVector = (dataType = 'f32', components?: number, value = '0') => {\n if (!components || components === 1) {\n return `${dataType}(${value})`;\n }\n\n return `vec${components}<${dataType}>(${value})`;\n};\n\n/**\n * A helper function that casts value or vector to f32\n * @param dataType\n * @param components\n * @param value\n */\nexport const castToF32 = (dataType: string, components: number, value: string) => {\n if (dataType === 'f32') {\n return value;\n }\n if (components === 1) {\n return `f32(${value})`;\n }\n\n return `vec${components}(${value})`;\n};\n\n/**\n * A helper function that returns scalar or sums all components of a vector\n * @param name\n * @param components\n */\nexport const sumVector = (name: string, components: number) => {\n if (components === 4) {\n return `(${name}.x + ${name}.y + ${name}.z + ${name}.w)`;\n } else if (components === 2) {\n return `(${name}.x + ${name}.y)`;\n } else if (components === 3) {\n return `(${name}.x + ${name}.y + ${name}.z)`;\n }\n\n return name;\n};\n\n/**\n * A helper function that returns variable element at index.\n * @param name - the name of variable.\n * @param index - the index of variable element.\n * @param length - the length of variable.\n * @param type - the type of variable, optional.\n */\nexport const getElementAt =\n (name: string, index: number|string, length: number, type?: UniformDataElementType): string => {\n if (name.startsWith('uniforms.') && length > 4) {\n if (typeof (index) === 'string') {\n if (type === 'f16') {\n return `${name}[(${index}) / 8][(${index}) % 8 / 4][(${index}) % 8 % 4]`;\n } else {\n return `${name}[(${index}) / 4][(${index}) % 4]`;\n }\n } else {\n if (type === 'f16') {\n return `${name}[${Math.floor(index / 8)}][${Math.floor(index % 8 / 4)}][${index % 8 % 4}]`;\n } else {\n return `${name}[${Math.floor(index / 4)}][${index % 4}]`;\n }\n }\n } else {\n return length > 1 ? `${name}[${index}]` : name;\n }\n };\n\n/**\n * A helper function to get a IndicesHelper for a given input or output.\n *\n * @param name - the name of the input or output.\n * @param tensorType - the tensor type of the input or output.\n * @param shapeOrRank - the tensor shape or the rank of the input or output.\n * @param usage - the usage of the indices helper.\n * @param components - indicates the number of components of each element. 
1 for scalar, 2 for vec2, 3 for vec3, 4 for\n * vec4.\n */\nconst createIndicesHelper =\n (name: string, tensorType: number, shapeOrRank: number|readonly number[], usage: IndicesHelper['usage'],\n components: 1|2|3|4): IndicesHelper => {\n const useUniform = typeof shapeOrRank === 'number';\n const rank = useUniform ? shapeOrRank : shapeOrRank.length;\n const rankIdentity = [...new Array(rank).keys()];\n const indicesType = rank < 2 ? 'u32' : rank <= 4 ? `vec${rank}` : `array`;\n const mappedType = getWgslMappedType(tensorType, components);\n const valueType = typeof mappedType === 'string' ? mappedType : mappedType[1];\n const storageType = typeof mappedType === 'string' ? mappedType : mappedType[0];\n const type = {indices: indicesType, value: valueType, storage: storageType, tensor: tensorType};\n\n const normalizeDim = (dim: number|string): string => typeof dim === 'string' ? dim : `${dim}u`;\n\n const implementationUsed = {\n offsetToIndices: false,\n indicesToOffset: false,\n broadcastedIndicesToOffset: false,\n set: false,\n setByIndices: false,\n get: false,\n getByIndices: false,\n };\n\n const uniformPrefix = useUniform ? 'uniforms.' : '';\n const shape = `${uniformPrefix}${name}_shape`;\n const strides = `${uniformPrefix}${name}_strides`;\n\n let o2iSnippet = '';\n for (let i = 0; i < rank - 1; i++) {\n o2iSnippet += `\n let dim${i} = current / ${getElementAt(strides, i, rank)};\n let rest${i} = current % ${getElementAt(strides, i, rank)};\n indices[${i}] = dim${i};\n current = rest${i};\n `;\n }\n o2iSnippet += `indices[${rank - 1}] = current;`;\n\n const offsetToIndicesImplementation = rank < 2 ? '' : `\n fn o2i_${name}(offset: u32) -> ${type.indices} {\n var indices: ${type.indices};\n var current = offset;\n ${o2iSnippet}\n return indices;\n }`;\n\n const offsetToIndices = (varOffset: string) => {\n implementationUsed.offsetToIndices = true;\n return rank < 2 ? varOffset : `o2i_${name}(${varOffset})`;\n };\n\n const offsets: string[] = [];\n if (rank >= 2) {\n for (let i = rank - 1; i >= 0; i--) {\n offsets.push(`${getElementAt(strides, i, rank)} * (indices[${i}])`);\n }\n }\n\n const indicesToOffsetImplementation = rank < 2 ? '' : `\n fn i2o_${name}(indices: ${type.indices}) -> u32 {\n return ${offsets.join('+')};\n }`;\n\n const indicesToOffset = (varIndices: string) => {\n implementationUsed.indicesToOffset = true;\n return rank < 2 ? varIndices : `i2o_${name}(${varIndices})`;\n };\n\n const indices = (...init: ReadonlyArray) =>\n rank === 0 ? 
'0u' : `${type.indices}(${init.map(normalizeDim).join(',')})`;\n\n const indicesGet = (varIndices: string, idx: number|string) => {\n if (rank < 2) {\n return `${varIndices}`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}`;\n }\n };\n\n const indicesSet = (varIndices: string, idx: number|string, value: string) => {\n if (rank < 2) {\n return `${varIndices}=${value};`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}=${value};`;\n }\n };\n\n const broadcastedIndicesToOffsetImplementation: {[key: string]: string} = {};\n const broadcastedIndicesToOffset = (varIndices: string, output: IndicesHelper) => {\n implementationUsed.broadcastedIndicesToOffset = true;\n const implKey = `${output.name}broadcastedIndicesTo${name}Offset`;\n if (implKey in broadcastedIndicesToOffsetImplementation) {\n return `${implKey}(${varIndices})`;\n }\n const offsets = [];\n for (let i = rank - 1; i >= 0; i--) {\n const idx = output.indicesGet('outputIndices', i + output.rank - rank);\n offsets.push(`${indicesGet(strides, i)} * (${idx} % ${indicesGet(shape, i)})`);\n }\n broadcastedIndicesToOffsetImplementation[implKey] =\n `fn ${implKey}(outputIndices: ${output.type.indices}) -> u32 {\n return ${offsets.length > 0 ? offsets.join('+') : '0u'};\n }`;\n\n return `${implKey}(${varIndices})`;\n };\n\n const setByOffset = (offset: number|string, value: string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]=${value};`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), select(0u, 0xFFFFFFFFu, ${value} < 0));`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), 0u);`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `${name}[${offset}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${value}));`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByOffset = (offset: number|string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `i32(${name}[${offset}].x)`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `u32(${name}[${offset}].x)`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `vec4(bool(${name}[${offset}] & 0xFFu), bool(${name}[${offset}] & 0xFF00u), bool(${name}[${\n offset}] & 0xFF0000u), bool(${name}[${offset}] & 0xFF000000u))`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByIndicesImplementation = rank < 2 ? '' : `\n fn get_${name}ByIndices(indices: ${type.indices}) -> ${valueType} {\n return ${getByOffset(`i2o_${name}(indices)`)};\n }`;\n\n const getImplementation = rank < 2 ? 
'' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn get_${name}(${functionParams}) -> ${valueType} {\n return get_${name}ByIndices(${indices(dimsParams)});\n }`;\n })();\n\n const get = (...indices: ReadonlyArray) => {\n if (indices.length !== rank) {\n throw new Error(`indices length must be ${rank}`);\n }\n\n const normalizedIndices = indices.map(normalizeDim).join(',');\n\n if (rank === 0) {\n return getByOffset('0u');\n } else if (rank === 1) {\n return getByOffset(normalizedIndices[0]);\n } else {\n implementationUsed.get = true;\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}(${normalizedIndices})`;\n }\n };\n\n const getByIndices = (varIndices: string) => {\n if (rank < 2) {\n return getByOffset(varIndices);\n } else {\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}ByIndices(${varIndices})`;\n }\n };\n\n const setByIndicesImplementation = rank < 2 ? '' : `\n fn set_${name}ByIndices(indices: ${type.indices}, value: ${valueType}) {\n ${setByOffset(`i2o_${name}(indices)`, 'value')}\n }`;\n\n const setImplementation = rank < 2 ? '' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn set_${name}(${functionParams}, value: ${valueType}) {\n set_${name}ByIndices(${indices(dimsParams)}, value);\n }`;\n })();\n\n const set = (...indicesAndValue: ReadonlyArray) => {\n if (indicesAndValue.length !== rank + 1) {\n throw new Error(`indices length must be ${rank}`);\n }\n const value = indicesAndValue[rank];\n if (typeof value !== 'string') {\n throw new Error('value must be string');\n }\n\n const normalizedIndices = indicesAndValue.slice(0, rank).map(normalizeDim).join(',');\n\n if (rank === 0) {\n return setByOffset('0u', value);\n } else if (rank === 1) {\n return setByOffset(normalizedIndices[0], value);\n } else {\n implementationUsed.set = true;\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}(${normalizedIndices}, ${value})`;\n }\n };\n\n const setByIndices = (varIndices: string, value: string) => {\n if (rank < 2) {\n return setByOffset(varIndices, value);\n } else {\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}ByIndices(${varIndices}, ${value});`;\n }\n };\n\n const impl = () => {\n const impls = [];\n let needShapeStrides = false;\n if (implementationUsed.offsetToIndices) {\n impls.push(offsetToIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.indicesToOffset) {\n impls.push(indicesToOffsetImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.broadcastedIndicesToOffset) {\n Object.values(broadcastedIndicesToOffsetImplementation).forEach(impl => impls.push(impl));\n needShapeStrides = true;\n }\n if (implementationUsed.set) {\n impls.push(setImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.setByIndices) {\n impls.push(setByIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.get) {\n impls.push(getImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.getByIndices) {\n impls.push(getByIndicesImplementation);\n needShapeStrides = true;\n }\n if (!useUniform && needShapeStrides) {\n impls.unshift(\n `const 
${shape} = ${type.indices}(${shapeOrRank.join(',')});`,\n `const ${strides} = ${type.indices}(${ShapeUtil.computeStrides(shapeOrRank).join(',')});`);\n }\n return impls.join('\\n');\n };\n\n return {\n impl,\n type,\n offsetToIndices,\n indicesToOffset,\n broadcastedIndicesToOffset,\n indices,\n indicesGet,\n indicesSet,\n set,\n setByOffset,\n setByIndices,\n get,\n getByOffset,\n getByIndices,\n // isVec4,\n usage,\n name,\n strides,\n shape,\n rank\n };\n };\n\n/**\n * Create a IndicesHelper for an input.\n *\n * @param name - the name of the input.\n * @param type - the tensor type of the input.\n * @param shapeOrRank - the tensor shape or the rank of the input.\n * @param components - the number of components of the input. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the input.\n */\nexport const inputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'input', components);\n\n/**\n * Create a IndicesHelper for an output.\n *\n * @param name - the name of the output.\n * @param type - the tensor type of the output.\n * @param shapeOrRank - the tensor shape or the rank of the output.\n * @param components - the number of components of the output. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the output.\n */\nexport const outputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'output', components);\n\n/**\n * Create a IndicesHelper for an internal variable.\n *\n * @param name - the name of the variable.\n * @param type - the tensor type of the variable.\n * @param shapeOrRank - the tensor shape or the rank of the variable.\n * @param components - the number of components of the variable. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the variable.\n */\nexport const internalVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'internal', components);\n\nexport type UniformDataElementType = 'u32'|'f16'|'f32'|'i32';\nexport type UniformsArrayType = Array<{name: string; type: UniformDataElementType; length?: number}>;\n\n/**\n * A ShaderHelper is a helper class for generating WGSL code.\n */\nexport interface ShaderHelper {\n /**\n * A helper function to generate the start of main function in WGSL source code.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param workgroupSize - an optional workgroup size. default is WORKGROUP_SIZE.\n */\n mainStart(workgroupSize?: number|[number, number, number]): string;\n\n /**\n * A helper function to generate the code snippet for guarding against out-of-bounds size.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n *\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param size - the size of the data to guard against. 
can be a number or a string (WGSL `u32` expression).\n */\n guardAgainstOutOfBoundsWorkgroupSizes(size: unknown): string;\n\n /**\n * A helper function to generate the code snippet for declaring multiple inputs or outputs.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n declareVariables(...variables: IndicesHelper[]): string;\n\n /**\n * A helper function to register one uniform. Can be called multiple times to register multiple uniforms.\n *\n * @param name - the name of the uniform.\n * @param type - the type of the uniform.\n * @param length - the length of the uniform, default to 1 when it is not provided.\n */\n registerUniform(name: string, type: string, length?: number): ShaderHelper;\n\n /**\n * A helper function to register multiple uniforms. Can be called multiple times to register multiple uniforms.\n *\n * @param uniforms - an array of uniforms. Each element of the array is an object with 2 properties: `name` and\n * `type`.\n */\n registerUniforms(uniforms: UniformsArrayType): ShaderHelper;\n\n /**\n * A helper function to register multiple internal variables. Can be called multiple times to register multiple\n * internal variables.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper;\n}\n\nclass ShaderHelperImpl implements ShaderHelper {\n constructor(private normalizedDispatchGroup: [number, number, number], private limits: GPUSupportedLimits) {}\n\n guardAgainstOutOfBoundsWorkgroupSizes(size: number|string): string {\n // Guard against out-of-bounds work group sizes\n const sizeInCode = typeof size === 'number' ? `${size}u` : size;\n return `if (global_idx >= ${sizeInCode}) { return; }`;\n }\n\n mainStart(workgroupSize: number|[number, number, number] = WORKGROUP_SIZE) {\n const workgroupSizeX = typeof workgroupSize === 'number' ? workgroupSize : workgroupSize[0];\n const workgroupSizeY = typeof workgroupSize === 'number' ? 1 : workgroupSize[1];\n const workgroupSizeZ = typeof workgroupSize === 'number' ? 1 : workgroupSize[2];\n\n if (workgroupSizeX > this.limits.maxComputeWorkgroupSizeX ||\n workgroupSizeY > this.limits.maxComputeWorkgroupSizeY ||\n workgroupSizeZ > this.limits.maxComputeWorkgroupSizeZ) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${\n this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);\n }\n\n if (workgroupSizeX * workgroupSizeY * workgroupSizeZ > this.limits.maxComputeInvocationsPerWorkgroup) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup invocations ${\n this.limits.maxComputeInvocationsPerWorkgroup}.`);\n }\n\n const is1DimensionDispatch = this.normalizedDispatchGroup[1] === 1 && this.normalizedDispatchGroup[2] === 1;\n const paramList = is1DimensionDispatch ? 
`@builtin(global_invocation_id) global_id : vec3,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(local_invocation_id) local_id : vec3` :\n `@builtin(global_invocation_id) global_id : vec3,\n @builtin(local_invocation_id) local_id : vec3,\n @builtin(local_invocation_index) local_idx : u32,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(num_workgroups) num_workgroups : vec3`;\n const globalIdxDefinition = is1DimensionDispatch ?\n 'let global_idx = global_id.x; let local_idx = local_id.x;' :\n `let global_idx = (workgroup_id.z * num_workgroups[0] * num_workgroups[1] +\n workgroup_id.y * num_workgroups[0] + workgroup_id.x) * ${\n workgroupSizeX * workgroupSizeY * workgroupSizeZ}u + local_idx;`;\n\n return `@compute @workgroup_size(${workgroupSizeX}, ${workgroupSizeY}, ${workgroupSizeZ})\n fn main(${paramList}) {\n ${globalIdxDefinition}\n `;\n }\n\n private appendVariableUniforms(variable: IndicesHelper): void {\n if (variable.rank !== 0) {\n if (variable.shape.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.shape.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n if (variable.strides.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.strides.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n }\n }\n\n private declareVariable(variable: IndicesHelper, bindingIndex: number): string {\n if (variable.usage === 'internal') {\n throw new Error('cannot use internal variable with declareVariable(). use registerInternalVariables() instead.');\n }\n this.variables.push(variable);\n this.appendVariableUniforms(variable);\n\n const access = variable.usage === 'input' ? 'read' : 'read_write';\n const storageType = variable.type.storage;\n return `@group(0) @binding(${bindingIndex}) var ${variable.name}: array<${storageType}>;`;\n }\n\n declareVariables(...variables: IndicesHelper[]): string {\n return variables.map(v => this.declareVariable(v, this.variableIndex++)).join('\\n');\n }\n\n private registerInternalVariable(variable: IndicesHelper): void {\n if (variable.usage !== 'internal') {\n throw new Error(\n 'cannot use input or output variable with registerInternalVariable(). use declareVariables() instead.');\n }\n\n this.internalVariables.push(variable);\n this.appendVariableUniforms(variable);\n }\n\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper {\n variables.forEach(v => this.registerInternalVariable(v));\n return this;\n }\n\n registerUniform(name: string, type: UniformDataElementType, length = 1): ShaderHelper {\n this.uniforms.push({name, type, length});\n return this;\n }\n\n registerUniforms(additionalUniforms: UniformsArrayType): ShaderHelper {\n this.uniforms = this.uniforms.concat(additionalUniforms);\n return this;\n }\n\n private internalVariables: IndicesHelper[] = [];\n private variables: IndicesHelper[] = [];\n private uniforms: UniformsArrayType = [];\n private uniformDeclaration(): string {\n if (this.uniforms.length === 0) {\n return '';\n }\n\n const uniformSnippets: string[] = [];\n for (const {name, type, length} of this.uniforms) {\n if (length && length > 4) {\n if (type === 'f16') {\n uniformSnippets.push(`@align(16) ${name}:array, ${Math.ceil(length / 8)}>`);\n } else {\n uniformSnippets.push(`${name}:array, ${Math.ceil(length / 4)}>`);\n }\n } else {\n const typeTemp = length == null || length === 1 ? 
type : `vec${length}<${type}>`;\n uniformSnippets.push(`${name}:${typeTemp}`);\n }\n }\n\n return `\n struct Uniforms { ${uniformSnippets.join(', ')} };\n @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`;\n }\n private variableIndex = 0;\n\n /**\n * Get additional implementation that needs to be added to the shader source.\n */\n get additionalImplementations(): string {\n return this.uniformDeclaration() + this.variables.map(i => i.impl()).join('\\n') +\n this.internalVariables.map(i => i.impl()).join('\\n');\n }\n\n /**\n * Get the variable info of the shader program.\n */\n get variablesInfo(): ProgramUniformVariableInfo[]|undefined {\n if (this.uniforms.length === 0) {\n return undefined;\n }\n\n const uniformWgslTypeToDataType = (type: UniformDataElementType) =>\n ([DataType.uint32, DataType.float16, DataType.float,\n DataType.int32][['u32', 'f16', 'f32', 'i32'].indexOf(type)]);\n return this.uniforms.map(u => ([uniformWgslTypeToDataType(u.type), u.length ?? 1]));\n }\n}\n\nexport const createShaderHelper = (dispatchGroup: [number, number, number], limits: GPUSupportedLimits) =>\n new ShaderHelperImpl(dispatchGroup, limits);\n\n/**\n * This function comes from https://github.com/tensorflow/tfjs/blob/master/tfjs-core/src/ops/broadcast_util.ts#L18-L40\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport const getBroadcastDims = (inShape: readonly number[], outShape: readonly number[]): number[] => {\n const inRank = inShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n};\n\nconst getAdjustedPerm = (inputRank: number, perm: number[]): number[] =>\n (perm && perm.length !== inputRank) ? 
[...(new Array(inputRank).keys())].reverse() : perm;\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] =>\n ShapeUtil.sortBasedOnPerm(inputShape, getAdjustedPerm(inputShape.length, perm));\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nexport const createTransposeProgramInfo = (inputTensor: TensorView, permAttr: number[]): ProgramInfo => {\n const inputDataType = inputTensor.dataType;\n const inputRank = inputTensor.dims.length;\n const perm = getAdjustedPerm(inputRank, permAttr);\n const outputShape = getOutputShape(inputTensor.dims, perm);\n const output = outputVariable('output', inputDataType, outputShape.length);\n const input = inputVariable('a', inputDataType, inputRank);\n let getShaderSource;\n if (perm.length === 2 && perm[0] === 1 && perm[1] === 0) {\n const wgslType = output.type.value;\n const workgroupSize: [number, number, number] = [16, 16, 1];\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n var tile : array, ${workgroupSize[0]}>;\n ${shaderHelper.mainStart(workgroupSize)}\n var x = workgroup_id.x * ${workgroupSize[0]}u + local_id.x;\n var y = workgroup_id.y * ${workgroupSize[0]}u + local_id.y;\n let width = uniforms.output_shape[0];\n let height = uniforms.output_shape[1];\n if (x < width && y < height) {\n tile[local_id.y][local_id.x] = ${input.getByOffset('y * width + x')};\n }\n workgroupBarrier();\n x = workgroup_id.y * ${workgroupSize[0]}u + local_id.x;\n y = workgroup_id.x * ${workgroupSize[0]}u + local_id.y;\n if (x < height && y < width) {\n ${output.setByOffset('y * height + x', 'tile[local_id.x][local_id.y]')}\n }\n }`;\n } else {\n getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${permFunctionBody(perm, inputRank, input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${output.setByOffset('global_idx', input.getByIndices('aIndices'))}\n }`;\n }\n return {\n name: 'Transpose',\n shaderCache: {hint: `${permAttr}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputSize = ShapeUtil.size(outputShape);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const transpose = (context: ComputeContext, attributes: TransposeAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createTransposeProgramInfo(context.inputs[0], attributes.perm));\n};\n\nexport const parseTransposeAttributes = (attributes: Record): TransposeAttributes =>\n createAttributeWithCacheKey({perm: attributes.perm as number[]});\n", "// Copyright (c) Microsoft Corporation. 
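// A minimal TypeScript sketch, not part of the bundled source, of the offset <-> indices
// arithmetic that the IndicesHelper above emits as WGSL (the generated i2o_* / o2i_*
// functions). `computeStrides` mirrors ShapeUtil.computeStrides; all names here are
// hypothetical and for illustration only.
const computeStrides = (shape: readonly number[]): number[] => {
  const strides = new Array<number>(shape.length).fill(1);
  for (let i = shape.length - 2; i >= 0; i--) {
    strides[i] = strides[i + 1] * shape[i + 1];
  }
  return strides;
};
const indicesToOffset = (indices: readonly number[], strides: readonly number[]): number =>
    indices.reduce((offset, index, i) => offset + index * strides[i], 0);
const offsetToIndices = (offset: number, strides: readonly number[]): number[] =>
    strides.map(stride => {
      const index = Math.floor(offset / stride);
      offset %= stride;
      return index;
    });
// Example: for shape [2, 3, 4] the strides are [12, 4, 1]; indices [1, 2, 3] map to
// offset 23, and offsetToIndices(23, [12, 4, 1]) recovers [1, 2, 3].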
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\nimport {createReduceAttributesFromInputs, ReduceAttributes} from './reduce';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst reduceOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate * candidate',\n logSumExp: 'bestValue + exp(candidate)',\n l1: 'bestValue + abs(candidate)',\n l2: 'bestValue + candidate * candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceSharedOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate',\n logSumExp: 'bestValue + candidate',\n l1: 'bestValue + candidate',\n l2: 'bestValue + candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceInitValues: {[key: string]: string} = {\n max: '_A[offset]',\n min: '_A[offset]',\n mean: '0',\n sum: '0',\n prod: '1',\n sumSquare: '0',\n logSumExp: '0',\n l1: '0',\n l2: '0',\n logSum: '0'\n};\n\nconst reduceOutputValues: {[key: string]: string} = {\n max: 'bestValue',\n min: 'bestValue',\n sum: 'bestValue',\n prod: 'bestValue',\n sumSquare: 'bestValue',\n logSumExp: 'log(bestValue)',\n l1: 'bestValue',\n l2: 'sqrt(bestValue)',\n logSum: 'log(bestValue)'\n};\n\nconst getInnerMostAxes = (numInnerAxes: number, rank: number): number[] => {\n const res = [];\n for (let i = rank - numInnerAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n};\n\nconst computeOutAndReduceShapes = (shape: readonly number[], axes: readonly number[]): [number[], number[]] => {\n const outputShape = [];\n const rank = shape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputShape.push(shape[dim]);\n }\n }\n const reduceShape = axes.map(dim => shape[dim]);\n return [outputShape, reduceShape];\n};\n\nconst expandShapeToKeepDim = (shape: number[], axes: number[]): number[] => {\n const rank = shape.length + axes.length;\n const expandShape = [];\n let shapeIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n expandShape.push(shape[shapeIdx++]);\n } else {\n expandShape.push(1);\n }\n }\n return expandShape;\n};\n\nconst areAxesInnerMostDims = (axes: number[], rank: number): boolean => {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n};\n\nconst getAxesPermutation = (axes: number[], rank: number): number[] => {\n const res = [];\n if (!areAxesInnerMostDims(axes, rank)) {\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n res.push(i);\n }\n }\n axes.forEach(axis => res.push(axis));\n }\n return res;\n};\n\nexport const createReduceSharedProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceType: string,\n outputDataType: DataType, outputShape: number[], reduceShape: number[]): ProgramInfo 
=> {\n const inputShape = inputs[0].dims;\n\n const outputSize = ShapeUtil.size(outputShape);\n const reduceSize = ShapeUtil.size(reduceShape);\n\n const input = inputVariable('_A', inputs[0].dataType, inputShape);\n const output = outputVariable('output', outputDataType, outputShape);\n\n const workgroupSize = 32;\n\n const sharedMemorySnippet = `\n var aBestValues : array;\n `;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('reduceSize', 'u32').declareVariables(input, output)}\n ${sharedMemorySnippet}\n fn DIV_CEIL(a : u32, b : u32) -> u32 {\n return ((a - 1u) / b + 1u);\n }\n ${shaderHelper.mainStart(workgroupSize)}\n\n let outputIndex = global_idx / ${workgroupSize};\n let offset = outputIndex * uniforms.reduceSize;\n\n var bestValue = f32(${reduceInitValues[reduceType]});\n let Length = uniforms.reduceSize;\n for (var k = local_idx; k < Length; k = k + ${workgroupSize}) {\n let candidate = f32(${input.getByOffset('offset + k')});\n bestValue = ${reduceOps[reduceType]};\n }\n aBestValues[local_idx] = bestValue;\n workgroupBarrier();\n\n var reduceSize = min(Length, ${workgroupSize}u);\n for (var currentSize = reduceSize / 2u; reduceSize > 1u;\n currentSize = reduceSize / 2u) {\n let interval = DIV_CEIL(reduceSize, 2u);\n if (local_idx < currentSize) {\n let candidate = aBestValues[local_idx + interval];\n bestValue = ${reduceSharedOps[reduceType]};\n aBestValues[local_idx] = bestValue;\n }\n reduceSize = interval;\n workgroupBarrier();\n }\n\n if (local_idx == 0u) {\n ${\n output.setByOffset(\n 'outputIndex',\n `${\n reduceType === 'mean' ? `${output.type.storage}(bestValue / f32(uniforms.reduceSize))` :\n `${output.type.storage}(${reduceOutputValues[reduceType]})`}`)};\n }\n }`;\n\n // One work group is responsible for only one element of output.\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: outputSize},\n programUniforms: [{type: DataType.uint32, data: reduceSize}]\n }),\n };\n };\n\nconst reduceCommon =\n (context: ComputeContext, name: string, attributes: ReduceAttributes,\n reduceType: 'sum'|'sumSquare'|'prod'|'min'|'max'|'mean'|'logSumExp'|'l1'|'l2'|'logSum'): void => {\n const updatedAttributes: ReduceAttributes =\n context.inputs.length === 1 ? 
attributes : createReduceAttributesFromInputs(context.inputs, attributes);\n\n let updatedAxes = updatedAttributes.axes;\n if (updatedAxes.length === 0 && !updatedAttributes.noopWithEmptyAxes) {\n updatedAxes = context.inputs[0].dims.map((_dim, i) => i);\n }\n const normalizeAxes = ShapeUtil.normalizeAxes(updatedAxes, context.inputs[0].dims.length);\n\n let axes = normalizeAxes;\n let input = context.inputs[0];\n const permutedAxes = getAxesPermutation(axes, context.inputs[0].dims.length);\n if (permutedAxes.length > 0) {\n input = context.compute(\n createTransposeProgramInfo(context.inputs[0], permutedAxes), {inputs: [0], outputs: [-1]})[0];\n axes = getInnerMostAxes(axes.length, input.dims.length);\n }\n\n const [outputShape, reduceShape] = computeOutAndReduceShapes(input.dims, axes);\n let finalOutputShape = outputShape;\n if (updatedAttributes.keepDims) {\n finalOutputShape = expandShapeToKeepDim(outputShape, normalizeAxes);\n }\n\n context.compute(\n createReduceSharedProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['type']}, [input], reduceType,\n context.inputs[0].dataType, finalOutputShape, reduceShape),\n {inputs: [input]});\n };\n\nexport const reduceMeanShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMeanShared', attributes, 'mean');\n};\n\nexport const reduceL1Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL1Shared', attributes, 'l1');\n};\n\nexport const reduceL2Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL2Shared', attributes, 'l2');\n};\n\nexport const reduceLogSumExpShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumExpShared', attributes, 'logSumExp');\n};\n\nexport const reduceMaxShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMaxShared', attributes, 'max');\n};\n\nexport const reduceMinShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMinShared', attributes, 'min');\n};\n\nexport const reduceProdShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceProdShared', attributes, 'prod');\n};\n\nexport const reduceSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumShared', attributes, 'sum');\n};\n\nexport const reduceSumSquareShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumSquareShared', attributes, 'sumSquare');\n};\n\nexport const reduceLogSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumShared', attributes, 'logSum');\n};\n", "// Copyright (c) Microsoft Corporation. 
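// A CPU-side sketch, not part of the bundled source, of the workgroup tree reduction that
// createReduceSharedProgramInfo above emits in WGSL (the loop over aBestValues with
// DIV_CEIL intervals), shown here for the 'sum' case. The helper name is hypothetical.
const treeReduceSum = (values: readonly number[]): number => {
  const a = values.slice();  // stands in for the aBestValues workgroup array
  let reduceSize = a.length;
  while (reduceSize > 1) {
    const interval = Math.ceil(reduceSize / 2);      // DIV_CEIL(reduceSize, 2u)
    const currentSize = Math.floor(reduceSize / 2);  // lanes that do work this round
    for (let i = 0; i < currentSize; i++) {
      a[i] = a[i] + a[i + interval];                 // reduceSharedOps['sum']
    }
    reduceSize = interval;
  }
  return a[0];
};
// treeReduceSum([1, 2, 3, 4, 5]) === 15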
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\nimport {reduceL1Shared, reduceL2Shared, reduceLogSumExpShared, reduceLogSumShared, reduceMaxShared, reduceMeanShared, reduceMinShared, reduceProdShared, reduceSumShared, reduceSumSquareShared} from './reduce-shared';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('Reduce op requires 1 or 2 inputs.');\n }\n\n if (inputs.length === 2 && inputs[1].dims.length !== 1) {\n throw new Error('Invalid axes input dims.');\n }\n};\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n noopWithEmptyAxes: boolean;\n axes: number[];\n}\n\nexport type ReduceOp =\n (input: IndicesHelper, output: IndicesHelper,\n axes: readonly number[]) => [string, string, string, string, ...string[]];\n\nconst noOp: ReduceOp = (input) => ['', '', `var value = ${input.getByIndices('input_indices')};`, ''];\nexport const createReduceProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceOp: ReduceOp,\n axesInput: number[], outputDataType: DataType, keepDims = false, noopWithEmptyAxes = false): ProgramInfo => {\n const outputShape: number[] = [];\n const inputShape = inputs[0].dims;\n const inputRank = inputShape.length;\n const axes = ShapeUtil.normalizeAxes(axesInput, inputRank);\n const reduceOnAllAxes = !noopWithEmptyAxes && axes.length === 0;\n inputShape.forEach((d, i) => {\n if (reduceOnAllAxes || axes.indexOf(i) >= 0) {\n if (keepDims) {\n outputShape.push(1);\n } // else { // skip this axis}\n } else {\n outputShape.push(d);\n }\n });\n const outputRank = outputShape.length;\n const outputSize = ShapeUtil.size(outputShape);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = []; // copy output indexes to input indexes\n\n const input = inputVariable('_A', inputs[0].dataType, inputRank);\n const output = outputVariable('output', outputDataType, outputRank);\n const ops = reduceOp(input, output, axes);\n let reduceOps = ops[2];\n\n for (let k = 0, l = 0; k < inputRank; k++) {\n // if this axis is reduced\n if (reduceOnAllAxes || axes.indexOf(k) >= 0) {\n if (keepDims) {\n l++;\n }\n // loop over the d-th axis\n reduceOps = `for(var j${k}: u32 = 0; j${k} < ${inputShape[k]}; j${k}++) {\n ${ops[2].includes('last_index') ? `let last_index = j${k};` : ''}\n ${input.indicesSet('input_indices', k, `j${k}`)}\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`${input.indicesSet('input_indices', k, output.indicesGet('output_indices', l))};`);\n l++;\n }\n }\n return `\n\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var input_indices: ${input.type.indices};\n let output_indices = ${output.offsetToIndices('global_idx')};\n\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${ops[1]}\n ${reduceOps}\n ${ops[3]}\n ${ops.length === 4 ? 
output.setByOffset('global_idx', 'value') : ops.slice(4).join('\\n')}\n }`;\n };\n\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)]\n }),\n };\n };\n\nexport const createReduceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: ReduceAttributes): ReduceAttributes => {\n const axes: number[] = [];\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => axes.push(Number(v)));\n }\n return createAttributeWithCacheKey(\n {axes, keepDims: attributes.keepDims, noopWithEmptyAxes: attributes.noopWithEmptyAxes});\n };\n\nconst runReduceProgram =\n (context: ComputeContext, name: string, attributes: ReduceAttributes, reduceOp: ReduceOp): void => {\n const inputs = context.inputs;\n const updatedAttributes: ReduceAttributes =\n inputs.length === 1 ? attributes : createReduceAttributesFromInputs(inputs, attributes);\n\n context.compute(\n createReduceProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['rank']}, [inputs[0]],\n updatedAttributes.noopWithEmptyAxes && updatedAttributes.axes.length === 0 ? noOp : reduceOp,\n updatedAttributes.axes, inputs[0].dataType, updatedAttributes.keepDims,\n updatedAttributes.noopWithEmptyAxes),\n {inputs: [0]});\n };\n\nconst reduceLogSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSum', attributes, reduceOp);\n};\n\nconst reduceL1Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += abs(${input.getByIndices('input_indices')});`,\n '',\n ];\n runReduceProgram(context, 'ReduceL1', attributes, reduceOp);\n};\n\nconst reduceL2Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += (t * t);`,\n 'value = sqrt(value);',\n ];\n runReduceProgram(context, 'ReduceL2', attributes, reduceOp);\n};\n\nconst reduceLogSumExpNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += exp(${input.getByIndices('input_indices')});`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSumExp', attributes, reduceOp);\n};\n\nconst reduceMaxNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(input.indicesSet('input_indices', k, 0));\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = max(value, 
${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMax', attributes, reduceOp);\n};\n\nconst reduceMeanNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output, axes) => {\n let size = 1.0;\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n // TODO: this depends on the input dims. If we want to use uniform, this need to be updated.\n size *= context.inputs[0].dims[k];\n }\n }\n\n return [\n 'var sum = f32(0);',\n '',\n `sum += f32(${input.getByIndices('input_indices')});`,\n `let value = ${output.type.value}(sum / ${size});`,\n ];\n };\n runReduceProgram(context, 'ReduceMean', attributes, reduceOp);\n};\n\nconst reduceMinNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = min(value, ${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMin', attributes, reduceOp);\n};\n\nconst reduceProdNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(1);`,\n '',\n `value *= ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceProd', attributes, reduceOp);\n};\n\nconst reduceSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceSum', attributes, reduceOp);\n};\n\nconst reduceSumSquareNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += t * t;`,\n '',\n ];\n runReduceProgram(context, 'ReduceSumSquare', attributes, reduceOp);\n};\n\nconst useNaiveReduceMethod =\n (shape: readonly number[], axes: readonly number[], noopWithEmptyAxes: boolean): boolean => {\n if (axes.length === 0) {\n return noopWithEmptyAxes;\n }\n\n let outputSize = 1;\n let reduceSize = 1;\n for (let dim = 0; dim < axes.length; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputSize *= shape[dim];\n } else {\n reduceSize *= shape[dim];\n }\n }\n\n // The condition data is very rough, although considering the count of Execution Unit (EU), the potential\n // work groups in a EU and the counts of loops in the naive and shared methods, also doing experiments\n // on some machines.\n return reduceSize < 32 && outputSize > 1024;\n };\n\nexport const reduceMean = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMeanNaive(context, attributes);\n } else {\n reduceMeanShared(context, attributes);\n }\n};\n\nexport const reduceL1 = (context: 
ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL1Naive(context, attributes);\n } else {\n reduceL1Shared(context, attributes);\n }\n};\n\nexport const reduceL2 = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL2Naive(context, attributes);\n } else {\n reduceL2Shared(context, attributes);\n }\n};\n\nexport const reduceLogSumExp = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumExpNaive(context, attributes);\n } else {\n reduceLogSumExpShared(context, attributes);\n }\n};\n\nexport const reduceMax = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMaxNaive(context, attributes);\n } else {\n reduceMaxShared(context, attributes);\n }\n};\n\nexport const reduceMin = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMinNaive(context, attributes);\n } else {\n reduceMinShared(context, attributes);\n }\n};\n\nexport const reduceProd = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceProdNaive(context, attributes);\n } else {\n reduceProdShared(context, attributes);\n }\n};\n\nexport const reduceSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumNaive(context, attributes);\n } else {\n reduceSumShared(context, attributes);\n }\n};\n\nexport const reduceSumSquare = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumSquareNaive(context, attributes);\n } else {\n reduceSumSquareShared(context, attributes);\n }\n};\n\nexport const reduceLogSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumNaive(context, attributes);\n } else {\n reduceLogSumShared(context, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. 
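// An illustrative sketch, not part of the bundled source, of the size heuristic that the
// naive-vs-shared split in useNaiveReduceMethod above is based on: roughly, the product of
// the non-reduced dims acts as outputSize and the product of the reduced dims as
// reduceSize. The helper name splitSizes is hypothetical.
const splitSizes = (shape: readonly number[], axes: readonly number[]) => {
  let outputSize = 1;
  let reduceSize = 1;
  shape.forEach((d, dim) => {
    if (axes.includes(dim)) {
      reduceSize *= d;
    } else {
      outputSize *= d;
    }
  });
  return {outputSize, reduceSize};
};
// Example: shape [2048, 8] reduced over axis 1 gives outputSize = 2048 and reduceSize = 8,
// which satisfies reduceSize < 32 && outputSize > 1024, so the naive per-output loop is
// preferred; shape [8, 2048] reduced over axis 1 gives reduceSize = 2048, favoring the
// shared-memory kernel instead.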
Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createReduceProgramInfo, ReduceOp} from './reduce';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('ArgMinMaxOp op requires 1 or 2 inputs.');\n }\n if (inputs[0].dataType !== DataType.float) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport interface ArgMinMaxAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n axis: number;\n selectLastIndex: number;\n}\n\nexport const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '<=' : '<'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'ArgMin', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const argMax = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '>=' : '>'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'argMax', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const parseArgMinMaxAttributes = (attributes: Record): ArgMinMaxAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
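// A 1-D CPU analogue, not part of the bundled source, of the ArgMin reduce op defined
// above; it mirrors the `<=` vs `<` comparison that the selectLastIndex attribute toggles.
// The function name argMinCpu is hypothetical.
const argMinCpu = (data: readonly number[], selectLastIndex = false): number => {
  let value = data[0];
  let bestIndex = 0;
  for (let i = 0; i < data.length; i++) {
    const better = selectLastIndex ? data[i] <= value : data[i] < value;
    if (better) {
      value = data[i];
      bestIndex = i;
    }
  }
  return bestIndex;
};
// argMinCpu([3, 1, 1, 2]) === 1, while argMinCpu([3, 1, 1, 2], true) === 2 (last minimum).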
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext, GpuDataType, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, tensorTypeToWsglValueType, UniformDataElementType, UniformsArrayType} from './common';\n\nexport const enum AttentionQkvFormat {\n unknown, // enum value not set, or depends on qkv projection implementation details\n qkvBNSH, // for non-packed qkv, permuted\n qkvBSNH, // for non-packed qkv, not permuted, used by memory efficient attention or MultiHeadAttention\n qkvBSN3H, // for TRT fused attention, qkv are packed\n qkvBNSHqkvBS3NH, // for TRT fused causal attention, data has two formats (qkv is 3BNSH, gemm_buffer is BS3NH)\n qKvBSNHxBSN2H, // for TRT fused cross attention, kv are packed\n qkvTNH, // for memory efficient attention, qkv are not packed, and paddings are removed.\n qkvTN3H, // for TRT fused attention, qkv are packed and paddings are removed\n}\n\nexport const enum AttentionMaskType {\n none, // No mask\n mask1dKeySeqLen, // [batch_size], key sequence length\n mask1dEndStart, // [2 * batch_size] with end positions and start positions\n mask1DKeySeqLenStart, // [3 * batch_size + 2] with [key_len[0], ..., key_len[batch_size - 1], query_start[0],\n // ..., query_start[batch_size - 1], query_end[batch_size - 1], key_start[0], ...,\n // key_start[batch_size - 1], key_end[batch_size - 1]]\n mask2dDummy, // dummy mask with shape [1, 1] or [batch_size, 1]. It has same effect as no mask.\n mask2dKeyPadding, // [batch_size, total_sequence_length]\n mask3dAttention, // [batch_size, sequence_length, total_sequence_length]\n mask4dMegatron, // Megatron causal mask with shape [batch_size, 1, max_sequence_length, max_sequence_length]\n maskUnknown\n}\n\nexport interface AttentionParameters {\n batchSize: number;\n sequenceLength: number;\n pastSequenceLength: number;\n kvSequenceLength: number;\n totalSequenceLength: number;\n maxSequenceLength: number;\n inputHiddenSize: number;\n hiddenSize: number;\n vHiddenSize: number;\n headSize: number;\n vHeadSize: number;\n numHeads: number;\n kvNumHeads?: number;\n nReps?: number;\n isUnidirectional?: boolean;\n pastPresentShareBuffer: boolean;\n maskFilterValue?: number;\n maskType: AttentionMaskType;\n scale: number;\n broadcastResPosBias: boolean;\n passPastInKv: boolean;\n qkvFormat: AttentionQkvFormat;\n isPastkvBSNH?: boolean;\n}\n\nexport interface AttentionAttrs {\n numHeads: number;\n kvNumHeads?: number;\n isUnidirectional?: number;\n maskFilterValue?: number;\n scale: number;\n doRotary: number;\n qkvHiddenSizes: number[];\n pastPresentShareBuffer: boolean;\n}\n\nconst validateAttentionInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = 
num_heads * v_head_size\n\n // When past state is used, Q, K and V should have same hidden size (unless we split it into past_key and past_value).\n\n // Input shapes:\n // input (Q/K/V) : (B, S, D_i)\n // weights (Q/K/V) : (D_i, D + D + D_v)\n // bias (Q/K/V) : (D + D + D_v)\n // mask_index : see below\n // past (K/V) : (2, B, N, P, H) or NULL\n // relative_position_bias : (B, N, S, T) or NULL\n\n // For mask_index, the following shapes are supported:\n // NULL, (B, 1), (1, 1)\n // (B), (2 * B), (3 * B + 2)\n // (B, T)\n // (B, S, T)\n // (B, 1, M, M)\n //\n // When a model is pruned (like some attention heads are removed in Q/K/V), input_hidden_size could be larger\n // than hidden dimension of Q, K and V.\n\n const input = inputs[0];\n const weights = inputs[1];\n const bias = inputs[2];\n const maskIndex = inputs[3];\n const past = inputs[4];\n const relativePositionBias = inputs[5];\n\n if (past && relativePositionBias) {\n throw new Error('Attention cannot have both past and relative_position_bias');\n }\n\n if (input.dims.length !== 3) {\n throw new Error('Input \"input\" must have 3 dimensions');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[1];\n const inputHiddenSize = input.dims[2];\n\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimensions');\n }\n\n if (weights.dims.length !== 2) {\n throw new Error('Input \"weights\" is expected to have 2 dimensions');\n }\n\n if (weights.dims[0] !== inputHiddenSize) {\n throw new Error('Input 1 dimension 0 should have same length as dimension 2 of input 0');\n }\n\n if (bias.dims[0] !== weights.dims[1]) {\n throw new Error('Input \"bias\" dimension 0 should have same length as dimension 1 of input \"weights\"');\n }\n\n let qHiddenSize = bias.dims[0] / 3;\n let kHiddenSize = qHiddenSize;\n let vHiddenSize = kHiddenSize;\n if (attributes.qkvHiddenSizes.length > 0) {\n if (attributes.qkvHiddenSizes.length !== 3) {\n throw new Error('qkv_hidden_sizes attribute should have 3 elements');\n }\n for (const sz of attributes.qkvHiddenSizes) {\n if (sz % attributes.numHeads !== 0) {\n throw new Error('qkv_hidden_sizes should be divisible by num_heads');\n }\n }\n\n qHiddenSize = attributes.qkvHiddenSizes[0];\n kHiddenSize = attributes.qkvHiddenSizes[1];\n vHiddenSize = attributes.qkvHiddenSizes[2];\n }\n\n const kvSequenceLength = sequenceLength;\n\n if (qHiddenSize !== kHiddenSize) {\n throw new Error('qkv_hidden_sizes first element should be same as the second');\n }\n\n if (bias.dims[0] !== qHiddenSize + kHiddenSize + vHiddenSize) {\n throw new Error('Input \"bias\" dimension 0 should have same length as sum of Q/K/V hidden sizes');\n }\n\n let pastSequenceLength = 0;\n if (past) {\n if (kHiddenSize !== vHiddenSize) {\n throw new Error('Input \"past\" expect k_hidden_size == v_hidden_size');\n }\n if (past.dims.length !== 5) {\n throw new Error('Input \"past\" must have 5 dimensions');\n }\n if (past.dims[0] !== 2) {\n throw new Error('Input \"past\" first dimension must be 2');\n }\n if (past.dims[1] !== batchSize) {\n throw new Error('Input \"past\" second dimension must be batch_size');\n }\n if (past.dims[2] !== attributes.numHeads) {\n throw new Error('Input \"past\" third dimension must be num_heads');\n }\n if (past.dims[4] !== kHiddenSize / attributes.numHeads) {\n throw new Error('Input \"past\" fifth dimension must be k_hidden_size / num_heads');\n }\n\n if (!attributes.pastPresentShareBuffer) {\n pastSequenceLength = past.dims[3];\n }\n // TODO: handle 
past_seq_len\n }\n\n const totalSequenceLength = kvSequenceLength + pastSequenceLength;\n const maxSequenceLength = -1;\n\n const maskType = AttentionMaskType.none;\n if (maskIndex) {\n // maskType = AttentionMaskType.MASK_UNKNOWN;\n // TODO: handle mask\n throw new Error('Mask not supported');\n }\n\n if (past) {\n throw new Error('past is not supported');\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize,\n hiddenSize: qHiddenSize,\n vHiddenSize,\n headSize: Math.floor(qHiddenSize / attributes.numHeads),\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias: false,\n passPastInKv: false,\n qkvFormat: AttentionQkvFormat.qkvBNSH,\n };\n};\n\nconst createInPlaceSoftmaxProgramInfo = (_context: ComputeContext, input: TensorView, n: number, d: number) => {\n const components = getMaxComponents(d);\n let WG = 64;\n const dComp = d / components;\n if (dComp < WG) {\n WG = 1;\n } else if (dComp / 8 < 64) {\n WG = Math.ceil(dComp / 8);\n }\n const elementsPerThread = Math.ceil(d / components / WG);\n const programUniforms: ProgramUniform[] = [\n {type: input.dataType, data: 1 / d}, {type: DataType.uint32, data: dComp},\n {type: DataType.uint32, data: elementsPerThread}\n ];\n const dataType = tensorTypeToWsglStorageType(input.dataType, components);\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = outputVariable('x', input.dataType, input.dims, components);\n const elemValueType = tensorTypeToWsglValueType(input.dataType);\n const uniforms: UniformsArrayType = [\n {name: 'd_inv', type: elemValueType as UniformDataElementType}, {name: 'd_comp', type: 'u32'},\n {name: 'elements_per_thread', type: 'u32'}\n ];\n\n return `\n var thread_max: array;\n var thread_sum: array;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(inputHelper)}\n ${shaderHelper.mainStart([\n WG, 1, 1\n ])}\n let local_offset = local_idx * uniforms.elements_per_thread;\n let offset = workgroup_id.x * uniforms.d_comp + local_offset;\n\n var thread_max_vector = ${f32Type}(-3.402823e+38f);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n thread_max_vector = max(${f32Type}(x[offset + i]), thread_max_vector);\n }\n thread_max[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'thread_max_vector';\n case 2:\n return 'max(thread_max_vector.x, thread_max_vector.y)';\n case 4:\n return 'max(max(thread_max_vector.x, thread_max_vector.y), max(thread_max_vector.z, thread_max_vector.w))';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var max_value = f32(-3.402823e+38f);\n for (var i = 0u; i < ${WG}; i++) {\n max_value = max(thread_max[i], max_value);\n }\n\n var sum_vector = ${f32Type}(0);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n sum_vector += exp(${f32Type}(x[offset + i]) - max_value);\n }\n thread_sum[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'sum_vector';\n case 2:\n return 'sum_vector.x + sum_vector.y';\n case 4:\n return 'sum_vector.x + sum_vector.y + sum_vector.z + sum_vector.w';\n default:\n throw 
new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var sum: f32 = 0;\n for (var i = 0u; i < ${WG}; i++) {\n sum += thread_sum[i];\n }\n\n if (sum == 0) {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n x[offset + i] = ${inputHelper.type.value}(uniforms.d_inv);\n }\n } else {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n var f32input = ${f32Type}(x[offset + i]);\n x[offset + i] = ${inputHelper.type.value}(exp(f32input - max_value) / sum);\n }\n }\n }`;\n };\n\n return {\n name: 'AttentionProbsSoftmax',\n shaderCache: {hint: `${WG};${dataType};${components}`},\n getShaderSource,\n getRunData: () => ({outputs: [], dispatchGroup: {x: n}, programUniforms}),\n };\n};\n\nconst createAttentionProbsProgramInfo =\n (context: ComputeContext, q: TensorView, key: TensorView, pastKey: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs,\n pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n const probsShape = [parameters.batchSize, parameters.numHeads, parameters.sequenceLength, totalSequenceLength];\n const presentKey = parameters.kvNumHeads === undefined && context.outputCount > 1;\n const presentKeyShape = presentKey ?\n [parameters.batchSize, parameters.numHeads, totalSequenceLength, parameters.headSize] :\n undefined;\n\n // TODO: handle mask\n\n const alpha = attributes.scale === 0 ? 1.0 / Math.sqrt(parameters.headSize) : attributes.scale;\n const components = getMaxComponents(parameters.headSize);\n const vectorizedHeadSize = parameters.headSize / components;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(totalSequenceLength / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: parameters.sequenceLength}, {type: DataType.uint32, data: vectorizedHeadSize},\n {type: DataType.uint32, data: totalSequenceLength}, {type: DataType.uint32, data: parameters.numHeads},\n {type: DataType.float, data: alpha}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: parameters.kvSequenceLength}\n ];\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (pastKey) {\n inputDependencies.push('type');\n }\n if (relativePositionBias) {\n inputDependencies.push('type');\n }\n const outputs = [{dims: probsShape, dataType: q.dataType, gpuDataType: GpuDataType.default}];\n if (presentKey) {\n outputs.push({dims: presentKeyShape!, dataType: q.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const qInput = inputVariable('q', q.dataType, q.dims, components);\n const kInput = inputVariable('key', key.dataType, key.dims, components);\n const inputVars = [qInput, kInput];\n if (pastKey) {\n const pastKeyInput = inputVariable('past_key', pastKey.dataType, pastKey.dims, components);\n inputVars.push(pastKeyInput);\n }\n if (relativePositionBias) {\n inputVars.push(\n inputVariable('relative_position_bias', relativePositionBias.dataType, relativePositionBias.dims));\n }\n const output = outputVariable('output', q.dataType, probsShape);\n const outputVars = [output];\n if (presentKey) {\n outputVars.push(outputVariable('present_key', q.dataType, presentKeyShape!, 
components));\n }\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'alpha', type: 'f32' as UniformDataElementType},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n\n var tileQ: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n // x holds the N and y holds the M\n let headIdx = workgroup_id.z;\n let m = workgroup_id.y * TILE_SIZE;\n let n = workgroup_id.x * TILE_SIZE;\n let qOffset = uniforms.M * uniforms.K * headIdx + m * uniforms.K;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n let kOffset = uniforms.kv_sequence_length * uniforms.K * headIdx;\n let pastKeyOffset = uniforms.past_sequence_length * uniforms.K * headIdx;`;\n } else {\n return `\n let kOffset = uniforms.N * uniforms.K * headIdx + n * uniforms.K;`;\n }\n })()}\n ${presentKey ? 'let presentKeyOffset = headIdx * uniforms.N * uniforms.K;' : ''}\n var value = ${f32Type}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (global_id.y < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = q[qOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n if (n + local_id.y < uniforms.N && w + local_id.x < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastKey && presentKey) {\n return `\n if (n + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_key[pastKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x];\n } else {\n tileK[idx] =\n key[kOffset + (n + local_id.y - uniforms.past_sequence_length) * uniforms.K + w + local_id.x];\n }`;\n } else {\n return 'tileK[idx] = key[kOffset + local_id.y * uniforms.K + w + local_id.x];';\n }\n })()}\n ${\n presentKey ?\n 'present_key[presentKeyOffset + (n + local_id.y) * uniforms.K + w + local_id.x] = tileK[idx];' :\n ''}\n }\n workgroupBarrier();\n\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += ${f32Type}(tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * local_id.x + k]);\n }\n\n workgroupBarrier();\n }\n\n let headOffset = headIdx * uniforms.M * uniforms.N;\n if (global_id.y < uniforms.M && global_id.x < uniforms.N) {\n let outputIdx = headOffset + global_id.y * uniforms.N + global_id.x;\n var sum: f32 = ${(() => {\n switch (components) {\n case 1:\n return 'value';\n case 2:\n return 'value.x + value.y';\n case 4:\n return 'value.x + value.y + value.z + value.w';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n output[outputIdx] = ${output.type.value} (sum * uniforms.alpha) + ${\n relativePositionBias ? 
'relative_position_bias[outputIdx]' : '0.0'};\n }\n }`;\n };\n return {\n name: 'AttentionProbs',\n shaderCache: {\n hint: `${components};${relativePositionBias !== undefined};${pastKey !== undefined};${context.outputCount}`,\n inputDependencies\n },\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\n\nconst createVxAttentionScoreProgramInfo =\n (context: ComputeContext, probs: TensorView, v: TensorView, pastValue: TensorView|undefined,\n params: AttentionParameters, pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + params.kvSequenceLength;\n const nReps = params.nReps ? params.nReps : 1;\n const repeatedVHiddenSize = params.vHiddenSize * nReps;\n const presentValue = params.kvNumHeads == null && context.outputCount > 1;\n const presentValueShape =\n presentValue ? [params.batchSize, params.numHeads, totalSequenceLength, params.headSize] : undefined;\n const outputShape = [params.batchSize, params.sequenceLength, repeatedVHiddenSize];\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(params.vHeadSize / TILE_SIZE),\n y: Math.ceil(params.sequenceLength / TILE_SIZE),\n z: params.batchSize * params.numHeads\n };\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: params.sequenceLength}, {type: DataType.uint32, data: totalSequenceLength},\n {type: DataType.uint32, data: params.vHeadSize}, {type: DataType.uint32, data: params.numHeads},\n {type: DataType.uint32, data: repeatedVHiddenSize}, {type: DataType.uint32, data: pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] =\n pastValue ? ['type', 'type', 'type'] : ['type', 'type'];\n const outputs = [{dims: outputShape, dataType: probs.dataType, gpuDataType: GpuDataType.default}];\n if (presentValue) {\n outputs.push({dims: presentValueShape!, dataType: probs.dataType, gpuDataType: GpuDataType.default});\n }\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const probsHelper = inputVariable('probs', probs.dataType, probs.dims);\n const vHelper = inputVariable('v', v.dataType, v.dims);\n const inputVars = [probsHelper, vHelper];\n if (pastValue) {\n inputVars.push(inputVariable('past_value', pastValue.dataType, pastValue.dims));\n }\n const output = outputVariable('output', probs.dataType, outputShape);\n const outputVars = [output];\n if (presentValue) {\n outputVars.push(outputVariable('present_value', probs.dataType, presentValueShape!));\n }\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'v_hidden_size', type: 'u32'},\n {name: 'past_sequence_length', type: 'u32'}, {name: 'kv_sequence_length', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileQ: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, ...outputVars)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let headIdx = workgroup_id.z;\n let m = global_id.y;\n let n = global_id.x;\n\n let offsetA = headIdx * (uniforms.M * uniforms.K) + m * uniforms.K;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n let pastValueOffset = headIdx * uniforms.N * uniforms.past_sequence_length + n;\n let vOffset = headIdx * uniforms.N * uniforms.kv_sequence_length + 
n;\n `;\n } else {\n return `\n let offsetB = headIdx * uniforms.N * uniforms.K + n;\n `;\n }\n })()}\n ${presentValue ? 'let presentValueOffset = headIdx * uniforms.N * uniforms.K + n;' : ''}\n var value = ${probsHelper.type.storage}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = probs[offsetA + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n var idx = TILE_SIZE * local_id.y + local_id.x;\n ${(() => {\n if (pastValue && presentValue) {\n return `\n if (w + local_id.y < uniforms.past_sequence_length) {\n tileK[idx] = past_value[pastValueOffset + (w + local_id.y) * uniforms.N];\n } else {\n tileK[idx] = v[vOffset + (w + local_id.y - uniforms.past_sequence_length) * uniforms.N];\n }\n `;\n } else {\n return `\n tileK[idx] = v[offsetB + (w + local_id.y) * uniforms.N];\n `;\n }\n })()}\n ${presentValue ? 'present_value[presentValueOffset + (w + local_id.y) * uniforms.N] = tileK[idx];' : ''}\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * k + local_id.x];\n }\n workgroupBarrier();\n }\n\n // we need to transpose output from BNSH_v to BSND_v\n let batchIdx = workgroup_id.z / uniforms.num_heads;\n let currentBatchHeadNumber = workgroup_id.z % uniforms.num_heads;\n if (m < uniforms.M && n < uniforms.N) {\n let outputIdx = batchIdx * uniforms.M * uniforms.v_hidden_size + m * uniforms.v_hidden_size\n + currentBatchHeadNumber * uniforms.N + n;\n output[outputIdx] = value;\n }\n }`;\n };\n\n return {\n name: 'AttentionScore',\n shaderCache: {hint: `${pastValue !== undefined};${context.outputCount}`, inputDependencies},\n getRunData: () => ({outputs, dispatchGroup: dispatch, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const applyAttention =\n (context: ComputeContext, q: TensorView, k: TensorView, v: TensorView, _maskIndex: TensorView|undefined,\n _past: TensorView|undefined, pastKey: TensorView|undefined, pastValue: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs) => {\n const outputCount = context.outputCount;\n const pastSequenceLength =\n parameters.kvNumHeads !== undefined || outputCount > 1 ? parameters.pastSequenceLength : 0;\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n\n const inputsK = (parameters.kvNumHeads === undefined && outputCount > 1 && pastKey) ? [q, k, pastKey] : [q, k];\n if (relativePositionBias) {\n inputsK.push(relativePositionBias);\n }\n\n // Run AttentionProbs\n const probs = context.compute(\n createAttentionProbsProgramInfo(\n context, q, k, outputCount > 1 ? pastKey : undefined, relativePositionBias, parameters, attributes,\n pastSequenceLength),\n {inputs: inputsK, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [-1, 1] : [-1]})[0];\n\n // Run Softmax\n context.compute(\n createInPlaceSoftmaxProgramInfo(\n context, probs, parameters.batchSize * parameters.numHeads * parameters.sequenceLength,\n totalSequenceLength),\n {inputs: [probs], outputs: []});\n\n // Run AttrionScore\n const inputsV =\n (parameters.kvNumHeads === undefined && outputCount > 1 && pastValue) ? [probs, v, pastValue] : [probs, v];\n context.compute(\n createVxAttentionScoreProgramInfo(\n context, probs, v, outputCount > 1 && pastValue ? 
pastValue : undefined, parameters, pastSequenceLength),\n {inputs: inputsV, outputs: (parameters.kvNumHeads === undefined && outputCount > 1) ? [0, 2] : [0]});\n };\n\nconst prepare = (context: ComputeContext, parameters: AttentionParameters) => {\n const outputShape = [\n parameters.batchSize,\n parameters.numHeads,\n parameters.sequenceLength,\n parameters.headSize,\n ];\n const M = parameters.sequenceLength;\n const K = parameters.inputHiddenSize;\n const N = parameters.headSize;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(parameters.headSize / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const inputs = [context.inputs[0], context.inputs[1], context.inputs[2]];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: M}, {type: DataType.uint32, data: K}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: parameters.numHeads}, {type: DataType.uint32, data: parameters.headSize},\n {type: DataType.uint32, data: parameters.hiddenSize},\n {type: DataType.uint32, data: parameters.hiddenSize + parameters.hiddenSize + parameters.vHiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const outputQ = outputVariable('output_q', inputs[0].dataType, outputShape);\n const outputK = outputVariable('output_k', inputs[0].dataType, outputShape);\n const outputV = outputVariable('output_v', inputs[0].dataType, outputShape);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims);\n const weight = inputVariable('weight', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const dataType = input.type.storage;\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'num_heads', type: 'u32'},\n {name: 'head_size', type: 'u32'}, {name: 'hidden_size', type: 'u32'}, {name: 'ldb', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileInput: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightQ: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightK: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightV: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, weight, bias, outputQ, outputK, outputV)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let batchIndex = workgroup_id.z / uniforms.num_heads;\n let headNumber = workgroup_id.z % uniforms.num_heads;\n let m = global_id.y;\n let n = global_id.x;\n\n let inputOffset = batchIndex * (uniforms.M * uniforms.K) + m * uniforms.K;\n let biasOffsetQ = headNumber * uniforms.head_size;\n let biasOffsetK = uniforms.hidden_size + biasOffsetQ;\n let biasOffsetV = uniforms.hidden_size + biasOffsetK;\n\n var valueQ = ${dataType}(0);\n var valueK = ${dataType}(0);\n var valueV = ${dataType}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileInput[TILE_SIZE * local_id.y + local_id.x] = input[inputOffset + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n let offset = n + (w + local_id.y) * uniforms.ldb;\n tileWeightQ[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetQ + offset];\n tileWeightK[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetK + offset];\n tileWeightV[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetV + offset];\n }\n 
workgroupBarrier();\n for (var k: u32 = 0u; k ({\n outputs: [\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n ],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n },\n {inputs, outputs: [-1, -1, -1]});\n};\n\nexport const attention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateAttentionInputs(context.inputs, attributes);\n\n const [q, k, v] = prepare(context, params);\n\n return applyAttention(\n context, q, k, v, context.inputs[4], undefined, undefined, undefined, context.inputs[5], params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface BatchNormAttributes extends AttributeWithCacheKey {\n readonly epsilon: number;\n readonly momentum: number;\n readonly spatial: boolean;\n readonly trainingMode: boolean;\n readonly format: 'NHWC'|'NCHW';\n readonly outputCount: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: BatchNormAttributes): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs');\n }\n\n const checkShapeEqual = (actual: readonly number[], expected: readonly number[], message: string) => {\n const r = expected.length;\n if (r !== actual.length) {\n throw new Error(`${message}: num dimensions != ${r}`);\n }\n expected.forEach((v, i) => {\n if (v !== actual[i]) {\n throw new Error(`${message}: dim[${i}] do not match`);\n }\n });\n };\n\n if (inputs[0].dims.length > 1) {\n const shape = attributes.format === 'NHWC' ?\n (attributes.spatial ? inputs[0].dims.slice(-1) :\n inputs[0].dims.slice(-1).concat(inputs[0].dims.slice(1, inputs[0].dims.length - 1))) :\n inputs[0].dims.slice(1, attributes.spatial ? 2 : undefined);\n checkShapeEqual(inputs[1].dims, shape, 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, shape, 'Invalid input B');\n checkShapeEqual(inputs[3].dims, shape, 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, shape, 'Invalid input var');\n } else {\n checkShapeEqual(inputs[1].dims, [1], 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, [1], 'Invalid input B');\n checkShapeEqual(inputs[3].dims, [1], 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, [1], 'Invalid input var');\n }\n};\n\nconst createBatchNormInferenceProgramInfo =\n (inputs: readonly TensorView[], attributes: BatchNormAttributes): ProgramInfo => {\n const {epsilon, spatial, format} = attributes;\n const yShape = inputs[0].dims;\n const components = spatial ? getMaxComponents(yShape[yShape.length - 1]) : 1;\n const cComponents = format === 'NHWC' && yShape.length > 1 ? 
components : 1;\n const outputSize = ShapeUtil.size(yShape) / components;\n // Only support uniforms for opset version >= 9 (spatial = true).\n const useShapesUniforms = spatial;\n const shapeOrRank = useShapesUniforms ? yShape.length : yShape;\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims, cComponents);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims, cComponents);\n const inputMean = inputVariable('inputMean', inputs[3].dataType, inputs[3].dims, cComponents);\n const inputVar = inputVariable('inputVar', inputs[4].dataType, inputs[4].dims, cComponents);\n const y = outputVariable('y', inputs[0].dataType, shapeOrRank, components);\n // TODO: support inputs with different data type. Current we need to make sure all inputs have the same data type.\n // Otherwise, the shader compilation will fail.\n const calcCOffset = (): string => {\n let cOffset = '';\n if (spatial) {\n cOffset = `let cOffset = ${\n yShape.length === 1 ? '0u' :\n format === 'NHWC' ? `outputIndices[${yShape.length - 1}] / ${components}` :\n 'outputIndices[1]'};`;\n } else {\n if (format === 'NCHW') {\n cOffset = `\n ${y.indicesSet('outputIndices', '0', '0')}\n let cOffset = ${y.indicesToOffset('outputIndices')};`;\n } else {\n // update C channel.\n cOffset = `var cIndices = ${scale.type.indices}(0);\n cIndices[0] = outputIndices[${yShape.length - 1}];`;\n // update D1 x ... x Dn channels.\n for (let i = 1; i < scale.rank; i++) {\n cOffset += `cIndices[${i}] = outputIndices[${i}];`;\n }\n cOffset += `let cOffset = ${scale.indicesToOffset('cIndices')};`;\n }\n }\n return cOffset;\n };\n const getInferenceModeShaderSource = (helper: ShaderHelper) => `\n const epsilon = ${epsilon};\n ${helper.registerUniform('outputSize', 'u32').declareVariables(x, scale, bias, inputMean, inputVar, y)}\n ${helper.mainStart()}\n ${helper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${y.offsetToIndices(`global_idx * ${components}`)};\n ${calcCOffset()}\n let scale = ${scale.getByOffset('cOffset')};\n let bias = ${bias.getByOffset('cOffset')};\n let inputMean = ${inputMean.getByOffset('cOffset')};\n let inputVar = ${inputVar.getByOffset('cOffset')};\n let x = ${x.getByOffset('global_idx')};\n let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias;\n ${y.setByOffset('global_idx', 'value')}\n }`;\n return {\n name: 'BatchNormalization',\n shaderCache: {\n hint: `${attributes.epsilon}_${attributes.format}_${spatial}_${components}`,\n inputDependencies: useShapesUniforms ? 
['rank', 'type', 'type', 'type', 'type'] : undefined,\n },\n getShaderSource: getInferenceModeShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: useShapesUniforms ?\n [\n {type: DataType.uint32, data: outputSize},\n ...createTensorShapeVariables(yShape),\n ] :\n [\n {type: DataType.uint32, data: outputSize},\n ],\n }),\n };\n };\n\nexport const parseBatchNormAttributes = (attributes: Record): BatchNormAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n\nexport const batchNorm = (context: ComputeContext, attributes: Record): void => {\n const {inputs, outputCount} = context;\n const updatedAttributes = parseBatchNormAttributes({...attributes, outputCount});\n if (env.webgpu.validateInputContent) {\n validateInputs(inputs, updatedAttributes);\n }\n if (attributes.trainingMode) {\n throw new Error('BatchNormalization trainingMode is not supported yet.');\n } else {\n context.compute(createBatchNormInferenceProgramInfo(inputs, updatedAttributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![320, 640, 1280].includes(inputs[0].dims[2])) {\n throw new Error('number of channels should be 320, 640 or 1280');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasAddProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims;\n\n const channels = inputs[0].dims[2];\n // since channel number can be only 320/640/1280, it's always divisable by 4\n const outputSize = ShapeUtil.size(outputShape) / 4;\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, outputShape, 4);\n const bias = inputVariable('bias', dataType, [channels], 4);\n const residual = inputVariable('residual', dataType, outputShape, 4);\n const output = outputVariable('output', dataType, outputShape, 4);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const channels = ${channels}u / 4;\n ${shaderHelper.declareVariables(input, bias, residual, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let value = ${input.getByOffset('global_idx')}\n + ${bias.getByOffset('global_idx % channels')} + ${residual.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', 'value')}\n }`;\n\n return {\n name: 'BiasAdd',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasAdd = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasAddProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {MAX_CLIP, MIN_CLIP, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType} from './common';\n\ntype BuiltinFunctionName = string;\ntype ElementwiseCustomExpression = (expression: string) => string;\ntype ElementwiseFunctionCall = BuiltinFunctionName|ElementwiseCustomExpression;\n\nconst createElementwiseProgramShader =\n (shaderHelper: ShaderHelper, datasize: number, inputDataType: number, outputDataType: number,\n funcCall: ElementwiseFunctionCall, additionalImplementation?: string): string => {\n const vecSize = Math.ceil(datasize / 4);\n\n let expression = '';\n if (typeof funcCall === 'string') {\n expression = `${funcCall}(a)`;\n } else {\n expression = funcCall('a');\n }\n\n const input = inputVariable('inputData', inputDataType, [vecSize], 4);\n const output = outputVariable('outputData', outputDataType, [vecSize], 4);\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n\n let a = ${input.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', expression)}\n }`;\n };\n\nconst createElementwiseProgramInfo =\n (input: TensorView, name: string, funcCall: ElementwiseFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType: number = input.dataType): ProgramInfo => ({\n name,\n shaderCache: {hint: cacheKey, inputDependencies: ['type']},\n getShaderSource: shaderHelper => createElementwiseProgramShader(\n shaderHelper, ShapeUtil.size(input.dims), input.dataType, outputDataType, funcCall, additionalImplementation),\n getRunData: (inputTensors) => ({\n outputs: [{dims: input.dims, dataType: outputDataType}],\n dispatchGroup:\n {x: Math.ceil(ShapeUtil.size(inputTensors[0].dims) / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(input.dims) / 4)},\n ],\n })\n });\n\nexport const abs = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Abs', 'abs'));\n};\n\nexport const acos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acos', 'acos'));\n};\n\nexport const acosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acosh', 'acosh'));\n};\n\nexport const asin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asin', 'asin'));\n};\n\nexport const asinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asinh', 'asinh'));\n};\n\nexport const atan = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atan', 'atan'));\n};\nexport const atanh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atanh', 'atanh'));\n};\n\nexport interface CastAttributes extends AttributeWithCacheKey {\n readonly to: number;\n readonly saturate?: boolean;\n}\n\nexport const 
parseCastAttributes = (attributes: Record): CastAttributes =>\n createAttributeWithCacheKey(attributes as {to: number});\n\n\nexport const cast = (context: ComputeContext, attributes: CastAttributes): void => {\n let func: ElementwiseFunctionCall;\n switch (attributes.to) {\n case DataType.float16:\n func = 'vec4';\n break;\n case DataType.float:\n func = 'vec4';\n break;\n case DataType.uint32:\n func = 'vec4';\n break;\n case DataType.int32:\n func = 'vec4';\n break;\n case DataType.bool:\n func = 'vec4';\n break;\n default:\n throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${attributes.to}`);\n }\n context.compute(\n createElementwiseProgramInfo(context.inputs[0], 'Cast', func, undefined, attributes.cacheKey, attributes.to));\n};\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nconst generateClipAttributesFromInputs = (inputs: readonly TensorView[]): ClipAttributes => {\n const min = (inputs.length >= 2 && inputs[1].data !== 0) ? inputs[1].getFloat32Array()[0] : MIN_CLIP;\n const max = (inputs.length >= 3 && inputs[2].data !== 0) ? inputs[2].getFloat32Array()[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const clip = (context: ComputeContext, clipAttributes: ClipAttributes): void => {\n const attributes = context.inputs.length === 1 ? clipAttributes : generateClipAttributesFromInputs(context.inputs);\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(\n createElementwiseProgramInfo(\n context.inputs[0], 'Clip', a => `clamp(${a}, clip_min_, clip_max_)`, `\n const clip_min_: vec4<${dataType}> = vec4(${dataType}(${attributes.min}));\n const clip_max_: vec4<${dataType}> = vec4(${dataType}(${attributes.max}));\n`,\n attributes.cacheKey),\n {inputs: [0]});\n};\n\nexport const ceil = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Ceil', 'ceil'));\n};\n\nexport const cos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cos', 'cos'));\n};\n\nexport const cosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cosh', 'cosh'));\n};\n\nexport interface AlphaAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const parseAlphaAttributes = (attributes: Record): AlphaAttributes =>\n createAttributeWithCacheKey(attributes as {alpha: number});\n\nexport const elu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Elu', a => `elu_vf32(${a})`, `\n const elu_alpha_ = ${dataType}(${attributes.alpha});\n\n fn elu_f32(a: ${dataType}) -> ${dataType} {\n return select((exp(a) - 1.0) * elu_alpha_, a, a >= 0.0);\n }\n\n fn elu_vf32(v: vec4<${dataType}>) -> vec4<${dataType}> {\n return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w));\n }`,\n attributes.cacheKey));\n};\n\nexport const erfImpl = (varType = 'f32') => `\nconst r0: ${varType} = 0.3275911;\nconst r1: ${varType} = 0.254829592;\nconst r2: ${varType} = -0.284496736;\nconst r3: ${varType} = 1.421413741;\nconst r4: ${varType} = -1.453152027;\nconst r5: ${varType} = 1.061405429;\n\nfn erf_vf32(v: vec4<${varType}>) -> vec4<${varType}> {\n let absv = abs(v);\n let x = 1.0 / (1.0 + r0 * absv);\n return sign(v) 
* (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv));\n}`;\n\nexport const erf = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Erf', a => `erf_vf32(${a})`, erfImpl(dataType)));\n};\n\nexport const exp = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Exp', 'exp'));\n};\n\nexport const floor = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Floor', 'floor'));\n};\n\nexport const gelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Gelu', a => `0.5 * ${a} * (1.0 + erf_vf32(${a} * 0.7071067811865475))`, erfImpl(dataType)));\n};\n\nexport const leakyRelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'LeakyRelu', a => `select(leaky_relu_alpha_ * ${a}, ${a}, ${a} >= vec4<${dataType}>(0.0))`,\n `const leaky_relu_alpha_ = ${dataType}(${attributes.alpha});`, attributes.cacheKey));\n};\n\nexport const not = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Not', a => `!${a}`));\n};\n\nexport const neg = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Neg', a => `-${a}`));\n};\n\nexport const reciprocal = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Reciprocal', a => `1.0/${a}`));\n};\n\nexport const relu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Relu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > vec4<${dataType}>(0.0))`));\n};\n\nexport const sigmoid = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sigmoid', a => `(1.0 / (1.0 + exp(-${a})))`));\n};\n\nexport interface HardSigmoidAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n readonly beta: number;\n}\n\nexport const parseHardSigmoidAttributes = (attributes: Record): HardSigmoidAttributes =>\n createAttributeWithCacheKey(attributes as {\n alpha: number;\n beta: number;\n });\n\nexport const hardSigmoid = (context: ComputeContext, attributes: HardSigmoidAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'HardSigmoid',\n a => `max(vec4<${dataType}>(0.0), min(vec4<${dataType}>(1.0), ${attributes.alpha} * ${a} + vec4<${dataType}>(${\n attributes.beta})))`,\n undefined, attributes.cacheKey));\n};\n\nexport const sin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sin', 'sin'));\n};\n\nexport const sinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sinh', 'sinh'));\n};\n\nexport const sqrt = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sqrt', 'sqrt'));\n};\n\nexport const tan = (context: ComputeContext): void => {\n 
context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tan', 'tan'));\n};\n\nexport const tanhExpression = (a: string) => `sign(${a}) * (1 - exp(-2 * abs(${a}))) / (1 + exp(-2 * abs(${a})))`;\n\nexport const tanh = (context: ComputeContext): void => {\n // TODO: revisit after https://github.com/gpuweb/gpuweb/issues/4458 is resolved\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tanh', tanhExpression));\n};\n\nexport const fastGeluImpl = (varType = 'f32') => `\nconst fast_gelu_a: ${varType} = 0.5;\nconst fast_gelu_b: ${varType} = 0.7978845608028654;\nconst fast_gelu_c: ${varType} = 0.035677408136300125;\n\nfn tanh_v(v: vec4<${varType}>) -> vec4<${varType}> {\n return ${tanhExpression('v')};\n}\n`;\n\nexport const fastGeluExpression = (x: string) =>\n `(fast_gelu_a + fast_gelu_a * tanh_v(${x} * (fast_gelu_c * ${x} * ${x} + fast_gelu_b))) * ${x}`;\n\nexport const fastGelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'FastGelu', fastGeluExpression, fastGeluImpl(dataType), undefined,\n context.inputs[0].dataType));\n};\n\nexport const thresholdedRelu = (context: ComputeContext, attributes: AlphaAttributes): number => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'ThresholdedRelu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > thresholded_relu_alpha_)`,\n `const thresholded_relu_alpha_ = vec4<${dataType}>(${attributes.alpha});`, attributes.cacheKey));\n return 0;\n};\n\nexport const log = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Log', 'log'));\n};\n\nexport const quickGeluImpl = (varType: string, alpha: number) => `\nconst alpha = vec4<${varType}>(${alpha});\nconst one = ${varType}(1.0);\nconst zero = ${varType}(0.0);\n\nfn quick_gelu_impl(x: vec4<${varType}>) -> vec4<${varType}> {\n let v = x *alpha;\n var x1 : vec4<${varType}>;\n for (var i = 0; i < 4; i = i + 1) {\n if (v[i] >= zero) {\n x1[i] = one / (one + exp(-v[i]));\n } else {\n x1[i] = one - one / (one + exp(v[i]));\n }\n }\n return x * x1;\n}\n`;\n\nexport const quickGeluExpression = (x: string) => `quick_gelu_impl(${x})`;\n\nexport const quickgelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'QuickGelu', quickGeluExpression, quickGeluImpl(dType, attributes.alpha), attributes.cacheKey,\n context.inputs[0].dataType));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType} from './common';\nimport {erfImpl} from './unary-op';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![2560, 5120, 10240].includes(inputs[0].dims[2])) {\n throw new Error('hidden state should be 2560, 5120 or 10240');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasSplitGeluProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n outputShape[2] = outputShape[2] / 2;\n\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims, 4);\n const bias = inputVariable('bias', inputs[0].dataType, [inputs[0].dims[2]], 4);\n const output = outputVariable('output', inputs[0].dataType, outputShape, 4);\n\n const outputSize = ShapeUtil.size(outputShape) / 4;\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const M_SQRT2 = sqrt(2.0);\n const halfChannels = ${inputs[0].dims[2] / 4 / 2}u;\n\n ${shaderHelper.declareVariables(input, bias, output)}\n\n ${erfImpl(dataType)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let biasIdx = global_idx % halfChannels;\n let batchIndex = global_idx / halfChannels;\n let inputOffset = biasIdx + batchIndex * halfChannels * 2;\n let valueLeft = input[inputOffset] + bias[biasIdx];\n let valueRight = input[inputOffset + halfChannels] + bias[biasIdx + halfChannels];\n let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1);\n\n ${output.setByOffset('global_idx', 'valueLeft * geluRight')}\n }`;\n\n return {\n name: 'BiasSplitGelu',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasSplitGelu = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasSplitGeluProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype BuiltinFunctionName = string;\ntype BinaryCustomExpression = (expressionA: string, expressionB: string) => string;\ntype BinaryFunctionCall = BuiltinFunctionName|BinaryCustomExpression|{\n scalar: BinaryCustomExpression;\n vector: BinaryCustomExpression;\n};\n\nconst createBinaryOpProgramShader =\n (shaderHelper: ShaderHelper, dimsA: readonly number[], dimsB: readonly number[], dimsOutput: readonly number[],\n vectorize: boolean, doBroadcast: boolean, sharedDimensionDivisibleBy4: boolean, funcCall: BinaryFunctionCall,\n typeA: number, typeB: number, typeOutput: number, additionalImplementation?: string) => {\n let expressionScalar: BinaryCustomExpression;\n let expressionVector: BinaryCustomExpression;\n if (typeof funcCall === 'string') {\n expressionScalar = expressionVector = (a, b) => `${funcCall}((${a}),(${b}))`;\n } else if (typeof funcCall === 'function') {\n expressionScalar = expressionVector = funcCall;\n } else {\n expressionScalar = funcCall.scalar;\n expressionVector = funcCall.vector;\n }\n\n const output = outputVariable('outputData', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('aData', typeA, dimsA.length, 4);\n const b = inputVariable('bData', typeB, dimsB.length, 4);\n\n let assignment: string;\n if (vectorize) {\n if (doBroadcast) {\n const isAOneElement = ShapeUtil.size(dimsA) === 1;\n const isBOneElement = ShapeUtil.size(dimsB) === 1;\n const aLastDimDivisibleBy4 = dimsA.length > 0 && dimsA[dimsA.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = dimsB.length > 0 && dimsB[dimsB.length - 1] % 4 === 0;\n if (isAOneElement || isBOneElement) {\n assignment = output.setByOffset(\n 'global_idx',\n expressionVector(\n isAOneElement ? `${a.type.value}(${a.getByOffset('0')}.x)` : a.getByOffset('global_idx'),\n isBOneElement ? 
`${b.type.value}(${b.getByOffset('0')}.x)` : b.getByOffset('global_idx')));\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx * 4u')};\n let offsetA = ${a.broadcastedIndicesToOffset('outputIndices', output)};\n let offsetB = ${b.broadcastedIndicesToOffset('outputIndices', output)};\n ${\n output.setByOffset(\n 'global_idx',\n expressionVector(\n sharedDimensionDivisibleBy4 || aLastDimDivisibleBy4 ?\n a.getByOffset('offsetA / 4u') :\n `${a.type.value}(${a.getByOffset('offsetA / 4u')}[offsetA % 4u])`,\n sharedDimensionDivisibleBy4 || bLastDimDivisibleBy4 ?\n b.getByOffset('offsetB / 4u') :\n `${b.type.value}(${b.getByOffset('offsetB / 4u')}[offsetB % 4u])`))}\n `;\n }\n } else {\n assignment = output.setByOffset(\n 'global_idx', expressionVector(a.getByOffset('global_idx'), b.getByOffset('global_idx')));\n }\n } else {\n if (!doBroadcast) {\n throw new Error('no necessary to use scalar implementation for element-wise binary op implementation.');\n }\n\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `aData[indexA${x}][componentA${x}]`;\n const expressionB = `bData[indexB${x}][componentB${x}]`;\n return `\n let outputIndices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offsetA${x} = ${a.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let offsetB${x} = ${b.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let indexA${x} = offsetA${x} / 4u;\n let indexB${x} = offsetB${x} / 4u;\n let componentA${x} = offsetA${x} % 4u;\n let componentB${x} = offsetB${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expressionScalar(expressionA, expressionB)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n outputData[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('outputData[global_idx]', 0)}\n ${singleAssignment('outputData[global_idx]', 1)}\n ${singleAssignment('outputData[global_idx]', 2)}\n ${singleAssignment('outputData[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(a, b, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createBinaryOpProgramInfo =\n (name: string, cacheKey: string, a: TensorView, b: TensorView, funcCall: BinaryFunctionCall,\n additionalImplementation?: string, outputDataType: number = a.dataType): ProgramInfo => {\n const isBroadcast = !ShapeUtil.areEqual(a.dims, b.dims);\n let outputShape = a.dims;\n let outputSize = ShapeUtil.size(a.dims);\n\n let vectorize = false;\n let sharedDimensionDivisibleBy4 = false;\n\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n const cacheKeyAux = [isBroadcast];\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(a.dims, b.dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n const isAOneElement = ShapeUtil.size(a.dims) === 1;\n const isBOneElement = ShapeUtil.size(b.dims) === 1;\n const aLastDimDivisibleBy4 = a.dims.length > 0 && a.dims[a.dims.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = b.dims.length > 0 && b.dims[b.dims.length - 1] % 4 === 0;\n cacheKeyAux.push(isAOneElement);\n cacheKeyAux.push(isBOneElement);\n cacheKeyAux.push(aLastDimDivisibleBy4);\n cacheKeyAux.push(bLastDimDivisibleBy4);\n // check whether vectorize can be enabled\n let sharedDimension = 1;\n for (let i = 1; i < outputShape.length; i++) {\n const dimA = a.dims[a.dims.length - i] ?? 1;\n const dimB = b.dims[b.dims.length - i] ?? 1;\n if (dimA === dimB) {\n sharedDimension *= dimA;\n } else {\n break;\n }\n }\n if (sharedDimension % 4 === 0) {\n sharedDimensionDivisibleBy4 = true;\n vectorize = true;\n } else if (isAOneElement || isBOneElement || aLastDimDivisibleBy4 || bLastDimDivisibleBy4) {\n vectorize = true;\n }\n } else {\n // element-wise\n vectorize = true;\n }\n cacheKeyAux.push(vectorize);\n\n return {\n name,\n shaderCache: {\n hint: cacheKey + cacheKeyAux.map((x) => x.toString()).join('_'),\n inputDependencies: ['rank', 'rank'],\n },\n getShaderSource: (shaderHelper) => createBinaryOpProgramShader(\n shaderHelper, a.dims, b.dims, outputShape, vectorize, isBroadcast, sharedDimensionDivisibleBy4, funcCall,\n a.dataType, b.dataType, outputDataType, additionalImplementation),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* component size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(outputShape) / 4)},\n ...createTensorShapeVariables(a.dims, b.dims, outputShape)\n ],\n }),\n };\n };\n\nconst runBinaryOp =\n (context: ComputeContext, name: string, funcCall: BinaryFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType?: number): void => {\n context.compute(createBinaryOpProgramInfo(\n name, cacheKey ?? '', context.inputs[0], context.inputs[1], funcCall, additionalImplementation,\n outputDataType));\n };\n\nexport const add = (context: ComputeContext): void => {\n runBinaryOp(context, 'Add', (a, b) => `${a}+${b}`);\n};\n\nexport const div = (context: ComputeContext): void => {\n runBinaryOp(context, 'Div', (a, b) => `${a}/${b}`);\n};\n\nexport const equal = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Equal', ({scalar: (a, b) => `u32(${a}==${b})`, vector: (a, b) => `vec4<u32>(${a}==${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const mul = (context: ComputeContext): void => {\n runBinaryOp(context, 'Mul', (a, b) => `${a}*${b}`);\n};\n\nexport const pow = (context: ComputeContext): void => {\n const type = inputVariable('input', context.inputs[0].dataType, context.inputs[0].dims).type.value;\n const roundStr = type === 'i32' ? 
'round' : '';\n runBinaryOp(\n context, 'Pow', ({scalar: (a, b) => `pow_custom(${a},${b})`, vector: (a, b) => `pow_vector_custom(${a},${b})`}),\n `\n fn pow_custom(a : ${type}, b : ${type}) -> ${type} {\n if (b == ${type}(0.0)) {\n return ${type}(1.0);\n } else if (a < ${type}(0.0) && f32(b) != floor(f32(b))) {\n return ${type}(pow(f32(a), f32(b))); // NaN\n }\n return select(sign(a), ${type}(1.0), round(f32(abs(b) % ${type}(2.0))) != 1.0) * ${type}(${\n roundStr}(pow(f32(abs(a)), f32(b))));\n }\n fn pow_vector_custom(a : vec4<${type}>, b : vec4<${type}>) -> vec4<${type}> {\n // TODO: implement vectorized pow\n return vec4<${type}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w));\n }\n `);\n};\n\nexport const sub = (context: ComputeContext): void => {\n runBinaryOp(context, 'Sub', (a, b) => `${a}-${b}`);\n};\n\nexport const greater = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Greater', ({scalar: (a, b) => `u32(${a}>${b})`, vector: (a, b) => `vec4<u32>(${a}>${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const less = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Less', ({scalar: (a, b) => `u32(${a}<${b})`, vector: (a, b) => `vec4<u32>(${a}<${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const greaterOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'GreaterOrEqual', ({scalar: (a, b) => `u32(${a}>=${b})`, vector: (a, b) => `vec4<u32>(${a}>=${b})`}),\n undefined, undefined, DataType.bool);\n};\n\nexport const lessOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'LessOrEqual', ({scalar: (a, b) => `u32(${a}<=${b})`, vector: (a, b) => `vec4<u32>(${a}<=${b})`}),\n undefined, undefined, DataType.bool);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], axis: number): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n const referenceIndex = 0;\n const referenceInput = inputs[referenceIndex];\n const inputType = referenceInput.dataType;\n const inputRank = referenceInput.dims.length;\n inputs.forEach((input, i) => {\n if (i === referenceIndex) {\n return;\n }\n // make sure types of all inputs match\n if (input.dataType !== inputType) {\n throw new Error('input tensors should be one type');\n }\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputRank) {\n throw new Error('input tensors should have the same shape');\n }\n input.dims.forEach((dim, i) => {\n if (i !== axis && dim !== referenceInput.dims[i]) {\n throw new Error('non concat dimensions must match');\n }\n });\n });\n};\n\nconst calculateInputIndexImpl = (numberOfTensors: number, sizeInConcatAxisStr: string): string => `\n fn calculateInputIndex(index: u32) -> u32 {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n for (var i: u32 = 0u; i < ${numberOfTensors}; i += 1u ) {\n if (index < sizeInConcatAxis[i]) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n }`;\n\nconst assignOutputData = (inputs: readonly IndicesHelper[], output: IndicesHelper) => {\n const numberOfTensors = inputs.length;\n\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = output.setByOffset('global_idx', inputs[i].getByIndices('indices'));\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (inputIndex == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (inputIndex == ${i}) { ${returnSnippet} }`);\n }\n }\n return codeLines.join('\\n');\n};\n\nconst createConcatProgramInfo =\n (inputs: readonly TensorView[], adjustedAxis: number, outputShape: number[], dataType: DataType): ProgramInfo => {\n const outputSize = ShapeUtil.size(outputShape);\n\n const sizeInConcatAxis = new Array(inputs.length);\n const inputVars = new Array(inputs.length);\n\n let previousSum = 0;\n const inputDependencies: ProgramInputTensorInfoDependency[] = [];\n const inputRanks = [];\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: outputSize}];\n for (let i = 0; i < inputs.length; ++i) {\n previousSum += inputs[i].dims[adjustedAxis];\n sizeInConcatAxis[i] = previousSum;\n inputRanks.push(inputs[i].dims.length);\n inputVars[i] = inputVariable(`input${i}`, dataType, inputRanks[i]);\n inputDependencies.push('rank');\n programUniforms.push({type: DataType.uint32, data: sizeInConcatAxis[i]});\n }\n for (let i = 0; i < inputs.length; ++i) {\n programUniforms.push(...createTensorShapeVariables(inputs[i].dims));\n }\n 
programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const output = outputVariable('output', dataType, outputShape.length);\n const indicesAxis = output.indicesGet('indices', adjustedAxis);\n const sizeInConcatAxisStr =\n Array.from(Array(sizeInConcatAxis.length).keys()).map(i => `uniforms.sizeInConcatAxis${i}`).join(',');\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${(() => {\n shaderHelper.registerUniform('outputSize', 'u32');\n for (let i = 0; i < inputs.length; i++) {\n shaderHelper.registerUniform(`sizeInConcatAxis${i}`, 'u32');\n }\n return shaderHelper.declareVariables(...inputVars, output);\n })()}\n\n ${calculateInputIndexImpl(sizeInConcatAxis.length, sizeInConcatAxisStr)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n var indices = ${output.offsetToIndices('global_idx')};\n\n let inputIndex = calculateInputIndex(${indicesAxis});\n if (inputIndex != 0u) {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n ${indicesAxis} -= sizeInConcatAxis[inputIndex - 1u];\n }\n\n ${assignOutputData(inputVars, output)}\n }`;\n\n return {\n name: 'Concat',\n shaderCache: {hint: `${adjustedAxis}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const concat = (context: ComputeContext, attributes: ConcatAttributes): void => {\n const inputs = context.inputs;\n const inputShape = inputs[0].dims;\n const adjustedAxis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n validateInputs(inputs, adjustedAxis);\n const outputShape = inputShape.slice();\n outputShape[adjustedAxis] =\n inputs.reduce((sum, input) => sum + (input.dims.length > adjustedAxis ? input.dims[adjustedAxis] : 0), 0);\n // 0 length tensors are valid for concat, remove them\n const nonEmptyInputs = inputs.filter(input => ShapeUtil.size(input.dims) > 0);\n context.compute(\n createConcatProgramInfo(nonEmptyInputs, adjustedAxis, outputShape, inputs[0].dataType), {inputs: nonEmptyInputs});\n};\n\nexport const parseConcatAttributes = (attributes: Record): ConcatAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {MAX_CLIP, MIN_CLIP} from '../../util';\nimport {ProgramUniform} from '../types';\n\nimport {UniformsArrayType} from './common';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly alpha?: number;\n readonly beta?: number;\n}\n\nexport const getActivationSnippet =\n (attributes: InternalActivationAttributes, valueType: string, baseType = 'f32'): string => {\n switch (attributes.activation) {\n case 'Relu':\n return `value = max(value, ${valueType}(0.0));`;\n case 'Sigmoid':\n return `value = (${valueType}(1.0) / (${valueType}(1.0) + exp(-value)));`;\n case 'Clip':\n return `value = clamp(value, ${valueType}(${baseType}(uniforms.clip_min)), ${valueType}(${\n baseType}(uniforms.clip_max)));`;\n case 'HardSigmoid':\n return `value = max(${valueType}(0.0), min(${valueType}(1.0), ${baseType}(uniforms.alpha) * value + ${\n baseType}(uniforms.beta)));`;\n case 'LeakyRelu':\n return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;\n case '':\n return '';\n // TODO: adding other activations that can be fused.\n default:\n throw new Error(`Unsupported activation ${attributes.activation}`);\n }\n };\n\nexport const appendActivationUniformsData =\n (attributes: InternalActivationAttributes, programUniform: ProgramUniform[]) => {\n if (attributes.activation === 'Clip') {\n programUniform.push(\n {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});\n } else if (attributes.activation === 'HardSigmoid') {\n programUniform.push(\n {type: DataType.float, data: attributes.alpha!}, {type: DataType.float, data: attributes.beta!});\n } else if (attributes.activation === 'LeakyRelu') {\n programUniform.push({type: DataType.float, data: attributes.alpha!});\n }\n };\n\nexport const appendActivationUniforms = (attributes: InternalActivationAttributes, uniforms: UniformsArrayType) => {\n if (attributes.activation === 'Clip') {\n uniforms.push({name: 'clip_max', type: 'f32'}, {name: 'clip_min', type: 'f32'});\n } else if (attributes.activation === 'HardSigmoid') {\n uniforms.push({name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'});\n } else if (attributes.activation === 'LeakyRelu') {\n uniforms.push({name: 'alpha', type: 'f32'});\n }\n};\n\nexport const parseInternalActivationAttributes =\n (attributes: Record|undefined): InternalActivationAttributes => {\n const activation = attributes?.activation as string || '';\n if (activation === 'HardSigmoid') {\n const [alpha, beta] = attributes?.activation_params as [number, number] || [0.2, 0.5];\n return {activation, alpha, beta};\n } else if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes?.activation_params as [number, number] || [MIN_CLIP, MAX_CLIP];\n return {activation, clipMax, clipMin};\n } else if (activation === 'LeakyRelu') {\n const [alpha] = attributes?.activation_params as [number] || [0.01];\n return {activation, alpha};\n }\n return {activation};\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/activation_util.ts\n//\n// modified to fit the needs of the project\n\nexport const typeSnippet = (component: number, dataType: string) => {\n switch (component) {\n case 1:\n return dataType;\n case 2:\n return `vec2<${dataType}>`;\n case 3:\n return `vec3<${dataType}>`;\n case 4:\n return `vec4<${dataType}>`;\n default:\n throw new Error(`${component}-component is not supported.`);\n }\n};\n\nexport const biasSnippet = (hasBias: boolean): string => `\n ${hasBias ? 'value = value + getBiasByOutputCoords(coords);' : ''}\n `;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-core/src/ops/conv_util.ts\n//\n// modified to fit the needs of the project\n\nexport const utilFunctions = (strideStr: string) => (`\nfn getIndexFromCoords4D(coords : vec4<i32>, shape : vec4<i32>) -> i32 {\n return dot(coords, vec4<i32>(\n shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1));\n}\nfn getOutputIndexFromCoords(coords : vec4<i32>) -> i32 {\n return dot(coords, vec4<i32>(\n i32(${strideStr}.x), i32(${strideStr}.y), i32(${strideStr}.z), 1));\n}\n`);\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/matmul_packed_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getBroadcastDims, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from '../fuse-utils';\n\nimport {typeSnippet} from './activation_util';\n\nconst writeDataToSubAVec4Snippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRow + innerRow,\n kStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst calculateResultSnippet = (transposeA: boolean, innerElementSize: number) => {\n if (transposeA) {\n return `\n let ACached0 = mm_Asub[k * innerElementSize][localRow];\n let ACached1 = mm_Asub[k * innerElementSize + 1][localRow];\n let ACached2 = mm_Asub[k * innerElementSize + 2][localRow];\n ${innerElementSize === 3 ? '' : 'let ACached3 = mm_Asub[k * innerElementSize + 3][localRow];'}\n for (var i = 0; i < rowPerThread; i = i + 1) {\n acc[i] = BCached0 * ACached0[i] + acc[i];\n acc[i] = BCached1 * ACached1[i] + acc[i];\n acc[i] = BCached2 * ACached2[i] + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached3[i] + acc[i];'}\n }`;\n } else {\n return `\n for (var i = 0; i < rowPerThread; i = i + 1) {\n let ACached = mm_Asub[tileRow + i][k];\n acc[i] = BCached0 * ACached.x + acc[i];\n acc[i] = BCached1 * ACached.y + acc[i];\n acc[i] = BCached2 * ACached.z + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached.w + acc[i];'}\n }`;\n }\n};\n\nexport const makeMatMulPackedVec4Source =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32): string => {\n const tileAOuter = workgroupSize[1] * workPerThread[1];\n const tileBOuter = workgroupSize[0] * workPerThread[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? 
tileInner : tileAOuter;\n const innerElementSize = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n\n if (!(((transposeA && innerElementSize === 4 && workPerThread[1] === 4) ||\n (!transposeA && (innerElementSize === 3 || innerElementSize === 4))) &&\n tileAWidth % workgroupSize[0] === 0 && tileInner % workgroupSize[1] === 0 && workPerThread[0] === 4)) {\n throw new Error(`If transposeA ${transposeA} is true, innerElementSize ${\n innerElementSize} and workPerThread[1] ${workPerThread[1]} must be 4.\n Otherwise, innerElementSize ${innerElementSize} must be 3 or 4.\n tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${workgroupSize[0]}. tileInner ${\n tileInner} must be divisible by workgroupSize[1] ${workgroupSize[1]}. colPerThread ${\n workPerThread[0]} must be 4.`);\n }\n return `\nvar mm_Asub: array, ${tileAWidth / innerElementSize}>, ${tileAHight}>;\nvar mm_Bsub: array, ${tileBOuter / workPerThread[0]}>, ${tileInner}>;\n\nconst rowPerThread = ${workPerThread[1]};\nconst colPerThread = ${workPerThread[0]};\nconst innerElementSize = ${innerElementSize};\nconst tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let localRow = i32(localId.y);\n let tileRow = localRow * rowPerThread;\n let tileCol = i32(localId.x);\n\n let globalRow =i32(globalId.y) * rowPerThread;\n let globalCol = i32(globalId.x);\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\n let num_tiles = ${splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc: array, rowPerThread>;\n\n // Loop over shared dimension.\n let tileRowB = localRow * ${rowPerThreadB};\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let inputRow = tileRow + innerRow;\n let inputCol = tileCol;\n ${writeDataToSubAVec4Snippet(transposeA, batchDims)}\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch, kStart + inputRow, globalCol${\n batchDims ? ', batchIndices' : ''});\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n for (var k = 0; k < tileInner / innerElementSize; k = k + 1) {\n let BCached0 = mm_Bsub[k * innerElementSize][tileCol];\n let BCached1 = mm_Bsub[k * innerElementSize + 1][tileCol];\n let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol];\n ${innerElementSize === 3 ? 
'' : 'let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];'}\n\n ${calculateResultSnippet(transposeA, innerElementSize)}\n }\n\n workgroupBarrier();\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n mm_write(batch, globalRow + innerRow, globalCol, acc[innerRow]);\n }\n}`;\n };\n\nconst writeDataToSubASnippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRowStart + inputRow,\n kStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst readDataFromSubASnippet = (transposeA: boolean) =>\n transposeA ? 'let ACached = mm_Asub[k][tileRow + innerRow];' : 'let ACached = mm_Asub[tileRow + innerRow][k];';\n\n// sequentialAccessByThreads means sequential data in memory is accessed by\n// threads, instead of a single thread (default behavior).\nexport const makeMatMulPackedSource =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32,\n sequentialAccessByThreads = false): string => {\n const tileAOuter = workPerThread[1] * workgroupSize[1];\n const tileBOuter = workPerThread[0] * workgroupSize[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? tileInner : tileAOuter;\n\n if (!(tileAHight % workgroupSize[1] === 0 && tileAWidth % workgroupSize[0] === 0 &&\n tileInner % workgroupSize[1] === 0)) {\n throw new Error(`tileAHight ${tileAHight} must be divisible by workgroupSize[1]${\n workgroupSize[1]}, tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${\n workgroupSize[0]}, tileInner ${tileInner} must be divisible by workgroupSize[1]${workgroupSize[1]}`);\n }\n const rowPerThreadA = tileAHight / workgroupSize[1];\n const colPerThreadA = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n const matmulSnippet = sequentialAccessByThreads ?\n `\n let localRow = i32(localId.y);\n let localCol = i32(localId.x);\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n let globalColStart = i32(workgroupId.x) * ${tileBOuter};\n\n // Loop over shared dimension.\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var inputRow = localRow; inputRow < ${tileAHight}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileAWidth}; inputCol = inputCol + ${workgroupSize[0]}) {\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n // Load one tile of B into local memory.\n for (var inputRow = localRow; inputRow < ${tileInner}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileBOuter}; inputCol = inputCol + ${workgroupSize[0]}) {\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalColStart + inputCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][localCol + inner * ${workgroupSize[0]}];\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let ACached = ${\n transposeA ? `mm_Asub[k][localRow + innerRow * ${workgroupSize[1]}];` :\n `mm_Asub[localRow + innerRow * ${workgroupSize[1]}][k];`}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] +\n ACached * BCached[innerCol];\n }\n }\n }\n workgroupBarrier();\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let gRow = globalRowStart + localRow + innerRow * ${workgroupSize[1]};\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let gCol = globalColStart + localCol + innerCol * ${workgroupSize[0]};\n mm_write(batch, gRow, gCol, acc[innerRow][innerCol]);\n }\n }\n ` :\n `\nlet tileRow = i32(localId.y) * rowPerThread;\nlet tileCol = i32(localId.x) * colPerThread;\n\nlet globalRow = i32(globalId.y) * rowPerThread;\nlet globalCol = i32(globalId.x) * colPerThread;\nlet globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\nlet tileRowA = i32(localId.y) * ${rowPerThreadA};\nlet tileColA = i32(localId.x) * ${colPerThreadA};\nlet tileRowB = i32(localId.y) * ${rowPerThreadB};\n// Loop over shared dimension.\nfor (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadA}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < ${colPerThreadA}; innerCol = innerCol + 1) {\n let inputRow = tileRowA + innerRow;\n let inputCol = tileColA + innerCol;\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol + innerCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalCol + innerCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][tileCol + inner];\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n ${readDataFromSubASnippet(transposeA)}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol];\n }\n }\n }\n\n workgroupBarrier();\n}\n\nfor (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n mm_write(batch, globalRow + innerRow, globalCol + innerCol,\n acc[innerRow][innerCol]);\n }\n}\n`;\n\n return `\n var mm_Asub : array, ${tileAHight}>;\n var mm_Bsub : array, ${tileInner}>;\n const rowPerThread = ${workPerThread[1]};\n const colPerThread = ${workPerThread[0]};\n const tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let num_tiles = ${\n splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc : array, rowPerThread>;\n ${matmulSnippet}\n }\n`;\n };\n\nconst matMulReadWriteFnSource =\n (component: number, hasBias: boolean, applyActivation: string, variables: IndicesHelper[],\n batchShapes: Array, isChannelsLast = false): string => {\n const [batchAShape, batchBShape, batchShape] = batchShapes;\n const [batchVariable, aVariable, bVariable, outputVariable] = variables;\n const broadCastADims = getBroadcastDims(batchAShape, batchShape);\n const broadCastBDims = getBroadcastDims(batchBShape, batchShape);\n const dataType = tensorTypeToWsglStorageType(variables[0].type.tensor);\n const getAIndices = () => {\n const aRank = aVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var aIndices: ${aVariable.type.indices};`;\n for (let i = aRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\naIndices[${i}] = ${batchRank > 1 ? `batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastADims.forEach(i => {\n resStr += `\\naIndices[${i}] = 0;`;\n });\n resStr += `\\naIndices[${aRank - 2}] = u32(row);\n aIndices[${aRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const getBIndices = () => {\n const bRank = bVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var bIndices: ${bVariable.type.indices};`;\n for (let i = bRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\nbIndices[${i}] = ${batchRank > 1 ? 
`batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastBDims.forEach(i => {\n resStr += `\\nbIndices[${i}] = 0;`;\n });\n resStr += `\\nbIndices[${bRank - 2}] = u32(row);\n bIndices[${bRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const source = `\n fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_a_outer && col < uniforms.dim_inner)\n {\n ${getAIndices()}\n value = ${aVariable.getByIndices('aIndices')};\n }\n return value;\n }\n\n fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_inner && col < uniforms.dim_b_outer)\n {\n ${getBIndices()}\n value = ${bVariable.getByIndices('bIndices')};\n }\n return value;\n }\n\n fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${typeSnippet(component, dataType)}) {\n let col = colIn * ${component};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueIn;\n let coords = vec3(batch, row, colIn);\n ${\n hasBias ?\n `value = value + ${isChannelsLast ? 'bias[colIn]' : `${typeSnippet(component, dataType)}(bias[row])`};` :\n '' }\n ${applyActivation}\n ${outputVariable.setByIndices('vec3(coords)', 'value')}\n }\n }\n `;\n return source;\n };\n\nexport const createMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const dimAOuter = aShape[aShape.length - 2];\n const dimInner = aShape[aShape.length - 1];\n const dimBOuter = bShape[bShape.length - 1];\n const isVec4 = dimInner % 4 === 0 && dimBOuter % 4 === 0;\n\n // TODO: fine tune size\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const workgroupSize: [number, number, number] = [8, 8, 1];\n const dispatch = [\n Math.ceil(dimBOuter / workgroupSize[0] / elementsPerThread[0]),\n Math.ceil(dimAOuter / workgroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workgroupSize[2] / elementsPerThread[2])\n ];\n\n const components = isVec4 ? 
4 : 1;\n const aShapeTemp = [...outerDimsA, dimAOuter, dimInner / components];\n const aRank = aShapeTemp.length;\n const bShapeTemp = [...outerDimsB, dimInner, dimBOuter / components];\n const bRank = bShapeTemp.length;\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShapeTemp, bShapeTemp));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n const hasBias = inputs.length > 2;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchRank = outerDims.length;\n const batchDims = internalVariable('batchDims', inputs[0].dataType, batchRank, 1);\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const A = inputVariable('a', inputs[0].dataType, aRank, components);\n const B = inputVariable('b', inputs[1].dataType, bRank, components);\n const output = outputVariable('result', inputs[0].dataType, outputShapeTemp.length, components);\n const inputVariables = [A, B];\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n }\n const uniforms: UniformsArrayType =\n [{name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'}];\n appendActivationUniforms(activationAttributes, uniforms);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const declareFunctions = matMulReadWriteFnSource(\n components, hasBias, applyActivation, [batchDims, A, B, output], [outerDimsA, outerDimsB, outerDims],\n isChannelsLast);\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${declareFunctions}\n ${\n isVec4 ? makeMatMulPackedVec4Source(elementsPerThread, workgroupSize, dataType, batchDims) :\n makeMatMulPackedSource(elementsPerThread, workgroupSize, dataType, batchDims)}\n `;\n };\n return {\n name: 'MatMul',\n shaderCache: {\n hint: `${elementsPerThread};${activationAttributes.activation};${isVec4};${isChannelsLast}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv2d_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet, typeSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dCommonSnippet =\n (isChannelsLast: boolean, fitAOuter: boolean, fitBOuter: boolean, fitInner: boolean, addBias = false,\n attributes: ConvAttributes, innerElementSizeX = 4, innerElementSizeW = 4, innerElementSize = 4,\n dataType = 'f32'): string => {\n const getXSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'resData = x[xIndex];';\n case 3:\n return `resData = vec3<${dataType}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;\n case 4:\n return 'resData = x[xIndex / 4];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[row * i32(uniforms.w_shape[3]) + colIn];';\n case 4:\n return 'return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, xRow, xCol, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, xRow, xCol);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n const readXSnippet = `\n let inChannels = i32(uniforms.w_shape[2]);\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (i32(uniforms.w_shape[1]) * inChannels);\n let WCol = ${col} / inChannels % i32(uniforms.w_shape[1]);\n let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0];\n let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1];\n let xCh = ${col} % inChannels;\n var resData = ${typeSnippet(innerElementSizeX, dataType)}(0.0);\n // The bounds checking is always needed since we use it to pad zero for\n // the 'same' padding type.\n if (xRow >= 0 && xRow < ${xHeight} && xCol >= 0 && xCol < ${xWidth}) {\n ${coordASnippet}\n let xIndex = getIndexFromCoords4D(coord, vec4(uniforms.x_shape));\n ${getXSnippet(innerElementSizeX)}\n }\n return resData;`;\n\n const sampleX = isChannelsLast ? (fitAOuter && fitInner ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`) :\n (fitInner && fitBOuter ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`);\n\n const sampleW = `${getWSnippet(innerElementSizeW)}`;\n\n const resType = typeSnippet(innerElementSize, dataType);\n const aType =\n isChannelsLast ? typeSnippet(innerElementSizeX, dataType) : typeSnippet(innerElementSizeW, dataType);\n const bType =\n isChannelsLast ? typeSnippet(innerElementSizeW, dataType) : typeSnippet(innerElementSizeX, dataType);\n const applyActivation = getActivationSnippet(attributes, resType, dataType);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${aType} {\n ${isChannelsLast ? sampleX : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${bType} {\n ${isChannelsLast ? sampleW : sampleX}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueIn : ${resType}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer)\n {\n var value = valueIn;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value);\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[], dimAOuter: number,\n dimBOuter: number, dimInner: number, hasBias: boolean, sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 || inChannels % 3 === 0) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv2d_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const tileAOuter = workGroupSize[1] * elementsPerThread[1];\n const tileBOuter = workGroupSize[0] * elementsPerThread[0];\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const fitAOuter = dimAOuter % tileAOuter === 0;\n const fitBOuter = dimBOuter % tileBOuter === 0;\n const fitInner = dimInner % tileInner === 0;\n const elementsSize = isVec4 ? [innerElementSize, 4, 4] : [1, 1, 1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.int32, data: attributes.strides}, {type: DataType.int32, data: attributes.dilations}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'pad', type: 'i32', length: 2}, {name: 'stride', type: 'i32', length: 2},\n {name: 'dilation', type: 'i32', length: 2}\n ];\n appendActivationUniforms(attributes, uniforms);\n\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n result[flatIndex] = ${isVec4 ? `vec4<${t}>` : t}(value);\n }\n fn setOutputAtCoords(d0 : i32, d1 : i32, d2 : i32, d3 : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n let flatIndex = getOutputIndexFromCoords(vec4(d0, d1, d2, d3));\n setOutputAtIndex(flatIndex ${isVec4 ? '/ 4' : ''}, value);\n }`;\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n return `\n ${utilFunctions('uniforms.result_strides')}\n //struct Uniforms { xShape : vec4, wShape : vec4, outShape : vec4,\n // outShapeStrides: vec3, filterDims : vec2, pad : vec2, stride : vec2,\n // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 };\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n ${\n conv2dCommonSnippet(\n isChannelsLast, fitAOuter, fitBOuter, fitInner, hasBias, attributes, elementsSize[0], elementsSize[1],\n elementsSize[2], t)}\n ${\n isVec4 ?\n makeMatMulPackedVec4Source(elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner, false, undefined,\n sequentialAccessByThreads)}`;\n };\n return {\n name: 'Conv2DMatMul',\n shaderCache: {\n hint: `${attributes.cacheKey};${innerElementSize};${isVec4};${fitAOuter};${fitBOuter};${fitInner};${\n tileAOuter};${tileBOuter};${tileInner}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv3d_naive_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\n\nconst arrayProduct = (arr: number[]) => {\n let product = 1;\n for (let i = 0; i < arr.length; i++) {\n product *= arr[i];\n }\n return product;\n};\n\nconst parse3TupleParam = (param: number|[number, number, number]): [number, number, number] =>\n typeof param === 'number' ? 
[param, param, param] : param;\n\nconst getEffectiveFilterSize = (filterSize: number, dilation: number): number => {\n if (dilation <= 1) {\n return filterSize;\n }\n\n return filterSize + (filterSize - 1) * (dilation - 1);\n};\n\nconst computeDefaultPad =\n (inputShape: [number, number]|[number, number, number, number], fieldSize: number, stride: number, dilation = 1):\n number => {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n };\n\nconst computeOutputShape4D =\n (inShape: [number, number, number, number], filterShape: [number, number, number], outChannels: number,\n strides: [number, number, number], zeroPad?: number): [number, number, number, number] => {\n if (zeroPad == null) {\n // eslint-disable-next-line no-param-reassign\n zeroPad = computeDefaultPad(inShape, filterShape[0], strides[0]);\n }\n const outShape: [number, number, number, number] = [0, 0, 0, outChannels];\n for (let index = 0; index < 3; index++) {\n if (inShape[index] + 2 * zeroPad >= filterShape[index]) {\n outShape[index] = Math.trunc((inShape[index] - filterShape[index] + 2 * zeroPad) / strides[index] + 1);\n }\n }\n return outShape;\n };\n\nconst get3DPadAndOutInfo =\n (pad: number|string|number[], inDepth: number, inHeight: number, inWidth: number, strideDepth: number,\n strideHeight: number, strideWidth: number, filterDepth: number, filterHeight: number,\n filterWidth: number): {padInfo: PadInfo3D; outDepth: number; outHeight: number; outWidth: number} => {\n let padInfo: PadInfo3D;\n let outDepth: number;\n let outHeight: number;\n let outWidth: number;\n\n if (pad === 'VALID') {\n // eslint-disable-next-line no-param-reassign\n pad = 0;\n }\n\n if (typeof pad === 'number') {\n padInfo = {top: pad, bottom: pad, left: pad, right: pad, front: pad, back: pad};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (Array.isArray(pad)) {\n if (!pad.every((val, _, arr) => val === arr[0])) {\n throw Error(`Unsupported padding parameter: ${pad}`);\n }\n padInfo = {top: pad[0], bottom: pad[1], left: pad[2], right: pad[3], front: pad[4], back: pad[5]};\n const outShape = computeOutputShape4D(\n [inDepth, inHeight, inWidth, 1], [filterDepth, filterHeight, filterWidth], 1,\n [strideDepth, strideHeight, strideWidth], pad[0]);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n } else if (pad === 'SAME_UPPER') {\n // TODO: support 'SAME_LOWER'.\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n\n padInfo = {top, bottom, left, right, front, back};\n } else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return {padInfo, outDepth, outHeight, outWidth};\n };\n\ntype PadInfo3D = {\n top: 
number; left: number; right: number; bottom: number; front: number; back: number;\n};\n\nexport type Conv3DInfo = {\n batchSize: number; inDepth: number; inHeight: number; inWidth: number; inChannels: number; outDepth: number;\n outHeight: number;\n outWidth: number;\n outChannels: number;\n dataFormat: 'channelsFirst' | 'channelsLast';\n strideDepth: number;\n strideHeight: number;\n strideWidth: number;\n dilationDepth: number;\n dilationHeight: number;\n dilationWidth: number;\n filterDepth: number;\n filterHeight: number;\n filterWidth: number;\n effectiveFilterDepth: number;\n effectiveFilterHeight: number;\n effectiveFilterWidth: number;\n padInfo: PadInfo3D;\n inShape: [number, number, number, number, number];\n outShape: [number, number, number, number, number];\n filterShape: [number, number, number, number, number];\n};\n\nexport const computeConv3DInfo =\n (inShape: [number, number, number, number, number], filterShape: [number, number, number, number, number],\n strides: number|[number, number, number], dilations: number|[number, number, number], pad: number|string|number[],\n depthwise = false, dataFormat: 'channelsFirst'|'channelsLast' = 'channelsLast'): Conv3DInfo => {\n let batchSize, inDepth, inHeight, inWidth, inChannels;\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n } else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n } else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterChannels, , filterDepth, filterHeight, filterWidth] = filterShape;\n\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const {padInfo, outDepth, outHeight, outWidth} = get3DPadAndOutInfo(\n pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth,\n effectiveFilterHeight, effectiveFilterWidth);\n\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n\n let outShape: [number, number, number, number, number] = [0, 0, 0, 0, 0];\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n } else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n };\n\nexport const createConv3DNaiveProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[],\n filterDims: readonly number[], pads: readonly number[], dataFormat: string): ProgramInfo => {\n const isChannelsLast = dataFormat === 'channelsLast';\n const inChannels = isChannelsLast ? 
inputs[0].dims[3] : inputs[0].dims[1];\n // TODO: enable vec4.\n const isVec4 = false;\n const workGroupSize: [number, number, number] = [64, 1, 1];\n const dispatchLayout = {x: outputShape.map((_, i) => i)};\n const dispatch = [Math.ceil(arrayProduct(dispatchLayout.x.map(d => outputShape[d])) / (workGroupSize[0])), 1, 1];\n\n LOG_DEBUG('verbose', () => `[conv3d_naive_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: filterDims},\n {type: DataType.uint32, data: pads}, {type: DataType.uint32, data: attributes.strides},\n {type: DataType.uint32, data: attributes.dilations}\n ];\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'pads', type: 'u32', length: pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length},\n {name: 'dilations', type: 'u32', length: attributes.dilations.length}\n ];\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : array) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[${isChannelsLast ? getElementAt('coords', 4, 5) : getElementAt('coords', 1, 5)}${\n isVec4 ? '/ 4' : ''}];\n }`;\n }\n\n return `\n ${declareFunctions}\n fn getX(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${x.getByIndices('aIndices')};\n }\n fn getW(d0 : u32, d1 : u32, d2 : u32, d3 : u32, d4 : u32) -> f32 {\n let aIndices = array(d0, d1, d2, d3, d4);\n return ${w.getByIndices('aIndices')};\n }\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let coords = ${output.offsetToIndices('global_idx')};\n let batch = ${getElementAt('coords', 0, x.rank)};\n let d2 = ${\n isChannelsLast ? getElementAt('coords', x.rank - 1, x.rank) : getElementAt('coords', 1, x.rank)};\n let xFRCCorner = vec3(${\n isChannelsLast ? getElementAt('coords', 1, x.rank) : getElementAt('coords', 2, x.rank)},\n ${isChannelsLast ? getElementAt('coords', 2, x.rank) : getElementAt('coords', 3, x.rank)},\n ${\n isChannelsLast ? 
getElementAt('coords', 3, x.rank) :\n getElementAt('coords', 4, x.rank)}) * uniforms.strides - uniforms.pads;\n let xFCorner = xFRCCorner.x;\n let xRCorner = xFRCCorner.y;\n let xCCorner = xFRCCorner.z;\n let xShapeY = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 1, x.rank) : getElementAt('uniforms.x_shape', 2, x.rank)};\n let xShapeZ = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 2, x.rank) : getElementAt('uniforms.x_shape', 3, x.rank)};\n let xShapeW = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 3, x.rank) : getElementAt('uniforms.x_shape', 4, x.rank)};\n let xShapeU = ${\n isChannelsLast ? getElementAt('uniforms.x_shape', 4, x.rank) : getElementAt('uniforms.x_shape', 1, x.rank)};\n let inputDepthNearestVec4 = (xShapeU / 4) * 4;\n let inputDepthVec4Remainder = xShapeU % 4;\n\n var dotProd = 0.0;\n for (var wF = 0u; wF < uniforms.filter_dims[0]; wF++) {\n let xF = xFCorner + wF * uniforms.dilations[0];\n if (xF < 0 || xF >= xShapeY) {\n continue;\n }\n\n for (var wR = 0u; wR < uniforms.filter_dims[1]; wR++) {\n let xR = xRCorner + wR * uniforms.dilations[1];\n if (xR < 0 || xR >= xShapeZ) {\n continue;\n }\n\n for (var wC = 0u; wC < uniforms.filter_dims[2]; wC++) {\n let xC = xCCorner + wC * uniforms.dilations[2];\n if (xC < 0 || xC >= xShapeW) {\n continue;\n }\n\n for (var d1 = 0u; d1 < inputDepthNearestVec4; d1 += 4) {\n ${\n isChannelsLast ? `let xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3));\n ` :\n `let xValues = vec4(\n getX(batch, d1, xF, xR, xC),\n getX(batch, d1 + 1, xF, xR, xC),\n getX(batch, d1 + 2, xF, xR, xC),\n getX(batch, d1 + 3, xF, xR, xC));\n `}\n let wValues = vec4(\n getW(d2, d1, wF, wR, wC),\n getW(d2, d1 + 1, wF, wR, wC),\n getW(d2, d1 + 2, wF, wR, wC),\n getW(d2, d1 + 3, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n if (inputDepthVec4Remainder == 1) {\n ${\n isChannelsLast ? `dotProd += getX(batch, xF, xR, xC, inputDepthNearestVec4)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);` :\n `dotProd += getX(batch, inputDepthNearestVec4, xF, xR, xC)\n * getW(d2, inputDepthNearestVec4, wF, wR, wC);`}\n } else if (inputDepthVec4Remainder == 2) {\n ${\n isChannelsLast ? `let xValues = vec2(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1));\n ` :\n `let xValues = vec2(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC));\n `}\n let wValues = vec2(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n } else if (inputDepthVec4Remainder == 3) {\n ${\n isChannelsLast ? `let xValues = vec3(\n getX(batch, xF, xR, xC, inputDepthNearestVec4),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 1),\n getX(batch, xF, xR, xC, inputDepthNearestVec4 + 2));\n ` :\n `let xValues = vec3(\n getX(batch, inputDepthNearestVec4, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 1, xF, xR, xC),\n getX(batch, inputDepthNearestVec4 + 2, xF, xR, xC));\n `}\n let wValues = vec3(\n getW(d2, inputDepthNearestVec4, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 1, wF, wR, wC),\n getW(d2, inputDepthNearestVec4 + 2, wF, wR, wC));\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n ${hasBias ? 
'dotProd = dotProd + getBiasByOutputCoords(coords)' : ''};\n result[global_idx] = f32(dotProd);\n }`;\n };\n return {\n name: 'Conv3DNaive',\n shaderCache:\n {hint: `${attributes.cacheKey};${isChannelsLast};${innerElementSize};${hasBias}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from './fuse-utils';\n\n/**\n * naive grouped conv implementation, supports 1d/2d conv\n * @param squeezeOutputShapeFunction - an optional function to squeeze the output shape, only used in conv1d\n */\nexport const createGroupedConvProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 'value += b[output_channel];' : '';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n\n const isChannelLast = attributes.format === 'NHWC';\n const outputShape = calculateOutputShape(\n xShape, wShape, attributes.dilations, attributes.pads, attributes.strides, isChannelLast);\n const outputSize = ShapeUtil.size(outputShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.dilations},\n {type: DataType.uint32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.uint32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.uint32, data: outputChannelsPerGroup}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length);\n const w = inputVariable('w', inputs[1].dataType, wShape.length);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims.length));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'dilations', type: 'u32', length: attributes.dilations.length},\n {name: 'strides', type: 'u32', length: 2}, {name: 'pads', type: 'u32', length: 
2},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch: u32 = outputIndices[0];\n let output_channel: u32 = outputIndices[${isChannelLast ? 3 : 1}];\n let xRCCorner: vec2 = vec2(outputIndices[${isChannelLast ? 1 : 2}], outputIndices[${\n isChannelLast ? 2 : 3}]) * uniforms.strides - uniforms.pads;\n let group_id: u32 = output_channel / uniforms.output_channels_per_group;\n\n var value: ${output.type.value} = ${output.type.value}(0);\n for (var wInChannel: u32 = 0u; wInChannel < uniforms.w_shape[1]; wInChannel++) {\n let input_channel = group_id * uniforms.w_shape[1] + wInChannel;\n for (var wHeight: u32 = 0u; wHeight < uniforms.w_shape[2]; wHeight++) {\n let xHeight = xRCCorner.x + wHeight * uniforms.dilations[0];\n\n if (xHeight < 0u || xHeight >= uniforms.x_shape[${isChannelLast ? 1 : 2}]) {\n continue;\n }\n\n for (var wWidth: u32 = 0u; wWidth < uniforms.w_shape[3]; wWidth++) {\n let xWidth = xRCCorner.y + wWidth * uniforms.dilations[1];\n if (xWidth < 0u || xWidth >= uniforms.x_shape[${isChannelLast ? 2 : 3}]) {\n continue;\n }\n\n let xVal = ${\n isChannelLast ? x.get('batch', 'xHeight', 'xWidth', 'input_channel') :\n x.get('batch', 'input_channel', 'xHeight', 'xWidth')};\n let wVal = ${w.get('output_channel', 'wInChannel', 'wHeight', 'wWidth')};\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${output.setByOffset('global_idx', 'value')}\n }`;\n };\n return {\n name: 'GroupedConv',\n shaderCache: {hint: attributes.cacheKey, inputDependencies},\n getRunData: () => ({\n outputs: [{\n dims: squeezeOutputShapeFunction ? 
squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const createGroupedConvVectorizeProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const components = getMaxComponents(outputShape[3]);\n const outputNumber = getMaxComponents(outputShape[2]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const xShape = [inputs[0].dims[0], inputs[0].dims[1], inputs[0].dims[2], inputs[0].dims[3] / components];\n const wShape = [inputs[1].dims[0], inputs[1].dims[1], inputs[1].dims[2], inputs[1].dims[3] / components];\n const outputShapeInShader = [outputShape[0], outputShape[1], outputShape[2], outputShape[3] / components];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.int32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape, outputShapeInShader));\n const xNumber = (outputNumber - 1) * attributes.strides[1] + wShape[1];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length, components);\n const w = inputVariable('w', inputs[1].dataType, wShape.length, components);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims, components));\n }\n const processBias = hasBias ? 
'value += b[output_channel];' : '';\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'strides', type: 'i32', length: 2},\n {name: 'pads', type: 'i32', length: 2},\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let width0 = uniforms.output_shape[3];\n let output_channel = global_idx % width0;\n var index1 = global_idx / width0;\n let width1 = uniforms.output_shape[2] / ${outputNumber}u;\n let col = (index1 % width1) * ${outputNumber}u;\n index1 = index1 / width1;\n let row = index1 % uniforms.output_shape[1];\n let batch = index1 / uniforms.output_shape[1];\n\n let x_corner = vec2(i32(row), i32(col)) * uniforms.strides - uniforms.pads;\n\n var x_vals: array<${x.type.value}, ${xNumber}>;\n var values: array<${output.type.value}, ${outputNumber}>;\n let input_channel = output_channel;\n // Use constant instead of uniform can give better performance for w's height/width.\n for (var w_height: u32 = 0u; w_height < ${wShape[0]}; w_height++) {\n let x_height = x_corner.x + i32(w_height);\n if (x_height >= 0 && u32(x_height) < uniforms.x_shape[1]) {\n for (var i = 0; i < ${xNumber}; i++) {\n let x_width = x_corner.y + i;\n if (x_width >= 0 && u32(x_width) < uniforms.x_shape[2]) {\n x_vals[i] = ${x.get('batch', 'u32(x_height)', 'u32(x_width)', 'input_channel')};\n } else {\n x_vals[i] = ${x.type.value}(0);\n }\n }\n for (var w_width: u32 = 0u; w_width < ${wShape[1]}; w_width++) {\n let w_val = ${w.get('w_height', 'w_width', '0', 'output_channel')};\n for (var i = 0u; i < ${outputNumber}u; i++) {\n values[i] = fma(x_vals[i * u32(uniforms.strides[1]) + w_width], w_val, values[i]);\n }\n }\n }\n }\n\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n ${output.set('batch', 'row', 'col + i', 'output_channel', 'value')};\n }\n }`;\n };\n\n return {\n name: 'GroupedConv-Vectorize',\n shaderCache: {\n hint: `${attributes.cacheKey};${components};${outputNumber};${xNumber};${wShape[0]};${wShape[1]}`,\n inputDependencies: hasBias ? ['rank', 'rank', 'type'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createTensorShapeVariables, getBroadcastDims, getMaxComponents, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\n\nexport const createNaiveMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n\n const M = aShape[aShape.length - 2];\n const N = bShape[bShape.length - 1];\n const K = aShape[aShape.length - 1];\n const components = getMaxComponents(N);\n const aComponents = getMaxComponents(K);\n const outputNumber = getMaxComponents(M);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const hasBias = inputs.length > 2;\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const outputShapeInShader = [batchSize, M, N];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShape, bShape));\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeInShader));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchDims = internalVariable('batch_dims', inputs[0].dataType, outerDims.length);\n const a = inputVariable('a', inputs[0].dataType, aShape.length, aComponents);\n const b = inputVariable('b', inputs[1].dataType, bShape.length, components);\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const inputVariables = [a, b];\n let processBias = '';\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n processBias = `${\n isChannelsLast ? 
`value += bias[col / ${biasComponents}];` :\n `value += ${output.type.value}(bias[row + i]);`}`;\n }\n\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const broadCastADims = getBroadcastDims(outerDimsA, outerDims);\n const broadCastBDims = getBroadcastDims(outerDimsB, outerDims);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'K', type: 'u32'}\n ];\n appendActivationUniforms(activationAttributes, uniforms);\n\n const getIndices = (variable: IndicesHelper, broadCastDims: number[]) => {\n const rank = variable.rank;\n const name = variable.name;\n if (rank === 2) {\n return `var ${name}_indices = ${variable.type.indices}(0u, 0u);`;\n }\n const batchRank = batchDims.rank;\n let resStr = `var ${name}_indices: ${variable.type.indices};`;\n for (let i = rank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\n${name}_indices[${i}] = ${batchRank > 1 ? `batch_indices[${j}]` : 'batch_indices'};`;\n }\n broadCastDims.forEach(i => {\n resStr += `\\n${name}_indices[${i}] = 0;`;\n });\n resStr += `${name}_indices[${rank - 2}] = 0u;\n ${name}_indices[${rank - 1}] = 0u;`;\n return resStr;\n };\n\n const calcResult = (): string => {\n let calcStr = `var a_data: ${a.type.value};`;\n for (let i = 0; i < aComponents; i++) {\n calcStr += `\n let b_data${i} = b[(b_offset + (k + ${i}) * uniforms.N + col) / ${components}];`;\n }\n for (let i = 0; i < outputNumber; i++) {\n calcStr += `a_data = a[(a_offset + (row + ${i}) * uniforms.K + k) / ${aComponents}];`;\n\n for (let j = 0; j < aComponents; j++) {\n calcStr += `\n values[${i}] = fma(${b.type.value}(a_data${aComponents === 1 ? '' : `[${j}]`}), b_data${j}, values[${\n i}]);\\n`;\n }\n }\n return calcStr;\n };\n\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let col = (global_idx % (uniforms.N / ${components})) * ${components};\n var index1 = global_idx / (uniforms.N / ${components});\n let stride1 = uniforms.M / ${outputNumber};\n let row = (index1 % stride1) * ${outputNumber};\n let batch = index1 / stride1;\n\n ${outputShape.length === 2 ? '' : `let batch_indices = ${batchDims.offsetToIndices('batch')};`}\n ${getIndices(a, broadCastADims)}\n let a_offset = ${a.indicesToOffset('a_indices')};\n ${getIndices(b, broadCastBDims)}\n let b_offset = ${b.indicesToOffset('b_indices')};\n var values: array<${output.type.value}, ${outputNumber}>;\n for (var k: u32 = 0u; k < uniforms.K; k = k + ${aComponents}) {\n ${calcResult()}\n }\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n let cur_indices = ${output.type.indices}(batch, row + i, col);\n let offset = ${output.indicesToOffset('cur_indices')};\n ${output.setByOffset(`offset / ${components}`, 'value')};\n }\n }\n `;\n };\n return {\n name: 'MatMulNaive',\n shaderCache: {\n hint: `${activationAttributes.activation};${components};${aComponents};${outputNumber};${isChannelsLast}`,\n inputDependencies: hasBias ? 
['rank', 'rank', 'rank'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n};\n\nexport const matMul = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n const outputShape = BroadcastUtil.calcShape(context.inputs[0].dims, context.inputs[1].dims, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const N = outputShape[outputShape.length - 1];\n const K = context.inputs[0].dims[context.inputs[0].dims.length - 1];\n if (N < 8 && K < 8) {\n context.compute(createNaiveMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n } else {\n context.compute(createMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DMatMulProgramInfo} from './3rd-party/conv2d_mm_webgpu';\nimport {computeConv3DInfo, createConv3DNaiveProgramInfo} from './3rd-party/conv3d_naive_webgpu';\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createGroupedConvProgramInfo, createGroupedConvVectorizeProgramInfo} from './conv-grouped';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createNaiveMatmulProgramInfo} from './matmul';\nimport {createTransposeProgramInfo} from './transpose';\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[], isChannelLast: boolean): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(isChannelLast ? 1 : 2, isChannelLast ? 3 : 4);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 
3 : 1, 0, outChannels);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly format: 'NHWC'|'NCHW';\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n readonly wIsConst: boolean;\n}\n\n// for transposing weight tensor from [M, C/group, KH, KW] to [KH, KW, C/group, M]\nconst weightTransposeAttribute = [2, 3, 1, 0];\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/master/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n if (inputs[0].dims.length > 5) {\n throw new Error('greater than 5D is not supported');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n};\n\nconst getAdjustedConvAttributes = (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n if (kernelShape[i - 2] === 0) {\n kernelShape[i - 2] = inputs[1].dims[i];\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.format === 'NHWC',\n attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads});\n return newAttributes;\n};\n\nexport const parseConvAttributes = (attributes: Record): ConvAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default 
attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad = ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number];\n const dilations = attributes.dilations as number[];\n const group = attributes.group as number;\n const kernelShape = attributes.kernel_shape as number[];\n const pads = attributes.pads as number[];\n const strides = attributes.strides as number[];\n const wIsConst = (attributes.w_is_const as () => boolean)();\n\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst conv2d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n\n // check attributes\n\n // const hasPreluActivationWeights = false; /* TODO: add support for prelu activation weights */\n const isChannelsLast = attributes.format === 'NHWC';\n if (attributes.group !== 1) {\n // NVIDIA GPU with ampere architecture fails with below 2 cases, but we couldn't repro them with any other\n // GPUs. So just disable vectorize on NVIDIA ampere to ensure always correct outputs.\n // [webgpu]Conv - conv - vectorize group - B\n // [webgpu]Conv - conv - vectorize group - D\n const enableGroupedConvVectorize = !context.adapterInfo.isArchitecture('ampere');\n if (enableGroupedConvVectorize && isChannelsLast && inputs[1].dims[0] === attributes.group &&\n inputs[1].dims[1] === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1) {\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n const convInputs = [inputs[0], transposedWeight];\n if (inputs.length === 3) {\n convInputs.push(inputs[2]);\n }\n context.compute(\n createGroupedConvVectorizeProgramInfo(convInputs, adjustedAttributes, outputShape), {inputs: convInputs});\n } else {\n context.compute(createGroupedConvProgramInfo(inputs, adjustedAttributes));\n }\n return;\n }\n\n const hasBias = inputs.length === 3;\n const inputHeight = inputs[0].dims[isChannelsLast ? 1 : 2];\n const inputWidth = inputs[0].dims[isChannelsLast ? 2 : 3];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const outChannels = outputShape[isChannelsLast ? 
3 : 1];\n\n const sameSize = isChannelsLast && weightHeight === inputHeight && weightWidth === inputWidth &&\n attributes.pads[0] === 0 && attributes.pads[1] === 0;\n if (sameSize ||\n (weightHeight === 1 && weightWidth === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1 &&\n attributes.strides[0] === 1 && attributes.strides[1] === 1 && attributes.pads[0] === 0 &&\n attributes.pads[1] === 0)) {\n // conv2dByMatMul\n const batch = outputShape[0];\n let xReshaped, wReshaped, matmulOutputShape;\n const matmulInputs = [];\n if (isChannelsLast) {\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n if (sameSize) {\n const sharedDim = inputHeight * inputWidth * inputChannels;\n xReshaped = inputs[0].reshape([1, batch, sharedDim]);\n wReshaped = transposedWeight.reshape([1, sharedDim, outChannels]);\n matmulOutputShape = [1, batch, outChannels];\n } else {\n xReshaped = inputs[0].reshape([batch, inputHeight * inputWidth, inputChannels]);\n wReshaped = transposedWeight.reshape([1, inputChannels, outChannels]);\n matmulOutputShape = [batch, outHeight * outWidth, outChannels];\n }\n matmulInputs.push(xReshaped);\n matmulInputs.push(wReshaped);\n } else {\n xReshaped = inputs[0].reshape([batch, inputChannels, inputHeight * inputWidth]);\n wReshaped = inputs[1].reshape([1, outChannels, inputChannels]);\n matmulOutputShape = [batch, outChannels, outHeight * outWidth];\n matmulInputs.push(wReshaped);\n matmulInputs.push(xReshaped);\n }\n if (hasBias) {\n matmulInputs.push(inputs[2]);\n }\n const N = matmulOutputShape[2];\n const K = matmulInputs[0].dims[matmulInputs[0].dims.length - 1];\n // Tune the threshold.\n if (N < 8 && K < 8) {\n context.compute(\n createNaiveMatmulProgramInfo(\n matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n } else {\n context.compute(\n createMatmulProgramInfo(matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n }\n return;\n }\n\n // TODO: implement conv2dWithIm2Col()\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convInputs = [inputs[0], transposedWeight];\n if (hasBias) {\n convInputs.push(inputs[2]);\n }\n\n // STEP.3: compute matmul\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n context.compute(\n createConv2DMatMulProgramInfo(\n convInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convInputs});\n};\n\nconst conv1d = (context: ComputeContext, attributes: ConvAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n const pads = [0, attributes.pads[0], 0, attributes.pads[1]];\n const strides = [1].concat(attributes.strides);\n const dilations = [1].concat(attributes.dilations);\n const kernelShape = [1].concat(attributes.kernelShape);\n const adjustedAttributes = getAdjustedConvAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createGroupedConvProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] : []));\n};\n\nconst conv3d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const format = attributes.format === 'NHWC' ? 'channelsLast' : 'channelsFirst';\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n const pads = attributes.autoPad === 'NOTSET' ? attributes.pads : attributes.autoPad;\n const convInfo = computeConv3DInfo(\n inputs[0].dims as [number, number, number, number, number],\n inputs[1].dims as [number, number, number, number, number],\n attributes.strides as number | [number, number, number],\n attributes.dilations as number | [number, number, number], pads as string | number[], false, format);\n context.compute(createConv3DNaiveProgramInfo(\n inputs, adjustedAttributes, convInfo.outShape,\n [convInfo.filterDepth, convInfo.filterHeight, convInfo.filterWidth],\n [convInfo.padInfo.front, convInfo.padInfo.top, convInfo.padInfo.left], format));\n};\n\nexport const conv = (context: ComputeContext, attributes: ConvAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n conv1d(context, attributes);\n } else if (context.inputs[0].dims.length === 5) {\n conv3d(context, context.inputs, attributes);\n } else {\n conv2d(context, context.inputs, attributes);\n }\n};\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dTransposeCommonSnippet =\n (isChannelsLast: boolean, addBias = false, attributes: ConvTransposeAttributes, type: string,\n innerElementSize = 4): string => {\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];';\n case 4:\n return `\n let coord1 = vec4(coordX, coordY, col + 1, rowInner);\n let coord2 = vec4(coordX, coordY, col + 2, rowInner);\n let coord3 = vec4(coordX, coordY, col + 3, rowInner);\n let v0 = w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\n let v1 = w[getIndexFromCoords4D(coord1, vec4(uniforms.w_shape))];\n let v2 = w[getIndexFromCoords4D(coord2, vec4(uniforms.w_shape))];\n let v3 = w[getIndexFromCoords4D(coord3, vec4(uniforms.w_shape))];\n return ${type}(v0, v1, v2, v3);\n `;\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, iXR, iXC, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, iXR, iXC);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n\n const readASnippet = `\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (uniforms.filter_dims[1] * inChannels);\n let WCol = ${col} / inChannels % uniforms.filter_dims[1];\n let xR = f32(outRow - uniforms.pads[0] + uniforms.dilations[0] * WRow) / f32(uniforms.strides[0]);\n let xC = f32(outCol - uniforms.pads[1] + uniforms.dilations[1] * WCol) / f32(uniforms.strides[1]);\n if (xR < 0.0 || xR >= f32(${xHeight}) || fract(xR) > 0.0) {\n return ${type}(0.0);\n }\n if (xC < 0.0 || xC >= f32(${xWidth}) || fract(xC) > 0.0) {\n return ${type}(0.0);\n }\n let iXR = i32(xR);\n let iXC = i32(xC);\n let xCh = ${col} % inChannels;\n ${coordASnippet}\n return x[getIndexFromCoords4D(coord, vec4(uniforms.x_shape))/${innerElementSize}];`;\n\n const sampleA = isChannelsLast ? `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readASnippet}\n }\n return ${type}(0.0);` :\n `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readASnippet}\n }\n return ${type}(0.0);`;\n\n const sampleW = `\n let col = colIn * ${innerElementSize};\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let coordX = uniforms.filter_dims[0] - 1 - row / (uniforms.filter_dims[1] * inChannels);\n let coordY = uniforms.filter_dims[1] - 1 - (row / inChannels) % uniforms.filter_dims[1];\n if (${\n isChannelsLast ? 'row < uniforms.dim_inner && col < uniforms.dim_b_outer' :\n 'row < uniforms.dim_inner && col < uniforms.dim_a_outer'} && coordX >= 0 && coordY >= 0) {\n let rowInner = row % inChannels;\n let coord = vec4(coordX, coordY, col, rowInner);\n ${getWSnippet(innerElementSize)}\n }\n return ${type}(0.0);\n `;\n\n const applyActivation = getActivationSnippet(attributes, type);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleA : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleW : sampleA}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueInput : ${type}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueInput;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${innerElementSize}] = value;\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DTransposeMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes, outputShape: readonly number[],\n dimAOuter: number, dimBOuter: number, dimInner: number, hasBias: boolean,\n sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 && inChannels % 3) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv_backprop_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? 4 : 1;\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const components = isVec4 ? 4 : 1;\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const effectiveFilterDims = [\n filterDims[0] + (attributes.dilations[0] <= 1 ? 0 : (filterDims[0] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] + (attributes.dilations[1] <= 1 ? 0 : (filterDims[1] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor((attributes.pads[1] + attributes.pads[3]) / 2)\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: attributes.strides},\n {type: DataType.int32, data: attributes.dilations}, {type: DataType.int32, data: filterDims},\n {type: DataType.int32, data: pads}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims.length, components);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, 1);\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n const inputVariables = [x, w];\n\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${bias.type.value} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'strides', type: 'i32', length: 2}, {name: 'dilations', type: 'i32', length: 2},\n {name: 'filter_dims', type: 'i32', length: filterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}\n ];\n appendActivationUniforms(attributes, uniforms);\n const elemType = tensorTypeToWsglStorageType(inputs[0].dataType, 1);\n if (elemType !== 'f16' && elemType !== 'f32') {\n throw new Error(`elemType ${elemType} is not supported.`);\n }\n return `\n ${utilFunctions('uniforms.result_strides')}\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)};\n ${declareFunctions}\n ${conv2dTransposeCommonSnippet(isChannelsLast, hasBias, attributes, x.type.value, innerElementSize)}\n ${\n isVec4 ? makeMatMulPackedVec4Source(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner, false,\n undefined, sequentialAccessByThreads)}`;\n };\n\n return {\n name: 'Conv2DTransposeMatMul',\n shaderCache:\n {hint: `${attributes.cacheKey};${elementsPerThread};${workGroupSize};${isVec4}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_webgpu.ts\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\n\nconst createConvTranspose2DOpProgramShaderSource =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], outputShape: readonly number[], hasBias: boolean,\n is1DimensionDispatch: boolean, isVec4 = false, dataType: string, uniforms: UniformsArrayType,\n isChannelsLast = false): string => {\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n const workPerThread = isVec4 ? 2 : 1;\n\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : u32, value : ${isVec4 ? `vec4<${dataType}>` : dataType}) {\n result[flatIndex] = ${isVec4 ? 
`vec4<${dataType}>` : dataType}(value);\n }`;\n if (hasBias) {\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${dataType}>` : dataType} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? '/ 4' : ''}];\n }`;\n }\n const components = isVec4 ? 4 : 1;\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const dy = inputVariable('Dy', inputs[0].dataType, inputs[0].dims.length, components);\n const inputVariables = [dy, w];\n if (hasBias) {\n inputVariables.push(inputVariable('bias', inputs[2].dataType, [outputShape[channelDim]].length, components));\n }\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n\n const codeSnippet4 = `{\n let batch: u32 = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} / uniforms.result_shape[1];\n let r = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} % uniforms.result_shape[1];\n let c = ${is1DimensionDispatch ? 'global_id.y' : 'workgroup_id.y'} * ${workPerThread};\n let d1: u32 = ${is1DimensionDispatch ? 'global_id.x' : 'workgroup_id.x'} * 4;\n\n let dyCorner = vec2(i32(r), i32(c)) - vec2(uniforms.pads);\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd: array, ${workPerThread}>;\n for (var i = 0; i < ${workPerThread}; i++) {\n dotProd[i] = vec4<${dataType}>(0.0);\n }\n for (var wR: u32 = 0; wR < uniforms.filter_dims[0]; wR = wR + 1) {\n var dyR = (${dataType}(dyCorner.x) + ${dataType}(wR)) / ${dataType}(uniforms.strides.x);\n let wRPerm = uniforms.filter_dims[0] - 1 - wR;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[1]) ||\n fract(dyR) > 0.0 || wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.filter_dims[1]; wC = wC + 1) {\n let dyC = (${dataType}(dyCorner.y) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let dyC2 = (${dataType}(dyCorner.y) + 1.0 + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims[1] - 1 - wC;\n if (wCPerm < 0) {\n continue;\n }\n var bDyCVal = true;\n var bDyCVal2 = true;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC) > 0.0) {\n bDyCVal = false;\n }\n if (dyC2 < 0.0 || dyC2 >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC2) > 0.0) {\n bDyCVal2 = false;\n }\n\n let idyC: u32 = u32(dyC);\n let idyC2: u32 = u32(dyC2);\n if (bDyCVal && bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2 :u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n\n xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n\n dotProd[1] = dotProd[1] + vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n }\n } else if (bDyCVal) {\n let d2Length = uniforms.Dy_shape[${channelDim}];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = 
${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n }\n } else if (bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[1] = dotProd[1] + tmpval;\n }\n }\n }\n }\n\n for (var i: u32 = 0; i < ${workPerThread}; i = i + 1) {\n let value = dotProd[i] + ${hasBias ? 'bias[c+i]' : `vec4<${dataType}>(0.0)`};\n ${output.set('batch', 'r', 'c + i', 'd1', 'value')};\n }\n }`;\n const codeSnippet = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch = ${output.indicesGet('outputIndices', 0)};\n let d1 = ${output.indicesGet('outputIndices', channelDim)};\n let r = ${output.indicesGet('outputIndices', rowDim)};\n let c = ${output.indicesGet('outputIndices', colDim)};\n let dyCorner = vec2(i32(r), i32(c)) - uniforms.pads;\n let dyRCorner = dyCorner.x;\n let dyCCorner = dyCorner.y;\n let groupId = d1 / uniforms.output_channels_per_group;\n let wOutChannel = d1 - groupId * uniforms.output_channels_per_group;\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd = ${dataType}(0.0);\n for (var wR: u32 = 0; wR < uniforms.effective_filter_dims.x; wR = wR + 1) {\n if (wR % uniforms.dilations.x != 0) {\n continue;\n }\n let dyR = (${dataType}(dyRCorner) + ${dataType}(wR)) / ${dataType}(uniforms.strides[0]);\n let wRPerm = uniforms.filter_dims.x - 1 - wR / uniforms.dilations.x;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[${rowDim}]) || fract(dyR) > 0.0 ||\n wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.effective_filter_dims.y; wC = wC + 1) {\n if (wC % uniforms.dilations.y != 0) {\n continue;\n }\n let dyC = (${dataType}(dyCCorner) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims.y - 1 - wC / uniforms.dilations.y;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[${colDim}]) ||\n fract(dyC) > 0.0 || wCPerm < 0) {\n continue;\n }\n let idyC: u32 = u32(dyC);\n var inputChannel = groupId * uniforms.input_channels_per_group;\n for (var d2: u32 = 0; d2 < uniforms.input_channels_per_group; d2 = d2 + 1) {\n let xValue = ${\n isChannelsLast ? dy.get('batch', 'idyR', 'idyC', 'inputChannel') :\n dy.get('batch', 'inputChannel', 'idyR', 'idyC')};\n let wValue = ${w.get('inputChannel', 'wOutChannel', 'u32(wRPerm)', 'u32(wCPerm)')};\n dotProd = dotProd + xValue * wValue;\n inputChannel = inputChannel + 1;\n }\n }\n }\n let value = dotProd + ${hasBias ? 
'bias[d1]' : `${dataType}(0.0)`};\n ${output.setByOffset('global_idx', 'value')};\n `;\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')};\n ${isVec4 ? codeSnippet4 : codeSnippet}}`;\n };\n\nexport const createConvTranspose2DProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n // const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = attributes.outputShape;\n const outputSize = ShapeUtil.size(outputShape);\n\n // const inChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // TODO Enable isVec4 for performance\n // Disabled due to weight matrix layout issue\n // const isVec4 = attributes.group === 1 && isChannelsLast && inChannels % 4 === 0 && outChannels % 4 === 0;\n const dispatch = [\n Math.ceil(outputSize / 64),\n 1,\n 1,\n ];\n LOG_DEBUG('verbose', () => `[conv2d_backprop_webgpu] dispatch = ${dispatch}`);\n\n const isChannelsLast = attributes.format === 'NHWC';\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const strides = [attributes.strides[0], attributes.strides[1]];\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const dilations = [attributes.dilations[0], attributes.dilations[1]];\n const effectiveFilterDims = [\n filterDims[0] +\n (attributes.dilations[0] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 1 : 2] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] +\n (attributes.dilations[1] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 
2 : 3] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor(attributes.pads[1] + attributes.pads[3]) / 2\n ];\n\n const isVec4 = false;\n const group = attributes.group;\n const wShape = inputs[1].dims;\n const inputChannelsPerGroup = wShape[0] / group;\n const outputChannelsPerGroup = wShape[1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: strides},\n {type: DataType.uint32, data: filterDims}, {type: DataType.uint32, data: dilations},\n {type: DataType.uint32, data: effectiveFilterDims}, {type: DataType.int32, data: pads},\n {type: DataType.uint32, data: inputChannelsPerGroup}, {type: DataType.uint32, data: outputChannelsPerGroup},\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims)\n ];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const is1DimensionDispatch = dispatch[1] === 1 && dispatch[2] === 1;\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'strides', type: 'u32', length: strides.length},\n {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'dilations', type: 'u32', length: filterDims.length},\n {name: 'effective_filter_dims', type: 'u32', length: effectiveFilterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}, {name: 'input_channels_per_group', type: 'u32'},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n return `${\n createConvTranspose2DOpProgramShaderSource(\n shaderHelper, inputs, outputShape, hasBias, is1DimensionDispatch, isVec4, dataType, uniforms,\n isChannelsLast)}`;\n };\n return {\n name: 'ConvTranspose2D',\n shaderCache: {hint: `${attributes.cacheKey};`, inputDependencies},\n getRunData: () => ({\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n outputs: [{\n dims: squeezeOutputShapeFunction ? squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n programUniforms\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
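// Editorial sketch (not part of the bundled source): a standalone helper mirroring how
// createConvTranspose2DProgramInfo above derives the effective (dilated) filter size and the
// transposed-convolution pads from the raw attributes. All names here are illustrative; the real
// code keeps these values inline. Note that the original computes the two pad entries with
// slightly different parenthesization (on the second axis the floor is taken before dividing by 2);
// this sketch applies the floor-after-halving form to both spatial axes.
function convTransposePadding(
    kernelSpatialDims: readonly [number, number],            // spatial kernel dims [kH, kW]
    dilations: readonly [number, number],                    // [dH, dW]
    attrPads: readonly [number, number, number, number]      // [beginH, beginW, endH, endW]
    ): {effectiveFilterDims: [number, number]; pads: [number, number]} {
  // A dilation d inflates a kernel of size k to k + (k - 1) * (d - 1).
  const effectiveFilterDims: [number, number] = [
    kernelSpatialDims[0] + (dilations[0] <= 1 ? 0 : (kernelSpatialDims[0] - 1) * (dilations[0] - 1)),
    kernelSpatialDims[1] + (dilations[1] <= 1 ? 0 : (kernelSpatialDims[1] - 1) * (dilations[1] - 1)),
  ];
  // The shader offsets its reads into Dy by (effectiveFilter - 1 - floor((padBegin + padEnd) / 2)).
  const pads: [number, number] = [
    effectiveFilterDims[0] - 1 - Math.floor((attrPads[0] + attrPads[2]) / 2),
    effectiveFilterDims[1] - 1 - Math.floor((attrPads[1] + attrPads[3]) / 2),
  ];
  return {effectiveFilterDims, pads};
}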
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DTransposeMatMulProgramInfo} from './3rd-party/conv_backprop_mm_webgpu';\nimport {createConvTranspose2DProgramInfo} from './3rd-party/conv_backprop_webgpu';\nimport {ConvAttributes} from './conv';\nimport {parseInternalActivationAttributes} from './fuse-utils';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n group: number, pads: number[], strides: readonly number[], isChannelLast: boolean, outputPadding: number[],\n outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateOutputShape = outputShape.length === 0;\n if (outputPadding.length === 0) {\n for (let i = 0; i < spatialRank; ++i) {\n outputPadding.push(0);\n }\n }\n const batchSize = inputShape[0];\n const outChannels = kernelShape[isChannelLast ? 3 : 1] * group;\n for (let i = 0, j = inputShape.length - spatialRank - (isChannelLast ? 1 : 0); i < spatialRank; ++i, ++j) {\n const inSize = inputShape[j];\n const outSize = updateOutputShape ? inSize * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inSize, strides[i], pads[i], kernelShape[j], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateOutputShape) {\n outputShape.push(\n strides[i] * (inSize - 1) + outputPadding[i] + (kernelShape[j] - 1) * dilations[i] + 1 - pads[i] -\n pads[i + spatialRank]);\n }\n }\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 3 : 1, 0, outChannels);\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nconst getAdjustedConvTransposeAttributes =\n (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0 || attributes.kernelShape.reduce((a, b) => a * b, 1) === 0) {\n kernelShape.length = 0;\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const isChannelsLast = attributes.format === 'NHWC';\n kernelShape.splice(0, 0, inputs[1].dims[0]);\n kernelShape.splice(isChannelsLast ? 
3 : 1, 0, inputs[1].dims[1]);\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const outputPadding = attributes.outputPadding.slice();\n const inputShape = inputs[0].dims;\n let dilations = attributes.dilations.slice();\n if (dilations.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n dilations = new Array(spatialRank).fill(1);\n }\n let strides = attributes.strides.slice();\n if (strides.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n strides = new Array(spatialRank).fill(1);\n }\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, dilations, attributes.autoPad, attributes.group, pads, strides, isChannelsLast,\n outputPadding, outputShape);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputPadding, outputShape, dilations, strides});\n return newAttributes;\n };\n\nexport const parseConvTransposeAttributes = (attributes: Record): ConvTransposeAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad =\n ['NOTSET', 'VALID', 'SAME_UPPER',\n 'SAME_LOWER'][typeof attributes.autoPad == 'undefined' ? 0 : attributes.autoPad as number];\n const dilations = attributes.dilations as [number, number];\n const group = attributes.group as number;\n const kernelShape = attributes.kernelShape as [number, number];\n const pads = attributes.pads as [number, number, number, number];\n const strides = attributes.strides as [number, number];\n const wIsConst = (attributes.wIsConst as () => boolean)();\n const outputPadding = attributes.outputPadding as [number, number, number, number];\n const outputShape = attributes.outputShape as [number, number];\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n outputPadding,\n outputShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#ConvTranspose\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 && inputs[0].dims.length !== 3) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? 
inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n const dilationsSet = attributes.dilations.reduce((a, b) => a + b, 0) > 0;\n // wrong dilations dimension\n if (dilationsSet && attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n const stridesSet = attributes.strides.reduce((a, b) => a + b, 0) > 0;\n // Wrong strides dimension\n if (stridesSet && attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n const padsSet = attributes.pads.reduce((a, b) => a + b, 0) > 0;\n if (padsSet && attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank && attributes.outputPadding.length !== 0) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n const kernelShapeSet = attributes.kernelShape.reduce((a, b) => a + b, 0) > 0;\n if (kernelShapeSet && attributes.kernelShape.length !== 0 &&\n attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n};\n\n// for transposing weight tensor from [C, M/group, KH, KW] to [KH, KW, M/group, C]\nconst weightTransposePerm = [2, 3, 1, 0];\n\nconst convTranspose2d =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = adjustedAttributes.outputShape;\n const outChannels = outputShape[isChannelsLast ? 3 : 1];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // Switch to naive method when outChannels and inputChannels are very small. It's because that in this case it's\n // not suitable for matmul version since matmul uses tile size 32x32 resulting the underlying execution unit\n // utilization rate is very low.\n if (adjustedAttributes.group !== 1 || (outChannels === 1 && inputChannels === 1)) {\n context.compute(createConvTranspose2DProgramInfo(inputs, adjustedAttributes));\n return;\n }\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposePerm),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convTransposeInputs = [inputs[0], transposedWeight];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n if (!isChannelsLast && inputs[2].dims.length === 1) {\n convTransposeInputs.push(inputs[2].reshape([inputs[2].dims[0], 1, 1]));\n } else {\n convTransposeInputs.push(inputs[2]);\n }\n }\n\n // STEP.3: compute matmul\n context.compute(\n createConv2DTransposeMatMulProgramInfo(\n convTransposeInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convTransposeInputs});\n };\n\nconst convTranspose1d = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n let kernelShape = attributes.kernelShape;\n if (kernelShape.length === 0 || kernelShape[0] === 0) {\n kernelShape = [context.inputs[1].dims[2]];\n }\n let dilations = attributes.dilations;\n if (dilations.length === 0 || dilations[0] === 0) {\n dilations = [1];\n }\n let strides = attributes.strides;\n if (strides.length === 0 || strides[0] === 0) {\n strides = [1];\n }\n let pads = attributes.pads;\n if (pads.length === 0) {\n pads = [0, 0];\n }\n pads = [0, pads[0], 0, pads[1]];\n strides = [1].concat(strides);\n dilations = [1].concat(dilations);\n kernelShape = [1].concat(kernelShape);\n const adjustedAttributes =\n getAdjustedConvTransposeAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createConvTranspose2DProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] :\n [outputShape[0], outputShape[1], outputShape[3]]));\n};\n\nexport const convTranspose = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n convTranspose1d(context, attributes);\n } else {\n convTranspose2d(context, context.inputs, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
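// Editorial sketch (illustrative only): the per-axis output-size formula used by
// calculateOutputShapeAndPads in conv-transpose.ts above, extracted into a standalone helper for
// reference. Function and parameter names are assumptions made for this sketch.
function convTransposeOutputSize(
    inSize: number, stride: number, kernel: number, dilation: number,
    outputPadding: number, padBegin: number, padEnd: number): number {
  // outSize = stride * (inSize - 1) + outputPadding + (kernel - 1) * dilation + 1 - padBegin - padEnd
  return stride * (inSize - 1) + outputPadding + (kernel - 1) * dilation + 1 - padBegin - padEnd;
}

// Example: inSize = 4, stride = 2, kernel = 3, dilation = 1, no output padding, pads of 1/1
// gives 2 * (4 - 1) + 0 + (3 - 1) * 1 + 1 - 1 - 1 = 7.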
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper} from './common';\n\n\nexport interface CumSumAttributes extends AttributeWithCacheKey {\n readonly exclusive: boolean;\n readonly reverse: boolean;\n}\nconst createCumsumProgramInfo =\n (inputType: number, inputShape: readonly number[], axisInput: TensorView, attributes: CumSumAttributes):\n ProgramInfo => {\n const outputSize = ShapeUtil.size(inputShape); // outputShape is same as inputShape.\n const rank = inputShape.length; // input/output rank\n const input = inputVariable('input', inputType, rank);\n const output = outputVariable('output', inputType, rank);\n const axisValue = axisInput.dataType === DataType.int32 ? axisInput.getInt32Array()[0] :\n Number(axisInput.getBigInt64Array()[0]);\n const axis = ShapeUtil.normalizeAxis(axisValue, rank);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const index = ` i32(${input.indicesGet('inputIndices', 'uniforms.axis')}) `;\n const max = getElementAt('uniforms.input_shape', 'uniforms.axis', rank);\n const lowerLimit = attributes.reverse ? index + (attributes.exclusive ? ' + 1' : '') : '0';\n const upperLimit = attributes.reverse ? max : index + (attributes.exclusive ? '' : ' + 1');\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var inputIndices = ${output.offsetToIndices('global_idx')};\n var sum = ${output.type.value}(0);\n let first : i32 = ${lowerLimit};\n let last : i32 = ${upperLimit};\n for (var i : i32 = first; i < last; i++) {\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(i)')};\n sum = sum + ${input.getByIndices('inputIndices')};\n }\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'CumSum',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: inputShape, dataType: inputType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: axis},\n ...createTensorShapeVariables(inputShape, inputShape)\n ]\n\n }),\n getShaderSource\n };\n };\n\n\nexport const cumsum = (context: ComputeContext, attributes: CumSumAttributes): void => {\n const inputShape = context.inputs[0].dims;\n const inputType = context.inputs[0].dataType;\n const axis = context.inputs[1];\n context.compute(createCumsumProgramInfo(inputType, inputShape, axis, attributes), {inputs: [0]});\n};\n\nexport const parseCumSumAttributes = (attributes: Record): CumSumAttributes => {\n const exclusive = attributes.exclusive as number === 1;\n const reverse = attributes.reverse as number === 1;\n return createAttributeWithCacheKey({exclusive, reverse});\n};\n", "// Copyright (c) Microsoft Corporation. 
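// Editorial sketch (not part of the source): a scalar reference for the loop bounds the CumSum
// shader above builds for one output position along the cumulated axis. `axisIndex` is the
// coordinate of the current element on that axis and `axisSize` its length; names are illustrative.
function cumsumBounds(axisIndex: number, axisSize: number, exclusive: boolean, reverse: boolean):
    {first: number; last: number} {
  // The shader sums input[i] for i in [first, last).
  const first = reverse ? axisIndex + (exclusive ? 1 : 0) : 0;
  const last = reverse ? axisSize : axisIndex + (exclusive ? 0 : 1);
  return {first, last};
}

// e.g. an inclusive forward cumsum at position 3 sums i = 0..3 ({first: 0, last: 4});
// an exclusive reverse cumsum at position 3 over axisSize 6 sums i = 4..5.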
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface DepthToSpaceAttributes extends FormatAttributes, AttributeWithCacheKey {\n readonly blocksize: number;\n readonly mode: string;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('DepthToSpace requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('DepthToSpace requires 4D input.');\n }\n};\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nconst createDepthToSpaceProgramInfo = (inputTensor: TensorView, attributes: DepthToSpaceAttributes): ProgramInfo => {\n let n: number, h: number, w: number, c: number;\n let shape: number[];\n let perm: number[];\n const isChannelLast = attributes.format === 'NHWC';\n const blocksize = attributes.blocksize;\n const isDCRmode = attributes.mode === 'DCR';\n if (isChannelLast) {\n [n, h, w, c] = inputTensor.dims;\n shape = isDCRmode ? [n, h, w, blocksize, blocksize, c / (blocksize ** 2)] :\n [n, h, w, c / (blocksize ** 2), blocksize, blocksize];\n perm = isDCRmode ? [0, 1, 3, 2, 4, 5] : [0, 1, 4, 2, 5, 3];\n } else {\n [n, h, w, c] = [inputTensor.dims[0], inputTensor.dims[2], inputTensor.dims[3], inputTensor.dims[1]];\n shape = isDCRmode ? [n, blocksize, blocksize, c / (blocksize ** 2), h, w] :\n [n, c / (blocksize ** 2), blocksize, blocksize, h, w];\n perm = isDCRmode ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n }\n const reshapedInputTensor = inputTensor.reshape(shape);\n const reshapedInputRank = reshapedInputTensor.dims.length;\n const inputDataType = inputTensor.dataType;\n\n const reshapedInput = inputVariable('a', inputDataType, reshapedInputRank);\n const permedOutput = outputVariable('output', inputDataType, reshapedInputRank);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(reshapedInput, permedOutput)}\n\n ${permFunctionBody(perm, reshapedInputRank, reshapedInput, permedOutput)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${permedOutput.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${permedOutput.setByOffset('global_idx', reshapedInput.getByIndices('aIndices'))}\n }`;\n\n return {\n name: 'DepthToSpace',\n shaderCache: {hint: `${inputTensor.dims};${attributes.blocksize};${attributes.mode}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputShape = isChannelLast ? 
[n, h * blocksize, w * blocksize, c / (blocksize ** 2)] :\n [n, c / (blocksize ** 2), h * blocksize, w * blocksize];\n const outputSize = ShapeUtil.size(outputShape);\n const shapeBeforePerm = reshapedInputTensor.dims;\n const shapeAfterPerm = ShapeUtil.sortBasedOnPerm(shapeBeforePerm, perm);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(shapeBeforePerm, shapeAfterPerm)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const depthToSpace = (context: ComputeContext, attributes: DepthToSpaceAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createDepthToSpaceProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseDepthToSpaceAttributes = (attributes: Record): DepthToSpaceAttributes =>\n createAttributeWithCacheKey({\n blocksize: attributes.blocksize as number,\n mode: attributes.mode as string,\n format: attributes.format as 'NHWC' | 'NCHW'\n });\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface EinsumAttributes extends AttributeWithCacheKey {\n readonly equation: string;\n}\n// The equation attribute value is a string which consists of left hand side (LHS) and optionally right hand side (RHS)\n// separated by '->'. Ex. \"ij,jk -> ik\" expresses matrix multiplication\n// \"ij->ji\" expresses matrix transpose\n// \"ii->i\" diagonal elements of a square matrix\n// LHS consists of a sequence of terms separated by commas. Each term corresponds to an input variable.\n// Each symbol corresponds to a dimension in the input variable. The symbol can be either a letter, 'a' to 'z' or 'A' to\n// 'Z' or '...' 
to represent arbitrary dimensions.\n\nconst symbolPattern =\n '[a-zA-Z]|\\\\.\\\\.\\\\.'; // The pattern each symbol in each term in the symbolic equation should match\nconst termPattern = '(' + symbolPattern + ')+'; // The pattern each term in the symbolic equation should match\nconst termPatternOnly = '^' + termPattern + '$'; // The patterns only matchs a term begin to end.\nconst lhsPattern = '(' + termPattern + ',)*' + termPattern; // The pattern the LHS should match\nconst lhsPatternOnly = '^' + lhsPattern + '$'; // The patterns only matchs a LHS begin to end.\n\ninterface SymbolInfo {\n count: number; // Symbol corresponding to a dimmension of an input\n inputIndices: number[]; // Number of input variables the symbol corresponds to\n dimValue: number; // Number of dimensions the symbol corresponds to\n}\n\nclass EinsumTerm {\n constructor(inputIndex = -1) {\n this.symbolToIndices = new Map();\n this.inputIndex = inputIndex;\n }\n\n // Add a symbol to the term\n addSymbol(symbol: string, index: number) {\n let value = this.symbolToIndices.get(symbol);\n if (value === undefined) {\n value = [index];\n } else {\n value.push(index);\n }\n this.symbolToIndices.set(symbol, value);\n }\n\n symbolToIndices: Map; // Map from symbol to dimensions of the input corresponding to the term\n inputIndex: number; // -1 for output and 0, 1, 2, ... for inputs\n}\n\nclass EinsumEquation {\n constructor(inputs: readonly TensorView[], public readonly equation: string) {\n this.hasEllipsis = false;\n this.symbolToInfo = new Map();\n this.lhs = new Array();\n this.outputDims = [];\n // As rhs needs to be updated allow using let instead of const for both lhs and rhs.\n // eslint-disable-next-line prefer-const\n let [lhs, rhs] = equation.includes('->') ? equation.split('->', 2) : [equation, ''];\n if (!lhs.match(RegExp(lhsPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const inputTerms = lhs.split(',');\n inputTerms.forEach((inputTerm, index) => {\n const dims = inputs[index].dims.slice();\n if (!inputTerm.match(RegExp(termPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const einsumTerm = this.processTerm(inputTerm, true, dims, index);\n this.lhs.push(einsumTerm);\n });\n\n // Initialize the RHS if not specified\n if (rhs === '') {\n // Construct RHS from LHS terms/symbols\n rhs += [...this.symbolToInfo.entries()]\n .filter(([sym, info]) => (info.count === 1 || sym === '...'))\n .map(([sym]) => sym)\n .join('');\n } else {\n if (!rhs.match(RegExp(termPattern))) {\n throw new Error('Invalid RHS');\n }\n }\n\n // Compute output dims\n const rhsSymbols = rhs.match(RegExp(symbolPattern, 'g'));\n rhsSymbols?.forEach((symbol) => {\n if (symbol === '...') {\n this.outputDims = this.outputDims.concat(this.ellipsisDims);\n } else {\n const info = this.symbolToInfo.get(symbol);\n if (info === undefined) {\n throw new Error('Invalid RHS symbol');\n }\n this.outputDims.push(info.dimValue);\n }\n });\n this.rhs = this.processTerm(rhs, false, this.outputDims);\n } // End of EinsumEqation constructor\n\n // Add a symbol to the equation\n addSymbol(symbol: string, dimValue: number, inputIndex: number) {\n let info = this.symbolToInfo.get(symbol);\n if (info !== undefined) {\n if (info.dimValue !== dimValue && info.count !== 1) {\n throw new Error('Dimension mismatch');\n } else {\n info.count++;\n info.inputIndices.push(inputIndex);\n }\n } else {\n info = {count: 1, dimValue, inputIndices: [inputIndex]};\n }\n this.symbolToInfo.set(symbol, info);\n }\n\n // Process one input/output term\n 
processTerm(term: string, isInput: boolean, dims: readonly number[], index = -1): EinsumTerm {\n const rank = dims.length;\n let ellipsis = false;\n let ellipsisDims = [];\n let nextDim = 0;\n // For output empty string is allowed because the output may be reduced to a scalar value\n if (!term.match(RegExp(termPatternOnly)) && (!isInput && term !== '')) {\n throw new Error('Invalid LHS term');\n }\n const indexSymbols = term.match(RegExp(symbolPattern, 'g'));\n const einsumTerm = new EinsumTerm(index);\n // symbol can be either a lettre, 'a' to 'z' or 'A' to 'Z', or '...'\n indexSymbols?.forEach((symbol: string, i: number) => {\n if (symbol === '...') {\n if (ellipsis) {\n throw new Error('Only one ellipsis is allowed per input term');\n }\n ellipsis = true;\n const ellipsisDimLength = rank - indexSymbols.length + 1;\n if (ellipsisDimLength < 0) {\n throw new Error('Ellipsis out of bounds');\n }\n ellipsisDims = dims.slice(nextDim, nextDim + ellipsisDimLength);\n if (this.hasEllipsis) {\n if (this.ellipsisDims.length !== ellipsisDims.length ||\n this.ellipsisDims.toString() !== ellipsisDims.toString()) {\n throw new Error('Ellipsis dimensions mismatch');\n }\n } else if (isInput) {\n this.hasEllipsis = true;\n this.ellipsisDims = ellipsisDims;\n } else {\n throw new Error('Ellipsis must be specified in the LHS');\n }\n // Add '0', '1', '2', '3', '4', etc to represent ellipsis dimensions to avoid special handling\n for (let j = 0; j < ellipsisDims.length; j++) {\n const symbol = String.fromCharCode('0'.charCodeAt(0) + j);\n einsumTerm.addSymbol(symbol, i + j);\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n } else {\n einsumTerm.addSymbol(symbol, i + (this.hasEllipsis ? this.ellipsisDims.length - 1 : 0));\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n });\n return einsumTerm;\n }\n\n symbolToInfo: Map; // All symbols in the equation\n hasEllipsis: boolean; // The equation has ellipsis or not\n ellipsisDims: number[]; // The dimensions of the equation ellipsis corresponds to.\n lhs: EinsumTerm[]; // Terms on the left-hand side of the equation\n rhs: EinsumTerm; // Term on the right-hand side of the equation\n outputDims: number[]; // Output dimensions of the equation\n} // End of class EinsumEquation\n\nconst appendMax = (name: string): string => name + '_max';\n\nconst createEinsumProgramInfo =\n (inputShapes: Array, dataType: number, einsumEquation: EinsumEquation,\n outputShape: readonly number[]): ProgramInfo => {\n const ranks = inputShapes.map((dims) => dims.length);\n const inputVars = ranks.map((rank, index) => inputVariable(`input${index}`, dataType, rank));\n const outputSize = ShapeUtil.size(outputShape);\n const output = outputVariable('output', dataType, outputShape.length);\n const uniformsSymbols =\n [...einsumEquation.symbolToInfo.keys()].filter((symbol) => !einsumEquation.rhs.symbolToIndices.has(symbol));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = [];\n const initProd = 'var prod = 1.0;';\n const initSum = 'var sum = 0.0;';\n const updateSum = 'sum += prod;';\n const reduceOpsSetIndices: string[] = [];\n const reduceOpsLoopHeaders: string[] = [];\n const reduceOpsLoopFooters: string[] = [];\n const reduceOpCompute: string[] = [];\n const isReduceOpsWithoutLoop = einsumEquation.symbolToInfo.size === einsumEquation.rhs.symbolToIndices.size;\n einsumEquation.symbolToInfo.forEach((info, symbol) => {\n if (einsumEquation.rhs.symbolToIndices.has(symbol)) {\n const outputIndex = 
einsumEquation.rhs.symbolToIndices.get(symbol)?.[0];\n if (outputIndex !== undefined) {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n idxCopy.push(`${\n inputVars[i].indicesSet(\n `input${i}Indices`, index, output.indicesGet('outputIndices', outputIndex))}`);\n });\n }\n });\n }\n } else {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n reduceOpsSetIndices.push(`${inputVars[i].indicesSet(`input${i}Indices`, index, `${symbol}`)}`);\n });\n reduceOpCompute.push(`prod *= ${inputVars[i].getByIndices(`input${i}Indices`)};`);\n }\n });\n reduceOpsLoopHeaders.push(\n `for(var ${symbol}: u32 = 0; ${symbol} < uniforms.${appendMax(symbol)}; ${symbol}++) {`);\n reduceOpsLoopFooters.push('}');\n }\n });\n const reduceOps = isReduceOpsWithoutLoop ?\n [\n ...idxCopy,\n `let sum = ${inputVars.map((inputVar, i) => inputVar.getByIndices(`input${i}Indices`)).join(' * ')};`\n ] :\n [\n ...idxCopy,\n initSum,\n ...reduceOpsLoopHeaders,\n ...reduceOpsSetIndices,\n initProd,\n ...reduceOpCompute,\n updateSum,\n ...reduceOpsLoopFooters,\n ];\n return `\n ${\n shaderHelper\n .registerUniforms(uniformsSymbols.map((symbol) => ({name: `${appendMax(symbol)}`, type: 'u32'})))\n .registerUniform('outputSize', 'u32')\n .declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${output.offsetToIndices('global_idx')};\n ${inputVars.map((_var, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\\n')}\n ${reduceOps.join('\\n')};\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'Einsum',\n shaderCache: {hint: einsumEquation.equation, inputDependencies: inputShapes.map(() => 'rank')},\n getRunData: () => {\n // The symbols from uniformSymbols array are guaranteed to exist in einsumEquations.symbolToInfo map. 
The\n // filter is added to make sure that dimValue is never 0.\n const programUniformsInit: ProgramUniform[] =\n uniformsSymbols.filter((symbol) => einsumEquation.symbolToInfo.has(symbol))\n .map(\n (symbol) =>\n ({type: DataType.uint32, data: einsumEquation.symbolToInfo.get(symbol)?.dimValue || 0}));\n programUniformsInit.push({type: DataType.uint32, data: outputSize});\n const programUniforms: ProgramUniform[] =\n inputShapes.map((dims, _) => [...createTensorShapeVariables(dims)])\n .reduce((acc, inputProgramUniforms) => acc.concat(inputProgramUniforms), programUniformsInit);\n programUniforms.push(...createTensorShapeVariables(outputShape));\n return ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n });\n },\n getShaderSource,\n };\n };\n\nexport const einsum = (context: ComputeContext, attributes: EinsumAttributes): void => {\n const einsumEquation = new EinsumEquation(context.inputs, attributes.equation);\n const outputShape = einsumEquation.outputDims;\n const inputShapes = context.inputs.map((input, _) => input.dims);\n context.compute(createEinsumProgramInfo(inputShapes, context.inputs[0].dataType, einsumEquation, outputShape));\n};\n\nexport const parseEinsumAttributes = (attributes: Record): EinsumAttributes => {\n const equation = (attributes.equation as string).replace(/\\s+/g, '');\n return createAttributeWithCacheKey({equation});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Expand requires 2 input.');\n }\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n\n let shapeIndex = shape.length < inputShape.length ? 0 : shape.length - inputShape.length;\n let inputShapeIndex = inputShape.length < shape.length ? 0 : inputShape.length - shape.length;\n for (; shapeIndex < shape.length && inputShapeIndex < inputShape.length; ++shapeIndex, ++inputShapeIndex) {\n if (shape[shapeIndex] !== inputShape[inputShapeIndex] && shape[shapeIndex] !== 1 &&\n inputShape[inputShapeIndex] !== 1) {\n throw new Error('Expand requires shape to be broadcastable to input');\n }\n }\n};\n\nconst getAdjustedShape = (shape1: readonly number[], shape2: readonly number[]): number[] => {\n const diff = shape1.length - shape2.length;\n const shape: number[] = [];\n for (let i = 0; i < diff; ++i) {\n shape.push(shape1[i]);\n }\n for (let i = 0; i < shape2.length; ++i) {\n shape.push(shape2[i] === 1 ? shape1[i + diff] : shape2[i]);\n }\n return shape;\n};\n\nconst calculateOutputShape = (inputShape: readonly number[], shape: readonly number[]): number[] =>\n (inputShape.length > shape.length) ? 
getAdjustedShape(inputShape, shape) : getAdjustedShape(shape, inputShape);\n\n\nconst createExpandProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n const outputShape: number[] = calculateOutputShape(inputShape, shape);\n const dataType = inputs[0].dataType;\n const components = dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', dataType, inputShape.length, components);\n const output = outputVariable('output', dataType, outputShape.length, components);\n let assignment: string;\n if (dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n let offset${x} = ${input.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${input.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n ${output.setByOffset('global_idx', 'data')}\n }`;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let inputOffset = ${input.broadcastedIndicesToOffset('outputIndices', output)};\n ${output.setByOffset('global_idx', input.getByOffset('inputOffset'))}\n }`;\n }\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}`;\n };\n\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)];\n return {\n name: 'Expand',\n shaderCache: {hint: `${outputShape.length}`, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const expand = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createExpandProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. 
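// Editorial sketch: the broadcast rule Expand uses above, restated as a small standalone helper.
// The longer shape is taken as-is and any `1` entry in the (right-aligned) shorter shape defers to
// the corresponding dimension of the longer one. Function and variable names are illustrative.
function broadcastShapes(longer: readonly number[], shorter: readonly number[]): number[] {
  const diff = longer.length - shorter.length;
  const out = longer.slice(0, diff);
  for (let i = 0; i < shorter.length; ++i) {
    out.push(shorter[i] === 1 ? longer[i + diff] : shorter[i]);
  }
  return out;
}

// e.g. broadcastShapes([2, 1, 4], [3, 1]) returns [2, 3, 4], matching what
// calculateOutputShape above produces for an input of shape [3, 1] and a requested shape [2, 1, 4].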
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType, UniformsArrayType, WORKGROUP_SIZE} from './common';\nimport * as unary from './unary-op';\n\n// GELU is defined as Y=0.5*X*(1+tanh(0.797885*X+0.035677*X*X*X)), where X may pre-add a bias.\n\nconst createFastGeluProgramInfo = (inputTensors: readonly TensorView[]): ProgramInfo => {\n const dataType = inputTensors[0].dataType;\n const outputSize = ShapeUtil.size(inputTensors[0].dims);\n const biasLength = ShapeUtil.size(inputTensors[1].dims);\n // can only use vec4 when bias length is multiple of 4\n const useVec4 = biasLength % 4 === 0;\n const getShaderSource = (shaderHelper: ShaderHelper): string => {\n const x = inputVariable('x', dataType, [1], 4);\n const bias = inputVariable('bias', dataType, [1], 4);\n const y = outputVariable('y', dataType, [1], 4);\n\n const uniforms: UniformsArrayType = [{name: 'output_vec_size', type: 'u32'}, {name: 'bias_size', type: 'u32'}];\n\n const singleElementBias = (i: 0|1|2|3) => `\n let bias${i}_offset: u32 = (global_idx * 4 + ${i}) % uniforms.bias_size;\n let bias${i} = ${bias.getByOffset(`bias${i}_offset / 4`)}[bias${i}_offset % 4];`;\n const biasGetExpression = useVec4 ?\n `\n let bias = ${bias.getByOffset('global_idx % (uniforms.bias_size / 4)')};` :\n `${singleElementBias(0)}${singleElementBias(1)}${singleElementBias(2)}${singleElementBias(3)}\n let bias = ${x.type.value}(bias0, bias1, bias2, bias3);`;\n\n return `${shaderHelper.registerUniforms(uniforms).declareVariables(x, bias, y)}\n\n ${unary.fastGeluImpl(tensorTypeToWsglValueType(dataType))}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_vec_size')}\n\n let x = ${x.getByOffset('global_idx')};\n ${biasGetExpression}\n let x_in = x + bias;\n ${y.setByOffset('global_idx', unary.fastGeluExpression('x_in'))}\n }`;\n };\n\n return {\n name: 'FastGeluWithBias',\n shaderCache: {hint: `${useVec4}`, inputDependencies: ['type', 'type']},\n getShaderSource,\n getRunData: (inputs) => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n programUniforms:\n [{type: DataType.uint32, data: Math.ceil(outputSize / 4)}, {type: DataType.uint32, data: biasLength}],\n dispatchGroup: {x: Math.ceil(outputSize / WORKGROUP_SIZE / 4)}\n })\n };\n};\n\nexport const fastGelu = (context: ComputeContext): void => {\n if (context.inputs.length < 2 || ShapeUtil.size(context.inputs[1].dims) === 0) {\n unary.fastGelu(context);\n } else {\n context.compute(createFastGeluProgramInfo(context.inputs));\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
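// Editorial sketch: a scalar reference of the tanh-approximated FastGELU that the shader above
// evaluates per element (after the optional bias add). The constants are the ones stated in the
// comment at the top of this file; the function name is illustrative.
function fastGeluRef(x: number, bias = 0): number {
  const v = x + bias;
  return 0.5 * v * (1 + Math.tanh(0.797885 * v + 0.035677 * v * v * v));
}

// fastGeluRef(0) === 0; for large positive v the result approaches v, and for large negative v it
// approaches 0, as expected of a GELU approximation.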
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n};\n\nconst createGatherProgramInfo = (inputs: readonly TensorView[], attributes: GatherAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const indicesShape = inputs[1].dims;\n\n const inputRank = inputShape.length;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n\n const outputShape = inputShape.slice(0);\n outputShape.splice(axis, 1, ...indicesShape);\n\n const axisDimLimit = inputShape[axis];\n const components = inputs[0].dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}, ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const data = inputVariable('data', inputs[0].dataType, inputs[0].dims.length, components);\n const indices = inputVariable('inputIndices', inputs[1].dataType, inputs[1].dims.length);\n const output = outputVariable('output', inputs[0].dataType, outputShape.length, components);\n\n const calcDataIndices = (x: number|string): string => {\n const indicesRank = indicesShape.length;\n let calcStr = `var indicesIndices${x} = ${indices.type.indices}(0);`;\n for (let i = 0; i < indicesRank; i++) {\n calcStr += `${indicesRank > 1 ? `indicesIndices${x}[${i}]` : `indicesIndices${x}`} = ${\n outputShape.length > 1 ? `outputIndices${x}[uniforms.axis + ${i}]` : `outputIndices${x}`};`;\n }\n calcStr += `\n var idx${x} = ${indices.getByIndices(`indicesIndices${x}`)};\n if (idx${x} < 0) {\n idx${x} = idx${x} + uniforms.axisDimLimit;\n }\n var dataIndices${x} : ${data.type.indices};\n `;\n for (let i = 0, j = 0; i < inputRank; i++) {\n if (i === axis) {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = u32(idx${x});`;\n j += indicesRank;\n } else {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = ${\n outputShape.length > 1 ? 
`outputIndices${x}[${j}]` : `outputIndices${x}`};`;\n j++;\n }\n }\n return calcStr;\n };\n let assignment: string;\n if (inputs[0].dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n ${calcDataIndices(x)};\n let offset${x} = ${data.indicesToOffset(`dataIndices${x}`)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${data.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var value = vec4(0);\n ${singleAssignment('value', 0, 'u32')}\n ${singleAssignment('value', 1, 'u32')}\n ${singleAssignment('value', 2, 'u32')}\n ${singleAssignment('value', 3, 'u32')}\n ${output.setByOffset('global_idx', 'value')}\n `;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n ${calcDataIndices('')};\n let value = ${data.getByIndices('dataIndices')};\n ${output.setByOffset('global_idx', 'value')};\n `;\n }\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(data, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n ${assignment}\n }`;\n };\n return {\n name: 'Gather',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank', 'rank']},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGatherAttributes = (attributes: Record): GatherAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gather = (context: ComputeContext, attributes: GatherAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
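// Editorial sketch: how the Gather kernel above derives its output shape and normalizes negative
// indices against the gathered axis. Names here are illustrative; the real logic lives inline in
// createGatherProgramInfo.
function gatherOutputShape(
    inputShape: readonly number[], indicesShape: readonly number[], axis: number): number[] {
  // The axis dimension of the data is replaced by the full indices shape.
  const out = inputShape.slice(0);
  out.splice(axis, 1, ...indicesShape);
  return out;
}

function normalizeGatherIndex(idx: number, axisDimLimit: number): number {
  // Negative indices count back from the end of the gathered axis.
  return idx < 0 ? idx + axisDimLimit : idx;
}

// e.g. gatherOutputShape([2, 3, 4], [5, 6], 1) returns [2, 5, 6, 4];
// normalizeGatherIndex(-1, 3) returns 2.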
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherElementsAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('GatherElements requires 2 inputs.');\n }\n\n if (inputs[0].dims.length < 1) {\n throw new Error('GatherElements requires that the data input be rank >= 1.');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error(`GatherElements requires that the data input and\n indices input tensors be of same rank.`);\n }\n};\n\nconst createGatherElementsProgramInfo =\n (inputs: readonly TensorView[], attributes: GatherElementsAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputOutputDataType = inputs[0].dataType;\n const inputRank = inputShape.length;\n\n const indicesShape = inputs[1].dims;\n const indicesDataType = inputs[1].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n const axisDimLimit = inputShape[axis];\n\n const outputShape = indicesShape.slice(0);\n const outputSize = ShapeUtil.size(outputShape);\n\n const input = inputVariable('input', inputOutputDataType, inputRank);\n const indices = inputVariable('indicesInput', indicesDataType, indicesShape.length);\n const output = outputVariable('output', inputOutputDataType, outputShape.length);\n\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}\n ];\n programUniforms.push(...createTensorShapeVariables(inputShape, indicesShape, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n // int64 indices would be treated as little endian i32 with assumption they fall in i32 limits\n // That assumption is safe as it's not possible to allocate >2gb buffer for input tensor\n // Input data will be treated as u32 or two u32 for 8-byte tensors\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n\n var idx = ${indices.getByOffset('global_idx')};\n if (idx < 0) {\n idx = idx + uniforms.axisDimLimit;\n }\n var inputIndices = ${input.type.indices}(outputIndices);\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(idx)')};\n let value = ${input.getByIndices('inputIndices')};\n\n ${output.setByOffset('global_idx', 'value')};\n }`;\n\n return {\n name: 'GatherElements',\n shaderCache: {inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const 
parseGatherElementsAttributes = (attributes: Record): GatherElementsAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gatherElements = (context: ComputeContext, attributes: GatherElementsAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherElementsProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {GemmUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (inputs.length < 2 || inputs.length > 3) {\n throw new Error('Invaid input number.');\n }\n\n // 'C' can be of dimensionality 0, 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length > 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].dataType !== inputs[1].dataType) ||\n (inputs.length === 3 && inputs[0].dataType !== inputs[2].dataType)) {\n throw new Error('Input types are mismatched');\n }\n};\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n}\n\nconst createGemmProgramInfo = (inputs: readonly TensorView[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N, K] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}, {type: DataType.float, data: attributes.alpha},\n {type: DataType.float, data: attributes.beta}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (inputs.length === 3) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n let line = '';\n if (attributes.transA && attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[n * uniforms.K + k];';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[k * uniforms.N + n];';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[n * uniforms.K + k];';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[k * uniforms.N + n];';\n }\n\n const calculateAlpha = attributes.alpha === 1 ? 
'' : 'value *= uniforms.alpha;';\n const a = inputVariable('a', inputs[0].dataType, inputs[0].dims);\n const b = inputVariable('b', inputs[1].dataType, inputs[1].dims);\n const dataType = a.type.value;\n let c: IndicesHelper|null = null;\n const variables = [a, b];\n if (inputs.length === 3) {\n c = inputVariable('c', inputs[2].dataType, inputs[2].dims.length);\n variables.push(c);\n }\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n variables.push(output);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'K', type: 'u32'},\n {name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let m = global_idx / uniforms.N;\n let n = global_idx % uniforms.N;\n\n var value = ${dataType}(0);\n for (var k: u32 = 0u; k < uniforms.K; k++) {\n ${line}\n }\n\n ${calculateAlpha}\n ${(() => {\n if (c != null) {\n return `let cOffset = ${c.broadcastedIndicesToOffset('vec2(m, n)', output)}; value += ${\n dataType}(uniforms.beta) * ${c.getByOffset('cOffset')};`;\n }\n return '';\n })()}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Gemm',\n shaderCache: {hint: `${attributes.cacheKey}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGemmAttributes = (attributes: Record): GemmAttributes => {\n const transA = attributes.transA as boolean;\n const transB = attributes.transB as boolean;\n const alpha = attributes.alpha as number;\n const beta = attributes.beta as number;\n return {transA, transB, alpha, beta, cacheKey: `${attributes.transA};${attributes.transB};${attributes.alpha === 1}`};\n};\n\nexport const gemm = (context: ComputeContext, attributes: GemmAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createGemmProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, GpuDataType, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nconst getInput = (inputs: readonly TensorView[], i: number) =>\n (inputs.length > i) && (inputs[i].dims.length > 0) && (ShapeUtil.size(inputs[i].dims)) > 0 ? 
inputs[i] : undefined;\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = getInput(inputs, 1);\n const value = getInput(inputs, 2);\n const bias = getInput(inputs, 3);\n const keyPaddingMask = getInput(inputs, 4);\n const relativePositionBias = getInput(inputs, 5);\n const pastKey = getInput(inputs, 6);\n const pastValue = getInput(inputs, 7);\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // key_padding_mask (K/V) : (B) or (2*B + 1) or (B, L) or None\n // relative_position_bias : (B, 1, S, L)\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // bias (Q/K/V) : (D + D + D_v)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // bias (Q/K/V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n // bias (Q/K/V) : None or (D + D + D_v)\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? 
query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n if (pastKey && pastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastKey.dims[0] !== batchSize || pastKey.dims[1] !== attributes.numHeads || pastKey.dims[3] !== headSize) {\n throw new Error('Input \"past_key\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastValue.dims[0] !== batchSize || pastValue.dims[1] !== attributes.numHeads ||\n pastValue.dims[3] !== headSize) {\n throw new Error('Input \"past_value\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastKey.dims[2] !== pastValue.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have same dim 2 (past_sequence_length)');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n } else if (pastKey || pastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (key.dims[2] !== query.dims[2]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 2 (hidden_size)');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n }\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n if (bias) {\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimension');\n }\n\n if (value) {\n if (query.dims.length === 5 && query.dims[3] === 2) {\n throw new Error('bias is not allowed for packed kv.');\n }\n }\n }\n\n let maskType: AttentionMaskType = 
AttentionMaskType.none;\n if (keyPaddingMask) {\n maskType = AttentionMaskType.maskUnknown;\n const maskDims = keyPaddingMask.dims;\n if (maskDims.length === 1) {\n if (maskDims[0] === batchSize) {\n maskType = AttentionMaskType.mask1dKeySeqLen;\n } else if (maskDims[0] === 3 * batchSize + 2) {\n maskType = AttentionMaskType.mask1DKeySeqLenStart;\n }\n } else if (maskDims.length === 2 && maskDims[0] === batchSize && maskDims[1] === kvSequenceLength) {\n maskType = AttentionMaskType.mask2dKeyPadding;\n }\n if (maskType === AttentionMaskType.maskUnknown) {\n throw new Error('Input \"key_padding_mask\" shape shall be (batch_size) or (batch_size, kv_sequence_length)');\n }\n throw new Error('Mask not supported');\n }\n\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n if (keyPaddingMask) {\n throw new Error('Key padding mask is not supported');\n }\n\n if (relativePositionBias) {\n if (relativePositionBias.dims.length !== 4) {\n throw new Error('Input \"relative_position_bias\" is expected to have 4 dimensions');\n }\n if ((relativePositionBias.dims[0] !== batchSize && relativePositionBias.dims[0] !== 1) ||\n relativePositionBias.dims[1] !== attributes.numHeads || relativePositionBias.dims[2] !== sequenceLength ||\n relativePositionBias.dims[3] !== totalSequenceLength) {\n throw new Error('Input \"relative_position_bias\" shape (batch_size, 1, sequence_length, kv_sequence_length)');\n }\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n };\n};\n\nexport const parseMultiHeadAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst addBiasTranspose =\n (context: ComputeContext, qkv: TensorView, bias: TensorView, batchSize: number, sequenceLength: number,\n hiddenSize: number, biasOffset: number) => {\n const outputShape = [batchSize, sequenceLength, hiddenSize];\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: biasOffset},\n {type: DataType.uint32, data: hiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: 
ShaderHelper) => {\n const output = outputVariable('qkv_with_bias', qkv.dataType, outputShape);\n const qkvInput = inputVariable('qkv', qkv.dataType, outputShape);\n const biasInput = inputVariable('bias', bias.dataType, outputShape);\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'bias_offset', type: 'u32'}, {name: 'hidden_size', type: 'u32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(qkvInput, biasInput, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset;\n\n qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx];\n }`;\n };\n\n return context.compute(\n {\n name: 'MultiHeadAttentionAddBias',\n shaderCache: {inputDependencies: ['type', 'type']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: qkv.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [qkv, bias], outputs: [-1]})[0];\n };\n\nexport const maybeTransposeToBNSHAndAddBias =\n (context: ComputeContext, batchSize: number, numHeads: number, sequenceLength: number, headSize: number,\n input: TensorView, bias?: TensorView, biasOffset?: number) => {\n // const newDims = [];\n\n let reshapedInput = input;\n if (!bias) {\n if (input.dims.length === 3) {\n reshapedInput = input.reshape([batchSize, sequenceLength, numHeads, headSize]);\n }\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n } else {\n if (sequenceLength === 1) {\n throw new Error('AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV');\n } else {\n reshapedInput =\n addBiasTranspose(context, input, bias, batchSize, sequenceLength, numHeads * headSize, biasOffset!);\n reshapedInput = reshapedInput.reshape([batchSize, sequenceLength, numHeads, headSize]);\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n }\n }\n };\n\nexport const multiHeadAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n const query = context.inputs[0];\n const key = getInput(context.inputs, 1);\n const value = getInput(context.inputs, 2);\n const bias = getInput(context.inputs, 3);\n const keyPaddingMask = getInput(context.inputs, 4);\n const relativePositionBias = getInput(context.inputs, 5);\n const pastKey = getInput(context.inputs, 6);\n const pastValue = getInput(context.inputs, 7);\n if (query.dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (key?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n // applyAttention expects BNSH inputs\n const kvBNSH = key && value && key.dims.length === 4 && value.dims.length === 4;\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, query, bias, 0);\n\n if (kvBNSH) {\n return applyAttention(\n context, Q, key, value, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params,\n attributes);\n }\n if (!key || !value) {\n throw new Error('key and value must be provided');\n }\n const K = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.headSize, key, bias,\n params.hiddenSize);\n\n const V = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.vHeadSize, value, bias,\n 2 * params.hiddenSize);\n\n applyAttention(\n context, Q, K, V, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst getRepeats = (repeatsTensorView: TensorView): readonly number[] =>\n Array.from(repeatsTensorView.getBigInt64Array(), Number);\n\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 inputs.');\n }\n\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16 &&\n inputs[0].dataType !== DataType.int32 && inputs[0].dataType !== DataType.uint32) {\n throw new Error('Tile only support float, float16, int32, and uint32 data types');\n }\n\n if (inputs[1].dataType !== DataType.int64) {\n throw new Error('Tile `repeats` input should be of int64 data type');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('Tile `repeats` input should be 1-D');\n }\n\n const repeats: readonly number[] = getRepeats(inputs[1]);\n\n if (repeats.length !== inputs[0].dims.length) {\n throw new Error('Tile `repeats` input should have same number of elements as rank of input data tensor');\n }\n};\n\nconst getOutputShape = (inputShape: readonly number[], repeats: readonly number[]): readonly number[] => {\n const outputShape: number[] = [];\n\n for (let i = 0; i < inputShape.length; ++i) {\n outputShape.push(inputShape[i] * repeats[i]);\n }\n\n return outputShape;\n};\n\nexport const createTileProgramInfo = (inputs: readonly TensorView[], shape?: number[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const repeats: readonly number[] = shape == null ? 
getRepeats(inputs[1]) : shape;\n const outputShape = getOutputShape(inputShape, repeats);\n const outputSize = ShapeUtil.size(outputShape);\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, inputShape.length);\n const output = outputVariable('output', dataType, outputShape.length);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const inputShape = ${input.indices(...inputShape)};\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n for (var i = 0; i < ${inputShape.length}; i++) {\n let input_dim_i = ${input.indicesGet('uniforms.input_shape', 'i')};\n let input_dim_value = ${output.indicesGet('output_indices', 'i')} % input_dim_i;\n\n ${input.indicesSet('input_indices', 'i', 'input_dim_value')}\n }\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n\n return {\n name: 'Tile',\n shaderCache: {hint: `${repeats}`, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n }),\n getShaderSource,\n };\n};\n\nexport const tile = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createTileProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {maybeTransposeToBNSHAndAddBias} from './multihead-attention';\nimport {createTileProgramInfo} from './tile';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nexport const validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = inputs[1];\n const value = inputs[2];\n const pastKey = inputs[3];\n const pastValue = inputs[4];\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, 
S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n const hasPastKey = pastKey && pastKey.dims.length !== 0;\n const hasPastValue = pastValue && pastValue.dims.length !== 0;\n // TODO : this should be from attributes.\n const isPastkvBSNH = true;\n if (hasPastKey && hasPastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n if (isPastkvBSNH) {\n // For BSNH\n pastSequenceLength = pastKey.dims[1];\n maxSequenceLength = pastKey.dims[1];\n } else {\n // For BNSH\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n }\n } else if (hasPastKey || hasPastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (query.dims[2] % key.dims[2] !== 0) {\n throw new Error('Dimension 2 of \"query\" should be a multiple of \"key\"');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n 
}\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n const maskType: AttentionMaskType = AttentionMaskType.none;\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.kvNumHeads!),\n numHeads: attributes.numHeads,\n kvNumHeads: attributes.kvNumHeads,\n nReps: attributes.numHeads / attributes.kvNumHeads!,\n pastPresentShareBuffer: false,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n isPastkvBSNH,\n };\n};\n\nconst createConcatProgramInfo =\n (a: TensorView, b: TensorView|undefined, dataType: DataType, params: AttentionParameters): ProgramInfo => {\n const outputShape = [params.batchSize, params.totalSequenceLength, params.kvNumHeads!, params.headSize];\n const component = 4;\n const outputSize = ShapeUtil.size(outputShape) / component;\n const presentSequenceLength = params.totalSequenceLength;\n const output = outputVariable('present_kv', dataType, outputShape.length, component);\n const inputA = inputVariable('new_kv', a.dataType, a.dims.length, component);\n const inputB = b ? inputVariable('past_kv', b.dataType, b.dims.length, component) : undefined;\n\n const H = Math.ceil(params.headSize / component);\n const dispatch = {x: presentSequenceLength, y: a.dims[0], z: 1};\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = b ? 
['rank', 'rank'] : ['rank'];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: params.pastSequenceLength},\n {type: DataType.uint32, data: params.kvSequenceLength},\n {type: DataType.uint32, data: params.totalSequenceLength}\n ];\n\n const inputs = [inputA];\n if (inputB) {\n programUniforms.push(\n ...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(b!.dims),\n ...createTensorShapeVariables(outputShape));\n inputs.push(inputB);\n } else {\n programUniforms.push(...createTensorShapeVariables(a.dims), ...createTensorShapeVariables(outputShape));\n }\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'past_seqlen', type: 'u32'}, {name: 'new_seqlen', type: 'u32'},\n {name: 'present_seqlen', type: 'u32'}\n ];\n\n const pastStr = ` let past_batch_stride = uniforms.past_seqlen * num_heads * H;\n var past_head_stride = uniforms.past_seqlen * H;\n if (is_bsnh) {\n past_head_stride = H;\n }\n let in_offset = b * past_batch_stride + s * row_stride + n * past_head_stride + h;\n present_kv[out_offset] = past_kv[in_offset];`;\n const newStr = ` let new_batch_stride = uniforms.new_seqlen * num_heads * H;\n let new_row_stride = num_heads * H;\n let new_head_stride = H;\n let in_offset = b * new_batch_stride + (s - past_seqlen) * new_row_stride + n * new_head_stride + h;\n present_kv[out_offset] = new_kv[in_offset];`;\n const concatStr = b ? `if (s < past_seqlen) {\n ${pastStr}\n } else if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }` :\n `if (s < past_seqlen + uniforms.new_seqlen) {\n ${newStr}\n }`;\n\n // TODO: handle H * params.kvNumHeads greater than maxComputeInvocationsPerWorkgroup limit.\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputs, output)}\n ${shaderHelper.mainStart([\n H, params.kvNumHeads!, 1\n ])}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var indices = ${output.offsetToIndices('global_idx')};\n let h = local_id.x;\n let n = local_id.y;\n let s = workgroup_id.x;\n let b = workgroup_id.y;\n let num_heads = ${params.kvNumHeads!}u;\n let H = ${H}u;\n\n let present_seqlen = uniforms.present_seqlen;\n let present_batch_stride = present_seqlen * num_heads * H;\n var row_stride = H;\n let is_bsnh = ${params.isPastkvBSNH};\n\n if (is_bsnh) {\n row_stride = num_heads * H;\n }\n var present_head_stride = present_seqlen * H;\n if (is_bsnh) {\n present_head_stride = H;\n }\n\n let past_seqlen = uniforms.past_seqlen;\n\n let out_offset = b * present_batch_stride + s * row_stride + n * present_head_stride + h;\n ${concatStr}\n }`;\n\n return {\n name: 'ConcatPastNew',\n shaderCache: {hint: `${params.kvNumHeads!}${H}${!!b}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: dispatch,\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const parseGroupQueryAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst maybeExpandAndTransposeToBNSH =\n (context: ComputeContext, input: TensorView, pastKV: TensorView|undefined, params: AttentionParameters,\n outputIndex: number) => {\n let reshapedInput = input;\n const numHeads = params.kvNumHeads!;\n const nReps = params.nReps!;\n if (input.dims.length === 3 && 
params.kvSequenceLength !== 0) {\n reshapedInput = input.reshape([params.batchSize, params.kvSequenceLength, numHeads, params.headSize]);\n }\n\n if (pastKV) {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, pastKV, reshapedInput.dataType, params),\n {inputs: [reshapedInput, pastKV], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n } else {\n reshapedInput = context.compute(\n createConcatProgramInfo(reshapedInput, undefined, reshapedInput.dataType, params),\n {inputs: [reshapedInput], outputs: [params.isPastkvBSNH ? outputIndex : -1]})[0];\n }\n if (nReps !== 1) {\n reshapedInput = context.compute(\n createTileProgramInfo([reshapedInput], [1, 1, 1, nReps]), {inputs: [reshapedInput], outputs: [-1]})[0];\n reshapedInput =\n reshapedInput.reshape([params.batchSize, params.totalSequenceLength, numHeads * nReps, params.headSize]);\n }\n\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n };\n\nexport const groupQueryAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (context.inputs[1]?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, context.inputs[0], undefined,\n 0);\n const pastKey = context.inputs[3] && context.inputs[3].dims.length !== 0 ? context.inputs[3] : undefined;\n const pastValue = context.inputs[4] && context.inputs[4].dims.length !== 0 ? context.inputs[4] : undefined;\n const K = maybeExpandAndTransposeToBNSH(context, context.inputs[1], pastKey, params, 1);\n const V = maybeExpandAndTransposeToBNSH(context, context.inputs[2], pastValue, params, 2);\n applyAttention(context, Q, K, V, undefined, undefined, undefined, undefined, undefined, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface InstanceNormAttributes {\n epsilon: number;\n format: 'NHWC'|'NCHW';\n}\n\nconst createInstanceNormProgramInfo =\n (inputs: readonly TensorView[], attributes: InstanceNormAttributes): ProgramInfo => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const axis = 2;\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n const components = getMaxComponents(normSize);\n const normPackedSize = normSize / components;\n const inputShape = [xShape[0], xShape[1], normPackedSize];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'type', 'type'];\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: normSize}, {type: DataType.uint32, data: normPackedSize}];\n programUniforms.push(...createTensorShapeVariables(inputShape, inputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputShape.length, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const output = outputVariable('output', inputs[0].dataType, inputShape.length, components);\n const variables = [x, scale, bias, output];\n const dataType = x.type.value;\n const f32Type = components === 1 ? 
'f32' : `vec${components}`;\n const workgroupSize = 64;\n\n const uniforms: UniformsArrayType = [{name: 'normSize', type: 'u32'}, {name: 'normPackedSize', type: 'u32'}];\n return `\n var meanShared : f32;\n var squaredNormShared : f32;\n var workgroupShared : array<${f32Type}, ${workgroupSize}>;\n const workgroupSize = ${workgroupSize}u;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart(workgroupSize)}\n let norm = global_idx / workgroupSize;\n let batch = norm / uniforms.x_shape[1];\n let channel = norm % uniforms.x_shape[1];\n let localIndex = local_id.x;\n\n // initialize workgroup memory\n var initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n initial = initial + ${f32Type}(${x.get('batch', 'channel', 'h')});\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the mean of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n meanShared = ${sumVector('workgroupShared[0]', components)} / f32(uniforms.normSize);\n }\n workgroupBarrier();\n\n // reinitialize workgroup memory.\n initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let deviation = ${f32Type}(${x.get('batch', 'channel', 'h')}) - ${f32Type}(meanShared);\n initial = initial + deviation * deviation;\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the sum of square of deviation of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n squaredNormShared = ${sumVector('workgroupShared[0]', components)};\n }\n workgroupBarrier();\n\n let invStdDev = inverseSqrt(squaredNormShared / f32(uniforms.normSize) + f32(${attributes.epsilon}));\n let channelScale = invStdDev * f32(${scale.getByOffset('channel')});\n let channelShift = f32(${bias.getByOffset('channel')}) - meanShared * channelScale;\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let value = ${x.get('batch', 'channel', 'h')} * ${dataType}(${f32Type}(channelScale)) + ${dataType}(${\n f32Type}(channelShift));\n ${output.set('batch', 'channel', 'h', 'value')};\n }\n }`;\n };\n return {\n ...{name: 'InstanceNormalization'},\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${attributes.epsilon};${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: normCount},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nconst computeMean =\n (context: ComputeContext, input: TensorView, scale: TensorView, bias: TensorView, n: number, h: number, c: number,\n epsilon: number) => {\n const components = getMaxComponents(c);\n const WG = 64;\n // we will store channel scale and channel shift in [2, components] matrix\n // or in vec2 when components == 1\n const outputType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const sumCastType = components === 1 ? 
'f32' : `vec${components}f`;\n const setOutputValue = (var1: string, var2: string) => `${outputType}(${var1}, ${var2})`;\n const unitsOfWork = n * c / components;\n const wgSize = Math.ceil(h / WG);\n\n const meanInputDependencies: ProgramInputTensorInfoDependency[] = ['type'];\n const meanProgramUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: wgSize}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(h * c / components)}\n ];\n\n const getMeanShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = inputVariable('input', input.dataType, input.dims, components);\n return `\n ${shaderHelper.declareVariables(inputHelper)}\n @group(0) @binding(1) var output : array<${outputType}>;\n struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32};\n @group(0) @binding(2) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart(WG)}\n let currentImageNumber = global_idx / ${WG} / uniforms.C;\n let currentChannelNumber = (global_idx / ${WG}) % uniforms.C;\n let wgOffset = local_id.x * uniforms.wg_size;\n if (wgOffset >= uniforms.H) {\n return;\n }\n let wgMax = min(wgOffset + uniforms.wg_size, uniforms.H);\n\n let offset = currentImageNumber * uniforms.image_size + currentChannelNumber;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = wgOffset; i < wgMax; i++) {\n let value = ${sumCastType}(input[offset + i * uniforms.C]);\n sum += value;\n squaredSum += value * value;\n }\n output[global_idx] = ${setOutputValue('sum', 'squaredSum')};\n }`;\n };\n\n const meanValues = context.compute(\n {\n name: 'InstanceNormComputeMean',\n shaderCache: {hint: `${components}`, inputDependencies: meanInputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, WG, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: n * c / components},\n programUniforms: meanProgramUniforms\n }),\n getShaderSource: getMeanShaderSource,\n },\n {inputs: [input], outputs: [-1]})[0];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: unitsOfWork}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(WG * c / components)}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type', 'type'];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const scaleHelper = inputVariable('scale', scale.dataType, scale.dims, components);\n const biasHelper = inputVariable('bias', bias.dataType, bias.dims, components);\n return `\n @group(0) @binding(0) var input : array<${outputType}>;\n @group(0) @binding(1) var scale : array<${scaleHelper.type.storage}>;\n @group(0) @binding(2) var bias : array<${biasHelper.type.storage}>;\n @group(0) @binding(3) var output : array<${outputType}>;\n struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32};\n @group(0) @binding(4) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.units_of_work')}\n let currentImageNumber = global_idx / uniforms.C;\n let currentChannelNumber = global_idx % uniforms.C;\n\n let offset = currentImageNumber * uniforms.image_size;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = 0; i < min(${WG}, uniforms.H); i++) {\n let value = input[offset + i + currentChannelNumber * ${WG}];\n sum += 
value[0];\n squaredSum += value[1];\n }\n sum = sum / f32(uniforms.H);\n squaredSum = squaredSum / f32(uniforms.H);\n let invStdDev = inverseSqrt(squaredSum - sum * sum + f32(${epsilon}));\n let channelScale = invStdDev * ${sumCastType}(scale[currentChannelNumber]);\n let channelShift = ${sumCastType}(bias[currentChannelNumber]) - sum * channelScale;\n\n output[global_idx] = ${setOutputValue('channelScale', 'channelShift')};\n }`;\n };\n return context.compute(\n {\n name: 'InstanceNormComputeChannelScaleShift',\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${components};${epsilon}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: Math.ceil(unitsOfWork / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [meanValues, scale, bias], outputs: [-1]})[0];\n };\n\nconst createInstanceNormNHWCProgramInfo =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: InstanceNormAttributes) => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const N = xShape[0];\n const C = xShape[xShape.length - 1];\n const H = ShapeUtil.sizeFromDimension(xShape, 1) / C;\n const components = getMaxComponents(C);\n const outputSize = ShapeUtil.size(outputShape) / components;\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: H}, {type: DataType.uint32, data: Math.floor(C / components)}];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n // first compute mean\n const channelScaleShift = computeMean(context, inputs[0], inputs[1], inputs[2], N, H, C, attributes.epsilon);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const scaleType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const scaleCastType = components === 1 ? 
dataType : `vec${components}<${dataType}>`;\n\n const inputHelper = inputVariable('input', inputs[0].dataType, inputs[0].dims, components);\n const outputHelper = outputVariable('output', inputs[0].dataType, outputShape, components);\n\n return `\n @group(0) @binding(0) var input : array<${inputHelper.type.storage}>;\n @group(0) @binding(1) var scaleInput : array<${scaleType}>;\n @group(0) @binding(2) var output : array<${outputHelper.type.storage}>;\n struct Uniforms {H: u32, C : u32};\n @group(0) @binding(3) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n let currentImageNumber = global_idx / (uniforms.C * uniforms.H);\n let currentChannelNumber = global_idx % uniforms.C;\n\n let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber;\n let scale = scaleInput[scaleOffset];\n output[global_idx] = fma(input[global_idx], ${scaleCastType}(scale[0]), ${scaleCastType}(scale[1]));\n }`;\n };\n context.compute(\n {\n name: 'InstanceNormalizationNHWC',\n shaderCache: {hint: `${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [inputs[0], channelScaleShift]});\n };\n\nexport const instanceNorm = (context: ComputeContext, attributes: InstanceNormAttributes): void => {\n if (attributes.format === 'NHWC') {\n createInstanceNormNHWCProgramInfo(context, context.inputs, attributes);\n } else {\n context.compute(createInstanceNormProgramInfo(context.inputs, attributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {castToF32, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType,} from './common';\n\ninterface LayerNormAttributes {\n simplified: boolean;\n axis: number;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 2) {\n throw new Error('layerNorm requires at least 2 inputs.');\n }\n};\n\nconst createLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: LayerNormAttributes, outputCount: number): ProgramInfo => {\n const simplified = attributes.simplified;\n\n const xShape = inputs[0].dims;\n const scale = inputs[1];\n const bias = !simplified && inputs[2];\n\n const outputShape = xShape;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, xShape.length);\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n\n const scaleSize = ShapeUtil.size(scale.dims);\n const biasSize = bias ? 
ShapeUtil.size(bias.dims) : 0;\n if (scaleSize !== normSize || (bias && biasSize !== normSize)) {\n throw new Error(`Size of X.shape()[axis:] == ${normSize}.\n Size of scale and bias (if provided) must match this.\n Got scale size of ${scaleSize} and bias size of ${biasSize}`);\n }\n\n const meanInvStdDevDim: number[] = [];\n for (let i = 0; i < xShape.length; ++i) {\n if (i < axis) {\n meanInvStdDevDim.push(xShape[i]);\n } else {\n meanInvStdDevDim.push(1);\n }\n }\n const components = getMaxComponents(normSize);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: normCount}, {type: DataType.float, data: normSize},\n {type: DataType.uint32, data: Math.floor(normSize / components)},\n {type: DataType.float, data: attributes.epsilon}\n ];\n if (bias) {\n inputDependencies.push('type');\n }\n const hasMeanDataOutput = outputCount > 1;\n const hasInvStdOutput = outputCount > 2;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('scale', scale.dataType, scale.dims, components),\n ];\n if (bias) {\n variables.push(inputVariable('bias', bias.dataType, bias.dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanDataOutput) {\n variables.push(outputVariable('mean_data_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'norm_count', type: 'u32'}, {name: 'norm_size', type: 'f32'},\n {name: 'norm_size_vectorized', type: 'u32'}, {name: 'epsilon', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.norm_count')}\n let offset = global_idx * uniforms.norm_size_vectorized;\n var mean_vector = ${fillVector('f32', components)};\n var mean_square_vector = ${fillVector('f32', components)};\n\n for (var h: u32 = 0u; h < uniforms.norm_size_vectorized; h++) {\n let value = ${castToF32(dataType, components, 'x[h + offset]')};\n mean_vector += value;\n mean_square_vector += value * value;\n }\n let mean = ${sumVector('mean_vector', components)} / uniforms.norm_size;\n let inv_std_dev = inverseSqrt(${sumVector('mean_square_vector', components)} / uniforms.norm_size ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n\n for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) {\n let f32input = ${castToF32(dataType, components, 'x[j + offset]')};\n let f32scale = ${castToF32(dataType, components, 'scale[j]')};\n output[j + offset] = ${variables[0].type.value}((f32input ${simplified ? '' : '- mean'}) * inv_std_dev * f32scale\n ${bias ? `+ ${castToF32(dataType, components, 'bias[j]')}` : ''}\n );\n }\n\n ${hasMeanDataOutput ? 'mean_data_output[global_idx] = mean' : ''};\n ${hasInvStdOutput ? 
'inv_std_output[global_idx] = inv_std_dev' : ''};\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (hasMeanDataOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (hasInvStdOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n\n return {\n name: 'LayerNormalization',\n shaderCache: {hint: `${components};${outputCount};${simplified}`, inputDependencies},\n getRunData: () =>\n ({outputs, dispatchGroup: {x: Math.ceil(normCount / 64 /* workgroup size */)}, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const layerNorm = (context: ComputeContext, attributes: LayerNormAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createLayerNormProgramInfo(context.inputs, attributes, context.outputCount));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType, getTensorElementSize} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\n// TODO support quantization bits not equal to 4\nexport interface MatMulNBitsAttributes extends AttributeWithCacheKey {\n k: number;\n n: number;\n accuracyLevel: number;\n bits: number;\n blockSize: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes): void => {\n if (inputs.length < 3 || inputs.length > 4) {\n throw new Error('MatMulNBits requires 3 or 4 inputs');\n }\n const a = inputs[0];\n const aRank = a.dims.length;\n if (a.dims[aRank - 1] !== attributes.k) {\n throw new Error('The last dim of input shape does not match the k value');\n }\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const b = inputs[1];\n if (!ShapeUtil.areEqual(b.dims, [attributes.n, nBlocksPerCol, blobSize])) {\n throw new Error('The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize');\n }\n const scales = inputs[2];\n const scalesShape = scales.dims;\n if (ShapeUtil.size(scalesShape) !== attributes.n * nBlocksPerCol) {\n throw new Error('scales input size error.');\n }\n if (inputs.length === 4) {\n const zeroPoints = inputs[3];\n const zeroPointsShape = zeroPoints.dims;\n const expectedZeroPointsSize =\n attributes.bits > 4 ? 
(attributes.n * nBlocksPerCol) : attributes.n * Math.floor((nBlocksPerCol + 1) / 2);\n if (ShapeUtil.size(zeroPointsShape) !== expectedZeroPointsSize) {\n throw new Error('zeroPoints input size error.');\n }\n }\n};\n\nexport const createMatMulNBitsProgramInfo =\n (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes,\n maxComputeWorkgroupSizes: [number, number, number], maxComputeWorkgroupStorageSize: number): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const aRank = inputShape.length;\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const dimAOuter = inputShape[aRank - 2];\n const dimInner = attributes.k;\n const dimBOuter = attributes.n;\n const batchDims = inputShape.slice(0, aRank - 2);\n const batchSize = ShapeUtil.size(batchDims);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const blobSizeInWords = blobSize / 4;\n const dataType = inputs[0].dataType;\n const outputNumber = getMaxComponents(dimAOuter);\n const aComponents = getMaxComponents(attributes.k);\n const bComponents = getMaxComponents(blobSizeInWords);\n const elementSize = getTensorElementSize(dataType)!;\n const workgroupOutputSize = dimAOuter * nBlocksPerCol * elementSize;\n const maxNumberOfComponents = Math.floor(maxComputeWorkgroupStorageSize / workgroupOutputSize);\n const useBlockwiseMatMulNBits = nBlocksPerCol <= maxComputeWorkgroupSizes[0] && maxNumberOfComponents > 0;\n const components = (!useBlockwiseMatMulNBits || maxNumberOfComponents >= 4) ? getMaxComponents(dimBOuter) :\n ((maxNumberOfComponents >= 2) && getMaxComponents(dimBOuter) >= 2) ? 2 :\n 1;\n const outputShape = batchDims.concat([dimAOuter, dimBOuter]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n\n const programUniforms: ProgramUniform[] = useBlockwiseMatMulNBits ?\n [] :\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.blockSize}];\n const inputShapeTemp = [batchSize, dimAOuter, dimInner / aComponents];\n const bShape = ShapeUtil.convertShape(inputs[1].dims).slice();\n bShape.splice(-1, 1, blobSizeInWords / bComponents);\n programUniforms.push(...createTensorShapeVariables(inputShapeTemp));\n programUniforms.push(...createTensorShapeVariables(bShape));\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n if (inputs.length === 4) {\n programUniforms.push(...createTensorShapeVariables(ShapeUtil.convertShape(inputs[3].dims)));\n }\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputRank = inputShapeTemp.length;\n const a = inputVariable('a', inputs[0].dataType, inputRank, aComponents);\n const b = inputVariable('b', DataType.uint32, bShape.length, bComponents);\n const scales = inputVariable('scales', inputs[2].dataType, inputs[2].dims.length);\n const inputVariables = [a, b, scales];\n const zeroPoints =\n inputs.length === 4 ? 
inputVariable('zero_points', DataType.uint32, inputs[3].dims.length) : undefined;\n if (zeroPoints) {\n inputVariables.push(zeroPoints);\n }\n const outputRank = outputShapeTemp.length;\n const output = outputVariable('output', inputs[0].dataType, outputRank, components);\n const uniforms: UniformsArrayType = [{name: 'output_size', type: 'u32'}, {name: 'block_size', type: 'u32'}];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const qDqDataType = (() => {\n switch (aComponents) {\n case 1:\n return `array<${dataType}, 8>`;\n case 2:\n return `mat4x2<${dataType}>`;\n case 4:\n return `mat2x4<${dataType}>`;\n default:\n throw new Error(`${aComponents}-component is not supported.`);\n }\n })();\n\n const processOneBlock = `\n for (var word: u32 = 0; word < ${blobSizeInWords}; word += ${bComponents}) {\n ${b.indicesSet('b_indices', '2', 'word')};\n let b_data = ${b.getByIndices('b_indices')};\n for (var i: u32 = 0; i < ${bComponents}; i++) {\n let b_value: u32 = ${bComponents === 1 ? 'b_data' : 'b_data[word + i]'};\n let b_mask: u32 = 0x0F0F0F0Fu;\n let b_value_lower: vec4 = unpack4xU8(b_value & b_mask);\n let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask);\n let b_quantized_values = ${qDqDataType}(${\n Array.from({length: 4}, (_, i) => `${dataType}(b_value_lower[${i}]), ${dataType}(b_value_upper[${i}])`)\n .join(', ')});\n let b_dequantized_values = ${(() => {\n if (aComponents === 1) {\n return `${qDqDataType}(${\n Array.from({length: 8}, (_, i) => `(b_quantized_values[${i}] - zero_point) * scale`).join(', ')});`;\n } else {\n return `(b_quantized_values - ${qDqDataType}(${Array(8).fill('zero_point').join(',')})) * scale;`;\n }\n })()};\n // Number of B elements per 32-bit word is 32/bits = 32/4 = 8\n for (var m: u32 = 0; m < ${useBlockwiseMatMulNBits ? dimAOuter : outputNumber}u; m++) {\n ${a.indicesSet('a_indices', inputRank - 2, useBlockwiseMatMulNBits ? 'm' : `row * ${outputNumber} + m`)};\n ${a.indicesSet('a_indices', inputRank - 1, 'word_offset')};\n var input_offset = ${a.indicesToOffset('a_indices')};\n var a_data: ${qDqDataType};\n for (var j: u32 = 0; j < ${8 / aComponents}; j++) {\n a_data[j] = ${a.getByOffset('input_offset')};\n input_offset++;\n }\n ${useBlockwiseMatMulNBits ? 'workgroup_shared[workgroup_shared_offset + m]' : 'output_values[m]'}${\n components > 1 ? '[c]' : ''} += ${\n Array\n .from(\n {length: 8 / aComponents},\n (_, i) => `${\n aComponents === 1 ? `a_data[${i}] * b_dequantized_values[${i}]` :\n `dot(a_data[${i}], b_dequantized_values[${i}])`}`)\n .join(' + ')};\n }\n word_offset += ${8 / aComponents};\n }\n }`;\n const updateZeroPointIndex = zeroPoints ? `\n zero_point_offset += 4;\n if (zero_point_offset == 32) {\n zero_point_offset = 0;\n zero_point_index++;\n zero_point_word = ${zeroPoints.getByOffset('zero_point_index')};\n }` :\n '';\n\n return useBlockwiseMatMulNBits ? `\n var workgroup_shared: array<${output.type.value}, ${dimAOuter * nBlocksPerCol}>;\n ${shaderHelper.declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart([\n nBlocksPerCol, 1, 1\n ])}\n var a_indices: ${a.type.indices};\n var block = local_id.x;\n var col = workgroup_id.y;\n var batch = workgroup_id.z;\n ${a.indicesSet('a_indices', '0', 'batch')};\n // Two zero points are packed into one byte when uniforms.bits is 4.\n for (var c: u32 = 0; c < ${components}; c++) {\n let col_times_components_plus_c = col * ${components} + c;\n ${\n zeroPoints ? 
`\n var zero_point_bytes_per_col: u32 = (${nBlocksPerCol} + 1) / 2;\n var zero_point_byte_count: u32 = col_times_components_plus_c * zero_point_bytes_per_col + (block >> 0x1u);\n var zero_point_word_index: u32 = zero_point_byte_count >> 0x2u;\n var zero_point_byte_offset: u32 = zero_point_byte_count & 0x3u;\n var zero_point_nibble_offset: u32 = block & 0x1u;\n var zero_point_bits_offset: u32 = (zero_point_byte_offset << 3) + (zero_point_nibble_offset << 2);\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_word_index')} >> zero_point_bits_offset;` :\n ''}\n var b_indices: ${b.type.indices};\n ${b.indicesSet('b_indices', '0', 'col_times_components_plus_c')};\n // The scale and zero points are computed per block.\n var scales_index = col_times_components_plus_c * ${nBlocksPerCol} + block;\n let scale = ${scales.getByOffset('scales_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? '(zero_point_word) & 0xFu' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block * ${attributes.blockSize / aComponents};\n var workgroup_shared_offset: u32 = block * ${dimAOuter};\n ${processOneBlock}\n }\n workgroupBarrier();\n var output_indices: ${output.type.indices};\n var elements_per_thread: u32 = ${Math.ceil(dimAOuter / nBlocksPerCol)};\n ${output.indicesSet('output_indices', '0', 'batch')};\n ${output.indicesSet('output_indices', outputRank - 1, 'col')};\n ${output.indicesSet('output_indices', outputRank - 2, 'local_id.x * elements_per_thread')};\n var output_offset = ${output.indicesToOffset('output_indices')};\n for (var m: u32 = 0u; m < elements_per_thread; m++) {\n var row = m + local_id.x * elements_per_thread;\n if (row < ${dimAOuter}) {\n var output_value: ${output.type.value} = ${output.type.value}(0);\n var workgroup_shared_offset: u32 = row;\n for (var b: u32 = 0u; b < ${nBlocksPerCol}u; b++) {\n output_value += workgroup_shared[workgroup_shared_offset];\n workgroup_shared_offset += ${dimAOuter};\n }\n ${output.setByOffset('output_offset', 'output_value')};\n output_offset += ${dimBOuter / components};\n }\n }\n }` :\n `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var output_values: array<${output.type.value}, ${outputNumber}>;\n var output_indices = ${output.offsetToIndices('global_idx')};\n var col = ${output.indicesGet('output_indices', outputRank - 1)};\n var row = ${output.indicesGet('output_indices', outputRank - 2)};\n var a_indices: ${a.type.indices} = output_indices;\n // Two zero points are packed into one byte because uniforms.bits <= 4.\n // zero_point_offset is either 0 or 4. It is bit offset within one byte.\n // TODO support zero_point_offset for bits > 4\n ${\n zeroPoints ? 
`\n var zero_point_abs_offset = col * ${components} * ((${nBlocksPerCol} + 1) / 2);\n var zero_point_index: u32 = zero_point_abs_offset / 4;\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_index')};\n var zero_point_offset: u32 = (zero_point_abs_offset % 4) * 8;` :\n ''}\n var scale_index = col * ${nBlocksPerCol * components};\n var b_indices: ${b.type.indices};\n for (var c: u32 = 0; c < ${components}; c++) {\n ${b.indicesSet('b_indices', '0', `col * ${components} + c`)};\n var block_offset: u32 = 0;\n for (var block: u32 = 0; block < ${nBlocksPerCol}; block++) {\n // The scale and zero points are computed per block.\n let scale = ${scales.getByOffset('scale_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? 'extractBits(zero_point_word, zero_point_offset, 4)' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block_offset;\n ${processOneBlock}\n scale_index++;\n ${updateZeroPointIndex}\n block_offset += uniforms.block_size / ${aComponents};\n }\n // Drop the trailing 4 bits if the zero_poit_offset is not a byte boundary to align with the next byte.\n ${\n zeroPoints ? `if (zero_point_offset % 8 > 0) {\n ${updateZeroPointIndex}\n }` :\n ''}\n }\n for (var k: u32 = 0u; k < ${outputNumber}u; k++) {\n ${output.indicesSet('output_indices', outputRank - 2, `${outputNumber} * row + k`)};\n ${output.setByIndices('output_indices', 'output_values[k]')}\n }\n }`;\n };\n return {\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n shaderCache: {\n hint: `${attributes.cacheKey};${dimAOuter};${dataType};${inputs.length}`,\n inputDependencies: Array(inputs.length).fill('rank')\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n dispatchGroup: useBlockwiseMatMulNBits ? {x: 1, y: Math.ceil(dimBOuter / components), z: batchSize} :\n {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nexport const matMulNBits = (context: ComputeContext, attributes: MatMulNBitsAttributes): void => {\n validateInputs(context.inputs, attributes);\n const maxComputeWorkgroupSizes: [number, number, number] = context.getMaxComputeWorkgroupSizes();\n const maxComputeWorkgroupStorageSize = context.getMaxComputeWorkgroupStoragesize();\n context.compute(createMatMulNBitsProgramInfo(\n context.inputs, attributes, maxComputeWorkgroupSizes, maxComputeWorkgroupStorageSize));\n};\n\nexport const parseMatMulNBitsAttributes = (attributes: Record): MatMulNBitsAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\ninterface PadAttributes {\n // 0-constant, 1-reflect, 2-edge, 3-wrap\n readonly mode: number;\n readonly value: number;\n readonly pads: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('Too few inputs');\n }\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16) {\n throw new Error('Input type must be float or float16.');\n }\n\n if (inputs.length >= 2) {\n let validPads = inputs[0].dims.length * 2 === inputs[1].dims[0];\n if (inputs.length === 4) {\n validPads = inputs[3].dims[0] * 2 === inputs[1].dims[0];\n }\n if (!validPads) {\n throw new Error('The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].');\n }\n }\n};\n\nconst getPadConstant = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n break;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n break;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n value = ${output.type.value}(uniforms.constant_value);\n for (var i = 0; i < 1; i++) {\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n }\n `;\n};\n\nconst getPadReflect = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = -k;\n }\n {\n let _2n_1 = 2 * (i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1);\n k = k % _2n_1;\n if(k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = _2n_1 - k;\n }\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadEdge = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = 0;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadWrap = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k += i32(${getElementAt('uniforms.x_shape', i, inputRank)}]);\n }\n if (k >= 
i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k -= i32(${getElementAt('uniforms.x_shape', i, inputRank)});\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadSnippet = (output: IndicesHelper, inputRank: number, attributes: PadAttributes): string => {\n switch (attributes.mode) {\n case 0:\n return getPadConstant(output, inputRank, attributes.pads.length);\n case 1:\n return getPadReflect(output, inputRank, attributes.pads.length);\n case 2:\n return getPadEdge(output, inputRank, attributes.pads.length);\n case 3:\n return getPadWrap(output, inputRank, attributes.pads.length);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst createPadProgramInfo = (inputs: readonly TensorView[], attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(inputs[0].dims.slice(), attributes.pads);\n const inputDims = inputs[0].dims;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: attributes.pads}];\n if (attributes.mode === 0) {\n programUniforms.push({type: inputs[0].dataType, data: attributes.value});\n }\n\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('x', inputs[0].dataType, inputDims.length);\n const dataType = input.type.value;\n const padSnippet = getPadSnippet(output, inputDims.length, attributes);\n const uniforms: UniformsArrayType =\n [{name: 'output_size', type: 'u32'}, {name: 'pads', type: 'i32', length: attributes.pads.length}];\n if (attributes.mode === 0) {\n uniforms.push({name: 'constant_value', type: dataType as UniformDataElementType});\n }\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(0);\n ${padSnippet}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Pad',\n shaderCache: {hint: `${attributes.mode}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nconst createPadAttributesFromInputs = (inputs: readonly TensorView[], attributes: PadAttributes): PadAttributes => {\n if (inputs.length > 1) {\n const bigInt64Pads = inputs[1].getBigInt64Array();\n const value = (inputs.length >= 3 && inputs[2].data) ? 
inputs[2].getFloat32Array()[0] : 0.0;\n\n const inputRank = inputs[0].dims.length;\n const updatePads = new Int32Array(2 * inputRank).fill(0);\n if (inputs.length >= 4) {\n const axes = inputs[3].getBigInt64Array();\n for (let i = 0; i < axes.length; i++) {\n updatePads[Number(axes[i])] = Number(bigInt64Pads[i]);\n updatePads[Number(axes[i]) + inputRank] = Number(bigInt64Pads[i + axes.length]);\n }\n } else {\n bigInt64Pads.forEach((v, i) => updatePads[Number(i)] = (Number(v)));\n }\n\n const pads: number[] = [];\n updatePads.forEach(v => pads.push(v));\n\n return {mode: attributes.mode, value, pads};\n } else {\n return attributes;\n }\n};\n\nexport const pad = (context: ComputeContext, attributes: PadAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes = createPadAttributesFromInputs(context.inputs, attributes);\n context.compute(createPadProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\n// TODO: support:\n// - ceil_mode \"test_maxpool_2d_ceil\"\n// - storage_order \"test_maxpool_with_argmax_2d_precomputed_strides\"\n// - [MaxPool] dilations \"test_maxpool_2d_dilations\"\n// - [MaxPool] output[1] \"test_maxpool_with_argmax_2d_precomputed_pads\"\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (env.webgpu.validateInputContent && (!inputs || inputs.length !== 1)) {\n throw new Error('Pool ops requires 1 input.');\n }\n};\n\nconst getAdjustedPoolAttributesAndOutputShape = (\n input: TensorView, attributes: AttributeType, isGlobalOperator: boolean): [AttributeType, number[]] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inputShapeAsChannelFirst = input.dims.slice();\n if (isChannelsLast) {\n inputShapeAsChannelFirst.splice(1, 0, inputShapeAsChannelFirst.pop()!); // Move channel to the second position.\n }\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? (attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShapeAsChannelFirst, kernelShape, strides, dilations, pads);\n\n const outputShapeAsChannelFirst = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShapeAsChannelFirst, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n const outputShapeAsChannelLast = outputShapeAsChannelFirst.slice();\n outputShapeAsChannelLast.push(outputShapeAsChannelLast.splice(1, 1)[0]);\n return [newAttributes, isChannelsLast ? 
outputShapeAsChannelLast : outputShapeAsChannelFirst];\n};\n\nconst getUniformAndPadInfo = (\n outputShape: readonly number[],\n attributes: AttributeType): [ProgramUniform[], UniformsArrayType, boolean, boolean, boolean] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const outputSize = ShapeUtil.size(outputShape);\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: kernelSize}];\n const uniforms: UniformsArrayType = [{name: 'outputSize', type: 'u32'}, {name: 'kernelSize', type: 'u32'}];\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const pwStartEndNotZero = !!(pwStart + pwEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kw},\n {type: DataType.uint32, data: sw},\n {type: DataType.uint32, data: pwStart},\n {type: DataType.uint32, data: pwEnd},\n );\n uniforms.push(\n {name: 'kw', type: 'u32'}, {name: 'sw', type: 'u32'}, {name: 'pwStart', type: 'u32'},\n {name: 'pwEnd', type: 'u32'});\n\n let phStartEndNotZero = false;\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n phStartEndNotZero = !!(phStart + phEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kh}, {type: DataType.uint32, data: sh}, {type: DataType.uint32, data: phStart},\n {type: DataType.uint32, data: phEnd});\n\n uniforms.push(\n {name: 'kh', type: 'u32'}, {name: 'sh', type: 'u32'}, {name: 'phStart', type: 'u32'},\n {name: 'phEnd', type: 'u32'});\n }\n return [programUniforms, uniforms, true, pwStartEndNotZero, phStartEndNotZero];\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n programUniforms.push(\n {type: DataType.uint32, data: kernelStrides}, {type: DataType.uint32, data: attributes.pads},\n {type: DataType.uint32, data: attributes.strides});\n uniforms.push(\n {name: 'kernelStrides', type: 'u32', length: kernelStrides.length},\n {name: 'pads', type: 'u32', length: attributes.pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length});\n\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n return [programUniforms, uniforms, !!hasPads, false, false];\n }\n};\n\nconst generatePoolingCode = (\n shaderHelper: ShaderHelper, x: IndicesHelper, rank: number, outputShapeRank: number, attributes: AttributeType,\n op1: string, op2: string, start: number, uniforms: UniformsArrayType, hasPads: boolean, pwStartEndNotZero: boolean,\n phStartEndNotZero: boolean): string => {\n const isChannelsLast = attributes.format === 'NHWC';\n const dataType = x.type.value;\n const output = outputVariable('output', x.type.tensor, outputShapeRank);\n\n if (attributes.kernelShape.length <= 2) {\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n const dimIdxW = rank - (isChannelsLast ? 
2 : 1);\n if (pwStartEndNotZero) {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n if (xIndices[${dimIdxW}] < 0 || xIndices[${dimIdxW}]\n >= uniforms.x_shape[${dimIdxW}]) {\n pad++;\n continue;\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const dimIdxH = rank - (isChannelsLast ? 3 : 2);\n if (phStartEndNotZero) {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n if (xIndices[${dimIdxH}] < 0 || xIndices[${dimIdxH}] >= uniforms.x_shape[${dimIdxH}]) {\n pad += i32(uniforms.kw);\n continue;\n }\n `;\n } else {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(${start});\n var pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const stridesRank = attributes.kernelShape.length;\n const padsRank = attributes.pads.length;\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (xIndices[j] >= uniforms.x_shape[j]) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n padCode = `\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n `;\n }\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var offsets: array;\n\n var value = ${dataType}(${start});\n var pad = 0;\n var isPad = false;\n\n for (var i: u32 = 0u; i < uniforms.kernelSize; i++) {\n var offset = i;\n for (var j = 0u; j < ${stridesRank - 1}u; j++) {\n offsets[j] = offset / ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n offset -= offsets[j] * ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n }\n offsets[${stridesRank - 1}] = offset;\n\n isPad = false;\n for (var j = ${rank - stridesRank}u; j < ${rank}u; j++) {\n xIndices[j] = indices[j] * ${\n getElementAt('uniforms.strides', `j - ${rank - stridesRank}u`, stridesRank)}\n + offsets[j - ${rank - stridesRank}u] - ${getElementAt('uniforms.pads', 'j - 2u', padsRank)};\n ${padCode}\n }\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n }\n};\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface PoolCommonAttributes extends FormatAttributes {\n readonly autoPad: string;\n readonly ceilMode: 
number;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nconst createShaderKeyFromAttributes = (attributes: PoolCommonAttributes): string =>\n (`${attributes.format};${attributes.ceilMode};${attributes.autoPad};${attributes.kernelShape.length}`);\n\nconst createAveragePoolShaderKeyFromAttributes = (attributes: AveragePoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.countIncludePad}`);\n\nconst createMaxPoolShaderKeyFromAttributes = (attributes: MaxPoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.storageOrder};${attributes.dilations}`);\n\nconst parsePoolCommonAttributes = (attributes: Record): PoolCommonAttributes => ({\n format: attributes.format as FormatAttributes['format'],\n autoPad: ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number],\n ceilMode: attributes.ceil_mode as number,\n kernelShape: attributes.kernel_shape as [number, number],\n strides: attributes.strides as [number, number],\n pads: attributes.pads as [number, number, number, number]\n});\n\nexport interface AveragePoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly countIncludePad: boolean;\n}\n\nconst createAveragePoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: AveragePoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const x = inputVariable('x', input.dataType, input.dims.length);\n const dataType = x.type.value;\n\n const op1 = 'value += x_val;';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= ${dataType}(uniforms.kernelSize);`;\n } else {\n op2 += `value /= ${dataType}(i32(uniforms.kernelSize) - pad);`;\n }\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2, 0.0, uniforms,\n hasPads, pwStartEndNotZero, phStartEndNotZero),\n };\n };\n\nexport const parseAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const countIncludePad = (attributes.count_include_pad as number) === 0 ? 
false : true;\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode'\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n const averagePoolAttributes = {countIncludePad, ...attr, cacheKey: ''};\n return {...averagePoolAttributes, cacheKey: createAveragePoolShaderKeyFromAttributes(averagePoolAttributes)};\n};\n\nexport const averagePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('AveragePool', context.inputs[0], false, attributes));\n};\n\nconst globalPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: []\n};\n\nexport const parseGlobalAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalAveragePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('GlobalAveragePool', context.inputs[0], true, attributes));\n};\n\nexport interface MaxPoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nconst createMaxPoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: MaxPoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const op1 = `\n value = max(x_val, value);\n `;\n const op2 = '';\n const x = inputVariable('x', input.dataType, input.dims.length);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2,\n (input.dataType === DataType.float16) ? 
-65504 : -1e5, uniforms, hasPads, pwStartEndNotZero,\n phStartEndNotZero),\n };\n };\n\nexport const maxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('MaxPool', context.inputs[0], false, attributes));\n};\n\nexport const parseMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const storageOrder = attributes.storage_order as number;\n const dilations = attributes.dilations as [number, number];\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n const maxPoolAttributes = {storageOrder, dilations, ...attr, cacheKey: ''};\n return {...maxPoolAttributes, cacheKey: createMaxPoolShaderKeyFromAttributes(maxPoolAttributes)};\n};\n\nexport const parseGlobalMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalMaxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('GlobalMaxPool', context.inputs[0], true, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\nconst validateInputsContent = (start: number, limit: number, delta: number): void => {\n const sameStartLimit = start === limit;\n const increasingRangeNegativeStep = start < limit && delta < 0;\n const decreasingRangePositiveStep = start > limit && delta > 0;\n\n if (sameStartLimit || increasingRangeNegativeStep || decreasingRangePositiveStep) {\n throw new Error('Range these inputs\\' contents are invalid.');\n }\n};\n\nconst createRangeProgramInfo = (start: number, limit: number, delta: number, dataType: DataType): ProgramInfo => {\n const numElements = Math.abs(Math.ceil((limit - start) / delta));\n const outputShape: number[] = [numElements];\n const outputSize = numElements;\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: dataType, data: start}, {type: dataType, data: delta},\n ...createTensorShapeVariables(outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', dataType, outputShape.length);\n const wgslType = output.type.value;\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'start', type: wgslType as UniformDataElementType},\n {name: 'delta', type: wgslType as UniformDataElementType}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n output[global_idx] = uniforms.start + ${wgslType}(global_idx) * uniforms.delta;\n }`;\n };\n\n return {\n name: 'Range',\n shaderCache: {hint: `${dataType}`},\n getShaderSource,\n 
getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const range = (context: ComputeContext): void => {\n let start = 0;\n let limit = 0;\n let delta = 0;\n if (context.inputs[0].dataType === DataType.int32) {\n start = context.inputs[0].getInt32Array()[0];\n limit = context.inputs[1].getInt32Array()[0];\n delta = context.inputs[2].getInt32Array()[0];\n } else if (context.inputs[0].dataType === DataType.float) {\n start = context.inputs[0].getFloat32Array()[0];\n limit = context.inputs[1].getFloat32Array()[0];\n delta = context.inputs[2].getFloat32Array()[0];\n }\n if (env.webgpu.validateInputContent) {\n validateInputsContent(start, limit, delta);\n }\n\n context.compute(createRangeProgramInfo(start, limit, delta, context.inputs[0].dataType), {inputs: []});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype CoordinateTransformMode = 'half_pixel'|'asymmetric'|'pytorch_half_pixel'|'tf_half_pixel_for_nn'|'align_corners'|\n 'tf_crop_and_resize'|'half_pixel_symmetric';\n\ntype KeepAspectRatioPolicy = 'stretch'|'not_smaller'|'not_larger';\n\ntype Mode = 'nearest'|'linear'|'cubic';\n\ntype NearestMode = 'round_prefer_floor'|'round_prefer_ceil'|'floor'|'ceil'|'simple';\n\nexport interface ResizeAttributes extends AttributeWithCacheKey {\n antialias: number;\n axes: number[];\n coordinateTransformMode: CoordinateTransformMode;\n cubicCoeffA: number;\n excludeOutside: boolean;\n extrapolationValue: number;\n keepAspectRatioPolicy: KeepAspectRatioPolicy;\n mode: Mode;\n nearestMode: NearestMode;\n}\n\nconst validateScales = (scales: number[], attributes: ResizeAttributes): void => {\n scales.every((value) => value > 0 || (() => {\n throw new Error('Resize requires scales input values to be positive');\n }));\n // Check scales dims based on mode: LINEAR, CUBIC\n if (scales.length > 0) {\n if (attributes.mode === 'linear') {\n if (!(scales.length === 2 || scales.length === 3 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1) ||\n (scales.length === 5 && scales[0] === 1 && scales[1] === 1))) {\n throw new Error(\n `For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and\n one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`);\n }\n } else if (attributes.mode === 'cubic') {\n if (!(scales.length === 2 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1))) {\n throw new Error('Resize requires scales input size to be 2 or 4 for cubic mode');\n }\n }\n }\n};\n\nconst updateScales = (scales: readonly number[], axes: readonly number[], rank: number): number[] => {\n axes.every((value) => value >= 0 && value < rank || (() => {\n throw new Error('Resize requires axes input values to be positive and less than rank');\n }));\n const newScales = new Array(rank).fill(1.0);\n 
axes.forEach((value, index) => newScales[value] = scales[index]);\n return newScales;\n};\n\nconst validateInputs =\n (inputs: readonly TensorView[], attributes: ResizeAttributes, opsetVersion: number, scales: number[],\n sizes: number[], roi: number[]): void => {\n const [roiInputIndex, scalesInputIndex, sizesInputIndex] =\n (opsetVersion > 10) ? [1, 2, 3] : [-1, (inputs.length > 1) ? 1 : -1, -1];\n const rank = inputs[0].dims.length;\n if (roiInputIndex > 0 && inputs.length > roiInputIndex && inputs[roiInputIndex].dims.length > 0) {\n inputs[roiInputIndex].getFloat32Array().forEach((value) => roi.push(value));\n } else if (attributes.coordinateTransformMode === 'tf_crop_and_resize') {\n throw new Error('Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize');\n }\n\n if (scalesInputIndex > 0 && inputs.length > scalesInputIndex && inputs[scalesInputIndex].dims.length > 0) {\n inputs[scalesInputIndex].getFloat32Array().forEach((value) => scales.push(value));\n if (scales.length !== 0 &&\n (scales.length !== rank && (opsetVersion >= 18 && scales.length !== attributes.axes.length))) {\n throw new Error(\n 'Resize requires scales input size to be same as input rank or axes size for opset 18 and up');\n }\n validateScales(scales, attributes);\n if (attributes.axes.length > 0) {\n updateScales(scales, attributes.axes, rank).forEach((value, index) => scales[index] = value);\n }\n }\n if (sizesInputIndex > 0 && inputs.length > sizesInputIndex) {\n inputs[sizesInputIndex].getBigInt64Array().forEach((value) => sizes.push(Number(value)));\n if (sizes.length !== rank || (opsetVersion >= 18 && sizes.length === attributes.axes.length)) {\n throw new Error('Resize requires sizes input size to be same as input rank or axes size for opset 18 and up');\n }\n }\n\n if (attributes.axes.length > 0) {\n if (scales.length !== attributes.axes.length) {\n throw new Error('Resize requires \"scales\" input size to be of axes rank when axes attributes is specified');\n }\n if (sizes.length !== attributes.axes.length) {\n throw new Error(\n 'Resize requires \"sizes\" input size to be of rank axes rank when axes attributes is specified');\n }\n }\n if (typeof scales !== 'undefined' && typeof sizes !== 'undefined' && scales.length > 0 && sizes.length > rank) {\n throw new Error('Resize requires only of scales or sizes to be specified');\n }\n };\n\nconst getOriginalCoordinateFromResizedCoordinate =\n (coordinateTransferMode: CoordinateTransformMode, dType: string): string =>\n `fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32,\n lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${dType} { ` +\n (() => {\n switch (coordinateTransferMode) {\n case 'asymmetric':\n return `return ${dType}(xResized) / ${dType}(xScale);`;\n case 'pytorch_half_pixel':\n return `if (lengthResized > 1) {\n return (${dType}(xResized) + 0.5) / ${dType}(xScale) - 0.5;\n } else {\n return 0.0;\n }`;\n case 'tf_half_pixel_for_nn':\n return `return (${dType}(xResized) + 0.5) / ${dType}(xScale);`;\n case 'align_corners':\n return `if (lengthResized == 1) {\n return 0.0;\n } else {\n // The whole part and the fractional part are calculated separately due to inaccuracy of floating\n // point division. As an example, f32(21) / f32(7) may evaluate to 2.99... 
instead of 3, causing an\n // offset-by-one error later in floor().\n let whole = ${dType}(xResized * (lengthOriginal - 1) / (lengthResized - 1));\n let fract =\n ${dType}(xResized * (lengthOriginal - 1) % (lengthResized - 1)) / ${dType}(lengthResized - 1);\n return whole + fract;\n }`;\n case 'tf_crop_and_resize':\n return `if (lengthResized > 1) {\n return ${dType}(roiStart) * ${dType}(lengthOriginal - 1) +\n (${dType}(xResized) * ${dType}(roiEnd - roiStart) * ${dType}(lengthOriginal - 1)) /\n ${dType}(lengthResized - 1);\n } else {\n return 0.5 * ${dType}(roiStart + roiEnd) * ${dType}(lengthOriginal - 1);\n }`;\n case 'half_pixel_symmetric':\n return `const outputWidth = ${dType}xScale * ${dType}(lengthResized);\n const adjustment = ${dType}(lengthResized) / outputWidth;\n const center = ${dType}(lengthOriginal) / 2;\n const offset = center * (1 - adjustment);\n return offset + ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n case 'half_pixel':\n return `return ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n default:\n throw new Error(`Coordinate transform mode ${coordinateTransferMode} is not supported`);\n }\n })() +\n '}';\n\nconst getNearestPixelFromOriginal = (nearestMode: NearestMode, opsetVersion: number, dType: string): string =>\n `fn getNearestPixelFromOriginal(xOriginal: ${dType}, isDownSample: bool) -> ${dType} {` + (() => {\n switch (nearestMode) {\n case 'round_prefer_ceil':\n return 'if (fract(xOriginal) == 0.5) { \\\n return ceil(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'floor':\n return 'return floor(xOriginal);';\n case 'ceil':\n return 'return ceil(xOriginal);';\n case 'round_prefer_floor':\n return 'if (fract(xOriginal) == 0.5) { \\\n return floor(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'simple':\n default:\n if (opsetVersion < 11) {\n return 'if (isDownSample) \\\n { \\\n return ceil(xOriginal); \\\n } else { \\\n return xOriginal; \\\n }';\n }\n throw new Error(`Nearest mode ${nearestMode} is not supported`);\n }\n })() +\n '}';\n\nconst updateRoI = (roi: readonly number[], axes: readonly number[], rank: number): number[] => {\n const roiTmp = new Array(rank).fill(0).concat(new Array(rank).fill(1));\n const roiLocal = roi.length === 0 ? roiTmp : roi.slice();\n if (axes.length > 0) {\n axes.forEach((v, i) => {\n roiTmp[v] = roiLocal[i];\n roiTmp[i + rank] = roiLocal[axes.length + i];\n });\n return roiTmp;\n }\n return roiLocal;\n};\n\nconst initOutputShape =\n (inputShape: readonly number[], scales: readonly number[], sizes: readonly number[], axes: readonly number[]):\n number[] => {\n let outputShape: number[] = [];\n if (sizes.length > 0) {\n if (axes.length > 0) {\n inputShape.forEach((v) => outputShape.push(v));\n if (Math.max(...axes) > inputShape.length) {\n throw new Error('axes is out of bound');\n }\n axes.forEach((v, i) => outputShape[v] = sizes[i]);\n } else {\n sizes.forEach((v) => outputShape.push(v));\n }\n } else {\n if (scales.length === 0) {\n throw new Error('Resize requires either scales or sizes.');\n } else {\n outputShape = inputShape.map((value, index) => Math.round(value * scales[index]));\n }\n }\n return outputShape;\n };\n\nconst adjustOutputShape = (inputShape: readonly number[], scales: number[], attributes: ResizeAttributes) => {\n const scaleInPolicy = (() => {\n switch (attributes.keepAspectRatioPolicy) {\n case 'not_larger':\n return attributes.axes.length > 0 ? 
Math.min(...attributes.axes.map(i => scales[i]), Number.MAX_VALUE) :\n Math.min(...scales, Number.MAX_VALUE);\n case 'not_smaller':\n return attributes.axes.length > 0 ? Math.max(...attributes.axes.map(i => scales[i]), Number.MIN_VALUE) :\n Math.max(...scales, Number.MIN_VALUE);\n default:\n throw new Error(`Keep aspect ratio policy ${attributes.keepAspectRatioPolicy} is not supported`);\n }\n })();\n scales.fill(1.0, 0, scales.length);\n const adjustedOutputShape = inputShape.slice();\n if (attributes.axes.length > 0) {\n attributes.axes.forEach((v) => scales[v] = scaleInPolicy);\n attributes.axes.forEach((v) => adjustedOutputShape[v] = Math.round(inputShape[v] * scales[v]));\n } else {\n scales.fill(scaleInPolicy, 0, scales.length);\n adjustedOutputShape.forEach((v, i) => adjustedOutputShape[i] = Math.round(v * scales[i]));\n }\n return adjustedOutputShape;\n};\n\nconst calculateOriginalIndicesFromOutputIndices =\n (output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[], scalesLength: number,\n roiLength: number): string => `\n fn calculateOriginalIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> array<${\n output.type.value}, ${outputShape.length}> {\n var original_indices: array<${output.type.value}, ${outputShape.length}>;\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n if (scale == 1.0) {\n original_indices[i] = ${output.type.value}(output_index);\n } else {\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n original_indices[i] = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n }\n }\n return original_indices;\n }`;\n\nconst calculateInputIndicesFromOutputIndices =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scalesLength: number, roiLength: number, useExtrapolation: boolean): string => `\n fn calculateInputIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var input_index: u32;\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n if (scale == 1.0) {\n input_index = output_index;\n } else {\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n var original_idx = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n if (!${useExtrapolation} || (original_idx >= 0 && original_idx < ${output.type.value}(input_shape_i))) {\n if (original_idx < 0) {\n input_index = 0;\n } else if (original_idx > ${output.type.value}(input_shape_i - 1)) {\n input_index = input_shape_i - 1;\n } else {\n input_index = 
u32(getNearestPixelFromOriginal(original_idx, scale < 1));\n }\n } else {\n input_index = u32(original_idx);\n }\n }\n ${input.indicesSet('input_indices', 'i', ' input_index')}\n }\n return input_indices;\n }`;\nconst checkInputIndices = (input: IndicesHelper, inputShape: readonly number[]): string => `\n fn checkInputIndices(input_indices: ${input.type.indices}) -> bool {\n for (var i:u32 = 0; i < ${inputShape.length}; i++) {\n var input_index = ${input.indicesGet('input_indices', 'i')};\n if (input_index < 0 || input_index >= ${getElementAt('uniforms.input_shape', 'i', inputShape.length)}) {\n return false;\n }\n }\n return true;\n }`;\n\nconst setChannelAndBatchIndices =\n (input: IndicesHelper, channelIdx: number, batchIdx: number, spacialDims: number): string =>\n input.rank > spacialDims ? `\n ${input.indicesSet('input_indices', channelIdx, 'channel')};\n ${input.indicesSet('input_indices', batchIdx, 'batch')};\n` :\n '';\n\nconst bilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 2 ? [-1, 0, 1, -1] : (isNchw ? [0, 2, 3, 1] : [0, 1, 2, 3]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(row, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(col, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 2)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn bilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var row:${dType} = originalIndices[${heightIdx}];\n var col:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ?\n `if (row < 0 || row > (${inputShape[heightIdx]} - 1) || col < 0 || col > (${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n row = max(0, min(row, ${inputShape[heightIdx]} - 1));\n col = max(0, min(col, ${inputShape[widthIdx]} - 1));\n var row1: u32 = u32(row);\n var col1: u32 = u32(col);\n var row2: u32 = u32(row + 1);\n var col2: u32 = u32(col + 1);\n var channel: u32 = ${inputShape.length > 2 ? `u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 2 ? 
`u32(originalIndices[${batchIdx}])` : '0'};\n var x11: ${dType} = getInputValue(batch, channel, row1, col1);\n var x12: ${dType} = getInputValue(batch, channel, row1, col2);\n var x21: ${dType} = getInputValue(batch, channel, row2, col1);\n var x22: ${dType} = getInputValue(batch, channel, row2, col2);\n var dx1: ${dType} = abs(row - ${dType}(row1));\n var dx2: ${dType} = abs(${dType}(row2) - row);\n var dy1: ${dType} = abs(col - ${dType}(col1));\n var dy2: ${dType} = abs(${dType}(col2) - col);\n if (row1 == row2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (col1 == col2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1);\n }`;\n };\n\nconst bicubicInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scales: readonly number[], roi: readonly number[], cubicCoeffA: number, useExtrapolation: boolean,\n extrapolationValue: number, excludeOutside: boolean): string => {\n const is2D = inputShape.length === 2;\n const isNchw = true;\n const [heightIdx, widthIdx] = is2D ? [0, 1] : isNchw ? [2, 3] : [1, 2];\n const dType = input.type.value;\n const createCubicInterpolationFunction = (idx: number): string => {\n const direction = idx === heightIdx ? 'row' : 'col';\n return `\n fn ${direction}CubicInterpolation(input_indices: ${input.type.indices}, output_indices: ${\n output.type.indices}) -> ${dType} {\n var output_index = ${output.indicesGet('output_indices', idx)};\n var originalIdx: ${dType} = getOriginalCoordinateFromResizedCoordinate(output_index, ${scales[idx]},\n ${outputShape[idx]}, ${inputShape[idx]}, ${roi[idx]}, ${roi[idx]} + ${inputShape.length});\n var fractOriginalIdx: ${dType} = originalIdx - floor(originalIdx);\n var coefs = getCubicInterpolationCoefs(fractOriginalIdx);\n\n if (${useExtrapolation} && (originalIdx < 0 || originalIdx > (${inputShape[idx]} - 1))) {\n return ${extrapolationValue};\n }\n var data: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n for (var i: i32 = -1; i < 3; i++) {\n var ${direction}: ${dType} = originalIdx + ${dType}(i);\n if (${direction} < 0 || ${direction} >= ${inputShape[idx]}) {\n ${(() => {\n if (excludeOutside) {\n return `coefs[i + 1] = 0.0;\n continue;`;\n } else if (useExtrapolation) {\n return `return ${extrapolationValue};`;\n } else {\n return `${direction} = max(0, min(${direction}, ${inputShape[idx]} - 1));`;\n }\n })()};\n }\n var input_indices_copy: ${input.type.indices} = input_indices;\n ${input.indicesSet('input_indices_copy', idx, `u32(${direction})`)};\n data[i + 1] = ${\n idx === heightIdx ? 
input.getByIndices('input_indices_copy') :\n 'rowCubicInterpolation(input_indices_copy, output_indices)'};\n }\n return cubicInterpolation1D(data, coefs);\n }`;\n };\n\n return `\n ${createCubicInterpolationFunction(heightIdx)};\n ${createCubicInterpolationFunction(widthIdx)};\n fn getCubicInterpolationCoefs(s: ${dType}) -> array<${dType}, 4> {\n var absS = abs(s);\n var coeffs: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n var oneMinusAbsS: ${dType} = 1.0 - absS;\n var twoMinusAbsS: ${dType} = 2.0 - absS;\n var onePlusAbsS: ${dType} = 1.0 + absS;\n coeffs[0] = ((${cubicCoeffA} * onePlusAbsS - 5 * ${cubicCoeffA}) * onePlusAbsS + 8 * ${\n cubicCoeffA}) * onePlusAbsS - 4 * ${cubicCoeffA};\n coeffs[1] = ((${cubicCoeffA} + 2) * absS - (${cubicCoeffA} + 3)) * absS * absS + 1;\n coeffs[2] = ((${cubicCoeffA} + 2) * oneMinusAbsS - (${cubicCoeffA} + 3)) * oneMinusAbsS * oneMinusAbsS + 1;\n coeffs[3] = ((${cubicCoeffA} * twoMinusAbsS - 5 * ${cubicCoeffA}) * twoMinusAbsS + 8 * ${\n cubicCoeffA}) * twoMinusAbsS - 4 * ${cubicCoeffA};\n return coeffs;\n }\n\n fn cubicInterpolation1D(x: array<${dType}, 4>, coefs: array<${dType}, 4>) -> ${dType} {\n var coefsSum: ${dType} = coefs[0] + coefs[1] + coefs[2] + coefs[3];\n return (x[0] * coefs[0] + x[1] * coefs[1]+ x[2] * coefs[2]+ x[3] * coefs[3]) / coefsSum;\n }\n\n fn bicubicInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var input_indices: ${input.type.indices} = output_indices;\n return colCubicInterpolation(input_indices, output_indices);\n }\n `;\n };\n\nconst trilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, depthIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 3 ? [-1, 0, 1, 2, -1] : (isNchw ? [0, 2, 3, 4, 1] : [0, 1, 2, 3, 4]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', depthIdx, `max(0, min(depth, ${inputShape[depthIdx]} - 1))`)};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(height, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(width, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 3)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn trilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var depth:${dType} = originalIndices[${depthIdx}];\n var height:${dType} = originalIndices[${heightIdx}];\n var width:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ? `if (depth < 0 || depth > (${inputShape[depthIdx]} - 1) || height < 0 || height > (${\n inputShape[heightIdx]} - 1) || width < 0 || (width > ${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n\n depth = max(0, min(depth, ${inputShape[depthIdx]} - 1));\n height = max(0, min(height, ${inputShape[heightIdx]} - 1));\n width = max(0, min(width, ${inputShape[widthIdx]} - 1));\n var depth1: u32 = u32(depth);\n var height1: u32 = u32(height);\n var width1: u32 = u32(width);\n var depth2: u32 = u32(depth + 1);\n var height2: u32 = u32(height + 1);\n var width2: u32 = u32(width + 1);\n var channel: u32 = ${inputShape.length > 3 ? 
`u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 3 ? `u32(originalIndices[${batchIdx}])` : '0'};\n\n var x111: ${dType} = getInputValue(batch, channel, depth1, height1, width1);\n var x112: ${dType} = getInputValue(batch, channel, depth1, height1, width2);\n var x121: ${dType} = getInputValue(batch, channel, depth1, height2, width1);\n var x122: ${dType} = getInputValue(batch, channel, depth1, height2, width2);\n var x211: ${dType} = getInputValue(batch, channel, depth2, height1, width1);\n var x212: ${dType} = getInputValue(batch, channel, depth2, height1, width2);\n var x221: ${dType} = getInputValue(batch, channel, depth2, height2, width1);\n var x222: ${dType} = getInputValue(batch, channel, depth2, height2, width2);\n var dx1: ${dType} = abs(depth - ${dType}(depth1));\n var dx2: ${dType} = abs(${dType}(depth2) - depth);\n var dy1: ${dType} = abs(height - ${dType}(height1));\n var dy2: ${dType} = abs(${dType}(height2) - height);\n var dz1: ${dType} = abs(width - ${dType}(width1));\n var dz2: ${dType} = abs(${dType}(width2) - width);\n if (depth1 == depth2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (height1 == height2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n if (width1 == width2) {\n dz1 = 0.5;\n dz2 = 0.5;\n }\n return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 +\n x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1);\n }`;\n };\n\nconst createResizeProgramInfo =\n (inputTensor: TensorView, attributes: ResizeAttributes, opsetVersion: number, scalesInput: readonly number[],\n sizes: readonly number[], roiInput: readonly number[]): ProgramInfo => {\n const inputShape = inputTensor.dims;\n const roi = updateRoI(roiInput, attributes.axes, inputShape.length);\n\n let outputShape = initOutputShape(inputShape, scalesInput, sizes, attributes.axes);\n let scales = scalesInput.slice();\n if (scalesInput.length === 0) {\n scales = inputShape.map((value, index) => value === 0 ? 1.0 : outputShape[index] / value);\n if (attributes.keepAspectRatioPolicy !== 'stretch') {\n outputShape = adjustOutputShape(inputShape, scales, attributes);\n }\n }\n const output = outputVariable('output', inputTensor.dataType, outputShape.length);\n const input = inputVariable('input', inputTensor.dataType, inputShape.length);\n const outputSize = ShapeUtil.size(outputShape);\n const noScale = inputShape.length === outputShape.length && inputShape.every((d, i) => d === outputShape[i]);\n const useExtrapolation = attributes.coordinateTransformMode === 'tf_crop_and_resize';\n const extrapolationValue = attributes.extrapolationValue;\n const dataType = input.type.value;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${noScale ? 
'' : `\n ${getOriginalCoordinateFromResizedCoordinate(attributes.coordinateTransformMode, dataType)};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `\n ${checkInputIndices(input, inputShape)};\n ${getNearestPixelFromOriginal(attributes.nearestMode, opsetVersion, dataType)};\n ${\n calculateInputIndicesFromOutputIndices(\n input, output, inputShape, outputShape, scales.length, roi.length, useExtrapolation)};\n `;\n case 'linear':\n return `\n ${calculateOriginalIndicesFromOutputIndices(output, inputShape, outputShape, scales.length, roi.length)};\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${bilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else if (inputShape.length === 3 || inputShape.length === 5) {\n return `${trilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else {\n throw Error('Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.');\n }\n })()};\n `;\n case 'cubic':\n return `\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${\n bicubicInterpolation(\n input, output, inputShape, outputShape, scales, roi, attributes.cubicCoeffA, useExtrapolation,\n attributes.extrapolationValue, attributes.excludeOutside)}`;\n } else {\n throw Error('Cubic mode only supports input dims 2 and 4 are supported in linear mode.');\n }\n })()};\n `;\n default:\n throw Error('Invalid resize mode');\n }\n })()};\n `}\n ${\n shaderHelper.registerUniform('output_size', 'u32')\n .registerUniform('scales', 'f32', scales.length)\n .registerUniform('roi', 'f32', roi.length)\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n ${noScale ? 'output[global_idx] = input[global_idx];' : `\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `input_indices = calculateInputIndicesFromOutputIndices(output_indices);\n if (checkInputIndices(input_indices)) {\n output[global_idx] = ${input.getByIndices('input_indices')};\n } else {\n output[global_idx] = ${attributes.extrapolationValue};\n }`;\n case 'linear':\n return `output[global_idx] = ${\n (inputShape.length === 2 || inputShape.length === 4) ? 'bilinearInterpolation' :\n 'trilinearInterpolation'}(output_indices);`;\n case 'cubic':\n return 'output[global_idx] = bicubicInterpolation(output_indices);';\n default:\n throw Error(`Unsupported resize mode: ${attributes.mode}`);\n }\n })()};\n`}\n }`;\n\n return {\n name: 'Resize',\n shaderCache: {\n hint: `${attributes.cacheKey}|${opsetVersion}|${scales.length > 0 ? scales : ''}|${\n sizes.length > 0 ? sizes : ''}|${roi.length > 0 ? 
roi : ''}|${noScale}|${inputShape}`,\n inputDependencies: ['rank']\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputTensor.dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.float, data: scales},\n {type: DataType.float, data: roi}, ...createTensorShapeVariables(inputShape, outputShape)\n ]\n })\n };\n };\n\nconst getOpsetVersionFromCustomDataBuffer = (context: ComputeContext): number => {\n const customDataBuffer = context.customDataBuffer;\n const customDataBuffer32 = new Uint32Array(customDataBuffer, customDataBuffer.byteOffset, 1);\n const opsetVersion = customDataBuffer32[0];\n return opsetVersion;\n};\n\nexport const resize = (context: ComputeContext, attributes: ResizeAttributes): void => {\n const scales: number[] = [];\n const sizes: number[] = [];\n const roi: number[] = [];\n\n // Note that scales in resize are always f32. roi can be f32 or f16.\n // TODO: Currently this code does not support f16 for roi when passed as optional input.\n\n const opsetVersion = getOpsetVersionFromCustomDataBuffer(context);\n if (attributes.antialias !== 0) {\n throw Error('Only default value (0) for Antialias attribute is supported');\n }\n validateInputs(context.inputs, attributes, opsetVersion, scales, sizes, roi);\n context.compute(\n createResizeProgramInfo(context.inputs[0], attributes, opsetVersion, scales, sizes, roi), {inputs: [0]});\n};\n\nexport const parseResizeAttributes = (attributes: Record): ResizeAttributes => {\n const antialias = attributes.antialias as number;\n const axes = attributes.axes as number[];\n const coordinateTransformMode: CoordinateTransformMode =\n attributes.coordinateTransformMode as CoordinateTransformMode;\n const cubicCoeffA = attributes.cubicCoeffA as number;\n const excludeOutside = attributes.excludeOutside as number !== 0;\n const extrapolationValue = attributes.extrapolationValue as number;\n const keepAspectRatioPolicy: KeepAspectRatioPolicy = attributes.keepAspectRatioPolicy as KeepAspectRatioPolicy;\n const mode: Mode = attributes.mode as Mode;\n // If nearestMode is not specified, use simple mode.\n const nearestMode: NearestMode = (attributes.nearestMode === '' ? 'simple' : attributes.nearestMode) as NearestMode;\n return createAttributeWithCacheKey({\n antialias,\n axes,\n coordinateTransformMode,\n cubicCoeffA,\n excludeOutside,\n extrapolationValue,\n keepAspectRatioPolicy,\n mode,\n nearestMode\n });\n};\n", "// Copyright (c) Microsoft Corporation. 
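For reference, the 'linear' Resize path embedded above blends eight neighbouring samples with distance-based weights and falls back to equal 0.5/0.5 weights whenever the two neighbouring indices coincide. A minimal CPU-side sketch of that blend, mirroring the WGSL shown earlier; the names `Sampler` and `trilinear` are illustrative and not part of this bundle:

// Illustrative sketch of the trilinear blend used by the Resize 'linear' path
// for 5-D inputs. `sampler` stands in for getInputValue(batch, channel, d, h, w).
type Sampler = (d: number, h: number, w: number) => number;

function trilinear(
    sampler: Sampler, depth: number, height: number, width: number,
    d1: number, d2: number, h1: number, h2: number, w1: number, w2: number): number {
  const x111 = sampler(d1, h1, w1), x112 = sampler(d1, h1, w2);
  const x121 = sampler(d1, h2, w1), x122 = sampler(d1, h2, w2);
  const x211 = sampler(d2, h1, w1), x212 = sampler(d2, h1, w2);
  const x221 = sampler(d2, h2, w1), x222 = sampler(d2, h2, w2);
  // Distance-based weights; when both neighbours are the same index the
  // shader assigns equal 0.5 weights so the weights still sum to 1.
  let dx1 = Math.abs(depth - d1), dx2 = Math.abs(d2 - depth);
  let dy1 = Math.abs(height - h1), dy2 = Math.abs(h2 - height);
  let dz1 = Math.abs(width - w1), dz2 = Math.abs(w2 - width);
  if (d1 === d2) { dx1 = 0.5; dx2 = 0.5; }
  if (h1 === h2) { dy1 = 0.5; dy2 = 0.5; }
  if (w1 === w2) { dz1 = 0.5; dz2 = 0.5; }
  return x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 * dz2 + x122 * dx2 * dy1 * dz1 +
      x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 * dz2 + x222 * dx1 * dy1 * dz1;
}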
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, WORKGROUP_SIZE} from './common';\n\nexport interface RotaryEmbeddingAttributes {\n readonly interleaved: boolean;\n readonly numHeads: number;\n readonly rotaryEmbeddingDim: number;\n readonly scale: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): void => {\n const [input, positionIds, cosCache, sinCache] = inputs;\n const {numHeads, rotaryEmbeddingDim} = attributes;\n\n if (input.dims.length !== 3 && input.dims.length !== 4) {\n throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${input.dims.length}`);\n }\n if (!ShapeUtil.areEqual(positionIds.dims, []) && !ShapeUtil.areEqual(positionIds.dims, [1]) &&\n positionIds.dims.length !== 2) {\n throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${positionIds.dims.length}`);\n }\n if (cosCache.dims.length !== 2) {\n throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${cosCache.dims.length}`);\n }\n if (sinCache.dims.length !== 2) {\n throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${sinCache.dims.length}`);\n }\n if (!ShapeUtil.areEqual(cosCache.dims, sinCache.dims)) {\n throw new Error('Inputs \\'cos_cache\\' and \\'sin_cache\\' are expected to have the same shape');\n }\n\n if (rotaryEmbeddingDim > 0 && numHeads === 0) {\n throw new Error('num_heads must be provided if rotary_embedding_dim is specified');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[input.dims.length - 2];\n const maxSequenceLength = cosCache.dims[0];\n const hiddenSize = ShapeUtil.sizeFromDimension(input.dims, 1) / sequenceLength;\n const headSize = rotaryEmbeddingDim === 0 ? 
cosCache.dims[1] * 2 : hiddenSize / numHeads;\n if (rotaryEmbeddingDim > headSize) {\n throw new Error('rotary_embedding_dim must be less than or equal to head_size');\n }\n\n if (positionIds.dims.length === 2) {\n if (batchSize !== positionIds.dims[0]) {\n throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${positionIds.dims[0]}`);\n }\n if (sequenceLength !== positionIds.dims[1]) {\n throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${positionIds.dims[1]}`);\n }\n }\n\n if (headSize / 2 !== cosCache.dims[1] && rotaryEmbeddingDim / 2 !== cosCache.dims[1]) {\n throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${\n cosCache.dims[1]}`);\n }\n\n if (sequenceLength > maxSequenceLength) {\n throw new Error('Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported');\n }\n};\n\nconst createRotaryEmbeddingProgramInfo =\n (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): ProgramInfo => {\n const {interleaved, numHeads, rotaryEmbeddingDim, scale} = attributes;\n const batchSize = inputs[0].dims[0];\n const batchStride = ShapeUtil.sizeFromDimension(inputs[0].dims, 1);\n const sequenceLength = inputs[0].dims[inputs[0].dims.length - 2];\n const hiddenSize = batchStride / sequenceLength;\n const halfRotaryEmbeddingDim = inputs[2].dims[1];\n const headSize = rotaryEmbeddingDim === 0 ? halfRotaryEmbeddingDim * 2 : hiddenSize / numHeads;\n\n // Rotary embeddings will be calculated in a pair-wise fashion. In accordance, use the shape\n // [batch size, sequence length, num of heads, num of pairs to rotate + num of dims to copy]\n // to unfold the global index in shader.\n const globalShape =\n new Array(batchSize, sequenceLength, hiddenSize / headSize, headSize - halfRotaryEmbeddingDim);\n const globalStrides = ShapeUtil.computeStrides(globalShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.float, data: scale},\n {type: DataType.uint32, data: globalShape},\n {type: DataType.uint32, data: globalStrides},\n\n // strides for addressing the input/output tensor, in permutated order to align with the unfolded global index,\n // i.e. 
BSNH\n ...(inputs[0].dims.length === 3 ?\n new Array({type: DataType.uint32, data: [batchStride, hiddenSize, headSize, 1]}) :\n []),\n ...(inputs[0].dims.length === 4 ?\n new Array(\n {type: DataType.uint32, data: [batchStride, headSize, sequenceLength * headSize, 1]}) :\n []),\n\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, inputs[2].dims, inputs[3].dims, inputs[0].dims),\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const positionIds = inputVariable('position_ids', inputs[1].dataType, inputs[1].dims.length);\n const cosCache = inputVariable('cos_cache', inputs[2].dataType, inputs[2].dims.length);\n const sinCache = inputVariable('sin_cache', inputs[3].dataType, inputs[3].dims.length);\n const output = outputVariable('output', inputs[0].dataType, inputs[0].dims.length);\n\n shaderHelper.registerUniforms([\n {name: 'scale', type: 'f32'},\n {name: 'global_shape', type: 'u32', length: globalShape.length},\n {name: 'global_strides', type: 'u32', length: globalStrides.length},\n {name: 'input_output_strides', type: 'u32', length: globalStrides.length},\n ]);\n\n return `\n ${shaderHelper.declareVariables(input, positionIds, cosCache, sinCache, output)}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n let half_rotary_emb_dim = uniforms.${cosCache.name}_shape[1];\n let bsnh = global_idx / uniforms.global_strides % uniforms.global_shape;\n let size = uniforms.global_shape[0] * uniforms.global_strides[0];\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('size')}\n\n if (bsnh[3] < half_rotary_emb_dim) {\n let position_ids_idx =\n ${positionIds.broadcastedIndicesToOffset('bsnh.xy', outputVariable('', positionIds.type.tensor, 2))};\n let position_id =\n u32(${positionIds.getByOffset('position_ids_idx')}) + select(0, bsnh[1], position_ids_idx == 0);\n let i = dot(bsnh, uniforms.input_output_strides) + select(0, bsnh[3], ${interleaved});\n let j = i + select(half_rotary_emb_dim, 1, ${interleaved});\n let re = ${input.getByOffset('i')} * ${cosCache.get('position_id', 'bsnh[3]')} -\n ${input.getByOffset('j')} * ${sinCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('i', 're')}\n let im = ${input.getByOffset('i')} * ${sinCache.get('position_id', 'bsnh[3]')} +\n ${input.getByOffset('j')} * ${cosCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('j', 'im')}\n } else {\n let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim;\n ${output.setByOffset('k', input.getByOffset('k'))}\n }\n }`;\n };\n\n return {\n name: 'RotaryEmbedding',\n shaderCache: {\n hint: createAttributeWithCacheKey({\n interleaved,\n }).cacheKey,\n inputDependencies: ['rank', 'rank', 'rank', 'rank'],\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(globalShape) / WORKGROUP_SIZE)},\n programUniforms,\n }),\n };\n };\n\nexport const rotaryEmbedding = (context: ComputeContext, attributes: RotaryEmbeddingAttributes): void => {\n validateInputs(context.inputs, attributes);\n context.compute(createRotaryEmbeddingProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
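As a reading aid for the RotaryEmbedding shader above: each element below the rotary dimension is rotated against a partner element, the adjacent one when interleaved or the one half a rotary dimension away otherwise, using the cached cos/sin values for its position; elements beyond the rotary dimension are copied through unchanged. A hedged single-head sketch under those assumptions; the function and parameter names are illustrative:

// Illustrative sketch of the per-pair rotation applied by RotaryEmbedding.
// `x` holds one head's values, `cos`/`sin` are the cache rows for this
// position id, and 2 * halfDim elements participate in the rotation.
function rotateHead(x: Float32Array, cos: Float32Array, sin: Float32Array,
                    halfDim: number, interleaved: boolean): Float32Array {
  const out = Float32Array.from(x);
  for (let k = 0; k < halfDim; k++) {
    const i = interleaved ? 2 * k : k;            // first element of the pair
    const j = interleaved ? i + 1 : i + halfDim;  // its rotation partner
    out[i] = x[i] * cos[k] - x[j] * sin[k];       // "real" part
    out[j] = x[i] * sin[k] + x[j] * cos[k];       // "imaginary" part
  }
  return out;
}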
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {castToF32, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface SkipLayerNormAttributes {\n simplified: boolean;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 3) {\n throw new Error('layerNorm requires at least 3 inputs.');\n }\n\n const input: TensorView = inputs[0];\n const skip: TensorView = inputs[1];\n const gamma: TensorView = inputs[2];\n\n if (input.dataType !== skip.dataType || input.dataType !== gamma.dataType) {\n throw new Error('All inputs must have the same data type');\n }\n\n if (input.dims.length !== 3 && input.dims.length !== 2) {\n throw new Error('Input must be 2D or 3D');\n }\n\n if (skip.dims.length !== 3 && skip.dims.length !== 2) {\n throw new Error('Skip must be 2D or 3D');\n }\n\n const hiddenSize = input.dims[input.dims.length - 1];\n const sequenceLength = input.dims[input.dims.length - 2];\n if (skip.dims[skip.dims.length - 1] !== hiddenSize) {\n throw new Error('Skip must have the same hidden size as input');\n }\n if (skip.dims[skip.dims.length - 2] !== sequenceLength) {\n throw new Error('Skip must have the same sequence length as input');\n }\n\n if (gamma.dims.length !== 1) {\n throw new Error('Gamma must be 1D');\n }\n if (gamma.dims[gamma.dims.length - 1] !== hiddenSize) {\n throw new Error('Gamma must have the same hidden size as input');\n }\n if (inputs.length > 3) {\n const beta: TensorView = inputs[3];\n if (beta.dims.length !== 1) {\n throw new Error('Beta must be 1D');\n }\n if (beta.dims[beta.dims.length - 1] !== hiddenSize) {\n throw new Error('Beta must have the same hidden size as input');\n }\n }\n if (inputs.length > 4) {\n const bias: TensorView = inputs[4];\n if (bias.dims.length !== 1) {\n throw new Error('Bias must be 1D');\n }\n if (bias.dims[bias.dims.length - 1] !== hiddenSize) {\n throw new Error('Bias must have the same hidden size as input');\n }\n }\n};\n\nconst createSkipLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: SkipLayerNormAttributes, outputCount: number, isTraining: boolean):\n ProgramInfo => {\n const simplified = attributes.simplified;\n\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const outputShape = inputShape;\n const outputSize = inputSize;\n const hiddenSize = inputShape.slice(-1)[0];\n const meanInvStdDevDim = isTraining ? 
inputShape.slice(0, -1).concat(1) : [];\n const hasBetaInput = !simplified && inputs.length > 3;\n const hasBiasInput = inputs.length > 4;\n const hasMeanOutput = isTraining && outputCount > 1;\n const hasInvStdDevOutput = isTraining && outputCount > 2;\n const hasInputSkipBiasSumOutput = outputCount > 3;\n const workgroupSize = 64;\n\n const components = getMaxComponents(hiddenSize);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.uint32, data: components},\n {type: DataType.uint32, data: hiddenSize},\n {type: DataType.float, data: attributes.epsilon},\n ];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniformsArray: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'components', type: 'u32'},\n {name: 'hidden_size', type: 'u32'},\n {name: 'epsilon', type: 'f32'},\n ];\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('skip', inputs[1].dataType, inputs[1].dims, components),\n inputVariable('gamma', inputs[2].dataType, inputs[2].dims, components),\n ];\n if (hasBetaInput) {\n variables.push(inputVariable('beta', inputs[3].dataType, inputs[3].dims, components));\n }\n if (hasBiasInput) {\n variables.push(inputVariable('bias', inputs[4].dataType, inputs[4].dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanOutput) {\n variables.push(outputVariable('mean_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdDevOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInputSkipBiasSumOutput) {\n variables.push(outputVariable('input_skip_bias_sum', inputs[0].dataType, outputShape, components));\n }\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const vecDataType = tensorTypeToWsglStorageType(DataType.float, components);\n return `\n\n ${shaderHelper.registerUniforms(uniformsArray).declareVariables(...variables)}\n var sum_shared : array<${vecDataType}, ${workgroupSize}>;\n var sum_squared_shared : array<${vecDataType}, ${workgroupSize}>;\n\n ${shaderHelper.mainStart([\n workgroupSize, 1, 1\n ])}\n let ix = local_id.x;\n let iy = global_id.x / ${workgroupSize};\n\n let hidden_size_vectorized: u32 = uniforms.hidden_size / uniforms.components;\n var stride = hidden_size_vectorized / ${workgroupSize};\n let offset = ix * stride + iy * hidden_size_vectorized;\n let offset1d = stride * ix;\n if (ix == ${workgroupSize - 1}) {\n stride = hidden_size_vectorized - stride * ix;\n }\n for (var i: u32 = 0; i < stride; i++) {\n let skip_value = skip[offset + i];\n let bias_value = ${hasBiasInput ? 'bias[offset1d + i]' : dataType + '(0.0)'};\n let input_value = x[offset + i];\n let value = input_value + skip_value + bias_value;\n ${hasInputSkipBiasSumOutput ? 
'input_skip_bias_sum[offset + i] = value;' : ''}\n output[offset + i] = value;\n let f32_value = ${castToF32(dataType, components, 'value')};\n sum_shared[ix] += f32_value;\n sum_squared_shared[ix] += f32_value * f32_value;\n }\n workgroupBarrier();\n\n var reduce_size : u32 = ${workgroupSize};\n for (var curr_size = reduce_size >> 1; curr_size > 0; curr_size = reduce_size >> 1) {\n reduce_size = curr_size + (reduce_size & 1);\n if (ix < curr_size) {\n sum_shared[ix] += sum_shared[ix + reduce_size];\n sum_squared_shared[ix] += sum_squared_shared[ix + reduce_size];\n }\n workgroupBarrier();\n }\n\n let sum = sum_shared[0];\n let square_sum = sum_squared_shared[0];\n let mean = ${sumVector('sum', components)} / f32(uniforms.hidden_size);\n let inv_std_dev = inverseSqrt(${sumVector('square_sum', components)} / f32(uniforms.hidden_size) ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n ${hasMeanOutput ? 'mean_output[global_idx] = mean;' : ''}\n ${hasInvStdDevOutput ? 'inv_std_output[global_idx] = inv_std_dev;' : ''}\n\n for (var i: u32 = 0; i < stride; i++) {\n output[offset + i] = (output[offset + i] ${simplified ? '' : `- ${dataType}(mean)`}) *\n ${dataType}(inv_std_dev) * gamma[offset1d + i]\n ${hasBetaInput ? '+ beta[offset1d + i]' : ''};\n }\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (outputCount > 1) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 2) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 3) {\n outputs.push({dims: inputShape, dataType: inputs[0].dataType});\n }\n return {\n name: 'SkipLayerNormalization',\n shaderCache: {\n hint: `${components};${hasMeanOutput};${hasInvStdDevOutput};${hasInputSkipBiasSumOutput}`,\n inputDependencies: inputs.map((_input, _index) => 'type')\n },\n getShaderSource,\n getRunData: () => ({\n outputs,\n dispatchGroup: {\n x: Math.ceil(outputSize / hiddenSize),\n },\n programUniforms\n }),\n };\n };\n\nexport const skipLayerNorm = (context: ComputeContext, attributes: SkipLayerNormAttributes): void => {\n // TODO: initialize isTraining from ComputeContext\n const isTraining = false;\n validateInputs(context.inputs);\n // Mean and InvStdDev are only used in training mode and are not required for inference.\n // They are added here for completeness only.\n const outputs = [0];\n if (context.outputCount > 1) {\n outputs.push(isTraining ? 1 : -3);\n }\n if (context.outputCount > 2) {\n outputs.push(isTraining ? 2 : -3);\n }\n if (context.outputCount > 3) {\n outputs.push(3);\n }\n context.compute(\n createSkipLayerNormProgramInfo(context.inputs, attributes, context.outputCount, isTraining), {outputs});\n};\n", "// Copyright (c) Microsoft Corporation. 
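For orientation, the SkipLayerNormalization shader above accumulates the sum and sum-of-squares of x + skip + bias across a 64-thread workgroup, tree-reduces them in shared memory, and then normalizes with gamma and the optional beta; the `simplified` flag drops the mean subtraction (an RMSNorm-style variant). A single-row CPU sketch of the same arithmetic, with illustrative names and no workgroup parallelism:

// Illustrative single-row sketch of SkipLayerNorm; the GPU version splits the
// row across a workgroup and tree-reduces sum / sum-of-squares, but the math
// per row is the same.
function skipLayerNormRow(
    x: Float32Array, skip: Float32Array, gamma: Float32Array, epsilon: number,
    beta?: Float32Array, bias?: Float32Array, simplified = false): Float32Array {
  const n = x.length;
  const value = new Float32Array(n);
  let sum = 0, squareSum = 0;
  for (let i = 0; i < n; i++) {
    value[i] = x[i] + skip[i] + (bias ? bias[i] : 0);
    sum += value[i];
    squareSum += value[i] * value[i];
  }
  const mean = sum / n;
  // the simplified variant omits the mean term entirely
  const invStdDev = 1 / Math.sqrt(squareSum / n - (simplified ? 0 : mean * mean) + epsilon);
  return value.map((v, i) => (v - (simplified ? 0 : mean)) * invStdDev * gamma[i] + (beta ? beta[i] : 0));
}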
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly starts: number[];\n readonly ends: number[];\n readonly axes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: SliceAttributes): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n if (attributes.axes.length !== 0) {\n if (attributes.axes.length !== attributes.starts.length || attributes.axes.length !== attributes.ends.length) {\n throw new Error('axes, starts and ends must have the same length');\n }\n } else if (attributes.starts.length !== attributes.ends.length) {\n throw new Error('starts and ends must have the same length');\n }\n inputs.slice(1).forEach((_, idx) => {\n if (inputs[idx + 1].dataType !== DataType.int32 && inputs[idx + 1].dataType !== DataType.int64) {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n });\n};\n\nconst readInput = (inputs: readonly TensorView[], idx: number): number[] => {\n const input: number[] = [];\n if (inputs.length > idx) {\n if (inputs[idx].dataType === DataType.int64) {\n inputs[idx].getBigInt64Array().forEach(v => input.push(Number(v)));\n } else if (inputs[idx].dataType === DataType.int32) {\n inputs[idx].getInt32Array().forEach(v => input.push(Number(v)));\n } else {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n }\n return input;\n};\n\nconst createSliceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SliceAttributes): SliceAttributes => {\n if (inputs.length > 1) {\n const starts: number[] = readInput(inputs, 1);\n const ends: number[] = readInput(inputs, 2);\n let axes: number[] = readInput(inputs, 3);\n if (axes.length === 0) {\n axes = [...Array(inputs[0].dims.length).keys()];\n }\n return createAttributeWithCacheKey({starts, ends, axes});\n } else {\n return attributes;\n }\n };\n\nconst fixStartEndValues =\n (value: number, index: number, inputShape: readonly number[], axes: readonly number[], steps: readonly number[]):\n number => {\n let newValue = value;\n if (value < 0) {\n newValue += inputShape[axes[index]];\n }\n if (steps[index] < 0) {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]] - 1));\n } else {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]]));\n }\n };\n\nconst calculateInputIndicesImpl =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[]): string =>\n `fn calculateInputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n var carry = 0u;\n for (var i = ${inputShape.length}; i >= 0; i--) {\n let input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n let steps_i = ${getElementAt('uniforms.steps', 'i', inputShape.length)};\n let signs_i = ${getElementAt('uniforms.signs', 'i', inputShape.length)};\n let starts_i = ${getElementAt('uniforms.starts', 'i', inputShape.length)};\n var output_index = ${output.indicesGet('output_indices', 
'i')};\n var input_index = output_index * steps_i + starts_i + carry;\n carry = input_index / input_shape_i;\n input_index = input_index % input_shape_i;\n if (signs_i < 0) {\n input_index = input_shape_i - input_index - 1u + starts_i;\n }\n ${input.indicesSet('input_indices', 'i', 'input_index')};\n }\n return input_indices;\n }`;\n\nconst createSliceProgramInfo = (inputs: readonly TensorView[], attributes: SliceAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const axes = (attributes.axes.length > 0) ? ShapeUtil.normalizeAxes(attributes.axes, inputShape.length) :\n [...Array(inputShape.length).keys()];\n let steps = readInput(inputs, 4);\n steps.forEach((step) => step !== 0 || (() => {\n throw new Error('step cannot be 0');\n }));\n if (steps.length === 0) {\n steps = Array(axes.length).fill(1);\n }\n const starts = attributes.starts.map((start, i) => fixStartEndValues(start, i, inputShape, axes, steps));\n\n const ends = attributes.ends.map((end, i) => fixStartEndValues(end, i, inputShape, axes, steps));\n\n if (axes.length !== starts.length || axes.length !== ends.length) {\n throw new Error('start, ends and axes should have the same number of elements');\n }\n\n if (axes.length !== inputShape.length) {\n for (let i = 0; i < inputShape.length; ++i) {\n if (!axes.includes(i)) {\n starts.splice(i, 0, 0);\n ends.splice(i, 0, inputShape[i]);\n steps.splice(i, 0, 1);\n }\n }\n }\n const signs = steps.map(step => Math.sign(step));\n // Convert negative steps to positive steps and reverse starts and ends\n steps.forEach((step, i, array) => {\n if (step < 0) {\n const numSteps = (ends[i] - starts[i]) / step;\n const newEnd = starts[i];\n const newStart = newEnd + numSteps * steps[i];\n starts[i] = newStart;\n ends[i] = newEnd;\n array[i] = -step;\n }\n });\n // Output rank is expected to be less than or equal to the input rank.\n const outputShape = inputShape.slice(0);\n axes.forEach((axis, _) => {\n outputShape[axis] = Math.ceil((ends[axis] - starts[axis]) / steps[axis]);\n });\n const outputTensorInfo: TensorInfo = {dims: outputShape, dataType: inputs[0].dataType};\n\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const outputSize = ShapeUtil.size(outputShape);\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'starts', type: 'u32', length: starts.length},\n {name: 'signs', type: 'i32', length: signs.length}, {name: 'steps', type: 'u32', length: steps.length}\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: starts},\n {type: DataType.int32, data: signs}, {type: DataType.uint32, data: steps},\n ...createTensorShapeVariables(inputs[0].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${calculateInputIndicesImpl(input, output, inputShape)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n let input_indices = calculateInputIndices(output_indices);\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n return {\n name: 'Slice',\n shaderCache: {hint: `${signs.length}_${starts.length}_${steps.length}`, inputDependencies: ['rank']},\n 
getShaderSource,\n getRunData: () => ({\n outputs: [outputTensorInfo],\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const slice = (context: ComputeContext, attributes: SliceAttributes): void => {\n validateInputs(context.inputs, attributes);\n const updatedAttributes = createSliceAttributesFromInputs(context.inputs, attributes);\n context.compute(createSliceProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n // if (ShapeUtil.size(program.outputs[0].dims) > 0) {\n // context.compute(programInfoLoader, {inputs: [0]});\n // } else {\n // // TODO: support empty output\n // throw new Error('slice: output size is 0');\n // }\n};\n\nexport const parseSliceAttributes = (attributes: Record): SliceAttributes => {\n const starts = attributes.starts as number[];\n const ends = attributes.ends as number[];\n const axes = attributes.axes as number[];\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax op requires 1 input.');\n }\n};\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst createSoftmaxProgramInfo = (input: TensorView, attributes: SoftmaxAttributes): ProgramInfo => {\n const shape = input.dims;\n const outputSize = ShapeUtil.size(shape);\n const WG = 64;\n let axis = attributes.axis;\n if (axis < 0) {\n axis = shape.length + axis;\n }\n if (axis < shape.length - 1) {\n throw new Error('softmax only supports last axis for now.');\n }\n\n const cols = shape[axis];\n const rows = outputSize / cols;\n const components = getMaxComponents(cols);\n const packedCols = cols / components;\n\n const maxVector = (name: string, components: number) => {\n if (components === 4) {\n return `max(max(${name}.x, ${name}.y), max(${name}.z, ${name}.w))`;\n } else if (components === 2) {\n return `max(${name}.x, ${name}.y)`;\n } else if (components === 3) {\n return `max(max(${name}.x, ${name}.y), ${name}.z)`;\n }\n\n return name;\n };\n const x = inputVariable('x', input.dataType, input.dims, components);\n const output = outputVariable('result', input.dataType, input.dims, components);\n const valueType = x.type.value;\n // 6.2.4 in wgsl spec\n const threadMaxDecl = tensorTypeToWsglStorageType(input.dataType) === 'f32' ?\n `var threadMax = ${valueType}(-3.402823e+38f);` :\n `var threadMax = ${valueType}(-65504.0h);`;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n var rowMaxShared : ${valueType};\n var rowSumShared : ${valueType};\n var threadShared : array<${valueType}, ${WG}>;\n\n fn getValue(row: i32, col: i32, row_stride: i32) -> ${valueType} {\n let index = row * row_stride + col;\n return x[index];\n }\n\n fn setValue(row: 
i32, col: i32, row_stride: i32, value: ${valueType}) {\n let index = row * row_stride + col;\n result[index] = value;\n }\n ${shaderHelper.registerUniform('packedCols', 'i32').declareVariables(x, output)}\n ${shaderHelper.mainStart()}\n let gindex = i32(global_idx);\n let lindex = i32(local_idx);\n const wg = ${WG};\n let row = gindex / wg;\n let cols = uniforms.packedCols;\n let row_stride : i32 = uniforms.packedCols;\n\n // find the rows max\n ${threadMaxDecl}\n for (var col = lindex; col < cols; col += wg) {\n let value = getValue(row, col, row_stride);\n threadMax = max(threadMax, value);\n }\n if (lindex < cols) {\n threadShared[lindex] = threadMax;\n }\n workgroupBarrier();\n\n var reduceSize = min(cols, wg);\n for (var currSize = reduceSize >> 1; currSize > 0; currSize = reduceSize >> 1) {\n reduceSize = currSize + (reduceSize & 1);\n if (lindex < currSize) {\n threadShared[lindex] = max(threadShared[lindex], threadShared[lindex + reduceSize]);\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowMaxShared = ${valueType}(${maxVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // find the rows sum\n var threadSum = ${valueType}(0.0);\n for (var col = lindex; col < cols; col += wg) {\n let subExp = exp(getValue(row, col, row_stride) - rowMaxShared);\n threadSum += subExp;\n }\n threadShared[lindex] = threadSum;\n workgroupBarrier();\n\n for (var currSize = wg >> 1; currSize > 0; currSize = currSize >> 1) {\n if (lindex < currSize) {\n threadShared[lindex] = threadShared[lindex] + threadShared[lindex + currSize];\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowSumShared = ${valueType}(${sumVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // calculate final value for each element in the row\n for (var col = lindex; col < cols; col += wg) {\n let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared;\n setValue(row, col, row_stride, value);\n }\n }`;\n return {\n name: 'Softmax',\n shaderCache: {hint: `${components}`, inputDependencies: ['type']},\n getRunData: () => ({\n outputs: [{dims: shape, dataType: input.dataType}],\n dispatchGroup: {x: rows},\n programUniforms: [{type: DataType.int32, data: packedCols}]\n }),\n getShaderSource,\n };\n};\n\nexport const softmax = (context: ComputeContext, attributes: SoftmaxAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createSoftmaxProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseSoftmaxAttributes = (attributes: Record): SoftmaxAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
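The Softmax shader above follows the standard numerically stable recipe: find the row maximum, exponentiate the shifted values so exp() cannot overflow, then divide by the row sum; on the GPU the max and sum come from workgroup-shared tree reductions. A per-row sketch of the same computation; the name `softmaxRow` is illustrative:

// Illustrative per-row softmax matching the shader's max-subtract + normalize steps.
function softmaxRow(row: Float32Array): Float32Array {
  const rowMax = row.reduce((a, b) => Math.max(a, b), -Infinity);
  const exps = row.map(v => Math.exp(v - rowMax));
  const rowSum = exps.reduce((a, b) => a + b, 0);
  return exps.map(v => v / rowSum);
}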
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly numOutputs: number;\n readonly splitSizes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n};\n\nconst createSplitAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SplitAttributes): SplitAttributes => {\n const splitSizes: number[] = [];\n let numOutputs: number = attributes.numOutputs;\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => splitSizes.push(Number(v)));\n numOutputs = splitSizes.length;\n }\n return createAttributeWithCacheKey({numOutputs, axis: attributes.axis, splitSizes});\n };\n\nconst calculateOutputIndexImpl = (numberOfTensors: number): string => `\nfn calculateOutputIndex(index: u32) -> u32 {\n for (var i: u32 = 0u; i < ${numberOfTensors}u; i += 1u ) {\n if (index < ${getElementAt('uniforms.size_in_split_axis', 'i', numberOfTensors)}) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n}`;\nconst writeBufferDataImpl = (outputs: readonly IndicesHelper[]) => {\n const numberOfTensors = outputs.length;\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = outputs[i].setByIndices('indices', 'input[global_idx]');\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (output_number == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (output_number == ${i}) { ${returnSnippet} }`);\n }\n }\n return `\n fn writeBufferData(output_number: u32, indices: ${outputs[0].type.indices}, global_idx: u32) {\n ${codeLines.join('\\n')}\n }`;\n};\n\nconst createSplitProgramInfo = (inputs: readonly TensorView[], attributes: SplitAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const dataType = inputs[0].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const outputs = new Array(attributes.numOutputs);\n const input = inputVariable('input', dataType, inputShape.length);\n const sizeInSplitAxis = new Array(attributes.numOutputs);\n const outputsTensorInfo: TensorInfo[] = [];\n const outputShapes: number[][] = [];\n let previousSum = 0;\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: inputSize}];\n for (let i = 0; i < attributes.numOutputs; i++) {\n previousSum += attributes.splitSizes[i];\n sizeInSplitAxis[i] = previousSum;\n const outputShape = inputShape.slice();\n outputShape[attributes.axis] = attributes.splitSizes[i];\n outputShapes.push(outputShape);\n outputs[i] = outputVariable(`output${i}`, dataType, outputShape.length);\n outputsTensorInfo.push({dims: outputShapes[i], dataType: inputs[0].dataType});\n }\n programUniforms.push(\n {type: DataType.uint32, data: sizeInSplitAxis}, 
...createTensorShapeVariables(inputShape, ...outputShapes));\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('input_size', 'u32')\n .registerUniform('size_in_split_axis', 'u32', sizeInSplitAxis.length)\n .declareVariables(input, ...outputs)}\n ${calculateOutputIndexImpl(sizeInSplitAxis.length)}\n ${writeBufferDataImpl(outputs)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.input_size')}\n\n var indices = ${input.offsetToIndices('global_idx')};\n var index = ${input.indicesGet('indices', axis)};\n let output_number = calculateOutputIndex(index);\n if (output_number != 0) {\n index -= ${getElementAt('uniforms.size_in_split_axis', 'output_number - 1u', sizeInSplitAxis.length)};\n ${input.indicesSet('indices', axis, 'index')};\n }\n writeBufferData(output_number, indices, global_idx);\n }`;\n return {\n name: 'Split',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: outputsTensorInfo,\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const split = (context: ComputeContext, attributes: SplitAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes =\n context.inputs.length === 1 ? attributes : createSplitAttributesFromInputs(context.inputs, attributes);\n context.compute(createSplitProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n\nexport const parseSplitAttributes = (attributes: Record): SplitAttributes => {\n const axis = attributes.axis as number;\n const splitSizes: number[] = attributes.splitSizes as number[];\n const numOutputs = attributes.numOutputs as number < 0 ? splitSizes.length : attributes.numOutputs as number;\n if (numOutputs !== splitSizes.length) {\n throw new Error('numOutputs and splitSizes lengh must be equal');\n }\n return createAttributeWithCacheKey({axis, numOutputs, splitSizes});\n};\n", "// Copyright (c) Microsoft Corporation. 
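The Split shader above routes each element by comparing its index along the split axis against the running totals in size_in_split_axis, then rebases the index into the selected output. A small sketch of that routing logic, with illustrative names:

// Illustrative sketch of calculateOutputIndex + the index rebase in the Split shader.
function routeSplitIndex(axisIndex: number, cumulativeSizes: number[]): {output: number; localIndex: number} {
  for (let i = 0; i < cumulativeSizes.length; i++) {
    if (axisIndex < cumulativeSizes[i]) {
      return {output: i, localIndex: i === 0 ? axisIndex : axisIndex - cumulativeSizes[i - 1]};
    }
  }
  throw new Error(`axis index ${axisIndex} is out of range`);
}

// e.g. split sizes [2, 3, 4] give cumulative totals [2, 5, 9]; axis index 6
// lands in output 2 at local index 1.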
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst createWhereOpProgramShader =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], dimsOutput: readonly number[], isBroadcast: boolean,\n typeOutput: number) => {\n const output = outputVariable('output_data', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('a_data', inputs[1].dataType, inputs[1].dims.length, 4);\n const b = inputVariable('b_data', inputs[2].dataType, inputs[2].dims.length, 4);\n const c = inputVariable('c_data', inputs[0].dataType, inputs[0].dims.length, 4);\n\n let assignment: string;\n const expression = (a: string, b: string, c: string) => `select(${b}, ${a}, ${c})`;\n if (!isBroadcast) {\n assignment = output.setByOffset(\n 'global_idx',\n expression(a.getByOffset('global_idx'), b.getByOffset('global_idx'), c.getByOffset('global_idx')));\n } else {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `a_data[index_a${x}][component_a${x}]`;\n const expressionB = `b_data[index_b${x}][component_b${x}]`;\n // eslint-disable-next-line no-bitwise\n const expressionC = `bool(c_data[index_c${x}] & (0xffu << (component_c${x} * 8)))`;\n return `\n let output_indices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offset_a${x} = ${a.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_b${x} = ${b.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_c${x} = ${c.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let index_a${x} = offset_a${x} / 4u;\n let index_b${x} = offset_b${x} / 4u;\n let index_c${x} = offset_c${x} / 4u;\n let component_a${x} = offset_a${x} % 4u;\n let component_b${x} = offset_b${x} % 4u;\n let component_c${x} = offset_c${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expression(expressionA, expressionB, expressionC)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n output_data[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('output_data[global_idx]', 0)}\n ${singleAssignment('output_data[global_idx]', 1)}\n ${singleAssignment('output_data[global_idx]', 2)}\n ${singleAssignment('output_data[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(c, a, b, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createWhereOpProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const dimsA = inputs[1].dims;\n const dimsB = inputs[2].dims;\n const dimsC = inputs[0].dims;\n const outputDataType = inputs[1].dataType;\n\n const isBroadcast = !(ShapeUtil.areEqual(dimsA, dimsB) && ShapeUtil.areEqual(dimsB, dimsC));\n let outputShape = dimsA;\n let outputSize = ShapeUtil.size(dimsA);\n // TODO: deal with zero-sized tensors (eg. 
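The broadcast path of the Where shader above packs four boolean results into one u32, one byte per lane, via a dot product with (0x1, 0x100, 0x10000, 0x1000000), and reads the condition back with a per-byte mask. A small sketch of that packing; the function names are illustrative:

// Illustrative byte packing/unpacking matching the Where shader's bool handling.
function packBool4(data: [number, number, number, number]): number {
  return data[0] * 0x1 + data[1] * 0x100 + data[2] * 0x10000 + data[3] * 0x1000000;
}

function unpackBool(packed: number, component: number): boolean {
  // eslint-disable-next-line no-bitwise
  return (packed & (0xff << (component * 8))) !== 0;
}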
dims=[1,0])\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(BroadcastUtil.calcShape(dimsA, dimsB, false)!, dimsC, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform where op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n }\n\n const vecSize = Math.ceil(outputSize / 4);\n\n return {\n name: 'Where',\n shaderCache: {inputDependencies: ['rank', 'rank', 'rank']},\n getShaderSource: (shaderHelper) =>\n createWhereOpProgramShader(shaderHelper, inputs, outputShape, isBroadcast, outputDataType),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms:\n [{type: DataType.uint32, data: vecSize}, ...createTensorShapeVariables(dimsC, dimsA, dimsB, outputShape)],\n }),\n };\n};\n\nexport const where = (context: ComputeContext): void => {\n context.compute(createWhereOpProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {argMax, argMin, parseArgMinMaxAttributes} from './ops/argminmax';\nimport {attention} from './ops/attention';\nimport {batchNorm} from './ops/batch-norm';\nimport {biasAdd} from './ops/bias-add';\nimport {biasSplitGelu} from './ops/bias-split-gelu';\nimport * as binaryOps from './ops/binary-op';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {cumsum, parseCumSumAttributes} from './ops/cumsum';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {einsum, parseEinsumAttributes} from './ops/einsum';\nimport {expand} from './ops/expand';\nimport {fastGelu} from './ops/fast-gelu';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gatherElements, parseGatherElementsAttributes} from './ops/gather-elements';\nimport {gemm, parseGemmAttributes} from './ops/gemm';\nimport {groupQueryAttention, parseGroupQueryAttentionAttributes} from './ops/group-query-attention';\nimport {instanceNorm} from './ops/instance-norm';\nimport {layerNorm} from './ops/layer-norm';\nimport {matMul} from './ops/matmul';\nimport {matMulNBits, parseMatMulNBitsAttributes} from './ops/matmulnbits';\nimport {multiHeadAttention, parseMultiHeadAttentionAttributes} from './ops/multihead-attention';\nimport {pad} from './ops/pad';\nimport * as pool from './ops/pool';\nimport {range} from './ops/range';\nimport {reduceL1, reduceL2, reduceLogSum, reduceLogSumExp, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum, reduceSumSquare} from './ops/reduce';\nimport {parseResizeAttributes, resize} from './ops/resize';\nimport {rotaryEmbedding} from './ops/rotary-embedding';\nimport {skipLayerNorm} from './ops/skip-layer-norm';\nimport {parseSliceAttributes, slice} from './ops/slice';\nimport {parseSoftmaxAttributes, softmax} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {where} from './ops/where';\nimport {ComputeContext} from './types';\n\nexport type RunFunction = (context: ComputeContext, attribute?: unknown) => void;\nexport type ParseAttributeFunction = (attributeRaw: unknown) => unknown;\nexport type 
OperatorImplementation = [RunFunction]|[RunFunction, ParseAttributeFunction];\n\nexport const WEBGPU_OP_RESOLVE_RULES: Map = new Map([\n ['Abs', [unaryOps.abs]],\n ['Acos', [unaryOps.acos]],\n ['Acosh', [unaryOps.acosh]],\n ['Add', [binaryOps.add]],\n ['ArgMax', [argMax, parseArgMinMaxAttributes]],\n ['ArgMin', [argMin, parseArgMinMaxAttributes]],\n ['Asin', [unaryOps.asin]],\n ['Asinh', [unaryOps.asinh]],\n ['Atan', [unaryOps.atan]],\n ['Atanh', [unaryOps.atanh]],\n ['Attention', [attention]],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', [pool.averagePool, pool.parseAveragePoolAttributes]],\n ['BatchNormalization', [batchNorm]],\n ['BiasAdd', [biasAdd]],\n ['BiasSplitGelu', [biasSplitGelu]],\n ['Cast', [unaryOps.cast, unaryOps.parseCastAttributes]],\n ['Ceil', [unaryOps.ceil]],\n ['Clip', [unaryOps.clip]],\n ['Concat', [concat, parseConcatAttributes]],\n ['Conv', [conv, parseConvAttributes]],\n ['ConvTranspose', [convTranspose, parseConvTransposeAttributes]],\n ['Cos', [unaryOps.cos]],\n ['Cosh', [unaryOps.cosh]],\n ['CumSum', [cumsum, parseCumSumAttributes]],\n ['DepthToSpace', [depthToSpace, parseDepthToSpaceAttributes]],\n ['Div', [binaryOps.div]],\n ['Einsum', [einsum, parseEinsumAttributes]],\n ['Elu', [unaryOps.elu, unaryOps.parseAlphaAttributes]],\n ['Equal', [binaryOps.equal]],\n ['Erf', [unaryOps.erf]],\n ['Exp', [unaryOps.exp]],\n ['Expand', [expand]],\n ['FastGelu', [fastGelu]],\n ['Floor', [unaryOps.floor]],\n ['FusedConv', [conv, parseConvAttributes]],\n ['Gather', [gather, parseGatherAttributes]],\n ['GatherElements', [gatherElements, parseGatherElementsAttributes]],\n ['Gelu', [unaryOps.gelu]],\n ['Gemm', [gemm, parseGemmAttributes]],\n ['GlobalAveragePool', [pool.globalAveragePool, pool.parseGlobalAveragePoolAttributes]],\n ['GlobalMaxPool', [pool.globalMaxPool, pool.parseGlobalMaxPoolAttributes]],\n ['Greater', [binaryOps.greater]],\n ['GreaterOrEqual', [binaryOps.greaterOrEqual]],\n ['GroupQueryAttention', [groupQueryAttention, parseGroupQueryAttentionAttributes]],\n ['HardSigmoid', [unaryOps.hardSigmoid, unaryOps.parseHardSigmoidAttributes]],\n ['InstanceNormalization', [instanceNorm]],\n ['LayerNormalization', [layerNorm]],\n ['LeakyRelu', [unaryOps.leakyRelu, unaryOps.parseAlphaAttributes]],\n ['Less', [binaryOps.less]],\n ['LessOrEqual', [binaryOps.lessOrEqual]],\n ['Log', [unaryOps.log]],\n ['MatMul', [matMul]],\n ['MatMulNBits', [matMulNBits, parseMatMulNBitsAttributes]],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', [pool.maxPool, pool.parseMaxPoolAttributes]],\n ['Mul', [binaryOps.mul]],\n ['MultiHeadAttention', [multiHeadAttention, parseMultiHeadAttentionAttributes]],\n ['Neg', [unaryOps.neg]],\n ['Not', [unaryOps.not]],\n ['Pad', [pad]],\n ['Pow', [binaryOps.pow]],\n ['QuickGelu', [unaryOps.quickgelu, unaryOps.parseAlphaAttributes]],\n ['Range', [range]],\n ['Reciprocal', [unaryOps.reciprocal]],\n ['ReduceMin', [reduceMin]],\n ['ReduceMean', [reduceMean]],\n ['ReduceMax', [reduceMax]],\n ['ReduceSum', [reduceSum]],\n ['ReduceProd', [reduceProd]],\n ['ReduceL1', [reduceL1]],\n ['ReduceL2', [reduceL2]],\n ['ReduceLogSum', [reduceLogSum]],\n ['ReduceLogSumExp', [reduceLogSumExp]],\n ['ReduceSumSquare', [reduceSumSquare]],\n ['Relu', [unaryOps.relu]],\n ['Resize', [resize, parseResizeAttributes]],\n ['RotaryEmbedding', [rotaryEmbedding]],\n ['Sigmoid', [unaryOps.sigmoid]],\n ['Sin', [unaryOps.sin]],\n ['Sinh', [unaryOps.sinh]],\n ['Slice', [slice, parseSliceAttributes]],\n ['SkipLayerNormalization', 
[skipLayerNorm]],\n ['Split', [split, parseSplitAttributes]],\n ['Sqrt', [unaryOps.sqrt]],\n ['Softmax', [softmax, parseSoftmaxAttributes]],\n ['Sub', [binaryOps.sub]],\n ['Tan', [unaryOps.tan]],\n ['Tanh', [unaryOps.tanh]],\n ['ThresholdedRelu', [unaryOps.thresholdedRelu, unaryOps.parseAlphaAttributes]],\n ['Tile', [tile]],\n ['Transpose', [transpose, parseTransposeAttributes]],\n ['Where', [where]],\n]);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {createShaderHelper} from './ops/common';\nimport {Artifact, GpuData, ProgramInfo} from './types';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n attributesBound: boolean;\n\n constructor(private backend: WebGpuBackend) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: GpuData[], outputs: GpuData[], dispatchGroup: [number, number, number],\n uniformBufferBinding: GPUBindingResource|undefined): void {\n TRACE_FUNC_BEGIN(buildArtifact.programInfo.name);\n const device = this.backend.device;\n const computePassEncoder = this.backend.getComputePassEncoder();\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2);\n const entries = [];\n for (const input of inputs) {\n entries.push({binding: entries.length, resource: {buffer: input.buffer}});\n }\n for (const output of outputs) {\n entries.push({binding: entries.length, resource: {buffer: output.buffer}});\n }\n if (uniformBufferBinding) {\n entries.push({binding: entries.length, resource: uniformBufferBinding});\n }\n const bindGroup = device.createBindGroup(\n {layout: buildArtifact.computePipeline.getBindGroupLayout(0), entries, label: buildArtifact.programInfo.name});\n\n if (this.backend.sessionStatus === 'capturing') {\n const commandInfo = {\n kernelId: this.backend.currentKernelId!,\n computePipeline: buildArtifact.computePipeline,\n bindGroup,\n dispatchGroup\n };\n const sessionCommandList = this.backend.capturedCommandList.get(this.backend.currentSessionId!);\n sessionCommandList!.push(commandInfo);\n }\n\n computePassEncoder.setPipeline(buildArtifact.computePipeline);\n computePassEncoder.setBindGroup(0, bindGroup);\n computePassEncoder.dispatchWorkgroups(...dispatchGroup);\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2 + 1);\n this.backend.pendingDispatchNumber++;\n\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber ||\n this.backend.queryType === 'at-passes') {\n this.backend.endComputePass();\n }\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber) {\n this.backend.flush();\n }\n TRACE_FUNC_END(buildArtifact.programInfo.name);\n }\n dispose(): void {\n // this.repo.forEach(a => 
this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, normalizedDispatchGroupSize: [number, number, number]): Artifact {\n TRACE_FUNC_BEGIN(programInfo.name);\n const device = this.backend.device;\n const extensions: string[] = [];\n if (device.features.has('shader-f16')) {\n extensions.push('enable f16;');\n }\n const shaderHelper = createShaderHelper(normalizedDispatchGroupSize, this.backend.device.limits);\n const userCode = programInfo.getShaderSource(shaderHelper);\n const code = `${extensions.join('\\n')}\\n${shaderHelper.additionalImplementations}\\n${userCode}`;\n const shaderModule = device.createShaderModule({code, label: programInfo.name});\n LOG_DEBUG('verbose', () => `[WebGPU] ${programInfo.name} shader code: ${code}`);\n\n const computePipeline = device.createComputePipeline(\n {compute: {module: shaderModule, entryPoint: 'main'}, layout: 'auto', label: programInfo.name});\n\n TRACE_FUNC_END(programInfo.name);\n return {programInfo, computePipeline, uniformVariablesInfo: shaderHelper.variablesInfo};\n }\n\n normalizeDispatchGroupSize(dispatchGroup: ReturnType['dispatchGroup']):\n [number, number, number] {\n const x = typeof dispatchGroup === 'number' ? dispatchGroup : dispatchGroup.x;\n const y = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.y || 1);\n const z = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.z || 1);\n const limitPerDimension = this.backend.device.limits.maxComputeWorkgroupsPerDimension;\n if (x <= limitPerDimension && y <= limitPerDimension && z <= limitPerDimension) {\n return [x, y, z];\n }\n const size = x * y * z;\n let dispatchAverage = Math.ceil(Math.sqrt(size));\n if (dispatchAverage > limitPerDimension) {\n dispatchAverage = Math.ceil(Math.cbrt(size));\n if (dispatchAverage > limitPerDimension) {\n throw new Error('Total dispatch size exceeds WebGPU maximum.');\n }\n return [dispatchAverage, dispatchAverage, dispatchAverage];\n } else {\n return [dispatchAverage, dispatchAverage, 1];\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
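The normalizeDispatchGroupSize method above folds an oversized dispatch into two or three dimensions so that no single dimension exceeds maxComputeWorkgroupsPerDimension. A standalone sketch of the same redistribution, restructured for readability; the name `normalizeDispatch` is illustrative:

// Illustrative sketch: redistribute the total workgroup count over 2 (sqrt)
// or 3 (cbrt) dimensions when the requested dispatch exceeds the device limit.
function normalizeDispatch(x: number, y: number, z: number, limit: number): [number, number, number] {
  if (x <= limit && y <= limit && z <= limit) {
    return [x, y, z];
  }
  const size = x * y * z;
  let avg = Math.ceil(Math.sqrt(size));
  if (avg <= limit) {
    return [avg, avg, 1];
  }
  avg = Math.ceil(Math.cbrt(size));
  if (avg > limit) {
    throw new Error('Total dispatch size exceeds WebGPU maximum.');
  }
  return [avg, avg, avg];
}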
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env, Tensor, TRACE, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {DataType, tensorDataTypeEnumToString} from '../wasm-common';\n\nimport {configureLogger, LOG_DEBUG} from './log';\nimport {createView, TensorView} from './tensor-view';\nimport {createGpuDataManager, downloadGpuData, GpuDataManager} from './webgpu/gpu-data-manager';\nimport {RunFunction, WEBGPU_OP_RESOLVE_RULES} from './webgpu/op-resolve-rules';\nimport {ProgramManager} from './webgpu/program-manager';\nimport {AdapterInfo, ComputeContext, GpuArchitecture, GpuData, GpuVendor, ProgramInfo, ProgramInputTensorInfoDependency, SessionState, TimestampQuery} from './webgpu/types';\n\ninterface CommandInfo {\n readonly kernelId: number;\n readonly computePipeline: GPUComputePipeline;\n readonly bindGroup: GPUBindGroup;\n readonly dispatchGroup: [number, number, number];\n}\n\ninterface KernelInfo {\n readonly kernelType: string;\n readonly kernelName: string;\n readonly kernelEntry: RunFunction;\n readonly attributes: [((attribute: unknown) => unknown)|undefined, unknown];\n}\n\ninterface PendingKernelInfo {\n readonly kernelId: number;\n readonly programName: string;\n readonly inputTensorViews: readonly TensorView[];\n readonly outputTensorViews: readonly TensorView[];\n}\n\nconst getProgramInputTensorInfoDependencyKey =\n (inputTensors: readonly TensorView[], inputDependencies: readonly ProgramInputTensorInfoDependency[]): string => {\n if (inputDependencies.length !== inputTensors.length) {\n throw new Error(`inputDependencies length ${inputDependencies.length} is not equal to inputTensors length ${\n inputTensors.length}.`);\n }\n\n const inputInfos: string[] = [];\n for (let i = 0; i < inputTensors.length; ++i) {\n const type = inputTensors[i].dataType;\n switch (inputDependencies[i]) {\n case 'none': {\n inputInfos.push('');\n break;\n }\n case 'type': {\n inputInfos.push(`${type}`);\n break;\n }\n case 'rank': {\n const rank = inputTensors[i].dims.length;\n inputInfos.push(`${type};${rank}`);\n break;\n }\n case 'dims': {\n const dims = inputTensors[i].dims.join(',');\n inputInfos.push(`${type};${dims}`);\n break;\n }\n default:\n throw new Error(`unsupported input dependency: ${inputDependencies[i]}`);\n }\n }\n\n return inputInfos.join('|');\n };\n\n/**\n * get a unique key representing the program from the program info, input shapes and types.\n *\n * @returns a unique key is a shorter string than the shader source, which contains all the information to identify a\n * program. 
if the key is the same, the program shader source should be the same, so we can reuse the program.\n *\n */\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo, inputTensors: readonly TensorView[], is1DimensionDispatch: boolean): string => {\n // final key format:\n // []:is1DimensionDispatch:||...\n let key = programInfo.name;\n if (programInfo.shaderCache?.hint) {\n key += '[' + programInfo.shaderCache.hint + ']';\n }\n key += ':' + is1DimensionDispatch +\n `:${\n getProgramInputTensorInfoDependencyKey(\n inputTensors,\n programInfo.shaderCache?.inputDependencies ??\n new Array(inputTensors.length).fill('dims'))}`;\n return key;\n };\n\nclass AdapterInfoImpl implements AdapterInfo {\n readonly architecture?: string;\n readonly vendor?: string;\n\n constructor(adapterInfo: GPUAdapterInfo) {\n if (adapterInfo) {\n this.architecture = adapterInfo.architecture;\n this.vendor = adapterInfo.vendor;\n }\n }\n\n isArchitecture(architecture: GpuArchitecture): boolean {\n return this.architecture === architecture;\n }\n\n isVendor(vendor: GpuVendor): boolean {\n return this.vendor === vendor;\n }\n}\n\n/**\n * this class is designed to store status and being used as a singleton for JSEP. It will be passed to jsepInit() as\n * the first parameter so that it is stored for future use.\n */\nexport class WebGpuBackend {\n adapterInfo: AdapterInfoImpl;\n device: GPUDevice;\n /**\n * an instance of GpuDataManager to manage a GpuDataId -> GpuBuffer mapping\n */\n gpuDataManager: GpuDataManager;\n /**\n * an instance of ProgramManager to build and run WebGPU compute shader program, and manage a ProgramKey -> Program\n * artifacts mapping\n */\n programManager: ProgramManager;\n\n /**\n * representing the session ID of which is currently being run.\n * `null` means no session is being run.\n * only valid when session.run is executed.\n */\n currentSessionId: number|null = null;\n\n /**\n * representing the kernel ID of which is currently being computed (CPU code perspective).\n * `null` means no kernel is being computed.\n * only one kernel can be computed at a moment.\n */\n currentKernelId: number|null = null;\n /**\n * a list of temporary GPU data for the current kernel. should release when the kernel done computation.\n */\n private temporaryData: GpuData[];\n /**\n * a KernelID -> a GPU data list, which stores persistent GPU data owned by the specific kernel.\n */\n private kernelPersistentData: Map;\n /**\n * a KernelID -> a custom data, which stores custom data owned by the specific kernel.\n */\n private kernelCustomData: Map;\n /**\n * get the custom data of the current kernel\n */\n get currentKernelCustomData(): {[key: string]: unknown} {\n if (this.currentKernelId === null) {\n throw new Error('currentKernelCustomData(): currentKernelId is null. 
(should not happen)');\n }\n\n let data = this.kernelCustomData.get(this.currentKernelId);\n if (!data) {\n data = {};\n this.kernelCustomData.set(this.currentKernelId, data);\n }\n\n return data;\n }\n\n // KernelID -> kernelInfo mapping\n kernels: Map;\n private commandEncoder: GPUCommandEncoder|null = null;\n private computePassEncoder: GPUComputePassEncoder|null = null;\n maxDispatchNumber = 16;\n pendingDispatchNumber = 0;\n\n // info of kernels pending submission for a single batch\n private pendingKernels: PendingKernelInfo[] = [];\n // queryReadBuffer -> pendingKernels mapping for all the batches\n private pendingQueries: Map = new Map();\n private queryResolveBuffer?: GPUBuffer;\n private querySet?: GPUQuerySet;\n private queryTimeBase?: bigint;\n queryType: TimestampQuery;\n\n env: Env;\n sessionStatus: SessionState = 'default';\n /**\n * a SessionID -> CommandInfo[] mapping. It's used to record all GPU commands for corresponding session.\n */\n capturedCommandList: Map = new Map();\n\n /**\n * a SessionID -> PendingKernelInfo[] mapping for profiling.\n */\n private capturedPendingKernels: Map = new Map();\n\n /**\n * a SessionID -> a Map of (InputOutputIndex -> [ID, GPUBuffer]) mapping.\n */\n sessionExternalDataMapping: Map> = new Map();\n\n async initialize(env: Env, adapter: GPUAdapter): Promise {\n this.env = env;\n const requiredFeatures: GPUFeatureName[] = [];\n const deviceDescriptor: GPUDeviceDescriptor = {\n requiredLimits: {\n maxComputeWorkgroupStorageSize: adapter.limits.maxComputeWorkgroupStorageSize,\n maxComputeWorkgroupsPerDimension: adapter.limits.maxComputeWorkgroupsPerDimension,\n maxStorageBufferBindingSize: adapter.limits.maxStorageBufferBindingSize,\n maxBufferSize: adapter.limits.maxBufferSize,\n maxComputeInvocationsPerWorkgroup: adapter.limits.maxComputeInvocationsPerWorkgroup,\n maxComputeWorkgroupSizeX: adapter.limits.maxComputeWorkgroupSizeX,\n maxComputeWorkgroupSizeY: adapter.limits.maxComputeWorkgroupSizeY,\n maxComputeWorkgroupSizeZ: adapter.limits.maxComputeWorkgroupSizeZ,\n },\n requiredFeatures,\n };\n\n if (adapter.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n requiredFeatures.push('chromium-experimental-timestamp-query-inside-passes' as GPUFeatureName);\n } else if (adapter.features.has('timestamp-query')) {\n requiredFeatures.push('timestamp-query');\n }\n if (adapter.features.has('shader-f16')) {\n requiredFeatures.push('shader-f16');\n }\n\n this.device = await adapter.requestDevice(deviceDescriptor);\n this.adapterInfo = new AdapterInfoImpl(adapter.info || await adapter.requestAdapterInfo());\n this.gpuDataManager = createGpuDataManager(this);\n this.programManager = new ProgramManager(this);\n this.kernels = new Map();\n this.kernelPersistentData = new Map();\n this.kernelCustomData = new Map();\n\n // set up flags for logger\n configureLogger(env.logLevel!, !!env.debug);\n\n // TODO: set up flags\n\n this.device.onuncapturederror = ev => {\n if (ev.error instanceof GPUValidationError) {\n // eslint-disable-next-line no-console\n console.error(`An uncaught WebGPU validation error was raised: ${ev.error.message}`);\n }\n };\n\n Object.defineProperty(\n this.env.webgpu, 'device', {value: this.device, writable: false, enumerable: true, configurable: false});\n Object.defineProperty(\n this.env.webgpu, 'adapter', {value: adapter, writable: false, enumerable: true, configurable: false});\n\n // init queryType, which is necessary for InferenceSession.create\n this.setQueryType();\n }\n\n dispose(): void {\n if 
(typeof this.querySet !== 'undefined') {\n this.querySet.destroy();\n }\n this.gpuDataManager.dispose();\n }\n\n getCommandEncoder(): GPUCommandEncoder {\n if (!this.commandEncoder) {\n this.commandEncoder = this.device.createCommandEncoder();\n }\n return this.commandEncoder;\n }\n\n getComputePassEncoder(): GPUComputePassEncoder {\n if (!this.computePassEncoder) {\n const commandEncoder = this.getCommandEncoder();\n const computePassDescriptor: GPUComputePassDescriptor = {};\n\n if (this.queryType === 'at-passes') {\n computePassDescriptor.timestampWrites = {\n querySet: this.querySet!,\n beginningOfPassWriteIndex: this.pendingDispatchNumber * 2,\n endOfPassWriteIndex: this.pendingDispatchNumber * 2 + 1,\n };\n }\n\n this.computePassEncoder = commandEncoder.beginComputePass(computePassDescriptor);\n }\n return this.computePassEncoder;\n }\n\n endComputePass(): void {\n if (this.computePassEncoder) {\n this.computePassEncoder.end();\n this.computePassEncoder = null;\n }\n }\n\n flush(): void {\n if (!this.commandEncoder) {\n return;\n }\n\n TRACE_FUNC_BEGIN();\n\n this.endComputePass();\n let queryReadBuffer: GPUBuffer;\n if (this.queryType !== 'none') {\n this.commandEncoder.resolveQuerySet(\n this.querySet!, 0, this.pendingDispatchNumber * 2, this.queryResolveBuffer!, 0);\n\n queryReadBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.pendingDispatchNumber * 2 * 8, usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST});\n\n this.pendingQueries.set(queryReadBuffer, this.pendingKernels);\n this.pendingKernels = [];\n this.commandEncoder.copyBufferToBuffer(\n this.queryResolveBuffer!, 0, queryReadBuffer, 0, this.pendingDispatchNumber * 2 * 8);\n }\n\n this.device.queue.submit([this.commandEncoder.finish()]);\n this.gpuDataManager.refreshPendingBuffers();\n this.commandEncoder = null;\n this.pendingDispatchNumber = 0;\n\n if (this.queryType !== 'none') {\n void queryReadBuffer!.mapAsync(GPUMapMode.READ).then(() => {\n const mappedData = new BigUint64Array(queryReadBuffer.getMappedRange());\n const pendingKernels = this.pendingQueries.get(queryReadBuffer)!;\n for (let i = 0; i < mappedData.length / 2; i++) {\n const pendingKernelInfo = pendingKernels[i];\n const kernelId = pendingKernelInfo.kernelId;\n const kernelInfo = this.kernels.get(kernelId)!;\n const kernelType = kernelInfo.kernelType;\n const kernelName = kernelInfo.kernelName;\n const programName = pendingKernelInfo.programName;\n const inputTensorViews = pendingKernelInfo.inputTensorViews;\n const outputTensorViews = pendingKernelInfo.outputTensorViews;\n const startTimeU64 = mappedData[i * 2];\n const endTimeU64 = mappedData[i * 2 + 1];\n\n if (typeof this.queryTimeBase === 'undefined') {\n this.queryTimeBase = startTimeU64;\n }\n\n const startTime = Number(startTimeU64 - this.queryTimeBase);\n const endTime = Number(endTimeU64 - this.queryTimeBase);\n\n if (!Number.isSafeInteger(startTime) || !Number.isSafeInteger(endTime)) {\n throw new RangeError('incorrect timestamp range');\n }\n\n if (this.env.webgpu.profiling?.ondata) {\n this.env.webgpu.profiling.ondata({\n version: 1,\n inputsMetadata: inputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n outputsMetadata: outputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n kernelId,\n kernelType,\n kernelName,\n programName,\n startTime,\n endTime,\n });\n } else {\n // if no callback is provided, print the profiling message to 
console\n let inputShapes = '';\n inputTensorViews.forEach((value, i) => {\n inputShapes += `input[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n let outputShapes = '';\n outputTensorViews.forEach((value, i) => {\n outputShapes += `output[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n // eslint-disable-next-line no-console\n console.log(`[profiling] kernel \"${kernelId}|${kernelType}|${kernelName}|${programName}\" ${inputShapes}${\n outputShapes}execution time: ${endTime - startTime} ns`);\n }\n TRACE('GPU', `${programName}::${startTimeU64}::${endTimeU64}`);\n }\n queryReadBuffer.unmap();\n this.pendingQueries.delete(queryReadBuffer);\n });\n }\n TRACE_FUNC_END();\n }\n\n /**\n * run a WebGPU program.\n * @param program a ProgramInfo instance\n * @param inputTensorViews a TensorView array. each element represents a value already exists in GPU.\n * @param outputIndices an indices array. each element can be either -1 (temporary data), -2 (persistent data) or an\n * index to the kernel's output.\n * @param createKernelOutput a callback function that create a value to kernel's output with the given index\n * @param createIntermediateOutput a callback function that create a value as a intermediate value, either temporary\n * or persistent (owned by the current kernel)\n * @returns a TensorView array representing the result.\n */\n run(program: ProgramInfo, inputTensorViews: readonly TensorView[], outputIndices: readonly number[],\n createKernelOutput: (index: number, dataType: number, dims: readonly number[]) => TensorView,\n createIntermediateOutput: (dataType: number, dims: readonly number[]) => TensorView,\n outputCount: number): TensorView[] {\n TRACE_FUNC_BEGIN(program.name);\n // create info for inputs\n const inputDatas: GpuData[] = [];\n for (let i = 0; i < inputTensorViews.length; ++i) {\n const data = inputTensorViews[i].data;\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(data);\n if (!gpuData) {\n throw new Error(`no GPU data for input: ${data}`);\n }\n inputDatas.push(gpuData);\n }\n\n const {outputs, dispatchGroup, programUniforms} = program.getRunData(inputTensorViews);\n\n // check output indices\n const validatedOutputIndices = outputIndices.length === 0 ? outputs.map((_, i) => i) : outputIndices;\n if (validatedOutputIndices.length !== outputs.length) {\n throw new Error(`Output size ${validatedOutputIndices.length} must be equal to ${outputs.length}.`);\n }\n\n // create info for outputs\n const outputTensorViews: TensorView[] = [];\n const outputDatas: GpuData[] = [];\n for (let i = 0; i < outputs.length; ++i) {\n // value -1 and -2 are used for creating temporary and persistent outputs.\n // value -3 is used for placeholder output. So -3, -2, -1 and 0, 1, 2, ... are valid\n // output indices. 
see type definition of ComputeContextInputsOutputsMapping for more details.\n if (!Number.isInteger(validatedOutputIndices[i]) || validatedOutputIndices[i] < -3 ||\n validatedOutputIndices[i] >= outputCount) {\n throw new Error(`Invalid output index: ${validatedOutputIndices[i]}`);\n }\n if (validatedOutputIndices[i] === -3) {\n continue;\n }\n const isTemporary = validatedOutputIndices[i] === -1;\n const isPersistent = validatedOutputIndices[i] === -2;\n const tensorView = (isTemporary || isPersistent) ?\n createIntermediateOutput(outputs[i].dataType, outputs[i].dims) :\n createKernelOutput(validatedOutputIndices[i], outputs[i].dataType, outputs[i].dims);\n outputTensorViews.push(tensorView);\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (tensorView.data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(tensorView.data);\n if (!gpuData) {\n throw new Error(`no GPU data for output: ${tensorView.data}`);\n }\n if (isTemporary) {\n this.temporaryData.push(gpuData);\n }\n if (isPersistent) {\n let persistentData = this.kernelPersistentData.get(this.currentKernelId!);\n if (!persistentData) {\n persistentData = [];\n this.kernelPersistentData.set(this.currentKernelId!, persistentData);\n }\n persistentData.push(gpuData);\n }\n outputDatas.push(gpuData);\n }\n\n // when there are any zero-sized tensor in the inputs or outputs, we should report error unless all outputs are\n // zero-sized tensors.\n if (inputDatas.length !== inputTensorViews.length || outputDatas.length !== outputTensorViews.length) {\n // if all outputs are zero-sized tensors, there is no need to run the program.\n if (outputDatas.length === 0) {\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n // if some outputs are zero-sized tensors, report an error.\n //\n // TODO: so far we don't see any use case that outputs include both zero-sized tensors and non-zero-sized tensors.\n // If we see such use case, we need to make a change here to support it.\n throw new Error(\n `Program ${program.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`);\n }\n\n // load uniforms\n // TODO: add cache for uniform (is it necessary?)\n //\n let uniformBufferBinding: GPUBindingResource|undefined;\n if (programUniforms) {\n let currentOffset = 0;\n const offsets: number[] = [];\n\n programUniforms.forEach(v => {\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (data.length === 0) {\n return;\n }\n // https://www.w3.org/TR/WGSL/#alignof\n const sizeOfElement = v.type === DataType.float16 ? 2 : 4;\n let sizeOfVecOrMat;\n let baseAlignment;\n if (v.type === DataType.float16) {\n baseAlignment = data.length > 4 ? 16 : (data.length > 2 ? 8 : data.length * sizeOfElement);\n sizeOfVecOrMat = data.length > 4 ? 16 : sizeOfElement * data.length;\n } else {\n baseAlignment = data.length <= 2 ? data.length * sizeOfElement : 16;\n sizeOfVecOrMat = 16;\n }\n currentOffset = Math.ceil(currentOffset / baseAlignment) * baseAlignment;\n offsets.push(currentOffset);\n // For non-float16 type, when data.length > 4, the uniform variable is of type array,N>, where\n // N = Math.ceil(data.length / 4) and SizeOf(vec4) = 16. The total byte length is N *\n // SizeOf(vec4). For float16 type, when data.length > 4, the uniform variable is of type\n // array,N>, where N = Math.ceil(data.length / 8) and SizeOf(mat2x4) = 16. The total byte\n // length is N * SizeOf(mat2x4).\n const elementPerVecOrMat = v.type === DataType.float16 ? 
8 : 4;\n currentOffset += data.length > 4 ? Math.ceil(data.length / elementPerVecOrMat) * sizeOfVecOrMat :\n data.length * sizeOfElement;\n });\n\n // Meet alignment of struct here: https://www.w3.org/TR/WGSL/#alignment-and-size. For simplicity, set\n // maxAlignmentOfField to 16 since the underlying buffer has been rounded up to 16.\n const maxAlignmentOfField = 16;\n currentOffset = Math.ceil(currentOffset / maxAlignmentOfField) * maxAlignmentOfField;\n const arrayBuffer = new ArrayBuffer(currentOffset);\n programUniforms.forEach((v, i) => {\n const offset = offsets[i];\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (v.type === DataType.int32) {\n new Int32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.uint32) {\n new Uint32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float16) {\n // TODO: use Float16Array.\n new Uint16Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float) {\n new Float32Array(arrayBuffer, offset, data.length).set(data);\n } else {\n throw new Error(`Unsupported uniform type: ${tensorDataTypeEnumToString(v.type)}`);\n }\n });\n\n const uniformBufferData =\n // eslint-disable-next-line no-bitwise\n this.gpuDataManager.create(currentOffset, GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM);\n this.device.queue.writeBuffer(uniformBufferData.buffer, 0, arrayBuffer, 0, currentOffset);\n this.gpuDataManager.release(uniformBufferData.id);\n uniformBufferBinding = {offset: 0, size: currentOffset, buffer: uniformBufferData.buffer};\n }\n\n const normalizedDispatchGroup = this.programManager.normalizeDispatchGroupSize(dispatchGroup);\n const is1DimensionDispatch = normalizedDispatchGroup[1] === 1 && normalizedDispatchGroup[2] === 1;\n // get program info\n const key = getProgramInfoUniqueKey(program, inputTensorViews, is1DimensionDispatch);\n let artifact = this.programManager.getArtifact(key);\n if (!artifact) {\n artifact = this.programManager.build(program, normalizedDispatchGroup);\n this.programManager.setArtifact(key, artifact);\n LOG_DEBUG('info', () => `[artifact] key: ${key}, programName: ${program.name}`);\n }\n\n // validate uniform variables\n if (programUniforms && artifact.uniformVariablesInfo) {\n if (programUniforms.length !== artifact.uniformVariablesInfo.length) {\n throw new Error(`Uniform variables count mismatch: expect ${artifact.uniformVariablesInfo.length}, got ${\n programUniforms.length} in program \"${artifact.programInfo.name}\".`);\n }\n for (let i = 0; i < programUniforms.length; i++) {\n const uniform = programUniforms[i];\n const actualType = uniform.type;\n const actualLength = typeof uniform.data === 'number' ? 
1 : uniform.data.length;\n const [type, length] = artifact.uniformVariablesInfo[i];\n if (actualType !== type || actualLength !== length) {\n throw new Error(`Uniform variable ${i} mismatch: expect type ${type} with size ${length}, got type ${\n actualType} with size ${actualLength} in program \"${artifact.programInfo.name}\".`);\n }\n }\n }\n\n LOG_DEBUG(\n 'info',\n () => `[ProgramManager] run \"${program.name}\" (key=${key}) with ${normalizedDispatchGroup[0]}x${\n normalizedDispatchGroup[1]}x${normalizedDispatchGroup[2]}`);\n\n if (this.queryType !== 'none' || this.sessionStatus === 'capturing') {\n const pendingKernelInfo: PendingKernelInfo = {\n kernelId: this.currentKernelId!,\n programName: artifact.programInfo.name,\n inputTensorViews,\n outputTensorViews,\n };\n this.pendingKernels.push(pendingKernelInfo);\n\n if (this.sessionStatus === 'capturing') {\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n sessionPendingKernels!.push(pendingKernelInfo);\n }\n }\n\n this.programManager.run(artifact, inputDatas, outputDatas, normalizedDispatchGroup, uniformBufferBinding);\n\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n\n upload(gpuDataId: number, data: Uint8Array): void {\n this.gpuDataManager.upload(gpuDataId, data);\n }\n\n memcpy(src: number, dst: number): void {\n this.gpuDataManager.memcpy(src, dst);\n }\n\n async download(gpuDataId: number, getTargetBuffer: () => Uint8Array): Promise {\n // the underlying buffer may be changed after the async function is called. so we use a getter function to make sure\n // the buffer is up-to-date.\n await this.gpuDataManager.download(gpuDataId, getTargetBuffer);\n }\n\n alloc(size: number): number {\n return this.gpuDataManager.create(size).id;\n }\n\n free(ptr: number): number {\n return this.gpuDataManager.release(ptr);\n }\n\n createKernel(kernelType: string, kernelId: number, attribute: unknown, kernelName: string): void {\n const op = WEBGPU_OP_RESOLVE_RULES.get(kernelType);\n if (!op) {\n throw new Error(`kernel not implemented: ${kernelType}`);\n }\n\n const kernelInfo: KernelInfo = {\n kernelType,\n kernelName,\n kernelEntry: op[0],\n attributes: [op[1], attribute],\n };\n this.kernels.set(kernelId, kernelInfo);\n }\n\n releaseKernel(kernelId: number): void {\n const persistentData = this.kernelPersistentData.get(kernelId);\n if (persistentData) {\n for (const data of persistentData) {\n this.gpuDataManager.release(data.id);\n }\n this.kernelPersistentData.delete(kernelId);\n }\n\n this.kernelCustomData.delete(kernelId);\n this.kernels.delete(kernelId);\n }\n\n computeKernel(kernelId: number, context: ComputeContext, errors: Array>): number {\n const kernel = this.kernels.get(kernelId);\n if (!kernel) {\n throw new Error(`kernel not created: ${kernelId}`);\n }\n const kernelType = kernel.kernelType;\n const kernelName = kernel.kernelName;\n const kernelEntry = kernel.kernelEntry;\n const attributes = kernel.attributes;\n if (this.currentKernelId !== null) {\n throw new Error(`kernel \"[${kernelType}] ${kernelName}\" is not allowed to be called recursively`);\n }\n this.currentKernelId = kernelId;\n\n // parse attributes if necessary\n if (attributes[0]) {\n attributes[1] = attributes[0](attributes[1]);\n attributes[0] = undefined;\n }\n\n LOG_DEBUG('info', () => `[WebGPU] Start to run kernel \"[${kernelType}] ${kernelName}\"...`);\n\n const useErrorScope = this.env.debug;\n\n this.temporaryData = [];\n try {\n if (useErrorScope) {\n this.device.pushErrorScope('validation');\n 
}\n\n kernelEntry(context, attributes[1]);\n return 0; // ORT_OK\n } catch (e) {\n errors.push(Promise.resolve(`[WebGPU] Kernel \"[${kernelType}] ${kernelName}\" failed. ${e}`));\n return 1; // ORT_FAIL\n } finally {\n if (useErrorScope) {\n errors.push(this.device.popErrorScope().then(\n err => err ? `GPU validation error for kernel \"[${kernelType}] ${kernelName}\": ${err.message}` : null));\n }\n\n for (const data of this.temporaryData) {\n this.gpuDataManager.release(data.id);\n }\n this.temporaryData = [];\n this.currentKernelId = null;\n }\n }\n\n // #region external buffer\n registerBuffer(sessionId: number, index: number, buffer: GPUBuffer, size: number): number {\n let sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (!sessionInputOutputMapping) {\n sessionInputOutputMapping = new Map();\n this.sessionExternalDataMapping.set(sessionId, sessionInputOutputMapping);\n }\n\n const previousBuffer = sessionInputOutputMapping.get(index);\n const id = this.gpuDataManager.registerExternalBuffer(buffer, size, previousBuffer?.[1]);\n sessionInputOutputMapping.set(index, [id, buffer]);\n return id;\n }\n unregisterBuffers(sessionId: number): void {\n const sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (sessionInputOutputMapping) {\n sessionInputOutputMapping.forEach(bufferInfo => this.gpuDataManager.unregisterExternalBuffer(bufferInfo[1]));\n this.sessionExternalDataMapping.delete(sessionId);\n }\n }\n getBuffer(gpuDataId: number): GPUBuffer {\n const gpuData = this.gpuDataManager.get(gpuDataId);\n if (!gpuData) {\n throw new Error(`no GPU data for buffer: ${gpuDataId}`);\n }\n return gpuData.buffer;\n }\n createDownloader(gpuBuffer: GPUBuffer, size: number, type: Tensor.GpuBufferDataTypes):\n () => Promise {\n return async () => {\n const data = await downloadGpuData(this, gpuBuffer, size);\n return createView(data.buffer, type);\n };\n }\n // #endregion\n writeTimestamp(index: number): void {\n if (this.queryType !== 'inside-passes') {\n return;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (this.computePassEncoder as any).writeTimestamp(this.querySet, index);\n }\n setQueryType(): void {\n this.queryType = 'none';\n if (this.env.webgpu.profiling?.mode === 'default' ||\n (typeof this.env.trace === 'undefined' ? 
this.env.wasm.trace : this.env.trace)) {\n if (this.device.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n this.queryType = 'inside-passes';\n } else if (this.device.features.has('timestamp-query')) {\n this.queryType = 'at-passes';\n }\n\n if (this.queryType !== 'none' && typeof this.querySet === 'undefined') {\n this.querySet = this.device.createQuerySet({\n type: 'timestamp',\n count: this.maxDispatchNumber * 2,\n });\n this.queryResolveBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.maxDispatchNumber * 2 * 8, usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE});\n }\n }\n }\n\n captureBegin(): void {\n LOG_DEBUG('info', 'captureBegin');\n if (!this.capturedCommandList.get(this.currentSessionId!)) {\n this.capturedCommandList.set(this.currentSessionId!, []);\n }\n if (!this.capturedPendingKernels.get(this.currentSessionId!)) {\n this.capturedPendingKernels.set(this.currentSessionId!, []);\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'capturing';\n }\n captureEnd(): void {\n LOG_DEBUG('info', 'captureEnd');\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n replay(): void {\n LOG_DEBUG('info', 'replay');\n this.sessionStatus = 'replaying';\n const sessionCommandList = this.capturedCommandList.get(this.currentSessionId!);\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n const length = sessionCommandList!.length;\n this.pendingKernels = [];\n for (let i = 0; i < length; i++) {\n const computePassEncoder = this.getComputePassEncoder();\n const command = sessionCommandList![i];\n this.writeTimestamp(this.pendingDispatchNumber * 2);\n computePassEncoder.setPipeline(command.computePipeline);\n computePassEncoder.setBindGroup(0, command.bindGroup);\n computePassEncoder.dispatchWorkgroups(...command.dispatchGroup);\n this.writeTimestamp(this.pendingDispatchNumber * 2 + 1);\n this.pendingDispatchNumber++;\n if (this.queryType !== 'none') {\n this.pendingKernels.push(sessionPendingKernels![i]);\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber || this.queryType === 'at-passes') {\n this.endComputePass();\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber) {\n this.flush();\n }\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n\n onReleaseSession(sessionId: number): void {\n this.unregisterBuffers(sessionId);\n if (this.capturedCommandList.has(sessionId)) {\n this.capturedCommandList.delete(sessionId);\n }\n if (this.capturedPendingKernels.has(sessionId)) {\n this.capturedPendingKernels.delete(sessionId);\n }\n this.gpuDataManager.onReleaseSession(sessionId);\n }\n\n onRunStart(sessionId: number): void {\n this.currentSessionId = sessionId;\n this.setQueryType();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport type {OrtWasmModule} from '../wasm-types';\nimport {DataType, getTensorElementSize} from '../wasm-common';\n\nimport {WebGpuBackend} from './backend-webgpu';\nimport {LOG_DEBUG} from './log';\nimport {TensorView} from './tensor-view';\nimport {ShapeUtil} from './util';\nimport {AdapterInfo, ComputeContext, ComputeContextInputsOutputsMapping, ProgramInfo} from './webgpu/types';\n\n/* eslint-disable no-bitwise */\n\nclass TensorViewImpl implements TensorView {\n constructor(\n private module: OrtWasmModule, public readonly dataType: number, public readonly data: number,\n public readonly dims: readonly number[]) {}\n\n getFloat32Array(): Float32Array {\n if (this.dataType !== DataType.float) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Float32Array() :\n new Float32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getBigInt64Array(): BigInt64Array {\n if (this.dataType !== DataType.int64) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new BigInt64Array() :\n new BigInt64Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getInt32Array(): Int32Array {\n if (this.dataType !== DataType.int32) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Int32Array() : new Int32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n reshape(newDims: readonly number[]): TensorView {\n if (ShapeUtil.size(newDims) !== ShapeUtil.size(this.dims)) {\n throw new Error('Invalid new shape');\n }\n return new TensorViewImpl(this.module, this.dataType, this.data, newDims);\n }\n}\n\nclass ComputeContextImpl implements ComputeContext {\n readonly adapterInfo: AdapterInfo;\n readonly opKernelContext: number;\n readonly inputs: readonly TensorView[];\n readonly outputCount: number;\n get kernelCustomData(): {[key: string]: unknown} {\n return this.backend.currentKernelCustomData;\n }\n get customDataBuffer(): Uint8Array {\n return this.module.HEAPU8.subarray(this.customDataOffset, this.customDataOffset + this.customDataSize);\n }\n private customDataOffset = 0;\n private customDataSize = 0;\n constructor(private module: OrtWasmModule, private backend: WebGpuBackend, contextDataOffset: number) {\n this.adapterInfo = backend.adapterInfo;\n const heapU32 = module.HEAPU32;\n\n // extract context data\n let dataIndex = (contextDataOffset >>> 2);\n this.opKernelContext = heapU32[dataIndex++];\n const inputCount = heapU32[dataIndex++];\n this.outputCount = heapU32[dataIndex++];\n this.customDataOffset = heapU32[dataIndex++];\n this.customDataSize = heapU32[dataIndex++];\n\n const inputs: TensorView[] = [];\n for (let i = 0; i < inputCount; i++) {\n const dataType = heapU32[dataIndex++];\n const data = heapU32[dataIndex++];\n const dim = heapU32[dataIndex++];\n const dims: number[] = [];\n for (let d = 0; d < dim; d++) {\n dims.push(heapU32[dataIndex++]);\n }\n inputs.push(new TensorViewImpl(module, dataType, data, dims));\n }\n this.inputs = inputs;\n }\n\n getMaxComputeWorkgroupSizes(): [number, number, number] {\n return [\n this.backend.device.limits.maxComputeWorkgroupSizeX, this.backend.device.limits.maxComputeWorkgroupSizeY,\n this.backend.device.limits.maxComputeWorkgroupSizeZ\n ];\n }\n\n getMaxComputeWorkgroupStoragesize(): number 
{\n return this.backend.device.limits.maxComputeWorkgroupStorageSize;\n }\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[] {\n // prepare inputs. inputs should always be valid data.\n const mappedInputs =\n inputsOutputsMapping?.inputs?.map(i => typeof i === 'number' ? this.inputs[i] : i) ?? this.inputs;\n // prepare outputs.\n const outputIndices = inputsOutputsMapping?.outputs ?? [];\n const createKernelOutput = (index: number, dataType: number, dims: readonly number[]): TensorView =>\n new TensorViewImpl(this.module, dataType, this.output(index, dims), dims);\n const createTemporaryOutput = (dataType: number, dims: readonly number[]): TensorView => {\n const elementSize = getTensorElementSize(dataType);\n if (!elementSize) {\n throw new Error(`Unsupported data type: ${dataType}`);\n }\n const bufferSize = elementSize * ShapeUtil.size(dims);\n const gpuDataId = bufferSize > 0 ? this.backend.gpuDataManager.create(bufferSize).id : 0;\n return new TensorViewImpl(this.module, dataType, gpuDataId, dims);\n };\n return this.backend.run(\n program, mappedInputs, outputIndices, createKernelOutput, createTemporaryOutput, this.outputCount);\n }\n\n output(index: number, dims: readonly number[]): number {\n const stack = this.module.stackSave();\n try {\n const data = this.module.stackAlloc((1 + dims.length) * 4 /* sizeof(size_t) */);\n let offset = data >> 2;\n this.module.HEAPU32[offset++] = dims.length;\n for (let i = 0; i < dims.length; i++) {\n this.module.HEAPU32[offset++] = dims[i];\n }\n return this.module._JsepOutput!(this.opKernelContext, index, data);\n } catch (e) {\n throw new Error(\n `Failed to generate kernel's output[${index}] with dims [${dims}]. ` +\n 'If you are running with pre-allocated output, please make sure the output type/dims are correct. ' +\n `Error: ${e}`);\n } finally {\n this.module.stackRestore(stack);\n }\n }\n}\n\n/**\n * Initialize JSEP with WebGPU backend.\n *\n * This function will be called after the WebAssembly module is loaded and initialized (\"_OrtInit\" is called), once for\n * each of the following EPs if they are specified:\n * - \"webgpu\"\n * - \"webnn\"\n *\n * For WebGPU, this function expects:\n * - WebGPU is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebGPU is available in current environment. (a valid GPUAdapter is passed in)\n *\n * For WebNN, this function expects:\n * - WebNN is enabled in build (BUILD_DEFS.DISABLE_JSEP === false).\n * - WebNN is available in current environment. (navigator.ml is not undefined)\n *\n * If the WebAssembly module is not built with JSEP support, this function will throw an error. This will invalidate\n * 'webgpu'/'webnn' backend.\n *\n * @param name - the name of the EP, either \"webgpu\" or \"webnn\"\n * @param module - the ORT WebAssembly module\n * @param env - the ORT environment variable (ort.env)\n * @param gpuAdapter - the pre-created GPU adapter\n */\nexport const init =\n async(name: 'webgpu'|'webnn', module: OrtWasmModule, env: Env, gpuAdapter?: GPUAdapter): Promise => {\n const jsepInit = module.jsepInit;\n if (!jsepInit) {\n throw new Error('Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.');\n }\n\n if (name === 'webgpu') {\n const backend = new WebGpuBackend();\n await backend.initialize(env, gpuAdapter!);\n\n jsepInit('webgpu', [\n // backend\n backend,\n\n // jsepAlloc()\n (size: number) => backend.alloc(size),\n\n // jsepFree()\n (ptr: number) => backend.free(ptr),\n\n // jsepCopy(src, dst, size, isSourceGpu)\n (src: number, dst: number, size: number, isSourceGpu = false) => {\n if (isSourceGpu) {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyGpuToGpu: src=${src}, dst=${dst}, size=${size}`);\n backend.memcpy(src, dst);\n } else {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyCpuToGpu: dataOffset=${src}, gpuDataId=${dst}, size=${size}`);\n const data = module.HEAPU8.subarray(src >>> 0, (src >>> 0) + size);\n backend.upload(dst, data);\n }\n },\n\n // jsepCopyAsync(src, dst, size)\n async(gpuDataId: number, dataOffset: number, size: number):\n Promise => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepCopyGpuToCpu: gpuDataId=${gpuDataId}, dataOffset=${dataOffset}, size=${size}`);\n\n await backend.download(\n gpuDataId, () => module.HEAPU8.subarray(dataOffset >>> 0, (dataOffset >>> 0) + size));\n },\n\n // jsepCreateKernel\n (kernelType: string, kernelId: number, attribute: unknown) => backend.createKernel(\n kernelType, kernelId, attribute, module.UTF8ToString(module._JsepGetNodeName!(kernelId))),\n\n // jsepReleaseKernel\n (kernel: number) => backend.releaseKernel(kernel),\n\n // jsepRun\n (kernel: number, contextDataOffset: number, sessionHandle: number, errors: Array>) => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepRun: sessionHandle=${sessionHandle}, kernel=${kernel}, contextDataOffset=${\n contextDataOffset}`);\n const context = new ComputeContextImpl(module, backend, contextDataOffset);\n return backend.computeKernel(kernel, context, errors);\n },\n // jsepCaptureBegin\n () => backend.captureBegin(),\n // jsepCaptureEnd\n () => backend.captureEnd(),\n // jsepReplay\n () => backend.replay()\n ]);\n } else {\n jsepInit('webnn');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// WebNN API currently does not have a TypeScript definition file. This file is a workaround with types generated from\n// WebNN API specification.\n// https://github.com/webmachinelearning/webnn/issues/677\n/// \n\nimport {Env, InferenceSession, Tensor} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport {setRunOptions} from './run-options';\nimport {setSessionOptions} from './session-options';\nimport {dataLocationStringToEnum, getTensorElementSize, isGpuBufferSupportedType, logLevelStringToEnum, tensorDataTypeEnumToString, tensorDataTypeStringToEnum, tensorTypeToTypedArrayConstructor} from './wasm-common';\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError} from './wasm-utils';\nimport {loadFile} from './wasm-utils-load-file';\n\n// #region Initializations\n\n/**\n * There are 4 different \"initialization\" steps for ORT. They happen in different places and different time.\n *\n * 1. JavaScript initialization for onnxruntime-common and onnxruntime-web.\n * This is the first initialization step. In this step, onnxruntime-web calls onnxruntime-common's registerBackend()\n * function multiple times to register all the available backends. The backend registration is very fast. 
It only\n * registers the backend name with the uninitialized backend object. No heavy initialization is done in this step.\n * Refer to web/lib/index.ts for the backend registration.\n *\n * 2. WebAssembly artifact initialization.\n * This happens when any registered wasm backend is used for the first time (ie. `ort.InferenceSession.create()` or\n * `ort.TrainingSession.create()` is called). In this step, onnxruntime-web does the followings:\n * - create a proxy worker and make sure the proxy worker is ready to receive messages, if proxy is enabled.\n * - perform feature detection, locate correct WebAssembly artifact path and call the Emscripten generated\n * JavaScript code to initialize the WebAssembly runtime.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-wasm'.\n * - downloading the 'ort-wasm{...}.wasm' file is done in this step.\n * - if multi-thread is enabled, one or more webworker will be created to initialize the PThread threadpool.\n *\n * 3. ORT environment initialization.\n * This happens after step 2. In this step, onnxruntime-web performs ONNX Runtime environment initialization.\n * Function `_OrtInit()` is called in this step.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-ort'.\n * - logging level (ort.env.logLevel) and thread number (ort.env.wasm.numThreads) are set in this step.\n *\n * 4. Session initialization.\n * This happens when `ort.InferenceSession.create()` or `ort.TrainingSession.create()` is called. Unlike the first 3\n * steps (they only called once), this step will be done for each session. In this step, onnxruntime-web does the\n * followings:\n * If the parameter is a URL:\n * - download the model data from the URL.\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - dereference the model buffer. This step allows the original ArrayBuffer to be garbage collected.\n * - call `_OrtCreateSession()` to create the session. (proxy: 'create')\n *\n * If the parameter is a Uint8Array object:\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - call `_OrtCreateSession()` to create the session. 
(proxy: 'create')\n *\n *\n */\n\n/**\n * initialize ORT environment.\n *\n * @param numThreads SetGlobalIntraOpNumThreads(numThreads)\n * @param loggingLevel CreateEnv(static_cast(logging_level))\n */\nconst initOrt = (numThreads: number, loggingLevel: number): void => {\n const errorCode = getInstance()._OrtInit(numThreads, loggingLevel);\n if (errorCode !== 0) {\n checkLastError('Can\\'t initialize onnxruntime.');\n }\n};\n\n/**\n * initialize runtime environment.\n * @param env passed in the environment config object.\n */\nexport const initRuntime = async(env: Env): Promise => {\n // init ORT\n initOrt(env.wasm.numThreads!, logLevelStringToEnum(env.logLevel));\n};\n\n/**\n * perform EP specific initialization.\n *\n * @param env\n * @param epName\n */\nexport const initEp = async(env: Env, epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_JSEP) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n const initJsep = require('./jsep/init').init;\n\n if (epName === 'webgpu') {\n // perform WebGPU availability check\n if (typeof navigator === 'undefined' || !navigator.gpu) {\n throw new Error('WebGPU is not supported in current environment');\n }\n\n let adapter = env.webgpu.adapter as GPUAdapter | null;\n if (!adapter) {\n // if adapter is not set, request a new adapter.\n const powerPreference = env.webgpu.powerPreference;\n if (powerPreference !== undefined && powerPreference !== 'low-power' &&\n powerPreference !== 'high-performance') {\n throw new Error(`Invalid powerPreference setting: \"${powerPreference}\"`);\n }\n const forceFallbackAdapter = env.webgpu.forceFallbackAdapter;\n if (forceFallbackAdapter !== undefined && typeof forceFallbackAdapter !== 'boolean') {\n throw new Error(`Invalid forceFallbackAdapter setting: \"${forceFallbackAdapter}\"`);\n }\n adapter = await navigator.gpu.requestAdapter({powerPreference, forceFallbackAdapter});\n if (!adapter) {\n throw new Error(\n 'Failed to get GPU adapter. ' +\n 'You may need to enable flag \"--enable-unsafe-webgpu\" if you are using Chrome.');\n }\n } else {\n // if adapter is set, validate it.\n if (typeof adapter.limits !== 'object' || typeof adapter.features !== 'object' ||\n typeof adapter.requestDevice !== 'function') {\n throw new Error('Invalid GPU adapter set in `env.webgpu.adapter`. 
It must be a GPUAdapter object.');\n }\n }\n\n await initJsep('webgpu', getInstance(), env, adapter);\n }\n if (epName === 'webnn') {\n // perform WebNN availability check\n if (typeof navigator === 'undefined' || !(navigator as unknown as {ml: unknown}).ml) {\n throw new Error('WebNN is not supported in current environment');\n }\n\n await initJsep('webnn', getInstance(), env);\n }\n }\n};\n\n// #endregion Initializations\n\n/**\n * valid data locations for input/output tensors.\n */\ntype SupportedTensorDataLocationForInputOutput = 'cpu'|'cpu-pinned'|'gpu-buffer';\n\ntype IOBindingState = {\n /**\n * the handle of IO binding.\n */\n readonly handle: number;\n\n /**\n * the preferred location for each output tensor.\n *\n * value is one of 'cpu', 'cpu-pinned', 'gpu-buffer'.\n */\n readonly outputPreferredLocations: readonly SupportedTensorDataLocationForInputOutput[];\n\n /**\n * enum value of the preferred location for each output tensor.\n */\n readonly outputPreferredLocationsEncoded: readonly number[];\n};\n\n/**\n * tuple elements are: InferenceSession ID; inputNamesUTF8Encoded; outputNamesUTF8Encoded; bindingState\n */\ntype SessionMetadata = [\n inferenceSessionId: number, inputNamesUTF8Encoded: number[], outputNamesUTF8Encoded: number[],\n bindingState: IOBindingState|null, enableGraphCapture: boolean, inputOutputBound: boolean\n];\n\nconst activeSessions = new Map();\n\n/**\n * get the input/output count of the session.\n * @param sessionHandle the handle representing the session. should be non-zero.\n * @returns a tuple including 2 numbers, representing the input count and output count.\n */\nconst getSessionInputOutputCount = (sessionHandle: number): [number, number] => {\n const wasm = getInstance();\n const stack = wasm.stackSave();\n try {\n const dataOffset = wasm.stackAlloc(8);\n const errorCode = wasm._OrtGetInputOutputCount(sessionHandle, dataOffset, dataOffset + 4);\n if (errorCode !== 0) {\n checkLastError('Can\\'t get session input/output count.');\n }\n return [wasm.HEAP32[dataOffset / 4], wasm.HEAP32[dataOffset / 4 + 1]];\n } finally {\n wasm.stackRestore(stack);\n }\n};\n\n/**\n * allocate the memory and memcpy the external buffer.\n *\n * @param model - the external buffer containing the model data. Must not be the same buffer as the WASM heap.\n * @returns a 2-elements tuple - the pointer and size of the allocated buffer\n */\nexport const copyFromExternalBuffer = (model: Uint8Array): [number, number] => {\n const wasm = getInstance();\n const modelDataOffset = wasm._malloc(model.byteLength);\n if (modelDataOffset === 0) {\n throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${model.byteLength}.`);\n }\n wasm.HEAPU8.set(model, modelDataOffset);\n return [modelDataOffset, model.byteLength];\n};\n\n/**\n * create an inference session from a model data buffer.\n *\n * @param modelData - either a Uint8Array object representing the model data, or a 2-elements tuple containing the\n * pointer and size of the model data buffer.\n * @param options an optional session options object.\n * @returns a 3-elements tuple containing [session handle, input names, output names]\n */\nexport const createSession = async(\n modelData: Uint8Array|SerializableInternalBuffer,\n options?: InferenceSession.SessionOptions): Promise => {\n let modelDataOffset: number, modelDataLength: number;\n const wasm = getInstance();\n\n if (Array.isArray(modelData)) {\n // if model data is an array, it must be a 2-elements tuple containing the pointer and size of the model data\n [modelDataOffset, modelDataLength] = modelData;\n } else if (modelData.buffer === wasm.HEAPU8.buffer) {\n // if model data uses the same buffer as the WASM heap, we don't need to copy it.\n [modelDataOffset, modelDataLength] = [modelData.byteOffset, modelData.byteLength];\n } else {\n // otherwise, copy the model data to the WASM heap.\n [modelDataOffset, modelDataLength] = copyFromExternalBuffer(modelData);\n }\n\n let sessionHandle = 0;\n let sessionOptionsHandle = 0;\n let ioBindingHandle = 0;\n let allocs: number[] = [];\n const inputNamesUTF8Encoded = [];\n const outputNamesUTF8Encoded = [];\n\n try {\n [sessionOptionsHandle, allocs] = setSessionOptions(options);\n\n if (options?.externalData && wasm.mountExternalData) {\n const loadingPromises = [];\n for (const file of options.externalData) {\n const path = typeof file === 'string' ? file : file.path;\n loadingPromises.push(loadFile(typeof file === 'string' ? file : file.data).then(data => {\n wasm.mountExternalData!(path, data);\n }));\n }\n\n // wait for all external data files to be loaded\n await Promise.all(loadingPromises);\n }\n\n for (const provider of options?.executionProviders ?? []) {\n const providerName = typeof provider === 'string' ? 
provider : provider.name;\n if (providerName === 'webnn') {\n if (wasm.currentContext) {\n throw new Error('WebNN execution provider is already set.');\n }\n if (typeof provider !== 'string') {\n const webnnOptions = provider as InferenceSession.WebNNExecutionProviderOption;\n const context = (webnnOptions as InferenceSession.WebNNOptionsWithMLContext)?.context;\n const gpuDevice = (webnnOptions as InferenceSession.WebNNOptionsWebGpu)?.gpuDevice;\n const deviceType = (webnnOptions as InferenceSession.WebNNContextOptions)?.deviceType;\n const numThreads = (webnnOptions as InferenceSession.WebNNContextOptions)?.numThreads;\n const powerPreference = (webnnOptions as InferenceSession.WebNNContextOptions)?.powerPreference;\n if (context) {\n wasm.currentContext = context as MLContext;\n } else if (gpuDevice) {\n wasm.currentContext = await navigator.ml.createContext(gpuDevice);\n } else {\n wasm.currentContext = await navigator.ml.createContext({deviceType, numThreads, powerPreference});\n }\n } else {\n wasm.currentContext = await navigator.ml.createContext();\n }\n break;\n }\n }\n\n sessionHandle = await wasm._OrtCreateSession(modelDataOffset, modelDataLength, sessionOptionsHandle);\n if (sessionHandle === 0) {\n checkLastError('Can\\'t create a session.');\n }\n\n // clear current MLContext after session creation\n if (wasm.currentContext) {\n wasm.currentContext = undefined;\n }\n\n const [inputCount, outputCount] = getSessionInputOutputCount(sessionHandle);\n\n const enableGraphCapture = !!options?.enableGraphCapture;\n\n const inputNames = [];\n const outputNames = [];\n const outputPreferredLocations: SupportedTensorDataLocationForInputOutput[] = [];\n for (let i = 0; i < inputCount; i++) {\n const name = wasm._OrtGetInputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an input name.');\n }\n inputNamesUTF8Encoded.push(name);\n inputNames.push(wasm.UTF8ToString(name));\n }\n for (let i = 0; i < outputCount; i++) {\n const name = wasm._OrtGetOutputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an output name.');\n }\n outputNamesUTF8Encoded.push(name);\n const nameString = wasm.UTF8ToString(name);\n outputNames.push(nameString);\n\n if (!BUILD_DEFS.DISABLE_JSEP) {\n if (enableGraphCapture && options?.preferredOutputLocation === undefined) {\n outputPreferredLocations.push('gpu-buffer');\n continue;\n }\n const location = typeof options?.preferredOutputLocation === 'string' ?\n options.preferredOutputLocation :\n options?.preferredOutputLocation?.[nameString] ?? 'cpu';\n if (location !== 'cpu' && location !== 'cpu-pinned' && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${location}.`);\n }\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${\n location}. 
Only 'gpu-buffer' location is supported when enableGraphCapture is true.`);\n }\n outputPreferredLocations.push(location);\n }\n }\n\n // use IO binding only when at least one output is preffered to be on GPU.\n let bindingState: IOBindingState|null = null;\n if (!BUILD_DEFS.DISABLE_JSEP && outputPreferredLocations.some(l => l === 'gpu-buffer')) {\n ioBindingHandle = wasm._OrtCreateBinding(sessionHandle);\n if (ioBindingHandle === 0) {\n checkLastError('Can\\'t create IO binding.');\n }\n\n bindingState = {\n handle: ioBindingHandle,\n outputPreferredLocations,\n outputPreferredLocationsEncoded: outputPreferredLocations.map(l => dataLocationStringToEnum(l)),\n };\n }\n\n activeSessions.set(\n sessionHandle,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, bindingState, enableGraphCapture, false]);\n return [sessionHandle, inputNames, outputNames];\n } catch (e) {\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n\n if (ioBindingHandle !== 0) {\n wasm._OrtReleaseBinding(ioBindingHandle);\n }\n\n if (sessionHandle !== 0) {\n wasm._OrtReleaseSession(sessionHandle);\n }\n throw e;\n } finally {\n wasm._free(modelDataOffset);\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n\n // unmount external data if necessary\n wasm.unmountExternalData?.();\n }\n};\n\nexport const releaseSession = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot release session. invalid session id: ${sessionId}`);\n }\n const [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture] = session;\n\n if (ioBindingState) {\n if (enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n }\n wasm._OrtReleaseBinding(ioBindingState.handle);\n }\n\n wasm.jsepOnReleaseSession?.(sessionId);\n\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n wasm._OrtReleaseSession(sessionHandle);\n activeSessions.delete(sessionId);\n};\n\nexport const prepareInputOutputTensor =\n (tensor: TensorMetadata|null, tensorHandles: number[], allocs: number[], sessionId: number, index: number,\n enableGraphCapture = false): void => {\n if (!tensor) {\n tensorHandles.push(0);\n return;\n }\n\n const wasm = getInstance();\n\n const dataType = tensor[0];\n const dims = tensor[1];\n const location = tensor[3];\n\n let rawData: number;\n let dataByteLength: number;\n\n if (dataType === 'string' && location === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(\n `External buffer must be provided for input/output index ${index} when enableGraphCapture is true.`);\n }\n\n if (location === 'gpu-buffer') {\n const gpuBuffer = tensor[2].gpuBuffer as GPUBuffer;\n const elementSizeInBytes = getTensorElementSize(tensorDataTypeStringToEnum(dataType))!;\n dataByteLength = dims.reduce((a, b) => a * b, 1) * elementSizeInBytes;\n\n const registerBuffer = wasm.jsepRegisterBuffer;\n if (!registerBuffer) {\n throw new Error('Tensor location \"gpu-buffer\" is not supported without using WebGPU.');\n }\n rawData = registerBuffer(sessionId, index, gpuBuffer, dataByteLength);\n } else {\n const data = tensor[2];\n\n if (Array.isArray(data)) {\n // string tensor\n 
dataByteLength = 4 * data.length;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n let dataIndex = rawData / 4;\n for (let i = 0; i < data.length; i++) {\n if (typeof data[i] !== 'string') {\n throw new TypeError(`tensor data at index ${i} is not a string`);\n }\n wasm.HEAPU32[dataIndex++] = allocWasmString(data[i], allocs);\n }\n } else {\n dataByteLength = data.byteLength;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n wasm.HEAPU8.set(new Uint8Array(data.buffer, data.byteOffset, dataByteLength), rawData);\n }\n }\n\n const stack = wasm.stackSave();\n const dimsOffset = wasm.stackAlloc(4 * dims.length);\n try {\n let dimIndex = dimsOffset / 4;\n dims.forEach(d => wasm.HEAP32[dimIndex++] = d);\n const tensor = wasm._OrtCreateTensor(\n tensorDataTypeStringToEnum(dataType), rawData, dataByteLength, dimsOffset, dims.length,\n dataLocationStringToEnum(location));\n if (tensor === 0) {\n checkLastError(`Can't create tensor for input/output. session=${sessionId}, index=${index}.`);\n }\n tensorHandles.push(tensor);\n } finally {\n wasm.stackRestore(stack);\n }\n };\n\n/**\n * perform inference run\n */\nexport const run = async(\n sessionId: number, inputIndices: number[], inputTensors: TensorMetadata[], outputIndices: number[],\n outputTensors: Array, options: InferenceSession.RunOptions): Promise => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot run inference. invalid session id: ${sessionId}`);\n }\n const sessionHandle = session[0];\n const inputNamesUTF8Encoded = session[1];\n const outputNamesUTF8Encoded = session[2];\n const ioBindingState = session[3];\n const enableGraphCapture = session[4];\n const inputOutputBound = session[5];\n\n const inputCount = inputIndices.length;\n const outputCount = outputIndices.length;\n\n let runOptionsHandle = 0;\n let runOptionsAllocs: number[] = [];\n\n const inputTensorHandles: number[] = [];\n const outputTensorHandles: number[] = [];\n const inputOutputAllocs: number[] = [];\n\n const beforeRunStack = wasm.stackSave();\n const inputValuesOffset = wasm.stackAlloc(inputCount * 4);\n const inputNamesOffset = wasm.stackAlloc(inputCount * 4);\n const outputValuesOffset = wasm.stackAlloc(outputCount * 4);\n const outputNamesOffset = wasm.stackAlloc(outputCount * 4);\n\n try {\n [runOptionsHandle, runOptionsAllocs] = setRunOptions(options);\n\n // create input tensors\n for (let i = 0; i < inputCount; i++) {\n prepareInputOutputTensor(\n inputTensors[i], inputTensorHandles, inputOutputAllocs, sessionId, inputIndices[i], enableGraphCapture);\n }\n\n // create output tensors\n for (let i = 0; i < outputCount; i++) {\n prepareInputOutputTensor(\n outputTensors[i], outputTensorHandles, inputOutputAllocs, sessionId, inputCount + outputIndices[i],\n enableGraphCapture);\n }\n\n let inputValuesIndex = inputValuesOffset / 4;\n let inputNamesIndex = inputNamesOffset / 4;\n let outputValuesIndex = outputValuesOffset / 4;\n let outputNamesIndex = outputNamesOffset / 4;\n for (let i = 0; i < inputCount; i++) {\n wasm.HEAPU32[inputValuesIndex++] = inputTensorHandles[i];\n wasm.HEAPU32[inputNamesIndex++] = inputNamesUTF8Encoded[inputIndices[i]];\n }\n for (let i = 0; i < outputCount; i++) {\n wasm.HEAPU32[outputValuesIndex++] = outputTensorHandles[i];\n wasm.HEAPU32[outputNamesIndex++] = outputNamesUTF8Encoded[outputIndices[i]];\n }\n\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState && !inputOutputBound) {\n const {handle, outputPreferredLocations, 
outputPreferredLocationsEncoded} = ioBindingState;\n\n if (inputNamesUTF8Encoded.length !== inputCount) {\n throw new Error(`input count from feeds (${\n inputCount}) is expected to be always equal to model's input count (${inputNamesUTF8Encoded.length}).`);\n }\n\n // process inputs\n for (let i = 0; i < inputCount; i++) {\n const index = inputIndices[i];\n const errorCode = await wasm._OrtBindInput(handle, inputNamesUTF8Encoded[index], inputTensorHandles[i]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind input[${i}] for session=${sessionId}.`);\n }\n }\n\n // process pre-allocated outputs\n for (let i = 0; i < outputCount; i++) {\n const index = outputIndices[i];\n const location = outputTensors[i]?.[3]; // undefined means output is not pre-allocated.\n\n if (location) {\n // output is pre-allocated. bind the tensor.\n const errorCode = wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], outputTensorHandles[i], 0);\n if (errorCode !== 0) {\n checkLastError(`Can't bind pre-allocated output[${i}] for session=${sessionId}.`);\n }\n } else {\n // output is not pre-allocated. reset preferred location.\n const errorCode =\n wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], 0, outputPreferredLocationsEncoded[index]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind output[${i}] to ${outputPreferredLocations[i]} for session=${sessionId}.`);\n }\n }\n }\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, true]);\n }\n\n wasm.jsepOnRunStart?.(sessionHandle);\n let errorCode: number;\n if (!BUILD_DEFS.DISABLE_JSEP && ioBindingState) {\n errorCode = await wasm._OrtRunWithBinding(\n sessionHandle, ioBindingState.handle, outputCount, outputValuesOffset, runOptionsHandle);\n } else {\n errorCode = await wasm._OrtRun(\n sessionHandle, inputNamesOffset, inputValuesOffset, inputCount, outputNamesOffset, outputCount,\n outputValuesOffset, runOptionsHandle);\n }\n\n if (errorCode !== 0) {\n checkLastError('failed to call OrtRun().');\n }\n\n const output: TensorMetadata[] = [];\n\n for (let i = 0; i < outputCount; i++) {\n const tensor = wasm.HEAPU32[outputValuesOffset / 4 + i];\n if (tensor === outputTensorHandles[i]) {\n // output tensor is pre-allocated. 
no need to copy data.\n output.push(outputTensors[i]!);\n continue;\n }\n\n const beforeGetTensorDataStack = wasm.stackSave();\n // stack allocate 4 pointer value\n const tensorDataOffset = wasm.stackAlloc(4 * 4);\n\n let keepOutputTensor = false;\n let type: Tensor.Type|undefined, dataOffset = 0;\n try {\n const errorCode = wasm._OrtGetTensorData(\n tensor, tensorDataOffset, tensorDataOffset + 4, tensorDataOffset + 8, tensorDataOffset + 12);\n if (errorCode !== 0) {\n checkLastError(`Can't access output tensor data on index ${i}.`);\n }\n let tensorDataIndex = tensorDataOffset / 4;\n const dataType = wasm.HEAPU32[tensorDataIndex++];\n dataOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsLength = wasm.HEAPU32[tensorDataIndex++];\n const dims = [];\n for (let i = 0; i < dimsLength; i++) {\n dims.push(wasm.HEAPU32[dimsOffset / 4 + i]);\n }\n wasm._OrtFree(dimsOffset);\n\n const size = dims.reduce((a, b) => a * b, 1);\n type = tensorDataTypeEnumToString(dataType);\n\n const preferredLocation = ioBindingState?.outputPreferredLocations[outputIndices[i]];\n\n if (type === 'string') {\n if (preferredLocation === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n const stringData: string[] = [];\n let dataIndex = dataOffset / 4;\n for (let i = 0; i < size; i++) {\n const offset = wasm.HEAPU32[dataIndex++];\n const maxBytesToRead = i === size - 1 ? undefined : wasm.HEAPU32[dataIndex] - offset;\n stringData.push(wasm.UTF8ToString(offset, maxBytesToRead));\n }\n output.push([type, dims, stringData, 'cpu']);\n } else {\n // If a certain output's preferred location is GPU but the tensor is empty, we still need to create a CPU\n // tensor for it. There is no mapping GPU buffer for an empty tensor.\n if (preferredLocation === 'gpu-buffer' && size > 0) {\n const getBuffer = wasm.jsepGetBuffer;\n if (!getBuffer) {\n throw new Error('preferredLocation \"gpu-buffer\" is not supported without using WebGPU.');\n }\n const gpuBuffer = getBuffer(dataOffset);\n const elementSize = getTensorElementSize(dataType);\n if (elementSize === undefined || !isGpuBufferSupportedType(type)) {\n throw new Error(`Unsupported data type: ${type}`);\n }\n\n // do not release the tensor right now. 
it will be released when user calls tensor.dispose().\n keepOutputTensor = true;\n\n output.push([\n type, dims, {\n gpuBuffer,\n download: wasm.jsepCreateDownloader!(gpuBuffer, size * elementSize, type),\n dispose: () => {\n wasm._OrtReleaseTensor(tensor);\n }\n },\n 'gpu-buffer'\n ]);\n } else {\n const typedArrayConstructor = tensorTypeToTypedArrayConstructor(type);\n const data = new typedArrayConstructor(size);\n new Uint8Array(data.buffer, data.byteOffset, data.byteLength)\n .set(wasm.HEAPU8.subarray(dataOffset, dataOffset + data.byteLength));\n output.push([type, dims, data, 'cpu']);\n }\n }\n } finally {\n wasm.stackRestore(beforeGetTensorDataStack);\n if (type === 'string' && dataOffset) {\n wasm._free(dataOffset);\n }\n if (!keepOutputTensor) {\n wasm._OrtReleaseTensor(tensor);\n }\n }\n }\n\n if (ioBindingState && !enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, false]);\n }\n return output;\n } finally {\n wasm.stackRestore(beforeRunStack);\n\n inputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n outputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n inputOutputAllocs.forEach(p => wasm._free(p));\n\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n runOptionsAllocs.forEach(p => wasm._free(p));\n }\n};\n\n/**\n * end profiling\n */\nexport const endProfiling = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error('invalid session id');\n }\n const sessionHandle = session[0];\n\n // profile file name is not used yet, but it must be freed.\n const profileFileName = wasm._OrtEndProfiling(sessionHandle);\n if (profileFileName === 0) {\n checkLastError('Can\\'t get an profile file name.');\n }\n wasm._OrtFree(profileFileName);\n};\n\nexport const extractTransferableBuffers = (tensors: readonly SerializableTensorMetadata[]): ArrayBufferLike[] => {\n const buffers: ArrayBufferLike[] = [];\n for (const tensor of tensors) {\n const data = tensor[2];\n if (!Array.isArray(data) && 'buffer' in data) {\n buffers.push(data.buffer);\n }\n }\n return buffers;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {env, InferenceSession} from 'onnxruntime-common';\n\nimport {OrtWasmMessage, SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport * as core from './wasm-core-impl';\nimport {initializeWebAssembly} from './wasm-factory';\nimport {importProxyWorker} from './wasm-utils-import';\n\nconst isProxy = (): boolean => !!env.wasm.proxy && typeof document !== 'undefined';\nlet proxyWorker: Worker|undefined;\nlet initializing = false;\nlet initialized = false;\nlet aborted = false;\nlet temporaryObjectUrl: string|undefined;\n\ntype PromiseCallbacks = [resolve: (result: T) => void, reject: (reason: unknown) => void];\nlet initWasmCallbacks: PromiseCallbacks;\nconst queuedCallbacks: Map>> = new Map();\n\nconst enqueueCallbacks = (type: OrtWasmMessage['type'], callbacks: PromiseCallbacks): void => {\n const queue = queuedCallbacks.get(type);\n if (queue) {\n queue.push(callbacks);\n } else {\n queuedCallbacks.set(type, [callbacks]);\n }\n};\n\nconst ensureWorker = (): void => {\n if (initializing || !initialized || aborted || !proxyWorker) {\n throw new Error('worker not ready');\n }\n};\n\nconst onProxyWorkerMessage = (ev: MessageEvent): void => {\n switch (ev.data.type) {\n case 'init-wasm':\n initializing = false;\n if (ev.data.err) {\n aborted = true;\n initWasmCallbacks[1](ev.data.err);\n } else {\n initialized = true;\n initWasmCallbacks[0]();\n }\n if (temporaryObjectUrl) {\n URL.revokeObjectURL(temporaryObjectUrl);\n temporaryObjectUrl = undefined;\n }\n break;\n case 'init-ep':\n case 'copy-from':\n case 'create':\n case 'release':\n case 'run':\n case 'end-profiling': {\n const callbacks = queuedCallbacks.get(ev.data.type)!;\n if (ev.data.err) {\n callbacks.shift()![1](ev.data.err);\n } else {\n callbacks.shift()![0](ev.data.out!);\n }\n break;\n }\n default:\n }\n};\n\n\nexport const initializeWebAssemblyAndOrtRuntime = async(): Promise => {\n if (initialized) {\n return;\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initWasm()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initWasm()\\' failed.');\n }\n\n initializing = true;\n\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n return new Promise((resolve, reject) => {\n proxyWorker?.terminate();\n\n void importProxyWorker().then(([objectUrl, worker]) => {\n try {\n proxyWorker = worker;\n proxyWorker.onerror = (ev: ErrorEvent) => reject(ev);\n proxyWorker.onmessage = onProxyWorkerMessage;\n initWasmCallbacks = [resolve, reject];\n const message: OrtWasmMessage = {type: 'init-wasm', in : env};\n proxyWorker.postMessage(message);\n temporaryObjectUrl = objectUrl;\n } catch (e) {\n reject(e);\n }\n }, reject);\n });\n\n } else {\n try {\n await initializeWebAssembly(env.wasm);\n await core.initRuntime(env);\n initialized = true;\n } catch (e) {\n aborted = true;\n throw e;\n } finally {\n initializing = false;\n }\n }\n};\n\nexport const initializeOrtEp = async(epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('init-ep', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'init-ep', in : {epName, env}};\n proxyWorker!.postMessage(message);\n });\n } else {\n await core.initEp(env, epName);\n }\n};\n\nexport const copyFromExternalBuffer = async(buffer: Uint8Array): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) 
{\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('copy-from', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'copy-from', in : {buffer}};\n proxyWorker!.postMessage(message, [buffer.buffer]);\n });\n } else {\n return core.copyFromExternalBuffer(buffer);\n }\n};\n\nexport const createSession =\n async(model: SerializableInternalBuffer|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check unsupported options\n if (options?.preferredOutputLocation) {\n throw new Error('session option \"preferredOutputLocation\" is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('create', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'create', in : {model, options: {...options}}};\n const transferable: Transferable[] = [];\n if (model instanceof Uint8Array) {\n transferable.push(model.buffer);\n }\n proxyWorker!.postMessage(message, transferable);\n });\n } else {\n return core.createSession(model, options);\n }\n };\n\nexport const releaseSession = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('release', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'release', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.releaseSession(sessionId);\n }\n};\n\nexport const run = async(\n sessionId: number, inputIndices: number[], inputs: TensorMetadata[], outputIndices: number[],\n outputs: Array, options: InferenceSession.RunOptions): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check inputs location\n if (inputs.some(t => t[3] !== 'cpu')) {\n throw new Error('input tensor on GPU is not supported for proxy.');\n }\n // check outputs location\n if (outputs.some(t => t)) {\n throw new Error('pre-allocated output tensor is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('run', [resolve, reject]);\n const serializableInputs = inputs as SerializableTensorMetadata[]; // every input is on CPU.\n const message: OrtWasmMessage =\n {type: 'run', in : {sessionId, inputIndices, inputs: serializableInputs, outputIndices, options}};\n proxyWorker!.postMessage(message, core.extractTransferableBuffers(serializableInputs));\n });\n } else {\n return core.run(sessionId, inputIndices, inputs, outputIndices, outputs, options);\n }\n};\n\nexport const endProfiling = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('end-profiling', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'end-profiling', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.endProfiling(sessionId);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, TensorMetadata} from './proxy-messages';\nimport {copyFromExternalBuffer, createSession, endProfiling, releaseSession, run} from './proxy-wrapper';\nimport {isGpuBufferSupportedType} from './wasm-common';\nimport {isNode} from './wasm-utils-env';\nimport {loadFile} from './wasm-utils-load-file';\n\nexport const encodeTensorMetadata = (tensor: Tensor, getName: () => string): TensorMetadata => {\n switch (tensor.location) {\n case 'cpu':\n return [tensor.type, tensor.dims, tensor.data, 'cpu'];\n case 'gpu-buffer':\n return [tensor.type, tensor.dims, {gpuBuffer: tensor.gpuBuffer}, 'gpu-buffer'];\n default:\n throw new Error(`invalid data location: ${tensor.location} for ${getName()}`);\n }\n};\n\nexport const decodeTensorMetadata = (tensor: TensorMetadata): Tensor => {\n switch (tensor[3]) {\n case 'cpu':\n return new Tensor(tensor[0], tensor[2], tensor[1]);\n case 'gpu-buffer': {\n const dataType = tensor[0];\n if (!isGpuBufferSupportedType(dataType)) {\n throw new Error(`not supported data type: ${dataType} for deserializing GPU tensor`);\n }\n const {gpuBuffer, download, dispose} = tensor[2];\n return Tensor.fromGpuBuffer(gpuBuffer, {dataType, dims: tensor[1], download, dispose});\n }\n default:\n throw new Error(`invalid data location: ${tensor[3]}`);\n }\n};\n\nexport class OnnxruntimeWebAssemblySessionHandler implements InferenceSessionHandler {\n private sessionId: number;\n\n inputNames: string[];\n outputNames: string[];\n\n async fetchModelAndCopyToWasmMemory(path: string): Promise {\n // fetch model from url and move to wasm heap.\n return copyFromExternalBuffer(await loadFile(path));\n }\n\n async loadModel(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n let model: Parameters[0];\n\n if (typeof pathOrBuffer === 'string') {\n if (isNode) {\n // node\n model = await loadFile(pathOrBuffer);\n } else {\n // browser\n // fetch model and copy to wasm heap.\n model = await this.fetchModelAndCopyToWasmMemory(pathOrBuffer);\n }\n } else {\n model = pathOrBuffer;\n }\n\n [this.sessionId, this.inputNames, this.outputNames] = await createSession(model, options);\n TRACE_FUNC_END();\n }\n\n async dispose(): Promise {\n return releaseSession(this.sessionId);\n }\n\n async run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType, options: InferenceSession.RunOptions):\n Promise {\n TRACE_FUNC_BEGIN();\n const inputArray: Tensor[] = [];\n const inputIndices: number[] = [];\n Object.entries(feeds).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.inputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid input '${name}'`);\n }\n inputArray.push(tensor);\n inputIndices.push(index);\n });\n\n const outputArray: Array = [];\n const outputIndices: number[] = [];\n Object.entries(fetches).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.outputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid output '${name}'`);\n }\n outputArray.push(tensor);\n outputIndices.push(index);\n });\n\n const inputs =\n inputArray.map((t, i) => encodeTensorMetadata(t, () => `input \"${this.inputNames[inputIndices[i]]}\"`));\n const outputs = outputArray.map(\n (t, i) => t ? 
encodeTensorMetadata(t, () => `output \"${this.outputNames[outputIndices[i]]}\"`) : null);\n\n const results = await run(this.sessionId, inputIndices, inputs, outputIndices, outputs, options);\n\n const resultMap: SessionHandler.ReturnType = {};\n for (let i = 0; i < results.length; i++) {\n resultMap[this.outputNames[outputIndices[i]]] = outputArray[i] ?? decodeTensorMetadata(results[i]);\n }\n TRACE_FUNC_END();\n return resultMap;\n }\n\n startProfiling(): void {\n // TODO: implement profiling\n }\n\n endProfiling(): void {\n void endProfiling(this.sessionId);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend, env, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {initializeOrtEp, initializeWebAssemblyAndOrtRuntime} from './wasm/proxy-wrapper';\nimport {OnnxruntimeWebAssemblySessionHandler} from './wasm/session-handler-inference';\nimport {scriptSrc} from './wasm/wasm-utils-import';\n\n/**\n * This function initializes all flags for WebAssembly.\n *\n * Those flags are accessible from `ort.env.wasm`. Users are allow to set those flags before the first inference session\n * being created, to override default value.\n */\nexport const initializeFlags = (): void => {\n if (typeof env.wasm.initTimeout !== 'number' || env.wasm.initTimeout < 0) {\n env.wasm.initTimeout = 0;\n }\n\n if (env.wasm.simd === false) {\n // eslint-disable-next-line no-console\n console.warn(\n 'Deprecated property \"env.wasm.simd\" is set to false. ' +\n 'non-SIMD build is no longer provided, and this setting will be ignored.');\n }\n\n if (typeof env.wasm.proxy !== 'boolean') {\n env.wasm.proxy = false;\n }\n\n if (typeof env.wasm.trace !== 'boolean') {\n env.wasm.trace = false;\n }\n\n if (typeof env.wasm.numThreads !== 'number' || !Number.isInteger(env.wasm.numThreads) || env.wasm.numThreads <= 0) {\n // The following logic only applies when `ort.env.wasm.numThreads` is not set by user. We will always honor user's\n // setting if it is provided.\n\n // Browser: when crossOriginIsolated is false, SharedArrayBuffer is not available so WebAssembly threads will not\n // work. In this case, we will set numThreads to 1.\n //\n // There is an exception: when the browser is configured to force-enable SharedArrayBuffer (e.g. Chromuim with\n // --enable-features=SharedArrayBuffer), it is possible that `self.crossOriginIsolated` is false and\n // SharedArrayBuffer is available at the same time. This is usually for testing. In this case, we will still set\n // numThreads to 1 here. If we want to enable multi-threading in test, we should set `ort.env.wasm.numThreads` to a\n // value greater than 1.\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n env.wasm.numThreads = 1;\n } else {\n const numCpuLogicalCores =\n typeof navigator === 'undefined' ? require('node:os').cpus().length : navigator.hardwareConcurrency;\n env.wasm.numThreads = Math.min(4, Math.ceil((numCpuLogicalCores || 1) / 2));\n }\n }\n\n if (!BUILD_DEFS.DISABLE_DYNAMIC_IMPORT) {\n // overwrite wasm paths override if not set\n if (env.wasm.wasmPaths === undefined && scriptSrc && scriptSrc.indexOf('blob:') !== 0) {\n env.wasm.wasmPaths = scriptSrc.substring(0, scriptSrc.lastIndexOf('/') + 1);\n }\n }\n};\n\nexport class OnnxruntimeWebAssemblyBackend implements Backend {\n /**\n * This function initializes the WebAssembly backend.\n *\n * This function will be called only once for each backend name. 
It will be called the first time when\n * `ort.InferenceSession.create()` is called with a registered backend name.\n *\n * @param backendName - the registered backend name.\n */\n async init(backendName: string): Promise {\n // populate wasm flags\n initializeFlags();\n\n // init wasm\n await initializeWebAssemblyAndOrtRuntime();\n\n // performe EP specific initialization\n await initializeOrtEp(backendName);\n }\n createInferenceSessionHandler(path: string, options?: InferenceSession.SessionOptions):\n Promise;\n createInferenceSessionHandler(buffer: Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise {\n const handler = new OnnxruntimeWebAssemblySessionHandler();\n await handler.loadModel(pathOrBuffer, options);\n return Promise.resolve(handler);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OnnxruntimeWebAssemblyBackend} from './backend-wasm';\nexport const wasmBackend = new OnnxruntimeWebAssemblyBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports */\n\n// We use \"require\" instead of \"import\" here because import statement must be put in top level. Our current code does\n// not allow bundler to tree-shaking code as expected because some codes are treated as having side effects.\n// So we import code inside the if-clause to allow bundler remove the code safely.\n\nexport * from 'onnxruntime-common';\nimport * as ort from 'onnxruntime-common';\nexport default ort;\n\nimport {registerBackend, env} from 'onnxruntime-common';\nimport {version} from './version';\n\nif (!BUILD_DEFS.DISABLE_WEBGL) {\n const onnxjsBackend = require('./backend-onnxjs').onnxjsBackend;\n registerBackend('webgl', onnxjsBackend, -10);\n}\n\nif (!BUILD_DEFS.DISABLE_WASM) {\n const wasmBackend = BUILD_DEFS.DISABLE_TRAINING ? require('./backend-wasm-inference').wasmBackend :\n require('./backend-wasm-training').wasmBackend;\n if (!BUILD_DEFS.DISABLE_JSEP) {\n registerBackend('webgpu', wasmBackend, 5);\n registerBackend('webnn', wasmBackend, 5);\n }\n registerBackend('cpu', wasmBackend, 10);\n registerBackend('wasm', wasmBackend, 10);\n}\n\nObject.defineProperty(env.versions, 'web', {value: version, enumerable: true});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.19.0';\n"], + "mappings": ";;;;;usBAAA,IAgBMA,GACAC,GAYOC,GAwCPC,GAwCOC,GA7GbC,GAAAC,EAAA,kBAgBMN,GAAqC,IAAI,IACzCC,GAAqC,CAAA,EAY9BC,GAAkB,CAACK,EAAcC,EAAkBC,IAA0B,CACxF,GAAID,GAAW,OAAOA,EAAQ,MAAS,YAAc,OAAOA,EAAQ,+BAAkC,WAAY,CAChH,IAAME,EAAiBV,GAAS,IAAIO,CAAI,EACxC,GAAIG,IAAmB,OACrBV,GAAS,IAAIO,EAAM,CAAC,QAAAC,EAAS,SAAAC,CAAQ,CAAC,MACjC,IAAIC,EAAe,SAAWD,EAEnC,OACK,GAAIC,EAAe,WAAaD,GACjCC,EAAe,UAAYF,EAC7B,MAAM,IAAI,MAAM,4BAA4BD,CAAI,oBAAoBE,CAAQ,EAAE,EAIlF,GAAIA,GAAY,EAAG,CACjB,IAAME,EAAIV,GAAyB,QAAQM,CAAI,EAC3CI,IAAM,IACRV,GAAyB,OAAOU,EAAG,CAAC,EAGtC,QAAS,EAAI,EAAG,EAAIV,GAAyB,OAAQ,IACnD,GAAID,GAAS,IAAIC,GAAyB,CAAC,CAAC,EAAG,UAAYQ,EAAU,CACnER,GAAyB,OAAO,EAAG,EAAGM,CAAI,EAC1C,OAGJN,GAAyB,KAAKM,CAAI,EAEpC,OAGF,MAAM,IAAI,UAAU,qBAAqB,CAC3C,EAQMJ,GAAiC,MAAMS,GAAgD,CAC3F,IAAMC,EAAcb,GAAS,IAAIY,CAAW,EAC5C,GAAI,CAACC,EACH,MAAO,qBAGT,GAAIA,EAAY,YACd,OAAOA,EAAY,QACd,GAAIA,EAAY,QACrB,OAAOA,EAAY,MACd,CACL,IAAMC,EAAiB,CAAC,CAACD,EAAY,YACrC,GAAI,CACF,OAAKC,IACHD,EAAY,YAAcA,EAAY,QAAQ,KAAKD,CAAW,GAEhE,MAAMC,EAAY,YAClBA,EAAY,YAAc,GACnBA,EAAY,cACZE,EAAG,CACV,OAAKD,IACHD,EAAY,MAAQ,GAAGE,CAAC,GACxBF,EAAY,QAAU,IAEjBA,EAAY,cAEnB,OAAOA,EAAY,aAGzB,EAWaT,GAAsC,MAAMY,GACmB,CAEtE,IAAMC,EAAMD,EAAQ,oBAAsB,CAAA,EACpCE,EAAeD,EAAI,IAAIN,GAAK,OAAOA,GAAM,SAAWA,EAAIA,EAAE,IAAI,EAC9DQ,EAAeD,EAAa,SAAW,EAAIjB,GAA2BiB,EAGxEV,EACEY,EAAS,CAAA,EACTC,EAAwB,IAAI,IAClC,QAAWT,KAAeO,EAAc,CACtC,IAAMG,EAAgB,MAAMnB,GAA+BS,CAAW,EAClE,OAAOU,GAAkB,SAC3BF,EAAO,KAAK,CAAC,KAAMR,EAAa,IAAKU,CAAa,CAAC,GAE9Cd,IACHA,EAAUc,GAERd,IAAYc,GACdD,EAAsB,IAAIT,CAAW,GAM3C,GAAI,CAACJ,EACH,MAAM,IAAI,MAAM,oCAAoCY,EAAO,IAAIL,GAAK,IAAIA,EAAE,IAAI,KAAKA,EAAE,GAAG,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,EAI1G,OAAW,CAAC,KAAAR,EAAM,IAAAgB,CAAG,IAAKH,EACpBF,EAAa,SAASX,CAAI,GAE5B,QAAQ,KAAK,0CACTA,CAAI,uDAAuDgB,CAAG,EAAE,EAIxE,IAAMC,EAAcP,EAAI,OAAON,GAAKU,EAAsB,IAAI,OAAOV,GAAM,SAAWA,EAAIA,EAAE,IAAI,CAAC,EAEjG,MAAO,CACLH,EAAS,IAAI,MAAMQ,EAAS,CAC1B,IAAK,CAACS,EAAQC,IACRA,IAAS,qBACJF,EAEF,QAAQ,IAAIC,EAAQC,CAAI,EAElC,EAEL,IChKJ,IAAAC,GAAAC,EAAA,kBAoFAC,OCpFA,IAMaC,GANbC,GAAAC,EAAA,kBAMaF,GAAU,WCNvB,IAQIG,GAESC,GAVbC,GAAAC,EAAA,kBAIAC,KAIIJ,GAAwC,UAE/BC,GAAW,CACtB,KAAM,CAAA,EACN,MAAO,CAAA,EACP,OAAQ,CAAA,EACR,SAAU,CAAC,OAAQI,EAAO,EAE1B,IAAI,SAASC,EAAmB,CAC9B,GAAIA,IAAU,OAGd,IAAI,OAAOA,GAAU,UAAY,CAAC,UAAW,OAAQ,UAAW,QAAS,OAAO,EAAE,QAAQA,CAAK,IAAM,GACnG,MAAM,IAAI,MAAM,8BAA8BA,CAAK,EAAE,EAEvDN,GAAgBM,EAClB,EACA,IAAI,UAAQ,CACV,OAAON,EACT,GAIF,OAAO,eAAeC,GAAK,WAAY,CAAC,WAAY,EAAI,CAAC,IC/BzD,IAmRaM,GAnRbC,GAAAC,EAAA,kBAGAC,KAgRaH,GAAWA,KCnRxB,IASaI,GA+FAC,GAxGbC,GAAAC,EAAA,kBASaH,GAAkB,CAACI,EAAgBC,IAA4C,CAC1F,IAAMC,EAAS,OAAO,SAAa,IAAc,SAAS,cAAc,QAAQ,EAAK,IAAI,gBAAgB,EAAG,CAAC,EAC7GA,EAAO,MAAQF,EAAO,KAAK,CAAC,EAC5BE,EAAO,OAASF,EAAO,KAAK,CAAC,EAC7B,IAAMG,EACFD,EAAO,WAAW,IAAI,EAE1B,GAAIC,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAJ,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,IAEtBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,GAGxB,IAAMM,EAAcL,GAAS,SAAW,OAAYA,EAAQ,OAAS,MAE/DM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,
SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EAEpBO,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5B,QAASK,EAAI,EAAGA,EAAIV,EAAQU,IAC1B,QAASC,EAAI,EAAGA,EAAIZ,EAAOY,IAAK,CAC9B,IAAMC,GAAMjB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EU,GAAMlB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EW,GAAMnB,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EY,EAAIN,IAAmB,GACzB,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,EAE1EL,EAAgB,UAAY,QAAUc,EAAI,IAAMC,EAAI,IAAMC,EAAI,IAAMC,EAAI,IACxEjB,EAAgB,SAASa,EAAGD,EAAG,EAAG,CAAC,EAGvC,GAAI,cAAeb,EACjB,OAAOA,EAAO,UAAS,EAEvB,MAAM,IAAI,MAAM,4BAA4B,MAG9C,OAAM,IAAI,MAAM,2BAA2B,CAE/C,EAKaL,GAAoB,CAACG,EAAgBC,IAAiD,CACjG,IAAME,EAAkB,OAAO,SAAa,IACxC,SAAS,cAAc,QAAQ,EAAE,WAAW,IAAI,EAChD,IAAI,gBAAgB,EAAG,CAAC,EAAE,WAAW,IAAI,EACzCkB,EACJ,GAAIlB,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAiB,EACArB,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,IAExBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBsB,EAAWtB,EAAO,KAAK,CAAC,GAE1B,IAAMM,EAAcL,IAAY,QAAaA,EAAQ,SAAW,OAAYA,EAAQ,OAAkB,MAEhGM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,GAAG,EACrDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EACxB,GAAIH,IAAY,SACVA,EAAQ,SAAW,QAAcqB,IAAa,GAAKrB,EAAQ,SAAW,QACrEqB,IAAa,GAAMrB,EAAQ,SAAW,OAASA,EAAQ,SAAW,OACrE,MAAM,IAAI,MAAM,+CAAgD,EAKpE,IAAMsB,EAAO,EACTC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACzEhB,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5BW,EAAQlB,EAAgB,gBAAgBC,EAAOC,CAAM,EAErD,QAASU,EAAI,EAAGA,EAAIV,EAASD,EACxBoB,GAAiBD,EAAME,GAAiBF,EAAMG,GAAiBH,EAAMI,GAAiBJ,EAAMR,IAC/FM,EAAM,KAAKG,CAAa,GAAMxB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKI,CAAa,GAAMzB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKK,CAAa,GAAM1B,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGa,EAAM,KAAKM,CAAa,EAAIb,IAAmB,GAC3C,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,MAI5E,OAAM,IAAI,MAAM,2BAA2B,EAE7C,OAAOa,CACT,ICtMA,IAiBaO,GAkFAC,GAgKAC,GAWAC,GASAC,GAvRbC,GAAAC,EAAA,kBAIAC,KAaaP,GAAiB,CAACQ,EAAqCC,IAA0C,CAC5G,GAAID,IAAW,OACb,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAIC,EAAQ,SAAW,QAAaA,EAAQ,QAAU,OACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAM,CAAC,OAAAC,EAAQ,MAAAC,CAAK,EAAIF,EAElBG,EAAOH,EAAQ,MAAQ,CAAC,KAAM,IAAK,KAAM,CAAC,EAC5CI,EACAC,EAEA,OAAQF,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAA
I,EAEtDC,EAAW,CAACD,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,GAAG,EAG3E,OAAQA,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,EAEtDE,EAAW,CAACF,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,CAAC,EAG7E,IAAMG,EAAcN,EAAQ,SAAW,OAAYA,EAAQ,OAAS,OAG9DO,EACFP,EAAQ,eAAiB,QAAaA,EAAQ,eAAiB,OAAYA,EAAQ,aAAwB,MACzGQ,EAASP,EAASC,EAClBO,EAAcF,IAAiB,OAAS,IAAI,aAAaC,EAAS,CAAC,EAAI,IAAI,aAAaA,EAAS,CAAC,EAGpGE,EAAO,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACnFC,EAAiB,EAAGC,EAAiBR,EAAQS,EAAiBT,EAAS,EAAGU,EAAiB,GAG3FZ,IAAgB,QAClBI,EAAO,EACPC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,IAIdP,IAAiB,OACnBW,EAAiBV,EAAS,EACjBD,IAAiB,OAC1BQ,EAAiB,EACjBE,EAAiBT,EACjBQ,EAAiBR,EAAS,GACjBD,IAAiB,QAC1BU,EAAiB,EACjBD,EAAiBR,EACjBO,EAAiBP,EAAS,GAG5B,QAASW,EAAI,EAAGA,EAAIX,EACfW,IAAKR,GAAiBD,EAAMG,GAAiBH,EAAME,GAAiBF,EAAMI,GAAiBJ,EAC9FD,EAAYM,GAAgB,GAAKhB,EAAOY,CAAa,EAAIN,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYO,GAAgB,GAAKjB,EAAOa,CAAa,EAAIP,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYQ,GAAgB,GAAKlB,EAAOc,CAAa,EAAIR,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC9Ec,IAAmB,IAAMJ,IAAkB,KAC7CL,EAAYS,GAAgB,GAAKnB,EAAOe,CAAa,EAAIT,EAAS,CAAC,GAAKD,EAAS,CAAC,GAOtF,OAFqBG,IAAiB,OAAS,IAAIa,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,EACxD,IAAIkB,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,CAEzG,EAKaV,GAAkB,MAC3B6B,EACArB,IACyC,CAE3C,IAAMsB,EAAiB,OAAQ,iBAAsB,KAAeD,aAAiB,iBAC/EE,EAAiB,OAAQ,UAAe,KAAeF,aAAiB,UACxEG,EAAgB,OAAQ,YAAiB,KAAeH,aAAiB,YACzEI,EAAW,OAAOJ,GAAU,SAE9BK,EACAC,EAA+C3B,GAAW,CAAA,EAExD4B,EAAe,IAAK,CACxB,GAAI,OAAO,SAAa,IACtB,OAAO,SAAS,cAAc,QAAQ,EACjC,GAAI,OAAO,gBAAoB,IACpC,OAAO,IAAI,gBAAgB,EAAG,CAAC,EAE/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,EACMC,EAAuBC,GACvBA,aAAkB,mBAEXA,aAAkB,gBADpBA,EAAO,WAAW,IAAI,EAItB,KAIX,GAAIR,EAAgB,CAElB,IAAMQ,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAI9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MAMlB,GALIrB,IAAY,QAAaA,EAAQ,gBAAkB,QAAaA,EAAQ,eAAiB,SAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,cAGdA,IAAY,OAAW,CAEzB,GADA2B,EAAwB3B,EACpBA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,6DAA6D,EAE7E2B,EAAsB,aAAe,OAEvCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,OAE9ByB,EAAsB,aAAe,OACrCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAGhC6B,EAAgB,UAAUV,EAAO,EAAG,CAAC,EACrCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,UAEpCsB,EAAgB,CACzB,IAAItB,EACAC,EAiBJ,GAfIF,IAAY,QAAaA,EAAQ,eAAiB,QAAaA,EAAQ,gBAAkB,QAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,eAEhBC,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,OAGZrB,IAAY,SACd2B,EAAwB3B,GAE1B2B,EAAsB,OAAS,OAC/BA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAE1BF,IAAY,OAAW,CACzB,IAAMgC,EAAaJ,EAAY,EAE/BI,EAAW,MAAQ9B,EACnB8B,EAAW,OAAS/B,EAEpB,IAAM8B,EAAkBF,EAAoBG,CAAU,EAEtD,GAAID,GAAmB,KACrBA,EAAgB,aAAaV,EAAO,EAAG,CAAC,EACxCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,OAG7CyB,EAAOL,EAAM,aAENG,EAAe,CAExB,GAAIxB,IAAY,OACd,MAAM,IAAI,MAAM,yDAAyD,EAG3E,IAAM8B,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAM9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MACpB,OAAAU,EAAgB,UAAUV,EAAO,EAAG,EAAGnB,EAAOD,CAAM,EACpDyB,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,KACzD0B,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EACvBX,GAAemC,EAAMC,CAAqB,MAEjD,OAAM,IAAI,MAAM,2BAA2B,MAExC,IAAIF,EACT,OAAO,IAAI,QAAQ,CAACQ,EAASC,IAAU,CACrC,IAAMJ,EAASF,EAAY,EACrBO,EAAUN,EAAoBC,CAAM,EAC1C,GAAI,CAACT,GAAS,CAACc,EACb,OAAOD,EAAM,EAEf,IAAME,EAAW,IAAI,MACrBA,EAAS,YAAc,YACvBA,EAAS,IAAMf,EACfe,EAAS,OAAS,IAAK,CACrBN,EAAO,MAAQM,EAAS,M
ACxBN,EAAO,OAASM,EAAS,OACzBD,EAAQ,UAAUC,EAAU,EAAG,EAAGN,EAAO,MAAOA,EAAO,MAAM,EAC7D,IAAMO,EAAMF,EAAQ,aAAa,EAAG,EAAGL,EAAO,MAAOA,EAAO,MAAM,EAElEH,EAAsB,OAASG,EAAO,OACtCH,EAAsB,MAAQG,EAAO,MACrCG,EAAQ1C,GAAe8C,EAAI,KAAMV,CAAqB,CAAC,CACzD,CACF,CAAC,EAED,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAID,IAAS,OACX,OAAOnC,GAAemC,EAAMC,CAAqB,EAEjD,MAAM,IAAI,MAAM,gEAAgE,CAEpF,EAKalC,GAAoB,CAC7B6C,EAAsCtC,IAAgD,CACxF,GAAM,CAAC,MAAAE,EAAO,OAAAD,EAAQ,SAAAsC,EAAU,QAAAC,CAAO,EAAIxC,EAErCyC,EAAO,CAAC,EAAGxC,EAAQC,EAAO,CAAC,EACjC,OAAO,IAAIkB,GAAO,CAAC,SAAU,UAAW,KAAM,UAAW,QAAAkB,EAAS,KAAAG,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC5F,EAKa9C,GAAsB,CAC/BgD,EAA0C1C,IAAkD,CAC9F,GAAM,CAAC,SAAA2C,EAAU,KAAAF,EAAM,SAAAF,EAAU,QAAAC,CAAO,EAAIxC,EAC5C,OAAO,IAAIoB,GAAO,CAAC,SAAU,aAAc,KAAMuB,GAAY,UAAW,UAAAD,EAAW,KAAAD,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC7G,EAKa7C,GAAyB,CAClCiD,EAAS7C,EAAwC0C,IACjD,IAAIrB,GAAO,CAAC,SAAU,aAAc,KAAAwB,EAAM,KAAM7C,EAAQ,KAAM0C,GAAQ,CAAC1C,EAAO,MAAM,CAAC,CAAC,ICzR1F,IAWa8C,GAaAC,GAoBTC,GACSC,GA7CbC,GAAAC,EAAA,kBAWaL,GAAwC,IAAI,IAA6C,CACpG,CAAC,UAAW,YAAY,EACxB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,SAAS,EAClB,CAAC,SAAU,WAAW,EACtB,CAAC,QAAS,UAAU,EACpB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,UAAU,EACnB,CAAC,UAAW,YAAY,EACxB,CAAC,SAAU,WAAW,EACvB,EAGYC,GAAwC,IAAI,IAAkD,CACzG,CAAC,aAAc,SAAS,EACxB,CAAC,WAAY,OAAO,EACpB,CAAC,UAAW,MAAM,EAClB,CAAC,YAAa,QAAQ,EACtB,CAAC,WAAY,OAAO,EACpB,CAAC,WAAY,OAAO,EACpB,CAAC,aAAc,SAAS,EACxB,CAAC,YAAa,QAAQ,EACvB,EAWGC,GAAsB,GACbC,GAAkB,IAAK,CAClC,GAAI,CAACD,GAAqB,CACxBA,GAAsB,GACtB,IAAMI,EAA2B,OAAO,cAAkB,KAAe,cAAc,KACjFC,EAA4B,OAAO,eAAmB,KAAe,eAAe,KACpFC,EAA0B,OAAO,aAAiB,KAAe,aAAa,KAEhFF,IACFN,GAAsC,IAAI,QAAS,aAAa,EAChEC,GAAsC,IAAI,cAAe,OAAO,GAE9DM,IACFP,GAAsC,IAAI,SAAU,cAAc,EAClEC,GAAsC,IAAI,eAAgB,QAAQ,GAEhEO,GACFR,GAAsC,IAAI,UAAW,YAAY,EACjEC,GAAsC,IAAI,aAAc,SAAS,GAGjED,GAAsC,IAAI,UAAW,WAAW,EAGtE,ICpEA,IAWaS,GAkBAC,GA7BbC,GAAAC,EAAA,kBAIAC,KAOaJ,GAAiBK,GAAoC,CAChE,IAAIC,EAAO,EACX,QAASC,EAAI,EAAGA,EAAIF,EAAK,OAAQE,IAAK,CACpC,IAAMC,EAAMH,EAAKE,CAAC,EAClB,GAAI,OAAOC,GAAQ,UAAY,CAAC,OAAO,cAAcA,CAAG,EACtD,MAAM,IAAI,UAAU,QAAQD,CAAC,8BAA8BC,CAAG,EAAE,EAElE,GAAIA,EAAM,EACR,MAAM,IAAI,WAAW,QAAQD,CAAC,0CAA0CC,CAAG,EAAE,EAE/EF,GAAQE,EAEV,OAAOF,CACT,EAKaL,GAAgB,CAACQ,EAAgBJ,IAAmC,CAC/E,OAAQI,EAAO,SAAU,CACvB,IAAK,MACH,OAAO,IAAIC,GAAOD,EAAO,KAAMA,EAAO,KAAMJ,CAAI,EAClD,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,KAAMD,EAAO,KACb,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,UACH,OAAO,IAAIK,GAAO,CAChB,SAAU,UACV,QAASD,EAAO,QAChB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,UAAWD,EAAO,UAClB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,QACE,MAAM,IAAI,MAAM,kCAAkCI,EAAO,QAAQ,mBAAmB,EAE1F,ICzDA,IAwBaE,GAxBbC,GAAAC,EAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAgBaN,GAAP,KAAa,CAyCjB,YACIO,EAEAC,EAA8EC,EAAwB,CAExGC,GAAe,EAEf,IAAIC,EACAC,EAEJ,GAAI,OAAOL,GAAS,UAAY,aAAcA,EAO5C,OAHA,KAAK,aAAeA,EAAK,SACzBI,EAAOJ,EAAK,KACZK,EAAOL,EAAK,KACJA,EAAK,SAAU,CACrB,IAAK,aAAc,CACjB,IAAMM,EAAgCC,GAAsC,IAAIH,CAAI,EACpF,GAAI,CAACE,EACH,MAAM,IAAI,UAAU,qBAAqBF,CAAI,uCAAuC,EAEtF,GAAI,EAAEJ,EAAK,gBAAgBM,GACzB,MAAM,IAAI,UAAU,4BAA4BA,EAA8B,IAAI,EAAE,EAEtF,KAAK,QAAUN,EAAK,KACpB,MAEF,IAAK,UAAW,CACd,GAAII,IAAS,UACX,MAAM,IAAI,UAAU,qBAAqBA,CAAI,iCAAiC,EAEhF,KAAK,eAAiBJ,EAAK,QAC3B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,IAAK,aAAc,CACjB,GAAKI,IAAS,WAAaA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAC7FA,IAAS,SAAWA,IAAS,OAChC,MAAM,IAAI,UAAU,qBAAqBA,CAAI,oCAAoC,EAEnF,KAAK,cAAgBJ,EAAK,UAC1B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,QACE,MAAM,IAAI,MAAM,6CAA6C,KAAK,YAAY,GAAG,MAEhF,CAIL,IAAIQ,EACAC,EAEJ,GAAI,OAAOT,GAAS,SAMlB,GAFAI,EAAOJ,EACPS,EAAYP,EACRF,IAAS,SAAU,CAErB,GAAI,
CAAC,MAAM,QAAQC,CAAI,EACrB,MAAM,IAAI,UAAU,gDAAiD,EAIvEO,EAAOP,MACF,CAEL,IAAMS,EAAwBH,GAAsC,IAAIP,CAAI,EAC5E,GAAIU,IAA0B,OAC5B,MAAM,IAAI,UAAU,4BAA4BV,CAAI,GAAG,EAEzD,GAAI,MAAM,QAAQC,CAAI,EAAG,CACvB,GAAID,IAAS,WAAaU,IAA0B,YAMlD,MAAM,IAAI,UACN,+FAA+F,EAC1FV,IAAS,UAAYA,IAAS,QAYvCQ,EAAQE,EAA8B,KAAKT,EAAM,MAAM,EAIvDO,EAAQE,EAA8B,KAAKT,CAAI,UAExCA,aAAgBS,EACzBF,EAAOP,MAEP,OAAM,IAAI,UAAU,KAAKG,CAAI,kCAAkCM,CAAqB,EAAE,UAO1FD,EAAYR,EACR,MAAM,QAAQD,CAAI,EAAG,CAEvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qDAAqD,EAE3E,IAAMW,EAAmB,OAAOX,EAAK,CAAC,EACtC,GAAIW,IAAqB,SACvBP,EAAO,SACPI,EAAOR,UACEW,IAAqB,UAC9BP,EAAO,OAIPI,EAAO,WAAW,KAAKR,CAAa,MAEpC,OAAM,IAAI,UAAU,uCAAuCW,CAAgB,GAAG,MAE3E,CAEL,IAAMC,EACFC,GAAsC,IAAIb,EAAK,WAA8C,EACjG,GAAIY,IAAe,OACjB,MAAM,IAAI,UAAU,qCAAqCZ,EAAK,WAAW,GAAG,EAE9EI,EAAOQ,EACPJ,EAAOR,EAKX,GAAIS,IAAc,OAEhBA,EAAY,CAACD,EAAK,MAAM,UACf,CAAC,MAAM,QAAQC,CAAS,EACjC,MAAM,IAAI,UAAU,wCAAyC,EAE/DJ,EAAOI,EAEP,KAAK,QAAUD,EACf,KAAK,aAAe,MAItB,IAAMM,EAAOC,GAAcV,CAAI,EAE/B,GAAI,KAAK,SAAWS,IAAS,KAAK,QAAQ,OACxC,MAAM,IAAI,MAAM,iBAAiBA,CAAI,gCAAgC,KAAK,QAAQ,MAAM,IAAI,EAG9F,KAAK,KAAOV,EACZ,KAAK,KAAOC,EACZ,KAAK,KAAOS,CACd,CAIA,aAAa,UACTE,EACAC,EACoB,CACtB,OAAOC,GAAgBF,EAAOC,CAAO,CACvC,CAEA,OAAO,YACHE,EAA4BF,EAAoC,CAClE,OAAOG,GAAkBD,EAASF,CAAO,CAC3C,CAEA,OAAO,cACHI,EAAgCJ,EAAsC,CACxE,OAAOK,GAAoBD,EAAWJ,CAAO,CAC/C,CAEA,OAAO,iBACHb,EAASmB,EAAwClB,EAAwB,CAC3E,OAAOmB,GAAuBpB,EAAMmB,EAAQlB,CAAI,CAClD,CAKA,UAAUY,EAAgC,CACxC,OAAOQ,GAAgB,KAAMR,CAAO,CACtC,CAEA,YAAYA,EAAkC,CAC5C,OAAOS,GAAkB,KAAMT,CAAO,CACxC,CAgDA,IAAI,MAAI,CAEN,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,QACR,MAAM,IAAI,MACN,gJAC2E,EAEjF,OAAO,KAAK,OACd,CAEA,IAAI,UAAQ,CACV,OAAO,KAAK,YACd,CAEA,IAAI,SAAO,CAET,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,eACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,cACd,CAEA,IAAI,WAAS,CAEX,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,cACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,aACd,CAKA,MAAM,QAAQU,EAAqB,CAEjC,OADA,KAAK,YAAW,EACR,KAAK,aAAc,CACzB,IAAK,MACL,IAAK,aACH,OAAO,KAAK,KACd,IAAK,UACL,IAAK,aAAc,CACjB,GAAI,CAAC,KAAK,WACR,MAAM,IAAI,MAAM,qEAAqE,EAEvF,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAE3D,GAAI,CACF,KAAK,cAAgB,GACrB,IAAMnB,EAAO,MAAM,KAAK,WAAU,EAClC,YAAK,WAAa,OAClB,KAAK,aAAe,MACpB,KAAK,QAAUA,EAEXmB,GAAe,KAAK,WACtB,KAAK,SAAQ,EACb,KAAK,SAAW,QAGXnB,UAGP,KAAK,cAAgB,IAGzB,QACE,MAAM,IAAI,MAAM,kCAAkC,KAAK,YAAY,EAAE,EAE3E,CAEA,SAAO,CACL,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAGvD,KAAK,WACP,KAAK,SAAQ,EACb,KAAK,SAAW,QAElB,KAAK,QAAU,OACf,KAAK,eAAiB,OACtB,KAAK,cAAgB,OACrB,KAAK,WAAa,OAClB,KAAK,cAAgB,OAErB,KAAK,aAAe,MACtB,CAKQ,aAAW,CACjB,GAAI,KAAK,eAAiB,OACxB,MAAM,IAAI,MAAM,yBAAyB,CAE7C,CAEA,QAAQH,EAAuB,CAE7B,GADA,KAAK,YAAW,EACZ,KAAK,YAAc,KAAK,SAC1B,MAAM,IAAI,MAAM,iDAAiD,EAEnE,OAAOuB,GAAc,KAAMvB,CAAI,CACjC,KCpaF,IAwUawB,GAxUbC,GAAAC,EAAA,kBAIAC,KAoUaH,GAASA,KCxUtB,IAQaI,GAQPC,GAqBOC,GAUAC,GA/CbC,GAAAC,EAAA,kBAGAC,KAKaN,GAAQ,CAACO,EAAoBC,IAAiB,EACrD,OAAOC,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAI9D,QAAQ,UAAU,GAAGF,CAAU,UAAUC,CAAK,EAAE,CAClD,EAEMP,GAAa,CAACS,EAAaC,IAAqB,CACpD,IAAMC,EAAQ,IAAI,MAAK,EAAG,OAAO,MAAM,aAAa,GAAK,CAAA,EACrDC,EAAe,GACnB,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,GAAID,GAAgB,CAACD,EAAME,CAAC,EAAE,SAAS,YAAY,EAAG,CACpD,IAAIN,EAAQ,QAAQE,CAAG,KAAKE,EAAME,CAAC,EAAE,KAAI,EAAG,MAAM,GAAG,EAAE,CAAC,CAAC,GACrDH,IACFH,GAAS,KAAKG,CAAQ,IAExBX,GAAM,MAAOQ,CAAK,EAClB,OAEEI,EAAME,CAAC,EAAE,SAAS,YAAY,IAChCD,EAAe,IAGrB,EAKaX,GAAoBS,GAAqB,EAChD,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,QAASU,CAAQ,CAC9B,EAKaR,GAAkBQ,GAAqB,EAC9C,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,MAAOU,CAAQ,CAC5B,ICpDA,IAgBaI,GAhBbC,GAAAC,EAAA,kBAGAC,
KAIAC,KACAC,KAQaL,GAAP,MAAOM,CAAgB,CAC3B,YAAoBC,EAAgC,CAClD,KAAK,QAAUA,CACjB,CAGA,MAAM,IAAIC,EAAkBC,EAA+BC,EAAiB,CAC1EC,GAAgB,EAChB,IAAMC,EAA4C,CAAA,EAC9CC,EAAsB,CAAA,EAE1B,GAAI,OAAOL,GAAU,UAAYA,IAAU,MAAQA,aAAiBM,IAAU,MAAM,QAAQN,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIO,EAAiB,GAErB,GAAI,OAAON,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBK,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQL,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DM,EAAiB,GAEjB,QAAWC,KAAQP,EAAM,CACvB,GAAI,OAAOO,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAI,KAAK,YAAY,QAAQA,CAAI,IAAM,GACrC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEJ,EAAQI,CAAI,EAAI,KAGlB,GAAI,OAAON,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIO,EAAY,GACVC,EAAW,OAAO,oBAAoBT,CAAI,EAChD,QAAWO,KAAQ,KAAK,YACtB,GAAIE,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKV,EAA4DO,CAAI,GACvEG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBH,EAAQI,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOP,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDG,EAAUJ,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWO,KAAQ,KAAK,WACtB,GAAI,OAAOR,EAAMQ,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQ,KAAK,YACtBJ,EAAQI,CAAI,EAAI,KAMpB,IAAMI,EAAU,MAAM,KAAK,QAAQ,IAAIZ,EAAOI,EAASC,CAAO,EACxDQ,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAAC,GAAc,EACPH,CACT,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,CAOA,aAAa,OACTI,EAAyChB,EAA8BC,EACvEgB,EAAqB,CACvBf,GAAgB,EAEhB,IAAIgB,EACAd,EAA0B,CAAA,EAE9B,GAAI,OAAOY,GAAS,UAElB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7CgB,aAAgB,YAEzB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAGpDgB,aAAgB,aACf,OAAO,kBAAsB,KAAeA,aAAgB,kBAAoB,CACnF,IAAMG,EAASH,EACXI,EAAa,EACbC,EAAaL,EAAK,WACtB,GAAI,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,GAAS,SAAU,CAEnC,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,EAAa,GAAKA,GAAcD,EAAO,WACzC,MAAM,IAAI,WAAW,oCAAoCA,EAAO,UAAU,IAAI,EAGhF,GADAE,EAAaL,EAAK,WAAaI,EAC3B,OAAOnB,GAAS,SAAU,CAE5B,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,GAAc,GAAKD,EAAaC,EAAaF,EAAO,WACtD,MAAM,IAAI,WAAW,oCAAoCA,EAAO,WAAaC,CAAU,IAAI,EAE7F,GAAI,OAAOH,GAAS,UAAYA,IAAS,KACvCb,EAAUa,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7C,OAAOhB,EAAS,IACzB,MAAM,IAAI,UAAU,gCAAkC,UAE/C,OAAOD,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,EAEtDkB,EAAuB,IAAI,WAAWC,EAAQC,EAAYC,CAAU,MAEpE,OAAM,IAAI,UAAU,qDAAyD,EAI/E,GAAM,CAACC,EAASC,CAAuB,EAAI,MAAMC,GAAoCpB,CAAO,EACtFN,EAAU,MAAMwB,EAAQ,8BAA8BJ,EAAsBK,CAAuB,EACzG,OAAAR,GAAc,EACP,IAAIlB,EAAiBC,CAAO,CACrC,CAEA,gBAAc,CACZ,KAAK,QAAQ,eAAc,CAC7B,CACA,cAAY,CACV,KAAK,QAAQ,aAAY,CAC3B,CAEA,IAAI,YAAU,CACZ,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,aAAW,CACb,OAAO,KAAK,QAAQ,WACtB,KCxNF,IA8hBa2B,GA9hBbC,GAAAC,EAAA,kBAGAC,KA2hBaH,GAA4CA,KC9hBzD,IAAAI,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAAC,EAAA,oBCAA,IAgBMC,GAGOC,GAnBbC,GAAAC,EAAA,kBAGAC,KAIAC,KASML,GAA0B,gHAGnBC,GAAP,MAAOK,CAAe,CAC1B,YAAoBC,EAAiCC,EAA4BC,EAAqB,CACpG,KAAK,QAAUF,EACf,KAAK,kBAAoBC,EACzB,KAAK,aAAeC,CACtB,CAKA,IAAI,oBAAkB,CACpB,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,qBAAmB,CACrB,OAAO,KAAK,QAAQ,WACtB,CAEA,IAAI,gBAAc,CAChB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,eAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CACA,IAAI,iBAAe,C
ACjB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,gBAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CAEA,aAAa,OAAOC,EAA+CC,EAA+B,CAEhG,IAAMC,EAA+BF,EAAgB,WAAa,GAC5DG,EAAoCH,EAAgB,gBAAkB,GACtEI,EAA0BH,GAAkB,CAAA,EAG5C,CAACI,EAASC,CAAuB,EAAI,MAAMC,GAAoCH,CAAO,EAC5F,GAAIC,EAAQ,6BAA8B,CACxC,IAAMR,EAAU,MAAMQ,EAAQ,6BAC1BL,EAAgB,gBAAiBA,EAAgB,WAAYE,EAAWC,EACxEG,CAAuB,EAC3B,OAAO,IAAIV,EAAgBC,EAAS,CAAC,CAACG,EAAgB,eAAgB,CAAC,CAACA,EAAgB,SAAS,MAEjG,OAAM,IAAI,MAAMV,EAAe,CAEnC,CAeA,wBACIkB,EAA+BC,EAAgCC,EAAkBC,EACjFC,EAAiB,CACnB,IAAMC,EAA4C,CAAA,EAC9CT,EAAsB,CAAA,EAE1B,GAAI,OAAOM,GAAU,UAAYA,IAAU,MAAQA,aAAiBI,IAAU,MAAM,QAAQJ,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIK,EAAiB,GAErB,GAAI,OAAOJ,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBG,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQH,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DI,EAAiB,GAEjB,QAAWC,KAAQL,EAAM,CACvB,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAIP,EAAY,QAAQO,CAAI,IAAM,GAChC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEH,EAAQG,CAAI,EAAI,KAGlB,GAAI,OAAOJ,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIK,EAAY,GACVC,EAAW,OAAO,oBAAoBP,CAAI,EAChD,QAAWK,KAAQP,EACjB,GAAIS,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKR,EAAmDK,CAAI,GAC9DG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBF,EAAQG,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOL,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDR,EAAUO,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWK,KAAQR,EACjB,GAAI,OAAOE,EAAMM,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQP,EACjBI,EAAQG,CAAI,EAAI,KAIpB,MAAO,CAACH,EAAST,CAAO,CAC1B,CASA,uCAAuCgB,EAAkC,CACvE,IAAMC,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAOF,CACT,CAEA,MAAM,eAAa,CACjB,MAAM,KAAK,QAAQ,cAAa,CAClC,CAIA,MAAM,aAAaX,EAAkBC,EAA+BC,EAAiB,CACnF,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,mBAAoB,KAAK,oBAAqBM,EAAOC,EAAMC,CAAI,EAC/FQ,EAAU,MAAM,KAAK,QAAQ,aAAaV,EAAOG,EAAST,CAAO,EACvE,OAAO,KAAK,uCAAuCgB,CAAO,CAC5D,CAEA,MAAM,iBAAiBhB,EAA+C,CACpE,GAAI,KAAK,kBACP,MAAM,KAAK,QAAQ,iBAAiBA,GAAW,CAAA,CAAE,MAEjD,OAAM,IAAI,MAAM,oDAAoD,CAExE,CAIA,MAAM,YAAYM,EAAkBC,EAA+BC,EAAiB,CAClF,GAAI,KAAK,aAAc,CACrB,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,eAAgB,KAAK,gBAAiBM,EAAOC,EAAMC,CAAI,EACvFQ,EAAU,MAAM,KAAK,QAAQ,YAAYV,EAAOG,EAAST,CAAO,EACtE,OAAO,KAAK,uCAAuCgB,CAAO,MAE1D,OAAM,IAAI,MAAM,+CAA+C,CAEnE,CAEA,MAAM,kBAAkBI,EAAgB,GAAI,CAC1C,OAAO,KAAK,QAAQ,kBAAkBA,CAAa,CACrD,CAEA,MAAM,qBAAqBC,EAAmBD,EAAgB,GAAI,CAChE,IAAME,EAAa,MAAM,KAAK,kBAAkBF,CAAa,EAG7D,GAAIC,EAAM,SAAW,EAAIC,EACvB,MAAM,IAAI,MACN,qJAC0D,EAEhE,OAAO,KAAK,QAAQ,qBAAqBD,EAAOD,CAAa,CAC/D,CAEA,MAAM,wBAAwBA,EAAgB,GAAI,CAChD,OAAO,KAAK,QAAQ,wBAAwBA,CAAa,CAC3D,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,KCzPF,IAmMaG,GAnMbC,GAAAC,EAAA,kBAKAC,KA8LaH,GAA0CA,KCnMvD,IAAAI,GAAA,GAAAC,GAAAD,GAAA,sBAAAE,GAAA,UAAAC,GAAA,qBAAAC,GAAA,mBAAAC,GAAA,WAAAC,GAAA,oBAAAC,GAAA,QAAAC,GAAA,oBAAAC,KAAA,IAAAC,GAAAC,EAAA,kBAmBAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,OC5BA,IAAAC,GAAAC,EAAA,oBCAA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IA0FMC,GACAC,GAwFCF,GAnLPG,GAAAC,EAAA,kBAsFAC,KACAC,KACAC,KAEMN,GAAc,wBACdC,GAAgB,WAAW,MAAM,OAASD,GAE5CC,KAEF,KAAK,UAAaM,GAA2C,CAC3D,GAAM,CAAC,KAAAC,EAAM,GAAKC,CAAO,EAAIF,EAAG,KAChC,GAAI,CACF,OAAQC,EAAM,CACZ,IAAK,YACHE,GAAsBD,EAAS,IAAI,EAC9B,KACG,IAAM,CACJE,GAAYF,CAAQ,EAAE,KAClB,IAAM,CACJ,YAAY,CAAC,KAAAD,CAAI,CAAC,CACpB,
EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,CACP,EACAA,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,MACF,IAAK,UAAW,CACd,GAAM,CAAC,OAAAC,EAAQ,IAAAC,CAAG,EAAIL,EACtBM,GAAOD,EAAKD,CAAM,EACb,KACG,IAAM,CACJ,YAAY,CAAC,KAAAL,CAAI,CAAC,CACpB,EACAI,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,YAAa,CAChB,GAAM,CAAC,OAAAI,CAAM,EAAIP,EACXQ,EAAaC,GAAuBF,CAAM,EAChD,YAAY,CAAC,KAAAR,EAAM,IAAKS,CAAU,CAAmB,EACrD,KACF,CACA,IAAK,SAAU,CACb,GAAM,CAAC,MAAAE,EAAO,QAAAC,CAAO,EAAIX,EACzBY,GAAcF,EAAOC,CAAO,EACvB,KACGE,GAAmB,CACjB,YAAY,CAAC,KAAAd,EAAM,IAAKc,CAAe,CAAmB,CAC5D,EACAV,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,UACHW,GAAed,CAAQ,EACvB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,IAAK,MAAO,CACV,GAAM,CAAC,UAAAgB,EAAW,aAAAC,EAAc,OAAAC,EAAQ,cAAAC,EAAe,QAAAP,CAAO,EAAIX,EAClEmB,GAAIJ,EAAWC,EAAcC,EAAQC,EAAe,IAAI,MAAMA,EAAc,MAAM,EAAE,KAAK,IAAI,EAAGP,CAAO,EAClG,KACGS,GAAW,CACLA,EAAQ,KAAKC,GAAKA,EAAE,CAAC,IAAM,KAAK,EAClC,YAAY,CAAC,KAAAtB,EAAM,IAAK,iDAAiD,CAAC,EAE1E,YACI,CAAC,KAAAA,EAAM,IAAKqB,CAAO,EACnBE,GAA2B,CAAC,GAAGL,EAAQ,GAAGG,CAAO,CAAiC,CAAC,CAE3F,EACAjB,GAAO,CACL,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAC,CACzB,CAAC,EACT,KACF,CACA,IAAK,gBACHoB,GAAavB,CAAQ,EACrB,YAAY,CAAC,KAAAD,CAAI,CAAC,EAClB,MACF,QACF,CACF,OAASI,EAAK,CACZ,YAAY,CAAC,KAAAJ,EAAM,IAAAI,CAAG,CAAmB,CAC3C,CACF,GAGKb,GAAQE,GACX,KACCgC,GACG,IAAI,OAAOA,GAAeC,GAAY,CAAC,KAA0B,SAAsB,KAAMlC,EAAW,CAAC,ICtLjH,IAAAmC,GAAA,GAAAC,GAAAD,GAAA,aAAAE,KAAA,IAAIC,GAAEC,GAA4s0CF,GAAlt0CG,GAAAC,EAAA,kBAAMF,IAAGD,GAAE,YAAY,IAAI,eAAeC,EAAE,CAAC,EAAE,CAAC,SAAS,GAAG,CAAC,OAAOG,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAED,EAAC,CAAC,SAASE,GAAG,CAAC,OAAOH,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEE,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOL,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEI,EAAC,CAAC,SAAS,GAAG,CAAC,OAAON,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEK,CAAC,CAAC,SAAS,GAAG,CAAC,OAAOP,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEM,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOT,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEQ,CAAC,CAAC,SAASC,GAAG,CAAC,OAAOX,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEU,EAAC,CAAC,SAASC,GAAG,CAAC,OAAOb,EAAE,QAAQC,GAAE,QAAQC,GAAE,EAAEY,EAAC,CAAC,IAAIC,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAEpB,CAAC,EAAEqB,EAAE,IAAI,QAAS,CAACtB,EAAEC,IAAI,CAACkB,EAAEnB,EAAEoB,EAAEnB,CAAC,CAAE,EAAEsB,EAAY,OAAO,QAAjB,SAAwBC,EAAc,OAAO,eAAnB,WAAiCC,EAAED,GAAiB,KAAK,MAAnB,aAAwBH,EAAE,kBAAkB,CAACrB,EAAEC,IAAI,EAAEoB,EAAE,KAAKA,EAAE,GAAG,IAAI,MAAM,IAAIrB,EAAEC,CAAC,CAAC,EAAEoB,EAAE,oBAAoB,IAAI,CAAC,OAAOA,EAAE,EAAE,EAAE,IAAIK,EAAE,WAAW,mBAAmB,IAAI,YAAY,OAAO,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,YAAY,IAAIC,EAAE,IAAI,CAAC,IAAM3B,EAAE,CAACA,EAAEC,EAAE2B,IAAI,IAAIrB,IAAI,CAAC,IAAME,EAAEoB,GAAGC,EAAE7B,IAAI,EAAEM,EAAEP,EAAE,GAAGO,CAAC,EAAE,IAAMwB,EAAE9B,IAAI,EAAE,OAAO6B,IAAIC,IAAI/B,EAAE+B,EAAEH,EAAEE,CAAC,EAAE7B,EAAE2B,EAAE,MAAMC,IAAIpB,EAAE,IAAI,QAAS,CAACT,EAAEC,IAAI,CAAC+B,GAAG,CAAC,QAAQhC,EAAE,OAAOC,CAAC,CAAC,CAAE,EAAEM,CAAC,EAAEN,EAAED,GAAG,SAASC,IAAI,CAAC,GAAG,CAAC,GAAGoB,EAAE,GAAG,MAAM,MAAM,yBAAyB,EAAE,IAAMO,EAAEP,EAAE,GAAG,CAAC,GAAGpB,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAEM,EAAE,MAAMP,EAAE,GAAGC,CAAC,EAAE,GAAGoB,EAAE,KAAKO,EAAE,MAAM,MAAM,kBAAkB,EAAEP,EAAE,IAAI,MAAM,EAAE,IAAMZ,EAAEmB,EAAE,OAAO,GAAG,EAAEnB,EAAE,OAAO,CAAC,IAAIT,EAAE,MAAM,QAAQ,IAAIS,CAAC,EAAE,GAAGT,EAAEA,EAAE,OAAQA,GAAGA,CAAE,EAAE,EAAEA,EAAE,OAAO,MAAM,MAAMA,EAAE,KAAK;AAAA,CAAI,CAAC,CAAC,CAAC,OAAOO,CAAC,QAAC,CAAQc,EAAE,GAAG,IAAI,CAAC,EAAEA,EAAE,kBAAkBrB,EAAEqB,EAAE,kBAAmB,IAAIA,EAAE,kBAAoBrB,GAAGqB,EAAE,kBAAkBrB,CAAE,EAAEqB,EAAE,QAAQpB,EAAED,EAAEqB,EAAE,QAAS,IAAIA,EAAE,QAAUrB,GAAGqB,EAAE,QAAQrB,CAAE,CAAC,EAAEqB,EAAE,mBAAmBp
B,EAAED,EAAEqB,EAAE,mBAAoB,IAAIA,EAAE,mBAAqBrB,GAAGqB,EAAE,mBAAmBrB,CAAE,CAAC,EAAEqB,EAAE,cAAcrB,EAAEqB,EAAE,cAAe,IAAIA,EAAE,cAAgBrB,GAAGqB,EAAE,cAAcrB,CAAE,EAAE2B,EAAE,MAAM,EAAEN,EAAE,SAAS,CAACrB,EAAEC,IAAI,CAAC,GAAG0B,IAAI,EAAa3B,IAAX,SAAa,CAAC,CAACqB,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,EAAE,EAAEpB,EAAE,IAAMD,EAAEqB,EAAE,GAAGA,EAAE,mBAAmB,CAACpB,EAAE2B,EAAErB,EAAEE,IAAIT,EAAE,eAAeC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEY,EAAE,cAAcpB,GAAGD,EAAE,UAAUC,CAAC,EAAEoB,EAAE,qBAAqB,CAACpB,EAAE2B,EAAErB,IAAIP,EAAE,iBAAiBC,EAAE2B,EAAErB,CAAC,EAAEc,EAAE,qBAAqBpB,GAAG,CAACD,EAAE,iBAAiBC,CAAC,CAAC,EAAEoB,EAAE,eAAepB,GAAGD,EAAE,WAAWC,CAAC,CAAC,CAAC,EAAE,IAAIgC,EAAEC,EAAEC,EAAE,OAAO,OAAO,CAAC,EAAEd,CAAC,EAAEe,EAAE,iBAAiBC,EAAE,CAACrC,EAAEC,IAAI,CAAC,MAAMA,CAAC,EAAEqC,EAAE,IAAIf,GAAGC,KAAKA,EAAEc,EAAE,KAAK,SAAS,KAAkB,OAAO,SAApB,KAA8B,SAAS,gBAAgBA,EAAE,SAAS,cAAc,KAAKtC,KAAIsC,EAAEtC,IAAGsC,EAAEA,EAAE,WAAW,OAAO,EAAE,GAAGA,EAAE,OAAO,EAAEA,EAAE,QAAQ,SAAS,EAAE,EAAE,YAAY,GAAG,EAAE,CAAC,EAAsFd,IAAIU,EAAElC,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAOA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,aAAa,cAAcA,EAAE,KAAK,IAAI,EAAE,IAAI,WAAWA,EAAE,QAAQ,CAAC,GAAGgC,EAAE,CAACjC,EAAEC,EAAE2B,IAAI,CAAC,IAAIrB,EAAE,IAAI,eAAeA,EAAE,KAAK,MAAMP,EAAE,EAAE,EAAEO,EAAE,aAAa,cAAcA,EAAE,OAAO,IAAI,CAAMA,EAAE,QAAP,KAAkBA,EAAE,QAAL,GAAaA,EAAE,SAASN,EAAEM,EAAE,QAAQ,EAAEqB,EAAE,CAAC,EAAErB,EAAE,QAAQqB,EAAErB,EAAE,KAAK,IAAI,CAAC,GAAG,IAAIgC,EAAE,QAAQ,IAAI,KAAK,OAAO,EAAEC,EAAE,QAAQ,MAAM,KAAK,OAAO,EAAEC,EAAEF,EAAEG,EAAEF,EAAE,GAAG,OAAO,OAAOnB,EAAEc,CAAC,EAAEA,EAAE,KAAKV,EAAE,CAAY,IAASkB,EAAT,SAAY3C,EAAE,CAAC,GAAG,CAAC,IAAIC,EAAED,EAAE,KAAK4B,EAAE3B,EAAE,IAAI,GAAY2B,IAAT,OAAW,CAAC,IAAI5B,EAAE,CAAC,EAAE,KAAK,UAAUC,GAAGD,EAAE,KAAKC,CAAC,EAAE,KAAK,YAAY,IAAI,CAAC,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAE,QAAQA,KAAKD,EAAE2C,EAAG1C,CAAC,EAAE,KAAK,UAAU0C,CAAE,EAAE,QAAU3C,KAAKC,EAAE,SAASoB,EAAErB,CAAC,GAAG,CAACqB,EAAErB,CAAC,EAAE,QAAQqB,EAAErB,CAAC,EAAE,IAAIC,IAAI,CAAC,YAAY,CAAC,GAAG,cAAc,GAAGD,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAWD,GAAT,UAAayC,EAAEpB,EAAErB,CAAC,GAAeA,GAAZ,aAAgB0C,EAAErB,EAAErB,CAAC,IAAII,EAAEH,EAAE,WAAWK,GAAE,EAAEsC,GAAE3C,EAAE,UAAU,CAAC,SAAiB2B,IAAR,MAAU,CAACiB,GAAG5C,EAAE,YAAY,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE6C,GAAG7C,EAAE,WAAW,EAAE8C,GAAG,EAAEC,GAAG,EAAEC,KAAIC,GAAG,EAAED,GAAE,IAAI,GAAG,CAACE,GAAGlD,EAAE,cAAcA,EAAE,GAAG,CAAC,OAAOD,EAAE,CAAC,GAAaA,GAAV,SAAY,MAAMA,CAAC,CAAC,MAAgB4B,IAAX,SAAawB,GAAG,GAAGC,GAAG,EAAE,EAAmBpD,EAAE,SAAnB,iBAA6C2B,IAAjB,eAAmBqB,IAAGK,GAAG,EAAE1B,IAAIc,EAAE,oCAAoCd,CAAC,EAAE,EAAEc,EAAEzC,CAAC,GAAG,OAAOD,EAAE,CAAC,MAAMuD,GAAG,EAAEvD,CAAC,CAAC,EAAjqB,IAAA2C,KAAhBC,GAAEK,GAAE,GAA8qBP,EAAE,YAAY1C,EAAE,CAACA,EAAEA,EAAE,KAAK,GAAG,EAAE,QAAQ,MAAMA,CAAC,CAAC,EAAE,KAAK,MAAM,YAAYA,EAAE,CAAC,YAAY,CAAC,GAAG,QAAQ,KAAKA,EAAE,KAAK,GAAG,EAAE,GAAGoD,GAAG,CAAC,CAAC,CAAC,EAAE/B,EAAE,gBAAgB,CAACrB,EAAEC,IAAI,IAAI,QAASD,GAAG,CAAC4C,GAAEhB,GAAG,CAACA,EAAE,IAAI,YAAY,SAASA,EAAE4B,GAAG,CAAC,EAAEvD,EAAE2B,CAAC,EAAE5B,EAAE,CAAC,CAAC,CAAE,EAAE,KAAK,qBAAqBA,GAAG,CAAC,MAAMA,EAAE,QAAQA,CAAC,EAAE,KAAK,UAAU2C,CAAE,CAAC,IAAIvC,EAAEqD,GAAEC,EAAErD,GAAEG,GAAEE,GAAEC,EAAEC,GAAEE,EAAEE,GAAE2C,EAAEC,EAAE1C,GAAE2C,GAAE,GAAG,SAASvD,IAAG,CAAC,IAAIN,EAAEI,EAAE,OAAOiB,EAAE,MAAMhB,GAAE,IAAI,UAAUL,CAAC,EAAEqB,EAAE,OAAOX,GAAE,IAAI,WAAWV,CAAC,EAAEqB,EAAE,OAAOb,GAAE,IAAI,WAAWR,CAAC,EAAEqB,EAAE,QAAQV,EAAE,IAAI,YAAYX,CAAC,EAAEqB,EAAE,OAAOT,GAAE,IAAI,WAAWZ,CAAC,EAAEqB,EAAE,QAAQP,EAAE,IAAI,YAAYd,CAAC,EAAEqB,EAAE,QAAQL,GAAE,IAAI,aAAahB,CAAC,EAAEqB,EAAE,QAAQH,GAAE,IAAI,aAAalB,CAAC,EAAEqB,EAAE,OAAOsC,EAAE,IAAI,cAAc3D,CAAC,EAAEq
B,EAAE,QAAQuC,EAAE,IAAI,eAAe5D,CAAC,CAAC,CAAC,GAAG,CAACyB,EAAE,CAAC,GAAGJ,EAAE,WAAWjB,EAAEiB,EAAE,mBAAmB,GAAGjB,EAAE,IAAI,YAAY,OAAO,CAAC,QAAQ,IAAI,QAAQ,MAAM,OAAO,EAAE,CAAC,GAAG,kBAAkBsB,GAAG,MAAMgB,EAAE,6NAA6N,EAAE,MAAM,YAAY,EAAEpC,GAAE,CAAC,CAAC,IAAIwD,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,CAAC,EAAEC,GAAE,EAAEC,GAAE,KAAKC,GAAE,KAAK,SAASC,IAAG,CAAC,GAAM,EAAEH,IAAL,IAAgBC,KAAP,OAAW,cAAcA,EAAC,EAAEA,GAAE,MAAMC,IAAG,CAAC,IAAInE,EAAEmE,GAAEA,GAAE,KAAKnE,EAAE,CAAC,CAAC,CAAC,SAASqE,GAAErE,EAAE,CAAC,MAAM0C,EAAE1C,EAAE,WAAWA,EAAE,GAAG,EAAE6D,GAAE,GAAGH,EAAE,EAAE1D,EAAE,IAAI,YAAY,aAAaA,EAAE,0CAA0C,EAAEoB,EAAEpB,CAAC,EAAEA,CAAC,CAAC,IAAIsE,GAAGC,GAAGvE,GAAGA,EAAE,WAAW,uCAAuC,EAAEwE,GAAGxE,GAAGA,EAAE,WAAW,SAAS,EAAE,SAASyE,GAAGzE,EAAE,CAAC,GAAGkC,EAAE,OAAOA,EAAElC,CAAC,EAAE,KAAK,iDAAiD,CAAC,SAAS0E,GAAG1E,EAAEC,EAAE2B,EAAE,CAAC,OAAO,SAAS5B,EAAE,CAAC,GAAGuB,GAAGC,EAAE,CAAC,GAAe,OAAO,OAAnB,YAA0B,CAACgD,GAAGxE,CAAC,EAAE,OAAO,MAAMA,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMC,GAAG,CAAC,GAAG,CAACA,EAAE,GAAG,KAAK,uCAAuCD,CAAC,IAAI,OAAOC,EAAE,YAAY,CAAC,CAAE,EAAE,MAAO,IAAIwE,GAAGzE,CAAC,CAAE,EAAE,GAAGiC,EAAE,OAAO,IAAI,QAAS,CAAChC,EAAE2B,IAAI,CAACK,EAAEjC,EAAGA,GAAGC,EAAE,IAAI,WAAWD,CAAC,CAAC,EAAG4B,CAAC,CAAC,CAAE,CAAC,CAAC,OAAO,QAAQ,QAAQ,EAAE,KAAM,IAAI6C,GAAGzE,CAAC,CAAE,CAAC,EAAEA,CAAC,EAAE,KAAMA,GAAG,YAAY,YAAYA,EAAEC,CAAC,CAAE,EAAE,KAAK2B,EAAG5B,GAAG,CAAC0C,EAAE,0CAA0C1C,CAAC,EAAE,EAAEqE,GAAErE,CAAC,CAAC,CAAE,CAAC,CAAC,SAASwD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,EAAEmB,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAG5D,GAAG,GAAG6D,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAElJ,EAAE,GAAGmJ,GAAG,GAAGC,GAAG,EAAEC,EAAE,CAAC,CAAC,CAAC,IAAIC,GAAG,CAAC,QAAQ,CAAC1J,EAAEC,EAAE2B,EAAEnB,IAAI,CAAC,GAAYY,IAAT,QAAY,CAACA,EAAE,GAAG,MAAO,GAAE,IAAIrB,EAAE2J,GAAG3J,IAAI,CAAC,GAAG,WAAW,IAAI,IAAIA,EAAEA,EAAE,UAAU,CAAC,GAAG,EAAEA,EAAEqB,EAAE,GAAG,IAAIrB,CAAC,GAAG,MAAO,GAAE,GAAGS,KAAK,GAAGR,KAAK,IAAI2B,KAAK,GAAG5B,EAAE,WAAW,MAAO,GAAE,GAAG,CAAC,OAAOO,EAAE,EAAE,IAAIP,EAAE,SAASC,EAAEA,EAAE2B,CAAC,EAAEnB,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,MAAO,EAAC,CAAC,EAAE,QAAQ,IAAI,CAACY,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQrB,GAAGqB,EAAE,GAAGrB,CAAC,EAAE,QAAQA,GAAGqB,EAAE,GAAGrB,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAGrB,EAAEC,EAAE2B,EAAE,EAAE,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAGrB,EAAEC,EAAE2B,CAAC,CAAC,EAAE,QAAQ,IAAiB,OAAO,oBAApB,IAAwC,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,aAAarB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,E
AAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,UAAUrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,cAAcrB,EAAE,CAAC,MAAMC,EAAE,KAAK2B,CAAC,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,OAAOrB,EAAE,CAAC,IAAIC,EAAE,IAAI2B,CAAC,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,MAAMrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,YAAYrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,OAAOrB,EAAE,CAAC,GAAGC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,MAAMrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,UAAUrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,iBAAiBrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,cAAcrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,aAAarB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,aAAarB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,WAAWrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,WAAWrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EA
AE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,eAAerB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAAC2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,YAAYrB,EAAE,CAAC,KAAKC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE2B,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,eAAerB,EAAE,CAAC,UAAUC,EAAE,KAAK0J,GAAG/H,CAAC,EAAE,OAAOrB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,eAAerB,EAAE,CAAC,UAAUC,EAAE,KAAK0J,GAAG/H,CAAC,EAAE,OAAOrB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOmB,EAAE,OAAO,OAAO,QAAQlB,EAAE,UAAU,CAACM,CAAC,EAAE,MAAME,EAAE,YAAY,CAACqB,CAAC,EAAE,KAAK,CAACjB,EAAEE,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEG,IAAI,CAAC,EAAE,cAAcE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWkI,GAAGjI,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOiB,EAAE,OAAO,OAAO,QAAQhB,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASM,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAME,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASqB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAASjB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAASE,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEI,IAAI,CAAC,EAAE,cAAcC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWmI,GAAGlI,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACzB,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,EAAEC,KAAI,CAACL,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOmB,EAAE,OAAO,OAAO,QAAQlB,EAAE,UAAU,CAACM,CAAC,EAAE,MAAME,EAAE,YAAY,CAACqB,CAAC,EAAE,KAAK,CAACjB,EAAEE,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEG,IAAI,CAAC,EAAE,cAAcE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWkI,GAAGjI,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC1B,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,EAAEE,EAAEC,GAAEC,GAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOiB,EAAE,OAAO,OAAO,QAAQhB,EAAE,UAAU,MAAM,KAAK,EAAE,EAAE,SAASM,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,MAAME,EAAE,YAAY,MAAM,KAAK,EAAE,EAAE,SAASqB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,EAAE,SAASjB,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAK,EAAE,EAAE,SAASE,IAAI,EAAE,GAAGA,IAAI,KAAK,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAAC,EAAE,EAAEI,IAAI,CAAC,EAAE,cAAcC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,GAAE,MAAM,KAAK,EAAE,EAAE,SAASA,KAAI,EAAEC,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWmI,GAAGlI,CAAC,CAAC,CA
AC,CAAC,EAAE,QAAQ,CAACzB,EAAEC,IAAI,CAACoB,EAAE,GAAG,oBAAoBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,IAAI,CAACoB,EAAE,GAAG,oBAAoBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,cAAcrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,IAAI,CAACoB,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,IAAI,CAACoB,EAAE,GAAG,gBAAgBrB,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,EAAEE,EAAEC,GAAEE,GAAEC,EAAEC,GAAEC,KAAI,CAACJ,EAAE,GAAG,UAAUrB,EAAE,CAAC,OAAOyB,GAAE,OAAO,OAAO,SAASxB,EAAE,UAAU2B,EAAE,kBAAkBrB,EAAE,cAAcE,EAAE,UAAU,CAACqB,EAAEC,CAAC,EAAE,aAAa,CAAClB,EAAEE,CAAC,EAAE,KAAK,CAACE,EAAEE,EAAEC,GAAEE,EAAC,EAAE,QAAQ,CAACC,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACxB,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,OAAOrB,EAAE,CAAC,MAAMC,EAAE,KAAK2B,EAAE,OAAOrB,EAAE,OAAOE,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACqB,EAAE,GAAG,SAASrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,SAASrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAAC2B,EAAE,KAAKrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,SAASrB,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAAC2B,EAAE,KAAKrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,IAAI,CAACoB,EAAE,GAAG,UAAUrB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,SAASrB,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,QAAQrB,EAAE,CAAC,KAAKC,EAAE,WAAW2B,EAAE,WAAWrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACqB,EAAE,GAAG,SAASrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,SAASrB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACoB,EAAE,GAAG,iBAAiBrB,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,EAAEE,EAAEC,IAAI,CAACC,EAAE,GAAG,SAASrB,EAAE,CAAC,UAAUC,EAAE,KAAK2B,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAErB,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,wBAAwBoJ,GAAGlJ,CAAC,EAAE,YAAYqB,EAAE,eAAejB,EAAE,mBAAmBE,EAAE,sBAAsB4I,GAAG1I,CAAC,EAAE,KAAK0I,GAAGxI,CAAC,EAAE,YAAYwI,GAAGvI,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACpB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEjB,IAAI,CAACQ,EAAE,GAAG,QAAQrB,EAAE,CAAC,OAAOC,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAE2B,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKqB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEjB,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAA
C,CAAC,EAAE,QAAQb,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,wBAAwBrB,EAAE,CAAC,QAAQC,EAAE,OAAO2B,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,wBAAwBrB,EAAE,CAAC,QAAQC,EAAE,OAAO2B,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAG,QAAQrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACoB,EAAE,GAAG,SAASrB,EAAE,CAAC,SAAS2J,GAAG1J,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,MAAMrB,EAAE,CAAC,KAAKC,EAAE,MAAM2B,EAAE,KAAKrB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,QAAQC,EAAE,SAAS2B,EAAE,QAAQ,CAAC,CAACnB,EAAE,aAAa,CAAC,CAACF,EAAE,OAAOuB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,QAAQC,EAAE,SAAS2B,EAAE,QAAQ,CAAC,CAACnB,EAAE,aAAa,CAAC,CAACF,EAAE,OAAOuB,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,SAASrB,EAAE,CAAC,UAAU,OAAOC,CAAC,EAAE,QAAQ,OAAO2B,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5B,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEjB,EAAEE,EAAEE,IAAI,CAACI,EAAE,GAAG,YAAYrB,EAAE,CAAC,SAASC,EAAE,iBAAiB2B,EAAE,gBAAgBrB,EAAE,MAAME,EAAE,SAASqB,EAAE,eAAejB,EAAE,MAAM,KAAK,EAAE,EAAE,SAAS,OAAOE,CAAC,IAAI,EAAE,OAAOA,CAAC,EAAEF,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,uBAAuB,CAAC,CAACI,CAAC,CAAC,CAAC,EAAE,QAAQjB,GAAG,CAACqB,EAAE,GAAG,UAAUrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,gBAAgBrB,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACqB,EAAE,GAAG,WAAWrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEM,EAAEE,EAAEqB,EAAEjB,EAAEI,EAAEE,EAAEC,EAAEE,EAAEC,EAAEC,GAAEC,GAAEC,EAAEC,GAAEM,KAAI,CAACZ,EAAE,GAAG,OAAOrB,EAAE,CAAC,OAAOwB,GAAE,OAAO,OAAO,SAASvB,EAAE,UAAUM,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,MAAMqB,EAAE,aAAajB,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQE,EAAE,MAAM,KAAK,EAAE,EAAE,SAASA,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,EAAE,EAAEE,KAAI,CAAC,EAAE,WAAWkI,GAAGjI,CAAC,EAAE,kBAAkBC,GAAE,MAAM,KAAKZ,EAAE,EAAE,SAASY,KAAI,EAAEM,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQjC,GAAG,CAACqB,EAAE,GAAG,OAAOrB,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,sBAAsBrB,EAAE,CAAC,SAASC,EAAE,WAAW2B,EAAE,MAAMrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,KAAKC,EAAE,QAAQ2B,EAAE,WAAW,CAAC,CAACrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,IAAI,CAACc,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,KAAKC,EAAE,QAAQ2B,EAAE,WAAW,CAAC,CAACrB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACP,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,cAAcrB,EAAE,CAAC,EAAEC,EAAE,EAAE2B,EAAE,cAAcrB,EAAE,KAAKE,EAAE,UAAUqB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,IAAI,CAACT,EAAE,GAAG,qBAAqBrB,EAAE,CAAC,SAASC,EAAE,iBAAiB2B,EAAE,gBAAgBrB,EAAE,MAAME,EAAE,SAASqB,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC9B,EAAEC,IAAI,CAACoB,EAAE,GAAG,YAAYrB,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAE2B,EAAErB,EAAEE,IAAI,CAACY,EAAE,GAAG,kBAAkBrB,EAAE,CAAC,YAAY,CAAC,CAACC,EAAE,SAAS2B,EAAE,mBAAmBrB,EAAE,MAAME,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACT,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,yBAAyBrB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAAC2B,CAAC,CAAC,CAAC,EAAE,QAAQ5B,GAAG,CAACqB,EAAE,GAAGrB,CAAC,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAIoB,EAAE,GAAGrB,EAAEC,EAAEoB,EAAE,GAAG,GAAGA,EAAE,GAAG,MAAM,EAAE,QAAQ,CAACrB,EAAEC,EAAE2B,IAAI,CAACP,EAAE,GAAG,yBAAy
BrB,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAAC2B,CAAC,CAAC,CAAC,CAAC,EAAE,SAASgD,GAAG5E,EAAEC,EAAE2B,EAAE,CAAC,OAAOgI,GAAI,SAAS,CAAC,MAAMvI,EAAE,GAAGrB,EAAEC,EAAE2B,CAAC,CAAC,CAAE,CAAC,CAAC,SAAS+C,IAAI,CAAC,OAAmB,OAAO,oBAApB,GAAuC,CAAC,SAASkF,GAAG7J,EAAE,CAAC,KAAK,KAAK,aAAa,KAAK,QAAQ,gCAAgCA,CAAC,IAAI,KAAK,OAAOA,CAAC,CAAC,IAAI8J,GAAG9J,GAAG,CAACA,EAAE,UAAU,EAAEA,EAAE,UAAU,IAAI,CAAC,CAAC,EAAE+J,GAAG/J,GAAG,CAAIgK,GAAG,QAAN,IAAeC,GAAG,EAAEC,GAAGF,GAAG,CAAC,CAAC,GAAG,IAAI/J,EAAE+J,GAAG,IAAI,EAAE,GAAG,CAAC/J,EAAE,MAAO,GAAEkK,GAAG,KAAKlK,CAAC,EAAEmK,GAAGpK,EAAE,EAAE,EAAEC,EAAEA,EAAE,GAAGD,EAAE,GAAG,IAAI4B,EAAE,CAAC,IAAI,MAAM,cAAc5B,EAAE,GAAG,IAAIA,EAAE,GAAG,YAAYA,EAAE,EAAE,EAAE,OAAOC,EAAE,YAAY2B,EAAE5B,EAAE,EAAE,EAAE,CAAC,EAAEqK,GAAG,EAAEC,GAAG,CAACtK,EAAEC,KAAK2B,IAAI,CAAC,QAAQrB,EAAE,EAAEqB,EAAE,OAAOnB,EAAE8J,GAAG,EAAEzI,EAAE0I,GAAG,EAAEjK,CAAC,EAAEwB,EAAED,IAAI,EAAEjB,EAAE,EAAEA,EAAEe,EAAE,OAAOf,IAAI,CAAC,IAAIE,EAAEa,EAAEf,CAAC,EAAY,OAAOE,GAAjB,UAAoB4C,EAAE5B,EAAE,EAAElB,CAAC,EAAE,GAAG8C,EAAE5B,EAAE,EAAElB,EAAE,CAAC,EAAEE,IAAI4C,EAAE5B,EAAE,EAAElB,CAAC,EAAE,GAAGI,EAAE,EAAEc,EAAE,EAAElB,EAAE,IAAI,CAAC,EAAEE,EAAE,CAAC,OAAOf,EAAEyK,GAAGzK,EAAE,EAAEO,EAAEuB,EAAE7B,CAAC,EAAEyK,GAAGjK,CAAC,EAAET,CAAC,EAAE,SAASuJ,GAAGvJ,EAAE,CAAC,GAAGyB,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,CAAC,EAAE,GAAG0D,EAAE1D,EAAE,EAAE,EAAEqK,IAAI,CAAC,QAAQpK,KAAKkK,GAAGL,GAAG7J,CAAC,EAAE,IAAIA,KAAK+J,GAAGF,GAAG7J,CAAC,EAAE+J,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE/I,EAAE,SAASrB,CAAC,EAAE6D,GAAE,EAAE,CAACxB,EAAErC,EAAE,IAAI6J,GAAG7J,CAAC,CAAC,CAAC,CAAC,SAAS2K,GAAG3K,EAAE,CAAC,GAAGyB,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,CAAC,EAAEkJ,GAAGlJ,CAAC,CAAC,CAAC,IAAIkJ,GAAGlJ,GAAG,CAAC,GAAG0D,EAAE1D,EAAEyB,EAAE,MAAMkJ,GAAG3K,CAAC,EAAE,SAASuJ,GAAGvJ,CAAC,CAAC,EAAEgK,GAAG,CAAC,EAAEG,GAAG,CAAC,EAAES,GAAG,CAAC,EAAER,GAAG,CAAC,EAAES,GAAG7K,GAAG,CAAC,IAAIC,EAAED,EAAE,GAAG,OAAOoK,GAAGnK,CAAC,EAAE+J,GAAG,KAAKhK,CAAC,EAAEmK,GAAG,OAAOA,GAAG,QAAQnK,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,EAAE8K,GAAG7K,CAAC,CAAC,EAAE,SAAS+C,IAAI,CAAC4H,GAAG,QAAS5K,GAAGA,EAAE,CAAE,CAAC,CAAC,IAAIkK,GAAGlK,GAAG,IAAI,QAASC,GAAG,CAACD,EAAE,UAAU4B,GAAG,CAAC,IAAIrB,GAAGqB,EAAEA,EAAE,MAAM,IAAI,GAAGA,EAAE,cAAcA,EAAE,cAAcwB,GAAG,EAAE,CAAC,IAAI3C,EAAE2J,GAAGxI,EAAE,YAAY,EAAEnB,EAAEA,EAAE,YAAYmB,EAAEA,EAAE,YAAY,EAAEc,EAAE,0CAA0CnC,CAAC,uBAAuBqB,EAAE,YAAY,qCAAqC,CAAC,MAAsBrB,IAAjB,eAAmB+C,GAAG,EAAkB/C,IAAhB,cAAkBwJ,GAAGnI,CAAC,EAAoBrB,IAAlB,gBAAoBsK,GAAGT,GAAGxI,EAAE,MAAM,CAAC,EAAiBrB,IAAf,cAAkBqB,EAAEA,EAAE,OAAOrB,EAAE6J,GAAGxI,CAAC,EAAE,OAAOwI,GAAGxI,CAAC,EAAEkI,GAAGvJ,CAAC,EAAEuK,GAAGlJ,CAAC,EAAEuI,GAAG,OAAOA,GAAG,QAAQ5J,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,GAAoBA,IAAjB,eAAmB6J,GAAGxI,EAAE,MAAM,EAAE,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAarB,IAAX,UAAcP,EAAE,OAAO,GAAGC,EAAED,CAAC,GAAaO,IAAV,QAAY,MAAM,UAAUqB,EAAE,QAAQ,KAAKA,EAAE,IAAI,EAAE,EAAmBA,EAAE,SAAnB,eAA0B5B,EAAE,YAAY4B,CAAC,EAAkBrB,IAAhB,cAAkBc,EAAEO,EAAE,OAAO,EAAE,GAAGA,EAAE,IAAI,EAAErB,GAAGmC,EAAE,kCAAkCnC,CAAC,EAAE,CAAC,EAAEP,EAAE,QAAQA,GAAG,CAAC,MAAM0C,EAAE,yBAAyB1C,EAAE,QAAQ,IAAIA,EAAE,MAAM,KAAKA,EAAE,OAAO,EAAE,EAAEA,CAAC,EAAE,IAAI4B,EAAErB,EAAE,CAAC,EAAE,IAAIqB,IAAI,CAAC,QAAQ,EAAEP,EAAE,eAAeO,CAAC,GAAGrB,EAAE,KAAKqB,CAAC,EAAE5B,EAAE,YAAY,CAAC,IAAI,OAAO,SAASO,EAAE,WAAWH,EAAE,WAAWqD,EAAC,CAAC,CAAC,CAAE,EAAE,SAASwG,IAAI,CAAC,IAAIjK,EAAE,IAAI,OAAO,IAAI,IAAI,YAAY,GAAG,EAAE,CAAC,KAAK,SAAS,WAAW,aAAa,KAAK,YAAY,CAAC,EAAEgK,GAAG,KAAKhK,CAAC,CAAC,CAAC,IAAI+K,GAAG/K,GAAG,CAAC,KAAK,EAAEA,EAAE,QAAQA,EAAE,MAAM,EAAEqB,CAAC,CAAC,EAAE0B,GAAG,IAAI,CAAC,IAAI/C,EAAEoD,GAAG,EAAEnD,EAAEY,EAAE,EAAEb,EAAE,KAAK,IAAI,CAAC,EAAEA,EAAEa,EAAE,EAAEb,EAAE,KAAK,IAAI,CAAC,EAAEgL,GAAG/K,EAAEA
,EAAED,CAAC,EAAE0K,GAAGzK,CAAC,CAAC,EAAEkD,GAAG,CAACnD,EAAEC,IAAI,CAACoK,GAAG,EAAErK,EAAEiL,GAAGjL,EAAEC,CAAC,EAAE,EAAEoK,GAAG3G,EAAE1D,EAAEqD,GAAGrD,CAAC,CAAC,EAAE,MAAMkL,EAAE,CAAC,YAAYlL,EAAE,CAAC,KAAK,GAAGA,EAAE,EAAE,CAAC,CAAC,SAAS6E,GAAG7E,EAAEC,EAAE2B,EAAE,CAAC,IAAIrB,EAAE,IAAI2K,GAAGlL,KAAK,CAAC,EAAE,MAAMC,KAAK,EAAE2B,KAAK,EAAEf,EAAE,EAAEN,EAAE,GAAG,KAAK,IAAI,CAAC,EAAE,EAAEM,EAAE,EAAEN,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEN,EAAEY,EAAE,EAAEN,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEqB,EAAE5B,CAAC,CAAC,SAASmL,GAAGnL,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOkB,EAAE6I,GAAG,EAAE,EAAEtK,EAAEC,EAAE2B,EAAErB,CAAC,EAAEuE,GAAG9E,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAASuE,GAAG9E,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGP,KAAK,EAAEC,KAAK,EAAE2B,KAAK,EAAErB,KAAK,EAAWmB,IAAT,OAAW,OAAOgB,EAAE,qFAAqF,EAAE,EAAE,IAAIjC,EAAE,CAAC,EAAE,OAAOgB,GAAOhB,EAAE,SAAN,EAAa0K,GAAGnL,EAAEC,EAAE2B,EAAErB,CAAC,GAAGP,EAAE,CAAC,GAAG4B,EAAE,GAAG5B,EAAE,GAAGO,EAAE,GAAGE,CAAC,EAAEgB,GAAGzB,EAAE,GAAG,cAAc,YAAYA,EAAES,CAAC,EAAE,GAAGsJ,GAAG/J,CAAC,EAAE,CAAC,IAAIoL,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,MAAM,EAAE,OAAOC,GAAG,CAACrL,EAAEC,EAAE2B,IAAI,CAAC,IAAIrB,GAAGN,KAAK,GAAG2B,EAAE,IAAIA,EAAE3B,EAAED,EAAE4B,CAAC,GAAG,EAAEA,GAAGrB,IAAI,EAAEqB,EAAE,GAAG,GAAGA,EAAE3B,GAAGD,EAAE,QAAQoL,GAAG,OAAOA,GAAG,OAAOpL,EAAE,kBAAkB0B,EAAE1B,EAAE,MAAMC,EAAE2B,CAAC,EAAE5B,EAAE,SAASC,EAAE2B,CAAC,CAAC,EAAE,IAAIrB,EAAE,GAAGN,EAAE2B,GAAG,CAAC,IAAInB,EAAET,EAAEC,GAAG,EAAE,GAAG,IAAIQ,EAAE,CAAC,IAAIqB,EAAE,GAAG9B,EAAEC,GAAG,EAAE,IAAS,IAAIQ,IAAV,IAAaF,GAAG,OAAO,cAAc,GAAGE,IAAI,EAAEqB,CAAC,MAAM,CAAC,IAAIC,EAAE,GAAG/B,EAAEC,GAAG,EAAE,OAAOQ,GAAQ,IAAIA,IAAV,KAAc,GAAGA,IAAI,GAAGqB,GAAG,EAAEC,GAAG,EAAEtB,IAAI,GAAGqB,GAAG,GAAGC,GAAG,EAAE,GAAG/B,EAAEC,GAAG,GAAGM,GAAG,OAAO,aAAaE,CAAC,GAAGA,GAAG,MAAMF,GAAG,OAAO,aAAa,MAAME,GAAG,GAAG,MAAM,KAAKA,CAAC,EAAE,CAAC,MAAMF,GAAG,OAAO,aAAaE,CAAC,CAAC,CAAC,OAAOF,CAAC,EAAEoJ,GAAG,CAAC3J,EAAEC,KAAKD,KAAK,GAAGqL,GAAG9K,EAAE,EAAEP,EAAEC,CAAC,EAAE,GAAG,SAAS8E,GAAG/E,EAAEC,EAAE2B,EAAE,CAAC,OAAOH,EAAE6I,GAAG,EAAE,EAAEtK,EAAEC,EAAE2B,CAAC,EAAE,CAAC,CAAC,SAASoD,GAAGhF,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,EAAEC,CAAC,CAAC,CAAC,IAAIqL,GAAGtL,GAAG,CAAC,QAAQC,EAAE,EAAE2B,EAAE,EAAEA,EAAE5B,EAAE,OAAO,EAAE4B,EAAE,CAAC,IAAIrB,EAAEP,EAAE,WAAW4B,CAAC,EAAE,KAAKrB,EAAEN,IAAI,MAAMM,EAAEN,GAAG,EAAE,OAAOM,GAAG,OAAOA,GAAGN,GAAG,EAAE,EAAE2B,GAAG3B,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEsL,GAAG,CAACvL,EAAEC,EAAE2B,EAAErB,IAAI,CAAC,GAAG,EAAE,EAAEA,GAAG,MAAO,GAAE,IAAIE,EAAEmB,KAAK,EAAErB,EAAEqB,EAAErB,EAAE,EAAE,QAAQuB,EAAE,EAAEA,EAAE9B,EAAE,OAAO,EAAE8B,EAAE,CAAC,IAAIC,EAAE/B,EAAE,WAAW8B,CAAC,EAAE,GAAG,OAAOC,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK/B,EAAE,WAAW,EAAE8B,CAAC,GAAG,KAAKC,EAAE,CAAC,GAAGH,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAEG,CAAC,KAAK,CAAC,GAAG,MAAMA,EAAE,CAAC,GAAGH,EAAE,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,CAAC,KAAK,CAAC,GAAG,OAAOA,EAAE,CAAC,GAAGH,EAAE,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,EAAE,KAAK,CAAC,GAAGH,EAAE,GAAGrB,EAAE,MAAMN,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,GAAG9B,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,GAAG,EAAE,CAAC9B,EAAE2B,MAAM,CAAC,EAAE,IAAIG,GAAG,EAAE,EAAE,CAAC9B,EAAE2B,MAAM,CAAC,EAAE,IAAI,GAAGG,CAAC,CAAC,CAAC,OAAO9B,EAAE2B,IAAI,CAAC,EAAE,EAAEA,EAAEnB,CAAC,EAAE+K,GAAG,CAACxL,EAAEC,EAAE2B,IAAI2J,GAAGvL,EAAEO,EAAE,EAAEN,EAAE2B,CAAC,EAAE,SAASqD,GAAGjF,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,EAAEC,CAAC,CAAC,CAAC,SAASiF,GAAGlF,EAAEC,EAAE2B,EAAE,CAAC,GAAGH,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,EAAEC,EAAE2B,CAAC,CAAC,CAAC,SAASuD,GAAGnF,EAAEC,EAAE2B,EAAE,CAAC,OAAOH,EAAE6I,GAAG,EAAE,EAAEtK,EAAEC,EAAE2B,CAAC,EAAE,CAAC,CAAC,SAASwD,GAAGpF,EA
AEC,EAAE,CAAC,GAAGwB,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,EAAEC,CAAC,CAAC,CAAC,SAASoF,GAAGrF,EAAEC,EAAE2B,EAAE,CAAC,GAAGH,EAAE,OAAO6I,GAAG,EAAE,EAAEtK,EAAEC,EAAE2B,CAAC,CAAC,CAAC,SAAS0D,GAAGtF,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGkB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAASgF,GAAGvF,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGkB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAASiF,GAAGxF,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,GAAGkB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,SAASkF,GAAGzF,EAAE,CAAC,GAAGyB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,CAAC,CAAC,CAAC,SAAS0F,GAAG1F,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,CAAC,CAAC,CAAC,SAAS0F,GAAG3F,EAAEC,EAAE2B,EAAE,CAAC,GAAGH,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,CAAC,CAAC,CAAC,IAAI6J,GAAGC,GAAG9F,GAAG,IAAI,CAACvB,GAAE,EAAE,CAAC,EAAEsH,GAAG3L,GAAG,CAAC,QAAQC,EAAE,GAAGM,EAAE,EAAEP,IAAI,CAAC,GAAGC,GAAGwL,GAAGlL,EAAE,EAAEP,MAAM,CAAC,CAAC,EAAE,OAAOC,CAAC,EAAE2L,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASC,GAAG/L,EAAEC,EAAE2B,EAAE,CAAC,EAAE,CAAC,GAAG,EAAE,mBAAmB3B,GAAG,MAAM,IAAI,UAAU,yDAAyD,EAAE,OAAO,SAASD,EAAEC,EAAE2B,EAAE,CAAC,EAAE,CAAC,IAAIrB,EAAEN,EAAE,KAAK,GAAG,CAACD,EAAE,MAAM,IAAI0L,GAAG,SAASnL,CAAC,+CAA+C,EAAE,GAAGsL,GAAG,eAAe7L,CAAC,EAAE,CAAC,GAAG4B,EAAE,GAAG,OAAO,MAAM,IAAI8J,GAAG,yBAAyBnL,CAAC,SAAS,CAAC,CAACsL,GAAG7L,CAAC,EAAEC,EAAE,OAAO6L,GAAG9L,CAAC,EAAE4L,GAAG,eAAe5L,CAAC,IAAIC,EAAE2L,GAAG5L,CAAC,EAAE,OAAO4L,GAAG5L,CAAC,EAAEC,EAAE,QAASD,GAAGA,EAAE,CAAE,EAAE,EAAEA,EAAEC,EAAE2B,CAAC,CAAC,CAAC,IAAIoK,GAAG,CAAChM,EAAEC,EAAEc,IAAI,CAAC,OAAOd,EAAE,CAAC,IAAK,GAAE,OAAOc,EAAEf,GAAG,EAAE,EAAEA,IAAI,CAAC,EAAEA,GAAGO,EAAE,EAAEP,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOe,EAAEf,GAAGS,EAAE,EAAET,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOe,EAAEf,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAEA,GAAGa,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOe,EAAEf,GAAG2D,EAAE3D,IAAI,CAAC,EAAEA,GAAG4D,EAAE5D,IAAI,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,0BAA0BC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAAS6F,GAAG7F,EAAEC,EAAE2B,EAAE,CAACA,KAAK,EAAEmK,GAAG/L,KAAK,EAAE,CAAC,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,GAAa,OAAOA,GAAjB,UAA8B,OAAOA,GAAjB,SAAmB,MAAMA,EAASA,IAAP,KAAS,QAAkBD,EAAE,OAAOC,IAApB,UAAkCD,IAAV,SAA0BA,IAAb,WAAeC,EAAE,SAAS,EAAE,GAAGA,EAAE,IAAI,UAAU,mBAAmBA,CAAC,QAAQ,KAAK,IAAI,EAAE,EAAE,OAAgB,OAAOA,GAAjB,WAAqBA,EAAE,OAAOA,CAAC,GAAGA,CAAC,EAAE,eAAegM,GAAG,qBAAqBD,GAAG/L,EAAE2B,EAAM3B,EAAE,QAAQ,GAAG,GAAjB,EAAkB,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAIgM,GAAG,EAAE,SAASnG,GAAG9F,EAAEC,EAAE2B,EAAEnB,EAAE,CAACsL,GAAG/L,KAAK,EAAE,CAAC,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,aAAa,SAASD,EAAE,CAAC,MAAM,CAAC,CAACA,CAAC,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,OAAOA,EAAE2B,EAAEnB,CAAC,EAAE,eAAewL,GAAG,qBAAqB,SAASjM,EAAE,CAAC,OAAO,KAAK,aAAaO,EAAE,EAAEP,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,IAAIkM,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAE,SAASnF,GAAGhH,EAAE,CAAC,GAAGA,KAAK,IAAO,EAAEmM,GAAGnM,EAAE,CAAC,GAAX,IAAemM,GAAGnM,CAAC,EAAE,OAAOkM,GAAG,KAAKlM,CAAC,EAAE,CAAC,IAAIoM,GAAGpM,GAAG,CAAC,GAAG,CAACA,EAAE,MAAM,IAAI0L,GAAG,oCAAoC1L,CAAC,EAAE,OAAOmM,GAAGnM,CAAC,CAAC,EAAEqM,GAAGrM,GAAG,CAAC,OAAOA,EAAE,CAAC,KAAK,OAAO,MAAO,GAAE,KAAK,KAAK,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,IAAI,GAAG,MAAO,GAAE,QAAQ,IAAMC,EAAEiM,GAAG,IAAI,GAAGC,GAAG,OAAO,OAAOA,GAAGlM,CAAC,EAAED,EAAEmM,GAAGlM,EAAE,CAAC,EAAE,EAAEA,CAAC,CAAC,EAAE,SAASqM,GAAGtM,EAAE,CAAC,OAAO,KAAK,aAAaa,EAAE,EAAEb,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIuM,GAAG,CAAC,KAAK,kBAAkB,aAAavM,GAAG,CAAC,IAAIC,EAAEmM,GAAGpM,CAAC,EAAE,OAAOgH,GAAGhH,CAAC,EAAEC,CAAC,EAAE,WAAW,CAACD,EAAEC,IAAIoM,GA
AGpM,CAAC,EAAE,eAAegM,GAAG,qBAAqBK,GAAG,GAAG,IAAI,EAAE,SAASvG,GAAG/F,EAAE,CAAC,OAAO+L,GAAG/L,IAAI,EAAEuM,EAAE,CAAC,CAAC,IAAIC,GAAG,CAACxM,EAAEC,IAAI,CAAC,OAAOA,EAAE,CAAC,IAAK,GAAE,OAAO,SAASD,EAAE,CAAC,OAAO,KAAK,aAAae,EAAE,EAAEf,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,IAAK,GAAE,OAAO,SAASA,EAAE,CAAC,OAAO,KAAK,aAAaiB,EAAE,EAAEjB,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,wBAAwBC,CAAC,MAAMD,CAAC,EAAE,CAAC,CAAC,EAAE,SAASgG,GAAGhG,EAAEC,EAAE2B,EAAE,CAACA,KAAK,EAAEmK,GAAG/L,KAAK,EAAE,CAAC,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,aAAaD,GAAGA,EAAE,WAAW,CAACA,EAAEC,IAAIA,EAAE,eAAegM,GAAG,qBAAqBO,GAAGvM,EAAE2B,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAASqE,GAAGjG,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE,CAAC,GAAGT,KAAK,EAAE4B,KAAK,EAAE3B,EAAE0L,GAAG1L,IAAI,CAAC,EAAOQ,IAAL,KAASA,EAAE,YAAYA,EAAET,GAAGA,EAAMO,IAAJ,EAAM,CAAC,IAAIuB,EAAE,GAAG,EAAEF,EAAEnB,EAAET,GAAGA,GAAG8B,IAAIA,CAAC,CAAC,IAAIC,EAAE9B,EAAE,SAAS,UAAU,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,IAAI,CAAC,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,CAAC,EAAE8L,GAAG/L,EAAE,CAAC,KAAKC,EAAE,aAAaQ,EAAE,WAAWsB,EAAE,eAAekK,GAAG,qBAAqBD,GAAG/L,EAAE2B,EAAMrB,IAAJ,CAAK,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,SAAS2F,GAAGlG,EAAEC,EAAEM,EAAE,CAAC,SAASE,EAAET,EAAE,CAAC,IAAIC,EAAEY,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE,OAAOA,EAAEa,EAAE,EAAEb,EAAE,IAAI,IAAI,CAAC,EAAE,IAAI8B,EAAE,EAAE,EAAE,OAAO9B,EAAEC,CAAC,CAAC,CAAC,IAAI6B,EAAE,CAAC,UAAU,WAAW,WAAW,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,cAAc,EAAE7B,CAAC,EAAE8L,GAAG/L,KAAK,EAAE,CAAC,KAAKO,EAAEoL,GAAGpL,IAAI,CAAC,EAAE,aAAaE,EAAE,eAAewL,GAAG,qBAAqBxL,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,SAAS0F,GAAGnG,EAAEC,EAAE,CAACD,KAAK,EAAE,IAAI4B,GAAmB3B,EAAE0L,GAAG1L,IAAI,CAAC,KAA3B,cAA8B8L,GAAG/L,EAAE,CAAC,KAAKC,EAAE,aAAa,SAASD,EAAE,CAAC,IAAIC,EAAEY,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAES,EAAET,EAAE,EAAE,GAAG4B,EAAE,QAAQE,EAAErB,EAAEsB,EAAE,EAAEA,GAAG9B,EAAE,EAAE8B,EAAE,CAAC,IAAIhB,EAAEN,EAAEsB,EAAE,GAAGA,GAAG9B,GAAMM,EAAE,EAAEQ,IAAI,CAAC,GAAZ,EAAc,CAAC,GAAGe,EAAE6H,GAAG7H,EAAEf,EAAEe,CAAC,EAAWb,IAAT,OAAW,IAAIA,EAAEa,OAAOb,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGa,EAAEA,EAAEf,EAAE,CAAC,CAAC,KAAK,CAAC,IAAIE,EAAE,MAAMhB,CAAC,EAAE8B,EAAE,EAAEA,EAAE9B,EAAE,EAAE8B,EAAEd,EAAEc,CAAC,EAAE,OAAO,aAAaxB,EAAE,EAAEE,EAAEsB,IAAI,CAAC,CAAC,EAAEd,EAAEA,EAAE,KAAK,EAAE,CAAC,CAAC,OAAOwL,GAAGzM,CAAC,EAAEiB,CAAC,EAAE,WAAW,SAASjB,EAAEC,EAAE,CAACA,aAAa,cAAcA,EAAE,IAAI,WAAWA,CAAC,GAAG,IAAIQ,EAAY,OAAOR,GAAjB,SAAmB,GAAG,EAAEQ,GAAGR,aAAa,YAAYA,aAAa,mBAAmBA,aAAa,WAAW,MAAM,IAAIyL,GAAG,uCAAuC,EAAE,IAAI5J,EAAEF,GAAGnB,EAAE6K,GAAGrL,CAAC,EAAEA,EAAE,OAAO8B,EAAE2K,GAAG,EAAE5K,EAAE,CAAC,EAAEf,EAAEgB,EAAE,EAAE,GAAGlB,EAAE,EAAEkB,IAAI,IAAI,CAAC,EAAED,EAAEF,GAAGnB,EAAE+K,GAAGvL,EAAEc,EAAEe,EAAE,CAAC,UAAUrB,EAAE,IAAIA,EAAE,EAAEA,EAAEqB,EAAE,EAAErB,EAAE,CAAC,IAAIQ,EAAEhB,EAAE,WAAWQ,CAAC,EAAE,GAAG,IAAIQ,EAAE,MAAMwL,GAAG1L,CAAC,EAAE,IAAI2K,GAAG,wDAAwD,EAAEnL,EAAE,EAAEQ,EAAEN,IAAI,CAAC,EAAEQ,CAAC,KAAM,KAAIR,EAAE,EAAEA,EAAEqB,EAAE,EAAErB,EAAEF,EAAE,EAAEQ,EAAEN,IAAI,CAAC,EAAER,EAAEQ,CAAC,EAAE,OAAcT,IAAP,MAAUA,EAAE,KAAKyM,GAAG1K,CAAC,EAAEA,CAAC,EAAE,eAAekK,GAAG,qBAAqBK,GAAG,GAAGtM,EAAE,CAACyM,GAAGzM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI2M,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,UAAU,EAAE,OAAOC,GAAG,CAAC5M,EAAEC,IAAI,CAAC,QAAQ2B,EAAE5B,GAAG,EAAE+B,EAAEH,EAAE3B,EAAE,EAAE,EAAE2B,GAAGG,IAAI,EAAE,EAAEH,IAAI,CAAC,GAAG,EAAEA,EAAE,GAAG,IAAIA,IAAI,GAAG5B,GAAG2M,GAAG,OAAOA,GAAG,OAAOpM,EAAE,EAAE,MAAMP,EAAE4B,CAAC,CAAC,EAAE,IAAIA,EAAE,GAAGG,EAAE,EAAE,EAAEA,GAAG9B,EAAE,GAAG,EAAE8B,EAAE,CAAC,IAAIlB,EAAEJ,EAAE,EAAET,EAAE,EAAE+B,IAAI,IAAI,CAAC,EAAE,GAAMlB,GAAH,EAAK,MAAMe,GAAG,OAAO,aAAaf,CAAC,CAAC,CAAC,OAAOe,CAAC,EAAEiL,GAAG,CAAC7M,EAAEC,EAAE2B,IAAI,CAAC,GAAGA,IAAI,WAAW,EAAEA,E
AAE,MAAO,GAAE,IAAIrB,EAAEN,EAAE2B,GAAGA,GAAG,GAAG,EAAE5B,EAAE,OAAO4B,EAAE,EAAE5B,EAAE,OAAO,QAAQ8B,EAAE,EAAEA,EAAEF,EAAE,EAAEE,EAAE,CAAC,IAAIC,EAAE/B,EAAE,WAAW8B,CAAC,EAAErB,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE8B,EAAE9B,GAAG,CAAC,CAAC,OAAOQ,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEM,CAAC,EAAEuM,GAAG9M,GAAG,EAAEA,EAAE,OAAO+M,GAAG,CAAC/M,EAAEC,IAAI,CAAC,QAAQ2B,EAAE,EAAErB,EAAE,GAAG,EAAEqB,GAAG3B,EAAE,IAAI,CAAC,IAAIQ,EAAE,EAAE,EAAET,EAAE,EAAE4B,IAAI,IAAI,CAAC,EAAE,GAAMnB,GAAH,EAAK,MAAM,EAAEmB,EAAE,OAAOnB,GAAGA,GAAG,MAAMF,GAAG,OAAO,aAAa,MAAME,GAAG,GAAG,MAAM,KAAKA,CAAC,GAAGF,GAAG,OAAO,aAAaE,CAAC,CAAC,CAAC,OAAOF,CAAC,EAAEyM,GAAG,CAAChN,EAAEC,EAAE2B,IAAI,CAAC,GAAG3B,KAAK,EAAE2B,IAAI,WAAW,EAAEA,EAAE,MAAO,GAAE,IAAIrB,EAAEN,EAAE2B,EAAErB,EAAEqB,EAAE,EAAE,QAAQnB,EAAE,EAAEA,EAAET,EAAE,OAAO,EAAES,EAAE,CAAC,IAAIqB,EAAE9B,EAAE,WAAWS,CAAC,EAAE,GAAG,OAAOqB,GAAG,OAAOA,IAAIA,EAAE,QAAQ,KAAKA,IAAI,IAAI,KAAK9B,EAAE,WAAW,EAAES,CAAC,GAAG,EAAE,EAAER,IAAI,IAAI,CAAC,EAAE6B,GAAG7B,GAAG,GAAG,EAAE2B,EAAE,KAAK,CAAC,OAAO,EAAE,EAAE3B,IAAI,IAAI,CAAC,EAAE,EAAEA,EAAEM,CAAC,EAAE0M,GAAGjN,GAAG,CAAC,QAAQC,EAAE,EAAE2B,EAAE,EAAEA,EAAE5B,EAAE,OAAO,EAAE4B,EAAE,CAAC,IAAIrB,EAAEP,EAAE,WAAW4B,CAAC,EAAE,OAAOrB,GAAG,OAAOA,GAAG,EAAEqB,EAAE3B,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAE,SAASmG,GAAGpG,EAAEC,EAAE2B,EAAE,CAAC,GAAG5B,KAAK,EAAEC,KAAK,EAAE2B,EAAE+J,GAAG/J,KAAK,CAAC,EAAM3B,IAAJ,EAAM,IAAIM,EAAEqM,GAAGnM,EAAEoM,GAAG9K,EAAE+K,GAAG/L,EAAEf,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,OAAWC,IAAJ,IAAQM,EAAEwM,GAAGtM,EAAEuM,GAAGjL,EAAEkL,GAAGlM,EAAEf,GAAGa,EAAE,EAAEb,IAAI,IAAI,CAAC,GAAG+L,GAAG/L,EAAE,CAAC,KAAK4B,EAAE,aAAa5B,GAAG,CAAC,QAAQ4B,EAAEnB,EAAEI,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE8B,EAAE9B,EAAE,EAAE+B,GAAE,EAAEA,IAAGtB,EAAE,EAAEsB,GAAE,CAAC,IAAId,GAAEjB,EAAE,EAAE+B,GAAE9B,EAAE8B,IAAGtB,GAAMM,EAAEE,EAAC,GAAN,IAAUa,EAAEvB,EAAEuB,EAAEb,GAAEa,CAAC,EAAWF,IAAT,OAAWA,EAAEE,GAAGF,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGE,GAAGA,EAAEb,GAAEhB,EAAE,CAAC,OAAOwM,GAAGzM,CAAC,EAAE4B,CAAC,EAAE,WAAW,CAAC5B,EAAEO,IAAI,CAAC,GAAa,OAAOA,GAAjB,SAAmB,MAAM,IAAImL,GAAG,6CAA6C9J,CAAC,EAAE,EAAE,IAAIE,EAAEC,EAAExB,CAAC,EAAEQ,EAAE2L,GAAG,EAAE5K,EAAE7B,CAAC,EAAE,OAAOY,EAAE,EAAEE,IAAI,IAAI,CAAC,EAAEe,EAAE7B,EAAEQ,EAAEF,EAAEQ,EAAE,EAAEe,EAAE7B,CAAC,EAASD,IAAP,MAAUA,EAAE,KAAKyM,GAAG1L,CAAC,EAAEA,CAAC,EAAE,eAAekL,GAAG,qBAAqBK,GAAG,GAAGtM,EAAE,CAACyM,GAAGzM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAASqG,GAAGrG,EAAEC,EAAE,CAAC8L,GAAG/L,KAAK,EAAE,CAAC,GAAG,GAAG,KAAKC,EAAE0L,GAAG1L,IAAI,CAAC,EAAE,eAAe,EAAE,aAAa,IAAI,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAIqG,GAAG,IAAI,EAAE,SAASC,GAAGvG,EAAE,CAAC6C,GAAG7C,IAAI,EAAE,CAACwB,EAAE,EAAE,CAACD,EAAE,OAAO,EAAE,EAAEyB,GAAG,CAAC,CAAC,IAAIkK,GAAGlN,GAAG,CAAC,GAAG,CAAC6D,GAAE,GAAG,CAAC,GAAG7D,EAAE,EAAE,EAAE,EAAEqK,IAAI,GAAG,CAAC5I,EAAE4B,GAAGK,CAAC,EAAEwF,GAAGxF,CAAC,CAAC,OAAO1D,EAAE,CAACA,aAAa6J,IAAc7J,GAAV,UAAaqC,EAAE,EAAErC,CAAC,CAAC,CAAC,OAAOA,EAAE,CAACA,aAAa6J,IAAc7J,GAAV,UAAaqC,EAAE,EAAErC,CAAC,CAAC,CAAC,EAAE,SAAS8C,GAAG9C,EAAE,CAACA,KAAK,EAAc,OAAO,QAAQ,IAA3B,aAAgC,QAAQ,GAAG,EAAE,EAAEA,IAAI,EAAEA,CAAC,EAAE,MAAM,KAAKsD,EAAE,EAAEtD,GAAG,IAAI,QAAQ,MAAM,EAAE,EAAEA,IAAI,EAAE,CAAC,EAAE,CAAC,IAAIsD,GAAG,IAAI,CAAC,IAAItD,EAAEoD,GAAG,EAAEpD,IAAI8C,GAAG9C,CAAC,EAAEkN,GAAGC,EAAE,EAAE,EAAE,SAAS3G,GAAGxG,EAAEC,EAAE,EAAED,KAAK,IAAIC,IAAI,EAAE,WAAWqD,EAAE,EAAE7B,EAAE,YAAY,CAAC,aAAazB,EAAE,IAAI,cAAc,CAAC,GAAGA,EAAEoK,GAAGpK,CAAC,IAAIA,EAAE,YAAY,CAAC,IAAI,cAAc,CAAC,CAAC,CAAC,IAAIoN,GAAG,CAAC,EAAE,SAAS3G,GAAGzG,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE,CAAC,IAAIR,KAAK,EAAEM,GAAG,EAAE6M,GAAG,OAAO7M,EAAEqB,EAAEnB,IAAI,IAAI,EAAEA,EAAE,EAAEA,EAAEF,EAAEE,IAAI2M,GAAG3M,CAAC,EAAEkD,EAAE/B,EAAE,EAAEnB,CAAC,EAA
EkD,EAAE/B,EAAE,EAAEnB,EAAE,CAAC,EAAEQ,EAAE,EAAEW,EAAE,EAAEnB,EAAE,IAAI,CAAC,EAAE,OAAOR,EAAEyJ,GAAGzJ,CAAC,EAAEoN,GAAGrN,CAAC,GAAG,GAAGoN,EAAE,CAAC,CAAC,SAAS1G,GAAG1G,EAAE,CAACA,KAAK,EAAEyB,EAAE,YAAY,CAAC,IAAI,gBAAgB,OAAOzB,CAAC,CAAC,EAAE6K,GAAGT,GAAGpK,CAAC,CAAC,CAAC,CAAC,SAAS2G,GAAG3G,EAAE,CAAC,CAAC,IAAIsN,GAAG,CAACtN,EAAEC,IAAI,CAAC,IAAI2B,EAAEiK,GAAG7L,CAAC,EAAE,GAAY4B,IAAT,OAAW,MAAM5B,EAAEuN,GAAGvN,CAAC,EAAE4B,EAAE+J,GAAG3L,CAAC,EAAEyM,GAAGzM,CAAC,EAAE,IAAI0L,GAAG,GAAGzL,CAAC,qBAAqB2B,CAAC,EAAE,EAAE,OAAOA,CAAC,EAAE4L,GAAG,CAACxN,EAAEC,EAAE2B,IAAI,CAAC,IAAIrB,EAAE,CAAC,EAAE,OAAOP,EAAEA,EAAE,WAAWO,EAAEqB,CAAC,EAAErB,EAAE,SAASM,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAEoM,GAAG9L,CAAC,GAAGP,CAAC,EAAE,SAAS4G,GAAG5G,EAAEC,EAAE2B,EAAE,CAAC,OAAO3B,KAAK,EAAE2B,KAAK,EAAE5B,EAAEoM,GAAGpM,IAAI,CAAC,EAAEC,EAAEqN,GAAGrN,EAAE,WAAW,EAAEuN,GAAGvN,EAAE2B,EAAE5B,CAAC,CAAC,CAAC,IAAIyN,GAAGzN,GAAG,CAAC,GAAG,CAACA,EAAE,CAAC,OAAOA,EAAE,CAACqE,GAAErE,CAAC,CAAC,CAAC,EAAE0N,GAAG,EAAE7L,GAAG,KAAK8L,GAAG,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAE/L,GAAG,KAAKgM,GAAG,CAAC,EAAE,SAASpE,GAAG5J,EAAE,CAAC,OAAO,SAASA,EAAE,CAAC,GAAG,CAAC6D,GAAE,CAAC,GAAO6J,KAAJ,EAAO,CAAC,IAAIzN,EAAE,GAAG2B,EAAE,GAAG5B,EAAG,CAACA,EAAE,IAAI,CAAC,GAAG,CAAC6D,KAAI8J,GAAG3N,EAAEC,EAAE,GAAG2B,GAAG,CAAC8L,GAAG,EAAED,GAAI,IAAIQ,GAAGpM,EAAE,CAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAE7B,EAAE,GAAG,GAAG,CAAC,IAAIO,EAAE,UAAU,CAAC,IAAIP,EAAE,EAAE,EAAE6B,GAAG,IAAI,IAAI,CAAC,EAAE,OAAO7B,EAAEkO,EAAGJ,GAAG9N,CAAC,CAAC,EAAE,EAAEqK,GAAGrK,EAAE,CAAC,EAAE,CAAC,OAAOC,EAAE,CAACM,EAAEN,EAAED,EAAE,EAAE,CAAC,IAAIS,EAAE,GAAG,GAAG,CAACoB,GAAG,CAAC,IAAIC,EAAEE,GAAGF,IAAIE,GAAG,MAAMhC,EAAE8B,EAAE,OAAOA,EAAE,SAASvB,CAAC,EAAEE,EAAE,GAAG,CAAC,GAAGT,GAAG,CAACS,EAAE,MAAMF,CAAC,CAAC,CAAE,EAAEqB,EAAE,GAAG3B,IAAIyN,GAAG,EAAE7L,GAAG,UAAU,CAAC,IAAI7B,EAAE0M,GAAG,KAAK,EAAEzM,EAAED,EAAE,GAAGa,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAEC,EAAEY,EAAE,EAAEb,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,MAAMA,EAAE2N,GAAG,CAAC,EAAE,IAAIhM,EAAEiM,GAAG5N,CAAC,EAAE,OAAgB2B,IAAT,SAAaA,EAAEmM,KAAKF,GAAG5N,CAAC,EAAE2B,EAAEkM,GAAGlM,CAAC,EAAE3B,GAAGA,EAAE2B,EAAE,EAAE,EAAE5B,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAED,CAAC,EAAE,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,MAAM,EAAEyN,GAAI,IAAIU,GAAGtM,EAAE,CAAE,EAAE,MAAU6L,KAAJ,GAAQA,GAAG,EAAED,GAAGW,EAAE,EAAE3B,GAAG5K,EAAE,EAAEA,GAAG,KAAKmM,GAAG,QAAQd,EAAE,GAAG7I,GAAE,kBAAkBqJ,EAAE,EAAE,EAAE,OAAOC,EAAE,CAAC,EAAG1N,GAAG,CAACD,EAAE,EAAE,KAAKC,CAAC,CAAC,CAAE,CAAC,CAAC,SAAS4G,GAAG7G,EAAE,CAAC,OAAOA,KAAK,EAAE4J,GAAI,KAAK5J,EAAEoM,GAAGpM,CAAC,GAAG,KAAKqM,EAAE,CAAE,CAAC,CAAC,IAAIgC,GAAG,CAAC,EAAE,SAASvH,GAAG9G,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOqB,KAAK,EAAErB,KAAK,GAAGP,EAAEqO,GAAGrO,IAAI,CAAC,GAAG,KAAKC,EAAEmM,GAAGnM,IAAI,CAAC,EAAE2B,EAAErB,CAAC,CAAC,CAAC,IAAI+N,GAAG,CAAC,EAAEC,GAAGvO,GAAG,CAAC,IAAIC,EAAEqO,GAAGtO,CAAC,EAAE,OAAgBC,IAAT,OAAW0L,GAAG3L,CAAC,EAAEC,CAAC,EAAE,SAAS8G,GAAG/G,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE,CAAC,OAAOmB,KAAK,EAAErB,KAAK,EAAEE,KAAK,GAAGT,EAAEqO,GAAGrO,IAAI,CAAC,GAAGC,EAAEmM,GAAGnM,IAAI,CAAC,EAAEA,EAAE2B,EAAE2M,GAAG3M,CAAC,CAAC,EAAErB,EAAEE,CAAC,CAAC,CAAC,IAAI+N,GAAG,IAAc,OAAO,YAAjB,SAA4B,WAAW,SAAS,aAAa,EAAE,EAAE,SAASvH,GAAGjH,EAAE,CAAC,OAAWA,KAAK,IAAT,EAAYqM,GAAGmC,GAAG,CAAC,GAAGxO,EAAEuO,GAAGvO,CAAC,EAAEqM,GAAGmC,GAAG,EAAExO,CAAC,CAAC,EAAE,CAAC,IAAIyO,GAAGzO,GAAG,CAAC,IAAIC,EAAEoO,GAAG,OAAO,OAAOA,GAAG,KAAKrO,CAAC,EAAEC,CAAC,EAAEyO,GAAG,CAAC1O,EAAEC,IAAI,CAAC,QAAQ2B,EAAE,MAAM5B,CAAC,EAAEO,EAAE,EAAEA,EAAEP,EAAE,EAAEO,EAAEqB,EAAErB,CAAC,EAAE+M,GAAGzM,EAAE,EAAEZ,EAAE,EAAEM,IAAI,IAAI,CAAC,EAAE,aAAaA,CAAC,EAAE,OAAOqB,CAAC,EAAE+M,GAAG,CAAC3O,EAA
EC,IAAI,OAAO,eAAeA,EAAE,OAAO,CAAC,MAAMD,CAAC,CAAC,EAAE,SAASkH,GAAGlH,EAAEC,EAAE2B,EAAE,CAAC,IAAIrB,GAAGN,EAAEyO,GAAG1O,EAAEC,IAAI,CAAC,GAAG,MAAM,EAAED,IAAI,IAAIS,EAAE;AAAA,EAAwDqB,EAAE,EAAEC,EAAE,CAAC,EAAMH,IAAJ,GAAOG,EAAE,KAAK,KAAK,EAAE,QAAQlB,EAAE,CAAC,SAAS,EAAEE,EAAE,CAACR,CAAC,EAAEU,EAAE,EAAEA,EAAEjB,EAAE,EAAEiB,EAAEc,EAAE,KAAK,MAAMd,CAAC,EAAEJ,EAAE,KAAK,UAAUI,CAAC,EAAEF,EAAE,KAAKd,EAAEgB,CAAC,CAAC,EAAER,GAAG,YAAYQ,CAAC,aAAaA,CAAC,6BAA6Ba,EAAE,IAAIA,EAAE,EAAE;AAAA,EAAOA,GAAG7B,EAAEgB,CAAC,EAAE,eAAe,OAAOR,GAAG,cAAkBmB,IAAJ,EAAM,WAAW,WAAW,IAAIG,EAAE,KAAK,IAAI,CAAC;AAAA,EAAOxB,EAAE,KAAKM,EAAE,KAAK,mBAAmB,EAAEE,EAAE,KAAKyM,EAAE,EAAE/M,GAAG;AAAA,GAA8DI,EAAE,KAAKJ,EAAE;AAAA,CAAM,EAAET,EAAE,SAASA,EAAE,CAAC,IAAIC,GAAE,SAAS,GAAG,EAAEA,cAAa,UAAU,MAAM,IAAI,UAAU,qCAAqC,OAAOA,EAAC,0BAA0B,EAAE,IAAI2B,GAAE+M,GAAG1O,GAAE,MAAM,sBAAuB,UAAU,CAAC,CAAE,EAAE,OAAO2B,GAAE,UAAU3B,GAAE,UAAU2B,GAAE,IAAIA,IAAG5B,EAAEC,GAAE,MAAM2B,GAAE5B,CAAC,aAAa,OAAOA,EAAE4B,EAAC,EAAEf,CAAC,EAAE,GAAGE,CAAC,EAAEa,EAAE,iBAAiB3B,EAAE,IAAKD,GAAGA,EAAE,IAAK,EAAE,KAAK,IAAI,CAAC,QAAQO,EAAE,IAAI,IAAIkO,GAAGE,GAAG/M,EAAE5B,CAAC,CAAC,CAAC,CAAC,SAASmH,GAAGnH,EAAE,CAAC,OAAOA,EAAEuO,GAAGvO,IAAI,CAAC,EAAEqM,GAAGhL,EAAErB,CAAC,CAAC,CAAC,CAAC,SAASoH,GAAGpH,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,EAAEoM,GAAGpM,IAAI,CAAC,EAAEC,EAAEmM,GAAGnM,CAAC,EAAEoM,GAAGrM,EAAEC,CAAC,CAAC,CAAC,CAAC,SAASoH,GAAGrH,EAAE,CAAC,GAAGA,KAAK,KAAKmM,GAAGnM,EAAE,CAAC,GAAG,EAAE,CAAC,SAASsH,IAAI,CAAC,OAAO+E,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS9E,GAAGvH,EAAE,CAACA,EAAEoM,GAAGpM,IAAI,CAAC,EAAE,QAAQC,EAAE,MAAMD,EAAE,MAAM,EAAE4B,EAAE,EAAEA,EAAE5B,EAAE,OAAO4B,IAAI3B,EAAE2B,CAAC,EAAE5B,EAAE4B,CAAC,EAAE,OAAOyK,GAAGpM,CAAC,CAAC,CAAC,SAASuH,GAAGxH,EAAE,CAAC,OAAOqM,GAAGkC,GAAGvO,IAAI,CAAC,CAAC,CAAC,CAAC,SAASyH,IAAI,CAAC,OAAO4E,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS3E,GAAG1H,EAAE,CAAC,QAAQC,EAAEmM,GAAGpM,KAAK,CAAC,EAAEC,EAAE,QAAQ,CAAC,IAAI2B,EAAE3B,EAAE,IAAI,EAAEA,EAAE,IAAI,EAAE2B,CAAC,CAAC,CAACoF,GAAGhH,CAAC,CAAC,CAAC,SAAS2H,GAAG3H,EAAEC,EAAE2B,EAAE,CAAC3B,KAAK,EAAE2B,KAAK,EAAE5B,EAAEoM,GAAGpM,IAAI,CAAC,EAAEC,EAAEmM,GAAGnM,CAAC,EAAE2B,EAAEwK,GAAGxK,CAAC,EAAE5B,EAAEC,CAAC,EAAE2B,CAAC,CAAC,SAASgG,GAAG5H,EAAEC,EAAE,CAAC,OAAOA,KAAK,EAAED,GAAGA,EAAEsN,GAAGtN,IAAI,EAAE,mBAAmB,GAAG,qBAAqBC,CAAC,EAAEoM,GAAGrM,CAAC,CAAC,CAAC,SAAS6H,GAAG7H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,eAAe,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,UAAU,EAAEA,GAAGA,EAAE,QAAQ,EAAE,KAAK,IAAIA,EAAE,eAAe,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,MAAM,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,IAAI4O,GAAG5O,GAAMA,EAAE,GAAL,IAAYA,EAAE,KAAL,GAAaA,EAAE,KAAL,GAAU6O,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAE,SAAShH,GAAG9H,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAE,EAAE,EAAEC,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAE,EAAE,EAAEC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,QAAQ,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,SAAS,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,KAAK,EAAE,EAAEC,EAAE,KAAK,IAAI,C
AAC,EAAED,EAAE,OAAO,EAAE,IAAI4B,GAAGgN,GAAG5O,EAAE,YAAY,CAAC,EAAE6O,GAAGC,IAAI9O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,CAAC,EAAE2B,EAAE,EAAE,EAAE3B,EAAE,KAAK,IAAI,CAAC,EAAE,IAAID,EAAE,kBAAkB,EAAE4B,EAAE,IAAI,KAAK5B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE,IAAIO,EAAE,IAAI,KAAKP,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEA,EAAE,GAAG4B,GAAGrB,GAAGP,EAAE,kBAAkB,GAAG,KAAK,IAAIO,EAAEqB,CAAC,GAAG,EAAE,EAAE3B,EAAE,KAAK,IAAI,CAAC,EAAED,CAAC,CAAC,SAAS+H,GAAG/H,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,IAAI,KAAK,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAE,KAAK,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,CAAC,EAAE4B,EAAE,EAAE,EAAE5B,EAAE,KAAK,IAAI,CAAC,EAAEO,EAAEN,EAAE,kBAAkB,EAAEQ,EAAE,IAAI,KAAKR,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAE6B,EAAE,IAAI,KAAK7B,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,kBAAkB,EAAEY,EAAE,KAAK,IAAIiB,EAAErB,CAAC,EAAE,MAAO,GAAEmB,EAAE,EAAE,EAAE5B,EAAE,KAAK,IAAI,CAAC,EAAE,EAAOS,GAAGqB,GAAGjB,GAAGN,GAAG,EAAEqB,IAAIf,GAAGN,KAAKE,EAAE,KAAK,IAAIqB,EAAErB,CAAC,EAAER,EAAE,QAAQA,EAAE,QAAQ,EAAE,MAAM,EAAE2B,EAAEf,EAAEJ,GAAGF,EAAE,GAAG,EAAE,EAAEP,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,OAAO,EAAE2B,GAAGgN,GAAG3O,EAAE,YAAY,CAAC,EAAE4O,GAAGC,IAAI7O,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAE4B,EAAE,EAAE,EAAE5B,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAE,EAAE,EAAED,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAE,EAAE,EAAED,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAED,EAAEC,EAAE,QAAQ,EAAE,OAAO,MAAMD,CAAC,EAAE,GAAGA,EAAE,GAAG,CAAC,CAAC,SAASgI,GAAGhI,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAE,CAAC,OAAON,EAAE6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,CAAC,EAAE,GAAG,CAAC,SAASkG,GAAGjI,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAE,CAAC,GAAGL,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,CAAC,CAAC,CAAC,SAASoG,GAAGlI,EAAEC,EAAE2B,EAAErB,EAAE,CAACP,KAAK,EAAEC,KAAK,EAAE2B,KAAK,EAAErB,KAAK,EAAE,IAAIE,EAAG,IAAI,OAAM,YAAY,EAAEqB,EAAE,IAAI,KAAKrB,EAAE,EAAE,CAAC,EAAEM,EAAE,IAAI,KAAKN,EAAE,EAAE,CAAC,EAAEA,EAAEqB,EAAE,kBAAkB,EAAE,IAAIb,EAAEF,EAAE,kBAAkB,EAAEI,EAAE,KAAK,IAAIV,EAAEQ,CAAC,EAAEJ,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE,GAAGmB,EAAE,EAAE,EAAElB,IAAI,IAAI,CAAC,EAAE,EAAOQ,GAAGQ,GAAGa,GAAG9B,EAAEA,GAAGA,EAAE,mBAAmB,OAAO,CAAC,OAAO,GAAG,aAAa,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC,GAAG8B,CAAC,EAAEf,EAAEf,EAAEe,CAAC,EAAEE,EAAER,GAAG+K,GAAG1J,EAAEF,EAAE,EAAE,EAAE4J,GAAGzK,EAAER,EAAE,EAAE,IAAIiL,GAAG1J,EAAEvB,EAAE,EAAE,EAAEiL,GAAGzK,EAAEa,EAAE,EAAE,EAAE,CAAC,IAAImN,GAAG,CAAC,EAAEC,GAAG,CAAChP,EAAEC,IAAI,CAAC8O,GAAG,OAAO,EAAE,QAAQnN,EAAEA,EAAErB,EAAE,EAAEP,MAAM,CAAC,GAAG,CAAC,IAAIS,EAAOmB,GAAL,IAAO3B,IAAIQ,GAAQmB,GAAL,MAAS3B,EAAE,EAAE,EAAE,EAAE8O,GAAG,KAAUnN,GAAL,IAAOf,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAO2B,GAAL,IAAO+B,EAAE1D,IAAI,CAAC,EAAO2B,GAAL,IAAO,EAAE,EAAE3B,IAAI,IAAI,CAAC,EAAEgB,EAAE,EAAEhB,IAAI,IAAI,CAAC,CAAC,EAAEA,GAAGQ,EAAE,EAAE,CAAC,CAAC,OAAOsO,EAAE,EAAE,SAAS5G,GAAGnI,EAAEC,EAAE2B,EAAE,CAAC,OAAO5B,KAAK,EAAEC,EAAE+O,GAAG/O,IAAI,EAAE2B,IAAI,CAAC,EAAE8H,GAAG1J,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,SAASmI,GAAGpI,EAAEC,EAAE2B,EAAE,CAAC,OAAO5B,KAAK,EAAEC,EAAE+O,GAAG/O,IAAI,EAAE2B,IAAI,CAAC,EAAE8H,GAAG1J,CAAC,EAAE,GAAGC,CAAC,CAAC,CAAC,IAAIoI,GAAG,IAAI,CAAC,EAAEC,GAAG,IAAI,KAAK,IAAI,EAAE,SAASC,GAAGvI,EAAEC,EAAE,CAAC,OAAOyC,EAAEiH,GAAG3J,IAAI,EAAEC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAIyI,GAAGF,GAAG,IAAI,CAAC,MAAM6B,IAA
I,EAAE,QAAQ,EAAE,SAAS5B,IAAI,CAAC,MAAO,WAAU,CAACC,GAAG,IAAI,YAAY,WAAW,YAAY,IAAI,EAAE,IAAIC,GAAG,IAAI,UAAU,oBAAoB,SAASC,IAAI,CAAC,OAAOvE,GAAE,sEAAsE,EAAE,CAAC,CAAC,SAASwE,GAAG7I,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAEM,EAAE,EAAE,OAAO,GAAGP,GAAGC,GAAG,WAAWD,EAAE,MAAM,GAAG,QAAQ4B,EAAE,EAAE,GAAGA,EAAEA,GAAG,EAAE,CAAC,IAAInB,EAAER,GAAG,EAAE,GAAG2B,GAAGnB,EAAE,KAAK,IAAIA,EAAET,EAAE,SAAS,EAAE,IAAI8B,EAAE,KAAKrB,EAAE,KAAK,IAAIT,EAAES,CAAC,EAAE,EAAE,CAACqB,GAAGA,EAAE,IAAI,KAAKA,EAAE,WAAWrB,GAAG,MAAMA,EAAE,OAAO,KAAK,EAAEL,EAAE,OAAO,WAAW,OAAO,MAAM,GAAG,CAACA,EAAE,KAAK0B,CAAC,EAAExB,GAAE,EAAE,IAAIyB,EAAE,EAAE,MAAM,CAAC,MAAS,CAAC,CAACA,EAAE,MAAM,CAAC,GAAGA,EAAE,MAAM,EAAE,CAAC,MAAM,EAAE,CAAC,IAAIkN,GAAG,KAAK5K,GAAE,iGAAiG,EAAE,GAAG6K,GAAG,CAAC,EAAEC,GAAGnP,GAAG,CAACA,EAAE,QAASA,GAAG,CAAC,IAAIC,EAAEgP,GAAG,EAAEhP,IAAIiP,GAAGjP,CAAC,EAAED,EAAE,CAAE,CAAC,EAAE,SAAS8I,IAAI,CAAC,IAAI9I,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,EAAE,OAAeA,EAAE,CAAC,GAAZ,SAAeA,EAAE,MAAM,EAAEmP,GAAGnP,CAAC,EAAEkP,GAAG,GAAGD,GAAG,EAAEC,GAAG,GAAGlP,EAAEkP,GAAG,EAAE,CAAC,SAASnG,GAAG/I,EAAEC,EAAE2B,EAAE,CAAC,GAAG5B,KAAK,EAAEC,KAAK,EAAEiP,GAAG,IAAIlP,EAAE,IAAIO,EAAE2O,GAAG,QAAiB3O,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM;AAAA,CAAI,GAAG,CAAC,GAAnD,SAAsDA,EAAE,MAAM,EAAE4O,GAAG5O,CAAC,EAAE,QAAQE,EAAE,EAAEF,EAAEE,CAAC,GAAGwO,GAAG,GAAGjP,GAAG,EAAES,EAAE,IAAIT,EAAE,EAAEA,EAAE4B,GAAGrB,EAAEP,EAAES,CAAC,EAAE,EAAET,EAAE,EAAE,EAAEC,EAAE,EAAED,IAAI,IAAI,CAAC,EAAEiP,GAAG,EAAE,OAAOjP,CAAC,CAAC,IAAIoP,GAAGC,GAAG,CAAC,EAAEC,GAAG,IAAI,CAAC,GAAG,CAACF,GAAG,CAAC,IAAIpP,EAAEC,EAAE,CAAC,KAAK,WAAW,QAAQ,WAAW,KAAK,IAAI,IAAI,IAAI,KAAK,iBAAiB,MAAgB,OAAO,WAAjB,UAA4B,UAAU,WAAW,UAAU,UAAU,CAAC,GAAG,KAAK,QAAQ,IAAI,GAAG,EAAE,SAAS,EAAEmC,GAAG,gBAAgB,EAAE,IAAIpC,KAAKqP,GAAYA,GAAGrP,CAAC,IAAb,OAAe,OAAOC,EAAED,CAAC,EAAEC,EAAED,CAAC,EAAEqP,GAAGrP,CAAC,EAAE,IAAI4B,EAAE,CAAC,EAAE,IAAI5B,KAAKC,EAAE2B,EAAE,KAAK,GAAG5B,CAAC,IAAIC,EAAED,CAAC,CAAC,EAAE,EAAEoP,GAAGxN,CAAC,CAAC,OAAOwN,EAAE,EAAE,SAASpG,GAAGhJ,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIM,EAAE,EAAE,OAAO+O,GAAG,EAAE,QAAS,CAAC7O,EAAEqB,IAAI,CAAC,IAAIC,EAAE9B,EAAEM,EAAE,IAAIuB,EAAEjB,EAAE,EAAEb,EAAE,EAAE8B,IAAI,IAAI,CAAC,EAAEC,EAAEA,EAAE,EAAEA,EAAEtB,EAAE,OAAO,EAAEsB,EAAE,EAAE,EAAED,MAAM,CAAC,EAAErB,EAAE,WAAWsB,CAAC,EAAE,EAAE,EAAED,IAAI,CAAC,EAAE,EAAEvB,GAAGE,EAAE,OAAO,CAAC,CAAE,EAAE,CAAC,CAAC,SAASwI,GAAGjJ,EAAEC,EAAE,CAAC,GAAGwB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAI2B,EAAE0N,GAAG,EAAEzO,EAAE,EAAEb,IAAI,IAAI,CAAC,EAAE4B,EAAE,OAAO,IAAIrB,EAAE,EAAE,OAAOqB,EAAE,QAAS5B,GAAGO,GAAGP,EAAE,OAAO,CAAE,EAAEa,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAEM,EAAE,CAAC,CAAC,SAAS4I,GAAGnJ,EAAE,CAAC,OAAOyB,EAAE6I,GAAG,GAAG,EAAEtK,CAAC,EAAE,EAAE,CAAC,SAASoJ,GAAGpJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOkB,EAAE6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,CAAC,EAAE,EAAE,CAAC,SAAS8I,GAAGrJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOkB,EAAE6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAErB,CAAC,EAAE,EAAE,CAAC,IAAIgP,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,SAASjG,GAAGtJ,EAAEC,EAAE2B,EAAEnB,EAAE,CAAC,GAAGgB,EAAE,OAAO6I,GAAG,GAAG,EAAEtK,EAAEC,EAAE2B,EAAEnB,CAAC,EAAER,KAAK,EAAE2B,KAAK,EAAEnB,KAAK,EAAE,QAAQqB,EAAE,EAAEC,EAAE,EAAEA,EAAEH,EAAEG,IAAI,CAAC,IAAIhB,EAAEF,EAAE,EAAEZ,IAAI,IAAI,CAAC,EAAEgB,EAAEJ,EAAE,EAAEZ,EAAE,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,QAAQkB,EAAE,EAAEA,EAAEF,EAAEE,IAAI,CAAC,IAAIC,EAAEb,EAAE,EAAEQ,EAAEI,IAAI,CAAC,EAAEE,EAAEkO,GAAGvP,CAAC,EAAMoB,IAAJ,GAAYA,IAAL,KAAapB,IAAJ,EAAMyC,EAAEC,GAAG2I,GAAGhK,EAAE,CAAC,CAAC,EAAEA,EAAE,OAAO,GAAGA,EAAE,KAAKD,CAAC,CAAC,CAACU,GAAGb,CAAC,CAAC,OAAOJ,EAAE,EAAEJ,IAAI,IAAI
,CAAC,EAAEqB,EAAE,CAAC,CAAC,IAAI0N,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC1P,EAAEC,IAAI,CAAC,EAAE,EAAE,IAAID,EAAEC,IAAI,CAAC,CAAC,EAAE,SAASuJ,GAAGxJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,SAASE,EAAET,EAAEC,GAAE2B,GAAE,CAAC,IAAI5B,EAAY,OAAOA,GAAjB,SAAmBA,EAAE,SAAS,EAAEA,GAAG,GAAGA,EAAE,OAAOC,IAAGD,EAAE4B,GAAE,CAAC,EAAE5B,EAAE,OAAOA,CAAC,CAAC,SAAS8B,EAAE9B,EAAEC,GAAE,CAAC,OAAOQ,EAAET,EAAEC,GAAE,GAAG,CAAC,CAAC,SAASc,EAAEf,EAAEC,GAAE,CAAC,SAAS2B,GAAE5B,GAAE,CAAC,MAAO,GAAEA,GAAE,GAAG,EAAEA,GAAE,EAAE,CAAC,CAAC,IAAIO,GAAE,OAAYA,GAAEqB,GAAE5B,EAAE,YAAY,EAAEC,GAAE,YAAY,CAAC,KAAxC,IAAiDM,GAAEqB,GAAE5B,EAAE,SAAS,EAAEC,GAAE,SAAS,CAAC,KAAlC,IAAuCM,GAAEqB,GAAE5B,EAAE,QAAQ,EAAEC,GAAE,QAAQ,CAAC,GAAGM,EAAC,CAAC,SAASU,EAAEjB,EAAE,CAAC,OAAOA,EAAE,OAAO,EAAE,CAAC,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAOA,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAASmB,EAAEnB,EAAE,CAAC,IAAIC,GAAED,EAAE,GAAG,IAAIA,EAAE,IAAI,KAAK,IAAI,KAAKA,EAAE,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,EAAE,EAAEC,IAAG,CAAC,IAAI2B,GAAE5B,EAAE,SAAS,EAAEO,IAAGqO,GAAG5O,EAAE,YAAY,CAAC,EAAEwP,GAAGC,IAAI7N,EAAC,EAAE,GAAG,EAAE3B,GAAEM,GAAEP,EAAE,QAAQ,GAAG,CAACA,EAAE,QAAQA,EAAE,QAAQ,EAAEC,EAAC,EAAE,KAAK,CAACA,IAAGM,GAAEP,EAAE,QAAQ,EAAE,EAAEA,EAAE,QAAQ,CAAC,EAAE,GAAG4B,GAAE5B,EAAE,SAAS4B,GAAE,CAAC,GAAG5B,EAAE,SAAS,CAAC,EAAEA,EAAE,YAAYA,EAAE,YAAY,EAAE,CAAC,EAAE,CAAC,OAAO4B,GAAE,IAAI,KAAK5B,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,EAAEC,GAAEgB,EAAE,IAAI,KAAKjB,EAAE,YAAY,EAAE,EAAE,CAAC,CAAC,EAAE4B,GAAEX,EAAEW,EAAC,EAAE,GAAGb,EAAEd,GAAED,CAAC,EAAE,GAAGe,EAAEa,GAAE5B,CAAC,EAAEA,EAAE,YAAY,EAAE,EAAEA,EAAE,YAAY,EAAEA,EAAE,YAAY,EAAE,CAAC,CAACA,KAAK,EAAEC,KAAK,EAAE2B,KAAK,EAAErB,KAAK,EAAE,IAAIa,EAAEP,EAAE,EAAEN,EAAE,KAAK,IAAI,CAAC,EAAE,QAAQc,KAAKd,EAAE,CAAC,GAAG,EAAE,EAAEA,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG,EAAE,EAAEA,EAAE,KAAK,IAAI,CAAC,EAAE,GAAGa,EAAEuI,GAAGvI,CAAC,EAAE,EAAE,EAAEQ,EAAE+H,GAAG/H,CAAC,EAAER,EAAE,CAAC,KAAK,uBAAuB,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK,KAAK,cAAc,KAAK,QAAQ,KAAK,WAAW,KAAK,WAAW,KAAK,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,WAAW,MAAM,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,IAAI,EAAEQ,EAAEA,EAAE,QAAQ,IAAI,OAAOP,EAAE,GAAG,EAAED,EAAEC,CAAC,CAAC,EAAE,IAAIC,GAAE,2DAA2D,MAAM,GAAG,EAAEC,GAAE,wFAAwF,MAAM,GAAG,EAAE,IAAIF,KAAKD,EAAE,CAAC,KAAKpB,GAAGsB,GAAEtB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGsB,GAAEtB,EAAE,EAAE,EAAE,KAAKA,GAAGuB,GAAEvB,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGuB,GAAEvB,EAAE,EAAE,EAAE,KAAKA,GAAG8B,GAAG9B,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAKA,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAKA,GAAGS,EAAET,EAAE,GAAG,EAAE,GAAG,EAAE,KAAKA,GAAGmB,EAAEnB,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKmB,EAAE,KAAKnB,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAKA,KAAQA,EAAEA,EAAE,KAAR,EAAYA,EAAE,GAAG,GAAGA,IAAIA,GAAG,IAAI8B,EAAE9B,EAAE,CAAC,GAAG,KAAKA,GAAG,CAAC,
QAAQC,GAAE,EAAE2B,GAAE,EAAEA,IAAG5B,EAAE,GAAG,EAAEC,KAAI2O,GAAG5O,EAAE,GAAG,IAAI,EAAEwP,GAAGC,IAAI7N,IAAG,EAAE,CAAC,OAAOE,EAAE9B,EAAE,GAAGC,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAG8B,EAAE9B,EAAE,GAAG,EAAE,CAAC,EAAE,KAAKA,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,EAAK,KAAKA,GAAG,GAAGA,EAAE,IAAI,GAAGA,EAAE,GAAG,KAAK,KAAK,KAAKA,GAAG8B,EAAE9B,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI,IAAK,KAAKA,GAAGA,EAAE,IAAI,EAAE,KAAKA,GAAG8B,EAAE,KAAK,OAAO9B,EAAE,GAAG,EAAEA,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,KAAKA,GAAG,CAAC,IAAIC,GAAE,KAAK,OAAOD,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,GAAG,IAAIA,EAAE,GAAG,IAAIA,EAAE,GAAG,GAAG,GAAGC,KAAIA,GAAMA,IAAJ,MAAY2B,IAAG5B,EAAE,GAAG,IAAIA,EAAE,IAAI,IAAtB,GAA6B4B,IAAH,GAAMgN,GAAG5O,EAAE,EAAE,IAAIC,GAAE,QAAQ,CAACA,GAAE,GAAG,IAAI2B,IAAG5B,EAAE,GAAG,EAAEA,EAAE,GAAG,GAAG,GAAM4B,IAAH,GAASA,IAAH,GAAMgN,GAAG5O,EAAE,GAAG,IAAI,CAAC,IAAIC,IAAG,CAAC,OAAO6B,EAAE7B,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAGA,EAAE,GAAG,KAAKA,GAAG8B,EAAE,KAAK,OAAO9B,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,KAAKA,IAAIA,EAAE,GAAG,MAAM,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,KAAKA,GAAG,CAAC,IAAIC,GAAE,IAAID,EAAEA,EAAE,IAAI,OAAOA,EAAE,KAAK,IAAIA,CAAC,EAAE,IAAIC,GAAE,IAAI,MAAY,QAAQD,EAAE,GAAG,IAAIA,EAAE,KAAK,MAAM,EAAE,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,IAAI,GAAG,EAAE4B,EAAEA,EAAE,QAAQ,MAAM,MAAM,EAAER,EAAEQ,EAAE,SAASP,CAAC,IAAIO,EAAEA,EAAE,QAAQ,IAAI,OAAOP,EAAE,GAAG,EAAED,EAAEC,CAAC,EAAEd,CAAC,CAAC,GAAG,OAAOc,EAAE,SAASrB,EAAE,CAAC,IAAIC,GAAE,MAAMqL,GAAGtL,CAAC,EAAE,CAAC,EAAE,OAAOuL,GAAGvL,EAAEC,GAAE,EAAEA,GAAE,MAAM,EAAEA,EAAC,EAAE2B,EAAEA,EAAE,QAAQ,QAAQ,GAAG,CAAC,EAAEP,EAAE,OAAOpB,EAAE,GAAGyP,GAAGrO,EAAErB,CAAC,EAAEqB,EAAE,OAAO,EAAE,CAAC,SAASoI,GAAGzJ,EAAEC,EAAE2B,EAAErB,EAAE,CAAC,OAAOiJ,GAAGxJ,IAAI,EAAEC,IAAI,EAAE2B,IAAI,EAAErB,IAAI,CAAC,CAAC,CAACkB,GAAG,UAAU,CAAC,QAAQzB,EAAEqB,EAAE,WAAW,EAAErB,KAAKiK,GAAG,EAAEnG,GAAE,QAAS,IAAI,CAACG,KAAI,SAASjE,EAAE,CAACyB,EAAEzB,EAAE,EAAE,QAAQ,IAAIgK,GAAG,IAAIE,EAAE,CAAC,EAAE,KAAKlK,CAAC,CAAC,EAAG,IAAIoE,GAAE,CAAE,CAAC,CAAE,CAAC,EAAE,EAAE,QAAQuL,GAAG,MAAM,GAAG,EAAEC,GAAG,EAAE,IAAIA,GAAG,EAAEA,GAAGD,GAAGC,EAAE,EAAE,OAAO,aAAaA,EAAE,EAAEnE,GAAGkE,GAAGjE,GAAGrK,EAAE,aAAa,cAAc,KAAK,CAAC,YAAYrB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,cAAc,CAAC,EAAEqB,EAAE,cAAc,cAAc,KAAK,CAAC,YAAYrB,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,eAAe,CAAC,EAAEmM,GAAG,KAAK,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE9K,EAAE,oBAAoB,IAAI8K,GAAG,OAAO,EAAE,EAAED,GAAG,OAAO,IAAImB,GAAG,CAAC9D,GAAGoB,GAAGQ,GAAGpG,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGqC,GAAGC,GAAGe,GAAGC,GAAGE,GAAGC,GAAGC,GAAGC,EAAE,EAAE4E,EAAG,UAAU,CAAC,SAASlO,EAAEA,EAAEC,EAAE,CAAC,OAAOiO,EAAGlO,EAAE,QAAQkO,EAAG,UAAU,CAAC,IAAIlO,EAAEkO,EAAGjO,EAAE,CAAC,EAAE,OAAO,CAAC2B,EAAErB,CAAC,IAAI,OAAO,QAAQP,CAAC,EAAEC,EAAE2B,CAAC,EAAc,OAAOrB,GAAnB,WAAqB,IAAIP,IAAI,CAAC4N,GAAG,KAAKhM,CAAC,EAAE,GAAG,CAAC,OAAOrB,EAAE,GAAGP,CAAC,CAAC,QAAC,CAAQ6D,KAAI+J,GAAG,IAAI,EAAE/L,IAAQ6L,KAAJ,GAAYE,GAAG,SAAP,IAAgBF,GAAG,EAAErD,IAAI,EAAEoD,GAAGoC,EAAE,EAAe,OAAO,OAApB,KAA4B,OAAO,GAAG,GAAG,CAAC,EAAEtP,EAAE,OAAON,CAAC,EAAE,EAAEiO,EAAG,UAAU,CAAC,IAAIlO,EAAEkO,EAAGjO,EAAED,GAAGC,GAAGD,EAAEC,CAAC,IAAI,EAAE2B,EAAE5B,GAAG,IAAIA,EAAE,IAAI,EAAE,OAAOA,EAAE,OAAO,OAAO,CAAC,EAAEA,CAAC,GAAG,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAG4B,EAAE5B,EAAE,EAAE,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,kCAAkC4B,EAAE5B,EAAE,iCAAiC,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAG4B,EAAE5B,EAAE,EAAE,EAAEA,CAAC,EAAE,EAAE4K,GAAG,KAAKsD,EAAG,EAAE,EAAEnK,GAAE,QAAQmK,EAAG,EAAE,EAAEzK,GAAExD,EAAEmE,GAAE,EAAE8J,CAAE,CAAC,IAAIjO,EAAEuD,GAAG,EAAE,GAAGS,KAAI5C,EAAE,gB
AAgB,GAAG,CAAC,OAAOA,EAAE,gBAAgBpB,EAAED,CAAC,CAAC,OAAOA,EAAE,CAAC0C,EAAE,sDAAsD1C,CAAC,EAAE,EAAEoB,EAAEpB,CAAC,CAAC,CAAC,OAAOsE,KAAKjD,EAAE,WAAWkD,GAAG,kCAAkC,EAAE,mCAAmClD,EAAE,WAAWA,EAAE,WAAW,mCAAmCiB,CAAC,EAAEA,EAAE,mCAAmC,IAAI,IAAI,mCAAmC,YAAY,GAAG,EAAE,KAAK,SAAStC,EAAEC,EAAE,CAAC,IAAI2B,EAAE0C,GAAG,OAAkB,OAAO,YAAY,sBAA/B,YAAqDC,GAAG3C,CAAC,GAAG4C,GAAG5C,CAAC,GAAe,OAAO,OAAnB,WAAyB8C,GAAG9C,EAAE5B,EAAEC,CAAC,EAAE,MAAM2B,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAMrB,GAAG,YAAY,qBAAqBA,EAAEP,CAAC,EAAE,KAAKC,EAAG,SAASM,EAAE,CAAC,OAAOmC,EAAE,kCAAkCnC,CAAC,EAAE,EAAEmC,EAAE,2CAA2C,EAAEgC,GAAG9C,EAAE5B,EAAEC,CAAC,CAAC,CAAE,CAAE,CAAC,EAAEA,EAAG,SAASA,EAAE,CAACD,EAAEC,EAAE,SAASA,EAAE,MAAM,CAAC,CAAE,EAAE,MAAMmB,CAAC,EAAE,CAAC,CAAC,EAAE,EAAEmM,GAAGvN,IAAIuN,GAAGW,EAAG,IAAIlO,CAAC,EAAEkD,GAAG,KAAKA,GAAGgL,EAAG,IAAI,EAAE7M,EAAE,SAAS,CAACrB,EAAEC,KAAKoB,EAAE,SAAS6M,EAAG,IAAIlO,EAAEC,CAAC,EAAEoB,EAAE,iBAAiB,CAACrB,EAAEC,KAAKoB,EAAE,iBAAiB6M,EAAG,IAAIlO,EAAEC,CAAC,EAAEoB,EAAE,yBAAyB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,KAAKI,EAAE,yBAAyB6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,EAAEE,EAAEE,CAAC,EAAEI,EAAE,4BAA4B,CAACrB,EAAEC,KAAKoB,EAAE,4BAA4B6M,EAAG,IAAIlO,EAAEC,CAAC,EAAEoB,EAAE,6BAA6B,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,6BAA6B6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,0BAA0B,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,0BAA0B6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,0BAA0BrB,IAAIqB,EAAE,0BAA0B6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,kBAAkB,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,kBAAkB6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,mBAAmBrB,IAAIqB,EAAE,mBAAmB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,wBAAwB,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,wBAAwB6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,iBAAiB,CAACrB,EAAEC,KAAKoB,EAAE,iBAAiB6M,EAAG,IAAIlO,EAAEC,CAAC,EAAEoB,EAAE,kBAAkB,CAACrB,EAAEC,KAAKoB,EAAE,kBAAkB6M,EAAG,IAAIlO,EAAEC,CAAC,EAAEoB,EAAE,SAASrB,IAAIqB,EAAE,SAAS6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,iBAAiB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,KAAKT,EAAE,iBAAiB6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,CAAC,EAAET,EAAE,kBAAkB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,KAAKY,EAAE,kBAAkB6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEY,EAAE,kBAAkBrB,IAAIqB,EAAE,kBAAkB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,qBAAqB,CAACrB,EAAEC,EAAE2B,EAAErB,KAAKc,EAAE,qBAAqB6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,CAAC,EAAEc,EAAE,sBAAsB,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,sBAAsB6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,sBAAsBrB,IAAIqB,EAAE,sBAAsB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,kBAAkBrB,IAAIqB,EAAE,kBAAkB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,cAAc,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,cAAc6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,eAAe,CAACrB,EAAEC,EAAE2B,EAAErB,KAAKc,EAAE,eAAe6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,CAAC,EAAEc,EAAE,sBAAsBrB,IAAIqB,EAAE,sBAAsB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,mBAAmBrB,IAAIqB,EAAE,mBAAmB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,mBAAmB,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,KAAKY,EAAE,mBAAmB6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEY,EAAE,QAAQ,CAACrB,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,KAAKQ,EAAE,QAAQ6M,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,EAAEC,EAAElB,CAAC,EAAEQ,EAAE,iBAAiBrB,IAAIqB,EAAE,iBAAiB6M,EAAG,IAAIlO,CAAC,EAAEqB,EAAE,YAAY,CAACrB,EAAEC,EAAE2B,KAAKP,EAAE,YAAY6M,EAAG,IAAIlO,EAAEC,EAAE2B,CAAC,EAAEP,EAAE,iBAAiBrB,IAAIqB,EAAE,iBAAiB6M,EAAG,IAAIlO,CAAC,EAAE,IAAI8P,GAAG1M,GAAG,KAAKA,GAAG8K,EAAG,IAAI,EAAExB,GAAGrL,EAAE,QAAQrB,IAAI0M,GAAGrL,EAAE,QAAQ6M,EAAG,IAAIlO,CAAC,EAAEyM,GAAGpL,EAAE,MAAMrB,IAAIyM,GAAGpL,EAAE,MAAM6M,EAAG,IAAIlO,CAAC,EAAE6C,GAAG,CAAC7C,EAAEC,EAAE2B,EAAErB,EAAEE,EAAEqB,KAAKe,GAAGqL,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,EAAE
qB,CAAC,EAAEyB,GAAG,KAAKA,GAAG2K,EAAG,IAAI,EAAEzD,GAAG,CAACzK,EAAEC,EAAE2B,EAAErB,EAAEE,KAAKgK,GAAGyD,EAAG,IAAIlO,EAAEC,EAAE2B,EAAErB,EAAEE,CAAC,EAAEqK,GAAG9K,IAAI8K,GAAGoD,EAAG,IAAIlO,CAAC,EAAEqD,GAAGrD,IAAIqD,GAAG6K,EAAG,IAAIlO,CAAC,EAAEmN,GAAG,KAAKA,GAAGe,EAAG,IAAI,EAAElD,GAAG,CAAChL,EAAEC,KAAK+K,GAAGkD,EAAG,IAAIlO,EAAEC,CAAC,EAAEyK,GAAG1K,IAAI0K,GAAGwD,EAAG,IAAIlO,CAAC,EAAEwK,GAAGxK,IAAIwK,GAAG0D,EAAG,IAAIlO,CAAC,EAAEuK,GAAG,KAAKA,GAAG2D,EAAG,IAAI,EAAEjD,GAAG5J,EAAE,WAAW,CAACrB,EAAEC,KAAKgL,GAAG5J,EAAE,WAAW6M,EAAG,IAAIlO,EAAEC,CAAC,EAAEkO,GAAGnO,IAAImO,GAAGD,EAAG,IAAIlO,CAAC,EAAE6P,GAAG,KAAKA,GAAG3B,EAAG,IAAI,EAAED,GAAGjO,IAAIiO,GAAGC,EAAG,IAAIlO,CAAC,EAAEoO,GAAG,KAAKA,GAAGF,EAAG,IAAI,EAAE,SAAS6B,IAAI,CAAC,GAAG,EAAE,EAAE9L,IAAG,GAAGxC,EAAEN,EAAEE,CAAC,EAAEI,GAAGsJ,GAAGhH,EAAC,EAAE,YAAY1C,CAAC,MAAM,CAAC,GAAGA,EAAE,OAAO,IAAgB,OAAOA,EAAE,QAArB,aAA8BA,EAAE,OAAO,CAACA,EAAE,MAAM,GAAGA,EAAE,OAAO,QAAQyC,GAAE,QAAQzC,EAAE,OAAO,MAAM,CAAC,EAAE0J,GAAGjH,EAAC,EAAE,EAAEG,IAAG6L,KAAKA,GAAG,GAAGzO,EAAE,UAAU,GAAGwC,KAAIpC,GAAGsJ,GAAGhH,EAAC,EAAE5C,EAAEE,CAAC,EAAEI,GAAGsJ,GAAG/G,EAAC,GAAG,CAAC,CAAC,OAAO3C,EAAE,eAAe,QAAQA,EAAE,cAAc,QAAQA,EAAE,UAAU,IAAIkJ,GAAG,EAAElJ,EAAE,aAAarB,GAAG0K,GAAG1K,CAAC,EAAEqB,EAAE,WAAWrB,GAAGwK,GAAGxK,CAAC,EAAEqB,EAAE,aAAasI,GAAGtI,EAAE,aAAamK,GAAGnK,EAAE,gBAAgBiK,GAAGnH,GAAE,SAASnE,GAAG,CAAC8P,IAAIC,GAAG,EAAED,KAAK3L,GAAEnE,EAAE,EAAE+P,GAAG,EAAEzO,CAAC,GAAUvB,GAAQE,GAAiB,WAAW,MAAM,OAAhC,cAAsCA,GAAE,ICApw0C,IAWa+P,GAePC,GAKAC,GAwCAC,GAsBAC,GAeOC,GAoBPC,GAsBOC,GAtJbC,GAAAC,EAAA,kBAIAC,KAOaV,GAET,GAAS,OAEA,kBAEJ,OAAO,SAAa,IAAe,SAAS,eAAqC,IAE9C,OAAO,KAAS,IAAc,KAAK,UAAU,KAAO,QAO1FC,GAAS,IAAU,OAAO,SAAa,IAAc,OAAY,SAAS,OAK1EC,GAAe,CAACS,EAAkBC,IAA4B,CAClE,GAAI,CACF,IAAMC,EAAUD,GAAkBZ,GAElC,OADYa,EAAU,IAAI,IAAIF,EAAUE,CAAO,EAAI,IAAI,IAAIF,CAAQ,GACxD,SAAWV,EACxB,MAAQ,CACN,MAAO,EACT,CACF,EAgCME,GAAU,MAAMW,GAAyC,CAE7D,IAAMC,EAAO,MADI,MAAM,MAAMD,EAAa,CAAC,YAAa,aAAa,CAAC,GAC1C,KAAK,EACjC,OAAO,IAAI,gBAAgBC,CAAI,CACjC,EAkBMX,GAE0C,cAA+B,QAalEC,GAAoB,SAAkD,CACjF,GAAI,CAACL,GACH,MAAM,IAAI,MAAM,sEAAsE,EAIxF,GAAIE,GAAaF,EAAS,EACxB,MAAO,CAAC,OAAWI,GAAmB,CAAC,EAIzC,IAAMY,EAAM,MAAMb,GAAQH,EAAS,EACnC,MAAO,CAACgB,EAAKZ,GAAmBY,CAAG,CAAC,CACtC,EAOMV,GAGF,cAIK,QAeIC,GAAmB,MAC5BU,EAA+BL,EAC/BM,IAEO,CAAC,OAAWZ,EAAmB,IC1J1C,IAQIa,GACAC,GACAC,GACAC,GAEEC,GAwBAC,GAyBOC,GA+GAC,GA7KbC,GAAAC,EAAA,kBAMAC,KAGIT,GAAc,GACdC,GAAe,GACfC,GAAU,GAERC,GAAyB,IAAe,CAE5C,GAAI,OAAO,kBAAsB,IAC/B,MAAO,GAGT,GAAI,CAGF,OAAI,OAAO,eAAmB,KAC5B,IAAI,eAAe,EAAE,MAAM,YAAY,IAAI,kBAAkB,CAAC,CAAC,EAK1D,YAAY,SAAS,IAAI,WAAW,CACzC,EAAG,GAAI,IAAK,IAAK,EAAG,EAAI,EAAI,EAAG,EAAG,EAAG,EAAI,GAAI,EAAK,EAAI,EAAG,EAAG,EAAI,EAAG,EACnE,EAAG,EAAI,EAAK,EAAK,EAAG,GAAI,GAAI,EAAG,EAAG,EAAG,GAAI,EAAI,IAAK,GAAI,EAAG,EAAG,GAAI,EAClE,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEMC,GAAkB,IAAe,CACrC,GAAI,CAeF,OAAO,YAAY,SAAS,IAAI,WAAW,CACzC,EAAK,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,GAAI,EAAK,GAAK,EAAG,GAAI,EACvF,IAAK,GAAI,IAAK,GAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAI,IAAK,IAAK,EAAG,GAAI,EACzF,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEaC,GAAwB,MAAMK,GAA+C,CACxF,GAAIV,GACF,OAAO,QAAQ,QAAQ,EAEzB,GAAIC,GACF,MAAM,IAAI,MAAM,uDAAyD,EAE3E,GAAIC,GACF,MAAM,IAAI,MAAM,oDAAsD,EAGxED,GAAe,GAGf,IAAMU,EAAUD,EAAM,YAClBE,EAAaF,EAAM,WAGvB,GAAI,CAACN,GAAgB,EACnB,MAAM,IAAI,MAAM,+DAA+D,EAIjF,IAAMS,EAAuBV,GAAuB,EAChDS,EAAa,GAAK,CAACC,IACjB,OAAO,KAAS,KAAe,CAAC,KAAK,qBAEvC,QAAQ,KACJ,iCAAmCD,EACnC,uIACkE,EAIxE,QAAQ,KACJ,4GACmC,EAGvCF,EAAM,WAAaE,EAAa,GAGlC,IAAME,EAAYJ,EAAM,UAClBK,EAAqB,OAAOD,GAAc,SAA
WA,EAAY,OACjEE,EAAuBF,GAAiC,IACxDG,EAAmBD,GAA6B,MAAQA,EACxDE,EAAwBJ,GAAiC,KACzDK,EAAoBD,GAA8B,MAAQA,EAE1D,CAACE,EAAWC,CAAc,EAAK,MAAMC,GAAiBL,EAAiBF,EAAoBH,EAAa,CAAC,EAE3GW,EAAY,GAEVC,EAA8B,CAAC,EAqDrC,GAlDIb,EAAU,GACZa,EAAM,KAAK,IAAI,QAASC,GAAY,CAClC,WAAW,IAAM,CACfF,EAAY,GACZE,EAAQ,CACV,EAAGd,CAAO,CACZ,CAAC,CAAC,EAIJa,EAAM,KAAK,IAAI,QAAQ,CAACC,EAASC,IAAW,CAC1C,IAAMC,EAAiC,CAKrC,WAAAf,CACF,GAEIO,GAAoBJ,KAMtBY,EAAO,WAAa,CAACC,EAAUC,IAC3BV,IAAqBJ,GAAsBc,GAAmBD,GAGpEP,EAAeM,CAAM,EAAE,KAEnBG,GAAU,CACR7B,GAAe,GACfD,GAAc,GACdD,GAAO+B,EACPL,EAAQ,EACJL,GACF,IAAI,gBAAgBA,CAAS,CAEjC,EAECW,GAAS,CACR9B,GAAe,GACfC,GAAU,GACVwB,EAAOK,CAAI,CACb,CAAC,CACP,CAAC,CAAC,EAEF,MAAM,QAAQ,KAAKP,CAAK,EAEpBD,EACF,MAAM,IAAI,MAAM,2DAA2DZ,CAAO,IAAI,CAE1F,EAEaL,GAAc,IAAqB,CAC9C,GAAIN,IAAeD,GACjB,OAAOA,GAGT,MAAM,IAAI,MAAM,qCAAqC,CACvD,ICnLA,IAKaiC,GAeAC,GA6BAC,GAjDbC,GAAAC,EAAA,kBAGAC,KAEaL,GAAkB,CAACM,EAAcC,IAA6B,CACzE,IAAMC,EAAOC,GAAY,EAEnBC,EAAaF,EAAK,gBAAgBF,CAAI,EAAI,EAC1CK,EAAaH,EAAK,QAAQE,CAAU,EAC1C,OAAAF,EAAK,aAAaF,EAAMK,EAAYD,CAAU,EAC9CH,EAAO,KAAKI,CAAU,EAEfA,CACT,EAMaV,GACT,CAACW,EAAkCC,EAAgBC,EAClDC,IAAuC,CACtC,GAAI,OAAOH,GAAW,UAAYA,IAAY,KAAM,CAClD,GAAIE,EAAK,IAAIF,CAAO,EAClB,MAAM,IAAI,MAAM,+BAA+B,EAE/CE,EAAK,IAAIF,CAAO,CAEpB,CAEA,OAAO,QAAQA,CAAO,EAAE,QAAQ,CAAC,CAACI,EAAKC,CAAK,IAAM,CAChD,IAAMC,EAAQL,EAAUA,EAASG,EAAMA,EACvC,GAAI,OAAOC,GAAU,SACnBhB,GAAoBgB,EAAkCC,EAAO,IAAKJ,EAAMC,CAAO,UACtE,OAAOE,GAAU,UAAY,OAAOA,GAAU,SACvDF,EAAQG,EAAMD,EAAM,SAAS,CAAC,UACrB,OAAOA,GAAU,UAC1BF,EAAQG,EAAOD,EAAS,IAAM,GAAG,MAEjC,OAAM,IAAI,MAAM,mCAAmC,OAAOA,CAAK,EAAE,CAErE,CAAC,CACH,EAMSf,GAAkBiB,GAA0B,CACvD,IAAMX,EAAOC,GAAY,EAEnBW,EAAQZ,EAAK,UAAU,EAC7B,GAAI,CACF,IAAMa,EAAeb,EAAK,WAAW,CAAC,EACtCA,EAAK,iBAAiBa,EAAcA,EAAe,CAAC,EACpD,IAAMC,EAAYd,EAAK,OAAOa,EAAe,CAAC,EACxCE,EAAsBf,EAAK,QAAQa,EAAe,EAAI,CAAC,EACvDG,EAAeD,EAAsBf,EAAK,aAAae,CAAmB,EAAI,GACpF,MAAM,IAAI,MAAM,GAAGJ,CAAO,gBAAgBG,CAAS,oBAAoBE,CAAY,EAAE,CACvF,QAAE,CACAhB,EAAK,aAAaY,CAAK,CACzB,CACF,IC/DA,IAQaK,GARbC,GAAAC,EAAA,kBAKAC,KACAC,KAEaJ,GAAiBK,GAA6D,CACzF,IAAMC,EAAOC,GAAY,EACrBC,EAAmB,EACjBC,EAAmB,CAAC,EAEpBC,EAA0CL,GAAW,CAAC,EAE5D,GAAI,CACF,GAAIA,GAAS,mBAAqB,OAChCK,EAAW,iBAAmB,UAE5B,OAAOL,EAAQ,kBAAqB,UAAY,CAAC,OAAO,UAAUA,EAAQ,gBAAgB,GAC1FA,EAAQ,iBAAmB,GAAKA,EAAQ,iBAAmB,EAC7D,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,gBAAgB,EAAE,EAGjF,GAAIA,GAAS,oBAAsB,OACjCK,EAAW,kBAAoB,UACtB,OAAOL,EAAQ,mBAAsB,UAAY,CAAC,OAAO,UAAUA,EAAQ,iBAAiB,EACrG,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,iBAAiB,EAAE,EAG9EA,GAAS,YAAc,SACzBK,EAAW,UAAY,IAGzB,IAAIC,EAAgB,EACpB,OAAIN,GAAS,MAAQ,SACnBM,EAAgBC,GAAgBP,EAAQ,IAAKI,CAAM,GAGrDD,EAAmBF,EAAK,qBACpBI,EAAW,iBAAmBA,EAAW,kBAAoB,CAAC,CAACA,EAAW,UAAYC,CAAa,EACnGH,IAAqB,GACvBK,GAAe,2BAA4B,EAGzCR,GAAS,QAAU,QACrBS,GAAoBT,EAAQ,MAAO,GAAI,IAAI,QAAoC,CAACU,EAAKC,IAAU,CAC7F,IAAMC,EAAgBL,GAAgBG,EAAKN,CAAM,EAC3CS,EAAkBN,GAAgBI,EAAOP,CAAM,EAEjDH,EAAK,sBAAsBE,EAAkBS,EAAeC,CAAe,IAAM,GACnFL,GAAe,iCAAiCE,CAAG,MAAMC,CAAK,GAAG,CAErE,CAAC,EAGI,CAACR,EAAkBC,CAAM,CAClC,OAASU,EAAG,CACV,MAAIX,IAAqB,GACvBF,EAAK,sBAAsBE,CAAgB,EAE7CC,EAAO,QAAQW,GAASd,EAAK,MAAMc,CAAK,CAAC,EACnCD,CACR,CACF,IChEA,IAQME,GAeAC,GAWAC,GAoBAC,GAwDOC,GA9GbC,GAAAC,EAAA,kBAKAC,KACAC,KAEMR,GAA4BS,GAAmD,CACnF,OAAQA,EAAwB,CAC9B,IAAK,WACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,IAAK,MACH,MAAO,IACT,QACE,MAAM,IAAI,MAAM,yCAAyCA,CAAsB,EAAE,CACrF,CACF,EAEMR,GAAoBS,GAAmD,CAC3E,OAAQA,EAAe,CACrB,IAAK,aACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,+BAA+BA,CAAa,EAAE,CAClE,CACF,EAEMR,GAAwBS,GAAmD,CAC1EA,EAAQ,QACXA,EAAQ,MAAQ,CAAC,GAEdA,EAAQ,MAAM,UACjBA,EAAQ,MAAM,QAAU,CAAC,GAE3B,IAAMC,EAAUD,EAAQ,MAAM,QACzBC,EAAQ,+BAEXA,EAAQ,6BAA+B,KAIrCD,EA
AQ,oBACRA,EAAQ,mBAAmB,KAAKE,IAAO,OAAOA,GAAO,SAAWA,EAAKA,EAAG,QAAU,QAAQ,IAC5FF,EAAQ,iBAAmB,GAE/B,EAEMR,GACF,CAACW,EAA8BC,EAC9BC,IAA2B,CAC1B,QAAWH,KAAME,EAAoB,CACnC,IAAIE,EAAS,OAAOJ,GAAO,SAAWA,EAAKA,EAAG,KAG9C,OAAQI,EAAQ,CACd,IAAK,QAEH,GADAA,EAAS,QACL,OAAOJ,GAAO,SAAU,CAG1B,IAAMK,EAFeL,GAEsD,WAC3E,GAAIK,EAAY,CACd,IAAMC,EAAgBC,GAAgB,aAAcJ,CAAM,EACpDK,EAAkBD,GAAgBF,EAAYF,CAAM,EACtDM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GAAe,oDAAoDL,CAAU,GAAG,CAEpF,CACF,CACA,MACF,IAAK,SAEH,GADAD,EAAS,KACL,OAAOJ,GAAO,SAAU,CAC1B,IAAMW,EAAgBX,EACtB,GAAIW,GAAe,gBAAiB,CAClC,GAAIA,EAAc,kBAAoB,QAAUA,EAAc,kBAAoB,OAChF,MAAM,IAAI,MAAM,oDAAoDA,EAAc,eAAe,EAAE,EAErG,IAAML,EAAgBC,GAAgB,kBAAmBJ,CAAM,EACzDK,EAAkBD,GAAgBI,EAAc,gBAAiBR,CAAM,EACzEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GACI,yDAAyDC,EAAc,eAAe,GAAG,CAEjG,CACF,CACA,MACF,IAAK,OACL,IAAK,MACH,SACF,QACE,MAAM,IAAI,MAAM,qCAAqCP,CAAM,EAAE,CACjE,CAEA,IAAMQ,EAAmBL,GAAgBH,EAAQD,CAAM,EACnDM,GAAY,EAAE,4BAA4BR,EAAsBW,CAAgB,IAAM,GACxFF,GAAe,oCAAoCN,CAAM,GAAG,CAEhE,CACF,EAESb,GAAqBO,GAAkE,CAClG,IAAMe,EAAOJ,GAAY,EACrBR,EAAuB,EACrBE,EAAmB,CAAC,EAEpBW,EAAkDhB,GAAW,CAAC,EACpET,GAAqByB,CAAc,EAEnC,GAAI,CACF,IAAMlB,EAAyBT,GAAyB2B,EAAe,wBAA0B,KAAK,EAChGjB,EAAgBT,GAAiB0B,EAAe,eAAiB,YAAY,EAC7EC,EACF,OAAOD,EAAe,OAAU,SAAWP,GAAgBO,EAAe,MAAOX,CAAM,EAAI,EAEzFa,EAAmBF,EAAe,kBAAoB,EAC5D,GAAI,CAAC,OAAO,UAAUE,CAAgB,GAAKA,EAAmB,GAAKA,EAAmB,EACpF,MAAM,IAAI,MAAM,qCAAqCA,CAAgB,EAAE,EAGzE,IAAMC,EAAoBH,EAAe,mBAAqB,EAC9D,GAAI,CAAC,OAAO,UAAUG,CAAiB,GAAKA,EAAoB,GAAKA,EAAoB,EACvF,MAAM,IAAI,MAAM,qCAAqCA,CAAiB,EAAE,EAG1E,IAAMC,EAA+B,OAAOJ,EAAe,wBAA2B,SAClFP,GAAgBO,EAAe,uBAAwBX,CAAM,EAC7D,EAcJ,GAZAF,EAAuBY,EAAK,yBACxBjB,EAAwB,CAAC,CAACkB,EAAe,kBAAmB,CAAC,CAACA,EAAe,iBAAkBjB,EAC/F,CAAC,CAACiB,EAAe,gBAAiB,EAAGC,EAAiBC,EAAkBC,EACxEC,CAA4B,EAC5BjB,IAAyB,GAC3BS,GAAe,+BAAgC,EAG7CI,EAAe,oBACjBxB,GAAsBW,EAAsBa,EAAe,mBAAoBX,CAAM,EAGnFW,EAAe,qBAAuB,OAAW,CACnD,GAAI,OAAOA,EAAe,oBAAuB,UAC/C,MAAM,IAAI,MAAM,+CAA+CA,EAAe,kBAAkB,EAAE,EAEpG,IAAMR,EAAgBC,GAAgB,qBAAsBJ,CAAM,EAC5DK,EAAkBD,GAAgBO,EAAe,mBAAmB,SAAS,EAAGX,CAAM,EACxFU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GACI,4DAA4DI,EAAe,kBAAkB,GAAG,CAExG,CAEA,GAAIA,EAAe,uBACjB,OAAW,CAACK,EAAMC,CAAK,IAAK,OAAO,QAAQN,EAAe,sBAAsB,EAAG,CACjF,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,MAAM,kDAAkDA,CAAI,EAAE,EAE1E,GAAI,OAAOC,GAAU,UAAY,CAAC,OAAO,UAAUA,CAAK,GAAKA,EAAQ,EACnE,MAAM,IAAI,MAAM,iEAAiEA,CAAK,EAAE,EAE1F,IAAMC,EAAad,GAAgBY,EAAMhB,CAAM,EAC3CU,EAAK,6BAA6BZ,EAAsBoB,EAAYD,CAAK,IAAM,GACjFV,GAAe,wCAAwCS,CAAI,MAAMC,CAAK,GAAG,CAE7E,CAGF,OAAIN,EAAe,QAAU,QAC3BQ,GAAoBR,EAAe,MAAO,GAAI,IAAI,QAAoC,CAACS,EAAKH,IAAU,CACpG,IAAMd,EAAgBC,GAAgBgB,EAAKpB,CAAM,EAC3CK,EAAkBD,GAAgBa,EAAOjB,CAAM,EAEjDU,EAAK,0BAA0BZ,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GAAe,qCAAqCa,CAAG,MAAMH,CAAK,GAAG,CAEzE,CAAC,EAGI,CAACnB,EAAsBE,CAAM,CACtC,OAASqB,EAAG,CACV,MAAIvB,IAAyB,GAC3BY,EAAK,0BAA0BZ,CAAoB,EAErDE,EAAO,QAAQsB,GAASZ,EAAK,MAAMY,CAAK,CAAC,EACnCD,CACR,CACF,ICpMA,IAuCaE,GAqCAC,GAsCAC,GAMAC,GAqCAC,GAoBAC,GAOAC,GAxLbC,EAAAC,EAAA,kBAuCaR,GAA8BS,GAA2B,CACpE,OAAQA,EAAM,CACZ,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IACT,IAAK,UACH,MAAO,IACT,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,IACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,EAKaR,GAA8BS,GAAqC,CAC9E,OAAQA,EAAW,CACjB,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,UACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,
MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,CACzD,CACF,EAMaR,GAAwBS,GACpB,CAAC,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,OAAW,MAAS,EAAEA,CAAQ,EAKxGR,GAAqCM,GAEoD,CAChG,OAAQA,EAAM,CACZ,IAAK,UAEH,OAAO,OAAO,aAAiB,KAAe,aAAa,KAAO,aAAe,YACnF,IAAK,UACH,OAAO,aACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,WACT,IAAK,UACH,OAAO,aACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,SACH,OAAO,eACT,QACE,MAAM,IAAI,MAAM,qBAAqBA,CAAI,EAAE,CAC/C,CACF,EAKSL,GAAwBQ,GAAkE,CACrG,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,EAKaP,GAA4BI,GAAyDA,IAAS,WACvGA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAAYA,IAAS,SAC5FA,IAAS,OAKAH,GAA4BO,GAA0C,CACjF,OAAQA,EAAU,CAChB,IAAK,OACH,MAAO,GACT,IAAK,MACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,ICvMA,IAWaC,GAXbC,GAAAC,EAAA,kBAGAC,KAQaH,GAAW,MAAMI,GAAsE,CAClG,GAAI,OAAOA,GAAS,SAClB,GAAI,GAEF,GAAI,CACF,GAAM,CAAC,SAAAC,CAAQ,EAAI,GAAQ,kBAAkB,EAC7C,OAAO,IAAI,WAAW,MAAMA,EAASD,CAAI,CAAC,CAC5C,OAASE,EAAG,CACV,GAAIA,EAAE,OAAS,wBAAyB,CAEtC,GAAM,CAAC,iBAAAC,CAAgB,EAAI,GAAQ,SAAS,EACtCC,EAASD,EAAiBH,CAAI,EAC9BK,EAAuB,CAAC,EAC9B,cAAiBC,KAASF,EACxBC,EAAO,KAAKC,CAAK,EAEnB,OAAO,IAAI,WAAW,OAAO,OAAOD,CAAM,CAAC,CAC7C,CACA,MAAMH,CACR,KACK,CAEL,IAAMK,EAAW,MAAM,MAAMP,CAAI,EACjC,GAAI,CAACO,EAAS,GACZ,MAAM,IAAI,MAAM,sCAAsCP,CAAI,EAAE,EAE9D,IAAMQ,EAAsBD,EAAS,QAAQ,IAAI,gBAAgB,EAC3DE,EAAWD,EAAsB,SAASA,EAAqB,EAAE,EAAI,EAC3E,GAAIC,EAAW,WAGb,OAAO,IAAI,WAAW,MAAMF,EAAS,YAAY,CAAC,EAC7C,CAEL,GAAI,CAACA,EAAS,KACZ,MAAM,IAAI,MAAM,sCAAsCP,CAAI,qBAAqB,EAEjF,IAAMU,EAASH,EAAS,KAAK,UAAU,EAEnCI,EACJ,GAAI,CAEFA,EAAS,IAAI,YAAYF,CAAQ,CACnC,OAASP,EAAG,CACV,GAAIA,aAAa,WAAY,CAE3B,IAAMU,EAAQ,KAAK,KAAKH,EAAW,KAAK,EACxCE,EAAS,IAAI,YAAY,OAAO,CAAC,QAASC,EAAO,QAASA,CAAK,CAAC,EAAE,MACpE,KACE,OAAMV,CAEV,CAEA,IAAIW,EAAS,EAEb,OAAa,CACX,GAAM,CAAC,KAAAC,EAAM,MAAAC,CAAK,EAAI,MAAML,EAAO,KAAK,EACxC,GAAII,EACF,MAEF,IAAME,EAAYD,EAAM,WACV,IAAI,WAAWJ,EAAQE,EAAQG,CAAS,EAChD,IAAID,CAAK,EACfF,GAAUG,CACZ,CACA,OAAO,IAAI,WAAWL,EAAQ,EAAGF,CAAQ,CAC3C,CACF,KAEK,QAAIT,aAAgB,KAClB,IAAI,WAAW,MAAMA,EAAK,YAAY,CAAC,EACrCA,aAAgB,WAClBA,EAEA,IAAI,WAAWA,CAAI,CAE9B,ICvFA,IAYMiB,GAEAC,GAKFC,GACAC,GAESC,GAQAC,GAWAC,GAzCbC,GAAAC,EAAA,kBAKAC,IAOMT,GAAiB,CAAC,IAAK,IAAK,IAAK,IAAK,GAAG,EAEzCC,GAAQ,CAACS,EAAeC,IAA0B,CAEtD,QAAQ,IAAI,IAAIX,GAAeU,CAAK,CAAC,IAAI,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIC,CAAO,EAAE,CAChF,EAKaP,GAAkB,CAACQ,EAA2BC,IAA0B,CACnFX,GAAiBU,EACjBT,GAAQU,CACV,EAKaR,GAAM,CAACS,EAAoBC,IAAuB,CAC7D,IAAMC,EAAeC,GAAqBH,CAAQ,EAC5CI,EAAcD,GAAqBf,EAAc,EACnDc,GAAgBE,GAClBjB,GAAMe,EAAc,OAAOD,GAAQ,WAAaA,EAAI,EAAIA,CAAG,CAE/D,EAKaT,GAAwB,IAAIa,IAAiC,CACpEhB,IACFE,GAAI,GAAGc,CAAI,CAEf,IC7CA,IAOaC,GAPbC,GAAAC,EAAA,kBAKAC,IAEaH,GAAa,CAACI,EAAyBC,IAE5C,IAAKC,GAAkCD,CAAI,GAAGD,CAAU,ICThE,IAAAG,GAAAC,EAAA,oBCAA,IA8EMC,GA+BAC,GAKAC,GAKAC,GAWFC,GACEC,GAYOC,GAkCPC,GAoSOC,GArdbC,GAAAC,EAAA,kBAIAC,KAEAC,KAwEMZ,GAAsC,IAAI,IAAI,CAClD,CAAC,GAAI,GAAG,EACR,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,EAAE,EACT,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,CAA
C,EACZ,CAAC,SAAU,CAAC,EAGZ,CAAC,SAAU,CAAC,EACZ,CAAC,UAAW,CAAC,EACb,CAAC,UAAW,CAAC,CACf,CAAC,EAEKC,GAAsB,CAAC,EAKvBC,GAA4BW,GAAiB,KAAK,KAAKA,EAAO,EAAE,EAAI,GAKpEV,GAAwBU,GAAiB,CAC7C,QAASC,EAAM,EAAGA,EAAMb,GAAU,OAAQa,IAAO,CAC/C,IAAMC,EAAgBd,GAAUa,CAAG,EACnC,GAAID,GAAQE,EACV,OAAOA,CAEX,CAEA,OAAO,KAAK,KAAKF,EAAO,EAAE,EAAI,EAChC,EAEIT,GAAO,EACLC,GAAqB,IAAMD,KAYpBE,GACT,MAAMU,EAAwBC,EAAsBC,EAAsBC,IAC/C,CACrB,IAAMC,EAAalB,GAAyBgB,CAAY,EAClDG,EAAgBL,EAAQ,OAAO,aAEjC,CAAC,KAAMI,EAAY,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAChF,GAAI,CACF,IAAME,EAAiBN,EAAQ,kBAAkB,EACjDA,EAAQ,eAAe,EACvBM,EAAe,mBACXL,EAA+B,EAAuBI,EACtD,EAA4BD,CAChC,EACAJ,EAAQ,MAAM,EAEd,MAAMK,EAAc,SAAS,WAAW,IAAI,EAE5C,IAAME,EAAcF,EAAc,eAAe,EACjD,GAAIF,EAAiB,CAEnB,IAAMK,EAAeL,EAAgB,EACrC,OAAAK,EAAa,IAAI,IAAI,WAAWD,EAAa,EAAGL,CAAY,CAAC,EACtDM,CACT,KAGE,QAAO,IAAI,WAAWD,EAAY,MAAM,EAAGL,CAAY,CAAC,CAE5D,QAAE,CACAG,EAAc,QAAQ,CACxB,CACF,EAEFd,GAAN,KAAmD,CAqBjD,YAAoBS,EAAwB,CAAxB,aAAAA,EAClB,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,2BAA6B,CAAC,EACnC,KAAK,eAAiB,CAAC,EACvB,KAAK,gBAAkB,IAAI,IAC3B,KAAK,uBAAyB,IAAI,IAElC,OAAW,CAACS,CAAK,IAAKzB,GACpBC,GAAU,KAAKwB,CAAG,EAClB,KAAK,YAAY,IAAIA,EAAK,CAAC,CAAC,EAC5B,KAAK,mBAAmB,IAAIA,EAAK,CAAC,CAAC,CAEvC,CAEA,OAAOC,EAAeC,EAAwB,CAC5C,IAAMC,EAAiBD,EAAK,OACtBE,EAAYF,EAAK,WACjBG,EAAYH,EAAK,WACjBd,EAAOX,GAAyB4B,CAAS,EAGzCC,EAAe,KAAK,aAAa,IAAIL,CAAE,EAC7C,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,uCAAuC,EAEzD,GAAIA,EAAa,eAAiBD,EAChC,MAAM,IAAI,MAAM,yCAAyCC,EAAa,YAAY,eAAeD,CAAS,EAAE,EAI9G,IAAME,EAAwB,KAAK,QAAQ,OAAO,aAE9C,CAAC,iBAAkB,GAAM,KAAAnB,EAAM,MAAO,eAAe,UAAY,eAAe,QAAQ,CAAC,EAGvFU,EAAcS,EAAsB,eAAe,EACzD,IAAI,WAAWT,CAAW,EAAE,IAAI,IAAI,WAAWK,EAAgBC,EAAWC,CAAS,CAAC,EACpFE,EAAsB,MAAM,EAI5B,IAAMV,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBAAmBU,EAAuB,EAAGD,EAAa,QAAQ,OAAQ,EAAGlB,CAAI,EAEhGoB,GAAU,UAAW,IAAM,qCAAqCP,CAAE,GAAG,EAErE,KAAK,2BAA2B,KAAKM,CAAqB,CAC5D,CAEA,OAAOE,EAAqBC,EAAgC,CAE1D,IAAMC,EAAqB,KAAK,aAAa,IAAIF,CAAQ,EACzD,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,2CAA2C,EAG7D,IAAMC,EAA0B,KAAK,aAAa,IAAIF,CAAa,EACnE,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAID,EAAmB,eAAiBC,EAAwB,aAC9D,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMxB,EAAOX,GAAyBkC,EAAmB,YAAY,EAG/Dd,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBACXc,EAAmB,QAAQ,OAAQ,EAAGC,EAAwB,QAAQ,OAAQ,EAAGxB,CAAI,CAC3F,CAEA,uBAAuByB,EAAmBpB,EAAsBqB,EAAoC,CAClG,IAAIb,EACJ,GAAIa,EAAgB,CAElB,GADAb,EAAK,KAAK,gBAAgB,IAAIa,CAAc,EACxCb,IAAO,OACT,MAAM,IAAI,MAAM,mCAAmC,EAErD,GAAIY,IAAWC,EACb,OAAAN,GACI,UACA,IAAM,uDAAuDf,CAAY,WACrEQ,CAAE,6BAA6B,EAChCA,EACF,GAAI,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC5E,MAAM,IAAI,MAAM;AAAA,sDAC8B,EAEhD,KAAK,gBAAgB,OAAOa,CAAc,CAC5C,MACEb,EAAKrB,GAAmB,EAG1B,YAAK,aAAa,IAAIqB,EAAI,CAAC,QAAS,CAAC,GAAAA,EAAI,OAA2B,OAAAY,CAAM,EAAG,aAAApB,CAAY,CAAC,EAC1F,KAAK,gBAAgB,IAAIoB,EAAQZ,CAAE,EACnCO,GACI,UACA,IAAM,uDAAuDf,CAAY,WAAWQ,CAAE,eAAe,EAClGA,CACT,CAEA,yBAAyBY,EAAyB,CAChD,IAAMZ,EAAK,KAAK,gBAAgB,IAAIY,CAAM,EACtCZ,IAAO,SACT,KAAK,aAAa,OAAOA,CAAE,EAC3B,KAAK,gBAAgB,OAAOY,CAAM,EAClCL,GAAU,UAAW,IAAM,4DAA4DP,CAAE,EAAE,EAE/F,CAGA,OAAOb,EAAc2B,EAAQ,eAAe,QAAU,eAAe,SAAW,eAAe,SAAmB,CAChH,IAAMpB,EAAajB,GAAqBU,CAAI,EAExCI,EAGEwB,GAAaD,EAAQ,eAAe,WAAa,eAAe,QAEhEE,GAAaF,EAAQ,eAAe,WAAa,eAAe,QACtE,GAAIC,GAAaC,EAAW,CAE1B,IAAMC,GADcF,EAAY,KAAK,YAAc,KAAK,oBAC5B,IAAIrB,CAAU,EACrCuB,EAICA,EAAQ,OAAS,EAEnB1B,EAAY0B,EAAQ,IAAI,EAGxB1B,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAPxEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,CAU1E,MAEEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAGxE,IAAMI,EAAU,CAAC,GAAIvC,GAAmB,E
AAG,OAA2B,OAAQY,CAAS,EACvF,YAAK,aAAa,IAAI2B,EAAQ,GAAI,CAAC,QAAAA,EAAS,aAAc/B,CAAI,CAAC,EAE/DoB,GAAU,UAAW,IAAM,uCAAuCpB,CAAI,WAAW+B,EAAQ,EAAE,EAAE,EACtFA,CACT,CAEA,IAAIlB,EAAkC,CACpC,OAAO,KAAK,aAAa,IAAIA,CAAE,GAAG,OACpC,CAEA,QAAQA,EAAuB,CAC7B,IAAMmB,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAAZ,GAAU,UAAW,IAAM,sCAAsCP,CAAE,gBAAgBmB,EAAW,QAAQ,EAAE,EAAE,EAE1G,KAAK,aAAa,OAAOnB,CAAE,EAC3B,KAAK,eAAe,KAAKmB,EAAW,QAAQ,MAAM,EAG3CA,EAAW,YACpB,CAEA,MAAM,SAASnB,EAAeP,EAAkD,CAC9E,IAAM0B,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,qBAAqB,EAEvC,MAAMvC,GAAgB,KAAK,QAASuC,EAAW,QAAQ,OAAQA,EAAW,aAAc1B,CAAe,CACzG,CAEA,uBAA8B,CAC5B,QAAWmB,KAAU,KAAK,2BAExBA,EAAO,QAAQ,EAIjB,GAFA,KAAK,2BAA6B,CAAC,EAE/B,KAAK,eAAe,SAAW,EAInC,GAAI,KAAK,QAAQ,gBAAkB,UAAW,CAC5C,QAAWA,KAAU,KAAK,eAAgB,CACxC,IAAMQ,EAAgB9C,GAAe,IAAIsC,EAAO,IAAI,EAGpD,IAAKA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAEtE,IAAMS,EAAW,KAAK,YAAY,IAAIT,EAAO,IAAI,GAAK,CAAC,EACnDQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAGxB,UAAYA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAE7E,IAAMS,EAAW,KAAK,mBAAmB,IAAIT,EAAO,IAAI,GAAK,CAAC,EAC1DQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAExB,MACEA,EAAO,QAAQ,CAEnB,CACA,KAAK,eAAiB,CAAC,CACzB,KAAO,CAGL,IAAIU,EAAkB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,gBAAiB,EAC/EA,IACHA,EAAkB,CAAC,EACnB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,iBAAmBA,CAAe,GAEjF,QAAWV,KAAU,KAAK,eACxBU,EAAgB,KAAKV,CAAM,EAE7B,KAAK,eAAiB,CAAC,CACzB,CACF,CAEA,SAAU,CACR,KAAK,YAAY,QAASK,GAAY,CACpCA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,mBAAmB,QAASK,GAAY,CAC3CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EAED,KAAK,aAAa,QAASW,GAAY,CACrCA,EAAQ,QAAQ,OAAO,QAAQ,CACjC,CAAC,EAED,KAAK,uBAAuB,QAASN,GAAY,CAC/CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,uBAAyB,IAAI,GACpC,CAEA,iBAAiBY,EAAmB,CAElC,IAAMC,EAAiB,KAAK,uBAAuB,IAAID,CAAS,EAC5DC,IACFA,EAAe,QAAQb,GAAU,CAC/BA,EAAO,QAAQ,CACjB,CAAC,EACD,KAAK,uBAAuB,OAAOY,CAAS,EAEhD,CACF,EAEa1C,GAAuB,IAAI4C,IACpC,IAAI7C,GAAmB,GAAG6C,CAAI,ICtdlC,IAGMC,GAsBOC,GAzBbC,GAAAC,EAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EASaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,IC1B3C,IAKaE,GAaAC,GA6EAC,EA6IAC,GA0MAC,GAkDAC,GACAC,GAzebC,GAAAC,EAAA,kBAKaR,GAAN,KAAiB,CAOtB,OAAO,gBAAgBS,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAGaT,GAAN,KAAoB,CAQzB,OAAO,UAAUU,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFlB,GAAW,gBAAgB,CAACW,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAASC,EAAIN,EAAW,EAAI,EAAGM,GAAKH,EAAOG,IAAK,CAC9C,IAAMC,EAAON,EAAQK,EAAI,EAAI,EAAIR,EAAMG,EAAQK,CAAC,EAC1CE,EAAON,EAAQI,EAAI,EAAI,EAAIP,EAAMG,EAAQI,CAAC,EAEhD,GAAIC,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEF,IAAMC,EAAM,KAAK,IAAIF,EAAMC,CAAI,EAC/B,GAAID,GAAQC,EACVJ,EAAMD,EAAQG,CAAC,EAAI,KAAK,IAAIC,EAAMC,CAAI,MACjC,CAEL,GAAIC,EAAM,EACR,OAEFL,EAAMD,EAAQG,CAAC,EAAI,CACrB,CACF,CAEA,OAAOF,CACT,CAOA,OAAO,iBAAiBM,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF
,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAAS,EAAI,EAAG,GAAKD,EAAW,IAC9B,GAAIF,EAAME,EAAY,CAAC,IAAM,GAAKF,EAAME,EAAY,CAAC,IAAMD,EAAWE,EAAY,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CACF,EAGaxB,EAAN,MAAMyB,CAAU,CAIrB,OAAO,KAAKC,EAAiC,CAC3C,OAAOD,EAAU,0BAA0BC,EAAM,EAAGA,EAAK,MAAM,CACjE,CAKA,OAAO,aAAaA,EAAyBC,EAAO,EAAsB,CACxE,IAAMC,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EAEV,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC1B,EAAIA,EAAO,EACf,KAAO,GAAK,GAAG,CACb,GAAIF,EAAK,CAAC,EAAIC,IAAS,EAAG,CACxBE,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAAIC,EACvB,KACF,CACA,GAAIA,EAAOD,EAAK,CAAC,IAAM,EACrB,MAAM,IAAI,MAAM,sBAAsB,EAExCG,EAAQ,CAAC,EAAI,EACbF,GAAQD,EAAK,CAAC,EACd,GACF,CACA,IAAK,IAAK,GAAK,EAAG,IAChBG,EAAQ,CAAC,EAAIH,EAAK,CAAC,EAErB,OAAOG,CACT,CAKA,OAAO,kBAAkBH,EAAyBI,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,wCAAwCJ,EAAK,MAAM,cAAc,EAE/G,OAAOD,EAAU,0BAA0BC,EAAMI,EAAMJ,EAAK,MAAM,CACpE,CAKA,OAAO,gBAAgBA,EAAyBI,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,sCAAsCJ,EAAK,MAAM,cAAc,EAE7G,OAAOD,EAAU,0BAA0BC,EAAM,EAAGI,CAAI,CAC1D,CAKA,OAAO,0BAA0BJ,EAAyBK,EAAeC,EAAqB,CAC5F,IAAIL,EAAO,EACX,QAAS,EAAII,EAAO,EAAIC,EAAK,IAAK,CAGhC,GAAIN,EAAK,CAAC,EAAI,EACZ,MAAM,IAAI,MAEN,+GAA+G,EAErHC,GAAQD,EAAK,CAAC,CAChB,CACA,OAAOC,CACT,CAEA,OAAO,eAAeD,EAA4C,CAChE,IAAME,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMK,EAAU,IAAI,MAAML,CAAI,EAC9BK,EAAQL,EAAO,CAAC,EAAI,EACpBK,EAAQL,EAAO,CAAC,EAAIF,EAAKE,EAAO,CAAC,EACjC,QAASX,EAAIW,EAAO,EAAGX,GAAK,EAAG,EAAEA,EAC/BgB,EAAQhB,CAAC,EAAIgB,EAAQhB,EAAI,CAAC,EAAIS,EAAKT,EAAI,CAAC,EAE1C,OAAOgB,CACT,CAKA,OAAO,cAAcH,EAAcI,EAA4B,CAC7D,GAAIJ,EAAO,CAACI,GAAcJ,GAAQI,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOJ,EAAO,EAAIA,EAAOI,EAAaJ,CACxC,CAEA,OAAO,cAAcK,EAAyBD,EAA+B,CAC3E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,GAAcC,EAAK,MAAM,CAAC,CACvE,CAQA,OAAO,gBAAgB5B,EAAsB8B,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAM/B,EAAE+B,CAAC,CAAC,EAEpB/B,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASmB,EAAyBa,EAA2C,CAClF,IAAMX,EAAOF,EAAK,OAClB,OAAOA,EAAK,IAAI,CAACY,EAAG,IAAMA,EAAIC,EAAI,CAAC,EAAIA,EAAI,EAAIX,CAAI,CAAC,CACtD,CAOA,OAAO,SAASY,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGrB,IAAMqB,IAAMG,EAAOxB,CAAC,CAAC,CAC/C,CACF,EAEahB,GAAN,MAAMyC,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBZ,EAChFa,EAAqBC,EAAsB,CAC7C,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,MAAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IACxCA,GAAOH,EAAY,OACrBA,EAAY,KAAKD,EAAUI,EAAM,CAAC,CAAC,EAEnCH,EAAYG,CAAG,EAAIJ,EAAUI,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMf,EAAQ,QAChB,GAAIA,EAAQe,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhEf,EAAQ,KAAK,CAAC,EAKlB,QAASe,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMF,EAAU,QAClB,GAAIA,EAAUE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEF,EAAU,KAAK,CAAC,EAKpB,QAASE,EAAM,EAAGA,EAAMH,EAAY,OAAS,EAAGG,IAC9C,GAAIA,EAAMD,EAAK,QACb,GAAIA,EAAKC,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DD,EAAK,KAAK,CAAC,EAKf,QAASC,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAAO,CACjD,GAAIH,EAAYG,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAID,EAAKC,CAAG,GAAKH,EAAYG,CAAG,GAAKD,EAAKC,EAAMH,EAAY,MAAM,GAAKA,EAAYG,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACHJ,EAA8BX,EAA4Ba,EAC1DD,EAAgCE,EAAgBE,EAAwBC,EAAwB,CAClG,GAAKA,EAIL,IAAIH,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIX,EAAQ,SAAYW,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAASI,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CN,EAAa,wBACTE,EAAUI,GAAOC,EAAgB,EAAI,EAAE,EAAGhB,
EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAChGA,EAAMJ,EAAU,OAAS,EAAGM,CAAO,EAE3C,CAaA,OAAO,uBACHP,EAA2BC,EAA8BX,EAAmBa,EAC5ED,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMO,EAAa,CAACP,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACpFC,CACT,CAYA,OAAO,uBACHP,EAA8BQ,EAA+BnB,EAAmBa,EAChFD,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,GAAKQ,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMD,EAAa,CAACP,EAAU,CAAC,EAAGQ,EAAW,CAAC,CAAC,EAE/C,OAAAV,EAAa,mBAAmB,GAAOE,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACrGC,CACT,CAKA,OAAe,mBACXR,EAA2BC,EAA8BO,EAAsBlB,EAC/Ea,EAA8BD,EAAgCE,EAAgBG,EAAkB,CAClG,GAAIP,EACF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAK,CAAC,MAGnB,SAASH,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAKT,EAAa,wBACzBE,EAAUI,EAAM,CAAC,EAAGf,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAAKA,EAAMJ,EAAU,OAAS,EACxGM,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXG,EAAgBC,EAAgBC,EAAkBC,EAAgBT,EAAgBU,EAClFC,EAAsBR,EAA0B,CAClD,IAAMS,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIN,GAAWA,IAAY,SACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAH,EAAKU,CAAY,EAAI,EACrBV,EAAKW,CAAY,EAAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAN,EAAKU,CAAY,EACgB,KAAK,MAAjCP,IAAY,cAA4BU,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/Db,EAAKW,CAAY,EAAIE,EAAYb,EAAKU,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASN,EAAKU,CAAY,EAAIV,EAAKW,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEapD,GAAN,KAAe,CAIpB,OAAO,qBACH2D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAGahE,GAAW,sBACXC,GAAW,uBCzexB,IAiBakE,GAsMPC,GAoCOC,GAKAC,GAKAC,EAeAC,GAiBAC,GAcAC,GAgBAC,GAmBAC,EA+BPC,GAiTOC,EAaAC,EAaAC,GAgFPC,GAwJOC,GAaAC,GAr7BbC,GAAAC,EAAA,kBAGAC,IACAC,KAaapB,GAAiB,GAsMxBC,GAAoB,CAACoB,EAAcC,IAAiD,CACxF,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,OAAQD,EAAM,CACZ,QACE,OAAOC,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,QACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,QACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,OACE,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mBAAmB,EAErC,MAAO,CAAC,MAAO,YAAY,EAE7B,QACE,MAAM,IAAI,MAAM,sBAAsBD,CAAI,EAAE,CAChD,CACF,EAEanB,GAA8B,CAACmB,EAAgBC,EAAsB,IAAM,CACtF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEapB,GAA4B,CAACkB,EAAgBC,EAAsB,IAAM,CACpF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEanB,EAA6B,IAAIoB,IAA6D,CACzG,IAAMC,EAAoC,CAAC,EAC3C,OAAAD,EAAK,QAAQE,GAAO,CACdA,EAAI,SAAW,GACjBD,EAAgB,KACZ,CAAC,QAAuB,KAAMC,CAAG,EAAG,CAAC,QAAuB,KAAMC,EAAU,eAAeD,CAAG,CAAC,CAAC,CAExG,CAAC,EACMD,CACT,EAMapB,GAAoBuB,GAE3BA,EAAO,IAAM,EACR,EACEA,EAAO,IAAM,EACf,EAGF,EASItB,GAAa,CAACuB,EAAW,MAAOP,EAAqBQ,EAAQ,MACpE,CAA
CR,GAAcA,IAAe,EACzB,GAAGO,CAAQ,IAAIC,CAAK,IAGtB,MAAMR,CAAU,IAAIO,CAAQ,KAAKC,CAAK,IASlCvB,GAAY,CAACsB,EAAkBP,EAAoBQ,IAC1DD,IAAa,MACRC,EAELR,IAAe,EACV,OAAOQ,CAAK,IAGd,MAAMR,CAAU,SAASQ,CAAK,IAQ1BtB,GAAY,CAACuB,EAAcT,IAClCA,IAAe,EACV,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAC1CT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,MAClBT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAGlCA,EAUItB,EACT,CAACsB,EAAcC,EAAsBC,EAAgBZ,IAC/CU,EAAK,WAAW,WAAW,GAAKE,EAAS,EACvC,OAAQD,GAAW,SACjBX,IAAS,MACJ,GAAGU,CAAI,KAAKC,CAAK,WAAWA,CAAK,eAAeA,CAAK,aAErD,GAAGD,CAAI,KAAKC,CAAK,WAAWA,CAAK,SAGtCX,IAAS,MACJ,GAAGU,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAK,KAAK,MAAMA,EAAQ,EAAI,CAAC,CAAC,KAAKA,EAAQ,EAAI,CAAC,IAEhF,GAAGD,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAKA,EAAQ,CAAC,IAIlDC,EAAS,EAAI,GAAGF,CAAI,IAAIC,CAAK,IAAMD,EAc5CrB,GACF,CAACqB,EAAcG,EAAoBC,EAAuCC,EACzEd,IAAuC,CACtC,IAAMe,EAAa,OAAOF,GAAgB,SACpCG,EAAOD,EAAaF,EAAcA,EAAY,OAC9CI,EAAe,CAAC,GAAG,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAC,EACzCE,EAAcF,EAAO,EAAI,MAAQA,GAAQ,EAAI,MAAMA,CAAI,QAAU,cAAcA,CAAI,IACnFf,EAAatB,GAAkBiC,EAAYZ,CAAU,EACrDmB,EAAY,OAAOlB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACtEmB,EAAc,OAAOnB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACxEF,EAAO,CAAC,QAASmB,EAAa,MAAOC,EAAW,QAASC,EAAa,OAAQR,CAAU,EAExFS,EAAgBjB,GAA+B,OAAOA,GAAQ,SAAWA,EAAM,GAAGA,CAAG,IAErFkB,EAAqB,CACzB,gBAAiB,GACjB,gBAAiB,GACjB,2BAA4B,GAC5B,IAAK,GACL,aAAc,GACd,IAAK,GACL,aAAc,EAChB,EAEMC,EAAgBR,EAAa,YAAc,GAC3CS,EAAQ,GAAGD,CAAa,GAAGd,CAAI,SAC/BgB,EAAU,GAAGF,CAAa,GAAGd,CAAI,WAEnCiB,EAAa,GACjB,QAASC,EAAI,EAAGA,EAAIX,EAAO,EAAGW,IAC5BD,GAAc;AAAA,aACTC,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC9CW,CAAC,gBAAgBxC,EAAasC,EAASE,EAAGX,CAAI,CAAC;AAAA,cAC/CW,CAAC,UAAUA,CAAC;AAAA,oBACNA,CAAC;AAAA,MAGfD,GAAc,WAAWV,EAAO,CAAC,eAEjC,IAAMY,EAAgCZ,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,oBAAoBV,EAAK,OAAO;AAAA,mBAC5BA,EAAK,OAAO;AAAA;AAAA,MAEzB2B,CAAU;AAAA;AAAA,KAIJG,EAAmBC,IACvBR,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIc,EAAY,OAAOrB,CAAI,IAAIqB,CAAS,KAGlDC,EAAoB,CAAC,EAC3B,GAAIf,GAAQ,EACV,QAASW,EAAIX,EAAO,EAAGW,GAAK,EAAGA,IAC7BI,EAAQ,KAAK,GAAG5C,EAAasC,EAASE,EAAGX,CAAI,CAAC,eAAeW,CAAC,IAAI,EAItE,IAAMK,EAAgChB,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,aAAaV,EAAK,OAAO;AAAA,aAC3BgC,EAAQ,KAAK,GAAG,CAAC;AAAA,KAGlBE,EAAmBC,IACvBZ,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIkB,EAAa,OAAOzB,CAAI,IAAIyB,CAAU,KAGpDC,EAAU,IAAIC,IAChBpB,IAAS,EAAI,KAAO,GAAGjB,EAAK,OAAO,IAAIqC,EAAK,IAAIf,CAAY,EAAE,KAAK,GAAG,CAAC,IAErEgB,EAAa,CAACH,EAAoBI,IAClCtB,EAAO,EACF,GAAGkB,CAAU,GAEb,GAAG/C,EAAa+C,EAAYI,EAAKtB,CAAI,CAAC,GAI3CuB,EAAa,CAACL,EAAoBI,EAAoB9B,KACtDQ,EAAO,EACF,GAAGkB,CAAU,IAAI1B,EAAK,IAEtB,GAAGrB,EAAa+C,EAAYI,EAAKtB,CAAI,CAAC,IAAIR,EAAK,IAIpDgC,EAAoE,CAAC,EACrEC,GAA6B,CAACP,EAAoBQ,IAA0B,CAChFpB,EAAmB,2BAA6B,GAChD,IAAMqB,GAAU,GAAGD,EAAO,IAAI,uBAAuBjC,CAAI,SACzD,GAAIkC,MAAWH,EACb,MAAO,GAAGG,EAAO,IAAIT,CAAU,IAEjC,IAAMH,GAAU,CAAC,EACjB,QAASJ,GAAIX,EAAO,EAAGW,IAAK,EAAGA,KAAK,CAClC,IAAMW,GAAMI,EAAO,WAAW,gBAAiBf,GAAIe,EAAO,KAAO1B,CAAI,EACrEe,GAAQ,KAAK,GAAGM,EAAWZ,EAASE,EAAC,CAAC,OAAOW,EAAG,MAAMD,EAAWb,EAAOG,EAAC,CAAC,GAAG,CAC/E,CACA,OAAAa,EAAyCG,EAAO,EAC5C,MAAMA,EAAO,mBAAmBD,EAAO,KAAK,OAAO;AAAA,sBACzCX,GAAQ,OAAS,EAAIA,GAAQ,KAAK,GAAG,EAAI,IAAI;AAAA,cAGpD,GAAGY,EAAO,IAAIT,CAAU,GACjC,EAEMU,GAAc,CAACC,EAAuBrC,KAAmB,IAAM,CACnE,GAAIT,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,KAAKrC,CAAK,IAC7B,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,CAAK,8BAA8BA,CAAK,UAC9E,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,mBAAmBrC,CAAK,UAC3C,GAAIT,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,GAAGU,CAAI,IAAIoC,CAAM,8DAA8DrC,CAAK,MAE3F,MAAM,IAAI,MAAM,6CAA6CT,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MA
AM,CAEhH,GAAG,EAEG+C,EAAeD,IAA2B,IAAM,CACpD,GAAI9C,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,CAAM,IACnB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,CAAM,OACvB,GAAI9C,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,mBAAmBU,CAAI,IAAIoC,CAAM,oBAAoBpC,CAAI,IAAIoC,CAAM,sBAAsBpC,CAAI,IAChGoC,CAAM,wBAAwBpC,CAAI,IAAIoC,CAAM,oBAEhD,MAAM,IAAI,MAAM,6CAA6C9C,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEGgD,GAA6B/B,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,QAAQoB,CAAS;AAAA,aACrD2B,EAAY,OAAOrC,CAAI,WAAW,CAAC;AAAA,KAGpCuC,EAAoBhC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,EAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,QAAQ9B,CAAS;AAAA,iBACjCV,CAAI,aAAa0B,EAAQe,CAAU,CAAC;AAAA,IAE/C,GAAG,EAEGC,GAAM,IAAIhB,IAA0C,CACxD,GAAIA,EAAQ,SAAWnB,EACrB,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAGlD,IAAMoC,EAAoBjB,EAAQ,IAAId,CAAY,EAAE,KAAK,GAAG,EAE5D,OAAIL,IAAS,EACJ8B,EAAY,IAAI,EACd9B,IAAS,EACX8B,EAAYM,EAAkB,CAAC,CAAC,GAEvC9B,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,CAAiB,IAE3C,EAEMC,GAAgBnB,GAChBlB,EAAO,EACF8B,EAAYZ,CAAU,GAE7BZ,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAIvCoB,GAA6BtC,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,YAAYoB,CAAS;AAAA,MAChEyB,GAAY,OAAOnC,CAAI,YAAa,OAAO,CAAC;AAAA,KAGtC8C,EAAoBvC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,EAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,EAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,CAAc,YAAY9B,CAAS;AAAA,UAC5CV,CAAI,aAAa0B,EAAQe,CAAU,CAAC;AAAA,IAExC,GAAG,EA0EH,MAAO,CACL,KAxCW,IAAM,CACjB,IAAMM,EAAQ,CAAC,EACXC,EAAmB,GACvB,OAAInC,EAAmB,kBACrBkC,EAAM,KAAK5B,CAA6B,EACxC6B,EAAmB,IAEjBnC,EAAmB,kBACrBkC,EAAM,KAAKxB,CAA6B,EACxCyB,EAAmB,IAEjBnC,EAAmB,6BACrB,OAAO,OAAOkB,CAAwC,EAAE,QAAQkB,IAAQF,EAAM,KAAKE,EAAI,CAAC,EACxFD,EAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKD,CAAiB,EAC5BE,EAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKF,EAA0B,EACrCG,EAAmB,IAEjBnC,EAAmB,MACrBkC,EAAM,KAAKR,CAAiB,EAC5BS,EAAmB,IAEjBnC,EAAmB,eACrBkC,EAAM,KAAKT,EAA0B,EACrCU,EAAmB,IAEjB,CAAC1C,GAAc0C,GACjBD,EAAM,QACF,SAAShC,CAAK,MAAMzB,EAAK,OAAO,IAAIc,EAAY,KAAK,GAAG,CAAC,KACzD,SAASY,CAAO,MAAM1B,EAAK,OAAO,IAAIM,EAAU,eAAeQ,CAAW,EAAE,KAAK,GAAG,CAAC,IAAI,EAExF2C,EAAM,KAAK;AAAA,CAAI,CACxB,EAIE,KAAAzD,EACA,gBAAA8B,EACA,gBAAAI,EACA,2BAAAQ,GACA,QAAAN,EACA,WAAAE,EACA,WAAAE,EACA,IAjFU,IAAIoB,IAAkD,CAChE,GAAIA,EAAgB,SAAW3C,EAAO,EACpC,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAElD,IAAMR,EAAQmD,EAAgB3C,CAAI,EAClC,GAAI,OAAOR,GAAU,SACnB,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAM4C,GAAoBO,EAAgB,MAAM,EAAG3C,CAAI,EAAE,IAAIK,CAAY,EAAE,KAAK,GAAG,EAEnF,OAAIL,IAAS,EACJ4B,GAAY,KAAMpC,CAAK,EACrBQ,IAAS,EACX4B,GAAYQ,GAAkB,CAAC,EAAG5C,CAAK,GAE9Cc,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,KAAK5C,CAAK,IAErD,EA6DE,YAAAoC,GACA,aA5DmB,CAACV,EAAoB1B,IACpCQ,EAAO,EACF4B,GAAYV,EAAY1B,CAAK,GAEpCc,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,CAAU,KAAK1B,CAAK,MAuDrD,IAAA2C,GACA,YAAAL,EACA,aAAAO,GAEA,MAAAvC,EACA,KAAAL,EACA,QAAAgB,EACA,MAAAD,EACA,KAAAR,CACF,CACF,EAWS3B,EACT,CAACoB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,QAASb,CAAU,EAW3DV,EACT,CAACmB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,SAAUb,CAAU,EAW5DT,GACT,CAACkB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,WAAYb,CAAU,EA8ErER,GAAN,KAA+C,CAC7C,YAAoBoE,EAA2DC,EAA4B,CAAvF,6BAAAD,EAA2D,YAAAC,EAoG/E,KAAQ,kBAAqC,CAAC,EAC9C,KAAQ,UAA6B,CAAC,EACtC,KAAQ,SAA8B,CA
AC,EAwBvC,KAAQ,cAAgB,CA9HoF,CAE5G,sCAAsCvD,EAA6B,CAGjE,MAAO,qBADY,OAAOA,GAAS,SAAW,GAAGA,CAAI,IAAMA,CACrB,eACxC,CAEA,UAAUwD,EAAiDpF,GAAgB,CACzE,IAAMqF,EAAiB,OAAOD,GAAkB,SAAWA,EAAgBA,EAAc,CAAC,EACpFE,EAAiB,OAAOF,GAAkB,SAAW,EAAIA,EAAc,CAAC,EACxEG,EAAiB,OAAOH,GAAkB,SAAW,EAAIA,EAAc,CAAC,EAE9E,GAAIC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,yBAC/B,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,yCAAyC,KAAK,OAAO,wBAAwB,KAC3F,KAAK,OAAO,wBAAwB,KAAK,KAAK,OAAO,wBAAwB,IAAI,EAGvF,GAAIF,EAAiBC,EAAiBC,EAAiB,KAAK,OAAO,kCACjE,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,+CACd,KAAK,OAAO,iCAAiC,GAAG,EAGtD,IAAMC,EAAuB,KAAK,wBAAwB,CAAC,IAAM,GAAK,KAAK,wBAAwB,CAAC,IAAM,EACpGC,EAAYD,EAAuB;AAAA;AAAA,wDAGA;AAAA;AAAA;AAAA;AAAA,yDAKnCE,EAAsBF,EACxB,4DACA;AAAA,mEAEIH,EAAiBC,EAAiBC,CAAc,iBAExD,MAAO,4BAA4BF,CAAc,KAAKC,CAAc,KAAKC,CAAc;AAAA,YAC/EE,CAAS;AAAA,MACfC,CAAmB;AAAA,GAEvB,CAEQ,uBAAuBC,EAA+B,CACxDA,EAAS,OAAS,IAChBA,EAAS,MAAM,WAAW,WAAW,GACvC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,MAAM,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAEpGA,EAAS,QAAQ,WAAW,WAAW,GACzC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,QAAQ,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAG9G,CAEQ,gBAAgBA,EAAyBC,EAA8B,CAC7E,GAAID,EAAS,QAAU,WACrB,MAAM,IAAI,MAAM,+FAA+F,EAEjH,KAAK,UAAU,KAAKA,CAAQ,EAC5B,KAAK,uBAAuBA,CAAQ,EAEpC,IAAME,EAASF,EAAS,QAAU,QAAU,OAAS,aAC/CjD,EAAciD,EAAS,KAAK,QAClC,MAAO,sBAAsBC,CAAY,kBAAkBC,CAAM,KAAKF,EAAS,IAAI,WAAWjD,CAAW,IAC3G,CAEA,oBAAoBoD,EAAoC,CACtD,OAAOA,EAAU,IAAIC,GAAK,KAAK,gBAAgBA,EAAG,KAAK,eAAe,CAAC,EAAE,KAAK;AAAA,CAAI,CACpF,CAEQ,yBAAyBJ,EAA+B,CAC9D,GAAIA,EAAS,QAAU,WACrB,MAAM,IAAI,MACN,sGAAsG,EAG5G,KAAK,kBAAkB,KAAKA,CAAQ,EACpC,KAAK,uBAAuBA,CAAQ,CACtC,CAEA,6BAA6BG,EAA0C,CACrE,OAAAA,EAAU,QAAQC,GAAK,KAAK,yBAAyBA,CAAC,CAAC,EAChD,IACT,CAEA,gBAAgBhE,EAAcV,EAA8BY,EAAS,EAAiB,CACpF,YAAK,SAAS,KAAK,CAAC,KAAAF,EAAM,KAAAV,EAAM,OAAAY,CAAM,CAAC,EAChC,IACT,CAEA,iBAAiB+D,EAAqD,CACpE,YAAK,SAAW,KAAK,SAAS,OAAOA,CAAkB,EAChD,IACT,CAKQ,oBAA6B,CACnC,GAAI,KAAK,SAAS,SAAW,EAC3B,MAAO,GAGT,IAAMC,EAA4B,CAAC,EACnC,OAAW,CAAC,KAAAlE,EAAM,KAAAV,EAAM,OAAAY,CAAM,IAAK,KAAK,SACtC,GAAIA,GAAUA,EAAS,EACjBZ,IAAS,MACX4E,EAAgB,KAAK,cAAclE,CAAI,iBAAiBV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,EAE1FgE,EAAgB,KAAK,GAAGlE,CAAI,eAAeV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,MAE1E,CACL,IAAMiE,EAAWjE,GAAU,MAAQA,IAAW,EAAIZ,EAAO,MAAMY,CAAM,IAAIZ,CAAI,IAC7E4E,EAAgB,KAAK,GAAGlE,CAAI,IAAImE,CAAQ,EAAE,CAC5C,CAGF,MAAO;AAAA,0BACeD,EAAgB,KAAK,IAAI,CAAC;AAAA,2BACzB,KAAK,aAAa,oCAC3C,CAMA,IAAI,2BAAoC,CACtC,OAAO,KAAK,mBAAmB,EAAI,KAAK,UAAU,IAAIhD,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,EAC1E,KAAK,kBAAkB,IAAIA,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,CACzD,CAKA,IAAI,eAAwD,CAC1D,GAAI,KAAK,SAAS,SAAW,EAC3B,OAGF,IAAMkD,EAA6B9E,GAC9B,UACe,EAAE,CAAC,MAAO,MAAO,MAAO,KAAK,EAAE,QAAQA,CAAI,CAAC,EAChE,OAAO,KAAK,SAAS,IAAI+E,GAAM,CAACD,EAA0BC,EAAE,IAAI,EAAGA,EAAE,QAAU,CAAC,CAAE,CACpF,CACF,EAEarF,GAAqB,CAACsF,EAAyClB,IACxE,IAAIrE,GAAiBuF,EAAelB,CAAM,EAYjCnE,GAAmB,CAACsF,EAA4BC,IAA0C,CACrG,IAAMC,EAASF,EAAQ,OACjB9E,EAAiB,CAAC,EACxB,QAASyB,EAAI,EAAGA,EAAIuD,EAAQvD,IAAK,CAC/B,IAAMvB,EAAM8E,EAAS,EAAIvD,EACnB,EAAIqD,EAAQ5E,CAAG,GAAK,GAChB6E,EAASA,EAAS,OAAS,EAAItD,CAAC,GAAK,GACvC,GAAK,IAAM,GACjBzB,EAAK,QAAQE,CAAG,CAEpB,CACA,OAAOF,CACT,ICj8BA,IAeMiF,GAMAC,GAGAC,GAGAC,GAWOC,GA4DAC,GAKAC,GAvGbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEMZ,GAAkB,CAACa,EAAmBC,IACvCA,GAAQA,EAAK,SAAWD,EAAa,CAAC,GAAI,IAAI,MAAMA,CAAS,EAAE,KAAK,CAAE,EAAE,QAAQ,EAAIC,EAEnFb,GAAiB,CAACc,EAA+BD,IACnDE,EAAU,gBAAgBD,EAAYf,GAAgBe,EAAW,OAA
QD,CAAI,CAAC,EAE5EZ,GAAmB,CAACY,EAAgBG,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKJ,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAM,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEajB,GAA6B,CAACkB,EAAyBC,IAAoC,CACtG,IAAMC,EAAgBF,EAAY,SAC5BR,EAAYQ,EAAY,KAAK,OAC7BP,EAAOd,GAAgBa,EAAWS,CAAQ,EAC1CE,EAAcvB,GAAeoB,EAAY,KAAMP,CAAI,EACnDK,EAASM,EAAe,SAAUF,EAAeC,EAAY,MAAM,EACnEN,EAAQQ,EAAc,IAAKH,EAAeV,CAAS,EACrDc,EACJ,GAAIb,EAAK,SAAW,GAAKA,EAAK,CAAC,IAAM,GAAKA,EAAK,CAAC,IAAM,EAAG,CACvD,IAAMc,EAAWT,EAAO,KAAK,MACvBU,EAA0C,CAAC,GAAI,GAAI,CAAC,EAC1DF,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA,sCAChDS,CAAQ,KAAKC,EAAc,CAAC,EAAI,CAAC,MAAMA,EAAc,CAAC,CAAC;AAAA,IACzFC,EAAa,UAAUD,CAAa,CAAC;AAAA,+BACVA,EAAc,CAAC,CAAC;AAAA,+BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,uCAIRX,EAAM,YAAY,eAAe,CAAC;AAAA;AAAA;AAAA,2BAG9CW,EAAc,CAAC,CAAC;AAAA,2BAChBA,EAAc,CAAC,CAAC;AAAA;AAAA,QAEnCV,EAAO,YAAY,iBAAkB,8BAA8B,CAAC;AAAA;AAAA,IAG1E,MACEQ,EAAmBG,GAA+B;AAAA,IAClDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBZ,EAAOC,CAAM,CAAC;AAAA;AAAA,IAElFjB,GAAiBY,EAAMD,EAAWK,EAAOC,CAAM,CAAC;AAAA;AAAA,IAEhDW,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DX,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGlDA,EAAO,YAAY,aAAcD,EAAM,aAAa,UAAU,CAAC,CAAC;AAAA,KAGpE,MAAO,CACL,KAAM,YACN,YAAa,CAAC,KAAM,GAAGI,CAAQ,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC9D,WAAaV,GAAW,CACtB,IAAMmB,EAAaf,EAAU,KAAKQ,CAAW,EAC7C,MAAO,CACL,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKmB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGC,EAA2BpB,EAAO,CAAC,EAAE,KAAMY,CAAW,CAAC,CAC5G,CACF,EACA,gBAAAG,CACF,CACF,EAEavB,GAAY,CAAC6B,EAAyBC,IAA0C,CAC3FnC,GAAekC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ9B,GAA2B8B,EAAQ,OAAO,CAAC,EAAGC,EAAW,IAAI,CAAC,CAChF,EAEa7B,GAA4B6B,GACrCC,GAA4B,CAAC,KAAMD,EAAW,IAAgB,CAAC,ICxGnE,IAYME,GAaAC,GAaAC,GAaAC,GAYAC,GAQAC,GAYAC,GAcAC,GASAC,GAaOC,GAyEPC,GAkCOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAtQbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KACAC,KAEM3B,GAAqC,CACzC,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,oCACX,UAAW,6BACX,GAAI,6BACJ,GAAI,oCACJ,OAAQ,uBACV,EAEMC,GAA2C,CAC/C,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,wBACX,UAAW,wBACX,GAAI,wBACJ,GAAI,wBACJ,OAAQ,uBACV,EAEMC,GAA4C,CAChD,IAAK,aACL,IAAK,aACL,KAAM,IACN,IAAK,IACL,KAAM,IACN,UAAW,IACX,UAAW,IACX,GAAI,IACJ,GAAI,IACJ,OAAQ,GACV,EAEMC,GAA8C,CAClD,IAAK,YACL,IAAK,YACL,IAAK,YACL,KAAM,YACN,UAAW,YACX,UAAW,iBACX,GAAI,YACJ,GAAI,kBACJ,OAAQ,gBACV,EAEMC,GAAmB,CAACwB,EAAsBC,IAA2B,CACzE,IAAMC,EAAM,CAAC,EACb,QAASC,EAAIF,EAAOD,EAAcG,EAAIF,EAAM,EAAEE,EAC5CD,EAAI,KAAKC,CAAC,EAEZ,OAAOD,CACT,EAEMzB,GAA4B,CAAC2B,EAA0BC,IAAkD,CAC7G,IAAMC,EAAc,CAAC,EACfL,EAAOG,EAAM,OACnB,QAASG,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,IACxBD,EAAY,KAAKF,EAAMG,CAAG,CAAC,EAG/B,IAAMC,EAAcH,EAAK,IAAIE,GAAOH,EAAMG,CAAG,CAAC,EAC9C,MAAO,CAACD,EAAaE,CAAW,CAClC,EAEM9B,GAAuB,CAAC0B,EAAiBC,IAA6B,CAC1E,IAAMJ,EAAOG,EAAM,OAASC,EAAK,OAC3BI,EAAc,CAAC,EACjBC,EAAW,EACf,QAASH,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,GACxBE,EAAY,KAAKL,EAAMM,GAAU,CAAC,EAElCD,EAAY,KAAK,CAAC,EAGtB,OAAOA,CACT,EAEM9B,GAAuB,CAAC0B,EAAgBJ,IAA0B,CACtE,QAASE,EAAI,EAAGA,EAAIE,EAAK,OAAQ,EAAEF,EACjC,GAAIE,EAAKA,EAAK,OAASF,EAAI,CAAC,IAAMF,EAAO,EAAIE,EAC3C,MAAO,GAGX,MAAO,EACT,EAEMvB,GAAqB,CAACyB,EAAgBJ,IAA2B,CACrE,IAAMC,EAAM,CAAC,EACb,GAAI,CAACvB,GAAqB0B,EAAMJ,CAAI,EAAG,CACrC,QAASE,EAAI,EAAGA,EAAIF,EA
AM,EAAEE,EACtBE,EAAK,QAAQF,CAAC,IAAM,IACtBD,EAAI,KAAKC,CAAC,EAGdE,EAAK,QAAQM,GAAQT,EAAI,KAAKS,CAAI,CAAC,CACrC,CACA,OAAOT,CACT,EAEarB,GACT,CAAC+B,EAAcC,EAAqCC,EAA+BC,EAClFC,EAA0BV,EAAuBE,IAAuC,CACvF,IAAMS,EAAaH,EAAO,CAAC,EAAE,KAEvBI,EAAaC,EAAU,KAAKb,CAAW,EACvCc,EAAaD,EAAU,KAAKX,CAAW,EAEvCa,EAAQC,EAAc,KAAMR,EAAO,CAAC,EAAE,SAAUG,CAAU,EAC1DM,EAASC,EAAe,SAAUR,EAAgBV,CAAW,EAE7DmB,EAAgB,GAEhBC,EAAsB;AAAA,oDACkBD,CAAa;AAAA,SA+C3D,MAAO,CACL,KAAAb,EACA,YAAAC,EACA,gBA/CuBc,GAA+B;AAAA,UACpDA,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBN,EAAOE,CAAM,CAAC;AAAA,UACjFG,CAAmB;AAAA;AAAA;AAAA;AAAA,WAIlBC,EAAa,UAAUF,CAAa,CAAC;AAAA;AAAA,2CAELA,CAAa;AAAA;AAAA;AAAA,gCAGxBnD,GAAiByC,CAAU,CAAC;AAAA;AAAA,wDAEJU,CAAa;AAAA,iCACpCJ,EAAM,YAAY,YAAY,CAAC;AAAA,yBACvCjD,GAAU2C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,wCAKNU,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAM3BpD,GAAgB0C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAS3CQ,EAAO,YACH,cACA,GACIR,IAAe,OAAS,GAAGQ,EAAO,KAAK,OAAO,yCACtB,GAAGA,EAAO,KAAK,OAAO,IAAIhD,GAAmBwC,CAAU,CAAC,GAAG,EAAE,CAAC;AAAA;AAAA,WAShG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUU,CAAc,CAAC,EACvD,cAAe,CAAC,EAAGE,CAAU,EAC7B,gBAAiB,CAAC,CAAC,QAAuB,KAAME,CAAU,CAAC,CAC7D,EACF,CACF,EAEEtC,GACF,CAAC8C,EAAyBhB,EAAciB,EACvCd,IAAiG,CAChG,IAAMe,EACFF,EAAQ,OAAO,SAAW,EAAIC,EAAaE,GAAiCH,EAAQ,OAAQC,CAAU,EAEtGG,EAAcF,EAAkB,KAChCE,EAAY,SAAW,GAAK,CAACF,EAAkB,oBACjDE,EAAcJ,EAAQ,OAAO,CAAC,EAAE,KAAK,IAAI,CAACK,EAAM9B,IAAMA,CAAC,GAEzD,IAAM+B,EAAgBf,EAAU,cAAca,EAAaJ,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EAEpFvB,EAAO6B,EACPb,EAAQO,EAAQ,OAAO,CAAC,EACtBO,EAAevD,GAAmByB,EAAMuB,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EACvEO,EAAa,OAAS,IACxBd,EAAQO,EAAQ,QACZQ,GAA2BR,EAAQ,OAAO,CAAC,EAAGO,CAAY,EAAG,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAChG9B,EAAO7B,GAAiB6B,EAAK,OAAQgB,EAAM,KAAK,MAAM,GAGxD,GAAM,CAACf,EAAaE,CAAW,EAAI/B,GAA0B4C,EAAM,KAAMhB,CAAI,EACzEgC,EAAmB/B,EACnBwB,EAAkB,WACpBO,EAAmB3D,GAAqB4B,EAAa4B,CAAa,GAGpEN,EAAQ,QACJ/C,GACI+B,EAAM,CAAC,KAAMkB,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACT,CAAK,EAAGN,EAChFa,EAAQ,OAAO,CAAC,EAAE,SAAUS,EAAkB7B,CAAW,EAC7D,CAAC,OAAQ,CAACa,CAAK,CAAC,CAAC,CACvB,EAEStC,GAAmB,CAAC6C,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEa7C,GAAiB,CAAC4C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa5C,GAAiB,CAAC2C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa3C,GAAwB,CAAC0C,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEa1C,GAAkB,CAACyC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEazC,GAAkB,CAACwC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEaxC,GAAmB,CAACuC,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEavC,GAAkB,CAACsC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEatC,GAAwB,CAACqC,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEarC,GAAqB,CAACoC,EAAyBC,IAAuC,CACjG/C,GAAa8C,EAAS,qBAAsBC,EAAY,QAAQ,CAClE,ICxQA,IAYMS,GAoBAC,GACOC,GA2EAC,GAUPC,GAeAC,GAWAC,GAWAC,GAWAC,GAWAC,GAoBAC,GAqBAC,GAoBAC,GAWAC,GAWAC,GAWAC,GAsBOC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GA7WbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KACAC,KAEMhC,GAAkBiC,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,0BAA0B,CAE9C,EAYMhC,GAAkBiC,GAAU,CAAC,GAAI,GAAI,eAAeA,EAAM,aAAa,eAAe,CAAC,IAAK,EAAE,EACvFhC,GACT,CAACiC,EAAcC,EAAqCH,EAA+BI,EAClFC,EAAqBC,EAA0BC,EAAW,GAAOC,EAAoB,KAAuB,CAC3G,IAAMC,EAAwB,CAAC,EACzBC,EAAaV,EAAO,CAAC,EAAE,KACvBW,EAAYD,EAAW,OACvBE,EAAOC,EAAU,cAAcR,EAAWM,CAAS,EACnDG,EAAkB,CAACN,GAAqBI,EAAK,SAAW,EAC9DF,E
AAW,QAAQ,CAACK,EAAGC,IAAM,CACvBF,GAAmBF,EAAK,QAAQI,CAAC,GAAK,EACpCT,GACFE,EAAY,KAAK,CAAC,EAGpBA,EAAY,KAAKM,CAAC,CAEtB,CAAC,EACD,IAAME,EAAaR,EAAY,OACzBS,EAAaL,EAAU,KAAKJ,CAAW,EA4C7C,MAAO,CACL,KAAAP,EACA,YAAAC,EACA,gBA9CuBgB,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EAErBnB,EAAQoB,EAAc,KAAMrB,EAAO,CAAC,EAAE,SAAUW,CAAS,EACzDW,EAASC,EAAe,SAAUjB,EAAgBW,CAAU,EAC5DO,EAAMpB,EAASH,EAAOqB,EAAQV,CAAI,EACpCa,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGC,EAAI,EAAGD,EAAIf,EAAWe,IAEhCZ,GAAmBF,EAAK,QAAQc,CAAC,GAAK,GACpCnB,GACFoB,IAGFF,EAAY,YAAYC,CAAC,eAAeA,CAAC,MAAMhB,EAAWgB,CAAC,CAAC,MAAMA,CAAC;AAAA,oBAC3DF,EAAI,CAAC,EAAE,SAAS,YAAY,EAAI,qBAAqBE,CAAC,IAAM,EAAE;AAAA,oBAC9DzB,EAAM,WAAW,gBAAiByB,EAAG,IAAIA,CAAC,EAAE,CAAC;AAAA,oBAC7CD,CAAS;AAAA,qBAGjBL,EAAQ,KAAK,GAAGnB,EAAM,WAAW,gBAAiByB,EAAGJ,EAAO,WAAW,iBAAkBK,CAAC,CAAC,CAAC,GAAG,EAC/FA,KAGJ,MAAO;AAAA;AAAA,UAELR,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBlB,EAAOqB,CAAM,CAAC;AAAA;AAAA,UAElFH,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,+BACvDlB,EAAM,KAAK,OAAO;AAAA,iCAChBqB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDF,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,YAClBI,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,CAAC,CAAC;AAAA,YACNC,CAAS;AAAA,YACTD,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,SAAW,EAAIF,EAAO,YAAY,aAAc,OAAO,EAAIE,EAAI,MAAM,CAAC,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,UAE5F,EAME,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUH,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGU,EAA2BlB,EAAYD,CAAW,CAAC,CACxG,EACF,CACF,EAESvC,GACT,CAAC8B,EAA+B6B,IAAmD,CACjF,IAAMjB,EAAiB,CAAC,EACxB,OAAIZ,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,GACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQ8B,GAAKlB,EAAK,KAAK,OAAOkB,CAAC,CAAC,CAAC,EAEzDC,GACH,CAAC,KAAAnB,EAAM,SAAUiB,EAAW,SAAU,kBAAmBA,EAAW,iBAAiB,CAAC,CAC5F,EAEE1D,GACF,CAAC6D,EAAyB9B,EAAc2B,EAA8BzB,IAA6B,CACjG,IAAMJ,EAASgC,EAAQ,OACjBC,EACFjC,EAAO,SAAW,EAAI6B,EAAa3D,GAAiC8B,EAAQ6B,CAAU,EAE1FG,EAAQ,QACJ/D,GACIiC,EAAM,CAAC,KAAM+B,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACjC,EAAO,CAAC,CAAC,EACjFiC,EAAkB,mBAAqBA,EAAkB,KAAK,SAAW,EAAIjE,GAAOoC,EACpF6B,EAAkB,KAAMjC,EAAO,CAAC,EAAE,SAAUiC,EAAkB,SAC9DA,EAAkB,iBAAiB,EACvC,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEE7D,GAAoB,CAAC4D,EAAyBH,IAAuC,CACzF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,eAAgBH,EANf,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,qBACL,CAC8D,CAChE,EAEM5B,GAAgB,CAAC2D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,EACL,CAC0D,CAC5D,EAEM3B,GAAgB,CAAC0D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,sBAC1C,sBACL,CAC0D,CAC5D,EAEM1B,GAAuB,CAACyD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,qBACL,CACiE,CACnE,EAEMzB,GAAiB,CAACwD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAKlC,EAAM,WAAW,gBAAiByB,EAAG,CAAC,CAAC,EAIxD,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMxB,GAAkB,CAACuD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAiB7B7D,GAAiB6D,EAAS,aAAcH,EAhBb,CAAC5B,EAAOqB,EAAQV,IAAS,CAClD,IAAIwB,EAAO,EACX,QAASV,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,
CAAC,GAAK,GAAKd,EAAK,SAAW,KAE1CwB,GAAQJ,EAAQ,OAAO,CAAC,EAAE,KAAKN,CAAC,GAIpC,MAAO,CACL,oBACA,GACA,cAAczB,EAAM,aAAa,eAAe,CAAC,KACjD,eAAeqB,EAAO,KAAK,KAAK,UAAUc,CAAI,IAChD,CACF,CAC4D,CAC9D,EAEM1D,GAAiB,CAACsD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAK,iBAAiBT,CAAC,QAAQ,EAI3C,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMtB,GAAkB,CAACqD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,aAAcH,EANb,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC4D,CAC9D,EAEMrB,GAAiB,CAACoD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,YAAaH,EANZ,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC2D,CAC7D,EAEMpB,GAAuB,CAACmD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,oBAC1C,EACL,CACiE,CACnE,EAEMnB,GACF,CAACuD,EAA0BzB,EAAyBJ,IAAwC,CAC1F,GAAII,EAAK,SAAW,EAClB,OAAOJ,EAGT,IAAIU,EAAa,EACboB,EAAa,EACjB,QAASC,EAAM,EAAGA,EAAM3B,EAAK,OAAQ2B,IAC/B3B,EAAK,QAAQ2B,CAAG,IAAM,GACxBrB,GAAcmB,EAAME,CAAG,EAEvBD,GAAcD,EAAME,CAAG,EAO3B,OAAOD,EAAa,IAAMpB,EAAa,IACzC,EAESnC,GAAa,CAACiD,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FpD,GAAgBuD,EAASH,CAAU,EAEnCW,GAAiBR,EAASH,CAAU,CAExC,EAEa7C,GAAW,CAACgD,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FxD,GAAc2D,EAASH,CAAU,EAEjCY,GAAeT,EAASH,CAAU,CAEtC,EAEa5C,GAAW,CAAC+C,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FvD,GAAc0D,EAASH,CAAU,EAEjCa,GAAeV,EAASH,CAAU,CAEtC,EAEa3C,GAAkB,CAAC8C,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FtD,GAAqByD,EAASH,CAAU,EAExCc,GAAsBX,EAASH,CAAU,CAE7C,EAEa1C,GAAY,CAAC6C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FrD,GAAewD,EAASH,CAAU,EAElCe,GAAgBZ,EAASH,CAAU,CAEvC,EAEazC,GAAY,CAAC4C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FnD,GAAesD,EAASH,CAAU,EAElCgB,GAAgBb,EAASH,CAAU,CAEvC,EAEaxC,GAAa,CAAC2C,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FlD,GAAgBqD,EAASH,CAAU,EAEnCiB,GAAiBd,EAASH,CAAU,CAExC,EAEavC,GAAY,CAAC0C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FjD,GAAeoD,EAASH,CAAU,EAElCkB,GAAgBf,EAASH,CAAU,CAEvC,EAEatC,GAAkB,CAACyC,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FhD,GAAqBmD,EAASH,CAAU,EAExCmB,GAAsBhB,EAASH,CAAU,CAE7C,EAEarC,GAAe,CAACwC,EAAyBH,IAAuC,CACvF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FzD,GAAkB4D,EAASH,CAAU,EAErCoB,GAAmBjB,EAASH,CAAU,CAE1C,ICnXA,IAcMqB,GAeOC,GA0BAC,GA0BAC,GAjFbC,GAAAC,EAAA,kBAOAC,IAEAC,KAGAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAQaR,GAAS,CAACS,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,
eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaT,GAAS,CAACQ,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaR,GAA4BQ,GACrCQ,GAA4BR,CAAoE,IClFpG,IAuEMS,GAmKAC,GAsGAC,GA2JAC,GA0HOC,GAqCPC,GAmHOC,GA7vBbC,GAAAC,EAAA,kBAGAC,IAEAC,KAEAC,KAgEMX,GAA0B,CAACY,EAA+BC,IAAoD,CAmClH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAUH,EAAO,CAAC,EAClBI,EAAOJ,EAAO,CAAC,EACfK,EAAYL,EAAO,CAAC,EACpBM,EAAON,EAAO,CAAC,EACfO,EAAuBP,EAAO,CAAC,EAErC,GAAIM,GAAQC,EACV,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIL,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMM,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAkBR,EAAM,KAAK,CAAC,EAEpC,GAAIE,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIA,EAAQ,KAAK,CAAC,IAAMO,EACtB,MAAM,IAAI,MAAM,uEAAuE,EAGzF,GAAIN,EAAK,KAAK,CAAC,IAAMD,EAAQ,KAAK,CAAC,EACjC,MAAM,IAAI,MAAM,oFAAoF,EAGtG,IAAIQ,EAAcP,EAAK,KAAK,CAAC,EAAI,EAC7BQ,EAAcD,EACdE,EAAcD,EAClB,GAAIX,EAAW,eAAe,OAAS,EAAG,CACxC,GAAIA,EAAW,eAAe,SAAW,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAErE,QAAWa,KAAMb,EAAW,eAC1B,GAAIa,EAAKb,EAAW,WAAa,EAC/B,MAAM,IAAI,MAAM,mDAAmD,EAIvEU,EAAcV,EAAW,eAAe,CAAC,EACzCW,EAAcX,EAAW,eAAe,CAAC,EACzCY,EAAcZ,EAAW,eAAe,CAAC,CAC3C,CAEA,IAAMc,EAAmBN,EAEzB,GAAIE,IAAgBC,EAClB,MAAM,IAAI,MAAM,6DAA6D,EAG/E,GAAIR,EAAK,KAAK,CAAC,IAAMO,EAAcC,EAAcC,EAC/C,MAAM,IAAI,MAAM,+EAA+E,EAGjG,IAAIG,EAAqB,EACzB,GAAIV,EAAM,CACR,GAAIM,IAAgBC,EAClB,MAAM,IAAI,MAAM,oDAAoD,EAEtE,GAAIP,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,qCAAqC,EAEvD,GAAIA,EAAK,KAAK,CAAC,IAAM,EACnB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,KAAK,CAAC,IAAME,EACnB,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIF,EAAK,KAAK,CAAC,IAAML,EAAW,SAC9B,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAIK,EAAK,KAAK,CAAC,IAAMM,EAAcX,EAAW,SAC5C,MAAM,IAAI,MAAM,gEAAgE,EAG7EA,EAAW,yBACde,EAAqBV,EAAK,KAAK,CAAC,EAGpC,CAEA,IAAMW,EAAsBF,EAAmBC,EACzCE,EAAoB,GAEpBC,EAAW,EACjB,GAAId,EAGF,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAIC,EACF,MAAM,IAAI,MAAM,uBAAuB,EAGzC,MAAO,CACL,UAAAE,EACA,eAAAC,EACA,mBAAAO,EACA,iBAAAD,EACA,oBAAAE,EACA,kBAAAC,EACA,gBAAAR,EACA,WAAYC,EACZ,YAAAE,EACA,SAAU,KAAK,MAAMF,EAAcV,EAAW,QAAQ,EACtD,UAAW,KAAK,MAAMY,EAAcZ,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAqB,GACrB,aAAc,GACd,UAAW,CACb,CACF,EAEMZ,GAAkC,CAAC+B,EAA0BlB,EAAmBmB,EAAWC,IAAc,CAC7G,IAAMC,EAAaC,GAAiBF,CAAC,EACjCG,EAAK,GACHC,EAAQJ,EAAIC,EACdG,EAAQD,EACVA,EAAK,EACIC,EAAQ,EAAI,KACrBD,EAAK,KAAK,KAAKC,EAAQ,CAAC,GAE1B,IAAMC,EAAoB,KAAK,KAAKL,EAAIC,EAAaE,CAAE,EACjDG,EAAoC,CACxC,CAAC,KAAM1B,EAAM,SAAU,KAAM,EAAIoB,CAAC,EAAG,CAAC,QAAuB,KAAMI,CAAK,EACxE,CAAC,QAAuB,KAAMC,CAAiB,CACjD,EACME,EAAWC,GAA4B5B,EAAM,SAAUqB,CAAU,EACjEQ,EAAUC,KAA0CT,CAAU,EAE9DU,EAAmBC,GAA+B,CACtD,IAAMC,EAAcC,EAAe
,IAAKlC,EAAM,SAAUA,EAAM,KAAMqB,CAAU,EAExEc,EAA8B,CAClC,CAAC,KAAM,QAAS,KAFIL,GAA0B9B,EAAM,QAAQ,CAEC,EAAG,CAAC,KAAM,SAAU,KAAM,KAAK,EAC5F,CAAC,KAAM,sBAAuB,KAAM,KAAK,CAC3C,EAEA,MAAO;AAAA,0CAC+BuB,CAAE;AAAA,0CACFA,CAAE;AAAA,IACxCS,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBF,CAAW,CAAC;AAAA,IACrED,EAAa,UAAU,CACrBT,EAAI,EAAG,CACT,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,8BAIwBM,CAAO;AAAA;AAAA,gCAELA,CAAO;AAAA;AAAA,+BAER,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,gDACT,IAAK,GACH,MAAO,oGACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA,uBAINM,CAAO;AAAA;AAAA,0BAEJA,CAAO;AAAA;AAAA,+BAEF,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,aACT,IAAK,GACH,MAAO,8BACT,IAAK,GACH,MAAO,4DACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAMHU,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,yBAIvBJ,CAAO;AAAA,0BACNI,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA,IAI9C,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CAAC,KAAM,GAAGV,CAAE,IAAII,CAAQ,IAAIN,CAAU,EAAE,EACrD,gBAAAU,EACA,WAAY,KAAO,CAAC,QAAS,CAAC,EAAG,cAAe,CAAC,EAAGZ,CAAC,EAAG,gBAAAO,CAAe,EACzE,CACF,EAEMtC,GACF,CAACgD,EAAyBC,EAAeC,EAAiBC,EACzDlC,EAA4CmC,EAAiCzC,EAC7Ee,IAA+B,CAC9B,IAAMC,EAAsBD,EAAqB0B,EAAW,iBACtDC,EAAa,CAACD,EAAW,UAAWA,EAAW,SAAUA,EAAW,eAAgBzB,CAAmB,EACvG2B,EAAaF,EAAW,aAAe,QAAaJ,EAAQ,YAAc,EAC1EO,EAAkBD,EACpB,CAACF,EAAW,UAAWA,EAAW,SAAUzB,EAAqByB,EAAW,QAAQ,EACpF,OAIEI,EAAQ7C,EAAW,QAAU,EAAI,EAAM,KAAK,KAAKyC,EAAW,QAAQ,EAAIzC,EAAW,MACnFsB,EAAaC,GAAiBkB,EAAW,QAAQ,EACjDK,EAAqBL,EAAW,SAAWnB,EAC3CyB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKhC,EAAsB+B,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACMd,EAAoC,CACxC,CAAC,QAAuB,KAAMc,EAAW,cAAc,EAAG,CAAC,QAAuB,KAAMK,CAAkB,EAC1G,CAAC,QAAuB,KAAM9B,CAAmB,EAAG,CAAC,QAAuB,KAAMyB,EAAW,QAAQ,EACrG,CAAC,OAAsB,KAAMI,CAAK,EAAG,CAAC,QAAuB,KAAM9B,CAAkB,EACrF,CAAC,QAAuB,KAAM0B,EAAW,gBAAgB,CAC3D,EAEMQ,EAAwD,CAAC,OAAQ,MAAM,EACzET,GACFS,EAAkB,KAAK,MAAM,EAE3B3C,GACF2C,EAAkB,KAAK,MAAM,EAE/B,IAAMC,EAAU,CAAC,CAAC,KAAMR,EAAY,SAAUJ,EAAE,SAAU,aAAgC,CAAC,EACvFK,GACFO,EAAQ,KAAK,CAAC,KAAMN,EAAkB,SAAUN,EAAE,SAAU,aAAgC,CAAC,EAE/F,IAAMN,EAAmBC,GAA+B,CACtD,IAAMkB,EAASC,EAAc,IAAKd,EAAE,SAAUA,EAAE,KAAMhB,CAAU,EAC1D+B,EAASD,EAAc,MAAOb,EAAI,SAAUA,EAAI,KAAMjB,CAAU,EAChEgC,EAAY,CAACH,EAAQE,CAAM,EACjC,GAAIb,EAAS,CACX,IAAMe,GAAeH,EAAc,WAAYZ,EAAQ,SAAUA,EAAQ,KAAMlB,CAAU,EACzFgC,EAAU,KAAKC,EAAY,CAC7B,CACIjD,GACFgD,EAAU,KACNF,EAAc,yBAA0B9C,EAAqB,SAAUA,EAAqB,IAAI,CAAC,EAEvG,IAAMkD,EAASrB,EAAe,SAAUG,EAAE,SAAUI,CAAU,EACxDe,EAAa,CAACD,CAAM,EACtBb,GACFc,EAAW,KAAKtB,EAAe,cAAeG,EAAE,SAAUM,EAAkBtB,CAAU,CAAC,EAEzF,IAAMQ,EAAUC,KAA0CT,CAAU,EAE9Dc,GAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAM,KAA+B,EACvF,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA;AAAA,gCAECI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,gCAC7CI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,IACzEd,EAAa,iBAAiBG,EAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAMH,IACOP,GAAWG,EACN;AAAA;AAAA,+EAIA;AAAA,wEAGR,CAAC;AAAA,MACNA,EAAa,4DAA8D,EAAE;AAAA,kBACjEb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAOhB,IACKU,GAAWG,EACN;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAQA,yEAER,CAAC;AAAA,QAEAA,EACI,+FACA,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA,mBAKCb,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBASF,IAAM,CACpB,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,wCACT,Q
ACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA,8BACkBkC,EAAO,KAAK,KAAK,6BACnClD,EAAuB,oCAAsC,KAAK;AAAA;AAAA,IAGxE,EACA,MAAO,CACL,KAAM,iBACN,YAAa,CACX,KAAM,GAAGgB,CAAU,IAAIhB,IAAyB,MAAS,IAAIkC,IAAY,MAAS,IAAIH,EAAQ,WAAW,GACzG,kBAAAY,CACF,EACA,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAGE1C,GACF,CAAC+C,EAAyBqB,EAAmBC,EAAeC,EAC3DC,EAA6B9C,IAA+B,CAC3D,IAAMC,EAAsBD,EAAqB8C,EAAO,iBAClDC,EAAQD,EAAO,MAAQA,EAAO,MAAQ,EACtCE,EAAsBF,EAAO,YAAcC,EAC3CE,EAAeH,EAAO,YAAc,MAAQxB,EAAQ,YAAc,EAClE4B,EACFD,EAAe,CAACH,EAAO,UAAWA,EAAO,SAAU7C,EAAqB6C,EAAO,QAAQ,EAAI,OACzFK,EAAc,CAACL,EAAO,UAAWA,EAAO,eAAgBE,CAAmB,EAC3EhB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKa,EAAO,UAAYd,CAAS,EACzC,EAAG,KAAK,KAAKc,EAAO,eAAiBd,CAAS,EAC9C,EAAGc,EAAO,UAAYA,EAAO,QAC/B,EAEMlC,EAAoC,CACxC,CAAC,QAAuB,KAAMkC,EAAO,cAAc,EAAG,CAAC,QAAuB,KAAM7C,CAAmB,EACvG,CAAC,QAAuB,KAAM6C,EAAO,SAAS,EAAG,CAAC,QAAuB,KAAMA,EAAO,QAAQ,EAC9F,CAAC,QAAuB,KAAME,CAAmB,EAAG,CAAC,QAAuB,KAAMhD,CAAkB,EACpG,CAAC,QAAuB,KAAM8C,EAAO,gBAAgB,CACvD,EACMZ,EACFW,EAAY,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,EACpDV,EAAU,CAAC,CAAC,KAAMgB,EAAa,SAAUR,EAAM,SAAU,aAAgC,CAAC,EAC5FM,GACFd,EAAQ,KAAK,CAAC,KAAMe,EAAoB,SAAUP,EAAM,SAAU,aAAgC,CAAC,EAErG,IAAM1B,EAAmBC,GAA+B,CACtD,IAAMkC,EAAcf,EAAc,QAASM,EAAM,SAAUA,EAAM,IAAI,EAC/DU,EAAUhB,EAAc,IAAKO,EAAE,SAAUA,EAAE,IAAI,EAC/CL,EAAY,CAACa,EAAaC,CAAO,EACnCR,GACFN,EAAU,KAAKF,EAAc,aAAcQ,EAAU,SAAUA,EAAU,IAAI,CAAC,EAGhF,IAAMH,EAAa,CADJtB,EAAe,SAAUuB,EAAM,SAAUQ,CAAW,CACzC,EACtBF,GACFP,EAAW,KAAKtB,EAAe,gBAAiBuB,EAAM,SAAUO,CAAkB,CAAC,EAErF,IAAM7B,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,gBAAiB,KAAM,KAAK,EACrE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,qBAAsB,KAAM,KAAK,CACvF,EACA,MAAO;AAAA,sBACOW,CAAS;AAAA,gCACCoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,gCAChDoB,EAAY,KAAK,KAAK,KAAKpB,EAAYA,CAAS;AAAA,IAC5Ed,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiB,GAAGkB,EAAW,GAAGG,CAAU,CAAC;AAAA,IACrFxB,EAAa,UAAU,CACjBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMJ,IACQa,GAAaI,EACR;AAAA;AAAA;AAAA,QAKA;AAAA;AAAA,eAIR,CAAC;AAAA,MACNA,EAAe,kEAAoE,EAAE;AAAA,iBAC1EG,EAAY,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAO9B,IACGP,GAAaI,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAQA;AAAA;AAAA,SAIR,CAAC;AAAA,UACFA,EAAe,kFAAoF,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAkBzG,EAEA,MAAO,CACL,KAAM,iBACN,YAAa,CAAC,KAAM,GAAGJ,IAAc,MAAS,IAAIvB,EAAQ,WAAW,GAAI,kBAAAY,CAAiB,EAC1F,WAAY,KAAO,CAAC,QAAAC,EAAS,cAAeF,EAAU,gBAAArB,CAAe,GACrE,gBAAAK,CACF,CACF,EAESzC,GACT,CAAC8C,EAAyBC,EAAe+B,EAAeV,EAAeW,EACtEC,EAA6B/B,EAA+BoB,EAC5DtD,EAA4CmC,EAAiCzC,IAA+B,CAC3G,IAAMwE,EAAcnC,EAAQ,YACtBtB,EACF0B,EAAW,aAAe,QAAa+B,EAAc,EAAI/B,EAAW,mBAAqB,EACvFzB,EAAsBD,EAAqB0B,EAAW,iBAEtDgC,EAAWhC,EAAW,aAAe,QAAa+B,EAAc,GAAKhC,EAAW,CAACF,EAAG+B,EAAG7B,CAAO,EAAI,CAACF,EAAG+B,CAAC,EACzG/D,GACFmE,EAAQ,KAAKnE,CAAoB,EAInC,IAAMoD,EAAQrB,EAAQ,QAClBhD,GACIgD,EAASC,EAAG+B,EAAGG,EAAc,EAAIhC,EAAU,OAAWlC,EAAsBmC,EAAYzC,EACxFe,CAAkB,EACtB,CAAC,OAAQ0D,EAAS,QAAUhC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,GAAI,CAAC,EAAI,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAG5GnC,EAAQ,QACJjD,GACIiD,EAASqB,EAAOjB,EAAW,UAAYA,EAAW,SAAWA,EAAW,eACxEzB,CAAmB,EACvB,CAAC,OAAQ,CAAC0C,CAAK,EAAG,QAAS,CAAC,CAAC,CAAC,EAGlC,IAAMgB,EACDjC,EAAW,aAAe,QAAa+B,EAAc,GAAKZ,EAAa,CAACF,EAAOC,EAAGC,CAAS,EAAI,CAACF,EAAOC,CAAC,EAC7GtB,EAAQ,QACJ/C,GACI+C,EAASqB,EAAOC,EAAGa,EAAc,GAAKZ,EAAYA,EAAY,OAAWnB,EAAY1B,CAAkB,EAC3G,CAAC,OAAQ2D,EAAS,QAAUjC,EAAW,aAAe,QAAa+B,EAAc,EAAK,CAAC,EAAG,CAAC,EAAI,CAAC,CAAC,CAAC,CAAC,CACz
G,EAEEhF,GAAU,CAAC6C,EAAyBI,IAAoC,CAC5E,IAAMyB,EAAc,CAClBzB,EAAW,UACXA,EAAW,SACXA,EAAW,eACXA,EAAW,QACb,EACMkC,EAAIlC,EAAW,eACfmC,EAAInC,EAAW,gBACfoC,EAAIpC,EAAW,SACfM,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKP,EAAW,SAAWM,CAAS,EAC5C,EAAG,KAAK,KAAKN,EAAW,eAAiBM,CAAS,EAClD,EAAGN,EAAW,UAAYA,EAAW,QACvC,EACM1C,EAAS,CAACsC,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,CAAC,EACjEV,EAAoC,CACxC,CAAC,QAAuB,KAAMgD,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EACnG,CAAC,QAAuB,KAAMpC,EAAW,QAAQ,EAAG,CAAC,QAAuB,KAAMA,EAAW,QAAQ,EACrG,CAAC,QAAuB,KAAMA,EAAW,UAAU,EACnD,CAAC,QAAuB,KAAMA,EAAW,WAAaA,EAAW,WAAaA,EAAW,WAAW,CACtG,EAEMT,EAAmBC,GAA+B,CACtD,IAAM6C,EAAU3C,EAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUmE,CAAW,EACpEa,EAAU5C,EAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUmE,CAAW,EACpEc,EAAU7C,EAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUmE,CAAW,EACpEjE,EAAQmD,EAAc,QAASrD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEkF,EAAS7B,EAAc,SAAUrD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACnEI,EAAOiD,EAAc,OAAQrD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/D6B,EAAW3B,EAAM,KAAK,QAEtBmC,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAC7G,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,MAAO,KAAM,KAAK,CACjG,EACA,MAAO;AAAA,sBACWW,CAAS;AAAA,oCACKnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAChCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,sCAClCnB,CAAQ,KAAKmB,EAAYA,CAAS;AAAA,IACpEd,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBnC,EAAOgF,EAAQ9E,EAAM2E,EAASC,EAASC,CAAO,CAAC;AAAA,IACxG/C,EAAa,UAAU,CACrBc,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWanB,CAAQ;AAAA,mBACRA,CAAQ;AAAA,mBACRA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAoCzB,EAEA,OAAOS,EAAQ,QACX,CACE,KAAM,mBACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAM6B,EAAa,SAAU7B,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,CAC5F,EACA,cAAeW,EACf,gBAAArB,CACF,GACA,gBAAAK,CACF,EACA,CAAC,OAAAjC,EAAQ,QAAS,CAAC,GAAI,GAAI,EAAE,CAAC,CAAC,CACrC,EAEaN,GAAY,CAAC4C,EAAyBrC,IAAqC,CACtF,IAAM6D,EAAS1E,GAAwBkD,EAAQ,OAAQrC,CAAU,EAE3D,CAACsC,EAAG+B,EAAGV,CAAC,EAAInE,GAAQ6C,EAASwB,CAAM,EAEzC,OAAOtE,GACH8C,EAASC,EAAG+B,EAAGV,EAAGtB,EAAQ,OAAO,CAAC,EAAG,OAAW,OAAW,OAAWA,EAAQ,OAAO,CAAC,EAAGwB,EAAQ7D,CAAU,CACjH,ICpwBA,IAsBMkF,GAkCAC,GAgFOC,GAGAC,GA3IbC,GAAAC,EAAA,kBAGAC,KAEAC,IAEAC,KACAC,KAGAC,KAWMV,GAAiB,CAACW,EAA+BC,IAA0C,CAC/F,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAME,EAAkB,CAACC,EAA2BC,EAA6BC,IAAoB,CACnG,IAAMC,EAAIF,EAAS,OACnB,GAAIE,IAAMH,EAAO,OACf,MAAM,IAAI,MAAM,GAAGE,CAAO,uBAAuBC,CAAC,EAAE,EAEtDF,EAAS,QAAQ,CAACG,EAAGC,IAAM,CACzB,GAAID,IAAMJ,EAAOK,CAAC,EAChB,MAAM,IAAI,MAAM,GAAGH,CAAO,SAASG,CAAC,gBAAgB,CAExD,CAAC,CACH,EAEA,GAAIR,EAAO,CAAC,EAAE,KAAK,OAAS,EAAG,CAC7B,IAAMS,EAAQR,EAAW,SAAW,OAC/BA,EAAW,QAAUD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EACvBA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EAAE,OAAOA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,CAAC,EACxGA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGC,EAAW,QAAU,EAAI,MAAS,EAC9DC,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,qBAAqB,EAC5DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,iBAAiB,EACxDP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,oBAAoB,EAC3DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,mBAAmB,CAC5D,MACEP,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,qBAAqB,EAC1DE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC
,CAAC,EAAG,iBAAiB,EACtDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,oBAAoB,EACzDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,mBAAmB,CAE5D,EAEMV,GACF,CAACU,EAA+BC,IAAiD,CAC/E,GAAM,CAAC,QAAAS,EAAS,QAAAC,EAAS,OAAAC,CAAM,EAAIX,EAC7BY,EAASb,EAAO,CAAC,EAAE,KACnBc,EAAaH,EAAUI,GAAiBF,EAAOA,EAAO,OAAS,CAAC,CAAC,EAAI,EACrEG,EAAcJ,IAAW,QAAUC,EAAO,OAAS,EAAIC,EAAa,EACpEG,EAAaC,EAAU,KAAKL,CAAM,EAAIC,EAEtCK,EAAoBR,EACpBS,EAAcD,EAAoBN,EAAO,OAASA,EAClDQ,EAAIC,EAAc,IAAKtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMc,CAAU,EACrES,EAAQD,EAAc,QAAStB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC9EQ,EAAOF,EAAc,OAAQtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC5ES,EAAYH,EAAc,YAAatB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACtFU,EAAWJ,EAAc,WAAYtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACpFW,EAAIC,EAAe,IAAK5B,EAAO,CAAC,EAAE,SAAUoB,EAAaN,CAAU,EAGnEe,EAAc,IAAc,CAChC,IAAIC,EAAU,GACd,GAAInB,EACFmB,EAAU,iBACNjB,EAAO,SAAW,EAAM,KACpBD,IAAW,OAAS,iBAAiBC,EAAO,OAAS,CAAC,OAAOC,CAAU,GACnD,kBAAkB,YAE1CF,IAAW,OACbkB,EAAU;AAAA,cACRH,EAAE,WAAW,gBAAiB,IAAK,GAAG,CAAC;AAAA,4BACzBA,EAAE,gBAAgB,eAAe,CAAC,QAC7C,CAELG,EAAU,kBAAkBP,EAAM,KAAK,OAAO;AAAA,qDACLV,EAAO,OAAS,CAAC,KAE1D,QAASL,EAAI,EAAGA,EAAIe,EAAM,KAAMf,IAC9BsB,GAAW,YAAYtB,CAAC,qBAAqBA,CAAC,KAEhDsB,GAAW,iBAAiBP,EAAM,gBAAgB,UAAU,CAAC,GAC/D,CAEF,OAAOO,CACT,EACMC,EAAgCC,GAAyB;AAAA,oBACjDtB,CAAO;AAAA,IACvBsB,EAAO,gBAAgB,aAAc,KAAK,EAAE,iBAAiBX,EAAGE,EAAOC,EAAMC,EAAWC,EAAUC,CAAC,CAAC;AAAA,IACpGK,EAAO,UAAU,CAAC;AAAA,IAClBA,EAAO,sCAAsC,qBAAqB,CAAC;AAAA,0BAC7CL,EAAE,gBAAgB,gBAAgBb,CAAU,EAAE,CAAC;AAAA,MACnEe,EAAY,CAAC;AAAA,kBACDN,EAAM,YAAY,SAAS,CAAC;AAAA,iBAC7BC,EAAK,YAAY,SAAS,CAAC;AAAA,sBACtBC,EAAU,YAAY,SAAS,CAAC;AAAA,qBACjCC,EAAS,YAAY,SAAS,CAAC;AAAA,cACtCL,EAAE,YAAY,YAAY,CAAC;AAAA;AAAA,MAEnCM,EAAE,YAAY,aAAc,OAAO,CAAC;AAAA,KAEpC,MAAO,CACL,KAAM,qBACN,YAAa,CACX,KAAM,GAAG1B,EAAW,OAAO,IAAIA,EAAW,MAAM,IAAIU,CAAO,IAAIG,CAAU,GACzE,kBAAmBK,EAAoB,CAAC,OAAQ,OAAQ,OAAQ,OAAQ,MAAM,EAAI,MACpF,EACA,gBAAiBY,EACjB,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM/B,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKiB,EAAa,EAAuB,CAAC,EAClE,gBAAiBE,EACb,CACE,CAAC,QAAuB,KAAMF,CAAU,EACxC,GAAGgB,EAA2BpB,CAAM,CACtC,EACA,CACE,CAAC,QAAuB,KAAMI,CAAU,CAC1C,CACN,EACF,CACF,EAES1B,GAA4BU,GACrCiC,GAA4BjC,CAAoE,EAEvFT,GAAY,CAAC2C,EAAyBlC,IAA8C,CAC/F,GAAM,CAAC,OAAAD,EAAQ,YAAAoC,CAAW,EAAID,EACxBE,EAAoB9C,GAAyB,CAAC,GAAGU,EAAY,YAAAmC,CAAW,CAAC,EAI/E,GAHIE,GAAI,OAAO,sBACbjD,GAAeW,EAAQqC,CAAiB,EAEtCpC,EAAW,aACb,MAAM,IAAI,MAAM,uDAAuD,EAEvEkC,EAAQ,QAAQ7C,GAAoCU,EAAQqC,CAAiB,CAAC,CAElF,ICtJA,IASME,GAkBAC,GAkCOC,GA7DbC,GAAAC,EAAA,kBAIAC,KAGAC,KAEMN,GAAkBO,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,IAAK,IAAK,IAAI,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC9C,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMN,GAA4BM,GAA+C,CAC/E,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAExBE,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAE3BG,EAAaC,EAAU,KAAKH,CAAW,EAAI,EAE3CI,EAAWL,EAAO,CAAC,EAAE,SACrBM,EAAQC,EAAc,QAASF,EAAUJ,EAAa,CAAC,EACvDO,EAAOD,EAAc,OAAQF,EAAU,CAACH,CAAQ,EAAG,CAAC,EACpDO,EAAWF,EAAc,WAAYF,EAAUJ,EAAa,CAAC,EAC7DS,EAASC,EAAe,SAAUN,EAAUJ,EAAa,CAAC,EAahE,MAAO,CACL,KAAM,UACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,CACpE,GACA,gBAjBuBS,GAA+B;AAAA,qBACrCV,CAAQ;AAAA,IACzBU,EAAa,iBAAiBN,EAAOE,EAAMC,EAAUC,CAAM,CAAC;AAAA;AAAA,IAE5DE,EAAa,UAAU,CAAC;AAAA,MACtBA
,EAAa,sCAAsCT,CAAU,CAAC;AAAA,kBAClDG,EAAM,YAAY,YAAY,CAAC;AAAA,UACvCE,EAAK,YAAY,uBAAuB,CAAC,MAAMC,EAAS,YAAY,YAAY,CAAC;AAAA,MACrFC,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAU7C,CACF,EAEaf,GAAWkB,GAAkC,CACxDpB,GAAeoB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQnB,GAAyBmB,EAAQ,MAAM,CAAC,CAC1D,IChEA,IAeMC,GA4BAC,GAiBOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAGAC,GASAC,GAIAC,GA8BPC,GAMOC,GAaAC,GAIAC,GAIAC,GAQAC,GAGAC,GAgBAC,GAcAC,GAKAC,GAIAC,GAIAC,GAMAC,GAOAC,GAIAC,GAIAC,GAIAC,GAMAC,GASAC,GAMAC,GASAC,GAIAC,GAIAC,GAIAC,GAIAC,GAEAC,GAKAC,GAUAC,GAGAC,GAOAC,GAQAC,GAIAC,GAmBAC,GAEAC,GAlVbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMlD,GACF,CAACmD,EAA4BC,EAAkBC,EAAuBC,EACrEC,EAAmCC,IAA8C,CAChF,IAAMC,EAAU,KAAK,KAAKL,EAAW,CAAC,EAElCM,EAAa,GACb,OAAOH,GAAa,SACtBG,EAAa,GAAGH,CAAQ,MAExBG,EAAaH,EAAS,GAAG,EAG3B,IAAMI,EAAQC,EAAc,YAAaP,EAAe,CAACI,CAAO,EAAG,CAAC,EAC9DI,EAASC,EAAe,aAAcR,EAAgB,CAACG,CAAO,EAAG,CAAC,EAExE,MAAO;AAAA,QACLN,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBQ,EAAOE,CAAM,CAAC;AAAA;AAAA,IAEnFL,GAA4B,EAAE;AAAA;AAAA,IAE9BL,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA;AAAA,cAE/DQ,EAAM,YAAY,YAAY,CAAC;AAAA,MACvCE,EAAO,YAAY,aAAcH,CAAU,CAAC;AAAA,IAE9C,EAEEzD,GACF,CAAC0D,EAAmBI,EAAcR,EAAmCC,EACpEQ,EAAmBV,EAAyBK,EAAM,YAA2B,CAC5E,KAAAI,EACA,YAAa,CAAC,KAAMC,EAAU,kBAAmB,CAAC,MAAM,CAAC,EACzD,gBAAiBb,GAAgBnD,GAC7BmD,EAAcc,EAAU,KAAKN,EAAM,IAAI,EAAGA,EAAM,SAAUL,EAAgBC,EAAUC,CAAwB,EAChH,WAAaU,IAAkB,CAC7B,QAAS,CAAC,CAAC,KAAMP,EAAM,KAAM,SAAUL,CAAc,CAAC,EACtD,cACI,CAAC,EAAG,KAAK,KAAKW,EAAU,KAAKC,EAAa,CAAC,EAAE,IAAI,EAAI,GAA0B,CAAgB,CAAC,EACpG,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKD,EAAU,KAAKN,EAAM,IAAI,EAAI,CAAC,CAAC,CACzE,CACF,EACF,GAESzD,GAAOiE,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahE,GAAQgE,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa/D,GAAS+D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa9D,GAAQ8D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa7D,GAAS6D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa5D,GAAQ4D,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EACa3D,GAAS2D,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAOa1D,GAAuB2D,GAChCC,GAA4BD,CAA0B,EAG7C1D,GAAO,CAACyD,EAAyBC,IAAqC,CACjF,IAAIE,EACJ,OAAQF,EAAW,GAAI,CACrB,QACEE,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,QACEA,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,OACEA,EAAO,aACP,MACF,QACE,MAAM,IAAI,WAAW,0EAA0EF,EAAW,EAAE,EAAE,CAClH,CACAD,EAAQ,QACJlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQG,EAAM,OAAWF,EAAW,SAAUA,EAAW,EAAE,CAAC,CAClH,EAOMzD,GAAoC4D,GAAkD,CAC1F,IAAMC,EAAOD,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAIE,GACtFC,EAAOH,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAII,GAC5F,OAAON,GAA4B,CAAC,IAAAG,EAAK,IAAAE,CAAG,CAAC,CAC/C,EAEa9D,GAAO,CAACuD,EAAyBS,IAAyC,CACrF,IAAMR,EAAaD,EAAQ,OAAO,SAAW,EAAIS,EAAiBjE,GAAiCwD,EAAQ,MAAM,EAC3GU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QACJlE,GACIkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,0BAA2B;AAAA,4BACnDF,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,4BAC9CS,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,EAEhEA,EAAW,QAAQ,EACvB,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEavD,GAAQsD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEarD,GAAOqD,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEapD,GAAQoD,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAManD,GAAwBoD,GACjCC,GAA4BD,CAA6B,EAEhDnD,GAAM,CAACkD,EAAyBC,IAAsC,CACjF,IAAMS,EAAW
C,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK;AAAA,uBAChCF,CAAQ,IAAIT,EAAW,KAAK;AAAA;AAAA,kBAEjCS,CAAQ,QAAQA,CAAQ;AAAA;AAAA;AAAA;AAAA,wBAIlBA,CAAQ,cAAcA,CAAQ;AAAA;AAAA,KAGhDT,EAAW,QAAQ,CAAC,CAC1B,EAEalD,GAAU,CAAC8D,EAAU,QAAU;AAAA,YAChCA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA;AAAA,sBAEGA,CAAO,cAAcA,CAAO;AAAA;AAAA;AAAA;AAAA,GAMrC7D,GAAOgD,GAAkC,CACpD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK7D,GAAQ2D,CAAQ,CAAC,CAAC,CAClH,EAEazD,GAAO+C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa9C,GAAS8C,GAAkC,CACtDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa7C,GAAQ6C,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,sBAAsBA,CAAC,0BAA2B7D,GAAQ2D,CAAQ,CAAC,CAAC,CACpH,EAEatD,GAAY,CAAC4C,EAAyBC,IAAsC,CACvF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaY,GAAK,8BAA8BA,CAAC,KAAKA,CAAC,KAAKA,CAAC,YAAYF,CAAQ,UACpG,6BAA6BA,CAAQ,IAAIT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,CACzF,EAEa5C,GAAO2C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEatD,GAAO0C,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEarD,GAAcyC,GAAkC,CAC3DA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,aAAcY,GAAK,OAAOA,CAAC,EAAE,CAAC,CAChG,EAEapD,GAAQwC,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,WAAWF,CAAQ,SAAS,CAAC,CAC5G,EAEajD,GAAWuC,GAAkC,CACxDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,UAAWY,GAAK,sBAAsBA,CAAC,KAAK,CAAC,CAC/G,EAOalD,GAA8BuC,GACvCC,GAA4BD,CAG3B,EAEQtC,GAAc,CAACqC,EAAyBC,IAA4C,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,cACnBY,GAAK,YAAYF,CAAQ,oBAAoBA,CAAQ,WAAWT,EAAW,KAAK,MAAMW,CAAC,WAAWF,CAAQ,KACtGT,EAAW,IAAI,MACnB,OAAWA,EAAW,QAAQ,CAAC,CACrC,EAEarC,GAAOoC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEanC,GAAQmC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEalC,GAAQkC,GAAkC,CACrDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEajC,GAAOiC,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahC,GAAkB4C,GAAc,QAAQA,CAAC,yBAAyBA,CAAC,2BAA2BA,CAAC,MAE/F3C,GAAQ+B,GAAkC,CAErDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,OAAQhC,EAAc,CAAC,CACzF,EAEaE,GAAe,CAAC2C,EAAU,QAAU;AAAA,qBAC5BA,CAAO;AAAA,qBACPA,CAAO;AAAA,qBACPA,CAAO;AAAA;AAAA,oBAERA,CAAO,cAAcA,CAAO;AAAA,WACrC7C,GAAe,GAAG,CAAC;AAAA;AAAA,EAIjBG,GAAsB2C,GAC/B,uCAAuCA,CAAC,qBAAqBA,CAAC,MAAMA,CAAC,uBAAuBA,CAAC,GAEpF1C,GAAY4B,GAAkC,CACzD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,WAAY7B,GAAoBD,GAAawC,CAAQ,EAAG,OAC3EV,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,EAEa3B,GAAkB,CAAC2B,EAAyBC,IAAwC,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrE,OAAAA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,kBAAmBY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,8BACpF,wCAAwCF,CAAQ,KAAKT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,EAC5F,CACT,EAEa3B,GAAO0B,GAAkC,CACpDA,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEazB,GAAgB,CAACsC,EAAiBE,IAAkB;AAAA,qBAC5CF,CAAO,KAAKE,CAAK;AAAA,cACxBF,CAAO;AAAA,eACNA,CAAO;AAAA;AAAA,6BAEOA,CAAO,cAAcA,CAAO;AAAA;AAAA,kBAEvCA,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA,EAYZrC,GAAuBsC,GAAc,mBAAmBA,CAAC,IAEzDrC,GAAY,CAACuB,EAAyBC,IAAsC,CACvF,IAAMe,EAAQL,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAClEA,EAAQ,QAAQlE,GACZkE,EAAQ,OAAO,CAAC,EAAG,YAAaxB,GAAqBD,GAAcyC,EAAOf,EAAW,KAAK,EAAGA,EAAW,SACxGD,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,ICvVA,IAUMiB,GAkBAC,GAyCOC,GArEbC,GAAAC,EAAA,kBAIAC,KAGAC,KACAC,KAEMP,GAAkBQ,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,KAAM,KAAM,KAAK,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EACjD,MAAM,IAAI,MAAM,4CAA4C,EAG9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMP,GAAkCO,GAA+C,CACrF,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAAK,MAAM,EACzCC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAI,EAElC,IAAMC,EAAQC,EAAc,QAASH,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM,CAAC,EACpEI,EAAOD,EAAc,OAAQH,EAAO,CAAC,EAAE,SAAU,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAAG,CAAC,EACvEK,EAASC,EAAe,SAAUN,EAAO,CAAC,EAAE,SAAUC,EAAa,CAAC,EAEpEM,EAAaC,EAAU,KAAKP,CAAW,EAAI,EAC3CQ,EAAWC,GAA4BV,EAAO,CAAC,EAAE,QAAQ,EAsB/D,MAAO,CACL,KAAM,gBACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKO,EAAa,EAAuB,CAAC,CACpE,GACA,gBA1BuBI,GAA+B;AAAA;AAAA,yBAEjCX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,EAAI,CAAC;AAAA;AAAA,IAE9CW,EAAa,iBAAiBT,EAAOE,EAAMC,CAAM,CAAC;AAAA;AAAA,IAElDO,GAAQH,CAAQ,CAAC;AAAA;AAAA,IAEjBE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCJ,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQ9DF,EAAO,YAAY,aAAc,uBAAuB,CAAC;AAAA,IAU7D,CACF,EAEaX,GAAiBmB,GAAkC,CAC9DrB,GAAeqB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQpB,GAA+BoB,EAAQ,MAAM,CAAC,CAChE,ICxEA,IAiBMC,GAqGAC,GAsEAC,GAQOC,GAIAC,GAIAC,GAMAC,GAIAC,GAsBAC,GAIAC,GAMAC,GAMAC,GAMAC,GAlQbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KASMjB,GACF,CAACkB,EAA4BC,EAA0BC,EAA0BC,EAChFC,EAAoBC,EAAsBC,EAAsCC,EAChFC,EAAeC,EAAeC,EAAoBC,IAAsC,CACvF,IAAIC,EACAC,EACA,OAAON,GAAa,SACtBK,EAAmBC,EAAmB,CAACC,EAAGC,IAAM,GAAGR,CAAQ,KAAKO,CAAC,MAAMC,CAAC,KAC/D,OAAOR,GAAa,WAC7BK,EAAmBC,EAAmBN,GAEtCK,EAAmBL,EAAS,OAC5BM,EAAmBN,EAAS,QAG9B,IAAMS,EAASC,EAAe,aAAcP,EAAYP,EAAW,OAAQ,CAAC,EACtEW,EAAII,EAAc,QAASV,EAAOP,EAAM,OAAQ,CAAC,EACjDc,EAAIG,EAAc,QAAST,EAAOP,EAAM,OAAQ,CAAC,EAEnDiB,EACJ,GAAIf,EACF,GAAIC,EAAa,CACf,IAAMe,EAAgBC,EAAU,KAAKpB,CAAK,IAAM,EAC1CqB,EAAgBD,EAAU,KAAKnB,CAAK,IAAM,EAC1CqB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC3EuB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC7EkB,GAAiBE,EACnBH,EAAaH,EAAO,YAChB,aACAH,EACIO,EAAgB,GAAGN,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,EACvFQ,EAAgB,GAAGP,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,CAAC,CAAC,EAEjGI,EAAa;AAAA,kCACSH,EAAO,gBAAgB,iBAAiB,CAAC;AAAA,4BAC/CF,EAAE,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,4BACrDD,EAAE,2BAA2B,gBAAiBC,CAAM,CAAC;AAAA,cAEjEA,EAAO,YACH,aACAH,EACIP,GAA+BiB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,kBACpDR,GAA+BkB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAAA,WAGvF,MACEI,EAAaH,EAAO,YAChB,aAAcH,EAAiBC,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAEzF,CACL,GAAI,CAACV,EACH,MAAM,IAAI,MAAM,sFAAsF,EAGxG,IAAMoB,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,eAAeF,CAAC,eAAeA,CAAC,IAC9CG,EAAc,eAAeH,CAAC,eAAeA,CAAC,IACpD,MAAO;AAAA,+BACcA,CAAC,MAAMX,EAAO,gBAAgB,qBAAqBW,CAAC,GAAG,CAAC;AAAA,yBAC9DA,CAAC,MAAMb,EAAE,2BAA2B,gBAAgBa,CAAC,GAAIX,CAAM,CAAC;AAAA,yBAChEW,CAAC,MAAMZ,EAAE,2BAA2B,gBAAgBY,CAAC,GAAIX,CAAM,CAAC;AAAA,wBACjEW,CAAC,aAAaA,CAAC;AAAA,wBACfA,CAAC,aAAaA,CAAC;AAAA,4BACXA,CAAC,aAAaA,CAAC;AA
AA,4BACfA,CAAC,aAAaA,CAAC;AAAA,cAC7BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIhB,EAAiBiB,EAAaC,CAAW,CAAC;AAAA,WAE9E,EACIpB,IAAe,EACjBS,EAAa;AAAA;AAAA,cAETM,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,uGAGtCN,EAAa;AAAA,cACTM,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,WAGrD,CAEA,MAAO;AAAA,UACHzB,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBc,EAAGC,EAAGC,CAAM,CAAC;AAAA;AAAA,UAE9EL,GAA4B,EAAE;AAAA;AAAA,UAE9BX,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEmB,CAAU;AAAA,QAEhB,EAEEpC,GACF,CAACgD,EAAcC,EAAkBlB,EAAeC,EAAeR,EAC9DI,EAAmCsB,EAAyBnB,EAAE,WAA0B,CACvF,IAAMoB,EAAc,CAACb,EAAU,SAASP,EAAE,KAAMC,EAAE,IAAI,EAClDoB,EAAcrB,EAAE,KAChBsB,EAAaf,EAAU,KAAKP,EAAE,IAAI,EAElCV,EAAY,GACZE,EAA8B,GAG5B+B,EAAc,CAACH,CAAW,EAChC,GAAIA,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUzB,EAAE,KAAMC,EAAE,KAAM,EAAK,EACrE,GAAI,CAACuB,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEH,EAAcG,EACdF,EAAaf,EAAU,KAAKc,CAAW,EACvC,IAAMf,EAAgBC,EAAU,KAAKP,EAAE,IAAI,IAAM,EAC3CQ,EAAgBD,EAAU,KAAKN,EAAE,IAAI,IAAM,EAC3CQ,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EAC9EU,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EACpFsB,EAAY,KAAKjB,CAAa,EAC9BiB,EAAY,KAAKf,CAAa,EAC9Be,EAAY,KAAKd,CAAoB,EACrCc,EAAY,KAAKb,CAAoB,EAErC,IAAIgB,EAAkB,EACtB,QAASC,EAAI,EAAGA,EAAIN,EAAY,OAAQM,IAAK,CAC3C,IAAMC,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS2B,CAAC,GAAK,EACpCE,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS0B,CAAC,GAAK,EAC1C,GAAIC,IAASC,EACXH,GAAmBE,MAEnB,MAEJ,CACIF,EAAkB,IAAM,GAC1BlC,EAA8B,GAC9BF,EAAY,KACHgB,GAAiBE,GAAiBC,GAAwBC,KACnEpB,EAAY,GAEhB,MAEEA,EAAY,GAEd,OAAAiC,EAAY,KAAKjC,CAAS,EAEnB,CACL,KAAA2B,EACA,YAAa,CACX,KAAMC,EAAWK,EAAY,IAAKV,GAAMA,EAAE,SAAS,CAAC,EAAE,KAAK,GAAG,EAC9D,kBAAmB,CAAC,OAAQ,MAAM,CACpC,EACA,gBAAkB3B,GAAiBlB,GAC/BkB,EAAcc,EAAE,KAAMC,EAAE,KAAMoB,EAAa/B,EAAW8B,EAAa5B,EAA6BC,EAChGO,EAAE,SAAUC,EAAE,SAAUkB,EAAgBtB,CAAwB,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAsB,CAAC,EAC3F,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKf,EAAU,KAAKc,CAAW,EAAI,CAAC,CAAC,EACxE,GAAGS,EAA2B9B,EAAE,KAAMC,EAAE,KAAMoB,CAAW,CAC3D,CACF,EACF,CACF,EAEEnD,GACF,CAAC6D,EAAyBd,EAAcxB,EAA8BI,EACrEqB,EAAmBC,IAAkC,CACpDY,EAAQ,QAAQ9D,GACZgD,EAAMC,GAAY,GAAIa,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGtC,EAAUI,EACtEsB,CAAc,CAAC,CACrB,EAEShD,GAAO4D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa7B,GAAO2D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa5B,GAAS0D,GAAkC,CACtD7D,GACI6D,EAAS,QAAU,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEa3B,GAAOyD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa1B,GAAOwD,GAAkC,CACpD,IAAMC,EAAO5B,EAAc,QAAS2B,EAAQ,OAAO,CAAC,EAAE,SAAUA,EAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,MAE7F7D,GACI6D,EAAS,MAAQ,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,cAAcD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,qBAAqBD,CAAC,IAAIC,CAAC,GAAG,EAC7G;AAAA,wBACkB+B,CAAI,SAASA,CAAI,QAAQA,CAAI;AAAA,iBACpCA,CAAI;AAAA,iBACJA,CAAI;AAAA,uBACEA,CAAI;AAAA,iBACVA,CAAI;AAAA;AAAA,+BAEUA,CAAI,6BAA6BA,CAAI,qBAAqBA,CAAI,IAV1EA,IAAS,MAAQ,QAAU,EAW5B;AAAA;AAAA,oCAEkBA,CAAI,eAAeA,CAAI,cAAcA,CAAI;AAAA;AAAA,oBAEzDA,CAAI;AAAA;AAAA,OAEjB,CACP,EAEaxD,GAAOuD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEaxB,GAAWsD,GAAkC,CACxD7D,GACI6D,EAAS,UA
AY,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEavB,GAAQqD,GAAkC,CACrD7D,GACI6D,EAAS,OAAS,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACnG,QAAwB,CAC9B,EAEatB,GAAkBoD,GAAkC,CAC/D7D,GACI6D,EAAS,iBAAmB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAC3G,OAAW,QAAwB,CACzC,EAEarB,GAAemD,GAAkC,CAC5D7D,GACI6D,EAAS,cAAgB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EACxG,OAAW,QAAwB,CACzC,ICtQA,IAeMgC,GA4BAC,GAWAC,GAmBAC,GAkEOC,GAcAC,GAzJbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMX,GAAiB,CAACY,EAA+BC,IAAuB,CAC5E,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,IAAME,EAAiB,EACjBC,EAAiBH,EAAOE,CAAc,EACtCE,EAAYD,EAAe,SAC3BE,EAAYF,EAAe,KAAK,OACtCH,EAAO,QAAQ,CAACM,EAAOC,IAAM,CAC3B,GAAIA,IAAML,EAIV,IAAII,EAAM,WAAaF,EACrB,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIE,EAAM,KAAK,SAAWD,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAE5DC,EAAM,KAAK,QAAQ,CAACE,EAAKD,IAAM,CAC7B,GAAIA,IAAMN,GAAQO,IAAQL,EAAe,KAAKI,CAAC,EAC7C,MAAM,IAAI,MAAM,kCAAkC,CAEtD,CAAC,EACH,CAAC,CACH,EAEMlB,GAA0B,CAACoB,EAAyBC,IAAwC;AAAA;AAAA,wCAE1DD,CAAe,MAAMC,CAAmB;AAAA,gCAChDD,CAAe;AAAA;AAAA;AAAA;AAAA;AAAA,aAKlCA,CAAe;AAAA,KAGtBnB,GAAmB,CAACU,EAAkCW,IAA0B,CACpF,IAAMF,EAAkBT,EAAO,OAEzBY,EAAsB,CAAC,EAC7B,QAASL,EAAI,EAAGA,EAAIE,EAAiB,EAAEF,EAAG,CACxC,IAAMM,EAAgBF,EAAO,YAAY,aAAcX,EAAOO,CAAC,EAAE,aAAa,SAAS,CAAC,EACpFE,IAAoB,EACtBG,EAAU,KAAKC,CAAa,EACnBN,IAAM,EACfK,EAAU,KAAK,qBAAqBL,CAAC,QAAQM,CAAa,IAAI,EACrDN,IAAME,EAAkB,EACjCG,EAAU,KAAK,UAAUC,CAAa,IAAI,EAE1CD,EAAU,KAAK,0BAA0BL,CAAC,OAAOM,CAAa,IAAI,CAEtE,CACA,OAAOD,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEMrB,GACF,CAACS,EAA+Bc,EAAsBC,EAAuBC,IAAoC,CAC/G,IAAMC,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAmB,IAAI,MAAcnB,EAAO,MAAM,EAClDoB,EAAY,IAAI,MAAqBpB,EAAO,MAAM,EAEpDqB,EAAc,EACZC,EAAwD,CAAC,EACzDC,EAAa,CAAC,EACdC,EAAoC,CAAC,CAAC,QAAuB,KAAMP,CAAU,CAAC,EACpF,QAASV,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCc,GAAerB,EAAOO,CAAC,EAAE,KAAKO,CAAY,EAC1CK,EAAiBZ,CAAC,EAAIc,EACtBE,EAAW,KAAKvB,EAAOO,CAAC,EAAE,KAAK,MAAM,EACrCa,EAAUb,CAAC,EAAIkB,EAAc,QAAQlB,CAAC,GAAIS,EAAUO,EAAWhB,CAAC,CAAC,EACjEe,EAAkB,KAAK,MAAM,EAC7BE,EAAgB,KAAK,CAAC,QAAuB,KAAML,EAAiBZ,CAAC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCiB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAOO,CAAC,EAAE,IAAI,CAAC,EAEpEiB,EAAgB,KAAK,GAAGE,EAA2BX,CAAW,CAAC,EAE/D,IAAMJ,EAASgB,EAAe,SAAUX,EAAUD,EAAY,MAAM,EAC9Da,EAAcjB,EAAO,WAAW,UAAWG,CAAY,EACvDJ,EACF,MAAM,KAAK,MAAMS,EAAiB,MAAM,EAAE,KAAK,CAAC,EAAE,IAAIZ,GAAK,4BAA4BA,CAAC,EAAE,EAAE,KAAK,GAAG,EAClGsB,EAAmBC,GAA+B;AAAA;AAAA,KAEzD,IAAM,CACHA,EAAa,gBAAgB,aAAc,KAAK,EAChD,QAASvB,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjCuB,EAAa,gBAAgB,mBAAmBvB,CAAC,GAAI,KAAK,EAE5D,OAAOuB,EAAa,iBAAiB,GAAGV,EAAWT,CAAM,CAC3D,GAAG,CAAC;AAAA;AAAA,IAENtB,GAAwB8B,EAAiB,OAAQT,CAAmB,CAAC;AAAA;AAAA,IAErEoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DnB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,2CAEbiB,CAAW;AAAA;AAAA,0CAEZT,EAAiB,MAAM,MAAMT,CAAmB;AAAA,QAClFkB,CAAW;AAAA;AAAA;AAAA,MAGbtC,GAAiB8B,EAAWT,CAAM,CAAC;AAAA,KAGnC,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGG,CAAY,GAAI,kBAAAQ,CAAiB,EACxD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAa,SAAAC,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKC,EAAa,EAAuB,CAAC,EAClE,gBAAAO,CACF,GACA,gBAAAK,CACF,CACF,EAESrC,GAAS,CAACuC,EAAyBC,IAAuC,CACrF,IAAMhC,EAAS+B,EAAQ,OACjBE,EAAajC,EAAO,CAAC,EAAE,KACvBc,EAAeI,EAAU,cAAcc,EAAW,KAAMC,EAAW,MAAM,EAC/E7C,GAAeY,EAAQc,CAAY,EACnC,IAAMC,EAAckB,EAAW,MAAM,EACrCl
B,EAAYD,CAAY,EACpBd,EAAO,OAAO,CAACkC,EAAK5B,IAAU4B,GAAO5B,EAAM,KAAK,OAASQ,EAAeR,EAAM,KAAKQ,CAAY,EAAI,GAAI,CAAC,EAE5G,IAAMqB,EAAiBnC,EAAO,OAAOM,GAASY,EAAU,KAAKZ,EAAM,IAAI,EAAI,CAAC,EAC5EyB,EAAQ,QACJxC,GAAwB4C,EAAgBrB,EAAcC,EAAaf,EAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQmC,CAAc,CAAC,CACtH,EAEa1C,GAAyBuC,GAClCI,GAA4B,CAAC,KAAMJ,EAAW,IAAc,CAAC,IC1JjE,IAiBaK,GAuBAC,GAaAC,GAUAC,GA/DbC,GAAAC,EAAA,kBAGAC,IACAC,KAaaP,GACT,CAACQ,EAA0CC,EAAmBC,EAAW,QAAkB,CACzF,OAAQF,EAAW,WAAY,CAC7B,IAAK,OACH,MAAO,sBAAsBC,CAAS,UACxC,IAAK,UACH,MAAO,YAAYA,CAAS,YAAYA,CAAS,yBACnD,IAAK,OACH,MAAO,wBAAwBA,CAAS,IAAIC,CAAQ,yBAAyBD,CAAS,IAClFC,CAAQ,yBACd,IAAK,cACH,MAAO,eAAeD,CAAS,cAAcA,CAAS,UAAUC,CAAQ,8BACpEA,CAAQ,qBACd,IAAK,YACH,MAAO,kBAAkBA,CAAQ,6CAA6CD,CAAS,UACzF,IAAK,GACH,MAAO,GAET,QACE,MAAM,IAAI,MAAM,0BAA0BD,EAAW,UAAU,EAAE,CACrE,CACF,EAESP,GACT,CAACO,EAA0CG,IAAqC,CAC1EH,EAAW,aAAe,OAC5BG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,OAAQ,EAAG,CAAC,OAAsB,KAAMA,EAAW,OAAQ,CAAC,EAC/FA,EAAW,aAAe,cACnCG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,KAAM,EAAG,CAAC,OAAsB,KAAMA,EAAW,IAAK,CAAC,EAC1FA,EAAW,aAAe,aACnCG,EAAe,KAAK,CAAC,OAAsB,KAAMH,EAAW,KAAM,CAAC,CAEvE,EAESN,GAA2B,CAACM,EAA0CI,IAAgC,CAC7GJ,EAAW,aAAe,OAC5BI,EAAS,KAAK,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,WAAY,KAAM,KAAK,CAAC,EACrEJ,EAAW,aAAe,cACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAAC,EAC9DJ,EAAW,aAAe,aACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAE9C,EAEaT,GACRK,GAAgF,CAC/E,IAAMK,EAAaL,GAAY,YAAwB,GACvD,GAAIK,IAAe,cAAe,CAChC,GAAM,CAACC,EAAOC,CAAI,EAAIP,GAAY,mBAAyC,CAAC,GAAK,EAAG,EACpF,MAAO,CAAC,WAAAK,EAAY,MAAAC,EAAO,KAAAC,CAAI,CACjC,SAAWF,IAAe,OAAQ,CAChC,GAAM,CAACG,EAASC,CAAO,EAAIT,GAAY,mBAAyC,CAACU,GAAUC,EAAQ,EACnG,MAAO,CAAC,WAAAN,EAAY,QAAAI,EAAS,QAAAD,CAAO,CACtC,SAAWH,IAAe,YAAa,CACrC,GAAM,CAACC,CAAK,EAAIN,GAAY,mBAAiC,CAAC,GAAI,EAClE,MAAO,CAAC,WAAAK,EAAY,MAAAC,CAAK,CAC3B,CACA,MAAO,CAAC,WAAAD,CAAU,CACpB,IC7EJ,IAqBaO,GAeAC,GApCbC,GAAAC,EAAA,kBAqBaH,GAAc,CAACI,EAAmBC,IAAqB,CAClE,OAAQD,EAAW,CACjB,IAAK,GACH,OAAOC,EACT,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,QACE,MAAM,IAAI,MAAM,GAAGD,CAAS,8BAA8B,CAC9D,CACF,EAEaH,GAAeK,GAA6B;AAAA,QACjDA,EAAU,iDAAmD,EAAE;UCrCvE,IAqBaC,GArBbC,GAAAC,EAAA,kBAqBaF,GAAiBG,GAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAO3CA,CAAS,YAAYA,CAAS,YAAYA,CAAS;AAAA;IC5B7D,IA8BMC,GAiBAC,GAyBOC,GAuFPC,GAiBAC,GAKOC,GA0JPC,GA8EOC,GA7ZbC,GAAAC,EAAA,kBAqBAC,IAEAC,KAEAC,KACAC,KAEAC,KAEMd,GAA6B,CAACe,EAAoBC,IAClDD,EACK;AAAA;AAAA;AAAA,wDAG6CC,EAAY,iBAAmB,EAAE;AAAA,UAI9E;AAAA;AAAA;AAAA,gDAGqCA,EAAY,iBAAmB,EAAE;AAAA,UAK3Ef,GAAyB,CAACgB,EAAqBC,IAC/CD,EACK;AAAA;AAAA;AAAA;AAAA,UAIDC,IAAqB,EAAI,GAAK,6DAA6D;AAAA;AAAA;AAAA;AAAA;AAAA,YAKzFA,IAAqB,EAAI,GAAK,2CAA2C;AAAA,WAG1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMCA,IAAqB,EAAI,GAAK,yCAAyC;AAAA,WAKtEhB,GACT,CAACiB,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,KAAe,CACpF,IAAMC,EAAaL,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CO,EAAaN,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CQ,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EACtCP,EAAmBS,EAAaP,EAAc,CAAC,EAC/CS,EAAgBP,EAAYF,EAAc,CAAC,EAEjD,GAAI,GAAIH,GAAcC,IAAqB,GAAKC,EAAc,CAAC,IAAM,GAC7D,CAACF,IAAeC,IAAqB,GAAKA,IAAqB,KACjES,EAAaP,EAAc,CAAC,IAAM,GAAKE,EAAYF,EAAc,CAAC,IAAM,GAAKD,EAAc,CAAC,IAAM,GACtG,MAAM,IAAI,MAAM,iBAAiBF,CAAU,8BACvCC,CAAgB,yBAAyBC,EAAc,CAAC,CAAC;AAAA,oCACjCD,CAAgB;AAAA,eACrCS,CAAU,yCAAyCP,EAAc,CAAC,CAAC,eACtEE,CAAS,0CAA0CF,EAAc,CAAC,CAAC,kBACnED,EAAc,CAAC,CAAC,aAAa,EAEnC,MAAO;AAAA,yCAC4BD,CAAgB,IAAIG,CAAI,MAAMM,EAAaT,CAAgB,MAAMU,CAAU;AAAA,2CACzEP,CAAI,MAAMK,EAAaP,EAAc,CAAC,CAAC,MAAMG,CAAS;AAAA;AAAA,uBAE1EH,EAAc,CAAC,CAAC;AAAA
,uBAChBA,EAAc,CAAC,CAAC;AAAA,2BACZD,CAAgB;AAAA,oBACvBI,CAAS;AAAA;AAAA,2BAEFF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAUrEG,EAAS,IAAM,iBAAiB;AAAA,IAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,8CACvCS,CAAU;AAAA;AAAA,oBAEpCF,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,iBACpGC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,wBAE9CH,CAAI;AAAA;AAAA;AAAA,8BAGEQ,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAM/B7B,GAA2BiB,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,0CAInBa,CAAa;AAAA;AAAA;AAAA,sFAI7Cb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAU/BE,IAAqB,EAAI,GAAK,4DAA4D;AAAA;AAAA,YAE1FjB,GAAuBgB,EAAYC,CAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5D,EAEEf,GAAyB,CAACY,EAAoBC,IAC9CD,EACK;AAAA;AAAA;AAAA,yCAG8BC,EAAY,iBAAmB,EAAE;AAAA,cAI/D;AAAA;AAAA;AAAA,iCAGsBA,EAAY,iBAAmB,EAAE;AAAA,cAK5DZ,GAA2Ba,GAC7BA,EAAa,gDAAkD,gDAItDZ,GACT,CAACc,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,GACtEM,EAA4B,KAAkB,CAC7C,IAAML,EAAaN,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CM,EAAaP,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CO,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EAE5C,GAAI,EAAEG,EAAaR,EAAc,CAAC,IAAM,GAAKO,EAAaP,EAAc,CAAC,IAAM,GACzEE,EAAYF,EAAc,CAAC,IAAM,GACrC,MAAM,IAAI,MAAM,cAAcQ,CAAU,yCACpCR,EAAc,CAAC,CAAC,gBAAgBO,CAAU,yCAC1CP,EAAc,CAAC,CAAC,eAAeE,CAAS,yCAAyCF,EAAc,CAAC,CAAC,EAAE,EAEzG,IAAMW,EAAgBH,EAAaR,EAAc,CAAC,EAC5CY,EAAgBL,EAAaP,EAAc,CAAC,EAC5CS,EAAgBP,EAAYF,EAAc,CAAC,EAC3Ca,EAAgBH,EAClB;AAAA;AAAA;AAAA,gDAGsCL,CAAU;AAAA,gDACVC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA,iDAKTE,CAAU,2BAA2BR,EAAc,CAAC,CAAC;AAAA,mDACnDO,CAAU,2BAA2BP,EAAc,CAAC,CAAC;AAAA,YAC5FjB,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,iDAIRM,CAAS,2BAA2BF,EAAc,CAAC,CAAC;AAAA,uDAC9CM,CAAU,2BAA2BN,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,uCAGrEJ,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAO5CK,CAAI;AAAA;AAAA;AAAA,2DAG2BD,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,0BAI7DH,EAAa,oCAAoCG,EAAc,CAAC,CAAC,KACpD,iCAAiCA,EAAc,CAAC,CAAC,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAUzBA,EAAc,CAAC,CAAC;AAAA;AAAA,4DAEdA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,MAKlE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4CAMkCK,CAAU;AAAA;AAAA,kCAEpBM,CAAa;AAAA,kCACbC,CAAa;AAAA,kCACbH,CAAa;AAAA;AAAA;AAAA;AAAA,sCAITE,CAAa;AAAA,wCACXC,CAAa;AAAA;AAAA;AAAA,QAG7C7B,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sCAKfa,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMrBb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOvCK,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOpBjB,GAAwBa,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBrC,MAAO;AAAA,yCAC4BI,CAAI,KAAKM,CAAU,MAAMC,CAAU;AAAA,yCACnCP,CAAI,KAAKK,CAAU,MAAMJ,CAAS;AAAA,yBAClDH,EAAc,CAAC,CAAC;AAAA,yBAChBA,EAAc,CAAC,CAAC;AAAA,sBACnBG,CAAS;AAAA;AAAA,2BAEJF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,kBAInEG,EAAS,IAAM,iBAAiB;AAAA,MAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,sBAE7EO,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,mBACxFC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,4BAE5CH,CAAI;AAAA,MAC1BY,CAAa;AAAA;AAAA,CAGf,EAEE3B,GACF,CAAC4B,EAAmBC,EAAkBC,EAAyBC,EAC9DC,EAAuCC,EAAiB,KAAkB,CACzE,GAAM,CAACC,EAAaC,EAAaC,CAAU,EAAIJ,EACzC,CAACK,EAAeC,EAAWC,EAAWC,CAAc,EAAIT,EACxDU,EAAiBC,GAAiBR,EAAaE,CAAU,EACzDO,EAAiBD,GAAiBP,EAAaC,CAAU,EACzDQ,EAAWC,GAA4Bd,EAAU,CAAC,EAAE,KAAK,MAAM,EAC/De,EAAc,IAAM,CACxB,IAAMC,EAAQT,EAAU,KAClBU,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBX,EAAU,KAAK,OAAO,IACpD,QAASY,EAAIH,EAAQ,EAAI,EAAGI,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,
gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAV,EAAe,QAAQS,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcF,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBE,CACT,EACMG,EAAc,IAAM,CACxB,IAAMC,EAAQd,EAAU,KAClBS,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBV,EAAU,KAAK,OAAO,IACpD,QAASW,EAAIG,EAAQ,EAAI,EAAGF,EAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,IAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,CAAC,IAAM,cAAc,IAEvF,OAAAR,EAAe,QAAQO,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcI,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBJ,CACT,EAwCA,MAvCe;AAAA,kEAC6CZ,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBkB,EAAY,CAAC;AAAA,kBACLR,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kEAKcD,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBwB,EAAY,CAAC;AAAA,kBACLb,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6DAKSe,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BACnEhB,CAAS;AAAA;AAAA;AAAA;AAAA,UAKzBC,EACI,mBAAmBI,EAAiB,cAAgB,GAAGqB,GAAY1B,EAAWgB,CAAQ,CAAC,aAAa,IAChE,EAAsC;AAAA,UAC9Ed,CAAe;AAAA,UACfU,EAAe,aAAa,oBAAqB,OAAO,CAAC;AAAA;AAAA;AAAA,KAK/D,EAESvC,GACT,CAACsD,EAA+BC,EAAoDC,EACnFC,EACAzB,EAAiB,KAAyD,CACzE,IAAM0B,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAASL,EAAO,CAAC,EAAE,KACnBM,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAYL,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAC5FO,EAAYC,EAAU,KAAKF,CAAS,EACpCG,EAAYP,EAAOA,EAAO,OAAS,CAAC,EACpCQ,EAAWR,EAAOA,EAAO,OAAS,CAAC,EACnCS,EAAYR,EAAOA,EAAO,OAAS,CAAC,EACpCS,EAASF,EAAW,IAAM,GAAKC,EAAY,IAAM,EAGjDE,EAAoBJ,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDpD,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDyD,EAAW,CACf,KAAK,KAAKH,EAAYtD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKJ,EAAYpD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKN,EAAYlD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,CAC/D,EAEME,EAAaH,EAAS,EAAI,EAC1BI,EAAa,CAAC,GAAGZ,EAAYK,EAAWC,EAAWK,CAAU,EAC7DzB,EAAQ0B,EAAW,OACnBC,EAAa,CAAC,GAAGZ,EAAYK,EAAUC,EAAYI,CAAU,EAC7DnB,EAAQqB,EAAW,OACnBC,EAAkB,CAACX,EAAWE,EAAWE,EAAYI,CAAU,EAC/DI,EAAoC,CACxC,CAAC,OAAsB,KAAMV,CAAS,EAAG,CAAC,OAAsB,KAAME,CAAS,EAC/E,CAAC,OAAsB,KAAMD,CAAQ,CACvC,EACAU,GAA6BrB,EAAsBoB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2Bf,EAAWU,EAAYC,CAAU,CAAC,EACrF,IAAMK,EAAwD,CAAC,OAAQ,MAAM,EAEvElD,EAAU0B,EAAO,OAAS,EAC5B1B,IACF+C,EAAgB,KAAK,GAAGE,EAA2BvB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEwB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BH,CAAe,CAAC,EAEnE,IAAMK,EAAmBC,IAA+B,CACtD,IAAMjC,GAAYe,EAAU,OACtBrD,EAAYwE,GAAiB,YAAa3B,EAAO,CAAC,EAAE,SAAUP,GAAW,CAAC,EAC1EJ,GAAWC,GAA4BU,EAAO,CAAC,EAAE,QAAQ,EAEzD4B,EAAIC,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUR,EAAOyB,CAAU,EAC5Da,GAAID,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUF,EAAOmB,CAAU,EAC5Dc,GAAS9C,EAAe,SAAUe,EAAO,CAAC,EAAE,SAAUoB,EAAgB,OAAQH,CAAU,EACxFe,GAAiB,CAACJ,EAAGE,EAAC,EAC5B,GAAIxD,EAAS,CACX,IAAM2D,EAAiBvD,EAAiBuC,EAAa,EACrDe,GAAe,KAAKH,EAAc,OAAQ7B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQiC,CAAc,CAAC,CACtG,CACA,IAAMC,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAC7GC,GAAyBlC,EAAsBiC,CAAQ,EACvD,IAAME,GAAW9C,GAA4ByC,GAAO,KAAK,MAAM,EACzDxD,EAAkB8D,GAAqBpC,EAAsB8B,GAAO,KAAK,MAAOK,EAAQ,EACxFE,GAAmB7F,GACrBwE,EAAY3C,EAASC,EAAiB,CAACpB,EAAWyE,EAAGE,GAAGC,EAAM,EAAG,CAACzB,EAAYC,EAAYC,CAAS,EACnG9B,CAAc,EAClB,MAAO;AAAA,IAEHgD,GAAa,iBAAiBQ,CAAQ,EAAE,0BAA0B/E,CAAS,EAAE,iBACzE,GAAG6E,GAAgBD,EAAM,CAAC;AAAA,IACtCO,EAAgB;AAAA,IAERxB,EAASzE,GAA2B0E,EAAmBxD,EAAe8B,GAAUlC,CAAS,EAChFX,GAAuBuE,EAAmBxD,EAAe8B,GAAUlC,CAAS,CAAC;AAAA,oBAE5F,EACA,MAAO,CACL,KAAM,SACN,YAAa,C
ACX,KAAM,GAAG4D,CAAiB,IAAId,EAAqB,UAAU,IAAIa,CAAM,IAAIpC,CAAc,GACzF,kBAAA8C,CACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGgB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAK,CACF,GACA,gBAAAI,CACF,CACF,ICtfJ,IAiCMc,GA4HOC,GA7JbC,GAAAC,EAAA,kBAqBAC,IACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAoBC,EAAoBC,EAAmBC,EAAU,GAC9FC,EAA4BC,EAAoB,EAAGC,EAAoB,EAAGC,EAAmB,EAC7FC,EAAW,QAAkB,CAC5B,IAAMC,EAAeF,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,uBACT,IAAK,GACH,MAAO,kBAAkBC,CAAQ,8CACnC,IAAK,GACH,MAAO,2BACT,QACE,MAAM,IAAI,MAAM,oBAAoBD,CAAgB,oBAAoB,CAC5E,CACF,EACMG,EAAeH,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,oDACT,IAAK,GACH,MAAO,wDACT,QACE,MAAM,IAAI,MAAM,oBAAoBA,CAAgB,oBAAoB,CAC5E,CACF,EACMI,EAAgBZ,EAAiB;AAAA;AAAA,MAGA;AAAA;AAAA,MAIjCa,EAAkBb,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCc,EAAUd,EAAiB,2BAA6B,2BACxDe,EAASf,EAAiB,2BAA6B,2BACvDgB,EAAMhB,EAAiB,MAAQ,MAC/BiB,EAAMjB,EAAiB,MAAQ,MAC/BkB,EAAe;AAAA;AAAA,qBAENlB,EAAiB,gCAAkC,+BAA+B;AAAA,mBACpFgB,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA,iBAELC,CAAG;AAAA,iBACHA,CAAG;AAAA;AAAA;AAAA,gBAGJA,CAAG;AAAA,oBACCE,GAAYb,EAAmBG,CAAQ,CAAC;AAAA;AAAA;AAAA,8BAG9BK,CAAO,2BAA2BC,CAAM;AAAA,QAC9DH,CAAa;AAAA;AAAA,QAEbF,EAAYJ,CAAiB,CAAC;AAAA;AAAA,qBAI1Bc,EAAUpB,EAAkBC,GAAaE,EAAW;AAAA,wBACxCG,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SACbN,GAAYD,EAAY;AAAA,wBACxCI,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SAEzCY,EAAU,GAAGV,EAAYJ,CAAiB,CAAC,GAE3Ce,EAAUH,GAAYX,EAAkBC,CAAQ,EAChDc,EACFvB,EAAiBmB,GAAYb,EAAmBG,CAAQ,EAAIU,GAAYZ,EAAmBE,CAAQ,EACjGe,EACFxB,EAAiBmB,GAAYZ,EAAmBE,CAAQ,EAAIU,GAAYb,EAAmBG,CAAQ,EACjGgB,EAAkBC,GAAqBrB,EAAYiB,EAASb,CAAQ,EAsB1E,MArBiB;AAAA,yDACkCc,CAAK;AAAA,QACtDvB,EAAiBoB,EAAUC,CAAO;AAAA;AAAA;AAAA,yDAGeG,CAAK;AAAA,QACtDxB,EAAiBqB,EAAUD,CAAO;AAAA;AAAA;AAAA,gEAGsBE,CAAO;AAAA,0BAC7Cd,CAAgB;AAAA;AAAA;AAAA;AAAA,uBAInBR,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGa,CAAe;AAAA,QACfc,GAAYvB,CAAO,CAAC;AAAA,QACpBqB,CAAe;AAAA;AAAA;AAAA,MAKnB,EAESnC,GACT,CAACsC,EAA+BvB,EAA4BwB,EAAgCC,EAC3FC,EAAmBC,EAAkBC,EAAkBC,IAAoD,CAC1G,IAAMlC,EAAiBK,EAAW,SAAW,OACvC8B,EAAanC,EAAiB4B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAWrC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAYtC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAcvC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAASxC,IAAmBmC,EAAa,IAAM,GAAKA,EAAa,IAAM,IAAMI,EAAc,IAAM,EAGjGE,EAAYzC,EAAiBuC,EAAcF,EAAWC,EACtDI,EAAY1C,EAAiBqC,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,iCAAiCD,CAAQ,EAAE,EAEtE,IAAMrC,EAAmBgC,EAAUxC,GAAkBmC,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EY,EAAaJ,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDI,EAAaL,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDK,EAAY,KAAK,IAAIN,EAAc,CAAC,EAAInC,EAAkBmC,EAAc,CAAC,CAAC,EAC1E1C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAY6B,EAAYiB,IAAe,EACvC7C,EAAW6B,EAAWiB,IAAc,EACpCC,EAAeV,EAAS,CAAChC,EAAkB,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EAE3D2C,GAAoC,CACxC,CAAC,OAAsB,KAAMrB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAM,CAAC3B,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EAC7G,CAAC,OAAsB,KAAMA,EAAW,OAAO,EAAG,CAAC,OAAsB,KAAMA,EAAW,SAAS,CACrG,EACA+C,GAA6B
/C,EAAY8C,EAAe,EACxDA,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAM0B,GAAwD,CAAC,OAAQ,MAAM,EACzErB,IACFkB,GAAgB,KAAK,GAAGE,EAA2BzB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE0B,GAAkB,KAAK,MAAM,GAE/BH,GAAgB,KAAK,GAAGE,EAA2BxB,CAAW,CAAC,EAE/D,IAAM0B,EAAmBC,IAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,MAAO,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ,CAAC,EAC9E,CAAC,KAAM,WAAY,KAAM,MAAO,OAAQ,CAAC,CAC3C,EACAC,GAAyBrD,EAAYoD,CAAQ,EAG7C,IAAME,GAAanB,EAAS,EAAI,EAC1BoB,GAAIC,GAA4BjC,EAAO,CAAC,EAAE,QAAQ,EACpDkC,GAAmB;AAAA,qDACsBtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,8BAChDpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,6EAEsBpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,qCAEjEpB,EAAS,MAAQ,EAAE;AAAA,SAE1CuB,EAAIC,EACN,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQpB,IAAqB,EAAI,EAAIA,CAAgB,EAC3FyD,GAAID,EAAc,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EAC5EO,EAAiB,CAACH,EAAGE,EAAC,EACtBE,GAASC,EAAe,SAAUxC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQ8B,EAAU,EAC1F,GAAI1B,EAAS,CACX,IAAMoC,EAAOL,EAAc,OAAQpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EACxFO,EAAe,KAAKG,CAAI,EACxBP,IAAoB;AAAA,0DAC4BtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,+BACpD5D,EAAiB,IAAM,GAAG,GAAGwC,EAAS,MAAQ,EAAE;AAAA,UAEvE,CAEA,MAAO;AAAA,UACL8B,GAAc,yBAAyB,CAAC;AAAA;AAAA;AAAA;AAAA,UAIxCd,GAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGS,EAAgBC,EAAM,CAAC;AAAA,UACnFL,EAAgB;AAAA,UAEdzE,GACIW,EAAgBC,EAAWC,EAAWC,EAAU8B,EAAS5B,EAAY6C,EAAa,CAAC,EAAGA,EAAa,CAAC,EACpGA,EAAa,CAAC,EAAGU,EAAC,CAAC;AAAA,UAEvBpB,EACI+B,GAA2B3B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,CAAS,EACrGuB,GACI5B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,EAAW,GAAO,OACnFf,CAAyB,CAAC,EACxC,EACA,MAAO,CACL,KAAM,eACN,YAAa,CACX,KAAM,GAAG7B,EAAW,QAAQ,IAAIG,CAAgB,IAAIgC,CAAM,IAAIvC,CAAS,IAAIC,CAAS,IAAIC,CAAQ,IAC5F4C,CAAU,IAAIC,CAAU,IAAIC,CAAS,GACzC,kBAAAK,EACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMzB,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,EACF,GACA,gBAAAI,CACF,CACF,IC9QJ,IA6BMkB,GAQAC,GAGAC,GAQAC,GAOAC,GAgBAC,GAmFOC,GA+DAC,GAzNbC,GAAAC,EAAA,kBAqBAC,IACAC,KAEAC,KAEAC,KAGMb,GAAgBc,GAAkB,CACtC,IAAIC,EAAU,EACd,QAASC,EAAI,EAAGA,EAAIF,EAAI,OAAQE,IAC9BD,GAAWD,EAAIE,CAAC,EAElB,OAAOD,CACT,EAEMd,GAAoBgB,GACtB,OAAOA,GAAU,SAAW,CAACA,EAAOA,EAAOA,CAAK,EAAIA,EAElDf,GAAyB,CAACgB,EAAoBC,IAC9CA,GAAY,EACPD,EAGFA,GAAcA,EAAa,IAAMC,EAAW,GAG/ChB,GACF,CAACiB,EAA+DC,EAAmBC,EAAgBH,EAAW,IAChG,CACR,IAAMI,EAAqBrB,GAAuBmB,EAAWF,CAAQ,EACrE,OAAO,KAAK,OAAOC,EAAW,CAAC,GAAKE,EAAS,GAAKA,EAASC,GAAsB,CAAC,CACpF,EAEFnB,GACF,CAACoB,EAA2CC,EAAuCC,EAClFC,EAAmCC,IAAuD,CACrFA,GAAW,OAEbA,EAAUzB,GAAkBqB,EAASC,EAAY,CAAC,EAAGE,EAAQ,CAAC,CAAC,GAEjE,IAAME,EAA6C,CAAC,EAAG,EAAG,EAAGH,CAAW,EACxE,QAASI,EAAQ,EAAGA,EAAQ,EAAGA,IACzBN,EAAQM,CAAK,EAAI,EAAIF,GAAWH,EAAYK,CAAK,IACnDD,EAASC,CAAK,EAAI,KAAK,OAAON,EAAQM,CAAK,EAAIL,EAAYK,CAAK,EAAI,EAAIF,GAAWD,EAAQG,CAAK,EAAI,CAAC,GAGzG,OAAOD,CACT,EAEExB,GACF,CAAC0B,EAA6BC,EAAiBC,EAAkBC,EAAiBC,EACjFC,EAAsBC,EAAqBC,EAAqBC,EAChEC,IAAqG,CACpG,IAAIC,EACAC,EACAC,EACAC,EAOJ,GALIb,IAAQ,UAEVA,EAAM,GAGJ,OAAOA,GAAQ,SAAU,CAC3BU,EAAU,CAAC,IAAKV,EAAK,OAAQA,EAAK,KAAMA,EAAK,MAAOA,EAAK,MAAOA,EAAK,KAAMA,CAAG,EAC9E,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,CAAG,EACjDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAW,MAAM,QAAQE,CAAG,EAAG,CAC7B,GAAI,CAACA,EAAI,MAAM,CAACc,EAAKC,EAAGhC,IAAQ+B,IAAQ/B,EAAI,CAAC,CAAC,EAC5C,MAAM,MAAM,kCAAkCiB,CA
AG,EAAE,EAErDU,EAAU,CAAC,IAAKV,EAAI,CAAC,EAAG,OAAQA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,MAAOA,EAAI,CAAC,EAAG,KAAMA,EAAI,CAAC,CAAC,EAChG,IAAMF,EAAWzB,GACb,CAAC4B,EAASC,EAAUC,EAAS,CAAC,EAAG,CAACI,EAAaC,EAAcC,CAAW,EAAG,EAC3E,CAACL,EAAaC,EAAcC,CAAW,EAAGN,EAAI,CAAC,CAAC,EACpDW,EAAWb,EAAS,CAAC,EACrBc,EAAYd,EAAS,CAAC,EACtBe,EAAWf,EAAS,CAAC,CACvB,SAAWE,IAAQ,aAAc,CAE/BW,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1CQ,EAAY,KAAK,KAAKV,EAAWG,CAAY,EAC7CQ,EAAW,KAAK,KAAKV,EAAUG,CAAW,EAC1C,IAAMU,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,GAAkBL,EAAY,GAAKP,EAAeG,EAAeN,EACjEgB,GAAiBL,EAAW,GAAKP,EAAcG,EAAcN,EAC7DgB,EAAQ,KAAK,MAAMH,EAAgB,CAAC,EACpCI,EAAOJ,EAAgBG,EACvBE,EAAM,KAAK,MAAMJ,EAAiB,CAAC,EACnCK,EAASL,EAAiBI,EAC1BE,EAAO,KAAK,MAAML,EAAgB,CAAC,EACnCM,EAAQN,EAAgBK,EAE9Bb,EAAU,CAAC,IAAAW,EAAK,OAAAC,EAAQ,KAAAC,EAAM,MAAAC,EAAO,MAAAL,EAAO,KAAAC,CAAI,CAClD,KACE,OAAM,MAAM,8BAA8BpB,CAAG,EAAE,EAEjD,MAAO,CAAC,QAAAU,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,CAAQ,CAChD,EA8BStC,GACT,CAACkB,EAAmDC,EACnDE,EAA0C6B,EAA4CzB,EACtF0B,EAAY,GAAOC,EAA6C,iBAA+B,CAC9F,IAAIC,EAAW3B,EAASC,EAAUC,EAAS0B,EAC3C,GAAIF,IAAe,eACjB,CAACC,EAAW3B,EAASC,EAAUC,EAAS0B,CAAU,EAAIpC,UAC7CkC,IAAe,gBACxB,CAACC,EAAWC,EAAY5B,EAASC,EAAUC,CAAO,EAAIV,MAEtD,OAAM,IAAI,MAAM,sBAAsBkC,CAAU,EAAE,EAEpD,GAAM,CAACG,EAAgB,CAAEvB,EAAaC,EAAcC,CAAW,EAAIf,EAE7D,CAACU,EAAaC,EAAcC,CAAW,EAAIpC,GAAiB0B,CAAO,EACnE,CAACmC,EAAeC,EAAgBC,CAAa,EAAI/D,GAAiBuD,CAAS,EAE3ES,EAAuB/D,GAAuBoC,EAAawB,CAAa,EACxEI,EAAwBhE,GAAuBqC,EAAcwB,CAAc,EAC3EI,EAAuBjE,GAAuBsC,EAAawB,CAAa,EACxE,CAAC,QAAAvB,EAAS,SAAAC,EAAU,UAAAC,EAAW,SAAAC,EAAQ,EAAIvC,GAC7C0B,EAAKC,EAASC,EAAUC,EAASC,EAAaC,EAAcC,EAAa4B,EACzEC,EAAuBC,CAAoB,EAEzCzC,GAAc+B,EAAYI,EAAiBD,EAAaC,EAE1DhC,EAAqD,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACvE,OAAI6B,IAAe,gBACjB7B,EAAW,CAAC8B,EAAWjC,GAAagB,EAAUC,EAAWC,EAAQ,EACxDc,IAAe,iBACxB7B,EAAW,CAAC8B,EAAWjB,EAAUC,EAAWC,GAAUlB,EAAW,GAG5D,CACL,UAAAiC,EACA,WAAAD,EACA,QAAA1B,EACA,SAAAC,EACA,QAAAC,EACA,WAAA0B,EACA,SAAAlB,EACA,UAAAC,EACA,SAAAC,GACA,YAAAlB,GACA,QAAAe,EACA,YAAAN,EACA,aAAAC,EACA,YAAAC,EACA,YAAAC,EACA,aAAAC,EACA,YAAAC,EACA,qBAAAyB,EACA,sBAAAC,EACA,qBAAAC,EACA,cAAAL,EACA,eAAAC,EACA,cAAAC,EACA,QAAAxC,EACA,SAAAK,EACA,YAAAJ,CACF,CACF,EAESlB,GACT,CAAC6D,EAA+BC,EAA4BC,EAC3DC,EAA+BC,EAAyBd,IAAoC,CAC3F,IAAMe,EAAiBf,IAAe,eAChCE,EAAaa,EAAiBL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAElEM,EAAS,GACTC,EAA0C,CAAC,GAAI,EAAG,CAAC,EACnDC,EAAiB,CAAC,EAAGN,EAAY,IAAI,CAACxB,EAAG9B,IAAMA,CAAC,CAAC,EACjD6D,EAAW,CAAC,KAAK,KAAK7E,GAAa4E,EAAe,EAAE,IAAIE,GAAKR,EAAYQ,CAAC,CAAC,CAAC,EAAKH,EAAc,CAAC,CAAE,EAAG,EAAG,CAAC,EAE/GI,GAAU,UAAW,IAAM,oCAAoCF,CAAQ,EAAE,EAEzE,IAAMG,EAAmBN,EAAUD,GAAkBb,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EqB,EAAaC,EAAU,KAAKZ,CAAW,EACvCa,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,CAAU,EACnF,CAAC,QAAuB,KAAMC,CAAI,EAAG,CAAC,QAAuB,KAAMH,EAAW,OAAO,EACrF,CAAC,QAAuB,KAAMA,EAAW,SAAS,CACpD,EACAc,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAMiB,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAUlB,EAAO,SAAW,EAC9BkB,IACFH,EAAgB,KAAK,GAAGC,EAA2BhB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEiB,EAAkB,KAAK,MAAM,GAE/BF,EAAgB,KAAK,GAAGC,EAA2Bd,CAAW,CAAC,EAE/D,IAAMiB,EAAmBC,GAA+B,CACtD,IAAMC,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQlB,EAAW,MAAM,EAChG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAC/C,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQH,EAAW,QAAQ,MAAM,EAChE,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,UAAU,MAAM,CACtE,EAEMqB,EAAahB,EAAS,EAAI,EAC1BiB,EAAIC,GAA4BxB,EAAO,CAAC,EAAE,QAAQ,EAElDyB,EAAIC,EACN,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQY,IAAqB,EAAI,EAAIA,CAAgB,EAC3Fe,EAAID,EAAc
,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EAC5EM,EAAiB,CAACH,EAAGE,CAAC,EACtBE,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUE,EAAY,OAAQoB,CAAU,EACtFS,EAAmB,GACvB,GAAIb,EAAS,CACX,IAAMc,EAAON,EAAc,OAAQ1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQsB,CAAU,EACxFM,EAAe,KAAKI,CAAI,EACxBD,GAAoB;AAAA,8DACgCzB,EAAS,QAAQiB,CAAC,IAAMA,CAAC;AAAA,wBAC/DlB,EAAiB4B,EAAa,SAAU,EAAG,CAAC,EAAIA,EAAa,SAAU,EAAG,CAAC,CAAC,GACtF3B,EAAS,MAAQ,EAAE;AAAA,UAEzB,CAEA,MAAO;AAAA,cACDyB,CAAgB;AAAA;AAAA;AAAA,uBAGPN,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA,uBAI1BE,EAAE,aAAa,UAAU,CAAC;AAAA;AAAA,YAErCP,EAAa,iBAAiBC,CAAQ,EAAE,iBAAiB,GAAGO,EAAgBC,CAAM,CAAC;AAAA,YACnFT,EAAa,UAAU,CAAC;AAAA,YACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACzDS,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACrCI,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,yBAEjDpB,EAAiB4B,EAAa,SAAUR,EAAE,KAAO,EAAGA,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,2CAE/FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAClFpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAAIQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA,gBAE1FpB,EAAiB4B,EAAa,SAAU,EAAGR,EAAE,IAAI,EAChCQ,EAAa,SAAU,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA,8BAKlDpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA,8BAE1GpB,EAAiB4B,EAAa,mBAAoB,EAAGR,EAAE,IAAI,EAAIQ,EAAa,mBAAoB,EAAGR,EAAE,IAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAyB1GpB,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAUjBA,EAAiB;AAAA,0EAEA;AAAA,yEAC4C;AAAA;AAAA,wBAG7DA,EAAiB;AAAA;AAAA;AAAA,wBAIA;AAAA;AAAA;AAAA,qBAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOTA,EAAiB;AAAA;AAAA;AAAA;AAAA,wBAKA;AAAA;AAAA;AAAA;AAAA,qBAIR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAULa,EAAU,oDAAsD,EAAE;AAAA;AAAA,YAG5E,EACA,MAAO,CACL,KAAM,cACN,YACI,CAAC,KAAM,GAAGjB,EAAW,QAAQ,IAAII,CAAc,IAAIO,CAAgB,IAAIM,CAAO,GAAI,kBAAAD,CAAiB,EACvG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGS,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,CACF,GACA,gBAAAI,CACF,CACF,ICtZJ,IAgBae,GAuGAC,GAvHbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KACAC,KAMaR,GACT,CAACS,EAA+BC,EAC/BC,IAAqF,CACpF,IAAMC,EAAUH,EAAO,OAAS,EAC1BI,EAAcD,EAAU,8BAAgC,GACxDE,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KACnBO,EAAyBD,EAAO,CAAC,EAAIL,EAAW,MAEhDO,EAAgBP,EAAW,SAAW,OACtCQ,EAAcC,GAChBL,EAAQC,EAAQL,EAAW,UAAWA,EAAW,KAAMA,EAAW,QAASO,CAAa,EACtFG,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,EAAW,SAAS,EAC7F,CAAC,QAAuB,KAAM,CAACA,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC5E,CAAC,QAAuB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EACtE,CAAC,QAAuB,KAAMM,CAAsB,CACtD,EACAO,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,CAAM,CAAC,EAClE,IAAMU,EAAwD,CAAC,OAAQ,MAAM,EACzEb,IACFU,EAAgB,KAAK,GAAGE,EAA2Bf,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEgB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BN,CAAW,CAAC,EAE/D,IAAMQ,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEY,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,MAAM,EACxDsB,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,MAAM,EACxDsB,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,E
AAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,CAAC,EAG9E,IAAM6B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ5B,EAAW,UAAU,MAAM,EACxG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,EAChF,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACA,OAAA6B,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA;AAAA,IAE9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,0BAEtDC,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,8CAEhBX,EAAgB,EAAI,CAAC;AAAA,yDACVA,EAAgB,EAAI,CAAC,oBAClEA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA,iBAGhBW,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMCX,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMrBA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA,uBAKnEA,EAAgBiB,EAAE,IAAI,QAAS,UAAW,SAAU,eAAe,EACnDA,EAAE,IAAI,QAAS,gBAAiB,UAAW,QAAQ,CAAC;AAAA,uBACzDE,EAAE,IAAI,iBAAkB,aAAc,UAAW,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,MAK3EvB,CAAW;AAAA,MACXmB,CAAe;AAAA,MACfJ,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAEzC,EACA,MAAO,CACL,KAAM,cACN,YAAa,CAAC,KAAMlB,EAAW,SAAU,kBAAAe,CAAiB,EAC1D,WAAY,KAAO,CACjB,QAAS,CAAC,CACR,KAAMd,EAA6BA,EAA2BO,CAAW,EAAIA,EAC7E,SAAUT,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,EAESzB,GACT,CAACQ,EAA+BC,EAA4BQ,IAAgD,CAC1G,IAAMN,EAAUH,EAAO,OAAS,EAC1B+B,EAAaC,GAAiBvB,EAAY,CAAC,CAAC,EAC5CwB,EAAeD,GAAiBvB,EAAY,CAAC,CAAC,EAC9CE,EAAaC,EAAU,KAAKH,CAAW,EAAIsB,EAAaE,EACxD5B,EAAS,CAACL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGzB,EAAS,CAACN,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGG,EAAsB,CAACzB,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAIsB,CAAU,EAElGlB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EACxC,CAAC,OAAsB,KAAM,CAACV,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC3E,CAAC,OAAsB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,CACvE,EACAa,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2BV,EAAQC,EAAQ4B,CAAmB,CAAC,EACvF,IAAMC,GAAWF,EAAe,GAAKhC,EAAW,QAAQ,CAAC,EAAIK,EAAO,CAAC,EAC/DW,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUkC,EAAoB,OAAQH,CAAU,EAC5FV,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQ0B,CAAU,EACpEJ,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQyB,CAAU,EACpEH,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM+B,CAAU,CAAC,EAEnF,IAAM3B,EAAcD,EAAU,8BAAgC,GACxD0B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EACxC,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,CACvC,EACA,OAAAC,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA,IAC9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,8CAIlCe,CAAY;AAAA,oCACtBA,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOxBR,EAAE,KAAK,KAAK,KAAKU,CAAO;AAAA,wBACxBhB,EAAO,KAAK,KAAK,KAAKc,CAAY;AAAA;AAAA;AAAA,8CAGZ3B,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGzB6B,CAAO;AAAA;AAAA;AAAA,0BAGXV,EAAE,IAAI,QAAS,gBAAiB,eAAgB,eAAe,CAAC;AAAA;AAAA,0BAEhEA,EAAE,KAAK,KAAK;AAAA;AAAA;AAAA,gDAGUnB,EAAO,CAAC,CAAC;AAAA,wBACjCqB,EAAE,IAAI,WAAY,UAAW,IAAK,gBAAgB,CAAC;AAAA,iCAC1CM,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOlBA,CAAY;AAAA;AAAA,QAE/B7B,CAAW;AAAA,QACXmB,CAAe;AAAA,QACfJ,EAAO,IAAI,QAAS,MAAO,UAAW,iBAAkB,OAAO,CAAC;AAAA;AAAA,IAGlE,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CACX,KAAM,GAAGlB,EAAW,QAAQ,IAAI8B,
CAAU,IAAIE,CAAY,IAAIE,CAAO,IAAI7B,EAAO,CAAC,CAAC,IAAIA,EAAO,CAAC,CAAC,GAC/F,kBAAmBH,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMM,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,ICvNJ,IAYamB,GA6IPC,GAUOC,GAnKbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KACAC,KAEaT,GACT,CAACU,EAA+BC,EAAoDC,EACnFC,EACAC,EAAiB,KAAyD,CACzE,IAAMC,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KAEnBO,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIJ,EAAOA,EAAO,OAAS,CAAC,EAC5BK,EAAaC,GAAiBH,CAAC,EAC/BI,EAAcD,GAAiBF,CAAC,EAChCI,EAAeF,GAAiBJ,CAAC,EACjCO,EAAaC,EAAU,KAAKb,CAAW,EAAIQ,EAAaG,EACxDG,EAAUhB,EAAO,OAAS,EAC1BiB,EAAYd,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAE5FgB,EAAsB,CADVH,EAAU,KAAKE,CAAS,EACFV,EAAGC,CAAC,EAEtCW,EAAoC,CACxC,CAAC,QAAuB,KAAML,CAAU,EAAG,CAAC,QAAuB,KAAMP,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,CACjC,EACAW,GAA6BnB,EAAsBkB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,EAA2BJ,EAAWZ,EAAQC,CAAM,CAAC,EACzEU,GACFG,EAAgB,KAAK,GAAGE,EAA2BrB,EAAO,CAAC,EAAE,IAAI,CAAC,EAEpEmB,EAAgB,KAAK,GAAGE,EAA2BH,CAAmB,CAAC,EAEvE,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAYC,GAAiB,aAAczB,EAAO,CAAC,EAAE,SAAUiB,EAAU,MAAM,EAC/ES,EAAIC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQO,CAAW,EACrEgB,EAAID,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQI,CAAU,EACpEmB,EAASC,EAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUkB,EAAoB,OAAQR,CAAU,EAC5FqB,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBjC,EAAsB4B,EAAO,KAAK,MAAOE,CAAQ,EACxFI,EAAiB,CAACT,EAAGE,CAAC,EACxBQ,GAAc,GAClB,GAAIpB,EAAS,CACX,IAAMqB,EAAiBjC,EAAiBM,EAAa,EACrDyB,EAAe,KAAKR,EAAc,OAAQ3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQqC,CAAc,CAAC,EACpGD,GAAc,GACVhC,EAAiB,uBAAuBiC,CAAc,KACrC,YAAYR,EAAO,KAAK,KAAK,kBAAkB,EACtE,CAEA,IAAMS,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,EAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAiBC,GAAiBH,GAAYrB,CAAS,EACvDyB,EAAiBD,GAAiBF,EAAYtB,CAAS,EACvD0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EACrF,CAAC,KAAM,IAAK,KAAM,KAAK,CACzB,EACAC,GAAyB3C,EAAsB0C,EAAQ,EAEvD,IAAME,GAAa,CAACC,EAAyBC,KAA4B,CACvE,IAAMC,EAAOF,EAAS,KAChBG,GAAOH,EAAS,KACtB,GAAIE,IAAS,EACX,MAAO,OAAOC,EAAI,cAAcH,EAAS,KAAK,OAAO,YAEvD,IAAMI,EAAY1B,EAAU,KACxB2B,EAAS,OAAOF,EAAI,aAAaH,EAAS,KAAK,OAAO,IAC1D,QAASM,GAAIJ,EAAO,EAAI,EAAGK,GAAIH,EAAY,EAAGE,IAAK,EAAGA,KAAKC,KACzDF,GAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,OAAOF,EAAY,EAAI,iBAAiBG,EAAC,IAAM,eAAe,IAEhG,OAAAN,GAAc,QAAQK,IAAK,CACzBD,GAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,QAClC,CAAC,EACDD,GAAU,GAAGF,EAAI,YAAYD,EAAO,CAAC;AAAA,uBACxBC,EAAI,YAAYD,EAAO,CAAC,UAC9BG,CACT,EAEMG,GAAa,IAAc,CAC/B,IAAIC,EAAU,eAAe7B,EAAE,KAAK,KAAK,IACzC,QAAS0B,GAAI,EAAGA,GAAIxC,EAAawC,KAC/BG,GAAW;AAAA,0BACGH,EAAC,yBAAyBA,EAAC,2BAA2B1C,CAAU,KAEhF,QAAS0C,GAAI,EAAGA,GAAIvC,EAAcuC,KAAK,CACrCG,GAAW,iCAAiCH,EAAC,yBAAyBxC,CAAW,KAEjF,QAASyC,EAAI,EAAGA,EAAIzC,EAAayC,IAC/BE,GAAW;AAAA,qBACJH,EAAC,WAAWxB,EAAE,KAAK,KAAK,UAAUhB,IAAgB,EAAI,GAAK,IAAIyC,CAAC,GAAG,YAAYA,CAAC,YACnFD,EAAC;AAAA,CAET,CACA,OAAOG,CACT,EAEA,MAAO;AAAA,IAEHhC,EAAa,iBAAiBoB,EAAQ,EAAE,0BAA0BnB,CAAS,EAAE,iBACzE,GAAGW,EAAgBN,CAAM,CAAC;AAAA,IACtCN,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,4CACpCb,CAAU,QAAQA,CAAU;AAAA,8CAC1BA,CAAU;AAAA,iCACvBG,CAAY;AAAA,qCACRA,CAAY;AAAA;AAAA;AAAA,MAG3CX,EAAY,SAAW,EAAI,GAAK,uBAAuBsB,EAAU,gBAAgB,OAAO,CAAC,GAAG;AAAA,MAC5FqB,GAAWnB,EAAGc,EAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,MAC7CmB,GAAWjB,EAAGc,CAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,wBAC3BC,EAAO,KAAK,KAAK,KAAKhB,CAAY;AAAA,oDACND,CAAW;AAAA,QACvD0C
,GAAW,CAAC;AAAA;AAAA,2BAEOzC,CAAY;AAAA;AAAA,QAE/BuB,EAAW;AAAA,QACXH,CAAe;AAAA,0BACGJ,EAAO,KAAK,OAAO;AAAA,qBACxBA,EAAO,gBAAgB,aAAa,CAAC;AAAA,QAClDA,EAAO,YAAY,YAAYnB,CAAU,GAAI,OAAO,CAAC;AAAA;AAAA;AAAA,GAIvD,EACA,MAAO,CACL,KAAM,cACN,YAAa,CACX,KAAM,GAAGT,EAAqB,UAAU,IAAIS,CAAU,IAAIE,CAAW,IAAIC,CAAY,IAAIT,CAAc,GACvG,kBAAmBY,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMd,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAClE,gBAAAK,CACF,GACA,gBAAAG,CACF,CACF,EAEE/B,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,CAEtD,EAEaR,GAAUgE,GAAkC,CACvDjE,GAAeiE,EAAQ,MAAM,EAC7B,IAAMtD,EAAcuD,GAAc,UAAUD,EAAQ,OAAO,CAAC,EAAE,KAAMA,EAAQ,OAAO,CAAC,EAAE,KAAM,EAAI,EAChG,GAAI,CAACtD,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMM,EAAIN,EAAYA,EAAY,OAAS,CAAC,EACtCO,EAAI+C,EAAQ,OAAO,CAAC,EAAE,KAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EAC9DhD,EAAI,GAAKC,EAAI,EACf+C,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,EAE3FsD,EAAQ,QAAQE,GAAwBF,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,CAE1F,IChLA,IAgBayD,GA6BPC,GAEAC,GAkDAC,GAmBOC,GA0BPC,GAyIAC,GA0BAC,GAeOC,GAhUbC,GAAAC,EAAA,kBAIAC,KAIAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEalB,GACT,CAACmB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,EAA4BC,IAAqC,CAC/F,IAAMC,EAAYN,EAAW,CAAC,EACxBO,EAAoBP,EAAW,MAAMK,EAAgB,EAAI,EAAGA,EAAgB,EAAI,CAAC,EACjFG,EAAcD,EAAkB,OAChCE,EAAcR,EAAY,CAAC,EAE3BS,EADqBT,EAAY,MAAM,CAAC,EACA,IAAI,CAACU,EAAGC,IAAMD,GAAKA,EAAI,IAAMT,EAAUU,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIR,EAAWS,CAAC,EAAIT,EAAWS,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIR,EAAQQ,CAAC,GAAKR,EAAQQ,CAAC,CAAC,CAAC,EAC5G,OAAAC,EAAY,OAAO,EAAG,EAAGP,CAAS,EAClCO,EAAY,OAAOR,EAAgB,EAAI,EAAG,EAAGI,CAAW,EACjDI,CACT,EAcE/B,GAA2B,CAAC,EAAG,EAAG,EAAG,CAAC,EAEtCC,GAAiB,CAAC+B,EAA+BC,IAAqC,CAG1F,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAG/C,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAME,EAAcF,EAAO,CAAC,EAAE,KAAKC,EAAW,SAAW,OAASD,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFG,EAAkBH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAIC,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAIH,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMN,EAAcM,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWP,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIO,EAAW,QAAQ,SAAWP,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIO,EAAW,KAAK,SAAWP,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIO,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAEM9B,GAA4B,CAA2B+B,EAAeD,IAAqC,CAC/G,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,QAAS,EAAI,EAAG,EAAID,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAE,EACvCb,EAAY,EAAI,CAAC,IAAM,IACzBA,EAAY,EAAI,CAAC,EAAIa,EAAO,CAAC,EAAE,KAAK,CAAC,GAGzC,IAAMI,EAAOH,EAAW,KAAK,MAAM,EACnCI,GAAa,yBACTL,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAaiB,EAAMH,EAAW,SAAW,OACnGA,EAAW,OAAO,EAGtB,IAAMK,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAiB,CAAI,CAAC,EACzCE,CACT,EAEanC,GAAuB8B,GAAwD,CAC1F,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBS,EAAU,CAAC,SAAU,QAAS,aAAc,YAAY,EAAET,EAAW,QAAkB,EACvFb,EAAYa,EAAW,UACvBU,EAAQV,EAAW,MACnBd,EAAcc,EAAW,aACzBG,
EAAOH,EAAW,KAClBX,EAAUW,EAAW,QACrBW,EAAYX,EAAW,WAA6B,EAE1D,MAAO,CACL,QAAAS,EACA,OAAAD,EACA,UAAArB,EACA,MAAAuB,EACA,YAAAxB,EACA,KAAAiB,EACA,QAAAd,EACA,SAAAsB,EACA,GAAGL,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEMnC,GAAS,CAACyC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EAKjEe,EAAiBd,EAAW,SAAW,OAC7C,GAAIA,EAAW,QAAU,EAAG,CAM1B,GADmC,CAACY,EAAQ,YAAY,eAAe,QAAQ,GAC7CE,GAAkBf,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMC,EAAW,OACjFD,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAKC,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,EAAG,CAC7F,IAAMF,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZC,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAEhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3ChB,EAAO,SAAW,GACpBkB,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAE3Ba,EAAQ,QACJM,GAAsCD,EAAYJ,EAAoBf,CAAW,EAAG,CAAC,OAAQmB,CAAU,CAAC,CAC9G,MACEL,EAAQ,QAAQO,GAA6BpB,EAAQc,CAAkB,CAAC,EAE1E,MACF,CAEA,IAAMO,EAAUrB,EAAO,SAAW,EAC5BsB,EAActB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACnDQ,EAAavB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EAClDS,EAAgBxB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACrDU,EAAezB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/B0B,EAAc1B,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BD,EAAchC,GAChBiC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZY,EAAY5B,EAAYgB,EAAiB,EAAI,CAAC,EAC9Ca,EAAW7B,EAAYgB,EAAiB,EAAI,CAAC,EAC7CpB,EAAcI,EAAYgB,EAAiB,EAAI,CAAC,EAEhDc,EAAWd,GAAkBU,IAAiBH,GAAeI,IAAgBH,GAC/EtB,EAAW,KAAK,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,EACvD,GAAI4B,GACCJ,IAAiB,GAAKC,IAAgB,GAAKzB,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,GACxGA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,GACrFA,EAAW,KAAK,CAAC,IAAM,EAAI,CAE9B,IAAM6B,EAAQ/B,EAAY,CAAC,EACvBgC,EAAWC,EAAWC,EACpBC,EAAe,CAAC,EACtB,GAAInB,EAAgB,CAClB,IAAMC,GAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAIlE,GAHIA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,IAE5Ba,EAAU,CACZ,IAAMM,EAAYb,EAAcC,EAAaC,EAC7CO,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAG8B,EAAOK,CAAS,CAAC,EACnDH,EAAYhB,GAAiB,QAAQ,CAAC,EAAGmB,EAAWxC,CAAW,CAAC,EAChEsC,EAAoB,CAAC,EAAGH,EAAOnC,CAAW,CAC5C,MACEoC,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAOR,EAAcC,EAAYC,CAAa,CAAC,EAC9EQ,EAAYhB,GAAiB,QAAQ,CAAC,EAAGQ,EAAe7B,CAAW,CAAC,EACpEsC,EAAoB,CAACH,EAAOH,EAAYC,EAAUjC,CAAW,EAE/DuC,EAAa,KAAKH,CAAS,EAC3BG,EAAa,KAAKF,CAAS,CAC7B,MACED,EAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAON,EAAeF,EAAcC,CAAU,CAAC,EAC9ES,EAAYhC,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAGL,EAAa6B,CAAa,CAAC,EAC7DS,EAAoB,CAACH,EAAOnC,EAAagC,EAAYC,CAAQ,EAC7DM,EAAa,KAAKF,CAAS,EAC3BE,EAAa,KAAKH,CAAS,EAEzBV,GACFa,EAAa,KAAKlC,EAAO,CAAC,CAAC,EAE7B,IAAMoC,EAAIH,EAAkB,CAAC,EACvBI,GAAIH,EAAa,CAAC,EAAE,KAAKA,EAAa,CAAC,EAAE,KAAK,OAAS,CAAC,EAE1DE,EAAI,GAAKC,GAAI,EACfxB,EAAQ,QACJyB,GACIJ,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACpF,CAAC,OAAQmB,CAAY,CAAC,EAE1BrB,EAAQ,QACJ0B,GAAwBL,EAAcpB,EAAoBf,EAAakC,EAAmBlB,CAAc,EACxG,CAAC,OAAQmB,CAAY,CAAC,EAE5B,MACF,CAIA,IAAMM,EAAgE,GAGhExB,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAGhC,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACiC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAIhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3CK,GACFH,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAI3B,IAAMyC,EAAY1B,EAAiBY,EAAYC,EAAWjC,EACpD+C,EAAY3B,EAAiBpB,EAAcgC,EAAYC,
EACvDe,EAAWlB,EAAeC,EAAcF,EAC9CX,EAAQ,QACJ+B,GACI1B,EAAYJ,EAAoBf,EAAa0C,EAAWC,EAAWC,EAAUtB,EAC7EmB,CAAyB,EAC7B,CAAC,OAAQtB,CAAU,CAAC,CAC1B,EAEM7C,GAAS,CAACwC,EAAyBZ,IAAqC,CAE5E,IAAMV,EAAgBU,EAAW,SAAW,OACtCD,EAAS,CACba,EAAQ,OAAO,CAAC,EAAE,QACdtB,EAEI,CAACsB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5Bb,EAAO,KAAKa,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAMT,EAAO,CAAC,EAAGH,EAAW,KAAK,CAAC,EAAG,EAAGA,EAAW,KAAK,CAAC,CAAC,EACpDX,EAAU,CAAC,CAAC,EAAE,OAAOW,EAAW,OAAO,EACvCb,EAAY,CAAC,CAAC,EAAE,OAAOa,EAAW,SAAS,EAC3Cd,EAAc,CAAC,CAAC,EAAE,OAAOc,EAAW,WAAW,EAC/Ca,EAAqB5C,GAA0B,CAAC,GAAG+B,EAAY,KAAAG,EAAM,QAAAd,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGa,CAAM,EACnHa,EAAQ,QAAQO,GACZpB,EAAQc,EACRf,GAAeR,EAAgB,CAACQ,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAAI,CAAC,CAAC,CAAC,CAC3F,EAEMzB,GAAS,CAACuC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMQ,EAASR,EAAW,SAAW,OAAS,eAAiB,gBACzDa,EAAqB5C,GAA0B+B,EAAYD,CAAM,EACjEI,EAAOH,EAAW,UAAY,SAAWA,EAAW,KAAOA,EAAW,QACtE4C,EAAWC,GACb9C,EAAO,CAAC,EAAE,KACVA,EAAO,CAAC,EAAE,KACVC,EAAW,QACXA,EAAW,UAAgDG,EAA2B,GAAOK,CAAM,EACvGI,EAAQ,QAAQkC,GACZ/C,EAAQc,EAAoB+B,EAAS,SACrC,CAACA,EAAS,YAAaA,EAAS,aAAcA,EAAS,WAAW,EAClE,CAACA,EAAS,QAAQ,MAAOA,EAAS,QAAQ,IAAKA,EAAS,QAAQ,IAAI,EAAGpC,CAAM,CAAC,CACpF,EAEalC,GAAO,CAACsC,EAAyBZ,IAAqC,CACjFhC,GAAe4C,EAAQ,OAAQZ,CAAU,EACrCY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpCxC,GAAOwC,EAASZ,CAAU,EACjBY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAC3CvC,GAAOuC,EAASA,EAAQ,OAAQZ,CAAU,EAE1C7B,GAAOyC,EAASA,EAAQ,OAAQZ,CAAU,CAE9C,ICzUA,IAiCM+C,GA2HOC,GA5JbC,GAAAC,EAAA,kBAqBAC,IACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAU,GAAOC,EAAqCC,EAC/EC,EAAmB,IAAc,CAChC,IAAMC,EAAeD,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,sEACT,IAAK,GACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQED,CAAI;AAAA,cAEf,QACE,MAAM,IAAI,MAAM,oBAAoBC,CAAgB,oBAAoB,CAC5E,CACF,EACME,EAAgBN,EAAiB;AAAA;AAAA,QAGA;AAAA;AAAA,QAIjCO,EAAkBP,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCQ,EAAUR,EAAiB,2BAA6B,2BACxDS,EAAST,EAAiB,2BAA6B,2BACvDU,EAAMV,EAAiB,MAAQ,MAC/BW,EAAMX,EAAiB,MAAQ,MAE/BY,EAAe;AAAA,yBACFZ,EAAiB,2BAA6B,0BAA0B;AAAA,uBAC1EA,EAAiB,gCAAkC,+BAA+B;AAAA,qBACpFU,CAAG;AAAA,qBACHA,CAAG;AAAA;AAAA,mBAELC,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA;AAAA,kCAGYH,CAAO;AAAA,iBACxBL,CAAI;AAAA;AAAA,kCAEaM,CAAM;AAAA,iBACvBN,CAAI;AAAA;AAAA;AAAA;AAAA,kBAIHQ,CAAG;AAAA,QACbL,CAAa;AAAA,0EACqDF,CAAgB,KAE9ES,EAAUb,EAAiB;AAAA,0BACbI,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SACoB;AAAA,0BACbC,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SAEPW,EAAU;AAAA,0BACIV,CAAgB;AAAA,yBACjBJ,EAAiB,2BAA6B,0BAA0B;AAAA;AAAA;AAAA,YAIvFA,EAAiB,yDACA,wDAAwD;AAAA;AAAA;AAAA,UAGzEK,EAAYD,CAAgB,CAAC;AAAA;AAAA,eAExBD,CAAI;AAAA,QAGPY,EAAkBC,GAAqBd,EAAYC,CAAI,EAqB7D,MApBiB;AAAA,uDACgCA,CAAI;AAAA,MACrDH,EAAiBa,EAAUC,CAAO;AAAA;AAAA;AAAA,uDAGeX,CAAI;AAAA,MACrDH,EAAiBc,EAAUD,CAAO;AAAA;AAAA;AAAA,iEAGyBV,CAAI;AAAA,wBAC7CC,CAAgB;AAAA;AAAA;AAAA,uBAGjBJ,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGO,CAAe;AAAA,QACfU,GAAYhB,CAAO,CAAC;AAAA,QACpBc,CAAe;AAAA,8EACuDX,CAAgB;AAAA;AAAA,IAI1F,EAESd,GACT,CAAC4B,EAA+BhB,EAAqCiB,EACpEC,EAAmBC,EAAmBC,EAAkBC,EACxDC,IAAoD,CACnD,IAAMxB,EAAiBE,EAAW,SAAW,OACvCuB,EAAazB,EAAiBkB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EAC
zBQ,EAAW3B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAY5B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAc7B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAAS9B,GAAmByB,EAAa,IAAM,GAAKA,EAAa,GAAMI,EAAc,IAAM,EAG3FE,EAAY/B,EAAiB6B,EAAcF,EAAWC,EACtDI,EAAYhC,EAAiB2B,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,wCAAwCD,CAAQ,EAAE,EAE7E,IAAM/B,EAAmB0B,EAAS,EAAI,EAChCO,EAAY,KAAK,IAAIJ,EAAc,CAAC,EAAI7B,EAAkB6B,EAAc,CAAC,CAAC,EAC1EK,EAAaR,EAAS,EAAI,EAC1BS,EACF,CAACrC,EAAW,YAAYF,EAAiB,EAAI,CAAC,EAAGE,EAAW,YAAYF,EAAiB,EAAI,CAAC,CAAC,EAC7FwC,EAAsB,CAC1BD,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,IACrGqC,EAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,EAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,GACvG,EACMuC,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFsC,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,CACvF,EAEMwC,EAAoC,CACxC,CAAC,OAAsB,KAAMtB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAMpB,EAAW,OAAO,EACvF,CAAC,OAAsB,KAAMA,EAAW,SAAS,EAAG,CAAC,OAAsB,KAAMqC,CAAU,EAC3F,CAAC,OAAsB,KAAME,CAAI,CACnC,EACAE,GAA6BzC,EAAYwC,CAAe,EACxDA,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAElF,IAAM2B,EAAwD,CAAC,OAAQ,MAAM,EACzEtB,IACFmB,EAAgB,KAAK,GAAGE,EAA2B1B,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE2B,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,EAA2BzB,CAAW,CAAC,EAE/D,IAAM2B,GAAmBC,IAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EAC5EY,GAAID,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACnEiC,EAASC,EAAe,SAAUlC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQmB,CAAU,EACpFe,GAAiB,CAACL,EAAGE,EAAC,EAExBI,GAAmB,GACvB,GAAI/B,EAAS,CACX,IAAMgC,GAAON,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EACxFe,GAAe,KAAKE,EAAI,EACxBD,IAAoB;AAAA,4DAC8BC,GAAK,KAAK,KAAK;AAAA,iCAC1CvD,EAAiB,IAAM,GAAG,GAAG8B,EAAS,MAAQ,EAAE;AAAA,YAEzE,CAEA,IAAM0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ,CAAC,EACrF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQjB,EAAW,MAAM,EAC5D,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQE,EAAK,MAAM,CACjD,EACAgB,GAAyBvD,EAAYsD,EAAQ,EAC7C,IAAME,EAAWC,GAA4BzC,EAAO,CAAC,EAAE,SAAU,CAAC,EAClE,GAAIwC,IAAa,OAASA,IAAa,MACrC,MAAM,IAAI,MAAM,YAAYA,CAAQ,oBAAoB,EAE1D,MAAO;AAAA,UACLE,GAAc,yBAAyB,CAAC;AAAA,UACxCb,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGH,GAAgBF,CAAM,CAAC;AAAA,UACnFG,EAAgB;AAAA,UAChBjE,GAA6BW,EAAgBuB,EAASrB,EAAY8C,EAAE,KAAK,MAAO5C,CAAgB,CAAC;AAAA,UAE/F0B,EAAS+B,GACI3B,EAAmBD,EAAeyB,EAAU,OAAW,CAAC1D,EAAgBqC,CAAS,EACrFyB,GACI5B,EAAmBD,EAAeyB,EAAU,OAAW,CAAC1D,EAAgBqC,EAAW,GACnF,OAAWb,CAAyB,CAAC,EACxD,EAEA,MAAO,CACL,KAAM,wBACN,YACI,CAAC,KAAM,GAAGtB,EAAW,QAAQ,IAAIgC,CAAiB,IAAID,CAAa,IAAIH,CAAM,GAAI,kBAAAe,CAAiB,EACtG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM1B,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAO,CACF,GACA,gBAAAI,EACF,CACF,ICvQJ,IA2BMiB,GAiMOC,GA5NbC,GAAAC,EAAA,kBAmBAC,IACAC,KAEAC,KAEAC,KAGMP,GACF,CAACQ,EAA4BC,EAA+BC,EAAgCC,EAC3FC,EAA+BC,EAAS,GAAOC,EAAkBC,EACjEC,EAAiB,KAAkB,CAClC,IAAMC,EAASD,EAAiB,EAAI,EAC9BE,EAASF,EAAiB,EAAI,EAC9BG,EAAaH,EAAiB,EAAI,EAClCI,EAAgB
P,EAAS,EAAI,EAE/BQ,EAAmB;AAAA,iDACoBR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,0BAC9DD,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,KAEvDH,IACFU,GAAoB;AAAA,sDAC0BR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,2BAClEE,EAAiB,IAAM,GAAG,GAAGH,EAAS,MAAQ,EAAE;AAAA,QAGrE,IAAMS,EAAaT,EAAS,EAAI,EAC1BU,EAAIC,EAAc,IAAKf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC5EG,EAAKD,EAAc,KAAMf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC9EI,EAAiB,CAACD,EAAIF,CAAC,EACzBZ,GACFe,EAAe,KAAKF,EAAc,OAAQf,EAAO,CAAC,EAAE,SAAU,CAACC,EAAYS,CAAU,CAAC,EAAE,OAAQG,CAAU,CAAC,EAE7G,IAAMK,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQY,CAAU,EAEpFO,EAAe;AAAA,2BACAjB,EAAuB,cAAgB,gBAAgB;AAAA,kBAChEA,EAAuB,cAAgB,gBAAgB;AAAA,kBACvDA,EAAuB,cAAgB,gBAAgB,MAAMQ,CAAa;AAAA,wBACpER,EAAuB,cAAgB,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM7CE,CAAQ,MAAMM,CAAa;AAAA,8BAC/BA,CAAa;AAAA,8BACbN,CAAQ;AAAA;AAAA;AAAA,uBAGfA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,oCAExCA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAOnBA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA,0BACpDA,CAAQ,wBAAwBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAO/CA,CAAQ;AAAA;AAAA;AAAA;AAAA,wCAINA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAUhBS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAMhBW,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA;AAAA,iDAEjBX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iDAMRK,CAAU;AAAA;AAAA,gCAE3BI,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCASZS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA,oCACjCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAUTM,CAAa;AAAA,qCACXT,EAAU,YAAc,QAAQG,CAAQ,QAAQ;AAAA,YACzEa,EAAO,IAAI,QAAS,IAAK,QAAS,KAAM,OAAO,CAAC;AAAA;AAAA,SAGhDG,EAAc;AAAA,gCACMH,EAAO,gBAAgB,YAAY,CAAC;AAAA,wBAC5CA,EAAO,WAAW,gBAAiB,CAAC,CAAC;AAAA,qBACxCA,EAAO,WAAW,gBAAiBR,CAAU,CAAC;AAAA,oBAC/CQ,EAAO,WAAW,gBAAiBV,CAAM,CAAC;AAAA,oBAC1CU,EAAO,WAAW,gBAAiBT,CAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAQpCJ,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,yBAKTA,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,sCAEvCA,CAAQ,sBAAsBG,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAU/CH,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,wCAEvCA,CAAQ,sBAAsBI,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAQlEF,EAAiBS,EAAG,IAAI,QAAS,OAAQ,OAAQ,cAAc,EAC9CA,EAAG,IAAI,QAAS,eAAgB,OAAQ,MAAM,CAAC;AAAA,+BAC3CF,EAAE,IAAI,eAAgB,cAAe,cAAe,aAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM/DZ,EAAU,WAAa,GAAGG,CAAQ,OAAO;AAAA,YAC/Da,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,UAG/C,MAAO;AAAA,IACTnB,EAAa,iBAAiBO,CAAQ,EAAE,iBAAiB,GAAGW,EAAgBC,CAAM,CAAC;AAAA,IACnFN,CAAgB;AAAA;AAAA,MAEdb,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,IAC5EK,EAASgB,EAAeC,CAAW,GACnC,EAES7B,GACT,CAACQ,EAA+BsB,EAC/BC,IAAqF,CACpF,IAAMrB,EAAUF,EAAO,OAAS,EAE1BC,EAAcqB,EAAW,YACzBE,EAAaC,EAAU,KAAKxB,CAAW,EAMvCyB,EAAW,CACf,KAAK,KAAKF,EAAa,EAAE,EACzB,EACA,CACF,EACAG,GAAU,UAAW,IAAM,uCAAuCD,CAAQ,EAAE,EAE5E,IAAMnB,EAAiBe,EAAW,SAAW,OACvCM,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAU,CAACP,EAAW,QAAQ,CAAC,EAAGA,EAAW,
QAAQ,CAAC,CAAC,EACvDQ,EACF,CAACR,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAGe,EAAW,YAAYf,EAAiB,EAAI,CAAC,CAAC,EAC7FwB,EAAY,CAACT,EAAW,UAAU,CAAC,EAAGA,EAAW,UAAU,CAAC,CAAC,EAC7DU,EAAsB,CAC1BF,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,IAC3FQ,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,GAC7F,EACMW,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFU,EAAoB,CAAC,EAAI,EAAI,KAAK,MAAMV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,CAAC,EAAI,CACrF,EAEMlB,EAAS,GACT8B,EAAQZ,EAAW,MACnBa,EAASnC,EAAO,CAAC,EAAE,KACnBoC,EAAwBD,EAAO,CAAC,EAAID,EACpCG,EAAyBF,EAAO,CAAC,EAEjCG,EAAoC,CACxC,CAAC,QAAuB,KAAMd,CAAU,EAAG,CAAC,QAAuB,KAAMK,CAAO,EAChF,CAAC,QAAuB,KAAMC,CAAU,EAAG,CAAC,QAAuB,KAAMC,CAAS,EAClF,CAAC,QAAuB,KAAMC,CAAmB,EAAG,CAAC,OAAsB,KAAMC,CAAI,EACrF,CAAC,QAAuB,KAAMG,CAAqB,EAAG,CAAC,QAAuB,KAAMC,CAAsB,EAC1G,GAAGE,EAA2BvC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9D,EACIE,IACFoC,EAAgB,KAAK,GAAGC,EAA2BvC,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE4B,EAAkB,KAAK,MAAM,GAE/BU,EAAgB,KAAK,GAAGC,EAA2BtC,CAAW,CAAC,EAE/D,IAAME,EAAuBuB,EAAS,CAAC,IAAM,GAAKA,EAAS,CAAC,IAAM,EAC5Dc,EAAmBzC,GAA+B,CACtD,IAAMO,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQuB,EAAQ,MAAM,EACzF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQC,EAAW,MAAM,EAC5D,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,MAAM,EAC1D,CAAC,KAAM,wBAAyB,KAAM,MAAO,OAAQE,EAAoB,MAAM,EAC/E,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAAG,CAAC,KAAM,2BAA4B,KAAM,KAAK,EAChG,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACM5B,EAAWoC,GAA4BzC,EAAO,CAAC,EAAE,QAAQ,EAC/D,MAAO,GACHT,GACIQ,EAAcC,EAAQC,EAAaC,EAASC,EAAsBC,EAAQC,EAAUC,EACpFC,CAAc,CAAC,EACzB,EACA,MAAO,CACL,KAAM,kBACN,YAAa,CAAC,KAAM,GAAGe,EAAW,QAAQ,IAAK,kBAAAM,CAAiB,EAChE,WAAY,KAAO,CACjB,cAAe,CAAC,EAAGF,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,QAAS,CAAC,CACR,KAAMH,EAA6BA,EAA2BtB,CAAW,EAAIA,EAC7E,SAAUD,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,gBAAAsC,CACF,GACA,gBAAAE,CACF,CACF,ICpTJ,IAYME,GAIAC,GAWAC,GAiCAC,GAwCOC,GA+BPC,GAqEAC,GAEAC,GAsDAC,GA6COC,GA7SbC,GAAAC,EAAA,kBAMAC,KACAC,KAEAC,KACAC,KAEMf,GACF,CAACgB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DpB,GAAoB,CAACqB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEMzB,GACF,CAAC0B,EAA+BC,EAAgCC,EAA8BP,EAC7FQ,EAAeP,EAAgBQ,EAA4BC,EAAwBC,EACnFC,IAA0B,CACzB,IAAMC,EAAcR,EAAW,OAAS,EAClCS,EAAoBF,EAAY,SAAW,EACjD,GAAID,EAAc,SAAW,EAC3B,QAASI,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EACjCJ,EAAc,KAAK,CAAC,EAGxB,IAAMK,EAAYX,EAAW,CAAC,EACxBY,EAAcX,EAAYI,EAAgB,EAAI,CAAC,EAAIF,EACzD,QAASO,EAAI,EAAGG,EAAIb,EAAW,OAASQ,GAAeH,EAAgB,EAAI,GAAIK,EAAIF,EAAa,EAAEE,EAAG,EAAEG,EAAG,CACxG,IAAMC,EAASd,EAAWa,CAAC,EACrBpB,EAAUgB,EAAoBK,EAASV,EAAQM,CAAC,EAAIH,EAAYG,CAAC,EACjEhB,EAAWtB,GAAgB0C,EAAQV,EAAQM,CAAC,EAAGd,EAAKc,CAAC,EAAGT,EAAYY,CAAC,EAAGX,EAAUQ,CAAC,EAAGjB,CAAO,EACnGpB,GAAkBqB,EAAUC,EAASC,EAAMc,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRH,EAAQM,CAAC,GAAKI,EAAS,GAAKR,EAAcI,CAAC,GAAKT,EAAYY,CAAC,EAAI,GAAKX,EAAUQ,CAAC,EAAI,EAAId,EAAKc,CAAC,EAC/Fd,EAAKc,EAAIF,CAAW,CAAC,CAE7B,CACAD,EAAY,OAAO,EAAG,EAAGI,CAAS,EAClCJ,EAAY,OAAOF,EAAgB,EAAI,EAAG,EAAGO,CAAW,CAC1D,EAOErC,GACF,CAAoCwC,EAAeC,IAAqC,CACtF,IAAMf,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAClGjB,EAAY,OAAS,EACrB,QAASS,EAAI,EAAGA,EAAIM,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEN,EAC3
CT,EAAY,KAAKe,EAAO,CAAC,EAAE,KAAKN,CAAC,CAAC,CAEtC,CACA,IAAMS,EAAiBJ,EAAW,SAAW,OAC7Cd,EAAY,OAAO,EAAG,EAAGe,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC1Cf,EAAY,OAAOkB,EAAiB,EAAI,EAAG,EAAGH,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAE/D,IAAMpB,EAAOmB,EAAW,KAAK,MAAM,EAC7BR,EAAcQ,EAAW,YAAY,MAAM,EAC3CT,EAAgBS,EAAW,cAAc,MAAM,EAC/Cf,EAAagB,EAAO,CAAC,EAAE,KACzBd,EAAYa,EAAW,UAAU,MAAM,EAC3C,GAAIb,EAAU,OAAO,CAACe,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC9C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5Cd,EAAY,IAAI,MAAMM,CAAW,EAAE,KAAK,CAAC,CAC3C,CACA,IAAIJ,EAAUW,EAAW,QAAQ,MAAM,EACvC,GAAIX,EAAQ,OAAO,CAACa,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC5C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5CZ,EAAU,IAAI,MAAMI,CAAW,EAAE,KAAK,CAAC,CACzC,CAGAlC,GACI0B,EAAYC,EAAaC,EAAWa,EAAW,QAASA,EAAW,MAAOnB,EAAMQ,EAASe,EACzFb,EAAeC,CAAW,EAG9B,IAAMa,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAL,EAAM,cAAAU,EAAe,YAAAC,EAAa,UAAAL,EAAW,QAAAE,CAAO,CAAC,EACzFgB,CACT,EAES5C,GAAgCuC,GAAiE,CAC5G,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBpB,EACF,CAAC,SAAU,QAAS,aACnB,YAAY,EAAE,OAAOoB,EAAW,QAAW,IAAc,EAAIA,EAAW,OAAiB,EACxFb,EAAYa,EAAW,UACvBZ,EAAQY,EAAW,MACnBd,EAAcc,EAAW,YACzBnB,EAAOmB,EAAW,KAClBX,EAAUW,EAAW,QACrBS,EAAYT,EAAW,SAA2B,EAClDT,EAAgBS,EAAW,cAC3BR,EAAcQ,EAAW,YAC/B,MAAO,CACL,QAAApB,EACA,OAAA4B,EACA,UAAArB,EACA,MAAAC,EACA,YAAAF,EACA,cAAAK,EACA,YAAAC,EACA,KAAAX,EACA,QAAAQ,EACA,SAAAoB,EACA,GAAGH,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEM5C,GAAiB,CAACuC,EAA+BD,IAA8C,CAGnG,GAAI,CAACC,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAG7D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAMS,EAAcT,EAAO,CAAC,EAAE,KAAKD,EAAW,SAAW,OAASC,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFU,EAAkBV,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIS,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAcX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAID,EAAW,MAGnD,GAAIC,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMW,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMnB,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAG5C,GAFqBD,EAAW,UAAU,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEnDH,EAAW,UAAU,SAAWP,EAClD,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAKvD,GAFmBO,EAAW,QAAQ,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEjDH,EAAW,QAAQ,SAAWP,EAC9C,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAKrD,GADgBO,EAAW,KAAK,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAC9CH,EAAW,KAAK,SAAWP,EAAc,EACtD,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIO,EAAW,cAAc,SAAWP,GAAeO,EAAW,cAAc,SAAW,EACzF,MAAM,IAAI,MAAM,4BAA4BP,CAAW,GAAG,EAM5D,GADuBO,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GACrDH,EAAW,YAAY,SAAW,GACpDA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5D,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAID,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAGMtC,GAAsB,CAAC,EAAG,EAAG,EAAG,CAAC,EAEjCC,GACF,CAACiD,EAAyBZ,EAA+BD,IAA8C,CACrG,IAAMc,EAAqBtD,GAAmCwC,EAAYC,CAAM,EAC1EG,EAAiBJ,EAAW,SAAW,OACvCR,EAAcsB,EAAmB,YACjCjB,EAAcL,EAAYY,EAAiB,EAAI,CAAC,EAChDW,EAAgBd,EAAO,CAAC,EAAE,KAAKG,EAAiB,EAAI,CAAC,EAI3D,GAAIU,EAAmB,QAAU,GAAMjB,IAAgB,GAAKkB,IAAkB,EAAI,CAChFF,EAAQ,QAAQG,GAAiCf,EAAQa,CAAkB,CAAC,EAC5E,MACF,CACA,IAAMG,EAAYzB,EAAYY,EAAiB,EAAI,CAAC,EAC9Cc,EAAW1B,EAAYY,EAAiB,EAAI,CAAC,EAC7Ce,EAAelB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/BmB,EAAcnB,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BoB,EAAYjB,EAAiBa,EAAYC,EAAWrB,EACpDyB,EAAYlB,EAAiBP,EAAcoB,EAAYC,EACvDK,EAAWJ,EAAeC,EAAcL,EAExCS,EAAgE,GAIhEC,EAAoBZ,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJa,GAA2BzB,EAAO
,CAAC,EAAGtC,EAAmB,EACzD,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACqC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACa,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKY,GAIhC,IAAME,EAAsB,CAAC1B,EAAO,CAAC,EAAGwB,CAAgB,EAClDG,EAAU3B,EAAO,SAAW,EAC9B2B,IACE,CAACxB,GAAkBH,EAAO,CAAC,EAAE,KAAK,SAAW,EAC/C0B,EAAoB,KAAK1B,EAAO,CAAC,EAAE,QAAQ,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,CAAC,CAAC,EAErE0B,EAAoB,KAAK1B,EAAO,CAAC,CAAC,GAKtCY,EAAQ,QACJgB,GACIF,EAAqBb,EAAoBtB,EAAa6B,EAAWC,EAAWC,EAAUK,EACtFJ,CAAyB,EAC7B,CAAC,OAAQG,CAAmB,CAAC,CACnC,EAEE9D,GAAkB,CAACgD,EAAyBb,IAA8C,CAE9F,IAAMV,EAAgBU,EAAW,SAAW,OAEtCC,EAAS,CACbY,EAAQ,OAAO,CAAC,EAAE,QACdvB,EAEI,CAACuB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5BZ,EAAO,KAAKY,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAI3B,EAAcc,EAAW,aACzBd,EAAY,SAAW,GAAKA,EAAY,CAAC,IAAM,KACjDA,EAAc,CAAC2B,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,GAE1C,IAAI1B,EAAYa,EAAW,WACvBb,EAAU,SAAW,GAAKA,EAAU,CAAC,IAAM,KAC7CA,EAAY,CAAC,CAAC,GAEhB,IAAIE,EAAUW,EAAW,SACrBX,EAAQ,SAAW,GAAKA,EAAQ,CAAC,IAAM,KACzCA,EAAU,CAAC,CAAC,GAEd,IAAIR,EAAOmB,EAAW,KAClBnB,EAAK,SAAW,IAClBA,EAAO,CAAC,EAAG,CAAC,GAEdA,EAAO,CAAC,EAAGA,EAAK,CAAC,EAAG,EAAGA,EAAK,CAAC,CAAC,EAC9BQ,EAAU,CAAC,CAAC,EAAE,OAAOA,CAAO,EAC5BF,EAAY,CAAC,CAAC,EAAE,OAAOA,CAAS,EAChCD,EAAc,CAAC,CAAC,EAAE,OAAOA,CAAW,EACpC,IAAM4B,EACFtD,GAAmC,CAAC,GAAGwC,EAAY,KAAAnB,EAAM,QAAAQ,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGe,CAAM,EACrGY,EAAQ,QAAQG,GACZf,EAAQa,EACRtB,GAAeF,EAAgB,CAACE,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAC/C,CAACA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,CAAC,CAAC,CACtF,EAEa1B,GAAgB,CAAC+C,EAAyBb,IAA8C,CACnGtC,GAAemD,EAAQ,OAAQb,CAAU,EACrCa,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpChD,GAAgBgD,EAASb,CAAU,EAEnCpC,GAAgBiD,EAASA,EAAQ,OAAQb,CAAU,CAEvD,ICpTA,IAgBM8B,GAkDOC,GAOAC,GAzEbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAOMR,GACF,CAACS,EAAmBC,EAA+BC,EAAuBC,IACvD,CACb,IAAMC,EAAaC,EAAU,KAAKJ,CAAU,EACtCK,EAAOL,EAAW,OAClBM,EAAQC,EAAc,QAASR,EAAWM,CAAI,EAC9CG,EAASC,EAAe,SAAUV,EAAWM,CAAI,EACjDK,EAAYT,EAAU,WAAa,EAAiBA,EAAU,cAAc,EAAE,CAAC,EAC3B,OAAOA,EAAU,iBAAiB,EAAE,CAAC,CAAC,EAC1FU,EAAOP,EAAU,cAAcM,EAAWL,CAAI,EAC9CO,EAAmBC,GAA+B,CACtD,IAAMC,EAAQ,QAAQR,EAAM,WAAW,eAAgB,eAAe,CAAC,KACjES,EAAMC,EAAa,uBAAwB,gBAAiBX,CAAI,EAChEY,EAAaf,EAAW,QAAUY,GAASZ,EAAW,UAAY,OAAS,IAAM,IACjFgB,EAAahB,EAAW,QAAUa,EAAMD,GAASZ,EAAW,UAAY,GAAK,QACnF,MAAO;AAAA,kBAEHW,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,CAAM,CAAC;AAAA,kBAClCK,EAAa,UAAU,CAAC;AAAA,oBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,uCACtDL,EAAO,gBAAgB,YAAY,CAAC;AAAA,8BAC7CA,EAAO,KAAK,KAAK;AAAA,sCACTS,CAAU;AAAA,qCACXC,CAAU;AAAA;AAAA,sBAEzBZ,EAAM,WAAW,eAAgB,gBAAiB,QAAQ,CAAC;AAAA,kCAC/CA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,oBAEhDE,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,kBAEjD,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMF,EAAY,SAAUD,CAAS,CAAC,EACjD,cAAe,CAAC,EAAG,KAAK,KAAKI,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,QAAuB,KAAMQ,CAAI,EAC7E,GAAGQ,EAA2BnB,EAAYA,CAAU,CACtD,CAEF,GACA,gBAAAY,CACF,CACF,EAGKrB,GAAS,CAAC6B,EAAyBlB,IAAuC,CACrF,IAAMF,EAAaoB,EAAQ,OAAO,CAAC,EAAE,KAC/BrB,EAAYqB,EAAQ,OAAO,CAAC,EAAE,SAC9BT,EAAOS,EAAQ,OAAO,CAAC,EAC7BA,EAAQ,QAAQ9B,GAAwBS,EAAWC,EAAYW,EAAMT,CAAU,EAAG,
CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACjG,EAEaV,GAAyBU,GAA0D,CAC9F,IAAMmB,EAAYnB,EAAW,YAAwB,EAC/CoB,EAAUpB,EAAW,UAAsB,EACjD,OAAOqB,GAA4B,CAAC,UAAAF,EAAW,QAAAC,CAAO,CAAC,CACzD,IC7EA,IAoBME,GASAC,GAWAC,GA2DOC,GAKAC,GAxGbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAWMV,GAAkBW,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,gCAAgC,EAElD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,iCAAiC,CAErD,EAEMV,GAAmB,CAACW,EAAgBC,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAAS,EAAI,EAAG,EAAID,EAAM,EAAE,EAC1BG,EAAY,KAAKF,EAAM,WAAW,IAAKF,EAAK,CAAC,EAAG,KAAK,CAAC,GAAG,CAAC,EAE5D,OAAAI,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMd,GAAgC,CAACe,EAAyBC,IAAoD,CAClH,IAAIC,EAAWC,EAAWC,EAAWC,EACjCC,EACAX,EACEY,EAAgBN,EAAW,SAAW,OACtCO,EAAYP,EAAW,UACvBQ,EAAYR,EAAW,OAAS,MAClCM,GACF,CAACL,EAAGC,EAAGC,EAAGC,CAAC,EAAIL,EAAY,KAC3BM,EAAQG,EAAY,CAACP,EAAGC,EAAGC,EAAGI,EAAWA,EAAWH,EAAKG,GAAa,CAAE,EACpD,CAACN,EAAGC,EAAGC,EAAGC,EAAKG,GAAa,EAAIA,EAAWA,CAAS,EACxEb,EAAOc,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,IAEzD,CAACP,EAAGC,EAAGC,EAAGC,CAAC,EAAI,CAACL,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,CAAC,EAClGM,EAAQG,EAAY,CAACP,EAAGM,EAAWA,EAAWH,EAAKG,GAAa,EAAIL,EAAGC,CAAC,EACpD,CAACF,EAAGG,EAAKG,GAAa,EAAIA,EAAWA,EAAWL,EAAGC,CAAC,EACxET,EAAOc,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,GAE3D,IAAMC,EAAsBV,EAAY,QAAQM,CAAK,EAC/CK,EAAoBD,EAAoB,KAAK,OAC7CE,EAAgBZ,EAAY,SAE5Ba,EAAgBC,EAAc,IAAKF,EAAeD,CAAiB,EACnEI,EAAeC,EAAe,SAAUJ,EAAeD,CAAiB,EAExEM,EAAmBC,GAA+B;AAAA,IACtDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEhG/B,GAAiBW,EAAMgB,EAAmBE,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEtEG,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DH,EAAa,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGxDA,EAAa,YAAY,aAAcF,EAAc,aAAa,UAAU,CAAC,CAAC;AAAA,KAGlF,MAAO,CACL,KAAM,eACN,YAAa,CAAC,KAAM,GAAGb,EAAY,IAAI,IAAIC,EAAW,SAAS,IAAIA,EAAW,IAAI,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACjH,WAAaP,GAAW,CACtB,IAAMyB,EAAcZ,EAAgB,CAACL,EAAGC,EAAIK,EAAWJ,EAAII,EAAWH,EAAKG,GAAa,CAAE,EACtD,CAACN,EAAGG,EAAKG,GAAa,EAAIL,EAAIK,EAAWJ,EAAII,CAAS,EACpFY,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAkBZ,EAAoB,KACtCa,EAAiBF,EAAU,gBAAgBC,EAAiB3B,CAAI,EACtE,MAAO,CACL,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUzB,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAK0B,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGI,EAA2BF,EAAiBC,CAAc,CAAC,CAChH,CACF,EACA,gBAAAN,CACF,CACF,EAEa/B,GAAe,CAACuC,EAAyBxB,IAA6C,CACjGlB,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQxC,GAA8BwC,EAAQ,OAAO,CAAC,EAAGxB,CAAU,CAAC,CAC9E,EAEad,GAA+Bc,GACxCyB,GAA4B,CAC1B,UAAWzB,EAAW,UACtB,KAAMA,EAAW,KACjB,OAAQA,EAAW,MACrB,CAAC,IC7GL,IAsBM0B,GAEAC,GACAC,GACAC,GACAC,GAQAC,GAqBAC,GA4HAC,GAEAC,GA+GOC,GAOAC,GA5SbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAaMhB,GACF,qBACEC,GAAc,IAAMD,GAAgB,KACpCE,GAAkB,IAAMD,GAAc,IACtCE,GAAa,IAAMF,GAAc,MAAQA,GACzCG,GAAiB,IAAMD,GAAa,IAQpCE,GAAN,KAAiB,CACf,YAAYY,EAAa,GAAI,CAC3B,KAAK,gBAAkB,IAAI,IAC3B,KAAK,WAAaA,CACpB,CAGA,UAAUC,EAAgBC,EAAe,CACvC,IAAIC,EAAQ,KAAK,gBAAgB,IAAIF,CAAM,EACvCE,IAAU,OACZA,EAAQ,CAACD,CAAK,EAEdC,EAAM,KAAKD,CAAK,EAElB,KAAK,gBAAgB,IAAID,EAAQE,CAAK,CACxC,CAIF,EAEMd,GAAN,KAAqB,CACnB,YAAYe,EAA+CC,EAAkB,CAAlB,cAAAA,EACzD,KAAK,YAAc,GACnB,KAAK,aAAe,IAAI,IACxB,KAAK,IAAM,IAAI,MACf,KAAK,WAAa,CAAC,EAGnB,GAAI,CAACC,EAAKC,CAAG,EAAIF,EAAS,SAAS,IAAI,EAAIA,EAAS,MAAM,KAAM,CAAC,EAAI,CAACA,EAAU,EAAE,EAClF,GAAI,CAACC,EAAI,MAAM,OAAOnB,EAAc,CAAC,EACnC,MAAM,IAAI,MAAM,kBAAkB,EAapC,GAXmBmB,EAAI,M
AAM,GAAG,EACrB,QAAQ,CAACE,EAAWN,IAAU,CACvC,IAAMO,EAAOL,EAAOF,CAAK,EAAE,KAAK,MAAM,EACtC,GAAI,CAACM,EAAU,MAAM,OAAOvB,EAAe,CAAC,EAC1C,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMyB,EAAa,KAAK,YAAYF,EAAW,GAAMC,EAAMP,CAAK,EAChE,KAAK,IAAI,KAAKQ,CAAU,CAC1B,CAAC,EAGGH,IAAQ,GAEVA,GAAO,CAAC,GAAG,KAAK,aAAa,QAAQ,CAAC,EAC1B,OAAO,CAAC,CAACI,EAAKC,CAAI,IAAOA,EAAK,QAAU,GAAKD,IAAQ,KAAM,EAC3D,IAAI,CAAC,CAACA,CAAG,IAAMA,CAAG,EAClB,KAAK,EAAE,UAEf,CAACJ,EAAI,MAAM,OAAOvB,EAAW,CAAC,EAChC,MAAM,IAAI,MAAM,aAAa,EAKduB,EAAI,MAAM,OAAOxB,GAAe,GAAG,CAAC,GAC3C,QAASkB,GAAW,CAC9B,GAAIA,IAAW,MACb,KAAK,WAAa,KAAK,WAAW,OAAO,KAAK,YAAY,MACrD,CACL,IAAMW,EAAO,KAAK,aAAa,IAAIX,CAAM,EACzC,GAAIW,IAAS,OACX,MAAM,IAAI,MAAM,oBAAoB,EAEtC,KAAK,WAAW,KAAKA,EAAK,QAAQ,CACpC,CACF,CAAC,EACD,KAAK,IAAM,KAAK,YAAYL,EAAK,GAAO,KAAK,UAAU,CACzD,CAGA,UAAUN,EAAgBY,EAAkBb,EAAoB,CAC9D,IAAIY,EAAO,KAAK,aAAa,IAAIX,CAAM,EACvC,GAAIW,IAAS,OAAW,CACtB,GAAIA,EAAK,WAAaC,GAAYD,EAAK,QAAU,EAC/C,MAAM,IAAI,MAAM,oBAAoB,EAEpCA,EAAK,QACLA,EAAK,aAAa,KAAKZ,CAAU,CAErC,MACEY,EAAO,CAAC,MAAO,EAAG,SAAAC,EAAU,aAAc,CAACb,CAAU,CAAC,EAExD,KAAK,aAAa,IAAIC,EAAQW,CAAI,CACpC,CAGA,YAAYE,EAAcC,EAAkBN,EAAyBP,EAAQ,GAAgB,CAC3F,IAAMc,EAAOP,EAAK,OACdQ,EAAW,GACXC,EAAe,CAAC,EAChBC,EAAU,EAEd,GAAI,CAACL,EAAK,MAAM,OAAO7B,EAAe,CAAC,GAAM,CAAC8B,GAAWD,IAAS,GAChE,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMM,EAAeN,EAAK,MAAM,OAAO/B,GAAe,GAAG,CAAC,EACpD2B,EAAa,IAAItB,GAAWc,CAAK,EAEvC,OAAAkB,GAAc,QAAQ,CAACnB,EAAgBoB,IAAc,CACnD,GAAIpB,IAAW,MAAO,CACpB,GAAIgB,EACF,MAAM,IAAI,MAAM,6CAA6C,EAE/DA,EAAW,GACX,IAAMK,EAAoBN,EAAOI,EAAa,OAAS,EACvD,GAAIE,EAAoB,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GADAJ,EAAeT,EAAK,MAAMU,EAASA,EAAUG,CAAiB,EAC1D,KAAK,aACP,GAAI,KAAK,aAAa,SAAWJ,EAAa,QAC1C,KAAK,aAAa,SAAS,IAAMA,EAAa,SAAS,EACzD,MAAM,IAAI,MAAM,8BAA8B,UAEvCH,EACT,KAAK,YAAc,GACnB,KAAK,aAAeG,MAEpB,OAAM,IAAI,MAAM,uCAAuC,EAGzD,QAASK,EAAI,EAAGA,EAAIL,EAAa,OAAQK,IAAK,CAC5C,IAAMtB,EAAS,OAAO,aAAa,IAAI,WAAW,CAAC,EAAIsB,CAAC,EACxDb,EAAW,UAAUT,EAAQoB,EAAIE,CAAC,EAClC,KAAK,UAAUtB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAC/C,CACF,MACEQ,EAAW,UAAUT,EAAQoB,GAAK,KAAK,YAAc,KAAK,aAAa,OAAS,EAAI,EAAE,EACtF,KAAK,UAAUpB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAEjD,CAAC,EACMQ,CACT,CAQF,EAEMpB,GAAakC,GAAyBA,EAAO,OAE7CjC,GACF,CAACkC,EAAuCC,EAAkBC,EACzDC,IAAgD,CAE/C,IAAMC,EADQJ,EAAY,IAAKhB,GAASA,EAAK,MAAM,EAC3B,IAAI,CAACO,EAAMd,IAAU4B,EAAc,QAAQ5B,CAAK,GAAIwB,EAAUV,CAAI,CAAC,EACrFe,EAAaC,EAAU,KAAKJ,CAAW,EACvCK,EAASC,EAAe,SAAUR,EAAUE,EAAY,MAAM,EAC9DO,EACF,CAAC,GAAGR,EAAe,aAAa,KAAK,CAAC,EAAE,OAAQ1B,GAAW,CAAC0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,CAAC,EACxGmC,EAAmBC,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EACrBC,EAAW,kBACXC,EAAU,iBACVC,EAAY,eACZC,EAAgC,CAAC,EACjCC,EAAiC,CAAC,EAClCC,EAAiC,CAAC,EAClCC,EAA4B,CAAC,EAC7BC,EAAyBnB,EAAe,aAAa,OAASA,EAAe,IAAI,gBAAgB,KACvGA,EAAe,aAAa,QAAQ,CAACf,EAAMX,IAAW,CACpD,GAAI0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,EAAG,CAClD,IAAM8C,EAAcpB,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,IAAI,CAAC,EAClE8C,IAAgB,QAClBpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBoC,EAAQ,KAAK,GACTT,EAAUR,CAAC,EAAE,WACT,QAAQA,CAAC,UAAWnB,EAAO+B,EAAO,WAAW,gBAAiBc,CAAW,CAAC,CAAC,EAAE,CACvF,CAAC,CACH,CACF,CAAC,CAEL,MACEpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,EAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,IAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,EAAQ,QAAS9C,GAAU,CACzBwC,EAAoB,KAAK,GAAGb,EAAUR,CAAC,EAAE,WAAW,QAAQA,CAAC,UAAWnB,EAAO,GAAGD,CAAM,EAAE,CAAC,EAAE,CAC/F,CAAC,EACD4C,EAAgB,KAAK,WAAWhB,EAAUR,CAAC,EAAE,aAAa,QAAQA,CAAC,SAAS,CAAC,GAAG,CAClF,CACF,CAAC,EACDsB,EAAqB,KACjB
,WAAW1C,CAAM,cAAcA,CAAM,eAAeX,GAAUW,CAAM,CAAC,KAAKA,CAAM,OAAO,EAC3F2C,EAAqB,KAAK,GAAG,CAEjC,CAAC,EACD,IAAMK,EAAYH,EACd,CACE,GAAGR,EACH,aAAaT,EAAU,IAAI,CAACqB,EAAU7B,IAAM6B,EAAS,aAAa,QAAQ7B,CAAC,SAAS,CAAC,EAAE,KAAK,KAAK,CAAC,GACpG,EACA,CACE,GAAGiB,EACHE,EACA,GAAGG,EACH,GAAGD,EACHH,EACA,GAAGM,EACHJ,EACA,GAAGG,CACL,EACJ,MAAO;AAAA,cAEHP,EACK,iBAAiBF,EAAgB,IAAKlC,IAAY,CAAC,KAAM,GAAGX,GAAUW,CAAM,CAAC,GAAI,KAAM,KAAK,EAAE,CAAC,EAC/F,gBAAgB,aAAc,KAAK,EACnC,iBAAiB,GAAG4B,EAAWI,CAAM,CAAC;AAAA;AAAA,cAEzCI,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,kCACrDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA,cACxDJ,EAAU,IAAI,CAACsB,EAAM9B,IAAM,YAAYA,CAAC,YAAYQ,EAAUR,CAAC,EAAE,KAAK,OAAO,GAAG,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,cAC5F4B,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,cACpBhB,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,YAE/C,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAe,SAAU,kBAAmBF,EAAY,IAAI,IAAM,MAAM,CAAC,EAC7F,WAAY,IAAM,CAGhB,IAAM2B,EACFjB,EAAgB,OAAQlC,GAAW0B,EAAe,aAAa,IAAI1B,CAAM,CAAC,EACrE,IACIA,IACI,CAAC,QAAuB,KAAM0B,EAAe,aAAa,IAAI1B,CAAM,GAAG,UAAY,CAAC,EAAE,EACvGmD,EAAoB,KAAK,CAAC,QAAuB,KAAMrB,CAAU,CAAC,EAClE,IAAMsB,EACF5B,EAAY,IAAI,CAAChB,EAAM6C,IAAM,CAAC,GAAGC,EAA2B9C,CAAI,CAAC,CAAC,EAC7D,OAAO,CAAC+C,EAAKC,IAAyBD,EAAI,OAAOC,CAAoB,EAAGL,CAAmB,EACpG,OAAAC,EAAgB,KAAK,GAAGE,EAA2B3B,CAAW,CAAC,EACvD,CACN,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAAsB,CACF,CACF,EACA,gBAAAjB,CACF,CACF,EAES5C,GAAS,CAACkE,EAAyBC,IAAuC,CACrF,IAAMhC,EAAiB,IAAItC,GAAeqE,EAAQ,OAAQC,EAAW,QAAQ,EACvE/B,EAAcD,EAAe,WAC7BF,EAAciC,EAAQ,OAAO,IAAI,CAACE,EAAON,IAAMM,EAAM,IAAI,EAC/DF,EAAQ,QAAQnE,GAAwBkC,EAAaiC,EAAQ,OAAO,CAAC,EAAE,SAAU/B,EAAgBC,CAAW,CAAC,CAC/G,EAEanC,GAAyBkE,GAA0D,CAC9F,IAAMtD,EAAYsD,EAAW,SAAoB,QAAQ,OAAQ,EAAE,EACnE,OAAOE,GAA4B,CAAC,SAAAxD,CAAQ,CAAC,CAC/C,IC/SA,IAUMyD,GAiBAC,GAYAC,GAIAC,GAyDOC,GApGbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0BAA0B,EAE5C,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EAEzDG,EAAaD,EAAM,OAASD,EAAW,OAAS,EAAIC,EAAM,OAASD,EAAW,OAC9EG,EAAkBH,EAAW,OAASC,EAAM,OAAS,EAAID,EAAW,OAASC,EAAM,OACvF,KAAOC,EAAaD,EAAM,QAAUE,EAAkBH,EAAW,OAAQ,EAAEE,EAAY,EAAEC,EACvF,GAAIF,EAAMC,CAAU,IAAMF,EAAWG,CAAe,GAAKF,EAAMC,CAAU,IAAM,GAC3EF,EAAWG,CAAe,IAAM,EAClC,MAAM,IAAI,MAAM,oDAAoD,CAG1E,EAEMb,GAAmB,CAACc,EAA2BC,IAAwC,CAC3F,IAAMC,EAAOF,EAAO,OAASC,EAAO,OAC9BJ,EAAkB,CAAC,EACzB,QAASM,EAAI,EAAGA,EAAID,EAAM,EAAEC,EAC1BN,EAAM,KAAKG,EAAOG,CAAC,CAAC,EAEtB,QAASA,EAAI,EAAGA,EAAIF,EAAO,OAAQ,EAAEE,EACnCN,EAAM,KAAKI,EAAOE,CAAC,IAAM,EAAIH,EAAOG,EAAID,CAAI,EAAID,EAAOE,CAAC,CAAC,EAE3D,OAAON,CACT,EAEMV,GAAuB,CAACS,EAA+BC,IACxDD,EAAW,OAASC,EAAM,OAAUX,GAAiBU,EAAYC,CAAK,EAAIX,GAAiBW,EAAOD,CAAU,EAG3GR,GAA2BO,GAA+C,CAC9E,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EACvDS,EAAwBjB,GAAqBS,EAAYC,CAAK,EAC9DQ,EAAWV,EAAO,CAAC,EAAE,SACrBW,EAAaD,IAAa,EAAgB,EAAI,EAC9CE,EAAa,KAAK,KAAKC,EAAU,KAAKJ,CAAW,EAAIE,CAAU,EAE/DG,EAAmBC,GAA+B,CACtD,IAAMC,EAAQC,EAAc,QAASP,EAAUT,EAAW,OAAQU,CAAU,EACtEO,EAASC,EAAe,SAAUT,EAAUD,EAAY,OAAQE,CAAU,EAC5ES,EACJ,GAAIV,IAAa,EAAe,CAC9B,IAAMW,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO;AAAA,6BAChDD,CAAC,MAAML,EAAO,gBAAgB,kBAAkBK,CAAC,GAAG,CAAC;AAAA,sBAC5DA,CAAC,MAAMP,EAAM,2BAA2B,gBAAgBO,CAAC,GAAIL,CAAM,CAAC;AAAA,qBACrEK,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIR,EAAM,YAAY,QAAQO,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAEhFH,EAAa;AAAA,0CACuBT,CAAU;AAAA;AAAA,UAE1CU,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,E
AAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCH,EAAO,YAAY,aAAc,MAAM,CAAC;AAAA,QAE9C,MACEE,EAAa;AAAA,8BACWF,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACtCF,EAAM,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,UAC3EA,EAAO,YAAY,aAAcF,EAAM,YAAY,aAAa,CAAC,CAAC;AAAA,SAGxE,MAAO;AAAA,MACLD,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBC,EAAOE,CAAM,CAAC;AAAA,MAC/EH,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,MACvEK,CAAU,EACd,EAEMK,EACF,CAAC,CAAC,QAAuB,KAAMb,CAAU,EAAG,GAAGc,EAA2BzB,EAAYQ,CAAW,CAAC,EACtG,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGA,EAAY,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACxE,gBAAAK,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAML,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBAAAa,CACF,EACF,CACF,EAEa/B,GAAUiC,GAAkC,CACvDrC,GAAeqC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAwBkC,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxE,ICvGA,IAaMC,GAiDOC,GA9DbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KACAC,KAIMP,GAA6BQ,GAAqD,CACtF,IAAMC,EAAWD,EAAa,CAAC,EAAE,SAC3BE,EAAaC,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAChDI,EAAaD,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAEhDK,EAAUD,EAAa,IAAM,EAC7BE,EAAmBC,GAAuC,CAC9D,IAAMC,EAAIC,EAAc,IAAKR,EAAU,CAAC,CAAC,EAAG,CAAC,EACvCS,EAAOD,EAAc,OAAQR,EAAU,CAAC,CAAC,EAAG,CAAC,EAC7CU,EAAIC,EAAe,IAAKX,EAAU,CAAC,CAAC,EAAG,CAAC,EAExCY,EAA8B,CAAC,CAAC,KAAM,kBAAmB,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAEvGC,EAAqBC,GAAe;AAAA,gBAC9BA,CAAC,oCAAoCA,CAAC;AAAA,gBACtCA,CAAC,MAAML,EAAK,YAAY,OAAOK,CAAC,aAAa,CAAC,QAAQA,CAAC,gBAC7DC,EAAoBX,EACtB;AAAA,mBACWK,EAAK,YAAY,uCAAuC,CAAC,IACpE,GAAGI,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC;AAAA,mBACjFN,EAAE,KAAK,KAAK,gCAE3B,MAAO,GAAGD,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBL,EAAGE,EAAMC,CAAC,CAAC;AAAA;AAAA,MAEtEM,GAAaC,GAA0BjB,CAAQ,CAAC,CAAC;AAAA;AAAA,MAEvDM,EAAa,UAAUY,EAAc,CAAC;AAAA,QACpCZ,EAAa,sCAAsC,0BAA0B,CAAC;AAAA;AAAA,gBAEtEC,EAAE,YAAY,YAAY,CAAC;AAAA,QACnCQ,CAAiB;AAAA;AAAA,QAEjBL,EAAE,YAAY,aAAoBS,GAAmB,MAAM,CAAC,CAAC;AAAA,MAEnE,EAEA,MAAO,CACL,KAAM,mBACN,YAAa,CAAC,KAAM,GAAGf,CAAO,GAAI,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACrE,gBAAAC,EACA,WAAae,IAAY,CACvB,QAAS,CAAC,CAAC,KAAMA,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,gBACI,CAAC,CAAC,QAAuB,KAAM,KAAK,KAAKnB,EAAa,CAAC,CAAC,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACxG,cAAe,CAAC,EAAG,KAAK,KAAKF,EAAaiB,GAAiB,CAAC,CAAC,CAC/D,EACF,CACF,EAEa1B,GAAY6B,GAAkC,CACrDA,EAAQ,OAAO,OAAS,GAAKnB,EAAU,KAAKmB,EAAQ,OAAO,CAAC,EAAE,IAAI,IAAM,EACpE7B,GAAS6B,CAAO,EAEtBA,EAAQ,QAAQ9B,GAA0B8B,EAAQ,MAAM,CAAC,CAE7D,ICpEA,IAeMC,GAMAC,GAsGOC,GAGAC,GA9HbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,EAEMT,GAA0B,CAACS,EAA+BC,IAA8C,CAC5G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAeH,EAAO,CAAC,EAAE,KAEzBI,EAAYF,EAAW,OACvBG,EAAOC,EAAU,cAAcL,EAAW,KAAMG,CAAS,EAEzDG,EAAcL,EAAW,MAAM,CAAC,EACtCK,EAAY,OAAOF,EAAM,EAAG,GAAGF,CAAY,EAE3C,IAAMK,EAAeN,EAAWG,CAAI,EAC9BI,EAAaT,EAAO,CAAC,EAAE,WAAa,EAAgB,EAAI,EACxDU,EAAa,KAAK,KAAKJ,EAAU,KAAKC,CAAW,EAAIE,CAAU,EAE/DE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMH,CAAI,EAAG,GAAGO,EAA2BZ,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMO,CAAW,CAChH,EAEMM,EAAmBC,GAA+B,CACtD,IAAMC,EAAOC,EAAc,OAAQhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQS,CAAU,EAClFQ,EAAUD,EAAc,eAAgBhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACjFkB,EAASC,EAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUO,EAAY,OAAQE,CAAU,EAEpFW,EAAmBC,GAA6B,CACpD,IAAMC,EAAcnB,EAAa,OAC7BoB,EAAU,qBAAqBF,CAAC,OAAOJ,EAAQ,KAAK,OAAO,OAC/D,QAASO,EAAI,EAAGA,EAAIF,EAAaE,IAC/BD,GAAW,GAAGD,EAAc,EAAI,iBAAiBD,CAAC,IAAIG,CAAC,IAAM,iBAAiBH,CAAC,EAAE,MAC7
Ed,EAAY,OAAS,EAAI,gBAAgBc,CAAC,oBAAoBG,CAAC,IAAM,gBAAgBH,CAAC,EAAE,IAE9FE,GAAW;AAAA,mBACEF,CAAC,MAAMJ,EAAQ,aAAa,iBAAiBI,CAAC,EAAE,CAAC;AAAA,mBACjDA,CAAC;AAAA,iBACHA,CAAC,SAASA,CAAC;AAAA;AAAA,2BAEDA,CAAC,MAAMN,EAAK,KAAK,OAAO;AAAA,UAE7C,QAASS,EAAI,EAAGC,EAAI,EAAGD,EAAIpB,EAAWoB,IAChCA,IAAMnB,GACRkB,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,aAAaA,CAAC,KACvFI,GAAKH,IAELC,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,MACrEd,EAAY,OAAS,EAAI,gBAAgBc,CAAC,IAAII,CAAC,IAAM,gBAAgBJ,CAAC,EAAE,IAC5EI,KAGJ,OAAOF,CACT,EACIG,EACJ,GAAI1B,EAAO,CAAC,EAAE,WAAa,EAAe,CACxC,IAAM2B,EAAmB,CAACC,EAAgBP,EAAWQ,EAAW,KAAO;AAAA,6BAChDR,CAAC,MAAMH,EAAO,gBAAgB,kBAAkBG,CAAC,GAAG,CAAC;AAAA,YACtED,EAAgBC,CAAC,CAAC;AAAA,sBACRA,CAAC,MAAMN,EAAK,gBAAgB,cAAcM,CAAC,EAAE,CAAC;AAAA,qBAC/CA,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BO,CAAM,IAAIP,CAAC,OAAOQ,CAAQ,IAAId,EAAK,YAAY,QAAQM,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAE/EK,EAAa;AAAA,0CACuBjB,CAAU;AAAA;AAAA,UAE1CkB,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCT,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,OAE/C,MACEQ,EAAa;AAAA,4BACSR,EAAO,gBAAgB,YAAY,CAAC;AAAA,QACxDE,EAAgB,EAAE,CAAC;AAAA,oBACPL,EAAK,aAAa,aAAa,CAAC;AAAA,QAC5CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,QAG7C,MAAO;AAAA,QAEHJ,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBC,EAAME,EAASC,CAAM,CAAC;AAAA,QAC5CJ,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,UACzEY,CAAU;AAAA,QAElB,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMzB,EAAW,SAAU,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMM,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAE,CACF,CACF,EAEarB,GAAyBS,GAClC6B,GAA4B,CAAC,KAAM7B,EAAW,IAAc,CAAC,EAEpDR,GAAS,CAACsC,EAAyB9B,IAAuC,CACrF,IAAMD,EAAS+B,EAAQ,OACvBzC,GAAeU,CAAM,EACrB+B,EAAQ,QAAQxC,GAAwBwC,EAAQ,OAAQ9B,CAAU,CAAC,CACrE,IClIA,IAeM+B,GAeAC,GA+DOC,GAGAC,GAhGbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM;AAAA,4DACwC,CAE5D,EAEMT,GACF,CAACS,EAA+BC,IAAsD,CACpF,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAsBH,EAAO,CAAC,EAAE,SAChCI,EAAYF,EAAW,OAEvBG,EAAeL,EAAO,CAAC,EAAE,KACzBM,EAAkBN,EAAO,CAAC,EAAE,SAC5BO,EAAOC,EAAU,cAAcP,EAAW,KAAMG,CAAS,EACzDK,EAAeP,EAAWK,CAAI,EAE9BG,EAAcL,EAAa,MAAM,CAAC,EAClCM,EAAaH,EAAU,KAAKE,CAAW,EAEvCE,EAAQC,EAAc,QAASV,EAAqBC,CAAS,EAC7DU,EAAUD,EAAc,eAAgBP,EAAiBD,EAAa,MAAM,EAC5EU,EAASC,EAAe,SAAUb,EAAqBO,EAAY,MAAM,EAGzEO,EAAoC,CACxC,CAAC,QAAuB,KAAMN,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMF,CAAI,CACpC,EACA,OAAAU,EAAgB,KAAK,GAAGC,EAA2BhB,EAAYG,EAAcK,CAAW,CAAC,EA4BlF,CACL,KAAM,iBACN,YAAa,CAAC,kBA7B8C,CAAC,OAAQ,MAAM,CA6B5C,EAC/B,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAM,CACF,GACA,gBA9BuBE,GAA+B;AAAA,QAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,EAASC,CAAM,CAAC;AAAA,QAC/CI,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,4BAErDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,kBAE9CD,EAAQ,YAAY,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA,2BAIxBF,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,eAAgB,gBAAiB,UAAU,CAAC;AAAA,oBACjDA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,QAE9CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAY3C,CACF,EAESvB,GAAiCS,GAC1CmB,GAA4B,CAAC,KA
AMnB,EAAW,IAAc,CAAC,EAEpDR,GAAiB,CAAC4B,EAAyBpB,IAA+C,CACrG,IAAMD,EAASqB,EAAQ,OACvB/B,GAAeU,CAAM,EACrBqB,EAAQ,QAAQ9B,GAAgC8B,EAAQ,OAAQpB,CAAU,CAAC,CAC7E,ICpGA,IAWMqB,GA0BAC,GAwFOC,GAQAC,GArIbC,GAAAC,EAAA,kBAGAC,IAEAC,KAIAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,UACjCA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,SAC3D,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EASMR,GAAwB,CAACQ,EAA+BC,IAA4C,CACxG,IAAMC,EAASF,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BG,EAASH,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACI,EAAGC,EAAGC,CAAC,EAAIC,GAAS,qBACvBL,EAAQD,EAAW,OAAQE,EAAQF,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGQ,EAAc,CAACJ,EAAGC,CAAC,EACzB,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAMC,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAML,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,OAAsB,KAAML,EAAW,KAAK,EAC/E,CAAC,OAAsB,KAAMA,EAAW,IAAI,CAC9C,EACMW,EAAwD,CAAC,OAAQ,MAAM,EACzEZ,EAAO,SAAW,IACpBW,EAAgB,KAAK,GAAGE,EAA2Bb,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEY,EAAkB,KAAK,MAAM,GAE/BD,EAAgB,KAAK,GAAGE,EAA2BL,CAAW,CAAC,EAE/D,IAAMM,EAAmBC,GAA+B,CACtD,IAAIC,EAAO,GACPf,EAAW,QAAUA,EAAW,OAClCe,EAAO,0DACEf,EAAW,QAAU,CAACA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAUA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAU,CAACA,EAAW,SAC3Ce,EAAO,2DAGT,IAAMC,EAAiBhB,EAAW,QAAU,EAAI,GAAK,2BAC/CiB,EAAIC,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDoB,EAAID,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDqB,EAAWH,EAAE,KAAK,MACpBI,EAAwB,KACtBC,EAAY,CAACL,EAAGE,CAAC,EACnBpB,EAAO,SAAW,IACpBsB,EAAIH,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAChEuB,EAAU,KAAKD,CAAC,GAElB,IAAME,EAASC,EAAe,SAAUzB,EAAO,CAAC,EAAE,SAAUQ,EAAY,MAAM,EAC9Ee,EAAU,KAAKC,CAAM,EACrB,IAAME,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC/G,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAC1D,EACA,MAAO;AAAA,IACPX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA;AAAA,IAEtER,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kBAK9DM,CAAQ;AAAA;AAAA,QAElBL,CAAI;AAAA;AAAA;AAAA,MAGNC,CAAc;AAAA,OACb,IACGK,GAAK,KACA,iBAAiBA,EAAE,2BAA2B,aAAcE,CAAM,CAAC,cACtEH,CAAQ,qBAAqBC,EAAE,YAAY,SAAS,CAAC,IAEpD,IACN,CAAC;AAAA;AAAA,IAGN,EAEA,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGrB,EAAW,QAAQ,GAAI,kBAAAW,CAAiB,EAC/D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAUR,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKS,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAG,CACF,CACF,EAEarB,GAAuBQ,GAAwD,CAC1F,IAAM0B,EAAS1B,EAAW,OACpB2B,EAAS3B,EAAW,OACpB4B,EAAQ5B,EAAW,MACnB6B,EAAO7B,EAAW,KACxB,MAAO,CAAC,OAAA0B,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,SAAU,GAAG7B,EAAW,MAAM,IAAIA,EAAW,MAAM,IAAIA,EAAW,QAAU,CAAC,EAAE,CACtH,EAEaP,GAAO,CAACqC,EAAyB9B,IAAqC,CACjFV,GAAewC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAAsBuC,EAAQ,OAAQ9B,CAAU,CAAC,CACnE,ICxIA,IAaM+B,GAGAC,GAiOOC,GAGPC,GAEAC,GA0COC,GA2BAC,GA3TbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMf,GAAW,CAACgB,EAA+BC,IAC5CD,EAAO,OAASC,GAAOD,EAAOC,CAAC,EAAE,KAAK,OAAS,GAAOC,EAAU,KAAKF,EAAOC,CAAC,EAAE,IAAI,EAAK,EAAID,EAAOC,CAAC,EAAI,OAEvGhB,GAAiB,CAACe,EAA+BG,IAAoD,CACzG,IAAMC,EAAQJ,EAAO,CAAC,EAChBK,EAAMrB,GAASgB,EAAQ,CAAC,EACxBM,EAAQtB,GAASgB,EAAQ,CAAC,EAC1BO,EAAOvB,GAASgB,EAAQ,CAAC,EACzBQ,EAAiBxB,GAASgB,EAAQ,CAAC,EACnCS,EAAuBzB,GAASgB,EAAQ,CAAC,EACzCU,EAAU1B,GAASgB,
EAAQ,CAAC,EAC5BW,EAAY3B,GAASgB,EAAQ,CAAC,EAoCpC,GAAII,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMQ,EAAe,GACfC,EAAYT,EAAM,KAAK,CAAC,EACxBU,EAAiBV,EAAM,KAAK,CAAC,EAC7BW,EAAaX,EAAM,KAAK,SAAW,EAAKQ,EAAeR,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3EY,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaZ,EAAW,QAAQ,EAC5D,GAAIO,GAAWC,EAAW,CACxB,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIA,EAAQ,KAAK,CAAC,IAAMG,GAAaH,EAAQ,KAAK,CAAC,IAAMP,EAAW,UAAYO,EAAQ,KAAK,CAAC,IAAMS,EAClG,MAAM,IAAI,MAAM,iFAAiF,EAEnG,GAAIR,EAAU,KAAK,CAAC,IAAME,GAAaF,EAAU,KAAK,CAAC,IAAMR,EAAW,UACpEQ,EAAU,KAAK,CAAC,IAAMQ,EACxB,MAAM,IAAI,MAAM,mFAAmF,EAErG,GAAIT,EAAQ,KAAK,CAAC,IAAMC,EAAU,KAAK,CAAC,EACtC,MAAM,IAAI,MAAM,gFAAgF,EAElG,GAAIA,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEvEM,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,CACpC,SAAWA,GAAWC,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIS,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAIA,EAAI,KAAK,CAAC,IAAMD,EAAM,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,6DAA6D,EAE/EgB,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMc,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIb,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMc,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GC,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,GAAIb,EAAM,CACR,GAAIA,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8CAA8C,EAGhE,GAAID,GACEF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,CAAC,IAAM,EAC/C,MAAM,IAAI,MAAM,oCAAoC,CAG1D,CAEA,IAAIiB,IACJ,GAAIb,EAAgB,CAClBa,EAAW,EACX,IAAMC,EAAWd,EAAe,KAUhC,MATIc,EAAS,SAAW,EAClBA,EAAS,CAAC,IAAMT,EAClBQ,EAAW,EACFC,EAAS,CAAC,IAAM,EAAIT,EAAY,IACzCQ,EAAW,GAEJC,EAAS,SAAW,GAAKA,EAAS,CAAC,IAAMT,GAAaS,EAAS,CAAC,IAAMN,IAC/EK,EAAW,GAETA,IAAa,EACT,IAAI,MAAM,0FAA0F,EAEtG,IAAI,MAAM,oBAAoB,CACtC,CAEA,IAAIE,EAAe,GACfC,EAAcT,EAClB,GAAIT,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FkB,EAAclB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGkB,EAAclB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CiB,EAAe,EACjB,CACF,CAEA,IAAME,EAAsBR,EAAqBD,EAC3CU,EAAsB,GAE5B,GAAIlB,EACF,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIC,EAAsB,CACxB,GAAIA,EAAqB,KAAK,SAAW,EACvC,MAAM,IAAI,MAAM,iEAAiE,EAEnF,GAAKA,EAAqB,KAAK,CAAC,IAAMI,GAAaJ,EAAqB,KAAK,CAAC,IAAM,GAChFA,EAAqB,KAAK,CAAC,IAAMN,EAAW,UAAYM,EAAqB,KAAK,CAAC,IAAMK,GACzFL,EAAqB,KAAK,CAAC,IAAMgB,EACnC,MAAM,IAAI,MAAM,2FAA2F,CAE/G,CAEA,MAAO,CACL,UAAAZ,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAS,EACA,kBAAAP,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAS,EACA,SAAAL,EACA,UAAW,KAAK,MAAMK,EAAcrB,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAuB,EACA,aAAAH,EACA,UAAAH,CACF,CACF,EAEalC,GAAqCiB,GAC9CwB,GAA4B,CAAC,GAAGxB,CAAU,CAAC,EAEzChB,GAAgDwC,GA
A4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhGvC,GACF,CAACwC,EAAyBC,EAAiBtB,EAAkBM,EAAmBC,EAC/EC,EAAoBe,IAAuB,CAC1C,IAAMC,EAAc,CAAClB,EAAWC,EAAgBC,CAAU,EACpDiB,EAAa9B,EAAU,KAAK6B,CAAW,EACvCE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,QAAuB,KAAMF,CAAU,EACnF,CAAC,QAAuB,KAAMf,CAAU,CAC1C,EAEMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,gBAAiBR,EAAI,SAAUE,CAAW,EAClEO,EAAWC,EAAc,MAAOV,EAAI,SAAUE,CAAW,EACzDS,EAAYD,EAAc,OAAQhC,EAAK,SAAUwB,CAAW,EAE5DU,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,CAC3G,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBH,EAAUE,EAAWJ,CAAM,CAAC;AAAA,IACrFD,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,IAK1E,EAEA,OAAOP,EAAQ,QACX,CACE,KAAM,4BACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACjD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMG,EAAa,SAAUF,EAAI,SAAU,aAAgC,CAAC,EACvF,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAC,CACF,EACA,CAAC,OAAQ,CAACL,EAAKtB,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC7C,EAESlB,GACT,CAACuC,EAAyBf,EAAmB6B,EAAkB5B,EAAwBK,EACtFwB,EAAmBpC,EAAmBuB,IAAwB,CAG7D,IAAIc,EAAgBD,EACpB,GAAKpC,EAOE,CACL,GAAIO,IAAmB,EACrB,MAAM,IAAI,MAAM,mFAAmF,EAEnG,OAAA8B,EACIxD,GAAiBwC,EAASe,EAAOpC,EAAMM,EAAWC,EAAgB4B,EAAWvB,EAAUW,CAAW,EACtGc,EAAgBA,EAAc,QAAQ,CAAC/B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,EAC9ES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAEnD,KAjBE,QAAID,EAAM,KAAK,SAAW,IACxBC,EAAgBD,EAAM,QAAQ,CAAC9B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,GAExES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAanD,EAEStD,GAAqB,CAACsC,EAAyBzB,IAAqC,CAC/F,IAAM2C,EAAS7D,GAAe2C,EAAQ,OAAQzB,CAAU,EAClDC,EAAQwB,EAAQ,OAAO,CAAC,EACxBvB,EAAMrB,GAAS4C,EAAQ,OAAQ,CAAC,EAChCtB,EAAQtB,GAAS4C,EAAQ,OAAQ,CAAC,EAClCrB,EAAOvB,GAAS4C,EAAQ,OAAQ,CAAC,EACjCpB,EAAiBxB,GAAS4C,EAAQ,OAAQ,CAAC,EAC3CnB,EAAuBzB,GAAS4C,EAAQ,OAAQ,CAAC,EACjDlB,EAAU1B,GAAS4C,EAAQ,OAAQ,CAAC,EACpCjB,EAAY3B,GAAS4C,EAAQ,OAAQ,CAAC,EAC5C,GAAIxB,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIC,GAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8BAA8B,EAIhD,IAAM0C,EAAS1C,GAAOC,GAASD,EAAI,KAAK,SAAW,GAAKC,EAAM,KAAK,SAAW,EAExE0C,EAAI3D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAU1C,EAAOG,EAAM,CAAC,EAEtG,GAAIwC,EACF,OAAOE,GACHrB,EAASoB,EAAG3C,EAAKC,EAAOE,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAC7F3C,CAAU,EAEhB,GAAI,CAACE,GAAO,CAACC,EACX,MAAM,IAAI,MAAM,gCAAgC,EAElD,IAAM4C,EAAI7D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,SAAUzC,EAAKE,EAC3FuC,EAAO,UAAU,EAEfK,EAAI9D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,UAAWxC,EAAOC,EAC9F,EAAIuC,EAAO,UAAU,EAEzBG,GACIrB,EAASoB,EAAGE,EAAGC,EAAG3C,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAAQ3C,CAAU,CAC/G,ICrWA,IAUMiD,GAIAC,GAyBAC,GAUOC,GAuCAC,GAxFbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAEMT,GAAcU,GAChB,MAAM,KAAKA,EAAkB,iBAAiB,EAAG,MAAM,EAGrDT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,IAChEA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,oCAAoC,EAKtD,GAFmCX,GAAWW,EAAO,CAAC,CAAC,EAE3C,SAAWA,EAAO,CAAC,EAAE,KAAK,OACpC,MAAM,IAAI,MAAM,uFAAuF,CAE3G,EAEMT,GAAiB,CAACU,EAA+BC,IAAkD,CACvG,IAAMC,EAAwB,CAAC,EAE/B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQ,EAAEG,EACvCD,EAAY,KAAKF,EAAWG,CAAC,EAAIF,EAAQE,CAAC,CAAC,EAG7C,OAAOD,CACT,EAEaX,GAAwB,CAACQ,EAA+BK,IAAkC,CACrG,IAAMJ,EAAaD,EAAO,CAAC,EAAE,KA
CvBE,EAA6BG,GAAgBhB,GAAWW,EAAO,CAAC,CAAC,EACjEG,EAAcZ,GAAeU,EAAYC,CAAO,EAChDI,EAAaC,EAAU,KAAKJ,CAAW,EAEvCK,EAAWR,EAAO,CAAC,EAAE,SACrBS,EAAQC,EAAc,QAASF,EAAUP,EAAW,MAAM,EAC1DU,EAASC,EAAe,SAAUJ,EAAUL,EAAY,MAAM,EAE9DU,EAAmBC,GAA+B;AAAA,2BAC/BL,EAAM,QAAQ,GAAGR,CAAU,CAAC;AAAA,QAC/Ca,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAOE,CAAM,CAAC;AAAA,QAClFG,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACrDH,EAAO,gBAAgB,YAAY,CAAC;AAAA,2BACtCF,EAAM,KAAK,OAAO;AAAA,4BACjBR,EAAW,MAAM;AAAA,4BACjBQ,EAAM,WAAW,uBAAwB,GAAG,CAAC;AAAA,gCACzCE,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,UAE9DF,EAAM,WAAW,gBAAiB,IAAK,iBAAiB,CAAC;AAAA;AAAA,QAE3DE,EAAO,YAAY,aAAcF,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,OAG3E,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGP,CAAO,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC7D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKM,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGS,EAA2Bf,EAAO,CAAC,EAAE,KAAMG,CAAW,CAAC,CAC5G,GACA,gBAAAU,CACF,CACF,EAEapB,GAAQuB,GAAkC,CACrD1B,GAAe0B,EAAQ,MAAM,EAC7BA,EAAQ,QAAQxB,GAAsBwB,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACtE,IC3FA,IAeaC,GA6KPC,GAuGOC,GAGPC,GAEAC,GA+BOC,GAvUbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KACAC,KACAC,KACAC,KACAC,KAEaf,GAAiB,CAACgB,EAA+BC,IAAoD,CAChH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAMH,EAAO,CAAC,EACdI,EAAQJ,EAAO,CAAC,EAChBK,EAAUL,EAAO,CAAC,EAClBM,EAAYN,EAAO,CAAC,EA+B1B,GAAIE,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMK,EAAe,GACfC,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAaR,EAAM,KAAK,SAAW,EAAKK,EAAeL,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3ES,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaT,EAAW,QAAQ,EACtDc,EAAaV,GAAWA,EAAQ,KAAK,SAAW,EAChDW,EAAeV,GAAaA,EAAU,KAAK,SAAW,EAEtDW,EAAe,GACrB,GAAIF,GAAcC,EAAc,CAC9B,GAAIX,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIC,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEnEW,GAEFL,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,IAGlCO,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,EAEtC,SAAWU,GAAcC,EACvB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIE,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAID,EAAM,KAAK,CAAC,EAAIC,EAAI,KAAK,CAAC,IAAM,EAClC,MAAM,IAAI,MAAM,sDAAsD,EAExEe,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMW,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIV,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMW,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GI,EAAY,EACZP,EAAmBR,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,IAAMC,IACFC,EAAe,GACfC,EAAcX,EAClB,GAAIN,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FiB,EAAcjB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIO,IAAqBP,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGiB,EAAcjB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CgB,EAAe,EACjB,
CACF,CACA,IAAME,EAAsBV,EAAqBD,EAC3CY,EAAsB,GAE5B,MAAO,CACL,UAAAf,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAW,EACA,kBAAAT,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAW,EACA,SAAAP,EACA,UAAW,KAAK,MAAMO,EAAcpB,EAAW,UAAW,EAC1D,SAAUA,EAAW,SACrB,WAAYA,EAAW,WACvB,MAAOA,EAAW,SAAWA,EAAW,WACxC,uBAAwB,GACxB,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAsB,EACA,aAAAH,EACA,UAAAF,EACA,aAAAD,CACF,CACF,EAEMhC,GACF,CAACuC,EAAeC,EAAyBC,EAAoBC,IAA6C,CACxG,IAAMC,EAAc,CAACD,EAAO,UAAWA,EAAO,oBAAqBA,EAAO,WAAaA,EAAO,QAAQ,EAChGE,EAAY,EACZC,EAAaC,EAAU,KAAKH,CAAW,EAAIC,EAC3CG,EAAwBL,EAAO,oBAC/BM,EAASC,EAAe,aAAcR,EAAUE,EAAY,OAAQC,CAAS,EAC7EM,EAASC,EAAc,SAAUZ,EAAE,SAAUA,EAAE,KAAK,OAAQK,CAAS,EACrEQ,EAASZ,EAAIW,EAAc,UAAWX,EAAE,SAAUA,EAAE,KAAK,OAAQI,CAAS,EAAI,OAE9ES,EAAI,KAAK,KAAKX,EAAO,SAAWE,CAAS,EACzCU,EAAW,CAAC,EAAGP,EAAuB,EAAGR,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,EAExDgB,EAAwDf,EAAI,CAAC,OAAQ,MAAM,EAAI,CAAC,MAAM,EAEtFgB,EAAoC,CACxC,CAAC,QAAuB,KAAMX,CAAU,EAAG,CAAC,QAAuB,KAAMH,EAAO,kBAAkB,EAClG,CAAC,QAAuB,KAAMA,EAAO,gBAAgB,EACrD,CAAC,QAAuB,KAAMA,EAAO,mBAAmB,CAC1D,EAEM3B,EAAS,CAACmC,CAAM,EAClBE,GACFI,EAAgB,KACZ,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2BjB,EAAG,IAAI,EAC5E,GAAGiB,EAA2Bd,CAAW,CAAC,EAC9C5B,EAAO,KAAKqC,CAAM,GAElBI,EAAgB,KAAK,GAAGC,EAA2BlB,EAAE,IAAI,EAAG,GAAGkB,EAA2Bd,CAAW,CAAC,EAExG,IAAMe,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,EACxG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CACtC,EAEMC,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sDAOVC,EAAS;AAAA;AAAA;AAAA;AAAA,qDAKTC,EAAYrB,EAAI;AAAA,UAClBmB,CAAO;AAAA;AAAA,UAEPC,CAAM;AAAA,WAEY;AAAA,YAChBA,CAAM;AAAA,WAINE,EAAmBC,GAA+B;AAAA;AAAA,IAE1DA,EAAa,iBAAiBL,CAAQ,EAAE,iBAAiB,GAAG3C,EAAQiC,CAAM,CAAC;AAAA,IAC3Ee,EAAa,UAAU,CACnBV,EAAGX,EAAO,WAAa,CACzB,CAAC,CAAC;AAAA,MACFqB,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,oBAC5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKlCN,EAAO,UAAW;AAAA,cAC1BW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKKX,EAAO,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAajCmB,CAAS;AAAA,KAGT,MAAO,CACL,KAAM,gBACN,YAAa,CAAC,KAAM,GAAGnB,EAAO,UAAW,GAAGW,CAAC,GAAG,CAAC,CAACb,CAAC,GAAI,kBAAAe,CAAiB,EACxE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMZ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAea,EACf,gBAAAE,CACF,GACA,gBAAAM,CACF,CACF,EAES7D,GAAsCe,GAC/CgD,GAA4B,CAAC,GAAGhD,CAAU,CAAC,EAEzCd,GAAgD8D,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhG7D,GACF,CAAC8D,EAAyBC,EAAmBC,EAA8BzB,EAC1E0B,IAAwB,CACvB,IAAIC,EAAgBH,EACdI,EAAW5B,EAAO,WAClB6B,EAAQ7B,EAAO,MACrB,OAAIwB,EAAM,KAAK,SAAW,GAAKxB,EAAO,mBAAqB,IACzD2B,EAAgBH,EAAM,QAAQ,CAACxB,EAAO,UAAWA,EAAO,iBAAkB4B,EAAU5B,EAAO,QAAQ,CAAC,GAGlGyB,EACFE,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAeF,EAAQE,EAAc,SAAU3B,CAAM,EAC7E,CAAC,OAAQ,CAAC2B,EAAeF,CAAM,EAAG,QAAS,CAACzB,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAE3FC,EAAgBJ,EAAQ,QACpBjE,GAAwBqE,EAAe,OAAWA,EAAc,SAAU3B,CAAM,EAChF,CAAC,OAAQ,CAAC2B,CAAa,EAAG,QAAS,CAAC3B,EAAO,aAAe0B,EAAc,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjFG,IAAU,IACZF,EAAgBJ,EAAQ,QACpBO,GAAsB,CAACH,CAAa,EAAG,CAAC,EAAG,EAAG,EAAGE,CAAK,CAAC,EAAG,CAAC,OAAQ,CAACF,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EACzGA,EACIA,EAAc,QAAQ,CAAC3B,EAAO,UAAWA,EAAO,oBAAqB4B,EAAWC,EAAO7B,EAAO,QAAQ,CAAC,GAGtGuB,EAAQ,QACXQ,GAA2BJ,EAAenE,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACmE,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CACjD,EAESjE,GAAsB,CAAC6D,EAAyBjD,IAAqC,CAChG,IAAM0B,EAAS3C,GAAekE,EAAQ,OAAQjD,CAAU,EACxD,GAAIiD,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpC,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIA,EAAQ,OAAO,CAAC,GAAG,KAAK,SAAW,EACrC,MAAM,IAAI,MAAM,8BAA8B,EAGhD,IAAMS,EAAIC,GACNV,EAASvB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAUuB,EAAQ,OAAO,CAAC,EAAG,OACvG,CAAC,EACC7C,EA
AU6C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OACzF5C,EAAY4C,EAAQ,OAAO,CAAC,GAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EAAIA,EAAQ,OAAO,CAAC,EAAI,OAC3FW,EAAIzE,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG7C,EAASsB,EAAQ,CAAC,EAChFmC,EAAI1E,GAA8B8D,EAASA,EAAQ,OAAO,CAAC,EAAG5C,EAAWqB,EAAQ,CAAC,EACxFoC,GAAeb,EAASS,EAAGE,EAAGC,EAAG,OAAW,OAAW,OAAW,OAAW,OAAWnC,EAAQ1B,CAAU,CAC5G,ICzVA,IAeM+D,GAwGAC,GAwHAC,GAoDOC,GAnSbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAOMR,GACF,CAACS,EAA+BC,IAAoD,CAClF,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdE,EAAO,EACPC,EAAYC,EAAU,gBAAgBJ,EAAQE,CAAI,EAClDG,EAAWD,EAAU,kBAAkBJ,EAAQE,CAAI,EACnDI,EAAaC,GAAiBF,CAAQ,EACtCG,EAAiBH,EAAWC,EAC5BG,EAAa,CAACT,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGQ,CAAc,EAClDE,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EC,EACF,CAAC,CAAC,QAAuB,KAAMN,CAAQ,EAAG,CAAC,QAAuB,KAAMG,CAAc,CAAC,EAC3FG,EAAgB,KAAK,GAAGC,EAA2BH,EAAYA,CAAU,CAAC,EAE1E,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAKlB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACxEW,EAAQD,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEoB,EAAOF,EAAc,OAAQlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/DqB,EAASC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACnFe,EAAY,CAACN,EAAGE,EAAOC,EAAMC,CAAM,EACnCG,EAAWP,EAAE,KAAK,MAClBQ,EAAUjB,IAAe,EAAI,MAAQ,MAAMA,CAAU,QACrDkB,EAAgB,GAEhBC,EAA8B,CAAC,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CAAC,EAC3G,MAAO;AAAA;AAAA;AAAA,2CAG4BF,CAAO,KAAKC,CAAa;AAAA,0BAC1CA,CAAa;AAAA,IACnCV,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGJ,CAAS,CAAC;AAAA,IACtEP,EAAa,UAAUU,CAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOrBD,CAAO;AAAA;AAAA,4BAECA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAahDW,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKhDiB,CAAO;AAAA;AAAA,yBAEEA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,OAAOQ,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAcpDG,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,mFAIYP,EAAW,OAAO;AAAA,yCAC5DkB,EAAM,YAAY,SAAS,CAAC;AAAA,6BACxCC,EAAK,YAAY,SAAS,CAAC;AAAA;AAAA,oBAEpCH,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,MAAMO,CAAQ,IAAIC,CAAO,qBAAqBD,CAAQ,IAC5FC,CAAO;AAAA,QACXJ,EAAO,IAAI,QAAS,UAAW,IAAK,OAAO,CAAC;AAAA;AAAA,IAG9C,EACA,MAAO,CACD,KAAM,wBAEV,YAAa,CAAC,KAAM,GAAGpB,EAAW,OAAO,IAAIO,CAAU,GAAI,kBAAAI,CAAiB,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAGK,CAAS,EAC5B,gBAAAQ,CACF,GACA,gBAAAE,CACF,CACF,EAEEvB,GACF,CAACqC,EAAyBC,EAAmBX,EAAmBC,EAAkBW,EAAWC,EAAWC,EACvGC,IAAoB,CACnB,IAAM1B,EAAaC,GAAiBwB,CAAC,EAC/BE,EAAK,GAGLC,EAAa5B,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC5D6B,EAAc7B,IAAe,EAAI,MAAQ,MAAMA,CAAU,IACzD8B,EAAiB,CAACC,EAAcC,IAAiB,GAAGJ,CAAU,IAAIG,CAAI,KAAKC,CAAI,IAC/EC,EAAcV,EAAIE,EAAIzB,EACtBkC,EAAS,KAAK,KAAKV,EAAIG,CAAE,EAEzBQ,EAA4D,CAAC,MAAM,EACnEC,EAAwC,CAC5C,CAAC,QAAuB,KAAMF,CAAM,EAAG,CAAC,QAAuB,KAAMV,CAAC,EACtE,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAMwB,EAAIC,EAAIzB,CAAU,CAAC,CAC9D,EAEMqC,EAAuB7B,GAA+B,CAC1D,IAAM8B,EAAc5B,EAAc,QAASY,EAAM,SAAUA,EAAM,KAAMtB,CAAU,EACjF,MAAO;AAAA,IACXQ,EAAa,iBAAiB8B,CAAW,CAAC;AAAA,kEACoBV,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAUmB,CAAE,CAAC;AAAA,4CACcA,CAAE;AAAA,+CACCA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAQjCY,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA;AAAA,sBAE9B6B,CAAW;AAAA;AAAA;AAAA;AAAA,2BAINC,EAAe,MAAO,YAAY,CAAC;AAAA,IAExD,EAEMU,EAAanB,EAAQ,QACvB,CACE,KAAM,0BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAmBmC,CAAqB,EAC7E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACZ,EAAGE,EAAGE,EAAI,CAAC,EAAG,UAAwB,CAChD,EACA,cAAe,CAAC
,EAAGJ,EAAIE,EAAIzB,CAAU,EACrC,gBAAiBoC,CACnB,GACA,gBAAiBC,CACnB,EACA,CAAC,OAAQ,CAACf,CAAK,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjCjB,EAAoC,CACxC,CAAC,QAAuB,KAAM4B,CAAW,EAAG,CAAC,QAAuB,KAAMT,CAAC,EAC3E,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAM2B,EAAKF,EAAIzB,CAAU,CAAC,CAC/D,EACMI,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EG,EAAmBC,GAA+B,CACtD,IAAMiC,EAAc/B,EAAc,QAASC,EAAM,SAAUA,EAAM,KAAMX,CAAU,EAC3E0C,EAAahC,EAAc,OAAQE,EAAK,SAAUA,EAAK,KAAMZ,CAAU,EAC7E,MAAO;AAAA,2DAC4C4B,CAAU;AAAA,2DACVa,EAAY,KAAK,OAAO;AAAA,0DACzBC,EAAW,KAAK,OAAO;AAAA,kEACfd,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,wBAAwB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKlE+B,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA,mCACjB2B,CAAE;AAAA,gEAC2BA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+DAMHD,CAAO;AAAA,qCACjCG,CAAW;AAAA,yBACvBA,CAAW;AAAA;AAAA,2BAETC,EAAe,eAAgB,cAAc,CAAC;AAAA,IAEnE,EACA,OAAOT,EAAQ,QACX,CACE,KAAM,uCAEN,YAAa,CAAC,KAAM,GAAGrB,CAAU,IAAI0B,CAAO,GAAI,kBAAAtB,CAAiB,EACjE,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACmB,EAAGE,EAAG,CAAC,EAAG,UAAwB,CAC5C,EACA,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAc,EAAuB,CAAC,EACnE,gBAAA5B,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACiC,EAAY7B,EAAOC,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC3D,EAEE3B,GACF,CAACoC,EAAyB7B,EAA+BC,IAAuC,CAC9F,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdiD,EAAIjD,EAAO,CAAC,EACZkD,EAAIlD,EAAOA,EAAO,OAAS,CAAC,EAC5BmD,EAAI/C,EAAU,kBAAkBJ,EAAQ,CAAC,EAAIkD,EAC7C5C,EAAaC,GAAiB2C,CAAC,EAC/BE,EAAahD,EAAU,KAAKH,CAAW,EAAIK,EAC3CK,EACF,CAAC,CAAC,QAAuB,KAAMwC,CAAC,EAAG,CAAC,QAAuB,KAAM,KAAK,MAAMD,EAAI5C,CAAU,CAAC,CAAC,EAC1FI,EAAwD,CAAC,OAAQ,MAAM,EAEvE2C,EAAoB/D,GAAYqC,EAAS7B,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGmD,EAAGE,EAAGD,EAAGnD,EAAW,OAAO,EACrGc,EAAmBC,GAA+B,CACtD,IAAMQ,EAAWgC,GAA4BxD,EAAO,CAAC,EAAE,QAAQ,EACzDyD,EAAYjD,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC3DkD,EAAgBlD,IAAe,EAAIgB,EAAW,MAAMhB,CAAU,IAAIgB,CAAQ,IAE1EsB,EAAc5B,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMQ,CAAU,EACnFmD,EAAerC,EAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUG,EAAaK,CAAU,EAEzF,MAAO;AAAA,2DAC4CsC,EAAY,KAAK,OAAO;AAAA,gEACnBW,CAAS;AAAA,kEACPE,EAAa,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,IAIvF3C,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kDAMsB0C,CAAa,eAAeA,CAAa;AAAA,IAErF,EACA7B,EAAQ,QACJ,CACE,KAAM,4BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAAI,CAAiB,EACtD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKsD,EAAa,EAAuB,CAAC,EAClE,gBAAAzC,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACf,EAAO,CAAC,EAAGuD,CAAiB,CAAC,CAAC,CAC9C,EAES7D,GAAe,CAACmC,EAAyB5B,IAA6C,CAC7FA,EAAW,SAAW,OACxBR,GAAkCoC,EAASA,EAAQ,OAAQ5B,CAAU,EAErE4B,EAAQ,QAAQtC,GAA8BsC,EAAQ,OAAQ5B,CAAU,CAAC,CAE7E,ICzSA,IAgBM2D,GAMAC,GA6GOC,GAnIbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAQMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,CAE3D,EAEMP,GACF,CAACO,EAA+BC,EAAiCC,IAAqC,CACpG,IAAMC,EAAaF,EAAW,WAExBG,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAAQL,EAAO,CAAC,EAChBM,EAAO,CAACH,GAAcH,EAAO,CAAC,EAE9BO,EAAcH,EACdI,EAAOC,EAAU,cAAcR,EAAW,KAAMG,EAAO,MAAM,EAC7DM,EAAYD,EAAU,gBAAgBL,EAAQI,CAAI,EAClDG,EAAWF,EAAU,kBAAkBL,EAAQI,CAAI,EAEnDI,EAAYH,EAAU,KAAKJ,EAAM,IAAI,EACrCQ,EAAWP,EAAOG,EAAU,KAAKH,EAAK,IAAI,EAAI,EACpD,GAAIM,IAAcD,GAAaL,GAAQO,IAAaF,EAClD,MAAM,IAAI,MAAM,+BAA+BA,CAAQ;AAAA;AAAA,2BAEpCC,CAAS,qBAAqBC,CAAQ,EAAE,EAG7D,IAAMC,EAA6B,CAAC,EACpC,QAASC,EAAI,EAAGA,EAAIX,EAAO,OAAQ,EAAEW,EAC/BA,EAAIP,EACNM,EAAiB,KAAKV,EAAOW,CAAC,CAAC,EAE/BD,EAAiB,KAAK,CAAC,EAG3B,IAAME,EAAaC,GAAiBN,CAAQ,EACtCO,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAoC,CACxC,CAAC,QAAuB,KAAMT,CAAS,EAAG,CAAC,OAAsB,KAAMC
,CAAQ,EAC/E,CAAC,QAAuB,KAAM,KAAK,MAAMA,EAAWK,CAAU,CAAC,EAC/D,CAAC,OAAsB,KAAMf,EAAW,OAAO,CACjD,EACIK,GACFY,EAAkB,KAAK,MAAM,EAE/B,IAAME,EAAoBlB,EAAc,EAClCmB,EAAkBnB,EAAc,EAEhCoB,EAAmBC,GAA+B,CACtD,IAAMC,EAAWC,GAA4BzB,EAAO,CAAC,EAAE,QAAQ,EACzD0B,EAAY,CAChBC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAU,EACjEW,EAAc,QAAStB,EAAM,SAAUA,EAAM,KAAMW,CAAU,CAC/D,EACIV,GACFoB,EAAU,KAAKC,EAAc,OAAQrB,EAAK,SAAUA,EAAK,KAAMU,CAAU,CAAC,EAE5EU,EAAU,KAAKE,EAAe,SAAU5B,EAAO,CAAC,EAAE,SAAUO,EAAaS,CAAU,CAAC,EAChFI,GACFM,EAAU,KAAKE,EAAe,qBAAoCd,CAAgB,CAAC,EAEjFO,GACFK,EAAU,KAAKE,EAAe,mBAAkCd,CAAgB,CAAC,EAGnF,IAAMe,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAClE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,CAC5E,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA,IACtEH,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,wBAEvDO,GAAW,MAAOd,CAAU,CAAC;AAAA,+BACtBc,GAAW,MAAOd,CAAU,CAAC;AAAA;AAAA;AAAA,oBAGxCe,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA,iBAInDgB,GAAU,cAAehB,CAAU,CAAC;AAAA,oCACjBgB,GAAU,qBAAsBhB,CAAU,CAAC,yBACnEb,EAAa,GAAK,eAAe;AAAA;AAAA;AAAA,uBAGtB4B,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA,uBAChDe,GAAUP,EAAUR,EAAY,UAAU,CAAC;AAAA,6BACrCU,EAAU,CAAC,EAAE,KAAK,KAAK,cAAcvB,EAAa,GAAK,QAAQ;AAAA,UAClFG,EAAO,KAAKyB,GAAUP,EAAUR,EAAY,SAAS,CAAC,GAAK,EAAE;AAAA;AAAA;AAAA;AAAA,MAIjEI,EAAoB,sCAAwC,EAAE;AAAA,MAC9DC,EAAkB,2CAA6C,EAAE;AAAA,IAEjE,EACMY,EAAU,CAAC,CAAC,KAAM1B,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIoB,GACFa,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAE7DO,GACFY,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAG1D,CACL,KAAM,qBACN,YAAa,CAAC,KAAM,GAAGE,CAAU,IAAId,CAAW,IAAIC,CAAU,GAAI,kBAAAe,CAAiB,EACnF,WAAY,KACP,CAAC,QAAAe,EAAS,cAAe,CAAC,EAAG,KAAK,KAAKvB,EAAY,EAAuB,CAAC,EAAG,gBAAAS,CAAe,GAClG,gBAAAG,CACF,CACF,EAES5B,GAAY,CAACwC,EAAyBjC,IAA0C,CAC3FT,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQzC,GAA2ByC,EAAQ,OAAQjC,EAAYiC,EAAQ,WAAW,CAAC,CAC7F,ICtIA,IAoBMC,GA+BOC,GAqPAC,GAQAC,GAhTbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAWMT,GAAiB,CAACU,EAA+BC,IAA4C,CACjG,GAAID,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,oCAAoC,EAEtD,IAAME,EAAIF,EAAO,CAAC,EACZG,EAAQD,EAAE,KAAK,OACrB,GAAIA,EAAE,KAAKC,EAAQ,CAAC,IAAMF,EAAW,EACnC,MAAM,IAAI,MAAM,wDAAwD,EAE1E,IAAMG,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3FI,EAAWJ,EAAW,UAAY,EAAIA,EAAW,KACjDK,EAAIN,EAAO,CAAC,EAClB,GAAI,CAACO,EAAU,SAASD,EAAE,KAAM,CAACL,EAAW,EAAGG,EAAeC,CAAQ,CAAC,EACrE,MAAM,IAAI,MAAM,6EAA6E,EAG/F,IAAMG,EADSR,EAAO,CAAC,EACI,KAC3B,GAAIO,EAAU,KAAKC,CAAW,IAAMP,EAAW,EAAIG,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAE5C,GAAIJ,EAAO,SAAW,EAAG,CAEvB,IAAMS,EADaT,EAAO,CAAC,EACQ,KAC7BU,EACFT,EAAW,KAAO,EAAKA,EAAW,EAAIG,EAAiBH,EAAW,EAAI,KAAK,OAAOG,EAAgB,GAAK,CAAC,EAC5G,GAAIG,EAAU,KAAKE,CAAe,IAAMC,EACtC,MAAM,IAAI,MAAM,8BAA8B,CAElD,CACF,EAEanB,GACT,CAACS,EAA+BC,EAC/BU,EAAoDC,IAAwD,CAC3G,IAAMC,EAAab,EAAO,CAAC,EAAE,KACvBG,EAAQU,EAAW,OACnBT,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3Fa,EAAYD,EAAWV,EAAQ,CAAC,EAChCY,EAAWd,EAAW,EACtBe,EAAYf,EAAW,EACvBgB,EAAYJ,EAAW,MAAM,EAAGV,EAAQ,CAAC,EACzCe,EAAYX,EAAU,KAAKU,CAAS,EAEpCE,EADWlB,EAAW,UAAY,EAAIA,EAAW,KACpB,EAC7BmB,EAAWpB,EAAO,CAAC,EAAE,SACrBqB,EAAeC,GAAiBR,CAAS,EACzCS,EAAcD,GAAiBrB,EAAW,CAAC,EAC3CuB,EAAcF,GAAiBH,CAAe,EAC9CM,EAAcC,GAAqBN,CAAQ,EAC3CO,EAAsBb,EAAYV,EAAgBqB,EAClDG,EAAwB,KAAK,MAAMhB,EAAiCe,CAAmB,EACvFE,EAA0BzB,GAAiBO,EAAyB,CAAC,GAAKiB,EAAwB,EAClGE,EAAc,CAACD,GAA2BD,GAAyB,EAAKN,GAAiBN,CAAS,EAClGY,GAAyB,GAAMN,GAAiBN,CAAS,GAAK,EAAU,EACA,EACxEe,EAAcd,EAAU,OAAO,CAACH,EAAWE,CAAS,CAAC,EACrDgB,EAAazB,EAAU,KAAKwB,CAAW,EAAID,EAAaT,EAExDY,EAAoCJ,EACtC,CAAC,EACD,CAAC,CAAC
,QAAuB,KAAMG,CAAU,EAAG,CAAC,QAAuB,KAAM/B,EAAW,SAAS,CAAC,EAC7FiC,EAAiB,CAAChB,EAAWJ,EAAWC,EAAWQ,CAAW,EAC9DY,EAAS5B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAC5DmC,EAAO,OAAO,GAAI,EAAGhB,EAAkBK,CAAW,EAClDS,EAAgB,KAAK,GAAGG,EAA2BF,CAAc,CAAC,EAClED,EAAgB,KAAK,GAAGG,EAA2BD,CAAM,CAAC,EAC1DF,EAAgB,KAAK,GAAGG,EAA2BpC,EAAO,CAAC,EAAE,IAAI,CAAC,EAC9DA,EAAO,SAAW,GACpBiC,EAAgB,KAAK,GAAGG,EAA2B7B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAE5F,IAAMqC,GAAkB,CAACnB,EAAWJ,EAAWE,EAAYc,CAAU,EACrEG,EAAgB,KAAK,GAAGG,EAA2BC,EAAe,CAAC,EACnE,IAAMC,GAAmBC,GAA+B,CACtD,IAAMC,GAAYN,EAAe,OAC3BhC,EAAIuC,EAAc,IAAKzC,EAAO,CAAC,EAAE,SAAUwC,GAAWjB,CAAW,EACjEjB,GAAImC,EAAc,OAAsBN,EAAO,OAAQX,CAAW,EAClEkB,GAASD,EAAc,SAAUzC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC1E2C,GAAiB,CAACzC,EAAGI,GAAGoC,EAAM,EAC9BE,EACF5C,EAAO,SAAW,EAAIyC,EAAc,iBAAgCzC,EAAO,CAAC,EAAE,KAAK,MAAM,EAAI,OAC7F4C,GACFD,GAAe,KAAKC,CAAU,EAEhC,IAAMC,GAAaR,GAAgB,OAC7BS,EAASC,EAAe,SAAU/C,EAAO,CAAC,EAAE,SAAU6C,GAAYf,CAAU,EAC5EkB,GAA8B,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACpG5B,EAAW6B,GAA4BjD,EAAO,CAAC,EAAE,QAAQ,EAEzDkD,GAAe,IAAM,CACzB,OAAQ3B,EAAa,CACnB,IAAK,GACH,MAAO,SAASH,CAAQ,OAC1B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,IAAK,GACH,MAAO,UAAUA,CAAQ,IAC3B,QACE,MAAM,IAAI,MAAM,GAAGG,CAAW,8BAA8B,CAChE,CACF,GAAG,EAEG4B,GAAkB;AAAA,yCACShC,CAAe,aAAaK,CAAW;AAAA,YACpElB,GAAE,WAAW,YAAa,IAAK,MAAM,CAAC;AAAA,yBACzBA,GAAE,aAAa,WAAW,CAAC;AAAA,qCACfkB,CAAW;AAAA,iCACfA,IAAgB,EAAI,SAAW,kBAAkB;AAAA;AAAA;AAAA;AAAA,uCAI3C0B,CAAW,IACtC,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,GAAGjC,CAAQ,kBAAkBiC,EAAC,OAAOjC,CAAQ,kBAAkBiC,EAAC,IAAI,EACjG,KAAK,IAAI,CAAC;AAAA,0CACe,IAC5B9B,IAAgB,EACX,GAAG2B,CAAW,IACjB,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,uBAAuBA,EAAC,yBAAyB,EAAE,KAAK,IAAI,CAAC,KAE5F,yBAAyBH,CAAW,IAAI,MAAM,CAAC,EAAE,KAAK,YAAY,EAAE,KAAK,GAAG,CAAC,eAErF,CAAC;AAAA;AAAA,uCAE2BrB,EAA0Bf,EAAYO,CAAY;AAAA,gBACzEnB,EAAE,WAAW,YAAasC,GAAY,EAAGX,EAA0B,IAAM,SAASR,CAAY,MAAM,CAAC;AAAA,gBACrGnB,EAAE,WAAW,YAAasC,GAAY,EAAG,aAAa,CAAC;AAAA,mCACpCtC,EAAE,gBAAgB,WAAW,CAAC;AAAA,4BACrCgD,CAAW;AAAA,yCACE,EAAI3B,CAAW;AAAA,8BAC1BrB,EAAE,YAAY,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG3C2B,EAA0B,gDAAkD,kBAAkB,GAClGC,EAAa,EAAI,MAAQ,EAAE,OAC3B,MACK,KACG,CAAC,OAAQ,EAAIP,CAAW,EACxB,CAAC6B,GAAGC,KAAM,GACN9B,IAAgB,EAAI,UAAU8B,EAAC,4BAA4BA,EAAC,IACxC,cAAcA,EAAC,2BAA2BA,EAAC,IAAI,EAAE,EAC5E,KAAK,KAAK,CAAC;AAAA;AAAA,6BAEC,EAAI9B,CAAW;AAAA;AAAA,WAG9B+B,GAAuBV,EAAa;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKlBA,EAAW,YAAY,kBAAkB,CAAC;AAAA,aAExB,GAE1C,OAAOf,EAA0B;AAAA,iDACQiB,EAAO,KAAK,KAAK,KAAKhC,EAAYV,CAAa;AAAA,UACtFmC,EAAa,iBAAiB,GAAGI,GAAgBG,CAAM,CAAC;AAAA,UACxDP,EAAa,UAAU,CACvBnC,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA,2BACiBF,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAI7BA,EAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,qCAEd4B,CAAU;AAAA,sDACOA,CAAU;AAAA,gBAEnBc,EAAa;AAAA,mDACPxC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yCAMvBwC,EAAW,YAAY,uBAAuB,CAAC,8BAC9B,EAAE;AAAA,6BAC/BtC,GAAE,KAAK,OAAO;AAAA,cAC7BA,GAAE,WAAW,YAAa,IAAK,6BAA6B,CAAC;AAAA;AAAA,+DAEZF,CAAa;AAAA,0BAClDsC,GAAO,YAAY,cAAc,CAAC;AAAA;AAAA,+BAE7BtB,CAAQ,IAAIwB,EAAa,2BAA6B,CAAG;AAAA,cAC1EtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA,6CACRL,EAAW,UAAYsB,CAAW;AAAA,yDACtBT,CAAS;AAAA,cACpDqC,EAAe;AAAA;AAAA;AAAA,gCAGGL,EAAO,KAAK,OAAO;AAAA,2CACR,KAAK,KAAKhC,EAAYV,CAAa,CAAC;AAAA,YACnE0C,EAAO,WAAW,iBAAkB,IAAK,OAAO,CAAC;AAAA,YACjDA,EAAO,WAAW,iBAAkBD,GAAa,EAAG,KAAK,CAAC;AAAA,YAC1DC,EAAO,WAAW,iBAAkBD,GAAa,EAAG,kCAAkC,CAAC;AAAA,gCACnEC,EAAO,gBAAgB,gBAAgB,CAAC;AAAA;AAAA;AAAA,wBAGhDhC,CAAS;AAAA,kCACCgC,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA,0CAEhC1C,CAAa;AAAA;AAAA,6CAEVU,CAAS;AAAA;AAAA,gBAEtCgC
,EAAO,YAAY,gBAAiB,cAAc,CAAC;AAAA,iCAClC9B,EAAYc,CAAU;AAAA;AAAA;AAAA,WAId;AAAA,UAC/BS,EAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGL,GAAgBG,CAAM,CAAC;AAAA,UACnFP,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,qCACjDO,EAAO,KAAK,KAAK,KAAKzB,CAAY;AAAA,iCACtCyB,EAAO,gBAAgB,YAAY,CAAC;AAAA,sBAC/CA,EAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,sBACnDC,EAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,2BAC9C3C,EAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAKI0C,EAAa;AAAA,8CACZd,CAAU,QAAQ1B,CAAa;AAAA;AAAA,uCAEtCwC,EAAW,YAAY,kBAAkB,CAAC;AAAA,yEAEvB,EAAE;AAAA,oCACxBxC,EAAgB0B,CAAU;AAAA,2BACnCxB,GAAE,KAAK,OAAO;AAAA,qCACJwB,CAAU;AAAA,cACjCxB,GAAE,WAAW,YAAa,IAAK,SAASwB,CAAU,MAAM,CAAC;AAAA;AAAA,+CAExB1B,CAAa;AAAA;AAAA,4BAEhCsC,GAAO,YAAY,aAAa,CAAC;AAAA;AAAA,iCAE5BtB,CAAQ,IAAIwB,EAAa,qDAAuD,CAAG;AAAA,gBACpGtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,gBAEvC6C,EAAe;AAAA;AAAA,gBAEfG,EAAoB;AAAA,sDACkB/B,CAAW;AAAA;AAAA;AAAA,cAIpBqB,EAAa;AAAA,kBACxCU,EAAoB;AAAA,iBAEoB,EAAE;AAAA;AAAA,wCAEpBjC,CAAY;AAAA,gBACpCyB,EAAO,WAAW,iBAAkBD,GAAa,EAAG,GAAGxB,CAAY,YAAY,CAAC;AAAA,gBAChFyB,EAAO,aAAa,iBAAkB,kBAAkB,CAAC;AAAA;AAAA,UAGnE,EACA,MAAO,CACL,KAAMjB,EAA0B,uBAAyB,cACzD,YAAa,CACX,KAAM,GAAG5B,EAAW,QAAQ,IAAIa,CAAS,IAAIM,CAAQ,IAAIpB,EAAO,MAAM,GACtE,kBAAmB,MAAMA,EAAO,MAAM,EAAE,KAAK,MAAM,CACrD,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM+B,EAAa,SAAAX,CAAQ,CAAC,EACvC,KAAMS,EAA0B,uBAAyB,cACzD,cAAeA,EAA0B,CAAC,EAAG,EAAG,EAAG,KAAK,KAAKb,EAAYc,CAAU,EAAG,EAAGZ,CAAS,EACzD,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAC5F,gBAAAC,CACF,GACA,gBAAAK,EACF,CACF,EAES9C,GAAc,CAAC+D,EAAyBtD,IAA4C,CAC/FX,GAAeiE,EAAQ,OAAQtD,CAAU,EACzC,IAAMU,EAAqD4C,EAAQ,4BAA4B,EACzF3C,EAAiC2C,EAAQ,kCAAkC,EACjFA,EAAQ,QAAQhE,GACZgE,EAAQ,OAAQtD,EAAYU,EAA0BC,CAA8B,CAAC,CAC3F,EAEanB,GAA8BQ,GACvCuD,GAA4BvD,CAAsE,ICjTtG,IAiBMwD,GAmBAC,GA0BAC,GA2BAC,GAuBAC,GAuBAC,GAeAC,GAiDAC,GA0BOC,GAjObC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KASMb,GAAkBc,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,sCAAsC,EAGxD,GAAIA,EAAO,QAAU,EAAG,CACtB,IAAIC,EAAYD,EAAO,CAAC,EAAE,KAAK,OAAS,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EAI9D,GAHIA,EAAO,SAAW,IACpBC,EAAYD,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAEpD,CAACC,EACH,MAAM,IAAI,MAAM,6EAA6E,CAEjG,CACF,EAEMd,GAAiB,CAACe,EAAuBC,EAAmBC,IAA+B,CAC/F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,sBACSH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,2BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA,gCAGzCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,UAI9E,MAAO;AAAA,oBACWD,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,cAIvBG,CAAK;AAAA;AAAA;AAAA,OAInB,EAEMjB,GAAgB,CAACc,EAAuBC,EAAmBC,IAA+B,CAC9F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,yCAKnEG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,gCAEvDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,oCAI1CI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMhB,GAAa,CAACa,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,+BAI7EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,4BACjDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEtCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMf,GAAa,CAACY,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAA
W,UAAWI,CAAC,CAAC,OAAOC,EAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA,6BAE/EG,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,+BAE5CI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,6BAChDI,EAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEvCI,EAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMd,GAAgB,CAACW,EAAuBC,EAAmBK,IAAsC,CACrG,OAAQA,EAAW,KAAM,CACvB,IAAK,GACH,OAAOrB,GAAee,EAAQC,EAAWK,EAAW,KAAK,MAAM,EACjE,IAAK,GACH,OAAOpB,GAAcc,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAChE,IAAK,GACH,OAAOnB,GAAWa,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,IAAK,GACH,OAAOlB,GAAWY,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMhB,GAAuB,CAACQ,EAA+BQ,IAA2C,CACtG,IAAMC,EAAcC,EAAU,SAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGQ,EAAW,IAAI,EACxEG,EAAYX,EAAO,CAAC,EAAE,KACtBY,EAAaF,EAAU,KAAKD,CAAW,EACvCI,EACF,CAAC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMJ,EAAW,IAAI,CAAC,EACzFA,EAAW,OAAS,GACtBK,EAAgB,KAAK,CAAC,KAAMb,EAAO,CAAC,EAAE,SAAU,KAAMQ,EAAW,KAAK,CAAC,EAGzEK,EAAgB,KAAK,GAAGC,EAA2Bd,EAAO,CAAC,EAAE,KAAMS,CAAW,CAAC,EAC/E,IAAMM,EAAwD,CAAC,MAAM,EAE/DC,EAAmBC,GAA+B,CACtD,IAAMf,EAASgB,EAAe,SAAUlB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEU,EAAQC,EAAc,IAAKpB,EAAO,CAAC,EAAE,SAAUW,EAAU,MAAM,EAC/DU,EAAWF,EAAM,KAAK,MACtBG,EAAa/B,GAAcW,EAAQS,EAAU,OAAQH,CAAU,EAC/De,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQf,EAAW,KAAK,MAAM,CAAC,EACpG,OAAIA,EAAW,OAAS,GACtBe,EAAS,KAAK,CAAC,KAAM,iBAAkB,KAAMF,CAAkC,CAAC,EAG3E;AAAA,cACGJ,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBJ,EAAOjB,CAAM,CAAC;AAAA,cACvEe,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,4BAE5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,0BAEtCmB,CAAQ;AAAA,cACpBC,CAAU;AAAA;AAAA,UAGtB,EAEA,MAAO,CACL,KAAM,MACN,YAAa,CAAC,KAAM,GAAGd,EAAW,IAAI,GAAI,kBAAAO,CAAiB,EAC3D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMN,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAU,KAAKD,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAAG,CACF,CACF,EAEMvB,GAAgC,CAACO,EAA+BQ,IAA6C,CACjH,GAAIR,EAAO,OAAS,EAAG,CACrB,IAAMwB,EAAexB,EAAO,CAAC,EAAE,iBAAiB,EAC1CyB,EAASzB,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,KAAQA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAI,EAElFG,EAAYH,EAAO,CAAC,EAAE,KAAK,OAC3B0B,EAAa,IAAI,WAAW,EAAIvB,CAAS,EAAE,KAAK,CAAC,EACvD,GAAIH,EAAO,QAAU,EAAG,CACtB,IAAM2B,EAAO3B,EAAO,CAAC,EAAE,iBAAiB,EACxC,QAASM,EAAI,EAAGA,EAAIqB,EAAK,OAAQrB,IAC/BoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,CAAC,EAAI,OAAOkB,EAAalB,CAAC,CAAC,EACpDoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,EAAIH,CAAS,EAAI,OAAOqB,EAAalB,EAAIqB,EAAK,MAAM,CAAC,CAElF,MACEH,EAAa,QAAQ,CAACI,EAAGtB,IAAMoB,EAAW,OAAOpB,CAAC,CAAC,EAAK,OAAOsB,CAAC,CAAE,EAGpE,IAAMC,EAAiB,CAAC,EACxB,OAAAH,EAAW,QAAQE,GAAKC,EAAK,KAAKD,CAAC,CAAC,EAE7B,CAAC,KAAMpB,EAAW,KAAM,MAAAiB,EAAO,KAAAI,CAAI,CAC5C,KACE,QAAOrB,CAEX,EAEad,GAAM,CAACoC,EAAyBtB,IAAoC,CAC/EtB,GAAe4C,EAAQ,MAAM,EAC7B,IAAMC,EAAoBtC,GAA8BqC,EAAQ,OAAQtB,CAAU,EAClFsB,EAAQ,QAAQtC,GAAqBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxF,ICrOA,IAmBMC,GAMAC,GA4BAC,GA2DAC,GAsJAC,GAGAC,GAGAC,GAGAC,GAaAC,GAiCOC,GAYAC,GAKPC,GAWOC,GAKAC,GAUPC,GA6BOC,GAKAC,GAgBAC,GAKAC,GA/ZbC,GAAAC,EAAA,kBAGAC,KAEAC,IAEAC,KAIAC,KAQMxB,GAAkByB,GAAwC,CAC9D,GAAIC,GAAI,OAAO,uBAAyB,CAACD,GAAUA,EAAO,SAAW,GACnE,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EAEMxB,GAA0C,CAC5C0B,EAAmBC,EAA2BC,IAAyD,CACzG,IAAMC,EAAiBF,EAAW,SAAW,OACvCG,EAA2BJ,EAAM,KAAK,MAAM,EAC9CG,GACFC,EAAyB,OAAO,EAAG,EAAGA,EAAyB,IAAI,CAAE,EAEvE,IAAMC,EAAe,OAAO,eAAe,KAAKJ,EAAY,WAAW,EACjEK,EAAcL,EAAW,YAAY,MAAM,EAC3CM,EAAUN,EAAW,QAAQ,MAAM,EACnCO,EAAsBH,EAAgBJ,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCS,GAAa,qBAAqBR,EAAkBE,EAA0BE,EAAaC,EAASC,EAAWC,CAAI,EAEnH,IAAME,EAA4BD,GAAa,uBAC3CR,EAAkBE,EAA0BG,
EAASC,EAAWF,EAAaG,EAAMR,EAAW,OAAO,EAEnGW,EAAgB,OAAO,OAAO,CAAC,EAAGX,CAAU,EAC9CI,EACF,OAAO,OAAOO,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,UAAAD,EAAW,SAAUP,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOW,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAE1F,IAAMY,EAA2BF,EAA0B,MAAM,EACjE,OAAAE,EAAyB,KAAKA,EAAyB,OAAO,EAAG,CAAC,EAAE,CAAC,CAAC,EAC/D,CAACD,EAAeT,EAAiBU,EAA2BF,CAAyB,CAC9F,EAEMpC,GAAuB,CACzBuC,EACAb,IAAgG,CAClG,IAAME,EAAiBF,EAAW,SAAW,OACvCc,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAaD,EAAU,KAAKf,EAAW,WAAW,EAClDiB,EACF,CAAC,CAAC,QAAuB,KAAMH,CAAU,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACnFE,EAA8B,CAAC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACzG,GAAIlB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAMmB,EAAKnB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoB,EAAKpB,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqB,EAAUrB,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsB,EAAQtB,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuB,EAAoB,CAAC,EAAEF,EAAUC,GACvCL,EAAgB,KACZ,CAAC,QAAuB,KAAME,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAO,EACrC,CAAC,QAAuB,KAAMC,CAAK,CACvC,EACAJ,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,EAEhC,IAAIM,EAAoB,GACxB,GAAIxB,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMyB,EAAKzB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D0B,EAAK1B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD2B,EAAU3B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD4B,EAAQ5B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EACxDwB,EAAoB,CAAC,EAAEG,EAAUC,GACjCX,EAAgB,KACZ,CAAC,QAAuB,KAAMQ,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAO,EAC3G,CAAC,QAAuB,KAAMC,CAAK,CAAC,EAExCV,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAClC,CACA,MAAO,CAACD,EAAiBC,EAAU,GAAMK,EAAmBC,CAAiB,CAC/E,KAAO,CACL,GAAItB,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM2B,EAAgBd,EAAU,eAAef,EAAW,WAAW,EACrEiB,EAAgB,KACZ,CAAC,QAAuB,KAAMY,CAAa,EAAG,CAAC,QAAuB,KAAM7B,EAAW,IAAI,EAC3F,CAAC,QAAuB,KAAMA,EAAW,OAAO,CAAC,EACrDkB,EAAS,KACL,CAAC,KAAM,gBAAiB,KAAM,MAAO,OAAQW,EAAc,MAAM,EACjE,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ7B,EAAW,KAAK,MAAM,EAC1D,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQA,EAAW,QAAQ,MAAM,CAAC,EAErE,IAAM8B,EAAU9B,EAAW,KAAK,OAAO,CAAC+B,EAAKC,IAAQD,EAAMC,CAAG,EAC9D,MAAO,CAACf,EAAiBC,EAAU,CAAC,CAACY,EAAS,GAAO,EAAK,CAC5D,CACF,EAEMvD,GAAsB,CACxB0D,EAA4BC,EAAkBC,EAAcC,EAAyBpC,EACrFqC,EAAaC,EAAaC,EAAerB,EAA6BY,EAAkBP,EACxFC,IAAuC,CACzC,IAAMtB,EAAiBF,EAAW,SAAW,OACvCwC,EAAWN,EAAE,KAAK,MAClBO,EAASC,EAAe,SAAUR,EAAE,KAAK,OAAQE,CAAe,EAEtE,GAAIpC,EAAW,YAAY,QAAU,EAAG,CACtC,IAAI2C,EAAQ,GACRC,EAAQ,GACRC,EAAW,GACTC,EAAUX,GAAQjC,EAAiB,EAAI,GAsB7C,GArBIqB,EACFoB,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO;AAAA,4CACxBA,CAAO;AAAA;AAAA;AAAA;AAAA,kCAIjBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAGjBM,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,kCACxBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAIfrC,EAAW,YAAY,SAAW,EAAG,CACvC,IAAM+C,EAAUZ,GAAQjC,EAAiB,EAAI,GACzCsB,EACFoB,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO,yBAAyBA,CAAO;AAAA;AAAA;AAAA;AAAA,gBAM5FH,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,kBAGpDF,EAAW;AAAA;AAAA,aAGb,CAoBA,MAlBoB;AAAA,cACVZ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,8BAE3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,4BAEvCD,CAAQ,IAAID,CAAK;AAAA;AAAA,gBAE7BK,CAAK;AAAA,gBACLD,CAAK;AAAA,gBACLE,CAAQ;AAAA,gBACRP,CAAG;AAAA;AAAA;AAAA,cAKjB,KAAO,CACL,GAAIpC,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM8C,EAAchD,EAAW,YAAY
,OACrCiD,EAAWjD,EAAW,KAAK,OAC7BkD,EAAU,GACd,OAAIpB,EACFoB,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAQgBhB,EAAE,gBAAgB,UAAU,CAAC;AAAA,kBAC3CG,CAAG;AAAA,iBAGfa,EAAU;AAAA;AAAA,8BAEchB,EAAE,gBAAgB,UAAU,CAAC;AAAA,gBAC3CG,CAAG;AAAA,cAGK;AAAA,cACVJ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,8BAC3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,wCAE3BO,CAAW;AAAA;AAAA,4BAEvBR,CAAQ,IAAID,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uCAMNS,EAAc,CAAC;AAAA,0CACZG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA,2CACvDG,EAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA;AAAA,0BAEzEA,EAAc,CAAC;AAAA;AAAA;AAAA,+BAGVb,EAAOa,CAAW,UAAUb,CAAI;AAAA,+CAEvDgB,EAAa,mBAAoB,OAAOhB,EAAOa,CAAW,IAAKA,CAAW,CAAC;AAAA,oCAC/Cb,EAAOa,CAAW,QAAQG,EAAa,gBAAiB,SAAUF,CAAQ,CAAC;AAAA,oBAC3FC,CAAO;AAAA;AAAA,gBAEXZ,CAAG;AAAA;AAAA;AAAA,cAKjB,CACF,EAcM9D,GAAiCwB,GAClC,GAAGA,EAAW,MAAM,IAAIA,EAAW,QAAQ,IAAIA,EAAW,OAAO,IAAIA,EAAW,YAAY,MAAM,GAEjGvB,GAA4CuB,GAC7C,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,eAAe,GAEzEtB,GAAwCsB,GACzC,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,YAAY,IAAIA,EAAW,SAAS,GAE9FrB,GAA6BqB,IAA+D,CAChG,OAAQA,EAAW,OACnB,QAAS,CAAC,SAAU,QAAS,aAAc,YAAY,EAAEA,EAAW,QAAkB,EACtF,SAAUA,EAAW,UACrB,YAAaA,EAAW,aACxB,QAASA,EAAW,QACpB,KAAMA,EAAW,IACnB,GAMMpB,GACF,CAACwE,EAAcrD,EAAmBE,EAA2BD,IAAmD,CAC9G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEiC,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyC,EAAWN,EAAE,KAAK,MAElBG,EAAM,kBACRC,EAAM,GACNe,EAAmB,gBACrBf,GAAO,YAAYE,CAAQ,yBAE3BF,GAAO,YAAYE,CAAQ,oCAE7B,GAAM,CAACvB,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxDpC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EAC3E,IAAM2C,EAAwD,CAAC,MAAM,EACrE,MAAO,CACL,KAAAJ,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAAK,EAAKpB,EAC3FY,EAASP,EAAmBC,CAAiB,CACnD,CACF,EAES3C,GAA8BmB,GAA+D,CACxG,IAAMyD,EAAmBzD,EAAW,oBAAiC,EAE/D0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI0D,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAE1F,IAAMC,EAAwB,CAAC,gBAAAF,EAAiB,GAAGC,EAAM,SAAU,EAAE,EACrE,MAAO,CAAC,GAAGC,EAAuB,SAAUlF,GAAyCkF,CAAqB,CAAC,CAC7G,EAEa7E,GAAc,CAAC8E,EAAyB5D,IAA4C,CAC/F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,cAAegF,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CACnG,EAEMjB,GAAuB,CAC3B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,CACd,EAEaC,GAAoCgB,GAA+D,CAC9G,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEa5E,GAAoB,CAAC2E,EAAyB5D,IAA4C,CACrG5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,oBAAqBgF,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CACxG,EAOMd,GACF,CAACkE,EAAcrD,EAAmBE,EAA2BD,IAA+C,CAC1G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEoC,EAAM;AAAA;AAAA,MAGNC,EAAM,GACNJ,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyD,EAAwD,CAAC,MAAM,EAC/D,CAACvC,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxD,OAAApC,EAAgB,KAAK,GAAGsC,EAA2BxD,EAAM,KAAMc,CAAW,CAAC,EACpE,CACL,KAAAuC,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAK
C,EAChFvC,EAAM,WAAa,GAAoB,OAAS,KAAMmB,EAAUY,EAASP,EAC1EC,CAAiB,CACvB,CACF,EAESrC,GAAU,CAACyE,EAAyB5D,IAAwC,CACvF5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,UAAW0E,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CAC3F,EAEaZ,GAA0BY,GAA2D,CAChG,IAAM8D,EAAe9D,EAAW,cAC1BO,EAAYP,EAAW,UAEvB0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI8D,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAIJ,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,oEAAoE,EAEtF,IAAMK,EAAoB,CAAC,aAAAD,EAAc,UAAAvD,EAAW,GAAGmD,EAAM,SAAU,EAAE,EACzE,MAAO,CAAC,GAAGK,EAAmB,SAAUrF,GAAqCqF,CAAiB,CAAC,CACjG,EAEa1E,GAAgCW,GAA2D,CACtG,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEavE,GAAgB,CAACsE,EAAyB5D,IAAwC,CAC7F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,gBAAiB0E,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CAChG,IClaA,IAUMgE,GAUAC,GAoCOC,GAxDbC,GAAAC,EAAA,kBAGAC,KAEAC,IAGAC,KAEMP,GAAwB,CAACQ,EAAeC,EAAeC,IAAwB,CACnF,IAAMC,EAAiBH,IAAUC,EAC3BG,EAA8BJ,EAAQC,GAASC,EAAQ,EACvDG,EAA8BL,EAAQC,GAASC,EAAQ,EAE7D,GAAIC,GAAkBC,GAA+BC,EACnD,MAAM,IAAI,MAAM,2CAA4C,CAEhE,EAEMZ,GAAyB,CAACO,EAAeC,EAAeC,EAAeI,IAAoC,CAC/G,IAAMC,EAAc,KAAK,IAAI,KAAK,MAAMN,EAAQD,GAASE,CAAK,CAAC,EACzDM,EAAwB,CAACD,CAAW,EACpCE,EAAaF,EACbG,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,KAAMH,EAAU,KAAMN,CAAK,EAAG,CAAC,KAAMM,EAAU,KAAMJ,CAAK,EACtG,GAAGS,EAA2BH,CAAW,CAC3C,EAEMI,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,EAAe,SAAUT,EAAUE,EAAY,MAAM,EAC9DQ,EAAWF,EAAO,KAAK,MACvBG,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAMD,CAAkC,EAC3F,CAAC,KAAM,QAAS,KAAMA,CAAkC,CAC1D,EACA,MAAO;AAAA,UACDH,EAAa,iBAAiBI,CAAQ,EAAE,iBAAiBH,CAAM,CAAC;AAAA,UAChED,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,gDACnCG,CAAQ;AAAA,QAEtD,EAEA,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGV,CAAQ,EAAE,EACjC,gBAAAM,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,EACF,CACF,EAEahB,GAASwB,GAAkC,CACtD,IAAIlB,EAAQ,EACRC,EAAQ,EACRC,EAAQ,EACRgB,EAAQ,OAAO,CAAC,EAAE,WAAa,GACjClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,GAClCA,EAAQ,OAAO,CAAC,EAAE,WAAa,IACxClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,GAE3CC,GAAI,OAAO,sBACb3B,GAAsBQ,EAAOC,EAAOC,CAAK,EAG3CgB,EAAQ,QAAQzB,GAAuBO,EAAOC,EAAOC,EAAOgB,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CACvG,IC1EA,IAiCME,GAuBAC,GASAC,GA6CAC,GAkDAC,GAkCAC,GAaAC,GAwBAC,GAyBAC,GAuBAC,GAkCAC,GAWAC,GAQAC,GAsDAC,GA6EAC,GAwEAC,GAoHAC,GAOOC,GAiBAC,GAnqBbC,GAAAC,EAAA,kBAIAC,IAEAC,KACAC,KAGAC,KAuBMxB,GAAiB,CAACyB,EAAkBC,IAAuC,CAK/E,GAJAD,EAAO,MAAOE,GAAUA,EAAQ,IAAM,IAAM,CAClB,MAAM,IAAI,MAAM,oDAAoD,CACtE,EAAE,EAEtBF,EAAO,OAAS,GAClB,GAAIC,EAAW,OAAS,UACtB,GAAI,EAAED,EAAO,SAAW,GAAKA,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACtGA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACxDA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MACN;AAAA,oGACwF,UAErFC,EAAW,OAAS,SACzB,EAAED,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC/EA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MAAM,+DAA+D,EAIvF,EAEMxB,GAAe,CAACwB,EAA2BG,EAAyBC,IAA2B,CACnGD,EAAK,MAAOD,GAAUA,GAAS,GAAKA,EAAQE,IAAS,IAAM,CACnC,MAAM,IAAI,MAAM,qEAAqE,CACvF,EAAE,EACxB,IAAMC,EAAY,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAG,EAC1C,OAAAD,EAAK,QAAQ,CAACD,EAAOI,IAAUD,EAAUH,CAAK,EAAIF,EAAOM,CAAK,CAAC,EACxDD,CACT,EAEM5B,GACF,CAAC8B,EAA+BN,EAA8BO,EAAsBR,EACnFS,EAAiBC,IAAwB,CACxC,GAAM,CAACC,EAAeC,EAA
kBC,CAAe,EAClDL,EAAe,GAAM,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,GAAKD,EAAO,OAAS,EAAK,EAAI,GAAI,EAAE,EACrEH,EAAOG,EAAO,CAAC,EAAE,KAAK,OAC5B,GAAII,EAAgB,GAAKJ,EAAO,OAASI,GAAiBJ,EAAOI,CAAa,EAAE,KAAK,OAAS,EAC5FJ,EAAOI,CAAa,EAAE,gBAAgB,EAAE,QAAST,GAAUQ,EAAI,KAAKR,CAAK,CAAC,UACjED,EAAW,0BAA4B,qBAChD,MAAM,IAAI,MAAM,2FAA2F,EAG7G,GAAIW,EAAmB,GAAKL,EAAO,OAASK,GAAoBL,EAAOK,CAAgB,EAAE,KAAK,OAAS,EAAG,CAExG,GADAL,EAAOK,CAAgB,EAAE,gBAAgB,EAAE,QAASV,GAAUF,EAAO,KAAKE,CAAK,CAAC,EAC5EF,EAAO,SAAW,GACjBA,EAAO,SAAWI,GAASI,GAAgB,IAAMR,EAAO,SAAWC,EAAW,KAAK,OACtF,MAAM,IAAI,MACN,6FAA6F,EAEnG1B,GAAeyB,EAAQC,CAAU,EAC7BA,EAAW,KAAK,OAAS,GAC3BzB,GAAawB,EAAQC,EAAW,KAAMG,CAAI,EAAE,QAAQ,CAACF,EAAOI,IAAUN,EAAOM,CAAK,EAAIJ,CAAK,CAE/F,CACA,GAAIW,EAAkB,GAAKN,EAAO,OAASM,IACzCN,EAAOM,CAAe,EAAE,iBAAiB,EAAE,QAASX,GAAUO,EAAM,KAAK,OAAOP,CAAK,CAAC,CAAC,EACnFO,EAAM,SAAWL,GAASI,GAAgB,IAAMC,EAAM,SAAWR,EAAW,KAAK,QACnF,MAAM,IAAI,MAAM,4FAA4F,EAIhH,GAAIA,EAAW,KAAK,OAAS,EAAG,CAC9B,GAAID,EAAO,SAAWC,EAAW,KAAK,OACpC,MAAM,IAAI,MAAM,0FAA0F,EAE5G,GAAIQ,EAAM,SAAWR,EAAW,KAAK,OACnC,MAAM,IAAI,MACN,8FAA8F,CAEtG,CACA,GAAI,OAAOD,EAAW,KAAe,OAAOS,EAAU,KAAeT,EAAO,OAAS,GAAKS,EAAM,OAASL,EACvG,MAAM,IAAI,MAAM,yDAAyD,CAE7E,EAEE1B,GACF,CAACoC,EAAiDC,IAC9C;AAAA,2DACmDA,CAAK,OAC3D,IAAM,CACD,OAAQD,EAAwB,CAC9B,IAAK,aACH,MAAO,UAAUC,CAAK,gBAAgBA,CAAK,YAC7C,IAAK,qBACH,MAAO;AAAA,8BACSA,CAAK,uBAAuBA,CAAK;AAAA;AAAA;AAAA,qBAInD,IAAK,uBACH,MAAO,WAAWA,CAAK,uBAAuBA,CAAK,YACrD,IAAK,gBACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMaA,CAAK;AAAA;AAAA,0BAEbA,CAAK,6DAA6DA,CAAK;AAAA;AAAA,qBAGrF,IAAK,qBACH,MAAO;AAAA,6BACQA,CAAK,gBAAgBA,CAAK;AAAA,2BAC5BA,CAAK,gBAAgBA,CAAK,yBAAyBA,CAAK;AAAA,0BACzDA,CAAK;AAAA;AAAA,mCAEIA,CAAK,yBAAyBA,CAAK;AAAA,qBAE1D,IAAK,uBACH,MAAO,uBAAuBA,CAAK,YAAYA,CAAK;AAAA,uCAC3BA,CAAK;AAAA,mCACTA,CAAK;AAAA;AAAA,sCAEFA,CAAK,uBAAuBA,CAAK,mBAC3D,IAAK,aACH,MAAO,YAAYA,CAAK,uBAAuBA,CAAK,mBACtD,QACE,MAAM,IAAI,MAAM,6BAA6BD,CAAsB,mBAAmB,CAC1F,CACF,GAAG,EACP,IAEEnC,GAA8B,CAACqC,EAA0BR,EAAsBO,IACjF,6CAA6CA,CAAK,4BAA4BA,CAAK,MAAQ,IAAM,CAC/F,OAAQC,EAAa,CACnB,IAAK,oBACH,MAAO,yIAKT,IAAK,QACH,MAAO,2BACT,IAAK,OACH,MAAO,0BACT,IAAK,qBACH,MAAO,0KAKT,IAAK,SACL,QACE,GAAIR,EAAe,GACjB,MAAO,mLAOT,MAAM,IAAI,MAAM,gBAAgBQ,CAAW,mBAAmB,CAClE,CACF,GAAG,EACH,IAEEpC,GAAY,CAAC8B,EAAwBP,EAAyBC,IAA2B,CAC7F,IAAMa,EAAS,IAAI,MAAMb,CAAI,EAAE,KAAK,CAAC,EAAE,OAAO,IAAI,MAAMA,CAAI,EAAE,KAAK,CAAC,CAAC,EAC/Dc,EAAWR,EAAI,SAAW,EAAIO,EAASP,EAAI,MAAM,EACvD,OAAIP,EAAK,OAAS,GAChBA,EAAK,QAAQ,CAACgB,EAAGC,IAAM,CACrBH,EAAOE,CAAC,EAAID,EAASE,CAAC,EACtBH,EAAOG,EAAIhB,CAAI,EAAIc,EAASf,EAAK,OAASiB,CAAC,CAC7C,CAAC,EACMH,GAEFC,CACT,EAEMrC,GACF,CAACwC,EAA+BrB,EAA2BS,EAA0BN,IACrE,CACV,IAAImB,EAAwB,CAAC,EAC7B,GAAIb,EAAM,OAAS,EACjB,GAAIN,EAAK,OAAS,EAAG,CAEnB,GADAkB,EAAW,QAASF,GAAMG,EAAY,KAAKH,CAAC,CAAC,EACzC,KAAK,IAAI,GAAGhB,CAAI,EAAIkB,EAAW,OACjC,MAAM,IAAI,MAAM,sBAAsB,EAExClB,EAAK,QAAQ,CAACgB,EAAGC,IAAME,EAAYH,CAAC,EAAIV,EAAMW,CAAC,CAAC,CAClD,MACEX,EAAM,QAASU,GAAMG,EAAY,KAAKH,CAAC,CAAC,MAErC,CACL,GAAInB,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyC,EAEzDsB,EAAcD,EAAW,IAAI,CAACnB,EAAOI,IAAU,KAAK,MAAMJ,EAAQF,EAAOM,CAAK,CAAC,CAAC,CAEpF,CACA,OAAOgB,CACT,EAEFxC,GAAoB,CAACuC,EAA+BrB,EAAkBC,IAAiC,CAC3G,IAAMsB,GAAiB,IAAM,CAC3B,OAAQtB,EAAW,sBAAuB,CACxC,IAAK,aACH,OAAOA,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,IAAK,cACH,OAAOC,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAI,GAAKD,EAAO,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGA,EAAQ,OAAO,SAAS,EAC1E,QACE,MAAM,IAAI,MAAM,4BAA4BC,EAAW,qBAAqB,mBAAmB,CACnG,CACF,GAAG,EACHD,EAAO,KAAK,EAAK,EAAGA,EAAO,MAAM,EACjC,IAAMwB,EAAsBH,
EAAW,MAAM,EAC7C,OAAIpB,EAAW,KAAK,OAAS,GAC3BA,EAAW,KAAK,QAASkB,GAAMnB,EAAOmB,CAAC,EAAII,CAAa,EACxDtB,EAAW,KAAK,QAASkB,GAAMK,EAAoBL,CAAC,EAAI,KAAK,MAAME,EAAWF,CAAC,EAAInB,EAAOmB,CAAC,CAAC,CAAC,IAE7FnB,EAAO,KAAKuB,EAAe,EAAGvB,EAAO,MAAM,EAC3CwB,EAAoB,QAAQ,CAACL,EAAGC,IAAMI,EAAoBJ,CAAC,EAAI,KAAK,MAAMD,EAAInB,EAAOoB,CAAC,CAAC,CAAC,GAEnFI,CACT,EAEMzC,GACF,CAAC0C,EAAuBJ,EAA+BC,EAAgCI,EACtFC,IAA8B;AAAA,mEACgCF,EAAO,KAAK,OAAO,cAC9EA,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,oCACZG,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,gCAC5CA,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA,sBAC/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA,wBAChDE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,uBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA;AAAA,kCAExDF,EAAO,KAAK,KAAK;AAAA;AAAA,gCAEnBG,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAQzFtC,GACF,CAAC6C,EAAsBJ,EAAuBJ,EAA+BC,EAC5EI,EAAsBC,EAAmBG,IAAsC;AAAA,gEACpBL,EAAO,KAAK,OAAO,QAAQI,EAAM,KAAK,OAAO;AAAA,2BAClFA,EAAM,KAAK,OAAO;AAAA,gCACbP,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,sBAE/CG,EAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA;AAAA;AAAA;AAAA,0BAI9CE,EAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,yBAC7CC,EAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA,gCAC5DC,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,EAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA,iBAG9EQ,CAAgB,4CAA4CL,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA,wCAGtDA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAS/CI,EAAM,WAAW,gBAAiB,IAAK,cAAc,CAAC;AAAA;AAAA;AAAA,OAI1D5C,GAAoB,CAAC4C,EAAsBR,IAA0C;AAAA,0CACjDQ,EAAM,KAAK,OAAO;AAAA,gCAC5BR,EAAW,MAAM;AAAA,4BACrBQ,EAAM,WAAW,gBAAiB,GAAG,CAAC;AAAA,gDAClBD,EAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,OAOtGnC,GACF,CAAC2C,EAAsBE,EAAoBC,EAAkBC,IACzDJ,EAAM,KAAOI,EAAc;AAAA,MAC7BJ,EAAM,WAAW,gBAAiBE,EAAY,SAAS,CAAC;AAAA,MACxDF,EAAM,WAAW,gBAAiBG,EAAU,OAAO,CAAC;AAAA,EAEvB,GAE7B7C,GACF,CAAC0C,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUG,EAAWC,EAAUL,CAAU,EAC5CV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,CAAC,EAC9DN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wEAC2Dd,CAAK;AAAA,2BAClDc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBM,EAAW,mBAAmBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QAC9FN,EAAM,WAAW,gBAAiBO,EAAU,mBAAmBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC5FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,+CAGHJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,gBAE/DA,CAAK,sBAAsBoB,CAAS;AAAA,gBACpCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAEzCN,EACI,yBAAyBT,EAAWc,CAAS,CAAC,8BAA8Bd,EAAWe,CAAQ,CAAC;AAAA,iBAC7FF,CAAkB;AAAA,SAErB,EAAE;AAAA,8BACcb,EAAWc,CAAS,CAAC;AAAA,8BACrBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,2BAKvBf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA,iBAC1EjB,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWjC,EAEE3B,GACF,CAACyC,EAAsBJ,EAAuBJ,EAA+BC,EAC5EtB,EAA2BU,EAAwB2B,EAAqBP,EACxEI,EAA4BI,IAAoC,CAC/D,IAAMC,EAAOlB,EAAW,SAAW,EAC7BmB,EAAS,GACT,CAACL,EAAWC,CAAQ,EAAIG,EAAO,CAAC,EAAG,CAAC,EAAIC,EAAS,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAC/DzB,EAAQc,EAAM,KAAK,MACnBY,EAAoCC,GAAwB,CAChE,IAAMC,EAAYD,IAAQP,EAAY,MAAQ,MAC9C,MAAO;AAAA,WACJQ,CAAS,qCAAqCd,EAAM,KAAK,OAAO,qBAC/DJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,6BACfU,EAAO,WAAW,iBAAkBiB,CAAG,CAAC;AAAA,2BAC1C3B,CAAK,+DAA+Df,EAAO0C,CAAG,CAAC;AAAA,UAChGpB,EAAYoB,CAAG,CAAC,KAAKrB,EAAWqB,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,MA
AMrB,EAAW,MAAM;AAAA,gCAC/DN,CAAK;AAAA;AAAA;AAAA,cAGvBe,CAAgB,0CAA0CT,EAAWqB,CAAG,CAAC;AAAA,mBACpER,CAAkB;AAAA;AAAA,0BAEXnB,CAAK,gBAAgBA,CAAK;AAAA;AAAA,gBAEpC4B,CAAS,KAAK5B,CAAK,oBAAoBA,CAAK;AAAA,gBAC5C4B,CAAS,WAAWA,CAAS,OAAOtB,EAAWqB,CAAG,CAAC;AAAA,eACpD,IACDJ,EACK;AAAA,mCAEER,EACF,UAAUI,CAAkB,IAE5B,GAAGS,CAAS,iBAAiBA,CAAS,KAAKtB,EAAWqB,CAAG,CAAC,WAElE,CAAC;AAAA;AAAA,kCAEsBb,EAAM,KAAK,OAAO;AAAA,YACxCA,EAAM,WAAW,qBAAsBa,EAAK,OAAOC,CAAS,GAAG,CAAC;AAAA,0BAEhED,IAAQP,EAAYN,EAAM,aAAa,oBAAoB,EACvC,2DAA2D;AAAA;AAAA;AAAA,QAIrF,EAEA,MAAO;AAAA,MACPY,EAAiCN,CAAS,CAAC;AAAA,MAC3CM,EAAiCL,CAAQ,CAAC;AAAA,qCACXrB,CAAK,cAAcA,CAAK;AAAA;AAAA,wBAErCA,CAAK,gBAAgBA,CAAK;AAAA,wBAC1BA,CAAK;AAAA,wBACLA,CAAK;AAAA,uBACNA,CAAK;AAAA,oBACRsB,CAAW,wBAAwBA,CAAW,yBACxDA,CAAW,yBAAyBA,CAAW;AAAA,oBACrCA,CAAW,mBAAmBA,CAAW;AAAA,oBACzCA,CAAW,2BAA2BA,CAAW;AAAA,oBACjDA,CAAW,yBAAyBA,CAAW,0BACzDA,CAAW,0BAA0BA,CAAW;AAAA;AAAA;AAAA;AAAA,qCAIrBtB,CAAK,sBAAsBA,CAAK,YAAYA,CAAK;AAAA,oBAClEA,CAAK;AAAA;AAAA;AAAA;AAAA,4CAImBU,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,yBACnDc,EAAM,KAAK,OAAO;AAAA;AAAA;AAAA,KAIvC,EAEExC,GACF,CAACwC,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUY,EAAUT,EAAWC,EAAUL,CAAU,EACtDV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACpEN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wFAC2Ed,CAAK;AAAA,2BAClEc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBe,EAAU,qBAAqBvB,EAAWuB,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9Ff,EAAM,WAAW,gBAAiBM,EAAW,sBAAsBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QACjGN,EAAM,WAAW,gBAAiBO,EAAU,qBAAqBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,gDAGFJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,kBAE9DA,CAAK,sBAAsB6B,CAAQ;AAAA,mBAClC7B,CAAK,sBAAsBoB,CAAS;AAAA,kBACrCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAE3CN,EAAmB,6BAA6BT,EAAWuB,CAAQ,CAAC,oCAC7CvB,EAAWc,CAAS,CAAC,kCAAkCd,EAAWe,CAAQ,CAAC;AAAA,eAC7FF,CAAkB;AAAA,WAEJ,EAAE;AAAA;AAAA,gCAECb,EAAWuB,CAAQ,CAAC;AAAA,oCAChBvB,EAAWc,CAAS,CAAC;AAAA,kCACvBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAO3Bf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA;AAAA,kBAEzEjB,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,iBACNA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,mBAAmBA,CAAK;AAAA,iBAC7BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAgBjC,EAEEzB,GACF,CAACuD,EAAyB5C,EAA8BO,EAAsBsC,EAC7ErC,EAA0BsC,IAA6C,CACtE,IAAM1B,EAAawB,EAAY,KACzBnC,EAAM9B,GAAUmE,EAAU9C,EAAW,KAAMoB,EAAW,MAAM,EAE9DC,EAAczC,GAAgBwC,EAAYyB,EAAarC,EAAOR,EAAW,IAAI,EAC7ED,EAAS8C,EAAY,MAAM,EAC3BA,EAAY,SAAW,IACzB9C,EAASqB,EAAW,IAAI,CAACnB,EAAOI,IAAUJ,IAAU,EAAI,EAAMoB,EAAYhB,CAAK,EAAIJ,CAAK,EACpFD,EAAW,wBAA0B,YACvCqB,EAAcxC,GAAkBuC,EAAYrB,EAAQC,CAAU,IAGlE,IAAMwB,EAASuB,EAAe,SAAUH,EAAY,SAAUvB,EAAY,MAAM,EAC1EO,EAAQoB,EAAc,QAASJ,EAAY,SAAUxB,EAAW,MAAM,EACtE6B,EAAaC,EAAU,KAAK7B,CAAW,EACvC8B,EAAU/B,EAAW,SAAWC,EAAY,QAAUD,EAAW,MAAM,CAACgC,EAAGjC,IAAMiC,IAAM/B,EAAYF,CAAC,CAAC,EACrGU,EAAmB7B,EAAW,0BAA4B,qBAC1DiC,EAAqBjC,EAAW,mBAChCqD,EAAWzB,EAAM,KAAK,MACtB0B,EAAmBC,GAA+B;AAAA,QACtDJ,EAAU,GAAK;AAAA,QACf1E,GAA2CuB,EAAW,wBAAyBqD,CAAQ,CAAC;AAAA,SACvF,IAAM,CACP,OAAQrD,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA,gBACHhB,GAAkB4C,EAAOR,CAAU,CAAC;AAAA,gBACpC1C,GAA4BsB,EAAW,YAAaO,EAAc8C,CAAQ,CAAC;AAAA,gBAE3EtE,GACI6C,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,OAAQoB,CAAgB,CAAC;AAAA,gBAE9F,IAAK,SACH,MAAO;AAAA,gBACH/C,GAA0
C0C,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,MAAM,CAAC;AAAA,iBACpG,IAAM,CACT,GAAIW,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GAAGlC,GAAsB0C,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAC3F,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EAC1D,MAAO,GAAGhC,GAAuBwC,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAEjG,MAAM,MAAM,kFAAkF,CAElG,GAAG,CAAC;AAAA,cAEN,IAAK,QACH,MAAO;AAAA,eACJ,IAAM,CACP,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GACHjC,GACIyC,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAQU,EAAKT,EAAW,YAAa6B,EAC7E7B,EAAW,mBAAoBA,EAAW,cAAc,CAAC,GAEjE,MAAM,MAAM,2EAA2E,CAE3F,GAAG,CAAC;AAAA,cAEN,QACE,MAAM,MAAM,qBAAqB,CACrC,CACF,GAAG,CAAC;AAAA,OACH;AAAA,QAEGuD,EAAa,gBAAgB,cAAe,KAAK,EAC5C,gBAAgB,SAAU,MAAOxD,EAAO,MAAM,EAC9C,gBAAgB,MAAO,MAAOU,EAAI,MAAM,EACxC,iBAAiBmB,EAAOJ,CAAM,CAAC;AAAA,QACtC+B,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,UAC1EJ,EAAU,0CAA4C;AAAA,+BACjC3B,EAAO,gBAAgB,YAAY,CAAC;AAAA,6BACtCI,EAAM,KAAK,OAAO;AAAA,WACpC,IAAM,CACT,OAAQ5B,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA;AAAA,yCAEsB4B,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,yCAEnC5B,EAAW,kBAAkB;AAAA,mBAE5D,IAAK,SACH,MAAO,wBACFoB,EAAW,SAAW,GAAKA,EAAW,SAAW,EAAK,wBACA,wBAAwB,oBACrF,IAAK,QACH,MAAO,6DACT,QACE,MAAM,MAAM,4BAA4BpB,EAAW,IAAI,EAAE,CAC7D,CACF,GAAG,CAAC;AAAA,CACT;AAAA,SAGK,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAGA,EAAW,QAAQ,IAAIO,CAAY,IAAIR,EAAO,OAAS,EAAIA,EAAS,EAAE,IAC3ES,EAAM,OAAS,EAAIA,EAAQ,EAAE,IAAIC,EAAI,OAAS,EAAIA,EAAM,EAAE,IAAI0C,CAAO,IAAI/B,CAAU,GACvF,kBAAmB,CAAC,MAAM,CAC5B,EACA,gBAAAkC,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMjC,EAAa,SAAUuB,EAAY,QAAQ,CAAC,EAC7D,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,OAAsB,KAAMlD,CAAM,EAC9E,CAAC,OAAsB,KAAMU,CAAG,EAAG,GAAG+C,EAA2BpC,EAAYC,CAAW,CAC1F,CACF,EACF,CACF,EAEE/B,GAAuCmE,GAAoC,CAC/E,IAAMC,EAAmBD,EAAQ,iBAGjC,OAF2B,IAAI,YAAYC,EAAkBA,EAAiB,WAAY,CAAC,EACnD,CAAC,CAE3C,EAEanE,GAAS,CAACkE,EAAyBzD,IAAuC,CACrF,IAAMD,EAAmB,CAAC,EACpBS,EAAkB,CAAC,EACnBC,EAAgB,CAAC,EAKjBF,EAAejB,GAAoCmE,CAAO,EAChE,GAAIzD,EAAW,YAAc,EAC3B,MAAM,MAAM,6DAA6D,EAE3ExB,GAAeiF,EAAQ,OAAQzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAC3EgD,EAAQ,QACJpE,GAAwBoE,EAAQ,OAAO,CAAC,EAAGzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC7G,EAEajB,GAAyBQ,GAA0D,CAC9F,IAAM2D,EAAY3D,EAAW,UACvBE,EAAOF,EAAW,KAClB4D,EACF5D,EAAW,wBACToC,EAAcpC,EAAW,YACzBqC,EAAiBrC,EAAW,iBAA6B,EACzDiC,EAAqBjC,EAAW,mBAChC6D,EAA+C7D,EAAW,sBAC1D8D,EAAa9D,EAAW,KAExBe,EAA4Bf,EAAW,cAAgB,GAAK,SAAWA,EAAW,YACxF,OAAO+D,GAA4B,CACjC,UAAAJ,EACA,KAAAzD,EACA,wBAAA0D,EACA,YAAAxB,EACA,eAAAC,EACA,mBAAAJ,EACA,sBAAA4B,EACA,KAAAC,EACA,YAAA/C,CACF,CAAC,CACH,IC1rBA,IAkBMiD,GAqDAC,GA+FOC,GAtKbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KASMR,GAAiB,CAACS,EAA+BC,IAAgD,CACrG,GAAM,CAACC,EAAOC,EAAaC,EAAUC,CAAQ,EAAIL,EAC3C,CAAC,SAAAM,EAAU,mBAAAC,CAAkB,EAAIN,EAEvC,GAAIC,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wDAAwDA,EAAM,KAAK,MAAM,EAAE,EAE7F,GAAI,CAACM,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,GAAK,CAACK,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,CAAC,GACtFA,EAAY,KAAK,SAAW,EAC9B,MAAM,IAAI,MAAM,uEAAuEA,EAAY,KAAK,MAAM,EAAE,EAElH,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAI,CAACG,EAAU,SAASJ,EAAS,KAAMC,EAAS,IAAI,EAClD,MAAM,IAAI,MAAM,wEAA4E,EAG9F,GAAIE,EAAqB,GAAKD,IAAa,EACzC,MAAM,IAAI,MAAM,iEAAiE,EAGnF,IAAMG,EAAYP,EAAM,KAAK,CAAC,EACxBQ,EAAiBR,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACjDS,EAAoBP,EAAS,KAAK,CAAC,EACnCQ,EAAaJ,EAAU,kBAAkBN,EAAM,KAAM,CAAC,EAAIQ,EAC1DG,EAAWN,IAAuB,EAAIH,EAAS,KAAK,CAAC,EAAI,EAAIQ,EAAaN,EAChF,GAAIC,EAAqBM,EACvB,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIV,EAAY,KAAK,SAAW,EAAG,CACjC,GAAIM,
IAAcN,EAAY,KAAK,CAAC,EAClC,MAAM,IAAI,MAAM,sEAAsEA,EAAY,KAAK,CAAC,CAAC,EAAE,EAE7G,GAAIO,IAAmBP,EAAY,KAAK,CAAC,EACvC,MAAM,IAAI,MAAM,2EAA2EA,EAAY,KAAK,CAAC,CAAC,EAAE,CAEpH,CAEA,GAAIU,EAAW,IAAMT,EAAS,KAAK,CAAC,GAAKG,EAAqB,IAAMH,EAAS,KAAK,CAAC,EACjF,MAAM,IAAI,MAAM,kGACZA,EAAS,KAAK,CAAC,CAAC,EAAE,EAGxB,GAAIM,EAAiBC,EACnB,MAAM,IAAI,MAAM,gFAAgF,CAEpG,EAEMnB,GACF,CAACQ,EAA+BC,IAAuD,CACrF,GAAM,CAAC,YAAAa,EAAa,SAAAR,EAAU,mBAAAC,EAAoB,MAAAQ,CAAK,EAAId,EACrDQ,EAAYT,EAAO,CAAC,EAAE,KAAK,CAAC,EAC5BgB,EAAcR,EAAU,kBAAkBR,EAAO,CAAC,EAAE,KAAM,CAAC,EAC3DU,EAAiBV,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACzDY,EAAaI,EAAcN,EAC3BO,EAAyBjB,EAAO,CAAC,EAAE,KAAK,CAAC,EACzCa,EAAWN,IAAuB,EAAIU,EAAyB,EAAIL,EAAaN,EAKhFY,EACF,IAAI,MAAcT,EAAWC,EAAgBE,EAAaC,EAAUA,EAAWI,CAAsB,EACnGE,EAAgBX,EAAU,eAAeU,CAAW,EAEpDE,EAAoC,CACxC,CAAC,OAAsB,KAAML,CAAK,EAClC,CAAC,QAAuB,KAAMG,CAAW,EACzC,CAAC,QAAuB,KAAMC,CAAa,EAI3C,GAAInB,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MAAsB,CAAC,QAAuB,KAAM,CAACgB,EAAaJ,EAAYC,EAAU,CAAC,CAAC,CAAC,EAC/F,CAAC,EACT,GAAIb,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MACA,CAAC,QAAuB,KAAM,CAACgB,EAAaH,EAAUH,EAAiBG,EAAU,CAAC,CAAC,CAAC,EACxF,CAAC,EAET,GAAGQ,EAA2BrB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9G,EAEMsB,EAAmBC,GAA+B,CACtD,IAAMrB,EAAQsB,EAAc,QAASxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEG,EAAcqB,EAAc,eAAgBxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACrFI,EAAWoB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EK,EAAWmB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EyB,EAASC,EAAe,SAAU1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAEjF,OAAAuB,EAAa,iBAAiB,CAC5B,CAAC,KAAM,QAAS,KAAM,KAAK,EAC3B,CAAC,KAAM,eAAgB,KAAM,MAAO,OAAQL,EAAY,MAAM,EAC9D,CAAC,KAAM,iBAAkB,KAAM,MAAO,OAAQC,EAAc,MAAM,EAClE,CAAC,KAAM,uBAAwB,KAAM,MAAO,OAAQA,EAAc,MAAM,CAC1E,CAAC,EAEM;AAAA,UACLI,EAAa,iBAAiBrB,EAAOC,EAAaC,EAAUC,EAAUoB,CAAM,CAAC;AAAA;AAAA,UAE7EF,EAAa,UAAUI,EAAc,CAAC;AAAA,+CACDvB,EAAS,IAAI;AAAA;AAAA;AAAA,YAGhDmB,EAAa,sCAAsC,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA,kBAIpDpB,EAAY,2BAA2B,UAAWuB,EAAe,GAAIvB,EAAY,KAAK,OAAQ,CAAC,CAAC,CAAC;AAAA;AAAA,sBAE7FA,EAAY,YAAY,kBAAkB,CAAC;AAAA,oFACmBW,CAAW;AAAA,yDACtCA,CAAW;AAAA,uBAC7CZ,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEF,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEoB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA,uBACpBvB,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEH,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEqB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA;AAAA;AAAA,cAG7BA,EAAO,YAAY,IAAKvB,EAAM,YAAY,GAAG,CAAC,CAAC;AAAA;AAAA,UAGvD,EAEA,MAAO,CACL,KAAM,kBACN,YAAa,CACX,KAAM0B,GAA4B,CAC1B,YAAAd,CACF,CAAC,EAAE,SACT,kBAAmB,CAAC,OAAQ,OAAQ,OAAQ,MAAM,CACpD,EACA,gBAAAQ,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAU,KAAKU,CAAW,EAAIS,EAAc,CAAC,EAC1E,gBAAAP,CACF,EACF,CACF,EAES3B,GAAkB,CAACoC,EAAyB5B,IAAgD,CACvGV,GAAesC,EAAQ,OAAQ5B,CAAU,EACzC4B,EAAQ,QAAQrC,GAAiCqC,EAAQ,OAAQ5B,CAAU,CAAC,CAC9E,ICzKA,IAeM6B,GAwDAC,GA4IOC,GAnNbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAOMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMC,EAAoBD,EAAO,CAAC,EAC5BE,EAAmBF,EAAO,CAAC,EAC3BG,EAAoBH,EAAO,CAAC,EAElC,GAAIC,EAAM,WAAaC,EAAK,UAAYD,EAAM,WAAaE,EAAM,SAC/D,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIC,EAAK,KAAK,SAAW,GAAKA,EAAK,KAAK,SAAW,EACjD,MAAM,IAAI,MAAM,uBAAuB,EAGzC,IAAME,EAAaH,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EAC7
CI,EAAiBJ,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACvD,GAAIC,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAME,EACtC,MAAM,IAAI,MAAM,8CAA8C,EAEhE,GAAIF,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMG,EACtC,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIF,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,IAAMC,EACxC,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBN,EAAO,CAAC,EACjC,GAAIM,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMF,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACA,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMO,EAAmBP,EAAO,CAAC,EACjC,GAAIO,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMH,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACF,EAEMX,GACF,CAACO,EAA+BQ,EAAqCC,EAAqBC,IACvE,CACb,IAAMC,EAAaH,EAAW,WAExBI,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAcH,EACdI,EAAaH,EACbT,EAAaQ,EAAW,MAAM,EAAE,EAAE,CAAC,EACnCK,EAAmBP,EAAaE,EAAW,MAAM,EAAG,EAAE,EAAE,OAAO,CAAC,EAAI,CAAC,EACrEM,EAAe,CAACP,GAAcX,EAAO,OAAS,EAC9CmB,EAAenB,EAAO,OAAS,EAC/BoB,EAAgBV,GAAcD,EAAc,EAC5CY,EAAqBX,GAAcD,EAAc,EACjDa,EAA4Bb,EAAc,EAC1Cc,EAAgB,GAEhBC,EAAaC,GAAiBrB,CAAU,EAExCsB,EAAoC,CACxC,CAAC,QAAuB,KAAMV,CAAU,EACxC,CAAC,QAAuB,KAAMQ,CAAU,EACxC,CAAC,QAAuB,KAAMpB,CAAU,EACxC,CAAC,OAAsB,KAAMI,EAAW,OAAO,CACjD,EACMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAAmC,CACvC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,aAAc,KAAM,KAAK,EAChC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,KAAK,CAC/B,EACMC,EAAY,CAChBC,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACjEO,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,EACpEO,EAAc,QAAS/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CACvE,EACIN,GACFY,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAElFL,GACFW,EAAU,KAAKC,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMwB,CAAU,CAAC,EAEtFM,EAAU,KAAKE,EAAe,SAAUhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAChFJ,GACFU,EAAU,KAAKE,EAAe,gBAA+Bf,CAAgB,CAAC,EAE5EI,GACFS,EAAU,KAAKE,EAAe,mBAAkCf,CAAgB,CAAC,EAE/EK,GACFQ,EAAU,KAAKE,EAAe,sBAAuBhC,EAAO,CAAC,EAAE,SAAUe,EAAaS,CAAU,CAAC,EAEnG,IAAMS,EAAWC,GAA4BlC,EAAO,CAAC,EAAE,QAAQ,EACzDmC,EAAcD,KAA4CV,CAAU,EAC1E,MAAO;AAAA;AAAA,QAEXI,EAAa,iBAAiBC,CAAa,EAAE,iBAAiB,GAAGC,CAAS,CAAC;AAAA,0CACzCK,CAAW,KAAKZ,CAAa;AAAA,kDACrBY,CAAW,KAAKZ,CAAa;AAAA;AAAA,QAEvEK,EAAa,UAAU,CACjBL,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA;AAAA,iCAEmBA,CAAa;AAAA;AAAA;AAAA,gDAGEA,CAAa;AAAA;AAAA;AAAA,oBAGzCA,EAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6BAKRJ,EAAe,qBAAuBc,EAAW,OAAO;AAAA;AAAA;AAAA,YAGzEX,EAA4B,2CAA6C,EAAE;AAAA;AAAA,4BAE3Dc,GAAUH,EAAUT,EAAY,OAAO,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMlCD,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAY1Bc,GAAU,MAAOb,CAAU,CAAC;AAAA,wCACTa,GAAU,aAAcb,CAAU,CAAC,gCAC3Db,EAAa,GAAK,eAAe;AAAA,UACvCS,EAAgB,kCAAoC,EAAE;AAAA,UACtDC,EAAqB,4CAA8C,EAAE;AAAA;AAAA;AAAA,qDAG1BV,EAAa,GAAK,KAAKsB,CAAQ,QAAQ;AAAA,cAC9EA,CAAQ;AAAA,cACRf,EAAe,uBAAyB,EAAE;AAAA;AAAA,QAG9C,EACMoB,EAAU,CAAC,CAAC,KAAMvB,EAAa,SAAUf,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIS,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAMrB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB6B,EAAQ,KAAK,CAAC,KAAM1B,EAAY,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAExD,CACL,KAAM,yBACN,YAAa,CACX,KAAM,GAAGwB,CAAU,IAAIJ,CAAa,IAAIC,CAAkB,IAAIC,CAAyB,GACvF,kBAAmBtB,EAAO,IAAI,CAACuC,EAAQC,IAAW,MAAM,CAC1D,EACA,gBAAAb,EACA,WAAY,KAAO,CACjB,QAAAW,EACA,cAAe,CACb,EAAG,KAAK,KAAKtB,EAAaZ,CAAU,CACtC,EACA,gBAAAsB,CACF,EACF,CACF,EAEKhC,GAAgB,CAAC+C,EAAyBjC,IAA8C,CAGnGhB,GAAeiD,EAAQ,MAAM,EAG7B,IAAMH,EAAU,CAAC,CAAC,EACdG,EAAQ,YAAc,GACxBH,EAAQ
,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAK,CAAC,EAEhBG,EAAQ,QACJhD,GAA+BgD,EAAQ,OAAQjC,EAAYiC,EAAQ,YAAa,EAAU,EAAG,CAAC,QAAAH,CAAO,CAAC,CAC5G,ICrOA,IAiBMI,GAkBAC,GAcAC,GAeAC,GAcAC,GAsBAC,GAmFOC,GAYAC,GAnMbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAQMb,GAAiB,CAACc,EAA+BC,IAAsC,CAC3F,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIC,EAAW,KAAK,SAAW,GAC7B,GAAIA,EAAW,KAAK,SAAWA,EAAW,OAAO,QAAUA,EAAW,KAAK,SAAWA,EAAW,KAAK,OACpG,MAAM,IAAI,MAAM,iDAAiD,UAE1DA,EAAW,OAAO,SAAWA,EAAW,KAAK,OACtD,MAAM,IAAI,MAAM,2CAA2C,EAE7DD,EAAO,MAAM,CAAC,EAAE,QAAQ,CAACE,EAAGC,IAAQ,CAClC,GAAIH,EAAOG,EAAM,CAAC,EAAE,WAAa,GAAkBH,EAAOG,EAAM,CAAC,EAAE,WAAa,EAC9E,MAAM,IAAI,MAAM,SAASA,CAAG,qCAAqC,CAErE,CAAC,CACH,EAEMhB,GAAY,CAACa,EAA+BG,IAA0B,CAC1E,IAAMC,EAAkB,CAAC,EACzB,GAAIJ,EAAO,OAASG,EAClB,GAAIH,EAAOG,CAAG,EAAE,WAAa,EAC3BH,EAAOG,CAAG,EAAE,iBAAiB,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,UACxDL,EAAOG,CAAG,EAAE,WAAa,EAClCH,EAAOG,CAAG,EAAE,cAAc,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,MAE9D,OAAM,IAAI,MAAM,SAASF,CAAG,qCAAqC,EAGrE,OAAOC,CACT,EAEMhB,GACF,CAACY,EAA+BC,IAAiD,CAC/E,GAAID,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBnB,GAAUa,EAAQ,CAAC,EACtCO,EAAiBpB,GAAUa,EAAQ,CAAC,EACtCQ,EAAiBrB,GAAUa,EAAQ,CAAC,EACxC,OAAIQ,EAAK,SAAW,IAClBA,EAAO,CAAC,GAAG,MAAMR,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,KAAK,CAAC,GAEzCS,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,KACE,QAAOP,CAEX,EAEEZ,GACF,CAACqB,EAAeC,EAAeC,EAA+BJ,EAAyBK,IACzE,CACR,IAAIC,EAAWJ,EAIf,OAHIA,EAAQ,IACVI,GAAYF,EAAWJ,EAAKG,CAAK,CAAC,GAEhCE,EAAMF,CAAK,EAAI,EACV,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,EAAI,CAAC,CAAC,EAE3D,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,CAAC,CAAC,CAElE,EAEFrB,GACF,CAACc,EAAsBW,EAAuBH,IAC1C,4CAA4CG,EAAO,KAAK,OAAO,QAAQX,EAAM,KAAK,OAAO;AAAA,+BAClEA,EAAM,KAAK,OAAO;AAAA;AAAA,yBAExBQ,EAAW,MAAM;AAAA,kCACRI,EAAa,uBAAwB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BAClEI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BACtDI,EAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,6BACrDI,EAAa,kBAAmB,IAAKJ,EAAW,MAAM,CAAC;AAAA,iCACnDG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAO3DX,EAAM,WAAW,gBAAiB,IAAK,aAAa,CAAC;AAAA;AAAA;AAAA,SAK7Db,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBiB,EAAYC,EAAU,KAAKN,CAAU,EACrCJ,EAAQP,EAAW,KAAK,OAAS,EAAKiB,EAAU,cAAcjB,EAAW,KAAMW,EAAW,MAAM,EAC1D,CAAC,GAAG,MAAMA,EAAW,MAAM,EAAE,KAAK,CAAC,EAC3EC,EAAQ1B,GAAUa,EAAQ,CAAC,EAC/Ba,EAAM,QAASM,GAASA,IAAS,IAAM,IAAM,CACnB,MAAM,IAAI,MAAM,kBAAkB,CACpC,EAAE,EACtBN,EAAM,SAAW,IACnBA,EAAQ,MAAML,EAAK,MAAM,EAAE,KAAK,CAAC,GAEnC,IAAMF,EAASL,EAAW,OAAO,IAAI,CAACmB,EAAOC,IAAMhC,GAAkB+B,EAAOC,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAEjGN,EAAON,EAAW,KAAK,IAAI,CAACqB,EAAKD,IAAMhC,GAAkBiC,EAAKD,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAE/F,GAAIL,EAAK,SAAWF,EAAO,QAAUE,EAAK,SAAWD,EAAK,OACxD,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIC,EAAK,SAAWI,EAAW,OAC7B,QAASS,EAAI,EAAGA,EAAIT,EAAW,OAAQ,EAAES,EAClCb,EAAK,SAASa,CAAC,IAClBf,EAAO,OAAOe,EAAG,EAAG,CAAC,EACrBd,EAAK,OAAOc,EAAG,EAAGT,EAAWS,CAAC,CAAC,EAC/BR,EAAM,OAAOQ,EAAG,EAAG,CAAC,GAI1B,IAAME,EAAQV,EAAM,IAAIM,GAAQ,KAAK,KAAKA,CAAI,CAAC,EAE/CN,EAAM,QAAQ,CAACM,EAAME,EAAGG,IAAU,CAChC,GAAIL,EAAO,EAAG,CACZ,IAAMM,GAAYlB,EAAKc,CAAC,EAAIf,EAAOe,CAAC,GAAKF,EACnCO,EAASpB,EAAOe,CAAC,EACjBM,EAAWD,EAASD,EAAWZ,EAAMQ,CAAC,EAC5Cf,EAAOe,CAAC,EAAIM,EACZpB,EAAKc,CAAC,EAAIK,EACVF,EAAMH,CAAC,EAAI,CAACF,CACd,CACF,CAAC,EAED,IAAMS,EAAchB,EAAW,MAAM,CAAC,EACtCJ,EAAK,QAAQ,CAACqB,EAAM3B,IAAM,CACxB0B,EAAYC,CAAI,EAAI,KAAK,MAAMtB,EAAKsB,CAAI,EAAIvB,EAAOuB,CAAI,GAAKhB,EAAMgB,CAAI,CAAC,CACzE,CAAC,EACD,IAAMC,EAA+B,CAAC,KAAMF,EAAa,SAAU5B,EAAO,CAAC,EAAE,QAAQ,EAE/E
e,EAASgB,EAAe,SAAU/B,EAAO,CAAC,EAAE,SAAU4B,EAAY,MAAM,EACxExB,EAAQ4B,EAAc,QAAShC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEiC,EAAaf,EAAU,KAAKU,CAAW,EACvCM,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ5B,EAAO,MAAM,EACtF,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQiB,EAAM,MAAM,EAAG,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQV,EAAM,MAAM,CACvG,EAEMsB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAM3B,CAAM,EAC/E,CAAC,OAAsB,KAAMiB,CAAK,EAAG,CAAC,QAAuB,KAAMV,CAAK,EACxE,GAAGuB,EAA2BpC,EAAO,CAAC,EAAE,KAAM4B,CAAW,CAC3D,EAEMS,EAAmBC,GAA+B;AAAA,QAClDA,EAAa,iBAAiBJ,CAAQ,EAAE,iBAAiB9B,EAAOW,CAAM,CAAC;AAAA,UACrEzB,GAA0Bc,EAAOW,EAAQH,CAAU,CAAC;AAAA,UACpD0B,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,iCACpDvB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDA,EAAO,YAAY,aAAcX,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,SAE/E,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGmB,EAAM,MAAM,IAAIjB,EAAO,MAAM,IAAIO,EAAM,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACnG,gBAAAwB,EACA,WAAY,KAAO,CACjB,QAAS,CAACP,CAAgB,EAC1B,cAAe,CAAC,EAAG,KAAK,KAAKb,EAAY,EAAuB,CAAC,EACjE,gBAAAkB,CACF,EACF,CACF,EAEa3C,GAAQ,CAAC+C,EAAyBtC,IAAsC,CACnFf,GAAeqD,EAAQ,OAAQtC,CAAU,EACzC,IAAMuC,EAAoBpD,GAAgCmD,EAAQ,OAAQtC,CAAU,EACpFsC,EAAQ,QAAQhD,GAAuBgD,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAO1F,EAEa/C,GAAwBQ,GAAyD,CAC5F,IAAMK,EAASL,EAAW,OACpBM,EAAON,EAAW,KAClBO,EAAOP,EAAW,KACxB,OAAOQ,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,ICxMA,IAeMiC,GAUAC,GAwHOC,GAKAC,GAtJbC,GAAAC,EAAA,kBAOAC,IAEAC,KACAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,CAElD,EAMMT,GAA2B,CAACU,EAAmBC,IAA+C,CAClG,IAAMC,EAAQF,EAAM,KACdG,EAAaC,EAAU,KAAKF,CAAK,EACjCG,EAAK,GACPC,EAAOL,EAAW,KAItB,GAHIK,EAAO,IACTA,EAAOJ,EAAM,OAASI,GAEpBA,EAAOJ,EAAM,OAAS,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMK,EAAOL,EAAMI,CAAI,EACjBE,EAAOL,EAAaI,EACpBE,EAAaC,GAAiBH,CAAI,EAClCI,EAAaJ,EAAOE,EAEpBG,EAAY,CAACC,EAAcJ,IAC3BA,IAAe,EACV,WAAWI,CAAI,OAAOA,CAAI,YAAYA,CAAI,OAAOA,CAAI,OACnDJ,IAAe,EACjB,OAAOI,CAAI,OAAOA,CAAI,MACpBJ,IAAe,EACjB,WAAWI,CAAI,OAAOA,CAAI,QAAQA,CAAI,MAGxCA,EAEHC,EAAIC,EAAc,IAAKf,EAAM,SAAUA,EAAM,KAAMS,CAAU,EAC7DO,EAASC,EAAe,SAAUjB,EAAM,SAAUA,EAAM,KAAMS,CAAU,EACxES,EAAYJ,EAAE,KAAK,MAEnBK,EAAgBC,GAA4BpB,EAAM,QAAQ,IAAM,MAClE,mBAAmBkB,CAAS,oBAC5B,mBAAmBA,CAAS,eAC1BG,EAAmBC,GAA+B;AAAA,sCACpBJ,CAAS;AAAA,sCACTA,CAAS;AAAA,4CACHA,CAAS,KAAKb,CAAE;AAAA;AAAA,4DAEAa,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA,gEAKLA,CAAS;AAAA;AAAA;AAAA;AAAA,QAIjEI,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBR,EAAGE,CAAM,CAAC;AAAA,QAC7EM,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA,qBAGXjB,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAMbc,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAmBID,CAAS,IAAIN,EAAU,kBAAmBH,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKtDS,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAeRA,CAAS,IAAIK,GAAU,kBAAmBd,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAU9E,MAAO,CACL,KAAM,UACN,YAAa,CAAC,KAAM,GAAGA,CAAU,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAChE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAO,SAAUF,EAAM,QAAQ,CAAC,EACjD,cAAe,CAAC,EAAGQ,CAAI,EACvB,gBAAiB,CAAC,CAAC,OAAsB,KAAMG,CAAU,CAAC,CAC5D,GACA,gBAAAU,CACF,CACF,EAEa9B,GAAU,CAACiC,EAAyBvB,IAAwC,CACvFZ,GAAemC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAyBkC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CACzE,EAEaT,GAA0BS,GACnCwB,GAA4B,CAAC,KAAMxB,EAAW,IAAc,CAAC,ICvJjE,IAiBMyB,GAMAC,GAWAC,GASAC,GAqBAC,GAuDOC,GAOAC,GA9HbC,GAAAC,EAAA,kBAGAC,IAEAC,KACAC,KAGAC,KAQMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,CAEpC,EAEMZ,GACF,CAACY,EAA+BC,IAAiD,CAC/E,IAAMC,EAAuB,CAAC,EAC1B
C,EAAqBF,EAAW,WACpC,OAAID,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQI,GAAKF,EAAW,KAAK,OAAOE,CAAC,CAAC,CAAC,EACpED,EAAaD,EAAW,QAEnBG,GAA4B,CAAC,WAAAF,EAAY,KAAMF,EAAW,KAAM,WAAAC,CAAU,CAAC,CACpF,EAEEb,GAA4BiB,GAAoC;AAAA;AAAA,gCAEtCA,CAAe;AAAA,kBAC7BC,EAAa,8BAA+B,IAAKD,CAAe,CAAC;AAAA;AAAA;AAAA;AAAA,aAItEA,CAAe;AAAA,GAEtBhB,GAAuBkB,GAAsC,CACjE,IAAMF,EAAkBE,EAAQ,OAC1BC,EAAsB,CAAC,EAC7B,QAASC,EAAI,EAAGA,EAAIJ,EAAiB,EAAEI,EAAG,CACxC,IAAMC,EAAgBH,EAAQE,CAAC,EAAE,aAAa,UAAW,mBAAmB,EACxEJ,IAAoB,EACtBG,EAAU,KAAKE,CAAa,EACnBD,IAAM,EACfD,EAAU,KAAK,wBAAwBC,CAAC,QAAQC,CAAa,IAAI,EACxDD,IAAMJ,EAAkB,EACjCG,EAAU,KAAK,UAAUE,CAAa,IAAI,EAE1CF,EAAU,KAAK,6BAA6BC,CAAC,OAAOC,CAAa,IAAI,CAEzE,CACA,MAAO;AAAA,wDAC+CH,EAAQ,CAAC,EAAE,KAAK,OAAO;AAAA,UACrEC,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,QAE9B,EAEMlB,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAWf,EAAO,CAAC,EAAE,SACrBgB,EAAOF,EAAU,cAAcb,EAAW,KAAMW,EAAW,MAAM,EACjEJ,EAAU,IAAI,MAAqBP,EAAW,UAAU,EACxDgB,EAAQC,EAAc,QAASH,EAAUH,EAAW,MAAM,EAC1DO,EAAkB,IAAI,MAAclB,EAAW,UAAU,EACzDmB,EAAkC,CAAC,EACnCC,EAA2B,CAAC,EAC9BC,EAAc,EACZC,EAAoC,CAAC,CAAC,QAAuB,KAAMV,CAAS,CAAC,EACnF,QAASH,EAAI,EAAGA,EAAIT,EAAW,WAAYS,IAAK,CAC9CY,GAAerB,EAAW,WAAWS,CAAC,EACtCS,EAAgBT,CAAC,EAAIY,EACrB,IAAME,EAAcZ,EAAW,MAAM,EACrCY,EAAYvB,EAAW,IAAI,EAAIA,EAAW,WAAWS,CAAC,EACtDW,EAAa,KAAKG,CAAW,EAC7BhB,EAAQE,CAAC,EAAIe,EAAe,SAASf,CAAC,GAAIK,EAAUS,EAAY,MAAM,EACtEJ,EAAkB,KAAK,CAAC,KAAMC,EAAaX,CAAC,EAAG,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,CAC9E,CACAuB,EAAgB,KACZ,CAAC,QAAuB,KAAMJ,CAAe,EAAG,GAAGO,EAA2Bd,EAAY,GAAGS,CAAY,CAAC,EAC9G,IAAMM,EAAmBC,GAA+B;AAAA,IAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,qBAAsB,MAAOT,EAAgB,MAAM,EACnE,iBAAiBF,EAAO,GAAGT,CAAO,CAAC;AAAA,IAC1CnB,GAAyB8B,EAAgB,MAAM,CAAC;AAAA,IAChD7B,GAAoBkB,CAAO,CAAC;AAAA;AAAA,IAE5BoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DX,EAAM,gBAAgB,YAAY,CAAC;AAAA,kBACrCA,EAAM,WAAW,UAAWD,CAAI,CAAC;AAAA;AAAA;AAAA,iBAGlCT,EAAa,8BAA+B,qBAAsBY,EAAgB,MAAM,CAAC;AAAA,QAClGF,EAAM,WAAW,UAAWD,EAAM,OAAO,CAAC;AAAA;AAAA;AAAA,KAIhD,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAMf,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,gBAAA0B,EACA,WAAY,KAAO,CACjB,QAASP,EACT,cAAe,CAAC,EAAG,KAAK,KAAKP,EAAY,EAAuB,CAAC,EACjE,gBAAAU,CACF,EACF,CACF,EAEa/B,GAAQ,CAACqC,EAAyB5B,IAAsC,CACnFd,GAAe0C,EAAQ,MAAM,EAC7B,IAAMC,EACFD,EAAQ,OAAO,SAAW,EAAI5B,EAAab,GAAgCyC,EAAQ,OAAQ5B,CAAU,EACzG4B,EAAQ,QAAQtC,GAAuBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC1F,EAEarC,GAAwBQ,GAAyD,CAC5F,IAAMe,EAAOf,EAAW,KAClBC,EAAuBD,EAAW,WAClCE,EAAaF,EAAW,WAAuB,EAAIC,EAAW,OAASD,EAAW,WACxF,GAAIE,IAAeD,EAAW,OAC5B,MAAM,IAAI,MAAM,+CAA+C,EAEjE,OAAOG,GAA4B,CAAC,KAAAW,EAAM,WAAAb,EAAY,WAAAD,CAAU,CAAC,CACnE,ICtIA,IAUM6B,GA4DAC,GAoCOC,GA1GbC,GAAAC,EAAA,kBAGAC,IAEAC,KAGAC,KAEMP,GACF,CAACQ,EAA4BC,EAA+BC,EAA+BC,EAC1FC,IAAuB,CACtB,IAAMC,EAASC,EAAe,cAAeF,EAAYF,EAAW,OAAQ,CAAC,EACvE,EAAIK,EAAc,SAAUN,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEO,EAAID,EAAc,SAAUN,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEQ,EAAIF,EAAc,SAAUN,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EAE1ES,EACEC,EAAa,CAACC,EAAWJ,EAAWC,IAAc,UAAUD,CAAC,KAAKI,CAAC,KAAKH,CAAC,IAC/E,GAAI,CAACN,EACHO,EAAaL,EAAO,YAChB,aACAM,EAAW,EAAE,YAAY,YAAY,EAAGH,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAChG,CACL,IAAMI,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,iBAAiBF,CAAC,gBAAgBA,CAAC,IACjDG,EAAc,iBAAiBH,CAAC,gBAAgBA,CAAC,IAEjDI,EAAc,sBAAsBJ,CAAC,6BAA6BA,CAAC,UACzE,MAAO;AAAA,gCACeA,CAAC,MAAMV,EAAO,gBAAgB,qBAAqBU,CAAC,GAAG,CAAC;AAAA,0BAC9DA,CAAC,MAAM,EAAE,2BAA2B,iBAAiBA,CAAC,GAAIV,
CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMP,EAAE,2BAA2B,iBAAiBO,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMN,EAAE,2BAA2B,iBAAiBM,CAAC,GAAIV,CAAM,CAAC;AAAA,yBAClEU,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,6BACZA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,cAC/BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIL,EAAWM,EAAaC,EAAaC,CAAW,CAAC;AAAA,WAErF,EACIf,IAAe,EACjBM,EAAa;AAAA;AAAA,cAETG,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,wGAGtCH,EAAa;AAAA,cACTG,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,WAGtD,CAEA,MAAO;AAAA,UACHb,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBS,EAAG,EAAGD,EAAGH,CAAM,CAAC;AAAA,UACjFL,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEU,CAAU;AAAA,QAEhB,EAEEjB,GAA4BQ,GAA+C,CAC/E,IAAMmB,EAAQnB,EAAO,CAAC,EAAE,KAClBoB,EAAQpB,EAAO,CAAC,EAAE,KAClBqB,EAAQrB,EAAO,CAAC,EAAE,KAClBsB,EAAiBtB,EAAO,CAAC,EAAE,SAE3BE,EAAc,EAAEqB,EAAU,SAASJ,EAAOC,CAAK,GAAKG,EAAU,SAASH,EAAOC,CAAK,GACrFG,EAAcL,EACdM,EAAaF,EAAU,KAAKJ,CAAK,EAGrC,GAAIjB,EAAa,CACf,IAAMwB,EAAkBC,GAAc,UAAUA,GAAc,UAAUR,EAAOC,EAAO,EAAK,EAAIC,EAAO,EAAK,EAC3G,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,6CAA8C,EAEhEF,EAAcE,EACdD,EAAaF,EAAU,KAAKC,CAAW,CACzC,CAEA,IAAMI,EAAU,KAAK,KAAKH,EAAa,CAAC,EAExC,MAAO,CACL,KAAM,QACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,gBAAkB1B,GACdR,GAA2BQ,EAAcC,EAAQwB,EAAatB,EAAaoB,CAAc,EAC7F,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAME,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAgB,CAAC,EACrF,gBACI,CAAC,CAAC,QAAuB,KAAMG,CAAO,EAAG,GAAGC,EAA2BR,EAAOF,EAAOC,EAAOI,CAAW,CAAC,CAC9G,EACF,CACF,EAEa/B,GAASqC,GAAkC,CACtDA,EAAQ,QAAQtC,GAAyBsC,EAAQ,MAAM,CAAC,CAC1D,IC5GA,IA8CaC,GA9CbC,GAAAC,EAAA,kBAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAOavC,GAA+D,IAAI,IAAI,CAClF,CAAC,MAAO,CAAUwC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAwB,CAAC,EAC7C,CAAC,SAAU,CAACC,GAAQD,EAAwB,CAAC,EAC7C,CAAC,OAAQ,CAAUE,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACC,EAAS,CAAC,EAEzB,CAAC,cAAe,CAAMC,GAAkBC,EAA0B,CAAC,EACnE,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,UAAW,CAACC,EAAO,CAAC,EACrB,CAAC,gBAAiB,CAACC,EAAa,CAAC,EACjC,CAAC,OAAQ,CAAUC,GAAeC,EAAmB,CAAC,EACtD,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,gBAAiB,CAACC,GAAeC,EAA4B,CAAC,EAC/D,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,eAAgB,CAACC,GAAcC,EAA2B,CAAC,EAC5D,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,MAAO,CAAUC,GAAcC,EAAoB,CAAC,EACrD,CAAC,QAAS,CAAWC,EAAK,CAAC,EAC3B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACpB,GAAMC,EAAmB,CAAC,EACzC,CAAC,SAAU,CAACoB,GAAQC,EAAqB,CAAC,EAC1C,CAAC,iBAAkB,CAACC,GAAgBC,EAA6B,CAAC,EAClE,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,oBAAqB,CAAMC,GAAwBC,EAAgC,CAAC,EACrF,CAAC,gBAAiB,CAAMC,GAAoBC,EAA4B,CAAC,EACzE,CAAC,UAAW,CAA
WC,EAAO,CAAC,EAC/B,CAAC,iBAAkB,CAAWC,EAAc,CAAC,EAC7C,CAAC,sBAAuB,CAACC,GAAqBC,EAAkC,CAAC,EACjF,CAAC,cAAe,CAAUC,GAAsBC,EAA0B,CAAC,EAC3E,CAAC,wBAAyB,CAACC,EAAY,CAAC,EACxC,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,YAAa,CAAUC,GAAoB1B,EAAoB,CAAC,EACjE,CAAC,OAAQ,CAAW2B,EAAI,CAAC,EACzB,CAAC,cAAe,CAAWC,EAAW,CAAC,EACvC,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,cAAe,CAACC,GAAaC,EAA0B,CAAC,EAEzD,CAAC,UAAW,CAAMC,GAAcC,EAAsB,CAAC,EACvD,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,qBAAsB,CAACC,GAAoBC,EAAiC,CAAC,EAC9E,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAACC,EAAG,CAAC,EACb,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,YAAa,CAAUC,GAAoB1C,EAAoB,CAAC,EACjE,CAAC,QAAS,CAAC2C,EAAK,CAAC,EACjB,CAAC,aAAc,CAAUC,EAAU,CAAC,EACpC,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,eAAgB,CAACC,EAAY,CAAC,EAC/B,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,UAAW,CAAUC,EAAO,CAAC,EAC9B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,yBAA0B,CAACC,EAAa,CAAC,EAC1C,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,UAAW,CAACC,GAASC,EAAsB,CAAC,EAC7C,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,kBAAmB,CAAUC,GAA0BzE,EAAoB,CAAC,EAC7E,CAAC,OAAQ,CAAC0E,EAAI,CAAC,EACf,CAAC,YAAa,CAACC,GAAWC,EAAwB,CAAC,EACnD,CAAC,QAAS,CAACC,EAAK,CAAC,CACnB,CAAC,IC5ID,IAoBaC,GApBbC,GAAAC,EAAA,kBAGAC,KAGAC,KAEAC,KAYaL,GAAN,KAAqB,CAI1B,YAAoBM,EAAwB,CAAxB,aAAAA,EAClB,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAmBC,EAAoBC,EAChEC,EAA0D,CAC5DC,GAAiBL,EAAc,YAAY,IAAI,EAC/C,IAAMM,EAAS,KAAK,QAAQ,OACtBC,EAAqB,KAAK,QAAQ,sBAAsB,EAC9D,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,CAAC,EAClE,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAASR,EAClBO,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQC,EAAM,MAAM,CAAC,CAAC,EAE1E,QAAWC,KAAUR,EACnBM,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQE,EAAO,MAAM,CAAC,CAAC,EAEvEN,GACFI,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAUJ,CAAoB,CAAC,EAExE,IAAMO,EAAYL,EAAO,gBACrB,CAAC,OAAQN,EAAc,gBAAgB,mBAAmB,CAAC,EAAG,QAAAQ,EAAS,MAAOR,EAAc,YAAY,IAAI,CAAC,EAEjH,GAAI,KAAK,QAAQ,gBAAkB,YAAa,CAC9C,IAAMY,EAAc,CAClB,SAAU,KAAK,QAAQ,gBACvB,gBAAiBZ,EAAc,gBAC/B,UAAAW,EACA,cAAAR,CACF,EAC2B,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC1E,KAAKS,CAAW,CACtC,CAEAL,EAAmB,YAAYP,EAAc,eAAe,EAC5DO,EAAmB,aAAa,EAAGI,CAAS,EAC5CJ,EAAmB,mBAAmB,GAAGJ,CAAa,EACtD,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,EAAI,CAAC,EACtE,KAAK,QAAQ,yBAET,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACnD,KAAK,QAAQ,YAAc,cAC7B,KAAK,QAAQ,eAAe,EAE1B,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACrD,KAAK,QAAQ,MAAM,EAErBU,GAAeb,EAAc,YAAY,IAAI,CAC/C,CACA,SAAgB,CAEhB,CACA,MAAMc,EAA0BC,EAAiE,CAC/FV,GAAiBS,EAAY,IAAI,EACjC,IAAMR,EAAS,KAAK,QAAQ,OACtBU,EAAuB,CAAC,EAC1BV,EAAO,SAAS,IAAI,YAAY,GAClCU,EAAW,KAAK,aAAa,EAE/B,IAAMC,EAAeC,GAAmBH,EAA6B,KAAK,QAAQ,OAAO,MAAM,EACzFI,EAAWL,EAAY,gBAAgBG,CAAY,EACnDG,EAAO,GAAGJ,EAAW,KAAK;AAAA,CAAI,CAAC;AAAA,EAAKC,EAAa,yBAAyB;AAAA,EAAKE,CAAQ,GACvFE,EAAef,EAAO,mBAAmB,CAAC,KAAAc,EAAM,MAAON,EAAY,IAAI,CAAC,EAC9EQ,GAAU,UAAW,IAAM,YAAYR,EAAY,IAAI,iBAAiBM,CAAI,EAAE,EAE9E,IAAMG,EAAkBjB,EAAO,sBAC3B,CAAC,QAAS,CAAC,OAAQe,EAAc,WAAY,MAAM,EAAG,OAAQ,OAAQ,MAAO
P,EAAY,IAAI,CAAC,EAElG,OAAAD,GAAeC,EAAY,IAAI,EACxB,CAAC,YAAAA,EAAa,gBAAAS,EAAiB,qBAAsBN,EAAa,aAAa,CACxF,CAEA,2BAA2Bd,EACE,CAC3B,IAAMqB,EAAI,OAAOrB,GAAkB,SAAWA,EAAgBA,EAAc,EACtEsB,EAAI,OAAOtB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEuB,EAAI,OAAOvB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEwB,EAAoB,KAAK,QAAQ,OAAO,OAAO,iCACrD,GAAIH,GAAKG,GAAqBF,GAAKE,GAAqBD,GAAKC,EAC3D,MAAO,CAACH,EAAGC,EAAGC,CAAC,EAEjB,IAAME,EAAOJ,EAAIC,EAAIC,EACjBG,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EAC/C,GAAIC,EAAkBF,EAAmB,CAEvC,GADAE,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EACvCC,EAAkBF,EACpB,MAAM,IAAI,MAAM,6CAA6C,EAE/D,MAAO,CAACE,EAAiBA,EAAiBA,CAAe,CAC3D,KACE,OAAO,CAACA,EAAiBA,EAAiB,CAAC,CAE/C,CACF,IC3HA,IAmCMC,GA4CAC,GAiBAC,GAwBOC,GAxHbC,GAAAC,EAAA,kBAGAC,KAEAC,IAEAC,KACAC,KACAC,KACAC,KACAC,KAwBMZ,GACF,CAACa,EAAqCC,IAA2E,CAC/G,GAAIA,EAAkB,SAAWD,EAAa,OAC5C,MAAM,IAAI,MAAM,4BAA4BC,EAAkB,MAAM,wCAChED,EAAa,MAAM,GAAG,EAG5B,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIH,EAAa,OAAQ,EAAEG,EAAG,CAC5C,IAAMC,EAAOJ,EAAaG,CAAC,EAAE,SAC7B,OAAQF,EAAkBE,CAAC,EAAG,CAC5B,IAAK,OAAQ,CACXD,EAAW,KAAK,EAAE,EAClB,KACF,CACA,IAAK,OAAQ,CACXA,EAAW,KAAK,GAAGE,CAAI,EAAE,EACzB,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAOL,EAAaG,CAAC,EAAE,KAAK,OAClCD,EAAW,KAAK,GAAGE,CAAI,IAAIC,CAAI,EAAE,EACjC,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAON,EAAaG,CAAC,EAAE,KAAK,KAAK,GAAG,EAC1CD,EAAW,KAAK,GAAGE,CAAI,IAAIE,CAAI,EAAE,EACjC,KACF,CACA,QACE,MAAM,IAAI,MAAM,iCAAiCL,EAAkBE,CAAC,CAAC,EAAE,CAC3E,CACF,CAEA,OAAOD,EAAW,KAAK,GAAG,CAC5B,EASEd,GACF,CAACmB,EAA0BP,EAAqCQ,IAA0C,CAGxG,IAAIC,EAAMF,EAAY,KACtB,OAAIA,EAAY,aAAa,OAC3BE,GAAO,IAAMF,EAAY,YAAY,KAAO,KAE9CE,GAAO,IAAMD,EACT,IACOrB,GACIa,EACAO,EAAY,aAAa,mBACrB,IAAI,MAAwCP,EAAa,MAAM,EAAE,KAAK,MAAM,CAAC,CAAC,GAC1FS,CACT,EAEEpB,GAAN,KAA6C,CAI3C,YAAYqB,EAA6B,CACnCA,IACF,KAAK,aAAeA,EAAY,aAChC,KAAK,OAASA,EAAY,OAE9B,CAEA,eAAeC,EAAwC,CACrD,OAAO,KAAK,eAAiBA,CAC/B,CAEA,SAASC,EAA4B,CACnC,OAAO,KAAK,SAAWA,CACzB,CACF,EAMatB,GAAN,KAAoB,CAApB,cAkBL,sBAAgC,KAOhC,qBAA+B,KAgC/B,KAAQ,eAAyC,KACjD,KAAQ,mBAAiD,KACzD,uBAAoB,GACpB,2BAAwB,EAGxB,KAAQ,eAAsC,CAAC,EAE/C,KAAQ,eAAsD,IAAI,IAOlE,mBAA8B,UAI9B,yBAAkD,IAAI,IAKtD,KAAQ,uBAA2D,IAAI,IAKvE,gCAA4E,IAAI,IA7ChF,IAAI,yBAAoD,CACtD,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,yEAAyE,EAG3F,IAAIuB,EAAO,KAAK,iBAAiB,IAAI,KAAK,eAAe,EACzD,OAAKA,IACHA,EAAO,CAAC,EACR,KAAK,iBAAiB,IAAI,KAAK,gBAAiBA,CAAI,GAG/CA,CACT,CAmCA,MAAM,WAAWC,EAAUC,EAAoC,CAC7D,KAAK,IAAMD,EACX,IAAME,EAAqC,CAAC,EACtCC,EAAwC,CAC5C,eAAgB,CACd,+BAAgCF,EAAQ,OAAO,+BAC/C,iCAAkCA,EAAQ,OAAO,iCACjD,4BAA6BA,EAAQ,OAAO,4BAC5C,cAAeA,EAAQ,OAAO,cAC9B,kCAAmCA,EAAQ,OAAO,kCAClD,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,wBAC3C,EACA,iBAAAC,CACF,EAEID,EAAQ,SAAS,IAAI,qDAAqD,EAC5EC,EAAiB,KAAK,qDAAuE,EACpFD,EAAQ,SAAS,IAAI,iBAAiB,GAC/CC,EAAiB,KAAK,iBAAiB,EAErCD,EAAQ,SAAS,IAAI,YAAY,GACnCC,EAAiB,KAAK,YAAY,EAGpC,KAAK,OAAS,MAAMD,EAAQ,cAAcE,CAAgB,EAC1D,KAAK,YAAc,IAAI5B,GAAgB0B,EAAQ,MAAQ,MAAMA,EAAQ,mBAAmB,CAAC,EACzF,KAAK,eAAiBG,GAAqB,IAAI,EAC/C,KAAK,eAAiB,IAAIC,GAAe,IAAI,EAC7C,KAAK,QAAU,IAAI,IACnB,KAAK,qBAAuB,IAAI,IAChC,KAAK,iBAAmB,IAAI,IAG5BC,GAAgBN,EAAI,SAAW,CAAC,CAACA,EAAI,KAAK,EAI1C,KAAK,OAAO,kBAAoBO,GAAM,CAChCA,EAAG,iBAAiB,oBAEtB,QAAQ,MAAM,mDAAmDA,EAAG,MAAM,OAAO,EAAE,CAEvF,EAEA,OAAO,eACH,KAAK,IAAI,OAAQ,SAAU,CAAC,MAAO,KAAK,OAAQ,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAC3G,OAAO,eACH,KAAK,IAAI,OAAQ,UAAW,CAAC,MAAON,EAAS,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAGxG,KAAK,aAAa,CACpB,CAEA,SAAgB,CACV,OAAO,KAAK,SAAa,KAC3B,KAAK,SAAS,QAAQ,EAExB,KAAK,eAAe,QAAQ,CAC9B,CAEA,mBAAuC,CACrC,OAAK,KAAK,iBACR,KAAK,eAAiB,KAAK,OAAO,qBAAqB,GAElD,KAAK,cACd,CAEA,uBAA+C,CAC7C,GAAI,CAAC,KAAK,mBAAoB,CAC5B,IAAMO,EAAiB,KAAK,kBAAkB
,EACxCC,EAAkD,CAAC,EAErD,KAAK,YAAc,cACrBA,EAAsB,gBAAkB,CACtC,SAAU,KAAK,SACf,0BAA2B,KAAK,sBAAwB,EACxD,oBAAqB,KAAK,sBAAwB,EAAI,CACxD,GAGF,KAAK,mBAAqBD,EAAe,iBAAiBC,CAAqB,CACjF,CACA,OAAO,KAAK,kBACd,CAEA,gBAAuB,CACjB,KAAK,qBACP,KAAK,mBAAmB,IAAI,EAC5B,KAAK,mBAAqB,KAE9B,CAEA,OAAc,CACZ,GAAI,CAAC,KAAK,eACR,OAGFC,GAAiB,EAEjB,KAAK,eAAe,EACpB,IAAIC,EACA,KAAK,YAAc,SACrB,KAAK,eAAe,gBAChB,KAAK,SAAW,EAAG,KAAK,sBAAwB,EAAG,KAAK,mBAAqB,CAAC,EAElFA,EAAkB,KAAK,OAAO,aAE1B,CAAC,KAAM,KAAK,sBAAwB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAExG,KAAK,eAAe,IAAIA,EAAiB,KAAK,cAAc,EAC5D,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAe,mBAChB,KAAK,mBAAqB,EAAGA,EAAiB,EAAG,KAAK,sBAAwB,EAAI,CAAC,GAGzF,KAAK,OAAO,MAAM,OAAO,CAAC,KAAK,eAAe,OAAO,CAAC,CAAC,EACvD,KAAK,eAAe,sBAAsB,EAC1C,KAAK,eAAiB,KACtB,KAAK,sBAAwB,EAEzB,KAAK,YAAc,QAChBA,EAAiB,SAAS,WAAW,IAAI,EAAE,KAAK,IAAM,CACzD,IAAMC,EAAa,IAAI,eAAeD,EAAgB,eAAe,CAAC,EAChEE,EAAiB,KAAK,eAAe,IAAIF,CAAe,EAC9D,QAAStB,EAAI,EAAGA,EAAIuB,EAAW,OAAS,EAAGvB,IAAK,CAC9C,IAAMyB,EAAoBD,EAAexB,CAAC,EACpC0B,EAAWD,EAAkB,SAC7BE,EAAa,KAAK,QAAQ,IAAID,CAAQ,EACtCE,EAAaD,EAAW,WACxBE,EAAaF,EAAW,WACxBG,EAAcL,EAAkB,YAChCM,EAAmBN,EAAkB,iBACrCO,EAAoBP,EAAkB,kBACtCQ,EAAeV,EAAWvB,EAAI,CAAC,EAC/BkC,EAAaX,EAAWvB,EAAI,EAAI,CAAC,EAEnC,OAAO,KAAK,cAAkB,MAChC,KAAK,cAAgBiC,GAGvB,IAAME,EAAY,OAAOF,EAAe,KAAK,aAAa,EACpDG,EAAU,OAAOF,EAAa,KAAK,aAAa,EAEtD,GAAI,CAAC,OAAO,cAAcC,CAAS,GAAK,CAAC,OAAO,cAAcC,CAAO,EACnE,MAAM,IAAI,WAAW,2BAA2B,EAGlD,GAAI,KAAK,IAAI,OAAO,WAAW,OAC7B,KAAK,IAAI,OAAO,UAAU,OAAO,CAC/B,QAAS,EACT,eAAgBL,EAAiB,IAC7BM,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,gBAAiBL,EAAkB,IAC/BK,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,SAAAX,EACA,WAAAE,EACA,WAAAC,EACA,YAAAC,EACA,UAAAK,EACA,QAAAC,CACF,CAAC,MACI,CAEL,IAAIG,EAAc,GAClBR,EAAiB,QAAQ,CAACM,EAAOrC,IAAM,CACrCuC,GAAe,SAASvC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC7F,CAAC,EACD,IAAIG,EAAe,GACnBR,EAAkB,QAAQ,CAACK,EAAOrC,IAAM,CACtCwC,GAAgB,UAAUxC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC/F,CAAC,EAED,QAAQ,IAAI,uBAAuBX,CAAQ,IAAIE,CAAU,IAAIC,CAAU,IAAIC,CAAW,KAAKS,CAAW,GAClGC,CAAY,mBAAmBJ,EAAUD,CAAS,KAAK,CAC7D,CACAM,GAAM,MAAO,GAAGX,CAAW,KAAKG,CAAY,KAAKC,CAAU,EAAE,CAC/D,CACAZ,EAAgB,MAAM,EACtB,KAAK,eAAe,OAAOA,CAAe,CAC5C,CAAC,EAEHoB,GAAe,CACjB,CAaA,IAAIC,EAAsBZ,EAAyCa,EAC/DC,EACAC,EACAC,EAAmC,CACrC1B,GAAiBsB,EAAQ,IAAI,EAE7B,IAAMK,EAAwB,CAAC,EAC/B,QAAShD,EAAI,EAAGA,EAAI+B,EAAiB,OAAQ,EAAE/B,EAAG,CAChD,IAAMU,EAAOqB,EAAiB/B,CAAC,EAAE,KAEjC,GAAIU,IAAS,EACX,SAEF,IAAMuC,EAAU,KAAK,eAAe,IAAIvC,CAAI,EAC5C,GAAI,CAACuC,EACH,MAAM,IAAI,MAAM,0BAA0BvC,CAAI,EAAE,EAElDsC,EAAW,KAAKC,CAAO,CACzB,CAEA,GAAM,CAAC,QAAAC,EAAS,cAAAC,EAAe,gBAAAC,CAAe,EAAIT,EAAQ,WAAWZ,CAAgB,EAG/EsB,EAAyBT,EAAc,SAAW,EAAIM,EAAQ,IAAI,CAACI,EAAGtD,IAAMA,CAAC,EAAI4C,EACvF,GAAIS,EAAuB,SAAWH,EAAQ,OAC5C,MAAM,IAAI,MAAM,eAAeG,EAAuB,MAAM,qBAAqBH,EAAQ,MAAM,GAAG,EAIpG,IAAMlB,EAAkC,CAAC,EACnCuB,EAAyB,CAAC,EAChC,QAASvD,EAAI,EAAGA,EAAIkD,EAAQ,OAAQ,EAAElD,EAAG,CAIvC,GAAI,CAAC,OAAO,UAAUqD,EAAuBrD,CAAC,CAAC,GAAKqD,EAAuBrD,CAAC,EAAI,IAC5EqD,EAAuBrD,CAAC,GAAK+C,EAC/B,MAAM,IAAI,MAAM,yBAAyBM,EAAuBrD,CAAC,CAAC,EAAE,EAEtE,GAAIqD,EAAuBrD,CAAC,IAAM,GAChC,SAEF,IAAMwD,EAAcH,EAAuBrD,CAAC,IAAM,GAC5CyD,EAAeJ,EAAuBrD,CAAC,IAAM,GAC7C0D,EAAcF,GAAeC,EAC/BX,EAAyBI,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAC7D6C,EAAmBQ,EAAuBrD,CAAC,EAAGkD,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAGtF,GAFAgC,EAAkB,KAAK0B,CAAU,EAE7BA,EAAW,OAAS,EACtB,SAEF,IAAMT,EAAU,KAAK,eAAe,IAAIS,EAAW,IAAI,EACvD,GAAI,CAACT,EACH,MAAM,IAAI,MAAM,2BAA2BS,EAAW,IAAI,EAAE,EAK9D,GAHIF,GACF,KAAK,cAAc,KAAKP,CAAO,EAE7BQ,EAAc,CAChB,IAAIE,EAAiB,KAAK,
qBAAqB,IAAI,KAAK,eAAgB,EACnEA,IACHA,EAAiB,CAAC,EAClB,KAAK,qBAAqB,IAAI,KAAK,gBAAkBA,CAAc,GAErEA,EAAe,KAAKV,CAAO,CAC7B,CACAM,EAAY,KAAKN,CAAO,CAC1B,CAIA,GAAID,EAAW,SAAWjB,EAAiB,QAAUwB,EAAY,SAAWvB,EAAkB,OAAQ,CAEpG,GAAIuB,EAAY,SAAW,EACzB,OAAAb,GAAeC,EAAQ,IAAI,EACpBX,EAMT,MAAM,IAAI,MACN,WAAWW,EAAQ,IAAI,4EAA4E,CACzG,CAKA,IAAIiB,EACJ,GAAIR,EAAiB,CACnB,IAAIS,EAAgB,EACdC,EAAoB,CAAC,EAE3BV,EAAgB,QAAQW,GAAK,CAC3B,IAAMrD,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIrD,EAAK,SAAW,EAClB,OAGF,IAAMsD,EAAgBD,EAAE,OAAS,GAAmB,EAAI,EACpDE,EACAC,GACAH,EAAE,OAAS,IACbG,GAAgBxD,EAAK,OAAS,EAAI,GAAMA,EAAK,OAAS,EAAI,EAAIA,EAAK,OAASsD,EAC5EC,EAAiBvD,EAAK,OAAS,EAAI,GAAKsD,EAAgBtD,EAAK,SAE7DwD,GAAgBxD,EAAK,QAAU,EAAIA,EAAK,OAASsD,EAAgB,GACjEC,EAAiB,IAEnBJ,EAAgB,KAAK,KAAKA,EAAgBK,EAAa,EAAIA,GAC3DJ,EAAQ,KAAKD,CAAa,EAM1B,IAAMM,GAAqBJ,EAAE,OAAS,GAAmB,EAAI,EAC7DF,GAAiBnD,EAAK,OAAS,EAAI,KAAK,KAAKA,EAAK,OAASyD,EAAkB,EAAIF,EAC9CvD,EAAK,OAASsD,CACnD,CAAC,EAID,IAAMI,EAAsB,GAC5BP,EAAgB,KAAK,KAAKA,EAAgBO,CAAmB,EAAIA,EACjE,IAAMC,EAAc,IAAI,YAAYR,CAAa,EACjDT,EAAgB,QAAQ,CAACW,EAAG/D,IAAM,CAChC,IAAMsE,EAASR,EAAQ9D,CAAC,EAClBU,EAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIA,EAAE,OAAS,EACb,IAAI,WAAWM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UAChDqD,EAAE,OAAS,GACpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,GAEpB,IAAI,YAAYM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,UACjDqD,EAAE,OAAS,EACpB,IAAI,aAAaM,EAAaC,EAAQ5D,EAAK,MAAM,EAAE,IAAIA,CAAI,MAE3D,OAAM,IAAI,MAAM,6BAA6B4B,GAA2ByB,EAAE,IAAI,CAAC,EAAE,CAErF,CAAC,EAED,IAAMQ,EAEF,KAAK,eAAe,OAAOV,EAAe,eAAe,SAAW,eAAe,OAAO,EAC9F,KAAK,OAAO,MAAM,YAAYU,EAAkB,OAAQ,EAAGF,EAAa,EAAGR,CAAa,EACxF,KAAK,eAAe,QAAQU,EAAkB,EAAE,EAChDX,EAAuB,CAAC,OAAQ,EAAG,KAAMC,EAAe,OAAQU,EAAkB,MAAM,CAC1F,CAEA,IAAMC,EAA0B,KAAK,eAAe,2BAA2BrB,CAAa,EACtF9C,EAAuBmE,EAAwB,CAAC,IAAM,GAAKA,EAAwB,CAAC,IAAM,EAE1FlE,EAAMrB,GAAwB0D,EAASZ,EAAkB1B,CAAoB,EAC/EoE,EAAW,KAAK,eAAe,YAAYnE,CAAG,EAQlD,GAPKmE,IACHA,EAAW,KAAK,eAAe,MAAM9B,EAAS6B,CAAuB,EACrE,KAAK,eAAe,YAAYlE,EAAKmE,CAAQ,EAC7CC,GAAU,OAAQ,IAAM,mBAAmBpE,CAAG,kBAAkBqC,EAAQ,IAAI,EAAE,GAI5ES,GAAmBqB,EAAS,qBAAsB,CACpD,GAAIrB,EAAgB,SAAWqB,EAAS,qBAAqB,OAC3D,MAAM,IAAI,MAAM,4CAA4CA,EAAS,qBAAqB,MAAM,SAC5FrB,EAAgB,MAAM,gBAAgBqB,EAAS,YAAY,IAAI,IAAI,EAEzE,QAASzE,EAAI,EAAGA,EAAIoD,EAAgB,OAAQpD,IAAK,CAC/C,IAAM2E,EAAUvB,EAAgBpD,CAAC,EAC3B4E,EAAaD,EAAQ,KACrBE,EAAe,OAAOF,EAAQ,MAAS,SAAW,EAAIA,EAAQ,KAAK,OACnE,CAAC1E,EAAM6E,CAAM,EAAIL,EAAS,qBAAqBzE,CAAC,EACtD,GAAI4E,IAAe3E,GAAQ4E,IAAiBC,EAC1C,MAAM,IAAI,MAAM,oBAAoB9E,CAAC,0BAA0BC,CAAI,cAAc6E,CAAM,cACnFF,CAAU,cAAcC,CAAY,gBAAgBJ,EAAS,YAAY,IAAI,IAAI,CAEzF,CACF,CAOA,GALAC,GACI,OACA,IAAM,yBAAyB/B,EAAQ,IAAI,UAAUrC,CAAG,UAAUkE,EAAwB,CAAC,CAAC,IACxFA,EAAwB,CAAC,CAAC,IAAIA,EAAwB,CAAC,CAAC,EAAE,EAE9D,KAAK,YAAc,QAAU,KAAK,gBAAkB,YAAa,CACnE,IAAM/C,EAAuC,CAC3C,SAAU,KAAK,gBACf,YAAagD,EAAS,YAAY,KAClC,iBAAA1C,EACA,kBAAAC,CACF,EACA,KAAK,eAAe,KAAKP,CAAiB,EAEtC,KAAK,gBAAkB,aACK,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC7D,KAAKA,CAAiB,CAEjD,CAEA,YAAK,eAAe,IAAIgD,EAAUzB,EAAYO,EAAaiB,EAAyBZ,CAAoB,EAExGlB,GAAeC,EAAQ,IAAI,EACpBX,CACT,CAEA,OAAO+C,EAAmBrE,EAAwB,CAChD,KAAK,eAAe,OAAOqE,EAAWrE,CAAI,CAC5C,CAEA,OAAOsE,EAAaC,EAAmB,CACrC,KAAK,eAAe,OAAOD,EAAKC,CAAG,CACrC,CAEA,MAAM,SAASF,EAAmBG,EAAkD,CAGlF,MAAM,KAAK,eAAe,SAASH,EAAWG,CAAe,CAC/D,CAEA,MAAMC,EAAsB,CAC1B,OAAO,KAAK,eAAe,OAAOA,CAAI,EAAE,EAC1C,CAEA,KAAKC,EAAqB,CACxB,OAAO,KAAK,eAAe,QAAQA,CAAG,CACxC,CAEA,aAAaxD,EAAoBF,EAAkB2D,EAAoBxD,EAA0B,CAC/F,IAAMyD,EAAKC,GAAwB,IAAI3D,CAAU,EACjD,GAAI,CAAC0D,EACH,MAAM,IAAI,MAAM,2BAA2B1D,CAAU,EAAE,EAGzD,IAAMD,EAAyB,CAC7B,WAAAC,EACA,WAAAC,EACA,YAAayD,EAAG,CAAC,EACj
B,WAAY,CAACA,EAAG,CAAC,EAAGD,CAAS,CAC/B,EACA,KAAK,QAAQ,IAAI3D,EAAUC,CAAU,CACvC,CAEA,cAAcD,EAAwB,CACpC,IAAMiC,EAAiB,KAAK,qBAAqB,IAAIjC,CAAQ,EAC7D,GAAIiC,EAAgB,CAClB,QAAWjD,KAAQiD,EACjB,KAAK,eAAe,QAAQjD,EAAK,EAAE,EAErC,KAAK,qBAAqB,OAAOgB,CAAQ,CAC3C,CAEA,KAAK,iBAAiB,OAAOA,CAAQ,EACrC,KAAK,QAAQ,OAAOA,CAAQ,CAC9B,CAEA,cAAcA,EAAkB8D,EAAyBC,EAA6C,CACpG,IAAMC,EAAS,KAAK,QAAQ,IAAIhE,CAAQ,EACxC,GAAI,CAACgE,EACH,MAAM,IAAI,MAAM,uBAAuBhE,CAAQ,EAAE,EAEnD,IAAME,EAAa8D,EAAO,WACpB7D,EAAa6D,EAAO,WACpBC,EAAcD,EAAO,YACrBE,EAAaF,EAAO,WAC1B,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,YAAY9D,CAAU,KAAKC,CAAU,2CAA2C,EAElG,KAAK,gBAAkBH,EAGnBkE,EAAW,CAAC,IACdA,EAAW,CAAC,EAAIA,EAAW,CAAC,EAAEA,EAAW,CAAC,CAAC,EAC3CA,EAAW,CAAC,EAAI,QAGlBlB,GAAU,OAAQ,IAAM,kCAAkC9C,CAAU,KAAKC,CAAU,MAAM,EAEzF,IAAMgE,EAAgB,KAAK,IAAI,MAE/B,KAAK,cAAgB,CAAC,EACtB,GAAI,CACF,OAAIA,GACF,KAAK,OAAO,eAAe,YAAY,EAGzCF,EAAYH,EAASI,EAAW,CAAC,CAAC,EAC3B,CACT,OAASE,EAAG,CACV,OAAAL,EAAO,KAAK,QAAQ,QAAQ,qBAAqB7D,CAAU,KAAKC,CAAU,aAAaiE,CAAC,EAAE,CAAC,EACpF,CACT,QAAE,CACID,GACFJ,EAAO,KAAK,KAAK,OAAO,cAAc,EAAE,KACpCM,GAAOA,EAAM,qCAAqCnE,CAAU,KAAKC,CAAU,MAAMkE,EAAI,OAAO,GAAK,IAAI,CAAC,EAG5G,QAAWrF,KAAQ,KAAK,cACtB,KAAK,eAAe,QAAQA,EAAK,EAAE,EAErC,KAAK,cAAgB,CAAC,EACtB,KAAK,gBAAkB,IACzB,CACF,CAGA,eAAesF,EAAmBC,EAAeC,EAAmBf,EAAsB,CACxF,IAAIgB,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EACxEG,IACHA,EAA4B,IAAI,IAChC,KAAK,2BAA2B,IAAIH,EAAWG,CAAyB,GAG1E,IAAMC,EAAiBD,EAA0B,IAAIF,CAAK,EACpDI,EAAK,KAAK,eAAe,uBAAuBH,EAAQf,EAAMiB,IAAiB,CAAC,CAAC,EACvF,OAAAD,EAA0B,IAAIF,EAAO,CAACI,EAAIH,CAAM,CAAC,EAC1CG,CACT,CACA,kBAAkBL,EAAyB,CACzC,IAAMG,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EAC3EG,IACFA,EAA0B,QAAQG,GAAc,KAAK,eAAe,yBAAyBA,EAAW,CAAC,CAAC,CAAC,EAC3G,KAAK,2BAA2B,OAAON,CAAS,EAEpD,CACA,UAAUjB,EAA8B,CACtC,IAAM9B,EAAU,KAAK,eAAe,IAAI8B,CAAS,EACjD,GAAI,CAAC9B,EACH,MAAM,IAAI,MAAM,2BAA2B8B,CAAS,EAAE,EAExD,OAAO9B,EAAQ,MACjB,CACA,iBAAiBsD,EAAsBpB,EAAclF,EAClB,CACjC,MAAO,UAAY,CACjB,IAAMS,EAAO,MAAM8F,GAAgB,KAAMD,EAAWpB,CAAI,EACxD,OAAOsB,GAAW/F,EAAK,OAAQT,CAAI,CACrC,CACF,CAEA,eAAegG,EAAqB,CAC9B,KAAK,YAAc,iBAKtB,KAAK,mBAA2B,eAAe,KAAK,SAAUA,CAAK,CACtE,CACA,cAAqB,CACnB,KAAK,UAAY,QACb,KAAK,IAAI,OAAO,WAAW,OAAS,YACnC,OAAO,KAAK,IAAI,MAAU,IAAc,KAAK,IAAI,KAAK,MAAQ,KAAK,IAAI,UACtE,KAAK,OAAO,SAAS,IAAI,qDAAqD,EAChF,KAAK,UAAY,gBACR,KAAK,OAAO,SAAS,IAAI,iBAAiB,IACnD,KAAK,UAAY,aAGf,KAAK,YAAc,QAAU,OAAO,KAAK,SAAa,MACxD,KAAK,SAAW,KAAK,OAAO,eAAe,CACzC,KAAM,YACN,MAAO,KAAK,kBAAoB,CAClC,CAAC,EACD,KAAK,mBAAqB,KAAK,OAAO,aAElC,CAAC,KAAM,KAAK,kBAAoB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,aAAa,CAAC,GAG/G,CAEA,cAAqB,CACnBvB,GAAU,OAAQ,cAAc,EAC3B,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,GACtD,KAAK,oBAAoB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAEpD,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,GACzD,KAAK,uBAAuB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAG5D,KAAK,MAAM,EACX,KAAK,cAAgB,WACvB,CACA,YAAmB,CACjBA,GAAU,OAAQ,YAAY,EAE9B,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CACA,QAAe,CACbA,GAAU,OAAQ,QAAQ,EAC1B,KAAK,cAAgB,YACrB,IAAMgC,EAAqB,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,EACxEC,EAAwB,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC9E7B,EAAS4B,EAAoB,OACnC,KAAK,eAAiB,CAAC,EACvB,QAAS1G,EAAI,EAAGA,EAAI8E,EAAQ9E,IAAK,CAC/B,IAAM4G,EAAqB,KAAK,sBAAsB,EAChDC,EAAUH,EAAoB1G,CAAC,EACrC,KAAK,eAAe,KAAK,sBAAwB,CAAC,EAClD4G,EAAmB,YAAYC,EAAQ,eAAe,EACtDD,EAAmB,aAAa,EAAGC,EAAQ,SAAS,EACpDD,EAAmB,mBAAmB,GAAGC,EAAQ,aAAa,EAC9D,KAAK,eAAe,KAAK,sBAAwB,EAAI,CAAC,EACtD,KAAK,wBACD,KAAK,YAAc,QACrB,KAAK,eAAe,KAAKF,EAAuB3G,CAAC,CAAC,GAEhD,KAAK,uBAAyB,KAAK,mBAAqB,KAAK,YAAc,cAC7E,KAAK,eAAe,EAElB,KAAK,uBAAyB,KAAK,mBACrC,KAAK,MAAM,CAEf,CAEA,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CAEA,iBAAiBgG,EAAyB,CACxC,KAAK,kBAAkBA,CAAS,EAC5B,KAAK,oBAAoB,IAAIA,CAAS,GACxC,KAAK,oBAAoB,OAAOA,CAAS,EAEvC,KAAK,uBA
AuB,IAAIA,CAAS,GAC3C,KAAK,uBAAuB,OAAOA,CAAS,EAE9C,KAAK,eAAe,iBAAiBA,CAAS,CAChD,CAEA,WAAWA,EAAyB,CAClC,KAAK,iBAAmBA,EACxB,KAAK,aAAa,CACpB,CACF,ICx0BA,IAAAc,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAgBMC,GAuCAC,GAoHOF,GA3KbG,GAAAC,EAAA,kBAMAC,IAEAC,KACAC,KAEAC,KAKMP,GAAN,MAAMQ,CAAqC,CACzC,YACYC,EAAuCC,EAAkCC,EACjEC,EAAyB,CADjC,YAAAH,EAAuC,cAAAC,EAAkC,UAAAC,EACjE,UAAAC,CAA0B,CAE9C,iBAAgC,CAC9B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMC,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,aACJ,IAAI,aAAa,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CAChG,CAEA,kBAAkC,CAChC,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,cACJ,IAAI,cAAc,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjG,CAEA,eAA4B,CAC1B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,WAAe,IAAI,WAAW,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjH,CAEA,QAAQE,EAAwC,CAC9C,GAAID,EAAU,KAAKC,CAAO,IAAMD,EAAU,KAAK,KAAK,IAAI,EACtD,MAAM,IAAI,MAAM,mBAAmB,EAErC,OAAO,IAAIN,EAAe,KAAK,OAAQ,KAAK,SAAU,KAAK,KAAMO,CAAO,CAC1E,CACF,EAEMd,GAAN,KAAmD,CAajD,YAAoBQ,EAA+BO,EAAwBC,EAA2B,CAAlF,YAAAR,EAA+B,aAAAO,EAFnD,KAAQ,iBAAmB,EAC3B,KAAQ,eAAiB,EAEvB,KAAK,YAAcA,EAAQ,YAC3B,IAAME,EAAUT,EAAO,QAGnBU,EAAaF,IAAsB,EACvC,KAAK,gBAAkBC,EAAQC,GAAW,EAC1C,IAAMC,EAAaF,EAAQC,GAAW,EACtC,KAAK,YAAcD,EAAQC,GAAW,EACtC,KAAK,iBAAmBD,EAAQC,GAAW,EAC3C,KAAK,eAAiBD,EAAQC,GAAW,EAEzC,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIF,EAAYE,IAAK,CACnC,IAAMZ,EAAWQ,EAAQC,GAAW,EAC9BR,EAAOO,EAAQC,GAAW,EAC1BI,EAAML,EAAQC,GAAW,EACzBP,EAAiB,CAAC,EACxB,QAASY,EAAI,EAAGA,EAAID,EAAKC,IACvBZ,EAAK,KAAKM,EAAQC,GAAW,CAAC,EAEhCE,EAAO,KAAK,IAAIrB,GAAeS,EAAQC,EAAUC,EAAMC,CAAI,CAAC,CAC9D,CACA,KAAK,OAASS,CAChB,CAhCA,IAAI,kBAA6C,CAC/C,OAAO,KAAK,QAAQ,uBACtB,CACA,IAAI,kBAA+B,CACjC,OAAO,KAAK,OAAO,OAAO,SAAS,KAAK,iBAAkB,KAAK,iBAAmB,KAAK,cAAc,CACvG,CA6BA,6BAAwD,CACtD,MAAO,CACL,KAAK,QAAQ,OAAO,OAAO,yBAA0B,KAAK,QAAQ,OAAO,OAAO,yBAChF,KAAK,QAAQ,OAAO,OAAO,wBAC7B,CACF,CAEA,mCAA4C,CAC1C,OAAO,KAAK,QAAQ,OAAO,OAAO,8BACpC,CAEA,QAAQI,EAAsBC,EAAyE,CAErG,IAAMC,EACFD,GAAsB,QAAQ,IAAIJ,GAAK,OAAOA,GAAM,SAAW,KAAK,OAAOA,CAAC,EAAIA,CAAC,GAAK,KAAK,OAEzFM,EAAgBF,GAAsB,SAAW,CAAC,EAClDG,EAAqB,CAACC,EAAepB,EAAkBE,IACzD,IAAIZ,GAAe,KAAK,OAAQU,EAAU,KAAK,OAAOoB,EAAOlB,CAAI,EAAGA,CAAI,EACtEmB,EAAwB,CAACrB,EAAkBE,IAAwC,CACvF,IAAMoB,EAAcC,GAAqBvB,CAAQ,EACjD,GAAI,CAACsB,EACH,MAAM,IAAI,MAAM,0BAA0BtB,CAAQ,EAAE,EAEtD,IAAMwB,EAAaF,EAAclB,EAAU,KAAKF,CAAI,EAC9CuB,EAAYD,EAAa,EAAI,KAAK,QAAQ,eAAe,OAAOA,CAAU,EAAE,GAAK,EACvF,OAAO,IAAIlC,GAAe,KAAK,OAAQU,EAAUyB,EAAWvB,CAAI,CAClE,EACA,OAAO,KAAK,QAAQ,IAChBa,EAASE,EAAcC,EAAeC,EAAoBE,EAAuB,KAAK,WAAW,CACvG,CAEA,OAAOD,EAAelB,EAAiC,CACrD,IAAMwB,EAAQ,KAAK,OAAO,UAAU,EACpC,GAAI,CACF,IAAMzB,EAAO,KAAK,OAAO,YAAY,EAAIC,EAAK,QAAU,CAAsB,EAC1EyB,EAAS1B,GAAQ,EACrB,KAAK,OAAO,QAAQ0B,GAAQ,EAAIzB,EAAK,OACrC,QAASU,EAAI,EAAGA,EAAIV,EAAK,OAAQU,IAC/B,KAAK,OAAO,QAAQe,GAAQ,EAAIzB,EAAKU,CAAC,EAExC,OAAO,KAAK,OAAO,YAAa,KAAK,gBAAiBQ,EAAOnB,CAAI,CACnE,OAAS2B,EAAG,CACV,MAAM,IAAI,MACN,sCAAsCR,CAAK,gBAAgBlB,CAAI,8GAErD0B,CAAC,EAAE,CACnB,QAAE,CACA,KAAK,OAAO,aAAaF,CAAK,CAChC,CACF,CACF,EA0BarC,GACT,MAAMwC,EAAwB9B,EAAuB+B,EAAUC,IAA2C,CAC5G,IAAMC,EAAWjC,EAAO,SACxB,GAAI,CAACiC,EACH,MAAM,IAAI,MAAM,mFAAmF,EAGrG,GAAIH,IAAS,SAAU,CACrB,IAAMvB,EAAU,IAAI2B,GACpB,MAAM3B,EAAQ,WAAWwB,EAAKC,CAAW,EAEzCC,EAAS,SAAU,CAEjB1B,EAGC4B,GAAiB5B,EAAQ,MAAM4B,CAAI,EAGnCC,GAAgB7B,EAAQ,KAAK6B,CAAG,EAGjC,CAACC,EAAaC,EAAaH,EAAcI,EAAc,KAAU,CAC/D,GAAIA,EACFC,GAAU,UAAW,IAAM,kCAAkCH,CAAG,SAASC,CAAG,UAAUH,CAAI,EAAE,EAC5F5B,EAAQ,OAAO8B,EAAKC,CAAG,MAClB,CACLE,GAAU,UAAW,IAAM,yCAAyCH,CAAG,eA
AeC,CAAG,UAAUH,CAAI,EAAE,EACzG,IAAMjC,EAAOF,EAAO,OAAO,SAASqC,IAAQ,GAAIA,IAAQ,GAAKF,CAAI,EACjE5B,EAAQ,OAAO+B,EAAKpC,CAAI,CAC1B,CACF,EAGA,MAAMwB,EAAmBe,EAAoBN,IACxB,CACfK,GACI,UACA,IAAM,wCAAwCd,CAAS,gBAAgBe,CAAU,UAAUN,CAAI,EAAE,EAErG,MAAM5B,EAAQ,SACVmB,EAAW,IAAM1B,EAAO,OAAO,SAASyC,IAAe,GAAIA,IAAe,GAAKN,CAAI,CAAC,CAC1F,EAGJ,CAACO,EAAoBC,EAAkBC,IAAuBrC,EAAQ,aAClEmC,EAAYC,EAAUC,EAAW5C,EAAO,aAAaA,EAAO,iBAAkB2C,CAAQ,CAAC,CAAC,EAG3FE,GAAmBtC,EAAQ,cAAcsC,CAAM,EAGhD,CAACA,EAAgBrC,EAA2BsC,EAAuBC,IAAwC,CACzGP,GACI,UACA,IAAM,mCAAmCM,CAAa,YAAYD,CAAM,uBACpErC,CAAiB,EAAE,EAC3B,IAAMwC,EAAU,IAAIxD,GAAmBQ,EAAQO,EAASC,CAAiB,EACzE,OAAOD,EAAQ,cAAcsC,EAAQG,EAASD,CAAM,CACtD,EAEA,IAAMxC,EAAQ,aAAa,EAE3B,IAAMA,EAAQ,WAAW,EAEzB,IAAMA,EAAQ,OAAO,CACvB,CAAC,CACH,MACE0B,EAAS,OAAO,CAEpB,ICjPA,IAoEMgB,GAWOC,GAWAC,GAoFPC,GAOAC,GAqBOC,GAkBAC,GAmKAC,GAuBAC,GA+EAC,GA6OAC,GAgBAC,GAluBbC,GAAAC,EAAA,kBAWAC,KACAC,KACAC,IACAC,KACAC,KACAC,KAoDMnB,GAAU,CAACoB,EAAoBC,IAA+B,CAChDC,GAAY,EAAE,SAASF,EAAYC,CAAY,IAC/C,GAChBE,GAAe,+BAAgC,CAEnD,EAMatB,GAAc,MAAMuB,GAA4B,CAE3DxB,GAAQwB,EAAI,KAAK,WAAaC,GAAqBD,EAAI,QAAQ,CAAC,CAClE,EAQatB,GAAS,MAAMsB,EAAUE,IAAkC,CACxC,CAE5B,IAAMC,EAAW,cAAuB,KAExC,GAAID,IAAW,SAAU,CAEvB,GAAI,OAAO,UAAc,KAAe,CAAC,UAAU,IACjD,MAAM,IAAI,MAAM,gDAAgD,EAGlE,IAAIE,EAAUJ,EAAI,OAAO,QACzB,GAAKI,GAmBH,GAAI,OAAOA,EAAQ,QAAW,UAAY,OAAOA,EAAQ,UAAa,UAClE,OAAOA,EAAQ,eAAkB,WACnC,MAAM,IAAI,MAAM,kFAAkF,MArBxF,CAEZ,IAAMC,EAAkBL,EAAI,OAAO,gBACnC,GAAIK,IAAoB,QAAaA,IAAoB,aACrDA,IAAoB,mBACtB,MAAM,IAAI,MAAM,qCAAqCA,CAAe,GAAG,EAEzE,IAAMC,EAAuBN,EAAI,OAAO,qBACxC,GAAIM,IAAyB,QAAa,OAAOA,GAAyB,UACxE,MAAM,IAAI,MAAM,0CAA0CA,CAAoB,GAAG,EAGnF,GADAF,EAAU,MAAM,UAAU,IAAI,eAAe,CAAC,gBAAAC,EAAiB,qBAAAC,CAAoB,CAAC,EAChF,CAACF,EACH,MAAM,IAAI,MACN,0GAC+E,CAEvF,CAQA,MAAMD,EAAS,SAAUL,GAAY,EAAGE,EAAKI,CAAO,CACtD,CACA,GAAIF,IAAW,QAAS,CAEtB,GAAI,OAAO,UAAc,KAAe,CAAE,UAAuC,GAC/E,MAAM,IAAI,MAAM,+CAA+C,EAGjE,MAAMC,EAAS,QAASL,GAAY,EAAGE,CAAG,CAC5C,CACF,CACF,EAoCMrB,GAAiB,IAAI,IAOrBC,GAA8B2B,GAA4C,CAC9E,IAAMC,EAAOV,GAAY,EACnBW,EAAQD,EAAK,UAAU,EAC7B,GAAI,CACF,IAAME,EAAaF,EAAK,WAAW,CAAC,EAEpC,OADkBA,EAAK,wBAAwBD,EAAeG,EAAYA,EAAa,CAAC,IACtE,GAChBX,GAAe,uCAAwC,EAElD,CAACS,EAAK,OAAOE,EAAa,CAAC,EAAGF,EAAK,OAAOE,EAAa,EAAI,CAAC,CAAC,CACtE,QAAE,CACAF,EAAK,aAAaC,CAAK,CACzB,CACF,EAQa5B,GAA0B8B,GAAwC,CAC7E,IAAMH,EAAOV,GAAY,EACnBc,EAAkBJ,EAAK,QAAQG,EAAM,UAAU,EACrD,GAAIC,IAAoB,EACtB,MAAM,IAAI,MAAM,+DAA+DD,EAAM,UAAU,GAAG,EAEpG,OAAAH,EAAK,OAAO,IAAIG,EAAOC,CAAe,EAC/B,CAACA,EAAiBD,EAAM,UAAU,CAC3C,EAUa7B,GAAgB,MACzB+B,EACAC,IAAoF,CACtF,IAAIF,EAAyBG,EACvBP,EAAOV,GAAY,EAErB,MAAM,QAAQe,CAAS,EAEzB,CAACD,EAAiBG,CAAe,EAAIF,EAC5BA,EAAU,SAAWL,EAAK,OAAO,OAE1C,CAACI,EAAiBG,CAAe,EAAI,CAACF,EAAU,WAAYA,EAAU,UAAU,EAGhF,CAACD,EAAiBG,CAAe,EAAIlC,GAAuBgC,CAAS,EAGvE,IAAIN,EAAgB,EAChBS,EAAuB,EACvBC,EAAkB,EAClBC,EAAmB,CAAC,EAClBC,EAAwB,CAAC,EACzBC,EAAyB,CAAC,EAEhC,GAAI,CAGF,GAFA,CAACJ,EAAsBE,CAAM,EAAIG,GAAkBP,CAAO,EAEtDA,GAAS,cAAgBN,EAAK,kBAAmB,CACnD,IAAMc,EAAkB,CAAC,EACzB,QAAWC,KAAQT,EAAQ,aAAc,CACvC,IAAMU,EAAO,OAAOD,GAAS,SAAWA,EAAOA,EAAK,KACpDD,EAAgB,KAAKG,GAAS,OAAOF,GAAS,SAAWA,EAAOA,EAAK,IAAI,EAAE,KAAKG,GAAQ,CACtFlB,EAAK,kBAAmBgB,EAAME,CAAI,CACpC,CAAC,CAAC,CACJ,CAGA,MAAM,QAAQ,IAAIJ,CAAe,CACnC,CAEA,QAAWK,KAAYb,GAAS,oBAAsB,CAAC,EAErD,IADqB,OAAOa,GAAa,SAAWA,EAAWA,EAAS,QACnD,QAAS,CAC5B,GAAInB,EAAK,eACP,MAAM,IAAI,MAAM,0CAA0C,EAE5D,GAAI,OAAOmB,GAAa,SAAU,CAChC,IAAMC,EAAeD,EACfE,EAAWD,GAA6D,QACxEE,EAAaF,GAAsD,UACnEG,EAAcH,GAAuD,WACrEhC,EAAcgC,GAAuD,WACrEvB,EAAmBuB,GAAuD,gBAC5EC,EACFrB,EAAK,eAAiBqB,EACbC,EACTtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAcsB,CAAS,EAEhEtB,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,CAAC,WAAAuB,EAAY,WAAAnC,EAAY,gBA
AAS,CAAe,CAAC,CAEpG,MACEG,EAAK,eAAiB,MAAM,UAAU,GAAG,cAAc,EAEzD,KACF,CAGFD,EAAgB,MAAMC,EAAK,kBAAkBI,EAAiBG,EAAiBC,CAAoB,EAC/FT,IAAkB,GACpBR,GAAe,yBAA0B,EAIvCS,EAAK,iBACPA,EAAK,eAAiB,QAGxB,GAAM,CAACwB,EAAYC,CAAW,EAAIrD,GAA2B2B,CAAa,EAEpE2B,EAAqB,CAAC,CAACpB,GAAS,mBAEhCqB,EAAa,CAAC,EACdC,EAAc,CAAC,EACfC,EAAwE,CAAC,EAC/E,QAASC,EAAI,EAAGA,EAAIN,EAAYM,IAAK,CACnC,IAAMC,EAAO/B,EAAK,iBAAiBD,EAAe+B,CAAC,EAC/CC,IAAS,GACXxC,GAAe,0BAA2B,EAE5CoB,EAAsB,KAAKoB,CAAI,EAC/BJ,EAAW,KAAK3B,EAAK,aAAa+B,CAAI,CAAC,CACzC,CACA,QAASD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMC,EAAO/B,EAAK,kBAAkBD,EAAe+B,CAAC,EAChDC,IAAS,GACXxC,GAAe,2BAA4B,EAE7CqB,EAAuB,KAAKmB,CAAI,EAChC,IAAMC,EAAahC,EAAK,aAAa+B,CAAI,EACzCH,EAAY,KAAKI,CAAU,EAEG,CAC5B,GAAIN,GAAsBpB,GAAS,0BAA4B,OAAW,CACxEuB,EAAyB,KAAK,YAAY,EAC1C,QACF,CACA,IAAMI,EAAW,OAAO3B,GAAS,yBAA4B,SACzDA,EAAQ,wBACRA,GAAS,0BAA0B0B,CAAU,GAAK,MACtD,GAAIC,IAAa,OAASA,IAAa,cAAgBA,IAAa,aAClE,MAAM,IAAI,MAAM,4CAA4CA,CAAQ,GAAG,EAEzE,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MAAM,4CACZA,CAAQ,4EAA4E,EAE1FJ,EAAyB,KAAKI,CAAQ,CACxC,CACF,CAGA,IAAIC,EAAoC,KACxC,OAAgCL,EAAyB,KAAKM,GAAKA,IAAM,YAAY,IACnF1B,EAAkBT,EAAK,kBAAkBD,CAAa,EAClDU,IAAoB,GACtBlB,GAAe,0BAA2B,EAG5C2C,EAAe,CACb,OAAQzB,EACR,yBAAAoB,EACA,gCAAiCA,EAAyB,IAAIM,GAAKC,GAAyBD,CAAC,CAAC,CAChG,GAGFhE,GAAe,IACX4B,EACA,CAACA,EAAeY,EAAuBC,EAAwBsB,EAAcR,EAAoB,EAAK,CAAC,EACpG,CAAC3B,EAAe4B,EAAYC,CAAW,CAChD,OAASS,EAAG,CACV,MAAA1B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EAEpD7B,IAAoB,GACtBT,EAAK,mBAAmBS,CAAe,EAGrCV,IAAkB,GACpBC,EAAK,mBAAmBD,CAAa,EAEjCsC,CACR,QAAE,CACArC,EAAK,MAAMI,CAAe,EACtBI,IAAyB,GAC3BR,EAAK,0BAA0BQ,CAAoB,EAErDE,EAAO,QAAQ6B,GAASvC,EAAK,MAAMuC,CAAK,CAAC,EAGzCvC,EAAK,sBAAsB,CAC7B,CACF,EAEazB,GAAkBiE,GAA4B,CACzD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,+CAA+CD,CAAS,EAAE,EAE5E,GAAM,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,CAAkB,EAAIe,EAEvGC,IACEhB,GACF1B,EAAK,sBAAsB0C,EAAe,MAAM,EAElD1C,EAAK,mBAAmB0C,EAAe,MAAM,GAG/C1C,EAAK,uBAAuBwC,CAAS,EAErC7B,EAAsB,QAAQ2B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACvD1B,EAAuB,QAAQ0B,GAAOtC,EAAK,SAASsC,CAAG,CAAC,EACxDtC,EAAK,mBAAmBD,CAAa,EACrC5B,GAAe,OAAOqE,CAAS,CACjC,EAEahE,GACT,CAACmE,EAA6BC,EAAyBlC,EAAkB8B,EAAmBK,EAC3FnB,EAAqB,KAAgB,CACpC,GAAI,CAACiB,EAAQ,CACXC,EAAc,KAAK,CAAC,EACpB,MACF,CAEA,IAAM5C,EAAOV,GAAY,EAEnBwD,EAAWH,EAAO,CAAC,EACnBI,EAAOJ,EAAO,CAAC,EACfV,EAAWU,EAAO,CAAC,EAErBK,EACAC,EAEJ,GAAIH,IAAa,UAAYb,IAAa,aACxC,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MACN,2DAA2DY,CAAK,mCAAmC,EAGzG,GAAIZ,IAAa,aAAc,CAC7B,IAAMiB,EAAYP,EAAO,CAAC,EAAE,UACtBQ,EAAqBC,GAAqBC,GAA2BP,CAAQ,CAAC,EACpFG,EAAiBF,EAAK,OAAO,CAACO,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAIJ,EAEnD,IAAMK,EAAiBxD,EAAK,mBAC5B,GAAI,CAACwD,EACH,MAAM,IAAI,MAAM,qEAAqE,EAEvFR,EAAUQ,EAAehB,EAAWK,EAAOK,EAAWD,CAAc,CACtE,KAAO,CACL,IAAM/B,EAAOyB,EAAO,CAAC,EAErB,GAAI,MAAM,QAAQzB,CAAI,EAAG,CAEvB+B,EAAiB,EAAI/B,EAAK,OAC1B8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnB,IAAIS,EAAYT,EAAU,EAC1B,QAASlB,EAAI,EAAGA,EAAIZ,EAAK,OAAQY,IAAK,CACpC,GAAI,OAAOZ,EAAKY,CAAC,GAAM,SACrB,MAAM,IAAI,UAAU,wBAAwBA,CAAC,kBAAkB,EAEjE9B,EAAK,QAAQyD,GAAW,EAAIC,GAAgBxC,EAAKY,CAAC,EAAGpB,CAAM,CAC7D,CACF,MACEuC,EAAiB/B,EAAK,WACtB8B,EAAUhD,EAAK,QAAQiD,CAAc,EACrCvC,EAAO,KAAKsC,CAAO,EACnBhD,EAAK,OAAO,IAAI,IAAI,WAAWkB,EAAK,OAAQA,EAAK,WAAY+B,CAAc,EAAGD,CAAO,CAEzF,CAEA,IAAM/C,EAAQD,EAAK,UAAU,EACvB2D,EAAa3D,EAAK,WAAW,EAAI+C,EAAK,MAAM,EAClD,GAAI,CACF,IAAIa,EAAWD,EAAa,EAC5BZ,EAAK,QAAQc,GAAK7D,EAAK,OAAO4D,GAAU,EAAIC,CAAC,EAC7C,IAAMlB,EAAS3C,EAAK,iBAChBqD,GAA2BP,CAAQ,EAAGE,EAASC,EAAgBU,EAAYZ,EAAK,OAChFX,GAAy
BH,CAAQ,CAAC,EAClCU,IAAW,GACbpD,GAAe,iDAAiDiD,CAAS,WAAWK,CAAK,GAAG,EAE9FD,EAAc,KAAKD,CAAM,CAC3B,QAAE,CACA3C,EAAK,aAAaC,CAAK,CACzB,CACF,EAKSxB,GAAM,MACf+D,EAAmBsB,EAAwBC,EAAgCC,EAC3EC,EAA2C3D,IAAoE,CACjH,IAAMN,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,6CAA6CD,CAAS,EAAE,EAE1E,IAAMzC,EAAgB0C,EAAQ,CAAC,EACzB9B,EAAwB8B,EAAQ,CAAC,EACjC7B,EAAyB6B,EAAQ,CAAC,EAClCC,EAAiBD,EAAQ,CAAC,EAC1Bf,EAAqBe,EAAQ,CAAC,EAC9ByB,EAAmBzB,EAAQ,CAAC,EAE5BjB,EAAasC,EAAa,OAC1BrC,EAAcuC,EAAc,OAE9BG,EAAmB,EACnBC,EAA6B,CAAC,EAE5BC,EAA+B,CAAC,EAChCC,EAAgC,CAAC,EACjCC,EAA8B,CAAC,EAE/BC,EAAiBxE,EAAK,UAAU,EAChCyE,EAAoBzE,EAAK,WAAWwB,EAAa,CAAC,EAClDkD,EAAmB1E,EAAK,WAAWwB,EAAa,CAAC,EACjDmD,EAAqB3E,EAAK,WAAWyB,EAAc,CAAC,EACpDmD,EAAoB5E,EAAK,WAAWyB,EAAc,CAAC,EAEzD,GAAI,CACF,CAAC0C,EAAkBC,CAAgB,EAAIS,GAAcvE,CAAO,EAG5D,QAASwB,EAAI,EAAGA,EAAIN,EAAYM,IAC9BtD,GACIuF,EAAajC,CAAC,EAAGuC,EAAoBE,EAAmB/B,EAAWsB,EAAahC,CAAC,EAAGJ,CAAkB,EAI5G,QAASI,EAAI,EAAGA,EAAIL,EAAaK,IAC/BtD,GACIyF,EAAcnC,CAAC,EAAGwC,EAAqBC,EAAmB/B,EAAWhB,EAAawC,EAAclC,CAAC,EACjGJ,CAAkB,EAGxB,IAAIoD,EAAmBL,EAAoB,EACvCM,EAAkBL,EAAmB,EACrCM,GAAoBL,EAAqB,EACzCM,GAAmBL,EAAoB,EAC3C,QAAS9C,EAAI,EAAGA,EAAIN,EAAYM,IAC9B9B,EAAK,QAAQ8E,GAAkB,EAAIT,EAAmBvC,CAAC,EACvD9B,EAAK,QAAQ+E,GAAiB,EAAIpE,EAAsBmD,EAAahC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIL,EAAaK,IAC/B9B,EAAK,QAAQgF,IAAmB,EAAIV,EAAoBxC,CAAC,EACzD9B,EAAK,QAAQiF,IAAkB,EAAIrE,EAAuBoD,EAAclC,CAAC,CAAC,EAG5E,GAAgCY,GAAkB,CAACwB,EAAkB,CACnE,GAAM,CAAC,OAAAgB,EAAQ,yBAAArD,GAA0B,gCAAAsD,EAA+B,EAAIzC,EAE5E,GAAI/B,EAAsB,SAAWa,EACnC,MAAM,IAAI,MAAM,2BACZA,CAAU,4DAA4Db,EAAsB,MAAM,IAAI,EAI5G,QAASmB,GAAI,EAAGA,GAAIN,EAAYM,KAAK,CACnC,IAAMe,EAAQiB,EAAahC,EAAC,EACV,MAAM9B,EAAK,cAAckF,EAAQvE,EAAsBkC,CAAK,EAAGwB,EAAmBvC,EAAC,CAAC,IACpF,GAChBvC,GAAe,oBAAoBuC,EAAC,iBAAiBU,CAAS,GAAG,CAErE,CAGA,QAASV,GAAI,EAAGA,GAAIL,EAAaK,KAAK,CACpC,IAAMe,EAAQmB,EAAclC,EAAC,EACZmC,EAAcnC,EAAC,IAAI,CAAC,EAIjB9B,EAAK,eAAekF,EAAQtE,EAAuBiC,CAAK,EAAGyB,EAAoBxC,EAAC,EAAG,CAAC,IACpF,GAChBvC,GAAe,mCAAmCuC,EAAC,iBAAiBU,CAAS,GAAG,EAK9ExC,EAAK,eAAekF,EAAQtE,EAAuBiC,CAAK,EAAG,EAAGsC,GAAgCtC,CAAK,CAAC,IACtF,GAChBtD,GAAe,qBAAqBuC,EAAC,QAAQD,GAAyBC,EAAC,CAAC,gBAAgBU,CAAS,GAAG,CAG1G,CACArE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAI,CAAC,CAC9G,CAEA1B,EAAK,iBAAiBD,CAAa,EACnC,IAAIqF,EAC4B1C,EAC9B0C,EAAY,MAAMpF,EAAK,mBACnBD,EAAe2C,EAAe,OAAQjB,EAAakD,EAAoBR,CAAgB,EAE3FiB,EAAY,MAAMpF,EAAK,QACnBD,EAAe2E,EAAkBD,EAAmBjD,EAAYoD,EAAmBnD,EACnFkD,EAAoBR,CAAgB,EAGtCiB,IAAc,GAChB7F,GAAe,0BAA0B,EAG3C,IAAM8F,GAA2B,CAAC,EAElC,QAASvD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMa,GAAS3C,EAAK,QAAQ2E,EAAqB,EAAI7C,CAAC,EACtD,GAAIa,KAAW2B,EAAoBxC,CAAC,EAAG,CAErCuD,GAAO,KAAKpB,EAAcnC,CAAC,CAAE,EAC7B,QACF,CAEA,IAAMwD,GAA2BtF,EAAK,UAAU,EAE1CuF,GAAmBvF,EAAK,WAAW,EAAI,CAAC,EAE1CwF,EAAmB,GACnBC,GAA6BvF,EAAa,EAC9C,GAAI,CACgBF,EAAK,kBACnB2C,GAAQ4C,GAAkBA,GAAmB,EAAGA,GAAmB,EAAGA,GAAmB,EAAE,IAC7E,GAChBhG,GAAe,4CAA4CuC,CAAC,GAAG,EAEjE,IAAI4D,EAAkBH,GAAmB,EACnCzC,EAAW9C,EAAK,QAAQ0F,GAAiB,EAC/CxF,EAAaF,EAAK,QAAQ0F,GAAiB,EAC3C,IAAM/B,GAAa3D,EAAK,QAAQ0F,GAAiB,EAC3CC,GAAa3F,EAAK,QAAQ0F,GAAiB,EAC3C3C,GAAO,CAAC,EACd,QAASjB,GAAI,EAAGA,GAAI6D,GAAY7D,KAC9BiB,GAAK,KAAK/C,EAAK,QAAQ2D,GAAa,EAAI7B,EAAC,CAAC,EAE5C9B,EAAK,SAAS2D,EAAU,EAExB,IAAMiC,GAAO7C,GAAK,OAAO,CAACO,GAAGC,KAAMD,GAAIC,GAAG,CAAC,EAC3CkC,GAAOI,GAA2B/C,CAAQ,EAE1C,IAAMgD,GAAoBpD,GAAgB,yBAAyBsB,EAAclC,CAAC,CAAC,EAEnF,GAAI2D,KAAS,SAAU,CACrB,GAAIK,KAAsB,aACxB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMC,GAAuB,CAAC,EAC1BtC,GAAYvD,EAAa,EAC7B,QAAS4B,GAAI,EAAGA,GAAI8D,GAAM9D,KAAK,CAC7B,IAAMkE,GAAShG,EAAK,QAAQyD,IAAW,EACjCwC,GAAiBnE,
KAAM8D,GAAO,EAAI,OAAY5F,EAAK,QAAQyD,EAAS,EAAIuC,GAC9ED,GAAW,KAAK/F,EAAK,aAAagG,GAAQC,EAAc,CAAC,CAC3D,CACAZ,GAAO,KAAK,CAACI,GAAM1C,GAAMgD,GAAY,KAAK,CAAC,CAC7C,SAGMD,KAAsB,cAAgBF,GAAO,EAAG,CAClD,IAAMM,GAAYlG,EAAK,cACvB,GAAI,CAACkG,GACH,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAMhD,GAAYgD,GAAUhG,CAAU,EAChCiG,GAAc/C,GAAqBN,CAAQ,EACjD,GAAIqD,KAAgB,QAAa,CAACC,GAAyBX,EAAI,EAC7D,MAAM,IAAI,MAAM,0BAA0BA,EAAI,EAAE,EAIlDD,EAAmB,GAEnBH,GAAO,KAAK,CACVI,GAAM1C,GAAM,CACV,UAAAG,GACA,SAAUlD,EAAK,qBAAsBkD,GAAW0C,GAAOO,GAAaV,EAAI,EACxE,QAAS,IAAM,CACbzF,EAAK,kBAAkB2C,EAAM,CAC/B,CACF,EACA,YACF,CAAC,CACH,KAAO,CACL,IAAM0D,GAAwBC,GAAkCb,EAAI,EAC9DvE,GAAO,IAAImF,GAAsBT,EAAI,EAC3C,IAAI,WAAW1E,GAAK,OAAQA,GAAK,WAAYA,GAAK,UAAU,EACvD,IAAIlB,EAAK,OAAO,SAASE,EAAYA,EAAagB,GAAK,UAAU,CAAC,EACvEmE,GAAO,KAAK,CAACI,GAAM1C,GAAM7B,GAAM,KAAK,CAAC,CACvC,CAEJ,QAAE,CACAlB,EAAK,aAAasF,EAAwB,EACtCG,KAAS,UAAYvF,GACvBF,EAAK,MAAME,CAAU,EAElBsF,GACHxF,EAAK,kBAAkB2C,EAAM,CAEjC,CACF,CAEA,OAAID,GAAkB,CAAChB,IACrB1B,EAAK,sBAAsB0C,EAAe,MAAM,EAChDvE,GAAe,IACXqE,EACA,CAACzC,EAAeY,EAAuBC,EAAwB8B,EAAgBhB,EAAoB,EAAK,CAAC,GAExG2D,EACT,QAAE,CACArF,EAAK,aAAawE,CAAc,EAEhCH,EAAmB,QAAQkC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EACzDjC,EAAoB,QAAQiC,GAAKvG,EAAK,kBAAkBuG,CAAC,CAAC,EAC1DhC,EAAkB,QAAQiC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,EAExCrC,IAAqB,GACvBnE,EAAK,sBAAsBmE,CAAgB,EAE7CC,EAAiB,QAAQoC,GAAKxG,EAAK,MAAMwG,CAAC,CAAC,CAC7C,CACF,EAKa9H,GAAgB8D,GAA4B,CACvD,IAAMxC,EAAOV,GAAY,EACnBmD,EAAUtE,GAAe,IAAIqE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,oBAAoB,EAEtC,IAAM1C,EAAgB0C,EAAQ,CAAC,EAGzBgE,EAAkBzG,EAAK,iBAAiBD,CAAa,EACvD0G,IAAoB,GACtBlH,GAAe,iCAAkC,EAEnDS,EAAK,SAASyG,CAAe,CAC/B,EAEa9H,GAA8B+H,GAAsE,CAC/G,IAAMC,EAA6B,CAAC,EACpC,QAAWhE,KAAU+D,EAAS,CAC5B,IAAMxF,EAAOyB,EAAO,CAAC,EACjB,CAAC,MAAM,QAAQzB,CAAI,GAAK,WAAYA,GACtCyF,EAAQ,KAAKzF,EAAK,MAAM,CAE5B,CACA,OAAOyF,CACT,IC3uBA,IAUMC,GACFC,GACAC,GACAC,GACAC,GACAC,GAGAC,GACEC,GAEAC,GASAC,GAMAC,GAmCOC,GA8CAC,GAaAC,GAaAC,GAuBAC,GAaAC,GAyBAC,GA5MbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEMvB,GAAU,IAAe,CAAC,CAACwB,GAAI,KAAK,OAAS,OAAO,SAAa,IAEnEtB,GAAe,GACfC,GAAc,GACdC,GAAU,GAKRG,GAAiF,IAAI,IAErFC,GAAmB,CAACiB,EAA8BC,IAA+C,CACrG,IAAMC,EAAQpB,GAAgB,IAAIkB,CAAI,EAClCE,EACFA,EAAM,KAAKD,CAAS,EAEpBnB,GAAgB,IAAIkB,EAAM,CAACC,CAAS,CAAC,CAEzC,EAEMjB,GAAe,IAAY,CAC/B,GAAIP,IAAgB,CAACC,IAAeC,IAAW,CAACH,GAC9C,MAAM,IAAI,MAAM,kBAAkB,CAEtC,EAEMS,GAAwBkB,GAA2C,CACvE,OAAQA,EAAG,KAAK,KAAM,CACpB,IAAK,YACH1B,GAAe,GACX0B,EAAG,KAAK,KACVxB,GAAU,GACVE,GAAkB,CAAC,EAAEsB,EAAG,KAAK,GAAG,IAEhCzB,GAAc,GACdG,GAAkB,CAAC,EAAE,GAEnBD,KACF,IAAI,gBAAgBA,EAAkB,EACtCA,GAAqB,QAEvB,MACF,IAAK,UACL,IAAK,YACL,IAAK,SACL,IAAK,UACL,IAAK,MACL,IAAK,gBAAiB,CACpB,IAAMqB,EAAYnB,GAAgB,IAAIqB,EAAG,KAAK,IAAI,EAC9CA,EAAG,KAAK,IACVF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAG,EAEjCF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAI,EAEpC,KACF,CACA,QACF,CACF,EAGajB,GAAqC,SAA0B,CAC1E,GAAI,CAAAR,GAGJ,IAAID,GACF,MAAM,IAAI,MAAM,0CAA4C,EAE9D,GAAIE,GACF,MAAM,IAAI,MAAM,uCAAyC,EAK3D,GAFAF,GAAe,GAEuBF,GAAQ,EAC5C,OAAO,IAAI,QAAc,CAAC6B,EAASC,IAAW,CAC5C7B,IAAa,UAAU,EAElB8B,GAAkB,EAAE,KAAK,CAAC,CAACC,EAAWC,CAAM,IAAM,CACrD,GAAI,CACFhC,GAAcgC,EACdhC,GAAY,QAAW2B,GAAmBE,EAAOF,CAAE,EACnD3B,GAAY,UAAYS,GACxBJ,GAAoB,CAACuB,EAASC,CAAM,EACpC,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAKV,EAAG,EAC5DvB,GAAY,YAAYiC,CAAO,EAC/B7B,GAAqB2B,CACvB,OAASG,EAAG,CACVL,EAAOK,CAAC,CACV,CACF,EAAGL,CAAM,CACX,CAAC,EAGD,GAAI,CACF,MAAMM,GAAsBZ,GAAI,IAAI,EACpC,MAAWa,GAAYb,EAAG,EAC1BrB,GAAc,EAChB,OAAS,EAAG,CACV,MAAAC,GAAU,GACJ,CACR,QAAE,CACAF,GAAe,EACjB,EAEJ,EAEaU,GAAkB,MAAM0B,GAAkC,CACrE,GAAsCtC,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW
,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAK,CAAC,OAAAI,EAAQ,IAAAd,EAAG,CAAC,EACpEvB,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAED,MAAWK,GAAOf,GAAKc,CAAM,CAEjC,EAEazB,GAAyB,MAAM2B,GACJxC,GAAQ,GAC5CS,GAAa,EACN,IAAI,QAAoC,CAACoB,EAASC,IAAW,CAClEtB,GAAiB,YAAa,CAACqB,EAASC,CAAM,CAAC,EAC/C,IAAMI,EAA0B,CAAC,KAAM,YAAa,GAAK,CAAC,OAAAM,CAAM,CAAC,EACjEvC,GAAa,YAAYiC,EAAS,CAACM,EAAO,MAAM,CAAC,CACnD,CAAC,GAEW3B,GAAuB2B,CAAM,EAIhC1B,GACT,MAAM2B,EAA8CC,IACR,CACtC,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI0C,GAAS,wBACX,MAAM,IAAI,MAAM,sEAAsE,EAExF,OAAAjC,GAAa,EACN,IAAI,QAAqC,CAACoB,EAASC,IAAW,CACnEtB,GAAiB,SAAU,CAACqB,EAASC,CAAM,CAAC,EAC5C,IAAMI,EAA0B,CAAC,KAAM,SAAU,GAAK,CAAC,MAAAO,EAAO,QAAS,CAAC,GAAGC,CAAO,CAAC,CAAC,EAC9EC,EAA+B,CAAC,EAClCF,aAAiB,YACnBE,EAAa,KAAKF,EAAM,MAAM,EAEhCxC,GAAa,YAAYiC,EAASS,CAAY,CAChD,CAAC,CACH,KACE,QAAY7B,GAAc2B,EAAOC,CAAO,CAE5C,EAEK3B,GAAiB,MAAM6B,GAAqC,CACvE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAMI,EAA0B,CAAC,KAAM,UAAW,GAAKU,CAAS,EAChE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEInB,GAAe6B,CAAS,CAEjC,EAEa5B,GAAM,MACf4B,EAAmBC,EAAwBC,EAA0BC,EACrEC,EAAqCN,IAAoE,CAC3G,GAAsC1C,GAAQ,EAAG,CAE/C,GAAI8C,EAAO,KAAKG,GAAKA,EAAE,CAAC,IAAM,KAAK,EACjC,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAID,EAAQ,KAAKC,GAAKA,CAAC,EACrB,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAAxC,GAAa,EACN,IAAI,QAAsC,CAACoB,EAASC,IAAW,CACpEtB,GAAiB,MAAO,CAACqB,EAASC,CAAM,CAAC,EACzC,IAAMoB,EAAqBJ,EACrBZ,EACF,CAAC,KAAM,MAAO,GAAK,CAAC,UAAAU,EAAW,aAAAC,EAAc,OAAQK,EAAoB,cAAAH,EAAe,QAAAL,CAAO,CAAC,EACpGzC,GAAa,YAAYiC,EAAciB,GAA2BD,CAAkB,CAAC,CACvF,CAAC,CACH,KACE,QAAYlC,GAAI4B,EAAWC,EAAcC,EAAQC,EAAeC,EAASN,CAAO,CAEpF,EAEazB,GAAe,MAAM2B,GAAqC,CACrE,GAAsC5C,GAAQ,EAC5C,OAAAS,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,gBAAiB,CAACqB,EAASC,CAAM,CAAC,EACnD,IAAMI,EAA0B,CAAC,KAAM,gBAAiB,GAAKU,CAAS,EACtE3C,GAAa,YAAYiC,CAAO,CAClC,CAAC,EAEIjB,GAAa2B,CAAS,CAE/B,ICvNA,IAWaQ,GAWAC,GAiBAC,GAvCbC,GAAAC,EAAA,kBAGAC,KAGAC,KACAC,IACAC,KACAC,KAEaT,GAAuB,CAACU,EAAgBC,IAA0C,CAC7F,OAAQD,EAAO,SAAU,CACvB,IAAK,MACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAMA,EAAO,KAAM,KAAK,EACtD,IAAK,aACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAM,CAAC,UAAWA,EAAO,SAAS,EAAG,YAAY,EAC/E,QACE,MAAM,IAAI,MAAM,0BAA0BA,EAAO,QAAQ,QAAQC,EAAQ,CAAC,EAAE,CAChF,CACF,EAEaV,GAAwBS,GAAmC,CACtE,OAAQA,EAAO,CAAC,EAAG,CACjB,IAAK,MACH,OAAO,IAAIE,GAAOF,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EACnD,IAAK,aAAc,CACjB,IAAMG,EAAWH,EAAO,CAAC,EACzB,GAAI,CAACI,GAAyBD,CAAQ,EACpC,MAAM,IAAI,MAAM,4BAA4BA,CAAQ,+BAA+B,EAErF,GAAM,CAAC,UAAAE,EAAW,SAAAC,EAAU,QAAAC,CAAO,EAAIP,EAAO,CAAC,EAC/C,OAAOE,GAAO,cAAcG,EAAW,CAAC,SAAAF,EAAU,KAAMH,EAAO,CAAC,EAAG,SAAAM,EAAU,QAAAC,CAAO,CAAC,CACvF,CACA,QACE,MAAM,IAAI,MAAM,0BAA0BP,EAAO,CAAC,CAAC,EAAE,CACzD,CACF,EAEaR,GAAN,KAA8E,CAMnF,MAAM,8BAA8BgB,EAAmD,CAErF,OAAOC,GAAuB,MAAMC,GAASF,CAAI,CAAC,CACpD,CAEA,MAAM,UAAUG,EAAiCC,EAA0D,CACzGC,GAAiB,EACjB,IAAIC,EAEA,OAAOH,GAAiB,SACtB,GAEFG,EAAQ,MAAMJ,GAASC,CAAY,EAInCG,EAAQ,MAAM,KAAK,8BAA8BH,CAAY,EAG/DG,EAAQH,EAGV,CAAC,KAAK,UAAW,KAAK,WAAY,KAAK,WAAW,EAAI,MAAMI,GAAcD,EAAOF,CAAO,EACxFI,GAAe,CACjB,CAEA,MAAM,SAAyB,CAC7B,OAAOC,GAAe,KAAK,SAAS,CACtC,CAEA,MAAM,IAAIC,EAAiCC,EAAqCP,EACzC,CACrCC,GAAiB,EACjB,IAAMO,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAChC,OAAO,QAAQH,CAAK,EAAE,QAAQI,GAAO,CACnC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,WAAW,QAAQD,CAAI,EAC1C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,kBAAkBD,CAAI,GAAG,EAE3CH,EAAW,KAAKpB,CAAM,EACtBqB,EAAa,KAAKG,CAAK,CACzB,CAAC,EAED,IAAMC,EAAkC,CAAC,EACnCC,EAA0B,CAAC,EACjC,OAAO,QAAQP,CAAO,EAAE,QAAQG,GAAO,CACrC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,YAAY,Q
AAQD,CAAI,EAC3C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,mBAAmBD,CAAI,GAAG,EAE5CE,EAAY,KAAKzB,CAAM,EACvB0B,EAAc,KAAKF,CAAK,CAC1B,CAAC,EAED,IAAMG,EACFP,EAAW,IAAI,CAACQ,EAAGC,IAAMvC,GAAqBsC,EAAG,IAAM,UAAU,KAAK,WAAWP,EAAaQ,CAAC,CAAC,CAAC,GAAG,CAAC,EACnGC,EAAUL,EAAY,IACxB,CAACG,EAAGC,IAAMD,EAAItC,GAAqBsC,EAAG,IAAM,WAAW,KAAK,YAAYF,EAAcG,CAAC,CAAC,CAAC,GAAG,EAAI,IAAI,EAElGE,EAAU,MAAMC,GAAI,KAAK,UAAWX,EAAcM,EAAQD,EAAeI,EAASlB,CAAO,EAEzFqB,EAAuC,CAAC,EAC9C,QAASJ,EAAI,EAAGA,EAAIE,EAAQ,OAAQF,IAClCI,EAAU,KAAK,YAAYP,EAAcG,CAAC,CAAC,CAAC,EAAIJ,EAAYI,CAAC,GAAKtC,GAAqBwC,EAAQF,CAAC,CAAC,EAEnG,OAAAb,GAAe,EACRiB,CACT,CAEA,gBAAuB,CAEvB,CAEA,cAAqB,CACdC,GAAa,KAAK,SAAS,CAClC,CACF,IC9HA,IAeaC,GAiDAC,GAhEbC,GAAAC,EAAA,kBAGAC,KAEAC,KACAC,KACAC,KAQaP,GAAkB,IAAY,CAoBzC,IAnBI,OAAOQ,GAAI,KAAK,aAAgB,UAAYA,GAAI,KAAK,YAAc,KACrEA,GAAI,KAAK,YAAc,GAGrBA,GAAI,KAAK,OAAS,IAEpB,QAAQ,KACJ,8HACyE,EAG3E,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,YAAe,UAAY,CAAC,OAAO,UAAUA,GAAI,KAAK,UAAU,GAAKA,GAAI,KAAK,YAAc,EAY9G,GAAI,OAAO,KAAS,KAAe,CAAC,KAAK,oBACvCA,GAAI,KAAK,WAAa,MACjB,CACL,IAAMC,EACF,OAAO,UAAc,IAAc,GAAQ,SAAS,EAAE,KAAK,EAAE,OAAS,UAAU,oBACpFD,GAAI,KAAK,WAAa,KAAK,IAAI,EAAG,KAAK,MAAMC,GAAsB,GAAK,CAAC,CAAC,CAC5E,CASJ,EAEaR,GAAN,KAAuD,CAS5D,MAAM,KAAKS,EAAoC,CAE7CV,GAAgB,EAGhB,MAAMW,GAAmC,EAGzC,MAAMC,GAAgBF,CAAW,CACnC,CAKA,MAAM,8BAA8BG,EAAiCC,EAChC,CACnC,IAAMC,EAAU,IAAIC,GACpB,aAAMD,EAAQ,UAAUF,EAAcC,CAAO,EACtC,QAAQ,QAAQC,CAAO,CAChC,CACF,IC7FA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,KAAA,IAIaA,GAJbC,GAAAC,EAAA,kBAGAC,KACaH,GAAc,IAAII,KCK/BC,KACAA,KAGAA,KCPO,IAAMC,GAAU,SDKvB,IAAOC,GAAQC,GAUe,CAC5B,IAAMC,EAA4C,cAAoC,YAGpFC,GAAgB,SAAUD,EAAa,CAAC,EACxCC,GAAgB,QAASD,EAAa,CAAC,EAEzCC,GAAgB,MAAOD,EAAa,EAAE,EACtCC,GAAgB,OAAQD,EAAa,EAAE,CACzC,CAEA,OAAO,eAAeE,GAAI,SAAU,MAAO,CAAC,MAAOC,GAAS,WAAY,EAAI,CAAC", + "names": ["backends", "backendsSortedByPriority", "registerBackend", "tryResolveAndInitializeBackend", "resolveBackendAndExecutionProviders", "init_backend_impl", "__esmMin", "name", "backend", "priority", "currentBackend", "i", "backendName", "backendInfo", "isInitializing", "e", "options", "eps", "backendHints", "backendNames", "errors", "availableBackendNames", "resolveResult", "err", "filteredEps", "target", "prop", "init_backend", "__esmMin", "init_backend_impl", "version", "init_version", "__esmMin", "logLevelValue", "env", "init_env_impl", "__esmMin", "init_version", "version", "value", "env", "init_env", "__esmMin", "init_env_impl", "tensorToDataURL", "tensorToImageData", "init_tensor_conversion_impl", "__esmMin", "tensor", "options", "canvas", "pixels2DContext", "width", "height", "inputformat", "norm", "normMean", "normBias", "stride", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "j", "R", "G", "B", "A", "image", "channels", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "bufferToTensor", "tensorFromImage", "tensorFromTexture", "tensorFromGpuBuffer", "tensorFromPinnedBuffer", "init_tensor_factory_impl", "__esmMin", "init_tensor_impl", "buffer", "options", "height", "width", "norm", "normMean", "normBias", "inputformat", "outputformat", "stride", "float32Data", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "Tensor", "image", "isHTMLImageEle", "isImageDataEle", "isImageBitmap", "isString", "data", "bufferToTensorOptions", "createCanvas", "createCanvasContext", "canvas", "pixels2DContext", "tempCanvas", "resolve", 
"reject", "context", "newImage", "img", "texture", "download", "dispose", "dims", "gpuBuffer", "dataType", "type", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "isTypedArrayChecked", "checkTypedArray", "init_tensor_impl_type_mapping", "__esmMin", "isBigInt64ArrayAvailable", "isBigUint64ArrayAvailable", "isFloat16ArrayAvailable", "calculateSize", "tensorReshape", "init_tensor_utils_impl", "__esmMin", "init_tensor_impl", "dims", "size", "i", "dim", "tensor", "Tensor", "Tensor", "init_tensor_impl", "__esmMin", "init_tensor_conversion_impl", "init_tensor_factory_impl", "init_tensor_impl_type_mapping", "init_tensor_utils_impl", "arg0", "arg1", "arg2", "checkTypedArray", "type", "dims", "expectedTypedArrayConstructor", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "data", "maybeDims", "typedArrayConstructor", "firstElementType", "mappedType", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "size", "calculateSize", "image", "options", "tensorFromImage", "texture", "tensorFromTexture", "gpuBuffer", "tensorFromGpuBuffer", "buffer", "tensorFromPinnedBuffer", "tensorToDataURL", "tensorToImageData", "releaseData", "tensorReshape", "Tensor", "init_tensor", "__esmMin", "init_tensor_impl", "TRACE", "TRACE_FUNC", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "init_trace", "__esmMin", "init_env_impl", "deviceType", "label", "env", "msg", "extraMsg", "stack", "hasTraceFunc", "i", "InferenceSession", "init_inference_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "init_trace", "_InferenceSession", "handler", "feeds", "arg1", "arg2", "TRACE_FUNC_BEGIN", "fetches", "options", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "TRACE_FUNC_END", "arg0", "arg3", "filePathOrUint8Array", "buffer", "byteOffset", "byteLength", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "InferenceSession", "init_inference_session", "__esmMin", "init_inference_session_impl", "init_tensor_conversion", "__esmMin", "init_tensor_factory", "__esmMin", "init_onnx_model", "__esmMin", "init_onnx_value", "__esmMin", "noBackendErrMsg", "TrainingSession", "init_training_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "_TrainingSession", "handler", "hasOptimizerModel", "hasEvalModel", "trainingOptions", "sessionOptions", "evalModel", "optimizerModel", "options", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "inputNames", "outputNames", "feeds", "arg1", "arg2", "fetches", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "trainableOnly", "array", "paramsSize", "TrainingSession", "init_training_session", "__esmMin", "init_training_session_impl", "esm_exports", "__export", "InferenceSession", "TRACE", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "Tensor", "TrainingSession", "env", "registerBackend", "init_esm", "__esmMin", "init_backend", "init_env", "init_inference_session", "init_tensor", "init_tensor_conversion", "init_tensor_factory", "init_trace", "init_onnx_model", "init_onnx_value", "init_training_session", "init_wasm_utils_env", "__esmMin", "main_exports", "__export", "main_default", "WORKER_NAME", "isProxyWorker", "init_main", "__esmMin", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "ev", "type", "message", "initializeWebAssembly", "initRuntime", "err", "epName", "env", "initEp", "buffer", "bufferData", "copyFromExternalBuffer", "model", "options", "createSession", "sessionMetadata", 
"releaseSession", "sessionId", "inputIndices", "inputs", "outputIndices", "run", "outputs", "o", "extractTransferableBuffers", "endProfiling", "urlOverride", "scriptSrc", "ort_wasm_simd_threaded_jsep_exports", "__export", "ort_wasm_simd_threaded_jsep_default", "e", "r", "init_ort_wasm_simd_threaded_jsep", "__esmMin", "x", "R", "L", "n", "H", "a", "D", "F", "P", "s", "B", "u", "I", "f", "$", "l", "c", "d", "b", "m", "p", "h", "g", "v", "t", "zr", "o", "i", "Qr", "y", "w", "A", "_", "C", "O", "j", "T", "S", "W", "Pn", "E", "_n", "Rr", "Te", "_e", "M", "gn", "Se", "yn", "Tn", "Hr", "Cn", "oe", "N", "k", "U", "G", "Y", "z", "V", "q", "J", "X", "K", "Q", "Z", "ee", "re", "te", "ne", "ae", "ue", "se", "Ee", "xe", "He", "De", "Ie", "Ue", "Ge", "$e", "Ye", "Le", "ze", "Ve", "qe", "Je", "Xe", "Ze", "ir", "ur", "hr", "vr", "yr", "wr", "Ar", "Er", "Mr", "xr", "Nr", "Dr", "Pr", "Br", "Ir", "$r", "rt", "nt", "it", "cr", "ut", "dt", "bt", "mt", "pt", "ht", "gt", "vt", "yt", "wt", "At", "_t", "Ct", "St", "Wt", "Et", "Mt", "xt", "Rt", "Ht", "Dt", "Ft", "Pt", "It", "Ut", "Bt", "Gt", "$t", "Yt", "qt", "Jt", "Zt", "en", "he", "rn", "tn", "nn", "on", "me", "ln", "cn", "ie", "Re", "et", "fe", "le", "ce", "ge", "Oe", "Ce", "ve", "we", "de", "be", "xn", "Mn", "On", "En", "pe", "ye", "Ae", "jn", "je", "Wn", "Nn", "We", "Me", "Ne", "ke", "Fe", "Pe", "Be", "Ke", "Qe", "er", "rr", "tr", "nr", "ar", "or", "sr", "fr", "lr", "dr", "br", "mr", "pr", "gr", "An", "wn", "_r", "Cr", "Or", "jr", "Tr", "Sr", "Wr", "kr", "Sn", "Fr", "mn", "Ur", "hn", "Gr", "Yr", "Lr", "Vr", "qr", "Jr", "Xr", "Kr", "Zr", "Hn", "pn", "kn", "Dn", "tt", "at", "ot", "st", "ft", "lt", "ct", "Ot", "jt", "Tt", "Nt", "kt", "Lt", "zt", "Vt", "Xt", "Kt", "Qt", "an", "sn", "un", "fn", "dn", "bn", "Rn", "vn", "Fn", "scriptSrc", "origin", "isSameOrigin", "preload", "createProxyWorker", "importProxyWorker", "embeddedWasmModule", "importWasmModule", "init_wasm_utils_import", "__esmMin", "init_wasm_utils_env", "filename", "prefixOverride", "baseUrl", "absoluteUrl", "blob", "url", "urlOverride", "isMultiThreaded", "wasm", "initialized", "initializing", "aborted", "isMultiThreadSupported", "isSimdSupported", "initializeWebAssembly", "getInstance", "init_wasm_factory", "__esmMin", "init_wasm_utils_import", "flags", "timeout", "numThreads", "multiThreadSupported", "wasmPaths", "wasmPrefixOverride", "mjsPathOverrideFlag", "mjsPathOverride", "wasmPathOverrideFlag", "wasmPathOverride", "objectUrl", "ortWasmFactory", "importWasmModule", "isTimeout", "tasks", "resolve", "reject", "config", "fileName", "scriptDirectory", "module", "what", "allocWasmString", "iterateExtraOptions", "checkLastError", "init_wasm_utils", "__esmMin", "init_wasm_factory", "data", "allocs", "wasm", "getInstance", "dataLength", "dataOffset", "options", "prefix", "seen", "handler", "key", "value", "name", "message", "stack", "paramsOffset", "errorCode", "errorMessagePointer", "errorMessage", "setRunOptions", "init_run_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "options", "wasm", "getInstance", "runOptionsHandle", "allocs", "runOptions", "tagDataOffset", "allocWasmString", "checkLastError", "iterateExtraOptions", "key", "value", "keyDataOffset", "valueDataOffset", "e", "alloc", "getGraphOptimzationLevel", "getExecutionMode", "appendDefaultOptions", "setExecutionProviders", "setSessionOptions", "init_session_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "graphOptimizationLevel", "executionMode", "options", "session", "ep", "sessionOptionsHandle", 
"executionProviders", "allocs", "epName", "deviceType", "keyDataOffset", "allocWasmString", "valueDataOffset", "getInstance", "checkLastError", "webgpuOptions", "epNameDataOffset", "wasm", "sessionOptions", "logIdDataOffset", "logSeverityLevel", "logVerbosityLevel", "optimizedModelFilePathOffset", "name", "value", "nameOffset", "iterateExtraOptions", "key", "e", "alloc", "tensorDataTypeStringToEnum", "tensorDataTypeEnumToString", "getTensorElementSize", "tensorTypeToTypedArrayConstructor", "logLevelStringToEnum", "isGpuBufferSupportedType", "dataLocationStringToEnum", "init_wasm_common", "__esmMin", "type", "typeProto", "dateType", "logLevel", "location", "loadFile", "init_wasm_utils_load_file", "__esmMin", "init_wasm_utils_env", "file", "readFile", "e", "createReadStream", "stream", "chunks", "chunk", "response", "contentLengthHeader", "fileSize", "reader", "buffer", "pages", "offset", "done", "value", "chunkSize", "logLevelPrefix", "doLog", "configLogLevel", "debug", "configureLogger", "LOG", "LOG_DEBUG", "init_log", "__esmMin", "init_wasm_common", "level", "message", "$configLogLevel", "$debug", "logLevel", "msg", "messageLevel", "logLevelStringToEnum", "configLevel", "args", "createView", "init_tensor_view", "__esmMin", "init_wasm_common", "dataBuffer", "type", "tensorTypeToTypedArrayConstructor", "init_types", "__esmMin", "bucketFreelist", "bucketArr", "calcNormalizedBufferSize", "calcBucketBufferSize", "guid", "createNewGpuDataId", "downloadGpuData", "GpuDataManagerImpl", "createGpuDataManager", "init_gpu_data_manager", "__esmMin", "init_log", "init_types", "size", "idx", "sizeForBucket", "backend", "gpuBuffer", "originalSize", "getTargetBuffer", "bufferSize", "gpuReadBuffer", "commandEncoder", "arrayBuffer", "targetBuffer", "key", "id", "data", "srcArrayBuffer", "srcOffset", "srcLength", "gpuDataCache", "gpuBufferForUploading", "LOG_DEBUG", "sourceId", "destinationId", "sourceGpuDataCache", "destinationGpuDataCache", "buffer", "previousBuffer", "usage", "isStorage", "isUniform", "buffers", "gpuData", "cachedData", "maxInFreeList", "freelist", "capturedBuffers", "storage", "sessionId", "pendingBuffers", "args", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "MatMulUtil", "BroadcastUtil", "ShapeUtil", "PoolConvUtil", "GemmUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", "a", "b", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "i", "aLen", "bLen", "max", "shape", "finalShape", "inputRank", "finalRank", "_ShapeUtil", "dims", "size", "rank", "newDims", "axis", "start", "end", "strides", "tensorRank", "axes", "x", "perm", "v", "pad", "shape1", "shape2", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "dim", "isChannelLast", "autoPad", "outputDims", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "leftShape", "transLeft", "rightShape", "transRight", "biasShape", "M", "K", "N", "kDim", "WORKGROUP_SIZE", "getWgslMappedType", "tensorTypeToWsglStorageType", "tensorTypeToWsglValueType", "createTensorShapeVariables", "getMaxComponents", "fillVector", "castToF32", "sumVector", "getElementAt", "createIndicesHelper", "inputVariable", "outputVariable", "internalVariable", "ShaderHelperImpl", "createShaderHelper", "getBroadcastDims", "init_common", "__esmMin", "init_wasm_common", "init_util", "type", "components", "mappedType", "dims", "programUniforms", "dim", "ShapeUtil", "size", 
"dataType", "value", "name", "index", "length", "tensorType", "shapeOrRank", "usage", "useUniform", "rank", "rankIdentity", "indicesType", "valueType", "storageType", "normalizeDim", "implementationUsed", "uniformPrefix", "shape", "strides", "o2iSnippet", "i", "offsetToIndicesImplementation", "offsetToIndices", "varOffset", "offsets", "indicesToOffsetImplementation", "indicesToOffset", "varIndices", "indices", "init", "indicesGet", "idx", "indicesSet", "broadcastedIndicesToOffsetImplementation", "broadcastedIndicesToOffset", "output", "implKey", "setByOffset", "offset", "getByOffset", "getByIndicesImplementation", "getImplementation", "functionParams", "dimsParams", "get", "normalizedIndices", "getByIndices", "setByIndicesImplementation", "setImplementation", "impls", "needShapeStrides", "impl", "indicesAndValue", "normalizedDispatchGroup", "limits", "workgroupSize", "workgroupSizeX", "workgroupSizeY", "workgroupSizeZ", "is1DimensionDispatch", "paramList", "globalIdxDefinition", "variable", "bindingIndex", "access", "variables", "v", "additionalUniforms", "uniformSnippets", "typeTemp", "uniformWgslTypeToDataType", "u", "dispatchGroup", "inShape", "outShape", "inRank", "validateInputs", "getAdjustedPerm", "getOutputShape", "permFunctionBody", "createTransposeProgramInfo", "transpose", "parseTransposeAttributes", "init_transpose", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "inputRank", "perm", "inputShape", "ShapeUtil", "rank", "input", "output", "reverseFunc", "inputTensor", "permAttr", "inputDataType", "outputShape", "outputVariable", "inputVariable", "getShaderSource", "wgslType", "workgroupSize", "shaderHelper", "outputSize", "createTensorShapeVariables", "context", "attributes", "createAttributeWithCacheKey", "reduceOps", "reduceSharedOps", "reduceInitValues", "reduceOutputValues", "getInnerMostAxes", "computeOutAndReduceShapes", "expandShapeToKeepDim", "areAxesInnerMostDims", "getAxesPermutation", "createReduceSharedProgramInfo", "reduceCommon", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "init_reduce_shared", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_reduce", "init_transpose", "numInnerAxes", "rank", "res", "i", "shape", "axes", "outputShape", "dim", "reduceShape", "expandShape", "shapeIdx", "axis", "name", "shaderCache", "inputs", "reduceType", "outputDataType", "inputShape", "outputSize", "ShapeUtil", "reduceSize", "input", "inputVariable", "output", "outputVariable", "workgroupSize", "sharedMemorySnippet", "shaderHelper", "context", "attributes", "updatedAttributes", "createReduceAttributesFromInputs", "updatedAxes", "_dim", "normalizeAxes", "permutedAxes", "createTransposeProgramInfo", "finalOutputShape", "validateInputs", "noOp", "createReduceProgramInfo", "createReduceAttributesFromInputs", "runReduceProgram", "reduceLogSumNaive", "reduceL1Naive", "reduceL2Naive", "reduceLogSumExpNaive", "reduceMaxNaive", "reduceMeanNaive", "reduceMinNaive", "reduceProdNaive", "reduceSumNaive", "reduceSumSquareNaive", "useNaiveReduceMethod", "reduceMean", "reduceL1", "reduceL2", "reduceLogSumExp", "reduceMax", "reduceMin", "reduceProd", "reduceSum", "reduceSumSquare", "reduceLogSum", "init_reduce", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "init_reduce_shared", "inputs", "input", "name", "shaderCache", 
"reduceOp", "axesInput", "outputDataType", "keepDims", "noopWithEmptyAxes", "outputShape", "inputShape", "inputRank", "axes", "ShapeUtil", "reduceOnAllAxes", "d", "i", "outputRank", "outputSize", "shaderHelper", "idxCopy", "inputVariable", "output", "outputVariable", "ops", "reduceOps", "k", "l", "createTensorShapeVariables", "attributes", "v", "createAttributeWithCacheKey", "context", "updatedAttributes", "_output", "idxZero", "size", "shape", "reduceSize", "dim", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "validateInputs", "argMin", "argMax", "parseArgMinMaxAttributes", "init_argminmax", "__esmMin", "init_wasm_common", "init_attribute_with_cache_key", "init_reduce", "inputs", "context", "attributes", "argMinMaxOp", "input", "output", "axes", "idxZero", "k", "createReduceProgramInfo", "createAttributeWithCacheKey", "validateAttentionInputs", "createInPlaceSoftmaxProgramInfo", "createAttentionProbsProgramInfo", "createVxAttentionScoreProgramInfo", "applyAttention", "prepare", "attention", "init_attention", "__esmMin", "init_wasm_common", "init_types", "init_common", "inputs", "attributes", "input", "weights", "bias", "maskIndex", "past", "relativePositionBias", "batchSize", "sequenceLength", "inputHiddenSize", "qHiddenSize", "kHiddenSize", "vHiddenSize", "sz", "kvSequenceLength", "pastSequenceLength", "totalSequenceLength", "maxSequenceLength", "maskType", "_context", "n", "d", "components", "getMaxComponents", "WG", "dComp", "elementsPerThread", "programUniforms", "dataType", "tensorTypeToWsglStorageType", "f32Type", "tensorTypeToWsglValueType", "getShaderSource", "shaderHelper", "inputHelper", "outputVariable", "uniforms", "context", "q", "key", "pastKey", "parameters", "probsShape", "presentKey", "presentKeyShape", "alpha", "vectorizedHeadSize", "TILE_SIZE", "dispatch", "inputDependencies", "outputs", "qInput", "inputVariable", "kInput", "inputVars", "pastKeyInput", "output", "outputVars", "probs", "v", "pastValue", "params", "nReps", "repeatedVHiddenSize", "presentValue", "presentValueShape", "outputShape", "probsHelper", "vHelper", "k", "_maskIndex", "_past", "outputCount", "inputsK", "inputsV", "M", "K", "N", "outputQ", "outputK", "outputV", "weight", "validateInputs", "createBatchNormInferenceProgramInfo", "parseBatchNormAttributes", "batchNorm", "init_batch_norm", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "checkShapeEqual", "actual", "expected", "message", "r", "v", "i", "shape", "epsilon", "spatial", "format", "yShape", "components", "getMaxComponents", "cComponents", "outputSize", "ShapeUtil", "useShapesUniforms", "shapeOrRank", "x", "inputVariable", "scale", "bias", "inputMean", "inputVar", "y", "outputVariable", "calcCOffset", "cOffset", "getInferenceModeShaderSource", "helper", "createTensorShapeVariables", "createAttributeWithCacheKey", "context", "outputCount", "updatedAttributes", "env", "validateInputs", "createBiasAddProgramInfo", "biasAdd", "init_bias_add", "__esmMin", "init_util", "init_common", "inputs", "outputShape", "channels", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "bias", "residual", "output", "outputVariable", "shaderHelper", "context", "createElementwiseProgramShader", "createElementwiseProgramInfo", "abs", "acos", "acosh", "asin", "asinh", "atan", "atanh", "parseCastAttributes", "cast", 
"generateClipAttributesFromInputs", "clip", "ceil", "cos", "cosh", "parseAlphaAttributes", "elu", "erfImpl", "erf", "exp", "floor", "gelu", "leakyRelu", "not", "neg", "reciprocal", "relu", "sigmoid", "parseHardSigmoidAttributes", "hardSigmoid", "sin", "sinh", "sqrt", "tan", "tanhExpression", "tanh", "fastGeluImpl", "fastGeluExpression", "fastGelu", "thresholdedRelu", "log", "quickGeluImpl", "quickGeluExpression", "quickgelu", "init_unary_op", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "shaderHelper", "datasize", "inputDataType", "outputDataType", "funcCall", "additionalImplementation", "vecSize", "expression", "input", "inputVariable", "output", "outputVariable", "name", "cacheKey", "ShapeUtil", "inputTensors", "context", "attributes", "createAttributeWithCacheKey", "func", "inputs", "min", "MIN_CLIP", "max", "MAX_CLIP", "clipAttributes", "dataType", "tensorTypeToWsglValueType", "a", "varType", "x", "alpha", "dType", "validateInputs", "createBiasSplitGeluProgramInfo", "biasSplitGelu", "init_bias_split_gelu", "__esmMin", "init_util", "init_common", "init_unary_op", "inputs", "outputShape", "input", "inputVariable", "bias", "output", "outputVariable", "outputSize", "ShapeUtil", "dataType", "tensorTypeToWsglStorageType", "shaderHelper", "erfImpl", "context", "createBinaryOpProgramShader", "createBinaryOpProgramInfo", "runBinaryOp", "add", "div", "equal", "mul", "pow", "sub", "greater", "less", "greaterOrEqual", "lessOrEqual", "init_binary_op", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "dimsA", "dimsB", "dimsOutput", "vectorize", "doBroadcast", "sharedDimensionDivisibleBy4", "funcCall", "typeA", "typeB", "typeOutput", "additionalImplementation", "expressionScalar", "expressionVector", "a", "b", "output", "outputVariable", "inputVariable", "assignment", "isAOneElement", "ShapeUtil", "isBOneElement", "aLastDimDivisibleBy4", "bLastDimDivisibleBy4", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "name", "cacheKey", "outputDataType", "isBroadcast", "outputShape", "outputSize", "cacheKeyAux", "calculatedShape", "BroadcastUtil", "sharedDimension", "i", "dimA", "dimB", "createTensorShapeVariables", "context", "type", "validateInputs", "calculateInputIndexImpl", "assignOutputData", "createConcatProgramInfo", "concat", "parseConcatAttributes", "init_concat", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "axis", "referenceIndex", "referenceInput", "inputType", "inputRank", "input", "i", "dim", "numberOfTensors", "sizeInConcatAxisStr", "output", "codeLines", "returnSnippet", "adjustedAxis", "outputShape", "dataType", "outputSize", "ShapeUtil", "sizeInConcatAxis", "inputVars", "previousSum", "inputDependencies", "inputRanks", "programUniforms", "inputVariable", "createTensorShapeVariables", "outputVariable", "indicesAxis", "getShaderSource", "shaderHelper", "context", "attributes", "inputShape", "sum", "nonEmptyInputs", "createAttributeWithCacheKey", "getActivationSnippet", "appendActivationUniformsData", "appendActivationUniforms", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_wasm_common", "init_util", "attributes", "valueType", "baseType", "programUniform", "uniforms", "activation", "alpha", "beta", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "typeSnippet", "biasSnippet", "init_activation_util", "__esmMin", "component", "dataType", "hasBias", "utilFunctions", "init_conv_util", "__esmMin", "strideStr", 
"writeDataToSubAVec4Snippet", "calculateResultSnippet", "makeMatMulPackedVec4Source", "writeDataToSubASnippet", "readDataFromSubASnippet", "makeMatMulPackedSource", "matMulReadWriteFnSource", "createMatmulProgramInfo", "init_matmul_packed_webgpu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_fuse_utils", "init_activation_util", "transpose", "batchDims", "transposeA", "innerElementSize", "workPerThread", "workgroupSize", "type", "tileInner", "splitK", "splitedDimInner", "tileAOuter", "tileBOuter", "tileAWidth", "tileAHight", "rowPerThreadB", "sequentialAccessByThreads", "rowPerThreadA", "colPerThreadA", "matmulSnippet", "component", "hasBias", "applyActivation", "variables", "batchShapes", "isChannelsLast", "batchAShape", "batchBShape", "batchShape", "batchVariable", "aVariable", "bVariable", "outputVariable", "broadCastADims", "getBroadcastDims", "broadCastBDims", "dataType", "tensorTypeToWsglStorageType", "getAIndices", "aRank", "batchRank", "resStr", "i", "j", "getBIndices", "bRank", "typeSnippet", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "aShape", "bShape", "outerDimsA", "outerDimsB", "outerDims", "batchSize", "ShapeUtil", "dimAOuter", "dimInner", "dimBOuter", "isVec4", "elementsPerThread", "dispatch", "components", "aShapeTemp", "bShapeTemp", "outputShapeTemp", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "internalVariable", "A", "inputVariable", "B", "output", "inputVariables", "biasComponents", "uniforms", "appendActivationUniforms", "baseType", "getActivationSnippet", "declareFunctions", "conv2dCommonSnippet", "createConv2DMatMulProgramInfo", "init_conv2d_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "fitAOuter", "fitBOuter", "fitInner", "addBias", "attributes", "innerElementSizeX", "innerElementSizeW", "innerElementSize", "dataType", "getXSnippet", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readXSnippet", "typeSnippet", "sampleX", "sampleW", "resType", "aType", "bType", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileAOuter", "tileBOuter", "tileInner", "elementsSize", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "uniforms", "appendActivationUniforms", "components", "t", "tensorTypeToWsglStorageType", "declareFunctions", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "bias", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "arrayProduct", "parse3TupleParam", "getEffectiveFilterSize", "computeDefaultPad", "computeOutputShape4D", "get3DPadAndOutInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "init_conv3d_naive_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "arr", "product", "i", "param", "filterSize", "dilation", "inputShape", "fieldSize", "stride", "effectiveFieldSize", "inShape", "filterShape", "outChannels", "strides", "zeroPad", "outShape", "index", "pad", "inDepth", "inHeight", "inWidth", 
"strideDepth", "strideHeight", "strideWidth", "filterDepth", "filterHeight", "filterWidth", "padInfo", "outDepth", "outHeight", "outWidth", "val", "_", "padAlongDepth", "padAlongHeight", "padAlongWidth", "front", "back", "top", "bottom", "left", "right", "dilations", "depthwise", "dataFormat", "batchSize", "inChannels", "filterChannels", "dilationDepth", "dilationHeight", "dilationWidth", "effectiveFilterDepth", "effectiveFilterHeight", "effectiveFilterWidth", "inputs", "attributes", "outputShape", "filterDims", "pads", "isChannelsLast", "isVec4", "workGroupSize", "dispatchLayout", "dispatch", "d", "LOG_DEBUG", "innerElementSize", "outputSize", "ShapeUtil", "programUniforms", "createTensorShapeVariables", "inputDependencies", "hasBias", "getShaderSource", "shaderHelper", "uniforms", "components", "t", "tensorTypeToWsglStorageType", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "declareFunctions", "bias", "getElementAt", "createGroupedConvProgramInfo", "createGroupedConvVectorizeProgramInfo", "init_conv_grouped", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_conv", "init_fuse_utils", "inputs", "attributes", "squeezeOutputShapeFunction", "hasBias", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "isChannelLast", "outputShape", "calculateOutputShape", "outputSize", "ShapeUtil", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "x", "inputVariable", "w", "inputVars", "uniforms", "appendActivationUniforms", "components", "getMaxComponents", "outputNumber", "outputShapeInShader", "xNumber", "createNaiveMatmulProgramInfo", "validateInputs", "matMul", "init_matmul", "__esmMin", "init_wasm_common", "init_util", "init_matmul_packed_webgpu", "init_common", "init_fuse_utils", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "isChannelsLast", "aShape", "bShape", "M", "N", "K", "components", "getMaxComponents", "aComponents", "outputNumber", "outputSize", "ShapeUtil", "hasBias", "outerDims", "outputShapeInShader", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "batchDims", "internalVariable", "a", "inputVariable", "b", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "inputVariables", "processBias", "biasComponents", "outerDimsA", "outerDimsB", "broadCastADims", "getBroadcastDims", "broadCastBDims", "uniforms", "appendActivationUniforms", "getIndices", "variable", "broadCastDims", "rank", "name", "batchRank", "resStr", "i", "j", "calcResult", "calcStr", "context", "BroadcastUtil", "createMatmulProgramInfo", "calculateOutputShape", "weightTransposeAttribute", "validateInputs", "getAdjustedConvAttributes", "parseConvAttributes", "conv2d", "conv1d", "conv3d", "conv", "init_conv", "__esmMin", "init_util", "init_conv2d_mm_webgpu", "init_conv3d_naive_webgpu", "init_matmul_packed_webgpu", "init_conv_grouped", "init_fuse_utils", "init_matmul", "init_transpose", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "isChannelLast", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputShape", "inputs", "attributes", "dataChannel", "filterInChannel", "pads", "PoolConvUtil", "newAttributes", "activationAttributes", 
"parseInternalActivationAttributes", "format", "autoPad", "group", "wIsConst", "context", "adjustedAttributes", "isChannelsLast", "transposedWeight", "createTransposeProgramInfo", "convInputs", "createGroupedConvVectorizeProgramInfo", "createGroupedConvProgramInfo", "hasBias", "inputHeight", "inputWidth", "inputChannels", "weightHeight", "weightWidth", "outHeight", "outWidth", "sameSize", "batch", "xReshaped", "wReshaped", "matmulOutputShape", "matmulInputs", "sharedDim", "N", "K", "createNaiveMatmulProgramInfo", "createMatmulProgramInfo", "sequentialAccessByThreads", "dimAOuter", "dimBOuter", "dimInner", "createConv2DMatMulProgramInfo", "convInfo", "computeConv3DInfo", "createConv3DNaiveProgramInfo", "conv2dTransposeCommonSnippet", "createConv2DTransposeMatMulProgramInfo", "init_conv_backprop_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "addBias", "attributes", "type", "innerElementSize", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readASnippet", "sampleA", "sampleW", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileInner", "components", "filterDims", "effectiveFilterDims", "pads", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "x", "inputVariable", "w", "output", "outputVariable", "inputVariables", "declareFunctions", "bias", "uniforms", "appendActivationUniforms", "elemType", "tensorTypeToWsglStorageType", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "createConvTranspose2DOpProgramShaderSource", "createConvTranspose2DProgramInfo", "init_conv_backprop_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "shaderHelper", "inputs", "outputShape", "hasBias", "is1DimensionDispatch", "isVec4", "dataType", "uniforms", "isChannelsLast", "rowDim", "colDim", "channelDim", "workPerThread", "declareFunctions", "components", "w", "inputVariable", "dy", "inputVariables", "output", "outputVariable", "codeSnippet4", "codeSnippet", "attributes", "squeezeOutputShapeFunction", "outputSize", "ShapeUtil", "dispatch", "LOG_DEBUG", "inputDependencies", "strides", "filterDims", "dilations", "effectiveFilterDims", "pads", "group", "wShape", "inputChannelsPerGroup", "outputChannelsPerGroup", "programUniforms", "createTensorShapeVariables", "getShaderSource", "tensorTypeToWsglStorageType", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "weightTransposePerm", "convTranspose2d", "convTranspose1d", "convTranspose", "init_conv_transpose", "__esmMin", "init_conv_backprop_mm_webgpu", "init_conv_backprop_webgpu", "init_fuse_utils", "init_transpose", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "group", "strides", "isChannelLast", "outputPadding", "outputShape", "spatialRank", "updateOutputShape", "i", "batchSize", "outChannels", "j", "inSize", "attributes", "inputs", "a", "b", "isChannelsLast", 
"newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "wIsConst", "dataChannel", "filterInChannel", "featureMaps", "context", "adjustedAttributes", "inputChannels", "createConvTranspose2DProgramInfo", "outHeight", "outWidth", "weightHeight", "weightWidth", "dimAOuter", "dimBOuter", "dimInner", "sequentialAccessByThreads", "transposedWeight", "createTransposeProgramInfo", "convTransposeInputs", "hasBias", "createConv2DTransposeMatMulProgramInfo", "createCumsumProgramInfo", "cumsum", "parseCumSumAttributes", "init_cumsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputType", "inputShape", "axisInput", "attributes", "outputSize", "ShapeUtil", "rank", "input", "inputVariable", "output", "outputVariable", "axisValue", "axis", "getShaderSource", "shaderHelper", "index", "max", "getElementAt", "lowerLimit", "upperLimit", "createTensorShapeVariables", "context", "exclusive", "reverse", "createAttributeWithCacheKey", "validateInputs", "permFunctionBody", "createDepthToSpaceProgramInfo", "depthToSpace", "parseDepthToSpaceAttributes", "init_depth_to_space", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "perm", "rank", "input", "output", "reverseFunc", "inputTensor", "attributes", "n", "h", "w", "c", "shape", "isChannelLast", "blocksize", "isDCRmode", "reshapedInputTensor", "reshapedInputRank", "inputDataType", "reshapedInput", "inputVariable", "permedOutput", "outputVariable", "getShaderSource", "shaderHelper", "outputShape", "outputSize", "ShapeUtil", "shapeBeforePerm", "shapeAfterPerm", "createTensorShapeVariables", "context", "createAttributeWithCacheKey", "symbolPattern", "termPattern", "termPatternOnly", "lhsPattern", "lhsPatternOnly", "EinsumTerm", "EinsumEquation", "appendMax", "createEinsumProgramInfo", "einsum", "parseEinsumAttributes", "init_einsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputIndex", "symbol", "index", "value", "inputs", "equation", "lhs", "rhs", "inputTerm", "dims", "einsumTerm", "sym", "info", "dimValue", "term", "isInput", "rank", "ellipsis", "ellipsisDims", "nextDim", "indexSymbols", "i", "ellipsisDimLength", "j", "name", "inputShapes", "dataType", "einsumEquation", "outputShape", "inputVars", "inputVariable", "outputSize", "ShapeUtil", "output", "outputVariable", "uniformsSymbols", "getShaderSource", "shaderHelper", "idxCopy", "initProd", "initSum", "updateSum", "reduceOpsSetIndices", "reduceOpsLoopHeaders", "reduceOpsLoopFooters", "reduceOpCompute", "isReduceOpsWithoutLoop", "outputIndex", "indices", "reduceOps", "inputVar", "_var", "programUniformsInit", "programUniforms", "_", "createTensorShapeVariables", "acc", "inputProgramUniforms", "context", "attributes", "input", "createAttributeWithCacheKey", "validateInputs", "getAdjustedShape", "calculateOutputShape", "createExpandProgramInfo", "expand", "init_expand", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "inputShape", "shape", "shapeIndex", "inputShapeIndex", "shape1", "shape2", "diff", "i", "outputShape", "dataType", "components", "outputSize", "ShapeUtil", "getShaderSource", "shaderHelper", "input", "inputVariable", "output", "outputVariable", "assignment", "singleAssignment", "resStr", "x", "typeCast", "programUniforms", "createTensorShapeVariables", "context", "createFastGeluProgramInfo", "fastGelu", "init_fast_gelu", "__esmMin", "init_wasm_common", "init_util", "init_common", 
"init_unary_op", "inputTensors", "dataType", "outputSize", "ShapeUtil", "biasLength", "useVec4", "getShaderSource", "shaderHelper", "x", "inputVariable", "bias", "y", "outputVariable", "uniforms", "singleElementBias", "i", "biasGetExpression", "fastGeluImpl", "tensorTypeToWsglValueType", "WORKGROUP_SIZE", "fastGeluExpression", "inputs", "context", "validateInputs", "createGatherProgramInfo", "parseGatherAttributes", "gather", "init_gather", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "indicesShape", "inputRank", "axis", "ShapeUtil", "outputShape", "axisDimLimit", "components", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "data", "inputVariable", "indices", "output", "outputVariable", "calcDataIndices", "x", "indicesRank", "calcStr", "i", "j", "assignment", "singleAssignment", "resStr", "typeCast", "createAttributeWithCacheKey", "context", "validateInputs", "createGatherElementsProgramInfo", "parseGatherElementsAttributes", "gatherElements", "init_gather_elements", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "inputOutputDataType", "inputRank", "indicesShape", "indicesDataType", "axis", "ShapeUtil", "axisDimLimit", "outputShape", "outputSize", "input", "inputVariable", "indices", "output", "outputVariable", "programUniforms", "createTensorShapeVariables", "shaderHelper", "createAttributeWithCacheKey", "context", "validateInputs", "createGemmProgramInfo", "parseGemmAttributes", "gemm", "init_gemm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "aShape", "bShape", "M", "N", "K", "GemmUtil", "outputShape", "outputSize", "ShapeUtil", "programUniforms", "inputDependencies", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "line", "calculateAlpha", "a", "inputVariable", "b", "dataType", "c", "variables", "output", "outputVariable", "uniforms", "transA", "transB", "alpha", "beta", "context", "getInput", "validateInputs", "parseMultiHeadAttentionAttributes", "weightTransposeAttribute", "addBiasTranspose", "maybeTransposeToBNSHAndAddBias", "multiHeadAttention", "init_multihead_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_types", "init_attention", "init_common", "init_transpose", "inputs", "i", "ShapeUtil", "attributes", "query", "key", "value", "bias", "keyPaddingMask", "relativePositionBias", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "qkvFormat", "maskType", "maskDims", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "createAttributeWithCacheKey", "context", "qkv", "biasOffset", "outputShape", "outputSize", "programUniforms", "getShaderSource", "shaderHelper", "output", "outputVariable", "qkvInput", "inputVariable", "biasInput", "uniforms", "numHeads", "input", "reshapedInput", "createTransposeProgramInfo", "params", "kvBNSH", "Q", "applyAttention", "K", "V", "getRepeats", "validateInputs", "getOutputShape", "createTileProgramInfo", "tile", "init_tile", "__esmMin", "init_wasm_common", "init_util", "init_common", "repeatsTensorView", "inputs", "inputShape", "repeats", "outputShape", "i", "shape", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "output", "outputVariable", "getShaderSource", "shaderHelper", 
"createTensorShapeVariables", "context", "validateInputs", "createConcatProgramInfo", "parseGroupQueryAttentionAttributes", "weightTransposeAttribute", "maybeExpandAndTransposeToBNSH", "groupQueryAttention", "init_group_query_attention", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_attention", "init_common", "init_multihead_attention", "init_tile", "init_transpose", "inputs", "attributes", "query", "key", "value", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "hasPastKey", "hasPastValue", "isPastkvBSNH", "qkvFormat", "maskType", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "a", "b", "dataType", "params", "outputShape", "component", "outputSize", "ShapeUtil", "presentSequenceLength", "output", "outputVariable", "inputA", "inputVariable", "inputB", "H", "dispatch", "inputDependencies", "programUniforms", "createTensorShapeVariables", "uniforms", "pastStr", "newStr", "concatStr", "getShaderSource", "shaderHelper", "createAttributeWithCacheKey", "context", "input", "pastKV", "outputIndex", "reshapedInput", "numHeads", "nReps", "createTileProgramInfo", "createTransposeProgramInfo", "Q", "maybeTransposeToBNSHAndAddBias", "K", "V", "applyAttention", "createInstanceNormProgramInfo", "computeMean", "createInstanceNormNHWCProgramInfo", "instanceNorm", "init_instance_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "xShape", "outputShape", "axis", "normCount", "ShapeUtil", "normSize", "components", "getMaxComponents", "normPackedSize", "inputShape", "inputDependencies", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "x", "inputVariable", "scale", "bias", "output", "outputVariable", "variables", "dataType", "f32Type", "workgroupSize", "uniforms", "sumVector", "context", "input", "n", "h", "c", "epsilon", "WG", "outputType", "sumCastType", "setOutputValue", "var1", "var2", "unitsOfWork", "wgSize", "meanInputDependencies", "meanProgramUniforms", "getMeanShaderSource", "inputHelper", "fillVector", "meanValues", "scaleHelper", "biasHelper", "N", "C", "H", "outputSize", "channelScaleShift", "tensorTypeToWsglStorageType", "scaleType", "scaleCastType", "outputHelper", "validateInputs", "createLayerNormProgramInfo", "layerNorm", "init_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "outputCount", "simplified", "xShape", "scale", "bias", "outputShape", "axis", "ShapeUtil", "normCount", "normSize", "scaleSize", "biasSize", "meanInvStdDevDim", "i", "components", "getMaxComponents", "inputDependencies", "programUniforms", "hasMeanDataOutput", "hasInvStdOutput", "getShaderSource", "shaderHelper", "dataType", "tensorTypeToWsglStorageType", "variables", "inputVariable", "outputVariable", "uniforms", "fillVector", "castToF32", "sumVector", "outputs", "context", "validateInputs", "createMatMulNBitsProgramInfo", "matMulNBits", "parseMatMulNBitsAttributes", "init_matmulnbits", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "a", "aRank", "nBlocksPerCol", "blobSize", "b", "ShapeUtil", "scalesShape", "zeroPointsShape", "expectedZeroPointsSize", "maxComputeWorkgroupSizes", "maxComputeWorkgroupStorageSize", "inputShape", "dimAOuter", "dimInner", "dimBOuter", "batchDims", "batchSize", "blobSizeInWords", "dataType", "outputNumber", "getMaxComponents", 
"aComponents", "bComponents", "elementSize", "getTensorElementSize", "workgroupOutputSize", "maxNumberOfComponents", "useBlockwiseMatMulNBits", "components", "outputShape", "outputSize", "programUniforms", "inputShapeTemp", "bShape", "createTensorShapeVariables", "outputShapeTemp", "getShaderSource", "shaderHelper", "inputRank", "inputVariable", "scales", "inputVariables", "zeroPoints", "outputRank", "output", "outputVariable", "uniforms", "tensorTypeToWsglStorageType", "qDqDataType", "processOneBlock", "_", "i", "updateZeroPointIndex", "context", "createAttributeWithCacheKey", "validateInputs", "getPadConstant", "getPadReflect", "getPadEdge", "getPadWrap", "getPadSnippet", "createPadProgramInfo", "createPadAttributesFromInputs", "pad", "init_pad", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "validPads", "output", "inputRank", "padsLength", "block", "i", "getElementAt", "attributes", "outputShape", "ShapeUtil", "inputDims", "outputSize", "programUniforms", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "outputVariable", "input", "inputVariable", "dataType", "padSnippet", "uniforms", "bigInt64Pads", "value", "updatePads", "axes", "v", "pads", "context", "updatedAttributes", "validateInputs", "getAdjustedPoolAttributesAndOutputShape", "getUniformAndPadInfo", "generatePoolingCode", "createShaderKeyFromAttributes", "createAveragePoolShaderKeyFromAttributes", "createMaxPoolShaderKeyFromAttributes", "parsePoolCommonAttributes", "createAveragePoolProgramInfo", "parseAveragePoolAttributes", "averagePool", "globalPoolAttributes", "parseGlobalAveragePoolAttributes", "globalAveragePool", "createMaxPoolProgramInfo", "maxPool", "parseMaxPoolAttributes", "parseGlobalMaxPoolAttributes", "globalMaxPool", "init_pool", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_common", "inputs", "env", "input", "attributes", "isGlobalOperator", "isChannelsLast", "inputShapeAsChannelFirst", "hasDilations", "kernelShape", "strides", "dilations", "pads", "PoolConvUtil", "outputShapeAsChannelFirst", "newAttributes", "outputShapeAsChannelLast", "outputShape", "outputSize", "ShapeUtil", "kernelSize", "programUniforms", "uniforms", "kw", "sw", "pwStart", "pwEnd", "pwStartEndNotZero", "phStartEndNotZero", "kh", "sh", "phStart", "phEnd", "kernelStrides", "hasPads", "sum", "cur", "shaderHelper", "x", "rank", "outputShapeRank", "op1", "op2", "start", "dataType", "output", "outputVariable", "codeW", "codeH", "codeHEnd", "dimIdxW", "dimIdxH", "stridesRank", "padsRank", "padCode", "getElementAt", "name", "adjustedAttributes", "inputVariable", "createTensorShapeVariables", "inputDependencies", "countIncludePad", "attr", "averagePoolAttributes", "context", "format", "storageOrder", "maxPoolAttributes", "validateInputsContent", "createRangeProgramInfo", "range", "init_range", "__esmMin", "init_esm", "init_wasm_common", "init_common", "start", "limit", "delta", "sameStartLimit", "increasingRangeNegativeStep", "decreasingRangePositiveStep", "dataType", "numElements", "outputShape", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "output", "outputVariable", "wgslType", "uniforms", "context", "env", "validateScales", "updateScales", "validateInputs", "getOriginalCoordinateFromResizedCoordinate", "getNearestPixelFromOriginal", "updateRoI", "initOutputShape", "adjustOutputShape", "calculateOriginalIndicesFromOutputIndices", "calculateInputIndicesFromOutputIndices", "checkInputIndices", 
"setChannelAndBatchIndices", "bilinearInterpolation", "bicubicInterpolation", "trilinearInterpolation", "createResizeProgramInfo", "getOpsetVersionFromCustomDataBuffer", "resize", "parseResizeAttributes", "init_resize", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "scales", "attributes", "value", "axes", "rank", "newScales", "index", "inputs", "opsetVersion", "sizes", "roi", "roiInputIndex", "scalesInputIndex", "sizesInputIndex", "coordinateTransferMode", "dType", "nearestMode", "roiTmp", "roiLocal", "v", "i", "inputShape", "outputShape", "scaleInPolicy", "adjustedOutputShape", "output", "scalesLength", "roiLength", "getElementAt", "input", "useExtrapolation", "channelIdx", "batchIdx", "spacialDims", "extrapolationValue", "heightIdx", "widthIdx", "cubicCoeffA", "excludeOutside", "is2D", "isNchw", "createCubicInterpolationFunction", "idx", "direction", "depthIdx", "inputTensor", "scalesInput", "roiInput", "outputVariable", "inputVariable", "outputSize", "ShapeUtil", "noScale", "d", "dataType", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "customDataBuffer", "antialias", "coordinateTransformMode", "keepAspectRatioPolicy", "mode", "createAttributeWithCacheKey", "validateInputs", "createRotaryEmbeddingProgramInfo", "rotaryEmbedding", "init_rotary_embedding", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "input", "positionIds", "cosCache", "sinCache", "numHeads", "rotaryEmbeddingDim", "ShapeUtil", "batchSize", "sequenceLength", "maxSequenceLength", "hiddenSize", "headSize", "interleaved", "scale", "batchStride", "halfRotaryEmbeddingDim", "globalShape", "globalStrides", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "inputVariable", "output", "outputVariable", "WORKGROUP_SIZE", "createAttributeWithCacheKey", "context", "validateInputs", "createSkipLayerNormProgramInfo", "skipLayerNorm", "init_skip_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "input", "skip", "gamma", "hiddenSize", "sequenceLength", "beta", "bias", "attributes", "outputCount", "isTraining", "simplified", "inputShape", "inputSize", "ShapeUtil", "outputShape", "outputSize", "meanInvStdDevDim", "hasBetaInput", "hasBiasInput", "hasMeanOutput", "hasInvStdDevOutput", "hasInputSkipBiasSumOutput", "workgroupSize", "components", "getMaxComponents", "programUniforms", "getShaderSource", "shaderHelper", "uniformsArray", "variables", "inputVariable", "outputVariable", "dataType", "tensorTypeToWsglStorageType", "vecDataType", "castToF32", "sumVector", "outputs", "_input", "_index", "context", "validateInputs", "readInput", "createSliceAttributesFromInputs", "fixStartEndValues", "calculateInputIndicesImpl", "createSliceProgramInfo", "slice", "parseSliceAttributes", "init_slice", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "_", "idx", "input", "v", "starts", "ends", "axes", "createAttributeWithCacheKey", "value", "index", "inputShape", "steps", "newValue", "output", "getElementAt", "inputSize", "ShapeUtil", "step", "start", "i", "end", "signs", "array", "numSteps", "newEnd", "newStart", "outputShape", "axis", "outputTensorInfo", "outputVariable", "inputVariable", "outputSize", "uniforms", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "validateInputs", "createSoftmaxProgramInfo", 
"softmax", "parseSoftmaxAttributes", "init_softmax", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "input", "attributes", "shape", "outputSize", "ShapeUtil", "WG", "axis", "cols", "rows", "components", "getMaxComponents", "packedCols", "maxVector", "name", "x", "inputVariable", "output", "outputVariable", "valueType", "threadMaxDecl", "tensorTypeToWsglStorageType", "getShaderSource", "shaderHelper", "sumVector", "context", "createAttributeWithCacheKey", "validateInputs", "createSplitAttributesFromInputs", "calculateOutputIndexImpl", "writeBufferDataImpl", "createSplitProgramInfo", "split", "parseSplitAttributes", "init_split", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "splitSizes", "numOutputs", "v", "createAttributeWithCacheKey", "numberOfTensors", "getElementAt", "outputs", "codeLines", "i", "returnSnippet", "inputShape", "inputSize", "ShapeUtil", "dataType", "axis", "input", "inputVariable", "sizeInSplitAxis", "outputsTensorInfo", "outputShapes", "previousSum", "programUniforms", "outputShape", "outputVariable", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "createWhereOpProgramShader", "createWhereOpProgramInfo", "where", "init_where", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "inputs", "dimsOutput", "isBroadcast", "typeOutput", "output", "outputVariable", "inputVariable", "b", "c", "assignment", "expression", "a", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "expressionC", "dimsA", "dimsB", "dimsC", "outputDataType", "ShapeUtil", "outputShape", "outputSize", "calculatedShape", "BroadcastUtil", "vecSize", "createTensorShapeVariables", "context", "WEBGPU_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_argminmax", "init_attention", "init_batch_norm", "init_bias_add", "init_bias_split_gelu", "init_binary_op", "init_concat", "init_conv", "init_conv_transpose", "init_cumsum", "init_depth_to_space", "init_einsum", "init_expand", "init_fast_gelu", "init_gather", "init_gather_elements", "init_gemm", "init_group_query_attention", "init_instance_norm", "init_layer_norm", "init_matmul", "init_matmulnbits", "init_multihead_attention", "init_pad", "init_pool", "init_range", "init_reduce", "init_resize", "init_rotary_embedding", "init_skip_layer_norm", "init_slice", "init_softmax", "init_split", "init_tile", "init_transpose", "init_unary_op", "init_where", "abs", "acos", "acosh", "add", "argMax", "parseArgMinMaxAttributes", "argMin", "asin", "asinh", "atan", "atanh", "attention", "averagePool", "parseAveragePoolAttributes", "batchNorm", "biasAdd", "biasSplitGelu", "cast", "parseCastAttributes", "ceil", "clip", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "cosh", "cumsum", "parseCumSumAttributes", "depthToSpace", "parseDepthToSpaceAttributes", "div", "einsum", "parseEinsumAttributes", "elu", "parseAlphaAttributes", "equal", "erf", "exp", "expand", "fastGelu", "floor", "gather", "parseGatherAttributes", "gatherElements", "parseGatherElementsAttributes", "gelu", "gemm", "parseGemmAttributes", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "parseGlobalMaxPoolAttributes", "greater", "greaterOrEqual", "groupQueryAttention", "parseGroupQueryAttentionAttributes", "hardSigmoid", "parseHardSigmoidAttributes", "instanceNorm", "layerNorm", 
"leakyRelu", "less", "lessOrEqual", "log", "matMul", "matMulNBits", "parseMatMulNBitsAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "multiHeadAttention", "parseMultiHeadAttentionAttributes", "neg", "not", "pad", "pow", "quickgelu", "range", "reciprocal", "reduceMin", "reduceMean", "reduceMax", "reduceSum", "reduceProd", "reduceL1", "reduceL2", "reduceLogSum", "reduceLogSumExp", "reduceSumSquare", "relu", "resize", "parseResizeAttributes", "rotaryEmbedding", "sigmoid", "sin", "sinh", "slice", "parseSliceAttributes", "skipLayerNorm", "split", "parseSplitAttributes", "sqrt", "softmax", "parseSoftmaxAttributes", "sub", "tan", "tanh", "thresholdedRelu", "tile", "transpose", "parseTransposeAttributes", "where", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_log", "init_common", "backend", "key", "artifact", "buildArtifact", "inputs", "outputs", "dispatchGroup", "uniformBufferBinding", "TRACE_FUNC_BEGIN", "device", "computePassEncoder", "entries", "input", "output", "bindGroup", "commandInfo", "TRACE_FUNC_END", "programInfo", "normalizedDispatchGroupSize", "extensions", "shaderHelper", "createShaderHelper", "userCode", "code", "shaderModule", "LOG_DEBUG", "computePipeline", "x", "y", "z", "limitPerDimension", "size", "dispatchAverage", "getProgramInputTensorInfoDependencyKey", "getProgramInfoUniqueKey", "AdapterInfoImpl", "WebGpuBackend", "init_backend_webgpu", "__esmMin", "init_esm", "init_wasm_common", "init_log", "init_tensor_view", "init_gpu_data_manager", "init_op_resolve_rules", "init_program_manager", "inputTensors", "inputDependencies", "inputInfos", "i", "type", "rank", "dims", "programInfo", "is1DimensionDispatch", "key", "adapterInfo", "architecture", "vendor", "data", "env", "adapter", "requiredFeatures", "deviceDescriptor", "createGpuDataManager", "ProgramManager", "configureLogger", "ev", "commandEncoder", "computePassDescriptor", "TRACE_FUNC_BEGIN", "queryReadBuffer", "mappedData", "pendingKernels", "pendingKernelInfo", "kernelId", "kernelInfo", "kernelType", "kernelName", "programName", "inputTensorViews", "outputTensorViews", "startTimeU64", "endTimeU64", "startTime", "endTime", "value", "tensorDataTypeEnumToString", "inputShapes", "outputShapes", "TRACE", "TRACE_FUNC_END", "program", "outputIndices", "createKernelOutput", "createIntermediateOutput", "outputCount", "inputDatas", "gpuData", "outputs", "dispatchGroup", "programUniforms", "validatedOutputIndices", "_", "outputDatas", "isTemporary", "isPersistent", "tensorView", "persistentData", "uniformBufferBinding", "currentOffset", "offsets", "v", "sizeOfElement", "sizeOfVecOrMat", "baseAlignment", "elementPerVecOrMat", "maxAlignmentOfField", "arrayBuffer", "offset", "uniformBufferData", "normalizedDispatchGroup", "artifact", "LOG_DEBUG", "uniform", "actualType", "actualLength", "length", "gpuDataId", "src", "dst", "getTargetBuffer", "size", "ptr", "attribute", "op", "WEBGPU_OP_RESOLVE_RULES", "context", "errors", "kernel", "kernelEntry", "attributes", "useErrorScope", "e", "err", "sessionId", "index", "buffer", "sessionInputOutputMapping", "previousBuffer", "id", "bufferInfo", "gpuBuffer", "downloadGpuData", "createView", "sessionCommandList", "sessionPendingKernels", "computePassEncoder", "command", "init_exports", "__export", "init", "TensorViewImpl", "ComputeContextImpl", "init_init", "__esmMin", "init_wasm_common", "init_backend_webgpu", "init_log", "init_util", "_TensorViewImpl", "module", "dataType", "data", "dims", "elementCount", "ShapeUtil", "newDims", "backend", "contextDataOffset", 
"heapU32", "dataIndex", "inputCount", "inputs", "i", "dim", "d", "program", "inputsOutputsMapping", "mappedInputs", "outputIndices", "createKernelOutput", "index", "createTemporaryOutput", "elementSize", "getTensorElementSize", "bufferSize", "gpuDataId", "stack", "offset", "e", "name", "env", "gpuAdapter", "jsepInit", "WebGpuBackend", "size", "ptr", "src", "dst", "isSourceGpu", "LOG_DEBUG", "dataOffset", "kernelType", "kernelId", "attribute", "kernel", "sessionHandle", "errors", "context", "initOrt", "initRuntime", "initEp", "activeSessions", "getSessionInputOutputCount", "copyFromExternalBuffer", "createSession", "releaseSession", "prepareInputOutputTensor", "run", "endProfiling", "extractTransferableBuffers", "init_wasm_core_impl", "__esmMin", "init_run_options", "init_session_options", "init_wasm_common", "init_wasm_factory", "init_wasm_utils", "init_wasm_utils_load_file", "numThreads", "loggingLevel", "getInstance", "checkLastError", "env", "logLevelStringToEnum", "epName", "initJsep", "adapter", "powerPreference", "forceFallbackAdapter", "sessionHandle", "wasm", "stack", "dataOffset", "model", "modelDataOffset", "modelData", "options", "modelDataLength", "sessionOptionsHandle", "ioBindingHandle", "allocs", "inputNamesUTF8Encoded", "outputNamesUTF8Encoded", "setSessionOptions", "loadingPromises", "file", "path", "loadFile", "data", "provider", "webnnOptions", "context", "gpuDevice", "deviceType", "inputCount", "outputCount", "enableGraphCapture", "inputNames", "outputNames", "outputPreferredLocations", "i", "name", "nameString", "location", "bindingState", "l", "dataLocationStringToEnum", "e", "buf", "alloc", "sessionId", "session", "ioBindingState", "tensor", "tensorHandles", "index", "dataType", "dims", "rawData", "dataByteLength", "gpuBuffer", "elementSizeInBytes", "getTensorElementSize", "tensorDataTypeStringToEnum", "a", "b", "registerBuffer", "dataIndex", "allocWasmString", "dimsOffset", "dimIndex", "d", "inputIndices", "inputTensors", "outputIndices", "outputTensors", "inputOutputBound", "runOptionsHandle", "runOptionsAllocs", "inputTensorHandles", "outputTensorHandles", "inputOutputAllocs", "beforeRunStack", "inputValuesOffset", "inputNamesOffset", "outputValuesOffset", "outputNamesOffset", "setRunOptions", "inputValuesIndex", "inputNamesIndex", "outputValuesIndex", "outputNamesIndex", "handle", "outputPreferredLocationsEncoded", "errorCode", "output", "beforeGetTensorDataStack", "tensorDataOffset", "keepOutputTensor", "type", "tensorDataIndex", "dimsLength", "size", "tensorDataTypeEnumToString", "preferredLocation", "stringData", "offset", "maxBytesToRead", "getBuffer", "elementSize", "isGpuBufferSupportedType", "typedArrayConstructor", "tensorTypeToTypedArrayConstructor", "v", "p", "profileFileName", "tensors", "buffers", "isProxy", "proxyWorker", "initializing", "initialized", "aborted", "temporaryObjectUrl", "initWasmCallbacks", "queuedCallbacks", "enqueueCallbacks", "ensureWorker", "onProxyWorkerMessage", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "copyFromExternalBuffer", "createSession", "releaseSession", "run", "endProfiling", "init_proxy_wrapper", "__esmMin", "init_esm", "init_wasm_core_impl", "init_wasm_factory", "init_wasm_utils_import", "env", "type", "callbacks", "queue", "ev", "resolve", "reject", "importProxyWorker", "objectUrl", "worker", "message", "e", "initializeWebAssembly", "initRuntime", "epName", "initEp", "buffer", "model", "options", "transferable", "sessionId", "inputIndices", "inputs", "outputIndices", "outputs", "t", 
"serializableInputs", "extractTransferableBuffers", "encodeTensorMetadata", "decodeTensorMetadata", "OnnxruntimeWebAssemblySessionHandler", "init_session_handler_inference", "__esmMin", "init_esm", "init_proxy_wrapper", "init_wasm_common", "init_wasm_utils_env", "init_wasm_utils_load_file", "tensor", "getName", "Tensor", "dataType", "isGpuBufferSupportedType", "gpuBuffer", "download", "dispose", "path", "copyFromExternalBuffer", "loadFile", "pathOrBuffer", "options", "TRACE_FUNC_BEGIN", "model", "createSession", "TRACE_FUNC_END", "releaseSession", "feeds", "fetches", "inputArray", "inputIndices", "kvp", "name", "index", "outputArray", "outputIndices", "inputs", "t", "i", "outputs", "results", "run", "resultMap", "endProfiling", "initializeFlags", "OnnxruntimeWebAssemblyBackend", "init_backend_wasm", "__esmMin", "init_esm", "init_proxy_wrapper", "init_session_handler_inference", "init_wasm_utils_import", "env", "numCpuLogicalCores", "backendName", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "pathOrBuffer", "options", "handler", "OnnxruntimeWebAssemblySessionHandler", "backend_wasm_inference_exports", "__export", "wasmBackend", "init_backend_wasm_inference", "__esmMin", "init_backend_wasm", "OnnxruntimeWebAssemblyBackend", "init_esm", "version", "lib_default", "esm_exports", "wasmBackend", "registerBackend", "env", "version"] }